pyNIBS-0.2024.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
  2. pyNIBS-0.2024.8.dist-info/METADATA +723 -0
  3. pyNIBS-0.2024.8.dist-info/RECORD +107 -0
  4. pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
  5. pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
  6. pynibs/__init__.py +34 -0
  7. pynibs/coil.py +1367 -0
  8. pynibs/congruence/__init__.py +15 -0
  9. pynibs/congruence/congruence.py +1108 -0
  10. pynibs/congruence/ext_metrics.py +257 -0
  11. pynibs/congruence/stimulation_threshold.py +318 -0
  12. pynibs/data/configuration_exp0.yaml +59 -0
  13. pynibs/data/configuration_linear_MEP.yaml +61 -0
  14. pynibs/data/configuration_linear_RT.yaml +61 -0
  15. pynibs/data/configuration_sigmoid4.yaml +68 -0
  16. pynibs/data/network mapping configuration/configuration guide.md +238 -0
  17. pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
  18. pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
  19. pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
  20. pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
  21. pynibs/data/network mapping configuration/output_documentation.md +185 -0
  22. pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
  23. pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
  24. pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
  25. pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
  26. pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
  27. pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
  28. pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
  29. pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
  30. pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
  31. pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
  32. pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
  33. pynibs/expio/Mep.py +1518 -0
  34. pynibs/expio/__init__.py +8 -0
  35. pynibs/expio/brainsight.py +979 -0
  36. pynibs/expio/brainvis.py +71 -0
  37. pynibs/expio/cobot.py +239 -0
  38. pynibs/expio/exp.py +1876 -0
  39. pynibs/expio/fit_funs.py +287 -0
  40. pynibs/expio/localite.py +1987 -0
  41. pynibs/expio/signal_ced.py +51 -0
  42. pynibs/expio/visor.py +624 -0
  43. pynibs/freesurfer.py +502 -0
  44. pynibs/hdf5_io/__init__.py +10 -0
  45. pynibs/hdf5_io/hdf5_io.py +1857 -0
  46. pynibs/hdf5_io/xdmf.py +1542 -0
  47. pynibs/mesh/__init__.py +3 -0
  48. pynibs/mesh/mesh_struct.py +1394 -0
  49. pynibs/mesh/transformations.py +866 -0
  50. pynibs/mesh/utils.py +1103 -0
  51. pynibs/models/_TMS.py +211 -0
  52. pynibs/models/__init__.py +0 -0
  53. pynibs/muap.py +392 -0
  54. pynibs/neuron/__init__.py +2 -0
  55. pynibs/neuron/neuron_regression.py +284 -0
  56. pynibs/neuron/util.py +58 -0
  57. pynibs/optimization/__init__.py +5 -0
  58. pynibs/optimization/multichannel.py +278 -0
  59. pynibs/optimization/opt_mep.py +152 -0
  60. pynibs/optimization/optimization.py +1445 -0
  61. pynibs/optimization/workhorses.py +698 -0
  62. pynibs/pckg/__init__.py +0 -0
  63. pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
  64. pynibs/pckg/libeep/__init__.py +0 -0
  65. pynibs/pckg/libeep/pyeep.so +0 -0
  66. pynibs/regression/__init__.py +11 -0
  67. pynibs/regression/dual_node_detection.py +2375 -0
  68. pynibs/regression/regression.py +2984 -0
  69. pynibs/regression/score_types.py +0 -0
  70. pynibs/roi/__init__.py +2 -0
  71. pynibs/roi/roi.py +895 -0
  72. pynibs/roi/roi_structs.py +1233 -0
  73. pynibs/subject.py +1009 -0
  74. pynibs/tensor_scaling.py +144 -0
  75. pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
  76. pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
  77. pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
  78. pynibs/tests/data/Xdmf.dtd +89 -0
  79. pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
  80. pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
  81. pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
  82. pynibs/tests/data/create_subject_testsub.py +332 -0
  83. pynibs/tests/data/data.hdf5 +0 -0
  84. pynibs/tests/data/geo.hdf5 +0 -0
  85. pynibs/tests/test_coil.py +474 -0
  86. pynibs/tests/test_elements2nodes.py +100 -0
  87. pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
  88. pynibs/tests/test_mesh_transformations.py +123 -0
  89. pynibs/tests/test_mesh_utils.py +143 -0
  90. pynibs/tests/test_nnav_imports.py +101 -0
  91. pynibs/tests/test_quality_measures.py +117 -0
  92. pynibs/tests/test_regressdata.py +289 -0
  93. pynibs/tests/test_roi.py +17 -0
  94. pynibs/tests/test_rotations.py +86 -0
  95. pynibs/tests/test_subject.py +71 -0
  96. pynibs/tests/test_util.py +24 -0
  97. pynibs/tms_pulse.py +34 -0
  98. pynibs/util/__init__.py +4 -0
  99. pynibs/util/dosing.py +233 -0
  100. pynibs/util/quality_measures.py +562 -0
  101. pynibs/util/rotations.py +340 -0
  102. pynibs/util/simnibs.py +763 -0
  103. pynibs/util/util.py +727 -0
  104. pynibs/visualization/__init__.py +2 -0
  105. pynibs/visualization/para.py +4372 -0
  106. pynibs/visualization/plot_2D.py +137 -0
  107. pynibs/visualization/render_3D.py +347 -0
@@ -0,0 +1,1987 @@
1
+ """ Functions to import data from Localite TMS Navigator """
2
+ import os
3
+ import h5py
4
+ import warnings
5
+ import datetime
6
+ import numpy as np
7
+ import pandas as pd
8
+ from tqdm import tqdm
9
+ from matplotlib import pyplot as plt
10
+ from xml.etree import ElementTree as xmlt, ElementTree as ET
11
+
12
+ import pynibs
13
+
14
+
15
+ def get_tms_elements(xml_paths, verbose=False):
16
+ """
17
+ Read the required elements from the TMS Navigator .xml file.
18
+
19
+ Parameters
20
+ ----------
21
+ xml_paths : list of str or str
22
+ Paths to coil0-file and optionally coil1-file. If there is no coil1-file, use empty string.
23
+ verbose : bool, default: False
24
+ Flag indicating verbosity.
25
+
26
+ Returns
27
+ -------
28
+ coils_array : np.ndarray of float
29
+ (3, NX4, 4), Coil0, coil1 and mean-value of N 4x4 coil-arrays.
30
+ ts_tms_lst : list of int
31
+ [N] Timestamps of valid tms-measurements.
32
+ current_lst : list of int
33
+ [N] Measured currents.
34
+ idx_invalid : list of int
35
+ List of indices of invalid coil positions (w.r.t. all timestamps, including invalid ones).
36
+ """
37
+ if isinstance(xml_paths, str):
38
+ xml_paths = [xml_paths]
39
+ # handle case if there is no coil1
40
+ if len(xml_paths) > 1 and not xml_paths[1]:
41
+ xml_paths[1] = xml_paths[0]
42
+ if len(xml_paths) == 1:
43
+ xml_paths.append(xml_paths[0])
44
+
45
+ # allocate new array and lists
46
+ coils_array, ts_tms_lst, current_lst = np.empty([3, 0, 4, 4]), [], []
47
+
48
+ # parse XML document
49
+ coil0_tree, coil1_tree = xmlt.parse(xml_paths[0]), xmlt.parse(xml_paths[1])
50
+ coil0_root, coil1_root = coil0_tree.getroot(), coil1_tree.getroot()
51
+
52
+ # iterate over all 'TriggerMarker' tags
53
+ i_stim = 0
54
+ idx_invalid = []
55
+
56
+ for coil0_tm, coil1_tm in zip(coil0_root.iter('TriggerMarker'), coil1_root.iter('TriggerMarker')):
57
+ coil_array = np.empty([0, 1, 4, 4])
58
+
59
+ # get the tag where the matrix is stored
60
+ coil0_ma, coil1_ma = coil0_tm.find('Matrix4D'), coil1_tm.find('Matrix4D')
61
+
62
+ # get coil0
63
+ coil_array = np.append(coil_array, read_coil(coil0_ma), axis=0)
64
+
65
+ # if present, get coil1
66
+ if xml_paths[0] == xml_paths[1]:
67
+ coil_array = np.append(coil_array, np.identity(4)[np.newaxis, np.newaxis, :, :], axis=0)
68
+ else:
69
+ coil_array = np.append(coil_array, read_coil(coil1_ma), axis=0)
70
+
71
+ # check for invalid coils and calculate the mean value
72
+ if not np.allclose(coil_array[0, 0, :, :], np.identity(4)) and \
73
+ not np.allclose(coil_array[1, 0, :, :], np.identity(4)):
74
+ coil_array = np.append(coil_array,
75
+ np.expand_dims((coil_array[0, :, :, :] + coil_array[1, :, :, :]) / 2, axis=0),
76
+ axis=0)
77
+ elif np.allclose(coil_array[0, 0, :, :], np.identity(4)) and not np.allclose(coil_array[1, 0, :, :],
78
+ np.identity(4)):
79
+ coil_array = np.append(coil_array, np.expand_dims(coil_array[1, :, :, :], axis=0), axis=0)
80
+ elif np.allclose(coil_array[1, 0, :, :], np.identity(4)) and not np.allclose(coil_array[0, 0, :, :],
81
+ np.identity(4)):
82
+ coil_array = np.append(coil_array, np.expand_dims(coil_array[0, :, :, :], axis=0), axis=0)
83
+ else:
84
+ idx_invalid.append(i_stim)
85
+ if verbose:
86
+ print("Removing untracked (and possibly accidental) coil position #{} (identity matrix)".format(i_stim))
87
+ i_stim += 1
88
+ continue
89
+
90
+ # print(i_stim)
91
+ i_stim += 1
92
+
93
+ coils_array = np.append(coils_array, coil_array, axis=1)
94
+
95
+ # get timestamp
96
+ ts_tms_lst.append(int(coil0_tm.get('recordingTime')))
97
+
98
+ # get current
99
+ xml_rv = coil0_tm.find('ResponseValues')
100
+ xml_va = xml_rv.findall('Value')
101
+
102
+ # if valueA is NaN, compute dI/dt with amplitudeA
103
+ if xml_va[0].get('response') == 'NaN':
104
+ current_lst.append(str(round(float(xml_va[2].get('response')) * 1.4461))) # was 1.38
105
+ else:
106
+ current_lst.append(xml_va[0].get('response'))
107
+
108
+ return [coils_array, ts_tms_lst, current_lst, idx_invalid]
109
+
110
+
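A minimal usage sketch (not part of the package; synthetic values): write a single-pulse TriggerMarker file containing only the attributes this parser reads, then parse it. With only a coil0 file, coil1 defaults to the identity matrix and the mean entry falls back to coil0. The function is assumed to be in scope, e.g. when running alongside this module.

    import os
    import tempfile
    import numpy as np

    # synthetic, tracked (non-identity) coil pose
    mat = np.eye(4)
    mat[:3, 3] = [10.0, 20.0, 30.0]
    attrs = " ".join(f'data{i}{j}="{mat[i, j]}"' for i in range(4) for j in range(4))
    xml = f"""<TriggerMarkerList>
      <TriggerMarker recordingTime="1000" description="zap 1">
        <ResponseValues>
          <Value key="valueA" response="55.0"/>
          <Value key="valueB" response="NaN"/>
          <Value key="amplitudeA" response="40.0"/>
        </ResponseValues>
        <Matrix4D {attrs}/>
      </TriggerMarker>
    </TriggerMarkerList>"""

    with tempfile.TemporaryDirectory() as tmp:
        fn = os.path.join(tmp, "TriggerMarkers_Coil0.xml")
        with open(fn, "w") as f:
            f.write(xml)
        coils, ts, currents, invalid = get_tms_elements([fn, ""], verbose=True)
        print(coils.shape)    # (3, 1, 4, 4): coil0, coil1 (identity here) and mean
        print(ts, currents)   # [1000] ['55.0']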
111
+ def read_coil(xml_ma):
112
+ """
113
+ Read coil-data from xml element.
114
+
115
+ Parameters
116
+ ----------
117
+ xml_ma : xml-element
118
+ Coil data.
119
+
120
+ Returns
121
+ -------
122
+ coil : np.ndarray of float
123
+ (4, 4) Coil elements.
124
+ """
125
+ # read all 16 matrix entries (data00 ... data33) of the marker element
126
+ coil = np.empty([1, 1, 4, 4])
127
+ for coil_index1 in range(4):
128
+ for coil_index2 in range(4):
129
+ coil[0, 0, coil_index1, coil_index2] = (float(xml_ma.get('data' + str(coil_index1) + str(coil_index2))))
130
+ return coil
131
+
132
+
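An illustrative round trip (synthetic element, not from a real session) for the ``data00`` ... ``data33`` attribute layout that ``read_coil`` expects:

    import numpy as np
    from xml.etree import ElementTree as ET

    values = np.arange(16.0).reshape(4, 4)
    xml_ma = ET.Element("Matrix4D",
                        {f"data{i}{j}": str(values[i, j]) for i in range(4) for j in range(4)})

    coil = read_coil(xml_ma)                 # shape (1, 1, 4, 4)
    assert np.allclose(coil[0, 0], values)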
133
+ def match_instrument_marker_file(xml_paths, im_path):
134
+ """
135
+ Assign the correct instrument marker condition to every trigger marker (instrument markers are read from a file).
136
+
137
+ Parameters
138
+ ----------
139
+ xml_paths : list of str
140
+ Paths to coil0-file and optionally coil1-file. If there is no coil1-file, use an empty string.
141
+ im_path : str
142
+ Path to instrument-marker-file.
143
+
144
+ Returns
145
+ -------
146
+ coil_cond_lst : list of str
147
+ Matched conditions.
148
+ drop_idx : list of int
149
+ Indices of trigger markers that were dropped.
150
+ """
151
+ tm_array, tms_time_arr, tms_cur_arr, tms_idx_invalid = get_tms_elements(xml_paths)
152
+ # get coil mean value
153
+ tm_array = tm_array[2]
154
+ im_array, im_cond_lst = get_marker(im_path, markertype='InstrumentMarker')[:2]
155
+
156
+ # get indices of conditions
157
+ im_index_lst, drop_idx = match_tm_to_im(tm_array, im_array, tms_time_arr, tms_cur_arr)
158
+
159
+ # list to save conditions
160
+ coil_cond_lst = []
161
+
162
+ for cond_index in im_index_lst:
163
+ coil_cond_lst.append(im_cond_lst[cond_index])
164
+ return coil_cond_lst, drop_idx
165
+
166
+
167
+ def match_instrument_marker_string(xml_paths, condition_order):
168
+ """
169
+ Assign the correct instrument marker condition to every trigger marker (instrument markers are taken from a list of strings).
170
+
171
+ Parameters
172
+ ----------
173
+ xml_paths : list of str
174
+ Paths to coil0-file and optionally coil1-file. If there is no coil1-file, use an empty string.
175
+ condition_order : list of str
176
+ Conditions in the right order.
177
+
178
+ Returns
179
+ -------
180
+ coil_cond_lst : list of str
181
+ Strings containing the matched conditions.
182
+ drop_idx : list of int
183
+ Indices of trigger markers that were dropped.
184
+ """
185
+ drop_idx = []
186
+ max_time_dif = 90000
187
+ max_mep_dif = 7
188
+
189
+ tm_pos_arr, tms_time_arr, tms_cur_arr, tms_idx_invalid = get_tms_elements(xml_paths)
190
+
191
+ # get coil mean value
192
+ tm_pos_arr = tm_pos_arr[2, :, :, :]
193
+
194
+ # list for condition results
195
+ conditions = []
196
+
197
+ # index of instrument marker
198
+ cond_idx = 0
199
+
200
+ # iterate over all trigger markers
201
+ for tm_index in range((tm_pos_arr.shape[0]) - 1):
202
+ conditions.append(condition_order[cond_idx])
203
+ if float(tms_cur_arr[tm_index]) == 0.:
204
+ drop_idx.append(tm_index)
205
+ tm_matrix_post = tm_pos_arr[tm_index + 1, :, :]
206
+ tm_matrix = tm_pos_arr[tm_index, :, :]
207
+
208
+ same_tm = arrays_similar(tm_matrix, tm_matrix_post)
209
+ time_dif = tms_time_arr[tm_index + 1] - tms_time_arr[tm_index] > max_time_dif
210
+ amp_dif = np.abs(float(tms_cur_arr[tm_index + 1]) - float(tms_cur_arr[tm_index])) > max_mep_dif
211
+ if not same_tm and time_dif and amp_dif:
212
+ arrays_similar(tm_matrix, tm_matrix_post)
213
+ cond_idx += 1
214
+ if cond_idx == len(condition_order):
215
+ raise ValueError("Too many coil conditions found!")
216
+
217
+ # assign the condition of the last processed marker to the very last trigger marker as well
218
+ conditions.append(conditions[-1])
219
+ if cond_idx != len(condition_order) - 1:
220
+ raise ValueError("Did not find all coil positions!")
221
+
222
+ return conditions, drop_idx
223
+
224
+
225
+ def arrays_similar(tm_matrix, tm_matrix_post, # , tm_mean_last,
226
+ pos_rtol=0, pos_atol=3.6, ang_rtol=.1, ang_atol=.1):
227
+ """
228
+ Compares angles and position for similarity.
229
+
230
+ Splitting the comparison into angles and position is advisable, as the absolute tolerance (atol) should be
231
+ different for angles (degree) and position (millimeter) comparisons.
232
+
233
+ Parameters
234
+ ----------
235
+ tm_matrix : array-like
236
+ (4, 4) TMS Navigator trigger marker or instrument marker array.
237
+ tm_matrix_post : array-like
238
+ (4, 4) TMS Navigator trigger marker or instrument marker array.
239
+ pos_rtol : float, default: 0
240
+ Relative tolerance for position comparison.
241
+ pos_atol : float, default: 3.6
242
+ Absolute tolerance for position comparison in millimeters.
243
+ ang_rtol : float, default: 0.1
244
+ Relative tolerance for angle comparison in degrees.
245
+ ang_atol : float, default: 0.1
246
+ Absolute tolerance for angle comparison in degrees.
247
+
248
+ Returns
249
+ -------
250
+ next_same : bool
251
+ True if the position and angle differences between `tm_matrix` and `tm_matrix_post`
252
+ are within the specified tolerances, False otherwise.
253
+ """
254
+ # position
255
+ pos = np.allclose(tm_matrix[0:3, 3], tm_matrix_post[0:3, 3], rtol=pos_rtol, atol=pos_atol)
256
+
257
+ # angles
258
+ ang = np.allclose(tm_matrix[0:3, 0:2], tm_matrix_post[0:3, 0:2], rtol=ang_rtol, atol=ang_atol)
259
+
260
+ # if tm_mean_last is not None:
261
+ # last_pos = np.allclose(tm_matrix[0:3, 3], tm_mean_last[0:3, 3], rtol=pos_rtol, atol=pos_atol)
262
+ # last_ang = np.allclose(tm_matrix[0:3, 0:2], tm_mean_last[0:3, 0:2], rtol=ang_rtol, atol=ang_atol)
263
+
264
+ next_same = pos and ang
265
+ # last_same = last_pos and last_ang
266
+ return next_same
267
+
268
+
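A quick check of the default tolerances (illustrative numbers only): a 2 mm positional offset stays within ``pos_atol=3.6``, a 5 mm offset does not.

    import numpy as np

    a = np.eye(4)
    b = a.copy()
    b[0, 3] += 2.0     # 2 mm shift -> within pos_atol=3.6
    c = a.copy()
    c[0, 3] += 5.0     # 5 mm shift -> outside the tolerance

    print(arrays_similar(a, b))   # True
    print(arrays_similar(a, c))   # False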
269
+ def match_tm_to_im(tm_array, im_array, tms_time_arr, tms_cur_arr):
270
+ """
271
+ Match trigger markers with instrument markers and get the index of the best-fitting instrument marker.
272
+
273
+ Parameters
274
+ ----------
275
+ tm_array : np.ndarray of float
276
+ (N, 4, 4) Mean-value of Nx4x4 coil matrices.
277
+ im_array : np.ndarray of float
278
+ (M, 4, 4) Instrument-marker-matrices.
279
+ tms_time_arr : np.ndarray
280
+ Array of TMS times in seconds.
281
+ tms_cur_arr : np.ndarray
282
+ Array of TMS currents corresponding to ``tms_time_arr``.
283
+
284
+ Returns
285
+ -------
286
+ im_index_lst : list of int
287
+ Indices of best fitting instrument markers.
288
+ drop_idx : list of int
289
+ Indices of trigger markers that were dropped.
290
+ """
291
+ max_time_dif = (tms_time_arr[1] - tms_time_arr[0]) * 3
292
+ # max_mep_dif = 9
293
+
294
+ im_index_lst = []
295
+ drop_idx = []
296
+
297
+ for tm_index in range(tm_array.shape[0]):
298
+ # after first zap, check time diff
299
+ if tm_index > 0:
300
+ if tms_time_arr[tm_index] - tms_time_arr[tm_index - 1] < max_time_dif:
301
+ im_index_lst.append(im_index_lst[-1])
302
+ continue
303
+
304
+ allclose_index_lst = []
305
+ diff_small = []
306
+
307
+ atol_ori = 0.4
308
+ atol_pos = 3
309
+ repeat = False
310
+
311
+ # tm = tm_array[tm_index, :, :]
312
+
313
+ # proc_diffs = np.argmin[procrustes(tm, im_array[i])[2] for i in range(im_array.shape[0])]
314
+
315
+ # diffs = np.abs(tm - im_array)
316
+ # diffs[0:3, 0:3] /= np.max(diffs[:, 0:3, 0:4], axis=0)
317
+ # best_fit = np.argmin(np.array([np.sum(diffs[i]) for i in range(len(diffs))]))
318
+ # small_diff_ori = int(np.argmin(np.array([np.sum(diffs[i][0:3, 0:3]) for i in range(len(diffs))])))
319
+ # small_diff_pos = int(np.argmin(np.array([np.sum(diffs[i][0:3, 3]) for i in range(len(diffs))])))
320
+ # a = rot_to_quat(tm[:3,:3])[1:]
321
+ # b = [quaternion_diff(a, rot_to_quat(im_array[i, :3, :3])[1:]) for i in range(im_array.shape[0])]
322
+
323
+ while not allclose_index_lst:
324
+ if repeat:
325
+ print(('Warning: Trigger marker #{:0>4}: No matching instrument marker within '
326
+ 'atol_ori={} and atol_pos={}! Increasing tolerances by 0.1 and 0.5.'
327
+ .format(tm_index, atol_ori, atol_pos)))
328
+
329
+ atol_ori = atol_ori + 0.1
330
+ atol_pos = atol_pos + 0.5
331
+
332
+ for im_index in range(im_array.shape[0]):
333
+
334
+ # # check if arrays are close
335
+ # if np.allclose(tm_array[tm_index, :, :], im_array[im_index, :, :], rtol=rtol):
336
+ # allclose_index_lst.append(im_index)
337
+
338
+ # check if arrays are close
339
+ diff = np.abs(tm_array[tm_index, :, :] - im_array[im_index, :, :])
340
+
341
+ if (diff[0:3, 0:3] < atol_ori).all() and (diff[0:3, 3] < atol_pos).all():
342
+ diff_small.append(diff)
343
+ allclose_index_lst.append(im_index)
344
+
345
+ if not allclose_index_lst:
346
+ allclose_index_lst.append(-1)
347
+ # repeat = True
348
+
349
+ # if multiple arrays are close, choose the one with the smallest difference
350
+ if len(allclose_index_lst) > 1:
351
+ smallest_value_index = int(np.argmin(np.array([np.sum(diff_small[i]) for i in range(len(diff_small))])))
352
+ small_diff_ori = int(np.argmin(np.array([np.sum(diff_small[i][0:3, 0:3]) for i in range(len(diff_small))])))
353
+ small_diff_pos = int(np.argmin(np.array([np.sum(diff_small[i][0:3, 3]) for i in range(len(diff_small))])))
354
+ if not small_diff_ori == small_diff_pos:
355
+ # print allclose_index_lst
356
+ print("Warning: Triggermarker #{:0>4}: "
357
+ "Cannot decide for instrument marker , dropping this one. ".format(tm_index))
358
+ drop_idx.append(tm_index)
359
+ # im_index_lst.append(im_index_lst[-1])
360
+ # else:
361
+ # assert best_fit == allclose_index_lst[smallest_value_index]
362
+ im_index_lst.append(allclose_index_lst[smallest_value_index])
363
+
364
+ else:
365
+ # assert best_fit == allclose_index_lst[0]
366
+ im_index_lst.append(allclose_index_lst[0])
367
+
368
+ # # if multile arrays are close, choose value,
369
+ # where the difference to the instrument marker has the smallest
370
+ # # frobenius norm
371
+ # if len(allclose_index_lst) > 1:
372
+ # smallest_value_index = int(np.argmin(np.linalg.norm(im_array[allclose_index_lst, :, :] -
373
+ # tm_array[tm_index, :, :], axis=(1, 2))))
374
+ # im_index_lst.append(allclose_index_lst[smallest_value_index])
375
+ # else:
376
+ # im_index_lst.append(allclose_index_lst[0])
377
+
378
+ return im_index_lst, drop_idx
379
+
380
+
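An illustrative call with synthetic matrices (hypothetical data, assuming this module's functions are in scope): two trigger markers that both lie within the default tolerances of the second of two instrument markers.

    import numpy as np

    im = np.stack([np.eye(4), np.eye(4)])
    im[1, :3, 3] = [30.0, 0.0, 0.0]      # second instrument marker, shifted by 30 mm

    tm = np.stack([im[1].copy(), im[1].copy()])
    tm[0, :3, 3] += 1.0                  # well within the positional tolerance of im[1]
    tm[1, :3, 3] += 0.5

    times = np.array([0.0, 10.0])        # close in time -> the second zap reuses the last match
    currents = np.array([50.0, 50.0])

    idx, dropped = match_tm_to_im(tm, im, times, currents)
    print(idx, dropped)                  # [1, 1] []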
381
+ def get_marker(im_path, markertype):
382
+ """
383
+ Read markers (trigger or instrument) and conditions from a TMS Navigator .xml file.
384
+
385
+ Coil axes are defined as specified by Localite.
386
+
387
+ Parameters
388
+ ----------
389
+ im_path : str or list of str
390
+ Path to instrument-marker-file.
391
+ markertype : str
392
+ One of ['TriggerMarker','InstrumentMarker'].
393
+
394
+ Returns
395
+ -------
396
+ im_array : np.ndarray of float
397
+ (M, 4, 4) Instrument marker matrices.
398
+ marker_descriptions : list of str
399
+ Labels of the marker conditions.
400
+ marker_times : list of float
401
+ Onset times.
402
+ """
403
+ assert markertype in ['TriggerMarker', 'InstrumentMarker']
404
+ if isinstance(im_path, str):
405
+ return get_single_marker_file(im_path, markertype)
406
+
407
+ elif isinstance(im_path, list):
408
+ # if multiple triggermarker files are provided, pick a marker with data
409
+ matsimnibs, marker_descriptions, marker_times = [], [], []
410
+
411
+ # get data from all files
412
+ for im in im_path:
413
+ im_array_t, marker_descriptions_t, marker_times_t = get_single_marker_file(im, markertype)
414
+ matsimnibs.append(im_array_t)
415
+ marker_descriptions.append(marker_descriptions_t)
416
+ marker_times.append(marker_times_t)
417
+
418
+ # get indices for all files where markers are empty
419
+ # empty_arr = []
420
+ # for arr in im_array: # arr = im_array[0]
421
+ # empty_arr.append(markers_are_empty(arr))
422
+ # # assert np.all(np.sum(np.array(empty_arr).astype(int), axis=0) == 1)
423
+ #
424
+ # # go through the zaps and pick a marker with data.
425
+ # idx = []
426
+ # tmp = 0
427
+ # for i in range(len(im_array[0])):
428
+ # for j in range(len(im_array)):
429
+ # if not empty_arr[j][i]:
430
+ # tmp = j
431
+ # # if all are empty, just use the last value (is empty anyway)
432
+ # idx.append(tmp) # append
433
+ #
434
+ # # build marker idx based on idx
435
+ # final_arr = []
436
+ # for it, i in enumerate(idx):
437
+ # final_arr.append(im_array[i][it])
438
+ return np.array(matsimnibs), marker_descriptions, marker_times
439
+ else:
440
+ raise NotImplementedError(f"type {type(im_path)} not implemented.")
441
+
442
+
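A self-contained sketch with a minimal synthetic InstrumentMarker file; only the elements this reader actually touches are included, and the root tag name is an assumption.

    import os
    import tempfile

    attrs = " ".join(f'data{i}{j}="{float(i == j)}"' for i in range(4) for j in range(4))
    xml = ('<InstrumentMarkerList><InstrumentMarker>'
           f'<Marker description="M1_45deg"><Matrix4D {attrs}/></Marker>'
           '</InstrumentMarker></InstrumentMarkerList>')

    with tempfile.TemporaryDirectory() as tmp:
        fn = os.path.join(tmp, "InstrumentMarker.xml")
        with open(fn, "w") as f:
            f.write(xml)
        im_array, descriptions, times = get_marker(fn, markertype='InstrumentMarker')
        print(im_array.shape, descriptions, times)   # (1, 4, 4) ['M1_45deg'] []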
443
+ def markers_are_empty(arr):
444
+ return [marker_is_empty(arr[i, :, :]) for i in range(arr.shape[0])]
445
+
446
+
447
+ def marker_is_empty(arr):
448
+ return np.all(arr == np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]))
449
+
450
+
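An "empty" marker is the 4x4 identity that TMS Navigator writes for untracked positions; a small sanity check:

    import numpy as np

    print(marker_is_empty(np.eye(4)))                       # True  (untracked)
    print(marker_is_empty(np.diag([1.0, 1.0, 1.0, 2.0])))   # False (non-identity)
    print(markers_are_empty(np.stack([np.eye(4)] * 3)))     # [True, True, True]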
451
+ def get_single_marker_file(im_path, markertype):
452
+ """
453
+ Read markers (trigger or instrument) and conditions from a single TMS Navigator .xml file.
454
+
455
+ Parameters
456
+ ----------
457
+ im_path : str or list of str
458
+ Path to instrument-marker-file.
459
+ markertype : str
460
+ One of ['TriggerMarker','InstrumentMarker'].
461
+
462
+ Returns
463
+ -------
464
+ im_array : np.ndarray of float
465
+ (M, 4, 4) Instrument marker matrices.
466
+ marker_descriptions : list of str
467
+ Labels of the marker conditions.
468
+ marker_times : list of float
469
+ Onset times.
470
+ """
471
+ im_array = np.empty([0, 4, 4])
472
+ marker_descriptions = []
473
+ marker_times = []
474
+ # parse XML document
475
+ im_tree = xmlt.parse(im_path)
476
+ im_root = im_tree.getroot()
477
+
478
+ # iterate over all tags of the requested marker type
479
+ for marker_i in im_root.iter(markertype):
480
+ marker_array = np.empty([1, 4, 4])
481
+ # get the tag where the matrix is stored
482
+ if markertype == 'InstrumentMarker':
483
+ marker_object = marker_i.find('Marker')
484
+ marker_descriptions.append(marker_object.get('description'))
485
+ matrix4d = marker_object.find('Matrix4D')
486
+
487
+ elif markertype == 'TriggerMarker':
488
+ matrix4d = marker_i.find('Matrix4D')
489
+ marker_descriptions.append(marker_i.get('description'))
490
+ marker_times.append(marker_i.get('recordingTime'))
491
+
492
+ else:
493
+ raise ValueError(f"markertype '{markertype}' unknown.")
494
+
495
+ # get values
496
+ for im_index1 in range(4):
497
+ for im_index2 in range(4):
498
+ marker_array[0, im_index1, im_index2] = (float(matrix4d.get('data' + str(im_index1) + str(im_index2))))
499
+ im_array = np.append(im_array, marker_array, axis=0)
500
+
501
+ return im_array, marker_descriptions, marker_times
502
+
503
+
504
+ def read_triggermarker_localite(fn_xml):
505
+ warnings.warn("This function is deprecated. Use `read_triggermarker` instead.", DeprecationWarning)
506
+ return read_triggermarker(fn_xml)
507
+
508
+
509
+ def read_triggermarker(fn_xml):
510
+ """
511
+ Read trigger markers (coil positions, currents, intensities, conditions) from a TMS Navigator .xml file.
512
+
513
+ Parameters
514
+ ----------
515
+ fn_xml : str
516
+ Path to TriggerMarker.xml file
517
+ (e.g. Subject_0/Sessions/Session_YYYYMMDDHHMMSS/TMSTrigger/TriggerMarkers_Coil1_YYYYMMDDHHMMSS.xml)
518
+
519
+ Returns
520
+ -------
521
+ list of
522
+
523
+ m_nnav : np.ndarray of float
524
+ (4, 4, N) Neuronavigation coordinates of N stimuli (4x4 matrices).
525
+ didt : np.ndarray of float
526
+ (N) Rate of change of coil current in (A/us).
527
+ stim_int : np.ndarray of float
528
+ (N) Stimulator intensity in (% MSO).
529
+ descr : list of str
530
+ (N) Labels of the instrument-marker-conditions.
531
+ rec_time : list of str
532
+ (N) Recording time in ms.
533
+ """
534
+ m_nnav = np.empty([4, 4, 0])
535
+ descr = []
536
+ stim_int = []
537
+ didt = []
538
+ rec_time = []
539
+
540
+ # parse XML document
541
+ im_tree = xmlt.parse(fn_xml)
542
+ im_root = im_tree.getroot()
543
+
544
+ # iterate over all 'TriggerMarker' tags
545
+ for im_iter in im_root.iter('TriggerMarker'):
546
+ m = np.empty([4, 4, 1])
547
+
548
+ # read description
549
+ descr.append(im_iter.get('description'))
550
+ rec_time.append(im_iter.get('recordingTime'))
551
+
552
+ # read di/dt and stimulator intensity
553
+ im_rv = im_iter.find('ResponseValues').findall('Value')
554
+
555
+ for _im_rv in im_rv:
556
+ # di/dt
557
+ if _im_rv.get('key') == "valueA":
558
+ didt.append(float(_im_rv.get('response')))
559
+ # stimulator intensity
560
+ elif _im_rv.get('key') == "amplitudeA":
561
+ stim_int.append(float(_im_rv.get('response')))
562
+
563
+ # read matrix
564
+ im_ma = im_iter.find('Matrix4D')
565
+
566
+ for im_index1 in range(4):
567
+ for im_index2 in range(4):
568
+ m[im_index1, im_index2, 0] = (float(im_ma.get('data' + str(im_index1) + str(im_index2))))
569
+
570
+ # check if coil position is untracked
571
572
+ if (m[:, :, 0] == np.eye(4)).all():
573
+ print(f"Untracked coil position found for idx {m_nnav.shape[2]}.")
574
+ m[:3, :4, 0] = np.nan
575
+
576
+ m_nnav = np.append(m_nnav, m, axis=2)
577
+
578
+ didt = np.array(didt)
579
+ stim_int = np.array(stim_int)
580
+
581
+ return [m_nnav, didt, stim_int, descr, rec_time]
582
+
583
+
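A compact, self-contained sketch (synthetic file, hypothetical values) showing the five return values:

    import os
    import tempfile
    import numpy as np

    mat = np.eye(4)
    mat[:3, 3] = [5.0, -12.0, 60.0]
    attrs = " ".join(f'data{i}{j}="{mat[i, j]}"' for i in range(4) for j in range(4))
    xml = ('<TriggerMarkerList>'
           '<TriggerMarker recordingTime="2500" description="zap">'
           '<ResponseValues><Value key="valueA" response="88.0"/>'
           '<Value key="amplitudeA" response="61.0"/></ResponseValues>'
           f'<Matrix4D {attrs}/></TriggerMarker></TriggerMarkerList>')

    with tempfile.TemporaryDirectory() as tmp:
        fn = os.path.join(tmp, "TriggerMarkers_Coil0.xml")
        with open(fn, "w") as f:
            f.write(xml)
        m_nnav, didt, stim_int, descr, rec_time = read_triggermarker(fn)
        print(m_nnav.shape, didt, stim_int, descr, rec_time)
        # (4, 4, 1) [88.] [61.] ['zap'] ['2500']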
584
+ def merge_exp_data_localite(subject, coil_outlier_corr_cond, remove_coil_skin_distance_outlier, coil_distance_corr,
585
+ exp_idx=0, mesh_idx=0, drop_mep_idx=None, mep_onsets=None, cfs_data_column=None,
586
+ channels=None, verbose=False, plot=False, start_mep=18, end_mep=35):
587
+ """
588
+ Merge the TMS coil positions (TriggerMarker) and the mep data into an experiment.hdf5 file.
589
+
590
+ Parameters
591
+ ----------
592
+ subject : pynibs.subject.Subject
593
+ Subject object.
594
+ coil_outlier_corr_cond : bool
595
+ Correct outlier of coil position and orientation (+-2 mm, +-3 deg) in case of conditions.
596
+ remove_coil_skin_distance_outlier : bool
597
+ Remove outlier of coil position lying too far away from the skin surface (+- 5 mm).
598
+ coil_distance_corr : bool
599
+ Perform coil <-> head distance correction (coil is moved towards head surface until coil touches scalp).
600
+ exp_idx : str, default: 0
601
+ Experiment ID.
602
+ mesh_idx : str, default: 0
603
+ Mesh ID.
604
+ drop_mep_idx : List of int, optional
605
+ Which MEPs to remove before matching.
606
+ mep_onsets : List of int, optional
607
+ If there are multiple .cfs files per TMS Navigator session, onsets of the .cfs files in [ms], e.g. ``[0, 71186]``.
608
+ cfs_data_column : int or list of int, optional
609
+ Column(s) of dataset in .cfs file.
610
+ channels : list of str, optional
611
+ List of channel names.
612
+ verbose : bool, default: False
613
+ Flag to indicate verbosity.
614
+ plot : bool, default: False
615
+ Plot MEPs and p2p evaluation.
616
+ start_mep : float, default: 18
617
+ Start of time frame after TMS pulse where p2p value is evaluated (in ms).
618
+ end_mep : float, default: 35
619
+ End of time frame after TMS pulse where p2p value is evaluated (in ms).
620
+ """
621
+ if channels is None:
622
+ channels = ["channel_0"]
623
+ mep_paths_lst = subject.exp[exp_idx]['fn_data']
624
+ tms_paths_lst = subject.exp[exp_idx]['fn_tms_nav']
625
+ im_lst = subject.exp[exp_idx]['cond']
626
+ nii_exp_path_lst = subject.exp[exp_idx]['fn_mri_nii']
627
+ nii_conform_path = os.path.join(subject.mesh[mesh_idx]["mesh_folder"], subject.mesh[mesh_idx]["fn_mri_conform"])
628
+ fn_exp_hdf5 = subject.exp[exp_idx]['fn_exp_hdf5'][0]
629
+ fn_coil = subject.exp[exp_idx]['fn_coil']
630
+ fn_mesh_hdf5 = subject.mesh[mesh_idx]['fn_mesh_hdf5']
631
+ temp_dir = os.path.join(os.path.split(subject.exp[exp_idx]['fn_exp_hdf5'][0])[0], "nnav2simnibs")
632
+ subject_obj = subject
633
+
634
+ # allocate dict
635
+ dict_lst = []
636
+
637
+ # handle instrument marker
638
+ if len(im_lst) < len(tms_paths_lst):
639
+ for _ in range(len(tms_paths_lst)):
640
+ im_lst.append(im_lst[0])
641
+
642
+ # handle coil serial numbers
643
+ coil_sn_lst = pynibs.get_coil_sn_lst(fn_coil)
644
+
645
+ # get TMS pulse onset
646
+ tms_pulse_time = subject.exp[exp_idx]['tms_pulse_time']
647
+
648
+ # iterate over all files
649
+ if mep_onsets is None:
650
+ mep_onsets = [None] * len(mep_paths_lst)
651
+
652
+ len_conds = []
653
+
654
+ for cfs_paths, tms_paths, coil_sn, nii_exp_path, im, mep_onsets \
655
+ in zip(mep_paths_lst, tms_paths_lst, coil_sn_lst, nii_exp_path_lst, im_lst, mep_onsets):
656
+ dict_lst.extend(combine_nnav_mep(xml_paths=tms_paths,
657
+ cfs_paths=cfs_paths,
658
+ im=im,
659
+ coil_sn=coil_sn,
660
+ nii_exp_path=nii_exp_path,
661
+ nii_conform_path=nii_conform_path,
662
+ patient_id=subject.id,
663
+ tms_pulse_time=tms_pulse_time,
664
+ drop_mep_idx=drop_mep_idx,
665
+ mep_onsets=mep_onsets,
666
+ cfs_data_column=cfs_data_column,
667
+ temp_dir=temp_dir,
668
+ channels=channels,
669
+ nnav_system=subject_obj.exp[exp_idx]["nnav_system"],
670
+ mesh_approach=subject_obj.mesh[mesh_idx]["approach"],
671
+ plot=plot,
672
+ start_mep=start_mep,
673
+ end_mep=end_mep))
674
+
675
+ if len(len_conds) == 0:
676
+ len_conds.append(len(dict_lst))
677
+ else:
678
+ len_conds.append(len(dict_lst) - len_conds[-1])
679
+
680
+ # convert list of dict to dict of list
681
+ results_dct = pynibs.list2dict(dict_lst)
682
+
683
+ # check if we have a single-pulse TMS experiment where every pulse is its own condition
684
+ single_pulse_experiment = np.zeros(len(len_conds))
685
+
686
+ start = 0
687
+ stop = len_conds[0]
688
+ for i in range(len(len_conds)):
689
+ if len(np.unique(np.array(results_dct["condition"])[start:stop])) == len_conds[i]:
690
+ single_pulse_experiment[i] = True
691
+
692
+ if i < (len(len_conds) - 1):
693
+ start = stop
694
+ stop = stop + len_conds[i + 1]
695
+
696
+ # redefine the condition vector because conditions may be duplicated across multiple .cfs and .xml files
697
+ if single_pulse_experiment.all():
698
+ results_dct["condition"] = np.arange(len(dict_lst))
699
+
700
+ # reformat coil positions to 4x4 matrices
701
+ coil_0 = np.zeros((4, 4, len(dict_lst)))
702
+ coil_1 = np.zeros((4, 4, len(dict_lst)))
703
+ coil_mean = np.zeros((4, 4, len(dict_lst)))
704
+
705
+ # coil_0[3, 3, :] = 1
706
+ # coil_1[3, 3, :] = 1
707
+ # coil_mean[3, 3, :] = 1
708
+
709
+ for m in range(4):
710
+ for n in range(4):
711
+ coil_0[m, n, :] = results_dct[f"coil0_{m}{n}"]
712
+ coil_1[m, n, :] = results_dct[f"coil1_{m}{n}"]
713
+ coil_mean[m, n, :] = results_dct[f"coil_mean_{m}{n}"]
714
+
715
+ results_dct.pop(f"coil0_{m}{n}")
716
+ results_dct.pop(f"coil1_{m}{n}")
717
+ results_dct.pop(f"coil_mean_{m}{n}")
718
+
719
+ coil_0 = np.split(coil_0, coil_0.shape[2], axis=2)
720
+ coil_1 = np.split(coil_1, coil_1.shape[2], axis=2)
721
+ coil_mean = np.split(coil_mean, coil_mean.shape[2], axis=2)
722
+
723
+ coil_0 = [c.reshape((4, 4)) for c in coil_0]
724
+ coil_1 = [c.reshape((4, 4)) for c in coil_1]
725
+ coil_mean = [c.reshape((4, 4)) for c in coil_mean]
726
+
727
+ results_dct["coil_0"] = coil_0
728
+ results_dct["coil_1"] = coil_1
729
+ results_dct["coil_mean"] = coil_mean
730
+
731
+ results_dct["current"] = [float(c) for c in results_dct["current"]]
732
+
733
+ # coil outlier correction
734
+ if subject_obj.exp[exp_idx]["fn_exp_hdf5"] is not None or subject_obj.exp[exp_idx]["fn_exp_hdf5"] != []:
735
+ fn_exp_hdf5 = subject_obj.exp[exp_idx]["fn_exp_hdf5"][0]
736
+
737
+ elif subject_obj.exp[exp_idx]["fn_exp_csv"] is not None or subject_obj.exp[exp_idx]["fn_exp_csv"] != []:
738
+ fn_exp_hdf5 = subject_obj.exp[exp_idx]["fn_exp_csv"][0]
739
+
740
+ elif fn_exp_hdf5 is None or fn_exp_hdf5 == []:
741
+ fn_exp_hdf5 = os.path.join(subject_obj.subject_folder, "exp", exp_idx, "experiment.hdf5")
742
+
743
+ # remove coil position outliers (in case of conditions)
744
+ #######################################################
745
+ if coil_outlier_corr_cond:
746
+ print("Removing coil position outliers")
747
+ results_dct = pynibs.coil_outlier_correction_cond(exp=results_dct,
748
+ outlier_angle=5.,
749
+ outlier_loc=3.,
750
+ fn_exp_out=fn_exp_hdf5)
751
+
752
+ # perform coil <-> head distance correction
753
+ ###########################################
754
+ if coil_distance_corr:
755
+ print("Performing coil <-> head distance correction")
756
+ results_dct = pynibs.coil_distance_correction(exp=results_dct,
757
+ fn_geo_hdf5=fn_mesh_hdf5,
758
+ remove_coil_skin_distance_outlier=remove_coil_skin_distance_outlier,
759
+ fn_plot=os.path.split(fn_exp_hdf5)[0])
760
+
761
+ # plot finally used mep data
762
+ ############################
763
+ if plot:
764
+ print("Creating MEP plots ...")
765
+ sampling_rate = pynibs.get_mep_sampling_rate(cfs_paths[0])
766
+
767
+ # Compute start and stop idx according to sampling rate
768
+ start_mep = int((20 / 1000.) * sampling_rate)
769
+ end_mep = int((35 / 1000.) * sampling_rate)
770
+
771
+ # compute tms pulse idx in samplerate space
772
+ tms_pulse_sample = int(tms_pulse_time * sampling_rate)
773
+
774
+ for i_mep in tqdm(range(len(results_dct["mep_raw_data"]))):
775
+ for i_channel, channel in enumerate(channels):
776
+ # TODO: merge this code with calc_p2p
777
+ sweep = results_dct["mep_raw_data"][i_mep][i_channel, :]
778
+ sweep_filt = results_dct["mep_filt_data"][i_mep][i_channel, :]
779
+
780
+ # get index for begin of mep search window
781
+ # index_max_begin = np.argmin(sweep) + start_mep # get TMS impulse # int(0.221 / 0.4 * sweep.size)
782
+ # beginning of mep search window
783
+ srch_win_start = tms_pulse_sample + start_mep  # start of the MEP search window after the TMS pulse
784
+
785
+ if srch_win_start >= sweep_filt.size:
786
+ srch_win_start = sweep_filt.size - 1
787
+
788
+ # index_max_end = sweep_filt.size # int(0.234 / 0.4 * sweep.size) + 1
789
+ srch_win_end = srch_win_start + end_mep - start_mep
790
+
791
+ fn_channel = os.path.join(os.path.split(cfs_paths[0])[0], "plots", channel)
792
+
793
+ if not os.path.exists(fn_channel):
794
+ os.makedirs(fn_channel)
795
+
796
+ fn_plot = os.path.join(fn_channel, f"mep_{i_mep:04}")
797
+ t = np.arange(len(sweep)) / sampling_rate
798
+ sweep_min_idx = np.argmin(sweep_filt[srch_win_start:srch_win_end]) + srch_win_start
799
+ sweep_max_idx = np.argmax(sweep_filt[srch_win_start:srch_win_end]) + srch_win_start
800
+
801
+ plt.figure(figsize=[4.07, 3.52])
802
+ plt.plot(t, sweep)
803
+ plt.plot(t, sweep_filt)
804
+ plt.scatter(t[sweep_min_idx], sweep_filt[sweep_min_idx], 15, color="r")
805
+ plt.scatter(t[sweep_max_idx], sweep_filt[sweep_max_idx], 15, color="r")
806
+ plt.plot(t, np.ones(len(t)) * sweep_filt[sweep_min_idx], linestyle="--", color="r", linewidth=1)
807
+ plt.plot(t, np.ones(len(t)) * sweep_filt[sweep_max_idx], linestyle="--", color="r", linewidth=1)
808
+ plt.grid()
809
+ plt.legend(["raw", "filtered", "p2p"], loc='upper right')
810
+
811
+ plt.xlim([np.max((tms_pulse_time - 0.01, np.min(t))),
812
+ np.min((t[tms_pulse_sample + end_mep] + 0.1, np.max(t)))])
813
+ plt.ylim([-1.1 * np.abs(sweep_filt[sweep_min_idx]), 1.1 * np.abs(sweep_filt[sweep_max_idx])])
814
+
815
+ plt.xlabel("time in s", fontsize=11)
816
+ plt.ylabel("MEP in mV", fontsize=11)
817
+ plt.tight_layout()
818
+
819
+ plt.savefig(fn_plot, dpi=200, transparent=True)
820
+ plt.close()
821
+
822
+ # Write experimental data to hdf5
823
+ ###############################################
824
+ # stimulation data
825
+ df_stim_data = pd.DataFrame.from_dict(results_dct)
826
+ df_stim_data = df_stim_data.drop(columns=["mep"])
827
+ df_stim_data = df_stim_data.drop(columns=["mep_raw_data_time"])
828
+ df_stim_data = df_stim_data.drop(columns=["mep_filt_data"])
829
+ df_stim_data = df_stim_data.drop(columns=["mep_raw_data"])
830
+
831
+ # raw data
832
+ phys_data_raw_emg = {"time": results_dct["mep_raw_data_time"]}
833
+
834
+ for chan_idx, chan in enumerate(channels):
835
+ results_dct[f"mep_raw_data_{chan}"] = [sweep[chan_idx, :] for sweep in results_dct["mep_raw_data"]]
836
+ phys_data_raw_emg[f"mep_raw_data_{chan}"] = results_dct[f"mep_raw_data_{chan}"]
837
+
838
+ df_phys_data_raw_emg = pd.DataFrame.from_dict(phys_data_raw_emg)
839
+
840
+ # post-processed data
841
+ phys_data_postproc_emg = {"time": results_dct["mep_raw_data_time"]}
842
+
843
+ for chan_idx, chan in enumerate(channels):
844
+ phys_data_postproc_emg[f"mep_filt_data_{chan}"] = [sweep[chan_idx, :] for sweep in results_dct["mep_filt_data"]]
845
+ phys_data_postproc_emg[f"p2p_{chan}"] = [mep[chan_idx] for mep in results_dct["mep"]]
846
+ phys_data_postproc_emg[f"mep_latency_{chan}"] = [lat[chan_idx] for lat in results_dct["mep_latency"]]
847
+
848
+ df_phys_data_postproc_emg = pd.DataFrame.from_dict(phys_data_postproc_emg)
849
+
850
+ # save in .hdf5 file
851
+ df_stim_data.to_hdf(fn_exp_hdf5, "stim_data")
852
+ # df_stim_data[['coil_mean']].to_hdf(fn_exp_hdf5, "stim_data")
853
+ # [print(type(df_stim_data[['coil_mean']].iloc[0].values)) for i in range(df_stim_data.shape[0])]
854
+ # df_stim_data.columns
855
+ df_phys_data_postproc_emg.to_hdf(fn_exp_hdf5, "phys_data/postproc/EMG")
856
+ df_phys_data_raw_emg.to_hdf(fn_exp_hdf5, "phys_data/raw/EMG")
857
+
858
+ with h5py.File(fn_exp_hdf5, "a") as f:
859
+ f.create_dataset(name="stim_data/info/channels", data=np.array(channels).astype("|S"))
860
+
861
+
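How a call typically looks; this is only a sketch, since it needs a fully prepared pynibs.subject.Subject whose exp/mesh entries point at real Localite session files. The experiment/mesh IDs and channel names below are placeholders.

    def run_localite_merge(subject):
        # `subject` is assumed to be a prepared pynibs.subject.Subject;
        # "exp_1", "mesh0" and the channel names are placeholders.
        merge_exp_data_localite(subject=subject,
                                coil_outlier_corr_cond=True,
                                remove_coil_skin_distance_outlier=True,
                                coil_distance_corr=True,
                                exp_idx="exp_1",
                                mesh_idx="mesh0",
                                channels=["APB", "FDI"],
                                plot=False)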
862
+ def merge_exp_data_rt(subject, coil_outlier_corr_cond, remove_coil_skin_distance_outlier, coil_distance_corr,
863
+ cond=None, exp_idx=0, mesh_idx=0, drop_trial_idx=None, verbose=False, plot=False):
864
+ """
865
+ Merge the TMS coil positions (TriggerMarker) and the reaction time data into an experiment.hdf5 file.
866
+
867
+ Parameters
868
+ ----------
869
+ subject : pynibs.subject.Subject
870
+ Subject object.
871
+ coil_outlier_corr_cond : bool
872
+ Correct outlier of coil position and orientation (+-2 mm, +-3 deg) in case of conditions.
873
+ remove_coil_skin_distance_outlier : bool
874
+ Remove outlier of coil position lying too far away from the skin surface (+- 5 mm).
875
+ coil_distance_corr : bool
876
+ Perform coil <-> head distance correction (coil is moved towards head surface until coil touches scalp).
877
+ cond : string, optional
878
+ Which condition to analyse.
879
+ exp_idx : str, default: 0
880
+ Experiment ID.
881
+ mesh_idx : str, default: 0
882
+ Mesh ID.
883
+ drop_trial_idx : List of int, optional
884
+ Which MEPs to remove before matching.
885
+ verbose : bool, default: False
886
+ Flag indicating verbosity.
887
+ plot : bool, default: False
888
+ Plot RTs and a running average over 10 trials.
889
+ """
890
+ behavior_paths_lst = subject.exp[exp_idx]['fn_data']
891
+ tms_paths_lst = subject.exp[exp_idx]['fn_tms_nav']
892
+ im_lst = subject.exp[exp_idx]['cond']
893
+ nii_exp_path_lst = subject.exp[exp_idx]['fn_mri_nii']
894
+ nii_conform_path = os.path.join(subject.mesh[mesh_idx]["mesh_folder"], subject.mesh[mesh_idx]["fn_mri_conform"])
895
+ fn_exp_hdf5 = subject.exp[exp_idx]['fn_exp_hdf5'][0]
896
+ fn_coil = subject.exp[exp_idx]['fn_coil']
897
+ fn_mesh_hdf5 = subject.mesh[mesh_idx]['fn_mesh_hdf5']
898
+ temp_dir = os.path.join(os.path.split(subject.exp[exp_idx]['fn_exp_hdf5'][0])[0],
899
+ "nnav2simnibs",
900
+ f"mesh_{mesh_idx}")
901
+ subject_obj = subject
902
+
903
+ # allocate dict
904
+ dict_lst = []
905
+
906
+ # handle instrument marker
907
+ if len(im_lst) < len(tms_paths_lst):
908
+ for _ in range(len(tms_paths_lst)):
909
+ im_lst.append(im_lst[0])
910
+
911
+ # handle coil serial numbers
912
+ coil_sn_lst = pynibs.get_coil_sn_lst(fn_coil)
913
+
914
+ # get TMS pulse onset
915
+ tms_pulse_time = subject.exp[exp_idx]['tms_pulse_time']
916
+
917
+ len_conds = []
918
+
919
+ for behavior_paths, tms_paths, coil_sn, nii_exp_path, im \
920
+ in zip(behavior_paths_lst, tms_paths_lst, coil_sn_lst, nii_exp_path_lst, im_lst):
921
+ dict_lst.extend(combine_nnav_rt(xml_paths=tms_paths,
922
+ behavior_paths=behavior_paths,
923
+ im=im,
924
+ coil_sn=coil_sn,
925
+ nii_exp_path=nii_exp_path,
926
+ nii_conform_path=nii_conform_path,
927
+ patient_id=subject.id,
928
+ drop_trial_idx=drop_trial_idx,
929
+ temp_dir=temp_dir,
930
+ cond=cond,
931
+ nnav_system=subject_obj.exp[exp_idx]["nnav_system"],
932
+ mesh_approach=subject_obj.mesh[mesh_idx]["approach"],
933
+ plot=plot))
934
+
935
+ if len(len_conds) == 0:
936
+ len_conds.append(len(dict_lst))
937
+ else:
938
+ len_conds.append(len(dict_lst) - len_conds[-1])
939
+
940
+ # convert list of dict to dict of list
941
+ results_dct = pynibs.list2dict(dict_lst)
942
+
943
+ # check if we have a single-pulse TMS experiment where every pulse is its own condition
944
+ single_pulse_experiment = np.zeros(len(len_conds))
945
+
946
+ results_dct["condition"] = np.arange(len(dict_lst))
947
+
948
+ # reformat coil positions to 4x4 matrices
949
+ coil_0 = np.zeros((4, 4, len(dict_lst)))
950
+ coil_1 = np.zeros((4, 4, len(dict_lst)))
951
+ coil_mean = np.zeros((4, 4, len(dict_lst)))
952
+
953
+ # coil_0[3, 3, :] = 1
954
+ # coil_1[3, 3, :] = 1
955
+ # coil_mean[3, 3, :] = 1
956
+
957
+ for m in range(4):
958
+ for n in range(4):
959
+ coil_0[m, n, :] = results_dct[f"coil0_{m}{n}"]
960
+ coil_1[m, n, :] = results_dct[f"coil1_{m}{n}"]
961
+ coil_mean[m, n, :] = results_dct[f"coil_mean_{m}{n}"]
962
+
963
+ results_dct.pop(f"coil0_{m}{n}")
964
+ results_dct.pop(f"coil1_{m}{n}")
965
+ results_dct.pop(f"coil_mean_{m}{n}")
966
+
967
+ coil_0 = np.split(coil_0, coil_0.shape[2], axis=2)
968
+ coil_1 = np.split(coil_1, coil_1.shape[2], axis=2)
969
+ coil_mean = np.split(coil_mean, coil_mean.shape[2], axis=2)
970
+
971
+ coil_0 = [c.reshape((4, 4)) for c in coil_0]
972
+ coil_1 = [c.reshape((4, 4)) for c in coil_1]
973
+ coil_mean = [c.reshape((4, 4)) for c in coil_mean]
974
+
975
+ results_dct["coil_0"] = coil_0
976
+ results_dct["coil_1"] = coil_1
977
+ results_dct["coil_mean"] = coil_mean
978
+
979
+ results_dct["current"] = [float(c) for c in results_dct["current"]]
980
+
981
+ # coil outlier correction
982
+ if subject_obj.exp[exp_idx]["fn_exp_hdf5"] is not None or subject_obj.exp[exp_idx]["fn_exp_hdf5"] != []:
983
+ fn_exp_hdf5 = subject_obj.exp[exp_idx]["fn_exp_hdf5"][0]
984
+
985
+ elif subject_obj.exp[exp_idx]["fn_exp_csv"] is not None or subject_obj.exp[exp_idx]["fn_exp_csv"] != []:
986
+ fn_exp_hdf5 = subject_obj.exp[exp_idx]["fn_exp_csv"][0]
987
+
988
+ elif fn_exp_hdf5 is None or fn_exp_hdf5 == []:
989
+ fn_exp_hdf5 = os.path.join(subject_obj.subject_folder, "exp", exp_idx, "experiment.hdf5")
990
+
991
+ # remove coil position outliers (in case of conditions)
992
+ #######################################################
993
+ if coil_outlier_corr_cond:
994
+ print("Removing coil position outliers")
995
+ results_dct = pynibs.coil_outlier_correction_cond(exp=results_dct,
996
+ outlier_angle=5.,
997
+ outlier_loc=3.,
998
+ fn_exp_out=fn_exp_hdf5)
999
+
1000
+ # perform coil <-> head distance correction
1001
+ ###########################################
1002
+ if coil_distance_corr:
1003
+ print("Performing coil <-> head distance correction")
1004
+ results_dct = pynibs.coil_distance_correction(exp=results_dct,
1005
+ fn_geo_hdf5=fn_mesh_hdf5,
1006
+ remove_coil_skin_distance_outlier=remove_coil_skin_distance_outlier,
1007
+ fn_plot=os.path.split(fn_exp_hdf5)[0])
1008
+
1009
+ # plot finally used rt data
1010
+ ############################
1011
+ if plot:
1012
+ # assign data
1013
+ x = results_dct['number']
1014
+ y = results_dct['rt']
1015
+
1016
+ # calculate running average over 10 trials
1017
+ avg_window = 10
1018
+ average_y = []
1019
+ for ind in range(len(y) - avg_window + 1):
1020
+ average_y.append(np.mean(y[ind:ind + avg_window]))
1021
+ # insert NaNs to match lengths
1022
+ for ind in range(avg_window - 1):
1023
+ average_y.insert(0, np.nan)
1024
+
1025
+ # create plot
1026
+ plt.close()
1027
+ plt.figure(num='RT-plot for subject ' + subject.id)
1028
+ plt.scatter(x, y, s=10, label='reaction times in ms')
1029
+ plt.plot(x, average_y, color='blue', linestyle='-', label='running average')
1030
+ plt.xlabel('trial number')
1031
+ plt.legend()
1032
+ plt.title('development of the reaction times over the course of the trial')
1033
+ fn_rt_plot = os.path.join(subject_obj.subject_folder, "exp", "model_TMS", "plot_RT.png")
1034
+ plt.savefig(fn_rt_plot, dpi=600)
1035
+ # plt.show()
1036
+ plt.close()
1037
+
1038
+ # Write experimental data to hdf5
1039
+ ###############################################
1040
+ # stimulation data
1041
+ stim_dict = {
1042
+ 'date': results_dct['date'],
1043
+ 'coil_sn': results_dct['coil_sn'],
1044
+ 'patient_id': results_dct['patient_id'],
1045
+ 'coil_0': results_dct['coil_0'],
1046
+ 'coil_1': results_dct['coil_1'],
1047
+ 'coil_mean': results_dct['coil_mean'],
1048
+ 'current': results_dct['current'],
1049
+ 'time_tms': results_dct['ts_tms'],
1050
+ }
1051
+ df_stim_data = pd.DataFrame.from_dict(stim_dict)
1052
+
1053
+ behave_dict = {
1054
+ 'number': results_dct['number'],
1055
+ 'rt': results_dct['rt'],
1056
+ 'time_trial': results_dct['time_trial']
1057
+ }
1058
+ df_behave_data = pd.DataFrame.from_dict(behave_dict)
1059
+
1060
+ # save in .hdf5 file
1061
+ df_stim_data.to_hdf(fn_exp_hdf5, "stim_data")
1062
+ df_behave_data.to_hdf(fn_exp_hdf5, "behavioral_data")
1063
+
1064
+
1065
+ def merge_exp_data_ft(subject, coil_outlier_corr_cond, remove_coil_skin_distance_outlier, coil_distance_corr,
1066
+ cond, exp_idx=0, mesh_idx=0, drop_trial_idx=None, verbose=False, plot=False):
1067
+ """
1068
+ Merge the TMS coil positions (TriggerMarker) and the finger tapping data into an experiment.hdf5 file.
1069
+
1070
+ Parameters
1071
+ ----------
1072
+ subject : pynibs.subject.Subject
1073
+ Subject object.
1074
+ exp_idx : str
1075
+ Experiment ID.
1076
+ mesh_idx : str
1077
+ Mesh ID.
1078
+ coil_outlier_corr_cond : bool
1079
+ Correct outlier of coil position and orientation (+-2 mm, +-3 deg) in case of conditions.
1080
+ cond : string
1081
+ Which behavioral measurement to analyse.
1082
+ remove_coil_skin_distance_outlier : bool
1083
+ Remove outlier of coil position lying too far away from the skin surface (+- 5 mm).
1084
+ coil_distance_corr : bool
1085
+ Perform coil <-> head distance correction (coil is moved towards head surface until coil touches scalp).
1086
+ drop_trial_idx : List of int or None
1087
+ Which MEPs to remove before matching.
1088
+ verbose : bool
1089
+ Plot output messages.
1090
+ plot : bool, optional, default: False
1091
+ Plot RTs and a running average over 10 trials.
1092
+ """
1093
+ behavior_paths_lst = subject.exp[exp_idx]['fn_data']
1094
+ tms_paths_lst = subject.exp[exp_idx]['fn_tms_nav']
1095
+ im_lst = subject.exp[exp_idx]['cond']
1096
+ nii_exp_path_lst = subject.exp[exp_idx]['fn_mri_nii']
1097
+ nii_conform_path = os.path.join(subject.mesh[mesh_idx]["mesh_folder"], subject.mesh[mesh_idx]["fn_mri_conform"])
1098
+ fn_exp_hdf5 = subject.exp[exp_idx]['fn_exp_hdf5'][0]
1099
+ fn_coil = subject.exp[exp_idx]['fn_coil']
1100
+ fn_mesh_hdf5 = subject.mesh[mesh_idx]['fn_mesh_hdf5']
1101
+ temp_dir = os.path.join(os.path.split(subject.exp[exp_idx]['fn_exp_hdf5'][0])[0],
1102
+ "nnav2simnibs",
1103
+ f"mesh_{mesh_idx}")
1104
+ subject_obj = subject
1105
+
1106
+ # allocate dict
1107
+ dict_lst = []
1108
+
1109
+ # handle instrument marker
1110
+ if len(im_lst) < len(tms_paths_lst):
1111
+ for _ in range(len(tms_paths_lst)):
1112
+ im_lst.append(im_lst[0])
1113
+
1114
+ # handle coil serial numbers
1115
+ coil_sn_lst = pynibs.get_coil_sn_lst(fn_coil)
1116
+
1117
+ # get TMS pulse onset
1118
+ tms_pulse_time = subject.exp[exp_idx]['tms_pulse_time']
1119
+
1120
+ len_conds = []
1121
+
1122
+ for behavior_paths, tms_paths, coil_sn, nii_exp_path, im \
1123
+ in zip(behavior_paths_lst, tms_paths_lst, coil_sn_lst, nii_exp_path_lst, im_lst):
1124
+ dict_lst.extend(combine_nnav_ft(xml_paths=tms_paths,
1125
+ behavior_paths=behavior_paths,
1126
+ im=im,
1127
+ coil_sn=coil_sn,
1128
+ nii_exp_path=nii_exp_path,
1129
+ nii_conform_path=nii_conform_path,
1130
+ patient_id=subject.id,
1131
+ drop_trial_idx=drop_trial_idx,
1132
+ temp_dir=temp_dir,
1133
+ cond=cond,
1134
+ nnav_system=subject_obj.exp[exp_idx]["nnav_system"],
1135
+ mesh_approach=subject_obj.mesh[mesh_idx]["approach"],
1136
+ plot=plot))
1137
+
1138
+ if len(len_conds) == 0:
1139
+ len_conds.append(len(dict_lst))
1140
+ else:
1141
+ len_conds.append(len(dict_lst) - len_conds[-1])
1142
+
1143
+ # convert list of dict to dict of list
1144
+ results_dct = pynibs.list2dict(dict_lst)
1145
+
1146
+ # check if we have a single-pulse TMS experiment where every pulse is its own condition
1147
+ single_pulse_experiment = np.zeros(len(len_conds))
1148
+
1149
+ results_dct["condition"] = np.arange(len(dict_lst))
1150
+
1151
+ # reformat coil positions to 4x4 matrices
1152
+ coil_0 = np.zeros((4, 4, len(dict_lst)))
1153
+ coil_1 = np.zeros((4, 4, len(dict_lst)))
1154
+ coil_mean = np.zeros((4, 4, len(dict_lst)))
1155
+
1156
+ # coil_0[3, 3, :] = 1
1157
+ # coil_1[3, 3, :] = 1
1158
+ # coil_mean[3, 3, :] = 1
1159
+
1160
+ for m in range(4):
1161
+ for n in range(4):
1162
+ coil_0[m, n, :] = results_dct[f"coil0_{m}{n}"]
1163
+ coil_1[m, n, :] = results_dct[f"coil1_{m}{n}"]
1164
+ coil_mean[m, n, :] = results_dct[f"coil_mean_{m}{n}"]
1165
+
1166
+ results_dct.pop(f"coil0_{m}{n}")
1167
+ results_dct.pop(f"coil1_{m}{n}")
1168
+ results_dct.pop(f"coil_mean_{m}{n}")
1169
+
1170
+ coil_0 = np.split(coil_0, coil_0.shape[2], axis=2)
1171
+ coil_1 = np.split(coil_1, coil_1.shape[2], axis=2)
1172
+ coil_mean = np.split(coil_mean, coil_mean.shape[2], axis=2)
1173
+
1174
+ coil_0 = [c.reshape((4, 4)) for c in coil_0]
1175
+ coil_1 = [c.reshape((4, 4)) for c in coil_1]
1176
+ coil_mean = [c.reshape((4, 4)) for c in coil_mean]
1177
+
1178
+ results_dct["coil_0"] = coil_0
1179
+ results_dct["coil_1"] = coil_1
1180
+ results_dct["coil_mean"] = coil_mean
1181
+
1182
+ results_dct["current"] = [float(c) for c in results_dct["current"]]
1183
+
1184
+ # create dir
1185
+ path_exp_hdf5 = os.path.join(subject_obj.subject_folder, "exp", exp_idx, cond)
1186
+ os.makedirs(path_exp_hdf5, exist_ok=True)
1189
+
1190
+ # coil outlier correction
1191
+ if subject_obj.exp[exp_idx]["fn_exp_hdf5"] is not None or subject_obj.exp[exp_idx]["fn_exp_hdf5"] != []:
1192
+ fn_exp_hdf5 = os.path.join(path_exp_hdf5, "experiment.hdf5")
1193
+
1194
+ elif subject_obj.exp[exp_idx]["fn_exp_csv"] is not None or subject_obj.exp[exp_idx]["fn_exp_csv"] != []:
1195
+ fn_exp_hdf5 = subject_obj.exp[exp_idx]["fn_exp_csv"][0]
1196
+
1197
+ elif fn_exp_hdf5 is None or fn_exp_hdf5 == []:
1198
+ fn_exp_hdf5 = os.path.join(subject_obj.subject_folder, "exp", exp_idx, "experiment.hdf5")
1199
+
1200
+ # remove coil position outliers (in case of conditions)
1201
+ #######################################################
1202
+ if coil_outlier_corr_cond:
1203
+ print("Removing coil position outliers")
1204
+ results_dct = pynibs.coil_outlier_correction_cond(exp=results_dct,
1205
+ outlier_angle=5.,
1206
+ outlier_loc=3.,
1207
+ fn_exp_out=fn_exp_hdf5)
1208
+
1209
+ # perform coil <-> head distance correction
1210
+ ###########################################
1211
+ if coil_distance_corr:
1212
+ print("Performing coil <-> head distance correction")
1213
+ results_dct = pynibs.coil_distance_correction(exp=results_dct,
1214
+ fn_geo_hdf5=fn_mesh_hdf5,
1215
+ remove_coil_skin_distance_outlier=remove_coil_skin_distance_outlier,
1216
+ fn_plot=os.path.split(fn_exp_hdf5)[0])
1217
+
1218
+ # plot finally used ft data
1219
+ ############################
1220
+ if plot:
1221
+ # assign data
1222
+ x = results_dct['number']
1223
+ y = results_dct['ft']
1224
+
1225
+ # calculate running average over 10 trials
1226
+ avg_window = 10
1227
+ average_y = []
1228
+ for ind in range(len(y) - avg_window + 1):
1229
+ average_y.append(np.mean(y[ind:ind + avg_window]))
1230
+ # insert NaNs to match lengths
1231
+ for ind in range(avg_window - 1):
1232
+ average_y.insert(0, np.nan)
1233
+
1234
+ # create plot
1235
+ plt.close()
1236
+ plt.figure(num=cond + ' FT-plot for subject ' + subject.id)
1237
+ plt.scatter(x, y, s=10, label='finger tapping performance')
1238
+ plt.plot(x, average_y, color='blue', linestyle='-', label='running average')
1239
+ plt.xlabel('trial number')
1240
+ plt.legend()
1241
+ plt.title('Finger tapping performance over the course of the trial')
1242
+ fn_ft_plot = os.path.join(subject_obj.subject_folder, "exp", "FingerTapping", cond, "plot_ft.png")
1243
+ plt.savefig(fn_ft_plot, dpi=600)
1244
+ # plt.show()
1245
+ plt.close()
1246
+
1247
+ # Write experimental data to hdf5
1248
+ ###############################################
1249
+ # stimulation data
1250
+ stim_dict = {
1251
+ 'date': results_dct['date'],
1252
+ 'coil_sn': results_dct['coil_sn'],
1253
+ 'patient_id': results_dct['patient_id'],
1254
+ 'coil_0': results_dct['coil_0'],
1255
+ 'coil_1': results_dct['coil_1'],
1256
+ 'coil_mean': results_dct['coil_mean'],
1257
+ 'current': results_dct['current'],
1258
+ 'time_tms': results_dct['ts_tms'],
1259
+ }
1260
+ df_stim_data = pd.DataFrame.from_dict(stim_dict)
1261
+
1262
+ behave_dict = {
1263
+ 'number': results_dct['number'],
1264
+ 'ft': results_dct['ft'],
1265
+ 'time_trial': results_dct['time_trial']
1266
+ }
1267
+ df_behave_data = pd.DataFrame.from_dict(behave_dict)
1268
+
1269
+ # save in .hdf5 file
1270
+ df_stim_data.to_hdf(fn_exp_hdf5, "stim_data")
1271
+ df_behave_data.to_hdf(fn_exp_hdf5, "behavioral_data")
1272
+
1273
+
1274
+ def match_behave_and_triggermarker(mep_time_lst, xml_paths, bnd_factor=0.99 / 2, isi=None):
1275
+ """
1276
+ Sort out timestamps of mep and tms files that do not match.
1277
+
1278
+ Parameters
1279
+ ----------
1280
+ mep_time_lst : list of datetime.timedelta
1281
+ timedeltas of MEP recordings.
1282
+ xml_paths : list of str
1283
+ Paths to coil0-file and optionally coil1-file; if there is no coil1-file, use empty string.
1284
+ bnd_factor : float, default: 0.99/2
1285
+ Fraction of the interstimulus interval used as +- search window when matching neuronavigation and MEP data
1286
+ by their timestamps (0 means exact matching, 0.5 means +- half an interstimulus interval).
1287
+ isi : float, optional
1288
+ Interstimulus interval in seconds. If not provided, it is estimated from the first two MEP timestamps.
1289
+
1290
+ Returns
1291
+ -------
1292
+ tms_index_lst : list of int
1293
+ Indices of tms-timestamps that match.
1294
+ mep_index_lst : list of int
1295
+ Indices of mep-timestamps that match.
1296
+ tms_time_lst : list of datetime.timedelta
1297
+ TMS timestamps relative to the first pulse.
1298
+ """
1299
+ # mep_time_lst = []
1300
+ # for cfs_path in cfs_paths:
1301
+ # _, mep_time_lst_tmp = get_mep_elements(cfs_path, tms_pulse_time)
1302
+ # mep_time_lst.extend(mep_time_lst_tmp)
1303
+
1304
+ _, tms_ts_lst, _, tms_idx_invalid = pynibs.localite.get_tms_elements(xml_paths, verbose=True)
1305
+
1306
+ # get timestamp difference of mep measurements
1307
+ if isi is None:
1308
+ isi = (mep_time_lst[1] - mep_time_lst[0]).total_seconds()
1309
+
1310
+ # get offset to match first timestamps of mep and tms
1311
+ coil_offset = datetime.timedelta(seconds=float(tms_ts_lst[0]) / 1000)
1312
+
1313
+ # match start time with the timestamp of the xml file
1314
+ # tms_time_lst = [mep_time_lst[0] - time_offset + datetime.timedelta(seconds=float(ts) / 1000) for ts in tms_ts_lst]
1315
+ coil_time_delta_lst = [-coil_offset + datetime.timedelta(seconds=float(ts) / 1000) for ts in tms_ts_lst]
1316
+ coil_time_delta_lst_orig = [-coil_offset + datetime.timedelta(seconds=float(ts) / 1000) for ts in tms_ts_lst]
1317
+
1318
+ # get index for cfs and xml files
1319
+ # mep_time_index, mep_index_lst = 0, []
1320
+ # tms_time_index, tms_index_lst = 0, []
1321
+
1322
+ # get maximal list length of time lists
1323
+ # min_lst_length = min([len(lst) for lst in [mep_time_lst, tms_time_delta_lst]])
1324
+
1325
+ # mep_last_working_idx = 0
1326
+ # tms_last_working_idx = 0
1327
+
1328
+ if (len(coil_time_delta_lst) + len(tms_idx_invalid)) == len(mep_time_lst):
1329
+ print("Equal amount of TMS and MEP data...")
1330
+ print(f"Removing invalid coil positions {tms_idx_invalid} from MEP data...")
1331
+
1332
+ # invalid coil positions were already removed in previous call of get_tms_elements(xml_paths)
1333
+ coil_to_mep_match_lst = [i for i in range(len(tms_ts_lst))]
1334
+
1335
+ # MEP indices without invalid coil positions
1336
+ mep_index_lst = [i for i in range(len(mep_time_lst)) if i not in tms_idx_invalid]
1337
+
1338
+ else:
1339
+ mep_index_lst = []
1340
+ coil_to_mep_match_lst = []
1341
+ mep_time_lst = np.array(mep_time_lst)
1342
+ coil_time_delta_lst = np.array(coil_time_delta_lst)
1343
+
1344
+ # iterate over all MEPs
1345
+ for mep_index in range(len(mep_time_lst)):
1346
+ # set bounds
1347
+ time_bnd_l = mep_time_lst[mep_index] + datetime.timedelta(
1348
+ seconds=-isi * bnd_factor) # time bound low
1349
+ time_bnd_h = mep_time_lst[mep_index] + datetime.timedelta(
1350
+ seconds=+isi * bnd_factor) # time bound high
1351
+
1352
+ # search for corresponding TMS coil positions
1353
+ coil_mep_in_bound = (time_bnd_l <= coil_time_delta_lst) & (coil_time_delta_lst <= time_bnd_h)
1354
+
1355
+ # no TMS coil position in bound (untracked coil position already removed)
1356
+ if np.sum(coil_mep_in_bound) == 0:
1357
+ print(f"Untracked coil position, excluding MEP_idx: {mep_index}")
1358
+
1359
+ # one correct TMS coil position in bound
1360
+ elif np.sum(coil_mep_in_bound) == 1:
1361
+ mep_index_lst.append(mep_index)
1362
+ coil_match_index = np.where(coil_mep_in_bound)[0][0]
1363
+ coil_to_mep_match_lst.append(coil_match_index)
1364
+
1365
+ # zero times on last match to avoid time shift
1366
+ mep_time_lst -= mep_time_lst[mep_index]
1367
+ coil_time_delta_lst -= coil_time_delta_lst[coil_match_index]
1368
+
1369
+ # more than one TMS coil position in bound -> take the closest one
1370
+ elif np.sum(coil_mep_in_bound) > 1:
1371
+ mep_index_lst.append(mep_index)
1372
+ delta_t = np.abs(np.array([mep_time_lst[mep_index] for _ in range(np.sum(coil_mep_in_bound))]) -
1373
+ np.array(coil_time_delta_lst)[coil_mep_in_bound])
1374
+ coil_match_index = np.where(coil_mep_in_bound)[0][np.argmin(delta_t)]
1375
+ coil_to_mep_match_lst.append(coil_match_index)
1376
+
1377
+ print(f"Two tracked TMS coil positions found within search window -> choosing closest index by time.")
1378
+ print(
1379
+ f"MEP_idx: {mep_index} ({mep_time_lst[mep_index]}) -> "
1380
+ f"TMS_idx: {coil_match_index} ({coil_time_delta_lst[coil_match_index]})")
1381
+
1382
+ # zero times on last match
1383
+ mep_time_lst -= mep_time_lst[mep_index]
1384
+ coil_time_delta_lst -= coil_time_delta_lst[coil_match_index]
1385
+
1386
+ return [coil_to_mep_match_lst, mep_index_lst, coil_time_delta_lst_orig]
1387
+
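+ # Example (editor's illustrative sketch, not part of pynibs): the matching rule used above, applied to
+ # synthetic timedeltas. A behavioral timestamp is matched if at least one coil timestamp lies within
+ # +- bnd_factor * isi around it; if several candidates fall inside the window, the closest one is taken.
+ # import datetime
+ # import numpy as np
+ # isi, bnd_factor = 5.0, 0.99 / 2
+ # mep_t = [datetime.timedelta(seconds=s) for s in (0.0, 5.1, 10.2)]
+ # coil_t = np.array([datetime.timedelta(seconds=s) for s in (0.1, 5.0, 9.9, 10.4)])
+ # for i, t in enumerate(mep_t):
+ #     win = datetime.timedelta(seconds=isi * bnd_factor)
+ #     in_bound = (t - win <= coil_t) & (coil_t <= t + win)
+ #     matches = np.where(in_bound)[0]
+ #     best = min(matches, key=lambda m: abs(t - coil_t[m])) if len(matches) else None
+ #     print(f"behavioral trial {i} -> coil index {best}")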
1388
+
1389
+ def get_patient_id(xml_path):
1390
+ """
1391
+ Read patient-ID.
1392
+
1393
+ Parameters
1394
+ ----------
1395
+ xml_path : str
1396
+ Path to coil0-file.
1397
+
1398
+ Returns
1399
+ -------
1400
+ patient_id : str
+ ID of the patient as stored in PatientData.xml.
1402
+ """
1403
+
1404
+ patient_data_path = os.path.join(os.path.dirname(xml_path), 'PatientData.xml')
1405
+ # parse XML document
1406
+ xml_tree = ET.parse(patient_data_path)
1407
+ xml_root = xml_tree.getroot()
1408
+ xml_pd = xml_root.find('patientData')
1409
+ return xml_pd.find('patientID').text
1410
+
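+ # Example (editor's sketch): typical usage with a hypothetical session path; PatientData.xml is expected
+ # to live in the same directory as the given coil XML file.
+ # pid = get_patient_id("/data/TMS_session/coil0.xml")
+ # print(pid)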
1411
+
1412
+ def combine_nnav_mep(xml_paths, cfs_paths, im, coil_sn,
1413
+ nii_exp_path, nii_conform_path,
1414
+ patient_id, tms_pulse_time, drop_mep_idx, mep_onsets, nnav_system, mesh_approach="headreco",
1415
+ temp_dir=None, cfs_data_column=0, channels=None, plot=False, start_mep=18, end_mep=35):
1416
+ """
1417
+ Creates dictionary containing all experimental data.
1418
+
1419
+ Parameters
1420
+ ----------
1421
+ xml_paths : list of str
1422
+ Paths to coil0-file and optionally coil1-file; if there is no coil1-file, use empty string.
1423
+ cfs_paths : list of str
1424
+ Paths to .cfs mep file.
1425
+ im : list of str
1426
+ List of path to the instrument-marker-file or list of strings containing the instrument marker.
1427
+ coil_sn : str
1428
+ Coil-serial-number.
1429
+ nii_exp_path : str
1430
+ Path to the .nii file that was used in the experiment.
1431
+ nii_conform_path : str
1432
+ Path to the conform*.nii file used to calculate the E-fields with SimNIBS.
1433
+ patient_id : str
1434
+ Patient id.
1435
+ tms_pulse_time : float
1436
+ Time in [s] of TMS pulse as specified in signal.
1437
+ drop_mep_idx : List of int or None
1438
+ Which MEPs to remove before matching.
1439
+ mep_onsets : List of int or None
1440
+ If there are multiple .cfs files per TMS Navigator session, their onsets in [ms], e.g. [0, 71186].
1441
+ nnav_system : str
1442
+ Type of neuronavigation system ("Localite", "Visor").
1443
+ mesh_approach : str, default: "headreco"
1444
+ Approach the mesh is generated with ("headreco" or "mri2mesh").
1445
+ temp_dir : str, default: None (fn_exp_mri_nii folder)
1446
+ Directory to save temporary files (transformation .nii and .mat files) (fn_exp_mri_nii folder).
1447
+ cfs_data_column : int or list of int, default: 0
1448
+ Column(s) of dataset in .cfs file.
1449
+ channels : list of str, optional
1450
+ Channel names.
1451
+ plot : bool, default: False
1452
+ Plot MEPs and p2p evaluation.
1453
+ start_mep : float, default: 18
1454
+ Start of time frame after TMS pulse where p2p value is evaluated (in ms).
1455
+ end_mep : float, default: 35
1456
+ End of time frame after TMS pulse where p2p value is evaluated (in ms).
1457
+
1458
+ Returns
1459
+ -------
1460
+ dict_lst : list of dicts, one dict for each zap
1461
+ 'number'
1462
+ 'condition'
1463
+ 'current'
1464
+ 'mep_raw_data'
1465
+ 'mep'
1466
+ 'mep_latency'
1467
+ 'mep_filt_data'
1468
+ 'mep_raw_data_time'
1469
+ 'time_tms'
1470
+ 'ts_tms'
1471
+ 'time_mep'
1472
+ 'date'
1473
+ 'coil_sn'
1474
+ 'patient_id'
1475
+ """
1476
+ # get arrays and lists
1477
+ coil_array, ts_tms_lst, current_lst, tms_idx_invalid = pynibs.get_tms_elements(xml_paths, verbose=False)
1478
+
1479
+ # get MEP amplitudes from .cfs files
1480
+ time_mep_lst, mep_latencies = [], []
1481
+ last_mep_onset = datetime.timedelta(seconds=0)
1482
+ mep_raw_data, mep_filt_data, p2p_arr = None, None, None
1483
+ mep_raw_data_time = None
1484
+
1485
+ if not isinstance(cfs_paths, list):
1486
+ cfs_paths = [cfs_paths]
1487
+
1488
+ for idx, cfs_path in enumerate(cfs_paths):
1489
+ # calc MEP amplitudes and MEP onset times from .cfs file
1490
+ p2p_array_tmp, time_mep_lst_tmp, \
1491
+ mep_raw_data_tmp, mep_filt_data_tmp, \
1492
+ mep_raw_data_time, mep_latency = pynibs.get_mep_elements(mep_fn=cfs_path,
1493
+ tms_pulse_time=tms_pulse_time,
1494
+ drop_mep_idx=drop_mep_idx,
1495
+ cfs_data_column=cfs_data_column,
1496
+ channels=channels,
1497
+ plot=plot,
1498
+ start_mep=start_mep,
1499
+ end_mep=end_mep)
1500
+
1501
+ # add .cfs onsets from subject object and add onset of last mep from last .cfs file
1502
+ if mep_onsets is not None:
1503
+ time_mep_lst_tmp = [time_mep_lst_tmp[i] + datetime.timedelta(milliseconds=mep_onsets[idx]) +
1504
+ last_mep_onset for
1505
+ i in range(len(time_mep_lst_tmp))]
1506
+ time_mep_lst.extend(time_mep_lst_tmp)
1507
+
1508
+ if idx == 0:
1509
+ p2p_arr = p2p_array_tmp
1510
+ mep_raw_data = mep_raw_data_tmp
1511
+ mep_filt_data = mep_filt_data_tmp
1512
+ mep_latencies = mep_latency
1513
+ else:
1514
+ mep_raw_data = np.vstack((mep_raw_data, mep_raw_data_tmp))
1515
+ mep_filt_data = np.vstack((mep_filt_data, mep_filt_data_tmp))
1516
+ p2p_arr = np.concatenate((p2p_arr, p2p_array_tmp), axis=1)
1517
+ mep_latencies.append(mep_latency)
1518
+
1519
+ last_mep_onset = time_mep_lst[-1]
1520
+ mep_latencies = np.array(mep_latencies)
1521
+
1522
+ # match TMS Navigator zaps and MEPs
1523
+ tms_index_lst, mep_index_lst, time_tms_lst = match_behave_and_triggermarker(mep_time_lst=time_mep_lst,
1524
+ xml_paths=xml_paths,
1525
+ bnd_factor=0.99 / 2) # 0.99/2
1526
+
1527
+ if cfs_paths[0].endswith("cfs"):
1528
+ experiment_date_time = pynibs.get_time_date(cfs_paths)
1529
+ else:
1530
+ experiment_date_time = "N/A"
1531
+
1532
+ # get indices of not recognizable coils
1533
+ unit_matrix_index_list = []
1534
+ for unit_matrix_index1 in range(coil_array.shape[0]):
1535
+ for unit_matrix_index2 in range(coil_array.shape[1]):
1536
+ if np.allclose(coil_array[unit_matrix_index1, unit_matrix_index2, :, :], np.identity(4)):
1537
+ unit_matrix_index_list.append([unit_matrix_index1, unit_matrix_index2])
1538
+
1539
+ # set condition names in case of random sampling
1540
+ if im is None or im == [""] or im == "":
1541
+ coil_cond_lst = [str(i) for i in range(len(ts_tms_lst))]
1542
+ drop_idx = []
1543
+ else:
1544
+ # get conditions from instrument markers
1545
+ if os.path.isfile(im[0]):
1546
+ coil_cond_lst, drop_idx = pynibs.match_instrument_marker_file(xml_paths, im[0])
1547
+ else:
1548
+ coil_cond_lst, drop_idx = pynibs.match_instrument_marker_string(xml_paths, im)
1549
+
1550
+ # coordinate transform (for coil_0, coil_1, coil_mean)
1551
+ for idx in range(coil_array.shape[0]):
1552
+ # move axis, calculate and move back
1553
+ m_simnibs = np.moveaxis(coil_array[idx, :, :, :], 0, 2)
1554
+ m_simnibs = pynibs.nnav2simnibs(fn_exp_nii=nii_exp_path[0],
1555
+ fn_conform_nii=nii_conform_path,
1556
+ m_nnav=m_simnibs,
1557
+ nnav_system=nnav_system,
1558
+ mesh_approach=mesh_approach,
1559
+ temp_dir=temp_dir)
1560
+
1561
+ coil_array[idx, :, :, :] = np.moveaxis(m_simnibs, 2, 0)
1562
+
1563
+ # replace transformed identity matrices
1564
+ for unit_matrix_indices in unit_matrix_index_list:
1565
+ coil_array[unit_matrix_indices[0], unit_matrix_indices[1], :, :] = np.identity(4)
1566
+
1567
+ # list for dictionaries
1568
+ dict_lst = []
1569
+ idx = 0
1570
+
1571
+ assert len(tms_index_lst) == len(mep_index_lst)
1572
+
1573
+ delta_t = []
1574
+ ts_mep = [time_mep_lst[i] for i in mep_index_lst]
1575
+ ts_tms = [time_tms_lst[i] for i in tms_index_lst]
1576
+
1577
+ for t1, t2 in zip(ts_mep, ts_tms):
1578
+ # print(f"MEP: {t1} TMS: {t2}")
1579
+ delta_t.append(np.abs(t1 - t2))
1580
+
1581
+ plt.plot(np.array([delta_t[i].total_seconds() for i in range(len(delta_t))]) * 1000)
1582
+ plt.xlabel("TMS pulse #", fontsize=11)
1583
+ plt.ylabel(r"$\Delta t$ in ms", fontsize=11)
1584
+ fn_plot = os.path.join(os.path.split(cfs_paths[0])[0], "delta_t_mep_vs_tms.png")
1585
+ plt.savefig(fn_plot, dpi=600)
1586
+ plt.close()
1587
+
1588
+ # iterate over mep and tms indices to get valid matches of MEPs and TMS Navigator information
1589
+ for tms_index, mep_index in zip(tms_index_lst, mep_index_lst):
1590
+ if tms_index not in drop_idx:
1591
+ dictionary = {'number': idx,
1592
+ 'condition': coil_cond_lst[tms_index],
1593
+ 'current': current_lst[tms_index],
1594
+ 'mep_raw_data': mep_raw_data[:, mep_index, :],
1595
+ 'mep': p2p_arr[:, mep_index],
1596
+ 'mep_latency': mep_latencies[:, mep_index],
1597
+ 'mep_filt_data': mep_filt_data[:, mep_index, :],
1598
+ 'mep_raw_data_time': mep_raw_data_time,
1599
+ 'time_tms': time_tms_lst[tms_index].total_seconds(),
1600
+ 'ts_tms': ts_tms_lst[tms_index],
1601
+ 'time_mep': time_mep_lst[mep_index].total_seconds(),
1602
+ 'date': experiment_date_time,
1603
+ 'coil_sn': coil_sn,
1604
+ 'patient_id': patient_id}
1605
+
1606
+ # write coils
1607
+ for index1 in range(4):
1608
+ for index2 in range(4):
1609
+ dictionary.update({'coil0_' + str(index1) + str(index2): coil_array[0, tms_index, index1, index2]})
1610
+ dictionary.update({'coil1_' + str(index1) + str(index2): coil_array[1, tms_index, index1, index2]})
1611
+ dictionary.update(
1612
+ {'coil_mean_' + str(index1) + str(index2): coil_array[2, tms_index, index1, index2]})
1613
+
1614
+ # get time difference
1615
+ time_diff = time_tms_lst[tms_index] - time_mep_lst[mep_index]
1616
+ time_diff = time_diff.total_seconds() * 1000
1617
+ dictionary.update({'time_diff': time_diff})
1618
+
1619
+ # append to list
1620
+ dict_lst.append(dictionary)
1621
+
1622
+ idx += 1
1623
+
1624
+ return dict_lst
1625
+
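+ # Example (editor's illustrative sketch, not part of pynibs): the axis handling used for the coordinate
+ # transform above. The (n_zaps, 4, 4) block of coil matrices is moved to shape (4, 4, n_zaps) before the
+ # call to pynibs.nnav2simnibs() and moved back afterwards; a no-op stands in for the real call, which
+ # requires the experimental and conform NIfTI files.
+ # import numpy as np
+ # coil_array = np.tile(np.eye(4), (3, 10, 1, 1))      # (coil_0 / coil_1 / coil_mean, n_zaps, 4, 4)
+ # m = np.moveaxis(coil_array[0], 0, 2)                 # -> (4, 4, n_zaps)
+ # m = m.copy()                                         # placeholder for pynibs.nnav2simnibs(..., m_nnav=m, ...)
+ # coil_array[0] = np.moveaxis(m, 2, 0)                 # back to (n_zaps, 4, 4)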
1626
+
1627
+ def combine_nnav_rt(xml_paths, behavior_paths, im, coil_sn,
1628
+ nii_exp_path, nii_conform_path,
1629
+ patient_id, drop_trial_idx, nnav_system, cond,
1630
+ mesh_approach="headreco", temp_dir=None, plot=False):
1631
+ """
1632
+ Creates a list of dictionaries containing all experimental data, one dictionary per TMS pulse.
1633
+
1634
+ Parameters
1635
+ ----------
1636
+ xml_paths : list of str
1637
+ Paths to coil0-file and optionally coil1-file; if there is no coil1-file, use an empty string.
1638
+ behavior_paths : list of str
1639
+ Paths to the .csv behavioral data file(s) containing the reaction times.
1640
+ im : list of str
1641
+ List of path to the instrument-marker-file or list of strings containing the instrument marker.
1642
+ coil_sn : str
1643
+ Coil-serial-number.
1644
+ nii_exp_path : str
1645
+ Path to the .nii file that was used in the experiment.
1646
+ nii_conform_path : str
1647
+ Path to the conform*.nii file used to calculate the E-fields with SimNIBS.
1648
+ patient_id : str
1649
+ Patient id.
1650
+ drop_trial_idx : List of int or None
1651
+ Which trials to remove before matching.
1652
+ nnav_system : str
1653
+ Type of neuronavigation system ("Localite", "Visor").
1654
+ cond : str
1655
+ Condition name in data_path.
1656
+ mesh_approach : str, default: "headreco"
1657
+ Approach the mesh is generated with ("headreco" or "mri2mesh").
1658
+ temp_dir : str, optional
1659
+ Directory to save temporary files (transformation .nii and .mat files) (fn_exp_mri_nii folder).
1660
+ plot : bool, default: False
1661
+ Plot MEPs and p2p evaluation.
1662
+
1663
+ Returns
1664
+ -------
1665
+ dict_lst : list of dicts, one dict for each zap
1666
+ 'number'
1667
+ 'condition'
1668
+ 'current'
1669
+ 'rt'
+ 'time_tms'
+ 'ts_tms'
+ 'time_trial'
1677
+ 'date'
1678
+ 'coil_sn'
1679
+ 'patient_id'
1680
+ """
1681
+
1682
+ # get arrays and lists
1683
+ coil_array, ts_tms_lst, current_lst, tms_idx_invalid = pynibs.get_tms_elements(xml_paths, verbose=False)
1684
+
1685
+ # get RT from .csv files
1686
+ time_mep_lst = []
1687
+ # last_mep_onset = datetime.timedelta(seconds=0)
1688
+ rt_arr = None
1689
+
1690
+ if not isinstance(behavior_paths, list):
1691
+ behavior_paths = [behavior_paths]
1692
+
1693
+ for idx, behavior_path in enumerate(behavior_paths):
1694
+ # get RT and trial onsets from .csv file
1695
+ rt_array_tmp, trial_onset_lst_tmp, mean_isi = pynibs.get_trial_data_from_csv(behavior_fn=behavior_path,
1696
+ drop_trial_idx=drop_trial_idx,
1697
+ cond=cond,
1698
+ only_corr=True)
1699
+
1700
+ time_mep_lst.extend(trial_onset_lst_tmp)
1701
+
1702
+ if idx == 0:
1703
+ rt_arr = rt_array_tmp
1704
+ else:
1705
+ rt_arr = np.concatenate((rt_arr, rt_array_tmp), axis=1)
1706
+
1707
+ # last_mep_onset = time_mep_lst[-1]
1708
+
1709
+ # match TMS Navigator zaps and MEPs
1710
+ time_mep_lst = [datetime.timedelta(seconds=onset / 1000) for onset in time_mep_lst]
1711
+
1712
+ tms_index_lst, mep_index_lst, time_tms_lst = match_behave_and_triggermarker(mep_time_lst=time_mep_lst,
1713
+ xml_paths=xml_paths,
1714
+ isi=mean_isi) # 0.99/2
1715
+
1716
+ experiment_date_time = "N/A"
1717
+
1718
+ # get indices of not recognizable coils
1719
+ unit_matrix_index_list = []
1720
+ for unit_matrix_index1 in range(coil_array.shape[0]):
1721
+ for unit_matrix_index2 in range(coil_array.shape[1]):
1722
+ if np.allclose(coil_array[unit_matrix_index1, unit_matrix_index2, :, :], np.identity(4)):
1723
+ unit_matrix_index_list.append([unit_matrix_index1, unit_matrix_index2])
1724
+
1725
+ # set condition names in case of random sampling
1726
+ if im is None or im == [""] or im == "":
1727
+ coil_cond_lst = [str(i) for i in range(len(ts_tms_lst))]
1728
+ drop_idx = []
1729
+ else:
1730
+ # get conditions from instrument markers
1731
+ if os.path.isfile(im[0]):
1732
+ coil_cond_lst, drop_idx = pynibs.match_instrument_marker_file(xml_paths, im[0])
1733
+ else:
1734
+ coil_cond_lst, drop_idx = pynibs.match_instrument_marker_string(xml_paths, im)
1735
+
1736
+ # coordinate transform (for coil_0, coil_1, coil_mean)
1737
+ for idx in range(coil_array.shape[0]):
1738
+ # move axis, calculate and move back
1739
+ m_simnibs = np.moveaxis(coil_array[idx, :, :, :], 0, 2)
1740
+ m_simnibs = pynibs.nnav2simnibs(fn_exp_nii=nii_exp_path[0],
1741
+ fn_conform_nii=nii_conform_path,
1742
+ m_nnav=m_simnibs,
1743
+ nnav_system=nnav_system,
1744
+ mesh_approach=mesh_approach,
1745
+ temp_dir=temp_dir)
1746
+
1747
+ coil_array[idx, :, :, :] = np.moveaxis(m_simnibs, 2, 0)
1748
+
1749
+ # replace transformed identity matrices
1750
+ for unit_matrix_indices in unit_matrix_index_list:
1751
+ coil_array[unit_matrix_indices[0], unit_matrix_indices[1], :, :] = np.identity(4)
1752
+
1753
+ # list for dictionaries
1754
+ dict_lst = []
1755
+ idx = 0
1756
+
1757
+ assert len(tms_index_lst) == len(mep_index_lst)
1758
+
1759
+ delta_t = []
1760
+ ts_mep = [time_mep_lst[i] for i in mep_index_lst]
1761
+ ts_tms = [time_tms_lst[i] for i in tms_index_lst]
1762
+
1763
+ for t1, t2 in zip(ts_mep, ts_tms):
1764
+ # print(f"MEP: {t1} TMS: {t2}")
1765
+ delta_t.append(np.abs(t1 - t2))
1766
+
1767
+ plt.plot(np.array([delta_t[i].total_seconds() for i in range(len(delta_t))]) * 1000)
1768
+ plt.xlabel("TMS pulse #", fontsize=11)
1769
+ plt.ylabel(r"$\Delta t$ in ms", fontsize=11)
1770
+ fn_plot = os.path.join(os.path.split(behavior_paths[0])[0], "delta_t_mep_vs_tms.png")
1771
+ plt.savefig(fn_plot, dpi=600)
1772
+ plt.close()
1773
+
1774
+ # iterate over trial and tms indices to get valid matches of trials and TMS Navigator information
1775
+ for tms_index, mep_index in zip(tms_index_lst, mep_index_lst):
1776
+ if tms_index not in drop_idx:
1777
+ dictionary = {'number': idx,
1778
+ 'condition': coil_cond_lst[tms_index],
1779
+ 'current': current_lst[tms_index],
1780
+ 'rt': rt_arr[mep_index],
1781
+ 'time_tms': time_tms_lst[tms_index].total_seconds(),
1782
+ 'ts_tms': ts_tms_lst[tms_index],
1783
+ 'time_trial': time_mep_lst[mep_index].total_seconds(),
1784
+ 'date': experiment_date_time,
1785
+ 'coil_sn': coil_sn,
1786
+ 'patient_id': patient_id}
1787
+
1788
+ # write coils
1789
+ for index1 in range(4):
1790
+ for index2 in range(4):
1791
+ dictionary.update({'coil0_' + str(index1) + str(index2): coil_array[0, tms_index, index1, index2]})
1792
+ dictionary.update({'coil1_' + str(index1) + str(index2): coil_array[1, tms_index, index1, index2]})
1793
+ dictionary.update(
1794
+ {'coil_mean_' + str(index1) + str(index2): coil_array[2, tms_index, index1, index2]})
1795
+
1796
+ # get time difference
1797
+ time_diff = time_tms_lst[tms_index] - time_mep_lst[mep_index]
1798
+ time_diff = time_diff.total_seconds() * 1000
1799
+ dictionary.update({'time_diff': time_diff})
1800
+
1801
+ # append to list
1802
+ dict_lst.append(dictionary)
1803
+
1804
+ idx += 1
1805
+
1806
+ return dict_lst
1807
+
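+ # Example (editor's sketch, not part of pynibs): the per-zap dictionaries returned above can be collected
+ # into a flat pandas DataFrame for inspection or HDF5 export; the dictionary values here are made up.
+ # import pandas as pd
+ # dict_lst = [{'number': 0, 'condition': '0', 'current': 50.0, 'rt': 0.42, 'time_diff': 12.3}]
+ # df = pd.DataFrame(dict_lst)
+ # df.to_hdf("/path/to/exp.hdf5", "stim_data")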
1808
+
1809
+ def combine_nnav_ft(xml_paths, behavior_paths, im, coil_sn,
1810
+ nii_exp_path, nii_conform_path,
1811
+ patient_id, drop_trial_idx, nnav_system, cond,
1812
+ mesh_approach="headreco", temp_dir=None, plot=False):
1813
+ """
1814
+ Creates a list of dictionaries containing all experimental data, one dictionary per TMS pulse.
1815
+
1816
+ Parameters
1817
+ ----------
1818
+ xml_paths : list of str
+ Paths to coil0-file and optionally coil1-file; if there is no coil1-file, use an empty string.
+ behavior_paths : list of str
+ Paths to the .csv finger-tapping file(s).
+ im : list of str
+ List of paths to the instrument-marker-file or list of strings containing the instrument marker.
+ coil_sn : str
+ Coil-serial-number.
+ nii_exp_path : str
+ Path to the .nii file that was used in the experiment.
+ nii_conform_path : str
+ Path to the conform*.nii file used to calculate the E-fields with SimNIBS.
+ patient_id : str
+ Patient id.
+ drop_trial_idx : List of int or None
+ Which finger-tapping trials to remove before matching.
+ nnav_system : str
+ Type of neuronavigation system ("Localite", "Visor").
+ cond : str
+ Behavioral outcome / condition name to read from the .csv file.
+ mesh_approach : str, default: "headreco"
+ Approach the mesh is generated with ("headreco" or "mri2mesh").
+ temp_dir : str, default: None (fn_exp_mri_nii folder)
+ Directory to save temporary files (transformation .nii and .mat files).
+ plot : bool, default: False
+ Plot MEPs and p2p evaluation.
1844
+
1845
+ Returns
1846
+ -------
1847
+ dict_lst : list of dicts, one dict for each zap
1848
+ 'number'
1849
+ 'condition'
1850
+ 'current'
1851
+ 'ft'
+ 'time_tms'
+ 'ts_tms'
+ 'time_trial'
1859
+ 'date'
1860
+ 'coil_sn'
1861
+ 'patient_id'
1862
+ """
1863
+
1864
+ # get arrays and lists
1865
+ coil_array, ts_tms_lst, current_lst, tms_idx = pynibs.get_tms_elements(xml_paths, verbose=False)
1866
+
1867
+ # get finger tapping data from .csv files
1868
+ time_ft_lst = []
1869
+ # last_mep_onset = datetime.timedelta(seconds=0)
1870
+ ft_arr = None
1871
+
1872
+ if not isinstance(behavior_paths, list):
1873
+ behavior_paths = [behavior_paths]
1874
+
1875
+ for idx, behavior_path in enumerate(behavior_paths):
1876
+ # get finger tapping data and trial onsets from .csv file
1877
+ ft_array_tmp, trial_onset_lst_tmp, mean_isi = pynibs.get_ft_data_from_csv(behavior_fn=behavior_path,
1878
+ drop_trial_idx=drop_trial_idx,
1879
+ cond=cond)
1880
+
1881
+ time_ft_lst.extend(trial_onset_lst_tmp)
1882
+
1883
+ if idx == 0:
1884
+ ft_arr = ft_array_tmp
1885
+ else:
1886
+ ft_arr = np.concatenate((ft_arr, ft_array_tmp), axis=1)
1887
+
1888
+ # last_mep_onset = time_mep_lst[-1]
1889
+
1890
+ # match TMS Navigator zaps and fts
1891
+ time_ft_lst = [datetime.timedelta(seconds=onset / 1000) for onset in time_ft_lst]
1892
+
1893
+ tms_index_lst, ft_index_lst, time_tms_lst = match_behave_and_triggermarker(mep_time_lst=time_ft_lst,
1894
+ xml_paths=xml_paths,
1895
+ isi=mean_isi) # 0.99/2
1896
+
1897
+ experiment_date_time = "N/A"
1898
+
1899
+ # get indices of not recognizable coils
1900
+ unit_matrix_index_list = []
1901
+ for unit_matrix_index1 in range(coil_array.shape[0]):
1902
+ for unit_matrix_index2 in range(coil_array.shape[1]):
1903
+ if np.allclose(coil_array[unit_matrix_index1, unit_matrix_index2, :, :], np.identity(4)):
1904
+ unit_matrix_index_list.append([unit_matrix_index1, unit_matrix_index2])
1905
+
1906
+ # set condition names in case of random sampling
1907
+ if im is None or im == [""] or im == "":
1908
+ coil_cond_lst = [str(i) for i in range(len(ts_tms_lst))]
1909
+ drop_idx = []
1910
+ else:
1911
+ # get conditions from instrument markers
1912
+ if os.path.isfile(im[0]):
1913
+ coil_cond_lst, drop_idx = pynibs.match_instrument_marker_file(xml_paths, im[0])
1914
+ else:
1915
+ coil_cond_lst, drop_idx = pynibs.match_instrument_marker_string(xml_paths, im)
1916
+
1917
+ # coordinate transform (for coil_0, coil_1, coil_mean)
1918
+ for idx in range(coil_array.shape[0]):
1919
+ # move axis, calculate and move back
1920
+ m_simnibs = np.moveaxis(coil_array[idx, :, :, :], 0, 2)
1921
+ m_simnibs = pynibs.nnav2simnibs(fn_exp_nii=nii_exp_path[0],
1922
+ fn_conform_nii=nii_conform_path,
1923
+ m_nnav=m_simnibs,
1924
+ nnav_system=nnav_system,
1925
+ mesh_approach=mesh_approach,
1926
+ temp_dir=temp_dir)
1927
+
1928
+ coil_array[idx, :, :, :] = np.moveaxis(m_simnibs, 2, 0)
1929
+
1930
+ # replace transformed identity matrices
1931
+ for unit_matrix_indices in unit_matrix_index_list:
1932
+ coil_array[unit_matrix_indices[0], unit_matrix_indices[1], :, :] = np.identity(4)
1933
+
1934
+ # list for dictionaries
1935
+ dict_lst = []
1936
+ idx = 0
1937
+
1938
+ assert len(tms_index_lst) == len(ft_index_lst)
1939
+
1940
+ delta_t = []
1941
+ ts_ft = [time_ft_lst[i] for i in ft_index_lst]
1942
+ ts_tms = [time_tms_lst[i] for i in tms_index_lst]
1943
+
1944
+ for t1, t2 in zip(ts_ft, ts_tms):
1945
+ # print(f"MEP: {t1} TMS: {t2}")
1946
+ delta_t.append(np.abs(t1 - t2))
1947
+
1948
+ plt.plot(np.array([delta_t[i].total_seconds() for i in range(len(delta_t))]) * 1000)
1949
+ plt.xlabel("TMS pulse #", fontsize=11)
1950
+ plt.ylabel(r"$\Delta t$ in ms", fontsize=11)
1951
+ fn_plot = os.path.join(os.path.split(behavior_paths[0])[0], "delta_t_ft_vs_tms.png")
1952
+ plt.savefig(fn_plot, dpi=600)
1953
+ plt.close()
1954
+
1955
+ # iterate over trial and tms indices to get valid matches of trials and TMS Navigator information
1956
+ for tms_index, ft_index in zip(tms_index_lst, ft_index_lst):
1957
+ if tms_index not in drop_idx:
1958
+ dictionary = {'number': idx,
1959
+ 'condition': coil_cond_lst[tms_index],
1960
+ 'current': current_lst[tms_index],
1961
+ 'ft': ft_arr[ft_index],
1962
+ 'time_tms': time_tms_lst[tms_index].total_seconds(),
1963
+ 'ts_tms': ts_tms_lst[tms_index],
1964
+ 'time_trial': time_ft_lst[ft_index].total_seconds(),
1965
+ 'date': experiment_date_time,
1966
+ 'coil_sn': coil_sn,
1967
+ 'patient_id': patient_id}
1968
+
1969
+ # write coils
1970
+ for index1 in range(4):
1971
+ for index2 in range(4):
1972
+ dictionary.update({'coil0_' + str(index1) + str(index2): coil_array[0, tms_index, index1, index2]})
1973
+ dictionary.update({'coil1_' + str(index1) + str(index2): coil_array[1, tms_index, index1, index2]})
1974
+ dictionary.update(
1975
+ {'coil_mean_' + str(index1) + str(index2): coil_array[2, tms_index, index1, index2]})
1976
+
1977
+ # get time difference
1978
+ time_diff = time_tms_lst[tms_index] - time_ft_lst[ft_index]
1979
+ time_diff = time_diff.total_seconds() * 1000
1980
+ dictionary.update({'time_diff': time_diff})
1981
+
1982
+ # append to list
1983
+ dict_lst.append(dictionary)
1984
+
1985
+ idx += 1
1986
+
1987
+ return dict_lst
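+ # Example (editor's illustrative sketch, not part of pynibs): the flattened 'coil_mean_ij' entries written
+ # by the loops above can be folded back into a 4x4 matrix; the dictionary here is synthetic (identity).
+ # import numpy as np
+ # d = {f"coil_mean_{i}{j}": float(i == j) for i in range(4) for j in range(4)}
+ # coil_mean = np.array([[d[f"coil_mean_{i}{j}"] for j in range(4)] for i in range(4)])
+ # print(coil_mean)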