pyNIBS 0.2024.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107):
  1. pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
  2. pyNIBS-0.2024.8.dist-info/METADATA +723 -0
  3. pyNIBS-0.2024.8.dist-info/RECORD +107 -0
  4. pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
  5. pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
  6. pynibs/__init__.py +34 -0
  7. pynibs/coil.py +1367 -0
  8. pynibs/congruence/__init__.py +15 -0
  9. pynibs/congruence/congruence.py +1108 -0
  10. pynibs/congruence/ext_metrics.py +257 -0
  11. pynibs/congruence/stimulation_threshold.py +318 -0
  12. pynibs/data/configuration_exp0.yaml +59 -0
  13. pynibs/data/configuration_linear_MEP.yaml +61 -0
  14. pynibs/data/configuration_linear_RT.yaml +61 -0
  15. pynibs/data/configuration_sigmoid4.yaml +68 -0
  16. pynibs/data/network mapping configuration/configuration guide.md +238 -0
  17. pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
  18. pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
  19. pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
  20. pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
  21. pynibs/data/network mapping configuration/output_documentation.md +185 -0
  22. pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
  23. pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
  24. pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
  25. pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
  26. pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
  27. pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
  28. pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
  29. pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
  30. pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
  31. pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
  32. pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
  33. pynibs/expio/Mep.py +1518 -0
  34. pynibs/expio/__init__.py +8 -0
  35. pynibs/expio/brainsight.py +979 -0
  36. pynibs/expio/brainvis.py +71 -0
  37. pynibs/expio/cobot.py +239 -0
  38. pynibs/expio/exp.py +1876 -0
  39. pynibs/expio/fit_funs.py +287 -0
  40. pynibs/expio/localite.py +1987 -0
  41. pynibs/expio/signal_ced.py +51 -0
  42. pynibs/expio/visor.py +624 -0
  43. pynibs/freesurfer.py +502 -0
  44. pynibs/hdf5_io/__init__.py +10 -0
  45. pynibs/hdf5_io/hdf5_io.py +1857 -0
  46. pynibs/hdf5_io/xdmf.py +1542 -0
  47. pynibs/mesh/__init__.py +3 -0
  48. pynibs/mesh/mesh_struct.py +1394 -0
  49. pynibs/mesh/transformations.py +866 -0
  50. pynibs/mesh/utils.py +1103 -0
  51. pynibs/models/_TMS.py +211 -0
  52. pynibs/models/__init__.py +0 -0
  53. pynibs/muap.py +392 -0
  54. pynibs/neuron/__init__.py +2 -0
  55. pynibs/neuron/neuron_regression.py +284 -0
  56. pynibs/neuron/util.py +58 -0
  57. pynibs/optimization/__init__.py +5 -0
  58. pynibs/optimization/multichannel.py +278 -0
  59. pynibs/optimization/opt_mep.py +152 -0
  60. pynibs/optimization/optimization.py +1445 -0
  61. pynibs/optimization/workhorses.py +698 -0
  62. pynibs/pckg/__init__.py +0 -0
  63. pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
  64. pynibs/pckg/libeep/__init__.py +0 -0
  65. pynibs/pckg/libeep/pyeep.so +0 -0
  66. pynibs/regression/__init__.py +11 -0
  67. pynibs/regression/dual_node_detection.py +2375 -0
  68. pynibs/regression/regression.py +2984 -0
  69. pynibs/regression/score_types.py +0 -0
  70. pynibs/roi/__init__.py +2 -0
  71. pynibs/roi/roi.py +895 -0
  72. pynibs/roi/roi_structs.py +1233 -0
  73. pynibs/subject.py +1009 -0
  74. pynibs/tensor_scaling.py +144 -0
  75. pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
  76. pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
  77. pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
  78. pynibs/tests/data/Xdmf.dtd +89 -0
  79. pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
  80. pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
  81. pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
  82. pynibs/tests/data/create_subject_testsub.py +332 -0
  83. pynibs/tests/data/data.hdf5 +0 -0
  84. pynibs/tests/data/geo.hdf5 +0 -0
  85. pynibs/tests/test_coil.py +474 -0
  86. pynibs/tests/test_elements2nodes.py +100 -0
  87. pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
  88. pynibs/tests/test_mesh_transformations.py +123 -0
  89. pynibs/tests/test_mesh_utils.py +143 -0
  90. pynibs/tests/test_nnav_imports.py +101 -0
  91. pynibs/tests/test_quality_measures.py +117 -0
  92. pynibs/tests/test_regressdata.py +289 -0
  93. pynibs/tests/test_roi.py +17 -0
  94. pynibs/tests/test_rotations.py +86 -0
  95. pynibs/tests/test_subject.py +71 -0
  96. pynibs/tests/test_util.py +24 -0
  97. pynibs/tms_pulse.py +34 -0
  98. pynibs/util/__init__.py +4 -0
  99. pynibs/util/dosing.py +233 -0
  100. pynibs/util/quality_measures.py +562 -0
  101. pynibs/util/rotations.py +340 -0
  102. pynibs/util/simnibs.py +763 -0
  103. pynibs/util/util.py +727 -0
  104. pynibs/visualization/__init__.py +2 -0
  105. pynibs/visualization/para.py +4372 -0
  106. pynibs/visualization/plot_2D.py +137 -0
  107. pynibs/visualization/render_3D.py +347 -0
@@ -0,0 +1,979 @@
1
+ """ Functions to import data from Rogue Research Brainsight neuronavigation systems """
2
+ from collections import OrderedDict
3
+ import pandas as pd
4
+ import numpy as np
5
+ import warnings
6
+ import datetime
7
+ import pynibs
8
+ import h5py
9
+ import io
10
+ import os
11
+
12
+
13
class BrainsightCSVParser:
    """
    Line-by-line state-machine parser for Brainsight CSV exports.

    Feed each line first to :meth:`check_state_transition` and then to
    :meth:`parse_line`; afterwards the file header, the data header, and the
    sample rows are available as attributes.

    Attributes
    ----------
    current_state : BrainsightCSVParser.State
        The current state of the parser.
    header_file : str
        The file header.
    header_data : list of str
        The header data.
    data : list of list of str
        The parsed data rows.
    """

    class State:
        # parser states: consuming the leading '# ' file header,
        # skipping comments/targets, reading the sample header, reading samples
        FILE_HEADER = 0
        SKIPPING = 1
        DATA_HEADER = 2
        DATA = 3
        FIN = 4

    def __init__(self):
        """Initialize the BrainsightCSVParser object with empty buffers."""
        self.current_state = BrainsightCSVParser.State.FILE_HEADER
        self.header_file = ""
        self.header_data = []
        self.data = []

    def check_state_transition(self, line):
        """
        Update ``current_state`` according to the given line.

        Parameters
        ----------
        line : str
            The line to check.
        """
        states = BrainsightCSVParser.State
        if line.startswith("# Sample Name"):
            # header of the samples table
            self.current_state = states.DATA_HEADER
        elif line.startswith("Sample "):
            # an actual sample row
            self.current_state = states.DATA
        elif (line.startswith("# ")
              and not line.startswith("# Target Name")
              and self.current_state == states.FILE_HEADER):
            # still inside the leading file header; any '#' line after the
            # header (or the target table header) is treated as a comment
            self.current_state = states.FILE_HEADER
        else:
            # targets, comments, and everything else are skipped
            self.current_state = states.SKIPPING

    def parse_line(self, line):
        """
        Parse a line and update the attribute matching the current state.

        Parameters
        ----------
        line : str
            The line to parse.
        """
        states = BrainsightCSVParser.State
        if self.current_state == states.FILE_HEADER:
            self.header_file += line
        elif self.current_state == states.DATA_HEADER:
            # drop leading comment marker '# ' and trailing newline '\n'
            self.header_data = line[2:-1].split('\t')
        elif self.current_state == states.DATA:
            # drop trailing newline '\n' and split into columns
            self.data.append(line[:-1].split('\t'))
98
+
99
+
100
def write_targets_brainsight(targets, fn_out, names=None, overwrite=True):
    """
    Writes coil position and orientations in .txt file for import into Brainsight.

    Parameters
    ----------
    targets : np.ndarray of float
        (4, 4, N_targets), Tensor containing the 4x4 matrices with coil orientation and position.
        A single (4, 4) matrix is also accepted and treated as one target.
    fn_out : str
        Filename of output file ('.txt' is appended if missing).
    names : str or list of str, optional
        Target names (if None they will be numbered by their order).
    overwrite : bool, default: True
        Overwrite existing .txt file.

    Returns
    -------
    <file> .txt file containing the targets for import into Brainsight

    Raises
    ------
    ValueError
        If ``targets`` is not of shape (4, 4, N) or the number of ``names``
        does not match the number of targets.
    FileExistsError
        If the output file already exists and ``overwrite`` is False.
    """
    targets = np.atleast_3d(targets)

    if names is None:
        names = [f"{i:04}" for i in range(targets.shape[2])]
    if isinstance(names, str):
        names = [names]

    # explicit raises instead of 'assert': assertions are silently stripped
    # when Python runs with optimizations ('python -O')
    if targets.shape[:2] != (4, 4):
        raise ValueError('Expecting array with shape (4, 4, N instrument marker).')
    if len(names) != targets.shape[2]:
        raise ValueError(f'Got {len(names)} names for {targets.shape[2]} targets.')

    if not fn_out.lower().endswith('.txt'):
        fn_out += '.txt'

    if os.path.exists(fn_out) and not overwrite:
        raise FileExistsError('.txt file already exists. Remove or set overwrite=True.')

    with io.open(fn_out, 'w', newline='\n') as f:  # correct windows style would be \r\n, but Localite uses \n
        f.write('# Version: 12\n')
        f.write('# Coordinate system: NIfTI:Aligned\n')
        f.write('# Created by: pynibs\n')
        f.write('# Units: millimetres, degrees, milliseconds, and microvolts\n')
        f.write('# Encoding: UTF-8\n')
        f.write('# Notes: Each column is delimited by a tab. Each value within a column is delimited by a semicolon.\n')
        f.write('# Target Name\tLoc. X\tLoc. Y\tLoc. Z\tm0n0\tm0n1\tm0n2\tm1n0\tm1n1\tm1n2\tm2n0\tm2n1\tm2n2\n')

        for i_t in range(targets.shape[2]):
            # column order: name, translation (4th matrix column), then the
            # three rotation columns m0, m1, m2 (column-major)
            f.write(f'{names[i_t]}\t' +
                    f'{targets[0, 3, i_t]:.4f}\t{targets[1, 3, i_t]:.4f}\t{targets[2, 3, i_t]:.4f}\t' +
                    f'{targets[0, 0, i_t]:.4f}\t{targets[1, 0, i_t]:.4f}\t{targets[2, 0, i_t]:.4f}\t' +
                    f'{targets[0, 1, i_t]:.4f}\t{targets[1, 1, i_t]:.4f}\t{targets[2, 1, i_t]:.4f}\t' +
                    f'{targets[0, 2, i_t]:.4f}\t{targets[1, 2, i_t]:.4f}\t{targets[2, 2, i_t]:.4f}\n')
149
+
150
+
151
def read_targets_brainsight(fn):
    """
    Reads target coil position and orientations from .txt file and returns it as 4 x 4 x N_targets numpy array.

    Parameters
    ----------
    fn : str
        Filename of output file.

    Returns
    -------
    m_nnav : np.ndarray of float
        (4, 4, N_targets) Tensor containing the 4x4 matrices with coil orientation and position.
    """
    warnings.warn("This function is deprecated. Please use pynibs.brainsight.get_marker(markertype='Targets') instead.",
                  DeprecationWarning)
    with io.open(fn, 'r') as fh:
        content = fh.readlines()

    # targets start right after the '# Target' header line ...
    first = [idx for idx, ln in enumerate(content) if ln.startswith("# Target")][0] + 1
    # ... and end where the samples (or the session footer) begin
    stops = [idx for idx, ln in enumerate(content)
             if ln.startswith("# Sample Name") or ln.startswith("# Session Name")]
    last = len(content) if not stops else np.min(stops)

    m_nnav = np.zeros((4, 4, last - first))
    for col, raw in enumerate(content[first:last]):
        fields = raw.split(sep='\t')
        # columns 1-3 hold the location, 4-12 the rotation matrix (column-major)
        m_nnav[:3, 0, col] = np.array(fields[4:7]).astype(float)
        m_nnav[:3, 1, col] = np.array(fields[7:10]).astype(float)
        m_nnav[:3, 2, col] = np.array(fields[10:13]).astype(float)
        m_nnav[:3, 3, col] = np.array(fields[1:4]).astype(float)
        m_nnav[3, 3, col] = 1  # homogeneous coordinate
    return m_nnav
189
+
190
+
191
def create_merged_nnav_emg_file_brainsight(fn_brainsight_nnav_info, fn_emg_info, fn_out, p2p_window_start=18,
                                           p2p_window_end=35):
    """
    Creates a merged file containing TMS coil positions and EMG data from Brainsight information.

    The EMG recording (MATLAB v7.3 ``.mat`` or CED Signal ``.cfs``) is read, matched by
    timestamp against the Brainsight sample rows, and every sample row is extended with
    the EMG metadata and the per-channel EMG sweeps (semicolon-separated values).
    The result is written to ``fn_out`` in Brainsight's tab-delimited text format.

    Parameters
    ----------
    fn_brainsight_nnav_info : str
        File path of the Brainsight nnav coil information.
    fn_emg_info : str
        File path of the EMG information.
    fn_out : str
        File path of the output merged file.
    p2p_window_start : int, default: 18
        Start of the time frame after TMS pulse where p2p value is evaluated (in ms).
    p2p_window_end : int, default: 35
        End of the time frame after TMS pulse where p2p value is evaluated (in ms).
    """
    # read EMG information
    _, ext = os.path.splitext(fn_emg_info)

    if ext == ".mat":
        # MATLAB v7.3 files are HDF5 containers, readable with h5py
        try:
            with h5py.File(fn_emg_info, 'r') as data_h5:
                keys = list(data_h5.keys())
                # '#'-prefixed groups are MATLAB-internal (e.g. '#refs#'), not data
                keys = [k for k in keys if not k.startswith('#')]

                if len(keys) != 1:
                    raise ValueError(f"Key ambiguity! Please provide only exactly one data field in file: "
                                     f"{fn_emg_info}")

                # expected layout: (n_stims, n_channels, n_samples)
                emg_data = data_h5[f"/{keys[0]}/values"][:]
                emg_start = data_h5[f"/{keys[0]}/start"][0][0]
                emg_res = data_h5[f"/{keys[0]}/interval"][0][0]

                if emg_res < .1:  # time step between samples might be stored in seconds
                    emg_res *= 1e3

                num_stim = emg_data.shape[0]
                num_channels = emg_data.shape[1]
                # num_emg_samples is not used further below; kept for symmetry with the .cfs reader
                num_emg_samples = emg_data.shape[2]

                # per-stimulation frame start times are stored as HDF5 object references
                emg_timestamps_h5_ref = data_h5[f"/{keys[0]}/frameinfo/start"][:]
                emg_timestamps = []
                for stim_idx in range(num_stim):
                    emg_timestamps.append(data_h5[emg_timestamps_h5_ref[0][stim_idx]][0][0])
                emg_timestamps = np.asarray(emg_timestamps, dtype=np.float32)
                # make timestamps relative to the first stimulation
                emg_timestamps -= emg_timestamps[0]
        except OSError:
            # h5py raises OSError for non-HDF5 files (i.e. pre-v7.3 .mat files)
            raise NotImplementedError(f"Could not read {fn_emg_info} -- is this a matlab v7.3 .mat file?")

    elif ext == ".cfs":
        emg_data, emg_timestamps, num_stim, num_channels, num_emg_samples, sampling_rate = \
            pynibs.read_biosig_emg_data(fn_data=fn_emg_info, include_first_trigger=True, type="cfs")
        emg_res = 1000 / sampling_rate  # [ms]
        emg_start = 0  # EMG start time not included in CSF file / always 0 for CED Signal (?)
    else:
        # NOTE(review): the two adjacent string literals concatenate without a
        # separator, so "{ext}" and "Only" run together in the message
        raise ValueError(f"[Error] create_merged_nnav_emg_file_brainsight: unsupported EMG data file extension: {ext}"
                         "Only CED Signal files (.cfs) or MATLAB v.7.3 files (.mat) are supported.")

    # read nnav coil information, line by line, via the CSV state machine
    with open(fn_brainsight_nnav_info, 'r') as data_txt_f:
        csv_parser = BrainsightCSVParser()
        while True:
            line = data_txt_f.readline()
            if not line:
                break
            csv_parser.check_state_transition(line)
            csv_parser.parse_line(line)

    # build nnav timestamps (seconds relative to the first sample)
    nnav_timestamps = []
    start_time_string = current_time_string = csv_parser.data[0][24]  # 25. column of CSv data = time stamp
    start_time = datetime.datetime.strptime(start_time_string, '%H:%M:%S.%f')
    for c_idx in range(0, num_stim):
        current_time_string = csv_parser.data[c_idx][24]  # 25. column of CSv data = time stamp
        current_time = datetime.datetime.strptime(current_time_string, '%H:%M:%S.%f')
        nnav_timestamps.append((current_time - start_time).total_seconds())

    nnav_timestamps = np.asarray(nnav_timestamps, dtype=np.float32)

    # verify timesteps
    timestamp_difference = np.abs(nnav_timestamps - emg_timestamps)
    if np.any(timestamp_difference > 1):
        print("[ WARNING ] Detected time stamp differences larger than 1 second. "
              "Please check whether data from the neuronavigation are aligned with the EMG data.")

    # append EMG info to the nnav coil info
    # 1) header
    header_out = csv_parser.header_data.copy()
    header_out.append("EMG Start")
    header_out.append("EMG End")
    header_out.append("EMG Res.")
    header_out.append("EMG Channels")
    header_out.append("EMG Window Start")
    header_out.append("EMG Window End")
    # two extra columns per channel (Brainsight channel indices are 1-based)
    for c_idx in range(num_channels):
        header_out.append(f"EMG Peak-to-peak {c_idx + 1}")
        header_out.append(f"EMG Data {c_idx + 1}")

    # 2) data
    data_text_out = []
    for stim_idx in range(len(emg_data)):
        # take what we already have in the nnav data
        row = csv_parser.data[stim_idx].copy()
        # extend it by the EMG data
        row.extend([str(emg_start),  # EMG Start
                    "(null)",  # EMG End
                    str(emg_res),  # EMG Res.
                    str(num_channels),  # EMG Channels
                    str(p2p_window_start),  # EMG Window Start
                    str(p2p_window_end)])  # EMG Window End
        for c_idx in range(num_channels):
            # peak-to-peak is written as a placeholder "0"; it is recomputed downstream
            row.append("0")  # EMG Peak-to-peak c_idx+1
            emg_data_list = emg_data[stim_idx, c_idx, :].tolist()
            row.append(  # EMG data, semicolon-separated sweep values
                ';'.join(map(str, emg_data_list))
            )
        data_text_out.append('\t'.join(row))

    # write merged output: original file header, extended data header, data rows, footer
    with open(fn_out, 'w') as outfile:
        outfile.write(csv_parser.header_file)
        outfile.write("# " + '\t'.join(header_out))
        outfile.write('\n')
        outfile.write('\n'.join(data_text_out))
        outfile.write('\n')
        outfile.write("# Session Name: END")
317
+
318
+
319
def merge_exp_data_brainsight(subject, exp_idx, mesh_idx, coil_outlier_corr_cond=False,
                              remove_coil_skin_distance_outlier=True, coil_distance_corr=True,
                              average_emg_data_from_same_condition=False, verbose=False, plot=False,
                              start_mep=18, end_mep=35):
    """
    Merge the TMS coil positions and the mep data into an experiment.hdf5 file.

    Parameters
    ----------
    subject : pynibs.subject.Subject
        Subject object.
    exp_idx : str
        Experiment ID.
    mesh_idx : str
        Mesh ID.
    coil_outlier_corr_cond : bool, default: False
        Correct outlier of coil position and orientation (+-2 mm, +-3 deg) in case of conditions.
    remove_coil_skin_distance_outlier : bool, default: True
        Remove outlier of coil position lying too far away from the skin surface (-5/+2 mm).
    coil_distance_corr : bool, default: True
        Perform coil <-> head distance correction (coil is moved towards head surface until coil touches scalp).
    average_emg_data_from_same_condition : bool, default: False
        Flag indicating whether to average EMG data from the same condition. Only meaningful in conjunction with
        'coil_distance_correction', which averages the coil position within a condition.
    verbose : bool, default: False
        Plot output messages.
    plot : bool, default: False
        Plot MEPs and p2p evaluation.
    start_mep : int, default: 18
        Start of time frame after TMS pulse where p2p value is evaluated (in ms).
    end_mep : int, default: 35
        End of time frame after TMS pulse where p2p value is evaluated (in ms).

    Raises
    ------
    ValueError
        If exp[*]['fn_data'] or exp[*]['fn_tms_nav'] is neither a string nor a (nested) list of strings.
    KeyError
        If exp[*]['fn_tms_nav'] is missing from the subject object.
    """
    nii_exp_path_lst = subject.exp[exp_idx]['fn_mri_nii']
    # nii_conform_path = os.path.join(os.path.split(subject.mesh[mesh_idx]["fn_mesh_hdf5"])[0],
    #                                 subject.id + "_T1fs_conform.nii.gz")
    nii_conform_path = subject.exp[exp_idx]["fn_mri_nii"][0][0]
    fn_exp_hdf5 = subject.exp[exp_idx]['fn_exp_hdf5'][0]
    fn_coil = subject.exp[exp_idx]['fn_coil'][0][0]
    fn_mesh_hdf5 = subject.mesh[mesh_idx]['fn_mesh_hdf5']
    if isinstance(subject.exp[exp_idx]['fn_exp_hdf5'][0], list):
        temp_dir = os.path.join(os.path.split(subject.exp[exp_idx]['fn_exp_hdf5'][0])[0],
                                "nnav2simnibs",
                                f"mesh_{mesh_idx}")
    else:
        temp_dir = os.path.join(os.path.split(subject.exp[exp_idx]['fn_exp_hdf5'])[0],
                                "nnav2simnibs",
                                f"mesh_{mesh_idx}")

    # resolve the EMG data file name (string, list, or nested list)
    fn_data = None
    if "fn_data" in subject.exp[exp_idx].keys():
        if isinstance(subject.exp[exp_idx]["fn_data"], list):
            if isinstance(subject.exp[exp_idx]["fn_data"][0], str):
                fn_data = subject.exp[exp_idx]["fn_data"][0]
            else:
                fn_data = subject.exp[exp_idx]["fn_data"][0][0]
        elif isinstance(subject.exp[exp_idx]["fn_data"], str):
            fn_data = subject.exp[exp_idx]["fn_data"]
        else:
            raise ValueError(f"Invalid exp[{exp_idx}]['fn_data']: "
                             "either specify the path as a single string or as a nested list of strings [['path']]!")
    else:
        print(f"No EMG data specified in exp[{exp_idx}]['fn_data']: "
              f"Will read EMG data from exp[{exp_idx}]['fn_tms_nav']")

    # resolve the Brainsight navigation file name (string, list, or nested list)
    if "fn_tms_nav" in subject.exp[exp_idx]:
        if isinstance(subject.exp[exp_idx]["fn_tms_nav"], list):
            if isinstance(subject.exp[exp_idx]["fn_tms_nav"][0], str):
                fn_tms_nav = subject.exp[exp_idx]["fn_tms_nav"][0]
            else:
                fn_tms_nav = subject.exp[exp_idx]["fn_tms_nav"][0][0]
        elif isinstance(subject.exp[exp_idx]["fn_tms_nav"], str):
            # fix: previously '[0][0]' was applied to the plain string, yielding only its first character
            fn_tms_nav = subject.exp[exp_idx]["fn_tms_nav"]
        else:
            raise ValueError("Invalid exp[*]['fn_tms_nav']: "
                             "Either specify the path as a single string or as a nested list of strings [['path']]!")
    else:
        raise KeyError("No exp[*]['fn_tms_nav'] found in subject object. Cannot continue to merge data,")

    # if a separate EMG file is given, merge it into the Brainsight file first
    if fn_data is not None:
        fn_out, out_ext = os.path.splitext(fn_tms_nav)
        fn_out += "_merged" + out_ext

        if not os.path.exists(fn_out):
            create_merged_nnav_emg_file_brainsight(
                fn_brainsight_nnav_info=fn_tms_nav,
                fn_emg_info=fn_data,
                fn_out=fn_out
            )

        fn_tms_nav = fn_out

    # read Brainsight data
    ######################
    if verbose:
        print(f"Reading Brainsight data from file: {fn_tms_nav}")

    d_bs, m_nnav, n_stims = read_samples_brainsight(fn_tms_nav)

    # transform from brainsight to simnibs space
    ############################################
    if verbose:
        print("Transforming coil positions from Brainsight to SimNIBS space")
        print(nii_conform_path)

    m_simnibs = pynibs.nnav2simnibs(fn_exp_nii=nii_exp_path_lst[0][0],
                                    fn_conform_nii=nii_conform_path,
                                    m_nnav=m_nnav,
                                    nnav_system="brainsight",
                                    mesh_approach="headreco",
                                    fiducials=None,
                                    orientation='RAS',
                                    fsl_cmd=None,
                                    target='simnibs',
                                    temp_dir=temp_dir,
                                    rem_tmp=True,
                                    verbose=verbose)

    # create dictionary containing stimulation and physiological data
    if verbose:
        print("Creating dictionary containing stimulation and physiological data")

    current_scaling_factor = 1.43 if "current_scaling_factor" not in subject.exp[exp_idx].keys() \
        else subject.exp[exp_idx]["current_scaling_factor"]

    # create output directory (which is going to be persisted to experiment.hdf5)
    d = dict()
    d['coil_0'] = []
    d['coil_1'] = []
    d['coil_mean'] = []
    d['number'] = []
    d['condition'] = []
    d['current'] = []
    d['date'] = []
    d['time'] = []
    d['coil_sn'] = []
    d['patient_id'] = []

    for i in range(n_stims):
        d['coil_0'].append(m_simnibs[:, :, i])
        # only one coil was tracked; coil_1 is all-NaN (np.nan: np.NaN was removed in NumPy 2.0)
        d['coil_1'].append(np.zeros((4, 4)) * np.nan)
        d['coil_mean'].append(np.nanmean(np.stack((d['coil_0'][-1],
                                                   d['coil_1'][-1]), axis=2), axis=2))
        d['number'].append(d_bs['Index'][i])
        # Sample Name is unique for each pulse, so no conditions will be detected
        # d['condition'].append(d_bs['Sample Name'][i])
        # Assoc. Target will assign the same number to pulses that had the same target/marker
        # (despite minor deviations in the true location of the coil after the pulse)
        cond = d_bs['Assoc. Target'][i]
        if cond == '(null)':
            cond = i
        d['condition'].append(cond)
        d['current'].append(1 * current_scaling_factor)
        d['date'].append(d_bs["Date"][i])
        d['time'].append(d_bs["Time"][i])
        d['coil_sn'].append(os.path.split(fn_coil)[1])
        d['patient_id'].append(subject.id)

        # add remaining data to keys that have not already been added or that should not be added
        for key in d_bs.keys():
            if key not in ['Sample Name', 'Session Name', 'Index',
                           'Loc. X', 'Loc. Y', 'Loc. Z',
                           'm0n0', 'm0n1', 'm0n2',
                           'm1n0', 'm1n1', 'm1n2',
                           'm2n0', 'm2n1', 'm2n2',
                           'Date', 'Time'] and \
                    not (key.startswith('EMG Peak-to-peak') or
                         key.startswith('EMG Data')):

                try:
                    d[key].append(d_bs[key][i])
                except KeyError:
                    d[key] = []
                    d[key].append(d_bs[key][i])

    # add physiological raw data
    channels = subject.exp[exp_idx]["channels"]

    if verbose:
        print("Postprocessing MEP data")
        if plot:
            print("    Creating MEP plots ...")

    for c_idx, chan_name in enumerate(channels):
        # brainsight indexes channels 1-based
        c_idx += 1

        d[f"mep_raw_data_time_{chan_name}"] = []
        d[f"mep_filt_data_time_{chan_name}"] = []
        d[f"mep_raw_data_{chan_name}"] = []
        d[f"mep_filt_data_{chan_name}"] = []
        d[f"p2p_brainsight_{chan_name}"] = []
        d[f"p2p_{chan_name}"] = []
        d[f"mep_latency_{chan_name}"] = []

        for i in range(n_stims):
            fn_plot = None
            if plot:
                fn_channel = os.path.join(os.path.dirname(fn_tms_nav), "plots", str(chan_name))
                fn_plot = os.path.join(fn_channel, f"mep_{i:04}")
                os.makedirs(fn_channel, exist_ok=True)

            # filter data and calculate p2p values
            p2p, mep_filt_data, latency = pynibs.calc_p2p(sweep=d_bs[f"EMG Data {c_idx}"][i],
                                                          tms_pulse_time=d_bs["Offset"][i],
                                                          sampling_rate=1000 / d_bs["EMG Res."][i],
                                                          start_mep=start_mep, end_mep=end_mep,
                                                          measurement_start_time=float(d["EMG Start"][i]),
                                                          fn_plot=fn_plot)

            d[f"mep_raw_data_time_{chan_name}"].append(np.arange(d_bs["EMG Start"][i],
                                                                 d_bs["EMG End"][i], d_bs["EMG Res."][i]))
            d[f"mep_filt_data_time_{chan_name}"].append(
                np.arange(d_bs["EMG Start"][i], d_bs["EMG End"][i], d_bs["EMG Res."][i]))
            d[f"mep_raw_data_{chan_name}"].append(d_bs[f"EMG Data {c_idx}"][i])
            d[f"mep_filt_data_{chan_name}"].append(mep_filt_data)
            d[f"p2p_brainsight_{chan_name}"].append(d_bs[f"EMG Peak-to-peak {c_idx}"][i])
            d[f"p2p_{chan_name}"].append(p2p)
            d[f"mep_latency_{chan_name}"].append(latency)

    # set filename of experiment.hdf5
    # fix: these conditions used 'or', which is always true (None != []), making the fallbacks unreachable
    if subject.exp[exp_idx]["fn_exp_hdf5"] is not None and subject.exp[exp_idx]["fn_exp_hdf5"] != []:
        fn_exp_hdf5 = subject.exp[exp_idx]["fn_exp_hdf5"][0] if isinstance(subject.exp[exp_idx]["fn_exp_hdf5"], list) \
            else subject.exp[exp_idx]["fn_exp_hdf5"]

    elif subject.exp[exp_idx]["fn_exp_csv"] is not None and subject.exp[exp_idx]["fn_exp_csv"] != []:
        fn_exp_hdf5 = subject.exp[exp_idx]["fn_exp_csv"][0]

    elif fn_exp_hdf5 is None or fn_exp_hdf5 == []:
        fn_exp_hdf5 = os.path.join(subject.subject_folder, "exp", exp_idx, "experiment.hdf5")

    # remove coil position outliers (in case of conditions)
    if coil_outlier_corr_cond:
        if verbose:
            print("Removing coil position outliers")
        d = pynibs.coil_outlier_correction_cond(exp=d, outlier_angle=5., outlier_loc=3., fn_exp_out=fn_exp_hdf5)

    # perform coil <-> head distance correction
    if coil_distance_corr:
        if verbose:
            print("Performing coil <-> head distance correction")
        d = pynibs.coil_distance_correction(exp=d,
                                            fn_geo_hdf5=fn_mesh_hdf5,
                                            remove_coil_skin_distance_outlier=remove_coil_skin_distance_outlier,
                                            fn_plot=os.path.split(fn_exp_hdf5)[0])

    d_avg = dict()
    for key in d.keys():
        d_avg[key] = []

    # TODO: This only meaningfully works in conjunction with 'coil_distance_correction', which averages the
    #  coil position within a condition. For each condition, we wil then have 1) and average coil position,
    #  2) an average EMG sample, 3) only 1 full dataset (all but one samples are discarded after averaging)
    if average_emg_data_from_same_condition:
        if verbose:
            print("Averaging samples from the same condition")

        conditions_per_sample = np.array(d['condition'])
        conditions_unique = np.unique(conditions_per_sample)

        # Create new, dedicated dictionary with all EMG data to:
        # 1) collect all EMG related keys of the main dictionary
        # 2) convert the stored lists to numpy arrays (for list-based indexing)
        all_emg_data = dict()
        for c_idx, chan_name in enumerate(channels):
            all_emg_data[f"mep_raw_data_{chan_name}"] = np.array(d[f"mep_raw_data_{chan_name}"])
            all_emg_data[f"mep_filt_data_{chan_name}"] = np.array(d[f"mep_filt_data_{chan_name}"])
            all_emg_data[f"p2p_brainsight_{chan_name}"] = np.array(d[f"p2p_brainsight_{chan_name}"])
            all_emg_data[f"p2p_{chan_name}"] = np.array(d[f"p2p_{chan_name}"])

        # create a single entry in the new result dictionary (d_avg) for each condition
        for condition_id in conditions_unique:
            condition_idcs = np.where(conditions_per_sample == condition_id)[0]

            # just copy the first occurrence (within a condition) of non-emg related keys
            if len(condition_idcs) > 0:
                for key in d.keys():
                    if key not in all_emg_data.keys():
                        d_avg[key].append(d[key][condition_idcs[0]])

                # average all emg-related keys of a condition
                for key in all_emg_data.keys():
                    d_avg[key].append(np.mean(all_emg_data[key][condition_idcs], axis=0))

        d = d_avg

    # create dictionary of stimulation data
    d_stim_data = dict()
    d_stim_data["coil_0"] = d["coil_0"]
    d_stim_data["coil_1"] = d["coil_1"]
    d_stim_data["coil_mean"] = d["coil_mean"]
    d_stim_data["number"] = d["number"]
    d_stim_data["condition"] = d["condition"]
    d_stim_data["current"] = d["current"]
    d_stim_data["date"] = d["date"]
    d_stim_data["time"] = d["time"]
    d_stim_data["Creation Cause"] = d["Creation Cause"]
    d_stim_data["Offset"] = d["Offset"]

    # create dictionary of raw physiological data
    d_phys_data_raw = dict()
    d_phys_data_raw["EMG Start"] = d["EMG Start"]
    d_phys_data_raw["EMG End"] = d["EMG End"]
    d_phys_data_raw["EMG Res."] = d["EMG Res."]
    d_phys_data_raw["EMG Channels"] = d["EMG Channels"]
    d_phys_data_raw["EMG Window Start"] = d["EMG Window Start"]
    d_phys_data_raw["EMG Window End"] = d["EMG Window End"]

    for chan in channels:
        d_phys_data_raw[f"mep_raw_data_time_{chan}"] = d[f"mep_raw_data_time_{chan}"]
        d_phys_data_raw[f"mep_raw_data_{chan}"] = d[f"mep_raw_data_{chan}"]

    # create dictionary of postprocessed physiological data
    d_phys_data_postproc = dict()

    for chan in channels:
        d_phys_data_postproc[f"mep_filt_data_time_{chan}"] = d[f"mep_filt_data_time_{chan}"]
        d_phys_data_postproc[f"mep_filt_data_{chan}"] = d[f"mep_filt_data_{chan}"]
        d_phys_data_postproc[f"p2p_brainsight_{chan}"] = d[f"p2p_brainsight_{chan}"]
        d_phys_data_postproc[f"p2p_{chan}"] = d[f"p2p_{chan}"]
        d_phys_data_postproc[f"mep_latency_{chan}"] = d[f"mep_latency_{chan}"]

    # create pandas dataframes from dicts
    df_stim_data = pd.DataFrame.from_dict(d_stim_data)
    df_phys_data_raw = pd.DataFrame.from_dict(d_phys_data_raw)
    df_phys_data_postproc = pd.DataFrame.from_dict(d_phys_data_postproc)

    # save in .hdf5 file
    if verbose:
        print(f"Saving experimental data to file: {fn_exp_hdf5}")
    df_stim_data.to_hdf(fn_exp_hdf5, "stim_data")
    df_phys_data_raw.to_hdf(fn_exp_hdf5, "phys_data/raw/EMG")
    df_phys_data_postproc.to_hdf(fn_exp_hdf5, "phys_data/postproc/EMG")
652
+
653
+
654
def read_samples_brainsight(fn_tms_nav):
    """
    Reads sample data from a Brainsight navigation system file.

    .. deprecated::
        Use ``pynibs.brainsight.get_marker(markertype='Samples')`` instead.

    Parameters
    ----------
    fn_tms_nav : str
        The file path of the Brainsight navigation system file.

    Returns
    -------
    d_bs : collections.OrderedDict
        An ordered dictionary containing the parsed data from the Brainsight file.
        The keys of the dictionary correspond to the column headers in the Brainsight file,
        and the values are lists of the data in those columns.
    m_nnav : numpy.ndarray
        A 3D numpy array of shape (4, 4, n_stims) containing the 4x4 transformation matrices for each stimulation.
    n_stims : int
        The number of stimulations, inferred from the number of rows in the Brainsight file.
    """
    warnings.warn("This function is deprecated. Please use pynibs.brainsight.get_marker(markertype='Samples') instead.",
                  DeprecationWarning)
    d_bs = OrderedDict()
    with io.open(fn_tms_nav, 'r') as f:
        lines = f.readlines()
    # Data rows start on the line right below the '# Sample Name' column header.
    start_idx = [i + 1 for i, l in enumerate(lines) if l.startswith("# Sample Name")][0]
    # Data rows end where the next section ('# Session Name') begins, or at EOF.
    stop_idx = [i for i, l in enumerate(lines) if l.startswith("# Session Name")]
    if not stop_idx:
        stop_idx = len(lines)
    else:
        stop_idx = np.min(stop_idx)
    n_stims = stop_idx - start_idx
    # one 4x4 coil transformation matrix per stimulation, stacked along axis 2
    m_nnav = np.zeros((4, 4, n_stims))
    # column names come from the header line ('# ' prefix and trailing newline removed)
    keys = lines[start_idx - 1].split('\t')
    keys[0] = keys[0].replace('# ', '')
    keys[-1] = keys[-1].replace('\n', '')
    # create brainsight dict
    for key in keys:
        d_bs[key] = []
    # collect data; for each line...
    for idx_coil_sample, idx_line in enumerate(range(start_idx, stop_idx)):
        line = lines[idx_line].split(sep='\t')

        # ...populate the keys of the brainsight-dict
        for idx_key, key in enumerate(d_bs.keys()):
            # standard (string) entries can be appended straightforwardly
            if key in ["Sample Name", "Session Name", "Creation Cause", "Crosshairs Driver", "Date", "Time",
                       "EMG Channels", "Assoc. Target"]:
                d_bs[key].append(line[idx_key])
            # other keys include: "Loc. X/Y/Z", "EMG Data 1/2", "EMG Peak-to-Peak 1" etc...
            else:
                # each sample of EMG data is separated by a semicolon
                if ";" in line[idx_key]:
                    d_bs[key].append(np.array(line[idx_key].split(';')).astype(float))
                # other non-EMG-data (numeric) entries
                else:
                    # if key was selected for export but no corresponding data could be collected, "(null)" is entered
                    if line[idx_key] != "(null)":
                        try:
                            d_bs[key].append(float(line[idx_key]))
                        except Exception:
                            # NOTE(review): an unparseable value is silently dropped here,
                            # which leaves this column's list shorter than the others —
                            # downstream the matrix assembly below then reads a stale
                            # d_bs[key][-1]. Presumably this never fires for the
                            # location/rotation columns — TODO confirm.
                            pass
                    else:
                        d_bs[key].append(float(-1))

            # assemble coil configuration matrix: 'Loc. X/Y/Z' is the translation,
            # 'm<col>n<row>' entries are the rotation part (column-major naming).
            # Relies on the value just appended above (d_bs[key][-1]).
            if key in ["Loc. X"]:
                m_nnav[0, 3, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["Loc. Y"]:
                m_nnav[1, 3, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["Loc. Z"]:
                m_nnav[2, 3, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m0n0"]:
                m_nnav[0, 0, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m0n1"]:
                m_nnav[1, 0, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m0n2"]:
                m_nnav[2, 0, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m1n0"]:
                m_nnav[0, 1, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m1n1"]:
                m_nnav[1, 1, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m1n2"]:
                m_nnav[2, 1, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m2n0"]:
                m_nnav[0, 2, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m2n1"]:
                m_nnav[1, 2, idx_coil_sample] = float(d_bs[key][-1])
            elif key in ["m2n2"]:
                m_nnav[2, 2, idx_coil_sample] = float(d_bs[key][-1])

        # homogeneous coordinate of the 4x4 transformation matrix
        m_nnav[3, 3, idx_coil_sample] = 1
    return d_bs, m_nnav, n_stims
747
+
748
+
749
def transform_brainsight(data, col_names):
    """
    Transform Brainsight coil positions/orientations into SimNIBS matsimnibs matrices.

    Parameters
    ----------
    data : list of list of str
        One row per coil placement, holding the column values exported by Brainsight.
        The first entry of each row is the sample/target name.
    col_names : list of str
        Column headers matching the row entries, e.g. ``'Loc. X'``, ``'m0n0'``, ...

    Returns
    -------
    names : list of str
        Sample/target names. ``len(names) == n_pos``.
    matsimnibs : np.ndarray
        (n_pos, 4, 4) coil position/orientation matrices in matsimnibs style,
        still using the Brainsight coil axes definition.

    Written by Ole Numssen, numssen@cbs.mpg.de, 2022.
    """
    matsimnibs = np.zeros((len(data), 4, 4))
    pos_names = []
    for i, pos in enumerate(data):
        # map column header -> value for this row
        p_dict = dict(zip(col_names, pos))

        # Brainsight exports the rotation as 'm<column>n<row>' entries;
        # 'Loc. X/Y/Z' is the translation column of the homogeneous matrix.
        m = [[p_dict['m0n0'], p_dict['m1n0'], p_dict['m2n0'], p_dict['Loc. X']],
             [p_dict['m0n1'], p_dict['m1n1'], p_dict['m2n1'], p_dict['Loc. Y']],
             [p_dict['m0n2'], p_dict['m1n2'], p_dict['m2n2'], p_dict['Loc. Z']],
             [0, 0, 0, 1]]

        matsimnibs[i] = np.array(m).astype(float)
        pos_names.append(pos[0])

    # adjust coil axes definition to simnibs style
    return pos_names, matsimnibs  # @ self.simnibs2brainsight()
781
+
782
+
783
def simnibs2brainsight():
    """
    Flip matrix for the Brainsight <-> SimNIBS coil axes conversion.

    The matrix negates the x- and z-axes and is involutory (its own inverse),
    so the same matrix converts in both directions.

    Example
    -------
    If ``brainsight_mat.shape == (n_pos, 4, 4)``::

        simnibs_mat = brainsight_mat @ simnibs2brainsight()
        brainsight_mat = simnibs_mat @ simnibs2brainsight()

    Returns
    -------
    np.ndarray
        (4, 4) axis-flip matrix.
    """
    return np.array([
        [-1, +0, +0, +0],  # +x -> -x
        [+0, +1, +0, +0],  # +y -> +y
        [+0, +0, -1, +0],  # +z -> -z
        [0, 0, 0, 1]])
797
+
798
+
799
def get_marker(fn, markertype):
    """
    Read instrument-marker and conditions from a Brainsight session file.

    BrainSight stores recorded coil placements as 'Samples' and predefined coil placements
    as 'Targets'. 'TriggerMarker' is treated like 'Samples', 'InstrumentMarker' like
    'Targets'.

    Coil axes definitions as defined by Brainsight.

    Parameters
    ----------
    fn : str
        Path to instrument-marker-file.
    markertype : str
        One of ['Samples', 'Targets', 'TriggerMarker', 'InstrumentMarker'].

    Returns
    -------
    matsimnibs : np.ndarray of float
        (M, 4, 4) Instrument marker matrices.
    marker_descriptions : list of str
        Labels of the marker conditions.
    marker_times : list of str
        YYYY-MM-DD HH:MM:SS.sss time stamps of the samples. [-1] for InstrumentMarkers.

    Raises
    ------
    ValueError
        If the file version cannot be determined or no marker data is found.
    """
    assert markertype in ['Samples', 'Targets', 'TriggerMarker', 'InstrumentMarker']

    # init empty values in case nothing is found in fn
    version = -1
    data_targets, data_samples = [], []
    col_names_targets, col_names_samples = [], []

    with open(fn, 'r') as f:
        # skip the file header until the first data section starts
        while True:
            line = f.readline().rstrip()
            if line.startswith('# Target Name') or line.startswith('# Sample Name'):
                break
            elif line.startswith('# Version:'):
                version = int(line.replace("# Version: ", ""))

        if version == -1:
            raise ValueError(f"Cannot read version from {fn}")

        # read data, probably 'Targets' (precedes the samples section if present)
        if line.startswith('# Target Name'):
            # get column names
            col_names_targets = line.replace('# ', '').split('\t')

            # read all target lines; a '#' line starts the next section
            while line:
                line = f.readline()
                if line.startswith('#'):
                    break
                if line:
                    data_targets.append(line.rstrip().split('\t'))

        if line.startswith('# Sample Name'):
            # get column names
            col_names_samples = line.replace('# ', '').split('\t')

            # read all sample lines; a '#' line starts the next section
            while line:
                line = f.readline()
                if line.startswith('#'):
                    break
                # guard against EOF: readline() returns '' there, which would
                # otherwise be appended as a bogus empty sample ['']
                if line:
                    data_samples.append(line.rstrip().split('\t'))

    # get matsimnibs arrays in simnibs space and axes definition
    if len(data_targets):
        names_targets, matsimnibs_targets = transform_brainsight(data_targets, col_names_targets)
    else:
        names_targets, matsimnibs_targets = [], None

    if len(data_samples):
        names_samples, matsimnibs_samples = transform_brainsight(data_samples, col_names_samples)
    else:
        names_samples, matsimnibs_samples = [], None

    if (matsimnibs_targets is None or matsimnibs_targets.size == 0) and \
            (matsimnibs_samples is None or matsimnibs_samples.size == 0):
        raise ValueError(f"Could not find any targets in {fn}.")

    if markertype in ['Targets', 'InstrumentMarker']:
        return matsimnibs_targets, names_targets, [-1] * len(names_targets)
    else:
        try:
            # get timings for samples from the 'Date' and 'Time' columns
            date_col = np.where(np.array(col_names_samples) == "Date")[0][0]
            time_col = np.where(np.array(col_names_samples) == "Time")[0][0]

            times_samples = [f"{sample[date_col]} {sample[time_col]}" for sample in data_samples]
        except IndexError:
            # 'Date'/'Time' columns were not exported for this session
            print(f"Cannot extract date and time from {fn}.")
            times_samples = [-1] * len(names_samples)

        return matsimnibs_samples, names_samples, times_samples
895
+
896
+
897
def read_triggermarker(fn):
    """
    Read stimulation samples (trigger markers) from a Brainsight session file.

    BrainSight stores recorded coil placements as 'Samples' and predefined coil placements
    as 'Targets'. Only the 'Samples' section is evaluated here; a preceding 'Targets'
    section is skipped.

    Coil axes definitions as defined by Brainsight.
    dI/dt is returned as a list of [-1] to keep consistent with read_triggermarker functions
    for other neuronavigation systems.

    Parameters
    ----------
    fn : str
        Path to instrument-marker-file.

    Returns
    -------
    matsimnibs : np.ndarray of float
        (4, 4, N) Instrument marker matrices.
    didt : list of int
        (N) Placeholder values of -1; Brainsight does not export the rate of change
        of coil current (A/us).
    stim_int : list of str
        (N) Stimulator intensity in (% MSO), as exported in the 'Stim. Power' column.
    marker_descriptions : list of str
        Labels of the marker conditions (sample names).
    marker_times : list of str
        YYYY-MM-DD HH:MM:SS.sss time stamps of the samples.

    Raises
    ------
    ValueError
        If the file version cannot be determined or no samples are found.
    """
    # init empty values in case nothing is found in fn
    version = -1
    data_samples = []
    col_names_samples = []

    with open(fn, 'r') as f:
        # skip the file header until the first data section starts
        while True:
            line = f.readline().rstrip()
            if line.startswith('# Target Name') or line.startswith('# Sample Name'):
                break
            elif line.startswith('# Version:'):
                version = int(line.replace("# Version: ", ""))

        if version == -1:
            raise ValueError(f"Cannot read version from {fn}")

        # skip over a 'Targets' section; its data is not needed here
        if line.startswith('# Target Name'):
            while line:
                line = f.readline()
                if line.startswith('#'):
                    break

        if line.startswith('# Sample Name'):
            # get column names
            col_names_samples = line.replace('# ', '').split('\t')

            # read all sample lines; a '#' line starts the next section
            while line:
                line = f.readline()
                if line.startswith('#'):
                    break
                # guard against EOF: readline() returns '' there, which would
                # otherwise be appended as a bogus empty sample ['']
                if line:
                    data_samples.append(line.rstrip().split('\t'))

    if len(data_samples):
        names_samples, matsimnibs_samples = transform_brainsight(data_samples, col_names_samples)
    else:
        names_samples, matsimnibs_samples = [], None

    if matsimnibs_samples is None or matsimnibs_samples.size == 0:
        raise ValueError(f"Could not find any targets in {fn}.")

    # get timings for samples from the 'Date' and 'Time' columns
    date_col = np.where(np.array(col_names_samples) == "Date")[0][0]
    time_col = np.where(np.array(col_names_samples) == "Time")[0][0]
    times_samples = [f"{sample[date_col]} {sample[time_col]}" for sample in data_samples]

    # stimulator intensity (% MSO) per sample
    intensity_col = np.where(np.array(col_names_samples) == "Stim. Power")[0][0]
    intensity_samples = [sample[intensity_col] for sample in data_samples]

    # rearrange from (N, 4, 4) to the documented (4, 4, N) layout
    matsimnibs_samples = np.moveaxis(matsimnibs_samples, 0, -1)

    return matsimnibs_samples, [-1] * len(names_samples), intensity_samples, names_samples, times_samples