pyNIBS 0.2024.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. pyNIBS-0.2024.8.dist-info/LICENSE +623 -0
  2. pyNIBS-0.2024.8.dist-info/METADATA +723 -0
  3. pyNIBS-0.2024.8.dist-info/RECORD +107 -0
  4. pyNIBS-0.2024.8.dist-info/WHEEL +5 -0
  5. pyNIBS-0.2024.8.dist-info/top_level.txt +1 -0
  6. pynibs/__init__.py +34 -0
  7. pynibs/coil.py +1367 -0
  8. pynibs/congruence/__init__.py +15 -0
  9. pynibs/congruence/congruence.py +1108 -0
  10. pynibs/congruence/ext_metrics.py +257 -0
  11. pynibs/congruence/stimulation_threshold.py +318 -0
  12. pynibs/data/configuration_exp0.yaml +59 -0
  13. pynibs/data/configuration_linear_MEP.yaml +61 -0
  14. pynibs/data/configuration_linear_RT.yaml +61 -0
  15. pynibs/data/configuration_sigmoid4.yaml +68 -0
  16. pynibs/data/network mapping configuration/configuration guide.md +238 -0
  17. pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +42 -0
  18. pynibs/data/network mapping configuration/configuration_for_testing.yaml +43 -0
  19. pynibs/data/network mapping configuration/configuration_modelTMS.yaml +43 -0
  20. pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +43 -0
  21. pynibs/data/network mapping configuration/output_documentation.md +185 -0
  22. pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +77 -0
  23. pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +1281 -0
  24. pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +1281 -0
  25. pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +1281 -0
  26. pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +1281 -0
  27. pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +1281 -0
  28. pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +1281 -0
  29. pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +1281 -0
  30. pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +1281 -0
  31. pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +1281 -0
  32. pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +1281 -0
  33. pynibs/expio/Mep.py +1518 -0
  34. pynibs/expio/__init__.py +8 -0
  35. pynibs/expio/brainsight.py +979 -0
  36. pynibs/expio/brainvis.py +71 -0
  37. pynibs/expio/cobot.py +239 -0
  38. pynibs/expio/exp.py +1876 -0
  39. pynibs/expio/fit_funs.py +287 -0
  40. pynibs/expio/localite.py +1987 -0
  41. pynibs/expio/signal_ced.py +51 -0
  42. pynibs/expio/visor.py +624 -0
  43. pynibs/freesurfer.py +502 -0
  44. pynibs/hdf5_io/__init__.py +10 -0
  45. pynibs/hdf5_io/hdf5_io.py +1857 -0
  46. pynibs/hdf5_io/xdmf.py +1542 -0
  47. pynibs/mesh/__init__.py +3 -0
  48. pynibs/mesh/mesh_struct.py +1394 -0
  49. pynibs/mesh/transformations.py +866 -0
  50. pynibs/mesh/utils.py +1103 -0
  51. pynibs/models/_TMS.py +211 -0
  52. pynibs/models/__init__.py +0 -0
  53. pynibs/muap.py +392 -0
  54. pynibs/neuron/__init__.py +2 -0
  55. pynibs/neuron/neuron_regression.py +284 -0
  56. pynibs/neuron/util.py +58 -0
  57. pynibs/optimization/__init__.py +5 -0
  58. pynibs/optimization/multichannel.py +278 -0
  59. pynibs/optimization/opt_mep.py +152 -0
  60. pynibs/optimization/optimization.py +1445 -0
  61. pynibs/optimization/workhorses.py +698 -0
  62. pynibs/pckg/__init__.py +0 -0
  63. pynibs/pckg/biosig/biosig4c++-1.9.5.src_fixed.tar.gz +0 -0
  64. pynibs/pckg/libeep/__init__.py +0 -0
  65. pynibs/pckg/libeep/pyeep.so +0 -0
  66. pynibs/regression/__init__.py +11 -0
  67. pynibs/regression/dual_node_detection.py +2375 -0
  68. pynibs/regression/regression.py +2984 -0
  69. pynibs/regression/score_types.py +0 -0
  70. pynibs/roi/__init__.py +2 -0
  71. pynibs/roi/roi.py +895 -0
  72. pynibs/roi/roi_structs.py +1233 -0
  73. pynibs/subject.py +1009 -0
  74. pynibs/tensor_scaling.py +144 -0
  75. pynibs/tests/data/InstrumentMarker20200225163611937.xml +19 -0
  76. pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +14 -0
  77. pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +6373 -0
  78. pynibs/tests/data/Xdmf.dtd +89 -0
  79. pynibs/tests/data/brainsight_niiImage_nifticoord.txt +145 -0
  80. pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +1434 -0
  81. pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +47 -0
  82. pynibs/tests/data/create_subject_testsub.py +332 -0
  83. pynibs/tests/data/data.hdf5 +0 -0
  84. pynibs/tests/data/geo.hdf5 +0 -0
  85. pynibs/tests/test_coil.py +474 -0
  86. pynibs/tests/test_elements2nodes.py +100 -0
  87. pynibs/tests/test_hdf5_io/test_xdmf.py +61 -0
  88. pynibs/tests/test_mesh_transformations.py +123 -0
  89. pynibs/tests/test_mesh_utils.py +143 -0
  90. pynibs/tests/test_nnav_imports.py +101 -0
  91. pynibs/tests/test_quality_measures.py +117 -0
  92. pynibs/tests/test_regressdata.py +289 -0
  93. pynibs/tests/test_roi.py +17 -0
  94. pynibs/tests/test_rotations.py +86 -0
  95. pynibs/tests/test_subject.py +71 -0
  96. pynibs/tests/test_util.py +24 -0
  97. pynibs/tms_pulse.py +34 -0
  98. pynibs/util/__init__.py +4 -0
  99. pynibs/util/dosing.py +233 -0
  100. pynibs/util/quality_measures.py +562 -0
  101. pynibs/util/rotations.py +340 -0
  102. pynibs/util/simnibs.py +763 -0
  103. pynibs/util/util.py +727 -0
  104. pynibs/visualization/__init__.py +2 -0
  105. pynibs/visualization/para.py +4372 -0
  106. pynibs/visualization/plot_2D.py +137 -0
  107. pynibs/visualization/render_3D.py +347 -0
pynibs/expio/signal_ced.py ADDED
@@ -0,0 +1,51 @@
1
+ """ """
2
+ import os
3
+ import h5py
4
+ import numpy as np
5
+ import pynibs
6
+
7
+
8
+ def cfs2hdf5(fn_cfs, fn_hdf5=None):
9
+ """
10
+ Converts EMG data included in .cfs file to .hdf5 format.
11
+
12
+ Parameters
13
+ ----------
14
+ fn_cfs : str
15
+ Filename of .cfs file.
16
+ fn_hdf5 : str, optional
17
+ Filename of the .hdf5 file. If not provided, a file with the same name as fn_cfs but with an .hdf5 extension is created.
18
+
19
+ Returns
20
+ -------
21
+ <file> : .hdf5 File
22
+ File containing:
23
+
24
+ * EMG data in f["emg"][:]
25
+ * Time axis in f["time"][:]
26
+ """
27
+ try:
28
+ import biosig
29
+ except ImportError:
30
+ ImportError("Please install biosig from pynibs/pkg/biosig folder!")
31
+
32
+ if fn_hdf5 is None:
33
+ fn_hdf5 = os.path.splitext(fn_cfs)[0] + ".hdf5"
34
+
35
+ # load header and data
36
+ cfs_header = biosig.header(fn_cfs)
37
+ emg = biosig.data(fn_cfs)[:, 0]
38
+
39
+ sweep_index = cfs_header.find('NumberOfSweeps')
40
+ comma_index = cfs_header.find(',', sweep_index)
41
+ sweeps = int(cfs_header[sweep_index + 18:comma_index])
42
+ records = emg.shape[0]
43
+ samples = int(records / sweeps)
44
+ sampling_rate = pynibs.get_mep_sampling_rate(fn_cfs)
45
+ emg = np.reshape(emg, (sweeps, samples))
46
+ time = np.linspace(0, samples, samples) / sampling_rate
47
+
48
+ with h5py.File(fn_hdf5, "w") as f:
49
+ f.create_dataset("emg", data=emg)
50
+ f.create_dataset("time", data=time)
51
+ f.create_dataset("sampling_rate", data=np.array([sampling_rate]))
pynibs/expio/visor.py ADDED
@@ -0,0 +1,624 @@
1
+ """ Functions to import data from ANT Visor 2 / ANT EEG software go here """
2
+ import os
3
+ import h5py
4
+ import warnings
5
+ import numpy as np
6
+ import pandas as pd
7
+ from scipy import signal
8
+ from scipy.spatial.transform import Rotation
9
+
10
+ import pynibs
11
+
12
+ try:
13
+ from pynibs.pckg import libeep
14
+ except (ImportError, SyntaxError):
15
+ pass
16
+
17
+
18
+ def read_nlr(fname):
19
+ """
20
+ Reads NLR coordinates from *_recording.mri file.
21
+
22
+ Parameters
23
+ ----------
24
+ fname : str
25
+ File path of the NLR *_recording.mri file.
26
+
27
+ Returns
28
+ -------
29
+ fiducials : np.ndarray of float
30
+ (3, 3) The rows contain the fiducial points in ANT NIfTI space (nasion, left ear, right ear).
31
+ Each fiducial point is represented as [x, y, z] coordinates.
32
+ """
33
+ f = open(fname, "r")
34
+ text = f.readlines()
35
+
36
+ fiducials = np.empty((3, 3))
37
+
38
+ for i, line in enumerate(text):
39
+ # nasion
40
+ if "VoxelOnPositiveXAxis" in line:
41
+ line = text[i + 1].replace("\t", " ")
42
+ line = line.replace("\n", "")
43
+ fiducials[0, :] = np.array([int(t) for t in line.split(" ")])
44
+
45
+ # left ear
46
+ if "VoxelOnNegativeYAxis" in line:
47
+ line = text[i + 1].replace("\t", " ")
48
+ line = line.replace("\n", "")
49
+ fiducials[1, :] = np.array([int(t) for t in line.split(" ")])
50
+
51
+ # right ear
52
+ if "VoxelOnPositiveYAxis" in line:
53
+ line = text[i + 1].replace("\t", " ")
54
+ line = line.replace("\n", "")
55
+ fiducials[2, :] = np.array([int(t) for t in line.split(" ")])
56
+
57
+ return fiducials
58
+
59
+
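Illustrative usage, not part of the diff: read_nlr only looks at the three voxel tags parsed above, so a tiny synthetic *_recording.mri snippet is enough to exercise it. The tag layout and voxel values below are assumptions for demonstration; real ANT files contain many more sections.

```python
import pynibs

# made-up voxel coordinates for nasion, left ear, right ear
snippet = (
    "VoxelOnPositiveXAxis\n128\t255\t128\n"
    "VoxelOnNegativeYAxis\n30\t128\t128\n"
    "VoxelOnPositiveYAxis\n226\t128\t128\n"
)
with open("example_recording.mri", "w") as f:
    f.write(snippet)

fiducials = pynibs.visor.read_nlr("example_recording.mri")
print(fiducials)  # rows: nasion, left ear, right ear (voxel indices)
```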
60
+ def get_instrument_marker(im_path, verbose=False):
61
+ """
62
+ Return all instrument markers from visor .cnt file.
63
+
64
+ Coordinate system in raw ANT space (NLR) is defined as:
65
+ - origin: intersection between line of ear fiducials and nasion
66
+ - x-axis: origin -> nasion
67
+ - y-axis: origin -> left ear
68
+ - z-axis: origin -> superior
69
+
70
+ Parameters
71
+ ----------
72
+ im_path : str
73
+ Path to the instrument marker .cnt file.
74
+ verbose: bool, default: False
75
+ Flag indicating verbosity.
76
+
77
+ Returns
78
+ -------
79
+ im_list : list of dict
80
+ List containing stimulation parameters.
81
+
82
+ * coil_mean_raw: 4 x 4 numpy array
83
+ * StimulusID: int
84
+ * etc...
85
+
86
+ Raises
87
+ ------
88
+ AssertionError
89
+ If the .cnt file contains no instrument markers.
90
+ """
91
+ f = libeep.read_cnt(im_path)
92
+ n_trig = f.get_trigger_count()
93
+ # some triggers (2?) carry other information,
94
+ # so only take the ones with 'StimulusID' at 3rd position
95
+ ims = [f.get_trigger(i)[3] for i in range(n_trig) if "StimulusID" in f.get_trigger(i)[3]]
96
+ # or: if f.get_trigger(i)[0] == '6'
97
+
98
+ if verbose:
99
+ print(f"Found {len(ims)} instrument markers.")
100
+ assert len(ims), "No instrument markers found in file"
101
+
102
+ # now build list of matsimnibs from the instrument markers
103
+ data = []
104
+
105
+ for i, im in enumerate(ims):
106
+
107
+ # transform string from .cnt file to dictionary
108
+ data.append(dict(item.split('=') for item in im.split()[1:] if '=' in item))
109
+
110
+ # floatify numeric variables
111
+ for key in data[-1].keys():
112
+ try:
113
+ if key == "StimulusID":
114
+ data[-1][key] = int(data[-1][key])
115
+ else:
116
+ data[-1][key] = float(data[-1][key])
117
+ except ValueError:
118
+ pass
119
+
120
+ # transform to SimNIBS raw format
121
+ matsimnibs_raw = np.zeros((4, 4))
122
+ matsimnibs_raw[3, 3] = 1
123
+ matsimnibs_raw[0:3, 3] = np.array([data[-1]['PosX'], data[-1]['PosY'], data[-1]['PosZ']]) * 1000
124
+ quat = np.array([data[-1]['QuatX'], data[-1]['QuatY'], data[-1]['QuatZ'], data[-1]['QuatW']])
125
+ matsimnibs_raw[0:3, 0:3] = Rotation.from_quat(quat).as_matrix()  # as_dcm() was removed from SciPy; as_matrix() is the replacement
126
+ data[-1]["coil_mean_raw"] = matsimnibs_raw
127
+
128
+ return data
129
+
130
+
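For clarity, the per-marker conversion above boils down to building a 4 x 4 matrix from the Visor position and quaternion fields. A standalone sketch with made-up values (position in metres, quaternion as x, y, z, w, matching the code above):

```python
import numpy as np
from scipy.spatial.transform import Rotation

# example values as they appear in the trigger string
pos = np.array([-0.045, 0.012, 0.093])      # PosX, PosY, PosZ in metres
quat = np.array([0.02, 0.71, -0.01, 0.70])  # QuatX, QuatY, QuatZ, QuatW

matsimnibs_raw = np.eye(4)
matsimnibs_raw[0:3, 3] = pos * 1000                              # metres -> millimetres
matsimnibs_raw[0:3, 0:3] = Rotation.from_quat(quat).as_matrix()  # rotation matrix from quaternion
print(matsimnibs_raw.round(3))
```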
131
+ def get_cnt_data(fn, channels='all', trigger_val='1', max_duration=10,
132
+ fn_hdf5=None, path_hdf5=None, verbose=False, return_data=False):
133
+ """
134
+ Reads an ANT .cnt EMG/EEG data file and chunks the time series into [triggerN, triggerN+1] segments.
135
+
136
+ If fn_hdf5 is provided, the chunks are written directly into the HDF5 file, starting with the first trigger and ending
137
+ at get_sample_count() - 1.
138
+
139
+ Parameters
140
+ ----------
141
+ fn: str
142
+ Path to the .cnt file.
143
+ channels: str, int, list of int, or list of str, default: 'all'
144
+ Which channel(s) to return. Can be channel number(s) or channel name(s).
145
+ trigger_val: str, default: '1'
146
+ Trigger value to read as zap trigger.
147
+ max_duration : int, default: 10
148
+ Maximum duration in [s] per chunk. Rest is dropped.
149
+ fn_hdf5: str, optional
150
+ If provided, the cnt data is written into an hdf5 file under "path_hdf5" as pandas dataframe
151
+ with column name "qoi_name" and nothing is returned.
152
+ path_hdf5: str, default: None
153
+ If fn_hdf5, path within the HDF5 file where the data is saved (e.g. "/phys_data/raw/EEG")
154
+ verbose: bool, default: False
155
+ Flag indicating verbosity.
156
+ return_data: bool, default: False
157
+ If true, the data is returned as list of numpy arrays.
158
+
159
+ Returns
160
+ -------
161
+ data_lst: list of np.ndarray, optional
162
+ (samples, channels), List of EEG/EMG data. Only returned if return_data is True.
163
+ """
164
+
165
+ f = libeep.read_cnt(fn)
166
+ n_trig = f.get_trigger_count()
167
+ n_samples = f.get_sample_count()
168
+ n_channels = f.get_channel_count()
169
+ sf = f.get_sample_frequency()
170
+ chan_names = [f.get_channel(i)[0].lower() for i in range(n_channels)]
171
+
172
+ if channels == 'all' or isinstance(channels, list) and channels[0] == 'all':
173
+ channels_idx = range(n_channels)
174
+ elif isinstance(channels, int):
175
+ channels_idx = [channels]
176
+ elif isinstance(channels, str):
177
+ channels_idx = [chan_names.index(channels.lower())]  # wrap in a list so the checks below also work for channel index 0
178
+ elif type(channels) == list and all(type(chan) == int for chan in channels):
179
+ channels_idx = channels
180
+ assert np.all(np.array(channels_idx) >= 0), "Only non-negative channel numbers allowed."
181
+ assert np.max(np.array(channels_idx)) < n_channels, f"Only {n_channels} channels found."
182
+
183
+ elif type(channels) == list and all(type(chan) == str for chan in channels):
184
+ channels_idx = [chan_names.index(chan.lower()) for chan in channels]
185
+
186
+ else:
187
+ raise NotImplementedError("Channels must be 'all', list(int), list(str)")
188
+
189
+ assert channels_idx, "No channels with name / idx found."
190
+
191
+ if fn_hdf5 is not None:
192
+ assert path_hdf5, "Please provide path_hdf5 when writing to an HDF5 file."
193
+
194
+ if verbose:
195
+ print(f"get_cnt_data: {n_trig} triggers found.")
196
+ print(f"get_cnt_data: {n_samples} samples found.")
197
+ print(f"get_cnt_data: {sf} Hz sampling frequency.")
198
+ print(f"get_cnt_data: {n_channels} channels found.")
199
+
200
+ # get data between samples
201
+ data_lst = []
202
+
203
+ # chunk into triggers
204
+ trigger_idx = 0
205
+ # arr_idx = 0
206
+ last_zap_done = False
207
+ trigger_zap = 0
208
+ # we want the data between trigger and trigger+1
209
+ while trigger_idx < n_trig - 1:
210
+
211
+ try:
212
+ start = f.get_trigger(trigger_idx)
213
+
214
+ # only use the triggers that have the correct trigger value
215
+ if start[0] != trigger_val:
216
+ if verbose:
217
+ print(f"get_cnt_data: Skipping idx {trigger_idx}: {start} (start)")
218
+ trigger_idx += 1
219
+ continue
220
+ end = f.get_trigger(trigger_idx + 1)
221
+ # also trigger+1 needs to have the correct trigger_val
222
+ while end[0] != trigger_val:
223
+ if verbose:
224
+ print(f"Skipping idx {trigger_idx}: {start} (end)")
225
+ trigger_idx += 1
226
+ if trigger_idx >= n_trig - 1:
227
+ break
228
+ end = f.get_trigger(trigger_idx)
229
+
230
+ # some sanity checks
231
+ if not start[1] < end[1]:
232
+ if verbose:
233
+ print(f"Trigger {trigger_idx} and {trigger_idx + 1}: wrong sample number "
234
+ f"({trigger_idx}: {start[1]}, {trigger_idx + 1}: {end[1]}]")
235
+ # the EEG .cnt files end with a trigger; get data from that trigger to the end of the file
236
+ if trigger_idx == n_trig - 1:
237
+ end = (end[0], f.get_sample_count())
238
+ last_zap_done = True
239
+
240
+ assert start[1] < (end[1] - 1), \
241
+ f"Trigger {trigger_idx} and {trigger_idx + 1}: too close together " \
242
+ f"({trigger_idx}: {start[1]}, {trigger_idx + 1}: {end[1]}]"
243
+
244
+ # get sample number from trigger-tuple
245
+ start = start[1]
246
+ end = end[1] - 1
247
+ length_org = end - start
248
+
249
+ # cut to max duration chunk length
250
+ end = np.min((end, start + sf * max_duration))
251
+
252
+ if verbose:
253
+ print(f"get_cnt_data: Trigger {trigger_idx:0>3}: {float(length_org) / sf:2.2}s / "
254
+ f"{float(end - start) / sf:0.2}s")
255
+ data = f.get_samples(start, end)
256
+ data_res = np.reshape(data, (end - start, n_channels), order='F')
257
+
258
+ if return_data:
259
+ data_lst.append(data_res)
260
+
261
+ except (SystemError, UnicodeDecodeError) as e:
262
+ print(f"Trigger {trigger_idx} error")
263
+ print(e)
264
+ trigger_idx += 1  # advance, otherwise a persistent read error would loop forever
+ continue
265
+
266
+ if fn_hdf5 is not None:
267
+ with h5py.File(fn_hdf5, "a") as fi:
268
+ fi[path_hdf5 + f"/{trigger_zap:04d}"] = data_res
269
+ trigger_zap += 1
270
+
271
+ trigger_idx += 1
272
+
273
+ # grab data for the last zap (trigger to end of file)
274
+ if not last_zap_done:
275
+
276
+ try:
277
+ start = f.get_trigger(trigger_idx)
278
+
279
+ # only use the triggers that have the correct trigger value
280
+ if start[0] != trigger_val:
281
+ if verbose:
282
+ print(f"get_cnt_data: Skipping idx {trigger_idx}: {start} (start)")
283
+ trigger_idx += 1
284
+ end = f.get_sample_count()
285
+
286
+ assert start[1] < (end - 1), \
287
+ f"Trigger {trigger_idx} and {trigger_idx + 1}: too close together " \
288
+ f"({trigger_idx}: {start[1]}, {trigger_idx + 1}: {end}]"
289
+
290
+ # get sample number from trigger-tuple
291
+ start = start[1]
292
+ length_org = end - start
293
+
294
+ # cut to max duration chunk length
295
+ end = np.min((end, start + sf * max_duration))
296
+
297
+ if verbose:
298
+ print(f"get_cnt_data: Trigger {trigger_idx:0>3}: {float(length_org) / sf:2.2}s / "
299
+ f"{float(end - start) / sf:0.2}s")
300
+ data = f.get_samples(start, end)
301
+ data_res = np.reshape(data, (end - start, n_channels), order='F')
302
+
303
+ if return_data:
304
+ data_lst.append(data_res)
305
+
306
+ if fn_hdf5 is not None:
307
+ with h5py.File(fn_hdf5, "a") as fi:
308
+ fi[path_hdf5 + f"/{trigger_zap:04d}"] = data_res
309
+ trigger_zap += 1
310
+
311
+ trigger_idx += 1
312
+
313
+ except (SystemError, UnicodeDecodeError) as e:
314
+ print(f"Trigger {trigger_idx} error")
315
+ print(e)
316
+
317
+ # reshape according to channel count
318
+ # [chan1, chan2, chan3, chan1, chan2, chan3]
319
+ # data_res = np.reshape(data, (end - start, n_channels), order='F')
320
+ #
321
+ # if return_data:
322
+ # data_lst.append(data_res[:, channels_idx])
323
+ #
324
+ # if fn_hdf5 is not None:
325
+ # with h5py.File(fn_hdf5, "a") as fi:
326
+ # fi[path_hdf5 + f"/{trigger_idx:04d}"] = data_res[:, channels_idx]
327
+
328
+ # append last chunk
329
+ # start = f.get_trigger(n_trig - 2)[1]
330
+ # end = n_samples - 1
331
+ # end = np.min((end, start + sf * max_duration)) # cut to max chunk length
332
+ #
333
+ # data = f.get_samples(start, end)
334
+ # data_res = np.reshape(data, (end - start, n_channels), order='F')
335
+ # if fn_hdf5:
336
+ # write_arr_to_hdf5(fn_hdf5=fn_hdf5,
337
+ # arr_name=arr_name.format(arr_idx),
338
+ # data=data_res[:, channels_idx],
339
+ # verbose=verbose)
340
+ # else:
341
+
342
+ if return_data:
343
+ return data_lst
344
+
345
+
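Illustrative call of get_cnt_data as defined above; the file names, channel name, and trigger value are placeholders that depend on the actual recording.

```python
import pynibs

chunks = pynibs.visor.get_cnt_data(
    "emg_session.cnt",            # placeholder path to an ANT .cnt recording
    channels=["EMG1"],            # channel name(s) as stored in the file
    trigger_val="1",              # trigger value marking the TMS pulses
    max_duration=2,               # keep at most 2 s after each pulse
    fn_hdf5="emg_session.hdf5",   # also write each chunk to phys_data/raw/EMG/0000, 0001, ...
    path_hdf5="phys_data/raw/EMG",
    return_data=True,
    verbose=True,
)
print(f"{len(chunks)} trigger-to-trigger chunks, first chunk shape: {chunks[0].shape}")
```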
346
+ def filter_emg(emg, fs):
347
+ """
348
+ Filter EMG signals.
349
+
350
+ Parameters
351
+ ----------
352
+ emg : list of np.ndarray
353
+ (n_stimuli), Raw EMG data. Each list entry contains a np.ndarray of size [n_samples x n_channel].
354
+ Each channel is filtered in the same way.
355
+ fs : float
356
+ Sampling frequency.
357
+
358
+ Returns
359
+ -------
360
+ emg_filt : list of np.ndarray
361
+ (n_stimuli), Filtered EMG data.
362
+ """
363
+
364
+ # 5 Hz Butterworth high pass
365
+ ############################
366
+ b_butterhigh, a_butterhigh = signal.butter(N=5, Wn=5, btype='high', analog=False, fs=fs)
367
+ # plot_frequency_response(a_butterhigh, b_butterhigh, fs=fs)
368
+
369
+ # 200 Hz Butterworth low pass
370
+ ############################
371
+ b_butterlow, a_butterlow = signal.butter(N=5, Wn=200, btype='low', analog=False, fs=fs)
372
+ # plot_frequency_response(a_butterlow, b_butterlow, fs=fs)
373
+
374
+ # 50 Hz Notch filter
375
+ ############################
376
+ b_notch50, a_notch50 = signal.iirnotch(w0=50 / (fs / 2), Q=30)
377
+ # plot_frequency_response(a_notch50, b_notch50, fs=fs)
378
+
379
+ # 100 Hz Notch filter
380
+ ############################
381
+ b_notch100, a_notch100 = signal.iirnotch(w0=100 / (fs / 2), Q=50)
382
+ # plot_frequency_response(a_notch100, b_notch100, fs=fs)
383
+
384
+ # 150 Hz Notch filter
385
+ ############################
386
+ b_notch150, a_notch150 = signal.iirnotch(w0=150 / (fs / 2), Q=30)
387
+ # plot_frequency_response(a_notch150, b_notch150, fs=fs)
388
+
389
+ # 200 Hz Notch filter
390
+ ############################
391
+ b_notch200, a_notch200 = signal.iirnotch(w0=200 / (fs / 2), Q=30)
392
+ # plot_frequency_response(a_notch200, b_notch200, fs=fs)
393
+
394
+ # Filter signals
395
+ emg_filt = []
396
+
397
+ for e in emg:
398
+ emg_filt.append(np.zeros(e.shape))
399
+ for i_channel in range(e.shape[1]):
400
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_notch50, a_notch50, e[:, i_channel])
401
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_notch100, a_notch100, emg_filt[-1][:, i_channel])
402
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_notch150, a_notch150, emg_filt[-1][:, i_channel])
403
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_notch200, a_notch200, emg_filt[-1][:, i_channel])
404
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_butterlow, a_butterlow, emg_filt[-1][:, i_channel])
405
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_butterhigh, a_butterhigh, emg_filt[-1][:, i_channel])
406
+ emg_filt[-1][:, i_channel] = signal.filtfilt(b_notch50, a_notch50, emg_filt[-1][:, i_channel])
407
+
408
+ return emg_filt
409
+
410
+
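As a sanity check on the filter cascade in filter_emg, the individual stages can be inspected with scipy.signal.freqz; the sampling rate below is an assumed example value.

```python
import numpy as np
from scipy import signal

fs = 4000.0  # assumed EMG sampling rate in Hz

# same designs as in filter_emg above
b_hp, a_hp = signal.butter(N=5, Wn=5, btype='high', fs=fs)
b_lp, a_lp = signal.butter(N=5, Wn=200, btype='low', fs=fs)
b_n50, a_n50 = signal.iirnotch(w0=50 / (fs / 2), Q=30)

# combined magnitude response of high-pass, low-pass and 50 Hz notch at a few frequencies
freqs = np.array([1.0, 20.0, 50.0, 100.0, 300.0])
_, h_hp = signal.freqz(b_hp, a_hp, worN=freqs, fs=fs)
_, h_lp = signal.freqz(b_lp, a_lp, worN=freqs, fs=fs)
_, h_n50 = signal.freqz(b_n50, a_n50, worN=freqs, fs=fs)
for f_hz, h in zip(freqs, np.abs(h_hp * h_lp * h_n50)):
    print(f"{f_hz:6.1f} Hz: {20 * np.log10(h + 1e-12):7.1f} dB")
```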
411
+ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep=18, end_mep=35):
412
+ """
413
+ Merges all experimental data from visor experiment into one .hdf5 file.
414
+
415
+ Parameters
416
+ ----------
417
+ subject : pynibs.Subject
418
+ Subject object.
419
+ exp_id : int, default: 0
420
+ Experiment index.
421
+ mesh_idx : int, default: 0
422
+ Mesh index.
423
+ verbose : bool, default: False
424
+ Flag indicating verbosity.
425
+ start_mep : float, default: 18
426
+ Start of time frame after TMS pulse where p2p value is evaluated (in ms).
427
+ end_mep : float, default: 35
428
+ End of time frame after TMS pulse where p2p value is evaluated (in ms).
429
+
430
+ Returns
431
+ -------
432
+ <File> : .hdf5 file
433
+ File containing the stimulation and physiological data as pandas dataframes:
434
+
435
+ * "stim_data": Stimulation parameters (e.g. coil positions, etc.)
436
+ * "phys_data/info/EMG": Information about EMG data recordings (e.g. sampling frequency, etc.)
437
+ * "phys_data/info/EEG": Information about EEG data recordings (e.g. sampling frequency, etc.)
438
+ * "phys_data/raw/EMG": Raw EMG data
439
+ * "phys_data/raw/EEG": Raw EEG data
440
+ * "phys_data/postproc/EMG": Post-processed EMG data (e.g. filtered, p2p, etc.)
441
+ * "phys_data/postproc/EEG": Post-processed EEG data (e.g. filtered, p2p, etc.)
442
+ """
443
+ # mep_paths_lst = subject.exp[exp_id]['fn_data']
444
+
445
+ # im_lst = subject.exp[exp_id]['cond']
446
+ # nii_exp_path_lst = subject.exp[exp_id]['fn_mri_nii']
447
+ # nii_conform_path = subject.mesh[mesh_idx]['fn_mri_conform']
448
+ fn_exp_hdf5 = subject.exp[exp_id]['fn_exp_hdf5']
449
+ fn_current = subject.exp[exp_id]['fn_current'][0]
450
+ # fn_coil = subject.exp[exp_id]['fn_coil']
451
+ # fn_mesh_hdf5 = subject.mesh[mesh_idx]['fn_mesh_hdf5']
452
+ exp_id = exp_id
453
+
454
+ if os.path.exists(fn_exp_hdf5):
455
+ os.remove(fn_exp_hdf5)
456
+
457
+ # read stimulation parameters
458
+ # ===================================================================================
459
+ if 'fn_visor_cnt' in subject.exp[exp_id]:
460
+ print(f"Reading stimulation parameters from {subject.exp[exp_id]['fn_visor_cnt']}")
461
+
462
+ assert 'fn_fiducials' in subject.exp[exp_id]
463
+ assert 'fn_current' in subject.exp[exp_id]
464
+ assert len(subject.exp[exp_id]['fn_visor_cnt']) == 1, "Multiple coils not implemented for visor"
465
+ fn_visor_cnt = subject.exp[exp_id]['fn_visor_cnt'][0]
466
+
467
+ fn_fiducials = subject.exp[exp_id]['fn_fiducials'][0]
468
+
469
+ ims_list = pynibs.visor.get_instrument_marker(fn_visor_cnt)
470
+ ims_dict = pynibs.list2dict(ims_list)
471
+ n_stim = len(ims_list)
472
+
473
+ # read fiducials and transform to simnibs space
474
+ fiducials = pynibs.visor.read_nlr(fn_fiducials)
475
+
476
+ # fiducial correction
477
+ if 'fiducial_corr' in subject.exp[exp_id]:
478
+ fiducal_corr = np.array(subject.exp[exp_id]['fiducial_corr'])
479
+ if any(np.abs(fiducal_corr[fiducal_corr != 0]) < .1):
480
+ warnings.warn("fiducial_corr are expected to be given in mm.")
481
+ fiducials += fiducal_corr
482
+
483
+ fn_exp_nii = subject.exp[exp_id]['fn_mri_nii'][0][0]
484
+
485
+ matsimnibs_raw = np.dstack(ims_dict["coil_mean_raw"])
486
+
487
+ matsimnibs = pynibs.nnav2simnibs(fn_exp_nii=fn_exp_nii,
488
+ fn_conform_nii=subject.mesh[mesh_idx]['fn_mri_conform'],
489
+ m_nnav=matsimnibs_raw,
490
+ nnav_system="visor",
491
+ fiducials=fiducials,
492
+ verbose=verbose)
493
+
494
+ # read coil current
495
+ current = np.loadtxt(fn_current)
496
+
497
+ if subject.exp[exp_id]["cond"][0][0] != "":
498
+ raise NotImplementedError("Individual conditions and average coil position over it not implemented yet")
499
+
500
+ # create stim_data dataframe
501
+ stim_data = {"coil_mean": [matsimnibs[:, :, i] for i in range(n_stim)],
502
+ "coil_type": [np.array(os.path.split(subject.exp[exp_id]["fn_coil"][0][0])[1]).astype(
503
+ "|S")] * n_stim,
504
+ "current": current,
505
+ "condition": [f"{(i - 1):04d}" for i in ims_dict["StimulusID"]]}
506
+
507
+ df_stim_data = pd.DataFrame.from_dict(stim_data)
508
+ df_stim_data.to_hdf(fn_exp_hdf5, "stim_data")
509
+
510
+ print(f"Writing stim_data dataframe to {fn_exp_hdf5}")
511
+
512
+ else:
513
+ warnings.warn("No visor positions found.")
514
+
515
+ # read emg
516
+ # ===================================================================================
517
+ if 'fn_emg_cnt' in subject.exp[exp_id]:
518
+
519
+ print(f"Reading EMG data from {subject.exp[exp_id]['fn_emg_cnt'][0]}")
520
+
521
+ # which emg_channel to use
522
+ emg_channels = subject.exp[exp_id]['emg_channels']
523
+
524
+ if isinstance(emg_channels, list) and len(emg_channels) > 1:
525
+ warnings.warn("Multiple EMG channels are untested.")
526
+
527
+ emg_trigger_value = subject.exp[exp_id]['emg_trigger_value'][0]
528
+
529
+ max_duration = 10  # maximum EMG time series duration [s] after each zap
530
+ try:
531
+ max_duration = subject.exp[exp_id]['emg_max_duration'][0]
532
+ except KeyError:
533
+ pass
534
+ fn_emg_cnt = subject.exp[exp_id]['fn_emg_cnt'][0]
535
+
536
+ # read info
537
+ cnt_info = pynibs.get_cnt_infos(fn_emg_cnt)
538
+
539
+ phys_data_info_emg = dict()
540
+ for key in cnt_info.keys():
541
+ phys_data_info_emg[key] = cnt_info[key]
542
+
543
+ phys_data_info_emg["max_duration"] = max_duration
544
+ phys_data_info_emg["emg_channels"] = emg_channels
545
+
546
+ df_phys_data_info_emg = pd.DataFrame.from_dict(phys_data_info_emg)
547
+ df_phys_data_info_emg.to_hdf(fn_exp_hdf5, "phys_data/info/EMG")
548
+ print(f"Writing EMG info dataframe (phys_data/info/EMG) to {fn_exp_hdf5}")
549
+
550
+ # read raw emg data from cnt file and write to hdf5 file
551
+ emg = pynibs.visor.get_cnt_data(fn_emg_cnt,
552
+ channels=emg_channels,
553
+ max_duration=max_duration,
554
+ trigger_val=emg_trigger_value,
555
+ verbose=verbose,
556
+ fn_hdf5=fn_exp_hdf5,
557
+ path_hdf5="phys_data/raw/EMG",
558
+ return_data=True)
559
+
560
+ print(f"Writing EMG raw dataframe (phys_data/raw/EMG) to {fn_exp_hdf5}")
561
+
562
+ # filter data
563
+ emg_filt = pynibs.visor.filter_emg(emg=emg, fs=phys_data_info_emg["sampling_rate"])
564
+ df_phys_data_postproc_emg = pd.DataFrame.from_dict({"filtered": emg_filt})
565
+
566
+ # calc p2p
567
+ # TODO: implement p2p function
568
+ # p2p = calc_p2p(emg_filt)
569
+ # df_phys_data_postproc_emg["p2p"] = p2p
570
+
571
+ df_phys_data_postproc_emg.to_hdf(fn_exp_hdf5, "phys_data/postproc/EMG")
572
+ print(f"Writing EMG postproc dataframe (phys_data/postproc/EMG) to {fn_exp_hdf5}")
573
+
574
+ # read eeg
575
+ # ===================================================================================
576
+ if 'fn_eeg_cnt' in subject.exp[exp_id]:
577
+ # which eeg_channel(s) to use?
578
+ max_duration = 10  # maximum EEG time series duration [s] after each zap
579
+
580
+ try:
581
+ max_duration = subject.exp[exp_id]['eeg_max_duration'][0]
582
+ except KeyError:
583
+ pass
584
+
585
+ eeg_trigger_value = subject.exp[exp_id]['eeg_trigger_value'][0]
586
+
587
+ # eeg_channel can be int, str, list of int, list of str
588
+ eeg_channels = ['all']
589
+ try:
590
+ try:
591
+ # list of int
592
+ eeg_channels = subject.exp[exp_id]['eeg_channels']
593
+ except ValueError:
594
+ # list of str (gets cast to b'')
595
+ eeg_channels = subject.exp[exp_id]['eeg_channels'].astype(str).tolist()
596
+ except KeyError: # key not defined, fall back to default
597
+ pass
598
+
599
+ fn_eeg_cnt = subject.exp[exp_id]['fn_eeg_cnt'][0]
600
+
601
+ phys_data_info_eeg = dict()
602
+ for key, val in pynibs.get_cnt_infos(fn_eeg_cnt).items():  # read the info from the EEG file itself
603
+ phys_data_info_eeg[key] = val
604
+
605
+ phys_data_info_eeg["max_duration"] = max_duration
606
+ phys_data_info_eeg["eeg_channels"] = eeg_channels
607
+
608
+ df_phys_data_info_eeg = pd.DataFrame.from_dict(phys_data_info_eeg)
609
+ df_phys_data_info_eeg.to_hdf(fn_exp_hdf5, "phys_data/info/EEG")
610
+ print(f"Writing EEG info dataframe (phys_data/info/EEG) to {fn_exp_hdf5}")
611
+
612
+ # read raw eeg data from cnt file and write to hdf5 file
613
+ pynibs.visor.get_cnt_data(fn_eeg_cnt,
614
+ channels=eeg_channels,
615
+ max_duration=max_duration,
616
+ trigger_val=eeg_trigger_value,
617
+ verbose=verbose,
618
+ fn_hdf5=fn_exp_hdf5,
619
+ path_hdf5="phys_data/raw/EEG",
620
+ return_data=False)
621
+
622
+ print(f"Writing EEG raw dataframe (phys_data/raw/EEG) to {fn_exp_hdf5}")
623
+
624
+ print("DONE")