pyNIBS 0.2024.8-py3-none-any.whl → 0.2026.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. pynibs/__init__.py +26 -14
  2. pynibs/coil/__init__.py +6 -0
  3. pynibs/{coil.py → coil/coil.py} +213 -543
  4. pynibs/coil/export.py +508 -0
  5. pynibs/congruence/__init__.py +4 -1
  6. pynibs/congruence/congruence.py +37 -45
  7. pynibs/congruence/ext_metrics.py +40 -11
  8. pynibs/congruence/stimulation_threshold.py +1 -2
  9. pynibs/expio/Mep.py +120 -370
  10. pynibs/expio/__init__.py +10 -0
  11. pynibs/expio/brainsight.py +34 -37
  12. pynibs/expio/cobot.py +25 -25
  13. pynibs/expio/exp.py +10 -7
  14. pynibs/expio/fit_funs.py +3 -0
  15. pynibs/expio/invesalius.py +70 -0
  16. pynibs/expio/localite.py +190 -91
  17. pynibs/expio/neurone.py +139 -0
  18. pynibs/expio/signal_ced.py +345 -2
  19. pynibs/expio/visor.py +16 -15
  20. pynibs/freesurfer.py +34 -33
  21. pynibs/hdf5_io/hdf5_io.py +149 -132
  22. pynibs/hdf5_io/xdmf.py +35 -31
  23. pynibs/mesh/__init__.py +1 -1
  24. pynibs/mesh/mesh_struct.py +77 -92
  25. pynibs/mesh/transformations.py +121 -21
  26. pynibs/mesh/utils.py +191 -99
  27. pynibs/models/_TMS.py +2 -1
  28. pynibs/muap.py +1 -2
  29. pynibs/neuron/__init__.py +10 -0
  30. pynibs/neuron/models/mep.py +566 -0
  31. pynibs/neuron/neuron_regression.py +98 -8
  32. pynibs/optimization/__init__.py +12 -2
  33. pynibs/optimization/{optimization.py → coil_opt.py} +157 -133
  34. pynibs/optimization/multichannel.py +1174 -24
  35. pynibs/optimization/workhorses.py +7 -8
  36. pynibs/regression/__init__.py +4 -2
  37. pynibs/regression/dual_node_detection.py +229 -219
  38. pynibs/regression/regression.py +92 -61
  39. pynibs/roi/__init__.py +4 -1
  40. pynibs/roi/roi_structs.py +19 -21
  41. pynibs/roi/{roi.py → roi_utils.py} +56 -33
  42. pynibs/subject.py +24 -14
  43. pynibs/util/__init__.py +20 -4
  44. pynibs/util/dosing.py +4 -5
  45. pynibs/util/quality_measures.py +39 -38
  46. pynibs/util/rotations.py +116 -9
  47. pynibs/util/{simnibs.py → simnibs_io.py} +29 -19
  48. pynibs/util/{util.py → utils.py} +20 -22
  49. pynibs/visualization/para.py +4 -4
  50. pynibs/visualization/render_3D.py +4 -4
  51. pynibs-0.2026.1.dist-info/METADATA +105 -0
  52. pynibs-0.2026.1.dist-info/RECORD +69 -0
  53. {pyNIBS-0.2024.8.dist-info → pynibs-0.2026.1.dist-info}/WHEEL +1 -1
  54. pyNIBS-0.2024.8.dist-info/METADATA +0 -723
  55. pyNIBS-0.2024.8.dist-info/RECORD +0 -107
  56. pynibs/data/configuration_exp0.yaml +0 -59
  57. pynibs/data/configuration_linear_MEP.yaml +0 -61
  58. pynibs/data/configuration_linear_RT.yaml +0 -61
  59. pynibs/data/configuration_sigmoid4.yaml +0 -68
  60. pynibs/data/network mapping configuration/configuration guide.md +0 -238
  61. pynibs/data/network mapping configuration/configuration_TEMPLATE.yaml +0 -42
  62. pynibs/data/network mapping configuration/configuration_for_testing.yaml +0 -43
  63. pynibs/data/network mapping configuration/configuration_modelTMS.yaml +0 -43
  64. pynibs/data/network mapping configuration/configuration_reg_isi_05.yaml +0 -43
  65. pynibs/data/network mapping configuration/output_documentation.md +0 -185
  66. pynibs/data/network mapping configuration/recommendations_for_accuracy_threshold.md +0 -77
  67. pynibs/data/neuron/models/L23_PC_cADpyr_biphasic_v1.csv +0 -1281
  68. pynibs/data/neuron/models/L23_PC_cADpyr_monophasic_v1.csv +0 -1281
  69. pynibs/data/neuron/models/L4_LBC_biphasic_v1.csv +0 -1281
  70. pynibs/data/neuron/models/L4_LBC_monophasic_v1.csv +0 -1281
  71. pynibs/data/neuron/models/L4_NBC_biphasic_v1.csv +0 -1281
  72. pynibs/data/neuron/models/L4_NBC_monophasic_v1.csv +0 -1281
  73. pynibs/data/neuron/models/L4_SBC_biphasic_v1.csv +0 -1281
  74. pynibs/data/neuron/models/L4_SBC_monophasic_v1.csv +0 -1281
  75. pynibs/data/neuron/models/L5_TTPC2_cADpyr_biphasic_v1.csv +0 -1281
  76. pynibs/data/neuron/models/L5_TTPC2_cADpyr_monophasic_v1.csv +0 -1281
  77. pynibs/tests/data/InstrumentMarker20200225163611937.xml +0 -19
  78. pynibs/tests/data/TriggerMarkers_Coil0_20200225163443682.xml +0 -14
  79. pynibs/tests/data/TriggerMarkers_Coil1_20200225170337572.xml +0 -6373
  80. pynibs/tests/data/Xdmf.dtd +0 -89
  81. pynibs/tests/data/brainsight_niiImage_nifticoord.txt +0 -145
  82. pynibs/tests/data/brainsight_niiImage_nifticoord_largefile.txt +0 -1434
  83. pynibs/tests/data/brainsight_niiImage_niifticoord_mixedtargets.txt +0 -47
  84. pynibs/tests/data/create_subject_testsub.py +0 -332
  85. pynibs/tests/data/data.hdf5 +0 -0
  86. pynibs/tests/data/geo.hdf5 +0 -0
  87. pynibs/tests/test_coil.py +0 -474
  88. pynibs/tests/test_elements2nodes.py +0 -100
  89. pynibs/tests/test_hdf5_io/test_xdmf.py +0 -61
  90. pynibs/tests/test_mesh_transformations.py +0 -123
  91. pynibs/tests/test_mesh_utils.py +0 -143
  92. pynibs/tests/test_nnav_imports.py +0 -101
  93. pynibs/tests/test_quality_measures.py +0 -117
  94. pynibs/tests/test_regressdata.py +0 -289
  95. pynibs/tests/test_roi.py +0 -17
  96. pynibs/tests/test_rotations.py +0 -86
  97. pynibs/tests/test_subject.py +0 -71
  98. pynibs/tests/test_util.py +0 -24
  99. /pynibs/{regression/score_types.py → neuron/models/m1_montbrio.py} +0 -0
  100. {pyNIBS-0.2024.8.dist-info → pynibs-0.2026.1.dist-info/licenses}/LICENSE +0 -0
  101. {pyNIBS-0.2024.8.dist-info → pynibs-0.2026.1.dist-info}/top_level.txt +0 -0
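Several modules were moved or renamed in this release (coil.py → coil/coil.py, optimization/optimization.py → optimization/coil_opt.py, roi/roi.py → roi_utils.py, util/simnibs.py → util/simnibs_io.py, util/util.py → util/utils.py). The sketch below shows how downstream imports might be adapted; the new paths are read off the rename list above, but whether each symbol is also re-exported at package level in 0.2026.1 is an assumption, so verify against the installed package.

    # Hypothetical compatibility import for the util.py -> utils.py rename;
    # `bash` is a helper that the freesurfer.py diff below calls as
    # pynibs.util.utils.bash, so it is assumed to exist in both layouts.
    try:
        from pynibs.util.utils import bash      # 0.2026.1 layout
    except ImportError:
        from pynibs.util.util import bash       # 0.2024.8 layout

pynibs/expio/signal_ced.py CHANGED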
@@ -1,7 +1,13 @@
1
- """ """
1
+ """Import functions for CED Signal data."""
2
+ import re
2
3
  import os
4
+ import json
3
5
  import h5py
6
+ import datetime
7
+ import warnings
4
8
  import numpy as np
9
+ import pandas as pd
10
+ from scipy import io as spio
5
11
  import pynibs
6
12
 
7
13
 
@@ -41,7 +47,7 @@ def cfs2hdf5(fn_cfs, fn_hdf5=None):
41
47
  sweeps = int(cfs_header[sweep_index + 18:comma_index])
42
48
  records = emg.shape[0]
43
49
  samples = int(records / sweeps)
44
- sampling_rate = pynibs.get_mep_sampling_rate(fn_cfs)
50
+ sampling_rate = pynibs.expio.get_mep_sampling_rate(fn_cfs)
45
51
  emg = np.reshape(emg, (sweeps, samples))
46
52
  time = np.linspace(0, samples, samples) / sampling_rate
47
53
 
@@ -49,3 +55,340 @@ def cfs2hdf5(fn_cfs, fn_hdf5=None):
49
55
  f.create_dataset("emg", data=emg)
50
56
  f.create_dataset("time", data=time)
51
57
  f.create_dataset("sampling_rate", data=np.array([sampling_rate]))
58
+
59
+
60
+ def read_biosig_emg_data(fn_data, include_first_trigger=False, filetype="cfs"):
61
+ """
62
+ Reads EMG data from a biosig file.
63
+
64
+ Parameters
65
+ ----------
66
+ fn_data : str
67
+ Path to the biosig file.
68
+ include_first_trigger : bool, default: False
69
+ Whether to include the first trigger event in the data.
70
+ filetype : str, default: 'cfs'
71
+ Type of the biosig file.
72
+
73
+ Returns
74
+ -------
75
+ emg_data : np.ndarray
76
+ (num_sweeps, num_channels, samples_per_sweep) EMG data.
77
+ time_diff_list : list
78
+ Time differences between trigger events in seconds.
79
+ num_sweeps : int
80
+ Number of sweeps in the EMG data.
81
+ num_channels : int
82
+ Number of channels in the EMG data.
83
+ samples_per_sweep : int
84
+ Number of samples per sweep in the EMG data.
85
+ sampling_rate : int
86
+ Sampling rate of the EMG data.
87
+ """
88
+ try:
89
+ import biosig
90
+ except ImportError:
91
+ raise ImportError("Please install biosig from pynibs/pkg/biosig folder!")
92
+ return
93
+
94
+ if filetype == "cfs": # TODO: also move the TXT reader here?
95
+ cfs_fn = fn_data
96
+ cfs_header = json.loads(biosig.header(cfs_fn))
97
+ cfs_emg = biosig.data(cfs_fn)
98
+
99
+ num_sweeps = cfs_header["NumberOfSweeps"]
100
+ num_channels = cfs_emg.shape[1]
101
+
102
+ total_num_samples = cfs_header["NumberOfSamples"]
103
+ samples_per_sweep = int(total_num_samples / num_sweeps)
104
+ sampling_rate = int(cfs_header["Samplingrate"])
105
+
106
+ # get timestamps
107
+ tms_pulse_timedelta = datetime.timedelta()
108
+ # get hour, minute and second
109
+ time_mep_list = []
110
+ time_diff_list = []
111
+ trigger_event_idcs = []
112
+ if include_first_trigger:
113
+ trigger_event_idcs.append(0)
114
+ time_diff_list.append(0)
115
+ time_mep_list.append(
116
+ datetime.datetime.strptime(
117
+ cfs_header["EVENT"][0]["TimeStamp"], '%Y-%b-%d %H:%M:%S'
118
+ )
119
+ -
120
+ datetime.timedelta(
121
+ seconds=float(cfs_header["EVENT"][0]["POS"])
122
+ )
123
+ )
124
+
125
+ # convert time string into integer
126
+ for event in cfs_header["EVENT"]:
127
+ date = datetime.datetime.strptime(event["TimeStamp"], '%Y-%b-%d %H:%M:%S')
128
+
129
+ # we are interested in the tms pulse time, so add it to ts
130
+ date += tms_pulse_timedelta
131
+ time_mep_list.append(date)
132
+ time_diff_list.append((date - time_mep_list[0]).total_seconds())
133
+
134
+ # compute indices in data block corresponding to the events
135
+ if event["TYP"] == "0x7ffe":
136
+ trigger_event_idcs.append(
137
+ round(event["POS"] * cfs_header["Samplingrate"])
138
+ )
139
+
140
+ num_sweeps = min(num_sweeps, len(trigger_event_idcs))
141
+
142
+ emg_data = np.zeros((num_sweeps, num_channels, samples_per_sweep), dtype=np.float32)
143
+
144
+ for c_idx in range(num_channels):
145
+ # Use emg data starting from the index of the first trigger event
146
+ # assumptions:
147
+ # - after an initial offset all emg data were captured consecutively
148
+ # - the first emg data frame may be captured without an explicit TMS
149
+ # trigger (e.g. by checking the "write to disk" option)
150
+ # - if we had dropouts in between the emg data block (not just at the
151
+ # beginning) we would need to use the entire trigger_event_idcs
152
+ # list.
153
+ emg_data[:, c_idx, :] = np.reshape(
154
+ cfs_emg[trigger_event_idcs[0]:, c_idx],
155
+ (num_sweeps, samples_per_sweep)
156
+ )
157
+
158
+ return emg_data, time_diff_list, num_sweeps, num_channels, samples_per_sweep, sampling_rate
159
+
160
+
161
+ def get_mep_elements(mep_fn, tms_pulse_time, drop_mep_idx=None, cfs_data_column=0, channels=None, time_format="delta",
162
+ plot=False, start_mep=18, end_mep=35):
163
+ """
164
+ Reads EMG data from a CED .cfs, .mat, or .txt file (Signal export) and returns MEP amplitudes.
165
+
166
+ Parameters
167
+ ----------
168
+ mep_fn : string
169
+ Path to the .cfs, .mat, or .txt file (Signal export).
170
+ tms_pulse_time : float
171
+ Time in [s] of TMS pulse as specified in signal.
172
+ drop_mep_idx : List of int or None, optional
173
+ Which MEPs to remove before matching.
174
+ cfs_data_column : int or list of int, default: 0
175
+ Column(s) of the dataset in the .cfs file. For .txt files the data columns are shifted by +1 (column 0 is the time axis).
176
+ channels : list of str, optional
177
+ Channel names.
178
+ time_format : str, default: "delta"
179
+ Format of the returned TMS pulse timestamps (time_pulse_lst).
180
+
181
+ * ``"delta"`` returns list of datetime.timedelta in seconds.
182
+ * ``"hms"`` returns datetime.datetime(year, month, day, hour, minute, second, microsecond).
183
+
184
+ plot : bool, default: False
185
+ Plot MEPs.
186
+ start_mep : float, default: 18
187
+ Start of time frame after TMS pulse where p2p value is evaluated (in ms).
188
+ end_mep : float, default: 35
189
+ End of time frame after TMS pulse where p2p value is evaluated (in ms).
190
+
191
+ Returns
192
+ -------
193
+ p2p_arr : np.ndarray of float
194
+ (N_channel, N_stim) Peak to peak values of N sweeps.
195
+ time_pulse_lst : list of datetime.timedelta
196
+ TMS pulse timestamps
197
+ mep_raw_data : np.ndarray of float
198
+ (N_channel, N_stim, N_samples) Raw (unfiltered) MEP data.
199
+ mep_filt_data : np.ndarray of float
200
+ (N_channel, N_stim, N_samples) Filtered MEP data (Butterworth lowpass filter).
201
+ emg_time_axes : np.ndarray of float
202
+ (N_samples) Time axis corresponding to EMG data.
203
+ mep_onset_arr : np.ndarray of float
204
+ (N_channel, N_stim) MEP onsets after the TMS pulse.
205
+ """
206
+ # convert pulse time to datetime object in case of "delta"
207
+ if time_format == "delta":
208
+ tms_pulse_timedelta = datetime.timedelta(milliseconds=tms_pulse_time * 1000)
209
+ elif time_format == "hms":
210
+ tms_pulse_timedelta = datetime.timedelta()
211
+ else:
212
+ raise NotImplementedError("Specified time_format not implemented yet...")
213
+
214
+ if mep_fn.endswith('.cfs'):
215
+ # get data from cfs file
216
+ import biosig
217
+ mep_raw_data_tmp = biosig.data(mep_fn)
218
+ mep_raw_data_tmp = mep_raw_data_tmp[:, cfs_data_column] # select the requested data column(s)
219
+
220
+ # get header from cfs file
221
+ cfs_header = biosig.header(mep_fn)
222
+
223
+ # get timestamps
224
+ # get all indices of timestamps from cfs header
225
+ ts_mep_lst = [timestamp.start() for timestamp in re.finditer('TimeStamp', cfs_header)]
226
+ # get hour, minute and second
227
+ time_pulse_lst = []
228
+ # convert time string into integer
229
+ for index in ts_mep_lst:
230
+ hour = int(cfs_header[index + 26:index + 28])
231
+ minute = int(cfs_header[index + 29:index + 31])
232
+ second = int(cfs_header[index + 32:index + 34])
233
+ # fix bug with second 60
234
+ if second == 60:
235
+ ts = datetime.datetime(1900, 1, 1, hour, minute, 59)
236
+ ts += datetime.timedelta(seconds=1)
237
+ else:
238
+ ts = datetime.datetime(1900, 1, 1, hour, minute, second)
239
+
240
+ # we are interested in the tms pulse time, so add it to ts
241
+ ts += tms_pulse_timedelta
242
+ time_pulse_lst.append(ts)
243
+
244
+ if time_format == "delta":
245
+ time_pulse_lst = [time_pulse_lst[i] - time_pulse_lst[0] for i in range(len(time_pulse_lst))]
246
+ if time_format == "hms":
247
+ pass
248
+
249
+ # add first timestamp (not saved by Signal) and shift the others by the ISI
250
+ time_pulse_lst = [datetime.timedelta(seconds=0)] + [
251
+ t + time_pulse_lst[1] - time_pulse_lst[0] for t in time_pulse_lst]
252
+
253
+ # get peak-to-peak values
254
+ # get the number of samples per sweep
255
+ sweep_index = cfs_header.find('NumberOfSweeps')
256
+ comma_index = cfs_header.find(',', sweep_index)
257
+ n_sweeps = int(cfs_header[sweep_index + 18:comma_index])
258
+ record_index = cfs_header.find('NumberOfRecords')
259
+ comma_index = cfs_header.find(',', record_index)
260
+ records = int(cfs_header[record_index + 19:comma_index])
261
+ n_samples = int(records / n_sweeps)
262
+ if not isinstance(n_samples, int):
263
+ print('Warning: Number of samples is not an integer.')
264
+ # TODO: Correct get_mep_elements() sample number check. This does not work as expected (from Ole)
265
+
266
+ # reshape numpy array
267
+ mep_raw_arr = np.zeros((len(cfs_data_column), n_sweeps, n_samples))
268
+
269
+ for i in cfs_data_column:
270
+ mep_raw_arr[i, :, :] = np.reshape(mep_raw_data_tmp[:, i], (n_sweeps, n_samples))
271
+
272
+ sampling_rate = pynibs.expio.get_mep_sampling_rate(mep_fn)
273
+
274
+ elif mep_fn.endswith('.mat'):
275
+ mep_data = spio.loadmat(mep_fn, struct_as_record=False, squeeze_me=True)
276
+
277
+ # find data
278
+ for k in mep_data.keys():
279
+ if isinstance(mep_data[k], spio.matlab.mio5_params.mat_struct):
280
+ mep_data = mep_data[k].__dict__
281
+ break
282
+
283
+ n_samples = mep_data['points']
284
+ mep_raw_arr = mep_data['values'].transpose(1, 2, 0)
285
+ time_pulse_lst = [datetime.timedelta(seconds=f.__dict__['start']) for f in mep_data['frameinfo']]
286
+ sampling_rate = pynibs.expio.get_mep_sampling_rate(mep_fn)
287
+
288
+ elif mep_fn.endswith('.txt'):
289
+ warnings.warn(".txt import is deprecated - use .mat or .cfs.", DeprecationWarning)
290
+ print("Reading MEP from .txt file")
291
+ # The Signal text export looks like this:
292
+ #
293
+ # "s"\t"ADC 0"\t"ADC 1"
294
+ # 0.00000000\t-0.066681\t-0.047607
295
+ # 0.00025000\t-0.066376\t-0.049286
296
+ # 0.00050000\t-0.066528\t-0.056610
297
+ #
298
+ # "s"\t"ADC 0"\t"ADC 1"
299
+ # 0.00000000\t-0.066681\t-0.047607
300
+ # 0.00025000\t-0.066376\t-0.049286
301
+ # 0.00050000\t-0.066528\t-0.056610
302
+ #
303
+ # With first column = time, second = 1st electrode, ...
304
+ # This is an example of 2 sweeps, 3 samples each, sampling rate = 4000
305
+
306
+ # Find number of samples per sweep
307
+ pattern = '"s"'
308
+ with open(mep_fn, 'r') as f:
309
+ for line_nr, line in enumerate(f):
310
+ print(f'{line_nr}: {line}')
311
+ if pattern in line and line_nr > 0:
312
+ # find second occurrence of "s" -> end of first sweep
313
+ n_samples = line_nr
314
+ print(f'{line_nr}: {line}')
315
+ if line != '\n':
316
+ last_sample_time = line
317
+
318
+ # extract time (first column) of last samples
319
+ last_sample_time = float(last_sample_time[0:last_sample_time.find('\t')])
320
+
321
+ # subtract 2 because first row is header ("s"\t"ADC 0"\t"ADC 1") and last row is blank
322
+ n_samples = n_samples - 2
323
+
324
+ df_mep = pd.read_csv(mep_fn,
325
+ delimiter="\t",
326
+ skip_blank_lines=True,
327
+ skiprows=lambda x: x % (n_samples + 2) == 0 and x > 0)
328
+
329
+ n_sweeps = int(df_mep.shape[0] / n_samples)
330
+ mep_raw_arr = np.zeros((len(cfs_data_column), n_sweeps, n_samples))
331
+
332
+ for i in range(n_sweeps):
333
+ mep_raw_arr[:, i, :] = df_mep.iloc[i * n_samples:(i + 1) * n_samples, 1:].transpose()
334
+
335
+ # get sampling rate from the number of samples per sweep and the last sample time
336
+ sampling_rate = int(mep_raw_arr.shape[2] - 1) / last_sample_time
337
+
338
+ # build time_mep_list
339
+ # we only know the within-sweep timing of each MEP, so assume Signal sticks strictly to the protocol
340
+ sample_len = last_sample_time + 1 / sampling_rate
341
+
342
+ # TODO: The ISI is missing here, do we want to add it to the subject object?
343
+ time_pulse_lst = [datetime.timedelta(seconds=i * sample_len) +
344
+ tms_pulse_timedelta for i in range(mep_raw_arr.shape[1])]
345
+
346
+ else:
347
+ raise ValueError("Unknown MEP file extension. Use .cfs, .mat, or .txt.")
348
+
349
+ # get peak to peak value of every sweep and plot results in mep/plots/channels
350
+ if channels is None:
351
+ channels = [str(i) for i in cfs_data_column]
352
+
353
+ tmp = np.zeros((mep_raw_arr.shape[0], mep_raw_arr.shape[1], 3)).astype(object)
354
+ for i_channel in range(mep_raw_arr.shape[0]):
355
+ print(f"Calculating p2p values for channel: {channels[i_channel]}")
356
+
357
+ for i_zap in range(mep_raw_arr.shape[1]):
358
+ tmp[i_channel, i_zap, 0], \
359
+ tmp[i_channel, i_zap, 1], \
360
+ tmp[i_channel, i_zap, 2] = pynibs.expio.calc_p2p(sweep=mep_raw_arr[i_channel, i_zap, :],
361
+ tms_pulse_time=tms_pulse_time,
362
+ sampling_rate=sampling_rate,
363
+ fn_plot=None,
364
+ start_mep=start_mep,
365
+ end_mep=end_mep)
366
+
367
+ p2p_arr = np.zeros((tmp.shape[0], tmp.shape[1]))
368
+ mep_onset_arr = np.zeros((tmp.shape[0], tmp.shape[1]))
369
+ mep_filt_arr = np.zeros(mep_raw_arr.shape)
370
+
371
+ emg_time_axes = np.arange(mep_raw_arr.shape[2]) / sampling_rate
372
+
373
+ for idx_channel in cfs_data_column:
374
+ for i, t in enumerate(tmp[idx_channel, :, :]):
375
+ p2p_arr[idx_channel, i] = tmp[idx_channel, i, 0]
376
+ mep_onset_arr[idx_channel, i] = tmp[idx_channel, i, 2]
377
+ mep_filt_arr[idx_channel, i, :] = tmp[idx_channel, i, 1]
378
+
379
+ if time_format == "delta":
380
+ time_pulse_lst = [time_pulse_lst[i] - time_pulse_lst[0] for i in range(len(time_pulse_lst))]
381
+ elif time_format == "hms":
382
+ pass
383
+
384
+ # remove MEPs according to drop_mep_idx and reset time
385
+ if drop_mep_idx is not None:
386
+ p2p_arr = np.delete(p2p_arr, drop_mep_idx, axis=1)
387
+ mep_onset_arr = np.delete(mep_onset_arr, drop_mep_idx, axis=1)
388
+ time_pulse_lst = np.delete(time_pulse_lst, drop_mep_idx)
389
+
390
+ keep_mep_idx = [i for i in range(mep_raw_arr.shape[1]) if i not in np.array(drop_mep_idx)]
391
+ mep_raw_arr = mep_raw_arr[:, keep_mep_idx, :]
392
+ mep_filt_arr = mep_filt_arr[:, keep_mep_idx, :]
393
+
394
+ return p2p_arr, time_pulse_lst, mep_raw_arr, mep_filt_arr, emg_time_axes, mep_onset_arr
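A minimal usage sketch for the get_mep_elements reader added above, assuming it remains importable from pynibs.expio.signal_ced as the file path suggests. The file name and channel label are placeholders, and reading .cfs files additionally requires the bundled biosig module.

    from pynibs.expio.signal_ced import get_mep_elements

    # Hypothetical call; arguments mirror the signature documented above.
    p2p, t_pulse, raw, filt, t_axis, onsets = get_mep_elements(
        mep_fn="session01.cfs",    # placeholder CED Signal recording
        tms_pulse_time=0.1,        # TMS pulse 100 ms into each sweep
        cfs_data_column=[0],       # the .cfs branch indexes this as a list
        channels=["FDI"],          # placeholder channel name
        start_mep=18, end_mep=35,  # p2p search window in ms after the pulse
    )
    print(p2p.shape)               # (n_channels, n_stimuli)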
pynibs/expio/visor.py CHANGED
@@ -17,12 +17,12 @@ except (ImportError, SyntaxError):
17
17
 
18
18
  def read_nlr(fname):
19
19
  """
20
- Reads NLR coordinates from *_recording.mri file.
20
+ Reads NLR coordinates from _recording.mri file.
21
21
 
22
22
  Parameters
23
23
  ----------
24
24
  fname : str
25
- FIle path of the NLR recording MRI file.
25
+ File path of the NLR recording MRI file.
26
26
 
27
27
  Returns
28
28
  -------
@@ -62,6 +62,7 @@ def get_instrument_marker(im_path, verbose=False):
62
62
  Return all instrument markers from visor .cnt file.
63
63
 
64
64
  Coordinate system in raw ANT space (NLR) is defined as:
65
+
65
66
  - origin: intersection between line of ear fiducials and nasion
66
67
  - x-axis: origin -> nasion
67
68
  - y-axis: origin -> left ear
@@ -83,6 +84,7 @@ def get_instrument_marker(im_path, verbose=False):
83
84
  * StimulusID: int
84
85
  * etc...
85
86
 
87
+
86
88
  Raises
87
89
  ------
88
90
  AssertionError
@@ -161,7 +163,6 @@ def get_cnt_data(fn, channels='all', trigger_val='1', max_duration=10,
161
163
  data_lst: list of np.ndarray, optional
162
164
  (samples,channels), List of EEG/EMG data. Only returned if "fn_hdf5" is not None.
163
165
  """
164
-
165
166
  f = libeep.read_cnt(fn)
166
167
  n_trig = f.get_trigger_count()
167
168
  n_samples = f.get_sample_count()
@@ -360,7 +361,6 @@ def filter_emg(emg, fs):
360
361
  emg_filt : list of np.ndarray
361
362
  (n_stimuli), Filtered EMG data.
362
363
  """
363
-
364
364
  # 5 Hz Butterworth high pass
365
365
  ############################
366
366
  b_butterhigh, a_butterhigh = signal.butter(N=5, Wn=5, btype='high', analog=False, fs=fs)
@@ -439,6 +439,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
439
439
  * "phys_data/raw/EEG": Raw EEG data
440
440
  * "phys_data/postproc/EMG": Post-processed EMG data (e.g. filtered, p2p, etc.)
441
441
  * "phys_data/postproc/EEG": Post-processed EEG data (e.g. filtered, p2p, etc.)
442
+
442
443
  """
443
444
  # mep_paths_lst = subject.exp[exp_id]['fn_data']
444
445
 
@@ -466,12 +467,12 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
466
467
 
467
468
  fn_fiducials = subject.exp[exp_id]['fn_fiducials'][0]
468
469
 
469
- ims_list = pynibs.visor.get_instrument_marker(fn_visor_cnt)
470
- ims_dict = pynibs.list2dict(ims_list)
470
+ ims_list = get_instrument_marker(fn_visor_cnt)
471
+ ims_dict = pynibs.util.utils.list2dict(ims_list)
471
472
  n_stim = len(ims_list)
472
473
 
473
474
  # read fiducials and transform to simnibs space
474
- fiducials = pynibs.visor.read_nlr(fn_fiducials)
475
+ fiducials = read_nlr(fn_fiducials)
475
476
 
476
477
  # fiducial correction
477
478
  if 'fiducial_corr' in subject.exp[exp_id]:
@@ -484,7 +485,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
484
485
 
485
486
  matsimnibs_raw = np.dstack(ims_dict["coil_mean_raw"])
486
487
 
487
- matsimnibs = pynibs.nnav2simnibs(fn_exp_nii=fn_exp_nii,
488
+ matsimnibs = pynibs.expio.nnav2simnibs(fn_exp_nii=fn_exp_nii,
488
489
  fn_conform_nii=subject.mesh[mesh_idx]['fn_mri_conform'],
489
490
  m_nnav=matsimnibs_raw,
490
491
  nnav_system="visor",
@@ -505,7 +506,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
505
506
  "condition": [f"{(i - 1):04d}" for i in ims_dict["StimulusID"]]}
506
507
 
507
508
  df_stim_data = pd.DataFrame.from_dict(stim_data)
508
- df_stim_data.to_hdf(fn_exp_hdf5, "stim_data")
509
+ df_stim_data.to_hdf(fn_exp_hdf5, key="stim_data")
509
510
 
510
511
  print(f"Writing stim_data dataframe to {fn_exp_hdf5}")
511
512
 
@@ -534,7 +535,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
534
535
  fn_emg_cnt = subject.exp[exp_id]['fn_emg_cnt'][0]
535
536
 
536
537
  # read info
537
- cnt_info = pynibs.get_cnt_infos(fn_emg_cnt)
538
+ cnt_info = get_cnt_infos(fn_emg_cnt)
538
539
 
539
540
  phys_data_info_emg = dict()
540
541
  for key in cnt_info.keys():
@@ -544,11 +545,11 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
544
545
  phys_data_info_emg["emg_channels"] = emg_channels
545
546
 
546
547
  df_phys_data_info_emg = pd.DataFrame.from_dict(phys_data_info_emg)
547
- df_phys_data_info_emg.to_hdf(fn_exp_hdf5, "phys_data/info/EMG")
548
+ df_phys_data_info_emg.to_hdf(fn_exp_hdf5, key="phys_data/info/EMG")
548
549
  print(f"Writing EMG info dataframe (phys_data/info/EMG) to {fn_exp_hdf5}")
549
550
 
550
551
  # read raw emg data from cnt file and write to hdf5 file
551
- emg = pynibs.visor.get_cnt_data(fn_emg_cnt,
552
+ emg = get_cnt_data(fn_emg_cnt,
552
553
  channels=emg_channels,
553
554
  max_duration=max_duration,
554
555
  trigger_val=emg_trigger_value,
@@ -560,7 +561,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
560
561
  print(f"Writing EMG raw dataframe (phys_data/raw/EMG) to {fn_exp_hdf5}")
561
562
 
562
563
  # filter data
563
- emg_filt = pynibs.visor.filter_emg(emg=emg, fs=phys_data_info_emg["sampling_rate"])
564
+ emg_filt = filter_emg(emg=emg, fs=phys_data_info_emg["sampling_rate"])
564
565
  df_phys_data_postproc_emg = pd.DataFrame.from_dict({"filtered": emg_filt})
565
566
 
566
567
  # calc p2p
@@ -568,7 +569,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
568
569
  # p2p = calc_p2p(emg_filt)
569
570
  # df_phys_data_postproc_emg["p2p"] = p2p
570
571
 
571
- df_phys_data_postproc_emg.to_hdf(fn_exp_hdf5, "phys_data/postproc/EMG")
572
+ df_phys_data_postproc_emg.to_hdf(fn_exp_hdf5, key="phys_data/postproc/EMG")
572
573
  print(f"Writing EMG postproc dataframe (phys_data/postproc/EMG) to {fn_exp_hdf5}")
573
574
 
574
575
  # read eeg
@@ -606,7 +607,7 @@ def merge_exp_data_visor(subject, exp_id=0, mesh_idx=0, verbose=False, start_mep
606
607
  phys_data_info_eeg["eeg_channels"] = eeg_channels
607
608
 
608
609
  df_phys_data_info_eeg = pd.DataFrame.from_dict(phys_data_info_eeg)
609
- df_phys_data_info_eeg.to_hdf(fn_exp_hdf5, "phys_data/info/EEG")
610
+ df_phys_data_info_eeg.to_hdf(fn_exp_hdf5, key="phys_data/info/EEG")
610
611
  print(f"Writing EEG info dataframe (phys_data/info/EEG) to {fn_exp_hdf5}")
611
612
 
612
613
  # read raw eeg data from cnt file and write to hdf5 file
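The visor.py changes above pass the HDF5 group name to pandas via the explicit key= keyword instead of positionally. A self-contained sketch of the same pattern (the file name is a placeholder; writing requires the PyTables backend):

    import pandas as pd

    df = pd.DataFrame({"p2p": [0.4, 1.2], "condition": ["0000", "0001"]})
    df.to_hdf("exp.hdf5", key="stim_data", mode="w")      # write group "stim_data"
    stim_data = pd.read_hdf("exp.hdf5", key="stim_data")  # read it back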
pynibs/freesurfer.py CHANGED
@@ -47,7 +47,7 @@ def read_curv_data(fname_curv, fname_inf, raw=False):
47
47
  points_inf, con_inf = nibabel.freesurfer.read_geometry(fname_inf)
48
48
 
49
49
  # interpolate point data to element centers
50
- curv = pynibs.data_nodes2elements(curv_points, con_inf)
50
+ curv = pynibs.mesh.data_nodes2elements(curv_points, con_inf)
51
51
 
52
52
  # normalize curvature data, optional
53
53
  if not raw:
@@ -81,7 +81,7 @@ def make_group_average(subjects=None, subject_dir=None, average=None, hemi="lh",
81
81
  Which hemisphere: ``lh`` or ``rh``.
82
82
 
83
83
  .. deprecated:: 0.0.1
84
- Don't use any more.
84
+ Don't use anymore.
85
85
  template : str, default: 'mytemplate'
86
86
  Basename of new template.
87
87
  steps : int, default: 2
@@ -114,7 +114,7 @@ def make_group_average(subjects=None, subject_dir=None, average=None, hemi="lh",
114
114
  for i, subject in enumerate(subjects):
115
115
  if not os.path.exists(os.path.join(subject_dir, subject_names[i])):
116
116
  print(('Generating symlink of subject {} in {}'.format(subject_names[i], subject_dir)))
117
- pynibs.bash('ln -s ' + subject + ' ' + subject_dir)
117
+ pynibs.util.utils.bash('ln -s ' + subject + ' ' + subject_dir)
118
118
 
119
119
  os.chdir(subject_dir)
120
120
 
@@ -140,15 +140,15 @@ def make_group_average(subjects=None, subject_dir=None, average=None, hemi="lh",
140
140
  # print [command, "{}/surf/{}.sphere {}{}.tif {}.sphere.myreg{}".format(subject,
141
141
  # hemi, template, i, hemi, i)]
142
142
  processes.add(subprocess.Popen(
143
- [f'{command} '
144
- f'{os.path.split(subject)[1]}{os.sep}surf{os.sep}lh.sphere lh{template}{i}.tif '
145
- f'{"lh"}.sphere.{template}{i}'],
146
- shell=True))
143
+ [f'{command} '
144
+ f'{os.path.split(subject)[1]}{os.sep}surf{os.sep}lh.sphere lh{template}{i}.tif '
145
+ f'{"lh"}.sphere.{template}{i}'],
146
+ shell=True))
147
147
  processes.add(subprocess.Popen(
148
- [f"{command} "
149
- f"{os.path.split(subject)[1]}{os.sep}surf{os.sep}rh.sphere rh{template}{i}.tif "
150
- f"{'rh'}.sphere.{template}{i}"],
151
- shell=True))
148
+ [f"{command} "
149
+ f"{os.path.split(subject)[1]}{os.sep}surf{os.sep}rh.sphere rh{template}{i}.tif "
150
+ f"{'rh'}.sphere.{template}{i}"],
151
+ shell=True))
152
152
 
153
153
  # check if n_cpu processes are reached
154
154
  if len(processes) >= n_cpu:
@@ -203,7 +203,6 @@ def make_average_subject(subjects, subject_dir, average_dir, fn_reg='sphere.reg'
203
203
  <Files> : .tif and .reg files
204
204
  Average template in average_dir and registered curvature files, ``?h.sphere.reg`` in subjects/surf folders.
205
205
  """
206
-
207
206
  subject_names = [os.path.split(subjects[i])[1] for i in range(len(subjects))]
208
207
 
209
208
  # check for right operating system
@@ -221,7 +220,7 @@ def make_average_subject(subjects, subject_dir, average_dir, fn_reg='sphere.reg'
221
220
  for i, subject in enumerate(subjects):
222
221
  if not os.path.exists(os.path.join(subject_dir, subject_names[i])):
223
222
  print(('Generating symlink of subject {} in {}'.format(subject_names[i], subject_dir)))
224
- pynibs.bash('ln -s ' + subject + ' ' + subject_dir)
223
+ pynibs.util.utils.bash('ln -s ' + subject + ' ' + subject_dir)
225
224
 
226
225
  # make average template
227
226
  if os.path.exists(f'{subject_dir}{os.sep}avg_template'):
@@ -308,19 +307,19 @@ def data_sub2avg(fn_subject_obj, fn_average_obj, hemisphere, fn_in_hdf5_data, da
308
307
 
309
308
  # filename of mapped subject data in nodes in .curv format for freesurfer (will be generated)
310
309
  fn_data_sub_nodes_mapped_curv = [os.path.join(
311
- os.path.split(fn_out_hdf5_geo)[0],
312
- hemisphere + '.' +
313
- os.path.split(subject_dir)[1] +
314
- '_' +
315
- data_label[i] +
316
- '.curv')
310
+ os.path.split(fn_out_hdf5_geo)[0],
311
+ hemisphere + '.' +
312
+ os.path.split(subject_dir)[1] +
313
+ '_' +
314
+ data_label[i] +
315
+ '.curv')
317
316
  for i in range(len(data_label))]
318
317
 
319
318
  # filename of mapped subject data in nodes on average template in .curv format for freesurfer (will be generated)
320
319
  fn_data_sub_nodes_mapped_avg_curv = [os.path.join(
321
- os.path.split(fn_data_sub_nodes_mapped_curv[i])[0],
322
- os.path.splitext(os.path.split(fn_data_sub_nodes_mapped_curv[i])[1])[0] +
323
- '_avg.curv')
320
+ os.path.split(fn_data_sub_nodes_mapped_curv[i])[0],
321
+ os.path.splitext(os.path.split(fn_data_sub_nodes_mapped_curv[i])[1])[0] +
322
+ '_avg.curv')
324
323
  for i in range(len(data_label))]
325
324
 
326
325
  if verbose:
@@ -353,7 +352,7 @@ def data_sub2avg(fn_subject_obj, fn_average_obj, hemisphere, fn_in_hdf5_data, da
353
352
  if verbose:
354
353
  print('> Transforming subject data from element centers to nodes')
355
354
  # data_sub_nodes = [pynibs.data_elements2nodes(data_sub[i], con_ROI) for i in range(len(data_label))]
356
- data_sub_nodes = pynibs.data_elements2nodes(data_sub, con_roi)
355
+ data_sub_nodes = pynibs.mesh.data_elements2nodes(data_sub, con_roi)
357
356
 
358
357
  else:
359
358
  data_sub_nodes = data_sub
@@ -365,15 +364,17 @@ def data_sub2avg(fn_subject_obj, fn_average_obj, hemisphere, fn_in_hdf5_data, da
365
364
  if subject.roi[mesh_idx][roi_idx]['fn_mask']:
366
365
  if verbose:
367
366
  print('> Mapping point data of ROI to whole brain surface')
368
- data_sub_nodes_mapped = pynibs.map_data_to_surface(datasets=data_sub_nodes,
369
- points_datasets=[points_roi] * len(data_sub_nodes),
370
- con_datasets=[con_roi] * len(data_sub_nodes),
371
- fname_fsl_gm=os.path.join(mesh_folder_sub, fn_sub_gm),
372
- fname_fsl_wm=os.path.join(mesh_folder_sub, fn_sub_wm),
373
- delta=subject.roi[mesh_idx][roi_idx]['delta'],
374
- input_data_in_center=False,
375
- return_data_in_center=False,
376
- data_substitute=data_substitute)
367
+ data_sub_nodes_mapped = pynibs.mesh.map_data_to_surface(datasets=data_sub_nodes,
368
+ points_datasets=[points_roi] * len(data_sub_nodes),
369
+ con_datasets=[con_roi] * len(data_sub_nodes),
370
+ fname_fsl_gm=os.path.join(mesh_folder_sub,
371
+ fn_sub_gm),
372
+ fname_fsl_wm=os.path.join(mesh_folder_sub,
373
+ fn_sub_wm),
374
+ delta=subject.roi[mesh_idx][roi_idx]['delta'],
375
+ input_data_in_center=False,
376
+ return_data_in_center=False,
377
+ data_substitute=data_substitute)
377
378
  else:
378
379
  data_sub_nodes_mapped = data_sub_nodes
379
380
 
@@ -428,7 +429,7 @@ def data_sub2avg(fn_subject_obj, fn_average_obj, hemisphere, fn_in_hdf5_data, da
428
429
  if verbose:
429
430
  print('> Transforming mapped nodal data to element centers')
430
431
  for i in range(len(data_label)):
431
- data_avg_centers_mapped[i] = pynibs.data_nodes2elements(data_avg_nodes_mapped[i], con_avg)[:, np.newaxis]
432
+ data_avg_centers_mapped[i] = pynibs.mesh.data_nodes2elements(data_avg_nodes_mapped[i], con_avg)[:, np.newaxis]
432
433
 
433
434
  # create .hdf5 geometry file
434
435
  if verbose:
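The freesurfer.py changes above call pynibs.mesh.data_nodes2elements to interpolate nodal data to element centers. The snippet below is not the pynibs implementation, only a plain-NumPy illustration of the underlying idea, assuming a triangular surface mesh: each element value is the mean of its three vertex values.

    import numpy as np

    def nodes2elements_mean(node_data, connectivity):
        """Average per-node values onto triangle centers.

        node_data    : (n_nodes,) values defined at mesh nodes
        connectivity : (n_elements, 3) node indices of each triangle
        """
        return node_data[connectivity].mean(axis=1)

    values = np.array([0.0, 1.0, 2.0, 3.0])
    tris = np.array([[0, 1, 2], [1, 2, 3]])
    print(nodes2elements_mean(values, tris))  # -> [1. 2.]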