py-neuromodulation 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. py_neuromodulation/ConnectivityDecoding/Automated Anatomical Labeling 3 (Rolls 2020).nii +0 -0
  2. py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m +34 -0
  3. py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py +106 -0
  4. py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py +119 -0
  5. py_neuromodulation/ConnectivityDecoding/mni_coords_cortical_surface.mat +0 -0
  6. py_neuromodulation/ConnectivityDecoding/mni_coords_whole_brain.mat +0 -0
  7. py_neuromodulation/ConnectivityDecoding/rmap_func_all.nii +0 -0
  8. py_neuromodulation/ConnectivityDecoding/rmap_struc.nii +0 -0
  9. py_neuromodulation/{helper.py → _write_example_dataset_helper.py} +1 -1
  10. py_neuromodulation/nm_EpochStream.py +2 -3
  11. py_neuromodulation/nm_IO.py +43 -70
  12. py_neuromodulation/nm_RMAP.py +308 -11
  13. py_neuromodulation/nm_analysis.py +1 -1
  14. py_neuromodulation/nm_artifacts.py +25 -0
  15. py_neuromodulation/nm_bispectra.py +64 -29
  16. py_neuromodulation/nm_bursts.py +44 -30
  17. py_neuromodulation/nm_coherence.py +2 -1
  18. py_neuromodulation/nm_features.py +4 -2
  19. py_neuromodulation/nm_filter.py +63 -32
  20. py_neuromodulation/nm_filter_preprocessing.py +91 -0
  21. py_neuromodulation/nm_fooof.py +47 -29
  22. py_neuromodulation/nm_mne_connectivity.py +1 -1
  23. py_neuromodulation/nm_normalization.py +50 -74
  24. py_neuromodulation/nm_oscillatory.py +151 -31
  25. py_neuromodulation/nm_plots.py +13 -10
  26. py_neuromodulation/nm_rereference.py +10 -8
  27. py_neuromodulation/nm_run_analysis.py +28 -13
  28. py_neuromodulation/nm_settings.json +51 -3
  29. py_neuromodulation/nm_sharpwaves.py +103 -136
  30. py_neuromodulation/nm_stats.py +44 -30
  31. py_neuromodulation/nm_stream_abc.py +18 -10
  32. py_neuromodulation/nm_stream_offline.py +188 -46
  33. py_neuromodulation/utils/_logging.py +24 -0
  34. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info}/METADATA +72 -32
  35. py_neuromodulation-0.0.4.dist-info/RECORD +72 -0
  36. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info}/WHEEL +1 -1
  37. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/MOV_aligned_features_ch_ECOG_RIGHT_0_all.png +0 -0
  38. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/all_feature_plt.pdf +0 -0
  39. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_FEATURES.csv +0 -182
  40. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_LM_ML_RES.p +0 -0
  41. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_SETTINGS.json +0 -273
  42. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_SIDECAR.json +0 -6
  43. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_decoding_performance.png +0 -0
  44. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_nm_channels.csv +0 -11
  45. py_neuromodulation/py_neuromodulation.egg-info/PKG-INFO +0 -104
  46. py_neuromodulation/py_neuromodulation.egg-info/dependency_links.txt +0 -1
  47. py_neuromodulation/py_neuromodulation.egg-info/requires.txt +0 -26
  48. py_neuromodulation/py_neuromodulation.egg-info/top_level.txt +0 -1
  49. py_neuromodulation-0.0.2.dist-info/RECORD +0 -73
  50. /py_neuromodulation/{py_neuromodulation.egg-info/SOURCES.txt → utils/__init__.py} +0 -0
  51. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info/licenses}/LICENSE +0 -0
py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m
@@ -0,0 +1,34 @@
+ addpath('C:\code\wjn_toolbox');
+ addpath(genpath('C:\code\leaddbs'));
+ addpath(genpath('C:\code\spm12'));
+
+ %%
+ ctx = wjn_mni_cortex();
+ downsample_ctx = ctx.vertices(1:20:end,:); % downsample by a factor of 20
+
+ save("downsampled_cortex.mat", "downsample_ctx")
+
+ figure;
+ scatter3(downsample_ctx(:,1), downsample_ctx(:,2), downsample_ctx(:,3), 'filled');
+ title('3D Scatter Plot Example');
+ xlabel('X-axis');
+ ylabel('Y-axis');
+ zlabel('Z-axis');
+ grid on;
+
+ PATH_OUT = "D:\Connectome_RMAP_OUT\ROIs";
+
+ % write a 4 mm spherical ROI NIfTI at every downsampled vertex
+ for a = 1:size(downsample_ctx, 1)
+     disp(a)
+     roiname = fullfile(PATH_OUT, strcat('ROI-', string(a), '.nii'));
+     mni = [downsample_ctx(a, 1) downsample_ctx(a, 2) downsample_ctx(a, 3)];
+     wjn_spherical_roi(roiname, mni, 4);
+ end
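For readers without MATLAB and the wjn_toolbox, a rough Python stand-in for the ROI-writing loop above might look like the sketch below. The 4 mm radius and the ROI-<n>.nii naming come from the script; the 2 mm MNI-like grid geometry and the semantics of wjn_spherical_roi are assumptions.

    import numpy as np
    import nibabel as nib

    def write_spherical_roi(path_out, mni_xyz, radius_mm=4.0):
        # assumed 2 mm isotropic MNI-like grid (91 x 109 x 91, origin -90/-126/-72)
        shape = (91, 109, 91)
        affine = np.array(
            [[2.0, 0, 0, -90], [0, 2.0, 0, -126], [0, 0, 2.0, -72], [0, 0, 0, 1.0]]
        )
        # MNI coordinate of every voxel centre
        ii, jj, kk = np.meshgrid(*(np.arange(s) for s in shape), indexing="ij")
        vox = np.column_stack((ii.ravel(), jj.ravel(), kk.ravel()))
        xyz = nib.affines.apply_affine(affine, vox)
        # binary sphere around the requested centre
        mask = np.linalg.norm(xyz - np.asarray(mni_xyz), axis=1) <= radius_mm
        nib.save(nib.Nifti1Image(mask.reshape(shape).astype(np.uint8), affine), path_out)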
py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py
@@ -0,0 +1,106 @@
+ import nibabel as nib
+ import numpy as np
+ import scipy.io as sio
+ import os
+ from matplotlib import pyplot as plt
+
+
+ class NiiToMNI:
+
+     def __init__(
+         self,
+         PATH_nii_file: str = r"C:\code\RMap_ROI_Estimation\Automated Anatomical Labeling 3 (Rolls 2020).nii",
+     ) -> None:
+         self.img = nib.load(PATH_nii_file)
+         self.data = self.img.get_fdata()
+
+     def downsample_nii(
+         self,
+         resampling_factor: int = 150,
+     ):
+         # PATH_MNI_TO_ATLAS = r"C:\code\mni_to_atlas\src\mni_to_atlas\atlases\AAL.nii"
+         # img_mni_to_atlas = nib.load(PATH_MNI_TO_ATLAS)
+
+         x_dim, y_dim, z_dim = self.data.shape
+
+         # Create arrays of voxel coordinates
+         x_coords, y_coords, z_coords = np.meshgrid(
+             range(x_dim), range(y_dim), range(z_dim), indexing="ij"
+         )
+
+         # Downsample the voxels here --> check later on whether they have non-zero values
+         x_c_flatten = x_coords.flatten()[::resampling_factor]
+         y_c_flatten = y_coords.flatten()[::resampling_factor]
+         z_c_flatten = z_coords.flatten()[::resampling_factor]
+
+         # Combine coordinates into a single array of homogeneous coordinates
+         voxel_coordinates = np.column_stack(
+             (
+                 x_c_flatten,
+                 y_c_flatten,
+                 z_c_flatten,
+                 np.ones(x_c_flatten.shape[0]),
+             )
+         )
+
+         # note: this aliases (and therefore modifies in place) self.img.affine,
+         # forcing a 2 mm x-spacing and a -90 mm x-origin
+         aff_m = self.img.affine
+         aff_m[0, 0] = 2
+         aff_m[0, 3] = -90
+
+         mni_coordinates = np.dot(aff_m, voxel_coordinates.T).T[:, :3]
+
+         return mni_coordinates
+
+     def select_non_zero_voxels(
+         self,
+         mni_coordinates: np.ndarray,
+     ):
+         coords = np.hstack(
+             (mni_coordinates, np.ones((mni_coordinates.shape[0], 1)))
+         )
+
+         # and transform back to get the voxel values
+         voxels_downsampled = np.array(
+             np.linalg.solve(self.img.affine, coords.T).T
+         ).astype(int)[:, :3]
+
+         ival = []
+         coord_ = []
+         for i in range(voxels_downsampled.shape[0]):
+             ival.append(self.data[tuple(voxels_downsampled[i, :])])
+             coord_.append(mni_coordinates[i, :])
+
+         # keep only the voxels with non-zero values
+         ival = np.array(ival)
+         coord_ = np.array(coord_)
+         ival_non_zero = ival[ival != 0]
+         coord_non_zero = coord_[ival != 0]
+         print(coord_non_zero.shape)
+
+         return coord_non_zero, ival_non_zero
+
+     def plot_3d_coordinates(self, coord_non_zero: np.ndarray):
+         fig = plt.figure()
+         ax = fig.add_subplot(111, projection="3d")
+         ax.scatter(
+             coord_non_zero[:, 0],
+             coord_non_zero[:, 1],
+             coord_non_zero[:, 2],
+             s=50,
+             alpha=0.2,
+         )
+         plt.show()
+
+
+ if __name__ == "__main__":
+     nii_to_mni = NiiToMNI(
+         PATH_nii_file=r"C:\code\py_neuromodulation\ConnectivityDecoding\Automated Anatomical Labeling 3 (Rolls 2020).nii"
+     )
+     mni_coordinates = nii_to_mni.downsample_nii(resampling_factor=150)
+     coord_non_zero, ival_non_zero = nii_to_mni.select_non_zero_voxels(
+         mni_coordinates
+     )
py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py
@@ -0,0 +1,119 @@
+ import nibabel as nib
+ import numpy as np
+ import scipy.io as sio
+ import os
+ import shutil
+ from matplotlib import pyplot as plt
+
+
+ def write_connectome_mat(
+     PATH_Fingerprints: str = r"D:\Connectome_RMAP_OUT\ROIs\HCP1000 6K",
+     PATH_CONNECTOME: str = os.path.join(
+         "py_neuromodulation",
+         "ConnectivityDecoding",
+         "connectome_struct.mat",
+     ),
+     func_: bool = False,
+ ):
+     # connectome = sio.loadmat(PATH_CONNECTOME)  # check if read was successful
+
+     # load all fingerprints and collect them in a dictionary
+     dict_connectome = {}
+     if func_ is False:
+         files_fps = [f for f in os.listdir(PATH_Fingerprints) if ".nii" in f]
+     else:
+         files_fps = [
+             f
+             for f in os.listdir(PATH_Fingerprints)
+             if "func_seed_AvgR_Fz.nii" in f
+         ]
+
+     # I expect 1025 files; check which ones are missing
+     missing_files = []
+
+     for i in range(1, 1026):
+         MISSING = False
+
+         if func_ is False:
+             if f"ROI-{i}_struc_seed.nii" not in files_fps:
+                 missing_files.append(f"ROI-{i}_struc_seed.nii")
+                 MISSING = True
+         else:
+             if f"ROI-{i}_func_seed_AvgR_Fz.nii" not in files_fps:
+                 missing_files.append(f"ROI-{i}_func_seed_AvgR_Fz.nii")
+                 MISSING = True
+
+         if MISSING:
+             ROI_file = os.path.join(
+                 r"D:\Connectome_RMAP_OUT\whole_brain\ROIs", f"ROI-{i}.nii"
+             )
+             # copy the ROI file to the following folder:
+             PATH_ROI_OUT = (
+                 r"D:\Connectome_RMAP_OUT\whole_brain\ROI_missing_struc"
+             )
+             shutil.copy(ROI_file, os.path.join(PATH_ROI_OUT, f"ROI-{i}.nii"))
+
+     for idx, f in enumerate(files_fps):
+         # load the .nii file and store it in the dictionary under the ROI index from the file name
+         fp = (
+             nib.load(os.path.join(PATH_Fingerprints, f))
+             .get_fdata()
+             .astype(np.float16)
+         )
+         if "struc" in f:
+             dict_connectome[f[f.find("ROI-") + 4 : f.find("_struc")]] = fp
+         else:
+             dict_connectome[
+                 f[f.find("ROI-") + 4 : f.find("_func_seed_AvgR_Fz.nii")]
+             ] = fp
+
+         print(idx)
+
+     # save the dictionary
+     sio.savemat(
+         PATH_CONNECTOME,
+         dict_connectome,
+     )
+
+
+ if __name__ == "__main__":
+     write_connectome_mat(
+         PATH_Fingerprints=r"D:\Connectome_RMAP_OUT\whole_brain\struc\HCP1000 6K",
+         PATH_CONNECTOME=os.path.join(
+             "py_neuromodulation",
+             "ConnectivityDecoding",
+             "connectome_whole_brain_struc.mat",
+         ),
+     )  # 58 files are missing
+
+     write_connectome_mat(
+         PATH_Fingerprints=r"D:\Connectome_RMAP_OUT\whole_brain\func",
+         PATH_CONNECTOME=os.path.join(
+             "py_neuromodulation",
+             "ConnectivityDecoding",
+             "connectome_whole_brain_func.mat",
+         ),
+         func_=True,
+     )
+
+     write_connectome_mat(
+         PATH_Fingerprints=r"D:\Connectome_RMAP_OUT\hull\func\GSP 1000 (Yeo 2011)_Full Set (Yeo 2011)",
+         PATH_CONNECTOME=os.path.join(
+             "py_neuromodulation",
+             "ConnectivityDecoding",
+             "connectome_hull_func.mat",
+         ),
+         func_=True,
+     )  # all there
+
+     write_connectome_mat(
+         PATH_Fingerprints=r"D:\Connectome_RMAP_OUT\hull\struc\HCP1000 6K",
+         PATH_CONNECTOME=os.path.join(
+             "py_neuromodulation",
+             "ConnectivityDecoding",
+             "connectome_hull_struc.mat",
+         ),
+     )  # 5 missing
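Reading a fingerprint back out of the saved connectome .mat is then a plain scipy.io round trip. A sketch, assuming scipy round-trips the purely numeric keys the slicing above produces (e.g. "ROI-17_struc_seed.nii" is stored under the key "17"; MATLAB itself would reject such variable names, so loading back in Python is the safer bet):

    import scipy.io as sio

    connectome = sio.loadmat(
        "py_neuromodulation/ConnectivityDecoding/connectome_hull_struc.mat"
    )
    fp = connectome["17"]          # fingerprint volume written for ROI-17
    print(fp.shape, fp.dtype)      # float16, as cast above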
py_neuromodulation/{helper.py → _write_example_dataset_helper.py}
@@ -1,6 +1,6 @@
  import mne
  import mne_bids
- import pybv
+ import pybv # pip install pybv
  import os


py_neuromodulation/nm_EpochStream.py
@@ -4,11 +4,11 @@ import pickle
  import numpy as np
  import pandas as pd

- import py_neuromodulation as py_nm
+ import py_neuromodulation as nm
  from py_neuromodulation import nm_generator


- class EpochStream(py_nm.nm_stream.PNStream):
+ class EpochStream(nm.nm_stream.PNStream):
      def __init__(self) -> None:
          super().__init__()

@@ -43,7 +43,6 @@ class EpochStream(py_nm.nm_stream.PNStream):
      def run(
          self,
      ):
-
          self._set_run()
          # shape is n, channels=7, 800 Hz

py_neuromodulation/nm_IO.py
@@ -2,6 +2,10 @@ import json
  import os
  import sys
  from pathlib import Path
+ import logging
+
+ logger = logging.getLogger("PynmLogger")
+

  import mne
  import mne_bids
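The new module-level logger only produces output once something configures the "PynmLogger" name. The wheel also ships py_neuromodulation/utils/_logging.py (see the file list above), which presumably does this; a minimal manual setup for a standalone script would be:

    import logging

    logger = logging.getLogger("PynmLogger")
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(handler)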
@@ -9,6 +13,9 @@ import numpy as np
  import pandas as pd
  from scipy import io

+ import pyarrow
+ from pyarrow import csv
+
  import py_neuromodulation

  _PathLike = str | os.PathLike
@@ -27,7 +34,6 @@ def load_nm_channels(
      reference Union[list, str]
      """

-
      if isinstance(nm_channels, pd.DataFrame):
          nm_ch_return = nm_channels
      elif nm_channels:
@@ -39,6 +45,7 @@ def load_nm_channels(

      return nm_ch_return

+
  def read_BIDS_data(
      PATH_RUN: _PathLike | mne_bids.BIDSPath,
      BIDS_PATH: _PathLike | None = None,
@@ -70,10 +77,8 @@ def read_BIDS_data(
      if raw_arr.info["line_freq"] is not None:
          line_noise = int(raw_arr.info["line_freq"])
      else:
-         print(
-             "Line noise is not available in the data, using value of {} Hz.".format(
-                 line_noise
-             )
+         logger.info(
+             f"Line noise is not available in the data, using value of {line_noise} Hz."
          )
      return (
          raw_arr,
@@ -120,7 +125,6 @@ def read_grid(PATH_GRIDS: _PathLike | None, grid_str: str) -> pd.DataFrame:
  def get_annotations(
      PATH_ANNOTATIONS: str, PATH_RUN: str, raw_arr: mne.io.RawArray
  ):
-
      try:
          annot = mne.read_annotations(
              Path(PATH_ANNOTATIONS) / (os.path.basename(PATH_RUN)[:-5] + ".txt")
@@ -130,18 +134,19 @@ def get_annotations(
          # annotations starting with "BAD" are omitted with reject_by_annotations 'omit' param
          annot_data = raw_arr.get_data(reject_by_annotation="omit")
      except FileNotFoundError:
-         print("Annotations file could not be found")
-         print(
-             "expected location: "
+         logger.critical(
+             "Annotations file could not be found; "
+             + "expected location: "
              + str(
-                 Path(PATH_ANNOTATIONS) / (os.path.basename(PATH_RUN)[:-5] + ".txt")
+                 Path(PATH_ANNOTATIONS)
+                 / (os.path.basename(PATH_RUN)[:-5] + ".txt")
              )
          )
      return annot, annot_data, raw_arr


  def read_plot_modules(
-     PATH_PLOT: _PathLike = Path(__file__).absolute().parent / "plots"
+     PATH_PLOT: _PathLike = Path(__file__).absolute().parent / "plots",
  ):
      """Read required .mat files for plotting

@@ -180,55 +185,6 @@ def read_plot_modules(
  )


- def add_labels(
-     features: pd.DataFrame,
-     settings: dict,
-     nm_channels: pd.DataFrame,
-     raw_arr_data: np.ndarray,
-     fs: int | float,
- ) -> pd.DataFrame | None:
-     """Given a constructed feature data frame, resample the target labels and add to dataframe
-
-     Parameters
-     ----------
-     features : pd.DataFrame
-         computed feature dataframe
-     settings_wrapper : settings.py
-         initialized settings used for feature estimation
-     raw_arr_data : np.ndarray
-         raw data including target
-
-     Returns
-     -------
-     pd.DataFrame | None
-         computed feature dataframe including resampled features
-     """
-     # resample_label
-     ind_label = np.where(nm_channels.target == 1)[0]
-     if ind_label.shape[0] == 0:
-         print("no target specified")
-         return None
-
-     offset_time = settings["segment_length_features_ms"]
-
-     offset_start = np.ceil(offset_time / 1000 * fs).astype(int)
-     data = raw_arr_data[ind_label, offset_start:]
-     if data.ndim == 1:
-         data = np.expand_dims(data, axis=0)
-     label_downsampled = data[
-         :,
-         :: int(np.ceil(fs / settings["sampling_rate_features_hz"])),
-     ]
-
-     # and add to df
-     if features.shape[0] == label_downsampled.shape[1]:
-         for idx, label_ch in enumerate(nm_channels.name[ind_label]):
-             features[label_ch] = label_downsampled[idx, :]
-     else:
-         print("label dimensions don't match, saving downsampled label extra")
-     return features
-
-
  def save_features_and_settings(
      df_features,
      run_analysis,
@@ -256,7 +212,7 @@ def save_features_and_settings(

      # create out folder if doesn't exist
      if not os.path.exists(os.path.join(out_path, folder_name)):
-         print("Creating output folder: " + str(folder_name))
+         logger.info(f"Creating output folder: {folder_name}")
          os.makedirs(os.path.join(out_path, folder_name))

      dict_sidecar = {"fs": fs, "coords": coords, "line_noise": line_noise}
@@ -267,6 +223,16 @@ def save_features_and_settings(
      save_nm_channels(nm_channels, out_path, folder_name)


+ def write_csv(df, path_out):
+     """
+     Save a pandas DataFrame to disk as CSV using PyArrow
+     (almost 10x faster than pandas).
+     Unlike pandas.DataFrame.to_csv(), this does not write
+     an index column by default.
+     """
+     csv.write_csv(pyarrow.Table.from_pandas(df), path_out)
+
+
  def save_settings(
      settings: dict, path_out: _PathLike, folder_name: str | None = None
  ) -> None:
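A quick round trip through the new writer (a sketch; the DataFrame and file name are made up, and reading back with engine="pyarrow" mirrors the read_features change later in this diff):

    import pandas as pd
    import pyarrow
    from pyarrow import csv

    df = pd.DataFrame({"ch1_beta": [0.5, 0.25], "MOV": [0.0, 1.0]})
    csv.write_csv(pyarrow.Table.from_pandas(df), "run_FEATURES.csv")
    df_back = pd.read_csv("run_FEATURES.csv", engine="pyarrow")
    print(df_back)          # same columns back, no "Unnamed: 0" index column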
@@ -278,7 +244,7 @@ def save_settings(

      with open(path_out, "w") as f:
          json.dump(settings, f, indent=4)
-     print("settings.json saved to " + path_out)
+     logger.info(f"settings.json saved to {path_out}")


  def save_nm_channels(
@@ -291,8 +257,8 @@ def save_nm_channels(
          path_out = os.path.join(
              path_out, folder_name, folder_name + "_nm_channels.csv"
          )
-     nmchannels.to_csv(path_out)
-     print("nm_channels.csv saved to " + path_out)
+     write_csv(nmchannels, path_out)
+     logger.info(f"nm_channels.csv saved to {path_out}")


  def save_features(
@@ -305,8 +271,8 @@ def save_features(
          path_out = os.path.join(
              path_out, folder_name, folder_name + "_FEATURES.csv"
          )
-     df_features.to_csv(path_out)
-     print("FEATURES.csv saved to " + str(path_out))
+     write_csv(df_features, path_out)
+     logger.info(f"FEATURES.csv saved to {str(path_out)}")


  def save_sidecar(
@@ -333,7 +299,7 @@ def save_general_dict(
              indent=4,
              separators=(",", ": "),
          )
-     print(f"{str_add} saved to " + str(path_out))
+     logger.info(f"{str_add} saved to " + str(path_out))


  def default_json_convert(obj) -> list | int | float:
@@ -359,11 +325,11 @@ def read_settings(PATH: str) -> dict:


  def read_features(PATH: str) -> pd.DataFrame:
-     return pd.read_csv(PATH + "_FEATURES.csv", index_col=0)
+     return pd.read_csv(PATH + "_FEATURES.csv", engine="pyarrow")


  def read_nm_channels(PATH: str) -> pd.DataFrame:
-     return pd.read_csv(PATH + "_nm_channels.csv", index_col=0)
+     return pd.read_csv(PATH + "_nm_channels.csv")


  def get_run_list_indir(PATH: str) -> list:
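One caveat on dropping index_col=0 here: FEATURES and nm_channels files written by 0.0.2 via DataFrame.to_csv() still contain an index column, which the new readers surface as an unnamed column (pandas names it "Unnamed: 0" with the default engine). A hedged sketch for cleaning such legacy files:

    import pandas as pd

    df = pd.read_csv("old_run_FEATURES.csv")                 # file written by 0.0.2
    df = df.drop(columns=["Unnamed: 0"], errors="ignore")    # drop the stale index column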
@@ -405,7 +371,14 @@ def get_paths_example_data():

      PATH_BIDS = Path(SCRIPT_DIR) / "data"

-     PATH_RUN = Path(SCRIPT_DIR) / "data" / f"sub-{sub}" / f"ses-{ses}" / datatype / RUN_NAME
+     PATH_RUN = (
+         Path(SCRIPT_DIR)
+         / "data"
+         / f"sub-{sub}"
+         / f"ses-{ses}"
+         / datatype
+         / RUN_NAME
+     )

      # Provide a path for the output data.
      PATH_OUT = PATH_BIDS / "derivatives"