py-neuromodulation 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (233)
  1. py_neuromodulation/ConnectivityDecoding/Automated Anatomical Labeling 3 (Rolls 2020).nii +0 -0
  2. py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m +34 -0
  3. py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py +95 -0
  4. py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py +107 -0
  5. py_neuromodulation/ConnectivityDecoding/mni_coords_cortical_surface.mat +0 -0
  6. py_neuromodulation/ConnectivityDecoding/mni_coords_whole_brain.mat +0 -0
  7. py_neuromodulation/ConnectivityDecoding/rmap_func_all.nii +0 -0
  8. py_neuromodulation/ConnectivityDecoding/rmap_struc.nii +0 -0
  9. py_neuromodulation/FieldTrip.py +589 -589
  10. py_neuromodulation/__init__.py +74 -13
  11. py_neuromodulation/_write_example_dataset_helper.py +83 -65
  12. py_neuromodulation/data/README +6 -0
  13. py_neuromodulation/data/dataset_description.json +8 -0
  14. py_neuromodulation/data/participants.json +32 -0
  15. py_neuromodulation/data/participants.tsv +2 -0
  16. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_coordsystem.json +5 -0
  17. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_electrodes.tsv +11 -0
  18. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_channels.tsv +11 -0
  19. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.eeg +0 -0
  20. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.json +18 -0
  21. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vhdr +35 -0
  22. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vmrk +13 -0
  23. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/sub-testsub_ses-EphysMedOff_scans.tsv +2 -0
  24. py_neuromodulation/grid_cortex.tsv +40 -0
  25. py_neuromodulation/grid_subcortex.tsv +1429 -0
  26. py_neuromodulation/liblsl/libpugixml.so.1.12 +0 -0
  27. py_neuromodulation/liblsl/linux/bionic_amd64/liblsl.1.16.2.so +0 -0
  28. py_neuromodulation/liblsl/linux/bookworm_amd64/liblsl.1.16.2.so +0 -0
  29. py_neuromodulation/liblsl/linux/focal_amd46/liblsl.1.16.2.so +0 -0
  30. py_neuromodulation/liblsl/linux/jammy_amd64/liblsl.1.16.2.so +0 -0
  31. py_neuromodulation/liblsl/linux/jammy_x86/liblsl.1.16.2.so +0 -0
  32. py_neuromodulation/liblsl/linux/noble_amd64/liblsl.1.16.2.so +0 -0
  33. py_neuromodulation/liblsl/macos/amd64/liblsl.1.16.2.dylib +0 -0
  34. py_neuromodulation/liblsl/macos/arm64/liblsl.1.16.0.dylib +0 -0
  35. py_neuromodulation/liblsl/windows/amd64/liblsl.1.16.2.dll +0 -0
  36. py_neuromodulation/liblsl/windows/x86/liblsl.1.16.2.dll +0 -0
  37. py_neuromodulation/nm_IO.py +413 -417
  38. py_neuromodulation/nm_RMAP.py +496 -531
  39. py_neuromodulation/nm_analysis.py +993 -1074
  40. py_neuromodulation/nm_artifacts.py +30 -25
  41. py_neuromodulation/nm_bispectra.py +154 -168
  42. py_neuromodulation/nm_bursts.py +292 -198
  43. py_neuromodulation/nm_coherence.py +251 -205
  44. py_neuromodulation/nm_database.py +149 -0
  45. py_neuromodulation/nm_decode.py +918 -992
  46. py_neuromodulation/nm_define_nmchannels.py +300 -302
  47. py_neuromodulation/nm_features.py +144 -116
  48. py_neuromodulation/nm_filter.py +219 -219
  49. py_neuromodulation/nm_filter_preprocessing.py +79 -91
  50. py_neuromodulation/nm_fooof.py +139 -159
  51. py_neuromodulation/nm_generator.py +45 -37
  52. py_neuromodulation/nm_hjorth_raw.py +52 -73
  53. py_neuromodulation/nm_kalmanfilter.py +71 -58
  54. py_neuromodulation/nm_linelength.py +21 -33
  55. py_neuromodulation/nm_logger.py +66 -0
  56. py_neuromodulation/nm_mne_connectivity.py +149 -112
  57. py_neuromodulation/nm_mnelsl_generator.py +90 -0
  58. py_neuromodulation/nm_mnelsl_stream.py +116 -0
  59. py_neuromodulation/nm_nolds.py +96 -93
  60. py_neuromodulation/nm_normalization.py +173 -214
  61. py_neuromodulation/nm_oscillatory.py +423 -448
  62. py_neuromodulation/nm_plots.py +585 -612
  63. py_neuromodulation/nm_preprocessing.py +83 -0
  64. py_neuromodulation/nm_projection.py +370 -394
  65. py_neuromodulation/nm_rereference.py +97 -95
  66. py_neuromodulation/nm_resample.py +59 -50
  67. py_neuromodulation/nm_run_analysis.py +325 -435
  68. py_neuromodulation/nm_settings.py +289 -68
  69. py_neuromodulation/nm_settings.yaml +244 -0
  70. py_neuromodulation/nm_sharpwaves.py +423 -401
  71. py_neuromodulation/nm_stats.py +464 -480
  72. py_neuromodulation/nm_stream.py +398 -0
  73. py_neuromodulation/nm_stream_abc.py +166 -218
  74. py_neuromodulation/nm_types.py +193 -0
  75. py_neuromodulation/plots/STN_surf.mat +0 -0
  76. py_neuromodulation/plots/Vertices.mat +0 -0
  77. py_neuromodulation/plots/faces.mat +0 -0
  78. py_neuromodulation/plots/grid.mat +0 -0
  79. {py_neuromodulation-0.0.3.dist-info → py_neuromodulation-0.0.5.dist-info}/METADATA +185 -182
  80. py_neuromodulation-0.0.5.dist-info/RECORD +83 -0
  81. {py_neuromodulation-0.0.3.dist-info → py_neuromodulation-0.0.5.dist-info}/WHEEL +1 -2
  82. {py_neuromodulation-0.0.3.dist-info → py_neuromodulation-0.0.5.dist-info/licenses}/LICENSE +21 -21
  83. docs/build/_downloads/09df217f95985497f45d69e2d4bdc5b1/plot_2_example_add_feature.py +0 -68
  84. docs/build/_downloads/3b4900a2b2818ff30362215b76f7d5eb/plot_1_example_BIDS.py +0 -233
  85. docs/build/_downloads/7e92dd2e6cc86b239d14cafad972ae4f/plot_3_example_sharpwave_analysis.py +0 -219
  86. docs/build/_downloads/c2db0bf2b334d541b00662b991682256/plot_6_real_time_demo.py +0 -97
  87. docs/build/_downloads/ce3914826f782cbd1ea8fd024eaf0ac3/plot_5_example_rmap_computing.py +0 -64
  88. docs/build/_downloads/da36848a41e6a3235d91fb7cfb6d59b4/plot_0_first_demo.py +0 -192
  89. docs/build/_downloads/eaa4305c75b19a1e2eea941f742a6331/plot_4_example_gridPointProjection.py +0 -210
  90. docs/build/html/_downloads/09df217f95985497f45d69e2d4bdc5b1/plot_2_example_add_feature.py +0 -68
  91. docs/build/html/_downloads/3b4900a2b2818ff30362215b76f7d5eb/plot_1_example_BIDS.py +0 -239
  92. docs/build/html/_downloads/7e92dd2e6cc86b239d14cafad972ae4f/plot_3_example_sharpwave_analysis.py +0 -219
  93. docs/build/html/_downloads/c2db0bf2b334d541b00662b991682256/plot_6_real_time_demo.py +0 -97
  94. docs/build/html/_downloads/ce3914826f782cbd1ea8fd024eaf0ac3/plot_5_example_rmap_computing.py +0 -64
  95. docs/build/html/_downloads/da36848a41e6a3235d91fb7cfb6d59b4/plot_0_first_demo.py +0 -192
  96. docs/build/html/_downloads/eaa4305c75b19a1e2eea941f742a6331/plot_4_example_gridPointProjection.py +0 -210
  97. docs/source/_build/html/_downloads/09df217f95985497f45d69e2d4bdc5b1/plot_2_example_add_feature.py +0 -76
  98. docs/source/_build/html/_downloads/0d0d0a76e8f648d5d3cbc47da6351932/plot_real_time_demo.py +0 -97
  99. docs/source/_build/html/_downloads/3b4900a2b2818ff30362215b76f7d5eb/plot_1_example_BIDS.py +0 -240
  100. docs/source/_build/html/_downloads/5d73cadc59a8805c47e3b84063afc157/plot_example_BIDS.py +0 -233
  101. docs/source/_build/html/_downloads/7660317fa5a6bfbd12fcca9961457fc4/plot_example_rmap_computing.py +0 -63
  102. docs/source/_build/html/_downloads/7e92dd2e6cc86b239d14cafad972ae4f/plot_3_example_sharpwave_analysis.py +0 -219
  103. docs/source/_build/html/_downloads/839e5b319379f7fd9e867deb00fd797f/plot_example_gridPointProjection.py +0 -210
  104. docs/source/_build/html/_downloads/ae8be19afe5e559f011fc9b138968ba0/plot_first_demo.py +0 -192
  105. docs/source/_build/html/_downloads/b8b06cacc17969d3725a0b6f1d7741c5/plot_example_sharpwave_analysis.py +0 -219
  106. docs/source/_build/html/_downloads/c2db0bf2b334d541b00662b991682256/plot_6_real_time_demo.py +0 -121
  107. docs/source/_build/html/_downloads/c31a86c0b68cb4167d968091ace8080d/plot_example_add_feature.py +0 -68
  108. docs/source/_build/html/_downloads/ce3914826f782cbd1ea8fd024eaf0ac3/plot_5_example_rmap_computing.py +0 -64
  109. docs/source/_build/html/_downloads/da36848a41e6a3235d91fb7cfb6d59b4/plot_0_first_demo.py +0 -189
  110. docs/source/_build/html/_downloads/eaa4305c75b19a1e2eea941f742a6331/plot_4_example_gridPointProjection.py +0 -210
  111. docs/source/auto_examples/plot_0_first_demo.py +0 -189
  112. docs/source/auto_examples/plot_1_example_BIDS.py +0 -240
  113. docs/source/auto_examples/plot_2_example_add_feature.py +0 -76
  114. docs/source/auto_examples/plot_3_example_sharpwave_analysis.py +0 -219
  115. docs/source/auto_examples/plot_4_example_gridPointProjection.py +0 -210
  116. docs/source/auto_examples/plot_5_example_rmap_computing.py +0 -64
  117. docs/source/auto_examples/plot_6_real_time_demo.py +0 -121
  118. docs/source/conf.py +0 -105
  119. examples/plot_0_first_demo.py +0 -189
  120. examples/plot_1_example_BIDS.py +0 -240
  121. examples/plot_2_example_add_feature.py +0 -76
  122. examples/plot_3_example_sharpwave_analysis.py +0 -219
  123. examples/plot_4_example_gridPointProjection.py +0 -210
  124. examples/plot_5_example_rmap_computing.py +0 -64
  125. examples/plot_6_real_time_demo.py +0 -121
  126. packages/realtime_decoding/build/lib/realtime_decoding/__init__.py +0 -4
  127. packages/realtime_decoding/build/lib/realtime_decoding/decoder.py +0 -104
  128. packages/realtime_decoding/build/lib/realtime_decoding/features.py +0 -163
  129. packages/realtime_decoding/build/lib/realtime_decoding/helpers.py +0 -15
  130. packages/realtime_decoding/build/lib/realtime_decoding/run_decoding.py +0 -345
  131. packages/realtime_decoding/build/lib/realtime_decoding/trainer.py +0 -54
  132. packages/tmsi/build/lib/TMSiFileFormats/__init__.py +0 -37
  133. packages/tmsi/build/lib/TMSiFileFormats/file_formats/__init__.py +0 -36
  134. packages/tmsi/build/lib/TMSiFileFormats/file_formats/lsl_stream_writer.py +0 -200
  135. packages/tmsi/build/lib/TMSiFileFormats/file_formats/poly5_file_writer.py +0 -496
  136. packages/tmsi/build/lib/TMSiFileFormats/file_formats/poly5_to_edf_converter.py +0 -236
  137. packages/tmsi/build/lib/TMSiFileFormats/file_formats/xdf_file_writer.py +0 -977
  138. packages/tmsi/build/lib/TMSiFileFormats/file_readers/__init__.py +0 -35
  139. packages/tmsi/build/lib/TMSiFileFormats/file_readers/edf_reader.py +0 -116
  140. packages/tmsi/build/lib/TMSiFileFormats/file_readers/poly5reader.py +0 -294
  141. packages/tmsi/build/lib/TMSiFileFormats/file_readers/xdf_reader.py +0 -229
  142. packages/tmsi/build/lib/TMSiFileFormats/file_writer.py +0 -102
  143. packages/tmsi/build/lib/TMSiPlotters/__init__.py +0 -2
  144. packages/tmsi/build/lib/TMSiPlotters/gui/__init__.py +0 -39
  145. packages/tmsi/build/lib/TMSiPlotters/gui/_plotter_gui.py +0 -234
  146. packages/tmsi/build/lib/TMSiPlotters/gui/plotting_gui.py +0 -440
  147. packages/tmsi/build/lib/TMSiPlotters/plotters/__init__.py +0 -44
  148. packages/tmsi/build/lib/TMSiPlotters/plotters/hd_emg_plotter.py +0 -446
  149. packages/tmsi/build/lib/TMSiPlotters/plotters/impedance_plotter.py +0 -589
  150. packages/tmsi/build/lib/TMSiPlotters/plotters/signal_plotter.py +0 -1326
  151. packages/tmsi/build/lib/TMSiSDK/__init__.py +0 -54
  152. packages/tmsi/build/lib/TMSiSDK/device.py +0 -588
  153. packages/tmsi/build/lib/TMSiSDK/devices/__init__.py +0 -34
  154. packages/tmsi/build/lib/TMSiSDK/devices/saga/TMSi_Device_API.py +0 -1764
  155. packages/tmsi/build/lib/TMSiSDK/devices/saga/__init__.py +0 -34
  156. packages/tmsi/build/lib/TMSiSDK/devices/saga/saga_device.py +0 -1366
  157. packages/tmsi/build/lib/TMSiSDK/devices/saga/saga_types.py +0 -520
  158. packages/tmsi/build/lib/TMSiSDK/devices/saga/xml_saga_config.py +0 -165
  159. packages/tmsi/build/lib/TMSiSDK/error.py +0 -95
  160. packages/tmsi/build/lib/TMSiSDK/sample_data.py +0 -63
  161. packages/tmsi/build/lib/TMSiSDK/sample_data_server.py +0 -99
  162. packages/tmsi/build/lib/TMSiSDK/settings.py +0 -45
  163. packages/tmsi/build/lib/TMSiSDK/tmsi_device.py +0 -111
  164. packages/tmsi/build/lib/__init__.py +0 -4
  165. packages/tmsi/build/lib/apex_sdk/__init__.py +0 -34
  166. packages/tmsi/build/lib/apex_sdk/device/__init__.py +0 -41
  167. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_API.py +0 -1009
  168. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_API_enums.py +0 -239
  169. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_API_structures.py +0 -668
  170. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_device.py +0 -1611
  171. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_dongle.py +0 -38
  172. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_event_reader.py +0 -57
  173. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_channel.py +0 -44
  174. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_config.py +0 -150
  175. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_const.py +0 -36
  176. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_impedance_channel.py +0 -48
  177. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_info.py +0 -108
  178. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/dongle_info.py +0 -39
  179. packages/tmsi/build/lib/apex_sdk/device/devices/apex/measurements/download_measurement.py +0 -77
  180. packages/tmsi/build/lib/apex_sdk/device/devices/apex/measurements/eeg_measurement.py +0 -150
  181. packages/tmsi/build/lib/apex_sdk/device/devices/apex/measurements/impedance_measurement.py +0 -129
  182. packages/tmsi/build/lib/apex_sdk/device/threads/conversion_thread.py +0 -59
  183. packages/tmsi/build/lib/apex_sdk/device/threads/sampling_thread.py +0 -57
  184. packages/tmsi/build/lib/apex_sdk/device/tmsi_channel.py +0 -83
  185. packages/tmsi/build/lib/apex_sdk/device/tmsi_device.py +0 -201
  186. packages/tmsi/build/lib/apex_sdk/device/tmsi_device_enums.py +0 -103
  187. packages/tmsi/build/lib/apex_sdk/device/tmsi_dongle.py +0 -43
  188. packages/tmsi/build/lib/apex_sdk/device/tmsi_event_reader.py +0 -50
  189. packages/tmsi/build/lib/apex_sdk/device/tmsi_measurement.py +0 -118
  190. packages/tmsi/build/lib/apex_sdk/sample_data_server/__init__.py +0 -33
  191. packages/tmsi/build/lib/apex_sdk/sample_data_server/event_data.py +0 -44
  192. packages/tmsi/build/lib/apex_sdk/sample_data_server/sample_data.py +0 -50
  193. packages/tmsi/build/lib/apex_sdk/sample_data_server/sample_data_server.py +0 -136
  194. packages/tmsi/build/lib/apex_sdk/tmsi_errors/error.py +0 -126
  195. packages/tmsi/build/lib/apex_sdk/tmsi_sdk.py +0 -113
  196. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/apex/apex_structure_generator.py +0 -134
  197. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/decorators.py +0 -60
  198. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/logger_filter.py +0 -42
  199. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/singleton.py +0 -42
  200. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/support_functions.py +0 -72
  201. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/tmsi_logger.py +0 -98
  202. py_neuromodulation/nm_EpochStream.py +0 -92
  203. py_neuromodulation/nm_across_patient_decoding.py +0 -927
  204. py_neuromodulation/nm_cohortwrapper.py +0 -435
  205. py_neuromodulation/nm_eval_timing.py +0 -239
  206. py_neuromodulation/nm_features_abc.py +0 -39
  207. py_neuromodulation/nm_stream_offline.py +0 -358
  208. py_neuromodulation/utils/_logging.py +0 -24
  209. py_neuromodulation-0.0.3.dist-info/RECORD +0 -188
  210. py_neuromodulation-0.0.3.dist-info/top_level.txt +0 -5
  211. tests/__init__.py +0 -0
  212. tests/conftest.py +0 -117
  213. tests/test_all_examples.py +0 -10
  214. tests/test_all_features.py +0 -63
  215. tests/test_bispectra.py +0 -70
  216. tests/test_bursts.py +0 -105
  217. tests/test_feature_sampling_rates.py +0 -143
  218. tests/test_fooof.py +0 -16
  219. tests/test_initalization_offline_stream.py +0 -41
  220. tests/test_multiprocessing.py +0 -58
  221. tests/test_nan_values.py +0 -29
  222. tests/test_nm_filter.py +0 -95
  223. tests/test_nm_resample.py +0 -63
  224. tests/test_normalization_settings.py +0 -146
  225. tests/test_notch_filter.py +0 -31
  226. tests/test_osc_features.py +0 -424
  227. tests/test_preprocessing_filter.py +0 -151
  228. tests/test_rereference.py +0 -171
  229. tests/test_sampling.py +0 -57
  230. tests/test_settings_change_after_init.py +0 -76
  231. tests/test_sharpwave.py +0 -165
  232. tests/test_target_channel_add.py +0 -100
  233. tests/test_timing.py +0 -80
@@ -1,417 +1,413 @@
1
- import json
2
- import os
3
- import sys
4
- from pathlib import Path
5
- import logging
6
-
7
- logger = logging.getLogger("PynmLogger")
8
-
9
-
10
- import mne
11
- import mne_bids
12
- import numpy as np
13
- import pandas as pd
14
- from scipy import io
15
-
16
- import pyarrow
17
- from pyarrow import csv
18
-
19
- import py_neuromodulation
20
-
21
- _PathLike = str | os.PathLike
22
-
23
-
24
- def load_nm_channels(
25
- nm_channels: pd.DataFrame | _PathLike,
26
- ) -> pd.DataFrame:
27
- """Read nm_channels from path or specify via BIDS arguments.
28
- Nexessary parameters are then
29
- ch_names (list),
30
- ch_types (list),
31
- bads (list)
32
- used_types (list)
33
- target_keywords (list)
34
- reference Union[list, str]
35
- """
36
-
37
- if isinstance(nm_channels, pd.DataFrame):
38
- nm_ch_return = nm_channels
39
- elif nm_channels:
40
- if not os.path.isfile(nm_channels):
41
- raise ValueError(
42
- "PATH_NM_CHANNELS is not a valid file. Got: " f"{nm_channels}"
43
- )
44
- nm_ch_return = pd.read_csv(nm_channels)
45
-
46
- return nm_ch_return
47
-
48
-
49
- def read_BIDS_data(
50
- PATH_RUN: _PathLike | mne_bids.BIDSPath,
51
- BIDS_PATH: _PathLike | None = None,
52
- datatype: str = "ieeg",
53
- line_noise: int = 50,
54
- ) -> tuple[mne.io.Raw, np.ndarray, int | float, int, list | None, list | None]:
55
- """Given a run path and bids data path, read the respective data
56
-
57
- Parameters
58
- ----------
59
- PATH_RUN : string
60
- BIDS_PATH : string
61
- datatype : string
62
-
63
- Returns
64
- -------
65
- raw_arr : mne.io.RawArray
66
- raw_arr_data : np.ndarray
67
- fs : int
68
- line_noise : int
69
- """
70
- if isinstance(PATH_RUN, mne_bids.BIDSPath):
71
- bids_path = PATH_RUN
72
- else:
73
- bids_path = mne_bids.get_bids_path_from_fname(PATH_RUN)
74
-
75
- raw_arr = mne_bids.read_raw_bids(bids_path)
76
- coord_list, coord_names = get_coord_list(raw_arr)
77
- if raw_arr.info["line_freq"] is not None:
78
- line_noise = int(raw_arr.info["line_freq"])
79
- else:
80
- logger.info(
81
- f"Line noise is not available in the data, using value of {line_noise} Hz."
82
- )
83
- return (
84
- raw_arr,
85
- raw_arr.get_data(),
86
- raw_arr.info["sfreq"],
87
- line_noise,
88
- coord_list,
89
- coord_names,
90
- )
91
-
92
-
93
- def get_coord_list(
94
- raw: mne.io.BaseRaw,
95
- ) -> tuple[list, list] | tuple[None, None]:
96
- montage = raw.get_montage()
97
- if montage is not None:
98
- coord_list = np.array(
99
- list(dict(montage.get_positions()["ch_pos"]).values())
100
- ).tolist()
101
- coord_names = np.array(
102
- list(dict(montage.get_positions()["ch_pos"]).keys())
103
- ).tolist()
104
- else:
105
- coord_list = None
106
- coord_names = None
107
-
108
- return coord_list, coord_names
109
-
110
-
111
- def read_grid(PATH_GRIDS: _PathLike | None, grid_str: str) -> pd.DataFrame:
112
- if PATH_GRIDS is None:
113
- grid = pd.read_csv(
114
- Path(__file__).parent / ("grid_" + grid_str.lower() + ".tsv"),
115
- sep="\t",
116
- )
117
- else:
118
- grid = pd.read_csv(
119
- Path(PATH_GRIDS) / ("grid_" + grid_str.lower() + ".tsv"),
120
- sep="\t",
121
- )
122
- return grid
123
-
124
-
125
- def get_annotations(
126
- PATH_ANNOTATIONS: str, PATH_RUN: str, raw_arr: mne.io.RawArray
127
- ):
128
- try:
129
- annot = mne.read_annotations(
130
- Path(PATH_ANNOTATIONS) / (os.path.basename(PATH_RUN)[:-5] + ".txt")
131
- )
132
- raw_arr.set_annotations(annot)
133
-
134
- # annotations starting with "BAD" are omitted with reject_by_annotations 'omit' param
135
- annot_data = raw_arr.get_data(reject_by_annotation="omit")
136
- except FileNotFoundError:
137
- logger.critical(
138
- "Annotations file could not be found"
139
- + "expected location: "
140
- + str(
141
- Path(PATH_ANNOTATIONS)
142
- / (os.path.basename(PATH_RUN)[:-5] + ".txt")
143
- )
144
- )
145
- return annot, annot_data, raw_arr
146
-
147
-
148
- def read_plot_modules(
149
- PATH_PLOT: _PathLike = Path(__file__).absolute().parent / "plots",
150
- ):
151
- """Read required .mat files for plotting
152
-
153
- Parameters
154
- ----------
155
- PATH_PLOT : regexp, optional
156
- path to plotting files, by default
157
- """
158
-
159
- faces = io.loadmat(os.path.join(PATH_PLOT, "faces.mat"))
160
- vertices = io.loadmat(os.path.join(PATH_PLOT, "Vertices.mat"))
161
- grid = io.loadmat(os.path.join(PATH_PLOT, "grid.mat"))["grid"]
162
- stn_surf = io.loadmat(os.path.join(PATH_PLOT, "STN_surf.mat"))
163
- x_ver = stn_surf["vertices"][::2, 0]
164
- y_ver = stn_surf["vertices"][::2, 1]
165
- x_ecog = vertices["Vertices"][::1, 0]
166
- y_ecog = vertices["Vertices"][::1, 1]
167
- z_ecog = vertices["Vertices"][::1, 2]
168
- x_stn = stn_surf["vertices"][::1, 0]
169
- y_stn = stn_surf["vertices"][::1, 1]
170
- z_stn = stn_surf["vertices"][::1, 2]
171
-
172
- return (
173
- faces,
174
- vertices,
175
- grid,
176
- stn_surf,
177
- x_ver,
178
- y_ver,
179
- x_ecog,
180
- y_ecog,
181
- z_ecog,
182
- x_stn,
183
- y_stn,
184
- z_stn,
185
- )
186
-
187
-
188
- def save_features_and_settings(
189
- df_features,
190
- run_analysis,
191
- folder_name,
192
- out_path,
193
- settings,
194
- nm_channels,
195
- coords,
196
- fs,
197
- line_noise,
198
- ) -> None:
199
- """save settings.json, nm_channels.csv and features.csv
200
-
201
- Parameters
202
- ----------
203
- df_ : pd.Dataframe
204
- feature dataframe
205
- run_analysis_ : run_analysis.py object
206
- This includes all (optionally projected) run_analysis estimated data
207
- inluding added the resampled labels in features_arr
208
- folder_name : string
209
- output path
210
- settings_wrapper : settings.py object
211
- """
212
-
213
- # create out folder if doesn't exist
214
- if not os.path.exists(os.path.join(out_path, folder_name)):
215
- logger.Info(f"Creating output folder: {folder_name}")
216
- os.makedirs(os.path.join(out_path, folder_name))
217
-
218
- dict_sidecar = {"fs": fs, "coords": coords, "line_noise": line_noise}
219
-
220
- save_sidecar(dict_sidecar, out_path, folder_name)
221
- save_features(df_features, out_path, folder_name)
222
- save_settings(settings, out_path, folder_name)
223
- save_nm_channels(nm_channels, out_path, folder_name)
224
-
225
-
226
- def write_csv(df, path_out):
227
- """
228
- Function to save Pandas dataframes to disk as CSV using
229
- PyArrow (almost 10x faster than Pandas)
230
- Difference with pandas.df.to_csv() is that it does not
231
- write an index column by default
232
- """
233
- csv.write_csv(pyarrow.Table.from_pandas(df), path_out)
234
-
235
-
236
- def save_settings(
237
- settings: dict, path_out: _PathLike, folder_name: str | None = None
238
- ) -> None:
239
- path_out = _pathlike_to_str(path_out)
240
- if folder_name is not None:
241
- path_out = os.path.join(
242
- path_out, folder_name, folder_name + "_SETTINGS.json"
243
- )
244
-
245
- with open(path_out, "w") as f:
246
- json.dump(settings, f, indent=4)
247
- logger.info(f"settings.json saved to {path_out}")
248
-
249
-
250
- def save_nm_channels(
251
- nmchannels: pd.DataFrame,
252
- path_out: _PathLike,
253
- folder_name: str | None = None,
254
- ) -> None:
255
- path_out = _pathlike_to_str(path_out)
256
- if folder_name is not None:
257
- path_out = os.path.join(
258
- path_out, folder_name, folder_name + "_nm_channels.csv"
259
- )
260
- write_csv(nmchannels, path_out)
261
- logger.info(f"nm_channels.csv saved to {path_out}")
262
-
263
-
264
- def save_features(
265
- df_features: pd.DataFrame,
266
- path_out: _PathLike,
267
- folder_name: str | None = None,
268
- ) -> None:
269
- path_out = _pathlike_to_str(path_out)
270
- if folder_name is not None:
271
- path_out = os.path.join(
272
- path_out, folder_name, folder_name + "_FEATURES.csv"
273
- )
274
- write_csv(df_features, path_out)
275
- logger.info(f"FEATURES.csv saved to {str(path_out)}")
276
-
277
-
278
- def save_sidecar(
279
- sidecar: dict, path_out: _PathLike, folder_name: str | None = None
280
- ) -> None:
281
- path_out = _pathlike_to_str(path_out)
282
- save_general_dict(sidecar, path_out, "_SIDECAR.json", folder_name)
283
-
284
-
285
- def save_general_dict(
286
- dict_: dict,
287
- path_out: _PathLike,
288
- str_add: str,
289
- folder_name: str | None = None,
290
- ) -> None:
291
- if folder_name is not None:
292
- path_out = os.path.join(path_out, folder_name, folder_name + str_add)
293
-
294
- with open(path_out, "w") as f:
295
- json.dump(
296
- dict_,
297
- f,
298
- default=default_json_convert,
299
- indent=4,
300
- separators=(",", ": "),
301
- )
302
- logger.info(f"{str_add} saved to " + str(path_out))
303
-
304
-
305
- def default_json_convert(obj) -> list | int | float:
306
- if isinstance(obj, np.ndarray):
307
- return obj.tolist()
308
- if isinstance(obj, pd.DataFrame):
309
- return obj.to_numpy().tolist()
310
- if isinstance(obj, np.integer):
311
- return int(obj)
312
- if isinstance(obj, np.floating):
313
- return float(obj)
314
- raise TypeError("Not serializable")
315
-
316
-
317
- def read_sidecar(PATH: str) -> dict:
318
- with open(PATH + "_SIDECAR.json") as f:
319
- return json.load(f)
320
-
321
-
322
- def read_settings(PATH: str) -> dict:
323
- with open(PATH if ".json" in PATH else PATH + "_SETTINGS.json") as f:
324
- return json.load(f)
325
-
326
-
327
- def read_features(PATH: str) -> pd.DataFrame:
328
- return pd.read_csv(PATH + "_FEATURES.csv", engine="pyarrow")
329
-
330
-
331
- def read_nm_channels(PATH: str) -> pd.DataFrame:
332
- return pd.read_csv(PATH + "_nm_channels.csv")
333
-
334
-
335
- def get_run_list_indir(PATH: str) -> list:
336
- f_files = []
337
- for dirpath, _, files in os.walk(PATH):
338
- for x in files:
339
- if "FEATURES" in x:
340
- f_files.append(os.path.basename(dirpath))
341
- return f_files
342
-
343
-
344
- def loadmat(filename) -> dict:
345
- """
346
- this function should be called instead of direct spio.loadmat
347
- as it cures the problem of not properly recovering python dictionaries
348
- from mat files. It calls the function check keys to cure all entries
349
- which are still mat-objects
350
- """
351
- data = io.loadmat(filename, struct_as_record=False, squeeze_me=True)
352
- return _check_keys(data)
353
-
354
-
355
- def get_paths_example_data():
356
- """
357
- This function should provide RUN_NAME, PATH_RUN, PATH_BIDS, PATH_OUT and datatype for the example
358
- dataset used in most examples.
359
- """
360
-
361
- SCRIPT_DIR = Path(py_neuromodulation.__file__).parent.absolute()
362
-
363
- sub = "testsub"
364
- ses = "EphysMedOff"
365
- task = "gripforce"
366
- run = 0
367
- datatype = "ieeg"
368
-
369
- # Define run name and access paths in the BIDS format.
370
- RUN_NAME = f"sub-{sub}_ses-{ses}_task-{task}_run-{run}"
371
-
372
- PATH_BIDS = Path(SCRIPT_DIR) / "data"
373
-
374
- PATH_RUN = (
375
- Path(SCRIPT_DIR)
376
- / "data"
377
- / f"sub-{sub}"
378
- / f"ses-{ses}"
379
- / datatype
380
- / RUN_NAME
381
- )
382
-
383
- # Provide a path for the output data.
384
- PATH_OUT = PATH_BIDS / "derivatives"
385
-
386
- return RUN_NAME, PATH_RUN, PATH_BIDS, PATH_OUT, datatype
387
-
388
-
389
- def _check_keys(dict):
390
- """
391
- checks if entries in dictionary are mat-objects. If yes
392
- todict is called to change them to nested dictionaries
393
- """
394
- for key in dict:
395
- if isinstance(dict[key], io.matlab.mio5_params.mat_struct):
396
- dict[key] = _todict(dict[key])
397
- return dict
398
-
399
-
400
- def _todict(matobj) -> dict:
401
- """
402
- A recursive function which constructs from matobjects nested dictionaries
403
- """
404
- dict = {}
405
- for strg in matobj._fieldnames:
406
- elem = matobj.__dict__[strg]
407
- if isinstance(elem, io.matlab.mio5_params.mat_struct):
408
- dict[strg] = _todict(elem)
409
- else:
410
- dict[strg] = elem
411
- return dict
412
-
413
-
414
- def _pathlike_to_str(path: _PathLike) -> str:
415
- if isinstance(path, str):
416
- return path
417
- return str(path)
1
+ import json
2
+ from pathlib import PurePath, Path
3
+ from typing import TYPE_CHECKING
4
+
5
+ import numpy as np
6
+ import pandas as pd
7
+
8
+ from py_neuromodulation.nm_types import _PathLike
9
+ from py_neuromodulation import logger, PYNM_DIR
10
+
11
+ if TYPE_CHECKING:
12
+ from mne_bids import BIDSPath
13
+ from mne import io as mne_io
14
+
15
+
16
+ def load_nm_channels(
17
+ nm_channels: pd.DataFrame | _PathLike,
18
+ ) -> "pd.DataFrame":
19
+ """Read nm_channels from path or specify via BIDS arguments.
20
+ Necessary parameters are then ch_names (list), ch_types (list), bads (list), used_types (list),
21
+ target_keywords (list) and reference Union[list, str].
22
+ """
23
+
24
+ if isinstance(nm_channels, pd.DataFrame):
25
+ nm_ch_return = nm_channels
26
+ elif nm_channels:
27
+ if not Path(nm_channels).is_file():
28
+ raise ValueError(
29
+ "PATH_NM_CHANNELS is not a valid file. Got: " f"{nm_channels}"
30
+ )
31
+ nm_ch_return = pd.read_csv(nm_channels)
32
+
33
+ return nm_ch_return
34
+
35
+
36
+ def read_BIDS_data(
37
+ PATH_RUN: "_PathLike | BIDSPath",
38
+ line_noise: int = 50,
39
+ ) -> tuple["mne_io.Raw", np.ndarray, float, int, list | None, list | None]:
40
+ """Given a run path and bids data path, read the respective data
41
+
42
+ Parameters
43
+ ----------
44
+ PATH_RUN : path to bids run file
45
+ supported formats: https://bids-specification.readthedocs.io/en/v1.2.1/04-modality-specific-files/04-intracranial-electroencephalography.html#ieeg-recording-data
46
+ line_noise: int, optional
47
+ by default 50
48
+
49
+ Returns
50
+ -------
51
+ raw_arr : mne.io.RawArray
52
+ raw_arr_data : np.ndarray
53
+ sfreq : float
54
+ line_noise : int
55
+ coord_list : list | None
56
+ coord_names : list | None
57
+ """
58
+
59
+ from mne_bids import read_raw_bids, get_bids_path_from_fname
60
+
61
+ bids_path = get_bids_path_from_fname(PATH_RUN)
62
+
63
+ raw_arr = read_raw_bids(bids_path)
64
+ coord_list, coord_names = get_coord_list(raw_arr)
65
+ if raw_arr.info["line_freq"] is not None:
66
+ line_noise = int(raw_arr.info["line_freq"])
67
+ else:
68
+ logger.info(
69
+ f"Line noise is not available in the data, using value of {line_noise} Hz."
70
+ )
71
+ return (
72
+ raw_arr,
73
+ raw_arr.get_data(),
74
+ raw_arr.info["sfreq"],
75
+ line_noise,
76
+ coord_list,
77
+ coord_names,
78
+ )
79
+
80
+
81
def read_mne_data(
    PATH_RUN: "_PathLike | BIDSPath",
    line_noise: int = 50,
) -> "tuple[np.ndarray, float, list, list, list]":
    """Read data in the mne.io.read_raw supported format.

    Parameters
    ----------
    PATH_RUN : _PathLike | BIDSPath
        Path to mne.io.read_raw supported types https://mne.tools/stable/generated/mne.io.read_raw.html
    line_noise : int, optional
        fallback line noise frequency in Hz, by default 50; overwritten by the
        recording's ``info["line_freq"]`` when that is available

    Returns
    -------
    data : np.ndarray
        the raw channel data (``raw.get_data()``), NOT the mne.io.Raw object
    sfreq : float
        sampling frequency
    ch_names : list[str]
        channel names
    ch_types : list[str]
        channel types as reported by mne (may default to 'eeg', see log message)
    bads : list[str]
        channels marked as bad in the recording info

    NOTE(review): ``line_noise`` is updated from ``info["line_freq"]`` below but
    is NOT part of the return tuple — confirm callers do not expect it.
    """

    from mne import io as mne_io

    raw_arr = mne_io.read_raw(PATH_RUN)
    sfreq = raw_arr.info["sfreq"]
    ch_names = raw_arr.info["ch_names"]
    ch_types = raw_arr.get_channel_types()
    logger.info(
        "Channel data is read using mne.io.read_raw function. Channel types might not be correct"
        " and set to 'eeg' by default"
    )
    bads = raw_arr.info["bads"]

    # Prefer the line frequency stored in the recording over the argument.
    if raw_arr.info["line_freq"] is not None:
        line_noise = int(raw_arr.info["line_freq"])
    else:
        logger.info(
            f"Line noise is not available in the data, using value of {line_noise} Hz."
        )

    return raw_arr.get_data(), sfreq, ch_names, ch_types, bads
123
+
124
+
125
+ def get_coord_list(
126
+ raw: "mne_io.BaseRaw",
127
+ ) -> tuple[list, list] | tuple[None, None]:
128
+ """Return the coordinate list and names from mne RawArray
129
+
130
+ Parameters
131
+ ----------
132
+ raw : mne_io.BaseRaw
133
+
134
+ Returns
135
+ -------
136
+ coord_list[list, list] | coord_names[None, None]
137
+ """
138
+ montage = raw.get_montage()
139
+ if montage is not None:
140
+ coord_list = np.array(
141
+ list(dict(montage.get_positions()["ch_pos"]).values())
142
+ ).tolist()
143
+ coord_names = np.array(
144
+ list(dict(montage.get_positions()["ch_pos"]).keys())
145
+ ).tolist()
146
+ else:
147
+ coord_list = None
148
+ coord_names = None
149
+
150
+ return coord_list, coord_names
151
+
152
+
153
def read_grid(PATH_GRIDS: "_PathLike | None", grid_str: str) -> pd.DataFrame:
    """Load a grid definition TSV from PATH_GRIDS or the packaged PYNM_DIR.

    Parameters
    ----------
    PATH_GRIDS : _PathLike | None
        directory containing the grid file; the grid shipped with the package
        (PYNM_DIR) is used when None
    grid_str : str
        grid name; the file read is "grid_<grid_str.lower()>.tsv"

    Returns
    -------
    pd.DataFrame
        pd.DataFrame including mni x,y,z coordinates for each grid point
    """
    filename = "grid_" + grid_str.lower() + ".tsv"
    if PATH_GRIDS is None:
        grid_path = PYNM_DIR / filename
    else:
        grid_path = PurePath(PATH_GRIDS, filename)
    return pd.read_csv(grid_path, sep="\t")
175
+
176
+
177
def get_annotations(PATH_ANNOTATIONS: str, PATH_RUN: str, raw_arr: "mne_io.RawArray"):
    """Load an annotations .txt file for a run and apply it to the raw object.

    The annotations file is expected at
    ``PATH_ANNOTATIONS/<run file name without its 5-char extension>.txt``.

    Parameters
    ----------
    PATH_ANNOTATIONS : str
        directory containing the annotation .txt files
    PATH_RUN : str
        path to the run file the annotations belong to
    raw_arr : mne_io.RawArray
        raw object the annotations are set on (modified in place)

    Returns
    -------
    annot : mne.Annotations | None
        the loaded annotations, or None if the file was not found
    annot_data : np.ndarray | None
        data with "BAD*" annotated spans omitted, or None if the file was not found
    raw_arr : mne_io.RawArray
        the (possibly annotated) raw object
    """
    filepath = PurePath(PATH_ANNOTATIONS, PurePath(PATH_RUN).name[:-5] + ".txt")
    from mne import read_annotations

    # Initialize so a missing file does not cause an UnboundLocalError at return.
    annot = None
    annot_data = None
    try:
        annot = read_annotations(filepath)
        raw_arr.set_annotations(annot)

        # annotations starting with "BAD" are omitted with reject_by_annotations 'omit' param
        annot_data = raw_arr.get_data(reject_by_annotation="omit")
    except FileNotFoundError:
        logger.critical(f"Annotations file could not be found: {filepath}")

    return annot, annot_data, raw_arr
191
+
192
+
193
def read_plot_modules(
    PATH_PLOT: "_PathLike" = PYNM_DIR / "plots",
):
    """Read the .mat files required for plotting and unpack their coordinates.

    Parameters
    ----------
    PATH_PLOT : _PathLike, optional
        path to the directory with the plotting .mat files, by default
        PYNM_DIR / "plots"
    """

    faces = loadmat(PurePath(PATH_PLOT, "faces.mat"))
    vertices = loadmat(PurePath(PATH_PLOT, "Vertices.mat"))
    grid = loadmat(PurePath(PATH_PLOT, "grid.mat"))["grid"]
    stn_surf = loadmat(PurePath(PATH_PLOT, "STN_surf.mat"))

    ecog_verts = vertices["Vertices"]
    stn_verts = stn_surf["vertices"]

    # Every second STN vertex is enough for the 2D overview plot.
    x_ver = stn_verts[::2, 0]
    y_ver = stn_verts[::2, 1]

    x_ecog = ecog_verts[:, 0]
    y_ecog = ecog_verts[:, 1]
    z_ecog = ecog_verts[:, 2]

    x_stn = stn_verts[:, 0]
    y_stn = stn_verts[:, 1]
    z_stn = stn_verts[:, 2]

    return (
        faces,
        vertices,
        grid,
        stn_surf,
        x_ver,
        y_ver,
        x_ecog,
        y_ecog,
        z_ecog,
        x_stn,
        y_stn,
        z_stn,
    )
231
+
232
+
233
def write_csv(df, path_out):
    """Write a pandas DataFrame to ``path_out`` as CSV using PyArrow.

    PyArrow's writer is almost 10x faster than ``pandas.DataFrame.to_csv``
    and, unlike pandas, does not write an index column by default.
    """
    from pyarrow import csv, Table

    table = Table.from_pandas(df)
    csv.write_csv(table, path_out)
243
+
244
+
245
def save_nm_channels(
    nmchannels: pd.DataFrame,
    out_dir: "_PathLike",
    prefix: str = "",
) -> None:
    """Save the nm_channels DataFrame to ``out_dir`` as CSV.

    The file is named "<prefix>_nm_channels.csv", or "nm_channels.csv" when
    no prefix is given.
    """
    name = "nm_channels.csv" if not prefix else f"{prefix}_nm_channels.csv"
    path_out = PurePath(out_dir, name)
    write_csv(nmchannels, path_out)
    logger.info(f"nm_channels.csv saved to {path_out}")
254
+
255
+
256
def save_features(
    df_features: pd.DataFrame,
    out_dir: "_PathLike",
    prefix: str = "",
) -> None:
    """Save the computed features DataFrame to ``out_dir`` as CSV.

    The file is named "<prefix>_FEATURES.csv" ("_FEATURES.csv" when no
    prefix is given).
    """
    name = f"{prefix}_FEATURES.csv" if prefix else "_FEATURES.csv"
    # Use a dedicated variable instead of rebinding out_dir to the file path.
    path_out = PurePath(out_dir, name)
    write_csv(df_features, path_out)
    logger.info(f"FEATURES.csv saved to {str(path_out)}")
265
+
266
+
267
def save_sidecar(
    sidecar: dict,
    out_dir: "_PathLike",
    prefix: str = "",
) -> None:
    """Persist the sidecar dictionary as "<prefix>_SIDECAR.json" in ``out_dir``."""
    save_general_dict(sidecar, out_dir, prefix=prefix, str_add="_SIDECAR.json")
273
+
274
+
275
def save_general_dict(
    dict_: dict,
    out_dir: "_PathLike",
    prefix: str = "",
    str_add: str = "",
) -> None:
    """Serialize ``dict_`` as JSON to ``out_dir`` / "<prefix><str_add>".

    numpy arrays/scalars and pandas DataFrames inside the dictionary are
    converted to plain Python types via ``default_json_convert``.
    """
    # TODO: replace the prefix-based naming with a proper experiment name
    path_out = PurePath(out_dir, f"{prefix}{str_add}")

    serialized = json.dumps(
        dict_,
        default=default_json_convert,
        indent=4,
        separators=(",", ": "),
    )
    with open(path_out, "w") as f:
        f.write(serialized)
    logger.info(f"{str_add} saved to {path_out}")
294
+
295
+
296
+ def default_json_convert(obj) -> list | float:
297
+ if isinstance(obj, np.ndarray):
298
+ return obj.tolist()
299
+ if isinstance(obj, pd.DataFrame):
300
+ return obj.to_numpy().tolist()
301
+ if isinstance(obj, np.integer):
302
+ return int(obj)
303
+ if isinstance(obj, np.floating):
304
+ return float(obj)
305
+ raise TypeError("Not serializable")
306
+
307
+
308
def read_sidecar(PATH: "_PathLike") -> dict:
    """Load and return the "<PATH>_SIDECAR.json" file as a dictionary."""
    sidecar_path = PurePath(str(PATH) + "_SIDECAR.json")
    with open(sidecar_path) as f:
        return json.load(f)
311
+
312
+
313
def read_features(PATH: "_PathLike") -> pd.DataFrame:
    """Read the "<PATH>_FEATURES.csv" file (PyArrow engine for speed)."""
    features_path = str(PATH) + "_FEATURES.csv"
    return pd.read_csv(features_path, engine="pyarrow")
315
+
316
+
317
def read_nm_channels(PATH: "_PathLike") -> pd.DataFrame:
    """Read the "<PATH>_nm_channels.csv" channel definition file."""
    channels_path = str(PATH) + "_nm_channels.csv"
    return pd.read_csv(channels_path)
319
+
320
+
321
def get_run_list_indir(PATH: "_PathLike") -> list:
    """Return names of directories under ``PATH`` containing FEATURES files.

    A directory name is appended once per matching file, so directories with
    several FEATURES files appear multiple times (same as the original loop).
    """
    from os import walk

    # Path(PATH).walk() would be nicer but only exists in python >= 3.12
    return [
        PurePath(dirpath).name
        for dirpath, _, files in walk(PATH)
        for fname in files
        if "FEATURES" in fname
    ]
331
+
332
+
333
def loadmat(filename) -> dict:
    """Load a MATLAB .mat file, converting mat-objects to nested dicts.

    Call this instead of ``scipy.io.loadmat`` directly: it loads with
    ``struct_as_record=False, squeeze_me=True`` and then runs ``_check_keys``
    to cure all entries that are still mat-objects, so MATLAB structs come
    back as plain Python dictionaries.
    """
    from scipy.io import loadmat as sio_loadmat

    mat_contents = sio_loadmat(filename, struct_as_record=False, squeeze_me=True)
    return _check_keys(mat_contents)
344
+
345
+
346
def get_paths_example_data():
    """Provide RUN_NAME, PATH_RUN, PATH_BIDS, PATH_OUT and datatype for the
    example dataset used in most examples.
    """
    subject = "testsub"
    session = "EphysMedOff"
    task = "gripforce"
    run = 0
    datatype = "ieeg"

    # Define run name and access paths in the BIDS format.
    RUN_NAME = f"sub-{subject}_ses-{session}_task-{task}_run-{run}"

    PATH_BIDS = PYNM_DIR / "data"
    PATH_RUN = PATH_BIDS / f"sub-{subject}" / f"ses-{session}" / datatype / RUN_NAME

    # Provide a path for the output data.
    PATH_OUT = PATH_BIDS / "derivatives"

    return RUN_NAME, PATH_RUN, PATH_BIDS, PATH_OUT, datatype
369
+
370
+
371
+ def _check_keys(dict):
372
+ """
373
+ checks if entries in dictionary are mat-objects. If yes
374
+ todict is called to change them to nested dictionaries
375
+ """
376
+ from scipy.io.matlab import mat_struct
377
+
378
+ for key in dict:
379
+ if isinstance(dict[key], mat_struct):
380
+ dict[key] = _todict(dict[key])
381
+ return dict
382
+
383
+
384
+ def _todict(matobj) -> dict:
385
+ """
386
+ A recursive function which constructs from matobjects nested dictionaries
387
+ """
388
+ from scipy.io.matlab import mat_struct
389
+
390
+ dict = {}
391
+ for strg in matobj._fieldnames:
392
+ elem = matobj.__dict__[strg]
393
+ if isinstance(elem, mat_struct):
394
+ dict[strg] = _todict(elem)
395
+ else:
396
+ dict[strg] = elem
397
+ return dict
398
+
399
+
400
def generate_unique_filename(path: "_PathLike"):
    """Return a non-existing path "<stem>_<n><suffix>" next to ``path``.

    Counts n upward from 1 until "<parent>/<stem>_<n><suffix>" does not exist
    and returns it as a Path. Fixes the original, which interpolated a literal
    placeholder string instead of the file stem (leaving ``filename`` unused).

    Parameters
    ----------
    path : _PathLike
        the path whose directory, stem and suffix are used

    Returns
    -------
    Path
        a path in the same directory that does not yet exist
    """
    path = Path(path)

    parent = path.parent
    stem = path.stem
    suffix = path.suffix

    counter = 1
    while True:
        candidate = parent / f"{stem}_{counter}{suffix}"
        if not candidate.exists():
            return candidate
        counter += 1