py-neuromodulation 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (204)
  1. docs/build/_downloads/09df217f95985497f45d69e2d4bdc5b1/plot_2_example_add_feature.py +68 -0
  2. docs/build/_downloads/3b4900a2b2818ff30362215b76f7d5eb/plot_1_example_BIDS.py +233 -0
  3. docs/build/_downloads/7e92dd2e6cc86b239d14cafad972ae4f/plot_3_example_sharpwave_analysis.py +219 -0
  4. docs/build/_downloads/c2db0bf2b334d541b00662b991682256/plot_6_real_time_demo.py +97 -0
  5. docs/build/_downloads/ce3914826f782cbd1ea8fd024eaf0ac3/plot_5_example_rmap_computing.py +64 -0
  6. docs/build/_downloads/da36848a41e6a3235d91fb7cfb6d59b4/plot_0_first_demo.py +192 -0
  7. docs/build/_downloads/eaa4305c75b19a1e2eea941f742a6331/plot_4_example_gridPointProjection.py +210 -0
  8. docs/build/html/_downloads/09df217f95985497f45d69e2d4bdc5b1/plot_2_example_add_feature.py +68 -0
  9. docs/build/html/_downloads/3b4900a2b2818ff30362215b76f7d5eb/plot_1_example_BIDS.py +239 -0
  10. docs/build/html/_downloads/7e92dd2e6cc86b239d14cafad972ae4f/plot_3_example_sharpwave_analysis.py +219 -0
  11. docs/build/html/_downloads/c2db0bf2b334d541b00662b991682256/plot_6_real_time_demo.py +97 -0
  12. docs/build/html/_downloads/ce3914826f782cbd1ea8fd024eaf0ac3/plot_5_example_rmap_computing.py +64 -0
  13. docs/build/html/_downloads/da36848a41e6a3235d91fb7cfb6d59b4/plot_0_first_demo.py +192 -0
  14. docs/build/html/_downloads/eaa4305c75b19a1e2eea941f742a6331/plot_4_example_gridPointProjection.py +210 -0
  15. docs/source/_build/html/_downloads/09df217f95985497f45d69e2d4bdc5b1/plot_2_example_add_feature.py +76 -0
  16. docs/source/_build/html/_downloads/0d0d0a76e8f648d5d3cbc47da6351932/plot_real_time_demo.py +97 -0
  17. docs/source/_build/html/_downloads/3b4900a2b2818ff30362215b76f7d5eb/plot_1_example_BIDS.py +240 -0
  18. docs/source/_build/html/_downloads/5d73cadc59a8805c47e3b84063afc157/plot_example_BIDS.py +233 -0
  19. docs/source/_build/html/_downloads/7660317fa5a6bfbd12fcca9961457fc4/plot_example_rmap_computing.py +63 -0
  20. docs/source/_build/html/_downloads/7e92dd2e6cc86b239d14cafad972ae4f/plot_3_example_sharpwave_analysis.py +219 -0
  21. docs/source/_build/html/_downloads/839e5b319379f7fd9e867deb00fd797f/plot_example_gridPointProjection.py +210 -0
  22. docs/source/_build/html/_downloads/ae8be19afe5e559f011fc9b138968ba0/plot_first_demo.py +192 -0
  23. docs/source/_build/html/_downloads/b8b06cacc17969d3725a0b6f1d7741c5/plot_example_sharpwave_analysis.py +219 -0
  24. docs/source/_build/html/_downloads/c2db0bf2b334d541b00662b991682256/plot_6_real_time_demo.py +121 -0
  25. docs/source/_build/html/_downloads/c31a86c0b68cb4167d968091ace8080d/plot_example_add_feature.py +68 -0
  26. docs/source/_build/html/_downloads/ce3914826f782cbd1ea8fd024eaf0ac3/plot_5_example_rmap_computing.py +64 -0
  27. docs/source/_build/html/_downloads/da36848a41e6a3235d91fb7cfb6d59b4/plot_0_first_demo.py +189 -0
  28. docs/source/_build/html/_downloads/eaa4305c75b19a1e2eea941f742a6331/plot_4_example_gridPointProjection.py +210 -0
  29. docs/source/auto_examples/plot_0_first_demo.py +189 -0
  30. docs/source/auto_examples/plot_1_example_BIDS.py +240 -0
  31. docs/source/auto_examples/plot_2_example_add_feature.py +76 -0
  32. docs/source/auto_examples/plot_3_example_sharpwave_analysis.py +219 -0
  33. docs/source/auto_examples/plot_4_example_gridPointProjection.py +210 -0
  34. docs/source/auto_examples/plot_5_example_rmap_computing.py +64 -0
  35. docs/source/auto_examples/plot_6_real_time_demo.py +121 -0
  36. docs/source/conf.py +105 -0
  37. examples/plot_0_first_demo.py +189 -0
  38. examples/plot_1_example_BIDS.py +240 -0
  39. examples/plot_2_example_add_feature.py +76 -0
  40. examples/plot_3_example_sharpwave_analysis.py +219 -0
  41. examples/plot_4_example_gridPointProjection.py +210 -0
  42. examples/plot_5_example_rmap_computing.py +64 -0
  43. examples/plot_6_real_time_demo.py +121 -0
  44. packages/realtime_decoding/build/lib/realtime_decoding/__init__.py +4 -0
  45. packages/realtime_decoding/build/lib/realtime_decoding/decoder.py +104 -0
  46. packages/realtime_decoding/build/lib/realtime_decoding/features.py +163 -0
  47. packages/realtime_decoding/build/lib/realtime_decoding/helpers.py +15 -0
  48. packages/realtime_decoding/build/lib/realtime_decoding/run_decoding.py +345 -0
  49. packages/realtime_decoding/build/lib/realtime_decoding/trainer.py +54 -0
  50. packages/tmsi/build/lib/TMSiFileFormats/__init__.py +37 -0
  51. packages/tmsi/build/lib/TMSiFileFormats/file_formats/__init__.py +36 -0
  52. packages/tmsi/build/lib/TMSiFileFormats/file_formats/lsl_stream_writer.py +200 -0
  53. packages/tmsi/build/lib/TMSiFileFormats/file_formats/poly5_file_writer.py +496 -0
  54. packages/tmsi/build/lib/TMSiFileFormats/file_formats/poly5_to_edf_converter.py +236 -0
  55. packages/tmsi/build/lib/TMSiFileFormats/file_formats/xdf_file_writer.py +977 -0
  56. packages/tmsi/build/lib/TMSiFileFormats/file_readers/__init__.py +35 -0
  57. packages/tmsi/build/lib/TMSiFileFormats/file_readers/edf_reader.py +116 -0
  58. packages/tmsi/build/lib/TMSiFileFormats/file_readers/poly5reader.py +294 -0
  59. packages/tmsi/build/lib/TMSiFileFormats/file_readers/xdf_reader.py +229 -0
  60. packages/tmsi/build/lib/TMSiFileFormats/file_writer.py +102 -0
  61. packages/tmsi/build/lib/TMSiPlotters/__init__.py +2 -0
  62. packages/tmsi/build/lib/TMSiPlotters/gui/__init__.py +39 -0
  63. packages/tmsi/build/lib/TMSiPlotters/gui/_plotter_gui.py +234 -0
  64. packages/tmsi/build/lib/TMSiPlotters/gui/plotting_gui.py +440 -0
  65. packages/tmsi/build/lib/TMSiPlotters/plotters/__init__.py +44 -0
  66. packages/tmsi/build/lib/TMSiPlotters/plotters/hd_emg_plotter.py +446 -0
  67. packages/tmsi/build/lib/TMSiPlotters/plotters/impedance_plotter.py +589 -0
  68. packages/tmsi/build/lib/TMSiPlotters/plotters/signal_plotter.py +1326 -0
  69. packages/tmsi/build/lib/TMSiSDK/__init__.py +54 -0
  70. packages/tmsi/build/lib/TMSiSDK/device.py +588 -0
  71. packages/tmsi/build/lib/TMSiSDK/devices/__init__.py +34 -0
  72. packages/tmsi/build/lib/TMSiSDK/devices/saga/TMSi_Device_API.py +1764 -0
  73. packages/tmsi/build/lib/TMSiSDK/devices/saga/__init__.py +34 -0
  74. packages/tmsi/build/lib/TMSiSDK/devices/saga/saga_device.py +1366 -0
  75. packages/tmsi/build/lib/TMSiSDK/devices/saga/saga_types.py +520 -0
  76. packages/tmsi/build/lib/TMSiSDK/devices/saga/xml_saga_config.py +165 -0
  77. packages/tmsi/build/lib/TMSiSDK/error.py +95 -0
  78. packages/tmsi/build/lib/TMSiSDK/sample_data.py +63 -0
  79. packages/tmsi/build/lib/TMSiSDK/sample_data_server.py +99 -0
  80. packages/tmsi/build/lib/TMSiSDK/settings.py +45 -0
  81. packages/tmsi/build/lib/TMSiSDK/tmsi_device.py +111 -0
  82. packages/tmsi/build/lib/__init__.py +4 -0
  83. packages/tmsi/build/lib/apex_sdk/__init__.py +34 -0
  84. packages/tmsi/build/lib/apex_sdk/device/__init__.py +41 -0
  85. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_API.py +1009 -0
  86. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_API_enums.py +239 -0
  87. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_API_structures.py +668 -0
  88. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_device.py +1611 -0
  89. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_dongle.py +38 -0
  90. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_event_reader.py +57 -0
  91. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_channel.py +44 -0
  92. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_config.py +150 -0
  93. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_const.py +36 -0
  94. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_impedance_channel.py +48 -0
  95. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/apex_info.py +108 -0
  96. packages/tmsi/build/lib/apex_sdk/device/devices/apex/apex_structures/dongle_info.py +39 -0
  97. packages/tmsi/build/lib/apex_sdk/device/devices/apex/measurements/download_measurement.py +77 -0
  98. packages/tmsi/build/lib/apex_sdk/device/devices/apex/measurements/eeg_measurement.py +150 -0
  99. packages/tmsi/build/lib/apex_sdk/device/devices/apex/measurements/impedance_measurement.py +129 -0
  100. packages/tmsi/build/lib/apex_sdk/device/threads/conversion_thread.py +59 -0
  101. packages/tmsi/build/lib/apex_sdk/device/threads/sampling_thread.py +57 -0
  102. packages/tmsi/build/lib/apex_sdk/device/tmsi_channel.py +83 -0
  103. packages/tmsi/build/lib/apex_sdk/device/tmsi_device.py +201 -0
  104. packages/tmsi/build/lib/apex_sdk/device/tmsi_device_enums.py +103 -0
  105. packages/tmsi/build/lib/apex_sdk/device/tmsi_dongle.py +43 -0
  106. packages/tmsi/build/lib/apex_sdk/device/tmsi_event_reader.py +50 -0
  107. packages/tmsi/build/lib/apex_sdk/device/tmsi_measurement.py +118 -0
  108. packages/tmsi/build/lib/apex_sdk/sample_data_server/__init__.py +33 -0
  109. packages/tmsi/build/lib/apex_sdk/sample_data_server/event_data.py +44 -0
  110. packages/tmsi/build/lib/apex_sdk/sample_data_server/sample_data.py +50 -0
  111. packages/tmsi/build/lib/apex_sdk/sample_data_server/sample_data_server.py +136 -0
  112. packages/tmsi/build/lib/apex_sdk/tmsi_errors/error.py +126 -0
  113. packages/tmsi/build/lib/apex_sdk/tmsi_sdk.py +113 -0
  114. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/apex/apex_structure_generator.py +134 -0
  115. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/decorators.py +60 -0
  116. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/logger_filter.py +42 -0
  117. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/singleton.py +42 -0
  118. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/support_functions.py +72 -0
  119. packages/tmsi/build/lib/apex_sdk/tmsi_utilities/tmsi_logger.py +98 -0
  120. py_neuromodulation/{helper.py → _write_example_dataset_helper.py} +1 -1
  121. py_neuromodulation/nm_EpochStream.py +2 -3
  122. py_neuromodulation/nm_IO.py +43 -70
  123. py_neuromodulation/nm_RMAP.py +308 -11
  124. py_neuromodulation/nm_analysis.py +1 -1
  125. py_neuromodulation/nm_artifacts.py +25 -0
  126. py_neuromodulation/nm_bispectra.py +64 -29
  127. py_neuromodulation/nm_bursts.py +44 -30
  128. py_neuromodulation/nm_coherence.py +2 -1
  129. py_neuromodulation/nm_features.py +4 -2
  130. py_neuromodulation/nm_filter.py +63 -32
  131. py_neuromodulation/nm_filter_preprocessing.py +91 -0
  132. py_neuromodulation/nm_fooof.py +47 -29
  133. py_neuromodulation/nm_mne_connectivity.py +1 -1
  134. py_neuromodulation/nm_normalization.py +50 -74
  135. py_neuromodulation/nm_oscillatory.py +151 -31
  136. py_neuromodulation/nm_plots.py +13 -10
  137. py_neuromodulation/nm_rereference.py +10 -8
  138. py_neuromodulation/nm_run_analysis.py +28 -13
  139. py_neuromodulation/nm_sharpwaves.py +103 -136
  140. py_neuromodulation/nm_stats.py +44 -30
  141. py_neuromodulation/nm_stream_abc.py +18 -10
  142. py_neuromodulation/nm_stream_offline.py +181 -40
  143. py_neuromodulation/utils/_logging.py +24 -0
  144. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.3.dist-info}/METADATA +182 -142
  145. py_neuromodulation-0.0.3.dist-info/RECORD +188 -0
  146. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.3.dist-info}/WHEEL +2 -1
  147. py_neuromodulation-0.0.3.dist-info/top_level.txt +5 -0
  148. tests/__init__.py +0 -0
  149. tests/conftest.py +117 -0
  150. tests/test_all_examples.py +10 -0
  151. tests/test_all_features.py +63 -0
  152. tests/test_bispectra.py +70 -0
  153. tests/test_bursts.py +105 -0
  154. tests/test_feature_sampling_rates.py +143 -0
  155. tests/test_fooof.py +16 -0
  156. tests/test_initalization_offline_stream.py +41 -0
  157. tests/test_multiprocessing.py +58 -0
  158. tests/test_nan_values.py +29 -0
  159. tests/test_nm_filter.py +95 -0
  160. tests/test_nm_resample.py +63 -0
  161. tests/test_normalization_settings.py +146 -0
  162. tests/test_notch_filter.py +31 -0
  163. tests/test_osc_features.py +424 -0
  164. tests/test_preprocessing_filter.py +151 -0
  165. tests/test_rereference.py +171 -0
  166. tests/test_sampling.py +57 -0
  167. tests/test_settings_change_after_init.py +76 -0
  168. tests/test_sharpwave.py +165 -0
  169. tests/test_target_channel_add.py +100 -0
  170. tests/test_timing.py +80 -0
  171. py_neuromodulation/data/README +0 -6
  172. py_neuromodulation/data/dataset_description.json +0 -8
  173. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/MOV_aligned_features_ch_ECOG_RIGHT_0_all.png +0 -0
  174. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/all_feature_plt.pdf +0 -0
  175. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_FEATURES.csv +0 -182
  176. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_LM_ML_RES.p +0 -0
  177. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_SETTINGS.json +0 -273
  178. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_SIDECAR.json +0 -6
  179. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_decoding_performance.png +0 -0
  180. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_nm_channels.csv +0 -11
  181. py_neuromodulation/data/participants.json +0 -32
  182. py_neuromodulation/data/participants.tsv +0 -2
  183. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_coordsystem.json +0 -5
  184. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_electrodes.tsv +0 -11
  185. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_channels.tsv +0 -11
  186. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.eeg +0 -0
  187. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.json +0 -18
  188. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vhdr +0 -35
  189. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vmrk +0 -13
  190. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/sub-testsub_ses-EphysMedOff_scans.tsv +0 -2
  191. py_neuromodulation/grid_cortex.tsv +0 -40
  192. py_neuromodulation/grid_subcortex.tsv +0 -1429
  193. py_neuromodulation/nm_settings.json +0 -290
  194. py_neuromodulation/plots/STN_surf.mat +0 -0
  195. py_neuromodulation/plots/Vertices.mat +0 -0
  196. py_neuromodulation/plots/faces.mat +0 -0
  197. py_neuromodulation/plots/grid.mat +0 -0
  198. py_neuromodulation/py_neuromodulation.egg-info/PKG-INFO +0 -104
  199. py_neuromodulation/py_neuromodulation.egg-info/dependency_links.txt +0 -1
  200. py_neuromodulation/py_neuromodulation.egg-info/requires.txt +0 -26
  201. py_neuromodulation/py_neuromodulation.egg-info/top_level.txt +0 -1
  202. py_neuromodulation-0.0.2.dist-info/RECORD +0 -73
  203. /py_neuromodulation/{py_neuromodulation.egg-info/SOURCES.txt → utils/__init__.py} +0 -0
  204. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.3.dist-info}/LICENSE +0 -0

py_neuromodulation/nm_fooof.py

@@ -1,7 +1,9 @@
+import logging
+from typing import Iterable
+
 import numpy as np
 from fooof import FOOOF
 from scipy import fft
-from typing import Iterable
 
 from py_neuromodulation import nm_features_abc
 
@@ -18,15 +20,6 @@ class FooofAnalyzer(nm_features_abc.Feature):
         self.ap_mode = "knee" if self.settings_fooof["knee"] else "fixed"
         self.max_n_peaks = self.settings_fooof["max_n_peaks"]
 
-        self.fm = FOOOF(
-            aperiodic_mode=self.ap_mode,
-            peak_width_limits=self.settings_fooof["peak_width_limits"],
-            max_n_peaks=self.settings_fooof["max_n_peaks"],
-            min_peak_height=self.settings_fooof["min_peak_height"],
-            peak_threshold=self.settings_fooof["peak_threshold"],
-            verbose=False,
-        )
-
         self.num_samples = int(
             self.settings_fooof["windowlength_ms"] * sfreq / 1000
         )
@@ -75,44 +68,70 @@ class FooofAnalyzer(nm_features_abc.Feature):
             spectrum = self._get_spectrum(data[ch_idx, :])
 
             try:
-                self.fm.fit(self.f_vec, spectrum, self.freq_range)
+                fm = FOOOF(
+                    aperiodic_mode=self.ap_mode,
+                    peak_width_limits=self.settings_fooof["peak_width_limits"],
+                    max_n_peaks=self.settings_fooof["max_n_peaks"],
+                    min_peak_height=self.settings_fooof["min_peak_height"],
+                    peak_threshold=self.settings_fooof["peak_threshold"],
+                    verbose=False,
+                )
+                fm.fit(self.f_vec, spectrum, self.freq_range)
             except Exception as e:
-                print(e)
-                print(f"failing spectrum: {spectrum}")
+                logging.critical(e, exc_info=True)
+
+            if fm.fooofed_spectrum_ is None:
+                FIT_PASSED = False
+            else:
+                FIT_PASSED = True
+
             if self.settings_fooof["aperiodic"]["exponent"]:
                 features_compute[f"{ch_name}_fooof_a_exp"] = (
-                    np.nan_to_num(self.fm.get_params("aperiodic_params", "exponent"))
-                    if self.fm.fooofed_spectrum_ is not None
+                    np.nan_to_num(fm.get_params("aperiodic_params", "exponent"))
+                    if FIT_PASSED is True
                     else None
                 )
 
             if self.settings_fooof["aperiodic"]["offset"]:
                 features_compute[f"{ch_name}_fooof_a_offset"] = (
-                    np.nan_to_num(self.fm.get_params("aperiodic_params", "offset"))
-                    if self.fm.fooofed_spectrum_ is not None
+                    np.nan_to_num(fm.get_params("aperiodic_params", "offset"))
+                    if FIT_PASSED is True
                     else None
                 )
-
+
             if self.settings_fooof["aperiodic"]["knee"]:
-                features_compute[f"{ch_name}_fooof_a_knee"] = (
-                    np.nan_to_num(self.fm.get_params("aperiodic_params", "knee"))
-                    if self.fm.fooofed_spectrum_ is not None
-                    else None
+                if FIT_PASSED is False:
+                    knee_freq = None
+                else:
+                    if fm.get_params("aperiodic_params", "exponent") != 0:
+                        knee_fooof = fm.get_params("aperiodic_params", "knee")
+                        knee_freq = np.nan_to_num(
+                            knee_fooof
+                            ** (
+                                1
+                                / fm.get_params("aperiodic_params", "exponent")
+                            )
+                        )
+                    else:
+                        knee_freq = None
+
+                features_compute[f"{ch_name}_fooof_a_knee_frequency"] = (
+                    knee_freq
                 )
 
             peaks_bw = (
-                self.fm.get_params("peak_params", "BW")
-                if self.fm.fooofed_spectrum_ is not None
+                fm.get_params("peak_params", "BW")
+                if FIT_PASSED is True
                 else None
             )
             peaks_cf = (
-                self.fm.get_params("peak_params", "CF")
-                if self.fm.fooofed_spectrum_ is not None
+                fm.get_params("peak_params", "CF")
+                if FIT_PASSED is True
                 else None
             )
             peaks_pw = (
-                self.fm.get_params("peak_params", "PW")
-                if self.fm.fooofed_spectrum_ is not None
+                fm.get_params("peak_params", "PW")
+                if FIT_PASSED is True
                 else None
             )
 
@@ -122,7 +141,6 @@ class FooofAnalyzer(nm_features_abc.Feature):
                 peaks_pw = [peaks_pw]
 
             for peak_idx in range(self.max_n_peaks):
-
                 if self.settings_fooof["periodic"]["band_width"]:
                     features_compute[f"{ch_name}_fooof_p_{peak_idx}_bw"] = (
                         peaks_bw[peak_idx] if peak_idx < len(peaks_bw) else None
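
Note on the knee handling above: instead of reporting FOOOF's raw aperiodic "knee" parameter, the new code converts it to an approximate knee frequency via knee ** (1 / exponent), and skips the conversion when the fit failed or the exponent is zero. A minimal standalone sketch of that conversion (function and variable names here are illustrative, not part of the package):

    import numpy as np

    def knee_frequency(knee: float, exponent: float) -> float | None:
        """Approximate knee frequency from FOOOF aperiodic parameters."""
        if exponent == 0:
            return None  # conversion is undefined without a spectral slope
        return float(np.nan_to_num(knee ** (1 / exponent)))

    print(knee_frequency(knee=100.0, exponent=2.0))  # -> 10.0
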
py_neuromodulation/nm_mne_connectivity.py

@@ -59,7 +59,7 @@ class MNEConnectivity(nm_features_abc.Feature):
         if epochs.events.shape[0] < 2:
             raise Exception(
                 f"A minimum of 2 epochs is required for mne_connectivity,"
-                f" got only {epochs.events.shape[0]}. Increase settings['segment_length']"
+                f" got only {epochs.events.shape[0]}. Increase settings['segment_length_features_ms']"
             )
         return epochs
 
py_neuromodulation/nm_normalization.py

@@ -3,8 +3,6 @@ from enum import Enum
 
 from sklearn import preprocessing
 import numpy as np
-
-
 class NORM_METHODS(Enum):
     MEAN = "mean"
     MEDIAN = "median"
@@ -138,6 +136,17 @@ class FeatureNormalizer:
 
         return data
 
+"""
+Functions to check for NaN's before deciding which Numpy function to call
+"""
+def nan_mean(data, axis):
+    return np.nanmean(data, axis=axis) if np.any(np.isnan(sum(data))) else np.mean(data, axis=axis)
+
+def nan_std(data, axis):
+    return np.nanstd(data, axis=axis) if np.any(np.isnan(sum(data))) else np.std(data, axis=axis)
+
+def nan_median(data, axis):
+    return np.nanmedian(data, axis=axis) if np.any(np.isnan(sum(data))) else np.median(data, axis=axis)
 
 def _normalize_and_clip(
     current: np.ndarray,
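
The three helpers added above fall back to NumPy's nan-aware reductions only when a NaN is actually present (the builtin sum over the first axis is used as a cheap NaN probe). A small usage sketch with made-up arrays:

    import numpy as np

    def nan_mean(data, axis):
        # nan-aware reduction only when the data actually contains NaNs
        return np.nanmean(data, axis=axis) if np.any(np.isnan(sum(data))) else np.mean(data, axis=axis)

    clean = np.array([[1.0, 2.0], [3.0, 4.0]])
    dirty = np.array([[1.0, np.nan], [3.0, 4.0]])
    print(nan_mean(clean, axis=0))  # [2. 3.]
    print(nan_mean(dirty, axis=0))  # [2. 4.] -- the NaN is ignored
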
@@ -147,82 +156,49 @@
     description: str,
 ) -> tuple[np.ndarray, np.ndarray]:
     """Normalize data."""
-    if method == NORM_METHODS.MEAN.value:
-        mean = np.nanmean(previous, axis=0)
-        current = (current - mean) / mean
-    elif method == NORM_METHODS.MEDIAN.value:
-        median = np.nanmedian(previous, axis=0)
-        current = (current - median) / median
-    elif method == NORM_METHODS.ZSCORE.value:
-        mean = np.nanmean(previous, axis=0)
-        current = (current - mean) / np.nanstd(previous, axis=0)
-    elif method == NORM_METHODS.ZSCORE_MEDIAN.value:
-        current = (current - np.nanmedian(previous, axis=0)) / np.nanstd(
-            previous, axis=0
-        )
-    # For the following methods we check for the shape of current
-    # when current is a 1D array, then it is the post-processing normalization,
-    # and we need to expand, and take the [0, :] component
-    # When current is a 2D array, then it is pre-processing normalization, and
-    # there's no need for expanding.
-    elif method == NORM_METHODS.QUANTILE.value:
-        if len(current.shape) == 1:
-            current = (
-                preprocessing.QuantileTransformer(n_quantiles=300)
-                .fit(np.nan_to_num(previous))
-                .transform(np.expand_dims(current, axis=0))[0, :]
-            )
-        else:
-            current = (
-                preprocessing.QuantileTransformer(n_quantiles=300)
-                .fit(np.nan_to_num(previous))
-                .transform(current)
-            )
-    elif method == NORM_METHODS.ROBUST.value:
-        if len(current.shape) == 1:
-            current = (
-                preprocessing.RobustScaler()
-                .fit(np.nan_to_num(previous))
-                .transform(np.expand_dims(current, axis=0))[0, :]
-            )
-        else:
+    match method:
+        case NORM_METHODS.MEAN.value:
+            mean = nan_mean(previous, axis=0)
+            current = (current - mean) / mean
+        case NORM_METHODS.MEDIAN.value:
+            median = nan_median(previous, axis=0)
+            current = (current - median) / median
+        case NORM_METHODS.ZSCORE.value:
+            current = (current - nan_mean(previous, axis=0)) / nan_std(previous, axis=0)
+        case NORM_METHODS.ZSCORE_MEDIAN.value:
+            current = (current - nan_median(previous, axis=0)) / nan_std(previous, axis=0)
+        # For the following methods we check for the shape of current
+        # when current is a 1D array, then it is the post-processing normalization,
+        # and we need to expand, and remove the extra dimension afterwards
+        # When current is a 2D array, then it is pre-processing normalization, and
+        # there's no need for expanding.
+        case (NORM_METHODS.QUANTILE.value |
+              NORM_METHODS.ROBUST.value |
+              NORM_METHODS.MINMAX.value |
+              NORM_METHODS.POWER.value):
+
+            norm_methods = {
+                NORM_METHODS.QUANTILE.value : lambda: preprocessing.QuantileTransformer(n_quantiles=300),
+                NORM_METHODS.ROBUST.value : preprocessing.RobustScaler,
+                NORM_METHODS.MINMAX.value : preprocessing.MinMaxScaler,
+                NORM_METHODS.POWER.value : preprocessing.PowerTransformer
+            }
+
             current = (
-                preprocessing.RobustScaler()
+                norm_methods[method]()
                 .fit(np.nan_to_num(previous))
-                .transform(current)
+                .transform(
+                    # if post-processing: pad dimensions to 2
+                    np.reshape(current, (2-len(current.shape))*(1,) + current.shape)
+                )
+                .squeeze()  # if post-processing: remove extra dimension
             )
-
-    elif method == NORM_METHODS.MINMAX.value:
-        if len(current.shape) == 1:
-            current = (
-                preprocessing.MinMaxScaler()
-                .fit(np.nan_to_num(previous))
-                .transform(np.expand_dims(current, axis=0))[0, :]
-            )
-        else:
-            current = (
-                preprocessing.MinMaxScaler()
-                .fit(np.nan_to_num(previous))
-                .transform(current)
-            )
-    elif method == NORM_METHODS.POWER.value:
-        if len(current.shape) == 1:
-            current = (
-                preprocessing.PowerTransformer()
-                .fit(np.nan_to_num(previous))
-                .transform(np.expand_dims(current, axis=0))[0, :]
+
+        case _:
+            raise ValueError(
+                f"Only {[e.value for e in NORM_METHODS]} are supported as "
+                f"{description} normalization methods. Got {method}."
             )
-        else:
-            current = (
-                preprocessing.PowerTransformer()
-                .fit(np.nan_to_num(previous))
-                .transform(current)
-            )
-    else:
-        raise ValueError(
-            f"Only {[e.value for e in NORM_METHODS]} are supported as "
-            f"{description} normalization methods. Got {method}."
-        )
 
     if clip:
         current = _clip(data=current, clip=clip)
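
The combined match arm above funnels the quantile, robust, min-max, and power scalers through one code path: a 1D feature vector (post-processing) is padded to shape (1, n_features) before the transform and squeezed back afterwards, while 2D pre-processing input passes through unchanged. A minimal sketch of that shape handling, using RobustScaler and made-up shapes:

    import numpy as np
    from sklearn import preprocessing

    previous = np.random.rand(50, 4)    # reference window the scaler is fit on
    current_1d = np.random.rand(4)      # post-processing: single feature vector
    current_2d = np.random.rand(10, 4)  # pre-processing: batch of samples

    def normalize(current, previous):
        # pad 1D input to 2D, transform, then drop the extra dimension again
        padded = np.reshape(current, (2 - len(current.shape)) * (1,) + current.shape)
        return (
            preprocessing.RobustScaler()
            .fit(np.nan_to_num(previous))
            .transform(padded)
            .squeeze()
        )

    print(normalize(current_1d, previous).shape)  # (4,)
    print(normalize(current_2d, previous).shape)  # (10, 4)
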
py_neuromodulation/nm_oscillatory.py

@@ -38,6 +38,15 @@ class OscillatoryFeature(nm_features_abc.Feature):
             assert isinstance(
                 s[osc_feature_name]["windowlength_ms"], int
             ), f"windowlength_ms needs to be type int, got {s[osc_feature_name]['windowlength_ms']}"
+
+            assert (
+                s[osc_feature_name]["windowlength_ms"]
+                <= s["segment_length_features_ms"]
+            ), (
+                f"oscillatory feature windowlength_ms = ({s[osc_feature_name]['windowlength_ms']})"
+                f"needs to be smaller than"
+                f"s['segment_length_features_ms'] = {s['segment_length_features_ms']}",
+            )
         else:
             for seg_length in s[osc_feature_name][
                 "segment_lengths_ms"
@@ -48,12 +57,6 @@ class OscillatoryFeature(nm_features_abc.Feature):
         assert isinstance(
             s[osc_feature_name]["log_transform"], bool
         ), f"log_transform needs to be type bool, got {s[osc_feature_name]['log_transform']}"
-        assert isinstance(
-            s[osc_feature_name]["kalman_filter"], bool
-        ), f"kalman_filter needs to be type bool, got {s[osc_feature_name]['kalman_filter']}"
-
-        if s[osc_feature_name]["kalman_filter"] is True:
-            nm_kalmanfilter.test_kf_settings(s, ch_names, sfreq)
 
         assert isinstance(s["frequency_ranges_hz"], dict)
 
@@ -95,6 +98,36 @@ class OscillatoryFeature(nm_features_abc.Feature):
             feature_calc = self.KF_dict[KF_name].x[0]
         return feature_calc
 
+    def estimate_osc_features(
+        self,
+        features_compute: dict,
+        data: np.ndarray,
+        feature_name: np.ndarray,
+        est_name: str,
+    ):
+        for feature_est_name in list(self.s[est_name]["features"].keys()):
+            if self.s[est_name]["features"][feature_est_name] is True:
+                # switch case for feature_est_name
+                match feature_est_name:
+                    case "mean":
+                        features_compute[
+                            f"{feature_name}_{feature_est_name}"
+                        ] = np.nanmean(data)
+                    case "median":
+                        features_compute[
+                            f"{feature_name}_{feature_est_name}"
+                        ] = np.nanmedian(data)
+                    case "std":
+                        features_compute[
+                            f"{feature_name}_{feature_est_name}"
+                        ] = np.nanstd(data)
+                    case "max":
+                        features_compute[
+                            f"{feature_name}_{feature_est_name}"
+                        ] = np.nanmax(data)
+
+        return features_compute
+
 
 class FFT(OscillatoryFeature):
     def __init__(
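
estimate_osc_features, added above, turns each per-band array of spectral values into one feature column per estimator that is switched on in the settings. A condensed dict-dispatch equivalent for illustration (the settings fragment below is hypothetical but mirrors the structure the method reads):

    import numpy as np

    settings = {"fft_settings": {"features": {"mean": True, "median": False, "std": True, "max": False}}}
    estimators = {"mean": np.nanmean, "median": np.nanmedian, "std": np.nanstd, "max": np.nanmax}

    def estimate(features_compute, data, feature_name, est_name, s):
        for est, enabled in s[est_name]["features"].items():
            if enabled:
                features_compute[f"{feature_name}_{est}"] = estimators[est](data)
        return features_compute

    band_power = np.array([1.0, 2.0, np.nan, 4.0])
    print(estimate({}, band_power, "ECOG_1_fft_theta", "fft_settings", settings))
    # only the enabled estimators appear: mean ~ 2.33, std ~ 1.25
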
@@ -104,8 +137,6 @@ class FFT(OscillatoryFeature):
         sfreq: float,
     ) -> None:
         super().__init__(settings, ch_names, sfreq)
-        if self.s["fft_settings"]["kalman_filter"]:
-            self.init_KF("fft")
 
         if self.s["fft_settings"]["log_transform"]:
             self.log_transform = True
@@ -114,13 +145,15 @@ class FFT(OscillatoryFeature):
 
         window_ms = self.s["fft_settings"]["windowlength_ms"]
         self.window_samples = int(-np.floor(window_ms / 1000 * sfreq))
-        freqs = fft.rfftfreq(-self.window_samples, 1 / np.floor(self.sfreq))
+        self.freqs = fft.rfftfreq(
+            -self.window_samples, 1 / np.floor(self.sfreq)
+        )
 
         self.feature_params = []
         for ch_idx, ch_name in enumerate(self.ch_names):
             for fband, f_range in self.f_ranges_dict.items():
                 idx_range = np.where(
-                    (freqs >= f_range[0]) & (freqs < f_range[1])
+                    (self.freqs >= f_range[0]) & (self.freqs < f_range[1])
                 )[0]
                 feature_name = "_".join([ch_name, "fft", fband])
                 self.feature_params.append((ch_idx, feature_name, idx_range))
@@ -132,17 +165,87 @@ class FFT(OscillatoryFeature):
     def calc_feature(self, data: np.ndarray, features_compute: dict) -> dict:
         data = data[:, self.window_samples :]
         Z = np.abs(fft.rfft(data))
+
+        if self.log_transform:
+            Z = np.log10(Z)
+
         for ch_idx, feature_name, idx_range in self.feature_params:
             Z_ch = Z[ch_idx, idx_range]
-            feature_calc = np.mean(Z_ch)
 
-            if self.log_transform:
-                feature_calc = np.log(feature_calc)
+            features_compute = self.estimate_osc_features(
+                features_compute, Z_ch, feature_name, "fft_settings"
+            )
 
-            if self.KF_dict:
-                feature_calc = self.update_KF(feature_calc, feature_name)
+        for ch_idx, ch_name in enumerate(self.ch_names):
+            if self.s["fft_settings"]["return_spectrum"]:
+                features_compute.update(
+                    {
+                        f"{ch_name}_fft_psd_{str(f)}": Z[ch_idx][idx]
+                        for idx, f in enumerate(self.freqs.astype(int))
+                    }
+                )
+
+        return features_compute
+
+
+class Welch(OscillatoryFeature):
+    def __init__(
+        self,
+        settings: dict,
+        ch_names: Iterable[str],
+        sfreq: float,
+    ) -> None:
+        super().__init__(settings, ch_names, sfreq)
+
+        self.log_transform = self.s["welch_settings"]["log_transform"]
+
+        self.feature_params = []
+        for ch_idx, ch_name in enumerate(self.ch_names):
+            for fband, f_range in self.f_ranges_dict.items():
+                feature_name = "_".join([ch_name, "welch", fband])
+                self.feature_params.append((ch_idx, feature_name, f_range))
+
+    @staticmethod
+    def test_settings(s: dict, ch_names: Iterable[str], sfreq: int | float):
+        OscillatoryFeature.test_settings_osc(
+            s, ch_names, sfreq, "welch_settings"
+        )
+
+    def calc_feature(self, data: np.ndarray, features_compute: dict) -> dict:
+        freqs, Z = signal.welch(
+            data,
+            fs=self.sfreq,
+            window="hann",
+            nperseg=self.sfreq,
+            noverlap=None,
+        )
+
+        if self.log_transform:
+            Z = np.log10(Z)
+
+        for ch_idx, feature_name, f_range in self.feature_params:
+            Z_ch = Z[ch_idx]
+
+            idx_range = np.where((freqs >= f_range[0]) & (freqs <= f_range[1]))[
+                0
+            ]
+
+            features_compute = self.estimate_osc_features(
+                features_compute,
+                Z_ch[idx_range],
+                feature_name,
+                "welch_settings",
+            )
+
+        for ch_idx, ch_name in enumerate(self.ch_names):
+            if self.s["welch_settings"]["return_spectrum"]:
+                features_compute.update(
+                    {
+                        f"{ch_name}_welch_psd_{str(f)}": Z[ch_idx][idx]
+                        for idx, f in enumerate(freqs.astype(int))
+                    }
+                )
 
-            features_compute[feature_name] = feature_calc
         return features_compute
 
 
@@ -154,10 +257,9 @@ class STFT(OscillatoryFeature):
         sfreq: float,
     ) -> None:
         super().__init__(settings, ch_names, sfreq)
-        if self.s["stft_settings"]["kalman_filter"]:
-            self.init_KF("stft")
 
         self.nperseg = int(self.s["stft_settings"]["windowlength_ms"])
+        self.log_transform = self.s["stft_settings"]["log_transform"]
 
         self.feature_params = []
         for ch_idx, ch_name in enumerate(self.ch_names):
@@ -172,7 +274,7 @@ class STFT(OscillatoryFeature):
             )
 
     def calc_feature(self, data: np.ndarray, features_compute: dict) -> dict:
-        f, _, Zxx = signal.stft(
+        freqs, _, Zxx = signal.stft(
             data,
             fs=self.sfreq,
             window="hamming",
@@ -180,15 +282,30 @@ class STFT(OscillatoryFeature):
             boundary="even",
         )
         Z = np.abs(Zxx)
+        if self.log_transform:
+            Z = np.log10(Z)
         for ch_idx, feature_name, f_range in self.feature_params:
             Z_ch = Z[ch_idx]
-            idx_range = np.where((f >= f_range[0]) & (f <= f_range[1]))[0]
-            feature_calc = np.mean(Z_ch[idx_range, :])  # 1. dim: f, 2. dim: t
-
-            if self.KF_dict:
-                feature_calc = self.update_KF(feature_calc, feature_name)
+            idx_range = np.where((freqs >= f_range[0]) & (freqs <= f_range[1]))[
+                0
+            ]
+
+            features_compute = self.estimate_osc_features(
+                features_compute,
+                Z_ch[idx_range, :],
+                feature_name,
+                "stft_settings",
+            )
 
-            features_compute[feature_name] = feature_calc
+        for ch_idx, ch_name in enumerate(self.ch_names):
+            if self.s["stft_settings"]["return_spectrum"]:
+                Z_ch_mean = Z[ch_idx].mean(axis=1)
+                features_compute.update(
+                    {
+                        f"{ch_name}_stft_psd_{str(f)}": Z_ch_mean[idx]
+                        for idx, f in enumerate(freqs.astype(int))
+                    }
+                )
 
         return features_compute
 
@@ -204,7 +321,7 @@ class BandPower(OscillatoryFeature):
         super().__init__(settings, ch_names, sfreq)
         bp_settings = self.s["bandpass_filter_settings"]
 
-        self.bandpass_filter = nm_filter.BandPassFilter(
+        self.bandpass_filter = nm_filter.MNEFilter(
             f_ranges=list(self.f_ranges_dict.values()),
             sfreq=self.sfreq,
             filter_length=self.sfreq - 1,
@@ -265,7 +382,9 @@ class BandPower(OscillatoryFeature):
             ].values()
         ), "Set at least one bandpower_feature to True."
 
-        for fband_name, seg_length_fband in s["bandpass_filter_settings"]["segment_lengths_ms"].items():
+        for fband_name, seg_length_fband in s["bandpass_filter_settings"][
+            "segment_lengths_ms"
+        ].items():
             assert isinstance(seg_length_fband, int), (
                 f"bandpass segment_lengths_ms for {fband_name} "
                 f"needs to be of type int, got {seg_length_fband}"
@@ -275,15 +394,16 @@ class BandPower(OscillatoryFeature):
                 f"segment length {seg_length_fband} needs to be smaller than "
                 f" s['segment_length_features_ms'] = {s['segment_length_features_ms']}"
            )
-
+
         for fband_name in list(s["frequency_ranges_hz"].keys()):
-            assert fband_name in list(s["bandpass_filter_settings"]["segment_lengths_ms"].keys()), (
+            assert fband_name in list(
+                s["bandpass_filter_settings"]["segment_lengths_ms"].keys()
+            ), (
                 f"frequency range {fband_name} "
                 "needs to be defined in s['bandpass_filter_settings']['segment_lengths_ms']"
            )
 
     def calc_feature(self, data: np.ndarray, features_compute: dict) -> dict:
-
         data = self.bandpass_filter.filter_data(data)
 
         for (
@@ -297,7 +417,7 @@ class BandPower(OscillatoryFeature):
         ) in self.feature_params:
             if bp_feature == "activity":
                 if self.log_transform:
-                    feature_calc = np.log(
+                    feature_calc = np.log10(
                         np.var(data[ch_idx, f_band_idx, -seglen:])
                     )
                 else:
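
The new Welch class computes a Welch periodogram per channel and then reduces each configured frequency band with the estimators above (the FFT and STFT classes now do the same, instead of a plain mean plus optional Kalman filtering). A minimal sketch of the band reduction with scipy, using made-up channel count, sampling rate, and band edges:

    import numpy as np
    from scipy import signal

    sfreq = 1000                               # Hz
    data = np.random.randn(2, 10 * sfreq)      # 2 channels, 10 s of noise

    freqs, Z = signal.welch(data, fs=sfreq, window="hann", nperseg=sfreq, noverlap=None)
    Z = np.log10(Z)                            # log_transform branch
    beta = (freqs >= 13) & (freqs <= 35)
    print(np.nanmean(Z[:, beta], axis=1))      # one log-power value per channel
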
py_neuromodulation/nm_plots.py

@@ -6,6 +6,9 @@ from matplotlib import gridspec
 from typing import Optional
 import seaborn as sb
 import pandas as pd
+import logging
+
+logger = logging.getLogger("PynmLogger")
 
 from py_neuromodulation import nm_IO, nm_stats
 
@@ -87,7 +90,9 @@ def plot_epoch(
 ):
     if z_score is None:
         X_epoch = stats.zscore(
-            np.nan_to_num(np.nanmean(np.squeeze(X_epoch), axis=0)), axis=0
+            np.nan_to_num(np.nanmean(np.squeeze(X_epoch), axis=0)),
+            axis=0,
+            nan_policy="omit",
         ).T
     y_epoch = np.stack(np.array(y_epoch))
     plt.figure(figsize=(6, 6))
@@ -237,7 +242,7 @@ def plot_corr_matrix(
 
     if save_plot:
         plt.savefig(plt_path, bbox_inches="tight")
-        print("Correlation matrix figure saved to " + str(plt_path))
+        logger.info(f"Correlation matrix figure saved to {plt_path}")
 
     if show_plot is False:
         plt.close()
@@ -329,7 +334,7 @@ def plot_epochs_avg(
 
     if normalize_data:
         X_epoch_mean = stats.zscore(
-            np.nanmean(np.squeeze(X_epoch), axis=0), axis=0
+            np.nanmean(np.squeeze(X_epoch), axis=0), axis=0, nan_policy="omit"
        ).T
     else:
         X_epoch_mean = np.nanmean(np.squeeze(X_epoch), axis=0).T
@@ -385,7 +390,7 @@ def plot_epochs_avg(
            feature_name=feature_str_add,
        )
        plt.savefig(plt_path, bbox_inches="tight")
-       print("Feature epoch average figure saved to: " + str(plt_path))
+       logger.info(f"Feature epoch average figure saved to: {str(plt_path)}")
    if show_plot is False:
        plt.close()
 
@@ -441,7 +446,6 @@ def plot_all_features(
    OUT_PATH: str = None,
    feature_file: str = None,
 ):
-
    if time_limit_high_s is not None:
        df = df[df["time"] < time_limit_high_s * 1000]
    if time_limit_low_s is not None:
@@ -449,7 +453,7 @@ def plot_all_features(
 
    cols_plt = [c for c in df.columns if c != "time"]
    if normalize is True:
-        data_plt = stats.zscore(df[cols_plt])
+        data_plt = stats.zscore(df[cols_plt], nan_policy="omit")
    else:
        data_plt = df[cols_plt]
 
@@ -487,7 +491,6 @@ class NM_Plot:
        sess_right: Optional[bool] = False,
        proj_matrix_cortex: np.ndarray | None = None,
    ) -> None:
-
        self.grid_cortex = grid_cortex
        self.grid_subcortex = grid_subcortex
        self.ecog_strip = ecog_strip
@@ -510,7 +513,6 @@ class NM_Plot:
        ) = nm_IO.read_plot_modules()
 
    def plot_grid_elec_3d(self) -> None:
-
        plot_grid_elec_3d(np.array(self.grid_cortex), np.array(self.ecog_strip))
 
    def plot_cortex(
@@ -552,7 +554,6 @@ class NM_Plot:
        axes.axes.set_aspect("equal", anchor="C")
 
        if grid_cortex is not None:
-
            grid_color = (
                np.ones(grid_cortex.shape[0])
                if grid_color is None
@@ -604,6 +605,8 @@ class NM_Plot:
            feature_name=feature_str_add,
        )
        plt.savefig(plt_path, bbox_inches="tight")
-       print("Feature epoch average figure saved to: " + str(plt_path))
+       logger.info(
+           f"Feature epoch average figure saved to: {str(plt_path)}"
+       )
        if show_plot is False:
            plt.close()
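
The plotting hunks replace print calls with a module logger and pass nan_policy="omit" to scipy.stats.zscore, so an isolated NaN no longer blanks out a whole z-scored feature column. A quick illustration with a made-up array:

    import numpy as np
    from scipy import stats

    x = np.array([1.0, 2.0, np.nan, 4.0])
    print(stats.zscore(x))                     # [nan nan nan nan] -- one NaN poisons the default
    print(stats.zscore(x, nan_policy="omit"))  # [-1.069 -0.267 nan 1.336]
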