py-neuromodulation 0.0.2-py3-none-any.whl → 0.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. py_neuromodulation/ConnectivityDecoding/Automated Anatomical Labeling 3 (Rolls 2020).nii +0 -0
  2. py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m +34 -0
  3. py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py +106 -0
  4. py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py +119 -0
  5. py_neuromodulation/ConnectivityDecoding/mni_coords_cortical_surface.mat +0 -0
  6. py_neuromodulation/ConnectivityDecoding/mni_coords_whole_brain.mat +0 -0
  7. py_neuromodulation/ConnectivityDecoding/rmap_func_all.nii +0 -0
  8. py_neuromodulation/ConnectivityDecoding/rmap_struc.nii +0 -0
  9. py_neuromodulation/{helper.py → _write_example_dataset_helper.py} +1 -1
  10. py_neuromodulation/nm_EpochStream.py +2 -3
  11. py_neuromodulation/nm_IO.py +43 -70
  12. py_neuromodulation/nm_RMAP.py +308 -11
  13. py_neuromodulation/nm_analysis.py +1 -1
  14. py_neuromodulation/nm_artifacts.py +25 -0
  15. py_neuromodulation/nm_bispectra.py +64 -29
  16. py_neuromodulation/nm_bursts.py +44 -30
  17. py_neuromodulation/nm_coherence.py +2 -1
  18. py_neuromodulation/nm_features.py +4 -2
  19. py_neuromodulation/nm_filter.py +63 -32
  20. py_neuromodulation/nm_filter_preprocessing.py +91 -0
  21. py_neuromodulation/nm_fooof.py +47 -29
  22. py_neuromodulation/nm_mne_connectivity.py +1 -1
  23. py_neuromodulation/nm_normalization.py +50 -74
  24. py_neuromodulation/nm_oscillatory.py +151 -31
  25. py_neuromodulation/nm_plots.py +13 -10
  26. py_neuromodulation/nm_rereference.py +10 -8
  27. py_neuromodulation/nm_run_analysis.py +28 -13
  28. py_neuromodulation/nm_settings.json +51 -3
  29. py_neuromodulation/nm_sharpwaves.py +103 -136
  30. py_neuromodulation/nm_stats.py +44 -30
  31. py_neuromodulation/nm_stream_abc.py +18 -10
  32. py_neuromodulation/nm_stream_offline.py +188 -46
  33. py_neuromodulation/utils/_logging.py +24 -0
  34. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info}/METADATA +72 -32
  35. py_neuromodulation-0.0.4.dist-info/RECORD +72 -0
  36. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info}/WHEEL +1 -1
  37. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/MOV_aligned_features_ch_ECOG_RIGHT_0_all.png +0 -0
  38. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/all_feature_plt.pdf +0 -0
  39. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_FEATURES.csv +0 -182
  40. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_LM_ML_RES.p +0 -0
  41. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_SETTINGS.json +0 -273
  42. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_SIDECAR.json +0 -6
  43. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_decoding_performance.png +0 -0
  44. py_neuromodulation/data/derivatives/sub-testsub_ses-EphysMedOff_task-gripforce_run-0/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_nm_channels.csv +0 -11
  45. py_neuromodulation/py_neuromodulation.egg-info/PKG-INFO +0 -104
  46. py_neuromodulation/py_neuromodulation.egg-info/dependency_links.txt +0 -1
  47. py_neuromodulation/py_neuromodulation.egg-info/requires.txt +0 -26
  48. py_neuromodulation/py_neuromodulation.egg-info/top_level.txt +0 -1
  49. py_neuromodulation-0.0.2.dist-info/RECORD +0 -73
  50. py_neuromodulation/{py_neuromodulation.egg-info/SOURCES.txt → utils/__init__.py} +0 -0
  51. {py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info/licenses}/LICENSE +0 -0

py_neuromodulation/nm_stream_abc.py
@@ -1,9 +1,15 @@
 """Module that contains PNStream ABC."""
+
 from abc import ABC, abstractmethod
 import os
 import pathlib
 import _pickle as cPickle
 
+from .utils import _logging  # logger initialization
+
+# Logger use in different modules: logger = logging.getLogger("PynmLogger")
+
+
 import pandas as pd
 from sklearn import base
 
@@ -12,7 +18,7 @@ from py_neuromodulation (
     nm_IO,
     nm_plots,
     nm_run_analysis,
-    nm_settings
+    nm_settings,
 )
 
 _PathLike = str | os.PathLike
@@ -72,7 +78,9 @@ class PNStream(ABC):
        self.settings = self._load_settings(settings)

        if sampling_rate_features_hz is not None:
-            self.settings["sampling_rate_features_hz"] = sampling_rate_features_hz
+            self.settings["sampling_rate_features_hz"] = (
+                sampling_rate_features_hz
+            )

        self.nm_channels = self._load_nm_channels(nm_channels)
        if path_grids is None:
@@ -138,10 +146,12 @@ class PNStream(ABC):
        nm_channels: pd.DataFrame | _PathLike,
    ) -> pd.DataFrame:
        if not isinstance(nm_channels, pd.DataFrame):
-            nm_channels = nm_IO.load_nm_channels(nm_channels)
-
-        if nm_channels.query("used == 1 and target == 0").shape[0] == 0:
-            raise ValueError("No channels selected for analysis that have column 'used' = 1 and 'target' = 0. Please check your nm_channels")
+            nm_channels = nm_IO.load_nm_channels(nm_channels)
+
+        if nm_channels.query("used == 1 and target == 0").shape[0] == 0:
+            raise ValueError(
+                "No channels selected for analysis that have column 'used' = 1 and 'target' = 0. Please check your nm_channels"
+            )

        return nm_channels

@@ -149,7 +159,7 @@ class PNStream(ABC):
    def _load_settings(settings: dict | _PathLike | None) -> dict:
        if isinstance(settings, dict):
            return settings
-        if settings is None:
+        if settings is None:
            return nm_settings.get_default_settings()
        return nm_IO.read_settings(str(settings))

@@ -196,9 +206,7 @@ class PNStream(ABC):
    ) -> None:
        self.run_analysis.save_nm_channels(out_path_root, folder_name)

-    def save_settings(
-        self, out_path_root: _PathLike, folder_name: str
-    ) -> None:
+    def save_settings(self, out_path_root: _PathLike, folder_name: str) -> None:
        self.run_analysis.save_settings(out_path_root, folder_name)

    def save_sidecar(self, out_path_root: _PathLike, folder_name: str) -> None:
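
The stricter channel validation added above means a channel table now has to contain at least one row with used == 1 and target == 0 before a stream will run. A minimal sketch of a table that passes this check (column values are illustrative; real nm_channels files usually carry additional columns such as rereference, new_name, or status):

    import pandas as pd

    # Hypothetical minimal channel table, for illustration only.
    nm_channels = pd.DataFrame(
        {
            "name": ["ECOG_1", "ECOG_2", "MOV_LEFT"],
            "type": ["ecog", "ecog", "misc"],
            "used": [1, 1, 0],
            "target": [0, 0, 1],
        }
    )

    # Mirrors the check added in PNStream._load_nm_channels: at least one row
    # must have used == 1 and target == 0, otherwise a ValueError is raised.
    assert nm_channels.query("used == 1 and target == 0").shape[0] > 0
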
py_neuromodulation/nm_stream_offline.py
@@ -1,17 +1,28 @@
 """Module for offline data streams."""
-import math
-import os
 
+import os
+from joblib import Parallel, delayed
 import numpy as np
 import pandas as pd
+from itertools import count
+import logging
+
+logger = logging.getLogger("PynmLogger")
+
+import mne
 
-from py_neuromodulation import nm_generator, nm_IO, nm_stream_abc, nm_define_nmchannels
+from py_neuromodulation import (
+    nm_generator,
+    nm_IO,
+    nm_stream_abc,
+    nm_define_nmchannels,
+)
 
 _PathLike = str | os.PathLike
 
 
 class _OfflineStream(nm_stream_abc.PNStream):
-    """Offline stream base class.
+    """Offline stream base class.
     This class can be inhereted for different types of offline streams, e.g. epoch-based or continuous.
 
     Parameters
@@ -19,19 +30,38 @@ class _OfflineStream(nm_stream_abc.PNStream):
     nm_stream_abc : nm_stream_abc.PNStream
     """
 
-    def _add_labels(
-        self, features: pd.DataFrame, data: np.ndarray
-    ) -> pd.DataFrame:
-        """Add resampled labels to features if there are target channels."""
+    def _add_target(
+        self, feature_series: pd.Series, data: np.ndarray
+    ) -> pd.Series:
+        """Add target channels to feature series.
+
+        Parameters
+        ----------
+        feature_series : pd.Series
+        data : np.ndarray
+            Raw data with shape (n_channels, n_samples). Channels not for feature computation are also included
+
+        Returns
+        -------
+        pd.Series
+            feature series with target channels added
+        """
+
        if self.nm_channels["target"].sum() > 0:
-            features = nm_IO.add_labels(
-                features=features,
-                settings=self.settings,
-                nm_channels=self.nm_channels,
-                raw_arr_data=data,
-                fs=self.sfreq,
-            )
-        return features
+            if not self.target_idx_initialized:
+                self.target_indexes = self.nm_channels[
+                    self.nm_channels["target"] == 1
+                ].index
+                self.target_names = self.nm_channels.loc[
+                    self.target_indexes, "name"
+                ].to_list()
+                self.target_idx_initialized = True
+
+            for target_idx, target_name in zip(
+                self.target_indexes, self.target_names
+            ):
+                feature_series[target_name] = data[target_idx, -1]
+        return feature_series

    def _add_timestamp(
        self, feature_series: pd.Series, cnt_samples: int
@@ -41,11 +71,10 @@
        Due to normalization run_analysis needs to keep track of the counted
        samples. These are accessed here for time conversion.
        """
-        timestamp = cnt_samples * 1000 / self.sfreq
        feature_series["time"] = cnt_samples * 1000 / self.sfreq

        if self.verbose:
-            print(
+            logging.info(
                str(np.round(feature_series["time"] / 1000, 2))
                + " seconds of data processed"
            )
@@ -59,9 +88,9 @@
            if not len(names_expected) == data.shape[0]:
                raise ValueError(
                    "If data is passed as an array, the first dimension must"
-                    " match the number of channel names in `nm_channels`. Got:"
-                    f" Data columns: {data.shape[0]}, nm_channels.name:"
-                    f" {len(names_expected)}."
+                    " match the number of channel names in `nm_channels`.\n"
+                    f" Number of data channels (data.shape[0]): {data.shape[0]}\n"
+                    f" Length of nm_channels[\"name\"]: {len(names_expected)}."
                )
            return data
        names_data = data.columns.to_list()
@@ -71,51 +100,155 @@ class _OfflineStream(nm_stream_abc.PNStream):
        ):
            raise ValueError(
                "If data is passed as a DataFrame, the"
-                "columns must match the channel names in `nm_channels`. Got:"
-                f"Data columns: {names_data}, nm_channels.name: {names_data}."
+                "column names must match the channel names in `nm_channels`.\n"
+                f"Input dataframe column names: {names_data}\n"
+                f"Expected (from nm_channels[\"name\"]): : {names_expected}."
+            )
+        return data.to_numpy().transpose()
+
+    def _check_settings_for_parallel(self):
+        """Check specified settings and raise error if parallel processing is not possible.
+
+        Raises:
+            ValueError: depending on the settings, parallel processing is not possible
+        """
+
+        if "raw_normalization" in self.settings["preprocessing"]:
+            raise ValueError(
+                "Parallel processing is not possible with raw_normalization normalization."
+            )
+        if self.settings["postprocessing"]["feature_normalization"] is True:
+            raise ValueError(
+                "Parallel processing is not possible with feature normalization."
+            )
+        if self.settings["features"]["bursts"] is True:
+            raise ValueError(
+                "Parallel processing is not possible with burst estimation."
            )
-        return data.to_numpy()
+
+    def _process_batch(self, data_batch, cnt_samples):
+        feature_series = self.run_analysis.process(
+            data_batch.astype(np.float64)
+        )
+        feature_series = self._add_timestamp(feature_series, cnt_samples)
+        feature_series = self._add_target(
+            feature_series=feature_series, data=data_batch
+        )
+        return feature_series

    def _run_offline(
        self,
        data: np.ndarray,
        out_path_root: _PathLike | None = None,
        folder_name: str = "sub",
+        parallel: bool = False,
+        n_jobs: int = -2,
    ) -> pd.DataFrame:
        generator = nm_generator.raw_data_generator(
            data=data,
            settings=self.settings,
            sfreq=self.sfreq,
        )
-        features = []
+
        sample_add = self.sfreq / self.run_analysis.sfreq_features

        offset_time = self.settings["segment_length_features_ms"]
-        #offset_start = np.ceil(offset_time / 1000 * self.sfreq).astype(int)
+        # offset_start = np.ceil(offset_time / 1000 * self.sfreq).astype(int)
        offset_start = offset_time / 1000 * self.sfreq

-        cnt_samples = offset_start
+        if parallel:
+            l_features = Parallel(n_jobs=n_jobs, verbose=10)(
+                delayed(self._process_batch)(data_batch, cnt_samples)
+                for data_batch, cnt_samples in zip(
+                    generator, count(offset_start, sample_add)
+                )
+            )

-        while True:
-            data_batch = next(generator, None)
-            if data_batch is None:
-                break
-            feature_series = self.run_analysis.process(data_batch.astype(np.float64))
-            feature_series = self._add_timestamp(feature_series, cnt_samples)
-            features.append(feature_series)
+        else:
+            l_features = []
+            cnt_samples = offset_start
+            while True:
+                data_batch = next(generator, None)
+                if data_batch is None:
+                    break
+                feature_series = self.run_analysis.process(
+                    data_batch.astype(np.float64)
+                )
+                feature_series = self._add_timestamp(
+                    feature_series, cnt_samples
+                )

-            if self.model is not None:
-                prediction = self.model.predict(feature_series)
+                feature_series = self._add_target(
+                    feature_series=feature_series, data=data_batch
+                )

-            cnt_samples += sample_add
+                l_features.append(feature_series)

-        feature_df = pd.DataFrame(features)
-        feature_df = self._add_labels(features=feature_df, data=data)
+                cnt_samples += sample_add
+        feature_df = pd.DataFrame(l_features)

        self.save_after_stream(out_path_root, folder_name, feature_df)

        return feature_df

+    def plot_raw_signal(
+        self,
+        sfreq: float = None,
+        data: np.array = None,
+        lowpass: float = None,
+        highpass: float = None,
+        picks: list = None,
+        plot_time: bool = True,
+        plot_psd: bool = False,
+    ) -> None:
+        """Use MNE-RawArray Plot to investigate PSD or raw_signal plot.
+
+        Parameters
+        ----------
+        sfreq : float
+            sampling frequency [Hz]
+        data : np.array, optional
+            data (n_channels, n_times), by default None
+        plot_time : bool, optional
+            mne.io.RawArray.plot(), by default True
+        plot_psd : bool, optional
+            mne.io.RawArray.plot(), by default True
+
+        Raises
+        ------
+        ValueError
+            raise Exception when no data is passed
+        """
+        if self.data is None and data is None:
+            raise ValueError("No data passed to plot_raw_signal function.")
+
+        if data is None and self.data is not None:
+            data = self.data
+
+        if sfreq is None:
+            sfreq = self.sfreq
+
+        if self.nm_channels is not None:
+            ch_names = self.nm_channels["name"].to_list()
+            ch_types = self.nm_channels["type"].to_list()
+        else:
+            ch_names = [f"ch_{i}" for i in range(data.shape[0])]
+            ch_types = ["ecog" for i in range(data.shape[0])]
+
+        # create mne.RawArray
+        info = mne.create_info(
+            ch_names=ch_names, sfreq=sfreq, ch_types=ch_types
+        )
+        raw = mne.io.RawArray(data, info)
+
+        if picks is not None:
+            raw = raw.pick(picks)
+        self.raw = raw
+        if plot_time:
+            raw.plot(highpass=highpass, lowpass=lowpass)
+        if plot_psd:
+            raw.compute_psd().plot()
+

class Stream(_OfflineStream):
    def __init__(
@@ -129,7 +262,7 @@ class Stream(_OfflineStream):
        path_grids: _PathLike | None = None,
        coord_names: list | None = None,
        coord_list: list | None = None,
-        verbose: bool = True,
+        verbose: bool = True,
    ) -> None:
        """Stream initialization

@@ -154,11 +287,13 @@
        coord_list : list | None, optional
            coordinates in the form [[coord_1_x, coord_1_y, coord_1_z], [coord_2_x, coord_2_y, coord_2_z],], by default None
        verbose : bool, optional
-            print out stream computation time information, by default True
+            log stream computation time information, by default True
        """

        if nm_channels is None and data is not None:
-            nm_channels = nm_define_nmchannels.get_default_channels_from_data(data)
+            nm_channels = nm_define_nmchannels.get_default_channels_from_data(
+                data
+            )

        if nm_channels is None and data is None:
            raise ValueError(
@@ -179,11 +314,15 @@

        self.data = data

+        self.target_idx_initialized = False
+
    def run(
        self,
        data: np.ndarray | pd.DataFrame = None,
        out_path_root: _PathLike | None = None,
        folder_name: str = "sub",
+        parallel: bool = False,
+        n_jobs: int = -2,
    ) -> pd.DataFrame:
        """Call run function for offline stream.

@@ -210,8 +349,11 @@
        elif self.data is not None:
            data = self._handle_data(self.data)
        elif self.data is None and data is None:
-            raise ValueError(
-                "No data passed to run function."
-            )
+            raise ValueError("No data passed to run function.")
+
+        if parallel is True:
+            self._check_settings_for_parallel()

-        return self._run_offline(data, out_path_root, folder_name)
+        return self._run_offline(
+            data, out_path_root, folder_name, parallel=parallel, n_jobs=n_jobs
+        )
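
The changes above add a joblib-based parallel path to Stream.run() (batches dispatched through _process_batch) and a plot_raw_signal helper. A usage sketch, assuming Stream forwards a settings dict to the PNStream base class as shown in nm_stream_abc.py; the settings keys touched below are exactly the ones _check_settings_for_parallel rejects, and whether the defaults need all three adjustments depends on the shipped nm_settings.json:

    import numpy as np
    import py_neuromodulation as nm
    from py_neuromodulation import nm_settings

    data = np.random.random([5, 10000])  # (n_channels, n_samples)

    # Parallel runs are rejected for raw/feature normalization and burst
    # features, so disable them (if enabled) before requesting parallel=True.
    settings = nm_settings.get_default_settings()
    settings["preprocessing"] = [
        step for step in settings["preprocessing"] if step != "raw_normalization"
    ]
    settings["postprocessing"]["feature_normalization"] = False
    settings["features"]["bursts"] = False

    stream = nm.Stream(
        sfreq=1000,
        data=data,
        settings=settings,
        sampling_rate_features_hz=3,
    )
    features = stream.run(parallel=True, n_jobs=-2)  # n_jobs=-2: all but one core

    # New plotting helper: inspect the raw signal or its PSD via an MNE RawArray.
    stream.plot_raw_signal(plot_time=True, plot_psd=False)
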
py_neuromodulation/utils/_logging.py (new file)
@@ -0,0 +1,24 @@
+import logging
+
+# include the filename in the log output
+# Configure the logger
+logger = logging.getLogger("PynmLogger")
+logger.setLevel(logging.INFO)
+
+# Create a file handler and set its level to DEBUG
+file_handler = logging.FileHandler("logfile_pynm.log")
+file_handler.setLevel(logging.INFO)
+
+# console_handler = logging.StreamHandler()
+# console_handler.setLevel(logging.DEBUG)
+
+# Create a formatter and add it to the handler
+formatter = logging.Formatter(
+    "%(asctime)s:%(levelname)s:%(name)s:%(filename)s:%(message)s"
+)
+file_handler.setFormatter(formatter)
+# console_handler.setFormatter(formatter)
+
+# Add the file handler to the logger
+logger.addHandler(file_handler)
+# logger.addHandler(console_handler)
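
A short sketch of how other modules pick up this logger, mirroring the comment added in nm_stream_abc.py (logger = logging.getLogger("PynmLogger")); the log message text is illustrative:

    import logging

    # Importing the module runs the top-level configuration above and attaches
    # the file handler that writes to logfile_pynm.log.
    from py_neuromodulation.utils import _logging  # noqa: F401

    logger = logging.getLogger("PynmLogger")
    logger.info("stream initialized")
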
{py_neuromodulation-0.0.2.dist-info → py_neuromodulation-0.0.4.dist-info}/METADATA
@@ -1,44 +1,73 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: py_neuromodulation
-Version: 0.0.2
+Version: 0.0.4
 Summary: Real-time analysis of intracranial neurophysiology recordings.
-Keywords: real-time,eeg,ieeg,dbs,ecog,electrocorticography,deep-brain-stimulation,machine-learning
+Project-URL: bugtracker, https://github.com/neuromodulation/py_neuromodulation/issues
+Project-URL: repository, https://github.com/neuromodulation/py_neuromodulation
 Author-email: Timon Merk <timon.merk@charite.de>
 Maintainer: Timon Merk
-Requires-Python: >=3.10
-Description-Content-Type: text/x-rst
+License: MIT License
+
+Copyright (c) 2021 Interventional Cognitive Neuromodulation - Neumann Lab Berlin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+License-File: LICENSE
+Keywords: dbs,deep-brain-stimulation,ecog,eeg,electrocorticography,ieeg,machine-learning,real-time
 Classifier: Development Status :: 2 - Pre-Alpha
-Classifier: License :: OSI Approved :: MIT License
+Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Dist: mne
-Requires-Dist: filterpy >= 1.4.5
+Requires-Python: >=3.10
+Requires-Dist: black>=24.2.0
+Requires-Dist: filterpy>=1.4.5
 Requires-Dist: fooof
+Requires-Dist: hatch>=1.9.4
 Requires-Dist: imbalanced-learn
-Requires-Dist: matplotlib >= 3.3.4
-Requires-Dist: mne-bids >= 0.8
+Requires-Dist: ipython
+Requires-Dist: joblib>=1.3.2
+Requires-Dist: matplotlib>=3.3.4
+Requires-Dist: mne
+Requires-Dist: mne-bids>=0.8
 Requires-Dist: mne-connectivity
 Requires-Dist: mrmr-selection
 Requires-Dist: nolds
-Requires-Dist: numpy >= 1.21.2
-Requires-Dist: pandas >= 1.2.2
+Requires-Dist: notebook
+Requires-Dist: numpy>=1.21.2
+Requires-Dist: pandas>=1.2.2
 Requires-Dist: pip
-Requires-Dist: pynput
+Requires-Dist: pyarrow>=14.0.2
 Requires-Dist: pybids
+Requires-Dist: pybispectra>=1.0.0
+Requires-Dist: pynput
+Requires-Dist: pyparrm
+Requires-Dist: pytest>=8.0.2
 Requires-Dist: scikit-image
-Requires-Dist: scikit-learn >= 0.24.2
+Requires-Dist: scikit-learn>=0.24.2
 Requires-Dist: scikit-optimize
-Requires-Dist: scipy >= 1.7.1
-Requires-Dist: seaborn >= 0.11
-Requires-Dist: notebook
-Requires-Dist: ipython
-Requires-Dist: pybispectra
-Requires-Dist: black ; extra == "dev"
-Requires-Dist: pytest ; extra == "dev"
-Requires-Dist: pytest-cov ; extra == "dev"
-Project-URL: bugtracker, https://github.com/neuromodulation/py_neuromodulation/issues
-Project-URL: repository, https://github.com/neuromodulation/py_neuromodulation
+Requires-Dist: scipy>=1.7.1
+Requires-Dist: seaborn>=0.11
+Requires-Dist: wget
 Provides-Extra: dev
+Requires-Dist: black; extra == 'dev'
+Requires-Dist: pytest; extra == 'dev'
+Requires-Dist: pytest-cov; extra == 'dev'
+Description-Content-Type: text/x-rst
 
 py_neuromodulation
 ==================
@@ -82,12 +111,24 @@ py_neuromodulation requires at least python 3.10. For installation you can use p
 
    pip install py-neuromodulation
 
-We recommend however installing the package in a new new conda environment:
+
+We recommend however installing the package using `rye <https://rye-up.com/guide/installation/>`_:
 
 .. code-block::
 
    git clone https://github.com/neuromodulation/py_neuromodulation.git
-   conda create -n pynm-test python=3.10
+   rye pin 3.11
+   rye sync
+
+And then activating the virtual environment e.g. in Windows using:
+
+.. code-block::
+
+   .\.venv\Scripts\activate
+
+Alternatively you can also install the package in a conda environment:
+
+   conda create -n pynm-test python=3.11
    conda activate pynm-test
 
 Then install the packages listed in the `pyproject.toml`:
@@ -107,27 +148,27 @@ Then *py_neuromodulation* can be imported via:
 
 .. code-block::
 
-   import py_neuromodulation as py_nm
+   import py_neuromodulation as nm
 
 Basic Usage
 ===========
 
 .. code-block:: python
 
-   import py_neuromodulation as pn
+   import py_neuromodulation as nm
   import numpy as np
 
   NUM_CHANNELS = 5
   NUM_DATA = 10000
   sfreq = 1000 # Hz
-   feature_freq = 3 # Hz
+   sampling_rate_features_hz = 3 # Hz
 
   data = np.random.random([NUM_CHANNELS, NUM_DATA])
 
-   stream = pn.Stream(sfreq=sfreq, data=data, sampling_rate_features_hz=sampling_rate_features_hz)
+   stream = nm.Stream(sfreq=sfreq, data=data, sampling_rate_features_hz=sampling_rate_features_hz)
   features = stream.run()
 
-Check the `Usage <https://py-neuromodulation.readthedocs.io/en/latest/usage.html>`_ and `First examples <https://py-neuromodulation.readthedocs.io/en/latest/auto_examples/plot_first_demo.html>`_ for further introduction.
+Check the `Usage <https://py-neuromodulation.readthedocs.io/en/latest/usage.html>`_ and `First examples <https://py-neuromodulation.readthedocs.io/en/latest/auto_examples/plot_0_first_demo.html>`_ for further introduction.
 
 Contact information
 -------------------
@@ -139,4 +180,3 @@ References
 
 .. [1] Merk, T. et al. *Invasive neurophysiology and whole brain connectomics for neural decoding in patients with brain implants*, `https://doi.org/10.21203/rs.3.rs-3212709/v1` (2023).
 .. [2] Merk, T. et al. *Electrocorticography is superior to subthalamic local field potentials for movement decoding in Parkinson’s disease*. Elife 11, e75126, `https://doi.org/10.7554/eLife.75126` (2022).
-
py_neuromodulation-0.0.4.dist-info/RECORD (new file)
@@ -0,0 +1,72 @@
+py_neuromodulation/FieldTrip.py,sha256=cq4BIibMvcpyAHd7U8eEK4AhVSOTrRKXcywtgtc-Mg4,18597
+py_neuromodulation/__init__.py,sha256=3aujkEqF4dzt8WKtK7gaolh782UsR5FFkqcg7fOtze4,296
+py_neuromodulation/_write_example_dataset_helper.py,sha256=b-kG1JiuIhebVSNCrumTOgdXPTmxXkkduCpUFlmMlOg,2530
+py_neuromodulation/grid_cortex.tsv,sha256=DRnJEBwLBqyzCp3UwBL-Us_9h9GyokCorYl0HLsHSLk,683
+py_neuromodulation/grid_subcortex.tsv,sha256=oCQDYLDdYSa1DAI9ybwECfuzWulFzXqKHyf7oZ1oDBM,25842
+py_neuromodulation/nm_EpochStream.py,sha256=fqLNIQRVtcQOMtOyw-omu3KegEs6tWxZI-GQWqdAeG0,2595
+py_neuromodulation/nm_IO.py,sha256=SFOsCMwu2XOYUjFaDauciVkZsq0YG-Vx4rrevqNDd4g,11688
+py_neuromodulation/nm_RMAP.py,sha256=2wHHD9gwW6Y3XZnGUwmDrWHcz7HjwwLqcwwSjADA0-Q,15791
+py_neuromodulation/nm_across_patient_decoding.py,sha256=9Mwu5o5cUAbtSP8BTx4c-BJfJZtk8wsMx_WH0VIegdI,38690
+py_neuromodulation/nm_analysis.py,sha256=ZIuc1cqG5_2DvoTwvSDL88nyYa_XglANq90xUqeSEXM,38528
+py_neuromodulation/nm_artifacts.py,sha256=KZZ68Q13P64vp05vRiLZ9CPmyWurhcB6K0ldY91axCI,692
+py_neuromodulation/nm_bispectra.py,sha256=7XduXwvryckEMlNeRqKLioNK90rW8AZpvA_Z2I7gThI,6322
+py_neuromodulation/nm_bursts.py,sha256=2xMJjluXttjeijm9e7EaaJJ2fcnnX1kLb600TiJvCzY,7710
+py_neuromodulation/nm_coherence.py,sha256=Iy8FtYBb7qaRFGpzHwOK_IA6O4bvtrumcIyS_Umh0aY,7435
+py_neuromodulation/nm_cohortwrapper.py,sha256=eJVCBW745DB5MX7OBVJh2ZAT1vy-3-_2MCHnjrsO9qI,16643
+py_neuromodulation/nm_decode.py,sha256=d15D-l0RUAcW8FfXyDcPaNwRpS9wuq1XK-pWmImPHDg,35645
+py_neuromodulation/nm_define_nmchannels.py,sha256=8rjZoDIwhsIgdqfSxScaJvY9tNGWNLecTLxEmvhHMuE,11074
+py_neuromodulation/nm_eval_timing.py,sha256=JcnLYyxjH8hvNX7rbJ4PEj-NZzqs2NMGOhyuQQuGiYc,8800
+py_neuromodulation/nm_features.py,sha256=IsfN6nFE_f0OPFaHPjFhjPxMO7pKc6pxkylt6V09LEk,3527
+py_neuromodulation/nm_features_abc.py,sha256=6CX8NHbtpGDY-l9K4CUomlRJVZHPmo6KHJX4RJ1YolM,917
+py_neuromodulation/nm_filter.py,sha256=dusgRby4xDxrj62uINuucm0juCwpGb7Lr3iN4IOs8R4,7542
+py_neuromodulation/nm_filter_preprocessing.py,sha256=rR1QKaf9q_7sWB5J129rBTz5H2PO6rvGxMmU86-npi4,3366
+py_neuromodulation/nm_fooof.py,sha256=rOpKyKzTTnMWBplujsw5kzHAvILBGuCjP0q0UozTIiM,6147
+py_neuromodulation/nm_generator.py,sha256=YPa-1X5gCUVSpxTT9upbbY4kjqoVlRd5njQHBjgpAgk,1088
+py_neuromodulation/nm_hjorth_raw.py,sha256=EvXOBr3k9Q5xypv1Gj8GFqa5EJ_wRYNDGgGnXCUekiM,2180
+py_neuromodulation/nm_kalmanfilter.py,sha256=dgs7_XMBpLEE49l6aAVNA4d6ovTS63CZsAI472eG3JA,2006
+py_neuromodulation/nm_linelength.py,sha256=0iZO7j6g7IeprvlEvWG6a_CaUL2p0sQKGiFgDrzYtxk,954
+py_neuromodulation/nm_mne_connectivity.py,sha256=UCgLRQSrzl0OpOhHdUc1iqWr6fPiHZeU0zNeZfaplJE,3907
+py_neuromodulation/nm_nolds.py,sha256=KZr3ZKqvKJkYKh2BMYuuUMkP8hhmJTUgtLZrE8l-DqQ,3660
+py_neuromodulation/nm_normalization.py,sha256=9v3fiNT0HexD9ycp2gB6BPp_ch86kYez_YNI9yB3zGw,7574
+py_neuromodulation/nm_oscillatory.py,sha256=dvxiytWXwd9_gsvroUhDQkQZBTIZzQEhl2XS2QzmOIQ,16476
+py_neuromodulation/nm_plots.py,sha256=B5A91ejZwoxx2YRa3fB6G_fNamdCm27JSOInqF1OfWc,17869
+py_neuromodulation/nm_projection.py,sha256=g8VersyOGiWj3PjR_U6dvLttsp6CHi_GCyYnu_ha8uc,15309
+py_neuromodulation/nm_rereference.py,sha256=SApVkeWdMmMfD1sr77HSs7MW3eQwVHKEhfm-0zyBu9M,3381
+py_neuromodulation/nm_resample.py,sha256=MSMu8gvmrYZ194fAVJdlB_4yEkuGKHgHrLd11zBDRY0,1122
+py_neuromodulation/nm_run_analysis.py,sha256=kZKcVkAbeAhKzoXyFaNK4Vo_aJTTSAVZxxr8y3GMyCc,15046
+py_neuromodulation/nm_settings.json,sha256=KxvvCTJBaY0PmrxpbNJaY-NQc-eI99YX3lJAwQwDVq4,8287
+py_neuromodulation/nm_settings.py,sha256=7d17vwSGrABxQJ7C6x3FqP8kdboUFpm5LrN4djoVlFA,1775
+py_neuromodulation/nm_sharpwaves.py,sha256=mk6DYjO-834aCEk9-x24DP6b1rSA9gC1Jf6in8GPAgA,16891
+py_neuromodulation/nm_stats.py,sha256=wgYk3jzrfSNEZEWdPSpxQcGcsdNs-vhfKvRhMptSbXM,16029
+py_neuromodulation/nm_stream_abc.py,sha256=hQHFVOnCdZiVfq9NyS5npFQ4DFqJUsY4jbaegEhMvgk,7755
+py_neuromodulation/nm_stream_offline.py,sha256=U7CQlaEMNe3uZJq4vqNROuFjOY_sWWN5GjLq-8_W6C4,12484
+py_neuromodulation/ConnectivityDecoding/Automated Anatomical Labeling 3 (Rolls 2020).nii,sha256=Sp-cjF_AuT0Tlilb5s8lB14hVgkXJiR2uKMS9nOQOeg,902981
+py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m,sha256=pI8MWNBSv4um6VsPlQQqOvqqfk5gXj421KWWPUYMNgQ,748
+py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py,sha256=f4nNTStuZuGOGOz4QZMZAdURpWRK0Ljb4eKP1db-kJc,3207
+py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py,sha256=uOlV_IGoxtoGH6cseSACyDF94HERNH1dUsIMKFvFIkg,3688
+py_neuromodulation/ConnectivityDecoding/mni_coords_cortical_surface.mat,sha256=AZc0mgiAiqXVAxAnfxwICeh-dQX62RfTeRN_knS-i60,11622
+py_neuromodulation/ConnectivityDecoding/mni_coords_whole_brain.mat,sha256=YxT9nrXZ2IECheEhN1SgSsqNyihHUTLuZQ7o5yP4Q-c,29864
+py_neuromodulation/ConnectivityDecoding/rmap_func_all.nii,sha256=WjVA02B2cGNi670_45fdNssspf8GKbkKgRStZ2d4_FU,7221384
+py_neuromodulation/ConnectivityDecoding/rmap_struc.nii,sha256=XsEMjsCxjAsMFvw1_jpQ-wIU2BUuZ_lISPwMa7zDmDk,7221384
+py_neuromodulation/data/README,sha256=Xw-QRZXGq2YefZ3ZDvrK736DruAkFVeaqOn70BzFnPs,714
+py_neuromodulation/data/dataset_description.json,sha256=4LRbbS8x0ifRZhqTNOrtPmm306AmipYnl9duL3OXHrA,130
+py_neuromodulation/data/participants.json,sha256=o24uxJ95GE_b3SAA6nQg6G8Ene6zPL_u02wYT38A8OY,788
+py_neuromodulation/data/participants.tsv,sha256=rPmG1JoJ2O0xbhlLHDJszAK-QSR_7gkSiaykn-HaXvo,79
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/sub-testsub_ses-EphysMedOff_scans.tsv,sha256=zu2Ym07qo164jbClxfT3L960-nHQeRo6WG3QmbpNgl0,91
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_coordsystem.json,sha256=NzLVNODlEAIDwTk49_YcbOUjWiRMdZ8toZYBpmAWS1A,123
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_electrodes.tsv,sha256=eHBvuZa9WJGI5LSid2TBRxzgyacK7AwrFGKEByl3i4w,733
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_channels.tsv,sha256=cZSFnJqjRK8mrtTN271U9L6H_Lo2kuUHimhcKcv9xdY,790
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.eeg,sha256=NnQeMDrKpeK3lctIZ5Bzh85UMTs5OCUCaivn_NU5rc4,760040
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.json,sha256=A0zHnbKXKLBmPly9NshT2B6lsrogFZeIAMXeTzwEYyk,492
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vhdr,sha256=hUwxHtbCZMo16-e06xJ1rVqgs1Ac8LtP8_5AmhjeDw8,1023
+py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vmrk,sha256=WV1mN_QGC1WlFh5qcKzXpedyMhtXVHVwgV5v8n1-_Zw,553
+py_neuromodulation/plots/STN_surf.mat,sha256=Hh2nfQRmP0TYIt8SWV-ajKnC8yk7EmBoAj1c6RMoi9g,124526
+py_neuromodulation/plots/Vertices.mat,sha256=k72WB8-0Datt-bRScxcfpp38LBPHV34zzodS49kQAgs,3709780
+py_neuromodulation/plots/faces.mat,sha256=gWSr-9qNxsAPoG8q-muNnaFZHVHGKqnKN0j3Q1JfMk4,2859245
+py_neuromodulation/plots/grid.mat,sha256=G3MYIVcvk8lbf_kKWIe3lZZ4d0QV6HkoLsOFN1A2vhE,775
+py_neuromodulation/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+py_neuromodulation/utils/_logging.py,sha256=vADt0XAWSzDAvhuOfaOjyZRSZCA11Pym2wi0uvPIoTo,738
+py_neuromodulation-0.0.4.dist-info/METADATA,sha256=uVW2tGNLA5o_MD5blFmYgAC_ILaM9w9EkIvphS3oOPQ,7049
+py_neuromodulation-0.0.4.dist-info/WHEEL,sha256=osohxoshIHTFJFVPhsi1UkZuLRGMHRXZzwEBW2ezjrc,87
+py_neuromodulation-0.0.4.dist-info/licenses/LICENSE,sha256=V6yfss2JKJxeY-TsaEVqVomfd8ttziMyB9ik5L-BI7o,1139
+py_neuromodulation-0.0.4.dist-info/RECORD,,