py-neuromodulation 0.0.4__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff shows the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m +34 -34
- py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py +95 -106
- py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py +107 -119
- py_neuromodulation/FieldTrip.py +589 -589
- py_neuromodulation/__init__.py +74 -13
- py_neuromodulation/_write_example_dataset_helper.py +83 -65
- py_neuromodulation/data/README +6 -6
- py_neuromodulation/data/dataset_description.json +8 -8
- py_neuromodulation/data/participants.json +32 -32
- py_neuromodulation/data/participants.tsv +2 -2
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_coordsystem.json +5 -5
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_electrodes.tsv +11 -11
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_channels.tsv +11 -11
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.json +18 -18
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vhdr +35 -35
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vmrk +13 -13
- py_neuromodulation/data/sub-testsub/ses-EphysMedOff/sub-testsub_ses-EphysMedOff_scans.tsv +2 -2
- py_neuromodulation/grid_cortex.tsv +40 -40
- py_neuromodulation/liblsl/libpugixml.so.1.12 +0 -0
- py_neuromodulation/liblsl/linux/bionic_amd64/liblsl.1.16.2.so +0 -0
- py_neuromodulation/liblsl/linux/bookworm_amd64/liblsl.1.16.2.so +0 -0
- py_neuromodulation/liblsl/linux/focal_amd46/liblsl.1.16.2.so +0 -0
- py_neuromodulation/liblsl/linux/jammy_amd64/liblsl.1.16.2.so +0 -0
- py_neuromodulation/liblsl/linux/jammy_x86/liblsl.1.16.2.so +0 -0
- py_neuromodulation/liblsl/linux/noble_amd64/liblsl.1.16.2.so +0 -0
- py_neuromodulation/liblsl/macos/amd64/liblsl.1.16.2.dylib +0 -0
- py_neuromodulation/liblsl/macos/arm64/liblsl.1.16.0.dylib +0 -0
- py_neuromodulation/liblsl/windows/amd64/liblsl.1.16.2.dll +0 -0
- py_neuromodulation/liblsl/windows/x86/liblsl.1.16.2.dll +0 -0
- py_neuromodulation/nm_IO.py +413 -417
- py_neuromodulation/nm_RMAP.py +496 -531
- py_neuromodulation/nm_analysis.py +993 -1074
- py_neuromodulation/nm_artifacts.py +30 -25
- py_neuromodulation/nm_bispectra.py +154 -168
- py_neuromodulation/nm_bursts.py +292 -198
- py_neuromodulation/nm_coherence.py +251 -205
- py_neuromodulation/nm_database.py +149 -0
- py_neuromodulation/nm_decode.py +918 -992
- py_neuromodulation/nm_define_nmchannels.py +300 -302
- py_neuromodulation/nm_features.py +144 -116
- py_neuromodulation/nm_filter.py +219 -219
- py_neuromodulation/nm_filter_preprocessing.py +79 -91
- py_neuromodulation/nm_fooof.py +139 -159
- py_neuromodulation/nm_generator.py +45 -37
- py_neuromodulation/nm_hjorth_raw.py +52 -73
- py_neuromodulation/nm_kalmanfilter.py +71 -58
- py_neuromodulation/nm_linelength.py +21 -33
- py_neuromodulation/nm_logger.py +66 -0
- py_neuromodulation/nm_mne_connectivity.py +149 -112
- py_neuromodulation/nm_mnelsl_generator.py +90 -0
- py_neuromodulation/nm_mnelsl_stream.py +116 -0
- py_neuromodulation/nm_nolds.py +96 -93
- py_neuromodulation/nm_normalization.py +173 -214
- py_neuromodulation/nm_oscillatory.py +423 -448
- py_neuromodulation/nm_plots.py +585 -612
- py_neuromodulation/nm_preprocessing.py +83 -0
- py_neuromodulation/nm_projection.py +370 -394
- py_neuromodulation/nm_rereference.py +97 -95
- py_neuromodulation/nm_resample.py +59 -50
- py_neuromodulation/nm_run_analysis.py +325 -435
- py_neuromodulation/nm_settings.py +289 -68
- py_neuromodulation/nm_settings.yaml +244 -0
- py_neuromodulation/nm_sharpwaves.py +423 -401
- py_neuromodulation/nm_stats.py +464 -480
- py_neuromodulation/nm_stream.py +398 -0
- py_neuromodulation/nm_stream_abc.py +166 -218
- py_neuromodulation/nm_types.py +193 -0
- {py_neuromodulation-0.0.4.dist-info → py_neuromodulation-0.0.5.dist-info}/METADATA +29 -26
- py_neuromodulation-0.0.5.dist-info/RECORD +83 -0
- {py_neuromodulation-0.0.4.dist-info → py_neuromodulation-0.0.5.dist-info}/WHEEL +1 -1
- {py_neuromodulation-0.0.4.dist-info → py_neuromodulation-0.0.5.dist-info}/licenses/LICENSE +21 -21
- py_neuromodulation/nm_EpochStream.py +0 -92
- py_neuromodulation/nm_across_patient_decoding.py +0 -927
- py_neuromodulation/nm_cohortwrapper.py +0 -435
- py_neuromodulation/nm_eval_timing.py +0 -239
- py_neuromodulation/nm_features_abc.py +0 -39
- py_neuromodulation/nm_settings.json +0 -338
- py_neuromodulation/nm_stream_offline.py +0 -359
- py_neuromodulation/utils/_logging.py +0 -24
- py_neuromodulation-0.0.4.dist-info/RECORD +0 -72
py_neuromodulation/nm_linelength.py
@@ -1,33 +1,21 @@
-import numpy as np
-from …
-
-from py_neuromodulation import …
-
-
-class LineLength(…
-    def __init__(
-        self…
-        …
-        sfreq: int | float,
-    ):
-        # no settings to be checked
-        pass
-
-    def calc_feature(self, data: np.ndarray, features_compute: dict) -> dict:
-        for ch_idx, ch_name in enumerate(self.ch_names):
-            features_compute[
-                "_".join([ch_name, "LineLength"])
-            ] = self.get_line_length(data[ch_idx, :])
-
-        return features_compute
+import numpy as np
+from collections.abc import Sequence
+
+from py_neuromodulation.nm_features import NMFeature
+
+
+class LineLength(NMFeature):
+    def __init__(self, settings: dict, ch_names: Sequence[str], sfreq: float) -> None:
+        self.ch_names = ch_names
+
+    def calc_feature(self, data: np.ndarray) -> dict:
+
+        line_length = np.mean(
+            np.abs(np.diff(data, axis=-1)) / (data.shape[1] - 1), axis=-1
+        )
+
+        feature_results = {}
+        for ch_idx, ch_name in enumerate(self.ch_names):
+            feature_results[f"{ch_name}_LineLength"] = line_length[ch_idx]
+
+        return feature_results
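For context, the rewritten calc_feature vectorizes the line-length feature as the mean absolute sample-to-sample difference per channel, scaled by the window length. A minimal standalone sketch of that same expression on made-up data (the two example channels are illustrative, not part of the package):

import numpy as np

# Two hypothetical channels: one sine cycle and a flat line, 250 samples each.
data = np.vstack([np.sin(np.linspace(0, 2 * np.pi, 250)), np.zeros(250)])

# Same expression as the new calc_feature: |x[t+1] - x[t]| averaged over time,
# with each difference divided by (n_samples - 1).
line_length = np.mean(np.abs(np.diff(data, axis=-1)) / (data.shape[1] - 1), axis=-1)

print(line_length)  # first value > 0 (sine channel), second value == 0 (flat channel)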
py_neuromodulation/nm_logger.py
@@ -0,0 +1,66 @@
+from pathlib import Path
+from py_neuromodulation.nm_types import _PathLike
+import logging
+
+INFOFORMAT = "%(name)s:\t%(message)s"
+DEBUGFORMAT = "%(asctime)s:%(levelname)s:%(name)s:%(filename)s:%(funcName)s:%(lineno)d:\t%(message)s"
+
+LOG_LEVELS = {
+    "DEBUG": (logging.DEBUG, DEBUGFORMAT),
+    "INFO": (logging.INFO, INFOFORMAT),
+    "WARNING": (logging.WARN, DEBUGFORMAT),
+    "ERROR": (logging.ERROR, DEBUGFORMAT),
+}
+
+
+class NMLogger(logging.Logger):
+    """
+    Subclass of logging.Logger with some extra functionality
+    """
+
+    def __init__(self, name: str, level: str = "INFO") -> None:
+        super().__init__(name, LOG_LEVELS[level][0])
+
+        self.setLevel(level)
+
+        self._console_handler = logging.StreamHandler()
+        self._console_handler.setLevel(level)
+        self._console_handler.setFormatter(logging.Formatter(LOG_LEVELS[level][1]))
+
+        self.addHandler(self._console_handler)
+
+    def set_level(self, level: str):
+        """
+        Set console logging level
+        """
+        self.setLevel(level)
+        self._console_handler.setLevel(level)
+        self._console_handler.setFormatter(logging.Formatter(LOG_LEVELS[level][1]))
+
+    def log_to_file(self, path: _PathLike, mode: str = "w"):
+        """
+        Add file handlers to the logger
+
+        Parameters
+        ----------
+        path: directory where to save logfiles
+        mode : str, ('w', 'a')
+            w: overwrite files
+            a: append to files
+        """
+
+        path = Path(path)
+        path.mkdir(parents=True, exist_ok=True)
+
+        self.debug_file_handler = logging.FileHandler(path / "logfile_pynm_debug.log")
+        self.debug_file_handler.setLevel(logging.DEBUG)
+        self.debug_file_handler.setFormatter(logging.Formatter(DEBUGFORMAT))
+
+        self.info_file_handler = logging.FileHandler(
+            path / "logfile_pynm_info.log", mode=mode
+        )
+        self.info_file_handler.setLevel(logging.INFO)
+        self.info_file_handler.setFormatter(logging.Formatter(INFOFORMAT))
+
+        self.addHandler(self.info_file_handler)
+        self.addHandler(self.debug_file_handler)
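The new NMLogger is a drop-in logging.Logger subclass with a console handler attached at construction. A short usage sketch that only exercises the methods shown above; the import path follows the file list, and the output directory name is a placeholder:

from py_neuromodulation.nm_logger import NMLogger

logger = NMLogger("pynm_example")   # console handler at INFO level by default
logger.info("feature computation started")

logger.set_level("DEBUG")           # switches the console level and log format
logger.log_to_file("pynm_logs")     # hypothetical output directory; writes
                                    # logfile_pynm_debug.log and logfile_pynm_info.log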
py_neuromodulation/nm_mne_connectivity.py
@@ -1,112 +1,149 @@
-from …
-import numpy as np
-
-import …
-
-
-from py_neuromodulation import …
-… (removed lines 8-112 not captured in this diff view)
+from collections.abc import Iterable
+import numpy as np
+import pandas as pd
+from typing import TYPE_CHECKING
+
+from py_neuromodulation.nm_features import NMFeature
+from py_neuromodulation.nm_types import NMBaseModel
+
+if TYPE_CHECKING:
+    from py_neuromodulation.nm_settings import NMSettings
+    from mne.io import RawArray
+    from mne import Epochs
+
+
+class MNEConnectivitySettings(NMBaseModel):
+    method: str = "plv"
+    mode: str = "multitaper"
+
+
+class MNEConnectivity(NMFeature):
+    def __init__(
+        self,
+        settings: "NMSettings",
+        ch_names: Iterable[str],
+        sfreq: float,
+    ) -> None:
+        from mne import create_info
+
+        self.settings = settings
+
+        self.ch_names = ch_names
+        self.sfreq = sfreq
+
+        # Params used by spectral_connectivity_epochs
+        self.mode = settings.mne_connectivity.mode
+        self.method = settings.mne_connectivity.method
+
+        self.fbands = settings.frequency_ranges_hz
+        self.fband_ranges: list = []
+        self.result_keys = []
+
+        self.raw_info = create_info(ch_names=self.ch_names, sfreq=self.sfreq)
+        self.raw_array: "RawArray"
+        self.epochs: "Epochs"
+        self.prev_batch_shape: tuple = (-1, -1)  # sentinel value
+
+    def calc_feature(self, data: np.ndarray) -> dict:
+        from mne.io import RawArray
+        from mne import Epochs
+        from mne_connectivity import spectral_connectivity_epochs
+
+        time_samples_s = data.shape[1] / self.sfreq
+        epoch_length: float = 1  # TODO: Make this a parameter?
+
+        if epoch_length > time_samples_s:
+            raise ValueError(
+                f"the intended epoch length for mne connectivity: {epoch_length}s"
+                f" are longer than the passed data array {np.round(time_samples_s, 2)}s"
+            )
+
+        # Only reinitialize the raw_array and epochs object if the data shape has changed
+        # That could mean that the channels have been re-selected, or we're in the last batch
+        # TODO: If sfreq or channels change, do we re-initialize the whole Stream object?
+        if data.shape != self.prev_batch_shape:
+            self.raw_array = RawArray(
+                data=data,
+                info=self.raw_info,
+                copy=None,  # type: ignore
+                verbose=False,
+            )
+
+            # self.events = make_fixed_length_events(self.raw_array, duration=epoch_length)
+            # Equivalent code for those parameters:
+            event_times = np.arange(
+                0, data.shape[-1], self.sfreq * epoch_length, dtype=int
+            )
+            events = np.column_stack(
+                (
+                    event_times,
+                    np.zeros_like(event_times, dtype=int),
+                    np.ones_like(event_times, dtype=int),
+                )
+            )
+
+            # there need to be minimum 2 of two epochs, otherwise mne_connectivity
+            # is not correctly initialized
+            if events.shape[0] < 2:
+                raise RuntimeError(
+                    f"A minimum of 2 epochs is required for mne_connectivity,"
+                    f" got only {events.shape[0]}. Increase settings['segment_length_features_ms']"
+                )
+
+            self.epochs = Epochs(
+                self.raw_array,
+                events=events,
+                event_id={"rest": 1},
+                tmin=0,
+                tmax=epoch_length,
+                baseline=None,
+                reject_by_annotation=True,
+                verbose=False,
+            )
+
+            # Trick the function "spectral_connectivity_epochs" into not calling "add_annotations_to_metadata"
+            # TODO: This is a hack, and maybe needs a fix in the mne_connectivity library
+            self.epochs._metadata = pd.DataFrame(index=np.arange(events.shape[0]))
+
+        else:
+            # As long as the initialization parameters, channels, sfreq and batch size are the same
+            # We can re-use the existing epochs object by updating the raw data
+            self.raw_array._data = data
+            self.epochs._raw = self.raw_array
+
+        # n_jobs is here kept to 1, since setup of the multiprocessing Pool
+        # takes longer than most batch computing sizes
+        spec_out = spectral_connectivity_epochs(
+            data=self.epochs,
+            sfreq=self.sfreq,
+            method=self.method,
+            mode=self.mode,
+            indices=(np.array([0, 0, 1, 1]), np.array([2, 3, 2, 3])),
+            verbose=False,
+        )
+        dat_conn: np.ndarray = spec_out.get_data()
+
+        # Get frequency band ranges only for the first batch, it's already the same
+        if len(self.fband_ranges) == 0:
+            for fband_range in self.fbands.values():
+                self.fband_ranges.append(
+                    np.where(
+                        (np.array(spec_out.freqs) > fband_range[0])
+                        & (np.array(spec_out.freqs) < fband_range[1])
+                    )[0]
+                )
+
+        # TODO: If I compute the mean for the entire fband, results are almost the same before
+        # normalization (0.9999999... vs 1.0), but some change wildly after normalization (-3 vs 0)
+        # Investigate why, is this a bug in normalization?
+        feature_results = {}
+        for conn in np.arange(dat_conn.shape[0]):
+            for fband_idx, fband in enumerate(self.fbands):
+                feature_results["_".join(["ch1", self.method, str(conn), fband])] = (
+                    np.mean(dat_conn[conn, self.fband_ranges[fband_idx]])
+                )
+
+        # Store current experiment parameters to check if re-initialization is needed
+        self.prev_batch_shape = data.shape
+
+        return feature_results
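The core of the rewritten feature is band-averaging the output of mne_connectivity.spectral_connectivity_epochs. A minimal standalone sketch of that step on synthetic data, assuming mne_connectivity is installed; the random data, channel pairing, and band edges are illustrative assumptions rather than package defaults:

import numpy as np
from mne_connectivity import spectral_connectivity_epochs

rng = np.random.default_rng(0)
sfreq = 250
data = rng.standard_normal((4, 4, sfreq))  # 4 epochs x 4 channels x 1 s of samples

conn = spectral_connectivity_epochs(
    data=data,
    sfreq=sfreq,
    method="plv",
    mode="multitaper",
    indices=(np.array([0, 0, 1, 1]), np.array([2, 3, 2, 3])),  # same pairing as above
    verbose=False,
)

freqs = np.array(conn.freqs)
plv = conn.get_data()  # (n_connections, n_freqs), indexed the same way as dat_conn above

band = (13.0, 35.0)  # hypothetical beta-like band
mask = (freqs > band[0]) & (freqs < band[1])
print(plv[:, mask].mean(axis=1))  # one band-averaged connectivity value per connection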
py_neuromodulation/nm_mnelsl_generator.py
@@ -0,0 +1,90 @@
+import numpy as np
+import mne
+from pathlib import Path
+
+from py_neuromodulation import logger, nm_types, nm_IO
+
+class LSLOfflinePlayer:
+
+    def __init__(
+        self,
+        stream_name: str | None = "lsl_offline_player",
+        f_name: str | nm_types.PathLike = None,
+        raw: mne.io.Raw | None = None,
+        sfreq: int | float | None = None,
+        data: np.ndarray | None = None,
+        ch_type: str | None = "dbs",
+    ) -> None:
+        """Initialization of MNE-LSL offline player.
+        Either a filename (PathLike) is provided,
+        or data and sampling frequency to initialize an example mock-up stream.
+
+
+        Parameters
+        ----------
+        stream_name : str, optional
+            LSL stream name, by default "example_stream"
+        f_name : str | None, optional
+            file name used for streaming, by default None
+        sfreq : int | float | None, optional
+            sampling rate, by default None
+        data : np.ndarray | None, optional
+            data used for streaming, by default None
+        ch_type: str | None, optional
+            channel type to select for streaming, by default "dbs"
+
+        Raises
+        ------
+        ValueError
+            _description_
+        """
+        self.sfreq = sfreq
+        self.stream_name = stream_name
+        got_raw = raw is not None
+        got_fname = f_name is not None
+        got_sfreq_data = sfreq is not None and data is not None
+
+        if not (got_fname or got_sfreq_data or got_raw):
+            error_msg = "Either f_name or raw or sfreq and data must be provided."
+            logger.critical(error_msg)
+            raise ValueError(error_msg)
+
+        if got_fname:
+            (self._path_raw, data, sfreq, line_noise, coord_list, coord_names) = nm_IO.read_BIDS_data(f_name)
+
+        elif got_raw:
+            self._path_raw = raw
+
+        elif got_sfreq_data:
+            info = mne.create_info(
+                ch_names=[f"ch{i}" for i in range(data.shape[0])],
+                ch_types=[ch_type for _ in range(data.shape[0])],
+                sfreq=sfreq,
+            )
+            raw = mne.io.RawArray(data, info)
+            self._path_raw = Path.cwd() / "temp_raw.fif"
+            raw.save(self._path_raw, overwrite=True)
+
+    def start_player(self, chunk_size: int = 10, n_repeat: int = 1):
+        """Start MNE-LSL Player
+
+        Parameters
+        ----------
+        chunk_size : int, optional
+            _description_, by default 1
+        n_repeat : int, optional
+            _description_, by default 1
+        """
+        from mne_lsl.player import PlayerLSL
+
+        self.player = PlayerLSL(
+            self._path_raw,
+            name=self.stream_name,
+            chunk_size=chunk_size,
+            n_repeat=n_repeat,
+        )
+        self.player = self.player.start()
+
+    def stop_player(self):
+        """Stop MNE-LSL Player"""
+        self.player.stop()
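A short usage sketch of the new offline player, assuming the module is importable as py_neuromodulation.nm_mnelsl_generator (the path implied by the file list); the channel count, duration, and sampling rate are made-up mock-data values:

import numpy as np
from py_neuromodulation.nm_mnelsl_generator import LSLOfflinePlayer

sfreq = 250
data = np.random.default_rng(0).standard_normal((4, 10 * sfreq))  # 4 channels, 10 s

player = LSLOfflinePlayer(stream_name="lsl_offline_player", sfreq=sfreq, data=data)
player.start_player(chunk_size=10, n_repeat=1)
# ... connect a consumer (for example the LSLStream class below) while the player runs ...
player.stop_player()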
py_neuromodulation/nm_mnelsl_stream.py
@@ -0,0 +1,116 @@
+from collections.abc import Iterator
+import time
+from typing import TYPE_CHECKING
+import numpy as np
+from py_neuromodulation import logger
+from mne_lsl.lsl import resolve_streams
+import os
+
+if TYPE_CHECKING:
+    from py_neuromodulation import NMSettings
+
+
+class LSLStream:
+    """
+    Class is used to create and connect to a LSL stream and pull data from it.
+    """
+
+    def __init__(self, settings: "NMSettings", stream_name: str | None = None) -> None:
+        """
+        Initialize the LSL stream.
+
+        Parameters:
+        -----------
+        settings : nm_settings.NMSettings object
+        stream_name : str, optional
+            Name of the stream to connect to. If not provided, the first available stream is used.
+
+        Raises:
+        -------
+        RuntimeError
+            If no stream is running under the provided name or if there are multiple streams running
+            under the same name.
+        """
+        from mne_lsl.stream import StreamLSL
+        self.stream: StreamLSL
+
+        self.settings = settings
+        self._n_seconds_wait_before_disconnect = 3
+        try:
+            if stream_name is None:
+                stream_name = resolve_streams()[0].name
+                logger.info(
+                    f"Stream name not provided. Using first available stream: {stream_name}"
+                )
+            self.stream = StreamLSL(name=stream_name, bufsize=2).connect(timeout=2)
+        except Exception as e:
+            msg = f"Could not connect to stream: {e}. Either no stream is running under the name {stream_name} or there is several streams under this name."
+            logger.exception(msg)
+            raise RuntimeError(msg)
+
+        if self.stream.sinfo is None:
+            raise RuntimeError("Stream info is None. Check if the stream is running.")
+
+        self.winsize = settings.segment_length_features_ms / self.stream.sinfo.sfreq
+        self.sampling_interval = 1 / self.settings.sampling_rate_features_hz
+
+        # If not running the generator when the escape key is pressed.
+        self.headless: bool = not os.environ.get("DISPLAY")
+        if not self.headless:
+            from pynput import keyboard
+
+            self.listener = keyboard.Listener(
+                on_press=lambda key: key != keyboard.Key.esc  # type: ignore
+            )
+            self.listener.start()
+
+    def get_next_batch(self) -> Iterator[tuple[np.ndarray, np.ndarray]]:
+        self.last_time = time.time()
+        check_data = None
+        data = None
+        stream_start_time = None
+
+        while self.stream.connected:
+            time_diff = time.time() - self.last_time  # in s
+            time.sleep(0.005)
+            if time_diff >= self.sampling_interval:
+                self.last_time = time.time()
+
+                logger.debug(f"Pull data - current time: {self.last_time}")
+                logger.debug(f"time since last data pull {time_diff} seconds")
+
+                if time_diff >= 2 * self.sampling_interval:
+                    logger.warning(
+                        "Feature computation time between two consecutive samples"
+                        "was twice the feature sampling interval"
+                    )
+                if data is not None:
+                    check_data = data
+
+                data, timestamp = self.stream.get_data(winsize=self.winsize)
+                if stream_start_time is None:
+                    stream_start_time = timestamp[0]
+
+                for i in range(self._n_seconds_wait_before_disconnect):
+                    if (
+                        data is not None
+                        and check_data is not None
+                        and np.allclose(data, check_data, atol=1e-7, rtol=1e-7)
+                    ):
+                        logger.warning(
+                            f"No new data incoming. Disconnecting stream in {3-i} seconds."
+                        )
+                        time.sleep(1)
+                        i += 1
+                        if i == self._n_seconds_wait_before_disconnect:
+                            self.stream.disconnect()
+                            logger.warning("Stream disconnected.")
+                            break
+
+                yield timestamp, data
+
+                logger.info(f"Stream time: {timestamp[-1] - stream_start_time}")
+
+            if not self.headless and not self.listener.running:
+                logger.info("Keyboard interrupt")
+                self.stream.disconnect()
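A short consumer-side sketch pairing with the player above. The LSLStream import path follows the file list and the NMSettings import path follows the TYPE_CHECKING block above; how NMSettings is actually constructed is not shown in this diff, so its default construction here is an assumption:

from py_neuromodulation import NMSettings                # import path taken from the diff above
from py_neuromodulation.nm_mnelsl_stream import LSLStream

settings = NMSettings()                                  # assumed default construction (not shown in this diff)
stream = LSLStream(settings, stream_name="lsl_offline_player")

for timestamps, batch in stream.get_next_batch():
    # batch: (n_channels, n_samples) window covering segment_length_features_ms,
    # pulled roughly every 1 / sampling_rate_features_hz seconds
    print(timestamps[-1], batch.shape)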