eegdash 0.3.3.dev61__py3-none-any.whl → 0.5.0.dev180784713__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. eegdash/__init__.py +19 -6
  2. eegdash/api.py +336 -539
  3. eegdash/bids_eeg_metadata.py +495 -0
  4. eegdash/const.py +349 -0
  5. eegdash/dataset/__init__.py +28 -0
  6. eegdash/dataset/base.py +311 -0
  7. eegdash/dataset/bids_dataset.py +641 -0
  8. eegdash/dataset/dataset.py +692 -0
  9. eegdash/dataset/dataset_summary.csv +255 -0
  10. eegdash/dataset/registry.py +287 -0
  11. eegdash/downloader.py +197 -0
  12. eegdash/features/__init__.py +15 -13
  13. eegdash/features/datasets.py +329 -138
  14. eegdash/features/decorators.py +105 -13
  15. eegdash/features/extractors.py +233 -63
  16. eegdash/features/feature_bank/__init__.py +12 -12
  17. eegdash/features/feature_bank/complexity.py +22 -20
  18. eegdash/features/feature_bank/connectivity.py +27 -28
  19. eegdash/features/feature_bank/csp.py +3 -1
  20. eegdash/features/feature_bank/dimensionality.py +6 -6
  21. eegdash/features/feature_bank/signal.py +29 -30
  22. eegdash/features/feature_bank/spectral.py +40 -44
  23. eegdash/features/feature_bank/utils.py +8 -0
  24. eegdash/features/inspect.py +126 -15
  25. eegdash/features/serialization.py +58 -17
  26. eegdash/features/utils.py +90 -16
  27. eegdash/hbn/__init__.py +28 -0
  28. eegdash/hbn/preprocessing.py +105 -0
  29. eegdash/hbn/windows.py +428 -0
  30. eegdash/logging.py +54 -0
  31. eegdash/mongodb.py +55 -24
  32. eegdash/paths.py +52 -0
  33. eegdash/utils.py +29 -1
  34. eegdash-0.5.0.dev180784713.dist-info/METADATA +121 -0
  35. eegdash-0.5.0.dev180784713.dist-info/RECORD +38 -0
  36. eegdash-0.5.0.dev180784713.dist-info/licenses/LICENSE +29 -0
  37. eegdash/data_config.py +0 -34
  38. eegdash/data_utils.py +0 -687
  39. eegdash/dataset.py +0 -69
  40. eegdash/preprocessing.py +0 -63
  41. eegdash-0.3.3.dev61.dist-info/METADATA +0 -192
  42. eegdash-0.3.3.dev61.dist-info/RECORD +0 -28
  43. eegdash-0.3.3.dev61.dist-info/licenses/LICENSE +0 -23
  44. {eegdash-0.3.3.dev61.dist-info → eegdash-0.5.0.dev180784713.dist-info}/WHEEL +0 -0
  45. {eegdash-0.3.3.dev61.dist-info → eegdash-0.5.0.dev180784713.dist-info}/top_level.txt +0 -0
eegdash/features/inspect.py CHANGED
@@ -1,15 +1,50 @@
+ from __future__ import annotations
+
  import inspect
  from collections.abc import Callable

  from . import extractors, feature_bank
- from .extractors import FeatureExtractor, MultivariateFeature, _get_underlying_func
+ from .extractors import _get_underlying_func
+
+ __all__ = [
+     "get_all_feature_extractors",
+     "get_all_feature_preprocessors",
+     "get_all_feature_kinds",
+     "get_all_features",
+     "get_feature_kind",
+     "get_feature_predecessors",
+ ]
+
+
+ def get_feature_predecessors(feature_or_extractor: Callable | None) -> list:
+     """Get the dependency hierarchy for a feature or feature extractor.

+     This function recursively traverses the `parent_extractor_type` attribute
+     of a feature or extractor to build a list representing its dependency
+     lineage.

- def get_feature_predecessors(feature_or_extractor: Callable):
+     Parameters
+     ----------
+     feature_or_extractor : callable
+         The feature function or :class:`~eegdash.features.extractors.FeatureExtractor`
+         class to inspect.
+
+     Returns
+     -------
+     list
+         A nested list representing the dependency tree. For a simple linear
+         chain, this will be a flat list from the specific feature up to the
+         base :class:`~eegdash.features.extractors.FeatureExtractor`. For
+         multiple dependencies, it will contain tuples of sub-dependencies.
+
+     """
+     current = feature_or_extractor
+     if current is None:
+         return [None]
+     if isinstance(current, extractors.FeatureExtractor):
+         current = current.preprocessor
      current = _get_underlying_func(feature_or_extractor)
-     if current is FeatureExtractor:
-         return [current]
-     predecessor = getattr(current, "parent_extractor_type", [FeatureExtractor])
+     predecessor = getattr(current, "parent_extractor_type", [None])
      if len(predecessor) == 1:
          return [current, *get_feature_predecessors(predecessor[0])]
      else:
@@ -20,29 +55,105 @@ def get_feature_predecessors(feature_or_extractor: Callable):
          return [current, tuple(predecessors)]


- def get_feature_kind(feature: Callable):
+ def get_feature_kind(feature: Callable) -> extractors.MultivariateFeature:
+     """Get the 'kind' of a feature function.
+
+     The feature kind (e.g., univariate, bivariate) is typically attached by a
+     decorator.
+
+     Parameters
+     ----------
+     feature : callable
+         The feature function to inspect.
+
+     Returns
+     -------
+     :class:`~eegdash.features.extractors.MultivariateFeature`
+         An instance of the feature kind (e.g., ``UnivariateFeature()``).
+
+     """
      return _get_underlying_func(feature).feature_kind


- def get_all_features():
+ def get_all_features() -> list[tuple[str, Callable]]:
+     """Get a list of all available feature functions.
+
+     Scans the `eegdash.features.feature_bank` module for functions that have
+     been decorated to have a `feature_kind` attribute.
+
+     Returns
+     -------
+     list[tuple[str, callable]]
+         A list of (name, function) tuples for all discovered features.
+
+     """
+
      def isfeature(x):
          return hasattr(_get_underlying_func(x), "feature_kind")

      return inspect.getmembers(feature_bank, isfeature)


- def get_all_feature_extractors():
+ def get_all_feature_extractors() -> list[tuple[str, Callable]]:
+     """Get a list of all available feature extractor callables.
+
+     A feature extractor is any callable in the feature bank that participates
+     in the feature graph, meaning it declares a ``parent_extractor_type``
+     via :class:`~eegdash.features.decorators.FeaturePredecessor`. This
+     includes both preprocessors and the final feature functions.
+
+     Returns
+     -------
+     list[tuple[str, callable]]
+         A list of (name, callable) tuples for all discovered feature
+         extractors.
+
+     """
+
      def isfeatureextractor(x):
-         return inspect.isclass(x) and issubclass(x, FeatureExtractor)
+         y = _get_underlying_func(x)
+         return callable(y) and hasattr(y, "parent_extractor_type")
+
+     return inspect.getmembers(feature_bank, isfeatureextractor)
+
+
+ def get_all_feature_preprocessors() -> list[tuple[str, Callable]]:
+     """Get a list of all available preprocessor functions.
+
+     Scans the `eegdash.features.feature_bank` module for all preprocessor functions.
+
+     Returns
+     -------
+     list[tuple[str, Callable]]
+         A list of (name, function) tuples for all discovered feature preprocessors.
+
+     """
+
+     def isfeatureextractor(x):
+         y = _get_underlying_func(x)
+         return (
+             callable(y)
+             and not hasattr(y, "feature_kind")
+             and hasattr(y, "parent_extractor_type")
+         )
+
+     return inspect.getmembers(feature_bank, isfeatureextractor)
+
+
+ def get_all_feature_kinds() -> list[tuple[str, type[extractors.MultivariateFeature]]]:
+     """Get a list of all available feature 'kind' classes.
+
+     Scans the `eegdash.features.extractors` module for all classes that
+     subclass :class:`~eegdash.features.extractors.MultivariateFeature`.

-     return [
-         ("FeatureExtractor", FeatureExtractor),
-         *inspect.getmembers(feature_bank, isfeatureextractor),
-     ]
+     Returns
+     -------
+     list[tuple[str, type[eegdash.features.extractors.MultivariateFeature]]]
+         A list of (name, class) tuples for all discovered feature kinds.

+     """

- def get_all_feature_kinds():
      def isfeaturekind(x):
-         return inspect.isclass(x) and issubclass(x, MultivariateFeature)
+         return inspect.isclass(x) and issubclass(x, extractors.MultivariateFeature)

      return inspect.getmembers(extractors, isfeaturekind)
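For orientation, a minimal usage sketch of the reworked introspection helpers, assuming they remain importable from eegdash.features.inspect as this file's path suggests; the output depends on the installed feature bank.

from eegdash.features.inspect import (
    get_all_features,
    get_feature_kind,
    get_feature_predecessors,
)

# Enumerate every decorated feature discovered in the feature bank,
# together with its kind (univariate, bivariate, ...).
for name, func in get_all_features():
    print(name, get_feature_kind(func))

# Walk one feature's dependency lineage; after this change the chain
# terminates in None instead of the FeatureExtractor base class.
name, func = get_all_features()[0]
print(get_feature_predecessors(func))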
eegdash/features/serialization.py CHANGED
@@ -1,8 +1,13 @@
- """Convenience functions for storing and loading of features datasets.
+ """Convenience functions for storing and loading features datasets.
+
+ See Also
+ --------
+ https://github.com/braindecode/braindecode/blob/master/braindecode/datautil/serialization.py#L165-L229

- see also: https://github.com/braindecode/braindecode//blob/master/braindecode/datautil/serialization.py#L165-L229
  """

+ from __future__ import annotations
+
  from pathlib import Path

  import pandas as pd
@@ -13,33 +18,48 @@ from braindecode.datautil.serialization import _load_kwargs_json

  from .datasets import FeaturesConcatDataset, FeaturesDataset

+ __all__ = [
+     "load_features_concat_dataset",
+ ]
+
+
+ def load_features_concat_dataset(
+     path: str | Path, ids_to_load: list[int] | None = None, n_jobs: int = 1
+ ) -> FeaturesConcatDataset:
+     """Load a stored :class:`~eegdash.features.datasets.FeaturesConcatDataset` from a directory.

- def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):
-     """Load a stored FeaturesConcatDataset of FeaturesDatasets from files.
+     This function reconstructs a
+     :class:`~eegdash.features.datasets.FeaturesConcatDataset` by loading
+     individual :class:`~eegdash.features.datasets.FeaturesDataset` instances
+     from subdirectories within the given path. It uses joblib for parallel
+     loading.

      Parameters
      ----------
-     path: str | pathlib.Path
-         Path to the directory of the .fif / -epo.fif and .json files.
-     ids_to_load: list of int | None
-         Ids of specific files to load.
-     n_jobs: int
-         Number of jobs to be used to read files in parallel.
+     path : str or pathlib.Path
+         The path to the directory where the dataset was saved. This directory
+         should contain subdirectories (e.g., "0", "1", "2", ...) for each
+         individual dataset.
+     ids_to_load : list of int, optional
+         A list of specific dataset IDs (subdirectory names) to load. If None,
+         all subdirectories in the path will be loaded.
+     n_jobs : int, default 1
+         The number of jobs to use for parallel loading. -1 means using all
+         processors.

      Returns
      -------
-     concat_dataset: FeaturesConcatDataset of FeaturesDatasets
+     eegdash.features.datasets.FeaturesConcatDataset
+         A concatenated dataset containing the loaded
+         :class:`~eegdash.features.datasets.FeaturesDataset` instances.

      """
      # Make sure we always work with a pathlib.Path
      path = Path(path)

-     # else we have a dataset saved in the new way with subdirectories in path
-     # for every dataset with description.json and -feat.parquet,
-     # target_name.json, raw_preproc_kwargs.json, window_kwargs.json,
-     # window_preproc_kwargs.json, features_kwargs.json
      if ids_to_load is None:
-         ids_to_load = [p.name for p in path.iterdir()]
+         # Get all subdirectories and sort them numerically
+         ids_to_load = [p.name for p in path.iterdir() if p.is_dir()]
          ids_to_load = sorted(ids_to_load, key=lambda i: int(i))
      ids_to_load = [str(i) for i in ids_to_load]

@@ -47,7 +67,28 @@ def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):
      return FeaturesConcatDataset(datasets)


- def _load_parallel(path, i):
+ def _load_parallel(path: Path, i: str) -> FeaturesDataset:
+     """Load a single :class:`~eegdash.features.datasets.FeaturesDataset` from its subdirectory.
+
+     This is a helper function for
+     :func:`~eegdash.features.serialization.load_features_concat_dataset` that
+     handles the loading of one dataset's files (features, metadata, descriptions, etc.).
+
+     Parameters
+     ----------
+     path : pathlib.Path
+         The root directory of the saved
+         :class:`~eegdash.features.datasets.FeaturesConcatDataset`.
+     i : str
+         The identifier of the dataset to load, corresponding to its
+         subdirectory name.
+
+     Returns
+     -------
+     eegdash.features.datasets.FeaturesDataset
+         The loaded dataset instance.
+
+     """
      sub_dir = path / i

      parquet_name_pattern = "{}-feat.parquet"
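For orientation, a round-trip sketch of the loader, assuming a dataset previously saved in the numbered-subdirectory layout described above; "features_dir" is a hypothetical path.

from eegdash.features.serialization import load_features_concat_dataset

# "features_dir" holds one numbered subdirectory per dataset
# ("0", "1", ...), each with a -feat.parquet file and JSON sidecars.
concat_ds = load_features_concat_dataset("features_dir", n_jobs=4)

# Load only a subset of the saved datasets by subdirectory id.
subset = load_features_concat_dataset("features_dir", ids_to_load=[0, 1])
print(len(subset.datasets))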
eegdash/features/utils.py CHANGED
@@ -14,15 +14,41 @@ from braindecode.datasets.base import (
      WindowsDataset,
  )

+ from . import extractors
  from .datasets import FeaturesConcatDataset, FeaturesDataset
- from .extractors import FeatureExtractor
+
+ __all__ = [
+     "extract_features",
+     "fit_feature_extractors",
+ ]


  def _extract_features_from_windowsdataset(
      win_ds: EEGWindowsDataset | WindowsDataset,
-     feature_extractor: FeatureExtractor,
+     feature_extractor: extractors.FeatureExtractor,
      batch_size: int = 512,
- ):
+ ) -> FeaturesDataset:
+     """Extract features from a single `WindowsDataset`.
+
+     This is a helper function that iterates through a `WindowsDataset` in
+     batches, applies a `FeatureExtractor`, and returns the results as a
+     `FeaturesDataset`.
+
+     Parameters
+     ----------
+     win_ds : EEGWindowsDataset or WindowsDataset
+         The windowed dataset to extract features from.
+     feature_extractor : ~eegdash.features.extractors.FeatureExtractor
+         The feature extractor instance to apply.
+     batch_size : int, default 512
+         The number of windows to process in each batch.
+
+     Returns
+     -------
+     ~eegdash.features.datasets.FeaturesDataset
+         A new dataset containing the extracted features and associated metadata.
+
+     """
      metadata = win_ds.metadata
      if not win_ds.targets_from == "metadata":
          metadata = copy.deepcopy(metadata)
@@ -51,33 +77,59 @@
              features_dict[k].extend(v)
      features_df = pd.DataFrame(features_dict)
      if not win_ds.targets_from == "metadata":
-         metadata.set_index("orig_index", drop=False, inplace=True)
          metadata.reset_index(drop=True, inplace=True)
-         metadata.drop("orig_index", axis=1, inplace=True)
+         metadata.drop("orig_index", axis=1, inplace=True, errors="ignore")

-     # FUTURE: truly support WindowsDataset objects
      return FeaturesDataset(
          features_df,
          metadata=metadata,
          description=win_ds.description,
          raw_info=win_ds.raw.info,
-         raw_preproc_kwargs=win_ds.raw_preproc_kwargs,
-         window_kwargs=win_ds.window_kwargs,
+         raw_preproc_kwargs=getattr(win_ds, "raw_preproc_kwargs", None),
+         window_kwargs=getattr(win_ds, "window_kwargs", None),
          features_kwargs=feature_extractor.features_kwargs,
      )


  def extract_features(
      concat_dataset: BaseConcatDataset,
-     features: FeatureExtractor | Dict[str, Callable] | List[Callable],
+     features: extractors.FeatureExtractor | Dict[str, Callable] | List[Callable],
      *,
      batch_size: int = 512,
      n_jobs: int = 1,
- ):
+ ) -> FeaturesConcatDataset:
+     """Extract features from a concatenated dataset of windows.
+
+     This function applies a feature extractor to each `WindowsDataset` within a
+     `BaseConcatDataset` in parallel and returns a `FeaturesConcatDataset`
+     with the results.
+
+     Parameters
+     ----------
+     concat_dataset : BaseConcatDataset
+         A concatenated dataset of `WindowsDataset` or `EEGWindowsDataset`
+         instances.
+     features : ~eegdash.features.extractors.FeatureExtractor or dict or list
+         The feature extractor(s) to apply. Can be a
+         :class:`~eegdash.features.extractors.FeatureExtractor`
+         instance, a dictionary of named feature functions, or a list of
+         feature functions.
+     batch_size : int, default 512
+         The size of batches to use for feature extraction.
+     n_jobs : int, default 1
+         The number of parallel jobs to use for extracting features from the
+         datasets.
+
+     Returns
+     -------
+     ~eegdash.features.datasets.FeaturesConcatDataset
+         A new concatenated dataset containing the extracted features.
+
+     """
      if isinstance(features, list):
          features = dict(enumerate(features))
-     if not isinstance(features, FeatureExtractor):
-         features = FeatureExtractor(features)
+     if not isinstance(features, extractors.FeatureExtractor):
+         features = extractors.FeatureExtractor(features)
      feature_ds_list = list(
          tqdm(
              Parallel(n_jobs=n_jobs, return_as="generator")(
@@ -95,13 +147,35 @@

  def fit_feature_extractors(
      concat_dataset: BaseConcatDataset,
-     features: FeatureExtractor | Dict[str, Callable] | List[Callable],
+     features: extractors.FeatureExtractor | Dict[str, Callable] | List[Callable],
      batch_size: int = 8192,
- ):
+ ) -> extractors.FeatureExtractor:
+     """Fit trainable feature extractors on a dataset.
+
+     If the provided feature extractor (or any of its sub-extractors) is
+     trainable (i.e., subclasses
+     :class:`~eegdash.features.extractors.TrainableFeature`), this function
+     iterates through the dataset to fit it.
+
+     Parameters
+     ----------
+     concat_dataset : BaseConcatDataset
+         The dataset to use for fitting the feature extractors.
+     features : ~eegdash.features.extractors.FeatureExtractor or dict or list
+         The feature extractor(s) to fit.
+     batch_size : int, default 8192
+         The batch size to use when iterating through the dataset for fitting.
+
+     Returns
+     -------
+     ~eegdash.features.extractors.FeatureExtractor
+         The fitted feature extractor.
+
+     """
      if isinstance(features, list):
          features = dict(enumerate(features))
-     if not isinstance(features, FeatureExtractor):
-         features = FeatureExtractor(features)
+     if not isinstance(features, extractors.FeatureExtractor):
+         features = extractors.FeatureExtractor(features)
      if not features._is_trainable:
          return features
      features.clear()
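For orientation, a hedged sketch of the public API this hunk documents; windows_ds stands in for a braindecode BaseConcatDataset of windowed recordings, and whether a plain callable needs a feature-kind decorator is not settled by this hunk alone.

import numpy as np

from eegdash.features.utils import extract_features, fit_feature_extractors

# A dict maps feature names to callables, per the `features` parameter above.
features = {"ptp": lambda x: np.ptp(x, axis=-1)}

# fit_feature_extractors returns the extractor unchanged when nothing
# in it is trainable (see the _is_trainable early return above).
extractor = fit_feature_extractors(windows_ds, features)

feat_concat_ds = extract_features(windows_ds, extractor, batch_size=256, n_jobs=2)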
eegdash/hbn/__init__.py ADDED
@@ -0,0 +1,28 @@
+ # Authors: The EEGDash contributors.
+ # License: BSD-3-Clause
+ # Copyright the EEGDash contributors.
+
+ """Healthy Brain Network (HBN) specific utilities and preprocessing.
+
+ This module provides specialized functions for working with the Healthy Brain Network
+ dataset, including preprocessing pipelines, annotation handling, and windowing utilities
+ tailored for HBN EEG data analysis.
+ """
+
+ from .preprocessing import hbn_ec_ec_reannotation
+ from .windows import (
+     add_aux_anchors,
+     add_extras_columns,
+     annotate_trials_with_target,
+     build_trial_table,
+     keep_only_recordings_with,
+ )
+
+ __all__ = [
+     "hbn_ec_ec_reannotation",
+     "build_trial_table",
+     "annotate_trials_with_target",
+     "add_aux_anchors",
+     "add_extras_columns",
+     "keep_only_recordings_with",
+ ]
eegdash/hbn/preprocessing.py ADDED
@@ -0,0 +1,105 @@
+ # Authors: The EEGDash contributors.
+ # License: BSD-3-Clause
+ # Copyright the EEGDash contributors.
+
+ """Preprocessing utilities specific to the Healthy Brain Network dataset.
+
+ This module contains preprocessing classes and functions designed specifically for
+ HBN EEG data, including specialized annotation handling for eyes-open/eyes-closed
+ paradigms and other HBN-specific preprocessing steps.
+ """
+
+ import mne
+ import numpy as np
+
+ from braindecode.preprocessing import Preprocessor
+
+ from ..logging import logger
+
+
+ class hbn_ec_ec_reannotation(Preprocessor):
+     """Preprocessor to reannotate HBN data for eyes-open/eyes-closed events.
+
+     This preprocessor is specifically designed for Healthy Brain Network (HBN)
+     datasets. It identifies existing annotations for "instructed_toCloseEyes"
+     and "instructed_toOpenEyes" and creates new, regularly spaced annotations
+     for "eyes_closed" and "eyes_open" segments, respectively.
+
+     This is useful for creating windowed datasets based on these new, more
+     precise event markers.
+
+     Notes
+     -----
+     This class inherits from :class:`braindecode.preprocessing.Preprocessor`
+     and is intended to be used within a braindecode preprocessing pipeline.
+
+     """
+
+     def __init__(self):
+         super().__init__(fn=self.transform, apply_on_array=False)
+
+     def transform(self, raw: mne.io.Raw) -> mne.io.Raw:
+         """Create new annotations for eyes-open and eyes-closed periods.
+
+         This function finds the original "instructed_to..." annotations and
+         generates new annotations every 2 seconds within specific time ranges
+         relative to the original markers:
+         - "eyes_closed": 15s to 29s after "instructed_toCloseEyes"
+         - "eyes_open": 5s to 19s after "instructed_toOpenEyes"
+
+         The original annotations in the `mne.io.Raw` object are replaced by
+         this new set of annotations.
+
+         Parameters
+         ----------
+         raw : mne.io.Raw
+             The raw MNE object containing the HBN data and original annotations.
+
+         Returns
+         -------
+         mne.io.Raw
+             The raw MNE object with the modified annotations.
+
+         """
+         events, event_id = mne.events_from_annotations(raw)
+
+         logger.info("Original events found with ids: %s", event_id)
+
+         # Create new events array for 2-second segments
+         new_events = []
+         sfreq = raw.info["sfreq"]
+
+         close_event_id = event_id.get("instructed_toCloseEyes")
+         if close_event_id:
+             for event in events[events[:, 2] == close_event_id]:
+                 # For each original event, create events every 2s from 15s to 29s after
+                 start_times = event[0] + np.arange(15, 29, 2) * sfreq
+                 new_events.extend([[int(t), 0, 1] for t in start_times])
+
+         open_event_id = event_id.get("instructed_toOpenEyes")
+         if open_event_id:
+             for event in events[events[:, 2] == open_event_id]:
+                 # For each original event, create events every 2s from 5s to 19s after
+                 start_times = event[0] + np.arange(5, 19, 2) * sfreq
+                 new_events.extend([[int(t), 0, 2] for t in start_times])
+
+         if not new_events:
+             logger.warning(
+                 "Could not find 'instructed_toCloseEyes' or 'instructed_toOpenEyes' "
+                 "annotations. No new events created."
+             )
+             return raw
+
+         # replace events in raw
+         new_events = np.array(new_events)
+
+         annot_from_events = mne.annotations_from_events(
+             events=new_events,
+             event_desc={1: "eyes_closed", 2: "eyes_open"},
+             sfreq=raw.info["sfreq"],
+             orig_time=raw.info.get("meas_date"),
+         )
+
+         raw.set_annotations(annot_from_events)
+
+         return raw
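For orientation, a sketch of how the new preprocessor slots into a braindecode pipeline; concat_ds is a hypothetical dataset of HBN recordings that still carry the original "instructed_to*" annotations.

from braindecode.preprocessing import preprocess

from eegdash.hbn import hbn_ec_ec_reannotation

# Replace the "instructed_to*" markers with 2 s "eyes_closed" /
# "eyes_open" events (15-29 s and 5-19 s after the instructions).
preprocess(concat_ds, [hbn_ec_ec_reannotation()])

# The new annotations can then drive windowing, e.g. with
# braindecode.preprocessing.create_windows_from_events.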