eegdash 0.3.9.dev182388821__py3-none-any.whl → 0.4.0.dev144__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

eegdash/downloader.py ADDED
@@ -0,0 +1,187 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """File downloading utilities for EEG data from cloud storage.
+
+ This module provides functions for downloading EEG data files and BIDS dependencies from
+ AWS S3 storage, with support for caching and progress tracking. It handles the communication
+ between the EEGDash metadata database and the actual EEG data stored in the cloud.
+ """
+
+ import re
+ import tempfile
+ from pathlib import Path
+ from typing import Any
+ from urllib.parse import urlsplit
+
+ import mne
+ import numpy as np
+ import s3fs
+ import xarray as xr
+ from fsspec.callbacks import TqdmCallback
+
+
+ def get_s3_filesystem():
+     """Returns an S3FileSystem object."""
+     return s3fs.S3FileSystem(anon=True, client_kwargs={"region_name": "us-east-2"})
+
+
+ def get_s3path(s3_bucket: str, filepath: str) -> str:
+     """Helper to form an AWS S3 URI for the given relative filepath."""
+     return f"{s3_bucket}/{filepath}"
+
+
+ def download_s3_file(s3_path: str, local_path: Path, s3_open_neuro: bool):
+     """Download function that gets the raw EEG data from S3."""
+     filesystem = get_s3_filesystem()
+     if not s3_open_neuro:
+         s3_path = re.sub(r"(^|/)ds\d{6}/", r"\1", s3_path, count=1)
+         # TODO: remove this hack when competition is over
+         if s3_path.endswith(".set"):
+             s3_path = s3_path[:-4] + ".bdf"
+             local_path = local_path.with_suffix(".bdf")
+
+     local_path.parent.mkdir(parents=True, exist_ok=True)
+     _filesystem_get(filesystem=filesystem, s3path=s3_path, filepath=local_path)
+
+     return local_path
+
+
+ def download_dependencies(
+     s3_bucket: str,
+     bids_dependencies: list[str],
+     bids_dependencies_original: list[str],
+     cache_dir: Path,
+     dataset_folder: Path,
+     record: dict[str, Any],
+     s3_open_neuro: bool,
+ ):
+     """Download all BIDS dependency files from S3 and cache them locally."""
+     filesystem = get_s3_filesystem()
+     for i, dep in enumerate(bids_dependencies):
+         if not s3_open_neuro:
+             if dep.endswith(".set"):
+                 dep = dep[:-4] + ".bdf"
+
+         s3path = get_s3path(s3_bucket, dep)
+         if not s3_open_neuro:
+             dep = bids_dependencies_original[i]
+
+         dep_path = Path(dep)
+         if dep_path.parts and dep_path.parts[0] == record.get("dataset"):
+             dep_local = Path(dataset_folder, *dep_path.parts[1:])
+         else:
+             dep_local = Path(dataset_folder) / dep_path
+         filepath = cache_dir / dep_local
+         if not s3_open_neuro:
+             if filepath.suffix == ".set":
+                 filepath = filepath.with_suffix(".bdf")
+
+         if not filepath.exists():
+             filepath.parent.mkdir(parents=True, exist_ok=True)
+             _filesystem_get(filesystem=filesystem, s3path=s3path, filepath=filepath)
+
+
+ def _filesystem_get(filesystem: s3fs.S3FileSystem, s3path: str, filepath: Path):
+     """Helper to download a file from S3 with a progress bar."""
+     info = filesystem.info(s3path)
+     size = info.get("size") or info.get("Size")
+
+     callback = TqdmCallback(
+         size=size,
+         tqdm_kwargs=dict(
+             desc=f"Downloading {Path(s3path).name}",
+             unit="B",
+             unit_scale=True,
+             unit_divisor=1024,
+             dynamic_ncols=True,
+             leave=True,
+             mininterval=0.2,
+             smoothing=0.1,
+             miniters=1,
+             bar_format="{desc}: {percentage:3.0f}%|{bar}| {n_fmt}/{total_fmt} "
+             "[{elapsed}<{remaining}, {rate_fmt}]",
+         ),
+     )
+     filesystem.get(s3path, str(filepath), callback=callback)
+     return filepath
+
+
+ def load_eeg_from_s3(s3path: str):
+     """Load EEG data from an S3 URI into an ``xarray.DataArray``.
+
+     Preserves the original filename, downloads sidecar files when applicable
+     (e.g., ``.fdt`` for EEGLAB, ``.vmrk``/``.eeg`` for BrainVision), and uses
+     MNE's direct readers.
+
+     Parameters
+     ----------
+     s3path : str
+         An S3 URI (should start with "s3://").
+
+     Returns
+     -------
+     xr.DataArray
+         EEG data with dimensions ``("channel", "time")``.
+
+     Raises
+     ------
+     ValueError
+         If the file extension is unsupported.
+
+     """
+     filesystem = get_s3_filesystem()
+     # choose a temp dir so sidecars can be colocated
+     with tempfile.TemporaryDirectory() as tmpdir:
+         # Derive local filenames from the S3 key to keep base name consistent
+         s3_key = urlsplit(s3path).path  # e.g., "/dsXXXX/sub-.../..._eeg.set"
+         basename = Path(s3_key).name
+         ext = Path(basename).suffix.lower()
+         local_main = Path(tmpdir) / basename
+
+         # Download main file
+         with (
+             filesystem.open(s3path, mode="rb") as fsrc,
+             open(local_main, "wb") as fdst,
+         ):
+             fdst.write(fsrc.read())
+
+         # Determine and fetch any required sidecars
+         sidecars: list[str] = []
+         if ext == ".set":  # EEGLAB
+             sidecars = [".fdt"]
+         elif ext == ".vhdr":  # BrainVision
+             sidecars = [".vmrk", ".eeg", ".dat", ".raw"]
+
+         for sc_ext in sidecars:
+             sc_key = s3_key[: -len(ext)] + sc_ext
+             sc_uri = f"s3://{urlsplit(s3path).netloc}{sc_key}"
+             try:
+                 # If sidecar exists, download next to the main file
+                 info = filesystem.info(sc_uri)
+                 if info:
+                     sc_local = Path(tmpdir) / Path(sc_key).name
+                     with (
+                         filesystem.open(sc_uri, mode="rb") as fsrc,
+                         open(sc_local, "wb") as fdst,
+                     ):
+                         fdst.write(fsrc.read())
+             except Exception:
+                 # Sidecar not present; skip silently
+                 pass
+
+         # Read using appropriate MNE reader
+         raw = mne.io.read_raw(str(local_main), preload=True, verbose=False)
+
+         data = raw.get_data()
+         fs = raw.info["sfreq"]
+         max_time = data.shape[1] / fs
+         time_steps = np.linspace(0, max_time, data.shape[1]).squeeze()
+         channel_names = raw.ch_names
+
+         return xr.DataArray(
+             data=data,
+             dims=["channel", "time"],
+             coords={"time": time_steps, "channel": channel_names},
+         )

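For orientation, here is a minimal usage sketch of the new module. The bucket, key, and local paths are illustrative only and not taken from this release:

```python
from pathlib import Path

from eegdash.downloader import download_s3_file, load_eeg_from_s3

# Stream a recording straight into an xarray.DataArray
# (hypothetical OpenNeuro URI; any supported format works):
arr = load_eeg_from_s3(
    "s3://openneuro.org/ds005505/sub-01/eeg/sub-01_task-RestingState_eeg.set"
)
print(arr.dims, arr.sizes["channel"], arr.sizes["time"])

# Or cache the raw file locally; s3_open_neuro=True keeps the S3 key unchanged:
local = download_s3_file(
    "openneuro.org/ds005505/sub-01/eeg/sub-01_task-RestingState_eeg.set",
    Path(".eegdash_cache/sub-01_task-RestingState_eeg.set"),
    s3_open_neuro=True,
)
```
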
eegdash/features/datasets.py CHANGED

@@ -3,7 +3,6 @@ from __future__ import annotations
  import json
  import os
  import shutil
- import warnings
  from collections.abc import Callable
  from typing import Dict, List

@@ -17,6 +16,8 @@ from braindecode.datasets.base import (
      _create_description,
  )

+ from ..logging import logger
+

  class FeaturesDataset(EEGWindowsDataset):
      """Returns samples from a pandas DataFrame object along with a target.
@@ -283,7 +284,7 @@ class FeaturesConcatDataset(BaseConcatDataset):
          # the following will be True for all datasets preprocessed and
          # stored in parallel with braindecode.preprocessing.preprocess
          if i_ds + 1 + offset < n_sub_dirs:
-             warnings.warn(
+             logger.warning(
                  f"The number of saved datasets ({i_ds + 1 + offset}) "
                  f"does not match the number of existing "
                  f"subdirectories ({n_sub_dirs}). You may now "
@@ -294,7 +295,7 @@ class FeaturesConcatDataset(BaseConcatDataset):
          # if path contains files or directories that were not touched, raise
          # warning
          if path_contents:
-             warnings.warn(
+             logger.warning(
                  f"Chosen directory {path} contains other "
                  f"subdirectories or files {path_contents}."
              )

eegdash/features/serialization.py CHANGED

@@ -1,6 +1,8 @@
- """Convenience functions for storing and loading of features datasets.
+ """Convenience functions for storing and loading features datasets.
+
+ See Also:
+     https://github.com/braindecode/braindecode//blob/master/braindecode/datautil/serialization.py#L165-L229

- see also: https://github.com/braindecode/braindecode//blob/master/braindecode/datautil/serialization.py#L165-L229
  """

  from pathlib import Path
@@ -15,7 +17,7 @@ from .datasets import FeaturesConcatDataset, FeaturesDataset


  def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):
-     """Load a stored FeaturesConcatDataset of FeaturesDatasets from files.
+     """Load a stored features dataset from files.

      Parameters
      ----------
@@ -28,7 +30,9 @@ def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):

      Returns
      -------
-     concat_dataset: FeaturesConcatDataset of FeaturesDatasets
+     concat_dataset: eegdash.features.datasets.FeaturesConcatDataset
+         A concatenation of multiple eegdash.features.datasets.FeaturesDataset
+         instances loaded from the given directory.

      """
      # Make sure we always work with a pathlib.Path

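A hedged usage sketch of this loader, assuming a directory previously written by FeaturesConcatDataset's save routine (the path and ids below are hypothetical):

```python
from eegdash.features.serialization import load_features_concat_dataset

# Load only the first two stored sub-datasets, reading their
# subdirectories in parallel with two workers:
concat_ds = load_features_concat_dataset(
    path="features_cache/ds005505",  # hypothetical save directory
    ids_to_load=[0, 1],
    n_jobs=2,
)
print(len(concat_ds.datasets))  # -> 2 FeaturesDataset instances
```
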
eegdash/hbn/__init__.py CHANGED

@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Healthy Brain Network (HBN) specific utilities and preprocessing.
+
+ This module provides specialized functions for working with the Healthy Brain Network
+ dataset, including preprocessing pipelines, annotation handling, and windowing utilities
+ tailored for HBN EEG data analysis.
+ """
+
  from .preprocessing import hbn_ec_ec_reannotation
  from .windows import (
      add_aux_anchors,

eegdash/hbn/preprocessing.py CHANGED

@@ -1,11 +1,20 @@
- import logging
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Preprocessing utilities specific to the Healthy Brain Network dataset.
+
+ This module contains preprocessing classes and functions designed specifically for
+ HBN EEG data, including specialized annotation handling for eyes-open/eyes-closed
+ paradigms and other HBN-specific preprocessing steps.
+ """

  import mne
  import numpy as np

  from braindecode.preprocessing import Preprocessor

- logger = logging.getLogger("eegdash")
+ from ..logging import logger


  class hbn_ec_ec_reannotation(Preprocessor):

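Since hbn_ec_ec_reannotation subclasses braindecode's Preprocessor, it should compose with a standard preprocess() call. A sketch under that assumption (the diff does not show the constructor, so no-argument construction is assumed):

```python
from braindecode.preprocessing import preprocess
from eegdash import EEGDashDataset
from eegdash.hbn import hbn_ec_ec_reannotation

# Resting-state HBN recordings (query taken from the project README)
ds = EEGDashDataset({"dataset": "ds005505", "task": "RestingState"}, cache_dir=".")

# Re-annotate eyes-open/eyes-closed segments before windowing
preprocess(ds, [hbn_ec_ec_reannotation()])
```
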
eegdash/hbn/windows.py CHANGED

@@ -1,3 +1,15 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Windowing and trial processing utilities for HBN datasets.
+
+ This module provides functions for building trial tables, adding auxiliary anchors,
+ annotating trials with targets, and filtering recordings based on various criteria.
+ These utilities are specifically designed for working with HBN EEG data structures
+ and experimental paradigms.
+ """
+
  import logging

  import mne
@@ -7,8 +19,6 @@ from mne_bids import get_bids_path_from_fname

  from braindecode.datasets.base import BaseConcatDataset

- logger = logging.getLogger("eegdash")
-

  def build_trial_table(events_df: pd.DataFrame) -> pd.DataFrame:
      """One row per contrast trial with stimulus/response metrics."""

eegdash/logging.py ADDED

@@ -0,0 +1,33 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Logging configuration for EEGDash.
+
+ This module sets up centralized logging for the EEGDash package using Rich for enhanced
+ console output formatting. It provides a consistent logging interface across all modules.
+ """
+
+ import logging
+
+ from rich.logging import RichHandler
+
+ # Get the root logger
+ root_logger = logging.getLogger()
+
+ # --- This is the key part ---
+ # 1. Remove any handlers that may have been added by default
+ root_logger.handlers = []
+
+ # 2. Add your RichHandler
+ root_logger.addHandler(RichHandler(rich_tracebacks=True, markup=True))
+ # ---------------------------
+
+ # 3. Set the level for the root logger
+ root_logger.setLevel(logging.INFO)
+
+ # Now, get your package-specific logger. It will inherit the
+ # configuration from the root logger we just set up.
+ logger = logging.getLogger("eegdash")
+
+ logger.setLevel(logging.INFO)

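Consumers simply import the shared logger, as the hbn and features modules now do; the messages below are illustrative:

```python
from eegdash.logging import logger

logger.info("Resolved cache directory: %s", ".eegdash_cache")
# markup=True on the RichHandler means Rich console markup is rendered:
logger.warning("[bold]Low disk space[/bold] in the cache directory")
```
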
eegdash/mongodb.py CHANGED

@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """MongoDB connection and operations management.
+
+ This module provides thread-safe MongoDB connection management and high-level database
+ operations for the EEGDash metadata database. It includes methods for finding, adding,
+ and updating EEG data records with proper connection pooling and error handling.
+ """
+
  import threading

  from pymongo import MongoClient

eegdash/paths.py CHANGED

@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Path utilities and cache directory management.
+
+ This module provides functions for resolving consistent cache directories and path
+ management throughout the EEGDash package, with integration with MNE-Python's
+ configuration system.
+ """
+
  from __future__ import annotations

  import os

eegdash/utils.py CHANGED

@@ -1,3 +1,13 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """General utility functions for EEGDash.
+
+ This module contains miscellaneous utility functions used across the EEGDash package,
+ including MongoDB client initialization and configuration helpers.
+ """
+
  from mne.utils import get_config, set_config, use_log_level

{eegdash-0.3.9.dev182388821.dist-info → eegdash-0.4.0.dev144.dist-info}/METADATA RENAMED

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: eegdash
- Version: 0.3.9.dev182388821
+ Version: 0.4.0.dev144
  Summary: EEG data for machine learning
  Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@gmail.com>, Aviv Dotan <avivd220@gmail.com>, Oren Shriki <oren70@gmail.com>, Bruno Aristimunha <b.aristimunha@gmail.com>
  License-Expression: GPL-3.0-only
@@ -25,7 +25,7 @@ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: braindecode>=1.0
- Requires-Dist: mne_bids>=0.16.0
+ Requires-Dist: mne_bids>=0.17.0
  Requires-Dist: numba
  Requires-Dist: numpy
  Requires-Dist: pandas
@@ -41,6 +41,7 @@ Requires-Dist: pymatreader
  Requires-Dist: eeglabio
  Requires-Dist: tabulate
  Requires-Dist: docstring_inheritance
+ Requires-Dist: rich
  Provides-Extra: tests
  Requires-Dist: pytest; extra == "tests"
  Requires-Dist: pytest-cov; extra == "tests"
@@ -64,6 +65,7 @@ Requires-Dist: ipython; extra == "docs"
  Requires-Dist: lightgbm; extra == "docs"
  Requires-Dist: plotly; extra == "docs"
  Requires-Dist: nbformat; extra == "docs"
+ Requires-Dist: graphviz; extra == "docs"
  Provides-Extra: all
  Requires-Dist: eegdash[docs]; extra == "all"
  Requires-Dist: eegdash[dev]; extra == "all"
@@ -86,22 +88,6 @@ To leverage recent and ongoing advancements in large-scale computational methods

  The data in EEG-DaSh originates from a collaboration involving 25 laboratories, encompassing 27,053 participants. This extensive collection includes MEEG data, which is a combination of EEG and MEG signals. The data is sourced from various studies conducted by these labs, involving both healthy subjects and clinical populations with conditions such as ADHD, depression, schizophrenia, dementia, autism, and psychosis. Additionally, data spans different mental states like sleep, meditation, and cognitive tasks. In addition, EEG-DaSh will incorporate a subset of the data converted from NEMAR, which includes 330 MEEG BIDS-formatted datasets, further expanding the archive with well-curated, standardized neuroelectromagnetic data.

- ## Featured data
-
- The following HBN datasets are currently featured on EEGDash. Documentation about these datasets is available [here](https://neuromechanist.github.io/data/hbn/).
-
- | DatasetID | Participants | Files | Sessions | Population | Channels | Is 10-20? | Modality | Size |
- |---|---|---|---|---|---|---|---|---|
- | [ds005505](https://nemar.org/dataexplorer/detail?dataset_id=ds005505) | 136 | 5393 | 1 | Healthy | 129 | other | Visual | 103 GB |
- | [ds005506](https://nemar.org/dataexplorer/detail?dataset_id=ds005506) | 150 | 5645 | 1 | Healthy | 129 | other | Visual | 112 GB |
- | [ds005507](https://nemar.org/dataexplorer/detail?dataset_id=ds005507) | 184 | 7273 | 1 | Healthy | 129 | other | Visual | 140 GB |
- | [ds005508](https://nemar.org/dataexplorer/detail?dataset_id=ds005508) | 324 | 13393 | 1 | Healthy | 129 | other | Visual | 230 GB |
- | [ds005510](https://nemar.org/dataexplorer/detail?dataset_id=ds005510) | 135 | 4933 | 1 | Healthy | 129 | other | Visual | 91 GB |
- | [ds005512](https://nemar.org/dataexplorer/detail?dataset_id=ds005512) | 257 | 9305 | 1 | Healthy | 129 | other | Visual | 157 GB |
- | [ds005514](https://nemar.org/dataexplorer/detail?dataset_id=ds005514) | 295 | 11565 | 1 | Healthy | 129 | other | Visual | 185 GB |
-
- A total of [246 other datasets](datasets.md) are also available through EEGDash.
-
  ## Data format

  EEGDash queries return a **Pytorch Dataset** formatted to facilitate machine learning (ML) and deep learning (DL) applications. PyTorch Datasets are the best format for EEGDash queries because they provide an efficient, scalable, and flexible structure for machine learning (ML) and deep learning (DL) applications. They allow seamless integration with PyTorch’s DataLoader, enabling efficient batching, shuffling, and parallel data loading, which is essential for training deep learning models on large EEG datasets.
@@ -113,47 +99,11 @@ EEGDash datasets are processed using the popular [braindecode](https://braindeco
  ## EEG-Dash usage

  ### Install
- Use your preferred Python environment manager with Python > 3.9 to install the package.
+ Use your preferred Python environment manager with Python > 3.10 to install the package.
  * To install the eegdash package, use the following command: `pip install eegdash`
  * To verify the installation, start a Python session and type: `from eegdash import EEGDash`

- ### Data access
-
- To use the data from a single subject, enter:
-
- ```python
- from eegdash import EEGDashDataset
-
- ds_NDARDB033FW5 = EEGDashDataset(
-     {"dataset": "ds005514", "task": "RestingState", "subject": "NDARDB033FW5"},
-     cache_dir="."
- )
- ```
-
- This will search and download the metadata for the task **RestingState** for subject **NDARDB033FW5** in BIDS dataset **ds005514**. The actual data will not be downloaded at this stage. Following standard practice, data is only downloaded once it is processed. The **ds_NDARDB033FW5** object is a fully functional braindecode dataset, which is itself a PyTorch dataset. This [tutorial](https://github.com/sccn/EEGDash/blob/develop/notebooks/tutorial_eoec.ipynb) shows how to preprocess the EEG data, extracting portions of the data containing eyes-open and eyes-closed segments, then perform eyes-open vs. eyes-closed classification using a (shallow) deep-learning model.
-
- To use the data from multiple subjects, enter:
-
- ```python
- from eegdash import EEGDashDataset
-
- ds_ds005505rest = EEGDashDataset(
-     {"dataset": "ds005505", "task": "RestingState"}, target_name="sex", cache_dir="."
- )
- ```
-
- This will search and download the metadata for the task 'RestingState' for all subjects in BIDS dataset 'ds005505' (a total of 136). As above, the actual data will not be downloaded at this stage so this command is quick to execute. Also, the target class for each subject is assigned using the target_name parameter. This means that this object is ready to be directly fed to a deep learning model, although the [tutorial script](https://github.com/sccn/EEGDash/blob/develop/notebooks/tutorial_sex_classification.ipynb) performs minimal processing on it, prior to training a deep-learning model. Because 14 gigabytes of data are downloaded, this tutorial takes about 10 minutes to execute.
-
- ### Automatic caching
-
- By default, EEGDash caches downloaded data under a single, consistent folder:
-
- - If ``EEGDASH_CACHE_DIR`` is set in your environment, that path is used.
- - Else, if MNE's ``MNE_DATA`` config is set, that path is used to align with other EEG tooling.
- - Otherwise, ``.eegdash_cache`` in the current working directory is used.
-
- This means that if you run the tutorial [scripts](https://github.com/sccn/EEGDash/tree/develop/notebooks), the data will only be downloaded the first time the script is executed and reused thereafter.
+ Please check our tutorial webpages to explore what you can do with [eegdash](https://eegdash.org/)!

  ## Education -- Coming soon...

eegdash-0.4.0.dev144.dist-info/RECORD ADDED

@@ -0,0 +1,37 @@
+ eegdash/__init__.py,sha256=mb1qG2Bvohd8m8HMQlfoq8GO9ANnQZ3bnjL8QnJKlFU,704
+ eegdash/api.py,sha256=OLQbpOoIZpZVJSss3imLFmN6d1Fpwk6tj-yRMQlr00Q,39429
+ eegdash/bids_eeg_metadata.py,sha256=EFJ1grNcqS0eF0hg45F6St8gFc3Hlzsgccpr-9XTMZk,14153
+ eegdash/const.py,sha256=-cwrtncqJHZ19lGf2MK-IgKy7hTMfb6H-CeL50Oonyg,7883
+ eegdash/data_utils.py,sha256=s9FyPpsw32ndBsusL4TQX6rOsLEiN73RneEuXKffaYc,26477
+ eegdash/downloader.py,sha256=TsoFDmzSBLiTwW_scfju4MXEIq088upHvdEfGw_c8WM,6256
+ eegdash/logging.py,sha256=Tbz-zXaxvzZkYmrAQTEyFqevzWtM5QZPP6LL7XNy8d0,952
+ eegdash/mongodb.py,sha256=0QpkAdwQOisbCr0-rd0wPFQiG0IT9h2Ae-CXYdrt65o,2430
+ eegdash/paths.py,sha256=-bl81r7UyPr-Kq6V6j6h9Mq6dxg5T5EkBVJlOLmQecg,1217
+ eegdash/utils.py,sha256=05MwB7Y447qkWfxCqgGy2DZUHPV1c1xvr3EUyhD0OHI,723
+ eegdash/dataset/__init__.py,sha256=HKDfV2DHBv63BqYLBWDMvU8jbFNRC7DqQbxL7RG1DKQ,863
+ eegdash/dataset/dataset.py,sha256=e_rliu4E-uPtz_miUSzGukUahCHHhyXB2Gu3pm3cyHo,7062
+ eegdash/dataset/dataset_summary.csv,sha256=a5Y21LmBPKLVRt5uKNXO7lSRDjsDmJLzv6-3HryF5JU,23614
+ eegdash/dataset/registry.py,sha256=KmPDfazhdsIyUouo3qdqDaHiTKHCZcEvXQJeHphZijY,7057
+ eegdash/features/__init__.py,sha256=BXNhjvL4_SSFAY1lcP9nyGpkbJNtoOMH4AHlF6OyABo,4078
+ eegdash/features/datasets.py,sha256=eV4d86EU4fu1yoIMdPQnot6YZDRGG4qE9h77lk7iVhU,18317
+ eegdash/features/decorators.py,sha256=v0qaJz_dcX703p1fvFYbAIXmwK3d8naYGlq7fRVKn_w,1313
+ eegdash/features/extractors.py,sha256=H7h6tP3dKoRcjDJpWWAo0ppmokCq5QlhqMcehYwYV9s,6845
+ eegdash/features/inspect.py,sha256=PmbWhx5H_WqpnorUpWONUSkUtaIHkZblRa_Xyk7Szyc,1569
+ eegdash/features/serialization.py,sha256=LmDrQEb-NLNgak_LabdDnr_J_v0QyLPzm_E8IiIHgMQ,2960
+ eegdash/features/utils.py,sha256=eM6DdyOpdVfNh7dSPykJ0WaTDtaGvkCQWAmW0G8v60Y,3784
+ eegdash/features/feature_bank/__init__.py,sha256=YsMXLC1FEtHL3IEw9pYw1fc5IY0x_hr2qWQowI5gZj8,2991
+ eegdash/features/feature_bank/complexity.py,sha256=iy9uaLInsYdxKZlXHTWlgEpP9fVI-v9TqLGfnS15-Eg,3258
+ eegdash/features/feature_bank/connectivity.py,sha256=bQ6KlxWm5GNpCS9ypLqBUr2L171Yq7wpBQT2tRQKTZ4,2159
+ eegdash/features/feature_bank/csp.py,sha256=jKPrmqBj7FliybNbg035cVZddvVSkhk9OazcscDpipU,3303
+ eegdash/features/feature_bank/dimensionality.py,sha256=j_Ds71Y1AbV2uLFQj8EuXQ4kzofLBlQtPV5snMkF7i4,3965
+ eegdash/features/feature_bank/signal.py,sha256=3Tb8z9gX7iZipxQJ9DSyy30JfdmW58kgvimSyZX74p8,3404
+ eegdash/features/feature_bank/spectral.py,sha256=bNB7skusePs1gX7NOU6yRlw_Gr4UOCkO_ylkCgybzug,3319
+ eegdash/features/feature_bank/utils.py,sha256=DGh-Q7-XFIittP7iBBxvsJaZrlVvuY5mw-G7q6C-PCI,1237
+ eegdash/hbn/__init__.py,sha256=hsI5pmIuYDzr--aE5UiToO-P9XL5fVRKahZzdsAodro,794
+ eegdash/hbn/preprocessing.py,sha256=cfsLXnGuUaVJ3NhueDgmdc0w7jflmIi69occuB4bs7M,2609
+ eegdash/hbn/windows.py,sha256=23KyVl0pQn4o40wM3Rsu8nl5tN-REAusU7wcv9L4a5U,10351
+ eegdash-0.4.0.dev144.dist-info/licenses/LICENSE,sha256=asisR-xupy_NrQBFXnx6yqXeZcYWLvbAaiETl25iXT0,931
+ eegdash-0.4.0.dev144.dist-info/METADATA,sha256=CgGdbNlkxb0Ako1Q1cwdwEotFGf-CBz2yEZm6WBMmlw,6776
+ eegdash-0.4.0.dev144.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ eegdash-0.4.0.dev144.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
+ eegdash-0.4.0.dev144.dist-info/RECORD,,

eegdash-0.3.9.dev182388821.dist-info/RECORD DELETED

@@ -1,35 +0,0 @@
- eegdash/__init__.py,sha256=JV_edkvsUdIH15LC98ZUBjFdCPi7Y72URaxZwmAKUSk,290
- eegdash/api.py,sha256=xuNi5mP8zFmTETbRkYajSS7ba320HaDeX7XzcR_t3ZU,40216
- eegdash/bids_eeg_metadata.py,sha256=LZrGPGVdnGUbZlD4M_aAW4kEItzwTTeZFicH-jyqDyc,9712
- eegdash/const.py,sha256=qdFBEL9kIrsj9CdxbXhBkR61R3CrTGSaj5Iq0YOACIs,7313
- eegdash/data_utils.py,sha256=DZ-B03VleA9-mOUzGXcS4N18dVC2uFkFGXMFsKK8nUc,34166
- eegdash/mongodb.py,sha256=GD3WgA253oFgpzOHrYaj4P1mRjNtDMT5Oj4kVvHswjI,2006
- eegdash/paths.py,sha256=246xkectTxDAYcREs1Qma_F1Y-oSmLlb0hn0F2Za5Ss,866
- eegdash/utils.py,sha256=7TfQ9D0LrAJ7FgnSXEvWgeHWK2QqaqS-_WcWXD86ObQ,408
- eegdash/dataset/__init__.py,sha256=Qmzki5G8GaFlzTb10e4SmC3WkKuJyo1Ckii15tCEHAo,157
- eegdash/dataset/dataset.py,sha256=YuyzmqN5M0itimzUD1NF1hcDwkb6fg91dRZtK6HbYOc,6521
- eegdash/dataset/dataset_summary.csv,sha256=XF0vdHz77DFyVLTaET8lL5gQQ4r-q1xAfSDWH5GTPLA,23655
- eegdash/dataset/registry.py,sha256=genOqAuf9cQBnHhPqRwfLP7S1XsnkLot6sLyJozPtf4,4150
- eegdash/features/__init__.py,sha256=BXNhjvL4_SSFAY1lcP9nyGpkbJNtoOMH4AHlF6OyABo,4078
- eegdash/features/datasets.py,sha256=kU1DO70ArSIy-LF1hHD2NN4iT-kJrI0mVpSkyV_OSeI,18301
- eegdash/features/decorators.py,sha256=v0qaJz_dcX703p1fvFYbAIXmwK3d8naYGlq7fRVKn_w,1313
- eegdash/features/extractors.py,sha256=H7h6tP3dKoRcjDJpWWAo0ppmokCq5QlhqMcehYwYV9s,6845
- eegdash/features/inspect.py,sha256=PmbWhx5H_WqpnorUpWONUSkUtaIHkZblRa_Xyk7Szyc,1569
- eegdash/features/serialization.py,sha256=snXuHVd0CoT2ese0iWi5RwZrVHCGc0oCZ8-SXqGY88I,2848
- eegdash/features/utils.py,sha256=eM6DdyOpdVfNh7dSPykJ0WaTDtaGvkCQWAmW0G8v60Y,3784
- eegdash/features/feature_bank/__init__.py,sha256=YsMXLC1FEtHL3IEw9pYw1fc5IY0x_hr2qWQowI5gZj8,2991
- eegdash/features/feature_bank/complexity.py,sha256=iy9uaLInsYdxKZlXHTWlgEpP9fVI-v9TqLGfnS15-Eg,3258
- eegdash/features/feature_bank/connectivity.py,sha256=bQ6KlxWm5GNpCS9ypLqBUr2L171Yq7wpBQT2tRQKTZ4,2159
- eegdash/features/feature_bank/csp.py,sha256=jKPrmqBj7FliybNbg035cVZddvVSkhk9OazcscDpipU,3303
- eegdash/features/feature_bank/dimensionality.py,sha256=j_Ds71Y1AbV2uLFQj8EuXQ4kzofLBlQtPV5snMkF7i4,3965
- eegdash/features/feature_bank/signal.py,sha256=3Tb8z9gX7iZipxQJ9DSyy30JfdmW58kgvimSyZX74p8,3404
- eegdash/features/feature_bank/spectral.py,sha256=bNB7skusePs1gX7NOU6yRlw_Gr4UOCkO_ylkCgybzug,3319
- eegdash/features/feature_bank/utils.py,sha256=DGh-Q7-XFIittP7iBBxvsJaZrlVvuY5mw-G7q6C-PCI,1237
- eegdash/hbn/__init__.py,sha256=U8mK64napnKU746C5DOwkX7W7sg3iW5kb_cVv2pfFq0,394
- eegdash/hbn/preprocessing.py,sha256=7S_TTRKPKEk47tTnh2D6WExBt4cctAMxUxGDjJqq5lU,2221
- eegdash/hbn/windows.py,sha256=DU_QruLOHQOttZbXCgtO4mjKaG3E5STWjMQ0_s-g0gw,9929
- eegdash-0.3.9.dev182388821.dist-info/licenses/LICENSE,sha256=asisR-xupy_NrQBFXnx6yqXeZcYWLvbAaiETl25iXT0,931
- eegdash-0.3.9.dev182388821.dist-info/METADATA,sha256=fLL2T760cfhM0fCyaJbPa41UJzNih7QsLbBwsQHzpGk,10348
- eegdash-0.3.9.dev182388821.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- eegdash-0.3.9.dev182388821.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
- eegdash-0.3.9.dev182388821.dist-info/RECORD,,