eegdash 0.4.0.dev132__py3-none-any.whl → 0.4.0.dev144__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

eegdash/__init__.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """EEGDash: A comprehensive platform for EEG data management and analysis.
+
+ EEGDash provides a unified interface for accessing, querying, and analyzing large-scale
+ EEG datasets. It integrates with cloud storage, MongoDB databases, and machine learning
+ frameworks to streamline EEG research workflows.
+ """
+
  from .api import EEGDash, EEGDashDataset
  from .dataset import EEGChallengeDataset
  from .hbn import preprocessing
@@ -7,4 +18,4 @@ _init_mongo_client()

  __all__ = ["EEGDash", "EEGDashDataset", "EEGChallengeDataset", "preprocessing"]

- __version__ = "0.4.0.dev132"
+ __version__ = "0.4.0.dev144"
eegdash/api.py CHANGED
@@ -1,3 +1,15 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """High-level interface to the EEGDash metadata database.
+
+ This module provides the main EEGDash class which serves as the primary entry point for
+ interacting with the EEGDash ecosystem. It offers methods to query, insert, and update
+ metadata records stored in the EEGDash MongoDB database, and includes utilities to load
+ EEG data from S3 for matched records.
+ """
+
  import os
  from pathlib import Path
  from typing import Any, Mapping
@@ -549,16 +561,54 @@ class EEGDashDataset(BaseConcatDataset, metaclass=NumpyDocstringInheritanceInitM

  Examples
  --------
- # Find by single subject
- >>> ds = EEGDashDataset(dataset="ds005505", subject="NDARCA153NKE")
-
- # Find by a list of subjects and a specific task
- >>> subjects = ["NDARCA153NKE", "NDARXT792GY8"]
- >>> ds = EEGDashDataset(dataset="ds005505", subject=subjects, task="RestingState")
-
- # Use a raw MongoDB query for advanced filtering
- >>> raw_query = {"dataset": "ds005505", "subject": {"$in": subjects}}
- >>> ds = EEGDashDataset(query=raw_query)
+ Basic usage with dataset and subject filtering:
+
+ >>> from eegdash import EEGDashDataset
+ >>> dataset = EEGDashDataset(
+ ...     cache_dir="./data",
+ ...     dataset="ds002718",
+ ...     subject="012"
+ ... )
+ >>> print(f"Number of recordings: {len(dataset)}")
+
+ Filter by multiple subjects and specific task:
+
+ >>> subjects = ["012", "013", "014"]
+ >>> dataset = EEGDashDataset(
+ ...     cache_dir="./data",
+ ...     dataset="ds002718",
+ ...     subject=subjects,
+ ...     task="RestingState"
+ ... )
+
+ Load and inspect EEG data from recordings:
+
+ >>> if len(dataset) > 0:
+ ...     recording = dataset[0]
+ ...     raw = recording.load()
+ ...     print(f"Sampling rate: {raw.info['sfreq']} Hz")
+ ...     print(f"Number of channels: {len(raw.ch_names)}")
+ ...     print(f"Duration: {raw.times[-1]:.1f} seconds")
+
+ Advanced filtering with raw MongoDB queries:
+
+ >>> from eegdash import EEGDashDataset
+ >>> query = {
+ ...     "dataset": "ds002718",
+ ...     "subject": {"$in": ["012", "013"]},
+ ...     "task": "RestingState"
+ ... }
+ >>> dataset = EEGDashDataset(cache_dir="./data", query=query)
+
+ Working with dataset collections and braindecode integration:
+
+ >>> # EEGDashDataset is a braindecode BaseConcatDataset
+ >>> for i, recording in enumerate(dataset):
+ ...     if i >= 2: # limit output
+ ...         break
+ ...     print(f"Recording {i}: {recording.description}")
+ ...     raw = recording.load()
+ ...     print(f" Channels: {len(raw.ch_names)}, Duration: {raw.times[-1]:.1f}s")

  Parameters
  ----------
eegdash/bids_eeg_metadata.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """BIDS metadata processing and query building utilities.
+
+ This module provides functions for processing BIDS-formatted EEG metadata, building database
+ queries from user parameters, and enriching metadata records with participant information.
+ It handles the translation between user-friendly query parameters and MongoDB query syntax.
+ """
+
  import re
  from pathlib import Path
  from typing import Any
eegdash/const.py CHANGED
@@ -1,3 +1,21 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Configuration constants and mappings for EEGDash.
+
+ This module contains global configuration settings, allowed query fields, and mapping
+ constants used throughout the EEGDash package. It defines the interface between EEGDash
+ releases and OpenNeuro dataset identifiers, as well as validation rules for database queries.
+ """
+
+ __all__ = [
+     "config",
+     "ALLOWED_QUERY_FIELDS",
+     "RELEASE_TO_OPENNEURO_DATASET_MAP",
+     "SUBJECT_MINI_RELEASE_MAP",
+ ]
+
  ALLOWED_QUERY_FIELDS = {
      "data_name",
      "dataset",
eegdash/data_utils.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Data utilities and dataset classes for EEG data handling.
+
+ This module provides core dataset classes for working with EEG data in the EEGDash ecosystem,
+ including classes for individual recordings and collections of datasets. It integrates with
+ braindecode for machine learning workflows and handles data loading from both local and remote sources.
+ """
+
  import io
  import json
  import os
eegdash/dataset/__init__.py CHANGED
@@ -1,4 +1,22 @@
+ """Public API for dataset helpers and dynamically generated datasets."""
+
+ from . import dataset as _dataset_mod # triggers dynamic class registration
  from .dataset import EEGChallengeDataset
  from .registry import register_openneuro_datasets

- __all__ = ["EEGChallengeDataset", "register_openneuro_datasets"]
+ # Re-export dynamically generated dataset classes at the package level so that
+ # ``eegdash.dataset`` shows them in the API docs and users can import as
+ # ``from eegdash.dataset import DSXXXXX``.
+ _dyn_names = []
+ for _name in getattr(_dataset_mod, "__all__", []):
+     if _name == "EEGChallengeDataset":
+         # Already imported explicitly above
+         continue
+     _obj = getattr(_dataset_mod, _name, None)
+     if _obj is not None:
+         globals()[_name] = _obj
+         _dyn_names.append(_name)
+
+ __all__ = ["EEGChallengeDataset", "register_openneuro_datasets"] + _dyn_names
+
+ del _dataset_mod, _name, _obj, _dyn_names
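As the comment above notes, the re-export makes the dynamically generated wrapper classes importable from ``eegdash.dataset``. A hedged sketch of what that enables; ``DS002718`` is a hypothetical example of a generated class name, and the actual names come from ``dataset.__all__``:

    # Sketch only: DS002718 stands in for any dynamically registered dataset class.
    from eegdash.dataset import EEGChallengeDataset  # explicit re-export
    from eegdash.dataset import DS002718             # generated wrapper (hypothetical name)

    ds = DS002718(cache_dir="./data")
    print(len(ds))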
eegdash/dataset/dataset_summary.csv CHANGED
@@ -10,7 +10,6 @@
  8,ds005508,3342,324,10,129,500,269.281,229.81 GB,246753736933,0,,,,,
  9,ds005507,1812,184,10,129,500,168.649,139.37 GB,149646718160,0,,,,,
  10,ds005506,1405,150,10,129,500,127.896,111.88 GB,120126449650,0,,,,,
- 11,test,2,1,1,64,500,20.556,0 B,0,0,,,,,
  12,ds004854,1,1,1,64,128,0.535,79.21 MB,83057080,0,,,,,
  13,ds004853,1,1,1,64,128,0.535,79.21 MB,83057080,0,,,,,
  14,ds004844,68,17,1,64,1024,21.252,22.33 GB,23976121966,0,ds004844,,,Multisensory,Decision-making
eegdash/dataset/registry.py CHANGED
@@ -57,14 +57,8 @@ def register_openneuro_datasets(

          init = make_init(dataset_id)

-         doc = f"""OpenNeuro dataset ``{dataset_id}``.
-
-         {_markdown_table(row_series)}
-
-         This class is a thin convenience wrapper for the dataset ``{dataset_id}``.
-         Constructor arguments are forwarded to :class:`{base_class.__name__}`; see the
-         base class documentation for parameter details and examples.
-         """
+         # Generate rich docstring with dataset metadata
+         doc = _generate_rich_docstring(dataset_id, row_series, base_class)

          # init.__doc__ = doc

@@ -90,6 +84,94 @@ def register_openneuro_datasets(
      return registered


+ def _generate_rich_docstring(dataset_id: str, row_series: pd.Series, base_class) -> str:
+     """Generate a comprehensive docstring for a dataset class."""
+     # Extract metadata with safe defaults
+     n_subjects = row_series.get("n_subjects", "Unknown")
+     n_records = row_series.get("n_records", "Unknown")
+     n_tasks = row_series.get("n_tasks", "Unknown")
+     modality = row_series.get("modality of exp", "")
+     exp_type = row_series.get("type of exp", "")
+     subject_type = row_series.get("Type Subject", "")
+     duration = row_series.get("duration_hours_total", "Unknown")
+     size = row_series.get("size", "Unknown")
+
+     # Create description based on available metadata
+     description_parts = []
+     if modality and str(modality).strip():
+         description_parts.append(f"**Modality**: {modality}")
+     if exp_type and str(exp_type).strip():
+         description_parts.append(f"**Type**: {exp_type}")
+     if subject_type and str(subject_type).strip():
+         description_parts.append(f"**Subjects**: {subject_type}")
+
+     description = (
+         " | ".join(description_parts)
+         if description_parts
+         else "EEG dataset from OpenNeuro"
+     )
+
+     # Generate the docstring
+     docstring = f"""OpenNeuro dataset ``{dataset_id}``.
+
+     {description}
+
+     This dataset contains {n_subjects} subjects with {n_records} recordings across {n_tasks} tasks.
+     Total duration: {duration} hours. Dataset size: {size}.
+
+     {_markdown_table(row_series)}
+
+     This dataset class provides convenient access to the ``{dataset_id}`` dataset through the EEGDash interface.
+     It inherits all functionality from :class:`~{base_class.__module__}.{base_class.__name__}` with the dataset filter pre-configured.
+
+     Parameters
+     ----------
+     cache_dir : str
+         Directory to cache downloaded data.
+     query : dict, optional
+         Additional MongoDB-style filters to AND with the dataset selection.
+         Must not contain the key ``dataset``.
+     s3_bucket : str, optional
+         Base S3 bucket used to locate the data.
+     **kwargs
+         Additional arguments passed to the base dataset class.
+
+     Examples
+     --------
+     Basic usage:
+
+     >>> from eegdash.dataset import {dataset_id.upper()}
+     >>> dataset = {dataset_id.upper()}(cache_dir="./data")
+     >>> print(f"Number of recordings: {{len(dataset)}}")
+
+     Load a specific recording:
+
+     >>> if len(dataset) > 0:
+     ...     recording = dataset[0]
+     ...     raw = recording.load()
+     ...     print(f"Sampling rate: {{raw.info['sfreq']}} Hz")
+     ...     print(f"Number of channels: {{len(raw.ch_names)}}")
+
+     Filter by additional criteria:
+
+     >>> # Get subset with specific task or subject
+     >>> filtered_dataset = {dataset_id.upper()}(
+     ...     cache_dir="./data",
+     ...     query={{"task": "RestingState"}} # if applicable
+     ... )
+
+     Notes
+     -----
+     More details available in the `NEMAR documentation <https://nemar.org/dataexplorer/detail?dataset_id={dataset_id}>`__.
+
+     See Also
+     --------
+     {base_class.__name__} : Base dataset class with full API documentation
+     """
+
+     return docstring
+
+
  def _markdown_table(row_series: pd.Series) -> str:
      """Create a reStructuredText grid table from a pandas Series."""
      if row_series.empty:
@@ -128,7 +210,12 @@ def _markdown_table(row_series: pd.Series) -> str:
      table = tabulate(df, headers="keys", tablefmt="rst", showindex=False)

      # Add a caption for the table
-     caption = f"Short overview of dataset {dataset_id} more details in the `Nemar documentation <https://nemar.org/dataexplorer/detail?dataset_id={dataset_id}>`_."
+     # Use an anonymous external link (double underscore) to avoid duplicate
+     # target warnings when this docstring is repeated across many classes.
+     caption = (
+         f"Short overview of dataset {dataset_id} more details in the "
+         f"`NeMAR documentation <https://nemar.org/dataexplorer/detail?dataset_id={dataset_id}>`__."
+     )
      # adding caption below the table
      # Indent the table to fit within the admonition block
      indented_table = "\n".join(" " + line for line in table.split("\n"))
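A small, self-contained sketch of how the rST overview table is produced with ``tabulate``, the same call shown in the context lines above; the column names and values in the Series are illustrative:

    # Sketch only: build a one-row rST grid table from a pandas Series.
    import pandas as pd
    from tabulate import tabulate

    row = pd.Series({"dataset": "ds004844", "n_subjects": 17, "n_records": 68, "size": "22.33 GB"})
    df = row.to_frame().T
    print(tabulate(df, headers="keys", tablefmt="rst", showindex=False))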
eegdash/downloader.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """File downloading utilities for EEG data from cloud storage.
+
+ This module provides functions for downloading EEG data files and BIDS dependencies from
+ AWS S3 storage, with support for caching and progress tracking. It handles the communication
+ between the EEGDash metadata database and the actual EEG data stored in the cloud.
+ """
+
  import re
  import tempfile
  from pathlib import Path
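A hedged sketch of the general pattern the docstring describes, anonymous S3 download with a simple local cache. This is not eegdash's actual API; the ``s3fs`` dependency, the ``fetch`` helper, and the S3 path are assumptions for illustration only:

    # Sketch only: skip the download when the file is already cached locally.
    from pathlib import Path
    import s3fs

    def fetch(s3_path: str, cache_dir: str = "./data") -> Path:
        local = Path(cache_dir) / Path(s3_path).name
        if not local.exists():
            local.parent.mkdir(parents=True, exist_ok=True)
            s3fs.S3FileSystem(anon=True).get(s3_path, str(local))
        return local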
eegdash/features/serialization.py CHANGED
@@ -1,6 +1,8 @@
- """Convenience functions for storing and loading of features datasets.
+ """Convenience functions for storing and loading features datasets.
+
+ See Also:
+     https://github.com/braindecode/braindecode//blob/master/braindecode/datautil/serialization.py#L165-L229

- see also: https://github.com/braindecode/braindecode//blob/master/braindecode/datautil/serialization.py#L165-L229
  """

  from pathlib import Path
@@ -15,7 +17,7 @@ from .datasets import FeaturesConcatDataset, FeaturesDataset


  def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):
-     """Load a stored FeaturesConcatDataset of FeaturesDatasets from files.
+     """Load a stored features dataset from files.

      Parameters
      ----------
@@ -28,7 +30,9 @@ def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):

      Returns
      -------
-     concat_dataset: FeaturesConcatDataset of FeaturesDatasets
+     concat_dataset: eegdash.features.datasets.FeaturesConcatDataset
+         A concatenation of multiple eegdash.features.datasets.FeaturesDataset
+         instances loaded from the given directory.

      """
      # Make sure we always work with a pathlib.Path
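A brief usage sketch of the function whose signature appears above; the path is a placeholder, and the returned object is the ``FeaturesConcatDataset`` described in the Returns section:

    # Sketch only: load a previously stored features dataset from disk.
    from eegdash.features.serialization import load_features_concat_dataset

    concat_ds = load_features_concat_dataset("./features", n_jobs=1)
    print(concat_ds)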
eegdash/hbn/__init__.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Healthy Brain Network (HBN) specific utilities and preprocessing.
+
+ This module provides specialized functions for working with the Healthy Brain Network
+ dataset, including preprocessing pipelines, annotation handling, and windowing utilities
+ tailored for HBN EEG data analysis.
+ """
+
  from .preprocessing import hbn_ec_ec_reannotation
  from .windows import (
      add_aux_anchors,
eegdash/hbn/preprocessing.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Preprocessing utilities specific to the Healthy Brain Network dataset.
+
+ This module contains preprocessing classes and functions designed specifically for
+ HBN EEG data, including specialized annotation handling for eyes-open/eyes-closed
+ paradigms and other HBN-specific preprocessing steps.
+ """
+
  import mne
  import numpy as np

eegdash/hbn/windows.py CHANGED
@@ -1,3 +1,15 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Windowing and trial processing utilities for HBN datasets.
+
+ This module provides functions for building trial tables, adding auxiliary anchors,
+ annotating trials with targets, and filtering recordings based on various criteria.
+ These utilities are specifically designed for working with HBN EEG data structures
+ and experimental paradigms.
+ """
+
  import logging

  import mne
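A hedged illustration of the annotation handling these HBN modules build on, using MNE's public ``Annotations`` API (the same ``mne`` import shown above); the onsets, durations, and labels are invented for the example and are not taken from the HBN pipeline:

    # Sketch only: describe alternating eyes-open/eyes-closed segments.
    import mne

    annotations = mne.Annotations(
        onset=[0.0, 20.0, 40.0],
        duration=[20.0, 20.0, 20.0],
        description=["eyes_open", "eyes_closed", "eyes_open"],
    )
    # raw.set_annotations(annotations)  # 'raw' would be an mne.io.Raw recording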
eegdash/logging.py CHANGED
@@ -1,3 +1,13 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Logging configuration for EEGDash.
+
+ This module sets up centralized logging for the EEGDash package using Rich for enhanced
+ console output formatting. It provides a consistent logging interface across all modules.
+ """
+
  import logging

  from rich.logging import RichHandler
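For context, the standard way to attach the ``RichHandler`` imported above to Python's logging; this mirrors Rich's documented setup, and eegdash's exact handler configuration is not shown in this diff:

    # Sketch only: route log records through Rich for colorized console output.
    import logging
    from rich.logging import RichHandler

    logging.basicConfig(
        level="INFO", format="%(message)s", datefmt="[%X]", handlers=[RichHandler()]
    )
    logging.getLogger("eegdash").info("Rich-formatted log output")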
eegdash/mongodb.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """MongoDB connection and operations management.
+
+ This module provides thread-safe MongoDB connection management and high-level database
+ operations for the EEGDash metadata database. It includes methods for finding, adding,
+ and updating EEG data records with proper connection pooling and error handling.
+ """
+
  import threading

  from pymongo import MongoClient
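A minimal sketch of the lock-guarded, shared-client pattern the docstring describes, using only the two imports shown above; this is illustrative rather than eegdash's implementation, and the connection URI is a placeholder:

    # Sketch only: lazily create one MongoClient and share it across threads.
    import threading
    from pymongo import MongoClient

    _client = None
    _lock = threading.Lock()

    def get_client(uri: str = "mongodb://localhost:27017") -> MongoClient:
        global _client
        with _lock:                         # serialize first-time creation
            if _client is None:
                _client = MongoClient(uri)  # MongoClient itself is thread-safe
        return _client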
eegdash/paths.py CHANGED
@@ -1,3 +1,14 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """Path utilities and cache directory management.
+
+ This module provides functions for resolving consistent cache directories and path
+ management throughout the EEGDash package, with integration to MNE-Python's
+ configuration system.
+ """
+
  from __future__ import annotations

  import os
eegdash/utils.py CHANGED
@@ -1,3 +1,13 @@
+ # Authors: The EEGDash contributors.
+ # License: GNU General Public License
+ # Copyright the EEGDash contributors.
+
+ """General utility functions for EEGDash.
+
+ This module contains miscellaneous utility functions used across the EEGDash package,
+ including MongoDB client initialization and configuration helpers.
+ """
+
  from mne.utils import get_config, set_config, use_log_level

{eegdash-0.4.0.dev132.dist-info → eegdash-0.4.0.dev144.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: eegdash
- Version: 0.4.0.dev132
+ Version: 0.4.0.dev144
  Summary: EEG data for machine learning
  Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@gmail.com>, Aviv Dotan <avivd220@gmail.com>, Oren Shriki <oren70@gmail.com>, Bruno Aristimunha <b.aristimunha@gmail.com>
  License-Expression: GPL-3.0-only
@@ -65,6 +65,7 @@ Requires-Dist: ipython; extra == "docs"
  Requires-Dist: lightgbm; extra == "docs"
  Requires-Dist: plotly; extra == "docs"
  Requires-Dist: nbformat; extra == "docs"
+ Requires-Dist: graphviz; extra == "docs"
  Provides-Extra: all
  Requires-Dist: eegdash[docs]; extra == "all"
  Requires-Dist: eegdash[dev]; extra == "all"
eegdash-0.4.0.dev144.dist-info/RECORD ADDED
@@ -0,0 +1,37 @@
+ eegdash/__init__.py,sha256=mb1qG2Bvohd8m8HMQlfoq8GO9ANnQZ3bnjL8QnJKlFU,704
+ eegdash/api.py,sha256=OLQbpOoIZpZVJSss3imLFmN6d1Fpwk6tj-yRMQlr00Q,39429
+ eegdash/bids_eeg_metadata.py,sha256=EFJ1grNcqS0eF0hg45F6St8gFc3Hlzsgccpr-9XTMZk,14153
+ eegdash/const.py,sha256=-cwrtncqJHZ19lGf2MK-IgKy7hTMfb6H-CeL50Oonyg,7883
+ eegdash/data_utils.py,sha256=s9FyPpsw32ndBsusL4TQX6rOsLEiN73RneEuXKffaYc,26477
+ eegdash/downloader.py,sha256=TsoFDmzSBLiTwW_scfju4MXEIq088upHvdEfGw_c8WM,6256
+ eegdash/logging.py,sha256=Tbz-zXaxvzZkYmrAQTEyFqevzWtM5QZPP6LL7XNy8d0,952
+ eegdash/mongodb.py,sha256=0QpkAdwQOisbCr0-rd0wPFQiG0IT9h2Ae-CXYdrt65o,2430
+ eegdash/paths.py,sha256=-bl81r7UyPr-Kq6V6j6h9Mq6dxg5T5EkBVJlOLmQecg,1217
+ eegdash/utils.py,sha256=05MwB7Y447qkWfxCqgGy2DZUHPV1c1xvr3EUyhD0OHI,723
+ eegdash/dataset/__init__.py,sha256=HKDfV2DHBv63BqYLBWDMvU8jbFNRC7DqQbxL7RG1DKQ,863
+ eegdash/dataset/dataset.py,sha256=e_rliu4E-uPtz_miUSzGukUahCHHhyXB2Gu3pm3cyHo,7062
+ eegdash/dataset/dataset_summary.csv,sha256=a5Y21LmBPKLVRt5uKNXO7lSRDjsDmJLzv6-3HryF5JU,23614
+ eegdash/dataset/registry.py,sha256=KmPDfazhdsIyUouo3qdqDaHiTKHCZcEvXQJeHphZijY,7057
+ eegdash/features/__init__.py,sha256=BXNhjvL4_SSFAY1lcP9nyGpkbJNtoOMH4AHlF6OyABo,4078
+ eegdash/features/datasets.py,sha256=eV4d86EU4fu1yoIMdPQnot6YZDRGG4qE9h77lk7iVhU,18317
+ eegdash/features/decorators.py,sha256=v0qaJz_dcX703p1fvFYbAIXmwK3d8naYGlq7fRVKn_w,1313
+ eegdash/features/extractors.py,sha256=H7h6tP3dKoRcjDJpWWAo0ppmokCq5QlhqMcehYwYV9s,6845
+ eegdash/features/inspect.py,sha256=PmbWhx5H_WqpnorUpWONUSkUtaIHkZblRa_Xyk7Szyc,1569
+ eegdash/features/serialization.py,sha256=LmDrQEb-NLNgak_LabdDnr_J_v0QyLPzm_E8IiIHgMQ,2960
+ eegdash/features/utils.py,sha256=eM6DdyOpdVfNh7dSPykJ0WaTDtaGvkCQWAmW0G8v60Y,3784
+ eegdash/features/feature_bank/__init__.py,sha256=YsMXLC1FEtHL3IEw9pYw1fc5IY0x_hr2qWQowI5gZj8,2991
+ eegdash/features/feature_bank/complexity.py,sha256=iy9uaLInsYdxKZlXHTWlgEpP9fVI-v9TqLGfnS15-Eg,3258
+ eegdash/features/feature_bank/connectivity.py,sha256=bQ6KlxWm5GNpCS9ypLqBUr2L171Yq7wpBQT2tRQKTZ4,2159
+ eegdash/features/feature_bank/csp.py,sha256=jKPrmqBj7FliybNbg035cVZddvVSkhk9OazcscDpipU,3303
+ eegdash/features/feature_bank/dimensionality.py,sha256=j_Ds71Y1AbV2uLFQj8EuXQ4kzofLBlQtPV5snMkF7i4,3965
+ eegdash/features/feature_bank/signal.py,sha256=3Tb8z9gX7iZipxQJ9DSyy30JfdmW58kgvimSyZX74p8,3404
+ eegdash/features/feature_bank/spectral.py,sha256=bNB7skusePs1gX7NOU6yRlw_Gr4UOCkO_ylkCgybzug,3319
+ eegdash/features/feature_bank/utils.py,sha256=DGh-Q7-XFIittP7iBBxvsJaZrlVvuY5mw-G7q6C-PCI,1237
+ eegdash/hbn/__init__.py,sha256=hsI5pmIuYDzr--aE5UiToO-P9XL5fVRKahZzdsAodro,794
+ eegdash/hbn/preprocessing.py,sha256=cfsLXnGuUaVJ3NhueDgmdc0w7jflmIi69occuB4bs7M,2609
+ eegdash/hbn/windows.py,sha256=23KyVl0pQn4o40wM3Rsu8nl5tN-REAusU7wcv9L4a5U,10351
+ eegdash-0.4.0.dev144.dist-info/licenses/LICENSE,sha256=asisR-xupy_NrQBFXnx6yqXeZcYWLvbAaiETl25iXT0,931
+ eegdash-0.4.0.dev144.dist-info/METADATA,sha256=CgGdbNlkxb0Ako1Q1cwdwEotFGf-CBz2yEZm6WBMmlw,6776
+ eegdash-0.4.0.dev144.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ eegdash-0.4.0.dev144.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
+ eegdash-0.4.0.dev144.dist-info/RECORD,,
eegdash-0.4.0.dev132.dist-info/RECORD DELETED
@@ -1,37 +0,0 @@
- eegdash/__init__.py,sha256=X0mp26TKeJDAerze3q3hnCgN6V9Sokwwvj_eDdjswaE,284
- eegdash/api.py,sha256=0YNbL1u-Wh51r-dRU2ijUmfOTIharr3OFGgLOxEznX0,37813
- eegdash/bids_eeg_metadata.py,sha256=XUkQp2M8zQ_wH5JC8lQiVR0TWssOjSdrdEWkgwGjiZ8,13699
- eegdash/const.py,sha256=qdFBEL9kIrsj9CdxbXhBkR61R3CrTGSaj5Iq0YOACIs,7313
- eegdash/data_utils.py,sha256=dePEXcJefo3gmD534bb576p8v9jErDNLDHQoDGHqH-g,26006
- eegdash/downloader.py,sha256=B-8u0c39F4inV-v_WgYZrUKAGPQOdfqC2RX1qNrRQYM,5808
- eegdash/logging.py,sha256=SZdB7WLT5b2okecWpvLx4UWUxg3DiA11Z5d9lhYdDyc,616
- eegdash/mongodb.py,sha256=GD3WgA253oFgpzOHrYaj4P1mRjNtDMT5Oj4kVvHswjI,2006
- eegdash/paths.py,sha256=246xkectTxDAYcREs1Qma_F1Y-oSmLlb0hn0F2Za5Ss,866
- eegdash/utils.py,sha256=7TfQ9D0LrAJ7FgnSXEvWgeHWK2QqaqS-_WcWXD86ObQ,408
- eegdash/dataset/__init__.py,sha256=Qmzki5G8GaFlzTb10e4SmC3WkKuJyo1Ckii15tCEHAo,157
- eegdash/dataset/dataset.py,sha256=e_rliu4E-uPtz_miUSzGukUahCHHhyXB2Gu3pm3cyHo,7062
- eegdash/dataset/dataset_summary.csv,sha256=XF0vdHz77DFyVLTaET8lL5gQQ4r-q1xAfSDWH5GTPLA,23655
- eegdash/dataset/registry.py,sha256=genOqAuf9cQBnHhPqRwfLP7S1XsnkLot6sLyJozPtf4,4150
- eegdash/features/__init__.py,sha256=BXNhjvL4_SSFAY1lcP9nyGpkbJNtoOMH4AHlF6OyABo,4078
- eegdash/features/datasets.py,sha256=eV4d86EU4fu1yoIMdPQnot6YZDRGG4qE9h77lk7iVhU,18317
- eegdash/features/decorators.py,sha256=v0qaJz_dcX703p1fvFYbAIXmwK3d8naYGlq7fRVKn_w,1313
- eegdash/features/extractors.py,sha256=H7h6tP3dKoRcjDJpWWAo0ppmokCq5QlhqMcehYwYV9s,6845
- eegdash/features/inspect.py,sha256=PmbWhx5H_WqpnorUpWONUSkUtaIHkZblRa_Xyk7Szyc,1569
- eegdash/features/serialization.py,sha256=snXuHVd0CoT2ese0iWi5RwZrVHCGc0oCZ8-SXqGY88I,2848
- eegdash/features/utils.py,sha256=eM6DdyOpdVfNh7dSPykJ0WaTDtaGvkCQWAmW0G8v60Y,3784
- eegdash/features/feature_bank/__init__.py,sha256=YsMXLC1FEtHL3IEw9pYw1fc5IY0x_hr2qWQowI5gZj8,2991
- eegdash/features/feature_bank/complexity.py,sha256=iy9uaLInsYdxKZlXHTWlgEpP9fVI-v9TqLGfnS15-Eg,3258
- eegdash/features/feature_bank/connectivity.py,sha256=bQ6KlxWm5GNpCS9ypLqBUr2L171Yq7wpBQT2tRQKTZ4,2159
- eegdash/features/feature_bank/csp.py,sha256=jKPrmqBj7FliybNbg035cVZddvVSkhk9OazcscDpipU,3303
- eegdash/features/feature_bank/dimensionality.py,sha256=j_Ds71Y1AbV2uLFQj8EuXQ4kzofLBlQtPV5snMkF7i4,3965
- eegdash/features/feature_bank/signal.py,sha256=3Tb8z9gX7iZipxQJ9DSyy30JfdmW58kgvimSyZX74p8,3404
- eegdash/features/feature_bank/spectral.py,sha256=bNB7skusePs1gX7NOU6yRlw_Gr4UOCkO_ylkCgybzug,3319
- eegdash/features/feature_bank/utils.py,sha256=DGh-Q7-XFIittP7iBBxvsJaZrlVvuY5mw-G7q6C-PCI,1237
- eegdash/hbn/__init__.py,sha256=U8mK64napnKU746C5DOwkX7W7sg3iW5kb_cVv2pfFq0,394
- eegdash/hbn/preprocessing.py,sha256=zMkDFXQxWLn-Iy0cHRk-JL5c479HpclgIAGm-xuKPFg,2196
- eegdash/hbn/windows.py,sha256=Obv4L2hP8ACancUawkMYbiusfvhAw-mG_vqyDDMwOJ8,9890
- eegdash-0.4.0.dev132.dist-info/licenses/LICENSE,sha256=asisR-xupy_NrQBFXnx6yqXeZcYWLvbAaiETl25iXT0,931
- eegdash-0.4.0.dev132.dist-info/METADATA,sha256=2_hkPms-a1bnPet5DKAXRy4vO33LLtnQYgea7EMwFUk,6735
- eegdash-0.4.0.dev132.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- eegdash-0.4.0.dev132.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
- eegdash-0.4.0.dev132.dist-info/RECORD,,