eegdash 0.0.2__tar.gz → 0.5.0.dev170116588__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77) hide show
  1. eegdash-0.5.0.dev170116588/LICENSE +29 -0
  2. eegdash-0.5.0.dev170116588/MANIFEST.in +7 -0
  3. eegdash-0.5.0.dev170116588/PKG-INFO +137 -0
  4. eegdash-0.5.0.dev170116588/README.md +45 -0
  5. eegdash-0.5.0.dev170116588/docs/Makefile +58 -0
  6. eegdash-0.5.0.dev170116588/docs/source/_templates/autosummary/module.rst +82 -0
  7. eegdash-0.5.0.dev170116588/docs/source/api/api.rst +241 -0
  8. eegdash-0.5.0.dev170116588/docs/source/api/api_core.rst +206 -0
  9. eegdash-0.5.0.dev170116588/docs/source/api/api_features.rst +24 -0
  10. eegdash-0.5.0.dev170116588/docs/source/api/features_overview.rst +37 -0
  11. eegdash-0.5.0.dev170116588/docs/source/conf.py +1190 -0
  12. eegdash-0.5.0.dev170116588/docs/source/dataset_summary/bubble.rst +19 -0
  13. eegdash-0.5.0.dev170116588/docs/source/dataset_summary/kde.rst +18 -0
  14. eegdash-0.5.0.dev170116588/docs/source/dataset_summary/sankey.rst +20 -0
  15. eegdash-0.5.0.dev170116588/docs/source/dataset_summary/table.rst +27 -0
  16. eegdash-0.5.0.dev170116588/docs/source/dataset_summary/treemap.rst +19 -0
  17. eegdash-0.5.0.dev170116588/docs/source/dataset_summary.rst +82 -0
  18. eegdash-0.5.0.dev170116588/docs/source/developer_notes.rst +418 -0
  19. eegdash-0.5.0.dev170116588/docs/source/index.rst +301 -0
  20. eegdash-0.5.0.dev170116588/docs/source/install/install.rst +83 -0
  21. eegdash-0.5.0.dev170116588/docs/source/install/install_pip.rst +21 -0
  22. eegdash-0.5.0.dev170116588/docs/source/install/install_source.rst +84 -0
  23. eegdash-0.5.0.dev170116588/docs/source/user_guide.rst +233 -0
  24. eegdash-0.5.0.dev170116588/eegdash/__init__.py +38 -0
  25. eegdash-0.5.0.dev170116588/eegdash/api.py +269 -0
  26. eegdash-0.5.0.dev170116588/eegdash/bids_metadata.py +447 -0
  27. eegdash-0.5.0.dev170116588/eegdash/const.py +350 -0
  28. eegdash-0.5.0.dev170116588/eegdash/dataset/__init__.py +29 -0
  29. eegdash-0.5.0.dev170116588/eegdash/dataset/base.py +204 -0
  30. eegdash-0.5.0.dev170116588/eegdash/dataset/bids_dataset.py +740 -0
  31. eegdash-0.5.0.dev170116588/eegdash/dataset/dataset.py +804 -0
  32. eegdash-0.5.0.dev170116588/eegdash/dataset/dataset_summary.csv +523 -0
  33. eegdash-0.5.0.dev170116588/eegdash/dataset/registry.py +395 -0
  34. eegdash-0.5.0.dev170116588/eegdash/downloader.py +264 -0
  35. eegdash-0.5.0.dev170116588/eegdash/features/__init__.py +150 -0
  36. eegdash-0.5.0.dev170116588/eegdash/features/datasets.py +682 -0
  37. eegdash-0.5.0.dev170116588/eegdash/features/decorators.py +143 -0
  38. eegdash-0.5.0.dev170116588/eegdash/features/extractors.py +380 -0
  39. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/__init__.py +110 -0
  40. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/complexity.py +106 -0
  41. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/connectivity.py +58 -0
  42. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/csp.py +103 -0
  43. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/dimensionality.py +134 -0
  44. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/signal.py +128 -0
  45. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/spectral.py +115 -0
  46. eegdash-0.5.0.dev170116588/eegdash/features/feature_bank/utils.py +56 -0
  47. eegdash-0.5.0.dev170116588/eegdash/features/inspect.py +159 -0
  48. eegdash-0.5.0.dev170116588/eegdash/features/serialization.py +124 -0
  49. eegdash-0.5.0.dev170116588/eegdash/features/utils.py +192 -0
  50. eegdash-0.5.0.dev170116588/eegdash/hbn/__init__.py +28 -0
  51. eegdash-0.5.0.dev170116588/eegdash/hbn/preprocessing.py +105 -0
  52. eegdash-0.5.0.dev170116588/eegdash/hbn/windows.py +436 -0
  53. eegdash-0.5.0.dev170116588/eegdash/http_api_client.py +178 -0
  54. eegdash-0.5.0.dev170116588/eegdash/local_bids.py +167 -0
  55. eegdash-0.5.0.dev170116588/eegdash/logging.py +35 -0
  56. eegdash-0.5.0.dev170116588/eegdash/paths.py +60 -0
  57. eegdash-0.5.0.dev170116588/eegdash/schemas.py +972 -0
  58. eegdash-0.5.0.dev170116588/eegdash.egg-info/PKG-INFO +137 -0
  59. eegdash-0.5.0.dev170116588/eegdash.egg-info/SOURCES.txt +62 -0
  60. eegdash-0.5.0.dev170116588/eegdash.egg-info/requires.txt +68 -0
  61. eegdash-0.5.0.dev170116588/pyproject.toml +160 -0
  62. eegdash-0.0.2/LICENSE +0 -20
  63. eegdash-0.0.2/PKG-INFO +0 -139
  64. eegdash-0.0.2/README.md +0 -103
  65. eegdash-0.0.2/eegdash/__init__.py +0 -1
  66. eegdash-0.0.2/eegdash/data_utils.py +0 -217
  67. eegdash-0.0.2/eegdash/main.py +0 -17
  68. eegdash-0.0.2/eegdash/script.py +0 -25
  69. eegdash-0.0.2/eegdash/signalstore_data_utils.py +0 -630
  70. eegdash-0.0.2/eegdash.egg-info/PKG-INFO +0 -139
  71. eegdash-0.0.2/eegdash.egg-info/SOURCES.txt +0 -14
  72. eegdash-0.0.2/eegdash.egg-info/requires.txt +0 -1
  73. eegdash-0.0.2/pyproject.toml +0 -27
  74. eegdash-0.0.2/tests/__init__.py +0 -3
  75. {eegdash-0.0.2 → eegdash-0.5.0.dev170116588}/eegdash.egg-info/dependency_links.txt +0 -0
  76. {eegdash-0.0.2 → eegdash-0.5.0.dev170116588}/eegdash.egg-info/top_level.txt +0 -0
  77. {eegdash-0.0.2 → eegdash-0.5.0.dev170116588}/setup.cfg +0 -0
@@ -0,0 +1,29 @@
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) 2024-2025, EEGDash contributors
4
+ All rights reserved.
5
+
6
+ Redistribution and use in source and binary forms, with or without
7
+ modification, are permitted provided that the following conditions are met:
8
+
9
+ 1. Redistributions of source code must retain the above copyright notice, this
10
+ list of conditions and the following disclaimer.
11
+
12
+ 2. Redistributions in binary form must reproduce the above copyright notice,
13
+ this list of conditions and the following disclaimer in the documentation
14
+ and/or other materials provided with the distribution.
15
+
16
+ 3. Neither the name of the copyright holder nor the names of its
17
+ contributors may be used to endorse or promote products derived from
18
+ this software without specific prior written permission.
19
+
20
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,7 @@
1
+ include README.md
2
+ include LICENSE
3
+
4
+ include eegdash/dataset/dataset_summary.csv
5
+
6
+ recursive-include docs *.ipynb *.rst conf.py Makefile
7
+ recursive-exclude docs *checkpoint.ipynb
@@ -0,0 +1,137 @@
1
+ Metadata-Version: 2.4
2
+ Name: eegdash
3
+ Version: 0.5.0.dev170116588
4
+ Summary: EEG data for machine learning
5
+ Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@ucsd.edu>, Aviv Dotan <avivdot@bgu.post.ac.il>, Oren Shriki <shrikio@bgu.ac.il>, Bruno Aristimunha <b.aristimunha@gmail.com>
6
+ License-Expression: BSD-3-Clause
7
+ Project-URL: Homepage, https://github.com/sccn/EEG-Dash-Data
8
+ Project-URL: Issues, https://github.com/sccn/EEG-Dash-Data/issues
9
+ Classifier: Operating System :: OS Independent
10
+ Classifier: Intended Audience :: Science/Research
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Programming Language :: Python
13
+ Classifier: Topic :: Software Development
14
+ Classifier: Topic :: Scientific/Engineering
15
+ Classifier: Development Status :: 3 - Alpha
16
+ Classifier: Operating System :: Microsoft :: Windows
17
+ Classifier: Operating System :: POSIX
18
+ Classifier: Operating System :: Unix
19
+ Classifier: Operating System :: MacOS
20
+ Classifier: Programming Language :: Python :: 3
21
+ Classifier: Programming Language :: Python :: 3.10
22
+ Classifier: Programming Language :: Python :: 3.11
23
+ Classifier: Programming Language :: Python :: 3.12
24
+ Classifier: Programming Language :: Python :: 3.13
25
+ Requires-Python: >=3.11
26
+ Description-Content-Type: text/markdown
27
+ License-File: LICENSE
28
+ Requires-Dist: braindecode[hub]>=1.3.2
29
+ Requires-Dist: mne_bids>=0.18.0
30
+ Requires-Dist: numba
31
+ Requires-Dist: requests>=2.25.0
32
+ Requires-Dist: s3fs
33
+ Requires-Dist: tqdm
34
+ Requires-Dist: pymatreader
35
+ Requires-Dist: eeglabio
36
+ Requires-Dist: tabulate
37
+ Requires-Dist: rich
38
+ Requires-Dist: pyarrow
39
+ Requires-Dist: pandas>=2.0
40
+ Requires-Dist: pydantic
41
+ Provides-Extra: tests
42
+ Requires-Dist: pytest; extra == "tests"
43
+ Requires-Dist: pytest-cov; extra == "tests"
44
+ Requires-Dist: pytest-sugar; extra == "tests"
45
+ Requires-Dist: codecov; extra == "tests"
46
+ Requires-Dist: pytest_cases; extra == "tests"
47
+ Requires-Dist: pytest-benchmark; extra == "tests"
48
+ Requires-Dist: pytest-xdist; extra == "tests"
49
+ Provides-Extra: dev
50
+ Requires-Dist: pre-commit; extra == "dev"
51
+ Requires-Dist: ipykernel; extra == "dev"
52
+ Requires-Dist: lightning>=2.0; extra == "dev"
53
+ Requires-Dist: torchmetrics>=0.7; extra == "dev"
54
+ Requires-Dist: scikit-learn; extra == "dev"
55
+ Requires-Dist: tensorboard; extra == "dev"
56
+ Provides-Extra: docs
57
+ Requires-Dist: sphinx; extra == "docs"
58
+ Requires-Dist: sphinx_design; extra == "docs"
59
+ Requires-Dist: sphinx_gallery; extra == "docs"
60
+ Requires-Dist: sphinx_rtd_theme; extra == "docs"
61
+ Requires-Dist: pydata-sphinx-theme; extra == "docs"
62
+ Requires-Dist: sphinx-autobuild; extra == "docs"
63
+ Requires-Dist: sphinx-copybutton; extra == "docs"
64
+ Requires-Dist: sphinx-sitemap; extra == "docs"
65
+ Requires-Dist: numpydoc; extra == "docs"
66
+ Requires-Dist: memory_profiler; extra == "docs"
67
+ Requires-Dist: ipython; extra == "docs"
68
+ Requires-Dist: lightgbm; extra == "docs"
69
+ Requires-Dist: plotly; extra == "docs"
70
+ Requires-Dist: nbformat; extra == "docs"
71
+ Requires-Dist: graphviz; extra == "docs"
72
+ Requires-Dist: neato; extra == "docs"
73
+ Requires-Dist: moabb; extra == "docs"
74
+ Provides-Extra: digestion
75
+ Requires-Dist: pybids; extra == "digestion"
76
+ Requires-Dist: gql[requests]; extra == "digestion"
77
+ Requires-Dist: requests_toolbelt; extra == "digestion"
78
+ Requires-Dist: beautifulsoup4; extra == "digestion"
79
+ Requires-Dist: python-dotenv; extra == "digestion"
80
+ Requires-Dist: selenium; extra == "digestion"
81
+ Requires-Dist: httpx; extra == "digestion"
82
+ Requires-Dist: tenacity; extra == "digestion"
83
+ Requires-Dist: hishel; extra == "digestion"
84
+ Requires-Dist: pydantic>=2; extra == "digestion"
85
+ Requires-Dist: PyGithub>=2; extra == "digestion"
86
+ Provides-Extra: all
87
+ Requires-Dist: eegdash[docs]; extra == "all"
88
+ Requires-Dist: eegdash[dev]; extra == "all"
89
+ Requires-Dist: eegdash[tests]; extra == "all"
90
+ Requires-Dist: eegdash[digestion]; extra == "all"
91
+ Dynamic: license-file
92
+
93
+ # EEG-Dash
94
+
95
+ [![PyPI version](https://img.shields.io/pypi/v/eegdash)](https://pypi.org/project/eegdash/)
96
+ [![Docs](https://img.shields.io/badge/docs-stable-brightgreen.svg)](https://sccn.github.io/eegdash)
97
+
98
+ [![License: BSD-3-Clause](https://img.shields.io/badge/License-BSD--3--Clause-blue.svg)](LICENSE)
99
+ [![Python versions](https://img.shields.io/pypi/pyversions/eegdash.svg)](https://pypi.org/project/eegdash/)
100
+ [![Downloads](https://pepy.tech/badge/eegdash)](https://pepy.tech/project/eegdash)
101
+ [![Coverage](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fraw.githubusercontent.com%2Feegdash%2FEEGDash%2Fmain%2Fcoverage.json&query=%24.totals.percent_covered_display&suffix=%25&label=coverage)](https://github.com/eegdash/EEGDash/blob/main/coverage.json)
102
+
103
+ To leverage recent and ongoing advancements in large-scale computational methods and to ensure the preservation of scientific data generated from publicly funded research, the EEG-DaSh data archive will create a data-sharing resource for MEEG (EEG, MEG) data contributed by collaborators for machine learning (ML) and deep learning (DL) applications.
104
+
105
+ ## Data source
106
+
107
+ The data in EEG-DaSh originates from a collaboration involving 25 laboratories, encompassing 27,053 participants. This extensive collection includes MEEG data, which is a combination of EEG and MEG signals. The data is sourced from various studies conducted by these labs, involving both healthy subjects and clinical populations with conditions such as ADHD, depression, schizophrenia, dementia, autism, and psychosis. Additionally, data spans different mental states like sleep, meditation, and cognitive tasks. In addition, EEG-DaSh will incorporate a subset of the data converted from NEMAR, which includes 330 MEEG BIDS-formatted datasets, further expanding the archive with well-curated, standardized neuroelectromagnetic data.
108
+
109
+ ## Data format
110
+
111
+ EEGDash queries return a **PyTorch Dataset** formatted to facilitate machine learning (ML) and deep learning (DL) applications. PyTorch Datasets are the best format for EEGDash queries because they provide an efficient, scalable, and flexible structure for machine learning (ML) and deep learning (DL) applications. They allow seamless integration with PyTorch’s DataLoader, enabling efficient batching, shuffling, and parallel data loading, which is essential for training deep learning models on large EEG datasets.
112
+
113
+ ## Data preprocessing
114
+
115
+ EEGDash datasets are processed using the popular [braindecode](https://braindecode.org/stable/index.html) library. In fact, EEGDash datasets are braindecode datasets, which are themselves PyTorch datasets. This means that any preprocessing possible on braindecode datasets is also possible on EEGDash datasets. Refer to [braindecode](https://braindecode.org/stable/index.html) tutorials for guidance on preprocessing EEG data.
116
+
117
+ ## EEG-Dash usage
118
+
119
+ ### Install
120
+ Use your preferred Python environment manager with Python >= 3.11 to install the package.
121
+ * To install the eegdash package, use the following command: `pip install eegdash`
122
+ * To verify the installation, start a Python session and type: `from eegdash import EEGDash`
123
+
124
+ Please check our tutorial webpages to explore what you can do with [eegdash](https://eegdash.org/)!
125
+
126
+ ## Education -- Coming soon...
127
+
128
+ We organize workshops and educational events to foster cross-cultural education and student training, offering both online and in-person opportunities in collaboration with US and Israeli partners. Events for 2025 will be announced via the EEGLABNEWS mailing list. Be sure to [subscribe](https://sccn.ucsd.edu/mailman/listinfo/eeglabnews).
129
+
130
+ ## About EEG-DaSh
131
+
132
+ EEG-DaSh is a collaborative initiative between the United States and Israel, supported by the National Science Foundation (NSF). The partnership brings together experts from the Swartz Center for Computational Neuroscience (SCCN) at the University of California San Diego (UCSD) and Ben-Gurion University (BGU) in Israel.
133
+
134
+ ![Screenshot 2024-10-03 at 09 14 06](https://github.com/user-attachments/assets/327639d3-c3b4-46b1-9335-37803209b0d3)
135
+
136
+
137
+
@@ -0,0 +1,45 @@
1
+ # EEG-Dash
2
+
3
+ [![PyPI version](https://img.shields.io/pypi/v/eegdash)](https://pypi.org/project/eegdash/)
4
+ [![Docs](https://img.shields.io/badge/docs-stable-brightgreen.svg)](https://sccn.github.io/eegdash)
5
+
6
+ [![License: BSD-3-Clause](https://img.shields.io/badge/License-BSD--3--Clause-blue.svg)](LICENSE)
7
+ [![Python versions](https://img.shields.io/pypi/pyversions/eegdash.svg)](https://pypi.org/project/eegdash/)
8
+ [![Downloads](https://pepy.tech/badge/eegdash)](https://pepy.tech/project/eegdash)
9
+ [![Coverage](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fraw.githubusercontent.com%2Feegdash%2FEEGDash%2Fmain%2Fcoverage.json&query=%24.totals.percent_covered_display&suffix=%25&label=coverage)](https://github.com/eegdash/EEGDash/blob/main/coverage.json)
10
+
11
+ To leverage recent and ongoing advancements in large-scale computational methods and to ensure the preservation of scientific data generated from publicly funded research, the EEG-DaSh data archive will create a data-sharing resource for MEEG (EEG, MEG) data contributed by collaborators for machine learning (ML) and deep learning (DL) applications.
12
+
13
+ ## Data source
14
+
15
+ The data in EEG-DaSh originates from a collaboration involving 25 laboratories, encompassing 27,053 participants. This extensive collection includes MEEG data, which is a combination of EEG and MEG signals. The data is sourced from various studies conducted by these labs, involving both healthy subjects and clinical populations with conditions such as ADHD, depression, schizophrenia, dementia, autism, and psychosis. Additionally, data spans different mental states like sleep, meditation, and cognitive tasks. In addition, EEG-DaSh will incorporate a subset of the data converted from NEMAR, which includes 330 MEEG BIDS-formatted datasets, further expanding the archive with well-curated, standardized neuroelectromagnetic data.
16
+
17
+ ## Data format
18
+
19
+ EEGDash queries return a **PyTorch Dataset** formatted to facilitate machine learning (ML) and deep learning (DL) applications. PyTorch Datasets are the best format for EEGDash queries because they provide an efficient, scalable, and flexible structure for machine learning (ML) and deep learning (DL) applications. They allow seamless integration with PyTorch’s DataLoader, enabling efficient batching, shuffling, and parallel data loading, which is essential for training deep learning models on large EEG datasets.
20
+
21
+ ## Data preprocessing
22
+
23
+ EEGDash datasets are processed using the popular [braindecode](https://braindecode.org/stable/index.html) library. In fact, EEGDash datasets are braindecode datasets, which are themselves PyTorch datasets. This means that any preprocessing possible on braindecode datasets is also possible on EEGDash datasets. Refer to [braindecode](https://braindecode.org/stable/index.html) tutorials for guidance on preprocessing EEG data.
24
+
25
+ ## EEG-Dash usage
26
+
27
+ ### Install
28
+ Use your preferred Python environment manager with Python >= 3.11 to install the package.
29
+ * To install the eegdash package, use the following command: `pip install eegdash`
30
+ * To verify the installation, start a Python session and type: `from eegdash import EEGDash`
31
+
32
+ Please check our tutorial webpages to explore what you can do with [eegdash](https://eegdash.org/)!
33
+
34
+ ## Education -- Coming soon...
35
+
36
+ We organize workshops and educational events to foster cross-cultural education and student training, offering both online and in-person opportunities in collaboration with US and Israeli partners. Events for 2025 will be announced via the EEGLABNEWS mailing list. Be sure to [subscribe](https://sccn.ucsd.edu/mailman/listinfo/eeglabnews).
37
+
38
+ ## About EEG-DaSh
39
+
40
+ EEG-DaSh is a collaborative initiative between the United States and Israel, supported by the National Science Foundation (NSF). The partnership brings together experts from the Swartz Center for Computational Neuroscience (SCCN) at the University of California San Diego (UCSD) and Ben-Gurion University (BGU) in Israel.
41
+
42
+ ![Screenshot 2024-10-03 at 09 14 06](https://github.com/user-attachments/assets/327639d3-c3b4-46b1-9335-37803209b0d3)
43
+
44
+
45
+
@@ -0,0 +1,58 @@
1
+ # Minimal makefile for Sphinx documentation
2
+ SPHINXOPTS ?=
3
+ SPHINXBUILD ?= sphinx-build
4
+ SOURCEDIR = source
5
+ BUILDDIR = build
6
+ PKG ?= eegdash
7
+ APIDIR := $(SOURCEDIR)/api
8
+
9
+ help:
10
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
11
+
12
+ .PHONY: apidoc
13
+ apidoc:
14
+ # Generate full API docs, then prune duplicates covered by autosummary
15
+ @rm -f "$(APIDIR)"/dataset/eegdash.features*
16
+ @SPHINX_APIDOC_OPTIONS=members,undoc-members,show-inheritance,noindex \
17
+ python -m sphinx.ext.apidoc -f -e -T -o "$(APIDIR)/dataset" "../$(PKG)" "../$(PKG)/features"
18
+
19
+
20
+ # Standard build runs examples
21
+ html: apidoc
22
+
23
+ # Fast build: do NOT execute examples (sphinx-gallery)
24
+ .PHONY: html-noplot
25
+ html-noplot: apidoc
26
+ @python prepare_summary_tables.py --target $(BUILDDIR)
27
+ @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" \
28
+ $(SPHINXOPTS) -D sphinx_gallery_conf.plot_gallery=0 $(O)
29
+
30
+ # Very fast build: limit datasets to 5
31
+ .PHONY: html-fast
32
+ html-fast: apidoc
33
+ @EEGDASH_DOC_LIMIT=5 python prepare_summary_tables.py --target $(BUILDDIR)
34
+ @EEGDASH_DOC_LIMIT=5 $(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" \
35
+ $(SPHINXOPTS) -D sphinx_gallery_conf.plot_gallery=0 $(O)
36
+
37
+ # Custom clean target to remove generated API docs and build files
38
+ .PHONY: clean
39
+ clean:
40
+ @echo "Removing generated API documentation..."
41
+ @rm -rf "$(APIDIR)/dataset"
42
+ @rm -rf "$(APIDIR)/generated"
43
+ @echo "Removing generated dataset pages..."
44
+ @rm -rf "$(APIDIR)/datasets"
45
+ @rm -f "$(APIDIR)/api_dataset.rst"
46
+ @echo "Removing other generated directories..."
47
+ @rm -rf "$(SOURCEDIR)/generated"
48
+ @rm -rf "$(SOURCEDIR)/gen_modules"
49
+ @echo "Removing build directory..."
50
+ @rm -rf "$(BUILDDIR)"
51
+ @echo "Clean completed."
52
+
53
+ .PHONY: help apidoc
54
+ Makefile: ;
55
+
56
+ %: Makefile
57
+ @python prepare_summary_tables.py --target $(BUILDDIR)
58
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@@ -0,0 +1,82 @@
1
+ {{ fullname | escape | underline}}
2
+
3
+ .. automodule:: {{ fullname }}
4
+ :members:
5
+ :undoc-members:
6
+ :show-inheritance:
7
+ :member-order: bysource
8
+ :no-index:
9
+
10
+ {% block attributes %}
11
+ {%- if attributes %}
12
+ .. rubric:: {{ _('Module Attributes') }}
13
+
14
+ .. autosummary::
15
+ {% for item in attributes %}
16
+ {{ item }}
17
+ {%- endfor %}
18
+ {% endif %}
19
+ {%- endblock %}
20
+
21
+ {%- block functions %}
22
+ {%- if functions %}
23
+ .. rubric:: {{ _('Functions') }}
24
+
25
+ .. autosummary::
26
+ {% for item in functions %}
27
+ {{ item }}
28
+ {%- endfor %}
29
+ {% endif %}
30
+ {%- endblock %}
31
+
32
+ {%- block classes %}
33
+ {%- if classes %}
34
+ .. rubric:: {{ _('Classes') }}
35
+
36
+ .. autosummary::
37
+ {% for item in classes %}
38
+ {{ item }}
39
+ {%- endfor %}
40
+ {% endif %}
41
+ {%- endblock %}
42
+
43
+ {%- block exceptions %}
44
+ {%- if exceptions %}
45
+ .. rubric:: {{ _('Exceptions') }}
46
+
47
+ .. autosummary::
48
+ {% for item in exceptions %}
49
+ {{ item }}
50
+ {%- endfor %}
51
+ {% endif %}
52
+ {%- endblock %}
53
+
54
+ {%- block modules %}
55
+ {%- if modules %}
56
+ .. rubric:: Modules
57
+
58
+ .. autosummary::
59
+ :toctree:
60
+ :recursive:
61
+ {% for item in modules %}
62
+ {{ item }}
63
+ {%- endfor %}
64
+ {% endif %}
65
+ {%- endblock %}
66
+
67
+ {% if sg_api_usage %}
68
+ .. _sg_api_{{ fullname }}:
69
+
70
+ API Usage
71
+ ---------
72
+
73
+ .. raw:: html
74
+
75
+ <div class="sg-api-usage">
76
+
77
+ {{ sg_api_usage }}
78
+
79
+ .. raw:: html
80
+
81
+ </div>
82
+ {% endif %}
@@ -0,0 +1,241 @@
1
+ :html_theme.sidebar_primary.remove: true
2
+ :html_theme.sidebar_secondary.remove: true
3
+
4
+ .. _api:
5
+
6
+ #############
7
+ API Reference
8
+ #############
9
+
10
+ The EEGDash API reference curates everything you need to integrate, extend,
11
+ and automate EEGDash—from core dataset helpers to feature extraction and rich
12
+ dataset metadata. The focus is interoperability, extensibility, and ease of use.
13
+
14
+ .. raw:: html
15
+
16
+ <h2 class="hf-section-title">What's inside EEGDash</h2>
17
+ <p class="hf-section-subtitle">Everything you need to discover, prepare, and benchmark EEG and MEG data.</p>
18
+
19
+ .. grid:: 1 1 2 2
20
+ :gutter: 4
21
+ :class-container: hf-feature-grid
22
+
23
+ .. grid-item-card:: Dataset discovery
24
+ :link: ../dataset_summary
25
+ :link-type: doc
26
+ :text-align: left
27
+ :class-card: feature-card hf-reveal hf-delay-1
28
+
29
+ :octicon:`search;1.5em;sd-text-primary`
30
+
31
+ Search metadata, modalities, tasks, and cohorts with unified filters.
32
+
33
+ .. grid-item-card:: Reproducible preprocessing
34
+ :link: ../user_guide
35
+ :link-type: doc
36
+ :text-align: left
37
+ :class-card: feature-card hf-reveal hf-delay-2
38
+
39
+ :octicon:`plug;1.5em;sd-text-primary`
40
+
41
+ One-command pipelines with EEGPrep, MNE, and BIDS alignment.
42
+
43
+ .. grid-item-card:: Benchmarks and features
44
+ :link: ../generated/auto_examples/index
45
+ :link-type: doc
46
+ :text-align: left
47
+ :class-card: feature-card hf-reveal hf-delay-3
48
+
49
+ :octicon:`rocket;1.5em;sd-text-primary`
50
+
51
+ Export model-ready features and compare baselines across datasets.
52
+
53
+ .. grid-item-card:: BIDS-first interoperability
54
+ :link: ../user_guide
55
+ :link-type: doc
56
+ :text-align: left
57
+ :class-card: feature-card hf-reveal hf-delay-3
58
+
59
+ .. image:: ../_static/bids_logo_black.svg
60
+ :alt: BIDS
61
+ :class: hf-feature-logo
62
+
63
+ Keep metadata consistent and portable across teams and tools.
64
+
65
+ The API is organized into three main components:
66
+
67
+
68
+ .. grid:: 1
69
+ :gutter: 4
70
+ :class-container: sd-gap-4 sd-mb-4
71
+
72
+ .. grid-item-card::
73
+ :link: api_core
74
+ :link-type: doc
75
+ :text-align: center
76
+ :class-card: api-grid-card
77
+ :class-header: api-grid-card__header
78
+ :class-body: api-grid-card__body
79
+ :class-footer: api-grid-card__footer
80
+
81
+ .. raw:: html
82
+
83
+ <span class="fa-solid fa-microchip api-grid-card__icon" aria-hidden="true"></span>
84
+
85
+ .. rst-class:: api-grid-card__title
86
+
87
+ **Core API**
88
+ ^^^
89
+
90
+ Build, query, and manage EEGDash datasets and utilities.
91
+
92
+ +++
93
+
94
+ .. button-ref:: api_core
95
+ :color: primary
96
+ :class: api-grid-card__button
97
+ :click-parent:
98
+
99
+ → Explore Core API
100
+
101
+ .. grid-item-card::
102
+ :link: api_features
103
+ :link-type: doc
104
+ :text-align: center
105
+ :class-card: api-grid-card
106
+ :class-header: api-grid-card__header
107
+ :class-body: api-grid-card__body
108
+ :class-footer: api-grid-card__footer
109
+
110
+ .. raw:: html
111
+
112
+ <span class="fa-solid fa-wave-square api-grid-card__icon" aria-hidden="true"></span>
113
+
114
+ .. rst-class:: api-grid-card__title
115
+
116
+ **Feature engineering**
117
+ ^^^
118
+
119
+ Extract statistical, spectral, and machine-learning-ready features.
120
+
121
+ +++
122
+
123
+ .. button-ref:: api_features
124
+ :color: primary
125
+ :class: api-grid-card__button
126
+ :click-parent:
127
+
128
+ → Explore Feature Engineering
129
+
130
+ .. grid-item-card::
131
+ :link: dataset/api_dataset
132
+ :link-type: doc
133
+ :text-align: center
134
+ :class-card: api-grid-card
135
+ :class-header: api-grid-card__header
136
+ :class-body: api-grid-card__body
137
+ :class-footer: api-grid-card__footer
138
+
139
+ .. raw:: html
140
+
141
+ <span class="fa-solid fa-database api-grid-card__icon" aria-hidden="true"></span>
142
+
143
+ .. rst-class:: api-grid-card__title
144
+
145
+ **Dataset catalog**
146
+ ^^^
147
+
148
+ Browse dynamically generated dataset classes with rich metadata.
149
+
150
+ +++
151
+
152
+ .. button-ref:: dataset/api_dataset
153
+ :color: primary
154
+ :class: api-grid-card__button
155
+ :click-parent:
156
+
157
+ → Explore the Dataset API
158
+
159
+
160
+ ********************
161
+ REST API Endpoints
162
+ ********************
163
+
164
+ The EEGDash metadata server exposes a FastAPI REST interface for discovery and
165
+ querying. Base URL: `https://data.eegdash.org`_. Below is a concise map of the main
166
+ entrypoints and their purpose.
167
+
168
+ .. _https://data.eegdash.org: https://data.eegdash.org
169
+
170
+
171
+
172
+ Meta Endpoints
173
+ ==============
174
+
175
+ - ``GET /``
176
+ Returns API name, version, and available databases.
177
+ - ``GET /health``
178
+ Returns API health and MongoDB connection status.
179
+ - ``GET /metrics``
180
+ Prometheus metrics (if enabled).
181
+
182
+ Public Data Endpoints
183
+ =====================
184
+
185
+ - ``GET /api/{database}/records``
186
+ Query records (files) with filter and pagination.
187
+ - ``GET /api/{database}/count``
188
+ Count records matching a filter.
189
+ - ``GET /api/{database}/datasets/names``
190
+ List unique dataset names from records.
191
+ - ``GET /api/{database}/metadata/{dataset}``
192
+ Get metadata for a single dataset (from records).
193
+ - ``GET /api/{database}/datasets/summary``
194
+ Get summary statistics and metadata for all datasets (with pagination, filtering).
195
+ Query params: ``limit`` (1-1000), ``skip``, ``modality`` (eeg/meg/ieeg), ``source`` (openneuro/nemar/zenodo/etc.).
196
+ Response includes aggregate totals for datasets, subjects, files, and size.
197
+ - ``GET /api/{database}/datasets/summary/{dataset_id}``
198
+ Get detailed summary for a specific dataset.
199
+ ``dataset_id`` may be the dataset ID or dataset name.
200
+ - ``GET /api/{database}/datasets/{dataset_id}``
201
+ Get a specific dataset document by ID.
202
+ - ``GET /api/{database}/datasets``
203
+ List dataset documents (with filtering and pagination).
204
+ - ``GET /api/{database}/datasets/stats/records``
205
+ Get aggregated ``nchans`` and ``sampling_frequency`` counts for all datasets.
206
+ Used to generate summary tables efficiently.
207
+
208
+ Admin Endpoints (require Bearer token)
209
+ ======================================
210
+
211
+ - ``POST /admin/{database}/records``
212
+ Insert a single record (file document).
213
+ - ``POST /admin/{database}/records/bulk``
214
+ Insert multiple records (max 1000 per request).
215
+ - ``POST /admin/{database}/datasets``
216
+ Insert or update a single dataset document (upsert by ``dataset_id``).
217
+ - ``POST /admin/{database}/datasets/bulk``
218
+ Insert or update multiple dataset documents (max 500 per request).
219
+ - ``PATCH /admin/{database}/records``
220
+ Update records matching a filter (only ``$set`` allowed).
221
+ - ``GET /admin/security/blocked``
222
+ List blocked IPs and offense counts.
223
+ - ``POST /admin/security/unblock``
224
+ Unblock a specific IP.
225
+
226
+
227
+ ******************
228
+ Related Guides
229
+ ******************
230
+
231
+ - :doc:`Tutorial gallery <../generated/auto_examples/index>`
232
+ - :doc:`Dataset summary <../dataset_summary>`
233
+ - :doc:`Installation guide <../install/install>`
234
+
235
+ .. toctree::
236
+ :hidden:
237
+
238
+ api_core
239
+ api_features
240
+ dataset/api_dataset
241
+ ../developer_notes