eegdash 0.3.4.dev69__tar.gz → 0.3.4.dev70__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eegdash might be problematic. See the package registry listing for more details.

Files changed (51)
  1. {eegdash-0.3.4.dev69/eegdash.egg-info → eegdash-0.3.4.dev70}/PKG-INFO +2 -1
  2. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/source/conf.py +1 -1
  3. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/__init__.py +1 -1
  4. eegdash-0.3.4.dev70/eegdash/registry.py +136 -0
  5. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70/eegdash.egg-info}/PKG-INFO +2 -1
  6. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash.egg-info/requires.txt +1 -0
  7. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/pyproject.toml +1 -0
  8. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_dataset_registration.py +1 -0
  9. eegdash-0.3.4.dev69/eegdash/registry.py +0 -72
  10. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/LICENSE +0 -0
  11. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/MANIFEST.in +0 -0
  12. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/README.md +0 -0
  13. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/Makefile +0 -0
  14. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/source/dataset_summary.rst +0 -0
  15. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/source/index.rst +0 -0
  16. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/source/install/install.rst +0 -0
  17. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/source/install/install_pip.rst +0 -0
  18. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/docs/source/install/install_source.rst +0 -0
  19. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/api.py +0 -0
  20. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/data_config.py +0 -0
  21. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/data_utils.py +0 -0
  22. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/dataset.py +0 -0
  23. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/dataset_summary.csv +0 -0
  24. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/__init__.py +0 -0
  25. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/datasets.py +0 -0
  26. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/decorators.py +0 -0
  27. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/extractors.py +0 -0
  28. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/__init__.py +0 -0
  29. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/complexity.py +0 -0
  30. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/connectivity.py +0 -0
  31. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/csp.py +0 -0
  32. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/dimensionality.py +0 -0
  33. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/signal.py +0 -0
  34. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/spectral.py +0 -0
  35. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/feature_bank/utils.py +0 -0
  36. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/inspect.py +0 -0
  37. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/serialization.py +0 -0
  38. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/features/utils.py +0 -0
  39. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/mongodb.py +0 -0
  40. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/preprocessing.py +0 -0
  41. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash/utils.py +0 -0
  42. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash.egg-info/SOURCES.txt +0 -0
  43. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash.egg-info/dependency_links.txt +0 -0
  44. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/eegdash.egg-info/top_level.txt +0 -0
  45. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/setup.cfg +0 -0
  46. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_correctness.py +0 -0
  47. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_dataset.py +0 -0
  48. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_eegdash.py +0 -0
  49. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_functional.py +0 -0
  50. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_init.py +0 -0
  51. {eegdash-0.3.4.dev69 → eegdash-0.3.4.dev70}/tests/test_mongo_connection.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: eegdash
3
- Version: 0.3.4.dev69
3
+ Version: 0.3.4.dev70
4
4
  Summary: EEG data for machine learning
5
5
  Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@gmail.com>, Aviv Dotan <avivd220@gmail.com>, Oren Shriki <oren70@gmail.com>, Bruno Aristimunha <b.aristimunha@gmail.com>
6
6
  License-Expression: GPL-3.0-only
@@ -38,6 +38,7 @@ Requires-Dist: tqdm
38
38
  Requires-Dist: xarray
39
39
  Requires-Dist: h5io>=0.2.4
40
40
  Requires-Dist: pymatreader
41
+ Requires-Dist: tabulate
41
42
  Provides-Extra: tests
42
43
  Requires-Dist: pytest; extra == "tests"
43
44
  Requires-Dist: pytest-cov; extra == "tests"
@@ -96,7 +96,7 @@ html_sidebars = {"api": [], "dataset_summary": [], "installation": []}
96
96
 
97
97
 
98
98
  # -- Extension configurations ------------------------------------------------
99
-
99
+ autoclass_content = "both"
100
100
 
101
101
  # Numpydoc
102
102
  numpydoc_show_class_members = False
@@ -7,4 +7,4 @@ __init__mongo_client()
7
7
 
8
8
  __all__ = ["EEGDash", "EEGDashDataset", "EEGChallengeDataset"]
9
9
 
10
- __version__ = "0.3.4.dev69"
10
+ __version__ = "0.3.4.dev70"
@@ -0,0 +1,136 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+ from typing import Any, Dict
5
+
6
+ import pandas as pd
7
+ from tabulate import tabulate
8
+
9
+
10
+ def register_openneuro_datasets(
11
+ summary_file: str | Path,
12
+ *,
13
+ base_class=None,
14
+ namespace: Dict[str, Any] | None = None,
15
+ add_to_all: bool = True,
16
+ ) -> Dict[str, type]:
17
+ """Dynamically create dataset classes from a summary file."""
18
+ if base_class is None:
19
+ from .api import EEGDashDataset as base_class # lazy import
20
+
21
+ summary_path = Path(summary_file)
22
+ namespace = namespace if namespace is not None else globals()
23
+ module_name = namespace.get("__name__", __name__)
24
+ registered: Dict[str, type] = {}
25
+
26
+ df = pd.read_csv(summary_path, comment="#", skip_blank_lines=True)
27
+ for _, row_series in df.iterrows():
28
+ row = row_series.tolist()
29
+ dataset_id = str(row[0]).strip()
30
+ if not dataset_id:
31
+ continue
32
+
33
+ class_name = dataset_id.upper()
34
+
35
+ # avoid zero-arg super() here
36
+ def make_init(_dataset: str):
37
+ def __init__(
38
+ self,
39
+ cache_dir: str,
40
+ query: dict | None = None,
41
+ s3_bucket: str | None = None,
42
+ **kwargs,
43
+ ):
44
+ q = {"dataset": _dataset}
45
+ if query:
46
+ q.update(query)
47
+ # call base_class.__init__ directly
48
+ base_class.__init__(
49
+ self,
50
+ query=q,
51
+ cache_dir=cache_dir,
52
+ s3_bucket=s3_bucket,
53
+ **kwargs,
54
+ )
55
+
56
+ return __init__
57
+
58
+ init = make_init(dataset_id)
59
+
60
+ doc = f"""Create an instance for OpenNeuro dataset ``{dataset_id}``.
61
+
62
+ {markdown_table(row_series)}
63
+
64
+ Parameters
65
+ ----------
66
+ cache_dir : str
67
+ Local cache directory.
68
+ query : dict | None
69
+ Extra Mongo query merged with ``{{'dataset': '{dataset_id}'}}``.
70
+ s3_bucket : str | None
71
+ Optional S3 bucket name.
72
+ **kwargs
73
+ Passed through to {base_class.__name__}.
74
+ """
75
+
76
+ init.__doc__ = doc
77
+
78
+ cls = type(
79
+ class_name,
80
+ (base_class,),
81
+ {
82
+ "_dataset": dataset_id,
83
+ "__init__": init,
84
+ "__doc__": doc,
85
+ "__module__": module_name, #
86
+ },
87
+ )
88
+
89
+ namespace[class_name] = cls
90
+ registered[class_name] = cls
91
+
92
+ if add_to_all:
93
+ ns_all = namespace.setdefault("__all__", [])
94
+ if isinstance(ns_all, list) and class_name not in ns_all:
95
+ ns_all.append(class_name)
96
+
97
+ return registered
98
+
99
+
100
+ def markdown_table(row_series: pd.Series) -> str:
101
+ """Create a reStructuredText grid table from a pandas Series."""
102
+ if row_series.empty:
103
+ return ""
104
+
105
+ # Prepare the dataframe with user's suggested logic
106
+ df = (
107
+ row_series.to_frame()
108
+ .T.rename(
109
+ columns={
110
+ "n_subjects": "#Subj",
111
+ "nchans_set": "#Chan",
112
+ "n_tasks": "#Classes",
113
+ "sampling_freqs": "Freq(Hz)",
114
+ "duration_hours_total": "Duration(H)",
115
+ }
116
+ )
117
+ .reindex(
118
+ columns=[
119
+ "dataset",
120
+ "#Subj",
121
+ "#Chan",
122
+ "#Classes",
123
+ "Freq(Hz)",
124
+ "Duration(H)",
125
+ ]
126
+ )
127
+ .infer_objects(copy=False)
128
+ .fillna("")
129
+ )
130
+
131
+ # Use tabulate for the final rst formatting
132
+ table = tabulate(df, headers="keys", tablefmt="rst", showindex=False)
133
+
134
+ # Indent the table to fit within the admonition block
135
+ indented_table = "\n".join(" " + line for line in table.split("\n"))
136
+ return f"\n\n{indented_table}"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: eegdash
3
- Version: 0.3.4.dev69
3
+ Version: 0.3.4.dev70
4
4
  Summary: EEG data for machine learning
5
5
  Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@gmail.com>, Aviv Dotan <avivd220@gmail.com>, Oren Shriki <oren70@gmail.com>, Bruno Aristimunha <b.aristimunha@gmail.com>
6
6
  License-Expression: GPL-3.0-only
@@ -38,6 +38,7 @@ Requires-Dist: tqdm
38
38
  Requires-Dist: xarray
39
39
  Requires-Dist: h5io>=0.2.4
40
40
  Requires-Dist: pymatreader
41
+ Requires-Dist: tabulate
41
42
  Provides-Extra: tests
42
43
  Requires-Dist: pytest; extra == "tests"
43
44
  Requires-Dist: pytest-cov; extra == "tests"
@@ -12,6 +12,7 @@ tqdm
12
12
  xarray
13
13
  h5io>=0.2.4
14
14
  pymatreader
15
+ tabulate
15
16
 
16
17
  [all]
17
18
  pre-commit
@@ -49,6 +49,7 @@ dependencies = [
49
49
  "xarray",
50
50
  "h5io >= 0.2.4",
51
51
  "pymatreader",
52
+ "tabulate",
52
53
  ]
53
54
 
54
55
  [project.urls]
@@ -16,6 +16,7 @@ def test_register_openneuro_datasets(tmp_path: Path):
16
16
  summary.write_text(
17
17
  "\n".join(
18
18
  [
19
+ "dataset_id,num_subjects,num_sessions,num_runs,num_channels,sampling_rate,duration",
19
20
  "ds002718,18,18,1,74,250,14.844",
20
21
  "ds000001,1,1,1,1,1,1",
21
22
  ]
@@ -1,72 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import csv
4
- from pathlib import Path
5
- from typing import Any, Dict
6
-
7
-
8
- def register_openneuro_datasets(
9
- summary_file: str | Path,
10
- *,
11
- base_class=None,
12
- namespace: Dict[str, Any] | None = None,
13
- ) -> Dict[str, type]:
14
- """Dynamically create dataset classes from a summary file.
15
-
16
- Parameters
17
- ----------
18
- summary_file : str | Path
19
- Path to a CSV file where each line starts with the dataset identifier.
20
- base_class : type | None
21
- Base class for the generated datasets. If ``None``, defaults to
22
- :class:`eegdash.api.EEGDashDataset`.
23
- namespace : dict | None
24
- Mapping where the new classes will be registered. Defaults to the
25
- module's global namespace.
26
-
27
- Returns
28
- -------
29
- dict
30
- Mapping from class names to the generated classes.
31
-
32
- """
33
- if base_class is None:
34
- from .api import EEGDashDataset as base_class # lazy import
35
-
36
- summary_path = Path(summary_file)
37
- namespace = namespace if namespace is not None else globals()
38
- registered: Dict[str, type] = {}
39
-
40
- with summary_path.open() as f:
41
- reader = csv.reader(f)
42
- for row in reader:
43
- if not row:
44
- continue
45
- dataset_id = row[0].strip()
46
- if not dataset_id or dataset_id.startswith("#"):
47
- continue
48
- class_name = dataset_id.upper()
49
-
50
- def __init__(
51
- self,
52
- cache_dir: str,
53
- query: dict | None = None,
54
- s3_bucket: str | None = None,
55
- **kwargs,
56
- ):
57
- q = {"dataset": self._dataset}
58
- if query:
59
- q.update(query)
60
- super().__init__(
61
- query=q, cache_dir=cache_dir, s3_bucket=s3_bucket, **kwargs
62
- )
63
-
64
- cls = type(
65
- class_name,
66
- (base_class,),
67
- {"_dataset": dataset_id, "__init__": __init__},
68
- )
69
- namespace[class_name] = cls
70
- registered[class_name] = cls
71
-
72
- return registered
File without changes
File without changes
File without changes
File without changes