eegdash 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

eegdash/__init__.py CHANGED
@@ -5,4 +5,4 @@ from .utils import __init__mongo_client
 __init__mongo_client()
 
 __all__ = ["EEGDash", "EEGDashDataset", "EEGChallengeDataset"]
-__version__ = "0.2.0"
+__version__ = "0.3.0"
eegdash/api.py CHANGED
@@ -9,13 +9,14 @@ import numpy as np
 import xarray as xr
 from dotenv import load_dotenv
 from joblib import Parallel, delayed
-from pymongo import InsertOne, MongoClient, UpdateOne
+from pymongo import InsertOne, UpdateOne
 from s3fs import S3FileSystem
 
 from braindecode.datasets import BaseConcatDataset
 
 from .data_config import config as data_config
 from .data_utils import EEGBIDSDataset, EEGDashBaseDataset
+from .mongodb import MongoConnectionManager
 
 logger = logging.getLogger("eegdash")
 
@@ -33,8 +34,6 @@ class EEGDash:
 
     """
 
-    AWS_BUCKET = "s3://openneuro.org"
-
    def __init__(self, *, is_public: bool = True, is_staging: bool = False) -> None:
        """Create new instance of the EEGDash Database client.
 
@@ -55,6 +54,7 @@ class EEGDash:
        """
        self.config = data_config
        self.is_public = is_public
+       self.is_staging = is_staging
 
        if self.is_public:
            DB_CONNECTION_STRING = mne.utils.get_config("EEGDASH_DB_URI")
@@ -62,31 +62,15 @@ class EEGDash:
            load_dotenv()
            DB_CONNECTION_STRING = os.getenv("DB_CONNECTION_STRING")
 
-       self.__client = MongoClient(DB_CONNECTION_STRING)
-       self.__db = (
-           self.__client["eegdash"]
-           if not is_staging
-           else self.__client["eegdashstaging"]
+       # Use singleton to get MongoDB client, database, and collection
+       self.__client, self.__db, self.__collection = MongoConnectionManager.get_client(
+           DB_CONNECTION_STRING, is_staging
        )
-       self.__collection = self.__db["records"]
 
        self.filesystem = S3FileSystem(
            anon=True, client_kwargs={"region_name": "us-east-2"}
        )
 
-       # MongoDB Operations
-       # These methods provide a high-level interface to interact with the MongoDB
-       # collection, allowing users to find, add, and update EEG data records.
-       # - find:
-       # - exist:
-       # - add_request:
-       # - add:
-       # - update_request:
-       # - remove_field:
-       # - remove_field_from_db:
-       # - close: Close the MongoDB connection.
-       # - __del__: Destructor to close the MongoDB connection.
-
    def find(self, query: dict[str, Any], *args, **kwargs) -> list[Mapping[str, Any]]:
        """Find records in the MongoDB collection that satisfy the given query.
 
@@ -117,26 +101,48 @@ class EEGDash:
        return [result for result in results]
 
    def exist(self, query: dict[str, Any]) -> bool:
-       """Check if the given query matches any records in the MongoDB collection.
+       """Return True if at least one record matches the query, else False.
 
-       Note that currently only a limited set of query fields is allowed here.
+       This is a lightweight existence check that uses MongoDB's ``find_one``
+       instead of fetching all matching documents (which would be wasteful in
+       both time and memory for broad queries). Only a restricted set of
+       fields is accepted to avoid accidental full scans caused by malformed
+       or unsupported keys.
 
        Parameters
        ----------
-       query: dict
-           A dictionary that specifies the query to be executed; this is a reference
-           document that is used to match records in the MongoDB collection.
+       query : dict
+           Mapping of allowed field(s) to value(s). Allowed keys: ``data_name``
+           and ``dataset``. The query must not be empty.
 
        Returns
        -------
-       bool:
-           True if at least one record matches the query, False otherwise.
+       bool
+           True if at least one matching record exists; False otherwise.
+
+       Raises
+       ------
+       TypeError
+           If ``query`` is not a dict.
+       ValueError
+           If ``query`` is empty or contains unsupported field names.
 
        """
-       accepted_query_fields = ["data_name", "dataset"]
-       assert all(field in accepted_query_fields for field in query.keys())
-       sessions = self.find(query)
-       return len(sessions) > 0
+       if not isinstance(query, dict):
+           raise TypeError("query must be a dict")
+       if not query:
+           raise ValueError("query cannot be empty")
+
+       accepted_query_fields = {"data_name", "dataset"}
+       unknown = set(query.keys()) - accepted_query_fields
+       if unknown:
+           raise ValueError(
+               f"Unsupported query field(s): {', '.join(sorted(unknown))}. "
+               f"Allowed: {sorted(accepted_query_fields)}"
+           )
+
+       doc = self.__collection.find_one(query, projection={"_id": 1})
+       return doc is not None
 
    def _validate_input(self, record: dict[str, Any]) -> dict[str, Any]:
        """Internal method to validate the input record against the expected schema.
@@ -491,13 +497,24 @@ class EEGDash:
        return self.__collection
 
    def close(self):
-       """Close the MongoDB client connection."""
-       if hasattr(self, "_EEGDash__client"):
-           self.__client.close()
+       """Close the MongoDB client connection.
+
+       Note: Since MongoDB clients are now managed by a singleton,
+       this method no longer closes connections. Use close_all_connections()
+       class method to close all connections if needed.
+       """
+       # Individual instances no longer close the shared client
+       pass
+
+   @classmethod
+   def close_all_connections(cls):
+       """Close all MongoDB client connections managed by the singleton."""
+       MongoConnectionManager.close_all()
 
    def __del__(self):
        """Ensure connection is closed when object is deleted."""
-       self.close()
+       # No longer needed since we're using singleton pattern
+       pass
 
 
 class EEGDashDataset(BaseConcatDataset):
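
In practice, per-instance close() calls become harmless no-ops and teardown happens once at the class level. A sketch of the intended lifecycle, assuming the default public configuration:

from eegdash import EEGDash

a = EEGDash()
b = EEGDash()  # reuses the same underlying MongoClient via the singleton
a.close()      # no-op; the shared client stays usable for b

EEGDash.close_all_connections()  # shut down shared clients, e.g. at exit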
@@ -651,28 +668,6 @@ class EEGDashDataset(BaseConcatDataset):
            and included in the returned dataset description(s).
 
        """
-
-       def get_base_dataset_from_bids_file(
-           bids_dataset: EEGBIDSDataset,
-           bids_file: str,
-           eeg_dash_instance: EEGDash,
-           s3_bucket: str | None,
-       ) -> EEGDashBaseDataset:
-           """Instantiate a single EEGDashBaseDataset given a local BIDS file. Note
-           this does not actually load the data from disk, but will access the metadata.
-           """
-           record = eeg_dash_instance.load_eeg_attrs_from_bids_file(
-               bids_dataset, bids_file
-           )
-           description = {}
-           for field in description_fields:
-               value = self.find_key_in_nested_dict(record, field)
-               if value is not None:
-                   description[field] = value
-           return EEGDashBaseDataset(
-               record, self.cache_dir, s3_bucket, description=description, **kwargs
-           )
-
        bids_dataset = EEGBIDSDataset(
            data_dir=data_dir,
            dataset=dataset,
@@ -680,11 +675,41 @@
        eeg_dash_instance = EEGDash()
        try:
            datasets = Parallel(n_jobs=-1, prefer="threads", verbose=1)(
-               delayed(get_base_dataset_from_bids_file)(
-                   bids_dataset, bids_file, eeg_dash_instance, s3_bucket
+               delayed(self.get_base_dataset_from_bids_file)(
+                   bids_dataset=bids_dataset,
+                   bids_file=bids_file,
+                   eeg_dash_instance=eeg_dash_instance,
+                   s3_bucket=s3_bucket,
+                   description_fields=description_fields,
                )
                for bids_file in bids_dataset.get_files()
            )
            return datasets
        finally:
            eeg_dash_instance.close()
+
+   def get_base_dataset_from_bids_file(
+       self,
+       bids_dataset: EEGBIDSDataset,
+       bids_file: str,
+       eeg_dash_instance: EEGDash,
+       s3_bucket: str | None,
+       description_fields: list[str],
+   ) -> EEGDashBaseDataset:
+       """Instantiate a single EEGDashBaseDataset given a local BIDS file. Note
+       this does not actually load the data from disk, but will access the metadata.
+       """
+       record = eeg_dash_instance.load_eeg_attrs_from_bids_file(
+           bids_dataset, bids_file
+       )
+       description = {}
+       for field in description_fields:
+           value = self.find_key_in_nested_dict(record, field)
+           if value is not None:
+               description[field] = value
+       return EEGDashBaseDataset(
+           record,
+           self.cache_dir,
+           s3_bucket,
+           description=description,
+       )
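
For context, the promoted method is what each joblib worker invokes when a dataset is assembled from a local BIDS tree. A hypothetical construction sketch (the data_dir path is a placeholder; the argument names follow the docstrings in this diff):

from eegdash import EEGDashDataset

ds = EEGDashDataset(
    data_dir="/data/ds005505",  # placeholder local BIDS root
    dataset="ds005505",
    cache_dir=".eegdash_cache",
)
print(len(ds.datasets))  # one EEGDashBaseDataset per recording file found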
eegdash/data_utils.py CHANGED
@@ -14,9 +14,7 @@ from bids import BIDSLayout
 from joblib import Parallel, delayed
 from mne._fiff.utils import _read_segments_file
 from mne.io import BaseRaw
-from mne_bids import (
-    BIDSPath,
-)
+from mne_bids import BIDSPath
 
 from braindecode.datasets import BaseDataset
 
@@ -30,7 +28,7 @@ class EEGDashBaseDataset(BaseDataset):
    conjunction with the preprocessing and training pipelines of braindecode.
    """
 
-   AWS_BUCKET = "s3://openneuro.org"
+   _AWS_BUCKET = "s3://openneuro.org"
 
    def __init__(
        self,
@@ -64,7 +62,7 @@ class EEGDashBaseDataset(BaseDataset):
            suffix="eeg",
            **bids_kwargs,
        )
-       self.s3_bucket = s3_bucket if s3_bucket else self.AWS_BUCKET
+       self.s3_bucket = s3_bucket if s3_bucket else self._AWS_BUCKET
        self.s3file = self.get_s3path(record["bidspath"])
        self.filecache = self.cache_dir / record["bidspath"]
        self.bids_dependencies = record["bidsdependencies"]
@@ -176,7 +174,7 @@ class EEGDashBaseRaw(BaseRaw):
 
    """
 
-   AWS_BUCKET = "s3://openneuro.org"
+   _AWS_BUCKET = "s3://openneuro.org"
 
    def __init__(
        self,
@@ -218,7 +216,7 @@ class EEGDashBaseRaw(BaseRaw):
        )
 
    def get_s3path(self, filepath):
-       return f"{self.AWS_BUCKET}/{filepath}"
+       return f"{self._AWS_BUCKET}/{filepath}"
 
    def _download_s3(self):
        self.filecache.parent.mkdir(parents=True, exist_ok=True)
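
The underscore rename only marks the bucket constant as internal; URI composition is unchanged. A standalone sketch of the same logic (the BIDS path is hypothetical):

_AWS_BUCKET = "s3://openneuro.org"

def get_s3path(filepath: str) -> str:
    # Join the bucket URI and the relative BIDS path.
    return f"{_AWS_BUCKET}/{filepath}"

print(get_s3path("ds005505/sub-01/eeg/sub-01_task-rest_eeg.set"))
# s3://openneuro.org/ds005505/sub-01/eeg/sub-01_task-rest_eeg.set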
eegdash/dataset.py CHANGED
@@ -5,8 +5,9 @@ class EEGChallengeDataset(EEGDashDataset):
    def __init__(
        self,
        release: str = "R5",
+       query: dict | None = None,
        cache_dir: str = ".eegdash_cache",
-       s3_bucket: str | None = "s3://nmdatasets/NeurIPS25/R5_L100",
+       s3_bucket: str | None = "s3://nmdatasets/NeurIPS25/",
        **kwargs,
    ):
        """Create a new EEGDashDataset from a given query or local BIDS dataset directory
@@ -15,27 +16,19 @@ class EEGChallengeDataset(EEGDashDataset):
 
        Parameters
        ----------
+       release : str
+           Release name. Can be one of ["R1", ..., "R11"].
        query : dict | None
-           Optionally a dictionary that specifies the query to be executed; see
-           EEGDash.find() for details on the query format.
-       data_dir : str | list[str] | None
-           Optionally a string or a list of strings specifying one or more local
-           BIDS dataset directories from which to load the EEG data files. Exactly one
-           of query or data_dir must be provided.
-       dataset : str | list[str] | None
-           If data_dir is given, a name or list of names for for the dataset(s) to be loaded.
-       description_fields : list[str]
-           A list of fields to be extracted from the dataset records
-           and included in the returned data description(s). Examples are typical
-           subject metadata fields such as "subject", "session", "run", "task", etc.;
-           see also data_config.description_fields for the default set of fields.
+           Optionally a dictionary that specifies a query to be executed,
+           in addition to the dataset (automatically inferred from the release argument).
+           See EEGDash.find() for details on the query format.
        cache_dir : str
            A directory where the dataset will be cached locally.
        s3_bucket : str | None
            An optional S3 bucket URI to use instead of the
            default OpenNeuro bucket for loading data files.
        kwargs : dict
-           Additional keyword arguments to be passed to the EEGDashBaseDataset
+           Additional keyword arguments to be passed to the EEGDashDataset
            constructor.
 
        """
@@ -52,9 +45,25 @@ class EEGChallengeDataset(EEGDashDataset):
            "R2": "ds005506",
            "R1": "ds005505",
        }
+
+       self.release = release
+       if release not in dsnumber_release_map:
+           raise ValueError(f"Unknown release: {release}")
+
+       dataset = dsnumber_release_map[release]
+       if query is None:
+           query = {"dataset": dataset}
+       elif "dataset" not in query:
+           query["dataset"] = dataset
+       elif query["dataset"] != dataset:
+           raise ValueError(
+               f"Query dataset {query['dataset']} does not match the release {release} "
+               f"which corresponds to dataset {dataset}."
+           )
+
        super().__init__(
-           query={"dataset": dsnumber_release_map[release]},
+           query=query,
            cache_dir=cache_dir,
-           s3_bucket=s3_bucket,
+           s3_bucket=f"{s3_bucket}/{release}_L100",
            **kwargs,
        )
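
The net effect: release pins the OpenNeuro dataset ID, and any user-supplied query is merged with, and validated against, that pin. A sketch assuming network access (R1 maps to ds005505 per the mapping above; the "task" filter is a hypothetical query field):

from eegdash import EEGChallengeDataset

# Extra constraints ride along with the release's dataset automatically.
ds = EEGChallengeDataset(release="R1", query={"task": "RestingState"})

# A conflicting dataset pin fails fast:
EEGChallengeDataset(release="R1", query={"dataset": "ds005506"})  # ValueError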
eegdash/features/__init__.py CHANGED
@@ -14,7 +14,50 @@ from .extractors import (
    TrainableFeature,
    UnivariateFeature,
 )
-from .feature_bank import *  # noqa: F401
+from .feature_bank import (  # Complexity; Connectivity; CSP; Dimensionality; Signal; Spectral
+    CoherenceFeatureExtractor,
+    CommonSpatialPattern,
+    DBSpectralFeatureExtractor,
+    EntropyFeatureExtractor,
+    HilbertFeatureExtractor,
+    NormalizedSpectralFeatureExtractor,
+    SpectralFeatureExtractor,
+    complexity_approx_entropy,
+    complexity_lempel_ziv,
+    complexity_sample_entropy,
+    complexity_svd_entropy,
+    connectivity_imaginary_coherence,
+    connectivity_lagged_coherence,
+    connectivity_magnitude_square_coherence,
+    dimensionality_detrended_fluctuation_analysis,
+    dimensionality_higuchi_fractal_dim,
+    dimensionality_hurst_exp,
+    dimensionality_katz_fractal_dim,
+    dimensionality_petrosian_fractal_dim,
+    signal_decorrelation_time,
+    signal_hjorth_activity,
+    signal_hjorth_complexity,
+    signal_hjorth_mobility,
+    signal_kurtosis,
+    signal_line_length,
+    signal_mean,
+    signal_peak_to_peak,
+    signal_quantile,
+    signal_root_mean_square,
+    signal_skewness,
+    signal_std,
+    signal_variance,
+    signal_zero_crossings,
+    spectral_bands_power,
+    spectral_edge,
+    spectral_entropy,
+    spectral_hjorth_activity,
+    spectral_hjorth_complexity,
+    spectral_hjorth_mobility,
+    spectral_moment,
+    spectral_root_total_power,
+    spectral_slope,
+)
 from .inspect import (
    get_all_feature_extractors,
    get_all_feature_kinds,
@@ -23,10 +66,7 @@ from .inspect import (
    get_feature_predecessors,
 )
 from .serialization import load_features_concat_dataset
-from .utils import (
-    extract_features,
-    fit_feature_extractors,
-)
+from .utils import extract_features, fit_feature_extractors
 
 __all__ = [
    "FeaturesConcatDataset",
@@ -50,4 +90,59 @@ __all__ = [
    "load_features_concat_dataset",
    "extract_features",
    "fit_feature_extractors",
+   # Feature bank
+   # Complexity
+   "EntropyFeatureExtractor",
+   "complexity_approx_entropy",
+   "complexity_sample_entropy",
+   "complexity_svd_entropy",
+   "complexity_lempel_ziv",
+   # Connectivity
+   "CoherenceFeatureExtractor",
+   "connectivity_magnitude_square_coherence",
+   "connectivity_imaginary_coherence",
+   "connectivity_lagged_coherence",
+   # CSP
+   "CommonSpatialPattern",
+   # Dimensionality
+   "dimensionality_higuchi_fractal_dim",
+   "dimensionality_petrosian_fractal_dim",
+   "dimensionality_katz_fractal_dim",
+   "dimensionality_hurst_exp",
+   "dimensionality_detrended_fluctuation_analysis",
+   # Signal
+   "HilbertFeatureExtractor",
+   "signal_mean",
+   "signal_variance",
+   "signal_skewness",
+   "signal_kurtosis",
+   "signal_std",
+   "signal_root_mean_square",
+   "signal_peak_to_peak",
+   "signal_quantile",
+   "signal_zero_crossings",
+   "signal_line_length",
+   "signal_hjorth_activity",
+   "signal_hjorth_mobility",
+   "signal_hjorth_complexity",
+   "signal_decorrelation_time",
+   # Spectral
+   "SpectralFeatureExtractor",
+   "NormalizedSpectralFeatureExtractor",
+   "DBSpectralFeatureExtractor",
+   "spectral_root_total_power",
+   "spectral_moment",
+   "spectral_entropy",
+   "spectral_edge",
+   "spectral_slope",
+   "spectral_bands_power",
+   "spectral_hjorth_activity",
+   "spectral_hjorth_mobility",
+   "spectral_hjorth_complexity",
 ]
+
+
+# TODO: this import is not working because of the way numba indexing
+# is handled; it needs to be improved later.
eegdash/features/feature_bank/__init__.py CHANGED
@@ -1,6 +1,110 @@
-from .complexity import *
-from .connectivity import *
-from .csp import *
-from .dimensionality import *
-from .signal import *
-from .spectral import *
+"""Feature bank public API exports.
+
+This module consolidates and re-exports the feature extractors and feature
+functions so users can import them directly from
+``eegdash.features.feature_bank``.
+"""
+
+from .complexity import (
+    EntropyFeatureExtractor,
+    complexity_approx_entropy,
+    complexity_lempel_ziv,
+    complexity_sample_entropy,
+    complexity_svd_entropy,
+)
+from .connectivity import (
+    CoherenceFeatureExtractor,
+    connectivity_imaginary_coherence,
+    connectivity_lagged_coherence,
+    connectivity_magnitude_square_coherence,
+)
+from .csp import CommonSpatialPattern
+from .dimensionality import (
+    dimensionality_detrended_fluctuation_analysis,
+    dimensionality_higuchi_fractal_dim,
+    dimensionality_hurst_exp,
+    dimensionality_katz_fractal_dim,
+    dimensionality_petrosian_fractal_dim,
+)
+from .signal import (
+    HilbertFeatureExtractor,
+    signal_decorrelation_time,
+    signal_hjorth_activity,
+    signal_hjorth_complexity,
+    signal_hjorth_mobility,
+    signal_kurtosis,
+    signal_line_length,
+    signal_mean,
+    signal_peak_to_peak,
+    signal_quantile,
+    signal_root_mean_square,
+    signal_skewness,
+    signal_std,
+    signal_variance,
+    signal_zero_crossings,
+)
+from .spectral import (
+    DBSpectralFeatureExtractor,
+    NormalizedSpectralFeatureExtractor,
+    SpectralFeatureExtractor,
+    spectral_bands_power,
+    spectral_edge,
+    spectral_entropy,
+    spectral_hjorth_activity,
+    spectral_hjorth_complexity,
+    spectral_hjorth_mobility,
+    spectral_moment,
+    spectral_root_total_power,
+    spectral_slope,
+)
+
+__all__ = [
+    # Complexity
+    "EntropyFeatureExtractor",
+    "complexity_approx_entropy",
+    "complexity_sample_entropy",
+    "complexity_svd_entropy",
+    "complexity_lempel_ziv",
+    # Connectivity
+    "CoherenceFeatureExtractor",
+    "connectivity_magnitude_square_coherence",
+    "connectivity_imaginary_coherence",
+    "connectivity_lagged_coherence",
+    # CSP
+    "CommonSpatialPattern",
+    # Dimensionality
+    "dimensionality_higuchi_fractal_dim",
+    "dimensionality_petrosian_fractal_dim",
+    "dimensionality_katz_fractal_dim",
+    "dimensionality_hurst_exp",
+    "dimensionality_detrended_fluctuation_analysis",
+    # Signal
+    "HilbertFeatureExtractor",
+    "signal_mean",
+    "signal_variance",
+    "signal_skewness",
+    "signal_kurtosis",
+    "signal_std",
+    "signal_root_mean_square",
+    "signal_peak_to_peak",
+    "signal_quantile",
+    "signal_zero_crossings",
+    "signal_line_length",
+    "signal_hjorth_activity",
+    "signal_hjorth_mobility",
+    "signal_hjorth_complexity",
+    "signal_decorrelation_time",
+    # Spectral
+    "SpectralFeatureExtractor",
+    "NormalizedSpectralFeatureExtractor",
+    "DBSpectralFeatureExtractor",
+    "spectral_root_total_power",
+    "spectral_moment",
+    "spectral_entropy",
+    "spectral_edge",
+    "spectral_slope",
+    "spectral_bands_power",
+    "spectral_hjorth_activity",
+    "spectral_hjorth_mobility",
+    "spectral_hjorth_complexity",
+]
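
With the wildcard re-exports gone, every name is now visible to linters and IDE completion. A sketch; the inspect helper is assumed to take no arguments, since its signature is not shown in this diff:

from eegdash.features.feature_bank import signal_mean, spectral_entropy
from eegdash.features.inspect import get_all_feature_extractors

# Enumerate everything the feature bank now exports explicitly.
print(get_all_feature_extractors())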
eegdash/features/serialization.py CHANGED
@@ -11,10 +11,7 @@ from mne.io import read_info
 
 from braindecode.datautil.serialization import _load_kwargs_json
 
-from .datasets import (
-    FeaturesConcatDataset,
-    FeaturesDataset,
-)
+from .datasets import FeaturesConcatDataset, FeaturesDataset
 
 
 def load_features_concat_dataset(path, ids_to_load=None, n_jobs=1):
eegdash/mongodb.py ADDED
@@ -0,0 +1,66 @@
+import threading
+
+from pymongo import MongoClient
+
+# MongoDB Operations
+# These methods provide a high-level interface to interact with the MongoDB
+# collection, allowing users to find, add, and update EEG data records.
+# - find:
+# - exist:
+# - add_request:
+# - add:
+# - update_request:
+# - remove_field:
+# - remove_field_from_db:
+# - close: Close the MongoDB connection.
+# - __del__: Destructor to close the MongoDB connection.
+
+
+class MongoConnectionManager:
+    """Singleton class to manage MongoDB client connections."""
+
+    _instances = {}
+    _lock = threading.Lock()
+
+    @classmethod
+    def get_client(cls, connection_string: str, is_staging: bool = False):
+        """Get or create a MongoDB client for the given connection string and staging flag.
+
+        Parameters
+        ----------
+        connection_string : str
+            The MongoDB connection string
+        is_staging : bool
+            Whether to use staging database
+
+        Returns
+        -------
+        tuple
+            A tuple of (client, database, collection)
+
+        """
+        # Create a unique key based on connection string and staging flag
+        key = (connection_string, is_staging)
+
+        if key not in cls._instances:
+            with cls._lock:
+                # Double-check pattern to avoid race conditions
+                if key not in cls._instances:
+                    client = MongoClient(connection_string)
+                    db_name = "eegdashstaging" if is_staging else "eegdash"
+                    db = client[db_name]
+                    collection = db["records"]
+                    cls._instances[key] = (client, db, collection)
+
+        return cls._instances[key]
+
+    @classmethod
+    def close_all(cls):
+        """Close all MongoDB client connections."""
+        with cls._lock:
+            for client, _, _ in cls._instances.values():
+                try:
+                    client.close()
+                except Exception:
+                    pass
+            cls._instances.clear()
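
A quick demonstration of the cached-singleton behavior; pymongo connects lazily, so this sketch runs without a live server (the localhost URI is a placeholder):

from eegdash.mongodb import MongoConnectionManager

uri = "mongodb://localhost:27017"  # placeholder connection string
c1, db1, col1 = MongoConnectionManager.get_client(uri)
c2, db2, col2 = MongoConnectionManager.get_client(uri)
assert c1 is c2 and db1 is db2 and col1 is col2  # same cached objects

MongoConnectionManager.close_all()  # closes and clears every cached client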
eegdash/preprocessing.py CHANGED
@@ -3,9 +3,7 @@ import logging
 import mne
 import numpy as np
 
-from braindecode.preprocessing import (
-    Preprocessor,
-)
+from braindecode.preprocessing import Preprocessor
 
 logger = logging.getLogger("eegdash")
 
eegdash/utils.py CHANGED
@@ -1,11 +1,11 @@
-def __init__mongo_client():
-    from mne.utils import get_config, set_config
+from mne.utils import get_config, set_config, use_log_level
 
-    if get_config("EEGDASH_DB_URI") is None:
-        # Set the default MongoDB URI for EEGDash
-        # This is a placeholder and should be replaced with your actual MongoDB URI
 
-        set_config(
-            "EEGDASH_DB_URI",
-            "mongodb+srv://eegdash-user:mdzoMjQcHWTVnKDq@cluster0.vz35p.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0",
-        )
+def __init__mongo_client():
+    with use_log_level("ERROR"):
+        if get_config("EEGDASH_DB_URI") is None:
+            set_config(
+                "EEGDASH_DB_URI",
+                "mongodb+srv://eegdash-user:mdzoMjQcHWTVnKDq@cluster0.vz35p.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0",
+                set_env=True,
+            )
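
Because the helper only fills in a missing value, a custom database URI set before the first import still wins. A sketch (the localhost URI is a placeholder):

from mne.utils import set_config

# Must run before eegdash is imported for the first time.
set_config("EEGDASH_DB_URI", "mongodb://localhost:27017", set_env=True)

import eegdash  # __init__mongo_client() leaves the existing setting alone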
{eegdash-0.2.0.dist-info → eegdash-0.3.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: eegdash
-Version: 0.2.0
+Version: 0.3.0
 Summary: EEG data for machine learning
 Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@gmail.com>, Bruno Aristimunha <b.aristimunha@gmail.com>
 License: GNU General Public License
@@ -43,7 +43,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Requires-Python: >3.10
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: braindecode>=1.0
@@ -63,23 +63,35 @@ Requires-Dist: pytest; extra == "tests"
 Requires-Dist: pytest-cov; extra == "tests"
 Requires-Dist: codecov; extra == "tests"
 Requires-Dist: pytest_cases; extra == "tests"
+Requires-Dist: pytest-benchmark; extra == "tests"
 Provides-Extra: dev
 Requires-Dist: pre-commit; extra == "dev"
 Provides-Extra: docs
 Requires-Dist: sphinx; extra == "docs"
+Requires-Dist: sphinx_design; extra == "docs"
 Requires-Dist: sphinx_gallery; extra == "docs"
 Requires-Dist: sphinx_rtd_theme; extra == "docs"
+Requires-Dist: pydata-sphinx-theme; extra == "docs"
 Requires-Dist: numpydoc; extra == "docs"
+Requires-Dist: memory_profiler; extra == "docs"
+Requires-Dist: ipython; extra == "docs"
+Requires-Dist: lightgbm; extra == "docs"
 Provides-Extra: all
+Requires-Dist: pre-commit; extra == "all"
 Requires-Dist: pytest; extra == "all"
 Requires-Dist: pytest-cov; extra == "all"
 Requires-Dist: codecov; extra == "all"
 Requires-Dist: pytest_cases; extra == "all"
-Requires-Dist: pre-commit; extra == "all"
+Requires-Dist: pytest-benchmark; extra == "all"
 Requires-Dist: sphinx; extra == "all"
+Requires-Dist: sphinx_design; extra == "all"
 Requires-Dist: sphinx_gallery; extra == "all"
 Requires-Dist: sphinx_rtd_theme; extra == "all"
+Requires-Dist: pydata-sphinx-theme; extra == "all"
 Requires-Dist: numpydoc; extra == "all"
+Requires-Dist: memory_profiler; extra == "all"
+Requires-Dist: ipython; extra == "all"
+Requires-Dist: lightgbm; extra == "all"
 Dynamic: license-file
 
 # EEG-Dash
@@ -164,7 +176,3 @@ EEG-DaSh is a collaborative initiative between the United States and Israel, sup
 
 
 
-python3 -m pip install --upgrade build
-python3 -m build
-python3 -m pip install --upgrade twine
-python3 -m twine upload --repository eegdash dist/*
{eegdash-0.2.0.dist-info → eegdash-0.3.0.dist-info}/RECORD RENAMED
@@ -1,18 +1,19 @@
-eegdash/__init__.py,sha256=nMBZrB4bJs79rl9TZ_x-IVPNTHYJS6V55Pus9fErB5E,232
-eegdash/api.py,sha256=1mbufz6qgujrY_9V2GWUP4Eqel_q4ns9XVrmbGcSJG0,25776
+eegdash/__init__.py,sha256=MntTC7axANEZtRLMIzcWZGS6-WiuYqdtR5Ef6fTY28c,232
+eegdash/api.py,sha256=igpkLTCjH6NNQf8tEizA6SF8cYa1gE22-hNVxNRPszg,26747
 eegdash/data_config.py,sha256=OS6ERO-jHrnEOfMJUehY7ieABdsRw_qWzOKJ4pzSfqw,1323
-eegdash/data_utils.py,sha256=mR0TtERYIefakGQ98jwAeeRVKSNDU9eBlUoH1AY9tnc,23663
-eegdash/dataset.py,sha256=qXcE4JxxYj89VQ84sKmq7kGcunZqt1pp5wz7a62j_OQ,2460
-eegdash/preprocessing.py,sha256=wvqAO8UgDoQQz7xjVykrl4V8AawS4tpKR4Vrr_9BovY,2230
-eegdash/utils.py,sha256=ZxVW4ll5MaSZ_ht1L5p7YJxOtYi3b0547oa5W_jbH4A,450
-eegdash/features/__init__.py,sha256=484CLxpPifc8ZQfeM8jWZLvtVKljCxn3qqlUCaq-Yxk,1284
+eegdash/data_utils.py,sha256=Y8Kh52zcDKY5sXblMrYhbRrL4BkQ-QUghjs0RPeW0oo,23658
+eegdash/dataset.py,sha256=NK5CVGtHvGNlhdbZPaRUWtyLBDxUlyiSz-ot3BrxHQE,2443
+eegdash/mongodb.py,sha256=GD3WgA253oFgpzOHrYaj4P1mRjNtDMT5Oj4kVvHswjI,2006
+eegdash/preprocessing.py,sha256=7S_TTRKPKEk47tTnh2D6WExBt4cctAMxUxGDjJqq5lU,2221
+eegdash/utils.py,sha256=wU9CBQZLW_LIQIBwhgQm5bU4X-rSsVNPdeF2iE4QGJ4,410
+eegdash/features/__init__.py,sha256=BXNhjvL4_SSFAY1lcP9nyGpkbJNtoOMH4AHlF6OyABo,4078
 eegdash/features/datasets.py,sha256=kU1DO70ArSIy-LF1hHD2NN4iT-kJrI0mVpSkyV_OSeI,18301
 eegdash/features/decorators.py,sha256=v0qaJz_dcX703p1fvFYbAIXmwK3d8naYGlq7fRVKn_w,1313
 eegdash/features/extractors.py,sha256=H7h6tP3dKoRcjDJpWWAo0ppmokCq5QlhqMcehYwYV9s,6845
 eegdash/features/inspect.py,sha256=PmbWhx5H_WqpnorUpWONUSkUtaIHkZblRa_Xyk7Szyc,1569
-eegdash/features/serialization.py,sha256=pNsTz0EeRPPYE-A61XK7UoMShI9YBEHQqC5STbzUU6A,2861
+eegdash/features/serialization.py,sha256=snXuHVd0CoT2ese0iWi5RwZrVHCGc0oCZ8-SXqGY88I,2848
 eegdash/features/utils.py,sha256=eM6DdyOpdVfNh7dSPykJ0WaTDtaGvkCQWAmW0G8v60Y,3784
-eegdash/features/feature_bank/__init__.py,sha256=BKrM3aaggXrfey1yEjEBYaxOV5e3UK-o8oGeB30epOg,149
+eegdash/features/feature_bank/__init__.py,sha256=YsMXLC1FEtHL3IEw9pYw1fc5IY0x_hr2qWQowI5gZj8,2991
 eegdash/features/feature_bank/complexity.py,sha256=Ds1GAXZ0LGM32xB4EZC2jbMljUBv0yicf2SkuyLvN5I,3183
 eegdash/features/feature_bank/connectivity.py,sha256=bQ6KlxWm5GNpCS9ypLqBUr2L171Yq7wpBQT2tRQKTZ4,2159
 eegdash/features/feature_bank/csp.py,sha256=YOzieLnOcqjvfrcjvg8R3S4SWuC1BqK5J5WXVNCCTc0,3304
@@ -20,8 +21,8 @@ eegdash/features/feature_bank/dimensionality.py,sha256=j_Ds71Y1AbV2uLFQj8EuXQ4kz
 eegdash/features/feature_bank/signal.py,sha256=3Tb8z9gX7iZipxQJ9DSyy30JfdmW58kgvimSyZX74p8,3404
 eegdash/features/feature_bank/spectral.py,sha256=bNB7skusePs1gX7NOU6yRlw_Gr4UOCkO_ylkCgybzug,3319
 eegdash/features/feature_bank/utils.py,sha256=DGh-Q7-XFIittP7iBBxvsJaZrlVvuY5mw-G7q6C-PCI,1237
-eegdash-0.2.0.dist-info/licenses/LICENSE,sha256=KykUD4H3kw3HLz5bZ0kxMWwZotnk8rhkfCCerGyX2sk,855
-eegdash-0.2.0.dist-info/METADATA,sha256=GhxMc7p2HvTZo9lZFjBX1tJ70VeMlMnaYBhvpqw0iG8,10220
-eegdash-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-eegdash-0.2.0.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
-eegdash-0.2.0.dist-info/RECORD,,
+eegdash-0.3.0.dist-info/licenses/LICENSE,sha256=KykUD4H3kw3HLz5bZ0kxMWwZotnk8rhkfCCerGyX2sk,855
+eegdash-0.3.0.dist-info/METADATA,sha256=N4w48rU1-1ESNg9ESdMG7HplEuOeIXvJX-h_anvWDN0,10621
+eegdash-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+eegdash-0.3.0.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
+eegdash-0.3.0.dist-info/RECORD,,