eegdash 0.4.0.dev176__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.


eegdash/__init__.py CHANGED
@@ -18,4 +18,4 @@ _init_mongo_client()
 
 __all__ = ["EEGDash", "EEGDashDataset", "EEGChallengeDataset", "preprocessing"]
 
-__version__ = "0.4.0.dev176"
+__version__ = "0.4.1"
eegdash/api.py CHANGED
@@ -16,7 +16,7 @@ from typing import Any, Mapping
 
 import mne
 from docstring_inheritance import NumpyDocstringInheritanceInitMeta
-from dotenv import load_dotenv
+from mne.utils import _soft_import
 from mne_bids import find_matching_paths
 from pymongo import InsertOne, UpdateOne
 from rich.console import Console
@@ -89,7 +89,8 @@ class EEGDash:
         except Exception:
             DB_CONNECTION_STRING = None
         else:
-            load_dotenv()
+            dotenv = _soft_import("dotenv", "eegdash[full] is necessary.")
+            dotenv.load_dotenv()
             DB_CONNECTION_STRING = os.getenv("DB_CONNECTION_STRING")
 
         # Use singleton to get MongoDB client, database, and collection
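
The release swaps the unconditional dotenv import for MNE's soft-import helper, so python-dotenv is only needed when a connection string has to be loaded from a .env file. A minimal sketch of that pattern, assuming mne.utils._soft_import accepts a purpose string and a strict flag as it does elsewhere in this diff (the function name below is illustrative):

import os

from mne.utils import _soft_import


def load_connection_string() -> str | None:
    """Read DB_CONNECTION_STRING, touching python-dotenv only if it is installed."""
    # With strict=False a missing package yields a falsy value instead of raising.
    dotenv = _soft_import("dotenv", "loading credentials from a .env file", strict=False)
    if dotenv:
        dotenv.load_dotenv()  # populate os.environ from a local .env file
    return os.getenv("DB_CONNECTION_STRING")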
eegdash/data_utils.py CHANGED
@@ -20,13 +20,10 @@ from typing import Any
 
 import mne
 import mne_bids
-import numpy as np
 import pandas as pd
-from joblib import Parallel, delayed
 from mne._fiff.utils import _read_segments_file
 from mne.io import BaseRaw
-from mne.utils.check import _soft_import
-from mne_bids import BIDSPath
+from mne_bids import BIDSPath, find_matching_paths
 
 from braindecode.datasets import BaseDataset
 
@@ -348,18 +345,13 @@ class EEGBIDSDataset:
         data_dir=None,  # location of bids dataset
         dataset="",  # dataset name
     ):
-        bids_lib = _soft_import("bids", purpose="digestion of datasets", strict=False)
-
-        if bids_lib is None:
-            raise ImportError(
-                "The 'pybids' package is required to use EEGBIDSDataset. "
-                "Please install it via 'pip install eegdash[digestion]'."
-            )
-
         if data_dir is None or not os.path.exists(data_dir):
             raise ValueError("data_dir must be specified and must exist")
+
         self.bidsdir = Path(data_dir)
         self.dataset = dataset
+        self.data_dir = data_dir
+
         # Accept exact dataset folder or a variant with informative suffixes
         # (e.g., dsXXXXX-bdf, dsXXXXX-bdf-mini) to avoid collisions.
         dir_name = self.bidsdir.name
@@ -367,10 +359,11 @@ class EEGBIDSDataset:
             raise AssertionError(
                 f"BIDS directory '{dir_name}' does not correspond to dataset '{self.dataset}'"
             )
-        self.layout = bids_lib.BIDSLayout(data_dir)
+
+        # Initialize BIDS paths using fast mne_bids approach instead of pybids
+        self._init_bids_paths()
 
         # get all recording files in the bids directory
-        self.files = self._get_recordings(self.layout)
         assert len(self.files) > 0, ValueError(
             "Unable to construct EEG dataset. No EEG recordings found."
         )
@@ -387,29 +380,103 @@ class EEGBIDSDataset:
         """
         return self.get_bids_file_attribute("modality", self.files[0]).lower() == "eeg"
 
-    def _get_recordings(self, layout) -> list[str]:
-        """Get a list of all EEG recording files in the BIDS layout."""
-        files = []
-        for ext, exts in self.RAW_EXTENSIONS.items():
-            files = layout.get(extension=ext, return_type="filename")
-            if files:
+    def _init_bids_paths(self) -> None:
+        """Initialize BIDS file paths using mne_bids for fast discovery.
+
+        Uses mne_bids.find_matching_paths() for efficient pattern-based file
+        discovery instead of heavy pybids BIDSLayout indexing.
+        """
+        # Initialize cache for BIDSPath objects
+        self._bids_path_cache = {}
+
+        # Find all EEG recordings using pattern matching (fast!)
+        self.files = []
+        for ext in self.RAW_EXTENSIONS.keys():
+            # find_matching_paths returns BIDSPath objects
+            paths = find_matching_paths(self.bidsdir, datatypes="eeg", extensions=ext)
+            if paths:
+                # Convert BIDSPath objects to filename strings
+                self.files = [str(p.fpath) for p in paths]
                 break
-        return files
 
-    def _get_relative_bidspath(self, filename: str) -> str:
-        """Make a file path relative to the BIDS parent directory."""
-        bids_parent_dir = self.bidsdir.parent.absolute()
-        return str(Path(filename).relative_to(bids_parent_dir))
+    def _get_bids_path_from_file(self, data_filepath: str):
+        """Get a BIDSPath object for a data file with caching.
+
+        Parameters
+        ----------
+        data_filepath : str
+            The path to the data file.
+
+        Returns
+        -------
+        BIDSPath
+            The BIDSPath object for the file.
+
+        """
+        if data_filepath not in self._bids_path_cache:
+            # Parse the filename to extract BIDS entities
+            filepath = Path(data_filepath)
+            filename = filepath.name
+
+            # Extract entities from filename using BIDS pattern
+            # Expected format: sub-<label>[_ses-<label>][_task-<label>][_run-<label>]_eeg.<ext>
+            subject = re.search(r"sub-([^_]*)", filename)
+            session = re.search(r"ses-([^_]*)", filename)
+            task = re.search(r"task-([^_]*)", filename)
+            run = re.search(r"run-([^_]*)", filename)
+
+            bids_path = BIDSPath(
+                subject=subject.group(1) if subject else None,
+                session=session.group(1) if session else None,
+                task=task.group(1) if task else None,
+                run=int(run.group(1)) if run else None,
+                datatype="eeg",
+                extension=filepath.suffix,
+                root=self.bidsdir,
+            )
+            self._bids_path_cache[data_filepath] = bids_path
+
+        return self._bids_path_cache[data_filepath]
 
-    def _get_property_from_filename(self, property: str, filename: str) -> str:
-        """Parse a BIDS entity from a filename."""
-        import platform
+    def _get_json_with_inheritance(
+        self, data_filepath: str, json_filename: str
+    ) -> dict:
+        """Get JSON metadata with BIDS inheritance handling.
 
-        if platform.system() == "Windows":
-            lookup = re.search(rf"{property}-(.*?)[_\\]", filename)
-        else:
-            lookup = re.search(rf"{property}-(.*?)[_\/]", filename)
-        return lookup.group(1) if lookup else ""
+        Walks up the directory tree to find and merge JSON files following
+        BIDS inheritance principles.
+
+        Parameters
+        ----------
+        data_filepath : str
+            The path to the data file.
+        json_filename : str
+            The name of the JSON file to find (e.g., "eeg.json").
+
+        Returns
+        -------
+        dict
+            The merged JSON metadata.
+
+        """
+        json_dict = {}
+        current_dir = Path(data_filepath).parent
+        root_dir = self.bidsdir
+
+        # Walk up from file directory to root, collecting JSON files
+        while current_dir >= root_dir:
+            json_path = current_dir / json_filename
+            if json_path.exists():
+                with open(json_path) as f:
+                    json_dict.update(json.load(f))
+
+            # Stop at BIDS root (contains dataset_description.json)
+            if (current_dir / "dataset_description.json").exists():
+                break
+
+            current_dir = current_dir.parent
+
+        return json_dict
 
     def _merge_json_inheritance(self, json_files: list[str | Path]) -> dict:
         """Merge a list of JSON files according to BIDS inheritance."""
@@ -476,82 +543,6 @@ class EEGBIDSDataset:
         )
         return meta_files
 
-    def _scan_directory(self, directory: str, extension: str) -> list[Path]:
-        """Scan a directory for files with a given extension."""
-        result_files = []
-        directory_to_ignore = [".git", ".datalad", "derivatives", "code"]
-        with os.scandir(directory) as entries:
-            for entry in entries:
-                if entry.is_file() and entry.name.endswith(extension):
-                    result_files.append(Path(entry.path))
-                elif entry.is_dir() and not any(
-                    name in entry.name for name in directory_to_ignore
-                ):
-                    result_files.append(Path(entry.path))
-        return result_files
-
-    def _get_files_with_extension_parallel(
-        self, directory: str, extension: str = ".set", max_workers: int = -1
-    ) -> list[Path]:
-        """Scan a directory tree in parallel for files with a given extension."""
-        result_files = []
-        dirs_to_scan = [directory]
-
-        while dirs_to_scan:
-            logger.info(
-                f"Directories to scan: {len(dirs_to_scan)}, files: {dirs_to_scan}"
-            )
-            results = Parallel(n_jobs=max_workers, prefer="threads", verbose=1)(
-                delayed(self._scan_directory)(d, extension) for d in dirs_to_scan
-            )
-
-            dirs_to_scan = []
-            for res in results:
-                for path in res:
-                    if os.path.isdir(path):
-                        dirs_to_scan.append(path)
-                    else:
-                        result_files.append(path)
-            logger.info(f"Found {len(result_files)} files.")
-
-        return result_files
-
-    def load_and_preprocess_raw(
-        self, raw_file: str, preprocess: bool = False
-    ) -> np.ndarray:
-        """Load and optionally preprocess a raw data file.
-
-        This is a utility function for testing or debugging, not for general use.
-
-        Parameters
-        ----------
-        raw_file : str
-            Path to the raw EEGLAB file (.set).
-        preprocess : bool, default False
-            If True, apply a high-pass filter, notch filter, and resample the data.
-
-        Returns
-        -------
-        numpy.ndarray
-            The loaded and processed data as a NumPy array.
-
-        """
-        logger.info(f"Loading raw data from {raw_file}")
-        EEG = mne.io.read_raw_eeglab(raw_file, preload=True, verbose="error")
-
-        if preprocess:
-            EEG = EEG.filter(l_freq=0.25, h_freq=25, verbose=False)
-            EEG = EEG.notch_filter(freqs=(60), verbose=False)
-            sfreq = 128
-            if EEG.info["sfreq"] != sfreq:
-                EEG = EEG.resample(sfreq)
-
-        mat_data = EEG.get_data()
-
-        if len(mat_data.shape) > 2:
-            raise ValueError("Expect raw data to be CxT dimension")
-        return mat_data
-
     def get_files(self) -> list[str]:
         """Get all EEG recording file paths in the BIDS dataset.
 
@@ -563,31 +554,6 @@ class EEGBIDSDataset:
         """
         return self.files
 
-    def resolve_bids_json(self, json_files: list[str]) -> dict:
-        """Resolve BIDS JSON inheritance and merge files.
-
-        Parameters
-        ----------
-        json_files : list of str
-            A list of JSON file paths, ordered from the lowest (most specific)
-            to highest level of the BIDS hierarchy.
-
-        Returns
-        -------
-        dict
-            A dictionary containing the merged JSON data.
-
-        """
-        if not json_files:
-            raise ValueError("No JSON files provided")
-        json_files.reverse()
-
-        json_dict = {}
-        for json_file in json_files:
-            with open(json_file) as f:
-                json_dict.update(json.load(f))
-        return json_dict
-
     def get_bids_file_attribute(self, attribute: str, data_filepath: str) -> Any:
         """Retrieve a specific attribute from BIDS metadata.
 
@@ -604,21 +570,29 @@ class EEGBIDSDataset:
            The value of the requested attribute, or None if not found.
 
        """
-        entities = self.layout.parse_file_entities(data_filepath)
-        bidsfile = self.layout.get(**entities)[0]
-        attributes = bidsfile.get_entities(metadata="all")
-        attribute_mapping = {
-            "sfreq": "SamplingFrequency",
-            "modality": "datatype",
-            "task": "task",
-            "session": "session",
-            "run": "run",
-            "subject": "subject",
-            "ntimes": "RecordingDuration",
-            "nchans": "EEGChannelCount",
+        bids_path = self._get_bids_path_from_file(data_filepath)
+
+        # Direct BIDSPath properties for entities
+        direct_attrs = {
+            "subject": bids_path.subject,
+            "session": bids_path.session,
+            "task": bids_path.task,
+            "run": bids_path.run,
+            "modality": bids_path.datatype,
        }
-        attribute_value = attributes.get(attribute_mapping.get(attribute), None)
-        return attribute_value
+
+        if attribute in direct_attrs:
+            return direct_attrs[attribute]
+
+        # For JSON-based attributes, read and cache eeg.json
+        eeg_json = self._get_json_with_inheritance(data_filepath, "eeg.json")
+        json_attrs = {
+            "sfreq": eeg_json.get("SamplingFrequency"),
+            "ntimes": eeg_json.get("RecordingDuration"),
+            "nchans": eeg_json.get("EEGChannelCount"),
+        }
+
+        return json_attrs.get(attribute)
 
     def channel_labels(self, data_filepath: str) -> list[str]:
         """Get a list of channel labels from channels.tsv.
@@ -634,9 +608,27 @@ class EEGBIDSDataset:
            A list of channel names.
 
        """
-        channels_tsv = pd.read_csv(
-            self.get_bids_metadata_files(data_filepath, "channels.tsv")[0], sep="\t"
-        )
+        # Find channels.tsv in the same directory as the data file
+        # It can be named either "channels.tsv" or "*_channels.tsv"
+        filepath = Path(data_filepath)
+        parent_dir = filepath.parent
+
+        # Try the standard channels.tsv first
+        channels_tsv_path = parent_dir / "channels.tsv"
+        if not channels_tsv_path.exists():
+            # Try to find *_channels.tsv matching the filename prefix
+            base_name = filepath.stem  # filename without extension
+            for tsv_file in parent_dir.glob("*_channels.tsv"):
+                # Check if it matches by looking at task/run components
+                tsv_name = tsv_file.stem.replace("_channels", "")
+                if base_name.startswith(tsv_name):
+                    channels_tsv_path = tsv_file
+                    break
+
+        if not channels_tsv_path.exists():
+            raise FileNotFoundError(f"No channels.tsv found for {data_filepath}")
+
+        channels_tsv = pd.read_csv(channels_tsv_path, sep="\t")
         return channels_tsv["name"].tolist()
 
     def channel_types(self, data_filepath: str) -> list[str]:
@@ -653,9 +645,27 @@ class EEGBIDSDataset:
            A list of channel types.
 
        """
-        channels_tsv = pd.read_csv(
-            self.get_bids_metadata_files(data_filepath, "channels.tsv")[0], sep="\t"
-        )
+        # Find channels.tsv in the same directory as the data file
+        # It can be named either "channels.tsv" or "*_channels.tsv"
+        filepath = Path(data_filepath)
+        parent_dir = filepath.parent
+
+        # Try the standard channels.tsv first
+        channels_tsv_path = parent_dir / "channels.tsv"
+        if not channels_tsv_path.exists():
+            # Try to find *_channels.tsv matching the filename prefix
+            base_name = filepath.stem  # filename without extension
+            for tsv_file in parent_dir.glob("*_channels.tsv"):
+                # Check if it matches by looking at task/run components
+                tsv_name = tsv_file.stem.replace("_channels", "")
+                if base_name.startswith(tsv_name):
+                    channels_tsv_path = tsv_file
+                    break
+
+        if not channels_tsv_path.exists():
+            raise FileNotFoundError(f"No channels.tsv found for {data_filepath}")
+
+        channels_tsv = pd.read_csv(channels_tsv_path, sep="\t")
         return channels_tsv["type"].tolist()
 
     def num_times(self, data_filepath: str) -> int:
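
channel_labels() and channel_types() above now share the same sidecar lookup: prefer a bare channels.tsv next to the recording, otherwise fall back to a *_channels.tsv whose entity prefix matches the recording's filename. A sketch of that rule factored into a helper (the helper name is illustrative, not part of the package):

from pathlib import Path


def find_channels_tsv(data_filepath: str) -> Path:
    """Locate the channels.tsv sidecar that sits next to a BIDS recording."""
    filepath = Path(data_filepath)
    parent = filepath.parent

    # A plain channels.tsv in the recording's directory wins.
    candidate = parent / "channels.tsv"
    if candidate.exists():
        return candidate

    # Otherwise accept e.g. sub-01_task-rest_channels.tsv whose prefix matches.
    for tsv_file in parent.glob("*_channels.tsv"):
        prefix = tsv_file.stem.replace("_channels", "")
        if filepath.stem.startswith(prefix):
            return tsv_file

    raise FileNotFoundError(f"No channels.tsv found for {data_filepath}")


# Usage: pandas.read_csv(find_channels_tsv(path), sep="\t")["name"].tolist()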
@@ -674,10 +684,10 @@ class EEGBIDSDataset:
            The approximate number of time points.
 
        """
-        eeg_jsons = self.get_bids_metadata_files(data_filepath, "eeg.json")
-        eeg_json_dict = self._merge_json_inheritance(eeg_jsons)
+        eeg_json_dict = self._get_json_with_inheritance(data_filepath, "eeg.json")
        return int(
-            eeg_json_dict["SamplingFrequency"] * eeg_json_dict["RecordingDuration"]
+            eeg_json_dict.get("SamplingFrequency", 0)
+            * eeg_json_dict.get("RecordingDuration", 0)
        )
 
    def subject_participant_tsv(self, data_filepath: str) -> dict[str, Any]:
@@ -718,34 +728,7 @@ class EEGBIDSDataset:
            The merged eeg.json metadata.
 
        """
-        eeg_jsons = self.get_bids_metadata_files(data_filepath, "eeg.json")
-        return self._merge_json_inheritance(eeg_jsons)
-
-    def channel_tsv(self, data_filepath: str) -> dict[str, Any]:
-        """Get the channels.tsv metadata as a dictionary.
-
-        Parameters
-        ----------
-        data_filepath : str
-            The path to the data file.
-
-        Returns
-        -------
-        dict
-            The channels.tsv data, with columns as keys.
-
-        """
-        channels_tsv_path = self.get_bids_metadata_files(data_filepath, "channels.tsv")[
-            0
-        ]
-        channels_tsv = pd.read_csv(channels_tsv_path, sep="\t")
-        channel_tsv_dict = channels_tsv.to_dict()
-        for list_field in ["name", "type", "units"]:
-            if list_field in channel_tsv_dict:
-                channel_tsv_dict[list_field] = list(
-                    channel_tsv_dict[list_field].values()
-                )
-        return channel_tsv_dict
+        return self._get_json_with_inheritance(data_filepath, "eeg.json")
 
 
 __all__ = ["EEGDashBaseDataset", "EEGBIDSDataset", "EEGDashBaseRaw"]
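
num_times() and eeg_json() now resolve sidecar metadata through _get_json_with_inheritance() instead of the removed resolve_bids_json() helper. For reference, a minimal standalone sketch of a BIDS inheritance walk, written so that the most specific sidecar wins, as the BIDS specification prescribes (names are illustrative):

import json
from pathlib import Path


def merge_sidecars(data_filepath: str, bids_root: str, json_filename: str = "eeg.json") -> dict:
    """Collect json_filename from the recording's folder up to the BIDS root and merge them."""
    current = Path(data_filepath).parent
    root = Path(bids_root).resolve()

    # Gather candidates from most specific (next to the recording) to least specific (root).
    candidates = []
    while True:
        sidecar = current / json_filename
        if sidecar.exists():
            candidates.append(sidecar)
        if current.resolve() == root or current == current.parent:
            break
        current = current.parent

    # Apply the least specific file first so more specific keys overwrite it.
    merged: dict = {}
    for sidecar in reversed(candidates):
        with open(sidecar) as f:
            merged.update(json.load(f))
    return merged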
eegdash-0.4.0.dev176.dist-info/METADATA → eegdash-0.4.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: eegdash
-Version: 0.4.0.dev176
+Version: 0.4.1
 Summary: EEG data for machine learning
 Author-email: Young Truong <dt.young112@gmail.com>, Arnaud Delorme <adelorme@gmail.com>, Aviv Dotan <avivd220@gmail.com>, Oren Shriki <oren70@gmail.com>, Bruno Aristimunha <b.aristimunha@gmail.com>
 License-Expression: GPL-3.0-only
@@ -27,17 +27,12 @@ License-File: LICENSE
 Requires-Dist: braindecode>=1.0
 Requires-Dist: mne_bids>=0.17.0
 Requires-Dist: numba
-Requires-Dist: numpy
-Requires-Dist: pandas
 Requires-Dist: pymongo
-Requires-Dist: python-dotenv
 Requires-Dist: s3fs
 Requires-Dist: tqdm
-Requires-Dist: h5io>=0.2.4
 Requires-Dist: pymatreader
 Requires-Dist: eeglabio
 Requires-Dist: tabulate
-Requires-Dist: docstring_inheritance
 Requires-Dist: rich
 Provides-Extra: tests
 Requires-Dist: pytest; extra == "tests"
@@ -67,6 +62,7 @@ Requires-Dist: nbformat; extra == "docs"
 Requires-Dist: graphviz; extra == "docs"
 Provides-Extra: digestion
 Requires-Dist: pybids; extra == "digestion"
+Requires-Dist: python-dotenv; extra == "digestion"
 Provides-Extra: all
 Requires-Dist: eegdash[docs]; extra == "all"
 Requires-Dist: eegdash[dev]; extra == "all"
eegdash-0.4.0.dev176.dist-info/RECORD → eegdash-0.4.1.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
-eegdash/__init__.py,sha256=A7Uh_kXQuDzVmoSuZyWHE7RRxwhA2_JzaAmLD7LZM4E,704
-eegdash/api.py,sha256=az7uUYrEvJfBtzvkzYKzXqEztDSCMsCmgIywpdzYqc8,38381
+eegdash/__init__.py,sha256=EmkUVFpceFXHYEKj0yDMa8TMCc1KWWSYs02P4ItZrbA,697
+eegdash/api.py,sha256=CK1sYV401ZAcJ22YVcvZdv3BnG7GpPvf-C9iJDCLNCY,38467
 eegdash/bids_eeg_metadata.py,sha256=kEmFUe07tivkuIoC5T-YwfO4QQYJBxuc769ZBV1UCKo,16682
 eegdash/const.py,sha256=9WMetN7YMQJbkN2PhzItxtVRZ4VBXLP82vFu9pY6xok,9066
-eegdash/data_utils.py,sha256=LOA-8ShugnIXlqmGmmSClTCBr3khQSconmYAcvT_9tY,26528
+eegdash/data_utils.py,sha256=t7m00gwfST3MlOFmGwEr2LNPo-fPbQ1WFwC6y8XW7yw,26119
 eegdash/downloader.py,sha256=Z-9EEJildqJxIihwdtXc_h9kzCkuF9LWIwQEfyG9Huw,6030
 eegdash/logging.py,sha256=OQ4jMtwv1h-gzjxmr3PCpcsKi5-3Nhd3r9PJ4UN7oQI,1467
 eegdash/mongodb.py,sha256=9FJDeEebOD5RzNYfAf1lhr0R-pECAlnug6Sjhd9_oUw,3469
@@ -30,8 +30,8 @@ eegdash/features/feature_bank/utils.py,sha256=zCdkfDMLWJhPjBqb5Xz0jLKg8gm3qQDY1G
 eegdash/hbn/__init__.py,sha256=hsI5pmIuYDzr--aE5UiToO-P9XL5fVRKahZzdsAodro,794
 eegdash/hbn/preprocessing.py,sha256=xp0HBz8WGhLI5c2Zkk4QiVUzGoIZep8YypnHNZsUJ4o,3800
 eegdash/hbn/windows.py,sha256=Z_fhG3kaHd5MAPg60FwFnxMJay8EzacXytUaCsOENGc,14408
-eegdash-0.4.0.dev176.dist-info/licenses/LICENSE,sha256=asisR-xupy_NrQBFXnx6yqXeZcYWLvbAaiETl25iXT0,931
-eegdash-0.4.0.dev176.dist-info/METADATA,sha256=85mnc40qlaAusZRVmbWhmM9RaD29YxaReud8ICVuDp4,6927
-eegdash-0.4.0.dev176.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-eegdash-0.4.0.dev176.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
-eegdash-0.4.0.dev176.dist-info/RECORD,,
+eegdash-0.4.1.dist-info/licenses/LICENSE,sha256=asisR-xupy_NrQBFXnx6yqXeZcYWLvbAaiETl25iXT0,931
+eegdash-0.4.1.dist-info/METADATA,sha256=2bAo2XqxBWJS3qR8b4knurNH6KBgXYMKbSUqHAn5h6o,6835
+eegdash-0.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+eegdash-0.4.1.dist-info/top_level.txt,sha256=zavO69HQ6MyZM0aQMR2zUS6TAFc7bnN5GEpDpOpFZzU,8
+eegdash-0.4.1.dist-info/RECORD,,