ide4eeg 0.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ide4eeg/__init__.py +55 -0
- ide4eeg/__main__.py +11 -0
- ide4eeg/_appcds.py +170 -0
- ide4eeg/analysis/__init__.py +3 -0
- ide4eeg/analysis/analysis.py +81 -0
- ide4eeg/analysis/connectivity/__init__.py +19 -0
- ide4eeg/analysis/connectivity/aec/__init__.py +1 -0
- ide4eeg/analysis/connectivity/aec/utils.py +53 -0
- ide4eeg/analysis/connectivity/conn.py +1232 -0
- ide4eeg/analysis/connectivity/data.py +627 -0
- ide4eeg/analysis/connectivity/mvar/__init__.py +4 -0
- ide4eeg/analysis/connectivity/mvar/comp.py +35 -0
- ide4eeg/analysis/connectivity/mvar/fitting.py +306 -0
- ide4eeg/analysis/connectivity/mvarmodel.py +225 -0
- ide4eeg/analysis/connectivity_analysis.py +720 -0
- ide4eeg/analysis/dipole/__init__.py +8 -0
- ide4eeg/analysis/dipole/fitting.py +561 -0
- ide4eeg/analysis/dipole/lut.py +35 -0
- ide4eeg/analysis/dipole/visualize.py +364 -0
- ide4eeg/analysis/dipole_analysis.py +393 -0
- ide4eeg/analysis/eeg_profiles.py +638 -0
- ide4eeg/analysis/mne_catalog.py +1843 -0
- ide4eeg/analysis/mp_analysis.py +1064 -0
- ide4eeg/analysis/mp_bookviewer_qt.py +793 -0
- ide4eeg/analysis/tf_statistics.py +1940 -0
- ide4eeg/api.py +682 -0
- ide4eeg/cli.py +146 -0
- ide4eeg/download_examples.py +187 -0
- ide4eeg/download_tools.py +1332 -0
- ide4eeg/gui.py +18079 -0
- ide4eeg/gui_config.py +1306 -0
- ide4eeg/input/__init__.py +0 -0
- ide4eeg/input/create_signal.py +512 -0
- ide4eeg/input/input.py +1614 -0
- ide4eeg/install_diagnostics.py +279 -0
- ide4eeg/install_runner.py +633 -0
- ide4eeg/main.py +581 -0
- ide4eeg/plots/__init__.py +0 -0
- ide4eeg/plots/plots.py +171 -0
- ide4eeg/preprocessing/__init__.py +0 -0
- ide4eeg/preprocessing/artifacts.py +1187 -0
- ide4eeg/preprocessing/channels_and_signal.py +1487 -0
- ide4eeg/preprocessing/constraints.py +96 -0
- ide4eeg/preprocessing/facetag/__init__.py +393 -0
- ide4eeg/preprocessing/facetag/_eeg_paint.py +661 -0
- ide4eeg/preprocessing/facetag/auto_detect_qt.py +662 -0
- ide4eeg/preprocessing/facetag/insightface_backend.py +299 -0
- ide4eeg/preprocessing/facetag/insightface_gaze.py +132 -0
- ide4eeg/preprocessing/facetag/l2cs_gaze.py +287 -0
- ide4eeg/preprocessing/facetag/reference.py +542 -0
- ide4eeg/preprocessing/facetag/review_window.py +137 -0
- ide4eeg/preprocessing/facetag/review_window_qt.py +734 -0
- ide4eeg/preprocessing/facetag/trim_window_qt.py +584 -0
- ide4eeg/preprocessing/facetag/video_reader.py +251 -0
- ide4eeg/preprocessing/ica.py +540 -0
- ide4eeg/preprocessing/mp_preprocessing.py +955 -0
- ide4eeg/preprocessing/preprocessing.py +1304 -0
- ide4eeg/preprocessing/rest_and_epochs.py +551 -0
- ide4eeg/preprocessing/svarog_review.py +614 -0
- ide4eeg/references.py +286 -0
- ide4eeg/utils/__init__.py +0 -0
- ide4eeg/utils/log.py +55 -0
- ide4eeg/utils/parallel.py +160 -0
- ide4eeg-0.8.dist-info/METADATA +287 -0
- ide4eeg-0.8.dist-info/RECORD +69 -0
- ide4eeg-0.8.dist-info/WHEEL +5 -0
- ide4eeg-0.8.dist-info/entry_points.txt +2 -0
- ide4eeg-0.8.dist-info/licenses/LICENSE +674 -0
- ide4eeg-0.8.dist-info/top_level.txt +1 -0
ide4eeg/__init__.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# Ensure "spawn" start method for multiprocessing — required on Linux
# where the default "fork" crashes with Qt. macOS already uses "spawn"
# by default since Python 3.8. Must run before any multiprocessing use.
#
# Only touch the start method from the main process: child processes
# (e.g. joblib/loky workers) re-import this package during unpickling
# and calling set_start_method there raises "context has already been
# set" even when the context is already spawn (the guard below can't
# tell the difference in a spawned child, because the child never
# explicitly called set_start_method itself — see
# https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods).
import multiprocessing as _mp
_is_main = _mp.current_process().name == "MainProcess"
if _is_main:
    # NOTE(review): if an earlier import already pinned a non-spawn
    # context, set_start_method raises RuntimeError here — presumably
    # intentional (failing fast beats crashing under "fork" later);
    # confirm against the launch paths.
    if _mp.get_start_method(allow_none=True) != "spawn":
        _mp.set_start_method("spawn")

# Once-per-process migration of runtime data from old in-package /
# upstream locations to the consolidated ~/.obci/ide4eeg/ tree.
# Idempotent — silently no-ops when nothing to migrate. Done eagerly
# at import time so subsequent code (insightface_backend, l2cs_gaze)
# sees the consolidated paths regardless of import order. Skipped in
# child processes (joblib/loky workers) because they inherit the
# already-migrated state from the parent.
if _is_main:
    try:
        from ide4eeg.download_tools import migrate_legacy_paths
        migrate_legacy_paths()
    except Exception as _exc:
        # Per-step rename failures are caught + logged inside
        # migrate_legacy_paths; this outer except covers anything
        # broader (e.g. ImportError on a malformed download_tools).
        # Log a warning so a real bug doesn't get silently swallowed.
        import logging as _logging
        _logging.getLogger(__name__).warning(
            "Path migration skipped: %s: %s",
            type(_exc).__name__, _exc)
        # Clean up before the exception variable's implicit deletion.
        del _logging, _exc

# Drop the private bootstrap names so they don't leak into the
# package namespace (dir(ide4eeg) stays clean).
del _mp, _is_main

__version__ = "0.8"

#: Prefix used for per-recording output directories.
#: Layout: ``<output_root>/<OUTPUT_DIR_PREFIX><filename_core>/{preprocessing,analysis}/``
#: Single source of truth — pipeline writers and GUI viewers should
#: reference this rather than the literal string.
OUTPUT_DIR_PREFIX = "IDE4EEG_OUT_"

#: Sentinel value for ``config["electrodes_layout"]`` meaning
#: "use the 3D positions already in the file, don't apply a
#: standard montage". Auto-set by the preprocessing step when
#: native positions are detected; exposed as a selectable combobox
#: item in the GUI.
NATIVE_POSITIONS_SENTINEL = "native (from file)"
|
ide4eeg/__main__.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"""Entry point for ``python -m ide4eeg``.

Delegates straight to :func:`ide4eeg.cli.main` — the same dispatcher
behind the installed ``ide4eeg`` console script (declared in
pyproject.toml under ``[project.gui-scripts]``), so both invocation
paths behave identically.
"""

from ide4eeg.cli import main

if __name__ == "__main__":
    main()
|
ide4eeg/_appcds.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"""AppCDS (Application Class Data Sharing) flag builder for Java jars.
|
|
2
|
+
|
|
3
|
+
When IDE4EEG launches Svarog or ConnectiVIS, the JVM normally re-loads
|
|
4
|
+
all the Swing/AWT/3rd-party classes from scratch — a 0.5–1 s tax on
|
|
5
|
+
every cold start. AppCDS lets the JVM persist class metadata to a
|
|
6
|
+
``.jsa`` archive on first run and reuse it on every subsequent run.
|
|
7
|
+
|
|
8
|
+
This module centralises the flag construction so all four launch
|
|
9
|
+
sites (gui.py ``_launch_svarog`` / ``_launch_connectivis``, the three
|
|
10
|
+
``review_*_via_svarog`` entry points in ``preprocessing/svarog_review``)
|
|
11
|
+
follow the same scheme as the standalone Svarog launcher
|
|
12
|
+
(``standalone-package-files/svarog`` in svarog4) and the connecti-VIS
|
|
13
|
+
``run.sh``.
|
|
14
|
+
|
|
15
|
+
Filename scheme
|
|
16
|
+
---------------
|
|
17
|
+
|
|
18
|
+
``<jar-dir>/<jar-stem>-<jvm-id>.jsa``
|
|
19
|
+
|
|
20
|
+
* ``<jar-dir>`` — directory containing the jar. Putting the archive
|
|
21
|
+
next to the jar means an uninstall (rm -rf the install dir) cleans
|
|
22
|
+
it up automatically, and the launcher script + IDE4EEG share the
|
|
23
|
+
same file.
|
|
24
|
+
* ``<jar-stem>`` — jar basename minus ``.jar``. Different versions
|
|
25
|
+
of the same app land on different filenames automatically.
|
|
26
|
+
* ``<jvm-id>`` — first 8 hex chars of ``sha1(<absolute path to java
|
|
27
|
+
binary>)``. AppCDS archives are tied to the exact JVM build that
|
|
28
|
+
wrote them, so a JDK upgrade that lands on a new install path needs
|
|
29
|
+
a fresh archive. Tagging with the path means the upgrade naturally
|
|
30
|
+
triggers regeneration instead of silently falling back via
|
|
31
|
+
``-Xshare:auto`` on a stale file.
|
|
32
|
+
|
|
33
|
+
Freshness check
|
|
34
|
+
---------------
|
|
35
|
+
|
|
36
|
+
If the archive's mtime is older than the jar's, treat it as stale
|
|
37
|
+
(jar has been redeployed, the archive's class list is out of date)
|
|
38
|
+
and regenerate. Saves a manual ``rm`` step after every Svarog
|
|
39
|
+
rebuild.
|
|
40
|
+
|
|
41
|
+
JDK floor
|
|
42
|
+
---------
|
|
43
|
+
|
|
44
|
+
``-XX:ArchiveClassesAtExit`` (JEP 350, dynamic CDS) requires JDK 13+.
|
|
45
|
+
On JDK 11/12 the JVM rejects the flag with "Unrecognized VM option"
|
|
46
|
+
and aborts before main, so the version-probe gate is load-bearing.
|
|
47
|
+
We probe lazily — only when the archive is missing or stale — because
|
|
48
|
+
a fresh archive is itself proof that the JVM was JDK 13+ when it
|
|
49
|
+
wrote it (path-tagged jvm-id means a JDK swap lands on a new
|
|
50
|
+
filename), so reuse can skip the probe entirely and recover the full
|
|
51
|
+
~380 ms cold-start win.
|
|
52
|
+
|
|
53
|
+
Failure modes
|
|
54
|
+
-------------
|
|
55
|
+
|
|
56
|
+
Anything unexpected (jar dir read-only, jar gone between mtime calls,
|
|
57
|
+
sha1 module missing on a platform we haven't seen, ``java`` not on
|
|
58
|
+
PATH, version probe failed) → return ``[]`` and let the launch proceed
|
|
59
|
+
without AppCDS. AppCDS is a perf optimisation, never a correctness
|
|
60
|
+
requirement.
|
|
61
|
+
"""
|
|
62
|
+
|
|
63
|
+
from __future__ import annotations
|
|
64
|
+
|
|
65
|
+
import hashlib
|
|
66
|
+
import os
|
|
67
|
+
import re
|
|
68
|
+
import shutil
|
|
69
|
+
import subprocess
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _java_major(java_path: str) -> int | None:
|
|
73
|
+
"""Probe ``java -version`` and return the feature version.
|
|
74
|
+
|
|
75
|
+
Returns ``None`` on any failure (binary missing, timeout, output
|
|
76
|
+
unparseable). Callers treat ``None`` as "don't enable AppCDS".
|
|
77
|
+
|
|
78
|
+
Handles both the modern ``openjdk version "17.0.9"`` form and the
|
|
79
|
+
Java-8 legacy ``java version "1.8.0_392"`` form (where the real
|
|
80
|
+
major is 8, not 1).
|
|
81
|
+
"""
|
|
82
|
+
resolved = shutil.which(java_path) or java_path
|
|
83
|
+
try:
|
|
84
|
+
out = subprocess.run(
|
|
85
|
+
[resolved, "-version"],
|
|
86
|
+
capture_output=True, text=True, timeout=5,
|
|
87
|
+
)
|
|
88
|
+
except (OSError, subprocess.TimeoutExpired):
|
|
89
|
+
return None
|
|
90
|
+
text = (out.stderr or "") + (out.stdout or "")
|
|
91
|
+
m = re.search(r'version "(\d+)(?:\.(\d+))?', text)
|
|
92
|
+
if not m:
|
|
93
|
+
return None
|
|
94
|
+
first = int(m.group(1))
|
|
95
|
+
return int(m.group(2)) if first == 1 and m.group(2) else first
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def appcds_flags(jar_path: str, java_path: str = "java") -> list[str]:
    """Build AppCDS JVM flags for launching ``jar_path`` with ``java_path``.

    Parameters
    ----------
    jar_path : str
        Absolute path to the jar; a missing jar yields ``[]``.
    java_path : str
        ``java`` binary to launch with. The default ``"java"`` is
        resolved through :func:`shutil.which`; an absolute path is
        used as-is. The archive name embeds a hash of the resolved
        path, so a JDK upgrade that swaps the binary automatically
        lands on a fresh archive filename.

    Returns
    -------
    list[str]
        ``["-XX:SharedArchiveFile=...", "-Xshare:auto"]`` to reuse a
        fresh archive, ``["-XX:ArchiveClassesAtExit=..."]`` to write
        one on the next clean exit, or ``[]`` to launch without AppCDS
        (jar missing, unwritable jar dir, JDK < 13, any surprise).
    """
    try:
        jar_path = os.fspath(jar_path)
        if not os.path.isfile(jar_path):
            return []

        # Tag the archive with the JVM identity so different JDK
        # installs never share (or clobber) a single .jsa file.
        resolved_java = shutil.which(java_path) or java_path
        jvm_id = hashlib.sha1(resolved_java.encode("utf-8")).hexdigest()[:8]

        jar_dir = os.path.dirname(os.path.abspath(jar_path)) or "."
        if not os.access(jar_dir, os.W_OK):
            return []

        stem = os.path.splitext(os.path.basename(jar_path))[0]
        archive = os.path.join(jar_dir, f"{stem}-{jvm_id}.jsa")

        try:
            jar_mtime = os.path.getmtime(jar_path)
        except OSError:
            return []

        if os.path.isfile(archive):
            try:
                archive_mtime = os.path.getmtime(archive)
            except OSError:
                archive_mtime = None
            if archive_mtime is not None and archive_mtime > jar_mtime:
                # A fresh archive proves the JVM that wrote it was
                # JDK 13+ (it could not have produced one otherwise),
                # so reuse directly without re-probing the version.
                return [f"-XX:SharedArchiveFile={archive}", "-Xshare:auto"]
            # Jar redeployed since the archive was written — discard
            # the stale file so the next clean exit regenerates it.
            try:
                os.remove(archive)
            except OSError:
                pass

        # Generation needs -XX:ArchiveClassesAtExit (JEP 350, JDK 13+);
        # older JVMs abort on the unknown flag, so gate on a probe.
        major = _java_major(java_path)
        if major is None or major < 13:
            return []

        return [f"-XX:ArchiveClassesAtExit={archive}"]
    except Exception:
        # AppCDS is purely a perf optimisation — never block the launch.
        return []
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
"""Analysis orchestrator for IDE4EEG.
|
|
2
|
+
|
|
3
|
+
Dispatches time-domain and time-frequency analyses based on config flags.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
# Author: Szymon Bocian, 2023
|
|
7
|
+
|
|
8
|
+
from .eeg_profiles import eeg_profiles
|
|
9
|
+
from .connectivity_analysis import connectivity_analysis
|
|
10
|
+
from .dipole_analysis import dipole_analysis
|
|
11
|
+
|
|
12
|
+
import logging
|
|
13
|
+
import matplotlib.pyplot as plt
|
|
14
|
+
|
|
15
|
+
def analysis(rest, rest_clean, epochs, epochs_clean, data, config, path, tags_desc_id, file_name):
    '''
    Run the configured analyses and produce plots for the prepared data.

    Parameters
    ----------
    rest: mne.epochs.Epochs or None
        Prepared rest segments from the signal; None when the REST path
        was not requested in the config file.
    rest_clean: mne.epochs.Epochs or None
        Rest segments with artifact-bearing segments removed; None when
        the REST path was not requested in the config file.
    epochs: mne.epochs.Epochs or None
        Prepared epochs from the signal; None when the EPOCHS path was
        not requested in the config file.
    epochs_clean: mne.epochs.Epochs or None
        Epochs with artifact-bearing epochs removed; None when the
        EPOCHS path was not requested in the config file.
    data: pandas.core.frame.DataFrame
        Dataframe with basic information about tags.
    config: dict
        Dictionary of configuration variables.
    path: str
        Path to the output directory.
    tags_desc_id: dict
        Mapping from prepared tag names to their values in the prepared
        STIM signal.
    file_name: str
        Name of the file with the raw signal.

    Raises
    ------
    InterruptedError
        When the cancel event supplied via ``config["_cancel_event"]``
        is set between analysis steps.
    '''
    cancel_event = config.get("_cancel_event")

    def _abort_if_cancelled():
        # Cooperative cancellation point, consulted before each step.
        if cancel_event is not None and cancel_event.is_set():
            logging.info("Analysis cancelled by user.")
            raise InterruptedError("Analysis cancelled by user")

    if config.get("prepare_eeg_profiles"):
        _abort_if_cancelled()
        logging.info("Preparing EEG profiles.")
        eeg_profiles(rest_clean, epochs_clean, config, path, file_name)

    if config.get("prepare_connectivity_analysis"):
        _abort_if_cancelled()
        logging.info("Preparing connectivity analysis.")
        connectivity_analysis(rest_clean, epochs_clean, config, path, file_name)

    if config.get("prepare_dipole_fitting"):
        _abort_if_cancelled()
        logging.info("Preparing dipole fitting analysis.")
        dipole_analysis(rest_clean, epochs_clean, config, path, file_name)

    if config.get("prepare_tf_statistics"):
        _abort_if_cancelled()
        logging.info("Preparing ERD/ERS significance analysis.")
        # Deferred import — only needed when this step actually runs.
        from .tf_statistics import tf_statistics_analysis
        tf_statistics_analysis(rest_clean, epochs_clean, config, path, file_name)

    # MNE catalog analyses (also deferred-imported).
    selected_ids = config.get("mne_catalog", [])
    if selected_ids:
        _abort_if_cancelled()
        logging.info("Running MNE catalog analyses (%d selected).",
                     len(selected_ids))
        from .mne_catalog import run_mne_catalog
        run_mne_catalog(selected_ids, rest_clean, epochs_clean, config,
                        path, file_name)

    # Release every figure created by the analyses above.
    plt.close("all")
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""Brain connectivity analysis based on ConnectiviPy.
|
|
3
|
+
|
|
4
|
+
Original authors: Dominik Krzemiński, Maciej Kamiński (University of Warsaw).
|
|
5
|
+
Developed during Google Summer of Code 2015 under the International
|
|
6
|
+
Neuroinformatics Coordinating Facility (INCF).
|
|
7
|
+
https://github.com/dokato/connectivipy
|
|
8
|
+
Integrated into IDE4EEG for EEG connectivity estimation.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from .data import Data
|
|
12
|
+
from .conn import conn_estim_dc
|
|
13
|
+
from .mvarmodel import Mvar
|
|
14
|
+
from .mvar.fitting import mvar_gen, mvar_gen_inst, fitting_algorithms
|
|
15
|
+
|
|
16
|
+
__all__ = [
|
|
17
|
+
"Data", "conn_estim_dc", "Mvar",
|
|
18
|
+
"mvar_gen", "mvar_gen_inst", "fitting_algorithms",
|
|
19
|
+
]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
import scipy.signal as ss
|
|
3
|
+
|
|
4
|
+
#: Canonical EEG frequency bands (Hz) used by the AEC utilities.
FQ_BANDS = {'theta': [6, 7],
            'alpha': [8, 13],
            'beta': [15, 25],
            'low-gamma': [30, 45],
            'high-gamma': [55, 70]}

def check_bands_correct(band):
    """Return True when *band* names one of the supported FQ_BANDS.

    Parameters
    ----------
    band : str
        Candidate band name (e.g. ``'alpha'``).
    """
    # Membership on the dict itself is the idiomatic (and equivalent)
    # form of the previous ``band in FQ_BANDS.keys()``.
    return band in FQ_BANDS
|
|
12
|
+
|
|
13
|
+
def design_band_filter(lowcut, highcut, fs, rp = None, rs = None,
                       filttype = 'butter', btype = 'bandpass', order = 5):
    """Design an IIR band filter and return its ``(b, a)`` coefficients.

    Parameters
    ----------
    lowcut, highcut : float
        Band edges in Hz (normalised internally to the Nyquist rate).
    fs : float
        Sampling frequency in Hz.
    rp : float, optional
        Maximum passband ripple in dB (``cheby1`` / ``ellip`` only).
    rs : float, optional
        Minimum stopband attenuation in dB (``cheby2`` / ``ellip`` only).
    filttype : {'butter', 'cheby1', 'cheby2', 'ellip', 'bessel'}
        scipy.signal design function to use.
    btype : {'bandpass', 'bandstop'}
        Band filter kind.
    order : int
        Filter order.

    Returns
    -------
    b, a : ndarray
        Numerator / denominator polynomials of the IIR filter.

    Raises
    ------
    ValueError
        On an unsupported *btype* or *filttype*.
    """
    if btype not in {'bandpass', 'bandstop'}:
        raise ValueError("This is only for band filters: {'bandpass', 'bandstop'}")
    designs = {'butter': ss.butter, 'cheby1': ss.cheby1, 'cheby2': ss.cheby2,
               'ellip': ss.ellip, 'bessel': ss.bessel}
    if filttype not in designs:
        raise ValueError('Not supported filter type, check docs.')
    f_nq = fs / 2
    band = [lowcut / f_nq, highcut / f_nq]
    # Ripple arguments vary per design; assemble them positionally in
    # the order the scipy signatures expect (rp before rs for ellip).
    args = [order]
    if filttype in ('cheby1', 'ellip'):
        args.append(rp)
    if filttype in ('cheby2', 'ellip'):
        args.append(rs)
    args.append(band)
    # Direct dispatch replaces the previous eval()-built call string:
    # same coefficients, no dynamic code execution.
    b, a = designs[filttype](*args, btype = btype)
    return b, a
|
|
31
|
+
|
|
32
|
+
def butter_bandpass(lowcut, highcut, fs, order = 4):
    """Return ``(b, a)`` for an order-*order* Butterworth band-pass."""
    coeffs = design_band_filter(lowcut, highcut, fs,
                                order = order, btype = 'bandpass')
    return coeffs
|
|
34
|
+
|
|
35
|
+
def butter_bandstop(lowcut, highcut, fs, order = 4):
    """Return ``(b, a)`` for an order-*order* Butterworth band-stop."""
    coeffs = design_band_filter(lowcut, highcut, fs,
                                order = order, btype = 'bandstop')
    return coeffs
|
|
37
|
+
|
|
38
|
+
def filter_band(data, fs, band = None, filter = None, filtfilt = True):
    """Band-filter *data* by a named edge pair or a pre-designed filter.

    Parameters
    ----------
    data : array_like
        Input signal.
    fs : float
        Sampling frequency in Hz.
    band : sequence of two floats, optional
        ``(low, high)`` cut-offs in Hz; used to design an order-4
        Butterworth band-pass when *filter* is not supplied.
    filter : tuple of (b, a), optional
        Pre-designed filter coefficients; takes precedence over *band*.
        (Parameter name kept for backward compatibility even though it
        shadows the ``filter`` builtin.)
    filtfilt : bool
        Zero-phase forward-backward filtering when True; single-pass
        ``lfilter`` otherwise.

    Returns
    -------
    ndarray
        The filtered signal.

    Raises
    ------
    ValueError
        When both *band* and *filter* are None.
    """
    # Identity comparison with None: the original chained
    # ``band == filter == None`` invoked elementwise __eq__ on
    # array-like *band* arguments, raising "truth value is ambiguous"
    # for numpy inputs instead of filtering them.
    if band is None and filter is None:
        raise ValueError("When *band* is None, *filter* can't be None")
    if filter is None:
        b, a = butter_bandpass(band[0], band[1], fs)
    else:
        b, a = filter
    if filtfilt:
        fdata = ss.filtfilt(b, a, data)
    else:
        fdata = ss.lfilter(b, a, data)
    return fdata
|
|
50
|
+
|
|
51
|
+
def calc_ampenv(data):
    """Return the amplitude envelope of *data*.

    Computed as the magnitude of the Hilbert analytic signal.
    """
    analytic_signal = ss.hilbert(data)
    return np.abs(analytic_signal)
|
|
53
|
+
|