py-neuromodulation 0.0.7__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py +0 -1
- py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py +0 -2
- py_neuromodulation/__init__.py +12 -4
- py_neuromodulation/analysis/RMAP.py +3 -3
- py_neuromodulation/analysis/decode.py +55 -2
- py_neuromodulation/analysis/feature_reader.py +1 -0
- py_neuromodulation/analysis/stats.py +3 -3
- py_neuromodulation/default_settings.yaml +25 -20
- py_neuromodulation/features/bandpower.py +65 -23
- py_neuromodulation/features/bursts.py +9 -8
- py_neuromodulation/features/coherence.py +7 -4
- py_neuromodulation/features/feature_processor.py +4 -4
- py_neuromodulation/features/fooof.py +7 -6
- py_neuromodulation/features/mne_connectivity.py +60 -87
- py_neuromodulation/features/oscillatory.py +5 -4
- py_neuromodulation/features/sharpwaves.py +21 -0
- py_neuromodulation/filter/kalman_filter.py +17 -6
- py_neuromodulation/gui/__init__.py +3 -0
- py_neuromodulation/gui/backend/app_backend.py +419 -0
- py_neuromodulation/gui/backend/app_manager.py +345 -0
- py_neuromodulation/gui/backend/app_pynm.py +253 -0
- py_neuromodulation/gui/backend/app_socket.py +97 -0
- py_neuromodulation/gui/backend/app_utils.py +306 -0
- py_neuromodulation/gui/backend/app_window.py +202 -0
- py_neuromodulation/gui/frontend/assets/Figtree-VariableFont_wght-CkXbWBDP.ttf +0 -0
- py_neuromodulation/gui/frontend/assets/index-_6V8ZfAS.js +300137 -0
- py_neuromodulation/gui/frontend/assets/plotly-DTCwMlpS.js +23594 -0
- py_neuromodulation/gui/frontend/charite.svg +16 -0
- py_neuromodulation/gui/frontend/index.html +14 -0
- py_neuromodulation/gui/window_api.py +115 -0
- py_neuromodulation/lsl_api.cfg +3 -0
- py_neuromodulation/processing/data_preprocessor.py +9 -2
- py_neuromodulation/processing/filter_preprocessing.py +43 -27
- py_neuromodulation/processing/normalization.py +32 -17
- py_neuromodulation/processing/projection.py +2 -2
- py_neuromodulation/processing/resample.py +6 -2
- py_neuromodulation/run_gui.py +36 -0
- py_neuromodulation/stream/__init__.py +7 -1
- py_neuromodulation/stream/backend_interface.py +47 -0
- py_neuromodulation/stream/data_processor.py +24 -3
- py_neuromodulation/stream/mnelsl_player.py +121 -21
- py_neuromodulation/stream/mnelsl_stream.py +9 -17
- py_neuromodulation/stream/settings.py +80 -34
- py_neuromodulation/stream/stream.py +83 -62
- py_neuromodulation/utils/channels.py +1 -1
- py_neuromodulation/utils/file_writer.py +110 -0
- py_neuromodulation/utils/io.py +46 -5
- py_neuromodulation/utils/perf.py +156 -0
- py_neuromodulation/utils/pydantic_extensions.py +322 -0
- py_neuromodulation/utils/types.py +33 -107
- {py_neuromodulation-0.0.7.dist-info → py_neuromodulation-0.1.1.dist-info}/METADATA +23 -4
- {py_neuromodulation-0.0.7.dist-info → py_neuromodulation-0.1.1.dist-info}/RECORD +55 -35
- {py_neuromodulation-0.0.7.dist-info → py_neuromodulation-0.1.1.dist-info}/WHEEL +1 -1
- py_neuromodulation-0.1.1.dist-info/entry_points.txt +2 -0
- {py_neuromodulation-0.0.7.dist-info → py_neuromodulation-0.1.1.dist-info}/licenses/LICENSE +0 -0
py_neuromodulation/utils/pydantic_extensions.py (new file)

```diff
@@ -0,0 +1,322 @@
+import copy
+from typing import (
+    Any,
+    get_origin,
+    get_args,
+    get_type_hints,
+    Literal,
+    cast,
+    Sequence,
+)
+from typing_extensions import Unpack, TypedDict
+from pydantic import BaseModel, GetCoreSchemaHandler, ConfigDict
+
+from pydantic_core import (
+    ErrorDetails,
+    PydanticUndefined,
+    InitErrorDetails,
+    ValidationError,
+    CoreSchema,
+    core_schema,
+)
+from pydantic.fields import FieldInfo, _FieldInfoInputs, _FromFieldInfoInputs
+from pprint import pformat
+
+
+def create_validation_error(
+    error_message: str,
+    location: list[str | int] = [],
+    title: str = "Validation Error",
+    error_type="value_error",
+) -> ValidationError:
+    """
+    Factory function to create a Pydantic v2 ValidationError.
+
+    Args:
+        error_message (str): The error message for the ValidationError.
+        loc (List[str | int], optional): The location of the error. Defaults to None.
+        title (str, optional): The title of the error. Defaults to "Validation Error".
+
+    Returns:
+        ValidationError: A Pydantic ValidationError instance.
+    """
+
+    return ValidationError.from_exception_data(
+        title=title,
+        line_errors=[
+            InitErrorDetails(
+                type=error_type,
+                loc=tuple(location),
+                input=None,
+                ctx={"error": error_message},
+            )
+        ],
+        input_type="python",
+        hide_input=False,
+    )
+
+
+class NMErrorList:
+    """Class to handle data about Pydantic errors.
+    Stores data in a list of InitErrorDetails. Errors can be accessed but not modified.
+
+    :return: _description_
+    :rtype: _type_
+    """
+
+    def __init__(
+        self, errors: Sequence[InitErrorDetails | ErrorDetails] | None = None
+    ) -> None:
+        if errors is None:
+            self.__errors: list[InitErrorDetails | ErrorDetails] = []
+        else:
+            self.__errors: list[InitErrorDetails | ErrorDetails] = [e for e in errors]
+
+    def add_error(
+        self,
+        error_message: str,
+        location: list[str | int] = [],
+        error_type="value_error",
+    ) -> None:
+        self.__errors.append(
+            InitErrorDetails(
+                type=error_type,
+                loc=tuple(location),
+                input=None,
+                ctx={"error": error_message},
+            )
+        )
+
+    def create_error(self, title: str = "Validation Error") -> ValidationError:
+        return ValidationError.from_exception_data(
+            title=title, line_errors=cast(list[InitErrorDetails], self.__errors)
+        )
+
+    def extend(self, errors: "NMErrorList"):
+        self.__errors.extend(errors.__errors)
+
+    def __iter__(self):
+        return iter(self.__errors)
+
+    def __len__(self):
+        return len(self.__errors)
+
+    def __getitem__(self, idx):
+        # Return a copy of the error to prevent modification
+        return copy.deepcopy(self.__errors[idx])
+
+    def __repr__(self):
+        return repr(self.__errors)
+
+    def __str__(self):
+        return str(self.__errors)
+
+
+class _NMExtraFieldInputs(TypedDict, total=False):
+    """Additional fields to add on top of the pydantic FieldInfo"""
+
+    custom_metadata: dict[str, Any]
+
+
+class _NMFieldInfoInputs(_FieldInfoInputs, _NMExtraFieldInputs, total=False):
+    """Combine pydantic FieldInfo inputs with PyNM additional inputs"""
+
+    pass
+
+
+class _NMFromFieldInfoInputs(_FromFieldInfoInputs, _NMExtraFieldInputs, total=False):
+    """Combine pydantic FieldInfo.from_field inputs with PyNM additional inputs"""
+
+    pass
+
+
+class NMFieldInfo(FieldInfo):
+    # Add default values for any other custom fields here
+    _default_values = {}
+
+    def __init__(self, **kwargs: Unpack[_NMFieldInfoInputs]) -> None:
+        self.sequence: bool = kwargs.pop("sequence", False)  # type: ignore
+        self.custom_metadata: dict[str, Any] = kwargs.pop("custom_metadata", {})
+        super().__init__(**kwargs)
+
+    def __get_pydantic_core_schema__(
+        self, source_type: Any, handler: GetCoreSchemaHandler
+    ) -> CoreSchema:
+        schema = handler(source_type)
+
+        if self.sequence:
+
+            def sequence_validator(v: Any) -> Any:
+                if isinstance(v, (list, tuple)):
+                    return v
+                if isinstance(v, dict) and "root" in v:
+                    return v["root"]
+                return [v]
+
+            return core_schema.no_info_before_validator_function(
+                sequence_validator, schema
+            )
+
+        return schema
+
+    @staticmethod
+    def from_field(
+        default: Any = PydanticUndefined,
+        **kwargs: Unpack[_NMFromFieldInfoInputs],
+    ) -> "NMFieldInfo":
+        if "annotation" in kwargs:
+            raise TypeError('"annotation" is not permitted as a Field keyword argument')
+        return NMFieldInfo(default=default, **kwargs)
+
+    def __repr_args__(self):
+        yield from super().__repr_args__()
+        extra_fields = get_type_hints(_NMExtraFieldInputs)
+        for field in extra_fields:
+            value = getattr(self, field)
+            yield field, value
+
+
+def NMField(
+    default: Any = PydanticUndefined,
+    **kwargs: Unpack[_NMFromFieldInfoInputs],
+) -> Any:
+    return NMFieldInfo.from_field(default=default, **kwargs)
+
+
+class NMBaseModel(BaseModel):
+    model_config = ConfigDict(validate_assignment=False, extra="allow")
+
+    def __init__(self, *args, **kwargs) -> None:
+        """Pydantic does not support positional arguments by default.
+        This is a workaround to support positional arguments for models like FrequencyRange.
+        It converts positional arguments to kwargs and then calls the base class __init__.
+        """
+
+        if not args:
+            # Simple case - just use kwargs
+            super().__init__(*args, **kwargs)
+            return
+
+        field_names = list(self.model_fields.keys())
+        # If we have more positional args than fields, that's an error
+        if len(args) > len(field_names):
+            raise ValueError(
+                f"Too many positional arguments. Expected at most {len(field_names)}, got {len(args)}"
+            )
+
+        # Convert positional args to kwargs, checking for duplicates if args:
+        complete_kwargs = {}
+        for i, arg in enumerate(args):
+            field_name = field_names[i]
+            if field_name in kwargs:
+                raise ValueError(
+                    f"Got multiple values for field '{field_name}': "
+                    f"positional argument and keyword argument"
+                )
+            complete_kwargs[field_name] = arg
+
+        # Add remaining kwargs
+        complete_kwargs.update(kwargs)
+        super().__init__(**complete_kwargs)
+
+    __init__.__pydantic_base_init__ = True  # type: ignore
+
+    def __str__(self):
+        return pformat(self.model_dump())
+
+    # def __repr__(self):
+    #     return pformat(self.model_dump())
+
+    def validate(self, context: Any | None = None) -> Any:  # type: ignore
+        return self.model_validate(self.model_dump(), context=context)
+
+    def __getitem__(self, key):
+        return getattr(self, key)
+
+    def __setitem__(self, key, value) -> None:
+        setattr(self, key, value)
+
+    @property
+    def fields(self) -> dict[str, FieldInfo | NMFieldInfo]:
+        return self.model_fields  # type: ignore
+
+    def serialize_with_metadata(self):
+        result: dict[str, Any] = {"__field_type__": self.__class__.__name__}
+
+        for field_name, field_info in self.fields.items():
+            value = getattr(self, field_name)
+            if isinstance(value, NMBaseModel):
+                result[field_name] = value.serialize_with_metadata()
+            elif isinstance(value, list):
+                result[field_name] = [
+                    item.serialize_with_metadata()
+                    if isinstance(item, NMBaseModel)
+                    else item
+                    for item in value
+                ]
+            elif isinstance(value, dict):
+                result[field_name] = {
+                    k: v.serialize_with_metadata() if isinstance(v, NMBaseModel) else v
+                    for k, v in value.items()
+                }
+            else:
+                result[field_name] = value
+
+            # Extract unit information from Annotated type
+            if isinstance(field_info, NMFieldInfo):
+                # Convert scalar value to dict with metadata
+                field_dict = {
+                    "__value__": value,
+                    # __field_type__ will be overwritte if set in custom_metadata
+                    "__field_type__": type(value).__name__,
+                    **{
+                        f"__{tag}__": value
+                        for tag, value in field_info.custom_metadata.items()
+                    },
+                }
+                # Add possible values for Literal types
+                if get_origin(field_info.annotation) is Literal:
+                    field_dict["__valid_values__"] = list(
+                        get_args(field_info.annotation)
+                    )
+
+                result[field_name] = field_dict
+        return result
+
+    @classmethod
+    def unvalidated(cls, **data: Any) -> Any:
+        def process_value(value: Any, field_type: Any) -> Any:
+            if isinstance(value, dict) and hasattr(
+                field_type, "__pydantic_core_schema__"
+            ):
+                # Recursively handle nested Pydantic models
+                return field_type.unvalidated(**value)
+            elif isinstance(value, list):
+                # Handle lists of Pydantic models
+                if hasattr(field_type, "__args__") and hasattr(
+                    field_type.__args__[0], "__pydantic_core_schema__"
+                ):
+                    return [
+                        field_type.__args__[0].unvalidated(**item)
+                        if isinstance(item, dict)
+                        else item
+                        for item in value
+                    ]
+            return value
+
+        processed_data = {}
+        for name, field in cls.model_fields.items():
+            try:
+                value = data[name]
+                processed_data[name] = process_value(value, field.annotation)
+            except KeyError:
+                if not field.is_required():
+                    processed_data[name] = copy.deepcopy(field.default)
+                else:
+                    raise TypeError(f"Missing required keyword argument {name!r}")
+
+        self = cls.__new__(cls)
+        object.__setattr__(self, "__dict__", processed_data)
+        object.__setattr__(self, "__pydantic_private__", {"extra": None})
+        object.__setattr__(self, "__pydantic_fields_set__", set(processed_data.keys()))
+        return self
```
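The new `pydantic_extensions` module centralizes error aggregation (`create_validation_error`, `NMErrorList`) and the customized field and model classes (`NMField`, `NMFieldInfo`, `NMBaseModel`). A minimal usage sketch, not taken from the package documentation and only assuming the module is importable from the installed wheel: the error list collects problems across several checks and raises them together as one pydantic `ValidationError`.

```python
# Minimal sketch: accumulate validation problems, then raise them together
# as a single pydantic ValidationError via NMErrorList (shown in the diff above).
from py_neuromodulation.utils.pydantic_extensions import NMErrorList

errors = NMErrorList()
# The messages and locations below are made-up examples, not real NMSettings checks.
errors.add_error("sampling rate must be positive", location=["sampling_rate_features_hz"])
errors.add_error("at least one feature must be enabled", location=["features"])

if len(errors) > 0:
    # create_error() wraps the collected InitErrorDetails into one ValidationError
    raise errors.create_error(title="NMSettings validation failed")
```

Collecting `InitErrorDetails` this way lets a settings object report every invalid field at once instead of failing on the first check.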
py_neuromodulation/utils/types.py

```diff
@@ -1,10 +1,13 @@
 from os import PathLike
 from math import isnan
-from typing import
-from pydantic import
-from
-from
+from typing import Literal, TYPE_CHECKING, Any
+from pydantic import BaseModel, model_validator, Field
+from .pydantic_extensions import NMBaseModel, NMField
+from abc import abstractmethod
+
 from collections.abc import Sequence
+from datetime import datetime
+
 
 if TYPE_CHECKING:
     import numpy as np
@@ -16,7 +19,7 @@ if TYPE_CHECKING:
 
 _PathLike = str | PathLike
 
-
+FEATURE_NAME = Literal[
     "raw_hjorth",
     "return_raw",
     "bandpass_filter",
@@ -33,7 +36,7 @@ FeatureName = Literal[
     "bispectrum",
 ]
 
-
+PREPROCESSOR_NAME = Literal[
     "preprocessing_filter",
     "notch_filter",
     "raw_resampling",
@@ -41,7 +44,7 @@ PreprocessorName = Literal[
     "raw_normalization",
 ]
 
-
+NORM_METHOD = Literal[
     "mean",
     "median",
     "zscore",
@@ -52,13 +55,8 @@ NormMethod = Literal[
     "minmax",
 ]
 
-###################################
-######## PROTOCOL CLASSES ########
-###################################
-
 
-
-class NMFeature(Protocol):
+class NMFeature:
     def __init__(
         self, settings: "NMSettings", ch_names: Sequence[str], sfreq: int | float
     ) -> None: ...
@@ -79,46 +77,10 @@ class NMFeature(Protocol):
         ...
 
 
-class NMPreprocessor
-    def __init__(self, sfreq: float, settings: "NMSettings") -> None: ...
-
+class NMPreprocessor:
     def process(self, data: "np.ndarray") -> "np.ndarray": ...
 
 
-###################################
-######## PYDANTIC CLASSES ########
-###################################
-
-
-class NMBaseModel(BaseModel):
-    model_config = ConfigDict(validate_assignment=False, extra="allow")
-
-    def __init__(self, *args, **kwargs) -> None:
-        if kwargs:
-            super().__init__(**kwargs)
-        else:
-            field_names = list(self.model_fields.keys())
-            kwargs = {}
-            for i in range(len(args)):
-                kwargs[field_names[i]] = args[i]
-            super().__init__(**kwargs)
-
-    def __str__(self):
-        return pformat(self.model_dump())
-
-    def __repr__(self):
-        return pformat(self.model_dump())
-
-    def validate(self) -> Any:  # type: ignore
-        return self.model_validate(self.model_dump())
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __setitem__(self, key, value) -> None:
-        setattr(self, key, value)
-
-
 class FrequencyRange(NMBaseModel):
     frequency_low_hz: float = Field(gt=0)
     frequency_high_hz: float = Field(gt=0)
@@ -144,28 +106,18 @@ class FrequencyRange(NMBaseModel):
     @model_validator(mode="after")
     def validate_range(self):
         if not (isnan(self.frequency_high_hz) or isnan(self.frequency_low_hz)):
-            assert (
-
-            )
+            assert self.frequency_high_hz > self.frequency_low_hz, (
+                "Frequency high must be greater than frequency low"
+            )
         return self
 
-    @classmethod
-    def create_from(cls, input) -> "FrequencyRange":
-        match input:
-            case FrequencyRange():
-                return input
-            case dict() if "frequency_low_hz" in input and "frequency_high_hz" in input:
-                return FrequencyRange(
-                    input["frequency_low_hz"], input["frequency_high_hz"]
-                )
-            case Sequence() if len(input) == 2:
-                return FrequencyRange(input[0], input[1])
-            case _:
-                raise ValueError("Invalid input for FrequencyRange creation.")
-
     @model_validator(mode="before")
     @classmethod
     def check_input(cls, input):
+        """Pydantic validator to convert the input to a dictionary when passed as a list
+        as we have it by default in the default_settings.yaml file
+        For example, [1,2] will be converted to {"frequency_low_hz": 1, "frequency_high_hz": 2}
+        """
         match input:
             case dict() if "frequency_low_hz" in input and "frequency_high_hz" in input:
                 return input
@@ -209,43 +161,17 @@ class BoolSelector(NMBaseModel):
         for f in cls.list_all():
            print(f)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    Args:
-        error_message (str): The error message for the ValidationError.
-        loc (List[str | int], optional): The location of the error. Defaults to None.
-        title (str, optional): The title of the error. Defaults to "Validation Error".
-        input_type (Literal["python", "json"], optional): Whether the error is for a Python object or JSON. Defaults to "python".
-        hide_input (bool, optional): Whether to hide the input value in the error message. Defaults to False.
-
-    Returns:
-        ValidationError: A Pydantic ValidationError instance.
-    """
-    if loc is None:
-        loc = []
-
-    line_errors = [
-        InitErrorDetails(
-            type="value_error", loc=tuple(loc), input=None, ctx={"error": error_message}
-        )
-    ]
-
-    return ValidationError.from_exception_data(
-        title=title,
-        line_errors=line_errors,
-        input_type=input_type,
-        hide_input=hide_input,
-    )
+
+#################
+### GUI TYPES ###
+#################
+
+
+class FileInfo(BaseModel):
+    name: str
+    path: str
+    dir: str
+    is_directory: bool
+    size: int
+    created_at: datetime
+    modified_at: datetime
```
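With `types.py` now importing `NMBaseModel` from `pydantic_extensions`, `FrequencyRange` accepts the two-element list form documented in the new `check_input` docstring as well as keyword and positional arguments. A brief sketch of the resulting call patterns (assumptions: `FrequencyRange` is importable from `py_neuromodulation.utils.types`, and the frequency values are arbitrary examples):

```python
# Sketch of the FrequencyRange input forms described in the diff above.
from py_neuromodulation.utils.types import FrequencyRange

# YAML-style two-element list, converted by the mode="before" validator
theta = FrequencyRange.model_validate([4, 8])

# Explicit keyword form
beta = FrequencyRange(frequency_low_hz=13, frequency_high_hz=35)

# Positional form, enabled by NMBaseModel.__init__ mapping args onto field order
gamma = FrequencyRange(60, 90)

# The mode="after" validator asserts frequency_high_hz > frequency_low_hz,
# so swapping the bounds raises a pydantic validation error.
```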
{py_neuromodulation-0.0.7.dist-info → py_neuromodulation-0.1.1.dist-info}/METADATA

```diff
@@ -1,9 +1,11 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: py_neuromodulation
-Version: 0.
+Version: 0.1.1
 Summary: Real-time analysis of intracranial neurophysiology recordings.
-Project-URL:
-Project-URL:
+Project-URL: Homepage, https://neuromodulation.github.io/py_neuromodulation/
+Project-URL: Documentation, https://neuromodulation.github.io/py_neuromodulation/
+Project-URL: Repository, https://github.com/neuromodulation/py_neuromodulation
+Project-URL: Issues, https://github.com/neuromodulation/py_neuromodulation/issues
 Author-email: Timon Merk <timon.merk@charite.de>
 Maintainer: Timon Merk
 License: MIT License
@@ -34,7 +36,10 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Requires-Python: >=3.11
+Requires-Dist: cbor2>=5.6.4
+Requires-Dist: fastapi
 Requires-Dist: fooof
+Requires-Dist: imbalanced-learn
 Requires-Dist: joblib>=1.3.2
 Requires-Dist: llvmlite>=0.43.0
 Requires-Dist: matplotlib>=3.9.0
@@ -43,18 +48,25 @@ Requires-Dist: mne-bids>=0.8
 Requires-Dist: mne-connectivity
 Requires-Dist: mne-lsl>=1.2.0
 Requires-Dist: mrmr-selection
+Requires-Dist: msgpack>=1.1.0
+Requires-Dist: nibabel>=5.3.2
 Requires-Dist: nolds>=0.6.1
 Requires-Dist: numba>=0.60.0
 Requires-Dist: numpy>=1.21.2
+Requires-Dist: numpy>=2.0.0
 Requires-Dist: pandas>=2.0.0
 Requires-Dist: pyarrow>=14.0.2
 Requires-Dist: pybispectra>=1.2.0
 Requires-Dist: pydantic>=2.7.3
 Requires-Dist: pyparrm
+Requires-Dist: pywebview
 Requires-Dist: scikit-learn>=0.24.2
 Requires-Dist: scikit-optimize
 Requires-Dist: scipy>=1.7.1
 Requires-Dist: seaborn>=0.11
+Requires-Dist: skops>=0.10.0
+Requires-Dist: uvicorn[standard]>=0.30.6
+Requires-Dist: uvloop; platform_system != 'Windows'
 Provides-Extra: dev
 Requires-Dist: notebook; extra == 'dev'
 Requires-Dist: pytest-cov; extra == 'dev'
@@ -81,6 +93,8 @@ Description-Content-Type: text/x-rst
 py_neuromodulation
 ==================
 
+Documentation: https://neuromodulation.github.io/py_neuromodulation/
+
 Analyzing neural data can be a troublesome, trial and error prone,
 and beginner unfriendly process. *py_neuromodulation* allows using a simple
 interface for extraction of established neurophysiological features and includes commonly applied pre -and postprocessing methods.
@@ -161,6 +175,11 @@ Contact information
 For any question or suggestion please find my contact
 information at `my GitHub profile <https://github.com/timonmerk>`_.
 
+Contributing guide
+------------------
+https://neuromodulation.github.io/py_neuromodulation/contributing.html
+
+
 References
 ----------
 
```