paradigma 0.2.0.tar.gz → 0.3.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- paradigma-0.3.1/PKG-INFO +79 -0
- paradigma-0.3.1/README.md +59 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/pyproject.toml +5 -2
- paradigma-0.3.1/src/paradigma/constants.py +65 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/feature_extraction.py +42 -17
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/gait_analysis.py +20 -18
- paradigma-0.3.1/src/paradigma/gait_analysis_config.py +266 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/heart_rate_util.py +2 -2
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/imu_preprocessing.py +32 -29
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg_preprocessing.py +14 -14
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/preprocessing_config.py +20 -15
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/util.py +4 -4
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/windowing.py +8 -6
- paradigma-0.2.0/PKG-INFO +0 -58
- paradigma-0.2.0/README.md +0 -38
- paradigma-0.2.0/src/paradigma/constants.py +0 -35
- paradigma-0.2.0/src/paradigma/gait_analysis_config.py +0 -244
- {paradigma-0.2.0 → paradigma-0.3.1}/LICENSE +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/__init__.py +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/heart_rate_analysis.py +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/heart_rate_analysis_config.py +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/classifier/LR_PPG_quality.pkl +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/classifier/LR_model.mat +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/feat_extraction/acc_feature.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/feat_extraction/peakdet.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/feat_extraction/ppg_features.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/extract_hr_segments.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/extract_overlapping_segments.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/AUTHORS.txt +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/ChangeLog.txt +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/LICENSE_BSD.txt +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/LICENSE_GPLv3.txt +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/README.txt +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/entries +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/demo_jsonlab_basic.m.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/demo_ubjson_basic.m.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example1.json.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example2.json.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example3.json.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example4.json.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_basictest.matlab.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_selftest.m.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_selftest.matlab.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_speedtest.m.svn-base +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/demo_jsonlab_basic.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/demo_ubjson_basic.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/example1.json +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/example2.json +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/example3.json +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/example4.json +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_basictest.matlab +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_selftest.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_selftest.matlab +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_speedtest.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/jsonopt.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/loadjson.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/loadubjson.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/mergestruct.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/savejson.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/saveubjson.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/jsonlab/varargin2struct.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/sample_prob_final.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/synchronization.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/glob_functions/tsdf_scan_meta.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/Long_TFD_JOT.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/PPG_TFD_HR.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/.gitignore +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/CHANGELOG.md +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/LICENCE.md +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/README.md +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/README.pdf +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_Doppler_kern.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_Doppler_lag_kern.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_lag_kern.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/dec_tfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_di_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_li_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_nonsep_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_sep_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/di_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/li_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/nonsep_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/sep_gdtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/full_tfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/load_curdir.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/pics/decimated_TFDs_examples.png +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/pics/full_TFDs_examples.png +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/check_dec_params_seq.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/dispEE.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/dispVars.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/disp_bytes.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/fold_vector_full.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/fold_vector_half.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/gen_LFM.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/get_analytic_signal.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/get_window.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/isreal_fn.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/padWin.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/hr_functions/TFD toolbox JOT/utils/vtfd.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/preprocessing/preprocessing_imu.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/ppg/preprocessing/preprocessing_ppg.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/quantification.py +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/TremorFeaturesAndClassification.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/DerivativesExtract.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/ExtractBandSignalsRMS.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/MFCCExtract.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/PSDBandPower.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/PSDEst.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/PSDExtrAxis.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/feat_extraction/PSDExtrOpt.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/preprocessing/InterpData.m +0 -0
- {paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/tremor/weekly_aggregates/WeeklyAggregates.m +0 -0
paradigma-0.3.1/PKG-INFO ADDED
@@ -0,0 +1,79 @@
+Metadata-Version: 2.1
+Name: paradigma
+Version: 0.3.1
+Summary: Paradigma - a toolbox for Digital Biomarkers for Parkinson's Disease
+License: Apache-2.0
+Author: Peter Kok
+Author-email: p.kok@esciencecenter.nl
+Requires-Python: >=3.10,<4.0
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: pandas (>=2.1.4,<3.0.0)
+Requires-Dist: pytype (>=2024.4.11,<2025.0.0)
+Requires-Dist: scikit-learn (>=1.3.2,<2.0.0)
+Requires-Dist: tsdf (>=0.5.2,<0.6.0)
+Description-Content-Type: text/markdown
+
+# paradigma
+
+| Badges | |
+|:----:|----|
+| **Packages and Releases** | [](https://github.com/biomarkersparkinson/paradigma/releases/latest) [](https://pypi.python.org/pypi/paradigma/) [](https://research-software-directory.org/software/paradigma) |
+| **Build Status** | [](https://www.python.org/downloads/) [](https://github.com/biomarkersParkinson/paradigma/actions/workflows/build-and-test.yml) [](https://github.com/biomarkersParkinson/paradigma/actions/workflows/pages/pages-build-deployment) |
+| **License** | [](https://github.com/biomarkersparkinson/paradigma/blob/main/LICENSE) |
+<!-- | **DOI** | [](https://doi.org/10.5281/zenodo.7867899) | -->
+<!-- | **Fairness** | [](https://fair-software.eu) [](https://www.bestpractices.dev/projects/8083) | -->
+
+Digital Biomarkers for Parkinson's Disease Toolbox
+
+A package ([documentation](https://biomarkersparkinson.github.io/paradigma/)) to process wearable sensor data for Parkinson's disease.
+
+## Installation
+
+The package is available in PyPi and requires [Python 3.10](https://www.python.org/downloads/) or higher. It can be installed using:
+
+```bash
+pip install paradigma
+```
+
+## Usage
+
+See our [extended documentation](https://biomarkersparkinson.github.io/paradigma/).
+
+
+## Development
+
+### Installation
+The package requires Python 3.10 or higher. Use [Poetry](https://python-poetry.org/docs/#installation) to set up the environment and install the dependencies:
+
+```bash
+poetry install
+```
+
+### Testing
+
+```bash
+poetry run pytest
+```
+
+### Building documentation
+
+```bash
+poetry run make html --directory docs/
+```
+
+## Contributing
+
+Interested in contributing? Check out the contributing guidelines. Please note that this project is released with a Code of Conduct. By contributing to this project, you agree to abide by its terms.
+
+## License
+
+`paradigma` was created by Peter Kok, Vedran Kasalica, Erik Post, Kars Veldkamp, Nienke Timmermans, Diogo Coutinho Soriano, Luc Evers. It is licensed under the terms of the Apache License 2.0 license.
+
+## Credits
+
+`paradigma` was created with [`cookiecutter`](https://cookiecutter.readthedocs.io/en/latest/) and the `py-pkgs-cookiecutter` [template](https://github.com/py-pkgs/py-pkgs-cookiecutter).
+
paradigma-0.3.1/README.md ADDED
@@ -0,0 +1,59 @@
+# paradigma
+
+| Badges | |
+|:----:|----|
+| **Packages and Releases** | [](https://github.com/biomarkersparkinson/paradigma/releases/latest) [](https://pypi.python.org/pypi/paradigma/) [](https://research-software-directory.org/software/paradigma) |
+| **Build Status** | [](https://www.python.org/downloads/) [](https://github.com/biomarkersParkinson/paradigma/actions/workflows/build-and-test.yml) [](https://github.com/biomarkersParkinson/paradigma/actions/workflows/pages/pages-build-deployment) |
+| **License** | [](https://github.com/biomarkersparkinson/paradigma/blob/main/LICENSE) |
+<!-- | **DOI** | [](https://doi.org/10.5281/zenodo.7867899) | -->
+<!-- | **Fairness** | [](https://fair-software.eu) [](https://www.bestpractices.dev/projects/8083) | -->
+
+Digital Biomarkers for Parkinson's Disease Toolbox
+
+A package ([documentation](https://biomarkersparkinson.github.io/paradigma/)) to process wearable sensor data for Parkinson's disease.
+
+## Installation
+
+The package is available in PyPi and requires [Python 3.10](https://www.python.org/downloads/) or higher. It can be installed using:
+
+```bash
+pip install paradigma
+```
+
+## Usage
+
+See our [extended documentation](https://biomarkersparkinson.github.io/paradigma/).
+
+
+## Development
+
+### Installation
+The package requires Python 3.10 or higher. Use [Poetry](https://python-poetry.org/docs/#installation) to set up the environment and install the dependencies:
+
+```bash
+poetry install
+```
+
+### Testing
+
+```bash
+poetry run pytest
+```
+
+### Building documentation
+
+```bash
+poetry run make html --directory docs/
+```
+
+## Contributing
+
+Interested in contributing? Check out the contributing guidelines. Please note that this project is released with a Code of Conduct. By contributing to this project, you agree to abide by its terms.
+
+## License
+
+`paradigma` was created by Peter Kok, Vedran Kasalica, Erik Post, Kars Veldkamp, Nienke Timmermans, Diogo Coutinho Soriano, Luc Evers. It is licensed under the terms of the Apache License 2.0 license.
+
+## Credits
+
+`paradigma` was created with [`cookiecutter`](https://cookiecutter.readthedocs.io/en/latest/) and the `py-pkgs-cookiecutter` [template](https://github.com/py-pkgs/py-pkgs-cookiecutter).
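The README above installs via pip but defers usage to the external documentation. As a quick sanity check that is not taken from the package docs, the modules touched in this diff should import cleanly after installation on Python >= 3.10:

```python
# Smoke test after `pip install paradigma`; only exercises modules that
# appear in this diff. Not part of the package documentation.
import importlib

for module in ("paradigma.constants", "paradigma.feature_extraction", "paradigma.gait_analysis"):
    importlib.import_module(module)
    print(f"imported {module}")
```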
{paradigma-0.2.0 → paradigma-0.3.1}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "paradigma"
-version = "0.2.0"
+version = "0.3.1"
 description = "Paradigma - a toolbox for Digital Biomarkers for Parkinson's Disease"
 authors = [ "Peter Kok <p.kok@esciencecenter.nl>",
             "Vedran Kasalica <v.kaslica@esciencecenter.nl>",
@@ -13,10 +13,13 @@ license = "Apache License 2.0"
 readme = "README.md"
 
 [tool.poetry.dependencies]
-python = "^3.
+python = "^3.10"
 pandas = "^2.1.4"
 scikit-learn = "^1.3.2"
 tsdf = "^0.5.2"
+pytype = "^2024.4.11"
+# for the record: pytype was installed directly with pip (in the poetry environment),
+# because poetry didn't handle the install for different CPU architectures
 
 [tool.poetry.group.testing.dependencies]
 ipykernel = "^6.27.1"
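Since the only metadata changes here are the version bump to 0.3.1, the Python floor of ^3.10, and the new pytype dependency, a plain standard-library check is enough to confirm which release is installed locally (generic Python, not part of paradigma):

```python
# Confirm the locally installed release matches the 0.3.1 version pinned above.
# importlib.metadata is standard library on the Python >= 3.10 floor declared
# in this pyproject.toml.
from importlib.metadata import version

print(version("paradigma"))  # expected: "0.3.1" after upgrading
print(version("pytype"))     # runtime dependency introduced in this release
```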
paradigma-0.3.1/src/paradigma/constants.py ADDED
@@ -0,0 +1,65 @@
+from dataclasses import dataclass
+
+
+@dataclass(frozen=True)
+class DataColumns():
+    """
+    Class containing the data channels in `tsdf`.
+    """
+    ACCELEROMETER_X : str = "accelerometer_x"
+    ACCELEROMETER_Y : str = "accelerometer_y"
+    ACCELEROMETER_Z : str = "accelerometer_z"
+    GYROSCOPE_X : str = "gyroscope_x"
+    GYROSCOPE_Y : str = "gyroscope_y"
+    GYROSCOPE_Z : str = "gyroscope_z"
+    PPG : str = "green"
+    TIME : str = "time"
+
+    # The following are used in gait analysis
+    GRAV_ACCELEROMETER_X : str = "grav_accelerometer_x"
+    GRAV_ACCELEROMETER_Y : str = "grav_accelerometer_y"
+    GRAV_ACCELEROMETER_Z : str = "grav_accelerometer_z"
+    PRED_GAIT : str = "pred_gait"
+    PRED_ARM_SWING : str = "pred_arm_swing"
+    ANGLE : str = "angle"
+    ANGLE_SMOOTH : str = "angle_smooth"
+    VELOCITY : str = "velocity"
+    SEGMENT_NR : str = "segment_nr"
+
+@dataclass(frozen=True)
+class DataUnits():
+    """
+    Class containing the data channel unit types in `tsdf`.
+    """
+    ACCELERATION: str = "m/s^2"
+    """ The acceleration is in m/s^2. """
+
+    ROTATION: str = "deg/s"
+    """ The rotation is in degrees per second. """
+
+    GRAVITY: str = "g"
+    """ The acceleration due to gravity is in g. """
+
+    POWER_SPECTRAL_DENSITY: str = "g^2/Hz"
+    """ The power spectral density is in g^2/Hz. """
+
+    FREQUENCY: str = "Hz"
+    """ The frequency is in Hz. """
+
+    NONE: str = "none"
+    """ The data channel has no unit. """
+
+
+@dataclass(frozen=True)
+class TimeUnit():
+    """
+    Class containing the `time` channel unit types in `tsdf`.
+    """
+    RELATIVE_MS : str = "relative_ms"
+    """ The time is relative to the start time in milliseconds. """
+    ABSOLUTE_MS : str = "absolute_ms"
+    """ The time is absolute in milliseconds. """
+    DIFFERENCE_MS : str = "difference_ms"
+    """ The time is the difference between consecutive samples in milliseconds. """
+
+UNIX_TICKS_MS: int = 1000
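The three frozen dataclasses above act as namespaces for column names and unit strings; the gait code further down refers to them as, for example, `DataColumns.GYROSCOPE_Y`. A short illustration with a synthetic DataFrame (the sample values are made up):

```python
# Illustration of the constants defined above; the DataFrame content is synthetic.
import pandas as pd

from paradigma.constants import DataColumns, DataUnits, UNIX_TICKS_MS

df = pd.DataFrame({
    DataColumns.TIME: [0, 10, 20],                    # e.g. TimeUnit.RELATIVE_MS
    DataColumns.ACCELEROMETER_X: [0.01, 0.02, 0.00],  # e.g. DataUnits.GRAVITY
})

print(DataColumns.PPG)     # "green"
print(DataUnits.ROTATION)  # "deg/s"
print(UNIX_TICKS_MS)       # 1000

# frozen=True protects instances from mutation:
# DataColumns().TIME = "t"   # would raise dataclasses.FrozenInstanceError
```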
{paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/feature_extraction.py
@@ -1,3 +1,4 @@
+from typing import List
 import numpy as np
 import pandas as pd
 from sklearn.decomposition import PCA
@@ -6,6 +7,9 @@ from scipy import signal, fft
 from scipy.integrate import cumulative_trapezoid
 from scipy.signal import find_peaks
 
+from paradigma.constants import DataColumns
+from paradigma.gait_analysis_config import IMUConfig
+
 
 def generate_statistics(
     sensor_col: pd.Series,
@@ -39,7 +43,7 @@ def generate_statistics(
 
 def generate_std_norm(
     df: pd.DataFrame,
-    cols:
+    cols: List[str],
 ) -> pd.Series:
     """Generate the standard deviation of the norm of the accelerometer axes.
 
@@ -47,7 +51,7 @@ def generate_std_norm(
     ----------
     df: pd.DataFrame
         The dataframe containing the accelerometer axes
-    cols:
+    cols: List[str]
         The names of the columns containing the accelerometer axes
 
     Returns
@@ -125,8 +129,8 @@ def signal_to_ffts(
 
 def compute_power_in_bandwidth(
     sensor_col: list,
-    fmin:
-    fmax:
+    fmin: float,
+    fmax: float,
     sampling_frequency: int = 100,
     window_type: str = 'hann',
 ) -> float:
@@ -140,9 +144,9 @@ def compute_power_in_bandwidth(
     sensor_col: list
         The sensor column to be transformed (e.g. x-axis of accelerometer). This corresponds to a single window, which is a single row of the dataframe,
        and contains values of individual timestamps composing the window.
-    fmin:
+    fmin: float
        The lower bound of the frequency band
-    fmax:
+    fmax: float
        The upper bound of the frequency band
     sampling_frequency: int
        The sampling frequency of the signal (default: 100)
@@ -162,10 +166,10 @@ def compute_power_in_bandwidth(
 
 def compute_perc_power(
     sensor_col: list,
-    fmin_band:
-    fmax_band:
-    fmin_total:
-    fmax_total:
+    fmin_band: float,
+    fmax_band: float,
+    fmin_total: float = 0,
+    fmax_total: float = 100,
     sampling_frequency: int = 100,
     window_type: str = 'hann'
 ) -> float:
@@ -177,13 +181,13 @@ def compute_perc_power(
     ----------
     sensor_col: list
         The sensor column to be transformed (e.g. x-axis of accelerometer). This corresponds to a single window, which is a single row of the dataframe
-    fmin_band:
+    fmin_band: float
        The lower bound of the frequency band
-    fmax_band:
+    fmax_band: float
        The upper bound of the frequency band
-    fmin_total:
+    fmin_total: float
        The lower bound of the frequency spectrum (default: 0)
-    fmax_total:
+    fmax_total: float
        The upper bound of the frequency spectrum (default: 100)
     sampling_frequency: int
        The sampling frequency of the signal (default: 100)
@@ -217,8 +221,8 @@ def compute_perc_power(
 def get_dominant_frequency(
     signal_ffts: list,
     signal_freqs: list,
-    fmin:
-    fmax:
+    fmin: float,
+    fmax: float
 ) -> float:
     """Note: signal_ffts and signal_freqs are single cells (which corresponds to a single window) of signal_ffts and signal_freqs, as it is used with apply function.
 
@@ -602,7 +606,28 @@ def extract_peak_angular_velocity(
     return
 
 
-def extract_temporal_domain_features(config, df_windowed, l_gravity_stats=['mean', 'std']):
+def extract_temporal_domain_features(config: IMUConfig, df_windowed:pd.DataFrame, l_gravity_stats=['mean', 'std']) -> pd.DataFrame:
+    """
+    Compute temporal domain features for the accelerometer signal. The features are added to the dataframe. Therefore the original dataframe is modified, and the modified dataframe is returned.
+
+    Parameters
+    ----------
+
+    config: GaitFeatureExtractionConfig
+        The configuration object containing the parameters for the feature extraction
+
+    df_windowed: pd.DataFrame
+        The dataframe containing the windowed accelerometer signal
+
+    l_gravity_stats: list, optional
+        The statistics to be computed for the gravity component of the accelerometer signal (default: ['mean', 'std'])
+
+    Returns
+    -------
+    pd.DataFrame
+        The dataframe with the added temporal domain features.
+    """
+
     # compute the mean and standard deviation of the gravity component of the acceleration signal for each axis
     for col in config.l_gravity_cols:
         for stat in l_gravity_stats:
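The annotations added above only pin down types (fmin/fmax as float, a windowed list of samples in, a scalar power out); they do not show the computation itself. As a generic illustration of what a function with the `compute_power_in_bandwidth` signature computes, and explicitly not paradigma's actual implementation, band power for one window can be obtained from a periodogram like this:

```python
# Generic band-power sketch matching the compute_power_in_bandwidth signature
# annotated above; it is NOT the paradigma implementation.
import numpy as np
from scipy import signal
from scipy.integrate import trapezoid


def band_power(sensor_col: list, fmin: float, fmax: float,
               sampling_frequency: int = 100, window_type: str = 'hann') -> float:
    # Power spectral density of the windowed signal.
    freqs, psd = signal.periodogram(np.asarray(sensor_col, dtype=float),
                                    fs=sampling_frequency, window=window_type)
    # Integrate the PSD over the requested frequency band.
    in_band = (freqs >= fmin) & (freqs <= fmax)
    return float(trapezoid(psd[in_band], freqs[in_band]))


# Example: a 4 Hz sine sampled at 100 Hz for one 6-second window.
t = np.arange(0, 6, 0.01)
window = np.sin(2 * np.pi * 4 * t)
print(band_power(window, fmin=3.0, fmax=7.0))    # captures nearly all the power
print(band_power(window, fmin=12.0, fmax=25.0))  # close to zero
```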
{paradigma-0.2.0 → paradigma-0.3.1}/src/paradigma/gait_analysis.py
@@ -1,6 +1,8 @@
 import os
 import numpy as np
 import pandas as pd
+from pathlib import Path
+from typing import Union
 
 import tsdf
 
@@ -16,7 +18,7 @@ from paradigma.windowing import tabulate_windows, create_segments, discard_segme
 from paradigma.util import get_end_iso8601, write_data, read_metadata
 
 
-def extract_gait_features(input_path: str, output_path: str, config: GaitFeatureExtractionConfig) -> None:
+def extract_gait_features(input_path: Union[str, Path], output_path: Union[str, Path], config: GaitFeatureExtractionConfig) -> None:
     # load data
     metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
     df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
@@ -57,7 +59,7 @@ def extract_gait_features(input_path: str, output_path: str, config: GaitFeature
     write_data(metadata_time, metadata_samples, output_path, 'gait_meta.json', df_windowed)
 
 
-def detect_gait(input_path: str, output_path: str, path_to_classifier_input: str, config: GaitDetectionConfig) -> None:
+def detect_gait(input_path: Union[str, Path], output_path: Union[str, Path], path_to_classifier_input: Union[str, Path], config: GaitDetectionConfig) -> None:
 
     # Load the data
     metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
@@ -69,12 +71,12 @@ def detect_gait(input_path: str, output_path: str, path_to_classifier_input: str
         threshold = float(f.read())
 
     # Prepare the data
-    clf.feature_names_in_ = [f'{x}_power_below_gait' for x in config.
-                            [f'{x}_power_gait' for x in config.
-                            [f'{x}_power_tremor' for x in config.
-                            [f'{x}_power_above_tremor' for x in config.
-                            ['std_norm_acc'] + [f'cc_{i}_accelerometer' for i in range(1, 13)] + [f'grav_{x}_{y}' for x in config.
-                            [f'{x}_dominant_frequency' for x in config.
+    clf.feature_names_in_ = [f'{x}_power_below_gait' for x in config.l_accelerometer_cols] + \
+                            [f'{x}_power_gait' for x in config.l_accelerometer_cols] + \
+                            [f'{x}_power_tremor' for x in config.l_accelerometer_cols] + \
+                            [f'{x}_power_above_tremor' for x in config.l_accelerometer_cols] + \
+                            ['std_norm_acc'] + [f'cc_{i}_accelerometer' for i in range(1, 13)] + [f'grav_{x}_{y}' for x in config.l_accelerometer_cols for y in ['mean', 'std']] + \
+                            [f'{x}_dominant_frequency' for x in config.l_accelerometer_cols]
     X = df.loc[:, clf.feature_names_in_]
 
     # Make prediction
@@ -98,7 +100,7 @@ def detect_gait(input_path: str, output_path: str, path_to_classifier_input: str
     write_data(metadata_time, metadata_samples, output_path, 'gait_meta.json', df)
 
 
-def extract_arm_swing_features(input_path: str, output_path: str, config: ArmSwingFeatureExtractionConfig) -> None:
+def extract_arm_swing_features(input_path: Union[str, Path], output_path: Union[str, Path], config: ArmSwingFeatureExtractionConfig) -> None:
     # load accelerometer and gyroscope data
     l_dfs = []
     for sensor in ['accelerometer', 'gyroscope']:
@@ -120,7 +122,7 @@ def extract_arm_swing_features(input_path: str, output_path: str, config: ArmSwi
     # perform principal component analysis on the gyroscope signals to obtain the angular velocity in the
     # direction of the swing of the arm
     df[config.velocity_colname] = pca_transform_gyroscope(
-        df=df, 
+        df=df,
         y_gyro_colname=DataColumns.GYROSCOPE_Y,
         z_gyro_colname=DataColumns.GYROSCOPE_Z,
         pred_gait_colname=config.pred_gait_colname
@@ -281,7 +283,7 @@ def extract_arm_swing_features(input_path: str, output_path: str, config: ArmSwi
     write_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df_windowed)
 
 
-def detect_arm_swing(input_path: str, output_path: str, path_to_classifier_input: str, config: ArmSwingDetectionConfig) -> None:
+def detect_arm_swing(input_path: Union[str, Path], output_path: Union[str, Path], path_to_classifier_input: Union[str, Path], config: ArmSwingDetectionConfig) -> None:
     # Load the data
     metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
     df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
@@ -290,15 +292,15 @@ def detect_arm_swing(input_path: str, output_path: str, path_to_classifier_input
     clf = pd.read_pickle(os.path.join(path_to_classifier_input, config.classifier_file_name))
 
     # Prepare the data
-    clf.feature_names_in_ = ['std_norm_acc'] + [f'{x}_power_below_gait' for x in config.
-                            [f'{x}_power_gait' for x in config.
-                            [f'{x}_power_tremor' for x in config.
-                            [f'{x}_power_above_tremor' for x in config.
+    clf.feature_names_in_ = ['std_norm_acc'] + [f'{x}_power_below_gait' for x in config.l_accelerometer_cols] + \
+                            [f'{x}_power_gait' for x in config.l_accelerometer_cols] + \
+                            [f'{x}_power_tremor' for x in config.l_accelerometer_cols] + \
+                            [f'{x}_power_above_tremor' for x in config.l_accelerometer_cols] + \
                             [f'cc_{i}_accelerometer' for i in range(1, 13)] + [f'cc_{i}_gyroscope' for i in range(1, 13)] + \
-                            [f'grav_{x}_mean' for x in config.
+                            [f'grav_{x}_mean' for x in config.l_accelerometer_cols] + [f'grav_{x}_std' for x in config.l_accelerometer_cols] + \
                             ['range_of_motion', 'forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean', 'forward_peak_ang_vel_std',
                              'backward_peak_ang_vel_std', 'angle_perc_power', 'angle_dominant_frequency'] + \
-                            [f'{x}_dominant_frequency' for x in config.
+                            [f'{x}_dominant_frequency' for x in config.l_accelerometer_cols]
 
     X = df.loc[:, clf.feature_names_in_]
 
@@ -323,7 +325,7 @@ def detect_arm_swing(input_path: str, output_path: str, path_to_classifier_input
     write_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df)
 
 
-def quantify_arm_swing(path_to_feature_input: str, path_to_prediction_input: str, output_path: str, config: ArmSwingQuantificationConfig) -> None:
+def quantify_arm_swing(path_to_feature_input: Union[str, Path], path_to_prediction_input: Union[str, Path], output_path: Union[str, Path], config: ArmSwingQuantificationConfig) -> None:
     # Load the features & predictions
     metadata_time, metadata_samples = read_metadata(path_to_feature_input, config.meta_filename, config.time_filename, config.values_filename)
     df_features = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)