paradigma 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. paradigma/assets/gait_detection_clf_package.pkl +0 -0
  2. paradigma/assets/gait_filtering_clf_package.pkl +0 -0
  3. paradigma/assets/ppg_quality_clf_package.pkl +0 -0
  4. paradigma/assets/tremor_detection_clf_package.pkl +0 -0
  5. paradigma/classification.py +115 -0
  6. paradigma/config.py +314 -0
  7. paradigma/constants.py +48 -7
  8. paradigma/feature_extraction.py +811 -547
  9. paradigma/pipelines/__init__.py +0 -0
  10. paradigma/pipelines/gait_pipeline.py +727 -0
  11. paradigma/pipelines/heart_rate_pipeline.py +426 -0
  12. paradigma/pipelines/heart_rate_utils.py +780 -0
  13. paradigma/pipelines/tremor_pipeline.py +299 -0
  14. paradigma/preprocessing.py +363 -0
  15. paradigma/segmenting.py +396 -0
  16. paradigma/testing.py +416 -0
  17. paradigma/util.py +393 -16
  18. {paradigma-0.3.1.dist-info → paradigma-0.4.0.dist-info}/METADATA +58 -14
  19. paradigma-0.4.0.dist-info/RECORD +22 -0
  20. {paradigma-0.3.1.dist-info → paradigma-0.4.0.dist-info}/WHEEL +1 -1
  21. paradigma/gait_analysis.py +0 -415
  22. paradigma/gait_analysis_config.py +0 -266
  23. paradigma/heart_rate_analysis.py +0 -127
  24. paradigma/heart_rate_analysis_config.py +0 -9
  25. paradigma/heart_rate_util.py +0 -173
  26. paradigma/imu_preprocessing.py +0 -232
  27. paradigma/ppg/classifier/LR_PPG_quality.pkl +0 -0
  28. paradigma/ppg/classifier/LR_model.mat +0 -0
  29. paradigma/ppg/feat_extraction/acc_feature.m +0 -20
  30. paradigma/ppg/feat_extraction/peakdet.m +0 -64
  31. paradigma/ppg/feat_extraction/ppg_features.m +0 -53
  32. paradigma/ppg/glob_functions/extract_hr_segments.m +0 -37
  33. paradigma/ppg/glob_functions/extract_overlapping_segments.m +0 -23
  34. paradigma/ppg/glob_functions/jsonlab/AUTHORS.txt +0 -41
  35. paradigma/ppg/glob_functions/jsonlab/ChangeLog.txt +0 -74
  36. paradigma/ppg/glob_functions/jsonlab/LICENSE_BSD.txt +0 -25
  37. paradigma/ppg/glob_functions/jsonlab/LICENSE_GPLv3.txt +0 -699
  38. paradigma/ppg/glob_functions/jsonlab/README.txt +0 -394
  39. paradigma/ppg/glob_functions/jsonlab/examples/.svn/entries +0 -368
  40. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/demo_jsonlab_basic.m.svn-base +0 -180
  41. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/demo_ubjson_basic.m.svn-base +0 -180
  42. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example1.json.svn-base +0 -23
  43. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example2.json.svn-base +0 -22
  44. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example3.json.svn-base +0 -11
  45. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example4.json.svn-base +0 -34
  46. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_basictest.matlab.svn-base +0 -662
  47. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_selftest.m.svn-base +0 -27
  48. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_selftest.matlab.svn-base +0 -144
  49. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_speedtest.m.svn-base +0 -21
  50. paradigma/ppg/glob_functions/jsonlab/examples/demo_jsonlab_basic.m +0 -180
  51. paradigma/ppg/glob_functions/jsonlab/examples/demo_ubjson_basic.m +0 -180
  52. paradigma/ppg/glob_functions/jsonlab/examples/example1.json +0 -23
  53. paradigma/ppg/glob_functions/jsonlab/examples/example2.json +0 -22
  54. paradigma/ppg/glob_functions/jsonlab/examples/example3.json +0 -11
  55. paradigma/ppg/glob_functions/jsonlab/examples/example4.json +0 -34
  56. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_basictest.matlab +0 -662
  57. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_selftest.m +0 -27
  58. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_selftest.matlab +0 -144
  59. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_speedtest.m +0 -21
  60. paradigma/ppg/glob_functions/jsonlab/jsonopt.m +0 -32
  61. paradigma/ppg/glob_functions/jsonlab/loadjson.m +0 -566
  62. paradigma/ppg/glob_functions/jsonlab/loadubjson.m +0 -528
  63. paradigma/ppg/glob_functions/jsonlab/mergestruct.m +0 -33
  64. paradigma/ppg/glob_functions/jsonlab/savejson.m +0 -475
  65. paradigma/ppg/glob_functions/jsonlab/saveubjson.m +0 -504
  66. paradigma/ppg/glob_functions/jsonlab/varargin2struct.m +0 -40
  67. paradigma/ppg/glob_functions/sample_prob_final.m +0 -49
  68. paradigma/ppg/glob_functions/synchronization.m +0 -76
  69. paradigma/ppg/glob_functions/tsdf_scan_meta.m +0 -22
  70. paradigma/ppg/hr_functions/Long_TFD_JOT.m +0 -37
  71. paradigma/ppg/hr_functions/PPG_TFD_HR.m +0 -59
  72. paradigma/ppg/hr_functions/TFD toolbox JOT/.gitignore +0 -4
  73. paradigma/ppg/hr_functions/TFD toolbox JOT/CHANGELOG.md +0 -23
  74. paradigma/ppg/hr_functions/TFD toolbox JOT/LICENCE.md +0 -27
  75. paradigma/ppg/hr_functions/TFD toolbox JOT/README.md +0 -251
  76. paradigma/ppg/hr_functions/TFD toolbox JOT/README.pdf +0 -0
  77. paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_Doppler_kern.m +0 -142
  78. paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_Doppler_lag_kern.m +0 -314
  79. paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_lag_kern.m +0 -123
  80. paradigma/ppg/hr_functions/TFD toolbox JOT/dec_tfd.m +0 -154
  81. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_di_gdtfd.m +0 -194
  82. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_li_gdtfd.m +0 -200
  83. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_nonsep_gdtfd.m +0 -229
  84. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_sep_gdtfd.m +0 -241
  85. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/di_gdtfd.m +0 -157
  86. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/li_gdtfd.m +0 -190
  87. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/nonsep_gdtfd.m +0 -196
  88. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/sep_gdtfd.m +0 -199
  89. paradigma/ppg/hr_functions/TFD toolbox JOT/full_tfd.m +0 -144
  90. paradigma/ppg/hr_functions/TFD toolbox JOT/load_curdir.m +0 -13
  91. paradigma/ppg/hr_functions/TFD toolbox JOT/pics/decimated_TFDs_examples.png +0 -0
  92. paradigma/ppg/hr_functions/TFD toolbox JOT/pics/full_TFDs_examples.png +0 -0
  93. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/check_dec_params_seq.m +0 -79
  94. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/dispEE.m +0 -9
  95. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/dispVars.m +0 -26
  96. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/disp_bytes.m +0 -25
  97. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/fold_vector_full.m +0 -40
  98. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/fold_vector_half.m +0 -34
  99. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/gen_LFM.m +0 -29
  100. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/get_analytic_signal.m +0 -76
  101. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/get_window.m +0 -176
  102. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/isreal_fn.m +0 -11
  103. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/padWin.m +0 -97
  104. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/vtfd.m +0 -149
  105. paradigma/ppg/preprocessing/preprocessing_imu.m +0 -15
  106. paradigma/ppg/preprocessing/preprocessing_ppg.m +0 -13
  107. paradigma/ppg_preprocessing.py +0 -313
  108. paradigma/preprocessing_config.py +0 -69
  109. paradigma/quantification.py +0 -58
  110. paradigma/tremor/TremorFeaturesAndClassification.m +0 -345
  111. paradigma/tremor/feat_extraction/DerivativesExtract.m +0 -22
  112. paradigma/tremor/feat_extraction/ExtractBandSignalsRMS.m +0 -72
  113. paradigma/tremor/feat_extraction/MFCCExtract.m +0 -100
  114. paradigma/tremor/feat_extraction/PSDBandPower.m +0 -52
  115. paradigma/tremor/feat_extraction/PSDEst.m +0 -63
  116. paradigma/tremor/feat_extraction/PSDExtrAxis.m +0 -88
  117. paradigma/tremor/feat_extraction/PSDExtrOpt.m +0 -95
  118. paradigma/tremor/preprocessing/InterpData.m +0 -32
  119. paradigma/tremor/weekly_aggregates/WeeklyAggregates.m +0 -295
  120. paradigma/windowing.py +0 -219
  121. paradigma-0.3.1.dist-info/RECORD +0 -108
  122. {paradigma-0.3.1.dist-info → paradigma-0.4.0.dist-info}/LICENSE +0 -0
paradigma/testing.py ADDED
@@ -0,0 +1,416 @@
1
+ import json
2
+ import numpy as np
3
+ import os
4
+ import pandas as pd
5
+ from pathlib import Path
6
+ import tsdf
7
+ from typing import List
8
+
9
+ from paradigma.classification import ClassifierPackage
10
+ from paradigma.config import IMUConfig, PPGConfig, GaitConfig, TremorConfig, HeartRateConfig
11
+ from paradigma.constants import DataColumns, TimeUnit
12
+ from paradigma.pipelines.gait_pipeline import extract_gait_features, detect_gait, \
13
+ extract_arm_activity_features, filter_gait
14
+ from paradigma.pipelines.tremor_pipeline import extract_tremor_features, detect_tremor, \
15
+ aggregate_tremor
16
+ from paradigma.pipelines.heart_rate_pipeline import extract_signal_quality_features, signal_quality_classification, \
17
+ aggregate_heart_rate
18
+ from paradigma.preprocessing import preprocess_imu_data, preprocess_ppg_data
19
+ from paradigma.util import read_metadata, write_df_data, get_end_iso8601
20
+
21
+
22
def preprocess_imu_data_io(path_to_input: str | Path, path_to_output: str | Path,
                           config: IMUConfig, sensor: str, watch_side: str) -> None:
    """
    Load raw IMU data from TSDF, preprocess it, and store one TSDF output per sensor.

    Parameters
    ----------
    path_to_input : str | Path
        Directory containing the raw IMU TSDF files named in `config`.
    path_to_output : str | Path
        Directory where the preprocessed accelerometer/gyroscope TSDF files are written.
    config : IMUConfig
        Configuration holding the input filenames and rotation units.
    sensor : str
        Sensor selector forwarded to `preprocess_imu_data`.
    watch_side : str
        Wrist the watch was worn on, forwarded to `preprocess_imu_data`.

    Returns
    -------
    None
    """
    # Load data
    metadata_time, metadata_values = read_metadata(str(path_to_input), str(config.meta_filename),
                                                   str(config.time_filename), str(config.values_filename))
    df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_values], tsdf.constants.ConcatenationType.columns)

    # Preprocess data
    df = preprocess_imu_data(df=df, config=config, sensor=sensor, watch_side=watch_side)

    # Store data. The loop variable previously shadowed the `sensor` parameter;
    # renamed to `sensor_name` (behavior unchanged — the parameter is not read
    # after preprocessing).
    for sensor_name, units in zip(['accelerometer', 'gyroscope'], ['g', config.rotation_units]):
        # Only write a sensor file if the preprocessed frame has matching columns
        if any(sensor_name in col for col in df.columns):
            df_sensor = df[[DataColumns.TIME] + [x for x in df.columns if sensor_name in x]]

            metadata_values.channels = [x for x in df.columns if sensor_name in x]
            metadata_values.units = list(np.repeat(units, len(metadata_values.channels)))
            metadata_values.scale_factors = []
            metadata_values.file_name = f'{sensor_name}_values.bin'

            metadata_time.file_name = f'{sensor_name}_time.bin'
            metadata_time.units = [TimeUnit.RELATIVE_S]

            write_df_data(metadata_time, metadata_values, path_to_output, f'{sensor_name}_meta.json', df_sensor)
46
+
47
+
48
def preprocess_ppg_data_io(path_to_input_ppg: str | Path, path_to_input_imu: str | Path,
                           output_path: str | Path, ppg_config: PPGConfig,
                           imu_config: IMUConfig) -> None:
    """
    Preprocess PPG and accelerometer data and store the aligned results as TSDF.

    Parameters
    ----------
    path_to_input_ppg : str | Path
        Path to the PPG data.
    path_to_input_imu : str | Path
        Path to the IMU data.
    output_path : str | Path
        Path to store the preprocessed data.
    ppg_config : PPGConfig
        Configuration object for PPG preprocessing.
    imu_config : IMUConfig
        Configuration object for IMU preprocessing.

    Returns
    -------
    None
    """
    # Load the raw PPG binaries
    ppg_meta_time, ppg_meta_values = read_metadata(path_to_input_ppg, ppg_config.meta_filename,
                                                   ppg_config.time_filename, ppg_config.values_filename)
    df_ppg = tsdf.load_dataframe_from_binaries([ppg_meta_time, ppg_meta_values], tsdf.constants.ConcatenationType.columns)

    # Load the raw IMU binaries
    imu_meta_time, imu_meta_values = read_metadata(path_to_input_imu, imu_config.meta_filename,
                                                   imu_config.time_filename, imu_config.values_filename)
    df_imu = tsdf.load_dataframe_from_binaries([imu_meta_time, imu_meta_values], tsdf.constants.ConcatenationType.columns)

    # Only the accelerometer channels are needed; discard the gyroscope ones
    df_acc = df_imu.drop(columns=df_imu.filter(regex='^gyroscope_').columns)

    # Resample, filter, and align both modalities
    df_ppg_proc, df_acc_proc = preprocess_ppg_data(
        df_ppg=df_ppg,
        df_acc=df_acc,
        ppg_config=ppg_config,
        imu_config=imu_config,
        start_time_ppg=ppg_meta_time.start_iso8601,
        start_time_imu=imu_meta_time.start_iso8601
    )

    # Persist the accelerometer output
    imu_meta_values.channels = list(imu_config.d_channels_accelerometer.keys())
    imu_meta_values.units = list(imu_config.d_channels_accelerometer.values())
    imu_meta_values.file_name = 'accelerometer_values.bin'
    imu_meta_time.units = [TimeUnit.ABSOLUTE_MS]
    imu_meta_time.file_name = 'accelerometer_time.bin'
    write_df_data(imu_meta_time, imu_meta_values, output_path, 'accelerometer_meta.json', df_acc_proc)

    # Persist the PPG output
    ppg_meta_values.channels = list(ppg_config.d_channels_ppg.keys())
    ppg_meta_values.units = list(ppg_config.d_channels_ppg.values())
    ppg_meta_values.file_name = 'PPG_values.bin'
    ppg_meta_time.units = [TimeUnit.ABSOLUTE_MS]
    ppg_meta_time.file_name = 'PPG_time.bin'
    write_df_data(ppg_meta_time, ppg_meta_values, output_path, 'PPG_meta.json', df_ppg_proc)
111
+
112
+
113
def extract_gait_features_io(
    config: GaitConfig,
    path_to_input: str | Path,
    path_to_output: str | Path
) -> None:
    """Load preprocessed IMU data, extract gait features per window, and store them as TSDF."""
    # Read the preprocessed input binaries into one dataframe
    meta_time, meta_values = read_metadata(path_to_input, config.meta_filename, config.time_filename, config.values_filename)
    df = tsdf.load_dataframe_from_binaries([meta_time, meta_values], tsdf.constants.ConcatenationType.columns)

    # Window the signal and compute the gait features
    df_features = extract_gait_features(df=df, config=config)

    # Recording end = start of the last window + the window length
    final_window_start = df_features[DataColumns.TIME].iloc[-1]
    end_iso8601 = get_end_iso8601(
        start_iso8601=meta_time.start_iso8601,
        window_length_seconds=int(final_window_start + config.window_length_s)
    )

    # Describe the output binaries before writing
    meta_values.file_name = 'gait_values.bin'
    meta_values.end_iso8601 = end_iso8601
    meta_values.channels = list(config.d_channels_values.keys())
    meta_values.units = list(config.d_channels_values.values())

    meta_time.file_name = 'gait_time.bin'
    meta_time.end_iso8601 = end_iso8601
    meta_time.channels = [DataColumns.TIME]
    meta_time.units = [TimeUnit.RELATIVE_S]

    write_df_data(meta_time, meta_values, path_to_output, 'gait_meta.json', df_features)
141
+
142
+
143
def detect_gait_io(
    config: GaitConfig,
    path_to_input: str | Path,
    path_to_output: str | Path,
    full_path_to_classifier_package: str | Path,
) -> None:
    """Run the gait detection classifier on stored gait features and persist the per-window probabilities as TSDF."""
    # Point the config at the gait feature filenames, then load them
    config.set_filenames('gait')
    meta_time, meta_values = read_metadata(path_to_input, config.meta_filename, config.time_filename, config.values_filename)
    df = tsdf.load_dataframe_from_binaries([meta_time, meta_values], tsdf.constants.ConcatenationType.columns)

    # Classify each window and attach the probability column
    package = ClassifierPackage.load(full_path_to_classifier_package)
    df[DataColumns.PRED_GAIT_PROBA] = detect_gait(df=df, clf_package=package)

    # Describe the output binaries before writing
    meta_values.file_name = 'gait_values.bin'
    meta_values.channels = [DataColumns.PRED_GAIT_PROBA]
    meta_values.units = ['probability']

    meta_time.file_name = 'gait_time.bin'
    meta_time.channels = [DataColumns.TIME]
    meta_time.units = [TimeUnit.RELATIVE_S]

    write_df_data(meta_time, meta_values, path_to_output, 'gait_meta.json', df)
174
+
175
+
176
def extract_arm_activity_features_io(
    config: GaitConfig,
    path_to_timestamp_input: str | Path,
    path_to_prediction_input: str | Path,
    full_path_to_classifier_package: str | Path,
    path_to_output: str | Path
) -> None:
    """
    Extract arm activity features from stored IMU data and gait predictions, and save them as TSDF.

    Loads accelerometer and gyroscope timestamp data, merges them on time, loads the
    per-window gait probability predictions, then computes arm activity features for
    the windows whose gait probability exceeds the classifier package's threshold.

    Parameters
    ----------
    config : GaitConfig
        Configuration used to select the sensor filenames while loading.
        NOTE(review): this object is rebound to a fresh ``GaitConfig(step='arm_activity')``
        before feature extraction (see below), so any other settings the caller put on it
        are discarded — confirm this is intended.
    path_to_timestamp_input : str | Path
        Directory containing the accelerometer/gyroscope TSDF files.
    path_to_prediction_input : str | Path
        Directory containing the gait prediction TSDF files.
    full_path_to_classifier_package : str | Path
        Path to the classifier package supplying the gait-probability threshold.
    path_to_output : str | Path
        Directory where the arm activity feature TSDF files are written.

    Returns
    -------
    None
    """
    # Load accelerometer and gyroscope data
    dfs = []
    for sensor in ['accelerometer', 'gyroscope']:
        config.set_sensor(sensor)
        meta_ts_filename = f'{sensor}_meta.json'
        values_ts_filename = f'{sensor}_values.bin'
        time_ts_filename = f'{sensor}_time.bin'

        metadata_ts_dict = tsdf.load_metadata_from_path(os.path.join(path_to_timestamp_input, meta_ts_filename))
        metadata_ts_time = metadata_ts_dict[time_ts_filename]
        metadata_ts_values = metadata_ts_dict[values_ts_filename]
        dfs.append(tsdf.load_dataframe_from_binaries([metadata_ts_time, metadata_ts_values], tsdf.constants.ConcatenationType.columns))

    # Inner-join the two sensors on the shared time column
    df_ts = pd.merge(dfs[0], dfs[1], on=DataColumns.TIME)

    # Load gait predictions
    meta_pred_filename = 'gait_meta.json'
    values_pred_filename = 'gait_values.bin'
    time_pred_filename = 'gait_time.bin'

    metadata_pred_dict = tsdf.load_metadata_from_path(os.path.join(path_to_prediction_input, meta_pred_filename))
    metadata_pred_time = metadata_pred_dict[time_pred_filename]
    metadata_pred_values = metadata_pred_dict[values_pred_filename]

    df_pred_gait = tsdf.load_dataframe_from_binaries([metadata_pred_time, metadata_pred_values], tsdf.constants.ConcatenationType.columns)

    # The package supplies the probability threshold used to select gait windows
    clf_package = ClassifierPackage.load(full_path_to_classifier_package)

    # Extract arm activity features
    # NOTE(review): this rebinds `config`, discarding the caller-supplied argument
    # (which was only used above via set_sensor) — verify this is intentional.
    config = GaitConfig(step='arm_activity')
    df_features = extract_arm_activity_features(
        config=config,
        df_timestamps=df_ts,
        df_predictions=df_pred_gait,
        threshold=clf_package.threshold
    )

    # Recording end = start of the last feature window + the window length
    end_iso8601 = get_end_iso8601(metadata_ts_values.start_iso8601, df_features[DataColumns.TIME][-1:].values[0] + config.window_length_s)

    # Reuse the last-loaded sensor metadata objects to describe the output binaries
    metadata_ts_values.end_iso8601 = end_iso8601
    metadata_ts_values.file_name = 'arm_activity_values.bin'
    metadata_ts_time.end_iso8601 = end_iso8601
    metadata_ts_time.file_name = 'arm_activity_time.bin'

    metadata_ts_values.channels = list(config.d_channels_values.keys())
    metadata_ts_values.units = list(config.d_channels_values.values())

    metadata_ts_time.channels = [DataColumns.TIME]
    metadata_ts_time.units = [TimeUnit.RELATIVE_S]

    write_df_data(metadata_ts_time, metadata_ts_values, path_to_output, 'arm_activity_meta.json', df_features)
234
+
235
+
236
def filter_gait_io(
    config: GaitConfig,
    path_to_input: str | Path,
    path_to_output: str | Path,
    full_path_to_classifier_package: str | Path,
) -> None:
    """Run the arm activity filter on stored features and persist the per-window probabilities as TSDF."""
    # Point the config at the arm activity feature filenames, then load them
    config.set_filenames('arm_activity')
    meta_time, meta_values = read_metadata(path_to_input, config.meta_filename, config.time_filename, config.values_filename)
    df = tsdf.load_dataframe_from_binaries([meta_time, meta_values], tsdf.constants.ConcatenationType.columns)

    # Classify each window and attach the probability column
    package = ClassifierPackage.load(filepath=full_path_to_classifier_package)
    df[DataColumns.PRED_NO_OTHER_ARM_ACTIVITY_PROBA] = filter_gait(df=df, clf_package=package)

    # Describe the output binaries before writing
    meta_values.file_name = 'arm_activity_values.bin'
    meta_values.channels = [DataColumns.PRED_NO_OTHER_ARM_ACTIVITY_PROBA]
    meta_values.units = ['probability']

    meta_time.file_name = 'arm_activity_time.bin'
    meta_time.channels = [DataColumns.TIME]
    meta_time.units = [TimeUnit.RELATIVE_S]

    write_df_data(meta_time, meta_values, path_to_output, 'arm_activity_meta.json', df)
266
+
267
+
268
def extract_tremor_features_io(input_path: str | Path, output_path: str | Path, config: TremorConfig) -> None:
    """Load preprocessed data, compute tremor features per window, and store them as TSDF."""
    # Read the preprocessed input binaries into one dataframe
    meta_time, meta_values = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
    df = tsdf.load_dataframe_from_binaries([meta_time, meta_values], tsdf.constants.ConcatenationType.columns)

    # Window the signal and compute the tremor features
    df_windowed = extract_tremor_features(df, config)

    # Recording end = start of the last window + the window length
    final_window_start = df_windowed[DataColumns.TIME].iloc[-1]
    end_iso8601 = get_end_iso8601(
        start_iso8601=meta_time.start_iso8601,
        window_length_seconds=int(final_window_start + config.window_length_s)
    )

    # Describe the output binaries before writing
    meta_values.end_iso8601 = end_iso8601
    meta_values.file_name = 'tremor_values.bin'
    meta_values.channels = list(config.d_channels_values.keys())
    meta_values.units = list(config.d_channels_values.values())

    meta_time.end_iso8601 = end_iso8601
    meta_time.file_name = 'tremor_time.bin'
    meta_time.channels = [DataColumns.TIME]
    meta_time.units = ['relative_time_ms']

    write_df_data(meta_time, meta_values, output_path, 'tremor_meta.json', df_windowed)
292
+
293
+
294
def detect_tremor_io(input_path: str | Path, output_path: str | Path, path_to_classifier_input: str | Path, config: TremorConfig) -> None:
    """Classify tremor per window from stored tremor features and persist the predictions as TSDF."""
    # Point the config at the tremor feature filenames, then load them
    config.set_filenames('tremor')
    meta_time, meta_values = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
    df = tsdf.load_dataframe_from_binaries([meta_time, meta_values], tsdf.constants.ConcatenationType.columns)

    # Run the tremor detection classifier
    df = detect_tremor(df, config, path_to_classifier_input)

    # Describe the output binaries before writing
    meta_values.file_name = 'tremor_values.bin'
    meta_values.channels = list(config.d_channels_values.keys())
    meta_values.units = list(config.d_channels_values.values())

    meta_time.file_name = 'tremor_time.bin'
    meta_time.channels = [DataColumns.TIME]
    meta_time.units = ['relative_time_ms']

    write_df_data(meta_time, meta_values, output_path, 'tremor_meta.json', df)
315
+
316
+
317
def aggregate_tremor_io(path_to_feature_input: str | Path, path_to_prediction_input: str | Path, output_path: str | Path, config: TremorConfig) -> None:
    """
    Aggregate tremor predictions and tremor power into summary measures and save them as JSON.

    Parameters
    ----------
    path_to_feature_input : str | Path
        Directory containing the tremor feature TSDF files.
    path_to_prediction_input : str | Path
        Directory containing the tremor prediction TSDF files.
    output_path : str | Path
        Directory where 'tremor_aggregates.json' is written.
    config : TremorConfig
        Configuration holding the TSDF filenames to read.

    Returns
    -------
    None
    """
    # Normalize to Path: the signature accepts plain strings, but the '/' joins
    # below only work on Path objects (a str argument used to raise TypeError).
    path_to_prediction_input = Path(path_to_prediction_input)
    output_path = Path(output_path)

    # Load the features & predictions
    metadata_time, metadata_values = read_metadata(path_to_feature_input, config.meta_filename, config.time_filename, config.values_filename)
    df_features = tsdf.load_dataframe_from_binaries([metadata_time, metadata_values], tsdf.constants.ConcatenationType.columns)

    metadata_dict = tsdf.load_metadata_from_path(path_to_prediction_input / config.meta_filename)
    metadata_time = metadata_dict[config.time_filename]
    metadata_values = metadata_dict[config.values_filename]
    df_predictions = tsdf.load_dataframe_from_binaries([metadata_time, metadata_values], tsdf.constants.ConcatenationType.columns)

    # Subset features: only the power columns feed into the aggregation
    df_features = df_features[['tremor_power', 'below_tremor_power']]

    # Concatenate predictions and tremor power (aligned row-by-row by position)
    df = pd.concat([df_predictions, df_features], axis=1)

    # Compute aggregated tremor measures
    d_aggregates = aggregate_tremor(df, config)

    # Save output
    with open(output_path / "tremor_aggregates.json", 'w') as json_file:
        json.dump(d_aggregates, json_file, indent=4)
340
+
341
+
342
def extract_signal_quality_features_io(input_path: str | Path, output_path: str | Path, ppg_config: HeartRateConfig, acc_config: HeartRateConfig) -> pd.DataFrame:
    """
    Extract signal quality features from preprocessed PPG and accelerometer data.

    Parameters
    ----------
    input_path : str | Path
        The path to the directory containing the preprocessed PPG and accelerometer data.
    output_path : str | Path
        The path to the directory where the extracted features will be saved.
    ppg_config : HeartRateConfig
        The configuration for the signal quality feature extraction of the ppg signal.
    acc_config : HeartRateConfig
        The configuration for the signal quality feature extraction of the accelerometer signal.

    Returns
    -------
    pd.DataFrame
        The DataFrame containing the extracted signal quality features.
    """
    # Load the preprocessed PPG windows
    ppg_meta_time, ppg_meta_values = read_metadata(input_path, ppg_config.meta_filename, ppg_config.time_filename, ppg_config.values_filename)
    df_ppg = tsdf.load_dataframe_from_binaries([ppg_meta_time, ppg_meta_values], tsdf.constants.ConcatenationType.columns)

    # Load the preprocessed accelerometer windows
    acc_meta_time, acc_meta_values = read_metadata(input_path, acc_config.meta_filename, acc_config.time_filename, acc_config.values_filename)
    df_acc = tsdf.load_dataframe_from_binaries([acc_meta_time, acc_meta_values], tsdf.constants.ConcatenationType.columns)

    # Compute the per-window signal quality features
    df_windowed = extract_signal_quality_features(df_ppg, df_acc, ppg_config, acc_config)

    # TODO: persist the extracted features to output_path (not implemented yet)
    return df_windowed
377
+
378
+
379
def signal_quality_classification_io(input_path: str | Path, output_path: str | Path, path_to_classifier_input: str | Path, config: HeartRateConfig) -> pd.DataFrame:
    """
    Classify PPG signal quality from stored windowed features and return the result.

    The classification result used to be computed and silently discarded; it is
    now returned so callers can use or persist it. `output_path` is accepted for
    interface compatibility but not yet used — saving is still to be added.

    Parameters
    ----------
    input_path : str | Path
        Directory containing the windowed signal-quality feature TSDF files.
    output_path : str | Path
        Directory intended for the classification output (currently unused).
    path_to_classifier_input : str | Path
        Location of the signal-quality classifier.
    config : HeartRateConfig
        Configuration holding the TSDF filenames to read.

    Returns
    -------
    pd.DataFrame
        The per-window signal quality classification produced by
        `signal_quality_classification`.
    """
    # Load the windowed signal-quality features
    metadata_time, metadata_values = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
    df_windowed = tsdf.load_dataframe_from_binaries([metadata_time, metadata_values], tsdf.constants.ConcatenationType.columns)

    # Classify signal quality per window and hand the result back to the caller
    df_sqa = signal_quality_classification(df_windowed, config, path_to_classifier_input)

    return df_sqa
386
+
387
+
388
def aggregate_heart_rate_io(
    full_path_to_input: str | Path,
    full_path_to_output: str | Path,
    aggregates: List[str] | None = None
) -> None:
    """
    Aggregate heart rate estimates loaded from a JSON file and save the result.

    Parameters
    ----------
    full_path_to_input : str | Path
        Path to the JSON file containing the heart rate estimates; expected to
        hold a 'heart_rate' key mapping to a list of numeric values.
    full_path_to_output : str | Path
        Path of the JSON file where the aggregated estimates are written.
    aggregates : List[str] | None
        The list of aggregation methods to be used for the heart rate estimates.
        Defaults to ['mode', '99p'].
    """
    # Avoid the mutable default argument of the original signature
    if aggregates is None:
        aggregates = ['mode', '99p']

    # Load the heart rate estimates. json.load yields a plain dict, so the
    # 'heart_rate' entry is a list — the previous `.values` attribute access
    # (a pandas idiom) raised AttributeError on it.
    with open(full_path_to_input, 'r') as f:
        hr_data = json.load(f)
    hr_values = np.asarray(hr_data['heart_rate'])

    # Aggregate the heart rate estimates
    df_hr_aggregates = aggregate_heart_rate(hr_values, aggregates)

    # Save the aggregated heart rate estimates
    with open(full_path_to_output, 'w') as json_file:
        json.dump(df_hr_aggregates, json_file, indent=4)