paradigma 0.3.2__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123) hide show
  1. paradigma/assets/gait_detection_clf_package.pkl +0 -0
  2. paradigma/assets/gait_filtering_clf_package.pkl +0 -0
  3. paradigma/assets/ppg_quality_clf_package.pkl +0 -0
  4. paradigma/assets/tremor_detection_clf_package.pkl +0 -0
  5. paradigma/classification.py +115 -0
  6. paradigma/config.py +314 -0
  7. paradigma/constants.py +48 -7
  8. paradigma/feature_extraction.py +811 -547
  9. paradigma/pipelines/__init__.py +0 -0
  10. paradigma/pipelines/gait_pipeline.py +727 -0
  11. paradigma/pipelines/heart_rate_pipeline.py +426 -0
  12. paradigma/pipelines/heart_rate_utils.py +780 -0
  13. paradigma/pipelines/tremor_pipeline.py +299 -0
  14. paradigma/preprocessing.py +363 -0
  15. paradigma/segmenting.py +396 -0
  16. paradigma/testing.py +416 -0
  17. paradigma/util.py +393 -16
  18. paradigma-0.4.1.dist-info/METADATA +138 -0
  19. paradigma-0.4.1.dist-info/RECORD +22 -0
  20. {paradigma-0.3.2.dist-info → paradigma-0.4.1.dist-info}/WHEEL +1 -1
  21. paradigma/gait_analysis.py +0 -415
  22. paradigma/gait_analysis_config.py +0 -266
  23. paradigma/heart_rate_analysis.py +0 -127
  24. paradigma/heart_rate_analysis_config.py +0 -9
  25. paradigma/heart_rate_util.py +0 -173
  26. paradigma/imu_preprocessing.py +0 -232
  27. paradigma/ppg/classifier/LR_PPG_quality.pkl +0 -0
  28. paradigma/ppg/classifier/LR_model.mat +0 -0
  29. paradigma/ppg/feat_extraction/acc_feature.m +0 -20
  30. paradigma/ppg/feat_extraction/peakdet.m +0 -64
  31. paradigma/ppg/feat_extraction/ppg_features.m +0 -53
  32. paradigma/ppg/glob_functions/extract_hr_segments.m +0 -37
  33. paradigma/ppg/glob_functions/extract_overlapping_segments.m +0 -23
  34. paradigma/ppg/glob_functions/jsonlab/AUTHORS.txt +0 -41
  35. paradigma/ppg/glob_functions/jsonlab/ChangeLog.txt +0 -74
  36. paradigma/ppg/glob_functions/jsonlab/LICENSE_BSD.txt +0 -25
  37. paradigma/ppg/glob_functions/jsonlab/LICENSE_GPLv3.txt +0 -699
  38. paradigma/ppg/glob_functions/jsonlab/README.txt +0 -394
  39. paradigma/ppg/glob_functions/jsonlab/examples/.svn/entries +0 -368
  40. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/demo_jsonlab_basic.m.svn-base +0 -180
  41. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/demo_ubjson_basic.m.svn-base +0 -180
  42. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example1.json.svn-base +0 -23
  43. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example2.json.svn-base +0 -22
  44. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example3.json.svn-base +0 -11
  45. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/example4.json.svn-base +0 -34
  46. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_basictest.matlab.svn-base +0 -662
  47. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_selftest.m.svn-base +0 -27
  48. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_selftest.matlab.svn-base +0 -144
  49. paradigma/ppg/glob_functions/jsonlab/examples/.svn/text-base/jsonlab_speedtest.m.svn-base +0 -21
  50. paradigma/ppg/glob_functions/jsonlab/examples/demo_jsonlab_basic.m +0 -180
  51. paradigma/ppg/glob_functions/jsonlab/examples/demo_ubjson_basic.m +0 -180
  52. paradigma/ppg/glob_functions/jsonlab/examples/example1.json +0 -23
  53. paradigma/ppg/glob_functions/jsonlab/examples/example2.json +0 -22
  54. paradigma/ppg/glob_functions/jsonlab/examples/example3.json +0 -11
  55. paradigma/ppg/glob_functions/jsonlab/examples/example4.json +0 -34
  56. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_basictest.matlab +0 -662
  57. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_selftest.m +0 -27
  58. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_selftest.matlab +0 -144
  59. paradigma/ppg/glob_functions/jsonlab/examples/jsonlab_speedtest.m +0 -21
  60. paradigma/ppg/glob_functions/jsonlab/jsonopt.m +0 -32
  61. paradigma/ppg/glob_functions/jsonlab/loadjson.m +0 -566
  62. paradigma/ppg/glob_functions/jsonlab/loadubjson.m +0 -528
  63. paradigma/ppg/glob_functions/jsonlab/mergestruct.m +0 -33
  64. paradigma/ppg/glob_functions/jsonlab/savejson.m +0 -475
  65. paradigma/ppg/glob_functions/jsonlab/saveubjson.m +0 -504
  66. paradigma/ppg/glob_functions/jsonlab/varargin2struct.m +0 -40
  67. paradigma/ppg/glob_functions/sample_prob_final.m +0 -49
  68. paradigma/ppg/glob_functions/synchronization.m +0 -76
  69. paradigma/ppg/glob_functions/tsdf_scan_meta.m +0 -22
  70. paradigma/ppg/hr_functions/Long_TFD_JOT.m +0 -37
  71. paradigma/ppg/hr_functions/PPG_TFD_HR.m +0 -59
  72. paradigma/ppg/hr_functions/TFD toolbox JOT/.gitignore +0 -4
  73. paradigma/ppg/hr_functions/TFD toolbox JOT/CHANGELOG.md +0 -23
  74. paradigma/ppg/hr_functions/TFD toolbox JOT/LICENCE.md +0 -27
  75. paradigma/ppg/hr_functions/TFD toolbox JOT/README.md +0 -251
  76. paradigma/ppg/hr_functions/TFD toolbox JOT/README.pdf +0 -0
  77. paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_Doppler_kern.m +0 -142
  78. paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_Doppler_lag_kern.m +0 -314
  79. paradigma/ppg/hr_functions/TFD toolbox JOT/common/gen_lag_kern.m +0 -123
  80. paradigma/ppg/hr_functions/TFD toolbox JOT/dec_tfd.m +0 -154
  81. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_di_gdtfd.m +0 -194
  82. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_li_gdtfd.m +0 -200
  83. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_nonsep_gdtfd.m +0 -229
  84. paradigma/ppg/hr_functions/TFD toolbox JOT/decimated_TFDs/dec_sep_gdtfd.m +0 -241
  85. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/di_gdtfd.m +0 -157
  86. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/li_gdtfd.m +0 -190
  87. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/nonsep_gdtfd.m +0 -196
  88. paradigma/ppg/hr_functions/TFD toolbox JOT/full_TFDs/sep_gdtfd.m +0 -199
  89. paradigma/ppg/hr_functions/TFD toolbox JOT/full_tfd.m +0 -144
  90. paradigma/ppg/hr_functions/TFD toolbox JOT/load_curdir.m +0 -13
  91. paradigma/ppg/hr_functions/TFD toolbox JOT/pics/decimated_TFDs_examples.png +0 -0
  92. paradigma/ppg/hr_functions/TFD toolbox JOT/pics/full_TFDs_examples.png +0 -0
  93. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/check_dec_params_seq.m +0 -79
  94. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/dispEE.m +0 -9
  95. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/dispVars.m +0 -26
  96. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/disp_bytes.m +0 -25
  97. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/fold_vector_full.m +0 -40
  98. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/fold_vector_half.m +0 -34
  99. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/gen_LFM.m +0 -29
  100. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/get_analytic_signal.m +0 -76
  101. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/get_window.m +0 -176
  102. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/isreal_fn.m +0 -11
  103. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/padWin.m +0 -97
  104. paradigma/ppg/hr_functions/TFD toolbox JOT/utils/vtfd.m +0 -149
  105. paradigma/ppg/preprocessing/preprocessing_imu.m +0 -15
  106. paradigma/ppg/preprocessing/preprocessing_ppg.m +0 -13
  107. paradigma/ppg_preprocessing.py +0 -313
  108. paradigma/preprocessing_config.py +0 -69
  109. paradigma/quantification.py +0 -58
  110. paradigma/tremor/TremorFeaturesAndClassification.m +0 -345
  111. paradigma/tremor/feat_extraction/DerivativesExtract.m +0 -22
  112. paradigma/tremor/feat_extraction/ExtractBandSignalsRMS.m +0 -72
  113. paradigma/tremor/feat_extraction/MFCCExtract.m +0 -100
  114. paradigma/tremor/feat_extraction/PSDBandPower.m +0 -52
  115. paradigma/tremor/feat_extraction/PSDEst.m +0 -63
  116. paradigma/tremor/feat_extraction/PSDExtrAxis.m +0 -88
  117. paradigma/tremor/feat_extraction/PSDExtrOpt.m +0 -95
  118. paradigma/tremor/preprocessing/InterpData.m +0 -32
  119. paradigma/tremor/weekly_aggregates/WeeklyAggregates.m +0 -295
  120. paradigma/windowing.py +0 -219
  121. paradigma-0.3.2.dist-info/METADATA +0 -79
  122. paradigma-0.3.2.dist-info/RECORD +0 -108
  123. {paradigma-0.3.2.dist-info → paradigma-0.4.1.dist-info}/LICENSE +0 -0
@@ -1,415 +0,0 @@
1
- import os
2
- import numpy as np
3
- import pandas as pd
4
- from pathlib import Path
5
- from typing import Union
6
-
7
- import tsdf
8
-
9
- from paradigma.constants import DataColumns
10
- from paradigma.gait_analysis_config import GaitFeatureExtractionConfig, GaitDetectionConfig, \
11
- ArmSwingFeatureExtractionConfig, ArmSwingDetectionConfig, ArmSwingQuantificationConfig
12
- from paradigma.feature_extraction import extract_temporal_domain_features, \
13
- extract_spectral_domain_features, pca_transform_gyroscope, compute_angle, \
14
- remove_moving_average_angle, extract_angle_extremes, extract_range_of_motion, \
15
- extract_peak_angular_velocity, signal_to_ffts, get_dominant_frequency, compute_perc_power
16
- from paradigma.quantification import aggregate_segments
17
- from paradigma.windowing import tabulate_windows, create_segments, discard_segments
18
- from paradigma.util import get_end_iso8601, write_data, read_metadata
19
-
20
-
21
- def extract_gait_features(input_path: Union[str, Path], output_path: Union[str, Path], config: GaitFeatureExtractionConfig) -> None:
22
- # load data
23
- metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
24
- df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
25
-
26
- # group sequences of timestamps into windows
27
- df_windowed = tabulate_windows(
28
- df=df,
29
- time_column_name=config.time_colname,
30
- data_point_level_cols=config.l_data_point_level_cols,
31
- window_length_s=config.window_length_s,
32
- window_step_size_s=config.window_step_size_s,
33
- sampling_frequency=config.sampling_frequency
34
- )
35
-
36
-
37
- # compute statistics of the temporal domain signals
38
- df_windowed = extract_temporal_domain_features(config, df_windowed, l_gravity_stats=['mean', 'std'])
39
-
40
- # transform the signals from the temporal domain to the spectral domain using the fast fourier transform
41
- # and extract spectral features
42
- df_windowed = extract_spectral_domain_features(config, df_windowed, config.sensor, config.l_accelerometer_cols)
43
-
44
- end_iso8601 = get_end_iso8601(start_iso8601=metadata_time.start_iso8601,
45
- window_length_seconds=int(df_windowed[config.time_colname][-1:].values[0] + config.window_length_s))
46
-
47
- metadata_samples.end_iso8601 = end_iso8601
48
- metadata_samples.file_name = 'gait_values.bin'
49
- metadata_time.end_iso8601 = end_iso8601
50
- metadata_time.file_name = 'gait_time.bin'
51
-
52
- metadata_samples.channels = list(config.d_channels_values.keys())
53
- metadata_samples.units = list(config.d_channels_values.values())
54
-
55
- metadata_time.channels = ['time']
56
- metadata_time.units = ['relative_time_ms']
57
- metadata_time.data_type = np.int64
58
-
59
- write_data(metadata_time, metadata_samples, output_path, 'gait_meta.json', df_windowed)
60
-
61
-
62
- def detect_gait(input_path: Union[str, Path], output_path: Union[str, Path], path_to_classifier_input: Union[str, Path], config: GaitDetectionConfig) -> None:
63
-
64
- # Load the data
65
- metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
66
- df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
67
-
68
- # Initialize the classifier
69
- clf = pd.read_pickle(os.path.join(path_to_classifier_input, config.classifier_file_name))
70
- with open(os.path.join(path_to_classifier_input, config.thresholds_file_name), 'r') as f:
71
- threshold = float(f.read())
72
-
73
- # Prepare the data
74
- clf.feature_names_in_ = [f'{x}_power_below_gait' for x in config.l_accelerometer_cols] + \
75
- [f'{x}_power_gait' for x in config.l_accelerometer_cols] + \
76
- [f'{x}_power_tremor' for x in config.l_accelerometer_cols] + \
77
- [f'{x}_power_above_tremor' for x in config.l_accelerometer_cols] + \
78
- ['std_norm_acc'] + [f'cc_{i}_accelerometer' for i in range(1, 13)] + [f'grav_{x}_{y}' for x in config.l_accelerometer_cols for y in ['mean', 'std']] + \
79
- [f'{x}_dominant_frequency' for x in config.l_accelerometer_cols]
80
- X = df.loc[:, clf.feature_names_in_]
81
-
82
- # Make prediction
83
- df['pred_gait_proba'] = clf.predict_proba(X)[:, 1]
84
- df['pred_gait'] = df['pred_gait_proba'] > threshold
85
-
86
- # Prepare the metadata
87
- metadata_samples.file_name = 'gait_values.bin'
88
- metadata_time.file_name = 'gait_time.bin'
89
-
90
- metadata_samples.channels = ['pred_gait_proba']
91
- metadata_samples.units = ['probability']
92
- metadata_samples.data_type = np.float32
93
- metadata_samples.bits = 32
94
-
95
- metadata_time.channels = [config.time_colname]
96
- metadata_time.units = ['relative_time_ms']
97
- metadata_time.data_type = np.int32
98
- metadata_time.bits = 32
99
-
100
- write_data(metadata_time, metadata_samples, output_path, 'gait_meta.json', df)
101
-
102
-
103
- def extract_arm_swing_features(input_path: Union[str, Path], output_path: Union[str, Path], config: ArmSwingFeatureExtractionConfig) -> None:
104
- # load accelerometer and gyroscope data
105
- l_dfs = []
106
- for sensor in ['accelerometer', 'gyroscope']:
107
- config.set_sensor(sensor)
108
- meta_filename = f'{sensor}_meta.json'
109
- values_filename = f'{sensor}_samples.bin'
110
- time_filename = f'{sensor}_time.bin'
111
-
112
- metadata_dict = tsdf.load_metadata_from_path(os.path.join(input_path, meta_filename))
113
- metadata_time = metadata_dict[time_filename]
114
- metadata_samples = metadata_dict[values_filename]
115
- l_dfs.append(tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns))
116
-
117
- df = pd.merge(l_dfs[0], l_dfs[1], on=config.time_colname)
118
-
119
- # temporary add "random" predictions
120
- df[config.pred_gait_colname] = np.concatenate([np.repeat([1], df.shape[0]//3), np.repeat([0], df.shape[0]//3), np.repeat([1], df.shape[0] + 1 - 2*df.shape[0]//3)], axis=0)
121
-
122
- # perform principal component analysis on the gyroscope signals to obtain the angular velocity in the
123
- # direction of the swing of the arm
124
- df[config.velocity_colname] = pca_transform_gyroscope(
125
- df=df,
126
- y_gyro_colname=DataColumns.GYROSCOPE_Y,
127
- z_gyro_colname=DataColumns.GYROSCOPE_Z,
128
- pred_gait_colname=config.pred_gait_colname
129
- )
130
-
131
- # integrate the angular velocity to obtain an estimation of the angle
132
- df[config.angle_colname] = compute_angle(
133
- velocity_col=df[config.velocity_colname],
134
- time_col=df[config.time_colname]
135
- )
136
-
137
- # remove the moving average from the angle to account for possible drift caused by the integration
138
- # of noise in the angular velocity
139
- df[config.angle_smooth_colname] = remove_moving_average_angle(
140
- angle_col=df[config.angle_colname],
141
- sampling_frequency=config.sampling_frequency
142
- )
143
-
144
- # use only predicted gait for the subsequent steps
145
- df = df.loc[df[config.pred_gait_colname]==1].reset_index(drop=True)
146
-
147
- # group consecutive timestamps into segments with new segments starting after a pre-specified gap
148
- df_segments = create_segments(
149
- df=df,
150
- time_colname=config.time_colname,
151
- segment_nr_colname='segment_nr',
152
- minimum_gap_s=3
153
- )
154
-
155
- # remove any segments that do not adhere to predetermined criteria
156
- df_segments = discard_segments(
157
- df=df_segments,
158
- time_colname=config.time_colname,
159
- segment_nr_colname='segment_nr',
160
- minimum_segment_length_s=3
161
- )
162
-
163
- # create windows of a fixed length and step size from the time series per segment
164
- l_dfs = []
165
- for segment_nr in df_segments[config.segment_nr_colname].unique():
166
- df_single_segment = df_segments.loc[df_segments[config.segment_nr_colname]==segment_nr].copy().reset_index(drop=True)
167
- l_dfs.append(tabulate_windows(
168
- df=df_single_segment,
169
- time_column_name=config.time_colname,
170
- segment_nr_colname=config.segment_nr_colname,
171
- data_point_level_cols=config.l_data_point_level_cols,
172
- window_length_s=config.window_length_s,
173
- window_step_size_s=config.window_step_size_s,
174
- segment_nr=segment_nr,
175
- sampling_frequency=config.sampling_frequency,
176
- )
177
- )
178
- df_windowed = pd.concat(l_dfs).reset_index(drop=True)
179
-
180
- del df, df_segments
181
-
182
- # transform the angle from the temporal domain to the spectral domain using the fast fourier transform
183
- df_windowed['angle_freqs'], df_windowed['angle_fft'] = signal_to_ffts(
184
- sensor_col=df_windowed[config.angle_smooth_colname],
185
- window_type=config.window_type,
186
- sampling_frequency=config.sampling_frequency)
187
-
188
- # obtain the dominant frequency of the angle signal in the frequency band of interest
189
- # defined by the highest peak in the power spectrum
190
- df_windowed['angle_dominant_frequency'] = df_windowed.apply(
191
- lambda x: get_dominant_frequency(signal_ffts=x['angle_fft'],
192
- signal_freqs=x['angle_freqs'],
193
- fmin=config.power_band_low_frequency,
194
- fmax=config.power_band_high_frequency
195
- ), axis=1
196
- )
197
-
198
- df_windowed = df_windowed.drop(columns=['angle_fft', 'angle_freqs'])
199
-
200
- # compute the percentage of power in the frequency band of interest (i.e., the frequency band of the arm swing)
201
- df_windowed['angle_perc_power'] = df_windowed[config.angle_smooth_colname].apply(
202
- lambda x: compute_perc_power(
203
- sensor_col=x,
204
- fmin_band=config.power_band_low_frequency,
205
- fmax_band=config.power_band_high_frequency,
206
- fmin_total=config.spectrum_low_frequency,
207
- fmax_total=config.spectrum_high_frequency,
208
- sampling_frequency=config.sampling_frequency,
209
- window_type=config.window_type
210
- )
211
- )
212
-
213
- # note to eScience: why are the columns 'angle_new_minima', 'angle_new_maxima',
214
- # 'angle_minima_deleted' and 'angle_maxima deleted' created here? Should a copy
215
- # of 'df_windowed' be created inside 'extract_angle_extremes' to prevent this from
216
- # happening?
217
- # determine the extrema (minima and maxima) of the angle signal
218
- extract_angle_extremes(
219
- df=df_windowed,
220
- angle_colname=config.angle_smooth_colname,
221
- dominant_frequency_colname='angle_dominant_frequency',
222
- sampling_frequency=config.sampling_frequency
223
- )
224
-
225
- df_windowed = df_windowed.drop(columns=[config.angle_smooth_colname])
226
-
227
- # calculate the change in angle between consecutive extrema (minima and maxima) of the angle signal inside the window
228
- df_windowed['angle_amplitudes'] = extract_range_of_motion(
229
- angle_extrema_values_col=df_windowed['angle_extrema_values']
230
- )
231
-
232
- df_windowed = df_windowed.drop(columns=['angle_extrema_values'])
233
-
234
- # aggregate the changes in angle between consecutive extrema to obtain the range of motion
235
- df_windowed['range_of_motion'] = df_windowed['angle_amplitudes'].apply(lambda x: np.mean(x) if len(x) > 0 else 0).replace(np.nan, 0)
236
-
237
- df_windowed = df_windowed.drop(columns=['angle_amplitudes'])
238
-
239
- # compute the forward and backward peak angular velocity using the extrema of the angular velocity
240
- extract_peak_angular_velocity(
241
- df=df_windowed,
242
- velocity_colname=config.velocity_colname,
243
- angle_minima_colname='angle_minima',
244
- angle_maxima_colname='angle_maxima'
245
- )
246
-
247
- df_windowed = df_windowed.drop(columns=['angle_minima','angle_maxima', 'angle_new_minima',
248
- 'angle_new_maxima', config.velocity_colname])
249
-
250
- # compute aggregated measures of the peak angular velocity
251
- for dir in ['forward', 'backward']:
252
- df_windowed[f'{dir}_peak_ang_vel_mean'] = df_windowed[f'{dir}_peak_ang_vel'].apply(lambda x: np.mean(x) if len(x) > 0 else 0)
253
- df_windowed[f'{dir}_peak_ang_vel_std'] = df_windowed[f'{dir}_peak_ang_vel'].apply(lambda x: np.std(x) if len(x) > 0 else 0)
254
-
255
- df_windowed = df_windowed.drop(columns=[f'{dir}_peak_ang_vel'])
256
-
257
- # compute statistics of the temporal domain accelerometer signals
258
- df_windowed = extract_temporal_domain_features(config, df_windowed, l_gravity_stats=['mean', 'std'])
259
-
260
- # transform the accelerometer and gyroscope signals from the temporal domain to the spectral domain
261
- # using the fast fourier transform and extract spectral features
262
- for sensor, l_sensor_colnames in zip(['accelerometer', 'gyroscope'], [config.l_accelerometer_cols, config.l_gyroscope_cols]):
263
- df_windowed = extract_spectral_domain_features(config, df_windowed, sensor, l_sensor_colnames)
264
-
265
- end_iso8601 = get_end_iso8601(metadata_samples.start_iso8601,
266
- df_windowed[config.time_colname][-1:].values[0] + config.window_length_s)
267
-
268
- metadata_samples.end_iso8601 = end_iso8601
269
- metadata_samples.file_name = 'arm_swing_values.bin'
270
- metadata_time.end_iso8601 = end_iso8601
271
- metadata_time.file_name = 'arm_swing_time.bin'
272
-
273
- metadata_samples.channels = list(config.d_channels_values.keys())
274
- metadata_samples.units = list(config.d_channels_values.values())
275
- metadata_samples.data_type = np.float32
276
- metadata_samples.bits = 32
277
-
278
- metadata_time.channels = [config.time_colname]
279
- metadata_time.units = ['relative_time_ms']
280
- metadata_time.data_type = np.int32
281
- metadata_time.bits = 32
282
-
283
- write_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df_windowed)
284
-
285
-
286
- def detect_arm_swing(input_path: Union[str, Path], output_path: Union[str, Path], path_to_classifier_input: Union[str, Path], config: ArmSwingDetectionConfig) -> None:
287
- # Load the data
288
- metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
289
- df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
290
-
291
- # Initialize the classifier
292
- clf = pd.read_pickle(os.path.join(path_to_classifier_input, config.classifier_file_name))
293
-
294
- # Prepare the data
295
- clf.feature_names_in_ = ['std_norm_acc'] + [f'{x}_power_below_gait' for x in config.l_accelerometer_cols] + \
296
- [f'{x}_power_gait' for x in config.l_accelerometer_cols] + \
297
- [f'{x}_power_tremor' for x in config.l_accelerometer_cols] + \
298
- [f'{x}_power_above_tremor' for x in config.l_accelerometer_cols] + \
299
- [f'cc_{i}_accelerometer' for i in range(1, 13)] + [f'cc_{i}_gyroscope' for i in range(1, 13)] + \
300
- [f'grav_{x}_mean' for x in config.l_accelerometer_cols] + [f'grav_{x}_std' for x in config.l_accelerometer_cols] + \
301
- ['range_of_motion', 'forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean', 'forward_peak_ang_vel_std',
302
- 'backward_peak_ang_vel_std', 'angle_perc_power', 'angle_dominant_frequency'] + \
303
- [f'{x}_dominant_frequency' for x in config.l_accelerometer_cols]
304
-
305
- X = df.loc[:, clf.feature_names_in_]
306
-
307
- # Make prediction
308
- # df['pred_arm_swing_proba'] = clf.predict_proba(X)[:, 1]
309
- df['pred_arm_swing'] = clf.predict(X)
310
-
311
- # Prepare the metadata
312
- metadata_samples.file_name = 'arm_swing_values.bin'
313
- metadata_time.file_name = 'arm_swing_time.bin'
314
-
315
- metadata_samples.channels = ['pred_arm_swing']
316
- metadata_samples.units = ['boolean']
317
- metadata_samples.data_type = np.int8
318
- metadata_samples.bits = 8
319
-
320
- metadata_time.channels = ['time']
321
- metadata_time.units = ['relative_time_ms']
322
- metadata_time.data_type = np.int32
323
- metadata_time.bits = 32
324
-
325
- write_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df)
326
-
327
-
328
- def quantify_arm_swing(path_to_feature_input: Union[str, Path], path_to_prediction_input: Union[str, Path], output_path: Union[str, Path], config: ArmSwingQuantificationConfig) -> None:
329
- # Load the features & predictions
330
- metadata_time, metadata_samples = read_metadata(path_to_feature_input, config.meta_filename, config.time_filename, config.values_filename)
331
- df_features = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
332
-
333
- metadata_dict = tsdf.load_metadata_from_path(os.path.join(path_to_prediction_input, config.meta_filename))
334
- metadata_time = metadata_dict[config.time_filename]
335
- metadata_samples = metadata_dict[config.values_filename]
336
- df_predictions = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)
337
-
338
- # Validate
339
- # dataframes have same length
340
- assert df_features.shape[0] == df_predictions.shape[0]
341
-
342
- # dataframes have same time column
343
- assert df_features['time'].equals(df_predictions['time'])
344
-
345
- # Prepare the data
346
-
347
- # subset features
348
- l_feature_cols = ['time', 'range_of_motion', 'forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean']
349
- df_features = df_features[l_feature_cols]
350
-
351
- # concatenate features and predictions
352
- df = pd.concat([df_features, df_predictions[config.pred_arm_swing_colname]], axis=1)
353
-
354
- # temporarily for testing: manually determine predictions
355
- df[config.pred_arm_swing_colname] = np.concatenate([np.repeat([1], df.shape[0]//3), np.repeat([0], df.shape[0]//3), np.repeat([1], df.shape[0] - 2*df.shape[0]//3)], axis=0)
356
-
357
- # keep only predicted arm swing
358
- df_arm_swing = df.loc[df[config.pred_arm_swing_colname]==1].copy().reset_index(drop=True)
359
-
360
- del df
361
-
362
- # create peak angular velocity
363
- df_arm_swing.loc[:, 'peak_ang_vel'] = df_arm_swing.loc[:, ['forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean']].mean(axis=1)
364
- df_arm_swing = df_arm_swing.drop(columns=['forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean'])
365
-
366
- # Segmenting
367
-
368
- df_arm_swing = create_segments(
369
- df=df_arm_swing,
370
- time_colname='time',
371
- segment_nr_colname='segment_nr',
372
- minimum_gap_s=config.segment_gap_s
373
- )
374
- df_arm_swing = discard_segments(
375
- df=df_arm_swing,
376
- time_colname='time',
377
- segment_nr_colname='segment_nr',
378
- minimum_segment_length_s=config.min_segment_length_s
379
- )
380
-
381
- # Quantify arm swing
382
- df_aggregates = aggregate_segments(
383
- df=df_arm_swing,
384
- time_colname='time',
385
- segment_nr_colname='segment_nr',
386
- window_step_size_s=config.window_step_size,
387
- l_metrics=['range_of_motion', 'peak_ang_vel'],
388
- l_aggregates=['median'],
389
- l_quantiles=[0.95]
390
- )
391
-
392
- df_aggregates['segment_duration_ms'] = df_aggregates['segment_duration_s'] * 1000
393
- df_aggregates = df_aggregates.drop(columns=['segment_nr'])
394
-
395
- # Store data
396
- metadata_samples.file_name = 'arm_swing_values.bin'
397
- metadata_time.file_name = 'arm_swing_time.bin'
398
-
399
- metadata_samples.channels = ['range_of_motion_median', 'range_of_motion_quantile_95',
400
- 'peak_ang_vel_median', 'peak_ang_vel_quantile_95']
401
- metadata_samples.units = ['deg', 'deg', 'deg/s', 'deg/s']
402
- metadata_samples.data_type = np.float32
403
- metadata_samples.bits = 32
404
-
405
- metadata_time.channels = ['time', 'segment_duration_ms']
406
- metadata_time.units = ['relative_time_ms', 'ms']
407
- metadata_time.data_type = np.int32
408
- metadata_time.bits = 32
409
-
410
- write_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df_aggregates)
411
-
412
-
413
- def aggregate_weekly_arm_swing():
414
- pass
415
-
@@ -1,266 +0,0 @@
1
- from typing import Dict, List
2
-
3
- from paradigma.constants import DataColumns, DataUnits
4
-
5
-
6
-
7
class IMUConfig:
    """
    Base class for Gait feature extraction and Gait detection configurations, based on the IMU data (accelerometer, gyroscope).
    """

    def __init__(self):
        # Column name of the time axis shared by all IMU-based configs.
        self.time_colname = DataColumns.TIME

        # Raw accelerometer channel column names.
        self.l_accelerometer_cols: List[str] = [
            DataColumns.ACCELEROMETER_X,
            DataColumns.ACCELEROMETER_Y,
            DataColumns.ACCELEROMETER_Z,
        ]
        # Raw gyroscope channel column names.
        self.l_gyroscope_cols: List[str] = [
            DataColumns.GYROSCOPE_X,
            DataColumns.GYROSCOPE_Y,
            DataColumns.GYROSCOPE_Z,
        ]
        # Gravity-component accelerometer column names.
        self.l_gravity_cols: List[str] = [
            DataColumns.GRAV_ACCELEROMETER_X,
            DataColumns.GRAV_ACCELEROMETER_Y,
            DataColumns.GRAV_ACCELEROMETER_Z,
        ]

    def _assign_filenames(self, prefix: str, values_suffix: str) -> None:
        # Shared helper: the two public setters differ only in the suffix of
        # the values file ("samples" vs "values").
        self.meta_filename = f"{prefix}_meta.json"
        self.time_filename = f"{prefix}_time.bin"
        self.values_filename = f"{prefix}_{values_suffix}.bin"

    def set_sensor(self, sensor: str) -> None:
        """Set the sensor name and derive the default filenames from it."""
        self.sensor: str = sensor
        self.set_filenames(sensor)

    def set_filenames(self, prefix: str) -> None:
        """Set meta/time/samples filenames from the given prefix.

        Parameters
        ----------
        prefix : str
            The prefix for the filenames.
        """
        self._assign_filenames(prefix, "samples")

    def set_filenames_values(self, prefix: str) -> None:
        """Set meta/time/values filenames from the given prefix.

        Parameters
        ----------
        prefix : str
            The prefix for the filenames.
        """
        self._assign_filenames(prefix, "values")
61
-
62
class GaitFeatureExtractionConfig(IMUConfig):
    """Configuration for extracting gait features from windowed accelerometer data."""

    def __init__(self) -> None:
        super().__init__()
        self.set_sensor("accelerometer")
        self.set_sampling_frequency(100)

        self.window_type: str = "hann"
        self.verbose: int = 0

        self.window_length_s: int = 6
        self.window_step_size_s: int = 1

        # cepstral coefficients
        self.cc_low_frequency: int = 0
        self.cc_high_frequency: int = 25
        self.n_dct_filters_cc: int = 20
        self.n_coefficients_cc: int = 12

        # NOTE(review): the upper bound of 'power_above_tremor' is the full
        # sampling frequency, not the Nyquist frequency (sampling_frequency / 2)
        # used elsewhere as spectrum_high_frequency — confirm this is intended.
        self.d_frequency_bandwidths: Dict[str, List[float]] = {
            "power_below_gait": [0.3, 0.7],
            "power_gait": [0.7, 3.5],
            "power_tremor": [3.5, 8],
            "power_above_tremor": [8, self.sampling_frequency],
        }

        # Window-level metadata columns.
        self.l_window_level_cols: List[str] = [
            "id",
            "window_nr",
            "window_start",
            "window_end",
        ]
        # Per-sample columns carried into each window.
        self.l_data_point_level_cols: List[str] = (
            self.l_accelerometer_cols + self.l_gravity_cols
        )

        # TODO: generate this dictionary using object attributes (self.X) and parameters (e.g., n_dct_filters for cc)
        self.d_channels_values: Dict[str, str] = {
            f"grav_{self.sensor}_x_mean": DataUnits.GRAVITY,
            f"grav_{self.sensor}_y_mean": DataUnits.GRAVITY,
            f"grav_{self.sensor}_z_mean": DataUnits.GRAVITY,
            f"grav_{self.sensor}_x_std": DataUnits.GRAVITY,
            f"grav_{self.sensor}_y_std": DataUnits.GRAVITY,
            f"grav_{self.sensor}_z_std": DataUnits.GRAVITY,
            f"{self.sensor}_x_power_below_gait": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_y_power_below_gait": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_z_power_below_gait": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_x_power_gait": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_y_power_gait": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_z_power_gait": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_x_power_tremor": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_y_power_tremor": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_z_power_tremor": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_x_power_above_tremor": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_y_power_above_tremor": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_z_power_above_tremor": DataUnits.POWER_SPECTRAL_DENSITY,
            f"{self.sensor}_x_dominant_frequency": DataUnits.FREQUENCY,
            f"{self.sensor}_y_dominant_frequency": DataUnits.FREQUENCY,
            f"{self.sensor}_z_dominant_frequency": DataUnits.FREQUENCY,
            "std_norm_acc": DataUnits.GRAVITY,
        }

        # Fixed: use DataUnits.GRAVITY instead of the hard-coded "g" literal,
        # consistent with the cepstral-coefficient loop in
        # ArmSwingFeatureExtractionConfig and the other gravity-unit channels.
        for cc_coef in range(1, self.n_coefficients_cc + 1):
            self.d_channels_values[f"cc_{cc_coef}_{self.sensor}"] = DataUnits.GRAVITY

    def set_sampling_frequency(self, sampling_frequency: int) -> None:
        """Set the sampling frequency and the spectrum bounds derived from it."""
        self.sampling_frequency: int = sampling_frequency
        self.spectrum_low_frequency: int = 0  # Hz
        # Nyquist frequency of the configured sampling rate.
        self.spectrum_high_frequency: int = int(self.sampling_frequency / 2)  # Hz
        self.filter_length: int = self.spectrum_high_frequency - 1
134
-
135
-
136
class GaitDetectionConfig(IMUConfig):
    """Configuration for the gait detection (classification) step."""

    def __init__(self) -> None:
        super().__init__()
        # Pre-trained classifier asset and its decision threshold file.
        self.classifier_file_name = "gd_classifier.pkl"
        self.thresholds_file_name = "gd_threshold.txt"

        self.set_filenames_values("gait")
144
-
145
-
146
-
147
class ArmSwingFeatureExtractionConfig(IMUConfig):
    """Configuration for extracting arm-swing features from windowed IMU data."""

    def __init__(self) -> None:
        super().__init__()
        # general
        self.sensor = "IMU"
        self.units = "degrees"

        # windowing
        self.window_type = "hann"
        self.initialize_window_length_fields(3)

        self.initialize_sampling_frequency_fields(100)

        self.initialize_column_names()

        # Output channel -> unit mapping. Dict insertion order determines the
        # channel order, so entries are added in the original sequence.
        self.d_channels_values = {
            "angle_perc_power": "proportion",
            "range_of_motion": "deg",
            "forward_peak_ang_vel_mean": DataUnits.ROTATION,
            "forward_peak_ang_vel_std": DataUnits.ROTATION,
            "backward_peak_ang_vel_mean": DataUnits.ROTATION,
            "backward_peak_ang_vel_std": DataUnits.ROTATION,
            "std_norm_acc": DataUnits.GRAVITY,
        }

        # Mean and std of the gravity component, per accelerometer axis.
        for axis in ("x", "y", "z"):
            self.d_channels_values[f"grav_accelerometer_{axis}_mean"] = DataUnits.GRAVITY
            self.d_channels_values[f"grav_accelerometer_{axis}_std"] = DataUnits.GRAVITY

        # Band power and dominant frequency, per accelerometer axis.
        # NOTE(review): the band-power unit is the placeholder "X" — presumably
        # power spectral density; confirm before relying on it.
        for axis in ("x", "y", "z"):
            for band in ("below_gait", "gait", "tremor", "above_tremor"):
                self.d_channels_values[f"accelerometer_{axis}_power_{band}"] = "X"
            self.d_channels_values[f"accelerometer_{axis}_dominant_frequency"] = DataUnits.FREQUENCY

        self.d_channels_values["angle_dominant_frequency"] = DataUnits.FREQUENCY

        # Cepstral coefficients for both sensors.
        for sensor in ("accelerometer", "gyroscope"):
            for cc_coef in range(1, self.n_coefficients_cc + 1):
                self.d_channels_values[f"cc_{cc_coef}_{sensor}"] = DataUnits.GRAVITY

    def initialize_window_length_fields(self, window_length_s: int) -> None:
        """Derive window overlap (75%) and step size from the window length."""
        self.window_length_s = window_length_s
        self.window_overlap_s = window_length_s * 0.75
        self.window_step_size_s = window_length_s - self.window_overlap_s

    def initialize_sampling_frequency_fields(self, sampling_frequency: int) -> None:
        """Set the sampling frequency and the spectral parameters derived from it."""
        self.sampling_frequency = sampling_frequency

        # computing power
        self.power_band_low_frequency = 0.3
        self.power_band_high_frequency = 3
        self.spectrum_low_frequency = 0
        self.spectrum_high_frequency = int(sampling_frequency / 2)

        # NOTE(review): 'power_above_tremor' extends to the full sampling
        # frequency rather than the Nyquist frequency — confirm intended.
        self.d_frequency_bandwidths = {
            "power_below_gait": [0.3, 0.7],
            "power_gait": [0.7, 3.5],
            "power_tremor": [3.5, 8],
            "power_above_tremor": [8, sampling_frequency],
        }

        # cepstral coefficients
        self.cc_low_frequency = 0
        self.cc_high_frequency = 25
        self.n_dct_filters_cc: int = 20
        self.n_coefficients_cc: int = 12

    def initialize_column_names(self) -> None:
        """Set the column names used during arm-swing feature extraction."""
        self.pred_gait_colname = DataColumns.PRED_GAIT
        self.angle_smooth_colname: str = DataColumns.ANGLE_SMOOTH
        self.angle_colname = DataColumns.ANGLE
        self.velocity_colname = DataColumns.VELOCITY
        self.segment_nr_colname = DataColumns.SEGMENT_NR

        # Per-sample columns carried into each window.
        self.l_data_point_level_cols: List[str] = (
            self.l_accelerometer_cols
            + self.l_gyroscope_cols
            + self.l_gravity_cols
            + [self.angle_smooth_colname, self.velocity_colname]
        )
243
-
244
-
245
class ArmSwingDetectionConfig(IMUConfig):
    """Configuration for the arm-swing detection (classification) step."""

    def __init__(self) -> None:
        super().__init__()
        # Pre-trained classifier asset for arm-swing detection.
        self.classifier_file_name = "asd_classifier.pkl"

        self.set_filenames_values("arm_swing")
252
-
253
-
254
-
255
class ArmSwingQuantificationConfig(IMUConfig):
    """Configuration for quantifying arm swing from predicted arm-swing windows."""

    def __init__(self) -> None:
        super().__init__()
        self.set_filenames_values("arm_swing")

        self.pred_arm_swing_colname = DataColumns.PRED_ARM_SWING

        # Windowing and segmenting parameters (seconds).
        # NOTE(review): 'window_step_size' lacks the '_s' suffix the sibling
        # fields use — kept unchanged because callers read this exact name.
        self.window_length_s = 3
        self.window_step_size = 0.75
        self.segment_gap_s = 3
        self.min_segment_length_s = 3