kinemotion 0.76.0__py3-none-any.whl → 0.76.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kinemotion/countermovement_jump/analysis.py +14 -57
- kinemotion/countermovement_jump/metrics_validator.py +4 -6
- kinemotion/drop_jump/__init__.py +0 -6
- kinemotion/drop_jump/analysis.py +10 -10
- kinemotion/drop_jump/kinematics.py +6 -6
- kinemotion/squat_jump/analysis.py +72 -37
- kinemotion/squat_jump/debug_overlay.py +97 -149
- kinemotion/squat_jump/kinematics.py +6 -12
- kinemotion/squat_jump/metrics_validator.py +8 -16
- {kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/METADATA +25 -25
- {kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/RECORD +14 -14
- {kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/WHEEL +0 -0
- {kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/entry_points.txt +0 -0
- {kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/licenses/LICENSE +0 -0
kinemotion/countermovement_jump/analysis.py
CHANGED

@@ -55,50 +55,7 @@ class CMJPhase(Enum):
     UNKNOWN = "unknown"


-def
-    positions: FloatArray,
-    velocities: FloatArray,
-    min_search_frame: int = 80,
-) -> int:
-    """
-    Find the lowest point of countermovement (transition from eccentric to concentric).
-
-    The lowest point occurs BEFORE the peak height (the jump apex). It's where
-    velocity crosses from positive (downward/squatting) to negative (upward/jumping).
-
-    Args:
-        positions: Array of vertical positions (higher value = lower in video)
-        velocities: Array of SIGNED vertical velocities (positive=down, negative=up)
-        min_search_frame: Minimum frame to start searching (default: frame 80)
-
-    Returns:
-        Frame index of lowest point.
-    """
-    # First, find the peak height (minimum y value = highest jump point)
-    peak_height_frame = int(np.argmin(positions))
-
-    # Lowest point MUST be before peak height
-    # Search from min_search_frame to peak_height_frame
-    start_frame = min_search_frame
-    end_frame = peak_height_frame
-
-    if end_frame <= start_frame:
-        start_frame = int(len(positions) * 0.3)
-        end_frame = int(len(positions) * 0.7)
-
-    search_positions = positions[start_frame:end_frame]
-
-    if len(search_positions) == 0:
-        return start_frame
-
-    # Find maximum position value in this range (lowest point in video)
-    lowest_idx = int(np.argmax(search_positions))
-    lowest_frame = start_frame + lowest_idx
-
-    return lowest_frame
-
-
-def find_cmj_takeoff_from_velocity_peak(
+def _find_cmj_takeoff_from_velocity_peak(
     positions: FloatArray,
     velocities: FloatArray,
     lowest_point_frame: int,

@@ -135,7 +92,7 @@ def find_cmj_takeoff_from_velocity_peak(
     return float(takeoff_frame)


-def find_cmj_landing_from_position_peak(
+def _find_cmj_landing_from_position_peak(
     positions: FloatArray,
     velocities: FloatArray,
     accelerations: FloatArray,

@@ -193,7 +150,7 @@ def find_cmj_landing_from_position_peak(
     reason="Experimental alternative superseded by backward search algorithm",
     since="0.34.0",
 )
-def find_interpolated_takeoff_landing(
+def _find_interpolated_takeoff_landing(
     positions: FloatArray,
     velocities: FloatArray,
     lowest_point_frame: int,

@@ -226,19 +183,19 @@ def find_interpolated_takeoff_landing(
     )

     # Find takeoff using peak velocity method (CMJ-specific)
-    takeoff_frame = find_cmj_takeoff_from_velocity_peak(
+    takeoff_frame = _find_cmj_takeoff_from_velocity_peak(
         positions, velocities, lowest_point_frame, fps
     )

     # Find landing using position peak and impact detection
-    landing_frame = find_cmj_landing_from_position_peak(
+    landing_frame = _find_cmj_landing_from_position_peak(
         positions, velocities, accelerations, int(takeoff_frame), fps
     )

     return (takeoff_frame, landing_frame)


-def find_takeoff_frame(
+def _find_takeoff_frame(
     velocities: FloatArray,
     peak_height_frame: int,
     fps: float,

@@ -301,7 +258,7 @@ def find_takeoff_frame(
     return float(peak_vel_frame)


-def
+def _find_lowest_frame(
     velocities: FloatArray, positions: FloatArray, takeoff_frame: float, fps: float
 ) -> float:
     """Find lowest point frame before takeoff."""

@@ -370,7 +327,7 @@ def _find_landing_impact(
     return float(landing_frame)


-def
+def _find_landing_frame(
     accelerations: FloatArray,
     velocities: FloatArray,
     peak_height_frame: int,

@@ -422,7 +379,7 @@ def compute_average_hip_position(
     return (float(np.mean(x_positions)), float(np.mean(y_positions)))


-def
+def _find_standing_end(
     velocities: FloatArray,
     lowest_point: float,
     _positions: FloatArray | None = None,

@@ -533,12 +490,12 @@ def detect_cmj_phases(

     # Step 2-4: Find all phases using helper functions
     with timer.measure("cmj_find_takeoff"):
-        takeoff_frame = find_takeoff_frame(
+        takeoff_frame = _find_takeoff_frame(
            velocities, peak_height_frame, fps, accelerations=accelerations
        )

     with timer.measure("cmj_find_lowest_point"):
-        lowest_point =
+        lowest_point = _find_lowest_frame(velocities, positions, takeoff_frame, fps)

     # Determine landing frame
     with timer.measure("cmj_find_landing"):

@@ -552,7 +509,7 @@ def detect_cmj_phases(
         )
         # We still reference peak_height_frame from Hips, as Feet peak
         # might be different/noisy but generally they align in time.
-        landing_frame =
+        landing_frame = _find_landing_frame(
            landing_accelerations,
            landing_velocities,
            peak_height_frame,

@@ -560,7 +517,7 @@ def detect_cmj_phases(
         )
     else:
         # Use primary signal (Hips)
-        landing_frame =
+        landing_frame = _find_landing_frame(
            accelerations,
            velocities,
            peak_height_frame,

@@ -568,6 +525,6 @@ def detect_cmj_phases(
         )

     with timer.measure("cmj_find_standing_end"):
-        standing_end =
+        standing_end = _find_standing_end(velocities, lowest_point, positions, accelerations)

     return (standing_end, lowest_point, takeoff_frame, landing_frame)
kinemotion/countermovement_jump/metrics_validator.py
CHANGED

@@ -124,8 +124,8 @@ class CMJMetricsValidator(MetricsValidator):
         self._check_flight_time(data, result, profile)
         self._check_jump_height(data, result, profile)
         self._check_countermovement_depth(data, result, profile)
-        self._check_concentric_duration(data, result
-        self._check_eccentric_duration(data, result
+        self._check_concentric_duration(data, result)
+        self._check_eccentric_duration(data, result)
         self._check_peak_velocities(data, result, profile)

         # CROSS-VALIDATION CHECKS

@@ -220,7 +220,7 @@ class CMJMetricsValidator(MetricsValidator):
     )

     def _check_concentric_duration(
-        self, metrics: MetricsDict, result: CMJValidationResult
+        self, metrics: MetricsDict, result: CMJValidationResult
     ) -> None:
         """Validate concentric duration (contact time)."""
         duration_raw = self._get_metric_value(

@@ -256,9 +256,7 @@ class CMJMetricsValidator(MetricsValidator):
             value=duration,
         )

-    def _check_eccentric_duration(
-        self, metrics: MetricsDict, result: CMJValidationResult, profile: AthleteProfile
-    ) -> None:
+    def _check_eccentric_duration(self, metrics: MetricsDict, result: CMJValidationResult) -> None:
         """Validate eccentric duration."""
         duration_raw = self._get_metric_value(
             metrics, "eccentric_duration_ms", "eccentric_duration"
kinemotion/drop_jump/__init__.py
CHANGED

@@ -3,11 +3,8 @@
 from ..core.smoothing import interpolate_threshold_crossing
 from .analysis import (
     ContactState,
-    calculate_adaptive_threshold,
     compute_average_foot_position,
     detect_ground_contact,
-    find_interpolated_phase_transitions_with_curvature,
-    refine_transition_with_curvature,
 )
 from .debug_overlay import DropJumpDebugOverlayRenderer
 from .kinematics import DropJumpMetrics, calculate_drop_jump_metrics

@@ -17,10 +14,7 @@ __all__ = [
     "ContactState",
     "detect_ground_contact",
     "compute_average_foot_position",
-    "calculate_adaptive_threshold",
     "interpolate_threshold_crossing",
-    "refine_transition_with_curvature",
-    "find_interpolated_phase_transitions_with_curvature",
     # Metrics
     "DropJumpMetrics",
     "calculate_drop_jump_metrics",
kinemotion/drop_jump/analysis.py
CHANGED

@@ -27,7 +27,7 @@ class ContactState(Enum):
     remove_in="1.0.0",
     since="0.34.0",
 )
-def calculate_adaptive_threshold(
+def _calculate_adaptive_threshold(
     positions: FloatArray,
     fps: float,
     baseline_duration: float = 3.0,

@@ -188,7 +188,7 @@ def _find_drop_from_baseline(
     return 0


-def
+def _detect_drop_start(
     positions: FloatArray,
     fps: float,
     min_stationary_duration: float = 1.0,

@@ -580,7 +580,7 @@ def _interpolate_phase_end(
     )


-def find_interpolated_phase_transitions(
+def _find_interpolated_phase_transitions(
     foot_positions: FloatArray,
     contact_states: list[ContactState],
     velocity_threshold: float,

@@ -622,7 +622,7 @@ def find_interpolated_phase_transitions(
     return interpolated_phases


-def refine_transition_with_curvature(
+def _refine_transition_with_curvature(
     foot_positions: FloatArray,
     estimated_frame: float,
     transition_type: str,

@@ -720,7 +720,7 @@ def _refine_phase_boundaries(
     Returns:
         Tuple of (refined_start, refined_end) fractional frame indices
     """
-    refined_start = refine_transition_with_curvature(
+    refined_start = _refine_transition_with_curvature(
         foot_positions,
         start_frac,
         start_type,

@@ -728,7 +728,7 @@ def _refine_phase_boundaries(
         smoothing_window=smoothing_window,
         polyorder=polyorder,
     )
-    refined_end = refine_transition_with_curvature(
+    refined_end = _refine_transition_with_curvature(
         foot_positions,
         end_frac,
         end_type,

@@ -739,7 +739,7 @@ def _refine_phase_boundaries(
     return refined_start, refined_end


-def find_interpolated_phase_transitions_with_curvature(
+def _find_interpolated_phase_transitions_with_curvature(
     foot_positions: FloatArray,
     contact_states: list[ContactState],
     velocity_threshold: float,

@@ -767,7 +767,7 @@ def find_interpolated_phase_transitions_with_curvature(
         List of (start_frame, end_frame, state) tuples with fractional frame indices
     """
     # Get interpolated phases using velocity
-    interpolated_phases = find_interpolated_phase_transitions(
+    interpolated_phases = _find_interpolated_phase_transitions(
         foot_positions, contact_states, velocity_threshold, smoothing_window
     )

@@ -808,7 +808,7 @@ def find_interpolated_phase_transitions_with_curvature(
     return refined_phases


-def find_landing_from_acceleration(
+def _find_landing_from_acceleration(
     positions: FloatArray,
     accelerations: FloatArray,
     takeoff_frame: int,

@@ -954,7 +954,7 @@ def _calculate_average_visibility(
     reason="Alternative implementation not called by pipeline",
     since="0.34.0",
 )
-def
+def _extract_foot_positions_and_visibilities(
     smoothed_landmarks: list[dict[str, tuple[float, float, float]] | None],
 ) -> tuple[np.ndarray, np.ndarray]:
     """
kinemotion/drop_jump/kinematics.py
CHANGED

@@ -10,10 +10,10 @@ from ..core.smoothing import compute_acceleration_from_derivative
 from ..core.timing import NULL_TIMER, Timer
 from .analysis import (
     ContactState,
-
+    _detect_drop_start,  # pyright: ignore[reportPrivateUsage]
+    _find_interpolated_phase_transitions_with_curvature,  # pyright: ignore[reportPrivateUsage]
+    _find_landing_from_acceleration,  # pyright: ignore[reportPrivateUsage]
     find_contact_phases,
-    find_interpolated_phase_transitions_with_curvature,
-    find_landing_from_acceleration,
 )

 if TYPE_CHECKING:

@@ -159,7 +159,7 @@ def _determine_drop_start_frame(
     """
     if drop_start_frame is None:
         # Auto-detect where drop jump actually starts (skip initial stationary period)
-        return
+        return _detect_drop_start(
             foot_y_positions,
             fps,
             min_stationary_duration=0.5,

@@ -445,7 +445,7 @@ def _analyze_flight_phase(
     accelerations = compute_acceleration_from_derivative(
         foot_y_positions, window_length=smoothing_window, polyorder=polyorder
     )
-    flight_end = find_landing_from_acceleration(
+    flight_end = _find_landing_from_acceleration(
         foot_y_positions, accelerations, flight_start, fps
     )

@@ -559,7 +559,7 @@ def calculate_drop_jump_metrics(
     # Find contact phases
     with timer.measure("dj_find_phases"):
         phases = find_contact_phases(contact_states)
-        interpolated_phases = find_interpolated_phase_transitions_with_curvature(
+        interpolated_phases = _find_interpolated_phase_transitions_with_curvature(
             foot_y_positions,
             contact_states,
             velocity_threshold,
kinemotion/squat_jump/analysis.py
CHANGED

@@ -21,7 +21,6 @@ class SJPhase(Enum):
 def detect_sj_phases(
     positions: FloatArray,
     fps: float,
-    squat_hold_threshold: float = 0.02,
     velocity_threshold: float = 0.1,
     window_length: int = 5,
     polyorder: int = 2,

@@ -38,7 +37,6 @@ def detect_sj_phases(
     Args:
         positions: 1D array of vertical positions (normalized coordinates)
         fps: Video frames per second
-        squat_hold_threshold: Threshold for detecting squat hold phase (m)
         velocity_threshold: Threshold for detecting flight phase (m/s)
         window_length: Window size for velocity smoothing
         polyorder: Polynomial order for smoothing

@@ -55,7 +53,7 @@ def detect_sj_phases(
         window_length += 1

     # Step 1: Detect squat hold start
-    squat_hold_start = detect_squat_start(
+    squat_hold_start = _detect_squat_start(
         positions,
         fps,
         velocity_threshold=velocity_threshold,

@@ -77,7 +75,7 @@ def detect_sj_phases(
     )

     # Step 3: Detect takeoff (this marks the start of concentric phase)
-    takeoff_frame = detect_takeoff(
+    takeoff_frame = _detect_takeoff(
         positions, velocities, fps, velocity_threshold=velocity_threshold
     )

@@ -94,7 +92,7 @@ def detect_sj_phases(
             break

     # Step 4: Detect landing
-    landing_frame = detect_landing(
+    landing_frame = _detect_landing(
         positions,
         velocities,
         fps,

@@ -128,7 +126,7 @@ def detect_sj_phases(
     return (squat_hold_start, concentric_start, takeoff_frame, landing_frame)


-def detect_squat_start(
+def _detect_squat_start(
     positions: FloatArray,
     fps: float,
     velocity_threshold: float = 0.1,

@@ -183,7 +181,27 @@ def detect_squat_start(
     return None


-def detect_takeoff(
+def _find_takeoff_threshold_crossing(
+    velocities: FloatArray,
+    search_start: int,
+    search_end: int,
+    velocity_threshold: float,
+    min_duration_frames: int,
+) -> int | None:
+    """Find first frame where velocity exceeds threshold for minimum duration."""
+    above_threshold = velocities[search_start:search_end] <= -velocity_threshold
+    if not np.any(above_threshold):
+        return None
+
+    threshold_indices = np.nonzero(above_threshold)[0]
+    for idx in threshold_indices:
+        if idx + min_duration_frames < len(above_threshold):
+            if np.all(above_threshold[idx : idx + min_duration_frames]):
+                return search_start + idx
+    return None
+
+
+def _detect_takeoff(
     positions: FloatArray,
     velocities: FloatArray,
     fps: float,

@@ -212,7 +230,7 @@ def detect_takeoff(
         return None

     # Find squat start to determine where to begin search
-    squat_start = detect_squat_start(positions, fps)
+    squat_start = _detect_squat_start(positions, fps)
     if squat_start is None:
         # If no squat start detected, start from reasonable middle point
         squat_start = len(positions) // 3

@@ -234,20 +252,44 @@ def detect_takeoff(
     # Verify velocity exceeds threshold
     if velocities[takeoff_frame] > -velocity_threshold:
         # Velocity not high enough - actual takeoff may be later
-        # Look for frames where velocity exceeds threshold
-        threshold_indices = np.where(above_threshold)[0]
-        for idx in threshold_indices:
-            if idx + min_duration_frames < len(above_threshold):
-                if np.all(above_threshold[idx : idx + min_duration_frames]):
-                    return search_start + idx
+        # Look for frames where velocity exceeds threshold with duration filter
+        return _find_takeoff_threshold_crossing(
+            velocities, search_start, search_end, velocity_threshold, min_duration_frames
+        )

     return takeoff_frame if velocities[takeoff_frame] <= -velocity_threshold else None


-def detect_landing(
+def _detect_impact_landing(
+    accelerations: FloatArray,
+    search_start: int,
+    search_end: int,
+) -> int | None:
+    """Detect landing by finding the maximum acceleration spike."""
+    landing_accelerations = accelerations[search_start:search_end]
+    if len(landing_accelerations) == 0:
+        return None
+
+    # Find maximum acceleration spike (impact)
+    landing_idx = int(np.argmax(landing_accelerations))
+    return search_start + landing_idx
+
+
+def _refine_landing_by_velocity(
+    velocities: FloatArray,
+    landing_frame: int,
+) -> int:
+    """Refine landing frame by looking for positive (downward) velocity."""
+    if landing_frame < len(velocities) and velocities[landing_frame] < 0:
+        # Velocity still upward - landing might not be detected yet
+        # Look ahead for where velocity becomes positive
+        for i in range(landing_frame, min(landing_frame + 10, len(velocities))):
+            if velocities[i] >= 0:
+                return i
+    return landing_frame
+
+
+def _detect_landing(
     positions: FloatArray,
     velocities: FloatArray,
     fps: float,

@@ -270,6 +312,8 @@ def detect_landing(
         velocity_threshold: Maximum velocity threshold for landing detection
         min_flight_frames: Minimum frames in flight before landing
         landing_search_window_s: Time window to search for landing after peak (seconds)
+        window_length: Window size for velocity smoothing
+        polyorder: Polynomial order for smoothing

     Returns:
         Frame index where landing occurs, or None if not detected

@@ -278,7 +322,7 @@ def detect_landing(
         return None

     # Find takeoff first (needed to determine where to start peak search)
-    takeoff_frame = detect_takeoff(
+    takeoff_frame = _detect_takeoff(
         positions, velocities, fps, velocity_threshold, 5, landing_search_window_s
     )
     if takeoff_frame is None:

@@ -294,8 +338,7 @@ def detect_landing(

     # Find peak height (minimum position value in normalized coords = highest point)
     flight_positions = positions[search_start:search_end]
-
-    peak_frame = search_start + peak_idx
+    peak_frame = search_start + int(np.argmin(flight_positions))

     # After peak, look for landing using impact detection
     landing_search_start = peak_frame + min_flight_frames

@@ -312,8 +355,12 @@ def detect_landing(
         landing_window = window_length
         if landing_window % 2 == 0:
             landing_window += 1
+        # Use polyorder for smoothing (must be at least 2 for deriv=2)
+        eff_polyorder = max(2, polyorder)
         accelerations = np.abs(
-            savgol_filter(
+            savgol_filter(
+                positions, landing_window, eff_polyorder, deriv=2, delta=1.0, mode="interp"
+            )
         )
     else:
         # Fallback for short sequences

@@ -321,22 +368,10 @@ def detect_landing(
         accelerations = np.abs(np.diff(velocities_abs, prepend=velocities_abs[0]))

     # Find impact: maximum positive acceleration (deceleration spike)
-
+    landing_frame = _detect_impact_landing(accelerations, landing_search_start, landing_search_end)

-    if
+    if landing_frame is None:
         return None

-    # Find maximum acceleration spike (impact)
-    landing_idx = int(np.argmax(landing_accelerations))
-    landing_frame = landing_search_start + landing_idx
-
     # Additional verification: velocity should be positive (downward) at landing
-    # Velocity still upward - landing might not be detected yet
-    # Look ahead for where velocity becomes positive
-    for i in range(landing_frame, min(landing_frame + 10, len(velocities))):
-        if velocities[i] >= 0:
-            landing_frame = i
-            break
-
-    return landing_frame
+    return _refine_landing_by_velocity(velocities, landing_frame)
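The landing logic above keeps its original approach: a Savitzky-Golay second derivative of the position signal gives a smoothed acceleration trace, and the largest spike after the flight peak is taken as the impact. As a self-contained sketch of that idea only (the function name and window values below are illustrative, not part of the package API):

    import numpy as np
    from scipy.signal import savgol_filter

    def find_impact_frame(
        positions: np.ndarray, search_start: int, search_end: int,
        window: int = 7, polyorder: int = 2,
    ) -> int | None:
        """Return the frame with the largest |acceleration| spike in the search range."""
        if window % 2 == 0:
            window += 1  # savgol_filter needs an odd window length
        # Second derivative of position = acceleration (per-frame units)
        accelerations = np.abs(
            savgol_filter(positions, window, max(2, polyorder), deriv=2, delta=1.0, mode="interp")
        )
        segment = accelerations[search_start:search_end]
        if len(segment) == 0:
            return None
        return search_start + int(np.argmax(segment))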
kinemotion/squat_jump/debug_overlay.py
CHANGED

@@ -1,83 +1,43 @@
 """Debug overlay visualization for Squat Jump analysis."""

-from typing import Any
-
 import cv2
 import numpy as np

+from ..core.debug_overlay_utils import BaseDebugOverlayRenderer
+from ..core.overlay_constants import (
+    CYAN,
+    GREEN,
+    PHASE_LABEL_LINE_HEIGHT,
+    PHASE_LABEL_START_Y,
+    RED,
+    WHITE,
+    Color,
+    LandmarkDict,
+)
+from .analysis import SJPhase
+from .kinematics import SJMetrics
+
+
+class SquatJumpDebugOverlayRenderer(BaseDebugOverlayRenderer):
     """Debug overlay renderer for Squat Jump analysis results."""

-    def
-        """Initialize debug overlay renderer.
-
-        Args:
-            output_path: Path to output video file
-            input_width: Width of input frames
-            input_height: Height of input frames
-            output_width: Width of output video
-            output_height: Height of output video
-            fps: Frames per second for output video
-            timer: Optional timer for performance profiling
-        """
-        self.output_path = output_path
-        self.input_width = input_width
-        self.input_height = input_height
-        self.output_width = output_width
-        self.output_height = output_height
-        self.fps = fps
-        self.timer = timer
-
-        self.writer = None
-        self.frame_count = 0
-
-    def __enter__(self):
-        """Enter context manager and initialize video writer."""
-        fourcc = cv2.VideoWriter_fourcc(*"mp4v")  # type: ignore[attr-defined]
-        self.writer = cv2.VideoWriter(
-            self.output_path,
-            fourcc,
-            self.fps,
-            (self.output_width, self.output_height),
-        )
-        return self
-
-    def __exit__(
-        self,
-        exc_type: type[BaseException] | None,
-        exc_val: BaseException | None,
-        exc_tb: Any,
-    ) -> None:
-        """Exit context manager and release video writer."""
-        if self.writer:
-            self.writer.release()
-
-    def write_frame(self, frame: np.ndarray) -> None:
-        """Write a frame to the output video.
-
-        Args:
-            frame: Annotated frame to write
-        """
-        if self.writer:
-            self.writer.write(frame)
-            self.frame_count += 1
+    def _get_phase_color(self, phase: SJPhase) -> Color:
+        """Get color based on jump phase."""
+        phase_colors = {
+            SJPhase.SQUAT_HOLD: (255, 255, 0),  # Yellow
+            SJPhase.CONCENTRIC: (0, 165, 255),  # Orange
+            SJPhase.FLIGHT: RED,
+            SJPhase.LANDING: GREEN,
+            SJPhase.UNKNOWN: WHITE,
+        }
+        return phase_colors.get(phase, WHITE)

     def render_frame(
         self,
         frame: np.ndarray,
-        landmarks:
+        landmarks: LandmarkDict | None,
         frame_index: int,
-        metrics:
+        metrics: SJMetrics | None = None,
     ) -> np.ndarray:
         """Render debug overlay on a single frame.

@@ -93,36 +53,53 @@ class SquatJumpDebugOverlayRenderer:
         # Create a copy to avoid modifying the original
         annotated_frame = frame.copy()

-        #
+        # Determine current phase
+        current_phase = SJPhase.UNKNOWN
+        if metrics:
+            if (
+                metrics.squat_hold_start_frame is not None
+                and metrics.concentric_start_frame is not None
+                and metrics.squat_hold_start_frame <= frame_index < metrics.concentric_start_frame
+            ):
+                current_phase = SJPhase.SQUAT_HOLD
+            elif (
+                metrics.concentric_start_frame is not None
+                and metrics.takeoff_frame is not None
+                and metrics.concentric_start_frame <= frame_index < metrics.takeoff_frame
+            ):
+                current_phase = SJPhase.CONCENTRIC
+            elif (
+                metrics.takeoff_frame is not None
+                and metrics.landing_frame is not None
+                and metrics.takeoff_frame <= frame_index < metrics.landing_frame
+            ):
+                current_phase = SJPhase.FLIGHT
+            elif (
+                metrics.landing_frame is not None
+                and metrics.landing_frame <= frame_index < metrics.landing_frame + 15
+            ):
+                current_phase = SJPhase.LANDING
+
+        # Draw skeleton and landmarks
         if landmarks:
-            self.
+            self._draw_skeleton(annotated_frame, landmarks)

-        #
+        # Draw frame information
+        self._draw_frame_info(annotated_frame, frame_index, current_phase)
+
+        # Draw metrics if available
         if metrics:
-            self._draw_metrics(annotated_frame, metrics)
+            self._draw_metrics(annotated_frame, metrics, frame_index)

         return annotated_frame

-    def _draw_frame_info(self, frame: np.ndarray, frame_index: int) -> None:
+    def _draw_frame_info(self, frame: np.ndarray, frame_index: int, phase: SJPhase) -> None:
         """Draw frame information overlay.

         Args:
             frame: Frame to draw on
             frame_index: Current frame index
+            phase: Current jump phase
         """
         # Draw frame counter
         cv2.putText(

@@ -131,85 +108,56 @@ class SquatJumpDebugOverlayRenderer:
             (10, 30),
             cv2.FONT_HERSHEY_SIMPLEX,
             0.7,
-
+            WHITE,
             2,
             cv2.LINE_AA,
         )

+        # Draw phase label
+        phase_color = self._get_phase_color(phase)
+        cv2.putText(
+            frame,
+            f"Phase: {phase.value.replace('_', ' ').upper()}",
+            (10, 70),
+            cv2.FONT_HERSHEY_SIMPLEX,
+            0.8,
+            phase_color,
+            2,
+            cv2.LINE_AA,
+        )

-    def _draw_metrics(self, frame: np.ndarray, metrics:
+    def _draw_metrics(self, frame: np.ndarray, metrics: SJMetrics, frame_index: int) -> None:
         """Draw metrics information on frame.

         Args:
             frame: Frame to draw on
             metrics: Metrics object with analysis results
+            frame_index: Current frame index
         """
-        #
-        )
-        y_offset += 30
-
-        if hasattr(metrics, "flight_time"):
-            text = f"Flight Time: {metrics.flight_time * 1000:.1f} ms"
-            cv2.putText(
-                frame,
-                text,
-                (10, y_offset),
-                cv2.FONT_HERSHEY_SIMPLEX,
-                0.6,
-                (255, 255, 255),
-                2,
-                cv2.LINE_AA,
-            )
-            y_offset += 30
-
-        if hasattr(metrics, "squat_hold_duration"):
-            text = f"Squat Hold: {metrics.squat_hold_duration * 1000:.1f} ms"
-            cv2.putText(
-                frame,
-                text,
-                (10, y_offset),
-                cv2.FONT_HERSHEY_SIMPLEX,
-                0.6,
-                (255, 255, 255),
-                2,
-                cv2.LINE_AA,
-            )
-            y_offset += 30
-
-        if hasattr(metrics, "concentric_duration"):
-            text = f"Concentric: {metrics.concentric_duration * 1000:.1f} ms"
+        # Only show summary metrics after takeoff or at the end
+        if metrics.takeoff_frame is None or frame_index < metrics.takeoff_frame:
+            return
+
+        y_offset = PHASE_LABEL_START_Y + 100
+
+        # Display key metrics
+        metric_items: list[tuple[str, Color]] = [
+            (f"Jump Height: {metrics.jump_height:.3f} m", WHITE),
+            (f"Flight Time: {metrics.flight_time * 1000:.1f} ms", RED),
+            (f"Concentric: {metrics.concentric_duration * 1000:.1f} ms", CYAN),
+        ]
+        if metrics.peak_power is not None:
+            metric_items.append((f"Peak Power: {metrics.peak_power:.0f} W", GREEN))
+
+        for text, color in metric_items:
             cv2.putText(
                 frame,
                 text,
                 (10, y_offset),
                 cv2.FONT_HERSHEY_SIMPLEX,
                 0.6,
-
+                color,
                 2,
                 cv2.LINE_AA,
             )
+            y_offset += PHASE_LABEL_LINE_HEIGHT
kinemotion/squat_jump/kinematics.py
CHANGED

@@ -165,13 +165,11 @@ def calculate_sj_metrics(
     peak_concentric_velocity = 0.0

     # Calculate power and force if mass is provided
-    peak_power =
+    peak_power = _calculate_peak_power(velocities, concentric_start, takeoff_frame, mass_kg)
+    mean_power = _calculate_mean_power(
         positions, velocities, concentric_start, takeoff_frame, fps, mass_kg
     )
-
-        positions, velocities, concentric_start, takeoff_frame, fps, mass_kg
-    )
-    peak_force = calculate_peak_force(
+    peak_force = _calculate_peak_force(
         positions, velocities, concentric_start, takeoff_frame, fps, mass_kg
     )

@@ -193,12 +191,10 @@ def calculate_sj_metrics(
     )


-def calculate_peak_power(
-    positions: FloatArray,
+def _calculate_peak_power(
     velocities: FloatArray,
     concentric_start: int,
     takeoff_frame: int,
-    fps: float,
     mass_kg: float | None,
 ) -> float | None:
     """Calculate peak power using Sayers et al. (1999) regression equation.

@@ -212,11 +208,9 @@ def calculate_peak_power(
     - Superior to Lewis formula (73% error) and Harman equation

     Args:
-        positions: 1D array of vertical positions (not used in regression)
         velocities: 1D array of vertical velocities
         concentric_start: Frame index where concentric phase begins
         takeoff_frame: Frame index where takeoff occurs
-        fps: Video frames per second
         mass_kg: Athlete mass in kilograms

     Returns:

@@ -246,7 +240,7 @@ def calculate_peak_power(
     return float(peak_power)


-def calculate_mean_power(
+def _calculate_mean_power(
     positions: FloatArray,
     velocities: FloatArray,
     concentric_start: int,

@@ -297,7 +291,7 @@ def calculate_mean_power(
     return float(mean_power)


-def calculate_peak_force(
+def _calculate_peak_force(
     positions: FloatArray,
     velocities: FloatArray,
     concentric_start: int,
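The `_calculate_peak_power` docstring above cites the Sayers et al. (1999) regression. For reference, a minimal standalone sketch of that regression — the function name and example values here are illustrative only, not the package's internal implementation:

    # Sayers et al. (1999): peak power (W) = 60.7 * jump height (cm) + 45.3 * mass (kg) - 2055
    def sayers_peak_power(jump_height_m: float, mass_kg: float) -> float:
        """Estimate peak power in watts from jump height and body mass."""
        return 60.7 * (jump_height_m * 100.0) + 45.3 * mass_kg - 2055.0

    # Example: a 0.35 m squat jump by a 75 kg athlete -> roughly 3467 W
    print(round(sayers_peak_power(0.35, 75.0)))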
kinemotion/squat_jump/metrics_validator.py
CHANGED

@@ -114,11 +114,11 @@ class SJMetricsValidator(MetricsValidator):
         # PRIMARY BOUNDS CHECKS
         self._check_flight_time(data, result, profile)
         self._check_jump_height(data, result, profile)
-        self._check_squat_hold_duration(data, result
-        self._check_concentric_duration(data, result
+        self._check_squat_hold_duration(data, result)
+        self._check_concentric_duration(data, result)
         self._check_peak_concentric_velocity(data, result, profile)
-        self._check_power_metrics(data, result
-        self._check_force_metrics(data, result
+        self._check_power_metrics(data, result)
+        self._check_force_metrics(data, result)

         # CROSS-VALIDATION CHECKS
         self._check_flight_time_height_consistency(data, result)

@@ -183,9 +183,7 @@ class SJMetricsValidator(MetricsValidator):
             format_str="{value:.3f}m",
         )

-    def _check_squat_hold_duration(
-        self, metrics: MetricsDict, result: SJValidationResult, profile: AthleteProfile
-    ) -> None:
+    def _check_squat_hold_duration(self, metrics: MetricsDict, result: SJValidationResult) -> None:
         """Validate squat hold duration."""
         duration_raw = self._get_metric_value(
             metrics, "squat_hold_duration_ms", "squat_hold_duration"

@@ -210,9 +208,7 @@ class SJMetricsValidator(MetricsValidator):
             value=duration,
         )

-    def _check_concentric_duration(
-        self, metrics: MetricsDict, result: SJValidationResult, profile: AthleteProfile
-    ) -> None:
+    def _check_concentric_duration(self, metrics: MetricsDict, result: SJValidationResult) -> None:
         """Validate concentric duration."""
         duration_raw = self._get_metric_value(
             metrics, "concentric_duration_ms", "concentric_duration"

@@ -268,9 +264,7 @@ class SJMetricsValidator(MetricsValidator):
             format_str="{value:.3f} m/s",
         )

-    def _check_power_metrics(
-        self, metrics: MetricsDict, result: SJValidationResult, profile: AthleteProfile
-    ) -> None:
+    def _check_power_metrics(self, metrics: MetricsDict, result: SJValidationResult) -> None:
         """Validate power metrics (peak and mean)."""
         power_checks = [
             ("peak_power", "peak_power_w", SJBounds.PEAK_POWER, ""),

@@ -305,9 +299,7 @@ class SJMetricsValidator(MetricsValidator):
             value=power,
         )

-    def _check_force_metrics(
-        self, metrics: MetricsDict, result: SJValidationResult, profile: AthleteProfile
-    ) -> None:
+    def _check_force_metrics(self, metrics: MetricsDict, result: SJValidationResult) -> None:
         """Validate force metrics."""
         peak_force = self._get_metric_value(metrics, "peak_force_n", "peak_force")
         if peak_force is None:
{kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kinemotion
-Version: 0.76.0
+Version: 0.76.2
 Summary: Video-based kinematic analysis for athletic performance
 Project-URL: Homepage, https://github.com/feniix/kinemotion
 Project-URL: Repository, https://github.com/feniix/kinemotion

@@ -625,9 +625,9 @@ The debug video includes:
 **Solutions**:

 1. **Check video quality**: Ensure the athlete is clearly visible in profile view
-
-
-
+2. **Increase smoothing**: Use `--smoothing-window 7` or higher
+3. **Adjust detection confidence**: Try `--detection-confidence 0.6` or `--tracking-confidence 0.6`
+4. **Generate debug video**: Use `--output` to visualize what's being tracked

 ### No Pose Detected

@@ -636,9 +636,9 @@ The debug video includes:
 **Solutions**:

 1. **Verify video format**: OpenCV must be able to read the video
-
-
-
+2. **Check framing**: Ensure full body is visible in side view
+3. **Lower confidence thresholds**: Try `--detection-confidence 0.3 --tracking-confidence 0.3`
+4. **Test video playback**: Verify video opens correctly with standard video players

 ### Incorrect Contact Detection

@@ -647,11 +647,11 @@ The debug video includes:
 **Solutions**:

 1. **Generate debug video**: Visualize contact states to diagnose the issue
-
+2. **Adjust velocity threshold**:
    - If missing contacts: decrease to `--velocity-threshold 0.01`
    - If false contacts: increase to `--velocity-threshold 0.03`
-
-
+3. **Adjust minimum frames**: `--min-contact-frames 5` for longer required contact
+4. **Check visibility**: Lower `--visibility-threshold 0.3` if feet are partially obscured

 ### Jump Height Seems Wrong

@@ -660,9 +660,9 @@ The debug video includes:
 **Solutions**:

 1. **Check video quality**: Ensure video frame rate is adequate (30fps or higher recommended)
-
-
-
+2. **Verify flight time detection**: Check `flight_start_frame` and `flight_end_frame` in JSON
+3. **Compare measurements**: JSON output includes both `jump_height_m` (primary) and `jump_height_kinematic_m` (kinematic-only)
+4. **Check for drop jump detection**: If doing a drop jump, ensure first phase is elevated enough (>5% of frame height)

 ### Video Codec Issues

@@ -671,30 +671,30 @@ The debug video includes:
 **Solutions**:

 1. **Install additional codecs**: Ensure OpenCV has proper video codec support
-
-
+2. **Try different output format**: Use `.avi` extension instead of `.mp4`
+3. **Check output path**: Ensure write permissions for output directory

 ## How It Works

 1. **Pose Tracking**: MediaPipe extracts 2D pose landmarks (foot points: ankles, heels, foot indices) from each frame
-
-
-
-
+2. **Position Calculation**: Averages ankle, heel, and foot index positions to determine foot location
+3. **Smoothing**: Savitzky-Golay filter reduces tracking jitter while preserving motion dynamics
+4. **Contact Detection**: Analyzes vertical position velocity to identify ground contact vs. flight phases
+5. **Phase Identification**: Finds continuous ground contact and flight periods
    - Automatically detects drop jumps vs regular jumps
    - For drop jumps: identifies box → drop → ground contact → jump sequence
-
+6. **Sub-Frame Interpolation**: Estimates exact transition times between frames
    - Uses Savitzky-Golay derivative for smooth velocity calculation
    - Linear interpolation of velocity to find threshold crossings
    - Achieves sub-millisecond timing precision (at 30fps: ±10ms vs ±33ms)
    - Reduces timing error by 60-70% for contact and flight measurements
    - Smoother velocity curves eliminate false threshold crossings
-
+7. **Trajectory Curvature Analysis**: Refines transitions using acceleration patterns
    - Computes second derivative (acceleration) from position trajectory
    - Detects landing impact by acceleration spike
    - Identifies takeoff by acceleration change patterns
    - Provides independent validation and refinement of velocity-based detection
-
+8. **Metric Calculation**:
    - Ground contact time = contact phase duration (using fractional frames)
    - Flight time = flight phase duration (using fractional frames)
    - Jump height = kinematic estimate from flight time: (g × t²) / 8

@@ -744,9 +744,9 @@ uv run ruff check && uv run pyright && uv run pytest
 Before committing code, ensure all checks pass:

 1. Format with Black
-
-
-
+2. Fix linting issues with ruff
+3. Ensure type safety with pyright
+4. Run all tests with pytest

 See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines and requirements, or [CLAUDE.md](CLAUDE.md) for detailed development guidelines.
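As a quick reference for the kinematic estimate quoted in the "How It Works" section of the README above (jump height from flight time, h = g × t² / 8), here is a minimal sketch; the function name and example values are illustrative only:

    # With only gravity acting during flight, jump height follows from flight time.
    G = 9.81  # m/s^2

    def jump_height_from_flight_time(flight_time_s: float) -> float:
        """Jump height in metres from total flight time in seconds: g * t^2 / 8."""
        return G * flight_time_s**2 / 8.0

    # Example: a 0.5 s flight corresponds to roughly 0.31 m of rise.
    print(round(jump_height_from_flight_time(0.5), 3))  # 0.307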
{kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/RECORD
CHANGED

@@ -23,20 +23,20 @@ kinemotion/core/validation.py,sha256=-8Wwe56PO37F0OAEMpWr1AB_7QmFtDY5bVmux3oiLYM
 kinemotion/core/video_analysis_base.py,sha256=U8j-6-dv6uiGUiIHl53AIVFUiVHotgTmMNvCArSXx0E,4045
 kinemotion/core/video_io.py,sha256=tLAHm63_sap-CXQpLzmgUXpWZ5_TtBI9LHP8Tk2L-z4,9355
 kinemotion/countermovement_jump/__init__.py,sha256=SkAw9ka8Yd1Qfv9hcvk22m3EfucROzYrSNGNF5kDzho,113
-kinemotion/countermovement_jump/analysis.py,sha256=
+kinemotion/countermovement_jump/analysis.py,sha256=XJd7o7p7l0WvjJuIzTBCnIyrhoUj7kryKZB9BltQIZY,19219
 kinemotion/countermovement_jump/api.py,sha256=uNo2JLuFDeBdpi3Y2qf-DyG-1KIRwmSF7HjXMu9Cwj0,19320
 kinemotion/countermovement_jump/cli.py,sha256=m727IOg5BuixgNraCXc2sjW5jGrxrg7RKvFS4qyrBK8,8902
 kinemotion/countermovement_jump/debug_overlay.py,sha256=vF5Apiz8zDRpgrVzf52manLW99m1kHQAPSdUkar5rPs,11474
 kinemotion/countermovement_jump/joint_angles.py,sha256=by5M4LDtUfd2_Z9DmcgUl0nsvarsBYjgsE8KWWYcn08,11255
 kinemotion/countermovement_jump/kinematics.py,sha256=KwA8uSj3g1SeNf0NXMSHsp3gIw6Gfa-6QWIwdYdRXYw,13362
-kinemotion/countermovement_jump/metrics_validator.py,sha256=
+kinemotion/countermovement_jump/metrics_validator.py,sha256=ma1XSLT-RIDrcjYmgfixf244TwbiosRzN7oFr4yWCXg,24609
 kinemotion/countermovement_jump/validation_bounds.py,sha256=-0iXDhH-RntiGZi_Co22V6qtA5D-hLzkrPkVcfoNd2U,11343
-kinemotion/drop_jump/__init__.py,sha256=
-kinemotion/drop_jump/analysis.py,sha256=
+kinemotion/drop_jump/__init__.py,sha256=p1lqit5OQruIcCizCJwG0OuUab6ktK_9HCp9eaHS2Io,616
+kinemotion/drop_jump/analysis.py,sha256=i38uOZEpmH0T-_8-m9Ne4I24dQO2B5tgYVIgwG0kJ4o,34880
 kinemotion/drop_jump/api.py,sha256=xcA7CkBjLQZhIs6UHr2mo5jT1p-D6e3cgRAFLSwZCmE,21927
 kinemotion/drop_jump/cli.py,sha256=WTUJWCjBl9SgR3Z-2cml1EQhVF8HaXIXQ27fS4tnR7U,14693
 kinemotion/drop_jump/debug_overlay.py,sha256=X4mvCi5Qi1gnvSZZAsUs-0ZRUx9mVBbEUznOFO21HO8,8470
-kinemotion/drop_jump/kinematics.py,sha256=
+kinemotion/drop_jump/kinematics.py,sha256=wQSXvchqZgc-KBnLRWJw_EuhXwo764xQltRL38toou4,23780
 kinemotion/drop_jump/metrics_validator.py,sha256=yY0wzFzUUDLn6pZcOnwErMlIt_aTWq-RyAkqIemBG5M,7885
 kinemotion/drop_jump/validation_bounds.py,sha256=k31qy-kCXTiCTx0RPo2t8yZ-faLxqGO-AeF05QfBFb0,5125
 kinemotion/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -44,16 +44,16 @@ kinemotion/models/pose_landmarker_lite.task,sha256=WZKeHR7pUodzXd2DOxnPSsRtKbx6_
 kinemotion/models/rtmpose-s_simcc-body7_pt-body7-halpe26_700e-256x192-7f134165_20230605.onnx,sha256=dfZTq8kbhv8RxWiXS0HUIJNCUpxYTBN45dFIorPflEs,133
 kinemotion/models/yolox_tiny_8xb8-300e_humanart-6f3252f9.onnx,sha256=UsutHVQ6GP3X5pCcp52EN8q7o2J3d-TnxZqlF48kY6I,133
 kinemotion/squat_jump/__init__.py,sha256=h6ubO3BUANxqjKMdN-KtlN6m77HARAP25PLzHw9k-Lk,99
-kinemotion/squat_jump/analysis.py,sha256=
+kinemotion/squat_jump/analysis.py,sha256=Y6pRtQDaSd2fl_WYwkGKWQ9_uOjATwGkrWRd1TOcOL8,13508
 kinemotion/squat_jump/api.py,sha256=YMbq2BQzB_SnZ_Z-2KnR_OO2xuua-Zg0hP29Ghbk_d4,20111
 kinemotion/squat_jump/cli.py,sha256=09Q9O4_sHxw6QWDyPiynDQZSMixTO32NrJ5PTXTJNIk,9806
-kinemotion/squat_jump/debug_overlay.py,sha256=
-kinemotion/squat_jump/kinematics.py,sha256=
-kinemotion/squat_jump/metrics_validator.py,sha256=
+kinemotion/squat_jump/debug_overlay.py,sha256=IZij8XQvWnmxfDhOZZiLIQ-0xuICx6lDYqcdS7TA3Kw,5280
+kinemotion/squat_jump/kinematics.py,sha256=RU5JobjkSV6Bxs3ope-4fvbeLBITFb_dv9uvHQY3fAE,12052
+kinemotion/squat_jump/metrics_validator.py,sha256=euqd3dYrDCqdifVhs0RPc-UUprkyR-PzTg-D_rIfmI4,15914
 kinemotion/squat_jump/validation_bounds.py,sha256=q01eQ8Eg01Y5UV3KlvZS1S9iY628OVPUwLoukHZvQOs,7276
 kinemotion/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kinemotion-0.76.
-kinemotion-0.76.
-kinemotion-0.76.
-kinemotion-0.76.
-kinemotion-0.76.
+kinemotion-0.76.2.dist-info/METADATA,sha256=Tro_HX69rBHnPkSVhhC2MPmI9qx5fWyTP4338wBjClA,27690
+kinemotion-0.76.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+kinemotion-0.76.2.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
+kinemotion-0.76.2.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
+kinemotion-0.76.2.dist-info/RECORD,,

{kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/WHEEL: file without changes
{kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/entry_points.txt: file without changes
{kinemotion-0.76.0.dist-info → kinemotion-0.76.2.dist-info}/licenses/LICENSE: file without changes