kinemotion-0.10.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kinemotion/__init__.py +14 -0
- kinemotion/api.py +428 -0
- kinemotion/cli.py +20 -0
- kinemotion/core/__init__.py +40 -0
- kinemotion/core/auto_tuning.py +289 -0
- kinemotion/core/filtering.py +345 -0
- kinemotion/core/pose.py +220 -0
- kinemotion/core/smoothing.py +366 -0
- kinemotion/core/video_io.py +166 -0
- kinemotion/dropjump/__init__.py +29 -0
- kinemotion/dropjump/analysis.py +639 -0
- kinemotion/dropjump/cli.py +738 -0
- kinemotion/dropjump/debug_overlay.py +252 -0
- kinemotion/dropjump/kinematics.py +439 -0
- kinemotion/py.typed +0 -0
- kinemotion-0.10.2.dist-info/METADATA +561 -0
- kinemotion-0.10.2.dist-info/RECORD +20 -0
- kinemotion-0.10.2.dist-info/WHEEL +4 -0
- kinemotion-0.10.2.dist-info/entry_points.txt +2 -0
- kinemotion-0.10.2.dist-info/licenses/LICENSE +21 -0
kinemotion/dropjump/debug_overlay.py
ADDED
@@ -0,0 +1,252 @@
"""Debug overlay rendering for drop jump analysis."""

import cv2
import numpy as np

from ..core.pose import compute_center_of_mass
from .analysis import ContactState, compute_average_foot_position
from .kinematics import DropJumpMetrics


class DebugOverlayRenderer:
    """Renders debug information on video frames."""

    def __init__(
        self,
        output_path: str,
        width: int,
        height: int,
        display_width: int,
        display_height: int,
        fps: float,
    ):
        """
        Initialize overlay renderer.

        Args:
            output_path: Path for output video
            width: Encoded frame width (from source video)
            height: Encoded frame height (from source video)
            display_width: Display width (considering SAR)
            display_height: Display height (considering SAR)
            fps: Frames per second
        """
        self.width = width
        self.height = height
        self.display_width = display_width
        self.display_height = display_height
        self.needs_resize = (display_width != width) or (display_height != height)

        # Try H.264 codec first (better quality/compatibility), fallback to mp4v
        fourcc = cv2.VideoWriter_fourcc(*"avc1")
        # IMPORTANT: cv2.VideoWriter expects (width, height) tuple - NOT (height, width)
        # Write at display dimensions so video displays correctly without SAR metadata
        self.writer = cv2.VideoWriter(
            output_path, fourcc, fps, (display_width, display_height)
        )

        # Check if writer opened successfully, fallback to mp4v if not
        if not self.writer.isOpened():
            fourcc = cv2.VideoWriter_fourcc(*"mp4v")
            self.writer = cv2.VideoWriter(
                output_path, fourcc, fps, (display_width, display_height)
            )

        if not self.writer.isOpened():
            raise ValueError(
                f"Failed to create video writer for {output_path} with dimensions "
                f"{display_width}x{display_height}"
            )

    def render_frame(
        self,
        frame: np.ndarray,
        landmarks: dict[str, tuple[float, float, float]] | None,
        contact_state: ContactState,
        frame_idx: int,
        metrics: DropJumpMetrics | None = None,
        use_com: bool = False,
    ) -> np.ndarray:
        """
        Render debug overlay on frame.

        Args:
            frame: Original video frame
            landmarks: Pose landmarks for this frame
            contact_state: Ground contact state
            frame_idx: Current frame index
            metrics: Drop-jump metrics (optional)
            use_com: Whether to visualize CoM instead of feet (optional)

        Returns:
            Frame with debug overlay
        """
        annotated = frame.copy()

        # Draw landmarks if available
        if landmarks:
            if use_com:
                # Draw center of mass position
                com_x, com_y, _ = compute_center_of_mass(landmarks)  # com_vis not used
                px = int(com_x * self.width)
                py = int(com_y * self.height)

                # Draw CoM with larger circle
                color = (
                    (0, 255, 0)
                    if contact_state == ContactState.ON_GROUND
                    else (0, 0, 255)
                )
                cv2.circle(annotated, (px, py), 15, color, -1)
                cv2.circle(annotated, (px, py), 17, (255, 255, 255), 2)  # White border

                # Draw body segments for reference
                # Draw hip midpoint
                if "left_hip" in landmarks and "right_hip" in landmarks:
                    lh_x, lh_y, _ = landmarks["left_hip"]
                    rh_x, rh_y, _ = landmarks["right_hip"]
                    hip_x = int((lh_x + rh_x) / 2 * self.width)
                    hip_y = int((lh_y + rh_y) / 2 * self.height)
                    cv2.circle(
                        annotated, (hip_x, hip_y), 8, (255, 165, 0), -1
                    )  # Orange
                    # Draw line from hip to CoM
                    cv2.line(annotated, (hip_x, hip_y), (px, py), (255, 165, 0), 2)
            else:
                # Draw foot position (original method)
                foot_x, foot_y = compute_average_foot_position(landmarks)
                px = int(foot_x * self.width)
                py = int(foot_y * self.height)

                # Draw foot position circle
                color = (
                    (0, 255, 0)
                    if contact_state == ContactState.ON_GROUND
                    else (0, 0, 255)
                )
                cv2.circle(annotated, (px, py), 10, color, -1)

                # Draw individual foot landmarks
                foot_keys = ["left_ankle", "right_ankle", "left_heel", "right_heel"]
                for key in foot_keys:
                    if key in landmarks:
                        x, y, vis = landmarks[key]
                        if vis > 0.5:
                            lx = int(x * self.width)
                            ly = int(y * self.height)
                            cv2.circle(annotated, (lx, ly), 5, (255, 255, 0), -1)

        # Draw contact state
        state_text = f"State: {contact_state.value}"
        state_color = (
            (0, 255, 0) if contact_state == ContactState.ON_GROUND else (0, 0, 255)
        )
        cv2.putText(
            annotated,
            state_text,
            (10, 30),
            cv2.FONT_HERSHEY_SIMPLEX,
            1,
            state_color,
            2,
        )

        # Draw frame number
        cv2.putText(
            annotated,
            f"Frame: {frame_idx}",
            (10, 70),
            cv2.FONT_HERSHEY_SIMPLEX,
            0.7,
            (255, 255, 255),
            2,
        )

        # Draw metrics if in relevant phase
        if metrics:
            y_offset = 110
            if (
                metrics.contact_start_frame
                and metrics.contact_end_frame
                and metrics.contact_start_frame
                <= frame_idx
                <= metrics.contact_end_frame
            ):
                cv2.putText(
                    annotated,
                    "GROUND CONTACT",
                    (10, y_offset),
                    cv2.FONT_HERSHEY_SIMPLEX,
                    0.7,
                    (0, 255, 0),
                    2,
                )
                y_offset += 40

            if (
                metrics.flight_start_frame
                and metrics.flight_end_frame
                and metrics.flight_start_frame <= frame_idx <= metrics.flight_end_frame
            ):
                cv2.putText(
                    annotated,
                    "FLIGHT PHASE",
                    (10, y_offset),
                    cv2.FONT_HERSHEY_SIMPLEX,
                    0.7,
                    (0, 0, 255),
                    2,
                )
                y_offset += 40

            if metrics.peak_height_frame == frame_idx:
                cv2.putText(
                    annotated,
                    "PEAK HEIGHT",
                    (10, y_offset),
                    cv2.FONT_HERSHEY_SIMPLEX,
                    0.7,
                    (255, 0, 255),
                    2,
                )

        return annotated

    def write_frame(self, frame: np.ndarray) -> None:
        """
        Write frame to output video.

        Args:
            frame: Video frame with shape (height, width, 3)

        Raises:
            ValueError: If frame dimensions don't match expected encoded dimensions
        """
        # Validate frame dimensions match expected encoded dimensions
        frame_height, frame_width = frame.shape[:2]
        if frame_height != self.height or frame_width != self.width:
            raise ValueError(
                f"Frame dimensions ({frame_width}x{frame_height}) don't match "
                f"source dimensions ({self.width}x{self.height}). "
                f"Aspect ratio must be preserved from source video."
            )

        # Resize to display dimensions if needed (to handle SAR)
        if self.needs_resize:
            frame = cv2.resize(
                frame,
                (self.display_width, self.display_height),
                interpolation=cv2.INTER_LANCZOS4,
            )

        self.writer.write(frame)

    def close(self) -> None:
        """Release video writer."""
        self.writer.release()

    def __enter__(self) -> "DebugOverlayRenderer":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:  # type: ignore[no-untyped-def]
        self.close()
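A minimal usage sketch (not part of the packaged code above) showing how DebugOverlayRenderer might be driven. The file paths are hypothetical, square pixels are assumed so display dimensions equal encoded dimensions, and the per-frame landmarks and contact state would normally come from the package's pose-tracking and contact-detection stages rather than the placeholder values used here.

import cv2

from kinemotion.dropjump.analysis import ContactState
from kinemotion.dropjump.debug_overlay import DebugOverlayRenderer

cap = cv2.VideoCapture("dropjump.mp4")  # hypothetical input video
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = cap.get(cv2.CAP_PROP_FPS)

# Assumes square pixels: display size equals encoded size, so no SAR resize happens.
with DebugOverlayRenderer("debug.mp4", width, height, width, height, fps) as renderer:
    frame_idx = 0
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        # Landmarks omitted here; pass real pose output to draw foot/CoM markers.
        annotated = renderer.render_frame(frame, None, ContactState.IN_AIR, frame_idx)
        renderer.write_frame(annotated)
        frame_idx += 1

cap.release()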
kinemotion/dropjump/kinematics.py
ADDED
@@ -0,0 +1,439 @@
"""Kinematic calculations for drop-jump metrics."""
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
|
|
5
|
+
from .analysis import (
|
|
6
|
+
ContactState,
|
|
7
|
+
detect_drop_start,
|
|
8
|
+
find_contact_phases,
|
|
9
|
+
find_interpolated_phase_transitions_with_curvature,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class DropJumpMetrics:
|
|
14
|
+
"""Container for drop-jump analysis metrics."""
|
|
15
|
+
|
|
16
|
+
def __init__(self) -> None:
|
|
17
|
+
self.ground_contact_time: float | None = None
|
|
18
|
+
self.flight_time: float | None = None
|
|
19
|
+
self.jump_height: float | None = None
|
|
20
|
+
self.jump_height_kinematic: float | None = None # From flight time
|
|
21
|
+
self.jump_height_trajectory: float | None = None # From position tracking
|
|
22
|
+
self.contact_start_frame: int | None = None
|
|
23
|
+
self.contact_end_frame: int | None = None
|
|
24
|
+
self.flight_start_frame: int | None = None
|
|
25
|
+
self.flight_end_frame: int | None = None
|
|
26
|
+
self.peak_height_frame: int | None = None
|
|
27
|
+
# Fractional frame indices for sub-frame precision timing
|
|
28
|
+
self.contact_start_frame_precise: float | None = None
|
|
29
|
+
self.contact_end_frame_precise: float | None = None
|
|
30
|
+
self.flight_start_frame_precise: float | None = None
|
|
31
|
+
self.flight_end_frame_precise: float | None = None
|
|
32
|
+
|
|
33
|
+
def to_dict(self) -> dict:
|
|
34
|
+
"""Convert metrics to dictionary for JSON output."""
|
|
35
|
+
return {
|
|
36
|
+
"ground_contact_time_ms": (
|
|
37
|
+
round(self.ground_contact_time * 1000, 2)
|
|
38
|
+
if self.ground_contact_time is not None
|
|
39
|
+
else None
|
|
40
|
+
),
|
|
41
|
+
"flight_time_ms": (
|
|
42
|
+
round(self.flight_time * 1000, 2)
|
|
43
|
+
if self.flight_time is not None
|
|
44
|
+
else None
|
|
45
|
+
),
|
|
46
|
+
"jump_height_m": (
|
|
47
|
+
round(self.jump_height, 3) if self.jump_height is not None else None
|
|
48
|
+
),
|
|
49
|
+
"jump_height_kinematic_m": (
|
|
50
|
+
round(self.jump_height_kinematic, 3)
|
|
51
|
+
if self.jump_height_kinematic is not None
|
|
52
|
+
else None
|
|
53
|
+
),
|
|
54
|
+
"jump_height_trajectory_normalized": (
|
|
55
|
+
round(self.jump_height_trajectory, 4)
|
|
56
|
+
if self.jump_height_trajectory is not None
|
|
57
|
+
else None
|
|
58
|
+
),
|
|
59
|
+
"contact_start_frame": (
|
|
60
|
+
int(self.contact_start_frame)
|
|
61
|
+
if self.contact_start_frame is not None
|
|
62
|
+
else None
|
|
63
|
+
),
|
|
64
|
+
"contact_end_frame": (
|
|
65
|
+
int(self.contact_end_frame)
|
|
66
|
+
if self.contact_end_frame is not None
|
|
67
|
+
else None
|
|
68
|
+
),
|
|
69
|
+
"flight_start_frame": (
|
|
70
|
+
int(self.flight_start_frame)
|
|
71
|
+
if self.flight_start_frame is not None
|
|
72
|
+
else None
|
|
73
|
+
),
|
|
74
|
+
"flight_end_frame": (
|
|
75
|
+
int(self.flight_end_frame)
|
|
76
|
+
if self.flight_end_frame is not None
|
|
77
|
+
else None
|
|
78
|
+
),
|
|
79
|
+
"peak_height_frame": (
|
|
80
|
+
int(self.peak_height_frame)
|
|
81
|
+
if self.peak_height_frame is not None
|
|
82
|
+
else None
|
|
83
|
+
),
|
|
84
|
+
"contact_start_frame_precise": (
|
|
85
|
+
round(self.contact_start_frame_precise, 3)
|
|
86
|
+
if self.contact_start_frame_precise is not None
|
|
87
|
+
else None
|
|
88
|
+
),
|
|
89
|
+
"contact_end_frame_precise": (
|
|
90
|
+
round(self.contact_end_frame_precise, 3)
|
|
91
|
+
if self.contact_end_frame_precise is not None
|
|
92
|
+
else None
|
|
93
|
+
),
|
|
94
|
+
"flight_start_frame_precise": (
|
|
95
|
+
round(self.flight_start_frame_precise, 3)
|
|
96
|
+
if self.flight_start_frame_precise is not None
|
|
97
|
+
else None
|
|
98
|
+
),
|
|
99
|
+
"flight_end_frame_precise": (
|
|
100
|
+
round(self.flight_end_frame_precise, 3)
|
|
101
|
+
if self.flight_end_frame_precise is not None
|
|
102
|
+
else None
|
|
103
|
+
),
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def calculate_drop_jump_metrics(
|
|
108
|
+
contact_states: list[ContactState],
|
|
109
|
+
foot_y_positions: np.ndarray,
|
|
110
|
+
fps: float,
|
|
111
|
+
drop_height_m: float | None = None,
|
|
112
|
+
drop_start_frame: int | None = None,
|
|
113
|
+
velocity_threshold: float = 0.02,
|
|
114
|
+
smoothing_window: int = 5,
|
|
115
|
+
polyorder: int = 2,
|
|
116
|
+
use_curvature: bool = True,
|
|
117
|
+
kinematic_correction_factor: float = 1.0,
|
|
118
|
+
) -> DropJumpMetrics:
|
|
119
|
+
"""
|
|
120
|
+
Calculate drop-jump metrics from contact states and positions.
|
|
121
|
+
|
|
122
|
+
Args:
|
|
123
|
+
contact_states: Contact state for each frame
|
|
124
|
+
foot_y_positions: Vertical positions of feet (normalized 0-1)
|
|
125
|
+
fps: Video frame rate
|
|
126
|
+
drop_height_m: Known drop box/platform height in meters for calibration (optional)
|
|
127
|
+
velocity_threshold: Velocity threshold used for contact detection (for interpolation)
|
|
128
|
+
smoothing_window: Window size for velocity/acceleration smoothing (must be odd)
|
|
129
|
+
polyorder: Polynomial order for Savitzky-Golay filter (default: 2)
|
|
130
|
+
use_curvature: Whether to use curvature analysis for refining transitions
|
|
131
|
+
kinematic_correction_factor: Correction factor for kinematic jump height calculation
|
|
132
|
+
(default: 1.0 = no correction). Historical testing suggested 1.35, but this is
|
|
133
|
+
unvalidated. Use calibrated measurement (--drop-height) for validated results.
|
|
134
|
+
|
|
135
|
+
Returns:
|
|
136
|
+
DropJumpMetrics object with calculated values
|
|
137
|
+
"""
|
|
138
|
+
metrics = DropJumpMetrics()
|
|
139
|
+
|
|
140
|
+
# Detect or use manually specified drop jump start frame
|
|
141
|
+
if drop_start_frame is None:
|
|
142
|
+
# Auto-detect where drop jump actually starts (skip initial stationary period)
|
|
143
|
+
drop_start_frame = detect_drop_start(
|
|
144
|
+
foot_y_positions,
|
|
145
|
+
fps,
|
|
146
|
+
min_stationary_duration=0.5, # 0.5s stable period (~30 frames @ 60fps)
|
|
147
|
+
position_change_threshold=0.005, # 0.5% of frame height - sensitive to drop start
|
|
148
|
+
smoothing_window=smoothing_window,
|
|
149
|
+
)
|
|
150
|
+
# If manually specified or auto-detected, use it; otherwise start from frame 0
|
|
151
|
+
drop_start_frame_value: int
|
|
152
|
+
if drop_start_frame is None: # pyright: ignore[reportUnnecessaryComparison]
|
|
153
|
+
drop_start_frame_value = 0
|
|
154
|
+
else:
|
|
155
|
+
drop_start_frame_value = drop_start_frame
|
|
156
|
+
|
|
157
|
+
phases = find_contact_phases(contact_states)
|
|
158
|
+
|
|
159
|
+
# Get interpolated phases with curvature-based refinement
|
|
160
|
+
# Combines velocity interpolation + acceleration pattern analysis
|
|
161
|
+
interpolated_phases = find_interpolated_phase_transitions_with_curvature(
|
|
162
|
+
foot_y_positions,
|
|
163
|
+
contact_states,
|
|
164
|
+
velocity_threshold,
|
|
165
|
+
smoothing_window,
|
|
166
|
+
polyorder,
|
|
167
|
+
use_curvature,
|
|
168
|
+
)
|
|
169
|
+
|
|
170
|
+
if not phases:
|
|
171
|
+
return metrics
|
|
172
|
+
|
|
173
|
+
# Filter phases to only include those after drop start
|
|
174
|
+
# This removes the initial stationary period where athlete is standing on box
|
|
175
|
+
if drop_start_frame_value > 0:
|
|
176
|
+
phases = [
|
|
177
|
+
(start, end, state)
|
|
178
|
+
for start, end, state in phases
|
|
179
|
+
if end >= drop_start_frame_value
|
|
180
|
+
]
|
|
181
|
+
interpolated_phases = [
|
|
182
|
+
(start, end, state)
|
|
183
|
+
for start, end, state in interpolated_phases
|
|
184
|
+
if end >= drop_start_frame_value
|
|
185
|
+
]
|
|
186
|
+
|
|
187
|
+
if not phases:
|
|
188
|
+
return metrics
|
|
189
|
+
|
|
190
|
+
# Find the main contact phase
|
|
191
|
+
# For drop jumps: find first ON_GROUND after first IN_AIR (the landing after drop)
|
|
192
|
+
# For regular jumps: use longest ON_GROUND phase
|
|
193
|
+
ground_phases = [
|
|
194
|
+
(start, end, i)
|
|
195
|
+
for i, (start, end, state) in enumerate(phases)
|
|
196
|
+
if state == ContactState.ON_GROUND
|
|
197
|
+
]
|
|
198
|
+
air_phases_indexed = [
|
|
199
|
+
(start, end, i)
|
|
200
|
+
for i, (start, end, state) in enumerate(phases)
|
|
201
|
+
if state == ContactState.IN_AIR
|
|
202
|
+
]
|
|
203
|
+
|
|
204
|
+
if not ground_phases:
|
|
205
|
+
return metrics
|
|
206
|
+
|
|
207
|
+
# Initialize contact variables with first ground phase as fallback
|
|
208
|
+
# (will be overridden by drop jump or regular jump detection logic)
|
|
209
|
+
contact_start, contact_end = ground_phases[0][0], ground_phases[0][1]
|
|
210
|
+
|
|
211
|
+
# Detect if this is a drop jump or regular jump
|
|
212
|
+
# Drop jump: first ground phase is elevated (lower y), followed by drop, then landing (higher y)
|
|
213
|
+
is_drop_jump = False
|
|
214
|
+
if air_phases_indexed and len(ground_phases) >= 2:
|
|
215
|
+
first_ground_start, first_ground_end, first_ground_idx = ground_phases[0]
|
|
216
|
+
first_air_idx = air_phases_indexed[0][2]
|
|
217
|
+
|
|
218
|
+
# Find ground phase after first air phase
|
|
219
|
+
ground_after_air = [
|
|
220
|
+
(start, end, idx)
|
|
221
|
+
for start, end, idx in ground_phases
|
|
222
|
+
if idx > first_air_idx
|
|
223
|
+
]
|
|
224
|
+
|
|
225
|
+
if ground_after_air and first_ground_idx < first_air_idx:
|
|
226
|
+
# Check if first ground is at higher elevation (lower y) than ground after air
|
|
227
|
+
first_ground_y = float(
|
|
228
|
+
np.mean(foot_y_positions[first_ground_start : first_ground_end + 1])
|
|
229
|
+
)
|
|
230
|
+
second_ground_start, second_ground_end, _ = ground_after_air[0]
|
|
231
|
+
second_ground_y = float(
|
|
232
|
+
np.mean(foot_y_positions[second_ground_start : second_ground_end + 1])
|
|
233
|
+
)
|
|
234
|
+
|
|
235
|
+
# If first ground is significantly higher (>5% of frame), it's a drop jump
|
|
236
|
+
if second_ground_y - first_ground_y > 0.05:
|
|
237
|
+
is_drop_jump = True
|
|
238
|
+
contact_start, contact_end = second_ground_start, second_ground_end
|
|
239
|
+
|
|
240
|
+
if not is_drop_jump:
|
|
241
|
+
# Regular jump: use longest ground contact phase
|
|
242
|
+
contact_start, contact_end = max(
|
|
243
|
+
[(s, e) for s, e, _ in ground_phases], key=lambda p: p[1] - p[0]
|
|
244
|
+
)
|
|
245
|
+
|
|
246
|
+
# Store integer frame indices (for visualization)
|
|
247
|
+
metrics.contact_start_frame = contact_start
|
|
248
|
+
metrics.contact_end_frame = contact_end
|
|
249
|
+
|
|
250
|
+
# Find corresponding interpolated phase for precise timing
|
|
251
|
+
contact_start_frac = float(contact_start)
|
|
252
|
+
contact_end_frac = float(contact_end)
|
|
253
|
+
|
|
254
|
+
# Find the matching ground phase in interpolated_phases
|
|
255
|
+
for start_frac, end_frac, state in interpolated_phases:
|
|
256
|
+
# Match by checking if integer frames are within this phase
|
|
257
|
+
if (
|
|
258
|
+
state == ContactState.ON_GROUND
|
|
259
|
+
and int(start_frac) <= contact_start <= int(end_frac) + 1
|
|
260
|
+
and int(start_frac) <= contact_end <= int(end_frac) + 1
|
|
261
|
+
):
|
|
262
|
+
contact_start_frac = start_frac
|
|
263
|
+
contact_end_frac = end_frac
|
|
264
|
+
break
|
|
265
|
+
|
|
266
|
+
# Calculate ground contact time using fractional frames
|
|
267
|
+
contact_frames_precise = contact_end_frac - contact_start_frac
|
|
268
|
+
metrics.ground_contact_time = contact_frames_precise / fps
|
|
269
|
+
metrics.contact_start_frame_precise = contact_start_frac
|
|
270
|
+
metrics.contact_end_frame_precise = contact_end_frac
|
|
271
|
+
|
|
272
|
+
# Calculate calibration scale factor from drop height if provided
|
|
273
|
+
scale_factor = 1.0
|
|
274
|
+
if drop_height_m is not None and len(phases) >= 2:
|
|
275
|
+
# Find the initial drop by looking for first IN_AIR phase
|
|
276
|
+
# This represents the drop from the box
|
|
277
|
+
|
|
278
|
+
if air_phases_indexed and ground_phases:
|
|
279
|
+
# Get first air phase (the drop)
|
|
280
|
+
first_air_start, first_air_end, _ = air_phases_indexed[0]
|
|
281
|
+
|
|
282
|
+
# Initial position: at start of drop (on the box)
|
|
283
|
+
# Look back a few frames to get stable position on box
|
|
284
|
+
lookback_start = max(0, first_air_start - 5)
|
|
285
|
+
if lookback_start < first_air_start:
|
|
286
|
+
initial_position = float(
|
|
287
|
+
np.mean(foot_y_positions[lookback_start:first_air_start])
|
|
288
|
+
)
|
|
289
|
+
else:
|
|
290
|
+
initial_position = float(foot_y_positions[first_air_start])
|
|
291
|
+
|
|
292
|
+
# Landing position: at the ground after drop
|
|
293
|
+
# Use position at end of first air phase
|
|
294
|
+
landing_position = float(foot_y_positions[first_air_end])
|
|
295
|
+
|
|
296
|
+
# Drop distance in normalized coordinates (y increases downward)
|
|
297
|
+
drop_normalized = landing_position - initial_position
|
|
298
|
+
|
|
299
|
+
if drop_normalized > 0.01: # Sanity check (at least 1% of frame height)
|
|
300
|
+
# Calculate scale factor: real_meters / normalized_distance
|
|
301
|
+
scale_factor = drop_height_m / drop_normalized
|
|
302
|
+
|
|
303
|
+
# Find flight phase after ground contact
|
|
304
|
+
flight_phases = [
|
|
305
|
+
(start, end)
|
|
306
|
+
for start, end, state in phases
|
|
307
|
+
if state == ContactState.IN_AIR and start > contact_end
|
|
308
|
+
]
|
|
309
|
+
|
|
310
|
+
if flight_phases:
|
|
311
|
+
flight_start, flight_end = flight_phases[0]
|
|
312
|
+
|
|
313
|
+
# Store integer frame indices (for visualization)
|
|
314
|
+
metrics.flight_start_frame = flight_start
|
|
315
|
+
metrics.flight_end_frame = flight_end
|
|
316
|
+
|
|
317
|
+
# Find corresponding interpolated phase for precise timing
|
|
318
|
+
flight_start_frac = float(flight_start)
|
|
319
|
+
flight_end_frac = float(flight_end)
|
|
320
|
+
|
|
321
|
+
# Find the matching air phase in interpolated_phases
|
|
322
|
+
for start_frac, end_frac, state in interpolated_phases:
|
|
323
|
+
# Match by checking if integer frames are within this phase
|
|
324
|
+
if (
|
|
325
|
+
state == ContactState.IN_AIR
|
|
326
|
+
and int(start_frac) <= flight_start <= int(end_frac) + 1
|
|
327
|
+
and int(start_frac) <= flight_end <= int(end_frac) + 1
|
|
328
|
+
):
|
|
329
|
+
flight_start_frac = start_frac
|
|
330
|
+
flight_end_frac = end_frac
|
|
331
|
+
break
|
|
332
|
+
|
|
333
|
+
# Calculate flight time using fractional frames
|
|
334
|
+
flight_frames_precise = flight_end_frac - flight_start_frac
|
|
335
|
+
metrics.flight_time = flight_frames_precise / fps
|
|
336
|
+
metrics.flight_start_frame_precise = flight_start_frac
|
|
337
|
+
metrics.flight_end_frame_precise = flight_end_frac
|
|
338
|
+
|
|
339
|
+
# Calculate jump height using flight time (kinematic method)
|
|
340
|
+
# h = (g * t^2) / 8, where t is total flight time
|
|
341
|
+
g = 9.81 # m/s^2
|
|
342
|
+
jump_height_kinematic = (g * metrics.flight_time**2) / 8
|
|
343
|
+
|
|
344
|
+
# Calculate jump height from trajectory (position-based method)
|
|
345
|
+
# This measures actual vertical displacement from takeoff to peak
|
|
346
|
+
takeoff_position = foot_y_positions[flight_start]
|
|
347
|
+
flight_positions = foot_y_positions[flight_start : flight_end + 1]
|
|
348
|
+
|
|
349
|
+
if len(flight_positions) > 0:
|
|
350
|
+
peak_idx = np.argmin(flight_positions)
|
|
351
|
+
metrics.peak_height_frame = int(flight_start + peak_idx)
|
|
352
|
+
peak_position = np.min(flight_positions)
|
|
353
|
+
|
|
354
|
+
# Height in normalized coordinates (0-1 range)
|
|
355
|
+
height_normalized = float(takeoff_position - peak_position)
|
|
356
|
+
|
|
357
|
+
# Store trajectory value (in normalized coordinates)
|
|
358
|
+
metrics.jump_height_trajectory = height_normalized
|
|
359
|
+
|
|
360
|
+
# Choose measurement method based on calibration availability
|
|
361
|
+
if drop_height_m is not None and scale_factor > 1.0:
|
|
362
|
+
# Use calibrated trajectory measurement (most accurate)
|
|
363
|
+
metrics.jump_height = height_normalized * scale_factor
|
|
364
|
+
metrics.jump_height_kinematic = jump_height_kinematic
|
|
365
|
+
else:
|
|
366
|
+
# Apply kinematic correction factor to kinematic method
|
|
367
|
+
# ⚠️ WARNING: Kinematic correction factor is EXPERIMENTAL and UNVALIDATED
|
|
368
|
+
#
|
|
369
|
+
# The kinematic method h = (g × t²) / 8 may underestimate jump height due to:
|
|
370
|
+
# 1. Contact detection timing (may detect landing slightly early/late)
|
|
371
|
+
# 2. Frame rate limitations (30 fps = 33ms intervals between samples)
|
|
372
|
+
# 3. Foot position vs center of mass difference (feet land before CoM peak)
|
|
373
|
+
#
|
|
374
|
+
# Default correction factor is 1.0 (no correction). Historical testing
|
|
375
|
+
# suggested 1.35 could improve accuracy, but:
|
|
376
|
+
# - This value has NOT been validated against gold standards
|
|
377
|
+
# (force plates, motion capture)
|
|
378
|
+
# - The actual correction needed may vary by athlete, jump type, and video quality
|
|
379
|
+
# - Using a correction factor without validation is experimental
|
|
380
|
+
#
|
|
381
|
+
# For validated measurements, use:
|
|
382
|
+
# - Calibrated measurement with --drop-height parameter
|
|
383
|
+
# - Or compare against validated measurement systems
|
|
384
|
+
metrics.jump_height = (
|
|
385
|
+
jump_height_kinematic * kinematic_correction_factor
|
|
386
|
+
)
|
|
387
|
+
metrics.jump_height_kinematic = jump_height_kinematic
|
|
388
|
+
else:
|
|
389
|
+
# Fallback to kinematic if no position data
|
|
390
|
+
if drop_height_m is None:
|
|
391
|
+
# Apply kinematic correction factor (see detailed comment above)
|
|
392
|
+
metrics.jump_height = (
|
|
393
|
+
jump_height_kinematic * kinematic_correction_factor
|
|
394
|
+
)
|
|
395
|
+
else:
|
|
396
|
+
metrics.jump_height = jump_height_kinematic
|
|
397
|
+
metrics.jump_height_kinematic = jump_height_kinematic
|
|
398
|
+
|
|
399
|
+
return metrics
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
def estimate_jump_height_from_trajectory(
|
|
403
|
+
foot_y_positions: np.ndarray,
|
|
404
|
+
flight_start: int,
|
|
405
|
+
flight_end: int,
|
|
406
|
+
pixel_to_meter_ratio: float | None = None,
|
|
407
|
+
) -> float:
|
|
408
|
+
"""
|
|
409
|
+
Estimate jump height from position trajectory.
|
|
410
|
+
|
|
411
|
+
Args:
|
|
412
|
+
foot_y_positions: Vertical positions of feet (normalized or pixels)
|
|
413
|
+
flight_start: Frame where flight begins
|
|
414
|
+
flight_end: Frame where flight ends
|
|
415
|
+
pixel_to_meter_ratio: Conversion factor from pixels to meters
|
|
416
|
+
|
|
417
|
+
Returns:
|
|
418
|
+
Estimated jump height in meters (or normalized units if no calibration)
|
|
419
|
+
"""
|
|
420
|
+
if flight_end < flight_start:
|
|
421
|
+
return 0.0
|
|
422
|
+
|
|
423
|
+
# Get position at takeoff (end of contact) and peak (minimum y during flight)
|
|
424
|
+
takeoff_position = foot_y_positions[flight_start]
|
|
425
|
+
flight_positions = foot_y_positions[flight_start : flight_end + 1]
|
|
426
|
+
|
|
427
|
+
if len(flight_positions) == 0:
|
|
428
|
+
return 0.0
|
|
429
|
+
|
|
430
|
+
peak_position = np.min(flight_positions)
|
|
431
|
+
|
|
432
|
+
# Height difference (in normalized coordinates, y increases downward)
|
|
433
|
+
height_diff = takeoff_position - peak_position
|
|
434
|
+
|
|
435
|
+
# Convert to meters if calibration available
|
|
436
|
+
if pixel_to_meter_ratio is not None:
|
|
437
|
+
return float(height_diff * pixel_to_meter_ratio)
|
|
438
|
+
|
|
439
|
+
return float(height_diff)
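For reference, a small self-contained sketch (not part of the packaged code) of the flight-time formula used above, h = (g * t^2) / 8, and of the JSON-friendly output produced by DropJumpMetrics.to_dict(). The flight time of 0.50 s and contact time of 0.210 s are assumed example values, not measurements, and the package is assumed to be installed.

from kinemotion.dropjump.kinematics import DropJumpMetrics

g = 9.81  # m/s^2
flight_time = 0.50  # s, assumed example value

metrics = DropJumpMetrics()
metrics.flight_time = flight_time
metrics.ground_contact_time = 0.210  # s, assumed example value
# Kinematic method: h = (g * t^2) / 8 = 9.81 * 0.25 / 8 ≈ 0.307 m
metrics.jump_height_kinematic = (g * flight_time**2) / 8
metrics.jump_height = metrics.jump_height_kinematic  # correction factor 1.0 (none)

out = metrics.to_dict()
print(out["flight_time_ms"])           # 500.0
print(out["ground_contact_time_ms"])   # 210.0
print(out["jump_height_kinematic_m"])  # 0.307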
kinemotion/py.typed
ADDED
File without changes