kinemotion 0.76.3-py3-none-any.whl → 1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of kinemotion might be problematic.
- kinemotion/__init__.py +3 -18
- kinemotion/api.py +7 -27
- kinemotion/cli.py +2 -4
- kinemotion/{countermovement_jump → cmj}/analysis.py +158 -16
- kinemotion/{countermovement_jump → cmj}/api.py +18 -46
- kinemotion/{countermovement_jump → cmj}/cli.py +46 -6
- kinemotion/cmj/debug_overlay.py +457 -0
- kinemotion/{countermovement_jump → cmj}/joint_angles.py +31 -96
- kinemotion/{countermovement_jump → cmj}/metrics_validator.py +293 -184
- kinemotion/{countermovement_jump → cmj}/validation_bounds.py +18 -1
- kinemotion/core/__init__.py +2 -11
- kinemotion/core/auto_tuning.py +107 -149
- kinemotion/core/cli_utils.py +0 -74
- kinemotion/core/debug_overlay_utils.py +15 -142
- kinemotion/core/experimental.py +51 -55
- kinemotion/core/filtering.py +56 -116
- kinemotion/core/pipeline_utils.py +2 -2
- kinemotion/core/pose.py +98 -47
- kinemotion/core/quality.py +6 -4
- kinemotion/core/smoothing.py +51 -65
- kinemotion/core/types.py +0 -15
- kinemotion/core/validation.py +7 -76
- kinemotion/core/video_io.py +27 -41
- kinemotion/{drop_jump → dropjump}/__init__.py +8 -2
- kinemotion/{drop_jump → dropjump}/analysis.py +120 -282
- kinemotion/{drop_jump → dropjump}/api.py +33 -59
- kinemotion/{drop_jump → dropjump}/cli.py +136 -70
- kinemotion/dropjump/debug_overlay.py +182 -0
- kinemotion/{drop_jump → dropjump}/kinematics.py +65 -175
- kinemotion/{drop_jump → dropjump}/metrics_validator.py +51 -25
- kinemotion/{drop_jump → dropjump}/validation_bounds.py +1 -1
- kinemotion/models/rtmpose-s_simcc-body7_pt-body7-halpe26_700e-256x192-7f134165_20230605.onnx +3 -0
- kinemotion/models/yolox_tiny_8xb8-300e_humanart-6f3252f9.onnx +3 -0
- {kinemotion-0.76.3.dist-info → kinemotion-1.0.0.dist-info}/METADATA +26 -75
- kinemotion-1.0.0.dist-info/RECORD +49 -0
- kinemotion/core/overlay_constants.py +0 -61
- kinemotion/core/video_analysis_base.py +0 -132
- kinemotion/countermovement_jump/debug_overlay.py +0 -325
- kinemotion/drop_jump/debug_overlay.py +0 -241
- kinemotion/squat_jump/__init__.py +0 -5
- kinemotion/squat_jump/analysis.py +0 -377
- kinemotion/squat_jump/api.py +0 -610
- kinemotion/squat_jump/cli.py +0 -309
- kinemotion/squat_jump/debug_overlay.py +0 -163
- kinemotion/squat_jump/kinematics.py +0 -342
- kinemotion/squat_jump/metrics_validator.py +0 -438
- kinemotion/squat_jump/validation_bounds.py +0 -221
- kinemotion-0.76.3.dist-info/RECORD +0 -57
- /kinemotion/{countermovement_jump → cmj}/__init__.py +0 -0
- /kinemotion/{countermovement_jump → cmj}/kinematics.py +0 -0
- {kinemotion-0.76.3.dist-info → kinemotion-1.0.0.dist-info}/WHEEL +0 -0
- {kinemotion-0.76.3.dist-info → kinemotion-1.0.0.dist-info}/entry_points.txt +0 -0
- {kinemotion-0.76.3.dist-info → kinemotion-1.0.0.dist-info}/licenses/LICENSE +0 -0
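The file list shows the jump modules being renamed for 1.0.0: countermovement_jump becomes cmj, drop_jump becomes dropjump, and the squat_jump package is removed entirely, while two ONNX models (an RTMPose body model and a YOLOX tiny model) are now bundled in the wheel. A hypothetical sketch of how downstream imports would presumably have to change under these renames; the specific symbols are assumptions except where the dropjump __init__ diff below confirms them:

# Hypothetical import migration implied by the renames above (sketch only;
# package-level re-exports are confirmed below only for dropjump).

# kinemotion 0.76.3
#   from kinemotion.drop_jump import detect_ground_contact
#   from kinemotion.countermovement_jump import api as cmj_api
#   from kinemotion.squat_jump import api as squat_api

# kinemotion 1.0.0
from kinemotion.dropjump import detect_ground_contact  # drop_jump -> dropjump
from kinemotion.cmj import api as cmj_api              # countermovement_jump -> cmj
# kinemotion.squat_jump no longer exists in this wheel; squat-jump callers have
# no direct replacement in the file list.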
kinemotion/core/smoothing.py
CHANGED
@@ -181,18 +181,6 @@ def _smooth_landmarks_core( # NOSONAR(S1172) - polyorder used via closure
     return smoothed_sequence
 
 
-def _ensure_odd_window_length(window_length: int) -> int:
-    """Ensure window_length is odd (required for Savitzky-Golay filter).
-
-    Args:
-        window_length: Desired window length
-
-    Returns:
-        Odd window length (increments by 1 if even)
-    """
-    return window_length + 1 if window_length % 2 == 0 else window_length
-
-
 def smooth_landmarks(
     landmark_sequence: LandmarkSequence,
     window_length: int = 5,
@@ -212,7 +200,9 @@ def smooth_landmarks(
     if len(landmark_sequence) < window_length:
         return landmark_sequence
 
-    window_length
+    # Ensure window_length is odd
+    if window_length % 2 == 0:
+        window_length += 1
 
     def savgol_smoother(
         x_coords: list[float], y_coords: list[float], _valid_frames: list[int]
@@ -241,87 +231,61 @@ def compute_velocity(positions: np.ndarray, fps: float, smooth_window: int = 3)
 
     # Smooth velocity if we have enough data
     if len(velocity) >= smooth_window and smooth_window > 1:
-        smooth_window
+        if smooth_window % 2 == 0:
+            smooth_window += 1
        for dim in range(velocity.shape[1]):
            velocity[:, dim] = savgol_filter(velocity[:, dim], smooth_window, 1)
 
     return velocity
 
 
-def
+def compute_velocity_from_derivative(
     positions: np.ndarray,
-    deriv_order: int,
     window_length: int = 5,
     polyorder: int = 2,
 ) -> np.ndarray:
     """
-    Compute
+    Compute velocity as derivative of smoothed position trajectory.
+
+    Uses Savitzky-Golay filter to compute the derivative directly, which provides
+    a much smoother and more accurate velocity estimate than frame-to-frame differences.
 
-    This
-
+    This method:
+    1. Fits a polynomial to the position data in a sliding window
+    2. Analytically computes the derivative of that polynomial
+    3. Returns smooth velocity values
 
     Args:
         positions: 1D array of position values (e.g., foot y-positions)
-        deriv_order: Order of derivative (1 for velocity, 2 for acceleration)
         window_length: Window size for smoothing (must be odd, >= polyorder + 2)
         polyorder: Polynomial order for Savitzky-Golay filter (typically 2 or 3)
 
     Returns:
-        Array of
+        Array of absolute velocity values (magnitude of derivative)
     """
     if len(positions) < window_length:
         # Fallback to simple differences for short sequences
-
-        return np.abs(np.diff(positions, prepend=positions[0]))
-        # Second derivative fallback
-        velocity = np.diff(positions, prepend=positions[0])
-        return np.diff(velocity, prepend=velocity[0])
+        return np.abs(np.diff(positions, prepend=positions[0]))
 
-    window_length
+    # Ensure window_length is odd
+    if window_length % 2 == 0:
+        window_length += 1
 
     # Compute derivative using Savitzky-Golay filter
-    #
+    # deriv=1: compute first derivative
+    # delta=1.0: frame spacing (velocity per frame)
     # mode='interp': interpolate at boundaries
-
+    velocity = savgol_filter(
        positions,
        window_length,
        polyorder,
-        deriv=
-        delta=1.0,
+        deriv=1,  # First derivative
+        delta=1.0,  # Frame spacing
        mode="interp",
     )
 
-    # Return absolute
-    return np.abs(
-
-
-def compute_velocity_from_derivative(
-    positions: np.ndarray,
-    window_length: int = 5,
-    polyorder: int = 2,
-) -> np.ndarray:
-    """
-    Compute velocity as derivative of smoothed position trajectory.
-
-    Uses Savitzky-Golay filter to compute the derivative directly, which provides
-    a much smoother and more accurate velocity estimate than frame-to-frame differences.
-
-    This method:
-    1. Fits a polynomial to the position data in a sliding window
-    2. Analytically computes the derivative of that polynomial
-    3. Returns smooth velocity values
-
-    Args:
-        positions: 1D array of position values (e.g., foot y-positions)
-        window_length: Window size for smoothing (must be odd, >= polyorder + 2)
-        polyorder: Polynomial order for Savitzky-Golay filter (typically 2 or 3)
-
-    Returns:
-        Array of absolute velocity values (magnitude of derivative)
-    """
-    return _compute_derivative(
-        positions, deriv_order=1, window_length=window_length, polyorder=polyorder
-    )
+    # Return absolute velocity (magnitude only)
+    return np.abs(velocity)
 
 
 def compute_acceleration_from_derivative(
@@ -350,10 +314,30 @@ def compute_acceleration_from_derivative(
     Returns:
         Array of acceleration values (second derivative of position)
     """
-
-
+    if len(positions) < window_length:
+        # Fallback to simple second differences for short sequences
+        velocity = np.diff(positions, prepend=positions[0])
+        return np.diff(velocity, prepend=velocity[0])
+
+    # Ensure window_length is odd
+    if window_length % 2 == 0:
+        window_length += 1
+
+    # Compute second derivative using Savitzky-Golay filter
+    # deriv=2: compute second derivative (acceleration/curvature)
+    # delta=1.0: frame spacing
+    # mode='interp': interpolate at boundaries
+    acceleration = savgol_filter(
+        positions,
+        window_length,
+        polyorder,
+        deriv=2,  # Second derivative
+        delta=1.0,  # Frame spacing
+        mode="interp",
     )
 
+    return acceleration
+
 
 def smooth_landmarks_advanced(
     landmark_sequence: LandmarkSequence,
@@ -392,7 +376,9 @@ def smooth_landmarks_advanced(
     if len(landmark_sequence) < window_length:
         return landmark_sequence
 
-    window_length
+    # Ensure window_length is odd
+    if window_length % 2 == 0:
+        window_length += 1
 
     def advanced_smoother(
         x_coords: list[float], y_coords: list[float], _valid_frames: list[int]
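The change above replaces the old _compute_derivative indirection with direct calls to scipy's savgol_filter, using deriv=1 for velocity and deriv=2 for acceleration, and inlines the odd-window adjustment that _ensure_odd_window_length used to perform. A self-contained sketch of the same technique, independent of kinemotion's actual API (function name and test data here are invented for illustration):

import numpy as np
from scipy.signal import savgol_filter

def sg_velocity(positions: np.ndarray, window_length: int = 5, polyorder: int = 2) -> np.ndarray:
    """Velocity magnitude via a Savitzky-Golay first derivative (sketch of the approach above)."""
    if len(positions) < window_length:
        # Too few samples for a filter window: fall back to simple frame differences
        return np.abs(np.diff(positions, prepend=positions[0]))
    if window_length % 2 == 0:
        window_length += 1  # keep the window odd, as the kinemotion code above does
    return np.abs(savgol_filter(positions, window_length, polyorder, deriv=1, delta=1.0, mode="interp"))

# Noisy parabolic fall: the derivative-based estimate grows near-linearly,
# much smoother than raw frame-to-frame differences of the noisy signal.
rng = np.random.default_rng(0)
t = np.arange(60)
y = 0.5 * 9.81 * (t / 30.0) ** 2 + rng.normal(0.0, 0.01, t.size)
print(sg_velocity(y, window_length=7)[:5])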
kinemotion/core/types.py
CHANGED
@@ -29,19 +29,6 @@ LandmarkSequence: TypeAlias = list[LandmarkFrame]
 # - Wrapper structures: e.g. {"data": {...actual metrics...}}
 MetricsDict: TypeAlias = dict[str, Any]
 
-# MediaPipe foot landmark names used for position and visibility tracking
-FOOT_KEYS: tuple[str, ...] = (
-    "left_ankle",
-    "right_ankle",
-    "left_heel",
-    "right_heel",
-    "left_foot_index",
-    "right_foot_index",
-)
-
-# MediaPipe hip landmark names used for position tracking
-HIP_KEYS: tuple[str, ...] = ("left_hip", "right_hip")
-
 __all__ = [
     "FloatArray",
     "Float64Array",
@@ -52,6 +39,4 @@ __all__ = [
     "LandmarkFrame",
     "LandmarkSequence",
     "MetricsDict",
-    "FOOT_KEYS",
-    "HIP_KEYS",
 ]
kinemotion/core/validation.py
CHANGED
@@ -78,18 +78,19 @@ class MetricBounds:
 
     def contains(self, value: float, profile: AthleteProfile) -> bool:
         """Check if value is within bounds for athlete profile."""
-
-        if profile in (AthleteProfile.ELDERLY, AthleteProfile.UNTRAINED):
+        if profile == AthleteProfile.ELDERLY:
             return self.practical_min <= value <= self.recreational_max
-
+        elif profile == AthleteProfile.UNTRAINED:
+            return self.practical_min <= value <= self.recreational_max
+        elif profile == AthleteProfile.RECREATIONAL:
             return self.recreational_min <= value <= self.recreational_max
-
-            return self.elite_min <= value <= self.elite_max
-        if profile == AthleteProfile.TRAINED:
+        elif profile == AthleteProfile.TRAINED:
             # Trained athletes: midpoint between recreational and elite
             trained_min = (self.recreational_min + self.elite_min) / 2
             trained_max = (self.recreational_max + self.elite_max) / 2
             return trained_min <= value <= trained_max
+        elif profile == AthleteProfile.ELITE:
+            return self.elite_min <= value <= self.elite_max
         return False
 
     def is_physically_possible(self, value: float) -> bool:
@@ -198,73 +199,3 @@ class MetricsValidator(ABC):
             ValidationResult with all issues and status
         """
         pass
-
-    def _validate_metric_with_bounds(
-        self,
-        name: str,
-        value: float,
-        bounds: MetricBounds,
-        profile: AthleteProfile | None,
-        result: ValidationResult,
-        error_suffix: str = "physically impossible",
-        format_str: str = "{value}",
-    ) -> None:
-        """Generic validation for metrics with physical and profile bounds.
-
-        Args:
-            name: Metric name for messages
-            value: Metric value
-            bounds: Bounds definition
-            profile: Athlete profile for expected ranges (can be None)
-            result: Validation result to add issues to
-            error_suffix: Description for out-of-bounds errors
-            format_str: Format string for value display
-        """
-        formatted_value = format_str.format(value=value)
-        display_name = name.replace("_", " ").title()
-
-        if not bounds.is_physically_possible(value):
-            result.add_error(
-                name,
-                f"{display_name} {formatted_value} {error_suffix}",
-                value=value,
-                bounds=(bounds.absolute_min, bounds.absolute_max),
-            )
-        elif profile is not None and bounds.contains(value, profile):
-            result.add_info(
-                name,
-                f"{display_name} {formatted_value} within expected range for {profile.value}",
-                value=value,
-            )
-        elif profile is not None:
-            expected_min, expected_max = self._get_profile_range(profile, bounds)
-            result.add_warning(
-                name,
-                f"{display_name} {formatted_value} outside typical range "
-                f"[{expected_min:.3f}-{expected_max:.3f}] for {profile.value}",
-                value=value,
-                bounds=(expected_min, expected_max),
-            )
-
-    @staticmethod
-    def _get_profile_range(profile: AthleteProfile, bounds: MetricBounds) -> tuple[float, float]:
-        """Get min/max bounds for specific profile.
-
-        Args:
-            profile: Athlete profile
-            bounds: Metric bounds definition
-
-        Returns:
-            Tuple of (min, max) bounds for the profile
-        """
-        profile_ranges = {
-            AthleteProfile.ELDERLY: (bounds.practical_min, bounds.recreational_max),
-            AthleteProfile.UNTRAINED: (bounds.practical_min, bounds.recreational_max),
-            AthleteProfile.RECREATIONAL: (bounds.recreational_min, bounds.recreational_max),
-            AthleteProfile.TRAINED: (
-                (bounds.recreational_min + bounds.elite_min) / 2,
-                (bounds.recreational_max + bounds.elite_max) / 2,
-            ),
-            AthleteProfile.ELITE: (bounds.elite_min, bounds.elite_max),
-        }
-        return profile_ranges.get(profile, (bounds.absolute_min, bounds.absolute_max))
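The rewritten contains() spells the per-profile ranges out as an if/elif chain, and the generic _validate_metric_with_bounds and _get_profile_range helpers are removed from the abstract base. A compact sketch of the range selection those branches encode, using invented class names and made-up example numbers (the real MetricBounds and AthleteProfile definitions are not fully shown in this diff):

from dataclasses import dataclass
from enum import Enum

class AthleteProfile(Enum):
    ELDERLY = "elderly"
    UNTRAINED = "untrained"
    RECREATIONAL = "recreational"
    TRAINED = "trained"
    ELITE = "elite"

@dataclass
class Bounds:  # field names mirror the MetricBounds usage visible in the diff
    practical_min: float
    recreational_min: float
    recreational_max: float
    elite_min: float
    elite_max: float

def profile_range(profile: AthleteProfile, b: Bounds) -> tuple[float, float]:
    # Same mapping the new contains() branches encode: elderly/untrained share the
    # widest practical-to-recreational span, trained athletes sit at the midpoint
    # between the recreational and elite ranges.
    if profile in (AthleteProfile.ELDERLY, AthleteProfile.UNTRAINED):
        return b.practical_min, b.recreational_max
    if profile == AthleteProfile.RECREATIONAL:
        return b.recreational_min, b.recreational_max
    if profile == AthleteProfile.TRAINED:
        return ((b.recreational_min + b.elite_min) / 2, (b.recreational_max + b.elite_max) / 2)
    return b.elite_min, b.elite_max  # ELITE

# Example bounds (made-up numbers, illustration only)
b = Bounds(practical_min=0.05, recreational_min=0.15, recreational_max=0.40, elite_min=0.35, elite_max=0.60)
lo, hi = profile_range(AthleteProfile.TRAINED, b)
print(lo <= 0.33 <= hi)  # crude "contains" check for a trained athlete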
kinemotion/core/video_io.py
CHANGED
@@ -18,17 +18,6 @@ class VideoProcessor:
     No dimensions are hardcoded - all dimensions are extracted from actual frame data.
     """
 
-    # Mapping of rotation angles to OpenCV rotation operations
-    # Keys are normalized angles (equivalent angles grouped)
-    _ROTATION_OPS: dict[int, int] = {
-        -90: cv2.ROTATE_90_CLOCKWISE,
-        270: cv2.ROTATE_90_CLOCKWISE,
-        90: cv2.ROTATE_90_COUNTERCLOCKWISE,
-        -270: cv2.ROTATE_90_COUNTERCLOCKWISE,
-        180: cv2.ROTATE_180,
-        -180: cv2.ROTATE_180,
-    }
-
     def __init__(self, video_path: str, timer: Timer | None = None) -> None:
         """
         Initialize video processor.
@@ -50,27 +39,9 @@ class VideoProcessor:
         self._current_timestamp_ms: int = 0  # Timestamp for the current frame
 
         # Read first frame to get actual dimensions
-
-
-        #
-        self.rotation = 0  # Will be set by _extract_video_metadata()
-        self.codec: str | None = None  # Will be set by _extract_video_metadata()
-
-        # Initialize display dimensions (may be adjusted by SAR metadata)
-        self.display_width = self.width
-        self.display_height = self.height
-        self._extract_video_metadata()
-
-        # Apply rotation to dimensions if needed
-        self._apply_rotation_to_dimensions()
-
-    def _extract_dimensions_from_frame(self) -> None:
-        """Extract video dimensions by reading the first frame.
-
-        This is critical for preserving aspect ratio, especially with mobile videos
-        that have rotation metadata. OpenCV properties (CAP_PROP_FRAME_WIDTH/HEIGHT)
-        may return incorrect dimensions, so we read the actual frame data.
-        """
+        # This is critical for preserving aspect ratio, especially with mobile videos
+        # that have rotation metadata. OpenCV properties (CAP_PROP_FRAME_WIDTH/HEIGHT)
+        # may return incorrect dimensions, so we read the actual frame data.
         ret, first_frame = self.cap.read()
         if ret:
             # frame.shape is (height, width, channels) - extract actual dimensions
@@ -81,13 +52,22 @@ class VideoProcessor:
             self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
             self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
 
-
-
+        # Extract rotation metadata from video (iPhones store rotation in
+        # side_data_list). OpenCV ignores rotation metadata, so we need to
+        # extract and apply it manually
+        self.rotation = 0  # Will be set by _extract_video_metadata()
+
+        # Extract codec information from video metadata
+        self.codec: str | None = None  # Will be set by _extract_video_metadata()
 
-
-
-
-
+        # Calculate display dimensions considering SAR (Sample Aspect Ratio)
+        # Mobile videos often have non-square pixels encoded in SAR metadata
+        # OpenCV doesn't directly expose SAR, but we need to handle display correctly
+        self.display_width = self.width
+        self.display_height = self.height
+        self._extract_video_metadata()
+
+        # Apply rotation to dimensions if needed
         if self.rotation in [90, -90, 270]:
             # Swap dimensions for 90/-90 degree rotations
             self.width, self.height = self.height, self.width
@@ -236,9 +216,15 @@ class VideoProcessor:
 
         # Apply rotation if video has rotation metadata
         with self.timer.measure("frame_rotation"):
-
-
-            frame = cv2.rotate(frame,
+            if self.rotation == -90 or self.rotation == 270:
+                # -90 degrees = rotate 90 degrees clockwise
+                frame = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)
+            elif self.rotation == 90 or self.rotation == -270:
+                # 90 degrees = rotate 90 degrees counter-clockwise
+                frame = cv2.rotate(frame, cv2.ROTATE_90_COUNTERCLOCKWISE)
+            elif self.rotation == 180 or self.rotation == -180:
+                # 180 degrees rotation
+                frame = cv2.rotate(frame, cv2.ROTATE_180)
 
         self._frame_index += 1
         return frame
kinemotion/{drop_jump → dropjump}/__init__.py
CHANGED
@@ -3,10 +3,13 @@
 from ..core.smoothing import interpolate_threshold_crossing
 from .analysis import (
     ContactState,
+    calculate_adaptive_threshold,
     compute_average_foot_position,
     detect_ground_contact,
+    find_interpolated_phase_transitions_with_curvature,
+    refine_transition_with_curvature,
 )
-from .debug_overlay import
+from .debug_overlay import DebugOverlayRenderer
 from .kinematics import DropJumpMetrics, calculate_drop_jump_metrics
 
 __all__ = [
@@ -14,10 +17,13 @@ __all__ = [
     "ContactState",
     "detect_ground_contact",
     "compute_average_foot_position",
+    "calculate_adaptive_threshold",
     "interpolate_threshold_crossing",
+    "refine_transition_with_curvature",
+    "find_interpolated_phase_transitions_with_curvature",
     # Metrics
     "DropJumpMetrics",
     "calculate_drop_jump_metrics",
     # Debug overlay
-    "
+    "DebugOverlayRenderer",
 ]
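Taken together, the renamed dropjump package re-exports the curvature-based transition helpers and the overlay renderer at package level. A usage sketch showing only the import surface; call signatures are not part of this diff and are therefore not illustrated:

# Requires kinemotion 1.0.0 installed; only the names in the __all__ above are used.
from kinemotion.dropjump import (
    DebugOverlayRenderer,
    calculate_adaptive_threshold,
    detect_ground_contact,
    find_interpolated_phase_transitions_with_curvature,
    refine_transition_with_curvature,
)

# Nothing is called here: the point is that these names now resolve from the
# renamed package, matching the __all__ list in the diff above.
print(DebugOverlayRenderer.__name__, detect_ground_contact.__name__)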