kinemotion 0.17.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of kinemotion might be problematic.

@@ -0,0 +1,790 @@
1
+ """Ground contact detection logic for drop-jump analysis."""
2
+
3
+ from enum import Enum
4
+
5
+ import numpy as np
6
+
7
+ from ..core.smoothing import (
8
+ compute_acceleration_from_derivative,
9
+ compute_velocity_from_derivative,
10
+ )
11
+
12
+
13
+ class ContactState(Enum):
14
+ """States for foot-ground contact."""
15
+
16
+ IN_AIR = "in_air"
17
+ ON_GROUND = "on_ground"
18
+ UNKNOWN = "unknown"
19
+
20
+
21
+ def calculate_adaptive_threshold(
22
+ positions: np.ndarray,
23
+ fps: float,
24
+ baseline_duration: float = 3.0,
25
+ multiplier: float = 1.5,
26
+ smoothing_window: int = 5,
27
+ polyorder: int = 2,
28
+ ) -> float:
29
+ """
30
+ Calculate adaptive velocity threshold based on baseline motion characteristics.
31
+
32
+ Analyzes the first few seconds of video (assumed to be relatively stationary,
33
+ e.g., athlete standing on box) to determine the noise floor, then sets threshold
34
+ as a multiple of this baseline noise.
35
+
36
+ This adapts to:
37
+ - Different camera distances (closer = more pixel movement)
38
+ - Different lighting conditions (affects tracking quality)
39
+ - Different frame rates (higher fps = smoother motion)
40
+ - Video compression artifacts
41
+
42
+ Args:
43
+ positions: Array of vertical positions (0-1 normalized)
44
+ fps: Video frame rate
45
+ baseline_duration: Duration in seconds to analyze for baseline (default: 3.0s)
46
+ multiplier: Factor above baseline noise to set threshold (default: 1.5x)
47
+ smoothing_window: Window size for velocity computation
48
+ polyorder: Polynomial order for Savitzky-Golay filter (default: 2)
49
+
50
+ Returns:
51
+ Adaptive velocity threshold value
52
+
53
+ Example:
54
+ At 30fps with 3s baseline:
55
+ - Analyzes first 90 frames
56
+ - Computes velocity for this "stationary" period
57
+ - 95th percentile velocity = 0.012 (noise level)
58
+ - Threshold = 0.012 × 1.5 = 0.018
59
+ """
60
+ if len(positions) < 2:
61
+ return 0.02 # Fallback to default
62
+
63
+ # Calculate number of frames for baseline analysis
64
+ baseline_frames = int(fps * baseline_duration)
65
+ baseline_frames = min(baseline_frames, len(positions))
66
+
67
+ if baseline_frames < smoothing_window:
68
+ return 0.02 # Not enough data, use default
69
+
70
+ # Extract baseline period (assumed relatively stationary)
71
+ baseline_positions = positions[:baseline_frames]
72
+
73
+ # Compute velocity for baseline period using derivative
74
+ baseline_velocities = compute_velocity_from_derivative(
75
+ baseline_positions, window_length=smoothing_window, polyorder=polyorder
76
+ )
77
+
78
+ # Calculate noise floor as 95th percentile of baseline velocities
79
+ # Using 95th percentile instead of max to be robust against outliers
80
+ noise_floor = float(np.percentile(np.abs(baseline_velocities), 95))
81
+
82
+ # Set threshold as multiplier of noise floor
83
+ # Minimum threshold to avoid being too sensitive
84
+ adaptive_threshold = max(noise_floor * multiplier, 0.005)
85
+
86
+ # Maximum threshold to ensure we still detect contact
87
+ adaptive_threshold = min(adaptive_threshold, 0.05)
88
+
89
+ return adaptive_threshold
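# Illustrative usage sketch -- not part of the package diff. Assumes this module's
# functions are in scope; the synthetic trajectory (about 3 s of quiet standing
# followed by a drop) is made up for demonstration.
import numpy as np

rng = np.random.default_rng(0)
fps = 30.0
standing = 0.40 + rng.normal(0.0, 0.002, int(fps * 3.0))   # quiet baseline on the box
dropping = np.linspace(0.40, 0.85, 30)                      # y grows as the athlete falls
positions = np.concatenate([standing, dropping])

threshold = calculate_adaptive_threshold(positions, fps=fps)
print(f"adaptive velocity threshold: {threshold:.4f}")      # clamped to [0.005, 0.05]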
90
+
91
+
92
+ def _find_stable_baseline(
93
+ positions: np.ndarray,
94
+ min_stable_frames: int,
95
+ stability_threshold: float = 0.01,
96
+ debug: bool = False,
97
+ ) -> tuple[int, float]:
98
+ """Find first stable period and return baseline position.
99
+
100
+ Returns:
101
+ Tuple of (baseline_start_frame, baseline_position). Returns (-1, 0.0) if not found.
102
+ """
103
+ stable_window = min_stable_frames
104
+
105
+ for start_idx in range(0, len(positions) - stable_window, 5):
106
+ window = positions[start_idx : start_idx + stable_window]
107
+ window_std = float(np.std(window))
108
+
109
+ if window_std < stability_threshold:
110
+ baseline_start = start_idx
111
+ baseline_position = float(np.median(window))
112
+
113
+ if debug:
114
+ end_frame = baseline_start + stable_window - 1
115
+ print("[detect_drop_start] Found stable period:")
116
+ print(f" frames {baseline_start}-{end_frame}")
117
+ print(f" baseline_position: {baseline_position:.4f}")
118
+ print(f" baseline_std: {window_std:.4f} < {stability_threshold:.4f}")
119
+
120
+ return baseline_start, baseline_position
121
+
122
+ if debug:
123
+ print(
124
+ f"[detect_drop_start] No stable period found "
125
+ f"(variance always > {stability_threshold:.4f})"
126
+ )
127
+ return -1, 0.0
128
+
129
+
130
+ def _find_drop_from_baseline(
131
+ positions: np.ndarray,
132
+ baseline_start: int,
133
+ baseline_position: float,
134
+ stable_window: int,
135
+ position_change_threshold: float,
136
+ smoothing_window: int,
137
+ debug: bool = False,
138
+ ) -> int:
139
+ """Find drop start after stable baseline period.
140
+
141
+ Returns:
142
+ Drop frame index, or 0 if not found.
143
+ """
144
+ search_start = baseline_start + stable_window
145
+ window_size = max(3, smoothing_window)
146
+
147
+ for i in range(search_start, len(positions) - window_size):
148
+ window_positions = positions[i : i + window_size]
149
+ avg_position = float(np.mean(window_positions))
150
+ position_change = avg_position - baseline_position
151
+
152
+ if position_change > position_change_threshold:
153
+ drop_frame = max(baseline_start, i - window_size)
154
+
155
+ if debug:
156
+ print(f"[detect_drop_start] Drop detected at frame {drop_frame}")
157
+ print(
158
+ f" position_change: {position_change:.4f} > "
159
+ f"{position_change_threshold:.4f}"
160
+ )
161
+ print(
162
+ f" avg_position: {avg_position:.4f} vs "
163
+ f"baseline: {baseline_position:.4f}"
164
+ )
165
+
166
+ return drop_frame
167
+
168
+ if debug:
169
+ print("[detect_drop_start] No drop detected after stable period")
170
+ return 0
171
+
172
+
173
+ def detect_drop_start(
174
+ positions: np.ndarray,
175
+ fps: float,
176
+ min_stationary_duration: float = 1.0,
177
+ position_change_threshold: float = 0.02,
178
+ smoothing_window: int = 5,
179
+ debug: bool = False,
180
+ ) -> int:
181
+ """
182
+ Detect when the drop jump actually starts by finding stable period then detecting drop.
183
+
184
+ Strategy:
185
+ 1. Scan forward to find first STABLE period (low variance over N frames)
186
+ 2. Use that stable period as baseline
187
+ 3. Detect when position starts changing significantly from baseline
188
+
189
+ This handles videos where athlete steps onto box at start (unstable beginning).
190
+
191
+ Args:
192
+ positions: Array of vertical positions (0-1 normalized, y increases downward)
193
+ fps: Video frame rate
194
+ min_stationary_duration: Minimum duration (seconds) of stable period (default: 1.0s)
195
+ position_change_threshold: Position change indicating start of drop
196
+ (default: 0.02 = 2% of frame)
197
+ smoothing_window: Window for computing position variance
198
+ debug: Print debug information (default: False)
199
+
200
+ Returns:
201
+ Frame index where drop starts (or 0 if no clear stable period found)
202
+
203
+ Example:
204
+ - Frames 0-14: Stepping onto box (noisy, unstable)
205
+ - Frames 15-118: Standing on box (stable, low variance)
206
+ - Frame 119: Drop begins (position changes significantly)
207
+ - Returns: 119
208
+ """
209
+ min_stable_frames = int(fps * min_stationary_duration)
210
+ if len(positions) < min_stable_frames + 30:
211
+ if debug:
212
+ print(
213
+ f"[detect_drop_start] Video too short: {len(positions)} < "
214
+ f"{min_stable_frames + 30}"
215
+ )
216
+ return 0
217
+
218
+ # Find stable baseline period
219
+ baseline_start, baseline_position = _find_stable_baseline(
220
+ positions, min_stable_frames, debug=debug
221
+ )
222
+
223
+ if baseline_start < 0:
224
+ return 0
225
+
226
+ # Find drop from baseline
227
+ return _find_drop_from_baseline(
228
+ positions,
229
+ baseline_start,
230
+ baseline_position,
231
+ min_stable_frames,
232
+ position_change_threshold,
233
+ smoothing_window,
234
+ debug,
235
+ )
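# Illustrative usage sketch -- not part of the package diff. Synthetic frames: a noisy
# start (stepping onto the box), a stable stretch on the box, then the drop.
import numpy as np

rng = np.random.default_rng(1)
fps = 30.0
stepping = 0.35 + rng.normal(0.0, 0.03, 15)                 # unstable beginning
standing = 0.40 + rng.normal(0.0, 0.002, int(fps * 2.0))    # stable baseline
dropping = np.linspace(0.40, 0.85, 45)                      # drop off the box
positions = np.concatenate([stepping, standing, dropping])

drop_frame = detect_drop_start(positions, fps=fps, min_stationary_duration=1.0)
print(f"drop starts around frame {drop_frame}")             # shortly before the descent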
236
+
237
+
238
+ def detect_ground_contact(
239
+ foot_positions: np.ndarray,
240
+ velocity_threshold: float = 0.02,
241
+ min_contact_frames: int = 3,
242
+ visibility_threshold: float = 0.5,
243
+ visibilities: np.ndarray | None = None,
244
+ window_length: int = 5,
245
+ polyorder: int = 2,
246
+ ) -> list[ContactState]:
247
+ """
248
+ Detect when feet are in contact with ground based on vertical motion.
249
+
250
+ Uses derivative-based velocity calculation via Savitzky-Golay filter for smooth,
251
+ accurate velocity estimates. This is consistent with the velocity calculation used
252
+ throughout the pipeline for sub-frame interpolation and curvature analysis.
253
+
254
+ Args:
255
+ foot_positions: Array of foot y-positions (normalized, 0-1, where 1 is bottom)
256
+ velocity_threshold: Threshold for vertical velocity to consider stationary
257
+ min_contact_frames: Minimum consecutive frames to confirm contact
258
+ visibility_threshold: Minimum visibility score to trust landmark
259
+ visibilities: Array of visibility scores for each frame
260
+ window_length: Window size for velocity derivative calculation (must be odd)
261
+ polyorder: Polynomial order for Savitzky-Golay filter (default: 2)
262
+
263
+ Returns:
264
+ List of ContactState for each frame
265
+ """
266
+ n_frames = len(foot_positions)
267
+ states = [ContactState.UNKNOWN] * n_frames
268
+
269
+ if n_frames < 2:
270
+ return states
271
+
272
+ # Compute vertical velocity using derivative-based method
273
+ # This provides smoother, more accurate velocity estimates than frame-to-frame differences
274
+ # and is consistent with the velocity calculation used for sub-frame interpolation
275
+ velocities = compute_velocity_from_derivative(
276
+ foot_positions, window_length=window_length, polyorder=polyorder
277
+ )
278
+
279
+ # Detect potential contact frames based on low velocity
280
+ is_stationary = np.abs(velocities) < velocity_threshold
281
+
282
+ # Apply visibility filter
283
+ if visibilities is not None:
284
+ is_visible = visibilities > visibility_threshold
285
+ is_stationary = is_stationary & is_visible
286
+
287
+ # Apply minimum contact duration filter
288
+ contact_frames = []
289
+ current_run = []
290
+
291
+ for i, stationary in enumerate(is_stationary):
292
+ if stationary:
293
+ current_run.append(i)
294
+ else:
295
+ if len(current_run) >= min_contact_frames:
296
+ contact_frames.extend(current_run)
297
+ current_run = []
298
+
299
+ # Don't forget the last run
300
+ if len(current_run) >= min_contact_frames:
301
+ contact_frames.extend(current_run)
302
+
303
+ # Set states
304
+ for i in range(n_frames):
305
+ if visibilities is not None and visibilities[i] < visibility_threshold:
306
+ states[i] = ContactState.UNKNOWN
307
+ elif i in contact_frames:
308
+ states[i] = ContactState.ON_GROUND
309
+ else:
310
+ states[i] = ContactState.IN_AIR
311
+
312
+ return states
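# Illustrative usage sketch -- not part of the package diff. Flat segments read as
# ground contact, the short synthetic flight arc as air time.
import numpy as np

ground = np.full(20, 0.85)                                  # feet stationary
flight = 0.85 - 0.3 * np.sin(np.linspace(0.0, np.pi, 12))   # brief up-and-down arc
positions = np.concatenate([ground, flight, ground])

states = detect_ground_contact(positions, velocity_threshold=0.02, min_contact_frames=3)
print(states[0], states[24], states[-1])   # ON_GROUND, IN_AIR, ON_GROUND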
313
+
314
+
315
+ def find_contact_phases(
316
+ contact_states: list[ContactState],
317
+ ) -> list[tuple[int, int, ContactState]]:
318
+ """
319
+ Identify continuous phases of contact/flight.
320
+
321
+ Args:
322
+ contact_states: List of ContactState for each frame
323
+
324
+ Returns:
325
+ List of (start_frame, end_frame, state) tuples for each phase
326
+ """
327
+ phases: list[tuple[int, int, ContactState]] = []
328
+ if not contact_states:
329
+ return phases
330
+
331
+ current_state = contact_states[0]
332
+ phase_start = 0
333
+
334
+ for i in range(1, len(contact_states)):
335
+ if contact_states[i] != current_state:
336
+ # Phase transition
337
+ phases.append((phase_start, i - 1, current_state))
338
+ current_state = contact_states[i]
339
+ phase_start = i
340
+
341
+ # Don't forget the last phase
342
+ phases.append((phase_start, len(contact_states) - 1, current_state))
343
+
344
+ return phases
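# Illustrative sketch -- not part of the package diff. Seven frames collapse into
# three phases with inclusive start/end indices.
s = ContactState
states = [s.ON_GROUND] * 3 + [s.IN_AIR] * 2 + [s.ON_GROUND] * 2
for start, end, state in find_contact_phases(states):
    print(start, end, state.value)   # (0, 2, on_ground), (3, 4, in_air), (5, 6, on_ground)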
345
+
346
+
347
+ def interpolate_threshold_crossing(
348
+ vel_before: float,
349
+ vel_after: float,
350
+ velocity_threshold: float,
351
+ ) -> float:
352
+ """
353
+ Find fractional offset where velocity crosses threshold between two frames.
354
+
355
+ Uses linear interpolation assuming velocity changes linearly between frames.
356
+
357
+ Args:
358
+ vel_before: Velocity at frame boundary N (absolute value)
359
+ vel_after: Velocity at frame boundary N+1 (absolute value)
360
+ velocity_threshold: Threshold value
361
+
362
+ Returns:
363
+ Fractional offset from frame N (0.0 to 1.0)
364
+ """
365
+ # Handle edge cases
366
+ if abs(vel_after - vel_before) < 1e-9: # Velocity not changing
367
+ return 0.5
368
+
369
+ # Linear interpolation: at what fraction t does velocity equal threshold?
370
+ # vel(t) = vel_before + t * (vel_after - vel_before)
371
+ # Solve for t when vel(t) = threshold:
372
+ # threshold = vel_before + t * (vel_after - vel_before)
373
+ # t = (threshold - vel_before) / (vel_after - vel_before)
374
+
375
+ t = (velocity_threshold - vel_before) / (vel_after - vel_before)
376
+
377
+ # Clamp to [0, 1] range
378
+ return float(max(0.0, min(1.0, t)))
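# Illustrative numeric check -- not part of the package diff. Velocity magnitude falls
# from 0.030 to 0.010 across one frame, so it crosses a 0.020 threshold about halfway:
# t = (0.020 - 0.030) / (0.010 - 0.030) = 0.5.
offset = interpolate_threshold_crossing(vel_before=0.030, vel_after=0.010, velocity_threshold=0.020)
print(offset)   # ~0.5 -> crossing at roughly frame N + 0.5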
379
+
380
+
381
+ def _interpolate_phase_start(
382
+ start_idx: int,
383
+ state: ContactState,
384
+ velocities: np.ndarray,
385
+ velocity_threshold: float,
386
+ ) -> float:
387
+ """Interpolate start boundary of a phase with sub-frame precision.
388
+
389
+ Returns:
390
+ Fractional start frame, or float(start_idx) if no interpolation.
391
+ """
392
+ if start_idx <= 0 or start_idx >= len(velocities):
393
+ return float(start_idx)
394
+
395
+ vel_before = velocities[start_idx - 1]
396
+ vel_at = velocities[start_idx]
397
+
398
+ # Check threshold crossing based on state
399
+ is_landing = (
400
+ state == ContactState.ON_GROUND and vel_before > velocity_threshold > vel_at
401
+ )
402
+ is_takeoff = (
403
+ state == ContactState.IN_AIR and vel_before < velocity_threshold < vel_at
404
+ )
405
+
406
+ if is_landing or is_takeoff:
407
+ offset = interpolate_threshold_crossing(vel_before, vel_at, velocity_threshold)
408
+ return (start_idx - 1) + offset
409
+
410
+ return float(start_idx)
411
+
412
+
413
+ def _interpolate_phase_end(
414
+ end_idx: int,
415
+ state: ContactState,
416
+ velocities: np.ndarray,
417
+ velocity_threshold: float,
418
+ max_idx: int,
419
+ ) -> float:
420
+ """Interpolate end boundary of a phase with sub-frame precision.
421
+
422
+ Returns:
423
+ Fractional end frame, or float(end_idx) if no interpolation.
424
+ """
425
+ if end_idx >= max_idx - 1 or end_idx + 1 >= len(velocities):
426
+ return float(end_idx)
427
+
428
+ vel_at = velocities[end_idx]
429
+ vel_after = velocities[end_idx + 1]
430
+
431
+ # Check threshold crossing based on state
432
+ is_takeoff = (
433
+ state == ContactState.ON_GROUND and vel_at < velocity_threshold < vel_after
434
+ )
435
+ is_landing = (
436
+ state == ContactState.IN_AIR and vel_at > velocity_threshold > vel_after
437
+ )
438
+
439
+ if is_takeoff or is_landing:
440
+ offset = interpolate_threshold_crossing(vel_at, vel_after, velocity_threshold)
441
+ return end_idx + offset
442
+
443
+ return float(end_idx)
444
+
445
+
446
+ def find_interpolated_phase_transitions(
447
+ foot_positions: np.ndarray,
448
+ contact_states: list[ContactState],
449
+ velocity_threshold: float,
450
+ smoothing_window: int = 5,
451
+ ) -> list[tuple[float, float, ContactState]]:
452
+ """
453
+ Find contact phases with sub-frame interpolation for precise timing.
454
+
455
+ Uses derivative-based velocity from smoothed trajectory for interpolation.
456
+ This provides much smoother velocity estimates than frame-to-frame differences,
457
+ leading to more accurate threshold crossing detection.
458
+
459
+ Args:
460
+ foot_positions: Array of foot y-positions (normalized, 0-1)
461
+ contact_states: List of ContactState for each frame
462
+ velocity_threshold: Threshold used for contact detection
463
+ smoothing_window: Window size for velocity smoothing (must be odd)
464
+
465
+ Returns:
466
+ List of (start_frame, end_frame, state) tuples with fractional frame indices
467
+ """
468
+ phases = find_contact_phases(contact_states)
469
+ if not phases or len(foot_positions) < 2:
470
+ return []
471
+
472
+ velocities = compute_velocity_from_derivative(
473
+ foot_positions, window_length=smoothing_window, polyorder=2
474
+ )
475
+
476
+ interpolated_phases: list[tuple[float, float, ContactState]] = []
477
+
478
+ for start_idx, end_idx, state in phases:
479
+ start_frac = _interpolate_phase_start(
480
+ start_idx, state, velocities, velocity_threshold
481
+ )
482
+ end_frac = _interpolate_phase_end(
483
+ end_idx, state, velocities, velocity_threshold, len(foot_positions)
484
+ )
485
+ interpolated_phases.append((start_frac, end_frac, state))
486
+
487
+ return interpolated_phases
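# Illustrative usage sketch -- not part of the package diff. Reuses the coarse states
# from detect_ground_contact and reports fractional phase boundaries in seconds.
import numpy as np

fps = 30.0
ground = np.full(20, 0.85)
flight = 0.85 - 0.3 * np.sin(np.linspace(0.0, np.pi, 12))
positions = np.concatenate([ground, flight, ground])

states = detect_ground_contact(positions, velocity_threshold=0.02)
phases = find_interpolated_phase_transitions(positions, states, velocity_threshold=0.02)
for start, end, state in phases:
    print(f"{state.value}: {start / fps:.3f}s -> {end / fps:.3f}s")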
488
+
489
+
490
+ def refine_transition_with_curvature(
491
+ foot_positions: np.ndarray,
492
+ estimated_frame: float,
493
+ transition_type: str,
494
+ search_window: int = 3,
495
+ smoothing_window: int = 5,
496
+ polyorder: int = 2,
497
+ ) -> float:
498
+ """
499
+ Refine phase transition timing using trajectory curvature analysis.
500
+
501
+ Looks for characteristic acceleration patterns near estimated transition:
502
+ - Landing: Large acceleration spike (rapid deceleration on impact)
503
+ - Takeoff: Acceleration change (transition from static to upward motion)
504
+
505
+ Args:
506
+ foot_positions: Array of foot y-positions (normalized, 0-1)
507
+ estimated_frame: Initial estimate of transition frame (from velocity)
508
+ transition_type: Type of transition ("landing" or "takeoff")
509
+ search_window: Number of frames to search around estimate
510
+ smoothing_window: Window size for acceleration computation
511
+ polyorder: Polynomial order for Savitzky-Golay filter (default: 2)
512
+
513
+ Returns:
514
+ Refined fractional frame index
515
+ """
516
+ if len(foot_positions) < smoothing_window:
517
+ return estimated_frame
518
+
519
+ # Compute acceleration (second derivative)
520
+ acceleration = compute_acceleration_from_derivative(
521
+ foot_positions, window_length=smoothing_window, polyorder=polyorder
522
+ )
523
+
524
+ # Define search range around estimated transition
525
+ est_int = int(estimated_frame)
526
+ search_start = max(0, est_int - search_window)
527
+ search_end = min(len(acceleration), est_int + search_window + 1)
528
+
529
+ if search_end <= search_start:
530
+ return estimated_frame
531
+
532
+ # Extract acceleration in search window
533
+ accel_window = acceleration[search_start:search_end]
534
+
535
+ if len(accel_window) == 0:
536
+ return estimated_frame
537
+
538
+ if transition_type == "landing":
539
+ # Landing: Look for large magnitude acceleration (impact deceleration)
540
+ # Find frame with maximum absolute acceleration
541
+ peak_idx = np.argmax(np.abs(accel_window))
542
+ refined_frame = float(search_start + peak_idx)
543
+
544
+ elif transition_type == "takeoff":
545
+ # Takeoff: Look for acceleration magnitude change
546
+ # Find frame with large acceleration change (derivative of acceleration)
547
+ if len(accel_window) < 2:
548
+ return estimated_frame
549
+
550
+ accel_diff = np.abs(np.diff(accel_window))
551
+ peak_idx = np.argmax(accel_diff)
552
+ refined_frame = float(search_start + peak_idx)
553
+
554
+ else:
555
+ return estimated_frame
556
+
557
+ # Blend with original estimate (don't stray too far)
558
+ # 70% curvature-based, 30% velocity-based
559
+ blend_factor = 0.7
560
+ refined_frame = blend_factor * refined_frame + (1 - blend_factor) * estimated_frame
561
+
562
+ return refined_frame
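# Illustrative usage sketch -- not part of the package diff. A velocity-based landing
# estimate (assumed here to be frame 30.0) is blended 70/30 with the frame of the
# acceleration peak found near the impact-like kink of the synthetic trajectory.
import numpy as np

ground = np.full(20, 0.85)
flight = 0.85 - 0.3 * np.sin(np.linspace(0.0, np.pi, 12))
positions = np.concatenate([ground, flight, ground])

refined = refine_transition_with_curvature(positions, estimated_frame=30.0, transition_type="landing")
print(f"refined landing estimate: {refined:.2f}")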
563
+
564
+
565
+ def find_interpolated_phase_transitions_with_curvature(
566
+ foot_positions: np.ndarray,
567
+ contact_states: list[ContactState],
568
+ velocity_threshold: float,
569
+ smoothing_window: int = 5,
570
+ polyorder: int = 2,
571
+ use_curvature: bool = True,
572
+ ) -> list[tuple[float, float, ContactState]]:
573
+ """
574
+ Find contact phases with sub-frame interpolation and curvature refinement.
575
+
576
+ Combines three methods for maximum accuracy:
577
+ 1. Velocity thresholding (coarse integer frame detection)
578
+ 2. Velocity interpolation (sub-frame precision)
579
+ 3. Curvature analysis (refinement based on acceleration patterns)
580
+
581
+ Args:
582
+ foot_positions: Array of foot y-positions (normalized, 0-1)
583
+ contact_states: List of ContactState for each frame
584
+ velocity_threshold: Threshold used for contact detection
585
+ smoothing_window: Window size for velocity/acceleration smoothing
586
+ polyorder: Polynomial order for Savitzky-Golay filter (default: 2)
587
+ use_curvature: Whether to apply curvature-based refinement
588
+
589
+ Returns:
590
+ List of (start_frame, end_frame, state) tuples with fractional frame indices
591
+ """
592
+ # Get interpolated phases using velocity
593
+ interpolated_phases = find_interpolated_phase_transitions(
594
+ foot_positions, contact_states, velocity_threshold, smoothing_window
595
+ )
596
+
597
+ if not use_curvature or len(interpolated_phases) == 0:
598
+ return interpolated_phases
599
+
600
+ # Refine phase boundaries using curvature analysis
601
+ refined_phases: list[tuple[float, float, ContactState]] = []
602
+
603
+ for start_frac, end_frac, state in interpolated_phases:
604
+ refined_start = start_frac
605
+ refined_end = end_frac
606
+
607
+ if state == ContactState.ON_GROUND:
608
+ # Refine landing (start of ground contact)
609
+ refined_start = refine_transition_with_curvature(
610
+ foot_positions,
611
+ start_frac,
612
+ "landing",
613
+ search_window=3,
614
+ smoothing_window=smoothing_window,
615
+ polyorder=polyorder,
616
+ )
617
+ # Refine takeoff (end of ground contact)
618
+ refined_end = refine_transition_with_curvature(
619
+ foot_positions,
620
+ end_frac,
621
+ "takeoff",
622
+ search_window=3,
623
+ smoothing_window=smoothing_window,
624
+ polyorder=polyorder,
625
+ )
626
+
627
+ elif state == ContactState.IN_AIR:
628
+ # For flight phases, takeoff is at start, landing is at end
629
+ refined_start = refine_transition_with_curvature(
630
+ foot_positions,
631
+ start_frac,
632
+ "takeoff",
633
+ search_window=3,
634
+ smoothing_window=smoothing_window,
635
+ polyorder=polyorder,
636
+ )
637
+ refined_end = refine_transition_with_curvature(
638
+ foot_positions,
639
+ end_frac,
640
+ "landing",
641
+ search_window=3,
642
+ smoothing_window=smoothing_window,
643
+ polyorder=polyorder,
644
+ )
645
+
646
+ refined_phases.append((refined_start, refined_end, state))
647
+
648
+ return refined_phases
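# Illustrative end-to-end sketch -- not part of the package diff: coarse contact states,
# then interpolated and curvature-refined boundaries, then contact durations in ms.
import numpy as np

fps = 30.0
ground = np.full(20, 0.85)
flight = 0.85 - 0.3 * np.sin(np.linspace(0.0, np.pi, 12))
positions = np.concatenate([ground, flight, ground])

states = detect_ground_contact(positions, velocity_threshold=0.02)
phases = find_interpolated_phase_transitions_with_curvature(
    positions, states, velocity_threshold=0.02
)
for start, end, state in phases:
    if state is ContactState.ON_GROUND:
        print(f"ground contact: {(end - start) / fps * 1000.0:.0f} ms")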
649
+
650
+
651
+ def find_landing_from_acceleration(
652
+ positions: np.ndarray,
653
+ accelerations: np.ndarray,
654
+ takeoff_frame: int,
655
+ fps: float,
656
+ search_duration: float = 0.7,
657
+ ) -> int:
658
+ """
659
+ Find landing frame by detecting impact acceleration after takeoff.
660
+
661
+ Similar to CMJ landing detection, looks for maximum positive acceleration
662
+ (deceleration on ground impact) after the jump peak.
663
+
664
+ Args:
665
+ positions: Array of vertical positions (normalized 0-1)
666
+ accelerations: Array of accelerations (second derivative)
667
+ takeoff_frame: Frame at takeoff (end of ground contact)
668
+ fps: Video frame rate
669
+ search_duration: Duration in seconds to search for landing (default: 0.7s)
670
+
671
+ Returns:
672
+ Landing frame index (integer)
673
+ """
674
+ # Find peak height (minimum y value = highest point)
675
+ search_start = takeoff_frame
676
+ search_end = min(len(positions), takeoff_frame + int(fps * search_duration))
677
+
678
+ if search_end <= search_start:
679
+ return min(len(positions) - 1, takeoff_frame + int(fps * 0.3))
680
+
681
+ flight_positions = positions[search_start:search_end]
682
+ peak_idx = int(np.argmin(flight_positions))
683
+ peak_frame = search_start + peak_idx
684
+
685
+ # After peak, look for landing (impact with ground)
686
+ # Landing is detected by maximum positive acceleration (deceleration on impact)
687
+ landing_search_start = peak_frame + 2
688
+ landing_search_end = min(len(accelerations), landing_search_start + int(fps * 0.6))
689
+
690
+ if landing_search_end <= landing_search_start:
691
+ return min(len(positions) - 1, peak_frame + int(fps * 0.2))
692
+
693
+ # Find impact: maximum positive acceleration after peak
694
+ landing_accelerations = accelerations[landing_search_start:landing_search_end]
695
+ impact_idx = int(np.argmax(landing_accelerations))
696
+ impact_frame = landing_search_start + impact_idx
697
+
698
+ # After acceleration peak, look for position stabilization (full ground contact)
699
+ # Check where vertical position stops increasing (athlete reaches deepest compression)
700
+ stabilization_search_start = impact_frame
701
+ stabilization_search_end = min(len(positions), impact_frame + int(fps * 0.2))
702
+
703
+ landing_frame = impact_frame
704
+ if stabilization_search_end > stabilization_search_start + 3:
705
+ # Find where position reaches maximum (lowest point) and starts stabilizing
706
+ search_positions = positions[
707
+ stabilization_search_start:stabilization_search_end
708
+ ]
709
+
710
+ # Look for the frame where position reaches its maximum (deepest landing)
711
+ max_pos_idx = int(np.argmax(search_positions))
712
+
713
+ # Landing is just after max position (athlete at deepest landing compression)
714
+ landing_frame = stabilization_search_start + max_pos_idx
715
+ landing_frame = min(len(positions) - 1, landing_frame)
716
+
717
+ return landing_frame
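# Illustrative call sketch -- not part of the package diff. Accelerations come from the
# compute_acceleration_from_derivative helper imported at the top of this module; the
# smooth synthetic arc lacks the sharp impact spike of real tracking data, so this only
# demonstrates the call shape, not realistic landing placement.
import numpy as np

fps = 30.0
ground = np.full(20, 0.85)
flight = 0.85 - 0.3 * np.sin(np.linspace(0.0, np.pi, 12))
positions = np.concatenate([ground, flight, np.full(30, 0.85)])

accelerations = compute_acceleration_from_derivative(positions, window_length=5, polyorder=2)
landing = find_landing_from_acceleration(positions, accelerations, takeoff_frame=19, fps=fps)
print(f"estimated landing frame: {landing}")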
718
+
719
+
720
+ def compute_average_foot_position(
721
+ landmarks: dict[str, tuple[float, float, float]],
722
+ ) -> tuple[float, float]:
723
+ """
724
+ Compute average foot position from ankle and foot landmarks.
725
+
726
+ Args:
727
+ landmarks: Dictionary of landmark positions
728
+
729
+ Returns:
730
+ (x, y) average foot position in normalized coordinates
731
+ """
732
+ foot_keys = [
733
+ "left_ankle",
734
+ "right_ankle",
735
+ "left_heel",
736
+ "right_heel",
737
+ "left_foot_index",
738
+ "right_foot_index",
739
+ ]
740
+
741
+ x_positions = []
742
+ y_positions = []
743
+
744
+ for key in foot_keys:
745
+ if key in landmarks:
746
+ x, y, visibility = landmarks[key]
747
+ if visibility > 0.5: # Only use visible landmarks
748
+ x_positions.append(x)
749
+ y_positions.append(y)
750
+
751
+ if not x_positions:
752
+ return (0.5, 0.5) # Default to center if no visible feet
753
+
754
+ return (float(np.mean(x_positions)), float(np.mean(y_positions)))
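# Illustrative sketch -- not part of the package diff. Landmarks map a name to
# (x, y, visibility); the occluded right heel falls below the 0.5 visibility cutoff
# and is excluded from the average.
landmarks = {
    "left_ankle": (0.48, 0.82, 0.95),
    "right_ankle": (0.52, 0.84, 0.90),
    "right_heel": (0.53, 0.86, 0.20),   # ignored: visibility not above 0.5
}
print(compute_average_foot_position(landmarks))   # approximately (0.50, 0.83)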
755
+
756
+
757
+ def extract_foot_positions_and_visibilities(
758
+ smoothed_landmarks: list[dict[str, tuple[float, float, float]] | None],
759
+ ) -> tuple[np.ndarray, np.ndarray]:
760
+ """
761
+ Extract vertical positions and average visibilities from smoothed landmarks.
762
+
763
+ This utility function eliminates code duplication between CLI and programmatic usage.
764
+
765
+ Args:
766
+ smoothed_landmarks: Smoothed landmark sequence from tracking
767
+
768
+ Returns:
769
+ Tuple of (vertical_positions, visibilities) as numpy arrays
770
+ """
771
+ position_list: list[float] = []
772
+ visibilities_list: list[float] = []
773
+
774
+ for frame_landmarks in smoothed_landmarks:
775
+ if frame_landmarks:
776
+ _, foot_y = compute_average_foot_position(frame_landmarks)
777
+ position_list.append(foot_y)
778
+
779
+ # Average visibility of foot landmarks
780
+ foot_vis = []
781
+ for key in ["left_ankle", "right_ankle", "left_heel", "right_heel"]:
782
+ if key in frame_landmarks:
783
+ foot_vis.append(frame_landmarks[key][2])
784
+ visibilities_list.append(float(np.mean(foot_vis)) if foot_vis else 0.0)
785
+ else:
786
+ # Fill missing frames with last known position or default
787
+ position_list.append(position_list[-1] if position_list else 0.5)
788
+ visibilities_list.append(0.0)
789
+
790
+ return np.array(position_list), np.array(visibilities_list)
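# Illustrative sketch -- not part of the package diff. One tracked frame followed by a
# dropped frame (None): the gap is filled with the last known position and zero visibility.
frame = {
    "left_ankle": (0.48, 0.82, 0.9),
    "right_ankle": (0.52, 0.84, 0.8),
}
positions, visibilities = extract_foot_positions_and_visibilities([frame, None])
print(positions)      # [0.83 0.83] -> second entry copied from the first
print(visibilities)   # [0.85 0.  ]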