kinemotion 0.35.0-py3-none-any.whl → 0.35.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kinemotion might be problematic.

kinemotion/api.py CHANGED
@@ -548,9 +548,12 @@ def process_dropjump_video(
 
     # Check if drop start was auto-detected
     drop_frame = None
-    if drop_start_frame is None and metrics.contact_start_frame is not None:
-        # Auto-detected
-        drop_frame = metrics.contact_start_frame
+    if drop_start_frame is None and metrics.drop_start_frame is not None:
+        # Auto-detected drop start from box
+        drop_frame = metrics.drop_start_frame
+    elif drop_start_frame is not None:
+        # Manual drop start provided
+        drop_frame = drop_start_frame
 
     algorithm_config = AlgorithmConfig(
         detection_method="forward_search",
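
In 0.35.1 the auto-detected value is read from the new metrics.drop_start_frame field instead of contact_start_frame, and a caller-supplied drop_start_frame is now stored as well. A minimal sketch of the resolution order (the standalone helper name resolve_drop_frame is illustrative, not part of the kinemotion API):

    def resolve_drop_frame(manual_frame: int | None, auto_frame: int | None) -> int | None:
        # Mirrors the 0.35.1 branch order: auto-detection is used only when the
        # caller did not pass a frame; a caller-supplied frame is used as-is.
        if manual_frame is None and auto_frame is not None:
            return auto_frame      # auto-detected drop start from the box
        if manual_frame is not None:
            return manual_frame    # manual drop start provided
        return None                # neither available

    assert resolve_drop_frame(None, 42) == 42    # auto-detected value used
    assert resolve_drop_frame(120, 42) == 120    # explicit argument wins
    assert resolve_drop_frame(None, None) is None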
@@ -422,11 +422,59 @@ def find_landing_frame(
     return float(landing_search_start + landing_idx)
 
 
-def find_standing_end(velocities: np.ndarray, lowest_point: float) -> float | None:
-    """Find end of standing phase before lowest point."""
+def find_standing_end(
+    velocities: np.ndarray,
+    lowest_point: float,
+    positions: np.ndarray | None = None,
+    accelerations: np.ndarray | None = None,
+) -> float | None:
+    """
+    Find end of standing phase before lowest point.
+
+    Uses acceleration-based detection to identify when downward movement begins.
+    Acceleration captures movement initiation even when velocity is negligible,
+    making it ideal for detecting slow countermovement starts.
+
+    Args:
+        velocities: Signed velocity array (for backward compatibility)
+        lowest_point: Frame index of lowest point
+        positions: Position array (unused, kept for backward compatibility)
+        accelerations: Acceleration array (if provided, uses
+            acceleration-based detection)
+
+    Returns:
+        Frame index where standing ends (countermovement begins), or None
+    """
     if lowest_point <= 20:
         return None
 
+    # Acceleration-based detection (best for detecting movement initiation)
+    if accelerations is not None:
+        # Use middle section of standing phase as baseline (avoids initial settling)
+        baseline_start = 10
+        baseline_end = min(40, int(lowest_point) - 10)
+
+        if baseline_end <= baseline_start:
+            return None
+
+        # Calculate baseline acceleration statistics
+        baseline_accel = accelerations[baseline_start:baseline_end]
+        baseline_mean = float(np.mean(baseline_accel))
+        baseline_std = float(np.std(baseline_accel))
+
+        # Threshold: 3 standard deviations above baseline
+        # This detects when acceleration significantly increases (movement starts)
+        accel_threshold = baseline_mean + 3.0 * baseline_std
+
+        # Search forward from baseline for acceleration spike
+        for i in range(baseline_end, int(lowest_point)):
+            if accelerations[i] > accel_threshold:
+                # Found start of downward acceleration
+                return float(i)
+
+        return None
+
+    # Fallback: velocity-based detection (legacy)
     standing_search = velocities[: int(lowest_point)]
     low_vel = np.abs(standing_search) < 0.005
     if np.any(low_vel):
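
The added path replaces a fixed velocity gate with a statistical onset test: a baseline window in the middle of the standing phase defines "quiet" acceleration, and the first frame exceeding mean + 3·std marks the start of the countermovement. A self-contained demo of that recipe on a synthetic trace (frame counts and signal values are made up for illustration):

    import numpy as np

    # Synthetic acceleration trace: 80 frames of quiet standing (bounded ripple),
    # then a slow build-up of downward acceleration toward the countermovement dip.
    standing = 1e-4 * np.sin(np.arange(80))
    ramp = np.linspace(0.0, 3e-3, 40)
    accelerations = np.concatenate([standing, ramp])
    lowest_point = len(accelerations) - 1        # pretend the dip is the last frame

    # Same recipe as find_standing_end: baseline = frames 10..40, threshold = mean + 3*std.
    baseline_end = min(40, lowest_point - 10)
    baseline = accelerations[10:baseline_end]
    threshold = float(np.mean(baseline)) + 3.0 * float(np.std(baseline))

    onset = next(
        (i for i in range(baseline_end, lowest_point) if accelerations[i] > threshold),
        None,
    )
    print(onset)  # a frame just past 80, i.e. right where the ramp begins

Because the threshold adapts to the athlete's own baseline noise, slow countermovement starts register even while the velocity signal is still negligible.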
@@ -479,6 +527,6 @@ detect_cmj_phases(
     takeoff_frame = find_takeoff_frame(velocities, peak_height_frame, fps)
     lowest_point = find_lowest_frame(velocities, positions, takeoff_frame, fps)
     landing_frame = find_landing_frame(accelerations, peak_height_frame, fps)
-    standing_end = find_standing_end(velocities, lowest_point)
+    standing_end = find_standing_end(velocities, lowest_point, positions, accelerations)
 
     return (standing_end, lowest_point, takeoff_frame, landing_frame)
@@ -108,10 +108,12 @@ def auto_tune_parameters(
     # =================================================================
 
     # Velocity threshold: Scale inversely with fps
-    # At 30fps, feet move ~2% of frame per frame when "stationary"
-    # At 60fps, feet move ~1% of frame per frame when "stationary"
-    # Formula: threshold = 0.02 * (30 / fps)
-    base_velocity_threshold = 0.02 * (30.0 / fps)
+    # Empirically validated with 45° oblique videos at 60fps:
+    #   - Standing (stationary): ~0.001 mean, 0.0011 max
+    #   - Flight/drop (moving): ~0.005-0.009
+    # Target threshold: 0.002 at 60fps for clear separation
+    # Formula: threshold = 0.004 * (30 / fps)
+    base_velocity_threshold = 0.004 * (30.0 / fps)
 
     # Min contact frames: Scale with fps to maintain same time duration
     # Goal: ~100ms minimum contact (3 frames @ 30fps, 6 frames @ 60fps)
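
The base threshold drops to one-fifth of its previous value at every frame rate. A quick sanity check of what the retuned formula yields at common capture rates (the frame rates below are just examples):

    # Retuned velocity threshold at a few capture rates (illustrative check only).
    for fps in (30, 60, 120):
        old = 0.02 * (30.0 / fps)
        new = 0.004 * (30.0 / fps)
        print(f"{fps:>3} fps: old={old:.4f}  new={new:.4f}")
    #  30 fps: old=0.0200  new=0.0040
    #  60 fps: old=0.0100  new=0.0020
    # 120 fps: old=0.0050  new=0.0010

At 60 fps the new value of 0.002 sits between the measured standing (~0.001) and flight/drop (~0.005-0.009) velocities quoted in the comment, which is the stated goal of the retune.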
@@ -57,6 +57,7 @@ class DropJumpMetrics:
         self.jump_height: float | None = None
         self.jump_height_kinematic: float | None = None  # From flight time
        self.jump_height_trajectory: float | None = None  # From position tracking
+        self.drop_start_frame: int | None = None  # Frame when athlete leaves box
         self.contact_start_frame: int | None = None
         self.contact_end_frame: int | None = None
         self.flight_start_frame: int | None = None
@@ -164,7 +165,7 @@ def _determine_drop_start_frame(
         foot_y_positions,
         fps,
         min_stationary_duration=0.5,
-        position_change_threshold=0.005,
+        position_change_threshold=0.01,  # Improved from 0.005 for better accuracy
         smoothing_window=smoothing_window,
     )
     return drop_start_frame
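
The doubled position_change_threshold presumably bounds how much the normalized foot y-coordinate may drift within the minimum stationary window while still counting as "standing on the box". A rough illustration of that role (this is not the package's detector, just a sketch of what the parameter controls):

    import numpy as np

    def is_stationary_window(foot_y: np.ndarray, position_change_threshold: float) -> bool:
        # Treat a window as stationary if the foot's vertical drift stays under the threshold.
        return float(np.ptp(foot_y)) < position_change_threshold

    window = np.array([0.412, 0.415, 0.409, 0.414])  # normalized y, hypothetical values
    print(is_stationary_window(window, 0.005))  # False: 0.006 of sway exceeds the old threshold
    print(is_stationary_window(window, 0.01))   # True: the relaxed threshold tolerates this sway

Relaxing the threshold means small pre-drop sway no longer breaks up the stationary phase, at the cost of accepting slightly more movement as "standing".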
@@ -412,6 +413,11 @@ def calculate_drop_jump_metrics(
         drop_start_frame, foot_y_positions, fps, smoothing_window
     )
 
+    # Store drop start frame in metrics
+    metrics.drop_start_frame = (
+        drop_start_frame_value if drop_start_frame_value > 0 else None
+    )
+
     # Find contact phases
     phases = find_contact_phases(contact_states)
     interpolated_phases = find_interpolated_phase_transitions_with_curvature(
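
The guard normalizes the detector's output before it is exposed: non-positive frame values (apparently used as a "not detected" sentinel) become None, so consumers such as process_dropjump_video above only ever see a usable frame index or None. A tiny sketch of that normalization (the standalone function name is illustrative):

    def normalize_drop_start(raw_frame: int) -> int | None:
        # Assumption: the detector signals "no drop start found" with a non-positive value.
        return raw_frame if raw_frame > 0 else None

    assert normalize_drop_start(0) is None   # detection failed -> metric stays unset
    assert normalize_drop_start(37) == 37    # detected frame stored as-is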
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kinemotion
-Version: 0.35.0
+Version: 0.35.1
 Summary: Video-based kinematic analysis for athletic performance
 Project-URL: Homepage, https://github.com/feniix/kinemotion
 Project-URL: Repository, https://github.com/feniix/kinemotion
@@ -1,8 +1,8 @@
 kinemotion/__init__.py,sha256=sxdDOekOrIgjxm842gy-6zfq7OWmGl9ShJtXCm4JI7c,723
-kinemotion/api.py,sha256=Rdz5XjFsIMK-9rByzOYiDTPz27vINB01fmdeDzPoxZY,39339
+kinemotion/api.py,sha256=oZB4Xk8KLqwOEYJCZnmEKPn_mAyQacw4s4QAhwTPH8I,39479
 kinemotion/cli.py,sha256=cqYV_7URH0JUDy1VQ_EDLv63FmNO4Ns20m6s1XAjiP4,464
 kinemotion/cmj/__init__.py,sha256=Ynv0-Oco4I3Y1Ubj25m3h9h2XFqeNwpAewXmAYOmwfU,127
-kinemotion/cmj/analysis.py,sha256=DmZ7vptPd5PAkaWW-oSablA6DWrCj8u2qPxlKQm9cVU,17089
+kinemotion/cmj/analysis.py,sha256=OfNTMLPwZIRYbX-Yd8jgZ-7pqnHRz7L2bWAHVYFsQ60,18955
 kinemotion/cmj/cli.py,sha256=Mj2h9It1jVjAauvtCxfLWTRijj7zbYhxZuebhw2Zz6w,10828
 kinemotion/cmj/debug_overlay.py,sha256=fXmWoHhqMLGo4vTtB6Ezs3yLUDOLw63zLIgU2gFlJQU,15892
 kinemotion/cmj/joint_angles.py,sha256=HmheIEiKcQz39cRezk4h-htorOhGNPsqKIR9RsAEKts,9960
@@ -10,7 +10,7 @@ kinemotion/cmj/kinematics.py,sha256=qRBe87NkX-7HQTQ8RoF-EpvfcffgP5vycJJRrxpHboc,
 kinemotion/cmj/metrics_validator.py,sha256=p_hgg0Q0UAiWGNXKSW4E9kPvyEgPNAujaxrk2XUmsBY,28619
 kinemotion/cmj/validation_bounds.py,sha256=yRhmpUzGJs0QYyL1o3mOkgUSTe4XTO2MONMITTjCv3c,11778
 kinemotion/core/__init__.py,sha256=HsqolRa60cW3vrG8F9Lvr9WvWcs5hCmsTzSgo7imi-4,1278
-kinemotion/core/auto_tuning.py,sha256=j6cul_qC6k0XyryCG93C1AWH2MKPj3UBMzuX02xaqfI,11235
+kinemotion/core/auto_tuning.py,sha256=wtCUMOhBChVJNXfEeku3GCMW4qED6MF-O_mv2sPTiVQ,11324
 kinemotion/core/cli_utils.py,sha256=zbnifPhD-OYofJioeYfJtshuWcl8OAEWtqCGVF4ctAI,7966
 kinemotion/core/debug_overlay_utils.py,sha256=TyUb5okv5qw8oeaX3jsUO_kpwf1NnaHEAOTm-8LwTno,4587
 kinemotion/core/experimental.py,sha256=IK05AF4aZS15ke85hF3TWCqRIXU1AlD_XKzFz735Ua8,3640
@@ -26,12 +26,12 @@ kinemotion/dropjump/__init__.py,sha256=tC3H3BrCg8Oj-db-Vrtx4PH_llR1Ppkd5jwaOjhQc
 kinemotion/dropjump/analysis.py,sha256=B_N_51WoChyQ8I7yaeKeqj3vw7NufgV_3QL-FBZEtW4,28752
 kinemotion/dropjump/cli.py,sha256=n_Wfv3AC6YIgRPYhO3F2nTSai0NR7fh95nAoWjryQeY,16250
 kinemotion/dropjump/debug_overlay.py,sha256=LkPw6ucb7beoYWS4L-Lvjs1KLCm5wAWDAfiznUeV2IQ,5668
-kinemotion/dropjump/kinematics.py,sha256=IH6nCOwTuocQNX1VPS_am9vPpMRUUla0a0MjDhEiXnA,17129
+kinemotion/dropjump/kinematics.py,sha256=yB4ws4VG59SUGcw1J-uXfDFfCMXBdzRh5C4jo0osXbs,17404
 kinemotion/dropjump/metrics_validator.py,sha256=sx4RodHpeiW8_PRB0GUJvkUWto1Ard1Dvrc9z8eKk7M,9351
 kinemotion/dropjump/validation_bounds.py,sha256=5b4I3CKPybuvrbn-nP5yCcGF_sH4Vtyw3a5AWWvWnBk,4645
 kinemotion/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kinemotion-0.35.0.dist-info/METADATA,sha256=vMAyXr_N5nJrQ4WvcnmQClVOw_EyvliO3PN09HMIsMw,26020
-kinemotion-0.35.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-kinemotion-0.35.0.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
-kinemotion-0.35.0.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
-kinemotion-0.35.0.dist-info/RECORD,,
+kinemotion-0.35.1.dist-info/METADATA,sha256=n93tZOI22N9m3SF1OHG2HImIGP7_fru3jN22Jr8xKrw,26020
+kinemotion-0.35.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+kinemotion-0.35.1.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
+kinemotion-0.35.1.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
+kinemotion-0.35.1.dist-info/RECORD,,