kinemotion 0.10.12__py3-none-any.whl → 0.11.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -0,0 +1,532 @@
+ """Phase detection logic for Counter Movement Jump (CMJ) analysis."""
+
+ from enum import Enum
+
+ import numpy as np
+ from scipy.signal import savgol_filter
+
+ from ..core.smoothing import compute_acceleration_from_derivative
+
+
+ def compute_signed_velocity(
+     positions: np.ndarray, window_length: int = 5, polyorder: int = 2
+ ) -> np.ndarray:
+     """
+     Compute SIGNED velocity for CMJ phase detection.
+
+     Unlike drop jump which uses absolute velocity, CMJ needs signed velocity to
+     distinguish upward (negative) from downward (positive) motion.
+
+     Args:
+         positions: 1D array of y-positions in normalized coordinates
+         window_length: Window size for Savitzky-Golay filter
+         polyorder: Polynomial order
+
+     Returns:
+         Signed velocity array where:
+         - Negative = upward motion (y decreasing, jumping up)
+         - Positive = downward motion (y increasing, squatting/falling)
+     """
+     # Savitzky-Golay requires an odd window; adjust before the length check
+     if window_length % 2 == 0:
+         window_length += 1
+
+     if len(positions) < window_length:
+         return np.diff(positions, prepend=positions[0])
+
+     velocity = savgol_filter(
+         positions, window_length, polyorder, deriv=1, delta=1.0, mode="interp"
+     )
+
+     return velocity
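
A minimal sketch of the sign convention on a synthetic trajectory; the import path is hypothetical and should be replaced with the module's actual location inside kinemotion:

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import compute_signed_velocity

    # Synthetic normalized y-trajectory: squat down (y rises), then drive upward (y falls)
    positions = np.concatenate([np.linspace(0.50, 0.65, 30), np.linspace(0.65, 0.30, 30)])
    velocity = compute_signed_velocity(positions)

    print(velocity[:25].mean() > 0)  # True: downward (eccentric) motion is positive
    print(velocity[35:].mean() < 0)  # True: upward (concentric) motion is negative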
+
+
+ class CMJPhase(Enum):
+     """Phases of a counter movement jump."""
+
+     STANDING = "standing"
+     ECCENTRIC = "eccentric"  # Downward movement
+     TRANSITION = "transition"  # At lowest point
+     CONCENTRIC = "concentric"  # Upward movement
+     FLIGHT = "flight"
+     LANDING = "landing"
+     UNKNOWN = "unknown"
+
+
+ def find_standing_phase(
+     positions: np.ndarray,
+     velocities: np.ndarray,
+     fps: float,
+     min_standing_duration: float = 0.5,
+     velocity_threshold: float = 0.01,
+ ) -> int | None:
+     """
+     Find the end of the standing phase (start of countermovement).
+
+     Looks for a sustained period of low velocity (standing) and returns the frame
+     at which motion resumes.
+
+     Args:
+         positions: Array of vertical positions (normalized 0-1)
+         velocities: Array of vertical velocities
+         fps: Video frame rate
+         min_standing_duration: Minimum standing duration in seconds (default: 0.5s)
+         velocity_threshold: Velocity threshold for standing detection
+
+     Returns:
+         Frame index where countermovement begins, or None if not detected.
+     """
+     min_standing_frames = int(fps * min_standing_duration)
+
+     if len(positions) < min_standing_frames:
+         return None
+
+     # Find periods of low velocity (standing)
+     is_standing = np.abs(velocities) < velocity_threshold
+
+     # Look for first sustained standing period
+     standing_count = 0
+     standing_end = None
+
+     for i in range(len(is_standing)):
+         if is_standing[i]:
+             standing_count += 1
+             if standing_count >= min_standing_frames:
+                 standing_end = i
+         else:
+             if standing_end is not None:
+                 # Found end of standing phase
+                 return standing_end
+             standing_count = 0
+
+     return None
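
As a rough usage sketch (hypothetical import path, synthetic data): with about 1.5 s of quiet standing at 30 fps followed by a squat, the function should return a frame index near the onset of the squat.

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import compute_signed_velocity, find_standing_phase

    fps = 30.0
    standing = np.full(45, 0.50)                  # ~1.5 s of quiet standing
    squat = 0.50 + np.linspace(0.0, 0.30, 15)     # downward motion (y increases)
    positions = np.concatenate([standing, squat])
    velocities = compute_signed_velocity(positions)

    print(find_standing_phase(positions, velocities, fps))  # frame index near the squat onset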
+
+
+ def find_countermovement_start(
+     velocities: np.ndarray,
+     countermovement_threshold: float = 0.015,
+     min_eccentric_frames: int = 3,
+     standing_start: int | None = None,
+ ) -> int | None:
+     """
+     Find the start of countermovement (eccentric phase).
+
+     Detects when velocity becomes consistently positive (downward motion in normalized coords).
+
+     Args:
+         velocities: Array of SIGNED vertical velocities
+         countermovement_threshold: Velocity threshold for detecting downward motion (POSITIVE)
+         min_eccentric_frames: Minimum consecutive frames of downward motion
+         standing_start: Optional frame where standing phase ended
+
+     Returns:
+         Frame index where countermovement begins, or None if not detected.
+     """
+     start_frame = standing_start if standing_start is not None else 0
+
+     # Look for sustained downward velocity (POSITIVE in normalized coords)
+     is_downward = velocities[start_frame:] > countermovement_threshold
+     consecutive_count = 0
+
+     for i in range(len(is_downward)):
+         if is_downward[i]:
+             consecutive_count += 1
+             if consecutive_count >= min_eccentric_frames:
+                 # Found start of eccentric phase
+                 return start_frame + i - consecutive_count + 1
+         else:
+             consecutive_count = 0
+
+     return None
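
A small worked example of the consecutive-frame rule (hypothetical import path): with the default threshold of 0.015 and min_eccentric_frames=3, an isolated downward frame is ignored and the index of the first of three consecutive downward frames is returned.

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import find_countermovement_start

    vel = np.array([0.0, 0.02, 0.005, 0.02, 0.02, 0.02])
    print(find_countermovement_start(vel))  # 3: first of the three consecutive downward frames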
+
+
+ def find_lowest_point(
+     positions: np.ndarray,
+     velocities: np.ndarray,
+     min_search_frame: int = 80,
+ ) -> int:
+     """
+     Find the lowest point of countermovement (transition from eccentric to concentric).
+
+     The lowest point occurs BEFORE the peak height (the jump apex). It's where
+     velocity crosses from positive (downward/squatting) to negative (upward/jumping).
+
+     Args:
+         positions: Array of vertical positions (higher value = lower in video)
+         velocities: Array of SIGNED vertical velocities (positive=down, negative=up)
+         min_search_frame: Minimum frame to start searching (default: frame 80)
+
+     Returns:
+         Frame index of lowest point.
+     """
+     # First, find the peak height (minimum y value = highest jump point)
+     peak_height_frame = int(np.argmin(positions))
+
+     # Lowest point MUST be before peak height
+     # Search from min_search_frame to peak_height_frame
+     start_frame = min_search_frame
+     end_frame = peak_height_frame
+
+     if end_frame <= start_frame:
+         start_frame = int(len(positions) * 0.3)
+         end_frame = int(len(positions) * 0.7)
+
+     search_positions = positions[start_frame:end_frame]
+
+     if len(search_positions) == 0:
+         return start_frame
+
+     # Find maximum position value in this range (lowest point in video)
+     lowest_idx = int(np.argmax(search_positions))
+     lowest_frame = start_frame + lowest_idx
+
+     return lowest_frame
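
A sketch of the search logic on a synthetic trajectory (hypothetical import path): the deepest squat position is the maximum y value between min_search_frame and the global apex.

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import find_lowest_point

    t = np.arange(200)
    # Stand, squat to frame 100 (y rises), then rise through takeoff to the apex at frame 150
    positions = np.interp(t, [0, 40, 100, 150, 199], [0.50, 0.50, 0.68, 0.30, 0.50])
    velocities = np.gradient(positions)  # not used by the search, but required by the signature

    print(find_lowest_point(positions, velocities))  # 100: deepest point before the apex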
+
+
+ def refine_transition_with_curvature(
+     positions: np.ndarray,
+     velocities: np.ndarray,
+     initial_frame: int,
+     transition_type: str,
+     search_radius: int = 3,
+     window_length: int = 5,
+     polyorder: int = 2,
+ ) -> float:
+     """
+     Refine transition frame using trajectory curvature (acceleration patterns).
+
+     Uses acceleration (second derivative) to identify characteristic patterns:
+     - Landing: Large acceleration spike (impact deceleration)
+     - Takeoff: Acceleration change (transition from static to flight)
+
+     Args:
+         positions: Array of vertical positions
+         velocities: Array of vertical velocities
+         initial_frame: Initial estimate of transition frame
+         transition_type: Type of transition ("takeoff" or "landing")
+         search_radius: Frames to search around initial estimate (±radius)
+         window_length: Window size for acceleration calculation
+         polyorder: Polynomial order for Savitzky-Golay filter
+
+     Returns:
+         Refined fractional frame index.
+     """
+     # Compute acceleration using second derivative
+     acceleration = compute_acceleration_from_derivative(
+         positions, window_length=window_length, polyorder=polyorder
+     )
+
+     # Define search window
+     search_start = max(0, initial_frame - search_radius)
+     search_end = min(len(positions), initial_frame + search_radius + 1)
+
+     if search_start >= search_end:
+         return float(initial_frame)
+
+     search_accel = acceleration[search_start:search_end]
+
+     if transition_type == "landing":
+         # Landing: Find maximum absolute acceleration (impact)
+         peak_idx = int(np.argmax(np.abs(search_accel)))
+     elif transition_type == "takeoff":
+         # Takeoff: Find maximum acceleration change
+         accel_change = np.abs(np.diff(search_accel))
+         if len(accel_change) > 0:
+             peak_idx = int(np.argmax(accel_change))
+         else:
+             peak_idx = 0
+     else:
+         return float(initial_frame)
+
+     curvature_frame = search_start + peak_idx
+
+     # Blend curvature-based estimate with velocity-based estimate
+     # 70% curvature, 30% velocity
+     blended_frame = 0.7 * curvature_frame + 0.3 * initial_frame
+
+     return float(blended_frame)
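
A worked example of the blend (values illustrative only): if the velocity-based landing estimate is frame 120 and the acceleration spike within the ±3-frame window falls at frame 122, the refined estimate is 0.7 * 122 + 0.3 * 120.

    # Illustrative numbers, not taken from real data
    initial_frame = 120    # velocity-based estimate
    curvature_frame = 122  # acceleration spike found within the search radius
    blended_frame = 0.7 * curvature_frame + 0.3 * initial_frame
    print(blended_frame)   # 121.4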
+
+
+ def interpolate_threshold_crossing(
+     vel_before: float,
+     vel_after: float,
+     velocity_threshold: float,
+ ) -> float:
+     """
+     Find fractional offset where velocity crosses threshold between two frames.
+
+     Uses linear interpolation assuming velocity changes linearly between frames.
+
+     Args:
+         vel_before: Velocity at frame boundary N (absolute value)
+         vel_after: Velocity at frame boundary N+1 (absolute value)
+         velocity_threshold: Threshold value
+
+     Returns:
+         Fractional offset from frame N (0.0 to 1.0)
+     """
+     # Handle edge cases
+     if abs(vel_after - vel_before) < 1e-9:  # Velocity not changing
+         return 0.5
+
+     # Linear interpolation
+     t = (velocity_threshold - vel_before) / (vel_after - vel_before)
+
+     # Clamp to [0, 1] range
+     return float(max(0.0, min(1.0, t)))
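
A worked example of the interpolation (hypothetical import path): if velocity rises from 0.010 to 0.030 across one frame, the 0.015 threshold is crossed a quarter of the way between the two frames.

    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import interpolate_threshold_crossing

    # (0.015 - 0.010) / (0.030 - 0.010) = 0.25
    print(interpolate_threshold_crossing(0.010, 0.030, 0.015))  # 0.25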
+
+
+ def find_cmj_takeoff_from_velocity_peak(
+     positions: np.ndarray,
+     velocities: np.ndarray,
+     lowest_point_frame: int,
+     fps: float,
+     window_length: int = 5,
+     polyorder: int = 2,
+ ) -> float:
+     """
+     Find CMJ takeoff frame as peak upward velocity during concentric phase.
+
+     Takeoff occurs at maximum push-off velocity (most negative velocity),
+     just as feet leave the ground. This is BEFORE peak height is reached.
+
+     Args:
+         positions: Array of vertical positions
+         velocities: Array of SIGNED vertical velocities (negative = upward)
+         lowest_point_frame: Frame at lowest point
+         fps: Video frame rate
+         window_length: Window size for derivative calculations
+         polyorder: Polynomial order for Savitzky-Golay filter
+
+     Returns:
+         Takeoff frame with fractional precision.
+     """
+     concentric_start = int(lowest_point_frame)
+     # Search next 0.3 seconds (concentric to takeoff is brief)
+     search_duration = int(fps * 0.3)
+     search_end = min(len(velocities), concentric_start + search_duration)
+
+     if search_end <= concentric_start:
+         return float(concentric_start + 1)
+
+     # Find peak upward velocity (most NEGATIVE velocity)
+     # In normalized coords: negative velocity = y decreasing = jumping up
+     concentric_velocities = velocities[concentric_start:search_end]
+     # Most negative = fastest upward = takeoff
+     takeoff_idx = int(np.argmin(concentric_velocities))
+     takeoff_frame = concentric_start + takeoff_idx
+
+     return float(takeoff_frame)
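
A sketch of the peak-velocity rule on hand-built velocities (hypothetical import path): within 0.3 s of the lowest point, the most negative velocity marks takeoff.

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import find_cmj_takeoff_from_velocity_peak

    fps = 30.0
    velocities = np.zeros(60)
    velocities[20:27] = np.linspace(0.0, -0.06, 7)   # push-off: increasingly negative
    velocities[27:35] = np.linspace(-0.05, 0.0, 8)   # early flight: decelerating
    positions = np.zeros(60)                         # not used by this function

    # Most negative velocity inside the 9-frame search window starting at frame 20
    print(find_cmj_takeoff_from_velocity_peak(positions, velocities, 20, fps))  # 26.0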
+
+
+ def find_cmj_landing_from_position_peak(
+     positions: np.ndarray,
+     velocities: np.ndarray,
+     accelerations: np.ndarray,
+     takeoff_frame: int,
+     fps: float,
+ ) -> float:
+     """
+     Find CMJ landing frame by detecting impact after peak height.
+
+     Landing occurs when feet contact ground after peak height, detected by
+     finding where velocity transitions from negative (still going up/at peak)
+     to positive (falling) and position stabilizes.
+
+     Args:
+         positions: Array of vertical positions
+         velocities: Array of SIGNED vertical velocities (negative = up, positive = down)
+         accelerations: Array of accelerations (second derivative)
+         takeoff_frame: Frame at takeoff
+         fps: Video frame rate
+
+     Returns:
+         Landing frame with fractional precision.
+     """
+     # Find peak height (minimum position value in normalized coords)
+     search_start = int(takeoff_frame)
+     search_duration = int(fps * 0.7)  # Search next 0.7 seconds for peak
+     search_end = min(len(positions), search_start + search_duration)
+
+     if search_end <= search_start:
+         return float(search_start + int(fps * 0.3))
+
+     # Find peak height (minimum y value = highest point in frame)
+     flight_positions = positions[search_start:search_end]
+     peak_idx = int(np.argmin(flight_positions))
+     peak_frame = search_start + peak_idx
+
+     # After peak, look for landing (impact with ground)
+     # Landing is detected by maximum positive acceleration (deceleration on impact)
+     landing_search_start = peak_frame + 2
+     landing_search_end = min(len(accelerations), landing_search_start + int(fps * 0.5))
+
+     if landing_search_end <= landing_search_start:
+         return float(peak_frame + int(fps * 0.2))
+
+     # Find impact: maximum positive acceleration after peak
+     # Positive acceleration = slowing down upward motion or impact deceleration
+     landing_accelerations = accelerations[landing_search_start:landing_search_end]
+     impact_idx = int(np.argmax(landing_accelerations))  # Max positive = impact
+     landing_frame = landing_search_start + impact_idx
+
+     return float(landing_frame)
+
+
+ def find_interpolated_takeoff_landing(
+     positions: np.ndarray,
+     velocities: np.ndarray,
+     lowest_point_frame: int,
+     window_length: int = 5,
+     polyorder: int = 2,
+ ) -> tuple[float, float] | None:
+     """
+     Find takeoff and landing frames for CMJ using physics-based detection.
+
+     CMJ-specific: Takeoff is detected as peak velocity (end of push-off),
+     not as a high velocity threshold (which detects mid-flight).
+
+     Args:
+         positions: Array of vertical positions
+         velocities: Array of vertical velocities
+         lowest_point_frame: Frame at lowest point
+         window_length: Window size for derivative calculations
+         polyorder: Polynomial order for Savitzky-Golay filter
+
+     Returns:
+         Tuple of (takeoff_frame, landing_frame) with fractional precision, or None.
+     """
+     # The true frame rate is not passed in, so assume 30 fps; this only affects
+     # the width of the search windows, not the detection logic itself.
+     fps = 30.0
+
+     # Compute accelerations for landing detection
+     accelerations = compute_acceleration_from_derivative(
+         positions, window_length=window_length, polyorder=polyorder
+     )
+
+     # Find takeoff using peak velocity method (CMJ-specific)
+     takeoff_frame = find_cmj_takeoff_from_velocity_peak(
+         positions, velocities, lowest_point_frame, fps, window_length, polyorder
+     )
+
+     # Find landing using position peak and impact detection
+     landing_frame = find_cmj_landing_from_position_peak(
+         positions, velocities, accelerations, int(takeoff_frame), fps
+     )
+
+     return (takeoff_frame, landing_frame)
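
A usage sketch (hypothetical import path, synthetic ~30 fps trajectory). Note that this helper assumes 30 fps internally, so the search windows are only sized correctly for footage near that frame rate.

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import (
        compute_signed_velocity,
        find_interpolated_takeoff_landing,
    )

    t = np.arange(150)
    # Rough CMJ shape: stand, squat to frame 60, apex near frame 90, land, stand
    positions = np.interp(t, [0, 30, 60, 90, 110, 149], [0.50, 0.50, 0.66, 0.24, 0.55, 0.50])
    velocities = compute_signed_velocity(positions)

    takeoff, landing = find_interpolated_takeoff_landing(positions, velocities, 60)
    print(takeoff, landing)  # fractional frame estimates for takeoff and landing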
+
+
+ def _find_takeoff_frame(
+     velocities: np.ndarray, peak_height_frame: int, fps: float
+ ) -> float:
+     """Find takeoff frame as peak upward velocity before peak height."""
+     takeoff_search_start = max(0, peak_height_frame - int(fps * 0.35))
+     takeoff_search_end = peak_height_frame - 2
+
+     takeoff_velocities = velocities[takeoff_search_start:takeoff_search_end]
+
+     if len(takeoff_velocities) > 0:
+         peak_vel_idx = int(np.argmin(takeoff_velocities))
+         return float(takeoff_search_start + peak_vel_idx)
+     else:
+         return float(peak_height_frame - int(fps * 0.3))
+
+
+ def _find_lowest_frame(
+     velocities: np.ndarray, positions: np.ndarray, takeoff_frame: float, fps: float
+ ) -> float:
+     """Find lowest point frame before takeoff."""
+     lowest_search_start = max(0, int(takeoff_frame) - int(fps * 0.4))
+     lowest_search_end = int(takeoff_frame)
+
+     # Find where velocity crosses from positive to negative
+     for i in range(lowest_search_end - 1, lowest_search_start, -1):
+         if i > 0 and velocities[i] < 0 and velocities[i - 1] >= 0:
+             return float(i)
+
+     # Fallback: use maximum position
+     lowest_positions = positions[lowest_search_start:lowest_search_end]
+     if len(lowest_positions) > 0:
+         lowest_idx = int(np.argmax(lowest_positions))
+         return float(lowest_search_start + lowest_idx)
+     else:
+         return float(int(takeoff_frame) - int(fps * 0.2))
+
+
+ def _find_landing_frame(
+     accelerations: np.ndarray, peak_height_frame: int, fps: float
+ ) -> float:
+     """Find landing frame after peak height."""
+     landing_search_start = peak_height_frame
+     landing_search_end = min(len(accelerations), peak_height_frame + int(fps * 0.5))
+     landing_accelerations = accelerations[landing_search_start:landing_search_end]
+
+     if len(landing_accelerations) > 0:
+         landing_idx = int(np.argmin(landing_accelerations))
+         return float(landing_search_start + landing_idx)
+     else:
+         return float(peak_height_frame + int(fps * 0.3))
+
+
+ def _find_standing_end(velocities: np.ndarray, lowest_point: float) -> float | None:
+     """Find end of standing phase before lowest point."""
+     if lowest_point <= 20:
+         return None
+
+     standing_search = velocities[: int(lowest_point)]
+     low_vel = np.abs(standing_search) < 0.005
+     if np.any(low_vel):
+         standing_frames = np.nonzero(low_vel)[0]
+         if len(standing_frames) > 10:
+             return float(standing_frames[-1])
+
+     return None
+
+
+ def detect_cmj_phases(
+     positions: np.ndarray,
+     fps: float,
+     window_length: int = 5,
+     polyorder: int = 2,
+ ) -> tuple[float | None, float, float, float] | None:
+     """
+     Detect all phases of a counter movement jump using a simplified, robust approach.
+
+     Strategy: Work BACKWARD from peak height to find all phases.
+     1. Find peak height (global minimum y)
+     2. Find takeoff (peak negative velocity before peak height)
+     3. Find lowest point (maximum y value before takeoff)
+     4. Find landing (impact after peak height)
+
+     Args:
+         positions: Array of vertical positions (normalized 0-1)
+         fps: Video frame rate
+         window_length: Window size for derivative calculations
+         polyorder: Polynomial order for Savitzky-Golay filter
+
+     Returns:
+         Tuple of (standing_end_frame, lowest_point_frame, takeoff_frame, landing_frame)
+         with fractional precision, or None if phases cannot be detected.
+     """
+     # Compute SIGNED velocities and accelerations
+     velocities = compute_signed_velocity(
+         positions, window_length=window_length, polyorder=polyorder
+     )
+     accelerations = compute_acceleration_from_derivative(
+         positions, window_length=window_length, polyorder=polyorder
+     )
+
+     # Step 1: Find peak height (global minimum y = highest point in frame)
+     peak_height_frame = int(np.argmin(positions))
+     if peak_height_frame < 10:
+         return None  # Peak too early, invalid
+
+     # Steps 2-4: Find all phases using helper functions
+     takeoff_frame = _find_takeoff_frame(velocities, peak_height_frame, fps)
+     lowest_point = _find_lowest_frame(velocities, positions, takeoff_frame, fps)
+     landing_frame = _find_landing_frame(accelerations, peak_height_frame, fps)
+     standing_end = _find_standing_end(velocities, lowest_point)
+
+     return (standing_end, lowest_point, takeoff_frame, landing_frame)
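
An end-to-end sketch of the backward-from-peak strategy on a synthetic trajectory (hypothetical import path):

    import numpy as np
    # Hypothetical import path; adjust to wherever this module lives in the package
    from kinemotion.cmj.phase_detection import detect_cmj_phases

    fps = 30.0
    t = np.arange(180)
    # Synthetic normalized hip trajectory: stand, squat (y up), jump (y down), land, stand
    positions = np.interp(t, [0, 45, 75, 105, 125, 179], [0.50, 0.50, 0.68, 0.22, 0.56, 0.50])

    result = detect_cmj_phases(positions, fps)
    print(result)  # (standing_end, lowest_point, takeoff, landing) as fractional frames, or None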