kinemotion-0.17.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



kinemotion/cli.py ADDED
@@ -0,0 +1,22 @@
1
+ """Command-line interface for kinemotion analysis."""
2
+
3
+ import click
4
+
5
+ from .cmj.cli import cmj_analyze
6
+ from .dropjump.cli import dropjump_analyze
7
+
8
+
9
+ @click.group()
10
+ @click.version_option(package_name="kinemotion")
11
+ def cli() -> None:
12
+ """Kinemotion: Video-based kinematic analysis for athletic performance."""
13
+ pass
14
+
15
+
16
+ # Register commands from submodules
17
+ cli.add_command(dropjump_analyze)
18
+ cli.add_command(cmj_analyze)
19
+
20
+
21
+ if __name__ == "__main__":
22
+ cli()
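
The group above only wires together subcommands defined elsewhere in the package. As a minimal sketch of how it can be exercised (assuming the wheel is installed and importable as kinemotion), Click's standard test runner can invoke it in-process:

    from click.testing import CliRunner

    from kinemotion.cli import cli

    runner = CliRunner()
    result = runner.invoke(cli, ["--help"])  # lists the registered subcommands
    print(result.exit_code)  # 0 on success
    print(result.output)
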
@@ -0,0 +1,5 @@
1
+ """Counter Movement Jump (CMJ) analysis module."""
2
+
3
+ from kinemotion.cmj.kinematics import CMJMetrics
4
+
5
+ __all__ = ["CMJMetrics"]
@@ -0,0 +1,528 @@
1
+ """Phase detection logic for Counter Movement Jump (CMJ) analysis."""
2
+
3
+ from enum import Enum
4
+
5
+ import numpy as np
6
+ from scipy.signal import savgol_filter
7
+
8
+ from ..core.smoothing import compute_acceleration_from_derivative
9
+
10
+
11
+ def compute_signed_velocity(
12
+ positions: np.ndarray, window_length: int = 5, polyorder: int = 2
13
+ ) -> np.ndarray:
14
+ """
15
+ Compute SIGNED velocity for CMJ phase detection.
16
+
17
+ Unlike the drop jump analysis, which uses absolute velocity, CMJ analysis needs
18
+ signed velocity to distinguish upward (negative) from downward (positive) motion.
19
+
20
+ Args:
21
+ positions: 1D array of y-positions in normalized coordinates
22
+ window_length: Window size for Savitzky-Golay filter
23
+ polyorder: Polynomial order
24
+
25
+ Returns:
26
+ Signed velocity array where:
27
+ - Negative = upward motion (y decreasing, jumping up)
28
+ - Positive = downward motion (y increasing, squatting/falling)
29
+ """
30
+ if len(positions) < window_length:
31
+ return np.diff(positions, prepend=positions[0])
32
+
33
+ if window_length % 2 == 0:
34
+ window_length += 1
35
+
36
+ velocity = savgol_filter(
37
+ positions, window_length, polyorder, deriv=1, delta=1.0, mode="interp"
38
+ )
39
+
40
+ return velocity
41
+
42
+
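
To make the sign convention concrete, here is a small sketch (not part of the package) that mirrors compute_signed_velocity on a synthetic trajectory; with delta=1.0 the result is in normalized units per frame:

    import numpy as np
    from scipy.signal import savgol_filter

    # Hypothetical hip trace: squat down (y rises 0.5 -> 0.6), then jump up (y falls to 0.3)
    y = np.concatenate([np.linspace(0.5, 0.6, 30), np.linspace(0.6, 0.3, 30)])
    v = savgol_filter(y, window_length=5, polyorder=2, deriv=1, delta=1.0, mode="interp")
    print(v[5:25].mean() > 0)   # True: downward (squatting) motion has positive velocity
    print(v[35:55].mean() < 0)  # True: upward (jumping) motion has negative velocity
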
43
+ class CMJPhase(Enum):
44
+ """Phases of a counter movement jump."""
45
+
46
+ STANDING = "standing"
47
+ ECCENTRIC = "eccentric" # Downward movement
48
+ TRANSITION = "transition" # At lowest point
49
+ CONCENTRIC = "concentric" # Upward movement
50
+ FLIGHT = "flight"
51
+ LANDING = "landing"
52
+ UNKNOWN = "unknown"
53
+
54
+
55
+ def find_standing_phase(
56
+ positions: np.ndarray,
57
+ velocities: np.ndarray,
58
+ fps: float,
59
+ min_standing_duration: float = 0.5,
60
+ velocity_threshold: float = 0.01,
61
+ ) -> int | None:
62
+ """
63
+ Find the end of standing phase (start of countermovement).
64
+
65
+ Looks for a sustained period of low velocity (standing) and returns the frame where that stillness ends.
66
+
67
+ Args:
68
+ positions: Array of vertical positions (normalized 0-1)
69
+ velocities: Array of vertical velocities
70
+ fps: Video frame rate
71
+ min_standing_duration: Minimum standing duration in seconds (default: 0.5s)
72
+ velocity_threshold: Velocity threshold for standing detection
73
+
74
+ Returns:
75
+ Frame index where countermovement begins, or None if not detected.
76
+ """
77
+ min_standing_frames = int(fps * min_standing_duration)
78
+
79
+ if len(positions) < min_standing_frames:
80
+ return None
81
+
82
+ # Find periods of low velocity (standing)
83
+ is_standing = np.abs(velocities) < velocity_threshold
84
+
85
+ # Look for first sustained standing period
86
+ standing_count = 0
87
+ standing_end = None
88
+
89
+ for i in range(len(is_standing)):
90
+ if is_standing[i]:
91
+ standing_count += 1
92
+ if standing_count >= min_standing_frames:
93
+ standing_end = i
94
+ else:
95
+ if standing_end is not None:
96
+ # Found end of standing phase
97
+ return standing_end
98
+ standing_count = 0
99
+
100
+ return None
101
+
102
+
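
A quick sketch of the detection on synthetic data (all values hypothetical, and assuming the function above is importable): one second of stillness followed by steady downward motion yields the last still frame.

    import numpy as np

    fps = 30.0
    velocities = np.concatenate([np.zeros(30), np.full(20, 0.03)])  # still, then moving down
    positions = 0.5 + np.cumsum(velocities)  # positions consistent with the velocities
    print(find_standing_phase(positions, velocities, fps))  # 29: last frame of the still period
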
103
+ def find_countermovement_start(
104
+ velocities: np.ndarray,
105
+ countermovement_threshold: float = 0.015,
106
+ min_eccentric_frames: int = 3,
107
+ standing_start: int | None = None,
108
+ ) -> int | None:
109
+ """
110
+ Find the start of countermovement (eccentric phase).
111
+
112
+ Detects when velocity becomes consistently positive (downward motion in normalized coords).
113
+
114
+ Args:
115
+ velocities: Array of SIGNED vertical velocities
116
+ countermovement_threshold: Velocity threshold for detecting downward motion (POSITIVE)
117
+ min_eccentric_frames: Minimum consecutive frames of downward motion
118
+ standing_start: Optional frame where standing phase ended
119
+
120
+ Returns:
121
+ Frame index where countermovement begins, or None if not detected.
122
+ """
123
+ start_frame = standing_start if standing_start is not None else 0
124
+
125
+ # Look for sustained downward velocity (POSITIVE in normalized coords)
126
+ is_downward = velocities[start_frame:] > countermovement_threshold
127
+ consecutive_count = 0
128
+
129
+ for i in range(len(is_downward)):
130
+ if is_downward[i]:
131
+ consecutive_count += 1
132
+ if consecutive_count >= min_eccentric_frames:
133
+ # Found start of eccentric phase
134
+ return start_frame + i - consecutive_count + 1
135
+ else:
136
+ consecutive_count = 0
137
+
138
+ return None
139
+
140
+
141
+ def find_lowest_point(
142
+ positions: np.ndarray,
143
+ velocities: np.ndarray,
144
+ min_search_frame: int = 80,
145
+ ) -> int:
146
+ """
147
+ Find the lowest point of countermovement (transition from eccentric to concentric).
148
+
149
+ The lowest point occurs BEFORE the peak height (the jump apex). It's where
150
+ velocity crosses from positive (downward/squatting) to negative (upward/jumping).
151
+
152
+ Args:
153
+ positions: Array of vertical positions (higher value = lower in video)
154
+ velocities: Array of SIGNED vertical velocities (positive=down, negative=up)
155
+ min_search_frame: Minimum frame to start searching (default: frame 80)
156
+
157
+ Returns:
158
+ Frame index of lowest point.
159
+ """
160
+ # First, find the peak height (minimum y value = highest jump point)
161
+ peak_height_frame = int(np.argmin(positions))
162
+
163
+ # Lowest point MUST be before peak height
164
+ # Search from min_search_frame to peak_height_frame
165
+ start_frame = min_search_frame
166
+ end_frame = peak_height_frame
167
+
168
+ if end_frame <= start_frame:
169
+ start_frame = int(len(positions) * 0.3)
170
+ end_frame = int(len(positions) * 0.7)
171
+
172
+ search_positions = positions[start_frame:end_frame]
173
+
174
+ if len(search_positions) == 0:
175
+ return start_frame
176
+
177
+ # Find maximum position value in this range (lowest point in video)
178
+ lowest_idx = int(np.argmax(search_positions))
179
+ lowest_frame = start_frame + lowest_idx
180
+
181
+ return lowest_frame
182
+
183
+
184
+ def refine_transition_with_curvature(
185
+ positions: np.ndarray,
186
+ velocities: np.ndarray,
187
+ initial_frame: int,
188
+ transition_type: str,
189
+ search_radius: int = 3,
190
+ window_length: int = 5,
191
+ polyorder: int = 2,
192
+ ) -> float:
193
+ """
194
+ Refine transition frame using trajectory curvature (acceleration patterns).
195
+
196
+ Uses acceleration (second derivative) to identify characteristic patterns:
197
+ - Landing: Large acceleration spike (impact deceleration)
198
+ - Takeoff: Acceleration change (transition from static to flight)
199
+
200
+ Args:
201
+ positions: Array of vertical positions
202
+ velocities: Array of vertical velocities
203
+ initial_frame: Initial estimate of transition frame
204
+ transition_type: Type of transition ("takeoff" or "landing")
205
+ search_radius: Frames to search around initial estimate (±radius)
206
+ window_length: Window size for acceleration calculation
207
+ polyorder: Polynomial order for Savitzky-Golay filter
208
+
209
+ Returns:
210
+ Refined fractional frame index.
211
+ """
212
+ # Compute acceleration using second derivative
213
+ acceleration = compute_acceleration_from_derivative(
214
+ positions, window_length=window_length, polyorder=polyorder
215
+ )
216
+
217
+ # Define search window
218
+ search_start = max(0, initial_frame - search_radius)
219
+ search_end = min(len(positions), initial_frame + search_radius + 1)
220
+
221
+ if search_start >= search_end:
222
+ return float(initial_frame)
223
+
224
+ search_accel = acceleration[search_start:search_end]
225
+
226
+ if transition_type == "landing":
227
+ # Landing: Find maximum absolute acceleration (impact)
228
+ peak_idx = int(np.argmax(np.abs(search_accel)))
229
+ elif transition_type == "takeoff":
230
+ # Takeoff: Find maximum acceleration change
231
+ accel_change = np.abs(np.diff(search_accel))
232
+ if len(accel_change) > 0:
233
+ peak_idx = int(np.argmax(accel_change))
234
+ else:
235
+ peak_idx = 0
236
+ else:
237
+ return float(initial_frame)
238
+
239
+ curvature_frame = search_start + peak_idx
240
+
241
+ # Blend curvature-based estimate with velocity-based estimate
242
+ # 70% curvature, 30% velocity
243
+ blended_frame = 0.7 * curvature_frame + 0.3 * initial_frame
244
+
245
+ return float(blended_frame)
246
+
247
+
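
As a worked example of the blend (hypothetical numbers): if the acceleration pattern places a landing at frame 52 while the velocity-based estimate was frame 50, the refined estimate is 0.7 * 52 + 0.3 * 50 = 51.4, a fractional frame between the two that is weighted toward the curvature evidence.
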
248
+ def interpolate_threshold_crossing(
249
+ vel_before: float,
250
+ vel_after: float,
251
+ velocity_threshold: float,
252
+ ) -> float:
253
+ """
254
+ Find fractional offset where velocity crosses threshold between two frames.
255
+
256
+ Uses linear interpolation assuming velocity changes linearly between frames.
257
+
258
+ Args:
259
+ vel_before: Velocity at frame boundary N (absolute value)
260
+ vel_after: Velocity at frame boundary N+1 (absolute value)
261
+ velocity_threshold: Threshold value
262
+
263
+ Returns:
264
+ Fractional offset from frame N (0.0 to 1.0)
265
+ """
266
+ # Handle edge cases
267
+ if abs(vel_after - vel_before) < 1e-9: # Velocity not changing
268
+ return 0.5
269
+
270
+ # Linear interpolation
271
+ t = (velocity_threshold - vel_before) / (vel_after - vel_before)
272
+
273
+ # Clamp to [0, 1] range
274
+ return float(max(0.0, min(1.0, t)))
275
+
276
+
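
Worked example with hypothetical values: a speed of 0.008 at frame N and 0.020 at frame N+1 crosses a 0.015 threshold at t = (0.015 - 0.008) / (0.020 - 0.008) ≈ 0.583, i.e. at fractional frame N + 0.583 (assuming the function above is importable):

    print(interpolate_threshold_crossing(0.008, 0.020, 0.015))  # ≈ 0.583
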
277
+ def find_cmj_takeoff_from_velocity_peak(
278
+ positions: np.ndarray,
279
+ velocities: np.ndarray,
280
+ lowest_point_frame: int,
281
+ fps: float,
282
+ ) -> float:
283
+ """
284
+ Find CMJ takeoff frame as peak upward velocity during concentric phase.
285
+
286
+ Takeoff occurs at maximum push-off velocity (most negative velocity),
287
+ just as feet leave the ground. This is BEFORE peak height is reached.
288
+
289
+ Args:
290
+ positions: Array of vertical positions
291
+ velocities: Array of SIGNED vertical velocities (negative = upward)
292
+ lowest_point_frame: Frame at lowest point
293
+ fps: Video frame rate
294
+
295
+ Returns:
296
+ Takeoff frame with fractional precision.
297
+ """
298
+ concentric_start = int(lowest_point_frame)
299
+ search_duration = int(
300
+ fps * 0.3
301
+ ) # Search next 0.3 seconds (concentric to takeoff is brief)
302
+ search_end = min(len(velocities), concentric_start + search_duration)
303
+
304
+ if search_end <= concentric_start:
305
+ return float(concentric_start + 1)
306
+
307
+ # Find peak upward velocity (most NEGATIVE velocity)
308
+ # In normalized coords: negative velocity = y decreasing = jumping up
309
+ concentric_velocities = velocities[concentric_start:search_end]
310
+ takeoff_idx = int(
311
+ np.argmin(concentric_velocities)
312
+ ) # Most negative = fastest upward = takeoff
313
+ takeoff_frame = concentric_start + takeoff_idx
314
+
315
+ return float(takeoff_frame)
316
+
317
+
318
+ def find_cmj_landing_from_position_peak(
319
+ positions: np.ndarray,
320
+ velocities: np.ndarray,
321
+ accelerations: np.ndarray,
322
+ takeoff_frame: int,
323
+ fps: float,
324
+ ) -> float:
325
+ """
326
+ Find CMJ landing frame by detecting impact after peak height.
327
+
328
+ Landing occurs when the feet contact the ground after peak height. It is
329
+ detected by locating the peak height (minimum y) and then finding the
330
+ acceleration spike of the impact shortly afterwards.
331
+
332
+ Args:
333
+ positions: Array of vertical positions
334
+ velocities: Array of SIGNED vertical velocities (negative = up, positive = down)
335
+ accelerations: Array of accelerations (second derivative)
336
+ takeoff_frame: Frame at takeoff
337
+ fps: Video frame rate
338
+
339
+ Returns:
340
+ Landing frame with fractional precision.
341
+ """
342
+ # Find peak height (minimum position value in normalized coords)
343
+ search_start = int(takeoff_frame)
344
+ search_duration = int(fps * 0.7) # Search next 0.7 seconds for peak
345
+ search_end = min(len(positions), search_start + search_duration)
346
+
347
+ if search_end <= search_start:
348
+ return float(search_start + int(fps * 0.3))
349
+
350
+ # Find peak height (minimum y value = highest point in frame)
351
+ flight_positions = positions[search_start:search_end]
352
+ peak_idx = int(np.argmin(flight_positions))
353
+ peak_frame = search_start + peak_idx
354
+
355
+ # After peak, look for landing (impact with ground)
356
+ # Landing is detected by the impact deceleration spike shortly after peak height
357
+ landing_search_start = peak_frame + 2
358
+ landing_search_end = min(len(accelerations), landing_search_start + int(fps * 0.5))
359
+
360
+ if landing_search_end <= landing_search_start:
361
+ return float(peak_frame + int(fps * 0.2))
362
+
363
+ # Find impact: with y increasing downward in image coordinates, the impact
364
+ # shows up as a sharp negative acceleration spike (the fall is abruptly stopped)
365
+ landing_accelerations = accelerations[landing_search_start:landing_search_end]
366
+ impact_idx = int(np.argmin(landing_accelerations))  # Sharpest deceleration = impact
367
+ landing_frame = landing_search_start + impact_idx
368
+
369
+ return float(landing_frame)
370
+
371
+
372
+ def find_interpolated_takeoff_landing(
373
+ positions: np.ndarray,
374
+ velocities: np.ndarray,
375
+ lowest_point_frame: int,
376
+ window_length: int = 5,
377
+ polyorder: int = 2,
378
+ ) -> tuple[float, float] | None:
379
+ """
380
+ Find takeoff and landing frames for CMJ using physics-based detection.
381
+
382
+ CMJ-specific: Takeoff is detected as peak velocity (end of push-off),
383
+ not as high velocity threshold (which detects mid-flight).
384
+
385
+ Args:
386
+ positions: Array of vertical positions
387
+ velocities: Array of vertical velocities
388
+ lowest_point_frame: Frame at lowest point
389
+ window_length: Window size for derivative calculations
390
+ polyorder: Polynomial order for Savitzky-Golay filter
391
+
392
+ Returns:
393
+ Tuple of (takeoff_frame, landing_frame) with fractional precision, or None.
394
+ """
395
+ # The frame rate is not available from the inputs, so assume a default;
396
+ # it only sizes the search windows below, so precision is not critical
397
+ fps = 30.0 # Default assumption
398
+
399
+ # Compute accelerations for landing detection
400
+ accelerations = compute_acceleration_from_derivative(
401
+ positions, window_length=window_length, polyorder=polyorder
402
+ )
403
+
404
+ # Find takeoff using peak velocity method (CMJ-specific)
405
+ takeoff_frame = find_cmj_takeoff_from_velocity_peak(
406
+ positions, velocities, lowest_point_frame, fps
407
+ )
408
+
409
+ # Find landing using position peak and impact detection
410
+ landing_frame = find_cmj_landing_from_position_peak(
411
+ positions, velocities, accelerations, int(takeoff_frame), fps
412
+ )
413
+
414
+ return (takeoff_frame, landing_frame)
415
+
416
+
417
+ def _find_takeoff_frame(
418
+ velocities: np.ndarray, peak_height_frame: int, fps: float
419
+ ) -> float:
420
+ """Find takeoff frame as peak upward velocity before peak height."""
421
+ takeoff_search_start = max(0, peak_height_frame - int(fps * 0.35))
422
+ takeoff_search_end = peak_height_frame - 2
423
+
424
+ takeoff_velocities = velocities[takeoff_search_start:takeoff_search_end]
425
+
426
+ if len(takeoff_velocities) > 0:
427
+ peak_vel_idx = int(np.argmin(takeoff_velocities))
428
+ return float(takeoff_search_start + peak_vel_idx)
429
+ else:
430
+ return float(peak_height_frame - int(fps * 0.3))
431
+
432
+
433
+ def _find_lowest_frame(
434
+ velocities: np.ndarray, positions: np.ndarray, takeoff_frame: float, fps: float
435
+ ) -> float:
436
+ """Find lowest point frame before takeoff."""
437
+ lowest_search_start = max(0, int(takeoff_frame) - int(fps * 0.4))
438
+ lowest_search_end = int(takeoff_frame)
439
+
440
+ # Find where velocity crosses from positive to negative
441
+ for i in range(lowest_search_end - 1, lowest_search_start, -1):
442
+ if i > 0 and velocities[i] < 0 and velocities[i - 1] >= 0:
443
+ return float(i)
444
+
445
+ # Fallback: use maximum position
446
+ lowest_positions = positions[lowest_search_start:lowest_search_end]
447
+ if len(lowest_positions) > 0:
448
+ lowest_idx = int(np.argmax(lowest_positions))
449
+ return float(lowest_search_start + lowest_idx)
450
+ else:
451
+ return float(int(takeoff_frame) - int(fps * 0.2))
452
+
453
+
454
+ def _find_landing_frame(
455
+ accelerations: np.ndarray, peak_height_frame: int, fps: float
456
+ ) -> float:
457
+ """Find landing frame after peak height."""
458
+ landing_search_start = peak_height_frame
459
+ landing_search_end = min(len(accelerations), peak_height_frame + int(fps * 0.5))
460
+ landing_accelerations = accelerations[landing_search_start:landing_search_end]
461
+
462
+ if len(landing_accelerations) > 0:
463
+ landing_idx = int(np.argmin(landing_accelerations))
464
+ return float(landing_search_start + landing_idx)
465
+ else:
466
+ return float(peak_height_frame + int(fps * 0.3))
467
+
468
+
469
+ def _find_standing_end(velocities: np.ndarray, lowest_point: float) -> float | None:
470
+ """Find end of standing phase before lowest point."""
471
+ if lowest_point <= 20:
472
+ return None
473
+
474
+ standing_search = velocities[: int(lowest_point)]
475
+ low_vel = np.abs(standing_search) < 0.005
476
+ if np.any(low_vel):
477
+ standing_frames = np.nonzero(low_vel)[0]
478
+ if len(standing_frames) > 10:
479
+ return float(standing_frames[-1])
480
+
481
+ return None
482
+
483
+
484
+ def detect_cmj_phases(
485
+ positions: np.ndarray,
486
+ fps: float,
487
+ window_length: int = 5,
488
+ polyorder: int = 2,
489
+ ) -> tuple[float | None, float, float, float] | None:
490
+ """
491
+ Detect all phases of a counter movement jump using a simplified, robust approach.
492
+
493
+ Strategy: Work BACKWARD from peak height to find all phases.
494
+ 1. Find peak height (global minimum y)
495
+ 2. Find takeoff (peak negative velocity before peak height)
496
+ 3. Find lowest point (maximum y value before takeoff)
497
+ 4. Find landing (impact after peak height)
498
+
499
+ Args:
500
+ positions: Array of vertical positions (normalized 0-1)
501
+ fps: Video frame rate
502
+ window_length: Window size for derivative calculations
503
+ polyorder: Polynomial order for Savitzky-Golay filter
504
+
505
+ Returns:
506
+ Tuple of (standing_end_frame, lowest_point_frame, takeoff_frame, landing_frame)
507
+ with fractional precision, or None if phases cannot be detected.
508
+ """
509
+ # Compute SIGNED velocities and accelerations
510
+ velocities = compute_signed_velocity(
511
+ positions, window_length=window_length, polyorder=polyorder
512
+ )
513
+ accelerations = compute_acceleration_from_derivative(
514
+ positions, window_length=window_length, polyorder=polyorder
515
+ )
516
+
517
+ # Step 1: Find peak height (global minimum y = highest point in frame)
518
+ peak_height_frame = int(np.argmin(positions))
519
+ if peak_height_frame < 10:
520
+ return None # Peak too early, invalid
521
+
522
+ # Step 2-4: Find all phases using helper functions
523
+ takeoff_frame = _find_takeoff_frame(velocities, peak_height_frame, fps)
524
+ lowest_point = _find_lowest_frame(velocities, positions, takeoff_frame, fps)
525
+ landing_frame = _find_landing_frame(accelerations, peak_height_frame, fps)
526
+ standing_end = _find_standing_end(velocities, lowest_point)
527
+
528
+ return (standing_end, lowest_point, takeoff_frame, landing_frame)
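
To see the whole detector run end to end, the sketch below builds a synthetic, physically plausible hip trajectory (quiet standing, squat, push-off, flight, landing) in normalized coordinates at an assumed 30 fps and passes it to detect_cmj_phases. Every value here is made up for illustration, and the snippet assumes the module above is importable (its path is not shown in this diff); real positions would come from pose tracking.

    import numpy as np

    fps = 30.0

    # Hypothetical hip trajectory in normalized image coordinates (y grows downward)
    stand = np.full(45, 0.60)                    # ~1.5 s of quiet standing
    eccentric = np.linspace(0.60, 0.70, 15)      # squat down (y increases)
    concentric = np.linspace(0.70, 0.55, 9)      # push up toward takeoff
    t = np.linspace(0.0, 0.6, 18)                # ~0.6 s of flight
    flight = 0.55 - 1.8 * t * (0.6 - t)          # parabolic rise and fall
    landing = np.linspace(0.55, 0.62, 6)         # impact and settle
    settle = np.full(20, 0.62)
    positions = np.concatenate([stand, eccentric, concentric, flight, landing, settle])

    result = detect_cmj_phases(positions, fps)
    print(result)  # (standing_end, lowest_point, takeoff, landing) as fractional frame indices
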