kinemotion 0.47.1__py3-none-any.whl → 0.47.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of kinemotion has been flagged as potentially problematic.

kinemotion/api.py CHANGED
@@ -5,12 +5,18 @@ import time
 from collections.abc import Callable
 from dataclasses import dataclass
 from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray
 
 from .cmj.analysis import detect_cmj_phases
 from .cmj.debug_overlay import CMJDebugOverlayRenderer
 from .cmj.kinematics import CMJMetrics, calculate_cmj_metrics
 from .cmj.metrics_validator import CMJMetricsValidator
 from .core.auto_tuning import (
+    AnalysisParameters,
+    QualityPreset,
     analyze_video_sample,
     auto_tune_parameters,
 )
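
The new `TYPE_CHECKING` block is the standard Python idiom for type-only imports: `numpy.typing.NDArray` is visible to static checkers but never imported at runtime, which is why the annotations added below quote it as `"NDArray"`. A minimal sketch of the idiom (the `mean_position` helper is hypothetical, not part of kinemotion):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported only during static analysis (mypy/pyright), never at runtime.
    from numpy.typing import NDArray


def mean_position(positions: "NDArray") -> float:
    # The quoted annotation stays valid at runtime even though
    # NDArray was never actually imported into this module.
    return float(positions.mean())
```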
@@ -38,7 +44,7 @@ from .core.pipeline_utils import (
     process_videos_bulk_generic,
 )
 from .core.pose import PoseTracker
-from .core.quality import assess_jump_quality
+from .core.quality import QualityAssessment, assess_jump_quality
 from .core.timing import NULL_TIMER, PerformanceTimer, Timer
 from .core.video_io import VideoProcessor
 from .dropjump.analysis import (
@@ -78,6 +84,146 @@ class DropJumpVideoConfig:
     tracking_confidence: float | None = None
 
 
+def _assess_dropjump_quality(
+    vertical_positions: "NDArray",
+    visibilities: "NDArray",
+    contact_states: list,
+    fps: float,
+    timer: Timer,
+) -> tuple:
+    """Assess tracking quality and detect phases.
+
+    Returns:
+        Tuple of (quality_result, outlier_mask, phases_detected, phase_count)
+    """
+    _, outlier_mask = reject_outliers(
+        vertical_positions,
+        use_ransac=True,
+        use_median=True,
+        interpolate=False,
+    )
+
+    phases = find_contact_phases(contact_states)
+    phases_detected = len(phases) > 0
+    phase_count = len(phases)
+
+    quality_result = assess_jump_quality(
+        visibilities=visibilities,
+        positions=vertical_positions,
+        outlier_mask=outlier_mask,
+        fps=fps,
+        phases_detected=phases_detected,
+        phase_count=phase_count,
+    )
+
+    return quality_result, outlier_mask, phases_detected, phase_count
+
+
+def _build_dropjump_metadata(
+    video_path: str,
+    video: "VideoProcessor",
+    params: "AnalysisParameters",
+    quality_result: QualityAssessment,
+    drop_start_frame: int | None,
+    metrics: DropJumpMetrics,
+    processing_time: float,
+    quality_preset: "QualityPreset",
+    timer: Timer,
+) -> ResultMetadata:
+    """Build complete result metadata."""
+    drop_frame = None
+    if drop_start_frame is None and metrics.drop_start_frame is not None:
+        drop_frame = metrics.drop_start_frame
+    elif drop_start_frame is not None:
+        drop_frame = drop_start_frame
+
+    algorithm_config = AlgorithmConfig(
+        detection_method="forward_search",
+        tracking_method="mediapipe_pose",
+        model_complexity=1,
+        smoothing=SmoothingConfig(
+            window_size=params.smoothing_window,
+            polynomial_order=params.polyorder,
+            use_bilateral_filter=params.bilateral_filter,
+            use_outlier_rejection=params.outlier_rejection,
+        ),
+        detection=DetectionConfig(
+            velocity_threshold=params.velocity_threshold,
+            min_contact_frames=params.min_contact_frames,
+            visibility_threshold=params.visibility_threshold,
+            use_curvature_refinement=params.use_curvature,
+        ),
+        drop_detection=DropDetectionConfig(
+            auto_detect_drop_start=(drop_start_frame is None),
+            detected_drop_frame=drop_frame,
+            min_stationary_duration_s=0.5,
+        ),
+    )
+
+    video_info = VideoInfo(
+        source_path=video_path,
+        fps=video.fps,
+        width=video.width,
+        height=video.height,
+        duration_s=video.frame_count / video.fps,
+        frame_count=video.frame_count,
+        codec=video.codec,
+    )
+
+    stage_times = convert_timer_to_stage_names(timer.get_metrics())
+
+    processing_info = ProcessingInfo(
+        version=get_kinemotion_version(),
+        timestamp=create_timestamp(),
+        quality_preset=quality_preset.value,
+        processing_time_s=processing_time,
+        timing_breakdown=stage_times,
+    )
+
+    return ResultMetadata(
+        quality=quality_result,
+        video=video_info,
+        processing=processing_info,
+        algorithm=algorithm_config,
+    )
+
+
+def _save_dropjump_json(
+    json_output: str,
+    metrics: DropJumpMetrics,
+    timer: Timer,
+    verbose: bool,
+) -> None:
+    """Save metrics to JSON file."""
+    with timer.measure("json_serialization"):
+        output_path = Path(json_output)
+        metrics_dict = metrics.to_dict()
+        json_str = json.dumps(metrics_dict, indent=2)
+        output_path.write_text(json_str)
+
+    if verbose:
+        print(f"Metrics written to: {json_output}")
+
+
+def _print_dropjump_summary(
+    start_time: float,
+    timer: Timer,
+) -> None:
+    """Print verbose timing summary."""
+    total_time = time.time() - start_time
+    stage_times = convert_timer_to_stage_names(timer.get_metrics())
+
+    print("\n=== Timing Summary ===")
+    for stage, duration in stage_times.items():
+        percentage = (duration / total_time) * 100
+        dur_ms = duration * 1000
+        print(f"{stage:.<40} {dur_ms:>6.0f}ms ({percentage:>5.1f}%)")
+    total_ms = total_time * 1000
+    print(f"{('Total'):.>40} {total_ms:>6.0f}ms (100.0%)")
+    print()
+    print("Analysis complete!")
+
+
 def _generate_debug_video(
     output_video: str,
     frames: list,
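
The extracted `_print_dropjump_summary` relies on format specs for its aligned table: `:.<40` left-aligns the stage name padded with dots, `:>6.0f` right-aligns whole milliseconds, and `:>5.1f` right-aligns the percentage. A standalone sketch with invented stage names and durations:

```python
# Illustrative only: these stage names and durations are made up.
stage_times = {"pose_tracking": 1.234, "quality_assessment": 0.056}
total_time = sum(stage_times.values())

print("\n=== Timing Summary ===")
for stage, duration in stage_times.items():
    percentage = (duration / total_time) * 100
    # ".<40" pads the name with dots to 40 columns; ">6.0f" right-aligns ms.
    print(f"{stage:.<40} {duration * 1000:>6.0f}ms ({percentage:>5.1f}%)")
```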
@@ -281,65 +427,10 @@ def process_dropjump_video(
     if verbose:
         print("Assessing tracking quality...")
     with timer.measure("quality_assessment"):
-        _, outlier_mask = reject_outliers(
-            vertical_positions,
-            use_ransac=True,
-            use_median=True,
-            interpolate=False,
-        )
-
-        phases = find_contact_phases(contact_states)
-        phases_detected = len(phases) > 0
-        phase_count = len(phases)
-
-        quality_result = assess_jump_quality(
-            visibilities=visibilities,
-            positions=vertical_positions,
-            outlier_mask=outlier_mask,
-            fps=video.fps,
-            phases_detected=phases_detected,
-            phase_count=phase_count,
+        quality_result, _, _, _ = _assess_dropjump_quality(
+            vertical_positions, visibilities, contact_states, video.fps, timer
         )
 
-    drop_frame = None
-    if drop_start_frame is None and metrics.drop_start_frame is not None:
-        drop_frame = metrics.drop_start_frame
-    elif drop_start_frame is not None:
-        drop_frame = drop_start_frame
-
-    algorithm_config = AlgorithmConfig(
-        detection_method="forward_search",
-        tracking_method="mediapipe_pose",
-        model_complexity=1,
-        smoothing=SmoothingConfig(
-            window_size=params.smoothing_window,
-            polynomial_order=params.polyorder,
-            use_bilateral_filter=params.bilateral_filter,
-            use_outlier_rejection=params.outlier_rejection,
-        ),
-        detection=DetectionConfig(
-            velocity_threshold=params.velocity_threshold,
-            min_contact_frames=params.min_contact_frames,
-            visibility_threshold=params.visibility_threshold,
-            use_curvature_refinement=params.use_curvature,
-        ),
-        drop_detection=DropDetectionConfig(
-            auto_detect_drop_start=(drop_start_frame is None),
-            detected_drop_frame=drop_frame,
-            min_stationary_duration_s=0.5,
-        ),
-    )
-
-    video_info = VideoInfo(
-        source_path=video_path,
-        fps=video.fps,
-        width=video.width,
-        height=video.height,
-        duration_s=video.frame_count / video.fps,
-        frame_count=video.frame_count,
-        codec=video.codec,
-    )
-
     if verbose and quality_result.warnings:
         print("\n⚠️ Quality Warnings:")
         for warning in quality_result.warnings:
@@ -370,48 +461,24 @@ def process_dropjump_video(
             print(f" [{issue.severity.value}] {issue.metric}: {issue.message}")
 
     processing_time = time.time() - start_time
-    stage_times = convert_timer_to_stage_names(timer.get_metrics())
-
-    processing_info = ProcessingInfo(
-        version=get_kinemotion_version(),
-        timestamp=create_timestamp(),
-        quality_preset=quality_preset.value,
-        processing_time_s=processing_time,
-        timing_breakdown=stage_times,
-    )
-
-    result_metadata = ResultMetadata(
-        quality=quality_result,
-        video=video_info,
-        processing=processing_info,
-        algorithm=algorithm_config,
+    result_metadata = _build_dropjump_metadata(
+        video_path,
+        video,
+        params,
+        quality_result,
+        drop_start_frame,
+        metrics,
+        processing_time,
+        quality_preset,
+        timer,
     )
-
     metrics.result_metadata = result_metadata
 
     if json_output:
-        with timer.measure("json_serialization"):
-            output_path = Path(json_output)
-            metrics_dict = metrics.to_dict()
-            json_str = json.dumps(metrics_dict, indent=2)
-            output_path.write_text(json_str)
-
-        if verbose:
-            print(f"Metrics written to: {json_output}")
+        _save_dropjump_json(json_output, metrics, timer, verbose)
 
     if verbose:
-        total_time = time.time() - start_time
-        stage_times_verbose = convert_timer_to_stage_names(timer.get_metrics())
-
-        print("\n=== Timing Summary ===")
-        for stage, duration in stage_times_verbose.items():
-            percentage = (duration / total_time) * 100
-            dur_ms = duration * 1000
-            print(f"{stage:.<40} {dur_ms:>6.0f}ms ({percentage:>5.1f}%)")
-        total_ms = total_time * 1000
-        print(f"{('Total'):.>40} {total_ms:>6.0f}ms (100.0%)")
-        print()
-        print("Analysis complete!")
+        _print_dropjump_summary(start_time, timer)
 
     return metrics
 
kinemotion/cmj/analysis.py CHANGED
@@ -480,7 +480,7 @@ def compute_average_hip_position(
 def find_standing_end(
     velocities: np.ndarray,
     lowest_point: float,
-    positions: np.ndarray | None = None,
+    _positions: np.ndarray | None = None,
     accelerations: np.ndarray | None = None,
 ) -> float | None:
     """
@@ -493,7 +493,7 @@ def find_standing_end(
     Args:
         velocities: Signed velocity array (for backward compatibility)
         lowest_point: Frame index of lowest point
-        positions: Position array (unused, kept for backward compatibility)
+        _positions: Intentionally unused - kept for backward compatibility
         accelerations: Acceleration array (if provided, uses
             acceleration-based detection)
 
kinemotion/cmj/kinematics.py CHANGED
@@ -147,133 +147,219 @@ class CMJMetrics:
         return result
 
 
-def calculate_cmj_metrics(
+def _calculate_scale_factor(
     positions: NDArray[np.float64],
-    velocities: NDArray[np.float64],
-    standing_start_frame: float | None,
-    lowest_point_frame: float,
     takeoff_frame: float,
     landing_frame: float,
-    fps: float,
-    tracking_method: str = "foot",
-) -> CMJMetrics:
-    """Calculate all CMJ metrics from detected phases.
+    jump_height: float,
+) -> float:
+    """Calculate meters per normalized unit scaling factor from flight phase.
 
     Args:
-        positions: Array of vertical positions (normalized coordinates)
-        velocities: Array of vertical velocities
-        standing_start_frame: Frame where countermovement begins (fractional)
-        lowest_point_frame: Frame at lowest point (fractional)
-        takeoff_frame: Frame at takeoff (fractional)
-        landing_frame: Frame at landing (fractional)
-        fps: Video frames per second
-        tracking_method: Tracking method used ("foot" or "com")
+        positions: Array of vertical positions
+        takeoff_frame: Takeoff frame index
+        landing_frame: Landing frame index
+        jump_height: Calculated jump height in meters
 
     Returns:
-        CMJMetrics object with all calculated metrics.
+        Scale factor (meters per normalized unit)
     """
-    # Calculate flight time from takeoff to landing
-    flight_time = (landing_frame - takeoff_frame) / fps
-
-    # Calculate jump height from flight time using kinematic formula
-    # h = g * t^2 / 8 (where t is total flight time)
-    g = 9.81  # gravity in m/s^2
-    jump_height = (g * flight_time**2) / 8
-
-    # Determine scaling factor (meters per normalized unit)
-    # We use the flight phase displacement in normalized units compared to
-    # kinematic jump height
     flight_start_idx = int(takeoff_frame)
     flight_end_idx = int(landing_frame)
     flight_positions = positions[flight_start_idx:flight_end_idx]
 
-    scale_factor = 0.0
-    if len(flight_positions) > 0:
-        # Peak height is minimum y value (highest point in frame)
-        peak_flight_pos = np.min(flight_positions)
-        takeoff_pos = positions[flight_start_idx]
-        # Displacement is upward (takeoff_pos - peak_pos) because y decreases upward
-        flight_displacement = takeoff_pos - peak_flight_pos
+    if len(flight_positions) == 0:
+        return 0.0
 
-        if flight_displacement > 0.001:  # Avoid division by zero or noise
-            scale_factor = jump_height / flight_displacement
+    peak_flight_pos = np.min(flight_positions)
+    takeoff_pos = positions[flight_start_idx]
+    flight_displacement = takeoff_pos - peak_flight_pos
+
+    if flight_displacement > 0.001:
+        return jump_height / flight_displacement
+    return 0.0
 
-    # Calculate countermovement depth
-    if standing_start_frame is not None:
-        standing_position = positions[int(standing_start_frame)]
-    else:
-        # Use position at start of recording if standing not detected
-        standing_position = positions[0]
 
+def _calculate_countermovement_depth(
+    positions: NDArray[np.float64],
+    standing_start_frame: float | None,
+    lowest_point_frame: float,
+    scale_factor: float,
+) -> float:
+    """Calculate countermovement depth in meters.
+
+    Args:
+        positions: Array of vertical positions
+        standing_start_frame: Standing phase end frame (or None)
+        lowest_point_frame: Lowest point frame index
+        scale_factor: Meters per normalized unit
+
+    Returns:
+        Countermovement depth in meters
+    """
+    standing_position = (
+        positions[int(standing_start_frame)]
+        if standing_start_frame is not None
+        else positions[0]
+    )
     lowest_position = positions[int(lowest_point_frame)]
-    # Depth in normalized units
     depth_normalized = abs(standing_position - lowest_position)
-    # Convert to meters
-    countermovement_depth = depth_normalized * scale_factor
+    return depth_normalized * scale_factor
+
+
+def _calculate_phase_durations(
+    standing_start_frame: float | None,
+    lowest_point_frame: float,
+    takeoff_frame: float,
+    fps: float,
+) -> tuple[float, float, float]:
+    """Calculate phase durations in seconds.
+
+    Args:
+        standing_start_frame: Standing phase end frame (or None)
+        lowest_point_frame: Lowest point frame index
+        takeoff_frame: Takeoff frame index
+        fps: Frames per second
 
-    # Calculate phase durations
+    Returns:
+        Tuple of (eccentric_duration, concentric_duration, total_movement_time)
+    """
     if standing_start_frame is not None:
         eccentric_duration = (lowest_point_frame - standing_start_frame) / fps
         total_movement_time = (takeoff_frame - standing_start_frame) / fps
     else:
-        # If no standing phase detected, measure from start
         eccentric_duration = lowest_point_frame / fps
         total_movement_time = takeoff_frame / fps
 
     concentric_duration = (takeoff_frame - lowest_point_frame) / fps
+    return eccentric_duration, concentric_duration, total_movement_time
 
-    # Velocity scaling factor: units/frame -> meters/second
-    # v_m_s = v_units_frame * fps * scale_factor
-    velocity_scale = scale_factor * fps
 
-    # Calculate peak velocities
-    # Eccentric phase: Downward motion = Positive velocity in image coords
-    if standing_start_frame is not None:
-        eccentric_start_idx = int(standing_start_frame)
-    else:
-        eccentric_start_idx = 0
+def _calculate_peak_velocities(
+    velocities: NDArray[np.float64],
+    standing_start_frame: float | None,
+    lowest_point_frame: float,
+    takeoff_frame: float,
+    velocity_scale: float,
+) -> tuple[float, float]:
+    """Calculate peak eccentric and concentric velocities.
 
+    Args:
+        velocities: Array of velocities
+        standing_start_frame: Standing phase end frame (or None)
+        lowest_point_frame: Lowest point frame index
+        takeoff_frame: Takeoff frame index
+        velocity_scale: Velocity scaling factor
+
+    Returns:
+        Tuple of (peak_eccentric_velocity, peak_concentric_velocity)
+    """
+    eccentric_start_idx = int(standing_start_frame) if standing_start_frame else 0
     eccentric_end_idx = int(lowest_point_frame)
     eccentric_velocities = velocities[eccentric_start_idx:eccentric_end_idx]
 
+    peak_eccentric_velocity = 0.0
     if len(eccentric_velocities) > 0:
-        # Peak eccentric velocity is maximum positive value (fastest downward)
-        # We take max and ensure it's positive (it should be)
-        peak_eccentric_velocity = float(np.max(eccentric_velocities)) * velocity_scale
-        # If max is negative (weird), it means no downward motion detected
-        if peak_eccentric_velocity < 0:
-            peak_eccentric_velocity = 0.0
-    else:
-        peak_eccentric_velocity = 0.0
+        peak = float(np.max(eccentric_velocities)) * velocity_scale
+        peak_eccentric_velocity = max(0.0, peak)
 
-    # Concentric phase: Upward motion = Negative velocity in image coords
     concentric_start_idx = int(lowest_point_frame)
     concentric_end_idx = int(takeoff_frame)
     concentric_velocities = velocities[concentric_start_idx:concentric_end_idx]
 
+    peak_concentric_velocity = 0.0
     if len(concentric_velocities) > 0:
-        # Peak concentric velocity is minimum value (most negative = fastest upward)
-        # We take abs to report magnitude
         peak_concentric_velocity = (
             abs(float(np.min(concentric_velocities))) * velocity_scale
         )
-    else:
-        peak_concentric_velocity = 0.0
 
-    # Estimate transition time (amortization phase)
-    # Look for period around lowest point where velocity is near zero
-    transition_threshold = 0.005  # Very low velocity threshold
-    search_window = int(fps * 0.1)  # Search within ±100ms
+    return peak_eccentric_velocity, peak_concentric_velocity
+
+
+def _calculate_transition_time(
+    velocities: NDArray[np.float64],
+    lowest_point_frame: float,
+    fps: float,
+) -> float | None:
+    """Calculate transition/amortization time around lowest point.
+
+    Args:
+        velocities: Array of velocities
+        lowest_point_frame: Lowest point frame index
+        fps: Frames per second
+
+    Returns:
+        Transition time in seconds, or None if no transition detected
+    """
+    transition_threshold = 0.005
+    search_window = int(fps * 0.1)
 
     transition_start_idx = max(0, int(lowest_point_frame) - search_window)
     transition_end_idx = min(len(velocities), int(lowest_point_frame) + search_window)
 
-    transition_frames = 0
-    for i in range(transition_start_idx, transition_end_idx):
-        if abs(velocities[i]) < transition_threshold:
-            transition_frames += 1
+    transition_frames = sum(
+        1
+        for i in range(transition_start_idx, transition_end_idx)
+        if abs(velocities[i]) < transition_threshold
+    )
+
+    return transition_frames / fps if transition_frames > 0 else None
+
+
+def calculate_cmj_metrics(
+    positions: NDArray[np.float64],
+    velocities: NDArray[np.float64],
+    standing_start_frame: float | None,
+    lowest_point_frame: float,
+    takeoff_frame: float,
+    landing_frame: float,
+    fps: float,
+    tracking_method: str = "foot",
+) -> CMJMetrics:
+    """Calculate all CMJ metrics from detected phases.
+
+    Args:
+        positions: Array of vertical positions (normalized coordinates)
+        velocities: Array of vertical velocities
+        standing_start_frame: Frame where countermovement begins (fractional)
+        lowest_point_frame: Frame at lowest point (fractional)
+        takeoff_frame: Frame at takeoff (fractional)
+        landing_frame: Frame at landing (fractional)
+        fps: Video frames per second
+        tracking_method: Tracking method used ("foot" or "com")
+
+    Returns:
+        CMJMetrics object with all calculated metrics.
+    """
+    # Calculate jump height from flight time using kinematic formula: h = g*t²/8
+    g = 9.81
+    flight_time = (landing_frame - takeoff_frame) / fps
+    jump_height = (g * flight_time**2) / 8
+
+    # Calculate scaling factor and derived metrics
+    scale_factor = _calculate_scale_factor(
+        positions, takeoff_frame, landing_frame, jump_height
+    )
+    countermovement_depth = _calculate_countermovement_depth(
+        positions, standing_start_frame, lowest_point_frame, scale_factor
+    )
+
+    eccentric_duration, concentric_duration, total_movement_time = (
+        _calculate_phase_durations(
+            standing_start_frame, lowest_point_frame, takeoff_frame, fps
+        )
+    )
+
+    velocity_scale = scale_factor * fps
+    peak_eccentric_velocity, peak_concentric_velocity = _calculate_peak_velocities(
+        velocities,
+        standing_start_frame,
+        lowest_point_frame,
+        takeoff_frame,
+        velocity_scale,
+    )
 
-    transition_time = transition_frames / fps if transition_frames > 0 else None
+    transition_time = _calculate_transition_time(velocities, lowest_point_frame, fps)
 
     return CMJMetrics(
         jump_height=jump_height,
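
The refactor leaves the physics unchanged: flight time from the takeoff and landing frames gives jump height via h = g·t²/8, that height calibrates the normalized-coordinate scale, and the scale times fps converts per-frame velocities to m/s. A worked sketch of the chain with synthetic numbers (not package output):

```python
# Synthetic values for illustration.
fps = 240.0
takeoff_frame, landing_frame = 1000.0, 1120.0

g = 9.81
flight_time = (landing_frame - takeoff_frame) / fps  # 120 frames -> 0.5 s
jump_height = (g * flight_time**2) / 8               # 9.81 * 0.25 / 8 ~= 0.307 m

# If the marker rose 0.15 normalized units during flight, one normalized
# unit corresponds to roughly 2.04 m:
flight_displacement = 0.15
scale_factor = jump_height / flight_displacement     # ~= 2.04 m per unit

# Velocities measured in units/frame convert to m/s via fps:
velocity_scale = scale_factor * fps                  # ~= 490 m/s per (unit/frame)
print(jump_height, scale_factor, velocity_scale)
```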
kinemotion/cmj/metrics_validator.py CHANGED
@@ -449,7 +449,7 @@ class CMJMetricsValidator(MetricsValidator):
         # Convert ms to seconds
         flight_time = flight_time_ms / 1000.0
 
-        # h = g * t^2 / 8
+        # Calculate expected height using kinematic formula: h = g*t²/8
         g = 9.81
         expected_height = (g * flight_time**2) / 8
         error_pct = abs(jump_height - expected_height) / expected_height
@@ -483,7 +483,7 @@ class CMJMetricsValidator(MetricsValidator):
         if velocity is None or jump_height is None:
             return
 
-        # h = v^2 / (2*g)
+        # Calculate expected velocity using kinematic formula: v² = 2*g*h
         g = 9.81
         expected_velocity = (2 * g * jump_height) ** 0.5
         error_pct = abs(velocity - expected_velocity) / expected_velocity
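
The two reworded comments describe complementary checks: h = g·t²/8 predicts height from flight time, and the takeoff-velocity check inverts h = v²/(2g). A quick numeric check of the velocity formula (illustrative values):

```python
g = 9.81
jump_height = 0.307  # meters, e.g. from a 0.5 s flight time

# v^2 = 2*g*h, so v = sqrt(2*g*h)
expected_velocity = (2 * g * jump_height) ** 0.5  # ~= 2.45 m/s
print(f"{expected_velocity:.2f} m/s")
```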
kinemotion/core/timing.py CHANGED
@@ -152,11 +152,12 @@ class NullTimer:
         The context manager protocol (__enter__/__exit__) has minimal overhead.
 
         Args:
-            name: Ignored - kept for protocol compatibility
+            name: Operation name (unused in no-op implementation)
 
         Returns:
             Singleton null context manager
         """
+        del name  # Intentionally unused - satisfies Timer protocol
         return _NULL_CONTEXT
 
     def get_metrics(self) -> dict[str, float]:
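
`del name` is one of two conventions this release uses for intentionally unused parameters; the other is the underscore prefix given to `_positions` in `find_standing_end`. Both keep the public signature intact while satisfying unused-argument lints. A minimal sketch of the `del` pattern (the `NoOpTimer` class is hypothetical, not kinemotion's actual Timer protocol):

```python
from contextlib import nullcontext


class NoOpTimer:
    """No-op stand-in that keeps the same measure(name) signature."""

    def measure(self, name: str):
        del name  # Intentionally unused - the no-op ignores the label
        return nullcontext()


# Call sites look identical whether timing is enabled or not:
with NoOpTimer().measure("json_serialization"):
    pass
```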
kinemotion/dropjump/validation_bounds.py CHANGED
@@ -70,6 +70,59 @@ class DropJumpBounds:
     )
 
 
+def _score_jump_height(jump_height: float) -> float:
+    """Convert jump height to athlete profile score (0-4).
+
+    Args:
+        jump_height: Jump height in meters
+
+    Returns:
+        Score from 0 (elderly) to 4 (elite)
+    """
+    thresholds = [(0.25, 0), (0.35, 1), (0.50, 2), (0.70, 3)]
+    for threshold, score in thresholds:
+        if jump_height < threshold:
+            return float(score)
+    return 4.0  # Elite
+
+
+def _score_contact_time(contact_time_s: float) -> float:
+    """Convert contact time to athlete profile score (0-4).
+
+    Args:
+        contact_time_s: Ground contact time in seconds
+
+    Returns:
+        Score from 0 (elderly) to 4 (elite)
+    """
+    thresholds = [(0.60, 0), (0.50, 1), (0.45, 2), (0.40, 3)]
+    for threshold, score in thresholds:
+        if contact_time_s > threshold:
+            return float(score)
+    return 4.0  # Elite
+
+
+def _classify_combined_score(combined_score: float) -> AthleteProfile:
+    """Classify combined score into athlete profile.
+
+    Args:
+        combined_score: Weighted score from height and contact time
+
+    Returns:
+        Athlete profile classification
+    """
+    thresholds = [
+        (1.0, AthleteProfile.ELDERLY),
+        (1.7, AthleteProfile.UNTRAINED),
+        (2.7, AthleteProfile.RECREATIONAL),
+        (3.7, AthleteProfile.TRAINED),
+    ]
+    for threshold, profile in thresholds:
+        if combined_score < threshold:
+            return profile
+    return AthleteProfile.ELITE
+
+
 def estimate_athlete_profile(
     metrics: dict, gender: str | None = None
 ) -> AthleteProfile:
@@ -92,48 +145,14 @@ def estimate_athlete_profile(
     contact_time = metrics.get("data", {}).get("ground_contact_time_ms")
 
     if jump_height is None or contact_time is None:
-        return AthleteProfile.RECREATIONAL  # Default
+        return AthleteProfile.RECREATIONAL
 
-    # Convert contact_time from ms to seconds
     contact_time_s = contact_time / 1000.0
 
-    # Decision logic: Use weighted combination to avoid over-weighting single metrics
-    # Calculate profile scores based on each metric
-    height_score = 0.0
-    if jump_height < 0.25:
-        height_score = 0  # Elderly
-    elif jump_height < 0.35:
-        height_score = 1  # Untrained
-    elif jump_height < 0.50:
-        height_score = 2  # Recreational
-    elif jump_height < 0.70:
-        height_score = 3  # Trained
-    else:
-        height_score = 4  # Elite
-
-    contact_score = 0.0
-    if contact_time_s > 0.60:
-        contact_score = 0  # Elderly
-    elif contact_time_s > 0.50:
-        contact_score = 1  # Untrained
-    elif contact_time_s > 0.45:
-        contact_score = 2  # Recreational
-    elif contact_time_s > 0.40:
-        contact_score = 3  # Trained
-    else:
-        contact_score = 4  # Elite
-
-    # Weight height more heavily (70%) than contact time (30%)
+    # Calculate weighted combination: height (70%) + contact time (30%)
     # Height is more reliable indicator across populations
+    height_score = _score_jump_height(jump_height)
+    contact_score = _score_contact_time(contact_time_s)
     combined_score = (height_score * 0.70) + (contact_score * 0.30)
 
-    if combined_score < 1.0:
-        return AthleteProfile.ELDERLY
-    elif combined_score < 1.7:
-        return AthleteProfile.UNTRAINED
-    elif combined_score < 2.7:
-        return AthleteProfile.RECREATIONAL
-    elif combined_score < 3.7:
-        return AthleteProfile.TRAINED
-    else:
-        return AthleteProfile.ELITE
+    return _classify_combined_score(combined_score)
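
The extracted helpers keep the original thresholds, so a classification is easy to trace by hand. With illustrative inputs of a 0.45 m jump (score 2, below the 0.50 cutoff) and a 420 ms contact (score 3, above the 0.40 cutoff):

```python
# Hand-traced example with invented inputs.
height_score = 2.0   # jump_height = 0.45 m  -> below 0.50 threshold
contact_score = 3.0  # contact_time_s = 0.42 -> above 0.40 threshold

combined_score = (height_score * 0.70) + (contact_score * 0.30)  # = 2.3
assert combined_score < 2.7  # -> AthleteProfile.RECREATIONAL band
```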
kinemotion-0.47.1.dist-info/METADATA → kinemotion-0.47.2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kinemotion
-Version: 0.47.1
+Version: 0.47.2
 Summary: Video-based kinematic analysis for athletic performance
 Project-URL: Homepage, https://github.com/feniix/kinemotion
 Project-URL: Repository, https://github.com/feniix/kinemotion
kinemotion-0.47.1.dist-info/RECORD → kinemotion-0.47.2.dist-info/RECORD RENAMED
@@ -1,13 +1,13 @@
 kinemotion/__init__.py,sha256=wPItmyGJUOFM6GPRVhAEvRz0-ErI7e2qiUREYJ9EfPQ,943
-kinemotion/api.py,sha256=K3E5kEQQyPZrEWYaIczJNxxWREWfclIvQYjXcX--9-k,31185
+kinemotion/api.py,sha256=vqr3IZPQhvAMgNI0WbU295ufXRJ8-tu638KojF_upaI,32153
 kinemotion/cli.py,sha256=cqYV_7URH0JUDy1VQ_EDLv63FmNO4Ns20m6s1XAjiP4,464
 kinemotion/cmj/__init__.py,sha256=Ynv0-Oco4I3Y1Ubj25m3h9h2XFqeNwpAewXmAYOmwfU,127
-kinemotion/cmj/analysis.py,sha256=YDj7HpSCzrsw6mPtE3phDfYuAWQV0w-CCiLiQjkh3Mg,22196
+kinemotion/cmj/analysis.py,sha256=3l0vYQB9tN4HtEO2MPFHVtrdzSmXgwpCm03qzYLCF0c,22196
 kinemotion/cmj/cli.py,sha256=HpZgLWoLjcgsfOZu6EQ_26tg6QwTgFjR-Ly8WCBg24c,9904
 kinemotion/cmj/debug_overlay.py,sha256=fXmWoHhqMLGo4vTtB6Ezs3yLUDOLw63zLIgU2gFlJQU,15892
 kinemotion/cmj/joint_angles.py,sha256=HmheIEiKcQz39cRezk4h-htorOhGNPsqKIR9RsAEKts,9960
-kinemotion/cmj/kinematics.py,sha256=Lq9m9MNQxnXv31VhKmXVrlM7rRkhi8PxW50N_CC8_8Y,11860
-kinemotion/cmj/metrics_validator.py,sha256=V_fmlczYH06SBtwqESv-IfGi3wDsIy3RQbd7VwOyNo0,31359
+kinemotion/cmj/kinematics.py,sha256=Q-L8M7wG-MJ6EJTq6GO17c8sD5cb0Jg6Hc5vUZr14bA,13673
+kinemotion/cmj/metrics_validator.py,sha256=JAakR4RgNvUc7GM9Aj2TQrtatYpCCCGSzkBMXOldKjw,31455
 kinemotion/cmj/validation_bounds.py,sha256=9ZTo68fl3ooyWjXXyTMRLpK9tFANa_rQf3oHhq7iQGE,11995
 kinemotion/core/__init__.py,sha256=rBIEx9sW6E-nyVdWmoVGJYhfPikLukoDp7lxKri7RTQ,1543
 kinemotion/core/auto_tuning.py,sha256=wtCUMOhBChVJNXfEeku3GCMW4qED6MF-O_mv2sPTiVQ,11324
@@ -22,7 +22,7 @@ kinemotion/core/pipeline_utils.py,sha256=0u7o-UFZX6cOu3NaWpFmEy5ejS0WUKggZ1HSdeZ
 kinemotion/core/pose.py,sha256=z1OGuwnc-NdK6Aoc9UYCyPBzomw4eInexOWonZbsEoA,9057
 kinemotion/core/quality.py,sha256=dPGQp08y8DdEUbUdjTThnUOUsALgF0D2sdz50cm6wLI,13098
 kinemotion/core/smoothing.py,sha256=FZmv3rumn0mYKU2y3JPKz46EvD8TVmQ6_GsN_Vp3BdU,15650
-kinemotion/core/timing.py,sha256=Shu9KnoX-2K7gzTQr6apIcrqFaxiONR2go6g3xE9xb8,12039
+kinemotion/core/timing.py,sha256=8DJLtyQIGA0BKT07A2b32OHi1cdGQAoX5ahjGt17vlI,12113
 kinemotion/core/validation.py,sha256=LmKfSl4Ayw3DgwKD9IrhsPdzp5ia4drLsHA2UuU1SCM,6310
 kinemotion/core/video_io.py,sha256=vCwpWnlW2y29l48dFXokdehQn42w_IQvayxbVTjpXqQ,7863
 kinemotion/dropjump/__init__.py,sha256=tC3H3BrCg8Oj-db-Vrtx4PH_llR1Ppkd5jwaOjhQcLg,862
@@ -31,10 +31,10 @@ kinemotion/dropjump/cli.py,sha256=eLIA0rnx60vqD__PinB1-5nQ8_xQUhCGplwsB0u9MgU,15
 kinemotion/dropjump/debug_overlay.py,sha256=8XVuDyZ3nuNoCYkxcUWC7wyEoHyBxx77Sb--B1KiYWw,5974
 kinemotion/dropjump/kinematics.py,sha256=PATlGaClutGKJslL-LRIXHmTsvb-xEB8PUIMScU_K4c,19849
 kinemotion/dropjump/metrics_validator.py,sha256=CrTlGup8q2kyPXtA6HNwm7_yq0AsBaDllG7RVZdXmYA,9342
-kinemotion/dropjump/validation_bounds.py,sha256=5b4I3CKPybuvrbn-nP5yCcGF_sH4Vtyw3a5AWWvWnBk,4645
+kinemotion/dropjump/validation_bounds.py,sha256=fyl04ZV7nfvHkL5eob6oEpV9Hxce6aiOWQ9pclLp7AQ,5077
 kinemotion/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kinemotion-0.47.1.dist-info/METADATA,sha256=XPL4ms0o_O4Xk9M-NC59nVWEe5UC7qPbYnUG7KBjSvU,26020
-kinemotion-0.47.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-kinemotion-0.47.1.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
-kinemotion-0.47.1.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
-kinemotion-0.47.1.dist-info/RECORD,,
+kinemotion-0.47.2.dist-info/METADATA,sha256=j1rpZaKZM8ANWsND2jaj7JrDSvDfdHUiFJnn6EfqpoY,26020
+kinemotion-0.47.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+kinemotion-0.47.2.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
+kinemotion-0.47.2.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
+kinemotion-0.47.2.dist-info/RECORD,,