kinemotion 0.59.1__py3-none-any.whl → 0.60.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kinemotion might be problematic.
- kinemotion/cmj/analysis.py +4 -12
- kinemotion/cmj/api.py +5 -15
- kinemotion/cmj/cli.py +9 -25
- kinemotion/cmj/debug_overlay.py +3 -9
- kinemotion/cmj/kinematics.py +11 -31
- kinemotion/cmj/metrics_validator.py +27 -67
- kinemotion/cmj/validation_bounds.py +1 -3
- kinemotion/core/auto_tuning.py +2 -6
- kinemotion/core/debug_overlay_utils.py +4 -13
- kinemotion/core/determinism.py +1 -3
- kinemotion/core/filtering.py +2 -6
- kinemotion/core/pipeline_utils.py +2 -5
- kinemotion/core/pose.py +4 -12
- kinemotion/core/quality.py +2 -6
- kinemotion/core/smoothing.py +3 -9
- kinemotion/core/validation.py +2 -6
- kinemotion/dropjump/analysis.py +9 -26
- kinemotion/dropjump/api.py +4 -12
- kinemotion/dropjump/cli.py +5 -14
- kinemotion/dropjump/debug_overlay.py +1 -3
- kinemotion/dropjump/kinematics.py +9 -27
- kinemotion/dropjump/metrics_validator.py +7 -20
- kinemotion/dropjump/validation_bounds.py +1 -3
- {kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/METADATA +1 -1
- kinemotion-0.60.1.dist-info/RECORD +42 -0
- kinemotion-0.59.1.dist-info/RECORD +0 -42
- {kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/WHEEL +0 -0
- {kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/entry_points.txt +0 -0
- {kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/licenses/LICENSE +0 -0
kinemotion/cmj/analysis.py
CHANGED
@@ -216,9 +216,7 @@ def find_cmj_takeoff_from_velocity_peak(
         Takeoff frame with fractional precision.
     """
     concentric_start = int(lowest_point_frame)
-    search_duration = int(
-        fps * 0.3
-    )  # Search next 0.3 seconds (concentric to takeoff is brief)
+    search_duration = int(fps * 0.3)  # Search next 0.3 seconds (concentric to takeoff is brief)
     search_end = min(len(velocities), concentric_start + search_duration)

     if search_end <= concentric_start:
@@ -227,9 +225,7 @@ def find_cmj_takeoff_from_velocity_peak(
     # Find peak upward velocity (most NEGATIVE velocity)
     # In normalized coords: negative velocity = y decreasing = jumping up
     concentric_velocities = velocities[concentric_start:search_end]
-    takeoff_idx = int(
-        np.argmin(concentric_velocities)
-    )  # Most negative = fastest upward = takeoff
+    takeoff_idx = int(np.argmin(concentric_velocities))  # Most negative = fastest upward = takeoff
     takeoff_frame = concentric_start + takeoff_idx

     return float(takeoff_frame)
@@ -338,9 +334,7 @@ def find_interpolated_takeoff_landing(
     return (takeoff_frame, landing_frame)


-def find_takeoff_frame(
-    velocities: np.ndarray, peak_height_frame: int, fps: float
-) -> float:
+def find_takeoff_frame(velocities: np.ndarray, peak_height_frame: int, fps: float) -> float:
     """Find takeoff frame as peak upward velocity before peak height.

     Robust detection: When velocities are nearly identical (flat), detects
@@ -621,8 +615,6 @@ def detect_cmj_phases(
     )

     with timer.measure("cmj_find_standing_end"):
-        standing_end = find_standing_end(
-            velocities, lowest_point, positions, accelerations
-        )
+        standing_end = find_standing_end(velocities, lowest_point, positions, accelerations)

     return (standing_end, lowest_point, takeoff_frame, landing_frame)
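The change above only joins wrapped lines; the detection logic is unchanged: in normalized image coordinates y grows downward, so the most negative vertical velocity marks the fastest upward motion. A minimal standalone sketch of that idea (hypothetical helper, not the package's API):

import numpy as np

def takeoff_from_velocity(velocities: np.ndarray, concentric_start: int, fps: float) -> float:
    # Search a short window after the lowest point; the concentric phase is brief (~0.3 s).
    search_end = min(len(velocities), concentric_start + int(fps * 0.3))
    window = velocities[concentric_start:search_end]
    # Most negative velocity = fastest upward motion in image coordinates = takeoff.
    return float(concentric_start + int(np.argmin(window)))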
kinemotion/cmj/api.py
CHANGED
@@ -91,9 +91,7 @@ def _generate_debug_video(
         timer=timer,
     ) as renderer:
         for frame, idx in zip(frames, frame_indices, strict=True):
-            annotated = renderer.render_frame(
-                frame, smoothed_landmarks[idx], idx, metrics
-            )
+            annotated = renderer.render_frame(frame, smoothed_landmarks[idx], idx, metrics)
             renderer.write_frame(annotated)

     if verbose:
@@ -142,9 +140,7 @@ def _print_quality_warnings(quality_result: QualityAssessment, verbose: bool) ->
     print()


-def _print_validation_results(
-    validation_result: ValidationResult, verbose: bool
-) -> None:
+def _print_validation_results(validation_result: ValidationResult, verbose: bool) -> None:
     """Print validation issues if present."""
     if verbose and validation_result.issues:
         print("\n⚠️ Validation Results:")
@@ -327,13 +323,9 @@ def process_cmj_video(
     )

     if verbose:
-        print_verbose_parameters(
-            video, characteristics, quality_preset, params
-        )
+        print_verbose_parameters(video, characteristics, quality_preset, params)

-    smoothed_landmarks = apply_smoothing(
-        landmarks_sequence, params, verbose, timer
-    )
+    smoothed_landmarks = apply_smoothing(landmarks_sequence, params, verbose, timer)

     if verbose:
         print("Extracting vertical positions (Hip and Foot)...")
@@ -341,9 +333,7 @@ def process_cmj_video(
     vertical_positions, visibilities = extract_vertical_positions(
         smoothed_landmarks, target="hip"
     )
-    foot_positions, _ = extract_vertical_positions(
-        smoothed_landmarks, target="foot"
-    )
+    foot_positions, _ = extract_vertical_positions(smoothed_landmarks, target="foot")

     if verbose:
         print("Detecting CMJ phases...")
kinemotion/cmj/cli.py
CHANGED
@@ -49,12 +49,8 @@ def _process_batch_videos(
     for video in video_files:
         try:
             click.echo(f"\nProcessing: {video}", err=True)
-            out_path, json_path = generate_batch_output_paths(
-                video, output_dir, json_output_dir
-            )
-            _process_single(
-                video, out_path, json_path, quality_preset, verbose, expert_params
-            )
+            out_path, json_path = generate_batch_output_paths(video, output_dir, json_output_dir)
+            _process_single(video, out_path, json_path, quality_preset, verbose, expert_params)
         except Exception as e:
             click.echo(f"Error processing {video}: {e}", err=True)
             continue
@@ -311,30 +307,18 @@ def _output_results(metrics: CMJMetrics, json_output: str | None) -> None:
     click.echo("=" * 60, err=True)
     click.echo(f"Jump height: {metrics.jump_height:.3f} m", err=True)
     click.echo(f"Flight time: {metrics.flight_time * 1000:.1f} ms", err=True)
+    click.echo(f"Countermovement depth: {metrics.countermovement_depth:.3f} m", err=True)
+    click.echo(f"Eccentric duration: {metrics.eccentric_duration * 1000:.1f} ms", err=True)
+    click.echo(f"Concentric duration: {metrics.concentric_duration * 1000:.1f} ms", err=True)
+    click.echo(f"Total movement time: {metrics.total_movement_time * 1000:.1f} ms", err=True)
     click.echo(
-        f"Countermovement depth: {metrics.countermovement_depth:.3f} m", err=True
-    )
-    click.echo(
-        f"Eccentric duration: {metrics.eccentric_duration * 1000:.1f} ms", err=True
-    )
-    click.echo(
-        f"Concentric duration: {metrics.concentric_duration * 1000:.1f} ms", err=True
-    )
-    click.echo(
-        f"Total movement time: {metrics.total_movement_time * 1000:.1f} ms", err=True
-    )
-    click.echo(
-        f"Peak eccentric velocity: {abs(metrics.peak_eccentric_velocity):.3f} "
-        "m/s (downward)",
+        f"Peak eccentric velocity: {abs(metrics.peak_eccentric_velocity):.3f} m/s (downward)",
         err=True,
     )
     click.echo(
-        f"Peak concentric velocity: {metrics.peak_concentric_velocity:.3f} "
-        "m/s (upward)",
+        f"Peak concentric velocity: {metrics.peak_concentric_velocity:.3f} m/s (upward)",
         err=True,
     )
     if metrics.transition_time is not None:
-        click.echo(
-            f"Transition time: {metrics.transition_time * 1000:.1f} ms", err=True
-        )
+        click.echo(f"Transition time: {metrics.transition_time * 1000:.1f} ms", err=True)
     click.echo("=" * 60, err=True)
kinemotion/cmj/debug_overlay.py
CHANGED
@@ -142,9 +142,7 @@ class CMJDebugOverlayRenderer(BaseDebugOverlayRenderer):

         # Draw ALL visible segments (not just one side)
         for start_key, end_key, color, thickness in segments:
-            self._draw_segment(
-                frame, landmarks, start_key, end_key, color, thickness
-            )
+            self._draw_segment(frame, landmarks, start_key, end_key, color, thickness)

         # Draw joints as circles for this side
         self._draw_joints(frame, landmarks, side_prefix)
@@ -323,9 +321,7 @@ class CMJDebugOverlayRenderer(BaseDebugOverlayRenderer):
         phase_text = f"Phase: {phase.upper()}"
         text_size = cv2.getTextSize(phase_text, cv2.FONT_HERSHEY_SIMPLEX, 1, 2)[0]
         cv2.rectangle(frame, (5, 5), (text_size[0] + 15, 45), phase_color, -1)
-        cv2.putText(
-            frame, phase_text, (10, 35), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 2
-        )
+        cv2.putText(frame, phase_text, (10, 35), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 2)

     def _draw_key_frame_markers(
         self, frame: np.ndarray, frame_idx: int, metrics: CMJMetrics
@@ -334,9 +330,7 @@ class CMJDebugOverlayRenderer(BaseDebugOverlayRenderer):
         y_offset = 120
         markers = []

-        if metrics.standing_start_frame and frame_idx == int(
-            metrics.standing_start_frame
-        ):
+        if metrics.standing_start_frame and frame_idx == int(metrics.standing_start_frame):
             markers.append("COUNTERMOVEMENT START")

         if frame_idx == int(metrics.lowest_point_frame):
kinemotion/cmj/kinematics.py
CHANGED
@@ -98,29 +98,17 @@ class CMJMetrics:
         data: CMJDataDict = {
             "jump_height_m": format_float_metric(self.jump_height, 1, 3),  # type: ignore[typeddict-item]
             "flight_time_ms": format_float_metric(self.flight_time, 1000, 2),  # type: ignore[typeddict-item]
-            "countermovement_depth_m": format_float_metric(
-                self.countermovement_depth, 1, 3
-            ),  # type: ignore[typeddict-item]
-            "eccentric_duration_ms": format_float_metric(
-                self.eccentric_duration, 1000, 2
-            ),  # type: ignore[typeddict-item]
-            "concentric_duration_ms": format_float_metric(
-                self.concentric_duration, 1000, 2
-            ),  # type: ignore[typeddict-item]
-            "total_movement_time_ms": format_float_metric(
-                self.total_movement_time, 1000, 2
-            ),  # type: ignore[typeddict-item]
-            "peak_eccentric_velocity_m_s": format_float_metric(
-                self.peak_eccentric_velocity, 1, 4
-            ),  # type: ignore[typeddict-item]
+            "countermovement_depth_m": format_float_metric(self.countermovement_depth, 1, 3),  # type: ignore[typeddict-item]
+            "eccentric_duration_ms": format_float_metric(self.eccentric_duration, 1000, 2),  # type: ignore[typeddict-item]
+            "concentric_duration_ms": format_float_metric(self.concentric_duration, 1000, 2),  # type: ignore[typeddict-item]
+            "total_movement_time_ms": format_float_metric(self.total_movement_time, 1000, 2),  # type: ignore[typeddict-item]
+            "peak_eccentric_velocity_m_s": format_float_metric(self.peak_eccentric_velocity, 1, 4),  # type: ignore[typeddict-item]
             "peak_concentric_velocity_m_s": format_float_metric(
                 self.peak_concentric_velocity, 1, 4
             ),  # type: ignore[typeddict-item]
             "transition_time_ms": format_float_metric(self.transition_time, 1000, 2),
             "standing_start_frame": (
-                float(self.standing_start_frame)
-                if self.standing_start_frame is not None
-                else None
+                float(self.standing_start_frame) if self.standing_start_frame is not None else None
             ),
             "lowest_point_frame": float(self.lowest_point_frame),
             "takeoff_frame": float(self.takeoff_frame),
@@ -198,9 +186,7 @@ def _calculate_countermovement_depth(
         Countermovement depth in meters
     """
     standing_position = (
-        positions[int(standing_start_frame)]
-        if standing_start_frame is not None
-        else positions[0]
+        positions[int(standing_start_frame)] if standing_start_frame is not None else positions[0]
     )
     lowest_position = positions[int(lowest_point_frame)]
     depth_normalized = abs(standing_position - lowest_position)
@@ -269,9 +255,7 @@ def _calculate_peak_velocities(

     peak_concentric_velocity = 0.0
     if len(concentric_velocities) > 0:
-        peak_concentric_velocity = (
-            abs(float(np.min(concentric_velocities))) * velocity_scale
-        )
+        peak_concentric_velocity = abs(float(np.min(concentric_velocities))) * velocity_scale

     return peak_eccentric_velocity, peak_concentric_velocity

@@ -337,17 +321,13 @@ def calculate_cmj_metrics(
     jump_height = (g * flight_time**2) / 8

     # Calculate scaling factor and derived metrics
-    scale_factor = _calculate_scale_factor(
-        positions, takeoff_frame, landing_frame, jump_height
-    )
+    scale_factor = _calculate_scale_factor(positions, takeoff_frame, landing_frame, jump_height)
     countermovement_depth = _calculate_countermovement_depth(
         positions, standing_start_frame, lowest_point_frame, scale_factor
     )

-    eccentric_duration, concentric_duration, total_movement_time = (
-        _calculate_phase_durations(
-            standing_start_frame, lowest_point_frame, takeoff_frame, fps
-        )
+    eccentric_duration, concentric_duration, total_movement_time = _calculate_phase_durations(
+        standing_start_frame, lowest_point_frame, takeoff_frame, fps
     )

     velocity_scale = scale_factor * fps
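The `jump_height = (g * flight_time**2) / 8` line in the hunk above is the standard flight-time method: for a ballistic flight of duration t the rise time is t/2, so h = g(t/2)²/2 = gt²/8. A worked example with illustrative values only:

g = 9.81            # m/s^2
flight_time = 0.52  # s, example value
jump_height = (g * flight_time**2) / 8
print(f"{jump_height:.3f} m")  # ~0.332 m for a 520 ms flight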
kinemotion/cmj/metrics_validator.py
CHANGED
@@ -52,13 +52,9 @@ class CMJValidationResult(ValidationResult):
                 }
                 for issue in self.issues
             ],
-            "athlete_profile": (
-                self.athlete_profile.value if self.athlete_profile else None
-            ),
+            "athlete_profile": (self.athlete_profile.value if self.athlete_profile else None),
             "rsi": self.rsi,
-            "height_flight_time_consistency_percent": (
-                self.height_flight_time_consistency
-            ),
+            "height_flight_time_consistency_percent": (self.height_flight_time_consistency),
             "velocity_height_consistency_percent": self.velocity_height_consistency,
             "depth_height_ratio": self.depth_height_ratio,
             "contact_depth_ratio": self.contact_depth_ratio,
@@ -135,9 +131,7 @@ class CMJMetricsValidator(MetricsValidator):
         self, metrics: dict, result: CMJValidationResult, profile: AthleteProfile
     ) -> None:
         """Validate flight time."""
-        flight_time_raw = self._get_metric_value(
-            metrics, "flight_time_ms", "flight_time"
-        )
+        flight_time_raw = self._get_metric_value(metrics, "flight_time_ms", "flight_time")
         if flight_time_raw is None:
             return

@@ -167,8 +161,7 @@ class CMJMetricsValidator(MetricsValidator):
         elif bounds.contains(flight_time, profile):
             result.add_info(
                 "flight_time",
-                f"Flight time {flight_time:.3f}s within expected range for "
-                f"{profile.value}",
+                f"Flight time {flight_time:.3f}s within expected range for {profile.value}",
                 value=flight_time,
             )
         else:
@@ -210,8 +203,7 @@ class CMJMetricsValidator(MetricsValidator):
         elif bounds.contains(jump_height, profile):
             result.add_info(
                 "jump_height",
-                f"Jump height {jump_height:.3f}m within expected range for "
-                f"{profile.value}",
+                f"Jump height {jump_height:.3f}m within expected range for {profile.value}",
                 value=jump_height,
             )
         else:
@@ -228,9 +220,7 @@ class CMJMetricsValidator(MetricsValidator):
         self, metrics: dict, result: CMJValidationResult, profile: AthleteProfile
     ) -> None:
         """Validate countermovement depth."""
-        depth = self._get_metric_value(
-            metrics, "countermovement_depth_m", "countermovement_depth"
-        )
+        depth = self._get_metric_value(metrics, "countermovement_depth_m", "countermovement_depth")
         if depth is None:
             return

@@ -254,8 +244,7 @@ class CMJMetricsValidator(MetricsValidator):
         elif bounds.contains(depth, profile):
             result.add_info(
                 "countermovement_depth",
-                f"Countermovement depth {depth:.3f}m within expected range for "
-                f"{profile.value}",
+                f"Countermovement depth {depth:.3f}m within expected range for {profile.value}",
                 value=depth,
             )
         else:
@@ -298,16 +287,14 @@ class CMJMetricsValidator(MetricsValidator):
         else:
             result.add_error(
                 "concentric_duration",
-                f"Concentric duration {duration:.3f}s likely includes "
-                "standing phase",
+                f"Concentric duration {duration:.3f}s likely includes standing phase",
                 value=duration,
                 bounds=(bounds.absolute_min, bounds.absolute_max),
             )
         elif bounds.contains(duration, profile):
             result.add_info(
                 "concentric_duration",
-                f"Concentric duration {duration:.3f}s within expected range for "
-                f"{profile.value}",
+                f"Concentric duration {duration:.3f}s within expected range for {profile.value}",
                 value=duration,
             )
         else:
@@ -348,8 +335,7 @@ class CMJMetricsValidator(MetricsValidator):
         elif bounds.contains(duration, profile):
             result.add_info(
                 "eccentric_duration",
-                f"Eccentric duration {duration:.3f}s within expected range for "
-                f"{profile.value}",
+                f"Eccentric duration {duration:.3f}s within expected range for {profile.value}",
                 value=duration,
             )
         else:
@@ -382,8 +368,7 @@ class CMJMetricsValidator(MetricsValidator):
         elif bounds.contains(ecc_vel, profile):
             result.add_info(
                 "peak_eccentric_velocity",
-                f"Peak eccentric velocity {ecc_vel:.2f} m/s within range "
-                f"for {profile.value}",
+                f"Peak eccentric velocity {ecc_vel:.2f} m/s within range for {profile.value}",
                 value=ecc_vel,
             )
         else:
@@ -406,24 +391,21 @@ class CMJMetricsValidator(MetricsValidator):
         if con_vel < bounds.absolute_min:
             result.add_error(
                 "peak_concentric_velocity",
-                f"Peak concentric velocity {con_vel:.2f} m/s "
-                "insufficient to leave ground",
+                f"Peak concentric velocity {con_vel:.2f} m/s insufficient to leave ground",
                 value=con_vel,
                 bounds=(bounds.absolute_min, bounds.absolute_max),
             )
         else:
             result.add_error(
                 "peak_concentric_velocity",
-                f"Peak concentric velocity {con_vel:.2f} m/s exceeds "
-                "elite capability",
+                f"Peak concentric velocity {con_vel:.2f} m/s exceeds elite capability",
                 value=con_vel,
                 bounds=(bounds.absolute_min, bounds.absolute_max),
             )
         elif bounds.contains(con_vel, profile):
             result.add_info(
                 "peak_concentric_velocity",
-                f"Peak concentric velocity {con_vel:.2f} m/s within range "
-                f"for {profile.value}",
+                f"Peak concentric velocity {con_vel:.2f} m/s within range for {profile.value}",
                 value=con_vel,
             )
         else:
@@ -468,8 +450,7 @@ class CMJMetricsValidator(MetricsValidator):
         else:
             result.add_info(
                 "height_flight_time_consistency",
-                f"Jump height and flight time consistent "
-                f"(error {error_pct * 100:.1f}%)",
+                f"Jump height and flight time consistent (error {error_pct * 100:.1f}%)",
                 value=error_pct,
             )

@@ -505,8 +486,7 @@ class CMJMetricsValidator(MetricsValidator):
         else:
             result.add_info(
                 "velocity_height_consistency",
-                f"Peak velocity and jump height consistent "
-                f"(error {error_pct * 100:.1f}%)",
+                f"Peak velocity and jump height consistent (error {error_pct * 100:.1f}%)",
                 value=error_pct,
             )

@@ -514,9 +494,7 @@ class CMJMetricsValidator(MetricsValidator):
         self, metrics: dict, result: CMJValidationResult, profile: AthleteProfile
     ) -> None:
         """Validate Reactive Strength Index."""
-        flight_time_raw = self._get_metric_value(
-            metrics, "flight_time_ms", "flight_time"
-        )
+        flight_time_raw = self._get_metric_value(metrics, "flight_time_ms", "flight_time")
         concentric_duration_raw = self._get_metric_value(
             metrics, "concentric_duration_ms", "concentric_duration"
         )
@@ -577,16 +555,12 @@ class CMJMetricsValidator(MetricsValidator):
             bounds=(expected_min, expected_max),
         )

-    def _check_depth_height_ratio(
-        self, metrics: dict, result: CMJValidationResult
-    ) -> None:
+    def _check_depth_height_ratio(self, metrics: dict, result: CMJValidationResult) -> None:
         """Check countermovement depth to jump height ratio."""
        depth = metrics.get("countermovement_depth_m")
        jump_height = metrics.get("jump_height_m")

-        if (
-            depth is None or jump_height is None or depth < 0.05
-        ):  # Skip if depth minimal
+        if depth is None or jump_height is None or depth < 0.05:  # Skip if depth minimal
            return

        ratio = jump_height / depth
@@ -621,9 +595,7 @@ class CMJMetricsValidator(MetricsValidator):
             value=ratio,
         )

-    def _check_contact_depth_ratio(
-        self, metrics: dict, result: CMJValidationResult
-    ) -> None:
+    def _check_contact_depth_ratio(self, metrics: dict, result: CMJValidationResult) -> None:
         """Check contact time to countermovement depth ratio."""
         contact_ms = metrics.get("concentric_duration_ms")
         depth = metrics.get("countermovement_depth_m")
@@ -639,8 +611,7 @@ class CMJMetricsValidator(MetricsValidator):
         if ratio < MetricConsistency.CONTACT_DEPTH_RATIO_MIN:
             result.add_warning(
                 "contact_depth_ratio",
-                f"Contact time {ratio:.2f}s/m to depth ratio: Very fast for "
-                "depth traversed",
+                f"Contact time {ratio:.2f}s/m to depth ratio: Very fast for depth traversed",
                 value=ratio,
                 bounds=(
                     MetricConsistency.CONTACT_DEPTH_RATIO_MIN,
@@ -692,8 +663,7 @@ class CMJMetricsValidator(MetricsValidator):
         if not TripleExtensionBounds.knee_angle_valid(knee, profile):
             result.add_warning(
                 "knee_angle",
-                f"Knee angle {knee:.1f}° outside expected range for "
-                f"{profile.value}",
+                f"Knee angle {knee:.1f}° outside expected range for {profile.value}",
                 value=knee,
             )
         else:
@@ -708,15 +678,13 @@ class CMJMetricsValidator(MetricsValidator):
         if not TripleExtensionBounds.ankle_angle_valid(ankle, profile):
             result.add_warning(
                 "ankle_angle",
-                f"Ankle angle {ankle:.1f}° outside expected range for "
-                f"{profile.value}",
+                f"Ankle angle {ankle:.1f}° outside expected range for {profile.value}",
                 value=ankle,
             )
         else:
             result.add_info(
                 "ankle_angle",
-                f"Ankle angle {ankle:.1f}° within expected range for "
-                f"{profile.value}",
+                f"Ankle angle {ankle:.1f}° within expected range for {profile.value}",
                 value=ankle,
             )

@@ -763,15 +731,9 @@ class CMJMetricsValidator(MetricsValidator):

         if hip <= hip_min + boundary_threshold or hip >= hip_max - boundary_threshold:
             joints_at_boundary += 1
-        if (
-            knee <= knee_min + boundary_threshold
-            or knee >= knee_max - boundary_threshold
-        ):
+        if knee <= knee_min + boundary_threshold or knee >= knee_max - boundary_threshold:
             joints_at_boundary += 1
-        if (
-            ankle <= ankle_min + boundary_threshold
-            or ankle >= ankle_max - boundary_threshold
-        ):
+        if ankle <= ankle_min + boundary_threshold or ankle >= ankle_max - boundary_threshold:
             joints_at_boundary += 1

         # If 2+ joints at boundaries, likely compensation pattern
@@ -785,9 +747,7 @@ class CMJMetricsValidator(MetricsValidator):
         )

     @staticmethod
-    def _get_profile_range(
-        profile: AthleteProfile, bounds: MetricBounds
-    ) -> tuple[float, float]:
+    def _get_profile_range(profile: AthleteProfile, bounds: MetricBounds) -> tuple[float, float]:
         """Get min/max bounds for specific profile."""
         if profile == AthleteProfile.ELDERLY:
             return (bounds.practical_min, bounds.recreational_max)
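The validator hunks above all follow one pattern: a value outside the absolute bounds is an error, a value outside the profile-specific range is a warning, and anything else is informational. A hedged sketch of that pattern (names and the return type are illustrative, not the package's classes):

def check_metric(value: float, abs_min: float, abs_max: float,
                 prof_min: float, prof_max: float) -> str:
    # Absolute bounds catch physically implausible values.
    if value < abs_min or value > abs_max:
        return "error"
    # Profile bounds catch values that are merely unusual for this athlete level.
    if not (prof_min <= value <= prof_max):
        return "warning"
    return "info"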
kinemotion/cmj/validation_bounds.py
CHANGED
@@ -299,9 +299,7 @@ ATHLETE_PROFILES = {
 }


-def estimate_athlete_profile(
-    metrics_dict: dict, gender: str | None = None
-) -> AthleteProfile:
+def estimate_athlete_profile(metrics_dict: dict, gender: str | None = None) -> AthleteProfile:
     """Estimate athlete profile from metrics.

     Uses jump height as primary classifier:
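The docstring above says `estimate_athlete_profile` uses jump height as the primary classifier; the actual cutoffs are not shown in this diff. A hypothetical sketch of such a classifier (the thresholds below are assumptions, not the package's values):

def estimate_profile(jump_height_m: float) -> str:
    # Assumed illustrative cutoffs; the library's real bounds live in ATHLETE_PROFILES.
    if jump_height_m < 0.15:
        return "elderly"
    if jump_height_m < 0.30:
        return "recreational"
    if jump_height_m < 0.45:
        return "trained"
    return "elite"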
kinemotion/core/auto_tuning.py
CHANGED
@@ -168,9 +168,7 @@ def auto_tune_parameters(
     elif quality_preset == QualityPreset.ACCURATE:
         # Accurate: Maximize accuracy, accept slower processing
         velocity_threshold = base_velocity_threshold * 0.5  # More sensitive
-        min_contact_frames = (
-            base_min_contact_frames  # Don't increase (would miss brief)
-        )
+        min_contact_frames = base_min_contact_frames  # Don't increase (would miss brief)
         smoothing_window = min(11, base_smoothing_window + 2 + smoothing_adjustment)
         bilateral_filter = True  # Always use for best accuracy
         detection_confidence = 0.6
@@ -298,9 +296,7 @@ def analyze_video_sample(
         if not frame_landmarks:
             continue

-        frame_vis, frame_y_positions = _collect_foot_visibility_and_positions(
-            frame_landmarks
-        )
+        frame_vis, frame_y_positions = _collect_foot_visibility_and_positions(frame_landmarks)

         if frame_vis:
             visibilities.append(float(np.mean(frame_vis)))
kinemotion/core/debug_overlay_utils.py
CHANGED
@@ -49,16 +49,11 @@ def create_video_writer(
     for codec in codecs_to_try:
         try:
             fourcc = cv2.VideoWriter_fourcc(*codec)
-            writer = cv2.VideoWriter(
-                output_path, fourcc, fps, (display_width, display_height)
-            )
+            writer = cv2.VideoWriter(output_path, fourcc, fps, (display_width, display_height))
             if writer.isOpened():
                 used_codec = codec
                 if codec == "mp4v":
-                    print(
-                        f"Warning: Fallback to {codec} codec. "
-                        "Video may not play in browsers."
-                    )
+                    print(f"Warning: Fallback to {codec} codec. Video may not play in browsers.")
                 break
         except Exception:
             continue
@@ -174,9 +169,7 @@ class BaseDebugOverlayRenderer:
         )

         with self.timer.measure("debug_video_write"):
-            write_overlay_frame(
-                self.writer, frame, self.display_width, self.display_height
-            )
+            write_overlay_frame(self.writer, frame, self.display_width, self.display_height)

     def close(self) -> None:
         """Release video writer and re-encode if possible."""
@@ -187,9 +180,7 @@ class BaseDebugOverlayRenderer:
         temp_path = None
         try:
             temp_path = str(
-                Path(self.output_path).with_suffix(
-                    ".temp" + Path(self.output_path).suffix
-                )
+                Path(self.output_path).with_suffix(".temp" + Path(self.output_path).suffix)
             )

             # Convert to H.264 with yuv420p pixel format for browser compatibility
kinemotion/core/determinism.py
CHANGED
@@ -32,9 +32,7 @@ def get_video_hash_seed(video_path: str) -> int:
     return int(hash_value[:8], 16)


-def set_deterministic_mode(
-    seed: int | None = None, video_path: str | None = None
-) -> None:
+def set_deterministic_mode(seed: int | None = None, video_path: str | None = None) -> None:
     """Set random seeds for reproducible analysis.

     Sets seeds for:
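The context line `return int(hash_value[:8], 16)` shows how `get_video_hash_seed` works: the first eight hex digits of a file hash are reinterpreted as an integer seed. A minimal sketch of that idea (the hash algorithm used by the package is not shown in this diff, so SHA-256 below is an assumption):

import hashlib

def video_hash_seed(video_path: str) -> int:
    with open(video_path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    # First 8 hex chars -> a deterministic 32-bit-range seed for this exact file.
    return int(digest[:8], 16)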
kinemotion/core/filtering.py
CHANGED
@@ -147,9 +147,7 @@ def remove_outliers(

             # Interpolate
             t = (idx - idx_before) / (idx_after - idx_before)
-            positions_clean[idx] = (
-                positions[idx_before] * (1 - t) + positions[idx_after] * t
-            )
+            positions_clean[idx] = positions[idx_before] * (1 - t) + positions[idx_after] * t
         elif len(valid_before) > 0:
             # Use last valid value
             positions_clean[idx] = positions[valid_before[-1]]
@@ -219,9 +217,7 @@ def reject_outliers(

     # Remove/replace outliers
     if interpolate:
-        cleaned_positions = remove_outliers(
-            positions, outlier_mask, method="interpolate"
-        )
+        cleaned_positions = remove_outliers(positions, outlier_mask, method="interpolate")
     else:
         cleaned_positions = positions.copy()

kinemotion/core/pipeline_utils.py
CHANGED
@@ -35,8 +35,7 @@ def parse_quality_preset(quality: str) -> QualityPreset:
         return QualityPreset(quality.lower())
     except ValueError as e:
         raise ValueError(
-            f"Invalid quality preset: {quality}. "
-            "Must be 'fast', 'balanced', or 'accurate'"
+            f"Invalid quality preset: {quality}. Must be 'fast', 'balanced', or 'accurate'"
         ) from e


@@ -408,9 +407,7 @@ def process_videos_bulk_generic(
     results: list[TResult] = []

     with ProcessPoolExecutor(max_workers=max_workers) as executor:
-        future_to_config = {
-            executor.submit(processor_func, config): config for config in configs
-        }
+        future_to_config = {executor.submit(processor_func, config): config for config in configs}

         for future in as_completed(future_to_config):
             config = future_to_config[future]
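The `process_videos_bulk_generic` hunk keeps the usual executor pattern: submit every config, map each future back to its config, then consume results with `as_completed`. A self-contained sketch of that pattern (generic worker, not the package's processor function):

from concurrent.futures import ProcessPoolExecutor, as_completed

def square(x: int) -> int:
    return x * x

if __name__ == "__main__":
    configs = [1, 2, 3, 4]
    with ProcessPoolExecutor(max_workers=2) as executor:
        # Keep a future -> config map so results can be attributed as they complete.
        future_to_config = {executor.submit(square, c): c for c in configs}
        for future in as_completed(future_to_config):
            print(future_to_config[future], future.result())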
kinemotion/core/pose.py
CHANGED
@@ -33,9 +33,7 @@ class PoseTracker:
             model_complexity=1,
         )

-    def process_frame(
-        self, frame: np.ndarray
-    ) -> dict[str, tuple[float, float, float]] | None:
+    def process_frame(self, frame: np.ndarray) -> dict[str, tuple[float, float, float]] | None:
         """
         Process a single frame and extract pose landmarks.

@@ -241,9 +239,7 @@ def compute_center_of_mass(
             0.05,
             visibility_threshold,
         )
-        _add_foot_segment(
-            segments, weights, visibilities, landmarks, side, visibility_threshold
-        )
+        _add_foot_segment(segments, weights, visibilities, landmarks, side, visibility_threshold)

     # Fallback if no segments found
     if not segments:
@@ -257,12 +253,8 @@ def compute_center_of_mass(
     total_weight = sum(weights)
     normalized_weights = [w / total_weight for w in weights]

-    com_x = float(
-        sum(p[0] * w for p, w in zip(segments, normalized_weights, strict=True))
-    )
-    com_y = float(
-        sum(p[1] * w for p, w in zip(segments, normalized_weights, strict=True))
-    )
+    com_x = float(sum(p[0] * w for p, w in zip(segments, normalized_weights, strict=True)))
+    com_y = float(sum(p[1] * w for p, w in zip(segments, normalized_weights, strict=True)))
     com_visibility = float(np.mean(visibilities)) if visibilities else 0.0

     return (com_x, com_y, com_visibility)
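`compute_center_of_mass` above normalizes the segment weights and takes a weighted average of segment positions. A minimal sketch of that weighted average (the segment coordinates and weights below are illustrative, not body-segment parameters from the package):

segments = [(0.50, 0.30), (0.52, 0.55), (0.48, 0.80)]  # (x, y) per segment, normalized coords
weights = [0.5, 0.3, 0.2]
total = sum(weights)
norm = [w / total for w in weights]
com_x = sum(p[0] * w for p, w in zip(segments, norm))
com_y = sum(p[1] * w for p, w in zip(segments, norm))
print(round(com_x, 3), round(com_y, 3))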
kinemotion/core/quality.py
CHANGED
@@ -59,12 +59,8 @@ class QualityAssessment:
             "tracking_stable": self.quality_indicators.tracking_stable,
             "phase_detection_clear": self.quality_indicators.phase_detection_clear,
             "outliers_detected": self.quality_indicators.outliers_detected,
-            "outlier_percentage": round(
-                self.quality_indicators.outlier_percentage, 1
-            ),
-            "position_variance": round(
-                self.quality_indicators.position_variance, 6
-            ),
+            "outlier_percentage": round(self.quality_indicators.outlier_percentage, 1),
+            "position_variance": round(self.quality_indicators.position_variance, 6),
             "fps": round(self.quality_indicators.fps, 1),
         },
         "warnings": self.warnings,
kinemotion/core/smoothing.py
CHANGED
@@ -207,14 +207,10 @@ def smooth_landmarks(
         y_smooth = savgol_filter(y_coords, window_length, polyorder)
         return x_smooth, y_smooth

-    return _smooth_landmarks_core(
-        landmark_sequence, window_length, polyorder, savgol_smoother
-    )
+    return _smooth_landmarks_core(landmark_sequence, window_length, polyorder, savgol_smoother)


-def compute_velocity(
-    positions: np.ndarray, fps: float, smooth_window: int = 3
-) -> np.ndarray:
+def compute_velocity(positions: np.ndarray, fps: float, smooth_window: int = 3) -> np.ndarray:
     """
     Compute velocity from position data.

@@ -423,9 +419,7 @@ def smooth_landmarks_advanced(

         return x_smooth, y_smooth

-    return _smooth_landmarks_core(
-        landmark_sequence, window_length, polyorder, advanced_smoother
-    )
+    return _smooth_landmarks_core(landmark_sequence, window_length, polyorder, advanced_smoother)


 def interpolate_threshold_crossing(
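The smoothing hunks pass each landmark coordinate series through `savgol_filter` and the library then differentiates positions to get velocity. A small standalone sketch of that pipeline (window length, polynomial order, and the synthetic signal are example values, not the package's defaults):

import numpy as np
from scipy.signal import savgol_filter

fps = 30.0
# Synthetic noisy hip-height trace in normalized image coordinates.
y = np.linspace(0.8, 0.2, 60) + np.random.normal(0, 0.005, 60)
y_smooth = savgol_filter(y, window_length=9, polyorder=3)
velocity = np.gradient(y_smooth) * fps  # normalized units per second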
kinemotion/core/validation.py
CHANGED
@@ -160,12 +160,8 @@ class ValidationResult:

     def finalize_status(self) -> None:
         """Determine final pass/fail status based on issues."""
-        has_errors = any(
-            issue.severity == ValidationSeverity.ERROR for issue in self.issues
-        )
-        has_warnings = any(
-            issue.severity == ValidationSeverity.WARNING for issue in self.issues
-        )
+        has_errors = any(issue.severity == ValidationSeverity.ERROR for issue in self.issues)
+        has_warnings = any(issue.severity == ValidationSeverity.WARNING for issue in self.issues)

         if has_errors:
             self.status = "FAIL"
kinemotion/dropjump/analysis.py
CHANGED
@@ -176,13 +176,9 @@ def _find_drop_from_baseline(
     if debug:
         print(f"[detect_drop_start] Drop detected at frame {drop_frame}")
         print(
-            f"  position_change: {position_change:.4f} > "
-            f"{position_change_threshold:.4f}"
-        )
-        print(
-            f"  avg_position: {avg_position:.4f} vs baseline: "
-            f"{baseline_position:.4f}"
+            f"  position_change: {position_change:.4f} > {position_change_threshold:.4f}"
         )
+        print(f"  avg_position: {avg_position:.4f} vs baseline: {baseline_position:.4f}")

     return drop_frame

@@ -233,8 +229,7 @@ def detect_drop_start(
     if len(positions) < min_stable_frames + 30:
         if debug:
             print(
-                f"[detect_drop_start] Video too short: {len(positions)} < "
-                f"{min_stable_frames + 30}"
+                f"[detect_drop_start] Video too short: {len(positions)} < {min_stable_frames + 30}"
             )
         return 0

@@ -393,9 +388,7 @@ def detect_ground_contact(
     contact_frames = _find_contact_frames(is_stationary, min_contact_frames)

     # Assign states
-    return _assign_contact_states(
-        n_frames, contact_frames, visibilities, visibility_threshold
-    )
+    return _assign_contact_states(n_frames, contact_frames, visibilities, visibility_threshold)


 def find_contact_phases(
@@ -448,12 +441,8 @@ def _interpolate_phase_start(
     vel_at = velocities[start_idx]

     # Check threshold crossing based on state
-    is_landing = (
-        state == ContactState.ON_GROUND and vel_before > velocity_threshold > vel_at
-    )
-    is_takeoff = (
-        state == ContactState.IN_AIR and vel_before < velocity_threshold < vel_at
-    )
+    is_landing = state == ContactState.ON_GROUND and vel_before > velocity_threshold > vel_at
+    is_takeoff = state == ContactState.IN_AIR and vel_before < velocity_threshold < vel_at

     if is_landing or is_takeoff:
         offset = interpolate_threshold_crossing(vel_before, vel_at, velocity_threshold)
@@ -481,12 +470,8 @@ def _interpolate_phase_end(
     vel_after = velocities[end_idx + 1]

     # Check threshold crossing based on state
-    is_takeoff = (
-        state == ContactState.ON_GROUND and vel_at < velocity_threshold < vel_after
-    )
-    is_landing = (
-        state == ContactState.IN_AIR and vel_at > velocity_threshold > vel_after
-    )
+    is_takeoff = state == ContactState.ON_GROUND and vel_at < velocity_threshold < vel_after
+    is_landing = state == ContactState.IN_AIR and vel_at > velocity_threshold > vel_after

     if is_takeoff or is_landing:
         offset = interpolate_threshold_crossing(vel_at, vel_after, velocity_threshold)
@@ -528,9 +513,7 @@ def find_interpolated_phase_transitions(
     interpolated_phases: list[tuple[float, float, ContactState]] = []

     for start_idx, end_idx, state in phases:
-        start_frac = _interpolate_phase_start(
-            start_idx, state, velocities, velocity_threshold
-        )
+        start_frac = _interpolate_phase_start(start_idx, state, velocities, velocity_threshold)
         end_frac = _interpolate_phase_end(
             end_idx, state, velocities, velocity_threshold, len(foot_positions)
         )
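`interpolate_threshold_crossing(vel_before, vel_at, velocity_threshold)` in the hunks above returns the fractional offset at which the velocity crosses the threshold, assuming a linear change between the two samples. A minimal standalone sketch of that interpolation (not the package's function):

def interpolate_crossing(v0: float, v1: float, threshold: float) -> float:
    # Fraction of the way from sample 0 to sample 1 where the threshold is crossed.
    if v1 == v0:
        return 0.0
    return (threshold - v0) / (v1 - v0)

print(interpolate_crossing(0.002, 0.010, 0.005))  # 0.375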
kinemotion/dropjump/api.py
CHANGED
@@ -299,9 +299,7 @@ def _tune_and_smooth(
         Tuple of (smoothed_landmarks, params, characteristics)
     """
     with timer.measure("parameter_auto_tuning"):
-        characteristics = analyze_video_sample(
-            landmarks_sequence, video_fps, frame_count
-        )
+        characteristics = analyze_video_sample(landmarks_sequence, video_fps, frame_count)
         params = auto_tune_parameters(characteristics, quality_preset)

     # Apply overrides if provided
@@ -337,9 +335,7 @@ def _extract_positions_and_detect_contact(
     if verbose:
         print("Extracting foot positions...")
     with timer.measure("vertical_position_extraction"):
-        vertical_positions, visibilities = extract_vertical_positions(
-            smoothed_landmarks
-        )
+        vertical_positions, visibilities = extract_vertical_positions(smoothed_landmarks)

     if verbose:
         print("Detecting ground contact...")
@@ -557,9 +553,7 @@ def process_dropjump_video(
         print_verbose_parameters(video, characteristics, quality_preset, params)

     vertical_positions, visibilities, contact_states = (
-        _extract_positions_and_detect_contact(
-            smoothed_landmarks, params, timer, verbose
-        )
+        _extract_positions_and_detect_contact(smoothed_landmarks, params, timer, verbose)
     )

     metrics, quality_result = _calculate_metrics_and_assess_quality(
@@ -623,9 +617,7 @@ def process_dropjump_videos_bulk(
     """

     def error_factory(video_path: str, error_msg: str) -> DropJumpVideoResult:
-        return DropJumpVideoResult(
-            video_path=video_path, success=False, error=error_msg
-        )
+        return DropJumpVideoResult(video_path=video_path, success=False, error=error_msg)

     return process_videos_bulk_generic(
         configs,
kinemotion/dropjump/cli.py
CHANGED
@@ -382,16 +382,10 @@ def _compute_batch_statistics(results: list[DropJumpVideoResult]) -> None:
     if successful:
         # Calculate average metrics
         with_gct = [
-            r
-            for r in successful
-            if r.metrics and r.metrics.ground_contact_time is not None
-        ]
-        with_flight = [
-            r for r in successful if r.metrics and r.metrics.flight_time is not None
-        ]
-        with_jump = [
-            r for r in successful if r.metrics and r.metrics.jump_height is not None
+            r for r in successful if r.metrics and r.metrics.ground_contact_time is not None
         ]
+        with_flight = [r for r in successful if r.metrics and r.metrics.flight_time is not None]
+        with_jump = [r for r in successful if r.metrics and r.metrics.jump_height is not None]

         if with_gct:
             avg_gct = sum(
@@ -528,9 +522,7 @@ def _process_batch(
     expert_params: AnalysisParameters,
 ) -> None:
     """Process multiple videos in batch mode using parallel processing."""
-    click.echo(
-        f"\nBatch processing {len(video_files)} videos with {workers} workers", err=True
-    )
+    click.echo(f"\nBatch processing {len(video_files)} videos with {workers} workers", err=True)
     click.echo("=" * 70, err=True)

     # Setup output directories
@@ -550,8 +542,7 @@ def _process_batch(
         status = "✓" if result.success else "✗"
         video_name = Path(result.video_path).name
         click.echo(
-            f"[{completed}/{len(configs)}] {status} {video_name} "
-            f"({result.processing_time:.1f}s)",
+            f"[{completed}/{len(configs)}] {status} {video_name} ({result.processing_time:.1f}s)",
             err=True,
         )
         if not result.success:
kinemotion/dropjump/debug_overlay.py
CHANGED
@@ -150,9 +150,7 @@ class DebugOverlayRenderer(BaseDebugOverlayRenderer):
         self._draw_foot_visualization(annotated, landmarks, contact_state)

         # Draw contact state
-        state_color = (
-            (0, 255, 0) if contact_state == ContactState.ON_GROUND else (0, 0, 255)
-        )
+        state_color = (0, 255, 0) if contact_state == ContactState.ON_GROUND else (0, 0, 255)
         cv2.putText(
             annotated,
             f"State: {contact_state.value}",
kinemotion/dropjump/kinematics.py
CHANGED
@@ -87,17 +87,11 @@ class DropJumpMetrics:
         Dictionary containing formatted metric values.
         """
         return {
-            "ground_contact_time_ms": format_float_metric(
-                self.ground_contact_time, 1000, 2
-            ),
+            "ground_contact_time_ms": format_float_metric(self.ground_contact_time, 1000, 2),
             "flight_time_ms": format_float_metric(self.flight_time, 1000, 2),
             "jump_height_m": format_float_metric(self.jump_height, 1, 3),
-            "jump_height_kinematic_m": format_float_metric(
-                self.jump_height_kinematic, 1, 3
-            ),
-            "jump_height_trajectory_m": format_float_metric(
-                self.jump_height_trajectory_m, 1, 3
-            ),
+            "jump_height_kinematic_m": format_float_metric(self.jump_height_kinematic, 1, 3),
+            "jump_height_trajectory_m": format_float_metric(self.jump_height_trajectory_m, 1, 3),
             "jump_height_trajectory_normalized": format_float_metric(
                 self.jump_height_trajectory, 1, 4
             ),
@@ -109,15 +103,11 @@ class DropJumpMetrics:
             "contact_start_frame_precise": format_float_metric(
                 self.contact_start_frame_precise, 1, 3
             ),
-            "contact_end_frame_precise": format_float_metric(
-                self.contact_end_frame_precise, 1, 3
-            ),
+            "contact_end_frame_precise": format_float_metric(self.contact_end_frame_precise, 1, 3),
             "flight_start_frame_precise": format_float_metric(
                 self.flight_start_frame_precise, 1, 3
             ),
-            "flight_end_frame_precise": format_float_metric(
-                self.flight_end_frame_precise, 1, 3
-            ),
+            "flight_end_frame_precise": format_float_metric(self.flight_end_frame_precise, 1, 3),
         }

     def _build_metadata_dict(self) -> dict:
@@ -183,9 +173,7 @@ def _filter_phases_after_drop(
     phases: list[tuple[int, int, ContactState]],
     interpolated_phases: list[tuple[float, float, ContactState]],
     drop_start_frame: int,
-) -> tuple[
-    list[tuple[int, int, ContactState]], list[tuple[float, float, ContactState]]
-]:
+) -> tuple[list[tuple[int, int, ContactState]], list[tuple[float, float, ContactState]]]:
     """Filter phases to only include those after drop start.

     Args:
@@ -203,9 +191,7 @@ def _filter_phases_after_drop(
         (start, end, state) for start, end, state in phases if end >= drop_start_frame
     ]
     filtered_interpolated = [
-        (start, end, state)
-        for start, end, state in interpolated_phases
-        if end >= drop_start_frame
+        (start, end, state) for start, end, state in interpolated_phases if end >= drop_start_frame
     ]
     return filtered_phases, filtered_interpolated

@@ -268,9 +254,7 @@ def _identify_main_contact_phase(

     # Find ground phase after first air phase
     ground_after_air = [
-        (start, end, idx)
-        for start, end, idx in ground_phases
-        if idx > first_air_idx
+        (start, end, idx) for start, end, idx in ground_phases if idx > first_air_idx
     ]

     if ground_after_air and first_ground_idx < first_air_idx:
@@ -467,9 +451,7 @@ def calculate_drop_jump_metrics(
     )

     # Store drop start frame in metrics
-    metrics.drop_start_frame = (
-        drop_start_frame_value if drop_start_frame_value > 0 else None
-    )
+    metrics.drop_start_frame = drop_start_frame_value if drop_start_frame_value > 0 else None

     # Find contact phases
     with timer.measure("dj_find_phases"):
kinemotion/dropjump/metrics_validator.py
CHANGED
@@ -45,9 +45,7 @@ class DropJumpValidationResult(ValidationResult):
                 }
                 for issue in self.issues
             ],
-            "athlete_profile": (
-                self.athlete_profile.value if self.athlete_profile else None
-            ),
+            "athlete_profile": (self.athlete_profile.value if self.athlete_profile else None),
             "rsi": self.rsi,
             "contact_flight_ratio": self.contact_flight_ratio,
             "height_kinematic_trajectory_consistency_percent": (
@@ -130,14 +128,11 @@ class DropJumpMetricsValidator(MetricsValidator):
             profile_name = result.athlete_profile.value
             result.add_warning(
                 "contact_time",
-                f"Contact time {contact_time_s:.3f}s unusual for "
-                f"{profile_name} athlete",
+                f"Contact time {contact_time_s:.3f}s unusual for {profile_name} athlete",
                 value=contact_time_s,
             )

-    def _check_flight_time(
-        self, flight_time_ms: float, result: DropJumpValidationResult
-    ) -> None:
+    def _check_flight_time(self, flight_time_ms: float, result: DropJumpValidationResult) -> None:
         """Validate flight time."""
         flight_time_s = flight_time_ms / 1000.0
         bounds = DropJumpBounds.FLIGHT_TIME
@@ -149,9 +144,7 @@ class DropJumpMetricsValidator(MetricsValidator):
                 value=flight_time_s,
                 bounds=(bounds.absolute_min, bounds.absolute_max),
             )
-        elif result.athlete_profile and not bounds.contains(
-            flight_time_s, result.athlete_profile
-        ):
+        elif result.athlete_profile and not bounds.contains(flight_time_s, result.athlete_profile):
             profile_name = result.athlete_profile.value
             result.add_warning(
                 "flight_time",
@@ -159,9 +152,7 @@ class DropJumpMetricsValidator(MetricsValidator):
                 value=flight_time_s,
             )

-    def _check_jump_height(
-        self, jump_height_m: float, result: DropJumpValidationResult
-    ) -> None:
+    def _check_jump_height(self, jump_height_m: float, result: DropJumpValidationResult) -> None:
         """Validate jump height."""
         bounds = DropJumpBounds.JUMP_HEIGHT

@@ -172,9 +163,7 @@ class DropJumpMetricsValidator(MetricsValidator):
                 value=jump_height_m,
                 bounds=(bounds.absolute_min, bounds.absolute_max),
             )
-        elif result.athlete_profile and not bounds.contains(
-            jump_height_m, result.athlete_profile
-        ):
+        elif result.athlete_profile and not bounds.contains(jump_height_m, result.athlete_profile):
             profile_name = result.athlete_profile.value
             result.add_warning(
                 "jump_height",
@@ -206,9 +195,7 @@ class DropJumpMetricsValidator(MetricsValidator):
                 value=rsi,
                 bounds=(bounds.absolute_min, bounds.absolute_max),
             )
-        elif result.athlete_profile and not bounds.contains(
-            rsi, result.athlete_profile
-        ):
+        elif result.athlete_profile and not bounds.contains(rsi, result.athlete_profile):
             result.add_warning(
                 "rsi",
                 f"RSI {rsi:.2f} unusual for {result.athlete_profile.value} athlete",
kinemotion/dropjump/validation_bounds.py
CHANGED
@@ -123,9 +123,7 @@ def _classify_combined_score(combined_score: float) -> AthleteProfile:
     return AthleteProfile.ELITE


-def estimate_athlete_profile(
-    metrics: dict, gender: str | None = None
-) -> AthleteProfile:
+def estimate_athlete_profile(metrics: dict, gender: str | None = None) -> AthleteProfile:
     """Estimate athlete profile from drop jump metrics.

     Uses jump_height and contact_time to classify athlete level.
{kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kinemotion
-Version: 0.59.1
+Version: 0.60.1
 Summary: Video-based kinematic analysis for athletic performance
 Project-URL: Homepage, https://github.com/feniix/kinemotion
 Project-URL: Repository, https://github.com/feniix/kinemotion
kinemotion-0.60.1.dist-info/RECORD
ADDED
@@ -0,0 +1,42 @@
+kinemotion/__init__.py,sha256=Ho_BUtsM0PBxBW1ye9RlUg0ZqBlgGudRI9bZTF7QKUI,966
+kinemotion/api.py,sha256=uG1e4bTnj2c-6cbZJEZ_LjMwFdaG32ba2KcK_XjE_NI,1040
+kinemotion/cli.py,sha256=cqYV_7URH0JUDy1VQ_EDLv63FmNO4Ns20m6s1XAjiP4,464
+kinemotion/cmj/__init__.py,sha256=SkAw9ka8Yd1Qfv9hcvk22m3EfucROzYrSNGNF5kDzho,113
+kinemotion/cmj/analysis.py,sha256=ZtKVfDj1-qoegXPklMO1GX8_OtD8pYbTyTCW4FCeaS8,22140
+kinemotion/cmj/api.py,sha256=Xu6PepQKQFzRWZMESaWvHyPV-dueCH1TQQeQdil1KiQ,16626
+kinemotion/cmj/cli.py,sha256=P2b77IIw6kqTSIkncxlShzhmjIwqMFBNd-pZxYP-TsI,9918
+kinemotion/cmj/debug_overlay.py,sha256=bX9aPLhXiLCCMZW9v8Y4OiOAaZO0i-UGr-Pl8HCsmbI,15810
+kinemotion/cmj/joint_angles.py,sha256=HmheIEiKcQz39cRezk4h-htorOhGNPsqKIR9RsAEKts,9960
+kinemotion/cmj/kinematics.py,sha256=5xAqBP_lwDGP4fcnMXozELj0XRzBcWYGmI0tsVMYbnw,13413
+kinemotion/cmj/metrics_validator.py,sha256=10Dx7-o5-ziQ9YXnzs98v_ZqJxi3ax3COwNY8M_KqqM,30835
+kinemotion/cmj/validation_bounds.py,sha256=1QXaX3uclU2ceZya90u5qVT1tWU4kGkUW0CQbvh317I,11989
+kinemotion/core/__init__.py,sha256=U2fnLUGXQ0jbwpXhdksYKDXbeQndEHjn9gwTAEJ9Av0,1451
+kinemotion/core/auto_tuning.py,sha256=lhAqPc-eLjMYx9BCvKdECE7TD2Dweb9KcifV6JHaXOE,11278
+kinemotion/core/cli_utils.py,sha256=sQPbT6XWWau-sm9yuN5c3eS5xNzoQGGXwSz6hQXtRvM,1859
+kinemotion/core/debug_overlay_utils.py,sha256=YlDmKns6x37H4yvulGGEUJ_D8G0bDZFTSbV8ig2hfFQ,8400
+kinemotion/core/determinism.py,sha256=Frw-KAOvAxTL_XtxoWpXCjMbQPUKEAusK6JctlkeuRo,2509
+kinemotion/core/experimental.py,sha256=IK05AF4aZS15ke85hF3TWCqRIXU1AlD_XKzFz735Ua8,3640
+kinemotion/core/filtering.py,sha256=Oc__pV6iHEGyyovbqa5SUi-6v8QyvaRVwA0LRayM884,11355
+kinemotion/core/formatting.py,sha256=G_3eqgOtym9RFOZVEwCxye4A2cyrmgvtQ214vIshowU,2480
+kinemotion/core/metadata.py,sha256=bJAVa4nym__zx1hNowSZduMGKBSGOPxTbBQkjm6N0D0,7207
+kinemotion/core/pipeline_utils.py,sha256=q32c1AJ8KI4Ht-K3ZiI7ectQKtg8k_FLdLy6WPBPWkU,14927
+kinemotion/core/pose.py,sha256=b7RQF4prb40hb4Yr5ATBFsj3dvEX0ohkG4h65lqHf8E,8993
+kinemotion/core/quality.py,sha256=VUkRL2N6B7lfIZ2pE9han_U68JwarmZz1U0ygHkgkhE,13022
+kinemotion/core/smoothing.py,sha256=-RPZzNjgtBQ-Ri0o-inkwIfx30IKo7ZTSnxXJ3Itn9w,15616
+kinemotion/core/timing.py,sha256=d1rjZc07Nbi5Jrio9AC-zeS0dNAlbPyNIydLz7X75Pk,7804
+kinemotion/core/validation.py,sha256=0xVv-ftWveV60fJ97kmZMuy2Qqqb5aZLR50dDIrjnhg,6773
+kinemotion/core/video_io.py,sha256=vCwpWnlW2y29l48dFXokdehQn42w_IQvayxbVTjpXqQ,7863
+kinemotion/dropjump/__init__.py,sha256=tC3H3BrCg8Oj-db-Vrtx4PH_llR1Ppkd5jwaOjhQcLg,862
+kinemotion/dropjump/analysis.py,sha256=WQ2ol5fWAXA2y-0UDGXgF587qsOsKNgkWPQLXaNZMdU,28005
+kinemotion/dropjump/api.py,sha256=uidio49CXisyWKd287CnCrM51GusG9DWAIUKGH85fpM,20584
+kinemotion/dropjump/cli.py,sha256=gUef9nmyR5952h1WnfBGyCdFXQvzVTlCKYAjJGcO4sE,16819
+kinemotion/dropjump/debug_overlay.py,sha256=9RQYXPRf0q2wdy6y2Ak2R4tpRceDwC8aJrXZzkmh3Wo,5942
+kinemotion/dropjump/kinematics.py,sha256=dx4PuXKfKMKcsc_HX6sXj8rHXf9ksiZIOAIkJ4vBlY4,19637
+kinemotion/dropjump/metrics_validator.py,sha256=367-TFal2bVDU3zwoCDpYY_lnYjSdXOrvNIIxVnIWiE,9190
+kinemotion/dropjump/validation_bounds.py,sha256=MUMJhGV62peFuIHdPR1uulMS4bI-i_JerGh5T9HF8Wk,5071
+kinemotion/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+kinemotion-0.60.1.dist-info/METADATA,sha256=k8G4cr9iQcUW98O_r8en-APbAJ5EHCP01O8-jlNApOU,26020
+kinemotion-0.60.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+kinemotion-0.60.1.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
+kinemotion-0.60.1.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
+kinemotion-0.60.1.dist-info/RECORD,,
kinemotion-0.59.1.dist-info/RECORD
REMOVED
@@ -1,42 +0,0 @@
-kinemotion/__init__.py,sha256=Ho_BUtsM0PBxBW1ye9RlUg0ZqBlgGudRI9bZTF7QKUI,966
-kinemotion/api.py,sha256=uG1e4bTnj2c-6cbZJEZ_LjMwFdaG32ba2KcK_XjE_NI,1040
-kinemotion/cli.py,sha256=cqYV_7URH0JUDy1VQ_EDLv63FmNO4Ns20m6s1XAjiP4,464
-kinemotion/cmj/__init__.py,sha256=SkAw9ka8Yd1Qfv9hcvk22m3EfucROzYrSNGNF5kDzho,113
-kinemotion/cmj/analysis.py,sha256=3l0vYQB9tN4HtEO2MPFHVtrdzSmXgwpCm03qzYLCF0c,22196
-kinemotion/cmj/api.py,sha256=TYWja-Ellfyq_R2ixfvQyCWnPON7CG7IZk8odlLVM8E,16784
-kinemotion/cmj/cli.py,sha256=r3k5LDRXob12PV_6f6XnXOzKXoGn5WfeCMXkxiJ_CYE,10078
-kinemotion/cmj/debug_overlay.py,sha256=fXmWoHhqMLGo4vTtB6Ezs3yLUDOLw63zLIgU2gFlJQU,15892
-kinemotion/cmj/joint_angles.py,sha256=HmheIEiKcQz39cRezk4h-htorOhGNPsqKIR9RsAEKts,9960
-kinemotion/cmj/kinematics.py,sha256=Q-L8M7wG-MJ6EJTq6GO17c8sD5cb0Jg6Hc5vUZr14bA,13673
-kinemotion/cmj/metrics_validator.py,sha256=JAakR4RgNvUc7GM9Aj2TQrtatYpCCCGSzkBMXOldKjw,31455
-kinemotion/cmj/validation_bounds.py,sha256=9ZTo68fl3ooyWjXXyTMRLpK9tFANa_rQf3oHhq7iQGE,11995
-kinemotion/core/__init__.py,sha256=U2fnLUGXQ0jbwpXhdksYKDXbeQndEHjn9gwTAEJ9Av0,1451
-kinemotion/core/auto_tuning.py,sha256=wtCUMOhBChVJNXfEeku3GCMW4qED6MF-O_mv2sPTiVQ,11324
-kinemotion/core/cli_utils.py,sha256=sQPbT6XWWau-sm9yuN5c3eS5xNzoQGGXwSz6hQXtRvM,1859
-kinemotion/core/debug_overlay_utils.py,sha256=-goE3w4gBij99y1U4ckU5iaQPS0SupcHplT04DDWzUo,8579
-kinemotion/core/determinism.py,sha256=NwVrHqJiVxxFHTBPVy8aDBJH2SLIcYIpdGFp7glblB8,2515
-kinemotion/core/experimental.py,sha256=IK05AF4aZS15ke85hF3TWCqRIXU1AlD_XKzFz735Ua8,3640
-kinemotion/core/filtering.py,sha256=GsC9BB71V07LJJHgS2lsaxUAtJsupcUiwtZFDgODh8c,11417
-kinemotion/core/formatting.py,sha256=G_3eqgOtym9RFOZVEwCxye4A2cyrmgvtQ214vIshowU,2480
-kinemotion/core/metadata.py,sha256=bJAVa4nym__zx1hNowSZduMGKBSGOPxTbBQkjm6N0D0,7207
-kinemotion/core/pipeline_utils.py,sha256=0u7o-UFZX6cOu3NaWpFmEy5ejS0WUKggZ1HSdeZXhoA,14964
-kinemotion/core/pose.py,sha256=z1OGuwnc-NdK6Aoc9UYCyPBzomw4eInexOWonZbsEoA,9057
-kinemotion/core/quality.py,sha256=dPGQp08y8DdEUbUdjTThnUOUsALgF0D2sdz50cm6wLI,13098
-kinemotion/core/smoothing.py,sha256=FZmv3rumn0mYKU2y3JPKz46EvD8TVmQ6_GsN_Vp3BdU,15650
-kinemotion/core/timing.py,sha256=d1rjZc07Nbi5Jrio9AC-zeS0dNAlbPyNIydLz7X75Pk,7804
-kinemotion/core/validation.py,sha256=UOdB0v-xZjYvUogwlSdX4luDHy1E0VCJvmu4VvrVPyc,6817
-kinemotion/core/video_io.py,sha256=vCwpWnlW2y29l48dFXokdehQn42w_IQvayxbVTjpXqQ,7863
-kinemotion/dropjump/__init__.py,sha256=tC3H3BrCg8Oj-db-Vrtx4PH_llR1Ppkd5jwaOjhQcLg,862
-kinemotion/dropjump/analysis.py,sha256=p7nnCe7V6vnhQKZVYk--_nhsTvVa_WY-A3zXmyplsew,28211
-kinemotion/dropjump/api.py,sha256=O8DSTLankRibFH8pf1A9idK0x9-khKpG1h2X5nlg5Ms,20688
-kinemotion/dropjump/cli.py,sha256=Ho80fSOgH8zo2e8dGQA90VXL-mZPVvnpc1ZKtl51vB0,16917
-kinemotion/dropjump/debug_overlay.py,sha256=8XVuDyZ3nuNoCYkxcUWC7wyEoHyBxx77Sb--B1KiYWw,5974
-kinemotion/dropjump/kinematics.py,sha256=PATlGaClutGKJslL-LRIXHmTsvb-xEB8PUIMScU_K4c,19849
-kinemotion/dropjump/metrics_validator.py,sha256=CrTlGup8q2kyPXtA6HNwm7_yq0AsBaDllG7RVZdXmYA,9342
-kinemotion/dropjump/validation_bounds.py,sha256=fyl04ZV7nfvHkL5eob6oEpV9Hxce6aiOWQ9pclLp7AQ,5077
-kinemotion/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kinemotion-0.59.1.dist-info/METADATA,sha256=DBN3B90UTv9Z4_MG6CxmwIPppLQIikrSqkLHUOp9YK0,26020
-kinemotion-0.59.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-kinemotion-0.59.1.dist-info/entry_points.txt,sha256=zaqnAnjLvcdrk1Qvj5nvXZCZ2gp0prS7it1zTJygcIY,50
-kinemotion-0.59.1.dist-info/licenses/LICENSE,sha256=KZajvqsHw0NoOHOi2q0FZ4NBe9HdV6oey-IPYAtHXfg,1088
-kinemotion-0.59.1.dist-info/RECORD,,
{kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/WHEEL: file without changes
{kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/entry_points.txt: file without changes
{kinemotion-0.59.1.dist-info → kinemotion-0.60.1.dist-info}/licenses/LICENSE: file without changes