kinemotion 0.10.6__py3-none-any.whl → 0.67.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kinemotion might be problematic.

Files changed (48)
  1. kinemotion/__init__.py +31 -6
  2. kinemotion/api.py +39 -598
  3. kinemotion/cli.py +2 -0
  4. kinemotion/cmj/__init__.py +5 -0
  5. kinemotion/cmj/analysis.py +621 -0
  6. kinemotion/cmj/api.py +563 -0
  7. kinemotion/cmj/cli.py +324 -0
  8. kinemotion/cmj/debug_overlay.py +457 -0
  9. kinemotion/cmj/joint_angles.py +307 -0
  10. kinemotion/cmj/kinematics.py +360 -0
  11. kinemotion/cmj/metrics_validator.py +767 -0
  12. kinemotion/cmj/validation_bounds.py +341 -0
  13. kinemotion/core/__init__.py +28 -0
  14. kinemotion/core/auto_tuning.py +71 -37
  15. kinemotion/core/cli_utils.py +60 -0
  16. kinemotion/core/debug_overlay_utils.py +385 -0
  17. kinemotion/core/determinism.py +83 -0
  18. kinemotion/core/experimental.py +103 -0
  19. kinemotion/core/filtering.py +9 -6
  20. kinemotion/core/formatting.py +75 -0
  21. kinemotion/core/metadata.py +231 -0
  22. kinemotion/core/model_downloader.py +172 -0
  23. kinemotion/core/pipeline_utils.py +433 -0
  24. kinemotion/core/pose.py +298 -141
  25. kinemotion/core/pose_landmarks.py +67 -0
  26. kinemotion/core/quality.py +393 -0
  27. kinemotion/core/smoothing.py +250 -154
  28. kinemotion/core/timing.py +247 -0
  29. kinemotion/core/types.py +42 -0
  30. kinemotion/core/validation.py +201 -0
  31. kinemotion/core/video_io.py +135 -50
  32. kinemotion/dropjump/__init__.py +1 -1
  33. kinemotion/dropjump/analysis.py +367 -182
  34. kinemotion/dropjump/api.py +665 -0
  35. kinemotion/dropjump/cli.py +156 -466
  36. kinemotion/dropjump/debug_overlay.py +136 -206
  37. kinemotion/dropjump/kinematics.py +232 -255
  38. kinemotion/dropjump/metrics_validator.py +240 -0
  39. kinemotion/dropjump/validation_bounds.py +157 -0
  40. kinemotion/models/__init__.py +0 -0
  41. kinemotion/models/pose_landmarker_lite.task +0 -0
  42. kinemotion-0.67.0.dist-info/METADATA +726 -0
  43. kinemotion-0.67.0.dist-info/RECORD +47 -0
  44. {kinemotion-0.10.6.dist-info → kinemotion-0.67.0.dist-info}/WHEEL +1 -1
  45. kinemotion-0.10.6.dist-info/METADATA +0 -561
  46. kinemotion-0.10.6.dist-info/RECORD +0 -20
  47. {kinemotion-0.10.6.dist-info → kinemotion-0.67.0.dist-info}/entry_points.txt +0 -0
  48. {kinemotion-0.10.6.dist-info → kinemotion-0.67.0.dist-info}/licenses/LICENSE +0 -0
kinemotion/core/quality.py (new file)
@@ -0,0 +1,393 @@
+"""Quality assessment and confidence scoring for pose tracking and analysis."""
+
+from dataclasses import dataclass
+from typing import Literal
+
+import numpy as np
+from numpy.typing import NDArray
+
+
+@dataclass
+class QualityIndicators:
+    """Detailed quality indicators for pose tracking and analysis.
+
+    Attributes:
+        avg_visibility: Mean visibility score across all key landmarks (0-1)
+        min_visibility: Minimum visibility score encountered (0-1)
+        tracking_stable: Whether landmark tracking was stable (low jitter)
+        phase_detection_clear: Whether phase transitions were clearly detected
+        outliers_detected: Number of outlier frames detected and corrected
+        outlier_percentage: Percentage of frames with outliers (0-100)
+        position_variance: Variance in position tracking (lower is more stable)
+        fps: Video frame rate (higher is better for accuracy)
+    """
+
+    avg_visibility: float
+    min_visibility: float
+    tracking_stable: bool
+    phase_detection_clear: bool
+    outliers_detected: int
+    outlier_percentage: float
+    position_variance: float
+    fps: float
+
+
+@dataclass
+class QualityAssessment:
+    """Overall quality assessment with confidence level and warnings.
+
+    Attributes:
+        confidence: Overall confidence level (high/medium/low)
+        quality_indicators: Detailed quality metrics
+        warnings: List of warning messages for user
+        quality_score: Numerical quality score (0-100)
+    """
+
+    confidence: Literal["high", "medium", "low"]
+    quality_indicators: QualityIndicators
+    warnings: list[str]
+    quality_score: float
+
+    def to_dict(self) -> dict:
+        """Convert quality assessment to JSON-serializable dictionary."""
+        return {
+            "confidence": self.confidence,
+            "quality_score": round(self.quality_score, 1),
+            "quality_indicators": {
+                "avg_visibility": round(self.quality_indicators.avg_visibility, 3),
+                "min_visibility": round(self.quality_indicators.min_visibility, 3),
+                "tracking_stable": self.quality_indicators.tracking_stable,
+                "phase_detection_clear": self.quality_indicators.phase_detection_clear,
+                "outliers_detected": self.quality_indicators.outliers_detected,
+                "outlier_percentage": round(self.quality_indicators.outlier_percentage, 1),
+                "position_variance": round(self.quality_indicators.position_variance, 6),
+                "fps": round(self.quality_indicators.fps, 1),
+            },
+            "warnings": self.warnings,
+        }
+
+
+def calculate_position_stability(
+    positions: NDArray[np.float64],
+    window_size: int = 10,
+) -> float:
+    """
+    Calculate position tracking stability using rolling variance.
+
+    Lower variance indicates more stable tracking (less jitter).
+
+    Args:
+        positions: Array of position values (e.g., foot y-positions)
+        window_size: Window size for rolling variance calculation
+
+    Returns:
+        Mean rolling variance (lower is better)
+    """
+    if len(positions) < window_size:
+        return float(np.var(positions))
+
+    # Calculate rolling variance
+    rolling_vars = []
+    for i in range(len(positions) - window_size + 1):
+        window = positions[i : i + window_size]
+        rolling_vars.append(np.var(window))
+
+    return float(np.mean(rolling_vars))
+
+
+def assess_tracking_quality(
+    visibilities: NDArray[np.float64],
+    positions: NDArray[np.float64],
+    outlier_mask: NDArray[np.bool_] | None,
+    fps: float,
+    phases_detected: bool = True,
+    phase_count: int = 0,
+) -> QualityAssessment:
+    """
+    Assess overall tracking quality and assign confidence level.
+
+    Evaluates multiple quality indicators to determine confidence:
+    - Landmark visibility (MediaPipe confidence scores)
+    - Tracking stability (position variance, jitter)
+    - Outlier detection (frames requiring correction)
+    - Phase detection success (clear transitions found)
+    - Frame rate (higher = better temporal resolution)
+
+    Args:
+        visibilities: Array of visibility scores for each frame (0-1)
+        positions: Array of tracked positions (normalized coordinates)
+        outlier_mask: Boolean array marking outlier frames (None if no outliers)
+        fps: Video frame rate
+        phases_detected: Whether jump phases were successfully detected
+        phase_count: Number of phases detected (0 if failed)
+
+    Returns:
+        QualityAssessment object with confidence level, indicators, and warnings
+    """
+    # Calculate visibility metrics
+    avg_visibility = float(np.mean(visibilities))
+    min_visibility = float(np.min(visibilities))
+
+    # Calculate tracking stability
+    position_variance = calculate_position_stability(positions)
+    tracking_stable = position_variance < 0.001  # Threshold for stable tracking
+
+    # Count outliers
+    outliers_detected = 0
+    outlier_percentage = 0.0
+    if outlier_mask is not None:
+        outliers_detected = int(np.sum(outlier_mask))
+        outlier_percentage = (outliers_detected / len(outlier_mask)) * 100
+
+    # Assess phase detection clarity
+    phase_detection_clear = phases_detected and phase_count >= 2
+
+    # Create quality indicators
+    indicators = QualityIndicators(
+        avg_visibility=avg_visibility,
+        min_visibility=min_visibility,
+        tracking_stable=tracking_stable,
+        phase_detection_clear=phase_detection_clear,
+        outliers_detected=outliers_detected,
+        outlier_percentage=outlier_percentage,
+        position_variance=position_variance,
+        fps=fps,
+    )
+
+    # Calculate overall quality score (0-100)
+    quality_score = _calculate_quality_score(indicators)
+
+    # Determine confidence level
+    confidence = _determine_confidence_level(quality_score)
+
+    # Generate warnings
+    warnings = _generate_warnings(indicators, confidence)
+
+    return QualityAssessment(
+        confidence=confidence,
+        quality_indicators=indicators,
+        warnings=warnings,
+        quality_score=quality_score,
+    )
+
+
+def _calculate_quality_score(indicators: QualityIndicators) -> float:
+    """
+    Calculate numerical quality score (0-100) from quality indicators.
+
+    Weighted combination of different quality factors:
+    - Visibility: 40% weight (most critical)
+    - Tracking stability: 25% weight
+    - Outlier rate: 20% weight
+    - Phase detection: 10% weight
+    - Frame rate: 5% weight
+
+    Args:
+        indicators: Quality indicators object
+
+    Returns:
+        Quality score from 0 (worst) to 100 (best)
+    """
+    # Visibility score (40% weight)
+    # Perfect: avg_vis=1.0, min_vis>0.8
+    visibility_score = indicators.avg_visibility * 100
+    if indicators.min_visibility < 0.5:
+        visibility_score *= 0.7  # Penalty for low minimum visibility
+
+    # Tracking stability score (25% weight)
+    # Perfect: position_variance < 0.0005
+    # Good: position_variance < 0.001
+    # Medium: position_variance < 0.003
+    if indicators.position_variance < 0.0005:
+        stability_score = 100.0
+    elif indicators.position_variance < 0.001:
+        stability_score = 85.0
+    elif indicators.position_variance < 0.003:
+        stability_score = 65.0
+    else:
+        stability_score = max(0.0, 100 - indicators.position_variance * 10000)
+
+    # Outlier score (20% weight)
+    # Perfect: 0% outliers
+    # Good: <5% outliers
+    # Acceptable: <10% outliers
+    outlier_score = max(0.0, 100 - indicators.outlier_percentage * 10)
+
+    # Phase detection score (10% weight)
+    phase_score = 100.0 if indicators.phase_detection_clear else 50.0
+
+    # Frame rate score (5% weight)
+    # Perfect: 60fps+
+    # Good: 30-60fps
+    # Poor: <30fps
+    if indicators.fps >= 60:
+        fps_score = 100.0
+    elif indicators.fps >= 30:
+        fps_score = 80.0
+    elif indicators.fps >= 24:
+        fps_score = 60.0
+    else:
+        fps_score = 40.0
+
+    # Weighted combination
+    quality_score = (
+        visibility_score * 0.40
+        + stability_score * 0.25
+        + outlier_score * 0.20
+        + phase_score * 0.10
+        + fps_score * 0.05
+    )
+
+    return float(np.clip(quality_score, 0, 100))
+
+
+def _determine_confidence_level(
+    quality_score: float,
+) -> Literal["high", "medium", "low"]:
+    """
+    Determine confidence level from quality score.
+
+    Thresholds:
+    - High: quality_score >= 75
+    - Medium: quality_score >= 50
+    - Low: quality_score < 50
+
+    Args:
+        quality_score: Numerical quality score (0-100)
+
+    Returns:
+        Confidence level: "high", "medium", or "low"
+    """
+    if quality_score >= 75:
+        return "high"
+    elif quality_score >= 50:
+        return "medium"
+    else:
+        return "low"
+
+
+def _generate_warnings(
+    indicators: QualityIndicators,
+    confidence: Literal["high", "medium", "low"],
+) -> list[str]:
+    """
+    Generate user-facing warning messages based on quality indicators.
+
+    Args:
+        indicators: Quality indicators object
+        confidence: Overall confidence level
+
+    Returns:
+        List of warning messages (empty if no warnings)
+    """
+    warnings: list[str] = []
+
+    # Visibility warnings
+    if indicators.avg_visibility < 0.7:
+        warnings.append(
+            f"Poor landmark visibility (avg {indicators.avg_visibility:.2f}). "
+            "Check lighting, camera angle, and ensure full body is visible."
+        )
+    elif indicators.avg_visibility < 0.8:
+        warnings.append(
+            f"Moderate landmark visibility (avg {indicators.avg_visibility:.2f}). "
+            "Results may be less accurate."
+        )
+
+    if indicators.min_visibility < 0.5:
+        warnings.append(
+            f"Very low visibility detected ({indicators.min_visibility:.2f}). "
+            "Some frames may have occlusion or tracking loss."
+        )
+
+    # Tracking stability warnings
+    if not indicators.tracking_stable:
+        warnings.append(
+            f"Unstable landmark tracking detected "
+            f"(variance {indicators.position_variance:.4f}). This may indicate "
+            "jitter or occlusion. Consider better lighting or camera position."
+        )
+
+    # Outlier warnings
+    if indicators.outlier_percentage > 10:
+        warnings.append(
+            f"High outlier rate ({indicators.outlier_percentage:.1f}%). "
+            f"{indicators.outliers_detected} frames required correction. "
+            "This may reduce measurement accuracy."
+        )
+    elif indicators.outlier_percentage > 5:
+        warnings.append(
+            f"Moderate outlier rate ({indicators.outlier_percentage:.1f}%). "
+            f"{indicators.outliers_detected} frames were corrected."
+        )
+
+    # Phase detection warnings
+    if not indicators.phase_detection_clear:
+        warnings.append(
+            "Unclear phase transitions detected. "
+            "Jump phases may not be accurately identified. "
+            "Check if full jump is captured in video."
+        )
+
+    # Frame rate warnings
+    if indicators.fps < 30:
+        warnings.append(
+            f"Low frame rate ({indicators.fps:.0f} fps). "
+            "Recommend recording at 30fps or higher for better accuracy. "
+            "Validated apps use 120-240fps."
+        )
+    elif indicators.fps < 60:
+        warnings.append(
+            f"Frame rate is {indicators.fps:.0f} fps. "
+            "Consider 60fps or higher for improved temporal resolution. "
+            "Validated apps (MyJump) use 120-240fps."
+        )
+
+    # Overall confidence warning
+    if confidence == "low":
+        warnings.append(
+            "⚠️ LOW CONFIDENCE: Results may be unreliable. Review quality "
+            "indicators and consider re-recording with better conditions."
+        )
+    elif confidence == "medium":
+        warnings.append(
+            "⚠️ MEDIUM CONFIDENCE: Results should be interpreted with caution. "
+            "Check quality indicators for specific issues."
+        )
+
+    return warnings
+
+
+def assess_jump_quality(
+    visibilities: NDArray[np.float64],
+    positions: NDArray[np.float64],
+    outlier_mask: NDArray[np.bool_] | None,
+    fps: float,
+    phases_detected: bool = True,
+    phase_count: int = 0,
+) -> QualityAssessment:
+    """
+    Convenience function for assessing jump analysis quality.
+
+    This is the main entry point for quality assessment, called from
+    dropjump and CMJ analysis modules.
+
+    Args:
+        visibilities: Array of visibility scores (0-1)
+        positions: Array of tracked positions
+        outlier_mask: Boolean array marking outliers (None if none detected)
+        fps: Video frame rate
+        phases_detected: Whether phases were successfully detected
+        phase_count: Number of phases detected
+
+    Returns:
+        QualityAssessment with confidence, indicators, and warnings
+    """
+    return assess_tracking_quality(
+        visibilities=visibilities,
+        positions=positions,
+        outlier_mask=outlier_mask,
+        fps=fps,
+        phases_detected=phases_detected,
+        phase_count=phase_count,
+    )
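
For orientation, here is a minimal usage sketch of the new quality module added in this release. The import path and function signature come from the diff above; the frame data, fps value, and phase_count are invented inputs for illustration, not values produced by kinemotion itself.

    import numpy as np

    from kinemotion.core.quality import assess_jump_quality

    # Synthetic per-frame data standing in for real pose-tracking output:
    # ~120 frames of landmark visibility scores and a slightly noisy
    # normalized foot y-position trace.
    rng = np.random.default_rng(0)
    visibilities = np.clip(rng.normal(0.9, 0.05, 120), 0.0, 1.0)
    positions = 0.6 + 0.002 * rng.standard_normal(120)
    outlier_mask = np.zeros(120, dtype=bool)
    outlier_mask[[10, 55]] = True  # pretend two frames needed correction

    assessment = assess_jump_quality(
        visibilities=visibilities,
        positions=positions,
        outlier_mask=outlier_mask,
        fps=60.0,
        phases_detected=True,
        phase_count=3,
    )

    print(assessment.confidence)               # "high" / "medium" / "low"
    print(round(assessment.quality_score, 1))  # weighted 0-100 score
    print(assessment.to_dict()["warnings"])    # user-facing warning strings

With inputs like these, the weighted score works out to roughly 93 (visibility ≈ 90 × 0.40, stability 100 × 0.25, outliers ≈ 83 × 0.20, phase detection 100 × 0.10, fps 100 × 0.05), which falls in the "high" confidence band (score ≥ 75) and produces no warnings.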