kinemotion 0.4.0__tar.gz → 0.5.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. kinemotion-0.5.3/.github/workflows/release.yml +45 -0
  2. {kinemotion-0.4.0 → kinemotion-0.5.3}/.gitignore +1 -1
  3. kinemotion-0.5.3/.pre-commit-config.yaml +33 -0
  4. kinemotion-0.5.3/CHANGELOG.md +31 -0
  5. kinemotion-0.5.3/GEMINI.md +162 -0
  6. {kinemotion-0.4.0 → kinemotion-0.5.3}/PKG-INFO +8 -2
  7. {kinemotion-0.4.0 → kinemotion-0.5.3}/README.md +6 -0
  8. {kinemotion-0.4.0 → kinemotion-0.5.3}/docs/VALIDATION_PLAN.md +27 -24
  9. {kinemotion-0.4.0 → kinemotion-0.5.3}/pyproject.toml +22 -7
  10. {kinemotion-0.4.0 → kinemotion-0.5.3}/.tool-versions +0 -0
  11. {kinemotion-0.4.0 → kinemotion-0.5.3}/CLAUDE.md +0 -0
  12. {kinemotion-0.4.0 → kinemotion-0.5.3}/LICENSE +0 -0
  13. {kinemotion-0.4.0 → kinemotion-0.5.3}/docs/ERRORS_FINDINGS.md +0 -0
  14. {kinemotion-0.4.0 → kinemotion-0.5.3}/docs/FRAMERATE.md +0 -0
  15. {kinemotion-0.4.0 → kinemotion-0.5.3}/docs/IMPLEMENTATION_PLAN.md +0 -0
  16. {kinemotion-0.4.0 → kinemotion-0.5.3}/docs/IMU_METADATA_PRESERVATION.md +0 -0
  17. {kinemotion-0.4.0 → kinemotion-0.5.3}/docs/PARAMETERS.md +0 -0
  18. {kinemotion-0.4.0 → kinemotion-0.5.3}/examples/programmatic_usage.py +0 -0
  19. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/__init__.py +0 -0
  20. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/cli.py +0 -0
  21. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/core/__init__.py +0 -0
  22. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/core/filtering.py +0 -0
  23. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/core/pose.py +0 -0
  24. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/core/smoothing.py +0 -0
  25. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/core/video_io.py +0 -0
  26. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/dropjump/__init__.py +0 -0
  27. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/dropjump/analysis.py +0 -0
  28. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/dropjump/cli.py +0 -0
  29. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/dropjump/debug_overlay.py +0 -0
  30. {kinemotion-0.4.0 → kinemotion-0.5.3}/src/kinemotion/dropjump/kinematics.py +0 -0
  31. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/__init__.py +0 -0
  32. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_adaptive_threshold.py +0 -0
  33. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_aspect_ratio.py +0 -0
  34. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_com_estimation.py +0 -0
  35. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_contact_detection.py +0 -0
  36. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_filtering.py +0 -0
  37. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_kinematics.py +0 -0
  38. {kinemotion-0.4.0 → kinemotion-0.5.3}/tests/test_polyorder.py +0 -0
@@ -0,0 +1,45 @@
1
+ name: Release
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+
8
+ jobs:
9
+ release:
10
+ name: Create Release
11
+ runs-on: ubuntu-latest
12
+ permissions:
13
+ contents: write
14
+ steps:
15
+ - name: Checkout repository
16
+ uses: actions/checkout@v4
17
+ with:
18
+ fetch-depth: 0
19
+
20
+ - name: Set up Python
21
+ uses: actions/setup-python@v5
22
+ with:
23
+ python-version: "3.12"
24
+
25
+ - name: Set up uv
26
+ uses: astral-sh/setup-uv@v5
27
+ with:
28
+ version: "0.8.17"
29
+
30
+ - name: Install dependencies
31
+ run: uv sync
32
+
33
+ - name: Create Release
34
+ env:
35
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
36
+ PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
37
+ run: |
38
+ git config user.name "github-actions"
39
+ git config user.email "github-actions@github.com"
40
+ uv run semantic-release version
41
+
42
+ - name: Publish to PyPI
43
+ uses: pypa/gh-action-pypi-publish@release/v1
44
+ with:
45
+ password: ${{ secrets.PYPI_API_TOKEN }}
@@ -61,4 +61,4 @@ Thumbs.db
61
61
  *.jpeg
62
62
  *.jpg
63
63
 
64
- .claude/settings.local.json*
64
+ .claude/settings.local.json*
@@ -0,0 +1,33 @@
1
+ # See https://pre-commit.com for more information
2
+ # See https://pre-commit.com/hooks.html for more hooks
3
+ repos:
4
+ - repo: https://github.com/pre-commit/pre-commit-hooks
5
+ rev: v6.0.0
6
+ hooks:
7
+ - id: trailing-whitespace
8
+ - id: end-of-file-fixer
9
+ - id: check-yaml
10
+ - id: check-added-large-files
11
+ args: ['--maxkb=1000']
12
+ - id: check-merge-conflict
13
+ - id: check-toml
14
+ - id: debug-statements
15
+ - id: mixed-line-ending
16
+
17
+ - repo: https://github.com/psf/black
18
+ rev: 23.12.1
19
+ hooks:
20
+ - id: black
21
+
22
+ - repo: https://github.com/astral-sh/ruff-pre-commit
23
+ rev: v0.1.9
24
+ hooks:
25
+ - id: ruff
26
+ args: [--fix, --exit-non-zero-on-fix]
27
+
28
+ - repo: https://github.com/pre-commit/mirrors-mypy
29
+ rev: v1.7.1
30
+ hooks:
31
+ - id: mypy
32
+ args: [--ignore-missing-imports, --no-strict-optional]
33
+ exclude: ^tests/
@@ -0,0 +1,31 @@
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
+
8
+ ## [Unreleased]
9
+
10
+ ### Added
11
+ - Your new feature here.
12
+
13
+ ### Changed
14
+ - Your change here.
15
+
16
+ ### Deprecated
17
+ - Your deprecated feature here.
18
+
19
+ ### Removed
20
+ - Your removed feature here.
21
+
22
+ ### Fixed
23
+ - Your bug fix here.
24
+
25
+ ### Security
26
+ - Your security fix here.
27
+
28
+ ## [0.5.0] - 2025-10-26
29
+
30
+ ### Added
31
+ - Initial release of `kinemotion`.
@@ -0,0 +1,162 @@
1
+ # GEMINI.md
2
+
3
+ This file provides guidance to the Gemini model when working with code in this repository.
4
+
5
+ ## Repository Purpose
6
+
7
+ Kinemotion is a video-based kinematic analysis tool for athletic performance. It analyzes drop-jump videos to estimate ground contact time, flight time, and jump height. The analysis is done by tracking an athlete's movement using MediaPipe pose tracking and applying advanced kinematic calculations. It supports both traditional foot-based tracking and a more accurate center of mass (CoM) tracking.
8
+
9
+ **IMPORTANT**: The tool's accuracy has not been validated against gold-standard measurements. Any accuracy claims are theoretical.
10
+
11
+ ## Project Setup
12
+
13
+ ### Dependencies
14
+
15
+ The project uses `uv` for dependency management and `asdf` for Python version management.
16
+
17
+ - **Python Version**: 3.12.7 (specified in `.tool-versions`). MediaPipe requires Python <= 3.12.
18
+ - **Install Dependencies**: `uv sync`
19
+
20
+ **Key Libraries:**
21
+
22
+ - **Production**: `click`, `opencv-python`, `mediapipe`, `numpy`, `scipy`.
23
+ - **Development**: `pytest`, `black`, `ruff`, `mypy`.
24
+
25
+ ### Development Commands
26
+
27
+ - **Run CLI**: `uv run kinemotion dropjump-analyze <video_path>`
28
+ - **Install/Sync Dependencies**: `uv sync`
29
+ - **Run Tests**: `uv run pytest`
30
+ - **Format Code**: `uv run black src/`
31
+ - **Lint Code**: `uv run ruff check`
32
+ - **Auto-fix Linting**: `uv run ruff check --fix`
33
+ - **Type Check**: `uv run mypy src/kinemotion`
34
+ - **Run All Checks**: `uv run ruff check && uv run mypy src/kinemotion && uv run pytest`
35
+
36
+ ## Architecture
37
+
38
+ ### Module Structure
39
+
40
+ ```text
41
+ src/kinemotion/
42
+ ├── cli.py # Main CLI entry point
43
+ ├── core/ # Shared functionality (pose, smoothing, filtering, video_io)
44
+ └── dropjump/ # Drop jump specific analysis (cli, analysis, kinematics, debug_overlay)
45
+ tests/ # Unit and integration tests
46
+ docs/ # Documentation (PARAMETERS.md is key)
47
+ ```
48
+
49
+ - `core/` contains reusable code for different jump types.
50
+ - `dropjump/` contains logic specific to drop jumps.
51
+ - The main `cli.py` registers subcommands from modules like `dropjump/cli.py`.
52
+
53
+ ### Analysis Pipeline
54
+
55
+ 1. **Pose Tracking** (`core/pose.py`): Extracts 13 body landmarks per frame using MediaPipe.
56
+ 2. **Center of Mass (CoM) Estimation** (`core/pose.py`): Optional, more accurate tracking using a biomechanical model.
57
+ 3. **Smoothing** (`core/smoothing.py`): A Savitzky-Golay filter reduces jitter.
58
+ 4. **Contact Detection** (`dropjump/analysis.py`): Analyzes vertical velocity to determine ground contact vs. flight.
59
+ 5. **Phase Identification**: Finds continuous ground contact and flight periods.
60
+ 6. **Sub-Frame Interpolation** (`dropjump/analysis.py`): Estimates exact transition times between frames using linear interpolation on the velocity curve, improving timing precision significantly.
61
+ 7. **Trajectory Curvature Analysis** (`dropjump/analysis.py`): Refines transition timing by detecting acceleration spikes (e.g., landing impact).
62
+ 8. **Metrics Calculation** (`dropjump/kinematics.py`): Calculates ground contact time, flight time, and jump height.
63
+ 9. **Output**: Provides metrics in JSON format and an optional debug video.
64
+
65
+ ## Critical Implementation Details
66
+
67
+ ### 1. Aspect Ratio Preservation & SAR Handling (`core/video_io.py`)
68
+
69
+ - **CRITICAL**: The tool must preserve the source video's exact aspect ratio, including Sample Aspect Ratio (SAR) from mobile videos.
70
+ - **DO**: Get frame dimensions from the first actual frame read from the video (`frame.shape[:2]`), not from `cv2.CAP_PROP_*` properties, which can be wrong for rotated videos.
71
+ - **DO**: Use `ffprobe` to extract SAR and calculate correct display dimensions.
72
+ - The `DebugOverlayRenderer` uses these display dimensions for the output video.
73
+
74
+ ### 2. Sub-Frame Interpolation (`dropjump/analysis.py`)
75
+
76
+ - **CRITICAL**: Timing precision is achieved by interpolating between frames.
77
+ - **Velocity Calculation**: Velocity is computed as the **first derivative of the smoothed position trajectory** using a Savitzky-Golay filter (`savgol_filter(..., deriv=1)`). This is much smoother and more accurate than simple frame-to-frame differences.
78
+ - **Interpolation**: When velocity crosses the contact threshold between two frames, linear interpolation is used to find the fractional frame index of the crossing. This improves timing accuracy from ~33ms to ~10ms at 30fps.
79
+
80
+ ### 3. Trajectory Curvature Analysis (`dropjump/analysis.py`)
81
+
82
+ - **CRITICAL**: Event timing is further refined using acceleration patterns.
83
+ - **Acceleration Calculation**: Acceleration is the **second derivative of the smoothed position** (`savgol_filter(..., deriv=2)`).
84
+ - **Event Detection**:
85
+ - **Landing**: A large acceleration spike (impact deceleration).
86
+ - **Takeoff**: A sharp change in acceleration.
87
+ - **Blending**: The final transition time is a weighted blend: 70% from the curvature-based estimate and 30% from the velocity-based estimate. This is enabled by default via `--use-curvature`.
88
+
89
+ ### 4. JSON Serialization of NumPy Types (`dropjump/kinematics.py`)
90
+
91
+ - **CRITICAL**: Standard `json.dump` cannot serialize NumPy integer types (e.g., `np.int64`).
92
+ - **DO**: Explicitly cast all NumPy numbers to standard Python types (`int()`, `float()`) within the `to_dict()` methods of data classes before serialization.
93
+
94
+ ### 5. OpenCV Frame Dimensions
95
+
96
+ - **CRITICAL**: Be aware of dimension ordering differences.
97
+ - **NumPy `frame.shape`**: `(height, width, channels)`
98
+ - **OpenCV `cv2.VideoWriter()` size**: `(width, height)`
99
+ - Always pass dimensions to OpenCV functions in `(width, height)` order.
100
+
101
+ ## Code Quality & Workflow
102
+
103
+ When contributing code, strictly adhere to the project's quality standards.
104
+
105
+ 1. **Format Code**: `uv run black src/`
106
+ 2. **Lint and Fix**: `uv run ruff check --fix`
107
+ 3. **Type Check**: `uv run mypy src/kinemotion`
108
+ 4. **Run Tests**: `uv run pytest`
109
+
110
+ **Run all checks before committing**: `uv run ruff check && uv run mypy src/kinemotion && uv run pytest`
111
+
112
+ - **Type Safety**: The project uses `mypy` in strict mode. All functions must have full type annotations.
113
+ - **Linting**: `ruff` is used for linting. Configuration is in `pyproject.toml`.
114
+ - **Formatting**: `black` is used for code formatting.
115
+
116
+ ## Common Development Tasks
117
+
118
+ - **Adding New Metrics**:
119
+ 1. Update `DropJumpMetrics` in `dropjump/kinematics.py`.
120
+ 2. Add calculation logic in `calculate_drop_jump_metrics()`.
121
+ 3. Update `to_dict()` method (remember to cast NumPy types).
122
+ 4. (Optional) Add visualization in `DebugOverlayRenderer`.
123
+ 5. Add tests in `tests/test_kinematics.py`.
124
+ - **Modifying Contact Detection**: Edit `detect_ground_contact()` in `dropjump/analysis.py`.
125
+ - **Adjusting Smoothing**: Modify `smooth_landmarks()` in `core/smoothing.py`.
126
+
127
+ ## Parameter Tuning
128
+
129
+ A comprehensive guide to all CLI parameters is in `docs/PARAMETERS.md`. Refer to it for detailed explanations.
130
+
131
+ **Key `dropjump-analyze` parameters:**
132
+
133
+ - `--smoothing-window`: Controls trajectory smoothness. Increase for noisy video.
134
+ - `--polyorder`: Polynomial order for smoothing. `2` is ideal for jump physics.
135
+ - `--velocity-threshold`: Contact sensitivity. Decrease to detect shorter contacts.
136
+ - `--min-contact-frames`: Temporal filter. Increase to remove false contacts.
137
+ - `--drop-height`: **Important for accuracy.** Calibrates jump height using a known box height in meters.
138
+ - `--use-curvature`: Enables acceleration-based timing refinement (default: True).
139
+ - `--outlier-rejection`: Removes tracking glitches before smoothing (default: True).
140
+ - `--bilateral-filter`: Experimental edge-preserving smoothing alternative to Savitzky-Golay.
141
+
142
+ ## Testing
143
+
144
+ - **Run all tests**: `uv run pytest`
145
+ - **Run a specific test file**: `uv run pytest tests/test_contact_detection.py -v`
146
+ - The project has comprehensive test coverage for core functionalities like aspect ratio, contact detection, CoM estimation, and kinematics.
147
+
148
+ ## CLI Usage Examples
149
+
150
+ ```bash
151
+ # Get help for the dropjump command
152
+ uv run kinemotion dropjump-analyze --help
153
+
154
+ # Basic analysis, print JSON to stdout
155
+ uv run kinemotion dropjump-analyze video.mp4
156
+
157
+ # Full analysis: generate debug video, save metrics, and use calibration
158
+ uv run kinemotion dropjump-analyze video.mp4 \
159
+ --output debug_video.mp4 \
160
+ --json-output metrics.json \
161
+ --drop-height 0.40
162
+ ```
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: kinemotion
3
- Version: 0.4.0
3
+ Version: 0.5.3
4
4
  Summary: Video-based kinematic analysis for athletic performance
5
5
  Project-URL: Homepage, https://github.com/feniix/kinemotion
6
6
  Project-URL: Repository, https://github.com/feniix/kinemotion
@@ -9,7 +9,7 @@ Author-email: Sebastian Otaegui <feniix@gmail.com>
9
9
  License: MIT
10
10
  License-File: LICENSE
11
11
  Keywords: athletic-performance,drop-jump,kinemetry,kinemotion,mediapipe,pose-tracking,video-analysis
12
- Classifier: Development Status :: 4 - Beta
12
+ Classifier: Development Status :: 3 - Alpha
13
13
  Classifier: Intended Audience :: Science/Research
14
14
  Classifier: License :: OSI Approved :: MIT License
15
15
  Classifier: Programming Language :: Python :: 3
@@ -28,6 +28,12 @@ Description-Content-Type: text/markdown
28
28
 
29
29
  # Kinemotion
30
30
 
31
+ [![PyPI version](https://badge.fury.io/py/kinemotion.svg)](https://badge.fury.io/py/kinemotion)
32
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
33
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
34
+ [![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)
35
+ [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
36
+
31
37
  A video-based kinematic analysis tool for athletic performance. Analyzes side-view drop-jump videos to estimate key performance metrics: ground contact time, flight time, and jump height. Uses MediaPipe pose tracking and advanced kinematics.
32
38
 
33
39
  ## Features
@@ -1,5 +1,11 @@
1
1
  # Kinemotion
2
2
 
3
+ [![PyPI version](https://badge.fury.io/py/kinemotion.svg)](https://badge.fury.io/py/kinemotion)
4
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
5
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
6
+ [![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)
7
+ [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
8
+
3
9
  A video-based kinematic analysis tool for athletic performance. Analyzes side-view drop-jump videos to estimate key performance metrics: ground contact time, flight time, and jump height. Uses MediaPipe pose tracking and advanced kinematics.
4
10
 
5
11
  ## Features
@@ -21,7 +21,7 @@ For a hobby project, validation means:
21
21
  - ✅ **Reasonable accuracy**: Measurements are "in the ballpark" for practical use
22
22
  - ✅ **Consistency**: Repeated measurements give similar results
23
23
  - ✅ **Sanity checks**: Results make physical sense (jump height from flight time checks out)
24
- - ✅ **Comparative accuracy**: Similar to other free/affordable tools (MyJump2, jump mats)
24
+ - ✅ **Comparative accuracy**: Similar to other free/affordable tools (My Jump Lab, jump mats)
25
25
  - ❌ **NOT research-grade**: Not validated against force plates or motion capture systems
26
26
 
27
27
  ### Realistic Goals
@@ -72,15 +72,17 @@ For a hobby project, validation means:
72
72
 
73
73
  These methods cost nothing and can be done immediately by any user.
74
74
 
75
- ### 3.1 Compare Against MyJump2 App
75
+ ### 3.1 Compare Against My Jump Lab App
76
76
 
77
- **What it is**: MyJump2 is a popular iPhone/Android app ($10-15) that calculates jump height from slow-motion video. It's been validated in multiple research studies (ICC > 0.95 vs. force plates).
77
+ **What it is**: My Jump Lab (also known as My Jump 3) is a popular iPhone/Android app that calculates jump height from slow-motion video. It's been validated in multiple research studies (ICC > 0.95 vs. force plates).
78
+
79
+ **Download**: [iOS App Store](https://apps.apple.com/us/app/my-jump-lab-my-jump-3/id1554077178) | [Google Play](https://play.google.com/store/apps/details?id=com.my.jump.lab&hl=en)
78
80
 
79
81
  **How to do it**:
80
82
 
81
- 1. Download MyJump2 app (~$10, or use free trial if available)
83
+ 1. Download My Jump Lab app from the links above
82
84
  2. Record 10-20 jumps (countermovement or drop jumps)
83
- 3. Process same video with both MyJump2 and Kinemotion
85
+ 3. Process same video with both My Jump Lab and Kinemotion
84
86
  4. Compare jump heights, contact times, flight times
85
87
  5. Calculate: mean difference, correlation, percentage error
86
88
 
@@ -92,7 +94,7 @@ These methods cost nothing and can be done immediately by any user.
92
94
 
93
95
  **Time required**: 2-3 hours
94
96
 
95
- **Validation value**: ⭐⭐⭐⭐⭐ (High - MyJump2 is well-validated)
97
+ **Validation value**: ⭐⭐⭐⭐⭐ (High - My Jump Lab is well-validated)
96
98
 
97
99
  ### 3.2 Tracker Video Analysis Tool
98
100
 
@@ -137,10 +139,10 @@ These methods cost nothing and can be done immediately by any user.
137
139
 
138
140
  **Validation value**: ⭐⭐⭐⭐⭐ (Excellent - rigorous position-based validation, free)
139
141
 
140
- **Advantages over MyJump2**:
142
+ **Advantages over My Jump Lab**:
141
143
 
142
144
  - Tracks full trajectory (not just flight time)
143
- - Open-source and free (MyJump2 costs $10-15)
145
+ - Open-source and free (My Jump Lab costs $10-15)
144
146
  - More detailed analysis capabilities
145
147
  - Established in academic settings
146
148
 
@@ -328,7 +330,7 @@ print(f"Mean absolute error: {mae:.2f} cm")
328
330
  #### Day 1-2: Setup
329
331
 
330
332
  - [ ] Download Tracker (<https://physlets.org/tracker/>) - free and open-source
331
- - [ ] Alternative: Install MyJump2 app if iOS device available
333
+ - [ ] Alternative: Install My Jump Lab app if iOS device available
332
334
  - [ ] Set up recording environment (good lighting, clear background)
333
335
  - [ ] Test camera angles and distances
334
336
 
@@ -344,14 +346,14 @@ print(f"Mean absolute error: {mae:.2f} cm")
344
346
 
345
347
  - [ ] Process all videos with Kinemotion
346
348
  - [ ] Analyze same videos with Tracker (track ankle/heel position)
347
- - [ ] Alternative: Use MyJump2 if available
349
+ - [ ] Alternative: Use My Jump Lab if available
348
350
  - [ ] Export Tracker position data and calculate jump height
349
351
  - [ ] Calculate correlation, mean difference, MAE
350
352
  - [ ] Document results in validation notes
351
353
 
352
354
  #### Phase 1 Success Criteria
353
355
 
354
- - Correlation r > 0.85 with Tracker/MyJump2
356
+ - Correlation r > 0.85 with Tracker/My Jump Lab
355
357
  - Mean difference < 5cm for jump height
356
358
  - Contact/flight times within ±30ms
357
359
  - Event detection within ±2-3 frames
@@ -404,19 +406,20 @@ Create a validation issue on GitHub:
404
406
  Help validate Kinemotion by comparing it with other tools!
405
407
 
406
408
  **What to do**:
407
- 1. Record jumps with Kinemotion
408
- 2. Compare with MyJump2, jump mat, or manual analysis
409
- 3. Share your results here
409
+ 1. Record jump videos (with phone/camera at 60fps+)
410
+ 2. Analyze videos with Kinemotion
411
+ 3. Compare with My Jump Lab, jump mat, or manual analysis
412
+ 4. Share your results here
410
413
 
411
414
  **Data to share**:
412
415
  - Number of jumps analyzed
413
- - Reference tool used (MyJump2, jump mat, manual)
416
+ - Reference tool used (My Jump Lab, jump mat, manual)
414
417
  - Correlation (if calculated)
415
418
  - Mean difference
416
419
  - Your assessment (good agreement? systematic bias?)
417
420
 
418
421
  **Example**:
419
- - 15 jumps compared with MyJump2
422
+ - 15 jumps compared with My Jump Lab
420
423
  - Correlation: r = 0.91
421
424
  - Mean difference: -2.3cm (Kinemotion slightly lower)
422
425
  - Assessment: Good agreement for practical use
@@ -461,7 +464,7 @@ Help validate Kinemotion by comparing it with other tools!
461
464
 
462
465
  ### Realistic Timeline (Solo Developer)
463
466
 
464
- **Week 1**: MyJump2 comparison (2-3 hours total)
467
+ **Week 1**: My Jump Lab comparison (2-3 hours total)
465
468
  **Week 2**: Manual video analysis (3-4 hours total)
466
469
  **Week 3**: Physics checks and repeatability (2-3 hours total)
467
470
  **Week 4**: Documentation and results write-up (2-3 hours total)
@@ -474,13 +477,13 @@ Help validate Kinemotion by comparing it with other tools!
474
477
 
475
478
  **Minimum (Free)**:
476
479
 
477
- - MyJump2 app (or use free trial): $0-15
480
+ - My Jump Lab app (or use free trial): $0-15
478
481
  - Time: 10-15 hours
479
482
  - **Total: $0-15**
480
483
 
481
484
  **Recommended (Low-Cost)**:
482
485
 
483
- - MyJump2 app: $10-15
486
+ - My Jump Lab app: $10-15
484
487
  - Basic jump mat: $200-400 (optional)
485
488
  - Time: 15-20 hours
486
489
  - **Total: $10-415**
@@ -534,7 +537,7 @@ Sometimes opportunities arise unexpectedly:
534
537
  ```markdown
535
538
  ## Validation Status
536
539
 
537
- Kinemotion has been validated through comparison with MyJump2 app and manual video analysis:
540
+ Kinemotion has been validated through comparison with My Jump Lab app and manual video analysis:
538
541
 
539
542
  - **Jump height**: Correlation r = 0.88, MAE = 4.2cm (n=25 jumps, 30fps video)
540
543
  - **Flight time**: Correlation r = 0.91, MAE = 24ms (n=25 jumps, 30fps video)
@@ -575,7 +578,7 @@ Template:
575
578
  ## Methods
576
579
 
577
580
  ### Reference Tools
578
- - [List tools used: MyJump2, manual analysis, etc.]
581
+ - [List tools used: My Jump Lab, manual analysis, etc.]
579
582
 
580
583
  ### Testing Protocol
581
584
  - [Number of jumps, video settings, conditions]
@@ -654,7 +657,7 @@ Template:
654
657
 
655
658
  You don't need perfect validation on day one. Start simple:
656
659
 
657
- 1. **Phase 1**: Compare with MyJump2 (1 week, free)
660
+ 1. **Phase 1**: Compare with My Jump Lab (1 week, free)
658
661
  2. **Phase 2**: Manual verification (1 week, free)
659
662
  3. **Phase 3**: Document results (1 week)
660
663
  4. **Phase 4**: Community validation (ongoing)
@@ -682,7 +685,7 @@ Provide users with **honest, evidence-based information** about tool accuracy so
682
685
  |--------|---------------|-------------------|
683
686
  | **Budget** | $0-500 | $15,000-30,000 |
684
687
  | **Time** | 1-3 months | 6-12 months |
685
- | **Reference** | MyJump2, jump mat | Force plates, motion capture |
688
+ | **Reference** | My Jump Lab, jump mat | Force plates, motion capture |
686
689
  | **Participants** | Self + volunteers | 30-50 recruited participants |
687
690
  | **Statistics** | Correlation, MAE | ICC, Bland-Altman, LOA |
688
691
  | **Ethics** | None required | IRB approval needed |
@@ -703,4 +706,4 @@ Provide users with **honest, evidence-based information** about tool accuracy so
703
706
  - Added Section 3.2: Tracker video analysis tool (free, open-source, rigorous)
704
707
  - Clarified that validation assumes 60fps baseline for accuracy targets
705
708
 
706
- **Next Steps**: Start with Phase 1 (Tracker or MyJump2 comparison at 60fps+)
709
+ **Next Steps**: Start with Phase 1 (Tracker or My Jump Lab comparison at 60fps+)
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "kinemotion"
3
- version = "0.4.0"
3
+ version = "0.5.3"
4
4
  description = "Video-based kinematic analysis for athletic performance"
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.10,<3.13"
@@ -10,7 +10,7 @@ authors = [
10
10
  ]
11
11
  keywords = ["kinemotion", "kinemetry", "video-analysis", "drop-jump", "athletic-performance", "pose-tracking", "mediapipe"]
12
12
  classifiers = [
13
- "Development Status :: 4 - Beta",
13
+ "Development Status :: 3 - Alpha",
14
14
  "Intended Audience :: Science/Research",
15
15
  "License :: OSI Approved :: MIT License",
16
16
  "Programming Language :: Python :: 3",
@@ -49,13 +49,10 @@ dev-dependencies = [
49
49
  "black>=23.12.0",
50
50
  "ruff>=0.1.8",
51
51
  "mypy>=1.7.0",
52
+ "pre-commit>=3.6.0",
53
+ "python-semantic-release>=9.8.2",
52
54
  ]
53
55
 
54
- [[tool.uv.index]]
55
- name = "testpypi"
56
- url = "https://test.pypi.org/simple/"
57
- publish-url = "https://test.pypi.org/legacy/"
58
- explicit = true
59
56
 
60
57
  [tool.ruff]
61
58
  line-length = 100
@@ -94,3 +91,21 @@ module = [
94
91
  "scipy.*",
95
92
  ]
96
93
  ignore_missing_imports = true
94
+
95
+ [tool.semantic_release]
96
+ version_toml = ["pyproject.toml:project.version"]
97
+ branch = "main"
98
+ changelog_file = "CHANGELOG.md"
99
+ build_command = "uv build"
100
+ dist_path = "dist/"
101
+ upload_to_vcs_release = true
102
+ remove_dist = false
103
+ patch_without_tag = false
104
+ major_on_zero = false
105
+ allow_zero_version = true
106
+ commit_message = "chore(release): {version} [skip ci]"
107
+
108
+ [tool.semantic_release.commit_parser_options]
109
+ allowed_tags = ["feat", "fix", "docs", "style", "refactor", "perf", "test", "chore", "build", "ci"]
110
+ minor_tags = ["feat"]
111
+ patch_tags = ["fix", "perf"]
File without changes
File without changes
File without changes
File without changes
File without changes