sports2d 0.6.3__tar.gz → 0.7.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sports2d-0.6.3 → sports2d-0.7.2}/PKG-INFO +47 -38
- {sports2d-0.6.3 → sports2d-0.7.2}/README.md +45 -37
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Demo/Config_demo.toml +23 -17
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Sports2D.py +9 -13
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Utilities/common.py +29 -14
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Utilities/skeletons.py +0 -1
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Utilities/tests.py +15 -6
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/process.py +148 -59
- {sports2d-0.6.3 → sports2d-0.7.2}/setup.cfg +2 -1
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/PKG-INFO +47 -38
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/requires.txt +1 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/LICENSE +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Demo/demo.mp4 +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Utilities/__init__.py +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Utilities/filter.py +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/__init__.py +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/pyproject.toml +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/setup.py +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/SOURCES.txt +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/dependency_links.txt +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/entry_points.txt +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/not-zip-safe +0 -0
- {sports2d-0.6.3 → sports2d-0.7.2}/sports2d.egg-info/top_level.txt +0 -0
{sports2d-0.6.3 → sports2d-0.7.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: sports2d
-Version: 0.6.3
+Version: 0.7.2
 Summary: Detect pose and compute 2D joint angles from a video.
 Home-page: https://github.com/davidpagnon/Sports2D
 Author: David Pagnon
@@ -39,6 +39,7 @@ Requires-Dist: openvino
 Requires-Dist: tqdm
 Requires-Dist: imageio_ffmpeg
 Requires-Dist: deep-sort-realtime
+Requires-Dist: Pose2Sim
 
 
 [](https://github.com/davidpagnon/sports2d/actions/workflows/continuous-integration.yml)
@@ -64,14 +65,13 @@ Requires-Dist: deep-sort-realtime
 
 > **`Announcement:`\
 > Complete rewriting of the code!** Run `pip install sports2d -U` to get the latest version.
+> - MarkerAugmentation and Inverse Kinematics for accurate 3D motion with OpenSim. **New in v0.7!**
+> - Any detector and pose estimation model can be used. **New in v0.6!**
+> - Results in meters rather than pixels. **New in v0.5!**
 > - Faster, more accurate
 > - Works from a webcam
-> - Results in meters rather than pixels. **New in v0.5!**
 > - Better visualization output
 > - More flexible, easier to run
-> - Batch process multiple videos at once
->
-> Note: Colab version broken for now. I'll fix it in the next few weeks.
 
 ***N.B.:*** As always, I am more than happy to welcome contributions (see [How to contribute](#how-to-contribute-and-to-do-list))!
 <!--User-friendly Colab version released! (and latest issues fixed, too)\
@@ -220,10 +220,7 @@ The Demo video is voluntarily challenging to demonstrate the robustness of the p
 
 The OpenSim skeleton is not rigged yet. **[Feel free to contribute!](https://github.com/perfanalytics/pose2sim/issues/40)**
 
-
--->
-
-
+<img src="Content/sports2d_blender.gif" width="760">
 
 <br>
 
@@ -238,10 +235,9 @@ The Demo video is voluntarily challenging to demonstrate the robustness of the p
 - **File -> Open Model:** Open your scaled model (e.g., `Model_Pose2Sim_LSTM.osim`).
 - **File -> Load Motion:** Open your motion file (e.g., `angles.mot`).
 
-<
+<img src="Content/sports2d_opensim.gif" width="760">
 
-
--->
+<br>
 
 
 
@@ -273,21 +269,26 @@ sports2d --time_range 1.2 2.7
 
 
 #### Get coordinates in meters:
+> **N.B.:** Depth is estimated from a neutral pose.
 
 <!-- You either need to provide a calibration file, or simply the height of a person (Note that the latter will not take distortions into account, and that it will be less accurate for motion in the frontal plane).\-->
 You may need to convert pixel coordinates to meters.\
-Just provide the height of the reference person (and their ID in case of multiple person detection)
-The floor angle and the origin of the xy axis are computed automatically from gait. If you analyze another type of motion, you can manually specify them.\
-Note that it does not take distortions into account, and that it will be less accurate for motions in the frontal plane.
+Just provide the height of the reference person (and their ID in case of multiple person detection).
 
-
+You can also specify whether the visible side of the person is left, right, front, or back. Set it to 'auto' if you do not want to find it automatically (only works for motion in the sagittal plane), or to 'none' if you want to keep 2D instead of 3D coordinates (if the person goes right, and then left for example).
+
+The floor angle and the origin of the xy axis are computed automatically from gait. If you analyze another type of motion, you can manually specify them. Note that `y` points down.\
+Also note that distortions are not taken into account, and that results will be less accurate for motions in the frontal plane.
+
+<!-- ``` cmd
 sports2d --to_meters True --calib_file calib_demo.toml
-```
+``` -->
 ``` cmd
 sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id 2
 ```
 ``` cmd
-sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id 2
+sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id 2 `
+--visible_side front none auto --floor_angle 0 --xy_origin 0 940
 ```
 
 <br>
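The pixel-to-meter conversion described in the section above boils down to a uniform scale from the reference person's height, an origin shift, a flipped `y` axis (image `y` points down), and a floor-angle correction. A minimal sketch with made-up names, not Sports2D's actual implementation:

```python
import numpy as np

def px_to_m_sketch(xy_px, person_height_px, person_height_m=1.65,
                   floor_angle_rad=0.0, xy_origin_px=(0.0, 940.0)):
    """Hypothetical sketch: convert (N, 2) pixel coordinates to meters.

    Assumptions (not Sports2D's code): a uniform scale from the reference
    person's height, subtraction of the chosen origin, a flipped y axis
    because image y points downwards, and a rotation by the floor angle.
    """
    scale = person_height_m / person_height_px             # meters per pixel
    xy = (np.asarray(xy_px, dtype=float) - np.asarray(xy_origin_px)) * scale
    xy[:, 1] *= -1.0                                        # image y points downwards
    c, s = np.cos(-floor_angle_rad), np.sin(-floor_angle_rad)
    return xy @ np.array([[c, -s], [s, c]]).T               # undo floor/camera tilt

# Example: a 1.65 m tall person spans 600 px, origin placed at their ankle
print(px_to_m_sketch([[0, 940], [300, 340]], person_height_px=600))
```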
@@ -296,24 +297,28 @@ sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id
 #### Run inverse kinematics:
 > N.B.: [Full install](#full-install) required.
 
-> N.B
-
-Analyzed persons can be showing their left, right, front, or back side. If you want to ignore a certain person, set `--visible_side none`.
-
-
-
-
-Why IK?
-Add section in how it works
+> **N.B.:** The person needs to be moving on a single plane for the whole selected time range.
 
+OpenSim inverse kinematics allows you to set joint constraints, joint angle limits, to constrain the bones to keep the same length all along the motion and potentially to have equal sizes on left and right side. Most generally, it gives more biomechanically accurate results. It can also give you the opportunity to compute joint torques, muscle forces, ground reaction forces, and more, [with MoCo](https://opensim-org.github.io/opensim-moco-site/) for example.
 
+This is done via [Pose2Sim](https://github.com/perfanalytics/pose2sim).\
+Model scaling is done according to the mean of the segment lengths, across a subset of frames. We remove the 10% fastest frames (potential outliers), the frames where the speed is 0 (person probably out of frame), the frames where the average knee and hip flexion angles are above 45° (pose estimation is not precise when the person is crouching) and the 20% most extreme segment values after the previous operations (potential outliers). All these parameters can be edited in your Config.toml file.
 
 ```cmd
-sports2d --time_range 1.2 2.7
+sports2d --time_range 1.2 2.7 `
+--do_ik true `
+--px_to_m_from_person_id 1 --px_to_m_person_height 1.65 `
+--visible_side front auto
 ```
 
+You can optionally use the LSTM marker augmentation to improve the quality of the output motion.\
+You can also optionally give the participants proper masses. Mass has no influence on motion, only on forces (if you decide to further pursue kinetics analysis).
+
 ```cmd
-sports2d --time_range 1.2 2.7
+sports2d --time_range 1.2 2.7 `
+--do_ik true --use_augmentation True `
+--px_to_m_from_person_id 1 --px_to_m_person_height 1.65 `
+--visible_side front left --participant_mass 67.0 55.0
 ```
 
 <br>
@@ -379,7 +384,9 @@ sports2d --video_input demo.mp4 other_video.mp4 --time_range 1.2 2.7 0 3.5
 ```
 - Choose whether you want video, images, trc pose file, angle mot file, real-time display, and plots:
 ```cmd
-sports2d --save_vid false --save_img true
+sports2d --save_vid false --save_img true `
+--save_pose false --save_angles true `
+--show_realtime_results false --show_graphs false
 ```
 - Save results to a custom directory, specify the slow-motion factor:
 ``` cmd
@@ -396,12 +403,12 @@ sports2d --video_input demo.mp4 other_video.mp4 --time_range 1.2 2.7 0 3.5
 ```
 - Use any custom (deployed) MMPose model
 ``` cmd
-sports2d --pose_model BodyWithFeet :
---mode """{'det_class':'YOLOX',
-'det_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/yolox_m_8xb8-300e_humanart-c2c7a14a.zip',
-'det_input_size':[640, 640],
-'pose_class':'RTMPose',
-'pose_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.zip',
+sports2d --pose_model BodyWithFeet : `
+--mode """{'det_class':'YOLOX', `
+'det_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/yolox_m_8xb8-300e_humanart-c2c7a14a.zip', `
+'det_input_size':[640, 640], `
+'pose_class':'RTMPose', `
+'pose_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.zip', `
 'pose_input_size':[192,256]}"""
 ```
 
@@ -418,9 +425,10 @@ sports2d --help
 
 ```
 'config': ["C", "path to a toml configuration file"],
+
 'video_input': ["i", "webcam, or video_path.mp4, or video1_path.avi video2_path.mp4 ... Beware that images won't be saved if paths contain non ASCII characters"],
 'px_to_m_person_height': ["H", "height of the person in meters. 1.70 if not specified"],
-'visible_side': ["", "front, back, left, right, auto, or none. 'front auto' if not specified. If 'auto', will be either left or right depending on the direction of the motion. If 'none', no IK for this person"],
+'visible_side': ["", "front, back, left, right, auto, or none. 'front none auto' if not specified. If 'auto', will be either left or right depending on the direction of the motion. If 'none', no IK for this person"],
 'load_trc_px': ["", "load trc file to avaid running pose estimation again. false if not specified"],
 'compare': ["", "visually compare motion with trc file. false if not specified"],
 'webcam_id': ["w", "webcam ID. 0 if not specified"],
@@ -454,6 +462,7 @@ sports2d --help
 'do_ik': ["", "do inverse kinematics. false if not specified"],
 'use_augmentation': ["", "Use LSTM marker augmentation. false if not specified"],
 'use_contacts_muscles': ["", "Use model with contact spheres and muscles. false if not specified"],
+'participant_mass': ["", "mass of the participant in kg or none. Defaults to 70 if not provided. No influence on kinematics (motion), only on kinetics (forces)"],
 'close_to_zero_speed_m': ["","Sum for all keypoints: about 50 px/frame or 0.2 m/frame"],
 'multiperson': ["", "multiperson involves tracking: will be faster if set to false. true if not specified"],
 'tracking_mode': ["", "sports2d or rtmlib. sports2d is generally much more accurate and comparable in speed. sports2d if not specified"],
@@ -586,7 +595,7 @@ Sports2D:
 
 4. **Chooses the right persons to keep.** In single-person mode, only keeps the person with the highest average scores over the sequence. In multi-person mode, only retrieves the keypoints with high enough confidence, and only keeps the persons with high enough average confidence over each frame.
 
-4. **Converts the pixel coordinates to meters.** The user can provide a calibration file, or simply the size of a specified person. The floor angle and the coordinate origin can either be detected automatically from the gait sequence, or be manually specified.
+4. **Converts the pixel coordinates to meters.** The user can provide a calibration file, or simply the size of a specified person. The floor angle and the coordinate origin can either be detected automatically from the gait sequence, or be manually specified. The depth coordinates are set to normative values, depending on whether the person is going left, right, facing the camera, or looking away.
 
 5. **Computes the selected joint and segment angles**, and flips them on the left/right side if the respective foot is pointing to the left/right.
 
{sports2d-0.6.3 → sports2d-0.7.2}/README.md
@@ -22,14 +22,13 @@
 
 > **`Announcement:`\
 > Complete rewriting of the code!** Run `pip install sports2d -U` to get the latest version.
+> - MarkerAugmentation and Inverse Kinematics for accurate 3D motion with OpenSim. **New in v0.7!**
+> - Any detector and pose estimation model can be used. **New in v0.6!**
+> - Results in meters rather than pixels. **New in v0.5!**
 > - Faster, more accurate
 > - Works from a webcam
-> - Results in meters rather than pixels. **New in v0.5!**
 > - Better visualization output
 > - More flexible, easier to run
-> - Batch process multiple videos at once
->
-> Note: Colab version broken for now. I'll fix it in the next few weeks.
 
 ***N.B.:*** As always, I am more than happy to welcome contributions (see [How to contribute](#how-to-contribute-and-to-do-list))!
 <!--User-friendly Colab version released! (and latest issues fixed, too)\
@@ -178,10 +177,7 @@ The Demo video is voluntarily challenging to demonstrate the robustness of the p
 
 The OpenSim skeleton is not rigged yet. **[Feel free to contribute!](https://github.com/perfanalytics/pose2sim/issues/40)**
 
-
--->
-
-
+<img src="Content/sports2d_blender.gif" width="760">
 
 <br>
 
@@ -196,10 +192,9 @@ The Demo video is voluntarily challenging to demonstrate the robustness of the p
 - **File -> Open Model:** Open your scaled model (e.g., `Model_Pose2Sim_LSTM.osim`).
 - **File -> Load Motion:** Open your motion file (e.g., `angles.mot`).
 
-<
+<img src="Content/sports2d_opensim.gif" width="760">
 
-
--->
+<br>
 
 
 
@@ -231,21 +226,26 @@ sports2d --time_range 1.2 2.7
 
 
 #### Get coordinates in meters:
+> **N.B.:** Depth is estimated from a neutral pose.
 
 <!-- You either need to provide a calibration file, or simply the height of a person (Note that the latter will not take distortions into account, and that it will be less accurate for motion in the frontal plane).\-->
 You may need to convert pixel coordinates to meters.\
-Just provide the height of the reference person (and their ID in case of multiple person detection)
-The floor angle and the origin of the xy axis are computed automatically from gait. If you analyze another type of motion, you can manually specify them.\
-Note that it does not take distortions into account, and that it will be less accurate for motions in the frontal plane.
+Just provide the height of the reference person (and their ID in case of multiple person detection).
 
-
+You can also specify whether the visible side of the person is left, right, front, or back. Set it to 'auto' if you do not want to find it automatically (only works for motion in the sagittal plane), or to 'none' if you want to keep 2D instead of 3D coordinates (if the person goes right, and then left for example).
+
+The floor angle and the origin of the xy axis are computed automatically from gait. If you analyze another type of motion, you can manually specify them. Note that `y` points down.\
+Also note that distortions are not taken into account, and that results will be less accurate for motions in the frontal plane.
+
+<!-- ``` cmd
 sports2d --to_meters True --calib_file calib_demo.toml
-```
+``` -->
 ``` cmd
 sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id 2
 ```
 ``` cmd
-sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id 2
+sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id 2 `
+--visible_side front none auto --floor_angle 0 --xy_origin 0 940
 ```
 
 <br>
@@ -254,24 +254,28 @@ sports2d --to_meters True --px_to_m_person_height 1.65 --px_to_m_from_person_id
 #### Run inverse kinematics:
 > N.B.: [Full install](#full-install) required.
 
-> N.B
-
-Analyzed persons can be showing their left, right, front, or back side. If you want to ignore a certain person, set `--visible_side none`.
-
-
-
-
-Why IK?
-Add section in how it works
+> **N.B.:** The person needs to be moving on a single plane for the whole selected time range.
 
+OpenSim inverse kinematics allows you to set joint constraints, joint angle limits, to constrain the bones to keep the same length all along the motion and potentially to have equal sizes on left and right side. Most generally, it gives more biomechanically accurate results. It can also give you the opportunity to compute joint torques, muscle forces, ground reaction forces, and more, [with MoCo](https://opensim-org.github.io/opensim-moco-site/) for example.
 
+This is done via [Pose2Sim](https://github.com/perfanalytics/pose2sim).\
+Model scaling is done according to the mean of the segment lengths, across a subset of frames. We remove the 10% fastest frames (potential outliers), the frames where the speed is 0 (person probably out of frame), the frames where the average knee and hip flexion angles are above 45° (pose estimation is not precise when the person is crouching) and the 20% most extreme segment values after the previous operations (potential outliers). All these parameters can be edited in your Config.toml file.
 
 ```cmd
-sports2d --time_range 1.2 2.7
+sports2d --time_range 1.2 2.7 `
+--do_ik true `
+--px_to_m_from_person_id 1 --px_to_m_person_height 1.65 `
+--visible_side front auto
 ```
 
+You can optionally use the LSTM marker augmentation to improve the quality of the output motion.\
+You can also optionally give the participants proper masses. Mass has no influence on motion, only on forces (if you decide to further pursue kinetics analysis).
+
 ```cmd
-sports2d --time_range 1.2 2.7
+sports2d --time_range 1.2 2.7 `
+--do_ik true --use_augmentation True `
+--px_to_m_from_person_id 1 --px_to_m_person_height 1.65 `
+--visible_side front left --participant_mass 67.0 55.0
 ```
 
 <br>
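The frame-selection strategy described above for model scaling (drop the fastest frames, near-zero-speed frames, and crouching frames, then average a trimmed set of segment lengths) can be outlined roughly as follows. This is an illustrative sketch with invented names and toy data, not the Pose2Sim/Sports2D code:

```python
import numpy as np
import pandas as pd

def stable_segment_length(lengths, speeds, knee_hip_angles,
                          fastest_pct=0.10, zero_speed=1e-6,
                          max_flexion_deg=45, trim_pct=0.20):
    """Illustrative frame selection for model scaling (hypothetical helper).

    lengths: per-frame segment length in meters (pd.Series)
    speeds: per-frame summed keypoint speed (pd.Series)
    knee_hip_angles: per-frame mean knee/hip flexion in degrees (pd.Series)
    """
    keep = pd.DataFrame({'len': lengths, 'speed': speeds, 'flex': knee_hip_angles})
    keep = keep[keep['speed'] > zero_speed]                               # zero speed: person probably out of frame
    keep = keep[keep['speed'] < keep['speed'].quantile(1 - fastest_pct)]  # fastest frames: potential outliers
    keep = keep[keep['flex'] < max_flexion_deg]                           # crouching frames: imprecise pose
    lo, hi = keep['len'].quantile([trim_pct / 2, 1 - trim_pct / 2])
    trimmed = keep['len'][(keep['len'] >= lo) & (keep['len'] <= hi)]      # trim the most extreme segment values
    return trimmed.mean()

# Toy usage with random data
rng = np.random.default_rng(0)
n = 200
print(stable_segment_length(pd.Series(0.4 + 0.02 * rng.standard_normal(n)),
                            pd.Series(rng.random(n)),
                            pd.Series(60 * rng.random(n))))
```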
@@ -337,7 +341,9 @@ sports2d --video_input demo.mp4 other_video.mp4 --time_range 1.2 2.7 0 3.5
 ```
 - Choose whether you want video, images, trc pose file, angle mot file, real-time display, and plots:
 ```cmd
-sports2d --save_vid false --save_img true
+sports2d --save_vid false --save_img true `
+--save_pose false --save_angles true `
+--show_realtime_results false --show_graphs false
 ```
 - Save results to a custom directory, specify the slow-motion factor:
 ``` cmd
@@ -354,12 +360,12 @@ sports2d --video_input demo.mp4 other_video.mp4 --time_range 1.2 2.7 0 3.5
 ```
 - Use any custom (deployed) MMPose model
 ``` cmd
-sports2d --pose_model BodyWithFeet :
---mode """{'det_class':'YOLOX',
-'det_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/yolox_m_8xb8-300e_humanart-c2c7a14a.zip',
-'det_input_size':[640, 640],
-'pose_class':'RTMPose',
-'pose_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.zip',
+sports2d --pose_model BodyWithFeet : `
+--mode """{'det_class':'YOLOX', `
+'det_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/yolox_m_8xb8-300e_humanart-c2c7a14a.zip', `
+'det_input_size':[640, 640], `
+'pose_class':'RTMPose', `
+'pose_model':'https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/onnx_sdk/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.zip', `
 'pose_input_size':[192,256]}"""
 ```
 
@@ -376,9 +382,10 @@ sports2d --help
 
 ```
 'config': ["C", "path to a toml configuration file"],
+
 'video_input': ["i", "webcam, or video_path.mp4, or video1_path.avi video2_path.mp4 ... Beware that images won't be saved if paths contain non ASCII characters"],
 'px_to_m_person_height': ["H", "height of the person in meters. 1.70 if not specified"],
-'visible_side': ["", "front, back, left, right, auto, or none. 'front auto' if not specified. If 'auto', will be either left or right depending on the direction of the motion. If 'none', no IK for this person"],
+'visible_side': ["", "front, back, left, right, auto, or none. 'front none auto' if not specified. If 'auto', will be either left or right depending on the direction of the motion. If 'none', no IK for this person"],
 'load_trc_px': ["", "load trc file to avaid running pose estimation again. false if not specified"],
 'compare': ["", "visually compare motion with trc file. false if not specified"],
 'webcam_id': ["w", "webcam ID. 0 if not specified"],
@@ -412,6 +419,7 @@ sports2d --help
 'do_ik': ["", "do inverse kinematics. false if not specified"],
 'use_augmentation': ["", "Use LSTM marker augmentation. false if not specified"],
 'use_contacts_muscles': ["", "Use model with contact spheres and muscles. false if not specified"],
+'participant_mass': ["", "mass of the participant in kg or none. Defaults to 70 if not provided. No influence on kinematics (motion), only on kinetics (forces)"],
 'close_to_zero_speed_m': ["","Sum for all keypoints: about 50 px/frame or 0.2 m/frame"],
 'multiperson': ["", "multiperson involves tracking: will be faster if set to false. true if not specified"],
 'tracking_mode': ["", "sports2d or rtmlib. sports2d is generally much more accurate and comparable in speed. sports2d if not specified"],
@@ -544,7 +552,7 @@ Sports2D:
 
 4. **Chooses the right persons to keep.** In single-person mode, only keeps the person with the highest average scores over the sequence. In multi-person mode, only retrieves the keypoints with high enough confidence, and only keeps the persons with high enough average confidence over each frame.
 
-4. **Converts the pixel coordinates to meters.** The user can provide a calibration file, or simply the size of a specified person. The floor angle and the coordinate origin can either be detected automatically from the gait sequence, or be manually specified.
+4. **Converts the pixel coordinates to meters.** The user can provide a calibration file, or simply the size of a specified person. The floor angle and the coordinate origin can either be detected automatically from the gait sequence, or be manually specified. The depth coordinates are set to normative values, depending on whether the person is going left, right, facing the camera, or looking away.
 
 5. **Computes the selected joint and segment angles**, and flips them on the left/right side if the respective foot is pointing to the left/right.
 
{sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Demo/Config_demo.toml
@@ -18,10 +18,10 @@ video_input = 'demo.mp4' # 'webcam' or '<video_path.ext>', or ['video1_path.mp
 px_to_m_from_person_id = 2 # Person to use for pixels to meters conversion (not used if a calibration file is provided)
 px_to_m_person_height = 1.65 # Height of the reference person in meters (for pixels -> meters conversion).
 visible_side = ['front', 'none', 'auto'] # Choose visible side among ['right', 'left', 'front', 'back', 'auto', 'none']. String or list of strings.
-
-
+# if 'auto', will be either 'left', 'right', or 'front' depending on the direction of the motion
+# if 'none', coordinates will be left in 2D rather than 3D
 load_trc_px = '' # If you do not want to recalculate pose, load it from a trc file (in px, not in m)
-compare = false
+compare = false # Not implemented yet
 
 # Video parameters
 time_range = [] # [] for the whole video, or [start_time, end_time] (in seconds), or [[start_time1, end_time1], [start_time2, end_time2], ...]
@@ -53,7 +53,16 @@ result_dir = '' # If empty, project dir is current dir
 slowmo_factor = 1 # 1 for normal speed. For a video recorded at 240 fps and exported to 30 fps, it would be 240/30 = 8
 
 # Pose detection parameters
-pose_model = 'Body_with_feet' #With RTMLib:
+pose_model = 'Body_with_feet' #With RTMLib:
+# - Body_with_feet (default HALPE_26 model),
+# - Whole_body_wrist (COCO_133_WRIST: body + feet + 2 hand_points),
+# - Whole_body (COCO_133: body + feet + hands),
+# - Body (COCO_17). Marker augmentation won't work, Kinematic analysis will work,
+# - Hand (HAND_21, only lightweight mode. Potentially better results with Whole_body),
+# - Face (FACE_106),
+# - Animal (ANIMAL2D_17)
+# /!\ Only RTMPose is natively embeded in Pose2Sim. For all other pose estimation methods, you will have to run them yourself, and then refer to the documentation to convert the output files if needed
+# /!\ For Face and Animal, use mode="""{dictionary}""", and find the corresponding .onnx model there https://github.com/open-mmlab/mmpose/tree/main/projects/rtmpose
 mode = 'balanced' # 'lightweight', 'balanced', 'performance', or """{dictionary}""" (see below)
 
 # A dictionary (WITHIN THREE DOUBLE QUOTES) allows you to manually select the person detection (if top_down approach) and/or pose estimation models (see https://github.com/Tau-J/rtmlib).
@@ -81,7 +90,7 @@ det_frequency = 4 # Run person detection only every N frames, and inbetwee
 device = 'auto' # 'auto', 'CPU', 'CUDA', 'MPS', 'ROCM'
 backend = 'auto' # 'auto', 'openvino', 'onnxruntime', 'opencv'
 tracking_mode = 'sports2d' # 'sports2d' or 'deepsort'. 'deepsort' is slower but more robust in difficult configurations
-deepsort_params = """{'max_age':30, 'n_init':3, 'max_cosine_distance':0.3, 'max_iou_distance':0.8, 'embedder_gpu': True, embedder':'torchreid'}""" # """{dictionary between 3 double quotes}"""
+# deepsort_params = """{'max_age':30, 'n_init':3, 'max_cosine_distance':0.3, 'max_iou_distance':0.8, 'embedder_gpu': True, embedder':'torchreid'}""" # """{dictionary between 3 double quotes}"""
 # More robust in crowded scenes but tricky to parametrize. More information there: https://github.com/levan92/deep_sort_realtime/blob/master/deep_sort_realtime/deepsort_tracker.py#L51
 # Requires `pip install torch torchvision torchreid gdown tensorboard`
 
@@ -105,11 +114,6 @@ xy_origin = ['auto'] # ['auto'] or [px_x,px_y]. N.B.: px_y points downwards.
 # If conversion from a calibration file
 calib_file = '' # Calibration in the Pose2Sim format. 'calib_demo.toml', or '' if not available
 
-fastest_frames_to_remove_percent = 0.1 # Frames with high speed are considered as outliers
-close_to_zero_speed_px = 50 # Sum for all keypoints: about 50 px/frame or 0.2 m/frame
-large_hip_knee_angles = 45 # Hip and knee angles below this value are considered as imprecise
-trimmed_extrema_percent = 0.5 # Proportion of the most extreme segment values to remove before calculating their mean)
-
 
 [angles]
 display_angle_values_on = ['body', 'list'] # 'body', 'list', ['body', 'list'], 'none'. Display angle values on the body, as a list in the upper left of the image, both, or do not display them.
@@ -147,19 +151,21 @@ filter_type = 'butterworth' # butterworth, gaussian, LOESS, median
 
 
 [kinematics]
-do_ik =
+do_ik = true # Do scaling and inverse kinematics?
 use_augmentation = true # true or false (lowercase) # Set to true if you want to use the model with augmented markers
 use_contacts_muscles = true # true or false (lowercase) # If true, contact spheres and muscles are added to the model
-
-osim_setup_path = '../OpenSim_setup' # Path to the OpenSim setup folder
+participant_mass = [67.0, 55.0] # kg # defaults to 70 if not provided. No influence on kinematics (motion), only on kinetics (forces)
 right_left_symmetry = true # true or false (lowercase) # Set to false only if you have good reasons to think the participant is not symmetrical (e.g. prosthetic limb)
-
-
-
+
+# Choosing best frames to scale the model
+default_height = 1.7 # meters # If automatic height calculation did not work, this value is used to scale the model
 fastest_frames_to_remove_percent = 0.1 # Frames with high speed are considered as outliers
-
+close_to_zero_speed_px = 50 # Sum for all keypoints: about 50 px/frame
+close_to_zero_speed_m = 0.2 # Sum for all keypoints: 0.2 m/frame
 large_hip_knee_angles = 45 # Hip and knee angles below this value are considered as imprecise
 trimmed_extrema_percent = 0.5 # Proportion of the most extreme segment values to remove before calculating their mean)
+remove_individual_scaling_setup = true # true or false (lowercase) # If true, the individual scaling setup files are removed to avoid cluttering
+remove_individual_ik_setup = true # true or false (lowercase) # If true, the individual IK setup files are removed to avoid cluttering
 
 
 [logging]
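For reference, the new `[kinematics]` keys shown in this Config_demo.toml hunk can be read with Python's standard TOML parser. A minimal sketch; the file path and the fallback defaults here are assumptions, not part of the package:

```python
import tomllib  # Python 3.11+; older versions can use the third-party 'toml' package

with open("Config_demo.toml", "rb") as f:
    config = tomllib.load(f)

kin = config.get("kinematics", {})
print(kin.get("do_ik", False))                           # scaling + inverse kinematics switch
print(kin.get("participant_mass", 70))                   # kg, affects kinetics only, not motion
print(kin.get("use_augmentation", False))                # LSTM marker augmentation
print(kin.get("fastest_frames_to_remove_percent", 0.1))  # outlier removal for model scaling
```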
{sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Sports2D.py
@@ -124,7 +124,7 @@ from Sports2D import Sports2D
 DEFAULT_CONFIG = {'project': {'video_input': ['demo.mp4'],
 'px_to_m_from_person_id': 2,
 'px_to_m_person_height': 1.65,
-'visible_side': ['front', 'auto'],
+'visible_side': ['front', 'none', 'auto'],
 'load_trc_px': '',
 'compare': False,
 'time_range': [],
@@ -159,11 +159,7 @@ DEFAULT_CONFIG = {'project': {'video_input': ['demo.mp4'],
 'calib_file': '',
 'floor_angle': 'auto',
 'xy_origin': ['auto'],
-'save_calib': True
-'fastest_frames_to_remove_percent': 0.1,
-'close_to_zero_speed_px': 50,
-'large_hip_knee_angles': 45,
-'trimmed_extrema_percent': 0.5
+'save_calib': True
 },
 'angles': {'display_angle_values_on': ['body', 'list'],
 'fontSize': 0.3,
@@ -209,12 +205,14 @@ DEFAULT_CONFIG = {'project': {'video_input': ['demo.mp4'],
 },
 'kinematics':{'do_ik': False,
 'use_augmentation': False,
-'use_contacts_muscles':
+'use_contacts_muscles': True,
+'participant_mass': [67.0, 55.0],
 'right_left_symmetry': True,
 'default_height': 1.70,
 'remove_individual_scaling_setup': True,
 'remove_individual_ik_setup': True,
 'fastest_frames_to_remove_percent': 0.1,
+'close_to_zero_speed_px': 50,
 'close_to_zero_speed_m': 0.2,
 'large_hip_knee_angles': 45,
 'trimmed_extrema_percent': 0.5,
@@ -226,7 +224,7 @@ DEFAULT_CONFIG = {'project': {'video_input': ['demo.mp4'],
 CONFIG_HELP = {'config': ["C", "path to a toml configuration file"],
 'video_input': ["i", "webcam, or video_path.mp4, or video1_path.avi video2_path.mp4 ... Beware that images won't be saved if paths contain non ASCII characters"],
 'px_to_m_person_height': ["H", "height of the person in meters. 1.70 if not specified"],
-'visible_side': ["", "front, back, left, right, auto, or none. 'front auto' if not specified. If 'auto', will be either left or right depending on the direction of the motion. If 'none', no IK for this person"],
+'visible_side': ["", "front, back, left, right, auto, or none. 'front none auto' if not specified. If 'auto', will be either left or right depending on the direction of the motion. If 'none', no IK for this person"],
 'load_trc_px': ["", "load trc file to avaid running pose estimation again. false if not specified"],
 'compare': ["", "visually compare motion with trc file. false if not specified"],
 'webcam_id': ["w", "webcam ID. 0 if not specified"],
@@ -260,6 +258,7 @@ CONFIG_HELP = {'config': ["C", "path to a toml configuration file"],
 'do_ik': ["", "do inverse kinematics. false if not specified"],
 'use_augmentation': ["", "Use LSTM marker augmentation. false if not specified"],
 'use_contacts_muscles': ["", "Use model with contact spheres and muscles. false if not specified"],
+'participant_mass': ["", "mass of the participant in kg or none. Defaults to 70 if not provided. No influence on kinematics (motion), only on kinetics (forces)"],
 'close_to_zero_speed_m': ["","Sum for all keypoints: about 50 px/frame or 0.2 m/frame"],
 'multiperson': ["", "multiperson involves tracking: will be faster if set to false. true if not specified"],
 'tracking_mode': ["", "sports2d or rtmlib. sports2d is generally much more accurate and comparable in speed. sports2d if not specified"],
@@ -270,10 +269,6 @@ CONFIG_HELP = {'config': ["C", "path to a toml configuration file"],
 'keypoint_likelihood_threshold': ["", "detected keypoints are not retained if likelihood is below this threshold. 0.3 if not specified"],
 'average_likelihood_threshold': ["", "detected persons are not retained if average keypoint likelihood is below this threshold. 0.5 if not specified"],
 'keypoint_number_threshold': ["", "detected persons are not retained if number of detected keypoints is below this threshold. 0.3 if not specified, i.e., i.e., 30 percent"],
-'fastest_frames_to_remove_percent': ["", "Frames with high speed are considered as outliers. Defaults to 0.1"],
-'close_to_zero_speed_px': ["", "Sum for all keypoints: about 50 px/frame or 0.2 m/frame. Defaults to 50"],
-'large_hip_knee_angles': ["", "Hip and knee angles below this value are considered as imprecise. Defaults to 45"],
-'trimmed_extrema_percent': ["", "Proportion of the most extreme segment values to remove before calculating their mean. Defaults to 50"],
 'fontSize': ["", "font size for angle values. 0.3 if not specified"],
 'flip_left_right': ["", "true or false. true to get consistent angles with people facing both left and right sides. Set it to false if you want timeseries to be continuous even when the participent switches their stance. true if not specified"],
 'correct_segment_angles_with_floor_angle': ["", "true or false. If the camera is tilted, corrects segment angles as regards to the floor angle. Set to false is the floor is tilted instead. True if not specified"],
@@ -293,7 +288,8 @@ CONFIG_HELP = {'config': ["C", "path to a toml configuration file"],
 'remove_individual_scaling_setup': ["", "remove individual scaling setup files generated during scaling. true if not specified"],
 'remove_individual_ik_setup': ["", "remove individual IK setup files generated during IK. true if not specified"],
 'fastest_frames_to_remove_percent': ["", "Frames with high speed are considered as outliers. Defaults to 0.1"],
-'close_to_zero_speed_m': ["","Sum for all keypoints: about
+'close_to_zero_speed_m': ["","Sum for all keypoints: about 0.2 m/frame. Defaults to 0.2"],
+'close_to_zero_speed_px': ["", "Sum for all keypoints: about 50 px/frame. Defaults to 50"],
 'large_hip_knee_angles': ["", "Hip and knee angles below this value are considered as imprecise and ignored. Defaults to 45"],
 'trimmed_extrema_percent': ["", "Proportion of the most extreme segment values to remove before calculating their mean. Defaults to 50"],
 'use_custom_logging': ["", "use custom logging. false if not specified"]
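`DEFAULT_CONFIG` above is a nested dict, so user-provided values have to be merged into it section by section. A generic deep-merge sketch, using a hypothetical helper rather than the one Sports2D actually uses:

```python
def deep_update(base: dict, override: dict) -> dict:
    """Return a copy of `base` with values from `override` merged in,
    recursing into nested dicts (hypothetical helper, not Sports2D's own)."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_update(merged[key], value)
        else:
            merged[key] = value
    return merged

# Toy example mirroring the structure of DEFAULT_CONFIG
DEFAULT = {'kinematics': {'do_ik': False, 'participant_mass': [67.0, 55.0]}}
USER = {'kinematics': {'do_ik': True}}
print(deep_update(DEFAULT, USER))
# {'kinematics': {'do_ik': True, 'participant_mass': [67.0, 55.0]}}
```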
{sports2d-0.6.3 → sports2d-0.7.2}/Sports2D/Utilities/common.py
@@ -22,6 +22,7 @@ import subprocess
 from pathlib import Path
 import itertools as it
 import logging
+from collections import defaultdict
 from anytree import PreOrderIter
 
 import numpy as np
@@ -32,9 +33,9 @@ import cv2
 import c3d
 
 import matplotlib.pyplot as plt
-from PyQt5.QtWidgets import QMainWindow, QApplication, QWidget, QTabWidget, QVBoxLayout
 from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
 from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
+from PyQt5.QtWidgets import QMainWindow, QApplication, QWidget, QTabWidget, QVBoxLayout
 
 
 ## AUTHORSHIP INFORMATION
@@ -64,7 +65,7 @@ angle_dict = { # lowercase!
 'right elbow': [['RWrist', 'RElbow', 'RShoulder'], 'flexion', 180, -1],
 'left elbow': [['LWrist', 'LElbow', 'LShoulder'], 'flexion', 180, -1],
 'right wrist': [['RElbow', 'RWrist', 'RIndex'], 'flexion', -180, 1],
-'left wrist': [['LElbow', '
+'left wrist': [['LElbow', 'LWrist', 'LIndex'], 'flexion', -180, 1],
 
 # segment angles
 'right foot': [['RBigToe', 'RHeel'], 'horizontal', 0, -1],
@@ -97,18 +98,18 @@ marker_Z_positions = {'right':
 "LHip": 0.105, "LKnee": 0.0886, "LAnkle": 0.0972, "LBigToe":0.0766, "LHeel":0.0883, "LSmallToe": 0.1200,
 "LShoulder": 0.2016, "LElbow": 0.1613, "LWrist": 0.120, "LThumb": 0.1625, "LIndex": 0.1735, "LPinky": 0.1740, "LEye": 0.0311,
 "Hip": 0.0, "Neck": 0.0, "Head":0.0, "Nose": 0.0},
-'front':
-{"RHip": 0.0301, "RKnee": 0.
-"RShoulder": -0.01275, "RElbow": 0.
-"LHip":
-"LShoulder": 0.01275, "LElbow":
-"Hip": 0.0301, "Neck":
+'front': # original knee:0.0179
+{"RHip": 0.0301, "RKnee": 0.129, "RAnkle": 0.0230, "RBigToe": 0.2179, "RHeel": -0.0119, "RSmallToe": 0.1804,
+"RShoulder": -0.01275, "RElbow": 0.0702, "RWrist": 0.1076, "RThumb": 0.0106, "RIndex": -0.0004, "RPinky": -0.0009, "REye": 0.0702,
+"LHip": 0.0301, "LKnee": 0.129, "LAnkle": 0.0230, "LBigToe": 0.2179, "LHeel": -0.0119, "LSmallToe": 0.1804,
+"LShoulder": -0.01275, "LElbow": 0.0702, "LWrist": 0.1076, "LThumb": 0.0106, "LIndex": -0.0004, "LPinky": -0.0009, "LEye": 0.0702,
+"Hip": 0.0301, "Neck": 0.0008, "Head": 0.0655, "Nose": 0.1076},
 'back':
-{"RHip": -0.0301, "RKnee": -0.
-"RShoulder": 0.01275, "RElbow":
-"LHip": 0.0301, "LKnee": 0.
-"LShoulder":
-"Hip": 0.0301, "Neck": -0.0008, "Head": -0.0655, "Nose": 0.1076},
+{"RHip": -0.0301, "RKnee": -0.129, "RAnkle": -0.0230, "RBigToe": -0.2179, "RHeel": 0.0119, "RSmallToe": -0.1804,
+"RShoulder": 0.01275, "RElbow": 0.0702, "RWrist": -1076.0002, "RThumb": -0.0106, "RIndex": 0.0004, "RPinky": 0.0009, "REye": -0.0702,
+"LHip": -0.0301, "LKnee": -0.129, "LAnkle": -0.0230, "LBigToe": -0.2179, "LHeel": 0.0119, "LSmallToe": -0.1804,
+"LShoulder": 0.01275, "LElbow": 0.0702, "LWrist": -0.1076, "LThumb": -0.0106, "LIndex": 0.0004, "LPinky": 0.0009, "LEye": -0.0702,
+"Hip": -0.0301, "Neck": -0.0008, "Head": -0.0655, "Nose": -0.1076},
 }
 
 colors = [(255, 0, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255), (0, 255, 255), (0, 0, 0), (255, 255, 255),
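`marker_Z_positions` stores one normative depth per keypoint for each visible side; promoting 2D coordinates to 3D is then essentially a table lookup. A toy sketch with a hypothetical two-keypoint table, not the package's conversion code:

```python
# Toy normative-depth table, same shape of structure as marker_Z_positions but tiny
Z_TABLE = {
    'right': {"RHip": 0.105, "RKnee": 0.0886},
    'front': {"RHip": 0.0301, "RKnee": 0.129},
}

def add_normative_depth(xy_m, keypoint_names, visible_side='front'):
    """Append a normative Z (in meters) to each (x, y) pair, chosen by the
    person's visible side. Hypothetical sketch only."""
    if visible_side == 'none':          # 'none': keep 2D coordinates
        return [(x, y) for x, y in xy_m]
    depths = Z_TABLE[visible_side]
    return [(x, y, depths.get(name, 0.0))
            for (x, y), name in zip(xy_m, keypoint_names)]

print(add_normative_depth([(0.1, 0.9), (0.12, 0.5)], ["RHip", "RKnee"], 'front'))
# [(0.1, 0.9, 0.0301), (0.12, 0.5, 0.129)]
```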
@@ -170,6 +171,15 @@ class plotWindow():
 self.app.exec_()
 
 ## FUNCTIONS
+def to_dict(d):
+    '''
+    Convert a defaultdict to a dict.
+    '''
+    if isinstance(d, defaultdict):
+        return {k: to_dict(v) for k, v in d.items()}
+    return d
+
+
 def read_trc(trc_path):
 '''
 Read a TRC file and extract its contents.
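The added `to_dict` helper recursively converts nested `defaultdict`s back into plain dicts. A quick self-contained usage sketch (the function body is copied from the hunk above; the example data is made up):

```python
from collections import defaultdict

def to_dict(d):
    '''Convert a (possibly nested) defaultdict to a plain dict.'''
    if isinstance(d, defaultdict):
        return {k: to_dict(v) for k, v in d.items()}
    return d

# Hypothetical nested results container keyed by person, then by data type
tree = defaultdict(lambda: defaultdict(dict))
tree['person_0']['angles'] = {'right knee': 12.3}
print(to_dict(tree))   # {'person_0': {'angles': {'right knee': 12.3}}}
```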
@@ -575,7 +585,7 @@ def add_neck_hip_coords(kpt_name, p_X, p_Y, p_scores, kpt_ids, kpt_names):
 return p_X, p_Y, p_scores
 
 
-def best_coords_for_measurements(Q_coords, keypoints_names, fastest_frames_to_remove_percent=0.2, close_to_zero_speed=0.2, large_hip_knee_angles=45):
+def best_coords_for_measurements(Q_coords, keypoints_names, beginning_frames_to_remove_percent=0.2, end_frames_to_remove_percent=0.2, fastest_frames_to_remove_percent=0.2, close_to_zero_speed=0.2, large_hip_knee_angles=45):
 '''
 Compute the best coordinates for measurements, after removing:
 - 20% fastest frames (may be outliers)
@@ -585,6 +595,8 @@ def best_coords_for_measurements(Q_coords, keypoints_names, fastest_frames_to_re
 INPUTS:
 - Q_coords: pd.DataFrame. The XYZ coordinates of each marker
 - keypoints_names: list. The list of marker names
+- beginning_frames_to_remove_percent: float
+- end_frames_to_remove_percent: float
 - fastest_frames_to_remove_percent: float
 - close_to_zero_speed: float (sum for all keypoints: about 50 px/frame or 0.2 m/frame)
 - large_hip_knee_angles: int
@@ -607,6 +619,9 @@ def best_coords_for_measurements(Q_coords, keypoints_names, fastest_frames_to_re
 Q_coords = pd.concat((Q_coords.reset_index(drop=True), df_Hip), axis=1)
 n_markers = len(keypoints_names)
 
+# Removing first and last frames
+# Q_coords = Q_coords.iloc[int(len(Q_coords) * beginning_frames_to_remove_percent):int(len(Q_coords) * (1-end_frames_to_remove_percent))]
+
 # Using 80% slowest frames
 sum_speeds = pd.Series(np.nansum([np.linalg.norm(Q_coords.iloc[:,kpt:kpt+3].diff(), axis=1) for kpt in range(n_markers)], axis=0))
 sum_speeds = sum_speeds[sum_speeds>close_to_zero_speed] # Removing when speeds close to zero (out of frame)