sports2d 0.6.2-py3-none-any.whl → 0.6.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- Sports2D/Demo/Config_demo.toml +36 -21
- Sports2D/Sports2D.py +35 -13
- Sports2D/Utilities/common.py +106 -0
- Sports2D/Utilities/skeletons.py +7 -8
- Sports2D/Utilities/tests.py +3 -3
- Sports2D/process.py +126 -53
- {sports2d-0.6.2.dist-info → sports2d-0.6.3.dist-info}/METADATA +299 -183
- sports2d-0.6.3.dist-info/RECORD +16 -0
- sports2d-0.6.2.dist-info/RECORD +0 -16
- {sports2d-0.6.2.dist-info → sports2d-0.6.3.dist-info}/LICENSE +0 -0
- {sports2d-0.6.2.dist-info → sports2d-0.6.3.dist-info}/WHEEL +0 -0
- {sports2d-0.6.2.dist-info → sports2d-0.6.3.dist-info}/entry_points.txt +0 -0
- {sports2d-0.6.2.dist-info → sports2d-0.6.3.dist-info}/top_level.txt +0 -0
Sports2D/process.py
CHANGED

@@ -787,9 +787,9 @@ def get_personID_with_highest_scores(all_frames_scores):
     return person_id


-def compute_floor_line(trc_data, keypoint_names = ['LBigToe', 'RBigToe'], toe_speed_below =
+def compute_floor_line(trc_data, keypoint_names = ['LBigToe', 'RBigToe'], toe_speed_below = 7, tot_speed_above=2.0):
     '''
-    Compute the floor line equation and
+    Compute the floor line equation, angle, and direction
     from the feet keypoints when they have zero speed.

     N.B.: Y coordinates point downwards

@@ -802,6 +802,7 @@ def compute_floor_line(trc_data, keypoint_names = ['LBigToe', 'RBigToe'], toe_sp
     OUTPUT:
     - angle: float. The angle of the floor line in radians
     - xy_origin: list. The origin of the floor line
+    - gait_direction: float. Left if < 0, 'right' otherwise
     '''

     # Remove frames where the person is mostly not moving (outlier)

@@ -810,36 +811,45 @@ def compute_floor_line(trc_data, keypoint_names = ['LBigToe', 'RBigToe'], toe_sp

     # Retrieve zero-speed coordinates for the foot
     low_speeds_X, low_speeds_Y = [], []
+    gait_direction_val = []
     for kpt in keypoint_names:
         speeds = np.linalg.norm(trc_data[kpt].diff(), axis=1)
-
+
         low_speed_frames = trc_data[speeds<toe_speed_below].index
         low_speeds_coords = trc_data[kpt].loc[low_speed_frames]
         low_speeds_coords = low_speeds_coords[low_speeds_coords!=0]

-
+        low_speeds_X_kpt = low_speeds_coords.iloc[:,0].tolist()
+        low_speeds_X += low_speeds_X_kpt
         low_speeds_Y += low_speeds_coords.iloc[:,1].tolist()

+        # gait direction (between [-1,1])
+        X_trend_val = np.polyfit(range(len(low_speeds_X_kpt)), low_speeds_X_kpt, 1)[0]
+        gait_direction_kpt = X_trend_val * len(low_speeds_X_kpt) / (np.max(low_speeds_X_kpt) - np.min(low_speeds_X_kpt))
+        gait_direction_val.append(gait_direction_kpt)
+
     # Fit a line to the zero-speed coordinates
     floor_line = np.polyfit(low_speeds_X, low_speeds_Y, 1) # (slope, intercept)
-
+    angle = -np.arctan(floor_line[0]) # angle of the floor line in degrees
+    xy_origin = [0, floor_line[1]] # origin of the floor line

-    #
-
-
-    return angle, xy_origin
+    # Gait direction
+    gait_direction = np.mean(gait_direction_val)
+
+    return angle, xy_origin, gait_direction


-def convert_px_to_meters(Q_coords_kpt,
+def convert_px_to_meters(Q_coords_kpt, px_to_m_person_height_m, height_px, cx, cy, floor_angle, visible_side='none'):
     '''
     Convert pixel coordinates to meters.

     INPUTS:
     - Q_coords_kpt: pd.DataFrame. The xyz coordinates of a keypoint in pixels, with z filled with zeros
-    -
+    - px_to_m_person_height_m: float. The height of the person in meters
     - height_px: float. The height of the person in pixels
     - cx, cy: float. The origin of the image in pixels
     - floor_angle: float. The angle of the floor in radians
+    - visible_side: str. The side of the person that is visible ('right', 'left', 'front', 'back', 'none')

     OUTPUT:
     - Q_coords_kpt_m: pd.DataFrame. The XYZ coordinates of a keypoint in meters
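
The new gait_direction output of compute_floor_line is the slope of the stationary-toe X positions over time, normalized by the covered X range so it stays roughly within [-1, 1]: negative values mean the person moves towards the left of the image, positive values towards the right. A minimal standalone sketch of that normalization on made-up toe positions (not data from the package):

import numpy as np

# Hypothetical X positions (px) of one toe at its zero-speed frames:
# the person drifts towards larger X, i.e. towards the right of the image.
low_speeds_X_kpt = [100, 103, 152, 156, 208, 213]

# Slope of X against the sample index, as in compute_floor_line
X_trend_val = np.polyfit(range(len(low_speeds_X_kpt)), low_speeds_X_kpt, 1)[0]

# Normalize by the covered X range so the value lands near [-1, 1]
gait_direction_kpt = X_trend_val * len(low_speeds_X_kpt) / (np.max(low_speeds_X_kpt) - np.min(low_speeds_X_kpt))

print(round(gait_direction_kpt, 2))  # positive here: the person walks towards the right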

@@ -848,10 +858,17 @@ def convert_px_to_meters(Q_coords_kpt, person_height_m, height_px, cx, cy, floor
     u = Q_coords_kpt.iloc[:,0]
     v = Q_coords_kpt.iloc[:,1]

-    X =
-    Y = -
+    X = px_to_m_person_height_m / height_px * ((u-cx) + (v-cy)*np.sin(floor_angle))
+    Y = - px_to_m_person_height_m / height_px * np.cos(floor_angle) * (v-cy - np.tan(floor_angle)*(u-cx))

-
+    if 'marker_Z_positions' in globals() and visible_side!='none':
+        marker_name = Q_coords_kpt.columns[0]
+        Z = X.copy()
+        Z[:] = marker_Z_positions[visible_side][marker_name]
+    else:
+        Z = np.zeros_like(X)
+
+    Q_coords_kpt_m = pd.DataFrame(np.array([X, Y, Z]).T, columns=Q_coords_kpt.columns)

     return Q_coords_kpt_m

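
A quick sanity check of the new conversion equations in convert_px_to_meters: the pixel-to-meter scale is px_to_m_person_height_m / height_px, and with a flat floor (floor_angle = 0) the formulas reduce to plain scaling of the offsets from (cx, cy), with Y negated because image Y points downwards. A minimal sketch with made-up numbers (a 1.70 m person measured as 850 px tall):

import numpy as np

px_to_m_person_height_m = 1.70   # person height in meters (made-up value)
height_px = 850.0                # same height measured in pixels
cx, cy = 960.0, 540.0            # chosen pixel origin
floor_angle = 0.0                # radians; flat floor for this check

u = np.array([1160.0])           # keypoint 200 px to the right of the origin
v = np.array([340.0])            # and 200 px above it (image Y points down)

# Same equations as in convert_px_to_meters
X = px_to_m_person_height_m / height_px * ((u-cx) + (v-cy)*np.sin(floor_angle))
Y = - px_to_m_person_height_m / height_px * np.cos(floor_angle) * (v-cy - np.tan(floor_angle)*(u-cx))

print(X, Y)  # [0.4] [0.4]: 200 px at 0.002 m/px gives 0.4 m to the right and 0.4 m up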

@@ -902,11 +919,13 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):

     # Base parameters
     video_dir = Path(config_dict.get('project').get('video_dir'))
-
+    px_to_m_from_person_id = int(config_dict.get('project').get('px_to_m_from_person_id'))
+    px_to_m_person_height_m = config_dict.get('project').get('px_to_m_person_height')
+    visible_side = config_dict.get('project').get('visible_side')
     # Pose from file
-
-    if
-    else:
+    load_trc_px = config_dict.get('project').get('load_trc_px')
+    if load_trc_px == '': load_trc_px = None
+    else: load_trc_px = Path(load_trc_px).resolve()
     compare = config_dict.get('project').get('compare')
     # Webcam settings
     webcam_id = config_dict.get('project').get('webcam_id')
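
For orientation, the new 'project' lookups above expect entries like the ones below once the TOML has been parsed into config_dict. This is a hypothetical minimal fragment for illustration only; the authoritative keys and defaults are in the updated Config_demo.toml listed at the top of this diff.

# Hypothetical parsed-config fragment matching the new .get() calls (illustration only)
config_dict = {
    'project': {
        'video_dir': '',                    # unchanged from 0.6.2
        'px_to_m_from_person_id': 0,        # person whose height calibrates the px -> m scale
        'px_to_m_person_height': 1.70,      # that person's height in meters
        'visible_side': ['auto', 'front'],  # one entry per person; 'auto' infers it from the gait direction
        'load_trc_px': '',                  # '' runs pose estimation; otherwise a path to a *_px.trc file
        'compare': False,
        'webcam_id': 0,
    },
}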

@@ -942,13 +961,13 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):

     # Pixel to meters conversion
     to_meters = config_dict.get('px_to_meters_conversion').get('to_meters')
+    make_c3d = config_dict.get('px_to_meters_conversion').get('make_c3d')
     save_calib = config_dict.get('px_to_meters_conversion').get('save_calib')
     # Calibration from file
     calib_file = config_dict.get('px_to_meters_conversion').get('calib_file')
     if calib_file == '': calib_file = None
     else: calib_file = Path(calib_file).resolve()
     # Calibration from person height
-    calib_on_person_id = int(config_dict.get('px_to_meters_conversion').get('calib_on_person_id'))
     floor_angle = config_dict.get('px_to_meters_conversion').get('floor_angle') # 'auto' or float
     floor_angle = np.radians(float(floor_angle)) if floor_angle != 'auto' else floor_angle
     xy_origin = config_dict.get('px_to_meters_conversion').get('xy_origin') # ['auto'] or [x, y]

@@ -992,9 +1011,21 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
             gaussian_filter_kernel, loess_filter_kernel, median_filter_kernel]

     # Inverse kinematics settings
-    do_ik = config_dict.get('
-
-
+    do_ik = config_dict.get('kinematics').get('do_ik')
+    use_augmentation = config_dict.get('kinematics').get('use_augmentation')
+    use_contacts_muscles = config_dict.get('kinematics').get('use_contacts_muscles')
+
+    osim_setup_path = config_dict.get('kinematics').get('osim_setup_path')
+    right_left_symmetry = config_dict.get('kinematics').get('right_left_symmetry')
+    default_height = config_dict.get('kinematics').get('default_height')
+    remove_scaling_setup = config_dict.get('kinematics').get('remove_individual_scaling_setup')
+    remove_ik_setup = config_dict.get('kinematics').get('remove_individual_ik_setup')
+    fastest_frames_to_remove_percent = config_dict.get('kinematics').get('fastest_frames_to_remove_percent')
+    large_hip_knee_angles = config_dict.get('kinematics').get('large_hip_knee_angles')
+    trimmed_extrema_percent = config_dict.get('kinematics').get('trimmed_extrema_percent')
+    close_to_zero_speed = config_dict.get('kinematics').get('close_to_zero_speed_m')
+
+    if do_ik: from Pose2Sim import Pose2Sim

     # Create output directories
     if video_file == "webcam":

@@ -1077,12 +1108,12 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):


     # Skip pose estimation or set it up:
-    if
-        if not '_px' in str(
-            logging.error(f'\n{
-        logging.info(f'\nUsing a pose file instead of running pose estimation and tracking: {
+    if load_trc_px:
+        if not '_px' in str(load_trc_px):
+            logging.error(f'\n{load_trc_px} file needs to be in px, not in meters.')
+        logging.info(f'\nUsing a pose file instead of running pose estimation and tracking: {load_trc_px}.')
         # Load pose file in px
-        Q_coords, _, _, keypoints_names, _ = read_trc(
+        Q_coords, _, _, keypoints_names, _ = read_trc(load_trc_px)
         keypoints_ids = [i for i in range(len(keypoints_names))]
         keypoints_all, scores_all = load_pose_file(Q_coords)
         for pre, _, node in RenderTree(model_name):

@@ -1137,7 +1168,7 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
     frame_count = 0
     while cap.isOpened():
         # Skip to the starting frame
-        if frame_count < frame_range[0] and not
+        if frame_count < frame_range[0] and not load_trc_px:
            cap.read()
            frame_count += 1
            continue

@@ -1163,7 +1194,7 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):


         # Retrieve pose or Estimate pose and track people
-        if
+        if load_trc_px:
            if frame_nb >= len(keypoints_all):
                break
            keypoints = keypoints_all[frame_nb]

@@ -1184,7 +1215,7 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
         valid_X, valid_Y, valid_scores = [], [], []
         valid_X_flipped, valid_angles = [], []
         for person_idx in range(len(keypoints)):
-            if
+            if load_trc_px:
                person_X = keypoints[person_idx][:,0]
                person_Y = keypoints[person_idx][:,1]
                person_scores = scores[person_idx]

@@ -1293,8 +1324,8 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
     frame_range = [0,frame_count] if video_file == 'webcam' else frame_range
     all_frames_time = pd.Series(np.linspace(frame_range[0]/fps, frame_range[1]/fps, frame_count+1), name='time')
     if not multiperson:
-
-        detected_persons = [
+        px_to_m_from_person_id = get_personID_with_highest_scores(all_frames_scores)
+        detected_persons = [px_to_m_from_person_id]
     else:
         detected_persons = range(all_frames_X_homog.shape[1])


@@ -1367,7 +1398,7 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
         # Build TRC file
         trc_data_i = trc_data_from_XYZtime(all_frames_X_person_filt, all_frames_Y_person_filt, all_frames_Z_homog, all_frames_time)
         trc_data.append(trc_data_i)
-        if not
+        if not load_trc_px:
            make_trc_with_trc_data(trc_data_i, str(pose_path_person), fps=fps)
            logging.info(f'Pose in pixels saved to {pose_path_person.resolve()}.')


@@ -1382,9 +1413,9 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
     # Convert px to meters
     if to_meters:
         logging.info('\nConverting pose to meters:')
-        if
-            logging.warning(f'Person #{
-
+        if px_to_m_from_person_id>=len(trc_data):
+            logging.warning(f'Person #{px_to_m_from_person_id} not detected in the video. Calibrating on person #0 instead.')
+            px_to_m_from_person_id = 0
         if calib_file:
             logging.info(f'Using calibration file to convert coordinates in meters: {calib_file}.')
             calib_params_dict = retrieve_calib_params(calib_file)

@@ -1393,43 +1424,68 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
         else:
             # Compute calibration parameters
             if not multiperson:
-                selected_person_id =
-
-            height_px = compute_height(trc_data[
+                selected_person_id = px_to_m_from_person_id
+                px_to_m_from_person_id = 0
+            height_px = compute_height(trc_data[px_to_m_from_person_id].iloc[:,1:], keypoints_names,
                 fastest_frames_to_remove_percent=fastest_frames_to_remove_percent, close_to_zero_speed=close_to_zero_speed_px, large_hip_knee_angles=large_hip_knee_angles, trimmed_extrema_percent=trimmed_extrema_percent)

+            toe_speed_below = 1 # m/s (below which the foot is considered to be stationary)
+            px_per_m = height_px/px_to_m_person_height_m
+            toe_speed_below_px_frame = toe_speed_below * px_per_m / fps
             if floor_angle == 'auto' or xy_origin == 'auto':
                 # estimated from the line formed by the toes when they are on the ground (where speed = 0)
                 try:
-
-
-
-
-                    floor_angle_estim, xy_origin_estim = compute_floor_line(trc_data[calib_on_person_id], keypoint_names=['LBigToe', 'RBigToe'], toe_speed_below=toe_speed_below_px_frame)
-                except: # no feet points
-                    floor_angle_estim, xy_origin_estim = compute_floor_line(trc_data[calib_on_person_id], keypoint_names=['LAnkle', 'RAnkle'], toe_speed_below=toe_speed_below_px_frame)
+                    if all(key in trc_data[px_to_m_from_person_id] for key in ['LBigToe', 'RBigToe']):
+                        floor_angle_estim, xy_origin_estim, _ = compute_floor_line(trc_data[px_to_m_from_person_id], keypoint_names=['LBigToe', 'RBigToe'], toe_speed_below=toe_speed_below_px_frame)
+                    else:
+                        floor_angle_estim, xy_origin_estim, _ = compute_floor_line(trc_data[px_to_m_from_person_id], keypoint_names=['LAnkle', 'RAnkle'], toe_speed_below=toe_speed_below_px_frame)
                         xy_origin_estim[0] = xy_origin_estim[0]-0.13
                         logging.warning(f'The RBigToe and LBigToe are missing from your model. Using ankles - 13 cm to compute the floor line.')
                 except:
                     floor_angle_estim = 0
                     xy_origin_estim = cam_width/2, cam_height/2
-                    logging.warning(f'Could not estimate the floor angle and xy_origin. Make sure that the full body is visible. Using floor angle = 0° and xy_origin = [{cam_width/2}, {cam_height/2}].')
+                    logging.warning(f'Could not estimate the floor angle and xy_origin for person {px_to_m_from_person_id}. Make sure that the full body is visible. Using floor angle = 0° and xy_origin = [{cam_width/2}, {cam_height/2}].')
             if not floor_angle == 'auto':
                 floor_angle_estim = floor_angle
             if xy_origin == 'auto':
                 cx, cy = xy_origin_estim
             else:
                 cx, cy = xy_origin
-            logging.info(f'Using height of person #{
+            logging.info(f'Using height of person #{px_to_m_from_person_id} ({px_to_m_person_height_m}m) to convert coordinates in meters. '
                          f'Floor angle: {np.degrees(floor_angle_estim) if not floor_angle=="auto" else f"auto (estimation: {round(np.degrees(floor_angle_estim),2)}°)"}, '
                          f'xy_origin: {xy_origin if not xy_origin=="auto" else f"auto (estimation: {[round(c) for c in xy_origin_estim]})"}.')

         # Coordinates in m
         for i in range(len(trc_data)):
+            # print(i)
             if not np.array(trc_data[i].iloc[:,1:] ==0).all():
-
+                # Automatically determine visible side
+                visible_side_i = visible_side[i] if len(visible_side)>i else 'auto' # set to 'auto' if list too short
+
+                # Set to 'front' if slope of X values between [-5,5]
+                if visible_side_i == 'auto':
+                    try:
+                        if all(key in trc_data[i] for key in ['LBigToe', 'RBigToe']):
+                            _, _, gait_direction = compute_floor_line(trc_data[i], keypoint_names=['LBigToe', 'RBigToe'], toe_speed_below=toe_speed_below_px_frame)
+                        else:
+                            _, _, gait_direction = compute_floor_line(trc_data[i], keypoint_names=['LAnkle', 'RAnkle'], toe_speed_below=toe_speed_below_px_frame)
+                            logging.warning(f'The RBigToe and LBigToe are missing from your model. Gait direction will be determined from the ankle points.')
+                        visible_side_i = 'right' if gait_direction > 0.6 \
+                                         else 'left' if gait_direction < -0.6 \
+                                         else 'front'
+                    except:
+                        visible_side_i = 'none'
+                        logging.warning(f'Could not automatically find gait direction for person {i}. Please set visible_side to "front", "back", "left", or "right" for this person. Setting to "none".')
+
+                # skip if none
+                if visible_side_i == 'none':
+                    logging.info(f'Skipping because "visible_side" is set to none for person {i}.')
+                    continue
+
+                # Convert to meters
+                trc_data_m_i = pd.concat([convert_px_to_meters(trc_data[i][kpt_name], px_to_m_person_height_m, height_px, cx, cy, -floor_angle_estim, visible_side=visible_side_i) for kpt_name in keypoints_names], axis=1)
                 trc_data_m_i.insert(0, 't', all_frames_time)
-                trc_data_unfiltered_m_i = pd.concat([convert_px_to_meters(trc_data_unfiltered[i][kpt_name],
+                trc_data_unfiltered_m_i = pd.concat([convert_px_to_meters(trc_data_unfiltered[i][kpt_name], px_to_m_person_height_m, height_px, cx, cy, -floor_angle_estim) for kpt_name in keypoints_names], axis=1)
                 trc_data_unfiltered_m_i.insert(0, 't', all_frames_time)

                 if to_meters and show_plots:
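
The automatic visible_side choice above thresholds the normalized gait direction at ±0.6: clearly walking towards the right of the image selects 'right', clearly towards the left selects 'left', and anything in between is treated as a frontal view. Restated as a small standalone helper (a sketch for illustration, not a function that exists in the package):

def side_from_gait_direction(gait_direction, threshold=0.6):
    '''Map a normalized gait direction (roughly in [-1, 1]) to a visible-side label,
    mirroring the thresholds used in process_fun above.'''
    if gait_direction > threshold:
        return 'right'
    if gait_direction < -threshold:
        return 'left'
    return 'front'

print(side_from_gait_direction(0.25))   # 'front': too little sideways drift to pick a profile
print(side_from_gait_direction(-0.8))   # 'left'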

@@ -1439,7 +1495,9 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
                 idx_path = selected_person_id if not multiperson and not calib_file else i
                 pose_path_person_m_i = (pose_output_path.parent / (pose_output_path_m.stem + f'_person{idx_path:02d}.trc'))
                 make_trc_with_trc_data(trc_data_m_i, pose_path_person_m_i, fps=fps)
-
+                if make_c3d:
+                    c3d_path = convert_to_c3d(pose_path_person_m_i)
+                logging.info(f'Person {idx_path}: Pose in meters saved to {pose_path_person_m_i.resolve()}. {"Also saved in c3d format." if make_c3d else ""}')



@@ -1457,7 +1515,7 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):


         # z = 3.0 # distance between the camera and the person. Required in the calibration file but simplified in the equations
-        # f = height_px /
+        # f = height_px / px_to_m_person_height_m * z


         # # Name

@@ -1491,7 +1549,7 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):

     # Post-processing angles
     if save_angles and calculate_angles:
-        logging.info('\nPost-processing angles:')
+        logging.info('\nPost-processing angles (without inverse kinematics):')
         all_frames_angles = make_homogeneous(all_frames_angles)

         # unwrap angles

@@ -1568,3 +1626,18 @@ def process_fun(config_dict, video_file, time_range, frame_rate, result_dir):
             if show_plots:
                 all_frames_angles_person.insert(0, 't', all_frames_time)
                 angle_plots(all_frames_angles_person, angle_data, i) # i = current person
+
+
+    # # Run scaling and inverse kinematics
+    # if save_angles and calculate_angles and do_ik:
+    #     logging.info('\nPost-processing angles (with inverse kinematics):')
+    #     if not to_meters:
+    #         logging.error('IK requires positions in meters rather than in pixels. Set to_meters to True.')
+    #         raise ValueError('IK requires positions in meters rather than in pixels. Set to_meters to True.')
+
+
+    # marker_Z_positions
+    # if 'none': No IK possible.
+    # visible_side=='auto'
+
+    # convert_to_c3d(trc_path)