        'recording_20211004_S19_S06_03', 'recording_20211004_S12_S20_01', 'recording_20211004_S12_S20_02',
        'recording_20211004_S12_S20_03', 'recording_20220315_S21_S30_03', 'recording_20220315_S21_S30_05',
        'recording_20220318_S32_S31_01', 'recording_20220318_S32_S31_02', 'recording_20220318_S34_S33_01',
        'recording_20220318_S33_S34_01', 'recording_20220318_S33_S34_02', 'recording_20220415_S36_S35_02',
        'recording_20220415_S35_S36_02']
else:
    test_recording_name_list = None

################################# read egobody data info
if args.dataset == 'egobody':
    df = pd.read_csv(os.path.join(args.dataset_root, 'egobody_rohm_info.csv'))
    recording_name_list = list(df['recording_name'])
    start_frame_list = list(df['target_start_frame'])
    end_frame_list = list(df['target_end_frame'])
    idx_list = list(df['target_idx'])
    gender_list = list(df['target_gender'])
    view_list = list(df['view'])
    scene_name_list = list(df['scene_name'])
    body_idx_fpv_list = list(df['body_idx_fpv'])
    # index each per-recording field by recording name for the lookups below
    start_frame_dict = dict(zip(recording_name_list, start_frame_list))
    end_frame_dict = dict(zip(recording_name_list, end_frame_list))
    idx_dict = dict(zip(recording_name_list, idx_list))
    gender_dict = dict(zip(recording_name_list, gender_list))
    view_dict = dict(zip(recording_name_list, view_list))
    scene_name_dict = dict(zip(recording_name_list, scene_name_list))
    body_idx_fpv_dict = dict(zip(recording_name_list, body_idx_fpv_list))
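    # A hedged alternative (kept as a comment so the explicit dicts above stay
    # authoritative): pandas can build the same lookups in one pass, where
    # to_dict('index') yields {recording_name: {column: value}}:
    #   meta = df.set_index('recording_name').to_dict('index')
    #   meta[name]['target_start_frame']  # == start_frame_dict[name]
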
if args.visualize:
    import open3d as o3d
    from utils.other_utils import LIMBS_BODY_SMPL
    from utils.other_utils import *
    # reference coordinate axes at the world origin
    mesh_frame = o3d.geometry.TriangleMesh.create_coordinate_frame(size=1.0, origin=[0, 0, 0])
    vis = o3d.visualization.Visualizer()
    vis.create_window()
    vis.add_geometry(mesh_frame)
    print('Visualizing...')
    if args.vis_option == 'skeleton':
        print('[blue/yellow - prediction] [blue] visible parts / [yellow] occluded parts')
        print('[green - initialized input]')
        print('[foot contact label - prediction]: [red] not in contact with floor / [green] in contact with floor')
    elif args.vis_option == 'mesh':
        print('[blue - prediction]')
        print('[green - initialized input]')
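
# A minimal sketch of how a skeleton frame for the legend above could be built,
# assuming LIMBS_BODY_SMPL is a list of (joint_a, joint_b) index pairs and
# joints is a [22, 3] array; the helper name and signature are illustrative,
# not part of the original script:
def make_skeleton_lineset(joints, color=(0, 0, 1)):
    skeleton = o3d.geometry.LineSet()
    skeleton.points = o3d.utility.Vector3dVector(joints)
    skeleton.lines = o3d.utility.Vector2iVector(LIMBS_BODY_SMPL)
    skeleton.paint_uniform_color(color)  # e.g. blue for prediction, green for input
    return skeleton
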
################################# evaluate metrics
# per-recording metric accumulators, keyed by recording name
skating_list = {}
acc_list = {}
acc_error_list = {}
ground_pene_dist_list = {}
ground_pene_freq_list = {}
gmpjpe_list = {}
mpjpe_list = {}
mpjpe_list_vis = {}
mpjpe_list_occ = {}
joint_mask_list = {}
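
# A hedged sketch of one of the metrics accumulated above: mean joint
# acceleration magnitude via second finite differences, assuming joints is a
# [T, 22, 3] array in meters (the helper name is illustrative):
def mean_joint_acceleration(joints):
    accel = joints[2:] - 2 * joints[1:-1] + joints[:-2]  # a_t = x_{t+1} - 2 x_t + x_{t-1}
    return np.linalg.norm(accel, axis=-1).mean()
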
for recording_name in test_recording_name_list:
    if args.dataset == 'prox':
        cam2world_dir = os.path.join(args.dataset_root, 'cam2world')
        # PROX recording names are '<scene>_<subject>_<seq>', so the first token is the scene
        scene_name = recording_name.split("_")[0]
        with open(os.path.join(cam2world_dir, scene_name + '.json'), 'r') as f:
            cam2world = np.array(json.load(f))
    elif args.dataset == 'egobody':
        view = view_dict[recording_name]
        body_idx = idx_dict[recording_name]
        scene_name = scene_name_dict[recording_name]
        gender_gt = gender_dict[recording_name]
        ######################### load calibration from sub kinect to main kinect (between color cameras)
        # master: kinect 12, sub_1: kinect 11, sub_2: kinect 13, sub_3: kinect 14, sub_4: kinect 15
        calib_trans_dir = os.path.join(args.dataset_root, 'calibrations', recording_name)  # extrinsics
        with open(os.path.join(calib_trans_dir, 'cal_trans', 'kinect12_to_world', scene_name + '.json'), 'r') as f:
            cam2world = np.asarray(json.load(f)['trans'])
        if view == 'sub_1':
            trans_subtomain_path = os.path.join(calib_trans_dir, 'cal_trans', 'kinect_11to12_color.json')
        elif view == 'sub_2':
            trans_subtomain_path = os.path.join(calib_trans_dir, 'cal_trans', 'kinect_13to12_color.json')
        elif view == 'sub_3':
            trans_subtomain_path = os.path.join(calib_trans_dir, 'cal_trans', 'kinect_14to12_color.json')
        elif view == 'sub_4':
            trans_subtomain_path = os.path.join(calib_trans_dir, 'cal_trans', 'kinect_15to12_color.json')
        if view != 'master':
            with open(trans_subtomain_path, 'r') as f:
                trans_subtomain = np.asarray(json.load(f)['trans'])
            # compose master->world with sub->master so cam2world maps sub-kinect coords to world
            cam2world = np.matmul(cam2world, trans_subtomain)
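        # A hedged usage sketch (kept as a comment, since the loop body continues
        # below): a 4x4 cam2world like the one composed above is typically applied
        # to [N, 3] camera-space points via homogeneous coordinates (names illustrative):
        #   homog = np.concatenate([points_cam, np.ones((len(points_cam), 1))], axis=-1)  # [N, 4]
        #   points_world = (cam2world @ homog.T).T[:, :3]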

    ################################# read test results data
    saved_data_path = '{}/{}.pkl'.format(args.saved_data_dir, recording_name)
    with open(saved_data_path, 'rb') as f:
        saved_data = pickle.load(f)
    print(saved_data_path)
    repr_name_list = saved_data['repr_name_list']
    repr_dim_dict = saved_data['repr_dim_dict']
    frame_name_list = saved_data['frame_name_list'] if args.dataset == 'egobody' else None
    rec_ric_data_noisy_list = saved_data['rec_ric_data_noisy_list']
    joints_gt_scene_coord_list = saved_data['joints_gt_scene_coord_list'] if args.dataset == 'egobody' else None
    rec_ric_data_rec_list_from_smpl = saved_data['rec_ric_data_rec_list_from_smpl']
    joints_input_scene_coord_list = saved_data['joints_input_scene_coord_list']
    motion_repr_rec_list = saved_data['motion_repr_rec_list']
    motion_repr_noisy_list = saved_data['motion_repr_noisy_list']
    mask_joint_vis_list = saved_data['mask_joint_vis_list']  # [n_clip, 143, 22]
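    # A hedged sketch (as a comment) of how the [n_clip, 143, 22] visibility mask
    # can split joint position error into the visible/occluded buckets declared
    # above (mpjpe_list_vis / mpjpe_list_occ), assuming predicted and ground-truth
    # joints of shape [n_clip, 143, 22, 3] (variable names are illustrative):
    #   err = np.linalg.norm(pred_joints - gt_joints, axis=-1)  # [n_clip, 143, 22]
    #   mpjpe_vis = err[mask_joint_vis_list == 1].mean()
    #   mpjpe_occ = err[mask_joint_vis_list == 0].mean()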