text stringlengths 1 93.6k |
|---|
if not osp.isdir(self.args.save_inversion_path):
|
os.mkdir(self.args.save_inversion_path)
|
x_np = x[0].detach().cpu().numpy()
|
if ith == -1:
|
basename = str(self.pcd_id)
|
else:
|
basename = str(self.pcd_id)+'_'+str(ith)
|
np.savetxt(osp.join(self.args.save_inversion_path,basename+"_%.4d"%curr_step+'_x.txt'), x_np, fmt = "%f;%f;%f")
|
# save checkpoint for each stage
|
#self.checkpoint_flags.append('s_'+str(stage)+' x')
|
#self.checkpoint_pcd.append(x)
|
#self.checkpoint_flags.append('s_'+str(stage)+' x_map')
|
#self.checkpoint_pcd.append(x_map)
|
### save point clouds
|
self.x = x
|
if not osp.isdir(self.args.save_inversion_path):
|
os.mkdir(self.args.save_inversion_path)
|
x_np = x[0].detach().cpu().numpy()
|
#x_map_np = x_map[0].detach().cpu().numpy()
|
partial_np = self.partial[0].detach().cpu().numpy()
|
if ith == -1:
|
basename = str(self.pcd_id)
|
else:
|
basename = str(self.pcd_id)+'_'+str(ith)
|
if self.gt is not None:
|
gt_np = self.gt[0].detach().cpu().numpy()
|
np.savetxt(osp.join(self.args.save_inversion_path,basename+'_gt.txt'), gt_np, fmt = "%f;%f;%f")
|
np.savetxt(osp.join(self.args.save_inversion_path,basename+'_x.txt'), x_np, fmt = "%f;%f;%f")
|
#np.savetxt(osp.join(self.args.save_inversion_path,basename+'_xmap.txt'), x_map_np, fmt = "%f;%f;%f")
|
np.savetxt(osp.join(self.args.save_inversion_path,basename+'_partial.txt'), partial_np, fmt = "%f;%f;%f")
|
if save_curve:
|
cd_curve_arr = np.array(cd_curve_list)
|
np.save(osp.join(self.args.save_inversion_path,basename+'_cd_curve.npy'),cd_curve_arr)
|
if bool_gt:
|
return ucd_loss.item(), directed_hausdorff_loss.item(), gt_cd_loss.item()
|
else:
|
return ucd_loss.item(), directed_hausdorff_loss.item()
|
def reset_G_tmp(self):
    """Clear the staged-checkpoint buffers kept for visualization.

    Empties ``checkpoint_pcd`` / ``checkpoint_flags`` and, when the
    mask type is ``'voxel_mask'``, flags the hole center to be
    re-sampled for the next shape.
    """
    self.checkpoint_flags = []
    self.checkpoint_pcd = []  # staged point-cloud snapshots
    if self.mask_type == 'voxel_mask':
        # hole center is re-drawn per shape for voxel masks
        self.to_reset_mask = True
|
def reset_whole_network(self, load_path_name):
    """Restore every sub-network's weights from a saved checkpoint.

    Parameters
    ----------
    load_path_name : str
        Path to a ``torch.save``'d dict keyed per component; loaded
        onto ``self.args.device``.
    """
    ckpt = torch.load(load_path_name, map_location=self.args.device)
    # (instance attribute, checkpoint key) pairs, restored in order.
    for attr, key in (
        ('Encoder', 'Encoder_state_dict'),
        ('Decoder', 'Decoder_state_dict'),
        ('DI_Disentangler', 'DI'),
        ('DS_Disentangler', 'DS'),
        ('MS_Disentangler', 'MS'),
        ('DI_Classifier', 'DIC'),
        ('DS_Classifier', 'DSC'),
    ):
        getattr(self, attr).load_state_dict(ckpt[key])
    return
|
def set_virtual_real(self, virtual_partial=None, real_partial=None, rotmat=None, azel=None):
    """Cache whichever of the given inputs are provided.

    Each non-None argument is stored on the instance under the same
    name; None arguments leave the corresponding attribute untouched.
    """
    for name, value in (
        ('virtual_partial', virtual_partial),
        ('real_partial', real_partial),
        ('azel', azel),
        ('rotmat', rotmat),
    ):
        if value is not None:
            setattr(self, name, value)
|
def set_target(self, gt=None, partial=None):
|
'''
|
set partial and gt
|
'''
|
if gt is not None:
|
if len(gt.shape) == 2:
|
self.gt = gt.unsqueeze(0)
|
else:
|
self.gt = gt
|
# for visualization
|
self.checkpoint_flags.append('GT')
|
self.checkpoint_pcd.append(self.gt)
|
else:
|
self.gt = None
|
if partial is not None:
|
if self.args.target_downsample_method.lower() == 'fps':
|
partial_size = self.args.target_downsample_size
|
if len(partial.shape)==2:
|
self.partial = self.downsample(partial.unsqueeze(0), partial_size)
|
else:
|
self.partial = self.downsample(partial, partial_size)
|
else:
|
if len(partial.shape)==2:
|
self.partial = partial.unsqueeze(0)
|
else:
|
self.partial = partial
|
else:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.