text stringlengths 1 93.6k |
|---|
dist_mat = torch.norm(delta,dim=3)
|
dist_new = dist_mat.transpose(1,2) # (B, hole_n, 2048)
|
if self.mask_type == 'knn_hole':
|
# idx (B, hole_n, hole_k), dist (B, hole_n, hole_k)
|
dist, idx = torch.topk(dist_new,self.hole_k,largest=False)
|
for i in range(self.hole_n):
|
dist_per_hole = dist_new[:,i,:].unsqueeze(2)
|
if self.mask_type == 'knn_hole':
|
threshold_dist = dist[:,i, -1]
|
if self.mask_type == 'ball_hole':
|
threshold_dist = self.hole_radius
|
flag_map[dist_per_hole <= threshold_dist] = 0
|
target = torch.mul(pcd, flag_map)
|
return target
|
elif self.mask_type == 'voxel_mask':
|
"""
|
voxels in the partial and optionally surroundings are 1, the rest are 0.
|
"""
|
### set static mask for each new partial pcd
|
if self.to_reset_mask:
|
mask_partial = self.voxelize(self.target, n_bins=self.args.voxel_bins, pcd_limit=0.5, threshold=0)
|
# optional to add surrounding to the mask partial
|
surrounding = self.args.surrounding
|
self.mask_dict = {}
|
for key_gt in mask_partial:
|
x,y,z = key_gt
|
surrounding_ls = []
|
surrounding_ls.append((x,y,z))
|
for x_s in range(x-surrounding+1, x+surrounding):
|
for y_s in range(y-surrounding+1, y+surrounding):
|
for z_s in range(z-surrounding+1, z+surrounding):
|
surrounding_ls.append((x_s,y_s,z_s))
|
for xyz in surrounding_ls:
|
self.mask_dict[xyz] = 1
|
# turn off mask after set mask, until next partial pcd
|
self.to_reset_mask = False
|
### preprocess
|
n_bins = self.args.voxel_bins
|
mask_tensor = torch.zeros(2048,1)
|
pcd_new = pcd*n_bins + n_bins * 0.5
|
pcd_new = pcd_new.type(torch.int64)
|
ls_voxels = pcd_new.squeeze(0).tolist() # 2028 of sublists
|
tuple_voxels = [tuple(itm) for itm in ls_voxels]
|
for i in range(2048):
|
tuple_voxel = tuple_voxels[i]
|
if tuple_voxel in self.mask_dict:
|
mask_tensor[i] = 1
|
mask_tensor = mask_tensor.unsqueeze(0).cuda()
|
pcd_map = torch.mul(pcd, mask_tensor)
|
return pcd_map
|
elif self.mask_type == 'k_mask':
|
pcd_map = self.k_mask(self.partial, pcd,stage)
|
return pcd_map
|
elif self.mask_type == 'tau_mask':
|
pcd_map = self.tau_mask(self.target, pcd,stage)
|
return pcd_map
|
else:
|
raise NotImplementedError
|
def voxelize(self, pcd, n_bins=32, pcd_limit=0.5, threshold=0):
    """
    Voxelize a partial/GT point cloud into an occupancy-count dict.

    The cloud is assumed to live in [-pcd_limit, pcd_limit] (symmetric around
    the origin); coordinates are scaled to the voxel grid by
    ``pcd * n_bins + n_bins * 0.5`` and truncated to integer bins.

    Args:
        pcd: point cloud tensor of shape (1, N, 3) — assumes batch size 1,
            since the leading dim is squeezed away. TODO confirm with callers.
        n_bins: grid resolution per axis (n_bins^3 voxels total).
        pcd_limit: nominal half-extent of the cloud. NOTE(review): currently
            unused by the computation; kept for interface compatibility.
        threshold: voxels whose point count is <= threshold are dropped
            (lets us ignore voxels occupied only by noise).

    Returns:
        dict mapping (x, y, z) integer voxel keys to point counts, with
        low-count voxels filtered out.
    """
    # Shift/scale into grid coordinates, then truncate to integer bins.
    pcd_new = pcd * n_bins + n_bins * 0.5
    pcd_new = pcd_new.type(torch.int64)
    ls_voxels = pcd_new.squeeze(0).tolist()  # N sublists of [x, y, z]
    tuple_voxels = [tuple(itm) for itm in ls_voxels]

    # Count points per voxel.
    mask_dict = {}
    for tuple_voxel in tuple_voxels:
        mask_dict[tuple_voxel] = mask_dict.get(tuple_voxel, 0) + 1

    # Bug fix: the original deleted keys from mask_dict while iterating
    # mask_dict.items(), which raises "RuntimeError: dictionary changed size
    # during iteration" whenever threshold > 0. Build the filtered dict in
    # one pass instead.
    mask_dict = {voxel: cnt for voxel, cnt in mask_dict.items() if cnt > threshold}

    return mask_dict
|
def tau_mask(self, target, x, stage=-1):
    """
    Tau mask: keep only the columns (output points) of x that fall within a
    per-stage distance threshold of at least one target point.

    Args:
        target: reference point cloud, batch size assumed to be 1.
        x: candidate point cloud of shape (B, N_output, ...) with B == 1.
        stage: index into self.args.tau_mask_dist; negative values are
            clamped to stage 0.

    Returns:
        x restricted along dim 1 to the points closer than tau to the target.
    """
    stage_idx = max(0, stage)
    tau = self.args.tau_mask_dist[stage_idx]
    # pairwise has shape (B, N_target, N_output), where B = 1
    pairwise = distChamfer_raw(target, x)
    # Only the output-point index (third axis) matters for the mask.
    _, _, close_cols = torch.where(pairwise < tau)
    keep = torch.unique(close_cols).type(torch.long)
    return x[:, keep]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.