| """
|
| 3DGS Codebook Quantizer
|
| ========================
|
| ไฝฟ็จๅทฒ่ฎญ็ปๅฅฝ็ codebook ๅฏนๆฐ็ 3DGS .ply ๆไปถ่ฟ่ก้ๅ๏ผ
|
| ๅฐ่ฟ็ปญ็นๅพๆ ๅฐไธบ็ฆปๆฃ็ดขๅผ๏ผๅนถๅฏ้ๅฐ้ๅปบ้ๅๅ็็นๅพๅๅ .plyใ
|
|
|
| ่พๅบ๏ผ
|
| <scene>_quantized.npz โโ ๅ็ฑป็ดขๅผ + ้ๅปบ่ฏฏๅทฎ็ป่ฎก
|
| <scene>_quantized.ply โโ ๏ผๅฏ้๏ผ็จ codebook ้ๅปบ็นๅพๅๅๅ็ๆฐ .ply
|
| """
|
|
|
| import os
|
| import argparse
|
| import numpy as np
|
| from plyfile import PlyData, PlyElement
|
| import time
|
|
|
|
|
|
|
|
|
|
|
|
|
def read_ply(ply_path: str) -> dict:
    """Load a 3DGS .ply file and split its vertex data into feature arrays.

    Returns a dict with:
        positions (N, 3), opacities (N, 1), scales (N, 3), rotations (N, 4),
        dc (N, 3), sh_rest (N, R) or None when no f_rest_* fields exist,
        filter_3d (N, 1) or None, plus the raw plydata handle and the
        ordered list of f_rest_* field names (sh_keys).
    """
    plydata = PlyData.read(ply_path)
    vertex = plydata['vertex']
    field_names = vertex.data.dtype.names

    def _stack(*cols):
        # Gather the named vertex columns into one (N, len(cols)) array.
        return np.stack([vertex[c] for c in cols], axis=1)

    positions = _stack('x', 'y', 'z')
    opacities = vertex['opacity'][:, np.newaxis]
    scales = _stack('scale_0', 'scale_1', 'scale_2')
    rotations = _stack('rot_0', 'rot_1', 'rot_2', 'rot_3')
    dc = _stack('f_dc_0', 'f_dc_1', 'f_dc_2')

    # Higher-order SH coefficients: sort numerically by trailing index so
    # f_rest_10 does not sort before f_rest_2.
    sh_keys = sorted(
        (k for k in field_names if k.startswith('f_rest_')),
        key=lambda s: int(s.split('_')[-1]),
    )
    sh_rest = _stack(*sh_keys) if sh_keys else None

    # Optional per-point 3D filter field (not present in every export).
    filter_3d = (vertex['filter_3D'][:, np.newaxis]
                 if 'filter_3D' in field_names else None)

    print(f"[read_ply] {os.path.basename(ply_path)}๏ผ{positions.shape[0]} ไธช้ซๆฏ็น")
    return {
        'positions': positions,
        'opacities': opacities,
        'scales': scales,
        'rotations': rotations,
        'dc': dc,
        'sh_rest': sh_rest,
        'filter_3d': filter_3d,
        'plydata': plydata,
        'sh_keys': sh_keys,
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
def load_codebook(codebook_dir: str, name: str):
    """Load one trained codebook from ``<codebook_dir>/<name>_codebook.npz``.

    Only the ``codebook`` array is used; any ``indices`` stored alongside it
    belong to the training set and are ignored at quantization time.

    Returns:
        (K, D) float32 codebook matrix.

    Raises:
        FileNotFoundError: if the expected .npz file does not exist.
    """
    path = os.path.join(codebook_dir, f"{name}_codebook.npz")
    if not os.path.exists(path):
        raise FileNotFoundError(f"ๆพไธๅฐ codebook ๆไปถ๏ผ{path}")
    with np.load(path) as npz:
        codebook = npz['codebook'].astype(np.float32)
    print(f"[load] {name}_codebook๏ผK={codebook.shape[0]}, D={codebook.shape[1]}")
    return codebook
|
|
|
|
|
|
|
|
|
|
|
|
|
def quantize(features: np.ndarray, codebook: np.ndarray, name: str,
             batch_size: int = 65536):
    """Map each feature vector to its nearest codebook entry (L2 distance).

    The search runs in batches so the full (N, K) distance matrix is never
    materialised at once.

    Args:
        features:   (N, D) array to quantize (cast to float32 internally).
        codebook:   (K, D) codebook matrix.
        name:       label used in the progress printout.
        batch_size: rows processed per batch (memory vs. speed trade-off).

    Returns:
        indices:       (N,) int32 codebook index for every point.
        reconstructed: (N, D) float32 de-quantized features.
        stats:         dict with 'rmse', 'max_err', 'cluster_usage' and
                       'codebook_size' (K — added so callers such as the
                       summary printer can report a real usage ratio).
    """
    features = features.astype(np.float32)
    N, D = features.shape
    K = codebook.shape[0]
    indices = np.empty(N, dtype=np.int32)

    # ||c||^2 is batch-invariant: compute it once outside the loop.
    cb_norm2 = np.sum(codebook ** 2, axis=1)

    t0 = time.time()
    for start in range(0, N, batch_size):
        end = min(start + batch_size, N)
        feat = features[start:end]

        feat_norm2 = np.sum(feat ** 2, axis=1, keepdims=True)

        # ||f - c||^2 = ||f||^2 + ||c||^2 - 2 f·c; exact value is irrelevant,
        # only the argmin over the codebook axis matters.
        dist2 = feat_norm2 + cb_norm2[np.newaxis, :] \
            - 2.0 * (feat @ codebook.T)
        indices[start:end] = np.argmin(dist2, axis=1)

    elapsed = time.time() - t0
    reconstructed = codebook[indices]

    diff = features - reconstructed
    rmse = float(np.sqrt(np.mean(diff ** 2)))
    max_e = float(np.abs(diff).max())
    usage = len(np.unique(indices))

    print(f"[{name:8s}] ้ๅๅฎๆ {elapsed:.1f}s | "
          f"RMSE={rmse:.6f} MaxErr={max_e:.6f} "
          f"ไฝฟ็จ {usage}/{K} ไธช cluster "
          f"({100*usage/K:.1f}%)")

    return indices, reconstructed, {'rmse': rmse, 'max_err': max_e,
                                    'cluster_usage': usage,
                                    'codebook_size': K}
|
|
|
|
|
|
|
|
|
|
|
|
|
def quantize_all(data: dict, codebook_dir: str):
    """Quantize every feature group of the scene against its own codebook.

    Args:
        data:         dict produced by ``read_ply``.
        codebook_dir: directory holding the four ``*_codebook.npz`` files.

    Returns:
        results:   {name: {'indices', 'reconstructed', 'stats'}}
        codebooks: {name: (K, D) np.ndarray}

    Raises:
        ValueError: when the .ply carried no f_rest_* (SH) fields.
    """
    feature_map = {
        'scale': data['scales'],
        'rotation': data['rotations'],
        'dc': data['dc'],
        'sh': data['sh_rest'],
    }

    if data['sh_rest'] is None:
        raise ValueError("PLY ไธญๆ f_rest_* ๅญๆฎต๏ผๆ ๆณ้ๅ SHใ")

    results, codebooks = {}, {}
    banner = '=' * 55
    for name, features in feature_map.items():
        print(f"\n{banner}")
        print(f" ้ๅ [{name}] ็นๅพ็ปดๅบฆ: {features.shape[1]}")
        print(f"{banner}")

        codebook = load_codebook(codebook_dir, name)
        codebooks[name] = codebook

        indices, reconstructed, stats = quantize(features, codebook, name)
        results[name] = {
            'indices': indices,
            'reconstructed': reconstructed,
            'stats': stats,
        }

    return results, codebooks
|
|
|
|
|
|
|
|
|
|
|
|
|
def save_quantized(save_path: str, data: dict, results: dict) -> None:
    """Save the quantized category indices and raw coords to a single .npz.

    Archive contents:
        scale_indices / rotation_indices / dc_indices / sh_indices  (N,) int32
        positions  (N, 3) float32  original coordinates (kept for alignment)
        opacities  (N, 1) float32  (not quantized)
    """
    payload = {
        'positions': data['positions'].astype(np.float32),
        'opacities': data['opacities'].astype(np.float32),
    }
    # One index array per quantized feature category.
    for category in ('scale', 'rotation', 'dc', 'sh'):
        payload[f'{category}_indices'] = results[category]['indices']

    np.savez_compressed(save_path, **payload)
    size_mb = os.path.getsize(save_path) / 1024 / 1024
    print(f"\n[ไฟๅญ] ้ๅ็ดขๅผ โ {save_path} ({size_mb:.2f} MB)")
|
|
|
|
|
|
|
|
|
|
|
|
|
def save_reconstructed_ply(
    save_path: str,
    data: dict,
    results: dict,
) -> None:
    """Write a new .ply whose quantizable features are replaced by their
    codebook reconstructions.

    positions and opacities are copied through unchanged (they are not
    quantized).
    """
    plydata = data['plydata']
    arr = plydata['vertex'].data.copy()

    # Each tuple of column names receives the matching reconstructed array,
    # column by column.
    column_map = {
        ('scale_0', 'scale_1', 'scale_2'):
            results['scale']['reconstructed'],
        ('rot_0', 'rot_1', 'rot_2', 'rot_3'):
            results['rotation']['reconstructed'],
        ('f_dc_0', 'f_dc_1', 'f_dc_2'):
            results['dc']['reconstructed'],
        tuple(data['sh_keys']):
            results['sh']['reconstructed'],
    }
    for columns, values in column_map.items():
        for i, col in enumerate(columns):
            arr[col] = values[:, i]

    new_vertex = PlyElement.describe(arr, 'vertex')
    # Preserve the original text/binary mode of the input file.
    PlyData([new_vertex], text=plydata.text).write(save_path)

    size_mb = os.path.getsize(save_path) / 1024 / 1024
    print(f"[ไฟๅญ] ้ๅปบ .ply โ {save_path} ({size_mb:.2f} MB)")
|
|
|
|
|
|
|
|
|
|
|
|
|
def print_summary(results: dict) -> None:
    """Print a per-feature table of quantization error and codebook usage.

    Expects each ``results[name]['stats']`` to carry 'rmse', 'max_err' and
    'cluster_usage'. When 'codebook_size' is also present (K), the usage
    ratio usage/K is reported; otherwise only the raw usage count is shown.

    BUG FIX: the previous version computed the percentage as
    ``100 * cluster_usage / cluster_usage`` (always 100%) and used
    ``len(np.unique(indices))`` — which equals cluster_usage by
    construction — as the denominator, so the ratio was meaningless.
    """
    print(f"\n{'='*55}")
    print(f" ้ๅๆฑๆป")
    print(f"{'='*55}")
    print(f" {'็นๅพ':<10} {'RMSE':>10} {'MaxErr':>10} {'Clusterไฝฟ็จ็':>14}")
    print(f" {'-'*46}")
    for name, res in results.items():
        s = res['stats']
        line = f" {name:<10} {s['rmse']:>10.6f} {s['max_err']:>10.6f}"
        total = s.get('codebook_size')
        if total:
            line += (f" {s['cluster_usage']:>5} / {total:>5}"
                     f" ({100 * s['cluster_usage'] / total:.1f}%)")
        else:
            # No codebook size recorded: report the count without a ratio.
            line += f" {s['cluster_usage']:>5}"
        print(line)
    print(f"{'='*55}")
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_args():
    """Build the CLI parser for this script and parse ``sys.argv``.

    Positional:  ply_path — the 3DGS .ply file to quantize.
    Options:     --codebook_dir, --save_dir, --save_ply (flag).
    """
    parser = argparse.ArgumentParser(
        description="็จๅทฒ่ฎญ็ป็ codebook ้ๅๆฐ็ 3DGS .ply ๆไปถ"
    )
    add = parser.add_argument
    add('ply_path', type=str,
        help='ๅพ้ๅ็ 3DGS .ply ๆไปถ่ทฏๅพ')
    add('--codebook_dir', type=str, default='./codebooks',
        help='ๅญๆพๅไธช *_codebook.npz ็็ฎๅฝ๏ผ้ป่ฎค๏ผ./codebooks๏ผ')
    add('--save_dir', type=str, default='./quantized',
        help='้ๅ็ปๆ่พๅบ็ฎๅฝ๏ผ้ป่ฎค๏ผ./quantized๏ผ')
    add('--save_ply', action='store_true',
        help='ๅๆถ่พๅบ็จ codebook ้ๅปบ็นๅพๅ็ .ply ๆไปถ')
    return parser.parse_args()
|
|
|
|
|
if __name__ == '__main__':
    # CLI entry point: quantize one .ply with pre-trained codebooks.
    args = parse_args()
    os.makedirs(args.save_dir, exist_ok=True)

    # 1) Load the raw gaussian attributes from the input .ply.
    data = read_ply(args.ply_path)

    # 2) Nearest-neighbour quantize each feature group with its codebook.
    results, codebooks = quantize_all(data, args.codebook_dir)

    # 3) Report per-feature reconstruction error and codebook usage.
    print_summary(results)

    # 4) Persist the per-point indices (plus untouched positions/opacities).
    scene_name = os.path.splitext(os.path.basename(args.ply_path))[0]
    npz_path = os.path.join(args.save_dir, f"{scene_name}_quantized.npz")
    save_quantized(npz_path, data, results)

    # 5) Optionally write a .ply whose features are codebook reconstructions.
    if args.save_ply:
        ply_out = os.path.join(args.save_dir, f"{scene_name}_reconstructed.ply")
        save_reconstructed_ply(ply_out, data, results)

    print("\nๅจ้จๅฎๆ๏ผ")