""" 3DGS Codebook Quantizer ======================== 使用已训练好的 codebook 对新的 3DGS .ply 文件进行量化, 将连续特征映射为离散索引,并可选地重建量化后的特征写回 .ply。 输出: _quantized.npz —— 四类索引 + 重建误差统计 _quantized.ply —— (可选)用 codebook 重建特征后写回的新 .ply """ import os import argparse import numpy as np from plyfile import PlyData, PlyElement import time # ───────────────────────────────────────────── # 1. PLY 读取(复用训练时的读法) # ───────────────────────────────────────────── def read_ply(ply_path: str) -> dict: plydata = PlyData.read(ply_path) vertex = plydata['vertex'] positions = np.stack([vertex['x'], vertex['y'], vertex['z']], axis=1) opacities = vertex['opacity'][:, np.newaxis] scales = np.stack([vertex['scale_0'], vertex['scale_1'], vertex['scale_2']], axis=1) rotations = np.stack([vertex['rot_0'], vertex['rot_1'], vertex['rot_2'], vertex['rot_3']], axis=1) dc = np.stack([vertex['f_dc_0'], vertex['f_dc_1'], vertex['f_dc_2']], axis=1) sh_keys = sorted( [k for k in vertex.data.dtype.names if k.startswith('f_rest_')], key=lambda s: int(s.split('_')[-1]) ) sh_rest = np.stack([vertex[k] for k in sh_keys], axis=1) \ if sh_keys else None filter_3d = None if 'filter_3D' in vertex.data.dtype.names: filter_3d = vertex['filter_3D'][:, np.newaxis] print(f"[read_ply] {os.path.basename(ply_path)}:{positions.shape[0]} 个高斯点") return { 'positions': positions, 'opacities': opacities, 'scales': scales, 'rotations': rotations, 'dc': dc, 'sh_rest': sh_rest, 'filter_3d': filter_3d, 'plydata': plydata, 'sh_keys': sh_keys, } # ───────────────────────────────────────────── # 2. 
# 加载 codebook -- codebook loading
# ─────────────────────────────────────────────
def load_codebook(codebook_dir: str, name: str):
    """Load one trained codebook from ``<codebook_dir>/<name>_codebook.npz``.

    Returns the (K, D) float32 codebook matrix. The ``indices`` saved at
    training time belong to the training set and are not used here.
    """
    path = os.path.join(codebook_dir, f"{name}_codebook.npz")
    if not os.path.exists(path):
        raise FileNotFoundError(f"找不到 codebook 文件:{path}")
    codebook = np.load(path)['codebook'].astype(np.float32)  # (K, D)
    print(f"[load] {name}_codebook:K={codebook.shape[0]}, D={codebook.shape[1]}")
    return codebook


# ─────────────────────────────────────────────
# 3. Nearest-neighbour quantization (core)
# ─────────────────────────────────────────────
def quantize(features: np.ndarray, codebook: np.ndarray, name: str,
             batch_size: int = 65536):
    """Assign every row of ``features`` (N, D) to its nearest codebook entry.

    Works in batches of ``batch_size`` rows so the full (N, K) distance
    matrix never has to be materialized at once.

    Returns:
        indices       (N,)   int32   codebook index per point
        reconstructed (N, D) float32 features rebuilt from the codebook
        stats         dict           {'rmse', 'max_err', 'cluster_usage'}
    """
    features = features.astype(np.float32)
    n_points = features.shape[0]
    n_clusters = codebook.shape[0]
    indices = np.empty(n_points, dtype=np.int32)

    # Expanded L2 distance: ||x - c||^2 = ||x||^2 + ||c||^2 - 2 x·c.
    # ||c||^2 is batch-invariant, so it is computed once up front.
    cb_sq = (codebook ** 2).sum(axis=1)  # (K,)

    start_time = time.time()
    for lo in range(0, n_points, batch_size):
        chunk = features[lo:lo + batch_size]               # (B, D)
        feat_sq = (chunk ** 2).sum(axis=1, keepdims=True)  # (B, 1)
        # (B, K) squared-distance matrix for this batch only.
        dist2 = feat_sq + cb_sq[np.newaxis, :] - 2.0 * (chunk @ codebook.T)
        indices[lo:lo + chunk.shape[0]] = dist2.argmin(axis=1)
    elapsed = time.time() - start_time

    reconstructed = codebook[indices]  # (N, D)

    # Reconstruction-error statistics.
    diff = features - reconstructed
    rmse = float(np.sqrt(np.mean(diff ** 2)))
    max_e = float(np.abs(diff).max())
    usage = len(np.unique(indices))  # how many clusters were actually hit

    print(f"[{name:8s}] 量化完成 {elapsed:.1f}s | "
          f"RMSE={rmse:.6f} MaxErr={max_e:.6f} "
          f"使用 {usage}/{n_clusters} 个 cluster "
          f"({100*usage/n_clusters:.1f}%)")
    return indices, reconstructed, {'rmse': rmse,
                                    'max_err': max_e,
                                    'cluster_usage': usage}


# ─────────────────────────────────────────────
# 4. Quantize all feature groups
# ─────────────────────────────────────────────
def quantize_all(data: dict, codebook_dir: str):
    """Quantize the scale / rotation / dc / sh features of a new scene.

    Loads the four trained codebooks from ``codebook_dir`` and runs
    nearest-neighbour quantization on each feature group in turn.

    Returns:
        results   dict {name: {'indices', 'reconstructed', 'stats'}}
        codebooks dict {name: (K, D) np.ndarray}
    """
    if data['sh_rest'] is None:
        raise ValueError("PLY 中无 f_rest_* 字段,无法量化 SH。")

    feature_map = {
        'scale': data['scales'],
        'rotation': data['rotations'],
        'dc': data['dc'],
        'sh': data['sh_rest'],
    }

    results, codebooks = {}, {}
    for name, features in feature_map.items():
        print(f"\n{'='*55}")
        print(f" 量化 [{name}] 特征维度: {features.shape[1]}")
        print(f"{'='*55}")
        codebooks[name] = load_codebook(codebook_dir, name)
        idx, recon, stats = quantize(features, codebooks[name], name)
        results[name] = {
            'indices': idx,
            'reconstructed': recon,
            'stats': stats,
        }
    return results, codebooks


# ─────────────────────────────────────────────
# 5. Save quantization result (indices + stats)
# ─────────────────────────────────────────────
def save_quantized(save_path: str, data: dict, results: dict) -> None:
    """Write the four index arrays plus positions/opacities to one .npz.

    File contents:
        scale_indices / rotation_indices / dc_indices / sh_indices (N,) int32
        positions (N, 3) float32 -- original coordinates, kept for alignment
        opacities (N, 1) float32
    """
    payload = {
        'positions': data['positions'].astype(np.float32),
        'opacities': data['opacities'].astype(np.float32),
    }
    for name in ('scale', 'rotation', 'dc', 'sh'):
        payload[f'{name}_indices'] = results[name]['indices']
    np.savez_compressed(save_path, **payload)
    size_mb = os.path.getsize(save_path) / 1024 / 1024
    print(f"\n[保存] 量化索引 → {save_path} ({size_mb:.2f} MB)")


# ─────────────────────────────────────────────
# 6.
# (可选)写回量化重建的 .ply -- optional write-back of the reconstructed .ply
# ─────────────────────────────────────────────
def save_reconstructed_ply(
    save_path: str,
    data: dict,
    results: dict,
) -> None:
    """Replace the original features with their codebook reconstructions
    and write out a new .ply file.

    positions and opacities stay unchanged (they were not quantized).
    Extra fields such as filter_3D also survive, because the whole
    structured vertex array is copied before the quantized fields are
    overwritten.
    """
    plydata = data['plydata']
    vertex = plydata['vertex']
    sh_keys = data['sh_keys']

    # ── reconstructed values ──────────────────
    scales_r = results['scale']['reconstructed']      # (N, 3)
    rotations_r = results['rotation']['reconstructed']  # (N, 4)
    dc_r = results['dc']['reconstructed']             # (N, 3)
    # (N, M) with M = len(sh_keys); M depends on the SH degree of the scene.
    sh_r = results['sh']['reconstructed']

    # ── overwrite the structured vertex array field by field ──
    arr = vertex.data.copy()
    arr['scale_0'] = scales_r[:, 0]
    arr['scale_1'] = scales_r[:, 1]
    arr['scale_2'] = scales_r[:, 2]
    arr['rot_0'] = rotations_r[:, 0]
    arr['rot_1'] = rotations_r[:, 1]
    arr['rot_2'] = rotations_r[:, 2]
    arr['rot_3'] = rotations_r[:, 3]
    arr['f_dc_0'] = dc_r[:, 0]
    arr['f_dc_1'] = dc_r[:, 1]
    arr['f_dc_2'] = dc_r[:, 2]
    for i, key in enumerate(sh_keys):
        arr[key] = sh_r[:, i]

    # ── write the new ply, preserving the original text/binary mode ──
    new_vertex = PlyElement.describe(arr, 'vertex')
    new_plydata = PlyData([new_vertex], text=plydata.text)
    new_plydata.write(save_path)
    size_mb = os.path.getsize(save_path) / 1024 / 1024
    print(f"[保存] 重建 .ply → {save_path} ({size_mb:.2f} MB)")


# ─────────────────────────────────────────────
# 7. Print summary statistics
# ─────────────────────────────────────────────
def print_summary(results: dict, codebooks: dict = None) -> None:
    """Print a per-feature summary table of quantization error and usage.

    Args:
        results:   {name: {'stats': {'rmse', 'max_err', 'cluster_usage'}, ...}}
        codebooks: optional {name: (K, D) codebook}; when given, the usage
                   ratio usage/K is shown, otherwise only the raw count.

    Bug fix: the usage percentage used to be computed as usage/usage
    (always 100%), and the denominator shown was len(np.unique(indices)),
    which *is* the usage count. The real codebook size K is only known
    from the codebooks themselves, hence the new optional parameter.
    """
    print(f"\n{'='*55}")
    print(f" 量化汇总")
    print(f"{'='*55}")
    print(f" {'特征':<10} {'RMSE':>10} {'MaxErr':>10} {'Cluster使用率':>14}")
    print(f" {'-'*46}")
    for name, res in results.items():
        s = res['stats']
        usage = s['cluster_usage']
        if codebooks is not None and name in codebooks:
            K = codebooks[name].shape[0]
            usage_str = f"{usage:>5} / {K:>5} ({100*usage/K:.0f}%)"
        else:
            # Codebook size unknown here -- only the raw count is honest.
            usage_str = f"{usage:>5}"
        print(f" {name:<10} {s['rmse']:>10.6f} {s['max_err']:>10.6f} "
              f" {usage_str}")
    print(f"{'='*55}")


# ─────────────────────────────────────────────
# 8.
# CLI 入口 -- command-line entry point
# ─────────────────────────────────────────────
def parse_args():
    """Build and parse the command-line arguments for the quantizer CLI."""
    parser = argparse.ArgumentParser(
        description="用已训练的 codebook 量化新的 3DGS .ply 文件"
    )
    parser.add_argument('ply_path', type=str,
                        help='待量化的 3DGS .ply 文件路径')
    parser.add_argument('--codebook_dir', type=str, default='./codebooks',
                        help='存放四个 *_codebook.npz 的目录(默认:./codebooks)')
    parser.add_argument('--save_dir', type=str, default='./quantized',
                        help='量化结果输出目录(默认:./quantized)')
    parser.add_argument('--save_ply', action='store_true',
                        help='同时输出用 codebook 重建特征后的 .ply 文件')
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()
    os.makedirs(args.save_dir, exist_ok=True)

    # Load the new scene and quantize every feature group.
    data = read_ply(args.ply_path)
    results, codebooks = quantize_all(data, args.codebook_dir)

    # Report per-feature reconstruction error.
    print_summary(results)

    # Persist the per-point indices next to the original positions.
    scene_name = os.path.splitext(os.path.basename(args.ply_path))[0]
    npz_path = os.path.join(args.save_dir, f"{scene_name}_quantized.npz")
    save_quantized(npz_path, data, results)

    # Optionally also write a .ply whose features come from the codebooks.
    if args.save_ply:
        ply_out = os.path.join(args.save_dir,
                               f"{scene_name}_reconstructed.ply")
        save_reconstructed_ply(ply_out, data, results)

    print("\n全部完成!")