# LLFF / bulid_codebook.py
# SlekLi's picture
# Upload bulid_codebook.py
# 16c1081 verified
"""
3DGS Codebook Builder
=====================
ไฝฟ็”จ KMeans ๅฏน 3D Gaussian Splatting ๆจกๅž‹็š„ๅ››็ฑป็‰นๅพๅˆ†ๅˆซๆž„ๅปบ codebook๏ผš
- scale (3็ปด) โ†’ 16384 ไธช็ฆปๆ•ฃ็ดขๅผ•
- rotation (4็ปด) โ†’ 16384 ไธช็ฆปๆ•ฃ็ดขๅผ•
- DC (3็ปด) โ†’ 4096 ไธช็ฆปๆ•ฃ็ดขๅผ•
- SH rest (45็ปด) โ†’ 4096 ไธช็ฆปๆ•ฃ็ดขๅผ•
ๆฏไธช codebook ๅ•็‹ฌไฟๅญ˜ไธบ .npz ๆ–‡ไปถ๏ผŒๅŒ…ๅซ๏ผš
- codebook : (K, D) float32 โ€”โ€” ่š็ฑปไธญๅฟƒ
- indices : (N,) int32 โ€”โ€” ๆฏไธช้ซ˜ๆ–ฏ็‚นๅฏนๅบ”็š„็ดขๅผ•
"""
import os
import argparse
import numpy as np
from plyfile import PlyData
from sklearn.cluster import MiniBatchKMeans
import time
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# 1. PLY ่ฏปๅ–
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
def read_ply(ply_path: str) -> dict:
    """Load a 3DGS .ply file and return its per-Gaussian attributes.

    Returns a dict with:
        positions (N, 3), opacities (N, 1), scales (N, 3), rotations (N, 4),
        dc (N, 3), sh_rest (N, 45) or None, filter_3d (N, 1) or None,
        plus the raw PlyData handle under 'plydata'.
    """
    plydata = PlyData.read(ply_path)
    vertex = plydata['vertex']

    def stack_cols(*prop_names):
        # Stack 1-D vertex properties into an (N, len(prop_names)) array.
        return np.stack([vertex[p] for p in prop_names], axis=1)

    positions = stack_cols('x', 'y', 'z')                         # (N, 3)
    opacities = vertex['opacity'][:, np.newaxis]                  # (N, 1)
    scales = stack_cols('scale_0', 'scale_1', 'scale_2')          # (N, 3)
    rotations = stack_cols('rot_0', 'rot_1', 'rot_2', 'rot_3')    # (N, 4)
    dc = stack_cols('f_dc_0', 'f_dc_1', 'f_dc_2')                 # (N, 3)

    # Higher-order SH coefficients live in f_rest_0 .. f_rest_44; sort by the
    # numeric suffix (lexical order would put f_rest_10 before f_rest_2).
    sh_keys = sorted(
        [k for k in vertex.data.dtype.names if k.startswith('f_rest_')],
        key=lambda s: int(s.split('_')[-1]),
    )
    sh_rest = stack_cols(*sh_keys) if sh_keys else None           # (N, 45)

    # filter_3D is optional (present only in some 3DGS variants).
    filter_3d = None
    if 'filter_3D' in vertex.data.dtype.names:
        filter_3d = vertex['filter_3D'][:, np.newaxis]            # (N, 1)

    print(f"[read_ply] ่ฏปๅ–ๅฎŒๆˆ๏ผš{positions.shape[0]} ไธช้ซ˜ๆ–ฏ็‚น")
    if sh_rest is not None:
        print(f" SH rest ็ปดๅบฆ๏ผš{sh_rest.shape[1]} "
              f"๏ผˆๆœŸๆœ› 45 = 15 ็ƒ่ฐ็ณปๆ•ฐ ร— 3 ้€š้“๏ผ‰")

    return {
        'positions': positions,
        'opacities': opacities,
        'scales': scales,
        'rotations': rotations,
        'dc': dc,
        'sh_rest': sh_rest,
        'filter_3d': filter_3d,
        'plydata': plydata,
    }
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# 2. KMeans ่š็ฑป๏ผˆMiniBatchKMeans๏ผŒ้€Ÿๅบฆๅฟซ๏ผ‰
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
def build_codebook(
    features: np.ndarray,
    n_clusters: int,
    name: str,
    random_state: int = 42,
    batch_size: int = 65536,
    max_iter: int = 300,
) -> tuple[np.ndarray, np.ndarray]:
    """Run MiniBatchKMeans on `features` (N, D) and return the codebook.

    Args:
        features:     (N, D) array of one feature group (scale/rotation/dc/sh).
        n_clusters:   target codebook size K (clamped to N if N < K).
        name:         label used only for log messages.
        random_state: seed for reproducible clustering.
        batch_size:   mini-batch size (clamped to N).
        max_iter:     maximum number of KMeans iterations.

    Returns:
        codebook : (K, D) float32 -- cluster centers
        indices  : (N,)  int32    -- per-point center assignment

    Raises:
        ValueError: if `features` is empty (sklearn would otherwise fail
        deep inside `fit` with an obscure message).
    """
    N, D = features.shape
    if N == 0:
        raise ValueError(f"[{name}] features is empty (N=0); cannot build a codebook.")
    # If there are fewer points than clusters, one cluster per point.
    K = min(n_clusters, N)
    if K < n_clusters:
        print(f"[{name}] ่ญฆๅ‘Š๏ผš้ซ˜ๆ–ฏ็‚นๆ•ฐ ({N}) < ็›ฎๆ ‡ cluster ๆ•ฐ ({n_clusters})๏ผŒ"
              f"่‡ชๅŠจ่ฐƒๆ•ดไธบ K={K}")
    print(f"[{name}] ๅผ€ๅง‹ KMeans๏ผšN={N}, D={D}, K={K} ...")
    t0 = time.time()
    kmeans = MiniBatchKMeans(
        n_clusters=K,
        batch_size=min(batch_size, N),
        max_iter=max_iter,
        random_state=random_state,
        n_init=3,
        verbose=0,
    )
    # asarray avoids a copy when the input is already float32 (astype always copies).
    kmeans.fit(np.asarray(features, dtype=np.float32))
    codebook = kmeans.cluster_centers_.astype(np.float32)  # (K, D)
    indices = kmeans.labels_.astype(np.int32)              # (N,)
    elapsed = time.time() - t0
    inertia = kmeans.inertia_
    print(f"[{name}] ๅฎŒๆˆ๏ผ่€—ๆ—ถ {elapsed:.1f}s | inertia={inertia:.4f}")
    print(f" codebook shape: {codebook.shape} | "
          f"็ดขๅผ•่Œƒๅ›ด: [{indices.min()}, {indices.max()}]")
    return codebook, indices
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# 3. ไฟๅญ˜ๅ•ไธช codebook
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
def save_codebook(
    save_dir: str,
    name: str,
    codebook: np.ndarray,
    indices: np.ndarray,
) -> None:
    """Persist one codebook and its index array as <save_dir>/<name>_codebook.npz."""
    os.makedirs(save_dir, exist_ok=True)
    npz_path = os.path.join(save_dir, f"{name}_codebook.npz")
    np.savez_compressed(npz_path, codebook=codebook, indices=indices)
    # Report the compressed on-disk size in MB.
    size_mb = os.path.getsize(npz_path) / (1024 * 1024)
    print(f"[{name}] ๅทฒไฟๅญ˜ โ†’ {npz_path} ({size_mb:.2f} MB)")
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# 4. ไธปๆต็จ‹
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# Feature-group name -> number of KMeans clusters (codebook size).
# Geometry terms (scale/rotation) get 2**14 entries; color terms get 2**12.
CODEBOOK_CONFIG = {
    'scale': 16384,
    'rotation': 16384,
    'dc': 4096,
    'sh': 4096,
}
def build_all_codebooks(
    ply_path: str,
    save_dir: str,
    random_state: int = 42,
) -> dict:
    """Read the PLY, cluster each of the four feature groups, save each codebook.

    Returns:
        {'scale': (codebook, indices), 'rotation': ..., 'dc': ..., 'sh': ...}

    Raises:
        ValueError: if the PLY carries no f_rest_* fields (no SH data).
    """
    # -- load data ------------------------------------------------
    attrs = read_ply(ply_path)
    if attrs['sh_rest'] is None:
        raise ValueError("PLY ๆ–‡ไปถไธญๆœชๆ‰พๅˆฐ f_rest_* ๅญ—ๆฎต๏ผŒๆ— ๆณ•ๆž„ๅปบ SH codebookใ€‚")
    feature_map = {
        'scale': attrs['scales'],        # (N, 3)
        'rotation': attrs['rotations'],  # (N, 4)
        'dc': attrs['dc'],               # (N, 3)
        'sh': attrs['sh_rest'],          # (N, 45) high-order SH, DC removed
    }
    # -- cluster and save each group ------------------------------
    results = {}
    for name, target_k in CODEBOOK_CONFIG.items():
        feats = feature_map[name]
        print(f"\n{'='*55}")
        print(f" ๆž„ๅปบ [{name}] codebook | ็‰นๅพ็ปดๅบฆ: {feats.shape[1]}"
              f" | ็›ฎๆ ‡ K: {target_k}")
        print(f"{'='*55}")
        codebook, indices = build_codebook(
            feats,
            n_clusters=target_k,
            name=name,
            random_state=random_state,
        )
        save_codebook(save_dir, name, codebook, indices)
        results[name] = (codebook, indices)
    print(f"\n{'='*55}")
    print(" ๆ‰€ๆœ‰ codebook ๆž„ๅปบๅฎŒๆฏ•๏ผ")
    print(f" ่พ“ๅ‡บ็›ฎๅฝ•๏ผš{os.path.abspath(save_dir)}")
    print(f"{'='*55}")
    return results
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# 5. ้ชŒ่ฏ๏ผšไปŽ codebook ้‡ๅปบ็‰นๅพๅนถ่ฎก็ฎ—่ฏฏๅทฎ
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
def evaluate_codebooks(
    ply_path: str,
    save_dir: str,
) -> None:
    """Reconstruct each feature group from its saved codebook and report error.

    Loads the four saved <name>_codebook.npz files, rebuilds the features via
    codebook[indices], and prints per-group RMSE and max absolute error.
    Missing codebook files are skipped, as is 'sh' when the PLY carries no
    f_rest_* fields (sh_rest is None in that case).
    """
    data = read_ply(ply_path)
    feature_map = {
        'scale': data['scales'],
        'rotation': data['rotations'],
        'dc': data['dc'],
        'sh': data['sh_rest'],  # may be None if the PLY has no f_rest_* fields
    }
    print("\n[่ฏ„ไผฐ] ้‡ๅปบ่ฏฏๅทฎ๏ผˆRMSE๏ผ‰๏ผš")
    for name in CODEBOOK_CONFIG:
        path = os.path.join(save_dir, f"{name}_codebook.npz")
        if not os.path.exists(path):
            print(f" [{name}] ๆ–‡ไปถไธๅญ˜ๅœจ๏ผŒ่ทณ่ฟ‡")
            continue
        if feature_map[name] is None:
            # No source features to compare against; skip instead of crashing
            # on None.astype below.
            continue
        # NpzFile keeps the archive open; the context manager closes it
        # deterministically (the arrays read inside remain valid after).
        with np.load(path) as npz:
            codebook = npz['codebook']  # (K, D)
            indices = npz['indices']    # (N,)
        original = feature_map[name].astype(np.float32)
        reconstructed = codebook[indices]  # (N, D)
        rmse = np.sqrt(np.mean((original - reconstructed) ** 2))
        max_err = np.abs(original - reconstructed).max()
        print(f" [{name:8s}] K={codebook.shape[0]:6d} D={codebook.shape[1]:3d}"
              f" RMSE={rmse:.6f} MaxErr={max_err:.6f}")
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
# 6. CLI ๅ…ฅๅฃ
# โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
def parse_args():
    """Parse command-line arguments.

    Note: `ply_path` is declared with nargs='?' — without it, argparse
    treats a positional as required and silently ignores its `default`,
    so the declared default path could never take effect.
    """
    parser = argparse.ArgumentParser(
        description="ไธบ 3DGS .ply ๆ–‡ไปถๆž„ๅปบๅ››ไธช KMeans codebook"
    )
    parser.add_argument('ply_path', type=str, nargs='?',
                        default="./merge/original_3dgs.ply",
                        help='่พ“ๅ…ฅ็š„ 3DGS .ply ๆ–‡ไปถ่ทฏๅพ„')
    parser.add_argument('--save_dir', type=str, default='./codebooks',
                        help='codebook ไฟๅญ˜็›ฎๅฝ•๏ผˆ้ป˜่ฎค๏ผš./codebooks๏ผ‰')
    parser.add_argument('--seed', type=int, default=42,
                        help='้šๆœบ็งๅญ๏ผˆ้ป˜่ฎค๏ผš42๏ผ‰')
    parser.add_argument('--evaluate', action='store_true',
                        help='ๆž„ๅปบๅฎŒๆˆๅŽ่ฎก็ฎ— RMSE ้‡ๅปบ่ฏฏๅทฎ')
    return parser.parse_args()
if __name__ == '__main__':
    # CLI entry point: build all four codebooks from the given PLY, then
    # optionally measure reconstruction error when --evaluate is passed.
    args = parse_args()
    build_all_codebooks(
        ply_path=args.ply_path,
        save_dir=args.save_dir,
        random_state=args.seed,
    )
    if args.evaluate:
        # Re-reads the PLY and compares codebook-reconstructed features
        # against the originals (prints RMSE / max error per group).
        evaluate_codebooks(
            ply_path=args.ply_path,
            save_dir=args.save_dir,
        )