# ChessBot-Dataset / raw2dataset.py
# Author: KeithG33 — "update for lc0 train data" (commit d42df3a)
"""
raw2dataset.py — Convert raw PGN data into sharded train/test .pgn.zst files.
Each worker gets a subset of input files and writes its own output shards
directly — no serialization bottleneck. Games are distributed round-robin
across each worker's shards for variety within files. Input files are shuffled
and distributed round-robin across workers so each worker gets a mix of sources.
Each shard targets ~POSITIONS_PER_SHARD positions.
Usage:
python raw2dataset.py
"""
import io
import time
import random
from pathlib import Path
from multiprocessing import Pool, Value
import chess.pgn
import zstandard as zstd
from tqdm import tqdm
# ── Configuration ──
# NOTE(review): paths are machine-specific — adjust before running elsewhere.
RAW_DATA_DIR = Path("/home/kage/chess_workspace/chess-drive2/pgn-data/raw_data")  # scanned recursively for .pgn/.pgn.zst
OUTPUT_DIR = Path("/home/kage/chess_workspace/chess-drive2/ChessBot-Dataset")
TRAIN_DIR = OUTPUT_DIR / "train"  # receives train_NNNN.pgn.zst shards
TEST_DIR = OUTPUT_DIR / "test"  # receives test_NNNN.pgn.zst shards
POSITIONS_PER_SHARD = 2_000_000  # approximate positions per output shard (rollover threshold)
TEST_RATIO = 0.005 # 0.5% of games go to test
ZSTD_LEVEL = 10  # zstd compression level for output shards
NUM_WORKERS = 32  # max worker processes (actual pool size capped by number of non-empty tasks)
def find_input_files(raw_dir: Path) -> list[Path]:
    """Recursively collect every .pgn and .pgn.zst file under *raw_dir*, sorted by path."""
    patterns = ("**/*.pgn", "**/*.pgn.zst")
    return sorted(hit for pat in patterns for hit in raw_dir.glob(pat))
def open_pgn_reader(path: Path):
    """Open *path* for text reading, transparently decompressing ``.pgn.zst``.

    Raises ValueError for anything that is not a .pgn or .pgn.zst file.
    Undecodable bytes are replaced rather than raising (errors="replace").
    """
    # A ".pgn.zst" (or more generally "*.pgn*.zst") file: layer a streaming
    # zstd decompressor under a text wrapper.
    if path.suffix == ".zst" and ".pgn" in path.name:
        raw = path.open("rb")
        reader = zstd.ZstdDecompressor().stream_reader(raw)
        return io.TextIOWrapper(reader, encoding="utf-8", errors="replace")
    if path.suffix == ".pgn":
        return path.open("r", encoding="utf-8", errors="replace")
    raise ValueError(f"Unsupported file: {path}")
def count_positions(game) -> int:
    """Return the number of mainline nodes (positions/moves) in *game*."""
    total = 0
    for _node in game.mainline():
        total += 1
    return total
class ShardWriter:
    """Streams games into sequentially numbered ``<prefix>_NNNN.pgn.zst`` files.

    A fresh shard is started once the current one holds at least
    ``positions_per_shard`` positions.  Shard ids are claimed from a shared
    multiprocessing counter so concurrent workers emit unique, globally
    sequential file names.
    """

    def __init__(self, output_dir: Path, prefix: str, positions_per_shard: int, counter):
        self.output_dir = output_dir
        self.prefix = prefix
        self.positions_per_shard = positions_per_shard
        self.counter = counter  # shared Value: next global shard id
        # Running totals for this writer instance.
        self.positions_in_shard = 0
        self.total_positions = 0
        self.total_games = 0
        self.num_shards = 0
        self.writer = None
        # Open the first shard eagerly so write_game can assume a live writer.
        self._open_new_shard()

    def _next_shard_id(self) -> int:
        """Atomically claim the next global shard number from the shared counter."""
        with self.counter.get_lock():
            claimed = self.counter.value
            self.counter.value = claimed + 1
        return claimed

    def _open_new_shard(self):
        """Close the current shard (if any) and start a new compressed one."""
        if self.writer is not None:
            self.writer.close()
        shard_path = self.output_dir / f"{self.prefix}_{self._next_shard_id():04d}.pgn.zst"
        compressor = zstd.ZstdCompressor(level=ZSTD_LEVEL)
        self.writer = io.TextIOWrapper(
            compressor.stream_writer(shard_path.open("wb")), encoding="utf-8"
        )
        self.positions_in_shard = 0
        self.num_shards += 1

    def write_game(self, pgn_text: str, num_positions: int):
        """Append one game, rolling over to a new shard first if the current is full."""
        if self.positions_in_shard >= self.positions_per_shard:
            self._open_new_shard()
        self.writer.write(pgn_text + "\n\n")
        self.positions_in_shard += num_positions
        self.total_positions += num_positions
        self.total_games += 1

    def close(self):
        """Flush and close the active shard writer (safe to call more than once)."""
        if self.writer is not None:
            self.writer.close()
            self.writer = None
def _init_worker(train_ctr, test_ctr, file_ctr):
    """Pool initializer: install the shared counters as module globals.

    multiprocessing.Value objects must be handed to children at pool creation
    (via initializer/initargs) rather than pickled per task, so each worker
    stashes them here once at startup.
    """
    global _train_counter, _test_counter, _file_counter
    _train_counter, _test_counter, _file_counter = train_ctr, test_ctr, file_ctr
def worker_process(args):
    """Convert this worker's slice of input files into train/test shards.

    Reads every game from each assigned file, routes each game to the test
    split with probability TEST_RATIO (train otherwise), writes through this
    worker's own ShardWriters, and bumps the shared file counter after every
    file for progress reporting.  Returns a per-worker stats dict.
    """
    worker_id, input_files = args
    writers = {
        "train": ShardWriter(TRAIN_DIR, "train", POSITIONS_PER_SHARD, _train_counter),
        "test": ShardWriter(TEST_DIR, "test", POSITIONS_PER_SHARD, _test_counter),
    }
    for path in input_files:
        try:
            with open_pgn_reader(path) as stream:
                while True:
                    game = chess.pgn.read_game(stream)
                    if game is None:
                        break
                    n_positions = count_positions(game)
                    if n_positions == 0:
                        continue  # skip move-less games
                    exporter = chess.pgn.StringExporter(headers=True, variations=True, comments=True)
                    text = game.accept(exporter)
                    split = "test" if random.random() < TEST_RATIO else "train"
                    writers[split].write_game(text, n_positions)
        except Exception as e:
            # Best-effort: one corrupt file must not kill the whole worker.
            print(f"Worker {worker_id} error reading {path}: {e}")
        # Count the file as processed whether it succeeded or not.
        with _file_counter.get_lock():
            _file_counter.value += 1
    for shard_writer in writers.values():
        shard_writer.close()
    return {
        "worker_id": worker_id,
        "train_games": writers["train"].total_games,
        "train_positions": writers["train"].total_positions,
        "train_shards": writers["train"].num_shards,
        "test_games": writers["test"].total_games,
        "test_positions": writers["test"].total_positions,
        "test_shards": writers["test"].num_shards,
        "files_processed": len(input_files),
    }
def main():
    """Shard the raw PGN corpus into train/test .pgn.zst files in parallel."""
    for directory in (TRAIN_DIR, TEST_DIR):
        directory.mkdir(parents=True, exist_ok=True)

    input_files = find_input_files(RAW_DATA_DIR)
    random.shuffle(input_files)
    print(f"Found {len(input_files)} input files")

    # Deal shuffled files round-robin so every worker sees a mix of sources;
    # drop workers that would receive no files.
    worker_files: list[list[Path]] = [[] for _ in range(NUM_WORKERS)]
    for idx, path in enumerate(input_files):
        worker_files[idx % NUM_WORKERS].append(path)
    tasks = [(wid, files) for wid, files in enumerate(worker_files) if files]
    print(f"Distributing across {len(tasks)} workers...")

    # Shared state: global shard numbering + processed-file progress counter.
    train_counter = Value('i', 0)
    test_counter = Value('i', 0)
    file_counter = Value('i', 0)
    total_files = len(input_files)

    with Pool(processes=len(tasks), initializer=_init_worker,
              initargs=(train_counter, test_counter, file_counter)) as pool:
        async_result = pool.map_async(worker_process, tasks)
        # Poll the shared counter to drive the progress bar while workers run.
        with tqdm(total=total_files, desc="Processing files", unit="file") as pbar:
            while not async_result.ready():
                pbar.n = file_counter.value
                pbar.refresh()
                time.sleep(0.5)
            pbar.n = total_files
            pbar.refresh()
        worker_stats = async_result.get()

    # Aggregate per-worker stats into corpus totals.
    totals = {key: 0 for key in (
        "train_games", "train_positions", "train_shards",
        "test_games", "test_positions", "test_shards")}
    for stats in worker_stats:
        for key in totals:
            totals[key] += stats[key]

    print("\nDone!")
    print(f" Train: {totals['train_games']:,} games, {totals['train_positions']:,} positions, {totals['train_shards']} shards")
    print(f" Test: {totals['test_games']:,} games, {totals['test_positions']:,} positions, {totals['test_shards']} shards")


if __name__ == "__main__":
    main()