import { useRef, useEffect, useState, useCallback } from "react";

// Props for the waveform player. `duration` is accepted but not read in the
// visible code — presumably consumed by the (missing) JSX; verify against callers.
interface WaveformPlayerProps { audioUrl: string; duration?: number | null; }

/**
 * Audio player that renders a clickable bar-style waveform on a <canvas>.
 *
 * Pipeline visible in this chunk:
 *  1. Fetch + decode `audioUrl` with the Web Audio API and reduce channel 0
 *     to 100 normalized amplitude peaks (stored in `waveformRef`).
 *  2. A requestAnimationFrame loop redraws the waveform every frame,
 *     coloring bars by playback progress and hover position.
 *  3. `togglePlay` / `seek` / `handleMouseMove` / `fmt` are UI handlers.
 *
 * NOTE(review): the `return ( );` at the bottom is empty — the JSX body
 * appears to have been stripped from this view. Several hooks/handlers
 * (`playing`, `currentTime`, `totalDuration`, `seek`, `togglePlay`,
 * `handleMouseMove`, `fmt`, `setHovering`, `setTotalDuration`) are unused in
 * the visible code and are presumably referenced by that missing markup.
 */
export default function WaveformPlayer({ audioUrl, duration }: WaveformPlayerProps) {
  // NOTE(review): refs are untyped — under `strict` these infer as
  // `RefObject<null>`; they should be `useRef<HTMLCanvasElement | null>(null)`,
  // `useRef<HTMLAudioElement | null>(null)`, and `useRef<number[]>([])`.
  const canvasRef = useRef(null);          // the <canvas> element (set by JSX)
  const audioRef = useRef(null);           // the <audio> element (set by JSX)
  const animRef = useRef(0);               // current requestAnimationFrame id
  const waveformRef = useRef([]);          // normalized peaks in [0, 1], 100 entries
  const [playing, setPlaying] = useState(false);
  const [currentTime, setCurrTime] = useState(0);
  const [totalDuration, setTotalDuration] = useState(0);
  const [hovering, setHovering] = useState(false);   // only set via JSX — TODO confirm
  const [hoverX, setHoverX] = useState(0);           // hover x in CSS pixels, canvas-relative

  // Decode audio and compute waveform peaks
  useEffect(() => {
    if (!audioUrl) return;
    const ctx = new AudioContext();
    fetch(audioUrl)
      .then((r) => r.arrayBuffer())
      .then((buf) => ctx.decodeAudioData(buf))
      .then((decoded) => {
        // Average absolute amplitude of channel 0 over 100 equal blocks.
        const raw = decoded.getChannelData(0);
        const bars = 100;
        const blockSize = Math.floor(raw.length / bars);
        const peaks: number[] = [];
        for (let i = 0; i < bars; i++) {
          let sum = 0;
          for (let j = 0; j < blockSize; j++) {
            sum += Math.abs(raw[i * blockSize + j]);
          }
          peaks.push(sum / blockSize);
        }
        // Normalize — the 0.01 floor guards against division by zero on silence.
        const max = Math.max(...peaks, 0.01);
        waveformRef.current = peaks.map((p) => p / max);
        // NOTE(review): this closure captures the first render's `drawWaveform`
        // (the effect deps are only [audioUrl]); harmless here since the rAF
        // loop redraws every frame anyway, but worth confirming.
        drawWaveform();
        ctx.close();
      })
      // NOTE(review): errors are swallowed, and on fetch/decode failure the
      // AudioContext is never closed (leak). There is also no AbortController /
      // cancellation, so a rapid `audioUrl` change can race: a stale decode may
      // overwrite `waveformRef` after a newer one.
      .catch(() => {});
  }, [audioUrl]);

  // Redraws the entire waveform. Called every animation frame, so it also
  // re-sizes the backing store for devicePixelRatio on each call (setting
  // canvas.width resets the 2d context transform, so the scale() does not
  // accumulate across frames — but it does reallocate the bitmap per frame).
  const drawWaveform = useCallback(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    const ctx = canvas.getContext("2d");
    if (!ctx) return;
    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);
    const w = rect.width;
    const h = rect.height;
    const peaks = waveformRef.current;
    const bars = peaks.length || 1;  // avoid division by zero before decode finishes
    const audio = audioRef.current;
    // Playback progress in [0, 1]; 0 until the audio element reports a duration.
    const progress = audio && audio.duration ? audio.currentTime / audio.duration : 0;
    ctx.clearRect(0, 0, w, h);
    const barWidth = (w / bars) * 0.7;  // 70% bar / 30% gap split per slot
    const gap = (w / bars) * 0.3;
    const mid = h / 2;
    for (let i = 0; i < bars; i++) {
      const x = (i / bars) * w;
      // Bars are mirrored around the vertical midpoint; 2px minimum height.
      const barH = Math.max(2, (peaks[i] || 0) * mid * 0.9);
      const iPlayed = i / bars < progress;
      // Played portion: solid purple; hovered-but-unplayed: translucent purple;
      // rest: gray.
      ctx.fillStyle = iPlayed ? "#c084fc" : hovering && x < hoverX ? "rgba(192,132,252,0.4)" : "#444";
      ctx.beginPath();
      // NOTE(review): roundRect is relatively new (Chrome 99+/Safari 16+);
      // confirm browser-support requirements.
      ctx.roundRect(x + gap / 2, mid - barH, barWidth, barH * 2, 1.5);
      ctx.fill();
    }
  }, [hovering, hoverX]);

  // Animation loop
  useEffect(() => {
    const tick = () => {
      const audio = audioRef.current;
      // NOTE(review): updating state every frame re-renders the component at
      // ~60fps while mounted; consider throttling or deriving display time
      // from the audio element directly.
      if (audio) setCurrTime(audio.currentTime);
      drawWaveform();
      animRef.current = requestAnimationFrame(tick);
    };
    animRef.current = requestAnimationFrame(tick);
    return () => cancelAnimationFrame(animRef.current);
  }, [drawWaveform]);

  // Toggles playback and mirrors the element's paused state into `playing`.
  // NOTE(review): no `ended` listener is visible, so `playing` stays true
  // after the track finishes — confirm the missing JSX handles onEnded.
  // `audio.play()` returns a promise whose rejection (e.g. autoplay policy)
  // is unhandled here.
  const togglePlay = () => {
    const audio = audioRef.current;
    if (!audio) return;
    if (audio.paused) {
      audio.play();
      setPlaying(true);
    } else {
      audio.pause();
      setPlaying(false);
    }
  };

  // Click-to-seek: maps the click's x position within the canvas to a
  // proportional position in the track. No-op until metadata has loaded
  // (audio.duration is NaN/0 before then).
  const seek = (e: React.MouseEvent) => {
    const audio = audioRef.current;
    const canvas = canvasRef.current;
    if (!audio || !canvas || !audio.duration) return;
    const rect = canvas.getBoundingClientRect();
    const ratio = (e.clientX - rect.left) / rect.width;
    audio.currentTime = ratio * audio.duration;
  };

  // Tracks the pointer's canvas-relative x for the hover highlight.
  const handleMouseMove = (e: React.MouseEvent) => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    const rect = canvas.getBoundingClientRect();
    setHoverX(e.clientX - rect.left);
  };

  // Formats seconds as m:ss (e.g. 65 -> "1:05"). Assumes s is finite and
  // non-negative — TODO confirm callers never pass NaN (pre-metadata duration).
  const fmt = (s: number) => {
    const m = Math.floor(s / 60);
    const sec = Math.floor(s % 60);
    return `${m}:${sec.toString().padStart(2, "0")}`;
  };

  // NOTE(review): the JSX body is missing here in the file as provided —
  // `return ( );` is a syntax error as written. The markup (canvas, audio
  // element, controls) needs to be restored; left untouched rather than guessed.
  return (
  );
}