// NOTE(review): this file appears to be a collapsed extraction of a React JSX
// component — original newlines are lost (inline `//` comments now swallow the
// code that followed them on the same physical line) and the JSX markup inside
// every `return ( ... )` appears to have been stripped, leaving only text
// nodes further down. Code is left byte-identical; only comments are added.
//
// The line below contains: the React imports; FLOW_STEPS / FOCUS_STATES string
// enums; HelloIcon / CameraIcon (render bodies stripped by extraction); and
// GazeMiniMap({ gazeData }), which sizes a mini-map from the physical screen's
// aspect ratio (16:9 fallback when `window` is undefined) and, in a useEffect
// keyed on gazeData, draws onto a canvas: dark background, border, a 4x3 grid,
// a center crosshair, then either a "No gaze data" label (when gaze_x/gaze_y
// are null/undefined) or a radial-glow gaze dot at (gaze_x * w, gaze_y * h) —
// green when gazeData.on_screen is truthy, red otherwise — plus a percent
// readout in the bottom-right corner.
import React, { useState, useEffect, useRef } from 'react'; import CalibrationOverlay from './CalibrationOverlay'; const FLOW_STEPS = { intro: 'intro', permission: 'permission', ready: 'ready' }; const FOCUS_STATES = { pending: 'pending', focused: 'focused', notFocused: 'not-focused' }; function HelloIcon() { return ( ); } function CameraIcon() { return ( ); } function GazeMiniMap({ gazeData }) { const canvasRef = useRef(null); const screenAspect = typeof window !== 'undefined' ? window.screen.width / window.screen.height : 16 / 9; const MAP_H = 100; const MAP_W = Math.round(MAP_H * screenAspect); useEffect(() => { const cvs = canvasRef.current; if (!cvs) return; const ctx = cvs.getContext('2d'); const w = cvs.width; const h = cvs.height; ctx.clearRect(0, 0, w, h); // Screen background ctx.fillStyle = 'rgba(20, 20, 30, 0.85)'; ctx.fillRect(0, 0, w, h); // Screen border ctx.strokeStyle = 'rgba(255,255,255,0.25)'; ctx.lineWidth = 1; ctx.strokeRect(0.5, 0.5, w - 1, h - 1); // Grid lines ctx.strokeStyle = 'rgba(255,255,255,0.07)'; ctx.lineWidth = 0.5; for (let i = 1; i < 4; i++) { ctx.beginPath(); ctx.moveTo((w * i) / 4, 0); ctx.lineTo((w * i) / 4, h); ctx.stroke(); } for (let i = 1; i < 3; i++) { ctx.beginPath(); ctx.moveTo(0, (h * i) / 3); ctx.lineTo(w, (h * i) / 3); ctx.stroke(); } // Center crosshair const cx = w / 2; const cy = h / 2; ctx.strokeStyle = 'rgba(255,255,255,0.15)'; ctx.lineWidth = 1; ctx.beginPath(); ctx.moveTo(cx - 6, cy); ctx.lineTo(cx + 6, cy); ctx.moveTo(cx, cy - 6); ctx.lineTo(cx, cy + 6); ctx.stroke(); if (!gazeData || gazeData.gaze_x == null || gazeData.gaze_y == null) { ctx.fillStyle = 'rgba(255,255,255,0.3)'; ctx.font = '10px Arial'; ctx.textAlign = 'center'; ctx.fillText('No gaze data', cx, cy + 3); ctx.textAlign = 'left'; return; } const gx = gazeData.gaze_x; const gy = gazeData.gaze_y; const onScreen = gazeData.on_screen; const dotX = gx * w; const dotY = gy * h; const gradient = ctx.createRadialGradient(dotX, dotY, 0, dotX, dotY, 14);
// Gaze-dot rendering tail of GazeMiniMap (radial glow, solid dot, white ring,
// percent label), then FocusPageLocal begins: the page component that wires a
// `videoManager` to camera preview, a focus timeline, model selection,
// calibration and live gaze data. The lines below hold: all state hooks and
// refs; an effect syncing flowStep with the isTutorialActive prop;
// formatDuration; a requestAnimationFrame camera-preview loop
// (start/stopPreviewLoop — stops itself while videoManager is streaming);
// getErrorMessage (maps getUserMedia / WebSocket failures to user text); and
// an effect that wraps videoManager.callbacks.onStatusUpdate / onSessionEnd
// (keeping at most 60 timeline events), installs onCalibrationUpdate /
// onGazeData, polls getStats() every 1s, and restores the original callbacks
// on cleanup.
// NOTE(review): the `// Sync flowStep ...` comment embedded mid-line below
// swallows the `useEffect((` that follows it — an artifact of the collapsed
// formatting, left as-is.
gradient.addColorStop(0, onScreen ? 'rgba(74, 222, 128, 0.5)' : 'rgba(248, 113, 113, 0.5)'); gradient.addColorStop(1, 'rgba(0,0,0,0)'); ctx.fillStyle = gradient; ctx.fillRect(dotX - 14, dotY - 14, 28, 28); ctx.beginPath(); ctx.arc(dotX, dotY, 5, 0, 2 * Math.PI); ctx.fillStyle = onScreen ? '#4ade80' : '#f87171'; ctx.fill(); ctx.strokeStyle = '#fff'; ctx.lineWidth = 1.5; ctx.stroke(); ctx.fillStyle = 'rgba(255,255,255,0.5)'; ctx.font = '9px Arial'; ctx.textAlign = 'right'; ctx.fillText(`${(gx * 100).toFixed(0)}%, ${(gy * 100).toFixed(0)}%`, w - 4, h - 4); ctx.textAlign = 'left'; }, [gazeData]); return ( ); } function FocusPageLocal({ videoManager, sessionResult, setSessionResult, isActive, isTutorialActive, setIsTutorialActive }) { const [currentFrame, setCurrentFrame] = useState(15); const [timelineEvents, setTimelineEvents] = useState([]); const [stats, setStats] = useState(null); const [systemStats, setSystemStats] = useState(null); const [availableModels, setAvailableModels] = useState([]); const [currentModel, setCurrentModel] = useState('mlp'); const [flowStep, setFlowStep] = useState(FLOW_STEPS.ready); const [cameraReady, setCameraReady] = useState(false); const [isStarting, setIsStarting] = useState(false); const [focusState, setFocusState] = useState(FOCUS_STATES.pending); const [cameraError, setCameraError] = useState(''); const [calibrationState, setCalibrationState] = useState(null); const [l2csBoost, setL2csBoost] = useState(false); const [l2csBoostAvailable, setL2csBoostAvailable] = useState(false); const [eyeGazeEnabled, setEyeGazeEnabled] = useState(false); const [prevModel, setPrevModel] = useState('mlp'); const [isCalibrated, setIsCalibrated] = useState(false); const [gazeData, setGazeData] = useState(null); const localVideoRef = useRef(null); const displayCanvasRef = useRef(null); const pipVideoRef = useRef(null); const pipStreamRef = useRef(null); const previewFrameRef = useRef(null); // Sync flowStep with isTutorialActive from props useEffect(() 
=> { if (isTutorialActive) { setFlowStep(FLOW_STEPS.intro); } else { setFlowStep(FLOW_STEPS.ready); } }, [isTutorialActive]); const formatDuration = (seconds) => { if (seconds === 0) return '0s'; const mins = Math.floor(seconds / 60); const secs = Math.floor(seconds % 60); return `${mins}m ${secs}s`; }; const stopPreviewLoop = () => { if (previewFrameRef.current) { cancelAnimationFrame(previewFrameRef.current); previewFrameRef.current = null; } }; const startPreviewLoop = () => { stopPreviewLoop(); const renderPreview = () => { const canvas = displayCanvasRef.current; const video = localVideoRef.current; if (!canvas || !video || !cameraReady || videoManager?.isStreaming) { previewFrameRef.current = null; return; } if (video.readyState >= 2) { const ctx = canvas.getContext('2d'); ctx.drawImage(video, 0, 0, canvas.width, canvas.height); } previewFrameRef.current = requestAnimationFrame(renderPreview); }; previewFrameRef.current = requestAnimationFrame(renderPreview); }; const getErrorMessage = (err) => { if (err?.name === 'NotAllowedError') return 'Camera permission denied. Please allow camera access.'; if (err?.name === 'NotFoundError') return 'No camera found. Please connect a camera.'; if (err?.name === 'NotReadableError') return 'Camera is already in use by another application.'; if (err?.target?.url) return `WebSocket connection failed: ${err.target.url}. Check backend.`; return err?.message || 'Failed to start focus session.'; }; useEffect(() => { if (!videoManager) return; const originalOnStatusUpdate = videoManager.callbacks.onStatusUpdate; const originalOnSessionEnd = videoManager.callbacks.onSessionEnd; videoManager.callbacks.onStatusUpdate = (isFocused) => { setTimelineEvents((prev) => { const newEvents = [...prev, { isFocused, timestamp: Date.now() }]; if (newEvents.length > 60) newEvents.shift(); return newEvents; }); setFocusState(isFocused ? 
FOCUS_STATES.focused : FOCUS_STATES.notFocused); if (originalOnStatusUpdate) originalOnStatusUpdate(isFocused); }; videoManager.callbacks.onSessionEnd = (summary) => { setFocusState(FOCUS_STATES.pending); setCameraReady(false); if (originalOnSessionEnd) originalOnSessionEnd(summary); }; videoManager.callbacks.onCalibrationUpdate = (state) => { setCalibrationState(state && state.active ? state : null); if (state && state.done && state.success) setIsCalibrated(true); }; videoManager.callbacks.onGazeData = (data) => setGazeData(data); const statsInterval = setInterval(() => { if (videoManager && videoManager.getStats) setStats(videoManager.getStats()); }, 1000); return () => { videoManager.callbacks.onStatusUpdate = originalOnStatusUpdate; videoManager.callbacks.onSessionEnd = originalOnSessionEnd; videoManager.callbacks.onCalibrationUpdate = undefined; videoManager.callbacks.onGazeData = undefined; clearInterval(statsInterval); }; }, [videoManager]); useEffect(() => { fetch('/api/models') .then((res) => res.json()) .then((data) => { if (data.available) setAvailableModels(data.available); if (data.current) { setCurrentModel(data.current); if (data.current === 'l2cs') setEyeGazeEnabled(true); } }) .catch((err) => console.error('Failed to fetch models:', err)); fetch('/api/settings') .then((res) => res.json()) .then((data) => { if (data && data.l2cs_boost !== undefined) setL2csBoost(data.l2cs_boost); if (data && data.l2cs_boost_available !== undefined) setL2csBoostAvailable(data.l2cs_boost_available); }) .catch((err) => console.error('Failed to fetch settings:', err)); }, []); useEffect(() => { if (flowStep === FLOW_STEPS.ready && cameraReady && !videoManager?.isStreaming) { startPreviewLoop(); return; } stopPreviewLoop(); }, [cameraReady, flowStep, videoManager?.isStreaming]); useEffect(() => { if (!isActive) stopPreviewLoop(); }, [isActive]); useEffect(() => { return () => { stopPreviewLoop(); if (pipVideoRef.current) { pipVideoRef.current.pause(); 
// Unmount-cleanup tail (detach and stop PiP stream tracks), a 3s-interval
// /api/stats/system poller, then async handlers: handleModelChange (PUT
// /api/settings with model_name; on success updates currentModel and, when
// leaving l2cs while eye-gaze was on, clears eye-gaze/calibration/gaze state),
// handleL2csBoostToggle (no-op unless l2csBoostAvailable; alerts with the
// server `detail` on failure), handleEyeGazeToggle (switches to 'l2cs' after
// remembering the previous model, or restores it — falling back to 'mlp' if
// the remembered model was itself 'l2cs'), calibration openers,
// handleEnableCamera / handleSkipTutorial (both clear the tutorial flag via
// setIsTutorialActive when provided), handleStart (lazy camera init then
// startStreaming, alerting on failure), and the start of handleStop
// (stopStreaming, exit PiP best-effort, release PiP video/stream).
pipVideoRef.current.srcObject = null; } if (pipStreamRef.current) { pipStreamRef.current.getTracks().forEach((t) => t.stop()); pipStreamRef.current = null; } }; }, []); useEffect(() => { const fetchSystem = () => { fetch('/api/stats/system') .then(res => res.json()) .then(data => setSystemStats(data)) .catch(() => setSystemStats(null)); }; fetchSystem(); const interval = setInterval(fetchSystem, 3000); return () => clearInterval(interval); }, []); const handleModelChange = async (modelName) => { try { const res = await fetch('/api/settings', { method: 'PUT', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ model_name: modelName }) }); const result = await res.json(); if (result.updated) { setCurrentModel(modelName); if (modelName === 'l2cs') { setEyeGazeEnabled(true); } else if (eyeGazeEnabled) { setEyeGazeEnabled(false); setIsCalibrated(false); setGazeData(null); } } } catch (err) { console.error('Failed to switch model:', err); } }; const handleL2csBoostToggle = async () => { if (!l2csBoostAvailable) return; const next = !l2csBoost; try { const res = await fetch('/api/settings', { method: 'PUT', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ l2cs_boost: next }) }); if (res.ok) setL2csBoost(next); else alert((await res.json().catch(() => ({}))).detail || 'Could not enable L2CS boost'); } catch (err) { console.error('Failed to toggle L2CS boost:', err); } }; const handleEyeGazeToggle = async () => { const next = !eyeGazeEnabled; if (next) { setPrevModel(currentModel); await handleModelChange('l2cs'); setEyeGazeEnabled(true); } else { const restoreTo = prevModel === 'l2cs' ? 
'mlp' : prevModel; await handleModelChange(restoreTo); setEyeGazeEnabled(false); setIsCalibrated(false); setGazeData(null); } }; const [calibrationSetupOpen, setCalibrationSetupOpen] = useState(false); const handleCalibrate = () => setCalibrationSetupOpen(true); const handleCalibrationServerStart = () => { if (videoManager) videoManager.startCalibration(); }; const handleEnableCamera = async () => { if (!videoManager) return; try { setCameraError(''); await videoManager.initCamera(localVideoRef.current, displayCanvasRef.current); setCameraReady(true); setFlowStep(FLOW_STEPS.ready); setFocusState(FOCUS_STATES.pending); if (setIsTutorialActive) setIsTutorialActive(false); // Close tutorial flag } catch (err) { setCameraError(getErrorMessage(err)); console.error('Camera init error:', err); } }; const handleSkipTutorial = () => { setFlowStep(FLOW_STEPS.ready); if (setIsTutorialActive) setIsTutorialActive(false); }; const handleStart = async () => { try { setIsStarting(true); setSessionResult(null); setTimelineEvents([]); setFocusState(FOCUS_STATES.pending); setCameraError(''); if (!cameraReady) { await videoManager.initCamera(localVideoRef.current, displayCanvasRef.current); setCameraReady(true); setFlowStep(FLOW_STEPS.ready); } await videoManager.startStreaming(); } catch (err) { const errorMessage = getErrorMessage(err); setCameraError(errorMessage); setFocusState(FOCUS_STATES.pending); console.error('Start error:', err); alert(`Failed to start: ${errorMessage}\n\nCheck browser console for details.`); } finally { setIsStarting(false); } }; const handleStop = async () => { if (videoManager) await videoManager.stopStreaming(); try { if (document.pictureInPictureElement === pipVideoRef.current) { await document.exitPictureInPicture(); } } catch (_) {} if (pipVideoRef.current) { pipVideoRef.current.pause(); pipVideoRef.current.srcObject = null; } if (pipStreamRef.current) { pipStreamRef.current.getTracks().forEach((t) => t.stop()); pipStreamRef.current = null; } 
// handleStop tail, then handlePiP: enters/exits Picture-in-Picture. Prefers a
// cached stream, else canvas.captureStream(30), else falls back to the raw
// camera stream; waits for loadeddata/canplay with a 600ms timeout cap before
// play(); uses webkitSetPresentationMode on Safari (with a camera-stream
// retry) and requestPictureInPicture elsewhere. Also below: handleFrameChange
// (parses the slider value and forwards to videoManager.setFrameRate),
// handlePreview (builds a sessionResult snapshot — NOTE(review): focusScore
// here reduces to currentStatus ? 1 : 0, an all-or-nothing score; looks
// intentional as a rough preview but worth confirming), handleCloseOverlay,
// the hidden-page style, the focus-state label map, and the introHighlights
// copy.
stopPreviewLoop(); setFocusState(FOCUS_STATES.pending); setCameraReady(false); }; const handlePiP = async () => { try { if (!videoManager || !videoManager.isStreaming) return alert('Please start the video first.'); if (!displayCanvasRef.current) return alert('Video not ready.'); if (document.pictureInPictureElement === pipVideoRef.current) { await document.exitPictureInPicture(); return; } if (!document.pictureInPictureEnabled) return alert('Picture-in-Picture is not supported.'); const pipVideo = pipVideoRef.current; if (!pipVideo) return alert('PiP video element not ready.'); const isSafariPiP = typeof pipVideo.webkitSetPresentationMode === 'function'; let stream = pipStreamRef.current; if (!stream) { const capture = displayCanvasRef.current.captureStream; if (typeof capture === 'function') stream = capture.call(displayCanvasRef.current, 30); if (!stream || stream.getTracks().length === 0) { const cameraStream = localVideoRef.current?.srcObject; if (!cameraStream) return alert('Camera stream not ready.'); stream = cameraStream; } pipStreamRef.current = stream; } pipVideo.srcObject = stream; if (pipVideo.readyState < 2) { await new Promise((resolve) => { const onReady = () => { pipVideo.removeEventListener('loadeddata', onReady); pipVideo.removeEventListener('canplay', onReady); resolve(); }; pipVideo.addEventListener('loadeddata', onReady); pipVideo.addEventListener('canplay', onReady); setTimeout(resolve, 600); }); } try { await pipVideo.play(); } catch (_) {} if (isSafariPiP) { try { pipVideo.webkitSetPresentationMode('picture-in-picture'); return; } catch (e) { const cameraStream = localVideoRef.current?.srcObject; if (cameraStream && cameraStream !== pipVideo.srcObject) { pipVideo.srcObject = cameraStream; try { await pipVideo.play(); } catch (_) {} pipVideo.webkitSetPresentationMode('picture-in-picture'); return; } throw e; } } if (typeof pipVideo.requestPictureInPicture === 'function') { await pipVideo.requestPictureInPicture(); } else { 
alert('Picture-in-Picture is not supported in this browser.'); } } catch (err) { console.error('PiP error:', err); alert(`Failed to enter Picture-in-Picture: ${err.message}`); } }; const handleFrameChange = (val) => { const rate = parseInt(val, 10); setCurrentFrame(rate); if (videoManager) videoManager.setFrameRate(rate); }; const handlePreview = () => { if (!videoManager || !videoManager.isStreaming) return alert('Please start a session first.'); const currentStats = videoManager.getStats(); if (!currentStats.sessionId) return alert('No active session.'); const sessionDuration = Math.floor((Date.now() - (videoManager.sessionStartTime || Date.now())) / 1000); const focusScore = currentStats.framesProcessed > 0 ? (currentStats.framesProcessed * (currentStats.currentStatus ? 1 : 0)) / currentStats.framesProcessed : 0; setSessionResult({ duration_seconds: sessionDuration, focus_score: focusScore, total_frames: currentStats.framesProcessed, focused_frames: Math.floor(currentStats.framesProcessed * focusScore) }); }; const handleCloseOverlay = () => setSessionResult(null); const pageStyle = isActive ? undefined : { position: 'absolute', width: '1px', height: '1px', overflow: 'hidden', opacity: 0, pointerEvents: 'none' }; const focusStateLabel = { [FOCUS_STATES.pending]: 'Pending', [FOCUS_STATES.focused]: 'Focused', [FOCUS_STATES.notFocused]: 'Not Focused' }[focusState]; const introHighlights = [ { title: 'Live focus tracking', text: 'Head pose, gaze, and eye openness are read continuously during the session.' }, { title: 'Quick setup', text: 'Front-facing light and a stable camera angle give the cleanest preview.' }, { title: 'Private by default', text: 'Only session metadata is stored locally, not the raw camera footage.' }, { title: 'Sync across devices', text: 'Your progress is automatically saved to this browser. You can migrate your data anytime via the Data Management section at the top of My Records.' 
// permissionSteps copy, renderIntroCard (intro card for FLOW_STEPS.intro,
// camera-setup card for FLOW_STEPS.permission while !cameraReady, null
// otherwise), then the component's render output.
// NOTE(review): from here on the JSX element tags appear to have been stripped
// by the extraction — only text nodes and embedded {expressions} remain (e.g.
// the truncated `CPU: 80 ? ...` on the system-stats line, which was once a
// style attribute on a tag). Everything is left byte-identical; reconstructing
// the missing markup from this residue would be guesswork — recover it from
// the original file instead.
} ]; const permissionSteps = [ { title: 'Allow browser access', text: 'Approve the camera prompt so the preview can appear immediately.' }, { title: 'Check your framing', text: 'Keep your face visible and centered for more stable landmark detection.' }, { title: 'Start when ready', text: 'After the preview appears, use the page controls to begin or stop.' } ]; const renderIntroCard = () => { if (flowStep === FLOW_STEPS.intro) { return (
Focus Session

Before you begin

The focus page uses your live camera preview to estimate attention in real time. Review the setup notes below, then continue to camera access.

{introHighlights.map((item) => (

{item.title}

{item.text}

))}
You can still change frame rate and available model options after the preview loads.
); } if (flowStep === FLOW_STEPS.permission && !cameraReady) { return (
Camera Setup

Enable camera access

Once access is granted, your preview appears here and the rest of the Focus page behaves like the other dashboard screens.

{permissionSteps.map((item, index) => (
{index + 1}

{item.title}

{item.text}

))}
{cameraError ?
{cameraError}
: null}
); } return null; }; return (
{renderIntroCard()}
{systemStats && (systemStats.cpu_percent != null || systemStats.memory_percent != null) && (
CPU: 80 ? '#ff6b6b' : systemStats.cpu_percent > 50 ? '#ffc168' : '#66d9a0' }}>{systemStats.cpu_percent}% RAM: 85 ? '#ff6b6b' : systemStats.memory_percent > 60 ? '#ffc168' : '#66d9a0' }}>{systemStats.memory_percent}% ({systemStats.memory_used_mb}/{systemStats.memory_total_mb} MB)
)} {flowStep === FLOW_STEPS.ready ? ( <> {availableModels.length > 0 ? (
Model: {availableModels.map((name) => ( ))}
) : null}
Timeline
{timelineEvents.map((event, index) => (
))}
{eyeGazeEnabled ? : null}
{eyeGazeEnabled && videoManager?.isStreaming ? (
GAZE MAP {isCalibrated ? '(calibrated)' : '(raw)'}
) : null} {cameraError ?
{cameraError}
: null}
handleFrameChange(e.target.value)} /> handleFrameChange(e.target.value)} />
) : null}
); } export default FocusPageLocal;