// integration_test2/src/components/FocusPageLocal.jsx
// Last change: "update src/ tutorial & data management" (commit ad1b410, Kexin-251202)
import React, { useState, useEffect, useRef } from 'react';
import CalibrationOverlay from './CalibrationOverlay';
// Finite-state labels for the first-run tutorial flow
// (intro -> permission -> ready). Frozen so accidental mutation of these
// shared module constants fails loudly instead of silently corrupting state.
const FLOW_STEPS = Object.freeze({
  intro: 'intro',
  permission: 'permission',
  ready: 'ready'
});
// Tri-state focus indicator driven by backend status updates; 'pending'
// until the first onStatusUpdate callback fires. The string values double
// as CSS class suffixes on the status pill, so they must not change.
const FOCUS_STATES = Object.freeze({
  pending: 'pending',
  focused: 'focused',
  notFocused: 'not-focused'
});
// Decorative smiling-face icon shown on the tutorial intro card.
// Purely presentational: no props, hidden from assistive tech (aria-hidden).
function HelloIcon() {
  return (
    <svg width="96" height="96" viewBox="0 0 96 96" aria-hidden="true">
      {/* face disc */}
      <circle cx="48" cy="48" r="40" fill="#007BFF" />
      {/* left and right eyes (arcs), then the smile */}
      <path d="M30 38c0-4 2.7-7 6-7s6 3 6 7" fill="none" stroke="#fff" strokeWidth="6" strokeLinecap="round" />
      <path d="M54 38c0-4 2.7-7 6-7s6 3 6 7" fill="none" stroke="#fff" strokeWidth="6" strokeLinecap="round" />
      <path d="M30 52c3 11 10 17 18 17s15-6 18-17" fill="none" stroke="#fff" strokeWidth="6" strokeLinecap="round" />
    </svg>
  );
}
// Decorative webcam icon shown on the camera-permission step of the tutorial.
// Purely presentational: no props, hidden from assistive tech (aria-hidden).
function CameraIcon() {
  return (
    <svg width="110" height="110" viewBox="0 0 110 110" aria-hidden="true">
      {/* camera body, base, stalk, lens ring, lens center, foot */}
      <rect x="30" y="36" width="50" height="34" rx="5" fill="none" stroke="#007BFF" strokeWidth="6" />
      <path d="M24 72h62c0 9-7 16-16 16H40c-9 0-16-7-16-16Z" fill="none" stroke="#007BFF" strokeWidth="6" />
      <path d="M55 28v8" stroke="#007BFF" strokeWidth="6" strokeLinecap="round" />
      <circle cx="55" cy="36" r="14" fill="none" stroke="#007BFF" strokeWidth="6" />
      <circle cx="55" cy="36" r="4" fill="#007BFF" />
      <path d="M46 83h18" stroke="#007BFF" strokeWidth="6" strokeLinecap="round" />
    </svg>
  );
}
/**
 * Miniature screen map that paints the latest gaze sample onto a small canvas.
 *
 * Props:
 * - gazeData: latest gaze sample, or null. Fields read here: gaze_x, gaze_y
 *   (multiplied by the canvas size, so they are treated as normalized
 *   0..1 screen fractions — TODO confirm against the producer) and
 *   on_screen (truthy => green dot, falsy => red dot).
 *
 * The canvas is redrawn imperatively inside an effect on every gazeData
 * change; draw order matters (background -> grid -> crosshair -> glow -> dot).
 */
function GazeMiniMap({ gazeData }) {
  const canvasRef = useRef(null);
  // Size the map to the real screen's aspect ratio; fall back to 16:9 when
  // window is unavailable (SSR). Recomputed every render, which is cheap.
  const screenAspect = typeof window !== 'undefined'
    ? window.screen.width / window.screen.height
    : 16 / 9;
  const MAP_H = 100;
  const MAP_W = Math.round(MAP_H * screenAspect);
  useEffect(() => {
    const cvs = canvasRef.current;
    if (!cvs) return;
    const ctx = cvs.getContext('2d');
    const w = cvs.width;
    const h = cvs.height;
    ctx.clearRect(0, 0, w, h);
    // Screen background
    ctx.fillStyle = 'rgba(20, 20, 30, 0.85)';
    ctx.fillRect(0, 0, w, h);
    // Screen border (0.5 offset keeps the 1px stroke on pixel centers)
    ctx.strokeStyle = 'rgba(255,255,255,0.25)';
    ctx.lineWidth = 1;
    ctx.strokeRect(0.5, 0.5, w - 1, h - 1);
    // Grid lines: 4 columns x 3 rows
    ctx.strokeStyle = 'rgba(255,255,255,0.07)';
    ctx.lineWidth = 0.5;
    for (let i = 1; i < 4; i++) {
      ctx.beginPath();
      ctx.moveTo((w * i) / 4, 0);
      ctx.lineTo((w * i) / 4, h);
      ctx.stroke();
    }
    for (let i = 1; i < 3; i++) {
      ctx.beginPath();
      ctx.moveTo(0, (h * i) / 3);
      ctx.lineTo(w, (h * i) / 3);
      ctx.stroke();
    }
    // Center crosshair
    const cx = w / 2;
    const cy = h / 2;
    ctx.strokeStyle = 'rgba(255,255,255,0.15)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(cx - 6, cy);
    ctx.lineTo(cx + 6, cy);
    ctx.moveTo(cx, cy - 6);
    ctx.lineTo(cx, cy + 6);
    ctx.stroke();
    // No sample (or incomplete sample): draw a placeholder label and stop.
    if (!gazeData || gazeData.gaze_x == null || gazeData.gaze_y == null) {
      ctx.fillStyle = 'rgba(255,255,255,0.3)';
      ctx.font = '10px Arial';
      ctx.textAlign = 'center';
      ctx.fillText('No gaze data', cx, cy + 3);
      ctx.textAlign = 'left'; // restore default alignment for later draws
      return;
    }
    const gx = gazeData.gaze_x;
    const gy = gazeData.gaze_y;
    const onScreen = gazeData.on_screen;
    // NOTE(review): gx/gy are not clamped here, so an off-screen sample can
    // place the dot outside the canvas (it is simply clipped) — confirm
    // whether the producer already clamps to [0,1].
    const dotX = gx * w;
    const dotY = gy * h;
    // Soft radial glow behind the dot, colored by on/off-screen state.
    const gradient = ctx.createRadialGradient(dotX, dotY, 0, dotX, dotY, 14);
    gradient.addColorStop(0, onScreen ? 'rgba(74, 222, 128, 0.5)' : 'rgba(248, 113, 113, 0.5)');
    gradient.addColorStop(1, 'rgba(0,0,0,0)');
    ctx.fillStyle = gradient;
    ctx.fillRect(dotX - 14, dotY - 14, 28, 28);
    // Solid dot with white outline on top of the glow.
    ctx.beginPath();
    ctx.arc(dotX, dotY, 5, 0, 2 * Math.PI);
    ctx.fillStyle = onScreen ? '#4ade80' : '#f87171';
    ctx.fill();
    ctx.strokeStyle = '#fff';
    ctx.lineWidth = 1.5;
    ctx.stroke();
    // Percentage readout in the bottom-right corner.
    ctx.fillStyle = 'rgba(255,255,255,0.5)';
    ctx.font = '9px Arial';
    ctx.textAlign = 'right';
    ctx.fillText(`${(gx * 100).toFixed(0)}%, ${(gy * 100).toFixed(0)}%`, w - 4, h - 4);
    ctx.textAlign = 'left';
  }, [gazeData]);
  return (
    <canvas
      ref={canvasRef}
      width={MAP_W}
      height={MAP_H}
      style={{ borderRadius: '8px', border: '1px solid rgba(255,255,255,0.1)', display: 'block' }}
    />
  );
}
/**
 * Local focus-tracking page: camera preview, streaming controls, model /
 * calibration settings, a Picture-in-Picture mirror, and a first-run tutorial.
 *
 * Props:
 * - videoManager: external controller; this component calls its initCamera,
 *   startStreaming, stopStreaming, startCalibration, getStats and
 *   setFrameRate methods, and patches its `callbacks` object.
 * - sessionResult / setSessionResult: lifted state for the end-of-session
 *   summary overlay.
 * - isActive: when false the page stays mounted but is rendered 1x1 and
 *   invisible (see pageStyle) — presumably so the camera stream survives
 *   tab switches; verify against the parent router.
 * - isTutorialActive / setIsTutorialActive: lifted flag driving the
 *   intro/permission overlay cards.
 */
function FocusPageLocal({ videoManager, sessionResult, setSessionResult, isActive, isTutorialActive, setIsTutorialActive }) {
  // --- UI / session state -------------------------------------------------
  const [currentFrame, setCurrentFrame] = useState(15); // requested capture FPS (slider range 10-30)
  const [timelineEvents, setTimelineEvents] = useState([]); // rolling window of focus/distracted samples (max 60)
  const [stats, setStats] = useState(null); // polled from videoManager.getStats() every second
  const [systemStats, setSystemStats] = useState(null); // server CPU/RAM, polled every 3s
  const [availableModels, setAvailableModels] = useState([]);
  const [currentModel, setCurrentModel] = useState('mlp');
  const [flowStep, setFlowStep] = useState(FLOW_STEPS.ready); // tutorial flow position
  const [cameraReady, setCameraReady] = useState(false);
  const [isStarting, setIsStarting] = useState(false);
  const [focusState, setFocusState] = useState(FOCUS_STATES.pending);
  const [cameraError, setCameraError] = useState('');
  const [calibrationState, setCalibrationState] = useState(null); // non-null only while calibration is active
  const [l2csBoost, setL2csBoost] = useState(false);
  const [l2csBoostAvailable, setL2csBoostAvailable] = useState(false);
  const [eyeGazeEnabled, setEyeGazeEnabled] = useState(false); // true when the 'l2cs' model is selected
  const [prevModel, setPrevModel] = useState('mlp'); // model to restore when eye-gaze is toggled off
  const [isCalibrated, setIsCalibrated] = useState(false);
  const [gazeData, setGazeData] = useState(null); // latest gaze sample for GazeMiniMap
  // --- DOM / stream refs --------------------------------------------------
  const localVideoRef = useRef(null); // hidden <video> holding the raw camera stream
  const displayCanvasRef = useRef(null); // visible canvas videoManager (or the preview loop) draws into
  const pipVideoRef = useRef(null); // hidden 1x1 <video> used as the Picture-in-Picture source
  const pipStreamRef = useRef(null); // MediaStream feeding the PiP video, kept for teardown
  const previewFrameRef = useRef(null); // requestAnimationFrame handle for the idle preview loop
  // Sync flowStep with isTutorialActive from props
  useEffect(() => {
    if (isTutorialActive) {
      setFlowStep(FLOW_STEPS.intro);
    } else {
      setFlowStep(FLOW_STEPS.ready);
    }
  }, [isTutorialActive]);
  // Format a second count as "Xm Ys" for the result overlay.
  const formatDuration = (seconds) => {
    if (seconds === 0) return '0s';
    const mins = Math.floor(seconds / 60);
    const secs = Math.floor(seconds % 60);
    return `${mins}m ${secs}s`;
  };
  const stopPreviewLoop = () => {
    if (previewFrameRef.current) {
      cancelAnimationFrame(previewFrameRef.current);
      previewFrameRef.current = null;
    }
  };
  // Mirrors the raw camera onto the display canvas while no session is
  // streaming (so the user sees themselves before pressing Start).
  // Self-terminates once videoManager starts streaming or the camera drops.
  const startPreviewLoop = () => {
    stopPreviewLoop();
    const renderPreview = () => {
      const canvas = displayCanvasRef.current;
      const video = localVideoRef.current;
      // NOTE(review): cameraReady here is captured from the closure; the
      // effect below restarts the loop on changes, so staleness is bounded
      // to one frame — confirm if cameraReady toggles outside that effect.
      if (!canvas || !video || !cameraReady || videoManager?.isStreaming) {
        previewFrameRef.current = null;
        return;
      }
      if (video.readyState >= 2) { // HAVE_CURRENT_DATA: a frame is decodable
        const ctx = canvas.getContext('2d');
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
      }
      previewFrameRef.current = requestAnimationFrame(renderPreview);
    };
    previewFrameRef.current = requestAnimationFrame(renderPreview);
  };
  // Map getUserMedia / WebSocket failures to user-facing messages.
  const getErrorMessage = (err) => {
    if (err?.name === 'NotAllowedError') return 'Camera permission denied. Please allow camera access.';
    if (err?.name === 'NotFoundError') return 'No camera found. Please connect a camera.';
    if (err?.name === 'NotReadableError') return 'Camera is already in use by another application.';
    if (err?.target?.url) return `WebSocket connection failed: ${err.target.url}. Check backend.`;
    return err?.message || 'Failed to start focus session.';
  };
  // Patch videoManager callbacks to feed local state, preserving and chaining
  // any callbacks the owner installed; restore the originals on cleanup.
  useEffect(() => {
    if (!videoManager) return;
    const originalOnStatusUpdate = videoManager.callbacks.onStatusUpdate;
    const originalOnSessionEnd = videoManager.callbacks.onSessionEnd;
    videoManager.callbacks.onStatusUpdate = (isFocused) => {
      setTimelineEvents((prev) => {
        const newEvents = [...prev, { isFocused, timestamp: Date.now() }];
        if (newEvents.length > 60) newEvents.shift(); // cap the rolling window
        return newEvents;
      });
      setFocusState(isFocused ? FOCUS_STATES.focused : FOCUS_STATES.notFocused);
      if (originalOnStatusUpdate) originalOnStatusUpdate(isFocused);
    };
    videoManager.callbacks.onSessionEnd = (summary) => {
      setFocusState(FOCUS_STATES.pending);
      setCameraReady(false);
      if (originalOnSessionEnd) originalOnSessionEnd(summary);
    };
    videoManager.callbacks.onCalibrationUpdate = (state) => {
      setCalibrationState(state && state.active ? state : null);
      if (state && state.done && state.success) setIsCalibrated(true);
    };
    videoManager.callbacks.onGazeData = (data) => setGazeData(data);
    // Poll per-session stats once a second while mounted.
    const statsInterval = setInterval(() => {
      if (videoManager && videoManager.getStats) setStats(videoManager.getStats());
    }, 1000);
    return () => {
      videoManager.callbacks.onStatusUpdate = originalOnStatusUpdate;
      videoManager.callbacks.onSessionEnd = originalOnSessionEnd;
      videoManager.callbacks.onCalibrationUpdate = undefined;
      videoManager.callbacks.onGazeData = undefined;
      clearInterval(statsInterval);
    };
  }, [videoManager]);
  // One-shot fetch of available models and server-side settings on mount.
  useEffect(() => {
    fetch('/api/models')
      .then((res) => res.json())
      .then((data) => {
        if (data.available) setAvailableModels(data.available);
        if (data.current) {
          setCurrentModel(data.current);
          if (data.current === 'l2cs') setEyeGazeEnabled(true);
        }
      })
      .catch((err) => console.error('Failed to fetch models:', err));
    fetch('/api/settings')
      .then((res) => res.json())
      .then((data) => {
        if (data && data.l2cs_boost !== undefined) setL2csBoost(data.l2cs_boost);
        if (data && data.l2cs_boost_available !== undefined) setL2csBoostAvailable(data.l2cs_boost_available);
      })
      .catch((err) => console.error('Failed to fetch settings:', err));
  }, []);
  // Run the idle preview only when the camera is ready and no session streams.
  useEffect(() => {
    if (flowStep === FLOW_STEPS.ready && cameraReady && !videoManager?.isStreaming) {
      startPreviewLoop();
      return;
    }
    stopPreviewLoop();
  }, [cameraReady, flowStep, videoManager?.isStreaming]);
  useEffect(() => {
    if (!isActive) stopPreviewLoop();
  }, [isActive]);
  // Unmount cleanup: stop the preview rAF loop and release PiP resources.
  useEffect(() => {
    return () => {
      stopPreviewLoop();
      if (pipVideoRef.current) {
        pipVideoRef.current.pause();
        pipVideoRef.current.srcObject = null;
      }
      if (pipStreamRef.current) {
        pipStreamRef.current.getTracks().forEach((t) => t.stop());
        pipStreamRef.current = null;
      }
    };
  }, []);
  // Poll server CPU/RAM every 3 seconds for the stats strip.
  useEffect(() => {
    const fetchSystem = () => {
      fetch('/api/stats/system')
        .then(res => res.json())
        .then(data => setSystemStats(data))
        .catch(() => setSystemStats(null)); // best-effort: hide strip on failure
    };
    fetchSystem();
    const interval = setInterval(fetchSystem, 3000);
    return () => clearInterval(interval);
  }, []);
  // Switch the backend detection model; keeps eyeGazeEnabled in sync with
  // whether the 'l2cs' model is active.
  const handleModelChange = async (modelName) => {
    try {
      const res = await fetch('/api/settings', {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model_name: modelName })
      });
      const result = await res.json();
      if (result.updated) {
        setCurrentModel(modelName);
        if (modelName === 'l2cs') {
          setEyeGazeEnabled(true);
        } else if (eyeGazeEnabled) {
          // Leaving l2cs invalidates calibration and the gaze overlay.
          setEyeGazeEnabled(false);
          setIsCalibrated(false);
          setGazeData(null);
        }
      }
    } catch (err) {
      console.error('Failed to switch model:', err);
    }
  };
  // Toggle the optional L2CS boost server setting (no-op if unavailable).
  const handleL2csBoostToggle = async () => {
    if (!l2csBoostAvailable) return;
    const next = !l2csBoost;
    try {
      const res = await fetch('/api/settings', {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ l2cs_boost: next })
      });
      if (res.ok) setL2csBoost(next);
      else alert((await res.json().catch(() => ({}))).detail || 'Could not enable L2CS boost');
    } catch (err) {
      console.error('Failed to toggle L2CS boost:', err);
    }
  };
  // Toggle eye-gaze mode: ON switches to 'l2cs' (remembering the previous
  // model), OFF restores it (falling back to 'mlp' if prev was l2cs itself).
  const handleEyeGazeToggle = async () => {
    const next = !eyeGazeEnabled;
    if (next) {
      setPrevModel(currentModel);
      await handleModelChange('l2cs');
      setEyeGazeEnabled(true);
    } else {
      const restoreTo = prevModel === 'l2cs' ? 'mlp' : prevModel;
      await handleModelChange(restoreTo);
      setEyeGazeEnabled(false);
      setIsCalibrated(false);
      setGazeData(null);
    }
  };
  // NOTE(review): a useState this far down is legal (still unconditional, so
  // hook order is stable) but unconventional — consider moving it up with the
  // other state declarations.
  const [calibrationSetupOpen, setCalibrationSetupOpen] = useState(false);
  const handleCalibrate = () => setCalibrationSetupOpen(true);
  const handleCalibrationServerStart = () => { if (videoManager) videoManager.startCalibration(); };
  // Tutorial "Enable Camera" button: request the camera and, on success,
  // finish the tutorial flow.
  const handleEnableCamera = async () => {
    if (!videoManager) return;
    try {
      setCameraError('');
      await videoManager.initCamera(localVideoRef.current, displayCanvasRef.current);
      setCameraReady(true);
      setFlowStep(FLOW_STEPS.ready);
      setFocusState(FOCUS_STATES.pending);
      if (setIsTutorialActive) setIsTutorialActive(false); // Close tutorial flag
    } catch (err) {
      setCameraError(getErrorMessage(err));
      console.error('Camera init error:', err);
    }
  };
  const handleSkipTutorial = () => {
    setFlowStep(FLOW_STEPS.ready);
    if (setIsTutorialActive) setIsTutorialActive(false);
  };
  // Start a focus session; lazily initializes the camera if the tutorial was
  // skipped before granting access.
  const handleStart = async () => {
    try {
      setIsStarting(true);
      setSessionResult(null);
      setTimelineEvents([]);
      setFocusState(FOCUS_STATES.pending);
      setCameraError('');
      if (!cameraReady) {
        await videoManager.initCamera(localVideoRef.current, displayCanvasRef.current);
        setCameraReady(true);
        setFlowStep(FLOW_STEPS.ready);
      }
      await videoManager.startStreaming();
    } catch (err) {
      const errorMessage = getErrorMessage(err);
      setCameraError(errorMessage);
      setFocusState(FOCUS_STATES.pending);
      console.error('Start error:', err);
      alert(`Failed to start: ${errorMessage}\n\nCheck browser console for details.`);
    } finally {
      setIsStarting(false);
    }
  };
  // Stop streaming and tear down PiP + preview resources.
  const handleStop = async () => {
    if (videoManager) await videoManager.stopStreaming();
    try {
      if (document.pictureInPictureElement === pipVideoRef.current) {
        await document.exitPictureInPicture();
      }
    } catch (_) {} // best-effort: leaving PiP may legitimately fail
    if (pipVideoRef.current) {
      pipVideoRef.current.pause();
      pipVideoRef.current.srcObject = null;
    }
    if (pipStreamRef.current) {
      pipStreamRef.current.getTracks().forEach((t) => t.stop());
      pipStreamRef.current = null;
    }
    stopPreviewLoop();
    setFocusState(FOCUS_STATES.pending);
    setCameraReady(false);
  };
  // Enter/exit Picture-in-Picture. Prefers capturing the annotated display
  // canvas; falls back to the raw camera stream. Handles both the standard
  // requestPictureInPicture API and Safari's webkitSetPresentationMode.
  const handlePiP = async () => {
    try {
      if (!videoManager || !videoManager.isStreaming) return alert('Please start the video first.');
      if (!displayCanvasRef.current) return alert('Video not ready.');
      if (document.pictureInPictureElement === pipVideoRef.current) {
        await document.exitPictureInPicture(); // toggle: already in PiP
        return;
      }
      if (!document.pictureInPictureEnabled) return alert('Picture-in-Picture is not supported.');
      const pipVideo = pipVideoRef.current;
      if (!pipVideo) return alert('PiP video element not ready.');
      const isSafariPiP = typeof pipVideo.webkitSetPresentationMode === 'function';
      let stream = pipStreamRef.current;
      if (!stream) {
        const capture = displayCanvasRef.current.captureStream;
        if (typeof capture === 'function') stream = capture.call(displayCanvasRef.current, 30);
        if (!stream || stream.getTracks().length === 0) {
          const cameraStream = localVideoRef.current?.srcObject;
          if (!cameraStream) return alert('Camera stream not ready.');
          stream = cameraStream;
        }
        pipStreamRef.current = stream; // cache for reuse / teardown
      }
      pipVideo.srcObject = stream;
      // Wait for the first decodable frame (or a 600ms timeout) before
      // requesting PiP, since some browsers reject PiP on an empty video.
      if (pipVideo.readyState < 2) {
        await new Promise((resolve) => {
          const onReady = () => {
            pipVideo.removeEventListener('loadeddata', onReady);
            pipVideo.removeEventListener('canplay', onReady);
            resolve();
          };
          pipVideo.addEventListener('loadeddata', onReady);
          pipVideo.addEventListener('canplay', onReady);
          // NOTE(review): the timeout path resolves without removing the
          // listeners; harmless (resolve is idempotent) but they linger
          // until the next ready event.
          setTimeout(resolve, 600);
        });
      }
      try { await pipVideo.play(); } catch (_) {}
      if (isSafariPiP) {
        try {
          pipVideo.webkitSetPresentationMode('picture-in-picture');
          return;
        } catch (e) {
          // Safari can refuse canvas-capture streams; retry with the raw camera.
          const cameraStream = localVideoRef.current?.srcObject;
          if (cameraStream && cameraStream !== pipVideo.srcObject) {
            pipVideo.srcObject = cameraStream;
            try { await pipVideo.play(); } catch (_) {}
            pipVideo.webkitSetPresentationMode('picture-in-picture');
            return;
          }
          throw e;
        }
      }
      if (typeof pipVideo.requestPictureInPicture === 'function') {
        await pipVideo.requestPictureInPicture();
      } else {
        alert('Picture-in-Picture is not supported in this browser.');
      }
    } catch (err) {
      console.error('PiP error:', err);
      alert(`Failed to enter Picture-in-Picture: ${err.message}`);
    }
  };
  // Slider/number input handler for the capture frame rate.
  // NOTE(review): parseInt('') yields NaN when the number input is cleared,
  // which would propagate into the controlled inputs — consider guarding.
  const handleFrameChange = (val) => {
    const rate = parseInt(val, 10);
    setCurrentFrame(rate);
    if (videoManager) videoManager.setFrameRate(rate);
  };
  // Build a provisional session summary from live stats.
  // NOTE(review): focus_score is derived from currentStatus (a boolean
  // snapshot), so it evaluates to 0 or 1 rather than a running ratio —
  // confirm whether that is intentional for the preview.
  const handlePreview = () => {
    if (!videoManager || !videoManager.isStreaming) return alert('Please start a session first.');
    const currentStats = videoManager.getStats();
    if (!currentStats.sessionId) return alert('No active session.');
    const sessionDuration = Math.floor((Date.now() - (videoManager.sessionStartTime || Date.now())) / 1000);
    const focusScore = currentStats.framesProcessed > 0
      ? (currentStats.framesProcessed * (currentStats.currentStatus ? 1 : 0)) / currentStats.framesProcessed
      : 0;
    setSessionResult({
      duration_seconds: sessionDuration,
      focus_score: focusScore,
      total_frames: currentStats.framesProcessed,
      focused_frames: Math.floor(currentStats.framesProcessed * focusScore)
    });
  };
  const handleCloseOverlay = () => setSessionResult(null);
  // When inactive, keep the page mounted but visually collapsed to 1x1 so
  // media elements/streams are not torn down by unmounting.
  const pageStyle = isActive
    ? undefined
    : { position: 'absolute', width: '1px', height: '1px', overflow: 'hidden', opacity: 0, pointerEvents: 'none' };
  const focusStateLabel = {
    [FOCUS_STATES.pending]: 'Pending',
    [FOCUS_STATES.focused]: 'Focused',
    [FOCUS_STATES.notFocused]: 'Not Focused'
  }[focusState];
  // Copy for the tutorial intro card.
  const introHighlights = [
    {
      title: 'Live focus tracking',
      text: 'Head pose, gaze, and eye openness are read continuously during the session.'
    },
    {
      title: 'Quick setup',
      text: 'Front-facing light and a stable camera angle give the cleanest preview.'
    },
    {
      title: 'Private by default',
      text: 'Only session metadata is stored locally, not the raw camera footage.'
    },
    {
      title: 'Sync across devices',
      text: 'Your progress is automatically saved to this browser. You can migrate your data anytime via the Data Management section at the top of My Records.'
    }
  ];
  // Copy for the camera-permission card.
  const permissionSteps = [
    { title: 'Allow browser access', text: 'Approve the camera prompt so the preview can appear immediately.' },
    { title: 'Check your framing', text: 'Keep your face visible and centered for more stable landmark detection.' },
    { title: 'Start when ready', text: 'After the preview appears, use the page controls to begin or stop.' }
  ];
  // Render the tutorial overlay for the current flow step (null once done).
  const renderIntroCard = () => {
    if (flowStep === FLOW_STEPS.intro) {
      return (
        <div className="focus-flow-overlay">
          <div className="focus-flow-card">
            <div className="focus-flow-header">
              <div>
                <div className="focus-flow-eyebrow">Focus Session</div>
                <h2>Before you begin</h2>
              </div>
              <div className="focus-flow-icon"><HelloIcon /></div>
            </div>
            <p className="focus-flow-lead">
              The focus page uses your live camera preview to estimate attention in real time.
              Review the setup notes below, then continue to camera access.
            </p>
            <div className="focus-flow-grid">
              {introHighlights.map((item) => (
                <article key={item.title} className="focus-flow-panel">
                  <h3>{item.title}</h3>
                  <p>{item.text}</p>
                </article>
              ))}
            </div>
            <div className="focus-flow-footer">
              <div className="focus-flow-note">
                You can still change frame rate and available model options after the preview loads.
              </div>
              <div style={{ display: 'flex', gap: '10px' }}>
                <button className="focus-flow-secondary" onClick={handleSkipTutorial}>
                  Skip
                </button>
                <button className="focus-flow-button" onClick={() => setFlowStep(FLOW_STEPS.permission)}>
                  Continue
                </button>
              </div>
            </div>
          </div>
        </div>
      );
    }
    if (flowStep === FLOW_STEPS.permission && !cameraReady) {
      return (
        <div className="focus-flow-overlay">
          <div className="focus-flow-card">
            <div className="focus-flow-header">
              <div>
                <div className="focus-flow-eyebrow">Camera Setup</div>
                <h2>Enable camera access</h2>
              </div>
              <div className="focus-flow-icon"><CameraIcon /></div>
            </div>
            <p className="focus-flow-lead">
              Once access is granted, your preview appears here and the rest of the Focus page behaves like the other dashboard screens.
            </p>
            <div className="focus-flow-steps">
              {permissionSteps.map((item, index) => (
                <div key={item.title} className="focus-flow-step">
                  <div className="focus-flow-step-number">{index + 1}</div>
                  <div className="focus-flow-step-copy">
                    <h3>{item.title}</h3>
                    <p>{item.text}</p>
                  </div>
                </div>
              ))}
            </div>
            {cameraError ? <div className="focus-inline-error">{cameraError}</div> : null}
            <div className="focus-flow-footer">
              <button type="button" className="focus-flow-secondary" onClick={() => setFlowStep(FLOW_STEPS.intro)}>Back</button>
              <button className="focus-flow-button" onClick={handleEnableCamera}>Enable Camera</button>
            </div>
          </div>
        </div>
      );
    }
    return null;
  };
  return (
    <main id="page-b" className="page" style={pageStyle}>
      <CalibrationOverlay
        calibration={calibrationState}
        videoManager={videoManager}
        localVideoRef={localVideoRef}
        onRequestStart={handleCalibrationServerStart}
        setupOpen={calibrationSetupOpen}
        setSetupOpen={setCalibrationSetupOpen}
      />
      {renderIntroCard()}
      {/* Display area: hidden PiP + camera <video>s and the visible canvas. */}
      <section id="display-area" className="focus-display-shell">
        <video ref={pipVideoRef} muted playsInline autoPlay style={{ position: 'absolute', width: '1px', height: '1px', opacity: 0, pointerEvents: 'none' }} />
        <video ref={localVideoRef} muted playsInline autoPlay style={{ display: 'none' }} />
        <canvas ref={displayCanvasRef} width={640} height={480} style={{ width: '100%', height: '100%', objectFit: 'contain', backgroundColor: '#101010' }} />
        {flowStep === FLOW_STEPS.ready ? (
          <>
            <div className={`focus-state-pill ${focusState}`}>
              <span className="focus-state-dot" />
              {focusStateLabel}
            </div>
            {!cameraReady && !videoManager?.isStreaming ? (
              <div className="focus-idle-overlay">
                <p>Camera is paused.</p>
                <span>Use Start to enable the camera and begin detection.</span>
              </div>
            ) : null}
          </>
        ) : null}
        {sessionResult && (
          <div className="session-result-overlay">
            <h3>Session Complete!</h3>
            <div className="result-item"><span className="label">Duration:</span><span className="value">{formatDuration(sessionResult.duration_seconds)}</span></div>
            <div className="result-item"><span className="label">Focus Score:</span><span className="value">{(sessionResult.focus_score * 100).toFixed(1)}%</span></div>
            <button onClick={handleCloseOverlay} style={{ marginTop: '20px', padding: '8px 20px', background: 'transparent', border: '1px solid white', color: 'white', borderRadius: '20px', cursor: 'pointer' }}>Close</button>
          </div>
        )}
      </section>
      {/* Server CPU/RAM strip (only when the poll returned data). */}
      {systemStats && (systemStats.cpu_percent != null || systemStats.memory_percent != null) && (
        <section style={{ display: 'flex', alignItems: 'center', justifyContent: 'center', gap: '16px', padding: '6px 12px', background: 'rgba(0,0,0,0.3)', borderRadius: '8px', margin: '6px auto', maxWidth: '400px', fontSize: '13px', color: '#aaa' }}>
          <span title="Server CPU">CPU: <strong style={{ color: systemStats.cpu_percent > 80 ? '#ff6b6b' : systemStats.cpu_percent > 50 ? '#ffc168' : '#66d9a0' }}>{systemStats.cpu_percent}%</strong></span>
          <span title="Server memory">RAM: <strong style={{ color: systemStats.memory_percent > 85 ? '#ff6b6b' : systemStats.memory_percent > 60 ? '#ffc168' : '#66d9a0' }}>{systemStats.memory_percent}%</strong> ({systemStats.memory_used_mb}/{systemStats.memory_total_mb} MB)</span>
        </section>
      )}
      {flowStep === FLOW_STEPS.ready ? (
        <>
          {availableModels.length > 0 ? (
            <section className="focus-model-strip">
              <span className="focus-model-label">Model:</span>
              {availableModels.map((name) => (
                <button key={name} onClick={() => handleModelChange(name)} className={`focus-model-button ${currentModel === name ? 'active' : ''}`}>{name}</button>
              ))}
            </section>
          ) : null}
          {/* Rolling focus/distracted timeline (green/red blocks). */}
          <section id="timeline-area">
            <div className="timeline-label">Timeline</div>
            <div id="timeline-visuals">
              {timelineEvents.map((event, index) => (
                <div key={index} className="timeline-block" style={{ backgroundColor: event.isFocused ? '#00FF00' : '#FF0000', width: '10px', height: '20px', display: 'inline-block', marginRight: '2px', borderRadius: '2px' }} title={event.isFocused ? 'Focused' : 'Distracted'} />
              ))}
            </div>
            <div id="timeline-line" />
          </section>
          <section id="control-panel">
            <button id="btn-cam-start" className="action-btn green" onClick={handleStart} disabled={isStarting}>{isStarting ? 'Starting...' : 'Start'}</button>
            <button type="button" className="action-btn" style={{ backgroundColor: eyeGazeEnabled ? '#8b5cf6' : '#475569', position: 'relative' }} onClick={handleEyeGazeToggle} title={eyeGazeEnabled ? (isCalibrated ? 'Eye Gaze ON (Calibrated)' : 'Eye Gaze ON (Uncalibrated)') : 'Enable L2CS eye gaze tracking'}>
              Eye Gaze {eyeGazeEnabled ? 'ON' : 'OFF'}
              {eyeGazeEnabled && <span style={{ position: 'absolute', top: '-4px', right: '-4px', width: '10px', height: '10px', borderRadius: '50%', backgroundColor: isCalibrated ? '#4ade80' : '#fbbf24', border: '2px solid #1e1e2e' }} title={isCalibrated ? 'Calibrated' : 'Not calibrated'} />}
            </button>
            {eyeGazeEnabled ? <button type="button" className="action-btn" style={{ backgroundColor: isCalibrated ? '#22c55e' : '#8b5cf6' }} onClick={handleCalibrate} disabled={!videoManager?.isStreaming} title="9-point gaze calibration for accurate tracking">{isCalibrated ? 'Re-Calibrate' : 'Calibrate'}</button> : null}
            <button id="btn-floating" className="action-btn yellow" onClick={handlePiP}>Floating Window</button>
            <button id="btn-preview" className="action-btn" style={{ backgroundColor: '#ff7a52' }} onClick={handlePreview}>Preview Result</button>
            <button id="btn-cam-stop" className="action-btn red" onClick={handleStop}>Stop</button>
          </section>
          {eyeGazeEnabled && videoManager?.isStreaming ? (
            <section style={{ display: 'flex', alignItems: 'center', justifyContent: 'center', gap: '14px', padding: '8px 14px', background: 'rgba(0,0,0,0.3)', borderRadius: '10px', margin: '6px auto', maxWidth: '400px' }}>
              <div style={{ textAlign: 'center' }}>
                <div style={{ fontSize: '11px', color: '#888', marginBottom: '4px', letterSpacing: '0.5px' }}>GAZE MAP {isCalibrated ? '(calibrated)' : '(raw)'}</div>
                <GazeMiniMap gazeData={gazeData} />
              </div>
            </section>
          ) : null}
          {cameraError ? <div className="focus-inline-error focus-inline-error-standalone">{cameraError}</div> : null}
          <section id="frame-control">
            <label htmlFor="frame-slider">Frame Rate (FPS)</label>
            <input type="range" id="frame-slider" min="10" max="30" value={currentFrame} onChange={(e) => handleFrameChange(e.target.value)} />
            <input type="number" id="frame-input" min="10" max="30" value={currentFrame} onChange={(e) => handleFrameChange(e.target.value)} />
          </section>
        </>
      ) : null}
    </main>
  );
}
export default FocusPageLocal;