// src/utils/VideoManagerLocal.js
// Local video processing implementation using WebSocket + Canvas, without WebRTC.

// Baseline calibration state; frozen so it can never be mutated by accident.
const DEFAULT_CALIBRATION_STATE = Object.freeze({
  active: false,
  collecting: false,
  target: null,
  index: 0,
  numPoints: 0,
  done: false,
  success: false,
});

// Produce a fresh calibration-state object, optionally overriding fields.
const createCalibrationState = (overrides = {}) => ({
  ...DEFAULT_CALIBRATION_STATE,
  ...overrides,
});

// Format a number as a signed integer string ("+5", "-3", "0").
const formatSignedInt = (value) => `${value > 0 ? '+' : ''}${value.toFixed(0)}`;

// Build the HUD head-pose line, e.g. "yaw:+5 pitch:-3 roll:0".
const buildHeadPoseText = (data) => (
  `yaw:${formatSignedInt(data.yaw)} ` +
  `pitch:${formatSignedInt(data.pitch)} ` +
  `roll:${formatSignedInt(data.roll)}`
);

/**
 * Captures webcam frames, streams them to the backend over a WebSocket as
 * JPEG blobs, and renders the returned detection overlay (face mesh, HUD,
 * gaze minimap) onto a display canvas.
 *
 * Callbacks (all optional): onSessionStart(sessionId), onStatusUpdate(bool),
 * onSessionEnd(summary), onCalibrationUpdate(state).
 */
export class VideoManagerLocal {
  constructor(callbacks) {
    this.callbacks = callbacks || {};
    this.localVideoElement = null; // Local camera preview element.
    this.displayCanvas = null; // Processed output display canvas.
    this.canvas = null; // Off-screen capture canvas for encoding frames.
    this.stream = null;
    this.ws = null;
    this.isStreaming = false;
    this.sessionId = null;
    this.sessionStartTime = null;
    this.frameRate = 15; // Lower FPS reduces transfer and processing load.
    this.captureInterval = null;
    this.reconnectTimeout = null;
    // Status smoothing
    this.currentStatus = false;
    this.statusBuffer = [];
    this.bufferSize = 3;
    // Detection data
    this.latestDetectionData = null;
    this.lastConfidence = 0;
    // Tessellation connections (fetched once from server)
    this._tessellation = null;
    // Continuous render loop
    this._animFrameId = null;
    // Last detection payload drawn by the render loop.
    this._lastDetection = null;
    // Guards against overlapping toBlob encodes.
    this._sendingBlob = false;
    // Timestamp of the most recent frame send; used for latency measurement.
    this._lastSendTime = null;
    // Calibration state
    this.calibration = createCalibrationState();
    // Performance metrics
    this.stats = { framesSent: 0, framesProcessed: 0, avgLatency: 0, lastLatencies: [] };
  }

  /**
   * Acquire the user's camera and prepare capture/display surfaces.
   * @param {HTMLVideoElement} localVideoRef - element for the raw preview.
   * @param {HTMLCanvasElement} displayCanvasRef - element for the processed output.
   * @returns {Promise<boolean>} true on success.
   * @throws if getUserMedia fails (permission denied, no device, ...).
   */
  async initCamera(localVideoRef, displayCanvasRef) {
    try {
      console.log('Initializing local camera...');
      this.stream = await navigator.mediaDevices.getUserMedia({
        video: { width: { ideal: 640 }, height: { ideal: 480 }, facingMode: 'user' },
        audio: false
      });
      this.localVideoElement = localVideoRef;
      this.displayCanvas = displayCanvasRef;
      // Show the local camera stream
      if (this.localVideoElement) {
        this.localVideoElement.srcObject = this.stream;
        // play() returns a promise that can reject (e.g. autoplay policy);
        // log instead of leaving the rejection unhandled.
        this.localVideoElement.play().catch((err) => {
          console.warn('Local video play() was rejected:', err);
        });
      }
      // Create a smaller capture canvas for faster encoding and transfer.
      this.canvas = document.createElement('canvas');
      this.canvas.width = 640;
      this.canvas.height = 480;
      console.log('Local camera initialized');
      return true;
    } catch (error) {
      console.error('Camera init error:', error);
      throw error;
    }
  }

  /**
   * Start streaming: fetch mesh topology once, connect the WebSocket,
   * begin frame capture and the render loop. On failure all partially
   * started resources are torn down before rethrowing.
   */
  async startStreaming() {
    if (!this.stream) {
      throw new Error('Camera not initialized');
    }
    if (this.isStreaming) {
      console.warn('Already streaming');
      return;
    }
    console.log('Starting WebSocket streaming...');
    this.isStreaming = true;
    try {
      // Fetch tessellation topology (once)
      if (!this._tessellation) {
        try {
          const res = await fetch('/api/mesh-topology');
          const data = await res.json();
          this._tessellation = data.tessellation; // [[start, end], ...]
        } catch (e) {
          // Best effort: overlay still works without the tessellation grid.
          console.warn('Failed to fetch mesh topology:', e);
        }
      }
      // Open the WebSocket connection
      await this.connectWebSocket();
      // Start sending captured frames on a timer
      this.startCapture();
      // Start continuous render loop for smooth video
      this._lastDetection = null;
      this._startRenderLoop();
      console.log('Streaming started');
    } catch (error) {
      // Roll back everything started so far so a retry begins clean.
      this.isStreaming = false;
      this._stopRenderLoop();
      this._lastDetection = null;
      if (this.captureInterval) {
        clearInterval(this.captureInterval);
        this.captureInterval = null;
      }
      if (this.reconnectTimeout) {
        clearTimeout(this.reconnectTimeout);
        this.reconnectTimeout = null;
      }
      if (this.ws) {
        this.ws.onopen = null;
        this.ws.onmessage = null;
        this.ws.onerror = null;
        this.ws.onclose = null;
        try { this.ws.close(); } catch (_) { }
        this.ws = null;
      }
      throw error instanceof Error ? error : new Error('Failed to start video streaming.');
    }
  }

  /**
   * Open the /ws/video WebSocket. Resolves once the socket is open and the
   * start_session message has been sent; rejects with a descriptive error
   * if the connection fails or closes before opening. While streaming, an
   * unexpected close schedules a reconnect after 2s.
   */
  async connectWebSocket() {
    return new Promise((resolve, reject) => {
      const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
      const wsUrl = `${protocol}//${window.location.host}/ws/video`;
      console.log('Connecting to WebSocket:', wsUrl);
      const socket = new WebSocket(wsUrl);
      this.ws = socket;
      let settled = false; // promise already resolved/rejected?
      let opened = false;  // did onopen ever fire?
      const rejectWithMessage = (message) => {
        if (settled) return;
        settled = true;
        reject(new Error(message));
      };
      socket.onopen = () => {
        opened = true;
        settled = true;
        console.log('WebSocket connected');
        // Send the start-session control message
        socket.send(JSON.stringify({ type: 'start_session' }));
        resolve();
      };
      socket.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          this.handleServerMessage(data);
        } catch (e) {
          console.error('Failed to parse message:', e);
        }
      };
      socket.onerror = () => {
        console.error('WebSocket error:', { url: wsUrl, readyState: socket.readyState });
        rejectWithMessage(
          `Failed to connect to ${wsUrl}. ` +
          'Check that the backend server is running and reachable.'
        );
      };
      socket.onclose = (event) => {
        console.log('WebSocket disconnected', event.code, event.reason);
        if (this.ws === socket) {
          this.ws = null;
        }
        if (!opened) {
          // Closed before the handshake completed: surface a clear error.
          rejectWithMessage(
            `WebSocket closed before connection was established ` +
            `(${event.code || 'no code'}). ` +
            'Check that the backend server is running on the expected port.'
          );
          return;
        }
        if (this.isStreaming) {
          console.log('Attempting to reconnect...');
          if (this.reconnectTimeout) {
            clearTimeout(this.reconnectTimeout);
          }
          this.reconnectTimeout = setTimeout(() => {
            this.reconnectTimeout = null;
            if (!this.isStreaming) return;
            this.connectWebSocket().catch((error) => {
              console.error('Reconnect failed:', error);
            });
          }, 2000);
        }
      };
    });
  }

  /**
   * Capture and send frames (binary blobs for speed) on a fixed interval.
   * Skips a tick while the previous frame is still encoding so toBlob
   * calls never overlap. Records the send timestamp so the detection
   * handler can compute round-trip latency.
   */
  startCapture() {
    const interval = 1000 / this.frameRate;
    this._sendingBlob = false; // prevent overlapping toBlob calls
    this.captureInterval = setInterval(() => {
      if (!this.isStreaming || !this.ws || this.ws.readyState !== WebSocket.OPEN) return;
      if (this._sendingBlob) return; // previous frame still encoding, skip
      try {
        const ctx = this.canvas.getContext('2d');
        ctx.drawImage(this.localVideoElement, 0, 0, this.canvas.width, this.canvas.height);
        this._sendingBlob = true;
        this.canvas.toBlob((blob) => {
          this._sendingBlob = false;
          if (blob && this.ws && this.ws.readyState === WebSocket.OPEN) {
            this.ws.send(blob);
            // Record the send time; handleServerMessage('detection') uses it
            // to compute round-trip latency. (Previously never set, which
            // left stats.avgLatency permanently at 0.)
            this._lastSendTime = performance.now();
            this.stats.framesSent++;
          }
        }, 'image/jpeg', 0.75);
      } catch (error) {
        this._sendingBlob = false;
        console.error('Capture error:', error);
      }
    }, interval);
    console.log(`Capturing at ${this.frameRate} FPS`);
  }

  // Continuous render loop: draws camera feed + last detection overlay at display refresh rate
  _startRenderLoop() {
    const render = () => {
      if (!this.isStreaming) return;
      if (this.displayCanvas && this.localVideoElement && this.localVideoElement.readyState >= 2) {
        const ctx = this.displayCanvas.getContext('2d');
        const w = this.displayCanvas.width;
        const h = this.displayCanvas.height;
        // Always draw the live camera feed
        ctx.drawImage(this.localVideoElement, 0, 0, w, h);
        // Overlay last known detection results
        const data = this._lastDetection;
        if (data) {
          if (data.landmarks) {
            this.drawFaceMesh(ctx, data.landmarks, w, h);
          }
          // Top HUD bar (matching live_demo.py)
          ctx.fillStyle = 'rgba(0,0,0,0.7)';
          ctx.fillRect(0, 0, w, 55);
          const statusText = data.focused ? 'FOCUSED' : 'NOT FOCUSED';
          const color = data.focused ? '#00FF00' : '#FF0000';
          ctx.fillStyle = color;
          ctx.font = 'bold 18px Arial';
          ctx.fillText(statusText, 10, 22);
          // Model name + mesh label (top right)
          if (data.model) {
            ctx.fillStyle = '#FFFFFF';
            ctx.font = '12px Arial';
            ctx.textAlign = 'right';
            ctx.fillText(data.model.toUpperCase(), w - 10, 22);
            ctx.textAlign = 'left';
          }
          // Detail line: conf, S_face, S_eye, MAR
          ctx.fillStyle = '#FFFFFF';
          ctx.font = '12px Arial';
          let detail = `conf:${(data.confidence || 0).toFixed(2)}`;
          if (data.sf !== undefined) detail += ` S_face:${data.sf.toFixed(2)}`;
          if (data.se !== undefined) detail += ` S_eye:${data.se.toFixed(2)}`;
          if (data.mar !== undefined) detail += ` MAR:${data.mar.toFixed(2)}`;
          ctx.fillText(detail, 10, 38);
          // Head pose angles (right side)
          if (data.yaw !== undefined) {
            ctx.fillStyle = '#B4B4B4';
            ctx.font = '11px Arial';
            ctx.textAlign = 'right';
            ctx.fillText(buildHeadPoseText(data), w - 10, 48);
            ctx.textAlign = 'left';
          }
        }
        // Gaze minimap — small screen representation in bottom-right corner
        if (data && data.gaze_x !== undefined && data.gaze_y !== undefined) {
          const mapW = 120;
          const mapH = 80;
          const mapPad = 10;
          const mapX = w - mapW - mapPad;
          const mapY = h - mapH - 30; // above the performance stats bar
          // Background (rounded rect with fallback)
          ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';
          ctx.beginPath();
          if (ctx.roundRect) {
            ctx.roundRect(mapX - 4, mapY - 4, mapW + 8, mapH + 8, 6);
          } else {
            ctx.rect(mapX - 4, mapY - 4, mapW + 8, mapH + 8);
          }
          ctx.fill();
          // Screen area
          ctx.fillStyle = data.on_screen ? 'rgba(30, 40, 60, 0.9)' : 'rgba(60, 20, 20, 0.9)';
          ctx.fillRect(mapX, mapY, mapW, mapH);
          ctx.strokeStyle = data.on_screen ? 'rgba(100, 180, 255, 0.6)' : 'rgba(255, 100, 100, 0.6)';
          ctx.lineWidth = 1;
          ctx.strokeRect(mapX, mapY, mapW, mapH);
          // Gaze dot — clamp to minimap bounds for visibility
          const dotX = mapX + Math.max(0, Math.min(1, data.gaze_x)) * mapW;
          const dotY = mapY + Math.max(0, Math.min(1, data.gaze_y)) * mapH;
          const dotColor = data.on_screen ? '#00FF00' : '#FF4444';
          // Glow
          ctx.beginPath();
          ctx.arc(dotX, dotY, 8, 0, 2 * Math.PI);
          ctx.fillStyle = data.on_screen ? 'rgba(0, 255, 0, 0.15)' : 'rgba(255, 68, 68, 0.15)';
          ctx.fill();
          // Dot
          ctx.beginPath();
          ctx.arc(dotX, dotY, 4, 0, 2 * Math.PI);
          ctx.fillStyle = dotColor;
          ctx.fill();
          ctx.strokeStyle = '#FFFFFF';
          ctx.lineWidth = 1.5;
          ctx.stroke();
          // Label
          ctx.fillStyle = 'rgba(255, 255, 255, 0.6)';
          ctx.font = '9px Arial';
          ctx.textAlign = 'left';
          ctx.fillText('GAZE', mapX + 3, mapY + 10);
        }
        // Performance stats
        ctx.fillStyle = 'rgba(0,0,0,0.5)';
        ctx.fillRect(0, h - 25, w, 25);
        ctx.font = '12px Arial';
        ctx.fillStyle = '#FFFFFF';
        ctx.fillText(`FPS: ${this.frameRate} | Latency: ${this.stats.avgLatency.toFixed(0)}ms`, 10, h - 8);
      }
      this._animFrameId = requestAnimationFrame(render);
    };
    this._animFrameId = requestAnimationFrame(render);
  }

  // Cancel the render loop scheduled by _startRenderLoop, if any.
  _stopRenderLoop() {
    if (this._animFrameId) {
      cancelAnimationFrame(this._animFrameId);
      this._animFrameId = null;
    }
  }

  /**
   * Dispatch a parsed server message by its `type` field.
   * Handles session lifecycle, detection results (with latency tracking and
   * status smoothing), and the calibration protocol.
   */
  handleServerMessage(data) {
    switch (data.type) {
      case 'session_started':
        this.sessionId = data.session_id;
        this.sessionStartTime = Date.now();
        console.log('Session started:', this.sessionId);
        if (this.callbacks.onSessionStart) {
          this.callbacks.onSessionStart(this.sessionId);
        }
        break;
      case 'detection': {
        this.stats.framesProcessed++;
        // Track latency from send→receive over a rolling window of 10.
        const now = performance.now();
        if (this._lastSendTime) {
          const latency = now - this._lastSendTime;
          this.stats.lastLatencies.push(latency);
          if (this.stats.lastLatencies.length > 10) this.stats.lastLatencies.shift();
          const latencySum = this.stats.lastLatencies.reduce((a, b) => a + b, 0);
          this.stats.avgLatency = latencySum / this.stats.lastLatencies.length;
        }
        this.updateStatus(data.focused);
        this.latestDetectionData = {
          confidence: data.confidence || 0,
          focused: data.focused,
          timestamp: now
        };
        this.lastConfidence = data.confidence || 0;
        if (this.callbacks.onStatusUpdate) {
          this.callbacks.onStatusUpdate(this.currentStatus);
        }
        // Normalize response: server sends 'lm' (sparse) or 'landmarks'
        const detectionData = {
          focused: data.focused,
          confidence: data.confidence || 0,
          model: data.model,
          landmarks: data.lm || data.landmarks || null,
          yaw: data.yaw,
          pitch: data.pitch,
          roll: data.roll,
          mar: data.mar,
          sf: data.sf,
          se: data.se,
          gaze_x: data.gaze_x,
          gaze_y: data.gaze_y,
          on_screen: data.on_screen,
          gaze_yaw: data.gaze_yaw,
          gaze_pitch: data.gaze_pitch,
          eye_gaze_enabled: data.eye_gaze_enabled || false,
        };
        this.drawDetectionResult(detectionData);
        break;
      }
      case 'session_ended':
        console.log('Received session_ended message');
        console.log('Session summary:', data.summary);
        if (this.callbacks.onSessionEnd) {
          console.log('Calling onSessionEnd callback');
          this.callbacks.onSessionEnd(data.summary);
        } else {
          console.warn('No onSessionEnd callback registered');
        }
        this.sessionId = null;
        this.sessionStartTime = null;
        break;
      case 'calibration_started':
        this.calibration = {
          active: true,
          collecting: true,
          target: data.target,
          index: data.index,
          numPoints: data.num_points,
          done: false,
          success: false,
        };
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;
      case 'calibration_point':
        this.calibration.target = data.target;
        this.calibration.index = data.index;
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;
      case 'calibration_verify':
        this.calibration.collecting = true;
        this.calibration.target = data.target;
        this.calibration.index = -1; // special: verification phase
        this.calibration.verifying = true;
        this.calibration.verifyMessage = data.message || 'Verify calibration';
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;
      case 'calibration_done':
        this.calibration.collecting = false;
        this.calibration.verifying = false;
        this.calibration.done = true;
        this.calibration.success = data.success;
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        // Keep the result visible briefly, then deactivate the overlay.
        setTimeout(() => {
          this.calibration.active = false;
          if (this.callbacks.onCalibrationUpdate) {
            this.callbacks.onCalibrationUpdate({ ...this.calibration });
          }
        }, 2000);
        break;
      case 'calibration_cancelled':
        this.calibration = createCalibrationState();
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;
      case 'error':
        console.error('Server error:', data.message);
        break;
      default:
        console.log('Unknown message type:', data.type);
    }
  }

  // Ask the server to begin gaze calibration (no-op if socket not open).
  startCalibration() {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify({ type: 'calibration_start' }));
    }
  }

  // Advance to the next calibration point (no-op if socket not open).
  nextCalibrationPoint() {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify({ type: 'calibration_cancel' === '' ? '' : 'calibration_next' }));
    }
  }

  // Cancel calibration server-side (best effort) and reset local state.
  cancelCalibration() {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify({ type: 'calibration_cancel' }));
    }
    this.calibration = createCalibrationState();
    if (this.callbacks.onCalibrationUpdate) {
      this.callbacks.onCalibrationUpdate({ ...this.calibration });
    }
  }

  // Face mesh landmark index groups (matches live_demo.py)
  static FACE_OVAL = [
    10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288, 397, 365, 379,
    378, 400, 377, 152, 148, 176, 149, 150, 136, 172, 58, 132, 93, 234, 127,
    162, 21, 54, 103, 67, 109, 10,
  ];
  static LEFT_EYE = [33, 7, 163, 144, 145, 153, 154, 155, 133, 173, 157, 158, 159, 160, 161, 246];
  static RIGHT_EYE = [362, 382, 381, 380, 374, 373, 390, 249, 263, 466, 388, 387, 386, 385, 384, 398];
  static LEFT_IRIS = [468, 469, 470, 471, 472];
  static RIGHT_IRIS = [473, 474, 475, 476, 477];
  static LEFT_EYEBROW = [70, 63, 105, 66, 107, 55, 65, 52, 53, 46];
  static RIGHT_EYEBROW = [300, 293, 334, 296, 336, 285, 295, 282, 283, 276];
  static NOSE_BRIDGE = [6, 197, 195, 5, 4, 1, 19, 94, 2];
  static LIPS_OUTER = [61, 146, 91, 181, 84, 17, 314, 405, 321, 375, 291, 409, 270, 269, 267, 0, 37, 39, 40, 185, 61];
  static LIPS_INNER = [78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308, 415, 310, 311, 312, 13, 82, 81, 80, 191, 78];
  static LEFT_EAR_POINTS = [33, 160, 158, 133, 153, 145];
  static RIGHT_EAR_POINTS = [362, 385, 387, 263, 373, 380];
  // Iris/eye corners for gaze lines
  static LEFT_IRIS_CENTER = 468;
  static RIGHT_IRIS_CENTER = 473;
  static LEFT_EYE_INNER = 133;
  static LEFT_EYE_OUTER = 33;
  static RIGHT_EYE_INNER = 362;
  static RIGHT_EYE_OUTER = 263;

  // Draw a polyline through landmark indices (lm can be array or sparse object)
  _drawPolyline(ctx, lm, indices, w, h, color, lineWidth, closed = false) {
    if (!lm || indices.length < 2) return;
    const isArray = Array.isArray(lm);
    const _get = isArray ? (i) => lm[i] : (i) => lm[String(i)];
    ctx.beginPath();
    const firstPt = _get(indices[0]);
    if (!firstPt) return;
    ctx.moveTo(firstPt[0] * w, firstPt[1] * h);
    for (let i = 1; i < indices.length; i++) {
      const pt = _get(indices[i]);
      if (!pt) continue;
      ctx.lineTo(pt[0] * w, pt[1] * h);
    }
    if (closed) ctx.closePath();
    ctx.strokeStyle = color;
    ctx.lineWidth = lineWidth;
    ctx.stroke();
  }

  /**
   * Draw the full face-mesh overlay: tessellation grid, contours, EAR
   * points, irises, and gaze direction lines. `lm` may be a dense array
   * of [x, y] pairs (normalized 0..1) or a sparse { "idx": [x, y] } object.
   */
  drawFaceMesh(ctx, lm, w, h) {
    if (!lm) return;
    const isArray = Array.isArray(lm);
    // For array format need at least 468 entries; for sparse object just check it has keys
    if (isArray && lm.length < 468) return;
    if (!isArray && typeof lm === 'object' && Object.keys(lm).length === 0) return;
    const _get = isArray ? (i) => lm[i] : (i) => lm[String(i)];
    // Tessellation (gray triangular grid, semi-transparent)
    if (this._tessellation && isArray) {
      ctx.strokeStyle = 'rgba(200,200,200,0.25)';
      ctx.lineWidth = 1;
      ctx.beginPath();
      for (const [s, e] of this._tessellation) {
        const ps = lm[s], pe = lm[e];
        if (!ps || !pe) continue;
        ctx.moveTo(ps[0] * w, ps[1] * h);
        ctx.lineTo(pe[0] * w, pe[1] * h);
      }
      ctx.stroke();
    }
    // Face oval
    this._drawPolyline(ctx, lm, VideoManagerLocal.FACE_OVAL, w, h, 'rgba(0,255,255,0.5)', 1, true);
    // Eyebrows
    this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYEBROW, w, h, '#90EE90', 2);
    this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYEBROW, w, h, '#90EE90', 2);
    // Eyes
    this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYE, w, h, '#00FF00', 2, true);
    this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYE, w, h, '#00FF00', 2, true);
    // Nose bridge
    this._drawPolyline(ctx, lm, VideoManagerLocal.NOSE_BRIDGE, w, h, 'rgba(0,165,255,0.6)', 1);
    // Lips
    this._drawPolyline(ctx, lm, VideoManagerLocal.LIPS_OUTER, w, h, '#FF00FF', 1);
    this._drawPolyline(ctx, lm, VideoManagerLocal.LIPS_INNER, w, h, 'rgba(200,0,200,0.7)', 1);
    // EAR key points (yellow dots)
    for (const earIndices of [VideoManagerLocal.LEFT_EAR_POINTS, VideoManagerLocal.RIGHT_EAR_POINTS]) {
      for (const idx of earIndices) {
        const pt = _get(idx);
        if (!pt) continue;
        ctx.beginPath();
        ctx.arc(pt[0] * w, pt[1] * h, 3, 0, 2 * Math.PI);
        ctx.fillStyle = '#FFFF00';
        ctx.fill();
      }
    }
    // Irises (circles + gaze direction lines)
    const irisSets = [
      {
        iris: VideoManagerLocal.LEFT_IRIS,
        inner: VideoManagerLocal.LEFT_EYE_INNER,
        outer: VideoManagerLocal.LEFT_EYE_OUTER,
      },
      {
        iris: VideoManagerLocal.RIGHT_IRIS,
        inner: VideoManagerLocal.RIGHT_EYE_INNER,
        outer: VideoManagerLocal.RIGHT_EYE_OUTER,
      },
    ];
    // Get L2CS gaze angles + on_screen state + eye gaze toggle from latest detection data
    const detection = this._lastDetection;
    const gazeYaw = detection ? detection.gaze_yaw : undefined;
    const gazePitch = detection ? detection.gaze_pitch : undefined;
    const onScreen = detection ? detection.on_screen : undefined;
    const eyeGazeEnabled = detection ? detection.eye_gaze_enabled : false;
    const hasL2CSGaze = gazeYaw !== undefined && gazePitch !== undefined;
    const gazeLineColor = (onScreen === false) ? '#FF0000' : '#00FF00';
    const gazeLineLength = 100;
    for (const { iris, inner, outer } of irisSets) {
      // iris[0] is the iris center landmark (468 / 473).
      const centerPt = _get(iris[0]);
      if (!centerPt) continue;
      const cx = centerPt[0] * w, cy = centerPt[1] * h;
      let radiusSum = 0, count = 0;
      for (let i = 1; i < iris.length; i++) {
        const pt = _get(iris[i]);
        if (!pt) continue;
        radiusSum += Math.hypot(pt[0] * w - cx, pt[1] * h - cy);
        count++;
      }
      const radius = Math.max(count > 0 ? radiusSum / count : 3, 2);
      // Iris circle
      ctx.beginPath();
      ctx.arc(cx, cy, radius, 0, 2 * Math.PI);
      ctx.strokeStyle = '#FF00FF';
      ctx.lineWidth = 2;
      ctx.stroke();
      // Iris center anchor dot (color-coded)
      ctx.beginPath();
      ctx.arc(cx, cy, 3, 0, 2 * Math.PI);
      ctx.fillStyle = gazeLineColor;
      ctx.fill();
      ctx.strokeStyle = '#FFFFFF';
      ctx.lineWidth = 1;
      ctx.stroke();
      // Gaze direction line — only draw when eye gaze toggle is ON
      if (eyeGazeEnabled) {
        if (hasL2CSGaze) {
          // L2CS pitch/yaw in radians -> pixel direction vector
          // Matches upstream L2CS-Net vis.py draw_gaze formula:
          //   dx = -length * sin(pitch) * cos(yaw)
          //   dy = -length * sin(yaw)
          const dx = -gazeLineLength * Math.sin(gazePitch) * Math.cos(gazeYaw);
          const dy = -gazeLineLength * Math.sin(gazeYaw);
          const ex = cx + dx;
          const ey = cy + dy;
          // Main gaze line (thick, color-coded)
          ctx.beginPath();
          ctx.moveTo(cx, cy);
          ctx.lineTo(ex, ey);
          ctx.strokeStyle = gazeLineColor;
          ctx.lineWidth = 3;
          ctx.stroke();
          // Arrowhead
          const angle = Math.atan2(ey - cy, ex - cx);
          const arrowLen = 10;
          ctx.beginPath();
          ctx.moveTo(ex, ey);
          ctx.lineTo(ex - arrowLen * Math.cos(angle - 0.4), ey - arrowLen * Math.sin(angle - 0.4));
          ctx.moveTo(ex, ey);
          ctx.lineTo(ex - arrowLen * Math.cos(angle + 0.4), ey - arrowLen * Math.sin(angle + 0.4));
          ctx.strokeStyle = gazeLineColor;
          ctx.lineWidth = 2;
          ctx.stroke();
        } else {
          // Geometric fallback: iris displacement from eye center (scaled up)
          const innerPt = _get(inner);
          const outerPt = _get(outer);
          if (innerPt && outerPt) {
            const eyeCx = (innerPt[0] + outerPt[0]) / 2.0 * w;
            const eyeCy = (innerPt[1] + outerPt[1]) / 2.0 * h;
            const fdx = cx - eyeCx;
            const fdy = cy - eyeCy;
            const flen = Math.hypot(fdx, fdy);
            if (flen > 0.5) {
              const scale = gazeLineLength / flen;
              ctx.beginPath();
              ctx.moveTo(cx, cy);
              ctx.lineTo(cx + fdx * scale, cy + fdy * scale);
              ctx.strokeStyle = '#00FFFF';
              ctx.lineWidth = 2;
              ctx.stroke();
            }
          }
        }
      }
      // When eye gaze is OFF, no gaze lines are drawn
    }
  }

  // Store detection data for the render loop to draw
  drawDetectionResult(data) {
    this._lastDetection = data;
  }

  /**
   * Smooth the focused/not-focused signal over a small rolling buffer
   * with hysteresis: flips true at >= 75% focused, false at <= 25%.
   * In between, the previous status is retained.
   */
  updateStatus(newFocused) {
    this.statusBuffer.push(newFocused);
    if (this.statusBuffer.length > this.bufferSize) {
      this.statusBuffer.shift();
    }
    // Wait until the buffer is full before updating the smoothed status.
    if (this.statusBuffer.length < this.bufferSize) return false;
    const focusedCount = this.statusBuffer.filter(f => f).length;
    const focusedRatio = focusedCount / this.statusBuffer.length;
    if (focusedRatio >= 0.75) {
      this.currentStatus = true;
    } else if (focusedRatio <= 0.25) {
      this.currentStatus = false;
    }
  }

  /**
   * Stop everything: render loop, capture timer, session (waits up to 2s
   * for the server's session_ended reply), WebSocket, camera tracks, and
   * clears both video element and display canvas.
   */
  async stopStreaming() {
    console.log('Stopping streaming...');
    this.isStreaming = false;
    if (this.reconnectTimeout) {
      clearTimeout(this.reconnectTimeout);
      this.reconnectTimeout = null;
    }
    // Stop the render loop
    this._stopRenderLoop();
    this._lastDetection = null;
    // Stop frame capture
    if (this.captureInterval) {
      clearInterval(this.captureInterval);
      this.captureInterval = null;
    }
    // Send the end-session request and wait for the response
    if (this.ws && this.ws.readyState === WebSocket.OPEN && this.sessionId) {
      const sessionId = this.sessionId;
      // Wait for the session_ended message
      const waitForSessionEnd = new Promise((resolve) => {
        const originalHandler = this.ws.onmessage;
        const timeout = setTimeout(() => {
          // The socket may have closed (and been nulled) in the meantime.
          if (this.ws) {
            this.ws.onmessage = originalHandler;
          }
          console.log('Session end timeout, proceeding anyway');
          resolve();
        }, 2000);
        this.ws.onmessage = (event) => {
          try {
            const data = JSON.parse(event.data);
            if (data.type === 'session_ended') {
              clearTimeout(timeout);
              this.handleServerMessage(data);
              if (this.ws) {
                this.ws.onmessage = originalHandler;
              }
              resolve();
            } else {
              // Continue handling non-terminal messages
              this.handleServerMessage(data);
            }
          } catch (e) {
            console.error('Failed to parse message:', e);
          }
        };
      });
      console.log('Sending end_session request for session:', sessionId);
      this.ws.send(JSON.stringify({ type: 'end_session', session_id: sessionId }));
      // Wait for the response or a timeout
      await waitForSessionEnd;
    }
    // Delay socket shutdown briefly so pending messages can flush
    await new Promise(resolve => setTimeout(resolve, 200));
    // Close the WebSocket
    if (this.ws) {
      this.ws.close();
      this.ws = null;
    }
    // Stop the camera
    if (this.stream) {
      this.stream.getTracks().forEach(track => track.stop());
      this.stream = null;
    }
    // Clear the video element
    if (this.localVideoElement) {
      this.localVideoElement.srcObject = null;
    }
    // Clear the canvas
    if (this.displayCanvas) {
      const ctx = this.displayCanvas.getContext('2d');
      ctx.clearRect(0, 0, this.displayCanvas.width, this.displayCanvas.height);
    }
    console.log('Streaming stopped');
    console.log('Stats:', this.stats);
  }

  /**
   * Set the capture frame rate, clamped to [10, 30] FPS; restarts the
   * capture timer if streaming is already active.
   */
  setFrameRate(rate) {
    this.frameRate = Math.max(10, Math.min(30, rate));
    console.log(`Frame rate set to ${this.frameRate} FPS`);
    // Restart capture if streaming is already active
    if (this.isStreaming && this.captureInterval) {
      clearInterval(this.captureInterval);
      this.startCapture();
    }
  }

  // Snapshot of performance counters plus current streaming state.
  getStats() {
    return {
      ...this.stats,
      isStreaming: this.isStreaming,
      sessionId: this.sessionId,
      currentStatus: this.currentStatus,
      lastConfidence: this.lastConfidence
    };
  }
}