// src/utils/VideoManagerLocal.js
// Local video processing pipeline (no WebRTC): captures webcam frames,
// streams them to the backend over a WebSocket as JPEG blobs, and renders
// detection overlays (face mesh, HUD, gaze pointer) onto a display canvas.
export class VideoManagerLocal {
  /**
   * @param {Object} [callbacks] Optional event hooks:
   *   onSessionStart(sessionId), onSessionEnd(summary),
   *   onStatusUpdate(focused), onCalibrationUpdate(calibrationState).
   */
  constructor(callbacks) {
    this.callbacks = callbacks || {};
    this.localVideoElement = null;   // <video> showing the raw camera feed
    this.displayVideoElement = null; // retained for backward compatibility (not used; display goes to a canvas)
    this.canvas = null;              // off-screen canvas used to capture frames for upload
    this.stream = null;              // MediaStream from getUserMedia
    this.ws = null;
    this.isStreaming = false;
    this.sessionId = null;
    this.sessionStartTime = null;
    this.frameRate = 15;             // modest frame rate to limit network load
    this.captureInterval = null;

    // Focus-status smoothing: majority vote over a short sliding window.
    this.currentStatus = false;
    this.statusBuffer = [];
    this.bufferSize = 3;

    // Latest detection payload from the server.
    this.latestDetectionData = null;
    this.lastConfidence = 0;

    // Tessellation connections (fetched once from server).
    this._tessellation = null;

    // Continuous render loop handle.
    this._animFrameId = null;

    // Desktop notification settings.
    this.notificationEnabled = true;
    this.notificationThreshold = 30;   // seconds unfocused before alerting
    this.unfocusedStartTime = null;
    this.lastNotificationTime = null;
    this.notificationCooldown = 60000; // min ms between notifications

    // Calibration state (mirrors server-driven calibration messages).
    this.calibration = {
      active: false,
      collecting: false,
      target: null,
      index: 0,
      numPoints: 0,
      done: false,
      success: false,
    };

    // Performance statistics.
    this.stats = {
      framesSent: 0,
      framesProcessed: 0,
      avgLatency: 0,
      lastLatencies: [],
    };
    // Timestamp (performance.now()) of the most recent frame send.
    // Read by the detection handler to estimate send→receive latency.
    // FIX: previously never assigned anywhere, so avgLatency stayed 0 forever.
    this._lastSendTime = null;
  }

  /**
   * Acquire the webcam and wire it to the local <video> element.
   * @param {HTMLVideoElement} localVideoRef - element to show the raw feed.
   * @param {HTMLCanvasElement} displayCanvasRef - canvas for processed output.
   * @returns {Promise<boolean>} true on success.
   * @throws propagates getUserMedia errors (e.g. NotAllowedError).
   */
  async initCamera(localVideoRef, displayCanvasRef) {
    try {
      console.log('Initializing local camera...');
      this.stream = await navigator.mediaDevices.getUserMedia({
        video: {
          width: { ideal: 640 },
          height: { ideal: 480 },
          facingMode: 'user',
        },
        audio: false,
      });

      this.localVideoElement = localVideoRef;
      this.displayCanvas = displayCanvasRef;

      // Show the local video stream.
      if (this.localVideoElement) {
        this.localVideoElement.srcObject = this.stream;
        // FIX: play() returns a promise that can reject (autoplay policy);
        // swallow with a warning instead of an unhandled rejection.
        this.localVideoElement
          .play()
          .catch((e) => console.warn('Video play() rejected:', e));
      }

      // Off-screen canvas for frame capture (small for fast encode + transfer).
      this.canvas = document.createElement('canvas');
      this.canvas.width = 640;
      this.canvas.height = 480;

      console.log('Local camera initialized');
      return true;
    } catch (error) {
      console.error('Camera init error:', error);
      throw error;
    }
  }

  /**
   * Start the full streaming pipeline: mesh topology fetch, notification
   * setup, WebSocket session, frame capture loop, and render loop.
   * @throws {Error} if the camera has not been initialized.
   */
  async startStreaming() {
    if (!this.stream) {
      throw new Error('Camera not initialized');
    }
    if (this.isStreaming) {
      console.warn('Already streaming');
      return;
    }

    console.log('Starting WebSocket streaming...');
    this.isStreaming = true;

    // Fetch tessellation topology (once).
    if (!this._tessellation) {
      try {
        const res = await fetch('/api/mesh-topology');
        const data = await res.json();
        this._tessellation = data.tessellation; // [[start, end], ...]
      } catch (e) {
        console.warn('Failed to fetch mesh topology:', e);
      }
    }

    // Notification permission + user settings.
    await this.requestNotificationPermission();
    await this.loadNotificationSettings();

    // Establish the WebSocket connection.
    await this.connectWebSocket();

    // Begin periodic capture-and-send.
    this.startCapture();

    // Continuous render loop for smooth video.
    this._lastDetection = null;
    this._startRenderLoop();

    console.log('Streaming started');
  }

  /**
   * Open the WebSocket to /ws/video and register handlers.
   * Resolves once the socket opens and a start_session request is sent.
   * Auto-reconnects after 2s while streaming is active.
   */
  async connectWebSocket() {
    return new Promise((resolve, reject) => {
      const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
      const wsUrl = `${protocol}//${window.location.host}/ws/video`;
      console.log('Connecting to WebSocket:', wsUrl);

      this.ws = new WebSocket(wsUrl);

      this.ws.onopen = () => {
        console.log('WebSocket connected');
        // Request a new session.
        this.ws.send(JSON.stringify({ type: 'start_session' }));
        resolve();
      };

      this.ws.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          this.handleServerMessage(data);
        } catch (e) {
          console.error('Failed to parse message:', e);
        }
      };

      this.ws.onerror = (error) => {
        console.error('WebSocket error:', error);
        reject(error);
      };

      this.ws.onclose = () => {
        console.log('WebSocket disconnected');
        if (this.isStreaming) {
          console.log('Attempting to reconnect...');
          // FIX: handle the reconnect promise so a failed reconnect does not
          // surface as an unhandled rejection.
          setTimeout(
            () =>
              this.connectWebSocket().catch((e) =>
                console.error('Reconnect failed:', e)
              ),
            2000
          );
        }
      };
    });
  }

  /**
   * Start the periodic capture loop: draw the video frame to the off-screen
   * canvas, JPEG-encode it, and send the binary blob over the WebSocket.
   * Skips a tick if the previous frame is still encoding.
   */
  startCapture() {
    const interval = 1000 / this.frameRate;
    this._sendingBlob = false; // prevent overlapping toBlob calls

    this.captureInterval = setInterval(() => {
      if (!this.isStreaming || !this.ws || this.ws.readyState !== WebSocket.OPEN) return;
      if (this._sendingBlob) return; // previous frame still encoding, skip

      try {
        const ctx = this.canvas.getContext('2d');
        ctx.drawImage(this.localVideoElement, 0, 0, this.canvas.width, this.canvas.height);

        this._sendingBlob = true;
        this.canvas.toBlob(
          (blob) => {
            this._sendingBlob = false;
            if (blob && this.ws && this.ws.readyState === WebSocket.OPEN) {
              // FIX: record the send time so the detection handler can
              // compute send→receive latency (was never set before).
              this._lastSendTime = performance.now();
              this.ws.send(blob);
              this.stats.framesSent++;
            }
          },
          'image/jpeg',
          0.75
        );
      } catch (error) {
        this._sendingBlob = false;
        console.error('Capture error:', error);
      }
    }, interval);

    console.log(`Capturing at ${this.frameRate} FPS`);
  }

  /**
   * Continuous render loop: draws the camera feed plus the last detection
   * overlay at the display's refresh rate (HUD styled after live_demo.py).
   */
  _startRenderLoop() {
    const render = () => {
      if (!this.isStreaming) return;

      if (this.displayCanvas && this.localVideoElement && this.localVideoElement.readyState >= 2) {
        const ctx = this.displayCanvas.getContext('2d');
        const w = this.displayCanvas.width;
        const h = this.displayCanvas.height;

        // Always draw the live camera feed.
        ctx.drawImage(this.localVideoElement, 0, 0, w, h);

        // Overlay last known detection results.
        const data = this._lastDetection;
        if (data) {
          if (data.landmarks) {
            this.drawFaceMesh(ctx, data.landmarks, w, h);
          }

          // Top HUD bar (matching live_demo.py).
          ctx.fillStyle = 'rgba(0,0,0,0.7)';
          ctx.fillRect(0, 0, w, 55);

          const statusText = data.focused ? 'FOCUSED' : 'NOT FOCUSED';
          const color = data.focused ? '#00FF00' : '#FF0000';
          ctx.fillStyle = color;
          ctx.font = 'bold 18px Arial';
          ctx.fillText(statusText, 10, 22);

          // Model name + mesh label (top right).
          if (data.model) {
            ctx.fillStyle = '#FFFFFF';
            ctx.font = '12px Arial';
            ctx.textAlign = 'right';
            ctx.fillText(data.model.toUpperCase(), w - 10, 22);
            ctx.textAlign = 'left';
          }

          // Detail line: conf, S_face, S_eye, MAR.
          ctx.fillStyle = '#FFFFFF';
          ctx.font = '12px Arial';
          let detail = `conf:${(data.confidence || 0).toFixed(2)}`;
          if (data.sf !== undefined) detail += ` S_face:${data.sf.toFixed(2)}`;
          if (data.se !== undefined) detail += ` S_eye:${data.se.toFixed(2)}`;
          if (data.mar !== undefined) detail += ` MAR:${data.mar.toFixed(2)}`;
          ctx.fillText(detail, 10, 38);

          // Head pose angles (right side).
          if (data.yaw !== undefined) {
            ctx.fillStyle = '#B4B4B4';
            ctx.font = '11px Arial';
            ctx.textAlign = 'right';
            ctx.fillText(
              `yaw:${data.yaw > 0 ? '+' : ''}${data.yaw.toFixed(0)} pitch:${data.pitch > 0 ? '+' : ''}${data.pitch.toFixed(0)} roll:${data.roll > 0 ? '+' : ''}${data.roll.toFixed(0)}`,
              w - 10,
              48
            );
            ctx.textAlign = 'left';
          }
        }

        // Gaze pointer (L2CS + calibration).
        if (data && data.gaze_x !== undefined && data.gaze_y !== undefined) {
          const gx = data.gaze_x * w;
          const gy = data.gaze_y * h;
          ctx.beginPath();
          ctx.arc(gx, gy, 8, 0, 2 * Math.PI);
          ctx.fillStyle = data.on_screen ? 'rgba(0, 200, 255, 0.7)' : 'rgba(255, 80, 80, 0.5)';
          ctx.fill();
          ctx.strokeStyle = '#FFFFFF';
          ctx.lineWidth = 2;
          ctx.stroke();
        }

        // Performance stats strip (bottom).
        ctx.fillStyle = 'rgba(0,0,0,0.5)';
        ctx.fillRect(0, h - 25, w, 25);
        ctx.font = '12px Arial';
        ctx.fillStyle = '#FFFFFF';
        ctx.fillText(`FPS: ${this.frameRate} | Latency: ${this.stats.avgLatency.toFixed(0)}ms`, 10, h - 8);
      }

      this._animFrameId = requestAnimationFrame(render);
    };
    this._animFrameId = requestAnimationFrame(render);
  }

  /** Cancel the render loop, if running. */
  _stopRenderLoop() {
    if (this._animFrameId) {
      cancelAnimationFrame(this._animFrameId);
      this._animFrameId = null;
    }
  }

  /**
   * Dispatch a parsed server message by its `type` field.
   * Handles session lifecycle, detection results, calibration flow, errors.
   */
  handleServerMessage(data) {
    switch (data.type) {
      case 'session_started':
        this.sessionId = data.session_id;
        this.sessionStartTime = Date.now();
        console.log('Session started:', this.sessionId);
        if (this.callbacks.onSessionStart) {
          this.callbacks.onSessionStart(this.sessionId);
        }
        break;

      case 'detection': {
        this.stats.framesProcessed++;

        // Track latency from send→receive (rolling average over last 10).
        const now = performance.now();
        if (this._lastSendTime) {
          const latency = now - this._lastSendTime;
          this.stats.lastLatencies.push(latency);
          if (this.stats.lastLatencies.length > 10) this.stats.lastLatencies.shift();
          this.stats.avgLatency =
            this.stats.lastLatencies.reduce((a, b) => a + b, 0) / this.stats.lastLatencies.length;
        }

        this.updateStatus(data.focused);
        this.latestDetectionData = {
          confidence: data.confidence || 0,
          focused: data.focused,
          timestamp: now,
        };
        this.lastConfidence = data.confidence || 0;

        if (this.callbacks.onStatusUpdate) {
          this.callbacks.onStatusUpdate(this.currentStatus);
        }

        // Normalize response: server sends 'lm' (sparse) or 'landmarks'.
        const detectionData = {
          focused: data.focused,
          confidence: data.confidence || 0,
          model: data.model,
          landmarks: data.lm || data.landmarks || null,
          yaw: data.yaw,
          pitch: data.pitch,
          roll: data.roll,
          mar: data.mar,
          sf: data.sf,
          se: data.se,
          gaze_x: data.gaze_x,
          gaze_y: data.gaze_y,
          on_screen: data.on_screen,
        };
        this.drawDetectionResult(detectionData);
        break;
      }

      case 'session_ended':
        console.log('Received session_ended message');
        console.log('Session summary:', data.summary);
        if (this.callbacks.onSessionEnd) {
          console.log('Calling onSessionEnd callback');
          this.callbacks.onSessionEnd(data.summary);
        } else {
          console.warn('No onSessionEnd callback registered');
        }
        this.sessionId = null;
        this.sessionStartTime = null;
        break;

      case 'calibration_started':
        this.calibration = {
          active: true,
          collecting: true,
          target: data.target,
          index: data.index,
          numPoints: data.num_points,
          done: false,
          success: false,
        };
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;

      case 'calibration_point':
        this.calibration.target = data.target;
        this.calibration.index = data.index;
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;

      case 'calibration_done':
        this.calibration.collecting = false;
        this.calibration.done = true;
        this.calibration.success = data.success;
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        // Briefly show the done state, then deactivate the calibration UI.
        setTimeout(() => {
          this.calibration.active = false;
          if (this.callbacks.onCalibrationUpdate) {
            this.callbacks.onCalibrationUpdate({ ...this.calibration });
          }
        }, 2000);
        break;

      case 'calibration_cancelled':
        this.calibration = { active: false, collecting: false, target: null, index: 0, numPoints: 0, done: false, success: false };
        if (this.callbacks.onCalibrationUpdate) {
          this.callbacks.onCalibrationUpdate({ ...this.calibration });
        }
        break;

      case 'error':
        console.error('Server error:', data.message);
        break;

      default:
        console.log('Unknown message type:', data.type);
    }
  }

  /** Ask the server to begin gaze calibration. */
  startCalibration() {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify({ type: 'calibration_start' }));
    }
  }

  /** Advance the server-driven calibration to the next target point. */
  nextCalibrationPoint() {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify({ type: 'calibration_cancel' ? 'calibration_next' : 'calibration_next' }));
    }
  }

  /** Cancel calibration on the server and reset local calibration state. */
  cancelCalibration() {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify({ type: 'calibration_cancel' }));
    }
    this.calibration = { active: false, collecting: false, target: null, index: 0, numPoints: 0, done: false, success: false };
    if (this.callbacks.onCalibrationUpdate) {
      this.callbacks.onCalibrationUpdate({ ...this.calibration });
    }
  }

  // Face mesh landmark index groups (matches live_demo.py).
  static FACE_OVAL = [10,338,297,332,284,251,389,356,454,323,361,288,397,365,379,378,400,377,152,148,176,149,150,136,172,58,132,93,234,127,162,21,54,103,67,109,10];
  static LEFT_EYE = [33,7,163,144,145,153,154,155,133,173,157,158,159,160,161,246];
  static RIGHT_EYE = [362,382,381,380,374,373,390,249,263,466,388,387,386,385,384,398];
  static LEFT_IRIS = [468,469,470,471,472];
  static RIGHT_IRIS = [473,474,475,476,477];
  static LEFT_EYEBROW = [70,63,105,66,107,55,65,52,53,46];
  static RIGHT_EYEBROW = [300,293,334,296,336,285,295,282,283,276];
  static NOSE_BRIDGE = [6,197,195,5,4,1,19,94,2];
  static LIPS_OUTER = [61,146,91,181,84,17,314,405,321,375,291,409,270,269,267,0,37,39,40,185,61];
  static LIPS_INNER = [78,95,88,178,87,14,317,402,318,324,308,415,310,311,312,13,82,81,80,191,78];
  static LEFT_EAR_POINTS = [33, 160, 158, 133, 153, 145];
  static RIGHT_EAR_POINTS = [362, 385, 387, 263, 373, 380];
  // Iris/eye corners for gaze lines.
  static LEFT_IRIS_CENTER = 468;
  static RIGHT_IRIS_CENTER = 473;
  static LEFT_EYE_INNER = 133;
  static LEFT_EYE_OUTER = 33;
  static RIGHT_EYE_INNER = 362;
  static RIGHT_EYE_OUTER = 263;

  /**
   * Draw a polyline through landmark indices.
   * @param {Array|Object} lm - landmarks as dense array or sparse {index: [x,y]} object.
   * @param {boolean} [closed=false] - close the path back to the first point.
   * Missing points are skipped (sparse payloads may omit indices).
   */
  _drawPolyline(ctx, lm, indices, w, h, color, lineWidth, closed = false) {
    if (!lm || indices.length < 2) return;
    const isArray = Array.isArray(lm);
    const _get = isArray ? (i) => lm[i] : (i) => lm[String(i)];

    ctx.beginPath();
    const firstPt = _get(indices[0]);
    if (!firstPt) return;
    ctx.moveTo(firstPt[0] * w, firstPt[1] * h);
    for (let i = 1; i < indices.length; i++) {
      const pt = _get(indices[i]);
      if (!pt) continue;
      ctx.lineTo(pt[0] * w, pt[1] * h);
    }
    if (closed) ctx.closePath();
    ctx.strokeStyle = color;
    ctx.lineWidth = lineWidth;
    ctx.stroke();
  }

  /**
   * Draw the full face-mesh overlay from landmarks.
   * Supports both a dense array (>=468 normalized [x,y] points) and a
   * sparse object keyed by stringified landmark index.
   */
  drawFaceMesh(ctx, lm, w, h) {
    if (!lm) return;
    const isArray = Array.isArray(lm);
    // For array format need at least 468 entries; for sparse object just check it has keys.
    if (isArray && lm.length < 468) return;
    if (!isArray && typeof lm === 'object' && Object.keys(lm).length === 0) return;
    const _get = isArray ? (i) => lm[i] : (i) => lm[String(i)];

    // Tessellation (gray triangular grid, semi-transparent) — dense arrays only.
    if (this._tessellation && isArray) {
      ctx.strokeStyle = 'rgba(200,200,200,0.25)';
      ctx.lineWidth = 1;
      ctx.beginPath();
      for (const [s, e] of this._tessellation) {
        const ps = lm[s], pe = lm[e];
        if (!ps || !pe) continue;
        ctx.moveTo(ps[0] * w, ps[1] * h);
        ctx.lineTo(pe[0] * w, pe[1] * h);
      }
      ctx.stroke();
    }

    // Face oval.
    this._drawPolyline(ctx, lm, VideoManagerLocal.FACE_OVAL, w, h, 'rgba(0,255,255,0.5)', 1, true);
    // Eyebrows.
    this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYEBROW, w, h, '#90EE90', 2);
    this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYEBROW, w, h, '#90EE90', 2);
    // Eyes.
    this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYE, w, h, '#00FF00', 2, true);
    this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYE, w, h, '#00FF00', 2, true);
    // Nose bridge.
    this._drawPolyline(ctx, lm, VideoManagerLocal.NOSE_BRIDGE, w, h, 'rgba(0,165,255,0.6)', 1);
    // Lips.
    this._drawPolyline(ctx, lm, VideoManagerLocal.LIPS_OUTER, w, h, '#FF00FF', 1);
    this._drawPolyline(ctx, lm, VideoManagerLocal.LIPS_INNER, w, h, 'rgba(200,0,200,0.7)', 1);

    // EAR key points (yellow dots).
    for (const earIndices of [VideoManagerLocal.LEFT_EAR_POINTS, VideoManagerLocal.RIGHT_EAR_POINTS]) {
      for (const idx of earIndices) {
        const pt = _get(idx);
        if (!pt) continue;
        ctx.beginPath();
        ctx.arc(pt[0] * w, pt[1] * h, 3, 0, 2 * Math.PI);
        ctx.fillStyle = '#FFFF00';
        ctx.fill();
      }
    }

    // Irises (circles + gaze direction lines). Note: iris[0] is the iris
    // center landmark (468/473), so no separate center index is needed.
    const irisSets = [
      { iris: VideoManagerLocal.LEFT_IRIS, inner: VideoManagerLocal.LEFT_EYE_INNER, outer: VideoManagerLocal.LEFT_EYE_OUTER },
      { iris: VideoManagerLocal.RIGHT_IRIS, inner: VideoManagerLocal.RIGHT_EYE_INNER, outer: VideoManagerLocal.RIGHT_EYE_OUTER },
    ];
    for (const { iris, inner, outer } of irisSets) {
      const centerPt = _get(iris[0]);
      if (!centerPt) continue;
      const cx = centerPt[0] * w, cy = centerPt[1] * h;

      // Average distance from center to rim points gives the iris radius.
      let radiusSum = 0, count = 0;
      for (let i = 1; i < iris.length; i++) {
        const pt = _get(iris[i]);
        if (!pt) continue;
        radiusSum += Math.hypot(pt[0] * w - cx, pt[1] * h - cy);
        count++;
      }
      const radius = Math.max(count > 0 ? radiusSum / count : 3, 2);

      // Iris circle.
      ctx.beginPath();
      ctx.arc(cx, cy, radius, 0, 2 * Math.PI);
      ctx.strokeStyle = '#FF00FF';
      ctx.lineWidth = 2;
      ctx.stroke();

      // Iris center dot.
      ctx.beginPath();
      ctx.arc(cx, cy, 2, 0, 2 * Math.PI);
      ctx.fillStyle = '#FFFFFF';
      ctx.fill();

      // Gaze direction line (red): iris offset from eye center, amplified 3x.
      const innerPt = _get(inner);
      const outerPt = _get(outer);
      if (innerPt && outerPt) {
        const eyeCx = (innerPt[0] + outerPt[0]) / 2.0 * w;
        const eyeCy = (innerPt[1] + outerPt[1]) / 2.0 * h;
        const dx = cx - eyeCx;
        const dy = cy - eyeCy;
        ctx.beginPath();
        ctx.moveTo(cx, cy);
        ctx.lineTo(cx + dx * 3, cy + dy * 3);
        ctx.strokeStyle = '#FF0000';
        ctx.lineWidth = 1;
        ctx.stroke();
      }
    }
  }

  /** Store detection data for the render loop to draw on its next frame. */
  drawDetectionResult(data) {
    this._lastDetection = data;
  }

  /**
   * Smooth the raw per-frame focus flag with a majority vote over the last
   * bufferSize samples (hysteresis: >=75% flips to focused, <=25% flips to
   * unfocused, in between keeps the previous state).
   */
  updateStatus(newFocused) {
    this.statusBuffer.push(newFocused);
    if (this.statusBuffer.length > this.bufferSize) {
      this.statusBuffer.shift();
    }
    // Not enough samples yet to make a decision.
    if (this.statusBuffer.length < this.bufferSize) return false;

    const focusedCount = this.statusBuffer.filter(f => f).length;
    const focusedRatio = focusedCount / this.statusBuffer.length;

    const previousStatus = this.currentStatus;
    if (focusedRatio >= 0.75) {
      this.currentStatus = true;
    } else if (focusedRatio <= 0.25) {
      this.currentStatus = false;
    }

    this.handleNotificationLogic(previousStatus, this.currentStatus);
  }

  /**
   * Track focused→unfocused transitions and fire a desktop notification
   * once the user has been unfocused past the threshold, respecting the
   * notification cooldown.
   */
  handleNotificationLogic(previousStatus, currentStatus) {
    const now = Date.now();
    if (previousStatus && !currentStatus) {
      this.unfocusedStartTime = now;
    }
    if (!previousStatus && currentStatus) {
      this.unfocusedStartTime = null;
    }
    if (!currentStatus && this.unfocusedStartTime) {
      const unfocusedDuration = (now - this.unfocusedStartTime) / 1000;
      if (unfocusedDuration >= this.notificationThreshold) {
        const canSendNotification =
          !this.lastNotificationTime ||
          (now - this.lastNotificationTime) >= this.notificationCooldown;
        if (canSendNotification) {
          this.sendNotification(
            'Focus Alert',
            `You've been distracted for ${Math.floor(unfocusedDuration)} seconds. Get back to work!`
          );
          this.lastNotificationTime = now;
        }
      }
    }
  }

  /** Request browser notification permission if not yet decided. */
  async requestNotificationPermission() {
    if ('Notification' in window && Notification.permission === 'default') {
      try {
        await Notification.requestPermission();
      } catch (error) {
        console.error('Failed to request notification permission:', error);
      }
    }
  }

  /** Load notification preferences from the server; keep defaults on failure. */
  async loadNotificationSettings() {
    try {
      const response = await fetch('/api/settings');
      const settings = await response.json();
      if (settings) {
        this.notificationEnabled = settings.notification_enabled ?? true;
        this.notificationThreshold = settings.notification_threshold ?? 30;
      }
    } catch (error) {
      console.error('Failed to load notification settings:', error);
    }
  }

  /**
   * Show a desktop notification (auto-closes after 3s). No-op when disabled
   * or permission was not granted.
   */
  sendNotification(title, message) {
    if (!this.notificationEnabled) return;
    if ('Notification' in window && Notification.permission === 'granted') {
      try {
        const notification = new Notification(title, {
          body: message,
          icon: '/vite.svg',
          badge: '/vite.svg',
          tag: 'focus-guard-distraction',
          requireInteraction: false,
        });
        setTimeout(() => notification.close(), 3000);
      } catch (error) {
        console.error('Failed to send notification:', error);
      }
    }
  }

  /**
   * Stop streaming: halt capture and rendering, request session end and wait
   * (max 2s) for the server's session_ended, then tear down the socket,
   * camera, and canvases.
   */
  async stopStreaming() {
    console.log('Stopping streaming...');
    this.isStreaming = false;

    // Stop render loop.
    this._stopRenderLoop();
    this._lastDetection = null;

    // Stop frame capture.
    if (this.captureInterval) {
      clearInterval(this.captureInterval);
      this.captureInterval = null;
    }

    // Request end of session and wait for the server's acknowledgment.
    if (this.ws && this.ws.readyState === WebSocket.OPEN && this.sessionId) {
      const sessionId = this.sessionId;

      // Temporarily intercept onmessage so we can resolve on session_ended
      // while still forwarding other messages to the normal handler.
      const waitForSessionEnd = new Promise((resolve) => {
        const originalHandler = this.ws.onmessage;
        const timeout = setTimeout(() => {
          this.ws.onmessage = originalHandler;
          console.log('Session end timeout, proceeding anyway');
          resolve();
        }, 2000);

        this.ws.onmessage = (event) => {
          try {
            const data = JSON.parse(event.data);
            if (data.type === 'session_ended') {
              clearTimeout(timeout);
              this.handleServerMessage(data);
              this.ws.onmessage = originalHandler;
              resolve();
            } else {
              // Still process other messages.
              this.handleServerMessage(data);
            }
          } catch (e) {
            console.error('Failed to parse message:', e);
          }
        };
      });

      console.log('Sending end_session request for session:', sessionId);
      this.ws.send(JSON.stringify({ type: 'end_session', session_id: sessionId }));

      // Wait for the response or the timeout.
      await waitForSessionEnd;
    }

    // Short delay so the close frame does not race outgoing messages.
    await new Promise(resolve => setTimeout(resolve, 200));

    // Close the WebSocket.
    if (this.ws) {
      this.ws.close();
      this.ws = null;
    }

    // Stop the camera.
    if (this.stream) {
      this.stream.getTracks().forEach(track => track.stop());
      this.stream = null;
    }

    // Detach the video element.
    if (this.localVideoElement) {
      this.localVideoElement.srcObject = null;
    }

    // Clear the display canvas.
    if (this.displayCanvas) {
      const ctx = this.displayCanvas.getContext('2d');
      ctx.clearRect(0, 0, this.displayCanvas.width, this.displayCanvas.height);
    }

    // Reset notification bookkeeping.
    this.unfocusedStartTime = null;
    this.lastNotificationTime = null;

    console.log('Streaming stopped');
    console.log('Stats:', this.stats);
  }

  /**
   * Set the capture frame rate, clamped to [10, 30] FPS.
   * Restarts the capture loop if currently streaming.
   */
  setFrameRate(rate) {
    this.frameRate = Math.max(10, Math.min(30, rate));
    console.log(`Frame rate set to ${this.frameRate} FPS`);
    if (this.isStreaming && this.captureInterval) {
      clearInterval(this.captureInterval);
      this.startCapture();
    }
  }

  /** @returns {Object} a snapshot of counters plus current streaming state. */
  getStats() {
    return {
      ...this.stats,
      isStreaming: this.isStreaming,
      sessionId: this.sessionId,
      currentStatus: this.currentStatus,
      lastConfidence: this.lastConfidence,
    };
  }
}