// Provenance (from repository file viewer, not code):
// integration_test2/src/utils/VideoManagerLocal.js — "update src/" by Kexin-251202, commit 5cc4335 (verified).
// src/utils/VideoManagerLocal.js
// Local video processing implementation using WebSocket + Canvas, without WebRTC.
export class VideoManagerLocal {
/**
 * Local video processing manager: captures webcam frames, streams them to
 * the backend over a WebSocket, and renders detection overlays on a canvas.
 *
 * @param {Object} [callbacks] - Optional event hooks. Keys invoked by this
 *   class: onSessionStart(sessionId), onStatusUpdate(bool),
 *   onGazeData({gaze_x, gaze_y, on_screen}), onCalibrationUpdate(state),
 *   onSessionEnd(summary).
 */
constructor(callbacks) {
  this.callbacks = callbacks || {};
  this.localVideoElement = null; // Local camera preview element.
  this.displayVideoElement = null; // Processed output display element. NOTE(review): never assigned elsewhere in this file — rendering uses this.displayCanvas instead.
  this.canvas = null; // Hidden 640x480 capture canvas, created in initCamera().
  this.stream = null; // MediaStream from getUserMedia().
  this.ws = null; // Active WebSocket to the backend (/ws/video).
  this.isStreaming = false; // Gates capture, render loop, and WS auto-reconnect.
  this.sessionId = null; // Assigned by the server via 'session_started'.
  this.sessionStartTime = null;
  this.frameRate = 15; // Lower FPS reduces transfer and processing load.
  this.captureInterval = null; // setInterval handle for frame capture.
  this.reconnectTimeout = null; // setTimeout handle for WS reconnect backoff.
  // Status smoothing: majority vote over the last `bufferSize` readings.
  this.currentStatus = false;
  this.statusBuffer = [];
  this.bufferSize = 3;
  // Detection data
  this.latestDetectionData = null;
  this.lastConfidence = 0;
  // Tessellation connections (fetched once from server)
  this._tessellation = null;
  // Continuous render loop
  this._animFrameId = null; // requestAnimationFrame id, cleared by _stopRenderLoop().
  // Notification state
  this.notificationEnabled = true;
  this.notificationThreshold = 30; // Seconds unfocused before a notification fires.
  this.unfocusedStartTime = null;
  this.lastNotificationTime = null;
  this.notificationCooldown = 60000; // Minimum ms between notifications.
  // Performance metrics
  this.stats = {
    framesSent: 0,
    framesProcessed: 0,
    avgLatency: 0, // Rolling mean over the last 10 round trips, in ms.
    lastLatencies: []
  };
  // Calibration state (9-point gaze calibration)
  this.calibrationState = {
    active: false,
    collecting: false,
    done: false,
    success: false,
    target: [0.5, 0.5], // Normalized screen coordinates of the current target.
    index: 0,
    numPoints: 9
  };
}
// Initialize the camera
async initCamera(localVideoRef, displayCanvasRef) {
try {
console.log('Initializing local camera...');
this.stream = await navigator.mediaDevices.getUserMedia({
video: {
width: { ideal: 640 },
height: { ideal: 480 },
facingMode: 'user'
},
audio: false
});
this.localVideoElement = localVideoRef;
this.displayCanvas = displayCanvasRef;
// Show the local camera stream
if (this.localVideoElement) {
this.localVideoElement.srcObject = this.stream;
this.localVideoElement.play();
}
// Capture at 640x480 for L2CS / gaze (matches HF commit 2eba0cc).
this.canvas = document.createElement('canvas');
this.canvas.width = 640;
this.canvas.height = 480;
console.log('Local camera initialized');
return true;
} catch (error) {
console.error('Camera init error:', error);
throw error;
}
}
/**
 * Start the streaming pipeline: fetch mesh topology (once), request
 * notification permission, load settings, open the WebSocket, then start
 * the frame-capture timer and the continuous render loop.
 *
 * On any failure, tears down everything it started (flag, render loop,
 * timers, socket handlers) before rethrowing, so a retry begins clean.
 * @throws {Error} when the camera is uninitialized or connection fails.
 */
async startStreaming() {
  if (!this.stream) {
    throw new Error('Camera not initialized');
  }
  if (this.isStreaming) {
    console.warn('Already streaming');
    return;
  }
  console.log('Starting WebSocket streaming...');
  // Set before connecting: the socket's onclose handler consults this flag
  // to decide whether to auto-reconnect.
  this.isStreaming = true;
  try {
    // Fetch tessellation topology (once). Failure is non-fatal: the mesh
    // overlay is simply skipped.
    if (!this._tessellation) {
      try {
        const res = await fetch('/api/mesh-topology');
        const data = await res.json();
        this._tessellation = data.tessellation; // [[start, end], ...]
      } catch (e) {
        console.warn('Failed to fetch mesh topology:', e);
      }
    }
    // Request notification permission
    await this.requestNotificationPermission();
    await this.loadNotificationSettings();
    // Open the WebSocket connection
    await this.connectWebSocket();
    // Start sending captured frames on a timer
    this.startCapture();
    // Start continuous render loop for smooth video
    this._lastDetection = null;
    this._startRenderLoop();
    console.log('Streaming started');
  } catch (error) {
    // Roll back partial startup so state stays consistent for a retry.
    this.isStreaming = false;
    this._stopRenderLoop();
    this._lastDetection = null;
    if (this.captureInterval) {
      clearInterval(this.captureInterval);
      this.captureInterval = null;
    }
    if (this.reconnectTimeout) {
      clearTimeout(this.reconnectTimeout);
      this.reconnectTimeout = null;
    }
    if (this.ws) {
      // Detach handlers first so close() cannot trigger reconnect logic.
      this.ws.onopen = null;
      this.ws.onmessage = null;
      this.ws.onerror = null;
      this.ws.onclose = null;
      try {
        this.ws.close();
      } catch (_) {}
      this.ws = null;
    }
    throw error instanceof Error ? error : new Error('Failed to start video streaming.');
  }
}
/**
 * Open the WebSocket to /ws/video (ws/wss chosen from the page protocol)
 * and send the start-session control message on open.
 *
 * Resolves on open; rejects on error or on close-before-open. After a
 * successful open, an unexpected close schedules a reconnect attempt every
 * 2s for as long as `isStreaming` remains true.
 * @returns {Promise<void>}
 */
async connectWebSocket() {
  return new Promise((resolve, reject) => {
    const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    const wsUrl = `${protocol}//${window.location.host}/ws/video`;
    console.log('Connecting to WebSocket:', wsUrl);
    const socket = new WebSocket(wsUrl);
    this.ws = socket;
    let settled = false; // Guards against settling the promise twice.
    let opened = false;  // Distinguishes close-before-open from a later drop.
    const rejectWithMessage = (message) => {
      if (settled) return;
      settled = true;
      reject(new Error(message));
    };
    socket.onopen = () => {
      opened = true;
      settled = true;
      console.log('WebSocket connected');
      // Send the start-session control message
      socket.send(JSON.stringify({ type: 'start_session' }));
      resolve();
    };
    socket.onmessage = (event) => {
      try {
        const data = JSON.parse(event.data);
        this.handleServerMessage(data);
      } catch (e) {
        console.error('Failed to parse message:', e);
      }
    };
    socket.onerror = () => {
      console.error('WebSocket error:', { url: wsUrl, readyState: socket.readyState });
      rejectWithMessage(`Failed to connect to ${wsUrl}. Check that the backend server is running and reachable.`);
    };
    socket.onclose = (event) => {
      console.log('WebSocket disconnected', event.code, event.reason);
      // Only clear this.ws if it still points at this socket; a reconnect
      // may already have replaced it.
      if (this.ws === socket) {
        this.ws = null;
      }
      if (!opened) {
        rejectWithMessage(`WebSocket closed before connection was established (${event.code || 'no code'}). Check that the backend server is running on the expected port.`);
        return;
      }
      if (this.isStreaming) {
        console.log('Attempting to reconnect...');
        if (this.reconnectTimeout) {
          clearTimeout(this.reconnectTimeout);
        }
        this.reconnectTimeout = setTimeout(() => {
          this.reconnectTimeout = null;
          if (!this.isStreaming) return;
          this.connectWebSocket().catch((error) => {
            console.error('Reconnect failed:', error);
          });
        }, 2000);
      }
    };
  });
}
// Capture and send frames (binary blobs for speed)
startCapture() {
const interval = 1000 / this.frameRate;
this._sendingBlob = false; // prevent overlapping toBlob calls
this.captureInterval = setInterval(() => {
if (!this.isStreaming || !this.ws || this.ws.readyState !== WebSocket.OPEN) return;
if (this._sendingBlob) return; // previous frame still encoding, skip
try {
const ctx = this.canvas.getContext('2d');
ctx.drawImage(this.localVideoElement, 0, 0, this.canvas.width, this.canvas.height);
this._sendingBlob = true;
this.canvas.toBlob((blob) => {
this._sendingBlob = false;
if (blob && this.ws && this.ws.readyState === WebSocket.OPEN) {
this.ws.send(blob);
this.stats.framesSent++;
}
}, 'image/jpeg', 0.75);
} catch (error) {
this._sendingBlob = false;
console.error('Capture error:', error);
}
}, interval);
console.log(`Capturing at ${this.frameRate} FPS`);
}
/**
 * Continuous render loop: draws the live camera feed plus the most recent
 * detection overlay onto displayCanvas at display refresh rate via
 * requestAnimationFrame. Decoupled from the WebSocket message rate so the
 * video stays smooth even at low detection FPS.
 */
_startRenderLoop() {
  const render = () => {
    // Stop scheduling frames once streaming ends.
    if (!this.isStreaming) return;
    // readyState >= 2 (HAVE_CURRENT_DATA): the video has a frame to draw.
    if (this.displayCanvas && this.localVideoElement && this.localVideoElement.readyState >= 2) {
      const ctx = this.displayCanvas.getContext('2d');
      const w = this.displayCanvas.width;
      const h = this.displayCanvas.height;
      // Always draw the live camera feed
      ctx.drawImage(this.localVideoElement, 0, 0, w, h);
      // Overlay last known detection results
      const data = this._lastDetection;
      if (data) {
        const isL2cs = data.model === 'l2cs';
        // Full face mesh only for the non-L2CS model path.
        if (data.landmarks && !isL2cs) {
          this.drawFaceMesh(ctx, data.landmarks, w, h);
        }
        // Top HUD bar (matching live_demo.py)
        ctx.fillStyle = 'rgba(0,0,0,0.7)';
        ctx.fillRect(0, 0, w, 55);
        const statusText = data.focused ? 'FOCUSED' : 'NOT FOCUSED';
        const color = data.focused ? '#00FF00' : '#FF0000';
        ctx.fillStyle = color;
        ctx.font = 'bold 18px Arial';
        ctx.fillText(statusText, 10, 22);
        // Model name + mesh label (top right)
        if (data.model) {
          ctx.fillStyle = '#FFFFFF';
          ctx.font = '12px Arial';
          ctx.textAlign = 'right';
          ctx.fillText(data.model.toUpperCase(), w - 10, 22);
          ctx.textAlign = 'left'; // Restore default alignment.
        }
        // Detail line: conf, S_face, S_eye, MAR
        ctx.fillStyle = '#FFFFFF';
        ctx.font = '12px Arial';
        let detail = `conf:${(data.confidence || 0).toFixed(2)}`;
        if (data.sf !== undefined) detail += ` S_face:${data.sf.toFixed(2)}`;
        if (data.se !== undefined) detail += ` S_eye:${data.se.toFixed(2)}`;
        if (data.mar !== undefined) detail += ` MAR:${data.mar.toFixed(2)}`;
        ctx.fillText(detail, 10, 38);
        // Head pose angles (right side)
        if (data.yaw !== undefined) {
          ctx.fillStyle = '#B4B4B4';
          ctx.font = '11px Arial';
          ctx.textAlign = 'right';
          ctx.fillText(`yaw:${data.yaw > 0 ? '+' : ''}${data.yaw.toFixed(0)} pitch:${data.pitch > 0 ? '+' : ''}${data.pitch.toFixed(0)} roll:${data.roll > 0 ? '+' : ''}${data.roll.toFixed(0)}`, w - 10, 48);
          ctx.textAlign = 'left';
        }
        // Gaze pointer removed from camera — shown in mini-map only.
        // Eye gaze (L2CS): iris-based arrows matching live_demo.py
        if (isL2cs && data.landmarks) {
          const lm = data.landmarks;
          // Landmarks may be a dense array or a sparse object keyed by
          // stringified index.
          const getPt = (idx) => {
            if (!lm) return null;
            if (Array.isArray(lm)) return lm[idx] || null;
            return lm[String(idx)] || null;
          };
          // Draw eye contours (green)
          this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYE, w, h, '#00FF00', 2, true);
          this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYE, w, h, '#00FF00', 2, true);
          // EAR key points (yellow)
          for (const earIndices of [VideoManagerLocal.LEFT_EAR_POINTS, VideoManagerLocal.RIGHT_EAR_POINTS]) {
            for (const idx of earIndices) {
              const pt = getPt(idx);
              if (!pt) continue;
              ctx.beginPath();
              ctx.arc(pt[0] * w, pt[1] * h, 3, 0, 2 * Math.PI);
              ctx.fillStyle = '#FFFF00';
              ctx.fill();
            }
          }
          // Irises + gaze lines (matching live_demo.py)
          const irisSets = [
            { iris: VideoManagerLocal.LEFT_IRIS, inner: 133, outer: 33 },
            { iris: VideoManagerLocal.RIGHT_IRIS, inner: 362, outer: 263 },
          ];
          for (const { iris, inner, outer } of irisSets) {
            const centerPt = getPt(iris[0]);
            if (!centerPt) continue;
            const cx = centerPt[0] * w, cy = centerPt[1] * h;
            // Iris circle (magenta): radius = mean distance from the center
            // landmark to the ring landmarks, floored at 2px.
            let radiusSum = 0, count = 0;
            for (let i = 1; i < iris.length; i++) {
              const pt = getPt(iris[i]);
              if (!pt) continue;
              radiusSum += Math.hypot(pt[0] * w - cx, pt[1] * h - cy);
              count++;
            }
            const radius = Math.max(count > 0 ? radiusSum / count : 3, 2);
            ctx.beginPath();
            ctx.arc(cx, cy, radius, 0, 2 * Math.PI);
            ctx.strokeStyle = '#FF00FF';
            ctx.lineWidth = 2;
            ctx.stroke();
            // Iris center dot (white)
            ctx.beginPath();
            ctx.arc(cx, cy, 2, 0, 2 * Math.PI);
            ctx.fillStyle = '#FFFFFF';
            ctx.fill();
            // Gaze direction line (red) — from iris center, 3x displacement
            const innerPt = getPt(inner);
            const outerPt = getPt(outer);
            if (innerPt && outerPt) {
              const eyeCx = (innerPt[0] + outerPt[0]) / 2.0 * w;
              const eyeCy = (innerPt[1] + outerPt[1]) / 2.0 * h;
              const dx = cx - eyeCx;
              const dy = cy - eyeCy;
              ctx.beginPath();
              ctx.moveTo(cx, cy);
              ctx.lineTo(cx + dx * 3, cy + dy * 3);
              ctx.strokeStyle = '#FF0000';
              ctx.lineWidth = 1;
              ctx.stroke();
            }
          }
        }
      }
      // Performance stats
      ctx.fillStyle = 'rgba(0,0,0,0.5)';
      ctx.fillRect(0, h - 25, w, 25);
      ctx.font = '12px Arial';
      ctx.fillStyle = '#FFFFFF';
      ctx.fillText(`FPS: ${this.frameRate} | Latency: ${this.stats.avgLatency.toFixed(0)}ms`, 10, h - 8);
    }
    // Reschedule even when the video is not ready yet.
    this._animFrameId = requestAnimationFrame(render);
  };
  this._animFrameId = requestAnimationFrame(render);
}
_stopRenderLoop() {
if (this._animFrameId) {
cancelAnimationFrame(this._animFrameId);
this._animFrameId = null;
}
}
// Handle messages from the server
handleServerMessage(data) {
switch (data.type) {
case 'session_started':
this.sessionId = data.session_id;
this.sessionStartTime = Date.now();
console.log('Session started:', this.sessionId);
if (this.callbacks.onSessionStart) {
this.callbacks.onSessionStart(this.sessionId);
}
break;
case 'detection':
this.stats.framesProcessed++;
// Track latency from send→receive
const now = performance.now();
if (this._lastSendTime) {
const latency = now - this._lastSendTime;
this.stats.lastLatencies.push(latency);
if (this.stats.lastLatencies.length > 10) this.stats.lastLatencies.shift();
this.stats.avgLatency = this.stats.lastLatencies.reduce((a, b) => a + b, 0) / this.stats.lastLatencies.length;
}
this.updateStatus(data.focused);
this.latestDetectionData = {
confidence: data.confidence || 0,
focused: data.focused,
timestamp: now
};
this.lastConfidence = data.confidence || 0;
if (this.callbacks.onStatusUpdate) {
this.callbacks.onStatusUpdate(this.currentStatus);
}
// Normalize response: server sends 'lm' (sparse) or 'landmarks'
const detectionData = {
focused: data.focused,
confidence: data.confidence || 0,
model: data.model,
landmarks: data.lm || data.landmarks || null,
yaw: data.yaw,
pitch: data.pitch,
roll: data.roll,
mar: data.mar,
sf: data.sf,
se: data.se,
gaze_x: data.gaze_x,
gaze_y: data.gaze_y,
gaze_yaw: data.gaze_yaw,
gaze_pitch: data.gaze_pitch,
on_screen: data.on_screen,
};
this.drawDetectionResult(detectionData);
// Emit gaze data for mini-map
if (this.callbacks.onGazeData) {
this.callbacks.onGazeData({
gaze_x: data.gaze_x != null ? data.gaze_x : null,
gaze_y: data.gaze_y != null ? data.gaze_y : null,
on_screen: data.on_screen != null ? data.on_screen : null,
});
}
break;
case 'calibration_started':
this.calibrationState = {
active: true,
collecting: true,
done: false,
success: false,
target: data.target || [0.5, 0.5],
index: data.index ?? 0,
numPoints: data.num_points ?? 9,
};
if (this.callbacks.onCalibrationUpdate) {
this.callbacks.onCalibrationUpdate(this.calibrationState);
}
break;
case 'calibration_point':
this.calibrationState = {
...this.calibrationState,
target: data.target || [0.5, 0.5],
index: data.index ?? this.calibrationState.index,
};
if (this.callbacks.onCalibrationUpdate) {
this.callbacks.onCalibrationUpdate(this.calibrationState);
}
break;
case 'calibration_done':
this.calibrationState = {
...this.calibrationState,
active: true,
collecting: false,
done: true,
success: data.success === true,
error: data.error || null,
};
if (this.callbacks.onCalibrationUpdate) {
this.callbacks.onCalibrationUpdate(this.calibrationState);
}
break;
case 'calibration_cancelled':
this.calibrationState = {
active: false,
collecting: false,
done: false,
success: false,
target: [0.5, 0.5],
index: 0,
numPoints: 9,
};
if (this.callbacks.onCalibrationUpdate) {
this.callbacks.onCalibrationUpdate(this.calibrationState);
}
break;
case 'session_ended':
console.log('Received session_ended message');
console.log('Session summary:', data.summary);
if (this.callbacks.onSessionEnd) {
console.log('Calling onSessionEnd callback');
this.callbacks.onSessionEnd(data.summary);
} else {
console.warn('No onSessionEnd callback registered');
}
this.sessionId = null;
this.sessionStartTime = null;
break;
case 'error':
console.error('Server error:', data.message);
break;
default:
console.log('Unknown message type:', data.type);
}
}
// Face mesh landmark index groups (matches live_demo.py).
// NOTE(review): the indices appear to follow the MediaPipe Face Mesh layout
// (468-477 being the refined iris points) — confirm against the backend model.
static FACE_OVAL = [10,338,297,332,284,251,389,356,454,323,361,288,397,365,379,378,400,377,152,148,176,149,150,136,172,58,132,93,234,127,162,21,54,103,67,109,10];
static LEFT_EYE = [33,7,163,144,145,153,154,155,133,173,157,158,159,160,161,246];
static RIGHT_EYE = [362,382,381,380,374,373,390,249,263,466,388,387,386,385,384,398];
static LEFT_IRIS = [468,469,470,471,472]; // First entry is the iris center.
static RIGHT_IRIS = [473,474,475,476,477]; // First entry is the iris center.
static LEFT_EYEBROW = [70,63,105,66,107,55,65,52,53,46];
static RIGHT_EYEBROW = [300,293,334,296,336,285,295,282,283,276];
static NOSE_BRIDGE = [6,197,195,5,4,1,19,94,2];
static LIPS_OUTER = [61,146,91,181,84,17,314,405,321,375,291,409,270,269,267,0,37,39,40,185,61];
static LIPS_INNER = [78,95,88,178,87,14,317,402,318,324,308,415,310,311,312,13,82,81,80,191,78];
// Points used for the eye-aspect-ratio (EAR) markers.
static LEFT_EAR_POINTS = [33, 160, 158, 133, 153, 145];
static RIGHT_EAR_POINTS = [362, 385, 387, 263, 373, 380];
// Iris/eye corners for gaze lines
static LEFT_IRIS_CENTER = 468;
static RIGHT_IRIS_CENTER = 473;
static LEFT_EYE_INNER = 133;
static LEFT_EYE_OUTER = 33;
static RIGHT_EYE_INNER = 362;
static RIGHT_EYE_OUTER = 263;
// Draw a polyline through landmark indices (lm can be array or sparse object)
_drawPolyline(ctx, lm, indices, w, h, color, lineWidth, closed = false) {
if (!lm || indices.length < 2) return;
const isArray = Array.isArray(lm);
const _get = isArray ? (i) => lm[i] : (i) => lm[String(i)];
ctx.beginPath();
const firstPt = _get(indices[0]);
if (!firstPt) return;
ctx.moveTo(firstPt[0] * w, firstPt[1] * h);
for (let i = 1; i < indices.length; i++) {
const pt = _get(indices[i]);
if (!pt) continue;
ctx.lineTo(pt[0] * w, pt[1] * h);
}
if (closed) ctx.closePath();
ctx.strokeStyle = color;
ctx.lineWidth = lineWidth;
ctx.stroke();
}
/**
 * Draw the full face-mesh overlay from landmarks onto `ctx`.
 * Supports both a dense landmark array and a sparse object keyed by
 * stringified index. Coordinates are normalized [0,1] and scaled by (w, h).
 */
drawFaceMesh(ctx, lm, w, h) {
  if (!lm) return;
  const isArray = Array.isArray(lm);
  // For array format need at least 468 entries; for sparse object just check it has keys
  if (isArray && lm.length < 468) return;
  if (!isArray && typeof lm === 'object' && Object.keys(lm).length === 0) return;
  const _get = isArray ? (i) => lm[i] : (i) => lm[String(i)];
  // Tessellation (gray triangular grid, semi-transparent). Only drawn for
  // the dense-array format, and only if topology was fetched successfully.
  if (this._tessellation && isArray) {
    ctx.strokeStyle = 'rgba(200,200,200,0.25)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    for (const [s, e] of this._tessellation) {
      const ps = lm[s], pe = lm[e];
      if (!ps || !pe) continue;
      ctx.moveTo(ps[0] * w, ps[1] * h);
      ctx.lineTo(pe[0] * w, pe[1] * h);
    }
    ctx.stroke();
  }
  // Face oval
  this._drawPolyline(ctx, lm, VideoManagerLocal.FACE_OVAL, w, h, 'rgba(0,255,255,0.5)', 1, true);
  // Eyebrows
  this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYEBROW, w, h, '#90EE90', 2);
  this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYEBROW, w, h, '#90EE90', 2);
  // Eyes
  this._drawPolyline(ctx, lm, VideoManagerLocal.LEFT_EYE, w, h, '#00FF00', 2, true);
  this._drawPolyline(ctx, lm, VideoManagerLocal.RIGHT_EYE, w, h, '#00FF00', 2, true);
  // Nose bridge
  this._drawPolyline(ctx, lm, VideoManagerLocal.NOSE_BRIDGE, w, h, 'rgba(0,165,255,0.6)', 1);
  // Lips
  this._drawPolyline(ctx, lm, VideoManagerLocal.LIPS_OUTER, w, h, '#FF00FF', 1);
  this._drawPolyline(ctx, lm, VideoManagerLocal.LIPS_INNER, w, h, 'rgba(200,0,200,0.7)', 1);
  // EAR key points (yellow dots)
  for (const earIndices of [VideoManagerLocal.LEFT_EAR_POINTS, VideoManagerLocal.RIGHT_EAR_POINTS]) {
    for (const idx of earIndices) {
      const pt = _get(idx);
      if (!pt) continue;
      ctx.beginPath();
      ctx.arc(pt[0] * w, pt[1] * h, 3, 0, 2 * Math.PI);
      ctx.fillStyle = '#FFFF00';
      ctx.fill();
    }
  }
  // Irises (circles + gaze direction lines)
  const irisSets = [
    { iris: VideoManagerLocal.LEFT_IRIS, center: VideoManagerLocal.LEFT_IRIS_CENTER, inner: VideoManagerLocal.LEFT_EYE_INNER, outer: VideoManagerLocal.LEFT_EYE_OUTER },
    { iris: VideoManagerLocal.RIGHT_IRIS, center: VideoManagerLocal.RIGHT_IRIS_CENTER, inner: VideoManagerLocal.RIGHT_EYE_INNER, outer: VideoManagerLocal.RIGHT_EYE_OUTER },
  ];
  for (const { iris, center: centerIdx, inner, outer } of irisSets) {
    const centerPt = _get(iris[0]);
    if (!centerPt) continue;
    const cx = centerPt[0] * w, cy = centerPt[1] * h;
    // Radius = mean distance from center landmark to ring landmarks, min 2px.
    let radiusSum = 0, count = 0;
    for (let i = 1; i < iris.length; i++) {
      const pt = _get(iris[i]);
      if (!pt) continue;
      radiusSum += Math.hypot(pt[0] * w - cx, pt[1] * h - cy);
      count++;
    }
    const radius = Math.max(count > 0 ? radiusSum / count : 3, 2);
    // Iris circle
    ctx.beginPath();
    ctx.arc(cx, cy, radius, 0, 2 * Math.PI);
    ctx.strokeStyle = '#FF00FF';
    ctx.lineWidth = 2;
    ctx.stroke();
    // Iris center dot
    ctx.beginPath();
    ctx.arc(cx, cy, 2, 0, 2 * Math.PI);
    ctx.fillStyle = '#FFFFFF';
    ctx.fill();
    // Gaze direction line (red): iris offset from the eye-corner midpoint,
    // exaggerated 3x.
    const innerPt = _get(inner);
    const outerPt = _get(outer);
    if (innerPt && outerPt) {
      const eyeCx = (innerPt[0] + outerPt[0]) / 2.0 * w;
      const eyeCy = (innerPt[1] + outerPt[1]) / 2.0 * h;
      const dx = cx - eyeCx;
      const dy = cy - eyeCy;
      ctx.beginPath();
      ctx.moveTo(cx, cy);
      ctx.lineTo(cx + dx * 3, cy + dy * 3);
      ctx.strokeStyle = '#FF0000';
      ctx.lineWidth = 1;
      ctx.stroke();
    }
  }
}
// Store the latest detection for the render loop to draw on the next
// animation frame; rendering is decoupled from the WebSocket message rate.
drawDetectionResult(data) {
  this._lastDetection = data;
}
updateStatus(newFocused) {
this.statusBuffer.push(newFocused);
if (this.statusBuffer.length > this.bufferSize) {
this.statusBuffer.shift();
}
if (this.statusBuffer.length < this.bufferSize) return false;
const focusedCount = this.statusBuffer.filter(f => f).length;
const focusedRatio = focusedCount / this.statusBuffer.length;
const previousStatus = this.currentStatus;
if (focusedRatio >= 0.75) {
this.currentStatus = true;
} else if (focusedRatio <= 0.25) {
this.currentStatus = false;
}
this.handleNotificationLogic(previousStatus, this.currentStatus);
}
handleNotificationLogic(previousStatus, currentStatus) {
const now = Date.now();
if (previousStatus && !currentStatus) {
this.unfocusedStartTime = now;
}
if (!previousStatus && currentStatus) {
this.unfocusedStartTime = null;
}
if (!currentStatus && this.unfocusedStartTime) {
const unfocusedDuration = (now - this.unfocusedStartTime) / 1000;
if (unfocusedDuration >= this.notificationThreshold) {
const canSendNotification = !this.lastNotificationTime ||
(now - this.lastNotificationTime) >= this.notificationCooldown;
if (canSendNotification) {
this.sendNotification(
'Focus Alert',
`You've been distracted for ${Math.floor(unfocusedDuration)} seconds. Get back to work!`
);
this.lastNotificationTime = now;
}
}
}
}
async requestNotificationPermission() {
if ('Notification' in window && Notification.permission === 'default') {
try {
await Notification.requestPermission();
} catch (error) {
console.error('Failed to request notification permission:', error);
}
}
}
async loadNotificationSettings() {
try {
const response = await fetch('/api/settings');
const settings = await response.json();
if (settings) {
this.notificationEnabled = settings.notification_enabled ?? true;
this.notificationThreshold = settings.notification_threshold ?? 30;
}
} catch (error) {
console.error('Failed to load notification settings:', error);
}
}
sendNotification(title, message) {
if (!this.notificationEnabled) return;
if ('Notification' in window && Notification.permission === 'granted') {
try {
const notification = new Notification(title, {
body: message,
icon: '/vite.svg',
badge: '/vite.svg',
tag: 'focus-guard-distraction',
requireInteraction: false
});
setTimeout(() => notification.close(), 3000);
} catch (error) {
console.error('Failed to send notification:', error);
}
}
}
/**
 * Stop streaming: halt capture and rendering, perform the end-session
 * handshake with the server (with a 2s timeout), then close the socket,
 * release the camera, and clear the display.
 */
async stopStreaming() {
  console.log('Stopping streaming...');
  // Clear the flag first so the WS onclose handler does not reconnect.
  this.isStreaming = false;
  if (this.reconnectTimeout) {
    clearTimeout(this.reconnectTimeout);
    this.reconnectTimeout = null;
  }
  // Stop the render loop
  this._stopRenderLoop();
  this._lastDetection = null;
  // Stop frame capture
  if (this.captureInterval) {
    clearInterval(this.captureInterval);
    this.captureInterval = null;
  }
  // Send the end-session request and wait for the response
  if (this.ws && this.ws.readyState === WebSocket.OPEN && this.sessionId) {
    const sessionId = this.sessionId;
    // Wait for the session_ended message by temporarily swapping the
    // onmessage handler; the original handler is restored on completion
    // or timeout.
    const waitForSessionEnd = new Promise((resolve) => {
      const originalHandler = this.ws.onmessage;
      const timeout = setTimeout(() => {
        this.ws.onmessage = originalHandler;
        console.log('Session end timeout, proceeding anyway');
        resolve();
      }, 2000);
      this.ws.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          if (data.type === 'session_ended') {
            clearTimeout(timeout);
            this.handleServerMessage(data);
            this.ws.onmessage = originalHandler;
            resolve();
          } else {
            // Continue handling non-terminal messages
            this.handleServerMessage(data);
          }
        } catch (e) {
          console.error('Failed to parse message:', e);
        }
      };
    });
    console.log('Sending end_session request for session:', sessionId);
    this.ws.send(JSON.stringify({
      type: 'end_session',
      session_id: sessionId
    }));
    // Wait for the response or a timeout
    await waitForSessionEnd;
  }
  // Delay socket shutdown briefly so pending messages can flush
  await new Promise(resolve => setTimeout(resolve, 200));
  // Close the WebSocket
  if (this.ws) {
    this.ws.close();
    this.ws = null;
  }
  // Stop the camera
  if (this.stream) {
    this.stream.getTracks().forEach(track => track.stop());
    this.stream = null;
  }
  // Clear the video element
  if (this.localVideoElement) {
    this.localVideoElement.srcObject = null;
  }
  // Clear the canvas
  if (this.displayCanvas) {
    const ctx = this.displayCanvas.getContext('2d');
    ctx.clearRect(0, 0, this.displayCanvas.width, this.displayCanvas.height);
  }
  // Reset transient state
  this.unfocusedStartTime = null;
  this.lastNotificationTime = null;
  console.log('Streaming stopped');
  console.log('Stats:', this.stats);
}
setFrameRate(rate) {
this.frameRate = Math.max(10, Math.min(30, rate));
console.log(`Frame rate set to ${this.frameRate} FPS`);
// Restart capture if streaming is already active
if (this.isStreaming && this.captureInterval) {
clearInterval(this.captureInterval);
this.startCapture();
}
}
startCalibration() {
if (!this.ws || this.ws.readyState !== WebSocket.OPEN) return;
this.ws.send(JSON.stringify({ type: 'calibration_start' }));
}
nextCalibrationPoint() {
if (!this.ws || this.ws.readyState !== WebSocket.OPEN) return;
this.ws.send(JSON.stringify({ type: 'calibration_next' }));
}
cancelCalibration() {
if (!this.ws || this.ws.readyState !== WebSocket.OPEN) return;
this.ws.send(JSON.stringify({ type: 'calibration_cancel' }));
}
// Return the current calibration state object (shared reference, not a copy).
getCalibrationState() {
  return this.calibrationState;
}
dismissCalibrationDone() {
this.calibrationState = {
active: false,
collecting: false,
done: false,
success: false,
target: [0.5, 0.5],
index: 0,
numPoints: 9,
};
if (this.callbacks.onCalibrationUpdate) {
this.callbacks.onCalibrationUpdate(this.calibrationState);
}
}
getStats() {
return {
...this.stats,
isStreaming: this.isStreaming,
sessionId: this.sessionId,
currentStatus: this.currentStatus,
lastConfidence: this.lastConfidence
};
}
}