marriedtermiteblyi commited on
Commit
b288335
·
verified ·
1 Parent(s): f1a9ee6

put notes/dcc43ef1-4f83-4afe-a59d-61a9ae99cb51.json

Browse files
notes/dcc43ef1-4f83-4afe-a59d-61a9ae99cb51.json CHANGED
@@ -1,8 +1,8 @@
1
  {
2
  "id": "dcc43ef1-4f83-4afe-a59d-61a9ae99cb51",
3
  "title": "Untitled",
4
- "content": "import express from 'express';\nimport { MongoClient, ObjectId } from 'mongodb';\nimport multer from 'multer';\nimport bcryptjs from 'bcryptjs';\nimport jwt from 'jsonwebtoken';\nimport fs from 'fs';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\nimport dotenv from 'dotenv';\n\n// Load environment variables\ndotenv.config({ path: '.env.server' });\n\nconst __dirname = path.dirname(fileURLToPath(import.meta.url));\nconst uploadsDir = path.join(__dirname, 'uploads');\nconst tempChunksDir = path.join(__dirname, 'uploads_tmp');\n\nif (!fs.existsSync(uploadsDir)) {\n fs.mkdirSync(uploadsDir, { recursive: true });\n}\n\nif (!fs.existsSync(tempChunksDir)) {\n fs.mkdirSync(tempChunksDir, { recursive: true });\n}\n\nconst app = express();\nconst PORT = process.env.PORT || 3001;\nconst MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017';\nconst JWT_SECRET = process.env.JWT_SECRET || 'your-secret-key-change-in-production';\n\nlet db;\nlet mongoClient;\n\n// Initialize MongoDB\nasync function initMongoDB() {\n try {\n console.log('🔄 Connecting to MongoDB...');\n console.log('URI:', MONGODB_URI.replace(/:[^:]*@/, ':****@')); // Hide password in logs\n \n mongoClient = new MongoClient(MONGODB_URI, {\n serverSelectionTimeoutMS: 5000,\n connectTimeoutMS: 10000,\n // HIGH-PERFORMANCE POOLING: Optimize connection reuse\n maxPoolSize: 50, // Increased from default 10 for concurrent uploads\n minPoolSize: 10, // Keep minimum connections ready\n maxIdleTimeMS: 30000,\n waitQueueTimeoutMS: 10000,\n });\n \n await mongoClient.connect();\n console.log('✅ MongoDB connected successfully');\n \n db = mongoClient.db('file-caddy');\n\n // Create collections if they don't exist\n const collections = await db.listCollections().toArray();\n const collectionNames = collections.map(c => c.name);\n\n if (!collectionNames.includes('users')) {\n await db.createCollection('users');\n await db.collection('users').createIndex({ email: 1 }, { unique: true 
});\n await db.collection('users').createIndex({ username: 1 }, { unique: true, sparse: true });\n console.log('📦 Created users collection');\n }\n if (!collectionNames.includes('files')) {\n await db.createCollection('files');\n await db.collection('files').createIndex({ user_id: 1 });\n console.log('📦 Created files collection');\n }\n if (!collectionNames.includes('user_roles')) {\n await db.createCollection('user_roles');\n console.log('📦 Created user_roles collection');\n }\n\n console.log('✅ MongoDB fully initialized');\n } catch (err) {\n console.error('❌ MongoDB connection error:', err.message);\n console.error('Full error:', err);\n process.exit(1);\n }\n}\n\n// Middleware - Memory efficient for large uploads\n// Limit JSON requests but NOT file uploads (handled by multer streaming)\napp.use(express.json({ limit: '10mb' }));\napp.use(express.urlencoded({ extended: true, limit: '10mb' }));\n\n// Serve uploads with aggressive caching for faster retrieval\napp.use('/uploads', express.static(uploadsDir, { \n maxAge: '31d',\n setHeaders: (res) => {\n res.setHeader('Cache-Control', 'public, max-age=2678400, immutable');\n }\n}));\n\n// Increase timeout for large uploads\napp.use((req, res, next) => {\n req.setTimeout(3600000); // 1 hour timeout\n res.setTimeout(3600000);\n next();\n});\n\n// Multer configuration - Optimized for faster uploads\nconst storage = multer.diskStorage({\n destination: (req, file, cb) => {\n cb(null, uploadsDir);\n },\n filename: (req, file, cb) => {\n const uniqueName = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}-${file.originalname}`;\n cb(null, uniqueName);\n }\n});\n\n// High-performance streaming upload configuration\n// Uses disk streaming (not memory buffering) for maximum throughput\nconst upload = multer({\n storage,\n limits: {\n fileSize: 5 * 1024 * 1024 * 1024, // 5GB limit\n files: 10\n },\n // CRITICAL OPTIMIZATION: 512KB buffer for better network/disk throughput\n // Larger buffer means fewer I/O cycles and 
faster streaming for large files\n highWaterMark: 512 * 1024 // 512KB chunks for faster upload performance\n});\n\nconst chunkStorage = multer.diskStorage({\n destination: (req, file, cb) => {\n cb(null, tempChunksDir);\n },\n filename: (req, file, cb) => {\n const uploadId = req.body.uploadId || 'unknown';\n const chunkIndex = req.body.chunkIndex || '0';\n cb(null, `${uploadId}.part.${chunkIndex}`);\n }\n});\n\nconst chunkUpload = multer({\n storage: chunkStorage,\n limits: {\n fileSize: 6 * 1024 * 1024, // 6MB max per chunk\n files: 1\n },\n highWaterMark: 512 * 1024\n});\n\nasync function tryFinalizeChunkedUpload({ uploadId, totalChunks, originalName, description, mimetype, userId, fileSize }) {\n const parts = Array.from({ length: totalChunks }, (_, index) =>\n path.join(tempChunksDir, `${uploadId}.part.${index}`)\n );\n\n for (const partPath of parts) {\n try {\n await fs.promises.access(partPath, fs.constants.R_OK);\n } catch {\n return null;\n }\n }\n\n const safeName = `${Date.now()}-${uploadId}-${path.basename(originalName)}`;\n const finalPath = path.join(uploadsDir, safeName);\n const writeStream = fs.createWriteStream(finalPath, { flags: 'w' });\n\n for (const partPath of parts) {\n await new Promise((resolve, reject) => {\n const readStream = fs.createReadStream(partPath);\n readStream.on('error', reject);\n readStream.on('end', () => {\n fs.unlink(partPath, () => {});\n resolve(null);\n });\n readStream.pipe(writeStream, { end: false });\n });\n }\n\n writeStream.end();\n await new Promise((resolve, reject) => {\n writeStream.on('finish', resolve);\n writeStream.on('error', reject);\n });\n\n const fileDoc = {\n user_id: userId,\n file_name: originalName,\n file_size: fileSize,\n file_type: mimetype || 'application/octet-stream',\n storage_path: safeName,\n description: description || null,\n is_public: false,\n created_at: new Date(),\n updated_at: new Date(),\n };\n\n db.collection('files').insertOne(fileDoc).catch(err => console.error('DB insert 
error:', err));\n\n return { id: safeName, file_name: originalName, file_size: fileSize };\n}\n\nasync function cleanupOldTempChunks(ageMs = 2 * 60 * 60 * 1000) {\n try {\n const files = await fs.promises.readdir(tempChunksDir);\n const now = Date.now();\n\n await Promise.all(files.map(async (file) => {\n if (!file.includes('.part.')) {\n return;\n }\n const filePath = path.join(tempChunksDir, file);\n try {\n const stat = await fs.promises.stat(filePath);\n if (now - stat.mtimeMs > ageMs) {\n await fs.promises.unlink(filePath);\n }\n } catch (err) {\n // ignore missing file or permission issues\n }\n }));\n } catch (err) {\n console.error('Error cleaning temp chunk files:', err);\n }\n}\n\n// Run cleanup once at startup, then periodically every hour\ncleanupOldTempChunks().catch(err => console.error('Startup temp cleanup failed:', err));\nsetInterval(() => cleanupOldTempChunks(), 60 * 60 * 1000);\n\n// Middleware to verify JWT\nfunction verifyToken(req, res, next) {\n const token = req.headers['authorization']?.split(' ')[1];\n if (!token) {\n return res.status(401).json({ error: 'No token provided' });\n }\n try {\n const decoded = jwt.verify(token, JWT_SECRET);\n req.userId = decoded.userId;\n req.email = decoded.email;\n next();\n } catch (err) {\n res.status(401).json({ error: 'Invalid token' });\n }\n}\n\n// Auth Routes\n\n// Sign up\napp.post('/auth/signup', async (req, res) => {\n try {\n const { email, password, displayName, username } = req.body;\n\n if (!email || !password) {\n return res.status(400).json({ error: 'Email and password required' });\n }\n\n if (username && !/^[a-zA-Z0-9_]{3,20}$/.test(username)) {\n return res.status(400).json({ error: 'Username must be 3-20 characters (letters, numbers, underscore only)' });\n }\n\n const usersCollection = db.collection('users');\n const existing = await usersCollection.findOne({ email });\n\n if (existing) {\n return res.status(400).json({ error: 'Email already exists' });\n }\n\n if (username) {\n const 
existingUsername = await usersCollection.findOne({ username });\n if (existingUsername) {\n return res.status(400).json({ error: 'Username already taken' });\n }\n }\n\n const hashedPassword = await bcryptjs.hash(password, 10);\n const user = {\n email,\n password: hashedPassword,\n username: username || null,\n displayName: displayName || email.split('@')[0],\n display_name: displayName || email.split('@')[0],\n bio: '',\n avatar_url: '',\n location: '',\n website: '',\n phone: '',\n social_media: {\n twitter: '',\n github: '',\n linkedin: '',\n instagram: ''\n },\n created_at: new Date(),\n updated_at: new Date(),\n };\n\n const result = await usersCollection.insertOne(user);\n const userId = result.insertedId.toString();\n\n const token = jwt.sign({ userId, email }, JWT_SECRET, { expiresIn: '7d' });\n\n res.json({\n user: {\n id: userId,\n email,\n displayName: user.displayName,\n username: user.username,\n },\n token,\n });\n } catch (err) {\n console.error('Signup error:', err);\n if (err.code === 11000) {\n const field = Object.keys(err.keyPattern)[0];\n res.status(400).json({ error: `${field} already exists` });\n } else {\n res.status(500).json({ error: 'Signup failed' });\n }\n }\n});\n\n// Sign in\napp.post('/auth/signin', async (req, res) => {\n try {\n const { email, password } = req.body;\n\n if (!email || !password) {\n return res.status(400).json({ error: 'Email and password required' });\n }\n\n const usersCollection = db.collection('users');\n const user = await usersCollection.findOne({ email });\n\n if (!user) {\n return res.status(400).json({ error: 'Invalid credentials' });\n }\n\n const isValid = await bcryptjs.compare(password, user.password);\n\n if (!isValid) {\n return res.status(400).json({ error: 'Invalid credentials' });\n }\n\n const token = jwt.sign({ userId: user._id.toString(), email }, JWT_SECRET, { expiresIn: '7d' });\n\n res.json({\n user: {\n id: user._id.toString(),\n email: user.email,\n displayName: user.displayName,\n 
username: user.username,\n },\n token,\n });\n } catch (err) {\n console.error('Signin error:', err);\n res.status(500).json({ error: 'Signin failed' });\n }\n});\n\n// Get current user\napp.get('/auth/me', verifyToken, async (req, res) => {\n try {\n const usersCollection = db.collection('users');\n const user = await usersCollection.findOne({ _id: new ObjectId(req.userId) });\n\n if (!user) {\n return res.status(404).json({ error: 'User not found' });\n }\n\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n res.json({\n id: user._id.toString(),\n email: user.email,\n displayName: user.displayName,\n username: user.username || '',\n display_name: user.display_name || user.displayName,\n bio: user.bio || '',\n avatar_url: user.avatar_url || '',\n location: user.location || '',\n website: user.website || '',\n phone: user.phone || '',\n social_media: user.social_media || { twitter: '', github: '', linkedin: '', instagram: '' },\n created_at: user.created_at,\n isAdmin: !!roleDoc,\n });\n } catch (err) {\n console.error('Get user error:', err);\n res.status(500).json({ error: 'Failed to fetch user' });\n }\n});\n\n// Change password\napp.post('/auth/change-password', verifyToken, async (req, res) => {\n try {\n const { currentPassword, newPassword } = req.body;\n\n if (!currentPassword || !newPassword) {\n return res.status(400).json({ error: 'Current and new password required' });\n }\n\n if (newPassword.length < 6) {\n return res.status(400).json({ error: 'New password must be at least 6 characters' });\n }\n\n const usersCollection = db.collection('users');\n const user = await usersCollection.findOne({ _id: new ObjectId(req.userId) });\n\n if (!user) {\n return res.status(404).json({ error: 'User not found' });\n }\n\n // Verify current password\n const isValid = await bcryptjs.compare(currentPassword, user.password);\n if (!isValid) {\n return res.status(401).json({ error: 'Current 
password is incorrect' });\n }\n\n // Hash new password\n const hashedPassword = await bcryptjs.hash(newPassword, 10);\n\n // Update password\n await usersCollection.updateOne(\n { _id: new ObjectId(req.userId) },\n { $set: { password: hashedPassword, updated_at: new Date() } }\n );\n\n res.json({ success: true, message: 'Password changed successfully' });\n } catch (err) {\n console.error('Change password error:', err);\n res.status(500).json({ error: 'Failed to change password' });\n }\n});\n\n// Update profile\napp.post('/auth/update-profile', verifyToken, async (req, res) => {\n try {\n const { display_name, bio, avatar_url, location, website, phone, social_media } = req.body;\n\n const usersCollection = db.collection('users');\n const updates = {};\n \n if (display_name !== undefined) updates.display_name = display_name;\n if (bio !== undefined) updates.bio = bio;\n if (avatar_url !== undefined) updates.avatar_url = avatar_url;\n if (location !== undefined) updates.location = location;\n if (website !== undefined) updates.website = website;\n if (phone !== undefined) updates.phone = phone;\n if (social_media !== undefined) updates.social_media = social_media;\n updates.updated_at = new Date();\n\n await usersCollection.updateOne(\n { _id: new ObjectId(req.userId) },\n { $set: updates }\n );\n\n // Return updated user\n const user = await usersCollection.findOne({ _id: new ObjectId(req.userId) });\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n res.json({\n id: user._id.toString(),\n email: user.email,\n displayName: user.displayName,\n username: user.username || '',\n display_name: user.display_name || user.displayName,\n bio: user.bio || '',\n avatar_url: user.avatar_url || '',\n location: user.location || '',\n website: user.website || '',\n phone: user.phone || '',\n social_media: user.social_media || { twitter: '', github: '', linkedin: '', instagram: '' },\n created_at: user.created_at,\n isAdmin: 
!!roleDoc,\n message: 'Profile updated successfully'\n });\n } catch (err) {\n console.error('Update profile error:', err);\n if (err.code === 11000) {\n res.status(400).json({ error: 'Username already taken' });\n } else {\n res.status(500).json({ error: 'Failed to update profile' });\n }\n }\n});\n\n// File Routes\n\n// Chunked upload support for large files\napp.post('/api/files/upload-chunk', verifyToken, (req, res, next) => {\n chunkUpload.single('chunk')(req, res, (err) => {\n if (err) {\n console.error('Chunk upload error:', err.message);\n if (err.code === 'LIMIT_FILE_SIZE') {\n return res.status(413).json({ error: `Chunk too large. Max size: ${err.limit} bytes` });\n }\n return res.status(400).json({ error: err.message || 'Chunk upload failed' });\n }\n next();\n });\n}, async (req, res) => {\n try {\n const uploadId = req.body.uploadId;\n const chunkIndex = Number(req.body.chunkIndex);\n const totalChunks = Number(req.body.totalChunks);\n const originalName = req.body.fileName;\n const description = req.body.description;\n const isLastChunk = req.body.isLastChunk === 'true';\n\n if (!uploadId || Number.isNaN(chunkIndex) || Number.isNaN(totalChunks) || !originalName) {\n return res.status(400).json({ error: 'Missing upload metadata' });\n }\n\n if (!req.file) {\n return res.status(400).json({ error: 'No chunk file provided' });\n }\n\n if (isLastChunk) {\n const result = await tryFinalizeChunkedUpload({\n uploadId,\n totalChunks,\n originalName,\n description,\n mimetype: req.file.mimetype,\n userId: req.userId,\n fileSize: Number(req.body.fileSize) || req.file.size,\n });\n\n if (result) {\n return res.json({ ...result, chunkComplete: true });\n }\n }\n\n res.json({ chunkIndex, totalChunks, chunkComplete: false });\n } catch (err) {\n console.error('Chunk upload error:', err);\n res.status(500).json({ error: 'Chunk upload failed' });\n }\n});\n\n// Upload file - Memory-efficient streaming\napp.post('/api/files/upload', verifyToken, (req, res, next) => {\n 
upload.single('file')(req, res, (err) => {\n // Handle multer errors\n if (err) {\n console.error('Multer error:', err.message);\n if (err.code === 'LIMIT_FILE_SIZE') {\n return res.status(413).json({ error: `File too large. Max size: ${err.limit} bytes` });\n }\n if (err.code === 'LIMIT_FILE_COUNT') {\n return res.status(413).json({ error: 'Too many files' });\n }\n return res.status(400).json({ error: err.message || 'Upload failed' });\n }\n next();\n });\n}, async (req, res) => {\n try {\n if (!req.file) {\n return res.status(400).json({ error: 'No file provided' });\n }\n\n const { description } = req.body;\n const filesCollection = db.collection('files');\n\n // Get actual file size from disk\n const actualSize = req.file.size;\n\n const fileDoc = {\n user_id: req.userId,\n file_name: req.file.originalname,\n file_size: actualSize,\n file_type: req.file.mimetype,\n storage_path: req.file.filename,\n description: description || null,\n is_public: false,\n created_at: new Date(),\n updated_at: new Date(),\n };\n\n // Fast async insert - doesn't wait for completion\n filesCollection.insertOne(fileDoc).catch(err => console.error('DB insert error:', err));\n\n // Send response immediately - data saves in background\n res.json({\n id: req.file.filename,\n file_name: fileDoc.file_name,\n file_size: fileDoc.file_size,\n }).end();\n } catch (err) {\n console.error('Upload error:', err);\n if (req.file && req.file.path) {\n // Async cleanup - don't block response\n fs.unlink(req.file.path, () => {});\n }\n res.status(500).json({ error: 'Upload failed' });\n }\n});\n\n// Get files\napp.get('/api/files', verifyToken, async (req, res) => {\n try {\n const filesCollection = db.collection('files');\n let query = {};\n\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n query.user_id = req.userId;\n }\n\n const files = await filesCollection\n .find(query)\n .sort({ created_at: -1 })\n 
.toArray();\n\n const formattedFiles = files.map(f => ({\n id: f._id.toString(),\n ...f,\n _id: undefined,\n }));\n\n res.json(formattedFiles);\n } catch (err) {\n console.error('Get files error:', err);\n res.status(500).json({ error: 'Failed to fetch files' });\n }\n});\n\n// Delete file\napp.delete('/api/files/:id', verifyToken, async (req, res) => {\n try {\n const filesCollection = db.collection('files');\n const fileDoc = await filesCollection.findOne({ _id: new ObjectId(req.params.id) });\n\n if (!fileDoc) {\n return res.status(404).json({ error: 'File not found' });\n }\n\n // Check ownership or admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (fileDoc.user_id !== req.userId && !roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n // Delete file from disk\n const filePath = path.join(uploadsDir, fileDoc.storage_path);\n fs.unlink(filePath, (err) => {\n if (err) console.error('Error deleting file:', err);\n });\n\n await filesCollection.deleteOne({ _id: new ObjectId(req.params.id) });\n\n res.json({ success: true });\n } catch (err) {\n console.error('Delete file error:', err);\n res.status(500).json({ error: 'Failed to delete file' });\n }\n});\n\n// Get file by storage path\napp.get('/api/files/download/:filename', (req, res) => {\n try {\n const filename = req.params.filename;\n const filepath = path.join(uploadsDir, filename);\n\n // Security: ensure the file is within uploads directory\n if (!path.resolve(filepath).startsWith(path.resolve(uploadsDir))) {\n return res.status(403).json({ error: 'Access denied' });\n }\n\n if (!fs.existsSync(filepath)) {\n return res.status(404).json({ error: 'File not found' });\n }\n\n res.download(filepath);\n } catch (err) {\n console.error('Download error:', err);\n res.status(500).json({ error: 'Download failed' });\n }\n});\n\n// Admin Routes\n\n// Get all users\napp.get('/api/admin/users', verifyToken, async (req, res) => {\n 
try {\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n const usersCollection = db.collection('users');\n const users = await usersCollection\n .find({}, { projection: { password: 0 } })\n .toArray();\n\n const formattedUsers = users.map(u => ({\n id: u._id.toString(),\n ...u,\n _id: undefined,\n }));\n\n res.json(formattedUsers);\n } catch (err) {\n console.error('Get users error:', err);\n res.status(500).json({ error: 'Failed to fetch users' });\n }\n});\n\n// Get admin files\napp.get('/api/admin/files', verifyToken, async (req, res) => {\n try {\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n const filesCollection = db.collection('files');\n const files = await filesCollection\n .find({})\n .sort({ created_at: -1 })\n .toArray();\n\n const formattedFiles = files.map(f => ({\n id: f._id.toString(),\n ...f,\n _id: undefined,\n }));\n\n res.json(formattedFiles);\n } catch (err) {\n console.error('Get admin files error:', err);\n res.status(500).json({ error: 'Failed to fetch files' });\n }\n});\n\n// Set user as admin\napp.post('/api/admin/users/:userId/role', verifyToken, async (req, res) => {\n try {\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n const { role } = req.body;\n const userRolesCollection = db.collection('user_roles');\n\n if (role === 'admin') {\n await userRolesCollection.updateOne(\n { user_id: req.params.userId },\n { $set: { user_id: req.params.userId, role: 'admin' } },\n { upsert: true }\n );\n } else {\n await userRolesCollection.deleteOne({ user_id: req.params.userId, role: 
'admin' });\n }\n\n res.json({ success: true });\n } catch (err) {\n console.error('Set role error:', err);\n res.status(500).json({ error: 'Failed to update role' });\n }\n});\n\n// Health check\napp.get('/health', (req, res) => {\n res.json({ status: 'ok' });\n});\n\n// Start server\ninitMongoDB().then(() => {\n app.listen(PORT, () => {\n console.log(`Server running on http://localhost:${PORT}`);\n });\n});\n",
5
  "language": "javascript",
6
  "createdAt": 1776244783913,
7
- "updatedAt": 1776407563188
8
  }
 
1
  {
2
  "id": "dcc43ef1-4f83-4afe-a59d-61a9ae99cb51",
3
  "title": "Untitled",
4
+ "content": "import express from 'express';\nimport { MongoClient, ObjectId } from 'mongodb';\nimport multer from 'multer';\nimport bcryptjs from 'bcryptjs';\nimport jwt from 'jsonwebtoken';\nimport fs from 'fs';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\nimport dotenv from 'dotenv';\n\n// Load environment variables\ndotenv.config({ path: '.env.server' });\n\nconst __dirname = path.dirname(fileURLToPath(import.meta.url));\nconst uploadsDir = path.join(__dirname, 'uploads');\nconst tempChunksDir = path.join(__dirname, 'uploads_tmp');\n\nif (!fs.existsSync(uploadsDir)) {\n fs.mkdirSync(uploadsDir, { recursive: true });\n}\n\nif (!fs.existsSync(tempChunksDir)) {\n fs.mkdirSync(tempChunksDir, { recursive: true });\n}\n\nconst app = express();\nconst PORT = process.env.PORT || 3001;\nconst MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017';\nconst JWT_SECRET = process.env.JWT_SECRET || 'your-secret-key-change-in-production';\n\nlet db;\nlet mongoClient;\n\n// Initialize MongoDB\nasync function initMongoDB() {\n try {\n console.log('🔄 Connecting to MongoDB...');\n console.log('URI:', MONGODB_URI.replace(/:[^:]*@/, ':****@')); // Hide password in logs\n \n mongoClient = new MongoClient(MONGODB_URI, {\n serverSelectionTimeoutMS: 5000,\n connectTimeoutMS: 10000,\n // HIGH-PERFORMANCE POOLING: Optimize connection reuse\n maxPoolSize: 50, // Increased from default 10 for concurrent uploads\n minPoolSize: 10, // Keep minimum connections ready\n maxIdleTimeMS: 30000,\n waitQueueTimeoutMS: 10000,\n });\n \n await mongoClient.connect();\n console.log('✅ MongoDB connected successfully');\n \n db = mongoClient.db('file-caddy');\n\n // Create collections if they don't exist\n const collections = await db.listCollections().toArray();\n const collectionNames = collections.map(c => c.name);\n\n if (!collectionNames.includes('users')) {\n await db.createCollection('users');\n await db.collection('users').createIndex({ email: 1 }, { unique: true 
});\n await db.collection('users').createIndex({ username: 1 }, { unique: true, sparse: true });\n console.log('📦 Created users collection');\n }\n if (!collectionNames.includes('files')) {\n await db.createCollection('files');\n await db.collection('files').createIndex({ user_id: 1 });\n console.log('📦 Created files collection');\n }\n if (!collectionNames.includes('user_roles')) {\n await db.createCollection('user_roles');\n console.log('📦 Created user_roles collection');\n }\n\n console.log('✅ MongoDB fully initialized');\n } catch (err) {\n console.error('❌ MongoDB connection error:', err.message);\n console.error('Full error:', err);\n process.exit(1);\n }\n}\n\n// Middleware - Memory efficient for large uploads\n// Limit JSON requests but NOT file uploads (handled by multer streaming)\napp.use(express.json({ limit: '10mb' }));\napp.use(express.urlencoded({ extended: true, limit: '10mb' }));\n\n// Serve uploads with aggressive caching for faster retrieval\napp.use('/uploads', express.static(uploadsDir, { \n maxAge: '31d',\n setHeaders: (res) => {\n res.setHeader('Cache-Control', 'public, max-age=2678400, immutable');\n }\n}));\n\n// Increase timeout for large uploads\napp.use((req, res, next) => {\n req.setTimeout(3600000); // 1 hour timeout\n res.setTimeout(3600000);\n next();\n});\n\n// Multer configuration - Optimized for faster uploads\nconst storage = multer.diskStorage({\n destination: (req, file, cb) => {\n cb(null, uploadsDir);\n },\n filename: (req, file, cb) => {\n const uniqueName = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}-${file.originalname}`;\n cb(null, uniqueName);\n }\n});\n\n// High-performance streaming upload configuration\n// Uses disk streaming (not memory buffering) for maximum throughput\nconst upload = multer({\n storage,\n limits: {\n fileSize: 5 * 1024 * 1024 * 1024, // 5GB limit\n files: 10\n },\n // CRITICAL OPTIMIZATION: 512KB buffer for better network/disk throughput\n // Larger buffer means fewer I/O cycles and 
faster streaming for large files\n highWaterMark: 512 * 1024 // 512KB chunks for faster upload performance\n});\n\nconst chunkStorage = multer.diskStorage({\n destination: (req, file, cb) => {\n cb(null, tempChunksDir);\n },\n filename: (req, file, cb) => {\n const uploadId = req.body.uploadId || 'unknown';\n const chunkIndex = req.body.chunkIndex || '0';\n cb(null, `${uploadId}.part.${chunkIndex}`);\n }\n});\n\nconst chunkUpload = multer({\n storage: chunkStorage,\n limits: {\n fileSize: 7 * 1024 * 1024, // 7MB max per chunk to allow FormData overhead\n files: 1\n },\n highWaterMark: 512 * 1024\n});\n\nasync function tryFinalizeChunkedUpload({ uploadId, totalChunks, originalName, description, mimetype, userId, fileSize }) {\n const parts = Array.from({ length: totalChunks }, (_, index) =>\n path.join(tempChunksDir, `${uploadId}.part.${index}`)\n );\n\n for (const partPath of parts) {\n try {\n await fs.promises.access(partPath, fs.constants.R_OK);\n } catch {\n return null;\n }\n }\n\n const safeName = `${Date.now()}-${uploadId}-${path.basename(originalName)}`;\n const finalPath = path.join(uploadsDir, safeName);\n const writeStream = fs.createWriteStream(finalPath, { flags: 'w' });\n\n for (const partPath of parts) {\n await new Promise((resolve, reject) => {\n const readStream = fs.createReadStream(partPath);\n readStream.on('error', reject);\n readStream.on('end', () => {\n fs.unlink(partPath, () => {});\n resolve(null);\n });\n readStream.pipe(writeStream, { end: false });\n });\n }\n\n writeStream.end();\n await new Promise((resolve, reject) => {\n writeStream.on('finish', resolve);\n writeStream.on('error', reject);\n });\n\n const fileDoc = {\n user_id: userId,\n file_name: originalName,\n file_size: fileSize,\n file_type: mimetype || 'application/octet-stream',\n storage_path: safeName,\n description: description || null,\n is_public: false,\n created_at: new Date(),\n updated_at: new Date(),\n };\n\n db.collection('files').insertOne(fileDoc).catch(err => 
console.error('DB insert error:', err));\n\n return { id: safeName, file_name: originalName, file_size: fileSize };\n}\n\nasync function cleanupOldTempChunks(ageMs = 2 * 60 * 60 * 1000) {\n try {\n const files = await fs.promises.readdir(tempChunksDir);\n const now = Date.now();\n\n await Promise.all(files.map(async (file) => {\n if (!file.includes('.part.')) {\n return;\n }\n const filePath = path.join(tempChunksDir, file);\n try {\n const stat = await fs.promises.stat(filePath);\n if (now - stat.mtimeMs > ageMs) {\n await fs.promises.unlink(filePath);\n }\n } catch (err) {\n // ignore missing file or permission issues\n }\n }));\n } catch (err) {\n console.error('Error cleaning temp chunk files:', err);\n }\n}\n\n// Run cleanup once at startup, then periodically every hour\ncleanupOldTempChunks().catch(err => console.error('Startup temp cleanup failed:', err));\nsetInterval(() => cleanupOldTempChunks(), 60 * 60 * 1000);\n\n// Middleware to verify JWT\nfunction verifyToken(req, res, next) {\n const token = req.headers['authorization']?.split(' ')[1];\n if (!token) {\n return res.status(401).json({ error: 'No token provided' });\n }\n try {\n const decoded = jwt.verify(token, JWT_SECRET);\n req.userId = decoded.userId;\n req.email = decoded.email;\n next();\n } catch (err) {\n res.status(401).json({ error: 'Invalid token' });\n }\n}\n\n// Auth Routes\n\n// Sign up\napp.post('/auth/signup', async (req, res) => {\n try {\n const { email, password, displayName, username } = req.body;\n\n if (!email || !password) {\n return res.status(400).json({ error: 'Email and password required' });\n }\n\n if (username && !/^[a-zA-Z0-9_]{3,20}$/.test(username)) {\n return res.status(400).json({ error: 'Username must be 3-20 characters (letters, numbers, underscore only)' });\n }\n\n const usersCollection = db.collection('users');\n const existing = await usersCollection.findOne({ email });\n\n if (existing) {\n return res.status(400).json({ error: 'Email already exists' });\n 
}\n\n if (username) {\n const existingUsername = await usersCollection.findOne({ username });\n if (existingUsername) {\n return res.status(400).json({ error: 'Username already taken' });\n }\n }\n\n const hashedPassword = await bcryptjs.hash(password, 10);\n const user = {\n email,\n password: hashedPassword,\n username: username || null,\n displayName: displayName || email.split('@')[0],\n display_name: displayName || email.split('@')[0],\n bio: '',\n avatar_url: '',\n location: '',\n website: '',\n phone: '',\n social_media: {\n twitter: '',\n github: '',\n linkedin: '',\n instagram: ''\n },\n created_at: new Date(),\n updated_at: new Date(),\n };\n\n const result = await usersCollection.insertOne(user);\n const userId = result.insertedId.toString();\n\n const token = jwt.sign({ userId, email }, JWT_SECRET, { expiresIn: '7d' });\n\n res.json({\n user: {\n id: userId,\n email,\n displayName: user.displayName,\n username: user.username,\n },\n token,\n });\n } catch (err) {\n console.error('Signup error:', err);\n if (err.code === 11000) {\n const field = Object.keys(err.keyPattern)[0];\n res.status(400).json({ error: `${field} already exists` });\n } else {\n res.status(500).json({ error: 'Signup failed' });\n }\n }\n});\n\n// Sign in\napp.post('/auth/signin', async (req, res) => {\n try {\n const { email, password } = req.body;\n\n if (!email || !password) {\n return res.status(400).json({ error: 'Email and password required' });\n }\n\n const usersCollection = db.collection('users');\n const user = await usersCollection.findOne({ email });\n\n if (!user) {\n return res.status(400).json({ error: 'Invalid credentials' });\n }\n\n const isValid = await bcryptjs.compare(password, user.password);\n\n if (!isValid) {\n return res.status(400).json({ error: 'Invalid credentials' });\n }\n\n const token = jwt.sign({ userId: user._id.toString(), email }, JWT_SECRET, { expiresIn: '7d' });\n\n res.json({\n user: {\n id: user._id.toString(),\n email: user.email,\n 
displayName: user.displayName,\n username: user.username,\n },\n token,\n });\n } catch (err) {\n console.error('Signin error:', err);\n res.status(500).json({ error: 'Signin failed' });\n }\n});\n\n// Get current user\napp.get('/auth/me', verifyToken, async (req, res) => {\n try {\n const usersCollection = db.collection('users');\n const user = await usersCollection.findOne({ _id: new ObjectId(req.userId) });\n\n if (!user) {\n return res.status(404).json({ error: 'User not found' });\n }\n\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n res.json({\n id: user._id.toString(),\n email: user.email,\n displayName: user.displayName,\n username: user.username || '',\n display_name: user.display_name || user.displayName,\n bio: user.bio || '',\n avatar_url: user.avatar_url || '',\n location: user.location || '',\n website: user.website || '',\n phone: user.phone || '',\n social_media: user.social_media || { twitter: '', github: '', linkedin: '', instagram: '' },\n created_at: user.created_at,\n isAdmin: !!roleDoc,\n });\n } catch (err) {\n console.error('Get user error:', err);\n res.status(500).json({ error: 'Failed to fetch user' });\n }\n});\n\n// Change password\napp.post('/auth/change-password', verifyToken, async (req, res) => {\n try {\n const { currentPassword, newPassword } = req.body;\n\n if (!currentPassword || !newPassword) {\n return res.status(400).json({ error: 'Current and new password required' });\n }\n\n if (newPassword.length < 6) {\n return res.status(400).json({ error: 'New password must be at least 6 characters' });\n }\n\n const usersCollection = db.collection('users');\n const user = await usersCollection.findOne({ _id: new ObjectId(req.userId) });\n\n if (!user) {\n return res.status(404).json({ error: 'User not found' });\n }\n\n // Verify current password\n const isValid = await bcryptjs.compare(currentPassword, user.password);\n if (!isValid) {\n return 
res.status(401).json({ error: 'Current password is incorrect' });\n }\n\n // Hash new password\n const hashedPassword = await bcryptjs.hash(newPassword, 10);\n\n // Update password\n await usersCollection.updateOne(\n { _id: new ObjectId(req.userId) },\n { $set: { password: hashedPassword, updated_at: new Date() } }\n );\n\n res.json({ success: true, message: 'Password changed successfully' });\n } catch (err) {\n console.error('Change password error:', err);\n res.status(500).json({ error: 'Failed to change password' });\n }\n});\n\n// Update profile\napp.post('/auth/update-profile', verifyToken, async (req, res) => {\n try {\n const { display_name, bio, avatar_url, location, website, phone, social_media } = req.body;\n\n const usersCollection = db.collection('users');\n const updates = {};\n \n if (display_name !== undefined) updates.display_name = display_name;\n if (bio !== undefined) updates.bio = bio;\n if (avatar_url !== undefined) updates.avatar_url = avatar_url;\n if (location !== undefined) updates.location = location;\n if (website !== undefined) updates.website = website;\n if (phone !== undefined) updates.phone = phone;\n if (social_media !== undefined) updates.social_media = social_media;\n updates.updated_at = new Date();\n\n await usersCollection.updateOne(\n { _id: new ObjectId(req.userId) },\n { $set: updates }\n );\n\n // Return updated user\n const user = await usersCollection.findOne({ _id: new ObjectId(req.userId) });\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n res.json({\n id: user._id.toString(),\n email: user.email,\n displayName: user.displayName,\n username: user.username || '',\n display_name: user.display_name || user.displayName,\n bio: user.bio || '',\n avatar_url: user.avatar_url || '',\n location: user.location || '',\n website: user.website || '',\n phone: user.phone || '',\n social_media: user.social_media || { twitter: '', github: '', linkedin: '', instagram: '' },\n 
created_at: user.created_at,\n isAdmin: !!roleDoc,\n message: 'Profile updated successfully'\n });\n } catch (err) {\n console.error('Update profile error:', err);\n if (err.code === 11000) {\n res.status(400).json({ error: 'Username already taken' });\n } else {\n res.status(500).json({ error: 'Failed to update profile' });\n }\n }\n});\n\n// File Routes\n\n// Chunked upload support for large files\napp.post('/api/files/upload-chunk', verifyToken, (req, res, next) => {\n chunkUpload.single('chunk')(req, res, (err) => {\n if (err) {\n console.error('Chunk upload error:', err.message);\n if (err.code === 'LIMIT_FILE_SIZE') {\n return res.status(413).json({ error: `Chunk too large. Max size: ${err.limit} bytes` });\n }\n return res.status(400).json({ error: err.message || 'Chunk upload failed' });\n }\n next();\n });\n}, async (req, res) => {\n try {\n const uploadId = req.body.uploadId;\n const chunkIndex = Number(req.body.chunkIndex);\n const totalChunks = Number(req.body.totalChunks);\n const originalName = req.body.fileName;\n const description = req.body.description;\n const isLastChunk = req.body.isLastChunk === 'true';\n\n if (!uploadId || Number.isNaN(chunkIndex) || Number.isNaN(totalChunks) || !originalName) {\n return res.status(400).json({ error: 'Missing upload metadata' });\n }\n\n if (!req.file) {\n return res.status(400).json({ error: 'No chunk file provided' });\n }\n\n if (isLastChunk) {\n const result = await tryFinalizeChunkedUpload({\n uploadId,\n totalChunks,\n originalName,\n description,\n mimetype: req.file.mimetype,\n userId: req.userId,\n fileSize: Number(req.body.fileSize) || req.file.size,\n });\n\n if (result) {\n return res.json({ ...result, chunkComplete: true });\n }\n }\n\n res.json({ chunkIndex, totalChunks, chunkComplete: false });\n } catch (err) {\n console.error('Chunk upload error:', err);\n res.status(500).json({ error: 'Chunk upload failed' });\n }\n});\n\n// Upload file - Memory-efficient 
streaming\napp.post('/api/files/upload', verifyToken, (req, res, next) => {\n upload.single('file')(req, res, (err) => {\n // Handle multer errors\n if (err) {\n console.error('Multer error:', err.message);\n if (err.code === 'LIMIT_FILE_SIZE') {\n return res.status(413).json({ error: `File too large. Max size: ${err.limit} bytes` });\n }\n if (err.code === 'LIMIT_FILE_COUNT') {\n return res.status(413).json({ error: 'Too many files' });\n }\n return res.status(400).json({ error: err.message || 'Upload failed' });\n }\n next();\n });\n}, async (req, res) => {\n try {\n if (!req.file) {\n return res.status(400).json({ error: 'No file provided' });\n }\n\n const { description } = req.body;\n const filesCollection = db.collection('files');\n\n // Get actual file size from disk\n const actualSize = req.file.size;\n\n const fileDoc = {\n user_id: req.userId,\n file_name: req.file.originalname,\n file_size: actualSize,\n file_type: req.file.mimetype,\n storage_path: req.file.filename,\n description: description || null,\n is_public: false,\n created_at: new Date(),\n updated_at: new Date(),\n };\n\n // Fast async insert - doesn't wait for completion\n filesCollection.insertOne(fileDoc).catch(err => console.error('DB insert error:', err));\n\n // Send response immediately - data saves in background\n res.json({\n id: req.file.filename,\n file_name: fileDoc.file_name,\n file_size: fileDoc.file_size,\n }).end();\n } catch (err) {\n console.error('Upload error:', err);\n if (req.file && req.file.path) {\n // Async cleanup - don't block response\n fs.unlink(req.file.path, () => {});\n }\n res.status(500).json({ error: 'Upload failed' });\n }\n});\n\n// Get files\napp.get('/api/files', verifyToken, async (req, res) => {\n try {\n const filesCollection = db.collection('files');\n let query = {};\n\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n query.user_id = req.userId;\n }\n\n 
const files = await filesCollection\n .find(query)\n .sort({ created_at: -1 })\n .toArray();\n\n const formattedFiles = files.map(f => ({\n id: f._id.toString(),\n ...f,\n _id: undefined,\n }));\n\n res.json(formattedFiles);\n } catch (err) {\n console.error('Get files error:', err);\n res.status(500).json({ error: 'Failed to fetch files' });\n }\n});\n\n// Delete file\napp.delete('/api/files/:id', verifyToken, async (req, res) => {\n try {\n const filesCollection = db.collection('files');\n const fileDoc = await filesCollection.findOne({ _id: new ObjectId(req.params.id) });\n\n if (!fileDoc) {\n return res.status(404).json({ error: 'File not found' });\n }\n\n // Check ownership or admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (fileDoc.user_id !== req.userId && !roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n // Delete file from disk\n const filePath = path.join(uploadsDir, fileDoc.storage_path);\n fs.unlink(filePath, (err) => {\n if (err) console.error('Error deleting file:', err);\n });\n\n await filesCollection.deleteOne({ _id: new ObjectId(req.params.id) });\n\n res.json({ success: true });\n } catch (err) {\n console.error('Delete file error:', err);\n res.status(500).json({ error: 'Failed to delete file' });\n }\n});\n\n// Get file by storage path\napp.get('/api/files/download/:filename', (req, res) => {\n try {\n const filename = req.params.filename;\n const filepath = path.join(uploadsDir, filename);\n\n // Security: ensure the file is within uploads directory\n if (!path.resolve(filepath).startsWith(path.resolve(uploadsDir))) {\n return res.status(403).json({ error: 'Access denied' });\n }\n\n if (!fs.existsSync(filepath)) {\n return res.status(404).json({ error: 'File not found' });\n }\n\n res.download(filepath);\n } catch (err) {\n console.error('Download error:', err);\n res.status(500).json({ error: 'Download failed' });\n }\n});\n\n// Admin Routes\n\n// 
Get all users\napp.get('/api/admin/users', verifyToken, async (req, res) => {\n try {\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n const usersCollection = db.collection('users');\n const users = await usersCollection\n .find({}, { projection: { password: 0 } })\n .toArray();\n\n const formattedUsers = users.map(u => ({\n id: u._id.toString(),\n ...u,\n _id: undefined,\n }));\n\n res.json(formattedUsers);\n } catch (err) {\n console.error('Get users error:', err);\n res.status(500).json({ error: 'Failed to fetch users' });\n }\n});\n\n// Get admin files\napp.get('/api/admin/files', verifyToken, async (req, res) => {\n try {\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n const filesCollection = db.collection('files');\n const files = await filesCollection\n .find({})\n .sort({ created_at: -1 })\n .toArray();\n\n const formattedFiles = files.map(f => ({\n id: f._id.toString(),\n ...f,\n _id: undefined,\n }));\n\n res.json(formattedFiles);\n } catch (err) {\n console.error('Get admin files error:', err);\n res.status(500).json({ error: 'Failed to fetch files' });\n }\n});\n\n// Set user as admin\napp.post('/api/admin/users/:userId/role', verifyToken, async (req, res) => {\n try {\n // Check if admin\n const roleDoc = await db.collection('user_roles').findOne({ user_id: req.userId, role: 'admin' });\n\n if (!roleDoc) {\n return res.status(403).json({ error: 'Unauthorized' });\n }\n\n const { role } = req.body;\n const userRolesCollection = db.collection('user_roles');\n\n if (role === 'admin') {\n await userRolesCollection.updateOne(\n { user_id: req.params.userId },\n { $set: { user_id: req.params.userId, role: 'admin' } },\n { upsert: true }\n );\n } 
else {\n await userRolesCollection.deleteOne({ user_id: req.params.userId, role: 'admin' });\n }\n\n res.json({ success: true });\n } catch (err) {\n console.error('Set role error:', err);\n res.status(500).json({ error: 'Failed to update role' });\n }\n});\n\n// Health check\napp.get('/health', (req, res) => {\n res.json({ status: 'ok' });\n});\n\n// Start server\ninitMongoDB().then(() => {\n app.listen(PORT, () => {\n console.log(`Server running on http://localhost:${PORT}`);\n });\n});\n",
  "language": "javascript",
  "createdAt": 1776244783913,
  "updatedAt": 1776407799020
}