Resolve merge conflicts and add production-ready backend with MongoDB

This commit is contained in:
EP
2025-03-07 17:27:02 -05:00
21 changed files with 1768 additions and 129 deletions

2
server/.env Normal file
View File

@@ -0,0 +1,2 @@
PORT=5000
MONGODB_URI=mongodb://localhost:27017/r00ts

4
server/.env.example Normal file
View File

@@ -0,0 +1,4 @@
PORT=5000
MONGODB_URI=mongodb://localhost:27017/r00ts
# For production, use MongoDB Atlas or other cloud database
# MONGODB_URI=mongodb+srv://<username>:<password>@cluster.mongodb.net/r00ts

82
server/README.md Normal file
View File

@@ -0,0 +1,82 @@
# R00TS Backend Server
This is the backend server for the R00TS application, providing API endpoints for word and dataset management.
## Features
- RESTful API for word submissions and retrieval
- MongoDB integration for persistent data storage
- Automatic dataset creation and backup
- Production-ready configuration
## Prerequisites
- Node.js (v14 or higher)
- MongoDB (local installation or MongoDB Atlas account)
## Installation
1. Clone the repository (if you haven't already)
2. Navigate to the server directory:
```
cd server
```
3. Install dependencies:
```
npm install
```
4. Create a `.env` file based on the `.env.example` template:
```
cp .env.example .env
```
5. Update the `.env` file with your MongoDB connection string. Never commit `.env` to version control — it may contain database credentials; share `.env.example` instead.
## Running the Server
### Development Mode
```
npm run dev
```
This will start the server with nodemon, which automatically restarts when changes are detected.
### Production Mode
```
npm start
```
## API Endpoints
### Words
- `GET /api/words` - Get all words
- `POST /api/words` - Add or update a word
- `GET /api/words/stats` - Get word statistics
### Datasets
- `GET /api/datasets` - Get all datasets (limited info)
- `GET /api/datasets/:filename` - Get a specific dataset by filename
- `POST /api/datasets` - Create a new dataset snapshot
- `GET /api/datasets/recent/list` - Get recent datasets (limited to 5)
## Deployment
For production deployment, we recommend:
1. Set up a MongoDB Atlas cluster for your database
2. Update the `.env` file with your production MongoDB URI
3. Deploy to a hosting service like Heroku, Vercel, or DigitalOcean
## Data Migration
If you have existing data in localStorage that you want to migrate to the database:
1. Export your localStorage data
2. Use the import functionality (coming soon) to upload to the server
## License
See the main project license file.

27
server/models/Dataset.js Normal file
View File

@@ -0,0 +1,27 @@
const mongoose = require('mongoose');

// A point-in-time snapshot of the whole word collection.
// `data` holds the full { word: count } map; `wordCount` and
// `totalSubmissions` are denormalized stats so listings can skip `data`.
const datasetFields = {
  filename: {
    type: String,
    required: true,
    unique: true, // creates a unique MongoDB index on filename
  },
  timestamp: {
    type: Date,
    default: Date.now,
  },
  wordCount: {
    type: Number,
    required: true,
  },
  totalSubmissions: {
    type: Number,
    required: true,
  },
  data: {
    type: Object,
    required: true,
  },
};

module.exports = mongoose.model('Dataset', new mongoose.Schema(datasetFields));

26
server/models/Word.js Normal file
View File

@@ -0,0 +1,26 @@
const mongoose = require('mongoose');

// One submitted word and how many times it has been submitted.
// Words are normalized (trimmed, lowercased) by the schema setters.
// NOTE: `unique` builds a MongoDB index; it is not a Mongoose validator.
const wordFields = {
  word: { type: String, required: true, trim: true, lowercase: true, unique: true },
  count: { type: Number, required: true, default: 1 },
  createdAt: { type: Date, default: Date.now },
  // updatedAt is maintained manually by the word routes on each upsert.
  updatedAt: { type: Date, default: Date.now },
};

module.exports = mongoose.model('Word', new mongoose.Schema(wordFields));

29
server/package.json Normal file
View File

@@ -0,0 +1,29 @@
{
"name": "r00ts-server",
"version": "1.0.0",
"description": "Backend server for R00TS application",
"main": "server.js",
"scripts": {
"start": "node server.js",
"dev": "nodemon server.js",
"prod": "pm2 start server.js --name r00ts-server",
"stop": "pm2 stop r00ts-server",
"restart": "pm2 restart r00ts-server",
"status": "pm2 status r00ts-server",
"logs": "pm2 logs r00ts-server",
"backup": "node scripts/backup.js"
},
"dependencies": {
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"mongoose": "^7.5.0",
"morgan": "^1.10.0",
"pm2": "^5.3.0",
"cron": "^2.4.0",
"mongodb-backup": "^1.6.9"
},
"devDependencies": {
"nodemon": "^3.0.1"
}
}

86
server/routes/datasets.js Normal file
View File

@@ -0,0 +1,86 @@
const express = require('express');
const router = express.Router();
const Dataset = require('../models/Dataset');
const Word = require('../models/Word');

// GET /api/datasets — list every dataset, newest first, excluding the
// (potentially large) `data` payload.
router.get('/', async (req, res) => {
  try {
    const summaries = await Dataset.find()
      .select('-data')
      .sort({ timestamp: -1 });
    res.json(summaries);
  } catch (err) {
    console.error('Error fetching datasets:', err);
    res.status(500).json({ message: 'Server error' });
  }
});
// GET /api/datasets/:filename — fetch one full dataset (including `data`)
// by its exact filename; 404 when no such snapshot exists.
router.get('/:filename', async (req, res) => {
  try {
    const found = await Dataset.findOne({ filename: req.params.filename });
    if (found === null) {
      return res.status(404).json({ message: 'Dataset not found' });
    }
    res.json(found);
  } catch (err) {
    console.error('Error fetching dataset:', err);
    res.status(500).json({ message: 'Server error' });
  }
});
// POST /api/datasets — snapshot the current word collection into a new
// Dataset document whose filename embeds the creation timestamp.
// Responds 201 with the saved document, or 409 on a filename collision.
router.post('/', async (req, res) => {
  try {
    const words = await Word.find();

    // Flatten to the { word: count } shape the frontend expects, and
    // accumulate the submission total in the same pass.
    const data = {};
    let totalSubmissions = 0;
    for (const entry of words) {
      data[entry.word] = entry.count;
      totalSubmissions += entry.count;
    }

    // Use ONE Date for both the filename and the document so they can
    // never disagree (the original called new Date() twice).
    const now = new Date();
    const filename = `roots_dataset_${now.toISOString().replace(/[:.]/g, '-')}.json`;

    const newDataset = new Dataset({
      filename,
      timestamp: now,
      wordCount: words.length,
      totalSubmissions,
      data,
    });
    await newDataset.save();
    res.status(201).json(newDataset);
  } catch (err) {
    // `filename` is unique; two snapshots created in the same millisecond
    // collide — report that as a conflict rather than a generic 500.
    if (err && err.code === 11000) {
      return res.status(409).json({ message: 'A snapshot with this timestamp already exists' });
    }
    console.error('Error creating dataset:', err);
    res.status(500).json({ message: 'Server error' });
  }
});
// GET /api/datasets/recent/list — the five newest datasets, without
// their `data` payloads. (Two path segments, so it cannot be shadowed
// by the single-segment '/:filename' route above.)
router.get('/recent/list', async (req, res) => {
  try {
    const recent = await Dataset.find().select('-data').sort({ timestamp: -1 }).limit(5);
    res.json(recent);
  } catch (err) {
    console.error('Error fetching recent datasets:', err);
    res.status(500).json({ message: 'Server error' });
  }
});

module.exports = router;

66
server/routes/words.js Normal file
View File

@@ -0,0 +1,66 @@
const express = require('express');
const router = express.Router();
const Word = require('../models/Word');

// GET /api/words — return every word as a flat { word: count } map,
// matching the shape the frontend previously kept in localStorage.
router.get('/', async (req, res) => {
  try {
    const allWords = await Word.find();
    const counts = {};
    for (const entry of allWords) {
      counts[entry.word] = entry.count;
    }
    res.json(counts);
  } catch (err) {
    console.error('Error fetching words:', err);
    res.status(500).json({ message: 'Server error' });
  }
});
// POST /api/words — increment the count for a word, creating it on first
// submission. Words are normalized (trimmed, lowercased) before storage.
// Responds 400 on missing/non-string/blank input.
router.post('/', async (req, res) => {
  try {
    const { word } = req.body;
    if (!word || typeof word !== 'string') {
      return res.status(400).json({ message: 'Word is required and must be a string' });
    }
    const normalizedWord = word.trim().toLowerCase();
    // Reject whitespace-only input: it survives the !word check above but
    // would otherwise upsert an empty-string key into the collection.
    if (normalizedWord.length === 0) {
      return res.status(400).json({ message: 'Word is required and must be a string' });
    }
    // Atomic upsert: $inc on a brand-new document initializes count to 1.
    const updatedWord = await Word.findOneAndUpdate(
      { word: normalizedWord },
      { $inc: { count: 1 }, updatedAt: Date.now() },
      { new: true, upsert: true }
    );
    res.json(updatedWord);
  } catch (err) {
    console.error('Error adding word:', err);
    res.status(500).json({ message: 'Server error' });
  }
});
// GET /api/words/stats — unique-word count plus the sum of all counts.
router.get('/stats', async (req, res) => {
  try {
    const uniqueWords = await Word.countDocuments();
    const agg = await Word.aggregate([
      { $group: { _id: null, total: { $sum: '$count' } } },
    ]);
    // The aggregation returns [] on an empty collection.
    res.json({
      uniqueWords,
      totalSubmissions: agg.length > 0 ? agg[0].total : 0,
    });
  } catch (err) {
    console.error('Error fetching stats:', err);
    res.status(500).json({ message: 'Server error' });
  }
});

module.exports = router;

85
server/scripts/backup.js Normal file
View File

@@ -0,0 +1,85 @@
/**
 * R00TS Automated Database Backup Script
 * Creates backups of the MongoDB database and rotates old ones so only
 * the most recent copies are kept on disk.
 */
const path = require('path');
// Resolve .env relative to THIS FILE, not the working directory. dotenv
// resolves a relative `path` against process.cwd(), and the npm "backup"
// script runs with CWD = server/, where the original '../.env' pointed
// one level above the server directory and loaded nothing.
require('dotenv').config({ path: path.join(__dirname, '../.env') });
const backup = require('mongodb-backup');
const fs = require('fs');
const { CronJob } = require('cron');

// All backups live under server/backups; create it on first run.
const backupDir = path.join(__dirname, '../backups');
if (!fs.existsSync(backupDir)) {
  fs.mkdirSync(backupDir, { recursive: true });
  console.log(`Created backups directory at ${backupDir}`);
}
/**
 * Run a single MongoDB backup into a timestamped folder under backupDir,
 * then trigger rotation of old backups on success.
 */
function performBackup() {
  const startedAt = new Date();
  const stamp = startedAt.toISOString().replace(/[:.]/g, '-');
  const destination = path.join(backupDir, `backup-${stamp}`);

  console.log(`Starting backup at ${startedAt.toLocaleString()}...`);
  backup({
    uri: process.env.MONGODB_URI,
    root: destination,
    callback(err) {
      if (err) {
        console.error('Backup failed:', err);
        return;
      }
      console.log(`Backup completed successfully at ${destination}`);
      // Keep disk usage bounded: drop all but the newest backups.
      rotateBackups();
    },
  });
}
/**
 * Delete the oldest backup folders, keeping only the MAX_BACKUPS most
 * recent ones (ordered by filesystem creation time).
 */
function rotateBackups() {
  const MAX_BACKUPS = 7;
  fs.readdir(backupDir, (err, files) => {
    if (err) {
      console.error('Error reading backup directory:', err);
      return;
    }
    // Collect creation times, skipping entries that vanish or cannot be
    // stat'ed (e.g. removed by a concurrent rotation). The original
    // statSync inside map() would throw and abort the whole rotation on
    // a single bad entry.
    const sortedFiles = [];
    for (const name of files) {
      const fullPath = path.join(backupDir, name);
      try {
        sortedFiles.push({
          name,
          path: fullPath,
          time: fs.statSync(fullPath).birthtime,
        });
      } catch (statErr) {
        console.error(`Skipping unreadable backup entry ${name}:`, statErr);
      }
    }
    sortedFiles.sort((a, b) => a.time - b.time); // oldest first

    if (sortedFiles.length > MAX_BACKUPS) {
      const filesToDelete = sortedFiles.slice(0, sortedFiles.length - MAX_BACKUPS);
      filesToDelete.forEach((file) => {
        fs.rm(file.path, { recursive: true, force: true }, (rmErr) => {
          if (rmErr) {
            console.error(`Error deleting old backup ${file.name}:`, rmErr);
          } else {
            console.log(`Deleted old backup: ${file.name}`);
          }
        });
      });
    }
  });
}
// When executed directly (`npm run backup` / `node scripts/backup.js`),
// run a backup immediately and keep the process alive for the daily
// 3:00 AM schedule. When require()d by server.js, do NOT auto-start the
// cron job: server.js schedules backups itself, and auto-starting here
// as well made every scheduled backup run twice.
const runDirectly = require.main === module;
if (runDirectly) {
  performBackup();
}
// Constructed either way so importers can access/start it; the last
// argument controls auto-start.
const backupJob = new CronJob('0 3 * * *', performBackup, null, runDirectly);

module.exports = { performBackup, backupJob };

176
server/server.js Normal file
View File

@@ -0,0 +1,176 @@
// Load PORT and MONGODB_URI from server/.env (dotenv reads from CWD here,
// so the server must be started from the server/ directory).
require('dotenv').config();
const express = require('express');
const mongoose = require('mongoose');
const cors = require('cors');
const morgan = require('morgan');
const path = require('path');
const fs = require('fs');
const { CronJob } = require('cron');

// Create logs directory if it doesn't exist
const logsDir = path.join(__dirname, 'logs');
if (!fs.existsSync(logsDir)) {
  fs.mkdirSync(logsDir, { recursive: true });
  console.log(`Created logs directory at ${logsDir}`);
}

// Configure logging: one append-mode file per calendar day
// (logs/server-YYYY-MM-DD.log). NOTE(review): the date is fixed at
// startup, so a long-running process keeps writing to the start day's
// file — confirm daily rotation is not expected here.
const logStream = fs.createWriteStream(
  path.join(logsDir, `server-${new Date().toISOString().split('T')[0]}.log`),
  { flags: 'a' }
);
// Redirect console output to the log file while preserving normal
// terminal output. util.format gives printf-style and object formatting
// identical to what console.log itself prints — the original
// args.join(' ') rendered objects as "[object Object]".
const util = require('util');
const originalConsoleLog = console.log;
const originalConsoleError = console.error;

// Append one timestamped line to the daily log file.
function writeToLogFile(level, args) {
  const timestamp = new Date().toISOString();
  logStream.write(`[${timestamp}] ${level}: ${util.format(...args)}\n`);
}

console.log = function (...args) {
  writeToLogFile('INFO', args);
  originalConsoleLog.apply(console, args);
};

console.error = function (...args) {
  writeToLogFile('ERROR', args);
  originalConsoleError.apply(console, args);
};
// Import routes
const wordRoutes = require('./routes/words');
const datasetRoutes = require('./routes/datasets');

const app = express();
const PORT = process.env.PORT || 5000;

// Middleware: CORS for the frontend, JSON body parsing, request logging.
app.use(cors());
app.use(express.json());
app.use(morgan('dev'));

// Serve static files from the React app (the project root, one level up).
// NOTE(review): this exposes the whole project directory; express.static
// ignores dotfiles by default, which is what keeps server/.env from being
// served — confirm that default is never overridden.
app.use(express.static(path.join(__dirname, '..')));
// Connect to MongoDB, retrying every 5 seconds until it succeeds.
// Scheduled tasks must be initialized exactly once: without the guard
// below, every disconnect/reconnect cycle re-ran
// initializeScheduledTasks() and piled up duplicate cron jobs.
let scheduledTasksInitialized = false;

const connectWithRetry = () => {
  console.log('Attempting to connect to MongoDB...');
  mongoose.connect(process.env.MONGODB_URI, {
    // NOTE(review): both options are deprecated no-ops on mongoose 7
    // (the declared dependency); kept for compatibility, safe to drop.
    useNewUrlParser: true,
    useUnifiedTopology: true,
  })
    .then(() => {
      console.log('MongoDB connected successfully');
      if (!scheduledTasksInitialized) {
        scheduledTasksInitialized = true;
        initializeScheduledTasks();
      }
    })
    .catch(err => {
      console.error('MongoDB connection error:', err);
      console.log('Retrying connection in 5 seconds...');
      setTimeout(connectWithRetry, 5000);
    });
};

// Handle MongoDB disconnection (auto-reconnect)
mongoose.connection.on('disconnected', () => {
  console.log('MongoDB disconnected! Attempting to reconnect...');
  connectWithRetry();
});

// Initial connection
connectWithRetry();
// Start recurring jobs once the database connection is up.
function initializeScheduledTasks() {
  // Reuse the CronJob exported by the backup script instead of building
  // a second one: scripts/backup.js already constructs the daily 3:00 AM
  // job at module load, and creating another here ran every backup twice.
  const { backupJob } = require('./scripts/backup');
  backupJob.start();
  console.log('Scheduled automatic database backup job');

  // Weekly database maintenance at 2:00 AM on Sundays.
  const maintenanceJob = new CronJob('0 2 * * 0', async () => {
    console.log('Running weekly database maintenance...');
    try {
      // Perform any maintenance tasks here
      // For example, compact collections, validate data integrity, etc.
      console.log('Database maintenance completed successfully');
    } catch (error) {
      console.error('Database maintenance error:', error);
    }
  }, null, true);
  console.log('Scheduled weekly database maintenance job');
}
// GET /api/health — process and database status for monitoring probes.
app.get('/api/health', (req, res) => {
  // mongoose readyState 1 means "connected"
  const dbConnected = mongoose.connection.readyState === 1;
  res.json({
    status: 'ok',
    timestamp: new Date().toISOString(),
    server: {
      uptime: process.uptime(),
      memory: process.memoryUsage(),
    },
    database: {
      status: dbConnected ? 'connected' : 'disconnected',
    },
  });
});
// API Routes
app.use('/api/words', wordRoutes);
app.use('/api/datasets', datasetRoutes);

// SPA fallback: serve index.html for any unmatched GET request — except
// /api/* paths, which get a JSON 404 instead. (Previously an unknown API
// URL silently received the frontend HTML, masking typos in API calls.)
app.get('*', (req, res) => {
  if (req.path.startsWith('/api/')) {
    return res.status(404).json({ message: 'Not found' });
  }
  res.sendFile(path.join(__dirname, '..', 'index.html'));
});

// Error handling middleware — must be registered after all routes.
// NOTE(review): err.message is echoed to the client; consider hiding it
// in production to avoid leaking internals.
app.use((err, req, res, next) => {
  console.error('Unhandled error:', err);
  res.status(500).json({ error: 'Internal server error', message: err.message });
});
// Handle uncaught exceptions
// NOTE(review): deliberately keeps the process alive after an uncaught
// exception, but the process may be in an inconsistent state — prefer
// letting it crash under a supervisor (pm2 is already a dependency).
process.on('uncaughtException', (err) => {
  console.error('Uncaught exception:', err);
  // Keep the process alive but log the error
});
// Handle unhandled promise rejections
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled promise rejection:', reason);
  // Keep the process alive but log the error
});
// Start the HTTP server.
const server = app.listen(PORT, () => {
  console.log(`R00TS server running on port ${PORT}`);
  console.log(`Server time: ${new Date().toLocaleString()}`);
  console.log(`Environment: ${process.env.NODE_ENV || 'development'}`);
  // Redact any credentials in the URI; guard against MONGODB_URI being
  // unset, which previously crashed this callback with a TypeError.
  const uri = process.env.MONGODB_URI || '(not set)';
  console.log(`MongoDB URI: ${uri.replace(/\/.*@/, '//***:***@')}`);
  console.log('Server is ready to accept connections');
});

// Graceful shutdown on SIGTERM (sent by pm2 and most hosting platforms):
// stop accepting connections, then close the DB connection, then exit.
process.on('SIGTERM', () => {
  console.log('SIGTERM received, shutting down gracefully');
  server.close(() => {
    console.log('Server closed');
    // Mongoose 7 removed callback support: connection.close() returns a
    // promise, and the original callback form never ran.
    mongoose.connection.close(false)
      .then(() => {
        console.log('MongoDB connection closed');
        process.exit(0);
      })
      .catch((err) => {
        console.error('Error closing MongoDB connection:', err);
        process.exit(1);
      });
  });
});

module.exports = app; // Export for testing