Version 0.4
This commit is contained in:
26
backend/.env.example
Normal file
26
backend/.env.example
Normal file
@@ -0,0 +1,26 @@
|
||||
# Server Configuration
|
||||
PORT=5000
|
||||
NODE_ENV=development
|
||||
|
||||
# JWT Secret (change this to something random!)
|
||||
JWT_SECRET=your_super_secret_jwt_key_change_this
|
||||
|
||||
# Encryption key for secrets at rest (SMTP passwords, etc.)
|
||||
# If not set, falls back to JWT_SECRET. Using a separate key is recommended
|
||||
# so that a JWT_SECRET compromise does not also expose encrypted data.
|
||||
ENCRYPTION_KEY=your_separate_encryption_key_change_this
|
||||
|
||||
# Database
|
||||
DATABASE_PATH=../data/status.db
|
||||
|
||||
# CORS (whitelist frontend URL)
|
||||
FRONTEND_URL=http://localhost:3000
|
||||
|
||||
# Monitoring defaults (in seconds)
|
||||
DEFAULT_CHECK_INTERVAL=300
|
||||
DEFAULT_TIMEOUT=10
|
||||
|
||||
# Trust reverse proxy headers (X-Forwarded-For) for correct client IPs.
|
||||
# Set to 'false' if the app is NOT behind a reverse proxy (nginx, Cloudflare, etc.).
|
||||
# Default: true
|
||||
TRUST_PROXY=true
|
||||
32
backend/package.json
Normal file
32
backend/package.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "arcane-status-backend",
|
||||
"version": "0.4.0",
|
||||
"description": "Status page backend",
|
||||
"main": "src/server.js",
|
||||
"scripts": {
|
||||
"start": "node src/server.js",
|
||||
"dev": "nodemon src/server.js",
|
||||
"test": "node --test tests/smoke.api.test.js"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"axios": "^1.15.0",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"cors": "^2.8.6",
|
||||
"dotenv": "^17.4.2",
|
||||
"express": "^5.2.1",
|
||||
"express-rate-limit": "^8.3.2",
|
||||
"helmet": "^8.1.0",
|
||||
"jsonwebtoken": "^9.0.3",
|
||||
"node-cron": "^4.2.1",
|
||||
"nodemailer": "^6.10.1",
|
||||
"socket.io": "^4.8.3",
|
||||
"sqlite": "^5.1.1",
|
||||
"sqlite3": "^6.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.1.14"
|
||||
}
|
||||
}
|
||||
131
backend/src/controllers/apiKeyController.js
Normal file
131
backend/src/controllers/apiKeyController.js
Normal file
@@ -0,0 +1,131 @@
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { getDatabase } = require('../models/database');
|
||||
|
||||
/**
 * Generate a cryptographically secure API key.
 * Format: sk_<48 random hex chars> (51 chars total).
 * Prefix stored: first 12 chars of the full key ("sk_" + 9 hex chars),
 * enough to narrow the DB lookup without leaking the secret.
 *
 * (Fixed stale doc: it previously claimed 32 hex chars / ~35 total, but the
 * code has always produced 24 random bytes = 48 hex chars.)
 *
 * @returns {{ rawKey: string, prefix: string }} rawKey is the full secret,
 *   shown to the caller exactly once; prefix is safe to store in plaintext.
 */
function generateApiKey() {
  // 24 random bytes -> 48 hex chars (192 bits of entropy).
  const secret = crypto.randomBytes(24).toString('hex');
  const rawKey = `sk_${secret}`;
  const prefix = rawKey.substring(0, 12); // "sk_" + first 9 hex chars
  return { rawKey, prefix };
}
|
||||
|
||||
// GET /admin/api-keys — list every key (prefix only, never the secret),
// joined with the creating user's display name.
async function listApiKeys(req, res) {
  try {
    const db = getDatabase();
    const rows = await db.all(`
      SELECT ak.id, ak.name, ak.key_prefix, ak.scope, ak.endpoint_ids,
             ak.active, ak.last_used_at, ak.expires_at, ak.created_at,
             u.name AS created_by_name
      FROM api_keys ak
      LEFT JOIN users u ON u.id = ak.created_by
      ORDER BY ak.created_at DESC
    `);

    // endpoint_ids is persisted as a JSON string; expose it as an array (or null).
    rows.forEach((row) => {
      row.endpoint_ids = row.endpoint_ids ? JSON.parse(row.endpoint_ids) : null;
    });

    res.json(rows);
  } catch (err) {
    console.error('List API keys error:', err);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// POST /admin/api-keys — mint a new key. The raw secret is returned in this
// response only; afterwards only its bcrypt hash exists.
async function createApiKey(req, res) {
  try {
    const { name, scope = 'global', endpoint_ids = null, expires_at = null } = req.body;

    if (!name || !name.trim()) {
      return res.status(400).json({ error: 'Key name is required' });
    }

    if (!['global', 'endpoint'].includes(scope)) {
      return res.status(400).json({ error: 'scope must be "global" or "endpoint"' });
    }

    const isEndpointScoped = scope === 'endpoint';
    if (isEndpointScoped && (!Array.isArray(endpoint_ids) || endpoint_ids.length === 0)) {
      return res.status(400).json({ error: 'endpoint_ids array required for endpoint-scoped keys' });
    }

    const { rawKey, prefix } = generateApiKey();
    // Only the hash is persisted; the raw secret never touches the DB.
    const hash = await bcrypt.hash(rawKey, 12);

    const db = getDatabase();
    const result = await db.run(
      `INSERT INTO api_keys (name, key_hash, key_prefix, scope, endpoint_ids, created_by, expires_at)
       VALUES (?, ?, ?, ?, ?, ?, ?)`,
      [
        name.trim(),
        hash,
        prefix,
        scope,
        isEndpointScoped ? JSON.stringify(endpoint_ids) : null,
        req.user.id,
        expires_at || null
      ]
    );

    const created = await db.get('SELECT * FROM api_keys WHERE id = ?', [result.lastID]);
    created.endpoint_ids = created.endpoint_ids ? JSON.parse(created.endpoint_ids) : null;

    // Return raw key in this response only, it will never be recoverable again
    res.status(201).json({
      ...created,
      raw_key: rawKey,
      _warning: 'Store this key securely. It will not be shown again.'
    });
  } catch (err) {
    console.error('Create API key error:', err);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// DELETE /admin/api-keys/:id (revoke) — soft-disable; the row is kept so the
// key's history (name, prefix, last use) remains auditable.
async function revokeApiKey(req, res) {
  try {
    const { id } = req.params;
    const db = getDatabase();

    const existing = await db.get('SELECT * FROM api_keys WHERE id = ?', [id]);
    if (!existing) {
      return res.status(404).json({ error: 'API key not found' });
    }

    await db.run('UPDATE api_keys SET active = 0 WHERE id = ?', [id]);
    res.json({ message: 'API key revoked' });
  } catch (err) {
    console.error('Revoke API key error:', err);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// DELETE (hard delete) /admin/api-keys/:id/delete — removes the row entirely;
// prefer revoke when an audit trail should survive.
async function deleteApiKey(req, res) {
  try {
    const { id } = req.params;
    const db = getDatabase();

    const existing = await db.get('SELECT * FROM api_keys WHERE id = ?', [id]);
    if (!existing) {
      return res.status(404).json({ error: 'API key not found' });
    }

    await db.run('DELETE FROM api_keys WHERE id = ?', [id]);
    res.json({ message: 'API key deleted' });
  } catch (err) {
    console.error('Delete API key error:', err);
    res.status(500).json({ error: 'Internal server error' });
  }
}

module.exports = { listApiKeys, createApiKey, revokeApiKey, deleteApiKey };
|
||||
42
backend/src/controllers/authController.js
Normal file
42
backend/src/controllers/authController.js
Normal file
@@ -0,0 +1,42 @@
|
||||
const jwt = require('jsonwebtoken');
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { getDatabase } = require('../models/database');
|
||||
|
||||
async function login(req, res) {
|
||||
try {
|
||||
const { email, password } = req.body;
|
||||
|
||||
if (!email || !password) {
|
||||
return res.status(400).json({ error: 'Email and password are required' });
|
||||
}
|
||||
|
||||
const db = getDatabase();
|
||||
const user = await db.get('SELECT * FROM users WHERE email = ? AND active = 1', [email]);
|
||||
|
||||
if (!user) {
|
||||
return res.status(401).json({ error: 'Invalid credentials' });
|
||||
}
|
||||
|
||||
const isPasswordValid = await bcrypt.compare(password, user.password_hash);
|
||||
|
||||
if (!isPasswordValid) {
|
||||
return res.status(401).json({ error: 'Invalid credentials' });
|
||||
}
|
||||
|
||||
// Generate JWT token
|
||||
const token = jwt.sign(
|
||||
{ id: user.id, email: user.email, role: user.role },
|
||||
process.env.JWT_SECRET,
|
||||
{ expiresIn: '24h' }
|
||||
);
|
||||
|
||||
res.json({ token, user: { id: user.id, email: user.email, name: user.name, role: user.role } });
|
||||
} catch (error) {
|
||||
console.error('Login error:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
login
|
||||
};
|
||||
152
backend/src/controllers/categoryController.js
Normal file
152
backend/src/controllers/categoryController.js
Normal file
@@ -0,0 +1,152 @@
|
||||
const {
|
||||
listCategories,
|
||||
getCategoryById: getCategoryRecordById,
|
||||
getCategoryEndpointCount,
|
||||
listEndpointsForCategory,
|
||||
createCategoryRecord,
|
||||
getMaxCategorySortOrder,
|
||||
updateCategoryRecord,
|
||||
clearCategoryFromEndpoints,
|
||||
deleteCategoryRecord,
|
||||
reorderCategoryRecords,
|
||||
} = require('../data/categoryData');
|
||||
const { getLatestCheckResult } = require('../data/endpointData');
|
||||
|
||||
/**
 * GET /categories — list all categories, each annotated with the number of
 * endpoints assigned to it (`endpoint_count`).
 */
async function getAllCategories(req, res) {
  try {
    const categories = await listCategories();

    // Fetch the per-category counts concurrently instead of awaiting one
    // query per category in sequence (avoids an N+1 round-trip chain).
    await Promise.all(
      categories.map(async (category) => {
        const countResult = await getCategoryEndpointCount(category.id);
        category.endpoint_count = countResult?.count || 0;
      })
    );

    res.json(categories);
  } catch (error) {
    console.error('Get categories error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
||||
|
||||
// GET /categories/:id — a category plus its endpoints, each carrying its
// latest check result under `latest`.
async function getCategoryById(req, res) {
  try {
    const category = await getCategoryRecordById(req.params.id);
    if (!category) {
      return res.status(404).json({ error: 'Category not found' });
    }

    const endpoints = await listEndpointsForCategory(category.id);
    for (const endpoint of endpoints) {
      endpoint.latest = (await getLatestCheckResult(endpoint.id)) || null;
    }

    res.json({ ...category, endpoints });
  } catch (error) {
    console.error('Get category error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// POST /admin/categories — create a category, appended at the end of the
// current sort order.
async function createCategory(req, res) {
  try {
    const { name, description } = req.body;

    if (!name) {
      return res.status(400).json({ error: 'Name is required' });
    }

    // New categories go after the current highest sort_order.
    const maxOrder = await getMaxCategorySortOrder();
    const nextOrder = (maxOrder?.max || 0) + 1;

    const inserted = await createCategoryRecord(name, description || null, nextOrder);
    const category = await getCategoryRecordById(inserted.lastID);

    res.status(201).json(category);
  } catch (error) {
    console.error('Create category error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT /admin/categories/:id — rename / re-describe a category.
 *
 * `name` is now required, matching createCategory: previously a request
 * without `name` passed `undefined` to updateCategoryRecord and could wipe
 * the stored name. `description` stays optional and may be cleared.
 */
async function updateCategory(req, res) {
  try {
    const { name, description } = req.body;

    if (!name) {
      return res.status(400).json({ error: 'Name is required' });
    }

    const existing = await getCategoryRecordById(req.params.id);
    if (!existing) {
      return res.status(404).json({ error: 'Category not found' });
    }

    await updateCategoryRecord(req.params.id, name, description || null);
    const category = await getCategoryRecordById(req.params.id);

    res.json(category);
  } catch (error) {
    console.error('Update category error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// DELETE /admin/categories/:id — detach its endpoints first, then remove.
async function deleteCategory(req, res) {
  try {
    const existing = await getCategoryRecordById(req.params.id);
    if (!existing) {
      return res.status(404).json({ error: 'Category not found' });
    }

    // Endpoints survive the deletion; they just lose the category assignment.
    await clearCategoryFromEndpoints(req.params.id);
    await deleteCategoryRecord(req.params.id);

    res.json({ success: true });
  } catch (error) {
    console.error('Delete category error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
async function reorderCategories(req, res) {
|
||||
try {
|
||||
const { order } = req.body;
|
||||
|
||||
if (!Array.isArray(order)) {
|
||||
return res.status(400).json({ error: 'Order must be an array of category IDs' });
|
||||
}
|
||||
|
||||
await reorderCategoryRecords(order);
|
||||
const categories = await listCategories();
|
||||
|
||||
for (let category of categories) {
|
||||
const countResult = await getCategoryEndpointCount(category.id);
|
||||
category.endpoint_count = countResult?.count || 0;
|
||||
}
|
||||
|
||||
res.json(categories);
|
||||
} catch (error) {
|
||||
console.error('Reorder categories error:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getAllCategories,
|
||||
getCategoryById,
|
||||
createCategory,
|
||||
updateCategory,
|
||||
deleteCategory,
|
||||
reorderCategories
|
||||
};
|
||||
|
||||
// Public, unauthenticated view: the plain category list, no endpoint counts.
async function getPublicCategories(req, res) {
  try {
    res.json(await listCategories());
  } catch (error) {
    console.error('Get public categories error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}

module.exports.getPublicCategories = getPublicCategories;
|
||||
414
backend/src/controllers/endpointController.js
Normal file
414
backend/src/controllers/endpointController.js
Normal file
@@ -0,0 +1,414 @@
|
||||
const { getDatabase } = require('../models/database');
|
||||
const { scheduleEndpoint, stopScheduling } = require('../services/monitoringService');
|
||||
const { validateEndpointUrl } = require('../middleware/auth');
|
||||
const {
|
||||
listEndpointsWithCategory,
|
||||
listCategoriesOrdered,
|
||||
getLatestCheckResult,
|
||||
getUptimeSummary,
|
||||
getEndpointById: getEndpointRecordById,
|
||||
getRecentCheckResults,
|
||||
createEndpointRecord,
|
||||
updateEndpointRecord,
|
||||
deleteEndpointRecord,
|
||||
reorderEndpointRecords,
|
||||
} = require('../data/endpointData');
|
||||
|
||||
/**
 * GET /admin/endpoints — all endpoints (with category info) plus the ordered
 * category list. Each endpoint is enriched with its latest check result and
 * a 30-day uptime summary (uptime_30d, total_checks_30d, successful_checks_30d).
 */
async function getAllEndpoints(req, res) {
  try {
    // The two base queries are independent — run them concurrently.
    const [endpoints, categories] = await Promise.all([
      listEndpointsWithCategory(),
      listCategoriesOrdered(),
    ]);

    // Enrich all endpoints in parallel instead of one-by-one (N+1 chain).
    await Promise.all(
      endpoints.map(async (endpoint) => {
        const result = await getLatestCheckResult(endpoint.id);
        endpoint.latest = result || null;

        const uptimeRow = await getUptimeSummary(endpoint.id, 30);

        endpoint.uptime_30d =
          uptimeRow?.total > 0
            ? parseFloat(((uptimeRow.ups / uptimeRow.total) * 100).toFixed(2))
            : null;

        endpoint.total_checks_30d = uptimeRow?.total ?? 0;
        endpoint.successful_checks_30d = uptimeRow?.ups ?? 0;
      })
    );

    res.json({ endpoints, categories });
  } catch (error) {
    console.error('Get endpoints error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET /admin/endpoints/:id — endpoint detail.
 *
 * Responds with:
 *  - endpoint:  the record, enriched with `latest`, 30-day uptime fields and
 *               downtime_events_30d (failed checks, not distinct outages)
 *  - results:   the 100 most recent check results
 *  - typeStats: 24h latency stats for tcp/ping endpoints, else null
 *  - pingStats: 24h ICMP stats for http endpoints with ping enabled, else null
 */
async function getEndpointById(req, res) {
  try {
    const db = getDatabase();
    const endpoint = await getEndpointRecordById(req.params.id);

    if (!endpoint) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }

    const results = await getRecentCheckResults(endpoint.id, 100);

    // results[0] is treated as the newest check — assumes getRecentCheckResults
    // returns newest-first; TODO confirm in endpointData.
    endpoint.latest = results[0] || null;

    // 30d uptime summary for endpoint detail hero / dashboard
    const uptimeRow = await getUptimeSummary(endpoint.id, 30);

    endpoint.uptime_30d =
      uptimeRow?.total > 0
        ? parseFloat(((uptimeRow.ups / uptimeRow.total) * 100).toFixed(2))
        : null;

    endpoint.total_checks_30d = uptimeRow?.total ?? 0;
    endpoint.successful_checks_30d = uptimeRow?.ups ?? 0;
    // Count of non-'up' checks in the window, clamped at 0.
    endpoint.downtime_events_30d = Math.max(
      0,
      (uptimeRow?.total ?? 0) - (uptimeRow?.ups ?? 0)
    );

    // 24h aggregate latency stats for connection-style checks.
    // jitter = population std-dev via sqrt(E[x^2] - E[x]^2), clamped >= 0
    // to guard against floating-point rounding going slightly negative.
    let typeStats = null;
    if (endpoint.type === 'tcp' || endpoint.type === 'ping') {
      const statsRow = await db.get(
        `SELECT
           COUNT(*) AS total,
           SUM(CASE WHEN status = 'down' THEN 1 ELSE 0 END) AS downs,
           AVG(response_time) AS avg_rt,
           MIN(response_time) AS min_rt,
           MAX(response_time) AS max_rt,
           SQRT(MAX(0, AVG(response_time * response_time) - AVG(response_time) * AVG(response_time))) AS jitter
         FROM check_results
         WHERE endpoint_id = ?
           AND checked_at > datetime('now', '-24 hours')
           AND response_time IS NOT NULL`,
        [endpoint.id]
      );

      if (statsRow && statsRow.total > 0) {
        typeStats = {
          total: statsRow.total,
          // Share of checks that came back 'down', as a percentage.
          packet_loss: parseFloat(((statsRow.downs / statsRow.total) * 100).toFixed(1)),
          avg_rt: Math.round(statsRow.avg_rt),
          min_rt: statsRow.min_rt,
          max_rt: statsRow.max_rt,
          jitter: Math.round(statsRow.jitter ?? 0),
        };
      }
    }

    // Optional ICMP side-check stats for HTTP endpoints. Unlike typeStats,
    // NULL ping_response_time rows are kept and counted as timeouts.
    let pingStats = null;
    if (endpoint.type === 'http' && endpoint.ping_enabled) {
      const pingRow = await db.get(
        `SELECT
           COUNT(*) AS total,
           SUM(CASE WHEN ping_response_time IS NULL THEN 1 ELSE 0 END) AS timeouts,
           AVG(ping_response_time) AS avg_rt,
           MIN(ping_response_time) AS min_rt,
           MAX(ping_response_time) AS max_rt,
           SQRT(MAX(0, AVG(ping_response_time * ping_response_time) - AVG(ping_response_time) * AVG(ping_response_time))) AS jitter
         FROM check_results
         WHERE endpoint_id = ?
           AND checked_at > datetime('now', '-24 hours')`,
        [endpoint.id]
      );

      if (pingRow && pingRow.total > 0) {
        pingStats = {
          total: pingRow.total,
          packet_loss: parseFloat(((pingRow.timeouts / pingRow.total) * 100).toFixed(1)),
          // avg_rt is NULL when every ping in the window timed out.
          avg_rt: pingRow.avg_rt !== null ? Math.round(pingRow.avg_rt) : null,
          min_rt: pingRow.min_rt,
          max_rt: pingRow.max_rt,
          jitter: pingRow.avg_rt !== null ? Math.round(pingRow.jitter ?? 0) : null,
        };
      }
    }

    res.json({ endpoint, results, typeStats, pingStats });
  } catch (error) {
    console.error('Get endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// POST /admin/endpoints — create an endpoint and, if active, start
// monitoring it immediately.
async function createEndpoint(req, res) {
  try {
    const { name, url, type, interval, timeout, active, ping_enabled, category_id } = req.body;

    if (!name || !url) {
      return res.status(400).json({ error: 'Name and URL are required' });
    }

    const endpointType = type || 'http';

    // Validate the URL against the selected check type before persisting.
    const urlError = await validateEndpointUrl(url, endpointType);
    if (urlError) {
      return res.status(400).json({ error: urlError });
    }

    const record = {
      name,
      url,
      type: endpointType,
      interval: interval || 300,
      timeout: timeout || 10,
      active: active !== false ? 1 : 0,
      // ICMP side-checks are only meaningful for HTTP endpoints.
      ping_enabled: ping_enabled && endpointType === 'http' ? 1 : 0,
      group_id: category_id || null,
    };

    const result = await createEndpointRecord(record);
    const endpoint = await getEndpointRecordById(result.lastID);

    if (endpoint.active) {
      await scheduleEndpoint(endpoint);
    }

    res.status(201).json(endpoint);
  } catch (error) {
    console.error('Create endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT /admin/endpoints/:id — update an endpoint and (re)schedule monitoring.
 *
 * The stored URL is only replaced when BOTH `url` and `type` are supplied,
 * because the URL must be re-validated against the check type.
 * NOTE(review): a `url` sent without `type` is silently ignored — confirm
 * with the frontend that this is the intended contract.
 *
 * Fixes vs previous version: the two near-identical update branches are
 * collapsed into one path, and an unknown id now returns 404 instead of
 * crashing on `endpoint.active` (which produced a 500).
 */
async function updateEndpoint(req, res) {
  try {
    const { name, url, type, interval, timeout, active, ping_enabled, category_id } = req.body;

    const includeUrl = Boolean(url && type);
    if (includeUrl) {
      const urlError = await validateEndpointUrl(url, type);
      if (urlError) {
        return res.status(400).json({ error: urlError });
      }
    }

    const fields = {
      name,
      type,
      interval,
      timeout,
      active: active ? 1 : 0,
      // ICMP side-checks only apply to HTTP endpoints.
      ping_enabled: ping_enabled && type === 'http' ? 1 : 0,
      group_id: category_id || null,
    };
    if (includeUrl) {
      fields.url = url;
    }

    await updateEndpointRecord(req.params.id, fields, includeUrl);

    const endpoint = await getEndpointRecordById(req.params.id);
    if (!endpoint) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }

    // Keep the scheduler in sync with the new active flag.
    if (endpoint.active) {
      await scheduleEndpoint(endpoint);
    } else {
      stopScheduling(endpoint.id);
    }

    res.json(endpoint);
  } catch (error) {
    console.error('Update endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// DELETE /admin/endpoints/:id — stop the monitor first so no check fires
// against a row that is about to disappear, then delete the record.
async function deleteEndpoint(req, res) {
  try {
    const endpointId = parseInt(req.params.id, 10);
    stopScheduling(endpointId);

    await deleteEndpointRecord(req.params.id);
    res.json({ success: true });
  } catch (error) {
    console.error('Delete endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET /endpoints/:id/uptime?days=N — overall uptime percentage for the
 * trailing N-day window (default 30).
 *
 * Fix: `uptime` is now always a number. Previously `toFixed(2)` returned a
 * string (e.g. "99.87") while the empty case returned the number 0, giving
 * the field an inconsistent type — and every other uptime field in this
 * controller (uptime_30d) is numeric.
 */
async function getUptime(req, res) {
  try {
    const db = getDatabase();
    const days = parseInt(req.query.days, 10) || 30;
    const endpointId = req.params.id;

    // ISO cutoff compared lexicographically against checked_at.
    const cutoffDate = new Date(
      Date.now() - days * 24 * 60 * 60 * 1000
    ).toISOString();

    const results = await db.all(
      `SELECT status
       FROM check_results
       WHERE endpoint_id = ? AND checked_at > ?
       ORDER BY checked_at ASC`,
      [endpointId, cutoffDate]
    );

    const ups = results.filter((r) => r.status === 'up').length;
    const total = results.length;
    const uptime = total > 0 ? parseFloat(((ups / total) * 100).toFixed(2)) : 0;

    res.json({ uptime, ups, total, days });
  } catch (error) {
    console.error('Get uptime error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// GET /endpoints/:id/history?days=N — per-day uptime percentages for the
// trailing N days (default 90). Days with no checks are simply absent.
async function getHistory(req, res) {
  try {
    const db = getDatabase();
    const days = parseInt(req.query.days, 10) || 90;
    const endpointId = req.params.id;

    const endpoint = await db.get('SELECT id FROM endpoints WHERE id = ?', [endpointId]);
    if (!endpoint) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }

    const dailyRows = await db.all(
      `SELECT
         date(checked_at) AS day,
         COUNT(*) AS total,
         SUM(CASE WHEN status = 'up' THEN 1 ELSE 0 END) AS ups
       FROM check_results
       WHERE endpoint_id = ?
         AND checked_at > datetime('now', '-' || ? || ' days')
       GROUP BY day
       ORDER BY day ASC`,
      [endpointId, days]
    );

    const data = dailyRows.map((row) => ({
      date: row.day,
      uptime: row.total > 0 ? parseFloat(((row.ups / row.total) * 100).toFixed(2)) : null,
      checks: row.total,
    }));

    res.json({ days, data });
  } catch (error) {
    console.error('Get history error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET /endpoints/:id/response-times — aggregated latency series.
 *
 * Query params:
 *  - hours: window in hours; when present it wins and forces hourly buckets
 *  - days:  window in days (default 30), used only when `hours` is absent
 *
 * Granularity: hourly when `hours` is given, or when the day-window contains
 * data on at most 2 distinct days (a daily series would be too sparse to
 * chart); otherwise daily. Responds with { days, hours, granularity, data }.
 */
async function getResponseTimes(req, res) {
  try {
    const db = getDatabase();
    const endpointId = req.params.id;

    // hours takes precedence; days is only resolved when hours is absent.
    const hoursParam = req.query.hours ? parseInt(req.query.hours, 10) || null : null;
    const daysParam = hoursParam ? null : parseInt(req.query.days, 10) || 30;

    const endpoint = await db.get('SELECT id FROM endpoints WHERE id = ?', [endpointId]);
    if (!endpoint) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }

    let useHourly;
    let cutoffExpr;
    let cutoffArgs;

    // cutoffExpr is a fixed SQL fragment; the window size itself is always
    // bound as a `?` parameter, never string-interpolated from user input.
    if (hoursParam) {
      useHourly = true;
      cutoffExpr = `datetime('now', '-' || ? || ' hours')`;
      cutoffArgs = [hoursParam];
    } else {
      cutoffExpr = `datetime('now', '-' || ? || ' days')`;
      cutoffArgs = [daysParam];

      // Probe how many distinct days actually have data; with <= 2 days a
      // daily aggregation is too coarse, so fall back to hourly buckets.
      const dayCount = await db.get(
        `SELECT COUNT(DISTINCT date(checked_at)) AS cnt
         FROM check_results
         WHERE endpoint_id = ?
           AND checked_at > ${cutoffExpr}
           AND response_time IS NOT NULL`,
        [endpointId, ...cutoffArgs]
      );
      useHourly = (dayCount?.cnt ?? 0) <= 2;
    }

    let rows;
    if (useHourly) {
      // Hourly buckets; `day` is the hour start as an ISO-ish timestamp.
      rows = await db.all(
        `SELECT
           strftime('%Y-%m-%dT%H:00:00', checked_at) AS day,
           ROUND(AVG(response_time)) AS avg,
           MIN(response_time) AS min,
           MAX(response_time) AS max,
           COUNT(*) AS checks
         FROM check_results
         WHERE endpoint_id = ?
           AND checked_at > ${cutoffExpr}
           AND response_time IS NOT NULL
         GROUP BY strftime('%Y-%m-%d %H', checked_at)
         ORDER BY day ASC`,
        [endpointId, ...cutoffArgs]
      );
    } else {
      // Daily buckets.
      rows = await db.all(
        `SELECT
           date(checked_at) AS day,
           ROUND(AVG(response_time)) AS avg,
           MIN(response_time) AS min,
           MAX(response_time) AS max,
           COUNT(*) AS checks
         FROM check_results
         WHERE endpoint_id = ?
           AND checked_at > ${cutoffExpr}
           AND response_time IS NOT NULL
         GROUP BY day
         ORDER BY day ASC`,
        [endpointId, ...cutoffArgs]
      );
    }

    res.json({
      days: daysParam,
      hours: hoursParam,
      granularity: useHourly ? 'hour' : 'day',
      data: rows,
    });
  } catch (error) {
    console.error('Get response times error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
async function reorderEndpoints(req, res) {
|
||||
try {
|
||||
const { updates } = req.body;
|
||||
if (!Array.isArray(updates) || updates.length === 0) {
|
||||
return res.status(400).json({ error: 'updates array is required' });
|
||||
}
|
||||
|
||||
await reorderEndpointRecords(updates);
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error('Reorder endpoints error:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getAllEndpoints,
|
||||
getEndpointById,
|
||||
createEndpoint,
|
||||
updateEndpoint,
|
||||
deleteEndpoint,
|
||||
getUptime,
|
||||
getHistory,
|
||||
getResponseTimes,
|
||||
reorderEndpoints,
|
||||
};
|
||||
380
backend/src/controllers/incidentController.js
Normal file
380
backend/src/controllers/incidentController.js
Normal file
@@ -0,0 +1,380 @@
|
||||
const { getDatabase, runInTransaction } = require('../models/database');
|
||||
const {
|
||||
listIncidentsOrdered,
|
||||
getIncidentById: getIncidentRecordById,
|
||||
listIncidentEndpoints,
|
||||
getLatestIncidentUpdate,
|
||||
listIncidentUpdates,
|
||||
createIncidentRecord,
|
||||
linkIncidentEndpoint,
|
||||
createIncidentUpdate,
|
||||
updateIncidentCore,
|
||||
deleteIncidentLinksExceptSource,
|
||||
deleteAllIncidentLinks,
|
||||
markIncidentResolved,
|
||||
setIncidentAdminManaged,
|
||||
setIncidentStatus,
|
||||
getIncidentUpdateById,
|
||||
reopenIncidentRecord,
|
||||
setIncidentPostMortem,
|
||||
deleteIncidentRecord,
|
||||
} = require('../data/incidentData');
|
||||
const { queueIncidentNotification } = require('../services/notificationService');
|
||||
|
||||
// Helpers

/**
 * Attach related data to an incident row in place: its linked endpoints,
 * the most recent update (always, for list previews), and — on request —
 * the full update timeline. Returns the same (mutated) incident.
 */
async function enrichIncident(incident, { includeUpdates = false } = {}) {
  const { id } = incident;

  incident.endpoints = await listIncidentEndpoints(id);
  incident.latest_update = await getLatestIncidentUpdate(id);

  if (includeUpdates) {
    incident.updates = await listIncidentUpdates(id);
  }

  return incident;
}
|
||||
|
||||
// Public

// GET /incidents — every incident (ordering decided by the data layer),
// each with endpoints and latest-update preview attached.
async function getAllIncidents(req, res) {
  try {
    const incidents = await listIncidentsOrdered();

    for (const incident of incidents) {
      await enrichIncident(incident);
    }

    res.json(incidents);
  } catch (error) {
    console.error('Get incidents error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// GET /incidents/:id — a single incident with its full update timeline.
async function getIncidentById(req, res) {
  try {
    const incident = await getIncidentRecordById(req.params.id);
    if (!incident) {
      return res.status(404).json({ error: 'Incident not found' });
    }

    res.json(await enrichIncident(incident, { includeUpdates: true }));
  } catch (error) {
    console.error('Get incident error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Admin CRUD

/**
 * POST /admin/incidents — create an incident.
 *
 * Body: title (required), description, severity (default 'degraded'),
 * status (default 'investigating'), source (default 'manual'),
 * endpoint_ids (affected endpoints), initial_message (first timeline entry),
 * created_by (default 'admin').
 *
 * Record creation, endpoint links and the optional initial update are
 * committed atomically; the notification is queued only after the
 * transaction succeeds. Responds 201 with the enriched incident.
 */
async function createIncident(req, res) {
  try {
    const {
      title,
      description,
      severity = 'degraded',
      status = 'investigating',
      source = 'manual',
      endpoint_ids = [],
      initial_message,
      created_by = 'admin'
    } = req.body;

    // severity can only be falsy here if the client explicitly sends one
    // (the destructuring default covers the missing case).
    if (!title || !severity) {
      return res.status(400).json({ error: 'Title and severity are required' });
    }

    const incidentId = await runInTransaction(async () => {
      const result = await createIncidentRecord({
        title,
        description: description || '',
        severity,
        status,
        source,
      });

      for (const endpointId of endpoint_ids) {
        await linkIncidentEndpoint(result.lastID, endpointId);
      }

      if (initial_message) {
        // Seed the timeline with the opening message at the initial status.
        await createIncidentUpdate(result.lastID, initial_message, status, created_by);
      }

      return result.lastID;
    });

    const incident = await getIncidentRecordById(incidentId);
    await enrichIncident(incident, { includeUpdates: true });
    // Queued after the transaction so subscribers never see a rolled-back incident.
    await queueIncidentNotification('incident_created', incident.id, initial_message || incident.description || 'New incident created.');
    res.status(201).json(incident);
  } catch (error) {
    console.error('Create incident error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT /admin/incidents/:id — edit an incident's core fields and its set of
 * linked endpoints, atomically.
 *
 * Link handling differs for auto-created incidents: the link to the source
 * endpoint is always preserved and re-asserted, so an admin cannot detach an
 * auto-incident from the endpoint that raised it.
 */
async function updateIncident(req, res) {
  try {
    const { title, description, severity, status, endpoint_ids = [] } = req.body;
    const existing = await getIncidentRecordById(req.params.id);
    if (!existing) return res.status(404).json({ error: 'Incident not found' });

    // Mark as admin-managed if a human is editing an auto-created incident
    const adminManaged = existing.auto_created ? 1 : (existing.admin_managed || 0);

    await runInTransaction(async () => {
      await updateIncidentCore(req.params.id, {
        title,
        description: description || '',
        severity,
        status,
        admin_managed: adminManaged,
      });

      if (existing.auto_created && existing.source_endpoint_id) {
        // Replace every link except the source endpoint's, then re-link the
        // requested endpoints plus the source.
        // NOTE(review): the third `true` argument to linkIncidentEndpoint is
        // presumably an upsert/admin flag — confirm in incidentData.
        await deleteIncidentLinksExceptSource(req.params.id, existing.source_endpoint_id);
        for (const endpointId of endpoint_ids) {
          await linkIncidentEndpoint(req.params.id, endpointId, true);
        }
        await linkIncidentEndpoint(req.params.id, existing.source_endpoint_id, true);
      } else {
        // Manual incidents: full replace of the link set.
        await deleteAllIncidentLinks(req.params.id);
        for (const endpointId of endpoint_ids) {
          await linkIncidentEndpoint(req.params.id, endpointId);
        }
      }
    });

    const incident = await getIncidentRecordById(req.params.id);
    await enrichIncident(incident, { includeUpdates: true });
    await queueIncidentNotification('incident_updated', incident.id, 'Incident details were updated.');
    res.json(incident);
  } catch (error) {
    console.error('Update incident error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST /incidents/:id/resolve — mark an incident resolved, append a closing
 * timeline update, and queue an "incident_resolved" notification.
 */
async function resolveIncident(req, res) {
  try {
    const incidentId = req.params.id;
    const { message, created_by = 'admin' } = req.body;

    const existing = await getIncidentRecordById(incidentId);
    if (!existing) return res.status(404).json({ error: 'Incident not found' });

    // Guard against double-resolution: without this, re-posting would append
    // duplicate "resolved" timeline entries and re-send notifications.
    if (existing.resolved_at) {
      return res.status(400).json({ error: 'Incident is already resolved' });
    }

    // Manually resolving an auto-created incident marks it admin-managed.
    const adminManaged = existing.auto_created ? 1 : (existing.admin_managed || 0);
    const closingMessage = message || 'This incident has been resolved.';

    await runInTransaction(async () => {
      await markIncidentResolved(incidentId, adminManaged);
      await createIncidentUpdate(incidentId, closingMessage, 'resolved', created_by);
    });

    const incident = await getIncidentRecordById(incidentId);
    await enrichIncident(incident, { includeUpdates: true });
    await queueIncidentNotification('incident_resolved', incident.id, closingMessage);
    res.json(incident);
  } catch (error) {
    console.error('Resolve incident error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * DELETE /incidents/:id — remove an incident record.
 * Deletion is delegated to the model layer; always reports success unless
 * the underlying delete throws.
 */
async function deleteIncident(req, res) {
  try {
    const { id } = req.params;
    await deleteIncidentRecord(id);
    res.json({ success: true });
  } catch (error) {
    console.error('Delete incident error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Incident Updates (timeline)
|
||||
|
||||
/**
 * POST /incidents/:id/updates — append a timeline update to an incident.
 * Optionally moves the incident to a new status via `status_label`, and
 * queues an "incident_updated" notification. 201 with the created update.
 */
async function addIncidentUpdate(req, res) {
  try {
    const incidentId = req.params.id;
    const { message, status_label, created_by = 'admin' } = req.body;

    if (!message) {
      return res.status(400).json({ error: 'Message is required' });
    }

    const incident = await getIncidentRecordById(incidentId);
    if (!incident) return res.status(404).json({ error: 'Incident not found' });

    const updateId = await runInTransaction(async () => {
      // Posting a manual update to an auto-created incident hands it over
      // to admin management so the monitor stops managing it.
      if (incident.auto_created && !incident.admin_managed) {
        await setIncidentAdminManaged(incidentId, 1);
      }

      const inserted = await createIncidentUpdate(incidentId, message, status_label || null, created_by);

      // An update may optionally transition the incident's overall status.
      if (status_label) {
        await setIncidentStatus(incidentId, status_label);
      }

      return inserted.lastID;
    });

    const update = await getIncidentUpdateById(updateId);
    await queueIncidentNotification('incident_updated', incidentId, message);
    res.status(201).json(update);
  } catch (error) {
    console.error('Add incident update error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Maintenance Windows
|
||||
|
||||
/**
 * GET /maintenance — list all maintenance windows, newest first, with the
 * linked endpoint's name joined in (NULL-safe for global windows).
 */
async function getAllMaintenance(req, res) {
  try {
    const db = getDatabase();
    const rows = await db.all(`
      SELECT mw.*, e.name AS endpoint_name
      FROM maintenance_windows mw
      LEFT JOIN endpoints e ON e.id = mw.endpoint_id
      ORDER BY mw.start_time DESC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Get maintenance error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST /maintenance — create a maintenance window.
 * Requires title, start_time, and end_time; rejects windows whose end is
 * not after their start. Responds 201 with the created row.
 */
async function createMaintenance(req, res) {
  try {
    const { title, description, endpoint_id, start_time, end_time } = req.body;

    if (!title || !start_time || !end_time) {
      return res.status(400).json({ error: 'Title, start_time, and end_time are required' });
    }

    // Reject windows that end at or before their start. When either
    // timestamp fails to parse (Date.parse → NaN) we fall through and
    // accept, matching the previous permissive behavior for unusual formats.
    const startMs = Date.parse(start_time);
    const endMs = Date.parse(end_time);
    if (Number.isFinite(startMs) && Number.isFinite(endMs) && endMs <= startMs) {
      return res.status(400).json({ error: 'end_time must be after start_time' });
    }

    const db = getDatabase();
    const result = await db.run(
      `INSERT INTO maintenance_windows (title, description, endpoint_id, start_time, end_time)
       VALUES (?, ?, ?, ?, ?)`,
      [title, description || '', endpoint_id || null, start_time, end_time]
    );

    const window = await db.get('SELECT * FROM maintenance_windows WHERE id = ?', [result.lastID]);
    res.status(201).json(window);
  } catch (error) {
    console.error('Create maintenance error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT /maintenance/:id — update a maintenance window.
 * 404 if the window does not exist (previously this returned 200 with an
 * empty body), 400 on missing fields or an end time not after the start.
 */
async function updateMaintenance(req, res) {
  try {
    const { title, description, endpoint_id, start_time, end_time } = req.body;
    const db = getDatabase();

    const existing = await db.get('SELECT id FROM maintenance_windows WHERE id = ?', [req.params.id]);
    if (!existing) {
      return res.status(404).json({ error: 'Maintenance window not found' });
    }

    // The UPDATE writes all columns, so missing fields would blank the row.
    if (!title || !start_time || !end_time) {
      return res.status(400).json({ error: 'Title, start_time, and end_time are required' });
    }

    // Same time-order rule as createMaintenance; unparsable timestamps
    // (Date.parse → NaN) fall through and are accepted as before.
    const startMs = Date.parse(start_time);
    const endMs = Date.parse(end_time);
    if (Number.isFinite(startMs) && Number.isFinite(endMs) && endMs <= startMs) {
      return res.status(400).json({ error: 'end_time must be after start_time' });
    }

    await db.run(
      `UPDATE maintenance_windows
       SET title = ?, description = ?, endpoint_id = ?, start_time = ?, end_time = ?,
           updated_at = CURRENT_TIMESTAMP
       WHERE id = ?`,
      [title, description || '', endpoint_id || null, start_time, end_time, req.params.id]
    );

    const window = await db.get('SELECT * FROM maintenance_windows WHERE id = ?', [req.params.id]);
    res.json(window);
  } catch (error) {
    console.error('Update maintenance error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * DELETE /maintenance/:id — remove a maintenance window.
 * 404 when no row was deleted (previously reported success for any id).
 */
async function deleteMaintenance(req, res) {
  try {
    const db = getDatabase();
    const result = await db.run('DELETE FROM maintenance_windows WHERE id = ?', [req.params.id]);

    // The sqlite wrapper's run() result exposes `changes` — the number of
    // rows affected. Zero means the id did not exist.
    if (!result || result.changes === 0) {
      return res.status(404).json({ error: 'Maintenance window not found' });
    }

    res.json({ success: true });
  } catch (error) {
    console.error('Delete maintenance error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Reopen
|
||||
|
||||
/**
 * POST /incidents/:id/reopen — re-open a resolved incident.
 * Allowed only within REOPEN_WINDOW_DAYS of resolution; appends an
 * "investigating" timeline entry and queues a notification.
 */
async function reopenIncident(req, res) {
  try {
    const incident = await getIncidentRecordById(req.params.id);
    if (!incident) return res.status(404).json({ error: 'Incident not found' });

    if (!incident.resolved_at) {
      return res.status(400).json({ error: 'Incident is not resolved' });
    }

    // Admins may re-open within 7 days of resolution.
    const REOPEN_WINDOW_DAYS = 7;

    // resolved_at is normally a SQLite UTC timestamp like
    // 'YYYY-MM-DD HH:MM:SS'. Only normalize it to ISO-8601 when it is not
    // already in ISO form — blindly appending 'Z' to a string that already
    // contains 'T'/'Z' produces an Invalid Date, which made the window
    // check silently pass. TODO confirm resolved_at is always stored in UTC.
    const raw = String(incident.resolved_at);
    const iso = raw.includes('T') ? raw : `${raw.replace(' ', 'T')}Z`;
    const resolvedAt = new Date(iso);

    if (Number.isNaN(resolvedAt.getTime())) {
      // Unparsable timestamp: keep the historical permissive behavior
      // (allow the reopen) but make the anomaly visible in the logs.
      console.warn('Reopen incident: unparsable resolved_at:', raw);
    } else {
      const ageDays = (Date.now() - resolvedAt.getTime()) / (1000 * 60 * 60 * 24);
      if (ageDays > REOPEN_WINDOW_DAYS) {
        return res.status(403).json({
          error: `This incident can no longer be re-opened. The ${REOPEN_WINDOW_DAYS}-day re-open window has expired.`,
          expired: true,
        });
      }
    }

    const { message = '', created_by = 'admin' } = req.body;
    await runInTransaction(async () => {
      await reopenIncidentRecord(req.params.id);
      await createIncidentUpdate(
        req.params.id,
        message || 'This incident has been re-opened by an administrator.',
        'investigating',
        created_by
      );
    });

    const updated = await getIncidentRecordById(req.params.id);
    await enrichIncident(updated, { includeUpdates: true });
    await queueIncidentNotification('incident_updated', updated.id, message || 'Incident re-opened by administrator.');
    res.json(updated);
  } catch (error) {
    console.error('Reopen incident error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Post-mortem
|
||||
|
||||
/**
 * PUT /incidents/:id/post-mortem — attach (or clear, by sending an empty
 * value) the post-mortem text on an incident. Returns the enriched incident.
 */
async function setPostMortem(req, res) {
  try {
    const incidentId = req.params.id;
    const { post_mortem } = req.body;

    const incident = await getIncidentRecordById(incidentId);
    if (!incident) return res.status(404).json({ error: 'Incident not found' });

    // A falsy body value clears the post-mortem (stored as NULL).
    await setIncidentPostMortem(incidentId, post_mortem || null);

    const updated = await getIncidentRecordById(incidentId);
    await enrichIncident(updated, { includeUpdates: true });
    res.json(updated);
  } catch (error) {
    console.error('Set post-mortem error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
module.exports = {
|
||||
getAllIncidents,
|
||||
getIncidentById,
|
||||
createIncident,
|
||||
updateIncident,
|
||||
resolveIncident,
|
||||
deleteIncident,
|
||||
addIncidentUpdate,
|
||||
reopenIncident,
|
||||
setPostMortem,
|
||||
getAllMaintenance,
|
||||
createMaintenance,
|
||||
updateMaintenance,
|
||||
deleteMaintenance,
|
||||
};
|
||||
275
backend/src/controllers/notificationController.js
Normal file
275
backend/src/controllers/notificationController.js
Normal file
@@ -0,0 +1,275 @@
|
||||
const { getDatabase, runInTransaction } = require('../models/database');
|
||||
const { verifySmtpConnection, sendMail } = require('../services/smtpService');
|
||||
const { renderTemplate } = require('../services/notificationTemplates');
|
||||
const {
|
||||
getNotificationDefaults,
|
||||
setNotificationDefaults,
|
||||
getNotificationHealth,
|
||||
ensureUserNotificationDefaults,
|
||||
} = require('../services/notificationService');
|
||||
|
||||
/**
 * GET the authenticated user's email-notification preferences.
 * Combines the user's single 'all'-scope row (the on/off flags) with any
 * active endpoint/category-scoped rows (the selection).
 */
async function getMyNotificationPreferences(req, res) {
  try {
    const db = getDatabase();
    const userId = req.user.id;
    await ensureUserNotificationDefaults(userId, req.user.role);

    const allScope = await db.get(
      `SELECT * FROM email_notifications
       WHERE user_id = ? AND scope_type = 'all' AND endpoint_id IS NULL AND category_id IS NULL
       LIMIT 1`,
      [userId]
    );

    const scoped = await db.all(
      `SELECT scope_type, endpoint_id, category_id
       FROM email_notifications
       WHERE user_id = ? AND scope_type IN ('endpoint', 'category') AND active = 1`,
      [userId]
    );

    // Flags are stored as 0/1 integers; allScope may be missing entirely.
    const flag = (value) => Number(value || 0) === 1;

    const selectedEndpointIds = [];
    const selectedCategoryIds = [];
    for (const row of scoped) {
      if (row.scope_type === 'endpoint') {
        selectedEndpointIds.push(row.endpoint_id);
      } else {
        selectedCategoryIds.push(row.category_id);
      }
    }

    res.json({
      notifyOnDown: flag(allScope?.notify_on_down),
      notifyOnDegraded: flag(allScope?.notify_on_degraded),
      notifyOnRecovered: flag(allScope?.notify_on_recovery),
      notifyOnIncident: flag(allScope?.notify_on_incident),
      // Any scoped row at all means the user narrowed their selection.
      scope: scoped.length > 0 ? 'selected' : 'all',
      selectedEndpointIds,
      selectedCategoryIds,
    });
  } catch (error) {
    console.error('Get notification preferences error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT the authenticated user's email-notification preferences.
 * Updates the 'all'-scope flag row, then rebuilds the endpoint/category
 * selection from scratch inside a single transaction.
 */
async function updateMyNotificationPreferences(req, res) {
  try {
    const {
      notifyOnDown,
      notifyOnDegraded,
      notifyOnRecovered,
      notifyOnIncident,
      scope,
      selectedEndpointIds = [],
      selectedCategoryIds = [],
    } = req.body;

    const userId = req.user.id;

    await runInTransaction(async (db) => {
      await ensureUserNotificationDefaults(userId, req.user.role);

      // Persist the four on/off flags on the user's single 'all'-scope row.
      const flags = [notifyOnDown, notifyOnDegraded, notifyOnRecovered, notifyOnIncident]
        .map((value) => (value ? 1 : 0));
      await db.run(
        `UPDATE email_notifications
         SET notify_on_down = ?, notify_on_degraded = ?, notify_on_recovery = ?, notify_on_incident = ?, updated_at = CURRENT_TIMESTAMP
         WHERE user_id = ? AND scope_type = 'all' AND endpoint_id IS NULL AND category_id IS NULL`,
        [...flags, userId]
      );

      // Rebuild the selection: drop all scoped rows, then re-insert the
      // requested ones when the user narrowed their scope.
      await db.run(
        `DELETE FROM email_notifications
         WHERE user_id = ? AND scope_type IN ('endpoint', 'category')`,
        [userId]
      );

      if (scope !== 'selected') return;

      for (const endpointId of selectedEndpointIds) {
        await db.run(
          `INSERT INTO email_notifications
           (user_id, endpoint_id, category_id, scope_type, notify_on_down, notify_on_recovery, notify_on_degraded, notify_on_incident, active)
           VALUES (?, ?, NULL, 'endpoint', 1, 1, 1, 1, 1)`,
          [userId, endpointId]
        );
      }

      for (const categoryId of selectedCategoryIds) {
        await db.run(
          `INSERT INTO email_notifications
           (user_id, endpoint_id, category_id, scope_type, notify_on_down, notify_on_recovery, notify_on_degraded, notify_on_incident, active)
           VALUES (?, NULL, ?, 'category', 1, 1, 1, 1, 1)`,
          [userId, categoryId]
        );
      }
    });

    res.json({ success: true });
  } catch (error) {
    console.error('Update notification preferences error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST a one-off SMTP test email to the given recipient.
 * Verifies the SMTP connection first; SMTP/validation failures surface as
 * 400 with the underlying error message.
 */
async function sendSmtpTestEmail(req, res) {
  try {
    const { to } = req.body;
    const looksLikeEmail = to && /^\S+@\S+\.\S+$/.test(to);
    if (!looksLikeEmail) {
      return res.status(400).json({ error: 'A valid recipient email is required.' });
    }

    // Fail fast if the configured SMTP credentials/connection are broken.
    await verifySmtpConnection();

    // Resolve the public status-page URL: settings first, then env fallbacks.
    const { getSettingsMap } = require('../services/settingsService');
    const settings = await getSettingsMap();
    const publicUrl = String(
      settings.publicUrl ||
        process.env.PUBLIC_STATUS_PAGE_URL ||
        process.env.FRONTEND_URL ||
        'http://localhost:3000'
    );

    // Reuse the incident-update template for the test message.
    const template = renderTemplate('incident_updated', {
      incident: {
        title: 'SMTP Test Notification',
        status: 'test',
      },
      message: 'SMTP credentials are valid and outbound email delivery is working.',
      timestamp: new Date().toISOString(),
      statusPageUrl: publicUrl,
    });

    await sendMail({ to, subject: template.subject, text: template.text, html: template.html });

    res.json({ success: true, message: 'Test email sent successfully.' });
  } catch (error) {
    console.error('Send test email error:', error);
    res.status(400).json({ error: error.message || 'Failed to send test email.' });
  }
}
|
||||
|
||||
/**
 * GET all extra notification recipients (non-user email addresses),
 * newest first. Never exposes columns beyond the selected list.
 */
async function listExtraRecipients(req, res) {
  try {
    const db = getDatabase();
    const rows = await db.all(
      'SELECT id, email, name, active, created_at, updated_at FROM notification_extra_recipients ORDER BY created_at DESC'
    );
    res.json(rows);
  } catch (error) {
    console.error('List extra recipients error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST a new extra notification recipient.
 * Emails are normalized to lowercase; duplicates are rejected with 400.
 */
async function createExtraRecipient(req, res) {
  try {
    const { email, name } = req.body;
    if (!email || !/^\S+@\S+\.\S+$/.test(email)) {
      return res.status(400).json({ error: 'A valid email is required.' });
    }

    const db = getDatabase();
    const result = await db.run(
      'INSERT INTO notification_extra_recipients (email, name, active, updated_at) VALUES (?, ?, 1, CURRENT_TIMESTAMP)',
      [email.trim().toLowerCase(), (name || '').trim() || null]
    );

    const row = await db.get('SELECT id, email, name, active, created_at, updated_at FROM notification_extra_recipients WHERE id = ?', [result.lastID]);
    res.status(201).json(row);
  } catch (error) {
    console.error('Create extra recipient error:', error);
    // Detect duplicate-email inserts via the SQLite error code as well as
    // the message text — message wording alone is driver/version dependent.
    const isUniqueViolation =
      String(error.code || '').startsWith('SQLITE_CONSTRAINT') ||
      String(error.message || '').includes('UNIQUE');
    if (isUniqueViolation) {
      return res.status(400).json({ error: 'Recipient already exists.' });
    }
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * DELETE an extra notification recipient by id.
 * 404 when no row was deleted (previously reported success for any id).
 */
async function deleteExtraRecipient(req, res) {
  try {
    const db = getDatabase();
    const result = await db.run('DELETE FROM notification_extra_recipients WHERE id = ?', [req.params.id]);

    // run() exposes `changes` (rows affected); zero means unknown id.
    if (!result || result.changes === 0) {
      return res.status(404).json({ error: 'Recipient not found' });
    }

    res.json({ success: true });
  } catch (error) {
    console.error('Delete extra recipient error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET notification delivery logs, optionally filtered by endpoint, event
 * type, status, and date range. Result size is clamped to 20..200 rows
 * (default 100), newest first.
 */
async function getDeliveryLogs(req, res) {
  try {
    const db = getDatabase();
    const { endpointId, eventType, status, fromDate, toDate, limit } = req.query;

    // Build WHERE fragments and their bound parameters in lockstep.
    const conditions = [];
    const params = [];

    if (endpointId) {
      conditions.push('d.endpoint_id = ?');
      params.push(Number(endpointId));
    }
    if (eventType) {
      conditions.push('d.event_type = ?');
      params.push(eventType);
    }
    if (status) {
      conditions.push('d.status = ?');
      params.push(status);
    }
    if (fromDate) {
      conditions.push('datetime(d.created_at) >= datetime(?)');
      params.push(fromDate);
    }
    if (toDate) {
      conditions.push('datetime(d.created_at) <= datetime(?)');
      params.push(toDate);
    }

    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Clamp the page size: at least 20, at most 200, defaulting to 100.
    const pageSize = Math.min(200, Math.max(20, Number(limit) || 100));
    params.push(pageSize);

    const rows = await db.all(
      `SELECT d.*, e.name AS endpoint_name
       FROM notification_deliveries d
       LEFT JOIN endpoints e ON e.id = d.endpoint_id
       ${whereClause}
       ORDER BY d.created_at DESC
       LIMIT ?`,
      params
    );

    res.json(rows);
  } catch (error) {
    console.error('Get delivery logs error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET the system-wide notification defaults (thin pass-through to the
 * notification service).
 */
async function getNotificationDefaultsController(req, res) {
  try {
    const defaults = await getNotificationDefaults();
    res.json(defaults);
  } catch (error) {
    console.error('Get notification defaults error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT the system-wide notification defaults. The body is forwarded as-is
 * to the notification service (an empty object when the body is missing).
 */
async function updateNotificationDefaultsController(req, res) {
  try {
    await setNotificationDefaults(req.body ?? {});
    res.json({ success: true });
  } catch (error) {
    console.error('Update notification defaults error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET SMTP/notification pipeline health (thin pass-through to the
 * notification service).
 */
async function getSmtpHealthController(req, res) {
  try {
    const health = await getNotificationHealth();
    res.json(health);
  } catch (error) {
    console.error('Get SMTP health error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
module.exports = {
|
||||
getMyNotificationPreferences,
|
||||
updateMyNotificationPreferences,
|
||||
sendSmtpTestEmail,
|
||||
listExtraRecipients,
|
||||
createExtraRecipient,
|
||||
deleteExtraRecipient,
|
||||
getDeliveryLogs,
|
||||
getNotificationDefaultsController,
|
||||
updateNotificationDefaultsController,
|
||||
getSmtpHealthController,
|
||||
};
|
||||
127
backend/src/controllers/profileController.js
Normal file
127
backend/src/controllers/profileController.js
Normal file
@@ -0,0 +1,127 @@
|
||||
const { getDatabase } = require('../models/database');
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { validatePassword } = require('../middleware/auth');
|
||||
|
||||
/**
 * GET the authenticated user's profile (id, name, email, role only —
 * never the password hash).
 */
async function getUserProfile(req, res) {
  try {
    const db = getDatabase();
    const user = await db.get(
      'SELECT id, name, email, role FROM users WHERE id = ?',
      [req.user.id]
    );

    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }

    res.json(user);
  } catch (error) {
    console.error('Get profile error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT the authenticated user's profile (name and/or email).
 * Validates the email format and uniqueness before updating; returns the
 * refreshed profile row.
 */
async function updateUserProfile(req, res) {
  try {
    const userId = req.user.id;
    const { name, email } = req.body;
    const db = getDatabase();

    if (!name && !email) {
      return res.status(400).json({ error: 'At least one field is required' });
    }

    if (email) {
      // Validate the format before hitting the DB — the rest of the
      // codebase uses the same lightweight pattern for email checks.
      if (!/^\S+@\S+\.\S+$/.test(email)) {
        return res.status(400).json({ error: 'A valid email is required' });
      }

      // Reject an email already used by a different account.
      const existing = await db.get(
        'SELECT id FROM users WHERE email = ? AND id != ?',
        [email, userId]
      );
      if (existing) {
        return res.status(400).json({ error: 'Email already in use' });
      }
    }

    // Build the SET clause from only the fields that were provided.
    const updateFields = [];
    const updateValues = [];
    if (name) {
      updateFields.push('name = ?');
      updateValues.push(name);
    }
    if (email) {
      updateFields.push('email = ?');
      updateValues.push(email);
    }
    updateValues.push(userId);

    const query = `UPDATE users SET ${updateFields.join(', ')}, updated_at = CURRENT_TIMESTAMP WHERE id = ?`;
    await db.run(query, updateValues);

    const updated = await db.get(
      'SELECT id, name, email, role FROM users WHERE id = ?',
      [userId]
    );

    res.json({ success: true, user: updated });
  } catch (error) {
    console.error('Update profile error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST a password change for the authenticated user.
 * Requires the current password, enforces strength rules on the new one,
 * and stores a fresh bcrypt hash.
 */
async function changePassword(req, res) {
  try {
    const userId = req.user.id;
    const { currentPassword, newPassword, confirmPassword } = req.body;
    const db = getDatabase();

    // All three fields must be present before any validation runs.
    if (!currentPassword || !newPassword || !confirmPassword) {
      return res.status(400).json({ error: 'All password fields are required' });
    }

    // Strength rules come from the shared auth middleware.
    const passwordErrors = validatePassword(newPassword);
    if (passwordErrors.length > 0) {
      return res.status(400).json({ error: passwordErrors.join('. ') });
    }

    if (newPassword !== confirmPassword) {
      return res.status(400).json({ error: 'New passwords do not match' });
    }

    const user = await db.get('SELECT password_hash FROM users WHERE id = ?', [userId]);
    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }

    // Re-authenticate with the current password before allowing a change.
    const matches = await bcrypt.compare(currentPassword, user.password_hash);
    if (!matches) {
      return res.status(401).json({ error: 'Current password is incorrect' });
    }

    const hashedPassword = await bcrypt.hash(newPassword, 10);
    await db.run(
      'UPDATE users SET password_hash = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
      [hashedPassword, userId]
    );

    res.json({ success: true, message: 'Password changed successfully' });
  } catch (error) {
    console.error('Change password error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
module.exports = {
|
||||
getUserProfile,
|
||||
updateUserProfile,
|
||||
changePassword
|
||||
};
|
||||
228
backend/src/controllers/setupController.js
Normal file
228
backend/src/controllers/setupController.js
Normal file
@@ -0,0 +1,228 @@
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { getDatabase, isSetupComplete } = require('../models/database');
|
||||
const { validatePassword } = require('../middleware/auth');
|
||||
const { setSettings, getSettingsMap, getSmtpConfig, saveSmtpConfig } = require('../services/settingsService');
|
||||
const { ensureUserNotificationDefaults } = require('../services/notificationService');
|
||||
|
||||
/**
 * GET setup status. Returns { setupRequired: true } while first-run setup
 * is pending, and 400 once setup has already been completed.
 */
async function getSetupStatus(req, res) {
  try {
    if (await isSetupComplete()) {
      return res.status(400).json({ error: 'Setup already completed' });
    }
    res.json({ setupRequired: true });
  } catch (error) {
    console.error('Get setup status error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST first-run setup: creates the admin user, stores site settings and
 * SMTP config, and returns a signed JWT for the new admin.
 *
 * NOTE(review): the admin insert, settings write, and SMTP save are not
 * wrapped in a single transaction — if saveSmtpConfig fails, the admin
 * user already exists. Consider runInTransaction if partial setup state
 * proves problematic; confirm against the database module's API.
 */
async function completeSetup(req, res) {
  try {
    const {
      title,
      adminName,
      adminEmail,
      adminPassword,
      smtpHost,
      smtpPort,
      smtpUser,
      smtpPassword,
      smtpFromEmail,
      smtpFromName,
      smtpTlsMode,
      smtpTimeoutMs,
      publicUrl,
    } = req.body;

    if (!title || !adminName || !adminEmail || !adminPassword) {
      return res.status(400).json({ error: 'Title and admin credentials are required' });
    }

    // Same lightweight email-format check used elsewhere in the codebase.
    if (!/^\S+@\S+\.\S+$/.test(adminEmail)) {
      return res.status(400).json({ error: 'A valid admin email is required' });
    }

    // Enforce password strength via the shared auth middleware rules.
    const passwordErrors = validatePassword(adminPassword);
    if (passwordErrors.length > 0) {
      return res.status(400).json({ error: passwordErrors.join('. ') });
    }

    const db = getDatabase();

    // Setup is single-shot: refuse if it already ran.
    const complete = await isSetupComplete();
    if (complete) {
      return res.status(400).json({ error: 'Setup has already been completed' });
    }

    // Create the initial admin account.
    const hashedPassword = await bcrypt.hash(adminPassword, 10);
    await db.run(
      'INSERT INTO users (email, password_hash, name, role, active) VALUES (?, ?, ?, ?, ?)',
      [adminEmail, hashedPassword, adminName, 'admin', 1]
    );

    // Seed site settings with branding defaults.
    await setSettings({
      title,
      logoUrl: '',
      primaryColor: '#6366f1',
      secondaryColor: '#8b5cf6',
      publicUrl: publicUrl || process.env.FRONTEND_URL || 'http://localhost:3000',
    });

    // SMTP is optional at setup time (allowEmpty), but invalid values fail.
    const smtpResult = await saveSmtpConfig(
      {
        smtpHost,
        smtpPort,
        smtpUser,
        smtpPassword,
        smtpFromEmail,
        smtpFromName,
        smtpTlsMode,
        smtpTimeoutMs,
      },
      { preservePassword: false, allowEmpty: true }
    );

    if (!smtpResult.success) {
      return res.status(400).json({ error: smtpResult.error });
    }

    // Issue a JWT so the admin is logged in immediately after setup.
    const jwt = require('jsonwebtoken');
    const user = await db.get('SELECT * FROM users WHERE email = ?', [adminEmail]);
    await ensureUserNotificationDefaults(user.id, 'admin');
    const token = jwt.sign(
      { id: user.id, email: user.email, role: user.role },
      process.env.JWT_SECRET,
      { expiresIn: '24h' }
    );

    res.json({
      success: true,
      message: 'Setup completed successfully',
      token,
      user: { id: user.id, email: user.email, name: user.name, role: user.role }
    });
  } catch (error) {
    console.error('Complete setup error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET public site settings.
 * Strips sensitive keys before responding: an explicit deny-list plus a
 * blanket rule excluding the entire `smtp*` namespace, so newly added
 * SMTP-related settings can never leak through the public endpoint.
 */
async function getSiteSettings(req, res) {
  try {
    const settings = await getSettingsMap();

    // Explicit deny-list (kept for documentation even though the smtp*
    // prefix rule below already covers all of these).
    const sensitiveKeys = new Set([
      'smtpPassword',
      'smtpUser',
      'smtpHost',
      'smtpPort',
      'smtpFromEmail',
      'smtpFromName',
      'smtpTlsMode',
      'smtpTimeoutMs',
      'smtpLastError',
      'smtpLastErrorAt',
      'smtpFailureStreak',
      'smtpLastSuccessAt',
    ]);

    const result = {};
    for (const [key, value] of Object.entries(settings)) {
      if (sensitiveKeys.has(key)) continue;
      // Defense in depth: hide anything in the smtp* namespace by default.
      if (key.toLowerCase().startsWith('smtp')) continue;
      result[key] = value;
    }

    res.json(result);
  } catch (error) {
    console.error('Get settings error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * PUT site settings. Title and public URL are stored individually; when
 * any SMTP field is present the full SMTP config is saved (the stored
 * password is preserved unless a new one is supplied).
 */
async function updateSettings(req, res) {
  try {
    const {
      title,
      publicUrl,
      smtpHost,
      smtpPort,
      smtpUser,
      smtpPassword,
      smtpFromEmail,
      smtpFromName,
      smtpTlsMode,
      smtpTimeoutMs,
    } = req.body;

    if (title) {
      await setSettings({ title });
    }

    // publicUrl may legitimately be set to an empty string, so only its
    // absence (undefined) skips the write.
    if (publicUrl !== undefined) {
      await setSettings({ publicUrl: String(publicUrl) });
    }

    const smtpFields = {
      smtpHost,
      smtpPort,
      smtpUser,
      smtpPassword,
      smtpFromEmail,
      smtpFromName,
      smtpTlsMode,
      smtpTimeoutMs,
    };
    const hasSmtpFields = Object.values(smtpFields).some((value) => value !== undefined);

    if (hasSmtpFields) {
      const smtpResult = await saveSmtpConfig(smtpFields, {
        preservePassword: true,
        allowEmpty: true,
      });

      if (!smtpResult.success) {
        return res.status(400).json({ error: smtpResult.error });
      }
    }

    res.json({ success: true, message: 'Settings updated' });
  } catch (error) {
    console.error('Update settings error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET admin-facing settings: all site settings merged with the SMTP config.
 * The SMTP password is deliberately blanked — it is never echoed back.
 */
async function getAdminSettings(req, res) {
  try {
    const settings = await getSettingsMap();
    const smtp = await getSmtpConfig({ includePassword: false });
    const payload = { ...settings, ...smtp, smtpPassword: '' };
    res.json(payload);
  } catch (error) {
    console.error('Get admin settings error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
module.exports = {
|
||||
getSetupStatus,
|
||||
completeSetup,
|
||||
getSiteSettings,
|
||||
updateSettings,
|
||||
getAdminSettings,
|
||||
};
|
||||
106
backend/src/controllers/userController.js
Normal file
106
backend/src/controllers/userController.js
Normal file
@@ -0,0 +1,106 @@
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { getDatabase } = require('../models/database');
|
||||
const { validatePassword } = require('../middleware/auth');
|
||||
const { ensureUserNotificationDefaults } = require('../services/notificationService');
|
||||
|
||||
// Roles a user may be created with. Frozen so the shared constant cannot
// be mutated accidentally at runtime.
const ALLOWED_ROLES = Object.freeze(['admin', 'viewer']);
|
||||
|
||||
/**
 * GET all users (id, name, email, role, active, created_at — never the
 * password hash). Responds with an empty array when there are no rows.
 */
async function getAllUsers(req, res) {
  try {
    const db = getDatabase();
    const users = await db.all('SELECT id, name, email, role, active, created_at FROM users');
    res.json(users ?? []);
  } catch (error) {
    console.error('Get users error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * POST a new user. Validates role, email format, email uniqueness, and
 * password strength, then stores a bcrypt hash and seeds the user's
 * notification defaults. 201 with the created user (no password hash).
 */
async function createUser(req, res) {
  try {
    const { name, email, password, role } = req.body;

    if (!name || !email || !password || !role) {
      return res.status(400).json({ error: 'Name, email, password, and role are required' });
    }

    if (!ALLOWED_ROLES.includes(role)) {
      return res.status(400).json({ error: `Invalid role. Allowed roles: ${ALLOWED_ROLES.join(', ')}` });
    }

    // Same lightweight email-format check used elsewhere in the codebase;
    // previously any non-empty string was accepted as an email.
    if (!/^\S+@\S+\.\S+$/.test(email)) {
      return res.status(400).json({ error: 'A valid email is required' });
    }

    // Strength rules come from the shared auth middleware.
    const passwordErrors = validatePassword(password);
    if (passwordErrors.length > 0) {
      return res.status(400).json({ error: passwordErrors.join('. ') });
    }

    const db = getDatabase();

    const existing = await db.get('SELECT id FROM users WHERE email = ?', [email]);
    if (existing) {
      return res.status(400).json({ error: 'User with this email already exists' });
    }

    const hashedPassword = await bcrypt.hash(password, 10);

    const result = await db.run(
      'INSERT INTO users (email, password_hash, name, role, active) VALUES (?, ?, ?, ?, ?)',
      [email, hashedPassword, name, role, 1]
    );

    const newUser = await db.get(
      'SELECT id, name, email, role, active FROM users WHERE id = ?',
      [result.lastID]
    );

    // Seed the user's default notification rows so preferences work
    // immediately after creation.
    await ensureUserNotificationDefaults(newUser.id, newUser.role);

    res.status(201).json(newUser);
  } catch (error) {
    console.error('Create user error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * DELETE handler: remove a user account.
 * Refuses to delete the caller's own account, the last remaining user, or
 * the last remaining admin. Returns 404 (instead of a bogus success) when
 * the target id does not exist.
 */
async function deleteUser(req, res) {
  try {
    const { id } = req.params;
    const db = getDatabase();

    // Prevent deleting the logged-in user's own account (radix 10: route
    // params are decimal strings).
    if (req.user.id === parseInt(id, 10)) {
      return res.status(400).json({ error: 'Cannot delete your own account' });
    }

    // Fetch the target up front so a nonexistent id fails loudly with 404
    // rather than returning success for a no-op DELETE.
    const targetUser = await db.get('SELECT role FROM users WHERE id = ?', [id]);
    if (!targetUser) {
      return res.status(404).json({ error: 'User not found' });
    }

    // Prevent deleting the last user
    const totalUsers = await db.get('SELECT COUNT(*) as count FROM users');
    if (totalUsers.count <= 1) {
      return res.status(400).json({ error: 'Cannot delete the last user' });
    }

    // Prevent deleting the last admin, would leave no one able to manage the system
    if (targetUser.role === 'admin') {
      const adminCount = await db.get("SELECT COUNT(*) as count FROM users WHERE role = 'admin'");
      if (adminCount.count <= 1) {
        return res.status(400).json({ error: 'Cannot delete the last admin user' });
      }
    }

    await db.run('DELETE FROM users WHERE id = ?', [id]);
    res.json({ success: true, message: 'User deleted' });
  } catch (error) {
    console.error('Delete user error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Public API of the user-management controller (mounted by the admin routes).
module.exports = {
  getAllUsers,
  createUser,
  deleteUser
};
|
||||
196
backend/src/controllers/v1Controller.js
Normal file
196
backend/src/controllers/v1Controller.js
Normal file
@@ -0,0 +1,196 @@
|
||||
/**
|
||||
* v1Controller.js
|
||||
*
|
||||
 * Implements the /api/v1 public status API using a format based on the
 * Atlassian Statuspage API — the closest thing to a universal status page standard.
|
||||
*
|
||||
* Status indicators (internal → universal):
|
||||
* up → operational
|
||||
* degraded → degraded_performance
|
||||
* down → major_outage
|
||||
* unknown/null → unknown
|
||||
* maintenance → under_maintenance
|
||||
*
|
||||
* Overall page indicator:
|
||||
* none → all operational
|
||||
* minor → at least one degraded_performance
|
||||
* critical → at least one major_outage
|
||||
*/
|
||||
|
||||
const {
|
||||
getPageMetaRow,
|
||||
getActiveMaintenanceEndpointIds,
|
||||
listComponentsWithLatestAndUptime,
|
||||
listV1Incidents,
|
||||
listScheduledMaintenances,
|
||||
} = require('../data/v1Data');
|
||||
|
||||
/**
 * Map an internal check status to the universal (Statuspage-style) value.
 * An active maintenance window overrides whatever the latest check reported;
 * any unrecognized/missing status maps to 'unknown'.
 */
function toUniversalStatus(internalStatus, inMaintenance = false) {
  if (inMaintenance) {
    return 'under_maintenance';
  }
  const statusMap = new Map([
    ['up', 'operational'],
    ['degraded', 'degraded_performance'],
    ['down', 'major_outage'],
  ]);
  return statusMap.get(internalStatus) ?? 'unknown';
}
|
||||
|
||||
/**
 * Derive the page-wide indicator from the component statuses.
 * Precedence: major_outage > degraded_performance > under_maintenance >
 * unknown > all operational. 'unknown' intentionally still reports
 * indicator 'none', only the description differs.
 */
function overallIndicator(components) {
  const anyWith = (status) => components.some((c) => c.status === status);

  if (anyWith('major_outage')) {
    return { indicator: 'critical', description: 'Major System Outage' };
  }
  if (anyWith('degraded_performance')) {
    return { indicator: 'minor', description: 'Partially Degraded Service' };
  }
  if (anyWith('under_maintenance')) {
    return { indicator: 'maintenance', description: 'Under Maintenance' };
  }
  if (anyWith('unknown')) {
    return { indicator: 'none', description: 'System Status Unknown' };
  }
  return { indicator: 'none', description: 'All Systems Operational' };
}
|
||||
|
||||
/**
 * Build the `page` object for v1 responses from the settings table.
 * Falls back to a generic title and a null URL when settings are unset.
 */
async function getPageMeta() {
  const rows = await getPageMetaRow();
  const meta = Object.fromEntries(rows.map((r) => [r.key, r.value]));
  return {
    id: 'status',
    name: meta.title || 'Status Page',
    url: meta.site_url || null,
    updated_at: new Date().toISOString()
  };
}
|
||||
|
||||
/**
 * Assemble the v1 `components` array.
 *
 * @param {number[]|null} allowedIds - restrict to these endpoint ids (null = all).
 * @param {boolean} authed - when true, include response-time and 30-day
 *   uptime metrics that are hidden from anonymous callers.
 * @returns {Promise<{components: Array, component_groups: Array}>}
 */
async function buildComponents({ allowedIds = null, authed = false } = {}) {
  const maintenanceIds = await getActiveMaintenanceEndpointIds();
  const rows = await listComponentsWithLatestAndUptime(allowedIds);

  const components = rows.map((row) => {
    const component = {
      id: String(row.id),
      name: row.name,
      status: toUniversalStatus(row.latest_status, maintenanceIds.has(row.id)),
      group_id: row.group_id ? String(row.group_id) : null,
      is_group: false,
      // Prefer the latest check timestamp; fall back to the row's updated_at.
      updated_at: row.latest_checked_at || row.updated_at,
    };

    if (authed) {
      component.response_time_ms = row.latest_response_time ?? null;
      component.uptime_30d_pct = row.uptime_total > 0
        ? parseFloat(((row.uptime_ups / row.uptime_total) * 100).toFixed(4))
        : null;
      component.uptime_30d_checks = row.uptime_total;
      component.uptime_30d_up = row.uptime_ups;
    }

    return component;
  });

  // Component groups are not modeled yet; always empty for now.
  return { components, component_groups: [] };
}
|
||||
|
||||
/**
 * GET /api/v1/status.json — full public snapshot (page meta + components).
 * Incidents and maintenances are served by their dedicated endpoints and
 * returned empty here.
 */
async function getStatusJson(req, res) {
  try {
    const [page, built] = await Promise.all([
      getPageMeta(),
      buildComponents()
    ]);

    res.json({
      page,
      components: built.components,
      component_groups: [],
      incidents: [],
      scheduled_maintenances: []
    });
  } catch (error) {
    console.error('getStatusJson error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/** GET summary — just the overall indicator/description pair. */
async function getSummary(req, res) {
  try {
    const { components } = await buildComponents();
    const { indicator, description } = overallIndicator(components);
    res.json({ indicator, description });
  } catch (error) {
    console.error('getSummary error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/** GET components — metrics are included only for authenticated callers. */
async function getComponents(req, res) {
  try {
    const authed = !!req.user;
    const { components, component_groups } = await buildComponents({ authed });
    res.json({ components, component_groups });
  } catch (error) {
    console.error('getComponents error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET a single component by id.
 * Validates that :id is numeric up front, so a malformed id yields a clean
 * 404 instead of passing NaN down into the SQL parameter list.
 */
async function getComponentById(req, res) {
  try {
    const componentId = Number.parseInt(req.params.id, 10);
    if (!Number.isInteger(componentId)) {
      return res.status(404).json({ error: 'Component not found' });
    }

    const { components } = await buildComponents({ allowedIds: [componentId], authed: !!req.user });
    const component = components[0];
    if (!component) {
      return res.status(404).json({ error: 'Component not found' });
    }
    res.json(component);
  } catch (error) {
    console.error('getComponentById error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/** GET incidents — all incidents (resolved included) with updates/endpoints attached. */
async function getIncidents(req, res) {
  try {
    res.json(await listV1Incidents(false));
  } catch (error) {
    console.error('getIncidents error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/** GET a single incident by id, or 404 when no incident matches. */
async function getIncidentById(req, res) {
  try {
    const wantedId = parseInt(req.params.id, 10);
    const incidents = await listV1Incidents(false);
    const match = incidents.find((incident) => incident.id === wantedId);
    if (!match) {
      return res.status(404).json({ error: 'Incident not found' });
    }
    res.json(match);
  } catch (error) {
    console.error('getIncidentById error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
/**
 * GET scheduled maintenances — reshapes the flat endpoint_id/endpoint_name
 * columns into an `endpoints` array on each window.
 */
async function getScheduledMaintenances(req, res) {
  try {
    const windows = await listScheduledMaintenances();
    const shaped = windows.map(({ endpoint_id, endpoint_name, ...rest }) => ({
      ...rest,
      endpoints: endpoint_id
        ? [{ id: endpoint_id, name: endpoint_name || null }]
        : [],
    }));
    res.json(shaped);
  } catch (error) {
    console.error('getScheduledMaintenances error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
|
||||
|
||||
// Route handlers for the /api/v1 public status API.
module.exports = {
  getStatusJson,
  getSummary,
  getComponents,
  getComponentById,
  getIncidents,
  getIncidentById,
  getScheduledMaintenances
};
|
||||
73
backend/src/data/categoryData.js
Normal file
73
backend/src/data/categoryData.js
Normal file
@@ -0,0 +1,73 @@
|
||||
const { getDatabase, runInTransaction } = require('../models/database');
|
||||
|
||||
/** All categories (endpoint_groups), ordered by sort_order then id. */
async function listCategories() {
  return getDatabase().all('SELECT * FROM endpoint_groups ORDER BY sort_order ASC, id ASC');
}
|
||||
|
||||
/** Single category row by id, or undefined when absent. */
async function getCategoryById(categoryId) {
  return getDatabase().get('SELECT * FROM endpoint_groups WHERE id = ?', [categoryId]);
}
|
||||
|
||||
/** Number of endpoints currently assigned to a category ({count} row). */
async function getCategoryEndpointCount(categoryId) {
  return getDatabase().get('SELECT COUNT(*) as count FROM endpoints WHERE group_id = ?', [categoryId]);
}
|
||||
|
||||
/** Endpoints belonging to a category, alphabetical by name. */
async function listEndpointsForCategory(categoryId) {
  return getDatabase().all('SELECT * FROM endpoints WHERE group_id = ? ORDER BY name ASC', [categoryId]);
}
|
||||
|
||||
/** Insert a new category; description is optional (stored as NULL when empty). */
async function createCategoryRecord(name, description, sortOrder) {
  return getDatabase().run(
    'INSERT INTO endpoint_groups (name, description, sort_order) VALUES (?, ?, ?)',
    [name, description || null, sortOrder]
  );
}
|
||||
|
||||
/** Highest sort_order in use ({max} row; max is NULL when no categories exist). */
async function getMaxCategorySortOrder() {
  return getDatabase().get('SELECT MAX(sort_order) as max FROM endpoint_groups');
}
|
||||
|
||||
/** Rename/redescribe a category and bump its updated_at timestamp. */
async function updateCategoryRecord(categoryId, name, description) {
  return getDatabase().run(
    'UPDATE endpoint_groups SET name = ?, description = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
    [name, description || null, categoryId]
  );
}
|
||||
|
||||
/** Detach every endpoint from a category (sets group_id to NULL). */
async function clearCategoryFromEndpoints(categoryId) {
  return getDatabase().run('UPDATE endpoints SET group_id = NULL WHERE group_id = ?', [categoryId]);
}
|
||||
|
||||
/** Delete a category row by id. */
async function deleteCategoryRecord(categoryId) {
  return getDatabase().run('DELETE FROM endpoint_groups WHERE id = ?', [categoryId]);
}
|
||||
|
||||
/**
 * Persist a new category ordering atomically; order[i] is the category id
 * that receives sort_order i.
 */
async function reorderCategoryRecords(order) {
  await runInTransaction(async (db) => {
    let position = 0;
    for (const categoryId of order) {
      await db.run('UPDATE endpoint_groups SET sort_order = ? WHERE id = ?', [position, categoryId]);
      position += 1;
    }
  });
}
|
||||
|
||||
// Data-access layer for endpoint categories (endpoint_groups table).
module.exports = {
  listCategories,
  getCategoryById,
  getCategoryEndpointCount,
  listEndpointsForCategory,
  createCategoryRecord,
  getMaxCategorySortOrder,
  updateCategoryRecord,
  clearCategoryFromEndpoints,
  deleteCategoryRecord,
  reorderCategoryRecords,
};
|
||||
146
backend/src/data/endpointData.js
Normal file
146
backend/src/data/endpointData.js
Normal file
@@ -0,0 +1,146 @@
|
||||
const { getDatabase, runInTransaction } = require('../models/database');
|
||||
|
||||
/**
 * All endpoints joined with their category name/order, in display order
 * (uncategorized/unsorted rows sink to the end via the 99999 sentinel).
 */
async function listEndpointsWithCategory() {
  return getDatabase().all(
    `SELECT e.*, g.name as category_name, g.sort_order as category_order
     FROM endpoints e
     LEFT JOIN endpoint_groups g ON e.group_id = g.id
     ORDER BY COALESCE(g.sort_order, 99999) ASC, COALESCE(e.sort_order, 99999) ASC, e.name ASC`
  );
}
|
||||
|
||||
/** Categories in display order (sort_order, then id as tiebreaker). */
async function listCategoriesOrdered() {
  return getDatabase().all('SELECT * FROM endpoint_groups ORDER BY sort_order ASC, id ASC');
}
|
||||
|
||||
/** Most recent check result (status, response_time, checked_at) for an endpoint. */
async function getLatestCheckResult(endpointId) {
  return getDatabase().get(
    `SELECT status, response_time, checked_at
     FROM check_results
     WHERE endpoint_id = ?
     ORDER BY checked_at DESC
     LIMIT 1`,
    [endpointId]
  );
}
|
||||
|
||||
/**
 * Totals for uptime over the trailing window: {total, ups} where ups counts
 * checks whose status was 'up' within the last `days` days.
 */
async function getUptimeSummary(endpointId, days = 30) {
  return getDatabase().get(
    `SELECT
      COUNT(*) AS total,
      SUM(CASE WHEN status = 'up' THEN 1 ELSE 0 END) AS ups
    FROM check_results
    WHERE endpoint_id = ?
      AND checked_at > datetime('now', '-' || ? || ' days')`,
    [endpointId, days]
  );
}
|
||||
|
||||
/** Single endpoint row by id, or undefined when absent. */
async function getEndpointById(endpointId) {
  return getDatabase().get('SELECT * FROM endpoints WHERE id = ?', [endpointId]);
}
|
||||
|
||||
/** Latest `limit` check results for an endpoint, newest first. */
async function getRecentCheckResults(endpointId, limit = 100) {
  return getDatabase().all(
    `SELECT *
     FROM check_results
     WHERE endpoint_id = ?
     ORDER BY checked_at DESC
     LIMIT ?`,
    [endpointId, limit]
  );
}
|
||||
|
||||
/** Insert a new endpoint row from a validated payload object. */
async function createEndpointRecord(payload) {
  const { name, url, type, interval, timeout, active, ping_enabled, group_id } = payload;
  return getDatabase().run(
    `INSERT INTO endpoints
      (name, url, type, interval, timeout, active, ping_enabled, group_id)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
    [name, url, type, interval, timeout, active, ping_enabled, group_id]
  );
}
|
||||
|
||||
/**
 * Update an endpoint row from a payload object.
 * When `includeUrl` is false the url column is left untouched.
 * The SET clause is built once from a column list instead of duplicating
 * two nearly identical UPDATE statements (the previous form drifted easily).
 *
 * @param {number} endpointId - row to update.
 * @param {object} payload - new field values (name, url, type, interval,
 *   timeout, active, ping_enabled, group_id).
 * @param {boolean} includeUrl - whether the url column should be updated.
 */
async function updateEndpointRecord(endpointId, payload, includeUrl = true) {
  const db = getDatabase();

  // Column order mirrors the original statements: name, [url], type, ...
  const columns = ['name'];
  const values = [payload.name];
  if (includeUrl) {
    columns.push('url');
    values.push(payload.url);
  }
  columns.push('type', 'interval', 'timeout', 'active', 'ping_enabled', 'group_id');
  values.push(
    payload.type,
    payload.interval,
    payload.timeout,
    payload.active,
    payload.ping_enabled,
    payload.group_id
  );

  const setClause = columns.map((column) => `${column} = ?`).join(', ');
  return db.run(
    `UPDATE endpoints
     SET ${setClause}, updated_at = CURRENT_TIMESTAMP
     WHERE id = ?`,
    [...values, endpointId]
  );
}
|
||||
|
||||
/** Delete an endpoint row by id (check_results cascade via FK). */
async function deleteEndpointRecord(endpointId) {
  return getDatabase().run('DELETE FROM endpoints WHERE id = ?', [endpointId]);
}
|
||||
|
||||
/**
 * Persist new sort order (and optional category) for a batch of endpoints,
 * atomically. Each update carries {id, sort_order, category_id}.
 */
async function reorderEndpointRecords(updates) {
  await runInTransaction(async (db) => {
    for (const update of updates) {
      await db.run(
        `UPDATE endpoints
         SET sort_order = ?, group_id = ?, updated_at = CURRENT_TIMESTAMP
         WHERE id = ?`,
        [update.sort_order, update.category_id ?? null, update.id]
      );
    }
  });
}
|
||||
|
||||
// Data-access layer for endpoints and their check results.
module.exports = {
  listEndpointsWithCategory,
  listCategoriesOrdered,
  getLatestCheckResult,
  getUptimeSummary,
  getEndpointById,
  getRecentCheckResults,
  createEndpointRecord,
  updateEndpointRecord,
  deleteEndpointRecord,
  reorderEndpointRecords,
};
|
||||
177
backend/src/data/incidentData.js
Normal file
177
backend/src/data/incidentData.js
Normal file
@@ -0,0 +1,177 @@
|
||||
const { getDatabase } = require('../models/database');
|
||||
|
||||
/** All incidents: unresolved first, then newest start_time first. */
async function listIncidentsOrdered() {
  return getDatabase().all(
    `SELECT * FROM incidents
     ORDER BY
       CASE WHEN resolved_at IS NULL THEN 0 ELSE 1 END ASC,
       start_time DESC`
  );
}
|
||||
|
||||
/** Single incident row by id, or undefined when absent. */
async function getIncidentById(incidentId) {
  return getDatabase().get('SELECT * FROM incidents WHERE id = ?', [incidentId]);
}
|
||||
|
||||
/** Endpoints linked to an incident via the incident_endpoints join table. */
async function listIncidentEndpoints(incidentId) {
  return getDatabase().all(
    `SELECT e.id, e.name, e.url, e.type FROM endpoints e
     JOIN incident_endpoints ie ON e.id = ie.endpoint_id
     WHERE ie.incident_id = ?`,
    [incidentId]
  );
}
|
||||
|
||||
/** Most recent update row for an incident, or undefined when none exist. */
async function getLatestIncidentUpdate(incidentId) {
  return getDatabase().get(
    `SELECT * FROM incident_updates WHERE incident_id = ? ORDER BY created_at DESC LIMIT 1`,
    [incidentId]
  );
}
|
||||
|
||||
/** All updates for an incident in chronological order. */
async function listIncidentUpdates(incidentId) {
  return getDatabase().all(
    `SELECT * FROM incident_updates WHERE incident_id = ? ORDER BY created_at ASC`,
    [incidentId]
  );
}
|
||||
|
||||
/** Insert a manually created incident (auto_created is always 0 here). */
async function createIncidentRecord(payload) {
  const { title, description, severity, status, source } = payload;
  return getDatabase().run(
    `INSERT INTO incidents (title, description, severity, status, source, auto_created)
     VALUES (?, ?, ?, ?, ?, 0)`,
    [title, description, severity, status, source]
  );
}
|
||||
|
||||
/**
 * Link an endpoint to an incident. With ignoreConflicts, an existing link is
 * silently kept (INSERT OR IGNORE); otherwise a duplicate link throws.
 */
async function linkIncidentEndpoint(incidentId, endpointId, ignoreConflicts = false) {
  const sql = ignoreConflicts
    ? 'INSERT OR IGNORE INTO incident_endpoints (incident_id, endpoint_id) VALUES (?, ?)'
    : 'INSERT INTO incident_endpoints (incident_id, endpoint_id) VALUES (?, ?)';
  return getDatabase().run(sql, [incidentId, endpointId]);
}
|
||||
|
||||
/** Append an update entry to an incident's timeline. */
async function createIncidentUpdate(incidentId, message, statusLabel, createdBy) {
  return getDatabase().run(
    `INSERT INTO incident_updates (incident_id, message, status_label, created_by)
     VALUES (?, ?, ?, ?)`,
    [incidentId, message, statusLabel, createdBy]
  );
}
|
||||
|
||||
/** Update an incident's core editable fields and bump updated_at. */
async function updateIncidentCore(incidentId, payload) {
  const { title, description, severity, status, admin_managed } = payload;
  return getDatabase().run(
    `UPDATE incidents
     SET title = ?, description = ?, severity = ?, status = ?,
         admin_managed = ?,
         updated_at = CURRENT_TIMESTAMP
     WHERE id = ?`,
    [title, description, severity, status, admin_managed, incidentId]
  );
}
|
||||
|
||||
/** Remove all endpoint links from an incident except the originating endpoint. */
async function deleteIncidentLinksExceptSource(incidentId, sourceEndpointId) {
  return getDatabase().run(
    'DELETE FROM incident_endpoints WHERE incident_id = ? AND endpoint_id != ?',
    [incidentId, sourceEndpointId]
  );
}
|
||||
|
||||
/** Remove every endpoint link from an incident. */
async function deleteAllIncidentLinks(incidentId) {
  return getDatabase().run('DELETE FROM incident_endpoints WHERE incident_id = ?', [incidentId]);
}
|
||||
|
||||
/** Stamp an incident resolved now and record the admin_managed flag. */
async function markIncidentResolved(incidentId, adminManaged) {
  return getDatabase().run(
    `UPDATE incidents
     SET resolved_at = CURRENT_TIMESTAMP,
         status = 'resolved',
         admin_managed = ?,
         updated_at = CURRENT_TIMESTAMP
     WHERE id = ?`,
    [adminManaged, incidentId]
  );
}
|
||||
|
||||
/** Flip the admin_managed flag on an incident (defaults to 1 = managed). */
async function setIncidentAdminManaged(incidentId, adminManaged = 1) {
  return getDatabase().run(
    `UPDATE incidents SET admin_managed = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
    [adminManaged, incidentId]
  );
}
|
||||
|
||||
/** Set an incident's status field and bump updated_at. */
async function setIncidentStatus(incidentId, status) {
  return getDatabase().run(
    `UPDATE incidents SET status = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
    [status, incidentId]
  );
}
|
||||
|
||||
/** Single incident-update row by id, or undefined when absent. */
async function getIncidentUpdateById(updateId) {
  return getDatabase().get('SELECT * FROM incident_updates WHERE id = ?', [updateId]);
}
|
||||
|
||||
/**
 * Reopen a resolved incident: clear resolved_at, reset status to
 * 'investigating', and mark it admin-managed.
 */
async function reopenIncidentRecord(incidentId) {
  return getDatabase().run(
    `UPDATE incidents
     SET resolved_at = NULL,
         status = 'investigating',
         admin_managed = 1,
         updated_at = CURRENT_TIMESTAMP
     WHERE id = ?`,
    [incidentId]
  );
}
|
||||
|
||||
/** Attach (or clear, when falsy) a post-mortem text to an incident. */
async function setIncidentPostMortem(incidentId, postMortem) {
  return getDatabase().run(
    `UPDATE incidents SET post_mortem = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
    [postMortem || null, incidentId]
  );
}
|
||||
|
||||
/** Delete an incident row by id (updates/links cascade via FK). */
async function deleteIncidentRecord(incidentId) {
  return getDatabase().run('DELETE FROM incidents WHERE id = ?', [incidentId]);
}
|
||||
|
||||
// Data-access layer for incidents, their updates, and endpoint links.
module.exports = {
  listIncidentsOrdered,
  getIncidentById,
  listIncidentEndpoints,
  getLatestIncidentUpdate,
  listIncidentUpdates,
  createIncidentRecord,
  linkIncidentEndpoint,
  createIncidentUpdate,
  updateIncidentCore,
  deleteIncidentLinksExceptSource,
  deleteAllIncidentLinks,
  markIncidentResolved,
  setIncidentAdminManaged,
  setIncidentStatus,
  getIncidentUpdateById,
  reopenIncidentRecord,
  setIncidentPostMortem,
  deleteIncidentRecord,
};
|
||||
142
backend/src/data/v1Data.js
Normal file
142
backend/src/data/v1Data.js
Normal file
@@ -0,0 +1,142 @@
|
||||
const { getDatabase } = require('../models/database');
|
||||
|
||||
/** Key/value settings rows used by the public page header (title, site_url). */
async function getPageMetaRow() {
  return getDatabase().all(`SELECT key, value FROM settings WHERE key IN ('title', 'site_url')`);
}
|
||||
|
||||
/** Set of endpoint ids currently inside an active maintenance window. */
async function getActiveMaintenanceEndpointIds() {
  const rows = await getDatabase().all(
    `SELECT endpoint_id FROM maintenance_windows
     WHERE start_time <= datetime('now')
       AND end_time >= datetime('now')
       AND endpoint_id IS NOT NULL`
  );

  const activeIds = new Set();
  for (const { endpoint_id } of rows) {
    activeIds.add(endpoint_id);
  }
  return activeIds;
}
|
||||
|
||||
/**
 * Build an optional `AND e.id IN (...)` filter for the components query.
 * Returns an empty clause (match everything) when no ids are given.
 */
function buildAllowedIdsClause(allowedIds = null) {
  const ids = allowedIds ?? [];
  if (ids.length === 0) {
    return { clause: '', args: [] };
  }
  const placeholders = ids.map(() => '?').join(', ');
  return { clause: `AND e.id IN (${placeholders})`, args: ids };
}
|
||||
|
||||
/**
 * One row per active endpoint, joined with its latest check result and its
 * 30-day uptime totals, computed in a single query via two CTEs.
 *
 * `latest` picks, per endpoint, the check_results row whose checked_at equals
 * that endpoint's MAX(checked_at). NOTE(review): if two checks share the
 * exact same checked_at this can yield duplicate rows per endpoint — confirm
 * check cadence makes ties impossible.
 *
 * `uptime` aggregates total checks and 'up' checks over the last 30 days;
 * endpoints with no checks get 0/0 via COALESCE.
 *
 * @param {number[]|null} allowedIds - optional restriction to specific ids
 *   (interpolated as a parameterized IN clause; values stay bound as args).
 * @returns {Promise<Array>} rows with latest_status, latest_response_time,
 *   latest_checked_at (NULL when never checked), uptime_total, uptime_ups.
 */
async function listComponentsWithLatestAndUptime(allowedIds = null) {
  const db = getDatabase();
  const { clause, args } = buildAllowedIdsClause(allowedIds);

  return db.all(
    `WITH latest AS (
      SELECT cr.endpoint_id, cr.status, cr.response_time, cr.checked_at
      FROM check_results cr
      INNER JOIN (
        SELECT endpoint_id, MAX(checked_at) AS max_checked_at
        FROM check_results
        GROUP BY endpoint_id
      ) m ON m.endpoint_id = cr.endpoint_id AND m.max_checked_at = cr.checked_at
    ),
    uptime AS (
      SELECT
        endpoint_id,
        COUNT(*) AS total,
        SUM(CASE WHEN status = 'up' THEN 1 ELSE 0 END) AS ups
      FROM check_results
      WHERE checked_at > datetime('now', '-30 days')
      GROUP BY endpoint_id
    )
    SELECT
      e.id,
      e.name,
      e.group_id,
      e.updated_at,
      l.status AS latest_status,
      l.response_time AS latest_response_time,
      l.checked_at AS latest_checked_at,
      COALESCE(u.total, 0) AS uptime_total,
      COALESCE(u.ups, 0) AS uptime_ups
    FROM endpoints e
    LEFT JOIN latest l ON l.endpoint_id = e.id
    LEFT JOIN uptime u ON u.endpoint_id = e.id
    WHERE e.active = 1
    ${clause}
    ORDER BY e.created_at ASC`,
    args
  );
}
|
||||
|
||||
/**
 * Load incidents (optionally only unresolved ones) together with their
 * update history and affected endpoints, shaped for /api/v1 responses.
 *
 * Child rows are fetched in two bulk IN(...) queries and stitched in memory,
 * avoiding an N+1 query per incident.
 *
 * @param {boolean} activeOnly - when true, only incidents with resolved_at IS NULL.
 * @returns {Promise<Array>} incident rows, each with `updates` (chronological)
 *   and `endpoints` ([{id, name}]) arrays attached.
 */
async function listV1Incidents(activeOnly = false) {
  const db = getDatabase();
  const where = activeOnly ? 'WHERE i.resolved_at IS NULL' : '';
  const incidents = await db.all(
    `SELECT i.*
     FROM incidents i
     ${where}
     ORDER BY i.created_at DESC`
  );

  // Short-circuit: nothing to stitch (and an empty IN () would be invalid SQL).
  if (incidents.length === 0) return [];

  const ids = incidents.map((i) => i.id);
  const placeholders = ids.map(() => '?').join(', ');

  const updates = await db.all(
    `SELECT * FROM incident_updates
     WHERE incident_id IN (${placeholders})
     ORDER BY created_at ASC`,
    ids
  );

  const endpoints = await db.all(
    `SELECT ie.incident_id, e.id, e.name
     FROM incident_endpoints ie
     JOIN endpoints e ON e.id = ie.endpoint_id
     WHERE ie.incident_id IN (${placeholders})`,
    ids
  );

  // Bucket child rows by incident id so attachment below is O(1) per incident.
  const updatesByIncident = new Map();
  const endpointsByIncident = new Map();

  for (const row of updates) {
    if (!updatesByIncident.has(row.incident_id)) updatesByIncident.set(row.incident_id, []);
    updatesByIncident.get(row.incident_id).push(row);
  }

  for (const row of endpoints) {
    if (!endpointsByIncident.has(row.incident_id)) endpointsByIncident.set(row.incident_id, []);
    endpointsByIncident.get(row.incident_id).push({ id: row.id, name: row.name });
  }

  for (const incident of incidents) {
    incident.updates = updatesByIncident.get(incident.id) || [];
    incident.endpoints = endpointsByIncident.get(incident.id) || [];
  }

  return incidents;
}
|
||||
|
||||
/** Current and upcoming maintenance windows with the linked endpoint's name. */
async function listScheduledMaintenances() {
  return getDatabase().all(
    `SELECT mw.id, mw.title, mw.description, mw.start_time, mw.end_time, mw.created_at, mw.updated_at,
            mw.endpoint_id, e.name AS endpoint_name
     FROM maintenance_windows mw
     LEFT JOIN endpoints e ON e.id = mw.endpoint_id
     WHERE mw.end_time >= datetime('now')
     ORDER BY mw.start_time ASC`
  );
}
|
||||
|
||||
// Data-access layer backing the /api/v1 public status endpoints.
module.exports = {
  getPageMetaRow,
  getActiveMaintenanceEndpointIds,
  listComponentsWithLatestAndUptime,
  listV1Incidents,
  listScheduledMaintenances,
};
|
||||
88
backend/src/db/index.js
Normal file
88
backend/src/db/index.js
Normal file
@@ -0,0 +1,88 @@
|
||||
const sqlite3 = require('sqlite3').verbose();
|
||||
const { open } = require('sqlite');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { initializeSchema, runMigrations } = require('./schema');
|
||||
|
||||
require('dotenv').config();
|
||||
|
||||
let db = null;
|
||||
|
||||
/**
 * True when the 'settings' table exists — used to detect a first-run
 * database that still needs its schema created.
 */
async function isSchemaPresent(database) {
  const settingsTable = await database.get(
    `SELECT name FROM sqlite_master WHERE type = 'table' AND name = 'settings'`
  );
  return Boolean(settingsTable);
}
|
||||
|
||||
/**
 * Open (or create) the SQLite database, enable foreign keys, create the
 * schema on first run, apply pending migrations, and prune expired rows from
 * the token blocklist. Idempotent: later calls return the cached handle.
 *
 * @returns {Promise<object>} the shared sqlite database handle.
 */
async function initializeDatabase() {
  if (db) return db;

  // DATABASE_PATH may be relative to the working directory; the default
  // lives alongside the backend source tree.
  const dbPath = process.env.DATABASE_PATH
    ? path.resolve(process.env.DATABASE_PATH)
    : path.join(__dirname, '../../data/status.db');

  const dbDir = path.dirname(dbPath);
  if (!fs.existsSync(dbDir)) {
    fs.mkdirSync(dbDir, { recursive: true });
  }

  db = await open({
    filename: dbPath,
    driver: sqlite3.Database,
  });

  // SQLite ships with FK enforcement off; the schema relies on it.
  await db.exec('PRAGMA foreign_keys = ON');

  const schemaPresent = await isSchemaPresent(db);
  if (!schemaPresent) {
    await initializeSchema(db);
    console.log('Schema initialized (first setup)');
  }

  await runMigrations(db);

  // Single quotes for the string literal: datetime("now") only worked via
  // SQLite's deprecated double-quoted-string fallback, which can be disabled
  // at compile time. Matches the quoting used everywhere else in this codebase.
  await db.run("DELETE FROM token_blocklist WHERE expires_at < datetime('now')");

  console.log('Database initialized');
  return db;
}
|
||||
|
||||
/** Return the open database handle; throws if initializeDatabase has not run. */
function getDatabase() {
  if (db === null) {
    throw new Error('Database not initialized');
  }
  return db;
}
|
||||
|
||||
/**
 * Setup is considered complete once at least one admin user exists.
 * Any failure (e.g. database not yet initialized) is treated as "not set up".
 */
async function isSetupComplete() {
  try {
    const admin = await getDatabase().get('SELECT * FROM users WHERE role = ?', ['admin']);
    return Boolean(admin);
  } catch (_) {
    return false;
  }
}
|
||||
|
||||
/**
 * Run `work(db)` inside a single SQLite transaction and return its result.
 * BEGIN IMMEDIATE acquires the write lock up front so concurrent writers
 * fail fast rather than deadlocking mid-transaction. Commits on success;
 * on failure attempts a best-effort ROLLBACK and rethrows the original error.
 *
 * @param {(db: object) => Promise<any>} work - callback receiving the db handle.
 */
async function runInTransaction(work) {
  const database = getDatabase();
  await database.exec('BEGIN IMMEDIATE');
  try {
    const result = await work(database);
    await database.exec('COMMIT');
    return result;
  } catch (error) {
    try {
      await database.exec('ROLLBACK');
    } catch (_) {
      // Ignore rollback errors and rethrow original error
    }
    throw error;
  }
}
|
||||
|
||||
// Database lifecycle and access helpers shared by the whole backend.
module.exports = {
  initializeDatabase,
  getDatabase,
  isSetupComplete,
  runInTransaction,
};
|
||||
310
backend/src/db/schema.js
Normal file
310
backend/src/db/schema.js
Normal file
@@ -0,0 +1,310 @@
|
||||
/**
 * Create every table and index the application needs.
 *
 * Idempotent: every statement uses IF NOT EXISTS, so this is safe to run
 * on every startup against an existing database. Schema changes for
 * databases created by older versions are handled separately in
 * runMigrations() below.
 *
 * @param {object} db - opened database handle (sqlite wrapper exposing exec)
 * @returns {Promise<void>}
 */
async function initializeSchema(db) {
  await db.exec(`
    CREATE TABLE IF NOT EXISTS settings (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      key TEXT UNIQUE NOT NULL,
      value TEXT,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS endpoint_groups (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      name TEXT NOT NULL,
      description TEXT,
      sort_order INTEGER DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS endpoints (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      name TEXT NOT NULL,
      url TEXT NOT NULL,
      type TEXT DEFAULT 'http',
      interval INTEGER DEFAULT 300,
      active BOOLEAN DEFAULT 1,
      timeout INTEGER DEFAULT 10,
      group_id INTEGER,
      sla_uptime REAL DEFAULT 99.9,
      ping_enabled BOOLEAN DEFAULT 0,
      sort_order INTEGER DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(group_id) REFERENCES endpoint_groups(id) ON DELETE SET NULL
    );

    CREATE TABLE IF NOT EXISTS check_results (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      endpoint_id INTEGER NOT NULL,
      status TEXT,
      response_time INTEGER,
      error_message TEXT,
      ping_response_time INTEGER DEFAULT NULL,
      checked_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS incidents (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      title TEXT NOT NULL,
      description TEXT,
      severity TEXT DEFAULT 'degraded',
      status TEXT DEFAULT 'investigating',
      source TEXT DEFAULT 'manual',
      auto_created BOOLEAN DEFAULT 0,
      source_endpoint_id INTEGER,
      admin_managed INTEGER DEFAULT 0,
      post_mortem TEXT DEFAULT NULL,
      start_time DATETIME DEFAULT CURRENT_TIMESTAMP,
      resolved_at DATETIME,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(source_endpoint_id) REFERENCES endpoints(id) ON DELETE SET NULL
    );

    CREATE TABLE IF NOT EXISTS incident_updates (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      incident_id INTEGER NOT NULL,
      message TEXT NOT NULL,
      status_label TEXT,
      created_by TEXT DEFAULT 'system',
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(incident_id) REFERENCES incidents(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS maintenance_windows (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      title TEXT NOT NULL,
      description TEXT,
      endpoint_id INTEGER,
      start_time DATETIME NOT NULL,
      end_time DATETIME NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS incident_endpoints (
      incident_id INTEGER NOT NULL,
      endpoint_id INTEGER NOT NULL,
      PRIMARY KEY(incident_id, endpoint_id),
      FOREIGN KEY(incident_id) REFERENCES incidents(id) ON DELETE CASCADE,
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS users (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      email TEXT UNIQUE NOT NULL,
      password_hash TEXT NOT NULL,
      name TEXT,
      role TEXT DEFAULT 'viewer',
      active BOOLEAN DEFAULT 1,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS email_notifications (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      endpoint_id INTEGER,
      category_id INTEGER,
      scope_type TEXT DEFAULT 'all',
      notify_on_down BOOLEAN DEFAULT 1,
      notify_on_recovery BOOLEAN DEFAULT 1,
      notify_on_degraded BOOLEAN DEFAULT 0,
      notify_on_incident BOOLEAN DEFAULT 1,
      active BOOLEAN DEFAULT 1,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE,
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE CASCADE,
      FOREIGN KEY(category_id) REFERENCES endpoint_groups(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS notification_extra_recipients (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      email TEXT UNIQUE NOT NULL,
      name TEXT,
      active BOOLEAN DEFAULT 1,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS notification_deliveries (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      event_type TEXT NOT NULL,
      event_key TEXT NOT NULL,
      recipient_email TEXT NOT NULL,
      recipient_name TEXT,
      user_id INTEGER,
      endpoint_id INTEGER,
      incident_id INTEGER,
      status TEXT NOT NULL DEFAULT 'queued',
      attempt_count INTEGER DEFAULT 0,
      max_attempts INTEGER DEFAULT 5,
      next_attempt_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      error_reason TEXT,
      payload_json TEXT,
      sent_at DATETIME,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      UNIQUE(event_key, recipient_email),
      FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE SET NULL,
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE SET NULL,
      FOREIGN KEY(incident_id) REFERENCES incidents(id) ON DELETE SET NULL
    );

    CREATE TABLE IF NOT EXISTS endpoint_alert_state (
      endpoint_id INTEGER PRIMARY KEY,
      last_status TEXT,
      consecutive_failures INTEGER DEFAULT 0,
      outage_started_at DATETIME,
      last_alert_sent_at DATETIME,
      last_recovery_sent_at DATETIME,
      last_reminder_sent_at DATETIME,
      last_transition_at DATETIME,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS sla_tracking (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      endpoint_id INTEGER NOT NULL,
      month DATE NOT NULL,
      uptime_percentage REAL,
      total_checks INTEGER,
      successful_checks INTEGER,
      downtime_minutes INTEGER,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      UNIQUE(endpoint_id, month),
      FOREIGN KEY(endpoint_id) REFERENCES endpoints(id) ON DELETE CASCADE
    );

    CREATE TABLE IF NOT EXISTS token_blocklist (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      token TEXT UNIQUE NOT NULL,
      expires_at DATETIME NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS api_keys (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      name TEXT NOT NULL,
      key_hash TEXT UNIQUE NOT NULL,
      key_prefix TEXT NOT NULL,
      scope TEXT NOT NULL DEFAULT 'global',
      endpoint_ids TEXT DEFAULT NULL,
      created_by INTEGER NOT NULL,
      last_used_at DATETIME DEFAULT NULL,
      expires_at DATETIME DEFAULT NULL,
      active BOOLEAN DEFAULT 1,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(created_by) REFERENCES users(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_check_results_endpoint ON check_results(endpoint_id);
    CREATE INDEX IF NOT EXISTS idx_check_results_checked_at ON check_results(checked_at);
    CREATE INDEX IF NOT EXISTS idx_incident_endpoints_endpoint ON incident_endpoints(endpoint_id);
    CREATE INDEX IF NOT EXISTS idx_incident_updates_incident ON incident_updates(incident_id);
    CREATE INDEX IF NOT EXISTS idx_maintenance_windows_endpoint ON maintenance_windows(endpoint_id);
    CREATE INDEX IF NOT EXISTS idx_maintenance_windows_times ON maintenance_windows(start_time, end_time);
    CREATE INDEX IF NOT EXISTS idx_sla_tracking_endpoint ON sla_tracking(endpoint_id);
    CREATE INDEX IF NOT EXISTS idx_token_blocklist_token ON token_blocklist(token);
    CREATE INDEX IF NOT EXISTS idx_token_blocklist_expires ON token_blocklist(expires_at);
    CREATE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash);
    CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(active);
    CREATE INDEX IF NOT EXISTS idx_email_notifications_user ON email_notifications(user_id);
    CREATE INDEX IF NOT EXISTS idx_email_notifications_scope ON email_notifications(scope_type);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_status ON notification_deliveries(status, next_attempt_at);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_event ON notification_deliveries(event_type, created_at);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_endpoint ON notification_deliveries(endpoint_id, created_at);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_incident ON notification_deliveries(incident_id, created_at);
  `);
}
|
||||
|
||||
/**
 * Return the PRAGMA table_info rows for a table (one row per column,
 * each with a `name` property among others).
 *
 * PRAGMA statements cannot take bound parameters, so the table name must
 * be embedded in the SQL text. It is quoted as a SQLite identifier (with
 * embedded double-quotes doubled) so an unexpected table name cannot
 * break out of the statement.
 *
 * @param {object} db - database handle exposing all()
 * @param {string} tableName - table to inspect
 * @returns {Promise<Array<object>>} column descriptor rows
 */
async function getColumns(db, tableName) {
  const quoted = `"${String(tableName).replace(/"/g, '""')}"`;
  return db.all(`PRAGMA table_info(${quoted})`);
}
|
||||
|
||||
/**
 * Add a column to a table when it is missing (SQLite has no
 * "ADD COLUMN IF NOT EXISTS"). No-op when the column already exists.
 */
async function ensureColumn(db, tableName, columnName, definition) {
  const existing = await getColumns(db, tableName);
  const alreadyThere = existing.some((column) => column.name === columnName);
  if (alreadyThere) return;
  await db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${definition}`);
}
|
||||
|
||||
// Insert a default settings row; an existing value for `key` is left
// untouched (INSERT OR IGNORE relies on the UNIQUE constraint on `key`).
async function ensureSetting(db, key, value) {
  const sql = 'INSERT OR IGNORE INTO settings (key, value) VALUES (?, ?)';
  await db.run(sql, [key, value]);
}
|
||||
|
||||
/**
 * Bring databases created by older versions up to the current schema.
 * All operations are additive and idempotent (IF NOT EXISTS / OR IGNORE /
 * column-presence checks), so this runs safely on every startup.
 *
 * @param {object} db - database handle exposing exec()/run()
 * @returns {Promise<void>}
 */
async function runMigrations(db) {
  // Seed the public URL setting from the environment on first run.
  await ensureSetting(db, 'publicUrl', process.env.FRONTEND_URL || 'http://localhost:3000');
  // Tables added after the first release, for databases predating them.
  // NOTE(review): these copies omit the FOREIGN KEY clauses that the same
  // tables carry in initializeSchema() — presumably intentional (SQLite
  // cannot retrofit FKs and CREATE TABLE IF NOT EXISTS keeps whichever
  // definition ran first), but confirm the divergence is deliberate.
  await db.exec(`
    CREATE TABLE IF NOT EXISTS notification_extra_recipients (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      email TEXT UNIQUE NOT NULL,
      name TEXT,
      active BOOLEAN DEFAULT 1,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS notification_deliveries (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      event_type TEXT NOT NULL,
      event_key TEXT NOT NULL,
      recipient_email TEXT NOT NULL,
      recipient_name TEXT,
      user_id INTEGER,
      endpoint_id INTEGER,
      incident_id INTEGER,
      status TEXT NOT NULL DEFAULT 'queued',
      attempt_count INTEGER DEFAULT 0,
      max_attempts INTEGER DEFAULT 5,
      next_attempt_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      error_reason TEXT,
      payload_json TEXT,
      sent_at DATETIME,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      UNIQUE(event_key, recipient_email)
    );

    CREATE TABLE IF NOT EXISTS endpoint_alert_state (
      endpoint_id INTEGER PRIMARY KEY,
      last_status TEXT,
      consecutive_failures INTEGER DEFAULT 0,
      outage_started_at DATETIME,
      last_alert_sent_at DATETIME,
      last_recovery_sent_at DATETIME,
      last_reminder_sent_at DATETIME,
      last_transition_at DATETIME,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );
  `);

  // Columns added to email_notifications after its first release.
  await ensureColumn(db, 'email_notifications', 'category_id', 'INTEGER');
  await ensureColumn(db, 'email_notifications', 'scope_type', "TEXT DEFAULT 'all'");
  await ensureColumn(db, 'email_notifications', 'notify_on_incident', 'BOOLEAN DEFAULT 1');
  await ensureColumn(db, 'email_notifications', 'active', 'BOOLEAN DEFAULT 1');
  // NOTE(review): SQLite rejects ALTER TABLE ... ADD COLUMN with a
  // non-constant default such as CURRENT_TIMESTAMP, so this call is
  // expected to fail on databases that actually lack the column — verify
  // against a pre-migration database.
  await ensureColumn(db, 'email_notifications', 'updated_at', 'DATETIME DEFAULT CURRENT_TIMESTAMP');

  await db.exec(`
    CREATE INDEX IF NOT EXISTS idx_email_notifications_user ON email_notifications(user_id);
    CREATE INDEX IF NOT EXISTS idx_email_notifications_scope ON email_notifications(scope_type);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_status ON notification_deliveries(status, next_attempt_at);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_event ON notification_deliveries(event_type, created_at);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_endpoint ON notification_deliveries(endpoint_id, created_at);
    CREATE INDEX IF NOT EXISTS idx_notification_deliveries_incident ON notification_deliveries(incident_id, created_at);
  `);

  // Notification defaults — INSERT OR IGNORE keeps any admin-edited values.
  await ensureSetting(db, 'smtpTlsMode', 'starttls');
  await ensureSetting(db, 'smtpTimeoutMs', '10000');
  await ensureSetting(db, 'smtpFromEmail', '');
  await ensureSetting(db, 'smtpFromName', '');
  await ensureSetting(db, 'notificationsAutoSubscribeAdmins', '1');
  await ensureSetting(db, 'notificationFailureThreshold', '2');
  await ensureSetting(db, 'notificationCooldownMs', '900000');
  await ensureSetting(db, 'notificationReminderMinutes', '60');
}
|
||||
|
||||
// Schema creation + version migrations, both invoked at startup.
module.exports = { initializeSchema, runMigrations };
||||
74
backend/src/middleware/apiKeyAuth.js
Normal file
74
backend/src/middleware/apiKeyAuth.js
Normal file
@@ -0,0 +1,74 @@
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { getDatabase } = require('../models/database');
|
||||
|
||||
/**
 * API key authentication middleware.
 * Accepts the key via:
 *   - Authorization: Bearer <key>
 *   - X-API-Key: <key>
 *
 * Sets req.apiKey on success with { id, name, scope, endpoint_ids }.
 * Does NOT block unauthenticated requests — call requireApiKey() after
 * this if you need to enforce auth. Used on /api/v1 routes to enrich
 * responses when a valid key is present.
 */
async function optionalApiKey(req, res, next) {
  const rawKey = extractKey(req);
  if (!rawKey) return next();

  try {
    const db = getDatabase();
    // Keys are prefixed with "sk_" — select candidates by stored prefix
    // (first 12 chars), then confirm with bcrypt against the full hash.
    const prefix = rawKey.substring(0, 12);
    const candidates = await db.all(
      `SELECT * FROM api_keys WHERE key_prefix = ? AND active = 1
       AND (expires_at IS NULL OR expires_at > datetime('now'))`,
      [prefix]
    );

    for (const candidate of candidates) {
      const match = await bcrypt.compare(rawKey, candidate.key_hash);
      if (!match) continue;

      // Parse the stored scope BEFORE accepting the key. Previously a
      // corrupted endpoint_ids value threw out of the loop into the outer
      // catch, aborting the whole auth attempt; now we fail closed for
      // just this candidate instead.
      let endpointIds = null;
      if (candidate.endpoint_ids) {
        try {
          endpointIds = JSON.parse(candidate.endpoint_ids);
        } catch (parseErr) {
          console.error('API key endpoint_ids parse error:', parseErr);
          continue; // corrupted scope data: do not grant access on this key
        }
      }

      // Update last_used_at — fire and forget, don't block the response.
      db.run('UPDATE api_keys SET last_used_at = datetime("now") WHERE id = ?', [candidate.id]).catch(() => {});

      req.apiKey = {
        id: candidate.id,
        name: candidate.name,
        scope: candidate.scope,
        endpoint_ids: endpointIds
      };
      break;
    }
  } catch (err) {
    console.error('API key auth error:', err);
  }

  next();
}
|
||||
|
||||
/* Middleware that requires a valid API key.
 * Must be used after optionalApiKey (which populates req.apiKey). */
function requireApiKey(req, res, next) {
  if (req.apiKey) {
    return next();
  }
  return res.status(401).json({ error: 'Valid API key required' });
}
|
||||
|
||||
// Pull a raw API key out of the request headers.
// X-API-Key wins outright; a Bearer token is only treated as an API key
// when it carries the "sk_" prefix (so JWTs pass through untouched).
function extractKey(req) {
  const headerKey = req.headers['x-api-key'];
  if (headerKey) return headerKey;

  const authorization = req.headers['authorization'];
  if (!authorization || !authorization.startsWith('Bearer ')) return null;

  const bearer = authorization.slice(7);
  return bearer.startsWith('sk_') ? bearer : null;
}
|
||||
|
||||
// optionalApiKey enriches the request; requireApiKey enforces presence.
module.exports = { optionalApiKey, requireApiKey };
||||
293
backend/src/middleware/auth.js
Normal file
293
backend/src/middleware/auth.js
Normal file
@@ -0,0 +1,293 @@
|
||||
const jwt = require('jsonwebtoken');
|
||||
const crypto = require('crypto');
|
||||
const dns = require('dns');
|
||||
const { promisify } = require('util');
|
||||
const { getDatabase } = require('../models/database');
|
||||
|
||||
const dnsResolve4 = promisify(dns.resolve4);
|
||||
const dnsResolve6 = promisify(dns.resolve6);
|
||||
|
||||
/* SHA-256 hash a token so we never store raw JWTs in the blocklist.
 * If the DB is compromised, attackers cannot recover the original tokens. */
function hashToken(token) {
  const digest = crypto.createHash('sha256');
  digest.update(token);
  return digest.digest('hex');
}
|
||||
|
||||
/**
 * Express middleware: authenticate a request via the Authorization header.
 * Verifies the JWT signature first (cheap, no DB hit for garbage tokens),
 * then checks the revocation blocklist. On success attaches the decoded
 * payload as req.user and the raw token as req.token.
 */
function authenticateToken(req, res, next) {
  const header = req.headers['authorization'];
  const token = header ? header.split(' ')[1] : undefined;

  if (!token) {
    return res.status(401).json({ error: 'Access token required' });
  }

  jwt.verify(token, process.env.JWT_SECRET, async (verifyErr, payload) => {
    if (verifyErr) {
      return res.status(403).json({ error: 'Invalid or expired token' });
    }

    // Signature is valid — now make sure it was not revoked at logout.
    try {
      if (await isTokenRevoked(token)) {
        return res.status(403).json({ error: 'Token has been revoked' });
      }
    } catch (dbErr) {
      console.error('Token blocklist check failed:', dbErr);
      return res.status(500).json({ error: 'Internal server error' });
    }

    req.user = payload;
    req.token = token;
    next();
  });
}
|
||||
|
||||
// Role-based authorization middleware factory: returns middleware that
// passes only when req.user exists and its role is in the allowed list.
function requireRole(...allowedRoles) {
  return (req, res, next) => {
    const authorised = Boolean(req.user) && allowedRoles.includes(req.user.role);
    if (!authorised) {
      return res.status(403).json({ error: 'Insufficient permissions' });
    }
    next();
  };
}
|
||||
|
||||
/**
 * Revoke a token by storing its SHA-256 hash in the database blocklist.
 * The token's own expiry is used so we know when to clean it up; tokens
 * without a readable exp claim get a 24h fallback window.
 */
async function revokeToken(token) {
  const db = getDatabase();
  const tokenHash = hashToken(token);

  // Decode (without verifying again) to pull the expiry timestamp.
  const decoded = jwt.decode(token);
  let expiresAt;
  if (decoded?.exp) {
    expiresAt = new Date(decoded.exp * 1000).toISOString();
  } else {
    expiresAt = new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(); // fallback 24h
  }

  await db.run(
    'INSERT OR IGNORE INTO token_blocklist (token, expires_at) VALUES (?, ?)',
    [tokenHash, expiresAt]
  );
}
|
||||
|
||||
// True when the token's hash sits in the blocklist with an unexpired entry.
async function isTokenRevoked(token) {
  const db = getDatabase();
  const row = await db.get(
    'SELECT id FROM token_blocklist WHERE token = ? AND expires_at > datetime("now")',
    [hashToken(token)]
  );
  return Boolean(row);
}
|
||||
|
||||
/**
 * Remove expired tokens from the blocklist.
 * Called periodically to keep the table small; any failure is logged and
 * swallowed so a broken cleanup never takes the scheduler down.
 */
async function cleanupExpiredTokens() {
  try {
    const outcome = await getDatabase().run('DELETE FROM token_blocklist WHERE expires_at < datetime("now")');
    if (outcome.changes > 0) {
      console.log(`✓ Cleaned up ${outcome.changes} expired blocked tokens`);
    }
  } catch (err) {
    console.error('Token cleanup error:', err);
  }
}
|
||||
|
||||
// Password strength validation: returns an array of human-readable
// problems (empty array means the password is acceptable).
function validatePassword(password) {
  const rules = [
    {
      valid: !(!password || password.length < 8),
      message: 'Password must be at least 8 characters long'
    },
    {
      valid: /[A-Z]/.test(password),
      message: 'Password must contain at least one uppercase letter'
    },
    {
      valid: /[a-z]/.test(password),
      message: 'Password must contain at least one lowercase letter'
    },
    {
      valid: /[0-9]/.test(password),
      message: 'Password must contain at least one number'
    },
    {
      valid: /[^A-Za-z0-9]/.test(password),
      message: 'Password must contain at least one special character'
    }
  ];
  return rules.filter((rule) => !rule.valid).map((rule) => rule.message);
}
|
||||
|
||||
// URL validation for endpoints (SSRF prevention)
/**
 * Validate a user-supplied monitoring target before it is stored/probed.
 *
 * @param {string} url - target; meaning depends on `type`:
 *   'http' → full http(s) URL, 'tcp' → "host:port", 'ping' → bare hostname/IP
 * @param {string} type - endpoint type: 'http' | 'tcp' | 'ping'
 * @returns {Promise<string|null>} human-readable error message, or null when valid
 *
 * NOTE(review): values of `type` other than the three handled branches
 * fall straight through to `return null` (treated as valid) — confirm
 * callers constrain `type` before relying on this.
 */
async function validateEndpointUrl(url, type) {
  if (!url || typeof url !== 'string') {
    return 'URL is required';
  }

  if (type === 'http') {
    // Must start with http:// or https://
    if (!/^https?:\/\//i.test(url)) {
      return 'HTTP endpoints must start with http:// or https://';
    }

    try {
      const parsed = new URL(url);
      const hostname = parsed.hostname;

      // Block dangerous schemes
      if (!['http:', 'https:'].includes(parsed.protocol)) {
        return 'Only http and https protocols are allowed';
      }

      // Block internal/private IPs (synchronous pattern check)
      if (isPrivateHost(hostname)) {
        return 'URLs pointing to private/internal addresses are not allowed';
      }

      // DNS resolution check (catches DNS-rebinding attacks)
      if (await resolvesToPrivateIP(hostname)) {
        return 'URL resolves to a private/internal address and is not allowed';
      }
    } catch {
      return 'Invalid URL format';
    }
  } else if (type === 'tcp') {
    // TCP format: host:port
    // NOTE(review): split(':') means IPv6 literals (which contain multiple
    // colons) are always rejected here — presumably acceptable, verify.
    // NOTE(review): parseInt has no radix and the port value's range
    // (1-65535) is never checked — e.g. "host:99999" passes this test.
    const parts = url.split(':');
    if (parts.length !== 2 || !parts[1] || isNaN(parseInt(parts[1]))) {
      return 'TCP endpoints must be in host:port format';
    }
    const host = parts[0];
    // Hostname charset whitelist (also excludes brackets/colons).
    if (!/^[a-zA-Z0-9._-]+$/.test(host)) {
      return 'Invalid hostname for TCP endpoint';
    }
    if (isPrivateHost(host)) {
      return 'Addresses pointing to private/internal hosts are not allowed';
    }
    if (await resolvesToPrivateIP(host)) {
      return 'Address resolves to a private/internal host and is not allowed';
    }
  } else if (type === 'ping') {
    // Ping: only valid hostnames/IPs
    if (!/^[a-zA-Z0-9._-]+$/.test(url)) {
      return 'Invalid hostname for ping endpoint';
    }
    if (isPrivateHost(url)) {
      return 'Addresses pointing to private/internal hosts are not allowed';
    }
    if (await resolvesToPrivateIP(url)) {
      return 'Address resolves to a private/internal host and is not allowed';
    }
  }

  return null; // valid
}
|
||||
|
||||
/**
 * Check whether a raw IP string is private/reserved (SSRF guard).
 *
 * Covers the RFC 1918 ranges, loopback, link-local (incl. the cloud
 * metadata range 169.254.0.0/16), carrier-grade NAT (RFC 6598), and the
 * multicast/reserved/broadcast top ranges, plus the common IPv6
 * local/loopback ranges and IPv4-mapped IPv6 forms.
 *
 * @param {string} ip - candidate IP literal
 * @returns {boolean} true when the address must not be probed
 */
function isPrivateIP(ip) {
  // Unwrap IPv4-mapped IPv6 (e.g. ::ffff:127.0.0.1 → 127.0.0.1)
  const v4Mapped = ip.match(/^::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})$/i);
  const normalised = v4Mapped ? v4Mapped[1] : ip;

  // Exact matches
  if (['localhost', '::1', '::'].includes(normalised.toLowerCase())) return true;

  // IPv4 checks
  const v4Match = normalised.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/);
  if (v4Match) {
    const [, a, b, c, d] = v4Match.map(Number);
    if (a > 255 || b > 255 || c > 255 || d > 255) return false; // not a real IPv4
    if (a === 0) return true; // 0.0.0.0/8
    if (a === 10) return true; // 10.0.0.0/8
    if (a === 100 && b >= 64 && b <= 127) return true; // 100.64.0.0/10 (CGNAT, RFC 6598)
    if (a === 127) return true; // 127.0.0.0/8
    if (a === 169 && b === 254) return true; // 169.254.0.0/16 (link-local + cloud metadata)
    if (a === 172 && b >= 16 && b <= 31) return true; // 172.16.0.0/12
    if (a === 192 && b === 168) return true; // 192.168.0.0/16
    if (a >= 224) return true; // 224.0.0.0/4 multicast + 240.0.0.0/4 reserved + broadcast
    return false;
  }

  // IPv6 private/reserved ranges
  const v6Ranges = [
    /^fc00:/i, // Unique local
    /^fd[0-9a-f]{2}:/i, // Unique local
    /^fe80:/i, // Link-local
    /^::1$/, // Loopback
    /^::$/, // Unspecified
  ];
  if (v6Ranges.some(r => r.test(normalised))) return true;

  return false;
}
|
||||
|
||||
/**
 * Synchronous hostname check — catches obvious private-address patterns
 * without touching DNS. Handles bracket notation, decimal-, hex- and
 * octal-encoded IPv4 forms, and localhost variants; delegates the final
 * verdict to isPrivateIP().
 *
 * @param {string} hostname - hostname or IP as taken from a parsed URL
 * @returns {boolean} true when the host must not be probed
 */
function isPrivateHost(hostname) {
  // Strip brackets from IPv6 URLs like [::1]
  let host = hostname.replace(/^\[|\]$/g, '').toLowerCase();

  // Block localhost variants (including subdomains of localhost)
  if (host === 'localhost' || host.endsWith('.localhost')) return true;

  // Detect and convert decimal-encoded IPs (e.g. 2130706433 = 127.0.0.1)
  if (/^\d+$/.test(host)) {
    const num = parseInt(host, 10);
    if (num >= 0 && num <= 0xFFFFFFFF) {
      const a = (num >>> 24) & 0xFF;
      const b = (num >>> 16) & 0xFF;
      const c = (num >>> 8) & 0xFF;
      const d = num & 0xFF;
      host = `${a}.${b}.${c}.${d}`;
    }
  }

  // Detect and convert hex-encoded IPs (e.g. 0x7f000001 = 127.0.0.1) —
  // many resolvers accept this spelling, so treat it like a raw IPv4
  // instead of letting it slip past the decimal/octal decoders.
  if (/^0x[0-9a-f]+$/.test(host)) {
    const num = parseInt(host, 16);
    if (num >= 0 && num <= 0xFFFFFFFF) {
      host = `${(num >>> 24) & 0xFF}.${(num >>> 16) & 0xFF}.${(num >>> 8) & 0xFF}.${num & 0xFF}`;
    }
  }

  // Detect and convert octal-encoded octets (e.g. 0177.0.0.1 = 127.0.0.1)
  if (/^0[0-7]*\./.test(host)) {
    const parts = host.split('.');
    if (parts.length === 4 && parts.every(p => /^0?[0-7]*$/.test(p) || /^\d+$/.test(p))) {
      const decoded = parts.map(p => p.startsWith('0') && p.length > 1 ? parseInt(p, 8) : parseInt(p, 10));
      if (decoded.every(n => n >= 0 && n <= 255)) {
        host = decoded.join('.');
      }
    }
  }

  return isPrivateIP(host);
}
|
||||
|
||||
/**
 * Async DNS-resolution check — resolves a hostname and verifies none of the
 * resulting IPs are private. Call this in addition to isPrivateHost() to
 * prevent DNS-rebinding attacks where a public domain resolves to 127.0.0.1.
 *
 * Returns true if ANY resolved address is private, or if the name does not
 * resolve at all (fail-closed).
 */
async function resolvesToPrivateIP(hostname) {
  // Raw IPv4 literal: no DNS needed.
  if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(hostname)) return isPrivateIP(hostname);
  // Raw IPv6 literal: must actually contain a colon. The previous check
  // (/^[:\da-f]+$/i) also matched all-hex hostnames such as "beef",
  // classifying them as IPs and skipping DNS resolution entirely — an
  // SSRF bypass for hex-only intranet hostnames.
  if (hostname.includes(':') && /^[:0-9a-f.]+$/i.test(hostname)) return isPrivateIP(hostname);

  const ips = [];
  try { ips.push(...await dnsResolve4(hostname)); } catch (_) {}
  try { ips.push(...await dnsResolve6(hostname)); } catch (_) {}

  // If DNS resolution fails entirely, block it (fail-closed)
  if (ips.length === 0) return true;

  return ips.some(ip => isPrivateIP(ip));
}
|
||||
|
||||
// Public surface of the auth module: request middleware, token revocation
// helpers, and the password/URL validators shared by the controllers.
module.exports = {
  authenticateToken,
  requireRole,
  revokeToken,
  isTokenRevoked,
  cleanupExpiredTokens,
  validatePassword,
  validateEndpointUrl,
  resolvesToPrivateIP
};
|
||||
1
backend/src/models/database.js
Normal file
1
backend/src/models/database.js
Normal file
@@ -0,0 +1 @@
|
||||
// Thin re-export of ../db — presumably kept so the historical
// ../models/database require path keeps working; verify before removing.
module.exports = require('../db');
||||
107
backend/src/routes/api.js
Normal file
107
backend/src/routes/api.js
Normal file
@@ -0,0 +1,107 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const rateLimit = require('express-rate-limit');
|
||||
const { authenticateToken, requireRole } = require('../middleware/auth');
|
||||
const endpointController = require('../controllers/endpointController');
|
||||
const incidentController = require('../controllers/incidentController');
|
||||
const userController = require('../controllers/userController');
|
||||
const categoryController = require('../controllers/categoryController');
|
||||
const { updateSettings } = require('../controllers/setupController');
|
||||
const apiKeyController = require('../controllers/apiKeyController');
|
||||
const notificationController = require('../controllers/notificationController');
|
||||
|
||||
// ── Rate limiters ──────────────────────────────────────────────────────────
|
||||
|
||||
// Public status page routes: 60 requests per minute per IP
const publicLimiter = rateLimit({
  windowMs: 60 * 1000, // 1-minute window
  max: 60, // requests allowed per IP per window
  message: { error: 'Too many requests, please try again shortly' }, // JSON 429 body
  standardHeaders: true, // emit RateLimit-* response headers
  legacyHeaders: false // suppress deprecated X-RateLimit-* headers
});
|
||||
|
||||
// Admin routes: 120 requests per minute per IP
const adminLimiter = rateLimit({
  windowMs: 60 * 1000, // 1-minute window
  max: 120, // requests allowed per IP per window
  message: { error: 'Too many requests, please slow down' }, // JSON 429 body
  standardHeaders: true, // emit RateLimit-* response headers
  legacyHeaders: false // suppress deprecated X-RateLimit-* headers
});
|
||||
|
||||
// Public routes

// Get all endpoints (public status page)
router.get('/public/endpoints', publicLimiter, endpointController.getAllEndpoints);
router.get('/public/endpoints/:id', publicLimiter, endpointController.getEndpointById);
router.get('/public/endpoints/:id/uptime', publicLimiter, endpointController.getUptime);
router.get('/public/endpoints/:id/history', publicLimiter, endpointController.getHistory);
router.get('/public/endpoints/:id/response-times', publicLimiter, endpointController.getResponseTimes);

// Incidents (public)
router.get('/public/incidents', publicLimiter, incidentController.getAllIncidents);
router.get('/public/incidents/:id', publicLimiter, incidentController.getIncidentById);

// Maintenance windows (public)
router.get('/public/maintenance', publicLimiter, incidentController.getAllMaintenance);

// Protected routes (require authentication + admin role)
// NOTE(review): on every route below, authenticateToken (and its blocklist
// DB lookup) runs BEFORE adminLimiter, so unauthenticated traffic is
// rejected by auth without ever being rate limited. If the limiter is
// meant to shield the auth path too, it should be registered first.

// Endpoints - CRUD
router.post('/admin/endpoints', authenticateToken, requireRole('admin'), adminLimiter, endpointController.createEndpoint);
// literal /reorder is registered before /:id so it wins route matching
router.put('/admin/endpoints/reorder', authenticateToken, requireRole('admin'), adminLimiter, endpointController.reorderEndpoints);
router.put('/admin/endpoints/:id', authenticateToken, requireRole('admin'), adminLimiter, endpointController.updateEndpoint);
router.delete('/admin/endpoints/:id', authenticateToken, requireRole('admin'), adminLimiter, endpointController.deleteEndpoint);

// Incidents - CRUD
router.post('/admin/incidents', authenticateToken, requireRole('admin'), adminLimiter, incidentController.createIncident);
router.put('/admin/incidents/:id', authenticateToken, requireRole('admin'), adminLimiter, incidentController.updateIncident);
router.delete('/admin/incidents/:id', authenticateToken, requireRole('admin'), adminLimiter, incidentController.deleteIncident);
router.patch('/admin/incidents/:id/resolve', authenticateToken, requireRole('admin'), adminLimiter, incidentController.resolveIncident);
router.post('/admin/incidents/:id/updates', authenticateToken, requireRole('admin'), adminLimiter, incidentController.addIncidentUpdate);
router.post('/admin/incidents/:id/reopen', authenticateToken, requireRole('admin'), adminLimiter, incidentController.reopenIncident);
router.put('/admin/incidents/:id/post-mortem', authenticateToken, requireRole('admin'), adminLimiter, incidentController.setPostMortem);

// Maintenance windows - CRUD
router.post('/admin/maintenance', authenticateToken, requireRole('admin'), adminLimiter, incidentController.createMaintenance);
router.put('/admin/maintenance/:id', authenticateToken, requireRole('admin'), adminLimiter, incidentController.updateMaintenance);
router.delete('/admin/maintenance/:id', authenticateToken, requireRole('admin'), adminLimiter, incidentController.deleteMaintenance);

// Settings
router.post('/admin/settings', authenticateToken, requireRole('admin'), adminLimiter, updateSettings);
router.post('/admin/settings/test-email', authenticateToken, requireRole('admin'), adminLimiter, notificationController.sendSmtpTestEmail);

// Notification preferences (all authenticated users — note: no requireRole)
router.get('/notifications/preferences', authenticateToken, adminLimiter, notificationController.getMyNotificationPreferences);
router.put('/notifications/preferences', authenticateToken, adminLimiter, notificationController.updateMyNotificationPreferences);

// Notification admin operations
router.get('/admin/notifications/defaults', authenticateToken, requireRole('admin'), adminLimiter, notificationController.getNotificationDefaultsController);
router.put('/admin/notifications/defaults', authenticateToken, requireRole('admin'), adminLimiter, notificationController.updateNotificationDefaultsController);
router.get('/admin/notifications/health', authenticateToken, requireRole('admin'), adminLimiter, notificationController.getSmtpHealthController);
router.get('/admin/notifications/deliveries', authenticateToken, requireRole('admin'), adminLimiter, notificationController.getDeliveryLogs);
router.get('/admin/notifications/extra-recipients', authenticateToken, requireRole('admin'), adminLimiter, notificationController.listExtraRecipients);
router.post('/admin/notifications/extra-recipients', authenticateToken, requireRole('admin'), adminLimiter, notificationController.createExtraRecipient);
router.delete('/admin/notifications/extra-recipients/:id', authenticateToken, requireRole('admin'), adminLimiter, notificationController.deleteExtraRecipient);

// API Keys (soft revoke via DELETE /:id, permanent removal via /:id/hard)
router.get('/admin/api-keys', authenticateToken, requireRole('admin'), adminLimiter, apiKeyController.listApiKeys);
router.post('/admin/api-keys', authenticateToken, requireRole('admin'), adminLimiter, apiKeyController.createApiKey);
router.delete('/admin/api-keys/:id', authenticateToken, requireRole('admin'), adminLimiter, apiKeyController.revokeApiKey);
router.delete('/admin/api-keys/:id/hard', authenticateToken, requireRole('admin'), adminLimiter, apiKeyController.deleteApiKey);

// Users - CRUD (admin only)
router.get('/admin/users', authenticateToken, requireRole('admin'), adminLimiter, userController.getAllUsers);
router.post('/admin/users', authenticateToken, requireRole('admin'), adminLimiter, userController.createUser);
router.delete('/admin/users/:id', authenticateToken, requireRole('admin'), adminLimiter, userController.deleteUser);
|
||||
|
||||
// Categories - CRUD
|
||||
router.get('/admin/categories', authenticateToken, requireRole('admin'), adminLimiter, categoryController.getAllCategories);
|
||||
router.post('/admin/categories', authenticateToken, requireRole('admin'), adminLimiter, categoryController.createCategory);
|
||||
router.put('/admin/categories/reorder', authenticateToken, requireRole('admin'), adminLimiter, categoryController.reorderCategories);
|
||||
router.get('/admin/categories/:id', authenticateToken, requireRole('admin'), adminLimiter, categoryController.getCategoryById);
|
||||
router.put('/admin/categories/:id', authenticateToken, requireRole('admin'), adminLimiter, categoryController.updateCategory);
|
||||
router.delete('/admin/categories/:id', authenticateToken, requireRole('admin'), adminLimiter, categoryController.deleteCategory);
|
||||
|
||||
module.exports = router;
|
||||
49
backend/src/routes/auth.js
Normal file
49
backend/src/routes/auth.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const express = require('express');
const rateLimit = require('express-rate-limit');
const { authenticateToken, requireRole, revokeToken } = require('../middleware/auth');
const { login } = require('../controllers/authController');
const { getSetupStatus, completeSetup, getSiteSettings, getAdminSettings } = require('../controllers/setupController');
const { getUserProfile, updateUserProfile, changePassword: changeUserPassword } = require('../controllers/profileController');

const router = express.Router();

// Build a 15-minute fixed-window rate limiter with the given attempt cap.
const makeWindowLimiter = (max, error) => rateLimit({
  windowMs: 15 * 60 * 1000,
  max,
  message: { error },
  standardHeaders: true,
  legacyHeaders: false,
});

// 10 login attempts / 15 min per IP.
const loginLimiter = makeWindowLimiter(10, 'Too many login attempts, please try again after 15 minutes');
// 5 first-run setup attempts / 15 min per IP.
const setupLimiter = makeWindowLimiter(5, 'Too many setup attempts, please try again later');

// Public routes
router.post('/login', loginLimiter, login);
router.get('/setup', getSetupStatus);
router.post('/setup', setupLimiter, completeSetup);
router.get('/settings', getSiteSettings);

// Protected routes
router.get('/profile', authenticateToken, getUserProfile);
router.get('/settings/admin', authenticateToken, requireRole('admin'), getAdminSettings);
router.put('/profile', authenticateToken, updateUserProfile);
router.post('/change-password', authenticateToken, changeUserPassword);

// Logout - revoke the current token so it can no longer be used.
router.post('/logout', authenticateToken, async (req, res) => {
  try {
    await revokeToken(req.token);
    res.json({ success: true, message: 'Logged out successfully' });
  } catch (err) {
    console.error('Logout error:', err);
    res.status(500).json({ error: 'Internal server error' });
  }
});

module.exports = router;
|
||||
38
backend/src/routes/v1.js
Normal file
38
backend/src/routes/v1.js
Normal file
@@ -0,0 +1,38 @@
|
||||
const express = require('express');
const rateLimit = require('express-rate-limit');
const { optionalApiKey } = require('../middleware/apiKeyAuth');
const v1 = require('../controllers/v1Controller');

const router = express.Router();

// Public API rate limiter: 60 requests per minute per IP.
const v1Limiter = rateLimit({
  windowMs: 60 * 1000,
  max: 60,
  message: { error: 'Too many requests, please try again shortly' },
  standardHeaders: true,
  legacyHeaders: false,
});

// Every v1 route is rate limited and accepts an optional API key.
// Routes stay public by default; a valid key enriches responses.
router.use(v1Limiter, optionalApiKey);

// Full status dump - primary integration endpoint.
router.get('/status.json', v1.getStatusJson);

// Lightweight summary (page name + overall indicator).
router.get('/summary', v1.getSummary);

// Components (endpoints).
router.get('/components', v1.getComponents);
router.get('/components/:id', v1.getComponentById);

// Incidents.
router.get('/incidents', v1.getIncidents);
router.get('/incidents/:id', v1.getIncidentById);

// Scheduled maintenances.
router.get('/scheduled-maintenances', v1.getScheduledMaintenances);

module.exports = router;
|
||||
136
backend/src/server.js
Normal file
136
backend/src/server.js
Normal file
@@ -0,0 +1,136 @@
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const helmet = require('helmet');
|
||||
const http = require('http');
|
||||
const { Server } = require('socket.io');
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
require('dotenv').config();
|
||||
|
||||
// Fall back to a freshly generated JWT secret when none is configured.
// NOTE(review): a random per-process secret means every issued token becomes
// invalid on restart - fine for first-run, but production deployments should
// set JWT_SECRET explicitly (see .env.example).
if (!process.env.JWT_SECRET) {
  process.env.JWT_SECRET = crypto.randomBytes(64).toString('hex');
  console.log('Generated random JWT_SECRET (one-time)');
}
// Same fallback for the at-rest encryption key; NOTE(review): data encrypted
// under a generated key presumably cannot be decrypted after a restart unless
// the key is persisted - confirm against the encryption helpers.
if (!process.env.ENCRYPTION_KEY) {
  process.env.ENCRYPTION_KEY = crypto.randomBytes(32).toString('hex');
}
|
||||
|
||||
const { initializeDatabase } = require('./models/database');
|
||||
const { scheduleAllEndpoints, setSocket } = require('./services/monitoringService');
|
||||
const { initializeNotificationWorker } = require('./services/notificationService');
|
||||
const { cleanupExpiredTokens } = require('./middleware/auth');
|
||||
const apiRoutes = require('./routes/api');
|
||||
const authRoutes = require('./routes/auth');
|
||||
const v1Routes = require('./routes/v1');
|
||||
|
||||
/**
 * Build and configure the Express application.
 * Middleware registration order matters: security headers and CORS run
 * first, then the JSON body parser, then static assets, then API routers,
 * and finally the SPA catch-all.
 * Returns the configured (not yet listening) app.
 */
function createApp() {
  const app = express();

  // Honour X-Forwarded-For from one proxy hop unless explicitly disabled,
  // so rate limiters see real client IPs behind nginx/Cloudflare.
  const trustProxy = process.env.TRUST_PROXY !== 'false';
  if (trustProxy) app.set('trust proxy', 1);

  // Security headers first, then CORS restricted to the configured frontend.
  app.use(helmet());
  const allowedOrigin = process.env.FRONTEND_URL || 'http://localhost:3000';
  app.use(cors({
    origin: allowedOrigin,
    methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE'],
    credentials: true,
  }));
  // Cap JSON bodies at 1 MB to bound request memory.
  app.use(express.json({ limit: '1mb' }));

  // Serve the built frontend when present (production single-binary mode).
  const frontendBuild = path.join(__dirname, '../../frontend/build');
  if (fs.existsSync(frontendBuild)) {
    app.use(express.static(frontendBuild));
  }

  // API routers; more specific prefixes are mounted before the generic /api.
  app.use('/api/auth', authRoutes);
  app.use('/api/v1', v1Routes);
  app.use('/api', apiRoutes);

  // Liveness probe.
  app.get('/health', (req, res) => {
    res.json({ status: 'ok' });
  });

  // SPA catch-all (Express 5 wildcard syntax): any unmatched GET falls back
  // to index.html so client-side routing works. Registered last on purpose.
  if (fs.existsSync(frontendBuild)) {
    app.get('/{*splat}', (req, res) => {
      res.sendFile(path.join(frontendBuild, 'index.html'));
    });
  }

  return app;
}
|
||||
|
||||
/**
 * Wrap the Express app in an HTTP server and attach a Socket.IO instance
 * that shares the same allowed CORS origin as the REST API.
 * Returns { server, io }.
 */
function createRealtimeServer(app) {
  const httpServer = http.createServer(app);
  const corsOrigin = process.env.FRONTEND_URL || 'http://localhost:3000';

  const io = new Server(httpServer, {
    cors: {
      origin: corsOrigin,
      methods: ['GET', 'POST'],
    },
  });

  // Connection lifecycle logging only; rooms/events are handled elsewhere.
  io.on('connection', (socket) => {
    console.log('Client connected:', socket.id);
    socket.on('disconnect', () => {
      console.log('Client disconnected:', socket.id);
    });
  });

  return { server: httpServer, io };
}
|
||||
|
||||
// Build the HTTP app, then wrap it with the Socket.IO realtime server.
const app = createApp();
const { server, io } = createRealtimeServer(app);
|
||||
|
||||
// Startup
|
||||
/**
 * Startup sequence: database init -> monitoring/notification services ->
 * token-cleanup timer -> HTTP listen. Resolves with { server, io } once
 * listening; logs and rethrows on any startup failure.
 */
async function start() {
  try {
    // Initialize database before anything that queries it.
    await initializeDatabase();

    // Wire the realtime socket into monitoring, then start the workers.
    setSocket(io);
    initializeNotificationWorker();
    await scheduleAllEndpoints();

    // Clean up expired blocked tokens every hour; unref so this timer alone
    // never keeps the process alive.
    const cleanupTimer = setInterval(cleanupExpiredTokens, 60 * 60 * 1000);
    if (typeof cleanupTimer.unref === 'function') {
      cleanupTimer.unref();
    }

    // Start server. FIX: reject on listen errors (e.g. EADDRINUSE) instead
    // of awaiting a promise that would otherwise never settle.
    const PORT = process.env.PORT || 5000;
    await new Promise((resolve, reject) => {
      server.once('error', reject);
      server.listen(PORT, () => {
        server.removeListener('error', reject);
        resolve();
      });
    });

    // PORT may be 0 ("any free port"); read the real bound port back.
    const address = server.address();
    const resolvedPort = address && typeof address === 'object' ? address.port : PORT;

    console.log(`\n✓ Arcane Status running on http://localhost:${resolvedPort}`);
    // Use the fs module imported at the top of the file
    // (was an inconsistent inline require('fs')).
    if (fs.existsSync(path.join(__dirname, '../../frontend/build'))) {
      console.log('✓ Serving frontend from build (production mode)');
    } else {
      console.log("✓ No frontend build found, run 'npm run build' for production");
      console.log("✓ For development, run 'npm run dev' from the project root");
    }
    console.log(`✓ First time? Visit http://localhost:${resolvedPort} to complete setup\n`);

    return { server, io };
  } catch (error) {
    console.error('Failed to start server:', error);
    throw error;
  }
}
|
||||
|
||||
// When run directly (not imported by tests), start the server and exit
// non-zero on startup failure; the error itself is already logged in start().
if (require.main === module) {
  start().catch(() => {
    process.exit(1);
  });
}

// Exported for tests and embedding: live instances plus the factories.
module.exports = { app, server, io, start, createApp, createRealtimeServer };
|
||||
291
backend/src/services/incident/autoIncidentService.js
Normal file
291
backend/src/services/incident/autoIncidentService.js
Normal file
@@ -0,0 +1,291 @@
|
||||
const { getDatabase } = require('../../models/database');
|
||||
const { queueIncidentNotification } = require('../notificationService');
|
||||
|
||||
// Socket.IO server instance, injected by the monitoring service via
// setIncidentSocket(); null until startup wiring completes.
let io = null;

// Minutes of continuous downtime before an incident is auto-created.
const AUTO_INCIDENT_THRESHOLD_MIN = 5;
// Minutes of continuous downtime before an open incident escalates to 'down'.
const AUTO_ESCALATE_THRESHOLD_MIN = 30;

// Store the shared Socket.IO instance used for incident event emits.
function setIncidentSocket(socketInstance) {
  io = socketInstance;
}
|
||||
|
||||
/**
 * Parse an SQLite datetime string as UTC.
 * SQLite's CURRENT_TIMESTAMP emits 'YYYY-MM-DD HH:MM:SS' with no zone
 * marker, which JS would otherwise interpret in local time, so a 'Z' is
 * appended. FIX: strings that already carry an explicit zone ('Z' or a
 * +hh:mm offset) are now passed through unchanged instead of being
 * corrupted by the extra 'Z'. Falsy input returns the current time
 * (original fallback behavior preserved).
 */
function parseSQLiteUTC(str) {
  if (!str) return new Date();
  // Already has an explicit timezone -> trust the built-in parser.
  if (/([zZ]|[+-]\d{2}:?\d{2})$/.test(str)) return new Date(str);
  return new Date(str.replace(' ', 'T') + 'Z');
}
|
||||
|
||||
/**
 * Estimate how long (in fractional minutes) an endpoint has been
 * continuously down: locate the first 'down' result recorded after the most
 * recent 'up' result and measure from that timestamp to now.
 * Returns 0 when no 'down' result exists after the last 'up'.
 */
async function getConsecutiveDownMinutes(endpointId) {
  const db = getDatabase();

  // Most recent successful check; everything after it is the current outage.
  const lastUp = await db.get(
    `SELECT checked_at FROM check_results
     WHERE endpoint_id = ? AND status = 'up'
     ORDER BY checked_at DESC LIMIT 1`,
    [endpointId]
  );

  // No 'up' ever recorded -> scan the whole history via an epoch sentinel.
  // Plain string comparison is safe because SQLite datetime text sorts
  // lexicographically in chronological order.
  const sinceTimestamp = lastUp ? lastUp.checked_at : '1970-01-01';

  const firstDownAfterUp = await db.get(
    `SELECT checked_at FROM check_results
     WHERE endpoint_id = ? AND status = 'down'
     AND checked_at > ?
     ORDER BY checked_at ASC LIMIT 1`,
    [endpointId, sinceTimestamp]
  );

  if (!firstDownAfterUp) return 0;

  // checked_at is stored without a zone marker; parse explicitly as UTC.
  const outageStart = parseSQLiteUTC(firstDownAfterUp.checked_at);
  return (Date.now() - outageStart.getTime()) / 60000;
}
|
||||
|
||||
/**
 * Count how many of the most recent checks (up to 15) for an endpoint were
 * consecutively 'up', starting from the newest result.
 */
async function getConsecutiveUpCount(endpointId) {
  const db = getDatabase();
  const recent = await db.all(
    `SELECT status FROM check_results
     WHERE endpoint_id = ?
     ORDER BY checked_at DESC LIMIT 15`,
    [endpointId]
  );

  // The streak ends at the first non-'up' row; -1 means every row was 'up'.
  const firstBreak = recent.findIndex((row) => row.status !== 'up');
  return firstBreak === -1 ? recent.length : firstBreak;
}
|
||||
|
||||
/**
 * True when the endpoint is currently inside an active maintenance window,
 * either one scoped to it or a global window (endpoint_id IS NULL).
 */
async function isInMaintenanceWindow(endpointId) {
  const db = getDatabase();
  const nowIso = new Date().toISOString();
  const activeWindow = await db.get(
    `SELECT id FROM maintenance_windows
     WHERE (endpoint_id = ? OR endpoint_id IS NULL)
     AND start_time <= ?
     AND end_time >= ?`,
    [endpointId, nowIso, nowIso]
  );
  return Boolean(activeWindow);
}
|
||||
|
||||
/**
 * Drive the automatic incident lifecycle for one endpoint after a check.
 *
 * On 'up': move an open auto-incident toward resolution (de-escalate,
 * 'monitoring' after the first up, resolved after 10 consecutive ups),
 * unless an admin has taken the incident over.
 * On 'down': skip during maintenance; revert a 'monitoring' incident to
 * 'investigating'; escalate severity after 30 minutes; otherwise create a
 * new incident after 5 minutes of downtime, re-opening a recently resolved
 * one instead when the endpoint is flapping.
 *
 * Side effects: incidents/incident_updates writes, Socket.IO emits
 * ('incidentUpdated' / 'incidentResolved' / 'incidentCreated'), and queued
 * email notifications. Statement order is significant: each DB write is
 * followed by a re-read so the emitted payload reflects the stored row.
 *
 * @param {object} endpoint - endpoint row (id, name, ... from endpoints table)
 * @param {string} status - latest check result, 'up' or 'down'
 */
async function handleAutoIncident(endpoint, status) {
  const db = getDatabase();
  console.log(`[AutoIncident] ${endpoint.name} -> ${status}`);

  // The newest unresolved incident this service itself opened for the endpoint.
  const openIncident = await db.get(
    `SELECT * FROM incidents
     WHERE source_endpoint_id = ?
     AND auto_created = 1
     AND resolved_at IS NULL
     ORDER BY created_at DESC LIMIT 1`,
    [endpoint.id]
  );

  if (status === 'up') {
    // Nothing to close.
    if (!openIncident) return;

    // An admin has claimed this incident; never auto-resolve under them.
    if (openIncident.admin_managed) {
      console.log(`[AutoIncident] Incident #${openIncident.id} is admin-managed, skipping auto-resolve.`);
      return;
    }

    const consecutiveUps = await getConsecutiveUpCount(endpoint.id);
    console.log(`[AutoIncident] ${endpoint.name} up streak: ${consecutiveUps}/10`);

    // Not stable yet: step the incident down rather than resolving it.
    if (consecutiveUps < 10) {
      // De-escalate a full outage to 'degraded' on the first signs of recovery.
      if (openIncident.severity === 'down') {
        await db.run(
          `UPDATE incidents SET severity = 'degraded', updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
          [openIncident.id]
        );
        await db.run(
          `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'monitoring', 'system')`,
          [
            openIncident.id,
            `**${endpoint.name}** appears to be partially recovering. Severity has been reduced to degraded while we continue monitoring for stability.`
          ]
        );
        const updatedIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [openIncident.id]);
        if (io) io.emit('incidentUpdated', updatedIncident);
      }

      // First 'up' after an outage: flip to 'monitoring' (once) and notify.
      if (openIncident.status !== 'monitoring') {
        await db.run(
          `UPDATE incidents SET status = 'monitoring', updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
          [openIncident.id]
        );
        await db.run(
          `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'monitoring', 'system')`,
          [
            openIncident.id,
            `**${endpoint.name}** is responding again. We are monitoring the service for stability before closing this incident (${consecutiveUps}/10 checks passed).`
          ]
        );
        const monitoringIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [openIncident.id]);
        console.log(`[AutoIncident] Incident #${openIncident.id} moved to monitoring (${consecutiveUps}/10 ups)`);
        if (io) io.emit('incidentUpdated', monitoringIncident);
        await queueIncidentNotification('incident_updated', openIncident.id, `${endpoint.name} entered monitoring state.`);
      }
      return;
    }

    // 10 consecutive ups: close the incident as resolved.
    await db.run(
      `UPDATE incidents
       SET resolved_at = CURRENT_TIMESTAMP,
       status = 'resolved',
       updated_at = CURRENT_TIMESTAMP
       WHERE id = ?`,
      [openIncident.id]
    );
    await db.run(
      `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'resolved', 'system')`,
      [
        openIncident.id,
        `The issue with **${endpoint.name}** has been resolved. The service has passed 10 consecutive health checks and is operating normally.`
      ]
    );
    const resolvedIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [openIncident.id]);
    console.log(`Auto-resolved incident #${openIncident.id} for ${endpoint.name}`);
    if (io) io.emit('incidentResolved', resolvedIncident);
    await queueIncidentNotification('incident_resolved', openIncident.id, `${endpoint.name} has recovered and the incident is resolved.`);
    return;
  }

  // --- status === 'down' from here on ---

  // Downtime during a maintenance window is expected; do nothing.
  if (await isInMaintenanceWindow(endpoint.id)) {
    console.log(`[AutoIncident] ${endpoint.name} is in a maintenance window, skipping.`);
    return;
  }

  // Recovery was in progress but the endpoint failed again: revert to
  // investigating (the up-streak counter implicitly resets).
  if (openIncident && openIncident.status === 'monitoring') {
    await db.run(
      `UPDATE incidents
       SET status = 'investigating',
       severity = 'degraded',
       updated_at = CURRENT_TIMESTAMP
       WHERE id = ?`,
      [openIncident.id]
    );
    await db.run(
      `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'investigating', 'system')`,
      [
        openIncident.id,
        `**${endpoint.name}** became unavailable again while under stability monitoring. The up-check counter has been reset. Our team continues to investigate.`
      ]
    );
    const revertedIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [openIncident.id]);
    console.log(`Incident #${openIncident.id} reverted from monitoring to investigating for ${endpoint.name}`);
    if (io) io.emit('incidentUpdated', revertedIncident);
    await queueIncidentNotification('incident_updated', openIncident.id, `${endpoint.name} became unavailable again.`);
    return;
  }

  const minutesDown = await getConsecutiveDownMinutes(endpoint.id);
  console.log(`[AutoIncident] ${endpoint.name} down ~${minutesDown.toFixed(1)}m | openIncident: ${openIncident ? '#' + openIncident.id : 'none'}`);

  // Outage persisted past the escalation threshold: raise severity once.
  if (openIncident && minutesDown >= AUTO_ESCALATE_THRESHOLD_MIN && openIncident.severity !== 'down') {
    await db.run(
      `UPDATE incidents SET severity = 'down', updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
      [openIncident.id]
    );
    await db.run(
      `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'identified', 'system')`,
      [
        openIncident.id,
        `**${endpoint.name}** has been unavailable for over 30 minutes. Severity escalated to critical. Our team is actively investigating.`
      ]
    );
    const escalatedIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [openIncident.id]);
    console.log(`Auto-escalated incident #${openIncident.id} for ${endpoint.name} to 'down'`);
    if (io) io.emit('incidentUpdated', escalatedIncident);
    await queueIncidentNotification('incident_updated', openIncident.id, `${endpoint.name} outage severity escalated.`);
    return;
  }

  // An open incident already tracks this outage; nothing more to do.
  if (openIncident) return;

  // A manually created incident may already cover this endpoint via tagging;
  // avoid opening a duplicate.
  const openIncidentByTag = await db.get(
    `SELECT i.id FROM incidents i
     JOIN incident_endpoints ie ON ie.incident_id = i.id
     WHERE ie.endpoint_id = ?
     AND i.resolved_at IS NULL
     ORDER BY i.created_at DESC LIMIT 1`,
    [endpoint.id]
  );
  if (openIncidentByTag) {
    console.log(`[AutoIncident] Open incident #${openIncidentByTag.id} already covers ${endpoint.name} (via tag), skipping auto-create.`);
    return;
  }

  // Too early to open an incident; wait for the threshold.
  if (minutesDown < AUTO_INCIDENT_THRESHOLD_MIN) {
    console.log(`[AutoIncident] ${endpoint.name} not yet at threshold (${minutesDown.toFixed(1)}m < ${AUTO_INCIDENT_THRESHOLD_MIN}m), waiting.`);
    return;
  }

  // Flap handling: if an auto-incident for this endpoint was resolved within
  // the cooldown, re-open it instead of creating a new one.
  // (Constant is interpolated into the SQL, not user input, so this is safe.)
  const REOPEN_COOLDOWN_MIN = 30;
  const recentlyResolved = await db.get(
    `SELECT * FROM incidents
     WHERE source_endpoint_id = ?
     AND auto_created = 1
     AND resolved_at IS NOT NULL
     AND resolved_at >= datetime('now', '-${REOPEN_COOLDOWN_MIN} minutes')
     ORDER BY resolved_at DESC LIMIT 1`,
    [endpoint.id]
  );

  if (recentlyResolved) {
    await db.run(
      `UPDATE incidents
       SET resolved_at = NULL,
       status = 'investigating',
       severity = 'degraded',
       updated_at = CURRENT_TIMESTAMP
       WHERE id = ?`,
      [recentlyResolved.id]
    );
    await db.run(
      `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'investigating', 'system')`,
      [
        recentlyResolved.id,
        `This incident has been automatically re-opened. **${endpoint.name}** became unavailable again within ${REOPEN_COOLDOWN_MIN} minutes of the previous resolution.`
      ]
    );
    const reopenedIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [recentlyResolved.id]);
    console.log(`Re-opened incident #${recentlyResolved.id} for ${endpoint.name} (flap within ${REOPEN_COOLDOWN_MIN}m)`);
    // Emitted as 'incidentCreated' so clients treat the re-opened incident as new.
    if (io) io.emit('incidentCreated', { ...reopenedIncident, endpoints: [endpoint] });
    await queueIncidentNotification('incident_updated', recentlyResolved.id, `${endpoint.name} outage re-opened after recent recovery.`);
    return;
  }

  // No prior incident applies: create a fresh auto-incident.
  const title = `${endpoint.name} is experiencing issues`;
  const description = `Our systems have detected an issue with **${endpoint.name}**. Our team has been notified and updates will be provided shortly.`;

  const result = await db.run(
    `INSERT INTO incidents
     (title, description, severity, status, source, auto_created, source_endpoint_id)
     VALUES (?, ?, 'degraded', 'investigating', 'auto', 1, ?)`,
    [title, description, endpoint.id]
  );
  const incidentId = result.lastID;

  // Tag the endpoint and record the opening update.
  await db.run(
    'INSERT INTO incident_endpoints (incident_id, endpoint_id) VALUES (?, ?)',
    [incidentId, endpoint.id]
  );
  await db.run(
    `INSERT INTO incident_updates (incident_id, message, status_label, created_by) VALUES (?, ?, 'investigating', 'system')`,
    [incidentId, description]
  );

  const newIncident = await db.get('SELECT * FROM incidents WHERE id = ?', [incidentId]);
  console.log(`Auto-created incident #${incidentId} for ${endpoint.name} (down ${Math.round(minutesDown)}m)`);
  if (io) io.emit('incidentCreated', { ...newIncident, endpoints: [endpoint] });
  await queueIncidentNotification('incident_created', incidentId, description);
}
|
||||
|
||||
// Public surface: socket wiring plus the per-check lifecycle entry point.
module.exports = {
  setIncidentSocket,
  handleAutoIncident,
};
|
||||
24
backend/src/services/monitoring/checkers/httpChecker.js
Normal file
24
backend/src/services/monitoring/checkers/httpChecker.js
Normal file
@@ -0,0 +1,24 @@
|
||||
const axios = require('axios');
|
||||
const { checkPing } = require('./pingChecker');
|
||||
|
||||
/**
 * Extract the bare hostname from an HTTP(S) URL or host-ish string:
 * scheme, path/query and port are stripped ('https://a.b:8080/x' -> 'a.b').
 * Uses the WHATWG URL parser for correctness (handles query strings,
 * userinfo, IPv6 brackets), falling back to the original regex stripping
 * when the input cannot be parsed as a URL at all.
 */
function extractHostname(url) {
  try {
    // URL requires a scheme; supply one for scheme-less inputs.
    const candidate = /^https?:\/\//i.test(url) ? url : `http://${url}`;
    return new URL(candidate).hostname;
  } catch {
    // Last-resort fallback: strip scheme, then path, then port, then slash.
    return url.replace(/^https?:\/\//i, '').replace(/\/.*$/, '').replace(/:\d+$/, '').replace(/\/$/, '');
  }
}
|
||||
|
||||
/**
 * HTTP availability check via a HEAD request.
 * Resolves { status: 'up'|'down', responseTime, pingResponseTime }; throws
 * on network failure, timeout, or a non-2xx/3xx response (the caller records
 * the error and counts the check as down).
 *
 * FIX: axios's default validateStatus only accepts 2xx, which made the
 * explicit `< 400` up-range below unreachable - a 3xx response would throw
 * instead of counting as up. validateStatus is now aligned with that range.
 *
 * @param {object} endpoint - endpoint row (url, ping_enabled, ...)
 * @param {number} timeoutSeconds - request timeout in seconds
 */
async function checkHTTP(endpoint, timeoutSeconds) {
  const startTime = Date.now();
  const response = await axios.head(endpoint.url, {
    timeout: timeoutSeconds * 1000,
    // Accept 2xx and 3xx as non-throwing; 4xx/5xx still throw so the caller
    // gets a descriptive error message.
    validateStatus: (code) => code >= 200 && code < 400,
  });

  const status = response.status >= 200 && response.status < 400 ? 'up' : 'down';
  // Measured before any optional ping so it reflects only the HTTP request.
  const responseTime = Date.now() - startTime;

  // Optional ICMP round-trip measurement against the bare hostname.
  let pingResponseTime = null;
  if (endpoint.ping_enabled) {
    const hostname = extractHostname(endpoint.url);
    pingResponseTime = await checkPing(hostname, timeoutSeconds);
  }

  return { status, responseTime, pingResponseTime };
}
|
||||
|
||||
module.exports = { checkHTTP };
|
||||
30
backend/src/services/monitoring/checkers/pingChecker.js
Normal file
30
backend/src/services/monitoring/checkers/pingChecker.js
Normal file
@@ -0,0 +1,30 @@
|
||||
const { execFile } = require('child_process');
|
||||
|
||||
/**
 * Ping a host once using the system `ping` binary and resolve with the
 * average round-trip time in whole milliseconds, or null when the tool's
 * output cannot be parsed. Rejects on invalid hostname or ping failure.
 *
 * @param {string} host - hostname or IPv4 address; must match the
 *   allow-list below (which also rules out IPv6 and argument injection).
 * @param {number} timeout - per-ping timeout in seconds.
 * @returns {Promise<number|null>}
 */
function checkPing(host, timeout) {
  return new Promise((resolve, reject) => {
    // Strict allow-list: letters, digits, dot, underscore, hyphen only.
    // This is the safety guard for the execFile argument below.
    const validHost = /^[a-zA-Z0-9._-]+$/.test(host);
    if (!validHost) {
      return reject(new Error('Invalid hostname'));
    }

    // Windows: -n <count>, -w <timeout in ms>.
    // Linux: -4 force IPv4, -c <count>, -W <timeout in seconds>.
    // NOTE(review): macOS ping interprets -W in milliseconds, so the
    // effective timeout there would be far shorter - confirm target platforms.
    const args = process.platform === 'win32'
      ? ['-n', '1', '-w', String(timeout * 1000), host]
      : ['-4', '-c', '1', '-W', String(timeout), host];

    execFile('ping', args, (error, stdout, stderr) => {
      if (error) {
        // Prefer the tool's own output over the generic exec error text.
        const detail = (stderr || stdout || error.message || '').trim();
        reject(new Error(detail || error.message));
      } else {
        let avg = null;
        // Linux summary line: "rtt min/avg/max/mdev = a/b/c/d ms" -> capture avg.
        const linuxMatch = stdout.match(/rtt[^=]+=\s*[\d.]+\/([\d.]+)\//i);
        // Windows summary line: "Average = Nms".
        const winMatch = stdout.match(/Average\s*=\s*([\d.]+)ms/i);
        if (linuxMatch) avg = Math.round(parseFloat(linuxMatch[1]));
        else if (winMatch) avg = Math.round(parseFloat(winMatch[1]));
        resolve(avg);
      }
    });
  });
}
|
||||
|
||||
module.exports = { checkPing };
|
||||
21
backend/src/services/monitoring/checkers/tcpChecker.js
Normal file
21
backend/src/services/monitoring/checkers/tcpChecker.js
Normal file
@@ -0,0 +1,21 @@
|
||||
const net = require('net');
|
||||
|
||||
/**
 * Check that a TCP connection to host:port can be established.
 * Resolves (with no value) on a successful connect; rejects on a connection
 * error or when no connection is made within `timeout` seconds.
 *
 * FIX: the socket is now destroyed on the error path too (previously only
 * on timeout and success), so failed checks do not leak descriptors. `once`
 * is used so a late event after settling cannot fire the handlers again.
 */
function checkTCP(host, port, timeout) {
  return new Promise((resolve, reject) => {
    const socket = new net.Socket();

    socket.setTimeout(timeout * 1000);
    socket.once('error', (err) => {
      socket.destroy();
      reject(err);
    });
    socket.once('timeout', () => {
      socket.destroy();
      reject(new Error('TCP connection timeout'));
    });

    // Only connectability is being probed; tear down immediately on success.
    socket.connect(port, host, () => {
      socket.destroy();
      resolve();
    });
  });
}
|
||||
|
||||
module.exports = { checkTCP };
|
||||
138
backend/src/services/monitoringService.js
Normal file
138
backend/src/services/monitoringService.js
Normal file
@@ -0,0 +1,138 @@
|
||||
const cron = require('node-cron');
|
||||
const { getDatabase } = require('../models/database');
|
||||
const { checkHTTP } = require('./monitoring/checkers/httpChecker');
|
||||
const { checkTCP } = require('./monitoring/checkers/tcpChecker');
|
||||
const { checkPing } = require('./monitoring/checkers/pingChecker');
|
||||
const { handleAutoIncident, setIncidentSocket } = require('./incident/autoIncidentService');
|
||||
const { processEndpointTransition } = require('./notificationService');
|
||||
|
||||
// Socket.IO server, injected at startup via setSocket(); null until then.
let io = null;
// endpoint.id -> scheduled task handle, so a check loop can be replaced or
// stopped when the endpoint changes or is deleted.
const scheduledTasks = new Map();

// Store the socket and forward it to the auto-incident service so both
// modules emit on the same connection.
function setSocket(socketInstance) {
  io = socketInstance;
  setIncidentSocket(socketInstance);
}
|
||||
|
||||
/**
 * Run one availability check for an endpoint and fan the result out.
 * Pipeline (order is significant): verify the endpoint still exists ->
 * run the type-specific check -> persist the result -> emit 'checkResult'
 * over Socket.IO -> drive the auto-incident lifecycle -> process email
 * notification transitions. Incident/notification failures are logged and
 * swallowed deliberately so one subsystem cannot break the check loop.
 *
 * @param {object} endpoint - endpoint row (id, name, type, url, timeout, ...)
 * @returns {{status: string, responseTime: number, errorMessage: ?string}}
 */
async function performCheck(endpoint) {
  const startTime = Date.now();
  let status = 'down';
  let responseTime = 0;
  let errorMessage = null;
  let pingResponseTime = null;

  // The endpoint may have been deleted since this task was scheduled;
  // tear the task down instead of recording orphan results.
  const db = getDatabase();
  const stillExists = await db.get('SELECT id FROM endpoints WHERE id = ?', [endpoint.id]);
  if (!stillExists) {
    console.warn(`[Monitor] Skipping check for deleted endpoint "${endpoint.name}" (id=${endpoint.id}) - stopping task`);
    stopScheduling(endpoint.id);
    return { status: 'unknown', responseTime: 0, errorMessage: 'endpoint deleted' };
  }

  console.log(`[Monitor] Checking "${endpoint.name}" (id=${endpoint.id}, type=${endpoint.type}, url=${endpoint.url})`);

  try {
    if (endpoint.type === 'http') {
      const httpResult = await checkHTTP(endpoint, endpoint.timeout);
      status = httpResult.status;
      responseTime = httpResult.responseTime;
      pingResponseTime = httpResult.pingResponseTime;
    } else if (endpoint.type === 'tcp') {
      // TCP endpoints store "host:port" in the url column.
      const [host, port] = endpoint.url.split(':');
      await checkTCP(host, parseInt(port, 10), endpoint.timeout);
      status = 'up';
      responseTime = Date.now() - startTime;
    } else if (endpoint.type === 'ping') {
      const rtt = await checkPing(endpoint.url, endpoint.timeout);
      status = 'up';
      // Prefer the parsed RTT; fall back to wall-clock when unparsable.
      responseTime = rtt !== null ? rtt : (Date.now() - startTime);
    }
  } catch (error) {
    // Any checker failure (timeout, refused, bad status) counts as down.
    status = 'down';
    responseTime = Date.now() - startTime;
    errorMessage = error.message;
    console.warn(`[Monitor] ${endpoint.type} check FAILED for "${endpoint.name}" (${endpoint.url}): ${error.message}`);
  }

  console.log(`[Monitor] ${endpoint.type} result for "${endpoint.name}": ${status} (${responseTime}ms)${errorMessage ? ' | ' + errorMessage : ''}`);

  // Persist before emitting so clients never see a result that was not stored.
  await db.run(
    'INSERT INTO check_results (endpoint_id, status, response_time, error_message, ping_response_time) VALUES (?, ?, ?, ?, ?)',
    [endpoint.id, status, responseTime, errorMessage, pingResponseTime]
  );

  // Push the live result to connected dashboards.
  if (io) {
    io.emit('checkResult', {
      endpoint_id: endpoint.id,
      status,
      responseTime,
      checked_at: new Date(),
    });
  }

  // Auto-incident handling is best-effort; never let it kill the check loop.
  try {
    await handleAutoIncident(endpoint, status);
  } catch (err) {
    console.error(`[AutoIncident] Error processing ${endpoint.name}:`, err.message, err.stack);
  }

  // Email notification transitions are likewise best-effort.
  try {
    await processEndpointTransition(endpoint, status, responseTime, new Date().toISOString());
  } catch (err) {
    console.error(`[Notification] Error processing endpoint notification for ${endpoint.name}:`, err.message);
  }

  return { status, responseTime, errorMessage };
}
|
||||
|
||||
/**
 * (Re)schedule the periodic check loop for one endpoint, replacing any
 * existing schedule. Inactive endpoints are only unscheduled.
 *
 * BUG FIX: the previous implementation put the interval in the seconds
 * field of a 6-field cron expression ("*&#47;N * * * * *"). That field only
 * spans 0-59, so any interval of 60s or more - including the 300s default -
 * effectively fired once per minute. A plain setInterval honours arbitrary
 * intervals exactly; the stored handle keeps the same `{ stop() }` shape
 * the rest of this module relies on.
 */
async function scheduleEndpoint(endpoint) {
  // Replace any existing schedule for this endpoint.
  if (scheduledTasks.has(endpoint.id)) {
    scheduledTasks.get(endpoint.id).stop();
  }

  if (!endpoint.active) return;

  // Clamp to a 30s floor; fall back to 300s when the stored value is unusable.
  const minInterval = 30;
  const interval = Math.max(minInterval, parseInt(endpoint.interval, 10) || 300);

  const runCheck = () => {
    performCheck(endpoint).catch((err) => console.error(`Check failed for ${endpoint.name}:`, err));
  };

  const timer = setInterval(runCheck, interval * 1000);
  // Preserve the previous runOnInit behavior: first check fires immediately.
  runCheck();

  scheduledTasks.set(endpoint.id, { stop: () => clearInterval(timer) });
  console.log(`Scheduled ${endpoint.name} every ${interval}s`);
}
|
||||
|
||||
/**
 * Schedule the check loop for every active endpoint (called at startup).
 */
async function scheduleAllEndpoints() {
  const db = getDatabase();
  const activeEndpoints = await db.all('SELECT * FROM endpoints WHERE active = 1');

  // Sequential on purpose: scheduleEndpoint is awaited one at a time.
  for (const activeEndpoint of activeEndpoints) {
    await scheduleEndpoint(activeEndpoint);
  }

  console.log(`Scheduled ${activeEndpoints.length} endpoints`);
}
|
||||
|
||||
/**
 * Stop and forget the scheduled check loop for an endpoint, if one exists.
 * Safe to call for endpoints that were never scheduled.
 */
function stopScheduling(endpointId) {
  const task = scheduledTasks.get(endpointId);
  if (!task) return;

  task.stop();
  scheduledTasks.delete(endpointId);
  console.log(`Stopped scheduling endpoint ${endpointId}`);
}
|
||||
|
||||
// Public surface: socket wiring, the one-shot check, and schedule management.
module.exports = {
  setSocket,
  performCheck,
  scheduleEndpoint,
  scheduleAllEndpoints,
  stopScheduling,
};
|
||||
516
backend/src/services/notificationService.js
Normal file
516
backend/src/services/notificationService.js
Normal file
@@ -0,0 +1,516 @@
|
||||
const { getDatabase, runInTransaction } = require('../models/database');
|
||||
const { getSetting, setSettings } = require('./settingsService');
|
||||
const { sendMail, registerSmtpFailure } = require('./smtpService');
|
||||
const { renderTemplate } = require('./notificationTemplates');
|
||||
|
||||
// All notification event types this service can queue and deliver.
// Endpoint events are emitted by processEndpointTransition; incident
// events come in via queueIncidentNotification.
const EVENT_TYPES = [
  'endpoint_down',
  'endpoint_degraded',
  'endpoint_recovered',
  'incident_created',
  'incident_updated',
  'incident_resolved',
];

// Interval handle for the background delivery worker (set by
// initializeNotificationWorker); null until the worker is started.
let workerTimer = null;
// Re-entrancy guard so overlapping worker ticks never process the
// same queued deliveries twice.
let workerRunning = false;
|
||||
|
||||
/**
 * Normalize a Date, epoch-millis number, or date string to an ISO-8601
 * string. Defaults to "now" when called without an argument.
 */
function toIso(value = Date.now()) {
  if (value instanceof Date) return value.toISOString();
  return new Date(value).toISOString();
}
|
||||
|
||||
/**
 * Map a notification event type to the email_notifications column that
 * gates it. All incident events share notify_on_incident.
 */
function mapEventToPreferenceField(eventType) {
  switch (eventType) {
    case 'endpoint_down':
      return 'notify_on_down';
    case 'endpoint_degraded':
      return 'notify_on_degraded';
    case 'endpoint_recovered':
      return 'notify_on_recovery';
    default:
      return 'notify_on_incident';
  }
}
|
||||
|
||||
/**
 * Create the default 'all'-scope notification row for a user if it does
 * not exist yet. Admins are auto-subscribed when the
 * notificationsAutoSubscribeAdmins setting is '1'; everyone else starts
 * with all preferences off (row active, flags 0).
 */
async function ensureUserNotificationDefaults(userId, role) {
  const database = getDatabase();
  const defaultsRow = await database.get(
    "SELECT id FROM email_notifications WHERE user_id = ? AND scope_type = 'all' AND endpoint_id IS NULL AND category_id IS NULL",
    [userId]
  );
  if (defaultsRow) return;

  const autoSubscribe = String(await getSetting('notificationsAutoSubscribeAdmins', '1')) === '1';
  let enabledFlag = 0;
  if (role === 'admin' && autoSubscribe) {
    enabledFlag = 1;
  }

  await database.run(
    `INSERT INTO email_notifications
       (user_id, endpoint_id, category_id, scope_type, notify_on_down, notify_on_recovery, notify_on_degraded, notify_on_incident, active)
       VALUES (?, NULL, NULL, 'all', ?, ?, ?, ?, 1)`,
    [userId, enabledFlag, enabledFlag, enabledFlag, enabledFlag]
  );
}
|
||||
|
||||
/**
 * True when the given endpoint is currently inside a maintenance window.
 * Windows whose endpoint_id is NULL apply globally to every endpoint.
 * Falsy endpoint ids are never in maintenance.
 */
async function isEndpointInMaintenance(endpointId) {
  if (!endpointId) return false;

  const database = getDatabase();
  const nowIso = new Date().toISOString();
  const activeWindow = await database.get(
    `SELECT id FROM maintenance_windows
       WHERE (endpoint_id = ? OR endpoint_id IS NULL)
       AND start_time <= ?
       AND end_time >= ?
       LIMIT 1`,
    [endpointId, nowIso, nowIso]
  );
  return Boolean(activeWindow);
}
|
||||
|
||||
/**
 * Fetch the alert-tracking row for an endpoint, lazily creating a fresh
 * (no-alerts-yet) row on first use so callers always receive a state row.
 */
async function getEndpointAlertState(endpointId) {
  const database = getDatabase();

  const existing = await database.get('SELECT * FROM endpoint_alert_state WHERE endpoint_id = ?', [endpointId]);
  if (existing) return existing;

  await database.run(
    `INSERT INTO endpoint_alert_state (endpoint_id, last_status, consecutive_failures, updated_at)
     VALUES (?, NULL, 0, CURRENT_TIMESTAMP)`,
    [endpointId]
  );
  return database.get('SELECT * FROM endpoint_alert_state WHERE endpoint_id = ?', [endpointId]);
}
|
||||
|
||||
/**
 * Cooldown gate: true when no alert was ever sent, or when at least
 * cooldownMs has elapsed since the last one.
 */
function shouldEmitByCooldown(lastAlertAt, cooldownMs) {
  if (!lastAlertAt) return true;
  const elapsedMs = Date.now() - new Date(lastAlertAt).getTime();
  return elapsedMs >= cooldownMs;
}
|
||||
|
||||
/**
 * Core alerting state machine. Given the latest check result for an
 * endpoint, decide whether to queue a notification (down / degraded /
 * recovered / reminder) and persist the new alert state.
 *
 * Rules visible below:
 * - 'up' after down/degraded with a recorded outage start -> recovery event.
 * - 'down' only alerts after `threshold` consecutive failures, gated by
 *   the cooldown; while still down, periodic reminders go out every
 *   `reminderMinutes` (0 disables reminders).
 * - 'degraded' alerts once per transition, also cooldown-gated.
 * - Events during a maintenance window are suppressed, but state is
 *   still updated.
 *
 * @param {object} endpoint row from the endpoints table (id, name, url, ...)
 * @param {'up'|'down'|'degraded'} status latest check outcome
 * @param {number} responseTime latest response time (ms)
 * @param {string} checkedAt ISO timestamp of the check (defaults to now)
 */
async function processEndpointTransition(endpoint, status, responseTime, checkedAt) {
  const db = getDatabase();
  const state = await getEndpointAlertState(endpoint.id);

  const threshold = Number(await getSetting('notificationFailureThreshold', '2')) || 2;
  const cooldownMs = Number(await getSetting('notificationCooldownMs', '900000')) || 900000;
  // NOTE(review): `|| 60` coerces a stored 0 (reminders disabled, which
  // setNotificationDefaults allows) back to 60 - confirm intent.
  const reminderMinutes = Number(await getSetting('notificationReminderMinutes', '60')) || 60;

  const inMaintenance = await isEndpointInMaintenance(endpoint.id);
  const timestamp = checkedAt || new Date().toISOString();
  // Shared payload for every event queued from this transition.
  const basePayload = {
    endpoint: {
      id: endpoint.id,
      name: endpoint.name,
      status,
      responseTime,
      url: endpoint.url,
    },
    timestamp,
    maintenance: inMaintenance,
  };

  const lastStatus = state.last_status || null;
  // Failure streak only accumulates while down; any other status resets it.
  let nextFailures = status === 'down' ? Number(state.consecutive_failures || 0) + 1 : 0;

  if (status === 'up') {
    // Recovery is only announced if an outage was actually recorded;
    // keying the event on outage_started_at dedupes repeat 'up' checks.
    const wasOutage = lastStatus === 'down' || lastStatus === 'degraded';
    if (wasOutage && state.outage_started_at) {
      await queueNotificationEvent('endpoint_recovered', {
        endpointId: endpoint.id,
        eventKey: `endpoint_recovered:${endpoint.id}:${state.outage_started_at}`,
        payload: {
          ...basePayload,
          message: `${endpoint.name} is responding normally again.`,
        },
      });
    }

    // Reset all outage tracking.
    await db.run(
      `UPDATE endpoint_alert_state
         SET last_status = 'up',
             consecutive_failures = 0,
             outage_started_at = NULL,
             last_recovery_sent_at = CURRENT_TIMESTAMP,
             last_transition_at = CURRENT_TIMESTAMP,
             updated_at = CURRENT_TIMESTAMP
       WHERE endpoint_id = ?`,
      [endpoint.id]
    );
    return;
  }

  if (status === 'down') {
    // Preserve the first failure time across repeated down checks.
    const outageStart = state.outage_started_at || toIso();
    const transitionToDown = lastStatus !== 'down' && nextFailures >= threshold;
    const cooldownAllowed = shouldEmitByCooldown(state.last_alert_sent_at, cooldownMs);

    if (transitionToDown && cooldownAllowed) {
      // Maintenance suppresses the email but not the state transition.
      if (!inMaintenance) {
        await queueNotificationEvent('endpoint_down', {
          endpointId: endpoint.id,
          eventKey: `endpoint_down:${endpoint.id}:${outageStart}`,
          payload: {
            ...basePayload,
            message: `${endpoint.name} has failed ${nextFailures} consecutive health checks.`,
          },
        });
      }

      await db.run(
        `UPDATE endpoint_alert_state
           SET last_status = 'down',
               consecutive_failures = ?,
               outage_started_at = ?,
               last_alert_sent_at = CURRENT_TIMESTAMP,
               last_transition_at = CURRENT_TIMESTAMP,
               updated_at = CURRENT_TIMESTAMP
         WHERE endpoint_id = ?`,
        [nextFailures, outageStart, endpoint.id]
      );
      return;
    }

    // Ongoing outage: send a reminder when the reminder window elapsed.
    // The Date.now()/reminderMs bucket in the event key dedupes reminders
    // within the same window.
    const reminderMs = reminderMinutes > 0 ? reminderMinutes * 60 * 1000 : 0;
    const lastReminderAt = state.last_reminder_sent_at ? new Date(state.last_reminder_sent_at).getTime() : 0;
    if (!inMaintenance && reminderMs > 0 && state.outage_started_at && Date.now() - lastReminderAt >= reminderMs && lastStatus === 'down') {
      await queueNotificationEvent('endpoint_down', {
        endpointId: endpoint.id,
        eventKey: `endpoint_down:reminder:${endpoint.id}:${Math.floor(Date.now() / reminderMs)}`,
        payload: {
          ...basePayload,
          reminder: true,
          message: `${endpoint.name} remains unavailable.`,
        },
      });
      await db.run(
        `UPDATE endpoint_alert_state
           SET consecutive_failures = ?,
               last_reminder_sent_at = CURRENT_TIMESTAMP,
               updated_at = CURRENT_TIMESTAMP
         WHERE endpoint_id = ?`,
        [nextFailures, endpoint.id]
      );
      return;
    }

    // Below threshold (or cooldown/maintenance suppressed): just record
    // the streak and the (possibly new) outage start.
    await db.run(
      `UPDATE endpoint_alert_state
         SET consecutive_failures = ?,
             outage_started_at = COALESCE(outage_started_at, ?),
             updated_at = CURRENT_TIMESTAMP
       WHERE endpoint_id = ?`,
      [nextFailures, outageStart, endpoint.id]
    );
    return;
  }

  if (status === 'degraded') {
    // One alert per entry into the degraded state (minute-bucketed key),
    // still subject to cooldown and maintenance suppression.
    const transitionToDegraded = lastStatus !== 'degraded';
    if (transitionToDegraded && !inMaintenance && shouldEmitByCooldown(state.last_alert_sent_at, cooldownMs)) {
      await queueNotificationEvent('endpoint_degraded', {
        endpointId: endpoint.id,
        eventKey: `endpoint_degraded:${endpoint.id}:${Math.floor(Date.now() / 60000)}`,
        payload: {
          ...basePayload,
          message: `${endpoint.name} is experiencing degraded performance.`,
        },
      });
    }

    // CASE WHEN flags keep alert/transition timestamps untouched on
    // repeated degraded checks.
    await db.run(
      `UPDATE endpoint_alert_state
         SET last_status = 'degraded',
             consecutive_failures = 0,
             outage_started_at = COALESCE(outage_started_at, ?),
             last_alert_sent_at = CASE WHEN ? THEN CURRENT_TIMESTAMP ELSE last_alert_sent_at END,
             last_transition_at = CASE WHEN ? THEN CURRENT_TIMESTAMP ELSE last_transition_at END,
             updated_at = CURRENT_TIMESTAMP
       WHERE endpoint_id = ?`,
      [toIso(), transitionToDegraded ? 1 : 0, transitionToDegraded ? 1 : 0, endpoint.id]
    );
  }
}
|
||||
|
||||
/**
 * Fan a notification event out to every matching recipient as rows in
 * notification_deliveries, then nudge the delivery worker.
 *
 * INSERT OR IGNORE keyed on event_key means re-processing the same event
 * cannot create duplicate deliveries for a recipient. Unknown event
 * types and events with no recipients are silently dropped.
 *
 * @param {string} eventType one of EVENT_TYPES
 * @param {{endpointId?: number, incidentId?: number, eventKey: string, payload?: object}} opts
 */
async function queueNotificationEvent(eventType, { endpointId = null, incidentId = null, eventKey, payload = {} }) {
  if (!EVENT_TYPES.includes(eventType)) return;

  const recipients = await resolveRecipients({ eventType, endpointId, incidentId });
  if (recipients.length === 0) return;

  // Cleanup: the original also fetched `const db = getDatabase()` here but
  // never used it - all writes go through the transaction's handle.
  const serialized = JSON.stringify(payload || {});

  await runInTransaction(async (database) => {
    for (const recipient of recipients) {
      await database.run(
        `INSERT OR IGNORE INTO notification_deliveries
           (event_type, event_key, recipient_email, recipient_name, user_id, endpoint_id, incident_id, status, attempt_count, max_attempts, next_attempt_at, payload_json, created_at, updated_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, 'queued', 0, 5, CURRENT_TIMESTAMP, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)`,
        [
          eventType,
          eventKey,
          recipient.email,
          recipient.name || null,
          recipient.userId || null,
          endpointId,
          incidentId,
          serialized,
        ]
      );
    }
  });

  triggerWorkerSoon();
}
|
||||
|
||||
/**
 * Compute the recipient list for an event.
 *
 * A user receives the event when:
 *  1. their 'all'-scope row is active and has the event's preference
 *     column set, AND
 *  2. if they have any active endpoint/category scoped rows, the event's
 *     endpoint (or one of the incident's endpoints) matches one of those
 *     scopes; with no scoped rows, the 'all' row alone is enough.
 * Active rows from notification_extra_recipients are always appended.
 *
 * @returns {Promise<Array<{userId: number|null, email: string, name: string|null}>>}
 */
async function resolveRecipients({ eventType, endpointId, incidentId }) {
  const db = getDatabase();
  const preferenceField = mapEventToPreferenceField(eventType);
  const users = await db.all('SELECT id, email, name, role, active FROM users WHERE active = 1');
  const recipients = [];

  for (const user of users) {
    // Lazily create the 'all' row so the opt-in check below always has
    // something to read.
    await ensureUserNotificationDefaults(user.id, user.role);

    const allScope = await db.get(
      `SELECT * FROM email_notifications
       WHERE user_id = ? AND scope_type = 'all' AND endpoint_id IS NULL AND category_id IS NULL
       LIMIT 1`,
      [user.id]
    );

    // Master opt-in gate: row must be active and the per-event flag set.
    if (!allScope || Number(allScope.active) !== 1 || Number(allScope[preferenceField]) !== 1) {
      continue;
    }

    const selectedRows = await db.all(
      `SELECT scope_type, endpoint_id, category_id
       FROM email_notifications
       WHERE user_id = ? AND scope_type IN ('endpoint', 'category') AND active = 1`,
      [user.id]
    );

    const hasScopedRules = selectedRows.length > 0;
    if (hasScopedRules) {
      // Split scoped rules into endpoint- and category-id sets for O(1)
      // membership checks.
      const endpointIds = new Set();
      const categoryIds = new Set();
      selectedRows.forEach((row) => {
        if (row.scope_type === 'endpoint' && row.endpoint_id) endpointIds.add(Number(row.endpoint_id));
        if (row.scope_type === 'category' && row.category_id) categoryIds.add(Number(row.category_id));
      });

      let match = false;
      // Direct endpoint subscription.
      if (endpointId && endpointIds.has(Number(endpointId))) {
        match = true;
      }

      // Category subscription via the endpoint's group.
      if (!match && endpointId && categoryIds.size > 0) {
        const endpoint = await db.get('SELECT group_id FROM endpoints WHERE id = ?', [endpointId]);
        if (endpoint?.group_id && categoryIds.has(Number(endpoint.group_id))) {
          match = true;
        }
      }

      // Incident events match when ANY affected endpoint (or its group)
      // is subscribed.
      if (!match && incidentId) {
        const incidentEndpoints = await db.all('SELECT endpoint_id FROM incident_endpoints WHERE incident_id = ?', [incidentId]);
        for (const row of incidentEndpoints) {
          if (endpointIds.has(Number(row.endpoint_id))) {
            match = true;
            break;
          }
          if (categoryIds.size > 0) {
            const endpoint = await db.get('SELECT group_id FROM endpoints WHERE id = ?', [row.endpoint_id]);
            if (endpoint?.group_id && categoryIds.has(Number(endpoint.group_id))) {
              match = true;
              break;
            }
          }
        }
      }

      if (!match) continue;
    }

    recipients.push({ userId: user.id, email: user.email, name: user.name || null });
  }

  // Extra (non-user) recipients always receive every event type.
  const extras = await db.all('SELECT id, email, name FROM notification_extra_recipients WHERE active = 1');
  for (const extra of extras) {
    recipients.push({ userId: null, email: extra.email, name: extra.name || null });
  }

  return recipients;
}
|
||||
|
||||
/**
 * Rehydrate a queued delivery's payload before rendering the email:
 * re-read current endpoint/incident rows (so names/titles are fresh at
 * send time, not enqueue time) and attach the status-page URL and a
 * timestamp fallback.
 *
 * @param {object} delivery row from notification_deliveries
 * @returns {Promise<object>} the enriched payload
 */
async function attachEventContext(delivery) {
  const db = getDatabase();
  const payload = delivery.payload_json ? JSON.parse(delivery.payload_json) : {};

  if (delivery.endpoint_id) {
    const endpoint = await db.get('SELECT id, name, url FROM endpoints WHERE id = ?', [delivery.endpoint_id]);
    if (endpoint) {
      payload.endpoint = {
        ...(payload.endpoint || {}),
        id: endpoint.id,
        name: endpoint.name,
        url: endpoint.url,
      };
    }
  }

  if (delivery.incident_id) {
    const incident = await db.get('SELECT id, title, status, updated_at FROM incidents WHERE id = ?', [delivery.incident_id]);
    if (incident) {
      payload.incident = incident;
    }
  }

  // BUGFIX: the original called getSettingsMap(), which this module never
  // imports (only getSetting/setSettings are required at the top), so every
  // delivery would throw a ReferenceError here. Use the imported getSetting
  // with the same fallback chain instead.
  const storedUrl = await getSetting('publicUrl', '');
  const publicUrl = String(storedUrl || process.env.PUBLIC_STATUS_PAGE_URL || process.env.FRONTEND_URL || 'http://localhost:3000');
  payload.statusPageUrl = publicUrl;
  payload.timestamp = payload.timestamp || new Date().toISOString();
  return payload;
}
|
||||
|
||||
/**
 * Exponential retry backoff: 30s, 60s, 120s, ... capped at 30 minutes.
 * attemptCount is 1-based; values <= 1 yield the 30s base delay.
 */
function getBackoffMs(attemptCount) {
  const baseMs = 30 * 1000;
  const capMs = 30 * 60 * 1000;
  const exponent = Math.max(0, attemptCount - 1);
  return Math.min(baseMs * 2 ** exponent, capMs);
}
|
||||
|
||||
/**
 * Attempt to send one queued delivery.
 *
 * Success: mark the row 'sent', bump attempt_count, clear the error.
 * Failure: bump attempt_count; re-queue with exponential backoff until
 * max_attempts is reached, at which point the row is marked 'failed'
 * permanently. Every failure is also reported to registerSmtpFailure
 * for the health counters.
 *
 * @param {object} delivery row from notification_deliveries
 */
async function processNotificationDelivery(delivery) {
  const db = getDatabase();
  const payload = await attachEventContext(delivery);
  const template = renderTemplate(delivery.event_type, payload);

  try {
    await sendMail({
      to: delivery.recipient_email,
      subject: template.subject,
      text: template.text,
      html: template.html,
    });

    await db.run(
      `UPDATE notification_deliveries
       SET status = 'sent',
           attempt_count = attempt_count + 1,
           sent_at = CURRENT_TIMESTAMP,
           error_reason = NULL,
           updated_at = CURRENT_TIMESTAMP
       WHERE id = ?`,
      [delivery.id]
    );
  } catch (error) {
    const nextAttempt = Number(delivery.attempt_count || 0) + 1;
    const maxAttempts = Number(delivery.max_attempts || 5);
    const shouldFail = nextAttempt >= maxAttempts;
    const retryDelay = getBackoffMs(nextAttempt);

    // The CASE WHEN flag freezes next_attempt_at on terminal failure;
    // otherwise it is pushed out by the backoff (in whole seconds).
    // Error text is truncated to 1000 chars to bound row size.
    await db.run(
      `UPDATE notification_deliveries
       SET status = ?,
           attempt_count = ?,
           error_reason = ?,
           next_attempt_at = CASE WHEN ? THEN next_attempt_at ELSE datetime('now', '+' || ? || ' seconds') END,
           updated_at = CURRENT_TIMESTAMP
       WHERE id = ?`,
      [
        shouldFail ? 'failed' : 'queued',
        nextAttempt,
        (error.message || 'SMTP send failed').slice(0, 1000),
        shouldFail ? 1 : 0,
        Math.ceil(retryDelay / 1000),
        delivery.id,
      ]
    );

    await registerSmtpFailure(error);
  }
}
|
||||
|
||||
/**
 * One worker tick: pull up to `limit` queued deliveries whose
 * next_attempt_at is due and process them sequentially. The
 * workerRunning flag makes ticks non-reentrant; overlapping calls
 * return immediately.
 */
async function processDueDeliveries(limit = 20) {
  if (workerRunning) return;

  workerRunning = true;
  try {
    const database = getDatabase();
    const dueRows = await database.all(
      `SELECT * FROM notification_deliveries
       WHERE status = 'queued'
       AND datetime(next_attempt_at) <= datetime('now')
       ORDER BY created_at ASC
       LIMIT ?`,
      [limit]
    );

    for (const row of dueRows) {
      await processNotificationDelivery(row);
    }
  } finally {
    workerRunning = false;
  }
}
|
||||
|
||||
/**
 * Schedule an immediate (50ms) one-shot worker run after new deliveries
 * are queued, instead of waiting for the 5s polling interval.
 */
function triggerWorkerSoon() {
  const timer = setTimeout(() => {
    processDueDeliveries().catch((error) => {
      console.error('Notification worker error:', error);
    });
  }, 50);
  // Consistency fix: unref the one-shot timer like the polling timer in
  // initializeNotificationWorker, so it cannot delay process shutdown.
  if (typeof timer.unref === 'function') {
    timer.unref();
  }
}
|
||||
|
||||
/**
 * Start the background delivery worker (idempotent). Polls for due
 * deliveries every 5 seconds; the interval is unref'd so it never keeps
 * the process alive during shutdown.
 */
function initializeNotificationWorker() {
  if (workerTimer) return;

  const tick = () => {
    processDueDeliveries().catch((error) => {
      console.error('Notification worker error:', error);
    });
  };

  workerTimer = setInterval(tick, 5000);
  if (typeof workerTimer.unref === 'function') {
    workerTimer.unref();
  }
}
|
||||
|
||||
/**
 * Queue an incident lifecycle notification. A Date.now() suffix keeps
 * each call's event key unique, so successive updates to the same
 * incident are each delivered (no OR IGNORE dedupe across calls).
 */
async function queueIncidentNotification(eventType, incidentId, message = '') {
  const uniqueSuffix = Date.now();
  const eventKey = `${eventType}:incident:${incidentId}:${uniqueSuffix}`;

  await queueNotificationEvent(eventType, {
    incidentId,
    eventKey,
    payload: {
      message,
      timestamp: new Date().toISOString(),
    },
  });
}
|
||||
|
||||
/**
 * Summarize SMTP delivery health from the persisted counters maintained
 * by sendMail/registerSmtpFailure. `healthy` means the failure streak
 * is currently zero.
 */
async function getNotificationHealth() {
  const failureStreak = Number(await getSetting('smtpFailureStreak', '0')) || 0;
  const lastSuccessfulSendAt = await getSetting('smtpLastSuccessAt', '');
  const lastError = await getSetting('smtpLastError', '');
  const lastErrorAt = await getSetting('smtpLastErrorAt', '');

  return {
    lastSuccessfulSendAt,
    lastError,
    lastErrorAt,
    failureStreak,
    healthy: failureStreak === 0,
  };
}
|
||||
|
||||
/**
 * Persist the notification tuning knobs, clamped to sane minimums:
 * threshold >= 1, cooldown >= 1000ms, reminderMinutes >= 0 (0 = reminders
 * disabled). Values are stored as strings in the settings table.
 */
async function setNotificationDefaults({ autoSubscribeAdmins, failureThreshold, cooldownMs, reminderMinutes }) {
  const threshold = Math.max(1, Number(failureThreshold) || 2);
  const cooldown = Math.max(1000, Number(cooldownMs) || 900000);
  const reminder = Math.max(0, Number(reminderMinutes) || 0);

  await setSettings({
    notificationsAutoSubscribeAdmins: autoSubscribeAdmins ? '1' : '0',
    notificationFailureThreshold: String(threshold),
    notificationCooldownMs: String(cooldown),
    notificationReminderMinutes: String(reminder),
  });
}
|
||||
|
||||
/**
 * Read the notification tuning knobs back with defaults.
 *
 * BUGFIX: the old `Number(...) || 60` for reminderMinutes coerced a
 * stored '0' (reminders explicitly disabled - a value
 * setNotificationDefaults allows via Math.max(0, ...)) back to 60.
 * `|| default` remains correct for threshold/cooldown, where 0 is not a
 * valid stored value.
 */
async function getNotificationDefaults() {
  const reminderRaw = Number(await getSetting('notificationReminderMinutes', '60'));

  return {
    autoSubscribeAdmins: String(await getSetting('notificationsAutoSubscribeAdmins', '1')) === '1',
    failureThreshold: Number(await getSetting('notificationFailureThreshold', '2')) || 2,
    cooldownMs: Number(await getSetting('notificationCooldownMs', '900000')) || 900000,
    reminderMinutes: Number.isFinite(reminderRaw) ? Math.max(0, reminderRaw) : 60,
  };
}
|
||||
|
||||
// Public API: worker lifecycle, event queuing, and notification settings.
module.exports = {
  EVENT_TYPES,
  initializeNotificationWorker,
  processEndpointTransition,
  queueNotificationEvent,
  queueIncidentNotification,
  ensureUserNotificationDefaults,
  getNotificationHealth,
  getNotificationDefaults,
  setNotificationDefaults,
};
|
||||
104
backend/src/services/notificationTemplates.js
Normal file
104
backend/src/services/notificationTemplates.js
Normal file
@@ -0,0 +1,104 @@
|
||||
/**
 * Render a timestamp as "YYYY-MM-DD HH:MM:SS.mmm UTC".
 * Missing or unparseable values yield 'Unknown'.
 */
function formatTimestamp(value) {
  if (!value) return 'Unknown';

  const parsed = value instanceof Date ? value : new Date(value);
  if (Number.isNaN(parsed.getTime())) return 'Unknown';

  const iso = parsed.toISOString();
  return iso.replace('T', ' ').replace('Z', ' UTC');
}
|
||||
|
||||
/**
 * Compose the email subject line for an event. Falls back to generic
 * names ('Endpoint' / 'Incident') when the payload lacks them, and to a
 * neutral subject for unknown event types.
 */
function buildSubject(eventType, payload) {
  const endpointName = payload.endpoint?.name || 'Endpoint';
  const incidentTitle = payload.incident?.title || 'Incident';

  const subjectByEvent = {
    endpoint_down: `[Status] ${endpointName} is down`,
    endpoint_degraded: `[Status] ${endpointName} is degraded`,
    endpoint_recovered: `[Status] ${endpointName} recovered`,
    incident_created: `[Incident] ${incidentTitle}`,
    incident_updated: `[Incident Update] ${incidentTitle}`,
    incident_resolved: `[Resolved] ${incidentTitle}`,
  };

  return Object.hasOwn(subjectByEvent, eventType)
    ? subjectByEvent[eventType]
    : '[Status] Notification';
}
|
||||
|
||||
/**
 * Assemble the plain-text detail lines shared by the text and HTML email
 * bodies. Only sections present in the payload appear, in a fixed order:
 * endpoint details, incident details, message, timestamp, maintenance
 * note, status-page link, and (for down events) the reminder note.
 */
function buildLines(eventType, payload) {
  const lines = [];
  const { endpoint, incident } = payload;

  if (endpoint) {
    lines.push(`Endpoint: ${endpoint.name}`);
    if (endpoint.status) {
      lines.push(`Status: ${endpoint.status}`);
    }
    if (typeof endpoint.responseTime === 'number') {
      lines.push(`Response time: ${endpoint.responseTime} ms`);
    }
  }

  if (incident) {
    lines.push(`Incident: ${incident.title}`);
    if (incident.status) {
      lines.push(`Incident status: ${incident.status}`);
    }
  }

  if (payload.message) {
    lines.push(`Message: ${payload.message}`);
  }
  if (payload.timestamp) {
    lines.push(`Timestamp: ${formatTimestamp(payload.timestamp)}`);
  }
  if (payload.maintenance === true) {
    lines.push('Maintenance: This event happened during a maintenance window.');
  }

  const pageUrl = payload.statusPageUrl || '';
  if (pageUrl) {
    lines.push(`Status page: ${pageUrl}`);
  }

  if (eventType === 'endpoint_down' && payload.reminder === true) {
    lines.push('Reminder: The outage is still ongoing.');
  }

  return lines;
}
|
||||
|
||||
/**
 * Build the email subject plus plain-text and HTML bodies for an event.
 * All dynamic text in the HTML variant is passed through escapeHtml.
 * @returns {{subject: string, text: string, html: string}}
 */
function renderTemplate(eventType, payload) {
  const subject = buildSubject(eventType, payload);
  const lines = buildLines(eventType, payload);
  const text = `${subject}\n\n${lines.join('\n')}`;

  let htmlRows = '';
  for (const line of lines) {
    htmlRows += `<tr><td style="padding:6px 0;font-size:14px;color:#334155;">${escapeHtml(line)}</td></tr>`;
  }

  const html = `
<div style="font-family:Verdana,Segoe UI,sans-serif;background:#f8fafc;padding:24px;">
  <table role="presentation" style="max-width:640px;width:100%;margin:0 auto;background:#ffffff;border:1px solid #e2e8f0;border-radius:12px;padding:20px;">
    <tr>
      <td style="font-size:20px;font-weight:700;color:#0f172a;padding-bottom:12px;">${escapeHtml(subject)}</td>
    </tr>
    ${htmlRows}
  </table>
</div>
`;

  return { subject, text, html };
}
|
||||
|
||||
/**
 * Escape a value for safe interpolation into HTML email markup.
 * BUGFIX: the replacement targets had degraded into identity mappings
 * (e.g. '&' -> '&'), making this a no-op and leaving dynamic text
 * unescaped in the HTML body. Restore the proper HTML entities;
 * '&' is replaced first so other entities are not double-escaped.
 */
function escapeHtml(value) {
  return String(value)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
|
||||
|
||||
// Only the top-level renderer is public; subject/line builders stay internal.
module.exports = {
  renderTemplate,
};
|
||||
165
backend/src/services/settingsService.js
Normal file
165
backend/src/services/settingsService.js
Normal file
@@ -0,0 +1,165 @@
|
||||
const { getDatabase } = require('../models/database');
|
||||
const { encrypt, decrypt } = require('../utils/crypto');
|
||||
|
||||
// Supported SMTP transport security modes (consumed by buildTransportOptions
// in smtpService): 'none' = plaintext, 'starttls' = upgrade, 'tls' = implicit TLS.
const TLS_MODES = ['none', 'starttls', 'tls'];
|
||||
|
||||
/**
 * Best-effort JSON decode: strings that parse as JSON become values,
 * everything else (including non-strings) is returned untouched.
 */
function parseMaybeJson(value) {
  if (typeof value !== 'string') return value;

  let decoded;
  try {
    decoded = JSON.parse(value);
  } catch {
    decoded = value;
  }
  return decoded;
}
|
||||
|
||||
/**
 * Load the entire settings table as a key -> value object.
 * Values stored as JSON strings are decoded; plain strings pass through.
 */
async function getSettingsMap() {
  const database = getDatabase();
  const rows = await database.all('SELECT key, value FROM settings');

  const map = {};
  for (const { key, value } of rows) {
    map[key] = parseMaybeJson(value);
  }
  return map;
}
|
||||
|
||||
/**
 * Read a single setting, returning `fallback` when the key is absent.
 * Stored JSON strings are decoded the same way as in getSettingsMap.
 */
async function getSetting(key, fallback = null) {
  const database = getDatabase();
  const row = await database.get('SELECT value FROM settings WHERE key = ?', [key]);
  return row ? parseMaybeJson(row.value) : fallback;
}
|
||||
|
||||
/**
 * Upsert each key/value pair into the settings table. Non-string values
 * are JSON-serialized so getSetting/getSettingsMap can decode them back.
 */
async function setSettings(updates) {
  const database = getDatabase();

  for (const [key, value] of Object.entries(updates)) {
    const stored = typeof value === 'string' ? value : JSON.stringify(value);
    await database.run(
      'INSERT OR REPLACE INTO settings (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)',
      [key, stored]
    );
  }
}
|
||||
|
||||
/**
 * Convert a value to a finite number, or return `fallback` for
 * NaN/Infinity/unconvertible input.
 */
function coerceNumber(value, fallback) {
  const numeric = Number(value);
  if (Number.isFinite(numeric)) return numeric;
  return fallback;
}
|
||||
|
||||
/**
 * Assemble the SMTP configuration from stored settings, with safe
 * defaults (port 587, starttls, 10s timeout). The password is only
 * decrypted and attached when includePassword is set; hasSmtpPassword
 * always reports whether a stored (encrypted) password exists.
 */
async function getSmtpConfig({ includePassword = false } = {}) {
  const settings = await getSettingsMap();
  const storedCipher = String(settings.smtpPassword || '');

  const config = {
    smtpHost: String(settings.smtpHost || ''),
    smtpPort: coerceNumber(settings.smtpPort, 587),
    smtpUser: String(settings.smtpUser || ''),
    smtpFromEmail: String(settings.smtpFromEmail || ''),
    smtpFromName: String(settings.smtpFromName || ''),
    smtpTlsMode: TLS_MODES.includes(settings.smtpTlsMode) ? settings.smtpTlsMode : 'starttls',
    smtpTimeoutMs: coerceNumber(settings.smtpTimeoutMs, 10000),
    hasSmtpPassword: Boolean(storedCipher),
  };

  if (includePassword) {
    config.smtpPassword = storedCipher ? decrypt(storedCipher) : '';
    config.smtpPasswordEncrypted = storedCipher;
  }

  return config;
}
|
||||
|
||||
/**
 * Validate (and normalize) an SMTP configuration object.
 * When allowEmpty is set and no field at all is provided, the config is
 * treated as valid-but-unconfigured and no `normalized` object is
 * returned. Otherwise host/port/user/fromEmail/tlsMode/timeout must all
 * pass their checks.
 * @returns {{valid: boolean, error?: string, normalized?: object}}
 */
function validateSmtpConfig(input, { allowEmpty = true } = {}) {
  const host = (input.smtpHost || '').trim();
  const user = (input.smtpUser || '').trim();
  const fromEmail = (input.smtpFromEmail || '').trim();
  const fromName = (input.smtpFromName || '').trim();
  const port = coerceNumber(input.smtpPort, 0);
  const timeoutMs = coerceNumber(input.smtpTimeoutMs, 0);
  const tlsMode = (input.smtpTlsMode || 'starttls').trim().toLowerCase();

  // Completely blank config is acceptable when the caller allows it.
  const providedFields = [host, user, fromEmail, input.smtpPassword || '', fromName];
  if (allowEmpty && !providedFields.some(Boolean)) {
    return { valid: true };
  }

  const fail = (error) => ({ valid: false, error });

  if (!host) return fail('SMTP host is required when email notifications are configured.');
  if (!Number.isInteger(port) || port < 1 || port > 65535) {
    return fail('SMTP port must be between 1 and 65535.');
  }
  if (!user) return fail('SMTP username is required when email notifications are configured.');
  if (!fromEmail) return fail('From email is required for outgoing notifications.');
  if (!/^\S+@\S+\.\S+$/.test(fromEmail)) {
    return fail('From email must be a valid email address.');
  }
  if (!TLS_MODES.includes(tlsMode)) {
    return fail(`TLS mode must be one of: ${TLS_MODES.join(', ')}.`);
  }
  if (!Number.isInteger(timeoutMs) || timeoutMs < 1000 || timeoutMs > 120000) {
    return fail('SMTP timeout must be between 1000ms and 120000ms.');
  }

  return {
    valid: true,
    normalized: {
      smtpHost: host,
      smtpPort: port,
      smtpUser: user,
      smtpFromEmail: fromEmail,
      smtpFromName: fromName,
      smtpTlsMode: tlsMode,
      smtpTimeoutMs: timeoutMs,
    },
  };
}
|
||||
|
||||
/**
 * Validate and persist the SMTP configuration.
 *
 * Password handling: a newly provided plaintext password is encrypted
 * before storage; when none is provided and preservePassword is set, the
 * existing encrypted blob is kept verbatim (never re-encrypted).
 * Validation runs against the effective plaintext password so "keep the
 * stored password" still satisfies completeness checks.
 *
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function saveSmtpConfig(input, { preservePassword = true, allowEmpty = true } = {}) {
  const current = await getSmtpConfig({ includePassword: true });
  const providedPassword = typeof input.smtpPassword === 'string' ? input.smtpPassword : '';

  // Determine the plaintext password to use for validation
  const validationPlain = providedPassword || (preservePassword ? current.smtpPassword : '');

  const validation = validateSmtpConfig({ ...input, smtpPassword: validationPlain }, { allowEmpty });
  if (!validation.valid) {
    return { success: false, error: validation.error };
  }

  // `normalized` is absent for the valid-but-empty case; fall back to
  // defaults so the stored config is fully reset.
  const normalized = validation.normalized || {
    smtpHost: '',
    smtpPort: 587,
    smtpUser: '',
    smtpFromEmail: '',
    smtpFromName: '',
    smtpTlsMode: 'starttls',
    smtpTimeoutMs: 10000,
  };

  // Build the encrypted value without double‑encrypting the stored password
  let nextPasswordEncrypted = '';
  if (providedPassword) {
    nextPasswordEncrypted = encrypt(providedPassword);
  } else if (preservePassword && current.smtpPasswordEncrypted) {
    nextPasswordEncrypted = current.smtpPasswordEncrypted; // keep raw encrypted blob
  }

  // Numeric fields are stored as strings, matching the settings table's
  // string-valued storage convention.
  await setSettings({
    smtpHost: normalized.smtpHost,
    smtpPort: String(normalized.smtpPort),
    smtpUser: normalized.smtpUser,
    smtpFromEmail: normalized.smtpFromEmail,
    smtpFromName: normalized.smtpFromName,
    smtpTlsMode: normalized.smtpTlsMode,
    smtpTimeoutMs: String(normalized.smtpTimeoutMs),
    smtpPassword: nextPasswordEncrypted,
  });

  return { success: true };
}
|
||||
|
||||
// Generic settings accessors plus SMTP-specific configuration helpers.
module.exports = {
  TLS_MODES,
  getSettingsMap,
  getSetting,
  setSettings,
  getSmtpConfig,
  validateSmtpConfig,
  saveSmtpConfig,
};
|
||||
83
backend/src/services/smtpService.js
Normal file
83
backend/src/services/smtpService.js
Normal file
@@ -0,0 +1,83 @@
|
||||
const nodemailer = require('nodemailer');
|
||||
const { getSmtpConfig, setSettings, getSetting } = require('./settingsService');
|
||||
|
||||
/**
 * Translate stored SMTP settings into nodemailer transport options.
 * TLS mode mapping: 'tls' -> implicit TLS (secure), 'starttls' ->
 * mandatory STARTTLS upgrade (requireTLS), anything else -> plaintext
 * (ignoreTLS). All three socket timeouts share smtpTimeoutMs.
 */
function buildTransportOptions(config) {
  const timeoutMs = Number(config.smtpTimeoutMs);
  const options = {
    host: config.smtpHost,
    port: Number(config.smtpPort),
    auth: {
      user: config.smtpUser,
      pass: config.smtpPassword,
    },
    connectionTimeout: timeoutMs,
    greetingTimeout: timeoutMs,
    socketTimeout: timeoutMs,
  };

  switch (config.smtpTlsMode) {
    case 'tls':
      return { ...options, secure: true };
    case 'starttls':
      return { ...options, secure: false, requireTLS: true };
    default:
      return { ...options, secure: false, ignoreTLS: true };
  }
}
|
||||
|
||||
/**
 * Build the From header value: "Name <email>" when a display name is
 * configured, otherwise just the address. Falls back to the SMTP user
 * when no dedicated from-address is set.
 */
function formatFromAddress(config) {
  const address = config.smtpFromEmail || config.smtpUser;
  return config.smtpFromName ? `${config.smtpFromName} <${address}>` : address;
}
|
||||
|
||||
/**
 * Confirm SMTP is fully configured and that nodemailer can connect and
 * authenticate. Throws on missing configuration or connection failure;
 * resolves with the loaded config (including the decrypted password).
 */
async function verifySmtpConnection() {
  const config = await getSmtpConfig({ includePassword: true });

  const fullyConfigured = config.smtpHost && config.smtpUser && config.smtpPassword && config.smtpFromEmail;
  if (!fullyConfigured) {
    throw new Error('SMTP is not fully configured. Host, username, password, and from email are required.');
  }

  const transport = nodemailer.createTransport(buildTransportOptions(config));
  await transport.verify();
  return config;
}
|
||||
|
||||
/**
 * Send one email through the configured SMTP server. On success the
 * health counters are reset (failure streak back to 0, success time
 * recorded). Throws when SMTP is unconfigured or the send fails;
 * callers are expected to report failures via registerSmtpFailure.
 */
async function sendMail({ to, subject, text, html }) {
  const config = await getSmtpConfig({ includePassword: true });

  const fullyConfigured = config.smtpHost && config.smtpUser && config.smtpPassword && config.smtpFromEmail;
  if (!fullyConfigured) {
    throw new Error('SMTP is not fully configured.');
  }

  const transport = nodemailer.createTransport(buildTransportOptions(config));
  const result = await transport.sendMail({
    from: formatFromAddress(config),
    to,
    subject,
    text,
    html,
  });

  await setSettings({
    smtpLastSuccessAt: new Date().toISOString(),
    smtpLastError: '',
    smtpFailureStreak: '0',
  });

  return result;
}
|
||||
|
||||
/**
 * Record one failed SMTP send: increment the failure streak and remember
 * the error message/time for the notification health report.
 */
async function registerSmtpFailure(error) {
  const previousStreak = Number((await getSetting('smtpFailureStreak', '0')) || 0);

  await setSettings({
    smtpLastError: error.message || 'Unknown SMTP error',
    smtpLastErrorAt: new Date().toISOString(),
    smtpFailureStreak: String(previousStreak + 1),
  });
}
|
||||
|
||||
// SMTP transport helpers: connection check, sending, failure bookkeeping.
module.exports = {
  verifySmtpConnection,
  sendMail,
  registerSmtpFailure,
};
|
||||
68
backend/src/utils/crypto.js
Normal file
68
backend/src/utils/crypto.js
Normal file
@@ -0,0 +1,68 @@
|
||||
const crypto = require('crypto');

// AES-256 in Galois/Counter Mode: authenticated encryption (confidentiality + integrity).
const ALGORITHM = 'aes-256-gcm';
// IV length in bytes. NOTE(review): NIST recommends a 12-byte IV for GCM;
// 16 bytes works but is non-standard — changing it now would break decryption
// of existing ciphertexts, so confirm before touching.
const IV_LENGTH = 16;
// GCM auth tag length in bytes (informational; the tag length is implied by
// the stored tag itself on decrypt).
const TAG_LENGTH = 16;
|
||||
|
||||
/**
 * Derive the 256-bit AES key from ENCRYPTION_KEY, falling back to JWT_SECRET
 * for backward compatibility. Using a dedicated key means a leaked JWT_SECRET
 * does not automatically expose data encrypted at rest (SMTP passwords, etc.).
 *
 * @returns {Buffer} 32-byte key (SHA-256 digest of the secret).
 * @throws {Error} When neither environment variable is set.
 */
function getEncryptionKey() {
  const keyMaterial = process.env.ENCRYPTION_KEY || process.env.JWT_SECRET;
  if (!keyMaterial) {
    throw new Error('ENCRYPTION_KEY (or JWT_SECRET) is required for encryption');
  }
  return crypto.createHash('sha256').update(keyMaterial).digest();
}
|
||||
|
||||
/**
 * Encrypt a UTF-8 plaintext string with AES-256-GCM.
 *
 * @param {string} text - Plaintext to encrypt; falsy input yields ''.
 * @returns {string} Hex string in the format "iv:ciphertext:authTag".
 */
function encrypt(text) {
  if (!text) return '';

  const iv = crypto.randomBytes(IV_LENGTH);
  const cipher = crypto.createCipheriv(ALGORITHM, getEncryptionKey(), iv);
  const ciphertext = Buffer.concat([cipher.update(text, 'utf8'), cipher.final()]);

  return [
    iv.toString('hex'),
    ciphertext.toString('hex'),
    cipher.getAuthTag().toString('hex'),
  ].join(':');
}
|
||||
|
||||
/**
 * Decrypt a value produced by encrypt(). Expected format (all hex):
 * "iv:ciphertext:authTag". Anything that does not match that format — or
 * fails key/authentication checks — is treated as a legacy unencrypted
 * value and returned unchanged.
 *
 * @param {string} encryptedText - Stored value; falsy input yields ''.
 * @returns {string} Decrypted plaintext, or the input itself for legacy values.
 */
function decrypt(encryptedText) {
  if (!encryptedText) return '';

  const segments = encryptedText.split(':');
  if (segments.length !== 3) {
    // Not our format: assume a legacy plaintext value and pass it through.
    return encryptedText;
  }

  const [ivHex, payloadHex, tagHex] = segments;
  try {
    const decipher = crypto.createDecipheriv(
      ALGORITHM,
      getEncryptionKey(),
      Buffer.from(ivHex, 'hex')
    );
    decipher.setAuthTag(Buffer.from(tagHex, 'hex'));

    let plaintext = decipher.update(payloadHex, 'hex', 'utf8');
    plaintext += decipher.final('utf8');
    return plaintext;
  } catch {
    // Wrong key, tampered data, or a legacy plaintext that happened to
    // contain colons — return the original value rather than failing.
    return encryptedText;
  }
}
|
||||
|
||||
// Public API: symmetric encryption helpers for secrets stored at rest.
module.exports = { encrypt, decrypt };
|
||||
64
backend/tests/smoke.api.test.js
Normal file
64
backend/tests/smoke.api.test.js
Normal file
@@ -0,0 +1,64 @@
|
||||
const test = require('node:test');
|
||||
const assert = require('node:assert/strict');
|
||||
|
||||
const { initializeDatabase } = require('../src/models/database');
|
||||
const { start } = require('../src/server');
|
||||
|
||||
/**
 * Resolve the HTTP base URL for a listening server (typically bound to an
 * ephemeral port via PORT=0).
 *
 * @param {import('node:net').Server} server - Server that is already listening.
 * @returns {string} Base URL, e.g. "http://127.0.0.1:54321".
 * @throws {Error} When the address is unavailable or is a UNIX socket path.
 */
function getBaseUrl(server) {
  const addr = server.address();
  const hasPort = addr !== null && typeof addr !== 'string';
  if (!hasPort) {
    throw new Error('Unable to resolve server address');
  }
  return `http://127.0.0.1:${addr.port}`;
}
|
||||
|
||||
test('smoke: core API routes are reachable', async (t) => {
  await initializeDatabase();
  process.env.PORT = '0'; // bind to an ephemeral port so parallel runs never collide

  const { server } = await start();
  const baseUrl = getBaseUrl(server);

  // POST a JSON payload without credentials; used by the auth-guard subtests.
  const postJson = (path, payload) =>
    fetch(`${baseUrl}${path}`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload),
    });

  try {
    await t.test('health endpoint responds', async () => {
      const res = await fetch(`${baseUrl}/health`);
      assert.equal(res.status, 200);
      const payload = await res.json();
      assert.equal(payload.status, 'ok');
    });

    await t.test('v1 status endpoint responds with expected shape', async () => {
      const res = await fetch(`${baseUrl}/api/v1/status.json`);
      assert.equal(res.status, 200);

      const payload = await res.json();
      assert.ok(payload.page);
      assert.ok(Array.isArray(payload.components));
      assert.ok(Array.isArray(payload.component_groups));
      assert.ok(Array.isArray(payload.incidents));
      assert.ok(Array.isArray(payload.scheduled_maintenances));
    });

    await t.test('admin endpoint create is protected', async () => {
      const res = await postJson('/api/admin/endpoints', {
        name: 'Smoke',
        url: 'https://example.com',
        type: 'http',
      });
      assert.equal(res.status, 401);
    });

    await t.test('admin incident create is protected', async () => {
      const res = await postJson('/api/admin/incidents', {
        title: 'Smoke incident',
        severity: 'degraded',
      });
      assert.equal(res.status, 401);
    });
  } finally {
    // Always shut the server down so the test process can exit cleanly.
    await new Promise((resolve, reject) => {
      server.close((err) => (err ? reject(err) : resolve()));
    });
  }
});
|
||||
Reference in New Issue
Block a user