// monitor.js — changelog for this revision:
// - Add rss_feeds + rss_items tables with indexes and a Hacker News default seed
// - Add 5-minute background RSS sync loop in monitor.js with 90-day prune
// - New /api/rss/feeds route for feed CRUD with immediate fetch on add
// - Rewrite /api/rss route with search, feed filter, pagination, read/bookmark PATCH
// - Full NewsFeed component rewrite: feed manager, search bar, filter pills,
//   read/unread tracking, bookmarks, favicons, auto-refresh with new-items badge
// - Remove placeholder widget; NewsFeed now spans 4 cols / 3 rows
// - Add rss-parser dependency to Dockerfile for the standalone monitor
const sqlite3 = require('sqlite3');
|
|
const { open } = require('sqlite');
|
|
const RSSParser = require('rss-parser');
|
|
// Node 18+ has global fetch built-in
|
|
|
|
// Fallback set of services to probe when the monitored_services table
// is missing or empty (see getServices below).
const DEFAULT_SERVICES = [
  { name: 'Website',   url: 'https://akkolli.net' },
  { name: 'Gitea',     url: 'https://code.akkolli.net' },
  { name: 'Nextcloud', url: 'http://host.docker.internal:6060' },
];
|
|
|
|
/**
 * Resolve the list of services to probe.
 * Reads name/url rows from monitored_services; when the query throws
 * (table not created yet) or returns no rows, falls back to
 * DEFAULT_SERVICES.
 * @param {object} db - sqlite handle exposing .all()
 * @returns {Promise<Array<{name: string, url: string}>>}
 */
async function getServices(db) {
  let rows = null;
  try {
    rows = await db.all('SELECT name, url FROM monitored_services');
  } catch (e) {
    // monitored_services may not exist yet — fall through to defaults.
  }
  return rows && rows.length > 0 ? rows : DEFAULT_SERVICES;
}
|
|
|
|
/**
 * Ensure the monitored_services table exists and, only when it is
 * completely empty, populate it with DEFAULT_SERVICES.
 * @param {object} db - sqlite handle exposing .exec()/.get()/.run()
 */
async function seedDefaults(db) {
  // Ensure monitored_services table exists
  await db.exec(`
    CREATE TABLE IF NOT EXISTS monitored_services (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      name TEXT NOT NULL UNIQUE,
      url TEXT NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );
  `);

  // Seed defaults only when the table has no rows at all.
  const { cnt } = await db.get('SELECT COUNT(*) as cnt FROM monitored_services');
  if (cnt !== 0) return;

  for (const { name, url } of DEFAULT_SERVICES) {
    await db.run(
      'INSERT OR IGNORE INTO monitored_services (name, url) VALUES (?, ?)',
      name, url
    );
  }
  console.log('Seeded default services into monitored_services');
}
|
|
|
|
/**
 * Create the rss_feeds / rss_items tables and their indexes, then seed
 * a default Hacker News feed when no feeds exist yet.
 * rss_items.link is UNIQUE so re-synced items dedupe via INSERT OR IGNORE.
 * @param {object} db - sqlite handle exposing .exec()/.get()/.run()
 */
async function setupRssTables(db) {
  await db.exec(`
    CREATE TABLE IF NOT EXISTS rss_feeds (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      name TEXT NOT NULL,
      url TEXT NOT NULL UNIQUE,
      last_fetched DATETIME,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );
    CREATE TABLE IF NOT EXISTS rss_items (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      feed_id INTEGER NOT NULL,
      title TEXT NOT NULL,
      link TEXT NOT NULL UNIQUE,
      pub_date DATETIME,
      creator TEXT,
      snippet TEXT,
      read INTEGER DEFAULT 0,
      bookmarked INTEGER DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );
    CREATE INDEX IF NOT EXISTS idx_rss_items_feed ON rss_items(feed_id);
    CREATE INDEX IF NOT EXISTS idx_rss_items_pubdate ON rss_items(pub_date DESC);
  `);

  // Seed default HN feed only when no feeds are configured yet.
  const { cnt } = await db.get('SELECT COUNT(*) as cnt FROM rss_feeds');
  if (cnt === 0) {
    await db.run(
      'INSERT INTO rss_feeds (name, url) VALUES (?, ?)',
      'Hacker News', 'https://news.ycombinator.com/rss'
    );
    console.log('Seeded default RSS feed (Hacker News)');
  }
}
|
|
|
|
/**
 * Fetch every configured feed in rss_feeds, insert new items (deduped by
 * the UNIQUE link column via INSERT OR IGNORE), stamp last_fetched, then
 * prune items older than 90 days.
 *
 * A failure on one feed is logged and does not abort the remaining
 * feeds. Note: db.all() runs outside the try, so a DB error rejects the
 * returned promise — callers must .catch() or await in a try.
 * @param {object} db - sqlite handle exposing .all()/.run()
 */
async function syncRssFeeds(db) {
  const parser = new RSSParser({ timeout: 5000 });
  const feeds = await db.all('SELECT * FROM rss_feeds');

  for (const feed of feeds) {
    try {
      const parsed = await parser.parseURL(feed.url);
      for (const item of parsed.items || []) {
        // Skip items missing the fields we require (title, unique link).
        if (!item.title || !item.link) continue;
        await db.run(
          `INSERT OR IGNORE INTO rss_items (feed_id, title, link, pub_date, creator, snippet)
           VALUES (?, ?, ?, ?, ?, ?)`,
          feed.id,
          item.title,
          item.link,
          item.pubDate || item.isoDate || null,
          item.creator || item.author || null,
          // Cap snippets at 500 chars; empty snippets stored as NULL.
          (item.contentSnippet || item.content || '').substring(0, 500) || null
        );
      }
      await db.run(
        'UPDATE rss_feeds SET last_fetched = ? WHERE id = ?',
        new Date().toISOString(), feed.id
      );
      console.log(`RSS synced: ${feed.name} (${(parsed.items || []).length} items)`);
    } catch (err) {
      console.error(`RSS sync error for ${feed.name}:`, err.message);
    }
  }

  // Prune items older than 90 days. Best-effort, but log instead of
  // silently swallowing so prune failures are visible in the logs.
  try {
    await db.run(`DELETE FROM rss_items WHERE created_at < datetime('now', '-90 days')`);
  } catch (e) {
    console.error('RSS prune error:', e.message);
  }
}
|
|
|
|
/**
 * Main entry point: open the sqlite DB, ensure all tables exist, seed
 * defaults, then run two background loops:
 *  - uptime probes of monitored services every 60s (400-day retention)
 *  - RSS feed sync every 5 minutes (90-day retention, see syncRssFeeds)
 */
async function monitor() {
  console.log('Starting monitoring loop...');

  const dbPath = process.env.DB_PATH || './dashboard.db';

  const db = await open({
    filename: dbPath,
    driver: sqlite3.Database
  });

  // Ensure table exists (in case monitor runs before app)
  await db.exec(`
    CREATE TABLE IF NOT EXISTS uptime_logs (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      service_name TEXT NOT NULL,
      url TEXT NOT NULL,
      status TEXT NOT NULL,
      latency INTEGER,
      timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
    );
  `);

  await seedDefaults(db);

  // Setup RSS tables and kick off an initial sync. The sync is
  // intentionally not awaited so uptime checks start immediately, but
  // the promise must be caught: syncRssFeeds can reject (its db.all is
  // outside its try) and an unhandled rejection kills modern Node.
  await setupRssTables(db);
  syncRssFeeds(db).catch((err) => console.error('Initial RSS sync failed:', err.message));

  setInterval(async () => {
    console.log('Running checks...');
    const now = new Date().toISOString();

    // Re-read services each interval so new additions are picked up
    const services = await getServices(db);

    for (const service of services) {
      const start = performance.now();
      let status = 'down';
      let latency = 0;

      try {
        // Abort the probe after 5s so a hung service can't stall the loop.
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), 5000);

        const res = await fetch(service.url, {
          method: 'HEAD',
          signal: controller.signal
        });
        clearTimeout(timeout);

        // Responses below 500 count as up; 5xx responses and network
        // errors/timeouts (caught below) count as down.
        status = res.status < 500 ? 'up' : 'down';
        const end = performance.now();
        latency = Math.round(end - start);
      } catch (err) {
        status = 'down';
        latency = 0;
      }

      try {
        await db.run(
          `INSERT INTO uptime_logs (service_name, url, status, latency, timestamp) VALUES (?, ?, ?, ?, ?)`,
          service.name, service.url, status, latency, now
        );
      } catch (dbErr) {
        console.error('DB Write Error:', dbErr);
      }
    }

    // Prune old logs (keep 400 days for yearly view). Best-effort, but
    // log failures so they are visible.
    try {
      await db.run(`DELETE FROM uptime_logs WHERE timestamp < datetime('now', '-400 days')`);
    } catch (e) {
      console.error('Uptime prune error:', e.message);
    }

  }, 60000); // Run every minute

  // RSS sync every 5 minutes; catch rejections so a transient DB or
  // network failure cannot crash the whole monitor process.
  setInterval(() => {
    syncRssFeeds(db).catch((err) => console.error('RSS sync failed:', err.message));
  }, 5 * 60 * 1000);
}
|
|
|
|
// Top-level entry. Surface fatal startup errors (e.g. DB open failure)
// explicitly instead of leaving an unhandled promise rejection.
monitor().catch((err) => {
  console.error('Monitor failed to start:', err);
  process.exit(1);
});
|