implemented historical data for 7 days
parent 22af6dac35
commit ec6889bcdf
3 changed files with 65 additions and 5 deletions
.gitignore (vendored) · 2 changes
@@ -1,3 +1,5 @@
+data
+
 # dependencies (bun install)
 node_modules
 
app.ts · 66 changes
@@ -2,9 +2,65 @@ import { Hono } from 'hono';
 import { serve } from 'bun';
 import Parser from 'rss-parser';
 import { Feed, type Item } from 'feed';
+import fs from 'fs';
+import path from 'path';
 
 const app = new Hono();
 const parser = new Parser();
+const DATA_DIR = path.resolve('.', 'data');
+
+// Ensure data directory exists
+if (!fs.existsSync(DATA_DIR)) fs.mkdirSync(DATA_DIR, { recursive: true });
+
+// Derive a safe filename from the feed URL
+function getStorePath(feedUrl: string): string {
+  const safeName = feedUrl.replace(/[^a-z0-9]/gi, '_').toLowerCase();
+  return path.join(DATA_DIR, `${safeName}.json`);
+}
+
+// Async feed: fetch remote, load+merge local, persist only on change, return merged data
+async function syncFeed(feedUrl: string): Promise<{ title: string; feedId: string; feedLink: string; items: Parser.Item[] }> {
+  // Fetch remote feed
+  const { title, feedUrl: feedId, link: feedLink, items: remoteItems } = await parser.parseURL(feedUrl);
+
+  // Determine storage path
+  const storePath = getStorePath(feedUrl);
+  let localItems: Parser.Item[] = [];
+  let localItemsJson = '';
+
+  // Load and parse local file once, reset if corrupted
+  if (fs.existsSync(storePath)) {
+    try {
+      localItemsJson = fs.readFileSync(storePath, 'utf-8');
+      localItems = JSON.parse(localItemsJson) as Parser.Item[];
+    } catch {
+      localItems = [];
+      localItemsJson = '';
+    }
+  }
+
+  const sevenDaysAgo = new Date();
+  sevenDaysAgo.setHours(0, 0, 0, 0);
+  sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
+
+  // Merge fresh items (dedupe by link)
+  const localLinks = new Set(localItems.map(i => i.link));
+  const mergedItems = [...localItems.filter(p => new Date(p.pubDate as string) >= sevenDaysAgo)];
+  for (const remoteItem of remoteItems) {
+    if (remoteItem.link && !localLinks.has(remoteItem.link)) mergedItems.push(remoteItem);
+  }
+
+  // Serialize merged
+  const mergedItemsJson = JSON.stringify(mergedItems, null, 2);
+
+  // Persist only if changed
+  if (mergedItemsJson !== localItemsJson) {
+    fs.writeFileSync(storePath, mergedItemsJson, 'utf-8');
+  }
+
+  return { title: title as string, feedId: feedId as string, feedLink: feedLink as string, items: mergedItems };
+}
+
 
 app.get('/group-by-day', async (c) => {
   const feedUrl = c.req.query('feedUrl');
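The new syncFeed keeps only local items from the last seven days and appends any remote item whose link is not already stored; note that links of items that just aged out still block re-adding, because the dedupe set is built from all local items. A minimal standalone sketch of that window-plus-dedupe merge, using a simplified hypothetical FeedItem shape rather than the rss-parser type:

type FeedItem = { link?: string; pubDate?: string };

function mergeItems(local: FeedItem[], remote: FeedItem[], days = 7): FeedItem[] {
  // Midnight `days` ago, matching sevenDaysAgo above
  const cutoff = new Date();
  cutoff.setHours(0, 0, 0, 0);
  cutoff.setDate(cutoff.getDate() - days);

  // Keep only local items newer than the cutoff
  const merged = local.filter(p => new Date(p.pubDate as string) >= cutoff);

  // Dedupe against every stored link, including ones that just aged out
  const seen = new Set(local.map(i => i.link));
  for (const r of remote) {
    if (r.link && !seen.has(r.link)) merged.push(r);
  }
  return merged;
}

// Example: a 10-day-old local item is dropped, a new remote item is kept
const result = mergeItems(
  [{ link: 'a', pubDate: new Date(Date.now() - 10 * 86_400_000).toISOString() }],
  [{ link: 'b', pubDate: new Date().toISOString() }],
);
console.log(result.map(i => i.link)); // [ 'b' ]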
@@ -12,13 +68,13 @@ app.get('/group-by-day', async (c) => {
     return c.text('Missing feedUrl query parameter', 400);
   }
   try {
-    const { title, feedUrl: feedId, link: feedLink, items } = await parser.parseURL(feedUrl);
+    const { title, feedId, feedLink, items } = await syncFeed(feedUrl);
 
-    const grouped = items.reduce((acc, { pubDate, content, contentSnippet, summary, link }) => {
+    const grouped = items.reduce((acc: Record<string, ({link: string, content: string})[]>, { pubDate, content, contentSnippet, summary, link }) => {
       const day = new Date(pubDate as string).toISOString().slice(0, 10);
       acc[day] = acc[day] || [];
       acc[day].push({
-        link,
+        link: link as string,
         content: content || contentSnippet || summary || ''
       });
       return acc;
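The reducer buckets items by UTC calendar day, taking the first ten characters of the ISO timestamp. For example:

// RFC 822 pubDate -> '2024-05-03' (the UTC day)
console.log(new Date('Fri, 03 May 2024 18:22:00 GMT').toISOString().slice(0, 10));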
@@ -38,8 +94,8 @@ app.get('/group-by-day', async (c) => {
       title: `${title} - ${dateStr}`,
       id: day,
       date: new Date(day),
-      link: grouped[day][0].link,
-      content: grouped[day].map((p: any) => p.content).join('\n\n---------------------\n\n')
+      link: grouped[day]?.at(0)?.link as string,
+      content: grouped[day]?.map((p: any) => p.content).join('\n\n---------------------\n\n') as string
     };
   }).filter(p => p.date < today);
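The indexing change in this hunk is defensive: plain `grouped[day][0]` throws if the key is absent, while optional chaining yields undefined. Since the days being mapped come from the grouped object itself, this likely satisfies strict index typing rather than fixing a runtime crash. A tiny illustration with hypothetical data:

const grouped: Record<string, { link: string }[]> = {};
// grouped['2024-01-01'][0].link would throw: cannot index undefined
console.log(grouped['2024-01-01']?.at(0)?.link); // undefined, no throw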
docker-compose.yml · 2 changes
@@ -8,4 +8,6 @@ services:
       - PORT=${PORT:-3000}
     ports:
       - "${PORT:-3000}:${PORT:-3000}"
+    volumes:
+      - ./data:/app/data
     restart: unless-stopped
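With the bind mount in place, the JSON store under ./data survives container restarts. A quick smoke test against a running instance (port 3000 per the compose defaults; the feed URL is a placeholder):

// smoke-test.ts: hypothetical check, run with `bun smoke-test.ts`
const res = await fetch(
  'http://localhost:3000/group-by-day?feedUrl=' +
  encodeURIComponent('https://example.com/rss.xml'),
);
console.log(res.status);
console.log(await res.text());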