-rw-r--r--   README.md             15
-rwxr-xr-x   serve.py              66
-rw-r--r--   setup.sql             70
-rw-r--r--   tagrss.py            109
-rw-r--r--   views/index.tpl       16
-rw-r--r--   views/list_feeds.tpl  42
6 files changed, 260 insertions, 58 deletions
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..5db23d6
--- /dev/null
+++ b/README.md
@@ -0,0 +1,15 @@
+# TagRSS
+
+An extremely simple RSS reader with support for tags; each tag can be applied to multiple feeds.
+
+This project is not in a finished state, but the core functionality is present.
+
+## To do
+
+* Add filtering by tag/feed
+* Do more user input validation
+* Handle more `requests` and `feedparser` error conditions
+* Add logging
+* Add some reasonably high internal limit on tag count
+* Add support for authentication
+* Allow specifying update interval on a per-feed basis
diff --git a/serve.py b/serve.py
index 1cd6f78..41a4757 100755
--- a/serve.py
+++ b/serve.py
@@ -7,6 +7,7 @@ import gevent.lock
import argparse
import pathlib
+import math
import schedule
import threading
import time
@@ -14,6 +15,9 @@ import typing
import tagrss
+MAX_PER_PAGE_ENTRIES = 1000
+DEFAULT_PER_PAGE_ENTRIES = 50
+
parser = argparse.ArgumentParser()
parser.add_argument("--host", default="localhost")
parser.add_argument("--port", default=8000, type=int)
@@ -58,11 +62,44 @@ def serialise_tags(tags: list[str]) -> str:
return result
-@bottle.route("/")
+@bottle.get("/")
def index():
+ per_page: int = min(MAX_PER_PAGE_ENTRIES, int(bottle.request.query.get("per_page", DEFAULT_PER_PAGE_ENTRIES))) # type: ignore
+ page_num = int(bottle.request.query.get("page_num", 1)) # type: ignore
+ offset = (page_num - 1) * per_page
+ with core_lock:
+ total_pages: int = max(1, math.ceil(core.get_entry_count() / per_page))
+ entries = core.get_entries(limit=per_page, offset=offset)
+ return bottle.template(
+ "index",
+ entries=entries,
+ offset=offset,
+ page_num=page_num,
+ total_pages=total_pages,
+ per_page=per_page,
+ max_per_page=MAX_PER_PAGE_ENTRIES,
+ core=core,
+ )
+
+
+@bottle.get("/list_feeds")
+def list_feeds():
+ per_page: int = min(MAX_PER_PAGE_ENTRIES, int(bottle.request.query.get("per_page", DEFAULT_PER_PAGE_ENTRIES))) # type: ignore
+ page_num = int(bottle.request.query.get("page_num", 1)) # type: ignore
+ offset = (page_num - 1) * per_page
with core_lock:
- entries = core.get_entries(limit=100)
- return bottle.template("index", entries=entries, core=core)
+ total_pages: int = max(1, math.ceil(core.get_feed_count() / per_page))
+ feeds = core.get_feeds(limit=per_page, offset=offset)
+ return bottle.template(
+ "list_feeds",
+ feeds=feeds,
+ offset=offset,
+ page_num=page_num,
+ total_pages=total_pages,
+ per_page=per_page,
+ max_per_page=MAX_PER_PAGE_ENTRIES,
+ core=core,
+ )
@bottle.get("/add_feed")
@@ -143,21 +180,22 @@ def serve_static(path):
def update_feeds(run_event: threading.Event):
def inner_update():
with core_lock:
- core.fetch_all_new_feed_entries()
+ feeds = core.get_all_feed_ids()
+ for feed_id in feeds():
+ core.fetch_new_feed_entries(feed_id)
+
inner_update()
schedule.every(args.update_seconds).seconds.do(inner_update)
- try:
- while run_event.is_set():
- schedule.run_pending()
- time.sleep(1)
- except KeyboardInterrupt:
- return
+ while run_event.is_set():
+ schedule.run_pending()
+ time.sleep(1)
+
-run_event = threading.Event()
-run_event.set()
-threading.Thread(target=update_feeds, args=(run_event,)).start()
+feed_update_run_event = threading.Event()
+feed_update_run_event.set()
+threading.Thread(target=update_feeds, args=(feed_update_run_event,)).start()
bottle.run(host=args.host, port=args.port, server="gevent")
-run_event.clear()
+feed_update_run_event.clear()
with core_lock:
core.close()
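
The pagination arithmetic shared by `index()` and `list_feeds()` above boils down to three steps: clamp `per_page`, derive a zero-based row offset from the one-based page number, and round the page count up. A minimal standalone sketch of that arithmetic (the `paginate` helper is illustrative only, not part of the commit):

```python
import math

MAX_PER_PAGE_ENTRIES = 1000  # same cap as serve.py

def paginate(total: int, page_num: int = 1, per_page: int = 50) -> tuple[int, int]:
    per_page = min(MAX_PER_PAGE_ENTRIES, per_page)     # clamp the query parameter
    offset = (page_num - 1) * per_page                 # rows to skip (SQL OFFSET)
    total_pages = max(1, math.ceil(total / per_page))  # at least one page, even when empty
    return offset, total_pages

print(paginate(260, page_num=3))  # (100, 6): page 3 shows rows 101-150 of 260
```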
diff --git a/setup.sql b/setup.sql
index 02aac26..e00666a 100644
--- a/setup.sql
+++ b/setup.sql
@@ -5,7 +5,17 @@ CREATE TABLE IF NOT EXISTS tagrss_info(info_key TEXT PRIMARY KEY, value TEXT) ST
INSERT
OR REPLACE INTO tagrss_info(info_key, value)
VALUES
- ("version", "0.9.0");
+ ("version", "0.10.0");
+
+CREATE TABLE IF NOT EXISTS feed_count(
+ id INTEGER PRIMARY KEY CHECK (id = 0),
+ count INTEGER CHECK(count >= 0)
+) STRICT;
+
+INSERT
+ OR IGNORE INTO feed_count(id, count)
+VALUES
+ (0, 0);
CREATE TABLE IF NOT EXISTS feeds(
id INTEGER PRIMARY KEY,
@@ -13,6 +23,27 @@ CREATE TABLE IF NOT EXISTS feeds(
title TEXT
) STRICT;
+CREATE TRIGGER IF NOT EXISTS trig_feeds__increment_feed_count_after_insert
+AFTER
+INSERT
+ ON feeds BEGIN
+UPDATE
+ feed_count
+SET
+ count = count + 1;
+
+END;
+
+CREATE TRIGGER IF NOT EXISTS trig_feeds__decrement_feed_count_after_delete
+AFTER
+ DELETE ON feeds BEGIN
+UPDATE
+ feed_count
+SET
+ count = count - 1;
+
+END;
+
CREATE TABLE IF NOT EXISTS feed_tags(
feed_id INTEGER REFERENCES feeds(id) ON DELETE CASCADE,
tag TEXT
@@ -22,18 +53,26 @@ CREATE INDEX IF NOT EXISTS idx_feed_tags__feed_id__tag ON feed_tags(feed_id, tag
CREATE INDEX IF NOT EXISTS idx_feed_tags__tag__feed_id ON feed_tags(tag, feed_id);
+CREATE TABLE IF NOT EXISTS entry_count(
+ id INTEGER PRIMARY KEY CHECK (id = 0),
+ count INTEGER CHECK(count >= 0)
+) STRICT;
+
+INSERT
+ OR IGNORE INTO entry_count(id, count)
+VALUES
+ (0, 0);
+
CREATE TABLE IF NOT EXISTS entries(
- id INTEGER PRIMARY KEY,
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
feed_id INTEGER REFERENCES feeds(id) ON DELETE CASCADE,
title TEXT,
link TEXT,
epoch_published INTEGER,
epoch_updated INTEGER,
- epoch_stored INTEGER
+ epoch_downloaded INTEGER
) STRICT;
-CREATE INDEX IF NOT EXISTS idx_entries__epoch_stored ON entries(epoch_stored);
-
CREATE INDEX IF NOT EXISTS idx_entries__feed_id__title__link__epoch_published__epoch_updated ON entries(
feed_id,
title,
@@ -62,3 +101,24 @@ WHERE
);
END;
+
+CREATE TRIGGER IF NOT EXISTS trig_entries__increment_entry_count_after_insert
+AFTER
+INSERT
+ ON entries BEGIN
+UPDATE
+ entry_count
+SET
+ count = count + 1;
+
+END;
+
+CREATE TRIGGER IF NOT EXISTS trig_entries__decrement_entry_count_after_delete
+AFTER
+ DELETE ON entries BEGIN
+UPDATE
+ entry_count
+SET
+ count = count - 1;
+
+END;
\ No newline at end of file
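
The `feed_count` and `entry_count` tables added above follow a common single-row counter pattern: `AFTER INSERT` / `AFTER DELETE` triggers keep the stored count in step with the table, so `get_feed_count()` and `get_entry_count()` can read one row instead of running a full `COUNT(*)` scan. A minimal, self-contained sketch of the pattern, with made-up table and trigger names (not the ones from setup.sql):

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.executescript(
    """
    CREATE TABLE items(id INTEGER PRIMARY KEY);
    CREATE TABLE item_count(id INTEGER PRIMARY KEY CHECK (id = 0), count INTEGER);
    INSERT INTO item_count(id, count) VALUES (0, 0);
    CREATE TRIGGER trig_items__inc AFTER INSERT ON items
    BEGIN UPDATE item_count SET count = count + 1; END;
    CREATE TRIGGER trig_items__dec AFTER DELETE ON items
    BEGIN UPDATE item_count SET count = count - 1; END;
    """
)
con.execute("INSERT INTO items DEFAULT VALUES;")
con.execute("INSERT INTO items DEFAULT VALUES;")
con.execute("DELETE FROM items WHERE id = 1;")
print(con.execute("SELECT count FROM item_count;").fetchone()[0])  # 1
```

SQLite triggers run once per affected row, so multi-row deletes keep the counter in step as well.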
diff --git a/tagrss.py b/tagrss.py
index f6d125e..038c940 100644
--- a/tagrss.py
+++ b/tagrss.py
@@ -34,6 +34,7 @@ class TagRss:
def add_feed(self, feed_source: str, tags: list[str]) -> None:
response = requests.get(feed_source)
+ epoch_downloaded: int = int(time.time())
if response.status_code != requests.codes.ok:
raise FeedFetchError(feed_source, response.status_code)
try:
@@ -60,25 +61,28 @@ class TagRss:
"INSERT INTO feed_tags(feed_id, tag) VALUES(?, ?);",
((feed_id, tag) for tag in tags),
)
- self.store_feed_entries(feed_id, parsed)
+ self.store_feed_entries(feed_id, parsed, epoch_downloaded)
- def get_entries(self, *, limit: int) -> list[dict[str, typing.Any]]:
+ def get_entries(
+ self, *, limit: int, offset: int = 0
+ ) -> list[dict[str, typing.Any]]:
with self.connection:
resp = self.connection.execute(
- "SELECT feed_id, title, link, epoch_published, epoch_updated FROM entries \
- ORDER BY epoch_stored DESC LIMIT ?;",
- (limit,),
+ "SELECT id, feed_id, title, link, epoch_published, epoch_updated FROM entries \
+ ORDER BY id DESC LIMIT ? OFFSET ?;",
+ (limit, offset),
).fetchall()
entries = []
for entry in resp:
entries.append(
{
- "feed_id": entry[0],
- "title": entry[1],
- "link": entry[2],
- "epoch_published": entry[3],
- "epoch_updated": entry[4],
+ "id": entry[0],
+ "feed_id": entry[1],
+ "title": entry[2],
+ "link": entry[3],
+ "epoch_published": entry[4],
+ "epoch_updated": entry[5],
}
)
return entries
@@ -130,29 +134,68 @@ class TagRss:
with self.connection:
self.connection.execute("DELETE FROM feeds WHERE id = ?;", (feed_id,))
- def fetch_all_new_feed_entries(self) -> None:
+ def get_feeds(self, *, limit: int, offset: int = 0) -> list[dict[str, typing.Any]]:
with self.connection:
- resp = self.connection.execute("SELECT id, source FROM feeds;")
+ resp = self.connection.execute(
+ "SELECT id, source, title FROM feeds \
+ ORDER BY id ASC LIMIT ? OFFSET ?;",
+ (limit, offset),
+ ).fetchall()
+ feeds = []
+ for row in resp:
+ feeds.append(
+ {
+ "id": row[0],
+ "source": row[1],
+ "title": row[2],
+ }
+ )
+ return feeds
+
+ def get_all_feed_ids(self):
+ def inner():
+ with self.connection:
+ resp = self.connection.execute("SELECT id FROM feeds;")
while True:
row = resp.fetchone()
if not row:
break
- feed_id = row[0]
- feed_source = row[1]
- response = requests.get(feed_source)
- if response.status_code != requests.codes.ok:
- continue # TODO: log this somehow
- try:
- base: str = response.headers["Content-Location"]
- except KeyError:
- base: str = feed_source
- parsed = feedparser.parse(
- io.BytesIO(bytes(response.text, encoding="utf-8")),
- response_headers={"Content-Location": base},
- )
- self.store_feed_entries(feed_id, parsed)
+ yield row[0]
+
+ return inner
+
+ def get_entry_count(self) -> int:
+ with self.connection:
+ return self.connection.execute("SELECT count from entry_count;").fetchone()[
+ 0
+ ]
- def store_feed_entries(self, feed_id: int, parsed_feed):
+ def get_feed_count(self) -> int:
+ with self.connection:
+ return self.connection.execute("SELECT count from feed_count;").fetchone()[
+ 0
+ ]
+
+ def fetch_new_feed_entries(self, feed_id: int) -> None:
+ with self.connection:
+ feed_source: str = self.connection.execute(
+ "SELECT source FROM feeds WHERE id = ?;", (feed_id,)
+ ).fetchone()[0]
+ response = requests.get(feed_source)
+ epoch_downloaded: int = int(time.time())
+ if response.status_code != requests.codes.ok:
+ raise FeedFetchError(feed_source, response.status_code)
+ try:
+ base: str = response.headers["Content-Location"]
+ except KeyError:
+ base: str = feed_source
+ parsed = feedparser.parse(
+ io.BytesIO(bytes(response.text, encoding="utf-8")),
+ response_headers={"Content-Location": base},
+ )
+ self.store_feed_entries(feed_id, parsed, epoch_downloaded)
+
+ def store_feed_entries(self, feed_id: int, parsed_feed, epoch_downloaded: int):
for entry in reversed(parsed_feed.entries):
link: str = entry.get("link", None)
title: str = entry.get("title", None)
@@ -170,7 +213,7 @@ class TagRss:
epoch_updated = None
with self.connection:
self.connection.execute(
- "INSERT INTO entries(feed_id, title, link, epoch_published, epoch_updated, epoch_stored) \
+ "INSERT INTO entries(feed_id, title, link, epoch_published, epoch_updated, epoch_downloaded) \
VALUES(?, ?, ?, ?, ?, ?);",
(
feed_id,
@@ -178,17 +221,9 @@ class TagRss:
link,
epoch_published,
epoch_updated,
- int(time.time()),
+ epoch_downloaded,
),
)
-
def close(self) -> None:
- with self.connection:
- self.connection.executescript(
- """
-PRAGMA analysis_limit=1000;
-PRAGMA optimize;
- """
- )
self.connection.close()
diff --git a/views/index.tpl b/views/index.tpl
index fe716f0..dc79b0b 100644
--- a/views/index.tpl
+++ b/views/index.tpl
@@ -46,7 +46,10 @@
<body>
<h1>TagRSS</h1>
<nav>
- <p><a href="/add_feed" class="no-visited-indication">Add feed</a></p>
+ <p>
+ <a href="/add_feed" class="no-visited-indication">Add feed</a>&nbsp;|
+ <a href="/list_feeds" class="no-visited-indication">List feeds</a>
+ </p>
</nav>
<table>
<thead>
@@ -61,7 +64,7 @@
<tbody>
% for i, entry in enumerate(entries):
<tr>
- <td>{{i + 1}}</td>
+ <td>{{i + 1 + offset}}</td>
<td><a href="{{entry['link']}}">{{entry["title"]}}</a></td>
<%
date = ""
@@ -96,5 +99,14 @@
% end
</tbody>
</table>
+ <form>
+ <label>Page
+ <input type="number" value="{{page_num}}" min="1" max="{{total_pages}}" name="page_num">
+ </label> of {{total_pages}}.
+ <label>Per page:
+ <input type="number" value="{{per_page}}" min="1" max="{{max_per_page}}" name="per_page">
+ </label>
+ <input type="submit" value="Go">
+ </form>
</body>
</html>
diff --git a/views/list_feeds.tpl b/views/list_feeds.tpl
new file mode 100644
index 0000000..22334e3
--- /dev/null
+++ b/views/list_feeds.tpl
@@ -0,0 +1,42 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <title>List Feeds | TagRSS</title>
+ <link href="/static/styles/main.css" rel="stylesheet">
+</head>
+<body>
+ <a href="/" class="no-visited-indication">&lt; home</a>
+ <h1>Feeds</h1>
+ <table>
+ <thead>
+ <tr>
+ <th>#</th>
+ <th>Feed</th>
+ <th>Source</th>
+ <th>Manage</th>
+ </tr>
+ </thead>
+ <tbody>
+ % for i, feed in enumerate(feeds):
+ <tr>
+ <td>{{i + 1 + offset}}</td>
+ <td>{{feed["title"]}}</td>
+ <td><a href="{{feed['source']}}" class="no-visited-indication">🔗</a></td>
+            <td><a href="/manage_feed?feed={{feed['id']}}" class="no-visited-indication">⚙</a></td>
+ </tr>
+ % end
+ </tbody>
+ </table>
+ <form>
+ <label>Page
+ <input type="number" value="{{page_num}}" min="1" max="{{total_pages}}" name="page_num">
+ </label> of {{total_pages}}.
+ <label>Per page:
+ <input type="number" value="{{per_page}}" min="1" max="{{max_per_page}}" name="per_page">
+ </label>
+ <input type="submit" value="Go">
+ </form>
+</body>
+</html>
\ No newline at end of file