0.1.0
Some checks failed
OSV-Scanner Scheduled Scan / scan-scheduled (push) Successful in 28s
CI / build-backend (push) Failing after 31s
CI / build-frontend (push) Successful in 50s

This commit is contained in:
2025-12-26 21:31:05 -06:00
parent 10ea615b8f
commit 28273473e1
118 changed files with 13787 additions and 0 deletions

512
internal/api/handlers.go Normal file
View File

@@ -0,0 +1,512 @@
package api
import (
"crypto/rand"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"io"
"log"
"net"
"net/http"
"net/url"
"os"
"strings"
"sync"
"time"
"git.quad4.io/Go-Libs/RSS"
readability "github.com/go-shiori/go-readability"
"golang.org/x/time/rate"
)
// ProxyResponse is the JSON body returned by HandleFeedProxy: parsed
// feed metadata plus the articles extracted from the feed items.
type ProxyResponse struct {
	Feed     FeedInfo  `json:"feed"`
	Articles []Article `json:"articles"`
}
// FeedInfo carries feed-level metadata inside a ProxyResponse.
type FeedInfo struct {
	Title       string `json:"title"`
	SiteURL     string `json:"siteUrl"`
	Description string `json:"description"`
	LastFetched int64  `json:"lastFetched"` // Unix milliseconds of the fetch
}
// Article is one feed entry in the shape consumed by the frontend.
type Article struct {
	ID          string `json:"id"`     // item GUID, or Link when the feed has no GUID
	FeedID      string `json:"feedId"` // the feed URL the article came from
	Title       string `json:"title"`
	Link        string `json:"link"`
	Description string `json:"description"`
	Author      string `json:"author"`
	PubDate     int64  `json:"pubDate"` // Unix milliseconds
	Read        bool   `json:"read"`    // always false when produced by HandleFeedProxy
	Saved       bool   `json:"saved"`   // always false when produced by HandleFeedProxy
	ImageURL    string `json:"imageUrl"`
}
// RateLimiter hands out one token-bucket limiter per client ID and can
// persist the set of known client hashes to a JSON file.
type RateLimiter struct {
	clients map[string]*rate.Limiter // one limiter per client hash
	mu      *sync.RWMutex            // guards clients
	r       rate.Limit               // refill rate for newly created limiters
	b       int                      // burst size for newly created limiters
	File    string                   // optional persistence path; "" disables persistence
}
// NewRateLimiter builds a per-client limiter registry with the given
// rate and burst. When file is non-empty, previously persisted client
// hashes are preloaded so their limiters exist immediately.
func NewRateLimiter(r rate.Limit, b int, file string) *RateLimiter {
	registry := &RateLimiter{
		clients: map[string]*rate.Limiter{},
		mu:      new(sync.RWMutex),
		r:       r,
		b:       b,
		File:    file,
	}
	if file != "" {
		registry.LoadHashes()
	}
	return registry
}
// LoadHashes reads the persisted JSON array of client hashes from
// rl.File and creates a fresh limiter for each. Read or parse failures
// are treated as "nothing persisted" and ignored.
func (rl *RateLimiter) LoadHashes() {
	rl.mu.Lock()
	defer rl.mu.Unlock()
	raw, err := os.ReadFile(rl.File)
	if err != nil {
		return
	}
	var stored []string
	if json.Unmarshal(raw, &stored) != nil {
		return
	}
	for _, hash := range stored {
		rl.clients[hash] = rate.NewLimiter(rl.r, rl.b)
	}
}
// SaveHashes persists the current set of client hashes to rl.File as a
// JSON array. Errors are logged rather than returned, matching
// AuthManager.SaveTokens; the original version discarded them silently.
func (rl *RateLimiter) SaveHashes() {
	rl.mu.RLock()
	var hashes []string
	for h := range rl.clients {
		hashes = append(hashes, h)
	}
	rl.mu.RUnlock()
	data, err := json.MarshalIndent(hashes, "", " ")
	if err != nil {
		log.Printf("Error marshaling rate limiter hashes: %v", err)
		return
	}
	if err := os.WriteFile(rl.File, data, 0600); err != nil {
		log.Printf("Error writing rate limiter file: %v", err)
	}
}
// GetLimiter returns the limiter for id, creating (and persisting, when
// a file is configured) a new one on first sight of the client.
func (rl *RateLimiter) GetLimiter(id string) *rate.Limiter {
	rl.mu.RLock()
	limiter, exists := rl.clients[id]
	rl.mu.RUnlock()
	if exists {
		return limiter
	}
	rl.mu.Lock()
	// Re-check under the write lock: another goroutine may have inserted
	// a limiter between RUnlock and Lock above. The original overwrote it
	// unconditionally, discarding that limiter's accumulated token state.
	limiter, exists = rl.clients[id]
	if !exists {
		limiter = rate.NewLimiter(rl.r, rl.b)
		rl.clients[id] = limiter
	}
	rl.mu.Unlock()
	if !exists && rl.File != "" {
		rl.SaveHashes()
	}
	return limiter
}
// Limiter is the process-wide registry used by LimitMiddleware:
// 1 request/second with a burst of 5 per client, no persistence file.
var Limiter = NewRateLimiter(rate.Every(time.Second), 5, "")
var ForbiddenPatterns = []string{
".git", ".env", ".aws", ".config", ".ssh",
"wp-admin", "wp-login", "phpinfo", ".php",
"etc/passwd", "cgi-bin",
}
func BotBlockerMiddleware(next http.HandlerFunc) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
path := strings.ToLower(r.URL.Path)
query := strings.ToLower(r.URL.RawQuery)
for _, pattern := range ForbiddenPatterns {
if strings.Contains(path, pattern) || strings.Contains(query, pattern) {
log.Printf("Blocked suspicious request: %s from %s", r.URL.String(), r.RemoteAddr)
http.Error(w, "Forbidden", http.StatusForbidden)
return
}
}
next.ServeHTTP(w, r)
}
}
// AuthManager validates request tokens in one of three modes:
// "none" (everything allowed), "token" (single shared MasterToken), or
// "multi" (a set of registered tokens, optionally persisted to AuthFile).
type AuthManager struct {
	Mode              string          // "none", "token", "multi"
	MasterToken       string          // accepted token in "token" mode
	AllowRegistration bool            // whether Register may mint tokens in "multi" mode
	AuthFile          string          // JSON token file for "multi" mode; "" disables persistence
	Tokens            map[string]bool // registered tokens ("multi" mode)
	mu                sync.RWMutex    // guards Tokens
}
// NewAuthManager constructs an AuthManager and, in "multi" mode with a
// token file configured, preloads previously registered tokens.
func NewAuthManager(mode, token, file string, allowReg bool) *AuthManager {
	manager := &AuthManager{
		Mode:              mode,
		MasterToken:       token,
		AllowRegistration: allowReg,
		AuthFile:          file,
		Tokens:            map[string]bool{},
	}
	if mode == "multi" && file != "" {
		manager.LoadTokens()
	}
	return manager
}
// LoadTokens populates am.Tokens from the JSON array stored in
// am.AuthFile. A missing file is normal (first run) and silent; other
// read/parse failures are logged and leave Tokens unchanged.
func (am *AuthManager) LoadTokens() {
	am.mu.Lock()
	defer am.mu.Unlock()
	raw, err := os.ReadFile(am.AuthFile)
	switch {
	case os.IsNotExist(err):
		return
	case err != nil:
		log.Printf("Error reading auth file: %v", err)
		return
	}
	var stored []string
	if err := json.Unmarshal(raw, &stored); err != nil {
		log.Printf("Error parsing auth file: %v", err)
		return
	}
	for _, token := range stored {
		am.Tokens[token] = true
	}
}
// SaveTokens writes the current token set to am.AuthFile as a JSON
// array (owner-only permissions). Failures are logged, not returned.
func (am *AuthManager) SaveTokens() {
	am.mu.RLock()
	var tokens []string
	for token := range am.Tokens {
		tokens = append(tokens, token)
	}
	am.mu.RUnlock()
	payload, err := json.MarshalIndent(tokens, "", " ")
	if err != nil {
		log.Printf("Error marshaling tokens: %v", err)
		return
	}
	if writeErr := os.WriteFile(am.AuthFile, payload, 0600); writeErr != nil {
		log.Printf("Error writing auth file: %v", writeErr)
	}
}
// Validate reports whether token is acceptable under the current mode:
// always true for "none", exact MasterToken match for "token", set
// membership for "multi", and false for any unknown mode.
func (am *AuthManager) Validate(token string) bool {
	switch am.Mode {
	case "none":
		return true
	case "token":
		return token == am.MasterToken
	case "multi":
		am.mu.RLock()
		defer am.mu.RUnlock()
		return am.Tokens[token]
	default:
		return false
	}
}
// Register mints a new account token in "multi" mode (when registration
// is allowed), stores it, persists the token set, and returns it in
// xxxx-xxxx-xxxx-xxxx form (16 hex chars from 8 random bytes).
func (am *AuthManager) Register() (string, error) {
	if am.Mode != "multi" || !am.AllowRegistration {
		return "", http.ErrNotSupported
	}
	raw := make([]byte, 8)
	if _, err := io.ReadFull(rand.Reader, raw); err != nil {
		return "", err
	}
	hexed := hex.EncodeToString(raw)
	formatted := strings.Join([]string{hexed[0:4], hexed[4:8], hexed[8:12], hexed[12:16]}, "-")
	am.mu.Lock()
	am.Tokens[formatted] = true
	am.mu.Unlock()
	am.SaveTokens()
	return formatted, nil
}
// AuthMiddleware enforces token authentication before next runs. The
// token is read from the X-Account-Number header, falling back to a
// "Bearer" Authorization header; invalid tokens get a 401 with a
// WWW-Authenticate challenge. Mode "none" passes everything through.
func AuthMiddleware(am *AuthManager, next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if am.Mode == "none" {
			next.ServeHTTP(w, r)
			return
		}
		token := r.Header.Get("X-Account-Number")
		if token == "" {
			const prefix = "Bearer "
			if auth := r.Header.Get("Authorization"); strings.HasPrefix(auth, prefix) {
				token = auth[len(prefix):]
			}
		}
		if !am.Validate(token) {
			w.Header().Set("WWW-Authenticate", `Bearer realm="Web News"`)
			http.Error(w, "Unauthorized: Invalid Account Number", http.StatusUnauthorized)
			return
		}
		next.ServeHTTP(w, r)
	}
}
// LimitMiddleware rate-limits requests per client, where a client is
// identified by SHA-256(ip + User-Agent). Over-limit requests get 429.
//
// NOTE(review): X-Forwarded-For is client-controlled; trusting it lets a
// caller choose its own rate-limit bucket. Confirm this server only runs
// behind a proxy that overwrites the header.
func LimitMiddleware(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ip, _, err := net.SplitHostPort(r.RemoteAddr)
		if err != nil {
			ip = r.RemoteAddr
		}
		if xff := r.Header.Get("X-Forwarded-For"); xff != "" {
			first := xff
			if comma := strings.IndexByte(xff, ','); comma != -1 {
				first = xff[:comma]
			}
			// Fix: forwarded chains are comma-and-space separated; trim so
			// " 1.2.3.4" and "1.2.3.4" hash to the same client bucket.
			ip = strings.TrimSpace(first)
		}
		ua := r.Header.Get("User-Agent")
		sum := sha256.Sum256([]byte(ip + ua))
		clientID := hex.EncodeToString(sum[:])
		if !Limiter.GetLimiter(clientID).Allow() {
			http.Error(w, http.StatusText(http.StatusTooManyRequests), http.StatusTooManyRequests)
			return
		}
		next.ServeHTTP(w, r)
	}
}
// HandleFeedProxy fetches the RSS/Atom feed named by the "url" query
// parameter, parses it, and responds with a ProxyResponse JSON document
// (feed metadata plus articles).
//
// NOTE(review): the target URL is fetched verbatim with no scheme/host
// validation, so this endpoint can reach internal addresses (SSRF) —
// confirm it is only exposed behind auth and rate limiting.
func HandleFeedProxy(w http.ResponseWriter, r *http.Request) {
	// CORS preflight: reply immediately.
	if r.Method == "OPTIONS" {
		w.WriteHeader(http.StatusOK)
		return
	}
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "Missing url parameter", http.StatusBadRequest)
		return
	}
	client := &http.Client{Timeout: 15 * time.Second}
	req, err := http.NewRequest("GET", feedURL, nil)
	if err != nil {
		http.Error(w, "Failed to create request: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// Add browser-like headers to avoid being blocked by Cloudflare/Bot protection
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")
	req.Header.Set("Accept", "application/rss+xml, application/xml;q=0.9, text/xml;q=0.8, */*;q=0.7")
	req.Header.Set("Cache-Control", "no-cache")
	req.Header.Set("Pragma", "no-cache")
	resp, err := client.Do(req)
	if err != nil {
		http.Error(w, "Failed to fetch feed: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		http.Error(w, "Feed returned status "+resp.Status, http.StatusBadGateway)
		return
	}
	// NOTE(review): the body is read in full with no size cap; a hostile
	// feed can exhaust memory — consider io.LimitReader.
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		http.Error(w, "Failed to read feed body", http.StatusInternalServerError)
		return
	}
	parsedFeed, err := rss.Parse(data)
	if err != nil {
		http.Error(w, "Failed to parse feed: "+err.Error(), http.StatusInternalServerError)
		return
	}
	articles := make([]Article, 0, len(parsedFeed.Items))
	for _, item := range parsedFeed.Items {
		// Fall back to the link when the item carries no GUID.
		id := item.GUID
		if id == "" {
			id = item.Link
		}
		// Default the publish time to "now" when the feed omits it.
		pubDate := time.Now().UnixMilli()
		if item.Published != nil {
			pubDate = item.Published.UnixMilli()
		}
		author := ""
		if item.Author != nil {
			author = item.Author.Name
		}
		// First image-typed enclosure wins.
		imageURL := ""
		for _, enc := range item.Enclosures {
			if enc.Type == "image/jpeg" || enc.Type == "image/png" || enc.Type == "image/gif" {
				imageURL = enc.URL
				break
			}
		}
		articles = append(articles, Article{
			ID:          id,
			FeedID:      feedURL,
			Title:       item.Title,
			Link:        item.Link,
			Description: item.Description,
			Author:      author,
			PubDate:     pubDate,
			Read:        false,
			Saved:       false,
			ImageURL:    imageURL,
		})
	}
	response := ProxyResponse{
		Feed: FeedInfo{
			Title:       parsedFeed.Title,
			SiteURL:     parsedFeed.Link,
			Description: parsedFeed.Description,
			LastFetched: time.Now().UnixMilli(),
		},
		Articles: articles,
	}
	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(response); err != nil {
		log.Printf("Error encoding feed proxy response: %v", err)
	}
}
// HandleProxy fetches the resource named by the "url" query parameter
// with browser-like headers and streams the body back to the caller,
// forwarding the upstream Content-Type.
//
// NOTE(review): this is an open relay for arbitrary URLs (SSRF risk) —
// confirm it is only reachable behind auth and rate limiting.
func HandleProxy(w http.ResponseWriter, r *http.Request) {
	// CORS preflight: reply immediately.
	if r.Method == "OPTIONS" {
		w.WriteHeader(http.StatusOK)
		return
	}
	targetURL := r.URL.Query().Get("url")
	if targetURL == "" {
		http.Error(w, "Missing url parameter", http.StatusBadRequest)
		return
	}
	client := &http.Client{Timeout: 15 * time.Second}
	req, err := http.NewRequest("GET", targetURL, nil)
	if err != nil {
		http.Error(w, "Failed to create request: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// Add browser-like headers
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")
	req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
	resp, err := client.Do(req)
	if err != nil {
		http.Error(w, "Failed to fetch URL: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		http.Error(w, "Target returned status "+resp.Status, http.StatusBadGateway)
		return
	}
	w.Header().Set("Content-Type", resp.Header.Get("Content-Type"))
	// Stream rather than buffer; copy errors are logged, not fatal.
	if _, err := io.Copy(w, resp.Body); err != nil {
		log.Printf("Error copying proxy response body: %v", err)
	}
}
// FullTextResponse is the JSON body returned by HandleFullText with the
// readable content extracted by go-readability.
type FullTextResponse struct {
	Title       string `json:"title"`
	Content     string `json:"content"`     // extracted HTML content
	TextContent string `json:"textContent"` // plain-text variant of Content
	Excerpt     string `json:"excerpt"`
	Byline      string `json:"byline"`
	SiteName    string `json:"siteName"`
	Image       string `json:"image"`
	Favicon     string `json:"favicon"`
	URL         string `json:"url"` // the URL that was requested
}
// HandleFullText extracts readable article content from the page at the
// "url" query parameter via go-readability and returns it as JSON. It
// first lets readability fetch the URL itself; if that fails, it retries
// the download with browser-like headers and parses the body.
//
// NOTE(review): like the other proxy handlers, the target URL is not
// restricted, so this can reach internal hosts (SSRF) — confirm the
// endpoint sits behind auth and rate limiting.
func HandleFullText(w http.ResponseWriter, r *http.Request) {
	// CORS preflight: reply immediately.
	if r.Method == "OPTIONS" {
		w.WriteHeader(http.StatusOK)
		return
	}
	targetURL := r.URL.Query().Get("url")
	if targetURL == "" {
		http.Error(w, "Missing url parameter", http.StatusBadRequest)
		return
	}
	// Fix: reject unparseable URLs up front. The original ignored this
	// error and could hand a garbage base URL to readability.FromReader.
	parsedURL, err := url.Parse(targetURL)
	if err != nil {
		http.Error(w, "Invalid url parameter: "+err.Error(), http.StatusBadRequest)
		return
	}
	article, err := readability.FromURL(targetURL, 15*time.Second)
	if err != nil {
		// Fallback: fetch manually with browser-like headers, then parse.
		client := &http.Client{Timeout: 15 * time.Second}
		req, err := http.NewRequest("GET", targetURL, nil)
		if err != nil {
			http.Error(w, "Failed to create request: "+err.Error(), http.StatusInternalServerError)
			return
		}
		req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")
		req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
		resp, err := client.Do(req)
		if err != nil {
			http.Error(w, "Failed to fetch content: "+err.Error(), http.StatusInternalServerError)
			return
		}
		defer resp.Body.Close()
		article, err = readability.FromReader(resp.Body, parsedURL)
		if err != nil {
			http.Error(w, "Failed to extract content: "+err.Error(), http.StatusInternalServerError)
			return
		}
	}
	response := FullTextResponse{
		Title:       article.Title,
		Content:     article.Content,
		TextContent: article.TextContent,
		Excerpt:     article.Excerpt,
		Byline:      article.Byline,
		SiteName:    article.SiteName,
		Image:       article.Image,
		Favicon:     article.Favicon,
		URL:         targetURL,
	}
	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(response); err != nil {
		log.Printf("Error encoding fulltext response: %v", err)
	}
}

512
internal/storage/sqlite.go Normal file
View File

@@ -0,0 +1,512 @@
package storage
import (
"database/sql"
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"sync"
"time"
_ "modernc.org/sqlite"
)
// SQLiteDB wraps a modernc.org/sqlite connection pool used for app
// persistence (feeds, categories, articles, settings).
type SQLiteDB struct {
	db *sql.DB
	// mu is taken by init; NOTE(review): the other methods do not take
	// it (database/sql handles its own locking) — confirm intentional.
	mu sync.RWMutex
}
// NewSQLiteDB opens (creating if needed) the SQLite database at path,
// ensuring the parent directory exists, and initializes the schema.
// On init failure the handle is closed before returning the error.
func NewSQLiteDB(path string) (*SQLiteDB, error) {
	// Ensure directory exists
	if err := os.MkdirAll(filepath.Dir(path), 0750); err != nil {
		return nil, err
	}
	handle, err := sql.Open("sqlite", path)
	if err != nil {
		return nil, err
	}
	store := &SQLiteDB{db: handle}
	if initErr := store.init(); initErr != nil {
		if closeErr := handle.Close(); closeErr != nil {
			return nil, fmt.Errorf("init error: %v, close error: %v", initErr, closeErr)
		}
		return nil, initErr
	}
	return store, nil
}
// init enables WAL journaling and creates the schema (categories,
// feeds, articles, settings, plus article indexes) if missing.
// "order" is quoted in the DDL because ORDER is an SQL keyword.
func (s *SQLiteDB) init() error {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Enable WAL mode
	_, err := s.db.Exec("PRAGMA journal_mode=WAL;")
	if err != nil {
		return err
	}
	queries := []string{
		`CREATE TABLE IF NOT EXISTS categories (
id TEXT PRIMARY KEY,
name TEXT,
"order" INTEGER
);`,
		`CREATE TABLE IF NOT EXISTS feeds (
id TEXT PRIMARY KEY,
title TEXT,
categoryId TEXT,
"order" INTEGER,
enabled INTEGER,
fetchInterval INTEGER,
FOREIGN KEY(categoryId) REFERENCES categories(id)
);`,
		`CREATE TABLE IF NOT EXISTS articles (
id TEXT PRIMARY KEY,
feedId TEXT,
title TEXT,
link TEXT,
description TEXT,
content TEXT,
author TEXT,
pubDate INTEGER,
read INTEGER,
saved INTEGER,
imageUrl TEXT,
readAt INTEGER,
FOREIGN KEY(feedId) REFERENCES feeds(id)
);`,
		`CREATE INDEX IF NOT EXISTS idx_articles_pubDate ON articles(pubDate);`,
		`CREATE INDEX IF NOT EXISTS idx_articles_readAt ON articles(readAt);`,
		`CREATE TABLE IF NOT EXISTS settings (
key TEXT PRIMARY KEY,
value TEXT
);`,
	}
	for _, q := range queries {
		if _, err := s.db.Exec(q); err != nil {
			return err
		}
	}
	return nil
}
// Close releases the underlying database handle.
func (s *SQLiteDB) Close() error {
	return s.db.Close()
}
// Database Stats and Operations

// DBStats is the JSON shape returned by GetStats.
type DBStats struct {
	Size       int64  `json:"size"`       // database file size in bytes
	Path       string `json:"path"`       // filesystem path of the main database
	Articles   int    `json:"articles"`   // row count of articles
	Feeds      int    `json:"feeds"`      // row count of feeds
	WALEnabled bool   `json:"walEnabled"` // true when journal_mode reports "wal"
}
// GetStats reports the database file path and size, article and feed
// row counts, and whether WAL journaling is active. Count/mode failures
// are logged and leave zero values; only the path lookup is fatal.
func (s *SQLiteDB) GetStats() (DBStats, error) {
	var stats DBStats
	// PRAGMA database_list returns (seq, name, file); only file is used.
	// Fix: the original scanned the first two columns into untyped nil
	// destinations, which database/sql rejects ("destination pointer is
	// nil"), so this query always errored. Use typed throwaways instead.
	var seq int
	var name string
	var path string
	if err := s.db.QueryRow("PRAGMA database_list").Scan(&seq, &name, &path); err != nil {
		return stats, err
	}
	stats.Path = path
	if fi, err := os.Stat(path); err == nil {
		stats.Size = fi.Size()
	}
	// Count articles
	if err := s.db.QueryRow("SELECT COUNT(*) FROM articles").Scan(&stats.Articles); err != nil {
		log.Printf("Error counting articles: %v", err)
	}
	// Count feeds
	if err := s.db.QueryRow("SELECT COUNT(*) FROM feeds").Scan(&stats.Feeds); err != nil {
		log.Printf("Error counting feeds: %v", err)
	}
	// Check WAL
	var mode string
	if err := s.db.QueryRow("PRAGMA journal_mode").Scan(&mode); err != nil {
		log.Printf("Error checking journal mode: %v", err)
	}
	stats.WALEnabled = mode == "wal"
	return stats, nil
}
// Vacuum rebuilds the database file, reclaiming free pages.
func (s *SQLiteDB) Vacuum() error {
	_, err := s.db.Exec("VACUUM")
	return err
}
// IntegrityCheck runs PRAGMA integrity_check and returns its first
// result row (SQLite reports "ok" for a healthy database).
func (s *SQLiteDB) IntegrityCheck() (string, error) {
	var res string
	err := s.db.QueryRow("PRAGMA integrity_check").Scan(&res)
	return res, err
}
// Data Operations

// SaveSettings stores the settings JSON blob under the fixed 'main'
// key, replacing any previous value.
func (s *SQLiteDB) SaveSettings(settingsJSON string) error {
	_, err := s.db.Exec("INSERT OR REPLACE INTO settings (key, value) VALUES ('main', ?)", settingsJSON)
	return err
}
// GetSettings returns the stored settings JSON, or "{}" when nothing
// has been saved yet.
func (s *SQLiteDB) GetSettings() (string, error) {
	var settings string
	switch err := s.db.QueryRow("SELECT value FROM settings WHERE key = 'main'").Scan(&settings); err {
	case sql.ErrNoRows:
		return "{}", nil
	default:
		return settings, err
	}
}
// SaveCategories upserts the categories given as a JSON array of
// {id, name, order} objects, all within one transaction.
func (s *SQLiteDB) SaveCategories(catsJSON string) error {
	type category struct {
		ID    string `json:"id"`
		Name  string `json:"name"`
		Order int    `json:"order"`
	}
	var cats []category
	if err := json.Unmarshal([]byte(catsJSON), &cats); err != nil {
		return err
	}
	tx, err := s.db.Begin()
	if err != nil {
		return err
	}
	// No-op after a successful Commit; undoes partial work on error.
	defer tx.Rollback()
	for _, cat := range cats {
		if _, err := tx.Exec("INSERT OR REPLACE INTO categories (id, name, \"order\") VALUES (?, ?, ?)", cat.ID, cat.Name, cat.Order); err != nil {
			return err
		}
	}
	return tx.Commit()
}
// GetCategories returns all categories as a JSON array, ordered by the
// "order" column ascending.
func (s *SQLiteDB) GetCategories() (string, error) {
	rows, err := s.db.Query("SELECT id, name, \"order\" FROM categories ORDER BY \"order\" ASC")
	if err != nil {
		return "[]", err
	}
	defer rows.Close()
	// Initialized non-nil so an empty table marshals as [] (not null).
	var cats []map[string]any = []map[string]any{}
	for rows.Next() {
		var id, name string
		var order int
		if err := rows.Scan(&id, &name, &order); err != nil {
			return "[]", err
		}
		cats = append(cats, map[string]any{"id": id, "name": name, "order": order})
	}
	// Fix: surface errors that aborted row iteration; the original
	// silently returned a truncated list.
	if err := rows.Err(); err != nil {
		return "[]", err
	}
	b, _ := json.Marshal(cats)
	return string(b), nil
}
// SaveFeeds upserts the feeds given as a JSON array, converting the
// boolean "enabled" to the 0/1 integer column, in one transaction.
func (s *SQLiteDB) SaveFeeds(feedsJSON string) error {
	type feed struct {
		ID            string `json:"id"`
		Title         string `json:"title"`
		CategoryID    string `json:"categoryId"`
		Order         int    `json:"order"`
		Enabled       bool   `json:"enabled"`
		FetchInterval int    `json:"fetchInterval"`
	}
	var feeds []feed
	if err := json.Unmarshal([]byte(feedsJSON), &feeds); err != nil {
		return err
	}
	tx, err := s.db.Begin()
	if err != nil {
		return err
	}
	// No-op after a successful Commit; undoes partial work on error.
	defer tx.Rollback()
	for _, f := range feeds {
		enabledFlag := 0
		if f.Enabled {
			enabledFlag = 1
		}
		if _, err := tx.Exec("INSERT OR REPLACE INTO feeds (id, title, categoryId, \"order\", enabled, fetchInterval) VALUES (?, ?, ?, ?, ?, ?)",
			f.ID, f.Title, f.CategoryID, f.Order, enabledFlag, f.FetchInterval); err != nil {
			return err
		}
	}
	return tx.Commit()
}
// GetFeeds returns all feeds as a JSON array ordered by the "order"
// column ascending, mapping the 0/1 enabled column back to a boolean.
func (s *SQLiteDB) GetFeeds() (string, error) {
	rows, err := s.db.Query("SELECT id, title, categoryId, \"order\", enabled, fetchInterval FROM feeds ORDER BY \"order\" ASC")
	if err != nil {
		return "[]", err
	}
	defer rows.Close()
	// Initialized non-nil so an empty table marshals as [] (not null).
	var feeds []map[string]any = []map[string]any{}
	for rows.Next() {
		var id, title, categoryId string
		var order, enabled, fetchInterval int
		if err := rows.Scan(&id, &title, &categoryId, &order, &enabled, &fetchInterval); err != nil {
			return "[]", err
		}
		feeds = append(feeds, map[string]any{
			"id":            id,
			"title":         title,
			"categoryId":    categoryId,
			"order":         order,
			"enabled":       enabled == 1,
			"fetchInterval": fetchInterval,
		})
	}
	// Fix: surface errors that aborted row iteration; the original
	// silently returned a truncated list.
	if err := rows.Err(); err != nil {
		return "[]", err
	}
	b, _ := json.Marshal(feeds)
	return string(b), nil
}
// SaveArticles upserts a JSON array of article objects in one
// transaction, storing the boolean read/saved flags as 0/1 integers.
func (s *SQLiteDB) SaveArticles(articlesJSON string) error {
	var articles []map[string]any
	if err := json.Unmarshal([]byte(articlesJSON), &articles); err != nil {
		return err
	}
	tx, err := s.db.Begin()
	if err != nil {
		return err
	}
	// No-op after a successful Commit; undoes partial work on error.
	defer tx.Rollback()
	// Missing or non-boolean values count as false (0).
	asFlag := func(v any) int {
		if b, ok := v.(bool); ok && b {
			return 1
		}
		return 0
	}
	for _, article := range articles {
		if _, err := tx.Exec(`INSERT OR REPLACE INTO articles
(id, feedId, title, link, description, content, author, pubDate, read, saved, imageUrl, readAt)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
			article["id"], article["feedId"], article["title"], article["link"], article["description"], article["content"], article["author"], article["pubDate"], asFlag(article["read"]), asFlag(article["saved"]), article["imageUrl"], article["readAt"]); err != nil {
			return err
		}
	}
	return tx.Commit()
}
// GetArticles returns up to limit articles (newest first by pubDate)
// as a JSON array, skipping offset rows, optionally filtered to one
// feed when feedId is non-empty.
func (s *SQLiteDB) GetArticles(feedId string, offset, limit int) (string, error) {
	var rows *sql.Rows
	var err error
	if feedId != "" {
		rows, err = s.db.Query("SELECT id, feedId, title, link, description, content, author, pubDate, read, saved, imageUrl, readAt FROM articles WHERE feedId = ? ORDER BY pubDate DESC LIMIT ? OFFSET ?", feedId, limit, offset)
	} else {
		rows, err = s.db.Query("SELECT id, feedId, title, link, description, content, author, pubDate, read, saved, imageUrl, readAt FROM articles ORDER BY pubDate DESC LIMIT ? OFFSET ?", limit, offset)
	}
	if err != nil {
		return "[]", err
	}
	defer rows.Close()
	// Initialized non-nil so an empty result marshals as [] (not null).
	var articles []map[string]any = []map[string]any{}
	for rows.Next() {
		var id, feedId, title, link, description, content, author, imageUrl sql.NullString
		var pubDate int64
		var read, saved int
		var readAt sql.NullInt64
		err := rows.Scan(&id, &feedId, &title, &link, &description, &content, &author, &pubDate, &read, &saved, &imageUrl, &readAt)
		if err != nil {
			return "[]", err
		}
		a := map[string]any{
			"id":          id.String,
			"feedId":      feedId.String,
			"title":       title.String,
			"link":        link.String,
			"description": description.String,
			"content":     content.String,
			"author":      author.String,
			"pubDate":     pubDate,
			"read":        read == 1,
			"saved":       saved == 1,
			"imageUrl":    imageUrl.String,
		}
		// readAt stays absent from the JSON while the column is NULL.
		if readAt.Valid {
			a["readAt"] = readAt.Int64
		}
		articles = append(articles, a)
	}
	// Fix: surface errors that aborted row iteration; the original
	// silently returned a truncated list.
	if err := rows.Err(); err != nil {
		return "[]", err
	}
	b, _ := json.Marshal(articles)
	return string(b), nil
}
// SearchArticles returns up to limit articles whose title, description,
// or content contains query (SQL LIKE, substring match), newest first.
//
// NOTE(review): query is embedded in LIKE patterns without escaping
// % or _, so those characters act as wildcards — confirm intended.
func (s *SQLiteDB) SearchArticles(query string, limit int) (string, error) {
	q := "%" + query + "%"
	rows, err := s.db.Query(`SELECT id, feedId, title, link, description, content, author, pubDate, read, saved, imageUrl, readAt
FROM articles
WHERE title LIKE ? OR description LIKE ? OR content LIKE ?
ORDER BY pubDate DESC LIMIT ?`, q, q, q, limit)
	if err != nil {
		return "[]", err
	}
	defer rows.Close()
	// Initialized non-nil so an empty result marshals as [] (not null).
	var articles []map[string]any = []map[string]any{}
	for rows.Next() {
		var id, feedId, title, link, description, content, author, imageUrl sql.NullString
		var pubDate int64
		var read, saved int
		var readAt sql.NullInt64
		err := rows.Scan(&id, &feedId, &title, &link, &description, &content, &author, &pubDate, &read, &saved, &imageUrl, &readAt)
		if err != nil {
			return "[]", err
		}
		a := map[string]any{
			"id":          id.String,
			"feedId":      feedId.String,
			"title":       title.String,
			"link":        link.String,
			"description": description.String,
			"content":     content.String,
			"author":      author.String,
			"pubDate":     pubDate,
			"read":        read == 1,
			"saved":       saved == 1,
			"imageUrl":    imageUrl.String,
		}
		// readAt stays absent from the JSON while the column is NULL.
		if readAt.Valid {
			a["readAt"] = readAt.Int64
		}
		articles = append(articles, a)
	}
	// Fix: surface errors that aborted row iteration; the original
	// silently returned a truncated list.
	if err := rows.Err(); err != nil {
		return "[]", err
	}
	b, _ := json.Marshal(articles)
	return string(b), nil
}
// UpdateArticle updates the mutable columns (read, saved, readAt,
// content) of the article identified by "id" in the given JSON object.
func (s *SQLiteDB) UpdateArticle(articleJSON string) error {
	var fields map[string]any
	if err := json.Unmarshal([]byte(articleJSON), &fields); err != nil {
		return err
	}
	// Missing or non-boolean values count as false (0).
	asFlag := func(v any) int {
		if b, ok := v.(bool); ok && b {
			return 1
		}
		return 0
	}
	_, err := s.db.Exec(`UPDATE articles SET read = ?, saved = ?, readAt = ?, content = ? WHERE id = ?`,
		asFlag(fields["read"]), asFlag(fields["saved"]), fields["readAt"], fields["content"], fields["id"])
	return err
}
// MarkAsRead flags the article as read and stamps readAt with the
// current time. The "AND read = 0" guard keeps the original readAt
// timestamp on repeated calls.
func (s *SQLiteDB) MarkAsRead(id string) error {
	now := time.Now().UnixMilli()
	_, err := s.db.Exec(`UPDATE articles SET read = 1, readAt = ? WHERE id = ? AND read = 0`, now, id)
	return err
}
// DeleteFeed removes a feed and all of its articles in one transaction.
func (s *SQLiteDB) DeleteFeed(feedId string) error {
	tx, err := s.db.Begin()
	if err != nil {
		return err
	}
	// No-op after a successful Commit; undoes partial work on error.
	defer tx.Rollback()
	// Delete child articles first, then the feed row itself.
	for _, stmt := range []string{
		"DELETE FROM articles WHERE feedId = ?",
		"DELETE FROM feeds WHERE id = ?",
	} {
		if _, err := tx.Exec(stmt, feedId); err != nil {
			return err
		}
	}
	return tx.Commit()
}
// PurgeOldContent nulls out the cached content of unsaved articles
// published more than `days` days ago, returning the number of rows
// affected. The article rows themselves are kept.
func (s *SQLiteDB) PurgeOldContent(days int) (int64, error) {
	cutoff := time.Now().AddDate(0, 0, -days).UnixMilli()
	res, err := s.db.Exec("UPDATE articles SET content = NULL WHERE saved = 0 AND pubDate < ?", cutoff)
	if err != nil {
		return 0, err
	}
	return res.RowsAffected()
}
// ClearAll deletes every row from all four tables (articles first so
// foreign-key references are removed before their parents).
func (s *SQLiteDB) ClearAll() error {
	for _, stmt := range []string{
		"DELETE FROM articles",
		"DELETE FROM feeds",
		"DELETE FROM categories",
		"DELETE FROM settings",
	} {
		if _, err := s.db.Exec(stmt); err != nil {
			return err
		}
	}
	return nil
}
// GetReadingHistory returns per-day read counts for the last `days`
// days as a JSON array of {date, count}, newest day first, with each
// date converted back to a Unix-millisecond timestamp for the frontend.
func (s *SQLiteDB) GetReadingHistory(days int) (string, error) {
	cutoff := time.Now().AddDate(0, 0, -days).UnixMilli()
	rows, err := s.db.Query(`
SELECT strftime('%Y-%m-%d', datetime(readAt/1000, 'unixepoch')) as date, COUNT(*) as count
FROM articles
WHERE read = 1 AND readAt > ?
GROUP BY date
ORDER BY date DESC`, cutoff)
	if err != nil {
		return "[]", err
	}
	defer rows.Close()
	// Initialized non-nil so an empty result marshals as [] (not null).
	var history []map[string]any = []map[string]any{}
	for rows.Next() {
		var date string
		var count int
		if err := rows.Scan(&date, &count); err != nil {
			// Best-effort: skip malformed rows instead of failing the whole
			// history (behavior preserved from the original).
			continue
		}
		// Convert date string back to timestamp for frontend
		t, _ := time.Parse("2006-01-02", date)
		history = append(history, map[string]any{
			"date":  t.UnixMilli(),
			"count": count,
		})
	}
	// Fix: surface errors that aborted row iteration; the original
	// silently returned partial data.
	if err := rows.Err(); err != nil {
		return "[]", err
	}
	b, _ := json.Marshal(history)
	return string(b), nil
}