replace localStorage with pglite for persistent data, force-add previously ignored lib/ files

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-02 14:05:40 +01:00
parent 3cdcfb7266
commit c96c24a250
31 changed files with 1552 additions and 162 deletions
+119
View File
@@ -0,0 +1,119 @@
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"
import { assembleFeed } from "./assemble-feed"
// Shared mock installed as the global `fetch` before every test; each test
// primes it with a queue of mockResolvedValueOnce responses.
const mockFetch = vi.fn()
beforeEach(() => {
  vi.stubGlobal("fetch", mockFetch)
})
afterEach(() => {
  // Fix: vi.restoreAllMocks() does NOT undo vi.stubGlobal, so without this
  // the mocked fetch would leak into suites that run after this file.
  vi.unstubAllGlobals()
  vi.restoreAllMocks()
})
// Wraps a payload in the API's `{ data: ... }` envelope as a 200 JSON Response.
function okResponse(data: unknown) {
  const body = JSON.stringify({ data })
  const init = {
    status: 200,
    headers: { "Content-Type": "application/json" },
  }
  return new Response(body, init)
}
// Topic catalogue fixture. Only topic 1 carries an abgeordnetenwatch_url, so
// tests exercise both the with-URL and without-URL paths when building refs.
const TOPICS = [
  { id: 1, label: "Umwelt", abgeordnetenwatch_url: "https://www.abgeordnetenwatch.de/themen-dip21/umwelt" },
  { id: 2, label: "Bildung" },
  { id: 3, label: "Wirtschaft" },
]
// Poll fixture: three polls with distinct dates (for sort-order assertions),
// each tagged with exactly one topic above. Poll 100 embeds a full topic ref;
// 101/102 embed sparse ({ id }-only) refs that need the catalogue fallback.
const POLLS = [
  {
    id: 100,
    label: "Klimaschutzgesetz",
    abgeordnetenwatch_url: "https://www.abgeordnetenwatch.de/bundestag/21/abstimmungen/klimaschutzgesetz",
    field_poll_date: "2024-06-15",
    field_topics: [
      { id: 1, label: "Umwelt", abgeordnetenwatch_url: "https://www.abgeordnetenwatch.de/themen-dip21/umwelt" },
    ],
  },
  { id: 101, label: "Schulreform", field_poll_date: "2024-06-10", field_topics: [{ id: 2 }] },
  { id: 102, label: "Steuerreform", field_poll_date: "2024-06-05", field_topics: [{ id: 3 }] },
]
describe("assembleFeed", () => {
  // The mockFetch response queue is consumed in call order, which is
  // load-bearing: topics first, then the recent-polls page, then (politician
  // path only) mandates -> votes -> batched polls-by-id.
  it("returns empty feed when no follows", async () => {
    mockFetch.mockResolvedValueOnce(okResponse(TOPICS)).mockResolvedValueOnce(okResponse(POLLS))
    const feed = await assembleFeed([], [])
    expect(feed).toEqual([])
  })
  it("filters polls by followed topic IDs", async () => {
    mockFetch.mockResolvedValueOnce(okResponse(TOPICS)).mockResolvedValueOnce(okResponse(POLLS))
    const feed = await assembleFeed([1, 2], [])
    // Polls 100 and 101 match topics 1/2; poll 102 (topic 3) is filtered out.
    expect(feed).toHaveLength(2)
    expect(feed[0].title).toBe("Klimaschutzgesetz")
    expect(feed[1].title).toBe("Schulreform")
  })
  it("sorts by date descending", async () => {
    mockFetch.mockResolvedValueOnce(okResponse(TOPICS)).mockResolvedValueOnce(okResponse(POLLS))
    const feed = await assembleFeed([1, 2, 3], [])
    expect(feed[0].date).toBe("2024-06-15")
    expect(feed[1].date).toBe("2024-06-10")
    expect(feed[2].date).toBe("2024-06-05")
  })
  it("includes topic labels and URLs", async () => {
    mockFetch.mockResolvedValueOnce(okResponse(TOPICS)).mockResolvedValueOnce(okResponse(POLLS))
    const feed = await assembleFeed([1], [])
    expect(feed[0].topics).toEqual([{ label: "Umwelt", url: "https://www.abgeordnetenwatch.de/themen-dip21/umwelt" }])
    expect(feed[0].url).toBe("https://www.abgeordnetenwatch.de/bundestag/21/abstimmungen/klimaschutzgesetz")
  })
  it("fetches polls for followed politicians", async () => {
    // Politician path: mandates for politician 42 -> votes for mandate 500 ->
    // batched lookup of poll 100 referenced by vote 900.
    mockFetch
      .mockResolvedValueOnce(okResponse(TOPICS))
      .mockResolvedValueOnce(okResponse(POLLS))
      .mockResolvedValueOnce(okResponse([{ id: 500 }]))
      .mockResolvedValueOnce(okResponse([{ id: 900, poll: { id: 100 } }]))
      .mockResolvedValueOnce(
        okResponse([
          {
            id: 100,
            label: "Klimaschutzgesetz",
            field_poll_date: "2024-06-15",
            field_topics: [{ id: 1 }],
          },
        ]),
      )
    const feed = await assembleFeed([], [42])
    expect(feed).toHaveLength(1)
    expect(feed[0].title).toBe("Klimaschutzgesetz")
  })
  it("deduplicates polls appearing from both topics and politicians", async () => {
    // Poll 100 arrives via topic 1 AND via politician 42's votes; the feed
    // must contain it exactly once.
    mockFetch
      .mockResolvedValueOnce(okResponse(TOPICS))
      .mockResolvedValueOnce(okResponse(POLLS))
      .mockResolvedValueOnce(okResponse([{ id: 500 }]))
      .mockResolvedValueOnce(okResponse([{ id: 900, poll: { id: 100 } }]))
      .mockResolvedValueOnce(
        okResponse([
          {
            id: 100,
            label: "Klimaschutzgesetz",
            field_poll_date: "2024-06-15",
            field_topics: [{ id: 1 }],
          },
        ]),
      )
    const feed = await assembleFeed([1], [42])
    const ids = feed.map((f) => f.id)
    const unique = new Set(ids)
    expect(ids.length).toBe(unique.size)
  })
})
+83
View File
@@ -0,0 +1,83 @@
import {
type Poll,
fetchCandidacyMandates,
fetchPolls,
fetchPollsByIds,
fetchTopics,
fetchVotes,
} from "@/shared/lib/aw-api"
/** A topic reference rendered on a feed item: label plus optional deep link. */
export interface FeedTopicRef {
  label: string
  url: string | null
}
/** A single entry in the assembled activity feed. */
export interface FeedItem {
  // Stable identifier prefixed with the item kind, e.g. "poll-123".
  id: string
  kind: "poll"
  status: "upcoming" | "past"
  title: string
  // Link to the poll on abgeordnetenwatch.de, when the API provided one.
  url: string | null
  // ISO date (YYYY-MM-DD) of the poll, or null when not yet scheduled.
  date: string | null
  topics: FeedTopicRef[]
  source: string
}
function classifyPoll(poll: Poll): "upcoming" | "past" {
// field_accepted is only present on completed votes
if (poll.field_accepted != null) return "past"
if (!poll.field_poll_date) return "upcoming"
const today = new Date().toISOString().slice(0, 10)
return poll.field_poll_date > today ? "upcoming" : "past"
}
/**
 * Assembles the activity feed from the user's follows.
 *
 * Fetches the topic catalogue, the most recent polls, and the polls that
 * followed politicians voted on; filters the recent polls to the followed
 * topics, merges both sources deduplicated by poll id, and returns the feed
 * sorted newest-first (undated items last, title as the final tiebreaker).
 *
 * @param followedTopicIDs topic ids the user follows; empty means no topic-based items
 * @param followedPoliticianIDs politician ids the user follows
 * @param pollLimit how many recent polls to fetch for topic filtering (default 150)
 */
export async function assembleFeed(
  followedTopicIDs: number[],
  followedPoliticianIDs: number[],
  pollLimit = 150,
): Promise<FeedItem[]> {
  // The three fetches are independent — run them concurrently instead of
  // awaiting the politician polls after the first pair completes.
  const [topics, polls, politicianPolls] = await Promise.all([
    fetchTopics(),
    fetchPolls(pollLimit),
    fetchPollsForPoliticians(followedPoliticianIDs),
  ])
  const topicMap = new Map(topics.map((t) => [t.id, t.label]))
  const topicSet = new Set(followedTopicIDs)
  // Following no topics yields no topic-based items (not "all polls").
  const filteredByTopics = topicSet.size > 0 ? polls.filter((p) => p.field_topics.some((t) => topicSet.has(t.id))) : []
  // Deduplicate by poll id; politician-derived entries overwrite topic-derived ones.
  const combined = new Map<number, Poll>()
  for (const p of [...filteredByTopics, ...politicianPolls]) combined.set(p.id, p)
  const items: FeedItem[] = Array.from(combined.values()).map((poll) => ({
    id: `poll-${poll.id}`,
    kind: "poll",
    status: classifyPoll(poll),
    title: poll.label,
    url: poll.abgeordnetenwatch_url ?? null,
    date: poll.field_poll_date,
    topics: poll.field_topics.flatMap((t) => {
      // Embedded topic refs may be sparse ({ id } only); fall back to the
      // catalogue label, and drop refs we cannot label at all.
      const label = t.label ?? topicMap.get(t.id)
      return label ? [{ label, url: t.abgeordnetenwatch_url ?? null }] : []
    }),
    source: "Bundestag",
  }))
  // Newest first; undated items sink to the end, then alphabetical by title.
  return items.sort((a, b) => {
    if (a.date && b.date) return b.date.localeCompare(a.date)
    if (!a.date && b.date) return 1
    if (a.date && !b.date) return -1
    return a.title.localeCompare(b.title)
  })
}
/**
 * Collects the polls the given politicians cast votes in.
 *
 * Loads each politician's mandates, then the votes per mandate, and finally
 * the referenced polls in one batched request.
 * NOTE(review): the slice(0, 3) cap assumes fetchCandidacyMandates returns
 * mandates most-recent first — confirm against the API client.
 */
async function fetchPollsForPoliticians(politicianIDs: number[]): Promise<Poll[]> {
  if (politicianIDs.length === 0) return []
  const mandateResults = await Promise.all(politicianIDs.map((pid) => fetchCandidacyMandates(pid)))
  // Cap at 3 mandates per politician to bound the fan-out of vote requests.
  const mandateIDs = mandateResults.flatMap((mandates) => mandates.slice(0, 3).map((m) => m.id))
  const voteResults = await Promise.all(mandateIDs.map((mid) => fetchVotes(mid)))
  const pollIDSet = new Set<number>()
  for (const votes of voteResults) {
    for (const v of votes) {
      if (v.poll?.id != null) pollIDSet.add(v.poll.id)
    }
  }
  // Fix: skip the batch request entirely when no votes referenced a poll,
  // instead of issuing fetchPollsByIds([]).
  if (pollIDSet.size === 0) return []
  return fetchPollsByIds(Array.from(pollIDSet))
}
+71
View File
@@ -0,0 +1,71 @@
import type { FeedItem } from "./assemble-feed"
import { clearFeedCache, loadFeedCache, mergeFeedItems, saveFeedCache } from "./feed-cache"
// Builds a minimal FeedItem fixture; only id, date and title vary per test.
function makeItem(id: string, date: string | null = "2025-01-15", title = `Poll ${id}`): FeedItem {
  const item: FeedItem = {
    id,
    kind: "poll",
    status: "past",
    title,
    url: null,
    date,
    topics: [],
    source: "Bundestag",
  }
  return item
}
// NOTE(review): this suite still targets the pre-PGlite API. feed-cache.ts
// now exports async functions taking a PGlite handle (loadFeedCache(db),
// saveFeedCache(db, items), clearFeedCache(db)) and delegates persistence to
// shared/db/feed-cache-db, so these synchronous zero-arg calls and the
// "agw_feed_cache" localStorage fixtures no longer match the implementation —
// the suite needs a rewrite against an in-memory PGlite instance.
// NOTE(review): describe/it/expect/beforeEach are not imported in this file —
// presumably vitest globals are enabled; verify the vitest config.
beforeEach(() => localStorage.clear())
describe("feed cache persistence", () => {
  it("returns null when no cache exists", () => {
    expect(loadFeedCache()).toBeNull()
  })
  it("round-trips save and load", () => {
    const items = [makeItem("poll-1"), makeItem("poll-2")]
    saveFeedCache(items)
    const loaded = loadFeedCache()
    expect(loaded).not.toBeNull()
    expect(loaded?.items).toEqual(items)
    expect(typeof loaded?.updatedAt).toBe("number")
  })
  it("clears cache", () => {
    saveFeedCache([makeItem("poll-1")])
    clearFeedCache()
    expect(loadFeedCache()).toBeNull()
  })
  it("returns null for corrupted data", () => {
    localStorage.setItem("agw_feed_cache", "not-json")
    expect(loadFeedCache()).toBeNull()
  })
  it("returns null for missing items array", () => {
    localStorage.setItem("agw_feed_cache", JSON.stringify({ updatedAt: 123 }))
    expect(loadFeedCache()).toBeNull()
  })
})
// mergeFeedItems is pure and its signature survived the PGlite migration, so
// this suite remains valid as-is.
describe("mergeFeedItems", () => {
  it("keeps old items and adds new ones", () => {
    const cached = [makeItem("poll-1", "2025-01-10"), makeItem("poll-2", "2025-01-11")]
    const fresh = [makeItem("poll-3", "2025-01-12")]
    const merged = mergeFeedItems(cached, fresh)
    expect(merged).toHaveLength(3)
    expect(merged.map((i) => i.id)).toContain("poll-1")
    expect(merged.map((i) => i.id)).toContain("poll-3")
  })
  it("deduplicates by id, preferring fresh", () => {
    // Same id in both inputs: the fresh item's fields must win.
    const cached = [makeItem("poll-1", "2025-01-10", "Old Title")]
    const fresh = [makeItem("poll-1", "2025-01-10", "New Title")]
    const merged = mergeFeedItems(cached, fresh)
    expect(merged).toHaveLength(1)
    expect(merged[0].title).toBe("New Title")
  })
  it("sorts by date descending", () => {
    const cached = [makeItem("poll-1", "2025-01-01")]
    const fresh = [makeItem("poll-2", "2025-01-15"), makeItem("poll-3", "2025-01-10")]
    const merged = mergeFeedItems(cached, fresh)
    expect(merged.map((i) => i.date)).toEqual(["2025-01-15", "2025-01-10", "2025-01-01"])
  })
  it("sorts null dates after dated items", () => {
    const items = mergeFeedItems([makeItem("poll-1", null, "Zebra")], [makeItem("poll-2", "2025-01-01")])
    expect(items[0].id).toBe("poll-2")
    expect(items[1].id).toBe("poll-1")
  })
})
+33
View File
@@ -0,0 +1,33 @@
import { clearCachedFeed, loadCachedFeed, saveCachedFeed } from "@/shared/db/feed-cache-db"
import type { PGlite } from "@electric-sql/pglite"
import type { FeedItem } from "./assemble-feed"
/** Persisted shape of the feed cache: the items plus a last-write timestamp. */
export interface FeedCacheData {
  items: FeedItem[]
  // Timestamp of the last save — presumably Date.now() milliseconds; the unit
  // is set by feed-cache-db, verify there.
  updatedAt: number
}
/**
 * Loads the cached feed from the PGlite database; resolves null when no
 * (valid) cache exists. Thin delegation to shared/db/feed-cache-db.
 */
export async function loadFeedCache(db: PGlite): Promise<FeedCacheData | null> {
  return loadCachedFeed(db)
}
/** Persists the given items as the current feed cache. */
export async function saveFeedCache(db: PGlite, items: FeedItem[]): Promise<void> {
  await saveCachedFeed(db, items)
}
/** Removes any persisted feed cache. */
export async function clearFeedCache(db: PGlite): Promise<void> {
  await clearCachedFeed(db)
}
/**
 * Merges cached and freshly fetched feed items, deduplicating by id.
 *
 * On an id collision the fresh item wins. The result is ordered newest-first
 * by date; undated items sort after dated ones, and items without dates are
 * ordered alphabetically by title.
 */
export function mergeFeedItems(cached: FeedItem[], fresh: FeedItem[]): FeedItem[] {
  const byId = new Map<string, FeedItem>()
  // Insert cached first so fresh entries overwrite them on duplicate ids.
  for (const item of [...cached, ...fresh]) byId.set(item.id, item)
  const compare = (a: FeedItem, b: FeedItem): number => {
    if (a.date && b.date) return b.date.localeCompare(a.date)
    if (a.date) return -1
    if (b.date) return 1
    return a.title.localeCompare(b.title)
  }
  return [...byId.values()].sort(compare)
}