rework scan page, add ingest-source browsing, bump version to 2026.04.15.8
Some checks failed
Build and Push Docker Image / build (push) Has been cancelled

This commit is contained in:
2026-04-15 18:31:00 +02:00
parent c6698db51a
commit b1a9eeb481
10 changed files with 641 additions and 94 deletions

View File

@@ -0,0 +1,47 @@
# Scan Page Rework Implementation Plan
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** Rework the Scan page to prioritize progress + fresh ingest visibility, and add a scalable filterable/lazy-loaded library table.
**Architecture:** Keep `/api/scan` lightweight for status/progress and compact recent ingest rows. Add `/api/scan/items` for paginated/filterable DB browsing. Update `ScanPage` to render: scan card header count, compact 5-row recent ingest table, then a filterable lazy-loaded library table.
**Tech Stack:** Bun + Hono, React 19 + TanStack Router, bun:test, Biome.
---
### Task 1: Backend scan payload + items endpoint (TDD)
**Files:**
- Modify: `server/api/__tests__/scan.test.ts`
- Modify: `server/db/schema.ts`
- Modify: `server/db/index.ts`
- Modify: `server/services/rescan.ts`
- Modify: `server/api/scan.ts`
- [ ] Add failing tests for scan item query parsing/normalization and SQL filter behavior helpers.
- [ ] Run targeted tests to verify failure.
- [ ] Add `media_items.ingest_source` schema + migration, set value on upsert (`scan`/`webhook`).
- [ ] Extend `GET /api/scan` recent item shape with timestamp + ingest source and clamp to 5 rows.
- [ ] Add `GET /api/scan/items` with filters (`q,status,type,source`) + pagination (`offset,limit`), returning `{ rows,total,hasMore }`.
- [ ] Run targeted and full backend tests.
### Task 2: Scan page UI rework + lazy table
**Files:**
- Modify: `src/features/scan/ScanPage.tsx`
- [ ] Refactor scan box header to show scanned count in top-right.
- [ ] Replace large recent-items table with a compact 5-row recent ingest list directly under progress bar.
- [ ] Add filter controls for library table (`q,status,type,source`) with default “All”.
- [ ] Add lazy loading flow (initial fetch + load more) against `/api/scan/items`.
- [ ] Render new table with useful file metadata columns and consistent truncation/tooltips.
### Task 3: Verification
**Files:**
- Modify: none
- [ ] Run `bun test`.
- [ ] Run `bun run lint` and format if needed.
- [ ] Confirm no regressions in scan start/stop/progress behavior.

View File

@@ -1,6 +1,6 @@
{ {
"name": "netfelix-audio-fix", "name": "netfelix-audio-fix",
"version": "2026.04.15.7", "version": "2026.04.15.8",
"scripts": { "scripts": {
"dev:server": "NODE_ENV=development bun --hot server/index.tsx", "dev:server": "NODE_ENV=development bun --hot server/index.tsx",
"dev:client": "vite", "dev:client": "vite",

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test"; import { describe, expect, test } from "bun:test";
import { extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute"; import { enqueueUnseenJobs, extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
describe("extractErrorSummary", () => { describe("extractErrorSummary", () => {
test("pulls the real error line out of ffmpeg's banner", () => { test("pulls the real error line out of ffmpeg's banner", () => {
@@ -71,3 +71,15 @@ describe("yieldAfterChunk", () => {
expect(yieldCalls).toBe(1); expect(yieldCalls).toBe(1);
}); });
}); });
describe("enqueueUnseenJobs", () => {
  test("appends only unseen job ids to the active queue", () => {
    type J = { id: number };
    const queue: J[] = [{ id: 1 }, { id: 2 }];
    const seen = new Set<number>([1, 2]);
    const incoming: J[] = [{ id: 2 }, { id: 3 }, { id: 4 }];

    const added = enqueueUnseenJobs(queue, seen, incoming);

    // Only 3 and 4 are new; 2 is already queued and must be skipped.
    expect(added).toBe(2);
    expect(queue.map((job) => job.id)).toEqual([1, 2, 3, 4]);
    expect(seen.has(3)).toBeTrue();
    expect(seen.has(4)).toBeTrue();
  });
});

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test"; import { describe, expect, test } from "bun:test";
import { parseScanLimit } from "../scan"; import { buildScanItemsWhere, parseScanItemsQuery, parseScanLimit } from "../scan";
describe("parseScanLimit", () => { describe("parseScanLimit", () => {
test("accepts positive integers and nullish/empty as no-limit", () => { test("accepts positive integers and nullish/empty as no-limit", () => {
@@ -29,3 +29,73 @@ describe("parseScanLimit", () => {
expect(parseScanLimit(Number.POSITIVE_INFINITY)).toEqual({ ok: false }); expect(parseScanLimit(Number.POSITIVE_INFINITY)).toEqual({ ok: false });
}); });
}); });
describe("parseScanItemsQuery", () => {
  test("normalizes default filters and pagination", () => {
    expect(parseScanItemsQuery({})).toEqual({
      offset: 0,
      limit: 50,
      search: "",
      status: "all",
      type: "all",
      source: "all",
    });
  });

  test("clamps limit and offset, trims and lowercases values", () => {
    const raw = {
      offset: "-12",
      limit: "5000",
      q: " The Wire ",
      status: "SCANNED",
      type: "EPISODE",
      source: "WEBHOOK",
    };
    // Negative offset clamps to 0, oversized limit to 200; enums lowercase,
    // search keeps its original case but loses surrounding whitespace.
    expect(parseScanItemsQuery(raw)).toEqual({
      offset: 0,
      limit: 200,
      search: "The Wire",
      status: "scanned",
      type: "episode",
      source: "webhook",
    });
  });

  test("falls back to all for unknown enum values", () => {
    const parsed = parseScanItemsQuery({ status: "zzz", type: "cartoon", source: "mqtt" });
    for (const key of ["status", "type", "source"] as const) {
      expect(parsed[key]).toBe("all");
    }
  });
});
describe("buildScanItemsWhere", () => {
  // Broadest possible query: every filter disabled.
  const broad = { offset: 0, limit: 50, search: "", status: "all", type: "all", source: "all" } as const;

  test("builds combined where clause + args in stable order", () => {
    const where = buildScanItemsWhere({
      ...broad,
      search: "blade",
      status: "scanned",
      type: "movie",
      source: "webhook",
    });
    expect(where.sql).toBe(
      "WHERE scan_status = ? AND lower(type) = ? AND ingest_source = ? AND (lower(name) LIKE ? OR lower(file_path) LIKE ?)",
    );
    expect(where.args).toEqual(["scanned", "movie", "webhook", "%blade%", "%blade%"]);
  });

  test("returns empty where when all filters are broad", () => {
    const where = buildScanItemsWhere({ ...broad });
    expect(where.sql).toBe("");
    expect(where.args).toEqual([]);
  });
});

View File

@@ -23,6 +23,8 @@ const app = new Hono();
let queueRunning = false; let queueRunning = false;
let runningProc: ReturnType<typeof Bun.spawn> | null = null; let runningProc: ReturnType<typeof Bun.spawn> | null = null;
let runningJobId: number | null = null; let runningJobId: number | null = null;
let activeQueue: Job[] | null = null;
let activeSeen: Set<number> | null = null;
const LIVE_UPDATE_INTERVAL_MS = 500; const LIVE_UPDATE_INTERVAL_MS = 500;
const STREAM_CHUNKS_BEFORE_YIELD = 24; const STREAM_CHUNKS_BEFORE_YIELD = 24;
@@ -41,6 +43,17 @@ export async function yieldAfterChunk(
return 0; return 0;
} }
/**
 * Appends to `queue` every job from `jobs` whose id is not yet in `seen`,
 * marking each appended id as seen so a later call cannot enqueue it twice.
 *
 * @returns the number of jobs actually appended.
 */
export function enqueueUnseenJobs<T extends { id: number }>(queue: T[], seen: Set<number>, jobs: T[]): number {
  let appended = 0;
  for (const job of jobs) {
    if (!seen.has(job.id)) {
      // Mark before counting so duplicate ids inside `jobs` are only added once.
      seen.add(job.id);
      queue.push(job);
      appended += 1;
    }
  }
  return appended;
}
function emitQueueStatus( function emitQueueStatus(
status: "running" | "paused" | "sleeping" | "idle", status: "running" | "paused" | "sleeping" | "idle",
extra: { until?: string; seconds?: number } = {}, extra: { until?: string; seconds?: number } = {},
@@ -56,6 +69,8 @@ async function runSequential(initial: Job[]): Promise<void> {
let first = true; let first = true;
const queue: Job[] = [...initial]; const queue: Job[] = [...initial];
const seen = new Set<number>(queue.map((j) => j.id)); const seen = new Set<number>(queue.map((j) => j.id));
activeQueue = queue;
activeSeen = seen;
while (queue.length > 0) { while (queue.length > 0) {
const job = queue.shift() as Job; const job = queue.shift() as Job;
@@ -99,15 +114,12 @@ async function runSequential(initial: Job[]): Promise<void> {
// manually clicks "Run all" again. // manually clicks "Run all" again.
if (queue.length === 0) { if (queue.length === 0) {
const more = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[]; const more = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
for (const m of more) { enqueueUnseenJobs(queue, seen, more);
if (!seen.has(m.id)) {
queue.push(m);
seen.add(m.id);
}
}
} }
} }
} finally { } finally {
activeQueue = null;
activeSeen = null;
queueRunning = false; queueRunning = false;
emitQueueStatus("idle"); emitQueueStatus("idle");
} }
@@ -178,8 +190,12 @@ function loadJobRow(jobId: number) {
app.post("/start", (c) => { app.post("/start", (c) => {
const db = getDb(); const db = getDb();
const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[]; const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
if (queueRunning && activeQueue && activeSeen) {
const queued = enqueueUnseenJobs(activeQueue, activeSeen, pending);
return c.json({ ok: true, started: 0, queued });
}
runSequential(pending).catch((err) => logError("Queue failed:", err)); runSequential(pending).catch((err) => logError("Queue failed:", err));
return c.json({ ok: true, started: pending.length }); return c.json({ ok: true, started: pending.length, queued: pending.length });
}); });
// ─── Run single ─────────────────────────────────────────────────────────────── // ─── Run single ───────────────────────────────────────────────────────────────

View File

@@ -23,6 +23,78 @@ export function parseScanLimit(raw: unknown): { ok: true; value: number | null }
return { ok: true, value: n }; return { ok: true, value: n };
} }
// Allowed values for the /api/scan/items filter params; "all" disables that filter.
type ScanStatusFilter = "all" | "pending" | "scanned" | "error";
type ScanTypeFilter = "all" | "movie" | "episode";
type ScanSourceFilter = "all" | "scan" | "webhook";
// Normalized, validated query for /api/scan/items (produced by parseScanItemsQuery).
export interface ScanItemsQuery {
offset: number; // 0-based row offset, never negative
limit: number; // page size, clamped to 1..200
search: string; // trimmed free-text needle; "" means no search filter
status: ScanStatusFilter;
type: ScanTypeFilter;
source: ScanSourceFilter;
}
// Coerces `raw` to an integer, returning `fallback` when it is not a finite
// whole number (NaN, Infinity, fractions, non-numeric strings).
// NOTE(review): despite the name this does NOT reject negative integers —
// callers clamp afterwards (Math.max / clamp in parseScanItemsQuery). Confirm
// the misnomer is acceptable or rename alongside its callers.
function parsePositiveInt(raw: unknown, fallback: number): number {
  const candidate = typeof raw === "number" ? raw : Number(raw);
  // Number.isInteger is false for NaN and ±Infinity, so one check suffices.
  return Number.isInteger(candidate) ? candidate : fallback;
}
// Restricts `n` to the inclusive range [min, max].
function clamp(n: number, min: number, max: number): number {
  if (n < min) return min;
  return n > max ? max : n;
}
// Lowercases `raw` and returns it when it is one of `allowed`; otherwise
// (including non-string input) returns `fallback`.
function parseOneOf<T extends readonly string[]>(raw: unknown, allowed: T, fallback: T[number]): T[number] {
  if (typeof raw !== "string") return fallback;
  const candidate = raw.toLowerCase();
  for (const value of allowed) {
    if (value === candidate) return candidate as T[number];
  }
  return fallback;
}
/**
 * Normalizes the raw /api/scan/items query-string values into a ScanItemsQuery:
 * offset floored at 0, limit clamped to 1..200 (default 50), search trimmed,
 * and each enum filter lowercased with "all" as the fallback for unknown values.
 */
export function parseScanItemsQuery(raw: Record<string, unknown>): ScanItemsQuery {
  return {
    offset: Math.max(0, parsePositiveInt(raw.offset, 0)),
    limit: clamp(parsePositiveInt(raw.limit, 50), 1, 200),
    search: typeof raw.q === "string" ? raw.q.trim() : "",
    status: parseOneOf(raw.status, ["all", "pending", "scanned", "error"] as const, "all"),
    type: parseOneOf(raw.type, ["all", "movie", "episode"] as const, "all"),
    source: parseOneOf(raw.source, ["all", "scan", "webhook"] as const, "all"),
  };
}
/**
 * Builds the SQL WHERE clause + bound args for /api/scan/items from a
 * normalized query. Clause order is stable (status, type, source, search) so
 * tests and query plans are deterministic. Returns `{ sql: "", args: [] }`
 * when every filter is broad.
 */
export function buildScanItemsWhere(query: ScanItemsQuery): { sql: string; args: unknown[] } {
  const parts: Array<[string, unknown[]]> = [];
  if (query.status !== "all") parts.push(["scan_status = ?", [query.status]]);
  if (query.type !== "all") parts.push(["lower(type) = ?", [query.type]]);
  if (query.source !== "all") parts.push(["ingest_source = ?", [query.source]]);
  if (query.search.length > 0) {
    // NOTE(review): LIKE wildcards (% and _) inside the user search are not
    // escaped, so they act as pattern characters — confirm that is intended.
    const needle = `%${query.search.toLowerCase()}%`;
    parts.push(["(lower(name) LIKE ? OR lower(file_path) LIKE ?)", [needle, needle]]);
  }
  if (parts.length === 0) return { sql: "", args: [] };
  return {
    sql: `WHERE ${parts.map(([clause]) => clause).join(" AND ")}`,
    args: parts.flatMap(([, values]) => values),
  };
}
// ─── State ──────────────────────────────────────────────────────────────────── // ─── State ────────────────────────────────────────────────────────────────────
let scanAbort: AbortController | null = null; let scanAbort: AbortController | null = null;
@@ -60,12 +132,65 @@ app.get("/", (c) => {
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number }) const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
.n; .n;
const recentItems = db const recentItems = db
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50") .prepare(
.all() as { name: string; type: string; scan_status: string; file_path: string }[]; "SELECT name, type, scan_status, file_path, last_scanned_at, ingest_source FROM media_items ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC LIMIT 5",
)
.all() as {
name: string;
type: string;
scan_status: string;
file_path: string;
last_scanned_at: string | null;
ingest_source: string | null;
}[];
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() }); return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
}); });
// GET /api/scan/items — paginated, filterable browse over media_items.
// Query params: offset, limit (clamped 1..200), q (name/path substring),
// status, type, source. Responds { rows, total, hasMore, query } where
// `query` echoes the normalized filters that were actually applied.
app.get("/items", (c) => {
const db = getDb();
// Normalize/validate raw query-string values (clamping + enum fallback
// happen inside parseScanItemsQuery).
const query = parseScanItemsQuery({
offset: c.req.query("offset"),
limit: c.req.query("limit"),
q: c.req.query("q"),
status: c.req.query("status"),
type: c.req.query("type"),
source: c.req.query("source"),
});
// where.sql is assembled from fixed clause strings; user input only ever
// travels through the bound `where.args`, so the interpolation is safe.
const where = buildScanItemsWhere(query);
const rows = db
.prepare(
`
SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
scan_status, original_language, orig_lang_source, container, file_size, file_path,
last_scanned_at, ingest_source
FROM media_items
${where.sql}
ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
LIMIT ? OFFSET ?
`,
)
// Positional binding: filter args first, then the trailing LIMIT/OFFSET.
.all(...where.args, query.limit, query.offset) as Array<{
id: number;
jellyfin_id: string;
name: string;
type: string;
series_name: string | null;
season_number: number | null;
episode_number: number | null;
scan_status: string;
original_language: string | null;
orig_lang_source: string | null;
container: string | null;
file_size: number | null;
file_path: string;
last_scanned_at: string | null;
ingest_source: string | null;
}>;
// Count with the same WHERE so `total`/`hasMore` match the filtered set.
const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
});
// ─── Start ──────────────────────────────────────────────────────────────────── // ─── Start ────────────────────────────────────────────────────────────────────
app.post("/start", async (c) => { app.post("/start", async (c) => {

View File

@@ -79,6 +79,7 @@ function migrate(db: Database): void {
// RENAME COLUMN preserves values; both alters are no-ops on fresh DBs. // RENAME COLUMN preserves values; both alters are no-ops on fresh DBs.
alter("ALTER TABLE review_plans RENAME COLUMN webhook_verified TO verified"); alter("ALTER TABLE review_plans RENAME COLUMN webhook_verified TO verified");
alter("ALTER TABLE review_plans DROP COLUMN verified"); alter("ALTER TABLE review_plans DROP COLUMN verified");
alter("ALTER TABLE media_items ADD COLUMN ingest_source TEXT NOT NULL DEFAULT 'scan'");
} }
function seedDefaults(db: Database): void { function seedDefaults(db: Database): void {

View File

@@ -31,12 +31,13 @@ CREATE TABLE IF NOT EXISTS media_items (
tvdb_id TEXT, tvdb_id TEXT,
jellyfin_raw TEXT, jellyfin_raw TEXT,
external_raw TEXT, external_raw TEXT,
scan_status TEXT NOT NULL DEFAULT 'pending', scan_status TEXT NOT NULL DEFAULT 'pending',
scan_error TEXT, scan_error TEXT,
last_scanned_at TEXT, last_scanned_at TEXT,
last_executed_at TEXT, ingest_source TEXT NOT NULL DEFAULT 'scan',
created_at TEXT NOT NULL DEFAULT (datetime('now')) last_executed_at TEXT,
); created_at TEXT NOT NULL DEFAULT (datetime('now'))
);
CREATE TABLE IF NOT EXISTS media_streams ( CREATE TABLE IF NOT EXISTS media_streams (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,

View File

@@ -133,11 +133,11 @@ export async function upsertJellyfinItem(
season_number, episode_number, year, file_path, file_size, container, season_number, episode_number, year, file_path, file_size, container,
runtime_ticks, date_last_refreshed, runtime_ticks, date_last_refreshed,
original_language, orig_lang_source, needs_review, original_language, orig_lang_source, needs_review,
imdb_id, tmdb_id, tvdb_id, imdb_id, tmdb_id, tvdb_id,
jellyfin_raw, external_raw, jellyfin_raw, external_raw,
scan_status, last_scanned_at${opts.executed ? ", last_executed_at" : ""} scan_status, last_scanned_at, ingest_source${opts.executed ? ", last_executed_at" : ""}
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now')${opts.executed ? ", datetime('now')" : ""}) ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now'), ?${opts.executed ? ", datetime('now')" : ""})
ON CONFLICT(jellyfin_id) DO UPDATE SET ON CONFLICT(jellyfin_id) DO UPDATE SET
type = excluded.type, name = excluded.name, original_title = excluded.original_title, type = excluded.type, name = excluded.name, original_title = excluded.original_title,
series_name = excluded.series_name, series_jellyfin_id = excluded.series_jellyfin_id, series_name = excluded.series_name, series_jellyfin_id = excluded.series_jellyfin_id,
season_number = excluded.season_number, episode_number = excluded.episode_number, season_number = excluded.season_number, episode_number = excluded.episode_number,
@@ -145,12 +145,13 @@ export async function upsertJellyfinItem(
file_size = excluded.file_size, container = excluded.container, file_size = excluded.file_size, container = excluded.container,
runtime_ticks = excluded.runtime_ticks, date_last_refreshed = excluded.date_last_refreshed, runtime_ticks = excluded.runtime_ticks, date_last_refreshed = excluded.date_last_refreshed,
original_language = excluded.original_language, orig_lang_source = excluded.orig_lang_source, original_language = excluded.original_language, orig_lang_source = excluded.orig_lang_source,
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id, needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id, tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw, jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
scan_status = 'scanned', last_scanned_at = datetime('now') scan_status = 'scanned', last_scanned_at = datetime('now'),
${opts.executed ? ", last_executed_at = datetime('now')" : ""} ingest_source = excluded.ingest_source
`); ${opts.executed ? ", last_executed_at = datetime('now')" : ""}
`);
upsertItem.run( upsertItem.run(
jellyfinItem.Id, jellyfinItem.Id,
jellyfinItem.Type === "Episode" ? "Episode" : "Movie", jellyfinItem.Type === "Episode" ? "Episode" : "Movie",
@@ -174,6 +175,7 @@ export async function upsertJellyfinItem(
tvdbId, tvdbId,
jellyfinRaw, jellyfinRaw,
externalRawJson, externalRawJson,
source,
); );
const itemRow = db.prepare("SELECT id FROM media_items WHERE jellyfin_id = ?").get(jellyfinItem.Id) as { const itemRow = db.prepare("SELECT id FROM media_items WHERE jellyfin_id = ?").get(jellyfinItem.Id) as {

View File

@@ -10,10 +10,41 @@ import { formatThousands } from "~/shared/lib/utils";
interface ScanStatus { interface ScanStatus {
running: boolean; running: boolean;
progress: { scanned: number; total: number; errors: number }; progress: { scanned: number; total: number; errors: number };
recentItems: { name: string; type: string; scan_status: string; file_path: string }[]; recentItems: {
name: string;
type: string;
scan_status: string;
file_path: string;
last_scanned_at: string | null;
ingest_source: "scan" | "webhook" | null;
}[];
scanLimit: number | null; scanLimit: number | null;
} }
// One row returned by GET /api/scan/items — a media_items record plus the
// file metadata rendered in the Scan page's library table.
interface ScanItemsRow {
id: number;
jellyfin_id: string;
name: string;
type: "Movie" | "Episode";
series_name: string | null; // only set for episodes
season_number: number | null;
episode_number: number | null;
scan_status: string;
original_language: string | null;
orig_lang_source: string | null;
container: string | null;
file_size: number | null; // bytes; null when unknown
file_path: string;
last_scanned_at: string | null; // DB timestamp; null when never scanned
ingest_source: "scan" | "webhook" | null;
}
// Page envelope from GET /api/scan/items; hasMore drives the "load more" flow.
interface ScanItemsResponse {
rows: ScanItemsRow[];
total: number;
hasMore: boolean;
}
interface DashboardStats { interface DashboardStats {
totalItems: number; totalItems: number;
scanned: number; scanned: number;
@@ -47,6 +78,22 @@ interface LogEntry {
file?: string; file?: string;
} }
// Compact row shown in the 5-row "Recent ingest" table under the progress bar.
interface RecentIngestRow {
name: string;
type: string;
status: string;
file: string; // full file path; only the basename is rendered
scannedAt: string | null; // ISO or DB timestamp; null renders as an em dash
source: "scan" | "webhook" | null;
}
// Client-side filter state mirrored 1:1 into /api/scan/items query params.
interface ItemFilters {
q: string;
status: "all" | "pending" | "scanned" | "error";
type: "all" | "movie" | "episode";
source: "all" | "scan" | "webhook";
}
// Mutable buffer for SSE data — flushed to React state on an interval // Mutable buffer for SSE data — flushed to React state on an interval
interface SseBuf { interface SseBuf {
scanned: number; scanned: number;
@@ -65,19 +112,54 @@ function freshBuf(): SseBuf {
const FLUSH_MS = 200; const FLUSH_MS = 200;
// Maps a scan status string onto a Badge variant; unknown statuses fall back
// to "default". Both "done" and "scanned" render as the success variant.
function statusBadgeVariant(status: string): "pending" | "done" | "error" | "default" {
  switch (status) {
    case "pending":
      return "pending";
    case "done":
    case "scanned":
      return "done";
    case "error":
      return "error";
    default:
      return "default";
  }
}
// Renders a DB timestamp for display; null → em dash, unparseable → the raw
// string. Timestamps without a "T" get a "Z" appended so they are read as UTC.
// NOTE(review): that targets SQLite's space-separated "YYYY-MM-DD HH:MM:SS"
// form, whose Date parsing is implementation-defined — confirm it parses in
// all target browsers.
function formatScannedAt(ts: string | null): string {
  if (!ts) return "—";
  const normalized = ts.includes("T") ? ts : `${ts}Z`;
  const parsed = new Date(normalized);
  if (Number.isNaN(parsed.getTime())) return ts;
  return parsed.toLocaleString([], {
    year: "numeric",
    month: "2-digit",
    day: "2-digit",
    hour: "2-digit",
    minute: "2-digit",
  });
}
// Human-readable file size using decimal (SI) units; null, zero, or negative
// byte counts render as an em dash.
function formatFileSize(bytes: number | null): string {
  if (!bytes || bytes <= 0) return "—";
  const KB = 1000;
  const MB = KB * 1000;
  const GB = MB * 1000;
  if (bytes < KB) return `${bytes} B`;
  if (bytes < MB) return `${(bytes / KB).toFixed(1)} kB`;
  if (bytes < GB) return `${(bytes / MB).toFixed(1)} MB`;
  return `${(bytes / GB).toFixed(1)} GB`;
}
// "SxxEyy" label for episodes; movies (and anything else) get an em dash.
// Missing season/episode numbers render as 00.
function episodeLabel(row: ScanItemsRow): string {
  if (row.type !== "Episode") return "—";
  const pad = (n: number | null) => String(n ?? 0).padStart(2, "0");
  return `S${pad(row.season_number)}E${pad(row.episode_number)}`;
}
export function ScanPage() { export function ScanPage() {
const navigate = useNavigate(); const navigate = useNavigate();
const [status, setStatus] = useState<ScanStatus | null>(null); const [status, setStatus] = useState<ScanStatus | null>(null);
const [stats, setStats] = useState<DashboardStats | null>(null); const [stats, setStats] = useState<DashboardStats | null>(null);
const [configChecked, setConfigChecked] = useState(false); const [configChecked, setConfigChecked] = useState(false);
const [limit, setLimit] = useState(""); const [limit, setLimit] = useState("");
const [log, setLog] = useState<LogEntry[]>([]); const [recentIngest, setRecentIngest] = useState<RecentIngestRow[]>([]);
const [statusLabel, setStatusLabel] = useState(""); const [statusLabel, setStatusLabel] = useState("");
const [scanComplete, setScanComplete] = useState(false); const [scanComplete, setScanComplete] = useState(false);
const [currentItem, setCurrentItem] = useState(""); const [currentItem, setCurrentItem] = useState("");
const [progressScanned, setProgressScanned] = useState(0); const [progressScanned, setProgressScanned] = useState(0);
const [progressTotal, setProgressTotal] = useState(0); const [progressTotal, setProgressTotal] = useState(0);
const [errors, setErrors] = useState(0); const [errors, setErrors] = useState(0);
const [filters, setFilters] = useState<ItemFilters>({ q: "", status: "all", type: "all", source: "all" });
const [itemsRows, setItemsRows] = useState<ScanItemsRow[]>([]);
const [itemsOffset, setItemsOffset] = useState(0);
const [itemsHasMore, setItemsHasMore] = useState(false);
const [itemsTotal, setItemsTotal] = useState(0);
const [itemsLoading, setItemsLoading] = useState(false);
const esRef = useRef<EventSource | null>(null); const esRef = useRef<EventSource | null>(null);
const bufRef = useRef<SseBuf>(freshBuf()); const bufRef = useRef<SseBuf>(freshBuf());
const timerRef = useRef<ReturnType<typeof setInterval> | null>(null); const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
@@ -122,7 +204,19 @@ export function ScanPage() {
setCurrentItem(b.currentItem); setCurrentItem(b.currentItem);
if (b.newLogs.length > 0) { if (b.newLogs.length > 0) {
const batch = b.newLogs.splice(0); const batch = b.newLogs.splice(0);
setLog((prev) => [...batch.reverse(), ...prev].slice(0, 100)); setRecentIngest((prev) =>
[
...batch.map((item) => ({
name: item.name,
type: item.type,
status: item.status,
file: item.file ?? item.name,
scannedAt: new Date().toISOString(),
source: "scan" as const,
})),
...prev,
].slice(0, 5),
);
} }
b.dirty = false; b.dirty = false;
} }
@@ -172,13 +266,55 @@ export function ScanPage() {
setErrors(s.progress.errors); setErrors(s.progress.errors);
setStatusLabel(s.running ? "Scan in progress…" : "Scan idle"); setStatusLabel(s.running ? "Scan in progress…" : "Scan idle");
if (s.scanLimit != null) setLimit(String(s.scanLimit)); if (s.scanLimit != null) setLimit(String(s.scanLimit));
setLog(s.recentItems.map((i) => ({ name: i.name, type: i.type, status: i.scan_status, file: i.file_path }))); setRecentIngest(
s.recentItems.map((i) => ({
name: i.name,
type: i.type,
status: i.scan_status,
file: i.file_path,
scannedAt: i.last_scanned_at,
source: i.ingest_source,
})),
);
}, []); }, []);
useEffect(() => { useEffect(() => {
load(); load();
}, [load]); }, [load]);
const fetchItems = useCallback(
async (offset: number, append: boolean) => {
setItemsLoading(true);
try {
const qs = new URLSearchParams({
offset: String(offset),
limit: "50",
q: filters.q,
status: filters.status,
type: filters.type,
source: filters.source,
});
const res = await api.get<ScanItemsResponse>(`/api/scan/items?${qs.toString()}`);
setItemsRows((prev) => (append ? [...prev, ...res.rows] : res.rows));
setItemsOffset(offset + res.rows.length);
setItemsHasMore(res.hasMore);
setItemsTotal(res.total);
} finally {
setItemsLoading(false);
}
},
[filters],
);
useEffect(() => {
fetchItems(0, false);
}, [fetchItems]);
useEffect(() => {
if (!scanComplete) return;
fetchItems(0, false);
}, [scanComplete, fetchItems]);
const connectSse = useCallback(() => { const connectSse = useCallback(() => {
esRef.current?.close(); esRef.current?.close();
const buf = bufRef.current; const buf = bufRef.current;
@@ -229,7 +365,7 @@ export function ScanPage() {
}, [status?.running, connectSse, stopFlushing]); }, [status?.running, connectSse, stopFlushing]);
const startScan = async () => { const startScan = async () => {
setLog([]); setRecentIngest([]);
setProgressScanned(0); setProgressScanned(0);
setProgressTotal(0); setProgressTotal(0);
setErrors(0); setErrors(0);
@@ -284,37 +420,48 @@ export function ScanPage() {
)} )}
<div className="border border-gray-200 rounded-lg px-4 py-3 mb-6"> <div className="border border-gray-200 rounded-lg px-4 py-3 mb-6">
<div className="flex items-center flex-wrap gap-2 mb-3"> <div className="flex items-start justify-between gap-3 mb-3">
<span className="text-sm font-medium">{statusLabel || (running ? "Scan in progress…" : "Scan idle")}</span> <div className="space-y-2 min-w-0">
{scanComplete && ( <div className="flex items-center flex-wrap gap-2">
<Link to="/pipeline" className="text-blue-600 hover:underline text-sm"> <span className="text-sm font-medium">{statusLabel || (running ? "Scan in progress…" : "Scan idle")}</span>
Review in Pipeline {scanComplete && (
</Link> <Link to="/pipeline" className="text-blue-600 hover:underline text-sm">
)} Review in Pipeline
{running ? ( </Link>
<Button variant="secondary" size="sm" onClick={stopScan}> )}
Stop {errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
</Button>
) : (
<div className="flex items-center gap-2">
<label className="flex items-center gap-1.5 text-xs m-0">
Limit
<input
type="number"
value={limit}
onChange={(e) => setLimit(e.target.value)}
placeholder="all"
min="1"
className="border border-gray-300 rounded px-1.5 py-0.5 text-xs w-16"
/>
items
</label>
<Button size="sm" onClick={startScan}>
Start Scan
</Button>
</div> </div>
)} {running ? (
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>} <Button variant="secondary" size="sm" onClick={stopScan}>
Stop
</Button>
) : (
<div className="flex items-center gap-2">
<label className="flex items-center gap-1.5 text-xs m-0">
Limit
<input
type="number"
value={limit}
onChange={(e) => setLimit(e.target.value)}
placeholder="all"
min="1"
className="border border-gray-300 rounded px-1.5 py-0.5 text-xs w-16"
/>
items
</label>
<Button size="sm" onClick={startScan}>
Start Scan
</Button>
</div>
)}
</div>
<div className="text-right shrink-0">
<div className="text-sm font-semibold text-gray-700">
{formatThousands(progressScanned)}
{progressTotal > 0 ? ` / ${formatThousands(progressTotal)}` : ""}
</div>
<div className="text-[0.7rem] text-gray-500">scanned</div>
</div>
</div> </div>
{(running || progressScanned > 0) && ( {(running || progressScanned > 0) && (
@@ -325,48 +472,174 @@ export function ScanPage() {
</div> </div>
)} )}
<div className="flex items-center gap-2 text-gray-500 text-xs"> <div className="flex items-center gap-2 text-gray-500 text-xs">
<span> {currentItem && <span className="truncate max-w-2xl text-gray-400">{currentItem}</span>}
{progressScanned}
{progressTotal > 0 ? ` / ${progressTotal}` : ""} scanned
</span>
{currentItem && <span className="truncate max-w-xs text-gray-400">{currentItem}</span>}
</div> </div>
</> </>
)} )}
<div className="mt-3">
<h3 className="font-semibold text-sm mb-2">Recent ingest (5)</h3>
<table className="w-full border-collapse text-[0.78rem]">
<thead>
<tr>
{["Time", "Source", "Type", "File", "Status"].map((h) => (
<th
key={h}
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
>
{h}
</th>
))}
</tr>
</thead>
<tbody>
{recentIngest.length === 0 && (
<tr>
<td colSpan={5} className="py-2 px-2 text-gray-400">
No ingested items yet.
</td>
</tr>
)}
{recentIngest.map((item, i) => {
const fileName = item.file.split("/").pop() ?? item.name;
return (
<tr key={`${item.file}-${i}`} className="hover:bg-gray-50">
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatScannedAt(item.scannedAt)}</td>
<td className="py-1.5 px-2 border-b border-gray-100">
<Badge variant="default">{item.source ?? "scan"}</Badge>
</td>
<td className="py-1.5 px-2 border-b border-gray-100">{item.type}</td>
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-96" title={item.file}>
{fileName}
</td>
<td className="py-1.5 px-2 border-b border-gray-100">
<Badge variant={statusBadgeVariant(item.status)}>{item.status}</Badge>
</td>
</tr>
);
})}
</tbody>
</table>
</div>
</div> </div>
{/* Log */} <div className="mb-2 flex items-end justify-between gap-3">
<h3 className="font-semibold text-sm mb-2">Recent items</h3> <h3 className="font-semibold text-sm">Library items</h3>
<table className="w-full border-collapse text-[0.82rem]"> <span className="text-xs text-gray-500">{formatThousands(itemsTotal)} total</span>
</div>
<div className="border border-gray-200 rounded-lg p-3 mb-3 flex flex-wrap items-end gap-2">
<label className="text-xs text-gray-600 flex flex-col gap-1">
Search
<input
type="text"
value={filters.q}
onChange={(e) => setFilters((prev) => ({ ...prev, q: e.target.value }))}
placeholder="Name or path"
className="border border-gray-300 rounded px-2 py-1 text-xs w-56"
/>
</label>
<label className="text-xs text-gray-600 flex flex-col gap-1">
Status
<select
value={filters.status}
onChange={(e) => setFilters((prev) => ({ ...prev, status: e.target.value as ItemFilters["status"] }))}
className="border border-gray-300 rounded px-2 py-1 text-xs"
>
<option value="all">All</option>
<option value="scanned">Scanned</option>
<option value="pending">Pending</option>
<option value="error">Error</option>
</select>
</label>
<label className="text-xs text-gray-600 flex flex-col gap-1">
Type
<select
value={filters.type}
onChange={(e) => setFilters((prev) => ({ ...prev, type: e.target.value as ItemFilters["type"] }))}
className="border border-gray-300 rounded px-2 py-1 text-xs"
>
<option value="all">All</option>
<option value="movie">Movie</option>
<option value="episode">Episode</option>
</select>
</label>
<label className="text-xs text-gray-600 flex flex-col gap-1">
Source
<select
value={filters.source}
onChange={(e) => setFilters((prev) => ({ ...prev, source: e.target.value as ItemFilters["source"] }))}
className="border border-gray-300 rounded px-2 py-1 text-xs"
>
<option value="all">All</option>
<option value="scan">Scan</option>
<option value="webhook">Webhook</option>
</select>
</label>
</div>
<table className="w-full border-collapse text-[0.8rem]">
<thead> <thead>
<tr> <tr>
{["Type", "File", "Status"].map((h) => ( {["Scanned", "Name", "Type", "Series / Ep", "Language", "Container", "Size", "Source", "Status", "Path"].map(
<th (h) => (
key={h} <th
className="text-left text-[0.68rem] font-bold uppercase tracking-[0.06em] text-gray-500 py-1 px-2 border-b-2 border-gray-200 whitespace-nowrap" key={h}
> className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
{h} >
</th> {h}
))} </th>
),
)}
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
{log.map((item, i) => { {itemsRows.length === 0 && !itemsLoading && (
const fileName = item.file ? (item.file.split("/").pop() ?? item.name) : item.name; <tr>
return ( <td colSpan={10} className="py-3 px-2 text-gray-400">
<tr key={i} className="hover:bg-gray-50"> No items match the current filters.
<td className="py-1.5 px-2 border-b border-gray-100">{item.type}</td> </td>
<td className="py-1.5 px-2 border-b border-gray-100" title={item.file ?? item.name}> </tr>
{fileName} )}
</td> {itemsRows.map((row) => (
<td className="py-1.5 px-2 border-b border-gray-100"> <tr key={row.id} className="hover:bg-gray-50">
<Badge variant={item.status as "error" | "done" | "pending"}>{item.status}</Badge> <td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">
</td> {formatScannedAt(row.last_scanned_at)}
</tr> </td>
); <td className="py-1.5 px-2 border-b border-gray-100">{row.name}</td>
})} <td className="py-1.5 px-2 border-b border-gray-100">{row.type}</td>
<td className="py-1.5 px-2 border-b border-gray-100">
<div>{row.series_name ?? "—"}</div>
<div className="text-[0.68rem] text-gray-500">{episodeLabel(row)}</div>
</td>
<td className="py-1.5 px-2 border-b border-gray-100">
<div>{row.original_language ?? "—"}</div>
<div className="text-[0.68rem] text-gray-500">{row.orig_lang_source ?? "—"}</div>
</td>
<td className="py-1.5 px-2 border-b border-gray-100">{row.container ?? "—"}</td>
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatFileSize(row.file_size)}</td>
<td className="py-1.5 px-2 border-b border-gray-100">
<Badge variant="default">{row.ingest_source ?? "scan"}</Badge>
</td>
<td className="py-1.5 px-2 border-b border-gray-100">
<Badge variant={statusBadgeVariant(row.scan_status)}>{row.scan_status}</Badge>
</td>
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-xs" title={row.file_path}>
{row.file_path}
</td>
</tr>
))}
</tbody> </tbody>
</table> </table>
<div className="mt-3 flex items-center gap-2">
{itemsHasMore && (
<Button size="sm" variant="secondary" onClick={() => fetchItems(itemsOffset, true)} disabled={itemsLoading}>
{itemsLoading ? "Loading…" : "Load more"}
</Button>
)}
{itemsLoading && !itemsHasMore && <span className="text-xs text-gray-500">Loading</span>}
</div>
</div> </div>
); );
} }