rework scan page, add ingest-source browsing, bump version to 2026.04.15.8
Some checks failed
Build and Push Docker Image / build (push) Has been cancelled
Some checks failed
Build and Push Docker Image / build (push) Has been cancelled
This commit is contained in:
47
docs/superpowers/plans/2026-04-15-scan-page-rework.md
Normal file
47
docs/superpowers/plans/2026-04-15-scan-page-rework.md
Normal file
@@ -0,0 +1,47 @@
|
||||
# Scan Page Rework Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Rework the Scan page to prioritize progress + fresh ingest visibility, and add a scalable filterable/lazy-loaded library table.
|
||||
|
||||
**Architecture:** Keep `/api/scan` lightweight for status/progress and compact recent ingest rows. Add `/api/scan/items` for paginated/filterable DB browsing. Update `ScanPage` to render: scan card header count, compact 5-row recent ingest table, then a filterable lazy-loaded library table.
|
||||
|
||||
**Tech Stack:** Bun + Hono, React 19 + TanStack Router, bun:test, Biome.
|
||||
|
||||
---
|
||||
|
||||
### Task 1: Backend scan payload + items endpoint (TDD)
|
||||
|
||||
**Files:**
|
||||
- Modify: `server/api/__tests__/scan.test.ts`
|
||||
- Modify: `server/db/schema.ts`
|
||||
- Modify: `server/db/index.ts`
|
||||
- Modify: `server/services/rescan.ts`
|
||||
- Modify: `server/api/scan.ts`
|
||||
|
||||
- [ ] Add failing tests for scan item query parsing/normalization and SQL filter behavior helpers.
|
||||
- [ ] Run targeted tests to verify failure.
|
||||
- [ ] Add `media_items.ingest_source` schema + migration, set value on upsert (`scan`/`webhook`).
|
||||
- [ ] Extend `GET /api/scan` recent item shape with timestamp + ingest source and clamp to 5 rows.
|
||||
- [ ] Add `GET /api/scan/items` with filters (`q,status,type,source`) + pagination (`offset,limit`), returning `{ rows,total,hasMore }`.
|
||||
- [ ] Run targeted and full backend tests.
|
||||
|
||||
### Task 2: Scan page UI rework + lazy table
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/scan/ScanPage.tsx`
|
||||
|
||||
- [ ] Refactor scan box header to show scanned count in top-right.
|
||||
- [ ] Replace large recent-items table with a compact 5-row recent ingest list directly under progress bar.
|
||||
- [ ] Add filter controls for library table (`q,status,type,source`) with default “All”.
|
||||
- [ ] Add lazy loading flow (initial fetch + load more) against `/api/scan/items`.
|
||||
- [ ] Render new table with useful file metadata columns and consistent truncation/tooltips.
|
||||
|
||||
### Task 3: Verification
|
||||
|
||||
**Files:**
|
||||
- Modify: none
|
||||
|
||||
- [ ] Run `bun test`.
|
||||
- [ ] Run `bun run lint` and format if needed.
|
||||
- [ ] Confirm no regressions in scan start/stop/progress behavior.
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "netfelix-audio-fix",
|
||||
"version": "2026.04.15.7",
|
||||
"version": "2026.04.15.8",
|
||||
"scripts": {
|
||||
"dev:server": "NODE_ENV=development bun --hot server/index.tsx",
|
||||
"dev:client": "vite",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
|
||||
import { enqueueUnseenJobs, extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
|
||||
|
||||
describe("extractErrorSummary", () => {
|
||||
test("pulls the real error line out of ffmpeg's banner", () => {
|
||||
@@ -71,3 +71,15 @@ describe("yieldAfterChunk", () => {
|
||||
expect(yieldCalls).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("enqueueUnseenJobs", () => {
|
||||
test("appends only unseen job ids to the active queue", () => {
|
||||
const queue = [{ id: 1 }, { id: 2 }] as { id: number }[];
|
||||
const seen = new Set([1, 2]);
|
||||
const added = enqueueUnseenJobs(queue, seen, [{ id: 2 }, { id: 3 }, { id: 4 }] as { id: number }[]);
|
||||
expect(added).toBe(2);
|
||||
expect(queue.map((j) => j.id)).toEqual([1, 2, 3, 4]);
|
||||
expect(seen.has(3)).toBeTrue();
|
||||
expect(seen.has(4)).toBeTrue();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { parseScanLimit } from "../scan";
|
||||
import { buildScanItemsWhere, parseScanItemsQuery, parseScanLimit } from "../scan";
|
||||
|
||||
describe("parseScanLimit", () => {
|
||||
test("accepts positive integers and nullish/empty as no-limit", () => {
|
||||
@@ -29,3 +29,73 @@ describe("parseScanLimit", () => {
|
||||
expect(parseScanLimit(Number.POSITIVE_INFINITY)).toEqual({ ok: false });
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseScanItemsQuery", () => {
|
||||
test("normalizes default filters and pagination", () => {
|
||||
const q = parseScanItemsQuery({});
|
||||
expect(q).toEqual({
|
||||
offset: 0,
|
||||
limit: 50,
|
||||
search: "",
|
||||
status: "all",
|
||||
type: "all",
|
||||
source: "all",
|
||||
});
|
||||
});
|
||||
|
||||
test("clamps limit and offset, trims and lowercases values", () => {
|
||||
const q = parseScanItemsQuery({
|
||||
offset: "-12",
|
||||
limit: "5000",
|
||||
q: " The Wire ",
|
||||
status: "SCANNED",
|
||||
type: "EPISODE",
|
||||
source: "WEBHOOK",
|
||||
});
|
||||
expect(q).toEqual({
|
||||
offset: 0,
|
||||
limit: 200,
|
||||
search: "The Wire",
|
||||
status: "scanned",
|
||||
type: "episode",
|
||||
source: "webhook",
|
||||
});
|
||||
});
|
||||
|
||||
test("falls back to all for unknown enum values", () => {
|
||||
const q = parseScanItemsQuery({ status: "zzz", type: "cartoon", source: "mqtt" });
|
||||
expect(q.status).toBe("all");
|
||||
expect(q.type).toBe("all");
|
||||
expect(q.source).toBe("all");
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildScanItemsWhere", () => {
|
||||
test("builds combined where clause + args in stable order", () => {
|
||||
const where = buildScanItemsWhere({
|
||||
offset: 0,
|
||||
limit: 50,
|
||||
search: "blade",
|
||||
status: "scanned",
|
||||
type: "movie",
|
||||
source: "webhook",
|
||||
});
|
||||
expect(where.sql).toBe(
|
||||
"WHERE scan_status = ? AND lower(type) = ? AND ingest_source = ? AND (lower(name) LIKE ? OR lower(file_path) LIKE ?)",
|
||||
);
|
||||
expect(where.args).toEqual(["scanned", "movie", "webhook", "%blade%", "%blade%"]);
|
||||
});
|
||||
|
||||
test("returns empty where when all filters are broad", () => {
|
||||
const where = buildScanItemsWhere({
|
||||
offset: 0,
|
||||
limit: 50,
|
||||
search: "",
|
||||
status: "all",
|
||||
type: "all",
|
||||
source: "all",
|
||||
});
|
||||
expect(where.sql).toBe("");
|
||||
expect(where.args).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -23,6 +23,8 @@ const app = new Hono();
|
||||
let queueRunning = false;
|
||||
let runningProc: ReturnType<typeof Bun.spawn> | null = null;
|
||||
let runningJobId: number | null = null;
|
||||
let activeQueue: Job[] | null = null;
|
||||
let activeSeen: Set<number> | null = null;
|
||||
const LIVE_UPDATE_INTERVAL_MS = 500;
|
||||
const STREAM_CHUNKS_BEFORE_YIELD = 24;
|
||||
|
||||
@@ -41,6 +43,17 @@ export async function yieldAfterChunk(
|
||||
return 0;
|
||||
}
|
||||
|
||||
export function enqueueUnseenJobs<T extends { id: number }>(queue: T[], seen: Set<number>, jobs: T[]): number {
|
||||
let added = 0;
|
||||
for (const job of jobs) {
|
||||
if (seen.has(job.id)) continue;
|
||||
queue.push(job);
|
||||
seen.add(job.id);
|
||||
added += 1;
|
||||
}
|
||||
return added;
|
||||
}
|
||||
|
||||
function emitQueueStatus(
|
||||
status: "running" | "paused" | "sleeping" | "idle",
|
||||
extra: { until?: string; seconds?: number } = {},
|
||||
@@ -56,6 +69,8 @@ async function runSequential(initial: Job[]): Promise<void> {
|
||||
let first = true;
|
||||
const queue: Job[] = [...initial];
|
||||
const seen = new Set<number>(queue.map((j) => j.id));
|
||||
activeQueue = queue;
|
||||
activeSeen = seen;
|
||||
|
||||
while (queue.length > 0) {
|
||||
const job = queue.shift() as Job;
|
||||
@@ -99,15 +114,12 @@ async function runSequential(initial: Job[]): Promise<void> {
|
||||
// manually clicks "Run all" again.
|
||||
if (queue.length === 0) {
|
||||
const more = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
|
||||
for (const m of more) {
|
||||
if (!seen.has(m.id)) {
|
||||
queue.push(m);
|
||||
seen.add(m.id);
|
||||
}
|
||||
}
|
||||
enqueueUnseenJobs(queue, seen, more);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
activeQueue = null;
|
||||
activeSeen = null;
|
||||
queueRunning = false;
|
||||
emitQueueStatus("idle");
|
||||
}
|
||||
@@ -178,8 +190,12 @@ function loadJobRow(jobId: number) {
|
||||
app.post("/start", (c) => {
|
||||
const db = getDb();
|
||||
const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
|
||||
if (queueRunning && activeQueue && activeSeen) {
|
||||
const queued = enqueueUnseenJobs(activeQueue, activeSeen, pending);
|
||||
return c.json({ ok: true, started: 0, queued });
|
||||
}
|
||||
runSequential(pending).catch((err) => logError("Queue failed:", err));
|
||||
return c.json({ ok: true, started: pending.length });
|
||||
return c.json({ ok: true, started: pending.length, queued: pending.length });
|
||||
});
|
||||
|
||||
// ─── Run single ───────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -23,6 +23,78 @@ export function parseScanLimit(raw: unknown): { ok: true; value: number | null }
|
||||
return { ok: true, value: n };
|
||||
}
|
||||
|
||||
type ScanStatusFilter = "all" | "pending" | "scanned" | "error";
|
||||
type ScanTypeFilter = "all" | "movie" | "episode";
|
||||
type ScanSourceFilter = "all" | "scan" | "webhook";
|
||||
|
||||
export interface ScanItemsQuery {
|
||||
offset: number;
|
||||
limit: number;
|
||||
search: string;
|
||||
status: ScanStatusFilter;
|
||||
type: ScanTypeFilter;
|
||||
source: ScanSourceFilter;
|
||||
}
|
||||
|
||||
function parsePositiveInt(raw: unknown, fallback: number): number {
|
||||
const n = typeof raw === "number" ? raw : Number(raw);
|
||||
if (!Number.isFinite(n)) return fallback;
|
||||
if (!Number.isInteger(n)) return fallback;
|
||||
return n;
|
||||
}
|
||||
|
||||
function clamp(n: number, min: number, max: number): number {
|
||||
if (n < min) return min;
|
||||
if (n > max) return max;
|
||||
return n;
|
||||
}
|
||||
|
||||
function parseOneOf<T extends readonly string[]>(raw: unknown, allowed: T, fallback: T[number]): T[number] {
|
||||
if (typeof raw !== "string") return fallback;
|
||||
const lowered = raw.toLowerCase();
|
||||
return (allowed as readonly string[]).includes(lowered) ? (lowered as T[number]) : fallback;
|
||||
}
|
||||
|
||||
export function parseScanItemsQuery(raw: Record<string, unknown>): ScanItemsQuery {
|
||||
const limit = clamp(parsePositiveInt(raw.limit, 50), 1, 200);
|
||||
const offset = Math.max(0, parsePositiveInt(raw.offset, 0));
|
||||
const search = typeof raw.q === "string" ? raw.q.trim() : "";
|
||||
return {
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
status: parseOneOf(raw.status, ["all", "pending", "scanned", "error"] as const, "all"),
|
||||
type: parseOneOf(raw.type, ["all", "movie", "episode"] as const, "all"),
|
||||
source: parseOneOf(raw.source, ["all", "scan", "webhook"] as const, "all"),
|
||||
};
|
||||
}
|
||||
|
||||
export function buildScanItemsWhere(query: ScanItemsQuery): { sql: string; args: unknown[] } {
|
||||
const clauses: string[] = [];
|
||||
const args: unknown[] = [];
|
||||
if (query.status !== "all") {
|
||||
clauses.push("scan_status = ?");
|
||||
args.push(query.status);
|
||||
}
|
||||
if (query.type !== "all") {
|
||||
clauses.push("lower(type) = ?");
|
||||
args.push(query.type);
|
||||
}
|
||||
if (query.source !== "all") {
|
||||
clauses.push("ingest_source = ?");
|
||||
args.push(query.source);
|
||||
}
|
||||
if (query.search.length > 0) {
|
||||
clauses.push("(lower(name) LIKE ? OR lower(file_path) LIKE ?)");
|
||||
const needle = `%${query.search.toLowerCase()}%`;
|
||||
args.push(needle, needle);
|
||||
}
|
||||
return {
|
||||
sql: clauses.length > 0 ? `WHERE ${clauses.join(" AND ")}` : "",
|
||||
args,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── State ────────────────────────────────────────────────────────────────────
|
||||
|
||||
let scanAbort: AbortController | null = null;
|
||||
@@ -60,12 +132,65 @@ app.get("/", (c) => {
|
||||
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
|
||||
.n;
|
||||
const recentItems = db
|
||||
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50")
|
||||
.all() as { name: string; type: string; scan_status: string; file_path: string }[];
|
||||
.prepare(
|
||||
"SELECT name, type, scan_status, file_path, last_scanned_at, ingest_source FROM media_items ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC LIMIT 5",
|
||||
)
|
||||
.all() as {
|
||||
name: string;
|
||||
type: string;
|
||||
scan_status: string;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: string | null;
|
||||
}[];
|
||||
|
||||
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
|
||||
});
|
||||
|
||||
// GET /api/scan/items — paginated, filterable browse over media_items.
// Raw query params are normalized by parseScanItemsQuery (clamped limit/offset,
// lowercased enum filters) and turned into a parameterized WHERE clause by
// buildScanItemsWhere; where.sql only ever contains fixed clause text, so no
// user input reaches the SQL string itself — values are bound via `?` args.
app.get("/items", (c) => {
  const db = getDb();
  // Collect raw params; validation/normalization lives in the parser so the
  // handler never sees out-of-range or unknown values.
  const query = parseScanItemsQuery({
    offset: c.req.query("offset"),
    limit: c.req.query("limit"),
    q: c.req.query("q"),
    status: c.req.query("status"),
    type: c.req.query("type"),
    source: c.req.query("source"),
  });
  const where = buildScanItemsWhere(query);
  // Newest-first ordering: never-scanned rows fall back to created_at, with
  // id DESC as a stable tie-breaker so pagination does not skip/repeat rows.
  const rows = db
    .prepare(
      `
      SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
             scan_status, original_language, orig_lang_source, container, file_size, file_path,
             last_scanned_at, ingest_source
      FROM media_items
      ${where.sql}
      ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
      LIMIT ? OFFSET ?
      `,
    )
    .all(...where.args, query.limit, query.offset) as Array<{
    id: number;
    jellyfin_id: string;
    name: string;
    type: string;
    series_name: string | null;
    season_number: number | null;
    episode_number: number | null;
    scan_status: string;
    original_language: string | null;
    orig_lang_source: string | null;
    container: string | null;
    file_size: number | null;
    file_path: string;
    last_scanned_at: string | null;
    ingest_source: string | null;
  }>;
  // Count uses the same where.sql + args as the row query so `total`/`hasMore`
  // stay consistent with the filtered result set.
  const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
  // Echo the normalized query back so the client can display effective filters.
  return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
});
|
||||
|
||||
// ─── Start ────────────────────────────────────────────────────────────────────
|
||||
|
||||
app.post("/start", async (c) => {
|
||||
|
||||
@@ -79,6 +79,7 @@ function migrate(db: Database): void {
|
||||
// RENAME COLUMN preserves values; both alters are no-ops on fresh DBs.
|
||||
alter("ALTER TABLE review_plans RENAME COLUMN webhook_verified TO verified");
|
||||
alter("ALTER TABLE review_plans DROP COLUMN verified");
|
||||
alter("ALTER TABLE media_items ADD COLUMN ingest_source TEXT NOT NULL DEFAULT 'scan'");
|
||||
}
|
||||
|
||||
function seedDefaults(db: Database): void {
|
||||
|
||||
@@ -34,9 +34,10 @@ CREATE TABLE IF NOT EXISTS media_items (
|
||||
scan_status TEXT NOT NULL DEFAULT 'pending',
|
||||
scan_error TEXT,
|
||||
last_scanned_at TEXT,
|
||||
ingest_source TEXT NOT NULL DEFAULT 'scan',
|
||||
last_executed_at TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS media_streams (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
|
||||
@@ -135,8 +135,8 @@ export async function upsertJellyfinItem(
|
||||
original_language, orig_lang_source, needs_review,
|
||||
imdb_id, tmdb_id, tvdb_id,
|
||||
jellyfin_raw, external_raw,
|
||||
scan_status, last_scanned_at${opts.executed ? ", last_executed_at" : ""}
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now')${opts.executed ? ", datetime('now')" : ""})
|
||||
scan_status, last_scanned_at, ingest_source${opts.executed ? ", last_executed_at" : ""}
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now'), ?${opts.executed ? ", datetime('now')" : ""})
|
||||
ON CONFLICT(jellyfin_id) DO UPDATE SET
|
||||
type = excluded.type, name = excluded.name, original_title = excluded.original_title,
|
||||
series_name = excluded.series_name, series_jellyfin_id = excluded.series_jellyfin_id,
|
||||
@@ -148,7 +148,8 @@ export async function upsertJellyfinItem(
|
||||
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
|
||||
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
|
||||
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
|
||||
scan_status = 'scanned', last_scanned_at = datetime('now')
|
||||
scan_status = 'scanned', last_scanned_at = datetime('now'),
|
||||
ingest_source = excluded.ingest_source
|
||||
${opts.executed ? ", last_executed_at = datetime('now')" : ""}
|
||||
`);
|
||||
upsertItem.run(
|
||||
@@ -174,6 +175,7 @@ export async function upsertJellyfinItem(
|
||||
tvdbId,
|
||||
jellyfinRaw,
|
||||
externalRawJson,
|
||||
source,
|
||||
);
|
||||
|
||||
const itemRow = db.prepare("SELECT id FROM media_items WHERE jellyfin_id = ?").get(jellyfinItem.Id) as {
|
||||
|
||||
@@ -10,10 +10,41 @@ import { formatThousands } from "~/shared/lib/utils";
|
||||
interface ScanStatus {
|
||||
running: boolean;
|
||||
progress: { scanned: number; total: number; errors: number };
|
||||
recentItems: { name: string; type: string; scan_status: string; file_path: string }[];
|
||||
recentItems: {
|
||||
name: string;
|
||||
type: string;
|
||||
scan_status: string;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: "scan" | "webhook" | null;
|
||||
}[];
|
||||
scanLimit: number | null;
|
||||
}
|
||||
|
||||
interface ScanItemsRow {
|
||||
id: number;
|
||||
jellyfin_id: string;
|
||||
name: string;
|
||||
type: "Movie" | "Episode";
|
||||
series_name: string | null;
|
||||
season_number: number | null;
|
||||
episode_number: number | null;
|
||||
scan_status: string;
|
||||
original_language: string | null;
|
||||
orig_lang_source: string | null;
|
||||
container: string | null;
|
||||
file_size: number | null;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: "scan" | "webhook" | null;
|
||||
}
|
||||
|
||||
interface ScanItemsResponse {
|
||||
rows: ScanItemsRow[];
|
||||
total: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
interface DashboardStats {
|
||||
totalItems: number;
|
||||
scanned: number;
|
||||
@@ -47,6 +78,22 @@ interface LogEntry {
|
||||
file?: string;
|
||||
}
|
||||
|
||||
interface RecentIngestRow {
|
||||
name: string;
|
||||
type: string;
|
||||
status: string;
|
||||
file: string;
|
||||
scannedAt: string | null;
|
||||
source: "scan" | "webhook" | null;
|
||||
}
|
||||
|
||||
interface ItemFilters {
|
||||
q: string;
|
||||
status: "all" | "pending" | "scanned" | "error";
|
||||
type: "all" | "movie" | "episode";
|
||||
source: "all" | "scan" | "webhook";
|
||||
}
|
||||
|
||||
// Mutable buffer for SSE data — flushed to React state on an interval
|
||||
interface SseBuf {
|
||||
scanned: number;
|
||||
@@ -65,19 +112,54 @@ function freshBuf(): SseBuf {
|
||||
|
||||
const FLUSH_MS = 200;
|
||||
|
||||
function statusBadgeVariant(status: string): "pending" | "done" | "error" | "default" {
|
||||
if (status === "pending") return "pending";
|
||||
if (status === "done" || status === "scanned") return "done";
|
||||
if (status === "error") return "error";
|
||||
return "default";
|
||||
}
|
||||
|
||||
function formatScannedAt(ts: string | null): string {
|
||||
if (!ts) return "—";
|
||||
const d = new Date(ts.includes("T") ? ts : `${ts}Z`);
|
||||
if (Number.isNaN(d.getTime())) return ts;
|
||||
return d.toLocaleString([], { year: "numeric", month: "2-digit", day: "2-digit", hour: "2-digit", minute: "2-digit" });
|
||||
}
|
||||
|
||||
function formatFileSize(bytes: number | null): string {
|
||||
if (!bytes || bytes <= 0) return "—";
|
||||
if (bytes < 1000) return `${bytes} B`;
|
||||
if (bytes < 1000 ** 2) return `${(bytes / 1000).toFixed(1)} kB`;
|
||||
if (bytes < 1000 ** 3) return `${(bytes / 1000 ** 2).toFixed(1)} MB`;
|
||||
return `${(bytes / 1000 ** 3).toFixed(1)} GB`;
|
||||
}
|
||||
|
||||
function episodeLabel(row: ScanItemsRow): string {
|
||||
if (row.type !== "Episode") return "—";
|
||||
const season = row.season_number ?? 0;
|
||||
const episode = row.episode_number ?? 0;
|
||||
return `S${String(season).padStart(2, "0")}E${String(episode).padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
export function ScanPage() {
|
||||
const navigate = useNavigate();
|
||||
const [status, setStatus] = useState<ScanStatus | null>(null);
|
||||
const [stats, setStats] = useState<DashboardStats | null>(null);
|
||||
const [configChecked, setConfigChecked] = useState(false);
|
||||
const [limit, setLimit] = useState("");
|
||||
const [log, setLog] = useState<LogEntry[]>([]);
|
||||
const [recentIngest, setRecentIngest] = useState<RecentIngestRow[]>([]);
|
||||
const [statusLabel, setStatusLabel] = useState("");
|
||||
const [scanComplete, setScanComplete] = useState(false);
|
||||
const [currentItem, setCurrentItem] = useState("");
|
||||
const [progressScanned, setProgressScanned] = useState(0);
|
||||
const [progressTotal, setProgressTotal] = useState(0);
|
||||
const [errors, setErrors] = useState(0);
|
||||
const [filters, setFilters] = useState<ItemFilters>({ q: "", status: "all", type: "all", source: "all" });
|
||||
const [itemsRows, setItemsRows] = useState<ScanItemsRow[]>([]);
|
||||
const [itemsOffset, setItemsOffset] = useState(0);
|
||||
const [itemsHasMore, setItemsHasMore] = useState(false);
|
||||
const [itemsTotal, setItemsTotal] = useState(0);
|
||||
const [itemsLoading, setItemsLoading] = useState(false);
|
||||
const esRef = useRef<EventSource | null>(null);
|
||||
const bufRef = useRef<SseBuf>(freshBuf());
|
||||
const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
@@ -122,7 +204,19 @@ export function ScanPage() {
|
||||
setCurrentItem(b.currentItem);
|
||||
if (b.newLogs.length > 0) {
|
||||
const batch = b.newLogs.splice(0);
|
||||
setLog((prev) => [...batch.reverse(), ...prev].slice(0, 100));
|
||||
setRecentIngest((prev) =>
|
||||
[
|
||||
...batch.map((item) => ({
|
||||
name: item.name,
|
||||
type: item.type,
|
||||
status: item.status,
|
||||
file: item.file ?? item.name,
|
||||
scannedAt: new Date().toISOString(),
|
||||
source: "scan" as const,
|
||||
})),
|
||||
...prev,
|
||||
].slice(0, 5),
|
||||
);
|
||||
}
|
||||
b.dirty = false;
|
||||
}
|
||||
@@ -172,13 +266,55 @@ export function ScanPage() {
|
||||
setErrors(s.progress.errors);
|
||||
setStatusLabel(s.running ? "Scan in progress…" : "Scan idle");
|
||||
if (s.scanLimit != null) setLimit(String(s.scanLimit));
|
||||
setLog(s.recentItems.map((i) => ({ name: i.name, type: i.type, status: i.scan_status, file: i.file_path })));
|
||||
setRecentIngest(
|
||||
s.recentItems.map((i) => ({
|
||||
name: i.name,
|
||||
type: i.type,
|
||||
status: i.scan_status,
|
||||
file: i.file_path,
|
||||
scannedAt: i.last_scanned_at,
|
||||
source: i.ingest_source,
|
||||
})),
|
||||
);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
load();
|
||||
}, [load]);
|
||||
|
||||
const fetchItems = useCallback(
|
||||
async (offset: number, append: boolean) => {
|
||||
setItemsLoading(true);
|
||||
try {
|
||||
const qs = new URLSearchParams({
|
||||
offset: String(offset),
|
||||
limit: "50",
|
||||
q: filters.q,
|
||||
status: filters.status,
|
||||
type: filters.type,
|
||||
source: filters.source,
|
||||
});
|
||||
const res = await api.get<ScanItemsResponse>(`/api/scan/items?${qs.toString()}`);
|
||||
setItemsRows((prev) => (append ? [...prev, ...res.rows] : res.rows));
|
||||
setItemsOffset(offset + res.rows.length);
|
||||
setItemsHasMore(res.hasMore);
|
||||
setItemsTotal(res.total);
|
||||
} finally {
|
||||
setItemsLoading(false);
|
||||
}
|
||||
},
|
||||
[filters],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
fetchItems(0, false);
|
||||
}, [fetchItems]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!scanComplete) return;
|
||||
fetchItems(0, false);
|
||||
}, [scanComplete, fetchItems]);
|
||||
|
||||
const connectSse = useCallback(() => {
|
||||
esRef.current?.close();
|
||||
const buf = bufRef.current;
|
||||
@@ -229,7 +365,7 @@ export function ScanPage() {
|
||||
}, [status?.running, connectSse, stopFlushing]);
|
||||
|
||||
const startScan = async () => {
|
||||
setLog([]);
|
||||
setRecentIngest([]);
|
||||
setProgressScanned(0);
|
||||
setProgressTotal(0);
|
||||
setErrors(0);
|
||||
@@ -284,13 +420,17 @@ export function ScanPage() {
|
||||
)}
|
||||
|
||||
<div className="border border-gray-200 rounded-lg px-4 py-3 mb-6">
|
||||
<div className="flex items-center flex-wrap gap-2 mb-3">
|
||||
<div className="flex items-start justify-between gap-3 mb-3">
|
||||
<div className="space-y-2 min-w-0">
|
||||
<div className="flex items-center flex-wrap gap-2">
|
||||
<span className="text-sm font-medium">{statusLabel || (running ? "Scan in progress…" : "Scan idle")}</span>
|
||||
{scanComplete && (
|
||||
<Link to="/pipeline" className="text-blue-600 hover:underline text-sm">
|
||||
Review in Pipeline →
|
||||
</Link>
|
||||
)}
|
||||
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
|
||||
</div>
|
||||
{running ? (
|
||||
<Button variant="secondary" size="sm" onClick={stopScan}>
|
||||
Stop
|
||||
@@ -314,7 +454,14 @@ export function ScanPage() {
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
|
||||
</div>
|
||||
<div className="text-right shrink-0">
|
||||
<div className="text-sm font-semibold text-gray-700">
|
||||
{formatThousands(progressScanned)}
|
||||
{progressTotal > 0 ? ` / ${formatThousands(progressTotal)}` : ""}
|
||||
</div>
|
||||
<div className="text-[0.7rem] text-gray-500">scanned</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{(running || progressScanned > 0) && (
|
||||
@@ -325,25 +472,20 @@ export function ScanPage() {
|
||||
</div>
|
||||
)}
|
||||
<div className="flex items-center gap-2 text-gray-500 text-xs">
|
||||
<span>
|
||||
{progressScanned}
|
||||
{progressTotal > 0 ? ` / ${progressTotal}` : ""} scanned
|
||||
</span>
|
||||
{currentItem && <span className="truncate max-w-xs text-gray-400">{currentItem}</span>}
|
||||
{currentItem && <span className="truncate max-w-2xl text-gray-400">{currentItem}</span>}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Log */}
|
||||
<h3 className="font-semibold text-sm mb-2">Recent items</h3>
|
||||
<table className="w-full border-collapse text-[0.82rem]">
|
||||
<div className="mt-3">
|
||||
<h3 className="font-semibold text-sm mb-2">Recent ingest (5)</h3>
|
||||
<table className="w-full border-collapse text-[0.78rem]">
|
||||
<thead>
|
||||
<tr>
|
||||
{["Type", "File", "Status"].map((h) => (
|
||||
{["Time", "Source", "Type", "File", "Status"].map((h) => (
|
||||
<th
|
||||
key={h}
|
||||
className="text-left text-[0.68rem] font-bold uppercase tracking-[0.06em] text-gray-500 py-1 px-2 border-b-2 border-gray-200 whitespace-nowrap"
|
||||
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
|
||||
>
|
||||
{h}
|
||||
</th>
|
||||
@@ -351,16 +493,27 @@ export function ScanPage() {
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{log.map((item, i) => {
|
||||
const fileName = item.file ? (item.file.split("/").pop() ?? item.name) : item.name;
|
||||
{recentIngest.length === 0 && (
|
||||
<tr>
|
||||
<td colSpan={5} className="py-2 px-2 text-gray-400">
|
||||
No ingested items yet.
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
{recentIngest.map((item, i) => {
|
||||
const fileName = item.file.split("/").pop() ?? item.name;
|
||||
return (
|
||||
<tr key={i} className="hover:bg-gray-50">
|
||||
<tr key={`${item.file}-${i}`} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatScannedAt(item.scannedAt)}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant="default">{item.source ?? "scan"}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{item.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100" title={item.file ?? item.name}>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-96" title={item.file}>
|
||||
{fileName}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={item.status as "error" | "done" | "pending"}>{item.status}</Badge>
|
||||
<Badge variant={statusBadgeVariant(item.status)}>{item.status}</Badge>
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
@@ -368,5 +521,125 @@ export function ScanPage() {
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mb-2 flex items-end justify-between gap-3">
|
||||
<h3 className="font-semibold text-sm">Library items</h3>
|
||||
<span className="text-xs text-gray-500">{formatThousands(itemsTotal)} total</span>
|
||||
</div>
|
||||
|
||||
<div className="border border-gray-200 rounded-lg p-3 mb-3 flex flex-wrap items-end gap-2">
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Search
|
||||
<input
|
||||
type="text"
|
||||
value={filters.q}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, q: e.target.value }))}
|
||||
placeholder="Name or path"
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs w-56"
|
||||
/>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Status
|
||||
<select
|
||||
value={filters.status}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, status: e.target.value as ItemFilters["status"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="scanned">Scanned</option>
|
||||
<option value="pending">Pending</option>
|
||||
<option value="error">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Type
|
||||
<select
|
||||
value={filters.type}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, type: e.target.value as ItemFilters["type"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="movie">Movie</option>
|
||||
<option value="episode">Episode</option>
|
||||
</select>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Source
|
||||
<select
|
||||
value={filters.source}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, source: e.target.value as ItemFilters["source"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="scan">Scan</option>
|
||||
<option value="webhook">Webhook</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<table className="w-full border-collapse text-[0.8rem]">
|
||||
<thead>
|
||||
<tr>
|
||||
{["Scanned", "Name", "Type", "Series / Ep", "Language", "Container", "Size", "Source", "Status", "Path"].map(
|
||||
(h) => (
|
||||
<th
|
||||
key={h}
|
||||
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
|
||||
>
|
||||
{h}
|
||||
</th>
|
||||
),
|
||||
)}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{itemsRows.length === 0 && !itemsLoading && (
|
||||
<tr>
|
||||
<td colSpan={10} className="py-3 px-2 text-gray-400">
|
||||
No items match the current filters.
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
{itemsRows.map((row) => (
|
||||
<tr key={row.id} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">
|
||||
{formatScannedAt(row.last_scanned_at)}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.name}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<div>{row.series_name ?? "—"}</div>
|
||||
<div className="text-[0.68rem] text-gray-500">{episodeLabel(row)}</div>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<div>{row.original_language ?? "—"}</div>
|
||||
<div className="text-[0.68rem] text-gray-500">{row.orig_lang_source ?? "—"}</div>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.container ?? "—"}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatFileSize(row.file_size)}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant="default">{row.ingest_source ?? "scan"}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={statusBadgeVariant(row.scan_status)}>{row.scan_status}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-xs" title={row.file_path}>
|
||||
{row.file_path}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div className="mt-3 flex items-center gap-2">
|
||||
{itemsHasMore && (
|
||||
<Button size="sm" variant="secondary" onClick={() => fetchItems(itemsOffset, true)} disabled={itemsLoading}>
|
||||
{itemsLoading ? "Loading…" : "Load more"}
|
||||
</Button>
|
||||
)}
|
||||
{itemsLoading && !itemsHasMore && <span className="text-xs text-gray-500">Loading…</span>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user