rework scan page, add ingest-source browsing, bump version to 2026.04.15.8
All checks were successful
Build and Push Docker Image / build (push) Successful in 4m56s

This commit is contained in:
2026-04-15 18:31:00 +02:00
parent c6698db51a
commit a2bdecd298
10 changed files with 641 additions and 94 deletions

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test";
import { extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
import { enqueueUnseenJobs, extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
describe("extractErrorSummary", () => {
test("pulls the real error line out of ffmpeg's banner", () => {
@@ -71,3 +71,15 @@ describe("yieldAfterChunk", () => {
expect(yieldCalls).toBe(1);
});
});
// enqueueUnseenJobs: ids already present in `seen` are skipped; new jobs are
// appended to the queue and their ids recorded.
describe("enqueueUnseenJobs", () => {
test("appends only unseen job ids to the active queue", () => {
// Queue already holds ids 1 and 2, and both are recorded in `seen`.
const queue = [{ id: 1 }, { id: 2 }] as { id: number }[];
const seen = new Set([1, 2]);
// id 2 is a duplicate and must be skipped; ids 3 and 4 are new.
const added = enqueueUnseenJobs(queue, seen, [{ id: 2 }, { id: 3 }, { id: 4 }] as { id: number }[]);
expect(added).toBe(2);
// Existing entries keep their position; new jobs are appended in input order.
expect(queue.map((j) => j.id)).toEqual([1, 2, 3, 4]);
// The seen-set is mutated in place so later calls also skip 3 and 4.
expect(seen.has(3)).toBeTrue();
expect(seen.has(4)).toBeTrue();
});
});

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test";
import { parseScanLimit } from "../scan";
import { buildScanItemsWhere, parseScanItemsQuery, parseScanLimit } from "../scan";
describe("parseScanLimit", () => {
test("accepts positive integers and nullish/empty as no-limit", () => {
@@ -29,3 +29,73 @@ describe("parseScanLimit", () => {
expect(parseScanLimit(Number.POSITIVE_INFINITY)).toEqual({ ok: false });
});
});
// parseScanItemsQuery: normalizes raw query-string input into a validated
// ScanItemsQuery with clamped pagination and enum fallbacks.
describe("parseScanItemsQuery", () => {
test("normalizes default filters and pagination", () => {
// An empty query object must resolve to the documented defaults.
const q = parseScanItemsQuery({});
expect(q).toEqual({
offset: 0,
limit: 50,
search: "",
status: "all",
type: "all",
source: "all",
});
});
test("clamps limit and offset, trims and lowercases values", () => {
// Deliberately out-of-range pagination and mixed-case enum values.
const q = parseScanItemsQuery({
offset: "-12",
limit: "5000",
q: " The Wire ",
status: "SCANNED",
type: "EPISODE",
source: "WEBHOOK",
});
expect(q).toEqual({
offset: 0, // negative offsets floor at 0
limit: 200, // limit is capped at 200
search: "The Wire", // trimmed; case preserved (lowering happens in SQL)
status: "scanned",
type: "episode",
source: "webhook",
});
});
test("falls back to all for unknown enum values", () => {
// Unknown values must degrade to the broad "all" filter, never throw.
const q = parseScanItemsQuery({ status: "zzz", type: "cartoon", source: "mqtt" });
expect(q.status).toBe("all");
expect(q.type).toBe("all");
expect(q.source).toBe("all");
});
});
// buildScanItemsWhere: turns a ScanItemsQuery into a parameterized SQL WHERE
// fragment plus its positional bind arguments.
describe("buildScanItemsWhere", () => {
test("builds combined where clause + args in stable order", () => {
// All four filters active at once.
const where = buildScanItemsWhere({
offset: 0,
limit: 50,
search: "blade",
status: "scanned",
type: "movie",
source: "webhook",
});
// Clause order is fixed: status, type, source, then free-text search.
expect(where.sql).toBe(
"WHERE scan_status = ? AND lower(type) = ? AND ingest_source = ? AND (lower(name) LIKE ? OR lower(file_path) LIKE ?)",
);
// Args line up positionally with the placeholders; the search needle is
// lowercased, wrapped in % wildcards, and bound twice (name + file_path).
expect(where.args).toEqual(["scanned", "movie", "webhook", "%blade%", "%blade%"]);
});
test("returns empty where when all filters are broad", () => {
const where = buildScanItemsWhere({
offset: 0,
limit: 50,
search: "",
status: "all",
type: "all",
source: "all",
});
// No filtering: callers can interpolate the empty string safely.
expect(where.sql).toBe("");
expect(where.args).toEqual([]);
});
});

View File

@@ -23,6 +23,8 @@ const app = new Hono();
let queueRunning = false;
let runningProc: ReturnType<typeof Bun.spawn> | null = null;
let runningJobId: number | null = null;
let activeQueue: Job[] | null = null;
let activeSeen: Set<number> | null = null;
const LIVE_UPDATE_INTERVAL_MS = 500;
const STREAM_CHUNKS_BEFORE_YIELD = 24;
@@ -41,6 +43,17 @@ export async function yieldAfterChunk(
return 0;
}
/**
 * Append every job from `jobs` whose id is not yet in `seen` onto `queue`,
 * recording each appended id in `seen` as it goes.
 *
 * Both `queue` and `seen` are mutated in place.
 *
 * @returns the number of jobs actually appended.
 */
export function enqueueUnseenJobs<T extends { id: number }>(queue: T[], seen: Set<number>, jobs: T[]): number {
  const before = queue.length;
  for (const candidate of jobs) {
    if (!seen.has(candidate.id)) {
      seen.add(candidate.id);
      queue.push(candidate);
    }
  }
  return queue.length - before;
}
function emitQueueStatus(
status: "running" | "paused" | "sleeping" | "idle",
extra: { until?: string; seconds?: number } = {},
@@ -56,6 +69,8 @@ async function runSequential(initial: Job[]): Promise<void> {
let first = true;
const queue: Job[] = [...initial];
const seen = new Set<number>(queue.map((j) => j.id));
activeQueue = queue;
activeSeen = seen;
while (queue.length > 0) {
const job = queue.shift() as Job;
@@ -99,15 +114,12 @@ async function runSequential(initial: Job[]): Promise<void> {
// manually clicks "Run all" again.
if (queue.length === 0) {
const more = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
for (const m of more) {
if (!seen.has(m.id)) {
queue.push(m);
seen.add(m.id);
}
}
enqueueUnseenJobs(queue, seen, more);
}
}
} finally {
activeQueue = null;
activeSeen = null;
queueRunning = false;
emitQueueStatus("idle");
}
@@ -178,8 +190,12 @@ function loadJobRow(jobId: number) {
app.post("/start", (c) => {
const db = getDb();
const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
if (queueRunning && activeQueue && activeSeen) {
const queued = enqueueUnseenJobs(activeQueue, activeSeen, pending);
return c.json({ ok: true, started: 0, queued });
}
runSequential(pending).catch((err) => logError("Queue failed:", err));
return c.json({ ok: true, started: pending.length });
return c.json({ ok: true, started: pending.length, queued: pending.length });
});
// ─── Run single ───────────────────────────────────────────────────────────────

View File

@@ -23,6 +23,78 @@ export function parseScanLimit(raw: unknown): { ok: true; value: number | null }
return { ok: true, value: n };
}
// Allowed values for the /items filter parameters; "all" disables that filter.
type ScanStatusFilter = "all" | "pending" | "scanned" | "error";
type ScanTypeFilter = "all" | "movie" | "episode";
type ScanSourceFilter = "all" | "scan" | "webhook";
// Normalized, validated query for the scan-items listing endpoint.
// Produced by parseScanItemsQuery; consumed by buildScanItemsWhere.
export interface ScanItemsQuery {
offset: number; // 0-based row offset, never negative
limit: number; // page size, clamped to 1..200
search: string; // trimmed free-text needle; "" means no search
status: ScanStatusFilter;
type: ScanTypeFilter;
source: ScanSourceFilter;
}
/**
 * Parse `raw` as a non-negative integer, returning `fallback` for anything
 * else: non-numeric strings, NaN, ±Infinity, floats — and, fixing the
 * original, negative integers. The old version accepted negatives despite
 * the name and relied on every caller to clamp afterwards, which turned a
 * negative `limit` into 1 instead of the intended default.
 */
function parsePositiveInt(raw: unknown, fallback: number): number {
  const n = typeof raw === "number" ? raw : Number(raw);
  // Number.isInteger rejects NaN and ±Infinity as well as fractional values.
  if (!Number.isInteger(n)) return fallback;
  if (n < 0) return fallback;
  return n;
}
/** Constrain `n` to the inclusive range [min, max]; lower bound wins first. */
function clamp(n: number, min: number, max: number): number {
  if (n < min) return min;
  return n > max ? max : n;
}
/**
 * Case-insensitively match `raw` against the `allowed` string literals,
 * returning the lowercased match, or `fallback` when `raw` is not a string
 * or is not one of the allowed values.
 */
function parseOneOf<T extends readonly string[]>(raw: unknown, allowed: T, fallback: T[number]): T[number] {
  if (typeof raw !== "string") return fallback;
  const candidate = raw.toLowerCase();
  for (const option of allowed) {
    if (option === candidate) return candidate as T[number];
  }
  return fallback;
}
/**
 * Normalize a raw query-parameter record into a validated ScanItemsQuery.
 *
 * - `offset` is floored at 0; `limit` is clamped to 1..200 (default 50).
 * - `q` is trimmed but keeps its case (SQL comparison lowercases later).
 * - The three enum filters fall back to "all" for unknown values.
 */
export function parseScanItemsQuery(raw: Record<string, unknown>): ScanItemsQuery {
  const search = typeof raw.q === "string" ? raw.q.trim() : "";
  return {
    offset: Math.max(0, parsePositiveInt(raw.offset, 0)),
    limit: clamp(parsePositiveInt(raw.limit, 50), 1, 200),
    search,
    status: parseOneOf(raw.status, ["all", "pending", "scanned", "error"] as const, "all"),
    type: parseOneOf(raw.type, ["all", "movie", "episode"] as const, "all"),
    source: parseOneOf(raw.source, ["all", "scan", "webhook"] as const, "all"),
  };
}
/**
 * Build a parameterized SQL WHERE fragment from a ScanItemsQuery.
 *
 * Clauses are emitted in a stable order (status, type, source, search) and
 * `args` lines up positionally with the `?` placeholders. When every filter
 * is broad, `sql` is the empty string and `args` is empty.
 *
 * NOTE(review): the LIKE needle is not escaped, so "%"/"_" typed by the user
 * act as wildcards — confirm that is intended before hardening.
 */
export function buildScanItemsWhere(query: ScanItemsQuery): { sql: string; args: string[] } {
  const parts: { clause: string; values: string[] }[] = [];
  if (query.status !== "all") {
    parts.push({ clause: "scan_status = ?", values: [query.status] });
  }
  if (query.type !== "all") {
    parts.push({ clause: "lower(type) = ?", values: [query.type] });
  }
  if (query.source !== "all") {
    parts.push({ clause: "ingest_source = ?", values: [query.source] });
  }
  if (query.search.length > 0) {
    const needle = `%${query.search.toLowerCase()}%`;
    parts.push({ clause: "(lower(name) LIKE ? OR lower(file_path) LIKE ?)", values: [needle, needle] });
  }
  if (parts.length === 0) {
    return { sql: "", args: [] };
  }
  return {
    sql: `WHERE ${parts.map((p) => p.clause).join(" AND ")}`,
    args: parts.flatMap((p) => p.values),
  };
}
// ─── State ────────────────────────────────────────────────────────────────────
let scanAbort: AbortController | null = null;
@@ -60,12 +132,65 @@ app.get("/", (c) => {
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
.n;
const recentItems = db
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50")
.all() as { name: string; type: string; scan_status: string; file_path: string }[];
.prepare(
"SELECT name, type, scan_status, file_path, last_scanned_at, ingest_source FROM media_items ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC LIMIT 5",
)
.all() as {
name: string;
type: string;
scan_status: string;
file_path: string;
last_scanned_at: string | null;
ingest_source: string | null;
}[];
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
});
// GET /items — paginated, filterable listing of media_items for the scan page.
// All query params (offset/limit/q/status/type/source) are normalized by
// parseScanItemsQuery first, so everything past that call is validated.
app.get("/items", (c) => {
const db = getDb();
const query = parseScanItemsQuery({
offset: c.req.query("offset"),
limit: c.req.query("limit"),
q: c.req.query("q"),
status: c.req.query("status"),
type: c.req.query("type"),
source: c.req.query("source"),
});
// Shared WHERE fragment + bound args, reused by both the page query and the
// COUNT query below so the two can never disagree on filtering.
const where = buildScanItemsWhere(query);
// NOTE(review): where.sql is interpolated (not bound) — safe only because
// buildScanItemsWhere emits fixed clause text with ? placeholders; the user
// values travel in where.args. Keep it that way.
const rows = db
.prepare(
`
SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
scan_status, original_language, orig_lang_source, container, file_size, file_path,
last_scanned_at, ingest_source
FROM media_items
${where.sql}
ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
LIMIT ? OFFSET ?
`,
)
.all(...where.args, query.limit, query.offset) as Array<{
id: number;
jellyfin_id: string;
name: string;
type: string;
series_name: string | null;
season_number: number | null;
episode_number: number | null;
scan_status: string;
original_language: string | null;
orig_lang_source: string | null;
container: string | null;
file_size: number | null;
file_path: string;
last_scanned_at: string | null;
ingest_source: string | null;
}>;
// Total matching rows under the same filters (no pagination) for the pager.
const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
// Echo the normalized query back so the UI can show what was actually applied.
return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
});
// ─── Start ────────────────────────────────────────────────────────────────────
app.post("/start", async (c) => {

View File

@@ -79,6 +79,7 @@ function migrate(db: Database): void {
// RENAME COLUMN preserves values; both alters are no-ops on fresh DBs.
alter("ALTER TABLE review_plans RENAME COLUMN webhook_verified TO verified");
alter("ALTER TABLE review_plans DROP COLUMN verified");
alter("ALTER TABLE media_items ADD COLUMN ingest_source TEXT NOT NULL DEFAULT 'scan'");
}
function seedDefaults(db: Database): void {

View File

@@ -31,12 +31,13 @@ CREATE TABLE IF NOT EXISTS media_items (
tvdb_id TEXT,
jellyfin_raw TEXT,
external_raw TEXT,
scan_status TEXT NOT NULL DEFAULT 'pending',
scan_error TEXT,
last_scanned_at TEXT,
last_executed_at TEXT,
created_at TEXT NOT NULL DEFAULT (datetime('now'))
);
scan_status TEXT NOT NULL DEFAULT 'pending',
scan_error TEXT,
last_scanned_at TEXT,
ingest_source TEXT NOT NULL DEFAULT 'scan',
last_executed_at TEXT,
created_at TEXT NOT NULL DEFAULT (datetime('now'))
);
CREATE TABLE IF NOT EXISTS media_streams (
id INTEGER PRIMARY KEY AUTOINCREMENT,

View File

@@ -133,11 +133,11 @@ export async function upsertJellyfinItem(
season_number, episode_number, year, file_path, file_size, container,
runtime_ticks, date_last_refreshed,
original_language, orig_lang_source, needs_review,
imdb_id, tmdb_id, tvdb_id,
jellyfin_raw, external_raw,
scan_status, last_scanned_at${opts.executed ? ", last_executed_at" : ""}
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now')${opts.executed ? ", datetime('now')" : ""})
ON CONFLICT(jellyfin_id) DO UPDATE SET
imdb_id, tmdb_id, tvdb_id,
jellyfin_raw, external_raw,
scan_status, last_scanned_at, ingest_source${opts.executed ? ", last_executed_at" : ""}
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now'), ?${opts.executed ? ", datetime('now')" : ""})
ON CONFLICT(jellyfin_id) DO UPDATE SET
type = excluded.type, name = excluded.name, original_title = excluded.original_title,
series_name = excluded.series_name, series_jellyfin_id = excluded.series_jellyfin_id,
season_number = excluded.season_number, episode_number = excluded.episode_number,
@@ -145,12 +145,13 @@ export async function upsertJellyfinItem(
file_size = excluded.file_size, container = excluded.container,
runtime_ticks = excluded.runtime_ticks, date_last_refreshed = excluded.date_last_refreshed,
original_language = excluded.original_language, orig_lang_source = excluded.orig_lang_source,
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
scan_status = 'scanned', last_scanned_at = datetime('now')
${opts.executed ? ", last_executed_at = datetime('now')" : ""}
`);
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
scan_status = 'scanned', last_scanned_at = datetime('now'),
ingest_source = excluded.ingest_source
${opts.executed ? ", last_executed_at = datetime('now')" : ""}
`);
upsertItem.run(
jellyfinItem.Id,
jellyfinItem.Type === "Episode" ? "Episode" : "Movie",
@@ -174,6 +175,7 @@ export async function upsertJellyfinItem(
tvdbId,
jellyfinRaw,
externalRawJson,
source,
);
const itemRow = db.prepare("SELECT id FROM media_items WHERE jellyfin_id = ?").get(jellyfinItem.Id) as {