Files
netfelix-audio-fix/server/api/scan.ts
T
felixfoertsch 3341ceed14
Build and Push Docker Image / build (push) Successful in 1m13s
remove analyzer from scan, scan is now pure jellyfin ingest
upsertJellyfinItem no longer runs analyzeItem or creates stream_decisions.
it inserts a minimal review_plans stub (pending, unsorted). all analysis
happens in processInbox. this means after scan, ALL items land in the
inbox — the "needs action" count equals the inbox count until processing
classifies them.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 11:31:30 +02:00

241 lines
9.1 KiB
TypeScript

import { Hono } from "hono";
import { stream } from "hono/streaming";
import { getAllConfig, getConfig, getDb, setConfig } from "../db/index";
import { log, error as logError, warn } from "../lib/log";
import { getAllItems, getDevItems } from "../services/jellyfin";
import { emitPipelineChanged } from "./execute";
import { upsertJellyfinItem } from "../services/rescan";
import { isInScanWindow, msUntilScanWindow, nextScanWindowTime, waitForScanWindow } from "../services/scheduler";
// Hono sub-app for the scan API; all routes below are registered on it and it
// is exported at the bottom of this file for the parent router to mount.
const app = new Hono();
/**
 * Validate a scan `limit` input. Must be a positive integer or absent —
 * NaN/negatives/non-numerics would disable the progress cap
 * (`processed >= NaN` never trips) or produce bogus totals via
 * `Math.min(NaN, …)`. Exported for unit tests.
 *
 * @param raw value from the request body or `SCAN_LIMIT` env var
 * @returns `{ ok: true, value: null }` for "no limit", `{ ok: true, value: n }`
 *          for a valid cap, `{ ok: false }` for anything else
 */
export function parseScanLimit(raw: unknown): { ok: true; value: number | null } | { ok: false } {
  // Absent (null/undefined) or an empty form field means "no limit".
  if (raw == null || raw === "") return { ok: true, value: null };
  // Only plain numbers and numeric strings are acceptable. Anything else
  // would be silently coerced by Number() — e.g. Number(true) === 1,
  // Number([5]) === 5 — and sneak past validation.
  if (typeof raw !== "number" && typeof raw !== "string") return { ok: false };
  const n = typeof raw === "number" ? raw : Number(raw);
  if (!Number.isInteger(n) || n <= 0) return { ok: false };
  return { ok: true, value: n };
}
// ─── State ────────────────────────────────────────────────────────────────────
// Abort handle for the currently running scan; null when no scan is in flight.
let scanAbort: AbortController | null = null;
// Connected SSE clients — each entry is a callback that enqueues one raw frame.
const scanListeners = new Set<(data: string) => void>();
/** Broadcast a single SSE frame (event name + JSON payload) to all listeners. */
function emitSse(type: string, data: unknown): void {
  const frame = `event: ${type}\ndata: ${JSON.stringify(data)}\n\n`;
  scanListeners.forEach((notify) => notify(frame));
}
/** Read the persisted scan limit; empty string or missing key means "no limit". */
function currentScanLimit(): number | null {
  const stored = getConfig("scan_limit");
  if (!stored) return null;
  return Number(stored);
}
// ─── Status ───────────────────────────────────────────────────────────────────
app.get("/", (c) => {
  const db = getDb();
  const running = getConfig("scan_running") === "1";
  // Tiny helper: run a single-row COUNT(*) query and unwrap the number.
  const countRows = (sql: string): number => (db.prepare(sql).get() as { n: number }).n;
  const total = countRows("SELECT COUNT(*) as n FROM media_items");
  const scanned = countRows("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'scanned'");
  const errors = countRows("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'");
  // Five most recently touched items, newest first (COALESCE falls back to
  // created_at for rows that were never scanned).
  type RecentItem = {
    name: string;
    type: string;
    scan_status: string;
    file_path: string;
    last_scanned_at: string | null;
    ingest_source: string | null;
  };
  const recentItems = db
    .prepare(
      "SELECT name, type, scan_status, file_path, last_scanned_at, ingest_source FROM media_items ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC LIMIT 5",
    )
    .all() as RecentItem[];
  return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
});
// ─── Start ────────────────────────────────────────────────────────────────────
app.post("/start", async (c) => {
  const db = getDb();
  // Atomic claim: the UPDATE only touches the row when scan_running is not
  // already '1', so concurrent /start calls race on changes === 0.
  const claim = db.prepare("UPDATE config SET value = '1' WHERE key = 'scan_running' AND value != '1'").run();
  if (claim.changes === 0) {
    return c.json({ ok: false, error: "Scan already running" }, 409);
  }
  // A malformed or absent JSON body is treated the same as "no limit given".
  const payload = await c.req.json<{ limit?: unknown }>().catch(() => ({ limit: undefined }));
  const requestLimit = parseScanLimit(payload.limit);
  const envLimit = parseScanLimit(process.env.SCAN_LIMIT);
  if (!requestLimit.ok || !envLimit.ok) {
    // Release the claim before rejecting, otherwise no scan could ever start.
    db.prepare("UPDATE config SET value = '0' WHERE key = 'scan_running'").run();
    return c.json({ ok: false, error: "limit must be a positive integer" }, 400);
  }
  // Request body wins over the SCAN_LIMIT env var; both absent → unlimited.
  const limit: number | null = requestLimit.value ?? envLimit.value ?? null;
  setConfig("scan_limit", limit != null ? String(limit) : "");
  // Fire-and-forget: progress is reported over SSE, not in this response. The
  // catch handler resets the running flag so a crash doesn't wedge the claim.
  runScan(limit).catch((err) => {
    logError("Scan failed:", err);
    setConfig("scan_running", "0");
    emitSse("error", { message: String(err) });
  });
  return c.json({ ok: true });
});
// ─── Stop ─────────────────────────────────────────────────────────────────────
app.post("/stop", (c) => {
  // Signal the in-flight loop (runScan checks signal.aborted between items)
  // and clear the flag so a new scan can be claimed immediately.
  if (scanAbort !== null) scanAbort.abort();
  setConfig("scan_running", "0");
  return c.json({ ok: true });
});
// ─── SSE ──────────────────────────────────────────────────────────────────────
// Long-lived event stream. Each connection gets its own queue + listener; the
// write loop drains the queue in order and emits a keepalive comment when idle.
app.get("/events", (c) => {
  return stream(c, async (s) => {
    c.header("Content-Type", "text/event-stream");
    c.header("Cache-Control", "no-cache");
    c.header("Connection", "keep-alive");
    // Per-connection outbound buffer; emitSse's fan-out pushes frames here.
    const queue: string[] = [];
    // While the loop is parked below, this holds the parked promise's resolver
    // so the listener can wake the writer as soon as a frame arrives.
    let resolve: (() => void) | null = null;
    const listener = (data: string) => {
      queue.push(data);
      resolve?.();
    };
    scanListeners.add(listener);
    s.onAbort(() => {
      scanListeners.delete(listener);
    });
    try {
      while (!s.closed) {
        if (queue.length > 0) {
          await s.write(queue.shift()!);
        } else {
          // Park until either a new frame arrives (listener calls resolve) or
          // 25 s elapse — whichever comes first. Resolving twice is harmless.
          await new Promise<void>((res) => {
            resolve = res;
            setTimeout(res, 25_000);
          });
          resolve = null;
          // Timed out with nothing queued: send an SSE comment line so
          // proxies/load balancers don't drop the idle connection.
          if (queue.length === 0) await s.write(": keepalive\n\n");
        }
      }
    } finally {
      // Belt and braces: onAbort usually removes the listener, but make sure
      // it is gone however the loop exits.
      scanListeners.delete(listener);
    }
  });
});
// ─── Core scan logic ──────────────────────────────────────────────────────────
/**
 * Ingest the Jellyfin library into the local DB. Pure ingest: per-item work is
 * delegated to upsertJellyfinItem; no analysis happens here. Progress, pause
 * and completion events are broadcast to /events subscribers via emitSse.
 *
 * @param limit stop after this many items (null = unlimited). Note the cap is
 *              only enforced in non-dev mode — getDevItems supplies its own
 *              fixed item set.
 */
async function runScan(limit: number | null = null): Promise<void> {
  log(`Scan started${limit ? ` (limit: ${limit})` : ""}`);
  // Fresh controller per run; /stop aborts it to break the loop below.
  scanAbort = new AbortController();
  const { signal } = scanAbort;
  const isDev = process.env.NODE_ENV === "development";
  const db = getDb();
  if (isDev) {
    // Dev scans start from a clean slate. Order matters only if foreign keys
    // are enforced without CASCADE; we have ON DELETE CASCADE on media_streams/
    // review_plans/stream_decisions/jobs, so deleting media_items would be
    // enough. List them explicitly for clarity and to survive future schema drift.
    db.prepare("DELETE FROM jobs").run();
    db.prepare("DELETE FROM stream_decisions").run();
    db.prepare("DELETE FROM review_plans").run();
    db.prepare("DELETE FROM media_streams").run();
    db.prepare("DELETE FROM media_items").run();
  }
  const cfg = getAllConfig();
  const jellyfinCfg = { url: cfg.jellyfin_url, apiKey: cfg.jellyfin_api_key, userId: cfg.jellyfin_user_id };
  // Scan no longer runs the analyzer, so upsertJellyfinItem needs no settings.
  const rescanCfg = {};
  let processed = 0;
  let errors = 0;
  let total = 0;
  const itemSource = isDev
    ? getDevItems(jellyfinCfg)
    : getAllItems(jellyfinCfg, (_fetched, jellyfinTotal) => {
        // Clamp the reported library size to the requested limit so the
        // progress denominator matches what we will actually process.
        total = limit != null ? Math.min(limit, jellyfinTotal) : jellyfinTotal;
      });
  for await (const jellyfinItem of itemSource) {
    if (signal.aborted) break;
    if (!isDev && limit != null && processed >= limit) break;
    if (!jellyfinItem.Name || !jellyfinItem.Path) {
      warn(`Skipping item without name/path: id=${jellyfinItem.Id}`);
      continue;
    }
    // Honour the scan window between items so overnight-only setups don't hog
    // Jellyfin during the day. Checked between items rather than mid-item so
    // we don't leave a partial upsert sitting in flight.
    if (!isInScanWindow()) {
      emitSse("paused", {
        until: nextScanWindowTime(),
        seconds: Math.round(msUntilScanWindow() / 1000),
      });
      await waitForScanWindow();
      if (signal.aborted) break;
      emitSse("resumed", {});
    }
    processed++;
    emitSse("progress", { scanned: processed, total, current_item: jellyfinItem.Name, errors, running: true });
    try {
      await upsertJellyfinItem(db, jellyfinItem, rescanCfg);
      // Nudge pipeline listeners periodically rather than per item to keep
      // SSE/UI churn down on large libraries.
      if (processed % 25 === 0) emitPipelineChanged();
      emitSse("log", { name: jellyfinItem.Name, type: jellyfinItem.Type, status: "scanned", file: jellyfinItem.Path });
    } catch (err) {
      errors++;
      logError(`Error scanning ${jellyfinItem.Name}:`, err);
      try {
        db
          .prepare("UPDATE media_items SET scan_status = 'error', scan_error = ? WHERE jellyfin_id = ?")
          .run(String(err), jellyfinItem.Id);
      } catch (dbErr) {
        // Failed to persist the error status — log it so the incident
        // doesn't disappear silently. We can't do much more; the outer
        // loop continues so the scan still finishes.
        logError(`Failed to record scan error for ${jellyfinItem.Id}:`, dbErr);
      }
      emitSse("log", { name: jellyfinItem.Name, type: jellyfinItem.Type, status: "error", file: jellyfinItem.Path });
    }
  }
  setConfig("scan_running", "0");
  log(`Scan complete: ${processed} scanned, ${errors} errors`);
  emitSse("complete", { scanned: processed, total, errors });
  if (getConfig("auto_processing") === "1") {
    // Lazy imports avoid a module cycle between scan and review/execute.
    // NOTE(review): this branch also runs after an aborted scan — presumably
    // intended, since a partial ingest still leaves inbox items worth
    // sorting; confirm.
    const { processInbox, getAudioLanguages } = await import("./review");
    const { emitInboxSorted, emitInboxSortStart, emitInboxSortProgress } = await import("./execute");
    processInbox(db, getAudioLanguages(), undefined, {
      onStart: emitInboxSortStart,
      onProgress: emitInboxSortProgress,
    })
      .then((result) => emitInboxSorted(result))
      .catch(() => emitInboxSorted({ moved_to_queue: 0, moved_to_review: 0 }));
  }
}
// Route group export for the scan API.
export default app;