All checks were successful
Build and Push Docker Image / build (push) Successful in 1m30s
worked through AUDIT.md. triage: - finding 2 (subtitle rescan wipes decisions): confirmed. /:id/rescan now snapshots custom_titles and calls reanalyze() after the stream delete/insert, mirroring the review rescan flow. exported reanalyze + titleKey from review.ts so both routes share the logic. - finding 3 (scan limit accepts NaN/negatives): confirmed. extracted parseScanLimit into a pure helper, added unit tests covering NaN, negatives, floats, infinity, numeric strings. invalid input 400s and releases the scan_running lock. - finding 4 (parseId lenient): confirmed. tightened the regex to /^\d+$/ so "42abc", "abc42", "+42", "42.0" all return null. rewrote the test that codified the old lossy behaviour. - finding 5 (setup_complete set before jellyfin test passes): confirmed. the /jellyfin endpoint still persists url+key unconditionally, but now only flips setup_complete=1 on a successful connection test. - finding 6 (swallowed errors): partial. the mqtt restart and version-fetch swallows are intentional best-effort with downstream surfaces (getMqttStatus, UI fallback). only the scan.ts db-update swallow was a real visibility gap — logs via logError now. - finding 1 (auth): left as-is. redacting secrets on GET without auth on POST is security theater; real fix is an auth layer, which is a design decision not a bugfix. audit removed from the tree. - lint fail on ffmpeg.test.ts: formatted. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
278 lines
11 KiB
TypeScript
278 lines
11 KiB
TypeScript
import { Hono } from "hono";
|
|
import { stream } from "hono/streaming";
|
|
import { getAllConfig, getConfig, getDb, setConfig } from "../db/index";
|
|
import { log, error as logError, warn } from "../lib/log";
|
|
import { getAllItems, getDevItems } from "../services/jellyfin";
|
|
import { loadLibrary as loadRadarrLibrary, isUsable as radarrUsable } from "../services/radarr";
|
|
import { upsertJellyfinItem } from "../services/rescan";
|
|
import { isInScanWindow, msUntilScanWindow, nextScanWindowTime, waitForScanWindow } from "../services/scheduler";
|
|
import { loadLibrary as loadSonarrLibrary, isUsable as sonarrUsable } from "../services/sonarr";
|
|
|
|
// Hono sub-app holding the scan endpoints; exported as this module's default
// at the bottom of the file.
const app = new Hono();
/**
|
|
* Validate a scan `limit` input. Must be a positive integer or absent —
|
|
* NaN/negatives/non-numerics would disable the progress cap
|
|
* (`processed >= NaN` never trips) or produce bogus totals via
|
|
* `Math.min(NaN, …)`. Exported for unit tests.
|
|
*/
|
|
export function parseScanLimit(raw: unknown): { ok: true; value: number | null } | { ok: false } {
|
|
if (raw == null || raw === "") return { ok: true, value: null };
|
|
const n = typeof raw === "number" ? raw : Number(raw);
|
|
if (!Number.isInteger(n) || n <= 0) return { ok: false };
|
|
return { ok: true, value: n };
|
|
}
|
|
|
|
// ─── State ────────────────────────────────────────────────────────────────────
|
|
|
|
let scanAbort: AbortController | null = null;
|
|
const scanListeners = new Set<(data: string) => void>();
|
|
|
|
function emitSse(type: string, data: unknown): void {
|
|
const line = `event: ${type}\ndata: ${JSON.stringify(data)}\n\n`;
|
|
for (const listener of scanListeners) listener(line);
|
|
}
|
|
|
|
function currentScanLimit(): number | null {
|
|
const v = getConfig("scan_limit");
|
|
return v ? Number(v) : null;
|
|
}
|
|
|
|
function parseLanguageList(raw: string | null, fallback: string[]): string[] {
|
|
if (!raw) return fallback;
|
|
try {
|
|
const parsed = JSON.parse(raw);
|
|
return Array.isArray(parsed) ? parsed.filter((v): v is string => typeof v === "string") : fallback;
|
|
} catch {
|
|
return fallback;
|
|
}
|
|
}
|
|
|
|
// ─── Status ───────────────────────────────────────────────────────────────────
|
|
|
|
app.get("/", (c) => {
|
|
const db = getDb();
|
|
const running = getConfig("scan_running") === "1";
|
|
const total = (db.prepare("SELECT COUNT(*) as n FROM media_items").get() as { n: number }).n;
|
|
const scanned = (
|
|
db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'scanned'").get() as { n: number }
|
|
).n;
|
|
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
|
|
.n;
|
|
const recentItems = db
|
|
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50")
|
|
.all() as { name: string; type: string; scan_status: string; file_path: string }[];
|
|
|
|
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
|
|
});
|
|
|
|
// ─── Start ────────────────────────────────────────────────────────────────────
|
|
|
|
app.post("/start", async (c) => {
|
|
const db = getDb();
|
|
// Atomic claim: only succeed if scan_running is not already '1'.
|
|
const claim = db.prepare("UPDATE config SET value = '1' WHERE key = 'scan_running' AND value != '1'").run();
|
|
if (claim.changes === 0) {
|
|
return c.json({ ok: false, error: "Scan already running" }, 409);
|
|
}
|
|
|
|
const body = await c.req.json<{ limit?: unknown }>().catch(() => ({ limit: undefined }));
|
|
const formLimit = parseScanLimit(body.limit);
|
|
const envLimit = parseScanLimit(process.env.SCAN_LIMIT);
|
|
if (!formLimit.ok || !envLimit.ok) {
|
|
db.prepare("UPDATE config SET value = '0' WHERE key = 'scan_running'").run();
|
|
return c.json({ ok: false, error: "limit must be a positive integer" }, 400);
|
|
}
|
|
const limit: number | null = formLimit.value ?? envLimit.value ?? null;
|
|
setConfig("scan_limit", limit != null ? String(limit) : "");
|
|
|
|
runScan(limit).catch((err) => {
|
|
logError("Scan failed:", err);
|
|
setConfig("scan_running", "0");
|
|
emitSse("error", { message: String(err) });
|
|
});
|
|
|
|
return c.json({ ok: true });
|
|
});
|
|
|
|
// ─── Stop ─────────────────────────────────────────────────────────────────────
|
|
|
|
app.post("/stop", (c) => {
|
|
scanAbort?.abort();
|
|
setConfig("scan_running", "0");
|
|
return c.json({ ok: true });
|
|
});
|
|
|
|
// ─── SSE ──────────────────────────────────────────────────────────────────────
|
|
|
|
app.get("/events", (c) => {
|
|
return stream(c, async (s) => {
|
|
c.header("Content-Type", "text/event-stream");
|
|
c.header("Cache-Control", "no-cache");
|
|
c.header("Connection", "keep-alive");
|
|
|
|
const queue: string[] = [];
|
|
let resolve: (() => void) | null = null;
|
|
|
|
const listener = (data: string) => {
|
|
queue.push(data);
|
|
resolve?.();
|
|
};
|
|
|
|
scanListeners.add(listener);
|
|
s.onAbort(() => {
|
|
scanListeners.delete(listener);
|
|
});
|
|
|
|
try {
|
|
while (!s.closed) {
|
|
if (queue.length > 0) {
|
|
await s.write(queue.shift()!);
|
|
} else {
|
|
await new Promise<void>((res) => {
|
|
resolve = res;
|
|
setTimeout(res, 25_000);
|
|
});
|
|
resolve = null;
|
|
if (queue.length === 0) await s.write(": keepalive\n\n");
|
|
}
|
|
}
|
|
} finally {
|
|
scanListeners.delete(listener);
|
|
}
|
|
});
|
|
});
|
|
|
|
// ─── Core scan logic ──────────────────────────────────────────────────────────

/**
 * Full library scan. Streams items from Jellyfin (or the dev fixture source),
 * upserts each into the local DB via upsertJellyfinItem, and reports progress
 * over SSE (`progress`, `log`, `paused`/`resumed`, `complete` events).
 *
 * @param limit cap on items processed (already validated by parseScanLimit);
 *              null means unlimited. Not enforced for the dev item source.
 *
 * Lifecycle: the caller (POST /start) sets scan_running='1' before invoking
 * and clears it in its rejection handler; this function clears it itself on
 * the normal completion path below. Abort is cooperative via the module-level
 * scanAbort controller, checked between items only.
 */
async function runScan(limit: number | null = null): Promise<void> {
  log(`Scan started${limit ? ` (limit: ${limit})` : ""}`);
  scanAbort = new AbortController();
  const { signal } = scanAbort;
  const isDev = process.env.NODE_ENV === "development";
  const db = getDb();

  if (isDev) {
    // Dev mode rebuilds the whole local DB from scratch on every scan.
    // Order matters only if foreign keys are enforced without CASCADE; we
    // have ON DELETE CASCADE on media_streams/review_plans/stream_decisions/
    // subtitle_files/jobs, so deleting media_items would be enough. List
    // them explicitly for clarity and to survive future schema drift.
    db.prepare("DELETE FROM jobs").run();
    db.prepare("DELETE FROM subtitle_files").run();
    db.prepare("DELETE FROM stream_decisions").run();
    db.prepare("DELETE FROM review_plans").run();
    db.prepare("DELETE FROM media_streams").run();
    db.prepare("DELETE FROM media_items").run();
  }

  const cfg = getAllConfig();
  const jellyfinCfg = { url: cfg.jellyfin_url, apiKey: cfg.jellyfin_api_key, userId: cfg.jellyfin_user_id };
  const audioLanguages = parseLanguageList(cfg.audio_languages ?? null, []);
  const radarrCfg = { url: cfg.radarr_url, apiKey: cfg.radarr_api_key };
  const sonarrCfg = { url: cfg.sonarr_url, apiKey: cfg.sonarr_api_key };
  // 'enabled' in config means the user toggled it on. Only actually use it
  // if the URL+key pass URL parsing — otherwise we'd hit ERR_INVALID_URL on
  // every item. Refuse to call invalid endpoints rather than spamming logs.
  const radarrEnabled = cfg.radarr_enabled === "1" && radarrUsable(radarrCfg);
  const sonarrEnabled = cfg.sonarr_enabled === "1" && sonarrUsable(sonarrCfg);

  if (cfg.radarr_enabled === "1" && !radarrEnabled) {
    warn(`Radarr is enabled in config but URL/API key is invalid (url='${cfg.radarr_url}') — skipping Radarr lookups`);
  }
  if (cfg.sonarr_enabled === "1" && !sonarrEnabled) {
    warn(`Sonarr is enabled in config but URL/API key is invalid (url='${cfg.sonarr_url}') — skipping Sonarr lookups`);
  }

  // Pre-load both libraries once so per-item lookups are O(1) cache hits
  // instead of HTTP round-trips. The previous code called /api/v3/movie
  // (the entire library!) once per item that didn't match by tmdbId.
  const [radarrLibrary, sonarrLibrary] = await Promise.all([
    radarrEnabled ? loadRadarrLibrary(radarrCfg) : Promise.resolve(null),
    sonarrEnabled ? loadSonarrLibrary(sonarrCfg) : Promise.resolve(null),
  ]);

  log(
    `External language sources: radarr=${radarrEnabled ? `enabled (${cfg.radarr_url}, ${radarrLibrary?.byTmdbId.size ?? 0} movies in library)` : "disabled"}, sonarr=${sonarrEnabled ? `enabled (${cfg.sonarr_url}, ${sonarrLibrary?.byTvdbId.size ?? 0} series in library)` : "disabled"}`,
  );
  // Progress counters; `total` is also written by the getAllItems callback.
  let processed = 0;
  let errors = 0;
  let total = 0;

  const rescanCfg = {
    audioLanguages,
    radarr: radarrEnabled ? radarrCfg : null,
    sonarr: sonarrEnabled ? sonarrCfg : null,
    radarrLibrary,
    sonarrLibrary,
  };

  // Diagnostics for the end-of-scan summary log.
  let radarrMisses = 0;
  let radarrHits = 0;
  let sonarrMisses = 0;
  let sonarrHits = 0;
  let missingProviderIds = 0;

  // NOTE(review): in dev mode `total` stays 0 — getDevItems takes no progress
  // callback here. Confirm the UI tolerates total=0 progress events.
  const itemSource = isDev
    ? getDevItems(jellyfinCfg)
    : getAllItems(jellyfinCfg, (_fetched, jellyfinTotal) => {
        total = limit != null ? Math.min(limit, jellyfinTotal) : jellyfinTotal;
      });
  for await (const jellyfinItem of itemSource) {
    if (signal.aborted) break;
    // The limit is deliberately not applied to the dev fixture source.
    if (!isDev && limit != null && processed >= limit) break;
    if (!jellyfinItem.Name || !jellyfinItem.Path) {
      warn(`Skipping item without name/path: id=${jellyfinItem.Id}`);
      continue;
    }

    // Honour the scan window between items so overnight-only setups don't hog
    // Jellyfin during the day. Checked between items rather than mid-item so
    // we don't leave a partial upsert sitting in flight.
    if (!isInScanWindow()) {
      emitSse("paused", {
        until: nextScanWindowTime(),
        seconds: Math.round(msUntilScanWindow() / 1000),
      });
      await waitForScanWindow();
      if (signal.aborted) break;
      emitSse("resumed", {});
    }

    processed++;
    emitSse("progress", { scanned: processed, total, current_item: jellyfinItem.Name, errors, running: true });

    try {
      const result = await upsertJellyfinItem(db, jellyfinItem, rescanCfg);
      if (result.radarrHit) radarrHits++;
      if (result.radarrMiss) radarrMisses++;
      if (result.sonarrHit) sonarrHits++;
      if (result.sonarrMiss) sonarrMisses++;
      if (result.missingProviderId) missingProviderIds++;
      emitSse("log", { name: jellyfinItem.Name, type: jellyfinItem.Type, status: "scanned", file: jellyfinItem.Path });
    } catch (err) {
      // A single bad item must not kill the whole scan: record, report, move on.
      errors++;
      logError(`Error scanning ${jellyfinItem.Name}:`, err);
      try {
        db
          .prepare("UPDATE media_items SET scan_status = 'error', scan_error = ? WHERE jellyfin_id = ?")
          .run(String(err), jellyfinItem.Id);
      } catch (dbErr) {
        // Failed to persist the error status — log it so the incident
        // doesn't disappear silently. We can't do much more; the outer
        // loop continues so the scan still finishes.
        logError(`Failed to record scan error for ${jellyfinItem.Id}:`, dbErr);
      }
      emitSse("log", { name: jellyfinItem.Name, type: jellyfinItem.Type, status: "error", file: jellyfinItem.Path });
    }
  }

  // Normal-path cleanup. NOTE(review): an aborted scan also reaches here (the
  // abort checks `break` out of the loop), so "complete" is emitted on abort
  // too — confirm that is what SSE consumers expect.
  setConfig("scan_running", "0");
  log(`Scan complete: ${processed} scanned, ${errors} errors`);
  log(
    ` language sources: radarr hits=${radarrHits} misses=${radarrMisses}, sonarr hits=${sonarrHits} misses=${sonarrMisses}, no provider id=${missingProviderIds}`,
  );
  emitSse("complete", { scanned: processed, total, errors });
}
|
|
|
|
export default app;
|