Files
netfelix-audio-fix/server/api/scan.ts
Felix Förtsch 6fcaeca82c
Some checks failed
Build and Push Docker Image / build (push) Failing after 16s
write canonical iso3 language metadata, tighten is_noop, store full jellyfin data
ffmpeg now writes -metadata:s:a:i language=<iso3> on every kept audio track so
files end up with canonical 3-letter tags (en → eng, ger → deu, null → und).
analyzer passes stream.profile (not title) to transcodeTarget so lossless
dts-hd ma in mkv correctly targets flac. is_noop also checks og-is-default and
canonical-language so pipeline-would-change-it cases stop showing as done.

normalizeLanguage gains 2→3 mapping, and mapStream no longer normalizes at
ingest so the raw jellyfin tag survives for the canonical check.

per-item scan work runs in a single db.transaction for large sqlite speedups,
extracted into server/services/rescan.ts so execute.ts can reuse it.

on successful job, execute calls jellyfin /Items/{id}/Refresh, waits for
DateLastRefreshed to change, refetches the item, and upserts it through the
same pipeline; plan flips to done iff the fresh streams satisfy is_noop.

schema wiped + rewritten to carry jellyfin_raw, external_raw, profile,
bit_depth, date_last_refreshed, runtime_ticks, original_title, last_executed_at
— so future scans aren't required to stay correct. Users must drop data/*.db.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-13 13:56:19 +02:00

246 lines
9.8 KiB
TypeScript

import { Hono } from "hono";
import { stream } from "hono/streaming";
import { getAllConfig, getConfig, getDb, setConfig } from "../db/index";
import { log, error as logError, warn } from "../lib/log";
import { getAllItems, getDevItems } from "../services/jellyfin";
import { loadLibrary as loadRadarrLibrary, isUsable as radarrUsable } from "../services/radarr";
import { upsertJellyfinItem } from "../services/rescan";
import { loadLibrary as loadSonarrLibrary, isUsable as sonarrUsable } from "../services/sonarr";
const app = new Hono();
// ─── State ────────────────────────────────────────────────────────────────────
// Module-level singletons — this process allows at most one scan at a time
// (enforced by the atomic 'scan_running' claim in POST /start).
// Abort handle for the in-flight scan; created by runScan, fired by POST /stop.
let scanAbort: AbortController | null = null;
// Connected SSE clients; each listener receives fully-formatted SSE frames
// (see emitSse), not raw payloads.
const scanListeners = new Set<(data: string) => void>();
/**
 * Broadcast one Server-Sent-Events frame to every connected /events client.
 * The frame is fully formatted here (event + data + blank-line terminator)
 * so listeners can write it to the wire verbatim.
 */
function emitSse(type: string, data: unknown): void {
  const frame = `event: ${type}\ndata: ${JSON.stringify(data)}\n\n`;
  scanListeners.forEach((notify) => notify(frame));
}
/**
 * Read the persisted scan limit from config.
 *
 * An empty string is stored when no limit is set (see POST /start), so both
 * empty/absent and non-numeric values map to null ("no limit").
 *
 * Fix: the previous version returned NaN for junk config values; NaN then
 * poisoned `Math.min(limit, total)` and `processed >= limit` checks in
 * runScan, silently disabling the limit. Guard with Number.isFinite.
 */
function currentScanLimit(): number | null {
  const raw = getConfig("scan_limit");
  if (!raw) return null;
  const n = Number(raw);
  return Number.isFinite(n) ? n : null;
}
/**
 * Parse a config value holding a JSON array of language codes.
 *
 * Anything that is absent, empty, unparseable, or not an array yields the
 * caller-supplied fallback; non-string elements of a valid array are dropped.
 */
function parseLanguageList(raw: string | null, fallback: string[]): string[] {
  if (raw == null || raw === "") return fallback;
  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return fallback;
  }
  if (!Array.isArray(parsed)) return fallback;
  return parsed.filter((v): v is string => typeof v === "string");
}
// ─── Status ───────────────────────────────────────────────────────────────────
app.get("/", (c) => {
const db = getDb();
const running = getConfig("scan_running") === "1";
const total = (db.prepare("SELECT COUNT(*) as n FROM media_items").get() as { n: number }).n;
const scanned = (
db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'scanned'").get() as { n: number }
).n;
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
.n;
const recentItems = db
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50")
.all() as { name: string; type: string; scan_status: string; file_path: string }[];
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
});
// ─── Start ────────────────────────────────────────────────────────────────────
app.post("/start", async (c) => {
const db = getDb();
// Atomic claim: only succeed if scan_running is not already '1'.
const claim = db.prepare("UPDATE config SET value = '1' WHERE key = 'scan_running' AND value != '1'").run();
if (claim.changes === 0) {
return c.json({ ok: false, error: "Scan already running" }, 409);
}
const body = await c.req.json<{ limit?: number }>().catch(() => ({ limit: undefined }));
const formLimit = body.limit ?? null;
const envLimit = process.env.SCAN_LIMIT ? Number(process.env.SCAN_LIMIT) : null;
const limit = formLimit ?? envLimit ?? null;
setConfig("scan_limit", limit != null ? String(limit) : "");
runScan(limit).catch((err) => {
logError("Scan failed:", err);
setConfig("scan_running", "0");
emitSse("error", { message: String(err) });
});
return c.json({ ok: true });
});
// ─── Stop ─────────────────────────────────────────────────────────────────────
app.post("/stop", (c) => {
scanAbort?.abort();
setConfig("scan_running", "0");
return c.json({ ok: true });
});
// ─── SSE ──────────────────────────────────────────────────────────────────────
app.get("/events", (c) => {
return stream(c, async (s) => {
c.header("Content-Type", "text/event-stream");
c.header("Cache-Control", "no-cache");
c.header("Connection", "keep-alive");
const queue: string[] = [];
let resolve: (() => void) | null = null;
const listener = (data: string) => {
queue.push(data);
resolve?.();
};
scanListeners.add(listener);
s.onAbort(() => {
scanListeners.delete(listener);
});
try {
while (!s.closed) {
if (queue.length > 0) {
await s.write(queue.shift()!);
} else {
await new Promise<void>((res) => {
resolve = res;
setTimeout(res, 25_000);
});
resolve = null;
if (queue.length === 0) await s.write(": keepalive\n\n");
}
}
} finally {
scanListeners.delete(listener);
}
});
});
// ─── Core scan logic ──────────────────────────────────────────────────────────
/**
 * Full library scan: streams items from Jellyfin and upserts each into the
 * local DB via upsertJellyfinItem, emitting progress/log frames over SSE.
 *
 * Always clears the 'scan_running' lock on normal completion or abort;
 * thrown errors are handled by the caller's .catch in POST /start, which
 * also clears the lock.
 *
 * @param limit Max number of items to process (null = unlimited). Ignored
 *              in dev mode, where getDevItems supplies a fixed item set.
 */
async function runScan(limit: number | null = null): Promise<void> {
  log(`Scan started${limit ? ` (limit: ${limit})` : ""}`);
  scanAbort = new AbortController();
  const { signal } = scanAbort;
  const isDev = process.env.NODE_ENV === "development";
  const db = getDb();
  if (isDev) {
    // Dev mode starts from a clean slate on every scan.
    // Order matters only if foreign keys are enforced without CASCADE; we
    // have ON DELETE CASCADE on media_streams/review_plans/stream_decisions/
    // subtitle_files/jobs, so deleting media_items would be enough. List
    // them explicitly for clarity and to survive future schema drift.
    db.prepare("DELETE FROM jobs").run();
    db.prepare("DELETE FROM subtitle_files").run();
    db.prepare("DELETE FROM stream_decisions").run();
    db.prepare("DELETE FROM review_plans").run();
    db.prepare("DELETE FROM media_streams").run();
    db.prepare("DELETE FROM media_items").run();
  }
  const cfg = getAllConfig();
  const jellyfinCfg = { url: cfg.jellyfin_url, apiKey: cfg.jellyfin_api_key, userId: cfg.jellyfin_user_id };
  // Defaults when the config keys are missing or unparseable: three subtitle
  // languages, no audio-language filter.
  const subtitleLanguages = parseLanguageList(cfg.subtitle_languages ?? null, ["eng", "deu", "spa"]);
  const audioLanguages = parseLanguageList(cfg.audio_languages ?? null, []);
  const radarrCfg = { url: cfg.radarr_url, apiKey: cfg.radarr_api_key };
  const sonarrCfg = { url: cfg.sonarr_url, apiKey: cfg.sonarr_api_key };
  // 'enabled' in config means the user toggled it on. Only actually use it
  // if the URL+key pass URL parsing — otherwise we'd hit ERR_INVALID_URL on
  // every item. Refuse to call invalid endpoints rather than spamming logs.
  const radarrEnabled = cfg.radarr_enabled === "1" && radarrUsable(radarrCfg);
  const sonarrEnabled = cfg.sonarr_enabled === "1" && sonarrUsable(sonarrCfg);
  if (cfg.radarr_enabled === "1" && !radarrEnabled) {
    warn(`Radarr is enabled in config but URL/API key is invalid (url='${cfg.radarr_url}') — skipping Radarr lookups`);
  }
  if (cfg.sonarr_enabled === "1" && !sonarrEnabled) {
    warn(`Sonarr is enabled in config but URL/API key is invalid (url='${cfg.sonarr_url}') — skipping Sonarr lookups`);
  }
  // Pre-load both libraries once so per-item lookups are O(1) cache hits
  // instead of HTTP round-trips. The previous code called /api/v3/movie
  // (the entire library!) once per item that didn't match by tmdbId.
  const [radarrLibrary, sonarrLibrary] = await Promise.all([
    radarrEnabled ? loadRadarrLibrary(radarrCfg) : Promise.resolve(null),
    sonarrEnabled ? loadSonarrLibrary(sonarrCfg) : Promise.resolve(null),
  ]);
  log(
    `External language sources: radarr=${radarrEnabled ? `enabled (${cfg.radarr_url}, ${radarrLibrary?.byTmdbId.size ?? 0} movies in library)` : "disabled"}, sonarr=${sonarrEnabled ? `enabled (${cfg.sonarr_url}, ${sonarrLibrary?.byTvdbId.size ?? 0} series in library)` : "disabled"}`,
  );
  let processed = 0;
  let errors = 0;
  // NOTE(review): total is only assigned by the getAllItems progress
  // callback (prod path); in dev mode it stays 0, so SSE progress frames
  // report total=0 there — confirm the UI tolerates that.
  let total = 0;
  const rescanCfg = {
    subtitleLanguages,
    audioLanguages,
    radarr: radarrEnabled ? radarrCfg : null,
    sonarr: sonarrEnabled ? sonarrCfg : null,
    radarrLibrary,
    sonarrLibrary,
  };
  // Per-source hit/miss tallies, reported once at the end of the scan.
  let radarrMisses = 0;
  let radarrHits = 0;
  let sonarrMisses = 0;
  let sonarrHits = 0;
  let missingProviderIds = 0;
  const itemSource = isDev
    ? getDevItems(jellyfinCfg)
    : getAllItems(jellyfinCfg, (_fetched, jellyfinTotal) => {
        total = limit != null ? Math.min(limit, jellyfinTotal) : jellyfinTotal;
      });
  for await (const jellyfinItem of itemSource) {
    // Abort check first so POST /stop takes effect between items.
    if (signal.aborted) break;
    if (!isDev && limit != null && processed >= limit) break;
    if (!jellyfinItem.Name || !jellyfinItem.Path) {
      warn(`Skipping item without name/path: id=${jellyfinItem.Id}`);
      continue;
    }
    processed++;
    emitSse("progress", { scanned: processed, total, current_item: jellyfinItem.Name, errors, running: true });
    try {
      const result = await upsertJellyfinItem(db, jellyfinItem, rescanCfg);
      if (result.radarrHit) radarrHits++;
      if (result.radarrMiss) radarrMisses++;
      if (result.sonarrHit) sonarrHits++;
      if (result.sonarrMiss) sonarrMisses++;
      if (result.missingProviderId) missingProviderIds++;
      emitSse("log", { name: jellyfinItem.Name, type: jellyfinItem.Type, status: "scanned", file: jellyfinItem.Path });
    } catch (err) {
      errors++;
      logError(`Error scanning ${jellyfinItem.Name}:`, err);
      // Best effort: record the failure on the row if it already exists
      // (no-op for an item the upsert never inserted).
      try {
        db
          .prepare("UPDATE media_items SET scan_status = 'error', scan_error = ? WHERE jellyfin_id = ?")
          .run(String(err), jellyfinItem.Id);
      } catch {
        /* ignore */
      }
      emitSse("log", { name: jellyfinItem.Name, type: jellyfinItem.Type, status: "error", file: jellyfinItem.Path });
    }
  }
  // Release the single-scan lock (also reached after an abort-break).
  setConfig("scan_running", "0");
  log(`Scan complete: ${processed} scanned, ${errors} errors`);
  log(
    ` language sources: radarr hits=${radarrHits} misses=${radarrMisses}, sonarr hits=${sonarrHits} misses=${sonarrMisses}, no provider id=${missingProviderIds}`,
  );
  emitSse("complete", { scanned: processed, total, errors });
}
export default app;