write canonical iso3 language metadata, tighten is_noop, store full jellyfin data
Build and Push Docker Image / build (push) Failing after 16s
Build and Push Docker Image / build (push) Failing after 16s
ffmpeg now writes -metadata:s:a:i language=<iso3> on every kept audio track so
files end up with canonical 3-letter tags (en → eng, ger → deu, null → und).
analyzer passes stream.profile (not title) to transcodeTarget so lossless
dts-hd ma in mkv correctly targets flac. is_noop now also checks original-is-default and
canonical-language, so items the pipeline would still change no longer show as done.
normalizeLanguage gains 2→3 mapping, and mapStream no longer normalizes at
ingest so the raw jellyfin tag survives for the canonical check.
per-item scan work runs in a single db.transaction for large sqlite speedups,
extracted into server/services/rescan.ts so execute.ts can reuse it.
on successful job, execute calls jellyfin /Items/{id}/Refresh, waits for
DateLastRefreshed to change, refetches the item, and upserts it through the
same pipeline; plan flips to done iff the fresh streams satisfy is_noop.
schema wiped + rewritten to carry jellyfin_raw, external_raw, profile,
bit_depth, date_last_refreshed, runtime_ticks, original_title, last_executed_at
— so future scans aren't required to stay correct. user must drop data/*.db.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
+80
-2
@@ -1,9 +1,12 @@
|
||||
import { accessSync, constants } from "node:fs";
|
||||
import { Hono } from "hono";
|
||||
import { stream } from "hono/streaming";
|
||||
import { getDb } from "../db/index";
|
||||
import { log, error as logError } from "../lib/log";
|
||||
import { getAllConfig, getDb } from "../db/index";
|
||||
import { log, error as logError, warn } from "../lib/log";
|
||||
import { predictExtractedFiles } from "../services/ffmpeg";
|
||||
import { getItem, refreshItem } from "../services/jellyfin";
|
||||
import { loadLibrary as loadRadarrLibrary, isUsable as radarrUsable } from "../services/radarr";
|
||||
import { upsertJellyfinItem } from "../services/rescan";
|
||||
import {
|
||||
getSchedulerState,
|
||||
isInScheduleWindow,
|
||||
@@ -13,8 +16,73 @@ import {
|
||||
updateSchedulerState,
|
||||
waitForWindow,
|
||||
} from "../services/scheduler";
|
||||
import { loadLibrary as loadSonarrLibrary, isUsable as sonarrUsable } from "../services/sonarr";
|
||||
import type { Job, MediaItem, MediaStream } from "../types";
|
||||
|
||||
function parseLanguageList(raw: string | null | undefined, fallback: string[]): string[] {
|
||||
if (!raw) return fallback;
|
||||
try {
|
||||
const parsed = JSON.parse(raw);
|
||||
return Array.isArray(parsed) ? parsed.filter((v): v is string => typeof v === "string") : fallback;
|
||||
} catch {
|
||||
return fallback;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* After a job finishes successfully, ask Jellyfin to re-scan the file,
|
||||
* fetch the fresh item, and upsert it — including running analyzeItem so the
|
||||
* review plan reflects whether the file is now fully conformant. If is_noop
|
||||
* is true on the refreshed streams, the plan lands in `done`; otherwise it
|
||||
* flips back to `pending` so the user sees what still needs attention.
|
||||
*/
|
||||
async function refreshItemFromJellyfin(itemId: number): Promise<void> {
|
||||
const db = getDb();
|
||||
const row = db.prepare("SELECT jellyfin_id FROM media_items WHERE id = ?").get(itemId) as
|
||||
| { jellyfin_id: string }
|
||||
| undefined;
|
||||
if (!row) return;
|
||||
|
||||
const cfg = getAllConfig();
|
||||
const jellyfinCfg = { url: cfg.jellyfin_url, apiKey: cfg.jellyfin_api_key, userId: cfg.jellyfin_user_id };
|
||||
if (!jellyfinCfg.url || !jellyfinCfg.apiKey) return;
|
||||
|
||||
try {
|
||||
await refreshItem(jellyfinCfg, row.jellyfin_id);
|
||||
} catch (err) {
|
||||
warn(`Jellyfin refresh for item ${itemId} failed: ${String(err)}`);
|
||||
}
|
||||
|
||||
const fresh = await getItem(jellyfinCfg, row.jellyfin_id);
|
||||
if (!fresh) {
|
||||
warn(`Jellyfin returned no item for ${row.jellyfin_id} after refresh`);
|
||||
return;
|
||||
}
|
||||
|
||||
const radarrCfg = { url: cfg.radarr_url, apiKey: cfg.radarr_api_key };
|
||||
const sonarrCfg = { url: cfg.sonarr_url, apiKey: cfg.sonarr_api_key };
|
||||
const radarrEnabled = cfg.radarr_enabled === "1" && radarrUsable(radarrCfg);
|
||||
const sonarrEnabled = cfg.sonarr_enabled === "1" && sonarrUsable(sonarrCfg);
|
||||
const [radarrLibrary, sonarrLibrary] = await Promise.all([
|
||||
radarrEnabled ? loadRadarrLibrary(radarrCfg) : Promise.resolve(null),
|
||||
sonarrEnabled ? loadSonarrLibrary(sonarrCfg) : Promise.resolve(null),
|
||||
]);
|
||||
|
||||
await upsertJellyfinItem(
|
||||
db,
|
||||
fresh,
|
||||
{
|
||||
subtitleLanguages: parseLanguageList(cfg.subtitle_languages, ["eng", "deu", "spa"]),
|
||||
audioLanguages: parseLanguageList(cfg.audio_languages, []),
|
||||
radarr: radarrEnabled ? radarrCfg : null,
|
||||
sonarr: sonarrEnabled ? sonarrCfg : null,
|
||||
radarrLibrary,
|
||||
sonarrLibrary,
|
||||
},
|
||||
{ executed: true },
|
||||
);
|
||||
}
|
||||
|
||||
// Hono sub-app for the execute routes; mounted by the server entry point.
const app = new Hono();

// ─── Sequential local queue ──────────────────────────────────────────────────
|
||||
@@ -435,6 +503,16 @@ async function runJob(job: Job): Promise<void> {
|
||||
|
||||
log(`Job ${job.id} completed successfully`);
|
||||
emitJobUpdate(job.id, "done", fullOutput);
|
||||
|
||||
// Ask Jellyfin to rescan the file and pull the fresh metadata so our DB
|
||||
// reflects what actually ended up on disk. If the refreshed streams still
|
||||
// don't satisfy is_noop (e.g. a codec didn't transcode as planned), the
|
||||
// plan flips back to 'pending' in the same upsert and the UI shows it.
|
||||
try {
|
||||
await refreshItemFromJellyfin(job.item_id);
|
||||
} catch (refreshErr) {
|
||||
warn(`Post-job refresh for item ${job.item_id} failed: ${String(refreshErr)}`);
|
||||
}
|
||||
} catch (err) {
|
||||
logError(`Job ${job.id} failed:`, err);
|
||||
const fullOutput = `${outputLines.join("\n")}\n${String(err)}`;
|
||||
|
||||
Reference in New Issue
Block a user