diff --git a/docs/superpowers/plans/2026-04-20-drop-jellyfin.md b/docs/superpowers/plans/2026-04-20-drop-jellyfin.md new file mode 100644 index 0000000..3bc201c --- /dev/null +++ b/docs/superpowers/plans/2026-04-20-drop-jellyfin.md @@ -0,0 +1,1379 @@ +# Drop Jellyfin: Filesystem-Native Scan Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Replace all Jellyfin dependencies with direct filesystem scanning (glob + ffprobe) and path parsing, keeping Sonarr/Radarr for original language resolution. + +**Architecture:** Walk `/tv` and `/movies` mount roots to discover video files. Run `ffprobe -print_format json` on each file to extract stream metadata. Parse Radarr/Sonarr naming conventions from paths for movie/series metadata and provider IDs. Remove Jellyfin service, MQTT, webhooks, and all `jellyfin_*` DB columns. 
+ +**Tech Stack:** Bun, ffprobe (JSON output), Sonarr/Radarr APIs (language resolution), SQLite + +--- + +## File Structure + +### New files +- `server/services/discover.ts` — filesystem walk + video file detection +- `server/services/probe.ts` — ffprobe wrapper, returns typed stream metadata +- `server/services/path-parser.ts` — extract movie/series metadata + provider IDs from file paths +- `server/services/language-utils.ts` — normalizeLanguage + guessOriginalLanguage (extracted from jellyfin.ts) +- `server/services/__tests__/discover.test.ts` +- `server/services/__tests__/probe.test.ts` +- `server/services/__tests__/path-parser.test.ts` + +### Modified files +- `server/db/schema.ts` — new schema without Jellyfin columns, `file_path` as unique key +- `server/types.ts` — remove Jellyfin types, update `MediaItem` interface +- `server/api/scan.ts` — rewrite to use discover + probe instead of Jellyfin API +- `server/services/rescan.ts` — rewrite `upsertItem()` to accept probe data instead of JellyfinItem +- `server/services/language-resolver.ts` — remove `resolveSeriesTvdb` Jellyfin callback, resolve via Sonarr path lookup +- `server/services/sonarr.ts` — add byTitle index for name-based lookup +- `server/api/review.ts` — replace `jellyfin_id`/`series_jellyfin_id` references with `file_path`/`series_key` +- `src/shared/lib/types.ts` — frontend types: drop jellyfin fields +- `src/features/settings/SettingsPage.tsx` — remove Jellyfin section +- `src/features/pipeline/SeriesCard.tsx` — remove Jellyfin link, use series_key +- `src/features/pipeline/InboxColumn.tsx` — use series_key +- `src/features/pipeline/ReviewColumn.tsx` — use series_key + +### Deleted files +- `server/services/jellyfin.ts` +- `server/services/webhook.ts` +- `server/services/mqtt.ts` +- `server/services/__tests__/webhook.test.ts` +- `server/services/__tests__/rescan.test.ts` (rewritten) +- `src/features/settings/MqttSection.tsx` +- `src/shared/components/MqttBadge.tsx` + +--- + +## Task 1: Path 
parser + +Parse Radarr/Sonarr naming conventions from file paths. This is the foundation for all metadata extraction. + +**Files:** +- Create: `server/services/path-parser.ts` +- Create: `server/services/__tests__/path-parser.test.ts` + +**Path formats observed:** +``` +Movies: /movies/{Title} ({Year})/{Title} ({Year}) [imdbid-{id}] - {release info}.mkv +Series: /tv/{Series} ({Year})/Season {NN}/{Series} ({Year}) - S{NN}E{NN} - {Episode Title} [{info}].mkv +``` + +- [ ] **Step 1: Write failing tests for path parser** + +```typescript +// server/services/__tests__/path-parser.test.ts +import { describe, expect, test } from "bun:test"; +import { parsePath } from "../path-parser"; + +describe("parsePath", () => { + test("movie with imdb id", () => { + const result = parsePath( + "/movies/Hot Fuzz (2007)/Hot Fuzz (2007) [imdbid-tt0425112] - [Bluray-1080p][DTS 5.1][x264]-CtrlHD.mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Movie", + name: "Hot Fuzz", + year: 2007, + seriesName: null, + seasonNumber: null, + episodeNumber: null, + imdbId: "tt0425112", + tmdbId: null, + tvdbId: null, + container: "mkv", + }); + }); + + test("movie with tmdb id", () => { + const result = parsePath( + "/movies/Dune (2021)/Dune (2021) [tmdbid-438631] - [Bluray-2160p].mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Movie", + name: "Dune", + year: 2021, + seriesName: null, + seasonNumber: null, + episodeNumber: null, + imdbId: null, + tmdbId: "438631", + tvdbId: null, + container: "mkv", + }); + }); + + test("movie with both imdb and tmdb", () => { + const result = parsePath( + "/movies/Arrival (2016)/Arrival (2016) [imdbid-tt2543164][tmdbid-329865] - [Bluray-1080p].mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Movie", + name: "Arrival", + year: 2016, + seriesName: null, + seasonNumber: null, + episodeNumber: null, + imdbId: "tt2543164", + tmdbId: "329865", + tvdbId: null, + container: "mkv", + }); + }); + + test("episode 
standard format", () => { + const result = parsePath( + "/tv/Breaking Bad (2008)/Season 05/Breaking Bad (2008) - S05E03 - Hazard Pay [WEBDL-1080p][AC3 5.1][h264]-BS.mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Episode", + name: "Hazard Pay", + year: 2008, + seriesName: "Breaking Bad", + seasonNumber: 5, + episodeNumber: 3, + imdbId: null, + tmdbId: null, + tvdbId: null, + container: "mkv", + }); + }); + + test("episode with tvdb id in folder", () => { + const result = parsePath( + "/tv/Arrow (2012) [tvdbid-257655]/Season 01/Arrow (2012) - S01E01 - Pilot [Bluray-1080p].mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Episode", + name: "Pilot", + year: 2012, + seriesName: "Arrow", + seasonNumber: 1, + episodeNumber: 1, + imdbId: null, + tmdbId: null, + tvdbId: "257655", + container: "mkv", + }); + }); + + test("multi-episode file", () => { + const result = parsePath( + "/tv/Breaking Bad (2008)/Season 02/Breaking Bad (2008) - S02E01-E13 - Seven Thirty-Seven [info].mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Episode", + name: "Seven Thirty-Seven", + year: 2008, + seriesName: "Breaking Bad", + seasonNumber: 2, + episodeNumber: 1, + imdbId: null, + tmdbId: null, + tvdbId: null, + container: "mkv", + }); + }); + + test("mp4 container", () => { + const result = parsePath( + "/movies/Up (2009)/Up (2009) [imdbid-tt1049413].mp4", + "/movies", + "/tv", + ); + expect(result?.container).toBe("mp4"); + }); + + test("returns null for non-video file", () => { + const result = parsePath("/movies/something/file.nfo", "/movies", "/tv"); + expect(result).toBeNull(); + }); + + test("movie without provider ids (bare folder)", () => { + const result = parsePath( + "/movies/Old Movie (1995)/Old Movie (1995).mkv", + "/movies", + "/tv", + ); + expect(result).toEqual({ + type: "Movie", + name: "Old Movie", + year: 1995, + seriesName: null, + seasonNumber: null, + episodeNumber: null, + imdbId: null, + tmdbId: null, + tvdbId: 
null, + container: "mkv", + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `bun test server/services/__tests__/path-parser.test.ts` +Expected: FAIL — module not found + +- [ ] **Step 3: Implement path parser** + +```typescript +// server/services/path-parser.ts + +export interface ParsedPath { + type: "Movie" | "Episode"; + name: string; + year: number | null; + seriesName: string | null; + seasonNumber: number | null; + episodeNumber: number | null; + imdbId: string | null; + tmdbId: string | null; + tvdbId: string | null; + container: string; +} + +const VIDEO_EXTENSIONS = new Set(["mkv", "mp4", "avi", "m4v", "ts", "wmv"]); + +/** + * Parse a video file path into structured metadata. + * Expects Radarr/Sonarr naming conventions. + * + * @param filePath - absolute path to the video file + * @param moviesRoot - mount root for movies (e.g. "/movies") + * @param tvRoot - mount root for TV series (e.g. "/tv") + * @returns parsed metadata or null if not a recognized video file + */ +export function parsePath(filePath: string, moviesRoot: string, tvRoot: string): ParsedPath | null { + const ext = filePath.split(".").pop()?.toLowerCase() ?? ""; + if (!VIDEO_EXTENSIONS.has(ext)) return null; + + const isMovie = filePath.startsWith(moviesRoot + "/"); + const isEpisode = filePath.startsWith(tvRoot + "/"); + if (!isMovie && !isEpisode) return null; + + if (isMovie) return parseMoviePath(filePath, moviesRoot, ext); + return parseEpisodePath(filePath, tvRoot, ext); +} + +function parseMoviePath(filePath: string, root: string, ext: string): ParsedPath { + const relative = filePath.slice(root.length + 1); + const folderName = relative.split("/")[0]; + const fileName = relative.split("/").pop() ?? ""; + + const { title, year } = parseTitleYear(folderName); + const ids = parseProviderIds(fileName) ?? 
parseProviderIds(folderName); + + return { + type: "Movie", + name: title, + year, + seriesName: null, + seasonNumber: null, + episodeNumber: null, + imdbId: ids?.imdbId ?? null, + tmdbId: ids?.tmdbId ?? null, + tvdbId: null, + container: ext, + }; +} + +function parseEpisodePath(filePath: string, root: string, ext: string): ParsedPath { + const relative = filePath.slice(root.length + 1); + const parts = relative.split("/"); + const seriesFolder = parts[0]; + const fileName = parts[parts.length - 1]; + + const { title: seriesName, year } = parseTitleYear(seriesFolder); + const ids = parseProviderIds(seriesFolder); + + const epMatch = fileName.match(/S(\d+)E(\d+)/i); + const seasonNumber = epMatch ? Number.parseInt(epMatch[1], 10) : null; + const episodeNumber = epMatch ? Number.parseInt(epMatch[2], 10) : null; + + // Episode title: after "- S01E01-E13 - " or "- S01E01 - " + const titleMatch = fileName.match(/- S\d+E\d+(?:-E\d+)? - (.+?)(?:\s*[\[\(]|$)/i); + const episodeName = titleMatch ? titleMatch[1].replace(/\.[^.]+$/, "").trim() : seriesName; + + return { + type: "Episode", + name: episodeName, + year, + seriesName, + seasonNumber, + episodeNumber, + imdbId: null, + tmdbId: null, + tvdbId: ids?.tvdbId ?? null, + container: ext, + }; +} + +function parseTitleYear(s: string): { title: string; year: number | null } { + const cleaned = s.replace(/\s*\[[^\]]*\]/g, "").trim(); + const match = cleaned.match(/^(.+?)\s*\((\d{4})\)\s*$/); + if (match) return { title: match[1].trim(), year: Number.parseInt(match[2], 10) }; + return { title: cleaned, year: null }; +} + +function parseProviderIds(s: string): { imdbId: string | null; tmdbId: string | null; tvdbId: string | null } | null { + const imdb = s.match(/\[imdbid-(tt\d+)\]/i); + const tmdb = s.match(/\[tmdbid-(\d+)\]/i); + const tvdb = s.match(/\[tvdbid-(\d+)\]/i); + if (!imdb && !tmdb && !tvdb) return null; + return { + imdbId: imdb?.[1] ?? null, + tmdbId: tmdb?.[1] ?? null, + tvdbId: tvdb?.[1] ?? 
null, + }; +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `bun test server/services/__tests__/path-parser.test.ts` +Expected: all PASS + +- [ ] **Step 5: Commit** + +```bash +git add server/services/path-parser.ts server/services/__tests__/path-parser.test.ts +git commit -m "add path parser for Radarr/Sonarr naming conventions" +``` + +--- + +## Task 2: ffprobe wrapper + +Typed wrapper around `ffprobe` to extract stream metadata from video files. + +**Files:** +- Create: `server/services/probe.ts` +- Create: `server/services/__tests__/probe.test.ts` + +- [ ] **Step 1: Write failing tests for probe** + +```typescript +// server/services/__tests__/probe.test.ts +import { describe, expect, test } from "bun:test"; +import { parseProbeOutput } from "../probe"; + +const SAMPLE_OUTPUT = { + format: { + filename: "/movies/test.mkv", + size: "1500000000", + duration: "7200.000000", + format_name: "matroska,webm", + }, + streams: [ + { + index: 0, + codec_type: "video", + codec_name: "h264", + profile: "High", + tags: {}, + }, + { + index: 1, + codec_type: "audio", + codec_name: "dts", + profile: "DTS-HD MA", + channels: 6, + channel_layout: "5.1(side)", + bit_rate: "1509000", + sample_rate: "48000", + bits_per_raw_sample: "24", + disposition: { default: 1, forced: 0, hearing_impaired: 0 }, + tags: { language: "eng", title: "Surround 5.1" }, + }, + { + index: 2, + codec_type: "audio", + codec_name: "aac", + profile: "LC", + channels: 2, + channel_layout: "stereo", + bit_rate: "128000", + sample_rate: "44100", + bits_per_raw_sample: "16", + disposition: { default: 0, forced: 0, hearing_impaired: 0 }, + tags: { language: "deu", title: "German Stereo" }, + }, + { + index: 3, + codec_type: "subtitle", + codec_name: "subrip", + disposition: { default: 0, forced: 0, hearing_impaired: 1 }, + tags: { language: "eng", title: "English SDH" }, + }, + ], +}; + +describe("parseProbeOutput", () => { + test("parses format metadata", () => { + const result = 
parseProbeOutput(JSON.stringify(SAMPLE_OUTPUT)); + expect(result.fileSize).toBe(1500000000); + expect(result.durationSeconds).toBe(7200); + expect(result.container).toBe("matroska"); + }); + + test("parses video stream", () => { + const result = parseProbeOutput(JSON.stringify(SAMPLE_OUTPUT)); + const video = result.streams.find((s) => s.type === "Video"); + expect(video).toBeDefined(); + expect(video!.codec).toBe("h264"); + expect(video!.profile).toBe("High"); + expect(video!.streamIndex).toBe(0); + }); + + test("parses audio streams with full metadata", () => { + const result = parseProbeOutput(JSON.stringify(SAMPLE_OUTPUT)); + const audio = result.streams.filter((s) => s.type === "Audio"); + expect(audio).toHaveLength(2); + expect(audio[0]).toMatchObject({ + streamIndex: 1, + codec: "dts", + profile: "DTS-HD MA", + language: "eng", + title: "Surround 5.1", + channels: 6, + channelLayout: "5.1(side)", + bitRate: 1509000, + sampleRate: 48000, + bitDepth: 24, + isDefault: 1, + isForced: 0, + isHearingImpaired: 0, + }); + expect(audio[1]).toMatchObject({ + streamIndex: 2, + codec: "aac", + language: "deu", + isDefault: 0, + }); + }); + + test("parses subtitle stream with hearing impaired flag", () => { + const result = parseProbeOutput(JSON.stringify(SAMPLE_OUTPUT)); + const sub = result.streams.find((s) => s.type === "Subtitle"); + expect(sub).toBeDefined(); + expect(sub!.isHearingImpaired).toBe(1); + expect(sub!.language).toBe("eng"); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `bun test server/services/__tests__/probe.test.ts` +Expected: FAIL — module not found + +- [ ] **Step 3: Implement probe module** + +```typescript +// server/services/probe.ts + +export interface ProbeStream { + streamIndex: number; + type: "Video" | "Audio" | "Subtitle" | "Data" | "EmbeddedImage"; + codec: string | null; + profile: string | null; + language: string | null; + title: string | null; + isDefault: number; + isForced: number; + isHearingImpaired: 
number; + channels: number | null; + channelLayout: string | null; + bitRate: number | null; + sampleRate: number | null; + bitDepth: number | null; +} + +export interface ProbeResult { + fileSize: number | null; + durationSeconds: number | null; + container: string | null; + streams: ProbeStream[]; +} + +const CODEC_TYPE_MAP: Record<string, ProbeStream["type"]> = { + video: "Video", + audio: "Audio", + subtitle: "Subtitle", + data: "Data", + attachment: "EmbeddedImage", +}; + +/** + * Run ffprobe on a file and return parsed metadata. + */ +export async function probeFile(filePath: string): Promise<ProbeResult> { + const proc = Bun.spawn([ + "ffprobe", "-v", "quiet", + "-print_format", "json", + "-show_format", "-show_streams", + filePath, + ]); + + const output = await new Response(proc.stdout).text(); + const code = await proc.exited; + if (code !== 0) throw new Error(`ffprobe exited ${code} for ${filePath}`); + + return parseProbeOutput(output); +} + +/** Parse ffprobe JSON output into a ProbeResult. Exported for unit testing. */ +export function parseProbeOutput(json: string): ProbeResult { + const data = JSON.parse(json) as { + format?: { size?: string; duration?: string; format_name?: string }; + streams?: FfprobeStream[]; + }; + + const format = data.format ?? {}; + const container = format.format_name?.split(",")[0] ?? null; + + return { + fileSize: format.size ? Number.parseInt(format.size, 10) : null, + durationSeconds: format.duration ? Number.parseFloat(format.duration) : null, + container, + streams: (data.streams ?? []).map(mapFfprobeStream), + }; +} + +interface FfprobeStream { + index: number; + codec_type: string; + codec_name?: string; + profile?: string; + channels?: number; + channel_layout?: string; + bit_rate?: string; + sample_rate?: string; + bits_per_raw_sample?: string; + disposition?: { default?: number; forced?: number; hearing_impaired?: number }; + tags?: Record<string, string>; +} + +function mapFfprobeStream(s: FfprobeStream): ProbeStream { + const disp = s.disposition ?? 
{}; + const tags = s.tags ?? {}; + const lang = tags.language ?? tags.LANGUAGE ?? null; + + return { + streamIndex: s.index, + type: CODEC_TYPE_MAP[s.codec_type] ?? "Data", + codec: s.codec_name ?? null, + profile: s.profile ?? null, + language: lang, + title: tags.title ?? tags.TITLE ?? null, + isDefault: disp.default ?? 0, + isForced: disp.forced ?? 0, + isHearingImpaired: disp.hearing_impaired ?? 0, + channels: s.channels ?? null, + channelLayout: s.channel_layout ?? null, + bitRate: s.bit_rate ? Number.parseInt(s.bit_rate, 10) : null, + sampleRate: s.sample_rate ? Number.parseInt(s.sample_rate, 10) : null, + bitDepth: s.bits_per_raw_sample ? Number.parseInt(s.bits_per_raw_sample, 10) : null, + }; +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `bun test server/services/__tests__/probe.test.ts` +Expected: all PASS + +- [ ] **Step 5: Commit** + +```bash +git add server/services/probe.ts server/services/__tests__/probe.test.ts +git commit -m "add ffprobe wrapper for stream metadata extraction" +``` + +--- + +## Task 3: Filesystem discovery + +Walk mount roots and yield video file paths. 
+ +**Files:** +- Create: `server/services/discover.ts` +- Create: `server/services/__tests__/discover.test.ts` + +- [ ] **Step 1: Write failing tests** + +```typescript +// server/services/__tests__/discover.test.ts +import { describe, expect, test, beforeAll, afterAll } from "bun:test"; +import { mkdirSync, writeFileSync, rmSync } from "node:fs"; +import { discoverVideoFiles } from "../discover"; + +const TMP = "/tmp/discover-test"; + +beforeAll(() => { + mkdirSync(`${TMP}/movies/Movie A (2020)`, { recursive: true }); + mkdirSync(`${TMP}/tv/Show B (2019)/Season 01`, { recursive: true }); + writeFileSync(`${TMP}/movies/Movie A (2020)/Movie A (2020).mkv`, ""); + writeFileSync(`${TMP}/movies/Movie A (2020)/Movie A (2020).nfo`, ""); + writeFileSync(`${TMP}/tv/Show B (2019)/Season 01/Show B (2019) - S01E01 - Pilot.mkv`, ""); + writeFileSync(`${TMP}/tv/Show B (2019)/Season 01/Show B (2019) - S01E01 - Pilot.srt`, ""); +}); + +afterAll(() => { + rmSync(TMP, { recursive: true, force: true }); +}); + +describe("discoverVideoFiles", () => { + test("finds video files in both roots", async () => { + const files = await discoverVideoFiles([`${TMP}/movies`, `${TMP}/tv`]); + expect(files).toHaveLength(2); + expect(files).toContain(`${TMP}/movies/Movie A (2020)/Movie A (2020).mkv`); + expect(files).toContain(`${TMP}/tv/Show B (2019)/Season 01/Show B (2019) - S01E01 - Pilot.mkv`); + }); + + test("ignores non-video files", async () => { + const files = await discoverVideoFiles([`${TMP}/movies`]); + expect(files.every((f) => f.endsWith(".mkv"))).toBe(true); + }); + + test("handles missing directory gracefully", async () => { + const files = await discoverVideoFiles(["/nonexistent/path"]); + expect(files).toHaveLength(0); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `bun test server/services/__tests__/discover.test.ts` +Expected: FAIL — module not found + +- [ ] **Step 3: Implement discover module** + +```typescript +// server/services/discover.ts +import 
{ Glob } from "bun"; +import { existsSync } from "node:fs"; + +const VIDEO_EXTENSIONS = ["mkv", "mp4", "avi", "m4v", "ts", "wmv"]; +const GLOB_PATTERN = `**/*.{${VIDEO_EXTENSIONS.join(",")}}`; + +/** + * Recursively discover all video files under the given root directories. + * Returns absolute paths sorted alphabetically. + */ +export async function discoverVideoFiles(roots: string[]): Promise<string[]> { + const files: string[] = []; + + for (const root of roots) { + if (!existsSync(root)) continue; + const glob = new Glob(GLOB_PATTERN); + for await (const match of glob.scan({ cwd: root, absolute: true })) { + files.push(match); + } + } + + return files.sort(); +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `bun test server/services/__tests__/discover.test.ts` +Expected: all PASS + +- [ ] **Step 5: Commit** + +```bash +git add server/services/discover.ts server/services/__tests__/discover.test.ts +git commit -m "add filesystem discovery for video files" +``` + +--- + +## Task 4: Extract `normalizeLanguage` from jellyfin.ts + +Move the `normalizeLanguage()` utility out of `jellyfin.ts` before we delete it — it's used by sonarr.ts and the analyzer. + +**Files:** +- Create: `server/services/language-utils.ts` +- Modify: `server/services/sonarr.ts` — update import +- Modify: any other files importing `normalizeLanguage` from jellyfin + +- [ ] **Step 1: Identify all imports of normalizeLanguage from jellyfin** + +Run: `grep -r "normalizeLanguage.*jellyfin\|from.*jellyfin.*normalizeLanguage" server/ --include="*.ts"` + +- [ ] **Step 2: Create language-utils.ts** + +Copy `normalizeLanguage` (and its lookup tables) from `server/services/jellyfin.ts`. Also add the `guessOriginalLanguage` function (replaces `extractOriginalLanguage`) since we need it in the new scan flow. + +```typescript +// server/services/language-utils.ts + +/** + * Language normalization utilities extracted from jellyfin.ts. + * Shared across probe-based and external-API language handling. 
+ */ + +// Copy the full ISO_639_2_TO_1, ISO_639_1_TO_2, and ALIAS maps from jellyfin.ts + +/** + * Normalize a raw language tag to ISO 639-2/B (3-letter code). + */ +export function normalizeLanguage(lang: string): string { + // ... exact logic from jellyfin.ts normalizeLanguage ... +} + +/** + * Guess original language from audio streams by looking at the default track. + * Heuristic: prefer the default audio track, skip dubs/commentary, fall back to first. + */ +export function guessOriginalLanguage( + audioStreams: { language: string | null; title: string | null; isDefault: number }[], +): string | null { + if (audioStreams.length === 0) return null; + const dominated = /\b(dub|dubbed|commentary|synchro|synchron)\b/i; + const defaultTrack = audioStreams.find((s) => s.isDefault && !dominated.test(s.title ?? "")); + const candidate = defaultTrack ?? audioStreams.find((s) => !dominated.test(s.title ?? "")) ?? audioStreams[0]; + if (!candidate?.language) return null; + return normalizeLanguage(candidate.language); +} +``` + +- [ ] **Step 3: Update all imports** + +Replace `import { normalizeLanguage } from "./jellyfin"` with `import { normalizeLanguage } from "./language-utils"` in all files found in step 1. + +- [ ] **Step 4: Run full test suite** + +Run: `bun test` +Expected: all existing tests still pass + +- [ ] **Step 5: Commit** + +```bash +git add server/services/language-utils.ts server/services/sonarr.ts +git commit -m "extract normalizeLanguage to language-utils.ts, decouple from jellyfin" +``` + +--- + +## Task 5: New database schema + +Drop Jellyfin columns, make `file_path` the unique key, add `series_key` for grouping episodes. 
+ +**Files:** +- Modify: `server/db/schema.ts` +- Modify: `server/types.ts` + +- [ ] **Step 1: Update schema.ts** + +```sql +CREATE TABLE IF NOT EXISTS media_items ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + file_path TEXT NOT NULL UNIQUE, + type TEXT NOT NULL, + name TEXT NOT NULL, + series_name TEXT, + series_key TEXT, + season_number INTEGER, + episode_number INTEGER, + year INTEGER, + file_size INTEGER, + container TEXT, + duration_seconds REAL, + original_language TEXT, + orig_lang_source TEXT, + needs_review INTEGER NOT NULL DEFAULT 1, + imdb_id TEXT, + tmdb_id TEXT, + tvdb_id TEXT, + scan_status TEXT NOT NULL DEFAULT 'pending', + scan_error TEXT, + last_scanned_at TEXT, + last_executed_at TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')) +); +``` + +Dropped: `jellyfin_id`, `original_title`, `series_jellyfin_id`, `date_last_refreshed`, `jellyfin_raw`, `external_raw`, `ingest_source`, `language_display` (from streams). +Added: `series_key` (derived from series folder path). +Changed: `runtime_ticks` to `duration_seconds`, `file_path` is UNIQUE. + +Update DEFAULT_CONFIG: +- Remove: `jellyfin_url`, `jellyfin_api_key`, `jellyfin_user_id`, `mqtt_*` keys +- Add: `movies_root: "/movies"`, `tv_root: "/tv"` + +- [ ] **Step 2: Update types.ts** + +Update `MediaItem` interface: +- Remove: `jellyfin_id`, `original_title`, `series_jellyfin_id`, `date_last_refreshed`, `jellyfin_raw`, `external_raw`, `ingest_source` +- Add: `series_key: string | null`, `duration_seconds: number | null` +- Change: `orig_lang_source` type to `"probe" | "radarr" | "sonarr" | "manual" | null` + +Update `MediaStream` interface: +- Remove: `language_display` + +Remove all Jellyfin-specific interfaces (`JellyfinItem`, `JellyfinMediaStream`, `JellyfinUser`, etc.). 
+ +- [ ] **Step 3: Commit** + +```bash +git add server/db/schema.ts server/types.ts +git commit -m "new schema: drop jellyfin columns, file_path as unique key" +``` + +--- + +## Task 6: Rewrite rescan.ts (upsert from probe data) + +Replace `upsertJellyfinItem()` with `upsertScannedItem()` that accepts parsed path + probe data. + +**Files:** +- Modify: `server/services/rescan.ts` +- Create: `server/services/__tests__/rescan.test.ts` (rewritten) + +- [ ] **Step 1: Write failing tests** + +```typescript +// server/services/__tests__/rescan.test.ts +import { describe, expect, test } from "bun:test"; +import { Database } from "bun:sqlite"; +import { upsertScannedItem } from "../rescan"; +import { SCHEMA } from "../../db/schema"; +import type { ProbeResult } from "../probe"; +import type { ParsedPath } from "../path-parser"; + +function freshDb(): Database { + const db = new Database(":memory:"); + db.exec(SCHEMA); + return db; +} + +const PARSED: ParsedPath = { + type: "Movie", + name: "Hot Fuzz", + year: 2007, + seriesName: null, + seasonNumber: null, + episodeNumber: null, + imdbId: "tt0425112", + tmdbId: null, + tvdbId: null, + container: "mkv", +}; + +const PROBE: ProbeResult = { + fileSize: 5_000_000_000, + durationSeconds: 7200, + container: "matroska", + streams: [ + { streamIndex: 0, type: "Video", codec: "h264", profile: "High", language: null, title: null, isDefault: 1, isForced: 0, isHearingImpaired: 0, channels: null, channelLayout: null, bitRate: null, sampleRate: null, bitDepth: null }, + { streamIndex: 1, type: "Audio", codec: "dts", profile: "DTS-HD MA", language: "eng", title: "English 5.1", isDefault: 1, isForced: 0, isHearingImpaired: 0, channels: 6, channelLayout: "5.1(side)", bitRate: 1509000, sampleRate: 48000, bitDepth: 24 }, + ], +}; + +describe("upsertScannedItem", () => { + test("inserts new item with streams", () => { + const db = freshDb(); + const result = upsertScannedItem(db, "/movies/Hot Fuzz (2007)/Hot Fuzz (2007).mkv", PARSED, PROBE); + 
expect(result.itemId).toBeGreaterThan(0); + + const item = db.prepare("SELECT * FROM media_items WHERE id = ?").get(result.itemId) as any; + expect(item.name).toBe("Hot Fuzz"); + expect(item.type).toBe("Movie"); + expect(item.imdb_id).toBe("tt0425112"); + expect(item.file_size).toBe(5_000_000_000); + expect(item.duration_seconds).toBe(7200); + + const streams = db.prepare("SELECT * FROM media_streams WHERE item_id = ?").all(result.itemId); + expect(streams).toHaveLength(2); + }); + + test("upserts on same file_path", () => { + const db = freshDb(); + upsertScannedItem(db, "/movies/test.mkv", PARSED, PROBE); + const updated = { ...PROBE, fileSize: 6_000_000_000 }; + const result = upsertScannedItem(db, "/movies/test.mkv", PARSED, updated); + const item = db.prepare("SELECT * FROM media_items WHERE id = ?").get(result.itemId) as any; + expect(item.file_size).toBe(6_000_000_000); + }); + + test("preserves manual language override on rescan", () => { + const db = freshDb(); + const result = upsertScannedItem(db, "/movies/test.mkv", PARSED, PROBE); + db.prepare("UPDATE media_items SET original_language = 'fra', orig_lang_source = 'manual' WHERE id = ?").run(result.itemId); + + upsertScannedItem(db, "/movies/test.mkv", PARSED, PROBE); + const item = db.prepare("SELECT original_language, orig_lang_source FROM media_items WHERE id = ?").get(result.itemId) as any; + expect(item.original_language).toBe("fra"); + expect(item.orig_lang_source).toBe("manual"); + }); + + test("creates review_plan stub", () => { + const db = freshDb(); + const result = upsertScannedItem(db, "/movies/test.mkv", PARSED, PROBE); + const plan = db.prepare("SELECT * FROM review_plans WHERE item_id = ?").get(result.itemId) as any; + expect(plan).toBeDefined(); + expect(plan.status).toBe("pending"); + }); + + test("guesses original language from default audio track", () => { + const db = freshDb(); + const result = upsertScannedItem(db, "/movies/test.mkv", PARSED, PROBE); + 
expect(result.origLang).toBe("eng"); + expect(result.origLangSource).toBe("probe"); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `bun test server/services/__tests__/rescan.test.ts` +Expected: FAIL + +- [ ] **Step 3: Implement new upsertScannedItem** + +```typescript +// server/services/rescan.ts +import type { Database } from "bun:sqlite"; +import type { ParsedPath } from "./path-parser"; +import type { ProbeResult } from "./probe"; +import { guessOriginalLanguage } from "./language-utils"; + +export interface UpsertResult { + itemId: number; + origLang: string | null; + origLangSource: string | null; + needsReview: number; + isNew: boolean; +} + +/** + * Upsert a scanned item (metadata + streams + review_plan) in one transaction. + * Driven by filesystem path + ffprobe output. + */ +export function upsertScannedItem( + db: Database, + filePath: string, + parsed: ParsedPath, + probe: ProbeResult, +): UpsertResult { + const seriesKey = parsed.seriesName + ? `${parsed.seriesName}|${parsed.year ?? ""}` + : null; + + const existing = db + .prepare("SELECT id, original_language, orig_lang_source FROM media_items WHERE file_path = ?") + .get(filePath) as { id: number; original_language: string | null; orig_lang_source: string | null } | undefined; + const hasManualOverride = existing?.orig_lang_source === "manual"; + + const audioStreams = probe.streams + .filter((s) => s.type === "Audio") + .map((s) => ({ language: s.language, title: s.title, isDefault: s.isDefault })); + const probeGuess = guessOriginalLanguage(audioStreams); + + const origLang = hasManualOverride ? existing!.original_language : probeGuess; + const origLangSource: string | null = hasManualOverride ? "manual" : (probeGuess ? "probe" : null); + const needsReview = origLang ? (hasManualOverride ? 
0 : 1) : 1; + + const result: UpsertResult = { itemId: -1, origLang, origLangSource, needsReview, isNew: !existing }; + + db.transaction(() => { + db.prepare(` + INSERT INTO media_items ( + file_path, type, name, series_name, series_key, + season_number, episode_number, year, file_size, container, + duration_seconds, original_language, orig_lang_source, needs_review, + imdb_id, tmdb_id, tvdb_id, + scan_status, last_scanned_at + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now')) + ON CONFLICT(file_path) DO UPDATE SET + type = excluded.type, name = excluded.name, + series_name = excluded.series_name, series_key = excluded.series_key, + season_number = excluded.season_number, episode_number = excluded.episode_number, + year = excluded.year, file_size = excluded.file_size, container = excluded.container, + duration_seconds = excluded.duration_seconds, + original_language = excluded.original_language, orig_lang_source = excluded.orig_lang_source, + needs_review = excluded.needs_review, + imdb_id = excluded.imdb_id, tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id, + scan_status = 'scanned', last_scanned_at = datetime('now') + `).run( + filePath, parsed.type, parsed.name, parsed.seriesName, seriesKey, + parsed.seasonNumber, parsed.episodeNumber, parsed.year, + probe.fileSize, parsed.container, probe.durationSeconds, + origLang, origLangSource, needsReview, + parsed.imdbId, parsed.tmdbId, parsed.tvdbId, + ); + + const row = db.prepare("SELECT id FROM media_items WHERE file_path = ?").get(filePath) as { id: number }; + const itemId = row.id; + result.itemId = itemId; + + db.prepare("DELETE FROM media_streams WHERE item_id = ?").run(itemId); + const ins = db.prepare(` + INSERT INTO media_streams ( + item_id, stream_index, type, codec, profile, language, title, + is_default, is_forced, is_hearing_impaired, + channels, channel_layout, bit_rate, sample_rate, bit_depth + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ `); + for (const s of probe.streams) { + ins.run( + itemId, s.streamIndex, s.type, s.codec, s.profile, s.language, s.title, + s.isDefault, s.isForced, s.isHearingImpaired, + s.channels, s.channelLayout, s.bitRate, s.sampleRate, s.bitDepth, + ); + } + + db.prepare(` + INSERT INTO review_plans (item_id, status, is_noop, sorted) + VALUES (?, 'pending', 0, 0) + ON CONFLICT(item_id) DO UPDATE SET + status = CASE + WHEN review_plans.status = 'error' THEN 'pending' + ELSE review_plans.status + END, + sorted = CASE + WHEN review_plans.status = 'error' THEN 0 + ELSE review_plans.sorted + END + `).run(itemId); + })(); + + return result; +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `bun test server/services/__tests__/rescan.test.ts` +Expected: all PASS + +- [ ] **Step 5: Commit** + +```bash +git add server/services/rescan.ts server/services/__tests__/rescan.test.ts +git commit -m "rewrite rescan: upsertScannedItem from probe data, no jellyfin" +``` + +--- + +## Task 7: Rewrite scan.ts (filesystem walk + ffprobe) + +Replace the Jellyfin-paginated scan with: discover files, parse paths, probe each, upsert. + +**Files:** +- Modify: `server/api/scan.ts` + +- [ ] **Step 1: Rewrite scan.ts** + +Replace the `runScan` function to: +1. Call `discoverVideoFiles([moviesRoot, tvRoot])` +2. For each file: `parsePath()` then `probeFile()` then `upsertScannedItem()` +3. Keep SSE progress, scan window, abort, and auto_processing logic +4. 
Remove all Jellyfin imports + +Key changes from current: +- No `JellyfinConfig` or `getAllItems()` +- No `getDevItems()` (dev mode uses scan with a limit instead) +- Status endpoint removes `ingest_source` from query +- Error recording uses `file_path` instead of `jellyfin_id` + +- [ ] **Step 2: Run tests** + +Run: `bun test` +Expected: scan-related tests may need fixture updates + +- [ ] **Step 3: Commit** + +```bash +git add server/api/scan.ts +git commit -m "rewrite scan: filesystem walk + ffprobe, no jellyfin" +``` + +--- + +## Task 8: Update language-resolver.ts (drop Jellyfin callback) + +Remove the `resolveSeriesTvdb` callback that fetched TVDB IDs from Jellyfin. TVDB IDs now come from path parsing or Sonarr title lookup. + +**Files:** +- Modify: `server/services/language-resolver.ts` +- Modify: `server/api/review.ts` — remove Jellyfin import and `resolveSeriesTvdb` setup + +- [ ] **Step 1: Simplify language-resolver.ts** + +Remove `resolveSeriesTvdb` from `LanguageResolverConfig`. The `resolveEpisode` function: +1. First tries `item.tvdb_id` (from path parser) +2. Falls back to Sonarr `byTitle` lookup using `item.series_name` +3. Falls back to probe guess + +- [ ] **Step 2: Update review.ts processInbox** + +Remove: +- `import { getItem } from "../services/jellyfin"` +- `JellyfinConfig` construction +- `seriesTvdbCache` and `resolveSeriesTvdb` closure + +- [ ] **Step 3: Run tests** + +Run: `bun test` +Expected: pass after updating test fixtures + +- [ ] **Step 4: Commit** + +```bash +git add server/services/language-resolver.ts server/api/review.ts +git commit -m "simplify language-resolver: drop jellyfin resolveSeriesTvdb callback" +``` + +--- + +## Task 9: Update review.ts (replace jellyfin_id with file_path/series_key) + +Mechanical replacement throughout review.ts. 
+ +**Files:** +- Modify: `server/api/review.ts` +- Modify: `server/api/__tests__/*.test.ts` — update fixtures + +- [ ] **Step 1: Replace all `series_jellyfin_id` with `series_key`** + +- `mi.series_jellyfin_id` becomes `mi.series_key` +- `seriesJellyfinId` in returned objects becomes `seriesKey` +- SQL `WHERE series_jellyfin_id = ?` becomes `WHERE series_key = ?` + +- [ ] **Step 2: Replace `jellyfin_id` references** + +- Remove from SELECT queries +- Rescan-series endpoint: accept `seriesKey` instead of `seriesJellyfinId` +- Error recording: use `file_path` instead of `jellyfin_id` + +- [ ] **Step 3: Remove Jellyfin imports** + +Remove all `jellyfin.ts` imports from review.ts. + +- [ ] **Step 4: Fix test fixtures** + +Update test files in `server/api/__tests__/` to match new schema (insert with `file_path` instead of `jellyfin_id`, etc.). + +- [ ] **Step 5: Run tests** + +Run: `bun test server/api/__tests__/` +Expected: all pass + +- [ ] **Step 6: Commit** + +```bash +git add server/api/review.ts server/api/__tests__/ +git commit -m "review.ts: replace jellyfin_id/series_jellyfin_id with file_path/series_key" +``` + +--- + +## Task 10: Update frontend types and components + +**Files:** +- Modify: `src/shared/lib/types.ts` +- Modify: `src/features/settings/SettingsPage.tsx` +- Modify: `src/features/pipeline/SeriesCard.tsx` +- Modify: `src/features/pipeline/InboxColumn.tsx` +- Modify: `src/features/pipeline/ReviewColumn.tsx` +- Delete: `src/features/settings/MqttSection.tsx` +- Delete: `src/shared/components/MqttBadge.tsx` + +- [ ] **Step 1: Update frontend types** + +In `src/shared/lib/types.ts`: +- Remove `jellyfin_id`, `series_jellyfin_id`, `jellyfin_raw`, `external_raw`, `ingest_source`, `date_last_refreshed`, `original_title`, `runtime_ticks`, `language_display` +- Add `series_key`, `duration_seconds` +- Change `seriesJellyfinId` to `seriesKey` in group types + +- [ ] **Step 2: Update SeriesCard** + +- Remove Jellyfin web link +- Rename `seriesJellyfinId` prop 
to `seriesKey` +- Update rescan API call body + +- [ ] **Step 3: Update InboxColumn and ReviewColumn** + +- Change `group.seriesJellyfinId` to `group.seriesKey` + +- [ ] **Step 4: Update SettingsPage** + +- Remove Jellyfin `ConnSection` +- Remove MQTT section +- Add media roots display (movies_root, tv_root) + +- [ ] **Step 5: Delete MQTT files** + +- `src/features/settings/MqttSection.tsx` +- `src/shared/components/MqttBadge.tsx` + +- [ ] **Step 6: Run dev server, verify UI loads** + +Run: `bun run dev` +Expected: compiles, UI renders + +- [ ] **Step 7: Commit** + +```bash +git add src/ +git rm src/features/settings/MqttSection.tsx src/shared/components/MqttBadge.tsx +git commit -m "frontend: drop jellyfin/mqtt, use series_key" +``` + +--- + +## Task 11: Delete Jellyfin/MQTT/webhook server files + +**Files:** +- Delete: `server/services/jellyfin.ts` +- Delete: `server/services/webhook.ts` +- Delete: `server/services/mqtt.ts` +- Delete: `server/services/__tests__/webhook.test.ts` +- Modify: `server/api/setup.ts` — remove Jellyfin endpoint +- Modify: `server/index.tsx` — remove MQTT startup + +- [ ] **Step 1: Delete service files** + +```bash +git rm server/services/jellyfin.ts server/services/webhook.ts server/services/mqtt.ts +git rm server/services/__tests__/webhook.test.ts +``` + +- [ ] **Step 2: Update server/index.tsx** + +Remove MQTT import and startup call. + +- [ ] **Step 3: Update server/api/setup.ts** + +Remove `/api/settings/jellyfin` endpoint. + +- [ ] **Step 4: Find and fix dead imports** + +Run: `bun run build` +Fix any remaining imports of deleted modules. 
+
+- [ ] **Step 5: Run full test suite**
+
+Run: `bun test`
+Expected: all pass
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add -A
+git commit -m "remove jellyfin, mqtt, webhook services"
+```
+
+---
+
+## Task 12: Sonarr byTitle index for name-based lookup
+
+**Files:**
+- Modify: `server/services/sonarr.ts`
+- Modify: `server/services/language-resolver.ts`
+
+- [ ] **Step 1: Add byTitle to SonarrLibrary**
+
+```typescript
+interface SonarrSeries {
+  id?: number;
+  tvdbId?: number;
+  title?: string;
+  originalLanguage?: { name: string };
+}
+
+export interface SonarrLibrary {
+  byTvdbId: Map<number, SonarrSeries>;
+  byTitle: Map<string, SonarrSeries>;
+}
+```
+
+Update `loadLibrary` to populate `byTitle` (lowercase title as key).
+
+- [ ] **Step 2: Update language-resolver resolveEpisode**
+
+Use `cfg.sonarrLibrary.byTitle.get(item.series_name.toLowerCase())` as fallback when `item.tvdb_id` is missing.
+
+- [ ] **Step 3: Run tests**
+
+Run: `bun test`
+Expected: all pass
+
+- [ ] **Step 4: Commit**
+
+```bash
+git add server/services/sonarr.ts server/services/language-resolver.ts
+git commit -m "sonarr: add byTitle index for name-based episode language resolution"
+```
+
+---
+
+## Task 13: End-to-end verification
+
+- [ ] **Step 1: Delete database, start fresh**
+
+```bash
+rm -f data/netfelix.db
+```
+
+- [ ] **Step 2: Start dev server, verify startup**
+
+Run: `bun run dev`
+Expected: fresh DB created with new schema, no errors
+
+- [ ] **Step 3: Configure and run scan**
+
+Set audio_languages, verify files discovered, probed, and populated in pipeline.
+
+- [ ] **Step 4: Verify processInbox resolves languages**
+
+Items with Radarr/Sonarr match should auto-classify.
+
+- [ ] **Step 5: Test execute on one item**
+
+Approve and execute one item, verify FFmpeg command works.
+ +- [ ] **Step 6: Bump version and commit** + +```bash +# Update version in package.json to current CalVer +git add package.json +git commit -m "bump version to 2026.04.20" +``` + +--- + +## Summary of dropped functionality (intentional) + +| Feature | Status | +|---------|--------| +| Jellyfin connection/config | Removed | +| MQTT webhook listener | Removed | +| Jellyfin webhook handler | Removed | +| New arrival auto-detection | **Deferred** — will return via Sonarr/Radarr webhooks | +| Jellyfin link on series cards | Removed | +| `ingest_source` field | Removed (was "scan" or "webhook") | +| Dev mode random items | Removed (dev mode uses scan with limit) | +| `date_last_refreshed` / refresh triggering | Removed | +| `original_title` field | Removed (unused in UI) | +| `language_display` field | Removed (ffprobe doesn't provide it) | diff --git a/package.json b/package.json index 0dda46d..e48192c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "netfelix-audio-fix", - "version": "2026.04.21.3", + "version": "2026.04.21.4", "scripts": { "dev:server": "NODE_ENV=development bun --hot server/index.tsx", "dev:client": "vite", diff --git a/server/api/review.ts b/server/api/review.ts index 8075876..492de19 100644 --- a/server/api/review.ts +++ b/server/api/review.ts @@ -1123,6 +1123,65 @@ app.post("/process-inbox/stop", (c) => { return c.json({ ok: true, message: "not running" }); }); +// ─── Process single item ──────────────────────────────────────────────────── +// Runs language resolution + reanalysis + sort for one inbox item. +app.post("/:id/process", async (c) => { + const db = getDb(); + const id = parseId(c.req.param("id")); + if (id == null) return c.json({ error: "invalid id" }, 400); + const plan = db + .prepare("SELECT id FROM review_plans WHERE item_id = ? 
AND status = 'pending' AND sorted = 0") + .get(id) as { id: number } | undefined; + if (!plan) return c.json({ error: "item not in inbox" }, 404); + + // Build language resolver (same as processInbox) + const cfg = getAllConfig(); + const radarrCfg = { url: cfg.radarr_url, apiKey: cfg.radarr_api_key }; + const sonarrCfg = { url: cfg.sonarr_url, apiKey: cfg.sonarr_api_key }; + const radarrEnabled = cfg.radarr_enabled === "1" && radarrUsable(radarrCfg); + const sonarrEnabled = cfg.sonarr_enabled === "1" && sonarrUsable(sonarrCfg); + const [radarrLibrary, sonarrLibrary] = await Promise.all([ + radarrEnabled ? loadRadarrLibrary(radarrCfg) : Promise.resolve(null), + sonarrEnabled ? loadSonarrLibrary(sonarrCfg) : Promise.resolve(null), + ]); + const resolverCfg: LanguageResolverConfig = { + radarr: radarrEnabled ? radarrCfg : null, + sonarr: sonarrEnabled ? sonarrCfg : null, + radarrLibrary, + sonarrLibrary, + }; + + // Resolve language + const langResult = await resolveLanguage(db, id, resolverCfg); + if (langResult.externalRaw != null) { + db + .prepare("UPDATE media_items SET original_language = ?, orig_lang_source = ?, needs_review = ? 
WHERE id = ?") + .run(langResult.origLang, langResult.origLangSource, langResult.needsReview, id); + } + + // Reanalyze + sort + const audioLanguages = getAudioLanguages(); + reanalyze(db, id, audioLanguages); + const updated = db.prepare("SELECT auto_class, is_noop FROM review_plans WHERE item_id = ?").get(id) as + | { auto_class: string | null; is_noop: number } + | undefined; + + if (updated && !updated.is_noop) { + if (updated.auto_class === "auto") { + db + .prepare("UPDATE review_plans SET status = 'approved', reviewed_at = datetime('now'), sorted = 1 WHERE id = ?") + .run(plan.id); + const { item, streams, decisions } = loadItemDetail(db, id); + if (item) enqueueAudioJob(db, id, buildCommand(item, streams, decisions)); + } else { + db.prepare("UPDATE review_plans SET sorted = 1 WHERE id = ?").run(plan.id); + } + } + + emitPipelineChanged(); + return c.json({ ok: true, destination: updated?.auto_class === "auto" ? "queue" : "review" }); +}); + // ─── Approve all ready ─────────────────────────────────────────────────────── // Bulk-approves every auto_heuristic-classified plan currently in Review. app.post("/approve-ready", (c) => { diff --git a/src/features/pipeline/ColumnShell.tsx b/src/features/pipeline/ColumnShell.tsx index 57b474d..605939a 100644 --- a/src/features/pipeline/ColumnShell.tsx +++ b/src/features/pipeline/ColumnShell.tsx @@ -14,7 +14,8 @@ interface ColumnShellProps { count: ReactNode; subtitle?: ReactNode; backward?: ColumnAction; - skip?: ColumnAction; + /** Middle slot: accepts a ColumnAction button or any ReactNode (e.g. a dropdown). 
*/ + middle?: ColumnAction | ReactNode; forward?: ColumnAction; children: ReactNode; } @@ -40,6 +41,10 @@ function ActionButton({ action }: { action: ColumnAction }) { ); } +function isColumnAction(v: unknown): v is ColumnAction { + return typeof v === "object" && v !== null && "label" in v && "onClick" in v; +} + /** * Equal-width pipeline column with a fixed three-row header (title + count, * subtitle, button row) and a scrolling body. All five pipeline columns share @@ -51,7 +56,7 @@ function ActionButton({ action }: { action: ColumnAction }) { * same height; buttons passed as disabled still occupy their slot so the * header never jumps between states. */ -export function ColumnShell({ title, count, subtitle, backward, skip, forward, children }: ColumnShellProps) { +export function ColumnShell({ title, count, subtitle, backward, middle, forward, children }: ColumnShellProps) { return (
@@ -66,7 +71,9 @@ export function ColumnShell({ title, count, subtitle, backward, skip, forward, c */}
{backward && <ActionButton action={backward} />}
-
{skip && <ActionButton action={skip} />}
+
+ 
+ {middle && (isColumnAction(middle) ? <ActionButton action={middle} /> : middle)}
+
{forward && <ActionButton action={forward} />}
diff --git a/src/features/pipeline/InboxColumn.tsx b/src/features/pipeline/InboxColumn.tsx index e5e6a92..c2d2aa2 100644 --- a/src/features/pipeline/InboxColumn.tsx +++ b/src/features/pipeline/InboxColumn.tsx @@ -23,6 +23,8 @@ interface InboxColumnProps { onToggleAutoProcessing: (enabled: boolean) => void; onMutate: () => void; sortProgress: { processed: number; total: number } | null; + sort: InboxSort; + onChangeSort: (next: InboxSort) => void; } export function InboxColumn({ @@ -32,11 +34,12 @@ export function InboxColumn({ onToggleAutoProcessing, onMutate, sortProgress, + sort, + onChangeSort, }: InboxColumnProps) { const [groups, setGroups] = useState(initialResponse.groups); const [hasMore, setHasMore] = useState(initialResponse.hasMore); const [loadingMore, setLoadingMore] = useState(false); - const [sort, setSort] = useState("scan_asc"); const sentinelRef = useRef(null); // Optimistic mirror of the auto-process checkbox so a click flips the @@ -52,15 +55,6 @@ export function InboxColumn({ setHasMore(initialResponse.hasMore); }, [initialResponse]); - const changeSort = useCallback(async (next: InboxSort) => { - setSort(next); - const res = await api.get( - `/api/review/groups?bucket=inbox&offset=0&limit=${PAGE_SIZE}&sort=${next}`, - ); - setGroups(res.groups); - setHasMore(res.hasMore); - }, []); - const loadMore = useCallback(async () => { if (loadingMore || !hasMore) return; setLoadingMore(true); @@ -95,6 +89,11 @@ export function InboxColumn({ await api.post("/api/review/process-inbox/stop"); }; + const processItem = async (itemId: number) => { + await api.post(`/api/review/${itemId}/process`); + onMutate(); + }; + // Progress bar fills the subtitle slot during an active sort so the user // sees real work happening instead of a frozen button. The auto-process // toggle hides while a sort runs — it can't be flipped meaningfully @@ -114,36 +113,24 @@ export function InboxColumn({
) : ( -
- -
- -
+ ); const backward = sorting ? { label: "Stop Sorting", onClick: stopProcess, danger: true } : undefined; + + // Sort dropdown + Process Inbox button share the forward slot const forward = sorting ? undefined : { @@ -154,12 +141,39 @@ export function InboxColumn({ title: "Process inbox to Queue / Review", }; + const sortDropdown = !sorting ? ( + + ) : undefined; + return ( - +
{groups.map((group) => { if (group.kind === "movie") { - return ; + return ( + processItem(group.item.item_id ?? (group.item as { id: number }).id)} + /> + ); } return ( { + // Process all episodes in this series + const ids = group.seasons.flatMap((s) => s.episodes.map((ep) => ep.item_id)); + Promise.all(ids.map((id) => api.post(`/api/review/${id}/process`))).then(() => onMutate()); + }} /> ); })} diff --git a/src/features/pipeline/PipelineCard.tsx b/src/features/pipeline/PipelineCard.tsx index 0e62c7c..6ca8a03 100644 --- a/src/features/pipeline/PipelineCard.tsx +++ b/src/features/pipeline/PipelineCard.tsx @@ -58,6 +58,8 @@ interface PipelineCardProps { // expanded series episodes don't get this (the series' "Approve all" // covers the prior-episodes-in-series case). onApproveUpToHere?: () => void; + // Inbox: process this single item (resolve language + classify → Review/Queue). + onProcess?: () => void; } function formatChannels(n: number | null | undefined): string | null { @@ -86,6 +88,7 @@ export function PipelineCard({ onSkip, onUnapprove, onApproveUpToHere, + onProcess, }: PipelineCardProps) { const title = item.type === "Episode" @@ -104,7 +107,7 @@ export function PipelineCard({ // media_item rows (no plan) in which case we fall back to item.id. const mediaItemId: number = item.item_id ?? (item as { id: number }).id; - const hasActionRow = !!(onSkip || onApprove || onUnapprove || onApproveUpToHere); + const hasActionRow = !!(onSkip || onApprove || onUnapprove || onApproveUpToHere || onProcess); const hasTranscodeReasons = !!item.transcode_reasons && item.transcode_reasons.length > 0; const hasInfoRow = hasTranscodeReasons || item.job_type === "copy" || !!item.auto_class; @@ -153,6 +156,16 @@ export function PipelineCard({ ← Back to review )} + {onProcess && ( + + )}
)} diff --git a/src/features/pipeline/PipelineHeader.tsx b/src/features/pipeline/PipelineHeader.tsx index caa7e7e..322cb73 100644 --- a/src/features/pipeline/PipelineHeader.tsx +++ b/src/features/pipeline/PipelineHeader.tsx @@ -116,6 +116,9 @@ export function PipelineHeader() { setErrors(b.errors); setCurrentItem(b.currentItem); b.dirty = false; + // Refresh dashboard stats periodically so the stat pills update + // during a scan (every ~5s to avoid hammering the server). + if (b.scanned % 25 === 0) loadStats(); } if (b.complete) { diff --git a/src/features/pipeline/PipelinePage.tsx b/src/features/pipeline/PipelinePage.tsx index f8d09a7..451976f 100644 --- a/src/features/pipeline/PipelinePage.tsx +++ b/src/features/pipeline/PipelinePage.tsx @@ -25,6 +25,8 @@ interface SortProgress { total: number; } +type InboxSort = "scan_asc" | "scan_desc" | "name_asc" | "name_desc"; + export function PipelinePage() { const [data, setData] = useState(null); const [inboxInitial, setInboxInitial] = useState(null); @@ -33,6 +35,8 @@ export function PipelinePage() { const [queueStatus, setQueueStatus] = useState(null); const [sortProgress, setSortProgress] = useState(null); const [loading, setLoading] = useState(true); + const [inboxSort, setInboxSort] = useState("scan_asc"); + const inboxSortRef = useRef("scan_asc"); const loadPipeline = useCallback(async () => { const res = await api.get("/api/review/pipeline"); @@ -40,8 +44,9 @@ export function PipelinePage() { }, []); const loadGroups = useCallback(async () => { + const sort = inboxSortRef.current; const [inbox, review] = await Promise.all([ - api.get("/api/review/groups?bucket=inbox&offset=0&limit=25"), + api.get(`/api/review/groups?bucket=inbox&offset=0&limit=25&sort=${sort}`), api.get("/api/review/groups?bucket=review&offset=0&limit=25"), ]); setInboxInitial(inbox); @@ -137,6 +142,12 @@ export function PipelinePage() { onToggleAutoProcessing={toggleAutoProcessing} onMutate={loadAll} sortProgress={sortProgress} + 
sort={inboxSort} + onChangeSort={(next) => { + inboxSortRef.current = next; + setInboxSort(next); + loadGroups(); + }} /> +
{groups.map((group, index) => { const prior = index > 0 ? priorIds(index) : null; diff --git a/src/features/pipeline/SeriesCard.tsx b/src/features/pipeline/SeriesCard.tsx index 8133f02..e6834e0 100644 --- a/src/features/pipeline/SeriesCard.tsx +++ b/src/features/pipeline/SeriesCard.tsx @@ -15,6 +15,8 @@ interface SeriesCardProps { // Review-column affordance: approve every card visually above this // series in one round-trip. See ReviewColumn for the id computation. onApproveUpToHere?: () => void; + // Inbox: process entire series (resolve language + classify → Review/Queue). + onProcess?: () => void; } export function SeriesCard({ @@ -26,6 +28,7 @@ export function SeriesCard({ originalLanguage, onMutate, onApproveUpToHere, + onProcess, }: SeriesCardProps) { const [expanded, setExpanded] = useState(false); const [rescanning, setRescanning] = useState(false); @@ -88,20 +91,35 @@ export function SeriesCard({ > ↻ - + {onProcess && ( + + )} + {!onProcess && ( + + )}
{/* Title row */}