Compare commits
40 Commits
45f4175929
..
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 794680ec17 | |||
| 201c02c810 | |||
| 495a40a6c6 | |||
| 84e669922b | |||
| 0d560743f3 | |||
| 331cc7b58e | |||
| d8d1b43556 | |||
| 13b2630a09 | |||
| 56f16c5ad3 | |||
| 6265301d47 | |||
| 583fa3e218 | |||
| 24d15a5057 | |||
| 330d3de425 | |||
| ac8772e3bf | |||
| b9b4a50e8a | |||
| da5bd6cac2 | |||
| 75104402fa | |||
| 43b190a1a0 | |||
| 6faa5986a3 | |||
| 114b6687c6 | |||
| 82c8c89fb9 | |||
| 67f1b9440e | |||
| 8e1deb39d5 | |||
| 455796ebb6 | |||
| c595ad3792 | |||
| ef417bea09 | |||
| 68771bb980 | |||
| b2d10bd3d4 | |||
| 0c595a787e | |||
| 7d30e6c1a6 | |||
| a2bdecd298 | |||
| c6698db51a | |||
| 604fdc5c6c | |||
| c22642630d | |||
| ab65909e6e | |||
| 07c98f36f0 | |||
| 4e96382097 | |||
| 3f910873eb | |||
| 3f848c0d31 | |||
| 967d2f56ad |
@@ -0,0 +1,857 @@
|
||||
# Review column lazy-load + season grouping — Implementation Plan
|
||||
|
||||
> **For agentic workers:** Use superpowers:subagent-driven-development. Checkbox (`- [ ]`) syntax tracks progress.
|
||||
|
||||
**Goal:** Replace the 500-item review cap with group-paginated infinite scroll; nest season sub-groups inside series when they have pending work across >1 season; wire the existing `/season/:key/:season/approve-all` endpoint into the UI.
|
||||
|
||||
**Architecture:** Move the grouping logic from the client to the server so groups are always returned complete. New `GET /api/review/groups?offset=N&limit=25` endpoint. Client's ReviewColumn becomes a stateful list that extends itself via `IntersectionObserver` on a sentinel.
|
||||
|
||||
**Tech Stack:** Bun + Hono (server), React 19 + TanStack Router (client), bun:sqlite.
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Server — build grouped data structure + new endpoint
|
||||
|
||||
**Files:**
|
||||
- Modify: `server/api/review.ts`
|
||||
|
||||
- [ ] **Step 1: Add shared types + builder**
|
||||
|
||||
At the top of `server/api/review.ts` (near the other type definitions), add exported types:
|
||||
|
||||
```ts
|
||||
export type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
minConfidence: "high" | "low";
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
|
||||
export interface ReviewGroupsResponse {
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
Add a helper after the existing `enrichWithStreamsAndReasons` helper:
|
||||
|
||||
```ts
|
||||
function buildReviewGroups(db: ReturnType<typeof getDb>): {
|
||||
groups: ReviewGroup[];
|
||||
totalItems: number;
|
||||
} {
|
||||
// Fetch ALL pending non-noop items. Grouping + pagination happen in memory.
|
||||
const rows = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.name, mi.series_name, mi.series_jellyfin_id,
|
||||
mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0
|
||||
ORDER BY
|
||||
CASE rp.confidence WHEN 'high' THEN 0 ELSE 1 END,
|
||||
COALESCE(mi.series_name, mi.name),
|
||||
mi.season_number, mi.episode_number
|
||||
`)
|
||||
.all() as PipelineReviewItem[];
|
||||
|
||||
const movies: PipelineReviewItem[] = [];
|
||||
const seriesMap = new Map<
|
||||
string,
|
||||
{
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
seasons: Map<number | null, PipelineReviewItem[]>;
|
||||
originalLanguage: string | null;
|
||||
minConfidence: "high" | "low";
|
||||
firstName: string;
|
||||
}
|
||||
>();
|
||||
|
||||
for (const row of rows) {
|
||||
if (row.type === "Movie") {
|
||||
movies.push(row);
|
||||
continue;
|
||||
}
|
||||
const key = row.series_jellyfin_id ?? row.series_name ?? String(row.item_id);
|
||||
let entry = seriesMap.get(key);
|
||||
if (!entry) {
|
||||
entry = {
|
||||
seriesName: row.series_name ?? "",
|
||||
seriesJellyfinId: row.series_jellyfin_id,
|
||||
seasons: new Map(),
|
||||
originalLanguage: row.original_language,
|
||||
minConfidence: row.confidence,
|
||||
firstName: row.series_name ?? "",
|
||||
};
|
||||
seriesMap.set(key, entry);
|
||||
}
|
||||
const season = row.season_number;
|
||||
let bucket = entry.seasons.get(season);
|
||||
if (!bucket) {
|
||||
bucket = [];
|
||||
entry.seasons.set(season, bucket);
|
||||
}
|
||||
bucket.push(row);
|
||||
if (row.confidence === "high" && entry.minConfidence === "low") {
|
||||
// Keep minConfidence as the "best" confidence across episodes — if any
|
||||
// episode is high, that's the group's dominant confidence for sort.
|
||||
// Actually we want the LOWEST (low wins) so user sees low-confidence
|
||||
// groups sorted after high-confidence ones. Revisit: keep low if present.
|
||||
}
|
||||
if (row.confidence === "low") entry.minConfidence = "low";
|
||||
}
|
||||
|
||||
// Sort season keys within each series (nulls last), episodes by episode_number.
|
||||
const seriesGroups: ReviewGroup[] = [];
|
||||
for (const [seriesKey, entry] of seriesMap) {
|
||||
const seasonKeys = [...entry.seasons.keys()].sort((a, b) => {
|
||||
if (a === null) return 1;
|
||||
if (b === null) return -1;
|
||||
return a - b;
|
||||
});
|
||||
const seasons = seasonKeys.map((season) => ({
|
||||
season,
|
||||
episodes: (entry.seasons.get(season) ?? []).sort(
|
||||
(a, b) => (a.episode_number ?? 0) - (b.episode_number ?? 0),
|
||||
),
|
||||
}));
|
||||
const episodeCount = seasons.reduce((sum, s) => sum + s.episodes.length, 0);
|
||||
seriesGroups.push({
|
||||
kind: "series",
|
||||
seriesKey,
|
||||
seriesName: entry.seriesName,
|
||||
seriesJellyfinId: entry.seriesJellyfinId,
|
||||
episodeCount,
|
||||
minConfidence: entry.minConfidence,
|
||||
originalLanguage: entry.originalLanguage,
|
||||
seasons,
|
||||
});
|
||||
}
|
||||
|
||||
// Interleave movies + series, sort by (minConfidence, name).
|
||||
const movieGroups: ReviewGroup[] = movies.map((m) => ({ kind: "movie" as const, item: m }));
|
||||
const allGroups = [...movieGroups, ...seriesGroups].sort((a, b) => {
|
||||
const confA = a.kind === "movie" ? a.item.confidence : a.minConfidence;
|
||||
const confB = b.kind === "movie" ? b.item.confidence : b.minConfidence;
|
||||
const rankA = confA === "high" ? 0 : 1;
|
||||
const rankB = confB === "high" ? 0 : 1;
|
||||
if (rankA !== rankB) return rankA - rankB;
|
||||
const nameA = a.kind === "movie" ? a.item.name : a.seriesName;
|
||||
const nameB = b.kind === "movie" ? b.item.name : b.seriesName;
|
||||
return nameA.localeCompare(nameB);
|
||||
});
|
||||
|
||||
const totalItems = movieGroups.length + seriesGroups.reduce((sum, g) => sum + (g as { episodeCount: number }).episodeCount, 0);
|
||||
return { groups: allGroups, totalItems };
|
||||
}
|
||||
```
|
||||
|
||||
(If a drafting-time comment block about "keep minConfidence as the best" remains inside the loop, delete it — the single line `if (row.confidence === "low") entry.minConfidence = "low";` is the correct logic: low wins, so low-confidence groups sort after high-confidence ones.)
|
||||
|
||||
- [ ] **Step 2: Add the `/groups` endpoint**
|
||||
|
||||
Add before `app.get("/pipeline", …)`:
|
||||
|
||||
```ts
|
||||
app.get("/groups", (c) => {
|
||||
const db = getDb();
|
||||
const offset = Math.max(0, Number.parseInt(c.req.query("offset") ?? "0", 10) || 0);
|
||||
const limit = Math.max(1, Math.min(200, Number.parseInt(c.req.query("limit") ?? "25", 10) || 25));
|
||||
|
||||
const { groups, totalItems } = buildReviewGroups(db);
|
||||
const page = groups.slice(offset, offset + limit);
|
||||
|
||||
// Enrich each visible episode/movie with audio streams + transcode reasons
|
||||
// (same shape the existing UI expects — reuse the helper already in this file).
|
||||
const flatItemsForEnrichment: Array<{ id: number; plan_id?: number; item_id: number; transcode_reasons?: string[]; audio_streams?: PipelineAudioStream[] }> = [];
|
||||
for (const g of page) {
|
||||
if (g.kind === "movie") flatItemsForEnrichment.push(g.item as never);
|
||||
else for (const s of g.seasons) for (const ep of s.episodes) flatItemsForEnrichment.push(ep as never);
|
||||
}
|
||||
enrichWithStreamsAndReasons(flatItemsForEnrichment);
|
||||
|
||||
return c.json<ReviewGroupsResponse>({
|
||||
groups: page,
|
||||
totalGroups: groups.length,
|
||||
totalItems,
|
||||
hasMore: offset + limit < groups.length,
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
If the endpoint body references types such as `PipelineAudioStream` or `PipelineReviewItem` that are not yet imported in this file, add them to the existing import block.
|
||||
|
||||
- [ ] **Step 3: Modify `/pipeline` to drop `review`/`reviewTotal`**
|
||||
|
||||
In the existing `app.get("/pipeline", …)` handler (around line 270):
|
||||
|
||||
- Delete the `review` SELECT (lines ~278–293) and the enrichment of `review` rows.
|
||||
- Delete the `reviewTotal` count query (lines ~294–296).
|
||||
- Add in its place: `const reviewItemsTotal = (db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0").get() as { n: number }).n;`
|
||||
- In the final `return c.json({...})` (line ~430), replace `review, reviewTotal` with `reviewItemsTotal`.
|
||||
|
||||
- [ ] **Step 4: Run tests + lint + tsc**
|
||||
|
||||
```
|
||||
mise exec bun -- bun test
|
||||
mise exec bun -- bun run lint
|
||||
mise exec bun -- bunx tsc --noEmit --project tsconfig.server.json
|
||||
```
|
||||
|
||||
All must pass. If tests that hit `/pipeline` fail because they expect `review[]`, update them in the same commit (they need to migrate anyway).
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add server/api/review.ts
|
||||
git commit -m "review: add /groups endpoint with server-side grouping + pagination"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: Server — test `/groups` endpoint
|
||||
|
||||
**Files:**
|
||||
- Create: `server/api/__tests__/review-groups.test.ts`
|
||||
|
||||
- [ ] **Step 1: Write the test file**
|
||||
|
||||
```ts
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { Hono } from "hono";
|
||||
import reviewRoutes from "../review";
|
||||
import { setupTestDb, seedItem, seedPlan } from "./test-helpers"; // adjust to the project's test helpers; see existing webhook.test.ts for how tests wire up a DB
|
||||
|
||||
const app = new Hono();
|
||||
app.route("/api/review", reviewRoutes);
|
||||
|
||||
describe("GET /api/review/groups", () => {
|
||||
test("returns complete series even when total items exceed limit", async () => {
|
||||
const db = setupTestDb();
|
||||
// Seed 1 series with 30 episodes, all pending non-noop
|
||||
for (let i = 1; i <= 30; i++) seedItem(db, { type: "Episode", seriesName: "Breaking Bad", seasonNumber: 1, episodeNumber: i });
|
||||
for (const row of db.prepare("SELECT id FROM media_items").all() as { id: number }[]) seedPlan(db, row.id, { pending: true, isNoop: false });
|
||||
|
||||
const res = await app.request("/api/review/groups?offset=0&limit=25");
|
||||
const body = await res.json();
|
||||
|
||||
expect(body.groups).toHaveLength(1);
|
||||
expect(body.groups[0].kind).toBe("series");
|
||||
expect(body.groups[0].episodeCount).toBe(30);
|
||||
expect(body.groups[0].seasons[0].episodes).toHaveLength(30);
|
||||
expect(body.totalItems).toBe(30);
|
||||
expect(body.hasMore).toBe(false);
|
||||
});
|
||||
|
||||
test("paginates groups with hasMore=true", async () => {
|
||||
const db = setupTestDb();
|
||||
for (let i = 1; i <= 50; i++) seedItem(db, { type: "Movie", name: `Movie ${String(i).padStart(2, "0")}` });
|
||||
for (const row of db.prepare("SELECT id FROM media_items").all() as { id: number }[]) seedPlan(db, row.id, { pending: true, isNoop: false });
|
||||
|
||||
const page1 = await (await app.request("/api/review/groups?offset=0&limit=25")).json();
|
||||
const page2 = await (await app.request("/api/review/groups?offset=25&limit=25")).json();
|
||||
|
||||
expect(page1.groups).toHaveLength(25);
|
||||
expect(page1.hasMore).toBe(true);
|
||||
expect(page2.groups).toHaveLength(25);
|
||||
expect(page2.hasMore).toBe(false);
|
||||
const ids1 = page1.groups.map((g: { item: { item_id: number } }) => g.item.item_id);
|
||||
const ids2 = page2.groups.map((g: { item: { item_id: number } }) => g.item.item_id);
|
||||
expect(ids1.filter((id: number) => ids2.includes(id))).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("buckets episodes by season, nulls last", async () => {
|
||||
const db = setupTestDb();
|
||||
for (let ep = 1; ep <= 3; ep++) seedItem(db, { type: "Episode", seriesName: "Lost", seasonNumber: 1, episodeNumber: ep });
|
||||
for (let ep = 1; ep <= 2; ep++) seedItem(db, { type: "Episode", seriesName: "Lost", seasonNumber: 2, episodeNumber: ep });
|
||||
seedItem(db, { type: "Episode", seriesName: "Lost", seasonNumber: null, episodeNumber: null });
|
||||
for (const row of db.prepare("SELECT id FROM media_items").all() as { id: number }[]) seedPlan(db, row.id, { pending: true, isNoop: false });
|
||||
|
||||
const body = await (await app.request("/api/review/groups?offset=0&limit=25")).json();
|
||||
const lost = body.groups[0];
|
||||
expect(lost.kind).toBe("series");
|
||||
expect(lost.seasons.map((s: { season: number | null }) => s.season)).toEqual([1, 2, null]);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Important: this test file needs the project's actual test-helpers pattern. Before writing, look at `server/services/__tests__/webhook.test.ts` (the 60-line one that's still in the repo after the verified-flag block was removed) and **copy its setup style** — including how it creates a test DB, how it seeds media_items and review_plans, and how it invokes the Hono app. Replace the placeholder `setupTestDb`, `seedItem`, `seedPlan` calls with whatever the real helpers are.
|
||||
|
||||
- [ ] **Step 2: Run the tests**
|
||||
|
||||
```
|
||||
mise exec bun -- bun test server/api/__tests__/review-groups.test.ts
|
||||
```
|
||||
|
||||
Expected: 3 passes.
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add server/api/__tests__/review-groups.test.ts
|
||||
git commit -m "test: /groups endpoint — series completeness, pagination, season buckets"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: Client types + PipelinePage
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/shared/lib/types.ts`
|
||||
- Modify: `src/features/pipeline/PipelinePage.tsx`
|
||||
|
||||
- [ ] **Step 1: Update shared types**
|
||||
|
||||
In `src/shared/lib/types.ts`, replace the `PipelineData` interface's `review` and `reviewTotal` fields with `reviewItemsTotal: number`. Add types for the new groups response:
|
||||
|
||||
```ts
|
||||
export type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
minConfidence: "high" | "low";
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
|
||||
export interface ReviewGroupsResponse {
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
The `PipelineData` interface becomes:
|
||||
```ts
|
||||
export interface PipelineData {
|
||||
reviewItemsTotal: number;
|
||||
queued: PipelineJobItem[];
|
||||
processing: PipelineJobItem[];
|
||||
done: PipelineJobItem[];
|
||||
doneCount: number;
|
||||
jellyfinUrl: string;
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update PipelinePage**
|
||||
|
||||
Change `PipelinePage.tsx`:
|
||||
|
||||
- Add state for the initial groups page: `const [initialGroups, setInitialGroups] = useState<ReviewGroupsResponse | null>(null);`
|
||||
- In `load()`, fetch both in parallel:
|
||||
```ts
|
||||
const [pipelineRes, groupsRes] = await Promise.all([
|
||||
api.get<PipelineData>("/api/review/pipeline"),
|
||||
api.get<ReviewGroupsResponse>("/api/review/groups?offset=0&limit=25"),
|
||||
]);
|
||||
setData(pipelineRes);
|
||||
setInitialGroups(groupsRes);
|
||||
```
|
||||
- Wait for both before rendering (loading gate: `if (loading || !data || !initialGroups) return <Loading />`).
|
||||
- Pass to ReviewColumn: `<ReviewColumn initialResponse={initialGroups} totalItems={data.reviewItemsTotal} jellyfinUrl={data.jellyfinUrl} onMutate={load} />` — drop `items` and `total` props.
|
||||
|
||||
- [ ] **Step 3: Tsc + lint**
|
||||
|
||||
```
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bun run lint
|
||||
```
|
||||
|
||||
Expected: errors in `ReviewColumn.tsx` because its props type hasn't been updated yet — that's fine, Task 4 fixes it. For this step, only verify that types.ts and PipelinePage.tsx themselves compile internally. If the build breaks because of ReviewColumn, commit these two files anyway and proceed to Task 4 immediately.
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add src/shared/lib/types.ts src/features/pipeline/PipelinePage.tsx
|
||||
git commit -m "pipeline: fetch review groups endpoint in parallel with pipeline"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: Client — ReviewColumn with infinite scroll
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/pipeline/ReviewColumn.tsx`
|
||||
|
||||
- [ ] **Step 1: Rewrite ReviewColumn**
|
||||
|
||||
Replace the file contents with:
|
||||
|
||||
```tsx
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { ReviewGroup, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { ColumnShell } from "./ColumnShell";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
import { SeriesCard } from "./SeriesCard";
|
||||
|
||||
const PAGE_SIZE = 25;
|
||||
|
||||
interface ReviewColumnProps {
|
||||
initialResponse: ReviewGroupsResponse;
|
||||
totalItems: number;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
export function ReviewColumn({ initialResponse, totalItems, jellyfinUrl, onMutate }: ReviewColumnProps) {
|
||||
const [groups, setGroups] = useState<ReviewGroup[]>(initialResponse.groups);
|
||||
const [hasMore, setHasMore] = useState(initialResponse.hasMore);
|
||||
const [loadingMore, setLoadingMore] = useState(false);
|
||||
const sentinelRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
// Reset when parent passes a new initial page (onMutate refetch)
|
||||
useEffect(() => {
|
||||
setGroups(initialResponse.groups);
|
||||
setHasMore(initialResponse.hasMore);
|
||||
}, [initialResponse]);
|
||||
|
||||
const loadMore = useCallback(async () => {
|
||||
if (loadingMore || !hasMore) return;
|
||||
setLoadingMore(true);
|
||||
try {
|
||||
const res = await api.get<ReviewGroupsResponse>(`/api/review/groups?offset=${groups.length}&limit=${PAGE_SIZE}`);
|
||||
setGroups((prev) => [...prev, ...res.groups]);
|
||||
setHasMore(res.hasMore);
|
||||
} finally {
|
||||
setLoadingMore(false);
|
||||
}
|
||||
}, [groups.length, hasMore, loadingMore]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!hasMore || !sentinelRef.current) return;
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
if (entries[0]?.isIntersecting) loadMore();
|
||||
},
|
||||
{ rootMargin: "200px" },
|
||||
);
|
||||
observer.observe(sentinelRef.current);
|
||||
return () => observer.disconnect();
|
||||
}, [hasMore, loadMore]);
|
||||
|
||||
const skipAll = async () => {
|
||||
if (!confirm(`Skip all ${totalItems} pending items? They won't be processed unless you unskip them.`)) return;
|
||||
await api.post("/api/review/skip-all");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const autoApprove = async () => {
|
||||
const res = await api.post<{ ok: boolean; count: number }>("/api/review/auto-approve");
|
||||
onMutate();
|
||||
if (res.count === 0) alert("No high-confidence items to auto-approve.");
|
||||
};
|
||||
|
||||
const approveItem = async (itemId: number) => {
|
||||
await api.post(`/api/review/${itemId}/approve`);
|
||||
onMutate();
|
||||
};
|
||||
const skipItem = async (itemId: number) => {
|
||||
await api.post(`/api/review/${itemId}/skip`);
|
||||
onMutate();
|
||||
};
|
||||
const approveBatch = async (itemIds: number[]) => {
|
||||
if (itemIds.length === 0) return;
|
||||
await api.post<{ ok: boolean; count: number }>("/api/review/approve-batch", { itemIds });
|
||||
onMutate();
|
||||
};
|
||||
|
||||
// Compute ids per visible group for "Approve above"
|
||||
const idsByGroup: number[][] = groups.map((g) =>
|
||||
g.kind === "movie" ? [g.item.item_id] : g.seasons.flatMap((s) => s.episodes.map((ep) => ep.item_id)),
|
||||
);
|
||||
const priorIds = (index: number): number[] => idsByGroup.slice(0, index).flat();
|
||||
|
||||
const actions =
|
||||
totalItems > 0
|
||||
? [
|
||||
{ label: "Auto Review", onClick: autoApprove, primary: true },
|
||||
{ label: "Skip all", onClick: skipAll },
|
||||
]
|
||||
: undefined;
|
||||
|
||||
return (
|
||||
<ColumnShell title="Review" count={totalItems} actions={actions}>
|
||||
<div className="space-y-2">
|
||||
{groups.map((group, index) => {
|
||||
const prior = index > 0 ? priorIds(index) : null;
|
||||
const onApproveUpToHere = prior && prior.length > 0 ? () => approveBatch(prior) : undefined;
|
||||
if (group.kind === "movie") {
|
||||
return (
|
||||
<PipelineCard
|
||||
key={group.item.id}
|
||||
item={group.item}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${group.item.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={() => approveItem(group.item.item_id)}
|
||||
onSkip={() => skipItem(group.item.item_id)}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<SeriesCard
|
||||
key={group.seriesKey}
|
||||
seriesKey={group.seriesKey}
|
||||
seriesName={group.seriesName}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
seriesJellyfinId={group.seriesJellyfinId}
|
||||
seasons={group.seasons}
|
||||
episodeCount={group.episodeCount}
|
||||
originalLanguage={group.originalLanguage}
|
||||
onMutate={onMutate}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{groups.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items to review</p>}
|
||||
{hasMore && (
|
||||
<div ref={sentinelRef} className="py-4 text-center text-xs text-gray-400">
|
||||
{loadingMore ? "Loading more…" : ""}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ColumnShell>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Tsc + lint**
|
||||
|
||||
```
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bun run lint
|
||||
```
|
||||
|
||||
Expected: the call site in ReviewColumn passes `seasons`, `episodeCount`, `originalLanguage` props to SeriesCard — this will fail until Task 5 updates SeriesCard. Same handling as Task 3 step 3: commit and proceed.
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add src/features/pipeline/ReviewColumn.tsx
|
||||
git commit -m "review column: infinite scroll with IntersectionObserver sentinel"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: Client — SeriesCard season nesting
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/pipeline/SeriesCard.tsx`
|
||||
|
||||
- [ ] **Step 1: Rewrite SeriesCard**
|
||||
|
||||
Replace the file contents with:
|
||||
|
||||
```tsx
|
||||
import { useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import { LANG_NAMES } from "~/shared/lib/lang";
|
||||
import type { PipelineReviewItem } from "~/shared/lib/types";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
|
||||
interface SeriesCardProps {
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
jellyfinUrl: string;
|
||||
seriesJellyfinId: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
episodeCount: number;
|
||||
originalLanguage: string | null;
|
||||
onMutate: () => void;
|
||||
onApproveUpToHere?: () => void;
|
||||
}
|
||||
|
||||
export function SeriesCard({
|
||||
seriesKey,
|
||||
seriesName,
|
||||
jellyfinUrl,
|
||||
seriesJellyfinId,
|
||||
seasons,
|
||||
episodeCount,
|
||||
originalLanguage,
|
||||
onMutate,
|
||||
onApproveUpToHere,
|
||||
}: SeriesCardProps) {
|
||||
const [expanded, setExpanded] = useState(false);
|
||||
|
||||
const flatEpisodes = seasons.flatMap((s) => s.episodes);
|
||||
const highCount = flatEpisodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = flatEpisodes.filter((e) => e.confidence === "low").length;
|
||||
const multipleSeasons = seasons.length > 1;
|
||||
|
||||
const setSeriesLanguage = async (lang: string) => {
|
||||
await api.patch(`/api/review/series/${encodeURIComponent(seriesKey)}/language`, { language: lang });
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const approveSeries = async () => {
|
||||
await api.post(`/api/review/series/${encodeURIComponent(seriesKey)}/approve-all`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const approveSeason = async (season: number | null) => {
|
||||
if (season == null) return;
|
||||
await api.post(`/api/review/season/${encodeURIComponent(seriesKey)}/${season}/approve-all`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const jellyfinLink =
|
||||
jellyfinUrl && seriesJellyfinId ? `${jellyfinUrl}/web/index.html#!/details?id=${seriesJellyfinId}` : null;
|
||||
|
||||
return (
|
||||
<div className="group/series rounded-lg border bg-white overflow-hidden">
|
||||
{/* Title row */}
|
||||
<div
|
||||
className="flex items-center gap-2 px-3 pt-3 pb-1 cursor-pointer hover:bg-gray-50 rounded-t-lg"
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
>
|
||||
<span className="text-xs text-gray-400 shrink-0">{expanded ? "▼" : "▶"}</span>
|
||||
{jellyfinLink ? (
|
||||
<a
|
||||
href={jellyfinLink}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-sm font-medium truncate hover:text-blue-600 hover:underline"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
{seriesName}
|
||||
</a>
|
||||
) : (
|
||||
<p className="text-sm font-medium truncate">{seriesName}</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Controls row */}
|
||||
<div className="flex items-center gap-2 px-3 pb-3 pt-1">
|
||||
<span className="text-xs text-gray-500 shrink-0">{episodeCount} eps</span>
|
||||
{multipleSeasons && <span className="text-xs text-gray-500 shrink-0">· {seasons.length} seasons</span>}
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<div className="flex-1" />
|
||||
<select
|
||||
className="h-6 text-xs border border-gray-300 rounded px-1 bg-white shrink-0"
|
||||
value={originalLanguage ?? ""}
|
||||
onChange={(e) => {
|
||||
e.stopPropagation();
|
||||
setSeriesLanguage(e.target.value);
|
||||
}}
|
||||
>
|
||||
<option value="">unknown</option>
|
||||
{Object.entries(LANG_NAMES).map(([code, name]) => (
|
||||
<option key={code} value={code}>
|
||||
{name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveUpToHere();
|
||||
}}
|
||||
title="Approve every card listed above this one"
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0 opacity-0 group-hover/series:opacity-100 transition-opacity"
|
||||
>
|
||||
↑ Approve above
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
approveSeries();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded bg-blue-600 text-white hover:bg-blue-700 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve series
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{expanded && (
|
||||
<div className="border-t">
|
||||
{multipleSeasons
|
||||
? seasons.map((s) => (
|
||||
<SeasonGroup
|
||||
key={s.season ?? "unknown"}
|
||||
season={s.season}
|
||||
episodes={s.episodes}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onApproveSeason={() => approveSeason(s.season)}
|
||||
onMutate={onMutate}
|
||||
/>
|
||||
))
|
||||
: flatEpisodes.map((ep) => (
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function SeasonGroup({
|
||||
season,
|
||||
episodes,
|
||||
jellyfinUrl,
|
||||
onApproveSeason,
|
||||
onMutate,
|
||||
}: {
|
||||
season: number | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
jellyfinUrl: string;
|
||||
onApproveSeason: () => void;
|
||||
onMutate: () => void;
|
||||
}) {
|
||||
const [open, setOpen] = useState(false);
|
||||
const highCount = episodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = episodes.filter((e) => e.confidence === "low").length;
|
||||
const label = season == null ? "No season" : `Season ${String(season).padStart(2, "0")}`;
|
||||
|
||||
return (
|
||||
<div className="border-t first:border-t-0">
|
||||
<div
|
||||
className="flex items-center gap-2 px-3 py-2 cursor-pointer hover:bg-gray-50"
|
||||
onClick={() => setOpen(!open)}
|
||||
>
|
||||
<span className="text-xs text-gray-400 shrink-0">{open ? "▼" : "▶"}</span>
|
||||
<span className="text-xs font-medium shrink-0">{label}</span>
|
||||
<span className="text-xs text-gray-500 shrink-0">· {episodes.length} eps</span>
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<div className="flex-1" />
|
||||
{season != null && (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveSeason();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve season
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
{open && (
|
||||
<div className="px-3 pb-3 space-y-2 pt-2">
|
||||
{episodes.map((ep) => (
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Single episode row: a PipelineCard wrapped in consistent padding so an
 * episode looks the same whether it renders directly under the series or
 * inside a nested season group.
 *
 * Every action follows the same shape — fire the API call, then ask the
 * parent to refetch via onMutate — so the three handlers share one helper.
 */
function EpisodeRow({ ep, jellyfinUrl, onMutate }: { ep: PipelineReviewItem; jellyfinUrl: string; onMutate: () => void }) {
  // Await the request, then trigger the parent refetch.
  const mutateAfter = async (request: Promise<unknown>) => {
    await request;
    onMutate();
  };

  return (
    <div className="px-3 py-1">
      <PipelineCard
        item={ep}
        jellyfinUrl={jellyfinUrl}
        onToggleStream={(streamId, action) =>
          mutateAfter(api.patch(`/api/review/${ep.item_id}/stream/${streamId}`, { action }))
        }
        onApprove={() => mutateAfter(api.post(`/api/review/${ep.item_id}/approve`))}
        onSkip={() => mutateAfter(api.post(`/api/review/${ep.item_id}/skip`))}
      />
    </div>
  );
}
|
||||
```
|
||||
|
||||
(The `EpisodeRow` wrapper keeps the padding consistent whether episodes render directly under the series or under a season group.)
|
||||
|
||||
- [ ] **Step 2: Lint + tsc + test + build**
|
||||
|
||||
```
|
||||
mise exec bun -- bun run lint
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bun test
|
||||
mise exec bun -- bun run build
|
||||
```
|
||||
|
||||
All must pass now that the whole pipeline (server → types → PipelinePage → ReviewColumn → SeriesCard) is consistent.
|
||||
|
||||
- [ ] **Step 3: Manual smoke test**
|
||||
|
||||
```
|
||||
mise exec bun -- bun run dev
|
||||
```
|
||||
|
||||
Navigate to the Pipeline page:
|
||||
- Confirm no "Showing first 500 of N" banner.
|
||||
- Scroll the Review column to the bottom; new groups auto-load.
|
||||
- Find a series with pending work in >1 season; expand it; confirm nested seasons with working `Approve season` button.
|
||||
- Find a series with pending work in a single season; expand it; confirm flat episode list (no season nesting).
|
||||
- Click `Approve series` on a series with many pending episodes; confirm the whole series vanishes from the column.
|
||||
|
||||
Kill the dev server.
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add src/features/pipeline/SeriesCard.tsx
|
||||
git commit -m "series card: nest seasons when >1 pending, add Approve season button"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 6: Version bump + final push
|
||||
|
||||
- [ ] **Step 1: Bump CalVer**
|
||||
|
||||
In `package.json`, set version to today's next free dot-suffix (today is 2026-04-15; prior releases are `.1` and `.2`, so use `.3` unless already taken).
|
||||
|
||||
- [ ] **Step 2: Final checks**
|
||||
|
||||
```
|
||||
mise exec bun -- bun run lint
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bunx tsc --noEmit --project tsconfig.server.json
|
||||
mise exec bun -- bun test
|
||||
mise exec bun -- bun run build
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Commit + push**
|
||||
|
||||
```bash
|
||||
git add package.json
|
||||
git commit -m "v2026.04.15.3 — review column lazy-load + season grouping"
|
||||
git push gitea main
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Guided Gates (user-verified)
|
||||
|
||||
- **GG-1:** No "Showing first 500 of N" banner.
|
||||
- **GG-2:** A series with episodes previously split across the cap now shows the correct episode count.
|
||||
- **GG-3:** A series with >1 pending season expands into nested season groups, each with a working `Approve season` button.
|
||||
- **GG-4:** A series with 1 pending season expands flat (no extra nesting).
|
||||
- **GG-5:** Scrolling to the bottom of Review auto-loads the next page; no scroll = no extra fetch.
|
||||
@@ -0,0 +1,47 @@
|
||||
# Scan Page Rework Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Rework the Scan page to prioritize progress + fresh ingest visibility, and add a scalable filterable/lazy-loaded library table.
|
||||
|
||||
**Architecture:** Keep `/api/scan` lightweight for status/progress and compact recent ingest rows. Add `/api/scan/items` for paginated/filterable DB browsing. Update `ScanPage` to render: scan card header count, compact 5-row recent ingest table, then a filterable lazy-loaded library table.
|
||||
|
||||
**Tech Stack:** Bun + Hono, React 19 + TanStack Router, bun:test, Biome.
|
||||
|
||||
---
|
||||
|
||||
### Task 1: Backend scan payload + items endpoint (TDD)
|
||||
|
||||
**Files:**
|
||||
- Modify: `server/api/__tests__/scan.test.ts`
|
||||
- Modify: `server/db/schema.ts`
|
||||
- Modify: `server/db/index.ts`
|
||||
- Modify: `server/services/rescan.ts`
|
||||
- Modify: `server/api/scan.ts`
|
||||
|
||||
- [ ] Add failing tests for scan item query parsing/normalization and SQL filter behavior helpers.
|
||||
- [ ] Run targeted tests to verify failure.
|
||||
- [ ] Add `media_items.ingest_source` schema + migration, set value on upsert (`scan`/`webhook`).
|
||||
- [ ] Extend `GET /api/scan` recent item shape with timestamp + ingest source and clamp to 5 rows.
|
||||
- [ ] Add `GET /api/scan/items` with filters (`q,status,type,source`) + pagination (`offset,limit`), returning `{ rows,total,hasMore }`.
|
||||
- [ ] Run targeted and full backend tests.
|
||||
|
||||
### Task 2: Scan page UI rework + lazy table
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/scan/ScanPage.tsx`
|
||||
|
||||
- [ ] Refactor scan box header to show scanned count in top-right.
|
||||
- [ ] Replace large recent-items table with a compact 5-row recent ingest list directly under progress bar.
|
||||
- [ ] Add filter controls for library table (`q,status,type,source`) with default “All”.
|
||||
- [ ] Add lazy loading flow (initial fetch + load more) against `/api/scan/items`.
|
||||
- [ ] Render new table with useful file metadata columns and consistent truncation/tooltips.
|
||||
|
||||
### Task 3: Verification
|
||||
|
||||
**Files:**
|
||||
- Modify: none
|
||||
|
||||
- [ ] Run `bun test`.
|
||||
- [ ] Run `bun run lint` and format if needed.
|
||||
- [ ] Confirm no regressions in scan start/stop/progress behavior.
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,111 @@
|
||||
# Review column lazy-load + season grouping
|
||||
|
||||
Date: 2026-04-15
|
||||
|
||||
## Summary
|
||||
|
||||
Replace the Review column's 500-item hard cap with server-side group-paginated lazy loading. Series are always returned complete (every pending non-noop episode, grouped by season), eliminating the "2 eps" mirage caused by groups getting split across the cap. When a series has pending work in more than one season, the UI nests seasons as collapsible sub-groups, each with its own "Approve season" button.
|
||||
|
||||
## Motivation
|
||||
|
||||
`server/api/review.ts:277` caps the pipeline's review list at 500 items. ReviewColumn groups client-side, so any series whose episodes spill beyond the cap shows a wrong episode count and partial episode list. The banner "Showing first 500 of N" is present but misleading — the *groups* don't survive the cut, not just the tail.
|
||||
|
||||
The existing "Approve all" button on a series card already calls `/series/:seriesKey/approve-all`, which operates on the DB directly and does approve every pending episode — so functionality works, only the display is wrong. Still, partial groups are confusing and the 500 cap forces users to approve in waves.
|
||||
|
||||
## Server changes
|
||||
|
||||
### New endpoint `GET /api/review/groups?offset=0&limit=25`
|
||||
|
||||
Response:
|
||||
```ts
|
||||
{
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
minConfidence: "high" | "low";
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
```
|
||||
|
||||
Ordering:
|
||||
- Groups ordered by (min confidence across group ASC — `high` < `low`), then (series_name or movie name ASC)
|
||||
- Within a series, seasons ordered by `season_number` ASC (`null` last)
|
||||
- Within a season, episodes ordered by `episode_number` ASC
|
||||
|
||||
Implementation outline:
|
||||
1. Query all pending non-noop plans joined to media_items (existing `review` query minus the LIMIT).
|
||||
2. Walk once in sort order, producing groups: a Movie becomes a one-shot `{ kind: "movie" }`; consecutive Episodes sharing `series_jellyfin_id` (or `series_name` fallback) accumulate into a `{ kind: "series" }` with `seasons` bucketed by `season_number`.
|
||||
3. Apply `.slice(offset, offset + limit)` over the full group list, enrich per-episode audio streams + transcode reasons for episodes that survive (reuse existing `enrichWithStreamsAndReasons`).
|
||||
4. `totalGroups` = full group count before slicing. `totalItems` = sum of episode counts + movie count (unchanged from today's `reviewTotal`). `hasMore` = `offset + limit < totalGroups`.
|
||||
|
||||
### `GET /api/review/pipeline` changes
|
||||
|
||||
Drop `review` and `reviewTotal` from the response. Add `reviewItemsTotal: number` so the column header shows a count before the groups endpoint resolves. Queue / Processing / Done / doneCount stay unchanged.
|
||||
|
||||
### Kept as-is
|
||||
|
||||
- `POST /api/review/series/:seriesKey/approve-all` (`review.ts:529`)
|
||||
- `POST /api/review/season/:seriesKey/:season/approve-all` (`review.ts:549`) — already implemented, just unused by the UI until now
|
||||
|
||||
## Client changes
|
||||
|
||||
### PipelinePage
|
||||
|
||||
Fetches `/api/review/pipeline` for queue columns (existing) and separately `/api/review/groups?offset=0&limit=25` for the Review column's initial page. `onMutate` refetches both. Pass `reviewGroups`, `reviewGroupsTotalItems`, `reviewHasMore` into `ReviewColumn`.
|
||||
|
||||
### ReviewColumn
|
||||
|
||||
Replace the hard-cap rendering with infinite scroll:
|
||||
- Render the current loaded groups.
|
||||
- Append a sentinel `<div>` at the bottom when `hasMore`. An `IntersectionObserver` attached to it triggers a fetch of the next page when it enters the scroll viewport.
|
||||
- Pagination state (`offset`, `groups`, `hasMore`, `loading`) lives locally in ReviewColumn — parent passes `initialGroups` on mount and whenever the filter changes (`onMutate` → parent refetches page 0).
|
||||
- Remove the "Showing first N of M" banner and the `truncated` logic.
|
||||
|
||||
### SeriesCard
|
||||
|
||||
When `seasons.length > 1`:
|
||||
- Render seasons as collapsible sub-groups inside the expanded series body.
|
||||
- Each season header: `S{NN} — {episodeCount} eps · {high} high / {low} low` + an `Approve season` button.
|
||||
|
||||
When `seasons.length === 1`:
|
||||
- Render the current flat episode list (no extra nesting).
|
||||
|
||||
Rename the existing header button `Approve all` → `Approve series`.
|
||||
|
||||
### "Approve above"
|
||||
|
||||
Keeps its current "approve every group currently visible above this card" semantic. With lazy loading, that means "everything the user has scrolled past". Compute item ids client-side across the loaded groups as today. No endpoint change.
|
||||
|
||||
## Data flow
|
||||
|
||||
1. PipelinePage mounts → parallel fetch `/pipeline` + `/groups?offset=0&limit=25`.
|
||||
2. User scrolls; sentinel becomes visible → fetch `/groups?offset=25&limit=25`; appended to the list.
|
||||
3. User clicks `Approve series` on a card → `POST /series/:key/approve-all` → `onMutate` → parent refetches `/pipeline` + `/groups?offset=0&limit=25`. Series gone from list.
|
||||
4. User clicks `Approve season S02` on a nested season → `POST /season/:key/2/approve-all` → `onMutate` → same refetch.
|
||||
|
||||
## Testing
|
||||
|
||||
- Server unit test: `/groups` endpoint returns each series complete — every pending episode included — even when that series' episode count alone exceeds a single page's `limit`.
|
||||
- Server unit test: offset/limit/hasMore correctness across the group boundary.
|
||||
- Server unit test: seasons array is populated, sorted, with `null` season_number ordered last.
|
||||
- Manual: scroll through the Review column on a library with >1000 pending items and confirm episode counts match `SELECT COUNT(*) ... WHERE pending AND is_noop=0` scoped per series.
|
||||
|
||||
## Guided Gates
|
||||
|
||||
- **GG-1:** No "Showing first 500 of N" banner ever appears.
|
||||
- **GG-2:** A series whose episodes previously split across the cap now shows the correct episode count immediately on first page load (if the series is in the first page) or after scroll (if not).
|
||||
- **GG-3:** A series with pending episodes in 2+ seasons expands into nested season sub-groups, each with an `Approve season` button that approves only that season.
|
||||
- **GG-4:** A series with pending episodes in exactly one season expands into the flat episode list as before.
|
||||
- **GG-5:** Scrolling to the bottom of the Review column auto-fetches the next page without a click; scrolling stops fetching when `hasMore` is false.
|
||||
@@ -0,0 +1,337 @@
|
||||
# Inbox column & auto-processing — design
|
||||
|
||||
**Date:** 2026-04-18
|
||||
**Status:** Draft for implementation
|
||||
|
||||
## Summary
|
||||
|
||||
Split today's single `Review` column on the Pipeline page into two user-facing buckets
|
||||
(**Inbox** and **Review**) plus an automatic distribution step. The analyzer
|
||||
classifies each plan into `auto` / `auto_heuristic` / `manual`. A per-install
|
||||
`auto_processing` toggle controls whether the distribution runs automatically
|
||||
after every scan or requires a manual "Auto Review" click.
|
||||
|
||||
## Motivation
|
||||
|
||||
Today the Review column mixes two very different workloads:
|
||||
|
||||
- Items where the correct decision is mechanical (one language track, authoritative
|
||||
OG language, no ambiguity) — a human click adds no value.
|
||||
- Items where the decision is genuinely ambiguous (unknown OG, unlabeled tracks,
|
||||
OG not present in the file) — a human must look.
|
||||
|
||||
The user currently has to visually scan the whole column and cherry-pick. The
|
||||
existing `confidence = 'high'` flag partially captures this, but it tracks
|
||||
metadata source authority, not decision clarity: a `confidence=high` item can
|
||||
still be one where the analyzer dropped a same-language commentary track via a
|
||||
title heuristic.
|
||||
|
||||
## Goals
|
||||
|
||||
- Route mechanical decisions straight to the Queue (optionally, on scan).
|
||||
- Give the user a single "glance and approve" lane for decisions that were
|
||||
automatic but relied on a text heuristic.
|
||||
- Keep genuinely ambiguous items in a dedicated manual-review lane.
|
||||
- Do not regress the current workflow when auto-processing is disabled.
|
||||
|
||||
## Non-goals
|
||||
|
||||
- Parallel ffmpeg execution (multiple jobs at once). The Processing column's
|
||||
header gains a ready-to-populate subtitle slot, but the runtime change is a
|
||||
separate spec.
|
||||
- Webhook ingestion fixes. Webhook-ingested items will flow through the same
|
||||
Inbox → Review/Queue path as scanned items, but we are not fixing any
|
||||
existing webhook reliability issue in this work.
|
||||
- Changes to noop handling, subtitle extraction, or the job runner.
|
||||
|
||||
## Column layout
|
||||
|
||||
Five columns, left to right: **Inbox → Review → Queue → Processing → Done**.
|
||||
|
||||
```
|
||||
┌──────────┐ ┌──────────┐ ┌────────┐ ┌────────────┐ ┌──────┐
|
||||
│ Inbox │→ │ Review │→ │ Queue │→ │ Processing │→ │ Done │
|
||||
│ │ │ │ │ │ │ │ │ │
|
||||
│ [Auto ▸] │ │ [Approve │ │ │ │ │ │ │
|
||||
│ │ │ ready] │ │ │ │ │ │ │
|
||||
└──────────┘ └──────────┘ └────────┘ └────────────┘ └──────┘
|
||||
```
|
||||
|
||||
Each column header uses a common subtitle slot:
|
||||
|
||||
| Column | Subtitle content |
|
||||
|------------|--------------------------------------------------------------|
|
||||
| Inbox | `auto-processing on` / `auto-processing off` |
|
||||
| Review | `N ready · M need decisions` |
|
||||
| Queue | `scheduled: HH:MM–HH:MM` or `running anytime` |
|
||||
| Processing | `sequential` (placeholder — populated when parallel lands) |
|
||||
| Done | `N in desired state` (replaces today's top-right counter) |
|
||||
|
||||
The pipeline page header gains a single `Auto-process new items` checkbox,
|
||||
mirroring the `auto_processing` setting — flipping it writes to config
|
||||
immediately and (when turning on) kicks off a one-shot sort so the Inbox drains.
|
||||
|
||||
## Classification rules
|
||||
|
||||
The analyzer (`server/services/analyzer.ts`) gains an `auto_class` output
|
||||
alongside the existing `is_noop`, `job_type`, etc. Values:
|
||||
|
||||
**`auto`** — safe to enqueue without a glance. All must hold:
|
||||
|
||||
- `original_language` is set AND `orig_lang_source` ∈ `{radarr, sonarr, manual}`.
|
||||
- `needs_review = 0` (no conflict between Jellyfin's language and the
|
||||
authoritative source).
|
||||
- At least one kept audio track has a language tag matching OG (OG is
|
||||
actually present in the file).
|
||||
- Every kept audio track has an explicit language tag (no `und`/null).
|
||||
- No same-language dedup was resolved by the commentary/AD title heuristic
|
||||
(`NON_PRIMARY_AUDIO_TITLE` in `server/services/analyzer.ts`).
|
||||
|
||||
Covers the "one English track → copy/transcode", "English + additional German
|
||||
kept", and "two English tracks resolved by channel count" cases.
|
||||
|
||||
**`auto_heuristic`** — decision is automatic but relied on a title pattern match.
|
||||
Same as `auto` except a track within a kept-language group was dropped because
|
||||
its title matched the commentary/AD/descriptive pattern.
|
||||
|
||||
**`manual`** — user must look. Any of:
|
||||
|
||||
- OG language unknown.
|
||||
- OG known but `orig_lang_source` not in `{radarr, sonarr, manual}`.
|
||||
- `needs_review = 1`.
|
||||
- OG known but not present in any kept audio track (can't fulfil).
|
||||
- Any kept audio track has a null/`und` language tag.
|
||||
|
||||
Noop items (`is_noop = 1`) never enter Inbox, Review, or Queue — unchanged
|
||||
from today.
|
||||
|
||||
### Classifier placement
|
||||
|
||||
`auto_class` is computed inside `analyzeItem()` and returned on the `PlanResult`
|
||||
type. It is persisted to `review_plans.auto_class` at the same point that
|
||||
`is_noop`, `apple_compat`, `job_type`, and `notes` are written today (the
|
||||
upsert in `server/api/review.ts` around line 165 and the equivalent upsert in
|
||||
`server/services/rescan.ts`).
|
||||
|
||||
## Data model
|
||||
|
||||
### `review_plans` changes
|
||||
|
||||
Add:
|
||||
|
||||
- `auto_class TEXT` — nullable. `'auto' | 'auto_heuristic' | 'manual'`.
|
||||
`NULL` only during the migration window before backfill completes.
|
||||
- `sorted INTEGER NOT NULL DEFAULT 0` — `0` = in Inbox, `1` = distributed.
|
||||
|
||||
Drop:
|
||||
|
||||
- `confidence` — subsumed by `auto_class`. The `auto_class = 'auto'` case is
|
||||
strictly stronger than `confidence = 'high'` (adds the
|
||||
no-heuristic-used constraint).
|
||||
|
||||
Indexes:
|
||||
|
||||
- Add `CREATE INDEX idx_review_plans_sorted ON review_plans(sorted)`.
|
||||
- Drop `idx_review_plans_status` only if unused after the queries below are in
|
||||
place; keep it otherwise (the column-filter queries still benefit).
|
||||
|
||||
### Migration
|
||||
|
||||
Per the project's forward-looking rule, schema changes use the try/catch
|
||||
`ALTER TABLE` pattern in `server/db/index.ts`. Three SQL statements, each
|
||||
wrapped in its own try/catch so existing databases skip already-applied steps:
|
||||
|
||||
```sql
|
||||
ALTER TABLE review_plans ADD COLUMN auto_class TEXT;
|
||||
ALTER TABLE review_plans ADD COLUMN sorted INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE review_plans DROP COLUMN confidence;
|
||||
```
|
||||
|
||||
Backfill on first boot after the migration:
|
||||
|
||||
- Re-run the classifier for every plan with `auto_class IS NULL` and write the
|
||||
result. This re-uses the analyzer — no separate code path.
|
||||
- Set `sorted = 1` for every plan with `status IN ('pending','approved','skipped','done','error')`.
|
||||
Existing plans were already past the Inbox stage in the old world; dumping
|
||||
them back into Inbox on upgrade would create noise.
|
||||
|
||||
New plans created post-migration start with `sorted = 0` unless the
|
||||
`auto_processing` flow upgrades them in the same transaction.
|
||||
|
||||
### `config` changes
|
||||
|
||||
Add key `auto_processing` with default `"0"`. Also add to
|
||||
`server/db/schema.ts` `DEFAULT_CONFIG`.
|
||||
|
||||
## Status & sort transitions
|
||||
|
||||
```
|
||||
scan / rescan creates plan with:
|
||||
status = 'pending'
|
||||
sorted = 0
|
||||
auto_class = <classifier result>
|
||||
|
||||
Sort distributor (sort-inbox endpoint OR auto-on-scan hook) walks every
|
||||
sorted = 0 plan:
|
||||
|
||||
auto_class = 'auto' → sorted = 1, status = 'approved',
|
||||
reviewed_at = now(), job enqueued → Queue
|
||||
auto_class = 'auto_heuristic' → sorted = 1 → Review ⚡
|
||||
auto_class = 'manual' → sorted = 1 → Review ✋
|
||||
```
|
||||
|
||||
`sorted = 1` is a terminal state for the Inbox concept — a rescan does not
|
||||
reset it. If a rescan changes `auto_class` (e.g., radarr now provides a
|
||||
language), the new value is persisted but the plan stays where the user last
|
||||
saw it; the user approves it manually from Review if they want to act on the
|
||||
change.
|
||||
|
||||
## Column queries
|
||||
|
||||
| Column | Query |
|
||||
|------------|-----------------------------------------------------------------------|
|
||||
| Inbox | `status='pending' AND is_noop=0 AND sorted=0` |
|
||||
| Review | `status='pending' AND is_noop=0 AND sorted=1` |
|
||||
| Queue | unchanged — `jobs.status='pending'` |
|
||||
| Processing | unchanged — `jobs.status='running'` |
|
||||
| Done | unchanged |
|
||||
|
||||
Within Review, the badge is driven by `auto_class`:
|
||||
- `auto_heuristic` → ⚡ Ready
|
||||
- `manual` → ✋ Needs decision
|
||||
|
||||
## Backend
|
||||
|
||||
### New endpoints
|
||||
|
||||
- `POST /api/review/sort-inbox` — distributor. Walks every plan with
|
||||
`sorted = 0`, writes the transitions above in a single transaction, enqueues
|
||||
jobs for `auto` items via the existing `enqueueAudioJob` helper. Returns
|
||||
`{ moved_to_queue: number, moved_to_review: number }`. Broadcasts an SSE
|
||||
`inbox_sorted` event on the existing `/api/execute/events` stream so the
|
||||
Pipeline page refetches.
|
||||
|
||||
- `POST /api/review/approve-ready` — bulk-approves every plan with
|
||||
`status='pending' AND sorted=1 AND auto_class='auto_heuristic'`. Re-uses the
|
||||
existing per-item approve path (`approveBatch` semantics). Returns
|
||||
`{ count: number }`.
|
||||
|
||||
### Removed endpoint
|
||||
|
||||
- `POST /api/review/auto-approve` — removed. Its responsibilities split into
|
||||
`sort-inbox` (distribution) and `approve-ready` (bulk-confirm). Any UI
|
||||
calling it is updated in this change.
|
||||
|
||||
### Modified endpoints
|
||||
|
||||
- `GET /api/review/pipeline` — add fields to the response:
|
||||
- `inboxTotal: number`
|
||||
- `reviewReadyCount: number` (pending+sorted+`auto_heuristic`)
|
||||
- `reviewManualCount: number` (pending+sorted+`manual`)
|
||||
- `autoProcessing: boolean`
|
||||
- existing `reviewItemsTotal` now reflects only the Review column
|
||||
(`sorted=1`). Note this in the endpoint's doc comment.
|
||||
|
||||
- `GET /api/review/groups` — add `?bucket=inbox|review` query parameter.
|
||||
Default `review` for back-compat during rollout; the frontend passes it
|
||||
explicitly. Grouping / season-nesting / lazy-load pagination are shared
|
||||
between buckets.
|
||||
|
||||
- `POST /api/settings/auto-processing` — new endpoint following the
|
||||
per-topic pattern used by `/api/settings/audio-languages` and
|
||||
`/api/settings/mqtt`. Accepts `{ enabled: boolean }`. When flipped from
|
||||
`false` → `true`, schedule a `sort-inbox` pass before returning.
|
||||
|
||||
### Rescan hook
|
||||
|
||||
In `server/services/rescan.ts`, after a scan run finishes processing items,
|
||||
check `getConfig('auto_processing')`. When enabled, call the same distributor
|
||||
used by `/sort-inbox` so freshly-scanned items drain automatically. Emits the
|
||||
same `inbox_sorted` SSE event on completion.
|
||||
|
||||
## Frontend
|
||||
|
||||
### New components
|
||||
|
||||
- `src/features/pipeline/InboxColumn.tsx` — mirrors `ReviewColumn.tsx`:
|
||||
lazy-loaded via `/api/review/groups?bucket=inbox`, same season-grouped card
|
||||
layout. Header action: **Auto Review** → `POST /sort-inbox` → `onMutate()`.
|
||||
Secondary action: **Skip all** (reuses `skip-all` endpoint; Inbox items are
|
||||
`status='pending'` so `skip-all` already matches).
|
||||
|
||||
### Modified components
|
||||
|
||||
- `src/features/pipeline/ReviewColumn.tsx` — header primary action switches to
|
||||
**Approve all ready** (rendered only when `reviewReadyCount > 0`) →
|
||||
`POST /approve-ready`. Skip all stays. The column's fetch URL passes
|
||||
`?bucket=review`.
|
||||
|
||||
- `src/features/pipeline/PipelineCard.tsx` and `SeriesCard.tsx` — accept
|
||||
optional `autoClass` (for movie cards) / an aggregate `readyCount` (for
|
||||
series cards) to render the ⚡ / ✋ badge. Series cards show the badge on
|
||||
each episode row that needs it, and the series header surfaces "N ready"
|
||||
when any episode is `auto_heuristic`.
|
||||
|
||||
- `src/features/pipeline/ColumnShell.tsx` — add `subtitle?: string` prop,
|
||||
rendered under the title above the action row. All five columns use it.
|
||||
|
||||
- `src/features/pipeline/PipelinePage.tsx` — renders five columns. Adds the
|
||||
`Auto-process new items` checkbox inline in the page header, replacing
|
||||
today's standalone `N files in desired state` counter (the counter moves
|
||||
into Done's subtitle). Fetches `inboxInitialGroups` in parallel with the
|
||||
existing review groups fetch.
|
||||
|
||||
- `src/features/settings/SettingsPage.tsx` — add a corresponding
|
||||
`Auto-process new items` toggle in the existing processing/schedule
|
||||
section, posting to `/api/settings/auto-processing` on change.
|
||||
|
||||
### SSE handling
|
||||
|
||||
Add `inbox_sorted` listener to the existing EventSource setup in
|
||||
`PipelinePage.tsx`. On event, call the debounced `loadPipeline()` reload
|
||||
(same path used by `job_update`).
|
||||
|
||||
## Tests
|
||||
|
||||
New:
|
||||
|
||||
- `server/services/analyzer.test.ts` — `auto_class` outcomes for the five
|
||||
cases enumerated during brainstorming (all five exercised explicitly,
|
||||
including language-tag-missing and title-heuristic-triggered cases).
|
||||
- `server/api/__tests__/review-sort-inbox.test.ts` — unsorted plans
|
||||
distribute to the right buckets; `auto` items produce job rows; already-sorted
|
||||
plans are untouched; SSE event is broadcast.
|
||||
- `server/api/__tests__/review-approve-ready.test.ts` — only `auto_heuristic`
|
||||
pending items get approved; `manual` items are untouched; jobs are enqueued.
|
||||
|
||||
Modified:
|
||||
|
||||
- `server/api/__tests__/review-groups.test.ts` — add `bucket=inbox|review`
|
||||
assertions. The existing confidence-sorted test is rewritten in terms of
|
||||
`auto_class` (high → auto, low → manual mapping for the test fixtures).
|
||||
|
||||
## Rollout
|
||||
|
||||
1. Ship migration + analyzer change + new endpoints behind no feature flag;
|
||||
existing UI continues to work during deploy because the new columns render
|
||||
empty data until the backfill runs.
|
||||
2. Backfill runs on first boot. Existing Review items stay in Review
|
||||
(`sorted = 1` at migration time), so the user sees no surprise movement.
|
||||
3. `auto_processing` defaults to `"0"` — no behavior change until the user
|
||||
opts in.
|
||||
|
||||
## Guided Gates
|
||||
|
||||
- **GG-1:** Trigger a scan with the existing setup, flip `auto_processing` off,
|
||||
and confirm every new plan lands in Inbox. Items already in Review at
|
||||
upgrade time stay in Review.
|
||||
- **GG-2:** Click **Auto Review** on a populated Inbox. Verify that items
|
||||
matching the `auto` rule set move to Queue (jobs created) and the rest land
|
||||
in Review with the correct ⚡ / ✋ badge.
|
||||
- **GG-3:** Flip `auto_processing` to on, trigger a scan, and confirm Inbox
|
||||
drains automatically. Verify the one-shot sort also fires when the toggle
|
||||
flips from off to on while Inbox is non-empty.
|
||||
- **GG-4:** In Review, click **Approve all ready** and confirm only
|
||||
`auto_heuristic` items move to Queue; `manual` items are untouched.
|
||||
- **GG-5:** Confirm the Processing column renders its subtitle slot (currently
|
||||
"sequential") — placeholder is ready for the future parallel work.
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "netfelix-audio-fix",
|
||||
"version": "2026.04.15.2",
|
||||
"version": "2026.04.19.5",
|
||||
"scripts": {
|
||||
"dev:server": "NODE_ENV=development bun --hot server/index.tsx",
|
||||
"dev:client": "vite",
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import { clearQueue } from "../execute";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
function seedQueuedItem(db: Database, id: number, autoClass: "auto" | "auto_heuristic" | "manual") {
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_items (id, jellyfin_id, type, name, file_path, container) VALUES (?, ?, 'Movie', ?, ?, 'mkv')",
|
||||
)
|
||||
.run(id, `jf-${id}`, `Item ${id}`, `/x/${id}.mkv`);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO review_plans (item_id, status, is_noop, auto_class, sorted, apple_compat, job_type) VALUES (?, 'approved', 0, ?, 1, 'direct_play', 'copy')",
|
||||
)
|
||||
.run(id, autoClass);
|
||||
db.prepare("INSERT INTO jobs (item_id, command, job_type, status) VALUES (?, 'ffmpeg …', 'audio', 'pending')").run(id);
|
||||
}
|
||||
|
||||
describe("clearQueue", () => {
|
||||
test("deletes pending jobs and returns plans to the Inbox (sorted=0, pending)", () => {
|
||||
const db = makeDb();
|
||||
seedQueuedItem(db, 1, "auto");
|
||||
seedQueuedItem(db, 2, "auto");
|
||||
|
||||
const cleared = clearQueue(db);
|
||||
expect(cleared).toBe(2);
|
||||
|
||||
const plans = db.prepare("SELECT item_id, status, sorted FROM review_plans ORDER BY item_id").all() as {
|
||||
item_id: number;
|
||||
status: string;
|
||||
sorted: number;
|
||||
}[];
|
||||
expect(plans).toEqual([
|
||||
{ item_id: 1, status: "pending", sorted: 0 },
|
||||
{ item_id: 2, status: "pending", sorted: 0 },
|
||||
]);
|
||||
|
||||
const jobCount = (db.prepare("SELECT COUNT(*) as n FROM jobs WHERE status = 'pending'").get() as { n: number }).n;
|
||||
expect(jobCount).toBe(0);
|
||||
});
|
||||
|
||||
test("leaves running + completed jobs alone", () => {
|
||||
const db = makeDb();
|
||||
seedQueuedItem(db, 1, "auto");
|
||||
db.prepare("UPDATE jobs SET status = 'running' WHERE item_id = 1").run();
|
||||
seedQueuedItem(db, 2, "auto");
|
||||
db.prepare("UPDATE jobs SET status = 'done' WHERE item_id = 2").run();
|
||||
seedQueuedItem(db, 3, "auto"); // stays pending
|
||||
|
||||
const cleared = clearQueue(db);
|
||||
expect(cleared).toBe(1);
|
||||
|
||||
const surviving = db.prepare("SELECT item_id, status FROM jobs ORDER BY item_id").all() as {
|
||||
item_id: number;
|
||||
status: string;
|
||||
}[];
|
||||
expect(surviving).toEqual([
|
||||
{ item_id: 1, status: "running" },
|
||||
{ item_id: 2, status: "done" },
|
||||
]);
|
||||
|
||||
// Only the plan whose job was pending should be reset.
|
||||
const plan3 = db.prepare("SELECT status, sorted FROM review_plans WHERE item_id = 3").get() as {
|
||||
status: string;
|
||||
sorted: number;
|
||||
};
|
||||
expect(plan3).toEqual({ status: "pending", sorted: 0 });
|
||||
const plan1 = db.prepare("SELECT status, sorted FROM review_plans WHERE item_id = 1").get() as {
|
||||
status: string;
|
||||
sorted: number;
|
||||
};
|
||||
expect(plan1).toEqual({ status: "approved", sorted: 1 });
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { extractErrorSummary } from "../execute";
|
||||
import { enqueueUnseenJobs, extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
|
||||
|
||||
describe("extractErrorSummary", () => {
|
||||
test("pulls the real error line out of ffmpeg's banner", () => {
|
||||
@@ -47,3 +47,39 @@ describe("extractErrorSummary", () => {
|
||||
expect(summary).toBe("Error: no space left on device");
|
||||
});
|
||||
});
|
||||
|
||||
describe("shouldSendLiveUpdate", () => {
|
||||
test("throttles updates until interval passes", () => {
|
||||
expect(shouldSendLiveUpdate(1_000, 800, 500)).toBe(false);
|
||||
expect(shouldSendLiveUpdate(1_301, 800, 500)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("yieldAfterChunk", () => {
|
||||
test("yields once threshold is reached, resets chunk counter", async () => {
|
||||
let yieldCalls = 0;
|
||||
const sleep = async (_ms: number) => {
|
||||
yieldCalls += 1;
|
||||
};
|
||||
let chunks = 0;
|
||||
chunks = await yieldAfterChunk(chunks, 3, sleep);
|
||||
expect(chunks).toBe(1);
|
||||
chunks = await yieldAfterChunk(chunks, 3, sleep);
|
||||
expect(chunks).toBe(2);
|
||||
chunks = await yieldAfterChunk(chunks, 3, sleep);
|
||||
expect(chunks).toBe(0);
|
||||
expect(yieldCalls).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("enqueueUnseenJobs", () => {
|
||||
test("appends only unseen job ids to the active queue", () => {
|
||||
const queue = [{ id: 1 }, { id: 2 }] as { id: number }[];
|
||||
const seen = new Set([1, 2]);
|
||||
const added = enqueueUnseenJobs(queue, seen, [{ id: 2 }, { id: 3 }, { id: 4 }] as { id: number }[]);
|
||||
expect(added).toBe(2);
|
||||
expect(queue.map((j) => j.id)).toEqual([1, 2, 3, 4]);
|
||||
expect(seen.has(3)).toBeTrue();
|
||||
expect(seen.has(4)).toBeTrue();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,67 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import { approveReady } from "../review";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
function seedSortedPlan(db: Database, id: number, autoClass: "auto_heuristic" | "manual") {
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_items (id, jellyfin_id, type, name, file_path, container) VALUES (?, ?, 'Movie', ?, ?, 'mkv')",
|
||||
)
|
||||
.run(id, `jf-${id}`, `Item ${id}`, `/x/${id}.mkv`);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_streams (item_id, stream_index, type, codec, language) VALUES (?, 0, 'Audio', 'eac3', 'eng')",
|
||||
)
|
||||
.run(id);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO review_plans (item_id, status, is_noop, auto_class, sorted, apple_compat, job_type) VALUES (?, 'pending', 0, ?, 1, 'direct_play', 'copy')",
|
||||
)
|
||||
.run(id, autoClass);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO stream_decisions (plan_id, stream_id, action, target_index) SELECT rp.id, ms.id, 'keep', 0 FROM review_plans rp, media_streams ms WHERE rp.item_id = ? AND ms.item_id = ?",
|
||||
)
|
||||
.run(id, id);
|
||||
}
|
||||
|
||||
describe("approveReady", () => {
|
||||
test("approves auto_heuristic only, leaves manual alone", () => {
|
||||
const db = makeDb();
|
||||
seedSortedPlan(db, 1, "auto_heuristic");
|
||||
seedSortedPlan(db, 2, "manual");
|
||||
seedSortedPlan(db, 3, "auto_heuristic");
|
||||
|
||||
const count = approveReady(db);
|
||||
expect(count).toBe(2);
|
||||
|
||||
const statuses = db.prepare("SELECT item_id, status FROM review_plans ORDER BY item_id").all() as {
|
||||
item_id: number;
|
||||
status: string;
|
||||
}[];
|
||||
expect(statuses).toEqual([
|
||||
{ item_id: 1, status: "approved" },
|
||||
{ item_id: 2, status: "pending" },
|
||||
{ item_id: 3, status: "approved" },
|
||||
]);
|
||||
|
||||
const jobCount = (db.prepare("SELECT COUNT(*) as n FROM jobs").get() as { n: number }).n;
|
||||
expect(jobCount).toBe(2);
|
||||
});
|
||||
|
||||
test("noop when nothing is ready", () => {
|
||||
const db = makeDb();
|
||||
seedSortedPlan(db, 1, "manual");
|
||||
expect(approveReady(db)).toBe(0);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,178 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import { buildReviewGroups } from "../review";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
interface SeedOpts {
|
||||
id: number;
|
||||
type: "Movie" | "Episode";
|
||||
name?: string;
|
||||
seriesName?: string | null;
|
||||
seriesJellyfinId?: string | null;
|
||||
seasonNumber?: number | null;
|
||||
episodeNumber?: number | null;
|
||||
autoClass?: "auto" | "auto_heuristic" | "manual" | null;
|
||||
sorted?: 0 | 1;
|
||||
}
|
||||
|
||||
function seed(db: Database, opts: SeedOpts) {
|
||||
const {
|
||||
id,
|
||||
type,
|
||||
name = `Item ${id}`,
|
||||
seriesName = null,
|
||||
seriesJellyfinId = null,
|
||||
seasonNumber = null,
|
||||
episodeNumber = null,
|
||||
autoClass = "manual",
|
||||
sorted = 1,
|
||||
} = opts;
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_items (id, jellyfin_id, type, name, series_name, series_jellyfin_id, season_number, episode_number, file_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
)
|
||||
.run(id, `jf-${id}`, type, name, seriesName, seriesJellyfinId, seasonNumber, episodeNumber, `/x/${id}.mkv`);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO review_plans (item_id, status, is_noop, auto_class, sorted, apple_compat, job_type, notes) VALUES (?, 'pending', 0, ?, ?, 'direct_play', 'copy', NULL)",
|
||||
)
|
||||
.run(id, autoClass, sorted);
|
||||
}
|
||||
|
||||
describe("buildReviewGroups", () => {
|
||||
test("returns a complete series with every pending episode", () => {
|
||||
const db = makeDb();
|
||||
for (let i = 1; i <= 30; i++) {
|
||||
seed(db, {
|
||||
id: i,
|
||||
type: "Episode",
|
||||
seriesName: "Breaking Bad",
|
||||
seriesJellyfinId: "bb",
|
||||
seasonNumber: 1,
|
||||
episodeNumber: i,
|
||||
});
|
||||
}
|
||||
|
||||
const { groups, totalItems } = buildReviewGroups(db, { bucket: "review" });
|
||||
|
||||
expect(groups).toHaveLength(1);
|
||||
const series = groups[0];
|
||||
expect(series.kind).toBe("series");
|
||||
if (series.kind !== "series") throw new Error("expected series");
|
||||
expect(series.episodeCount).toBe(30);
|
||||
expect(series.seasons).toHaveLength(1);
|
||||
expect(series.seasons[0].episodes).toHaveLength(30);
|
||||
expect(totalItems).toBe(30);
|
||||
});
|
||||
|
||||
test("buckets episodes by season with null ordered last", () => {
|
||||
const db = makeDb();
|
||||
for (let ep = 1; ep <= 3; ep++) {
|
||||
seed(db, {
|
||||
id: ep,
|
||||
type: "Episode",
|
||||
seriesName: "Lost",
|
||||
seriesJellyfinId: "lost",
|
||||
seasonNumber: 1,
|
||||
episodeNumber: ep,
|
||||
});
|
||||
}
|
||||
for (let ep = 1; ep <= 2; ep++) {
|
||||
seed(db, {
|
||||
id: 10 + ep,
|
||||
type: "Episode",
|
||||
seriesName: "Lost",
|
||||
seriesJellyfinId: "lost",
|
||||
seasonNumber: 2,
|
||||
episodeNumber: ep,
|
||||
});
|
||||
}
|
||||
seed(db, { id: 99, type: "Episode", seriesName: "Lost", seriesJellyfinId: "lost", seasonNumber: null });
|
||||
|
||||
const { groups } = buildReviewGroups(db, { bucket: "review" });
|
||||
expect(groups).toHaveLength(1);
|
||||
const lost = groups[0];
|
||||
if (lost.kind !== "series") throw new Error("expected series");
|
||||
expect(lost.seasons.map((s) => s.season)).toEqual([1, 2, null]);
|
||||
expect(lost.seasons[0].episodes).toHaveLength(3);
|
||||
expect(lost.seasons[1].episodes).toHaveLength(2);
|
||||
expect(lost.seasons[2].episodes).toHaveLength(1);
|
||||
});
|
||||
|
||||
test("sorts groups: auto_heuristic (ready) first, then manual, then by name", () => {
|
||||
const db = makeDb();
|
||||
seed(db, { id: 1, type: "Movie", name: "Zodiac", autoClass: "auto_heuristic" });
|
||||
seed(db, { id: 2, type: "Movie", name: "Arrival", autoClass: "manual" });
|
||||
seed(db, { id: 3, type: "Movie", name: "Blade Runner", autoClass: "auto_heuristic" });
|
||||
|
||||
const { groups } = buildReviewGroups(db, { bucket: "review" });
|
||||
const names = groups.map((g) => (g.kind === "movie" ? g.item.name : g.seriesName));
|
||||
expect(names).toEqual(["Blade Runner", "Zodiac", "Arrival"]);
|
||||
});
|
||||
|
||||
test("series readyCount counts auto_heuristic episodes", () => {
|
||||
const db = makeDb();
|
||||
seed(db, {
|
||||
id: 1,
|
||||
type: "Episode",
|
||||
seriesName: "Show",
|
||||
seriesJellyfinId: "s",
|
||||
seasonNumber: 1,
|
||||
episodeNumber: 1,
|
||||
autoClass: "auto_heuristic",
|
||||
});
|
||||
seed(db, {
|
||||
id: 2,
|
||||
type: "Episode",
|
||||
seriesName: "Show",
|
||||
seriesJellyfinId: "s",
|
||||
seasonNumber: 1,
|
||||
episodeNumber: 2,
|
||||
autoClass: "manual",
|
||||
});
|
||||
|
||||
const { groups } = buildReviewGroups(db, { bucket: "review" });
|
||||
if (groups[0].kind !== "series") throw new Error("expected series");
|
||||
expect(groups[0].readyCount).toBe(1);
|
||||
});
|
||||
|
||||
test("excludes plans that are not pending or are is_noop=1", () => {
|
||||
const db = makeDb();
|
||||
seed(db, { id: 1, type: "Movie", name: "Pending" });
|
||||
seed(db, { id: 2, type: "Movie", name: "Approved" });
|
||||
db.prepare("UPDATE review_plans SET status = 'approved' WHERE item_id = ?").run(2);
|
||||
seed(db, { id: 3, type: "Movie", name: "Noop" });
|
||||
db.prepare("UPDATE review_plans SET is_noop = 1 WHERE item_id = ?").run(3);
|
||||
|
||||
const { groups, totalItems } = buildReviewGroups(db, { bucket: "review" });
|
||||
expect(groups).toHaveLength(1);
|
||||
expect(totalItems).toBe(1);
|
||||
if (groups[0].kind !== "movie") throw new Error("expected movie");
|
||||
expect(groups[0].item.name).toBe("Pending");
|
||||
});
|
||||
|
||||
test("bucket=inbox returns sorted=0 plans only", () => {
|
||||
const db = makeDb();
|
||||
seed(db, { id: 1, type: "Movie", name: "Fresh", autoClass: null, sorted: 0 });
|
||||
seed(db, { id: 2, type: "Movie", name: "Old", autoClass: "manual", sorted: 1 });
|
||||
|
||||
const inbox = buildReviewGroups(db, { bucket: "inbox" });
|
||||
expect(inbox.groups).toHaveLength(1);
|
||||
if (inbox.groups[0].kind !== "movie") throw new Error("expected movie");
|
||||
expect(inbox.groups[0].item.name).toBe("Fresh");
|
||||
|
||||
const review = buildReviewGroups(db, { bucket: "review" });
|
||||
expect(review.groups).toHaveLength(1);
|
||||
if (review.groups[0].kind !== "movie") throw new Error("expected movie");
|
||||
expect(review.groups[0].item.name).toBe("Old");
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,104 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import { sortInbox } from "../review";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
function seedPlan(db: Database, id: number, autoClass: "auto" | "auto_heuristic" | "manual") {
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_items (id, jellyfin_id, type, name, file_path, container) VALUES (?, ?, 'Movie', ?, ?, 'mkv')",
|
||||
)
|
||||
.run(id, `jf-${id}`, `Item ${id}`, `/x/${id}.mkv`);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_streams (item_id, stream_index, type, codec, language) VALUES (?, 0, 'Video', 'h264', NULL)",
|
||||
)
|
||||
.run(id);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_streams (item_id, stream_index, type, codec, language) VALUES (?, 1, 'Audio', 'eac3', 'eng')",
|
||||
)
|
||||
.run(id);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO review_plans (item_id, status, is_noop, auto_class, sorted, apple_compat, job_type) VALUES (?, 'pending', 0, ?, 0, 'direct_play', 'copy')",
|
||||
)
|
||||
.run(id, autoClass);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO stream_decisions (plan_id, stream_id, action, target_index) SELECT rp.id, ms.id, 'keep', 0 FROM review_plans rp, media_streams ms WHERE rp.item_id = ? AND ms.item_id = ? AND ms.type = 'Audio'",
|
||||
)
|
||||
.run(id, id);
|
||||
}
|
||||
|
||||
describe("sortInbox", () => {
|
||||
test("moves auto → sorted=1 + approved + enqueues job", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, "auto");
|
||||
|
||||
const result = sortInbox(db);
|
||||
|
||||
expect(result.moved_to_queue).toBe(1);
|
||||
expect(result.moved_to_review).toBe(0);
|
||||
const plan = db.prepare("SELECT status, sorted FROM review_plans WHERE item_id = 1").get() as {
|
||||
status: string;
|
||||
sorted: number;
|
||||
};
|
||||
expect(plan.status).toBe("approved");
|
||||
expect(plan.sorted).toBe(1);
|
||||
const job = db.prepare("SELECT status FROM jobs WHERE item_id = 1").get() as { status: string };
|
||||
expect(job.status).toBe("pending");
|
||||
});
|
||||
|
||||
test("moves auto_heuristic → sorted=1, stays pending, no job", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, "auto_heuristic");
|
||||
|
||||
const result = sortInbox(db);
|
||||
|
||||
expect(result.moved_to_queue).toBe(0);
|
||||
expect(result.moved_to_review).toBe(1);
|
||||
const plan = db.prepare("SELECT status, sorted FROM review_plans WHERE item_id = 1").get() as {
|
||||
status: string;
|
||||
sorted: number;
|
||||
};
|
||||
expect(plan.status).toBe("pending");
|
||||
expect(plan.sorted).toBe(1);
|
||||
const jobCount = (db.prepare("SELECT COUNT(*) as n FROM jobs WHERE item_id = 1").get() as { n: number }).n;
|
||||
expect(jobCount).toBe(0);
|
||||
});
|
||||
|
||||
test("moves manual → sorted=1, stays pending, no job", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, "manual");
|
||||
|
||||
const result = sortInbox(db);
|
||||
|
||||
expect(result.moved_to_review).toBe(1);
|
||||
const plan = db.prepare("SELECT status, sorted FROM review_plans WHERE item_id = 1").get() as {
|
||||
status: string;
|
||||
sorted: number;
|
||||
};
|
||||
expect(plan.sorted).toBe(1);
|
||||
expect(plan.status).toBe("pending");
|
||||
});
|
||||
|
||||
test("already sorted plans are untouched", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, "auto");
|
||||
db.prepare("UPDATE review_plans SET sorted = 1 WHERE item_id = 1").run();
|
||||
|
||||
const result = sortInbox(db);
|
||||
expect(result.moved_to_queue).toBe(0);
|
||||
expect(result.moved_to_review).toBe(0);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,93 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import { reopenAllDone, unsortAll } from "../review";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
function seedPlan(db: Database, id: number, opts: { sorted?: 0 | 1; status?: string; isNoop?: 0 | 1 } = {}) {
|
||||
const { sorted = 1, status = "pending", isNoop = 0 } = opts;
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_items (id, jellyfin_id, type, name, file_path, container) VALUES (?, ?, 'Movie', ?, ?, 'mkv')",
|
||||
)
|
||||
.run(id, `jf-${id}`, `Item ${id}`, `/x/${id}.mkv`);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO review_plans (item_id, status, is_noop, auto_class, sorted, apple_compat, job_type) VALUES (?, ?, ?, 'auto_heuristic', ?, 'direct_play', 'copy')",
|
||||
)
|
||||
.run(id, status, isNoop, sorted);
|
||||
}
|
||||
|
||||
describe("unsortAll", () => {
|
||||
test("flips sorted=1 pending plans back to sorted=0, skips is_noop and non-pending", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, { sorted: 1, status: "pending" });
|
||||
seedPlan(db, 2, { sorted: 1, status: "pending" });
|
||||
seedPlan(db, 3, { sorted: 0, status: "pending" }); // already in inbox
|
||||
seedPlan(db, 4, { sorted: 1, status: "approved" }); // queued
|
||||
seedPlan(db, 5, { sorted: 1, status: "pending", isNoop: 1 }); // noop
|
||||
|
||||
const count = unsortAll(db);
|
||||
expect(count).toBe(2);
|
||||
|
||||
const rows = db.prepare("SELECT item_id, sorted, status FROM review_plans ORDER BY item_id").all() as {
|
||||
item_id: number;
|
||||
sorted: number;
|
||||
status: string;
|
||||
}[];
|
||||
expect(rows).toEqual([
|
||||
{ item_id: 1, sorted: 0, status: "pending" },
|
||||
{ item_id: 2, sorted: 0, status: "pending" },
|
||||
{ item_id: 3, sorted: 0, status: "pending" },
|
||||
{ item_id: 4, sorted: 1, status: "approved" },
|
||||
{ item_id: 5, sorted: 1, status: "pending" },
|
||||
]);
|
||||
});
|
||||
|
||||
test("noop when review column is empty", () => {
|
||||
const db = makeDb();
|
||||
expect(unsortAll(db)).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("reopenAllDone", () => {
|
||||
test("flips done/error plans back to pending and drops their jobs", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, { status: "done" });
|
||||
seedPlan(db, 2, { status: "error" });
|
||||
seedPlan(db, 3, { status: "approved" }); // untouched
|
||||
db.prepare("INSERT INTO jobs (item_id, command, job_type, status) VALUES (1, 'ffmpeg', 'copy', 'done')").run();
|
||||
db.prepare("INSERT INTO jobs (item_id, command, job_type, status) VALUES (2, 'ffmpeg', 'copy', 'error')").run();
|
||||
db.prepare("INSERT INTO jobs (item_id, command, job_type, status) VALUES (3, 'ffmpeg', 'copy', 'pending')").run();
|
||||
|
||||
const count = reopenAllDone(db);
|
||||
expect(count).toBe(2);
|
||||
|
||||
const statuses = db.prepare("SELECT item_id, status, reviewed_at FROM review_plans ORDER BY item_id").all() as {
|
||||
item_id: number;
|
||||
status: string;
|
||||
reviewed_at: string | null;
|
||||
}[];
|
||||
expect(statuses[0]?.status).toBe("pending");
|
||||
expect(statuses[0]?.reviewed_at).toBeNull();
|
||||
expect(statuses[1]?.status).toBe("pending");
|
||||
expect(statuses[2]?.status).toBe("approved");
|
||||
|
||||
const jobs = db.prepare("SELECT item_id, status FROM jobs ORDER BY item_id").all();
|
||||
expect(jobs).toEqual([{ item_id: 3, status: "pending" }]);
|
||||
});
|
||||
|
||||
test("noop when nothing is done or errored", () => {
|
||||
const db = makeDb();
|
||||
seedPlan(db, 1, { status: "pending" });
|
||||
expect(reopenAllDone(db)).toBe(0);
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { parseScanLimit } from "../scan";
|
||||
import { buildScanItemsWhere, parseScanItemsQuery, parseScanLimit } from "../scan";
|
||||
|
||||
describe("parseScanLimit", () => {
|
||||
test("accepts positive integers and nullish/empty as no-limit", () => {
|
||||
@@ -29,3 +29,73 @@ describe("parseScanLimit", () => {
|
||||
expect(parseScanLimit(Number.POSITIVE_INFINITY)).toEqual({ ok: false });
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseScanItemsQuery", () => {
|
||||
test("normalizes default filters and pagination", () => {
|
||||
const q = parseScanItemsQuery({});
|
||||
expect(q).toEqual({
|
||||
offset: 0,
|
||||
limit: 50,
|
||||
search: "",
|
||||
status: "all",
|
||||
type: "all",
|
||||
source: "all",
|
||||
});
|
||||
});
|
||||
|
||||
test("clamps limit and offset, trims and lowercases values", () => {
|
||||
const q = parseScanItemsQuery({
|
||||
offset: "-12",
|
||||
limit: "5000",
|
||||
q: " The Wire ",
|
||||
status: "SCANNED",
|
||||
type: "EPISODE",
|
||||
source: "WEBHOOK",
|
||||
});
|
||||
expect(q).toEqual({
|
||||
offset: 0,
|
||||
limit: 200,
|
||||
search: "The Wire",
|
||||
status: "scanned",
|
||||
type: "episode",
|
||||
source: "webhook",
|
||||
});
|
||||
});
|
||||
|
||||
test("falls back to all for unknown enum values", () => {
|
||||
const q = parseScanItemsQuery({ status: "zzz", type: "cartoon", source: "mqtt" });
|
||||
expect(q.status).toBe("all");
|
||||
expect(q.type).toBe("all");
|
||||
expect(q.source).toBe("all");
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildScanItemsWhere", () => {
|
||||
test("builds combined where clause + args in stable order", () => {
|
||||
const where = buildScanItemsWhere({
|
||||
offset: 0,
|
||||
limit: 50,
|
||||
search: "blade",
|
||||
status: "scanned",
|
||||
type: "movie",
|
||||
source: "webhook",
|
||||
});
|
||||
expect(where.sql).toBe(
|
||||
"WHERE scan_status = ? AND lower(type) = ? AND ingest_source = ? AND (lower(name) LIKE ? OR lower(file_path) LIKE ?)",
|
||||
);
|
||||
expect(where.args).toEqual(["scanned", "movie", "webhook", "%blade%", "%blade%"]);
|
||||
});
|
||||
|
||||
test("returns empty where when all filters are broad", () => {
|
||||
const where = buildScanItemsWhere({
|
||||
offset: 0,
|
||||
limit: 50,
|
||||
search: "",
|
||||
status: "all",
|
||||
type: "all",
|
||||
source: "all",
|
||||
});
|
||||
expect(where.sql).toBe("");
|
||||
expect(where.args).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
+70
-16
@@ -23,6 +23,36 @@ const app = new Hono();
|
||||
let queueRunning = false;
|
||||
let runningProc: ReturnType<typeof Bun.spawn> | null = null;
|
||||
let runningJobId: number | null = null;
|
||||
let activeQueue: Job[] | null = null;
|
||||
let activeSeen: Set<number> | null = null;
|
||||
const LIVE_UPDATE_INTERVAL_MS = 500;
|
||||
const STREAM_CHUNKS_BEFORE_YIELD = 24;
|
||||
|
||||
export function shouldSendLiveUpdate(now: number, lastSentAt: number, intervalMs = LIVE_UPDATE_INTERVAL_MS): boolean {
|
||||
return now - lastSentAt > intervalMs;
|
||||
}
|
||||
|
||||
export async function yieldAfterChunk(
|
||||
chunksSinceYield: number,
|
||||
chunksBeforeYield = STREAM_CHUNKS_BEFORE_YIELD,
|
||||
sleep: (ms: number) => Promise<unknown> = (ms) => Bun.sleep(ms),
|
||||
): Promise<number> {
|
||||
const next = chunksSinceYield + 1;
|
||||
if (next < chunksBeforeYield) return next;
|
||||
await sleep(0);
|
||||
return 0;
|
||||
}
|
||||
|
||||
export function enqueueUnseenJobs<T extends { id: number }>(queue: T[], seen: Set<number>, jobs: T[]): number {
|
||||
let added = 0;
|
||||
for (const job of jobs) {
|
||||
if (seen.has(job.id)) continue;
|
||||
queue.push(job);
|
||||
seen.add(job.id);
|
||||
added += 1;
|
||||
}
|
||||
return added;
|
||||
}
|
||||
|
||||
function emitQueueStatus(
|
||||
status: "running" | "paused" | "sleeping" | "idle",
|
||||
@@ -39,6 +69,8 @@ async function runSequential(initial: Job[]): Promise<void> {
|
||||
let first = true;
|
||||
const queue: Job[] = [...initial];
|
||||
const seen = new Set<number>(queue.map((j) => j.id));
|
||||
activeQueue = queue;
|
||||
activeSeen = seen;
|
||||
|
||||
while (queue.length > 0) {
|
||||
const job = queue.shift() as Job;
|
||||
@@ -82,15 +114,12 @@ async function runSequential(initial: Job[]): Promise<void> {
|
||||
// manually clicks "Run all" again.
|
||||
if (queue.length === 0) {
|
||||
const more = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
|
||||
for (const m of more) {
|
||||
if (!seen.has(m.id)) {
|
||||
queue.push(m);
|
||||
seen.add(m.id);
|
||||
}
|
||||
}
|
||||
enqueueUnseenJobs(queue, seen, more);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
activeQueue = null;
|
||||
activeSeen = null;
|
||||
queueRunning = false;
|
||||
emitQueueStatus("idle");
|
||||
}
|
||||
@@ -110,6 +139,11 @@ function emitJobProgress(jobId: number, seconds: number, total: number): void {
|
||||
for (const l of jobListeners) l(line);
|
||||
}
|
||||
|
||||
export function emitInboxSorted(result: { moved_to_queue: number; moved_to_review: number }): void {
|
||||
const line = `event: inbox_sorted\ndata: ${JSON.stringify(result)}\n\n`;
|
||||
for (const l of jobListeners) l(line);
|
||||
}
|
||||
|
||||
/** Parse "Duration: HH:MM:SS.MS" from ffmpeg startup output. */
|
||||
function parseFFmpegDuration(line: string): number | null {
|
||||
const match = line.match(/Duration:\s*(\d+):(\d+):(\d+)\.(\d+)/);
|
||||
@@ -161,8 +195,12 @@ function loadJobRow(jobId: number) {
|
||||
app.post("/start", (c) => {
|
||||
const db = getDb();
|
||||
const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
|
||||
if (queueRunning && activeQueue && activeSeen) {
|
||||
const queued = enqueueUnseenJobs(activeQueue, activeSeen, pending);
|
||||
return c.json({ ok: true, started: 0, queued });
|
||||
}
|
||||
runSequential(pending).catch((err) => logError("Queue failed:", err));
|
||||
return c.json({ ok: true, started: pending.length });
|
||||
return c.json({ ok: true, started: pending.length, queued: pending.length });
|
||||
});
|
||||
|
||||
// ─── Run single ───────────────────────────────────────────────────────────────
|
||||
@@ -196,17 +234,28 @@ app.post("/job/:id/cancel", (c) => {
|
||||
|
||||
// ─── Clear queue ──────────────────────────────────────────────────────────────
|
||||
|
||||
app.post("/clear", (c) => {
|
||||
const db = getDb();
|
||||
/**
|
||||
* Cancel every pending job and send its plan back to the Inbox so the
|
||||
* distributor can re-classify it. Without `sorted = 0` the plan stays in
|
||||
* Review with `auto_class='auto'` — where "Approve all ready" (auto_heuristic
|
||||
* only) can't re-queue it and "Auto Review" (sort-inbox, sorted=0 only) can't
|
||||
* see it, leaving the item stranded until the user manually approves.
|
||||
*/
|
||||
export function clearQueue(db: ReturnType<typeof getDb>): number {
|
||||
db
|
||||
.prepare(`
|
||||
UPDATE review_plans SET status = 'pending', reviewed_at = NULL
|
||||
UPDATE review_plans SET status = 'pending', reviewed_at = NULL, sorted = 0
|
||||
WHERE item_id IN (SELECT item_id FROM jobs WHERE status = 'pending')
|
||||
AND status = 'approved'
|
||||
`)
|
||||
.run();
|
||||
const result = db.prepare("DELETE FROM jobs WHERE status = 'pending'").run();
|
||||
return c.json({ ok: true, cleared: result.changes });
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
app.post("/clear", (c) => {
|
||||
const cleared = clearQueue(getDb());
|
||||
return c.json({ ok: true, cleared });
|
||||
});
|
||||
|
||||
app.post("/clear-completed", (c) => {
|
||||
@@ -329,14 +378,16 @@ async function runJob(job: Job): Promise<void> {
|
||||
const updateOutput = db.prepare("UPDATE jobs SET output = ? WHERE id = ?");
|
||||
|
||||
const flush = (final = false) => {
|
||||
const text = outputLines.join("\n");
|
||||
const now = Date.now();
|
||||
if (final || now - lastFlushAt > 500) {
|
||||
if (!final && !shouldSendLiveUpdate(now, lastFlushAt)) {
|
||||
pendingFlush = true;
|
||||
return;
|
||||
}
|
||||
const text = outputLines.join("\n");
|
||||
if (final || shouldSendLiveUpdate(now, lastFlushAt)) {
|
||||
updateOutput.run(text, job.id);
|
||||
lastFlushAt = now;
|
||||
pendingFlush = false;
|
||||
} else {
|
||||
pendingFlush = true;
|
||||
}
|
||||
emitJobUpdate(job.id, "running", text);
|
||||
};
|
||||
@@ -349,7 +400,7 @@ async function runJob(job: Job): Promise<void> {
|
||||
const progressed = parseFFmpegProgress(line);
|
||||
if (progressed != null && totalSeconds > 0) {
|
||||
const now = Date.now();
|
||||
if (now - lastProgressEmit > 500) {
|
||||
if (shouldSendLiveUpdate(now, lastProgressEmit)) {
|
||||
emitJobProgress(job.id, progressed, totalSeconds);
|
||||
lastProgressEmit = now;
|
||||
}
|
||||
@@ -364,6 +415,7 @@ async function runJob(job: Job): Promise<void> {
|
||||
const reader = readable.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = "";
|
||||
let chunksSinceYield = 0;
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
@@ -377,6 +429,8 @@ async function runJob(job: Job): Promise<void> {
|
||||
consumeProgress(line);
|
||||
}
|
||||
flush();
|
||||
// Let pending HTTP requests run even when ffmpeg floods stdout/stderr.
|
||||
chunksSinceYield = await yieldAfterChunk(chunksSinceYield);
|
||||
}
|
||||
if (buffer.trim()) {
|
||||
outputLines.push(prefix + buffer);
|
||||
|
||||
+401
-108
@@ -5,6 +5,7 @@ import { analyzeItem, assignTargetOrder } from "../services/analyzer";
|
||||
import { buildCommand } from "../services/ffmpeg";
|
||||
import { getItem, mapStream, normalizeLanguage, refreshItem } from "../services/jellyfin";
|
||||
import type { Job, MediaItem, MediaStream, ReviewPlan, StreamDecision } from "../types";
|
||||
import { emitInboxSorted } from "./execute";
|
||||
|
||||
const app = new Hono();
|
||||
|
||||
@@ -42,6 +43,73 @@ export function enqueueAudioJob(db: ReturnType<typeof getDb>, itemId: number, co
|
||||
return result.changes > 0;
|
||||
}
|
||||
|
||||
export interface SortInboxResult {
|
||||
moved_to_queue: number;
|
||||
moved_to_review: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Distribute every unsorted (sorted=0) pending plan to its final bucket:
|
||||
* auto → sorted=1, status='approved', job enqueued (→ Queue)
|
||||
* auto_heuristic → sorted=1 (→ Review, badge ⚡)
|
||||
* manual → sorted=1 (→ Review, badge ✋)
|
||||
*/
|
||||
export function sortInbox(db: ReturnType<typeof getDb>): SortInboxResult {
|
||||
const unsorted = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.id as item_id
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0 AND rp.sorted = 0
|
||||
`)
|
||||
.all() as (ReviewPlan & { item_id: number })[];
|
||||
|
||||
let movedToQueue = 0;
|
||||
let movedToReview = 0;
|
||||
|
||||
for (const plan of unsorted) {
|
||||
if (plan.auto_class === "auto") {
|
||||
db
|
||||
.prepare("UPDATE review_plans SET status = 'approved', reviewed_at = datetime('now'), sorted = 1 WHERE id = ?")
|
||||
.run(plan.id);
|
||||
const { item, streams, decisions } = loadItemDetail(db, plan.item_id);
|
||||
if (item) enqueueAudioJob(db, plan.item_id, buildCommand(item, streams, decisions));
|
||||
movedToQueue += 1;
|
||||
} else {
|
||||
db.prepare("UPDATE review_plans SET sorted = 1 WHERE id = ?").run(plan.id);
|
||||
movedToReview += 1;
|
||||
}
|
||||
}
|
||||
|
||||
return { moved_to_queue: movedToQueue, moved_to_review: movedToReview };
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk-approve every ⚡ Ready (auto_heuristic) plan currently in the Review
|
||||
* column (sorted=1, status='pending'). Manual items are untouched.
|
||||
*/
|
||||
export function approveReady(db: ReturnType<typeof getDb>): number {
|
||||
const ready = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.id as item_id
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0 AND rp.sorted = 1 AND rp.auto_class = 'auto_heuristic'
|
||||
`)
|
||||
.all() as (ReviewPlan & { item_id: number })[];
|
||||
|
||||
let count = 0;
|
||||
for (const plan of ready) {
|
||||
db.prepare("UPDATE review_plans SET status = 'approved', reviewed_at = datetime('now') WHERE id = ?").run(plan.id);
|
||||
const { item, streams, decisions } = loadItemDetail(db, plan.item_id);
|
||||
if (item) {
|
||||
enqueueAudioJob(db, plan.item_id, buildCommand(item, streams, decisions));
|
||||
count += 1;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
function countsByFilter(db: ReturnType<typeof getDb>): Record<string, number> {
|
||||
const total = (db.prepare("SELECT COUNT(*) as n FROM review_plans").get() as { n: number }).n;
|
||||
const noops = (db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE is_noop = 1").get() as { n: number }).n;
|
||||
@@ -156,21 +224,32 @@ export function reanalyze(db: ReturnType<typeof getDb>, itemId: number, preserve
|
||||
.all(itemId) as MediaStream[];
|
||||
const audioLanguages = getAudioLanguages();
|
||||
const analysis = analyzeItem(
|
||||
{ original_language: item.original_language, needs_review: item.needs_review, container: item.container },
|
||||
{
|
||||
original_language: item.original_language,
|
||||
orig_lang_source: item.orig_lang_source,
|
||||
needs_review: item.needs_review,
|
||||
container: item.container,
|
||||
},
|
||||
streams,
|
||||
{ audioLanguages },
|
||||
);
|
||||
|
||||
db
|
||||
.prepare(`
|
||||
INSERT INTO review_plans (item_id, status, is_noop, confidence, apple_compat, job_type, notes)
|
||||
INSERT INTO review_plans (item_id, status, is_noop, auto_class, apple_compat, job_type, notes)
|
||||
VALUES (?, 'pending', ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(item_id) DO UPDATE SET status = 'pending', is_noop = excluded.is_noop, confidence = excluded.confidence, apple_compat = excluded.apple_compat, job_type = excluded.job_type, notes = excluded.notes
|
||||
ON CONFLICT(item_id) DO UPDATE SET
|
||||
status = 'pending',
|
||||
is_noop = excluded.is_noop,
|
||||
auto_class = excluded.auto_class,
|
||||
apple_compat = excluded.apple_compat,
|
||||
job_type = excluded.job_type,
|
||||
notes = excluded.notes
|
||||
`)
|
||||
.run(
|
||||
itemId,
|
||||
analysis.is_noop ? 1 : 0,
|
||||
analysis.confidence,
|
||||
analysis.auto_class,
|
||||
analysis.apple_compat,
|
||||
analysis.job_type,
|
||||
analysis.notes.length > 0 ? analysis.notes.join("\n") : null,
|
||||
@@ -275,94 +354,17 @@ interface PipelineAudioStream {
|
||||
action: "keep" | "remove";
|
||||
}
|
||||
|
||||
app.get("/pipeline", (c) => {
|
||||
const db = getDb();
|
||||
const jellyfinUrl = getConfig("jellyfin_url") ?? "";
|
||||
|
||||
// Cap the review column to keep the page snappy at scale; pipelines
|
||||
// with thousands of pending items would otherwise ship 10k+ rows on
|
||||
// every refresh and re-render every card.
|
||||
const REVIEW_LIMIT = 500;
|
||||
const review = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.name, mi.series_name, mi.series_jellyfin_id,
|
||||
mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0
|
||||
ORDER BY
|
||||
CASE rp.confidence WHEN 'high' THEN 0 ELSE 1 END,
|
||||
COALESCE(mi.series_name, mi.name),
|
||||
mi.season_number, mi.episode_number
|
||||
LIMIT ${REVIEW_LIMIT}
|
||||
`)
|
||||
.all();
|
||||
const reviewTotal = (
|
||||
db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0").get() as { n: number }
|
||||
).n;
|
||||
|
||||
// Queued gets the same enrichment as review so the card can render
|
||||
// streams + transcode reasons read-only (with a "Back to review" button).
|
||||
const queued = db
|
||||
.prepare(`
|
||||
SELECT j.id, j.item_id, j.status, j.started_at, j.completed_at,
|
||||
mi.name, mi.series_name, mi.series_jellyfin_id, mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path,
|
||||
rp.id as plan_id, rp.job_type, rp.apple_compat,
|
||||
rp.confidence, rp.is_noop
|
||||
FROM jobs j
|
||||
JOIN media_items mi ON mi.id = j.item_id
|
||||
JOIN review_plans rp ON rp.item_id = j.item_id
|
||||
WHERE j.status = 'pending'
|
||||
ORDER BY j.created_at
|
||||
`)
|
||||
.all();
|
||||
|
||||
const processing = db
|
||||
.prepare(`
|
||||
SELECT j.*, mi.name, mi.series_name, mi.type,
|
||||
rp.job_type, rp.apple_compat
|
||||
FROM jobs j
|
||||
JOIN media_items mi ON mi.id = j.item_id
|
||||
JOIN review_plans rp ON rp.item_id = j.item_id
|
||||
WHERE j.status = 'running'
|
||||
`)
|
||||
.all();
|
||||
|
||||
const done = db
|
||||
.prepare(`
|
||||
SELECT j.*, mi.name, mi.series_name, mi.type,
|
||||
rp.job_type, rp.apple_compat
|
||||
FROM jobs j
|
||||
JOIN media_items mi ON mi.id = j.item_id
|
||||
JOIN review_plans rp ON rp.item_id = j.item_id
|
||||
WHERE j.status IN ('done', 'error')
|
||||
ORDER BY j.completed_at DESC
|
||||
LIMIT 50
|
||||
`)
|
||||
.all();
|
||||
|
||||
// "Done" = files already in the desired end state. Either the analyzer
|
||||
// says nothing to do (is_noop=1) or a job finished. Use two indexable
|
||||
// counts and add — the OR form (is_noop=1 OR status='done') can't use
|
||||
// our single-column indexes and gets slow on large libraries.
|
||||
const noopRow = db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE is_noop = 1").get() as { n: number };
|
||||
const doneRow = db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'done' AND is_noop = 0").get() as {
|
||||
n: number;
|
||||
};
|
||||
const doneCount = noopRow.n + doneRow.n;
|
||||
|
||||
// Enrich rows that have (plan_id, item_id) with the transcode-reason
|
||||
// badges and pre-checked audio streams. Used for both review and queued
|
||||
// columns so the queued card can render read-only with the same info.
|
||||
type EnrichableRow = { id?: number; plan_id?: number; item_id: number } & {
|
||||
type EnrichableRow = { id?: number; plan_id?: number; item_id: number } & {
|
||||
transcode_reasons?: string[];
|
||||
audio_streams?: PipelineAudioStream[];
|
||||
};
|
||||
const enrichWithStreamsAndReasons = (rows: EnrichableRow[]) => {
|
||||
};
|
||||
|
||||
/**
|
||||
* Enrich review/queued rows with transcode-reason badges and pre-checked audio
|
||||
* streams. Works for both the Review column (where `id` is the plan id) and
|
||||
* the Queued column (where `plan_id` is explicit and `id` is the job id).
|
||||
*/
|
||||
function enrichWithStreamsAndReasons(db: ReturnType<typeof getDb>, rows: EnrichableRow[]): void {
|
||||
if (rows.length === 0) return;
|
||||
const planIdFor = (r: EnrichableRow): number => (r.plan_id ?? r.id) as number;
|
||||
const planIds = rows.map(planIdFor);
|
||||
@@ -422,12 +424,266 @@ app.get("/pipeline", (c) => {
|
||||
r.transcode_reasons = reasonsByPlan.get(planIdFor(r)) ?? [];
|
||||
r.audio_streams = streamsByItem.get(r.item_id) ?? [];
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Review groups (paginated, always returns complete series) ──────────────
|
||||
|
||||
interface ReviewItemRow {
|
||||
id: number;
|
||||
item_id: number;
|
||||
status: string;
|
||||
is_noop: number;
|
||||
auto_class: "auto" | "auto_heuristic" | "manual" | null;
|
||||
apple_compat: ReviewPlan["apple_compat"];
|
||||
job_type: "copy" | "transcode";
|
||||
name: string;
|
||||
series_name: string | null;
|
||||
series_jellyfin_id: string | null;
|
||||
jellyfin_id: string;
|
||||
season_number: number | null;
|
||||
episode_number: number | null;
|
||||
type: "Movie" | "Episode";
|
||||
container: string | null;
|
||||
original_language: string | null;
|
||||
orig_lang_source: string | null;
|
||||
file_path: string;
|
||||
transcode_reasons?: string[];
|
||||
audio_streams?: PipelineAudioStream[];
|
||||
}
|
||||
|
||||
type ReviewGroup =
|
||||
| { kind: "movie"; item: ReviewItemRow }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
readyCount: number;
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: ReviewItemRow[] }>;
|
||||
};
|
||||
|
||||
enrichWithStreamsAndReasons(review as EnrichableRow[]);
|
||||
enrichWithStreamsAndReasons(queued as EnrichableRow[]);
|
||||
export interface BuildReviewGroupsOpts {
|
||||
bucket: "inbox" | "review";
|
||||
}
|
||||
|
||||
return c.json({ review, reviewTotal, queued, processing, done, doneCount, jellyfinUrl });
|
||||
export function buildReviewGroups(
|
||||
db: ReturnType<typeof getDb>,
|
||||
opts: BuildReviewGroupsOpts,
|
||||
): { groups: ReviewGroup[]; totalItems: number } {
|
||||
const sortedFilter = opts.bucket === "inbox" ? "rp.sorted = 0" : "rp.sorted = 1";
|
||||
const rows = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.name, mi.series_name, mi.series_jellyfin_id,
|
||||
mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0 AND ${sortedFilter}
|
||||
ORDER BY
|
||||
CASE rp.auto_class WHEN 'auto_heuristic' THEN 0 WHEN 'manual' THEN 1 ELSE 2 END,
|
||||
COALESCE(mi.series_name, mi.name),
|
||||
mi.season_number, mi.episode_number
|
||||
`)
|
||||
.all() as ReviewItemRow[];
|
||||
|
||||
const movieGroups: ReviewGroup[] = [];
|
||||
interface SeriesAccum {
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
seasons: Map<number | null, ReviewItemRow[]>;
|
||||
originalLanguage: string | null;
|
||||
readyCount: number;
|
||||
}
|
||||
const seriesMap = new Map<string, SeriesAccum>();
|
||||
|
||||
for (const row of rows) {
|
||||
if (row.type === "Movie") {
|
||||
movieGroups.push({ kind: "movie", item: row });
|
||||
continue;
|
||||
}
|
||||
const key = row.series_jellyfin_id ?? row.series_name ?? String(row.item_id);
|
||||
let entry = seriesMap.get(key);
|
||||
if (!entry) {
|
||||
entry = {
|
||||
seriesName: row.series_name ?? "",
|
||||
seriesJellyfinId: row.series_jellyfin_id,
|
||||
seasons: new Map(),
|
||||
originalLanguage: row.original_language,
|
||||
readyCount: 0,
|
||||
};
|
||||
seriesMap.set(key, entry);
|
||||
}
|
||||
let bucket = entry.seasons.get(row.season_number);
|
||||
if (!bucket) {
|
||||
bucket = [];
|
||||
entry.seasons.set(row.season_number, bucket);
|
||||
}
|
||||
bucket.push(row);
|
||||
if (row.auto_class === "auto_heuristic") entry.readyCount += 1;
|
||||
}
|
||||
|
||||
const seriesGroups: ReviewGroup[] = [];
|
||||
for (const [seriesKey, entry] of seriesMap) {
|
||||
const seasonKeys = [...entry.seasons.keys()].sort((a, b) => {
|
||||
if (a === null) return 1;
|
||||
if (b === null) return -1;
|
||||
return a - b;
|
||||
});
|
||||
const seasons = seasonKeys.map((season) => ({
|
||||
season,
|
||||
episodes: (entry.seasons.get(season) ?? []).sort((a, b) => (a.episode_number ?? 0) - (b.episode_number ?? 0)),
|
||||
}));
|
||||
const episodeCount = seasons.reduce((sum, s) => sum + s.episodes.length, 0);
|
||||
seriesGroups.push({
|
||||
kind: "series",
|
||||
seriesKey,
|
||||
seriesName: entry.seriesName,
|
||||
seriesJellyfinId: entry.seriesJellyfinId,
|
||||
episodeCount,
|
||||
readyCount: entry.readyCount,
|
||||
originalLanguage: entry.originalLanguage,
|
||||
seasons,
|
||||
});
|
||||
}
|
||||
|
||||
const allGroups = [...movieGroups, ...seriesGroups].sort((a, b) => {
|
||||
const rankA = a.kind === "movie" ? autoClassRank(a.item.auto_class) : a.readyCount > 0 ? 0 : 1;
|
||||
const rankB = b.kind === "movie" ? autoClassRank(b.item.auto_class) : b.readyCount > 0 ? 0 : 1;
|
||||
if (rankA !== rankB) return rankA - rankB;
|
||||
const nameA = a.kind === "movie" ? a.item.name : a.seriesName;
|
||||
const nameB = b.kind === "movie" ? b.item.name : b.seriesName;
|
||||
return nameA.localeCompare(nameB);
|
||||
});
|
||||
|
||||
const totalItems =
|
||||
movieGroups.length + seriesGroups.reduce((sum, g) => sum + (g.kind === "series" ? g.episodeCount : 0), 0);
|
||||
return { groups: allGroups, totalItems };
|
||||
}
|
||||
|
||||
function autoClassRank(cls: string | null): number {
|
||||
if (cls === "auto_heuristic") return 0;
|
||||
if (cls === "manual") return 1;
|
||||
return 2;
|
||||
}
|
||||
|
||||
app.get("/groups", (c) => {
|
||||
const db = getDb();
|
||||
const offset = Math.max(0, Number.parseInt(c.req.query("offset") ?? "0", 10) || 0);
|
||||
const limit = Math.max(1, Math.min(200, Number.parseInt(c.req.query("limit") ?? "25", 10) || 25));
|
||||
const bucketParam = c.req.query("bucket") ?? "review";
|
||||
const bucket = bucketParam === "inbox" ? "inbox" : "review";
|
||||
|
||||
const { groups, totalItems } = buildReviewGroups(db, { bucket });
|
||||
const page = groups.slice(offset, offset + limit);
|
||||
|
||||
const flat: EnrichableRow[] = [];
|
||||
for (const g of page) {
|
||||
if (g.kind === "movie") flat.push(g.item as EnrichableRow);
|
||||
else for (const s of g.seasons) for (const ep of s.episodes) flat.push(ep as EnrichableRow);
|
||||
}
|
||||
enrichWithStreamsAndReasons(db, flat);
|
||||
|
||||
return c.json({
|
||||
groups: page,
|
||||
totalGroups: groups.length,
|
||||
totalItems,
|
||||
hasMore: offset + limit < groups.length,
|
||||
});
|
||||
});
|
||||
|
||||
app.get("/pipeline", (c) => {
|
||||
const db = getDb();
|
||||
const jellyfinUrl = getConfig("jellyfin_url") ?? "";
|
||||
|
||||
const inboxTotal = (
|
||||
db
|
||||
.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0 AND sorted = 0")
|
||||
.get() as { n: number }
|
||||
).n;
|
||||
const reviewItemsTotal = (
|
||||
db
|
||||
.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0 AND sorted = 1")
|
||||
.get() as { n: number }
|
||||
).n;
|
||||
const reviewReadyCount = (
|
||||
db
|
||||
.prepare(
|
||||
"SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0 AND sorted = 1 AND auto_class = 'auto_heuristic'",
|
||||
)
|
||||
.get() as { n: number }
|
||||
).n;
|
||||
const reviewManualCount = reviewItemsTotal - reviewReadyCount;
|
||||
const autoProcessing = getConfig("auto_processing") === "1";
|
||||
|
||||
// Queued carries stream + transcode-reason enrichment so the card renders
|
||||
// read-only with a "Back to review" button.
|
||||
const queued = db
|
||||
.prepare(`
|
||||
SELECT j.id, j.item_id, j.status, j.started_at, j.completed_at,
|
||||
mi.name, mi.series_name, mi.series_jellyfin_id, mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path,
|
||||
rp.id as plan_id, rp.job_type, rp.apple_compat,
|
||||
rp.auto_class, rp.is_noop
|
||||
FROM jobs j
|
||||
JOIN media_items mi ON mi.id = j.item_id
|
||||
JOIN review_plans rp ON rp.item_id = j.item_id
|
||||
WHERE j.status = 'pending'
|
||||
ORDER BY j.created_at
|
||||
`)
|
||||
.all();
|
||||
|
||||
const processing = db
|
||||
.prepare(`
|
||||
SELECT j.*, mi.name, mi.series_name, mi.type,
|
||||
rp.job_type, rp.apple_compat
|
||||
FROM jobs j
|
||||
JOIN media_items mi ON mi.id = j.item_id
|
||||
JOIN review_plans rp ON rp.item_id = j.item_id
|
||||
WHERE j.status = 'running'
|
||||
`)
|
||||
.all();
|
||||
|
||||
const done = db
|
||||
.prepare(`
|
||||
SELECT j.*, mi.name, mi.series_name, mi.type,
|
||||
rp.job_type, rp.apple_compat
|
||||
FROM jobs j
|
||||
JOIN media_items mi ON mi.id = j.item_id
|
||||
JOIN review_plans rp ON rp.item_id = j.item_id
|
||||
WHERE j.status IN ('done', 'error')
|
||||
ORDER BY j.completed_at DESC
|
||||
LIMIT 50
|
||||
`)
|
||||
.all();
|
||||
|
||||
// "Done" = files already in the desired end state. Either the analyzer
|
||||
// says nothing to do (is_noop=1) or a job finished. Use two indexable
|
||||
// counts and add — the OR form (is_noop=1 OR status='done') can't use
|
||||
// our single-column indexes and gets slow on large libraries.
|
||||
const noopRow = db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE is_noop = 1").get() as { n: number };
|
||||
const doneRow = db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'done' AND is_noop = 0").get() as {
|
||||
n: number;
|
||||
};
|
||||
const doneCount = noopRow.n + doneRow.n;
|
||||
|
||||
enrichWithStreamsAndReasons(db, queued as EnrichableRow[]);
|
||||
|
||||
return c.json({
|
||||
inboxTotal,
|
||||
reviewItemsTotal,
|
||||
reviewReadyCount,
|
||||
reviewManualCount,
|
||||
autoProcessing,
|
||||
queued,
|
||||
processing,
|
||||
done,
|
||||
doneCount,
|
||||
jellyfinUrl,
|
||||
});
|
||||
});
|
||||
|
||||
// ─── List ─────────────────────────────────────────────────────────────────────
|
||||
@@ -621,22 +877,59 @@ app.post("/approve-batch", async (c) => {
|
||||
return c.json({ ok: true, count });
|
||||
});
|
||||
|
||||
// ─── Auto-approve high-confidence ────────────────────────────────────────────
|
||||
// Approves every pending plan whose original language came from an authoritative
|
||||
// source (radarr/sonarr). Anything with low confidence keeps needing a human.
|
||||
app.post("/auto-approve", (c) => {
|
||||
// ─── Sort inbox ──────────────────────────────────────────────────────────────
|
||||
// Distributor: walks every unsorted plan and moves it to Queue (auto) or Review
|
||||
// (auto_heuristic / manual). Called by the user's "Auto Review" button and by
|
||||
// the rescan hook when auto_processing is enabled.
|
||||
app.post("/sort-inbox", (c) => {
|
||||
const db = getDb();
|
||||
const pending = db
|
||||
.prepare(
|
||||
"SELECT rp.*, mi.id as item_id FROM review_plans rp JOIN media_items mi ON mi.id = rp.item_id WHERE rp.status = 'pending' AND rp.is_noop = 0 AND rp.confidence = 'high'",
|
||||
)
|
||||
.all() as (ReviewPlan & { item_id: number })[];
|
||||
for (const plan of pending) {
|
||||
db.prepare("UPDATE review_plans SET status = 'approved', reviewed_at = datetime('now') WHERE id = ?").run(plan.id);
|
||||
const { item, streams, decisions } = loadItemDetail(db, plan.item_id);
|
||||
if (item) enqueueAudioJob(db, plan.item_id, buildCommand(item, streams, decisions));
|
||||
}
|
||||
return c.json({ ok: true, count: pending.length });
|
||||
const result = sortInbox(db);
|
||||
emitInboxSorted(result);
|
||||
return c.json({ ok: true, ...result });
|
||||
});
|
||||
|
||||
// ─── Approve all ready ───────────────────────────────────────────────────────
|
||||
// Bulk-approves every auto_heuristic-classified plan currently in Review.
|
||||
app.post("/approve-ready", (c) => {
|
||||
const db = getDb();
|
||||
const count = approveReady(db);
|
||||
return c.json({ ok: true, count });
|
||||
});
|
||||
|
||||
// ─── Unsort all (Review → Inbox) ─────────────────────────────────────────────
|
||||
// Flip every sorted, pending plan back to the Inbox so the distributor can
|
||||
// re-classify on the next "Auto Review". Symmetric backward counterpart of
|
||||
// sort-inbox; only touches plans currently visible in the Review column.
|
||||
export function unsortAll(db: ReturnType<typeof getDb>): number {
|
||||
const result = db
|
||||
.prepare("UPDATE review_plans SET sorted = 0 WHERE status = 'pending' AND is_noop = 0 AND sorted = 1")
|
||||
.run();
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
app.post("/unsort-all", (c) => {
|
||||
const count = unsortAll(getDb());
|
||||
return c.json({ ok: true, count });
|
||||
});
|
||||
|
||||
// ─── Reopen all done/errored (Done → Review) ─────────────────────────────────
|
||||
// Backward counterpart of the per-item reopen: flips every finished plan
|
||||
// back to pending and drops the finished job rows so the Done column clears.
|
||||
export function reopenAllDone(db: ReturnType<typeof getDb>): number {
|
||||
let count = 0;
|
||||
db.transaction(() => {
|
||||
const result = db
|
||||
.prepare("UPDATE review_plans SET status = 'pending', reviewed_at = NULL WHERE status IN ('done', 'error')")
|
||||
.run();
|
||||
count = result.changes;
|
||||
db.prepare("DELETE FROM jobs WHERE status IN ('done', 'error')").run();
|
||||
})();
|
||||
return count;
|
||||
}
|
||||
|
||||
app.post("/reopen-all", (c) => {
|
||||
const count = reopenAllDone(getDb());
|
||||
return c.json({ ok: true, count });
|
||||
});
|
||||
|
||||
// ─── Detail ───────────────────────────────────────────────────────────────────
|
||||
|
||||
+153
-2
@@ -23,6 +23,78 @@ export function parseScanLimit(raw: unknown): { ok: true; value: number | null }
|
||||
return { ok: true, value: n };
|
||||
}
|
||||
|
||||
type ScanStatusFilter = "all" | "pending" | "scanned" | "error";
|
||||
type ScanTypeFilter = "all" | "movie" | "episode";
|
||||
type ScanSourceFilter = "all" | "scan" | "webhook";
|
||||
|
||||
export interface ScanItemsQuery {
|
||||
offset: number;
|
||||
limit: number;
|
||||
search: string;
|
||||
status: ScanStatusFilter;
|
||||
type: ScanTypeFilter;
|
||||
source: ScanSourceFilter;
|
||||
}
|
||||
|
||||
function parsePositiveInt(raw: unknown, fallback: number): number {
|
||||
const n = typeof raw === "number" ? raw : Number(raw);
|
||||
if (!Number.isFinite(n)) return fallback;
|
||||
if (!Number.isInteger(n)) return fallback;
|
||||
return n;
|
||||
}
|
||||
|
||||
function clamp(n: number, min: number, max: number): number {
|
||||
if (n < min) return min;
|
||||
if (n > max) return max;
|
||||
return n;
|
||||
}
|
||||
|
||||
function parseOneOf<T extends readonly string[]>(raw: unknown, allowed: T, fallback: T[number]): T[number] {
|
||||
if (typeof raw !== "string") return fallback;
|
||||
const lowered = raw.toLowerCase();
|
||||
return (allowed as readonly string[]).includes(lowered) ? (lowered as T[number]) : fallback;
|
||||
}
|
||||
|
||||
export function parseScanItemsQuery(raw: Record<string, unknown>): ScanItemsQuery {
|
||||
const limit = clamp(parsePositiveInt(raw.limit, 50), 1, 200);
|
||||
const offset = Math.max(0, parsePositiveInt(raw.offset, 0));
|
||||
const search = typeof raw.q === "string" ? raw.q.trim() : "";
|
||||
return {
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
status: parseOneOf(raw.status, ["all", "pending", "scanned", "error"] as const, "all"),
|
||||
type: parseOneOf(raw.type, ["all", "movie", "episode"] as const, "all"),
|
||||
source: parseOneOf(raw.source, ["all", "scan", "webhook"] as const, "all"),
|
||||
};
|
||||
}
|
||||
|
||||
export function buildScanItemsWhere(query: ScanItemsQuery): { sql: string; args: string[] } {
|
||||
const clauses: string[] = [];
|
||||
const args: string[] = [];
|
||||
if (query.status !== "all") {
|
||||
clauses.push("scan_status = ?");
|
||||
args.push(query.status);
|
||||
}
|
||||
if (query.type !== "all") {
|
||||
clauses.push("lower(type) = ?");
|
||||
args.push(query.type);
|
||||
}
|
||||
if (query.source !== "all") {
|
||||
clauses.push("ingest_source = ?");
|
||||
args.push(query.source);
|
||||
}
|
||||
if (query.search.length > 0) {
|
||||
clauses.push("(lower(name) LIKE ? OR lower(file_path) LIKE ?)");
|
||||
const needle = `%${query.search.toLowerCase()}%`;
|
||||
args.push(needle, needle);
|
||||
}
|
||||
return {
|
||||
sql: clauses.length > 0 ? `WHERE ${clauses.join(" AND ")}` : "",
|
||||
args,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── State ────────────────────────────────────────────────────────────────────
|
||||
|
||||
let scanAbort: AbortController | null = null;
|
||||
@@ -60,12 +132,84 @@ app.get("/", (c) => {
|
||||
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
|
||||
.n;
|
||||
const recentItems = db
|
||||
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50")
|
||||
.all() as { name: string; type: string; scan_status: string; file_path: string }[];
|
||||
.prepare(
|
||||
"SELECT name, type, scan_status, file_path, last_scanned_at, ingest_source FROM media_items ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC LIMIT 5",
|
||||
)
|
||||
.all() as {
|
||||
name: string;
|
||||
type: string;
|
||||
scan_status: string;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: string | null;
|
||||
}[];
|
||||
|
||||
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
|
||||
});
|
||||
|
||||
app.get("/items", (c) => {
|
||||
const db = getDb();
|
||||
const query = parseScanItemsQuery({
|
||||
offset: c.req.query("offset"),
|
||||
limit: c.req.query("limit"),
|
||||
q: c.req.query("q"),
|
||||
status: c.req.query("status"),
|
||||
type: c.req.query("type"),
|
||||
source: c.req.query("source"),
|
||||
});
|
||||
const where = buildScanItemsWhere(query);
|
||||
const rows = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
|
||||
scan_status, original_language, orig_lang_source, container, file_size, file_path,
|
||||
last_scanned_at, ingest_source
|
||||
FROM media_items
|
||||
${where.sql}
|
||||
ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
|
||||
LIMIT ? OFFSET ?
|
||||
`,
|
||||
)
|
||||
.all(...where.args, query.limit, query.offset) as Array<{
|
||||
id: number;
|
||||
jellyfin_id: string;
|
||||
name: string;
|
||||
type: string;
|
||||
series_name: string | null;
|
||||
season_number: number | null;
|
||||
episode_number: number | null;
|
||||
scan_status: string;
|
||||
original_language: string | null;
|
||||
orig_lang_source: string | null;
|
||||
container: string | null;
|
||||
file_size: number | null;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: string | null;
|
||||
audio_codecs: string | null;
|
||||
}>;
|
||||
|
||||
// Audio codecs per item, batched into one query for the current page.
|
||||
// A per-row scalar subquery over media_streams was O(page × streams)
|
||||
// and could block the event loop for minutes on large libraries.
|
||||
if (rows.length > 0) {
|
||||
const placeholders = rows.map(() => "?").join(",");
|
||||
const codecRows = db
|
||||
.prepare(
|
||||
`SELECT item_id, GROUP_CONCAT(DISTINCT LOWER(codec)) AS codecs
|
||||
FROM media_streams
|
||||
WHERE item_id IN (${placeholders}) AND type = 'Audio' AND codec IS NOT NULL
|
||||
GROUP BY item_id`,
|
||||
)
|
||||
.all(...rows.map((r) => r.id)) as { item_id: number; codecs: string | null }[];
|
||||
const byItem = new Map(codecRows.map((r) => [r.item_id, r.codecs]));
|
||||
for (const row of rows) row.audio_codecs = byItem.get(row.id) ?? null;
|
||||
}
|
||||
|
||||
const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
|
||||
return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
|
||||
});
|
||||
|
||||
// ─── Start ────────────────────────────────────────────────────────────────────
|
||||
|
||||
app.post("/start", async (c) => {
|
||||
@@ -272,6 +416,13 @@ async function runScan(limit: number | null = null): Promise<void> {
|
||||
` language sources: radarr hits=${radarrHits} misses=${radarrMisses}, sonarr hits=${sonarrHits} misses=${sonarrMisses}, no provider id=${missingProviderIds}`,
|
||||
);
|
||||
emitSse("complete", { scanned: processed, total, errors });
|
||||
|
||||
if (getConfig("auto_processing") === "1") {
|
||||
const { sortInbox } = await import("./review");
|
||||
const { emitInboxSorted } = await import("./execute");
|
||||
const result = sortInbox(db);
|
||||
emitInboxSorted(result);
|
||||
}
|
||||
}
|
||||
|
||||
export default app;
|
||||
|
||||
@@ -114,6 +114,26 @@ app.post("/audio-languages", async (c) => {
|
||||
return c.json({ ok: true });
|
||||
});
|
||||
|
||||
// Toggle the auto-processing flag. When flipped on, trigger a one-shot
|
||||
// sort-inbox pass so existing Inbox items drain immediately without waiting
|
||||
// for the next scan.
|
||||
app.post("/auto-processing", async (c) => {
|
||||
const body = await c.req.json<{ enabled?: unknown }>().catch(() => ({ enabled: null }));
|
||||
if (typeof body.enabled !== "boolean") {
|
||||
return c.json({ ok: false, error: "enabled must be a boolean" }, 400);
|
||||
}
|
||||
setConfig("auto_processing", body.enabled ? "1" : "0");
|
||||
|
||||
if (body.enabled) {
|
||||
const { sortInbox } = await import("./review");
|
||||
const { emitInboxSorted } = await import("./execute");
|
||||
const result = sortInbox(getDb());
|
||||
emitInboxSorted(result);
|
||||
return c.json({ ok: true, enabled: true, ...result });
|
||||
}
|
||||
return c.json({ ok: true, enabled: false });
|
||||
});
|
||||
|
||||
app.get("/schedule", (c) => {
|
||||
return c.json(getScheduleConfig());
|
||||
});
|
||||
|
||||
@@ -55,6 +55,7 @@ export function getDb(): Database {
|
||||
_db = new Database(dbPath, { create: true });
|
||||
_db.exec(SCHEMA);
|
||||
migrate(_db);
|
||||
backfill(_db);
|
||||
seedDefaults(_db);
|
||||
return _db;
|
||||
}
|
||||
@@ -79,6 +80,30 @@ function migrate(db: Database): void {
|
||||
// RENAME COLUMN preserves values; both alters are no-ops on fresh DBs.
|
||||
alter("ALTER TABLE review_plans RENAME COLUMN webhook_verified TO verified");
|
||||
alter("ALTER TABLE review_plans DROP COLUMN verified");
|
||||
alter("ALTER TABLE media_items ADD COLUMN ingest_source TEXT NOT NULL DEFAULT 'scan'");
|
||||
alter("ALTER TABLE review_plans ADD COLUMN auto_class TEXT");
|
||||
alter("ALTER TABLE review_plans ADD COLUMN sorted INTEGER NOT NULL DEFAULT 0");
|
||||
alter("ALTER TABLE review_plans DROP COLUMN confidence");
|
||||
// Indexes for new columns — must run after the columns exist on existing DBs
|
||||
alter("CREATE INDEX IF NOT EXISTS idx_review_plans_sorted ON review_plans(sorted)");
|
||||
alter("CREATE INDEX IF NOT EXISTS idx_review_plans_auto_class ON review_plans(auto_class)");
|
||||
}
|
||||
|
||||
/**
|
||||
* One-shot backfill for the inbox/auto_class rollout (2026-04-18):
|
||||
*
|
||||
* - Existing plans were already past the Inbox stage in the old world. Set
|
||||
* sorted = 1 on every pre-existing row so they keep showing up where the
|
||||
* user last saw them; dumping them into the new Inbox column on upgrade
|
||||
* would look like data loss.
|
||||
* - auto_class starts NULL on upgraded rows. The analyzer's next run over
|
||||
* each item (reanalyze, rescan, or the explicit /sort-inbox pass) will
|
||||
* populate it. Until then, the Review column renders a neutral badge.
|
||||
*
|
||||
* Idempotent: the WHERE clause makes repeated calls no-ops.
|
||||
*/
|
||||
function backfill(db: Database): void {
|
||||
db.prepare("UPDATE review_plans SET sorted = 1 WHERE sorted = 0 AND auto_class IS NULL").run();
|
||||
}
|
||||
|
||||
function seedDefaults(db: Database): void {
|
||||
|
||||
+5
-2
@@ -34,9 +34,10 @@ CREATE TABLE IF NOT EXISTS media_items (
|
||||
scan_status TEXT NOT NULL DEFAULT 'pending',
|
||||
scan_error TEXT,
|
||||
last_scanned_at TEXT,
|
||||
ingest_source TEXT NOT NULL DEFAULT 'scan',
|
||||
last_executed_at TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS media_streams (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
@@ -64,7 +65,8 @@ CREATE TABLE IF NOT EXISTS review_plans (
|
||||
item_id INTEGER NOT NULL UNIQUE REFERENCES media_items(id) ON DELETE CASCADE,
|
||||
status TEXT NOT NULL DEFAULT 'pending',
|
||||
is_noop INTEGER NOT NULL DEFAULT 0,
|
||||
confidence TEXT NOT NULL DEFAULT 'low',
|
||||
auto_class TEXT,
|
||||
sorted INTEGER NOT NULL DEFAULT 0,
|
||||
apple_compat TEXT,
|
||||
job_type TEXT NOT NULL DEFAULT 'copy',
|
||||
subs_extracted INTEGER NOT NULL DEFAULT 0,
|
||||
@@ -134,6 +136,7 @@ export const DEFAULT_CONFIG: Record<string, string> = {
|
||||
sonarr_api_key: "",
|
||||
sonarr_enabled: "0",
|
||||
audio_languages: "[]",
|
||||
auto_processing: "0",
|
||||
|
||||
scan_running: "0",
|
||||
job_sleep_seconds: "0",
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import type { MediaStream } from "../../types";
|
||||
import type { MediaItem, MediaStream } from "../../types";
|
||||
import { analyzeItem } from "../analyzer";
|
||||
|
||||
type OrigLangSource = MediaItem["orig_lang_source"];
|
||||
|
||||
type StreamOverride = Partial<MediaStream> & Pick<MediaStream, "id" | "type" | "stream_index">;
|
||||
|
||||
function stream(o: StreamOverride): MediaStream {
|
||||
@@ -24,7 +26,11 @@ function stream(o: StreamOverride): MediaStream {
|
||||
};
|
||||
}
|
||||
|
||||
const ITEM_DEFAULTS = { needs_review: 0 as number, container: "mkv" as string | null };
|
||||
const ITEM_DEFAULTS = {
|
||||
needs_review: 0 as number,
|
||||
container: "mkv" as string | null,
|
||||
orig_lang_source: null as OrigLangSource,
|
||||
};
|
||||
|
||||
describe("analyzeItem — audio keep rules", () => {
|
||||
test("keeps only OG + configured languages, drops others", () => {
|
||||
@@ -178,9 +184,11 @@ describe("analyzeItem — subtitles & is_noop", () => {
|
||||
describe("analyzeItem — transcode targets", () => {
|
||||
test("DTS on mp4 → transcode to eac3", () => {
|
||||
const streams = [stream({ id: 1, type: "Audio", stream_index: 0, codec: "dts", language: "eng" })];
|
||||
const result = analyzeItem({ original_language: "eng", needs_review: 0, container: "mp4" }, streams, {
|
||||
audioLanguages: [],
|
||||
});
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: "eng", needs_review: 0, container: "mp4" },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.decisions[0].transcode_codec).toBe("eac3");
|
||||
expect(result.job_type).toBe("transcode");
|
||||
expect(result.is_noop).toBe(false);
|
||||
@@ -188,9 +196,11 @@ describe("analyzeItem — transcode targets", () => {
|
||||
|
||||
test("AAC passes through without transcode", () => {
|
||||
const streams = [stream({ id: 1, type: "Audio", stream_index: 0, codec: "aac", language: "eng" })];
|
||||
const result = analyzeItem({ original_language: "eng", needs_review: 0, container: "mp4" }, streams, {
|
||||
audioLanguages: [],
|
||||
});
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: "eng", needs_review: 0, container: "mp4" },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.decisions[0].transcode_codec).toBe(null);
|
||||
expect(result.job_type).toBe("copy");
|
||||
});
|
||||
@@ -346,3 +356,141 @@ describe("analyzeItem — one audio track per language", () => {
|
||||
expect(result.is_noop).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("analyzeItem — auto_class classification", () => {
|
||||
const AUTHORITATIVE = {
|
||||
...ITEM_DEFAULTS,
|
||||
original_language: "eng" as string | null,
|
||||
orig_lang_source: "radarr" as OrigLangSource,
|
||||
needs_review: 0,
|
||||
};
|
||||
|
||||
test("one OG language track → auto", () => {
|
||||
const streams = [
|
||||
stream({ id: 1, type: "Video", stream_index: 0, codec: "h264" }),
|
||||
stream({ id: 2, type: "Audio", stream_index: 1, codec: "eac3", language: "eng", channels: 6 }),
|
||||
];
|
||||
const result = analyzeItem(AUTHORITATIVE, streams, { audioLanguages: [] });
|
||||
expect(result.auto_class).toBe("auto");
|
||||
});
|
||||
|
||||
test("OG + additional configured language, both kept → auto", () => {
|
||||
const streams = [
|
||||
stream({ id: 1, type: "Video", stream_index: 0, codec: "h264" }),
|
||||
stream({ id: 2, type: "Audio", stream_index: 1, codec: "eac3", language: "eng", channels: 6 }),
|
||||
stream({ id: 3, type: "Audio", stream_index: 2, codec: "eac3", language: "deu", channels: 6 }),
|
||||
];
|
||||
const result = analyzeItem(AUTHORITATIVE, streams, { audioLanguages: ["deu"] });
|
||||
expect(result.auto_class).toBe("auto");
|
||||
});
|
||||
|
||||
test("two English tracks resolved by channel count → auto", () => {
|
||||
const streams = [
|
||||
stream({ id: 1, type: "Video", stream_index: 0, codec: "h264" }),
|
||||
stream({ id: 2, type: "Audio", stream_index: 1, codec: "dts", language: "eng", channels: 6, title: "English 5.1" }),
|
||||
stream({
|
||||
id: 3,
|
||||
type: "Audio",
|
||||
stream_index: 2,
|
||||
codec: "ac3",
|
||||
language: "eng",
|
||||
channels: 2,
|
||||
title: "English Stereo",
|
||||
}),
|
||||
];
|
||||
const result = analyzeItem(AUTHORITATIVE, streams, { audioLanguages: [] });
|
||||
expect(result.auto_class).toBe("auto");
|
||||
});
|
||||
|
||||
test("commentary track dropped by title heuristic → auto_heuristic", () => {
|
||||
const streams = [
|
||||
stream({ id: 1, type: "Video", stream_index: 0, codec: "h264" }),
|
||||
stream({ id: 2, type: "Audio", stream_index: 1, codec: "eac3", language: "eng", channels: 6, title: "English 5.1" }),
|
||||
stream({
|
||||
id: 3,
|
||||
type: "Audio",
|
||||
stream_index: 2,
|
||||
codec: "ac3",
|
||||
language: "eng",
|
||||
channels: 2,
|
||||
title: "Director's Commentary",
|
||||
}),
|
||||
];
|
||||
const result = analyzeItem(AUTHORITATIVE, streams, { audioLanguages: [] });
|
||||
expect(result.auto_class).toBe("auto_heuristic");
|
||||
});
|
||||
|
||||
test("OG language unknown → manual", () => {
|
||||
const streams = [stream({ id: 1, type: "Audio", stream_index: 0, codec: "eac3", language: "eng" })];
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: null, orig_lang_source: null, needs_review: 1 },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.auto_class).toBe("manual");
|
||||
});
|
||||
|
||||
test("OG known but not present in any audio track → manual", () => {
|
||||
const streams = [stream({ id: 1, type: "Audio", stream_index: 0, codec: "eac3", language: "deu" })];
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: "eng", orig_lang_source: "radarr", needs_review: 0 },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.auto_class).toBe("manual");
|
||||
});
|
||||
|
||||
test("kept audio track with null language tag → manual", () => {
|
||||
const streams = [
|
||||
stream({ id: 1, type: "Audio", stream_index: 0, codec: "eac3", language: "eng" }),
|
||||
stream({ id: 2, type: "Audio", stream_index: 1, codec: "eac3", language: null }),
|
||||
];
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: "eng", orig_lang_source: "radarr", needs_review: 0 },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.auto_class).toBe("manual");
|
||||
});
|
||||
|
||||
test("needs_review=1 → manual even with known OG", () => {
|
||||
const streams = [stream({ id: 1, type: "Audio", stream_index: 0, codec: "eac3", language: "eng" })];
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: "eng", orig_lang_source: "radarr", needs_review: 1 },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.auto_class).toBe("manual");
|
||||
});
|
||||
|
||||
test("non-OG track with coincidental commentary-ish title → auto (not auto_heuristic)", () => {
|
||||
// OG is English. German track is removed for LANGUAGE reasons, not title.
|
||||
// Its title coincidentally contains 'commentary', but that should not
|
||||
// upgrade the classification to auto_heuristic.
|
||||
const streams = [
|
||||
stream({ id: 1, type: "Video", stream_index: 0, codec: "h264" }),
|
||||
stream({ id: 2, type: "Audio", stream_index: 1, codec: "eac3", language: "eng", channels: 6 }),
|
||||
stream({
|
||||
id: 3,
|
||||
type: "Audio",
|
||||
stream_index: 2,
|
||||
codec: "ac3",
|
||||
language: "deu",
|
||||
channels: 2,
|
||||
title: "German Commentary Audio",
|
||||
}),
|
||||
];
|
||||
const result = analyzeItem(AUTHORITATIVE, streams, { audioLanguages: [] });
|
||||
expect(result.auto_class).toBe("auto");
|
||||
});
|
||||
|
||||
test("orig_lang_source=jellyfin is not authoritative → manual", () => {
|
||||
const streams = [stream({ id: 1, type: "Audio", stream_index: 0, codec: "eac3", language: "eng" })];
|
||||
const result = analyzeItem(
|
||||
{ ...ITEM_DEFAULTS, original_language: "eng", orig_lang_source: "jellyfin", needs_review: 0 },
|
||||
streams,
|
||||
{ audioLanguages: [] },
|
||||
);
|
||||
expect(result.auto_class).toBe("manual");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,143 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import type { JellyfinItem, MediaItem } from "../../types";
|
||||
import type { RescanConfig } from "../rescan";
|
||||
import { upsertJellyfinItem } from "../rescan";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
const BASE_CFG: RescanConfig = {
|
||||
audioLanguages: [],
|
||||
radarr: null,
|
||||
sonarr: null,
|
||||
radarrLibrary: null,
|
||||
sonarrLibrary: null,
|
||||
};
|
||||
|
||||
function germanDubbedMovie(over: Partial<JellyfinItem> = {}): JellyfinItem {
|
||||
return {
|
||||
Id: "jf-m1",
|
||||
Type: "Movie",
|
||||
Name: "Some Dubbed Movie",
|
||||
Path: "/movies/Some.mkv",
|
||||
Container: "mkv",
|
||||
ProviderIds: { Tmdb: "12345" },
|
||||
MediaStreams: [
|
||||
{ Type: "Video", Index: 0, Codec: "h264" },
|
||||
// German audio flagged default — Jellyfin's guess would return "ger"/"deu".
|
||||
{ Type: "Audio", Index: 1, Codec: "aac", Language: "ger", IsDefault: true },
|
||||
{ Type: "Audio", Index: 2, Codec: "aac", Language: "eng" },
|
||||
],
|
||||
...over,
|
||||
};
|
||||
}
|
||||
|
||||
function episodeWithSeriesTvdb(over: Partial<JellyfinItem> = {}): JellyfinItem {
|
||||
return {
|
||||
Id: "jf-ep1",
|
||||
Type: "Episode",
|
||||
Name: "S02E02",
|
||||
Path: "/tv/Show/S02E02.mkv",
|
||||
Container: "mkv",
|
||||
SeriesName: "Some Show",
|
||||
SeriesId: "series-1",
|
||||
ParentIndexNumber: 2,
|
||||
IndexNumber: 2,
|
||||
ProviderIds: { Tvdb: "EPISODE_TVDB_9999" },
|
||||
SeriesProviderIds: { Tvdb: "SERIES_TVDB_1234" },
|
||||
MediaStreams: [
|
||||
{ Type: "Video", Index: 0, Codec: "h264" },
|
||||
{ Type: "Audio", Index: 1, Codec: "aac", Language: "ita", IsDefault: true },
|
||||
],
|
||||
...over,
|
||||
};
|
||||
}
|
||||
|
||||
function getItem(db: Database, jellyfinId: string): MediaItem {
|
||||
return db.prepare("SELECT * FROM media_items WHERE jellyfin_id = ?").get(jellyfinId) as MediaItem;
|
||||
}
|
||||
|
||||
describe("upsertJellyfinItem — manual override preservation", () => {
|
||||
test("preserves orig_lang_source='manual' across rescan (Movie)", async () => {
|
||||
const db = makeDb();
|
||||
await upsertJellyfinItem(db, germanDubbedMovie(), BASE_CFG);
|
||||
|
||||
// User pins it to English via /api/review/:id/language.
|
||||
db
|
||||
.prepare(
|
||||
"UPDATE media_items SET original_language='eng', orig_lang_source='manual', needs_review=0 WHERE jellyfin_id=?",
|
||||
)
|
||||
.run("jf-m1");
|
||||
|
||||
// Rescan re-runs upsertJellyfinItem with the SAME Jellyfin payload
|
||||
// (default audio still German). Without the guard, the ON CONFLICT
|
||||
// clause would blast 'eng'/'manual' back to 'ger'/'jellyfin'.
|
||||
await upsertJellyfinItem(db, germanDubbedMovie(), BASE_CFG);
|
||||
|
||||
const row = getItem(db, "jf-m1");
|
||||
expect(row.original_language).toBe("eng");
|
||||
expect(row.orig_lang_source).toBe("manual");
|
||||
expect(row.needs_review).toBe(0);
|
||||
});
|
||||
|
||||
test("preserves orig_lang_source='manual' across rescan (Episode)", async () => {
|
||||
const db = makeDb();
|
||||
await upsertJellyfinItem(db, episodeWithSeriesTvdb(), BASE_CFG);
|
||||
|
||||
db
|
||||
.prepare(
|
||||
"UPDATE media_items SET original_language='eng', orig_lang_source='manual', needs_review=0 WHERE jellyfin_id=?",
|
||||
)
|
||||
.run("jf-ep1");
|
||||
|
||||
await upsertJellyfinItem(db, episodeWithSeriesTvdb(), BASE_CFG);
|
||||
|
||||
const row = getItem(db, "jf-ep1");
|
||||
expect(row.original_language).toBe("eng");
|
||||
expect(row.orig_lang_source).toBe("manual");
|
||||
});
|
||||
|
||||
test("falls through to jellyfin guess when no manual override exists", async () => {
|
||||
const db = makeDb();
|
||||
await upsertJellyfinItem(db, germanDubbedMovie(), BASE_CFG);
|
||||
|
||||
const row = getItem(db, "jf-m1");
|
||||
// Default audio is German so the guess lands on the German tag.
|
||||
// The raw tag is "ger" which normalizes to "deu" in our store.
|
||||
expect(row.orig_lang_source).toBe("jellyfin");
|
||||
expect(row.original_language).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe("upsertJellyfinItem — episode tvdb_id resolution", () => {
|
||||
test("uses SeriesProviderIds.Tvdb for episodes, not the episode-level Tvdb", async () => {
|
||||
const db = makeDb();
|
||||
await upsertJellyfinItem(db, episodeWithSeriesTvdb(), BASE_CFG);
|
||||
const row = getItem(db, "jf-ep1");
|
||||
expect(row.tvdb_id).toBe("SERIES_TVDB_1234");
|
||||
});
|
||||
|
||||
test("falls back to ProviderIds.Tvdb when SeriesProviderIds absent (older Jellyfin)", async () => {
|
||||
const db = makeDb();
|
||||
const legacy = episodeWithSeriesTvdb({ SeriesProviderIds: undefined });
|
||||
await upsertJellyfinItem(db, legacy, BASE_CFG);
|
||||
const row = getItem(db, "jf-ep1");
|
||||
expect(row.tvdb_id).toBe("EPISODE_TVDB_9999");
|
||||
});
|
||||
|
||||
test("movies still use ProviderIds.Tvdb directly", async () => {
|
||||
const db = makeDb();
|
||||
const movie = germanDubbedMovie({ ProviderIds: { Tvdb: "MOVIE_TVDB_1" } });
|
||||
await upsertJellyfinItem(db, movie, BASE_CFG);
|
||||
const row = getItem(db, "jf-m1");
|
||||
expect(row.tvdb_id).toBe("MOVIE_TVDB_1");
|
||||
});
|
||||
});
|
||||
@@ -3,6 +3,8 @@ import { computeAppleCompat, isAppleCompatible, transcodeTarget } from "./apple-
|
||||
import { isExtractableSubtitle } from "./ffmpeg";
|
||||
import { normalizeLanguage } from "./jellyfin";
|
||||
|
||||
const AUTHORITATIVE_ORIG_SOURCES = new Set<string>(["radarr", "sonarr", "manual"]);
|
||||
|
||||
export interface AnalyzerConfig {
|
||||
audioLanguages: string[]; // additional languages to keep (after OG)
|
||||
}
|
||||
@@ -17,7 +19,7 @@ export interface AnalyzerConfig {
|
||||
* at all.
|
||||
*/
|
||||
export function analyzeItem(
|
||||
item: Pick<MediaItem, "original_language" | "needs_review" | "container">,
|
||||
item: Pick<MediaItem, "original_language" | "orig_lang_source" | "needs_review" | "container">,
|
||||
streams: MediaStream[],
|
||||
config: AnalyzerConfig,
|
||||
): PlanResult {
|
||||
@@ -29,6 +31,10 @@ export function analyzeItem(
|
||||
return { stream_id: s.id, action, target_index: null, transcode_codec: null };
|
||||
});
|
||||
|
||||
// Snapshot actions before dedup so we can distinguish language-driven removes
|
||||
// from commentary-title-driven removes when computing commentaryHeuristicFired.
|
||||
const decisionsBeforeDedup = new Map<number, "keep" | "remove">(decisions.map((d) => [d.stream_id, d.action]));
|
||||
|
||||
// Second pass: within each kept-language group, drop commentary/AD tracks
|
||||
// and alternate formats so we end up with exactly one audio stream per
|
||||
// language. The user doesn't need 2× English (main + director's
|
||||
@@ -109,7 +115,36 @@ export function analyzeItem(
|
||||
notes.push(`${nonExtractable.length} subtitle(s) dropped: ${summary} — not extractable to sidecar`);
|
||||
}
|
||||
|
||||
return { is_noop, has_subs: hasSubs, confidence: "low", apple_compat, job_type, decisions, notes };
|
||||
const origLangSource = item.orig_lang_source ?? null;
|
||||
const authoritativeOg =
|
||||
!!origLang && !!origLangSource && AUTHORITATIVE_ORIG_SOURCES.has(origLangSource) && item.needs_review === 0;
|
||||
|
||||
const keptAudioLanguages = keptAudioStreams.map((s) => (s.language ? normalizeLanguage(s.language) : null));
|
||||
const ogPresent = !!origLang && keptAudioLanguages.includes(origLang);
|
||||
const everyKeptHasLanguage = keptAudioStreams.length > 0 && keptAudioLanguages.every((l) => l != null);
|
||||
|
||||
// Only count as heuristic-fired when the commentary regex itself CAUSED the
|
||||
// removal: track was "keep" after language-based decideAction, then flipped
|
||||
// to "remove" by deduplicateAudioByLanguage because of its title/flag.
|
||||
// A track removed for LANGUAGE reasons (keep→remove never happened) should
|
||||
// not upgrade the classification even if its title coincidentally matches.
|
||||
const commentaryHeuristicFired = decisions.some((d) => {
|
||||
const before = decisionsBeforeDedup.get(d.stream_id);
|
||||
if (before !== "keep" || d.action !== "remove") return false;
|
||||
const s = streams.find((str) => str.id === d.stream_id);
|
||||
return !!s && isCommentaryOrAuxiliary(s);
|
||||
});
|
||||
|
||||
let auto_class: PlanResult["auto_class"];
|
||||
if (!authoritativeOg || !ogPresent || !everyKeptHasLanguage) {
|
||||
auto_class = "manual";
|
||||
} else if (commentaryHeuristicFired) {
|
||||
auto_class = "auto_heuristic";
|
||||
} else {
|
||||
auto_class = "auto";
|
||||
}
|
||||
|
||||
return { is_noop, has_subs: hasSubs, auto_class, apple_compat, job_type, decisions, notes };
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -43,6 +43,7 @@ const ITEM_FIELDS = [
|
||||
"MediaStreams",
|
||||
"Path",
|
||||
"ProviderIds",
|
||||
"SeriesProviderIds",
|
||||
"OriginalTitle",
|
||||
"ProductionYear",
|
||||
"Size",
|
||||
|
||||
+38
-19
@@ -18,7 +18,6 @@ export interface RescanResult {
|
||||
origLang: string | null;
|
||||
origLangSource: string | null;
|
||||
needsReview: number;
|
||||
confidence: "high" | "low";
|
||||
isNoop: boolean;
|
||||
radarrHit: boolean;
|
||||
radarrMiss: boolean;
|
||||
@@ -48,17 +47,34 @@ export async function upsertJellyfinItem(
|
||||
const itemPath: string = jellyfinItem.Path;
|
||||
|
||||
const providerIds = jellyfinItem.ProviderIds ?? {};
|
||||
const seriesProviderIds = jellyfinItem.SeriesProviderIds ?? {};
|
||||
const imdbId = providerIds.Imdb ?? null;
|
||||
const tmdbId = providerIds.Tmdb ?? null;
|
||||
const tvdbId = providerIds.Tvdb ?? null;
|
||||
// Episodes: ProviderIds.Tvdb is the EPISODE's tvdb id; Sonarr is keyed by
|
||||
// SERIES tvdb. Prefer SeriesProviderIds.Tvdb when present (Jellyfin 10.9+);
|
||||
// fall back to ProviderIds.Tvdb for Movies and older Jellyfins.
|
||||
const tvdbId =
|
||||
jellyfinItem.Type === "Episode" ? (seriesProviderIds.Tvdb ?? providerIds.Tvdb ?? null) : (providerIds.Tvdb ?? null);
|
||||
|
||||
// Preserve manual overrides: if the user has already pinned a language via
|
||||
// /series/:key/language or /:id/language, scans must not blast it back to
|
||||
// the audio-track guess. Read the prior row BEFORE running the guesses.
|
||||
const existing = db
|
||||
.prepare("SELECT original_language, orig_lang_source FROM media_items WHERE jellyfin_id = ?")
|
||||
.get(jellyfinItem.Id) as { original_language: string | null; orig_lang_source: string | null } | undefined;
|
||||
const hasManualOverride = existing?.orig_lang_source === "manual";
|
||||
|
||||
// See scan.ts for the "8 Mile got labelled Turkish" rationale. Jellyfin's
|
||||
// first-audio-track guess is an unverified starting point.
|
||||
const jellyfinGuess = extractOriginalLanguage(jellyfinItem);
|
||||
let origLang: string | null = jellyfinGuess;
|
||||
let origLangSource: string | null = jellyfinGuess ? "jellyfin" : null;
|
||||
let origLang: string | null = hasManualOverride ? (existing?.original_language ?? null) : jellyfinGuess;
|
||||
let origLangSource: "jellyfin" | "radarr" | "sonarr" | "manual" | null = hasManualOverride
|
||||
? "manual"
|
||||
: jellyfinGuess
|
||||
? "jellyfin"
|
||||
: null;
|
||||
let needsReview = origLang ? 0 : 1;
|
||||
let authoritative = false;
|
||||
let authoritative = hasManualOverride;
|
||||
let externalRaw: unknown = null;
|
||||
|
||||
const result: RescanResult = {
|
||||
@@ -66,7 +82,6 @@ export async function upsertJellyfinItem(
|
||||
origLang: null,
|
||||
origLangSource: null,
|
||||
needsReview: 1,
|
||||
confidence: "low",
|
||||
isNoop: false,
|
||||
radarrHit: false,
|
||||
radarrMiss: false,
|
||||
@@ -75,7 +90,7 @@ export async function upsertJellyfinItem(
|
||||
missingProviderId: false,
|
||||
};
|
||||
|
||||
if (jellyfinItem.Type === "Movie" && cfg.radarr && cfg.radarrLibrary) {
|
||||
if (!hasManualOverride && jellyfinItem.Type === "Movie" && cfg.radarr && cfg.radarrLibrary) {
|
||||
if (!tmdbId && !imdbId) {
|
||||
result.missingProviderId = true;
|
||||
} else {
|
||||
@@ -99,7 +114,7 @@ export async function upsertJellyfinItem(
|
||||
}
|
||||
}
|
||||
|
||||
if (jellyfinItem.Type === "Episode" && cfg.sonarr && cfg.sonarrLibrary) {
|
||||
if (!hasManualOverride && jellyfinItem.Type === "Episode" && cfg.sonarr && cfg.sonarrLibrary) {
|
||||
if (!tvdbId) {
|
||||
result.missingProviderId = true;
|
||||
} else {
|
||||
@@ -117,9 +132,7 @@ export async function upsertJellyfinItem(
|
||||
}
|
||||
}
|
||||
|
||||
let confidence: "high" | "low" = "low";
|
||||
if (origLang && authoritative && !needsReview) confidence = "high";
|
||||
else if (origLang && !authoritative) needsReview = 1;
|
||||
if (origLang && !authoritative && !needsReview) needsReview = 1;
|
||||
|
||||
const jellyfinRaw = JSON.stringify(jellyfinItem);
|
||||
const externalRawJson = externalRaw ? JSON.stringify(externalRaw) : null;
|
||||
@@ -135,8 +148,8 @@ export async function upsertJellyfinItem(
|
||||
original_language, orig_lang_source, needs_review,
|
||||
imdb_id, tmdb_id, tvdb_id,
|
||||
jellyfin_raw, external_raw,
|
||||
scan_status, last_scanned_at${opts.executed ? ", last_executed_at" : ""}
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now')${opts.executed ? ", datetime('now')" : ""})
|
||||
scan_status, last_scanned_at, ingest_source${opts.executed ? ", last_executed_at" : ""}
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now'), ?${opts.executed ? ", datetime('now')" : ""})
|
||||
ON CONFLICT(jellyfin_id) DO UPDATE SET
|
||||
type = excluded.type, name = excluded.name, original_title = excluded.original_title,
|
||||
series_name = excluded.series_name, series_jellyfin_id = excluded.series_jellyfin_id,
|
||||
@@ -148,7 +161,8 @@ export async function upsertJellyfinItem(
|
||||
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
|
||||
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
|
||||
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
|
||||
scan_status = 'scanned', last_scanned_at = datetime('now')
|
||||
scan_status = 'scanned', last_scanned_at = datetime('now'),
|
||||
ingest_source = excluded.ingest_source
|
||||
${opts.executed ? ", last_executed_at = datetime('now')" : ""}
|
||||
`);
|
||||
upsertItem.run(
|
||||
@@ -174,6 +188,7 @@ export async function upsertJellyfinItem(
|
||||
tvdbId,
|
||||
jellyfinRaw,
|
||||
externalRawJson,
|
||||
source,
|
||||
);
|
||||
|
||||
const itemRow = db.prepare("SELECT id FROM media_items WHERE jellyfin_id = ?").get(jellyfinItem.Id) as {
|
||||
@@ -215,7 +230,12 @@ export async function upsertJellyfinItem(
|
||||
|
||||
const streams = db.prepare("SELECT * FROM media_streams WHERE item_id = ?").all(itemId) as MediaStream[];
|
||||
const analysis = analyzeItem(
|
||||
{ original_language: origLang, needs_review: needsReview, container: jellyfinItem.Container ?? null },
|
||||
{
|
||||
original_language: origLang,
|
||||
orig_lang_source: origLangSource,
|
||||
needs_review: needsReview,
|
||||
container: jellyfinItem.Container ?? null,
|
||||
},
|
||||
streams,
|
||||
{ audioLanguages: cfg.audioLanguages },
|
||||
);
|
||||
@@ -231,7 +251,7 @@ export async function upsertJellyfinItem(
|
||||
// else keep current status
|
||||
db
|
||||
.prepare(`
|
||||
INSERT INTO review_plans (item_id, status, is_noop, confidence, apple_compat, job_type, notes)
|
||||
INSERT INTO review_plans (item_id, status, is_noop, auto_class, apple_compat, job_type, notes)
|
||||
VALUES (?, 'pending', ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(item_id) DO UPDATE SET
|
||||
status = CASE
|
||||
@@ -242,7 +262,7 @@ export async function upsertJellyfinItem(
|
||||
ELSE review_plans.status
|
||||
END,
|
||||
is_noop = excluded.is_noop,
|
||||
confidence = excluded.confidence,
|
||||
auto_class = excluded.auto_class,
|
||||
apple_compat = excluded.apple_compat,
|
||||
job_type = excluded.job_type,
|
||||
notes = excluded.notes
|
||||
@@ -250,7 +270,7 @@ export async function upsertJellyfinItem(
|
||||
.run(
|
||||
itemId,
|
||||
analysis.is_noop ? 1 : 0,
|
||||
confidence,
|
||||
analysis.auto_class,
|
||||
analysis.apple_compat,
|
||||
analysis.job_type,
|
||||
analysis.notes.length > 0 ? analysis.notes.join("\n") : null,
|
||||
@@ -273,7 +293,6 @@ export async function upsertJellyfinItem(
|
||||
result.origLang = origLang;
|
||||
result.origLangSource = origLangSource;
|
||||
result.needsReview = needsReview;
|
||||
result.confidence = confidence;
|
||||
result.isNoop = analysis.is_noop;
|
||||
})();
|
||||
|
||||
|
||||
@@ -85,7 +85,10 @@ export async function getOriginalLanguage(
|
||||
cfg,
|
||||
"lookup/tvdb",
|
||||
);
|
||||
const fromLookup = lookup?.find((s) => String(s.tvdbId ?? "") === String(tvdbId)) ?? lookup?.[0];
|
||||
// Only trust an exact tvdbId match. Falling back to lookup[0] silently
|
||||
// attaches whatever Sonarr returned first (often a fuzzy title match) and
|
||||
// caused shows to be labelled with completely unrelated languages.
|
||||
const fromLookup = lookup?.find((s) => String(s.tvdbId ?? "") === String(tvdbId));
|
||||
if (fromLookup?.originalLanguage) return nameToIso(fromLookup.originalLanguage.name);
|
||||
|
||||
return null;
|
||||
|
||||
+4
-2
@@ -59,7 +59,8 @@ export interface ReviewPlan {
|
||||
item_id: number;
|
||||
status: "pending" | "approved" | "skipped" | "done" | "error";
|
||||
is_noop: number;
|
||||
confidence: "high" | "low";
|
||||
auto_class: "auto" | "auto_heuristic" | "manual" | null;
|
||||
sorted: number;
|
||||
apple_compat: "direct_play" | "remux" | "audio_transcode" | null;
|
||||
job_type: "copy" | "transcode";
|
||||
subs_extracted: number;
|
||||
@@ -113,7 +114,7 @@ export interface StreamWithDecision extends MediaStream {
|
||||
export interface PlanResult {
|
||||
is_noop: boolean;
|
||||
has_subs: boolean;
|
||||
confidence: "high" | "low";
|
||||
auto_class: "auto" | "auto_heuristic" | "manual";
|
||||
apple_compat: "direct_play" | "remux" | "audio_transcode" | null;
|
||||
job_type: "copy" | "transcode";
|
||||
decisions: Array<{
|
||||
@@ -163,6 +164,7 @@ export interface JellyfinItem {
|
||||
DateLastRefreshed?: string;
|
||||
MediaStreams?: JellyfinMediaStream[];
|
||||
ProviderIds?: Record<string, string>;
|
||||
SeriesProviderIds?: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface JellyfinUser {
|
||||
|
||||
@@ -6,46 +6,65 @@ export interface ColumnAction {
|
||||
disabled?: boolean;
|
||||
danger?: boolean;
|
||||
primary?: boolean;
|
||||
title?: string;
|
||||
}
|
||||
|
||||
interface ColumnShellProps {
|
||||
title: string;
|
||||
count: ReactNode;
|
||||
actions?: ColumnAction[];
|
||||
subtitle?: ReactNode;
|
||||
backward?: ColumnAction;
|
||||
skip?: ColumnAction;
|
||||
forward?: ColumnAction;
|
||||
children: ReactNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Equal-width pipeline column with a header (title + count + optional action buttons)
|
||||
* and a scrolling body. All four pipeline columns share this shell so widths and
|
||||
* header layout stay consistent.
|
||||
*/
|
||||
export function ColumnShell({ title, count, actions, children }: ColumnShellProps) {
|
||||
function actionClass(a: ColumnAction): string {
|
||||
const base =
|
||||
"text-xs px-2 py-0.5 rounded border whitespace-nowrap disabled:opacity-40 disabled:cursor-not-allowed";
|
||||
if (a.danger) return `${base} border-red-200 text-red-700 hover:bg-red-50`;
|
||||
if (a.primary) return `${base} border-blue-600 bg-blue-600 text-white hover:bg-blue-700`;
|
||||
return `${base} border-gray-300 text-gray-600 hover:bg-gray-100`;
|
||||
}
|
||||
|
||||
function ActionButton({ action }: { action: ColumnAction }) {
|
||||
return (
|
||||
<div className="flex flex-col flex-1 basis-0 min-w-64 min-h-0 bg-gray-50 rounded-lg">
|
||||
<div className="flex items-center justify-between gap-2 px-3 py-2 border-b">
|
||||
<button
|
||||
type="button"
|
||||
onClick={action.onClick}
|
||||
disabled={action.disabled}
|
||||
title={action.title}
|
||||
className={actionClass(action)}
|
||||
>
|
||||
{action.label}
|
||||
</button>
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Equal-width pipeline column with a three-row header (title + count, then a
|
||||
* subtitle/help row, then a three-slot button row: backward left · skip middle
|
||||
* · forward right) and a scrolling body. All five pipeline columns share this
|
||||
* shell so widths, spacing, and the left/middle/right button layout stay
|
||||
* consistent — which in turn makes the pipeline direction readable at a glance.
|
||||
*/
|
||||
export function ColumnShell({ title, count, subtitle, backward, skip, forward, children }: ColumnShellProps) {
|
||||
const hasButtons = !!(backward || skip || forward);
|
||||
return (
|
||||
<div className="flex flex-col flex-1 basis-0 min-w-80 min-h-0 bg-gray-50 rounded-lg">
|
||||
<div className="flex flex-col gap-1.5 px-3 py-2 border-b">
|
||||
<span className="font-medium text-sm truncate">
|
||||
{title} <span className="text-gray-400 font-normal">({count})</span>
|
||||
</span>
|
||||
{actions && actions.length > 0 && (
|
||||
<div className="flex items-center gap-1">
|
||||
{actions.map((a) => (
|
||||
<button
|
||||
key={a.label}
|
||||
type="button"
|
||||
onClick={a.onClick}
|
||||
disabled={a.disabled}
|
||||
className={
|
||||
a.danger
|
||||
? "text-xs px-2 py-0.5 rounded border border-red-200 text-red-700 hover:bg-red-50 disabled:opacity-40 disabled:cursor-not-allowed"
|
||||
: a.primary
|
||||
? "text-xs px-2 py-0.5 rounded border border-blue-600 bg-blue-600 text-white hover:bg-blue-700 disabled:opacity-40 disabled:cursor-not-allowed"
|
||||
: "text-xs px-2 py-0.5 rounded border border-gray-300 text-gray-600 hover:bg-gray-100 disabled:opacity-40 disabled:cursor-not-allowed"
|
||||
}
|
||||
>
|
||||
{a.label}
|
||||
</button>
|
||||
))}
|
||||
{subtitle && <div className="text-xs text-gray-500 min-w-0">{subtitle}</div>}
|
||||
{hasButtons && (
|
||||
// auto|1fr|auto: left/right buttons take their natural width (no wrapping
|
||||
// on "← Back to inbox" / "Approve auto →"), the middle column flexes and
|
||||
// centers the skip button if present.
|
||||
<div className="grid grid-cols-[auto_1fr_auto] items-center gap-1">
|
||||
<div className="flex justify-start">{backward && <ActionButton action={backward} />}</div>
|
||||
<div className="flex justify-center">{skip && <ActionButton action={skip} />}</div>
|
||||
<div className="flex justify-end">{forward && <ActionButton action={forward} />}</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -6,24 +6,46 @@ import { ColumnShell } from "./ColumnShell";
|
||||
|
||||
interface DoneColumnProps {
|
||||
items: PipelineJobItem[];
|
||||
doneCount: number;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
export function DoneColumn({ items, onMutate }: DoneColumnProps) {
|
||||
export function DoneColumn({ items, doneCount, onMutate }: DoneColumnProps) {
|
||||
const clear = async () => {
|
||||
await api.post("/api/execute/clear-completed");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const reopenAll = async () => {
|
||||
if (!confirm(`Send all ${items.length} completed items back to Review for re-decisioning?`)) return;
|
||||
await api.post("/api/review/reopen-all");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const reopen = async (itemId: number) => {
|
||||
await api.post(`/api/review/${itemId}/reopen`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const actions = items.length > 0 ? [{ label: "Clear", onClick: clear }] : undefined;
|
||||
const backward =
|
||||
items.length > 0
|
||||
? {
|
||||
label: "← Back to review",
|
||||
onClick: reopenAll,
|
||||
title: "Reopen every completed item so you can re-decide and re-queue",
|
||||
}
|
||||
: undefined;
|
||||
const skip =
|
||||
items.length > 0 ? { label: "Clear", onClick: clear, title: "Dismiss completed items from this column" } : undefined;
|
||||
|
||||
return (
|
||||
<ColumnShell title="Done" count={items.length} actions={actions}>
|
||||
<ColumnShell
|
||||
title="Done"
|
||||
count={items.length}
|
||||
subtitle={`${doneCount} in desired state`}
|
||||
backward={backward}
|
||||
skip={skip}
|
||||
>
|
||||
{items.map((item) => (
|
||||
<div key={item.id} className="group rounded border bg-white p-2">
|
||||
<Link
|
||||
|
||||
@@ -0,0 +1,141 @@
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { ReviewGroup, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { ColumnShell } from "./ColumnShell";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
import { SeriesCard } from "./SeriesCard";
|
||||
|
||||
const PAGE_SIZE = 25;
|
||||
|
||||
interface InboxColumnProps {
|
||||
initialResponse: ReviewGroupsResponse;
|
||||
totalItems: number;
|
||||
autoProcessing: boolean;
|
||||
onToggleAutoProcessing: (enabled: boolean) => void;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
export function InboxColumn({
|
||||
initialResponse,
|
||||
totalItems,
|
||||
autoProcessing,
|
||||
onToggleAutoProcessing,
|
||||
jellyfinUrl,
|
||||
onMutate,
|
||||
}: InboxColumnProps) {
|
||||
const [groups, setGroups] = useState<ReviewGroup[]>(initialResponse.groups);
|
||||
const [hasMore, setHasMore] = useState(initialResponse.hasMore);
|
||||
const [loadingMore, setLoadingMore] = useState(false);
|
||||
const sentinelRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
setGroups(initialResponse.groups);
|
||||
setHasMore(initialResponse.hasMore);
|
||||
}, [initialResponse]);
|
||||
|
||||
const loadMore = useCallback(async () => {
|
||||
if (loadingMore || !hasMore) return;
|
||||
setLoadingMore(true);
|
||||
try {
|
||||
const res = await api.get<ReviewGroupsResponse>(
|
||||
`/api/review/groups?bucket=inbox&offset=${groups.length}&limit=${PAGE_SIZE}`,
|
||||
);
|
||||
setGroups((prev) => [...prev, ...res.groups]);
|
||||
setHasMore(res.hasMore);
|
||||
} finally {
|
||||
setLoadingMore(false);
|
||||
}
|
||||
}, [groups.length, hasMore, loadingMore]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!hasMore || !sentinelRef.current) return;
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
if (entries[0]?.isIntersecting) loadMore();
|
||||
},
|
||||
{ rootMargin: "200px" },
|
||||
);
|
||||
observer.observe(sentinelRef.current);
|
||||
return () => observer.disconnect();
|
||||
}, [hasMore, loadMore]);
|
||||
|
||||
const runSort = async () => {
|
||||
await api.post<{ ok: boolean; moved_to_queue: number; moved_to_review: number }>("/api/review/sort-inbox");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const skipAll = async () => {
|
||||
if (!confirm(`Skip all ${totalItems} unsorted items? They won't be processed unless you unskip them.`)) return;
|
||||
await api.post("/api/review/skip-all");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const subtitle = (
|
||||
<label className="flex items-center gap-1.5 cursor-pointer select-none">
|
||||
<input
|
||||
type="checkbox"
|
||||
className="h-3 w-3"
|
||||
checked={autoProcessing}
|
||||
onChange={(e) => onToggleAutoProcessing(e.target.checked)}
|
||||
/>
|
||||
<span>Auto-process new items</span>
|
||||
</label>
|
||||
);
|
||||
|
||||
const skip = totalItems > 0 ? { label: "Skip all", onClick: skipAll } : undefined;
|
||||
const forward =
|
||||
totalItems > 0
|
||||
? { label: "Auto Review →", onClick: runSort, primary: true, title: "Sort inbox to Queue / Review" }
|
||||
: undefined;
|
||||
|
||||
return (
|
||||
<ColumnShell title="Inbox" count={totalItems} subtitle={subtitle} skip={skip} forward={forward}>
|
||||
<div className="space-y-2">
|
||||
{groups.map((group) => {
|
||||
if (group.kind === "movie") {
|
||||
return (
|
||||
<PipelineCard
|
||||
key={group.item.id}
|
||||
item={group.item}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${group.item.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={async () => {
|
||||
await api.post(`/api/review/${group.item.item_id}/approve`);
|
||||
onMutate();
|
||||
}}
|
||||
onSkip={async () => {
|
||||
await api.post(`/api/review/${group.item.item_id}/skip`);
|
||||
onMutate();
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<SeriesCard
|
||||
key={group.seriesKey}
|
||||
seriesKey={group.seriesKey}
|
||||
seriesName={group.seriesName}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
seriesJellyfinId={group.seriesJellyfinId}
|
||||
seasons={group.seasons}
|
||||
episodeCount={group.episodeCount}
|
||||
readyCount={group.readyCount}
|
||||
originalLanguage={group.originalLanguage}
|
||||
onMutate={onMutate}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{groups.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items in the inbox</p>}
|
||||
{hasMore && (
|
||||
<div ref={sentinelRef} className="py-4 text-center text-xs text-gray-400">
|
||||
{loadingMore ? "Loading more…" : ""}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ColumnShell>
|
||||
);
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Link } from "@tanstack/react-router";
|
||||
import { Badge } from "~/shared/components/ui/badge";
|
||||
import { langName, normalizeLanguageClient } from "~/shared/lib/lang";
|
||||
import type { PipelineAudioStream } from "~/shared/lib/types";
|
||||
import type { PipelineAudioStream, PipelineReviewItem } from "~/shared/lib/types";
|
||||
|
||||
// Shared shape across review items, raw media_item rows, and queued jobs.
|
||||
// Only name/type are strictly required; the rest is optional so the card
|
||||
@@ -15,13 +15,34 @@ interface PipelineCardItem {
|
||||
season_number?: number | null;
|
||||
episode_number?: number | null;
|
||||
jellyfin_id?: string;
|
||||
confidence?: "high" | "low";
|
||||
auto_class?: PipelineReviewItem["auto_class"];
|
||||
job_type?: "copy" | "transcode";
|
||||
original_language?: string | null;
|
||||
transcode_reasons?: string[];
|
||||
audio_streams?: PipelineAudioStream[];
|
||||
}
|
||||
|
||||
export function AutoClassBadge({ autoClass }: { autoClass: PipelineReviewItem["auto_class"] }) {
|
||||
if (autoClass === "auto_heuristic") {
|
||||
return (
|
||||
<span
|
||||
className="inline-flex items-center gap-1 text-[10px] px-1.5 py-0.5 rounded bg-amber-100 text-amber-800 border border-amber-200"
|
||||
title="Analyzer is confident — one-click auto-approve from the Review column"
|
||||
>
|
||||
⚡ Auto-approve
|
||||
</span>
|
||||
);
|
||||
}
|
||||
if (autoClass === "manual") {
|
||||
return (
|
||||
<span className="inline-flex items-center gap-1 text-[10px] px-1.5 py-0.5 rounded bg-gray-100 text-gray-700 border border-gray-200">
|
||||
✋ Needs decision
|
||||
</span>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
interface PipelineCardProps {
|
||||
item: PipelineCardItem;
|
||||
jellyfinUrl: string;
|
||||
@@ -71,7 +92,12 @@ export function PipelineCard({
|
||||
? `S${String(item.season_number).padStart(2, "0")}E${String(item.episode_number).padStart(2, "0")} — ${item.name}`
|
||||
: item.name;
|
||||
|
||||
const confidenceColor = item.confidence === "high" ? "bg-green-50 border-green-200" : "bg-amber-50 border-amber-200";
|
||||
const cardColor =
|
||||
item.auto_class === "auto_heuristic"
|
||||
? "bg-amber-50 border-amber-200"
|
||||
: item.auto_class === "manual"
|
||||
? "bg-gray-50 border-gray-200"
|
||||
: "bg-white border-gray-200";
|
||||
|
||||
const jellyfinLink =
|
||||
jellyfinUrl && item.jellyfin_id ? `${jellyfinUrl}/web/index.html#!/details?id=${item.jellyfin_id}` : null;
|
||||
@@ -80,11 +106,59 @@ export function PipelineCard({
|
||||
// media_item rows (no plan) in which case we fall back to item.id.
|
||||
const mediaItemId: number = item.item_id ?? (item as { id: number }).id;
|
||||
|
||||
const hasActionRow = !!(onSkip || onApprove || onUnapprove || onApproveUpToHere);
|
||||
const hasTranscodeReasons = !!item.transcode_reasons && item.transcode_reasons.length > 0;
|
||||
const hasInfoRow = hasTranscodeReasons || item.job_type === "copy" || !!item.auto_class;
|
||||
|
||||
return (
|
||||
<div className={`group rounded-lg border p-3 ${confidenceColor}`}>
|
||||
<div className="flex items-start justify-between gap-2">
|
||||
<div className="min-w-0">
|
||||
<div className="flex items-center gap-1 min-w-0">
|
||||
<div className={`group rounded-lg border p-3 ${cardColor}`}>
|
||||
{/* Action row — anchored at the top so buttons stay in the same
|
||||
place regardless of card body height. */}
|
||||
{hasActionRow && (
|
||||
<div className="flex items-center gap-1">
|
||||
{onSkip && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onSkip}
|
||||
className="text-xs px-2 py-1 rounded border border-gray-300 text-gray-700 hover:bg-gray-100"
|
||||
>
|
||||
Skip
|
||||
</button>
|
||||
)}
|
||||
<div className="flex-1" />
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onApproveUpToHere}
|
||||
title="Approve every card listed above this one"
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
>
|
||||
↑ Approve above
|
||||
</button>
|
||||
)}
|
||||
{onApprove && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onApprove}
|
||||
className="text-xs px-3 py-1 rounded bg-blue-600 text-white hover:bg-blue-700"
|
||||
>
|
||||
Approve
|
||||
</button>
|
||||
)}
|
||||
{onUnapprove && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onUnapprove}
|
||||
className="text-xs px-3 py-1 rounded border border-gray-300 bg-white text-gray-700 hover:bg-gray-100 opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
>
|
||||
← Back to review
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Title row */}
|
||||
<div className={`flex items-center gap-1 min-w-0 ${hasActionRow ? "mt-2" : ""}`}>
|
||||
<Link
|
||||
to="/review/audio/$id"
|
||||
params={{ id: String(mediaItemId) }}
|
||||
@@ -105,15 +179,22 @@ export function PipelineCard({
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5 mt-1 flex-wrap">
|
||||
{item.transcode_reasons && item.transcode_reasons.length > 0
|
||||
? item.transcode_reasons.map((r) => (
|
||||
|
||||
{/* Info row: file info (transcode / copy) on the left, system status on the right. */}
|
||||
{hasInfoRow && (
|
||||
<div className="flex items-center justify-between gap-2 mt-1">
|
||||
<div className="flex items-center gap-1.5 flex-wrap min-w-0">
|
||||
{hasTranscodeReasons
|
||||
? item.transcode_reasons!.map((r) => (
|
||||
<Badge key={r} variant="manual">
|
||||
{r}
|
||||
</Badge>
|
||||
))
|
||||
: item.job_type === "copy" && <Badge variant="noop">copy</Badge>}
|
||||
</div>
|
||||
<AutoClassBadge autoClass={item.auto_class ?? null} />
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Audio streams: checkboxes over the actual tracks on this file,
|
||||
pre-checked per analyzer decisions. The track whose language
|
||||
@@ -158,48 +239,5 @@ export function PipelineCard({
|
||||
</ul>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-1 mt-2">
|
||||
{onSkip && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onSkip}
|
||||
className="text-xs px-2 py-1 rounded border border-gray-300 text-gray-700 hover:bg-gray-100"
|
||||
>
|
||||
Skip
|
||||
</button>
|
||||
)}
|
||||
<div className="flex-1" />
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onApproveUpToHere}
|
||||
title="Approve every card listed above this one"
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
>
|
||||
↑ Approve above
|
||||
</button>
|
||||
)}
|
||||
{onApprove && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onApprove}
|
||||
className="text-xs px-3 py-1 rounded bg-blue-600 text-white hover:bg-blue-700"
|
||||
>
|
||||
Approve
|
||||
</button>
|
||||
)}
|
||||
{onUnapprove && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={onUnapprove}
|
||||
className="text-xs px-3 py-1 rounded border border-gray-300 bg-white text-gray-700 hover:bg-gray-100 opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
>
|
||||
← Back to review
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { PipelineData } from "~/shared/lib/types";
|
||||
import type { PipelineData, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { DoneColumn } from "./DoneColumn";
|
||||
import { InboxColumn } from "./InboxColumn";
|
||||
import { ProcessingColumn } from "./ProcessingColumn";
|
||||
import { QueueColumn } from "./QueueColumn";
|
||||
import { ReviewColumn } from "./ReviewColumn";
|
||||
@@ -20,43 +21,53 @@ interface QueueStatus {
|
||||
|
||||
export function PipelinePage() {
|
||||
const [data, setData] = useState<PipelineData | null>(null);
|
||||
const [inboxInitial, setInboxInitial] = useState<ReviewGroupsResponse | null>(null);
|
||||
const [reviewInitial, setReviewInitial] = useState<ReviewGroupsResponse | null>(null);
|
||||
const [progress, setProgress] = useState<Progress | null>(null);
|
||||
const [queueStatus, setQueueStatus] = useState<QueueStatus | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
const load = useCallback(async () => {
|
||||
const pipelineRes = await api.get<PipelineData>("/api/review/pipeline");
|
||||
setData(pipelineRes);
|
||||
setLoading(false);
|
||||
const loadPipeline = useCallback(async () => {
|
||||
const res = await api.get<PipelineData>("/api/review/pipeline");
|
||||
setData(res);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
load();
|
||||
}, [load]);
|
||||
const loadGroups = useCallback(async () => {
|
||||
const [inbox, review] = await Promise.all([
|
||||
api.get<ReviewGroupsResponse>("/api/review/groups?bucket=inbox&offset=0&limit=25"),
|
||||
api.get<ReviewGroupsResponse>("/api/review/groups?bucket=review&offset=0&limit=25"),
|
||||
]);
|
||||
setInboxInitial(inbox);
|
||||
setReviewInitial(review);
|
||||
}, []);
|
||||
|
||||
const loadAll = useCallback(async () => {
|
||||
await Promise.all([loadPipeline(), loadGroups()]);
|
||||
setLoading(false);
|
||||
}, [loadPipeline, loadGroups]);
|
||||
|
||||
useEffect(() => {
|
||||
loadAll();
|
||||
}, [loadAll]);
|
||||
|
||||
// SSE for live updates. job_update fires on every status change and per-line
|
||||
// stdout flush of the running job — without coalescing, the pipeline endpoint
|
||||
// (a 500-row review query + counts) would re-run several times per second.
|
||||
const reloadTimer = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
useEffect(() => {
|
||||
const scheduleReload = () => {
|
||||
const schedulePipelineReload = () => {
|
||||
if (reloadTimer.current) return;
|
||||
reloadTimer.current = setTimeout(() => {
|
||||
reloadTimer.current = null;
|
||||
load();
|
||||
loadPipeline();
|
||||
}, 1000);
|
||||
};
|
||||
const es = new EventSource("/api/execute/events");
|
||||
es.addEventListener("job_update", (e) => {
|
||||
// When a job leaves 'running' (done / error / cancelled), drop any
|
||||
// stale progress so the bar doesn't linger on the next job's card.
|
||||
try {
|
||||
const upd = JSON.parse((e as MessageEvent).data) as { id: number; status: string };
|
||||
if (upd.status !== "running") setProgress(null);
|
||||
} catch {
|
||||
/* ignore malformed events */
|
||||
}
|
||||
scheduleReload();
|
||||
schedulePipelineReload();
|
||||
});
|
||||
es.addEventListener("job_progress", (e) => {
|
||||
setProgress(JSON.parse((e as MessageEvent).data));
|
||||
@@ -64,25 +75,48 @@ export function PipelinePage() {
|
||||
es.addEventListener("queue_status", (e) => {
|
||||
setQueueStatus(JSON.parse((e as MessageEvent).data));
|
||||
});
|
||||
es.addEventListener("inbox_sorted", () => {
|
||||
loadAll();
|
||||
});
|
||||
return () => {
|
||||
es.close();
|
||||
if (reloadTimer.current) clearTimeout(reloadTimer.current);
|
||||
};
|
||||
}, [load]);
|
||||
}, [loadPipeline, loadAll]);
|
||||
|
||||
if (loading || !data) return <div className="p-6 text-gray-500">Loading pipeline...</div>;
|
||||
const toggleAutoProcessing = async (enabled: boolean) => {
|
||||
await api.post<{ ok: boolean }>("/api/settings/auto-processing", { enabled });
|
||||
loadAll();
|
||||
};
|
||||
|
||||
if (loading || !data || !inboxInitial || !reviewInitial)
|
||||
return <div className="p-6 text-gray-500">Loading pipeline...</div>;
|
||||
|
||||
return (
|
||||
<div className="flex flex-col -mx-3 sm:-mx-5 -mt-4 -mb-12 h-[calc(100vh-3rem)] overflow-hidden">
|
||||
<div className="flex items-center justify-between px-6 py-3 border-b shrink-0">
|
||||
<div className="flex items-center px-6 py-3 border-b shrink-0">
|
||||
<h1 className="text-lg font-semibold">Pipeline</h1>
|
||||
<span className="text-sm text-gray-500">{data.doneCount} files in desired state</span>
|
||||
</div>
|
||||
<div className="flex flex-1 gap-4 p-4 overflow-x-auto overflow-y-hidden min-h-0">
|
||||
<ReviewColumn items={data.review} total={data.reviewTotal} jellyfinUrl={data.jellyfinUrl} onMutate={load} />
|
||||
<QueueColumn items={data.queued} jellyfinUrl={data.jellyfinUrl} onMutate={load} />
|
||||
<ProcessingColumn items={data.processing} progress={progress} queueStatus={queueStatus} onMutate={load} />
|
||||
<DoneColumn items={data.done} onMutate={load} />
|
||||
<InboxColumn
|
||||
initialResponse={inboxInitial}
|
||||
totalItems={data.inboxTotal}
|
||||
autoProcessing={data.autoProcessing}
|
||||
onToggleAutoProcessing={toggleAutoProcessing}
|
||||
jellyfinUrl={data.jellyfinUrl}
|
||||
onMutate={loadAll}
|
||||
/>
|
||||
<ReviewColumn
|
||||
initialResponse={reviewInitial}
|
||||
totalItems={data.reviewItemsTotal}
|
||||
readyCount={data.reviewReadyCount}
|
||||
manualCount={data.reviewManualCount}
|
||||
jellyfinUrl={data.jellyfinUrl}
|
||||
onMutate={loadAll}
|
||||
/>
|
||||
<QueueColumn items={data.queued} jellyfinUrl={data.jellyfinUrl} onMutate={loadAll} />
|
||||
<ProcessingColumn items={data.processing} progress={progress} queueStatus={queueStatus} onMutate={loadAll} />
|
||||
<DoneColumn items={data.done} doneCount={data.doneCount} onMutate={loadAll} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -23,6 +23,25 @@ export function ProcessingColumn({ items, progress, queueStatus, onMutate }: Pro
|
||||
return () => clearInterval(t);
|
||||
}, [job]);
|
||||
|
||||
// Local sleep countdown. Server emits the sleep duration once when the
|
||||
// pause begins; the client anchors "deadline = receivedAt + seconds*1000"
|
||||
// and ticks a 1s timer so the UI shows a live countdown, not a static number.
|
||||
const [sleepDeadline, setSleepDeadline] = useState<number | null>(null);
|
||||
const [sleepNow, setSleepNow] = useState(() => Date.now());
|
||||
useEffect(() => {
|
||||
if (queueStatus?.status === "sleeping" && typeof queueStatus.seconds === "number") {
|
||||
setSleepDeadline(Date.now() + queueStatus.seconds * 1000);
|
||||
} else {
|
||||
setSleepDeadline(null);
|
||||
}
|
||||
}, [queueStatus?.status, queueStatus?.seconds]);
|
||||
useEffect(() => {
|
||||
if (sleepDeadline == null) return;
|
||||
const t = setInterval(() => setSleepNow(Date.now()), 1000);
|
||||
return () => clearInterval(t);
|
||||
}, [sleepDeadline]);
|
||||
const sleepRemaining = sleepDeadline != null ? Math.max(0, Math.ceil((sleepDeadline - sleepNow) / 1000)) : null;
|
||||
|
||||
// Only trust progress if it belongs to the current job — stale events from
|
||||
// a previous job would otherwise show wrong numbers until the new job emits.
|
||||
const liveProgress = job && progress && progress.id === job.id ? progress : null;
|
||||
@@ -52,12 +71,12 @@ export function ProcessingColumn({ items, progress, queueStatus, onMutate }: Pro
|
||||
<ColumnShell
|
||||
title="Processing"
|
||||
count={job ? 1 : 0}
|
||||
actions={job ? [{ label: "Stop", onClick: stop, danger: true }] : undefined}
|
||||
backward={job ? { label: "Stop", onClick: stop, danger: true, title: "Stop the running job" } : undefined}
|
||||
>
|
||||
{queueStatus && queueStatus.status !== "running" && (
|
||||
<div className="mb-2 text-xs text-gray-500 bg-white rounded border p-2">
|
||||
<div className="mb-2 text-xs text-gray-500 bg-white rounded border p-2 tabular-nums">
|
||||
{queueStatus.status === "paused" && <>Paused until {queueStatus.until}</>}
|
||||
{queueStatus.status === "sleeping" && <>Sleeping {queueStatus.seconds}s between jobs</>}
|
||||
{queueStatus.status === "sleeping" && <>Next job in {sleepRemaining ?? queueStatus.seconds ?? 0}s</>}
|
||||
{queueStatus.status === "idle" && <>Idle</>}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -14,8 +14,8 @@ export function QueueColumn({ items, jellyfinUrl, onMutate }: QueueColumnProps)
|
||||
await api.post("/api/execute/start");
|
||||
onMutate();
|
||||
};
|
||||
const clear = async () => {
|
||||
if (!confirm(`Cancel all ${items.length} pending jobs?`)) return;
|
||||
const backToInbox = async () => {
|
||||
if (!confirm(`Cancel all ${items.length} pending jobs and send them back to the Inbox?`)) return;
|
||||
await api.post("/api/execute/clear");
|
||||
onMutate();
|
||||
};
|
||||
@@ -24,16 +24,18 @@ export function QueueColumn({ items, jellyfinUrl, onMutate }: QueueColumnProps)
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const actions =
|
||||
const backward =
|
||||
items.length > 0
|
||||
? [
|
||||
{ label: "Run all", onClick: runAll, primary: true },
|
||||
{ label: "Clear", onClick: clear },
|
||||
]
|
||||
? {
|
||||
label: "← Back to inbox",
|
||||
onClick: backToInbox,
|
||||
title: "Cancel every pending job and send its plan back to the Inbox",
|
||||
}
|
||||
: undefined;
|
||||
const forward = items.length > 0 ? { label: "Run all →", onClick: runAll, primary: true } : undefined;
|
||||
|
||||
return (
|
||||
<ColumnShell title="Queued" count={items.length} actions={actions}>
|
||||
<ColumnShell title="Queued" count={items.length} backward={backward} forward={forward}>
|
||||
<div className="space-y-2">
|
||||
{items.map((item) => (
|
||||
<PipelineCard key={item.id} item={item} jellyfinUrl={jellyfinUrl} onUnapprove={() => unapprove(item.item_id)} />
|
||||
|
||||
@@ -1,36 +1,84 @@
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { PipelineReviewItem } from "~/shared/lib/types";
|
||||
import type { ReviewGroup, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { ColumnShell } from "./ColumnShell";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
import { SeriesCard } from "./SeriesCard";
|
||||
|
||||
const PAGE_SIZE = 25;
|
||||
|
||||
interface ReviewColumnProps {
|
||||
items: PipelineReviewItem[];
|
||||
total: number;
|
||||
initialResponse: ReviewGroupsResponse;
|
||||
totalItems: number;
|
||||
readyCount: number;
|
||||
manualCount: number;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
interface SeriesGroup {
|
||||
name: string;
|
||||
key: string;
|
||||
jellyfinId: string | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
}
|
||||
export function ReviewColumn({
|
||||
initialResponse,
|
||||
totalItems,
|
||||
readyCount,
|
||||
manualCount,
|
||||
jellyfinUrl,
|
||||
onMutate,
|
||||
}: ReviewColumnProps) {
|
||||
const [groups, setGroups] = useState<ReviewGroup[]>(initialResponse.groups);
|
||||
const [hasMore, setHasMore] = useState(initialResponse.hasMore);
|
||||
const [loadingMore, setLoadingMore] = useState(false);
|
||||
const sentinelRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
export function ReviewColumn({ items, total, jellyfinUrl, onMutate }: ReviewColumnProps) {
|
||||
const truncated = total > items.length;
|
||||
useEffect(() => {
|
||||
setGroups(initialResponse.groups);
|
||||
setHasMore(initialResponse.hasMore);
|
||||
}, [initialResponse]);
|
||||
|
||||
const loadMore = useCallback(async () => {
|
||||
if (loadingMore || !hasMore) return;
|
||||
setLoadingMore(true);
|
||||
try {
|
||||
const res = await api.get<ReviewGroupsResponse>(
|
||||
`/api/review/groups?bucket=review&offset=${groups.length}&limit=${PAGE_SIZE}`,
|
||||
);
|
||||
setGroups((prev) => [...prev, ...res.groups]);
|
||||
setHasMore(res.hasMore);
|
||||
} finally {
|
||||
setLoadingMore(false);
|
||||
}
|
||||
}, [groups.length, hasMore, loadingMore]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!hasMore || !sentinelRef.current) return;
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
if (entries[0]?.isIntersecting) loadMore();
|
||||
},
|
||||
{ rootMargin: "200px" },
|
||||
);
|
||||
observer.observe(sentinelRef.current);
|
||||
return () => observer.disconnect();
|
||||
}, [hasMore, loadMore]);
|
||||
|
||||
const skipAll = async () => {
|
||||
if (!confirm(`Skip all ${total} pending items? They won't be processed unless you unskip them.`)) return;
|
||||
if (!confirm(`Skip all ${totalItems} pending items? They won't be processed unless you unskip them.`)) return;
|
||||
await api.post("/api/review/skip-all");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const autoApprove = async () => {
|
||||
const res = await api.post<{ ok: boolean; count: number }>("/api/review/auto-approve");
|
||||
const backToInbox = async () => {
|
||||
if (
|
||||
!confirm(`Move all ${totalItems} review items back to the Inbox? You'll need to run Auto Review to re-sort them.`)
|
||||
)
|
||||
return;
|
||||
await api.post("/api/review/unsort-all");
|
||||
onMutate();
|
||||
if (res.count === 0) alert("No high-confidence items to auto-approve.");
|
||||
};
|
||||
|
||||
const approveAllReady = async () => {
|
||||
const res = await api.post<{ ok: boolean; count: number }>("/api/review/approve-ready");
|
||||
onMutate();
|
||||
if (res.count === 0) alert("No auto-approvable items found.");
|
||||
};
|
||||
|
||||
const approveItem = async (itemId: number) => {
|
||||
@@ -47,89 +95,71 @@ export function ReviewColumn({ items, total, jellyfinUrl, onMutate }: ReviewColu
|
||||
onMutate();
|
||||
};
|
||||
|
||||
// Group by series (movies are standalone)
|
||||
const movies = items.filter((i) => i.type === "Movie");
|
||||
const seriesMap = new Map<string, SeriesGroup>();
|
||||
|
||||
for (const item of items.filter((i) => i.type === "Episode")) {
|
||||
const key = item.series_jellyfin_id ?? item.series_name ?? String(item.item_id);
|
||||
if (!seriesMap.has(key)) {
|
||||
seriesMap.set(key, { name: item.series_name ?? "", key, jellyfinId: item.series_jellyfin_id, episodes: [] });
|
||||
}
|
||||
seriesMap.get(key)!.episodes.push(item);
|
||||
}
|
||||
|
||||
// Interleave movies and series, sorted by confidence (high first)
|
||||
const allItems = [
|
||||
...movies.map((m) => ({ type: "movie" as const, item: m, sortKey: m.confidence === "high" ? 0 : 1 })),
|
||||
...[...seriesMap.values()].map((s) => ({
|
||||
type: "series" as const,
|
||||
item: s,
|
||||
sortKey: s.episodes.every((e) => e.confidence === "high") ? 0 : 1,
|
||||
})),
|
||||
].sort((a, b) => a.sortKey - b.sortKey);
|
||||
|
||||
// Flatten each visible entry to its list of item_ids. "Approve up to here"
|
||||
// on index i approves everything in the union of idsByEntry[0..i-1] — one
|
||||
// id for a movie, N ids for a series (one per episode).
|
||||
const idsByEntry: number[][] = allItems.map((entry) =>
|
||||
entry.type === "movie" ? [entry.item.item_id] : entry.item.episodes.map((e) => e.item_id),
|
||||
const idsByGroup: number[][] = groups.map((g) =>
|
||||
g.kind === "movie" ? [g.item.item_id] : g.seasons.flatMap((s) => s.episodes.map((ep) => ep.item_id)),
|
||||
);
|
||||
const priorIds = (index: number): number[] => idsByEntry.slice(0, index).flat();
|
||||
const priorIds = (index: number): number[] => idsByGroup.slice(0, index).flat();
|
||||
|
||||
const backward =
|
||||
totalItems > 0
|
||||
? { label: "← Back to inbox", onClick: backToInbox, title: "Move everything back to the Inbox" }
|
||||
: undefined;
|
||||
const skip = totalItems > 0 ? { label: "Skip all", onClick: skipAll } : undefined;
|
||||
const forward =
|
||||
readyCount > 0
|
||||
? {
|
||||
label: "Approve auto →",
|
||||
onClick: approveAllReady,
|
||||
primary: true,
|
||||
title: "Approve every auto-approvable item (no manual decision needed)",
|
||||
}
|
||||
: undefined;
|
||||
|
||||
const subtitle = totalItems === 0 ? undefined : `${readyCount} auto · ${manualCount} need decisions`;
|
||||
|
||||
return (
|
||||
<ColumnShell
|
||||
title="Review"
|
||||
count={truncated ? `${items.length} of ${total}` : total}
|
||||
actions={
|
||||
total > 0
|
||||
? [
|
||||
{ label: "Auto Review", onClick: autoApprove, primary: true },
|
||||
{ label: "Skip all", onClick: skipAll },
|
||||
]
|
||||
: undefined
|
||||
}
|
||||
>
|
||||
<ColumnShell title="Review" count={totalItems} subtitle={subtitle} backward={backward} skip={skip} forward={forward}>
|
||||
<div className="space-y-2">
|
||||
{allItems.map((entry, index) => {
|
||||
// The button approves everything visually above this card. First
|
||||
// card has nothing before it → undefined suppresses the affordance.
|
||||
{groups.map((group, index) => {
|
||||
const prior = index > 0 ? priorIds(index) : null;
|
||||
const onApproveUpToHere = prior && prior.length > 0 ? () => approveBatch(prior) : undefined;
|
||||
if (entry.type === "movie") {
|
||||
if (group.kind === "movie") {
|
||||
return (
|
||||
<PipelineCard
|
||||
key={entry.item.id}
|
||||
item={entry.item}
|
||||
key={group.item.id}
|
||||
item={group.item}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${entry.item.item_id}/stream/${streamId}`, { action });
|
||||
await api.patch(`/api/review/${group.item.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={() => approveItem(entry.item.item_id)}
|
||||
onSkip={() => skipItem(entry.item.item_id)}
|
||||
onApprove={() => approveItem(group.item.item_id)}
|
||||
onSkip={() => skipItem(group.item.item_id)}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<SeriesCard
|
||||
key={entry.item.key}
|
||||
seriesKey={entry.item.key}
|
||||
seriesName={entry.item.name}
|
||||
key={group.seriesKey}
|
||||
seriesKey={group.seriesKey}
|
||||
seriesName={group.seriesName}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
seriesJellyfinId={entry.item.jellyfinId}
|
||||
episodes={entry.item.episodes}
|
||||
seriesJellyfinId={group.seriesJellyfinId}
|
||||
seasons={group.seasons}
|
||||
episodeCount={group.episodeCount}
|
||||
readyCount={group.readyCount}
|
||||
originalLanguage={group.originalLanguage}
|
||||
onMutate={onMutate}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{allItems.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items to review</p>}
|
||||
{truncated && (
|
||||
<p className="text-xs text-gray-400 text-center py-3 border-t mt-2">
|
||||
Showing first {items.length} of {total}. Approve some to see the rest.
|
||||
</p>
|
||||
{groups.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items to review</p>}
|
||||
{hasMore && (
|
||||
<div ref={sentinelRef} className="py-4 text-center text-xs text-gray-400">
|
||||
{loadingMore ? "Loading more…" : ""}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ColumnShell>
|
||||
|
||||
@@ -9,7 +9,10 @@ interface SeriesCardProps {
|
||||
seriesName: string;
|
||||
jellyfinUrl: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
episodeCount: number;
|
||||
readyCount?: number;
|
||||
originalLanguage: string | null;
|
||||
onMutate: () => void;
|
||||
// Review-column affordance: approve every card visually above this
|
||||
// series in one round-trip. See ReviewColumn for the id computation.
|
||||
@@ -21,13 +24,16 @@ export function SeriesCard({
|
||||
seriesName,
|
||||
jellyfinUrl,
|
||||
seriesJellyfinId,
|
||||
episodes,
|
||||
seasons,
|
||||
episodeCount,
|
||||
readyCount = 0,
|
||||
originalLanguage,
|
||||
onMutate,
|
||||
onApproveUpToHere,
|
||||
}: SeriesCardProps) {
|
||||
const [expanded, setExpanded] = useState(false);
|
||||
|
||||
const seriesLang = episodes[0]?.original_language ?? "";
|
||||
const multipleSeasons = seasons.length > 1;
|
||||
|
||||
const setSeriesLanguage = async (lang: string) => {
|
||||
await api.patch(`/api/review/series/${encodeURIComponent(seriesKey)}/language`, { language: lang });
|
||||
@@ -39,17 +45,49 @@ export function SeriesCard({
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const highCount = episodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = episodes.filter((e) => e.confidence === "low").length;
|
||||
const approveSeason = async (season: number | null) => {
|
||||
if (season == null) return;
|
||||
await api.post(`/api/review/season/${encodeURIComponent(seriesKey)}/${season}/approve-all`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const jellyfinLink =
|
||||
jellyfinUrl && seriesJellyfinId ? `${jellyfinUrl}/web/index.html#!/details?id=${seriesJellyfinId}` : null;
|
||||
|
||||
return (
|
||||
<div className="group/series rounded-lg border bg-white overflow-hidden">
|
||||
{/* Action row — anchored at the top so buttons stay in the same
|
||||
place regardless of card body height, mirroring PipelineCard. */}
|
||||
<div className="flex items-center gap-1 px-3 pt-3">
|
||||
<div className="flex-1" />
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveUpToHere();
|
||||
}}
|
||||
title="Approve every card listed above this one"
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0 opacity-0 group-hover/series:opacity-100 transition-opacity"
|
||||
>
|
||||
↑ Approve above
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
type="button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
approveSeries();
|
||||
}}
|
||||
className="text-xs px-3 py-1 rounded bg-blue-600 text-white hover:bg-blue-700 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve series
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Title row */}
|
||||
<div
|
||||
className="flex items-center gap-2 px-3 pt-3 pb-1 cursor-pointer hover:bg-gray-50 rounded-t-lg"
|
||||
className="flex items-center gap-2 px-3 pt-2 pb-1 cursor-pointer hover:bg-gray-50"
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
>
|
||||
<span className="text-xs text-gray-400 shrink-0">{expanded ? "▼" : "▶"}</span>
|
||||
@@ -68,15 +106,15 @@ export function SeriesCard({
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Controls row */}
|
||||
{/* Meta row: badges + language select */}
|
||||
<div className="flex items-center gap-2 px-3 pb-3 pt-1">
|
||||
<span className="text-xs text-gray-500 shrink-0">{episodes.length} eps</span>
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<span className="text-xs text-gray-500 shrink-0">{episodeCount} eps</span>
|
||||
{multipleSeasons && <span className="text-xs text-gray-500 shrink-0">· {seasons.length} seasons</span>}
|
||||
{readyCount > 0 && <span className="text-xs text-amber-700 shrink-0">⚡ {readyCount} auto</span>}
|
||||
<div className="flex-1" />
|
||||
<select
|
||||
className="h-6 text-xs border border-gray-300 rounded px-1 bg-white shrink-0"
|
||||
value={seriesLang}
|
||||
className="h-6 text-xs border border-gray-300 rounded px-1 bg-white shrink-0 min-w-0"
|
||||
value={originalLanguage ?? ""}
|
||||
onChange={(e) => {
|
||||
e.stopPropagation();
|
||||
setSeriesLanguage(e.target.value);
|
||||
@@ -89,34 +127,93 @@ export function SeriesCard({
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveUpToHere();
|
||||
}}
|
||||
title="Approve every card listed above this one"
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0 opacity-0 group-hover/series:opacity-100 transition-opacity"
|
||||
>
|
||||
↑ Approve above
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
approveSeries();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded bg-blue-600 text-white hover:bg-blue-700 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve all
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{expanded && (
|
||||
<div className="border-t px-3 pb-3 space-y-2 pt-2">
|
||||
<div className="border-t">
|
||||
{multipleSeasons
|
||||
? seasons.map((s) => (
|
||||
<SeasonGroup
|
||||
key={s.season ?? "unknown"}
|
||||
season={s.season}
|
||||
episodes={s.episodes}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onApproveSeason={() => approveSeason(s.season)}
|
||||
onMutate={onMutate}
|
||||
/>
|
||||
))
|
||||
: (seasons[0]?.episodes ?? []).map((ep) => (
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function SeasonGroup({
|
||||
season,
|
||||
episodes,
|
||||
jellyfinUrl,
|
||||
onApproveSeason,
|
||||
onMutate,
|
||||
}: {
|
||||
season: number | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
jellyfinUrl: string;
|
||||
onApproveSeason: () => void;
|
||||
onMutate: () => void;
|
||||
}) {
|
||||
const [open, setOpen] = useState(false);
|
||||
const readyCount = episodes.filter((e) => e.auto_class === "auto_heuristic").length;
|
||||
const label = season == null ? "No season" : `Season ${String(season).padStart(2, "0")}`;
|
||||
|
||||
return (
|
||||
<div className="border-t first:border-t-0">
|
||||
<div
|
||||
className="flex flex-wrap items-center gap-x-2 gap-y-1 px-3 py-2 cursor-pointer hover:bg-gray-50"
|
||||
onClick={() => setOpen(!open)}
|
||||
>
|
||||
<span className="text-xs text-gray-400 shrink-0">{open ? "▼" : "▶"}</span>
|
||||
<span className="text-xs font-medium shrink-0">{label}</span>
|
||||
<span className="text-xs text-gray-500 shrink-0">· {episodes.length} eps</span>
|
||||
{readyCount > 0 && <span className="text-xs text-amber-700 shrink-0">⚡ {readyCount} ready</span>}
|
||||
{season != null && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveSeason();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0 ml-auto"
|
||||
>
|
||||
Approve season
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
{open && (
|
||||
<div className="px-3 pb-3 space-y-2 pt-2">
|
||||
{episodes.map((ep) => (
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function EpisodeRow({
|
||||
ep,
|
||||
jellyfinUrl,
|
||||
onMutate,
|
||||
}: {
|
||||
ep: PipelineReviewItem;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}) {
|
||||
return (
|
||||
<div className="px-3 py-1">
|
||||
<PipelineCard
|
||||
key={ep.id}
|
||||
item={ep}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
@@ -132,9 +229,6 @@ export function SeriesCard({
|
||||
onMutate();
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
+311
-24
@@ -10,10 +10,42 @@ import { formatThousands } from "~/shared/lib/utils";
|
||||
interface ScanStatus {
|
||||
running: boolean;
|
||||
progress: { scanned: number; total: number; errors: number };
|
||||
recentItems: { name: string; type: string; scan_status: string; file_path: string }[];
|
||||
recentItems: {
|
||||
name: string;
|
||||
type: string;
|
||||
scan_status: string;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: "scan" | "webhook" | null;
|
||||
}[];
|
||||
scanLimit: number | null;
|
||||
}
|
||||
|
||||
interface ScanItemsRow {
|
||||
id: number;
|
||||
jellyfin_id: string;
|
||||
name: string;
|
||||
type: "Movie" | "Episode";
|
||||
series_name: string | null;
|
||||
season_number: number | null;
|
||||
episode_number: number | null;
|
||||
scan_status: string;
|
||||
original_language: string | null;
|
||||
orig_lang_source: string | null;
|
||||
container: string | null;
|
||||
file_size: number | null;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: "scan" | "webhook" | null;
|
||||
audio_codecs: string | null;
|
||||
}
|
||||
|
||||
interface ScanItemsResponse {
|
||||
rows: ScanItemsRow[];
|
||||
total: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
interface DashboardStats {
|
||||
totalItems: number;
|
||||
scanned: number;
|
||||
@@ -47,6 +79,22 @@ interface LogEntry {
|
||||
file?: string;
|
||||
}
|
||||
|
||||
interface RecentIngestRow {
|
||||
name: string;
|
||||
type: string;
|
||||
status: string;
|
||||
file: string;
|
||||
scannedAt: string | null;
|
||||
source: "scan" | "webhook" | null;
|
||||
}
|
||||
|
||||
interface ItemFilters {
|
||||
q: string;
|
||||
status: "all" | "pending" | "scanned" | "error";
|
||||
type: "all" | "movie" | "episode";
|
||||
source: "all" | "scan" | "webhook";
|
||||
}
|
||||
|
||||
// Mutable buffer for SSE data — flushed to React state on an interval
|
||||
interface SseBuf {
|
||||
scanned: number;
|
||||
@@ -65,19 +113,54 @@ function freshBuf(): SseBuf {
|
||||
|
||||
const FLUSH_MS = 200;
|
||||
|
||||
function statusBadgeVariant(status: string): "pending" | "done" | "error" | "default" {
|
||||
if (status === "pending") return "pending";
|
||||
if (status === "done" || status === "scanned") return "done";
|
||||
if (status === "error") return "error";
|
||||
return "default";
|
||||
}
|
||||
|
||||
function formatScannedAt(ts: string | null): string {
|
||||
if (!ts) return "—";
|
||||
const d = new Date(ts.includes("T") ? ts : `${ts}Z`);
|
||||
if (Number.isNaN(d.getTime())) return ts;
|
||||
return d.toLocaleString([], { year: "numeric", month: "2-digit", day: "2-digit", hour: "2-digit", minute: "2-digit" });
|
||||
}
|
||||
|
||||
function formatFileSize(bytes: number | null): string {
|
||||
if (!bytes || bytes <= 0) return "—";
|
||||
if (bytes < 1000) return `${bytes} B`;
|
||||
if (bytes < 1000 ** 2) return `${(bytes / 1000).toFixed(1)} kB`;
|
||||
if (bytes < 1000 ** 3) return `${(bytes / 1000 ** 2).toFixed(1)} MB`;
|
||||
return `${(bytes / 1000 ** 3).toFixed(1)} GB`;
|
||||
}
|
||||
|
||||
function episodeLabel(row: ScanItemsRow): string {
|
||||
if (row.type !== "Episode") return "—";
|
||||
const season = row.season_number ?? 0;
|
||||
const episode = row.episode_number ?? 0;
|
||||
return `S${String(season).padStart(2, "0")}E${String(episode).padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
export function ScanPage() {
|
||||
const navigate = useNavigate();
|
||||
const [status, setStatus] = useState<ScanStatus | null>(null);
|
||||
const [stats, setStats] = useState<DashboardStats | null>(null);
|
||||
const [configChecked, setConfigChecked] = useState(false);
|
||||
const [limit, setLimit] = useState("");
|
||||
const [log, setLog] = useState<LogEntry[]>([]);
|
||||
const [recentIngest, setRecentIngest] = useState<RecentIngestRow[]>([]);
|
||||
const [statusLabel, setStatusLabel] = useState("");
|
||||
const [scanComplete, setScanComplete] = useState(false);
|
||||
const [currentItem, setCurrentItem] = useState("");
|
||||
const [progressScanned, setProgressScanned] = useState(0);
|
||||
const [progressTotal, setProgressTotal] = useState(0);
|
||||
const [errors, setErrors] = useState(0);
|
||||
const [filters, setFilters] = useState<ItemFilters>({ q: "", status: "all", type: "all", source: "all" });
|
||||
const [itemsRows, setItemsRows] = useState<ScanItemsRow[]>([]);
|
||||
const [itemsOffset, setItemsOffset] = useState(0);
|
||||
const [itemsHasMore, setItemsHasMore] = useState(false);
|
||||
const [itemsTotal, setItemsTotal] = useState(0);
|
||||
const [itemsLoading, setItemsLoading] = useState(false);
|
||||
const esRef = useRef<EventSource | null>(null);
|
||||
const bufRef = useRef<SseBuf>(freshBuf());
|
||||
const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
@@ -122,7 +205,19 @@ export function ScanPage() {
|
||||
setCurrentItem(b.currentItem);
|
||||
if (b.newLogs.length > 0) {
|
||||
const batch = b.newLogs.splice(0);
|
||||
setLog((prev) => [...batch.reverse(), ...prev].slice(0, 100));
|
||||
setRecentIngest((prev) =>
|
||||
[
|
||||
...batch.map((item) => ({
|
||||
name: item.name,
|
||||
type: item.type,
|
||||
status: item.status,
|
||||
file: item.file ?? item.name,
|
||||
scannedAt: new Date().toISOString(),
|
||||
source: "scan" as const,
|
||||
})),
|
||||
...prev,
|
||||
].slice(0, 5),
|
||||
);
|
||||
}
|
||||
b.dirty = false;
|
||||
}
|
||||
@@ -172,13 +267,55 @@ export function ScanPage() {
|
||||
setErrors(s.progress.errors);
|
||||
setStatusLabel(s.running ? "Scan in progress…" : "Scan idle");
|
||||
if (s.scanLimit != null) setLimit(String(s.scanLimit));
|
||||
setLog(s.recentItems.map((i) => ({ name: i.name, type: i.type, status: i.scan_status, file: i.file_path })));
|
||||
setRecentIngest(
|
||||
s.recentItems.map((i) => ({
|
||||
name: i.name,
|
||||
type: i.type,
|
||||
status: i.scan_status,
|
||||
file: i.file_path,
|
||||
scannedAt: i.last_scanned_at,
|
||||
source: i.ingest_source,
|
||||
})),
|
||||
);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
load();
|
||||
}, [load]);
|
||||
|
||||
const fetchItems = useCallback(
|
||||
async (offset: number, append: boolean) => {
|
||||
setItemsLoading(true);
|
||||
try {
|
||||
const qs = new URLSearchParams({
|
||||
offset: String(offset),
|
||||
limit: "50",
|
||||
q: filters.q,
|
||||
status: filters.status,
|
||||
type: filters.type,
|
||||
source: filters.source,
|
||||
});
|
||||
const res = await api.get<ScanItemsResponse>(`/api/scan/items?${qs.toString()}`);
|
||||
setItemsRows((prev) => (append ? [...prev, ...res.rows] : res.rows));
|
||||
setItemsOffset(offset + res.rows.length);
|
||||
setItemsHasMore(res.hasMore);
|
||||
setItemsTotal(res.total);
|
||||
} finally {
|
||||
setItemsLoading(false);
|
||||
}
|
||||
},
|
||||
[filters],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
fetchItems(0, false);
|
||||
}, [fetchItems]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!scanComplete) return;
|
||||
fetchItems(0, false);
|
||||
}, [scanComplete, fetchItems]);
|
||||
|
||||
const connectSse = useCallback(() => {
|
||||
esRef.current?.close();
|
||||
const buf = bufRef.current;
|
||||
@@ -229,7 +366,7 @@ export function ScanPage() {
|
||||
}, [status?.running, connectSse, stopFlushing]);
|
||||
|
||||
const startScan = async () => {
|
||||
setLog([]);
|
||||
setRecentIngest([]);
|
||||
setProgressScanned(0);
|
||||
setProgressTotal(0);
|
||||
setErrors(0);
|
||||
@@ -261,7 +398,7 @@ export function ScanPage() {
|
||||
return (
|
||||
<div>
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<h1 className="text-xl font-bold m-0">Scan</h1>
|
||||
<h1 className="text-xl font-bold m-0">Library</h1>
|
||||
<MqttBadge />
|
||||
</div>
|
||||
|
||||
@@ -284,13 +421,17 @@ export function ScanPage() {
|
||||
)}
|
||||
|
||||
<div className="border border-gray-200 rounded-lg px-4 py-3 mb-6">
|
||||
<div className="flex items-center flex-wrap gap-2 mb-3">
|
||||
<div className="flex items-start justify-between gap-3 mb-3">
|
||||
<div className="space-y-2 min-w-0">
|
||||
<div className="flex items-center flex-wrap gap-2">
|
||||
<span className="text-sm font-medium">{statusLabel || (running ? "Scan in progress…" : "Scan idle")}</span>
|
||||
{scanComplete && (
|
||||
<Link to="/pipeline" className="text-blue-600 hover:underline text-sm">
|
||||
Review in Pipeline →
|
||||
</Link>
|
||||
)}
|
||||
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
|
||||
</div>
|
||||
{running ? (
|
||||
<Button variant="secondary" size="sm" onClick={stopScan}>
|
||||
Stop
|
||||
@@ -314,7 +455,14 @@ export function ScanPage() {
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
|
||||
</div>
|
||||
<div className="text-right shrink-0">
|
||||
<div className="text-sm font-semibold text-gray-700">
|
||||
{formatThousands(progressScanned)}
|
||||
{progressTotal > 0 ? ` / ${formatThousands(progressTotal)}` : ""}
|
||||
</div>
|
||||
<div className="text-[0.7rem] text-gray-500">scanned</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{(running || progressScanned > 0) && (
|
||||
@@ -325,25 +473,20 @@ export function ScanPage() {
|
||||
</div>
|
||||
)}
|
||||
<div className="flex items-center gap-2 text-gray-500 text-xs">
|
||||
<span>
|
||||
{progressScanned}
|
||||
{progressTotal > 0 ? ` / ${progressTotal}` : ""} scanned
|
||||
</span>
|
||||
{currentItem && <span className="truncate max-w-xs text-gray-400">{currentItem}</span>}
|
||||
{currentItem && <span className="truncate max-w-2xl text-gray-400">{currentItem}</span>}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Log */}
|
||||
<h3 className="font-semibold text-sm mb-2">Recent items</h3>
|
||||
<table className="w-full border-collapse text-[0.82rem]">
|
||||
<div className="mt-3">
|
||||
<h3 className="font-semibold text-sm mb-2">Recent ingest (5)</h3>
|
||||
<table className="w-full border-collapse text-[0.78rem]">
|
||||
<thead>
|
||||
<tr>
|
||||
{["Type", "File", "Status"].map((h) => (
|
||||
{["Time", "Source", "Type", "File", "Status"].map((h) => (
|
||||
<th
|
||||
key={h}
|
||||
className="text-left text-[0.68rem] font-bold uppercase tracking-[0.06em] text-gray-500 py-1 px-2 border-b-2 border-gray-200 whitespace-nowrap"
|
||||
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
|
||||
>
|
||||
{h}
|
||||
</th>
|
||||
@@ -351,16 +494,27 @@ export function ScanPage() {
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{log.map((item, i) => {
|
||||
const fileName = item.file ? (item.file.split("/").pop() ?? item.name) : item.name;
|
||||
{recentIngest.length === 0 && (
|
||||
<tr>
|
||||
<td colSpan={5} className="py-2 px-2 text-gray-400">
|
||||
No ingested items yet.
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
{recentIngest.map((item, i) => {
|
||||
const fileName = item.file.split("/").pop() ?? item.name;
|
||||
return (
|
||||
<tr key={i} className="hover:bg-gray-50">
|
||||
<tr key={`${item.file}-${i}`} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatScannedAt(item.scannedAt)}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant="default">{item.source ?? "scan"}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{item.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100" title={item.file ?? item.name}>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-96" title={item.file}>
|
||||
{fileName}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={item.status as "error" | "done" | "pending"}>{item.status}</Badge>
|
||||
<Badge variant={statusBadgeVariant(item.status)}>{item.status}</Badge>
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
@@ -368,5 +522,138 @@ export function ScanPage() {
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mb-2 flex items-end justify-between gap-3">
|
||||
<h3 className="font-semibold text-sm">Library items</h3>
|
||||
<span className="text-xs text-gray-500">{formatThousands(itemsTotal)} total</span>
|
||||
</div>
|
||||
|
||||
<div className="border border-gray-200 rounded-lg p-3 mb-3 flex flex-wrap items-end gap-2">
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Search
|
||||
<input
|
||||
type="text"
|
||||
value={filters.q}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, q: e.target.value }))}
|
||||
placeholder="Name or path"
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs w-56"
|
||||
/>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Status
|
||||
<select
|
||||
value={filters.status}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, status: e.target.value as ItemFilters["status"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="scanned">Scanned</option>
|
||||
<option value="pending">Pending</option>
|
||||
<option value="error">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Type
|
||||
<select
|
||||
value={filters.type}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, type: e.target.value as ItemFilters["type"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="movie">Movie</option>
|
||||
<option value="episode">Episode</option>
|
||||
</select>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Source
|
||||
<select
|
||||
value={filters.source}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, source: e.target.value as ItemFilters["source"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="scan">Scan</option>
|
||||
<option value="webhook">Webhook</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<table className="w-full border-collapse text-[0.8rem]">
|
||||
<thead>
|
||||
<tr>
|
||||
{[
|
||||
"Scanned",
|
||||
"Name",
|
||||
"Type",
|
||||
"Series / Ep",
|
||||
"Language",
|
||||
"Audio",
|
||||
"Container",
|
||||
"Size",
|
||||
"Source",
|
||||
"Status",
|
||||
"Path",
|
||||
].map((h) => (
|
||||
<th
|
||||
key={h}
|
||||
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
|
||||
>
|
||||
{h}
|
||||
</th>
|
||||
))}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{itemsRows.length === 0 && !itemsLoading && (
|
||||
<tr>
|
||||
<td colSpan={11} className="py-3 px-2 text-gray-400">
|
||||
No items match the current filters.
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
{itemsRows.map((row) => (
|
||||
<tr key={row.id} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">
|
||||
{formatScannedAt(row.last_scanned_at)}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.name}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<div>{row.series_name ?? "—"}</div>
|
||||
<div className="text-[0.68rem] text-gray-500">{episodeLabel(row)}</div>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<div>{row.original_language ?? "—"}</div>
|
||||
<div className="text-[0.68rem] text-gray-500">{row.orig_lang_source ?? "—"}</div>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 font-mono text-[0.72rem]">
|
||||
{row.audio_codecs ? row.audio_codecs.split(",").join(" · ") : "—"}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.container ?? "—"}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatFileSize(row.file_size)}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant="default">{row.ingest_source ?? "scan"}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={statusBadgeVariant(row.scan_status)}>{row.scan_status}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-xs" title={row.file_path}>
|
||||
{row.file_path}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div className="mt-3 flex items-center gap-2">
|
||||
{itemsHasMore && (
|
||||
<Button size="sm" variant="secondary" onClick={() => fetchItems(itemsOffset, true)} disabled={itemsLoading}>
|
||||
{itemsLoading ? "Loading…" : "Load more"}
|
||||
</Button>
|
||||
)}
|
||||
{itemsLoading && !itemsHasMore && <span className="text-xs text-gray-500">Loading…</span>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -60,6 +60,49 @@ function LockedInput({ locked, ...props }: { locked: boolean } & React.InputHTML
|
||||
|
||||
// ─── Secret input (password-masked with eye-icon reveal) ──────────────────────
|
||||
|
||||
function EyeIcon({ open }: { open: boolean }) {
|
||||
// GNOME-style eye / crossed-eye glyphs as inline SVG so they inherit
|
||||
// currentColor instead of fighting emoji rendering across OSes.
|
||||
if (open) {
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
aria-hidden="true"
|
||||
>
|
||||
<path d="M17.94 17.94A10.07 10.07 0 0 1 12 20c-7 0-11-8-11-8a18.45 18.45 0 0 1 5.06-5.94" />
|
||||
<path d="M9.9 4.24A9.12 9.12 0 0 1 12 4c7 0 11 8 11 8a18.5 18.5 0 0 1-2.16 3.19" />
|
||||
<path d="M14.12 14.12a3 3 0 1 1-4.24-4.24" />
|
||||
<line x1="1" y1="1" x2="23" y2="23" />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
aria-hidden="true"
|
||||
>
|
||||
<path d="M1 12s4-8 11-8 11 8 11 8-4 8-11 8-11-8-11-8z" />
|
||||
<circle cx="12" cy="12" r="3" />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Input for API keys / passwords. Shows "***" masked when the server returns
|
||||
* a secret value (the raw key never reaches this component by default). Eye
|
||||
@@ -101,31 +144,33 @@ function SecretInput({
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="relative">
|
||||
<div className={`relative ${className ?? ""}`}>
|
||||
<Input
|
||||
type={revealed || !isMasked ? "text" : "password"}
|
||||
type={revealed ? "text" : "password"}
|
||||
value={value}
|
||||
disabled={locked}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder={placeholder}
|
||||
className={`pr-16 ${className ?? ""}`}
|
||||
className="pr-9"
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
onClick={toggle}
|
||||
disabled={locked}
|
||||
className="absolute right-2 top-1/2 -translate-y-1/2 text-[0.9rem] opacity-60 hover:opacity-100 disabled:opacity-30"
|
||||
title={revealed ? "Hide" : "Reveal"}
|
||||
>
|
||||
{revealed ? "🙈" : "👁"}
|
||||
</button>
|
||||
{locked && (
|
||||
{locked ? (
|
||||
<span
|
||||
className="absolute right-9 top-1/2 -translate-y-1/2 text-[0.9rem] opacity-40 pointer-events-none select-none"
|
||||
className="absolute inset-y-0 right-0 flex items-center pr-2.5 text-[0.9rem] opacity-40 pointer-events-none select-none"
|
||||
title="Set via environment variable — edit your .env file to change this value"
|
||||
>
|
||||
🔒
|
||||
</span>
|
||||
) : (
|
||||
<button
|
||||
type="button"
|
||||
onClick={toggle}
|
||||
tabIndex={-1}
|
||||
className="absolute inset-y-0 right-0 flex items-center px-2.5 text-gray-400 hover:text-gray-700 focus:outline-none focus-visible:text-gray-700"
|
||||
title={revealed ? "Hide" : "Reveal"}
|
||||
aria-label={revealed ? "Hide secret" : "Reveal secret"}
|
||||
>
|
||||
<EyeIcon open={revealed} />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
@@ -300,7 +345,7 @@ function ConnSection({
|
||||
value={url}
|
||||
onChange={(e) => setUrl(e.target.value)}
|
||||
placeholder={urlPlaceholder}
|
||||
className="mt-0.5 max-w-sm"
|
||||
className="mt-0.5"
|
||||
/>
|
||||
</label>
|
||||
<label className="block text-sm text-gray-700 mb-1 mt-3">
|
||||
@@ -311,7 +356,7 @@ function ConnSection({
|
||||
value={key}
|
||||
onChange={setKey}
|
||||
placeholder="your-api-key"
|
||||
className="mt-0.5 max-w-xs"
|
||||
className="mt-0.5"
|
||||
/>
|
||||
</label>
|
||||
<div className="flex items-center gap-2 mt-3">
|
||||
@@ -416,6 +461,7 @@ export function SettingsPage() {
|
||||
const [clearStatus, setClearStatus] = useState("");
|
||||
const [audLangs, setAudLangs] = useState<string[]>([]);
|
||||
const [audSaved, setAudSaved] = useState("");
|
||||
const [autoProcessing, setAutoProcessing] = useState(false);
|
||||
const langsLoadedRef = useRef(false);
|
||||
|
||||
const load = useCallback(() => {
|
||||
@@ -425,6 +471,7 @@ export function SettingsPage() {
|
||||
.then((d) => {
|
||||
settingsCache = d;
|
||||
setData(d);
|
||||
setAutoProcessing(d.config.auto_processing === "1");
|
||||
if (!langsLoadedRef.current) {
|
||||
let parsed: string[] = [];
|
||||
try {
|
||||
@@ -540,6 +587,22 @@ export function SettingsPage() {
|
||||
{/* Schedule */}
|
||||
<ScheduleSection />
|
||||
|
||||
{/* Auto-processing */}
|
||||
<SectionCard title="Processing" subtitle="Control how new library items are handled after a scan.">
|
||||
<label className="flex items-center gap-2 text-sm text-gray-700">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={autoProcessing}
|
||||
onChange={async (e) => {
|
||||
const next = e.target.checked;
|
||||
setAutoProcessing(next);
|
||||
await api.post("/api/settings/auto-processing", { enabled: next });
|
||||
}}
|
||||
/>
|
||||
Auto-process new items — distribute Inbox immediately after scan
|
||||
</label>
|
||||
</SectionCard>
|
||||
|
||||
{/* Audio languages */}
|
||||
<SectionCard
|
||||
title="Audio Languages"
|
||||
|
||||
@@ -65,7 +65,7 @@ function RootLayout() {
|
||||
<VersionBadge />
|
||||
<div className="flex flex-wrap items-center gap-0.5">
|
||||
<NavLink to="/" exact>
|
||||
Scan
|
||||
Library
|
||||
</NavLink>
|
||||
<NavLink to="/pipeline">Pipeline</NavLink>
|
||||
<NavLink to="/review/subtitles">Subtitles</NavLink>
|
||||
|
||||
+31
-5
@@ -46,7 +46,8 @@ export interface ReviewPlan {
|
||||
item_id: number;
|
||||
status: string;
|
||||
is_noop: number;
|
||||
confidence: "high" | "low";
|
||||
auto_class: "auto" | "auto_heuristic" | "manual" | null;
|
||||
sorted: number;
|
||||
apple_compat: "direct_play" | "remux" | "audio_transcode" | null;
|
||||
job_type: "copy" | "transcode";
|
||||
subs_extracted: number;
|
||||
@@ -99,7 +100,7 @@ export interface PipelineReviewItem {
|
||||
item_id: number;
|
||||
status: string;
|
||||
is_noop: number;
|
||||
confidence: "high" | "low";
|
||||
auto_class: "auto" | "auto_heuristic" | "manual" | null;
|
||||
apple_compat: ReviewPlan["apple_compat"];
|
||||
job_type: "copy" | "transcode";
|
||||
// media_item fields
|
||||
@@ -153,18 +154,43 @@ export interface PipelineJobItem {
|
||||
original_language?: string | null;
|
||||
orig_lang_source?: string | null;
|
||||
file_path?: string;
|
||||
confidence?: "high" | "low";
|
||||
auto_class?: "auto" | "auto_heuristic" | "manual" | null;
|
||||
is_noop?: number;
|
||||
transcode_reasons?: string[];
|
||||
audio_streams?: PipelineAudioStream[];
|
||||
}
|
||||
|
||||
export interface PipelineData {
|
||||
review: PipelineReviewItem[];
|
||||
reviewTotal: number;
|
||||
inboxTotal: number;
|
||||
reviewItemsTotal: number;
|
||||
reviewReadyCount: number;
|
||||
reviewManualCount: number;
|
||||
autoProcessing: boolean;
|
||||
queued: PipelineJobItem[];
|
||||
processing: PipelineJobItem[];
|
||||
done: PipelineJobItem[];
|
||||
doneCount: number;
|
||||
jellyfinUrl: string;
|
||||
}
|
||||
|
||||
// ─── Review groups (GET /api/review/groups) ──────────────────────────────────
|
||||
|
||||
export type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
readyCount: number;
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
|
||||
export interface ReviewGroupsResponse {
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user