Compare commits
15 Commits
e49a04c576
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 0c595a787e | |||
| 7d30e6c1a6 | |||
| a2bdecd298 | |||
| c6698db51a | |||
| 604fdc5c6c | |||
| c22642630d | |||
| ab65909e6e | |||
| 07c98f36f0 | |||
| 4e96382097 | |||
| 3f910873eb | |||
| 3f848c0d31 | |||
| 967d2f56ad | |||
| 45f4175929 | |||
| e040c9a234 | |||
| b0d06a1d8c |
857
docs/superpowers/plans/2026-04-15-review-lazy-load.md
Normal file
857
docs/superpowers/plans/2026-04-15-review-lazy-load.md
Normal file
@@ -0,0 +1,857 @@
|
||||
# Review column lazy-load + season grouping — Implementation Plan
|
||||
|
||||
> **For agentic workers:** Use superpowers:subagent-driven-development. Checkbox (`- [ ]`) syntax tracks progress.
|
||||
|
||||
**Goal:** Replace the 500-item review cap with group-paginated infinite scroll; nest season sub-groups inside series when they have pending work across >1 season; wire the existing `/season/:key/:season/approve-all` endpoint into the UI.
|
||||
|
||||
**Architecture:** Move the grouping logic from the client to the server so groups are always returned complete. New `GET /api/review/groups?offset=N&limit=25` endpoint. Client's ReviewColumn becomes a stateful list that extends itself via `IntersectionObserver` on a sentinel.
|
||||
|
||||
**Tech Stack:** Bun + Hono (server), React 19 + TanStack Router (client), bun:sqlite.
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Server — build grouped data structure + new endpoint
|
||||
|
||||
**Files:**
|
||||
- Modify: `server/api/review.ts`
|
||||
|
||||
- [ ] **Step 1: Add shared types + builder**
|
||||
|
||||
At the top of `server/api/review.ts` (near the other type definitions), add exported types:
|
||||
|
||||
```ts
|
||||
export type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
minConfidence: "high" | "low";
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
|
||||
export interface ReviewGroupsResponse {
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
Add a helper after the existing `enrichWithStreamsAndReasons` helper:
|
||||
|
||||
```ts
|
||||
function buildReviewGroups(db: ReturnType<typeof getDb>): {
|
||||
groups: ReviewGroup[];
|
||||
totalItems: number;
|
||||
} {
|
||||
// Fetch ALL pending non-noop items. Grouping + pagination happen in memory.
|
||||
const rows = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.name, mi.series_name, mi.series_jellyfin_id,
|
||||
mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0
|
||||
ORDER BY
|
||||
CASE rp.confidence WHEN 'high' THEN 0 ELSE 1 END,
|
||||
COALESCE(mi.series_name, mi.name),
|
||||
mi.season_number, mi.episode_number
|
||||
`)
|
||||
.all() as PipelineReviewItem[];
|
||||
|
||||
const movies: PipelineReviewItem[] = [];
|
||||
const seriesMap = new Map<
|
||||
string,
|
||||
{
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
seasons: Map<number | null, PipelineReviewItem[]>;
|
||||
originalLanguage: string | null;
|
||||
minConfidence: "high" | "low";
|
||||
firstName: string;
|
||||
}
|
||||
>();
|
||||
|
||||
for (const row of rows) {
|
||||
if (row.type === "Movie") {
|
||||
movies.push(row);
|
||||
continue;
|
||||
}
|
||||
const key = row.series_jellyfin_id ?? row.series_name ?? String(row.item_id);
|
||||
let entry = seriesMap.get(key);
|
||||
if (!entry) {
|
||||
entry = {
|
||||
seriesName: row.series_name ?? "",
|
||||
seriesJellyfinId: row.series_jellyfin_id,
|
||||
seasons: new Map(),
|
||||
originalLanguage: row.original_language,
|
||||
minConfidence: row.confidence,
|
||||
firstName: row.series_name ?? "",
|
||||
};
|
||||
seriesMap.set(key, entry);
|
||||
}
|
||||
const season = row.season_number;
|
||||
let bucket = entry.seasons.get(season);
|
||||
if (!bucket) {
|
||||
bucket = [];
|
||||
entry.seasons.set(season, bucket);
|
||||
}
|
||||
bucket.push(row);
|
||||
if (row.confidence === "high" && entry.minConfidence === "low") {
|
||||
// Keep minConfidence as the "best" confidence across episodes — if any
|
||||
// episode is high, that's the group's dominant confidence for sort.
|
||||
// Actually we want the LOWEST (low wins) so user sees low-confidence
|
||||
// groups sorted after high-confidence ones. Revisit: keep low if present.
|
||||
}
|
||||
if (row.confidence === "low") entry.minConfidence = "low";
|
||||
}
|
||||
|
||||
// Sort season keys within each series (nulls last), episodes by episode_number.
|
||||
const seriesGroups: ReviewGroup[] = [];
|
||||
for (const [seriesKey, entry] of seriesMap) {
|
||||
const seasonKeys = [...entry.seasons.keys()].sort((a, b) => {
|
||||
if (a === null) return 1;
|
||||
if (b === null) return -1;
|
||||
return a - b;
|
||||
});
|
||||
const seasons = seasonKeys.map((season) => ({
|
||||
season,
|
||||
episodes: (entry.seasons.get(season) ?? []).sort(
|
||||
(a, b) => (a.episode_number ?? 0) - (b.episode_number ?? 0),
|
||||
),
|
||||
}));
|
||||
const episodeCount = seasons.reduce((sum, s) => sum + s.episodes.length, 0);
|
||||
seriesGroups.push({
|
||||
kind: "series",
|
||||
seriesKey,
|
||||
seriesName: entry.seriesName,
|
||||
seriesJellyfinId: entry.seriesJellyfinId,
|
||||
episodeCount,
|
||||
minConfidence: entry.minConfidence,
|
||||
originalLanguage: entry.originalLanguage,
|
||||
seasons,
|
||||
});
|
||||
}
|
||||
|
||||
// Interleave movies + series, sort by (minConfidence, name).
|
||||
const movieGroups: ReviewGroup[] = movies.map((m) => ({ kind: "movie" as const, item: m }));
|
||||
const allGroups = [...movieGroups, ...seriesGroups].sort((a, b) => {
|
||||
const confA = a.kind === "movie" ? a.item.confidence : a.minConfidence;
|
||||
const confB = b.kind === "movie" ? b.item.confidence : b.minConfidence;
|
||||
const rankA = confA === "high" ? 0 : 1;
|
||||
const rankB = confB === "high" ? 0 : 1;
|
||||
if (rankA !== rankB) return rankA - rankB;
|
||||
const nameA = a.kind === "movie" ? a.item.name : a.seriesName;
|
||||
const nameB = b.kind === "movie" ? b.item.name : b.seriesName;
|
||||
return nameA.localeCompare(nameB);
|
||||
});
|
||||
|
||||
const totalItems = movieGroups.length + seriesGroups.reduce((sum, g) => sum + (g as { episodeCount: number }).episodeCount, 0);
|
||||
return { groups: allGroups, totalItems };
|
||||
}
|
||||
```
|
||||
|
||||
(When implementing, ensure the loop contains only `if (row.confidence === "low") entry.minConfidence = "low";` — omit the no-op `if (row.confidence === "high" && …)` block and its contradictory draft comments, and drop the unused `firstName` field.)
|
||||
|
||||
- [ ] **Step 2: Add the `/groups` endpoint**
|
||||
|
||||
Add before `app.get("/pipeline", …)`:
|
||||
|
||||
```ts
|
||||
app.get("/groups", (c) => {
|
||||
const db = getDb();
|
||||
const offset = Math.max(0, Number.parseInt(c.req.query("offset") ?? "0", 10) || 0);
|
||||
const limit = Math.max(1, Math.min(200, Number.parseInt(c.req.query("limit") ?? "25", 10) || 25));
|
||||
|
||||
const { groups, totalItems } = buildReviewGroups(db);
|
||||
const page = groups.slice(offset, offset + limit);
|
||||
|
||||
// Enrich each visible episode/movie with audio streams + transcode reasons
|
||||
// (same shape the existing UI expects — reuse the helper already in this file).
|
||||
const flatItemsForEnrichment: Array<{ id: number; plan_id?: number; item_id: number; transcode_reasons?: string[]; audio_streams?: PipelineAudioStream[] }> = [];
|
||||
for (const g of page) {
|
||||
if (g.kind === "movie") flatItemsForEnrichment.push(g.item as never);
|
||||
else for (const s of g.seasons) for (const ep of s.episodes) flatItemsForEnrichment.push(ep as never);
|
||||
}
|
||||
enrichWithStreamsAndReasons(flatItemsForEnrichment);
|
||||
|
||||
return c.json<ReviewGroupsResponse>({
|
||||
groups: page,
|
||||
totalGroups: groups.length,
|
||||
totalItems,
|
||||
hasMore: offset + limit < groups.length,
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
`PipelineAudioStream` already imported; if not, add to existing import block.
|
||||
|
||||
- [ ] **Step 3: Modify `/pipeline` to drop `review`/`reviewTotal`**
|
||||
|
||||
In the existing `app.get("/pipeline", …)` handler (around line 270):
|
||||
|
||||
- Delete the `review` SELECT (lines ~278–293) and the enrichment of `review` rows.
|
||||
- Delete the `reviewTotal` count query (lines ~294–296).
|
||||
- Add in its place: `const reviewItemsTotal = (db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0").get() as { n: number }).n;`
|
||||
- In the final `return c.json({...})` (line ~430), replace `review, reviewTotal` with `reviewItemsTotal`.
|
||||
|
||||
- [ ] **Step 4: Run tests + lint + tsc**
|
||||
|
||||
```
|
||||
mise exec bun -- bun test
|
||||
mise exec bun -- bun run lint
|
||||
mise exec bun -- bunx tsc --noEmit --project tsconfig.server.json
|
||||
```
|
||||
|
||||
All must pass. If tests that hit `/pipeline` fail because they expect `review[]`, update them in the same commit (they need to migrate anyway).
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add server/api/review.ts
|
||||
git commit -m "review: add /groups endpoint with server-side grouping + pagination"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: Server — test `/groups` endpoint
|
||||
|
||||
**Files:**
|
||||
- Create: `server/api/__tests__/review-groups.test.ts`
|
||||
|
||||
- [ ] **Step 1: Write the test file**
|
||||
|
||||
```ts
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { Hono } from "hono";
|
||||
import reviewRoutes from "../review";
|
||||
import { setupTestDb, seedItem, seedPlan } from "./test-helpers"; // adjust to the project's test helpers; see existing webhook.test.ts for how tests wire up a DB
|
||||
|
||||
const app = new Hono();
|
||||
app.route("/api/review", reviewRoutes);
|
||||
|
||||
describe("GET /api/review/groups", () => {
|
||||
test("returns complete series even when total items exceed limit", async () => {
|
||||
const db = setupTestDb();
|
||||
// Seed 1 series with 30 episodes, all pending non-noop
|
||||
for (let i = 1; i <= 30; i++) seedItem(db, { type: "Episode", seriesName: "Breaking Bad", seasonNumber: 1, episodeNumber: i });
|
||||
for (const row of db.prepare("SELECT id FROM media_items").all() as { id: number }[]) seedPlan(db, row.id, { pending: true, isNoop: false });
|
||||
|
||||
const res = await app.request("/api/review/groups?offset=0&limit=25");
|
||||
const body = await res.json();
|
||||
|
||||
expect(body.groups).toHaveLength(1);
|
||||
expect(body.groups[0].kind).toBe("series");
|
||||
expect(body.groups[0].episodeCount).toBe(30);
|
||||
expect(body.groups[0].seasons[0].episodes).toHaveLength(30);
|
||||
expect(body.totalItems).toBe(30);
|
||||
expect(body.hasMore).toBe(false);
|
||||
});
|
||||
|
||||
test("paginates groups with hasMore=true", async () => {
|
||||
const db = setupTestDb();
|
||||
for (let i = 1; i <= 50; i++) seedItem(db, { type: "Movie", name: `Movie ${String(i).padStart(2, "0")}` });
|
||||
for (const row of db.prepare("SELECT id FROM media_items").all() as { id: number }[]) seedPlan(db, row.id, { pending: true, isNoop: false });
|
||||
|
||||
const page1 = await (await app.request("/api/review/groups?offset=0&limit=25")).json();
|
||||
const page2 = await (await app.request("/api/review/groups?offset=25&limit=25")).json();
|
||||
|
||||
expect(page1.groups).toHaveLength(25);
|
||||
expect(page1.hasMore).toBe(true);
|
||||
expect(page2.groups).toHaveLength(25);
|
||||
expect(page2.hasMore).toBe(false);
|
||||
const ids1 = page1.groups.map((g: { item: { item_id: number } }) => g.item.item_id);
|
||||
const ids2 = page2.groups.map((g: { item: { item_id: number } }) => g.item.item_id);
|
||||
expect(ids1.filter((id: number) => ids2.includes(id))).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("buckets episodes by season, nulls last", async () => {
|
||||
const db = setupTestDb();
|
||||
for (let ep = 1; ep <= 3; ep++) seedItem(db, { type: "Episode", seriesName: "Lost", seasonNumber: 1, episodeNumber: ep });
|
||||
for (let ep = 1; ep <= 2; ep++) seedItem(db, { type: "Episode", seriesName: "Lost", seasonNumber: 2, episodeNumber: ep });
|
||||
seedItem(db, { type: "Episode", seriesName: "Lost", seasonNumber: null, episodeNumber: null });
|
||||
for (const row of db.prepare("SELECT id FROM media_items").all() as { id: number }[]) seedPlan(db, row.id, { pending: true, isNoop: false });
|
||||
|
||||
const body = await (await app.request("/api/review/groups?offset=0&limit=25")).json();
|
||||
const lost = body.groups[0];
|
||||
expect(lost.kind).toBe("series");
|
||||
expect(lost.seasons.map((s: { season: number | null }) => s.season)).toEqual([1, 2, null]);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Important: this test file needs the project's actual test-helpers pattern. Before writing, look at `server/services/__tests__/webhook.test.ts` (the 60-line one that's still in the repo after the verified-flag block was removed) and **copy its setup style** — including how it creates a test DB, how it seeds media_items and review_plans, and how it invokes the Hono app. Replace the placeholder `setupTestDb`, `seedItem`, `seedPlan` calls with whatever the real helpers are.
|
||||
|
||||
- [ ] **Step 2: Run the tests**
|
||||
|
||||
```
|
||||
mise exec bun -- bun test server/api/__tests__/review-groups.test.ts
|
||||
```
|
||||
|
||||
Expected: 3 passes.
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add server/api/__tests__/review-groups.test.ts
|
||||
git commit -m "test: /groups endpoint — series completeness, pagination, season buckets"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: Client types + PipelinePage
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/shared/lib/types.ts`
|
||||
- Modify: `src/features/pipeline/PipelinePage.tsx`
|
||||
|
||||
- [ ] **Step 1: Update shared types**
|
||||
|
||||
In `src/shared/lib/types.ts`, replace the `PipelineData` interface's `review` and `reviewTotal` fields with `reviewItemsTotal: number`. Add types for the new groups response:
|
||||
|
||||
```ts
|
||||
export type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
minConfidence: "high" | "low";
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
|
||||
export interface ReviewGroupsResponse {
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
The `PipelineData` interface becomes:
|
||||
```ts
|
||||
export interface PipelineData {
|
||||
reviewItemsTotal: number;
|
||||
queued: PipelineJobItem[];
|
||||
processing: PipelineJobItem[];
|
||||
done: PipelineJobItem[];
|
||||
doneCount: number;
|
||||
jellyfinUrl: string;
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update PipelinePage**
|
||||
|
||||
Change `PipelinePage.tsx`:
|
||||
|
||||
- Add state for the initial groups page: `const [initialGroups, setInitialGroups] = useState<ReviewGroupsResponse | null>(null);`
|
||||
- In `load()`, fetch both in parallel:
|
||||
```ts
|
||||
const [pipelineRes, groupsRes] = await Promise.all([
|
||||
api.get<PipelineData>("/api/review/pipeline"),
|
||||
api.get<ReviewGroupsResponse>("/api/review/groups?offset=0&limit=25"),
|
||||
]);
|
||||
setData(pipelineRes);
|
||||
setInitialGroups(groupsRes);
|
||||
```
|
||||
- Wait for both before rendering (loading gate: `if (loading || !data || !initialGroups) return <Loading />`).
|
||||
- Pass to ReviewColumn: `<ReviewColumn initialResponse={initialGroups} totalItems={data.reviewItemsTotal} jellyfinUrl={data.jellyfinUrl} onMutate={load} />` — drop `items` and `total` props.
|
||||
|
||||
- [ ] **Step 3: Tsc + lint**
|
||||
|
||||
```
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bun run lint
|
||||
```
|
||||
|
||||
Expected: errors in `ReviewColumn.tsx` because its props type hasn't been updated yet — that's fine, Task 4 fixes it. For this step, only verify that types.ts and PipelinePage.tsx themselves compile internally. If the build breaks because of ReviewColumn, commit these two files anyway and proceed to Task 4 immediately.
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add src/shared/lib/types.ts src/features/pipeline/PipelinePage.tsx
|
||||
git commit -m "pipeline: fetch review groups endpoint in parallel with pipeline"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: Client — ReviewColumn with infinite scroll
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/pipeline/ReviewColumn.tsx`
|
||||
|
||||
- [ ] **Step 1: Rewrite ReviewColumn**
|
||||
|
||||
Replace the file contents with:
|
||||
|
||||
```tsx
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { ReviewGroup, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { ColumnShell } from "./ColumnShell";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
import { SeriesCard } from "./SeriesCard";
|
||||
|
||||
const PAGE_SIZE = 25;
|
||||
|
||||
interface ReviewColumnProps {
|
||||
initialResponse: ReviewGroupsResponse;
|
||||
totalItems: number;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
export function ReviewColumn({ initialResponse, totalItems, jellyfinUrl, onMutate }: ReviewColumnProps) {
|
||||
const [groups, setGroups] = useState<ReviewGroup[]>(initialResponse.groups);
|
||||
const [hasMore, setHasMore] = useState(initialResponse.hasMore);
|
||||
const [loadingMore, setLoadingMore] = useState(false);
|
||||
const sentinelRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
// Reset when parent passes a new initial page (onMutate refetch)
|
||||
useEffect(() => {
|
||||
setGroups(initialResponse.groups);
|
||||
setHasMore(initialResponse.hasMore);
|
||||
}, [initialResponse]);
|
||||
|
||||
const loadMore = useCallback(async () => {
|
||||
if (loadingMore || !hasMore) return;
|
||||
setLoadingMore(true);
|
||||
try {
|
||||
const res = await api.get<ReviewGroupsResponse>(`/api/review/groups?offset=${groups.length}&limit=${PAGE_SIZE}`);
|
||||
setGroups((prev) => [...prev, ...res.groups]);
|
||||
setHasMore(res.hasMore);
|
||||
} finally {
|
||||
setLoadingMore(false);
|
||||
}
|
||||
}, [groups.length, hasMore, loadingMore]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!hasMore || !sentinelRef.current) return;
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
if (entries[0]?.isIntersecting) loadMore();
|
||||
},
|
||||
{ rootMargin: "200px" },
|
||||
);
|
||||
observer.observe(sentinelRef.current);
|
||||
return () => observer.disconnect();
|
||||
}, [hasMore, loadMore]);
|
||||
|
||||
const skipAll = async () => {
|
||||
if (!confirm(`Skip all ${totalItems} pending items? They won't be processed unless you unskip them.`)) return;
|
||||
await api.post("/api/review/skip-all");
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const autoApprove = async () => {
|
||||
const res = await api.post<{ ok: boolean; count: number }>("/api/review/auto-approve");
|
||||
onMutate();
|
||||
if (res.count === 0) alert("No high-confidence items to auto-approve.");
|
||||
};
|
||||
|
||||
const approveItem = async (itemId: number) => {
|
||||
await api.post(`/api/review/${itemId}/approve`);
|
||||
onMutate();
|
||||
};
|
||||
const skipItem = async (itemId: number) => {
|
||||
await api.post(`/api/review/${itemId}/skip`);
|
||||
onMutate();
|
||||
};
|
||||
const approveBatch = async (itemIds: number[]) => {
|
||||
if (itemIds.length === 0) return;
|
||||
await api.post<{ ok: boolean; count: number }>("/api/review/approve-batch", { itemIds });
|
||||
onMutate();
|
||||
};
|
||||
|
||||
// Compute ids per visible group for "Approve above"
|
||||
const idsByGroup: number[][] = groups.map((g) =>
|
||||
g.kind === "movie" ? [g.item.item_id] : g.seasons.flatMap((s) => s.episodes.map((ep) => ep.item_id)),
|
||||
);
|
||||
const priorIds = (index: number): number[] => idsByGroup.slice(0, index).flat();
|
||||
|
||||
const actions =
|
||||
totalItems > 0
|
||||
? [
|
||||
{ label: "Auto Review", onClick: autoApprove, primary: true },
|
||||
{ label: "Skip all", onClick: skipAll },
|
||||
]
|
||||
: undefined;
|
||||
|
||||
return (
|
||||
<ColumnShell title="Review" count={totalItems} actions={actions}>
|
||||
<div className="space-y-2">
|
||||
{groups.map((group, index) => {
|
||||
const prior = index > 0 ? priorIds(index) : null;
|
||||
const onApproveUpToHere = prior && prior.length > 0 ? () => approveBatch(prior) : undefined;
|
||||
if (group.kind === "movie") {
|
||||
return (
|
||||
<PipelineCard
|
||||
key={group.item.id}
|
||||
item={group.item}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${group.item.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={() => approveItem(group.item.item_id)}
|
||||
onSkip={() => skipItem(group.item.item_id)}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<SeriesCard
|
||||
key={group.seriesKey}
|
||||
seriesKey={group.seriesKey}
|
||||
seriesName={group.seriesName}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
seriesJellyfinId={group.seriesJellyfinId}
|
||||
seasons={group.seasons}
|
||||
episodeCount={group.episodeCount}
|
||||
originalLanguage={group.originalLanguage}
|
||||
onMutate={onMutate}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{groups.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items to review</p>}
|
||||
{hasMore && (
|
||||
<div ref={sentinelRef} className="py-4 text-center text-xs text-gray-400">
|
||||
{loadingMore ? "Loading more…" : ""}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ColumnShell>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Tsc + lint**
|
||||
|
||||
```
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bun run lint
|
||||
```
|
||||
|
||||
Expected: the call site in ReviewColumn passes `seasons`, `episodeCount`, `originalLanguage` props to SeriesCard — this will fail until Task 5 updates SeriesCard. Same handling as Task 3 step 3: commit and proceed.
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add src/features/pipeline/ReviewColumn.tsx
|
||||
git commit -m "review column: infinite scroll with IntersectionObserver sentinel"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: Client — SeriesCard season nesting
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/pipeline/SeriesCard.tsx`
|
||||
|
||||
- [ ] **Step 1: Rewrite SeriesCard**
|
||||
|
||||
Replace the file contents with:
|
||||
|
||||
```tsx
|
||||
import { useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import { LANG_NAMES } from "~/shared/lib/lang";
|
||||
import type { PipelineReviewItem } from "~/shared/lib/types";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
|
||||
interface SeriesCardProps {
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
jellyfinUrl: string;
|
||||
seriesJellyfinId: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
episodeCount: number;
|
||||
originalLanguage: string | null;
|
||||
onMutate: () => void;
|
||||
onApproveUpToHere?: () => void;
|
||||
}
|
||||
|
||||
export function SeriesCard({
|
||||
seriesKey,
|
||||
seriesName,
|
||||
jellyfinUrl,
|
||||
seriesJellyfinId,
|
||||
seasons,
|
||||
episodeCount,
|
||||
originalLanguage,
|
||||
onMutate,
|
||||
onApproveUpToHere,
|
||||
}: SeriesCardProps) {
|
||||
const [expanded, setExpanded] = useState(false);
|
||||
|
||||
const flatEpisodes = seasons.flatMap((s) => s.episodes);
|
||||
const highCount = flatEpisodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = flatEpisodes.filter((e) => e.confidence === "low").length;
|
||||
const multipleSeasons = seasons.length > 1;
|
||||
|
||||
const setSeriesLanguage = async (lang: string) => {
|
||||
await api.patch(`/api/review/series/${encodeURIComponent(seriesKey)}/language`, { language: lang });
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const approveSeries = async () => {
|
||||
await api.post(`/api/review/series/${encodeURIComponent(seriesKey)}/approve-all`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const approveSeason = async (season: number | null) => {
|
||||
if (season == null) return;
|
||||
await api.post(`/api/review/season/${encodeURIComponent(seriesKey)}/${season}/approve-all`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const jellyfinLink =
|
||||
jellyfinUrl && seriesJellyfinId ? `${jellyfinUrl}/web/index.html#!/details?id=${seriesJellyfinId}` : null;
|
||||
|
||||
return (
|
||||
<div className="group/series rounded-lg border bg-white overflow-hidden">
|
||||
{/* Title row */}
|
||||
<div
|
||||
className="flex items-center gap-2 px-3 pt-3 pb-1 cursor-pointer hover:bg-gray-50 rounded-t-lg"
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
>
|
||||
<span className="text-xs text-gray-400 shrink-0">{expanded ? "▼" : "▶"}</span>
|
||||
{jellyfinLink ? (
|
||||
<a
|
||||
href={jellyfinLink}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-sm font-medium truncate hover:text-blue-600 hover:underline"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
{seriesName}
|
||||
</a>
|
||||
) : (
|
||||
<p className="text-sm font-medium truncate">{seriesName}</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Controls row */}
|
||||
<div className="flex items-center gap-2 px-3 pb-3 pt-1">
|
||||
<span className="text-xs text-gray-500 shrink-0">{episodeCount} eps</span>
|
||||
{multipleSeasons && <span className="text-xs text-gray-500 shrink-0">· {seasons.length} seasons</span>}
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<div className="flex-1" />
|
||||
<select
|
||||
className="h-6 text-xs border border-gray-300 rounded px-1 bg-white shrink-0"
|
||||
value={originalLanguage ?? ""}
|
||||
onChange={(e) => {
|
||||
e.stopPropagation();
|
||||
setSeriesLanguage(e.target.value);
|
||||
}}
|
||||
>
|
||||
<option value="">unknown</option>
|
||||
{Object.entries(LANG_NAMES).map(([code, name]) => (
|
||||
<option key={code} value={code}>
|
||||
{name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveUpToHere();
|
||||
}}
|
||||
title="Approve every card listed above this one"
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0 opacity-0 group-hover/series:opacity-100 transition-opacity"
|
||||
>
|
||||
↑ Approve above
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
approveSeries();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded bg-blue-600 text-white hover:bg-blue-700 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve series
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{expanded && (
|
||||
<div className="border-t">
|
||||
{multipleSeasons
|
||||
? seasons.map((s) => (
|
||||
<SeasonGroup
|
||||
key={s.season ?? "unknown"}
|
||||
season={s.season}
|
||||
episodes={s.episodes}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onApproveSeason={() => approveSeason(s.season)}
|
||||
onMutate={onMutate}
|
||||
/>
|
||||
))
|
||||
: flatEpisodes.map((ep) => (
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function SeasonGroup({
|
||||
season,
|
||||
episodes,
|
||||
jellyfinUrl,
|
||||
onApproveSeason,
|
||||
onMutate,
|
||||
}: {
|
||||
season: number | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
jellyfinUrl: string;
|
||||
onApproveSeason: () => void;
|
||||
onMutate: () => void;
|
||||
}) {
|
||||
const [open, setOpen] = useState(false);
|
||||
const highCount = episodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = episodes.filter((e) => e.confidence === "low").length;
|
||||
const label = season == null ? "No season" : `Season ${String(season).padStart(2, "0")}`;
|
||||
|
||||
return (
|
||||
<div className="border-t first:border-t-0">
|
||||
<div
|
||||
className="flex items-center gap-2 px-3 py-2 cursor-pointer hover:bg-gray-50"
|
||||
onClick={() => setOpen(!open)}
|
||||
>
|
||||
<span className="text-xs text-gray-400 shrink-0">{open ? "▼" : "▶"}</span>
|
||||
<span className="text-xs font-medium shrink-0">{label}</span>
|
||||
<span className="text-xs text-gray-500 shrink-0">· {episodes.length} eps</span>
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<div className="flex-1" />
|
||||
{season != null && (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveSeason();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve season
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
{open && (
|
||||
<div className="px-3 pb-3 space-y-2 pt-2">
|
||||
{episodes.map((ep) => (
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function EpisodeRow({ ep, jellyfinUrl, onMutate }: { ep: PipelineReviewItem; jellyfinUrl: string; onMutate: () => void }) {
|
||||
return (
|
||||
<div className="px-3 py-1">
|
||||
<PipelineCard
|
||||
item={ep}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${ep.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={async () => {
|
||||
await api.post(`/api/review/${ep.item_id}/approve`);
|
||||
onMutate();
|
||||
}}
|
||||
onSkip={async () => {
|
||||
await api.post(`/api/review/${ep.item_id}/skip`);
|
||||
onMutate();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
(The `EpisodeRow` wrapper keeps the padding consistent whether episodes render directly under the series or under a season group.)
|
||||
|
||||
- [ ] **Step 2: Lint + tsc + test + build**
|
||||
|
||||
```
|
||||
mise exec bun -- bun run lint
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bun test
|
||||
mise exec bun -- bun run build
|
||||
```
|
||||
|
||||
All must pass now that the whole pipeline (server → types → PipelinePage → ReviewColumn → SeriesCard) is consistent.
|
||||
|
||||
- [ ] **Step 3: Manual smoke test**
|
||||
|
||||
```
|
||||
mise exec bun -- bun run dev
|
||||
```
|
||||
|
||||
Navigate to the Pipeline page:
|
||||
- Confirm no "Showing first 500 of N" banner.
|
||||
- Scroll the Review column to the bottom; new groups auto-load.
|
||||
- Find a series with pending work in >1 season; expand it; confirm nested seasons with working `Approve season` button.
|
||||
- Find a series with pending work in a single season; expand it; confirm flat episode list (no season nesting).
|
||||
- Click `Approve series` on a series with many pending episodes; confirm the whole series vanishes from the column.
|
||||
|
||||
Kill the dev server.
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add src/features/pipeline/SeriesCard.tsx
|
||||
git commit -m "series card: nest seasons when >1 pending, add Approve season button"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 6: Version bump + final push
|
||||
|
||||
- [ ] **Step 1: Bump CalVer**
|
||||
|
||||
In `package.json`, set version to today's next free dot-suffix (today is 2026-04-15; prior releases are `.1` and `.2`, so use `.3` unless already taken).
|
||||
|
||||
- [ ] **Step 2: Final checks**
|
||||
|
||||
```
|
||||
mise exec bun -- bun run lint
|
||||
mise exec bun -- bunx tsc --noEmit
|
||||
mise exec bun -- bunx tsc --noEmit --project tsconfig.server.json
|
||||
mise exec bun -- bun test
|
||||
mise exec bun -- bun run build
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Commit + push**
|
||||
|
||||
```bash
|
||||
git add package.json
|
||||
git commit -m "v2026.04.15.3 — review column lazy-load + season grouping"
|
||||
git push gitea main
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Guided Gates (user-verified)
|
||||
|
||||
- **GG-1:** No "Showing first 500 of N" banner.
|
||||
- **GG-2:** A series with episodes previously split across the cap now shows the correct episode count.
|
||||
- **GG-3:** A series with >1 pending season expands into nested season groups, each with a working `Approve season` button.
|
||||
- **GG-4:** A series with 1 pending season expands flat (no extra nesting).
|
||||
- **GG-5:** Scrolling to the bottom of Review auto-loads the next page; no scroll = no extra fetch.
|
||||
47
docs/superpowers/plans/2026-04-15-scan-page-rework.md
Normal file
47
docs/superpowers/plans/2026-04-15-scan-page-rework.md
Normal file
@@ -0,0 +1,47 @@
|
||||
# Scan Page Rework Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Rework the Scan page to prioritize progress + fresh ingest visibility, and add a scalable filterable/lazy-loaded library table.
|
||||
|
||||
**Architecture:** Keep `/api/scan` lightweight for status/progress and compact recent ingest rows. Add `/api/scan/items` for paginated/filterable DB browsing. Update `ScanPage` to render: scan card header count, compact 5-row recent ingest table, then a filterable lazy-loaded library table.
|
||||
|
||||
**Tech Stack:** Bun + Hono, React 19 + TanStack Router, bun:test, Biome.
|
||||
|
||||
---
|
||||
|
||||
### Task 1: Backend scan payload + items endpoint (TDD)
|
||||
|
||||
**Files:**
|
||||
- Modify: `server/api/__tests__/scan.test.ts`
|
||||
- Modify: `server/db/schema.ts`
|
||||
- Modify: `server/db/index.ts`
|
||||
- Modify: `server/services/rescan.ts`
|
||||
- Modify: `server/api/scan.ts`
|
||||
|
||||
- [ ] Add failing tests for scan item query parsing/normalization and SQL filter behavior helpers.
|
||||
- [ ] Run targeted tests to verify failure.
|
||||
- [ ] Add `media_items.ingest_source` schema + migration, set value on upsert (`scan`/`webhook`).
|
||||
- [ ] Extend `GET /api/scan` recent item shape with timestamp + ingest source and clamp to 5 rows.
|
||||
- [ ] Add `GET /api/scan/items` with filters (`q,status,type,source`) + pagination (`offset,limit`), returning `{ rows,total,hasMore }`.
|
||||
- [ ] Run targeted and full backend tests.
|
||||
|
||||
### Task 2: Scan page UI rework + lazy table
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/features/scan/ScanPage.tsx`
|
||||
|
||||
- [ ] Refactor scan box header to show scanned count in top-right.
|
||||
- [ ] Replace large recent-items table with a compact 5-row recent ingest list directly under progress bar.
|
||||
- [ ] Add filter controls for library table (`q,status,type,source`) with default “All”.
|
||||
- [ ] Add lazy loading flow (initial fetch + load more) against `/api/scan/items`.
|
||||
- [ ] Render new table with useful file metadata columns and consistent truncation/tooltips.
|
||||
|
||||
### Task 3: Verification
|
||||
|
||||
**Files:**
|
||||
- Modify: none
|
||||
|
||||
- [ ] Run `bun test`.
|
||||
- [ ] Run `bun run lint` and format if needed.
|
||||
- [ ] Confirm no regressions in scan start/stop/progress behavior.
|
||||
111
docs/superpowers/specs/2026-04-15-review-lazy-load-design.md
Normal file
111
docs/superpowers/specs/2026-04-15-review-lazy-load-design.md
Normal file
@@ -0,0 +1,111 @@
|
||||
# Review column lazy-load + season grouping
|
||||
|
||||
Date: 2026-04-15
|
||||
|
||||
## Summary
|
||||
|
||||
Replace the Review column's 500-item hard cap with server-side group-paginated lazy loading. Series are always returned complete (every pending non-noop episode, grouped by season), eliminating the "2 eps" mirage caused by groups getting split across the cap. When a series has pending work in more than one season, the UI nests seasons as collapsible sub-groups, each with its own "Approve season" button.
|
||||
|
||||
## Motivation
|
||||
|
||||
`server/api/review.ts:277` caps the pipeline's review list at 500 items. ReviewColumn groups client-side, so any series whose episodes spill beyond the cap shows a wrong episode count and partial episode list. The banner "Showing first 500 of N" is present but misleading — the *groups* don't survive the cut, not just the tail.
|
||||
|
||||
The existing "Approve all" button on a series card already calls `/series/:seriesKey/approve-all`, which operates on the DB directly and does approve every pending episode — so functionality works, only the display is wrong. Still, partial groups are confusing and the 500 cap forces users to approve in waves.
|
||||
|
||||
## Server changes
|
||||
|
||||
### New endpoint `GET /api/review/groups?offset=0&limit=25`
|
||||
|
||||
Response:
|
||||
```ts
|
||||
{
|
||||
groups: ReviewGroup[];
|
||||
totalGroups: number;
|
||||
totalItems: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
type ReviewGroup =
|
||||
| { kind: "movie"; item: PipelineReviewItem }
|
||||
| {
|
||||
kind: "series";
|
||||
seriesKey: string;
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodeCount: number;
|
||||
minConfidence: "high" | "low";
|
||||
originalLanguage: string | null;
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
};
|
||||
```
|
||||
|
||||
Ordering:
|
||||
- Groups ordered by (min confidence across group ASC — `high` < `low`), then (series_name or movie name ASC)
|
||||
- Within a series, seasons ordered by `season_number` ASC (`null` last)
|
||||
- Within a season, episodes ordered by `episode_number` ASC
|
||||
|
||||
Implementation outline:
|
||||
1. Query all pending non-noop plans joined to media_items (existing `review` query minus the LIMIT).
|
||||
2. Walk once in sort order, producing groups: a Movie becomes a one-shot `{ kind: "movie" }`; consecutive Episodes sharing `series_jellyfin_id` (or `series_name` fallback) accumulate into a `{ kind: "series" }` with `seasons` bucketed by `season_number`.
|
||||
3. Apply `.slice(offset, offset + limit)` over the full group list, enrich per-episode audio streams + transcode reasons for episodes that survive (reuse existing `enrichWithStreamsAndReasons`).
|
||||
4. `totalGroups` = full group count before slicing. `totalItems` = sum of episode counts + movie count (unchanged from today's `reviewTotal`). `hasMore` = `offset + limit < totalGroups`.
|
||||
|
||||
### `GET /api/review/pipeline` changes
|
||||
|
||||
Drop `review` and `reviewTotal` from the response. Add `reviewItemsTotal: number` so the column header shows a count before the groups endpoint resolves. Queue / Processing / Done / doneCount stay unchanged.
|
||||
|
||||
### Kept as-is
|
||||
|
||||
- `POST /api/review/series/:seriesKey/approve-all` (`review.ts:529`)
|
||||
- `POST /api/review/season/:seriesKey/:season/approve-all` (`review.ts:549`) — already implemented, just unused by the UI until now
|
||||
|
||||
## Client changes
|
||||
|
||||
### PipelinePage
|
||||
|
||||
Fetches `/api/review/pipeline` for queue columns (existing) and separately `/api/review/groups?offset=0&limit=25` for the Review column's initial page. `onMutate` refetches both. Pass `reviewGroups`, `reviewGroupsTotalItems`, `reviewHasMore` into `ReviewColumn`.
|
||||
|
||||
### ReviewColumn
|
||||
|
||||
Replace the hard-cap rendering with infinite scroll:
|
||||
- Render the current loaded groups.
|
||||
- Append a sentinel `<div>` at the bottom when `hasMore`. An `IntersectionObserver` attached to it triggers a fetch of the next page when it enters the scroll viewport.
|
||||
- Pagination state (`offset`, `groups`, `hasMore`, `loading`) lives locally in ReviewColumn — parent passes `initialGroups` on mount and whenever the filter changes (`onMutate` → parent refetches page 0).
|
||||
- Remove the "Showing first N of M" banner and the `truncated` logic.
|
||||
|
||||
### SeriesCard
|
||||
|
||||
When `seasons.length > 1`:
|
||||
- Render seasons as collapsible sub-groups inside the expanded series body.
|
||||
- Each season header: `S{NN} — {episodeCount} eps · {high} high / {low} low` + an `Approve season` button.
|
||||
|
||||
When `seasons.length === 1`:
|
||||
- Render the current flat episode list (no extra nesting).
|
||||
|
||||
Rename the existing header button `Approve all` → `Approve series`.
|
||||
|
||||
### "Approve above"
|
||||
|
||||
Keeps its current "approve every group currently visible above this card" semantic. With lazy loading, that means "everything the user has scrolled past". Compute item ids client-side across the loaded groups as today. No endpoint change.
|
||||
|
||||
## Data flow
|
||||
|
||||
1. PipelinePage mounts → parallel fetch `/pipeline` + `/groups?offset=0&limit=25`.
|
||||
2. User scrolls; sentinel becomes visible → fetch `/groups?offset=25&limit=25`; appended to the list.
|
||||
3. User clicks `Approve series` on a card → `POST /series/:key/approve-all` → `onMutate` → parent refetches `/pipeline` + `/groups?offset=0&limit=25`. Series gone from list.
|
||||
4. User clicks `Approve season S02` on a nested season → `POST /season/:key/2/approve-all` → `onMutate` → same refetch.
|
||||
|
||||
## Testing
|
||||
|
||||
- Server unit test: `/groups` endpoint returns a series with all pending episodes even when the total item count exceeds `limit * offset_pages`.
|
||||
- Server unit test: offset/limit/hasMore correctness across the group boundary.
|
||||
- Server unit test: seasons array is populated, sorted, with `null` season_number ordered last.
|
||||
- Manual: scroll through the Review column on a library with >1000 pending items and confirm episode counts match `SELECT COUNT(*) ... WHERE pending AND is_noop=0` scoped per series.
|
||||
|
||||
## Guided Gates
|
||||
|
||||
- **GG-1:** No "Showing first 500 of N" banner ever appears.
|
||||
- **GG-2:** A series whose episodes previously split across the cap now shows the correct episode count immediately on first page load (if the series is in the first page) or after scroll (if not).
|
||||
- **GG-3:** A series with pending episodes in 2+ seasons expands into nested season sub-groups, each with an `Approve season` button that approves only that season.
|
||||
- **GG-4:** A series with pending episodes in exactly one season expands into the flat episode list as before.
|
||||
- **GG-5:** Scrolling to the bottom of the Review column auto-fetches the next page without a click; scrolling stops fetching when `hasMore` is false.
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "netfelix-audio-fix",
|
||||
"version": "2026.04.15.1",
|
||||
"version": "2026.04.15.10",
|
||||
"scripts": {
|
||||
"dev:server": "NODE_ENV=development bun --hot server/index.tsx",
|
||||
"dev:client": "vite",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { extractErrorSummary } from "../execute";
|
||||
import { enqueueUnseenJobs, extractErrorSummary, shouldSendLiveUpdate, yieldAfterChunk } from "../execute";
|
||||
|
||||
describe("extractErrorSummary", () => {
|
||||
test("pulls the real error line out of ffmpeg's banner", () => {
|
||||
@@ -47,3 +47,39 @@ describe("extractErrorSummary", () => {
|
||||
expect(summary).toBe("Error: no space left on device");
|
||||
});
|
||||
});
|
||||
|
||||
describe("shouldSendLiveUpdate", () => {
  // Boundary is strict: 501ms past the last send passes, 200ms does not.
  test("throttles updates until interval passes", () => {
    expect(shouldSendLiveUpdate(1_000, 800, 500)).toBe(false);
    expect(shouldSendLiveUpdate(1_301, 800, 500)).toBe(true);
  });
});
|
||||
|
||||
describe("yieldAfterChunk", () => {
  // The injected `sleep` stands in for Bun.sleep so the test can count how
  // often the helper actually yields the event loop.
  test("yields once threshold is reached, resets chunk counter", async () => {
    let yieldCalls = 0;
    const sleep = async (_ms: number) => {
      yieldCalls += 1;
    };
    let chunks = 0;
    chunks = await yieldAfterChunk(chunks, 3, sleep);
    expect(chunks).toBe(1);
    chunks = await yieldAfterChunk(chunks, 3, sleep);
    expect(chunks).toBe(2);
    // Third chunk hits the threshold of 3: one yield, counter resets to 0.
    chunks = await yieldAfterChunk(chunks, 3, sleep);
    expect(chunks).toBe(0);
    expect(yieldCalls).toBe(1);
  });
});
|
||||
|
||||
describe("enqueueUnseenJobs", () => {
  // Jobs 1 and 2 are already queued/seen; only 3 and 4 should be appended,
  // and both should be marked seen afterwards.
  test("appends only unseen job ids to the active queue", () => {
    const queue = [{ id: 1 }, { id: 2 }] as { id: number }[];
    const seen = new Set([1, 2]);
    const added = enqueueUnseenJobs(queue, seen, [{ id: 2 }, { id: 3 }, { id: 4 }] as { id: number }[]);
    expect(added).toBe(2);
    expect(queue.map((j) => j.id)).toEqual([1, 2, 3, 4]);
    expect(seen.has(3)).toBeTrue();
    expect(seen.has(4)).toBeTrue();
  });
});
|
||||
|
||||
161
server/api/__tests__/review-groups.test.ts
Normal file
161
server/api/__tests__/review-groups.test.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SCHEMA } from "../../db/schema";
|
||||
import { buildReviewGroups } from "../review";
|
||||
|
||||
function makeDb(): Database {
|
||||
const db = new Database(":memory:");
|
||||
for (const stmt of SCHEMA.split(";")) {
|
||||
const trimmed = stmt.trim();
|
||||
if (trimmed) db.run(trimmed);
|
||||
}
|
||||
return db;
|
||||
}
|
||||
|
||||
// Options for seeding one media item plus its pending review plan into the
// test database (see `seed` below for how defaults are applied).
interface SeedOpts {
  id: number; // primary key; also used to derive jellyfin_id and file_path
  type: "Movie" | "Episode";
  name?: string; // defaults to `Item ${id}`
  seriesName?: string | null;
  seriesJellyfinId?: string | null;
  seasonNumber?: number | null;
  episodeNumber?: number | null;
  confidence?: "high" | "low"; // defaults to "high"
}
|
||||
|
||||
function seed(db: Database, opts: SeedOpts) {
|
||||
const {
|
||||
id,
|
||||
type,
|
||||
name = `Item ${id}`,
|
||||
seriesName = null,
|
||||
seriesJellyfinId = null,
|
||||
seasonNumber = null,
|
||||
episodeNumber = null,
|
||||
confidence = "high",
|
||||
} = opts;
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO media_items (id, jellyfin_id, type, name, series_name, series_jellyfin_id, season_number, episode_number, file_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
)
|
||||
.run(id, `jf-${id}`, type, name, seriesName, seriesJellyfinId, seasonNumber, episodeNumber, `/x/${id}.mkv`);
|
||||
db
|
||||
.prepare(
|
||||
"INSERT INTO review_plans (item_id, status, is_noop, confidence, apple_compat, job_type, notes) VALUES (?, 'pending', 0, ?, 'direct_play', 'copy', NULL)",
|
||||
)
|
||||
.run(id, confidence);
|
||||
}
|
||||
|
||||
describe("buildReviewGroups", () => {
  // 30 episodes in one season must never be split across pages — the whole
  // point of group pagination is that a series comes back complete.
  test("returns a complete series with every pending episode", () => {
    const db = makeDb();
    for (let i = 1; i <= 30; i++) {
      seed(db, {
        id: i,
        type: "Episode",
        seriesName: "Breaking Bad",
        seriesJellyfinId: "bb",
        seasonNumber: 1,
        episodeNumber: i,
      });
    }

    const { groups, totalItems } = buildReviewGroups(db);

    expect(groups).toHaveLength(1);
    const series = groups[0];
    expect(series.kind).toBe("series");
    if (series.kind !== "series") throw new Error("expected series");
    expect(series.episodeCount).toBe(30);
    expect(series.seasons).toHaveLength(1);
    expect(series.seasons[0].episodes).toHaveLength(30);
    expect(totalItems).toBe(30);
  });

  // Seasons 1, 2, then the null (unknown) season ordered last.
  test("buckets episodes by season with null ordered last", () => {
    const db = makeDb();
    for (let ep = 1; ep <= 3; ep++) {
      seed(db, {
        id: ep,
        type: "Episode",
        seriesName: "Lost",
        seriesJellyfinId: "lost",
        seasonNumber: 1,
        episodeNumber: ep,
      });
    }
    for (let ep = 1; ep <= 2; ep++) {
      seed(db, {
        id: 10 + ep,
        type: "Episode",
        seriesName: "Lost",
        seriesJellyfinId: "lost",
        seasonNumber: 2,
        episodeNumber: ep,
      });
    }
    seed(db, { id: 99, type: "Episode", seriesName: "Lost", seriesJellyfinId: "lost", seasonNumber: null });

    const { groups } = buildReviewGroups(db);
    expect(groups).toHaveLength(1);
    const lost = groups[0];
    if (lost.kind !== "series") throw new Error("expected series");
    expect(lost.seasons.map((s) => s.season)).toEqual([1, 2, null]);
    expect(lost.seasons[0].episodes).toHaveLength(3);
    expect(lost.seasons[1].episodes).toHaveLength(2);
    expect(lost.seasons[2].episodes).toHaveLength(1);
  });

  // High-confidence groups sort before low; ties break alphabetically, so
  // low-confidence "Arrival" lands after both high-confidence movies.
  test("sorts groups: high-confidence first, then by name", () => {
    const db = makeDb();
    seed(db, { id: 1, type: "Movie", name: "Zodiac", confidence: "high" });
    seed(db, { id: 2, type: "Movie", name: "Arrival", confidence: "low" });
    seed(db, { id: 3, type: "Movie", name: "Blade Runner", confidence: "high" });

    const { groups } = buildReviewGroups(db);
    const names = groups.map((g) => (g.kind === "movie" ? g.item.name : g.seriesName));
    expect(names).toEqual(["Blade Runner", "Zodiac", "Arrival"]);
  });

  // A single low-confidence episode drags the whole series to "low".
  test("minConfidence is low when any episode in the series is low", () => {
    const db = makeDb();
    seed(db, {
      id: 1,
      type: "Episode",
      seriesName: "Show",
      seriesJellyfinId: "s",
      seasonNumber: 1,
      episodeNumber: 1,
      confidence: "high",
    });
    seed(db, {
      id: 2,
      type: "Episode",
      seriesName: "Show",
      seriesJellyfinId: "s",
      seasonNumber: 1,
      episodeNumber: 2,
      confidence: "low",
    });

    const { groups } = buildReviewGroups(db);
    expect(groups).toHaveLength(1);
    if (groups[0].kind !== "series") throw new Error("expected series");
    expect(groups[0].minConfidence).toBe("low");
  });

  // Only pending, non-noop plans should surface; totals must agree.
  test("excludes plans that are not pending or are is_noop=1", () => {
    const db = makeDb();
    seed(db, { id: 1, type: "Movie", name: "Pending" });
    seed(db, { id: 2, type: "Movie", name: "Approved" });
    db.prepare("UPDATE review_plans SET status = 'approved' WHERE item_id = ?").run(2);
    seed(db, { id: 3, type: "Movie", name: "Noop" });
    db.prepare("UPDATE review_plans SET is_noop = 1 WHERE item_id = ?").run(3);

    const { groups, totalItems } = buildReviewGroups(db);
    expect(groups).toHaveLength(1);
    expect(totalItems).toBe(1);
    if (groups[0].kind !== "movie") throw new Error("expected movie");
    expect(groups[0].item.name).toBe("Pending");
  });
});
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { parseScanLimit } from "../scan";
|
||||
import { buildScanItemsWhere, parseScanItemsQuery, parseScanLimit } from "../scan";
|
||||
|
||||
describe("parseScanLimit", () => {
|
||||
test("accepts positive integers and nullish/empty as no-limit", () => {
|
||||
@@ -29,3 +29,73 @@ describe("parseScanLimit", () => {
|
||||
expect(parseScanLimit(Number.POSITIVE_INFINITY)).toEqual({ ok: false });
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseScanItemsQuery", () => {
  // Empty query object yields the documented defaults.
  test("normalizes default filters and pagination", () => {
    const q = parseScanItemsQuery({});
    expect(q).toEqual({
      offset: 0,
      limit: 50,
      search: "",
      status: "all",
      type: "all",
      source: "all",
    });
  });

  // Negative offset clamps to 0, oversized limit clamps to 200; enum values
  // are lowercased while the free-text search is only trimmed.
  test("clamps limit and offset, trims and lowercases values", () => {
    const q = parseScanItemsQuery({
      offset: "-12",
      limit: "5000",
      q: " The Wire ",
      status: "SCANNED",
      type: "EPISODE",
      source: "WEBHOOK",
    });
    expect(q).toEqual({
      offset: 0,
      limit: 200,
      search: "The Wire",
      status: "scanned",
      type: "episode",
      source: "webhook",
    });
  });

  // Unknown enum values degrade to the broad "all" filter rather than error.
  test("falls back to all for unknown enum values", () => {
    const q = parseScanItemsQuery({ status: "zzz", type: "cartoon", source: "mqtt" });
    expect(q.status).toBe("all");
    expect(q.type).toBe("all");
    expect(q.source).toBe("all");
  });
});
|
||||
|
||||
describe("buildScanItemsWhere", () => {
  // Clause order must be stable (status, type, source, then search) so the
  // positional args line up with their placeholders.
  test("builds combined where clause + args in stable order", () => {
    const where = buildScanItemsWhere({
      offset: 0,
      limit: 50,
      search: "blade",
      status: "scanned",
      type: "movie",
      source: "webhook",
    });
    expect(where.sql).toBe(
      "WHERE scan_status = ? AND lower(type) = ? AND ingest_source = ? AND (lower(name) LIKE ? OR lower(file_path) LIKE ?)",
    );
    expect(where.args).toEqual(["scanned", "movie", "webhook", "%blade%", "%blade%"]);
  });

  // All-broad filters produce no WHERE clause at all.
  test("returns empty where when all filters are broad", () => {
    const where = buildScanItemsWhere({
      offset: 0,
      limit: 50,
      search: "",
      status: "all",
      type: "all",
      source: "all",
    });
    expect(where.sql).toBe("");
    expect(where.args).toEqual([]);
  });
});
||||
|
||||
@@ -3,6 +3,7 @@ import { Hono } from "hono";
|
||||
import { stream } from "hono/streaming";
|
||||
import { getDb } from "../db/index";
|
||||
import { log, error as logError, warn } from "../lib/log";
|
||||
import { parseId } from "../lib/validate";
|
||||
import { predictExtractedFiles } from "../services/ffmpeg";
|
||||
import {
|
||||
getScheduleConfig,
|
||||
@@ -22,6 +23,36 @@ const app = new Hono();
|
||||
let queueRunning = false;
|
||||
let runningProc: ReturnType<typeof Bun.spawn> | null = null;
|
||||
let runningJobId: number | null = null;
|
||||
let activeQueue: Job[] | null = null;
|
||||
let activeSeen: Set<number> | null = null;
|
||||
const LIVE_UPDATE_INTERVAL_MS = 500;
|
||||
const STREAM_CHUNKS_BEFORE_YIELD = 24;
|
||||
|
||||
export function shouldSendLiveUpdate(now: number, lastSentAt: number, intervalMs = LIVE_UPDATE_INTERVAL_MS): boolean {
|
||||
return now - lastSentAt > intervalMs;
|
||||
}
|
||||
|
||||
export async function yieldAfterChunk(
|
||||
chunksSinceYield: number,
|
||||
chunksBeforeYield = STREAM_CHUNKS_BEFORE_YIELD,
|
||||
sleep: (ms: number) => Promise<unknown> = (ms) => Bun.sleep(ms),
|
||||
): Promise<number> {
|
||||
const next = chunksSinceYield + 1;
|
||||
if (next < chunksBeforeYield) return next;
|
||||
await sleep(0);
|
||||
return 0;
|
||||
}
|
||||
|
||||
export function enqueueUnseenJobs<T extends { id: number }>(queue: T[], seen: Set<number>, jobs: T[]): number {
|
||||
let added = 0;
|
||||
for (const job of jobs) {
|
||||
if (seen.has(job.id)) continue;
|
||||
queue.push(job);
|
||||
seen.add(job.id);
|
||||
added += 1;
|
||||
}
|
||||
return added;
|
||||
}
|
||||
|
||||
function emitQueueStatus(
|
||||
status: "running" | "paused" | "sleeping" | "idle",
|
||||
@@ -31,12 +62,19 @@ function emitQueueStatus(
|
||||
for (const l of jobListeners) l(line);
|
||||
}
|
||||
|
||||
async function runSequential(jobs: Job[]): Promise<void> {
|
||||
async function runSequential(initial: Job[]): Promise<void> {
|
||||
if (queueRunning) return;
|
||||
queueRunning = true;
|
||||
try {
|
||||
let first = true;
|
||||
for (const job of jobs) {
|
||||
const queue: Job[] = [...initial];
|
||||
const seen = new Set<number>(queue.map((j) => j.id));
|
||||
activeQueue = queue;
|
||||
activeSeen = seen;
|
||||
|
||||
while (queue.length > 0) {
|
||||
const job = queue.shift() as Job;
|
||||
|
||||
// Pause outside the processing window
|
||||
if (!isInProcessWindow()) {
|
||||
emitQueueStatus("paused", {
|
||||
@@ -70,8 +108,18 @@ async function runSequential(jobs: Job[]): Promise<void> {
|
||||
} catch (err) {
|
||||
logError(`Job ${job.id} failed:`, err);
|
||||
}
|
||||
|
||||
// When the local queue drains, re-check the DB for jobs that were
|
||||
// approved mid-run. Without this they'd sit pending until the user
|
||||
// manually clicks "Run all" again.
|
||||
if (queue.length === 0) {
|
||||
const more = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
|
||||
enqueueUnseenJobs(queue, seen, more);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
activeQueue = null;
|
||||
activeSeen = null;
|
||||
queueRunning = false;
|
||||
emitQueueStatus("idle");
|
||||
}
|
||||
@@ -137,21 +185,17 @@ function loadJobRow(jobId: number) {
|
||||
return { job: row as unknown as Job, item };
|
||||
}
|
||||
|
||||
// ─── Param helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
function parseId(raw: string | undefined): number | null {
|
||||
if (!raw) return null;
|
||||
const n = Number.parseInt(raw, 10);
|
||||
return Number.isFinite(n) && n > 0 ? n : null;
|
||||
}
|
||||
|
||||
// ─── Start all pending ────────────────────────────────────────────────────────
|
||||
|
||||
app.post("/start", (c) => {
|
||||
const db = getDb();
|
||||
const pending = db.prepare("SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at").all() as Job[];
|
||||
if (queueRunning && activeQueue && activeSeen) {
|
||||
const queued = enqueueUnseenJobs(activeQueue, activeSeen, pending);
|
||||
return c.json({ ok: true, started: 0, queued });
|
||||
}
|
||||
runSequential(pending).catch((err) => logError("Queue failed:", err));
|
||||
return c.json({ ok: true, started: pending.length });
|
||||
return c.json({ ok: true, started: pending.length, queued: pending.length });
|
||||
});
|
||||
|
||||
// ─── Run single ───────────────────────────────────────────────────────────────
|
||||
@@ -318,14 +362,16 @@ async function runJob(job: Job): Promise<void> {
|
||||
const updateOutput = db.prepare("UPDATE jobs SET output = ? WHERE id = ?");
|
||||
|
||||
const flush = (final = false) => {
|
||||
const text = outputLines.join("\n");
|
||||
const now = Date.now();
|
||||
if (final || now - lastFlushAt > 500) {
|
||||
if (!final && !shouldSendLiveUpdate(now, lastFlushAt)) {
|
||||
pendingFlush = true;
|
||||
return;
|
||||
}
|
||||
const text = outputLines.join("\n");
|
||||
if (final || shouldSendLiveUpdate(now, lastFlushAt)) {
|
||||
updateOutput.run(text, job.id);
|
||||
lastFlushAt = now;
|
||||
pendingFlush = false;
|
||||
} else {
|
||||
pendingFlush = true;
|
||||
}
|
||||
emitJobUpdate(job.id, "running", text);
|
||||
};
|
||||
@@ -338,7 +384,7 @@ async function runJob(job: Job): Promise<void> {
|
||||
const progressed = parseFFmpegProgress(line);
|
||||
if (progressed != null && totalSeconds > 0) {
|
||||
const now = Date.now();
|
||||
if (now - lastProgressEmit > 500) {
|
||||
if (shouldSendLiveUpdate(now, lastProgressEmit)) {
|
||||
emitJobProgress(job.id, progressed, totalSeconds);
|
||||
lastProgressEmit = now;
|
||||
}
|
||||
@@ -353,6 +399,7 @@ async function runJob(job: Job): Promise<void> {
|
||||
const reader = readable.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = "";
|
||||
let chunksSinceYield = 0;
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
@@ -366,6 +413,8 @@ async function runJob(job: Job): Promise<void> {
|
||||
consumeProgress(line);
|
||||
}
|
||||
flush();
|
||||
// Let pending HTTP requests run even when ffmpeg floods stdout/stderr.
|
||||
chunksSinceYield = await yieldAfterChunk(chunksSinceYield);
|
||||
}
|
||||
if (buffer.trim()) {
|
||||
outputLines.push(prefix + buffer);
|
||||
|
||||
@@ -275,36 +275,246 @@ interface PipelineAudioStream {
|
||||
action: "keep" | "remove";
|
||||
}
|
||||
|
||||
// A row that can be enriched in place. `plan_id` is set on Queued rows (where
// `id` is the job id); Review rows carry the plan id in `id` itself.
type EnrichableRow = { id?: number; plan_id?: number; item_id: number } & {
  transcode_reasons?: string[];
  audio_streams?: PipelineAudioStream[];
};

/**
 * Enrich review/queued rows with transcode-reason badges and pre-checked audio
 * streams. Works for both the Review column (where `id` is the plan id) and
 * the Queued column (where `plan_id` is explicit and `id` is the job id).
 *
 * Mutates each row in place, setting `transcode_reasons` and `audio_streams`
 * (both default to empty arrays when nothing matches). No-op on empty input.
 */
function enrichWithStreamsAndReasons(db: ReturnType<typeof getDb>, rows: EnrichableRow[]): void {
  if (rows.length === 0) return;
  // Prefer the explicit plan_id (Queued rows); fall back to id (Review rows).
  const planIdFor = (r: EnrichableRow): number => (r.plan_id ?? r.id) as number;
  const planIds = rows.map(planIdFor);
  const itemIds = rows.map((r) => r.item_id);

  // One batched IN (...) query per concern; placeholders generated per row.
  const reasonPh = planIds.map(() => "?").join(",");
  const allReasons = db
    .prepare(`
      SELECT DISTINCT sd.plan_id, ms.codec, sd.transcode_codec
      FROM stream_decisions sd
      JOIN media_streams ms ON ms.id = sd.stream_id
      WHERE sd.plan_id IN (${reasonPh}) AND sd.transcode_codec IS NOT NULL
    `)
    .all(...planIds) as { plan_id: number; codec: string | null; transcode_codec: string }[];
  // Group "SRC → DST" badge strings by plan id.
  const reasonsByPlan = new Map<number, string[]>();
  for (const r of allReasons) {
    if (!reasonsByPlan.has(r.plan_id)) reasonsByPlan.set(r.plan_id, []);
    // Non-null assertion is safe: the key was just inserted above.
    reasonsByPlan.get(r.plan_id)!.push(`${(r.codec ?? "").toUpperCase()} → ${r.transcode_codec.toUpperCase()}`);
  }

  const streamPh = itemIds.map(() => "?").join(",");
  const streamRows = db
    .prepare(`
      SELECT ms.id, ms.item_id, ms.language, ms.codec, ms.channels, ms.title,
             ms.is_default, sd.action
      FROM media_streams ms
      JOIN review_plans rp ON rp.item_id = ms.item_id
      LEFT JOIN stream_decisions sd ON sd.plan_id = rp.id AND sd.stream_id = ms.id
      WHERE ms.item_id IN (${streamPh}) AND ms.type = 'Audio'
      ORDER BY ms.item_id, ms.stream_index
    `)
    .all(...itemIds) as {
    id: number;
    item_id: number;
    language: string | null;
    codec: string | null;
    channels: number | null;
    title: string | null;
    is_default: number;
    action: "keep" | "remove" | null;
  }[];
  // Bucket audio streams per media item, preserving stream_index order.
  const streamsByItem = new Map<number, PipelineAudioStream[]>();
  for (const r of streamRows) {
    if (!streamsByItem.has(r.item_id)) streamsByItem.set(r.item_id, []);
    streamsByItem.get(r.item_id)!.push({
      id: r.id,
      language: r.language,
      codec: r.codec,
      channels: r.channels,
      title: r.title,
      is_default: r.is_default,
      // Streams without an explicit decision row default to "keep".
      action: r.action ?? "keep",
    });
  }

  // Write results back onto the rows; empty arrays when nothing matched.
  for (const r of rows) {
    r.transcode_reasons = reasonsByPlan.get(planIdFor(r)) ?? [];
    r.audio_streams = streamsByItem.get(r.item_id) ?? [];
  }
}
||||
|
||||
// ─── Review groups (paginated, always returns complete series) ──────────────
|
||||
|
||||
// One pending review plan joined with its media item — the flat row shape
// produced by the /groups query before grouping into ReviewGroup values.
interface ReviewItemRow {
  id: number; // review_plans.id (the plan id)
  item_id: number; // media_items.id
  status: string;
  is_noop: number; // SQLite boolean (query filters on is_noop = 0)
  confidence: "high" | "low";
  apple_compat: ReviewPlan["apple_compat"];
  job_type: "copy" | "transcode";
  name: string;
  series_name: string | null; // null for movies
  series_jellyfin_id: string | null;
  jellyfin_id: string;
  season_number: number | null;
  episode_number: number | null;
  type: "Movie" | "Episode";
  container: string | null;
  original_language: string | null;
  orig_lang_source: string | null;
  file_path: string;
  // Populated for the visible page only, by enrichWithStreamsAndReasons.
  transcode_reasons?: string[];
  audio_streams?: PipelineAudioStream[];
}
|
||||
|
||||
// A unit of the paginated review list: either a standalone movie or a whole
// series with its pending episodes nested per season. Grouping happens
// server-side so a page never splits a series in half.
type ReviewGroup =
  | { kind: "movie"; item: ReviewItemRow }
  | {
      kind: "series";
      seriesKey: string; // series_jellyfin_id, falling back to name / item id
      seriesName: string;
      seriesJellyfinId: string | null;
      episodeCount: number; // pending episodes across all seasons
      minConfidence: "high" | "low"; // "low" if any episode is low-confidence
      originalLanguage: string | null; // taken from the first episode row
      seasons: Array<{ season: number | null; episodes: ReviewItemRow[] }>;
    };
|
||||
|
||||
export function buildReviewGroups(db: ReturnType<typeof getDb>): { groups: ReviewGroup[]; totalItems: number } {
|
||||
const rows = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.name, mi.series_name, mi.series_jellyfin_id,
|
||||
mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0
|
||||
ORDER BY
|
||||
CASE rp.confidence WHEN 'high' THEN 0 ELSE 1 END,
|
||||
COALESCE(mi.series_name, mi.name),
|
||||
mi.season_number, mi.episode_number
|
||||
`)
|
||||
.all() as ReviewItemRow[];
|
||||
|
||||
const movieGroups: ReviewGroup[] = [];
|
||||
interface SeriesAccum {
|
||||
seriesName: string;
|
||||
seriesJellyfinId: string | null;
|
||||
seasons: Map<number | null, ReviewItemRow[]>;
|
||||
originalLanguage: string | null;
|
||||
hasLow: boolean;
|
||||
}
|
||||
const seriesMap = new Map<string, SeriesAccum>();
|
||||
|
||||
for (const row of rows) {
|
||||
if (row.type === "Movie") {
|
||||
movieGroups.push({ kind: "movie", item: row });
|
||||
continue;
|
||||
}
|
||||
const key = row.series_jellyfin_id ?? row.series_name ?? String(row.item_id);
|
||||
let entry = seriesMap.get(key);
|
||||
if (!entry) {
|
||||
entry = {
|
||||
seriesName: row.series_name ?? "",
|
||||
seriesJellyfinId: row.series_jellyfin_id,
|
||||
seasons: new Map(),
|
||||
originalLanguage: row.original_language,
|
||||
hasLow: false,
|
||||
};
|
||||
seriesMap.set(key, entry);
|
||||
}
|
||||
let bucket = entry.seasons.get(row.season_number);
|
||||
if (!bucket) {
|
||||
bucket = [];
|
||||
entry.seasons.set(row.season_number, bucket);
|
||||
}
|
||||
bucket.push(row);
|
||||
if (row.confidence === "low") entry.hasLow = true;
|
||||
}
|
||||
|
||||
const seriesGroups: ReviewGroup[] = [];
|
||||
for (const [seriesKey, entry] of seriesMap) {
|
||||
const seasonKeys = [...entry.seasons.keys()].sort((a, b) => {
|
||||
if (a === null) return 1;
|
||||
if (b === null) return -1;
|
||||
return a - b;
|
||||
});
|
||||
const seasons = seasonKeys.map((season) => ({
|
||||
season,
|
||||
episodes: (entry.seasons.get(season) ?? []).sort((a, b) => (a.episode_number ?? 0) - (b.episode_number ?? 0)),
|
||||
}));
|
||||
const episodeCount = seasons.reduce((sum, s) => sum + s.episodes.length, 0);
|
||||
seriesGroups.push({
|
||||
kind: "series",
|
||||
seriesKey,
|
||||
seriesName: entry.seriesName,
|
||||
seriesJellyfinId: entry.seriesJellyfinId,
|
||||
episodeCount,
|
||||
minConfidence: entry.hasLow ? "low" : "high",
|
||||
originalLanguage: entry.originalLanguage,
|
||||
seasons,
|
||||
});
|
||||
}
|
||||
|
||||
const allGroups = [...movieGroups, ...seriesGroups].sort((a, b) => {
|
||||
const confA = a.kind === "movie" ? a.item.confidence : a.minConfidence;
|
||||
const confB = b.kind === "movie" ? b.item.confidence : b.minConfidence;
|
||||
const rankA = confA === "high" ? 0 : 1;
|
||||
const rankB = confB === "high" ? 0 : 1;
|
||||
if (rankA !== rankB) return rankA - rankB;
|
||||
const nameA = a.kind === "movie" ? a.item.name : a.seriesName;
|
||||
const nameB = b.kind === "movie" ? b.item.name : b.seriesName;
|
||||
return nameA.localeCompare(nameB);
|
||||
});
|
||||
|
||||
const totalItems =
|
||||
movieGroups.length + seriesGroups.reduce((sum, g) => sum + (g.kind === "series" ? g.episodeCount : 0), 0);
|
||||
return { groups: allGroups, totalItems };
|
||||
}
|
||||
|
||||
app.get("/groups", (c) => {
|
||||
const db = getDb();
|
||||
const offset = Math.max(0, Number.parseInt(c.req.query("offset") ?? "0", 10) || 0);
|
||||
const limit = Math.max(1, Math.min(200, Number.parseInt(c.req.query("limit") ?? "25", 10) || 25));
|
||||
|
||||
const { groups, totalItems } = buildReviewGroups(db);
|
||||
const page = groups.slice(offset, offset + limit);
|
||||
|
||||
// Enrich each visible episode/movie with audio streams + transcode reasons.
|
||||
const flat: EnrichableRow[] = [];
|
||||
for (const g of page) {
|
||||
if (g.kind === "movie") flat.push(g.item as EnrichableRow);
|
||||
else for (const s of g.seasons) for (const ep of s.episodes) flat.push(ep as EnrichableRow);
|
||||
}
|
||||
enrichWithStreamsAndReasons(db, flat);
|
||||
|
||||
return c.json({
|
||||
groups: page,
|
||||
totalGroups: groups.length,
|
||||
totalItems,
|
||||
hasMore: offset + limit < groups.length,
|
||||
});
|
||||
});
|
||||
|
||||
app.get("/pipeline", (c) => {
|
||||
const db = getDb();
|
||||
const jellyfinUrl = getConfig("jellyfin_url") ?? "";
|
||||
|
||||
// Cap the review column to keep the page snappy at scale; pipelines
|
||||
// with thousands of pending items would otherwise ship 10k+ rows on
|
||||
// every refresh and re-render every card.
|
||||
const REVIEW_LIMIT = 500;
|
||||
const review = db
|
||||
.prepare(`
|
||||
SELECT rp.*, mi.name, mi.series_name, mi.series_jellyfin_id,
|
||||
mi.jellyfin_id,
|
||||
mi.season_number, mi.episode_number, mi.type, mi.container,
|
||||
mi.original_language, mi.orig_lang_source, mi.file_path
|
||||
FROM review_plans rp
|
||||
JOIN media_items mi ON mi.id = rp.item_id
|
||||
WHERE rp.status = 'pending' AND rp.is_noop = 0
|
||||
ORDER BY
|
||||
CASE rp.confidence WHEN 'high' THEN 0 ELSE 1 END,
|
||||
COALESCE(mi.series_name, mi.name),
|
||||
mi.season_number, mi.episode_number
|
||||
LIMIT ${REVIEW_LIMIT}
|
||||
`)
|
||||
.all();
|
||||
const reviewTotal = (
|
||||
// Review items ship via GET /groups (paginated, always returns complete
|
||||
// series). The pipeline payload only carries the total count so the column
|
||||
// header can render immediately.
|
||||
const reviewItemsTotal = (
|
||||
db.prepare("SELECT COUNT(*) as n FROM review_plans WHERE status = 'pending' AND is_noop = 0").get() as { n: number }
|
||||
).n;
|
||||
|
||||
// Queued gets the same enrichment as review so the card can render
|
||||
// streams + transcode reasons read-only (with a "Back to review" button).
|
||||
// Queued carries stream + transcode-reason enrichment so the card renders
|
||||
// read-only with a "Back to review" button.
|
||||
const queued = db
|
||||
.prepare(`
|
||||
SELECT j.id, j.item_id, j.status, j.started_at, j.completed_at,
|
||||
@@ -355,79 +565,9 @@ app.get("/pipeline", (c) => {
|
||||
};
|
||||
const doneCount = noopRow.n + doneRow.n;
|
||||
|
||||
// Enrich rows that have (plan_id, item_id) with the transcode-reason
|
||||
// badges and pre-checked audio streams. Used for both review and queued
|
||||
// columns so the queued card can render read-only with the same info.
|
||||
type EnrichableRow = { id?: number; plan_id?: number; item_id: number } & {
|
||||
transcode_reasons?: string[];
|
||||
audio_streams?: PipelineAudioStream[];
|
||||
};
|
||||
const enrichWithStreamsAndReasons = (rows: EnrichableRow[]) => {
|
||||
if (rows.length === 0) return;
|
||||
const planIdFor = (r: EnrichableRow): number => (r.plan_id ?? r.id) as number;
|
||||
const planIds = rows.map(planIdFor);
|
||||
const itemIds = rows.map((r) => r.item_id);
|
||||
enrichWithStreamsAndReasons(db, queued as EnrichableRow[]);
|
||||
|
||||
const reasonPh = planIds.map(() => "?").join(",");
|
||||
const allReasons = db
|
||||
.prepare(`
|
||||
SELECT DISTINCT sd.plan_id, ms.codec, sd.transcode_codec
|
||||
FROM stream_decisions sd
|
||||
JOIN media_streams ms ON ms.id = sd.stream_id
|
||||
WHERE sd.plan_id IN (${reasonPh}) AND sd.transcode_codec IS NOT NULL
|
||||
`)
|
||||
.all(...planIds) as { plan_id: number; codec: string | null; transcode_codec: string }[];
|
||||
const reasonsByPlan = new Map<number, string[]>();
|
||||
for (const r of allReasons) {
|
||||
if (!reasonsByPlan.has(r.plan_id)) reasonsByPlan.set(r.plan_id, []);
|
||||
reasonsByPlan.get(r.plan_id)!.push(`${(r.codec ?? "").toUpperCase()} → ${r.transcode_codec.toUpperCase()}`);
|
||||
}
|
||||
|
||||
const streamPh = itemIds.map(() => "?").join(",");
|
||||
const streamRows = db
|
||||
.prepare(`
|
||||
SELECT ms.id, ms.item_id, ms.language, ms.codec, ms.channels, ms.title,
|
||||
ms.is_default, sd.action
|
||||
FROM media_streams ms
|
||||
JOIN review_plans rp ON rp.item_id = ms.item_id
|
||||
LEFT JOIN stream_decisions sd ON sd.plan_id = rp.id AND sd.stream_id = ms.id
|
||||
WHERE ms.item_id IN (${streamPh}) AND ms.type = 'Audio'
|
||||
ORDER BY ms.item_id, ms.stream_index
|
||||
`)
|
||||
.all(...itemIds) as {
|
||||
id: number;
|
||||
item_id: number;
|
||||
language: string | null;
|
||||
codec: string | null;
|
||||
channels: number | null;
|
||||
title: string | null;
|
||||
is_default: number;
|
||||
action: "keep" | "remove" | null;
|
||||
}[];
|
||||
const streamsByItem = new Map<number, PipelineAudioStream[]>();
|
||||
for (const r of streamRows) {
|
||||
if (!streamsByItem.has(r.item_id)) streamsByItem.set(r.item_id, []);
|
||||
streamsByItem.get(r.item_id)!.push({
|
||||
id: r.id,
|
||||
language: r.language,
|
||||
codec: r.codec,
|
||||
channels: r.channels,
|
||||
title: r.title,
|
||||
is_default: r.is_default,
|
||||
action: r.action ?? "keep",
|
||||
});
|
||||
}
|
||||
|
||||
for (const r of rows) {
|
||||
r.transcode_reasons = reasonsByPlan.get(planIdFor(r)) ?? [];
|
||||
r.audio_streams = streamsByItem.get(r.item_id) ?? [];
|
||||
}
|
||||
};
|
||||
|
||||
enrichWithStreamsAndReasons(review as EnrichableRow[]);
|
||||
enrichWithStreamsAndReasons(queued as EnrichableRow[]);
|
||||
|
||||
return c.json({ review, reviewTotal, queued, processing, done, doneCount, jellyfinUrl });
|
||||
return c.json({ reviewItemsTotal, queued, processing, done, doneCount, jellyfinUrl });
|
||||
});
|
||||
|
||||
// ─── List ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -23,6 +23,78 @@ export function parseScanLimit(raw: unknown): { ok: true; value: number | null }
|
||||
return { ok: true, value: n };
|
||||
}
|
||||
|
||||
// Filter vocabularies for the scan /items listing; "all" disables a filter.
type ScanStatusFilter = "all" | "pending" | "scanned" | "error";
type ScanTypeFilter = "all" | "movie" | "episode";
type ScanSourceFilter = "all" | "scan" | "webhook";

/** Normalized, validated query parameters for the scan items listing. */
export interface ScanItemsQuery {
  offset: number; // >= 0 after parsing
  limit: number; // clamped to 1..200 after parsing
  search: string; // trimmed free-text needle; "" means no search
  status: ScanStatusFilter;
  type: ScanTypeFilter;
  source: ScanSourceFilter;
}
|
||||
|
||||
function parsePositiveInt(raw: unknown, fallback: number): number {
|
||||
const n = typeof raw === "number" ? raw : Number(raw);
|
||||
if (!Number.isFinite(n)) return fallback;
|
||||
if (!Number.isInteger(n)) return fallback;
|
||||
return n;
|
||||
}
|
||||
|
||||
function clamp(n: number, min: number, max: number): number {
|
||||
if (n < min) return min;
|
||||
if (n > max) return max;
|
||||
return n;
|
||||
}
|
||||
|
||||
function parseOneOf<T extends readonly string[]>(raw: unknown, allowed: T, fallback: T[number]): T[number] {
|
||||
if (typeof raw !== "string") return fallback;
|
||||
const lowered = raw.toLowerCase();
|
||||
return (allowed as readonly string[]).includes(lowered) ? (lowered as T[number]) : fallback;
|
||||
}
|
||||
|
||||
export function parseScanItemsQuery(raw: Record<string, unknown>): ScanItemsQuery {
|
||||
const limit = clamp(parsePositiveInt(raw.limit, 50), 1, 200);
|
||||
const offset = Math.max(0, parsePositiveInt(raw.offset, 0));
|
||||
const search = typeof raw.q === "string" ? raw.q.trim() : "";
|
||||
return {
|
||||
offset,
|
||||
limit,
|
||||
search,
|
||||
status: parseOneOf(raw.status, ["all", "pending", "scanned", "error"] as const, "all"),
|
||||
type: parseOneOf(raw.type, ["all", "movie", "episode"] as const, "all"),
|
||||
source: parseOneOf(raw.source, ["all", "scan", "webhook"] as const, "all"),
|
||||
};
|
||||
}
|
||||
|
||||
export function buildScanItemsWhere(query: ScanItemsQuery): { sql: string; args: string[] } {
|
||||
const clauses: string[] = [];
|
||||
const args: string[] = [];
|
||||
if (query.status !== "all") {
|
||||
clauses.push("scan_status = ?");
|
||||
args.push(query.status);
|
||||
}
|
||||
if (query.type !== "all") {
|
||||
clauses.push("lower(type) = ?");
|
||||
args.push(query.type);
|
||||
}
|
||||
if (query.source !== "all") {
|
||||
clauses.push("ingest_source = ?");
|
||||
args.push(query.source);
|
||||
}
|
||||
if (query.search.length > 0) {
|
||||
clauses.push("(lower(name) LIKE ? OR lower(file_path) LIKE ?)");
|
||||
const needle = `%${query.search.toLowerCase()}%`;
|
||||
args.push(needle, needle);
|
||||
}
|
||||
return {
|
||||
sql: clauses.length > 0 ? `WHERE ${clauses.join(" AND ")}` : "",
|
||||
args,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── State ────────────────────────────────────────────────────────────────────
|
||||
|
||||
let scanAbort: AbortController | null = null;
|
||||
@@ -60,12 +132,84 @@ app.get("/", (c) => {
|
||||
const errors = (db.prepare("SELECT COUNT(*) as n FROM media_items WHERE scan_status = 'error'").get() as { n: number })
|
||||
.n;
|
||||
const recentItems = db
|
||||
.prepare("SELECT name, type, scan_status, file_path FROM media_items ORDER BY last_scanned_at DESC LIMIT 50")
|
||||
.all() as { name: string; type: string; scan_status: string; file_path: string }[];
|
||||
.prepare(
|
||||
"SELECT name, type, scan_status, file_path, last_scanned_at, ingest_source FROM media_items ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC LIMIT 5",
|
||||
)
|
||||
.all() as {
|
||||
name: string;
|
||||
type: string;
|
||||
scan_status: string;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: string | null;
|
||||
}[];
|
||||
|
||||
return c.json({ running, progress: { scanned, total, errors }, recentItems, scanLimit: currentScanLimit() });
|
||||
});
|
||||
|
||||
// Paged, filterable listing of media items for the scan page. `where.sql`
// is built from the validated query only (placeholders for values), so the
// interpolation below is safe.
app.get("/items", (c) => {
  const db = getDb();
  const query = parseScanItemsQuery({
    offset: c.req.query("offset"),
    limit: c.req.query("limit"),
    q: c.req.query("q"),
    status: c.req.query("status"),
    type: c.req.query("type"),
    source: c.req.query("source"),
  });
  const where = buildScanItemsWhere(query);
  // Newest-first: webhook-ingested rows may lack last_scanned_at, so fall
  // back to created_at; id breaks ties deterministically.
  const rows = db
    .prepare(
      `
      SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
             scan_status, original_language, orig_lang_source, container, file_size, file_path,
             last_scanned_at, ingest_source
      FROM media_items
      ${where.sql}
      ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
      LIMIT ? OFFSET ?
    `,
    )
    .all(...where.args, query.limit, query.offset) as Array<{
    id: number;
    jellyfin_id: string;
    name: string;
    type: string;
    series_name: string | null;
    season_number: number | null;
    episode_number: number | null;
    scan_status: string;
    original_language: string | null;
    orig_lang_source: string | null;
    container: string | null;
    file_size: number | null;
    file_path: string;
    last_scanned_at: string | null;
    ingest_source: string | null;
    // Not selected above — filled by the batched codec query below.
    audio_codecs: string | null;
  }>;

  // Audio codecs per item, batched into one query for the current page.
  // A per-row scalar subquery over media_streams was O(page × streams)
  // and could block the event loop for minutes on large libraries.
  if (rows.length > 0) {
    const placeholders = rows.map(() => "?").join(",");
    const codecRows = db
      .prepare(
        `SELECT item_id, GROUP_CONCAT(DISTINCT LOWER(codec)) AS codecs
         FROM media_streams
         WHERE item_id IN (${placeholders}) AND type = 'Audio' AND codec IS NOT NULL
         GROUP BY item_id`,
      )
      .all(...rows.map((r) => r.id)) as { item_id: number; codecs: string | null }[];
    const byItem = new Map(codecRows.map((r) => [r.item_id, r.codecs]));
    for (const row of rows) row.audio_codecs = byItem.get(row.id) ?? null;
  }

  // Total matching the same filters, so the client can page correctly.
  const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
  return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
});
|
||||
|
||||
// ─── Start ────────────────────────────────────────────────────────────────────
|
||||
|
||||
app.post("/start", async (c) => {
|
||||
|
||||
@@ -8,16 +8,37 @@ import { testConnection as testSonarr } from "../services/sonarr";
|
||||
|
||||
const app = new Hono();
|
||||
|
||||
// Config keys that hold credentials. `GET /` returns these as "***" when set,
// "" when unset. Real values only reach the client via the explicit
// GET /reveal?key=<key> endpoint (eye-icon toggle in the settings UI).
// Save endpoints treat an incoming "***" as "keep the stored value" —
// see resolveSecret below.
const SECRET_KEYS = new Set(["jellyfin_api_key", "radarr_api_key", "sonarr_api_key", "mqtt_password"]);
|
||||
|
||||
app.get("/", (c) => {
|
||||
const config = getAllConfig();
|
||||
for (const key of SECRET_KEYS) {
|
||||
if (config[key]) config[key] = "***";
|
||||
}
|
||||
const envLocked = Array.from(getEnvLockedKeys());
|
||||
return c.json({ config, envLocked });
|
||||
});
|
||||
|
||||
app.get("/reveal", (c) => {
|
||||
const key = c.req.query("key") ?? "";
|
||||
if (!SECRET_KEYS.has(key)) return c.json({ error: "not a secret key" }, 400);
|
||||
return c.json({ value: getConfig(key) ?? "" });
|
||||
});
|
||||
|
||||
// The UI sends "***" as a sentinel meaning "user didn't touch this field,
|
||||
// keep the stored value". Save endpoints call this before writing a secret.
|
||||
function resolveSecret(incoming: string | undefined, storedKey: string): string {
|
||||
if (incoming === "***") return getConfig(storedKey) ?? "";
|
||||
return incoming ?? "";
|
||||
}
|
||||
|
||||
app.post("/jellyfin", async (c) => {
|
||||
const body = await c.req.json<{ url: string; api_key: string }>();
|
||||
const url = body.url?.replace(/\/$/, "");
|
||||
const apiKey = body.api_key;
|
||||
const apiKey = resolveSecret(body.api_key, "jellyfin_api_key");
|
||||
|
||||
if (!url || !apiKey) return c.json({ ok: false, error: "URL and API key are required" }, 400);
|
||||
|
||||
@@ -54,7 +75,7 @@ app.post("/jellyfin", async (c) => {
|
||||
app.post("/radarr", async (c) => {
|
||||
const body = await c.req.json<{ url?: string; api_key?: string }>();
|
||||
const url = body.url?.replace(/\/$/, "");
|
||||
const apiKey = body.api_key;
|
||||
const apiKey = resolveSecret(body.api_key, "radarr_api_key");
|
||||
|
||||
if (!url || !apiKey) {
|
||||
setConfig("radarr_enabled", "0");
|
||||
@@ -72,7 +93,7 @@ app.post("/radarr", async (c) => {
|
||||
app.post("/sonarr", async (c) => {
|
||||
const body = await c.req.json<{ url?: string; api_key?: string }>();
|
||||
const url = body.url?.replace(/\/$/, "");
|
||||
const apiKey = body.api_key;
|
||||
const apiKey = resolveSecret(body.api_key, "sonarr_api_key");
|
||||
|
||||
if (!url || !apiKey) {
|
||||
setConfig("sonarr_enabled", "0");
|
||||
@@ -127,9 +148,10 @@ app.post("/mqtt", async (c) => {
|
||||
setConfig("mqtt_url", url);
|
||||
setConfig("mqtt_topic", topic || "jellyfin/events");
|
||||
setConfig("mqtt_username", username);
|
||||
// Only overwrite password when a non-empty value is sent, so the UI can
|
||||
// leave the field blank to indicate "keep the existing one".
|
||||
if (password) setConfig("mqtt_password", password);
|
||||
// Only overwrite password when a real value is sent. The UI leaves the
|
||||
// field blank or sends "***" (masked placeholder) when the user didn't
|
||||
// touch it — both mean "keep the existing one".
|
||||
if (password && password !== "***") setConfig("mqtt_password", password);
|
||||
|
||||
// Reconnect with the new config. Best-effort; failures surface in status.
|
||||
startMqttClient().catch(() => {});
|
||||
|
||||
@@ -79,6 +79,7 @@ function migrate(db: Database): void {
|
||||
// RENAME COLUMN preserves values; both alters are no-ops on fresh DBs.
|
||||
alter("ALTER TABLE review_plans RENAME COLUMN webhook_verified TO verified");
|
||||
alter("ALTER TABLE review_plans DROP COLUMN verified");
|
||||
alter("ALTER TABLE media_items ADD COLUMN ingest_source TEXT NOT NULL DEFAULT 'scan'");
|
||||
}
|
||||
|
||||
function seedDefaults(db: Database): void {
|
||||
|
||||
@@ -31,12 +31,13 @@ CREATE TABLE IF NOT EXISTS media_items (
|
||||
tvdb_id TEXT,
|
||||
jellyfin_raw TEXT,
|
||||
external_raw TEXT,
|
||||
scan_status TEXT NOT NULL DEFAULT 'pending',
|
||||
scan_error TEXT,
|
||||
last_scanned_at TEXT,
|
||||
last_executed_at TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
scan_status TEXT NOT NULL DEFAULT 'pending',
|
||||
scan_error TEXT,
|
||||
last_scanned_at TEXT,
|
||||
ingest_source TEXT NOT NULL DEFAULT 'scan',
|
||||
last_executed_at TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS media_streams (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
|
||||
@@ -133,11 +133,11 @@ export async function upsertJellyfinItem(
|
||||
season_number, episode_number, year, file_path, file_size, container,
|
||||
runtime_ticks, date_last_refreshed,
|
||||
original_language, orig_lang_source, needs_review,
|
||||
imdb_id, tmdb_id, tvdb_id,
|
||||
jellyfin_raw, external_raw,
|
||||
scan_status, last_scanned_at${opts.executed ? ", last_executed_at" : ""}
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now')${opts.executed ? ", datetime('now')" : ""})
|
||||
ON CONFLICT(jellyfin_id) DO UPDATE SET
|
||||
imdb_id, tmdb_id, tvdb_id,
|
||||
jellyfin_raw, external_raw,
|
||||
scan_status, last_scanned_at, ingest_source${opts.executed ? ", last_executed_at" : ""}
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'scanned', datetime('now'), ?${opts.executed ? ", datetime('now')" : ""})
|
||||
ON CONFLICT(jellyfin_id) DO UPDATE SET
|
||||
type = excluded.type, name = excluded.name, original_title = excluded.original_title,
|
||||
series_name = excluded.series_name, series_jellyfin_id = excluded.series_jellyfin_id,
|
||||
season_number = excluded.season_number, episode_number = excluded.episode_number,
|
||||
@@ -145,12 +145,13 @@ export async function upsertJellyfinItem(
|
||||
file_size = excluded.file_size, container = excluded.container,
|
||||
runtime_ticks = excluded.runtime_ticks, date_last_refreshed = excluded.date_last_refreshed,
|
||||
original_language = excluded.original_language, orig_lang_source = excluded.orig_lang_source,
|
||||
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
|
||||
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
|
||||
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
|
||||
scan_status = 'scanned', last_scanned_at = datetime('now')
|
||||
${opts.executed ? ", last_executed_at = datetime('now')" : ""}
|
||||
`);
|
||||
needs_review = excluded.needs_review, imdb_id = excluded.imdb_id,
|
||||
tmdb_id = excluded.tmdb_id, tvdb_id = excluded.tvdb_id,
|
||||
jellyfin_raw = excluded.jellyfin_raw, external_raw = excluded.external_raw,
|
||||
scan_status = 'scanned', last_scanned_at = datetime('now'),
|
||||
ingest_source = excluded.ingest_source
|
||||
${opts.executed ? ", last_executed_at = datetime('now')" : ""}
|
||||
`);
|
||||
upsertItem.run(
|
||||
jellyfinItem.Id,
|
||||
jellyfinItem.Type === "Episode" ? "Episode" : "Movie",
|
||||
@@ -174,6 +175,7 @@ export async function upsertJellyfinItem(
|
||||
tvdbId,
|
||||
jellyfinRaw,
|
||||
externalRawJson,
|
||||
source,
|
||||
);
|
||||
|
||||
const itemRow = db.prepare("SELECT id FROM media_items WHERE jellyfin_id = ?").get(jellyfinItem.Id) as {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { PipelineData } from "~/shared/lib/types";
|
||||
import type { PipelineData, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { DoneColumn } from "./DoneColumn";
|
||||
import { ProcessingColumn } from "./ProcessingColumn";
|
||||
import { QueueColumn } from "./QueueColumn";
|
||||
@@ -20,43 +20,55 @@ interface QueueStatus {
|
||||
|
||||
export function PipelinePage() {
|
||||
const [data, setData] = useState<PipelineData | null>(null);
|
||||
const [initialGroups, setInitialGroups] = useState<ReviewGroupsResponse | null>(null);
|
||||
const [progress, setProgress] = useState<Progress | null>(null);
|
||||
const [queueStatus, setQueueStatus] = useState<QueueStatus | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
const load = useCallback(async () => {
|
||||
const loadPipeline = useCallback(async () => {
|
||||
const pipelineRes = await api.get<PipelineData>("/api/review/pipeline");
|
||||
setData(pipelineRes);
|
||||
setLoading(false);
|
||||
}, []);
|
||||
|
||||
const loadReviewGroups = useCallback(async () => {
|
||||
const groupsRes = await api.get<ReviewGroupsResponse>("/api/review/groups?offset=0&limit=25");
|
||||
setInitialGroups(groupsRes);
|
||||
}, []);
|
||||
|
||||
// Full refresh: used on first mount and after user-driven mutations
|
||||
// (approve/skip). SSE-driven refreshes during a running job call
|
||||
// loadPipeline only, so the Review column's scroll-loaded pages don't get
|
||||
// wiped every second by job_update events.
|
||||
const loadAll = useCallback(async () => {
|
||||
await Promise.all([loadPipeline(), loadReviewGroups()]);
|
||||
setLoading(false);
|
||||
}, [loadPipeline, loadReviewGroups]);
|
||||
|
||||
useEffect(() => {
|
||||
load();
|
||||
}, [load]);
|
||||
loadAll();
|
||||
}, [loadAll]);
|
||||
|
||||
// SSE for live updates. job_update fires on every status change and per-line
|
||||
// stdout flush of the running job — without coalescing, the pipeline endpoint
|
||||
// (a 500-row review query + counts) would re-run several times per second.
|
||||
// stdout flush — coalesce via 1s debounce so the pipeline endpoint doesn't
|
||||
// re-run several times per second.
|
||||
const reloadTimer = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
useEffect(() => {
|
||||
const scheduleReload = () => {
|
||||
const schedulePipelineReload = () => {
|
||||
if (reloadTimer.current) return;
|
||||
reloadTimer.current = setTimeout(() => {
|
||||
reloadTimer.current = null;
|
||||
load();
|
||||
loadPipeline();
|
||||
}, 1000);
|
||||
};
|
||||
const es = new EventSource("/api/execute/events");
|
||||
es.addEventListener("job_update", (e) => {
|
||||
// When a job leaves 'running' (done / error / cancelled), drop any
|
||||
// stale progress so the bar doesn't linger on the next job's card.
|
||||
try {
|
||||
const upd = JSON.parse((e as MessageEvent).data) as { id: number; status: string };
|
||||
if (upd.status !== "running") setProgress(null);
|
||||
} catch {
|
||||
/* ignore malformed events */
|
||||
}
|
||||
scheduleReload();
|
||||
schedulePipelineReload();
|
||||
});
|
||||
es.addEventListener("job_progress", (e) => {
|
||||
setProgress(JSON.parse((e as MessageEvent).data));
|
||||
@@ -68,9 +80,9 @@ export function PipelinePage() {
|
||||
es.close();
|
||||
if (reloadTimer.current) clearTimeout(reloadTimer.current);
|
||||
};
|
||||
}, [load]);
|
||||
}, [loadPipeline]);
|
||||
|
||||
if (loading || !data) return <div className="p-6 text-gray-500">Loading pipeline...</div>;
|
||||
if (loading || !data || !initialGroups) return <div className="p-6 text-gray-500">Loading pipeline...</div>;
|
||||
|
||||
return (
|
||||
<div className="flex flex-col -mx-3 sm:-mx-5 -mt-4 -mb-12 h-[calc(100vh-3rem)] overflow-hidden">
|
||||
@@ -79,10 +91,15 @@ export function PipelinePage() {
|
||||
<span className="text-sm text-gray-500">{data.doneCount} files in desired state</span>
|
||||
</div>
|
||||
<div className="flex flex-1 gap-4 p-4 overflow-x-auto overflow-y-hidden min-h-0">
|
||||
<ReviewColumn items={data.review} total={data.reviewTotal} jellyfinUrl={data.jellyfinUrl} onMutate={load} />
|
||||
<QueueColumn items={data.queued} jellyfinUrl={data.jellyfinUrl} onMutate={load} />
|
||||
<ProcessingColumn items={data.processing} progress={progress} queueStatus={queueStatus} onMutate={load} />
|
||||
<DoneColumn items={data.done} onMutate={load} />
|
||||
<ReviewColumn
|
||||
initialResponse={initialGroups}
|
||||
totalItems={data.reviewItemsTotal}
|
||||
jellyfinUrl={data.jellyfinUrl}
|
||||
onMutate={loadAll}
|
||||
/>
|
||||
<QueueColumn items={data.queued} jellyfinUrl={data.jellyfinUrl} onMutate={loadAll} />
|
||||
<ProcessingColumn items={data.processing} progress={progress} queueStatus={queueStatus} onMutate={loadAll} />
|
||||
<DoneColumn items={data.done} onMutate={loadAll} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -23,6 +23,25 @@ export function ProcessingColumn({ items, progress, queueStatus, onMutate }: Pro
|
||||
return () => clearInterval(t);
|
||||
}, [job]);
|
||||
|
||||
// Local sleep countdown. Server emits the sleep duration once when the
|
||||
// pause begins; the client anchors "deadline = receivedAt + seconds*1000"
|
||||
// and ticks a 1s timer so the UI shows a live countdown, not a static number.
|
||||
const [sleepDeadline, setSleepDeadline] = useState<number | null>(null);
|
||||
const [sleepNow, setSleepNow] = useState(() => Date.now());
|
||||
useEffect(() => {
|
||||
if (queueStatus?.status === "sleeping" && typeof queueStatus.seconds === "number") {
|
||||
setSleepDeadline(Date.now() + queueStatus.seconds * 1000);
|
||||
} else {
|
||||
setSleepDeadline(null);
|
||||
}
|
||||
}, [queueStatus?.status, queueStatus?.seconds]);
|
||||
useEffect(() => {
|
||||
if (sleepDeadline == null) return;
|
||||
const t = setInterval(() => setSleepNow(Date.now()), 1000);
|
||||
return () => clearInterval(t);
|
||||
}, [sleepDeadline]);
|
||||
const sleepRemaining = sleepDeadline != null ? Math.max(0, Math.ceil((sleepDeadline - sleepNow) / 1000)) : null;
|
||||
|
||||
// Only trust progress if it belongs to the current job — stale events from
|
||||
// a previous job would otherwise show wrong numbers until the new job emits.
|
||||
const liveProgress = job && progress && progress.id === job.id ? progress : null;
|
||||
@@ -55,9 +74,9 @@ export function ProcessingColumn({ items, progress, queueStatus, onMutate }: Pro
|
||||
actions={job ? [{ label: "Stop", onClick: stop, danger: true }] : undefined}
|
||||
>
|
||||
{queueStatus && queueStatus.status !== "running" && (
|
||||
<div className="mb-2 text-xs text-gray-500 bg-white rounded border p-2">
|
||||
<div className="mb-2 text-xs text-gray-500 bg-white rounded border p-2 tabular-nums">
|
||||
{queueStatus.status === "paused" && <>Paused until {queueStatus.until}</>}
|
||||
{queueStatus.status === "sleeping" && <>Sleeping {queueStatus.seconds}s between jobs</>}
|
||||
{queueStatus.status === "sleeping" && <>Next job in {sleepRemaining ?? queueStatus.seconds ?? 0}s</>}
|
||||
{queueStatus.status === "idle" && <>Idle</>}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -1,28 +1,57 @@
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { api } from "~/shared/lib/api";
|
||||
import type { PipelineReviewItem } from "~/shared/lib/types";
|
||||
import type { ReviewGroup, ReviewGroupsResponse } from "~/shared/lib/types";
|
||||
import { ColumnShell } from "./ColumnShell";
|
||||
import { PipelineCard } from "./PipelineCard";
|
||||
import { SeriesCard } from "./SeriesCard";
|
||||
|
||||
const PAGE_SIZE = 25;
|
||||
|
||||
interface ReviewColumnProps {
|
||||
items: PipelineReviewItem[];
|
||||
total: number;
|
||||
initialResponse: ReviewGroupsResponse;
|
||||
totalItems: number;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
interface SeriesGroup {
|
||||
name: string;
|
||||
key: string;
|
||||
jellyfinId: string | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
}
|
||||
export function ReviewColumn({ initialResponse, totalItems, jellyfinUrl, onMutate }: ReviewColumnProps) {
|
||||
const [groups, setGroups] = useState<ReviewGroup[]>(initialResponse.groups);
|
||||
const [hasMore, setHasMore] = useState(initialResponse.hasMore);
|
||||
const [loadingMore, setLoadingMore] = useState(false);
|
||||
const sentinelRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
export function ReviewColumn({ items, total, jellyfinUrl, onMutate }: ReviewColumnProps) {
|
||||
const truncated = total > items.length;
|
||||
// Reset when the parent refetches page 0 (after approve/skip actions).
|
||||
useEffect(() => {
|
||||
setGroups(initialResponse.groups);
|
||||
setHasMore(initialResponse.hasMore);
|
||||
}, [initialResponse]);
|
||||
|
||||
const loadMore = useCallback(async () => {
|
||||
if (loadingMore || !hasMore) return;
|
||||
setLoadingMore(true);
|
||||
try {
|
||||
const res = await api.get<ReviewGroupsResponse>(`/api/review/groups?offset=${groups.length}&limit=${PAGE_SIZE}`);
|
||||
setGroups((prev) => [...prev, ...res.groups]);
|
||||
setHasMore(res.hasMore);
|
||||
} finally {
|
||||
setLoadingMore(false);
|
||||
}
|
||||
}, [groups.length, hasMore, loadingMore]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!hasMore || !sentinelRef.current) return;
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
if (entries[0]?.isIntersecting) loadMore();
|
||||
},
|
||||
{ rootMargin: "200px" },
|
||||
);
|
||||
observer.observe(sentinelRef.current);
|
||||
return () => observer.disconnect();
|
||||
}, [hasMore, loadMore]);
|
||||
|
||||
const skipAll = async () => {
|
||||
if (!confirm(`Skip all ${total} pending items? They won't be processed unless you unskip them.`)) return;
|
||||
if (!confirm(`Skip all ${totalItems} pending items? They won't be processed unless you unskip them.`)) return;
|
||||
await api.post("/api/review/skip-all");
|
||||
onMutate();
|
||||
};
|
||||
@@ -47,89 +76,62 @@ export function ReviewColumn({ items, total, jellyfinUrl, onMutate }: ReviewColu
|
||||
onMutate();
|
||||
};
|
||||
|
||||
// Group by series (movies are standalone)
|
||||
const movies = items.filter((i) => i.type === "Movie");
|
||||
const seriesMap = new Map<string, SeriesGroup>();
|
||||
|
||||
for (const item of items.filter((i) => i.type === "Episode")) {
|
||||
const key = item.series_jellyfin_id ?? item.series_name ?? String(item.item_id);
|
||||
if (!seriesMap.has(key)) {
|
||||
seriesMap.set(key, { name: item.series_name ?? "", key, jellyfinId: item.series_jellyfin_id, episodes: [] });
|
||||
}
|
||||
seriesMap.get(key)!.episodes.push(item);
|
||||
}
|
||||
|
||||
// Interleave movies and series, sorted by confidence (high first)
|
||||
const allItems = [
|
||||
...movies.map((m) => ({ type: "movie" as const, item: m, sortKey: m.confidence === "high" ? 0 : 1 })),
|
||||
...[...seriesMap.values()].map((s) => ({
|
||||
type: "series" as const,
|
||||
item: s,
|
||||
sortKey: s.episodes.every((e) => e.confidence === "high") ? 0 : 1,
|
||||
})),
|
||||
].sort((a, b) => a.sortKey - b.sortKey);
|
||||
|
||||
// Flatten each visible entry to its list of item_ids. "Approve up to here"
|
||||
// on index i approves everything in the union of idsByEntry[0..i-1] — one
|
||||
// id for a movie, N ids for a series (one per episode).
|
||||
const idsByEntry: number[][] = allItems.map((entry) =>
|
||||
entry.type === "movie" ? [entry.item.item_id] : entry.item.episodes.map((e) => e.item_id),
|
||||
// Compute ids per visible group for "Approve above"
|
||||
const idsByGroup: number[][] = groups.map((g) =>
|
||||
g.kind === "movie" ? [g.item.item_id] : g.seasons.flatMap((s) => s.episodes.map((ep) => ep.item_id)),
|
||||
);
|
||||
const priorIds = (index: number): number[] => idsByEntry.slice(0, index).flat();
|
||||
const priorIds = (index: number): number[] => idsByGroup.slice(0, index).flat();
|
||||
|
||||
const actions =
|
||||
totalItems > 0
|
||||
? [
|
||||
{ label: "Auto Review", onClick: autoApprove, primary: true },
|
||||
{ label: "Skip all", onClick: skipAll },
|
||||
]
|
||||
: undefined;
|
||||
|
||||
return (
|
||||
<ColumnShell
|
||||
title="Review"
|
||||
count={truncated ? `${items.length} of ${total}` : total}
|
||||
actions={
|
||||
total > 0
|
||||
? [
|
||||
{ label: "Auto Review", onClick: autoApprove, primary: true },
|
||||
{ label: "Skip all", onClick: skipAll },
|
||||
]
|
||||
: undefined
|
||||
}
|
||||
>
|
||||
<ColumnShell title="Review" count={totalItems} actions={actions}>
|
||||
<div className="space-y-2">
|
||||
{allItems.map((entry, index) => {
|
||||
// The button approves everything visually above this card. First
|
||||
// card has nothing before it → undefined suppresses the affordance.
|
||||
{groups.map((group, index) => {
|
||||
const prior = index > 0 ? priorIds(index) : null;
|
||||
const onApproveUpToHere = prior && prior.length > 0 ? () => approveBatch(prior) : undefined;
|
||||
if (entry.type === "movie") {
|
||||
if (group.kind === "movie") {
|
||||
return (
|
||||
<PipelineCard
|
||||
key={entry.item.id}
|
||||
item={entry.item}
|
||||
key={group.item.id}
|
||||
item={group.item}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${entry.item.item_id}/stream/${streamId}`, { action });
|
||||
await api.patch(`/api/review/${group.item.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={() => approveItem(entry.item.item_id)}
|
||||
onSkip={() => skipItem(entry.item.item_id)}
|
||||
onApprove={() => approveItem(group.item.item_id)}
|
||||
onSkip={() => skipItem(group.item.item_id)}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<SeriesCard
|
||||
key={entry.item.key}
|
||||
seriesKey={entry.item.key}
|
||||
seriesName={entry.item.name}
|
||||
key={group.seriesKey}
|
||||
seriesKey={group.seriesKey}
|
||||
seriesName={group.seriesName}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
seriesJellyfinId={entry.item.jellyfinId}
|
||||
episodes={entry.item.episodes}
|
||||
seriesJellyfinId={group.seriesJellyfinId}
|
||||
seasons={group.seasons}
|
||||
episodeCount={group.episodeCount}
|
||||
originalLanguage={group.originalLanguage}
|
||||
onMutate={onMutate}
|
||||
onApproveUpToHere={onApproveUpToHere}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{allItems.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items to review</p>}
|
||||
{truncated && (
|
||||
<p className="text-xs text-gray-400 text-center py-3 border-t mt-2">
|
||||
Showing first {items.length} of {total}. Approve some to see the rest.
|
||||
</p>
|
||||
{groups.length === 0 && <p className="text-sm text-gray-400 text-center py-8">No items to review</p>}
|
||||
{hasMore && (
|
||||
<div ref={sentinelRef} className="py-4 text-center text-xs text-gray-400">
|
||||
{loadingMore ? "Loading more…" : ""}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ColumnShell>
|
||||
|
||||
@@ -9,7 +9,9 @@ interface SeriesCardProps {
|
||||
seriesName: string;
|
||||
jellyfinUrl: string;
|
||||
seriesJellyfinId: string | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
|
||||
episodeCount: number;
|
||||
originalLanguage: string | null;
|
||||
onMutate: () => void;
|
||||
// Review-column affordance: approve every card visually above this
|
||||
// series in one round-trip. See ReviewColumn for the id computation.
|
||||
@@ -21,13 +23,18 @@ export function SeriesCard({
|
||||
seriesName,
|
||||
jellyfinUrl,
|
||||
seriesJellyfinId,
|
||||
episodes,
|
||||
seasons,
|
||||
episodeCount,
|
||||
originalLanguage,
|
||||
onMutate,
|
||||
onApproveUpToHere,
|
||||
}: SeriesCardProps) {
|
||||
const [expanded, setExpanded] = useState(false);
|
||||
|
||||
const seriesLang = episodes[0]?.original_language ?? "";
|
||||
const flatEpisodes = seasons.flatMap((s) => s.episodes);
|
||||
const highCount = flatEpisodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = flatEpisodes.filter((e) => e.confidence === "low").length;
|
||||
const multipleSeasons = seasons.length > 1;
|
||||
|
||||
const setSeriesLanguage = async (lang: string) => {
|
||||
await api.patch(`/api/review/series/${encodeURIComponent(seriesKey)}/language`, { language: lang });
|
||||
@@ -39,8 +46,11 @@ export function SeriesCard({
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const highCount = episodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = episodes.filter((e) => e.confidence === "low").length;
|
||||
const approveSeason = async (season: number | null) => {
|
||||
if (season == null) return;
|
||||
await api.post(`/api/review/season/${encodeURIComponent(seriesKey)}/${season}/approve-all`);
|
||||
onMutate();
|
||||
};
|
||||
|
||||
const jellyfinLink =
|
||||
jellyfinUrl && seriesJellyfinId ? `${jellyfinUrl}/web/index.html#!/details?id=${seriesJellyfinId}` : null;
|
||||
@@ -70,13 +80,14 @@ export function SeriesCard({
|
||||
|
||||
{/* Controls row */}
|
||||
<div className="flex items-center gap-2 px-3 pb-3 pt-1">
|
||||
<span className="text-xs text-gray-500 shrink-0">{episodes.length} eps</span>
|
||||
<span className="text-xs text-gray-500 shrink-0">{episodeCount} eps</span>
|
||||
{multipleSeasons && <span className="text-xs text-gray-500 shrink-0">· {seasons.length} seasons</span>}
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<div className="flex-1" />
|
||||
<select
|
||||
className="h-6 text-xs border border-gray-300 rounded px-1 bg-white shrink-0"
|
||||
value={seriesLang}
|
||||
value={originalLanguage ?? ""}
|
||||
onChange={(e) => {
|
||||
e.stopPropagation();
|
||||
setSeriesLanguage(e.target.value);
|
||||
@@ -91,6 +102,7 @@ export function SeriesCard({
|
||||
</select>
|
||||
{onApproveUpToHere && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveUpToHere();
|
||||
@@ -102,39 +114,115 @@ export function SeriesCard({
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
type="button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
approveSeries();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded bg-blue-600 text-white hover:bg-blue-700 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve all
|
||||
Approve series
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{expanded && (
|
||||
<div className="border-t px-3 pb-3 space-y-2 pt-2">
|
||||
<div className="border-t">
|
||||
{multipleSeasons
|
||||
? seasons.map((s) => (
|
||||
<SeasonGroup
|
||||
key={s.season ?? "unknown"}
|
||||
season={s.season}
|
||||
episodes={s.episodes}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onApproveSeason={() => approveSeason(s.season)}
|
||||
onMutate={onMutate}
|
||||
/>
|
||||
))
|
||||
: flatEpisodes.map((ep) => <EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function SeasonGroup({
|
||||
season,
|
||||
episodes,
|
||||
jellyfinUrl,
|
||||
onApproveSeason,
|
||||
onMutate,
|
||||
}: {
|
||||
season: number | null;
|
||||
episodes: PipelineReviewItem[];
|
||||
jellyfinUrl: string;
|
||||
onApproveSeason: () => void;
|
||||
onMutate: () => void;
|
||||
}) {
|
||||
const [open, setOpen] = useState(false);
|
||||
const highCount = episodes.filter((e) => e.confidence === "high").length;
|
||||
const lowCount = episodes.filter((e) => e.confidence === "low").length;
|
||||
const label = season == null ? "No season" : `Season ${String(season).padStart(2, "0")}`;
|
||||
|
||||
return (
|
||||
<div className="border-t first:border-t-0">
|
||||
<div className="flex items-center gap-2 px-3 py-2 cursor-pointer hover:bg-gray-50" onClick={() => setOpen(!open)}>
|
||||
<span className="text-xs text-gray-400 shrink-0">{open ? "▼" : "▶"}</span>
|
||||
<span className="text-xs font-medium shrink-0">{label}</span>
|
||||
<span className="text-xs text-gray-500 shrink-0">· {episodes.length} eps</span>
|
||||
{highCount > 0 && <span className="text-xs text-green-600 shrink-0">{highCount} ready</span>}
|
||||
{lowCount > 0 && <span className="text-xs text-amber-600 shrink-0">{lowCount} review</span>}
|
||||
<div className="flex-1" />
|
||||
{season != null && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onApproveSeason();
|
||||
}}
|
||||
className="text-xs px-2 py-1 rounded border border-blue-600 text-blue-700 bg-white hover:bg-blue-50 cursor-pointer whitespace-nowrap shrink-0"
|
||||
>
|
||||
Approve season
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
{open && (
|
||||
<div className="px-3 pb-3 space-y-2 pt-2">
|
||||
{episodes.map((ep) => (
|
||||
<PipelineCard
|
||||
key={ep.id}
|
||||
item={ep}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${ep.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={async () => {
|
||||
await api.post(`/api/review/${ep.item_id}/approve`);
|
||||
onMutate();
|
||||
}}
|
||||
onSkip={async () => {
|
||||
await api.post(`/api/review/${ep.item_id}/skip`);
|
||||
onMutate();
|
||||
}}
|
||||
/>
|
||||
<EpisodeRow key={ep.id} ep={ep} jellyfinUrl={jellyfinUrl} onMutate={onMutate} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function EpisodeRow({
|
||||
ep,
|
||||
jellyfinUrl,
|
||||
onMutate,
|
||||
}: {
|
||||
ep: PipelineReviewItem;
|
||||
jellyfinUrl: string;
|
||||
onMutate: () => void;
|
||||
}) {
|
||||
return (
|
||||
<div className="px-3 py-1">
|
||||
<PipelineCard
|
||||
item={ep}
|
||||
jellyfinUrl={jellyfinUrl}
|
||||
onToggleStream={async (streamId, action) => {
|
||||
await api.patch(`/api/review/${ep.item_id}/stream/${streamId}`, { action });
|
||||
onMutate();
|
||||
}}
|
||||
onApprove={async () => {
|
||||
await api.post(`/api/review/${ep.item_id}/approve`);
|
||||
onMutate();
|
||||
}}
|
||||
onSkip={async () => {
|
||||
await api.post(`/api/review/${ep.item_id}/skip`);
|
||||
onMutate();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,10 +10,42 @@ import { formatThousands } from "~/shared/lib/utils";
|
||||
interface ScanStatus {
|
||||
running: boolean;
|
||||
progress: { scanned: number; total: number; errors: number };
|
||||
recentItems: { name: string; type: string; scan_status: string; file_path: string }[];
|
||||
recentItems: {
|
||||
name: string;
|
||||
type: string;
|
||||
scan_status: string;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: "scan" | "webhook" | null;
|
||||
}[];
|
||||
scanLimit: number | null;
|
||||
}
|
||||
|
||||
interface ScanItemsRow {
|
||||
id: number;
|
||||
jellyfin_id: string;
|
||||
name: string;
|
||||
type: "Movie" | "Episode";
|
||||
series_name: string | null;
|
||||
season_number: number | null;
|
||||
episode_number: number | null;
|
||||
scan_status: string;
|
||||
original_language: string | null;
|
||||
orig_lang_source: string | null;
|
||||
container: string | null;
|
||||
file_size: number | null;
|
||||
file_path: string;
|
||||
last_scanned_at: string | null;
|
||||
ingest_source: "scan" | "webhook" | null;
|
||||
audio_codecs: string | null;
|
||||
}
|
||||
|
||||
interface ScanItemsResponse {
|
||||
rows: ScanItemsRow[];
|
||||
total: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
interface DashboardStats {
|
||||
totalItems: number;
|
||||
scanned: number;
|
||||
@@ -47,6 +79,22 @@ interface LogEntry {
|
||||
file?: string;
|
||||
}
|
||||
|
||||
interface RecentIngestRow {
|
||||
name: string;
|
||||
type: string;
|
||||
status: string;
|
||||
file: string;
|
||||
scannedAt: string | null;
|
||||
source: "scan" | "webhook" | null;
|
||||
}
|
||||
|
||||
interface ItemFilters {
|
||||
q: string;
|
||||
status: "all" | "pending" | "scanned" | "error";
|
||||
type: "all" | "movie" | "episode";
|
||||
source: "all" | "scan" | "webhook";
|
||||
}
|
||||
|
||||
// Mutable buffer for SSE data — flushed to React state on an interval
|
||||
interface SseBuf {
|
||||
scanned: number;
|
||||
@@ -65,19 +113,54 @@ function freshBuf(): SseBuf {
|
||||
|
||||
const FLUSH_MS = 200;
|
||||
|
||||
function statusBadgeVariant(status: string): "pending" | "done" | "error" | "default" {
|
||||
if (status === "pending") return "pending";
|
||||
if (status === "done" || status === "scanned") return "done";
|
||||
if (status === "error") return "error";
|
||||
return "default";
|
||||
}
|
||||
|
||||
function formatScannedAt(ts: string | null): string {
|
||||
if (!ts) return "—";
|
||||
const d = new Date(ts.includes("T") ? ts : `${ts}Z`);
|
||||
if (Number.isNaN(d.getTime())) return ts;
|
||||
return d.toLocaleString([], { year: "numeric", month: "2-digit", day: "2-digit", hour: "2-digit", minute: "2-digit" });
|
||||
}
|
||||
|
||||
function formatFileSize(bytes: number | null): string {
|
||||
if (!bytes || bytes <= 0) return "—";
|
||||
if (bytes < 1000) return `${bytes} B`;
|
||||
if (bytes < 1000 ** 2) return `${(bytes / 1000).toFixed(1)} kB`;
|
||||
if (bytes < 1000 ** 3) return `${(bytes / 1000 ** 2).toFixed(1)} MB`;
|
||||
return `${(bytes / 1000 ** 3).toFixed(1)} GB`;
|
||||
}
|
||||
|
||||
function episodeLabel(row: ScanItemsRow): string {
|
||||
if (row.type !== "Episode") return "—";
|
||||
const season = row.season_number ?? 0;
|
||||
const episode = row.episode_number ?? 0;
|
||||
return `S${String(season).padStart(2, "0")}E${String(episode).padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
export function ScanPage() {
|
||||
const navigate = useNavigate();
|
||||
const [status, setStatus] = useState<ScanStatus | null>(null);
|
||||
const [stats, setStats] = useState<DashboardStats | null>(null);
|
||||
const [configChecked, setConfigChecked] = useState(false);
|
||||
const [limit, setLimit] = useState("");
|
||||
const [log, setLog] = useState<LogEntry[]>([]);
|
||||
const [recentIngest, setRecentIngest] = useState<RecentIngestRow[]>([]);
|
||||
const [statusLabel, setStatusLabel] = useState("");
|
||||
const [scanComplete, setScanComplete] = useState(false);
|
||||
const [currentItem, setCurrentItem] = useState("");
|
||||
const [progressScanned, setProgressScanned] = useState(0);
|
||||
const [progressTotal, setProgressTotal] = useState(0);
|
||||
const [errors, setErrors] = useState(0);
|
||||
const [filters, setFilters] = useState<ItemFilters>({ q: "", status: "all", type: "all", source: "all" });
|
||||
const [itemsRows, setItemsRows] = useState<ScanItemsRow[]>([]);
|
||||
const [itemsOffset, setItemsOffset] = useState(0);
|
||||
const [itemsHasMore, setItemsHasMore] = useState(false);
|
||||
const [itemsTotal, setItemsTotal] = useState(0);
|
||||
const [itemsLoading, setItemsLoading] = useState(false);
|
||||
const esRef = useRef<EventSource | null>(null);
|
||||
const bufRef = useRef<SseBuf>(freshBuf());
|
||||
const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
@@ -122,7 +205,19 @@ export function ScanPage() {
|
||||
setCurrentItem(b.currentItem);
|
||||
if (b.newLogs.length > 0) {
|
||||
const batch = b.newLogs.splice(0);
|
||||
setLog((prev) => [...batch.reverse(), ...prev].slice(0, 100));
|
||||
setRecentIngest((prev) =>
|
||||
[
|
||||
...batch.map((item) => ({
|
||||
name: item.name,
|
||||
type: item.type,
|
||||
status: item.status,
|
||||
file: item.file ?? item.name,
|
||||
scannedAt: new Date().toISOString(),
|
||||
source: "scan" as const,
|
||||
})),
|
||||
...prev,
|
||||
].slice(0, 5),
|
||||
);
|
||||
}
|
||||
b.dirty = false;
|
||||
}
|
||||
@@ -172,13 +267,55 @@ export function ScanPage() {
|
||||
setErrors(s.progress.errors);
|
||||
setStatusLabel(s.running ? "Scan in progress…" : "Scan idle");
|
||||
if (s.scanLimit != null) setLimit(String(s.scanLimit));
|
||||
setLog(s.recentItems.map((i) => ({ name: i.name, type: i.type, status: i.scan_status, file: i.file_path })));
|
||||
setRecentIngest(
|
||||
s.recentItems.map((i) => ({
|
||||
name: i.name,
|
||||
type: i.type,
|
||||
status: i.scan_status,
|
||||
file: i.file_path,
|
||||
scannedAt: i.last_scanned_at,
|
||||
source: i.ingest_source,
|
||||
})),
|
||||
);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
load();
|
||||
}, [load]);
|
||||
|
||||
const fetchItems = useCallback(
|
||||
async (offset: number, append: boolean) => {
|
||||
setItemsLoading(true);
|
||||
try {
|
||||
const qs = new URLSearchParams({
|
||||
offset: String(offset),
|
||||
limit: "50",
|
||||
q: filters.q,
|
||||
status: filters.status,
|
||||
type: filters.type,
|
||||
source: filters.source,
|
||||
});
|
||||
const res = await api.get<ScanItemsResponse>(`/api/scan/items?${qs.toString()}`);
|
||||
setItemsRows((prev) => (append ? [...prev, ...res.rows] : res.rows));
|
||||
setItemsOffset(offset + res.rows.length);
|
||||
setItemsHasMore(res.hasMore);
|
||||
setItemsTotal(res.total);
|
||||
} finally {
|
||||
setItemsLoading(false);
|
||||
}
|
||||
},
|
||||
[filters],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
fetchItems(0, false);
|
||||
}, [fetchItems]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!scanComplete) return;
|
||||
fetchItems(0, false);
|
||||
}, [scanComplete, fetchItems]);
|
||||
|
||||
const connectSse = useCallback(() => {
|
||||
esRef.current?.close();
|
||||
const buf = bufRef.current;
|
||||
@@ -229,7 +366,7 @@ export function ScanPage() {
|
||||
}, [status?.running, connectSse, stopFlushing]);
|
||||
|
||||
const startScan = async () => {
|
||||
setLog([]);
|
||||
setRecentIngest([]);
|
||||
setProgressScanned(0);
|
||||
setProgressTotal(0);
|
||||
setErrors(0);
|
||||
@@ -261,7 +398,7 @@ export function ScanPage() {
|
||||
return (
|
||||
<div>
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<h1 className="text-xl font-bold m-0">Scan</h1>
|
||||
<h1 className="text-xl font-bold m-0">Library</h1>
|
||||
<MqttBadge />
|
||||
</div>
|
||||
|
||||
@@ -284,37 +421,48 @@ export function ScanPage() {
|
||||
)}
|
||||
|
||||
<div className="border border-gray-200 rounded-lg px-4 py-3 mb-6">
|
||||
<div className="flex items-center flex-wrap gap-2 mb-3">
|
||||
<span className="text-sm font-medium">{statusLabel || (running ? "Scan in progress…" : "Scan idle")}</span>
|
||||
{scanComplete && (
|
||||
<Link to="/pipeline" className="text-blue-600 hover:underline text-sm">
|
||||
Review in Pipeline →
|
||||
</Link>
|
||||
)}
|
||||
{running ? (
|
||||
<Button variant="secondary" size="sm" onClick={stopScan}>
|
||||
Stop
|
||||
</Button>
|
||||
) : (
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="flex items-center gap-1.5 text-xs m-0">
|
||||
Limit
|
||||
<input
|
||||
type="number"
|
||||
value={limit}
|
||||
onChange={(e) => setLimit(e.target.value)}
|
||||
placeholder="all"
|
||||
min="1"
|
||||
className="border border-gray-300 rounded px-1.5 py-0.5 text-xs w-16"
|
||||
/>
|
||||
items
|
||||
</label>
|
||||
<Button size="sm" onClick={startScan}>
|
||||
Start Scan
|
||||
</Button>
|
||||
<div className="flex items-start justify-between gap-3 mb-3">
|
||||
<div className="space-y-2 min-w-0">
|
||||
<div className="flex items-center flex-wrap gap-2">
|
||||
<span className="text-sm font-medium">{statusLabel || (running ? "Scan in progress…" : "Scan idle")}</span>
|
||||
{scanComplete && (
|
||||
<Link to="/pipeline" className="text-blue-600 hover:underline text-sm">
|
||||
Review in Pipeline →
|
||||
</Link>
|
||||
)}
|
||||
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
|
||||
</div>
|
||||
)}
|
||||
{errors > 0 && <Badge variant="error">{errors} error(s)</Badge>}
|
||||
{running ? (
|
||||
<Button variant="secondary" size="sm" onClick={stopScan}>
|
||||
Stop
|
||||
</Button>
|
||||
) : (
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="flex items-center gap-1.5 text-xs m-0">
|
||||
Limit
|
||||
<input
|
||||
type="number"
|
||||
value={limit}
|
||||
onChange={(e) => setLimit(e.target.value)}
|
||||
placeholder="all"
|
||||
min="1"
|
||||
className="border border-gray-300 rounded px-1.5 py-0.5 text-xs w-16"
|
||||
/>
|
||||
items
|
||||
</label>
|
||||
<Button size="sm" onClick={startScan}>
|
||||
Start Scan
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className="text-right shrink-0">
|
||||
<div className="text-sm font-semibold text-gray-700">
|
||||
{formatThousands(progressScanned)}
|
||||
{progressTotal > 0 ? ` / ${formatThousands(progressTotal)}` : ""}
|
||||
</div>
|
||||
<div className="text-[0.7rem] text-gray-500">scanned</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{(running || progressScanned > 0) && (
|
||||
@@ -325,25 +473,131 @@ export function ScanPage() {
|
||||
</div>
|
||||
)}
|
||||
<div className="flex items-center gap-2 text-gray-500 text-xs">
|
||||
<span>
|
||||
{progressScanned}
|
||||
{progressTotal > 0 ? ` / ${progressTotal}` : ""} scanned
|
||||
</span>
|
||||
{currentItem && <span className="truncate max-w-xs text-gray-400">{currentItem}</span>}
|
||||
{currentItem && <span className="truncate max-w-2xl text-gray-400">{currentItem}</span>}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
<div className="mt-3">
|
||||
<h3 className="font-semibold text-sm mb-2">Recent ingest (5)</h3>
|
||||
<table className="w-full border-collapse text-[0.78rem]">
|
||||
<thead>
|
||||
<tr>
|
||||
{["Time", "Source", "Type", "File", "Status"].map((h) => (
|
||||
<th
|
||||
key={h}
|
||||
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
|
||||
>
|
||||
{h}
|
||||
</th>
|
||||
))}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{recentIngest.length === 0 && (
|
||||
<tr>
|
||||
<td colSpan={5} className="py-2 px-2 text-gray-400">
|
||||
No ingested items yet.
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
{recentIngest.map((item, i) => {
|
||||
const fileName = item.file.split("/").pop() ?? item.name;
|
||||
return (
|
||||
<tr key={`${item.file}-${i}`} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatScannedAt(item.scannedAt)}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant="default">{item.source ?? "scan"}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{item.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-96" title={item.file}>
|
||||
{fileName}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={statusBadgeVariant(item.status)}>{item.status}</Badge>
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Log */}
|
||||
<h3 className="font-semibold text-sm mb-2">Recent items</h3>
|
||||
<table className="w-full border-collapse text-[0.82rem]">
|
||||
<div className="mb-2 flex items-end justify-between gap-3">
|
||||
<h3 className="font-semibold text-sm">Library items</h3>
|
||||
<span className="text-xs text-gray-500">{formatThousands(itemsTotal)} total</span>
|
||||
</div>
|
||||
|
||||
<div className="border border-gray-200 rounded-lg p-3 mb-3 flex flex-wrap items-end gap-2">
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Search
|
||||
<input
|
||||
type="text"
|
||||
value={filters.q}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, q: e.target.value }))}
|
||||
placeholder="Name or path"
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs w-56"
|
||||
/>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Status
|
||||
<select
|
||||
value={filters.status}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, status: e.target.value as ItemFilters["status"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="scanned">Scanned</option>
|
||||
<option value="pending">Pending</option>
|
||||
<option value="error">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Type
|
||||
<select
|
||||
value={filters.type}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, type: e.target.value as ItemFilters["type"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="movie">Movie</option>
|
||||
<option value="episode">Episode</option>
|
||||
</select>
|
||||
</label>
|
||||
<label className="text-xs text-gray-600 flex flex-col gap-1">
|
||||
Source
|
||||
<select
|
||||
value={filters.source}
|
||||
onChange={(e) => setFilters((prev) => ({ ...prev, source: e.target.value as ItemFilters["source"] }))}
|
||||
className="border border-gray-300 rounded px-2 py-1 text-xs"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="scan">Scan</option>
|
||||
<option value="webhook">Webhook</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<table className="w-full border-collapse text-[0.8rem]">
|
||||
<thead>
|
||||
<tr>
|
||||
{["Type", "File", "Status"].map((h) => (
|
||||
{[
|
||||
"Scanned",
|
||||
"Name",
|
||||
"Type",
|
||||
"Series / Ep",
|
||||
"Language",
|
||||
"Audio",
|
||||
"Container",
|
||||
"Size",
|
||||
"Source",
|
||||
"Status",
|
||||
"Path",
|
||||
].map((h) => (
|
||||
<th
|
||||
key={h}
|
||||
className="text-left text-[0.68rem] font-bold uppercase tracking-[0.06em] text-gray-500 py-1 px-2 border-b-2 border-gray-200 whitespace-nowrap"
|
||||
className="text-left text-[0.66rem] font-bold uppercase tracking-[0.05em] text-gray-500 py-1 px-2 border-b border-gray-200 whitespace-nowrap"
|
||||
>
|
||||
{h}
|
||||
</th>
|
||||
@@ -351,22 +605,55 @@ export function ScanPage() {
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{log.map((item, i) => {
|
||||
const fileName = item.file ? (item.file.split("/").pop() ?? item.name) : item.name;
|
||||
return (
|
||||
<tr key={i} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{item.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100" title={item.file ?? item.name}>
|
||||
{fileName}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={item.status as "error" | "done" | "pending"}>{item.status}</Badge>
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
{itemsRows.length === 0 && !itemsLoading && (
|
||||
<tr>
|
||||
<td colSpan={11} className="py-3 px-2 text-gray-400">
|
||||
No items match the current filters.
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
{itemsRows.map((row) => (
|
||||
<tr key={row.id} className="hover:bg-gray-50">
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">
|
||||
{formatScannedAt(row.last_scanned_at)}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.name}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.type}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<div>{row.series_name ?? "—"}</div>
|
||||
<div className="text-[0.68rem] text-gray-500">{episodeLabel(row)}</div>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<div>{row.original_language ?? "—"}</div>
|
||||
<div className="text-[0.68rem] text-gray-500">{row.orig_lang_source ?? "—"}</div>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 font-mono text-[0.72rem]">
|
||||
{row.audio_codecs ? row.audio_codecs.split(",").join(" · ") : "—"}
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">{row.container ?? "—"}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 whitespace-nowrap">{formatFileSize(row.file_size)}</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant="default">{row.ingest_source ?? "scan"}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100">
|
||||
<Badge variant={statusBadgeVariant(row.scan_status)}>{row.scan_status}</Badge>
|
||||
</td>
|
||||
<td className="py-1.5 px-2 border-b border-gray-100 truncate max-w-xs" title={row.file_path}>
|
||||
{row.file_path}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div className="mt-3 flex items-center gap-2">
|
||||
{itemsHasMore && (
|
||||
<Button size="sm" variant="secondary" onClick={() => fetchItems(itemsOffset, true)} disabled={itemsLoading}>
|
||||
{itemsLoading ? "Loading…" : "Load more"}
|
||||
</Button>
|
||||
)}
|
||||
{itemsLoading && !itemsHasMore && <span className="text-xs text-gray-500">Loading…</span>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -58,6 +58,124 @@ function LockedInput({ locked, ...props }: { locked: boolean } & React.InputHTML
|
||||
// (LockedInput) already signals when a value is env-controlled, the badge
|
||||
// was duplicate noise.
|
||||
|
||||
// ─── Secret input (password-masked with eye-icon reveal) ──────────────────────
|
||||
|
||||
function EyeIcon({ open }: { open: boolean }) {
|
||||
// GNOME-style eye / crossed-eye glyphs as inline SVG so they inherit
|
||||
// currentColor instead of fighting emoji rendering across OSes.
|
||||
if (open) {
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
aria-hidden="true"
|
||||
>
|
||||
<path d="M17.94 17.94A10.07 10.07 0 0 1 12 20c-7 0-11-8-11-8a18.45 18.45 0 0 1 5.06-5.94" />
|
||||
<path d="M9.9 4.24A9.12 9.12 0 0 1 12 4c7 0 11 8 11 8a18.5 18.5 0 0 1-2.16 3.19" />
|
||||
<path d="M14.12 14.12a3 3 0 1 1-4.24-4.24" />
|
||||
<line x1="1" y1="1" x2="23" y2="23" />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
aria-hidden="true"
|
||||
>
|
||||
<path d="M1 12s4-8 11-8 11 8 11 8-4 8-11 8-11-8-11-8z" />
|
||||
<circle cx="12" cy="12" r="3" />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Input for API keys / passwords. Shows "***" masked when the server returns
|
||||
* a secret value (the raw key never reaches this component by default). Eye
|
||||
* icon fetches the real value via /api/settings/reveal and shows it. Users
|
||||
* can also type a new value directly — any edit clears the masked state.
|
||||
*/
|
||||
function SecretInput({
|
||||
configKey,
|
||||
locked,
|
||||
value,
|
||||
onChange,
|
||||
placeholder,
|
||||
className,
|
||||
}: {
|
||||
configKey: string;
|
||||
locked: boolean;
|
||||
value: string;
|
||||
onChange: (next: string) => void;
|
||||
placeholder?: string;
|
||||
className?: string;
|
||||
}) {
|
||||
const [revealed, setRevealed] = useState(false);
|
||||
const isMasked = value === "***";
|
||||
|
||||
const toggle = async () => {
|
||||
if (revealed) {
|
||||
setRevealed(false);
|
||||
return;
|
||||
}
|
||||
if (isMasked) {
|
||||
try {
|
||||
const res = await api.get<{ value: string }>(`/api/settings/reveal?key=${encodeURIComponent(configKey)}`);
|
||||
onChange(res.value);
|
||||
} catch {
|
||||
/* ignore — keep masked */
|
||||
}
|
||||
}
|
||||
setRevealed(true);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={`relative ${className ?? ""}`}>
|
||||
<Input
|
||||
type={revealed ? "text" : "password"}
|
||||
value={value}
|
||||
disabled={locked}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder={placeholder}
|
||||
className="pr-9"
|
||||
/>
|
||||
{locked ? (
|
||||
<span
|
||||
className="absolute inset-y-0 right-0 flex items-center pr-2.5 text-[0.9rem] opacity-40 pointer-events-none select-none"
|
||||
title="Set via environment variable — edit your .env file to change this value"
|
||||
>
|
||||
🔒
|
||||
</span>
|
||||
) : (
|
||||
<button
|
||||
type="button"
|
||||
onClick={toggle}
|
||||
tabIndex={-1}
|
||||
className="absolute inset-y-0 right-0 flex items-center px-2.5 text-gray-400 hover:text-gray-700 focus:outline-none focus-visible:text-gray-700"
|
||||
title={revealed ? "Hide" : "Reveal"}
|
||||
aria-label={revealed ? "Hide secret" : "Reveal secret"}
|
||||
>
|
||||
<EyeIcon open={revealed} />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// ─── Section card ──────────────────────────────────────────────────────────────
|
||||
|
||||
function SectionCard({
|
||||
@@ -227,17 +345,18 @@ function ConnSection({
|
||||
value={url}
|
||||
onChange={(e) => setUrl(e.target.value)}
|
||||
placeholder={urlPlaceholder}
|
||||
className="mt-0.5 max-w-sm"
|
||||
className="mt-0.5"
|
||||
/>
|
||||
</label>
|
||||
<label className="block text-sm text-gray-700 mb-1 mt-3">
|
||||
API Key
|
||||
<LockedInput
|
||||
<SecretInput
|
||||
configKey={apiKeyProp}
|
||||
locked={locked.has(apiKeyProp)}
|
||||
value={key}
|
||||
onChange={(e) => setKey(e.target.value)}
|
||||
onChange={setKey}
|
||||
placeholder="your-api-key"
|
||||
className="mt-0.5 max-w-xs"
|
||||
className="mt-0.5"
|
||||
/>
|
||||
</label>
|
||||
<div className="flex items-center gap-2 mt-3">
|
||||
|
||||
@@ -65,7 +65,7 @@ function RootLayout() {
|
||||
<VersionBadge />
|
||||
<div className="flex flex-wrap items-center gap-0.5">
|
||||
<NavLink to="/" exact>
|
||||
Scan
|
||||
Library
|
||||
</NavLink>
|
||||
<NavLink to="/pipeline">Pipeline</NavLink>
|
||||
<NavLink to="/review/subtitles">Subtitles</NavLink>
|
||||
|
||||
@@ -160,11 +160,32 @@ export interface PipelineJobItem {
|
||||
}
|
||||
|
||||
/** Payload consumed by the Pipeline page: review queue plus job lanes. */
export interface PipelineData {
  /** Review items included in this payload (the server may cap the list). */
  review: PipelineReviewItem[];
  // Two totals are exposed; the reviewTotal vs reviewItemsTotal distinction is
  // not visible here — presumably groups vs individual items. TODO confirm
  // against the server handler.
  reviewTotal: number;
  reviewItemsTotal: number;
  /** Jobs waiting to run. */
  queued: PipelineJobItem[];
  /** Jobs currently running. */
  processing: PipelineJobItem[];
  /** Completed jobs returned in this payload. */
  done: PipelineJobItem[];
  // NOTE(review): presumably the total completed count, which may exceed
  // done.length when the list is truncated — confirm server-side.
  doneCount: number;
  // Base URL of the Jellyfin server — presumably used to build outbound links;
  // verify against consumers.
  jellyfinUrl: string;
}
|
||||
|
||||
// ─── Review groups (GET /api/review/groups) ──────────────────────────────────
|
||||
|
||||
/**
 * One entry in the group-paginated review list (GET /api/review/groups).
 * A standalone movie is its own group; a series groups its pending episodes,
 * bucketed by season, so a group is always returned complete.
 */
export type ReviewGroup =
  | { kind: "movie"; item: PipelineReviewItem }
  | {
      kind: "series";
      seriesKey: string;
      seriesName: string;
      seriesJellyfinId: string | null;
      // Total pending episodes across all seasons in this group.
      episodeCount: number;
      // NOTE(review): assumed to be the lowest confidence among the grouped
      // episodes ("low" if any episode is low) — confirm against the server
      // builder in server/api/review.ts.
      minConfidence: "high" | "low";
      originalLanguage: string | null;
      // `season: null` presumably buckets episodes missing season metadata —
      // TODO confirm.
      seasons: Array<{ season: number | null; episodes: PipelineReviewItem[] }>;
    };
|
||||
|
||||
/** Response shape of GET /api/review/groups?offset=N&limit=M. */
export interface ReviewGroupsResponse {
  /** The requested page of groups; each group is returned complete. */
  groups: ReviewGroup[];
  /** Total number of groups across all pages. */
  totalGroups: number;
  // Presumably the total count of individual review items (movies + episodes)
  // across all groups — confirm against the server handler.
  totalItems: number;
  /** True when more groups exist beyond this page. */
  hasMore: boolean;
}
|
||||
|
||||
Reference in New Issue
Block a user