library: batch audio-codec lookup — per-row subquery was O(page × streams)
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m11s
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m11s
The scalar subquery I added in 7d30e6c performed a full aggregate scan of
media_streams for every returned row. On a real library (33k items / 212k
streams) a single page took 500+ seconds synchronously, blocking the event
loop and timing out every other request — Library AND Pipeline both
stopped loading.
Swap it for a single batched `GROUP_CONCAT ... WHERE item_id IN (?...)`
query over the current page's ids (max 25), then merge back into rows.
v2026.04.15.10
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "netfelix-audio-fix",
|
"name": "netfelix-audio-fix",
|
||||||
"version": "2026.04.15.9",
|
"version": "2026.04.15.10",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev:server": "NODE_ENV=development bun --hot server/index.tsx",
|
"dev:server": "NODE_ENV=development bun --hot server/index.tsx",
|
||||||
"dev:client": "vite",
|
"dev:client": "vite",
|
||||||
|
|||||||
@@ -163,11 +163,7 @@ app.get("/items", (c) => {
|
|||||||
`
|
`
|
||||||
SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
|
SELECT id, jellyfin_id, name, type, series_name, season_number, episode_number,
|
||||||
scan_status, original_language, orig_lang_source, container, file_size, file_path,
|
scan_status, original_language, orig_lang_source, container, file_size, file_path,
|
||||||
last_scanned_at, ingest_source,
|
last_scanned_at, ingest_source
|
||||||
(SELECT GROUP_CONCAT(DISTINCT LOWER(codec))
|
|
||||||
FROM media_streams
|
|
||||||
WHERE item_id = media_items.id AND type = 'Audio' AND codec IS NOT NULL
|
|
||||||
) AS audio_codecs
|
|
||||||
FROM media_items
|
FROM media_items
|
||||||
${where.sql}
|
${where.sql}
|
||||||
ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
|
ORDER BY COALESCE(last_scanned_at, created_at) DESC, id DESC
|
||||||
@@ -192,6 +188,24 @@ app.get("/items", (c) => {
|
|||||||
ingest_source: string | null;
|
ingest_source: string | null;
|
||||||
audio_codecs: string | null;
|
audio_codecs: string | null;
|
||||||
}>;
|
}>;
|
||||||
|
|
||||||
|
// Audio codecs per item, batched into one query for the current page.
|
||||||
|
// A per-row scalar subquery over media_streams was O(page × streams)
|
||||||
|
// and could block the event loop for minutes on large libraries.
|
||||||
|
if (rows.length > 0) {
|
||||||
|
const placeholders = rows.map(() => "?").join(",");
|
||||||
|
const codecRows = db
|
||||||
|
.prepare(
|
||||||
|
`SELECT item_id, GROUP_CONCAT(DISTINCT LOWER(codec)) AS codecs
|
||||||
|
FROM media_streams
|
||||||
|
WHERE item_id IN (${placeholders}) AND type = 'Audio' AND codec IS NOT NULL
|
||||||
|
GROUP BY item_id`,
|
||||||
|
)
|
||||||
|
.all(...rows.map((r) => r.id)) as { item_id: number; codecs: string | null }[];
|
||||||
|
const byItem = new Map(codecRows.map((r) => [r.item_id, r.codecs]));
|
||||||
|
for (const row of rows) row.audio_codecs = byItem.get(row.id) ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
|
const total = (db.prepare(`SELECT COUNT(*) as n FROM media_items ${where.sql}`).get(...where.args) as { n: number }).n;
|
||||||
return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
|
return c.json({ rows, total, hasMore: query.offset + rows.length < total, query });
|
||||||
});
|
});
|
||||||
|
|||||||
Reference in New Issue
Block a user