replace IGDB file caches with Drizzle/PostgreSQL, add combined /resolve endpoint

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-03 14:29:27 +01:00
parent 63219afc10
commit bd5df81f37
6 changed files with 203 additions and 166 deletions

View File

@@ -1,47 +1,36 @@
import { mkdirSync, readFileSync, writeFileSync } from "node:fs"
import { dirname, join } from "node:path"
import { fileURLToPath } from "node:url"
const __dirname = dirname(fileURLToPath(import.meta.url))
const CACHE_FILE = join(__dirname, "../../data/igdb-cache.json")
import { inArray, sql } from "drizzle-orm"
import { db } from "../../shared/db/client.ts"
import { igdbResolutions } from "../../shared/db/schema/igdb.ts"
// Shape of one resolution-cache row. igdbId is null when a lookup was
// attempted but IGDB returned no match (a negative-cache entry), so we
// don't re-query the API for the same source/sourceId pair.
interface CacheEntry {
  igdbId: number | null
}
const cache = new Map<string, CacheEntry>()
export function loadCache() {
try {
const data = readFileSync(CACHE_FILE, "utf-8")
const entries: Record<string, CacheEntry> = JSON.parse(data)
for (const [key, value] of Object.entries(entries)) {
cache.set(key, value)
}
console.log(`[IGDB] Cache loaded: ${cache.size} entries`)
} catch {
console.log("[IGDB] No cache file found, starting fresh")
/**
 * Batch-load resolution cache rows for the given cache keys.
 * Returns a Map keyed by cacheKey; keys with no stored row are simply absent.
 */
export async function getCacheEntries(keys: string[]): Promise<Map<string, CacheEntry>> {
  // Avoid a pointless round trip when the caller has nothing to look up.
  if (keys.length === 0) return new Map()
  const rows = await db
    .select({ cacheKey: igdbResolutions.cacheKey, igdbId: igdbResolutions.igdbId })
    .from(igdbResolutions)
    .where(inArray(igdbResolutions.cacheKey, keys))
  // Build the Map directly from [key, entry] pairs.
  return new Map(rows.map((row): [string, CacheEntry] => [row.cacheKey, { igdbId: row.igdbId }]))
}
export function saveCache() {
try {
mkdirSync(join(__dirname, "../../data"), { recursive: true })
const obj = Object.fromEntries(cache)
writeFileSync(CACHE_FILE, JSON.stringify(obj, null, 2))
} catch (err) {
console.error("[IGDB] Failed to save cache:", (err as Error).message)
}
}
export function getCacheEntry(key: string): CacheEntry | undefined {
return cache.get(key)
}
export function setCacheEntry(key: string, value: CacheEntry) {
cache.set(key, value)
}
export function getCacheSize() {
return cache.size
/**
 * Upsert a batch of source→IGDB resolution rows.
 * Existing rows (matched on cacheKey) get their igdbId overwritten and
 * their resolvedAt timestamp bumped to now().
 */
export async function setCacheEntries(
  entries: Array<{ cacheKey: string; source: string; sourceId: string; igdbId: number | null }>,
) {
  if (entries.length === 0) {
    return
  }
  const upsert = db.insert(igdbResolutions).values(entries)
  await upsert.onConflictDoUpdate({
    target: igdbResolutions.cacheKey,
    // excluded.* refers to the row that failed to insert (PostgreSQL upsert).
    set: { igdbId: sql`excluded.igdb_id`, resolvedAt: sql`now()` },
  })
}

View File

@@ -1,9 +1,6 @@
import { mkdirSync, readFileSync, writeFileSync } from "node:fs"
import { dirname, join } from "node:path"
import { fileURLToPath } from "node:url"
const __dirname = dirname(fileURLToPath(import.meta.url))
const CACHE_FILE = join(__dirname, "../../data/igdb-metadata.json")
import { inArray, sql } from "drizzle-orm"
import { db } from "../../shared/db/client.ts"
import { igdbMetadata } from "../../shared/db/schema/igdb.ts"
export interface IgdbMetadata {
summary: string | null
@@ -16,38 +13,56 @@ export interface IgdbMetadata {
developers: string[]
}
const cache = new Map<string, IgdbMetadata>()
export function loadMetadataCache() {
try {
const data = readFileSync(CACHE_FILE, "utf-8")
const entries: Record<string, IgdbMetadata> = JSON.parse(data)
for (const [key, value] of Object.entries(entries)) {
cache.set(key, value)
}
console.log(`[IGDB] Metadata cache loaded: ${cache.size} entries`)
} catch {
console.log("[IGDB] No metadata cache file found, starting fresh")
/**
 * Batch-load cached IGDB metadata for the given canonical IDs.
 * Nullable array columns are normalized to empty arrays so callers never
 * see null where a list is expected.
 */
export async function getMetadataBatch(canonicalIds: string[]): Promise<Map<string, IgdbMetadata>> {
  // Nothing requested → nothing queried.
  if (canonicalIds.length === 0) return new Map()
  const rows = await db
    .select()
    .from(igdbMetadata)
    .where(inArray(igdbMetadata.canonicalId, canonicalIds))
  // Convert one DB row into a [canonicalId, metadata] pair.
  const toPair = (row: (typeof rows)[number]): [string, IgdbMetadata] => [
    row.canonicalId,
    {
      summary: row.summary,
      coverImageId: row.coverImageId,
      screenshots: row.screenshots ?? [],
      videoIds: row.videoIds ?? [],
      genres: row.genres ?? [],
      aggregatedRating: row.aggregatedRating,
      releaseDate: row.releaseDate,
      developers: row.developers ?? [],
    },
  ]
  return new Map(rows.map(toPair))
}
function saveMetadataCache() {
try {
mkdirSync(join(__dirname, "../../data"), { recursive: true })
const obj = Object.fromEntries(cache)
writeFileSync(CACHE_FILE, JSON.stringify(obj, null, 2))
} catch (err) {
console.error("[IGDB] Failed to save metadata cache:", (err as Error).message)
}
}
export function getMetadata(canonicalId: string): IgdbMetadata | undefined {
return cache.get(canonicalId)
}
export function setMetadataBatch(entries: Map<string, IgdbMetadata>) {
for (const [key, value] of entries) {
cache.set(key, value)
}
saveMetadataCache()
/**
 * Upsert a batch of IGDB metadata entries keyed by canonical ID.
 * On conflict every metadata column is overwritten from the incoming row
 * and fetchedAt is refreshed to now().
 */
export async function setMetadataBatch(entries: Map<string, IgdbMetadata>) {
  if (entries.size === 0) return
  // Flatten the Map into insertable rows; fields are listed explicitly so
  // only known metadata columns reach the insert.
  const rows = []
  for (const [canonicalId, meta] of entries) {
    rows.push({
      canonicalId,
      summary: meta.summary,
      coverImageId: meta.coverImageId,
      screenshots: meta.screenshots,
      videoIds: meta.videoIds,
      genres: meta.genres,
      aggregatedRating: meta.aggregatedRating,
      releaseDate: meta.releaseDate,
      developers: meta.developers,
    })
  }
  await db
    .insert(igdbMetadata)
    .values(rows)
    .onConflictDoUpdate({
      target: igdbMetadata.canonicalId,
      // excluded.* refers to the conflicting incoming row (PostgreSQL upsert).
      set: {
        summary: sql`excluded.summary`,
        coverImageId: sql`excluded.cover_image_id`,
        screenshots: sql`excluded.screenshots`,
        videoIds: sql`excluded.video_ids`,
        genres: sql`excluded.genres`,
        aggregatedRating: sql`excluded.aggregated_rating`,
        releaseDate: sql`excluded.release_date`,
        developers: sql`excluded.developers`,
        fetchedAt: sql`now()`,
      },
    })
}

View File

@@ -2,7 +2,7 @@ import { zValidator } from "@hono/zod-validator"
import { Hono } from "hono"
import { z } from "zod"
import { fetchAndCacheImage, hasImage, isValidSize, readImage } from "./image-cache.ts"
import { enrichGamesWithIgdb, fetchMetadataForGames } from "./service.ts"
import { enrichAndFetchMetadata, enrichGamesWithIgdb, fetchMetadataForGames } from "./service.ts"
const enrichInput = z.object({
games: z.array(
@@ -19,6 +19,15 @@ const metadataInput = z.object({
canonicalIds: z.array(z.string()),
})
// Request body for POST /resolve: the list of (source, sourceId) pairs
// whose canonical IGDB IDs and metadata should be resolved in one call.
const resolveInput = z.object({
  games: z.array(z.object({ source: z.string(), sourceId: z.string() })),
})
export const igdbRouter = new Hono()
.post("/enrich", zValidator("json", enrichInput), async (c) => {
const { games } = c.req.valid("json")
@@ -30,6 +39,11 @@ export const igdbRouter = new Hono()
const metadataMap = await fetchMetadataForGames(canonicalIds)
return c.json({ metadata: Object.fromEntries(metadataMap) })
})
.post("/resolve", zValidator("json", resolveInput), async (c) => {
const { games } = c.req.valid("json")
const result = await enrichAndFetchMetadata(games)
return c.json(result)
})
.get("/image/:imageId/:size", async (c) => {
const { imageId, size } = c.req.param()
if (!/^[a-z0-9]+$/.test(imageId)) {

View File

@@ -1,6 +1,6 @@
import { env } from "../../shared/lib/env.ts"
import { getCacheEntry, getCacheSize, saveCache, setCacheEntry } from "./cache.ts"
import { type IgdbMetadata, getMetadata, setMetadataBatch } from "./metadata-cache.ts"
import { getCacheEntries, setCacheEntries } from "./cache.ts"
import { type IgdbMetadata, getMetadataBatch, setMetadataBatch } from "./metadata-cache.ts"
const SOURCE_URL_PREFIX: Record<string, string> = {
steam: "https://store.steampowered.com/app/",
@@ -67,8 +67,6 @@ async function batchResolve(source: string, sourceIds: string[]): Promise<Map<st
const urlPrefix = SOURCE_URL_PREFIX[source]
if (!urlPrefix) return results
// Without category filter, each uid may return multiple platform entries,
// so keep batches small to stay within the 500-result API limit
const BATCH_SIZE = 50
for (let i = 0; i < sourceIds.length; i += BATCH_SIZE) {
@@ -109,10 +107,16 @@ export async function enrichGamesWithIgdb<T extends GameForEnrichment>(
return games
}
// Batch lookup all cache keys at once
const allKeys = games
.filter((g) => SOURCE_URL_PREFIX[g.source])
.map((g) => `${g.source}:${g.sourceId}`)
const cached = await getCacheEntries(allKeys)
const uncachedBySource: Record<string, string[]> = {}
for (const game of games) {
const cacheKey = `${game.source}:${game.sourceId}`
if (!getCacheEntry(cacheKey) && SOURCE_URL_PREFIX[game.source]) {
if (!cached.has(cacheKey) && SOURCE_URL_PREFIX[game.source]) {
if (!uncachedBySource[game.source]) {
uncachedBySource[game.source] = []
}
@@ -120,37 +124,46 @@ export async function enrichGamesWithIgdb<T extends GameForEnrichment>(
}
}
let newEntries = 0
try {
const newEntries: Array<{
cacheKey: string
source: string
sourceId: string
igdbId: number | null
}> = []
for (const [source, sourceIds] of Object.entries(uncachedBySource)) {
console.log(`[IGDB] Resolving ${sourceIds.length} ${source} games...`)
const resolved = await batchResolve(source, sourceIds)
for (const [uid, igdbId] of resolved) {
setCacheEntry(`${source}:${uid}`, { igdbId })
newEntries++
const entry = { cacheKey: `${source}:${uid}`, source, sourceId: uid, igdbId }
newEntries.push(entry)
cached.set(entry.cacheKey, { igdbId })
}
for (const uid of sourceIds) {
if (!resolved.has(uid)) {
setCacheEntry(`${source}:${uid}`, { igdbId: null })
const entry = { cacheKey: `${source}:${uid}`, source, sourceId: uid, igdbId: null }
newEntries.push(entry)
cached.set(entry.cacheKey, { igdbId: null })
}
}
}
if (newEntries > 0) {
console.log(`[IGDB] Resolved ${newEntries} new games, cache: ${getCacheSize()} entries`)
saveCache()
if (newEntries.length > 0) {
await setCacheEntries(newEntries)
console.log(`[IGDB] Resolved ${newEntries.length} new games`)
}
} catch (err) {
console.error("[IGDB] Enrichment failed (non-fatal):", (err as Error).message)
}
return games.map((game) => {
const cached = getCacheEntry(`${game.source}:${game.sourceId}`)
if (cached?.igdbId) {
return { ...game, canonicalId: String(cached.igdbId) }
const entry = cached.get(`${game.source}:${game.sourceId}`)
if (entry?.igdbId) {
return { ...game, canonicalId: String(entry.igdbId) }
}
return game
})
@@ -174,19 +187,16 @@ interface IgdbGameResponse {
export async function fetchMetadataForGames(
canonicalIds: string[],
): Promise<Map<string, IgdbMetadata>> {
const results = new Map<string, IgdbMetadata>()
if (!env.TWITCH_CLIENT_ID || !env.TWITCH_CLIENT_SECRET) {
return results
return new Map()
}
const uncached = canonicalIds.filter((id) => !getMetadata(id))
// Batch lookup all cached metadata
const cached = await getMetadataBatch(canonicalIds)
const uncached = canonicalIds.filter((id) => !cached.has(id))
if (uncached.length === 0) {
for (const id of canonicalIds) {
const cached = getMetadata(id)
if (cached) results.set(id, cached)
}
return results
return cached
}
console.log(`[IGDB] Fetching metadata for ${uncached.length} games...`)
@@ -227,14 +237,38 @@ export async function fetchMetadataForGames(
}
if (freshEntries.size > 0) {
setMetadataBatch(freshEntries)
await setMetadataBatch(freshEntries)
console.log(`[IGDB] Fetched metadata for ${freshEntries.size} games`)
}
for (const id of canonicalIds) {
const meta = freshEntries.get(id) ?? getMetadata(id)
if (meta) results.set(id, meta)
// Merge fresh into cached for complete results
for (const [id, meta] of freshEntries) {
cached.set(id, meta)
}
return results
return cached
}
/** Combined resolve + metadata in one call */
/**
 * Combined resolve + metadata fetch in a single server round trip:
 * first attach canonical IGDB IDs to the incoming games, then fetch
 * metadata for every game that resolved to an ID.
 */
export async function enrichAndFetchMetadata(
  games: Array<{ source: string; sourceId: string }>,
): Promise<{
  games: Array<{ source: string; sourceId: string; canonicalId?: string }>
  metadata: Record<string, IgdbMetadata>
}> {
  const enriched = await enrichGamesWithIgdb(games)
  // Collect the canonical IDs that actually resolved.
  const canonicalIds: string[] = []
  for (const game of enriched) {
    if (game.canonicalId !== undefined) {
      canonicalIds.push(game.canonicalId)
    }
  }
  // Skip the metadata step entirely when no game resolved.
  let metadataMap = new Map<string, IgdbMetadata>()
  if (canonicalIds.length > 0) {
    metadataMap = await fetchMetadataForGames(canonicalIds)
  }
  return { games: enriched, metadata: Object.fromEntries(metadataMap) }
}

View File

@@ -1,11 +1,6 @@
import app from "./app.ts"
import { loadCache } from "./features/igdb/cache.ts"
import { loadMetadataCache } from "./features/igdb/metadata-cache.ts"
import { env } from "./shared/lib/env.ts"
loadCache()
loadMetadataCache()
console.log(`[server] listening on http://localhost:${env.PORT}`)
export default {

View File

@@ -30,70 +30,60 @@ async function enrichAfterSync(
) {
const db = await getDb()
// Step 1: resolve canonical_ids for games that don't have one
try {
// Find games that need enrichment: no canonical_id or no metadata
const missing = await db.query<{ source: string; source_id: string }>(
"SELECT source, source_id FROM games WHERE canonical_id IS NULL AND source = $1",
"SELECT source, source_id FROM games WHERE (canonical_id IS NULL OR metadata_fetched_at IS NULL) AND source = $1",
[source],
)
if (missing.rows.length > 0) {
set({ [source]: { ...get()[source], progress: "enriching:ids" } })
const gamesToEnrich = missing.rows.map((r) => ({
source: r.source,
sourceId: r.source_id,
}))
const res = await api.igdb.enrich.$post({ json: { games: gamesToEnrich } })
if (res.ok) {
const data = await res.json()
for (const g of data.games) {
if (g.canonicalId) {
await db.query(
"UPDATE games SET canonical_id = $1 WHERE source = $2 AND source_id = $3",
[g.canonicalId, g.source, g.sourceId],
)
}
}
}
}
} catch (err) {
console.error("[sync] canonical_id enrichment failed (non-fatal):", (err as Error).message)
}
if (missing.rows.length === 0) return
// Step 2: fetch metadata for games with canonical_id but no metadata yet
try {
const needsMeta = await db.query<{ canonical_id: string }>(
"SELECT canonical_id FROM games WHERE canonical_id IS NOT NULL AND metadata_fetched_at IS NULL",
)
if (needsMeta.rows.length > 0) {
set({ [source]: { ...get()[source], progress: "enriching:metadata" } })
const canonicalIds = needsMeta.rows.map((r) => r.canonical_id)
const res = await api.igdb.metadata.$post({ json: { canonicalIds } })
if (res.ok) {
const data = await res.json()
for (const [canonicalId, meta] of Object.entries(data.metadata)) {
await db.query(
`UPDATE games SET
summary = $1, cover_image_id = $2, screenshots = $3, video_ids = $4,
genres = $5, aggregated_rating = $6, release_date = $7, developers = $8,
metadata_fetched_at = NOW()
WHERE canonical_id = $9`,
[
meta.summary,
meta.coverImageId,
JSON.stringify(meta.screenshots),
JSON.stringify(meta.videoIds),
JSON.stringify(meta.genres),
meta.aggregatedRating,
meta.releaseDate,
JSON.stringify(meta.developers),
canonicalId,
],
)
}
set({ [source]: { ...get()[source], progress: "enriching" } })
const gamesToResolve = missing.rows.map((r) => ({
source: r.source,
sourceId: r.source_id,
}))
// Single round trip: resolve canonical IDs + fetch metadata
const res = await api.igdb.resolve.$post({ json: { games: gamesToResolve } })
if (!res.ok) return
const data = await res.json()
// Apply canonical IDs
for (const g of data.games) {
if (g.canonicalId) {
await db.query("UPDATE games SET canonical_id = $1 WHERE source = $2 AND source_id = $3", [
g.canonicalId,
g.source,
g.sourceId,
])
}
}
// Apply metadata
for (const [canonicalId, meta] of Object.entries(data.metadata)) {
await db.query(
`UPDATE games SET
summary = $1, cover_image_id = $2, screenshots = $3, video_ids = $4,
genres = $5, aggregated_rating = $6, release_date = $7, developers = $8,
metadata_fetched_at = NOW()
WHERE canonical_id = $9`,
[
meta.summary,
meta.coverImageId,
JSON.stringify(meta.screenshots),
JSON.stringify(meta.videoIds),
JSON.stringify(meta.genres),
meta.aggregatedRating,
meta.releaseDate,
JSON.stringify(meta.developers),
canonicalId,
],
)
}
} catch (err) {
console.error("[sync] metadata enrichment failed (non-fatal):", (err as Error).message)
console.error("[sync] enrichment failed (non-fatal):", (err as Error).message)
}
}