add one-time migration script for JSON cache → PostgreSQL
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
@@ -0,0 +1,106 @@
/**
 * One-time migration: import existing igdb-cache.json and igdb-metadata.json
 * into PostgreSQL. Run with: DATABASE_URL=... bun server/scripts/migrate-json-to-pg.ts
 */
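// The target tables are assumed to already exist (this script only inserts rows):
//   igdb_resolutions(cache_key, source, source_id, igdb_id)
//   igdb_metadata(canonical_id, summary, cover_image_id, screenshots, video_ids,
//                 genres, aggregated_rating, release_date, developers)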
import { readFileSync } from "node:fs"
import { dirname, join } from "node:path"
import { fileURLToPath } from "node:url"
import postgres from "postgres"

const __dirname = dirname(fileURLToPath(import.meta.url))
const DATA_DIR = join(__dirname, "../src/data")

const DATABASE_URL = process.env.DATABASE_URL
if (!DATABASE_URL) {
  console.error("DATABASE_URL is required")
  process.exit(1)
}

const sql = postgres(DATABASE_URL)

const BATCH_SIZE = 500

async function migrateResolutions() {
  const file = join(DATA_DIR, "igdb-cache.json")
  let data: Record<string, { igdbId: number | null }>
  try {
    data = JSON.parse(readFileSync(file, "utf-8"))
  } catch {
    console.log("[migrate] No igdb-cache.json found, skipping resolutions")
    return
  }

  const entries = Object.entries(data)
  console.log(`[migrate] Importing ${entries.length} resolution entries...`)

  for (let i = 0; i < entries.length; i += BATCH_SIZE) {
    const batch = entries.slice(i, i + BATCH_SIZE)
    const values = batch.map(([key, val]) => {
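      // Cache keys look like "<source>:<sourceId>"; re-join the remainder so
      // source IDs that themselves contain ":" survive the split intact.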
      const [source, ...rest] = key.split(":")
      const sourceId = rest.join(":")
      return { cache_key: key, source, source_id: sourceId, igdb_id: val.igdbId }
    })

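    // Bulk insert one batch; ON CONFLICT DO NOTHING makes the migration safe to
    // re-run and never overwrites rows that already exist in Postgres.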
    await sql`
      INSERT INTO igdb_resolutions ${sql(values, "cache_key", "source", "source_id", "igdb_id")}
      ON CONFLICT (cache_key) DO NOTHING
    `

    console.log(
      `[migrate] Resolutions: ${Math.min(i + BATCH_SIZE, entries.length)}/${entries.length}`,
    )
  }
}

async function migrateMetadata() {
  const file = join(DATA_DIR, "igdb-metadata.json")
  let data: Record<
    string,
    {
      summary: string | null
      coverImageId: string | null
      screenshots: string[]
      videoIds: string[]
      genres: string[]
      aggregatedRating: number | null
      releaseDate: string | null
      developers: string[]
    }
  >
  try {
    data = JSON.parse(readFileSync(file, "utf-8"))
  } catch {
    console.log("[migrate] No igdb-metadata.json found, skipping metadata")
    return
  }

  const entries = Object.entries(data)
  console.log(`[migrate] Importing ${entries.length} metadata entries...`)

  for (let i = 0; i < entries.length; i += BATCH_SIZE) {
    const batch = entries.slice(i, i + BATCH_SIZE)
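    // Array fields are stored as JSON strings here, on the assumption that the
    // corresponding columns are json/jsonb (or text) rather than native arrays.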
    const values = batch.map(([canonicalId, meta]) => ({
      canonical_id: canonicalId,
      summary: meta.summary,
      cover_image_id: meta.coverImageId,
      screenshots: JSON.stringify(meta.screenshots),
      video_ids: JSON.stringify(meta.videoIds),
      genres: JSON.stringify(meta.genres),
      aggregated_rating: meta.aggregatedRating,
      release_date: meta.releaseDate,
      developers: JSON.stringify(meta.developers),
    }))

    await sql`
      INSERT INTO igdb_metadata ${sql(values, "canonical_id", "summary", "cover_image_id", "screenshots", "video_ids", "genres", "aggregated_rating", "release_date", "developers")}
      ON CONFLICT (canonical_id) DO NOTHING
    `

    console.log(`[migrate] Metadata: ${Math.min(i + BATCH_SIZE, entries.length)}/${entries.length}`)
  }
}

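// Run both imports, then close the connection so the process can exit cleanly.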
await migrateResolutions()
await migrateMetadata()
console.log("[migrate] Done!")
await sql.end()