Files
english/scripts/import-tests.mjs
2026-04-24 14:41:41 +07:00

146 lines
4.3 KiB
JavaScript

// Import TOEIC test JSON files into Supabase.
// Usage: SUPABASE_URL=... SUPABASE_SERVICE_ROLE_KEY=... node scripts/import-tests.mjs
// Requires service_role key to bypass RLS. Idempotent: skips if slug already exists.
import { readdir, readFile } from 'node:fs/promises'
import { dirname, join } from 'node:path'
import { fileURLToPath } from 'node:url'
import { createClient } from '@supabase/supabase-js'
// Resolve the sibling `test/` directory relative to this script's location,
// so the importer works regardless of the current working directory.
const __dirname = dirname(fileURLToPath(import.meta.url))
const TEST_DIR = join(__dirname, '..', 'test')
// Service-role credentials are required: inserts must bypass row-level security.
const SUPABASE_URL = process.env.SUPABASE_URL
const SERVICE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY
if (!SUPABASE_URL || !SERVICE_KEY) {
console.error('Missing env: SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY')
process.exit(1)
}
// One-shot CLI client: no session persistence needed (that is a browser concern).
const db = createClient(SUPABASE_URL, SERVICE_KEY, {
auth: { persistSession: false },
})
// Insert a single row into `table` and return the generated `id`.
// supabase-js reports failures via the returned `error` object rather than
// throwing, so convert any error into a table-qualified exception here.
async function insertReturningId(table, payload) {
  const result = await db.from(table).insert(payload).select('id').single()
  if (result.error) {
    throw new Error(`${table} insert failed: ${result.error.message}`)
  }
  return result.data.id
}
// Bulk-insert `rows` into `table`. A no-op for an empty array, since
// supabase-js rejects empty insert payloads.
async function insertMany(table, rows) {
  if (rows.length === 0) {
    return
  }
  const { error: bulkError } = await db.from(table).insert(rows)
  if (bulkError) {
    throw new Error(`${table} bulk insert failed: ${bulkError.message}`)
  }
}
// Import one parsed test JSON document (test -> parts -> groups -> questions
// -> answer choices). Idempotent: if a test with the same slug already
// exists, nothing is inserted and { skipped: true } is returned.
// Returns { testId, totalQuestions } on success; throws on any failure so
// the caller can roll back via the slug.
async function importTest(data) {
  const { data: existing } = await db.from('test').select('id').eq('slug', data.slug).maybeSingle()
  if (existing) {
    console.log(` skip (exists): ${data.slug}`)
    return { skipped: true }
  }
  const testId = await insertReturningId('test', {
    title: data.title,
    slug: data.slug,
    total_questions: data.total_questions ?? 0,
  })
  let totalQuestions = 0
  for (const part of data.parts ?? []) {
    const partId = await insertReturningId('part', {
      test_id: testId,
      part_number: part.part_number,
      title: part.title,
      display_order: part.display_order ?? 0,
    })
    let partCount = 0
    for (const group of part.groups ?? []) {
      const groupId = await insertReturningId('question_group', {
        part_id: partId,
        audio_url: group.audio_url ?? null,
        image_url: group.image_url ?? null,
        passage_text: group.passage_text ?? null,
        transcript: group.transcript ?? null,
        display_order: group.display_order ?? 0,
      })
      for (const q of group.questions ?? []) {
        const questionId = await insertReturningId('question', {
          group_id: groupId,
          question_number: q.question_number,
          question_text: q.question_text ?? null,
          display_order: q.display_order ?? 0,
        })
        const choices = (q.choices ?? []).map((c) => ({
          question_id: questionId,
          value: c.value,
          label_text: c.label_text ?? null,
          is_correct: c.is_correct ?? false,
        }))
        await insertMany('answer_choice', choices)
        partCount++
        totalQuestions++
      }
    }
    // supabase-js does not throw on failure — check the returned error
    // explicitly; previously a failed question_count update was silently lost.
    const { error: partUpdateError } = await db
      .from('part')
      .update({ question_count: partCount })
      .eq('id', partId)
    if (partUpdateError) {
      throw new Error(`part update failed: ${partUpdateError.message}`)
    }
  }
  // Keep test.total_questions in sync with the actual imported count: the
  // row was inserted with data.total_questions ?? 0, which is wrong whenever
  // the JSON omits or mis-states the count.
  if (totalQuestions !== (data.total_questions ?? 0)) {
    const { error: totalUpdateError } = await db
      .from('test')
      .update({ total_questions: totalQuestions })
      .eq('id', testId)
    if (totalUpdateError) {
      throw new Error(`test update failed: ${totalUpdateError.message}`)
    }
  }
  return { testId, totalQuestions }
}
// Delete the test row for `slug`; the database's ON DELETE CASCADE cleans up
// parts, groups, questions, and choices. supabase-js returns errors instead
// of throwing, so check the result explicitly — otherwise a failed rollback
// can never reach the caller's `.catch` / "rollback failed" logging.
async function rollback(slug) {
  const { error } = await db.from('test').delete().eq('slug', slug)
  if (error) {
    throw new Error(`rollback delete failed: ${error.message}`)
  }
}
// Walk TEST_DIR for test_*.json files, import each one, and report a
// summary. A failed import is rolled back by slug (best effort) and the
// process exits non-zero if anything failed.
async function main() {
  const entries = await readdir(TEST_DIR)
  const files = entries
    .filter((name) => name.startsWith('test_') && name.endsWith('.json'))
    .sort()
  console.log(`Found ${files.length} file(s) in ${TEST_DIR}\n`)
  const stats = { imported: 0, skipped: 0, failed: 0 }
  for (const file of files) {
    process.stdout.write(`${file}: `)
    let slug = null
    try {
      const raw = await readFile(join(TEST_DIR, file), 'utf-8')
      const data = JSON.parse(raw)
      slug = data.slug
      const result = await importTest(data)
      if (result.skipped) {
        stats.skipped++
      } else {
        stats.imported++
        console.log(` ok (${result.totalQuestions} questions)`)
      }
    } catch (err) {
      stats.failed++
      console.error(` FAIL: ${err.message}`)
      // Best-effort cleanup: only possible once we know the slug, and a
      // rollback failure must not abort the remaining files.
      if (slug) {
        console.error(` rolling back ${slug}...`)
        await rollback(slug).catch((e) => console.error(` rollback failed: ${e.message}`))
      }
    }
  }
  console.log(`\nDone. imported=${stats.imported} skipped=${stats.skipped} failed=${stats.failed}`)
  process.exit(stats.failed > 0 ? 1 : 0)
}
// Entry point: any rejection not handled inside main() is fatal.
main().catch((err) => {
console.error('Fatal:', err)
process.exit(1)
})