#!/usr/bin/env node
/**
 * Lightweight migration runner for Supabase.
 * Similar to Laravel's `artisan migrate`.
 *
 * Usage:
 *   npm run migrate          - Apply all pending migrations
 *   npm run migrate:status   - Show status of all migrations
 *   npm run migrate:rollback - Rollback the last applied migration
 *
 * Env (loaded from .env automatically via --env-file):
 *   SUPABASE_DB_URL or DATABASE_URL  postgres connection string
 */

import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { Pool } from 'pg';

// ES modules have no __filename/__dirname; derive them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Migration SQL files live next to this script in ../supabase/migrations.
const migrationsDir = path.resolve(__dirname, '..', 'supabase', 'migrations');
const args = process.argv.slice(2);
const command = args[0]; // --status | --rollback | undefined (= migrate)
/**
 * Resolve the Postgres connection string from the environment.
 *
 * Precedence: SUPABASE_DB_URL, then DATABASE_URL, then a URL derived from
 * VITE_SUPABASE_URL + SUPABASE_DB_PASSWORD.
 *
 * @returns {string|null} connection string, or null when nothing is configured
 *   (exits the process when VITE_SUPABASE_URL has an unexpected format)
 */
function buildConnectionString() {
  const { SUPABASE_DB_URL, DATABASE_URL, VITE_SUPABASE_URL, SUPABASE_DB_PASSWORD } = process.env;

  // Direct override takes precedence
  if (SUPABASE_DB_URL) return SUPABASE_DB_URL;
  if (DATABASE_URL) return DATABASE_URL;

  // Derive from VITE_SUPABASE_URL + SUPABASE_DB_PASSWORD
  if (!VITE_SUPABASE_URL || !SUPABASE_DB_PASSWORD) return null;

  // Extract project ref from https://[ref].supabase.co
  const match = VITE_SUPABASE_URL.match(/https:\/\/([^.]+)\.supabase\.co/);
  if (!match) {
    console.error('❌ VITE_SUPABASE_URL hat ein unerwartetes Format.');
    process.exit(1);
  }

  // Password must be URL-encoded in case it contains reserved characters.
  const password = encodeURIComponent(SUPABASE_DB_PASSWORD);
  return `postgresql://postgres:${password}@db.${match[1]}.supabase.co:5432/postgres`;
}
// Resolve the connection string up front: the runner is useless without a DB,
// so fail loudly before touching anything else.
const connectionString = buildConnectionString();

if (!connectionString) {
  console.error('❌ Keine Datenbankverbindung konfiguriert.');
  console.error(' Füge SUPABASE_DB_PASSWORD zu deiner .env hinzu (Passwort aus Supabase → Settings → Database).');
  console.error(' Alternativ: SUPABASE_DB_URL=postgresql://... direkt setzen.');
  process.exit(1);
}

if (!fs.existsSync(migrationsDir)) {
  console.error(`❌ Migrations directory not found: ${migrationsDir}`);
  process.exit(1);
}

// One shared pool for every command; drained in run()'s finally block.
const pool = new Pool({ connectionString });
/**
 * Create the bookkeeping table that records applied migrations,
 * if it does not already exist.
 *
 * @param {object} client - pg client with an async query() method
 */
async function ensureMigrationsTable(client) {
  const ddl = `
    create table if not exists public.schema_migrations (
      id serial primary key,
      filename text not null unique,
      applied_at timestamptz not null default now()
    );
  `;
  await client.query(ddl);
}
/**
 * Fetch all applied migrations, ordered by filename.
 *
 * @param {object} client - pg client with an async query() method
 * @returns {Promise<Array<{filename: string, applied_at: Date}>>}
 */
async function getApplied(client) {
  const result = await client.query(
    'select filename, applied_at from public.schema_migrations order by filename'
  );
  return result.rows;
}
/**
 * List migration files (*.sql) in sorted (lexicographic) order.
 *
 * Generalized with an optional directory parameter so the function can be
 * reused/tested against any folder; the default keeps existing call sites
 * working unchanged.
 *
 * @param {string} [dir=migrationsDir] - directory to scan
 * @returns {string[]} sorted .sql filenames
 */
function listMigrationFiles(dir = migrationsDir) {
  return fs
    .readdirSync(dir)
    .filter((f) => f.endsWith('.sql'))
    .sort();
}
/**
 * Apply a single migration inside a transaction and record it in
 * public.schema_migrations. Rolls back and rethrows on any failure so the
 * caller can stop the run.
 *
 * Fix: the log messages contained the broken interpolation `$(unknown)`
 * (JS uses `${...}`, not `$(...)`), so the literal text "$(unknown)" was
 * printed instead of the migration filename.
 *
 * @param {object} client - pg client with an async query() method
 * @param {string} filename - migration file name inside `dir`
 * @param {string} [dir=migrationsDir] - directory containing the file
 * @throws when the migration SQL or the bookkeeping insert fails
 */
async function applyMigration(client, filename, dir = migrationsDir) {
  const filePath = path.join(dir, filename);
  const sql = fs.readFileSync(filePath, 'utf8');
  console.log(` → Applying: ${filename}`);
  await client.query('begin');
  try {
    await client.query(sql);
    await client.query(
      'insert into public.schema_migrations (filename) values ($1)',
      [filename]
    );
    await client.query('commit');
    console.log(` ✓ Done: ${filename}`);
  } catch (err) {
    await client.query('rollback');
    console.error(` ✗ Failed: ${filename}\n ${err.message}`);
    throw err;
  }
}
/**
 * Default command: apply every migration that is not yet recorded in
 * public.schema_migrations, in filename order.
 *
 * @param {object} client - pg client with an async query() method
 */
async function cmdMigrate(client) {
  await ensureMigrationsTable(client);

  const alreadyApplied = new Set((await getApplied(client)).map((row) => row.filename));
  const pending = listMigrationFiles().filter((file) => !alreadyApplied.has(file));

  if (pending.length === 0) {
    console.log('✔ Nothing to migrate – all migrations are up to date.');
    return;
  }

  console.log(`\nRunning ${pending.length} migration(s)...\n`);
  // Sequential on purpose: migrations must run in order, each in its own txn.
  for (const file of pending) {
    await applyMigration(client, file);
  }
  console.log(`\n✔ ${pending.length} migration(s) applied successfully.`);
}
/**
 * `--status` command: print every migration file with its applied/pending
 * state and a summary line.
 *
 * @param {object} client - pg client with an async query() method
 */
async function cmdStatus(client) {
  await ensureMigrationsTable(client);

  const appliedMap = new Map(
    (await getApplied(client)).map((row) => [row.filename, row.applied_at])
  );
  const files = listMigrationFiles();

  if (files.length === 0) {
    console.log('No migration files found.');
    return;
  }

  console.log('\n Status Migration');
  console.log(' ───────── ' + '─'.repeat(50));

  let pendingCount = 0;
  for (const file of files) {
    if (appliedMap.has(file)) {
      // Timestamps rendered for an Austrian locale, matching the German UI text.
      const ts = new Date(appliedMap.get(file)).toLocaleString('de-AT');
      console.log(` ✓ applied ${file} (${ts})`);
    } else {
      pendingCount += 1;
      console.log(` ○ pending ${file}`);
    }
  }

  console.log(`\n ${appliedMap.size} applied, ${pendingCount} pending.\n`);
}
/**
 * `--rollback` command: undo the most recently applied migration.
 *
 * Runs the matching `.down.sql` file when one exists; otherwise only the
 * bookkeeping record is deleted and the schema itself is left untouched
 * (a warning is printed in that case).
 *
 * @param {object} client - pg client with an async query() method
 * @throws when the down-migration SQL or the bookkeeping delete fails
 */
async function cmdRollback(client) {
  await ensureMigrationsTable(client);
  const applied = await getApplied(client);

  if (applied.length === 0) {
    console.log('Nothing to rollback – no migrations have been applied.');
    return;
  }

  const last = applied[applied.length - 1];
  console.log(`\nRolling back: ${last.filename}`);

  // Check for a corresponding .down.sql file
  const downFile = path.join(
    migrationsDir,
    last.filename.replace('.sql', '.down.sql')
  );

  await client.query('begin');
  try {
    if (fs.existsSync(downFile)) {
      await client.query(fs.readFileSync(downFile, 'utf8'));
      console.log(` ✓ Ran down-migration: ${path.basename(downFile)}`);
    } else {
      console.warn(` ⚠ No down-migration file found (${path.basename(downFile)})`);
      console.warn(' Only removing the migration record, schema changes are NOT reverted.');
    }

    await client.query(
      'delete from public.schema_migrations where filename = $1',
      [last.filename]
    );
    await client.query('commit');
    console.log(`✔ Rolled back: ${last.filename}\n`);
  } catch (err) {
    await client.query('rollback');
    console.error(`✗ Rollback failed: ${err.message}`);
    throw err;
  }
}
/**
 * Dispatch the CLI command, making sure the client is released and the
 * pool drained no matter how the command ends.
 */
async function run() {
  const client = await pool.connect();
  try {
    switch (command) {
      case '--status':
        await cmdStatus(client);
        break;
      case '--rollback':
        await cmdRollback(client);
        break;
      default:
        await cmdMigrate(client);
    }
  } finally {
    client.release();
    await pool.end();
  }
}
// Entry point: any unhandled failure exits non-zero so scripts/CI notice it.
run().catch((err) => {
  console.error('\n❌ Migration runner failed:', err.message);
  process.exit(1);
});