Unit Tests
All checks were successful
Vercel Production Deployment / Deploy-Production (push) Successful in 1m1s

This commit is contained in:
Ashikagi
2026-03-28 15:54:02 +01:00
parent 6b2d0024ed
commit 0acece98dc
8 changed files with 810 additions and 33 deletions

View File

@@ -1,10 +1,15 @@
#!/usr/bin/env node
/**
 * Lightweight migration runner for Supabase.
 * Reads SQL files in supabase/migrations (sorted) and applies any that have not been recorded.
 * Similar to Laravel's `artisan migrate`.
 *
 * Usage:
 *   npm run migrate           - Apply all pending migrations
 *   npm run migrate:status    - Show status of all migrations
 *   npm run migrate:rollback  - Rollback the last applied migration
 *
 * Env (loaded from .env automatically via --env-file):
 *   SUPABASE_DB_URL or DATABASE_URL   postgres connection string
 */
import fs from 'fs';
import path from 'path';
// Resolve paths relative to this script (ESM modules have no built-in __dirname).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const migrationsDir = path.resolve(__dirname, '..', 'supabase', 'migrations');

// CLI command: --status | --rollback | undefined (= migrate).
// Removed the stale `const connectionString = ...` left over from the old
// version — it redeclared the binding created by buildConnectionString() below.
const args = process.argv.slice(2);
const command = args[0];
/**
 * Resolve the Postgres connection string for the migration runner.
 *
 * Precedence:
 *   1. SUPABASE_DB_URL
 *   2. DATABASE_URL
 *   3. Derived from VITE_SUPABASE_URL + SUPABASE_DB_PASSWORD
 *
 * @returns {string|null} a postgres:// connection string, or null when
 *   nothing usable is configured. Exits the process (code 1) when
 *   VITE_SUPABASE_URL is present but does not look like a Supabase URL.
 */
function buildConnectionString() {
  // Explicit connection strings always win over derivation.
  const direct = process.env.SUPABASE_DB_URL || process.env.DATABASE_URL;
  if (direct) return direct;

  // Fall back to deriving the string from the project URL + DB password.
  const supabaseUrl = process.env.VITE_SUPABASE_URL;
  const dbPassword = process.env.SUPABASE_DB_PASSWORD;
  if (!supabaseUrl || !dbPassword) return null;

  // Project ref is the subdomain of https://[ref].supabase.co
  const refMatch = supabaseUrl.match(/https:\/\/([^.]+)\.supabase\.co/);
  if (!refMatch) {
    console.error('❌ VITE_SUPABASE_URL hat ein unerwartetes Format.');
    process.exit(1);
  }

  const ref = refMatch[1];
  return `postgresql://postgres:${encodeURIComponent(dbPassword)}@db.${ref}.supabase.co:5432/postgres`;
}
// Resolve the connection string (explicit env vars or derived from the
// Supabase project URL + DB password) and bail out early with guidance
// if nothing usable is configured.
const connectionString = buildConnectionString();
if (!connectionString) {
  console.error('❌ Keine Datenbankverbindung konfiguriert.');
  console.error(' Füge SUPABASE_DB_PASSWORD zu deiner .env hinzu (Passwort aus Supabase → Settings → Database).');
  console.error(' Alternativ: SUPABASE_DB_URL=postgresql://... direkt setzen.');
  process.exit(1);
}

// Without the migrations directory there is nothing we can do.
if (!fs.existsSync(migrationsDir)) {
  console.error(`Migrations directory not found: ${migrationsDir}`);
  process.exit(1);
}
// Single shared pool for the whole run; closed in run()'s finally block.
const pool = new Pool({ connectionString });
/**
 * Create the schema_migrations bookkeeping table if it does not exist.
 * Idempotent: safe to call before every command.
 * @param {import('pg').PoolClient} client - connected pg client
 */
async function ensureMigrationsTable(client) {
  await client.query(`
    create table if not exists public.schema_migrations (
      id serial primary key,
      filename text not null unique,
      applied_at timestamptz not null default now()
    );
  `);
}
/**
 * Fetch all recorded (applied) migrations, ordered by filename.
 * @param {import('pg').PoolClient} client - connected pg client
 * @returns {Promise<Array<{filename: string, applied_at: *}>>} applied rows
 *   (applied_at comes straight from pg; presumably a Date for timestamptz)
 */
async function getApplied(client) {
  const { rows } = await client.query(
    'select filename, applied_at from public.schema_migrations order by filename'
  );
  return rows;
}
/**
 * List all .sql migration files in the migrations directory, sorted
 * ascending so timestamp-prefixed filenames run in chronological order.
 * @returns {string[]} sorted migration filenames (basenames only)
 */
function listMigrationFiles() {
  return fs
    .readdirSync(migrationsDir)
    .filter((f) => f.endsWith('.sql'))
    .sort();
}
/**
 * Apply a single migration file inside its own transaction and record it
 * in schema_migrations on success.
 * @param {import('pg').PoolClient} client - connected pg client
 * @param {string} filename - basename of the .sql file inside migrationsDir
 * @throws rethrows any error after rolling back the transaction
 */
async function applyMigration(client, filename) {
  const filePath = path.join(migrationsDir, filename);
  const sql = fs.readFileSync(filePath, 'utf8');
  console.log(`Applying: ${filename}`);

  // One transaction per migration: either the SQL and its bookkeeping row
  // both land, or neither does.
  await client.query('begin');
  try {
    await client.query(sql);
    await client.query(
      'insert into public.schema_migrations (filename) values ($1)',
      [filename]
    );
    await client.query('commit');
    console.log(` ✓ Done: ${filename}`);
  } catch (err) {
    await client.query('rollback');
    console.error(` ✗ Failed: ${filename}\n ${err.message}`);
    throw err;
  }
}
/**
 * Apply all pending migrations in filename order.
 * @param {import('pg').PoolClient} client - connected pg client
 */
async function cmdMigrate(client) {
  await ensureMigrationsTable(client);
  const applied = new Set((await getApplied(client)).map((r) => r.filename));
  const pending = listMigrationFiles().filter((f) => !applied.has(f));

  if (pending.length === 0) {
    // Message separator restored — the rendered source had the dash stripped.
    console.log('✔ Nothing to migrate — all migrations are up to date.');
    return;
  }

  console.log(`\nRunning ${pending.length} migration(s)...\n`);
  // Sequential on purpose: migrations must run in order, each in its own
  // transaction (see applyMigration).
  for (const file of pending) {
    await applyMigration(client, file);
  }
  console.log(`\n${pending.length} migration(s) applied successfully.`);
}
/**
 * Print a table of all migration files with their applied/pending state
 * and a summary count at the bottom.
 * @param {import('pg').PoolClient} client - connected pg client
 */
async function cmdStatus(client) {
  await ensureMigrationsTable(client);

  // filename -> applied_at, for O(1) lookups while printing.
  const appliedMap = new Map(
    (await getApplied(client)).map((row) => [row.filename, row.applied_at])
  );
  const files = listMigrationFiles();

  if (files.length === 0) {
    console.log('No migration files found.');
    return;
  }

  console.log('\n Status Migration');
  console.log(' ───────── ' + '─'.repeat(50));

  let pendingCount = 0;
  for (const file of files) {
    if (!appliedMap.has(file)) {
      pendingCount += 1;
      console.log(` ○ pending ${file}`);
      continue;
    }
    const ts = new Date(appliedMap.get(file)).toLocaleString('de-AT');
    console.log(` ✓ applied ${file} (${ts})`);
  }

  console.log(`\n ${appliedMap.size} applied, ${pendingCount} pending.\n`);
}
/**
 * Roll back the most recently applied migration.
 * If a matching `<name>.down.sql` exists it is executed in the same
 * transaction that removes the bookkeeping record; otherwise only the
 * record is removed and a warning is printed (schema changes are NOT
 * reverted in that case).
 *
 * NOTE(review): getApplied() orders by filename, so "last" is the
 * lexicographically last migration — this matches applied order only for
 * timestamp-prefixed filenames. Confirm that convention holds.
 *
 * @param {import('pg').PoolClient} client - connected pg client
 * @throws rethrows any error after rolling back the transaction
 */
async function cmdRollback(client) {
  await ensureMigrationsTable(client);
  const applied = await getApplied(client);
  if (applied.length === 0) {
    // Separator restored — the rendered source had the dash stripped.
    console.log('Nothing to rollback — no migrations have been applied.');
    return;
  }

  const last = applied[applied.length - 1];
  console.log(`\nRolling back: ${last.filename}`);

  // Check for a corresponding .down.sql file. Anchor the replacement so
  // only the trailing '.sql' extension is rewritten, not an earlier match.
  const downFile = path.join(
    migrationsDir,
    last.filename.replace(/\.sql$/, '.down.sql')
  );

  await client.query('begin');
  try {
    if (fs.existsSync(downFile)) {
      const sql = fs.readFileSync(downFile, 'utf8');
      await client.query(sql);
      console.log(` ✓ Ran down-migration: ${path.basename(downFile)}`);
    } else {
      console.warn(` ⚠ No down-migration file found (${path.basename(downFile)})`);
      console.warn(' Only removing the migration record, schema changes are NOT reverted.');
    }
    await client.query(
      'delete from public.schema_migrations where filename = $1',
      [last.filename]
    );
    await client.query('commit');
    console.log(`✔ Rolled back: ${last.filename}\n`);
  } catch (err) {
    await client.query('rollback');
    console.error(`✗ Rollback failed: ${err.message}`);
    throw err;
  }
}
@@ -71,20 +193,13 @@ async function applyMigration(client, filename) {
/**
 * CLI dispatcher: routes to status/rollback/migrate based on the first
 * argument, always releasing the client and closing the pool afterwards.
 * Removed the interleaved old-version body (diff residue) that duplicated
 * the migrate loop now living in cmdMigrate().
 */
async function run() {
  const client = await pool.connect();
  try {
    if (command === '--status') {
      await cmdStatus(client);
    } else if (command === '--rollback') {
      await cmdRollback(client);
    } else {
      await cmdMigrate(client);
    }
  } finally {
    // Release the client and drain the pool even on failure so the
    // process can exit cleanly.
    client.release();
    await pool.end();
  }
}
// Entry point: any unhandled failure exits non-zero so CI/scripts notice.
run().catch((err) => {
  console.error('\n❌ Migration runner failed:', err.message);
  process.exit(1);
});