- Published on
Testing Database Migrations — Catching Breaking Changes Before They Reach Production
- Authors

- Name
- Sanjeev Sharma
- @webcoderspeed1
Introduction
Database migrations fail silently. A migration adds a NOT NULL column without a default, and your application doesn't crash until it tries to insert a row. A migration renames a table, but old code still queries the old name. Without tests, production traffic is what surfaces these failures; with migration tests, CI catches them first. This post covers strategies for testing migrations: compatibility testing (can old code read new schema?), rollback testing (can we recover from bad migrations?), and smoke tests (are critical queries still fast?).
- Migration Test Setup
- Backwards Compatibility Testing
- Forwards Compatibility Testing
- Rollback Testing
- Migration Smoke Tests
- Large Table Migration Testing
- Checklist
- Conclusion
Migration Test Setup
Use a fresh database per test to ensure migrations run correctly from empty state:
// test/migrations.setup.ts
import { Pool, PoolClient } from 'pg';
import { execSync } from 'child_process';
import path from 'path';
// Module-level handles shared by setup/teardown: the per-test pool and the
// generated database name. Populated by setupMigrationTest().
let pool: Pool;
let testDbName: string;

/**
 * Creates a fresh, uniquely named database, runs every migration against it,
 * and returns a pool connected to that database.
 *
 * Starting from an empty database each time verifies that migrations apply
 * cleanly from scratch rather than relying on leftover state.
 */
export async function setupMigrationTest(): Promise<Pool> {
  // Connect as superuser to the maintenance DB to create the test database.
  const adminPool = new Pool({
    host: process.env.DB_HOST || 'localhost',
    port: parseInt(process.env.DB_PORT || '5432', 10),
    user: 'postgres',
    password: process.env.DB_ADMIN_PASSWORD || 'password',
    database: 'postgres',
  });

  // Timestamp suffix keeps repeated/parallel runs from colliding.
  testDbName = `test_db_${Date.now()}`;

  const adminClient = await adminPool.connect();
  try {
    await adminClient.query(`CREATE DATABASE "${testDbName}"`);
  } finally {
    adminClient.release();
  }
  // BUG FIX: the admin pool was previously never closed, leaking a
  // connection per test run (teardown closes its pool; setup did not).
  await adminPool.end();

  pool = new Pool({
    host: process.env.DB_HOST || 'localhost',
    port: parseInt(process.env.DB_PORT || '5432', 10),
    user: process.env.DB_USER || 'postgres',
    password: process.env.DB_PASSWORD || 'password',
    database: testDbName,
  });

  // Apply all migrations to the freshly created, empty database.
  const migrationsDir = path.join(__dirname, '../migrations');
  execSync(`npm run migrate -- --dir ${migrationsDir}`, {
    env: { ...process.env, DATABASE_URL: getConnectionString() },
  });

  return pool;
}
/**
 * Closes the per-test pool and drops the database created by
 * setupMigrationTest(), using an admin connection to the maintenance DB.
 */
export async function teardownMigrationTest() {
  await pool.end();

  const adminConfig = {
    host: process.env.DB_HOST || 'localhost',
    port: parseInt(process.env.DB_PORT || '5432'),
    user: 'postgres',
    password: process.env.DB_ADMIN_PASSWORD || 'password',
    database: 'postgres',
  };
  const adminPool = new Pool(adminConfig);

  const admin = await adminPool.connect();
  try {
    // WITH (FORCE) terminates any lingering connections (PostgreSQL 13+).
    await admin.query(`DROP DATABASE "${testDbName}" WITH (FORCE)`);
  } finally {
    admin.release();
  }
  await adminPool.end();
}
/**
 * Builds a postgres:// connection string for the current test database.
 *
 * BUG FIX: user and password are now URL-encoded so credentials containing
 * reserved characters (e.g. '@', ':', '/') no longer corrupt the URL.
 */
export function getConnectionString(): string {
  const user = encodeURIComponent(process.env.DB_USER || 'postgres');
  const password = encodeURIComponent(process.env.DB_PASSWORD || 'password');
  const host = process.env.DB_HOST || 'localhost';
  const port = process.env.DB_PORT || '5432';
  return `postgres://${user}:${password}@${host}:${port}/${testDbName}`;
}
/** Accessor for the pool created by setupMigrationTest(). */
export const getPool = (): Pool => pool;
Backwards Compatibility Testing
Can old application code still run against the new schema? This matters because schema changes deploy before the matching code changes, so the previous release must keep working against the new schema during rollout.
// test/backwards-compat.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { setupMigrationTest, teardownMigrationTest, getPool } from './migrations.setup';
describe('Backwards compatibility - old code with new schema', () => {
  beforeAll(async () => {
    // All migrations are applied here; the tests below then issue queries
    // exactly as the previous (pre-migration) release would.
    await setupMigrationTest();
  });

  afterAll(async () => {
    await teardownMigrationTest();
  });

  it('old code should read from new posts schema with created_at', async () => {
    const conn = await getPool().connect();
    try {
      // The projection an older release uses — it knows nothing of created_at.
      const legacySelect = 'SELECT id, title, content FROM posts WHERE id = $1';

      // Seed the row through the NEW schema, populating the new column.
      await conn.query(
        `INSERT INTO posts (id, title, content, created_at)
VALUES ($1, $2, $3, NOW())`,
        [1, 'Test Post', 'Content here']
      );

      // The legacy projection must still resolve against the new schema.
      const { rows } = await conn.query(legacySelect, [1]);
      expect(rows).toHaveLength(1);
      expect(rows[0]).toEqual({
        id: 1,
        title: 'Test Post',
        content: 'Content here',
      });
    } finally {
      conn.release();
    }
  });

  it('should handle NULL created_at gracefully', async () => {
    const conn = await getPool().connect();
    try {
      // A pre-migration writer leaves the new column unset (NULL).
      await conn.query(
        `INSERT INTO posts (id, title, content, created_at)
VALUES ($1, $2, $3, NULL)`,
        [2, 'Old Post', 'No timestamp']
      );

      const { rows } = await conn.query('SELECT * FROM posts WHERE id = 2');
      expect(rows[0].created_at).toBeNull();
    } finally {
      conn.release();
    }
  });
});
Forwards Compatibility Testing
Can new application code read the old schema? Test against the schema as it stood before the latest migration.
// test/forwards-compat.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { Pool } from 'pg';
import path from 'path';
import { execSync } from 'child_process';
describe('Forwards compatibility - new code with old schema', () => {
  let pool: Pool;
  let testDbName: string;

  beforeAll(async () => {
    const adminPool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      // CONSISTENCY FIX: honor DB_PORT like the setup module does.
      port: parseInt(process.env.DB_PORT || '5432', 10),
      user: 'postgres',
      password: process.env.DB_ADMIN_PASSWORD || 'password',
      database: 'postgres',
    });
    testDbName = `test_old_schema_${Date.now()}`;
    const adminClient = await adminPool.connect();
    try {
      await adminClient.query(`CREATE DATABASE "${testDbName}"`);
    } finally {
      adminClient.release();
    }

    // Run all migrations except the latest one.
    const migrationsDir = path.join(__dirname, '../migrations');
    // BUG FIX: `ls` output ends with a trailing newline, so split('\n')
    // yields a final empty entry. Without filtering it out, the old
    // `[length - 2]` index actually selected the LAST migration — the very
    // migration this suite is supposed to skip.
    const allMigrations = execSync(`ls ${migrationsDir}`)
      .toString()
      .split('\n')
      .filter((name) => name.length > 0);
    // Second-to-last real entry = everything up to (not including) the newest.
    const targetMigration = allMigrations[allMigrations.length - 2];

    // Run migrations selectively, stopping before the newest one.
    execSync(
      `npm run migrate -- --dir ${migrationsDir} --target-version ${targetMigration}`,
      {
        env: {
          ...process.env,
          DATABASE_URL: `postgres://${process.env.DB_USER || 'postgres'}:${
            process.env.DB_PASSWORD || 'password'
          }@${process.env.DB_HOST || 'localhost'}:${process.env.DB_PORT || 5432}/${testDbName}`,
        },
      }
    );

    pool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      port: parseInt(process.env.DB_PORT || '5432', 10),
      user: process.env.DB_USER || 'postgres',
      password: process.env.DB_PASSWORD || 'password',
      database: testDbName,
    });
    await adminPool.end();
  });

  afterAll(async () => {
    await pool.end();
    const adminPool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      port: parseInt(process.env.DB_PORT || '5432', 10),
      user: 'postgres',
      password: process.env.DB_ADMIN_PASSWORD || 'password',
      database: 'postgres',
    });
    const adminClient = await adminPool.connect();
    try {
      // WITH (FORCE) terminates lingering connections (PostgreSQL 13+).
      await adminClient.query(`DROP DATABASE "${testDbName}" WITH (FORCE)`);
    } finally {
      adminClient.release();
    }
    await adminPool.end();
  });

  it('new code should query without using new columns', async () => {
    const client = await pool.connect();
    try {
      // New code must rely only on columns present in the old schema.
      const result = await client.query(
        `SELECT id, title, content FROM posts LIMIT 1`
      );
      expect(Array.isArray(result.rows)).toBe(true);
    } catch (error: any) {
      // A missing-relation/column error is tolerated (the old schema may
      // predate the table entirely); any other failure is a genuine
      // forwards-compatibility break and must surface.
      if (!error.message.includes('does not exist')) {
        throw error;
      }
    } finally {
      client.release();
    }
  });
});
Rollback Testing
Ensure migrations can rollback safely:
// test/rollback.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { Pool } from 'pg';
import { execSync } from 'child_process';
import path from 'path';
describe('Migration rollback', () => {
  let pool: Pool;
  let testDbName: string;

  beforeAll(async () => {
    // Provision an empty, uniquely named database for rollback experiments.
    testDbName = `test_rollback_${Date.now()}`;
    const adminPool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      user: 'postgres',
      password: process.env.DB_ADMIN_PASSWORD || 'password',
      database: 'postgres',
    });
    const admin = await adminPool.connect();
    try {
      await admin.query(`CREATE DATABASE "${testDbName}"`);
    } finally {
      admin.release();
    }
    await adminPool.end();

    pool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      user: process.env.DB_USER || 'postgres',
      password: process.env.DB_PASSWORD || 'password',
      database: testDbName,
    });
  });

  afterAll(async () => {
    await pool.end();
    const adminPool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      user: 'postgres',
      password: process.env.DB_ADMIN_PASSWORD || 'password',
      database: 'postgres',
    });
    const admin = await adminPool.connect();
    try {
      await admin.query(`DROP DATABASE "${testDbName}" WITH (FORCE)`);
    } finally {
      admin.release();
    }
    await adminPool.end();
  });

  it('should rollback adding NOT NULL column without default', async () => {
    const db = await pool.connect();
    try {
      // Forward migration: create table, seed a row, add the new column.
      await db.query(
        `CREATE TABLE products (id SERIAL PRIMARY KEY, name VARCHAR(255))`
      );
      await db.query(`INSERT INTO products (name) VALUES ('Widget')`);
      await db.query(
        `ALTER TABLE products ADD COLUMN sku VARCHAR(50) NOT NULL DEFAULT 'TEMP'`
      );

      // Rollback: drop the column again.
      await db.query(`ALTER TABLE products DROP COLUMN sku`);

      // Pre-migration code must see the original shape and data.
      const { rows } = await db.query(`SELECT * FROM products WHERE id = 1`);
      expect(rows[0]).toEqual({ id: 1, name: 'Widget' });
    } finally {
      db.release();
    }
  });

  it('should preserve data during rollback', async () => {
    const db = await pool.connect();
    try {
      await db.query(
        `CREATE TABLE orders (id SERIAL PRIMARY KEY, total DECIMAL(10,2))`
      );
      await db.query(
        `INSERT INTO orders (total) VALUES (99.99), (150.50), (50.00)`
      );

      // COUNT(*) comes back as a string from pg; parse before comparing.
      const countRows = async () => {
        const res = await db.query(`SELECT COUNT(*) FROM orders`);
        return parseInt(res.rows[0].count);
      };

      expect(await countRows()).toBe(3);
      // Rollback (no-op in this case)
      expect(await countRows()).toBe(3);
    } finally {
      db.release();
    }
  });
});
Migration Smoke Tests
After all migrations, verify critical queries still work:
// test/migration-smoke.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { setupMigrationTest, teardownMigrationTest, getPool } from './migrations.setup';
describe('Migration smoke tests - critical queries', () => {
  beforeAll(async () => {
    await setupMigrationTest();
  });

  afterAll(async () => {
    await teardownMigrationTest();
  });

  it('should list users with acceptable performance', async () => {
    const pool = getPool();
    const client = await pool.connect();
    try {
      // PERF FIX: seed 1,000 rows in ONE set-based statement instead of
      // 1,000 sequential awaited INSERTs — one round-trip instead of a
      // thousand. The generated rows are identical (user0..user999).
      await client.query(`
        INSERT INTO users (email, created_at)
        SELECT 'user' || i || '@example.com', NOW()
        FROM generate_series(0, 999) AS i
      `);

      const start = Date.now();
      const result = await client.query(
        `SELECT id, email, created_at FROM users
WHERE created_at > NOW() - INTERVAL '30 days'
ORDER BY created_at DESC
LIMIT 100`
      );
      const duration = Date.now() - start;
      expect(result.rows.length).toBeGreaterThan(0);
      // NOTE: wall-clock thresholds can flake on loaded CI runners; treat
      // a failure here as a prompt to inspect the query plan, not as noise.
      expect(duration).toBeLessThan(500); // Should be sub-500ms
    } finally {
      client.release();
    }
  });

  it('should join posts and comments efficiently', async () => {
    const pool = getPool();
    const client = await pool.connect();
    try {
      // Representative "post list with comment counts" query used by the app.
      const start = Date.now();
      const result = await client.query(`
        SELECT
        p.id, p.title, COUNT(c.id) as comment_count
        FROM posts p
        LEFT JOIN comments c ON p.id = c.post_id
        GROUP BY p.id, p.title
        ORDER BY p.created_at DESC
        LIMIT 50
      `);
      const duration = Date.now() - start;
      expect(duration).toBeLessThan(1000);
    } finally {
      client.release();
    }
  });

  it('should verify index exists on frequently queried column', async () => {
    const pool = getPool();
    const client = await pool.connect();
    try {
      // Guards against a migration silently dropping a hot-path index.
      const result = await client.query(`
        SELECT indexname FROM pg_indexes
        WHERE tablename = 'posts' AND indexname = 'idx_posts_created_at'
      `);
      expect(result.rows).toHaveLength(1);
    } finally {
      client.release();
    }
  });
});
Large Table Migration Testing
Test migrations against realistic data volume:
// test/large-table-migration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { Pool } from 'pg';
describe('Large table migration (10M rows)', () => {
  let pool: Pool;
  let testDbName: string;

  beforeAll(async () => {
    // Dedicated database keeps the bulk data away from other suites.
    testDbName = `test_large_${Date.now()}`;
    const adminPool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      user: 'postgres',
      password: process.env.DB_ADMIN_PASSWORD || 'password',
      database: 'postgres',
    });
    const admin = await adminPool.connect();
    try {
      await admin.query(`CREATE DATABASE "${testDbName}"`);
    } finally {
      admin.release();
    }
    pool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      user: process.env.DB_USER || 'postgres',
      password: process.env.DB_PASSWORD || 'password',
      database: testDbName,
    });
    await adminPool.end();
  });

  afterAll(async () => {
    await pool.end();
    const adminPool = new Pool({
      host: process.env.DB_HOST || 'localhost',
      user: 'postgres',
      password: process.env.DB_ADMIN_PASSWORD || 'password',
      database: 'postgres',
    });
    const admin = await adminPool.connect();
    try {
      await admin.query(`DROP DATABASE "${testDbName}" WITH (FORCE)`);
    } finally {
      admin.release();
    }
    await adminPool.end();
  });

  it('should handle adding index on large table', async () => {
    const conn = await pool.connect();
    try {
      // Table shaped like a typical append-only event log.
      await conn.query(`
        CREATE TABLE events (
          id BIGSERIAL PRIMARY KEY,
          user_id INTEGER,
          event_type VARCHAR(50),
          created_at TIMESTAMP DEFAULT NOW()
        )
      `);

      // 100k synthetic rows stand in for the multi-million-row production
      // table; timings scale roughly linearly for this index build.
      await conn.query(`
        INSERT INTO events (user_id, event_type, created_at)
        SELECT
        (RANDOM() * 10000)::INTEGER,
        CASE WHEN RANDOM() < 0.5 THEN 'click' ELSE 'view' END,
        NOW() - (RANDOM() * INTERVAL '365 days')
        FROM generate_series(1, 100000)
      `);

      // Migration under test: build the composite index and time it.
      // CONCURRENTLY is what production needs to avoid blocking writes.
      const startedAt = Date.now();
      await conn.query(
        `CREATE INDEX CONCURRENTLY idx_events_user_created
ON events (user_id, created_at DESC)`
      );
      const elapsed = Date.now() - startedAt;
      expect(elapsed).toBeLessThan(5000); // Should complete in < 5s
    } finally {
      conn.release();
    }
  });
});
Checklist
- Write migration test setup that creates fresh DB
- Test backwards compatibility (old code + new schema)
- Test forwards compatibility (new code + old schema)
- Test rollback for each migration
- Verify critical queries work post-migration
- Test on realistic data volumes (100k+ rows)
- Verify indexes are created/maintained
- Check migration duration targets
- Test NULL handling in default values
- Automate migration tests in CI pipeline
Conclusion
Database migrations are code, and code needs tests. Backwards and forwards compatibility tests catch deployment hazards before they happen. Rollback tests ensure you can recover quickly. Smoke tests verify your schema supports critical queries. Add migration testing to your CI pipeline today—it's the cheapest insurance against expensive production incidents.