fix(#14): Implement database migrations system with baseline migration
Some checks are pending
Docker Test / test (push) Waiting to run
Some checks are pending
Docker Test / test (push) Waiting to run
This commit is contained in:
parent
2f2ea4d483
commit
3916dd42bf
5 changed files with 248 additions and 23 deletions
104
backend/migrations/001_baseline.sql
Normal file
104
backend/migrations/001_baseline.sql
Normal file
|
|
@ -0,0 +1,104 @@
|
||||||
|
-- Migration: 001_baseline
-- Description: Initial database schema

-- Account records; phone number is stored encrypted at the application layer.
CREATE TABLE IF NOT EXISTS users (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    email VARCHAR(255) NOT NULL UNIQUE,
    password_hash VARCHAR(255) NOT NULL,
    display_name VARCHAR(120) NOT NULL,
    phone_encrypted TEXT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- A request for help posted by a user, with a CHF value attached.
CREATE TABLE IF NOT EXISTS help_requests (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    requester_id BIGINT NOT NULL,
    title VARCHAR(180) NOT NULL,
    description TEXT NOT NULL,
    value_chf DECIMAL(10,2) NOT NULL,
    status ENUM('open','negotiating','agreed','completed','cancelled') DEFAULT 'open',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (requester_id) REFERENCES users(id)
);

-- An offer made by a helper against a help request.
CREATE TABLE IF NOT EXISTS offers (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    request_id BIGINT NOT NULL,
    helper_id BIGINT NOT NULL,
    amount_chf DECIMAL(10,2) NOT NULL,
    message TEXT NULL,
    status ENUM('pending','countered','accepted','rejected') DEFAULT 'pending',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (request_id) REFERENCES help_requests(id),
    FOREIGN KEY (helper_id) REFERENCES users(id)
);

-- Back-and-forth counter messages on an offer.
CREATE TABLE IF NOT EXISTS negotiations (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    offer_id BIGINT NOT NULL,
    sender_id BIGINT NOT NULL,
    amount_chf DECIMAL(10,2) NOT NULL,
    message TEXT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (offer_id) REFERENCES offers(id),
    FOREIGN KEY (sender_id) REFERENCES users(id)
);

-- A finalized agreement between a request and an accepted offer.
CREATE TABLE IF NOT EXISTS deals (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    request_id BIGINT NOT NULL,
    offer_id BIGINT NOT NULL,
    agreed_amount_chf DECIMAL(10,2) NOT NULL,
    agreed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    completed_at TIMESTAMP NULL,
    FOREIGN KEY (request_id) REFERENCES help_requests(id),
    FOREIGN KEY (offer_id) REFERENCES offers(id)
);

-- Mutual-consent gate before contact details are shared on a deal.
CREATE TABLE IF NOT EXISTS contact_exchange_requests (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    deal_id BIGINT NOT NULL,
    requester_id BIGINT NOT NULL,
    target_id BIGINT NOT NULL,
    accepted BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (deal_id) REFERENCES deals(id),
    FOREIGN KEY (requester_id) REFERENCES users(id),
    FOREIGN KEY (target_id) REFERENCES users(id)
);

-- User addresses; stored encrypted, optionally verified by postal letter.
CREATE TABLE IF NOT EXISTS addresses (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    user_id BIGINT NOT NULL,
    address_encrypted TEXT NOT NULL,
    postal_verified_at TIMESTAMP NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (user_id) REFERENCES users(id)
);

-- Pending address changes awaiting letter-code verification.
CREATE TABLE IF NOT EXISTS address_change_requests (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    user_id BIGINT NOT NULL,
    new_address_encrypted TEXT NOT NULL,
    verification_code_hash VARCHAR(255) NOT NULL,
    status ENUM('pending_letter','verified','expired','rejected') DEFAULT 'pending_letter',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    verified_at TIMESTAMP NULL,
    FOREIGN KEY (user_id) REFERENCES users(id)
);

-- Post-deal reviews; rating constrained to 1..5.
CREATE TABLE IF NOT EXISTS reviews (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    deal_id BIGINT NOT NULL,
    reviewer_id BIGINT NOT NULL,
    reviewee_id BIGINT NOT NULL,
    rating TINYINT NOT NULL,
    comment TEXT NULL,
    earliest_prompt_at TIMESTAMP NOT NULL,
    latest_prompt_at TIMESTAMP NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (deal_id) REFERENCES deals(id),
    FOREIGN KEY (reviewer_id) REFERENCES users(id),
    FOREIGN KEY (reviewee_id) REFERENCES users(id),
    CHECK (rating BETWEEN 1 AND 5)
);
|
||||||
17
backend/migrations/config.js
Normal file
17
backend/migrations/config.js
Normal file
|
|
@ -0,0 +1,17 @@
|
||||||
|
// Migration configuration.
// Connection values come from the environment, with local-dev fallbacks.
const config = {
  // Database connection settings (passed straight to mysql2).
  connection: {
    host: process.env.DB_HOST || 'localhost',
    // Environment variables are strings; coerce the port to a number.
    port: Number(process.env.DB_PORT || 3306),
    user: process.env.DB_USER || 'root',
    password: process.env.DB_PASSWORD || '',
    database: process.env.DB_NAME || 'helpyourneighbour',
    // Migration .sql files contain several semicolon-separated statements;
    // mysql2 only runs multi-statement text queries when this is enabled.
    multipleStatements: true,
  },

  // Name of the bookkeeping table that records executed migrations.
  tableName: 'migrations',

  // Path to migration files, relative to the process working directory.
  migrationsDir: './migrations',
};

export default config;
|
||||||
114
backend/migrations/runner.js
Normal file
114
backend/migrations/runner.js
Normal file
|
|
@ -0,0 +1,114 @@
|
||||||
|
import mysql from 'mysql2';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
import config from './config.js';

// ES modules have no __filename/__dirname; derive them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Create database connection — one shared connection reused by every helper
// in this module, closed by runMigrations() when it finishes.
const connection = mysql.createConnection(config.connection);
|
||||||
|
|
||||||
|
// Ensure migrations table exists
|
||||||
|
/**
 * Ensure the bookkeeping table that tracks executed migrations exists.
 * @returns {Promise<void>} resolves once the CREATE TABLE round-trip completes.
 */
function ensureMigrationsTable() {
  const createTableQuery = `
    CREATE TABLE IF NOT EXISTS ${config.tableName} (
      id INT AUTO_INCREMENT PRIMARY KEY,
      name VARCHAR(255) NOT NULL,
      executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
  `;

  // Adapt the mysql2 callback API to a Promise.
  return new Promise((resolve, reject) => {
    connection.execute(createTableQuery, (err) => (err ? reject(err) : resolve()));
  });
}
|
||||||
|
|
||||||
|
// Get list of already executed migrations
|
||||||
|
/**
 * Fetch the names of migrations that have already been executed,
 * in execution order.
 * @returns {Promise<string[]>}
 */
function getExecutedMigrations() {
  const selectQuery = `SELECT name FROM ${config.tableName} ORDER BY executed_at`;

  return new Promise((resolve, reject) => {
    connection.execute(selectQuery, (err, rows) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(rows.map(({ name }) => name));
    });
  });
}
|
||||||
|
|
||||||
|
// Execute a migration file
|
||||||
|
/**
 * Execute one migration file's SQL, then record it in the migrations table.
 *
 * Uses connection.query() (text protocol) instead of connection.execute():
 * execute() goes through the prepared-statement protocol, which cannot run
 * the multiple semicolon-separated statements a migration .sql file contains
 * (and requires `multipleStatements: true` on the connection config).
 *
 * @param {string} migrationName - file name, used as the migration's identity.
 * @param {string} sqlContent - raw SQL read from the migration file.
 * @returns {Promise<void>}
 */
function executeMigration(migrationName, sqlContent) {
  return new Promise((resolve, reject) => {
    connection.query(sqlContent, (err) => {
      if (err) {
        reject(err);
        return;
      }
      // Log the execution in the migrations table.
      // NOTE(review): not atomic with the migration itself — MySQL DDL
      // auto-commits, so a crash here leaves the migration applied but
      // unrecorded; it would be retried on the next run.
      const logQuery = `INSERT INTO ${config.tableName} (name) VALUES (?)`;
      connection.execute(logQuery, [migrationName], (logErr) => {
        if (logErr) reject(logErr);
        else resolve();
      });
    });
  });
}
|
||||||
|
|
||||||
|
// Get all migration files
|
||||||
|
/**
 * List all .sql migration files in the configured directory, sorted by name
 * (file names are prefixed with a sequence number, e.g. 001_baseline.sql).
 * Creates the directory and returns [] if it does not exist yet.
 * @returns {Promise<string[]>}
 */
async function getMigrationFiles() {
  let entries;
  try {
    entries = await fs.readdir(config.migrationsDir);
  } catch (err) {
    // Missing directory is not an error — create it and report no files.
    if (err.code !== 'ENOENT') throw err;
    await fs.mkdir(config.migrationsDir, { recursive: true });
    return [];
  }
  return entries
    .filter((entry) => entry.endsWith('.sql'))
    .sort((a, b) => a.localeCompare(b));
}
|
||||||
|
|
||||||
|
// Run migrations
|
||||||
|
/**
 * Run every migration that has not been recorded yet, in file-name order.
 * Always closes the shared connection when done — so this function is
 * effectively single-use per process.
 * @throws rethrows the first migration error after logging it.
 */
async function runMigrations() {
  try {
    await ensureMigrationsTable();

    const executed = await getExecutedMigrations();
    const available = await getMigrationFiles();
    const pending = available.filter((name) => !executed.includes(name));

    if (pending.length === 0) {
      console.log('No pending migrations');
      return;
    }

    console.log(`Running ${pending.length} migrations...`);
    for (const migrationName of pending) {
      console.log(`Executing ${migrationName}`);
      const filePath = path.join(config.migrationsDir, migrationName);
      const sqlContent = await fs.readFile(filePath, 'utf8');
      await executeMigration(migrationName, sqlContent);
    }
    console.log('All migrations executed successfully');
  } catch (err) {
    console.error('Error running migrations:', err);
    throw err;
  } finally {
    connection.end();
  }
}
|
||||||
|
|
||||||
|
// Run the migrations
|
||||||
|
if (process.argv.includes('--run')) {
|
||||||
|
runMigrations().catch(console.error);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { runMigrations };
|
||||||
|
|
@ -10,6 +10,7 @@
|
||||||
"dev": "node --watch src/server.js",
|
"dev": "node --watch src/server.js",
|
||||||
"db:init": "node src/db/init.js",
|
"db:init": "node src/db/init.js",
|
||||||
"db:seed": "node src/db/seed.js",
|
"db:seed": "node src/db/seed.js",
|
||||||
|
"db:migrate": "node migrations/runner.js --run",
|
||||||
"test:smoke": "node scripts/smoke-test.mjs",
|
"test:smoke": "node scripts/smoke-test.mjs",
|
||||||
"test:integration": "node scripts/integration-test.mjs"
|
"test:integration": "node scripts/integration-test.mjs"
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -1,29 +1,18 @@
|
||||||
// Database initialization: delegates schema setup to the migration runner.
import { runMigrations } from '../../migrations/runner.js';
import { pool } from './connection.js';

const run = async () => {
  try {
    // Run migrations — applies every migration not yet recorded.
    await runMigrations();

    console.log('Database initialized with migrations.');
    await pool.end();
  } catch (err) {
    console.error('Database init failed:', err.message);
    await pool.end();
    process.exit(1);
  }
};

run();
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue