fix(#14): Implement database migrations system with baseline migration
Some checks are pending
Docker Test / test (push) Waiting to run

This commit is contained in:
OpenClaw 2026-03-06 23:37:39 +00:00
parent 2f2ea4d483
commit 3916dd42bf
5 changed files with 248 additions and 23 deletions

View file

@@ -0,0 +1,104 @@
-- Migration: 001_baseline
-- Description: Initial database schema
CREATE TABLE IF NOT EXISTS users (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
email VARCHAR(255) NOT NULL UNIQUE,
password_hash VARCHAR(255) NOT NULL,
display_name VARCHAR(120) NOT NULL,
phone_encrypted TEXT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS help_requests (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
requester_id BIGINT NOT NULL,
title VARCHAR(180) NOT NULL,
description TEXT NOT NULL,
value_chf DECIMAL(10,2) NOT NULL,
status ENUM('open','negotiating','agreed','completed','cancelled') DEFAULT 'open',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (requester_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS offers (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
request_id BIGINT NOT NULL,
helper_id BIGINT NOT NULL,
amount_chf DECIMAL(10,2) NOT NULL,
message TEXT NULL,
status ENUM('pending','countered','accepted','rejected') DEFAULT 'pending',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (request_id) REFERENCES help_requests(id),
FOREIGN KEY (helper_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS negotiations (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
offer_id BIGINT NOT NULL,
sender_id BIGINT NOT NULL,
amount_chf DECIMAL(10,2) NOT NULL,
message TEXT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (offer_id) REFERENCES offers(id),
FOREIGN KEY (sender_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS deals (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
request_id BIGINT NOT NULL,
offer_id BIGINT NOT NULL,
agreed_amount_chf DECIMAL(10,2) NOT NULL,
agreed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
completed_at TIMESTAMP NULL,
FOREIGN KEY (request_id) REFERENCES help_requests(id),
FOREIGN KEY (offer_id) REFERENCES offers(id)
);
CREATE TABLE IF NOT EXISTS contact_exchange_requests (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
deal_id BIGINT NOT NULL,
requester_id BIGINT NOT NULL,
target_id BIGINT NOT NULL,
accepted BOOLEAN DEFAULT FALSE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (deal_id) REFERENCES deals(id),
FOREIGN KEY (requester_id) REFERENCES users(id),
FOREIGN KEY (target_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS addresses (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
address_encrypted TEXT NOT NULL,
postal_verified_at TIMESTAMP NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS address_change_requests (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
new_address_encrypted TEXT NOT NULL,
verification_code_hash VARCHAR(255) NOT NULL,
status ENUM('pending_letter','verified','expired','rejected') DEFAULT 'pending_letter',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
verified_at TIMESTAMP NULL,
FOREIGN KEY (user_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS reviews (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
deal_id BIGINT NOT NULL,
reviewer_id BIGINT NOT NULL,
reviewee_id BIGINT NOT NULL,
rating TINYINT NOT NULL,
comment TEXT NULL,
earliest_prompt_at TIMESTAMP NOT NULL,
latest_prompt_at TIMESTAMP NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (deal_id) REFERENCES deals(id),
FOREIGN KEY (reviewer_id) REFERENCES users(id),
FOREIGN KEY (reviewee_id) REFERENCES users(id),
CHECK (rating BETWEEN 1 AND 5)
);

View file

@@ -0,0 +1,17 @@
// Migration runner configuration: DB connection settings plus the
// bookkeeping table name and the directory scanned for *.sql files.
const config = {
  // Database connection settings, overridable via environment variables.
  connection: {
    host: process.env.DB_HOST || 'localhost',
    // Environment variables are strings; mysql2 expects a numeric port.
    // Number(undefined) is NaN and Number('') is 0, so `||` falls back
    // to 3306 for unset/empty values while a real "3307" passes through.
    port: Number(process.env.DB_PORT) || 3306,
    user: process.env.DB_USER || 'root',
    password: process.env.DB_PASSWORD || '',
    database: process.env.DB_NAME || 'helpyourneighbour'
  },
  // Table that records which migrations have been applied.
  tableName: 'migrations',
  // Path to migration files, resolved against the process CWD.
  migrationsDir: './migrations'
};

export default config;

View file

@@ -0,0 +1,114 @@
import mysql from 'mysql2';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
import config from './config.js';
const __filename = fileURLToPath(import.meta.url);
// NOTE(review): __dirname is computed but never used in this file —
// config.migrationsDir is resolved against the process CWD, not this
// module's directory. Either resolve migration paths against __dirname
// or remove these two lines.
const __dirname = path.dirname(__filename);
// Create database connection — one shared connection for the whole run;
// it is closed in runMigrations()'s finally block.
const connection = mysql.createConnection(config.connection);
// Ensure migrations table exists
// Creates the bookkeeping table that records applied migrations,
// if it does not already exist. Resolves once the DDL has run.
function ensureMigrationsTable() {
  const ddl = `
    CREATE TABLE IF NOT EXISTS ${config.tableName} (
      id INT AUTO_INCREMENT PRIMARY KEY,
      name VARCHAR(255) NOT NULL,
      executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
  `;
  return new Promise((resolve, reject) => {
    connection.execute(ddl, (err) => (err ? reject(err) : resolve()));
  });
}
// Get list of already executed migrations
// Returns the names of already-applied migrations in application order.
// Orders by id rather than executed_at: TIMESTAMP has one-second
// precision, so several migrations applied within the same second would
// otherwise sort ambiguously; id is strictly increasing per insert.
function getExecutedMigrations() {
  const query = `SELECT name FROM ${config.tableName} ORDER BY id`;
  return new Promise((resolve, reject) => {
    connection.execute(query, (err, results) => {
      if (err) reject(err);
      else resolve(results.map((row) => row.name));
    });
  });
}
// Execute a migration file
// Promisified wrapper around connection.query (text protocol, so plain
// DDL statements work without server-side prepare support).
function runStatement(sql, params = []) {
  return new Promise((resolve, reject) => {
    connection.query(sql, params, (err, results) => {
      if (err) reject(err);
      else resolve(results);
    });
  });
}

// Applies one migration file, then records it in the migrations table.
// A migration file may contain several `;`-separated statements (the
// baseline migration has nine CREATE TABLEs); connection.execute()
// prepares a single statement and fails on multi-statement input, so
// the file is split and the statements run one by one, in order.
// NOTE(review): the split is naive — it would break on a literal ';'
// inside a string; acceptable for schema-only migration files.
async function executeMigration(migrationName, sqlContent) {
  const statements = sqlContent
    .split(';')
    .map((s) => s.trim())
    .filter(Boolean);
  for (const statement of statements) {
    await runStatement(statement);
  }
  // Log only after every statement succeeded, so a partially-failed
  // migration is retried on the next run.
  await runStatement(
    `INSERT INTO ${config.tableName} (name) VALUES (?)`,
    [migrationName]
  );
}
// Get all migration files
// Lists the *.sql files in the migrations directory, sorted by name.
// If the directory is missing, creates it and returns an empty list;
// any other filesystem error propagates to the caller.
async function getMigrationFiles() {
  let entries;
  try {
    entries = await fs.readdir(config.migrationsDir);
  } catch (err) {
    if (err.code !== 'ENOENT') throw err;
    await fs.mkdir(config.migrationsDir, { recursive: true });
    return [];
  }
  const sqlFiles = entries.filter((name) => name.endsWith('.sql'));
  sqlFiles.sort((a, b) => a.localeCompare(b));
  return sqlFiles;
}
// Run migrations
// Applies all pending *.sql migrations in name order. One-shot: the
// shared connection is closed in the finally block, so the module-level
// connection cannot be reused after this resolves or rejects.
async function runMigrations() {
  try {
    await ensureMigrationsTable();
    // Set membership is O(1); Array.includes inside filter would make
    // the pending scan O(n^2) as the migration history grows.
    const executed = new Set(await getExecutedMigrations());
    const allMigrations = await getMigrationFiles();
    const pending = allMigrations.filter((name) => !executed.has(name));
    if (pending.length === 0) {
      console.log('No pending migrations');
      return;
    }
    console.log(`Running ${pending.length} migrations...`);
    for (const migrationName of pending) {
      console.log(`Executing ${migrationName}`);
      const filePath = path.join(config.migrationsDir, migrationName);
      const sqlContent = await fs.readFile(filePath, 'utf8');
      await executeMigration(migrationName, sqlContent);
    }
    console.log('All migrations executed successfully');
  } catch (err) {
    console.error('Error running migrations:', err);
    throw err;
  } finally {
    connection.end();
  }
}
// Run the migrations
if (process.argv.includes('--run')) {
runMigrations().catch(console.error);
}
export { runMigrations };

View file

@@ -10,6 +10,7 @@
"dev": "node --watch src/server.js",
"db:init": "node src/db/init.js",
"db:seed": "node src/db/seed.js",
"db:migrate": "node migrations/runner.js --run",
"test:smoke": "node scripts/smoke-test.mjs",
"test:integration": "node scripts/integration-test.mjs"
},

View file

@@ -1,29 +1,18 @@
// NOTE(review): this span is a rendered diff whose removed and added
// lines were interleaved without +/- markers, producing invalid JS.
// Reconstructed post-commit file: the old schema.sql splitting code and
// its fs/path/fileURLToPath imports were removed by this commit in
// favour of delegating to the migrations runner.
import { runMigrations } from '../../migrations/runner.js';
import { pool } from './connection.js';

// One-shot initializer: applies all pending migrations, then closes
// this module's pool. Exits non-zero on failure so callers (and CI)
// can detect a broken init.
const run = async () => {
  try {
    await runMigrations();
    console.log('Database initialized with migrations.');
  } catch (err) {
    console.error('Database init failed:', err.message);
    process.exitCode = 1;
  } finally {
    // The diff's success and failure paths both awaited pool.end();
    // finally dedupes that cleanup and still lets the loop drain.
    await pool.end();
  }
};

run();