3 changed files with 181 additions and 3 deletions
@ -0,0 +1,126 @@
|
||||
/** |
||||
* Database Migration Script |
||||
* Executes SQL migration files in order |
||||
*/ |
||||
|
||||
import { readFileSync, readdirSync } from 'fs'; |
||||
import { join } from 'path'; |
||||
import pg from 'pg'; |
||||
|
||||
const { Pool } = pg; |
||||
|
||||
// Database configuration
|
||||
const dbConfig = { |
||||
host: process.env.DB_HOST || 'localhost', |
||||
port: parseInt(process.env.DB_PORT || '5433', 10), |
||||
database: process.env.DB_NAME || 'clinic_tests', |
||||
user: process.env.DB_USER || 'developer', |
||||
password: process.env.DB_PASSWORD || 'dev_password', |
||||
}; |
||||
|
||||
const MIGRATIONS_DIR = join(process.cwd(), 'src', 'db', 'migrations'); |
||||
|
||||
/**
 * List the .sql migration files in MIGRATIONS_DIR, sorted by name so
 * they are applied in a deterministic order.
 * @returns {string[]} sorted migration file names
 */
function getMigrationFiles() {
  const entries = readdirSync(MIGRATIONS_DIR);
  const sqlFiles = entries.filter((name) => name.endsWith('.sql'));
  sqlFiles.sort();
  return sqlFiles;
}
||||
|
||||
/**
 * Create the migrations bookkeeping table if it does not exist yet.
 * Each row records one executed migration file by unique name.
 * @param {pg.Pool} pool - connected database pool
 */
async function ensureMigrationsTable(pool) {
  const ddl = `
    CREATE TABLE IF NOT EXISTS migrations (
      id SERIAL PRIMARY KEY,
      name VARCHAR(255) NOT NULL UNIQUE,
      executed_at TIMESTAMP DEFAULT NOW()
    )
  `;
  await pool.query(ddl);
}
||||
|
||||
/**
 * Fetch the names of migrations already recorded as executed.
 * @param {pg.Pool} pool - connected database pool
 * @returns {Promise<string[]>} executed migration names, in name order
 */
async function getExecutedMigrations(pool) {
  const { rows } = await pool.query('SELECT name FROM migrations ORDER BY name');
  return rows.map(({ name }) => name);
}
||||
|
||||
/**
 * Execute a single migration file inside a transaction and record it
 * in the migrations table on success.
 *
 * The transaction runs on a single checked-out client: issuing
 * BEGIN/COMMIT through pool.query() can route each statement to a
 * different pooled connection, silently breaking atomicity.
 *
 * @param {pg.Pool} pool - pool to check a client out of
 * @param {string} filename - migration file name inside MIGRATIONS_DIR
 * @throws rethrows any error after rolling the transaction back
 */
async function executeMigration(pool, filename) {
  const filePath = join(MIGRATIONS_DIR, filename);
  const sql = readFileSync(filePath, 'utf-8');

  console.log(`Executing migration: ${filename}`);

  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    await client.query(sql);
    await client.query('INSERT INTO migrations (name) VALUES ($1)', [filename]);
    await client.query('COMMIT');
    console.log(`✓ Migration ${filename} completed successfully`);
  } catch (error) {
    await client.query('ROLLBACK');
    console.error(`✗ Migration ${filename} failed:`, error.message);
    throw error;
  } finally {
    // Always return the client to the pool, success or failure.
    client.release();
  }
}
||||
|
||||
/**
 * Main migration entry point: verifies connectivity, ensures the
 * bookkeeping table exists, then executes pending migrations in name
 * order. Sets a non-zero exit code on failure.
 */
async function migrate() {
  const pool = new Pool(dbConfig);

  try {
    console.log('Connecting to database...');
    // Verify connectivity up front, then release the client immediately —
    // a leaked client would make pool.end() below wait forever.
    const client = await pool.connect();
    client.release();
    console.log('Connected to database\n');

    // Ensure the migrations bookkeeping table exists.
    await ensureMigrationsTable(pool);

    const migrationFiles = getMigrationFiles();
    const executedMigrations = await getExecutedMigrations(pool);

    console.log(`Found ${migrationFiles.length} migration file(s)`);
    console.log(`Already executed: ${executedMigrations.length} migration(s)\n`);

    // Set membership check avoids O(n²) Array.includes inside the filter.
    const executed = new Set(executedMigrations);
    const pendingMigrations = migrationFiles.filter((file) => !executed.has(file));

    if (pendingMigrations.length === 0) {
      console.log('All migrations already executed.');
    } else {
      console.log(`Pending migrations: ${pendingMigrations.length}\n`);

      // Run sequentially: later migrations may depend on earlier ones.
      for (const filename of pendingMigrations) {
        await executeMigration(pool, filename);
      }

      console.log(`\n✓ Successfully executed ${pendingMigrations.length} migration(s)`);
    }
  } catch (error) {
    console.error('\n✗ Migration failed:', error.message);
    // Use exitCode rather than process.exit(1): process.exit would
    // preempt the finally block and skip pool cleanup.
    process.exitCode = 1;
  } finally {
    await pool.end();
  }
}

// Run migrations when this script is executed.
migrate();
||||
@ -0,0 +1,52 @@
|
||||
import express from 'express';
import cors from 'cors';
import cookieParser from 'cookie-parser';
import dotenv from 'dotenv';

import authRoutes from './routes/auth.js';

dotenv.config();

const app = express();
const PORT = process.env.PORT || 3001;

// CORS: production allows only the configured frontend origin;
// development whitelists the local Vite/CRA dev servers.
const corsOptions = {
  origin:
    process.env.NODE_ENV === 'production'
      ? process.env.FRONTEND_URL
      : ['http://localhost:5173', 'http://localhost:3000'],
  credentials: true,
};

app.use(cors(corsOptions));
app.use(express.json());
app.use(cookieParser());

// Authentication endpoints live under /api/auth.
app.use('/api/auth', authRoutes);
||||
|
||||
// Health check route — lets monitors confirm the API is up.
app.get('/api/health', (req, res) => {
  const payload = {
    status: 'ok',
    timestamp: new Date().toISOString(),
    message: 'Server is running',
  };
  res.json(payload);
});
||||
|
||||
// Centralized error handler: logs the error and converts it into a JSON
// response. The 4-argument signature is what marks this as an error
// handler for Express.
app.use((err, req, res, _next) => {
  console.error('Error:', err);
  const status = err.status || 500;
  const message = err.message || 'Internal Server Error';
  res.status(status).json({ error: message });
});
||||
|
||||
// Catch-all for requests that matched no route above.
const notFound = (req, res) => {
  res.status(404).json({ error: 'Not found' });
};
app.use(notFound);
||||
|
||||
// Boot the HTTP server.
const onListening = () => {
  console.log(`Server is running on port ${PORT}`);
  console.log(`Environment: ${process.env.NODE_ENV || 'development'}`);
};
app.listen(PORT, onListening);
||||
Loading…
Reference in new issue