mpx-db 1.0.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- package/LICENSE +21 -0
- package/README.md +347 -0
- package/SUMMARY.md +182 -0
- package/bin/mpx-db.js +3 -0
- package/package.json +62 -0
- package/src/cli.js +141 -0
- package/src/commands/connections.js +79 -0
- package/src/commands/data.js +79 -0
- package/src/commands/migrate.js +318 -0
- package/src/commands/query.js +93 -0
- package/src/commands/schema.js +181 -0
- package/src/db/base-adapter.js +101 -0
- package/src/db/connection.js +46 -0
- package/src/db/mysql-adapter.js +144 -0
- package/src/db/postgres-adapter.js +150 -0
- package/src/db/sqlite-adapter.js +141 -0
- package/src/index.js +15 -0
- package/src/utils/config.js +109 -0
- package/src/utils/crypto.js +67 -0
package/src/cli.js
ADDED
@@ -0,0 +1,141 @@
+import { Command } from 'commander';
+import chalk from 'chalk';
+import { handleConnect, listConnections, removeConnection } from './commands/connections.js';
+import { handleQuery } from './commands/query.js';
+import { showInfo, listTables, describeTable, dumpSchema } from './commands/schema.js';
+import {
+  initMigrations,
+  createMigration,
+  showMigrationStatus,
+  runMigrations,
+  rollbackMigration
+} from './commands/migrate.js';
+import { exportData } from './commands/data.js';
+
+const program = new Command();
+
+program
+  .name('mpx-db')
+  .description('Database management CLI - Connect, query, migrate, and manage databases')
+  .version('1.0.0');
+
+// Connect command
+program
+  .command('connect')
+  .description('Test and optionally save a database connection')
+  .argument('<url>', 'Connection URL (sqlite://, postgres://, mysql://)')
+  .option('-s, --save <name>', 'Save connection with a name')
+  .action(handleConnect);
+
+// Connections command
+const connections = program
+  .command('connections')
+  .description('Manage saved connections');
+
+connections
+  .command('list')
+  .description('List all saved connections')
+  .action(listConnections);
+
+connections
+  .command('remove')
+  .description('Remove a saved connection')
+  .argument('<name>', 'Connection name')
+  .action(removeConnection);
+
+// Query command
+program
+  .command('query')
+  .description('Execute a SQL query')
+  .argument('<target>', 'Connection name or URL')
+  .argument('<sql>', 'SQL query to execute')
+  .action(handleQuery);
+
+// Info command
+program
+  .command('info')
+  .description('Show database information')
+  .argument('<target>', 'Connection name or URL')
+  .action(showInfo);
+
+// Tables command
+program
+  .command('tables')
+  .description('List all tables')
+  .argument('<target>', 'Connection name or URL')
+  .action(listTables);
+
+// Describe command
+program
+  .command('describe')
+  .description('Show table schema')
+  .argument('<target>', 'Connection name or URL')
+  .argument('<table>', 'Table name')
+  .action(describeTable);
+
+// Schema commands
+const schema = program
+  .command('schema')
+  .description('Schema operations');
+
+schema
+  .command('dump')
+  .description('Dump database schema as SQL')
+  .argument('<target>', 'Connection name or URL')
+  .action(dumpSchema);
+
+// Migration commands
+const migrate = program
+  .command('migrate')
+  .description('Database migration commands');
+
+migrate
+  .command('init')
+  .description('Initialize migrations directory')
+  .action(initMigrations);
+
+migrate
+  .command('create')
+  .description('Create a new migration file')
+  .argument('<description>', 'Migration description')
+  .action(createMigration);
+
+migrate
+  .command('status')
+  .description('Show migration status')
+  .argument('<target>', 'Connection name or URL')
+  .action(showMigrationStatus);
+
+migrate
+  .command('up')
+  .description('Run pending migrations')
+  .argument('<target>', 'Connection name or URL')
+  .action(runMigrations);
+
+migrate
+  .command('down')
+  .description('Rollback last migration')
+  .argument('<target>', 'Connection name or URL')
+  .action(rollbackMigration);
+
+// Export command
+program
+  .command('export')
+  .description('Export table data')
+  .argument('<target>', 'Connection name or URL')
+  .argument('<table>', 'Table name')
+  .option('-f, --format <format>', 'Output format (json, csv)', 'json')
+  .option('-o, --output <file>', 'Output file path')
+  .action(exportData);
+
+// Error handling
+program.exitOverride();
+
+try {
+  await program.parseAsync(process.argv);
+} catch (err) {
+  if (err.code !== 'commander.help' && err.code !== 'commander.helpDisplayed') {
+    console.error(chalk.red(`Error: ${err.message}`));
+    process.exit(1);
+  }
+}
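The CLI above builds the commander program and parses `process.argv` at module load, wiring every subcommand to a handler in `src/commands/`. The published `package/bin/mpx-db.js` entry point (three lines, not expanded in this diff) most likely just defers to this module; a minimal sketch, assuming that layout:

```js
#!/usr/bin/env node
// Hypothetical reconstruction of package/bin/mpx-db.js -- the real three-line file
// is not shown in this diff. Importing cli.js is enough: it builds the commander
// program and calls program.parseAsync(process.argv) at module load.
import '../src/cli.js';
```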
package/src/commands/connections.js
ADDED
@@ -0,0 +1,79 @@
+import chalk from 'chalk';
+import Table from 'cli-table3';
+import { saveConnection, loadConnections, deleteConnection } from '../utils/config.js';
+import { createConnection } from '../db/connection.js';
+
+/**
+ * Handle connect command
+ */
+export async function handleConnect(url, options) {
+  try {
+    // Test connection
+    console.log(chalk.gray('Testing connection...'));
+    const db = await createConnection(url);
+    const info = await db.getInfo();
+    await db.disconnect();
+
+    console.log(chalk.green('✓ Connection successful'));
+    console.log(chalk.gray(` Type: ${info.type}`));
+    if (info.database) {
+      console.log(chalk.gray(` Database: ${info.database}`));
+    }
+    if (info.path) {
+      console.log(chalk.gray(` Path: ${info.path}`));
+    }
+
+    // Save if requested
+    if (options.save) {
+      saveConnection(options.save, url);
+      console.log(chalk.green(`✓ Saved connection as "${options.save}"`));
+    }
+
+  } catch (err) {
+    console.error(chalk.red('✗ Connection failed'));
+    console.error(chalk.red(` ${err.message}`));
+    process.exit(1);
+  }
+}
+
+/**
+ * List saved connections
+ */
+export async function listConnections() {
+  const connections = loadConnections();
+
+  if (Object.keys(connections).length === 0) {
+    console.log(chalk.yellow('No saved connections'));
+    console.log(chalk.gray('\nSave a connection with:'));
+    console.log(chalk.gray('  mpx-db connect --save <name> <url>'));
+    return;
+  }
+
+  const table = new Table({
+    head: ['Name', 'Type', 'Created'].map(h => chalk.cyan(h)),
+    style: { head: [], border: ['gray'] }
+  });
+
+  for (const [name, conn] of Object.entries(connections)) {
+    table.push([
+      chalk.white(name),
+      chalk.gray(conn.type),
+      chalk.gray(new Date(conn.createdAt).toLocaleDateString())
+    ]);
+  }
+
+  console.log(table.toString());
+}
+
+/**
+ * Delete a saved connection
+ */
+export async function removeConnection(name) {
+  const deleted = deleteConnection(name);
+
+  if (deleted) {
+    console.log(chalk.green(`✓ Deleted connection "${name}"`));
+  } else {
+    console.log(chalk.yellow(`Connection "${name}" not found`));
+  }
+}
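These handlers lean on `saveConnection`, `loadConnections`, and `deleteConnection` (plus `getConnection`, used in query.js below) from `src/utils/config.js`, which this diff does not expand. The code above implies each saved entry carries at least `url`, `type`, and `createdAt`. A minimal sketch of that assumed shape (the file path, storage format, and any use of `src/utils/crypto.js` are guesses, not the package's actual implementation):

```js
// Hypothetical sketch of the config helpers the commands above assume.
// The real src/utils/config.js (109 lines) and crypto.js are not shown in this diff.
import fs from 'fs';
import path from 'path';
import os from 'os';

const CONFIG_FILE = path.join(os.homedir(), '.mpx-db', 'connections.json'); // assumed location

export function loadConnections() {
  if (!fs.existsSync(CONFIG_FILE)) return {};
  return JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
}

export function saveConnection(name, url) {
  const connections = loadConnections();
  // listConnections() reads conn.type and conn.createdAt; resolveConnection() reads saved.url
  connections[name] = { url, type: url.split('://')[0], createdAt: new Date().toISOString() };
  fs.mkdirSync(path.dirname(CONFIG_FILE), { recursive: true });
  fs.writeFileSync(CONFIG_FILE, JSON.stringify(connections, null, 2));
}

export function getConnection(name) {
  return loadConnections()[name] || null;
}

export function deleteConnection(name) {
  const connections = loadConnections();
  if (!(name in connections)) return false;
  delete connections[name];
  fs.writeFileSync(CONFIG_FILE, JSON.stringify(connections, null, 2));
  return true;
}
```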
package/src/commands/data.js
ADDED
@@ -0,0 +1,79 @@
+import fs from 'fs';
+import chalk from 'chalk';
+import { createConnection } from '../db/connection.js';
+import { resolveConnection } from './query.js';
+
+/**
+ * Export table data to CSV or JSON
+ */
+export async function exportData(target, tableName, options) {
+  let db;
+
+  try {
+    const connectionString = await resolveConnection(target);
+    db = await createConnection(connectionString);
+
+    // Query all data
+    const rows = await db.query(`SELECT * FROM ${tableName}`);
+
+    if (rows.length === 0) {
+      console.log(chalk.yellow('No data to export'));
+      return;
+    }
+
+    const format = options.format || 'json';
+    const output = options.output || `${tableName}.${format}`;
+
+    let content;
+
+    if (format === 'json') {
+      content = JSON.stringify(rows, null, 2);
+    } else if (format === 'csv') {
+      content = convertToCSV(rows);
+    } else {
+      throw new Error(`Unsupported format: ${format}`);
+    }
+
+    fs.writeFileSync(output, content);
+
+    console.log(chalk.green(`✓ Exported ${rows.length} rows to ${output}`));
+
+  } catch (err) {
+    console.error(chalk.red(`✗ Export failed: ${err.message}`));
+    process.exit(1);
+  } finally {
+    if (db) {
+      await db.disconnect();
+    }
+  }
+}
+
+/**
+ * Convert rows to CSV
+ */
+function convertToCSV(rows) {
+  if (rows.length === 0) return '';
+
+  const columns = Object.keys(rows[0]);
+  const header = columns.map(escapeCSV).join(',');
+
+  const lines = rows.map(row => {
+    return columns.map(col => {
+      const val = row[col];
+      if (val === null) return '';
+      return escapeCSV(String(val));
+    }).join(',');
+  });
+
+  return [header, ...lines].join('\n');
+}
+
+/**
+ * Escape CSV field
+ */
+function escapeCSV(field) {
+  if (field.includes(',') || field.includes('"') || field.includes('\n')) {
+    return `"${field.replace(/"/g, '""')}"`;
+  }
+  return field;
+}
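The CSV path is the only non-trivial part of the exporter: fields containing commas, quotes, or newlines are wrapped in double quotes with embedded quotes doubled, and `null` becomes an empty field. A small standalone check of that behaviour, using `escapeCSV` verbatim and a condensed equivalent of `convertToCSV` on made-up rows:

```js
// Standalone illustration of the CSV escaping rules in data.js (sample rows are made up).
function escapeCSV(field) {
  if (field.includes(',') || field.includes('"') || field.includes('\n')) {
    return `"${field.replace(/"/g, '""')}"`;
  }
  return field;
}

function convertToCSV(rows) {
  if (rows.length === 0) return '';
  const columns = Object.keys(rows[0]);
  const header = columns.map(escapeCSV).join(',');
  const lines = rows.map(row =>
    columns.map(col => (row[col] === null ? '' : escapeCSV(String(row[col])))).join(',')
  );
  return [header, ...lines].join('\n');
}

console.log(convertToCSV([
  { id: 1, name: 'Ada, Countess', note: 'said "hi"' },
  { id: 2, name: 'Bob', note: null }
]));
// id,name,note
// 1,"Ada, Countess","said ""hi"""
// 2,Bob,
```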
package/src/commands/migrate.js
ADDED
@@ -0,0 +1,318 @@
+import fs from 'fs';
+import path from 'path';
+import chalk from 'chalk';
+import Table from 'cli-table3';
+import { createConnection } from '../db/connection.js';
+import { resolveConnection } from './query.js';
+
+const MIGRATIONS_DIR = './migrations';
+
+/**
+ * Initialize migrations directory
+ */
+export async function initMigrations() {
+  if (fs.existsSync(MIGRATIONS_DIR)) {
+    console.log(chalk.yellow('Migrations directory already exists'));
+    return;
+  }
+
+  fs.mkdirSync(MIGRATIONS_DIR, { recursive: true });
+
+  // Create README
+  const readme = `# Database Migrations
+
+This directory contains database migration files.
+
+## File naming convention
+
+Migrations are named: \`YYYYMMDD_HHMMSS_description.sql\`
+
+Example: \`20260215_143022_create_users_table.sql\`
+
+## Migration format
+
+Each migration file should contain SQL statements:
+
+\`\`\`sql
+-- Up migration
+CREATE TABLE users (
+  id INTEGER PRIMARY KEY,
+  name TEXT NOT NULL,
+  email TEXT UNIQUE NOT NULL
+);
+
+-- Down migration (optional, after -- DOWN marker)
+-- DOWN
+DROP TABLE users;
+\`\`\`
+
+## Commands
+
+- \`mpx-db migrate create <description>\` - Create new migration
+- \`mpx-db migrate status <connection>\` - Show migration status
+- \`mpx-db migrate up <connection>\` - Run pending migrations
+- \`mpx-db migrate down <connection>\` - Rollback last migration
+`;
+
+  fs.writeFileSync(path.join(MIGRATIONS_DIR, 'README.md'), readme);
+
+  console.log(chalk.green('✓ Migrations directory created'));
+  console.log(chalk.gray(` ${MIGRATIONS_DIR}/`));
+}
+
+/**
+ * Create new migration file
+ */
+export async function createMigration(description) {
+  if (!fs.existsSync(MIGRATIONS_DIR)) {
+    console.log(chalk.yellow('Migrations directory not found. Run: mpx-db migrate init'));
+    return;
+  }
+
+  // Generate timestamp-based filename
+  const timestamp = new Date()
+    .toISOString()
+    .replace(/[-:]/g, '')
+    .replace(/\..+/, '')
+    .replace('T', '_');
+
+  const slug = description
+    .toLowerCase()
+    .replace(/[^a-z0-9]+/g, '_')
+    .replace(/^_|_$/g, '');
+
+  const filename = `${timestamp}_${slug}.sql`;
+  const filepath = path.join(MIGRATIONS_DIR, filename);
+
+  const template = `-- Migration: ${description}
+-- Created: ${new Date().toISOString()}
+
+-- Up migration
+-- Write your SQL here
+
+
+-- Down migration (rollback)
+-- DOWN
+
+`;
+
+  fs.writeFileSync(filepath, template);
+
+  console.log(chalk.green('✓ Created migration'));
+  console.log(chalk.gray(` ${filepath}`));
+}
+
+/**
+ * Get all migration files
+ */
+function getMigrationFiles() {
+  if (!fs.existsSync(MIGRATIONS_DIR)) {
+    return [];
+  }
+
+  return fs.readdirSync(MIGRATIONS_DIR)
+    .filter(f => f.endsWith('.sql'))
+    .sort();
+}
+
+/**
+ * Parse migration file (split up/down)
+ */
+function parseMigration(filepath) {
+  const content = fs.readFileSync(filepath, 'utf8');
+  const parts = content.split(/^--\s*DOWN\s*$/m);
+
+  return {
+    up: parts[0].trim(),
+    down: parts[1]?.trim() || null
+  };
+}
+
+/**
+ * Show migration status
+ */
+export async function showMigrationStatus(target) {
+  let db;
+
+  try {
+    const connectionString = await resolveConnection(target);
+    db = await createConnection(connectionString);
+
+    // Ensure migrations table exists
+    await db.ensureMigrationsTable();
+
+    // Get applied migrations
+    const applied = await db.getAppliedMigrations();
+    const appliedNames = new Set(applied.map(m => m.name));
+
+    // Get migration files
+    const files = getMigrationFiles();
+
+    if (files.length === 0) {
+      console.log(chalk.yellow('No migration files found'));
+      console.log(chalk.gray('Create one with: mpx-db migrate create <description>'));
+      return;
+    }
+
+    const table = new Table({
+      head: ['Migration', 'Status', 'Applied'].map(h => chalk.cyan(h)),
+      style: { head: [], border: ['gray'] }
+    });
+
+    for (const file of files) {
+      const name = file.replace('.sql', '');
+      const isApplied = appliedNames.has(name);
+      const appliedRecord = applied.find(m => m.name === name);
+
+      table.push([
+        chalk.white(name),
+        isApplied ? chalk.green('Applied') : chalk.yellow('Pending'),
+        appliedRecord ? chalk.gray(new Date(appliedRecord.applied_at).toLocaleString()) : chalk.gray('-')
+      ]);
+    }
+
+    console.log(table.toString());
+
+    const pending = files.filter(f => !appliedNames.has(f.replace('.sql', '')));
+    console.log(chalk.gray(`\n${applied.length} applied, ${pending.length} pending`));
+
+  } catch (err) {
+    console.error(chalk.red(`✗ ${err.message}`));
+    process.exit(1);
+  } finally {
+    if (db) {
+      await db.disconnect();
+    }
+  }
+}
+
+/**
+ * Run pending migrations
+ */
+export async function runMigrations(target) {
+  let db;
+
+  try {
+    const connectionString = await resolveConnection(target);
+    db = await createConnection(connectionString);
+
+    await db.ensureMigrationsTable();
+
+    const applied = await db.getAppliedMigrations();
+    const appliedNames = new Set(applied.map(m => m.name));
+
+    const files = getMigrationFiles();
+    const pending = files.filter(f => !appliedNames.has(f.replace('.sql', '')));
+
+    if (pending.length === 0) {
+      console.log(chalk.green('✓ All migrations up to date'));
+      return;
+    }
+
+    console.log(chalk.cyan(`Running ${pending.length} migration(s)...\n`));
+
+    for (const file of pending) {
+      const name = file.replace('.sql', '');
+      const filepath = path.join(MIGRATIONS_DIR, file);
+      const migration = parseMigration(filepath);
+
+      console.log(chalk.gray(`→ ${name}`));
+
+      // Execute migration (split by semicolon for multiple statements)
+      // Remove comment lines first, then split
+      const cleanedSQL = migration.up
+        .split('\n')
+        .filter(line => !line.trim().startsWith('--'))
+        .join('\n');
+
+      const statements = cleanedSQL
+        .split(';')
+        .map(s => s.trim())
+        .filter(s => s.length > 0);
+
+      for (const statement of statements) {
+        await db.execute(statement);
+      }
+
+      await db.recordMigration(name);
+
+      console.log(chalk.green(`  ✓ Applied`));
+    }
+
+    console.log(chalk.green(`\n✓ ${pending.length} migration(s) applied`));
+
+  } catch (err) {
+    console.error(chalk.red(`\n✗ Migration failed: ${err.message}`));
+    process.exit(1);
+  } finally {
+    if (db) {
+      await db.disconnect();
+    }
+  }
+}
+
+/**
+ * Rollback last migration
+ */
+export async function rollbackMigration(target) {
+  let db;
+
+  try {
+    const connectionString = await resolveConnection(target);
+    db = await createConnection(connectionString);
+
+    await db.ensureMigrationsTable();
+
+    const applied = await db.getAppliedMigrations();
+
+    if (applied.length === 0) {
+      console.log(chalk.yellow('No migrations to rollback'));
+      return;
+    }
+
+    const last = applied[applied.length - 1];
+    const filepath = path.join(MIGRATIONS_DIR, `${last.name}.sql`);
+
+    if (!fs.existsSync(filepath)) {
+      console.error(chalk.red(`✗ Migration file not found: ${filepath}`));
+      process.exit(1);
+    }
+
+    const migration = parseMigration(filepath);
+
+    if (!migration.down) {
+      console.error(chalk.red(`✗ No down migration found in ${last.name}`));
+      process.exit(1);
+    }
+
+    console.log(chalk.cyan(`Rolling back: ${last.name}`));
+
+    // Execute rollback (split by semicolon for multiple statements)
+    // Remove comment lines first, then split
+    const cleanedSQL = migration.down
+      .split('\n')
+      .filter(line => !line.trim().startsWith('--'))
+      .join('\n');
+
+    const statements = cleanedSQL
+      .split(';')
+      .map(s => s.trim())
+      .filter(s => s.length > 0);
+
+    for (const statement of statements) {
+      await db.execute(statement);
+    }
+
+    await db.removeMigration(last.name);
+
+    console.log(chalk.green('✓ Rolled back'));
+
+  } catch (err) {
+    console.error(chalk.red(`✗ Rollback failed: ${err.message}`));
+    process.exit(1);
+  } finally {
+    if (db) {
+      await db.disconnect();
+    }
+  }
+}
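The migration runner only needs five things from a connection object: `ensureMigrationsTable`, `getAppliedMigrations` (returning `{ name, applied_at }` records in applied order), `recordMigration`, `removeMigration`, and `execute`, plus `disconnect`. The real implementations live in `src/db/base-adapter.js` and the per-engine adapters, which this diff does not expand; the sketch below is a hypothetical in-memory stand-in only meant to make that assumed contract concrete:

```js
// Hypothetical in-memory adapter showing the interface migrate.js relies on.
// The package's real adapters (src/db/*-adapter.js) are not shown in this diff.
class FakeAdapter {
  constructor() {
    this.migrations = []; // stands in for a _migrations table
  }

  async ensureMigrationsTable() {
    // a real adapter would CREATE TABLE IF NOT EXISTS here
  }

  async getAppliedMigrations() {
    // migrate.js reads m.name and m.applied_at, in applied order
    return [...this.migrations];
  }

  async recordMigration(name) {
    this.migrations.push({ name, applied_at: new Date().toISOString() });
  }

  async removeMigration(name) {
    this.migrations = this.migrations.filter(m => m.name !== name);
  }

  async execute(statement) {
    // a real adapter runs the SQL statement against the database
    console.log('executing:', statement.slice(0, 60));
  }

  async disconnect() {}
}

export { FakeAdapter };
```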
package/src/commands/query.js
ADDED
@@ -0,0 +1,93 @@
+import chalk from 'chalk';
+import Table from 'cli-table3';
+import { getConnection } from '../utils/config.js';
+import { createConnection } from '../db/connection.js';
+
+/**
+ * Execute a query
+ */
+export async function handleQuery(target, sql, options) {
+  let db;
+
+  try {
+    // Get connection string
+    const connectionString = await resolveConnection(target);
+
+    // Connect
+    db = await createConnection(connectionString);
+
+    // Execute query
+    const startTime = Date.now();
+    const rows = await db.query(sql);
+    const duration = Date.now() - startTime;
+
+    // Display results
+    if (rows.length === 0) {
+      console.log(chalk.yellow('No rows returned'));
+    } else {
+      displayTable(rows);
+    }
+
+    console.log(chalk.gray(`\n${rows.length} row(s) in ${duration}ms`));
+
+  } catch (err) {
+    console.error(chalk.red('✗ Query failed'));
+    console.error(chalk.red(` ${err.message}`));
+    process.exit(1);
+  } finally {
+    if (db) {
+      await db.disconnect();
+    }
+  }
+}
+
+/**
+ * Display query results as table
+ */
+function displayTable(rows) {
+  if (rows.length === 0) return;
+
+  const columns = Object.keys(rows[0]);
+
+  const table = new Table({
+    head: columns.map(c => chalk.cyan(c)),
+    style: { head: [], border: ['gray'] }
+  });
+
+  for (const row of rows) {
+    table.push(
+      columns.map(col => {
+        const val = row[col];
+        if (val === null) return chalk.gray('NULL');
+        if (typeof val === 'number') return chalk.yellow(val.toString());
+        return val.toString();
+      })
+    );
+  }
+
+  console.log(table.toString());
+}
+
+/**
+ * Resolve connection string from target (name or URL)
+ */
+async function resolveConnection(target) {
+  // Check if it's a saved connection name
+  const saved = getConnection(target);
+  if (saved) {
+    return saved.url;
+  }
+
+  // Check if it looks like a connection URL
+  if (target.includes('://')) {
+    return target;
+  }
+
+  throw new Error(
+    `Connection "${target}" not found.\n` +
+    ` Use a connection URL or save a connection with:\n` +
+    ` mpx-db connect --save ${target} <url>`
+  );
+}
+
+export { resolveConnection };
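`resolveConnection` is the shared lookup used by `query`, `export`, and the `migrate` subcommands: a saved connection name wins, anything containing `://` passes through as a raw URL, and everything else is an error. A tiny standalone illustration of that precedence (the stubbed `getConnection` and its sample data are hypothetical):

```js
// Illustration of resolveConnection's precedence; getConnection is stubbed with fake data.
const savedConnections = { dev: { url: 'sqlite://./dev.db' } };
const getConnection = name => savedConnections[name] || null;

async function resolveConnection(target) {
  const saved = getConnection(target);
  if (saved) return saved.url;                           // 1. saved connection name
  if (target.includes('://')) return target;             // 2. raw connection URL
  throw new Error(`Connection "${target}" not found.`);  // 3. otherwise, error
}

console.log(await resolveConnection('dev'));                       // sqlite://./dev.db
console.log(await resolveConnection('postgres://localhost/app'));  // postgres://localhost/app
await resolveConnection('prod').catch(e => console.log(e.message)); // Connection "prod" not found.
```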