@stonyx/orm 0.2.1-beta.2 → 0.2.1-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/code-style-rules.md +44 -0
- package/.claude/hooks.md +250 -0
- package/.claude/index.md +279 -0
- package/.claude/usage-patterns.md +217 -0
- package/.github/workflows/publish.yml +17 -1
- package/README.md +424 -15
- package/config/environment.js +26 -5
- package/improvements.md +139 -0
- package/package.json +19 -8
- package/project-structure.md +343 -0
- package/src/commands.js +170 -0
- package/src/db.js +132 -6
- package/src/hooks.js +124 -0
- package/src/index.js +2 -1
- package/src/main.js +31 -2
- package/src/manage-record.js +19 -4
- package/src/migrate.js +72 -0
- package/src/mysql/connection.js +28 -0
- package/src/mysql/migration-generator.js +188 -0
- package/src/mysql/migration-runner.js +110 -0
- package/src/mysql/mysql-db.js +320 -0
- package/src/mysql/query-builder.js +64 -0
- package/src/mysql/schema-introspector.js +158 -0
- package/src/mysql/type-map.js +37 -0
- package/src/orm-request.js +306 -52
- package/src/record.js +35 -8
- package/src/serializer.js +2 -2
- package/src/setup-rest-server.js +4 -1
- package/src/utils.js +12 -0
- package/test-events-setup.js +41 -0
- package/test-hooks-manual.js +54 -0
- package/test-hooks-with-logging.js +52 -0
- package/.claude/project-structure.md +0 -578
- package/stonyx-bootstrap.cjs +0 -30
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
import { introspectModels, buildTableDDL, schemasToSnapshot, getTopologicalOrder } from './schema-introspector.js';
|
|
2
|
+
import { readFile, createFile, createDirectory, fileExists } from '@stonyx/utils/file';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import config from 'stonyx/config';
|
|
5
|
+
import log from 'stonyx/log';
|
|
6
|
+
|
|
7
|
+
/**
 * Generate a SQL migration file by diffing the current model schemas
 * against the last saved snapshot (.snapshot.json).
 *
 * Writes `<unix-timestamp>_<description>.sql` containing `-- UP` and
 * `-- DOWN` sections into the configured migrations directory, then
 * refreshes the snapshot file to match the current schemas.
 *
 * @param {string} [description='migration'] - Human-readable label; sanitized into the filename.
 * @returns {Promise<{filename: string, content: string, snapshot: Object}|null>}
 *   The generated migration, or null when no schema changes are detected.
 */
export async function generateMigration(description = 'migration') {
  const { migrationsDir } = config.orm.mysql;
  const rootPath = config.rootPath;
  const migrationsPath = path.resolve(rootPath, migrationsDir);

  await createDirectory(migrationsPath);

  const schemas = introspectModels();
  const currentSnapshot = schemasToSnapshot(schemas);
  const previousSnapshot = await loadLatestSnapshot(migrationsPath);
  const diff = diffSnapshots(previousSnapshot, currentSnapshot);

  if (!diff.hasChanges) {
    log.db('No schema changes detected.');
    return null;
  }

  const upStatements = [];
  const downStatements = [];

  // New tables — in topological order (parents before children).
  // DOWN drops are unshifted so children are dropped before parents.
  const allOrder = getTopologicalOrder(schemas);
  const addedOrdered = allOrder.filter(name => diff.addedModels.includes(name));

  for (const name of addedOrdered) {
    upStatements.push(buildTableDDL(name, schemas[name], schemas) + ';');
    downStatements.unshift(`DROP TABLE IF EXISTS \`${schemas[name].table}\`;`);
  }

  // Removed tables (warn only, commented out — never drop data silently)
  for (const name of diff.removedModels) {
    upStatements.push(`-- WARNING: Model '${name}' was removed. Uncomment to drop table:`);
    upStatements.push(`-- DROP TABLE IF EXISTS \`${previousSnapshot[name].table}\`;`);
    downStatements.push(`-- Recreate table for removed model '${name}' manually if needed`);
  }

  // Added columns
  for (const { model, column, type } of diff.addedColumns) {
    const table = currentSnapshot[model].table;
    upStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${type};`);
    downStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
  }

  // Removed columns
  for (const { model, column, type } of diff.removedColumns) {
    const table = previousSnapshot[model].table;
    upStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
    downStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${type};`);
  }

  // Changed column types
  for (const { model, column, from, to } of diff.changedColumns) {
    const table = currentSnapshot[model].table;
    upStatements.push(`ALTER TABLE \`${table}\` MODIFY COLUMN \`${column}\` ${to};`);
    downStatements.push(`ALTER TABLE \`${table}\` MODIFY COLUMN \`${column}\` ${from};`);
  }

  // Added foreign keys
  for (const { model, column, references } of diff.addedForeignKeys) {
    const table = currentSnapshot[model].table;
    // Resolve FK column type from the referenced table's PK type
    const refModel = Object.entries(currentSnapshot).find(([, s]) => s.table === references.references);
    const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INT';
    upStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${fkType};`);
    upStatements.push(`ALTER TABLE \`${table}\` ADD FOREIGN KEY (\`${column}\`) REFERENCES \`${references.references}\`(\`${references.column}\`) ON DELETE SET NULL;`);
    // NOTE(review): DROP FOREIGN KEY takes a *constraint* name, not a column
    // name. The ADD FOREIGN KEY above lets MySQL auto-generate the constraint
    // name (e.g. `<table>_ibfk_1`), so this DOWN statement only works if the
    // constraint happens to share the column's name — consider switching to
    // `ADD CONSTRAINT \`fk_<table>_<column>\` FOREIGN KEY ...` and verify
    // buildTableDDL names its constraints the same way.
    downStatements.push(`ALTER TABLE \`${table}\` DROP FOREIGN KEY \`${column}\`;`);
    downStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
  }

  // Removed foreign keys
  for (const { model, column, references } of diff.removedForeignKeys) {
    const table = previousSnapshot[model].table;
    // Resolve FK column type from the referenced table's PK type in previous snapshot
    const refModel = Object.entries(previousSnapshot).find(([, s]) => s.table === references.references);
    const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INT';
    // NOTE(review): same constraint-name caveat as above applies here.
    upStatements.push(`ALTER TABLE \`${table}\` DROP FOREIGN KEY \`${column}\`;`);
    upStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
    downStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${fkType};`);
    downStatements.push(`ALTER TABLE \`${table}\` ADD FOREIGN KEY (\`${column}\`) REFERENCES \`${references.references}\`(\`${references.column}\`) ON DELETE SET NULL;`);
  }

  // Filename: unix timestamp prefix keeps lexical order == chronological order.
  const sanitizedDescription = description.replace(/\s+/g, '_').replace(/[^a-zA-Z0-9_]/g, '');
  const timestamp = Math.floor(Date.now() / 1000);
  const filename = `${timestamp}_${sanitizedDescription}.sql`;
  const content = `-- UP\n${upStatements.join('\n')}\n\n-- DOWN\n${downStatements.join('\n')}\n`;

  await createFile(path.join(migrationsPath, filename), content);
  await createFile(path.join(migrationsPath, '.snapshot.json'), JSON.stringify(currentSnapshot, null, 2));

  // FIX: was a malformed template literal (`$(unknown)`); log the real filename.
  log.db(`Migration generated: ${filename}`);

  return { filename, content, snapshot: currentSnapshot };
}
|
|
100
|
+
|
|
101
|
+
/**
 * Load the most recent schema snapshot (.snapshot.json) from the
 * migrations directory.
 *
 * @param {string} migrationsPath - Absolute path to the migrations directory.
 * @returns {Promise<Object>} Parsed snapshot, or {} when none has been written yet.
 */
export async function loadLatestSnapshot(migrationsPath) {
  const snapshotPath = path.join(migrationsPath, '.snapshot.json');

  if (!(await fileExists(snapshotPath))) {
    return {};
  }

  return readFile(snapshotPath, { json: true });
}
|
|
109
|
+
|
|
110
|
+
/**
 * Compute the structural difference between two schema snapshots.
 *
 * Snapshot shape (per model): { table, columns: {name: type}, foreignKeys: {col: refs}, ... }.
 *
 * Detects: added/removed models, added/removed/type-changed columns, and
 * added/removed foreign keys. A foreign key whose target changed (same
 * column, different `references`) is now reported as a removal of the old
 * definition plus an addition of the new one (previously it was silently
 * ignored).
 *
 * @param {Object} previous - Snapshot from the last migration.
 * @param {Object} current - Snapshot of the live model schemas.
 * @returns {{hasChanges: boolean, addedModels: string[], removedModels: string[],
 *   addedColumns: Array, removedColumns: Array, changedColumns: Array,
 *   addedForeignKeys: Array, removedForeignKeys: Array}}
 */
export function diffSnapshots(previous, current) {
  const addedModels = [];
  const removedModels = [];
  const addedColumns = [];
  const removedColumns = [];
  const changedColumns = [];
  const addedForeignKeys = [];
  const removedForeignKeys = [];

  // Find added models
  for (const name of Object.keys(current)) {
    if (!previous[name]) addedModels.push(name);
  }

  // Find removed models
  for (const name of Object.keys(previous)) {
    if (!current[name]) removedModels.push(name);
  }

  // Find column changes in existing models
  for (const name of Object.keys(current)) {
    if (!previous[name]) continue;

    const { columns: prevCols = {} } = previous[name];
    const { columns: currCols = {} } = current[name];

    // Added / type-changed columns
    for (const [col, type] of Object.entries(currCols)) {
      if (!prevCols[col]) {
        addedColumns.push({ model: name, column: col, type });
      } else if (prevCols[col] !== type) {
        changedColumns.push({ model: name, column: col, from: prevCols[col], to: type });
      }
    }

    // Removed columns
    for (const [col, type] of Object.entries(prevCols)) {
      if (!currCols[col]) {
        removedColumns.push({ model: name, column: col, type });
      }
    }

    // Foreign key changes
    const prevFKs = previous[name].foreignKeys || {};
    const currFKs = current[name].foreignKeys || {};

    for (const [col, refs] of Object.entries(currFKs)) {
      const prevRefs = prevFKs[col];

      if (!prevRefs) {
        addedForeignKeys.push({ model: name, column: col, references: refs });
      } else if (JSON.stringify(prevRefs) !== JSON.stringify(refs)) {
        // FK retargeted: surface it as drop-old + add-new so the
        // migration generator emits both statements.
        removedForeignKeys.push({ model: name, column: col, references: prevRefs });
        addedForeignKeys.push({ model: name, column: col, references: refs });
      }
    }

    for (const [col, refs] of Object.entries(prevFKs)) {
      if (!currFKs[col]) {
        removedForeignKeys.push({ model: name, column: col, references: refs });
      }
    }
  }

  const hasChanges = addedModels.length > 0 || removedModels.length > 0 ||
    addedColumns.length > 0 || removedColumns.length > 0 ||
    changedColumns.length > 0 || addedForeignKeys.length > 0 || removedForeignKeys.length > 0;

  return {
    hasChanges,
    addedModels,
    removedModels,
    addedColumns,
    removedColumns,
    changedColumns,
    addedForeignKeys,
    removedForeignKeys,
  };
}
|
|
184
|
+
|
|
185
|
+
/**
 * Compare live model schemas against a previously saved snapshot.
 *
 * @param {Object} schemas - Output of introspectModels().
 * @param {Object} snapshot - Snapshot loaded from .snapshot.json.
 * @returns {Object} Diff in the same shape diffSnapshots() returns.
 */
export function detectSchemaDrift(schemas, snapshot) {
  return diffSnapshots(snapshot, schemasToSnapshot(schemas));
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { readFile, fileExists } from '@stonyx/utils/file';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import fs from 'fs/promises';
|
|
4
|
+
|
|
5
|
+
/**
 * Create the migrations bookkeeping table if it does not already exist.
 *
 * Each applied migration gets one row; `filename` is UNIQUE so the same
 * migration cannot be recorded twice.
 *
 * @param {Object} pool - mysql2-style pool exposing execute().
 * @param {string} [tableName='__migrations'] - Name of the bookkeeping table.
 */
export async function ensureMigrationsTable(pool, tableName = '__migrations') {
  await pool.execute(`
    CREATE TABLE IF NOT EXISTS \`${tableName}\` (
      id INT AUTO_INCREMENT PRIMARY KEY,
      filename VARCHAR(255) NOT NULL UNIQUE,
      applied_at DATETIME DEFAULT CURRENT_TIMESTAMP
    )
  `);
}
|
|
14
|
+
|
|
15
|
+
/**
 * List the filenames of migrations already applied, oldest first.
 *
 * @param {Object} pool - mysql2-style pool exposing execute().
 * @param {string} [tableName='__migrations'] - Name of the bookkeeping table.
 * @returns {Promise<string[]>} Applied migration filenames in application order.
 */
export async function getAppliedMigrations(pool, tableName = '__migrations') {
  const sql = `SELECT filename FROM \`${tableName}\` ORDER BY id ASC`;
  const [rows] = await pool.execute(sql);

  return rows.map(({ filename }) => filename);
}
|
|
22
|
+
|
|
23
|
+
/**
 * List .sql migration files in a directory, sorted by filename.
 * Filenames start with a unix timestamp, so lexical order is
 * chronological order.
 *
 * @param {string} migrationsDir - Absolute path to the migrations directory.
 * @returns {Promise<string[]>} Sorted .sql filenames; [] when the directory is absent.
 */
export async function getMigrationFiles(migrationsDir) {
  if (!(await fileExists(migrationsDir))) {
    return [];
  }

  const entries = await fs.readdir(migrationsDir);
  const sqlFiles = entries.filter(name => name.endsWith('.sql'));

  return sqlFiles.sort();
}
|
|
33
|
+
|
|
34
|
+
/**
 * Split a migration file into its UP and DOWN SQL sections.
 *
 * Sections are delimited by `-- UP` and `-- DOWN` marker comments:
 *  - both markers: text between them is UP, text after `-- DOWN` is DOWN;
 *  - neither marker: the whole file is UP;
 *  - only `-- DOWN`: everything before it is UP (previously the DOWN
 *    section leaked into UP and the DOWN SQL was lost).
 *
 * @param {string} content - Raw migration file text.
 * @returns {{up: string, down: string}} Trimmed SQL for each direction.
 */
export function parseMigrationFile(content) {
  const upMarker = '-- UP';
  const downMarker = '-- DOWN';
  const upIndex = content.indexOf(upMarker);
  const downIndex = content.indexOf(downMarker);

  const down = downIndex !== -1 ? content.slice(downIndex + downMarker.length).trim() : '';

  if (upIndex === -1) {
    // No UP marker: treat everything before the DOWN marker (or the whole file) as UP.
    const upOnly = downIndex !== -1 ? content.slice(0, downIndex) : content;
    return { up: upOnly.trim(), down };
  }

  const upStart = upIndex + upMarker.length;
  const upEnd = downIndex !== -1 ? downIndex : content.length;

  return { up: content.slice(upStart, upEnd).trim(), down };
}
|
|
51
|
+
|
|
52
|
+
/**
 * Apply one migration's UP SQL inside a transaction and record the
 * filename in the bookkeeping table. Rolls back and re-throws on any error.
 *
 * NOTE(review): MySQL DDL statements cause an implicit commit, so the
 * transaction only fully protects DML statements — confirm this is acceptable.
 *
 * @param {Object} pool - mysql2-style pool exposing getConnection().
 * @param {string} filename - Migration filename to record as applied.
 * @param {string} upSql - The UP section's SQL text.
 * @param {string} [tableName='__migrations'] - Bookkeeping table name.
 */
export async function applyMigration(pool, filename, upSql, tableName = '__migrations') {
  const connection = await pool.getConnection();

  try {
    await connection.beginTransaction();

    // Run each semicolon-delimited statement on its own.
    for (const statement of splitStatements(upSql)) {
      await connection.execute(statement);
    }

    // Mark the migration as applied.
    await connection.execute(
      `INSERT INTO \`${tableName}\` (filename) VALUES (?)`,
      [filename]
    );

    await connection.commit();
  } catch (error) {
    await connection.rollback();
    throw error;
  } finally {
    connection.release();
  }
}
|
|
78
|
+
|
|
79
|
+
/**
 * Run one migration's DOWN SQL inside a transaction and remove the
 * filename from the bookkeeping table. Rolls back and re-throws on any error.
 *
 * @param {Object} pool - mysql2-style pool exposing getConnection().
 * @param {string} filename - Migration filename to un-record.
 * @param {string} downSql - The DOWN section's SQL text.
 * @param {string} [tableName='__migrations'] - Bookkeeping table name.
 */
export async function rollbackMigration(pool, filename, downSql, tableName = '__migrations') {
  const connection = await pool.getConnection();

  try {
    await connection.beginTransaction();

    // Run each semicolon-delimited statement on its own.
    for (const statement of splitStatements(downSql)) {
      await connection.execute(statement);
    }

    // Un-record the migration so it can be re-applied later.
    await connection.execute(
      `DELETE FROM \`${tableName}\` WHERE filename = ?`,
      [filename]
    );

    await connection.commit();
  } catch (error) {
    await connection.rollback();
    throw error;
  } finally {
    connection.release();
  }
}
|
|
104
|
+
|
|
105
|
+
/**
 * Split raw SQL into individual executable statements.
 *
 * Statements are separated by `;`. Comment lines (starting with `--`) are
 * stripped from each chunk rather than discarding the whole chunk, so a
 * statement preceded by a comment on its own line is still executed
 * (previously the entire chunk — including its SQL — was dropped).
 *
 * NOTE(review): this does not understand semicolons inside string
 * literals; generated migrations never contain them, but hand-written
 * ones might.
 *
 * @param {string} sql - Raw SQL text (may contain comments and blank lines).
 * @returns {string[]} Non-empty statements with full-line comments removed.
 */
function splitStatements(sql) {
  return sql
    .split(';')
    .map(chunk =>
      chunk
        .split('\n')
        .filter(line => !line.trim().startsWith('--'))
        .join('\n')
        .trim()
    )
    .filter(statement => statement.length > 0);
}
|
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
import { getPool, closePool } from './connection.js';
|
|
2
|
+
import { ensureMigrationsTable, getAppliedMigrations, getMigrationFiles, applyMigration, parseMigrationFile } from './migration-runner.js';
|
|
3
|
+
import { introspectModels, getTopologicalOrder, schemasToSnapshot } from './schema-introspector.js';
|
|
4
|
+
import { loadLatestSnapshot, detectSchemaDrift } from './migration-generator.js';
|
|
5
|
+
import { buildInsert, buildUpdate, buildDelete, buildSelect } from './query-builder.js';
|
|
6
|
+
import { createRecord, store } from '@stonyx/orm';
|
|
7
|
+
import { confirm } from '@stonyx/utils/prompt';
|
|
8
|
+
import { readFile } from '@stonyx/utils/file';
|
|
9
|
+
import { pluralize } from '../utils.js';
|
|
10
|
+
import config from 'stonyx/config';
|
|
11
|
+
import log from 'stonyx/log';
|
|
12
|
+
import path from 'path';
|
|
13
|
+
|
|
14
|
+
// Default collaborators for MysqlDB. Every external touchpoint is listed
// here so the class can be unit-tested by injecting overrides.
const defaultDeps = {
  // connection lifecycle
  getPool,
  closePool,
  // migration bookkeeping and files
  ensureMigrationsTable,
  getAppliedMigrations,
  getMigrationFiles,
  applyMigration,
  parseMigrationFile,
  // schema introspection and drift detection
  introspectModels,
  getTopologicalOrder,
  schemasToSnapshot,
  loadLatestSnapshot,
  detectSchemaDrift,
  // parameterized SQL builders
  buildInsert,
  buildUpdate,
  buildDelete,
  buildSelect,
  // ORM store and shared utilities
  createRecord,
  store,
  confirm,
  readFile,
  pluralize,
  config,
  log,
  path,
};
|
|
22
|
+
|
|
23
|
+
/**
 * MySQL persistence adapter for the Stonyx ORM.
 *
 * Implemented as a singleton: the first construction wins and later
 * `new MysqlDB()` calls return that same instance (their `deps` are ignored).
 *
 * Lifecycle: init() → startup() → ... → shutdown().
 * Writes go through persist(), which mirrors in-memory store operations to
 * MySQL immediately — save() is therefore a no-op.
 */
export default class MysqlDB {
  /**
   * @param {Object} [deps={}] - Collaborator overrides (see defaultDeps).
   *   NOTE: ignored when the singleton instance already exists.
   */
  constructor(deps = {}) {
    if (MysqlDB.instance) return MysqlDB.instance;
    MysqlDB.instance = this;

    this.deps = { ...defaultDeps, ...deps };
    this.pool = null;
    this.mysqlConfig = this.deps.config.orm.mysql;
  }

  /** Open the pool, ensure the bookkeeping table exists, and hydrate the store. */
  async init() {
    this.pool = await this.deps.getPool(this.mysqlConfig);
    await this.deps.ensureMigrationsTable(this.pool, this.mysqlConfig.migrationsTable);
    await this.loadAllRecords();
  }

  /**
   * Interactive startup checks:
   *  1. Apply pending migrations (after user confirmation).
   *  2. Offer to generate + apply an initial migration when none exist.
   *  3. Warn about schema drift between models and the latest snapshot.
   */
  async startup() {
    const migrationsPath = this.deps.path.resolve(this.deps.config.rootPath, this.mysqlConfig.migrationsDir);

    // Check for pending migrations
    const applied = await this.deps.getAppliedMigrations(this.pool, this.mysqlConfig.migrationsTable);
    const files = await this.deps.getMigrationFiles(migrationsPath);
    const pending = files.filter(f => !applied.includes(f));

    if (pending.length > 0) {
      this.deps.log.db(`${pending.length} pending migration(s) found.`);

      const shouldApply = await this.deps.confirm(`${pending.length} pending migration(s) found. Apply now?`);

      if (shouldApply) {
        for (const filename of pending) {
          const content = await this.deps.readFile(this.deps.path.join(migrationsPath, filename));
          const { up } = this.deps.parseMigrationFile(content);

          await this.deps.applyMigration(this.pool, filename, up, this.mysqlConfig.migrationsTable);
          // FIX: was a malformed template literal (`$(unknown)`); log the real filename.
          this.deps.log.db(`Applied migration: ${filename}`);
        }

        // Reload records after applying migrations
        await this.loadAllRecords();
      } else {
        this.deps.log.warn('Skipping pending migrations. Schema may be outdated.');
      }
    } else if (files.length === 0) {
      const schemas = this.deps.introspectModels();
      const modelCount = Object.keys(schemas).length;

      if (modelCount > 0) {
        const shouldGenerate = await this.deps.confirm(
          `No migrations found but ${modelCount} model(s) detected. Generate and apply initial migration?`
        );

        if (shouldGenerate) {
          // Imported lazily — presumably to avoid a circular dependency at
          // module load time (migration-generator imports from this package).
          const { generateMigration } = await import('./migration-generator.js');
          const result = await generateMigration('initial_setup');

          if (result) {
            const { up } = this.deps.parseMigrationFile(result.content);
            await this.deps.applyMigration(this.pool, result.filename, up, this.mysqlConfig.migrationsTable);
            this.deps.log.db(`Applied migration: ${result.filename}`);
            await this.loadAllRecords();
          }
        } else {
          this.deps.log.warn('Skipping initial migration. Tables may not exist.');
        }
      }
    }

    // Check for schema drift
    const schemas = this.deps.introspectModels();
    const snapshot = await this.deps.loadLatestSnapshot(this.deps.path.resolve(this.deps.config.rootPath, this.mysqlConfig.migrationsDir));

    if (Object.keys(snapshot).length > 0) {
      const drift = this.deps.detectSchemaDrift(schemas, snapshot);

      if (drift.hasChanges) {
        this.deps.log.warn('Schema drift detected: models have changed since the last migration.');
        this.deps.log.warn('Run `stonyx db:generate-migration` to create a new migration.');
      }
    }
  }

  /** Close the connection pool and clear the reference. */
  async shutdown() {
    await this.deps.closePool();
    this.pool = null;
  }

  async save() {
    // No-op: MySQL persists data immediately via persist()
  }

  /**
   * Load every row of every model into the in-memory store, in topological
   * order so parent records exist before children reference them.
   * Tables that do not exist yet (pre-migration) are skipped gracefully.
   */
  async loadAllRecords() {
    const schemas = this.deps.introspectModels();
    const order = this.deps.getTopologicalOrder(schemas);

    for (const modelName of order) {
      const schema = schemas[modelName];
      const { sql, values } = this.deps.buildSelect(schema.table);

      try {
        const [rows] = await this.pool.execute(sql, values);

        for (const row of rows) {
          const rawData = this._rowToRawData(row, schema);
          this.deps.createRecord(modelName, rawData, { isDbRecord: true, serialize: false, transform: false });
        }
      } catch (error) {
        // Table may not exist yet (pre-migration) — skip gracefully
        if (error.code === 'ER_NO_SUCH_TABLE') {
          this.deps.log.db(`Table '${schema.table}' does not exist yet. Skipping load for '${modelName}'.`);
          continue;
        }

        throw error;
      }
    }
  }

  /**
   * Convert a raw MySQL row into the plain data shape createRecord expects:
   * TINYINT(1) → boolean, JSON columns parsed, FK columns renamed to their
   * relationship keys, MySQL-managed timestamps removed.
   */
  _rowToRawData(row, schema) {
    const rawData = { ...row };

    for (const [col, mysqlType] of Object.entries(schema.columns)) {
      if (rawData[col] == null) continue;

      // Convert boolean columns from MySQL TINYINT(1) 0/1 to false/true
      if (mysqlType === 'TINYINT(1)') {
        rawData[col] = !!rawData[col];
      }

      // Parse JSON columns back to JS values (custom transforms stored as JSON)
      if (mysqlType === 'JSON' && typeof rawData[col] === 'string') {
        try { rawData[col] = JSON.parse(rawData[col]); } catch { /* keep raw string */ }
      }
    }

    // Map FK columns back to relationship keys
    // e.g., owner_id → owner (the belongsTo handler expects the id value under the relationship key name)
    for (const fkCol of Object.keys(schema.foreignKeys)) {
      const relName = fkCol.replace(/_id$/, '');

      if (rawData[fkCol] !== undefined) {
        rawData[relName] = rawData[fkCol];
        delete rawData[fkCol];
      }
    }

    // Remove timestamp columns — managed by MySQL
    delete rawData.created_at;
    delete rawData.updated_at;

    return rawData;
  }

  /**
   * Mirror one store operation to MySQL.
   *
   * @param {'create'|'update'|'delete'} operation - Store operation to mirror.
   * @param {string} modelName - Model the operation applies to.
   * @param {Object} context - Operation context (record / recordId / oldState).
   * @param {Object} [response] - API response; on create it may be mutated
   *   with the MySQL-generated ID.
   */
  async persist(operation, modelName, context, response) {
    switch (operation) {
      case 'create':
        return this._persistCreate(modelName, context, response);
      case 'update':
        return this._persistUpdate(modelName, context, response);
      case 'delete':
        return this._persistDelete(modelName, context);
    }
  }

  /**
   * INSERT a newly created record. For auto-increment models the pending
   * placeholder ID is dropped and the store is re-keyed to MySQL's insertId.
   */
  async _persistCreate(modelName, context, response) {
    const schemas = this.deps.introspectModels();
    const schema = schemas[modelName];

    if (!schema) return;

    const recordId = response?.data?.id;
    // Numeric string IDs are looked up as integers; non-numeric IDs as-is.
    const record = recordId != null
      ? this.deps.store.get(modelName, Number.isNaN(Number(recordId)) ? recordId : Number.parseInt(recordId, 10))
      : null;

    if (!record) return;

    const insertData = this._recordToRow(record, schema);

    // For auto-increment models, drop the pending ID so MySQL assigns one;
    // otherwise keep the user-provided ID (string IDs or explicit numeric IDs).
    const isPendingId = record.__data.__pendingMysqlId;

    if (isPendingId) {
      delete insertData.id;
    }

    const { sql, values } = this.deps.buildInsert(schema.table, insertData);

    const [result] = await this.pool.execute(sql, values);

    // Re-key the record in the store if MySQL generated the ID
    if (isPendingId && result.insertId) {
      const pendingId = record.id;
      const realId = result.insertId;
      const modelStore = this.deps.store.get(modelName);

      modelStore.delete(pendingId);
      record.__data.id = realId;
      record.id = realId;
      modelStore.set(realId, record);

      // Update the response data with the real ID
      if (response?.data) {
        response.data.id = realId;
      }

      delete record.__data.__pendingMysqlId;
    }
  }

  /**
   * UPDATE only the columns (and FK columns) whose values differ from the
   * pre-update state captured in context.oldState. No-op when nothing changed.
   */
  async _persistUpdate(modelName, context, response) {
    const schemas = this.deps.introspectModels();
    const schema = schemas[modelName];

    if (!schema) return;

    const record = context.record;
    if (!record) return;

    const id = record.id;
    const oldState = context.oldState || {};
    const currentData = record.__data;

    // Build a diff of changed columns
    const changedData = {};

    for (const col of Object.keys(schema.columns)) {
      if (currentData[col] !== oldState[col]) {
        changedData[col] = currentData[col] ?? null;
      }
    }

    // Check FK changes too (compare loaded relationship IDs against old state)
    for (const fkCol of Object.keys(schema.foreignKeys)) {
      const relName = fkCol.replace(/_id$/, '');
      const currentFkValue = record.__relationships[relName]?.id ?? null;
      const oldFkValue = oldState[relName] ?? null;

      if (currentFkValue !== oldFkValue) {
        changedData[fkCol] = currentFkValue;
      }
    }

    if (Object.keys(changedData).length === 0) return;

    const { sql, values } = this.deps.buildUpdate(schema.table, id, changedData);
    await this.pool.execute(sql, values);
  }

  /** DELETE the row identified by context.recordId. */
  async _persistDelete(modelName, context) {
    const schemas = this.deps.introspectModels();
    const schema = schemas[modelName];

    if (!schema) return;

    const id = context.recordId;
    if (id == null) return;

    const { sql, values } = this.deps.buildDelete(schema.table, id);
    await this.pool.execute(sql, values);
  }

  /**
   * Convert a store record into a flat column → value row for INSERT:
   * the ID (when present), attribute columns (JSON columns stringified),
   * and FK columns resolved from loaded relationships or raw payload values.
   */
  _recordToRow(record, schema) {
    const row = {};
    const data = record.__data;

    // ID
    if (data.id !== undefined) {
      row.id = data.id;
    }

    // Attribute columns
    for (const [col, mysqlType] of Object.entries(schema.columns)) {
      if (data[col] !== undefined) {
        // JSON columns: stringify non-string values for MySQL JSON storage
        row[col] = mysqlType === 'JSON' && typeof data[col] !== 'string'
          ? JSON.stringify(data[col])
          : data[col];
      }
    }

    // FK columns from relationships
    for (const fkCol of Object.keys(schema.foreignKeys)) {
      const relName = fkCol.replace(/_id$/, '');
      const related = record.__relationships[relName];

      if (related) {
        row[fkCol] = related.id;
      } else if (data[relName] !== undefined) {
        // Raw FK value (e.g., from create payload)
        row[fkCol] = data[relName];
      }
    }

    return row;
  }
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// Whitelist for table/column names interpolated into SQL text.
// Values never go through this — they are bound via `?` placeholders.
const SAFE_IDENTIFIER = /^[a-zA-Z_][a-zA-Z0-9_-]*$/;

/**
 * Ensure a table or column name is safe to interpolate into a SQL string.
 *
 * @param {string} name - Candidate identifier.
 * @param {string} [context='identifier'] - Label used in the error message.
 * @returns {string} The validated name, unchanged.
 * @throws {Error} When the name is missing, not a string, or fails the whitelist.
 */
export function validateIdentifier(name, context = 'identifier') {
  const isValid = typeof name === 'string' && name.length > 0 && SAFE_IDENTIFIER.test(name);

  if (!isValid) {
    throw new Error(`Invalid SQL ${context}: "${name}". Identifiers must match ${SAFE_IDENTIFIER}`);
  }

  return name;
}
|
|
10
|
+
|
|
11
|
+
/**
 * Build a parameterized INSERT statement.
 *
 * @param {string} table - Table name (validated against the identifier whitelist).
 * @param {Object} data - Column → value map; every key is validated.
 * @returns {{sql: string, values: Array}} SQL with `?` placeholders plus bound values.
 * @throws {Error} When the table or any column name fails validation.
 */
export function buildInsert(table, data) {
  validateIdentifier(table, 'table name');

  const entries = Object.entries(data);
  entries.forEach(([column]) => validateIdentifier(column, 'column name'));

  const columnList = entries.map(([column]) => `\`${column}\``).join(', ');
  const placeholderList = entries.map(() => '?').join(', ');
  const values = entries.map(([, value]) => value);

  const sql = `INSERT INTO \`${table}\` (${columnList}) VALUES (${placeholderList})`;

  return { sql, values };
}
|
|
24
|
+
|
|
25
|
+
/**
 * Build a parameterized UPDATE statement targeting one row by `id`.
 *
 * @param {string} table - Table name (validated against the identifier whitelist).
 * @param {*} id - Primary key value; appended last to the bound values.
 * @param {Object} data - Column → new value map; every key is validated.
 * @returns {{sql: string, values: Array}} SQL with `?` placeholders plus bound values.
 * @throws {Error} When the table or any column name fails validation.
 */
export function buildUpdate(table, id, data) {
  validateIdentifier(table, 'table name');

  const entries = Object.entries(data);
  entries.forEach(([column]) => validateIdentifier(column, 'column name'));

  const assignments = entries.map(([column]) => `\`${column}\` = ?`).join(', ');
  const values = entries.map(([, value]) => value);
  values.push(id);

  return {
    sql: `UPDATE \`${table}\` SET ${assignments} WHERE \`id\` = ?`,
    values,
  };
}
|
|
38
|
+
|
|
39
|
+
/**
 * Build a parameterized DELETE statement targeting one row by `id`.
 *
 * @param {string} table - Table name (validated against the identifier whitelist).
 * @param {*} id - Primary key value to bind.
 * @returns {{sql: string, values: Array}} SQL with a `?` placeholder plus the bound id.
 * @throws {Error} When the table name fails validation.
 */
export function buildDelete(table, id) {
  validateIdentifier(table, 'table name');

  const sql = `DELETE FROM \`${table}\` WHERE \`id\` = ?`;

  return { sql, values: [id] };
}
|
|
47
|
+
|
|
48
|
+
/**
 * Build a parameterized SELECT statement, optionally filtered by equality
 * conditions joined with AND.
 *
 * @param {string} table - Table name (validated against the identifier whitelist).
 * @param {Object} [conditions] - Column → value equality filters; keys are validated.
 *   Omitted or empty → unfiltered `SELECT *`.
 * @returns {{sql: string, values: Array}} SQL with `?` placeholders plus bound values.
 * @throws {Error} When the table or any condition column fails validation.
 */
export function buildSelect(table, conditions) {
  validateIdentifier(table, 'table name');

  const entries = Object.entries(conditions ?? {});

  if (entries.length === 0) {
    return { sql: `SELECT * FROM \`${table}\``, values: [] };
  }

  entries.forEach(([column]) => validateIdentifier(column, 'column name'));

  const where = entries.map(([column]) => `\`${column}\` = ?`).join(' AND ');
  const values = entries.map(([, value]) => value);

  return { sql: `SELECT * FROM \`${table}\` WHERE ${where}`, values };
}
|