@stonyx/orm 0.2.1-alpha.2 → 0.2.1-alpha.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/code-style-rules.md +44 -0
- package/.claude/hooks.md +250 -0
- package/.claude/index.md +292 -0
- package/.claude/usage-patterns.md +300 -0
- package/.claude/views.md +292 -0
- package/.github/workflows/ci.yml +5 -25
- package/.github/workflows/publish.yml +24 -116
- package/README.md +461 -15
- package/config/environment.js +29 -6
- package/improvements.md +139 -0
- package/package.json +24 -8
- package/project-structure.md +343 -0
- package/scripts/setup-test-db.sh +21 -0
- package/src/aggregates.js +93 -0
- package/src/belongs-to.js +4 -1
- package/src/commands.js +170 -0
- package/src/db.js +132 -6
- package/src/has-many.js +4 -1
- package/src/hooks.js +124 -0
- package/src/index.js +12 -2
- package/src/main.js +77 -4
- package/src/manage-record.js +30 -4
- package/src/migrate.js +72 -0
- package/src/model-property.js +2 -2
- package/src/model.js +11 -0
- package/src/mysql/connection.js +28 -0
- package/src/mysql/migration-generator.js +286 -0
- package/src/mysql/migration-runner.js +110 -0
- package/src/mysql/mysql-db.js +473 -0
- package/src/mysql/query-builder.js +64 -0
- package/src/mysql/schema-introspector.js +325 -0
- package/src/mysql/type-map.js +37 -0
- package/src/orm-request.js +313 -53
- package/src/plural-registry.js +12 -0
- package/src/record.js +35 -8
- package/src/serializer.js +9 -2
- package/src/setup-rest-server.js +5 -2
- package/src/store.js +130 -1
- package/src/utils.js +1 -1
- package/src/view-resolver.js +183 -0
- package/src/view.js +21 -0
- package/test-events-setup.js +41 -0
- package/test-hooks-manual.js +54 -0
- package/test-hooks-with-logging.js +52 -0
- package/.claude/project-structure.md +0 -578
- package/stonyx-bootstrap.cjs +0 -30
package/src/manage-record.js
CHANGED
|
@@ -14,6 +14,11 @@ export function createRecord(modelName, rawData={}, userOptions={}) {
|
|
|
14
14
|
|
|
15
15
|
if (!initialized && !options.isDbRecord) throw new Error('ORM is not ready');
|
|
16
16
|
|
|
17
|
+
// Guard: read-only views cannot have records created directly
|
|
18
|
+
if (orm?.isView?.(modelName) && !options.isDbRecord) {
|
|
19
|
+
throw new Error(`Cannot create records for read-only view '${modelName}'`);
|
|
20
|
+
}
|
|
21
|
+
|
|
17
22
|
const modelStore = store.get(modelName);
|
|
18
23
|
const globalRelationships = relationships.get('global');
|
|
19
24
|
const pendingRelationships = relationships.get('pending');
|
|
@@ -83,6 +88,12 @@ export function createRecord(modelName, rawData={}, userOptions={}) {
|
|
|
83
88
|
export function updateRecord(record, rawData, userOptions={}) {
|
|
84
89
|
if (!rawData) throw new Error('rawData must be passed in to updateRecord call');
|
|
85
90
|
|
|
91
|
+
// Guard: read-only views cannot be updated
|
|
92
|
+
const modelName = record?.__model?.__name;
|
|
93
|
+
if (modelName && Orm.instance?.isView?.(modelName)) {
|
|
94
|
+
throw new Error(`Cannot update records for read-only view '${modelName}'`);
|
|
95
|
+
}
|
|
96
|
+
|
|
86
97
|
const options = { ...defaultOptions, ...userOptions, update:true };
|
|
87
98
|
|
|
88
99
|
record.serialize(rawData, options);
|
|
@@ -90,14 +101,29 @@ export function updateRecord(record, rawData, userOptions={}) {
|
|
|
90
101
|
|
|
91
102
|
/**
|
|
92
103
|
* gets the next available id based on last record entry.
|
|
93
|
-
*
|
|
94
|
-
*
|
|
95
|
-
*
|
|
96
|
-
* record is created
|
|
104
|
+
*
|
|
105
|
+
* In MySQL mode with numeric IDs, assigns a temporary pending ID.
|
|
106
|
+
* MySQL's AUTO_INCREMENT provides the real ID after INSERT.
|
|
97
107
|
*/
|
|
98
108
|
function assignRecordId(modelName, rawData) {
  // Caller-provided ids always win.
  if (rawData.id) return;

  // In MySQL mode with numeric IDs, defer to MySQL auto-increment:
  // hand out a temporary pending id; the real id arrives after INSERT.
  if (Orm.instance?.mysqlDb && !isStringIdModel(modelName)) {
    rawData.id = `__pending_${Date.now()}_${Math.random()}`;
    rawData.__pendingMysqlId = true;
    return;
  }

  // In-memory mode: next id = last record's id + 1 (or 1 for an empty store).
  const records = [...store.get(modelName).values()];
  const lastRecord = records.at(-1);

  rawData.id = lastRecord ? lastRecord.id + 1 : 1;
}
|
|
121
|
+
|
|
122
|
+
// Reports whether the given model declares a string-typed `id` attribute
// (such models keep client-assigned ids even in MySQL mode).
function isStringIdModel(modelName) {
  const { modelClass } = Orm.instance.getRecordClasses(modelName);

  if (!modelClass) return false;

  return new modelClass(modelName).id?.type === 'string';
}
|
package/src/migrate.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import config from 'stonyx/config';
|
|
2
|
+
import Orm from '@stonyx/orm';
|
|
3
|
+
import { createFile, createDirectory, readFile, updateFile, deleteDirectory } from '@stonyx/utils/file';
|
|
4
|
+
import { dbKey } from './db.js';
|
|
5
|
+
import path from 'path';
|
|
6
|
+
|
|
7
|
+
// Derives the collection keys from the schema model class, skipping the
// internal `__name` marker and the `id` attribute; only function-valued
// properties count as collections.
function getCollectionKeys() {
  const SchemaClass = Orm.instance.models[`${dbKey}Model`];
  const instance = new SchemaClass();

  return Object.keys(instance).filter(key =>
    key !== '__name' && key !== 'id' && typeof instance[key] === 'function'
  );
}
|
|
19
|
+
|
|
20
|
+
// Resolves the per-collection directory, which lives next to the
// configured db file under the configured directory name.
function getDirPath() {
  const { file, directory } = config.orm.db;
  const dbDir = path.dirname(path.resolve(`${config.rootPath}/${file}`));

  return path.join(dbDir, directory);
}
|
|
27
|
+
|
|
28
|
+
/**
 * Splits the single db.json file into one JSON file per collection,
 * then resets db.json to an empty-array skeleton.
 */
export async function fileToDirectory() {
  const dbFilePath = path.resolve(`${config.rootPath}/${config.orm.db.file}`);
  const collectionKeys = getCollectionKeys();
  const dirPath = getDirPath();

  // Read full data from db.json
  const data = await readFile(dbFilePath, { json: true });

  // Create directory and write each collection
  await createDirectory(dirPath);

  const writes = collectionKeys.map(key => {
    const target = path.join(dirPath, `${key}.json`);
    return createFile(target, data[key] || [], { json: true });
  });

  await Promise.all(writes);

  // Overwrite db.json with empty-array skeleton
  const skeleton = Object.fromEntries(collectionKeys.map(key => [key, []]));

  await updateFile(dbFilePath, skeleton, { json: true });
}
|
|
51
|
+
|
|
52
|
+
/**
 * Reassembles per-collection JSON files back into a single db.json,
 * then deletes the collection directory.
 */
export async function directoryToFile() {
  const dbFilePath = path.resolve(`${config.rootPath}/${config.orm.db.file}`);
  const collectionKeys = getCollectionKeys();
  const dirPath = getDirPath();

  // Read each collection from the directory (in parallel)
  const assembled = {};

  await Promise.all(collectionKeys.map(async key => {
    assembled[key] = await readFile(path.join(dirPath, `${key}.json`), { json: true });
  }));

  // Overwrite db.json with full assembled data
  await updateFile(dbFilePath, assembled, { json: true });

  // Remove the directory
  await deleteDirectory(dirPath);
}
|
package/src/model-property.js
CHANGED
|
@@ -22,8 +22,8 @@ export default class ModelProperty {
|
|
|
22
22
|
return this._value = newValue;
|
|
23
23
|
}
|
|
24
24
|
|
|
25
|
-
if (newValue === undefined
|
|
25
|
+
if (newValue === undefined) return;
|
|
26
26
|
|
|
27
|
-
this._value = Orm.instance.transforms[this.type](newValue);
|
|
27
|
+
this._value = newValue === null ? null : Orm.instance.transforms[this.type](newValue);
|
|
28
28
|
}
|
|
29
29
|
}
|
package/src/model.js
CHANGED
|
@@ -1,6 +1,17 @@
|
|
|
1
1
|
import { attr } from '@stonyx/orm';
|
|
2
2
|
|
|
3
3
|
export default class Model {
|
|
4
|
+
/**
|
|
5
|
+
* Controls whether records of this model are loaded into memory on startup.
|
|
6
|
+
*
|
|
7
|
+
* - true → loaded on boot, kept in store (default for backward compatibility)
|
|
8
|
+
* - false → never cached; find() always queries MySQL
|
|
9
|
+
*
|
|
10
|
+
* Override in subclass: static memory = false;
|
|
11
|
+
*/
|
|
12
|
+
static memory = true;
|
|
13
|
+
static pluralName = undefined;
|
|
14
|
+
|
|
4
15
|
id = attr('number');
|
|
5
16
|
|
|
6
17
|
constructor(name) {
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
let pool = null;
|
|
2
|
+
|
|
3
|
+
/**
 * Lazily creates and memoizes a single mysql2 connection pool.
 *
 * @param {object} mysqlConfig - host, port, user, password, database, connectionLimit
 * @returns {Promise<object>} the shared mysql2 pool
 */
export async function getPool(mysqlConfig) {
  if (pool) return pool;

  const mysql = await import('mysql2/promise');

  // Re-check after the await: a concurrent caller may have finished
  // creating the pool while the dynamic import was pending. Without this
  // guard, two pools could be created and one of them leaked.
  if (pool) return pool;

  pool = mysql.createPool({
    host: mysqlConfig.host,
    port: mysqlConfig.port,
    user: mysqlConfig.user,
    password: mysqlConfig.password,
    database: mysqlConfig.database,
    connectionLimit: mysqlConfig.connectionLimit,
    waitForConnections: true,
    enableKeepAlive: true,
    keepAliveInitialDelay: 10000,
  });

  return pool;
}
|
|
22
|
+
|
|
23
|
+
// Shuts down the shared pool if one exists, then clears the memo so a
// later getPool() call creates a fresh pool.
export async function closePool() {
  if (pool == null) return;

  await pool.end();
  pool = null;
}
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
import { introspectModels, introspectViews, buildTableDDL, buildViewDDL, schemasToSnapshot, viewSchemasToSnapshot, getTopologicalOrder } from './schema-introspector.js';
|
|
2
|
+
import { readFile, createFile, createDirectory, fileExists } from '@stonyx/utils/file';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import config from 'stonyx/config';
|
|
5
|
+
import log from 'stonyx/log';
|
|
6
|
+
|
|
7
|
+
/**
 * Generates a timestamped SQL migration (UP/DOWN sections) by diffing the
 * current model and view schemas against the last saved snapshot, then
 * writes both the migration file and the updated `.snapshot.json`.
 *
 * Fixes vs. previous revision:
 * - The saved snapshot mixes table and view entries, but the table diff
 *   compared it against a tables-only snapshot, so every saved view was
 *   falsely reported as a removed model. Views are now filtered out first.
 * - Views were introspected and diffed twice; now computed once up front.
 * - The success log interpolated a garbled placeholder instead of the
 *   generated filename.
 *
 * @param {string} [description='migration'] - sanitized into the filename
 * @returns {Promise<{filename: string, content: string, snapshot: object}|null>}
 *   null when neither tables nor views changed
 */
export async function generateMigration(description = 'migration') {
  const { migrationsDir } = config.orm.mysql;
  const migrationsPath = path.resolve(config.rootPath, migrationsDir);

  await createDirectory(migrationsPath);

  const schemas = introspectModels();
  const currentSnapshot = schemasToSnapshot(schemas);
  const previousSnapshot = await loadLatestSnapshot(migrationsPath);

  // Compare tables against tables only — the saved snapshot also holds
  // view entries (flagged with isView), which must not appear as
  // "removed models" in the table diff.
  const previousTableSnapshot = {};
  for (const [name, entry] of Object.entries(previousSnapshot)) {
    if (!entry.isView) previousTableSnapshot[name] = entry;
  }

  const diff = diffSnapshots(previousTableSnapshot, currentSnapshot);

  // Views are introspected and diffed exactly once.
  const viewSchemas = introspectViews();
  const currentViewSnapshot = viewSchemasToSnapshot(viewSchemas);
  const previousViewSnapshot = extractViewsFromSnapshot(previousSnapshot);
  const viewDiff = diffViewSnapshots(previousViewSnapshot, currentViewSnapshot);

  if (!diff.hasChanges && !viewDiff.hasChanges) {
    log.db('No schema changes detected.');
    return null;
  }

  const upStatements = [];
  const downStatements = [];

  // New tables — in topological order (parents before children)
  const allOrder = getTopologicalOrder(schemas);
  const addedOrdered = allOrder.filter(name => diff.addedModels.includes(name));

  for (const name of addedOrdered) {
    upStatements.push(buildTableDDL(name, schemas[name], schemas) + ';');
    downStatements.unshift(`DROP TABLE IF EXISTS \`${schemas[name].table}\`;`);
  }

  // Removed tables (warn only, commented out)
  for (const name of diff.removedModels) {
    upStatements.push(`-- WARNING: Model '${name}' was removed. Uncomment to drop table:`);
    upStatements.push(`-- DROP TABLE IF EXISTS \`${previousSnapshot[name].table}\`;`);
    downStatements.push(`-- Recreate table for removed model '${name}' manually if needed`);
  }

  // Added columns
  for (const { model, column, type } of diff.addedColumns) {
    const table = currentSnapshot[model].table;
    upStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${type};`);
    downStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
  }

  // Removed columns
  for (const { model, column, type } of diff.removedColumns) {
    const table = previousSnapshot[model].table;
    upStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
    downStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${type};`);
  }

  // Changed column types
  for (const { model, column, from, to } of diff.changedColumns) {
    const table = currentSnapshot[model].table;
    upStatements.push(`ALTER TABLE \`${table}\` MODIFY COLUMN \`${column}\` ${to};`);
    downStatements.push(`ALTER TABLE \`${table}\` MODIFY COLUMN \`${column}\` ${from};`);
  }

  // Added foreign keys
  for (const { model, column, references } of diff.addedForeignKeys) {
    const table = currentSnapshot[model].table;
    // Resolve FK column type from the referenced table's PK type
    const refModel = Object.entries(currentSnapshot).find(([, s]) => s.table === references.references);
    const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INT';
    upStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${fkType};`);
    upStatements.push(`ALTER TABLE \`${table}\` ADD FOREIGN KEY (\`${column}\`) REFERENCES \`${references.references}\`(\`${references.column}\`) ON DELETE SET NULL;`);
    downStatements.push(`ALTER TABLE \`${table}\` DROP FOREIGN KEY \`${column}\`;`);
    downStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
  }

  // Removed foreign keys
  for (const { model, column, references } of diff.removedForeignKeys) {
    const table = previousSnapshot[model].table;
    // Resolve FK column type from the referenced table's PK type in previous snapshot
    const refModel = Object.entries(previousSnapshot).find(([, s]) => s.table === references.references);
    const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INT';
    upStatements.push(`ALTER TABLE \`${table}\` DROP FOREIGN KEY \`${column}\`;`);
    upStatements.push(`ALTER TABLE \`${table}\` DROP COLUMN \`${column}\`;`);
    downStatements.push(`ALTER TABLE \`${table}\` ADD COLUMN \`${column}\` ${fkType};`);
    downStatements.push(`ALTER TABLE \`${table}\` ADD FOREIGN KEY (\`${column}\`) REFERENCES \`${references.references}\`(\`${references.column}\`) ON DELETE SET NULL;`);
  }

  // View migrations — views are created AFTER tables (dependency order)
  if (viewDiff.hasChanges) {
    upStatements.push('');
    upStatements.push('-- Views');
    downStatements.push('');
    downStatements.push('-- Views');

    // Added views
    for (const name of viewDiff.addedViews) {
      try {
        upStatements.push(buildViewDDL(name, viewSchemas[name], schemas) + ';');
        downStatements.unshift(`DROP VIEW IF EXISTS \`${viewSchemas[name].viewName}\`;`);
      } catch (error) {
        upStatements.push(`-- WARNING: Could not generate DDL for view '${name}': ${error.message}`);
      }
    }

    // Removed views
    for (const name of viewDiff.removedViews) {
      upStatements.push(`-- WARNING: View '${name}' was removed. Uncomment to drop view:`);
      upStatements.push(`-- DROP VIEW IF EXISTS \`${previousViewSnapshot[name].viewName}\`;`);
      downStatements.push(`-- Recreate view for removed view '${name}' manually if needed`);
    }

    // Changed views (source or aggregates changed)
    for (const name of viewDiff.changedViews) {
      try {
        upStatements.push(buildViewDDL(name, viewSchemas[name], schemas) + ';');
      } catch (error) {
        upStatements.push(`-- WARNING: Could not generate DDL for changed view '${name}': ${error.message}`);
      }
    }
  }

  // Merge view snapshot into the main snapshot
  const combinedSnapshot = { ...currentSnapshot, ...currentViewSnapshot };

  const sanitizedDescription = description.replace(/\s+/g, '_').replace(/[^a-zA-Z0-9_]/g, '');
  const timestamp = Math.floor(Date.now() / 1000);
  const filename = `${timestamp}_${sanitizedDescription}.sql`;
  const content = `-- UP\n${upStatements.join('\n')}\n\n-- DOWN\n${downStatements.join('\n')}\n`;

  await createFile(path.join(migrationsPath, filename), content);
  await createFile(path.join(migrationsPath, '.snapshot.json'), JSON.stringify(combinedSnapshot, null, 2));

  log.db(`Migration generated: ${filename}`);

  return { filename, content, snapshot: combinedSnapshot };
}
|
|
163
|
+
|
|
164
|
+
/**
 * Loads `.snapshot.json` from the migrations directory.
 * Returns an empty object when no snapshot has been written yet.
 */
export async function loadLatestSnapshot(migrationsPath) {
  const snapshotPath = path.join(migrationsPath, '.snapshot.json');

  if (!(await fileExists(snapshotPath))) return {};

  return readFile(snapshotPath, { json: true });
}
|
|
172
|
+
|
|
173
|
+
/**
 * Computes the structural difference between two schema snapshots:
 * added/removed models, added/removed/type-changed columns, and
 * added/removed foreign keys on models present in both snapshots.
 *
 * @param {object} previous - snapshot keyed by model name
 * @param {object} current - snapshot keyed by model name
 * @returns {object} diff buckets plus a `hasChanges` summary flag
 */
export function diffSnapshots(previous, current) {
  const addedModels = Object.keys(current).filter(name => !previous[name]);
  const removedModels = Object.keys(previous).filter(name => !current[name]);

  const addedColumns = [];
  const removedColumns = [];
  const changedColumns = [];
  const addedForeignKeys = [];
  const removedForeignKeys = [];

  // Column and FK changes only apply to models present in both snapshots.
  for (const [name, currEntry] of Object.entries(current)) {
    const prevEntry = previous[name];
    if (!prevEntry) continue;

    const { columns: prevCols = {} } = prevEntry;
    const { columns: currCols = {} } = currEntry;

    for (const [column, type] of Object.entries(currCols)) {
      if (!prevCols[column]) {
        addedColumns.push({ model: name, column, type });
      } else if (prevCols[column] !== type) {
        changedColumns.push({ model: name, column, from: prevCols[column], to: type });
      }
    }

    for (const [column, type] of Object.entries(prevCols)) {
      if (!currCols[column]) removedColumns.push({ model: name, column, type });
    }

    const prevFKs = prevEntry.foreignKeys || {};
    const currFKs = currEntry.foreignKeys || {};

    for (const [column, references] of Object.entries(currFKs)) {
      if (!prevFKs[column]) addedForeignKeys.push({ model: name, column, references });
    }

    for (const [column, references] of Object.entries(prevFKs)) {
      if (!currFKs[column]) removedForeignKeys.push({ model: name, column, references });
    }
  }

  const buckets = [
    addedModels, removedModels, addedColumns, removedColumns,
    changedColumns, addedForeignKeys, removedForeignKeys,
  ];
  const hasChanges = buckets.some(bucket => bucket.length > 0);

  return {
    hasChanges,
    addedModels,
    removedModels,
    addedColumns,
    removedColumns,
    changedColumns,
    addedForeignKeys,
    removedForeignKeys,
  };
}
|
|
247
|
+
|
|
248
|
+
// Convenience wrapper: drift = diff between the saved snapshot and the
// snapshot derived from the live model schemas.
export function detectSchemaDrift(schemas, snapshot) {
  return diffSnapshots(snapshot, schemasToSnapshot(schemas));
}
|
|
252
|
+
|
|
253
|
+
// Filters a combined snapshot down to only the entries flagged as views.
export function extractViewsFromSnapshot(snapshot) {
  return Object.fromEntries(
    Object.entries(snapshot).filter(([, entry]) => entry.isView)
  );
}
|
|
260
|
+
|
|
261
|
+
/**
 * Diffs two view snapshots. A view counts as changed when its
 * `viewQuery` or `source` differs from the previous snapshot.
 *
 * @returns {{hasChanges: boolean, addedViews: string[], removedViews: string[], changedViews: string[]}}
 */
export function diffViewSnapshots(previous, current) {
  const addedViews = [];
  const changedViews = [];

  for (const [name, entry] of Object.entries(current)) {
    const prev = previous[name];

    if (!prev) {
      addedViews.push(name);
      continue;
    }

    if (entry.viewQuery !== prev.viewQuery || entry.source !== prev.source) {
      changedViews.push(name);
    }
  }

  const removedViews = Object.keys(previous).filter(name => !current[name]);

  const hasChanges = addedViews.length > 0 || removedViews.length > 0 || changedViews.length > 0;

  return { hasChanges, addedViews, removedViews, changedViews };
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { readFile, fileExists } from '@stonyx/utils/file';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import fs from 'fs/promises';
|
|
4
|
+
|
|
5
|
+
/**
 * Creates the migrations bookkeeping table if it does not exist.
 *
 * @param {object} pool - mysql2 pool (anything exposing `.execute(sql)`)
 * @param {string} [tableName='__migrations'] - bookkeeping table name
 */
export async function ensureMigrationsTable(pool, tableName = '__migrations') {
  const ddl = `
    CREATE TABLE IF NOT EXISTS \`${tableName}\` (
      id INT AUTO_INCREMENT PRIMARY KEY,
      filename VARCHAR(255) NOT NULL UNIQUE,
      applied_at DATETIME DEFAULT CURRENT_TIMESTAMP
    )
  `;

  await pool.execute(ddl);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Returns filenames of already-applied migrations, oldest first.
 *
 * @param {object} pool - mysql2 pool
 * @param {string} [tableName='__migrations']
 * @returns {Promise<string[]>}
 */
export async function getAppliedMigrations(pool, tableName = '__migrations') {
  const sql = `SELECT filename FROM \`${tableName}\` ORDER BY id ASC`;
  const [rows] = await pool.execute(sql);

  return rows.map(({ filename }) => filename);
}
|
|
22
|
+
|
|
23
|
+
/**
 * Lists `.sql` migration files in the directory, sorted by filename
 * (names begin with a unix timestamp, so lexical sort == chronological).
 * Returns [] when the directory does not exist.
 */
export async function getMigrationFiles(migrationsDir) {
  if (!(await fileExists(migrationsDir))) return [];

  const entries = await fs.readdir(migrationsDir);
  const sqlFiles = entries.filter(name => name.endsWith('.sql'));

  return sqlFiles.sort();
}
|
|
33
|
+
|
|
34
|
+
/**
 * Splits migration file content into its `-- UP` and `-- DOWN` sections.
 * Content without an `-- UP` marker is treated as all-up with no down.
 *
 * @param {string} content - raw migration file text
 * @returns {{up: string, down: string}}
 */
export function parseMigrationFile(content) {
  const UP = '-- UP';
  const DOWN = '-- DOWN';

  const upIndex = content.indexOf(UP);
  if (upIndex === -1) return { up: content.trim(), down: '' };

  const downIndex = content.indexOf(DOWN);
  const hasDown = downIndex !== -1;

  const up = content.slice(upIndex + UP.length, hasDown ? downIndex : content.length).trim();
  const down = hasDown ? content.slice(downIndex + DOWN.length).trim() : '';

  return { up, down };
}
|
|
51
|
+
|
|
52
|
+
/**
 * Applies one migration inside a transaction: executes each statement of
 * the UP SQL, then records the filename in the bookkeeping table.
 * Rolls back and rethrows on any failure; always releases the connection.
 *
 * NOTE(review): MySQL DDL statements trigger implicit commits, so the
 * transaction only fully protects DML statements — confirm acceptable.
 */
export async function applyMigration(pool, filename, upSql, tableName = '__migrations') {
  const connection = await pool.getConnection();

  try {
    await connection.beginTransaction();

    // Execute each statement separately (split on semicolons)
    for (const statement of splitStatements(upSql)) {
      await connection.execute(statement);
    }

    await connection.execute(
      `INSERT INTO \`${tableName}\` (filename) VALUES (?)`,
      [filename]
    );

    await connection.commit();
  } catch (error) {
    await connection.rollback();
    throw error;
  } finally {
    connection.release();
  }
}
|
|
78
|
+
|
|
79
|
+
/**
 * Reverts one migration inside a transaction: executes each statement of
 * the DOWN SQL, then deletes the bookkeeping row for the filename.
 * Rolls back and rethrows on any failure; always releases the connection.
 */
export async function rollbackMigration(pool, filename, downSql, tableName = '__migrations') {
  const connection = await pool.getConnection();

  try {
    await connection.beginTransaction();

    for (const statement of splitStatements(downSql)) {
      await connection.execute(statement);
    }

    await connection.execute(
      `DELETE FROM \`${tableName}\` WHERE filename = ?`,
      [filename]
    );

    await connection.commit();
  } catch (error) {
    await connection.rollback();
    throw error;
  } finally {
    connection.release();
  }
}
|
|
104
|
+
|
|
105
|
+
/**
 * Splits a SQL script into executable statements.
 *
 * Fix: the previous version dropped any statement whose trimmed text
 * STARTED with a `--` comment line, silently discarding real SQL that
 * followed the comment (e.g. the generator's "-- Views" header line
 * directly above a CREATE VIEW). Comment lines are now stripped within
 * each statement instead of discarding the whole statement.
 *
 * NOTE(review): splitting on ';' is naive — a semicolon inside a string
 * literal would split incorrectly; acceptable for generator-emitted SQL.
 *
 * @param {string} sql
 * @returns {string[]} non-empty, comment-free statements
 */
function splitStatements(sql) {
  return sql
    .split(';')
    .map(chunk => chunk
      .split('\n')
      .filter(line => !line.trim().startsWith('--'))
      .join('\n')
      .trim())
    .filter(stmt => stmt.length > 0);
}
|