@stonyx/orm 0.2.1-beta.81 → 0.2.1-beta.82
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config/environment.js +17 -0
- package/package.json +7 -2
- package/src/main.js +6 -1
- package/src/postgres/connection.js +30 -0
- package/src/postgres/migration-generator.js +302 -0
- package/src/postgres/migration-runner.js +109 -0
- package/src/postgres/postgres-db.js +524 -0
- package/src/postgres/query-builder.js +149 -0
- package/src/postgres/schema-introspector.js +354 -0
- package/src/postgres/type-map.js +53 -0
package/config/environment.js
CHANGED
|
@@ -19,6 +19,13 @@ const {
|
|
|
19
19
|
MYSQL_DATABASE,
|
|
20
20
|
MYSQL_CONNECTION_LIMIT,
|
|
21
21
|
MYSQL_MIGRATIONS_DIR,
|
|
22
|
+
PG_HOST,
|
|
23
|
+
PG_PORT,
|
|
24
|
+
PG_USER,
|
|
25
|
+
PG_PASSWORD,
|
|
26
|
+
PG_DATABASE,
|
|
27
|
+
PG_CONNECTION_LIMIT,
|
|
28
|
+
PG_MIGRATIONS_DIR,
|
|
22
29
|
} = process.env;
|
|
23
30
|
|
|
24
31
|
export default {
|
|
@@ -50,6 +57,16 @@ export default {
|
|
|
50
57
|
migrationsDir: MYSQL_MIGRATIONS_DIR ?? 'migrations',
|
|
51
58
|
migrationsTable: '__migrations',
|
|
52
59
|
} : undefined,
|
|
60
|
+
postgres: PG_HOST ? {
|
|
61
|
+
host: PG_HOST ?? 'localhost',
|
|
62
|
+
port: parseInt(PG_PORT ?? '5432'),
|
|
63
|
+
user: PG_USER ?? 'postgres',
|
|
64
|
+
password: PG_PASSWORD ?? '',
|
|
65
|
+
database: PG_DATABASE ?? 'stonyx',
|
|
66
|
+
connectionLimit: parseInt(PG_CONNECTION_LIMIT ?? '10'),
|
|
67
|
+
migrationsDir: PG_MIGRATIONS_DIR ?? 'migrations',
|
|
68
|
+
migrationsTable: '__migrations',
|
|
69
|
+
} : undefined,
|
|
53
70
|
restServer: {
|
|
54
71
|
enabled: ORM_USE_REST_SERVER ?? 'true', // Whether to load restServer for automatic route setup or
|
|
55
72
|
route: ORM_REST_ROUTE ?? '/',
|
package/package.json
CHANGED
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
"stonyx-async",
|
|
5
5
|
"stonyx-module"
|
|
6
6
|
],
|
|
7
|
-
"version": "0.2.1-beta.
|
|
7
|
+
"version": "0.2.1-beta.82",
|
|
8
8
|
"description": "",
|
|
9
9
|
"main": "src/main.js",
|
|
10
10
|
"type": "module",
|
|
@@ -48,12 +48,16 @@
|
|
|
48
48
|
},
|
|
49
49
|
"peerDependencies": {
|
|
50
50
|
"@stonyx/rest-server": ">=0.2.1-beta.14",
|
|
51
|
-
"mysql2": "^3.0.0"
|
|
51
|
+
"mysql2": "^3.0.0",
|
|
52
|
+
"pg": "^8.0.0"
|
|
52
53
|
},
|
|
53
54
|
"peerDependenciesMeta": {
|
|
54
55
|
"mysql2": {
|
|
55
56
|
"optional": true
|
|
56
57
|
},
|
|
58
|
+
"pg": {
|
|
59
|
+
"optional": true
|
|
60
|
+
},
|
|
57
61
|
"@stonyx/rest-server": {
|
|
58
62
|
"optional": true
|
|
59
63
|
}
|
|
@@ -62,6 +66,7 @@
|
|
|
62
66
|
"@stonyx/rest-server": "0.2.1-beta.30",
|
|
63
67
|
"@stonyx/utils": "0.2.3-beta.7",
|
|
64
68
|
"mysql2": "^3.20.0",
|
|
69
|
+
"pg": "^8.20.0",
|
|
65
70
|
"qunit": "^2.24.1",
|
|
66
71
|
"sinon": "^21.0.0"
|
|
67
72
|
},
|
package/src/main.js
CHANGED
|
@@ -109,7 +109,12 @@ export default class Orm {
|
|
|
109
109
|
|
|
110
110
|
setup(eventNames);
|
|
111
111
|
|
|
112
|
-
if (config.orm.mysql) {
|
|
112
|
+
if (config.orm.postgres) {
|
|
113
|
+
const { default: PostgresDB } = await import('./postgres/postgres-db.js');
|
|
114
|
+
this.sqlDb = new PostgresDB();
|
|
115
|
+
this.db = this.sqlDb;
|
|
116
|
+
promises.push(this.sqlDb.init());
|
|
117
|
+
} else if (config.orm.mysql) {
|
|
113
118
|
const { default: MysqlDB } = await import('./mysql/mysql-db.js');
|
|
114
119
|
this.sqlDb = new MysqlDB();
|
|
115
120
|
this.db = this.sqlDb;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
// Module-level singleton: one shared pg.Pool per process, managed by
// getPool()/closePool().
let pool = null;

/**
 * Lazily create (or return the already-created) PostgreSQL connection pool.
 *
 * The `pg` driver is imported dynamically so it stays an optional peer
 * dependency — it is only loaded when Postgres is actually configured.
 * The pgvector extension is enabled as part of pool setup.
 *
 * FIX: previously the new pool was assigned to the module singleton *before*
 * the `CREATE EXTENSION` probe ran. If that query failed, the broken pool
 * stayed cached (every later call returned it) and was never closed. Now the
 * pool is only cached after setup succeeds, and is torn down on failure so a
 * later call can retry.
 *
 * NOTE(review): concurrent first calls can still each build a pool (the
 * singleton is only checked before the first await) — confirm callers
 * serialize initialization.
 *
 * @param {{host: string, port: number, user: string, password: string,
 *          database: string, connectionLimit: number}} pgConfig
 * @returns {Promise<import('pg').Pool>} The shared, ready-to-use pool.
 * @throws If `pg` cannot be loaded or the extension setup query fails.
 */
export async function getPool(pgConfig) {
  if (pool) return pool;

  const { default: pg } = await import('pg');

  const candidate = new pg.Pool({
    host: pgConfig.host,
    port: pgConfig.port,
    user: pgConfig.user,
    password: pgConfig.password,
    database: pgConfig.database,
    max: pgConfig.connectionLimit,
    idleTimeoutMillis: 30000,
    connectionTimeoutMillis: 10000,
  });

  try {
    // Enable pgvector extension
    await candidate.query('CREATE EXTENSION IF NOT EXISTS vector');
  } catch (error) {
    // Don't cache or leak a pool whose setup failed; best-effort teardown,
    // then surface the original error.
    await candidate.end().catch(() => {});
    throw error;
  }

  pool = candidate;
  return pool;
}
|
|
24
|
+
|
|
25
|
+
/**
 * Drain and discard the shared connection pool.
 *
 * No-op when no pool has been created. After a successful `end()`, the
 * singleton is reset so a subsequent getPool() builds a fresh pool.
 */
export async function closePool() {
  const active = pool;
  if (!active) return;

  await active.end();
  pool = null;
}
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
import { introspectModels, introspectViews, buildTableDDL, buildViewDDL, buildVectorIndexDDL, schemasToSnapshot, viewSchemasToSnapshot, getTopologicalOrder } from './schema-introspector.js';
|
|
2
|
+
import { readFile, createFile, createDirectory, fileExists } from '@stonyx/utils/file';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import config from 'stonyx/config';
|
|
5
|
+
import log from 'stonyx/log';
|
|
6
|
+
|
|
7
|
+
/**
 * Generate a SQL migration by diffing the current model and view schemas
 * against the last saved snapshot (`.snapshot.json` in the migrations dir).
 *
 * Writes `<unix-ts>_<description>.sql` with `-- UP` / `-- DOWN` sections and
 * refreshes the snapshot file to match the new state.
 *
 * Fixes vs. previous version:
 *  - the success log used a garbled template (`$(unknown)`) that never
 *    interpolated the migration filename;
 *  - views were introspected twice (once in a preliminary no-change probe,
 *    once in the main path); they are now introspected a single time and the
 *    redundant second "no changes" check is gone.
 *
 * @param {string} [description='migration'] - Human label; sanitized into the filename.
 * @returns {Promise<{filename: string, content: string, snapshot: object}|null>}
 *   The generated migration, or null when nothing changed.
 */
export async function generateMigration(description = 'migration') {
  const { migrationsDir } = config.orm.postgres;
  const migrationsPath = path.resolve(config.rootPath, migrationsDir);

  await createDirectory(migrationsPath);

  // Table diff: current model schemas vs. the last persisted snapshot.
  const schemas = introspectModels();
  const currentSnapshot = schemasToSnapshot(schemas);
  const previousSnapshot = await loadLatestSnapshot(migrationsPath);
  const diff = diffSnapshots(previousSnapshot, currentSnapshot);

  // View diff — computed once up front so the no-change decision covers both.
  const viewSchemas = introspectViews();
  const currentViewSnapshot = viewSchemasToSnapshot(viewSchemas);
  const previousViewSnapshot = extractViewsFromSnapshot(previousSnapshot);
  const viewDiff = diffViewSnapshots(previousViewSnapshot, currentViewSnapshot);

  if (!diff.hasChanges && !viewDiff.hasChanges) {
    log.db('No schema changes detected.');
    return null;
  }

  const upStatements = [];
  const downStatements = [];

  // pgvector extension (include in initial migration)
  if (Object.keys(previousSnapshot).length === 0) {
    upStatements.push('CREATE EXTENSION IF NOT EXISTS vector;');
  }

  // New tables — in topological order (parents before children)
  const allOrder = getTopologicalOrder(schemas);
  const addedOrdered = allOrder.filter(name => diff.addedModels.includes(name));

  for (const name of addedOrdered) {
    upStatements.push(buildTableDDL(name, schemas[name], schemas) + ';');

    // HNSW indexes for vector columns
    for (const stmt of buildVectorIndexDDL(name, schemas[name])) {
      upStatements.push(stmt + ';');
    }

    // unshift so children are dropped before their parents on rollback
    downStatements.unshift(`DROP TABLE IF EXISTS "${schemas[name].table}" CASCADE;`);
  }

  // Removed tables (warn only, commented out — dropping data is a manual decision)
  for (const name of diff.removedModels) {
    upStatements.push(`-- WARNING: Model '${name}' was removed. Uncomment to drop table:`);
    upStatements.push(`-- DROP TABLE IF EXISTS "${previousSnapshot[name].table}" CASCADE;`);
    downStatements.push(`-- Recreate table for removed model '${name}' manually if needed`);
  }

  // Added columns
  for (const { model, column, type } of diff.addedColumns) {
    const table = currentSnapshot[model].table;
    upStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${type};`);
    downStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);

    // Add HNSW index if it's a vector column
    if (type.startsWith('vector(')) {
      upStatements.push(`CREATE INDEX IF NOT EXISTS "idx_${table}_${column}_hnsw" ON "${table}" USING hnsw ("${column}" vector_cosine_ops) WITH (m = 16, ef_construction = 200);`);
    }
  }

  // Removed columns
  for (const { model, column, type } of diff.removedColumns) {
    const table = previousSnapshot[model].table;
    upStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
    downStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${type};`);
  }

  // Changed column types
  for (const { model, column, from, to } of diff.changedColumns) {
    const table = currentSnapshot[model].table;
    upStatements.push(`ALTER TABLE "${table}" ALTER COLUMN "${column}" TYPE ${to};`);
    downStatements.push(`ALTER TABLE "${table}" ALTER COLUMN "${column}" TYPE ${from};`);
  }

  // Added foreign keys — FK column type follows the referenced model's id type.
  for (const { model, column, references } of diff.addedForeignKeys) {
    const table = currentSnapshot[model].table;
    const refModel = Object.entries(currentSnapshot).find(([, s]) => s.table === references.references);
    const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INTEGER';
    const constraintName = `fk_${table}_${column}`;
    upStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${fkType};`);
    upStatements.push(`ALTER TABLE "${table}" ADD CONSTRAINT "${constraintName}" FOREIGN KEY ("${column}") REFERENCES "${references.references}"("${references.column}") ON DELETE SET NULL;`);
    downStatements.push(`ALTER TABLE "${table}" DROP CONSTRAINT "${constraintName}";`);
    downStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
  }

  // Removed foreign keys
  for (const { model, column, references } of diff.removedForeignKeys) {
    const table = previousSnapshot[model].table;
    const refModel = Object.entries(previousSnapshot).find(([, s]) => s.table === references.references);
    const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INTEGER';
    const constraintName = `fk_${table}_${column}`;
    upStatements.push(`ALTER TABLE "${table}" DROP CONSTRAINT "${constraintName}";`);
    upStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
    downStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${fkType};`);
    downStatements.push(`ALTER TABLE "${table}" ADD CONSTRAINT "${constraintName}" FOREIGN KEY ("${column}") REFERENCES "${references.references}"("${references.column}") ON DELETE SET NULL;`);
  }

  // View migrations — views are created AFTER tables (dependency order)
  if (viewDiff.hasChanges) {
    upStatements.push('');
    upStatements.push('-- Views');
    downStatements.push('');
    downStatements.push('-- Views');

    // Added views
    for (const name of viewDiff.addedViews) {
      try {
        upStatements.push(buildViewDDL(name, viewSchemas[name], schemas) + ';');
        // unshift: views must be dropped before their source tables on rollback
        downStatements.unshift(`DROP VIEW IF EXISTS "${viewSchemas[name].viewName}";`);
      } catch (error) {
        upStatements.push(`-- WARNING: Could not generate DDL for view '${name}': ${error.message}`);
      }
    }

    // Removed views
    for (const name of viewDiff.removedViews) {
      upStatements.push(`-- WARNING: View '${name}' was removed. Uncomment to drop view:`);
      upStatements.push(`-- DROP VIEW IF EXISTS "${previousViewSnapshot[name].viewName}";`);
      downStatements.push(`-- Recreate view for removed view '${name}' manually if needed`);
    }

    // Changed views (source or aggregates changed) — recreated via OR REPLACE DDL
    for (const name of viewDiff.changedViews) {
      try {
        upStatements.push(buildViewDDL(name, viewSchemas[name], schemas) + ';');
      } catch (error) {
        upStatements.push(`-- WARNING: Could not generate DDL for changed view '${name}': ${error.message}`);
      }
    }
  }

  // Merge view snapshot into the main snapshot
  const combinedSnapshot = { ...currentSnapshot, ...currentViewSnapshot };

  const sanitizedDescription = description.replace(/\s+/g, '_').replace(/[^a-zA-Z0-9_]/g, '');
  const timestamp = Math.floor(Date.now() / 1000);
  const filename = `${timestamp}_${sanitizedDescription}.sql`;
  const content = `-- UP\n${upStatements.join('\n')}\n\n-- DOWN\n${downStatements.join('\n')}\n`;

  await createFile(path.join(migrationsPath, filename), content);
  await createFile(path.join(migrationsPath, '.snapshot.json'), JSON.stringify(combinedSnapshot, null, 2));

  // FIX: was `Migration generated: $(unknown)` — never interpolated the filename.
  log.db(`Migration generated: ${filename}`);

  return { filename, content, snapshot: combinedSnapshot };
}
|
|
179
|
+
|
|
180
|
+
/**
 * Load the most recent schema snapshot (`.snapshot.json`) from the
 * migrations directory.
 *
 * @param {string} migrationsPath - Absolute path to the migrations directory.
 * @returns {Promise<object>} Parsed snapshot, or an empty object when no
 *   snapshot file exists yet (i.e. before the first migration).
 */
export async function loadLatestSnapshot(migrationsPath) {
  const snapshotPath = path.join(migrationsPath, '.snapshot.json');

  if (!(await fileExists(snapshotPath))) {
    return {};
  }

  return readFile(snapshotPath, { json: true });
}
|
|
188
|
+
|
|
189
|
+
/**
 * Compute the structural difference between two schema snapshots.
 *
 * Detects added/removed models, added/removed/type-changed columns, and
 * added/removed foreign keys on models that exist in both snapshots.
 *
 * @param {object} previous - Snapshot keyed by model name (may be empty).
 * @param {object} current  - Snapshot keyed by model name.
 * @returns {{hasChanges: boolean, addedModels: string[], removedModels: string[],
 *   addedColumns: Array<{model, column, type}>, removedColumns: Array<{model, column, type}>,
 *   changedColumns: Array<{model, column, from, to}>,
 *   addedForeignKeys: Array<{model, column, references}>,
 *   removedForeignKeys: Array<{model, column, references}>}}
 */
export function diffSnapshots(previous, current) {
  // Models present only on one side.
  const addedModels = Object.keys(current).filter((name) => !previous[name]);
  const removedModels = Object.keys(previous).filter((name) => !current[name]);

  const addedColumns = [];
  const removedColumns = [];
  const changedColumns = [];
  const addedForeignKeys = [];
  const removedForeignKeys = [];

  // Column- and FK-level diffs only apply to models present in both snapshots.
  for (const [name, currEntry] of Object.entries(current)) {
    const prevEntry = previous[name];
    if (!prevEntry) continue;

    const { columns: prevCols = {} } = prevEntry;
    const { columns: currCols = {} } = currEntry;

    for (const [column, type] of Object.entries(currCols)) {
      const before = prevCols[column];
      if (!before) {
        addedColumns.push({ model: name, column, type });
      } else if (before !== type) {
        changedColumns.push({ model: name, column, from: before, to: type });
      }
    }

    for (const [column, type] of Object.entries(prevCols)) {
      if (!currCols[column]) {
        removedColumns.push({ model: name, column, type });
      }
    }

    const prevFKs = prevEntry.foreignKeys || {};
    const currFKs = currEntry.foreignKeys || {};

    for (const [column, references] of Object.entries(currFKs)) {
      if (!prevFKs[column]) {
        addedForeignKeys.push({ model: name, column, references });
      }
    }

    for (const [column, references] of Object.entries(prevFKs)) {
      if (!currFKs[column]) {
        removedForeignKeys.push({ model: name, column, references });
      }
    }
  }

  const hasChanges = [
    addedModels, removedModels,
    addedColumns, removedColumns, changedColumns,
    addedForeignKeys, removedForeignKeys,
  ].some((list) => list.length > 0);

  return {
    hasChanges,
    addedModels,
    removedModels,
    addedColumns,
    removedColumns,
    changedColumns,
    addedForeignKeys,
    removedForeignKeys,
  };
}
|
|
263
|
+
|
|
264
|
+
/**
 * Diff the live model schemas against a persisted snapshot to detect drift
 * (changes made to models that have not yet been captured in a migration).
 *
 * @param {object} schemas  - Introspected model schemas.
 * @param {object} snapshot - Previously persisted snapshot.
 * @returns {object} Same shape as diffSnapshots().
 */
export function detectSchemaDrift(schemas, snapshot) {
  return diffSnapshots(snapshot, schemasToSnapshot(schemas));
}
|
|
268
|
+
|
|
269
|
+
/**
 * Pull only the view entries (those flagged `isView`) out of a combined
 * table+view snapshot.
 *
 * @param {object} snapshot - Combined snapshot keyed by model/view name.
 * @returns {object} New object containing just the view entries.
 */
export function extractViewsFromSnapshot(snapshot) {
  return Object.fromEntries(
    Object.entries(snapshot).filter(([, entry]) => entry.isView)
  );
}
|
|
276
|
+
|
|
277
|
+
/**
 * Compute the difference between two view snapshots.
 *
 * A view counts as changed when its `viewQuery` or its `source` differs
 * from the previous snapshot.
 *
 * @param {object} previous - View snapshot keyed by view name.
 * @param {object} current  - View snapshot keyed by view name.
 * @returns {{hasChanges: boolean, addedViews: string[], removedViews: string[], changedViews: string[]}}
 */
export function diffViewSnapshots(previous, current) {
  const addedViews = [];
  const changedViews = [];

  for (const [name, view] of Object.entries(current)) {
    const before = previous[name];
    if (!before) {
      addedViews.push(name);
      continue;
    }
    if (view.viewQuery !== before.viewQuery || view.source !== before.source) {
      changedViews.push(name);
    }
  }

  const removedViews = Object.keys(previous).filter((name) => !current[name]);

  const hasChanges =
    addedViews.length > 0 || removedViews.length > 0 || changedViews.length > 0;

  return { hasChanges, addedViews, removedViews, changedViews };
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import { readFile, fileExists } from '@stonyx/utils/file';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import fs from 'fs/promises';
|
|
4
|
+
|
|
5
|
+
/**
 * Create the migration bookkeeping table if it does not already exist.
 *
 * @param {object} pool - pg Pool (anything exposing `query(sql)`).
 * @param {string} [tableName='__migrations'] - Bookkeeping table name.
 * @returns {Promise<void>}
 */
export async function ensureMigrationsTable(pool, tableName = '__migrations') {
  const ddl = `
    CREATE TABLE IF NOT EXISTS "${tableName}" (
      id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
      filename VARCHAR(255) NOT NULL UNIQUE,
      applied_at TIMESTAMPTZ DEFAULT NOW()
    )
  `;

  await pool.query(ddl);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Fetch the filenames of all migrations already applied, in application order.
 *
 * @param {object} pool - pg Pool (anything exposing `query(sql)`).
 * @param {string} [tableName='__migrations'] - Bookkeeping table name.
 * @returns {Promise<string[]>} Applied migration filenames, oldest first.
 */
export async function getAppliedMigrations(pool, tableName = '__migrations') {
  const { rows } = await pool.query(
    `SELECT filename FROM "${tableName}" ORDER BY id ASC`
  );

  return rows.map(({ filename }) => filename);
}
|
|
22
|
+
|
|
23
|
+
/**
 * List migration files (`*.sql`) in the migrations directory, sorted by name
 * (filenames start with a unix timestamp, so this is chronological order).
 *
 * @param {string} migrationsDir - Path to the migrations directory.
 * @returns {Promise<string[]>} Sorted `.sql` filenames; empty when the
 *   directory does not exist.
 */
export async function getMigrationFiles(migrationsDir) {
  if (!(await fileExists(migrationsDir))) {
    return [];
  }

  const entries = await fs.readdir(migrationsDir);

  return entries.filter((name) => name.endsWith('.sql')).sort();
}
|
|
33
|
+
|
|
34
|
+
/**
 * Split a migration file's text into its UP and DOWN SQL sections.
 *
 * Sections are delimited by the literal markers `-- UP` and `-- DOWN`.
 * When no `-- UP` marker exists, the whole file is treated as the UP
 * section; when no `-- DOWN` marker exists, DOWN is empty.
 *
 * @param {string} content - Raw migration file contents.
 * @returns {{up: string, down: string}} Trimmed UP and DOWN SQL.
 */
export function parseMigrationFile(content) {
  const UP_MARKER = '-- UP';
  const DOWN_MARKER = '-- DOWN';

  const upAt = content.indexOf(UP_MARKER);
  if (upAt === -1) {
    return { up: content.trim(), down: '' };
  }

  const downAt = content.indexOf(DOWN_MARKER);
  const upBody = downAt === -1
    ? content.slice(upAt + UP_MARKER.length)
    : content.slice(upAt + UP_MARKER.length, downAt);
  const downBody = downAt === -1 ? '' : content.slice(downAt + DOWN_MARKER.length);

  return { up: upBody.trim(), down: downBody.trim() };
}
|
|
51
|
+
|
|
52
|
+
/**
 * Apply one migration's UP SQL inside a single transaction and record it in
 * the bookkeeping table. On any failure the transaction is rolled back and
 * the error is rethrown; the client is always released.
 *
 * @param {object} pool - pg Pool (exposes `connect()`).
 * @param {string} filename - Migration filename to record.
 * @param {string} upSql - The UP section of the migration.
 * @param {string} [tableName='__migrations'] - Bookkeeping table name.
 * @returns {Promise<void>}
 */
export async function applyMigration(pool, filename, upSql, tableName = '__migrations') {
  const client = await pool.connect();

  try {
    await client.query('BEGIN');

    // Run every statement of the UP section in order, inside the transaction.
    for (const statement of splitStatements(upSql)) {
      await client.query(statement);
    }

    // Record the migration as applied (parameterized to avoid SQL injection
    // through filenames).
    await client.query(`INSERT INTO "${tableName}" (filename) VALUES ($1)`, [filename]);

    await client.query('COMMIT');
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}
|
|
77
|
+
|
|
78
|
+
/**
 * Roll back one migration by running its DOWN SQL inside a single
 * transaction and deleting its bookkeeping row. On any failure the
 * transaction is rolled back and the error is rethrown; the client is
 * always released.
 *
 * @param {object} pool - pg Pool (exposes `connect()`).
 * @param {string} filename - Migration filename whose record is removed.
 * @param {string} downSql - The DOWN section of the migration.
 * @param {string} [tableName='__migrations'] - Bookkeeping table name.
 * @returns {Promise<void>}
 */
export async function rollbackMigration(pool, filename, downSql, tableName = '__migrations') {
  const client = await pool.connect();

  try {
    await client.query('BEGIN');

    // Run every statement of the DOWN section in order, inside the transaction.
    for (const statement of splitStatements(downSql)) {
      await client.query(statement);
    }

    // Remove the bookkeeping row so the migration is considered unapplied.
    await client.query(`DELETE FROM "${tableName}" WHERE filename = $1`, [filename]);

    await client.query('COMMIT');
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}
|
|
103
|
+
|
|
104
|
+
/**
 * Split a migration SQL blob into individual executable statements.
 *
 * Statements are separated by ';'. Full-line `--` comment lines are stripped
 * from each statement, and chunks containing nothing but comments/whitespace
 * are dropped.
 *
 * FIX: the previous version discarded any whole chunk that *started* with
 * `--`, which silently dropped real statements preceded by a full-line
 * comment — e.g. the generator emits a `-- Views` section header directly
 * above view DDL, so every first view statement was skipped. Comment lines
 * are now removed per chunk instead of discarding the chunk.
 *
 * NOTE(review): splitting on ';' assumes no semicolons inside string
 * literals or function bodies — appears true for the SQL this package
 * generates; confirm before feeding hand-written migrations through it.
 *
 * @param {string} sql - Raw SQL text (e.g. a parsed UP or DOWN section).
 * @returns {string[]} Executable statements, without trailing semicolons.
 */
function splitStatements(sql) {
  return sql
    .split(';')
    .map((chunk) =>
      chunk
        .split('\n')
        .filter((line) => !line.trim().startsWith('--'))
        .join('\n')
        .trim()
    )
    .filter((statement) => statement.length > 0);
}
|