@stonyx/orm 0.2.5-alpha.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +482 -15
- package/config/environment.js +63 -6
- package/dist/aggregates.d.ts +21 -0
- package/dist/aggregates.js +93 -0
- package/dist/attr.d.ts +2 -0
- package/dist/attr.js +22 -0
- package/dist/belongs-to.d.ts +11 -0
- package/dist/belongs-to.js +59 -0
- package/dist/cli.d.ts +22 -0
- package/dist/cli.js +148 -0
- package/dist/commands.d.ts +7 -0
- package/dist/commands.js +146 -0
- package/dist/db.d.ts +21 -0
- package/dist/db.js +180 -0
- package/dist/exports/db.d.ts +7 -0
- package/{src → dist}/exports/db.js +2 -4
- package/dist/has-many.d.ts +11 -0
- package/dist/has-many.js +58 -0
- package/dist/hooks.d.ts +75 -0
- package/dist/hooks.js +110 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.js +34 -0
- package/dist/main.d.ts +46 -0
- package/dist/main.js +181 -0
- package/dist/manage-record.d.ts +13 -0
- package/dist/manage-record.js +123 -0
- package/dist/meta-request.d.ts +6 -0
- package/dist/meta-request.js +52 -0
- package/dist/migrate.d.ts +2 -0
- package/dist/migrate.js +57 -0
- package/dist/model-property.d.ts +9 -0
- package/dist/model-property.js +29 -0
- package/dist/model.d.ts +15 -0
- package/dist/model.js +18 -0
- package/dist/mysql/connection.d.ts +14 -0
- package/dist/mysql/connection.js +24 -0
- package/dist/mysql/migration-generator.d.ts +45 -0
- package/dist/mysql/migration-generator.js +254 -0
- package/dist/mysql/migration-runner.d.ts +12 -0
- package/dist/mysql/migration-runner.js +88 -0
- package/dist/mysql/mysql-db.d.ts +100 -0
- package/dist/mysql/mysql-db.js +425 -0
- package/dist/mysql/query-builder.d.ts +10 -0
- package/dist/mysql/query-builder.js +44 -0
- package/dist/mysql/schema-introspector.d.ts +19 -0
- package/dist/mysql/schema-introspector.js +257 -0
- package/dist/mysql/type-map.d.ts +21 -0
- package/dist/mysql/type-map.js +36 -0
- package/dist/orm-request.d.ts +38 -0
- package/dist/orm-request.js +475 -0
- package/dist/plural-registry.d.ts +4 -0
- package/dist/plural-registry.js +9 -0
- package/dist/postgres/connection.d.ts +15 -0
- package/dist/postgres/connection.js +32 -0
- package/dist/postgres/migration-generator.d.ts +45 -0
- package/dist/postgres/migration-generator.js +280 -0
- package/dist/postgres/migration-runner.d.ts +10 -0
- package/dist/postgres/migration-runner.js +87 -0
- package/dist/postgres/postgres-db.d.ts +119 -0
- package/dist/postgres/postgres-db.js +477 -0
- package/dist/postgres/query-builder.d.ts +27 -0
- package/dist/postgres/query-builder.js +98 -0
- package/dist/postgres/schema-introspector.d.ts +29 -0
- package/dist/postgres/schema-introspector.js +296 -0
- package/dist/postgres/type-map.d.ts +23 -0
- package/dist/postgres/type-map.js +56 -0
- package/dist/record.d.ts +75 -0
- package/dist/record.js +129 -0
- package/dist/relationships.d.ts +10 -0
- package/dist/relationships.js +41 -0
- package/dist/schema-helpers.d.ts +20 -0
- package/dist/schema-helpers.js +48 -0
- package/dist/serializer.d.ts +17 -0
- package/dist/serializer.js +136 -0
- package/dist/setup-rest-server.d.ts +1 -0
- package/dist/setup-rest-server.js +52 -0
- package/dist/standalone-db.d.ts +58 -0
- package/dist/standalone-db.js +142 -0
- package/dist/store.d.ts +62 -0
- package/dist/store.js +286 -0
- package/dist/timescale/query-builder.d.ts +43 -0
- package/dist/timescale/query-builder.js +115 -0
- package/dist/timescale/timescale-db.d.ts +45 -0
- package/dist/timescale/timescale-db.js +84 -0
- package/dist/transforms.d.ts +2 -0
- package/dist/transforms.js +17 -0
- package/dist/types/orm-types.d.ts +153 -0
- package/dist/types/orm-types.js +1 -0
- package/dist/utils.d.ts +7 -0
- package/dist/utils.js +17 -0
- package/dist/view-resolver.d.ts +8 -0
- package/dist/view-resolver.js +171 -0
- package/dist/view.d.ts +11 -0
- package/dist/view.js +18 -0
- package/package.json +64 -11
- package/src/aggregates.ts +109 -0
- package/src/{attr.js → attr.ts} +2 -2
- package/src/belongs-to.ts +90 -0
- package/src/cli.ts +183 -0
- package/src/commands.ts +179 -0
- package/src/db.ts +232 -0
- package/src/exports/db.ts +7 -0
- package/src/has-many.ts +92 -0
- package/src/hooks.ts +151 -0
- package/src/{index.js → index.ts} +12 -2
- package/src/main.ts +229 -0
- package/src/manage-record.ts +161 -0
- package/src/{meta-request.js → meta-request.ts} +17 -14
- package/src/migrate.ts +72 -0
- package/src/model-property.ts +35 -0
- package/src/model.ts +21 -0
- package/src/mysql/connection.ts +43 -0
- package/src/mysql/migration-generator.ts +337 -0
- package/src/mysql/migration-runner.ts +121 -0
- package/src/mysql/mysql-db.ts +543 -0
- package/src/mysql/query-builder.ts +69 -0
- package/src/mysql/schema-introspector.ts +310 -0
- package/src/mysql/type-map.ts +42 -0
- package/src/orm-request.ts +582 -0
- package/src/plural-registry.ts +12 -0
- package/src/postgres/connection.ts +48 -0
- package/src/postgres/migration-generator.ts +370 -0
- package/src/postgres/migration-runner.ts +115 -0
- package/src/postgres/postgres-db.ts +616 -0
- package/src/postgres/query-builder.ts +148 -0
- package/src/postgres/schema-introspector.ts +360 -0
- package/src/postgres/type-map.ts +61 -0
- package/src/record.ts +186 -0
- package/src/relationships.ts +54 -0
- package/src/schema-helpers.ts +59 -0
- package/src/serializer.ts +161 -0
- package/src/setup-rest-server.ts +62 -0
- package/src/standalone-db.ts +185 -0
- package/src/store.ts +373 -0
- package/src/timescale/query-builder.ts +174 -0
- package/src/timescale/timescale-db.ts +119 -0
- package/src/transforms.ts +20 -0
- package/src/types/mysql2.d.ts +49 -0
- package/src/types/orm-types.ts +158 -0
- package/src/types/pg.d.ts +32 -0
- package/src/types/stonyx-cron.d.ts +5 -0
- package/src/types/stonyx-events.d.ts +4 -0
- package/src/types/stonyx-rest-server.d.ts +16 -0
- package/src/types/stonyx-utils.d.ts +33 -0
- package/src/types/stonyx.d.ts +21 -0
- package/src/utils.ts +22 -0
- package/src/view-resolver.ts +211 -0
- package/src/view.ts +22 -0
- package/.claude/project-structure.md +0 -578
- package/.github/workflows/ci.yml +0 -36
- package/.github/workflows/publish.yml +0 -143
- package/src/belongs-to.js +0 -63
- package/src/db.js +0 -80
- package/src/has-many.js +0 -61
- package/src/main.js +0 -119
- package/src/manage-record.js +0 -103
- package/src/model-property.js +0 -29
- package/src/model.js +0 -9
- package/src/orm-request.js +0 -249
- package/src/record.js +0 -100
- package/src/relationships.js +0 -43
- package/src/serializer.js +0 -138
- package/src/setup-rest-server.js +0 -57
- package/src/store.js +0 -211
- package/src/transforms.js +0 -20
- package/stonyx-bootstrap.cjs +0 -30
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
import { introspectModels, introspectViews, buildTableDDL, buildViewDDL, buildVectorIndexDDL, schemasToSnapshot, viewSchemasToSnapshot, getTopologicalOrder } from './schema-introspector.js';
|
|
2
|
+
import { buildCreateHypertable, buildEnableCompression, buildCompressionPolicy } from '../timescale/query-builder.js';
|
|
3
|
+
import { readFile, createFile, createDirectory, fileExists } from '@stonyx/utils/file';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
import config from 'stonyx/config';
|
|
6
|
+
import log from 'stonyx/log';
|
|
7
|
+
/**
 * Generates a timestamped SQL migration file by diffing the current model and
 * view schemas against the last saved snapshot (.snapshot.json).
 *
 * The file contains paired "-- UP" and "-- DOWN" sections; a fresh combined
 * snapshot (tables + views) is written alongside it.
 *
 * @param {string} [description='migration'] label sanitized into the filename
 * @param {string} [configKey='postgres'] key under config.orm for this adapter
 * @returns {Promise<{filename: string, content: string, snapshot: object} | null>}
 *   null when no schema changes are detected
 */
export async function generateMigration(description = 'migration', configKey = 'postgres') {
    const { migrationsDir } = config.orm[configKey];
    const rootPath = config.rootPath;
    const migrationsPath = path.resolve(rootPath, migrationsDir);
    await createDirectory(migrationsPath);
    const schemas = introspectModels();
    const currentSnapshot = schemasToSnapshot(schemas);
    const previousSnapshot = await loadLatestSnapshot(migrationsPath);
    // Bug fix: the saved snapshot mixes table and view entries (merged on
    // write below), but currentSnapshot covers tables only. Exclude view
    // entries from the table diff, otherwise every view is misreported as a
    // "removed model" on subsequent runs.
    const previousTableSnapshot = {};
    for (const [name, entry] of Object.entries(previousSnapshot)) {
        if (!entry.isView)
            previousTableSnapshot[name] = entry;
    }
    const diff = diffSnapshots(previousTableSnapshot, currentSnapshot);
    // Diff views up front as well so we can bail out once if nothing changed
    // (replaces the previous duplicated preliminary/final checks).
    const viewSchemas = introspectViews();
    const currentViewSnapshot = viewSchemasToSnapshot(viewSchemas);
    const previousViewSnapshot = extractViewsFromSnapshot(previousSnapshot);
    const viewDiff = diffViewSnapshots(previousViewSnapshot, currentViewSnapshot);
    if (!diff.hasChanges && !viewDiff.hasChanges) {
        log.db?.('No schema changes detected.');
        return null;
    }
    const upStatements = [];
    const downStatements = [];
    // pgvector extension (include in initial migration only)
    if (Object.keys(previousSnapshot).length === 0) {
        upStatements.push('CREATE EXTENSION IF NOT EXISTS vector;');
    }
    // New tables -- in topological order (parents before children)
    const allOrder = getTopologicalOrder(schemas);
    const addedOrdered = allOrder.filter(name => diff.addedModels.includes(name));
    for (const name of addedOrdered) {
        upStatements.push(buildTableDDL(name, schemas[name], schemas) + ';');
        // HNSW indexes for vector columns
        const indexStatements = buildVectorIndexDDL(name, schemas[name]);
        for (const stmt of indexStatements) {
            upStatements.push(stmt + ';');
        }
        // unshift: children must be dropped before parents on rollback
        downStatements.unshift(`DROP TABLE IF EXISTS "${schemas[name].table}" CASCADE;`);
    }
    // Hypertable conversion + compression (TimescaleDB only)
    if (configKey === 'timescale') {
        for (const name of addedOrdered) {
            const schema = schemas[name];
            if (!schema.hypertable)
                continue;
            const { timeColumn, chunkInterval } = schema.hypertable;
            upStatements.push(buildCreateHypertable(schema.table, timeColumn, { chunkInterval }).sql + ';');
            if (schema.hypertable.compress) {
                const { segmentBy, orderBy, after } = schema.hypertable.compress;
                upStatements.push(buildEnableCompression(schema.table, segmentBy, orderBy).sql + ';');
                if (after) {
                    upStatements.push(buildCompressionPolicy(schema.table, after).sql + ';');
                }
            }
            downStatements.unshift('-- Hypertable conversion is not reversible; table drop handles cleanup');
        }
    }
    // Removed tables (warn only, commented out)
    for (const name of diff.removedModels) {
        upStatements.push(`-- WARNING: Model '${name}' was removed. Uncomment to drop table:`);
        upStatements.push(`-- DROP TABLE IF EXISTS "${previousTableSnapshot[name].table}" CASCADE;`);
        downStatements.push(`-- Recreate table for removed model '${name}' manually if needed`);
    }
    // Added columns
    for (const { model, column, type } of diff.addedColumns) {
        const table = currentSnapshot[model].table;
        upStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${type};`);
        downStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
        // Add HNSW index if it's a vector column
        if (type.startsWith('vector(')) {
            upStatements.push(`CREATE INDEX IF NOT EXISTS "idx_${table}_${column}_hnsw" ON "${table}" USING hnsw ("${column}" vector_cosine_ops) WITH (m = 16, ef_construction = 200);`);
        }
    }
    // Removed columns
    for (const { model, column, type } of diff.removedColumns) {
        const table = previousTableSnapshot[model]?.table;
        if (!table)
            throw new Error(`Missing table name in snapshot for model "${model}"`);
        upStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
        downStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${type};`);
    }
    // Changed column types
    for (const { model, column, from, to } of diff.changedColumns) {
        const table = currentSnapshot[model].table;
        upStatements.push(`ALTER TABLE "${table}" ALTER COLUMN "${column}" TYPE ${to};`);
        downStatements.push(`ALTER TABLE "${table}" ALTER COLUMN "${column}" TYPE ${from};`);
    }
    // Added foreign keys
    for (const { model, column, references } of diff.addedForeignKeys) {
        const table = currentSnapshot[model].table;
        // FK column type must match the referenced table's id type
        const refModel = Object.entries(currentSnapshot).find(([, s]) => s.table === references.references);
        const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INTEGER';
        const constraintName = `fk_${table}_${column}`;
        upStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${fkType};`);
        upStatements.push(`ALTER TABLE "${table}" ADD CONSTRAINT "${constraintName}" FOREIGN KEY ("${column}") REFERENCES "${references.references}"("${references.column}") ON DELETE SET NULL;`);
        downStatements.push(`ALTER TABLE "${table}" DROP CONSTRAINT "${constraintName}";`);
        downStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
    }
    // Removed foreign keys
    for (const { model, column, references } of diff.removedForeignKeys) {
        const table = previousTableSnapshot[model]?.table;
        if (!table)
            throw new Error(`Missing table name in snapshot for model "${model}"`);
        const refModel = Object.entries(previousTableSnapshot).find(([, s]) => s.table === references.references);
        const fkType = refModel && refModel[1].idType === 'string' ? 'VARCHAR(255)' : 'INTEGER';
        const constraintName = `fk_${table}_${column}`;
        upStatements.push(`ALTER TABLE "${table}" DROP CONSTRAINT "${constraintName}";`);
        upStatements.push(`ALTER TABLE "${table}" DROP COLUMN "${column}";`);
        downStatements.push(`ALTER TABLE "${table}" ADD COLUMN "${column}" ${fkType};`);
        downStatements.push(`ALTER TABLE "${table}" ADD CONSTRAINT "${constraintName}" FOREIGN KEY ("${column}") REFERENCES "${references.references}"("${references.column}") ON DELETE SET NULL;`);
    }
    // View migrations -- views are created AFTER tables (dependency order)
    if (viewDiff.hasChanges) {
        upStatements.push('');
        upStatements.push('-- Views');
        downStatements.push('');
        downStatements.push('-- Views');
        // Added views
        for (const name of viewDiff.addedViews) {
            try {
                const ddl = buildViewDDL(name, viewSchemas[name], schemas);
                upStatements.push(ddl + ';');
                downStatements.unshift(`DROP VIEW IF EXISTS "${viewSchemas[name].viewName}";`);
            }
            catch (error) {
                upStatements.push(`-- WARNING: Could not generate DDL for view '${name}': ${error instanceof Error ? error.message : String(error)}`);
            }
        }
        // Removed views
        for (const name of viewDiff.removedViews) {
            upStatements.push(`-- WARNING: View '${name}' was removed. Uncomment to drop view:`);
            upStatements.push(`-- DROP VIEW IF EXISTS "${previousViewSnapshot[name].viewName}";`);
            downStatements.push(`-- Recreate view for removed view '${name}' manually if needed`);
        }
        // Changed views (source or aggregates changed)
        for (const name of viewDiff.changedViews) {
            try {
                const ddl = buildViewDDL(name, viewSchemas[name], schemas);
                upStatements.push(ddl + ';');
            }
            catch (error) {
                upStatements.push(`-- WARNING: Could not generate DDL for changed view '${name}': ${error instanceof Error ? error.message : String(error)}`);
            }
        }
    }
    // Merge view snapshot into the main snapshot so the next run can diff both
    const combinedSnapshot = { ...currentSnapshot };
    for (const [name, viewSnap] of Object.entries(currentViewSnapshot)) {
        combinedSnapshot[name] = viewSnap;
    }
    const sanitizedDescription = description.replace(/\s+/g, '_').replace(/[^a-zA-Z0-9_]/g, '');
    const timestamp = Math.floor(Date.now() / 1000);
    const filename = `${timestamp}_${sanitizedDescription}.sql`;
    const content = `-- UP\n${upStatements.join('\n')}\n\n-- DOWN\n${downStatements.join('\n')}\n`;
    await createFile(path.join(migrationsPath, filename), content);
    await createFile(path.join(migrationsPath, '.snapshot.json'), JSON.stringify(combinedSnapshot, null, 2));
    // Bug fix: previously logged the literal placeholder "$(unknown)" instead of the filename.
    log.db?.(`Migration generated: ${filename}`);
    return { filename, content, snapshot: combinedSnapshot };
}
|
|
174
|
+
/**
 * Reads the most recent schema snapshot (.snapshot.json) from the migrations
 * directory. Resolves to an empty object when no snapshot exists yet.
 */
export async function loadLatestSnapshot(migrationsPath) {
    const file = path.join(migrationsPath, '.snapshot.json');
    if (!(await fileExists(file))) {
        return {};
    }
    return readFile(file, { json: true });
}
|
|
181
|
+
/**
 * Computes the difference between two schema snapshots.
 *
 * Reports models present only on one side, plus column additions/removals/
 * type changes and foreign-key additions/removals for models present in both.
 * `hasChanges` is true when any category is non-empty.
 */
export function diffSnapshots(previous, current) {
    const result = {
        hasChanges: false,
        addedModels: [],
        removedModels: [],
        addedColumns: [],
        removedColumns: [],
        changedColumns: [],
        addedForeignKeys: [],
        removedForeignKeys: [],
    };
    // Models present only in the current snapshot.
    for (const name of Object.keys(current)) {
        if (!previous[name])
            result.addedModels.push(name);
    }
    // Models present only in the previous snapshot.
    for (const name of Object.keys(previous)) {
        if (!current[name])
            result.removedModels.push(name);
    }
    // Column and foreign-key changes on models present in both snapshots.
    for (const name of Object.keys(current)) {
        if (!previous[name])
            continue;
        const before = previous[name].columns || {};
        const after = current[name].columns || {};
        for (const [column, type] of Object.entries(after)) {
            if (!before[column]) {
                result.addedColumns.push({ model: name, column, type });
            }
            else if (before[column] !== type) {
                result.changedColumns.push({ model: name, column, from: before[column], to: type });
            }
        }
        for (const [column, type] of Object.entries(before)) {
            if (!after[column])
                result.removedColumns.push({ model: name, column, type });
        }
        const beforeFKs = previous[name].foreignKeys || {};
        const afterFKs = current[name].foreignKeys || {};
        for (const [column, references] of Object.entries(afterFKs)) {
            if (!beforeFKs[column])
                result.addedForeignKeys.push({ model: name, column, references });
        }
        for (const [column, references] of Object.entries(beforeFKs)) {
            if (!afterFKs[column])
                result.removedForeignKeys.push({ model: name, column, references });
        }
    }
    result.hasChanges = [
        result.addedModels,
        result.removedModels,
        result.addedColumns,
        result.removedColumns,
        result.changedColumns,
        result.addedForeignKeys,
        result.removedForeignKeys,
    ].some(list => list.length > 0);
    return result;
}
|
|
248
|
+
/**
 * Diffs the live model schemas against a previously saved snapshot,
 * surfacing any drift in the same shape as diffSnapshots().
 */
export function detectSchemaDrift(schemas, snapshot) {
    return diffSnapshots(snapshot, schemasToSnapshot(schemas));
}
|
|
252
|
+
/**
 * Pulls only the view entries (flagged with `isView`) out of a combined
 * snapshot that stores table and view entries side by side.
 */
export function extractViewsFromSnapshot(snapshot) {
    return Object.fromEntries(
        Object.entries(snapshot).filter(([, entry]) => entry.isView)
    );
}
|
|
260
|
+
/**
 * Diffs two view snapshots. A view counts as changed when either its
 * query text (`viewQuery`) or its `source` differs from the previous
 * snapshot; views present on only one side are added/removed.
 */
export function diffViewSnapshots(previous, current) {
    const addedViews = [];
    const changedViews = [];
    for (const [name, view] of Object.entries(current)) {
        const prior = previous[name];
        if (!prior) {
            addedViews.push(name);
        }
        else if (view.viewQuery !== prior.viewQuery || view.source !== prior.source) {
            changedViews.push(name);
        }
    }
    const removedViews = Object.keys(previous).filter(name => !current[name]);
    const hasChanges = addedViews.length + removedViews.length + changedViews.length > 0;
    return { hasChanges, addedViews, removedViews, changedViews };
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { Pool } from 'pg';
/** Creates the migration bookkeeping table (default "__migrations") if it does not exist. */
export declare function ensureMigrationsTable(pool: Pool, tableName?: string): Promise<void>;
/** Returns filenames of already-applied migrations, in application order (ascending id). */
export declare function getAppliedMigrations(pool: Pool, tableName?: string): Promise<string[]>;
/** Lists the .sql files in a migrations directory, sorted by filename; [] if the directory is missing. */
export declare function getMigrationFiles(migrationsDir: string): Promise<string[]>;
/**
 * Splits a migration file's text into its "-- UP" and "-- DOWN" sections.
 * A file without an "-- UP" marker is treated as up-only (down is empty).
 */
export declare function parseMigrationFile(content: string): {
    up: string;
    down: string;
};
/** Executes the up SQL and records the filename, all inside one transaction. */
export declare function applyMigration(pool: Pool, filename: string, upSql: string, tableName?: string): Promise<void>;
/** Executes the down SQL and deletes the filename's record, all inside one transaction. */
export declare function rollbackMigration(pool: Pool, filename: string, downSql: string, tableName?: string): Promise<void>;
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { fileExists } from '@stonyx/utils/file';
|
|
2
|
+
import fs from 'fs/promises';
|
|
3
|
+
import { validateIdentifier } from './query-builder.js';
|
|
4
|
+
/**
 * Creates the migrations bookkeeping table if it does not already exist.
 * The table name is validated first, since identifiers cannot be
 * parameterized in SQL.
 */
export async function ensureMigrationsTable(pool, tableName = '__migrations') {
    validateIdentifier(tableName, 'migration table name');
    const ddl = `
    CREATE TABLE IF NOT EXISTS "${tableName}" (
      id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
      filename VARCHAR(255) NOT NULL UNIQUE,
      applied_at TIMESTAMPTZ DEFAULT NOW()
    )
  `;
    await pool.query(ddl);
}
|
|
14
|
+
/**
 * Fetches the filenames of migrations already applied, in application
 * order (ascending primary key).
 */
export async function getAppliedMigrations(pool, tableName = '__migrations') {
    validateIdentifier(tableName, 'migration table name');
    const { rows } = await pool.query(`SELECT filename FROM "${tableName}" ORDER BY id ASC`);
    return rows.map(({ filename }) => filename);
}
|
|
19
|
+
/**
 * Lists the .sql migration files in a directory, sorted lexicographically
 * (timestamped filenames therefore sort chronologically). Resolves to an
 * empty array when the directory does not exist.
 */
export async function getMigrationFiles(migrationsDir) {
    if (!(await fileExists(migrationsDir))) {
        return [];
    }
    const names = await fs.readdir(migrationsDir);
    return names.filter(name => name.endsWith('.sql')).sort();
}
|
|
28
|
+
/**
 * Splits migration file text into its "-- UP" and "-- DOWN" sections.
 * A file with no "-- UP" marker is treated as a single up-only script;
 * a missing "-- DOWN" marker yields an empty down section.
 */
export function parseMigrationFile(content) {
    const UP = '-- UP';
    const DOWN = '-- DOWN';
    const upAt = content.indexOf(UP);
    if (upAt < 0) {
        return { up: content.trim(), down: '' };
    }
    const downAt = content.indexOf(DOWN);
    const hasDown = downAt >= 0;
    const up = content.slice(upAt + UP.length, hasDown ? downAt : content.length).trim();
    const down = hasDown ? content.slice(downAt + DOWN.length).trim() : '';
    return { up, down };
}
|
|
42
|
+
/**
 * Applies one migration atomically: runs each statement of the up SQL and
 * records the filename in the migrations table inside a single transaction.
 * Any failure rolls the whole migration back and rethrows.
 */
export async function applyMigration(pool, filename, upSql, tableName = '__migrations') {
    validateIdentifier(tableName, 'migration table name');
    const client = await pool.connect();
    try {
        await client.query('BEGIN');
        for (const statement of splitStatements(upSql)) {
            await client.query(statement);
        }
        await client.query(`INSERT INTO "${tableName}" (filename) VALUES ($1)`, [filename]);
        await client.query('COMMIT');
    } catch (error) {
        await client.query('ROLLBACK');
        throw error;
    } finally {
        client.release();
    }
}
|
|
62
|
+
/**
 * Reverts one migration atomically: runs each statement of the down SQL and
 * deletes the filename's record from the migrations table inside a single
 * transaction. Any failure rolls everything back and rethrows.
 */
export async function rollbackMigration(pool, filename, downSql, tableName = '__migrations') {
    validateIdentifier(tableName, 'migration table name');
    const client = await pool.connect();
    try {
        await client.query('BEGIN');
        for (const statement of splitStatements(downSql)) {
            await client.query(statement);
        }
        await client.query(`DELETE FROM "${tableName}" WHERE filename = $1`, [filename]);
        await client.query('COMMIT');
    } catch (error) {
        await client.query('ROLLBACK');
        throw error;
    } finally {
        client.release();
    }
}
|
|
82
|
+
/**
 * Splits a SQL script into individual executable statements.
 *
 * Splits naively on ';' (sufficient for generated migrations), removes SQL
 * line comments, and drops empty chunks.
 *
 * Bug fix: the previous version discarded any whole chunk that *started*
 * with '--', which silently dropped real statements preceded by a comment
 * line — e.g. the '-- Views' header the migration generator emits directly
 * above a CREATE VIEW statement. Comment lines are now stripped line-by-line
 * so the statement beneath them survives.
 */
function splitStatements(sql) {
    return sql
        .split(';')
        .map(chunk => chunk
            .split('\n')
            .filter(line => !line.trim().startsWith('--'))
            .join('\n')
            .trim())
        .filter(stmt => stmt.length > 0);
}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import { getPool, closePool } from './connection.js';
|
|
2
|
+
import { ensureMigrationsTable, getAppliedMigrations, getMigrationFiles, applyMigration, parseMigrationFile } from './migration-runner.js';
|
|
3
|
+
import { introspectModels, introspectViews, getTopologicalOrder, schemasToSnapshot } from './schema-introspector.js';
|
|
4
|
+
import { loadLatestSnapshot, detectSchemaDrift } from './migration-generator.js';
|
|
5
|
+
import { buildInsert, buildUpdate, buildDelete, buildSelect, buildVectorSearch, buildHybridSearch } from './query-builder.js';
|
|
6
|
+
import { store } from '@stonyx/orm';
|
|
7
|
+
import { createRecord } from '../manage-record.js';
|
|
8
|
+
import { confirm } from '@stonyx/utils/prompt';
|
|
9
|
+
import { readFile } from '@stonyx/utils/file';
|
|
10
|
+
import { getPluralName } from '../plural-registry.js';
|
|
11
|
+
import config from 'stonyx/config';
|
|
12
|
+
import log from 'stonyx/log';
|
|
13
|
+
import path from 'path';
|
|
14
|
+
import type { Pool } from 'pg';
|
|
15
|
+
import type { OrmRecord } from '../types/orm-types.js';
|
|
16
|
+
/** Context passed to persist(): the affected record and/or its id. */
interface PersistContext {
    record?: OrmRecord;
    recordId?: unknown;
    // Presumably a snapshot of attribute values before the operation
    // (for updates/rollback) — confirm against _persistUpdate.
    oldState?: Record<string, unknown>;
}
/** Response from the ORM layer; presumably carries the id of a newly created record — confirm against _persistCreate. */
interface PersistResponse {
    data?: {
        id: unknown;
    };
}
/** One similarity-search hit: the matching record and its cosine distance (see vectorSearch). */
interface SearchResult {
    record: OrmRecord;
    distance: number;
}
/** Options accepted by vectorSearch() and hybridSearch(). */
interface VectorSearchOptions {
    limit?: number;
    // Extra filter conditions applied alongside the similarity search;
    // NOTE(review): semantics (equality vs. richer operators) not visible here.
    where?: Record<string, unknown>;
}
|
|
34
|
+
/**
 * Injectable dependency bag for PostgresDB. Each member defaults to the real
 * module function; the constructor accepts a Partial<PostgresDeps>, so tests
 * can stub any individual collaborator.
 */
interface PostgresDeps {
    getPool: typeof getPool;
    closePool: typeof closePool;
    ensureMigrationsTable: typeof ensureMigrationsTable;
    getAppliedMigrations: typeof getAppliedMigrations;
    getMigrationFiles: typeof getMigrationFiles;
    applyMigration: typeof applyMigration;
    parseMigrationFile: typeof parseMigrationFile;
    introspectModels: typeof introspectModels;
    introspectViews: typeof introspectViews;
    getTopologicalOrder: typeof getTopologicalOrder;
    schemasToSnapshot: typeof schemasToSnapshot;
    loadLatestSnapshot: typeof loadLatestSnapshot;
    detectSchemaDrift: typeof detectSchemaDrift;
    buildInsert: typeof buildInsert;
    buildUpdate: typeof buildUpdate;
    buildDelete: typeof buildDelete;
    buildSelect: typeof buildSelect;
    buildVectorSearch: typeof buildVectorSearch;
    buildHybridSearch: typeof buildHybridSearch;
    createRecord: typeof createRecord;
    store: typeof store;
    confirm: typeof confirm;
    readFile: typeof readFile;
    getPluralName: typeof getPluralName;
    config: typeof config;
    log: typeof log;
    path: typeof path;
    // NOTE(review): this broad index signature weakens checking on the named
    // members above — consider removing it if no dynamic keys are required.
    [key: string]: unknown;
}
|
|
64
|
+
/**
 * Base PostgreSQL adapter declaration. Dialect subclasses (e.g. the Timescale
 * adapter) appear to customize behavior via the static `extensions` and
 * `configKey` hooks — confirm against the subclass implementations.
 */
export default class PostgresDB {
    /** PostgreSQL extensions to enable on pool init. Subclasses can override. */
    static extensions: string[];
    /** Config key under config.orm for this adapter. Subclasses can override. */
    static configKey: string;
    /** Singleton instance, set by subclass constructor name. */
    static instance: PostgresDB | undefined;
    // Injected collaborators; see PostgresDeps. Overridable per-instance via the constructor.
    deps: PostgresDeps;
    // pg connection pool; presumably null until init() creates it — confirm.
    pool: Pool | null;
    // Connection settings; presumably resolved from config.orm[configKey] — confirm.
    pgConfig: Record<string, unknown>;
    constructor(deps?: Partial<PostgresDeps>);
    // Presumably returns `pool` or throws when not initialized — confirm against implementation.
    protected requirePool(): Pool;
    init(): Promise<void>;
    startup(): Promise<void>;
    shutdown(): Promise<void>;
    save(): Promise<void>;
    /**
     * Loads only models with memory: true into the in-memory store on startup.
     * Models with memory: false are skipped -- accessed on-demand via find()/findAll().
     */
    loadMemoryRecords(): Promise<void>;
    /**
     * @deprecated Use loadMemoryRecords() instead. Kept for backward compatibility.
     */
    loadAllRecords(): Promise<void>;
    /**
     * Find a single record by ID from PostgreSQL.
     * Does NOT cache the result in the store for memory: false models.
     */
    findRecord(modelName: string, id: string | number): Promise<OrmRecord | undefined>;
    /**
     * Find all records of a model from PostgreSQL, with optional conditions.
     */
    findAll(modelName: string, conditions?: Record<string, unknown>): Promise<OrmRecord[]>;
    /**
     * Perform a vector similarity search using cosine distance.
     */
    vectorSearch(modelName: string, vectorColumn: string, queryVector: number[], options?: VectorSearchOptions): Promise<SearchResult[]>;
    /**
     * Perform a hybrid search combining vector similarity with text filtering.
     */
    hybridSearch(modelName: string, vectorColumn: string, queryVector: number[], textColumn: string, textQuery: string, options?: VectorSearchOptions): Promise<SearchResult[]>;
    /**
     * Remove a record from the in-memory store if its model has memory: false.
     * The record object itself survives -- the caller retains the reference.
     * @private
     */
    private _evictIfNotMemory;
    // Presumably maps a SQL row to raw record attributes — confirm against implementation.
    private _rowToRawData;
    // Dispatches a persistence operation to the matching _persist* handler;
    // NOTE(review): the accepted `operation` values are not visible in this declaration.
    persist(operation: string, modelName: string, context: PersistContext, response: PersistResponse): Promise<void>;
    private _persistCreate;
    private _persistUpdate;
    private _persistDelete;
    // Presumably the inverse of _rowToRawData (record attributes -> SQL row) — confirm.
    private _recordToRow;
}
export {};
|