@xubylele/schema-forge 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +322 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +1179 -0
- package/package.json +53 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,1179 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
// ---- esbuild CommonJS/ESM interop helpers (generated preamble; do not edit) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copy every own property of `from` onto `to` as a live getter, skipping
// `except` and keys `to` already defines, preserving enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
|
|
25
|
+
|
|
26
|
+
// src/cli.ts
|
|
27
|
+
var import_commander4 = require("commander");
|
|
28
|
+
|
|
29
|
+
// package.json
|
|
30
|
+
// Inlined copy of package.json, bundled in at build time so the CLI can
// report its own name/version without reading the file from disk at runtime.
var package_default = {
  name: "@xubylele/schema-forge",
  version: "0.3.1",
  description: "Universal migration generator from schema DSL",
  main: "dist/cli.js",
  type: "commonjs",
  bin: {
    schemaforge: "dist/cli.js"
  },
  scripts: {
    build: "tsup src/cli.ts --format cjs --dts",
    dev: "ts-node src/cli.ts",
    test: "vitest",
    prepublishOnly: "npm run build",
    changeset: "changeset",
    "version-packages": "changeset version",
    release: "changeset publish",
    "publish:public": "npm publish --access public"
  },
  keywords: [
    "cli",
    "schema",
    "sql",
    "generator",
    "migration",
    "database"
  ],
  author: "Xuby",
  license: "ISC",
  repository: {
    type: "git",
    url: "git+https://github.com/xubylele/schema-forge.git"
  },
  bugs: "https://github.com/xubylele/schema-forge/issues",
  homepage: "https://github.com/xubylele/schema-forge#readme",
  files: [
    "dist"
  ],
  engines: {
    node: ">=18.0.0"
  },
  dependencies: {
    commander: "^14.0.3"
  },
  devDependencies: {
    "@changesets/cli": "^2.29.8",
    "@types/node": "^25.2.3",
    "ts-node": "^10.9.2",
    tsup: "^8.5.1",
    typescript: "^5.9.3",
    vitest: "^4.0.18"
  }
};
|
|
83
|
+
|
|
84
|
+
// src/commands/diff.ts
|
|
85
|
+
var import_commander = require("commander");
|
|
86
|
+
var import_path4 = __toESM(require("path"));
|
|
87
|
+
|
|
88
|
+
// src/core/errors.ts
|
|
89
|
+
/**
 * Error thrown when a schema fails validation checks.
 * Carries a distinct `name` so callers can distinguish it from generic Errors.
 */
var SchemaValidationError = class extends Error {
  /** @param {string} message - Human-readable description of the failure. */
  constructor(message) {
    super(message);
    this.name = "SchemaValidationError";
  }
};
|
|
95
|
+
|
|
96
|
+
// src/core/diff.ts
|
|
97
|
+
/** Collect the table names recorded in a persisted state object. */
function getTableNamesFromState(state) {
  const names = new Set();
  Object.keys(state.tables).forEach((tableName) => names.add(tableName));
  return names;
}
|
|
100
|
+
/** Collect the table names declared in a parsed schema. */
function getTableNamesFromSchema(schema) {
  const names = new Set();
  for (const tableName of Object.keys(schema.tables)) {
    names.add(tableName);
  }
  return names;
}
|
|
103
|
+
/** Column names from a state table's column map (keyed by name). */
function getColumnNamesFromState(stateColumns) {
  const names = new Set();
  for (const columnName of Object.keys(stateColumns)) {
    names.add(columnName);
  }
  return names;
}
|
|
106
|
+
/** Column names from a schema table's column array. */
function getColumnNamesFromSchema(dbColumns) {
  const names = new Set();
  for (const { name } of dbColumns) {
    names.add(name);
  }
  return names;
}
|
|
109
|
+
/** Deterministic locale-aware ordering of a name set, as an array. */
function getSortedNames(names) {
  const sorted = [...names];
  sorted.sort((left, right) => left.localeCompare(right));
  return sorted;
}
|
|
112
|
+
/**
 * Compute the operations that transform a persisted state into the desired
 * schema. Operations are emitted in a deterministic order: new tables (sorted),
 * added columns, dropped columns, then dropped tables (sorted).
 */
function diffSchemas(oldState, newSchema) {
  const operations = [];
  const previousTables = getTableNamesFromState(oldState);
  const desiredTables = getTableNamesFromSchema(newSchema);
  const desiredSorted = getSortedNames(desiredTables);
  const previousSorted = getSortedNames(previousTables);
  // 1) Tables that only exist in the desired schema.
  for (const tableName of desiredSorted) {
    if (previousTables.has(tableName)) continue;
    operations.push({
      kind: "create_table",
      table: newSchema.tables[tableName]
    });
  }
  // Tables present on both sides, kept in sorted order.
  const commonTableNames = desiredSorted.filter(
    (tableName) => previousTables.has(tableName)
  );
  // 2) Columns added to existing tables.
  for (const tableName of commonTableNames) {
    const newTable = newSchema.tables[tableName];
    const oldTable = oldState.tables[tableName];
    if (!newTable || !oldTable) continue;
    const existingColumns = getColumnNamesFromState(oldTable.columns);
    for (const column of newTable.columns) {
      if (existingColumns.has(column.name)) continue;
      operations.push({
        kind: "add_column",
        tableName,
        column
      });
    }
  }
  // 3) Columns removed from existing tables.
  for (const tableName of commonTableNames) {
    const newTable = newSchema.tables[tableName];
    const oldTable = oldState.tables[tableName];
    if (!newTable || !oldTable) continue;
    const keptColumns = getColumnNamesFromSchema(newTable.columns);
    for (const columnName of Object.keys(oldTable.columns)) {
      if (keptColumns.has(columnName)) continue;
      operations.push({
        kind: "drop_column",
        tableName,
        columnName
      });
    }
  }
  // 4) Tables dropped entirely.
  for (const tableName of previousSorted) {
    if (desiredTables.has(tableName)) continue;
    operations.push({
      kind: "drop_table",
      tableName
    });
  }
  return { operations };
}
|
|
173
|
+
|
|
174
|
+
// src/core/fs.ts
|
|
175
|
+
var import_fs = require("fs");
|
|
176
|
+
var import_path = __toESM(require("path"));
|
|
177
|
+
/**
 * Create a directory (and any missing parents) if it does not already exist.
 * Wraps the underlying fs error with the offending path for context.
 * @param {string} dirPath - Directory to create.
 */
async function ensureDir(dirPath) {
  try {
    await import_fs.promises.mkdir(dirPath, { recursive: true });
  } catch (error) {
    throw new Error(`Failed to create directory ${dirPath}: ${error}`);
  }
}
|
|
184
|
+
/**
 * Check whether a path is accessible.
 * @param {string} filePath - Path to probe.
 * @returns {Promise<boolean>} true if fs.access succeeds; any access error
 *   (missing file, permissions) resolves to false rather than throwing.
 */
async function fileExists(filePath) {
  try {
    await import_fs.promises.access(filePath);
    return true;
  } catch {
    return false;
  }
}
|
|
192
|
+
/**
 * Read a file as UTF-8 text, wrapping failures with the path for context.
 * @param {string} filePath - File to read.
 * @returns {Promise<string>} File contents.
 */
async function readTextFile(filePath) {
  try {
    return await import_fs.promises.readFile(filePath, "utf-8");
  } catch (error) {
    throw new Error(`Failed to read file ${filePath}: ${error}`);
  }
}
|
|
199
|
+
/**
 * Write UTF-8 text to a file, creating the parent directory first so callers
 * never need to pre-create it.
 * @param {string} filePath - Destination path.
 * @param {string} content - Text to write.
 */
async function writeTextFile(filePath, content) {
  try {
    const dir = import_path.default.dirname(filePath);
    await ensureDir(dir);
    await import_fs.promises.writeFile(filePath, content, "utf-8");
  } catch (error) {
    throw new Error(`Failed to write file ${filePath}: ${error}`);
  }
}
|
|
208
|
+
/**
 * Read and parse a JSON file. A missing file yields `fallback`; a file that
 * exists but contains invalid JSON throws (wrapped with the path).
 */
async function readJsonFile(filePath, fallback) {
  try {
    if (!await fileExists(filePath)) {
      return fallback;
    }
    const content = await readTextFile(filePath);
    return JSON.parse(content);
  } catch (error) {
    throw new Error(`Failed to read JSON file ${filePath}: ${error}`);
  }
}
|
|
220
|
+
/**
 * Serialize `data` as pretty-printed (2-space) JSON and write it to disk.
 * The parent directory is created by writeTextFile if needed.
 * @param {string} filePath - Destination path.
 * @param {unknown} data - JSON-serializable value.
 */
async function writeJsonFile(filePath, data) {
  try {
    const content = JSON.stringify(data, null, 2);
    await writeTextFile(filePath, content);
  } catch (error) {
    throw new Error(`Failed to write JSON file ${filePath}: ${error}`);
  }
}
|
|
228
|
+
/**
 * Recursively collect full paths of files under `dirPath` whose base name
 * matches `pattern`. Directories are descended; any readdir failure aborts
 * the walk with a wrapped error.
 */
async function findFiles(dirPath, pattern) {
  const matches = [];
  try {
    const entries = await import_fs.promises.readdir(dirPath, { withFileTypes: true });
    for (const entry of entries) {
      const entryPath = import_path.default.join(dirPath, entry.name);
      if (entry.isDirectory()) {
        matches.push(...await findFiles(entryPath, pattern));
      } else if (entry.isFile() && pattern.test(entry.name)) {
        matches.push(entryPath);
      }
    }
  } catch (error) {
    throw new Error(`Failed to find files in ${dirPath}: ${error}`);
  }
  return matches;
}
|
|
246
|
+
|
|
247
|
+
// src/core/parser.ts
|
|
248
|
+
// src/core/parser.ts
/**
 * Loads, merges, and normalizes JSON-based schema definitions.
 */
var SchemaParser = class {
  /**
   * Parse a schema from a JSON file.
   * A missing file resolves to `{}` via the readJsonFile fallback, which now
   * normalizes to an empty schema instead of crashing (see normalizeSchema).
   */
  async parseSchemaFile(filePath) {
    try {
      const schema = await readJsonFile(filePath, {});
      return this.normalizeSchema(schema);
    } catch (error) {
      throw new Error(`Failed to parse schema file ${filePath}: ${error}`);
    }
  }
  /**
   * Parse multiple schema files from a directory.
   * Recursively scans for `*.schema.json`; unparseable files are skipped with
   * a warning so one bad file does not abort the whole scan.
   */
  async parseSchemaDirectory(dirPath) {
    const schemaFiles = await findFiles(dirPath, /\.schema\.json$/);
    const schemas = [];
    for (const file of schemaFiles) {
      try {
        const schema = await this.parseSchemaFile(file);
        schemas.push(schema);
      } catch (error) {
        console.warn(`Warning: Could not parse ${file}:`, error);
      }
    }
    return schemas;
  }
  /**
   * Merge multiple schemas into one.
   * version/database come from the first schema; duplicate table names keep
   * the first occurrence and emit a warning.
   */
  mergeSchemas(schemas) {
    if (schemas.length === 0) {
      throw new Error("Cannot merge empty schema array");
    }
    const baseSchema = schemas[0];
    const mergedTables = [];
    for (const schema of schemas) {
      for (const table of schema.tables) {
        const existingIndex = mergedTables.findIndex((t) => t.name === table.name);
        if (existingIndex >= 0) {
          console.warn(`Warning: Duplicate table '${table.name}' found, using first occurrence`);
        } else {
          mergedTables.push(table);
        }
      }
    }
    return {
      version: baseSchema.version,
      database: baseSchema.database,
      tables: mergedTables
    };
  }
  /**
   * Normalize schema to ensure consistent structure: default version/database,
   * boolean `required`/`unique` on every field, and `indexes`/`constraints`
   * arrays on every table.
   */
  normalizeSchema(schema) {
    return {
      version: schema.version || "1.0.0",
      database: schema.database || "postgres",
      // Fix: `schema.tables` (and `table.fields`) may be absent — e.g. when
      // parseSchemaFile falls back to `{}` for a missing file. Previously this
      // crashed with "Cannot read properties of undefined (reading 'map')".
      tables: (schema.tables ?? []).map((table) => ({
        ...table,
        fields: (table.fields ?? []).map((field) => ({
          ...field,
          required: field.required ?? false,
          unique: field.unique ?? false
        })),
        indexes: table.indexes || [],
        constraints: table.constraints || []
      }))
    };
  }
  /**
   * Convert schema to JSON string (pretty-printed with 2 spaces by default).
   */
  schemaToJson(schema, pretty = true) {
    return pretty ? JSON.stringify(schema, null, 2) : JSON.stringify(schema);
  }
  /**
   * Parse schema from a JSON string and normalize it.
   * @throws {Error} if the string is not valid JSON.
   */
  parseSchemaString(jsonString) {
    try {
      const schema = JSON.parse(jsonString);
      return this.normalizeSchema(schema);
    } catch (error) {
      throw new Error(`Failed to parse schema JSON: ${error}`);
    }
  }
};
|
|
338
|
+
var defaultParser = new SchemaParser();
|
|
339
|
+
/**
 * Parse the SchemaForge text DSL into a `{ tables }` map keyed by table name.
 *
 * Grammar (line oriented):
 *   table <name> {                                   — opens a table block
 *     <col> <type> [pk|unique|nullable|default <v>|fk <table>.<col>]...
 *   }                                                — closes the block
 * `//` and `#` start comments; blank lines are ignored.
 * Throws an Error carrying a 1-based line number on any syntax problem.
 */
function parseSchema(source) {
  const lines = source.split("\n");
  const tables = {};
  let currentLine = 0;
  // Column types the DSL accepts; anything else is a parse error.
  const validColumnTypes = /* @__PURE__ */ new Set([
    "uuid",
    "varchar",
    "text",
    "int",
    "boolean",
    "timestamptz",
    "date"
  ]);
  // Strip a trailing `//` or `#` comment, then trim surrounding whitespace.
  function cleanLine(line) {
    const commentIndex = line.search(/(?:\/\/|#)/);
    if (commentIndex !== -1) {
      line = line.substring(0, commentIndex);
    }
    return line.trim();
  }
  // Split a `table.column` reference; both halves must be non-empty.
  function parseForeignKey(fkRef, lineNum) {
    const parts = fkRef.split(".");
    if (parts.length !== 2 || !parts[0] || !parts[1]) {
      throw new Error(`Line ${lineNum}: Invalid foreign key format '${fkRef}'. Expected format: table.column`);
    }
    return {
      table: parts[0],
      column: parts[1]
    };
  }
  // Parse one `<name> <type> [modifiers...]` column line.
  function parseColumn(line, lineNum) {
    const tokens = line.split(/\s+/).filter((t) => t.length > 0);
    if (tokens.length < 2) {
      throw new Error(`Line ${lineNum}: Invalid column definition. Expected: <name> <type> [modifiers...]`);
    }
    const colName = tokens[0];
    const colType = tokens[1];
    if (!validColumnTypes.has(colType)) {
      throw new Error(`Line ${lineNum}: Invalid column type '${colType}'. Valid types: ${Array.from(validColumnTypes).join(", ")}`);
    }
    const column = {
      name: colName,
      type: colType
    };
    let i = 2;
    while (i < tokens.length) {
      const modifier = tokens[i];
      switch (modifier) {
        case "pk":
          column.primaryKey = true;
          i++;
          break;
        case "unique":
          column.unique = true;
          i++;
          break;
        case "nullable":
          column.nullable = true;
          i++;
          break;
        case "default":
          // The value is the single next token; multi-token defaults are
          // not supported by this tokenizer.
          i++;
          if (i >= tokens.length) {
            throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
          }
          column.default = tokens[i];
          i++;
          break;
        case "fk":
          i++;
          if (i >= tokens.length) {
            throw new Error(`Line ${lineNum}: 'fk' modifier requires a table.column reference`);
          }
          column.foreignKey = parseForeignKey(tokens[i], lineNum);
          i++;
          break;
        default:
          throw new Error(`Line ${lineNum}: Unknown modifier '${modifier}'`);
      }
    }
    return column;
  }
  // Parse a `table <name> { ... }` block starting at `startLine`;
  // returns the line index of the closing brace.
  function parseTableBlock(startLine) {
    const firstLine = cleanLine(lines[startLine]);
    const match = firstLine.match(/^table\s+(\w+)\s*\{?\s*$/);
    if (!match) {
      throw new Error(`Line ${startLine + 1}: Invalid table definition. Expected: table <name> {`);
    }
    const tableName = match[1];
    if (tables[tableName]) {
      throw new Error(`Line ${startLine + 1}: Duplicate table definition '${tableName}'`);
    }
    const columns = [];
    let lineIdx = startLine + 1;
    let foundClosingBrace = false;
    while (lineIdx < lines.length) {
      const cleaned = cleanLine(lines[lineIdx]);
      if (!cleaned) {
        lineIdx++;
        continue;
      }
      if (cleaned === "}") {
        foundClosingBrace = true;
        break;
      }
      // Fix: dropped a try/catch that immediately rethrew the same error —
      // pure noise with no behavioral effect.
      columns.push(parseColumn(cleaned, lineIdx + 1));
      lineIdx++;
    }
    if (!foundClosingBrace) {
      throw new Error(`Line ${startLine + 1}: Table '${tableName}' block not closed (missing '}')`);
    }
    tables[tableName] = {
      name: tableName,
      columns
    };
    return lineIdx;
  }
  while (currentLine < lines.length) {
    const cleaned = cleanLine(lines[currentLine]);
    if (!cleaned) {
      currentLine++;
      continue;
    }
    if (cleaned.startsWith("table ")) {
      currentLine = parseTableBlock(currentLine);
    } else {
      throw new Error(`Line ${currentLine + 1}: Unexpected content '${cleaned}'. Expected table definition.`);
    }
    currentLine++;
  }
  return { tables };
}
|
|
476
|
+
|
|
477
|
+
// src/core/paths.ts
|
|
478
|
+
var import_path2 = __toESM(require("path"));
|
|
479
|
+
/**
 * Resolve the project root. Currently the working directory is returned
 * verbatim; the indirection keeps call sites stable if root discovery
 * (e.g. walking up to a marker file) is added later.
 */
function getProjectRoot(cwd = process.cwd()) {
  return cwd;
}
|
|
482
|
+
/** Path of the `schemaforge/` directory under the project root. */
function getSchemaForgeDir(root) {
  return import_path2.default.join(root, "schemaforge");
}
|
|
485
|
+
/**
 * Path of the DSL schema file inside the schemaforge directory.
 * `config.schemaFile` overrides the default "schema.sf" when set and non-empty.
 */
function getSchemaFilePath(root, config) {
  const fileName = config?.schemaFile || "schema.sf";
  return import_path2.default.join(getSchemaForgeDir(root), fileName);
}
|
|
490
|
+
/** Path of `config.json` inside the schemaforge directory. */
function getConfigPath(root) {
  return import_path2.default.join(getSchemaForgeDir(root), "config.json");
}
|
|
494
|
+
/**
 * Path of the persisted state file inside the schemaforge directory.
 * `config.stateFile` overrides the default "state.json" when set and non-empty.
 */
function getStatePath(root, config) {
  const fileName = config?.stateFile || "state.json";
  return import_path2.default.join(getSchemaForgeDir(root), fileName);
}
|
|
499
|
+
|
|
500
|
+
// src/core/state-manager.ts
|
|
501
|
+
var import_path3 = __toESM(require("path"));
|
|
502
|
+
/**
 * Manages a SchemaForge project's configuration file
 * (`schemaforge.config.json`) and derived directory layout.
 * Most methods require loadConfig()/initializeProject() to have run first.
 */
var StateManager = class {
  /**
   * @param {string} root - Project root used to resolve the schema/output/
   *   migration directories; defaults to the current working directory.
   */
  constructor(root = process.cwd()) {
    // Loaded configuration; null until loadConfig/initializeProject succeeds.
    this.config = null;
    this.root = root;
  }
  /**
   * Initialize a new SchemaForge project
   */
  async initializeProject(directory = ".", force = false) {
    const configPath = import_path3.default.join(directory, "schemaforge.config.json");
    // Refuse to clobber an existing project unless explicitly forced.
    if (await fileExists(configPath) && !force) {
      throw new Error("SchemaForge project already initialized. Use --force to overwrite.");
    }
    const defaultConfig = {
      version: "1.0.0",
      database: "postgres",
      schemaDir: "schemas",
      outputDir: "output",
      migrationDir: "migrations"
    };
    await writeJsonFile(configPath, defaultConfig);
    await ensureDir(import_path3.default.join(directory, defaultConfig.schemaDir));
    await ensureDir(import_path3.default.join(directory, defaultConfig.outputDir));
    await ensureDir(import_path3.default.join(directory, defaultConfig.migrationDir));
    // Seed the schema directory with a working example so users have a template.
    const exampleSchema = {
      version: "1.0.0",
      database: "postgres",
      tables: [
        {
          name: "users",
          fields: [
            { name: "id", type: "uuid", required: true, unique: true },
            { name: "email", type: "string", required: true, unique: true, length: 255 },
            { name: "name", type: "string", required: true, length: 255 },
            { name: "created_at", type: "datetime", required: true }
          ],
          indexes: [
            { name: "idx_users_email", fields: ["email"], unique: true }
          ]
        }
      ]
    };
    const exampleSchemaPath = import_path3.default.join(
      directory,
      defaultConfig.schemaDir,
      "example.schema.json"
    );
    await writeJsonFile(exampleSchemaPath, exampleSchema);
    this.config = defaultConfig;
  }
  /**
   * Load configuration from file
   */
  async loadConfig(directory = ".") {
    const configPath = import_path3.default.join(directory, "schemaforge.config.json");
    if (!await fileExists(configPath)) {
      throw new Error('SchemaForge project not initialized. Run "schemaforge init" first.');
    }
    // Fallback `{}` is unreachable here (existence was just checked);
    // invalid JSON still throws from readJsonFile.
    this.config = await readJsonFile(configPath, {});
    return this.config;
  }
  /**
   * Save configuration to file
   */
  async saveConfig(config, directory = ".") {
    const configPath = import_path3.default.join(directory, "schemaforge.config.json");
    await writeJsonFile(configPath, config);
    this.config = config;
  }
  /**
   * Get current configuration
   */
  getConfig() {
    return this.config;
  }
  /**
   * Update configuration
   */
  updateConfig(updates) {
    if (!this.config) {
      throw new Error("No configuration loaded");
    }
    // In-memory merge only; call saveConfig to persist.
    this.config = { ...this.config, ...updates };
  }
  /**
   * Check if project is initialized
   */
  async isInitialized(directory = ".") {
    const configPath = import_path3.default.join(directory, "schemaforge.config.json");
    return await fileExists(configPath);
  }
  /**
   * Get schema directory path
   */
  getSchemaDir() {
    if (!this.config) {
      throw new Error("No configuration loaded");
    }
    return import_path3.default.join(this.root, this.config.schemaDir);
  }
  /**
   * Get output directory path
   */
  getOutputDir() {
    if (!this.config) {
      throw new Error("No configuration loaded");
    }
    return import_path3.default.join(this.root, this.config.outputDir);
  }
  /**
   * Get migration directory path
   */
  getMigrationDir() {
    if (!this.config) {
      throw new Error("No configuration loaded");
    }
    return import_path3.default.join(this.root, this.config.migrationDir);
  }
};
|
|
621
|
+
/**
 * Snapshot a parsed schema (tables keyed by name, columns as arrays) into the
 * persisted state shape (columns keyed by name). Only explicitly-set optional
 * column attributes are copied, so the state stays minimal.
 */
async function schemaToState(schema) {
  const tables = {};
  for (const [tableName, table] of Object.entries(schema.tables)) {
    const columns = {};
    for (const column of table.columns) {
      const snapshot = { type: column.type };
      if (column.primaryKey !== void 0) snapshot.primaryKey = column.primaryKey;
      if (column.unique !== void 0) snapshot.unique = column.unique;
      if (column.nullable !== void 0) snapshot.nullable = column.nullable;
      if (column.default !== void 0) snapshot.default = column.default;
      if (column.foreignKey !== void 0) snapshot.foreignKey = column.foreignKey;
      columns[column.name] = snapshot;
    }
    tables[tableName] = { columns };
  }
  return {
    version: 1,
    tables
  };
}
|
|
642
|
+
/**
 * Load persisted migration state from disk. A missing file yields an empty
 * initial state (version 1, no tables) instead of an error.
 */
async function loadState(statePath) {
  return await readJsonFile(statePath, { version: 1, tables: {} });
}
|
|
645
|
+
/**
 * Persist migration state as pretty-printed JSON, creating the parent
 * directory if needed.
 */
async function saveState(statePath, state) {
  const dirPath = import_path3.default.dirname(statePath);
  // NOTE(review): writeJsonFile already ensures the parent directory via
  // writeTextFile, so this explicit ensureDir is redundant but harmless.
  await ensureDir(dirPath);
  await writeJsonFile(statePath, state);
}
|
|
650
|
+
var defaultStateManager = new StateManager();
|
|
651
|
+
|
|
652
|
+
// src/core/validator.ts
|
|
653
|
+
// src/core/validator.ts
/**
 * Structural validator for JSON schemas. Collects `{ path, message, severity }`
 * records instead of throwing, so callers can report every problem at once.
 */
var SchemaValidator = class {
  /**
   * Validate a complete schema.
   * @returns {{valid: boolean, errors: Array}} `valid` is false iff any
   *   collected entry has severity "error".
   */
  validateSchema(schema) {
    const errors = [];
    if (!schema.version) {
      errors.push({
        path: "schema.version",
        message: "Schema version is required",
        severity: "error"
      });
    }
    if (!schema.database) {
      errors.push({
        path: "schema.database",
        message: "Database type is required",
        severity: "error"
      });
    }
    if (!schema.tables || schema.tables.length === 0) {
      errors.push({
        path: "schema.tables",
        message: "Schema must contain at least one table",
        severity: "error"
      });
    }
    if (schema.tables) {
      const tableNames = /* @__PURE__ */ new Set();
      for (let i = 0; i < schema.tables.length; i++) {
        const table = schema.tables[i];
        const tableErrors = this.validateTable(table, i);
        errors.push(...tableErrors);
        if (tableNames.has(table.name)) {
          errors.push({
            path: `schema.tables[${i}].name`,
            message: `Duplicate table name: ${table.name}`,
            severity: "error"
          });
        }
        tableNames.add(table.name);
      }
      errors.push(...this.validateReferences(schema));
    }
    return {
      valid: errors.filter((e) => e.severity === "error").length === 0,
      errors
    };
  }
  /**
   * Validate a table: name presence and identifier format, at least one
   * field, duplicate field names, and each field individually.
   */
  validateTable(table, tableIndex) {
    const errors = [];
    const basePath = `schema.tables[${tableIndex}]`;
    if (!table.name || table.name.trim() === "") {
      errors.push({
        path: `${basePath}.name`,
        message: "Table name is required",
        severity: "error"
      });
    }
    if (table.name && !/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(table.name)) {
      errors.push({
        path: `${basePath}.name`,
        message: `Invalid table name '${table.name}': must start with letter or underscore and contain only alphanumeric characters and underscores`,
        severity: "error"
      });
    }
    if (!table.fields || table.fields.length === 0) {
      errors.push({
        path: `${basePath}.fields`,
        message: `Table '${table.name}' must have at least one field`,
        severity: "error"
      });
    }
    if (table.fields) {
      const fieldNames = /* @__PURE__ */ new Set();
      for (let i = 0; i < table.fields.length; i++) {
        const field = table.fields[i];
        const fieldErrors = this.validateField(field, basePath, i);
        errors.push(...fieldErrors);
        if (fieldNames.has(field.name)) {
          errors.push({
            path: `${basePath}.fields[${i}].name`,
            message: `Duplicate field name: ${field.name}`,
            severity: "error"
          });
        }
        fieldNames.add(field.name);
      }
    }
    return errors;
  }
  /**
   * Validate a field: name presence and identifier format, type presence,
   * enum values for enum types, and a positive length for string types.
   */
  validateField(field, tablePath, fieldIndex) {
    const errors = [];
    const basePath = `${tablePath}.fields[${fieldIndex}]`;
    if (!field.name || field.name.trim() === "") {
      errors.push({
        path: `${basePath}.name`,
        message: "Field name is required",
        severity: "error"
      });
    }
    if (field.name && !/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(field.name)) {
      errors.push({
        path: `${basePath}.name`,
        message: `Invalid field name '${field.name}': must start with letter or underscore and contain only alphanumeric characters and underscores`,
        severity: "error"
      });
    }
    if (!field.type) {
      errors.push({
        path: `${basePath}.type`,
        message: "Field type is required",
        severity: "error"
      });
    }
    if (field.type === "enum") {
      if (!field.enumValues || field.enumValues.length === 0) {
        errors.push({
          path: `${basePath}.enumValues`,
          message: "Enum type requires enumValues array",
          severity: "error"
        });
      }
    }
    // Fix: the old guard `field.length && field.length <= 0` let a declared
    // `length: 0` slip through because 0 is falsy, contradicting the message
    // below. Check definedness explicitly so zero is rejected; null/undefined
    // still mean "no length declared".
    if (field.type === "string" && field.length != null && field.length <= 0) {
      errors.push({
        path: `${basePath}.length`,
        message: "String length must be greater than 0",
        severity: "error"
      });
    }
    return errors;
  }
  /**
   * Validate foreign key references: every `field.references` must point to
   * an existing table and an existing field within it.
   */
  validateReferences(schema) {
    const errors = [];
    const tableNames = new Set(schema.tables.map((t) => t.name));
    for (let i = 0; i < schema.tables.length; i++) {
      const table = schema.tables[i];
      for (let j = 0; j < table.fields.length; j++) {
        const field = table.fields[j];
        if (field.references) {
          const refTable = field.references.table;
          const refField = field.references.field;
          if (!tableNames.has(refTable)) {
            errors.push({
              path: `schema.tables[${i}].fields[${j}].references.table`,
              message: `Referenced table '${refTable}' does not exist`,
              severity: "error"
            });
          } else {
            const referencedTable = schema.tables.find((t) => t.name === refTable);
            if (referencedTable) {
              const referencedField = referencedTable.fields.find((f) => f.name === refField);
              if (!referencedField) {
                errors.push({
                  path: `schema.tables[${i}].fields[${j}].references.field`,
                  message: `Referenced field '${refField}' does not exist in table '${refTable}'`,
                  severity: "error"
                });
              }
            }
          }
        }
      }
    }
    return errors;
  }
};
|
830
|
+
var defaultValidator = new SchemaValidator();
|
|
831
|
+
var VALID_COLUMN_TYPES = ["uuid", "varchar", "text", "int", "boolean", "timestamptz", "date"];
|
|
832
|
+
/**
 * Validate a parsed DSL schema (tables keyed by name), throwing on the first
 * problem found. Complements the collect-all SchemaValidator class used for
 * JSON schemas.
 */
function validateSchema(schema) {
  validateDuplicateTables(schema);
  for (const tableName in schema.tables) {
    const table = schema.tables[tableName];
    validateTableColumns(tableName, table, schema.tables);
  }
}
|
|
839
|
+
/**
 * Guard against duplicate table names.
 * NOTE(review): `Object.keys` can never yield the same key twice, so this
 * check is effectively a no-op for an object-keyed schema — duplicates would
 * have to be caught at parse time. Kept unchanged for behavioral parity.
 */
function validateDuplicateTables(schema) {
  const seen = /* @__PURE__ */ new Set();
  for (const tableName of Object.keys(schema.tables)) {
    if (seen.has(tableName)) {
      throw new Error(`Tabla duplicada: '${tableName}'`);
    }
    seen.add(tableName);
  }
}
|
|
849
|
+
/**
 * Validate a single table's columns: no duplicate column names, at most one
 * primary key, only supported types, and foreign keys that point at an
 * existing table/column. Throws on the first violation (Spanish messages,
 * matching the rest of this validator).
 *
 * @param {string} tableName - Name of the table being validated.
 * @param {{columns: Array}} table - Table definition.
 * @param {Object} allTables - All tables keyed by name, for FK lookups.
 */
function validateTableColumns(tableName, table, allTables) {
  const columnNames = /* @__PURE__ */ new Set();
  let primaryKeyCount = 0;
  for (const column of table.columns) {
    if (columnNames.has(column.name)) {
      throw new Error(`Tabla '${tableName}': columna duplicada '${column.name}'`);
    }
    columnNames.add(column.name);
    if (column.primaryKey) {
      primaryKeyCount++;
    }
    if (!VALID_COLUMN_TYPES.includes(column.type)) {
      throw new Error(
        `Tabla '${tableName}', columna '${column.name}': tipo '${column.type}' no es v\xE1lido. Tipos soportados: ${VALID_COLUMN_TYPES.join(", ")}`
      );
    }
    if (column.foreignKey) {
      const fkTable = column.foreignKey.table;
      const fkColumn = column.foreignKey.column;
      // Fix: use an own-property check so inherited Object.prototype keys
      // (e.g. a table named "toString") are not mistaken for real tables,
      // which previously crashed on `.columns` below. The truthiness check
      // is kept so a null/undefined table entry is still reported as missing.
      if (!Object.prototype.hasOwnProperty.call(allTables, fkTable) || !allTables[fkTable]) {
        throw new Error(
          `Tabla '${tableName}', columna '${column.name}': tabla referenciada '${fkTable}' no existe`
        );
      }
      const referencedTable = allTables[fkTable];
      const columnExists = referencedTable.columns.some((col) => col.name === fkColumn);
      if (!columnExists) {
        throw new Error(
          `Tabla '${tableName}', columna '${column.name}': tabla '${fkTable}' no tiene columna '${fkColumn}'`
        );
      }
    }
  }
  if (primaryKeyCount > 1) {
    throw new Error(`Tabla '${tableName}': solo puede tener una primary key (encontradas ${primaryKeyCount})`);
  }
}
|
|
886
|
+
|
|
887
|
+
// src/generator/sql-generator.ts
|
|
888
|
+
/**
 * Render every diff operation as SQL and join the statements with a blank
 * line. Operations that render to a falsy value are dropped.
 */
function generateSql(diff, provider, sqlConfig) {
  const statements = diff.operations
    .map((operation) => generateOperation(operation, provider, sqlConfig))
    .filter((sql) => sql);
  return statements.join("\n\n");
}
|
|
898
|
+
/**
 * Render a single diff operation as SQL. Returns undefined for unknown
 * operation kinds (the caller filters out falsy results).
 */
function generateOperation(operation, provider, sqlConfig) {
  const kind = operation.kind;
  if (kind === "create_table") {
    return generateCreateTable(operation.table, provider, sqlConfig);
  }
  if (kind === "drop_table") {
    return generateDropTable(operation.tableName);
  }
  if (kind === "add_column") {
    return generateAddColumn(operation.tableName, operation.column, provider, sqlConfig);
  }
  if (kind === "drop_column") {
    return generateDropColumn(operation.tableName, operation.columnName);
  }
}
|
|
910
|
+
/**
 * Build a multi-line CREATE TABLE statement, one column definition per line,
 * with a trailing comma on every column except the last.
 */
function generateCreateTable(table, provider, sqlConfig) {
  const columnDefs = table.columns.map((col) => generateColumnDefinition(col, provider, sqlConfig));
  const body = columnDefs.map((colDef, index) => {
    const separator = index < columnDefs.length - 1 ? "," : "";
    return "  " + colDef + separator;
  });
  return ["CREATE TABLE " + table.name + " (", ...body, ");"].join("\n");
}
|
|
922
|
+
/**
 * Render one column definition fragment (name, type, constraints, default)
 * for CREATE TABLE / ADD COLUMN.
 *
 * @param {Object} column - Column descriptor (name, type, optional
 *   foreignKey/primaryKey/unique/nullable/default flags).
 * @param {string} provider - "postgres" | "supabase".
 * @param {{uuidDefault?: string}|undefined} sqlConfig - SQL generation knobs
 *   from the project config.
 * @returns {string} Space-joined column definition.
 */
function generateColumnDefinition(column, provider, sqlConfig) {
  const parts = [column.name, column.type];
  if (column.foreignKey) {
    parts.push(
      `references ${column.foreignKey.table}(${column.foreignKey.column})`
    );
  }
  if (column.primaryKey) {
    parts.push("primary key");
  }
  if (column.unique) {
    parts.push("unique");
  }
  if (column.nullable === false) {
    parts.push("not null");
  }
  if (column.default !== void 0) {
    parts.push("default " + column.default);
  } else if (column.type === "uuid" && column.primaryKey && provider === "supabase") {
    // Fix: honor sqlConfig.uuidDefault (written by `init` but previously
    // ignored here); falls back to the old hard-coded value, so existing
    // configs generate identical SQL.
    parts.push("default " + (sqlConfig?.uuidDefault ?? "gen_random_uuid()"));
  }
  // NOTE(review): sqlConfig.timestampDefault is still unused — the rule for
  // when to apply it is not visible in this file, so it is left alone.
  return parts.join(" ");
}
|
|
945
|
+
/** Build a DROP TABLE statement for the given table name. */
function generateDropTable(tableName) {
  const statement = "DROP TABLE " + tableName + ";";
  return statement;
}
|
|
948
|
+
/** Build an ALTER TABLE ... ADD COLUMN statement using the shared column renderer. */
function generateAddColumn(tableName, column, provider, sqlConfig) {
  const colDef = generateColumnDefinition(column, provider, sqlConfig);
  return "ALTER TABLE " + tableName + " ADD COLUMN " + colDef + ";";
}
|
|
952
|
+
/** Build an ALTER TABLE ... DROP COLUMN statement. */
function generateDropColumn(tableName, columnName) {
  const statement = "ALTER TABLE " + tableName + " DROP COLUMN " + columnName + ";";
  return statement;
}
|
|
955
|
+
|
|
956
|
+
// src/commands/diff.ts
|
|
957
|
+
// Config keys that `runDiff` requires to be present, non-empty strings.
var REQUIRED_CONFIG_FIELDS = ["schemaFile", "stateFile"];
|
|
958
|
+
/** Resolve a config-file path against the project root unless it is already absolute. */
function resolveConfigPath(root, targetPath) {
  if (import_path4.default.isAbsolute(targetPath)) {
    return targetPath;
  }
  return import_path4.default.join(root, targetPath);
}
|
|
961
|
+
/**
 * `schemaforge diff`: parse and validate the schema file, diff it against
 * the saved state, and print the resulting migration SQL to stdout.
 * Throws if the project is not initialized or the config is invalid.
 * NOTE(review): unlike `runGenerate`, this does not write files or persist
 * new state — it is a read-only preview.
 */
async function runDiff() {
  const root = getProjectRoot();
  const configPath = getConfigPath(root);
  // `!await x` parses as `!(await x)`: bail out when no config file exists.
  if (!await fileExists(configPath)) {
    throw new Error('SchemaForge project not initialized. Run "schemaforge init" first.');
  }
  const config = await readJsonFile(configPath, {});
  // Required fields must be present, non-empty strings.
  for (const field of REQUIRED_CONFIG_FIELDS) {
    const value = config[field];
    if (!value || typeof value !== "string") {
      throw new Error(`Invalid config: '${field}' is required`);
    }
  }
  const schemaPath = resolveConfigPath(root, config.schemaFile);
  const statePath = resolveConfigPath(root, config.stateFile);
  // Only two providers are supported; an absent provider defaults to postgres.
  if (config.provider && config.provider !== "postgres" && config.provider !== "supabase") {
    throw new Error(`Unsupported provider '${config.provider}'.`);
  }
  const provider = config.provider ?? "postgres";
  const schemaSource = await readTextFile(schemaPath);
  const schema = parseSchema(schemaSource);
  // Re-wrap validation failures so the CLI maps them to exit code 2.
  try {
    validateSchema(schema);
  } catch (error) {
    if (error instanceof Error) {
      throw new SchemaValidationError(error.message);
    }
    throw error;
  }
  const previousState = await loadState(statePath);
  const diff = diffSchemas(previousState, schema);
  if (diff.operations.length === 0) {
    console.log("No changes detected");
    return;
  }
  const sql = generateSql(diff, provider, config.sql);
  console.log(sql);
}
|
|
999
|
+
|
|
1000
|
+
// src/commands/generate.ts
|
|
1001
|
+
var import_commander2 = require("commander");
|
|
1002
|
+
var import_path5 = __toESM(require("path"));
|
|
1003
|
+
|
|
1004
|
+
// src/core/utils.ts
|
|
1005
|
+
/**
 * Current local time as a 14-digit `YYYYMMDDhhmmss` string, used to prefix
 * migration file names so they sort chronologically.
 */
function nowTimestamp() {
  const now = /* @__PURE__ */ new Date();
  const two = (value) => String(value).padStart(2, "0");
  const pieces = [
    String(now.getFullYear()),
    two(now.getMonth() + 1),
    two(now.getDate()),
    two(now.getHours()),
    two(now.getMinutes()),
    two(now.getSeconds())
  ];
  return pieces.join("");
}
|
|
1010
|
+
/**
 * Turn a user-supplied migration name into a lowercase, dash-separated slug;
 * falls back to "migration" when nothing survives normalization.
 */
function slugifyName(name) {
  const normalized = name
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "-")
    .replace(/^-+|-+$/g, "");
  return normalized || "migration";
}
|
|
1013
|
+
|
|
1014
|
+
// src/commands/generate.ts
|
|
1015
|
+
// Config keys that `runGenerate` requires to be present, non-empty strings.
var REQUIRED_CONFIG_FIELDS2 = [
  "schemaFile",
  "stateFile",
  "outputDir"
];
|
|
1020
|
+
/** Resolve a config-file path against the project root unless it is already absolute. */
function resolveConfigPath2(root, targetPath) {
  if (import_path5.default.isAbsolute(targetPath)) {
    return targetPath;
  }
  return import_path5.default.join(root, targetPath);
}
|
|
1023
|
+
/**
 * `schemaforge generate`: parse and validate the schema, diff it against the
 * saved state, write a timestamped migration file into outputDir, and save
 * the new state so subsequent diffs start from it.
 *
 * @param {{name?: string}} options - Optional migration name (slugified into
 *   the output file name).
 */
async function runGenerate(options) {
  const root = getProjectRoot();
  const configPath = getConfigPath(root);
  // `!await x` parses as `!(await x)`: bail out when no config file exists.
  if (!await fileExists(configPath)) {
    throw new Error('SchemaForge project not initialized. Run "schemaforge init" first.');
  }
  const config = await readJsonFile(configPath, {});
  // Required fields must be present, non-empty strings.
  for (const field of REQUIRED_CONFIG_FIELDS2) {
    const value = config[field];
    if (!value || typeof value !== "string") {
      throw new Error(`Invalid config: '${field}' is required`);
    }
  }
  const schemaPath = resolveConfigPath2(root, config.schemaFile);
  const statePath = resolveConfigPath2(root, config.stateFile);
  const outputDir = resolveConfigPath2(root, config.outputDir);
  // Only two providers are supported; an absent provider defaults to postgres.
  if (config.provider && config.provider !== "postgres" && config.provider !== "supabase") {
    throw new Error(`Unsupported provider '${config.provider}'.`);
  }
  const provider = config.provider ?? "postgres";
  if (!config.provider) {
    console.log("Provider not set; defaulting to postgres.");
  }
  console.log("Generating SQL...");
  const schemaSource = await readTextFile(schemaPath);
  const schema = parseSchema(schemaSource);
  // Re-wrap validation failures so the CLI maps them to exit code 2.
  try {
    validateSchema(schema);
  } catch (error) {
    if (error instanceof Error) {
      throw new SchemaValidationError(error.message);
    }
    throw error;
  }
  const previousState = await loadState(statePath);
  const diff = diffSchemas(previousState, schema);
  if (diff.operations.length === 0) {
    console.log("No changes detected");
    return;
  }
  const sql = generateSql(diff, provider, config.sql);
  // File name: <YYYYMMDDhhmmss>-<slug>.sql
  const timestamp = nowTimestamp();
  const slug = slugifyName(options.name ?? "migration");
  const fileName = `${timestamp}-${slug}.sql`;
  await ensureDir(outputDir);
  const migrationPath = import_path5.default.join(outputDir, fileName);
  await writeTextFile(migrationPath, sql + "\n");
  // Persist the new state only after the migration file was written.
  const nextState = await schemaToState(schema);
  await saveState(statePath, nextState);
  console.log(`\u2713 SQL generated successfully: ${migrationPath}`);
}
|
|
1074
|
+
|
|
1075
|
+
// src/commands/init.ts
|
|
1076
|
+
var import_commander3 = require("commander");
|
|
1077
|
+
/**
 * `schemaforge init`: scaffold a new project — the schemaforge/ directory,
 * a starter schema file, the JSON config, an empty state file, and the
 * migrations output directory. Prints an error and exits with code 1 if any
 * target already exists.
 */
async function runInit() {
  const root = getProjectRoot();
  const schemaForgeDir = getSchemaForgeDir(root);
  if (await fileExists(schemaForgeDir)) {
    console.error("Error: schemaforge/ directory already exists");
    console.error("Please remove it or run init in a different directory");
    process.exit(1);
  }
  const schemaFilePath = getSchemaFilePath(root);
  const configPath = getConfigPath(root);
  const statePath = getStatePath(root);
  // Refuse to clobber any pre-existing file.
  if (await fileExists(schemaFilePath)) {
    console.error(`Error: ${schemaFilePath} already exists`);
    process.exit(1);
  }
  if (await fileExists(configPath)) {
    console.error(`Error: ${configPath} already exists`);
    process.exit(1);
  }
  if (await fileExists(statePath)) {
    console.error(`Error: ${statePath} already exists`);
    process.exit(1);
  }
  console.log("Initializing schema project...");
  await ensureDir(schemaForgeDir);
  const schemaContent = `# SchemaForge schema definition
# Run: schemaforge generate

table users {
  id uuid pk
  created_at timestamptz default now()
}
`;
  await writeTextFile(schemaFilePath, schemaContent);
  console.log(`\u2713 Created ${schemaFilePath}`);
  const config = {
    provider: "supabase",
    outputDir: "supabase/migrations",
    schemaFile: "schemaforge/schema.sf",
    stateFile: "schemaforge/state.json",
    sql: {
      uuidDefault: "gen_random_uuid()",
      timestampDefault: "now()"
    }
  };
  await writeJsonFile(configPath, config);
  console.log(`\u2713 Created ${configPath}`);
  const state = {
    version: 1,
    tables: {}
  };
  await writeJsonFile(statePath, state);
  console.log(`\u2713 Created ${statePath}`);
  // Fix: reuse config.outputDir instead of repeating the literal, so the
  // created directory can never drift from the value written to the config.
  // NOTE(review): this path is created relative to the process CWD rather
  // than joined with `root` — confirm getProjectRoot() === process.cwd().
  const outputDir = config.outputDir;
  await ensureDir(outputDir);
  console.log(`\u2713 Created ${outputDir}`);
  console.log("\n\u2713 Project initialized successfully");
  console.log("Next steps:");
  console.log(" 1. Edit schemaforge/schema.sf to define your schema");
  console.log(" 2. Run: schemaforge generate");
}
|
|
1138
|
+
|
|
1139
|
+
// src/cli.ts
|
|
1140
|
+
// Root commander program; subcommands are registered below.
var program = new import_commander4.Command();
// CLI version is read from the bundled package.json (package_default).
program.name("schemaforge").description("CLI tool for schema management and SQL generation").version(package_default.version);
|
|
1142
|
+
/**
 * Map a thrown error to a printed message and process exit code:
 * schema-validation failures exit 2, everything else exits 1.
 * Sets process.exitCode (deferred exit) rather than calling process.exit.
 */
function handleError(error) {
  if (error instanceof SchemaValidationError) {
    console.error(error.message);
    process.exitCode = 2;
    return;
  }
  console.error(error instanceof Error ? error.message : "Unexpected error");
  process.exitCode = 1;
}
|
|
1155
|
+
// `schemaforge init` — scaffold the schemaforge/ config, schema and state files.
program.command("init").description("Initialize a new schema project").action(async () => {
  try {
    await runInit();
  } catch (error) {
    handleError(error);
  }
});
// `schemaforge generate` — write a timestamped migration file and persist state.
program.command("generate").description("Generate SQL from schema files").option("--name <string>", "Schema name to generate").action(async (options) => {
  try {
    await runGenerate(options);
  } catch (error) {
    handleError(error);
  }
});
// `schemaforge diff` — print migration SQL without writing files or state.
program.command("diff").description("Compare two schema versions and generate migration SQL").action(async () => {
  try {
    await runDiff();
  } catch (error) {
    handleError(error);
  }
});
program.parse(process.argv);
// With no subcommand or arguments, show usage instead of doing nothing.
if (!process.argv.slice(2).length) {
  program.outputHelp();
}
|