@xubylele/schema-forge 1.4.0 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +2 -0
  2. package/dist/cli.js +1716 -1811
  3. package/package.json +3 -2
package/dist/cli.js CHANGED
@@ -6,6 +6,13 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
6
6
  var __getOwnPropNames = Object.getOwnPropertyNames;
7
7
  var __getProtoOf = Object.getPrototypeOf;
8
8
  var __hasOwnProp = Object.prototype.hasOwnProperty;
9
+ var __esm = (fn, res) => function __init() {
10
+ return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
11
+ };
12
+ var __export = (target, all) => {
13
+ for (var name in all)
14
+ __defProp(target, name, { get: all[name], enumerable: true });
15
+ };
9
16
  var __copyProps = (to, from, except, desc) => {
10
17
  if (from && typeof from === "object" || typeof from === "function") {
11
18
  for (let key of __getOwnPropNames(from))
@@ -23,71 +30,182 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
23
30
  mod
24
31
  ));
25
32
 
26
- // src/cli.ts
27
- var import_commander6 = require("commander");
28
-
29
- // package.json
30
- var package_default = {
31
- name: "@xubylele/schema-forge",
32
- version: "1.4.0",
33
- description: "Universal migration generator from schema DSL",
34
- main: "dist/cli.js",
35
- type: "commonjs",
36
- bin: {
37
- "schema-forge": "dist/cli.js"
38
- },
39
- scripts: {
40
- build: "tsup src/cli.ts --format cjs --dts",
41
- dev: "ts-node src/cli.ts",
42
- test: "vitest",
43
- prepublishOnly: "npm run build",
44
- "publish:public": "npm publish --access public",
45
- changeset: "changeset",
46
- "version-packages": "changeset version",
47
- release: "changeset publish"
48
- },
49
- keywords: [
50
- "cli",
51
- "schema",
52
- "sql",
53
- "generator",
54
- "migration",
55
- "database"
56
- ],
57
- author: "Xuby",
58
- license: "ISC",
59
- repository: {
60
- type: "git",
61
- url: "git+https://github.com/xubylele/schema-forge.git"
62
- },
63
- bugs: "https://github.com/xubylele/schema-forge/issues",
64
- homepage: "https://github.com/xubylele/schema-forge#readme",
65
- files: [
66
- "dist"
67
- ],
68
- engines: {
69
- node: ">=18.0.0"
70
- },
71
- dependencies: {
72
- boxen: "^8.0.1",
73
- chalk: "^5.6.2",
74
- commander: "^14.0.3"
75
- },
76
- devDependencies: {
77
- "@changesets/cli": "^2.29.8",
78
- "@types/node": "^25.2.3",
79
- "ts-node": "^10.9.2",
80
- tsup: "^8.5.1",
81
- typescript: "^5.9.3",
82
- vitest: "^4.0.18"
33
+ // node_modules/@xubylele/schema-forge-core/dist/core/parser.js
34
+ function parseSchema(source) {
35
+ const lines = source.split("\n");
36
+ const tables = {};
37
+ let currentLine = 0;
38
+ const validBaseColumnTypes = /* @__PURE__ */ new Set([
39
+ "uuid",
40
+ "varchar",
41
+ "text",
42
+ "int",
43
+ "bigint",
44
+ "boolean",
45
+ "timestamptz",
46
+ "date"
47
+ ]);
48
+ function normalizeColumnType3(type) {
49
+ return type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
83
50
  }
84
- };
85
-
86
- // src/commands/diff.ts
87
- var import_commander = require("commander");
88
- var import_path4 = __toESM(require("path"));
51
+ function isValidColumnType2(type) {
52
+ const normalizedType = normalizeColumnType3(type);
53
+ if (validBaseColumnTypes.has(normalizedType)) {
54
+ return true;
55
+ }
56
+ return /^varchar\(\d+\)$/.test(normalizedType) || /^numeric\(\d+,\d+\)$/.test(normalizedType);
57
+ }
58
+ function cleanLine(line) {
59
+ const commentIndex = line.search(/(?:\/\/|#)/);
60
+ if (commentIndex !== -1) {
61
+ line = line.substring(0, commentIndex);
62
+ }
63
+ return line.trim();
64
+ }
65
+ function parseForeignKey(fkRef, lineNum) {
66
+ const parts = fkRef.split(".");
67
+ if (parts.length !== 2 || !parts[0] || !parts[1]) {
68
+ throw new Error(`Line ${lineNum}: Invalid foreign key format '${fkRef}'. Expected format: table.column`);
69
+ }
70
+ return {
71
+ table: parts[0],
72
+ column: parts[1]
73
+ };
74
+ }
75
+ function parseColumn(line, lineNum) {
76
+ const tokens = line.split(/\s+/).filter((t) => t.length > 0);
77
+ const modifiers = /* @__PURE__ */ new Set(["pk", "unique", "nullable", "default", "fk"]);
78
+ if (tokens.length < 2) {
79
+ throw new Error(`Line ${lineNum}: Invalid column definition. Expected: <name> <type> [modifiers...]`);
80
+ }
81
+ const colName = tokens[0];
82
+ const colType = normalizeColumnType3(tokens[1]);
83
+ if (!isValidColumnType2(colType)) {
84
+ throw new Error(`Line ${lineNum}: Invalid column type '${tokens[1]}'. Valid types: ${Array.from(validBaseColumnTypes).join(", ")}, varchar(n), numeric(p,s)`);
85
+ }
86
+ const column = {
87
+ name: colName,
88
+ type: colType,
89
+ nullable: true
90
+ };
91
+ let i = 2;
92
+ while (i < tokens.length) {
93
+ const modifier = tokens[i];
94
+ switch (modifier) {
95
+ case "pk":
96
+ column.primaryKey = true;
97
+ i++;
98
+ break;
99
+ case "unique":
100
+ column.unique = true;
101
+ i++;
102
+ break;
103
+ case "nullable":
104
+ column.nullable = true;
105
+ i++;
106
+ break;
107
+ case "not":
108
+ if (tokens[i + 1] !== "null") {
109
+ throw new Error(`Line ${lineNum}: Unknown modifier 'not'`);
110
+ }
111
+ column.nullable = false;
112
+ i += 2;
113
+ break;
114
+ case "default":
115
+ i++;
116
+ if (i >= tokens.length) {
117
+ throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
118
+ }
119
+ {
120
+ const defaultTokens = [];
121
+ while (i < tokens.length && !modifiers.has(tokens[i])) {
122
+ defaultTokens.push(tokens[i]);
123
+ i++;
124
+ }
125
+ if (defaultTokens.length === 0) {
126
+ throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
127
+ }
128
+ column.default = defaultTokens.join(" ");
129
+ }
130
+ break;
131
+ case "fk":
132
+ i++;
133
+ if (i >= tokens.length) {
134
+ throw new Error(`Line ${lineNum}: 'fk' modifier requires a table.column reference`);
135
+ }
136
+ column.foreignKey = parseForeignKey(tokens[i], lineNum);
137
+ i++;
138
+ break;
139
+ default:
140
+ throw new Error(`Line ${lineNum}: Unknown modifier '${modifier}'`);
141
+ }
142
+ }
143
+ return column;
144
+ }
145
+ function parseTableBlock(startLine) {
146
+ const firstLine = cleanLine(lines[startLine]);
147
+ const match = firstLine.match(/^table\s+(\w+)\s*\{?\s*$/);
148
+ if (!match) {
149
+ throw new Error(`Line ${startLine + 1}: Invalid table definition. Expected: table <name> {`);
150
+ }
151
+ const tableName = match[1];
152
+ if (tables[tableName]) {
153
+ throw new Error(`Line ${startLine + 1}: Duplicate table definition '${tableName}'`);
154
+ }
155
+ const columns = [];
156
+ let lineIdx = startLine + 1;
157
+ let foundClosingBrace = false;
158
+ while (lineIdx < lines.length) {
159
+ const cleaned = cleanLine(lines[lineIdx]);
160
+ if (!cleaned) {
161
+ lineIdx++;
162
+ continue;
163
+ }
164
+ if (cleaned === "}") {
165
+ foundClosingBrace = true;
166
+ break;
167
+ }
168
+ try {
169
+ const column = parseColumn(cleaned, lineIdx + 1);
170
+ columns.push(column);
171
+ } catch (error2) {
172
+ throw error2;
173
+ }
174
+ lineIdx++;
175
+ }
176
+ if (!foundClosingBrace) {
177
+ throw new Error(`Line ${startLine + 1}: Table '${tableName}' block not closed (missing '}')`);
178
+ }
179
+ const primaryKeyColumn = columns.find((column) => column.primaryKey)?.name ?? null;
180
+ tables[tableName] = {
181
+ name: tableName,
182
+ columns,
183
+ ...primaryKeyColumn !== null && { primaryKey: primaryKeyColumn }
184
+ };
185
+ return lineIdx;
186
+ }
187
+ while (currentLine < lines.length) {
188
+ const cleaned = cleanLine(lines[currentLine]);
189
+ if (!cleaned) {
190
+ currentLine++;
191
+ continue;
192
+ }
193
+ if (cleaned.startsWith("table ")) {
194
+ currentLine = parseTableBlock(currentLine);
195
+ } else {
196
+ throw new Error(`Line ${currentLine + 1}: Unexpected content '${cleaned}'. Expected table definition.`);
197
+ }
198
+ currentLine++;
199
+ }
200
+ return { tables };
201
+ }
202
+ var init_parser = __esm({
203
+ "node_modules/@xubylele/schema-forge-core/dist/core/parser.js"() {
204
+ "use strict";
205
+ }
206
+ });
89
207
 
90
- // src/core/normalize.ts
208
+ // node_modules/@xubylele/schema-forge-core/dist/core/normalize.js
91
209
  function normalizeIdent(input) {
92
210
  return input.trim().toLowerCase().replace(/[^a-z0-9]+/g, "_").replace(/_+/g, "_").replace(/^_+|_+$/g, "");
93
211
  }
@@ -229,8 +347,13 @@ function normalizeDefault(expr) {
229
347
  const normalizedPunctuation = normalizePunctuationOutsideQuotes(normalizedSpacing);
230
348
  return normalizeKnownFunctionsOutsideQuotes(normalizedPunctuation);
231
349
  }
350
+ var init_normalize = __esm({
351
+ "node_modules/@xubylele/schema-forge-core/dist/core/normalize.js"() {
352
+ "use strict";
353
+ }
354
+ });
232
355
 
233
- // src/core/diff.ts
356
+ // node_modules/@xubylele/schema-forge-core/dist/core/diff.js
234
357
  function getTableNamesFromState(state) {
235
358
  return new Set(Object.keys(state.tables));
236
359
  }
@@ -272,9 +395,7 @@ function diffSchemas(oldState, newSchema) {
272
395
  });
273
396
  }
274
397
  }
275
- const commonTableNames = sortedNewTableNames.filter(
276
- (tableName) => oldTableNames.has(tableName)
277
- );
398
+ const commonTableNames = sortedNewTableNames.filter((tableName) => oldTableNames.has(tableName));
278
399
  for (const tableName of commonTableNames) {
279
400
  const newTable = newSchema.tables[tableName];
280
401
  const oldTable = oldState.tables[tableName];
@@ -446,65 +567,301 @@ function diffSchemas(oldState, newSchema) {
446
567
  }
447
568
  return { operations };
448
569
  }
449
-
450
- // src/core/errors.ts
451
- var SchemaValidationError = class extends Error {
452
- constructor(message) {
453
- super(message);
454
- this.name = "SchemaValidationError";
570
+ var init_diff = __esm({
571
+ "node_modules/@xubylele/schema-forge-core/dist/core/diff.js"() {
572
+ "use strict";
573
+ init_normalize();
455
574
  }
456
- };
575
+ });
457
576
 
458
- // src/core/fs.ts
459
- var import_fs = require("fs");
460
- var import_path = __toESM(require("path"));
461
- async function ensureDir(dirPath) {
462
- try {
463
- await import_fs.promises.mkdir(dirPath, { recursive: true });
464
- } catch (error2) {
465
- throw new Error(`Failed to create directory ${dirPath}: ${error2}`);
466
- }
467
- }
468
- async function fileExists(filePath) {
469
- try {
470
- await import_fs.promises.access(filePath);
577
+ // node_modules/@xubylele/schema-forge-core/dist/core/validator.js
578
+ function isValidColumnType(type) {
579
+ const normalizedType = type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
580
+ if (VALID_BASE_COLUMN_TYPES.includes(normalizedType)) {
471
581
  return true;
472
- } catch {
473
- return false;
474
582
  }
583
+ return /^varchar\(\d+\)$/.test(normalizedType) || /^numeric\(\d+,\d+\)$/.test(normalizedType);
475
584
  }
476
- async function readTextFile(filePath) {
585
+ function validateSchema(schema) {
586
+ validateDuplicateTables(schema);
587
+ for (const tableName in schema.tables) {
588
+ const table = schema.tables[tableName];
589
+ validateTableColumns(tableName, table, schema.tables);
590
+ }
591
+ }
592
+ function validateDuplicateTables(schema) {
593
+ const tableNames = Object.keys(schema.tables);
594
+ const seen = /* @__PURE__ */ new Set();
595
+ for (const tableName of tableNames) {
596
+ if (seen.has(tableName)) {
597
+ throw new Error(`Duplicate table: '${tableName}'`);
598
+ }
599
+ seen.add(tableName);
600
+ }
601
+ }
602
+ function validateTableColumns(tableName, table, allTables) {
603
+ const columnNames = /* @__PURE__ */ new Set();
604
+ const primaryKeyColumns = [];
605
+ for (const column of table.columns) {
606
+ if (columnNames.has(column.name)) {
607
+ throw new Error(`Table '${tableName}': duplicate column '${column.name}'`);
608
+ }
609
+ columnNames.add(column.name);
610
+ if (column.primaryKey) {
611
+ primaryKeyColumns.push(column.name);
612
+ }
613
+ if (!isValidColumnType(column.type)) {
614
+ throw new Error(`Table '${tableName}', column '${column.name}': type '${column.type}' is not valid. Supported types: ${VALID_BASE_COLUMN_TYPES.join(", ")}, varchar(n), numeric(p,s)`);
615
+ }
616
+ if (column.foreignKey) {
617
+ const fkTable = column.foreignKey.table;
618
+ const fkColumn = column.foreignKey.column;
619
+ if (!allTables[fkTable]) {
620
+ throw new Error(`Table '${tableName}', column '${column.name}': referenced table '${fkTable}' does not exist`);
621
+ }
622
+ const referencedTable = allTables[fkTable];
623
+ const columnExists = referencedTable.columns.some((col) => col.name === fkColumn);
624
+ if (!columnExists) {
625
+ throw new Error(`Table '${tableName}', column '${column.name}': table '${fkTable}' does not have column '${fkColumn}'`);
626
+ }
627
+ }
628
+ }
629
+ if (primaryKeyColumns.length > 1) {
630
+ throw new Error(`Table '${tableName}': can only have one primary key (found ${primaryKeyColumns.length})`);
631
+ }
632
+ const normalizedPrimaryKey = table.primaryKey ?? primaryKeyColumns[0] ?? null;
633
+ if (table.primaryKey && !columnNames.has(table.primaryKey)) {
634
+ throw new Error(`Table '${tableName}': primary key column '${table.primaryKey}' does not exist`);
635
+ }
636
+ if (table.primaryKey && primaryKeyColumns.length === 1 && primaryKeyColumns[0] !== table.primaryKey) {
637
+ throw new Error(`Table '${tableName}': column-level primary key '${primaryKeyColumns[0]}' does not match table primary key '${table.primaryKey}'`);
638
+ }
639
+ if (normalizedPrimaryKey) {
640
+ const pkMatches = table.columns.filter((column) => column.name === normalizedPrimaryKey);
641
+ if (pkMatches.length !== 1) {
642
+ throw new Error(`Table '${tableName}': primary key column '${normalizedPrimaryKey}' is invalid`);
643
+ }
644
+ }
645
+ }
646
+ var VALID_BASE_COLUMN_TYPES;
647
+ var init_validator = __esm({
648
+ "node_modules/@xubylele/schema-forge-core/dist/core/validator.js"() {
649
+ "use strict";
650
+ VALID_BASE_COLUMN_TYPES = [
651
+ "uuid",
652
+ "varchar",
653
+ "text",
654
+ "int",
655
+ "bigint",
656
+ "boolean",
657
+ "timestamptz",
658
+ "date"
659
+ ];
660
+ }
661
+ });
662
+
663
+ // node_modules/@xubylele/schema-forge-core/dist/core/validate.js
664
+ function normalizeColumnType2(type) {
665
+ return type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
666
+ }
667
+ function parseVarcharLength(type) {
668
+ const match = normalizeColumnType2(type).match(/^varchar\((\d+)\)$/);
669
+ return match ? Number(match[1]) : null;
670
+ }
671
+ function parseNumericType(type) {
672
+ const match = normalizeColumnType2(type).match(/^numeric\((\d+),(\d+)\)$/);
673
+ if (!match) {
674
+ return null;
675
+ }
676
+ return {
677
+ precision: Number(match[1]),
678
+ scale: Number(match[2])
679
+ };
680
+ }
681
+ function classifyTypeChange(from, to) {
682
+ const fromType = normalizeColumnType2(from);
683
+ const toType = normalizeColumnType2(to);
684
+ const uuidInvolved = fromType === "uuid" || toType === "uuid";
685
+ if (uuidInvolved && fromType !== toType) {
686
+ return {
687
+ severity: "error",
688
+ message: `Type changed from ${fromType} to ${toType} (likely incompatible cast)`
689
+ };
690
+ }
691
+ if (fromType === "int" && toType === "bigint") {
692
+ return {
693
+ severity: "warning",
694
+ message: "Type widened from int to bigint"
695
+ };
696
+ }
697
+ if (fromType === "bigint" && toType === "int") {
698
+ return {
699
+ severity: "error",
700
+ message: "Type narrowed from bigint to int (likely incompatible cast)"
701
+ };
702
+ }
703
+ if (fromType === "text" && parseVarcharLength(toType) !== null) {
704
+ return {
705
+ severity: "error",
706
+ message: `Type changed from text to ${toType} (may truncate existing values)`
707
+ };
708
+ }
709
+ if (parseVarcharLength(fromType) !== null && toType === "text") {
710
+ return {
711
+ severity: "warning",
712
+ message: "Type widened from varchar(n) to text"
713
+ };
714
+ }
715
+ const fromVarcharLength = parseVarcharLength(fromType);
716
+ const toVarcharLength = parseVarcharLength(toType);
717
+ if (fromVarcharLength !== null && toVarcharLength !== null) {
718
+ if (toVarcharLength >= fromVarcharLength) {
719
+ return {
720
+ severity: "warning",
721
+ message: `Type widened from varchar(${fromVarcharLength}) to varchar(${toVarcharLength})`
722
+ };
723
+ }
724
+ return {
725
+ severity: "error",
726
+ message: `Type narrowed from varchar(${fromVarcharLength}) to varchar(${toVarcharLength})`
727
+ };
728
+ }
729
+ const fromNumeric = parseNumericType(fromType);
730
+ const toNumeric = parseNumericType(toType);
731
+ if (fromNumeric && toNumeric && fromNumeric.scale === toNumeric.scale) {
732
+ if (toNumeric.precision >= fromNumeric.precision) {
733
+ return {
734
+ severity: "warning",
735
+ message: `Type widened from numeric(${fromNumeric.precision},${fromNumeric.scale}) to numeric(${toNumeric.precision},${toNumeric.scale})`
736
+ };
737
+ }
738
+ return {
739
+ severity: "error",
740
+ message: `Type narrowed from numeric(${fromNumeric.precision},${fromNumeric.scale}) to numeric(${toNumeric.precision},${toNumeric.scale})`
741
+ };
742
+ }
743
+ return {
744
+ severity: "warning",
745
+ message: `Type changed from ${fromType} to ${toType} (compatibility unknown)`
746
+ };
747
+ }
748
+ function validateSchemaChanges(previousState, currentSchema) {
749
+ const findings = [];
750
+ const diff = diffSchemas(previousState, currentSchema);
751
+ for (const operation of diff.operations) {
752
+ switch (operation.kind) {
753
+ case "drop_table":
754
+ findings.push({
755
+ severity: "error",
756
+ code: "DROP_TABLE",
757
+ table: operation.tableName,
758
+ message: "Table removed"
759
+ });
760
+ break;
761
+ case "drop_column":
762
+ findings.push({
763
+ severity: "error",
764
+ code: "DROP_COLUMN",
765
+ table: operation.tableName,
766
+ column: operation.columnName,
767
+ message: "Column removed"
768
+ });
769
+ break;
770
+ case "column_type_changed": {
771
+ const classification = classifyTypeChange(operation.fromType, operation.toType);
772
+ findings.push({
773
+ severity: classification.severity,
774
+ code: "ALTER_COLUMN_TYPE",
775
+ table: operation.tableName,
776
+ column: operation.columnName,
777
+ from: normalizeColumnType2(operation.fromType),
778
+ to: normalizeColumnType2(operation.toType),
779
+ message: classification.message
780
+ });
781
+ break;
782
+ }
783
+ case "column_nullability_changed":
784
+ if (operation.from && !operation.to) {
785
+ findings.push({
786
+ severity: "warning",
787
+ code: "SET_NOT_NULL",
788
+ table: operation.tableName,
789
+ column: operation.columnName,
790
+ message: "Column changed to NOT NULL (may fail if data contains NULLs)"
791
+ });
792
+ }
793
+ break;
794
+ default:
795
+ break;
796
+ }
797
+ }
798
+ return findings;
799
+ }
800
+ function toValidationReport(findings) {
801
+ const errors = findings.filter((finding) => finding.severity === "error");
802
+ const warnings = findings.filter((finding) => finding.severity === "warning");
803
+ return {
804
+ hasErrors: errors.length > 0,
805
+ hasWarnings: warnings.length > 0,
806
+ errors: errors.map(({ severity, ...finding }) => finding),
807
+ warnings: warnings.map(({ severity, ...finding }) => finding)
808
+ };
809
+ }
810
+ var init_validate = __esm({
811
+ "node_modules/@xubylele/schema-forge-core/dist/core/validate.js"() {
812
+ "use strict";
813
+ init_diff();
814
+ }
815
+ });
816
+
817
+ // node_modules/@xubylele/schema-forge-core/dist/core/fs.js
818
+ async function ensureDir2(dirPath) {
477
819
  try {
478
- return await import_fs.promises.readFile(filePath, "utf-8");
820
+ await import_fs2.promises.mkdir(dirPath, { recursive: true });
821
+ } catch (error2) {
822
+ throw new Error(`Failed to create directory ${dirPath}: ${error2}`);
823
+ }
824
+ }
825
+ async function fileExists2(filePath) {
826
+ try {
827
+ await import_fs2.promises.access(filePath);
828
+ return true;
829
+ } catch {
830
+ return false;
831
+ }
832
+ }
833
+ async function readTextFile2(filePath) {
834
+ try {
835
+ return await import_fs2.promises.readFile(filePath, "utf-8");
479
836
  } catch (error2) {
480
837
  throw new Error(`Failed to read file ${filePath}: ${error2}`);
481
838
  }
482
839
  }
483
- async function writeTextFile(filePath, content) {
840
+ async function writeTextFile2(filePath, content) {
484
841
  try {
485
- const dir = import_path.default.dirname(filePath);
486
- await ensureDir(dir);
487
- await import_fs.promises.writeFile(filePath, content, "utf-8");
842
+ const dir = import_path3.default.dirname(filePath);
843
+ await ensureDir2(dir);
844
+ await import_fs2.promises.writeFile(filePath, content, "utf-8");
488
845
  } catch (error2) {
489
846
  throw new Error(`Failed to write file ${filePath}: ${error2}`);
490
847
  }
491
848
  }
492
- async function readJsonFile(filePath, fallback) {
849
+ async function readJsonFile2(filePath, fallback) {
493
850
  try {
494
- const exists = await fileExists(filePath);
851
+ const exists = await fileExists2(filePath);
495
852
  if (!exists) {
496
853
  return fallback;
497
854
  }
498
- const content = await readTextFile(filePath);
855
+ const content = await readTextFile2(filePath);
499
856
  return JSON.parse(content);
500
857
  } catch (error2) {
501
858
  throw new Error(`Failed to read JSON file ${filePath}: ${error2}`);
502
859
  }
503
860
  }
504
- async function writeJsonFile(filePath, data) {
861
+ async function writeJsonFile2(filePath, data) {
505
862
  try {
506
863
  const content = JSON.stringify(data, null, 2);
507
- await writeTextFile(filePath, content);
864
+ await writeTextFile2(filePath, content);
508
865
  } catch (error2) {
509
866
  throw new Error(`Failed to write JSON file ${filePath}: ${error2}`);
510
867
  }
@@ -512,9 +869,9 @@ async function writeJsonFile(filePath, data) {
512
869
  async function findFiles(dirPath, pattern) {
513
870
  const results = [];
514
871
  try {
515
- const items = await import_fs.promises.readdir(dirPath, { withFileTypes: true });
872
+ const items = await import_fs2.promises.readdir(dirPath, { withFileTypes: true });
516
873
  for (const item of items) {
517
- const fullPath = import_path.default.join(dirPath, item.name);
874
+ const fullPath = import_path3.default.join(dirPath, item.name);
518
875
  if (item.isDirectory()) {
519
876
  const subResults = await findFiles(fullPath, pattern);
520
877
  results.push(...subResults);
@@ -527,456 +884,16 @@ async function findFiles(dirPath, pattern) {
527
884
  }
528
885
  return results;
529
886
  }
530
-
531
- // src/utils/output.ts
532
- var import_boxen = __toESM(require("boxen"));
533
- var import_chalk = require("chalk");
534
- var isInteractive = Boolean(process.stdout?.isTTY);
535
- var colorsEnabled = isInteractive && process.env.FORCE_COLOR !== "0" && !("NO_COLOR" in process.env);
536
- var color = new import_chalk.Chalk({ level: colorsEnabled ? 3 : 0 });
537
- var theme = {
538
- primary: color.cyanBright,
539
- success: color.hex("#00FF88"),
540
- warning: color.hex("#FFD166"),
541
- error: color.hex("#EF476F"),
542
- accent: color.magentaBright
543
- };
544
- function success(message) {
545
- const text = theme.success(`[OK] ${message}`);
546
- if (!isInteractive) {
547
- console.log(text);
548
- return;
887
+ var import_fs2, import_path3;
888
+ var init_fs = __esm({
889
+ "node_modules/@xubylele/schema-forge-core/dist/core/fs.js"() {
890
+ "use strict";
891
+ import_fs2 = require("fs");
892
+ import_path3 = __toESM(require("path"), 1);
549
893
  }
550
- try {
551
- console.log(
552
- (0, import_boxen.default)(text, {
553
- padding: 1,
554
- borderColor: "cyan",
555
- borderStyle: "round"
556
- })
557
- );
558
- } catch {
559
- console.log(text);
560
- }
561
- }
562
- function info(message) {
563
- console.log(theme.primary(message));
564
- }
565
- function warning(message) {
566
- console.warn(theme.warning(`[WARN] ${message}`));
567
- }
568
- function error(message) {
569
- console.error(theme.error(`[ERROR] ${message}`));
570
- }
571
-
572
- // src/core/parser.ts
573
- var SchemaParser = class {
574
- /**
575
- * Parse a schema from a JSON file
576
- */
577
- async parseSchemaFile(filePath) {
578
- try {
579
- const schema = await readJsonFile(filePath, {});
580
- return this.normalizeSchema(schema);
581
- } catch (error2) {
582
- throw new Error(`Failed to parse schema file ${filePath}: ${error2}`);
583
- }
584
- }
585
- /**
586
- * Parse multiple schema files from a directory
587
- */
588
- async parseSchemaDirectory(dirPath) {
589
- const schemaFiles = await findFiles(dirPath, /\.schema\.json$/);
590
- const schemas = [];
591
- for (const file of schemaFiles) {
592
- try {
593
- const schema = await this.parseSchemaFile(file);
594
- schemas.push(schema);
595
- } catch (error2) {
596
- const reason = error2 instanceof Error ? error2.message : String(error2);
597
- warning(`Could not parse ${file}: ${reason}`);
598
- }
599
- }
600
- return schemas;
601
- }
602
- /**
603
- * Merge multiple schemas into one
604
- */
605
- mergeSchemas(schemas) {
606
- if (schemas.length === 0) {
607
- throw new Error("Cannot merge empty schema array");
608
- }
609
- const baseSchema = schemas[0];
610
- const mergedTables = [];
611
- for (const schema of schemas) {
612
- for (const table of schema.tables) {
613
- const existingIndex = mergedTables.findIndex((t) => t.name === table.name);
614
- if (existingIndex >= 0) {
615
- warning(`Duplicate table '${table.name}' found, using first occurrence`);
616
- } else {
617
- mergedTables.push(table);
618
- }
619
- }
620
- }
621
- return {
622
- version: baseSchema.version,
623
- database: baseSchema.database,
624
- tables: mergedTables
625
- };
626
- }
627
- /**
628
- * Normalize schema to ensure consistent structure
629
- */
630
- normalizeSchema(schema) {
631
- return {
632
- version: schema.version || "1.0.0",
633
- database: schema.database || "postgres",
634
- tables: schema.tables.map((table) => ({
635
- ...table,
636
- fields: table.fields.map((field) => ({
637
- ...field,
638
- required: field.required ?? false,
639
- unique: field.unique ?? false
640
- })),
641
- indexes: table.indexes || [],
642
- constraints: table.constraints || []
643
- }))
644
- };
645
- }
646
- /**
647
- * Convert schema to JSON string
648
- */
649
- schemaToJson(schema, pretty = true) {
650
- return pretty ? JSON.stringify(schema, null, 2) : JSON.stringify(schema);
651
- }
652
- /**
653
- * Parse schema from JSON string
654
- */
655
- parseSchemaString(jsonString) {
656
- try {
657
- const schema = JSON.parse(jsonString);
658
- return this.normalizeSchema(schema);
659
- } catch (error2) {
660
- throw new Error(`Failed to parse schema JSON: ${error2}`);
661
- }
662
- }
663
- };
664
- var defaultParser = new SchemaParser();
665
- function parseSchema(source) {
666
- const lines = source.split("\n");
667
- const tables = {};
668
- let currentLine = 0;
669
- const validBaseColumnTypes = /* @__PURE__ */ new Set([
670
- "uuid",
671
- "varchar",
672
- "text",
673
- "int",
674
- "bigint",
675
- "boolean",
676
- "timestamptz",
677
- "date"
678
- ]);
679
- function normalizeColumnType3(type) {
680
- return type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
681
- }
682
- function isValidColumnType2(type) {
683
- const normalizedType = normalizeColumnType3(type);
684
- if (validBaseColumnTypes.has(normalizedType)) {
685
- return true;
686
- }
687
- return /^varchar\(\d+\)$/.test(normalizedType) || /^numeric\(\d+,\d+\)$/.test(normalizedType);
688
- }
689
- function cleanLine(line) {
690
- const commentIndex = line.search(/(?:\/\/|#)/);
691
- if (commentIndex !== -1) {
692
- line = line.substring(0, commentIndex);
693
- }
694
- return line.trim();
695
- }
696
- function parseForeignKey(fkRef, lineNum) {
697
- const parts = fkRef.split(".");
698
- if (parts.length !== 2 || !parts[0] || !parts[1]) {
699
- throw new Error(`Line ${lineNum}: Invalid foreign key format '${fkRef}'. Expected format: table.column`);
700
- }
701
- return {
702
- table: parts[0],
703
- column: parts[1]
704
- };
705
- }
706
- function parseColumn(line, lineNum) {
707
- const tokens = line.split(/\s+/).filter((t) => t.length > 0);
708
- const modifiers = /* @__PURE__ */ new Set(["pk", "unique", "nullable", "default", "fk"]);
709
- if (tokens.length < 2) {
710
- throw new Error(`Line ${lineNum}: Invalid column definition. Expected: <name> <type> [modifiers...]`);
711
- }
712
- const colName = tokens[0];
713
- const colType = normalizeColumnType3(tokens[1]);
714
- if (!isValidColumnType2(colType)) {
715
- throw new Error(
716
- `Line ${lineNum}: Invalid column type '${tokens[1]}'. Valid types: ${Array.from(validBaseColumnTypes).join(", ")}, varchar(n), numeric(p,s)`
717
- );
718
- }
719
- const column = {
720
- name: colName,
721
- type: colType,
722
- nullable: true
723
- };
724
- let i = 2;
725
- while (i < tokens.length) {
726
- const modifier = tokens[i];
727
- switch (modifier) {
728
- case "pk":
729
- column.primaryKey = true;
730
- i++;
731
- break;
732
- case "unique":
733
- column.unique = true;
734
- i++;
735
- break;
736
- case "nullable":
737
- column.nullable = true;
738
- i++;
739
- break;
740
- case "not":
741
- if (tokens[i + 1] !== "null") {
742
- throw new Error(`Line ${lineNum}: Unknown modifier 'not'`);
743
- }
744
- column.nullable = false;
745
- i += 2;
746
- break;
747
- case "default":
748
- i++;
749
- if (i >= tokens.length) {
750
- throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
751
- }
752
- {
753
- const defaultTokens = [];
754
- while (i < tokens.length && !modifiers.has(tokens[i])) {
755
- defaultTokens.push(tokens[i]);
756
- i++;
757
- }
758
- if (defaultTokens.length === 0) {
759
- throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
760
- }
761
- column.default = defaultTokens.join(" ");
762
- }
763
- break;
764
- case "fk":
765
- i++;
766
- if (i >= tokens.length) {
767
- throw new Error(`Line ${lineNum}: 'fk' modifier requires a table.column reference`);
768
- }
769
- column.foreignKey = parseForeignKey(tokens[i], lineNum);
770
- i++;
771
- break;
772
- default:
773
- throw new Error(`Line ${lineNum}: Unknown modifier '${modifier}'`);
774
- }
775
- }
776
- return column;
777
- }
778
- function parseTableBlock(startLine) {
779
- const firstLine = cleanLine(lines[startLine]);
780
- const match = firstLine.match(/^table\s+(\w+)\s*\{?\s*$/);
781
- if (!match) {
782
- throw new Error(`Line ${startLine + 1}: Invalid table definition. Expected: table <name> {`);
783
- }
784
- const tableName = match[1];
785
- if (tables[tableName]) {
786
- throw new Error(`Line ${startLine + 1}: Duplicate table definition '${tableName}'`);
787
- }
788
- const columns = [];
789
- let lineIdx = startLine + 1;
790
- let foundClosingBrace = false;
791
- while (lineIdx < lines.length) {
792
- const cleaned = cleanLine(lines[lineIdx]);
793
- if (!cleaned) {
794
- lineIdx++;
795
- continue;
796
- }
797
- if (cleaned === "}") {
798
- foundClosingBrace = true;
799
- break;
800
- }
801
- try {
802
- const column = parseColumn(cleaned, lineIdx + 1);
803
- columns.push(column);
804
- } catch (error2) {
805
- throw error2;
806
- }
807
- lineIdx++;
808
- }
809
- if (!foundClosingBrace) {
810
- throw new Error(`Line ${startLine + 1}: Table '${tableName}' block not closed (missing '}')`);
811
- }
812
- const primaryKeyColumn = columns.find((column) => column.primaryKey)?.name ?? null;
813
- tables[tableName] = {
814
- name: tableName,
815
- columns,
816
- ...primaryKeyColumn !== null && { primaryKey: primaryKeyColumn }
817
- };
818
- return lineIdx;
819
- }
820
- while (currentLine < lines.length) {
821
- const cleaned = cleanLine(lines[currentLine]);
822
- if (!cleaned) {
823
- currentLine++;
824
- continue;
825
- }
826
- if (cleaned.startsWith("table ")) {
827
- currentLine = parseTableBlock(currentLine);
828
- } else {
829
- throw new Error(`Line ${currentLine + 1}: Unexpected content '${cleaned}'. Expected table definition.`);
830
- }
831
- currentLine++;
832
- }
833
- return { tables };
834
- }
835
-
836
- // src/core/paths.ts
837
- var import_path2 = __toESM(require("path"));
838
- function getProjectRoot(cwd = process.cwd()) {
839
- return cwd;
840
- }
841
- function getSchemaForgeDir(root) {
842
- return import_path2.default.join(root, "schemaforge");
843
- }
844
- function getSchemaFilePath(root, config) {
845
- const schemaForgeDir = getSchemaForgeDir(root);
846
- const fileName = config?.schemaFile || "schema.sf";
847
- return import_path2.default.join(schemaForgeDir, fileName);
848
- }
849
- function getConfigPath(root) {
850
- const schemaForgeDir = getSchemaForgeDir(root);
851
- return import_path2.default.join(schemaForgeDir, "config.json");
852
- }
853
- function getStatePath(root, config) {
854
- const schemaForgeDir = getSchemaForgeDir(root);
855
- const fileName = config?.stateFile || "state.json";
856
- return import_path2.default.join(schemaForgeDir, fileName);
857
- }
894
+ });
858
895
 
859
- // src/core/state-manager.ts
860
- var import_path3 = __toESM(require("path"));
861
- var StateManager = class {
862
- constructor(root = process.cwd()) {
863
- this.config = null;
864
- this.root = root;
865
- }
866
- /**
867
- * Initialize a new SchemaForge project
868
- */
869
- async initializeProject(directory = ".", force = false) {
870
- const configPath = import_path3.default.join(directory, "schemaforge.config.json");
871
- if (await fileExists(configPath) && !force) {
872
- throw new Error("SchemaForge project already initialized. Use --force to overwrite.");
873
- }
874
- const defaultConfig = {
875
- version: "1.0.0",
876
- database: "postgres",
877
- schemaDir: "schemas",
878
- outputDir: "output",
879
- migrationDir: "migrations"
880
- };
881
- await writeJsonFile(configPath, defaultConfig);
882
- await ensureDir(import_path3.default.join(directory, defaultConfig.schemaDir));
883
- await ensureDir(import_path3.default.join(directory, defaultConfig.outputDir));
884
- await ensureDir(import_path3.default.join(directory, defaultConfig.migrationDir));
885
- const exampleSchema = {
886
- version: "1.0.0",
887
- database: "postgres",
888
- tables: [
889
- {
890
- name: "users",
891
- fields: [
892
- { name: "id", type: "uuid", required: true, unique: true },
893
- { name: "email", type: "string", required: true, unique: true, length: 255 },
894
- { name: "name", type: "string", required: true, length: 255 },
895
- { name: "created_at", type: "datetime", required: true }
896
- ],
897
- indexes: [
898
- { name: "idx_users_email", fields: ["email"], unique: true }
899
- ]
900
- }
901
- ]
902
- };
903
- const exampleSchemaPath = import_path3.default.join(
904
- directory,
905
- defaultConfig.schemaDir,
906
- "example.schema.json"
907
- );
908
- await writeJsonFile(exampleSchemaPath, exampleSchema);
909
- this.config = defaultConfig;
910
- }
911
- /**
912
- * Load configuration from file
913
- */
914
- async loadConfig(directory = ".") {
915
- const configPath = import_path3.default.join(directory, "schemaforge.config.json");
916
- if (!await fileExists(configPath)) {
917
- throw new Error('SchemaForge project not initialized. Run "schemaforge init" first.');
918
- }
919
- this.config = await readJsonFile(configPath, {});
920
- return this.config;
921
- }
922
- /**
923
- * Save configuration to file
924
- */
925
- async saveConfig(config, directory = ".") {
926
- const configPath = import_path3.default.join(directory, "schemaforge.config.json");
927
- await writeJsonFile(configPath, config);
928
- this.config = config;
929
- }
930
- /**
931
- * Get current configuration
932
- */
933
- getConfig() {
934
- return this.config;
935
- }
936
- /**
937
- * Update configuration
938
- */
939
- updateConfig(updates) {
940
- if (!this.config) {
941
- throw new Error("No configuration loaded");
942
- }
943
- this.config = { ...this.config, ...updates };
944
- }
945
- /**
946
- * Check if project is initialized
947
- */
948
- async isInitialized(directory = ".") {
949
- const configPath = import_path3.default.join(directory, "schemaforge.config.json");
950
- return await fileExists(configPath);
951
- }
952
- /**
953
- * Get schema directory path
954
- */
955
- getSchemaDir() {
956
- if (!this.config) {
957
- throw new Error("No configuration loaded");
958
- }
959
- return import_path3.default.join(this.root, this.config.schemaDir);
960
- }
961
- /**
962
- * Get output directory path
963
- */
964
- getOutputDir() {
965
- if (!this.config) {
966
- throw new Error("No configuration loaded");
967
- }
968
- return import_path3.default.join(this.root, this.config.outputDir);
969
- }
970
- /**
971
- * Get migration directory path
972
- */
973
- getMigrationDir() {
974
- if (!this.config) {
975
- throw new Error("No configuration loaded");
976
- }
977
- return import_path3.default.join(this.root, this.config.migrationDir);
978
- }
979
- };
896
+ // node_modules/@xubylele/schema-forge-core/dist/core/state-manager.js
980
897
  async function schemaToState(schema) {
981
898
  const tables = {};
982
899
  for (const [tableName, table] of Object.entries(schema.tables)) {
@@ -1003,292 +920,29 @@ async function schemaToState(schema) {
1003
920
  };
1004
921
  }
1005
922
  async function loadState(statePath) {
1006
- return await readJsonFile(statePath, { version: 1, tables: {} });
923
+ return await readJsonFile2(statePath, { version: 1, tables: {} });
1007
924
  }
1008
925
  async function saveState(statePath, state) {
1009
- const dirPath = import_path3.default.dirname(statePath);
1010
- await ensureDir(dirPath);
1011
- await writeJsonFile(statePath, state);
926
+ const dirPath = import_path4.default.dirname(statePath);
927
+ await ensureDir2(dirPath);
928
+ await writeJsonFile2(statePath, state);
1012
929
  }
1013
- var defaultStateManager = new StateManager();
930
+ var import_path4;
931
+ var init_state_manager = __esm({
932
+ "node_modules/@xubylele/schema-forge-core/dist/core/state-manager.js"() {
933
+ "use strict";
934
+ import_path4 = __toESM(require("path"), 1);
935
+ init_fs();
936
+ }
937
+ });
1014
938
 
1015
- // src/core/validator.ts
1016
- var SchemaValidator = class {
1017
- /**
1018
- * Validate a complete schema
1019
- */
1020
- validateSchema(schema) {
1021
- const errors = [];
1022
- if (!schema.version) {
1023
- errors.push({
1024
- path: "schema.version",
1025
- message: "Schema version is required",
1026
- severity: "error"
1027
- });
1028
- }
1029
- if (!schema.database) {
1030
- errors.push({
1031
- path: "schema.database",
1032
- message: "Database type is required",
1033
- severity: "error"
1034
- });
1035
- }
1036
- if (!schema.tables || schema.tables.length === 0) {
1037
- errors.push({
1038
- path: "schema.tables",
1039
- message: "Schema must contain at least one table",
1040
- severity: "error"
1041
- });
1042
- }
1043
- if (schema.tables) {
1044
- const tableNames = /* @__PURE__ */ new Set();
1045
- for (let i = 0; i < schema.tables.length; i++) {
1046
- const table = schema.tables[i];
1047
- const tableErrors = this.validateTable(table, i);
1048
- errors.push(...tableErrors);
1049
- if (tableNames.has(table.name)) {
1050
- errors.push({
1051
- path: `schema.tables[${i}].name`,
1052
- message: `Duplicate table name: ${table.name}`,
1053
- severity: "error"
1054
- });
1055
- }
1056
- tableNames.add(table.name);
1057
- }
1058
- errors.push(...this.validateReferences(schema));
1059
- }
1060
- return {
1061
- valid: errors.filter((e) => e.severity === "error").length === 0,
1062
- errors
1063
- };
1064
- }
1065
- /**
1066
- * Validate a table
1067
- */
1068
- validateTable(table, tableIndex) {
1069
- const errors = [];
1070
- const basePath = `schema.tables[${tableIndex}]`;
1071
- if (!table.name || table.name.trim() === "") {
1072
- errors.push({
1073
- path: `${basePath}.name`,
1074
- message: "Table name is required",
1075
- severity: "error"
1076
- });
1077
- }
1078
- if (table.name && !/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(table.name)) {
1079
- errors.push({
1080
- path: `${basePath}.name`,
1081
- message: `Invalid table name '${table.name}': must start with letter or underscore and contain only alphanumeric characters and underscores`,
1082
- severity: "error"
1083
- });
1084
- }
1085
- if (!table.fields || table.fields.length === 0) {
1086
- errors.push({
1087
- path: `${basePath}.fields`,
1088
- message: `Table '${table.name}' must have at least one field`,
1089
- severity: "error"
1090
- });
1091
- }
1092
- if (table.fields) {
1093
- const fieldNames = /* @__PURE__ */ new Set();
1094
- for (let i = 0; i < table.fields.length; i++) {
1095
- const field = table.fields[i];
1096
- const fieldErrors = this.validateField(field, basePath, i);
1097
- errors.push(...fieldErrors);
1098
- if (fieldNames.has(field.name)) {
1099
- errors.push({
1100
- path: `${basePath}.fields[${i}].name`,
1101
- message: `Duplicate field name: ${field.name}`,
1102
- severity: "error"
1103
- });
1104
- }
1105
- fieldNames.add(field.name);
1106
- }
1107
- }
1108
- return errors;
1109
- }
1110
- /**
1111
- * Validate a field
1112
- */
1113
- validateField(field, tablePath, fieldIndex) {
1114
- const errors = [];
1115
- const basePath = `${tablePath}.fields[${fieldIndex}]`;
1116
- if (!field.name || field.name.trim() === "") {
1117
- errors.push({
1118
- path: `${basePath}.name`,
1119
- message: "Field name is required",
1120
- severity: "error"
1121
- });
1122
- }
1123
- if (field.name && !/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(field.name)) {
1124
- errors.push({
1125
- path: `${basePath}.name`,
1126
- message: `Invalid field name '${field.name}': must start with letter or underscore and contain only alphanumeric characters and underscores`,
1127
- severity: "error"
1128
- });
1129
- }
1130
- if (!field.type) {
1131
- errors.push({
1132
- path: `${basePath}.type`,
1133
- message: "Field type is required",
1134
- severity: "error"
1135
- });
1136
- }
1137
- if (field.type === "enum") {
1138
- if (!field.enumValues || field.enumValues.length === 0) {
1139
- errors.push({
1140
- path: `${basePath}.enumValues`,
1141
- message: "Enum type requires enumValues array",
1142
- severity: "error"
1143
- });
1144
- }
1145
- }
1146
- if (field.type === "string" && field.length && field.length <= 0) {
1147
- errors.push({
1148
- path: `${basePath}.length`,
1149
- message: "String length must be greater than 0",
1150
- severity: "error"
1151
- });
1152
- }
1153
- return errors;
1154
- }
1155
- /**
1156
- * Validate foreign key references
1157
- */
1158
- validateReferences(schema) {
1159
- const errors = [];
1160
- const tableNames = new Set(schema.tables.map((t) => t.name));
1161
- for (let i = 0; i < schema.tables.length; i++) {
1162
- const table = schema.tables[i];
1163
- for (let j = 0; j < table.fields.length; j++) {
1164
- const field = table.fields[j];
1165
- if (field.references) {
1166
- const refTable = field.references.table;
1167
- const refField = field.references.field;
1168
- if (!tableNames.has(refTable)) {
1169
- errors.push({
1170
- path: `schema.tables[${i}].fields[${j}].references.table`,
1171
- message: `Referenced table '${refTable}' does not exist`,
1172
- severity: "error"
1173
- });
1174
- } else {
1175
- const referencedTable = schema.tables.find((t) => t.name === refTable);
1176
- if (referencedTable) {
1177
- const referencedField = referencedTable.fields.find((f) => f.name === refField);
1178
- if (!referencedField) {
1179
- errors.push({
1180
- path: `schema.tables[${i}].fields[${j}].references.field`,
1181
- message: `Referenced field '${refField}' does not exist in table '${refTable}'`,
1182
- severity: "error"
1183
- });
1184
- }
1185
- }
1186
- }
1187
- }
1188
- }
1189
- }
1190
- return errors;
1191
- }
1192
- };
1193
- var defaultValidator = new SchemaValidator();
1194
- var VALID_BASE_COLUMN_TYPES = [
1195
- "uuid",
1196
- "varchar",
1197
- "text",
1198
- "int",
1199
- "bigint",
1200
- "boolean",
1201
- "timestamptz",
1202
- "date"
1203
- ];
1204
- function isValidColumnType(type) {
1205
- const normalizedType = type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
1206
- if (VALID_BASE_COLUMN_TYPES.includes(normalizedType)) {
1207
- return true;
1208
- }
1209
- return /^varchar\(\d+\)$/.test(normalizedType) || /^numeric\(\d+,\d+\)$/.test(normalizedType);
1210
- }
1211
- function validateSchema(schema) {
1212
- validateDuplicateTables(schema);
1213
- for (const tableName in schema.tables) {
1214
- const table = schema.tables[tableName];
1215
- validateTableColumns(tableName, table, schema.tables);
1216
- }
1217
- }
1218
- function validateDuplicateTables(schema) {
1219
- const tableNames = Object.keys(schema.tables);
1220
- const seen = /* @__PURE__ */ new Set();
1221
- for (const tableName of tableNames) {
1222
- if (seen.has(tableName)) {
1223
- throw new Error(`Duplicate table: '${tableName}'`);
1224
- }
1225
- seen.add(tableName);
1226
- }
1227
- }
1228
- function validateTableColumns(tableName, table, allTables) {
1229
- const columnNames = /* @__PURE__ */ new Set();
1230
- const primaryKeyColumns = [];
1231
- for (const column of table.columns) {
1232
- if (columnNames.has(column.name)) {
1233
- throw new Error(`Table '${tableName}': duplicate column '${column.name}'`);
1234
- }
1235
- columnNames.add(column.name);
1236
- if (column.primaryKey) {
1237
- primaryKeyColumns.push(column.name);
1238
- }
1239
- if (!isValidColumnType(column.type)) {
1240
- throw new Error(
1241
- `Table '${tableName}', column '${column.name}': type '${column.type}' is not valid. Supported types: ${VALID_BASE_COLUMN_TYPES.join(", ")}, varchar(n), numeric(p,s)`
1242
- );
1243
- }
1244
- if (column.foreignKey) {
1245
- const fkTable = column.foreignKey.table;
1246
- const fkColumn = column.foreignKey.column;
1247
- if (!allTables[fkTable]) {
1248
- throw new Error(
1249
- `Table '${tableName}', column '${column.name}': referenced table '${fkTable}' does not exist`
1250
- );
1251
- }
1252
- const referencedTable = allTables[fkTable];
1253
- const columnExists = referencedTable.columns.some((col) => col.name === fkColumn);
1254
- if (!columnExists) {
1255
- throw new Error(
1256
- `Table '${tableName}', column '${column.name}': table '${fkTable}' does not have column '${fkColumn}'`
1257
- );
1258
- }
1259
- }
1260
- }
1261
- if (primaryKeyColumns.length > 1) {
1262
- throw new Error(`Table '${tableName}': can only have one primary key (found ${primaryKeyColumns.length})`);
1263
- }
1264
- const normalizedPrimaryKey = table.primaryKey ?? primaryKeyColumns[0] ?? null;
1265
- if (table.primaryKey && !columnNames.has(table.primaryKey)) {
1266
- throw new Error(
1267
- `Table '${tableName}': primary key column '${table.primaryKey}' does not exist`
1268
- );
1269
- }
1270
- if (table.primaryKey && primaryKeyColumns.length === 1 && primaryKeyColumns[0] !== table.primaryKey) {
1271
- throw new Error(
1272
- `Table '${tableName}': column-level primary key '${primaryKeyColumns[0]}' does not match table primary key '${table.primaryKey}'`
1273
- );
1274
- }
1275
- if (normalizedPrimaryKey) {
1276
- const pkMatches = table.columns.filter((column) => column.name === normalizedPrimaryKey);
1277
- if (pkMatches.length !== 1) {
1278
- throw new Error(
1279
- `Table '${tableName}': primary key column '${normalizedPrimaryKey}' is invalid`
1280
- );
1281
- }
1282
- }
1283
- }
1284
-
1285
- // src/generator/sql-generator.ts
1286
- function generateSql(diff, provider, sqlConfig) {
1287
- const statements = [];
1288
- for (const operation of diff.operations) {
1289
- const sql = generateOperation(operation, provider, sqlConfig);
1290
- if (sql) {
1291
- statements.push(sql);
939
+ // node_modules/@xubylele/schema-forge-core/dist/generator/sql-generator.js
940
+ function generateSql(diff, provider, sqlConfig) {
941
+ const statements = [];
942
+ for (const operation of diff.operations) {
943
+ const sql = generateOperation(operation, provider, sqlConfig);
944
+ if (sql) {
945
+ statements.push(sql);
1292
946
  }
1293
947
  }
1294
948
  return statements.join("\n\n");
@@ -1300,25 +954,13 @@ function generateOperation(operation, provider, sqlConfig) {
1300
954
  case "drop_table":
1301
955
  return generateDropTable(operation.tableName);
1302
956
  case "column_type_changed":
1303
- return generateAlterColumnType(
1304
- operation.tableName,
1305
- operation.columnName,
1306
- operation.toType
1307
- );
957
+ return generateAlterColumnType(operation.tableName, operation.columnName, operation.toType);
1308
958
  case "column_nullability_changed":
1309
- return generateAlterColumnNullability(
1310
- operation.tableName,
1311
- operation.columnName,
1312
- operation.to
1313
- );
959
+ return generateAlterColumnNullability(operation.tableName, operation.columnName, operation.to);
1314
960
  case "add_column":
1315
961
  return generateAddColumn(operation.tableName, operation.column, provider, sqlConfig);
1316
962
  case "column_default_changed":
1317
- return generateAlterColumnDefault(
1318
- operation.tableName,
1319
- operation.columnName,
1320
- operation.toDefault
1321
- );
963
+ return generateAlterColumnDefault(operation.tableName, operation.columnName, operation.toDefault);
1322
964
  case "drop_column":
1323
965
  return generateDropColumn(operation.tableName, operation.columnName);
1324
966
  case "column_unique_changed":
@@ -1330,9 +972,7 @@ function generateOperation(operation, provider, sqlConfig) {
1330
972
  }
1331
973
  }
1332
974
  function generateCreateTable(table, provider, sqlConfig) {
1333
- const columnDefs = table.columns.map(
1334
- (col) => generateColumnDefinition(col, provider, sqlConfig)
1335
- );
975
+ const columnDefs = table.columns.map((col) => generateColumnDefinition(col, provider, sqlConfig));
1336
976
  const lines = ["CREATE TABLE " + table.name + " ("];
1337
977
  columnDefs.forEach((colDef, index) => {
1338
978
  const isLast = index === columnDefs.length - 1;
@@ -1344,9 +984,7 @@ function generateCreateTable(table, provider, sqlConfig) {
1344
984
  function generateColumnDefinition(column, provider, sqlConfig) {
1345
985
  const parts = [column.name, column.type];
1346
986
  if (column.foreignKey) {
1347
- parts.push(
1348
- `references ${column.foreignKey.table}(${column.foreignKey.column})`
1349
- );
987
+ parts.push(`references ${column.foreignKey.table}(${column.foreignKey.column})`);
1350
988
  }
1351
989
  if (column.primaryKey) {
1352
990
  parts.push("primary key");
@@ -1397,9 +1035,7 @@ function generateAddPrimaryKeyConstraint(tableName, columnName) {
1397
1035
  }
1398
1036
  function generateDropConstraintStatements(tableName, constraintNames) {
1399
1037
  const uniqueConstraintNames = Array.from(new Set(constraintNames));
1400
- return uniqueConstraintNames.map(
1401
- (constraintName) => `ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`
1402
- ).join("\n");
1038
+ return uniqueConstraintNames.map((constraintName) => `ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`).join("\n");
1403
1039
  }
1404
1040
  function generateAlterColumnDefault(tableName, columnName, newDefault) {
1405
1041
  if (newDefault === null) {
@@ -1413,461 +1049,136 @@ function generateAlterColumnNullability(tableName, columnName, toNullable) {
1413
1049
  }
1414
1050
  return `ALTER TABLE ${tableName} ALTER COLUMN ${columnName} SET NOT NULL;`;
1415
1051
  }
1416
-
1417
- // src/commands/diff.ts
1418
- var REQUIRED_CONFIG_FIELDS = ["schemaFile", "stateFile"];
1419
- function resolveConfigPath(root, targetPath) {
1420
- return import_path4.default.isAbsolute(targetPath) ? targetPath : import_path4.default.join(root, targetPath);
1421
- }
1422
- async function runDiff() {
1423
- const root = getProjectRoot();
1424
- const configPath = getConfigPath(root);
1425
- if (!await fileExists(configPath)) {
1426
- throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
1052
+ var init_sql_generator = __esm({
1053
+ "node_modules/@xubylele/schema-forge-core/dist/generator/sql-generator.js"() {
1054
+ "use strict";
1055
+ init_normalize();
1427
1056
  }
1428
- const config = await readJsonFile(configPath, {});
1429
- for (const field of REQUIRED_CONFIG_FIELDS) {
1430
- const value = config[field];
1431
- if (!value || typeof value !== "string") {
1432
- throw new Error(`Invalid config: '${field}' is required`);
1057
+ });
1058
+
1059
+ // node_modules/@xubylele/schema-forge-core/dist/core/sql/split-statements.js
1060
+ function splitSqlStatements(sql) {
1061
+ const statements = [];
1062
+ let current = "";
1063
+ let inSingleQuote = false;
1064
+ let inDoubleQuote = false;
1065
+ let inLineComment = false;
1066
+ let inBlockComment = false;
1067
+ let dollarTag = null;
1068
+ let index = 0;
1069
+ while (index < sql.length) {
1070
+ const char = sql[index];
1071
+ const next = index + 1 < sql.length ? sql[index + 1] : "";
1072
+ if (inLineComment) {
1073
+ current += char;
1074
+ if (char === "\n") {
1075
+ inLineComment = false;
1076
+ }
1077
+ index++;
1078
+ continue;
1433
1079
  }
1434
- }
1435
- const schemaPath = resolveConfigPath(root, config.schemaFile);
1436
- const statePath = resolveConfigPath(root, config.stateFile);
1437
- if (config.provider && config.provider !== "postgres" && config.provider !== "supabase") {
1438
- throw new Error(`Unsupported provider '${config.provider}'.`);
1439
- }
1440
- const provider = config.provider ?? "postgres";
1441
- const schemaSource = await readTextFile(schemaPath);
1442
- const schema = parseSchema(schemaSource);
1443
- try {
1444
- validateSchema(schema);
1445
- } catch (error2) {
1446
- if (error2 instanceof Error) {
1447
- throw new SchemaValidationError(error2.message);
1080
+ if (inBlockComment) {
1081
+ current += char;
1082
+ if (char === "*" && next === "/") {
1083
+ current += next;
1084
+ inBlockComment = false;
1085
+ index += 2;
1086
+ continue;
1087
+ }
1088
+ index++;
1089
+ continue;
1448
1090
  }
1449
- throw error2;
1091
+ if (!inSingleQuote && !inDoubleQuote && dollarTag === null) {
1092
+ if (char === "-" && next === "-") {
1093
+ current += char + next;
1094
+ inLineComment = true;
1095
+ index += 2;
1096
+ continue;
1097
+ }
1098
+ if (char === "/" && next === "*") {
1099
+ current += char + next;
1100
+ inBlockComment = true;
1101
+ index += 2;
1102
+ continue;
1103
+ }
1104
+ }
1105
+ if (!inDoubleQuote && dollarTag === null && char === "'") {
1106
+ current += char;
1107
+ if (inSingleQuote && next === "'") {
1108
+ current += next;
1109
+ index += 2;
1110
+ continue;
1111
+ }
1112
+ inSingleQuote = !inSingleQuote;
1113
+ index++;
1114
+ continue;
1115
+ }
1116
+ if (!inSingleQuote && dollarTag === null && char === '"') {
1117
+ current += char;
1118
+ if (inDoubleQuote && next === '"') {
1119
+ current += next;
1120
+ index += 2;
1121
+ continue;
1122
+ }
1123
+ inDoubleQuote = !inDoubleQuote;
1124
+ index++;
1125
+ continue;
1126
+ }
1127
+ if (!inSingleQuote && !inDoubleQuote) {
1128
+ if (dollarTag === null && char === "$") {
1129
+ const remainder = sql.slice(index);
1130
+ const match = remainder.match(/^\$[a-zA-Z_][a-zA-Z0-9_]*\$|^\$\$/);
1131
+ if (match) {
1132
+ dollarTag = match[0];
1133
+ current += match[0];
1134
+ index += match[0].length;
1135
+ continue;
1136
+ }
1137
+ }
1138
+ if (dollarTag !== null && sql.startsWith(dollarTag, index)) {
1139
+ current += dollarTag;
1140
+ index += dollarTag.length;
1141
+ dollarTag = null;
1142
+ continue;
1143
+ }
1144
+ }
1145
+ if (!inSingleQuote && !inDoubleQuote && dollarTag === null && char === ";") {
1146
+ if (current.trim().length > 0) {
1147
+ statements.push(current.trim());
1148
+ }
1149
+ current = "";
1150
+ index++;
1151
+ continue;
1152
+ }
1153
+ current += char;
1154
+ index++;
1450
1155
  }
1451
- const previousState = await loadState(statePath);
1452
- const diff = diffSchemas(previousState, schema);
1453
- if (diff.operations.length === 0) {
1454
- success("No changes detected");
1455
- return;
1156
+ if (current.trim().length > 0) {
1157
+ statements.push(current.trim());
1456
1158
  }
1457
- const sql = generateSql(diff, provider, config.sql);
1458
- console.log(sql);
1159
+ return statements;
1459
1160
  }
1161
+ var init_split_statements = __esm({
1162
+ "node_modules/@xubylele/schema-forge-core/dist/core/sql/split-statements.js"() {
1163
+ "use strict";
1164
+ }
1165
+ });
1460
1166
 
1461
- // src/commands/generate.ts
1462
- var import_commander2 = require("commander");
1463
- var import_path5 = __toESM(require("path"));
1464
-
1465
- // src/core/utils.ts
1466
- function nowTimestamp() {
1467
- const date = /* @__PURE__ */ new Date();
1468
- const pad = (value) => String(value).padStart(2, "0");
1469
- return String(date.getFullYear()) + pad(date.getMonth() + 1) + pad(date.getDate()) + pad(date.getHours()) + pad(date.getMinutes()) + pad(date.getSeconds());
1167
+ // node_modules/@xubylele/schema-forge-core/dist/core/sql/parse-migration.js
1168
+ function normalizeSqlType(type) {
1169
+ return type.trim().toLowerCase().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
1470
1170
  }
1471
- function slugifyName(name) {
1472
- return name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "") || "migration";
1171
+ function unquoteIdentifier(value) {
1172
+ const trimmed = value.trim();
1173
+ if (trimmed.startsWith('"') && trimmed.endsWith('"') && trimmed.length >= 2) {
1174
+ return trimmed.slice(1, -1).replace(/""/g, '"');
1175
+ }
1176
+ return trimmed;
1473
1177
  }
1474
-
1475
- // src/commands/generate.ts
1476
- var REQUIRED_CONFIG_FIELDS2 = [
1477
- "schemaFile",
1478
- "stateFile",
1479
- "outputDir"
1480
- ];
1481
- function resolveConfigPath2(root, targetPath) {
1482
- return import_path5.default.isAbsolute(targetPath) ? targetPath : import_path5.default.join(root, targetPath);
1483
- }
1484
- async function runGenerate(options) {
1485
- const root = getProjectRoot();
1486
- const configPath = getConfigPath(root);
1487
- if (!await fileExists(configPath)) {
1488
- throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
1489
- }
1490
- const config = await readJsonFile(configPath, {});
1491
- for (const field of REQUIRED_CONFIG_FIELDS2) {
1492
- const value = config[field];
1493
- if (!value || typeof value !== "string") {
1494
- throw new Error(`Invalid config: '${field}' is required`);
1495
- }
1496
- }
1497
- const schemaPath = resolveConfigPath2(root, config.schemaFile);
1498
- const statePath = resolveConfigPath2(root, config.stateFile);
1499
- const outputDir = resolveConfigPath2(root, config.outputDir);
1500
- if (config.provider && config.provider !== "postgres" && config.provider !== "supabase") {
1501
- throw new Error(`Unsupported provider '${config.provider}'.`);
1502
- }
1503
- const provider = config.provider ?? "postgres";
1504
- if (!config.provider) {
1505
- info("Provider not set; defaulting to postgres.");
1506
- }
1507
- info("Generating SQL...");
1508
- const schemaSource = await readTextFile(schemaPath);
1509
- const schema = parseSchema(schemaSource);
1510
- try {
1511
- validateSchema(schema);
1512
- } catch (error2) {
1513
- if (error2 instanceof Error) {
1514
- throw new SchemaValidationError(error2.message);
1515
- }
1516
- throw error2;
1517
- }
1518
- const previousState = await loadState(statePath);
1519
- const diff = diffSchemas(previousState, schema);
1520
- if (diff.operations.length === 0) {
1521
- info("No changes detected");
1522
- return;
1523
- }
1524
- const sql = generateSql(diff, provider, config.sql);
1525
- const timestamp = nowTimestamp();
1526
- const slug = slugifyName(options.name ?? "migration");
1527
- const fileName = `${timestamp}-${slug}.sql`;
1528
- await ensureDir(outputDir);
1529
- const migrationPath = import_path5.default.join(outputDir, fileName);
1530
- await writeTextFile(migrationPath, sql + "\n");
1531
- const nextState = await schemaToState(schema);
1532
- await saveState(statePath, nextState);
1533
- success(`SQL generated successfully: ${migrationPath}`);
1534
- }
1535
-
1536
- // src/commands/import.ts
1537
- var import_commander3 = require("commander");
1538
- var import_path7 = __toESM(require("path"));
1539
-
1540
- // src/core/sql/apply-ops.ts
1541
- function toSchemaColumn(column) {
1542
- return {
1543
- name: column.name,
1544
- type: column.type,
1545
- nullable: column.nullable,
1546
- ...column.default !== void 0 ? { default: column.default } : {},
1547
- ...column.unique !== void 0 ? { unique: column.unique } : {},
1548
- ...column.primaryKey !== void 0 ? { primaryKey: column.primaryKey } : {}
1549
- };
1550
- }
1551
- function applySingleColumnConstraint(table, constraint) {
1552
- if (constraint.columns.length !== 1) {
1553
- return false;
1554
- }
1555
- const targetColumn = table.columns.find((column) => column.name === constraint.columns[0]);
1556
- if (!targetColumn) {
1557
- return false;
1558
- }
1559
- if (constraint.type === "PRIMARY_KEY") {
1560
- table.primaryKey = targetColumn.name;
1561
- targetColumn.primaryKey = true;
1562
- targetColumn.nullable = false;
1563
- return true;
1564
- }
1565
- targetColumn.unique = true;
1566
- return true;
1567
- }
1568
- function clearConstraintByName(table, name) {
1569
- if (name.endsWith("_pkey") || name.startsWith("pk_")) {
1570
- if (table.primaryKey) {
1571
- const pkColumn = table.columns.find((column) => column.name === table.primaryKey);
1572
- if (pkColumn) {
1573
- pkColumn.primaryKey = false;
1574
- }
1575
- table.primaryKey = null;
1576
- }
1577
- return;
1578
- }
1579
- if (name.endsWith("_key") || name.startsWith("uq_")) {
1580
- for (const column of table.columns) {
1581
- if (column.unique) {
1582
- column.unique = false;
1583
- }
1584
- }
1585
- }
1586
- }
1587
- function getOrCreateTable(tables, name) {
1588
- if (!tables[name]) {
1589
- tables[name] = { name, columns: [] };
1590
- }
1591
- return tables[name];
1592
- }
1593
- function applySqlOps(ops) {
1594
- const tables = {};
1595
- const warnings = [];
1596
- for (const op of ops) {
1597
- switch (op.kind) {
1598
- case "CREATE_TABLE": {
1599
- const table = {
1600
- name: op.table,
1601
- columns: op.columns.map(toSchemaColumn)
1602
- };
1603
- for (const column of table.columns) {
1604
- if (column.primaryKey) {
1605
- table.primaryKey = column.name;
1606
- }
1607
- }
1608
- for (const constraint of op.constraints) {
1609
- const applied = applySingleColumnConstraint(table, constraint);
1610
- if (!applied) {
1611
- warnings.push({
1612
- statement: `CREATE TABLE ${op.table}`,
1613
- reason: `Constraint ${constraint.type}${constraint.name ? ` (${constraint.name})` : ""} is unsupported for schema reconstruction`
1614
- });
1615
- }
1616
- }
1617
- tables[op.table] = table;
1618
- break;
1619
- }
1620
- case "ADD_COLUMN": {
1621
- const table = getOrCreateTable(tables, op.table);
1622
- table.columns = table.columns.filter((column) => column.name !== op.column.name);
1623
- table.columns.push(toSchemaColumn(op.column));
1624
- if (op.column.primaryKey) {
1625
- table.primaryKey = op.column.name;
1626
- }
1627
- break;
1628
- }
1629
- case "ALTER_COLUMN_TYPE": {
1630
- const table = tables[op.table];
1631
- if (!table) {
1632
- break;
1633
- }
1634
- const column = table.columns.find((item) => item.name === op.column);
1635
- if (column) {
1636
- column.type = op.toType;
1637
- }
1638
- break;
1639
- }
1640
- case "SET_NOT_NULL": {
1641
- const table = tables[op.table];
1642
- const column = table?.columns.find((item) => item.name === op.column);
1643
- if (column) {
1644
- column.nullable = false;
1645
- }
1646
- break;
1647
- }
1648
- case "DROP_NOT_NULL": {
1649
- const table = tables[op.table];
1650
- const column = table?.columns.find((item) => item.name === op.column);
1651
- if (column) {
1652
- column.nullable = true;
1653
- }
1654
- break;
1655
- }
1656
- case "SET_DEFAULT": {
1657
- const table = tables[op.table];
1658
- const column = table?.columns.find((item) => item.name === op.column);
1659
- if (column) {
1660
- column.default = op.expr;
1661
- }
1662
- break;
1663
- }
1664
- case "DROP_DEFAULT": {
1665
- const table = tables[op.table];
1666
- const column = table?.columns.find((item) => item.name === op.column);
1667
- if (column) {
1668
- column.default = null;
1669
- }
1670
- break;
1671
- }
1672
- case "ADD_CONSTRAINT": {
1673
- const table = tables[op.table];
1674
- if (!table) {
1675
- break;
1676
- }
1677
- const applied = applySingleColumnConstraint(table, op.constraint);
1678
- if (!applied) {
1679
- warnings.push({
1680
- statement: `ALTER TABLE ${op.table} ADD CONSTRAINT ${op.constraint.name ?? "<unnamed>"}`,
1681
- reason: `Constraint ${op.constraint.type} is unsupported for schema reconstruction`
1682
- });
1683
- }
1684
- break;
1685
- }
1686
- case "DROP_CONSTRAINT": {
1687
- const table = tables[op.table];
1688
- if (!table) {
1689
- break;
1690
- }
1691
- clearConstraintByName(table, op.name);
1692
- break;
1693
- }
1694
- case "DROP_COLUMN": {
1695
- const table = tables[op.table];
1696
- if (!table) {
1697
- break;
1698
- }
1699
- table.columns = table.columns.filter((column) => column.name !== op.column);
1700
- if (table.primaryKey === op.column) {
1701
- table.primaryKey = null;
1702
- }
1703
- break;
1704
- }
1705
- case "DROP_TABLE": {
1706
- delete tables[op.table];
1707
- break;
1708
- }
1709
- }
1710
- }
1711
- const schema = { tables };
1712
- return { schema, warnings };
1713
- }
1714
-
1715
- // src/core/sql/load-migrations.ts
1716
- var import_fs6 = require("fs");
1717
- var import_path6 = __toESM(require("path"));
1718
- async function loadMigrationSqlInput(inputPath) {
1719
- const stats = await import_fs6.promises.stat(inputPath);
1720
- if (stats.isFile()) {
1721
- if (!inputPath.toLowerCase().endsWith(".sql")) {
1722
- throw new Error(`Input file must be a .sql file: ${inputPath}`);
1723
- }
1724
- return [{ filePath: inputPath, sql: await readTextFile(inputPath) }];
1725
- }
1726
- if (!stats.isDirectory()) {
1727
- throw new Error(`Input path must be a .sql file or directory: ${inputPath}`);
1728
- }
1729
- const sqlFiles = await findFiles(inputPath, /\.sql$/i);
1730
- sqlFiles.sort((left, right) => import_path6.default.basename(left).localeCompare(import_path6.default.basename(right)));
1731
- const result = [];
1732
- for (const filePath of sqlFiles) {
1733
- result.push({
1734
- filePath,
1735
- sql: await readTextFile(filePath)
1736
- });
1737
- }
1738
- return result;
1739
- }
1740
-
1741
- // src/core/sql/split-statements.ts
1742
- function splitSqlStatements(sql) {
1743
- const statements = [];
1744
- let current = "";
1745
- let inSingleQuote = false;
1746
- let inDoubleQuote = false;
1747
- let inLineComment = false;
1748
- let inBlockComment = false;
1749
- let dollarTag = null;
1750
- let index = 0;
1751
- while (index < sql.length) {
1752
- const char = sql[index];
1753
- const next = index + 1 < sql.length ? sql[index + 1] : "";
1754
- if (inLineComment) {
1755
- current += char;
1756
- if (char === "\n") {
1757
- inLineComment = false;
1758
- }
1759
- index++;
1760
- continue;
1761
- }
1762
- if (inBlockComment) {
1763
- current += char;
1764
- if (char === "*" && next === "/") {
1765
- current += next;
1766
- inBlockComment = false;
1767
- index += 2;
1768
- continue;
1769
- }
1770
- index++;
1771
- continue;
1772
- }
1773
- if (!inSingleQuote && !inDoubleQuote && dollarTag === null) {
1774
- if (char === "-" && next === "-") {
1775
- current += char + next;
1776
- inLineComment = true;
1777
- index += 2;
1778
- continue;
1779
- }
1780
- if (char === "/" && next === "*") {
1781
- current += char + next;
1782
- inBlockComment = true;
1783
- index += 2;
1784
- continue;
1785
- }
1786
- }
1787
- if (!inDoubleQuote && dollarTag === null && char === "'") {
1788
- current += char;
1789
- if (inSingleQuote && next === "'") {
1790
- current += next;
1791
- index += 2;
1792
- continue;
1793
- }
1794
- inSingleQuote = !inSingleQuote;
1795
- index++;
1796
- continue;
1797
- }
1798
- if (!inSingleQuote && dollarTag === null && char === '"') {
1799
- current += char;
1800
- if (inDoubleQuote && next === '"') {
1801
- current += next;
1802
- index += 2;
1803
- continue;
1804
- }
1805
- inDoubleQuote = !inDoubleQuote;
1806
- index++;
1807
- continue;
1808
- }
1809
- if (!inSingleQuote && !inDoubleQuote) {
1810
- if (dollarTag === null && char === "$") {
1811
- const remainder = sql.slice(index);
1812
- const match = remainder.match(/^\$[a-zA-Z_][a-zA-Z0-9_]*\$|^\$\$/);
1813
- if (match) {
1814
- dollarTag = match[0];
1815
- current += match[0];
1816
- index += match[0].length;
1817
- continue;
1818
- }
1819
- }
1820
- if (dollarTag !== null && sql.startsWith(dollarTag, index)) {
1821
- current += dollarTag;
1822
- index += dollarTag.length;
1823
- dollarTag = null;
1824
- continue;
1825
- }
1826
- }
1827
- if (!inSingleQuote && !inDoubleQuote && dollarTag === null && char === ";") {
1828
- const statement = current.trim();
1829
- if (statement.length > 0) {
1830
- statements.push(statement);
1831
- }
1832
- current = "";
1833
- index++;
1834
- continue;
1835
- }
1836
- current += char;
1837
- index++;
1838
- }
1839
- const tail = current.trim();
1840
- if (tail.length > 0) {
1841
- statements.push(tail);
1842
- }
1843
- return statements;
1844
- }
1845
-
1846
- // src/core/sql/parse-migration.ts
1847
- var COLUMN_CONSTRAINT_KEYWORDS = /* @__PURE__ */ new Set([
1848
- "primary",
1849
- "unique",
1850
- "not",
1851
- "null",
1852
- "default",
1853
- "constraint",
1854
- "references",
1855
- "check"
1856
- ]);
1857
- function normalizeSqlType(type) {
1858
- return type.trim().toLowerCase().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
1859
- }
1860
- function unquoteIdentifier(value) {
1861
- const trimmed = value.trim();
1862
- if (trimmed.startsWith('"') && trimmed.endsWith('"') && trimmed.length >= 2) {
1863
- return trimmed.slice(1, -1).replace(/""/g, '"');
1864
- }
1865
- return trimmed;
1866
- }
1867
- function normalizeIdentifier(identifier) {
1868
- const parts = identifier.trim().split(".").map((part) => unquoteIdentifier(part)).filter((part) => part.length > 0);
1869
- const leaf = parts.length > 0 ? parts[parts.length - 1] : identifier.trim();
1870
- return leaf.toLowerCase();
1178
+ function normalizeIdentifier(identifier) {
1179
+ const parts = identifier.trim().split(".").map((part) => unquoteIdentifier(part)).filter((part) => part.length > 0);
1180
+ const leaf = parts.length > 0 ? parts[parts.length - 1] : identifier.trim();
1181
+ return leaf.toLowerCase();
1871
1182
  }
1872
1183
  function removeSqlComments(statement) {
1873
1184
  let result = "";
@@ -1962,433 +1273,1177 @@ function splitTopLevelComma(input) {
1962
1273
  depth++;
1963
1274
  } else if (char === ")") {
1964
1275
  depth = Math.max(0, depth - 1);
1965
- } else if (char === "," && depth === 0) {
1966
- const segment = current.trim();
1967
- if (segment.length > 0) {
1968
- parts.push(segment);
1276
+ } else if (char === "," && depth === 0) {
1277
+ const segment = current.trim();
1278
+ if (segment.length > 0) {
1279
+ parts.push(segment);
1280
+ }
1281
+ current = "";
1282
+ continue;
1283
+ }
1284
+ }
1285
+ current += char;
1286
+ }
1287
+ const tail = current.trim();
1288
+ if (tail.length > 0) {
1289
+ parts.push(tail);
1290
+ }
1291
+ return parts;
1292
+ }
1293
+ function tokenize(segment) {
1294
+ const tokens = [];
1295
+ let current = "";
1296
+ let depth = 0;
1297
+ let inSingleQuote = false;
1298
+ let inDoubleQuote = false;
1299
+ for (let index = 0; index < segment.length; index++) {
1300
+ const char = segment[index];
1301
+ const next = index + 1 < segment.length ? segment[index + 1] : "";
1302
+ if (char === "'" && !inDoubleQuote) {
1303
+ current += char;
1304
+ if (inSingleQuote && next === "'") {
1305
+ current += next;
1306
+ index++;
1307
+ continue;
1308
+ }
1309
+ inSingleQuote = !inSingleQuote;
1310
+ continue;
1311
+ }
1312
+ if (char === '"' && !inSingleQuote) {
1313
+ current += char;
1314
+ if (inDoubleQuote && next === '"') {
1315
+ current += next;
1316
+ index++;
1317
+ continue;
1318
+ }
1319
+ inDoubleQuote = !inDoubleQuote;
1320
+ continue;
1321
+ }
1322
+ if (!inSingleQuote && !inDoubleQuote) {
1323
+ if (char === "(") {
1324
+ depth++;
1325
+ } else if (char === ")") {
1326
+ depth = Math.max(0, depth - 1);
1327
+ }
1328
+ if (/\s/.test(char) && depth === 0) {
1329
+ if (current.length > 0) {
1330
+ tokens.push(current);
1331
+ current = "";
1969
1332
  }
1970
- current = "";
1971
1333
  continue;
1972
1334
  }
1973
1335
  }
1974
1336
  current += char;
1975
1337
  }
1976
- const tail = current.trim();
1977
- if (tail.length > 0) {
1978
- parts.push(tail);
1338
+ if (current.length > 0) {
1339
+ tokens.push(current);
1340
+ }
1341
+ return tokens;
1342
+ }
1343
+ function parseColumnDefinition(segment) {
1344
+ const tokens = tokenize(segment);
1345
+ if (tokens.length < 2) {
1346
+ return null;
1347
+ }
1348
+ const name = normalizeIdentifier(tokens[0]);
1349
+ let cursor = 1;
1350
+ const typeTokens = [];
1351
+ while (cursor < tokens.length) {
1352
+ const lower = tokens[cursor].toLowerCase();
1353
+ if (COLUMN_CONSTRAINT_KEYWORDS.has(lower)) {
1354
+ break;
1355
+ }
1356
+ typeTokens.push(tokens[cursor]);
1357
+ cursor++;
1358
+ }
1359
+ if (typeTokens.length === 0) {
1360
+ return null;
1361
+ }
1362
+ const parsed = {
1363
+ name,
1364
+ type: normalizeSqlType(typeTokens.join(" ")),
1365
+ nullable: true
1366
+ };
1367
+ while (cursor < tokens.length) {
1368
+ const lower = tokens[cursor].toLowerCase();
1369
+ if (lower === "primary" && tokens[cursor + 1]?.toLowerCase() === "key") {
1370
+ parsed.primaryKey = true;
1371
+ parsed.nullable = false;
1372
+ cursor += 2;
1373
+ continue;
1374
+ }
1375
+ if (lower === "unique") {
1376
+ parsed.unique = true;
1377
+ cursor++;
1378
+ continue;
1379
+ }
1380
+ if (lower === "not" && tokens[cursor + 1]?.toLowerCase() === "null") {
1381
+ parsed.nullable = false;
1382
+ cursor += 2;
1383
+ continue;
1384
+ }
1385
+ if (lower === "null") {
1386
+ parsed.nullable = true;
1387
+ cursor++;
1388
+ continue;
1389
+ }
1390
+ if (lower === "default") {
1391
+ cursor++;
1392
+ const defaultTokens = [];
1393
+ while (cursor < tokens.length) {
1394
+ const probe = tokens[cursor].toLowerCase();
1395
+ if (probe === "constraint" || probe === "references" || probe === "check" || probe === "not" && tokens[cursor + 1]?.toLowerCase() === "null" || probe === "null" || probe === "unique" || probe === "primary" && tokens[cursor + 1]?.toLowerCase() === "key") {
1396
+ break;
1397
+ }
1398
+ defaultTokens.push(tokens[cursor]);
1399
+ cursor++;
1400
+ }
1401
+ parsed.default = normalizeDefault(defaultTokens.join(" "));
1402
+ continue;
1403
+ }
1404
+ cursor++;
1405
+ }
1406
+ return parsed;
1407
+ }
1408
+ function parseCreateTableConstraint(segment) {
1409
+ const normalized = segment.trim().replace(/\s+/g, " ");
1410
+ const constraintMatch = normalized.match(/^constraint\s+([^\s]+)\s+(primary\s+key|unique)\s*\((.+)\)$/i);
1411
+ if (constraintMatch) {
1412
+ const [, rawName, kind, rawColumns] = constraintMatch;
1413
+ const columns = splitTopLevelComma(rawColumns).map((item) => normalizeIdentifier(item));
1414
+ if (kind.toLowerCase().includes("primary")) {
1415
+ return { type: "PRIMARY_KEY", name: normalizeIdentifier(rawName), columns };
1416
+ }
1417
+ return { type: "UNIQUE", name: normalizeIdentifier(rawName), columns };
1418
+ }
1419
+ const barePk = normalized.match(/^primary\s+key\s*\((.+)\)$/i);
1420
+ if (barePk) {
1421
+ const columns = splitTopLevelComma(barePk[1]).map((item) => normalizeIdentifier(item));
1422
+ return { type: "PRIMARY_KEY", columns };
1423
+ }
1424
+ const bareUnique = normalized.match(/^unique\s*\((.+)\)$/i);
1425
+ if (bareUnique) {
1426
+ const columns = splitTopLevelComma(bareUnique[1]).map((item) => normalizeIdentifier(item));
1427
+ return { type: "UNIQUE", columns };
1428
+ }
1429
+ return null;
1430
+ }
1431
+ function parseAlterTablePrefix(stmt) {
1432
+ const match = stmt.match(/^alter\s+table\s+(?:if\s+exists\s+)?(?:only\s+)?(.+)$/i);
1433
+ if (!match) {
1434
+ return null;
1435
+ }
1436
+ const remainder = match[1].trim();
1437
+ const tokens = tokenize(remainder);
1438
+ if (tokens.length < 2) {
1439
+ return null;
1440
+ }
1441
+ const tableToken = tokens[0];
1442
+ const table = normalizeIdentifier(tableToken);
1443
+ const rest = remainder.slice(tableToken.length).trim();
1444
+ return { table, rest };
1445
+ }
1446
+ function parseCreateTable(stmt) {
1447
+ const match = stmt.match(/^create\s+table\s+(?:if\s+not\s+exists\s+)?(.+?)\s*\((.*)\)$/is);
1448
+ if (!match) {
1449
+ return null;
1450
+ }
1451
+ const table = normalizeIdentifier(match[1]);
1452
+ const body = match[2];
1453
+ const segments = splitTopLevelComma(body);
1454
+ const columns = [];
1455
+ const constraints = [];
1456
+ for (const segment of segments) {
1457
+ const constraint = parseCreateTableConstraint(segment);
1458
+ if (constraint) {
1459
+ constraints.push(constraint);
1460
+ continue;
1461
+ }
1462
+ const column = parseColumnDefinition(segment);
1463
+ if (column) {
1464
+ columns.push(column);
1465
+ }
1466
+ }
1467
+ return {
1468
+ kind: "CREATE_TABLE",
1469
+ table,
1470
+ columns,
1471
+ constraints
1472
+ };
1473
+ }
1474
+ function parseAlterTableAddColumn(stmt) {
1475
+ const prefix = parseAlterTablePrefix(stmt);
1476
+ if (!prefix) {
1477
+ return null;
1478
+ }
1479
+ const match = prefix.rest.match(/^add\s+column\s+(?:if\s+not\s+exists\s+)?(.+)$/i);
1480
+ if (!match) {
1481
+ return null;
1482
+ }
1483
+ const column = parseColumnDefinition(match[1]);
1484
+ if (!column) {
1485
+ return null;
1486
+ }
1487
+ return { kind: "ADD_COLUMN", table: prefix.table, column };
1488
+ }
1489
+ function parseAlterColumnType(stmt) {
1490
+ const prefix = parseAlterTablePrefix(stmt);
1491
+ if (!prefix) {
1492
+ return null;
1493
+ }
1494
+ const match = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+type\s+(.+)$/i);
1495
+ if (!match) {
1496
+ return null;
1497
+ }
1498
+ const column = normalizeIdentifier(match[1]);
1499
+ const toType = normalizeSqlType(match[2].replace(/\s+using\s+[\s\S]*$/i, "").trim());
1500
+ return {
1501
+ kind: "ALTER_COLUMN_TYPE",
1502
+ table: prefix.table,
1503
+ column,
1504
+ toType
1505
+ };
1506
+ }
1507
+ function parseSetDropNotNull(stmt) {
1508
+ const prefix = parseAlterTablePrefix(stmt);
1509
+ if (!prefix) {
1510
+ return null;
1511
+ }
1512
+ const setMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+set\s+not\s+null$/i);
1513
+ if (setMatch) {
1514
+ return {
1515
+ kind: "SET_NOT_NULL",
1516
+ table: prefix.table,
1517
+ column: normalizeIdentifier(setMatch[1])
1518
+ };
1519
+ }
1520
+ const dropMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+drop\s+not\s+null$/i);
1521
+ if (dropMatch) {
1522
+ return {
1523
+ kind: "DROP_NOT_NULL",
1524
+ table: prefix.table,
1525
+ column: normalizeIdentifier(dropMatch[1])
1526
+ };
1527
+ }
1528
+ return null;
1529
+ }
1530
+ function parseSetDropDefault(stmt) {
1531
+ const prefix = parseAlterTablePrefix(stmt);
1532
+ if (!prefix) {
1533
+ return null;
1534
+ }
1535
+ const setMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+set\s+default\s+(.+)$/i);
1536
+ if (setMatch) {
1537
+ return {
1538
+ kind: "SET_DEFAULT",
1539
+ table: prefix.table,
1540
+ column: normalizeIdentifier(setMatch[1]),
1541
+ expr: normalizeDefault(setMatch[2].trim()) ?? setMatch[2].trim()
1542
+ };
1543
+ }
1544
+ const dropMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+drop\s+default$/i);
1545
+ if (dropMatch) {
1546
+ return {
1547
+ kind: "DROP_DEFAULT",
1548
+ table: prefix.table,
1549
+ column: normalizeIdentifier(dropMatch[1])
1550
+ };
1551
+ }
1552
+ return null;
1553
+ }
1554
+ function parseAddDropConstraint(stmt) {
1555
+ const prefix = parseAlterTablePrefix(stmt);
1556
+ if (!prefix) {
1557
+ return null;
1558
+ }
1559
+ const addMatch = prefix.rest.match(/^add\s+constraint\s+([^\s]+)\s+(primary\s+key|unique)\s*\((.+)\)$/i);
1560
+ if (addMatch) {
1561
+ const [, rawName, kind, rawColumns] = addMatch;
1562
+ const columns = splitTopLevelComma(rawColumns).map((item) => normalizeIdentifier(item));
1563
+ const constraint = kind.toLowerCase().includes("primary") ? { type: "PRIMARY_KEY", name: normalizeIdentifier(rawName), columns } : { type: "UNIQUE", name: normalizeIdentifier(rawName), columns };
1564
+ return {
1565
+ kind: "ADD_CONSTRAINT",
1566
+ table: prefix.table,
1567
+ constraint
1568
+ };
1569
+ }
1570
+ const dropMatch = prefix.rest.match(/^drop\s+constraint\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
1571
+ if (dropMatch) {
1572
+ return {
1573
+ kind: "DROP_CONSTRAINT",
1574
+ table: prefix.table,
1575
+ name: normalizeIdentifier(dropMatch[1])
1576
+ };
1577
+ }
1578
+ return null;
1579
+ }
1580
+ function parseDropColumn(stmt) {
1581
+ const prefix = parseAlterTablePrefix(stmt);
1582
+ if (!prefix) {
1583
+ return null;
1584
+ }
1585
+ const match = prefix.rest.match(/^drop\s+column\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
1586
+ if (!match) {
1587
+ return null;
1979
1588
  }
1980
- return parts;
1589
+ return {
1590
+ kind: "DROP_COLUMN",
1591
+ table: prefix.table,
1592
+ column: normalizeIdentifier(match[1])
1593
+ };
1981
1594
  }
1982
- function tokenize(segment) {
1983
- const tokens = [];
1984
- let current = "";
1985
- let depth = 0;
1986
- let inSingleQuote = false;
1987
- let inDoubleQuote = false;
1988
- for (let index = 0; index < segment.length; index++) {
1989
- const char = segment[index];
1990
- const next = index + 1 < segment.length ? segment[index + 1] : "";
1991
- if (char === "'" && !inDoubleQuote) {
1992
- current += char;
1993
- if (inSingleQuote && next === "'") {
1994
- current += next;
1995
- index++;
1996
- continue;
1997
- }
1998
- inSingleQuote = !inSingleQuote;
1595
+ function parseDropTable(stmt) {
1596
+ const match = stmt.match(/^drop\s+table\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
1597
+ if (!match) {
1598
+ return null;
1599
+ }
1600
+ return {
1601
+ kind: "DROP_TABLE",
1602
+ table: normalizeIdentifier(match[1])
1603
+ };
1604
+ }
1605
+ function parseMigrationSql(sql) {
1606
+ const statements = splitSqlStatements(sql);
1607
+ const ops = [];
1608
+ const warnings = [];
1609
+ for (const raw of statements) {
1610
+ const stmt = removeSqlComments(raw).trim();
1611
+ if (!stmt) {
1999
1612
  continue;
2000
1613
  }
2001
- if (char === '"' && !inSingleQuote) {
2002
- current += char;
2003
- if (inDoubleQuote && next === '"') {
2004
- current += next;
2005
- index++;
2006
- continue;
1614
+ let parsed = null;
1615
+ for (const parseFn of PARSERS) {
1616
+ parsed = parseFn(stmt);
1617
+ if (parsed) {
1618
+ break;
2007
1619
  }
2008
- inDoubleQuote = !inDoubleQuote;
2009
- continue;
2010
1620
  }
2011
- if (!inSingleQuote && !inDoubleQuote) {
2012
- if (char === "(") {
2013
- depth++;
2014
- } else if (char === ")") {
2015
- depth = Math.max(0, depth - 1);
2016
- }
2017
- if (/\s/.test(char) && depth === 0) {
2018
- if (current.length > 0) {
2019
- tokens.push(current);
2020
- current = "";
2021
- }
2022
- continue;
2023
- }
1621
+ if (parsed) {
1622
+ ops.push(parsed);
1623
+ } else {
1624
+ warnings.push({
1625
+ statement: stmt,
1626
+ reason: "Unsupported or unrecognized statement"
1627
+ });
2024
1628
  }
2025
- current += char;
2026
1629
  }
2027
- if (current.length > 0) {
2028
- tokens.push(current);
1630
+ return { ops, warnings };
1631
+ }
1632
+ var COLUMN_CONSTRAINT_KEYWORDS, PARSERS;
1633
+ var init_parse_migration = __esm({
1634
+ "node_modules/@xubylele/schema-forge-core/dist/core/sql/parse-migration.js"() {
1635
+ "use strict";
1636
+ init_normalize();
1637
+ init_split_statements();
1638
+ COLUMN_CONSTRAINT_KEYWORDS = /* @__PURE__ */ new Set([
1639
+ "primary",
1640
+ "unique",
1641
+ "not",
1642
+ "null",
1643
+ "default",
1644
+ "constraint",
1645
+ "references",
1646
+ "check"
1647
+ ]);
1648
+ PARSERS = [
1649
+ parseCreateTable,
1650
+ parseAlterTableAddColumn,
1651
+ parseAlterColumnType,
1652
+ parseSetDropNotNull,
1653
+ parseSetDropDefault,
1654
+ parseAddDropConstraint,
1655
+ parseDropColumn,
1656
+ parseDropTable
1657
+ ];
2029
1658
  }
2030
- return tokens;
1659
+ });
1660
+
1661
+ // node_modules/@xubylele/schema-forge-core/dist/core/sql/apply-ops.js
1662
+ function toSchemaColumn(column) {
1663
+ return {
1664
+ name: column.name,
1665
+ type: column.type,
1666
+ nullable: column.nullable,
1667
+ ...column.default !== void 0 ? { default: column.default } : {},
1668
+ ...column.unique !== void 0 ? { unique: column.unique } : {},
1669
+ ...column.primaryKey !== void 0 ? { primaryKey: column.primaryKey } : {}
1670
+ };
2031
1671
  }
2032
- function parseColumnDefinition(segment) {
2033
- const tokens = tokenize(segment);
2034
- if (tokens.length < 2) {
2035
- return null;
1672
+ function applySingleColumnConstraint(table, constraint) {
1673
+ if (constraint.columns.length !== 1) {
1674
+ return false;
2036
1675
  }
2037
- const name = normalizeIdentifier(tokens[0]);
2038
- let cursor = 1;
2039
- const typeTokens = [];
2040
- while (cursor < tokens.length) {
2041
- const lower = tokens[cursor].toLowerCase();
2042
- if (COLUMN_CONSTRAINT_KEYWORDS.has(lower)) {
2043
- break;
2044
- }
2045
- typeTokens.push(tokens[cursor]);
2046
- cursor++;
1676
+ const targetColumn = table.columns.find((column) => column.name === constraint.columns[0]);
1677
+ if (!targetColumn) {
1678
+ return false;
2047
1679
  }
2048
- if (typeTokens.length === 0) {
2049
- return null;
1680
+ if (constraint.type === "PRIMARY_KEY") {
1681
+ table.primaryKey = targetColumn.name;
1682
+ targetColumn.primaryKey = true;
1683
+ targetColumn.nullable = false;
1684
+ return true;
2050
1685
  }
2051
- const parsed = {
2052
- name,
2053
- type: normalizeSqlType(typeTokens.join(" ")),
2054
- nullable: true
2055
- };
2056
- while (cursor < tokens.length) {
2057
- const lower = tokens[cursor].toLowerCase();
2058
- if (lower === "primary" && tokens[cursor + 1]?.toLowerCase() === "key") {
2059
- parsed.primaryKey = true;
2060
- parsed.nullable = false;
2061
- cursor += 2;
2062
- continue;
2063
- }
2064
- if (lower === "unique") {
2065
- parsed.unique = true;
2066
- cursor++;
2067
- continue;
2068
- }
2069
- if (lower === "not" && tokens[cursor + 1]?.toLowerCase() === "null") {
2070
- parsed.nullable = false;
2071
- cursor += 2;
2072
- continue;
1686
+ targetColumn.unique = true;
1687
+ return true;
1688
+ }
1689
+ function clearConstraintByName(table, name) {
1690
+ if (name.endsWith("_pkey") || name.startsWith("pk_")) {
1691
+ if (table.primaryKey) {
1692
+ const pkColumn = table.columns.find((column) => column.name === table.primaryKey);
1693
+ if (pkColumn) {
1694
+ pkColumn.primaryKey = false;
1695
+ }
1696
+ table.primaryKey = null;
2073
1697
  }
2074
- if (lower === "null") {
2075
- parsed.nullable = true;
2076
- cursor++;
2077
- continue;
1698
+ return;
1699
+ }
1700
+ if (name.endsWith("_key") || name.startsWith("uq_")) {
1701
+ for (const column of table.columns) {
1702
+ if (column.unique) {
1703
+ column.unique = false;
1704
+ }
2078
1705
  }
2079
- if (lower === "default") {
2080
- cursor++;
2081
- const defaultTokens = [];
2082
- while (cursor < tokens.length) {
2083
- const probe = tokens[cursor].toLowerCase();
2084
- if (probe === "constraint" || probe === "references" || probe === "check" || probe === "not" && tokens[cursor + 1]?.toLowerCase() === "null" || probe === "null" || probe === "unique" || probe === "primary" && tokens[cursor + 1]?.toLowerCase() === "key") {
1706
+ }
1707
+ }
1708
+ function getOrCreateTable(tables, name) {
1709
+ if (!tables[name]) {
1710
+ tables[name] = { name, columns: [] };
1711
+ }
1712
+ return tables[name];
1713
+ }
1714
+ function applySqlOps(ops) {
1715
+ const tables = {};
1716
+ const warnings = [];
1717
+ for (const op of ops) {
1718
+ switch (op.kind) {
1719
+ case "CREATE_TABLE": {
1720
+ const table = {
1721
+ name: op.table,
1722
+ columns: op.columns.map(toSchemaColumn)
1723
+ };
1724
+ for (const column of table.columns) {
1725
+ if (column.primaryKey) {
1726
+ table.primaryKey = column.name;
1727
+ }
1728
+ }
1729
+ for (const constraint of op.constraints) {
1730
+ const applied = applySingleColumnConstraint(table, constraint);
1731
+ if (!applied) {
1732
+ warnings.push({
1733
+ statement: `CREATE TABLE ${op.table}`,
1734
+ reason: `Constraint ${constraint.type}${constraint.name ? ` (${constraint.name})` : ""} is unsupported for schema reconstruction`
1735
+ });
1736
+ }
1737
+ }
1738
+ tables[op.table] = table;
1739
+ break;
1740
+ }
1741
+ case "ADD_COLUMN": {
1742
+ const table = getOrCreateTable(tables, op.table);
1743
+ table.columns = table.columns.filter((column) => column.name !== op.column.name);
1744
+ table.columns.push(toSchemaColumn(op.column));
1745
+ if (op.column.primaryKey) {
1746
+ table.primaryKey = op.column.name;
1747
+ }
1748
+ break;
1749
+ }
1750
+ case "ALTER_COLUMN_TYPE": {
1751
+ const table = tables[op.table];
1752
+ if (!table) {
1753
+ break;
1754
+ }
1755
+ const column = table.columns.find((item) => item.name === op.column);
1756
+ if (column) {
1757
+ column.type = op.toType;
1758
+ }
1759
+ break;
1760
+ }
1761
+ case "SET_NOT_NULL": {
1762
+ const table = tables[op.table];
1763
+ const column = table?.columns.find((item) => item.name === op.column);
1764
+ if (column) {
1765
+ column.nullable = false;
1766
+ }
1767
+ break;
1768
+ }
1769
+ case "DROP_NOT_NULL": {
1770
+ const table = tables[op.table];
1771
+ const column = table?.columns.find((item) => item.name === op.column);
1772
+ if (column) {
1773
+ column.nullable = true;
1774
+ }
1775
+ break;
1776
+ }
1777
+ case "SET_DEFAULT": {
1778
+ const table = tables[op.table];
1779
+ const column = table?.columns.find((item) => item.name === op.column);
1780
+ if (column) {
1781
+ column.default = op.expr;
1782
+ }
1783
+ break;
1784
+ }
1785
+ case "DROP_DEFAULT": {
1786
+ const table = tables[op.table];
1787
+ const column = table?.columns.find((item) => item.name === op.column);
1788
+ if (column) {
1789
+ column.default = null;
1790
+ }
1791
+ break;
1792
+ }
1793
+ case "ADD_CONSTRAINT": {
1794
+ const table = tables[op.table];
1795
+ if (!table) {
2085
1796
  break;
2086
1797
  }
2087
- defaultTokens.push(tokens[cursor]);
2088
- cursor++;
1798
+ const applied = applySingleColumnConstraint(table, op.constraint);
1799
+ if (!applied) {
1800
+ warnings.push({
1801
+ statement: `ALTER TABLE ${op.table} ADD CONSTRAINT ${op.constraint.name ?? "<unnamed>"}`,
1802
+ reason: `Constraint ${op.constraint.type} is unsupported for schema reconstruction`
1803
+ });
1804
+ }
1805
+ break;
1806
+ }
1807
+ case "DROP_CONSTRAINT": {
1808
+ const table = tables[op.table];
1809
+ if (!table) {
1810
+ break;
1811
+ }
1812
+ clearConstraintByName(table, op.name);
1813
+ break;
1814
+ }
1815
+ case "DROP_COLUMN": {
1816
+ const table = tables[op.table];
1817
+ if (!table) {
1818
+ break;
1819
+ }
1820
+ table.columns = table.columns.filter((column) => column.name !== op.column);
1821
+ if (table.primaryKey === op.column) {
1822
+ table.primaryKey = null;
1823
+ }
1824
+ break;
1825
+ }
1826
+ case "DROP_TABLE": {
1827
+ delete tables[op.table];
1828
+ break;
2089
1829
  }
2090
- parsed.default = normalizeDefault(defaultTokens.join(" "));
2091
- continue;
2092
1830
  }
2093
- cursor++;
2094
1831
  }
2095
- return parsed;
1832
+ const schema = { tables };
1833
+ return { schema, warnings };
2096
1834
  }
2097
- function parseCreateTableConstraint(segment) {
2098
- const normalized = segment.trim().replace(/\s+/g, " ");
2099
- const constraintMatch = normalized.match(/^constraint\s+([^\s]+)\s+(primary\s+key|unique)\s*\((.+)\)$/i);
2100
- if (constraintMatch) {
2101
- const [, rawName, kind, rawColumns] = constraintMatch;
2102
- const columns = splitTopLevelComma(rawColumns).map((item) => normalizeIdentifier(item));
2103
- if (kind.toLowerCase().includes("primary")) {
2104
- return { type: "PRIMARY_KEY", name: normalizeIdentifier(rawName), columns };
2105
- }
2106
- return { type: "UNIQUE", name: normalizeIdentifier(rawName), columns };
1835
+ var init_apply_ops = __esm({
1836
+ "node_modules/@xubylele/schema-forge-core/dist/core/sql/apply-ops.js"() {
1837
+ "use strict";
2107
1838
  }
2108
- const barePk = normalized.match(/^primary\s+key\s*\((.+)\)$/i);
2109
- if (barePk) {
2110
- const columns = splitTopLevelComma(barePk[1]).map((item) => normalizeIdentifier(item));
2111
- return { type: "PRIMARY_KEY", columns };
1839
+ });
1840
+
1841
+ // node_modules/@xubylele/schema-forge-core/dist/core/sql/schema-to-dsl.js
1842
+ function renderColumn(column) {
1843
+ const parts = [column.name, column.type];
1844
+ if (column.primaryKey) {
1845
+ parts.push("pk");
2112
1846
  }
2113
- const bareUnique = normalized.match(/^unique\s*\((.+)\)$/i);
2114
- if (bareUnique) {
2115
- const columns = splitTopLevelComma(bareUnique[1]).map((item) => normalizeIdentifier(item));
2116
- return { type: "UNIQUE", columns };
1847
+ if (column.unique) {
1848
+ parts.push("unique");
2117
1849
  }
2118
- return null;
2119
- }
2120
- function parseAlterTablePrefix(stmt) {
2121
- const match = stmt.match(/^alter\s+table\s+(?:if\s+exists\s+)?(?:only\s+)?(.+)$/i);
2122
- if (!match) {
2123
- return null;
1850
+ if (column.nullable === false && !column.primaryKey) {
1851
+ parts.push("not null");
2124
1852
  }
2125
- const remainder = match[1].trim();
2126
- const tokens = tokenize(remainder);
2127
- if (tokens.length < 2) {
2128
- return null;
1853
+ if (column.default !== void 0 && column.default !== null) {
1854
+ parts.push(`default ${column.default}`);
2129
1855
  }
2130
- const tableToken = tokens[0];
2131
- const table = normalizeIdentifier(tableToken);
2132
- const rest = remainder.slice(tableToken.length).trim();
2133
- return { table, rest };
1856
+ return ` ${parts.join(" ")}`;
2134
1857
  }
2135
- function parseCreateTable(stmt) {
2136
- const match = stmt.match(/^create\s+table\s+(?:if\s+not\s+exists\s+)?(.+?)\s*\((.*)\)$/is);
2137
- if (!match) {
2138
- return null;
2139
- }
2140
- const table = normalizeIdentifier(match[1]);
2141
- const body = match[2];
2142
- const segments = splitTopLevelComma(body);
2143
- const columns = [];
2144
- const constraints = [];
2145
- for (const segment of segments) {
2146
- const constraint = parseCreateTableConstraint(segment);
2147
- if (constraint) {
2148
- constraints.push(constraint);
2149
- continue;
2150
- }
2151
- const column = parseColumnDefinition(segment);
2152
- if (column) {
2153
- columns.push(column);
1858
+ function schemaToDsl(schema) {
1859
+ const tableNames = Object.keys(schema.tables).sort((left, right) => left.localeCompare(right));
1860
+ const blocks = tableNames.map((tableName) => {
1861
+ const table = schema.tables[tableName];
1862
+ const lines = [`table ${table.name} {`];
1863
+ for (const column of table.columns) {
1864
+ lines.push(renderColumn(column));
2154
1865
  }
1866
+ lines.push("}");
1867
+ return lines.join("\n");
1868
+ });
1869
+ if (blocks.length === 0) {
1870
+ return "# SchemaForge schema definition\n";
2155
1871
  }
2156
- return {
2157
- kind: "CREATE_TABLE",
2158
- table,
2159
- columns,
2160
- constraints
2161
- };
1872
+ return `# SchemaForge schema definition
1873
+
1874
+ ${blocks.join("\n\n")}
1875
+ `;
2162
1876
  }
2163
- function parseAlterTableAddColumn(stmt) {
2164
- const prefix = parseAlterTablePrefix(stmt);
2165
- if (!prefix) {
2166
- return null;
1877
+ var init_schema_to_dsl = __esm({
1878
+ "node_modules/@xubylele/schema-forge-core/dist/core/sql/schema-to-dsl.js"() {
1879
+ "use strict";
2167
1880
  }
2168
- const match = prefix.rest.match(/^add\s+column\s+(?:if\s+not\s+exists\s+)?(.+)$/i);
2169
- if (!match) {
2170
- return null;
1881
+ });
1882
+
1883
+ // node_modules/@xubylele/schema-forge-core/dist/core/sql/load-migrations.js
1884
+ async function loadMigrationSqlInput(inputPath) {
1885
+ const stats = await import_fs4.promises.stat(inputPath);
1886
+ if (stats.isFile()) {
1887
+ if (!inputPath.toLowerCase().endsWith(".sql")) {
1888
+ throw new Error(`Input file must be a .sql file: ${inputPath}`);
1889
+ }
1890
+ return [{ filePath: inputPath, sql: await readTextFile2(inputPath) }];
2171
1891
  }
2172
- const column = parseColumnDefinition(match[1]);
2173
- if (!column) {
2174
- return null;
1892
+ if (!stats.isDirectory()) {
1893
+ throw new Error(`Input path must be a .sql file or directory: ${inputPath}`);
2175
1894
  }
2176
- return { kind: "ADD_COLUMN", table: prefix.table, column };
1895
+ const sqlFiles = await findFiles(inputPath, /\.sql$/i);
1896
+ sqlFiles.sort((left, right) => import_path5.default.basename(left).localeCompare(import_path5.default.basename(right)));
1897
+ const result = [];
1898
+ for (const filePath of sqlFiles) {
1899
+ result.push({
1900
+ filePath,
1901
+ sql: await readTextFile2(filePath)
1902
+ });
1903
+ }
1904
+ return result;
2177
1905
  }
2178
- function parseAlterColumnType(stmt) {
2179
- const prefix = parseAlterTablePrefix(stmt);
2180
- if (!prefix) {
2181
- return null;
1906
+ var import_fs4, import_path5;
1907
+ var init_load_migrations = __esm({
1908
+ "node_modules/@xubylele/schema-forge-core/dist/core/sql/load-migrations.js"() {
1909
+ "use strict";
1910
+ import_fs4 = require("fs");
1911
+ import_path5 = __toESM(require("path"), 1);
1912
+ init_fs();
1913
+ }
1914
+ });
1915
+
1916
+ // node_modules/@xubylele/schema-forge-core/dist/core/paths.js
1917
+ function getProjectRoot2(cwd = process.cwd()) {
1918
+ return cwd;
1919
+ }
1920
+ function getSchemaForgeDir2(root) {
1921
+ return import_path6.default.join(root, "schemaforge");
1922
+ }
1923
+ function getSchemaFilePath2(root, config) {
1924
+ const schemaForgeDir = getSchemaForgeDir2(root);
1925
+ const fileName = config?.schemaFile || "schema.sf";
1926
+ return import_path6.default.join(schemaForgeDir, fileName);
1927
+ }
1928
+ function getConfigPath2(root) {
1929
+ const schemaForgeDir = getSchemaForgeDir2(root);
1930
+ return import_path6.default.join(schemaForgeDir, "config.json");
1931
+ }
1932
+ function getStatePath2(root, config) {
1933
+ const schemaForgeDir = getSchemaForgeDir2(root);
1934
+ const fileName = config?.stateFile || "state.json";
1935
+ return import_path6.default.join(schemaForgeDir, fileName);
1936
+ }
1937
+ var import_path6;
1938
+ var init_paths = __esm({
1939
+ "node_modules/@xubylele/schema-forge-core/dist/core/paths.js"() {
1940
+ "use strict";
1941
+ import_path6 = __toESM(require("path"), 1);
1942
+ }
1943
+ });
1944
+
1945
+ // node_modules/@xubylele/schema-forge-core/dist/core/utils.js
1946
+ function nowTimestamp() {
1947
+ const date = /* @__PURE__ */ new Date();
1948
+ const pad = (value) => String(value).padStart(2, "0");
1949
+ return String(date.getFullYear()) + pad(date.getMonth() + 1) + pad(date.getDate()) + pad(date.getHours()) + pad(date.getMinutes()) + pad(date.getSeconds());
1950
+ }
1951
+ function slugifyName(name) {
1952
+ return name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "") || "migration";
1953
+ }
1954
+ var init_utils = __esm({
1955
+ "node_modules/@xubylele/schema-forge-core/dist/core/utils.js"() {
1956
+ "use strict";
1957
+ }
1958
+ });
1959
+
1960
+ // node_modules/@xubylele/schema-forge-core/dist/core/errors.js
1961
+ var SchemaValidationError;
1962
+ var init_errors = __esm({
1963
+ "node_modules/@xubylele/schema-forge-core/dist/core/errors.js"() {
1964
+ "use strict";
1965
+ SchemaValidationError = class extends Error {
1966
+ constructor(message) {
1967
+ super(message);
1968
+ this.name = "SchemaValidationError";
1969
+ }
1970
+ };
1971
+ }
1972
+ });
1973
+
1974
+ // node_modules/@xubylele/schema-forge-core/dist/index.js
1975
+ var dist_exports = {};
1976
+ __export(dist_exports, {
1977
+ SchemaValidationError: () => SchemaValidationError,
1978
+ applySqlOps: () => applySqlOps,
1979
+ diffSchemas: () => diffSchemas,
1980
+ ensureDir: () => ensureDir2,
1981
+ fileExists: () => fileExists2,
1982
+ findFiles: () => findFiles,
1983
+ generateSql: () => generateSql,
1984
+ getColumnNamesFromSchema: () => getColumnNamesFromSchema,
1985
+ getColumnNamesFromState: () => getColumnNamesFromState,
1986
+ getConfigPath: () => getConfigPath2,
1987
+ getProjectRoot: () => getProjectRoot2,
1988
+ getSchemaFilePath: () => getSchemaFilePath2,
1989
+ getSchemaForgeDir: () => getSchemaForgeDir2,
1990
+ getStatePath: () => getStatePath2,
1991
+ getTableNamesFromSchema: () => getTableNamesFromSchema,
1992
+ getTableNamesFromState: () => getTableNamesFromState,
1993
+ legacyPkName: () => legacyPkName,
1994
+ legacyUqName: () => legacyUqName,
1995
+ loadMigrationSqlInput: () => loadMigrationSqlInput,
1996
+ loadState: () => loadState,
1997
+ normalizeDefault: () => normalizeDefault,
1998
+ normalizeIdent: () => normalizeIdent,
1999
+ nowTimestamp: () => nowTimestamp,
2000
+ parseAddDropConstraint: () => parseAddDropConstraint,
2001
+ parseAlterColumnType: () => parseAlterColumnType,
2002
+ parseAlterTableAddColumn: () => parseAlterTableAddColumn,
2003
+ parseCreateTable: () => parseCreateTable,
2004
+ parseDropColumn: () => parseDropColumn,
2005
+ parseDropTable: () => parseDropTable,
2006
+ parseMigrationSql: () => parseMigrationSql,
2007
+ parseSchema: () => parseSchema,
2008
+ parseSetDropDefault: () => parseSetDropDefault,
2009
+ parseSetDropNotNull: () => parseSetDropNotNull,
2010
+ pkName: () => pkName,
2011
+ readJsonFile: () => readJsonFile2,
2012
+ readTextFile: () => readTextFile2,
2013
+ saveState: () => saveState,
2014
+ schemaToDsl: () => schemaToDsl,
2015
+ schemaToState: () => schemaToState,
2016
+ slugifyName: () => slugifyName,
2017
+ splitSqlStatements: () => splitSqlStatements,
2018
+ toValidationReport: () => toValidationReport,
2019
+ uqName: () => uqName,
2020
+ validateSchema: () => validateSchema,
2021
+ validateSchemaChanges: () => validateSchemaChanges,
2022
+ writeJsonFile: () => writeJsonFile2,
2023
+ writeTextFile: () => writeTextFile2
2024
+ });
2025
+ var init_dist = __esm({
2026
+ "node_modules/@xubylele/schema-forge-core/dist/index.js"() {
2027
+ "use strict";
2028
+ init_parser();
2029
+ init_diff();
2030
+ init_validator();
2031
+ init_validate();
2032
+ init_state_manager();
2033
+ init_sql_generator();
2034
+ init_parse_migration();
2035
+ init_apply_ops();
2036
+ init_schema_to_dsl();
2037
+ init_load_migrations();
2038
+ init_split_statements();
2039
+ init_fs();
2040
+ init_normalize();
2041
+ init_paths();
2042
+ init_utils();
2043
+ init_errors();
2044
+ }
2045
+ });
2046
+
2047
+ // src/cli.ts
2048
+ var import_commander6 = require("commander");
2049
+
2050
+ // package.json
2051
+ var package_default = {
2052
+ name: "@xubylele/schema-forge",
2053
+ version: "1.5.1",
2054
+ description: "Universal migration generator from schema DSL",
2055
+ main: "dist/cli.js",
2056
+ type: "commonjs",
2057
+ bin: {
2058
+ "schema-forge": "dist/cli.js"
2059
+ },
2060
+ scripts: {
2061
+ build: "tsup src/cli.ts --format cjs --dts",
2062
+ dev: "ts-node src/cli.ts",
2063
+ test: "vitest",
2064
+ prepublishOnly: "npm run build",
2065
+ "publish:public": "npm publish --access public",
2066
+ changeset: "changeset",
2067
+ "version-packages": "changeset version",
2068
+ release: "changeset publish"
2069
+ },
2070
+ keywords: [
2071
+ "cli",
2072
+ "schema",
2073
+ "sql",
2074
+ "generator",
2075
+ "migration",
2076
+ "database"
2077
+ ],
2078
+ author: "Xuby",
2079
+ license: "ISC",
2080
+ repository: {
2081
+ type: "git",
2082
+ url: "git+https://github.com/xubylele/schema-forge.git"
2083
+ },
2084
+ bugs: "https://github.com/xubylele/schema-forge/issues",
2085
+ homepage: "https://github.com/xubylele/schema-forge#readme",
2086
+ files: [
2087
+ "dist"
2088
+ ],
2089
+ engines: {
2090
+ node: ">=18.0.0"
2091
+ },
2092
+ dependencies: {
2093
+ boxen: "^8.0.1",
2094
+ chalk: "^5.6.2",
2095
+ commander: "^14.0.3"
2096
+ },
2097
+ devDependencies: {
2098
+ "@changesets/cli": "^2.29.8",
2099
+ "@types/node": "^25.2.3",
2100
+ "@xubylele/schema-forge-core": "^1.0.5",
2101
+ "ts-node": "^10.9.2",
2102
+ tsup: "^8.5.1",
2103
+ typescript: "^5.9.3",
2104
+ vitest: "^4.0.18"
2182
2105
  }
2183
- const match = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+type\s+(.+)$/i);
2184
- if (!match) {
2185
- return null;
2106
+ };
2107
+
2108
+ // src/commands/diff.ts
2109
+ var import_commander = require("commander");
2110
+ var import_path7 = __toESM(require("path"));
2111
+
2112
+ // src/core/fs.ts
2113
+ var import_fs = require("fs");
2114
+ var import_path = __toESM(require("path"));
2115
+ async function ensureDir(dirPath) {
2116
+ try {
2117
+ await import_fs.promises.mkdir(dirPath, { recursive: true });
2118
+ } catch (error2) {
2119
+ throw new Error(`Failed to create directory ${dirPath}: ${error2}`);
2186
2120
  }
2187
- const column = normalizeIdentifier(match[1]);
2188
- const toType = normalizeSqlType(match[2].replace(/\s+using\s+[\s\S]*$/i, "").trim());
2189
- return {
2190
- kind: "ALTER_COLUMN_TYPE",
2191
- table: prefix.table,
2192
- column,
2193
- toType
2194
- };
2195
2121
  }
2196
- function parseSetDropNotNull(stmt) {
2197
- const prefix = parseAlterTablePrefix(stmt);
2198
- if (!prefix) {
2199
- return null;
2200
- }
2201
- const setMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+set\s+not\s+null$/i);
2202
- if (setMatch) {
2203
- return {
2204
- kind: "SET_NOT_NULL",
2205
- table: prefix.table,
2206
- column: normalizeIdentifier(setMatch[1])
2207
- };
2208
- }
2209
- const dropMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+drop\s+not\s+null$/i);
2210
- if (dropMatch) {
2211
- return {
2212
- kind: "DROP_NOT_NULL",
2213
- table: prefix.table,
2214
- column: normalizeIdentifier(dropMatch[1])
2215
- };
2122
+ async function fileExists(filePath) {
2123
+ try {
2124
+ await import_fs.promises.access(filePath);
2125
+ return true;
2126
+ } catch {
2127
+ return false;
2216
2128
  }
2217
- return null;
2218
2129
  }
2219
- function parseSetDropDefault(stmt) {
2220
- const prefix = parseAlterTablePrefix(stmt);
2221
- if (!prefix) {
2222
- return null;
2130
+ async function readTextFile(filePath) {
2131
+ try {
2132
+ return await import_fs.promises.readFile(filePath, "utf-8");
2133
+ } catch (error2) {
2134
+ throw new Error(`Failed to read file ${filePath}: ${error2}`);
2223
2135
  }
2224
- const setMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+set\s+default\s+(.+)$/i);
2225
- if (setMatch) {
2226
- return {
2227
- kind: "SET_DEFAULT",
2228
- table: prefix.table,
2229
- column: normalizeIdentifier(setMatch[1]),
2230
- expr: normalizeDefault(setMatch[2].trim()) ?? setMatch[2].trim()
2231
- };
2136
+ }
2137
+ async function writeTextFile(filePath, content) {
2138
+ try {
2139
+ const dir = import_path.default.dirname(filePath);
2140
+ await ensureDir(dir);
2141
+ await import_fs.promises.writeFile(filePath, content, "utf-8");
2142
+ } catch (error2) {
2143
+ throw new Error(`Failed to write file ${filePath}: ${error2}`);
2232
2144
  }
2233
- const dropMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+drop\s+default$/i);
2234
- if (dropMatch) {
2235
- return {
2236
- kind: "DROP_DEFAULT",
2237
- table: prefix.table,
2238
- column: normalizeIdentifier(dropMatch[1])
2239
- };
2145
+ }
2146
+ async function readJsonFile(filePath, fallback) {
2147
+ try {
2148
+ const exists = await fileExists(filePath);
2149
+ if (!exists) {
2150
+ return fallback;
2151
+ }
2152
+ const content = await readTextFile(filePath);
2153
+ return JSON.parse(content);
2154
+ } catch (error2) {
2155
+ throw new Error(`Failed to read JSON file ${filePath}: ${error2}`);
2240
2156
  }
2241
- return null;
2242
2157
  }
2243
- function parseAddDropConstraint(stmt) {
2244
- const prefix = parseAlterTablePrefix(stmt);
2245
- if (!prefix) {
2246
- return null;
2158
+ async function writeJsonFile(filePath, data) {
2159
+ try {
2160
+ const content = JSON.stringify(data, null, 2);
2161
+ await writeTextFile(filePath, content);
2162
+ } catch (error2) {
2163
+ throw new Error(`Failed to write JSON file ${filePath}: ${error2}`);
2247
2164
  }
2248
- const addMatch = prefix.rest.match(/^add\s+constraint\s+([^\s]+)\s+(primary\s+key|unique)\s*\((.+)\)$/i);
2249
- if (addMatch) {
2250
- const [, rawName, kind, rawColumns] = addMatch;
2251
- const columns = splitTopLevelComma(rawColumns).map((item) => normalizeIdentifier(item));
2252
- const constraint = kind.toLowerCase().includes("primary") ? { type: "PRIMARY_KEY", name: normalizeIdentifier(rawName), columns } : { type: "UNIQUE", name: normalizeIdentifier(rawName), columns };
2253
- return {
2254
- kind: "ADD_CONSTRAINT",
2255
- table: prefix.table,
2256
- constraint
2257
- };
2165
+ }
2166
+
2167
+ // src/core/paths.ts
2168
+ var import_path2 = __toESM(require("path"));
2169
+ function getProjectRoot(cwd = process.cwd()) {
2170
+ return cwd;
2171
+ }
2172
+ function getSchemaForgeDir(root) {
2173
+ return import_path2.default.join(root, "schemaforge");
2174
+ }
2175
+ function getSchemaFilePath(root, config) {
2176
+ const schemaForgeDir = getSchemaForgeDir(root);
2177
+ const fileName = config?.schemaFile || "schema.sf";
2178
+ return import_path2.default.join(schemaForgeDir, fileName);
2179
+ }
2180
+ function getConfigPath(root) {
2181
+ const schemaForgeDir = getSchemaForgeDir(root);
2182
+ return import_path2.default.join(schemaForgeDir, "config.json");
2183
+ }
2184
+ function getStatePath(root, config) {
2185
+ const schemaForgeDir = getSchemaForgeDir(root);
2186
+ const fileName = config?.stateFile || "state.json";
2187
+ return import_path2.default.join(schemaForgeDir, fileName);
2188
+ }
2189
+
2190
+ // src/core/provider.ts
2191
+ var DEFAULT_PROVIDER = "postgres";
2192
+ function resolveProvider(provider) {
2193
+ if (!provider) {
2194
+ return { provider: DEFAULT_PROVIDER, usedDefault: true };
2258
2195
  }
2259
- const dropMatch = prefix.rest.match(/^drop\s+constraint\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
2260
- if (dropMatch) {
2261
- return {
2262
- kind: "DROP_CONSTRAINT",
2263
- table: prefix.table,
2264
- name: normalizeIdentifier(dropMatch[1])
2265
- };
2196
+ return { provider, usedDefault: false };
2197
+ }
2198
+
2199
+ // src/domain.ts
2200
+ var corePromise;
2201
+ async function loadCore() {
2202
+ if (!corePromise) {
2203
+ corePromise = Promise.resolve().then(() => (init_dist(), dist_exports));
2266
2204
  }
2267
- return null;
2205
+ return corePromise;
2268
2206
  }
2269
- function parseDropColumn(stmt) {
2270
- const prefix = parseAlterTablePrefix(stmt);
2271
- if (!prefix) {
2272
- return null;
2207
+ async function parseSchema2(source) {
2208
+ const core = await loadCore();
2209
+ return core.parseSchema(source);
2210
+ }
2211
+ async function validateSchema2(schema) {
2212
+ const core = await loadCore();
2213
+ core.validateSchema(schema);
2214
+ }
2215
+ async function diffSchemas2(previousState, currentSchema) {
2216
+ const core = await loadCore();
2217
+ return core.diffSchemas(previousState, currentSchema);
2218
+ }
2219
+ async function generateSql2(diff, provider, config) {
2220
+ const core = await loadCore();
2221
+ return core.generateSql(diff, provider, config);
2222
+ }
2223
+ async function schemaToState2(schema) {
2224
+ const core = await loadCore();
2225
+ return core.schemaToState(schema);
2226
+ }
2227
+ async function loadState2(statePath) {
2228
+ const core = await loadCore();
2229
+ return core.loadState(statePath);
2230
+ }
2231
+ async function saveState2(statePath, state) {
2232
+ const core = await loadCore();
2233
+ return core.saveState(statePath, state);
2234
+ }
2235
+ async function validateSchemaChanges2(previousState, currentSchema) {
2236
+ const core = await loadCore();
2237
+ return core.validateSchemaChanges(previousState, currentSchema);
2238
+ }
2239
+ async function toValidationReport2(findings) {
2240
+ const core = await loadCore();
2241
+ return core.toValidationReport(findings);
2242
+ }
2243
+ async function parseMigrationSql2(sql) {
2244
+ const core = await loadCore();
2245
+ return core.parseMigrationSql(sql);
2246
+ }
2247
+ async function applySqlOps2(ops) {
2248
+ const core = await loadCore();
2249
+ return core.applySqlOps(ops);
2250
+ }
2251
+ async function schemaToDsl2(schema) {
2252
+ const core = await loadCore();
2253
+ return core.schemaToDsl(schema);
2254
+ }
2255
+ async function loadMigrationSqlInput2(inputPath) {
2256
+ const core = await loadCore();
2257
+ return core.loadMigrationSqlInput(inputPath);
2258
+ }
2259
+ async function createSchemaValidationError(message) {
2260
+ const core = await loadCore();
2261
+ return new core.SchemaValidationError(message);
2262
+ }
2263
+ async function isSchemaValidationError(error2) {
2264
+ const core = await loadCore();
2265
+ return error2 instanceof core.SchemaValidationError;
2266
+ }
2267
+
2268
+ // src/utils/output.ts
2269
+ var import_boxen = __toESM(require("boxen"));
2270
+ var import_chalk = require("chalk");
2271
+ var isInteractive = Boolean(process.stdout?.isTTY);
2272
+ var colorsEnabled = isInteractive && process.env.FORCE_COLOR !== "0" && !("NO_COLOR" in process.env);
2273
+ var color = new import_chalk.Chalk({ level: colorsEnabled ? 3 : 0 });
2274
+ var theme = {
2275
+ primary: color.cyanBright,
2276
+ success: color.hex("#00FF88"),
2277
+ warning: color.hex("#FFD166"),
2278
+ error: color.hex("#EF476F"),
2279
+ accent: color.magentaBright
2280
+ };
2281
+ function success(message) {
2282
+ const text = theme.success(`[OK] ${message}`);
2283
+ if (!isInteractive) {
2284
+ console.log(text);
2285
+ return;
2273
2286
  }
2274
- const match = prefix.rest.match(/^drop\s+column\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
2275
- if (!match) {
2276
- return null;
2287
+ try {
2288
+ console.log(
2289
+ (0, import_boxen.default)(text, {
2290
+ padding: 1,
2291
+ borderColor: "cyan",
2292
+ borderStyle: "round"
2293
+ })
2294
+ );
2295
+ } catch {
2296
+ console.log(text);
2277
2297
  }
2278
- return {
2279
- kind: "DROP_COLUMN",
2280
- table: prefix.table,
2281
- column: normalizeIdentifier(match[1])
2282
- };
2283
2298
  }
2284
- function parseDropTable(stmt) {
2285
- const match = stmt.match(/^drop\s+table\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
2286
- if (!match) {
2287
- return null;
2288
- }
2289
- return {
2290
- kind: "DROP_TABLE",
2291
- table: normalizeIdentifier(match[1])
2292
- };
2299
+ function info(message) {
2300
+ console.log(theme.primary(message));
2293
2301
  }
2294
- var PARSERS = [
2295
- parseCreateTable,
2296
- parseAlterTableAddColumn,
2297
- parseAlterColumnType,
2298
- parseSetDropNotNull,
2299
- parseSetDropDefault,
2300
- parseAddDropConstraint,
2301
- parseDropColumn,
2302
- parseDropTable
2303
- ];
2304
- function parseMigrationSql(sql) {
2305
- const statements = splitSqlStatements(sql);
2306
- const ops = [];
2307
- const warnings = [];
2308
- for (const raw of statements) {
2309
- const stmt = removeSqlComments(raw).trim();
2310
- if (!stmt) {
2311
- continue;
2312
- }
2313
- let parsed = null;
2314
- for (const parseFn of PARSERS) {
2315
- parsed = parseFn(stmt);
2316
- if (parsed) {
2317
- break;
2318
- }
2302
+ function warning(message) {
2303
+ console.warn(theme.warning(`[WARN] ${message}`));
2304
+ }
2305
+ function error(message) {
2306
+ console.error(theme.error(`[ERROR] ${message}`));
2307
+ }
2308
+
2309
+ // src/commands/diff.ts
2310
+ var REQUIRED_CONFIG_FIELDS = ["schemaFile", "stateFile"];
2311
+ function resolveConfigPath(root, targetPath) {
2312
+ return import_path7.default.isAbsolute(targetPath) ? targetPath : import_path7.default.join(root, targetPath);
2313
+ }
2314
+ async function runDiff() {
2315
+ const root = getProjectRoot();
2316
+ const configPath = getConfigPath(root);
2317
+ if (!await fileExists(configPath)) {
2318
+ throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
2319
+ }
2320
+ const config = await readJsonFile(configPath, {});
2321
+ for (const field of REQUIRED_CONFIG_FIELDS) {
2322
+ const value = config[field];
2323
+ if (!value || typeof value !== "string") {
2324
+ throw new Error(`Invalid config: '${field}' is required`);
2319
2325
  }
2320
- if (parsed) {
2321
- ops.push(parsed);
2322
- } else {
2323
- warnings.push({
2324
- statement: stmt,
2325
- reason: "Unsupported or unrecognized statement"
2326
- });
2326
+ }
2327
+ const schemaPath = resolveConfigPath(root, config.schemaFile);
2328
+ const statePath = resolveConfigPath(root, config.stateFile);
2329
+ const { provider } = resolveProvider(config.provider);
2330
+ const schemaSource = await readTextFile(schemaPath);
2331
+ const schema = await parseSchema2(schemaSource);
2332
+ try {
2333
+ await validateSchema2(schema);
2334
+ } catch (error2) {
2335
+ if (error2 instanceof Error) {
2336
+ throw await createSchemaValidationError(error2.message);
2327
2337
  }
2338
+ throw error2;
2328
2339
  }
2329
- return { ops, warnings };
2340
+ const previousState = await loadState2(statePath);
2341
+ const diff = await diffSchemas2(previousState, schema);
2342
+ if (diff.operations.length === 0) {
2343
+ success("No changes detected");
2344
+ return;
2345
+ }
2346
+ const sql = await generateSql2(diff, provider, config.sql);
2347
+ console.log(sql);
2330
2348
  }
2331
2349
 
2332
- // src/core/sql/schema-to-dsl.ts
2333
- function renderColumn(column) {
2334
- const parts = [column.name, column.type];
2335
- if (column.primaryKey) {
2336
- parts.push("pk");
2337
- }
2338
- if (column.unique) {
2339
- parts.push("unique");
2350
+ // src/commands/generate.ts
2351
+ var import_commander2 = require("commander");
2352
+ var import_path8 = __toESM(require("path"));
2353
+
2354
+ // src/core/utils.ts
2355
+ function nowTimestamp2() {
2356
+ const date = /* @__PURE__ */ new Date();
2357
+ const pad = (value) => String(value).padStart(2, "0");
2358
+ return String(date.getFullYear()) + pad(date.getMonth() + 1) + pad(date.getDate()) + pad(date.getHours()) + pad(date.getMinutes()) + pad(date.getSeconds());
2359
+ }
2360
+ function slugifyName2(name) {
2361
+ return name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "") || "migration";
2362
+ }
2363
+
2364
+ // src/commands/generate.ts
2365
+ var REQUIRED_CONFIG_FIELDS2 = [
2366
+ "schemaFile",
2367
+ "stateFile",
2368
+ "outputDir"
2369
+ ];
2370
+ function resolveConfigPath2(root, targetPath) {
2371
+ return import_path8.default.isAbsolute(targetPath) ? targetPath : import_path8.default.join(root, targetPath);
2372
+ }
2373
+ async function runGenerate(options) {
2374
+ const root = getProjectRoot();
2375
+ const configPath = getConfigPath(root);
2376
+ if (!await fileExists(configPath)) {
2377
+ throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
2340
2378
  }
2341
- if (column.nullable === false && !column.primaryKey) {
2342
- parts.push("not null");
2379
+ const config = await readJsonFile(configPath, {});
2380
+ for (const field of REQUIRED_CONFIG_FIELDS2) {
2381
+ const value = config[field];
2382
+ if (!value || typeof value !== "string") {
2383
+ throw new Error(`Invalid config: '${field}' is required`);
2384
+ }
2343
2385
  }
2344
- if (column.default !== void 0 && column.default !== null) {
2345
- parts.push(`default ${column.default}`);
2386
+ const schemaPath = resolveConfigPath2(root, config.schemaFile);
2387
+ const statePath = resolveConfigPath2(root, config.stateFile);
2388
+ const outputDir = resolveConfigPath2(root, config.outputDir);
2389
+ const { provider, usedDefault } = resolveProvider(config.provider);
2390
+ if (usedDefault) {
2391
+ info("Provider not set; defaulting to postgres.");
2346
2392
  }
2347
- return ` ${parts.join(" ")}`;
2348
- }
2349
- function schemaToDsl(schema) {
2350
- const tableNames = Object.keys(schema.tables).sort((left, right) => left.localeCompare(right));
2351
- const blocks = tableNames.map((tableName) => {
2352
- const table = schema.tables[tableName];
2353
- const lines = [`table ${table.name} {`];
2354
- for (const column of table.columns) {
2355
- lines.push(renderColumn(column));
2393
+ info("Generating SQL...");
2394
+ const schemaSource = await readTextFile(schemaPath);
2395
+ const schema = await parseSchema2(schemaSource);
2396
+ try {
2397
+ await validateSchema2(schema);
2398
+ } catch (error2) {
2399
+ if (error2 instanceof Error) {
2400
+ throw await createSchemaValidationError(error2.message);
2356
2401
  }
2357
- lines.push("}");
2358
- return lines.join("\n");
2359
- });
2360
- if (blocks.length === 0) {
2361
- return "# SchemaForge schema definition\n";
2402
+ throw error2;
2362
2403
  }
2363
- return `# SchemaForge schema definition
2364
-
2365
- ${blocks.join("\n\n")}
2366
- `;
2404
+ const previousState = await loadState2(statePath);
2405
+ const diff = await diffSchemas2(previousState, schema);
2406
+ if (diff.operations.length === 0) {
2407
+ info("No changes detected");
2408
+ return;
2409
+ }
2410
+ const sql = await generateSql2(diff, provider, config.sql);
2411
+ const timestamp = nowTimestamp2();
2412
+ const slug = slugifyName2(options.name ?? "migration");
2413
+ const fileName = `${timestamp}-${slug}.sql`;
2414
+ await ensureDir(outputDir);
2415
+ const migrationPath = import_path8.default.join(outputDir, fileName);
2416
+ await writeTextFile(migrationPath, sql + "\n");
2417
+ const nextState = await schemaToState2(schema);
2418
+ await saveState2(statePath, nextState);
2419
+ success(`SQL generated successfully: ${migrationPath}`);
2367
2420
  }
2368
2421
 
2369
2422
  // src/commands/import.ts
2423
+ var import_commander3 = require("commander");
2424
+ var import_path9 = __toESM(require("path"));
2370
2425
  function resolveConfigPath3(root, targetPath) {
2371
- return import_path7.default.isAbsolute(targetPath) ? targetPath : import_path7.default.join(root, targetPath);
2426
+ return import_path9.default.isAbsolute(targetPath) ? targetPath : import_path9.default.join(root, targetPath);
2372
2427
  }
2373
2428
  async function runImport(inputPath, options = {}) {
2374
2429
  const root = getProjectRoot();
2375
2430
  const absoluteInputPath = resolveConfigPath3(root, inputPath);
2376
- const inputs = await loadMigrationSqlInput(absoluteInputPath);
2431
+ const inputs = await loadMigrationSqlInput2(absoluteInputPath);
2377
2432
  if (inputs.length === 0) {
2378
2433
  throw new Error(`No .sql migration files found in: ${absoluteInputPath}`);
2379
2434
  }
2380
2435
  const allOps = [];
2381
2436
  const parseWarnings = [];
2382
2437
  for (const input of inputs) {
2383
- const result = parseMigrationSql(input.sql);
2438
+ const result = await parseMigrationSql2(input.sql);
2384
2439
  allOps.push(...result.ops);
2385
2440
  parseWarnings.push(...result.warnings.map((item) => ({
2386
- statement: `[${import_path7.default.basename(input.filePath)}] ${item.statement}`,
2441
+ statement: `[${import_path9.default.basename(input.filePath)}] ${item.statement}`,
2387
2442
  reason: item.reason
2388
2443
  })));
2389
2444
  }
2390
- const applied = applySqlOps(allOps);
2391
- const dsl = schemaToDsl(applied.schema);
2445
+ const applied = await applySqlOps2(allOps);
2446
+ const dsl = await schemaToDsl2(applied.schema);
2392
2447
  let targetPath = options.out;
2393
2448
  if (!targetPath) {
2394
2449
  const configPath = getConfigPath(root);
@@ -2453,8 +2508,8 @@ table users {
2453
2508
  await writeTextFile(schemaFilePath, schemaContent);
2454
2509
  success(`Created ${schemaFilePath}`);
2455
2510
  const config = {
2456
- provider: "supabase",
2457
- outputDir: "supabase/migrations",
2511
+ provider: "postgres",
2512
+ outputDir: "migrations",
2458
2513
  schemaFile: "schemaforge/schema.sf",
2459
2514
  stateFile: "schemaforge/state.json",
2460
2515
  sql: {
@@ -2470,7 +2525,7 @@ table users {
2470
2525
  };
2471
2526
  await writeJsonFile(statePath, state);
2472
2527
  success(`Created ${statePath}`);
2473
- const outputDir = "supabase/migrations";
2528
+ const outputDir = "migrations";
2474
2529
  await ensureDir(outputDir);
2475
2530
  success(`Created ${outputDir}`);
2476
2531
  success("Project initialized successfully");
@@ -2481,160 +2536,10 @@ table users {
2481
2536
 
2482
2537
  // src/commands/validate.ts
2483
2538
  var import_commander5 = require("commander");
2484
- var import_path8 = __toESM(require("path"));
2485
-
2486
- // src/core/validate.ts
2487
- function normalizeColumnType2(type) {
2488
- return type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
2489
- }
2490
- function parseVarcharLength(type) {
2491
- const match = normalizeColumnType2(type).match(/^varchar\((\d+)\)$/);
2492
- return match ? Number(match[1]) : null;
2493
- }
2494
- function parseNumericType(type) {
2495
- const match = normalizeColumnType2(type).match(/^numeric\((\d+),(\d+)\)$/);
2496
- if (!match) {
2497
- return null;
2498
- }
2499
- return {
2500
- precision: Number(match[1]),
2501
- scale: Number(match[2])
2502
- };
2503
- }
2504
- function classifyTypeChange(from, to) {
2505
- const fromType = normalizeColumnType2(from);
2506
- const toType = normalizeColumnType2(to);
2507
- const uuidInvolved = fromType === "uuid" || toType === "uuid";
2508
- if (uuidInvolved && fromType !== toType) {
2509
- return {
2510
- severity: "error",
2511
- message: `Type changed from ${fromType} to ${toType} (likely incompatible cast)`
2512
- };
2513
- }
2514
- if (fromType === "int" && toType === "bigint") {
2515
- return {
2516
- severity: "warning",
2517
- message: "Type widened from int to bigint"
2518
- };
2519
- }
2520
- if (fromType === "bigint" && toType === "int") {
2521
- return {
2522
- severity: "error",
2523
- message: "Type narrowed from bigint to int (likely incompatible cast)"
2524
- };
2525
- }
2526
- if (fromType === "text" && parseVarcharLength(toType) !== null) {
2527
- return {
2528
- severity: "error",
2529
- message: `Type changed from text to ${toType} (may truncate existing values)`
2530
- };
2531
- }
2532
- if (parseVarcharLength(fromType) !== null && toType === "text") {
2533
- return {
2534
- severity: "warning",
2535
- message: "Type widened from varchar(n) to text"
2536
- };
2537
- }
2538
- const fromVarcharLength = parseVarcharLength(fromType);
2539
- const toVarcharLength = parseVarcharLength(toType);
2540
- if (fromVarcharLength !== null && toVarcharLength !== null) {
2541
- if (toVarcharLength >= fromVarcharLength) {
2542
- return {
2543
- severity: "warning",
2544
- message: `Type widened from varchar(${fromVarcharLength}) to varchar(${toVarcharLength})`
2545
- };
2546
- }
2547
- return {
2548
- severity: "error",
2549
- message: `Type narrowed from varchar(${fromVarcharLength}) to varchar(${toVarcharLength})`
2550
- };
2551
- }
2552
- const fromNumeric = parseNumericType(fromType);
2553
- const toNumeric = parseNumericType(toType);
2554
- if (fromNumeric && toNumeric && fromNumeric.scale === toNumeric.scale) {
2555
- if (toNumeric.precision >= fromNumeric.precision) {
2556
- return {
2557
- severity: "warning",
2558
- message: `Type widened from numeric(${fromNumeric.precision},${fromNumeric.scale}) to numeric(${toNumeric.precision},${toNumeric.scale})`
2559
- };
2560
- }
2561
- return {
2562
- severity: "error",
2563
- message: `Type narrowed from numeric(${fromNumeric.precision},${fromNumeric.scale}) to numeric(${toNumeric.precision},${toNumeric.scale})`
2564
- };
2565
- }
2566
- return {
2567
- severity: "warning",
2568
- message: `Type changed from ${fromType} to ${toType} (compatibility unknown)`
2569
- };
2570
- }
2571
- function validateSchemaChanges(previousState, currentSchema) {
2572
- const findings = [];
2573
- const diff = diffSchemas(previousState, currentSchema);
2574
- for (const operation of diff.operations) {
2575
- switch (operation.kind) {
2576
- case "drop_table":
2577
- findings.push({
2578
- severity: "error",
2579
- code: "DROP_TABLE",
2580
- table: operation.tableName,
2581
- message: "Table removed"
2582
- });
2583
- break;
2584
- case "drop_column":
2585
- findings.push({
2586
- severity: "error",
2587
- code: "DROP_COLUMN",
2588
- table: operation.tableName,
2589
- column: operation.columnName,
2590
- message: "Column removed"
2591
- });
2592
- break;
2593
- case "column_type_changed": {
2594
- const classification = classifyTypeChange(operation.fromType, operation.toType);
2595
- findings.push({
2596
- severity: classification.severity,
2597
- code: "ALTER_COLUMN_TYPE",
2598
- table: operation.tableName,
2599
- column: operation.columnName,
2600
- from: normalizeColumnType2(operation.fromType),
2601
- to: normalizeColumnType2(operation.toType),
2602
- message: classification.message
2603
- });
2604
- break;
2605
- }
2606
- case "column_nullability_changed":
2607
- if (operation.from && !operation.to) {
2608
- findings.push({
2609
- severity: "warning",
2610
- code: "SET_NOT_NULL",
2611
- table: operation.tableName,
2612
- column: operation.columnName,
2613
- message: "Column changed to NOT NULL (may fail if data contains NULLs)"
2614
- });
2615
- }
2616
- break;
2617
- default:
2618
- break;
2619
- }
2620
- }
2621
- return findings;
2622
- }
2623
- function toValidationReport(findings) {
2624
- const errors = findings.filter((finding) => finding.severity === "error");
2625
- const warnings = findings.filter((finding) => finding.severity === "warning");
2626
- return {
2627
- hasErrors: errors.length > 0,
2628
- hasWarnings: warnings.length > 0,
2629
- errors: errors.map(({ severity, ...finding }) => finding),
2630
- warnings: warnings.map(({ severity, ...finding }) => finding)
2631
- };
2632
- }
2633
-
2634
- // src/commands/validate.ts
2539
+ var import_path10 = __toESM(require("path"));
2635
2540
  var REQUIRED_CONFIG_FIELDS3 = ["schemaFile", "stateFile"];
2636
2541
  function resolveConfigPath4(root, targetPath) {
2637
- return import_path8.default.isAbsolute(targetPath) ? targetPath : import_path8.default.join(root, targetPath);
2542
+ return import_path10.default.isAbsolute(targetPath) ? targetPath : import_path10.default.join(root, targetPath);
2638
2543
  }
2639
2544
  async function runValidate(options = {}) {
2640
2545
  const root = getProjectRoot();
@@ -2652,18 +2557,18 @@ async function runValidate(options = {}) {
2652
2557
  const schemaPath = resolveConfigPath4(root, config.schemaFile);
2653
2558
  const statePath = resolveConfigPath4(root, config.stateFile);
2654
2559
  const schemaSource = await readTextFile(schemaPath);
2655
- const schema = parseSchema(schemaSource);
2560
+ const schema = await parseSchema2(schemaSource);
2656
2561
  try {
2657
- validateSchema(schema);
2562
+ await validateSchema2(schema);
2658
2563
  } catch (error2) {
2659
2564
  if (error2 instanceof Error) {
2660
- throw new SchemaValidationError(error2.message);
2565
+ throw await createSchemaValidationError(error2.message);
2661
2566
  }
2662
2567
  throw error2;
2663
2568
  }
2664
- const previousState = await loadState(statePath);
2665
- const findings = validateSchemaChanges(previousState, schema);
2666
- const report = toValidationReport(findings);
2569
+ const previousState = await loadState2(statePath);
2570
+ const findings = await validateSchemaChanges2(previousState, schema);
2571
+ const report = await toValidationReport2(findings);
2667
2572
  if (options.json) {
2668
2573
  console.log(JSON.stringify(report, null, 2));
2669
2574
  process.exitCode = report.hasErrors ? 1 : 0;
@@ -2694,8 +2599,8 @@ async function runValidate(options = {}) {
2694
2599
  // src/cli.ts
2695
2600
  var program = new import_commander6.Command();
2696
2601
  program.name("schema-forge").description("CLI tool for schema management and SQL generation").version(package_default.version);
2697
- function handleError(error2) {
2698
- if (error2 instanceof SchemaValidationError) {
2602
+ async function handleError(error2) {
2603
+ if (await isSchemaValidationError(error2) && error2 instanceof Error) {
2699
2604
  error(error2.message);
2700
2605
  process.exitCode = 2;
2701
2606
  return;
@@ -2711,35 +2616,35 @@ program.command("init").description("Initialize a new schema project").action(as
2711
2616
  try {
2712
2617
  await runInit();
2713
2618
  } catch (error2) {
2714
- handleError(error2);
2619
+ await handleError(error2);
2715
2620
  }
2716
2621
  });
2717
2622
  program.command("generate").description("Generate SQL from schema files").option("--name <string>", "Schema name to generate").action(async (options) => {
2718
2623
  try {
2719
2624
  await runGenerate(options);
2720
2625
  } catch (error2) {
2721
- handleError(error2);
2626
+ await handleError(error2);
2722
2627
  }
2723
2628
  });
2724
2629
  program.command("diff").description("Compare two schema versions and generate migration SQL").action(async () => {
2725
2630
  try {
2726
2631
  await runDiff();
2727
2632
  } catch (error2) {
2728
- handleError(error2);
2633
+ await handleError(error2);
2729
2634
  }
2730
2635
  });
2731
2636
  program.command("import").description("Import schema from SQL migrations").argument("<path>", "Path to .sql file or migrations directory").option("--out <path>", "Output schema file path").action(async (targetPath, options) => {
2732
2637
  try {
2733
2638
  await runImport(targetPath, options);
2734
2639
  } catch (error2) {
2735
- handleError(error2);
2640
+ await handleError(error2);
2736
2641
  }
2737
2642
  });
2738
2643
  program.command("validate").description("Detect destructive or risky schema changes").option("--json", "Output structured JSON").action(async (options) => {
2739
2644
  try {
2740
2645
  await runValidate(options);
2741
2646
  } catch (error2) {
2742
- handleError(error2);
2647
+ await handleError(error2);
2743
2648
  }
2744
2649
  });
2745
2650
  program.parse(process.argv);