@xubylele/schema-forge 1.1.1 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +136 -21
  2. package/dist/cli.js +1717 -148
  3. package/package.json +4 -2
package/dist/cli.js CHANGED
@@ -24,12 +24,12 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
24
24
  ));
25
25
 
26
26
  // src/cli.ts
27
- var import_commander4 = require("commander");
27
+ var import_commander6 = require("commander");
28
28
 
29
29
  // package.json
30
30
  var package_default = {
31
31
  name: "@xubylele/schema-forge",
32
- version: "1.1.1",
32
+ version: "1.4.0",
33
33
  description: "Universal migration generator from schema DSL",
34
34
  main: "dist/cli.js",
35
35
  type: "commonjs",
@@ -69,6 +69,8 @@ var package_default = {
69
69
  node: ">=18.0.0"
70
70
  },
71
71
  dependencies: {
72
+ boxen: "^8.0.1",
73
+ chalk: "^5.6.2",
72
74
  commander: "^14.0.3"
73
75
  },
74
76
  devDependencies: {
@@ -85,13 +87,148 @@ var package_default = {
85
87
  var import_commander = require("commander");
86
88
  var import_path4 = __toESM(require("path"));
87
89
 
88
- // src/core/errors.ts
89
- var SchemaValidationError = class extends Error {
90
- constructor(message) {
91
- super(message);
92
- this.name = "SchemaValidationError";
90
+ // src/core/normalize.ts
91
+ function normalizeIdent(input) {
92
+ return input.trim().toLowerCase().replace(/[^a-z0-9]+/g, "_").replace(/_+/g, "_").replace(/^_+|_+$/g, "");
93
+ }
94
+ function pkName(table) {
95
+ return `pk_${normalizeIdent(table)}`;
96
+ }
97
+ function uqName(table, column) {
98
+ return `uq_${normalizeIdent(table)}_${normalizeIdent(column)}`;
99
+ }
100
+ function legacyPkName(table) {
101
+ return `${normalizeIdent(table)}_pkey`;
102
+ }
103
+ function legacyUqName(table, column) {
104
+ return `${normalizeIdent(table)}_${normalizeIdent(column)}_key`;
105
+ }
106
+ function normalizeSpacesOutsideQuotes(value) {
107
+ let result = "";
108
+ let inSingleQuote = false;
109
+ let inDoubleQuote = false;
110
+ let pendingSpace = false;
111
+ for (const char of value) {
112
+ if (char === "'" && !inDoubleQuote) {
113
+ if (pendingSpace && result.length > 0 && result[result.length - 1] !== " ") {
114
+ result += " ";
115
+ }
116
+ pendingSpace = false;
117
+ inSingleQuote = !inSingleQuote;
118
+ result += char;
119
+ continue;
120
+ }
121
+ if (char === '"' && !inSingleQuote) {
122
+ if (pendingSpace && result.length > 0 && result[result.length - 1] !== " ") {
123
+ result += " ";
124
+ }
125
+ pendingSpace = false;
126
+ inDoubleQuote = !inDoubleQuote;
127
+ result += char;
128
+ continue;
129
+ }
130
+ if (!inSingleQuote && !inDoubleQuote && /\s/.test(char)) {
131
+ pendingSpace = true;
132
+ continue;
133
+ }
134
+ if (pendingSpace && result.length > 0 && result[result.length - 1] !== " ") {
135
+ result += " ";
136
+ }
137
+ pendingSpace = false;
138
+ result += char;
93
139
  }
94
- };
140
+ return result.trim();
141
+ }
142
+ function normalizeKnownFunctionsOutsideQuotes(value) {
143
+ let result = "";
144
+ let inSingleQuote = false;
145
+ let inDoubleQuote = false;
146
+ let buffer = "";
147
+ function flushBuffer() {
148
+ if (!buffer) {
149
+ return;
150
+ }
151
+ result += buffer.replace(/\bnow\s*\(\s*\)/gi, "now()").replace(/\bgen_random_uuid\s*\(\s*\)/gi, "gen_random_uuid()");
152
+ buffer = "";
153
+ }
154
+ for (const char of value) {
155
+ if (char === "'" && !inDoubleQuote) {
156
+ flushBuffer();
157
+ inSingleQuote = !inSingleQuote;
158
+ result += char;
159
+ continue;
160
+ }
161
+ if (char === '"' && !inSingleQuote) {
162
+ flushBuffer();
163
+ inDoubleQuote = !inDoubleQuote;
164
+ result += char;
165
+ continue;
166
+ }
167
+ if (inSingleQuote || inDoubleQuote) {
168
+ result += char;
169
+ continue;
170
+ }
171
+ buffer += char;
172
+ }
173
+ flushBuffer();
174
+ return result;
175
+ }
176
+ function normalizePunctuationOutsideQuotes(value) {
177
+ let result = "";
178
+ let inSingleQuote = false;
179
+ let inDoubleQuote = false;
180
+ for (let index = 0; index < value.length; index++) {
181
+ const char = value[index];
182
+ if (char === "'" && !inDoubleQuote) {
183
+ inSingleQuote = !inSingleQuote;
184
+ result += char;
185
+ continue;
186
+ }
187
+ if (char === '"' && !inSingleQuote) {
188
+ inDoubleQuote = !inDoubleQuote;
189
+ result += char;
190
+ continue;
191
+ }
192
+ if (!inSingleQuote && !inDoubleQuote && (char === "(" || char === ")")) {
193
+ while (result.endsWith(" ")) {
194
+ result = result.slice(0, -1);
195
+ }
196
+ result += char;
197
+ let lookahead = index + 1;
198
+ while (lookahead < value.length && value[lookahead] === " ") {
199
+ lookahead++;
200
+ }
201
+ index = lookahead - 1;
202
+ continue;
203
+ }
204
+ if (!inSingleQuote && !inDoubleQuote && char === ",") {
205
+ while (result.endsWith(" ")) {
206
+ result = result.slice(0, -1);
207
+ }
208
+ result += ", ";
209
+ let lookahead = index + 1;
210
+ while (lookahead < value.length && value[lookahead] === " ") {
211
+ lookahead++;
212
+ }
213
+ index = lookahead - 1;
214
+ continue;
215
+ }
216
+ result += char;
217
+ }
218
+ return result;
219
+ }
220
+ function normalizeDefault(expr) {
221
+ if (expr === void 0 || expr === null) {
222
+ return null;
223
+ }
224
+ const trimmed = expr.trim();
225
+ if (trimmed.length === 0) {
226
+ return null;
227
+ }
228
+ const normalizedSpacing = normalizeSpacesOutsideQuotes(trimmed);
229
+ const normalizedPunctuation = normalizePunctuationOutsideQuotes(normalizedSpacing);
230
+ return normalizeKnownFunctionsOutsideQuotes(normalizedPunctuation);
231
+ }
95
232
 
96
233
  // src/core/diff.ts
97
234
  function getTableNamesFromState(state) {
@@ -109,6 +246,18 @@ function getColumnNamesFromSchema(dbColumns) {
109
246
  function getSortedNames(names) {
110
247
  return Array.from(names).sort((a, b) => a.localeCompare(b));
111
248
  }
249
+ function normalizeColumnType(type) {
250
+ return type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
251
+ }
252
+ function resolveStatePrimaryKey(table) {
253
+ return table.primaryKey ?? Object.entries(table.columns).find(([, column]) => column.primaryKey)?.[0] ?? null;
254
+ }
255
+ function resolveSchemaPrimaryKey(table) {
256
+ return table.primaryKey ?? table.columns.find((column) => column.primaryKey)?.name ?? null;
257
+ }
258
+ function normalizeNullable(nullable) {
259
+ return nullable ?? true;
260
+ }
112
261
  function diffSchemas(oldState, newSchema) {
113
262
  const operations = [];
114
263
  const oldTableNames = getTableNamesFromState(oldState);
@@ -126,6 +275,117 @@ function diffSchemas(oldState, newSchema) {
126
275
  const commonTableNames = sortedNewTableNames.filter(
127
276
  (tableName) => oldTableNames.has(tableName)
128
277
  );
278
+ for (const tableName of commonTableNames) {
279
+ const newTable = newSchema.tables[tableName];
280
+ const oldTable = oldState.tables[tableName];
281
+ if (!newTable || !oldTable) {
282
+ continue;
283
+ }
284
+ for (const column of newTable.columns) {
285
+ const previousColumn = oldTable.columns[column.name];
286
+ if (!previousColumn) {
287
+ continue;
288
+ }
289
+ const previousType = normalizeColumnType(previousColumn.type);
290
+ const currentType = normalizeColumnType(column.type);
291
+ if (previousType !== currentType) {
292
+ operations.push({
293
+ kind: "column_type_changed",
294
+ tableName,
295
+ columnName: column.name,
296
+ fromType: previousColumn.type,
297
+ toType: column.type
298
+ });
299
+ }
300
+ }
301
+ }
302
+ for (const tableName of commonTableNames) {
303
+ const newTable = newSchema.tables[tableName];
304
+ const oldTable = oldState.tables[tableName];
305
+ if (!newTable || !oldTable) {
306
+ continue;
307
+ }
308
+ const previousPrimaryKey = resolveStatePrimaryKey(oldTable);
309
+ const currentPrimaryKey = resolveSchemaPrimaryKey(newTable);
310
+ if (previousPrimaryKey !== null && previousPrimaryKey !== currentPrimaryKey) {
311
+ operations.push({
312
+ kind: "drop_primary_key_constraint",
313
+ tableName
314
+ });
315
+ }
316
+ }
317
+ for (const tableName of commonTableNames) {
318
+ const newTable = newSchema.tables[tableName];
319
+ const oldTable = oldState.tables[tableName];
320
+ if (!newTable || !oldTable) {
321
+ continue;
322
+ }
323
+ for (const column of newTable.columns) {
324
+ const previousColumn = oldTable.columns[column.name];
325
+ if (!previousColumn) {
326
+ continue;
327
+ }
328
+ const previousUnique = previousColumn.unique ?? false;
329
+ const currentUnique = column.unique ?? false;
330
+ if (previousUnique !== currentUnique) {
331
+ operations.push({
332
+ kind: "column_unique_changed",
333
+ tableName,
334
+ columnName: column.name,
335
+ from: previousUnique,
336
+ to: currentUnique
337
+ });
338
+ }
339
+ }
340
+ }
341
+ for (const tableName of commonTableNames) {
342
+ const newTable = newSchema.tables[tableName];
343
+ const oldTable = oldState.tables[tableName];
344
+ if (!newTable || !oldTable) {
345
+ continue;
346
+ }
347
+ for (const column of newTable.columns) {
348
+ const previousColumn = oldTable.columns[column.name];
349
+ if (!previousColumn) {
350
+ continue;
351
+ }
352
+ const previousNullable = normalizeNullable(previousColumn.nullable);
353
+ const currentNullable = normalizeNullable(column.nullable);
354
+ if (previousNullable !== currentNullable) {
355
+ operations.push({
356
+ kind: "column_nullability_changed",
357
+ tableName,
358
+ columnName: column.name,
359
+ from: previousNullable,
360
+ to: currentNullable
361
+ });
362
+ }
363
+ }
364
+ }
365
+ for (const tableName of commonTableNames) {
366
+ const newTable = newSchema.tables[tableName];
367
+ const oldTable = oldState.tables[tableName];
368
+ if (!newTable || !oldTable) {
369
+ continue;
370
+ }
371
+ for (const column of newTable.columns) {
372
+ const previousColumn = oldTable.columns[column.name];
373
+ if (!previousColumn) {
374
+ continue;
375
+ }
376
+ const previousDefault = normalizeDefault(previousColumn.default);
377
+ const currentDefault = normalizeDefault(column.default);
378
+ if (previousDefault !== currentDefault) {
379
+ operations.push({
380
+ kind: "column_default_changed",
381
+ tableName,
382
+ columnName: column.name,
383
+ fromDefault: previousDefault,
384
+ toDefault: currentDefault
385
+ });
386
+ }
387
+ }
388
+ }
129
389
  for (const tableName of commonTableNames) {
130
390
  const newTable = newSchema.tables[tableName];
131
391
  const oldTable = oldState.tables[tableName];
@@ -143,6 +403,22 @@ function diffSchemas(oldState, newSchema) {
143
403
  }
144
404
  }
145
405
  }
406
+ for (const tableName of commonTableNames) {
407
+ const newTable = newSchema.tables[tableName];
408
+ const oldTable = oldState.tables[tableName];
409
+ if (!newTable || !oldTable) {
410
+ continue;
411
+ }
412
+ const previousPrimaryKey = resolveStatePrimaryKey(oldTable);
413
+ const currentPrimaryKey = resolveSchemaPrimaryKey(newTable);
414
+ if (currentPrimaryKey !== null && previousPrimaryKey !== currentPrimaryKey) {
415
+ operations.push({
416
+ kind: "add_primary_key_constraint",
417
+ tableName,
418
+ columnName: currentPrimaryKey
419
+ });
420
+ }
421
+ }
146
422
  for (const tableName of commonTableNames) {
147
423
  const newTable = newSchema.tables[tableName];
148
424
  const oldTable = oldState.tables[tableName];
@@ -171,14 +447,22 @@ function diffSchemas(oldState, newSchema) {
171
447
  return { operations };
172
448
  }
173
449
 
450
+ // src/core/errors.ts
451
+ var SchemaValidationError = class extends Error {
452
+ constructor(message) {
453
+ super(message);
454
+ this.name = "SchemaValidationError";
455
+ }
456
+ };
457
+
174
458
  // src/core/fs.ts
175
459
  var import_fs = require("fs");
176
460
  var import_path = __toESM(require("path"));
177
461
  async function ensureDir(dirPath) {
178
462
  try {
179
463
  await import_fs.promises.mkdir(dirPath, { recursive: true });
180
- } catch (error) {
181
- throw new Error(`Failed to create directory ${dirPath}: ${error}`);
464
+ } catch (error2) {
465
+ throw new Error(`Failed to create directory ${dirPath}: ${error2}`);
182
466
  }
183
467
  }
184
468
  async function fileExists(filePath) {
@@ -192,8 +476,8 @@ async function fileExists(filePath) {
192
476
  async function readTextFile(filePath) {
193
477
  try {
194
478
  return await import_fs.promises.readFile(filePath, "utf-8");
195
- } catch (error) {
196
- throw new Error(`Failed to read file ${filePath}: ${error}`);
479
+ } catch (error2) {
480
+ throw new Error(`Failed to read file ${filePath}: ${error2}`);
197
481
  }
198
482
  }
199
483
  async function writeTextFile(filePath, content) {
@@ -201,8 +485,8 @@ async function writeTextFile(filePath, content) {
201
485
  const dir = import_path.default.dirname(filePath);
202
486
  await ensureDir(dir);
203
487
  await import_fs.promises.writeFile(filePath, content, "utf-8");
204
- } catch (error) {
205
- throw new Error(`Failed to write file ${filePath}: ${error}`);
488
+ } catch (error2) {
489
+ throw new Error(`Failed to write file ${filePath}: ${error2}`);
206
490
  }
207
491
  }
208
492
  async function readJsonFile(filePath, fallback) {
@@ -213,16 +497,16 @@ async function readJsonFile(filePath, fallback) {
213
497
  }
214
498
  const content = await readTextFile(filePath);
215
499
  return JSON.parse(content);
216
- } catch (error) {
217
- throw new Error(`Failed to read JSON file ${filePath}: ${error}`);
500
+ } catch (error2) {
501
+ throw new Error(`Failed to read JSON file ${filePath}: ${error2}`);
218
502
  }
219
503
  }
220
504
  async function writeJsonFile(filePath, data) {
221
505
  try {
222
506
  const content = JSON.stringify(data, null, 2);
223
507
  await writeTextFile(filePath, content);
224
- } catch (error) {
225
- throw new Error(`Failed to write JSON file ${filePath}: ${error}`);
508
+ } catch (error2) {
509
+ throw new Error(`Failed to write JSON file ${filePath}: ${error2}`);
226
510
  }
227
511
  }
228
512
  async function findFiles(dirPath, pattern) {
@@ -238,12 +522,53 @@ async function findFiles(dirPath, pattern) {
238
522
  results.push(fullPath);
239
523
  }
240
524
  }
241
- } catch (error) {
242
- throw new Error(`Failed to find files in ${dirPath}: ${error}`);
525
+ } catch (error2) {
526
+ throw new Error(`Failed to find files in ${dirPath}: ${error2}`);
243
527
  }
244
528
  return results;
245
529
  }
246
530
 
531
+ // src/utils/output.ts
532
+ var import_boxen = __toESM(require("boxen"));
533
+ var import_chalk = require("chalk");
534
+ var isInteractive = Boolean(process.stdout?.isTTY);
535
+ var colorsEnabled = isInteractive && process.env.FORCE_COLOR !== "0" && !("NO_COLOR" in process.env);
536
+ var color = new import_chalk.Chalk({ level: colorsEnabled ? 3 : 0 });
537
+ var theme = {
538
+ primary: color.cyanBright,
539
+ success: color.hex("#00FF88"),
540
+ warning: color.hex("#FFD166"),
541
+ error: color.hex("#EF476F"),
542
+ accent: color.magentaBright
543
+ };
544
+ function success(message) {
545
+ const text = theme.success(`[OK] ${message}`);
546
+ if (!isInteractive) {
547
+ console.log(text);
548
+ return;
549
+ }
550
+ try {
551
+ console.log(
552
+ (0, import_boxen.default)(text, {
553
+ padding: 1,
554
+ borderColor: "cyan",
555
+ borderStyle: "round"
556
+ })
557
+ );
558
+ } catch {
559
+ console.log(text);
560
+ }
561
+ }
562
+ function info(message) {
563
+ console.log(theme.primary(message));
564
+ }
565
+ function warning(message) {
566
+ console.warn(theme.warning(`[WARN] ${message}`));
567
+ }
568
+ function error(message) {
569
+ console.error(theme.error(`[ERROR] ${message}`));
570
+ }
571
+
247
572
  // src/core/parser.ts
248
573
  var SchemaParser = class {
249
574
  /**
@@ -253,8 +578,8 @@ var SchemaParser = class {
253
578
  try {
254
579
  const schema = await readJsonFile(filePath, {});
255
580
  return this.normalizeSchema(schema);
256
- } catch (error) {
257
- throw new Error(`Failed to parse schema file ${filePath}: ${error}`);
581
+ } catch (error2) {
582
+ throw new Error(`Failed to parse schema file ${filePath}: ${error2}`);
258
583
  }
259
584
  }
260
585
  /**
@@ -267,8 +592,9 @@ var SchemaParser = class {
267
592
  try {
268
593
  const schema = await this.parseSchemaFile(file);
269
594
  schemas.push(schema);
270
- } catch (error) {
271
- console.warn(`Warning: Could not parse ${file}:`, error);
595
+ } catch (error2) {
596
+ const reason = error2 instanceof Error ? error2.message : String(error2);
597
+ warning(`Could not parse ${file}: ${reason}`);
272
598
  }
273
599
  }
274
600
  return schemas;
@@ -286,7 +612,7 @@ var SchemaParser = class {
286
612
  for (const table of schema.tables) {
287
613
  const existingIndex = mergedTables.findIndex((t) => t.name === table.name);
288
614
  if (existingIndex >= 0) {
289
- console.warn(`Warning: Duplicate table '${table.name}' found, using first occurrence`);
615
+ warning(`Duplicate table '${table.name}' found, using first occurrence`);
290
616
  } else {
291
617
  mergedTables.push(table);
292
618
  }
@@ -330,8 +656,8 @@ var SchemaParser = class {
330
656
  try {
331
657
  const schema = JSON.parse(jsonString);
332
658
  return this.normalizeSchema(schema);
333
- } catch (error) {
334
- throw new Error(`Failed to parse schema JSON: ${error}`);
659
+ } catch (error2) {
660
+ throw new Error(`Failed to parse schema JSON: ${error2}`);
335
661
  }
336
662
  }
337
663
  };
@@ -340,15 +666,26 @@ function parseSchema(source) {
340
666
  const lines = source.split("\n");
341
667
  const tables = {};
342
668
  let currentLine = 0;
343
- const validColumnTypes = /* @__PURE__ */ new Set([
669
+ const validBaseColumnTypes = /* @__PURE__ */ new Set([
344
670
  "uuid",
345
671
  "varchar",
346
672
  "text",
347
673
  "int",
674
+ "bigint",
348
675
  "boolean",
349
676
  "timestamptz",
350
677
  "date"
351
678
  ]);
679
+ function normalizeColumnType3(type) {
680
+ return type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
681
+ }
682
+ function isValidColumnType2(type) {
683
+ const normalizedType = normalizeColumnType3(type);
684
+ if (validBaseColumnTypes.has(normalizedType)) {
685
+ return true;
686
+ }
687
+ return /^varchar\(\d+\)$/.test(normalizedType) || /^numeric\(\d+,\d+\)$/.test(normalizedType);
688
+ }
352
689
  function cleanLine(line) {
353
690
  const commentIndex = line.search(/(?:\/\/|#)/);
354
691
  if (commentIndex !== -1) {
@@ -368,17 +705,21 @@ function parseSchema(source) {
368
705
  }
369
706
  function parseColumn(line, lineNum) {
370
707
  const tokens = line.split(/\s+/).filter((t) => t.length > 0);
708
+ const modifiers = /* @__PURE__ */ new Set(["pk", "unique", "nullable", "default", "fk"]);
371
709
  if (tokens.length < 2) {
372
710
  throw new Error(`Line ${lineNum}: Invalid column definition. Expected: <name> <type> [modifiers...]`);
373
711
  }
374
712
  const colName = tokens[0];
375
- const colType = tokens[1];
376
- if (!validColumnTypes.has(colType)) {
377
- throw new Error(`Line ${lineNum}: Invalid column type '${colType}'. Valid types: ${Array.from(validColumnTypes).join(", ")}`);
713
+ const colType = normalizeColumnType3(tokens[1]);
714
+ if (!isValidColumnType2(colType)) {
715
+ throw new Error(
716
+ `Line ${lineNum}: Invalid column type '${tokens[1]}'. Valid types: ${Array.from(validBaseColumnTypes).join(", ")}, varchar(n), numeric(p,s)`
717
+ );
378
718
  }
379
719
  const column = {
380
720
  name: colName,
381
- type: colType
721
+ type: colType,
722
+ nullable: true
382
723
  };
383
724
  let i = 2;
384
725
  while (i < tokens.length) {
@@ -396,13 +737,29 @@ function parseSchema(source) {
396
737
  column.nullable = true;
397
738
  i++;
398
739
  break;
740
+ case "not":
741
+ if (tokens[i + 1] !== "null") {
742
+ throw new Error(`Line ${lineNum}: Unknown modifier 'not'`);
743
+ }
744
+ column.nullable = false;
745
+ i += 2;
746
+ break;
399
747
  case "default":
400
748
  i++;
401
749
  if (i >= tokens.length) {
402
750
  throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
403
751
  }
404
- column.default = tokens[i];
405
- i++;
752
+ {
753
+ const defaultTokens = [];
754
+ while (i < tokens.length && !modifiers.has(tokens[i])) {
755
+ defaultTokens.push(tokens[i]);
756
+ i++;
757
+ }
758
+ if (defaultTokens.length === 0) {
759
+ throw new Error(`Line ${lineNum}: 'default' modifier requires a value`);
760
+ }
761
+ column.default = defaultTokens.join(" ");
762
+ }
406
763
  break;
407
764
  case "fk":
408
765
  i++;
@@ -444,17 +801,19 @@ function parseSchema(source) {
444
801
  try {
445
802
  const column = parseColumn(cleaned, lineIdx + 1);
446
803
  columns.push(column);
447
- } catch (error) {
448
- throw error;
804
+ } catch (error2) {
805
+ throw error2;
449
806
  }
450
807
  lineIdx++;
451
808
  }
452
809
  if (!foundClosingBrace) {
453
810
  throw new Error(`Line ${startLine + 1}: Table '${tableName}' block not closed (missing '}')`);
454
811
  }
812
+ const primaryKeyColumn = columns.find((column) => column.primaryKey)?.name ?? null;
455
813
  tables[tableName] = {
456
814
  name: tableName,
457
- columns
815
+ columns,
816
+ ...primaryKeyColumn !== null && { primaryKey: primaryKeyColumn }
458
817
  };
459
818
  return lineIdx;
460
819
  }
@@ -622,17 +981,21 @@ async function schemaToState(schema) {
622
981
  const tables = {};
623
982
  for (const [tableName, table] of Object.entries(schema.tables)) {
624
983
  const columns = {};
984
+ const primaryKeyColumn = table.primaryKey ?? table.columns.find((column) => column.primaryKey)?.name ?? null;
625
985
  for (const column of table.columns) {
626
986
  columns[column.name] = {
627
987
  type: column.type,
628
988
  ...column.primaryKey !== void 0 && { primaryKey: column.primaryKey },
629
989
  ...column.unique !== void 0 && { unique: column.unique },
630
- ...column.nullable !== void 0 && { nullable: column.nullable },
990
+ nullable: column.nullable ?? true,
631
991
  ...column.default !== void 0 && { default: column.default },
632
992
  ...column.foreignKey !== void 0 && { foreignKey: column.foreignKey }
633
993
  };
634
994
  }
635
- tables[tableName] = { columns };
995
+ tables[tableName] = {
996
+ columns,
997
+ ...primaryKeyColumn !== null && { primaryKey: primaryKeyColumn }
998
+ };
636
999
  }
637
1000
  return {
638
1001
  version: 1,
@@ -828,7 +1191,23 @@ var SchemaValidator = class {
828
1191
  }
829
1192
  };
830
1193
  var defaultValidator = new SchemaValidator();
831
- var VALID_COLUMN_TYPES = ["uuid", "varchar", "text", "int", "boolean", "timestamptz", "date"];
1194
+ var VALID_BASE_COLUMN_TYPES = [
1195
+ "uuid",
1196
+ "varchar",
1197
+ "text",
1198
+ "int",
1199
+ "bigint",
1200
+ "boolean",
1201
+ "timestamptz",
1202
+ "date"
1203
+ ];
1204
+ function isValidColumnType(type) {
1205
+ const normalizedType = type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
1206
+ if (VALID_BASE_COLUMN_TYPES.includes(normalizedType)) {
1207
+ return true;
1208
+ }
1209
+ return /^varchar\(\d+\)$/.test(normalizedType) || /^numeric\(\d+,\d+\)$/.test(normalizedType);
1210
+ }
832
1211
  function validateSchema(schema) {
833
1212
  validateDuplicateTables(schema);
834
1213
  for (const tableName in schema.tables) {
@@ -841,25 +1220,25 @@ function validateDuplicateTables(schema) {
841
1220
  const seen = /* @__PURE__ */ new Set();
842
1221
  for (const tableName of tableNames) {
843
1222
  if (seen.has(tableName)) {
844
- throw new Error(`Tabla duplicada: '${tableName}'`);
1223
+ throw new Error(`Duplicate table: '${tableName}'`);
845
1224
  }
846
1225
  seen.add(tableName);
847
1226
  }
848
1227
  }
849
1228
  function validateTableColumns(tableName, table, allTables) {
850
1229
  const columnNames = /* @__PURE__ */ new Set();
851
- let primaryKeyCount = 0;
1230
+ const primaryKeyColumns = [];
852
1231
  for (const column of table.columns) {
853
1232
  if (columnNames.has(column.name)) {
854
- throw new Error(`Tabla '${tableName}': columna duplicada '${column.name}'`);
1233
+ throw new Error(`Table '${tableName}': duplicate column '${column.name}'`);
855
1234
  }
856
1235
  columnNames.add(column.name);
857
1236
  if (column.primaryKey) {
858
- primaryKeyCount++;
1237
+ primaryKeyColumns.push(column.name);
859
1238
  }
860
- if (!VALID_COLUMN_TYPES.includes(column.type)) {
1239
+ if (!isValidColumnType(column.type)) {
861
1240
  throw new Error(
862
- `Tabla '${tableName}', columna '${column.name}': tipo '${column.type}' no es v\xE1lido. Tipos soportados: ${VALID_COLUMN_TYPES.join(", ")}`
1241
+ `Table '${tableName}', column '${column.name}': type '${column.type}' is not valid. Supported types: ${VALID_BASE_COLUMN_TYPES.join(", ")}, varchar(n), numeric(p,s)`
863
1242
  );
864
1243
  }
865
1244
  if (column.foreignKey) {
@@ -867,20 +1246,39 @@ function validateTableColumns(tableName, table, allTables) {
867
1246
  const fkColumn = column.foreignKey.column;
868
1247
  if (!allTables[fkTable]) {
869
1248
  throw new Error(
870
- `Tabla '${tableName}', columna '${column.name}': tabla referenciada '${fkTable}' no existe`
1249
+ `Table '${tableName}', column '${column.name}': referenced table '${fkTable}' does not exist`
871
1250
  );
872
1251
  }
873
1252
  const referencedTable = allTables[fkTable];
874
1253
  const columnExists = referencedTable.columns.some((col) => col.name === fkColumn);
875
1254
  if (!columnExists) {
876
1255
  throw new Error(
877
- `Tabla '${tableName}', columna '${column.name}': tabla '${fkTable}' no tiene columna '${fkColumn}'`
1256
+ `Table '${tableName}', column '${column.name}': table '${fkTable}' does not have column '${fkColumn}'`
878
1257
  );
879
1258
  }
880
1259
  }
881
1260
  }
882
- if (primaryKeyCount > 1) {
883
- throw new Error(`Tabla '${tableName}': solo puede tener una primary key (encontradas ${primaryKeyCount})`);
1261
+ if (primaryKeyColumns.length > 1) {
1262
+ throw new Error(`Table '${tableName}': can only have one primary key (found ${primaryKeyColumns.length})`);
1263
+ }
1264
+ const normalizedPrimaryKey = table.primaryKey ?? primaryKeyColumns[0] ?? null;
1265
+ if (table.primaryKey && !columnNames.has(table.primaryKey)) {
1266
+ throw new Error(
1267
+ `Table '${tableName}': primary key column '${table.primaryKey}' does not exist`
1268
+ );
1269
+ }
1270
+ if (table.primaryKey && primaryKeyColumns.length === 1 && primaryKeyColumns[0] !== table.primaryKey) {
1271
+ throw new Error(
1272
+ `Table '${tableName}': column-level primary key '${primaryKeyColumns[0]}' does not match table primary key '${table.primaryKey}'`
1273
+ );
1274
+ }
1275
+ if (normalizedPrimaryKey) {
1276
+ const pkMatches = table.columns.filter((column) => column.name === normalizedPrimaryKey);
1277
+ if (pkMatches.length !== 1) {
1278
+ throw new Error(
1279
+ `Table '${tableName}': primary key column '${normalizedPrimaryKey}' is invalid`
1280
+ );
1281
+ }
884
1282
  }
885
1283
  }
886
1284
 
@@ -901,10 +1299,34 @@ function generateOperation(operation, provider, sqlConfig) {
901
1299
  return generateCreateTable(operation.table, provider, sqlConfig);
902
1300
  case "drop_table":
903
1301
  return generateDropTable(operation.tableName);
1302
+ case "column_type_changed":
1303
+ return generateAlterColumnType(
1304
+ operation.tableName,
1305
+ operation.columnName,
1306
+ operation.toType
1307
+ );
1308
+ case "column_nullability_changed":
1309
+ return generateAlterColumnNullability(
1310
+ operation.tableName,
1311
+ operation.columnName,
1312
+ operation.to
1313
+ );
904
1314
  case "add_column":
905
1315
  return generateAddColumn(operation.tableName, operation.column, provider, sqlConfig);
1316
+ case "column_default_changed":
1317
+ return generateAlterColumnDefault(
1318
+ operation.tableName,
1319
+ operation.columnName,
1320
+ operation.toDefault
1321
+ );
906
1322
  case "drop_column":
907
1323
  return generateDropColumn(operation.tableName, operation.columnName);
1324
+ case "column_unique_changed":
1325
+ return operation.to ? generateAddUniqueConstraint(operation.tableName, operation.columnName) : generateDropUniqueConstraint(operation.tableName, operation.columnName);
1326
+ case "drop_primary_key_constraint":
1327
+ return generateDropPrimaryKeyConstraint(operation.tableName);
1328
+ case "add_primary_key_constraint":
1329
+ return generateAddPrimaryKeyConstraint(operation.tableName, operation.columnName);
908
1330
  }
909
1331
  }
910
1332
  function generateCreateTable(table, provider, sqlConfig) {
@@ -952,6 +1374,45 @@ function generateAddColumn(tableName, column, provider, sqlConfig) {
952
1374
  function generateDropColumn(tableName, columnName) {
953
1375
  return `ALTER TABLE ${tableName} DROP COLUMN ${columnName};`;
954
1376
  }
1377
+ function generateAlterColumnType(tableName, columnName, newType) {
1378
+ return `ALTER TABLE ${tableName} ALTER COLUMN ${columnName} TYPE ${newType} USING ${columnName}::${newType};`;
1379
+ }
1380
+ function generateAddUniqueConstraint(tableName, columnName) {
1381
+ const deterministicConstraintName = uqName(tableName, columnName);
1382
+ return `ALTER TABLE ${tableName} ADD CONSTRAINT ${deterministicConstraintName} UNIQUE (${columnName});`;
1383
+ }
1384
+ function generateDropUniqueConstraint(tableName, columnName) {
1385
+ const deterministicConstraintName = uqName(tableName, columnName);
1386
+ const fallbackConstraintName = legacyUqName(tableName, columnName);
1387
+ return generateDropConstraintStatements(tableName, [deterministicConstraintName, fallbackConstraintName]);
1388
+ }
1389
+ function generateDropPrimaryKeyConstraint(tableName) {
1390
+ const deterministicConstraintName = pkName(tableName);
1391
+ const fallbackConstraintName = legacyPkName(tableName);
1392
+ return generateDropConstraintStatements(tableName, [deterministicConstraintName, fallbackConstraintName]);
1393
+ }
1394
+ function generateAddPrimaryKeyConstraint(tableName, columnName) {
1395
+ const deterministicConstraintName = pkName(tableName);
1396
+ return `ALTER TABLE ${tableName} ADD CONSTRAINT ${deterministicConstraintName} PRIMARY KEY (${columnName});`;
1397
+ }
1398
+ function generateDropConstraintStatements(tableName, constraintNames) {
1399
+ const uniqueConstraintNames = Array.from(new Set(constraintNames));
1400
+ return uniqueConstraintNames.map(
1401
+ (constraintName) => `ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`
1402
+ ).join("\n");
1403
+ }
1404
+ function generateAlterColumnDefault(tableName, columnName, newDefault) {
1405
+ if (newDefault === null) {
1406
+ return `ALTER TABLE ${tableName} ALTER COLUMN ${columnName} DROP DEFAULT;`;
1407
+ }
1408
+ return `ALTER TABLE ${tableName} ALTER COLUMN ${columnName} SET DEFAULT ${newDefault};`;
1409
+ }
1410
+ function generateAlterColumnNullability(tableName, columnName, toNullable) {
1411
+ if (toNullable) {
1412
+ return `ALTER TABLE ${tableName} ALTER COLUMN ${columnName} DROP NOT NULL;`;
1413
+ }
1414
+ return `ALTER TABLE ${tableName} ALTER COLUMN ${columnName} SET NOT NULL;`;
1415
+ }
955
1416
 
956
1417
  // src/commands/diff.ts
957
1418
  var REQUIRED_CONFIG_FIELDS = ["schemaFile", "stateFile"];
@@ -962,7 +1423,7 @@ async function runDiff() {
962
1423
  const root = getProjectRoot();
963
1424
  const configPath = getConfigPath(root);
964
1425
  if (!await fileExists(configPath)) {
965
- throw new Error('SchemaForge project not initialized. Run "schemaforge init" first.');
1426
+ throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
966
1427
  }
967
1428
  const config = await readJsonFile(configPath, {});
968
1429
  for (const field of REQUIRED_CONFIG_FIELDS) {
@@ -981,16 +1442,16 @@ async function runDiff() {
981
1442
  const schema = parseSchema(schemaSource);
982
1443
  try {
983
1444
  validateSchema(schema);
984
- } catch (error) {
985
- if (error instanceof Error) {
986
- throw new SchemaValidationError(error.message);
1445
+ } catch (error2) {
1446
+ if (error2 instanceof Error) {
1447
+ throw new SchemaValidationError(error2.message);
987
1448
  }
988
- throw error;
1449
+ throw error2;
989
1450
  }
990
1451
  const previousState = await loadState(statePath);
991
1452
  const diff = diffSchemas(previousState, schema);
992
1453
  if (diff.operations.length === 0) {
993
- console.log("No changes detected");
1454
+ success("No changes detected");
994
1455
  return;
995
1456
  }
996
1457
  const sql = generateSql(diff, provider, config.sql);
@@ -1024,7 +1485,7 @@ async function runGenerate(options) {
1024
1485
  const root = getProjectRoot();
1025
1486
  const configPath = getConfigPath(root);
1026
1487
  if (!await fileExists(configPath)) {
1027
- throw new Error('SchemaForge project not initialized. Run "schemaforge init" first.');
1488
+ throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
1028
1489
  }
1029
1490
  const config = await readJsonFile(configPath, {});
1030
1491
  for (const field of REQUIRED_CONFIG_FIELDS2) {
@@ -1041,23 +1502,23 @@ async function runGenerate(options) {
1041
1502
  }
1042
1503
  const provider = config.provider ?? "postgres";
1043
1504
  if (!config.provider) {
1044
- console.log("Provider not set; defaulting to postgres.");
1505
+ info("Provider not set; defaulting to postgres.");
1045
1506
  }
1046
- console.log("Generating SQL...");
1507
+ info("Generating SQL...");
1047
1508
  const schemaSource = await readTextFile(schemaPath);
1048
1509
  const schema = parseSchema(schemaSource);
1049
1510
  try {
1050
1511
  validateSchema(schema);
1051
- } catch (error) {
1052
- if (error instanceof Error) {
1053
- throw new SchemaValidationError(error.message);
1512
+ } catch (error2) {
1513
+ if (error2 instanceof Error) {
1514
+ throw new SchemaValidationError(error2.message);
1054
1515
  }
1055
- throw error;
1516
+ throw error2;
1056
1517
  }
1057
1518
  const previousState = await loadState(statePath);
1058
1519
  const diff = diffSchemas(previousState, schema);
1059
1520
  if (diff.operations.length === 0) {
1060
- console.log("No changes detected");
1521
+ info("No changes detected");
1061
1522
  return;
1062
1523
  }
1063
1524
  const sql = generateSql(diff, provider, config.sql);
@@ -1069,108 +1530,1216 @@ async function runGenerate(options) {
1069
1530
  await writeTextFile(migrationPath, sql + "\n");
1070
1531
  const nextState = await schemaToState(schema);
1071
1532
  await saveState(statePath, nextState);
1072
- console.log(`\u2713 SQL generated successfully: ${migrationPath}`);
1533
+ success(`SQL generated successfully: ${migrationPath}`);
1073
1534
  }
1074
1535
 
1075
- // src/commands/init.ts
1536
+ // src/commands/import.ts
1076
1537
  var import_commander3 = require("commander");
1077
- async function runInit() {
1078
- const root = getProjectRoot();
1079
- const schemaForgeDir = getSchemaForgeDir(root);
1080
- if (await fileExists(schemaForgeDir)) {
1081
- console.error("Error: schemaforge/ directory already exists");
1082
- console.error("Please remove it or run init in a different directory");
1083
- process.exit(1);
1084
- }
1085
- const schemaFilePath = getSchemaFilePath(root);
1086
- const configPath = getConfigPath(root);
1087
- const statePath = getStatePath(root);
1088
- if (await fileExists(schemaFilePath)) {
1089
- console.error(`Error: ${schemaFilePath} already exists`);
1090
- process.exit(1);
1538
+ var import_path7 = __toESM(require("path"));
1539
+
1540
+ // src/core/sql/apply-ops.ts
1541
+ function toSchemaColumn(column) {
1542
+ return {
1543
+ name: column.name,
1544
+ type: column.type,
1545
+ nullable: column.nullable,
1546
+ ...column.default !== void 0 ? { default: column.default } : {},
1547
+ ...column.unique !== void 0 ? { unique: column.unique } : {},
1548
+ ...column.primaryKey !== void 0 ? { primaryKey: column.primaryKey } : {}
1549
+ };
1550
+ }
1551
+ function applySingleColumnConstraint(table, constraint) {
1552
+ if (constraint.columns.length !== 1) {
1553
+ return false;
1091
1554
  }
1092
- if (await fileExists(configPath)) {
1093
- console.error(`Error: ${configPath} already exists`);
1094
- process.exit(1);
1555
+ const targetColumn = table.columns.find((column) => column.name === constraint.columns[0]);
1556
+ if (!targetColumn) {
1557
+ return false;
1095
1558
  }
1096
- if (await fileExists(statePath)) {
1097
- console.error(`Error: ${statePath} already exists`);
1098
- process.exit(1);
1559
+ if (constraint.type === "PRIMARY_KEY") {
1560
+ table.primaryKey = targetColumn.name;
1561
+ targetColumn.primaryKey = true;
1562
+ targetColumn.nullable = false;
1563
+ return true;
1099
1564
  }
1100
- console.log("Initializing schema project...");
1101
- await ensureDir(schemaForgeDir);
1102
- const schemaContent = `# SchemaForge schema definition
1103
- # Run: schemaforge generate
1104
-
1105
- table users {
1106
- id uuid pk
1107
- created_at timestamptz default now()
1565
+ targetColumn.unique = true;
1566
+ return true;
1108
1567
  }
1109
- `;
1110
- await writeTextFile(schemaFilePath, schemaContent);
1111
- console.log(`\u2713 Created ${schemaFilePath}`);
1112
- const config = {
1113
- provider: "supabase",
1114
- outputDir: "supabase/migrations",
1115
- schemaFile: "schemaforge/schema.sf",
1116
- stateFile: "schemaforge/state.json",
1117
- sql: {
1118
- uuidDefault: "gen_random_uuid()",
1119
- timestampDefault: "now()"
1568
+ function clearConstraintByName(table, name) {
1569
+ if (name.endsWith("_pkey") || name.startsWith("pk_")) {
1570
+ if (table.primaryKey) {
1571
+ const pkColumn = table.columns.find((column) => column.name === table.primaryKey);
1572
+ if (pkColumn) {
1573
+ pkColumn.primaryKey = false;
1574
+ }
1575
+ table.primaryKey = null;
1120
1576
  }
1121
- };
1122
- await writeJsonFile(configPath, config);
1123
- console.log(`\u2713 Created ${configPath}`);
1124
- const state = {
1125
- version: 1,
1126
- tables: {}
1127
- };
1128
- await writeJsonFile(statePath, state);
1129
- console.log(`\u2713 Created ${statePath}`);
1130
- const outputDir = "supabase/migrations";
1131
- await ensureDir(outputDir);
1132
- console.log(`\u2713 Created ${outputDir}`);
1133
- console.log("\n\u2713 Project initialized successfully");
1134
- console.log("Next steps:");
1135
- console.log(" 1. Edit schemaforge/schema.sf to define your schema");
1136
- console.log(" 2. Run: schemaforge generate");
1137
- }
1138
-
1139
- // src/cli.ts
1140
- var program = new import_commander4.Command();
1141
- program.name("schemaforge").description("CLI tool for schema management and SQL generation").version(package_default.version);
1142
- function handleError(error) {
1143
- if (error instanceof SchemaValidationError) {
1144
- console.error(error.message);
1145
- process.exitCode = 2;
1146
1577
  return;
1147
1578
  }
1148
- if (error instanceof Error) {
1149
- console.error(error.message);
1150
- } else {
1151
- console.error("Unexpected error");
1579
+ if (name.endsWith("_key") || name.startsWith("uq_")) {
1580
+ for (const column of table.columns) {
1581
+ if (column.unique) {
1582
+ column.unique = false;
1583
+ }
1584
+ }
1152
1585
  }
1153
- process.exitCode = 1;
1154
1586
  }
1155
- program.command("init").description("Initialize a new schema project").action(async () => {
1156
- try {
1157
- await runInit();
1158
- } catch (error) {
1159
- handleError(error);
1587
+ function getOrCreateTable(tables, name) {
1588
+ if (!tables[name]) {
1589
+ tables[name] = { name, columns: [] };
1160
1590
  }
1161
- });
1162
- program.command("generate").description("Generate SQL from schema files").option("--name <string>", "Schema name to generate").action(async (options) => {
1163
- try {
1164
- await runGenerate(options);
1165
- } catch (error) {
1166
- handleError(error);
1591
+ return tables[name];
1592
+ }
1593
+ function applySqlOps(ops) {
1594
+ const tables = {};
1595
+ const warnings = [];
1596
+ for (const op of ops) {
1597
+ switch (op.kind) {
1598
+ case "CREATE_TABLE": {
1599
+ const table = {
1600
+ name: op.table,
1601
+ columns: op.columns.map(toSchemaColumn)
1602
+ };
1603
+ for (const column of table.columns) {
1604
+ if (column.primaryKey) {
1605
+ table.primaryKey = column.name;
1606
+ }
1607
+ }
1608
+ for (const constraint of op.constraints) {
1609
+ const applied = applySingleColumnConstraint(table, constraint);
1610
+ if (!applied) {
1611
+ warnings.push({
1612
+ statement: `CREATE TABLE ${op.table}`,
1613
+ reason: `Constraint ${constraint.type}${constraint.name ? ` (${constraint.name})` : ""} is unsupported for schema reconstruction`
1614
+ });
1615
+ }
1616
+ }
1617
+ tables[op.table] = table;
1618
+ break;
1619
+ }
1620
+ case "ADD_COLUMN": {
1621
+ const table = getOrCreateTable(tables, op.table);
1622
+ table.columns = table.columns.filter((column) => column.name !== op.column.name);
1623
+ table.columns.push(toSchemaColumn(op.column));
1624
+ if (op.column.primaryKey) {
1625
+ table.primaryKey = op.column.name;
1626
+ }
1627
+ break;
1628
+ }
1629
+ case "ALTER_COLUMN_TYPE": {
1630
+ const table = tables[op.table];
1631
+ if (!table) {
1632
+ break;
1633
+ }
1634
+ const column = table.columns.find((item) => item.name === op.column);
1635
+ if (column) {
1636
+ column.type = op.toType;
1637
+ }
1638
+ break;
1639
+ }
1640
+ case "SET_NOT_NULL": {
1641
+ const table = tables[op.table];
1642
+ const column = table?.columns.find((item) => item.name === op.column);
1643
+ if (column) {
1644
+ column.nullable = false;
1645
+ }
1646
+ break;
1647
+ }
1648
+ case "DROP_NOT_NULL": {
1649
+ const table = tables[op.table];
1650
+ const column = table?.columns.find((item) => item.name === op.column);
1651
+ if (column) {
1652
+ column.nullable = true;
1653
+ }
1654
+ break;
1655
+ }
1656
+ case "SET_DEFAULT": {
1657
+ const table = tables[op.table];
1658
+ const column = table?.columns.find((item) => item.name === op.column);
1659
+ if (column) {
1660
+ column.default = op.expr;
1661
+ }
1662
+ break;
1663
+ }
1664
+ case "DROP_DEFAULT": {
1665
+ const table = tables[op.table];
1666
+ const column = table?.columns.find((item) => item.name === op.column);
1667
+ if (column) {
1668
+ column.default = null;
1669
+ }
1670
+ break;
1671
+ }
1672
+ case "ADD_CONSTRAINT": {
1673
+ const table = tables[op.table];
1674
+ if (!table) {
1675
+ break;
1676
+ }
1677
+ const applied = applySingleColumnConstraint(table, op.constraint);
1678
+ if (!applied) {
1679
+ warnings.push({
1680
+ statement: `ALTER TABLE ${op.table} ADD CONSTRAINT ${op.constraint.name ?? "<unnamed>"}`,
1681
+ reason: `Constraint ${op.constraint.type} is unsupported for schema reconstruction`
1682
+ });
1683
+ }
1684
+ break;
1685
+ }
1686
+ case "DROP_CONSTRAINT": {
1687
+ const table = tables[op.table];
1688
+ if (!table) {
1689
+ break;
1690
+ }
1691
+ clearConstraintByName(table, op.name);
1692
+ break;
1693
+ }
1694
+ case "DROP_COLUMN": {
1695
+ const table = tables[op.table];
1696
+ if (!table) {
1697
+ break;
1698
+ }
1699
+ table.columns = table.columns.filter((column) => column.name !== op.column);
1700
+ if (table.primaryKey === op.column) {
1701
+ table.primaryKey = null;
1702
+ }
1703
+ break;
1704
+ }
1705
+ case "DROP_TABLE": {
1706
+ delete tables[op.table];
1707
+ break;
1708
+ }
1709
+ }
1710
+ }
1711
+ const schema = { tables };
1712
+ return { schema, warnings };
1713
+ }
1714
+
1715
+ // src/core/sql/load-migrations.ts
1716
+ var import_fs6 = require("fs");
1717
+ var import_path6 = __toESM(require("path"));
1718
+ async function loadMigrationSqlInput(inputPath) {
1719
+ const stats = await import_fs6.promises.stat(inputPath);
1720
+ if (stats.isFile()) {
1721
+ if (!inputPath.toLowerCase().endsWith(".sql")) {
1722
+ throw new Error(`Input file must be a .sql file: ${inputPath}`);
1723
+ }
1724
+ return [{ filePath: inputPath, sql: await readTextFile(inputPath) }];
1725
+ }
1726
+ if (!stats.isDirectory()) {
1727
+ throw new Error(`Input path must be a .sql file or directory: ${inputPath}`);
1728
+ }
1729
+ const sqlFiles = await findFiles(inputPath, /\.sql$/i);
1730
+ sqlFiles.sort((left, right) => import_path6.default.basename(left).localeCompare(import_path6.default.basename(right)));
1731
+ const result = [];
1732
+ for (const filePath of sqlFiles) {
1733
+ result.push({
1734
+ filePath,
1735
+ sql: await readTextFile(filePath)
1736
+ });
1737
+ }
1738
+ return result;
1739
+ }
1740
+
1741
+ // src/core/sql/split-statements.ts
1742
+ function splitSqlStatements(sql) {
1743
+ const statements = [];
1744
+ let current = "";
1745
+ let inSingleQuote = false;
1746
+ let inDoubleQuote = false;
1747
+ let inLineComment = false;
1748
+ let inBlockComment = false;
1749
+ let dollarTag = null;
1750
+ let index = 0;
1751
+ while (index < sql.length) {
1752
+ const char = sql[index];
1753
+ const next = index + 1 < sql.length ? sql[index + 1] : "";
1754
+ if (inLineComment) {
1755
+ current += char;
1756
+ if (char === "\n") {
1757
+ inLineComment = false;
1758
+ }
1759
+ index++;
1760
+ continue;
1761
+ }
1762
+ if (inBlockComment) {
1763
+ current += char;
1764
+ if (char === "*" && next === "/") {
1765
+ current += next;
1766
+ inBlockComment = false;
1767
+ index += 2;
1768
+ continue;
1769
+ }
1770
+ index++;
1771
+ continue;
1772
+ }
1773
+ if (!inSingleQuote && !inDoubleQuote && dollarTag === null) {
1774
+ if (char === "-" && next === "-") {
1775
+ current += char + next;
1776
+ inLineComment = true;
1777
+ index += 2;
1778
+ continue;
1779
+ }
1780
+ if (char === "/" && next === "*") {
1781
+ current += char + next;
1782
+ inBlockComment = true;
1783
+ index += 2;
1784
+ continue;
1785
+ }
1786
+ }
1787
+ if (!inDoubleQuote && dollarTag === null && char === "'") {
1788
+ current += char;
1789
+ if (inSingleQuote && next === "'") {
1790
+ current += next;
1791
+ index += 2;
1792
+ continue;
1793
+ }
1794
+ inSingleQuote = !inSingleQuote;
1795
+ index++;
1796
+ continue;
1797
+ }
1798
+ if (!inSingleQuote && dollarTag === null && char === '"') {
1799
+ current += char;
1800
+ if (inDoubleQuote && next === '"') {
1801
+ current += next;
1802
+ index += 2;
1803
+ continue;
1804
+ }
1805
+ inDoubleQuote = !inDoubleQuote;
1806
+ index++;
1807
+ continue;
1808
+ }
1809
+ if (!inSingleQuote && !inDoubleQuote) {
1810
+ if (dollarTag === null && char === "$") {
1811
+ const remainder = sql.slice(index);
1812
+ const match = remainder.match(/^\$[a-zA-Z_][a-zA-Z0-9_]*\$|^\$\$/);
1813
+ if (match) {
1814
+ dollarTag = match[0];
1815
+ current += match[0];
1816
+ index += match[0].length;
1817
+ continue;
1818
+ }
1819
+ }
1820
+ if (dollarTag !== null && sql.startsWith(dollarTag, index)) {
1821
+ current += dollarTag;
1822
+ index += dollarTag.length;
1823
+ dollarTag = null;
1824
+ continue;
1825
+ }
1826
+ }
1827
+ if (!inSingleQuote && !inDoubleQuote && dollarTag === null && char === ";") {
1828
+ const statement = current.trim();
1829
+ if (statement.length > 0) {
1830
+ statements.push(statement);
1831
+ }
1832
+ current = "";
1833
+ index++;
1834
+ continue;
1835
+ }
1836
+ current += char;
1837
+ index++;
1838
+ }
1839
+ const tail = current.trim();
1840
+ if (tail.length > 0) {
1841
+ statements.push(tail);
1842
+ }
1843
+ return statements;
1844
+ }
1845
+
1846
+ // src/core/sql/parse-migration.ts
1847
+ var COLUMN_CONSTRAINT_KEYWORDS = /* @__PURE__ */ new Set([
1848
+ "primary",
1849
+ "unique",
1850
+ "not",
1851
+ "null",
1852
+ "default",
1853
+ "constraint",
1854
+ "references",
1855
+ "check"
1856
+ ]);
1857
+ function normalizeSqlType(type) {
1858
+ return type.trim().toLowerCase().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
1859
+ }
1860
+ function unquoteIdentifier(value) {
1861
+ const trimmed = value.trim();
1862
+ if (trimmed.startsWith('"') && trimmed.endsWith('"') && trimmed.length >= 2) {
1863
+ return trimmed.slice(1, -1).replace(/""/g, '"');
1864
+ }
1865
+ return trimmed;
1866
+ }
1867
+ function normalizeIdentifier(identifier) {
1868
+ const parts = identifier.trim().split(".").map((part) => unquoteIdentifier(part)).filter((part) => part.length > 0);
1869
+ const leaf = parts.length > 0 ? parts[parts.length - 1] : identifier.trim();
1870
+ return leaf.toLowerCase();
1871
+ }
1872
+ function removeSqlComments(statement) {
1873
+ let result = "";
1874
+ let inSingleQuote = false;
1875
+ let inDoubleQuote = false;
1876
+ let inLineComment = false;
1877
+ let inBlockComment = false;
1878
+ for (let index = 0; index < statement.length; index++) {
1879
+ const char = statement[index];
1880
+ const next = index + 1 < statement.length ? statement[index + 1] : "";
1881
+ if (inLineComment) {
1882
+ if (char === "\n") {
1883
+ inLineComment = false;
1884
+ result += char;
1885
+ }
1886
+ continue;
1887
+ }
1888
+ if (inBlockComment) {
1889
+ if (char === "*" && next === "/") {
1890
+ inBlockComment = false;
1891
+ index++;
1892
+ }
1893
+ continue;
1894
+ }
1895
+ if (!inSingleQuote && !inDoubleQuote) {
1896
+ if (char === "-" && next === "-") {
1897
+ inLineComment = true;
1898
+ index++;
1899
+ continue;
1900
+ }
1901
+ if (char === "/" && next === "*") {
1902
+ inBlockComment = true;
1903
+ index++;
1904
+ continue;
1905
+ }
1906
+ }
1907
+ if (char === "'" && !inDoubleQuote) {
1908
+ if (inSingleQuote && next === "'") {
1909
+ result += "''";
1910
+ index++;
1911
+ continue;
1912
+ }
1913
+ inSingleQuote = !inSingleQuote;
1914
+ result += char;
1915
+ continue;
1916
+ }
1917
+ if (char === '"' && !inSingleQuote) {
1918
+ if (inDoubleQuote && next === '"') {
1919
+ result += '""';
1920
+ index++;
1921
+ continue;
1922
+ }
1923
+ inDoubleQuote = !inDoubleQuote;
1924
+ result += char;
1925
+ continue;
1926
+ }
1927
+ result += char;
1928
+ }
1929
+ return result.trim();
1930
+ }
1931
+ function splitTopLevelComma(input) {
1932
+ const parts = [];
1933
+ let current = "";
1934
+ let depth = 0;
1935
+ let inSingleQuote = false;
1936
+ let inDoubleQuote = false;
1937
+ for (let index = 0; index < input.length; index++) {
1938
+ const char = input[index];
1939
+ const next = index + 1 < input.length ? input[index + 1] : "";
1940
+ if (char === "'" && !inDoubleQuote) {
1941
+ current += char;
1942
+ if (inSingleQuote && next === "'") {
1943
+ current += next;
1944
+ index++;
1945
+ continue;
1946
+ }
1947
+ inSingleQuote = !inSingleQuote;
1948
+ continue;
1949
+ }
1950
+ if (char === '"' && !inSingleQuote) {
1951
+ current += char;
1952
+ if (inDoubleQuote && next === '"') {
1953
+ current += next;
1954
+ index++;
1955
+ continue;
1956
+ }
1957
+ inDoubleQuote = !inDoubleQuote;
1958
+ continue;
1959
+ }
1960
+ if (!inSingleQuote && !inDoubleQuote) {
1961
+ if (char === "(") {
1962
+ depth++;
1963
+ } else if (char === ")") {
1964
+ depth = Math.max(0, depth - 1);
1965
+ } else if (char === "," && depth === 0) {
1966
+ const segment = current.trim();
1967
+ if (segment.length > 0) {
1968
+ parts.push(segment);
1969
+ }
1970
+ current = "";
1971
+ continue;
1972
+ }
1973
+ }
1974
+ current += char;
1975
+ }
1976
+ const tail = current.trim();
1977
+ if (tail.length > 0) {
1978
+ parts.push(tail);
1979
+ }
1980
+ return parts;
1981
+ }
1982
+ function tokenize(segment) {
1983
+ const tokens = [];
1984
+ let current = "";
1985
+ let depth = 0;
1986
+ let inSingleQuote = false;
1987
+ let inDoubleQuote = false;
1988
+ for (let index = 0; index < segment.length; index++) {
1989
+ const char = segment[index];
1990
+ const next = index + 1 < segment.length ? segment[index + 1] : "";
1991
+ if (char === "'" && !inDoubleQuote) {
1992
+ current += char;
1993
+ if (inSingleQuote && next === "'") {
1994
+ current += next;
1995
+ index++;
1996
+ continue;
1997
+ }
1998
+ inSingleQuote = !inSingleQuote;
1999
+ continue;
2000
+ }
2001
+ if (char === '"' && !inSingleQuote) {
2002
+ current += char;
2003
+ if (inDoubleQuote && next === '"') {
2004
+ current += next;
2005
+ index++;
2006
+ continue;
2007
+ }
2008
+ inDoubleQuote = !inDoubleQuote;
2009
+ continue;
2010
+ }
2011
+ if (!inSingleQuote && !inDoubleQuote) {
2012
+ if (char === "(") {
2013
+ depth++;
2014
+ } else if (char === ")") {
2015
+ depth = Math.max(0, depth - 1);
2016
+ }
2017
+ if (/\s/.test(char) && depth === 0) {
2018
+ if (current.length > 0) {
2019
+ tokens.push(current);
2020
+ current = "";
2021
+ }
2022
+ continue;
2023
+ }
2024
+ }
2025
+ current += char;
2026
+ }
2027
+ if (current.length > 0) {
2028
+ tokens.push(current);
2029
+ }
2030
+ return tokens;
2031
+ }
2032
+ function parseColumnDefinition(segment) {
2033
+ const tokens = tokenize(segment);
2034
+ if (tokens.length < 2) {
2035
+ return null;
2036
+ }
2037
+ const name = normalizeIdentifier(tokens[0]);
2038
+ let cursor = 1;
2039
+ const typeTokens = [];
2040
+ while (cursor < tokens.length) {
2041
+ const lower = tokens[cursor].toLowerCase();
2042
+ if (COLUMN_CONSTRAINT_KEYWORDS.has(lower)) {
2043
+ break;
2044
+ }
2045
+ typeTokens.push(tokens[cursor]);
2046
+ cursor++;
2047
+ }
2048
+ if (typeTokens.length === 0) {
2049
+ return null;
2050
+ }
2051
+ const parsed = {
2052
+ name,
2053
+ type: normalizeSqlType(typeTokens.join(" ")),
2054
+ nullable: true
2055
+ };
2056
+ while (cursor < tokens.length) {
2057
+ const lower = tokens[cursor].toLowerCase();
2058
+ if (lower === "primary" && tokens[cursor + 1]?.toLowerCase() === "key") {
2059
+ parsed.primaryKey = true;
2060
+ parsed.nullable = false;
2061
+ cursor += 2;
2062
+ continue;
2063
+ }
2064
+ if (lower === "unique") {
2065
+ parsed.unique = true;
2066
+ cursor++;
2067
+ continue;
2068
+ }
2069
+ if (lower === "not" && tokens[cursor + 1]?.toLowerCase() === "null") {
2070
+ parsed.nullable = false;
2071
+ cursor += 2;
2072
+ continue;
2073
+ }
2074
+ if (lower === "null") {
2075
+ parsed.nullable = true;
2076
+ cursor++;
2077
+ continue;
2078
+ }
2079
+ if (lower === "default") {
2080
+ cursor++;
2081
+ const defaultTokens = [];
2082
+ while (cursor < tokens.length) {
2083
+ const probe = tokens[cursor].toLowerCase();
2084
+ if (probe === "constraint" || probe === "references" || probe === "check" || probe === "not" && tokens[cursor + 1]?.toLowerCase() === "null" || probe === "null" || probe === "unique" || probe === "primary" && tokens[cursor + 1]?.toLowerCase() === "key") {
2085
+ break;
2086
+ }
2087
+ defaultTokens.push(tokens[cursor]);
2088
+ cursor++;
2089
+ }
2090
+ parsed.default = normalizeDefault(defaultTokens.join(" "));
2091
+ continue;
2092
+ }
2093
+ cursor++;
2094
+ }
2095
+ return parsed;
2096
+ }
2097
+ function parseCreateTableConstraint(segment) {
2098
+ const normalized = segment.trim().replace(/\s+/g, " ");
2099
+ const constraintMatch = normalized.match(/^constraint\s+([^\s]+)\s+(primary\s+key|unique)\s*\((.+)\)$/i);
2100
+ if (constraintMatch) {
2101
+ const [, rawName, kind, rawColumns] = constraintMatch;
2102
+ const columns = splitTopLevelComma(rawColumns).map((item) => normalizeIdentifier(item));
2103
+ if (kind.toLowerCase().includes("primary")) {
2104
+ return { type: "PRIMARY_KEY", name: normalizeIdentifier(rawName), columns };
2105
+ }
2106
+ return { type: "UNIQUE", name: normalizeIdentifier(rawName), columns };
2107
+ }
2108
+ const barePk = normalized.match(/^primary\s+key\s*\((.+)\)$/i);
2109
+ if (barePk) {
2110
+ const columns = splitTopLevelComma(barePk[1]).map((item) => normalizeIdentifier(item));
2111
+ return { type: "PRIMARY_KEY", columns };
2112
+ }
2113
+ const bareUnique = normalized.match(/^unique\s*\((.+)\)$/i);
2114
+ if (bareUnique) {
2115
+ const columns = splitTopLevelComma(bareUnique[1]).map((item) => normalizeIdentifier(item));
2116
+ return { type: "UNIQUE", columns };
2117
+ }
2118
+ return null;
2119
+ }
2120
+ function parseAlterTablePrefix(stmt) {
2121
+ const match = stmt.match(/^alter\s+table\s+(?:if\s+exists\s+)?(?:only\s+)?(.+)$/i);
2122
+ if (!match) {
2123
+ return null;
2124
+ }
2125
+ const remainder = match[1].trim();
2126
+ const tokens = tokenize(remainder);
2127
+ if (tokens.length < 2) {
2128
+ return null;
2129
+ }
2130
+ const tableToken = tokens[0];
2131
+ const table = normalizeIdentifier(tableToken);
2132
+ const rest = remainder.slice(tableToken.length).trim();
2133
+ return { table, rest };
2134
+ }
2135
+ function parseCreateTable(stmt) {
2136
+ const match = stmt.match(/^create\s+table\s+(?:if\s+not\s+exists\s+)?(.+?)\s*\((.*)\)$/is);
2137
+ if (!match) {
2138
+ return null;
2139
+ }
2140
+ const table = normalizeIdentifier(match[1]);
2141
+ const body = match[2];
2142
+ const segments = splitTopLevelComma(body);
2143
+ const columns = [];
2144
+ const constraints = [];
2145
+ for (const segment of segments) {
2146
+ const constraint = parseCreateTableConstraint(segment);
2147
+ if (constraint) {
2148
+ constraints.push(constraint);
2149
+ continue;
2150
+ }
2151
+ const column = parseColumnDefinition(segment);
2152
+ if (column) {
2153
+ columns.push(column);
2154
+ }
2155
+ }
2156
+ return {
2157
+ kind: "CREATE_TABLE",
2158
+ table,
2159
+ columns,
2160
+ constraints
2161
+ };
2162
+ }
2163
+ function parseAlterTableAddColumn(stmt) {
2164
+ const prefix = parseAlterTablePrefix(stmt);
2165
+ if (!prefix) {
2166
+ return null;
2167
+ }
2168
+ const match = prefix.rest.match(/^add\s+column\s+(?:if\s+not\s+exists\s+)?(.+)$/i);
2169
+ if (!match) {
2170
+ return null;
2171
+ }
2172
+ const column = parseColumnDefinition(match[1]);
2173
+ if (!column) {
2174
+ return null;
2175
+ }
2176
+ return { kind: "ADD_COLUMN", table: prefix.table, column };
2177
+ }
2178
+ function parseAlterColumnType(stmt) {
2179
+ const prefix = parseAlterTablePrefix(stmt);
2180
+ if (!prefix) {
2181
+ return null;
2182
+ }
2183
+ const match = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+type\s+(.+)$/i);
2184
+ if (!match) {
2185
+ return null;
2186
+ }
2187
+ const column = normalizeIdentifier(match[1]);
2188
+ const toType = normalizeSqlType(match[2].replace(/\s+using\s+[\s\S]*$/i, "").trim());
2189
+ return {
2190
+ kind: "ALTER_COLUMN_TYPE",
2191
+ table: prefix.table,
2192
+ column,
2193
+ toType
2194
+ };
2195
+ }
2196
+ function parseSetDropNotNull(stmt) {
2197
+ const prefix = parseAlterTablePrefix(stmt);
2198
+ if (!prefix) {
2199
+ return null;
2200
+ }
2201
+ const setMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+set\s+not\s+null$/i);
2202
+ if (setMatch) {
2203
+ return {
2204
+ kind: "SET_NOT_NULL",
2205
+ table: prefix.table,
2206
+ column: normalizeIdentifier(setMatch[1])
2207
+ };
2208
+ }
2209
+ const dropMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+drop\s+not\s+null$/i);
2210
+ if (dropMatch) {
2211
+ return {
2212
+ kind: "DROP_NOT_NULL",
2213
+ table: prefix.table,
2214
+ column: normalizeIdentifier(dropMatch[1])
2215
+ };
2216
+ }
2217
+ return null;
2218
+ }
2219
+ function parseSetDropDefault(stmt) {
2220
+ const prefix = parseAlterTablePrefix(stmt);
2221
+ if (!prefix) {
2222
+ return null;
2223
+ }
2224
+ const setMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+set\s+default\s+(.+)$/i);
2225
+ if (setMatch) {
2226
+ return {
2227
+ kind: "SET_DEFAULT",
2228
+ table: prefix.table,
2229
+ column: normalizeIdentifier(setMatch[1]),
2230
+ expr: normalizeDefault(setMatch[2].trim()) ?? setMatch[2].trim()
2231
+ };
2232
+ }
2233
+ const dropMatch = prefix.rest.match(/^alter\s+column\s+([^\s]+)\s+drop\s+default$/i);
2234
+ if (dropMatch) {
2235
+ return {
2236
+ kind: "DROP_DEFAULT",
2237
+ table: prefix.table,
2238
+ column: normalizeIdentifier(dropMatch[1])
2239
+ };
2240
+ }
2241
+ return null;
2242
+ }
2243
+ function parseAddDropConstraint(stmt) {
2244
+ const prefix = parseAlterTablePrefix(stmt);
2245
+ if (!prefix) {
2246
+ return null;
2247
+ }
2248
+ const addMatch = prefix.rest.match(/^add\s+constraint\s+([^\s]+)\s+(primary\s+key|unique)\s*\((.+)\)$/i);
2249
+ if (addMatch) {
2250
+ const [, rawName, kind, rawColumns] = addMatch;
2251
+ const columns = splitTopLevelComma(rawColumns).map((item) => normalizeIdentifier(item));
2252
+ const constraint = kind.toLowerCase().includes("primary") ? { type: "PRIMARY_KEY", name: normalizeIdentifier(rawName), columns } : { type: "UNIQUE", name: normalizeIdentifier(rawName), columns };
2253
+ return {
2254
+ kind: "ADD_CONSTRAINT",
2255
+ table: prefix.table,
2256
+ constraint
2257
+ };
2258
+ }
2259
+ const dropMatch = prefix.rest.match(/^drop\s+constraint\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
2260
+ if (dropMatch) {
2261
+ return {
2262
+ kind: "DROP_CONSTRAINT",
2263
+ table: prefix.table,
2264
+ name: normalizeIdentifier(dropMatch[1])
2265
+ };
2266
+ }
2267
+ return null;
2268
+ }
2269
+ function parseDropColumn(stmt) {
2270
+ const prefix = parseAlterTablePrefix(stmt);
2271
+ if (!prefix) {
2272
+ return null;
2273
+ }
2274
+ const match = prefix.rest.match(/^drop\s+column\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
2275
+ if (!match) {
2276
+ return null;
2277
+ }
2278
+ return {
2279
+ kind: "DROP_COLUMN",
2280
+ table: prefix.table,
2281
+ column: normalizeIdentifier(match[1])
2282
+ };
2283
+ }
2284
+ function parseDropTable(stmt) {
2285
+ const match = stmt.match(/^drop\s+table\s+(?:if\s+exists\s+)?([^\s]+)(?:\s+cascade)?$/i);
2286
+ if (!match) {
2287
+ return null;
2288
+ }
2289
+ return {
2290
+ kind: "DROP_TABLE",
2291
+ table: normalizeIdentifier(match[1])
2292
+ };
2293
+ }
2294
+ var PARSERS = [
2295
+ parseCreateTable,
2296
+ parseAlterTableAddColumn,
2297
+ parseAlterColumnType,
2298
+ parseSetDropNotNull,
2299
+ parseSetDropDefault,
2300
+ parseAddDropConstraint,
2301
+ parseDropColumn,
2302
+ parseDropTable
2303
+ ];
2304
+ function parseMigrationSql(sql) {
2305
+ const statements = splitSqlStatements(sql);
2306
+ const ops = [];
2307
+ const warnings = [];
2308
+ for (const raw of statements) {
2309
+ const stmt = removeSqlComments(raw).trim();
2310
+ if (!stmt) {
2311
+ continue;
2312
+ }
2313
+ let parsed = null;
2314
+ for (const parseFn of PARSERS) {
2315
+ parsed = parseFn(stmt);
2316
+ if (parsed) {
2317
+ break;
2318
+ }
2319
+ }
2320
+ if (parsed) {
2321
+ ops.push(parsed);
2322
+ } else {
2323
+ warnings.push({
2324
+ statement: stmt,
2325
+ reason: "Unsupported or unrecognized statement"
2326
+ });
2327
+ }
2328
+ }
2329
+ return { ops, warnings };
2330
+ }
2331
+
2332
+ // src/core/sql/schema-to-dsl.ts
2333
+ function renderColumn(column) {
2334
+ const parts = [column.name, column.type];
2335
+ if (column.primaryKey) {
2336
+ parts.push("pk");
2337
+ }
2338
+ if (column.unique) {
2339
+ parts.push("unique");
2340
+ }
2341
+ if (column.nullable === false && !column.primaryKey) {
2342
+ parts.push("not null");
2343
+ }
2344
+ if (column.default !== void 0 && column.default !== null) {
2345
+ parts.push(`default ${column.default}`);
2346
+ }
2347
+ return ` ${parts.join(" ")}`;
2348
+ }
2349
+ function schemaToDsl(schema) {
2350
+ const tableNames = Object.keys(schema.tables).sort((left, right) => left.localeCompare(right));
2351
+ const blocks = tableNames.map((tableName) => {
2352
+ const table = schema.tables[tableName];
2353
+ const lines = [`table ${table.name} {`];
2354
+ for (const column of table.columns) {
2355
+ lines.push(renderColumn(column));
2356
+ }
2357
+ lines.push("}");
2358
+ return lines.join("\n");
2359
+ });
2360
+ if (blocks.length === 0) {
2361
+ return "# SchemaForge schema definition\n";
2362
+ }
2363
+ return `# SchemaForge schema definition
2364
+
2365
+ ${blocks.join("\n\n")}
2366
+ `;
2367
+ }
2368
+
2369
+ // src/commands/import.ts
2370
+ function resolveConfigPath3(root, targetPath) {
2371
+ return import_path7.default.isAbsolute(targetPath) ? targetPath : import_path7.default.join(root, targetPath);
2372
+ }
2373
/**
 * `schema-forge import`: rebuild a DSL schema file from existing SQL migrations.
 *
 * @param inputPath path to a .sql file or a directory of migrations (relative
 *                  paths are resolved against the project root)
 * @param options   optional `{ out }` override for the output schema file
 * @throws Error when no .sql inputs are found at the resolved path
 */
async function runImport(inputPath, options = {}) {
  const root = getProjectRoot();
  const absoluteInputPath = resolveConfigPath3(root, inputPath);
  const inputs = await loadMigrationSqlInput(absoluteInputPath);
  if (inputs.length === 0) {
    throw new Error(`No .sql migration files found in: ${absoluteInputPath}`);
  }
  // Parse every file, accumulating supported DDL ops and per-statement
  // warnings; warnings are tagged with the source file's basename so the
  // user can locate the offending statement.
  const allOps = [];
  const parseWarnings = [];
  for (const input of inputs) {
    const result = parseMigrationSql(input.sql);
    allOps.push(...result.ops);
    parseWarnings.push(...result.warnings.map((item) => ({
      statement: `[${import_path7.default.basename(input.filePath)}] ${item.statement}`,
      reason: item.reason
    })));
  }
  // Replay the ops to materialize the final schema, then render it as DSL.
  const applied = applySqlOps(allOps);
  const dsl = schemaToDsl(applied.schema);
  // Output target precedence: --out flag, then config.schemaFile, then the
  // default schema file location.
  let targetPath = options.out;
  if (!targetPath) {
    const configPath = getConfigPath(root);
    if (await fileExists(configPath)) {
      const config = await readJsonFile(configPath, {});
      if (typeof config.schemaFile === "string" && config.schemaFile.length > 0) {
        targetPath = config.schemaFile;
      }
    }
  }
  const schemaPath = targetPath ? resolveConfigPath3(root, targetPath) : getSchemaFilePath(root);
  await writeTextFile(schemaPath, dsl);
  success(`Imported ${inputs.length} migration file(s) into ${schemaPath}`);
  info(`Parsed ${allOps.length} supported DDL operation(s)`);
  // Surface at most 10 warnings (parse-time plus apply-time) to keep the
  // console readable; the remainder is summarized as a count.
  const warnings = [...parseWarnings, ...applied.warnings];
  if (warnings.length > 0) {
    warning(`Ignored ${warnings.length} unsupported item(s)`);
    for (const item of warnings.slice(0, 10)) {
      warning(`${item.reason}: ${item.statement}`);
    }
    if (warnings.length > 10) {
      warning(`...and ${warnings.length - 10} more`);
    }
  }
}
2417
+
2418
+ // src/commands/init.ts
2419
+ var import_commander4 = require("commander");
2420
/**
 * `schema-forge init`: scaffold a new SchemaForge project in the current root.
 *
 * Creates schemaforge/ (schema.sf, config, state) plus the default
 * supabase/migrations output directory. Refuses to run — exiting with code 1 —
 * if any of those artifacts already exist, so it never clobbers a project.
 */
async function runInit() {
  const root = getProjectRoot();
  const schemaForgeDir = getSchemaForgeDir(root);
  if (await fileExists(schemaForgeDir)) {
    error("schemaforge/ directory already exists");
    error("Please remove it or run init in a different directory");
    process.exit(1);
  }
  const schemaFilePath = getSchemaFilePath(root);
  const configPath = getConfigPath(root);
  const statePath = getStatePath(root);
  // Each file is checked individually so the error message names the exact
  // pre-existing artifact.
  if (await fileExists(schemaFilePath)) {
    error(`${schemaFilePath} already exists`);
    process.exit(1);
  }
  if (await fileExists(configPath)) {
    error(`${configPath} already exists`);
    process.exit(1);
  }
  if (await fileExists(statePath)) {
    error(`${statePath} already exists`);
    process.exit(1);
  }
  info("Initializing schema project...");
  await ensureDir(schemaForgeDir);
  // Starter schema with a minimal example table.
  const schemaContent = `# SchemaForge schema definition
# Run: schema-forge generate

table users {
  id uuid pk
  created_at timestamptz default now()
}
`;
  await writeTextFile(schemaFilePath, schemaContent);
  success(`Created ${schemaFilePath}`);
  // Default config targets Supabase; sql.* defaults control the DEFAULT
  // expressions used when generating migrations.
  const config = {
    provider: "supabase",
    outputDir: "supabase/migrations",
    schemaFile: "schemaforge/schema.sf",
    stateFile: "schemaforge/state.json",
    sql: {
      uuidDefault: "gen_random_uuid()",
      timestampDefault: "now()"
    }
  };
  await writeJsonFile(configPath, config);
  success(`Created ${configPath}`);
  // Empty state v1: no tables have been generated yet.
  const state = {
    version: 1,
    tables: {}
  };
  await writeJsonFile(statePath, state);
  success(`Created ${statePath}`);
  const outputDir = "supabase/migrations";
  await ensureDir(outputDir);
  success(`Created ${outputDir}`);
  success("Project initialized successfully");
  info("Next steps:");
  info("  1. Edit schemaforge/schema.sf to define your schema");
  info("  2. Run: schema-forge generate");
}
2481
+
2482
+ // src/commands/validate.ts
2483
+ var import_commander5 = require("commander");
2484
+ var import_path8 = __toESM(require("path"));
2485
+
2486
+ // src/core/validate.ts
2487
// Canonicalize a SQL type spelling for comparison: lowercase, trimmed,
// inner whitespace collapsed to single spaces, and no spaces around
// parentheses or commas (e.g. "VARCHAR ( 255 )" -> "varchar(255)").
function normalizeColumnType2(type) {
  let normalized = type.toLowerCase().trim();
  normalized = normalized.replace(/\s+/g, " ");
  normalized = normalized.replace(/\s*\(\s*/g, "(");
  normalized = normalized.replace(/\s*,\s*/g, ",");
  return normalized.replace(/\s*\)\s*/g, ")");
}
2490
// Extract n from a type spelled as varchar(n); returns null for anything else.
// The normalization pipeline (same steps as normalizeColumnType2) is inlined
// so the regex always sees the canonical "varchar(n)" spelling.
function parseVarcharLength(type) {
  const normalized = type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
  const match = normalized.match(/^varchar\((\d+)\)$/);
  return match === null ? null : Number(match[1]);
}
2494
// Parse a numeric(p,s) type into { precision, scale }; null if the type is
// not exactly that shape. Normalization (same steps as normalizeColumnType2)
// is inlined so spacing and casing variants all match.
function parseNumericType(type) {
  const normalized = type.toLowerCase().trim().replace(/\s+/g, " ").replace(/\s*\(\s*/g, "(").replace(/\s*,\s*/g, ",").replace(/\s*\)\s*/g, ")");
  const match = normalized.match(/^numeric\((\d+),(\d+)\)$/);
  if (match === null) {
    return null;
  }
  const [, precision, scale] = match;
  return { precision: Number(precision), scale: Number(scale) };
}
2504
// Classify a column type change as a migration risk.
// The checks form an ordered ladder — uuid casts first, then int/bigint,
// then text/varchar, varchar(n) resizing, numeric(p,s) resizing — and the
// first matching rule wins. Widening is a warning; narrowing or a likely
// incompatible cast is an error; anything unrecognized falls through to a
// generic "compatibility unknown" warning.
function classifyTypeChange(from, to) {
  const fromType = normalizeColumnType2(from);
  const toType = normalizeColumnType2(to);
  const verdict = (severity, message) => ({ severity, message });
  const touchesUuid = fromType === "uuid" || toType === "uuid";
  if (touchesUuid && fromType !== toType) {
    return verdict("error", `Type changed from ${fromType} to ${toType} (likely incompatible cast)`);
  }
  if (fromType === "int" && toType === "bigint") {
    return verdict("warning", "Type widened from int to bigint");
  }
  if (fromType === "bigint" && toType === "int") {
    return verdict("error", "Type narrowed from bigint to int (likely incompatible cast)");
  }
  if (fromType === "text" && parseVarcharLength(toType) !== null) {
    return verdict("error", `Type changed from text to ${toType} (may truncate existing values)`);
  }
  if (parseVarcharLength(fromType) !== null && toType === "text") {
    return verdict("warning", "Type widened from varchar(n) to text");
  }
  const fromLength = parseVarcharLength(fromType);
  const toLength = parseVarcharLength(toType);
  if (fromLength !== null && toLength !== null) {
    if (toLength >= fromLength) {
      return verdict("warning", `Type widened from varchar(${fromLength}) to varchar(${toLength})`);
    }
    return verdict("error", `Type narrowed from varchar(${fromLength}) to varchar(${toLength})`);
  }
  const fromNumeric = parseNumericType(fromType);
  const toNumeric = parseNumericType(toType);
  // numeric comparisons only apply when the scale is unchanged; a scale
  // change falls through to the generic warning below.
  if (fromNumeric && toNumeric && fromNumeric.scale === toNumeric.scale) {
    if (toNumeric.precision >= fromNumeric.precision) {
      return verdict("warning", `Type widened from numeric(${fromNumeric.precision},${fromNumeric.scale}) to numeric(${toNumeric.precision},${toNumeric.scale})`);
    }
    return verdict("error", `Type narrowed from numeric(${fromNumeric.precision},${fromNumeric.scale}) to numeric(${toNumeric.precision},${toNumeric.scale})`);
  }
  return verdict("warning", `Type changed from ${fromType} to ${toType} (compatibility unknown)`);
}
2571
// Diff the previously generated state against the current schema and turn
// each destructive/risky operation into a finding. Dropped tables/columns
// are always errors; type changes are classified by classifyTypeChange;
// nullable -> NOT NULL is a warning (the migration may fail on NULL data).
// Operation kinds not listed here produce no finding.
function validateSchemaChanges(previousState, currentSchema) {
  const findings = [];
  const { operations } = diffSchemas(previousState, currentSchema);
  for (const op of operations) {
    if (op.kind === "drop_table") {
      findings.push({
        severity: "error",
        code: "DROP_TABLE",
        table: op.tableName,
        message: "Table removed"
      });
    } else if (op.kind === "drop_column") {
      findings.push({
        severity: "error",
        code: "DROP_COLUMN",
        table: op.tableName,
        column: op.columnName,
        message: "Column removed"
      });
    } else if (op.kind === "column_type_changed") {
      const { severity, message } = classifyTypeChange(op.fromType, op.toType);
      findings.push({
        severity,
        code: "ALTER_COLUMN_TYPE",
        table: op.tableName,
        column: op.columnName,
        from: normalizeColumnType2(op.fromType),
        to: normalizeColumnType2(op.toType),
        message
      });
    } else if (op.kind === "column_nullability_changed" && op.from && !op.to) {
      // Only the nullable -> NOT NULL direction is risky; the reverse is safe.
      findings.push({
        severity: "warning",
        code: "SET_NOT_NULL",
        table: op.tableName,
        column: op.columnName,
        message: "Column changed to NOT NULL (may fail if data contains NULLs)"
      });
    }
  }
  return findings;
}
2623
// Fold a flat findings list into the JSON report shape consumed by --json.
// The `severity` field is stripped from each entry because the report
// already separates errors from warnings structurally.
function toValidationReport(findings) {
  const dropSeverity = ({ severity: _unused, ...rest }) => rest;
  const errors = [];
  const warnings = [];
  for (const finding of findings) {
    if (finding.severity === "error") {
      errors.push(dropSeverity(finding));
    } else if (finding.severity === "warning") {
      warnings.push(dropSeverity(finding));
    }
  }
  return {
    hasErrors: errors.length > 0,
    hasWarnings: warnings.length > 0,
    errors,
    warnings
  };
}
2633
+
2634
+ // src/commands/validate.ts
2635
// Config keys that `validate` requires to be present as non-empty strings.
var REQUIRED_CONFIG_FIELDS3 = ["schemaFile", "stateFile"];
2636
// Resolve a config-supplied path against the project root; absolute paths
// are returned unchanged. (Bundler duplicate of resolveConfigPath3.)
function resolveConfigPath4(root, targetPath) {
  if (import_path8.default.isAbsolute(targetPath)) {
    return targetPath;
  }
  return import_path8.default.join(root, targetPath);
}
2639
/**
 * `schema-forge validate`: detect destructive or risky schema changes.
 *
 * Parses the current schema, validates it (wrapping failures in
 * SchemaValidationError so the CLI can exit with code 2), diffs it against
 * the saved state, and reports findings either as JSON (--json) or as a
 * human-readable summary grouped by table. process.exitCode is 1 when any
 * error-severity finding exists, 0 otherwise.
 *
 * @param options optional `{ json }` flag for structured output
 * @throws Error when the project is uninitialized or the config is invalid
 * @throws SchemaValidationError when the schema itself fails validation
 */
async function runValidate(options = {}) {
  const root = getProjectRoot();
  const configPath = getConfigPath(root);
  if (!await fileExists(configPath)) {
    throw new Error('SchemaForge project not initialized. Run "schema-forge init" first.');
  }
  const config = await readJsonFile(configPath, {});
  // Both schemaFile and stateFile must be non-empty strings before we can
  // resolve any paths.
  for (const field of REQUIRED_CONFIG_FIELDS3) {
    const value = config[field];
    if (!value || typeof value !== "string") {
      throw new Error(`Invalid config: '${field}' is required`);
    }
  }
  const schemaPath = resolveConfigPath4(root, config.schemaFile);
  const statePath = resolveConfigPath4(root, config.stateFile);
  const schemaSource = await readTextFile(schemaPath);
  const schema = parseSchema(schemaSource);
  // Re-wrap validation failures so handleError can map them to exit code 2.
  try {
    validateSchema(schema);
  } catch (error2) {
    if (error2 instanceof Error) {
      throw new SchemaValidationError(error2.message);
    }
    throw error2;
  }
  const previousState = await loadState(statePath);
  const findings = validateSchemaChanges(previousState, schema);
  const report = toValidationReport(findings);
  if (options.json) {
    console.log(JSON.stringify(report, null, 2));
    process.exitCode = report.hasErrors ? 1 : 0;
    return;
  }
  if (findings.length === 0) {
    success("No destructive changes detected");
    process.exitCode = 0;
    return;
  }
  console.log(
    `Validation Summary: ${report.errors.length} error(s), ${report.warnings.length} warning(s)`
  );
  // Group console output by table, preserving first-seen table order.
  const tableOrder = Array.from(new Set(findings.map((finding) => finding.table)));
  for (const tableName of tableOrder) {
    console.log(tableName);
    for (const finding of findings.filter((entry) => entry.table === tableName)) {
      const target = finding.column ? `${finding.table}.${finding.column}` : finding.table;
      const typeRange = finding.from && finding.to ? ` (${finding.from} -> ${finding.to})` : "";
      console.log(
        `${finding.severity.toUpperCase()}: ${finding.code} ${target}${typeRange} - ${finding.message}`
      );
    }
  }
  process.exitCode = report.hasErrors ? 1 : 0;
}
2693
+
2694
// src/cli.ts
var program = new import_commander6.Command();
// CLI metadata; the version string is read from the bundled package.json so
// `schema-forge --version` always matches the published package version.
program.name("schema-forge").description("CLI tool for schema management and SQL generation").version(package_default.version);
2697
// Central error handler for all subcommands: logs the message and sets the
// process exit code. Schema validation failures get the distinct exit code 2;
// every other Error (or non-Error throw) exits with 1.
function handleError(error2) {
  if (error2 instanceof SchemaValidationError) {
    error(error2.message);
    process.exitCode = 2;
    return;
  }
  const message = error2 instanceof Error ? error2.message : "Unexpected error";
  error(message);
  process.exitCode = 1;
}
2710
// Subcommand registrations. Every command follows the same pattern: delegate
// to its async runner and funnel any thrown error through handleError so
// exit codes stay consistent across commands.
program.command("init").description("Initialize a new schema project").action(async () => {
  try {
    await runInit();
  } catch (error2) {
    handleError(error2);
  }
});
program.command("generate").description("Generate SQL from schema files").option("--name <string>", "Schema name to generate").action(async (options) => {
  try {
    await runGenerate(options);
  } catch (error2) {
    handleError(error2);
  }
});
program.command("diff").description("Compare two schema versions and generate migration SQL").action(async () => {
  try {
    await runDiff();
  } catch (error2) {
    handleError(error2);
  }
});
program.command("import").description("Import schema from SQL migrations").argument("<path>", "Path to .sql file or migrations directory").option("--out <path>", "Output schema file path").action(async (targetPath, options) => {
  try {
    await runImport(targetPath, options);
  } catch (error2) {
    handleError(error2);
  }
});
program.command("validate").description("Detect destructive or risky schema changes").option("--json", "Output structured JSON").action(async (options) => {
  try {
    await runValidate(options);
  } catch (error2) {
    handleError(error2);
  }
});
// Kick off argument parsing; commander dispatches to the matching action.
program.parse(process.argv);