drizzle-kit 0.25.0-a5a41e0 → 0.25.0-a9aca5c

package/bin.cjs CHANGED
@@ -922,8 +922,8 @@ var require_hanji = __commonJS({
922
922
  };
923
923
  exports2.deferred = deferred;
924
924
  var Terminal = class {
925
- constructor(view2, stdin, stdout, closable) {
926
- this.view = view2;
925
+ constructor(view, stdin, stdout, closable) {
926
+ this.view = view;
927
927
  this.stdin = stdin;
928
928
  this.stdout = stdout;
929
929
  this.closable = closable;
@@ -961,7 +961,7 @@ var require_hanji = __commonJS({
961
961
  this.resolve({ status: "submitted", data: this.view.result() });
962
962
  return;
963
963
  }
964
- view2.input(str, key);
964
+ view.input(str, key);
965
965
  };
966
966
  this.stdin.on("keypress", keypress);
967
967
  this.view.attach(this);
@@ -1023,8 +1023,8 @@ var require_hanji = __commonJS({
1023
1023
  };
1024
1024
  exports2.TaskView = TaskView2;
1025
1025
  var TaskTerminal = class {
1026
- constructor(view2, stdout) {
1027
- this.view = view2;
1026
+ constructor(view, stdout) {
1027
+ this.view = view;
1028
1028
  this.stdout = stdout;
1029
1029
  this.text = "";
1030
1030
  this.view.attach(this);
@@ -1043,22 +1043,22 @@ var require_hanji = __commonJS({
1043
1043
  }
1044
1044
  };
1045
1045
  exports2.TaskTerminal = TaskTerminal;
1046
- function render9(view2) {
1046
+ function render9(view) {
1047
1047
  const { stdin, stdout, closable } = (0, readline_1.prepareReadLine)();
1048
- if (view2 instanceof Prompt3) {
1049
- const terminal = new Terminal(view2, stdin, stdout, closable);
1048
+ if (view instanceof Prompt3) {
1049
+ const terminal = new Terminal(view, stdin, stdout, closable);
1050
1050
  terminal.requestLayout();
1051
1051
  return terminal.result();
1052
1052
  }
1053
- stdout.write(`${view2}
1053
+ stdout.write(`${view}
1054
1054
  `);
1055
1055
  closable.close();
1056
1056
  return;
1057
1057
  }
1058
1058
  exports2.render = render9;
1059
- function renderWithTask6(view2, task) {
1059
+ function renderWithTask6(view, task) {
1060
1060
  return __awaiter2(this, void 0, void 0, function* () {
1061
- const terminal = new TaskTerminal(view2, process.stdout);
1061
+ const terminal = new TaskTerminal(view, process.stdout);
1062
1062
  terminal.requestLayout();
1063
1063
  const result = yield task;
1064
1064
  terminal.clear();
@@ -5706,7 +5706,7 @@ var init_vector = __esm({
5706
5706
  });
5707
5707
 
5708
5708
  // src/serializer/pgSchema.ts
5709
- var indexV2, columnV2, tableV2, enumSchemaV1, enumSchema, pgSchemaV2, references, columnV1, tableV1, pgSchemaV1, indexColumn, index2, indexV4, indexV5, indexV6, fk2, sequenceSchema, sequenceSquashed, columnV7, column2, columnSquashed, tableV32, compositePK2, uniqueConstraint2, viewWithOption, matViewWithOption, mergedViewWithOption, view, tableV42, tableV5, tableV6, tableV7, table2, schemaHash2, kitInternals2, pgSchemaInternalV3, pgSchemaInternalV4, pgSchemaInternalV5, pgSchemaInternalV6, pgSchemaExternal, pgSchemaInternalV7, pgSchemaInternal, tableSquashed2, tableSquashedV42, pgSchemaSquashedV4, pgSchemaSquashedV6, pgSchemaSquashed, pgSchemaV3, pgSchemaV4, pgSchemaV5, pgSchemaV6, pgSchemaV7, pgSchema, backwardCompatiblePgSchema, PgSquasher, squashPgScheme, dryPg;
5709
+ var indexV2, columnV2, tableV2, enumSchemaV1, enumSchema, pgSchemaV2, references, columnV1, tableV1, pgSchemaV1, indexColumn, index2, indexV4, indexV5, indexV6, fk2, sequenceSchema, sequenceSquashed, columnV7, column2, columnSquashed, tableV32, compositePK2, uniqueConstraint2, tableV42, tableV5, tableV6, tableV7, table2, schemaHash2, kitInternals2, pgSchemaInternalV3, pgSchemaInternalV4, pgSchemaInternalV5, pgSchemaInternalV6, pgSchemaExternal, pgSchemaInternalV7, pgSchemaInternal, tableSquashed2, tableSquashedV42, pgSchemaSquashedV4, pgSchemaSquashedV6, pgSchemaSquashed, pgSchemaV3, pgSchemaV4, pgSchemaV5, pgSchemaV6, pgSchemaV7, pgSchema, backwardCompatiblePgSchema, PgSquasher, squashPgScheme, dryPg;
5710
5710
  var init_pgSchema = __esm({
5711
5711
  "src/serializer/pgSchema.ts"() {
5712
5712
  "use strict";
@@ -5901,44 +5901,6 @@ var init_pgSchema = __esm({
5901
5901
  columns: stringType().array(),
5902
5902
  nullsNotDistinct: booleanType()
5903
5903
  }).strict();
5904
- viewWithOption = objectType({
5905
- checkOption: enumType(["local", "cascaded"]).optional(),
5906
- securityBarrier: booleanType().optional(),
5907
- securityInvoker: booleanType().optional()
5908
- }).strict();
5909
- matViewWithOption = objectType({
5910
- fillfactor: numberType().optional(),
5911
- toastTupleTarget: numberType().optional(),
5912
- parallelWorkers: numberType().optional(),
5913
- autovacuumEnabled: booleanType().optional(),
5914
- vacuumIndexCleanup: enumType(["auto", "off", "on"]).optional(),
5915
- vacuumTruncate: booleanType().optional(),
5916
- autovacuumVacuumThreshold: numberType().optional(),
5917
- autovacuumVacuumScaleFactor: numberType().optional(),
5918
- autovacuumVacuumCostDelay: numberType().optional(),
5919
- autovacuumVacuumCostLimit: numberType().optional(),
5920
- autovacuumFreezeMinAge: numberType().optional(),
5921
- autovacuumFreezeMaxAge: numberType().optional(),
5922
- autovacuumFreezeTableAge: numberType().optional(),
5923
- autovacuumMultixactFreezeMinAge: numberType().optional(),
5924
- autovacuumMultixactFreezeMaxAge: numberType().optional(),
5925
- autovacuumMultixactFreezeTableAge: numberType().optional(),
5926
- logAutovacuumMinDuration: numberType().optional(),
5927
- userCatalogTable: booleanType().optional()
5928
- }).strict();
5929
- mergedViewWithOption = viewWithOption.merge(matViewWithOption);
5930
- view = objectType({
5931
- name: stringType(),
5932
- schema: stringType(),
5933
- columns: recordType(stringType(), column2),
5934
- definition: stringType().optional(),
5935
- materialized: booleanType(),
5936
- with: mergedViewWithOption.optional(),
5937
- isExisting: booleanType(),
5938
- withNoData: booleanType().optional(),
5939
- using: stringType().optional(),
5940
- tablespace: stringType().optional()
5941
- }).strict();
5942
5904
  tableV42 = objectType({
5943
5905
  name: stringType(),
5944
5906
  schema: stringType(),
@@ -6073,7 +6035,6 @@ var init_pgSchema = __esm({
6073
6035
  tables: recordType(stringType(), table2),
6074
6036
  enums: recordType(stringType(), enumSchema),
6075
6037
  schemas: recordType(stringType(), stringType()),
6076
- views: recordType(stringType(), view).default({}),
6077
6038
  sequences: recordType(stringType(), sequenceSchema).default({}),
6078
6039
  _meta: objectType({
6079
6040
  schemas: recordType(stringType(), stringType()),
@@ -6118,7 +6079,6 @@ var init_pgSchema = __esm({
6118
6079
  tables: recordType(stringType(), tableSquashed2),
6119
6080
  enums: recordType(stringType(), enumSchema),
6120
6081
  schemas: recordType(stringType(), stringType()),
6121
- views: recordType(stringType(), view),
6122
6082
  sequences: recordType(stringType(), sequenceSquashed)
6123
6083
  }).strict();
6124
6084
  pgSchemaV3 = pgSchemaInternalV3.merge(schemaHash2);
@@ -6336,7 +6296,6 @@ var init_pgSchema = __esm({
6336
6296
  tables: mappedTables,
6337
6297
  enums: json.enums,
6338
6298
  schemas: json.schemas,
6339
- views: json.views,
6340
6299
  sequences: mappedSequences
6341
6300
  };
6342
6301
  };
@@ -6632,11 +6591,17 @@ function findAddedAndRemoved(columnNames1, columnNames2) {
6632
6591
  const removedColumns = columnNames1.filter((it) => !set2.has(it));
6633
6592
  return { addedColumns, removedColumns };
6634
6593
  }
6635
- var import_fs, import_path, import_url, copy, objectValues, assertV1OutFolder, dryJournal, prepareOutFolder, validatorForDialect, validateWithReport, prepareMigrationFolder, prepareMigrationMeta, schemaRenameKey, tableRenameKey, columnRenameKey, normaliseSQLiteUrl, normalisePGliteUrl;
6594
+ function getColumnCasing(column7, casing2) {
6595
+ if (!column7.name)
6596
+ return "";
6597
+ return !column7.keyAsName || casing2 === void 0 ? column7.name : casing2 === "camelCase" ? (0, import_casing.toCamelCase)(column7.name) : (0, import_casing.toSnakeCase)(column7.name);
6598
+ }
6599
+ var import_casing, import_fs, import_path, import_url, copy, objectValues, assertV1OutFolder, dryJournal, prepareOutFolder, validatorForDialect, validateWithReport, prepareMigrationFolder, prepareMigrationMeta, schemaRenameKey, tableRenameKey, columnRenameKey, normaliseSQLiteUrl, normalisePGliteUrl;
6636
6600
  var init_utils = __esm({
6637
6601
  "src/utils.ts"() {
6638
6602
  "use strict";
6639
6603
  init_source();
6604
+ import_casing = require("drizzle-orm/casing");
6640
6605
  import_fs = require("fs");
6641
6606
  import_path = require("path");
6642
6607
  import_url = require("url");
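Note on the hunk above: the bundle gains a getColumnCasing helper and an import of drizzle-orm/casing. A minimal TypeScript sketch of the same logic, not part of the diff; the column shape and CasingType alias below are illustrative, and toCamelCase/toSnakeCase are assumed to behave as exported by drizzle-orm/casing:

import { toCamelCase, toSnakeCase } from "drizzle-orm/casing";

type CasingType = "snake_case" | "camelCase";

// Mirrors getColumnCasing from the hunk above: a column keeps its explicit
// name unless the name was derived from the TS key (keyAsName) and a casing
// is configured, in which case the key is re-cased.
function getColumnCasing(
  column: { name: string; keyAsName: boolean },
  casing: CasingType | undefined,
): string {
  if (!column.name) return "";
  return !column.keyAsName || casing === undefined
    ? column.name
    : casing === "camelCase"
      ? toCamelCase(column.name)
      : toSnakeCase(column.name);
}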
@@ -6666,27 +6631,27 @@ var init_utils = __esm({
6666
6631
  process.exit(1);
6667
6632
  }
6668
6633
  };
6669
- dryJournal = (dialect7) => {
6634
+ dryJournal = (dialect4) => {
6670
6635
  return {
6671
6636
  version: snapshotVersion,
6672
- dialect: dialect7,
6637
+ dialect: dialect4,
6673
6638
  entries: []
6674
6639
  };
6675
6640
  };
6676
- prepareOutFolder = (out, dialect7) => {
6641
+ prepareOutFolder = (out, dialect4) => {
6677
6642
  const meta = (0, import_path.join)(out, "meta");
6678
6643
  const journalPath = (0, import_path.join)(meta, "_journal.json");
6679
6644
  if (!(0, import_fs.existsSync)((0, import_path.join)(out, "meta"))) {
6680
6645
  (0, import_fs.mkdirSync)(meta, { recursive: true });
6681
- (0, import_fs.writeFileSync)(journalPath, JSON.stringify(dryJournal(dialect7)));
6646
+ (0, import_fs.writeFileSync)(journalPath, JSON.stringify(dryJournal(dialect4)));
6682
6647
  }
6683
6648
  const journal = JSON.parse((0, import_fs.readFileSync)(journalPath).toString());
6684
6649
  const snapshots = (0, import_fs.readdirSync)(meta).filter((it) => !it.startsWith("_")).map((it) => (0, import_path.join)(meta, it));
6685
6650
  snapshots.sort();
6686
6651
  return { meta, snapshots, journal };
6687
6652
  };
6688
- validatorForDialect = (dialect7) => {
6689
- switch (dialect7) {
6653
+ validatorForDialect = (dialect4) => {
6654
+ switch (dialect4) {
6690
6655
  case "postgresql":
6691
6656
  return { validator: backwardCompatiblePgSchema, version: 7 };
6692
6657
  case "sqlite":
@@ -6697,8 +6662,8 @@ var init_utils = __esm({
6697
6662
  return { validator: backwardCompatibleMysqlSchema, version: 5 };
6698
6663
  }
6699
6664
  };
6700
- validateWithReport = (snapshots, dialect7) => {
6701
- const { validator: validator2, version: version3 } = validatorForDialect(dialect7);
6665
+ validateWithReport = (snapshots, dialect4) => {
6666
+ const { validator: validator2, version: version3 } = validatorForDialect(dialect4);
6702
6667
  const result = snapshots.reduce(
6703
6668
  (accum, it) => {
6704
6669
  const raw2 = JSON.parse((0, import_fs.readFileSync)(`./${it}`).toString());
@@ -6739,9 +6704,9 @@ var init_utils = __esm({
6739
6704
  );
6740
6705
  return result;
6741
6706
  };
6742
- prepareMigrationFolder = (outFolder = "drizzle", dialect7) => {
6743
- const { snapshots, journal } = prepareOutFolder(outFolder, dialect7);
6744
- const report = validateWithReport(snapshots, dialect7);
6707
+ prepareMigrationFolder = (outFolder = "drizzle", dialect4) => {
6708
+ const { snapshots, journal } = prepareOutFolder(outFolder, dialect4);
6709
+ const report = validateWithReport(snapshots, dialect4);
6745
6710
  if (report.nonLatest.length > 0) {
6746
6711
  console.log(
6747
6712
  report.nonLatest.map((it) => {
@@ -8852,9 +8817,6 @@ var init_utils3 = __esm({
8852
8817
  String.prototype.concatIf = function(it, condition) {
8853
8818
  return condition ? `${this}${it}` : String(this);
8854
8819
  };
8855
- String.prototype.snake_case = function() {
8856
- return this && this.length > 0 ? `${this.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`)}` : String(this);
8857
- };
8858
8820
  Array.prototype.random = function() {
8859
8821
  return this[~~(Math.random() * this.length)];
8860
8822
  };
@@ -11294,7 +11256,7 @@ var init_outputs = __esm({
11294
11256
  });
11295
11257
 
11296
11258
  // src/cli/validations/common.ts
11297
- var assertCollisions, sqliteDriversLiterals, postgresqlDriversLiterals, prefixes, prefix, sqliteDriver, postgresDriver, driver, configMigrations, configCommonSchema, casing, introspectParams, configIntrospectCliSchema, configGenerateSchema, configPushSchema, drivers, wrapParam;
11259
+ var assertCollisions, sqliteDriversLiterals, postgresqlDriversLiterals, prefixes, prefix, casingTypes, casingType, sqliteDriver, postgresDriver, driver, configMigrations, configCommonSchema, casing, introspectParams, configIntrospectCliSchema, configGenerateSchema, configPushSchema, drivers, wrapParam;
11298
11260
  var init_common = __esm({
11299
11261
  "src/cli/validations/common.ts"() {
11300
11262
  "use strict";
@@ -11338,6 +11300,8 @@ var init_common = __esm({
11338
11300
  {
11339
11301
  const _2 = "";
11340
11302
  }
11303
+ casingTypes = ["snake_case", "camelCase"];
11304
+ casingType = enumType(casingTypes);
11341
11305
  sqliteDriver = unionType(sqliteDriversLiterals);
11342
11306
  postgresDriver = unionType(postgresqlDriversLiterals);
11343
11307
  driver = unionType([sqliteDriver, postgresDriver]);
@@ -11356,7 +11320,8 @@ var init_common = __esm({
11356
11320
  tablesFilter: unionType([stringType(), stringType().array()]).optional(),
11357
11321
  schemaFilter: unionType([stringType(), stringType().array()]).default(["public"]),
11358
11322
  migrations: configMigrations,
11359
- dbCredentials: anyType().optional()
11323
+ dbCredentials: anyType().optional(),
11324
+ casing: casingType.optional()
11360
11325
  }).passthrough();
11361
11326
  casing = unionType([literalType("camel"), literalType("preserve")]).default(
11362
11327
  "camel"
@@ -11438,6 +11403,7 @@ var init_cli = __esm({
11438
11403
  }).strict();
11439
11404
  pushParams = objectType({
11440
11405
  dialect: dialect3,
11406
+ casing: casingType.optional(),
11441
11407
  schema: unionType([stringType(), stringType().array()]),
11442
11408
  tablesFilter: unionType([stringType(), stringType().array()]).optional(),
11443
11409
  schemaFilter: unionType([stringType(), stringType().array()]).optional().default(["public"]),
@@ -12551,11 +12517,11 @@ var require_node2 = __commonJS({
12551
12517
  };
12552
12518
  var require_base64 = __commonJS3((exports3) => {
12553
12519
  var intToCharMap = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");
12554
- exports3.encode = function(number2) {
12555
- if (0 <= number2 && number2 < intToCharMap.length) {
12556
- return intToCharMap[number2];
12520
+ exports3.encode = function(number3) {
12521
+ if (0 <= number3 && number3 < intToCharMap.length) {
12522
+ return intToCharMap[number3];
12557
12523
  }
12558
- throw new TypeError("Must be between 0 and 63: " + number2);
12524
+ throw new TypeError("Must be between 0 and 63: " + number3);
12559
12525
  };
12560
12526
  exports3.decode = function(charCode) {
12561
12527
  var bigA = 65;
@@ -17454,11 +17420,11 @@ var init_utils4 = __esm({
17454
17420
  prepareGenerateConfig = async (options, from) => {
17455
17421
  var _a;
17456
17422
  const config = from === "config" ? await drizzleConfigFromFile(options.config) : options;
17457
- const { schema: schema5, out, breakpoints, dialect: dialect7, driver: driver2 } = config;
17458
- if (!schema5 || !dialect7) {
17423
+ const { schema: schema5, out, breakpoints, dialect: dialect4, driver: driver2, casing: casing2 } = config;
17424
+ if (!schema5 || !dialect4) {
17459
17425
  console.log(error("Please provide required params:"));
17460
17426
  console.log(wrapParam("schema", schema5));
17461
- console.log(wrapParam("dialect", dialect7));
17427
+ console.log(wrapParam("dialect", dialect4));
17462
17428
  console.log(wrapParam("out", out, true));
17463
17429
  process.exit(1);
17464
17430
  }
@@ -17469,14 +17435,15 @@ var init_utils4 = __esm({
17469
17435
  }
17470
17436
  const prefix2 = ("migrations" in config ? (_a = config.migrations) == null ? void 0 : _a.prefix : options.prefix) || "index";
17471
17437
  return {
17472
- dialect: dialect7,
17438
+ dialect: dialect4,
17473
17439
  name: options.name,
17474
17440
  custom: options.custom || false,
17475
17441
  prefix: prefix2,
17476
17442
  breakpoints: breakpoints || true,
17477
17443
  schema: schema5,
17478
17444
  out: out || "drizzle",
17479
- bundle: driver2 === "expo"
17445
+ bundle: driver2 === "expo",
17446
+ casing: casing2
17480
17447
  };
17481
17448
  };
17482
17449
  flattenDatabaseCredentials = (config) => {
@@ -17543,6 +17510,7 @@ var init_utils4 = __esm({
17543
17510
  verbose: config.verbose ?? false,
17544
17511
  force: options.force ?? false,
17545
17512
  credentials: parsed2.data,
17513
+ casing: config.casing,
17546
17514
  tablesFilter,
17547
17515
  schemasFilter
17548
17516
  };
@@ -17560,6 +17528,7 @@ var init_utils4 = __esm({
17560
17528
  verbose: config.verbose ?? false,
17561
17529
  force: options.force ?? false,
17562
17530
  credentials: parsed2.data,
17531
+ casing: config.casing,
17563
17532
  tablesFilter,
17564
17533
  schemasFilter
17565
17534
  };
@@ -17577,6 +17546,7 @@ var init_utils4 = __esm({
17577
17546
  verbose: config.verbose ?? false,
17578
17547
  force: options.force ?? false,
17579
17548
  credentials: parsed2.data,
17549
+ casing: config.casing,
17580
17550
  tablesFilter,
17581
17551
  schemasFilter
17582
17552
  };
@@ -17594,6 +17564,7 @@ var init_utils4 = __esm({
17594
17564
  verbose: config.verbose ?? false,
17595
17565
  force: options.force ?? false,
17596
17566
  credentials: parsed2.data,
17567
+ casing: config.casing,
17597
17568
  tablesFilter,
17598
17569
  schemasFilter
17599
17570
  };
@@ -17612,11 +17583,11 @@ var init_utils4 = __esm({
17612
17583
  process.exit(1);
17613
17584
  }
17614
17585
  const config = parsed.data;
17615
- const dialect7 = config.dialect;
17586
+ const dialect4 = config.dialect;
17616
17587
  const tablesFilterConfig = config.tablesFilter;
17617
17588
  const tablesFilter = tablesFilterConfig ? typeof tablesFilterConfig === "string" ? [tablesFilterConfig] : tablesFilterConfig : [];
17618
17589
  if (config.extensionsFilters) {
17619
- if (config.extensionsFilters.includes("postgis") && dialect7 === "postgresql") {
17590
+ if (config.extensionsFilters.includes("postgis") && dialect4 === "postgresql") {
17620
17591
  tablesFilter.push(
17621
17592
  ...["!geography_columns", "!geometry_columns", "!spatial_ref_sys"]
17622
17593
  );
@@ -17624,7 +17595,7 @@ var init_utils4 = __esm({
17624
17595
  }
17625
17596
  const schemasFilterConfig = config.schemaFilter;
17626
17597
  const schemasFilter = schemasFilterConfig ? typeof schemasFilterConfig === "string" ? [schemasFilterConfig] : schemasFilterConfig : [];
17627
- if (dialect7 === "postgresql") {
17598
+ if (dialect4 === "postgresql") {
17628
17599
  const parsed2 = postgresCredentials.safeParse(config);
17629
17600
  if (!parsed2.success) {
17630
17601
  printConfigConnectionIssues3(config);
@@ -17641,7 +17612,7 @@ var init_utils4 = __esm({
17641
17612
  prefix: ((_a = config.migrations) == null ? void 0 : _a.prefix) || "index"
17642
17613
  };
17643
17614
  }
17644
- if (dialect7 === "mysql") {
17615
+ if (dialect4 === "mysql") {
17645
17616
  const parsed2 = mysqlCredentials.safeParse(config);
17646
17617
  if (!parsed2.success) {
17647
17618
  printConfigConnectionIssues2(config);
@@ -17658,7 +17629,7 @@ var init_utils4 = __esm({
17658
17629
  prefix: ((_b = config.migrations) == null ? void 0 : _b.prefix) || "index"
17659
17630
  };
17660
17631
  }
17661
- if (dialect7 === "sqlite") {
17632
+ if (dialect4 === "sqlite") {
17662
17633
  const parsed2 = sqliteCredentials.safeParse(config);
17663
17634
  if (!parsed2.success) {
17664
17635
  printConfigConnectionIssues4(config, "pull");
@@ -17675,14 +17646,14 @@ var init_utils4 = __esm({
17675
17646
  prefix: ((_c = config.migrations) == null ? void 0 : _c.prefix) || "index"
17676
17647
  };
17677
17648
  }
17678
- if (dialect7 === "turso") {
17649
+ if (dialect4 === "turso") {
17679
17650
  const parsed2 = libSQLCredentials.safeParse(config);
17680
17651
  if (!parsed2.success) {
17681
17652
  printConfigConnectionIssues(config, "pull");
17682
17653
  process.exit(1);
17683
17654
  }
17684
17655
  return {
17685
- dialect: dialect7,
17656
+ dialect: dialect4,
17686
17657
  out: config.out,
17687
17658
  breakpoints: config.breakpoints,
17688
17659
  casing: config.casing,
@@ -17692,7 +17663,7 @@ var init_utils4 = __esm({
17692
17663
  prefix: ((_d = config.migrations) == null ? void 0 : _d.prefix) || "index"
17693
17664
  };
17694
17665
  }
17695
- assertUnreachable(dialect7);
17666
+ assertUnreachable(dialect4);
17696
17667
  };
17697
17668
  prepareStudioConfig = async (options) => {
17698
17669
  const params = studioCliParams.parse(options);
@@ -17709,9 +17680,9 @@ var init_utils4 = __esm({
17709
17680
  process.exit(1);
17710
17681
  }
17711
17682
  const { host, port } = params;
17712
- const { dialect: dialect7, schema: schema5 } = result.data;
17683
+ const { dialect: dialect4, schema: schema5 } = result.data;
17713
17684
  const flattened = flattenDatabaseCredentials(config);
17714
- if (dialect7 === "postgresql") {
17685
+ if (dialect4 === "postgresql") {
17715
17686
  const parsed = postgresCredentials.safeParse(flattened);
17716
17687
  if (!parsed.success) {
17717
17688
  printConfigConnectionIssues3(flattened);
@@ -17719,14 +17690,14 @@ var init_utils4 = __esm({
17719
17690
  }
17720
17691
  const credentials2 = parsed.data;
17721
17692
  return {
17722
- dialect: dialect7,
17693
+ dialect: dialect4,
17723
17694
  schema: schema5,
17724
17695
  host,
17725
17696
  port,
17726
17697
  credentials: credentials2
17727
17698
  };
17728
17699
  }
17729
- if (dialect7 === "mysql") {
17700
+ if (dialect4 === "mysql") {
17730
17701
  const parsed = mysqlCredentials.safeParse(flattened);
17731
17702
  if (!parsed.success) {
17732
17703
  printConfigConnectionIssues2(flattened);
@@ -17734,14 +17705,14 @@ var init_utils4 = __esm({
17734
17705
  }
17735
17706
  const credentials2 = parsed.data;
17736
17707
  return {
17737
- dialect: dialect7,
17708
+ dialect: dialect4,
17738
17709
  schema: schema5,
17739
17710
  host,
17740
17711
  port,
17741
17712
  credentials: credentials2
17742
17713
  };
17743
17714
  }
17744
- if (dialect7 === "sqlite") {
17715
+ if (dialect4 === "sqlite") {
17745
17716
  const parsed = sqliteCredentials.safeParse(flattened);
17746
17717
  if (!parsed.success) {
17747
17718
  printConfigConnectionIssues4(flattened, "studio");
@@ -17749,14 +17720,14 @@ var init_utils4 = __esm({
17749
17720
  }
17750
17721
  const credentials2 = parsed.data;
17751
17722
  return {
17752
- dialect: dialect7,
17723
+ dialect: dialect4,
17753
17724
  schema: schema5,
17754
17725
  host,
17755
17726
  port,
17756
17727
  credentials: credentials2
17757
17728
  };
17758
17729
  }
17759
- if (dialect7 === "turso") {
17730
+ if (dialect4 === "turso") {
17760
17731
  const parsed = libSQLCredentials.safeParse(flattened);
17761
17732
  if (!parsed.success) {
17762
17733
  printConfigConnectionIssues(flattened, "studio");
@@ -17764,14 +17735,14 @@ var init_utils4 = __esm({
17764
17735
  }
17765
17736
  const credentials2 = parsed.data;
17766
17737
  return {
17767
- dialect: dialect7,
17738
+ dialect: dialect4,
17768
17739
  schema: schema5,
17769
17740
  host,
17770
17741
  port,
17771
17742
  credentials: credentials2
17772
17743
  };
17773
17744
  }
17774
- assertUnreachable(dialect7);
17745
+ assertUnreachable(dialect4);
17775
17746
  };
17776
17747
  migrateConfig = objectType({
17777
17748
  dialect: dialect3,
@@ -17786,10 +17757,10 @@ var init_utils4 = __esm({
17786
17757
  console.log(wrapParam("dialect", config.dialect));
17787
17758
  process.exit(1);
17788
17759
  }
17789
- const { dialect: dialect7, out } = parsed.data;
17760
+ const { dialect: dialect4, out } = parsed.data;
17790
17761
  const { schema: schema5, table: table4 } = parsed.data.migrations || {};
17791
17762
  const flattened = flattenDatabaseCredentials(config);
17792
- if (dialect7 === "postgresql") {
17763
+ if (dialect4 === "postgresql") {
17793
17764
  const parsed2 = postgresCredentials.safeParse(flattened);
17794
17765
  if (!parsed2.success) {
17795
17766
  printConfigConnectionIssues3(flattened);
@@ -17797,14 +17768,14 @@ var init_utils4 = __esm({
17797
17768
  }
17798
17769
  const credentials2 = parsed2.data;
17799
17770
  return {
17800
- dialect: dialect7,
17771
+ dialect: dialect4,
17801
17772
  out,
17802
17773
  credentials: credentials2,
17803
17774
  schema: schema5,
17804
17775
  table: table4
17805
17776
  };
17806
17777
  }
17807
- if (dialect7 === "mysql") {
17778
+ if (dialect4 === "mysql") {
17808
17779
  const parsed2 = mysqlCredentials.safeParse(flattened);
17809
17780
  if (!parsed2.success) {
17810
17781
  printConfigConnectionIssues2(flattened);
@@ -17812,14 +17783,14 @@ var init_utils4 = __esm({
17812
17783
  }
17813
17784
  const credentials2 = parsed2.data;
17814
17785
  return {
17815
- dialect: dialect7,
17786
+ dialect: dialect4,
17816
17787
  out,
17817
17788
  credentials: credentials2,
17818
17789
  schema: schema5,
17819
17790
  table: table4
17820
17791
  };
17821
17792
  }
17822
- if (dialect7 === "sqlite") {
17793
+ if (dialect4 === "sqlite") {
17823
17794
  const parsed2 = sqliteCredentials.safeParse(flattened);
17824
17795
  if (!parsed2.success) {
17825
17796
  printConfigConnectionIssues4(flattened, "migrate");
@@ -17827,14 +17798,14 @@ var init_utils4 = __esm({
17827
17798
  }
17828
17799
  const credentials2 = parsed2.data;
17829
17800
  return {
17830
- dialect: dialect7,
17801
+ dialect: dialect4,
17831
17802
  out,
17832
17803
  credentials: credentials2,
17833
17804
  schema: schema5,
17834
17805
  table: table4
17835
17806
  };
17836
17807
  }
17837
- if (dialect7 === "turso") {
17808
+ if (dialect4 === "turso") {
17838
17809
  const parsed2 = libSQLCredentials.safeParse(flattened);
17839
17810
  if (!parsed2.success) {
17840
17811
  printConfigConnectionIssues(flattened, "migrate");
@@ -17842,14 +17813,14 @@ var init_utils4 = __esm({
17842
17813
  }
17843
17814
  const credentials2 = parsed2.data;
17844
17815
  return {
17845
- dialect: dialect7,
17816
+ dialect: dialect4,
17846
17817
  out,
17847
17818
  credentials: credentials2,
17848
17819
  schema: schema5,
17849
17820
  table: table4
17850
17821
  };
17851
17822
  }
17852
- assertUnreachable(dialect7);
17823
+ assertUnreachable(dialect4);
17853
17824
  };
17854
17825
  drizzleConfigFromFile = async (configPath) => {
17855
17826
  const prefix2 = process.env.TEST_CONFIG_PATH_PREFIX || "";
@@ -17878,6 +17849,7 @@ var init_utils4 = __esm({
17878
17849
  unregister();
17879
17850
  const res = configCommonSchema.safeParse(content);
17880
17851
  if (!res.success) {
17852
+ console.log(res.error);
17881
17853
  if (!("dialect" in content)) {
17882
17854
  console.log(error("Please specify 'dialect' param in config file"));
17883
17855
  }
@@ -17950,7 +17922,7 @@ function clearDefaults(defaultValue, collate) {
17950
17922
  return `(${resultDefault})`;
17951
17923
  }
17952
17924
  }
17953
- var import_drizzle_orm2, import_drizzle_orm3, import_mysql_core2, import_mysql_core3, dialect4, indexName, generateMySqlSnapshot, fromDatabase;
17925
+ var import_drizzle_orm2, import_drizzle_orm3, import_mysql_core2, import_mysql_core3, indexName, generateMySqlSnapshot, fromDatabase;
17954
17926
  var init_mysqlSerializer = __esm({
17955
17927
  "src/serializer/mysqlSerializer.ts"() {
17956
17928
  "use strict";
@@ -17960,12 +17932,13 @@ var init_mysqlSerializer = __esm({
17960
17932
  import_mysql_core2 = require("drizzle-orm/mysql-core");
17961
17933
  import_mysql_core3 = require("drizzle-orm/mysql-core");
17962
17934
  init_outputs();
17935
+ init_utils();
17963
17936
  init_serializer();
17964
- dialect4 = new import_mysql_core2.MySqlDialect();
17965
17937
  indexName = (tableName, columns) => {
17966
17938
  return `${tableName}_${columns.join("_")}_index`;
17967
17939
  };
17968
- generateMySqlSnapshot = (tables) => {
17940
+ generateMySqlSnapshot = (tables, casing2) => {
17941
+ const dialect4 = new import_mysql_core2.MySqlDialect({ casing: casing2 });
17969
17942
  const result = {};
17970
17943
  const internal = { tables: {}, indexes: {} };
17971
17944
  for (const table4 of tables) {
@@ -17984,12 +17957,13 @@ var init_mysqlSerializer = __esm({
17984
17957
  const primaryKeysObject = {};
17985
17958
  const uniqueConstraintObject = {};
17986
17959
  columns.forEach((column7) => {
17960
+ const name = getColumnCasing(column7, casing2);
17987
17961
  const notNull = column7.notNull;
17988
17962
  const sqlTypeLowered = column7.getSQLType().toLowerCase();
17989
17963
  const autoIncrement = typeof column7.autoIncrement === "undefined" ? false : column7.autoIncrement;
17990
17964
  const generated = column7.generated;
17991
17965
  const columnToSet = {
17992
- name: column7.name,
17966
+ name,
17993
17967
  type: column7.getSQLType(),
17994
17968
  primaryKey: false,
17995
17969
  // If field is autoincrement it's notNull by default
@@ -18003,9 +17977,9 @@ var init_mysqlSerializer = __esm({
18003
17977
  } : void 0
18004
17978
  };
18005
17979
  if (column7.primary) {
18006
- primaryKeysObject[`${tableName}_${column7.name}`] = {
18007
- name: `${tableName}_${column7.name}`,
18008
- columns: [column7.name]
17980
+ primaryKeysObject[`${tableName}_${name}`] = {
17981
+ name: `${tableName}_${name}`,
17982
+ columns: [name]
18009
17983
  };
18010
17984
  }
18011
17985
  if (column7.isUnique) {
@@ -18019,7 +17993,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18019
17993
  The unique constraint ${source_default.underline.blue(
18020
17994
  column7.uniqueName
18021
17995
  )} on the ${source_default.underline.blue(
18022
- column7.name
17996
+ name
18023
17997
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
18024
17998
  existingUnique.columns.join(",")
18025
17999
  )} columns
@@ -18034,7 +18008,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18034
18008
  }
18035
18009
  if (column7.default !== void 0) {
18036
18010
  if ((0, import_drizzle_orm2.is)(column7.default, import_drizzle_orm3.SQL)) {
18037
- columnToSet.default = sqlToStr(column7.default);
18011
+ columnToSet.default = sqlToStr(column7.default, casing2);
18038
18012
  } else {
18039
18013
  if (typeof column7.default === "string") {
18040
18014
  columnToSet.default = `'${column7.default}'`;
@@ -18056,20 +18030,27 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18056
18030
  }
18057
18031
  }
18058
18032
  }
18059
- columnsObject[column7.name] = columnToSet;
18033
+ columnsObject[name] = columnToSet;
18060
18034
  });
18061
18035
  primaryKeys.map((pk) => {
18062
- const columnNames = pk.columns.map((c) => c.name);
18063
- primaryKeysObject[pk.getName()] = {
18064
- name: pk.getName(),
18036
+ const originalColumnNames = pk.columns.map((c) => c.name);
18037
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing2));
18038
+ let name = pk.getName();
18039
+ if (casing2 !== void 0) {
18040
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
18041
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
18042
+ }
18043
+ }
18044
+ primaryKeysObject[name] = {
18045
+ name,
18065
18046
  columns: columnNames
18066
18047
  };
18067
18048
  for (const column7 of pk.columns) {
18068
- columnsObject[column7.name].notNull = true;
18049
+ columnsObject[getColumnCasing(column7, casing2)].notNull = true;
18069
18050
  }
18070
18051
  });
18071
18052
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
18072
- const columnNames = unq.columns.map((c) => c.name);
18053
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
18073
18054
  const name = unq.name ?? (0, import_mysql_core2.uniqueKeyName)(table4, columnNames);
18074
18055
  const existingUnique = uniqueConstraintObject[name];
18075
18056
  if (typeof existingUnique !== "undefined") {
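The primary-key hunk above (and the foreign-key hunk that follows) re-derive generated constraint names by substituting each original column name with its cased form. A small sketch of that rename pass; the function name and the example names are illustrative:

// Each original column name is replaced in the generated constraint name
// with its cased counterpart, mirroring the loop in the hunk above.
function recaseConstraintName(
  name: string,
  originalColumnNames: string[],
  casedColumnNames: string[],
): string {
  for (let i = 0; i < originalColumnNames.length; i++) {
    name = name.replace(originalColumnNames[i], casedColumnNames[i]);
  }
  return name;
}

// Assumed example: with casing "snake_case", a generated name like
// "users_firstName_pk" would become "users_first_name_pk":
// recaseConstraintName("users_firstName_pk", ["firstName"], ["first_name"]);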
@@ -18097,15 +18078,25 @@ The unique constraint ${source_default.underline.blue(
18097
18078
  };
18098
18079
  });
18099
18080
  const fks = foreignKeys.map((fk4) => {
18100
- const name = fk4.getName();
18101
18081
  const tableFrom = tableName;
18102
18082
  const onDelete = fk4.onDelete ?? "no action";
18103
18083
  const onUpdate = fk4.onUpdate ?? "no action";
18104
18084
  const reference = fk4.reference();
18105
18085
  const referenceFT = reference.foreignTable;
18106
18086
  const tableTo = (0, import_drizzle_orm2.getTableName)(referenceFT);
18107
- const columnsFrom = reference.columns.map((it) => it.name);
18108
- const columnsTo = reference.foreignColumns.map((it) => it.name);
18087
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
18088
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
18089
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
18090
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
18091
+ let name = fk4.getName();
18092
+ if (casing2 !== void 0) {
18093
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
18094
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
18095
+ }
18096
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
18097
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
18098
+ }
18099
+ }
18109
18100
  return {
18110
18101
  name,
18111
18102
  tableFrom,
@@ -18145,7 +18136,7 @@ The unique constraint ${source_default.underline.blue(
18145
18136
  }
18146
18137
  return sql;
18147
18138
  } else {
18148
- return `${it.name}`;
18139
+ return `${getColumnCasing(it, casing2)}`;
18149
18140
  }
18150
18141
  });
18151
18142
  if (value.config.unique) {
@@ -18508,8 +18499,6 @@ var init_pgImports = __esm({
18508
18499
  const enums = [];
18509
18500
  const schemas = [];
18510
18501
  const sequences = [];
18511
- const views = [];
18512
- const matViews = [];
18513
18502
  const i0values = Object.values(exports2);
18514
18503
  i0values.forEach((t2) => {
18515
18504
  if ((0, import_pg_core.isPgEnum)(t2)) {
@@ -18522,25 +18511,17 @@ var init_pgImports = __esm({
18522
18511
  if ((0, import_drizzle_orm4.is)(t2, import_pg_core.PgSchema)) {
18523
18512
  schemas.push(t2);
18524
18513
  }
18525
- if ((0, import_pg_core.isPgView)(t2)) {
18526
- views.push(t2);
18527
- }
18528
- if ((0, import_pg_core.isPgMaterializedView)(t2)) {
18529
- matViews.push(t2);
18530
- }
18531
18514
  if ((0, import_pg_core.isPgSequence)(t2)) {
18532
18515
  sequences.push(t2);
18533
18516
  }
18534
18517
  });
18535
- return { tables, enums, schemas, sequences, views, matViews };
18518
+ return { tables, enums, schemas, sequences };
18536
18519
  };
18537
18520
  prepareFromPgImports = async (imports) => {
18538
- const tables = [];
18539
- const enums = [];
18540
- const schemas = [];
18541
- const sequences = [];
18542
- const views = [];
18543
- const matViews = [];
18521
+ let tables = [];
18522
+ let enums = [];
18523
+ let schemas = [];
18524
+ let sequences = [];
18544
18525
  const { unregister } = await safeRegister();
18545
18526
  for (let i2 = 0; i2 < imports.length; i2++) {
18546
18527
  const it = imports[i2];
@@ -18550,11 +18531,9 @@ var init_pgImports = __esm({
18550
18531
  enums.push(...prepared.enums);
18551
18532
  schemas.push(...prepared.schemas);
18552
18533
  sequences.push(...prepared.sequences);
18553
- views.push(...prepared.views);
18554
- matViews.push(...prepared.matViews);
18555
18534
  }
18556
18535
  unregister();
18557
- return { tables: Array.from(new Set(tables)), enums, schemas, sequences, views, matViews };
18536
+ return { tables: Array.from(new Set(tables)), enums, schemas, sequences };
18558
18537
  };
18559
18538
  }
18560
18539
  });
@@ -18602,7 +18581,7 @@ function buildArrayString(array, sqlType) {
18602
18581
  }).join(",");
18603
18582
  return `{${values}}`;
18604
18583
  }
18605
- var import_drizzle_orm5, import_pg_core2, import_pg_core3, dialect5, indexName2, generatePgSnapshot, trimChar, fromDatabase2, defaultForColumn;
18584
+ var import_drizzle_orm5, import_pg_core2, import_pg_core3, indexName2, generatePgSnapshot, trimChar, fromDatabase2, defaultForColumn;
18606
18585
  var init_pgSerializer = __esm({
18607
18586
  "src/serializer/pgSerializer.ts"() {
18608
18587
  "use strict";
@@ -18614,14 +18593,13 @@ var init_pgSerializer = __esm({
18614
18593
  init_outputs();
18615
18594
  init_utils();
18616
18595
  init_serializer();
18617
- dialect5 = new import_pg_core2.PgDialect();
18618
18596
  indexName2 = (tableName, columns) => {
18619
18597
  return `${tableName}_${columns.join("_")}_index`;
18620
18598
  };
18621
- generatePgSnapshot = (tables, enums, schemas, sequences, views, matViews, schemaFilter) => {
18622
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
18599
+ generatePgSnapshot = (tables, enums, schemas, sequences, casing2, schemaFilter) => {
18600
+ var _a, _b, _c, _d, _e, _f;
18601
+ const dialect4 = new import_pg_core2.PgDialect({ casing: casing2 });
18623
18602
  const result = {};
18624
- const resultViews = {};
18625
18603
  const sequencesToReturn = {};
18626
18604
  const indexesInSchema = {};
18627
18605
  for (const table4 of tables) {
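In the hunk above, generatePgSnapshot now receives the configured casing and constructs its PgDialect per call instead of reusing the module-level dialect5, so sqlToQuery renders generated columns, index expressions, and partial-index WHERE clauses with the cased names. A minimal sketch, assuming the standard drizzle-orm/pg-core import path used by the bundle:

import { PgDialect } from "drizzle-orm/pg-core";

type CasingType = "snake_case" | "camelCase";

// One dialect per snapshot run, carrying the configured casing so that
// dialect.sqlToQuery(...) emits re-cased column references.
function dialectForSnapshot(casing: CasingType | undefined): PgDialect {
  return new PgDialect({ casing });
}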
@@ -18645,6 +18623,7 @@ var init_pgSerializer = __esm({
18645
18623
  const uniqueConstraintObject = {};
18646
18624
  columns.forEach((column7) => {
18647
18625
  var _a2, _b2, _c2, _d2, _e2, _f2;
18626
+ const name = getColumnCasing(column7, casing2);
18648
18627
  const notNull = column7.notNull;
18649
18628
  const primaryKey = column7.primary;
18650
18629
  const sqlTypeLowered = column7.getSQLType().toLowerCase();
@@ -18657,18 +18636,18 @@ var init_pgSerializer = __esm({
18657
18636
  const startWith = stringFromIdentityProperty((_d2 = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _d2.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue);
18658
18637
  const cache = stringFromIdentityProperty((_e2 = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _e2.cache) ?? "1";
18659
18638
  const columnToSet = {
18660
- name: column7.name,
18639
+ name,
18661
18640
  type: column7.getSQLType(),
18662
18641
  typeSchema,
18663
18642
  primaryKey,
18664
18643
  notNull,
18665
18644
  generated: generated ? {
18666
- as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect5.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect5.sqlToQuery(generated.as()).sql : generated.as,
18645
+ as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect4.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect4.sqlToQuery(generated.as()).sql : generated.as,
18667
18646
  type: "stored"
18668
18647
  } : void 0,
18669
18648
  identity: identity ? {
18670
18649
  type: identity.type,
18671
- name: identity.sequenceName ?? `${tableName}_${column7.name}_seq`,
18650
+ name: identity.sequenceName ?? `${tableName}_${name}_seq`,
18672
18651
  schema: schema5 ?? "public",
18673
18652
  increment,
18674
18653
  startWith,
@@ -18689,7 +18668,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18689
18668
  The unique constraint ${source_default.underline.blue(
18690
18669
  column7.uniqueName
18691
18670
  )} on the ${source_default.underline.blue(
18692
- column7.name
18671
+ name
18693
18672
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
18694
18673
  existingUnique.columns.join(",")
18695
18674
  )} columns
@@ -18705,7 +18684,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18705
18684
  }
18706
18685
  if (column7.default !== void 0) {
18707
18686
  if ((0, import_drizzle_orm5.is)(column7.default, import_drizzle_orm5.SQL)) {
18708
- columnToSet.default = sqlToStr(column7.default);
18687
+ columnToSet.default = sqlToStr(column7.default, casing2);
18709
18688
  } else {
18710
18689
  if (typeof column7.default === "string") {
18711
18690
  columnToSet.default = `'${column7.default}'`;
@@ -18733,17 +18712,24 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18733
18712
  }
18734
18713
  }
18735
18714
  }
18736
- columnsObject[column7.name] = columnToSet;
18715
+ columnsObject[name] = columnToSet;
18737
18716
  });
18738
18717
  primaryKeys.map((pk) => {
18739
- const columnNames = pk.columns.map((c) => c.name);
18740
- primaryKeysObject[pk.getName()] = {
18741
- name: pk.getName(),
18718
+ const originalColumnNames = pk.columns.map((c) => c.name);
18719
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing2));
18720
+ let name = pk.getName();
18721
+ if (casing2 !== void 0) {
18722
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
18723
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
18724
+ }
18725
+ }
18726
+ primaryKeysObject[name] = {
18727
+ name,
18742
18728
  columns: columnNames
18743
18729
  };
18744
18730
  });
18745
18731
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
18746
- const columnNames = unq.columns.map((c) => c.name);
18732
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
18747
18733
  const name = unq.name ?? (0, import_pg_core2.uniqueKeyName)(table4, columnNames);
18748
18734
  const existingUnique = uniqueConstraintObject[name];
18749
18735
  if (typeof existingUnique !== "undefined") {
@@ -18770,15 +18756,25 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18770
18756
  };
18771
18757
  });
18772
18758
  const fks = foreignKeys.map((fk4) => {
18773
- const name = fk4.getName();
18774
18759
  const tableFrom = tableName;
18775
18760
  const onDelete = fk4.onDelete;
18776
18761
  const onUpdate = fk4.onUpdate;
18777
18762
  const reference = fk4.reference();
18778
18763
  const tableTo = (0, import_drizzle_orm5.getTableName)(reference.foreignTable);
18779
18764
  const schemaTo = (0, import_pg_core3.getTableConfig)(reference.foreignTable).schema;
18780
- const columnsFrom = reference.columns.map((it) => it.name);
18781
- const columnsTo = reference.foreignColumns.map((it) => it.name);
18765
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
18766
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
18767
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
18768
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
18769
+ let name = fk4.getName();
18770
+ if (casing2 !== void 0) {
18771
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
18772
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
18773
+ }
18774
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
18775
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
18776
+ }
18777
+ }
18782
18778
  return {
18783
18779
  name,
18784
18780
  tableFrom,
@@ -18804,19 +18800,20 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18804
18800
  ${withStyle.errorWarning(
18805
18801
  `Please specify an index name in ${(0, import_drizzle_orm5.getTableName)(
18806
18802
  value.config.table
18807
- )} table that has "${dialect5.sqlToQuery(it).sql}" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`
18803
+ )} table that has "${dialect4.sqlToQuery(it).sql}" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`
18808
18804
  )}`
18809
18805
  );
18810
18806
  process.exit(1);
18811
18807
  }
18812
18808
  }
18813
18809
  it = it;
18810
+ const name2 = getColumnCasing(it, casing2);
18814
18811
  if (!(0, import_drizzle_orm5.is)(it, import_drizzle_orm5.SQL) && it.type === "PgVector" && typeof it.indexConfig.opClass === "undefined") {
18815
18812
  console.log(
18816
18813
  `
18817
18814
  ${withStyle.errorWarning(
18818
18815
  `You are specifying an index on the ${source_default.blueBright(
18819
- it.name
18816
+ name2
18820
18817
  )} column inside the ${source_default.blueBright(
18821
18818
  tableName
18822
18819
  )} table with the ${source_default.blueBright(
@@ -18826,7 +18823,7 @@ ${withStyle.errorWarning(
18826
18823
  )}].
18827
18824
 
18828
18825
  You can specify it using current syntax: ${source_default.underline(
18829
- `index("${value.config.name}").using("${value.config.method}", table.${it.name}.op("${vectorOps[0]}"))`
18826
+ `index("${value.config.name}").using("${value.config.method}", table.${name2}.op("${vectorOps[0]}"))`
18830
18827
  )}
18831
18828
 
18832
18829
  You can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing
@@ -18835,7 +18832,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
18835
18832
  );
18836
18833
  process.exit(1);
18837
18834
  }
18838
- indexColumnNames.push(it.name);
18835
+ indexColumnNames.push(name2);
18839
18836
  });
18840
18837
  const name = value.config.name ? value.config.name : indexName2(tableName, indexColumnNames);
18841
18838
  let indexColumns = columns2.map(
@@ -18843,7 +18840,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
18843
18840
  var _a2, _b2, _c2, _d2, _e2;
18844
18841
  if ((0, import_drizzle_orm5.is)(it, import_drizzle_orm5.SQL)) {
18845
18842
  return {
18846
- expression: dialect5.sqlToQuery(it, "indexes").sql,
18843
+ expression: dialect4.sqlToQuery(it, "indexes").sql,
18847
18844
  asc: true,
18848
18845
  isExpression: true,
18849
18846
  nulls: "last"
@@ -18851,7 +18848,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
18851
18848
  } else {
18852
18849
  it = it;
18853
18850
  return {
18854
- expression: it.name,
18851
+ expression: getColumnCasing(it, casing2),
18855
18852
  isExpression: false,
18856
18853
  asc: ((_a2 = it.indexConfig) == null ? void 0 : _a2.order) === "asc",
18857
18854
  nulls: ((_b2 = it.indexConfig) == null ? void 0 : _b2.nulls) ? (_c2 = it.indexConfig) == null ? void 0 : _c2.nulls : ((_d2 = it.indexConfig) == null ? void 0 : _d2.order) === "desc" ? "first" : "last",
@@ -18882,7 +18879,7 @@ ${withStyle.errorWarning(
18882
18879
  name,
18883
18880
  columns: indexColumns,
18884
18881
  isUnique: value.config.unique ?? false,
18885
- where: value.config.where ? dialect5.sqlToQuery(value.config.where).sql : void 0,
18882
+ where: value.config.where ? dialect4.sqlToQuery(value.config.where).sql : void 0,
18886
18883
  concurrently: value.config.concurrently ?? false,
18887
18884
  method: value.config.method ?? "btree",
18888
18885
  with: value.config.with ?? {}
@@ -18920,149 +18917,6 @@ ${withStyle.errorWarning(
18920
18917
  } else {
18921
18918
  }
18922
18919
  }
18923
- const combinedViews = [...views, ...matViews];
18924
- for (const view2 of combinedViews) {
18925
- let viewName;
18926
- let schema5;
18927
- let query;
18928
- let selectedFields;
18929
- let isExisting;
18930
- let withOption;
18931
- let tablespace;
18932
- let using;
18933
- let withNoData;
18934
- let materialized = false;
18935
- if ((0, import_drizzle_orm5.is)(view2, import_pg_core2.PgView)) {
18936
- ({ name: viewName, schema: schema5, query, selectedFields, isExisting, with: withOption } = (0, import_pg_core2.getViewConfig)(view2));
18937
- } else {
18938
- ({ name: viewName, schema: schema5, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = (0, import_pg_core2.getMaterializedViewConfig)(
18939
- view2
18940
- ));
18941
- materialized = true;
18942
- }
18943
- const viewSchema = schema5 ?? "public";
18944
- const viewKey = `${viewSchema}.${viewName}`;
18945
- const columnsObject = {};
18946
- const uniqueConstraintObject = {};
18947
- const existingView = resultViews[viewKey];
18948
- if (typeof existingView !== "undefined") {
18949
- console.log(
18950
- `
18951
- ${withStyle.errorWarning(
18952
- `We've found duplicated view name across ${source_default.underline.blue(
18953
- schema5 ?? "public"
18954
- )} schema. Please rename your view`
18955
- )}`
18956
- );
18957
- process.exit(1);
18958
- }
18959
- for (const key in selectedFields) {
18960
- if ((0, import_drizzle_orm5.is)(selectedFields[key], import_pg_core2.PgColumn)) {
18961
- const column7 = selectedFields[key];
18962
- const notNull = column7.notNull;
18963
- const primaryKey = column7.primary;
18964
- const sqlTypeLowered = column7.getSQLType().toLowerCase();
18965
- const typeSchema = (0, import_drizzle_orm5.is)(column7, import_pg_core2.PgEnumColumn) ? column7.enum.schema || "public" : void 0;
18966
- const generated = column7.generated;
18967
- const identity = column7.generatedIdentity;
18968
- const increment = stringFromIdentityProperty((_g = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _g.increment) ?? "1";
18969
- const minValue = stringFromIdentityProperty((_h = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _h.minValue) ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column7.columnType) : "1");
18970
- const maxValue = stringFromIdentityProperty((_i = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _i.maxValue) ?? (parseFloat(increment) < 0 ? "-1" : maxRangeForIdentityBasedOn(column7.getSQLType()));
18971
- const startWith = stringFromIdentityProperty((_j = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _j.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue);
18972
- const cache = stringFromIdentityProperty((_k = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _k.cache) ?? "1";
18973
- const columnToSet = {
18974
- name: column7.name,
18975
- type: column7.getSQLType(),
18976
- typeSchema,
18977
- primaryKey,
18978
- notNull,
18979
- generated: generated ? {
18980
- as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect5.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect5.sqlToQuery(generated.as()).sql : generated.as,
18981
- type: "stored"
18982
- } : void 0,
18983
- identity: identity ? {
18984
- type: identity.type,
18985
- name: identity.sequenceName ?? `${viewName}_${column7.name}_seq`,
18986
- schema: schema5 ?? "public",
18987
- increment,
18988
- startWith,
18989
- minValue,
18990
- maxValue,
18991
- cache,
18992
- cycle: ((_l = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _l.cycle) ?? false
18993
- } : void 0
18994
- };
18995
- if (column7.isUnique) {
18996
- const existingUnique = uniqueConstraintObject[column7.uniqueName];
18997
- if (typeof existingUnique !== "undefined") {
18998
- console.log(
18999
- `
19000
- ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${source_default.underline.blue(
19001
- viewName
19002
- )} table.
19003
- The unique constraint ${source_default.underline.blue(
19004
- column7.uniqueName
19005
- )} on the ${source_default.underline.blue(
19006
- column7.name
19007
- )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
19008
- existingUnique.columns.join(",")
19009
- )} columns
19010
- `)}`
19011
- );
19012
- process.exit(1);
19013
- }
19014
- uniqueConstraintObject[column7.uniqueName] = {
19015
- name: column7.uniqueName,
19016
- nullsNotDistinct: column7.uniqueType === "not distinct",
19017
- columns: [columnToSet.name]
19018
- };
19019
- }
19020
- if (column7.default !== void 0) {
19021
- if ((0, import_drizzle_orm5.is)(column7.default, import_drizzle_orm5.SQL)) {
19022
- columnToSet.default = sqlToStr(column7.default);
19023
- } else {
19024
- if (typeof column7.default === "string") {
19025
- columnToSet.default = `'${column7.default}'`;
19026
- } else {
19027
- if (sqlTypeLowered === "jsonb" || sqlTypeLowered === "json") {
19028
- columnToSet.default = `'${JSON.stringify(
19029
- column7.default
19030
- )}'::${sqlTypeLowered}`;
19031
- } else if (column7.default instanceof Date) {
19032
- if (sqlTypeLowered === "date") {
19033
- columnToSet.default = `'${column7.default.toISOString().split("T")[0]}'`;
19034
- } else if (sqlTypeLowered === "timestamp") {
19035
- columnToSet.default = `'${column7.default.toISOString().replace("T", " ").slice(0, 23)}'`;
19036
- } else {
19037
- columnToSet.default = `'${column7.default.toISOString()}'`;
19038
- }
19039
- } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column7.default)) {
19040
- columnToSet.default = `'${buildArrayString(
19041
- column7.default,
19042
- sqlTypeLowered
19043
- )}'`;
19044
- } else {
19045
- columnToSet.default = column7.default;
19046
- }
19047
- }
19048
- }
19049
- }
19050
- columnsObject[column7.name] = columnToSet;
19051
- }
19052
- }
19053
- resultViews[viewKey] = {
19054
- columns: columnsObject,
19055
- definition: isExisting ? void 0 : dialect5.sqlToQuery(query).sql,
19056
- name: viewName,
19057
- schema: viewSchema,
19058
- isExisting,
19059
- with: withOption,
19060
- withNoData,
19061
- materialized,
19062
- tablespace,
19063
- using
19064
- };
19065
- }
19066
18920
  const enumsToReturn = enums.reduce((map, obj) => {
19067
18921
  const enumSchema3 = obj.schema || "public";
19068
18922
  const key = `${enumSchema3}.${obj.enumName}`;
@@ -19089,7 +18943,6 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19089
18943
  enums: enumsToReturn,
19090
18944
  schemas: schemasObject,
19091
18945
  sequences: sequencesToReturn,
19092
- views: resultViews,
19093
18946
  _meta: {
19094
18947
  schemas: {},
19095
18948
  tables: {},
@@ -19108,25 +18961,10 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19108
18961
  };
19109
18962
  fromDatabase2 = async (db, tablesFilter = () => true, schemaFilters, progressCallback) => {
19110
18963
  const result = {};
19111
- const views = {};
19112
18964
  const internals = { tables: {} };
19113
- const where = schemaFilters.map((t2) => `n.nspname = '${t2}'`).join(" or ");
18965
+ const where = schemaFilters.map((t2) => `table_schema = '${t2}'`).join(" or ");
19114
18966
  const allTables = await db.query(
19115
- `SELECT
19116
- n.nspname AS table_schema,
19117
- c.relname AS table_name,
19118
- CASE
19119
- WHEN c.relkind = 'r' THEN 'table'
19120
- WHEN c.relkind = 'v' THEN 'view'
19121
- WHEN c.relkind = 'm' THEN 'materialized_view'
19122
- END AS type
19123
- FROM
19124
- pg_catalog.pg_class c
19125
- JOIN
19126
- pg_catalog.pg_namespace n ON n.oid = c.relnamespace
19127
- WHERE
19128
- c.relkind IN ('r', 'v', 'm')
19129
- ${where === "" ? "" : ` AND ${where}`};`
18967
+ `SELECT table_schema, table_name FROM information_schema.tables${where === "" ? "" : ` WHERE ${where}`};`
19130
18968
  );
19131
18969
  const schemas = new Set(allTables.map((it) => it.table_schema));
19132
18970
  schemas.delete("public");
@@ -19204,7 +19042,7 @@ WHERE
19204
19042
  progressCallback("enums", Object.keys(enumsToReturn).length, "done");
19205
19043
  }
19206
19044
  const sequencesInColumns = [];
19207
- const all = allTables.filter((it) => it.type === "table").map((row) => {
19045
+ const all = allTables.map((row) => {
19208
19046
  return new Promise(async (res, rej) => {
19209
19047
  var _a, _b, _c, _d, _e, _f;
19210
19048
  const tableName = row.table_name;
@@ -19479,7 +19317,7 @@ WHERE
19479
19317
  schema: tableSchema
19480
19318
  } : void 0
19481
19319
  };
19482
- if (identityName) {
19320
+ if (identityName && typeof identityName === "string") {
19483
19321
  delete sequencesToReturn[`${tableSchema}.${identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName}`];
19484
19322
  delete sequencesToReturn[identityName];
19485
19323
  }
@@ -19613,336 +19451,6 @@ WHERE
19613
19451
  }
19614
19452
  for await (const _2 of all) {
19615
19453
  }
19616
- const allViews = allTables.filter((it) => it.type === "view" || it.type === "materialized_view").map((row) => {
19617
- return new Promise(async (res, rej) => {
19618
- var _a, _b, _c, _d;
19619
- const viewName = row.table_name;
19620
- if (!tablesFilter(viewName))
19621
- return res("");
19622
- tableCount += 1;
19623
- const viewSchema = row.table_schema;
19624
- try {
19625
- const columnToReturn = {};
19626
- const viewResponses = await db.query(`WITH view_columns AS (
19627
- SELECT DISTINCT
19628
- nv.nspname::information_schema.sql_identifier AS view_schema,
19629
- v.relname::information_schema.sql_identifier AS view_name,
19630
- nt.nspname::information_schema.sql_identifier AS table_schema,
19631
- t.relname::information_schema.sql_identifier AS table_name,
19632
- a.attname::information_schema.sql_identifier AS column_name
19633
- FROM pg_namespace nv
19634
- JOIN pg_class v ON nv.oid = v.relnamespace
19635
- JOIN pg_depend dv ON v.oid = dv.refobjid
19636
- JOIN pg_depend dt ON dv.objid = dt.objid
19637
- JOIN pg_class t ON dt.refobjid = t.oid
19638
- JOIN pg_namespace nt ON t.relnamespace = nt.oid
19639
- JOIN pg_attribute a ON t.oid = a.attrelid
19640
- WHERE (v.relkind = 'v'::"char" OR v.relkind = 'm'::"char")
19641
- AND dv.refclassid = 'pg_class'::regclass::oid
19642
- AND dv.classid = 'pg_rewrite'::regclass::oid
19643
- AND dv.deptype = 'i'::"char"
19644
- AND dv.objid = dt.objid
19645
- AND dv.refobjid <> dt.refobjid
19646
- AND dt.classid = 'pg_rewrite'::regclass::oid
19647
- AND dt.refclassid = 'pg_class'::regclass::oid
19648
- AND t.relkind = ANY (ARRAY['r'::"char", 'v'::"char", 'f'::"char", 'p'::"char"])
19649
- AND dt.refobjsubid = a.attnum
19650
- AND pg_has_role(t.relowner, 'USAGE'::text)
19651
- AND nv.nspname::information_schema.sql_identifier = '${viewSchema}'
19652
- AND v.relname::information_schema.sql_identifier = '${viewName}'
19653
- ),
19654
- column_descriptions AS (
19655
- SELECT DISTINCT
19656
- a.attrelid::regclass::text AS table_name,
19657
- a.attname AS column_name,
19658
- c.is_nullable,
19659
- a.attndims AS array_dimensions,
19660
- CASE
19661
- WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS (
19662
- SELECT FROM pg_attrdef ad
19663
- WHERE ad.adrelid = a.attrelid
19664
- AND ad.adnum = a.attnum
19665
- AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)'
19666
- )
19667
- THEN CASE a.atttypid
19668
- WHEN 'int'::regtype THEN 'serial'
19669
- WHEN 'int8'::regtype THEN 'bigserial'
19670
- WHEN 'int2'::regtype THEN 'smallserial'
19671
- END
19672
- ELSE format_type(a.atttypid, a.atttypmod)
19673
- END AS data_type,
19674
- pg_get_serial_sequence('"' || c.table_schema || '"."' || c.table_name || '"', a.attname)::regclass AS seq_name,
19675
- c.column_default,
19676
- c.data_type AS additional_dt,
19677
- c.udt_name AS enum_name,
19678
- c.is_generated,
19679
- c.generation_expression,
19680
- c.is_identity,
19681
- c.identity_generation,
19682
- c.identity_start,
19683
- c.identity_increment,
19684
- c.identity_maximum,
19685
- c.identity_minimum,
19686
- c.identity_cycle
19687
- FROM pg_attribute a
19688
- JOIN information_schema.columns c ON c.column_name = a.attname
19689
- JOIN pg_type t ON t.oid = a.atttypid
19690
- LEFT JOIN pg_namespace ns ON ns.oid = t.typnamespace
19691
- WHERE a.attnum > 0
19692
- AND NOT a.attisdropped
19693
- ),
19694
- table_constraints AS (
19695
- SELECT DISTINCT ON (ccu.column_name)
19696
- ccu.column_name,
19697
- c.data_type,
19698
- tc.constraint_type,
19699
- tc.constraint_name,
19700
- tc.constraint_schema,
19701
- tc.table_name
19702
- FROM information_schema.table_constraints tc
19703
- JOIN information_schema.constraint_column_usage ccu USING (constraint_schema, constraint_name)
19704
- JOIN information_schema.columns c ON c.table_schema = tc.constraint_schema
19705
- AND tc.table_name = c.table_name
19706
- AND ccu.column_name = c.column_name
19707
- ),
19708
- additional_column_info AS (
19709
- SELECT DISTINCT
19710
- a.attrelid::regclass::text AS table_name,
19711
- a.attname AS column_name,
19712
- is_nullable,
19713
- a.attndims AS array_dimensions,
19714
- CASE
19715
- WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS (
19716
- SELECT FROM pg_attrdef ad
19717
- WHERE ad.adrelid = a.attrelid
19718
- AND ad.adnum = a.attnum
19719
- AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)'
19720
- )
19721
- THEN CASE a.atttypid
19722
- WHEN 'int'::regtype THEN 'serial'
19723
- WHEN 'int8'::regtype THEN 'bigserial'
19724
- WHEN 'int2'::regtype THEN 'smallserial'
19725
- END
19726
- ELSE format_type(a.atttypid, a.atttypmod)
19727
- END AS data_type,
19728
- pg_get_serial_sequence('"' || c.table_schema || '"."' || c.table_name || '"', a.attname)::regclass AS seq_name,
19729
- c.column_default,
19730
- c.data_type AS additional_dt,
19731
- c.udt_name AS enum_name,
19732
- c.is_generated,
19733
- generation_expression,
19734
- is_identity,
19735
- identity_generation,
19736
- identity_start,
19737
- identity_increment,
19738
- identity_maximum,
19739
- identity_minimum,
19740
- identity_cycle
19741
- FROM pg_attribute a
19742
- JOIN information_schema.columns c ON c.column_name = a.attname
19743
- LEFT JOIN pg_type t ON t.oid = a.atttypid
19744
- LEFT JOIN pg_namespace ns ON ns.oid = t.typnamespace
19745
- WHERE a.attnum > 0
19746
- AND NOT a.attisdropped
19747
- )
19748
- SELECT DISTINCT ON (vc.table_name, vc.column_name)
19749
- vc.view_schema,
19750
- vc.view_name,
19751
- vc.table_schema,
19752
- vc.table_name,
19753
- vc.column_name,
19754
- COALESCE(cd.data_type, aci.data_type) AS data_type,
19755
- tc.constraint_type,
19756
- tc.constraint_name,
19757
- aci.is_nullable,
19758
- aci.array_dimensions,
19759
- aci.seq_name,
19760
- aci.column_default,
19761
- aci.additional_dt,
19762
- aci.enum_name,
19763
- aci.is_generated,
19764
- aci.generation_expression,
19765
- aci.is_identity,
19766
- aci.identity_generation,
19767
- aci.identity_start,
19768
- aci.identity_increment,
19769
- aci.identity_maximum,
19770
- aci.identity_minimum,
19771
- aci.identity_cycle
19772
- FROM view_columns vc
19773
- LEFT JOIN column_descriptions cd ON vc.table_name = cd.table_name AND vc.column_name = cd.column_name
19774
- LEFT JOIN table_constraints tc ON vc.table_name = tc.table_name AND vc.column_name = tc.column_name
19775
- LEFT JOIN additional_column_info aci ON vc.table_name = aci.table_name AND vc.column_name = aci.column_name
19776
- ORDER BY vc.table_name, vc.column_name;`);
19777
- for (const viewResponse of viewResponses) {
19778
- const columnName = viewResponse.column_name;
19779
- const columnAdditionalDT = viewResponse.additional_dt;
19780
- const columnDimensions = viewResponse.array_dimensions;
19781
- const enumType2 = viewResponse.enum_name;
19782
- let columnType = viewResponse.data_type;
19783
- const typeSchema = viewResponse.type_schema;
19784
- const isGenerated = viewResponse.is_generated === "ALWAYS";
19785
- const generationExpression = viewResponse.generation_expression;
19786
- const isIdentity = viewResponse.is_identity === "YES";
19787
- const identityGeneration = viewResponse.identity_generation === "ALWAYS" ? "always" : "byDefault";
19788
- const identityStart = viewResponse.identity_start;
19789
- const identityIncrement = viewResponse.identity_increment;
19790
- const identityMaximum = viewResponse.identity_maximum;
19791
- const identityMinimum = viewResponse.identity_minimum;
19792
- const identityCycle = viewResponse.identity_cycle === "YES";
19793
- const identityName = viewResponse.seq_name;
19794
- const defaultValueRes = viewResponse.column_default;
19795
- const primaryKey = viewResponse.constraint_type === "PRIMARY KEY";
19796
- let columnTypeMapped = columnType;
19797
- if (columnAdditionalDT === "ARRAY") {
19798
- if (typeof internals.tables[viewName] === "undefined") {
19799
- internals.tables[viewName] = {
19800
- columns: {
19801
- [columnName]: {
19802
- isArray: true,
19803
- dimensions: columnDimensions,
19804
- rawType: columnTypeMapped.substring(
19805
- 0,
19806
- columnTypeMapped.length - 2
19807
- )
19808
- }
19809
- }
19810
- };
19811
- } else {
19812
- if (typeof internals.tables[viewName].columns[columnName] === "undefined") {
19813
- internals.tables[viewName].columns[columnName] = {
19814
- isArray: true,
19815
- dimensions: columnDimensions,
19816
- rawType: columnTypeMapped.substring(
19817
- 0,
19818
- columnTypeMapped.length - 2
19819
- )
19820
- };
19821
- }
19822
- }
19823
- }
19824
- const defaultValue = defaultForColumn(
19825
- viewResponse,
19826
- internals,
19827
- viewName
19828
- );
19829
- if (defaultValue === "NULL" || defaultValueRes && defaultValueRes.startsWith("(") && defaultValueRes.endsWith(")")) {
19830
- if (typeof internals.tables[viewName] === "undefined") {
19831
- internals.tables[viewName] = {
19832
- columns: {
19833
- [columnName]: {
19834
- isDefaultAnExpression: true
19835
- }
19836
- }
19837
- };
19838
- } else {
19839
- if (typeof internals.tables[viewName].columns[columnName] === "undefined") {
19840
- internals.tables[viewName].columns[columnName] = {
19841
- isDefaultAnExpression: true
19842
- };
19843
- } else {
19844
- internals.tables[viewName].columns[columnName].isDefaultAnExpression = true;
19845
- }
19846
- }
19847
- }
19848
- const isSerial = columnType === "serial";
19849
- if (columnTypeMapped.startsWith("numeric(")) {
19850
- columnTypeMapped = columnTypeMapped.replace(",", ", ");
19851
- }
19852
- if (columnAdditionalDT === "ARRAY") {
19853
- for (let i2 = 1; i2 < Number(columnDimensions); i2++) {
19854
- columnTypeMapped += "[]";
19855
- }
19856
- }
19857
- columnTypeMapped = columnTypeMapped.replace("character varying", "varchar").replace(" without time zone", "").replace("character", "char");
19858
- columnTypeMapped = trimChar(columnTypeMapped, '"');
19859
- columnToReturn[columnName] = {
19860
- name: columnName,
19861
- type: (
19862
- // filter vectors, but in future we should filter any extension that was installed by user
19863
- columnAdditionalDT === "USER-DEFINED" && !["vector", "geometry"].includes(enumType2) ? enumType2 : columnTypeMapped
19864
- ),
19865
- typeSchema: enumsToReturn[`${typeSchema}.${enumType2}`] !== void 0 ? enumsToReturn[`${typeSchema}.${enumType2}`].schema : void 0,
19866
- primaryKey,
19867
- notNull: viewResponse.is_nullable === "NO",
19868
- generated: isGenerated ? { as: generationExpression, type: "stored" } : void 0,
19869
- identity: isIdentity ? {
19870
- type: identityGeneration,
19871
- name: identityName,
19872
- increment: stringFromDatabaseIdentityProperty(identityIncrement),
19873
- minValue: stringFromDatabaseIdentityProperty(identityMinimum),
19874
- maxValue: stringFromDatabaseIdentityProperty(identityMaximum),
19875
- startWith: stringFromDatabaseIdentityProperty(identityStart),
19876
- cache: ((_a = sequencesToReturn[identityName]) == null ? void 0 : _a.cache) ? (_b = sequencesToReturn[identityName]) == null ? void 0 : _b.cache : ((_c = sequencesToReturn[`${viewSchema}.${identityName}`]) == null ? void 0 : _c.cache) ? (_d = sequencesToReturn[`${viewSchema}.${identityName}`]) == null ? void 0 : _d.cache : void 0,
19877
- cycle: identityCycle,
19878
- schema: viewSchema
19879
- } : void 0
19880
- };
19881
- if (identityName) {
19882
- delete sequencesToReturn[`${viewSchema}.${identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName}`];
19883
- delete sequencesToReturn[identityName];
19884
- }
19885
- if (!isSerial && typeof defaultValue !== "undefined") {
19886
- columnToReturn[columnName].default = defaultValue;
19887
- }
19888
- }
19889
- const [viewInfo] = await db.query(`
19890
- SELECT
19891
- c.relname AS view_name,
19892
- n.nspname AS schema_name,
19893
- pg_get_viewdef(c.oid, true) AS definition,
19894
- ts.spcname AS tablespace_name,
19895
- c.reloptions AS options,
19896
- pg_tablespace_location(ts.oid) AS location
19897
- FROM
19898
- pg_class c
19899
- JOIN
19900
- pg_namespace n ON c.relnamespace = n.oid
19901
- LEFT JOIN
19902
- pg_tablespace ts ON c.reltablespace = ts.oid
19903
- WHERE
19904
- (c.relkind = 'm' OR c.relkind = 'v')
19905
- AND n.nspname = '${viewSchema}'
19906
- AND c.relname = '${viewName}';`);
19907
- const resultWith = {};
19908
- if (viewInfo.options) {
19909
- viewInfo.options.forEach((pair) => {
19910
- const splitted = pair.split("=");
19911
- const key = splitted[0];
19912
- const value = splitted[1];
19913
- if (value === "true") {
19914
- resultWith[key] = true;
19915
- } else if (value === "false") {
19916
- resultWith[key] = false;
19917
- } else if (!isNaN(Number(value))) {
19918
- resultWith[key] = Number(value);
19919
- } else {
19920
- resultWith[key] = value;
19921
- }
19922
- });
19923
- }
19924
- const definition = viewInfo.definition.replace(/\s+/g, " ").replace(";", "").trim();
19925
- const withOption = Object.values(resultWith).length ? Object.fromEntries(Object.entries(resultWith).map(([key, value]) => [key.camelCase(), value])) : void 0;
19926
- const materialized = row.type === "materialized_view";
19927
- views[`${viewSchema}.${viewName}`] = {
19928
- name: viewName,
19929
- schema: viewSchema,
19930
- columns: columnToReturn,
19931
- isExisting: false,
19932
- definition,
19933
- materialized,
19934
- with: withOption,
19935
- tablespace: viewInfo.tablespace_name ?? void 0
19936
- };
19937
- } catch (e2) {
19938
- rej(e2);
19939
- return;
19940
- }
19941
- res("");
19942
- });
19943
- });
19944
- for await (const _2 of allViews) {
19945
- }
19946
19454
  if (progressCallback) {
19947
19455
  progressCallback("columns", columnsCount, "done");
19948
19456
  progressCallback("indexes", indexesCount, "done");
@@ -19956,7 +19464,6 @@ WHERE
19956
19464
  enums: enumsToReturn,
19957
19465
  schemas: schemasObject,
19958
19466
  sequences: sequencesToReturn,
19959
- views,
19960
19467
  _meta: {
19961
19468
  schemas: {},
19962
19469
  tables: {},
@@ -20141,7 +19648,7 @@ function extractGeneratedColumns(input) {
20141
19648
  }
20142
19649
  return columns;
20143
19650
  }
20144
- var import_drizzle_orm7, import_sqlite_core2, dialect6, generateSqliteSnapshot, fromDatabase3;
19651
+ var import_drizzle_orm7, import_sqlite_core2, generateSqliteSnapshot, fromDatabase3;
20145
19652
  var init_sqliteSerializer = __esm({
20146
19653
  "src/serializer/sqliteSerializer.ts"() {
20147
19654
  "use strict";
@@ -20149,9 +19656,10 @@ var init_sqliteSerializer = __esm({
20149
19656
  import_drizzle_orm7 = require("drizzle-orm");
20150
19657
  import_sqlite_core2 = require("drizzle-orm/sqlite-core");
20151
19658
  init_outputs();
19659
+ init_utils();
20152
19660
  init_serializer();
20153
- dialect6 = new import_sqlite_core2.SQLiteSyncDialect();
20154
- generateSqliteSnapshot = (tables) => {
19661
+ generateSqliteSnapshot = (tables, casing2) => {
19662
+ const dialect4 = new import_sqlite_core2.SQLiteSyncDialect({ casing: casing2 });
20155
19663
  const result = {};
20156
19664
  const internal = { indexes: {} };
20157
19665
  for (const table4 of tables) {
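generateSqliteSnapshot now accepts a casing argument, builds its SQLiteSyncDialect with it, and resolves every column name through getColumnCasing from the newly required src/utils (init_utils()). The helper itself is not shown in this diff; a plausible sketch, assuming it defers to drizzle-orm's casing utilities and that columns expose a keyAsName flag for explicitly named columns:

```ts
// Sketch only: the real helper lives in src/utils; toCamelCase/toSnakeCase and the
// keyAsName flag are assumptions about the drizzle-orm column shape, not shown here.
import { toCamelCase, toSnakeCase } from "drizzle-orm/casing";

type CasingType = "snake_case" | "camelCase";

function getColumnCasing(
  column: { name: string; keyAsName: boolean },
  casing: CasingType | undefined,
): string {
  // Explicitly named columns (keyAsName === false) and runs without a casing policy keep the declared name.
  if (!column.keyAsName || casing === undefined) return column.name;
  return casing === "camelCase" ? toCamelCase(column.name) : toSnakeCase(column.name);
}
```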
@@ -20169,28 +19677,29 @@ var init_sqliteSerializer = __esm({
20169
19677
  uniqueConstraints
20170
19678
  } = (0, import_sqlite_core2.getTableConfig)(table4);
20171
19679
  columns.forEach((column7) => {
19680
+ const name = getColumnCasing(column7, casing2);
20172
19681
  const notNull = column7.notNull;
20173
19682
  const primaryKey = column7.primary;
20174
19683
  const generated = column7.generated;
20175
19684
  const columnToSet = {
20176
- name: column7.name,
19685
+ name,
20177
19686
  type: column7.getSQLType(),
20178
19687
  primaryKey,
20179
19688
  notNull,
20180
19689
  autoincrement: (0, import_drizzle_orm7.is)(column7, import_sqlite_core2.SQLiteBaseInteger) ? column7.autoIncrement : false,
20181
19690
  generated: generated ? {
20182
- as: (0, import_drizzle_orm7.is)(generated.as, import_drizzle_orm7.SQL) ? `(${dialect6.sqlToQuery(generated.as, "indexes").sql})` : typeof generated.as === "function" ? `(${dialect6.sqlToQuery(generated.as(), "indexes").sql})` : `(${generated.as})`,
19691
+ as: (0, import_drizzle_orm7.is)(generated.as, import_drizzle_orm7.SQL) ? `(${dialect4.sqlToQuery(generated.as, "indexes").sql})` : typeof generated.as === "function" ? `(${dialect4.sqlToQuery(generated.as(), "indexes").sql})` : `(${generated.as})`,
20183
19692
  type: generated.mode ?? "virtual"
20184
19693
  } : void 0
20185
19694
  };
20186
19695
  if (column7.default !== void 0) {
20187
19696
  if ((0, import_drizzle_orm7.is)(column7.default, import_drizzle_orm7.SQL)) {
20188
- columnToSet.default = sqlToStr(column7.default);
19697
+ columnToSet.default = sqlToStr(column7.default, casing2);
20189
19698
  } else {
20190
19699
  columnToSet.default = typeof column7.default === "string" ? `'${column7.default}'` : typeof column7.default === "object" || Array.isArray(column7.default) ? `'${JSON.stringify(column7.default)}'` : column7.default;
20191
19700
  }
20192
19701
  }
20193
- columnsObject[column7.name] = columnToSet;
19702
+ columnsObject[name] = columnToSet;
20194
19703
  if (column7.isUnique) {
20195
19704
  const existingUnique = indexesObject[column7.uniqueName];
20196
19705
  if (typeof existingUnique !== "undefined") {
@@ -20202,7 +19711,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20202
19711
  The unique constraint ${source_default.underline.blue(
20203
19712
  column7.uniqueName
20204
19713
  )} on the ${source_default.underline.blue(
20205
- column7.name
19714
+ name
20206
19715
  )} column is conflicting with a unique constraint name already defined for ${source_default.underline.blue(
  )} column is conflicting with a unique constraint name already defined for ${source_default.underline.blue(
20207
19716
  existingUnique.columns.join(",")
20208
19717
  )} columns
@@ -20218,15 +19727,25 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20218
19727
  }
20219
19728
  });
20220
19729
  const foreignKeys = tableForeignKeys.map((fk4) => {
20221
- const name = fk4.getName();
20222
19730
  const tableFrom = tableName;
20223
19731
  const onDelete = fk4.onDelete ?? "no action";
20224
19732
  const onUpdate = fk4.onUpdate ?? "no action";
20225
19733
  const reference = fk4.reference();
20226
19734
  const referenceFT = reference.foreignTable;
20227
19735
  const tableTo = (0, import_drizzle_orm7.getTableName)(referenceFT);
20228
- const columnsFrom = reference.columns.map((it) => it.name);
20229
- const columnsTo = reference.foreignColumns.map((it) => it.name);
19736
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
19737
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
19738
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
19739
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
19740
+ let name = fk4.getName();
19741
+ if (casing2 !== void 0) {
19742
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
19743
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
19744
+ }
19745
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
19746
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
19747
+ }
19748
+ }
20230
19749
  return {
20231
19750
  name,
20232
19751
  tableFrom,
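The added lines above rebuild the generated foreign-key name by substituting each original column name with its re-cased counterpart on both sides of the reference. A worked illustration, assuming drizzle-orm's default FK naming of tableFrom_columnsFrom_tableTo_columnsTo_fk (the exact format is not visible in this diff):

```ts
// Illustration only: the replace loop mirrors the bundle; the input name format is assumed.
function recaseName(name: string, originalCols: string[], casedCols: string[]): string {
  for (let i = 0; i < originalCols.length; i++) {
    name = name.replace(originalCols[i], casedCols[i]);
  }
  return name;
}

// Under snake_case casing, a key column declared as `authorId` becomes `author_id`:
recaseName("users_authorId_authors_id_fk", ["authorId"], ["author_id"]);
// -> "users_author_id_authors_id_fk"
```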
@@ -20246,7 +19765,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20246
19765
  let indexColumns = columns2.map((it) => {
20247
19766
  var _a;
20248
19767
  if ((0, import_drizzle_orm7.is)(it, import_drizzle_orm7.SQL)) {
20249
- const sql = dialect6.sqlToQuery(it, "indexes").sql;
19768
+ const sql = dialect4.sqlToQuery(it, "indexes").sql;
20250
19769
  if (typeof internal.indexes[name] === "undefined") {
20251
19770
  internal.indexes[name] = {
20252
19771
  columns: {
@@ -20266,13 +19785,13 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20266
19785
  }
20267
19786
  return sql;
20268
19787
  } else {
20269
- return it.name;
19788
+ return getColumnCasing(it, casing2);
20270
19789
  }
20271
19790
  });
20272
19791
  let where = void 0;
20273
19792
  if (value.config.where !== void 0) {
20274
19793
  if ((0, import_drizzle_orm7.is)(value.config.where, import_drizzle_orm7.SQL)) {
20275
- where = dialect6.sqlToQuery(value.config.where).sql;
19794
+ where = dialect4.sqlToQuery(value.config.where).sql;
20276
19795
  }
20277
19796
  }
20278
19797
  indexesObject[name] = {
@@ -20283,7 +19802,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20283
19802
  };
20284
19803
  });
20285
19804
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
20286
- const columnNames = unq.columns.map((c) => c.name);
19805
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
20287
19806
  const name = unq.name ?? (0, import_sqlite_core2.uniqueKeyName)(table4, columnNames);
20288
19807
  const existingUnique = indexesObject[name];
20289
19808
  if (typeof existingUnique !== "undefined") {
@@ -20313,12 +19832,20 @@ The unique constraint ${source_default.underline.blue(
20313
19832
  });
20314
19833
  primaryKeys.forEach((it) => {
20315
19834
  if (it.columns.length > 1) {
20316
- primaryKeysObject[it.getName()] = {
20317
- columns: it.columns.map((it2) => it2.name),
20318
- name: it.getName()
19835
+ const originalColumnNames = it.columns.map((c) => c.name);
19836
+ const columnNames = it.columns.map((c) => getColumnCasing(c, casing2));
19837
+ let name = it.getName();
19838
+ if (casing2 !== void 0) {
19839
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
19840
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
19841
+ }
19842
+ }
19843
+ primaryKeysObject[name] = {
19844
+ columns: columnNames,
19845
+ name
20319
19846
  };
20320
19847
  } else {
20321
- columnsObject[it.columns[0].name].primaryKey = true;
19848
+ columnsObject[getColumnCasing(it.columns[0], casing2)].primaryKey = true;
20322
19849
  }
20323
19850
  });
20324
19851
  result[tableName] = {
@@ -20563,16 +20090,17 @@ WHERE
20563
20090
  });
20564
20091
 
20565
20092
  // src/serializer/index.ts
20566
- var import_fs3, glob, import_path3, sqlToStr, serializeMySql, serializePg, serializeSQLite, prepareFilenames;
20093
+ var import_casing2, import_fs3, glob, import_path3, sqlToStr, serializeMySql, serializePg, serializeSQLite, prepareFilenames;
20567
20094
  var init_serializer = __esm({
20568
20095
  "src/serializer/index.ts"() {
20569
20096
  "use strict";
20570
20097
  init_source();
20098
+ import_casing2 = require("drizzle-orm/casing");
20571
20099
  import_fs3 = __toESM(require("fs"));
20572
20100
  glob = __toESM(require_glob());
20573
20101
  import_path3 = __toESM(require("path"));
20574
20102
  init_views();
20575
- sqlToStr = (sql) => {
20103
+ sqlToStr = (sql, casing2) => {
20576
20104
  return sql.toQuery({
20577
20105
  escapeName: () => {
20578
20106
  throw new Error("we don't support params for `sql` default values");
@@ -20582,10 +20110,11 @@ var init_serializer = __esm({
20582
20110
  },
20583
20111
  escapeString: () => {
20584
20112
  throw new Error("we don't support params for `sql` default values");
20585
- }
20113
+ },
20114
+ casing: new import_casing2.CasingCache(casing2)
20586
20115
  }).sql;
20587
20116
  };
20588
- serializeMySql = async (path5) => {
20117
+ serializeMySql = async (path5, casing2) => {
20589
20118
  const filenames = prepareFilenames(path5);
20590
20119
  console.log(source_default.gray(`Reading schema files:
20591
20120
  ${filenames.join("\n")}
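In the serializer entry points above, sqlToStr now receives the casing value and hands drizzle-orm a CasingCache, so sql`` default expressions render column references under the configured policy, and serializeMySql forwards the same value into the snapshot generator. A sketch of the resulting sqlToStr shape (the escapeParam hook is assumed to sit alongside the two escape hooks visible in the hunk):

```ts
// Sketch: mirrors the casing-aware sqlToStr above; all escape hooks throw because
// parameters are not supported inside `sql` default values.
import { SQL } from "drizzle-orm";
import { CasingCache } from "drizzle-orm/casing";

type CasingType = "snake_case" | "camelCase";

function sqlToStr(query: SQL, casing?: CasingType): string {
  return query.toQuery({
    escapeName: () => { throw new Error("we don't support params for `sql` default values"); },
    escapeParam: () => { throw new Error("we don't support params for `sql` default values"); },
    escapeString: () => { throw new Error("we don't support params for `sql` default values"); },
    casing: new CasingCache(casing), // new: identifiers in the default expression follow the configured casing
  }).sql;
}
```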
@@ -20593,23 +20122,23 @@ ${filenames.join("\n")}
20593
20122
  const { prepareFromMySqlImports: prepareFromMySqlImports2 } = await Promise.resolve().then(() => (init_mysqlImports(), mysqlImports_exports));
20594
20123
  const { generateMySqlSnapshot: generateMySqlSnapshot2 } = await Promise.resolve().then(() => (init_mysqlSerializer(), mysqlSerializer_exports));
20595
20124
  const { tables } = await prepareFromMySqlImports2(filenames);
20596
- return generateMySqlSnapshot2(tables);
20125
+ return generateMySqlSnapshot2(tables, casing2);
20597
20126
  };
20598
- serializePg = async (path5, schemaFilter) => {
20127
+ serializePg = async (path5, casing2, schemaFilter) => {
20599
20128
  const filenames = prepareFilenames(path5);
20600
20129
  const { prepareFromPgImports: prepareFromPgImports2 } = await Promise.resolve().then(() => (init_pgImports(), pgImports_exports));
20601
20130
  const { generatePgSnapshot: generatePgSnapshot2 } = await Promise.resolve().then(() => (init_pgSerializer(), pgSerializer_exports));
20602
- const { tables, enums, schemas, sequences, views, matViews } = await prepareFromPgImports2(
20131
+ const { tables, enums, schemas, sequences } = await prepareFromPgImports2(
20603
20132
  filenames
20604
20133
  );
20605
- return generatePgSnapshot2(tables, enums, schemas, sequences, views, matViews, schemaFilter);
20134
+ return generatePgSnapshot2(tables, enums, schemas, sequences, casing2, schemaFilter);
20606
20135
  };
20607
- serializeSQLite = async (path5) => {
20136
+ serializeSQLite = async (path5, casing2) => {
20608
20137
  const filenames = prepareFilenames(path5);
20609
20138
  const { prepareFromSqliteImports: prepareFromSqliteImports2 } = await Promise.resolve().then(() => (init_sqliteImports(), sqliteImports_exports));
20610
20139
  const { generateSqliteSnapshot: generateSqliteSnapshot2 } = await Promise.resolve().then(() => (init_sqliteSerializer(), sqliteSerializer_exports));
20611
20140
  const { tables } = await prepareFromSqliteImports2(filenames);
20612
- return generateSqliteSnapshot2(tables);
20141
+ return generateSqliteSnapshot2(tables, casing2);
20613
20142
  };
20614
20143
  prepareFilenames = (path5) => {
20615
20144
  if (typeof path5 === "string") {
@@ -20658,45 +20187,45 @@ var init_migrationPreparator = __esm({
20658
20187
  init_mysqlSchema();
20659
20188
  init_pgSchema();
20660
20189
  init_sqliteSchema();
20661
- prepareMySqlDbPushSnapshot = async (prev, schemaPath) => {
20662
- const serialized = await serializeMySql(schemaPath);
20190
+ prepareMySqlDbPushSnapshot = async (prev, schemaPath, casing2) => {
20191
+ const serialized = await serializeMySql(schemaPath, casing2);
20663
20192
  const id = (0, import_crypto.randomUUID)();
20664
20193
  const idPrev = prev.id;
20665
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20666
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20194
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20195
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20667
20196
  return { prev, cur: result };
20668
20197
  };
20669
- prepareSQLiteDbPushSnapshot = async (prev, schemaPath) => {
20670
- const serialized = await serializeSQLite(schemaPath);
20198
+ prepareSQLiteDbPushSnapshot = async (prev, schemaPath, casing2) => {
20199
+ const serialized = await serializeSQLite(schemaPath, casing2);
20671
20200
  const id = (0, import_crypto.randomUUID)();
20672
20201
  const idPrev = prev.id;
20673
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20202
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20674
20203
  const result = {
20675
20204
  version: version3,
20676
- dialect: dialect7,
20205
+ dialect: dialect4,
20677
20206
  id,
20678
20207
  prevId: idPrev,
20679
20208
  ...rest
20680
20209
  };
20681
20210
  return { prev, cur: result };
20682
20211
  };
20683
- preparePgDbPushSnapshot = async (prev, schemaPath, schemaFilter = ["public"]) => {
20684
- const serialized = await serializePg(schemaPath, schemaFilter);
20212
+ preparePgDbPushSnapshot = async (prev, schemaPath, casing2, schemaFilter = ["public"]) => {
20213
+ const serialized = await serializePg(schemaPath, casing2, schemaFilter);
20685
20214
  const id = (0, import_crypto.randomUUID)();
20686
20215
  const idPrev = prev.id;
20687
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20688
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20216
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20217
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20689
20218
  return { prev, cur: result };
20690
20219
  };
20691
- prepareMySqlMigrationSnapshot = async (migrationFolders, schemaPath) => {
20220
+ prepareMySqlMigrationSnapshot = async (migrationFolders, schemaPath, casing2) => {
20692
20221
  const prevSnapshot = mysqlSchema.parse(
20693
20222
  preparePrevSnapshot(migrationFolders, dryMySql)
20694
20223
  );
20695
- const serialized = await serializeMySql(schemaPath);
20224
+ const serialized = await serializeMySql(schemaPath, casing2);
20696
20225
  const id = (0, import_crypto.randomUUID)();
20697
20226
  const idPrev = prevSnapshot.id;
20698
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20699
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20227
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20228
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20700
20229
  const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot;
20701
20230
  const custom2 = {
20702
20231
  id,
@@ -20705,17 +20234,17 @@ var init_migrationPreparator = __esm({
20705
20234
  };
20706
20235
  return { prev: prevSnapshot, cur: result, custom: custom2 };
20707
20236
  };
20708
- prepareSqliteMigrationSnapshot = async (snapshots, schemaPath) => {
20237
+ prepareSqliteMigrationSnapshot = async (snapshots, schemaPath, casing2) => {
20709
20238
  const prevSnapshot = sqliteSchema.parse(
20710
20239
  preparePrevSnapshot(snapshots, drySQLite)
20711
20240
  );
20712
- const serialized = await serializeSQLite(schemaPath);
20241
+ const serialized = await serializeSQLite(schemaPath, casing2);
20713
20242
  const id = (0, import_crypto.randomUUID)();
20714
20243
  const idPrev = prevSnapshot.id;
20715
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20244
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20716
20245
  const result = {
20717
20246
  version: version3,
20718
- dialect: dialect7,
20247
+ dialect: dialect4,
20719
20248
  id,
20720
20249
  prevId: idPrev,
20721
20250
  ...rest
@@ -20735,9 +20264,9 @@ var init_migrationPreparator = __esm({
20735
20264
  }) => {
20736
20265
  return { id, prevId: idPrev, ...serialized };
20737
20266
  };
20738
- preparePgMigrationSnapshot = async (snapshots, schemaPath) => {
20267
+ preparePgMigrationSnapshot = async (snapshots, schemaPath, casing2) => {
20739
20268
  const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg));
20740
- const serialized = await serializePg(schemaPath);
20269
+ const serialized = await serializePg(schemaPath, casing2);
20741
20270
  const id = (0, import_crypto.randomUUID)();
20742
20271
  const idPrev = prevSnapshot.id;
20743
20272
  const result = { id, prevId: idPrev, ...serialized };
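Every prepare*Snapshot helper above now threads a casing argument through to the serializers. That value presumably originates from the kit configuration; a sketch under that assumption (the casing option name and its accepted values are assumptions, not shown in this diff):

```ts
// drizzle.config.ts — sketch only; the `casing` field and its values are assumed.
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "postgresql",
  schema: "./src/schema.ts",
  out: "./drizzle",
  casing: "snake_case", // re-cases column names, and names derived from them, in generated snapshots
});
```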
@@ -23118,7 +22647,6 @@ function applyJsonDiff(json1, json2) {
23118
22647
  difference.tables = difference.tables || {};
23119
22648
  difference.enums = difference.enums || {};
23120
22649
  difference.sequences = difference.sequences || {};
23121
- difference.views = difference.views || {};
23122
22650
  const schemaKeys = Object.keys(difference.schemas);
23123
22651
  for (let key of schemaKeys) {
23124
22652
  if (key.endsWith("__added") || key.endsWith("__deleted")) {
@@ -23174,71 +22702,6 @@ function applyJsonDiff(json1, json2) {
23174
22702
  const alteredSequences = sequencesEntries.filter((it) => !(it[0].includes("__added") || it[0].includes("__deleted")) && "values" in it[1]).map((it) => {
23175
22703
  return json2.sequences[it[0]];
23176
22704
  });
23177
- const viewsEntries = Object.entries(difference.views);
23178
- const alteredViews = viewsEntries.filter((it) => !(it[0].includes("__added") || it[0].includes("__deleted"))).map(
23179
- ([nameWithSchema, view2]) => {
23180
- const deletedWithOption = view2.with__deleted;
23181
- const addedWithOption = view2.with__added;
23182
- const deletedWith = Object.fromEntries(
23183
- Object.entries(view2.with || {}).filter((it) => it[0].endsWith("__deleted")).map(([key, value]) => {
23184
- return [key.replace("__deleted", ""), value];
23185
- })
23186
- );
23187
- const addedWith = Object.fromEntries(
23188
- Object.entries(view2.with || {}).filter((it) => it[0].endsWith("__added")).map(([key, value]) => {
23189
- return [key.replace("__added", ""), value];
23190
- })
23191
- );
23192
- const alterWith = Object.fromEntries(
23193
- Object.entries(view2.with || {}).filter(
23194
- (it) => typeof it[1].__old !== "undefined" && typeof it[1].__new !== "undefined"
23195
- ).map(
23196
- (it) => {
23197
- return [it[0], it[1].__new];
23198
- }
23199
- )
23200
- );
23201
- const alteredSchema = view2.schema;
23202
- const alteredDefinition = view2.definition;
23203
- const alteredExisting = view2.isExisting;
23204
- const addedTablespace = view2.tablespace__added;
23205
- const droppedTablespace = view2.tablespace__deleted;
23206
- const alterTablespaceTo = view2.tablespace;
23207
- let alteredTablespace;
23208
- if (addedTablespace)
23209
- alteredTablespace = { __new: addedTablespace, __old: "pg_default" };
23210
- if (droppedTablespace)
23211
- alteredTablespace = { __new: "pg_default", __old: droppedTablespace };
23212
- if (alterTablespaceTo)
23213
- alteredTablespace = alterTablespaceTo;
23214
- const addedUsing = view2.using__added;
23215
- const droppedUsing = view2.using__deleted;
23216
- const alterUsingTo = view2.using;
23217
- let alteredUsing;
23218
- if (addedUsing)
23219
- alteredUsing = { __new: addedUsing, __old: "heap" };
23220
- if (droppedUsing)
23221
- alteredUsing = { __new: "heap", __old: droppedUsing };
23222
- if (alterUsingTo)
23223
- alteredUsing = alterUsingTo;
23224
- return {
23225
- name: json2.views[nameWithSchema].name,
23226
- schema: json2.views[nameWithSchema].schema,
23227
- deletedWithOption,
23228
- addedWithOption,
23229
- alteredWith: {
23230
- deletedWith: Object.keys(deletedWith).length ? deletedWith : void 0,
23231
- addedWith: Object.keys(addedWith).length ? addedWith : void 0,
23232
- alterWith: Object.keys(alterWith).length ? alterWith : void 0
23233
- },
23234
- alteredSchema,
23235
- alteredDefinition,
23236
- alteredExisting,
23237
- alteredTablespace,
23238
- alteredUsing
23239
- };
23240
- }
23241
- );
23242
22705
  const alteredTablesWithColumns = Object.values(difference.tables).map(
23243
22706
  (table4) => {
23244
22707
  return findAlternationsInTable(table4);
@@ -23247,8 +22710,7 @@ function applyJsonDiff(json1, json2) {
23247
22710
  return {
23248
22711
  alteredTablesWithColumns,
23249
22712
  alteredEnums,
23250
- alteredSequences,
23251
- alteredViews
22713
+ alteredSequences
23252
22714
  };
23253
22715
  }
23254
22716
  var import_json_diff, mapArraysDiff, findAlternationsInTable, alternationsInColumn;
@@ -23630,10 +23092,10 @@ var init_jsonDiffer = __esm({
23630
23092
  });
23631
23093
 
23632
23094
  // src/sqlgenerator.ts
23633
- function fromJson(statements, dialect7, action, json2) {
23095
+ function fromJson(statements, dialect4, action, json2) {
23634
23096
  const result = statements.flatMap((statement) => {
23635
23097
  const filtered = convertors.filter((it) => {
23636
- return it.can(statement, dialect7);
23098
+ return it.can(statement, dialect4);
23637
23099
  });
23638
23100
  const convertor = filtered.length === 1 ? filtered[0] : void 0;
23639
23101
  if (!convertor) {
@@ -23643,7 +23105,7 @@ function fromJson(statements, dialect7, action, json2) {
23643
23105
  }).filter((it) => it !== "");
23644
23106
  return result;
23645
23107
  }
23646
- var pgNativeTypes, isPgNativeType, Convertor, PgCreateTableConvertor, MySqlCreateTableConvertor, SQLiteCreateTableConvertor, PgCreateViewConvertor, PgDropViewConvertor, PgRenameViewConvertor, PgAlterViewSchemaConvertor, PgAlterViewAddWithOptionConvertor, PgAlterViewDropWithOptionConvertor, PgAlterViewAlterTablespaceConvertor, PgAlterViewAlterUsingConvertor, PgAlterTableAlterColumnSetGenerated, PgAlterTableAlterColumnDropGenerated, PgAlterTableAlterColumnAlterGenerated, PgAlterTableAddUniqueConstraintConvertor, PgAlterTableDropUniqueConstraintConvertor, MySQLAlterTableAddUniqueConstraintConvertor, MySQLAlterTableDropUniqueConstraintConvertor, CreatePgSequenceConvertor, DropPgSequenceConvertor, RenamePgSequenceConvertor, MovePgSequenceConvertor, AlterPgSequenceConvertor, CreateTypeEnumConvertor, AlterTypeAddValueConvertor, PgDropTableConvertor, MySQLDropTableConvertor, SQLiteDropTableConvertor, PgRenameTableConvertor, SqliteRenameTableConvertor, MySqlRenameTableConvertor, PgAlterTableRenameColumnConvertor, MySqlAlterTableRenameColumnConvertor, SQLiteAlterTableRenameColumnConvertor, PgAlterTableDropColumnConvertor, MySqlAlterTableDropColumnConvertor, SQLiteAlterTableDropColumnConvertor, PgAlterTableAddColumnConvertor, MySqlAlterTableAddColumnConvertor, SQLiteAlterTableAddColumnConvertor, PgAlterTableAlterColumnSetTypeConvertor, PgAlterTableAlterColumnSetDefaultConvertor, PgAlterTableAlterColumnDropDefaultConvertor, PgAlterTableAlterColumnDropGeneratedConvertor, PgAlterTableAlterColumnSetExpressionConvertor, PgAlterTableAlterColumnAlterrGeneratedConvertor, SqliteAlterTableAlterColumnDropGeneratedConvertor, SqliteAlterTableAlterColumnSetExpressionConvertor, SqliteAlterTableAlterColumnAlterGeneratedConvertor, MySqlAlterTableAlterColumnAlterrGeneratedConvertor, MySqlAlterTableAddPk, MySqlAlterTableDropPk, LibSQLModifyColumn, MySqlModifyColumn, PgAlterTableCreateCompositePrimaryKeyConvertor, PgAlterTableDeleteCompositePrimaryKeyConvertor, PgAlterTableAlterCompositePrimaryKeyConvertor, MySqlAlterTableCreateCompositePrimaryKeyConvertor, MySqlAlterTableDeleteCompositePrimaryKeyConvertor, MySqlAlterTableAlterCompositePrimaryKeyConvertor, PgAlterTableAlterColumnSetPrimaryKeyConvertor, PgAlterTableAlterColumnDropPrimaryKeyConvertor, PgAlterTableAlterColumnSetNotNullConvertor, PgAlterTableAlterColumnDropNotNullConvertor, PgCreateForeignKeyConvertor, LibSQLCreateForeignKeyConvertor, MySqlCreateForeignKeyConvertor, PgAlterForeignKeyConvertor, PgDeleteForeignKeyConvertor, MySqlDeleteForeignKeyConvertor, CreatePgIndexConvertor, CreateMySqlIndexConvertor, CreateSqliteIndexConvertor, PgDropIndexConvertor, PgCreateSchemaConvertor, PgRenameSchemaConvertor, PgDropSchemaConvertor, PgAlterTableSetSchemaConvertor, PgAlterTableSetNewSchemaConvertor, PgAlterTableRemoveFromSchemaConvertor, SqliteDropIndexConvertor, MySqlDropIndexConvertor, SQLiteRecreateTableConvertor, LibSQLRecreateTableConvertor, convertors;
23108
+ var pgNativeTypes, isPgNativeType, Convertor, PgCreateTableConvertor, MySqlCreateTableConvertor, SQLiteCreateTableConvertor, PgAlterTableAlterColumnSetGenerated, PgAlterTableAlterColumnDropGenerated, PgAlterTableAlterColumnAlterGenerated, PgAlterTableAddUniqueConstraintConvertor, PgAlterTableDropUniqueConstraintConvertor, MySQLAlterTableAddUniqueConstraintConvertor, MySQLAlterTableDropUniqueConstraintConvertor, CreatePgSequenceConvertor, DropPgSequenceConvertor, RenamePgSequenceConvertor, MovePgSequenceConvertor, AlterPgSequenceConvertor, CreateTypeEnumConvertor, AlterTypeAddValueConvertor, PgDropTableConvertor, MySQLDropTableConvertor, SQLiteDropTableConvertor, PgRenameTableConvertor, SqliteRenameTableConvertor, MySqlRenameTableConvertor, PgAlterTableRenameColumnConvertor, MySqlAlterTableRenameColumnConvertor, SQLiteAlterTableRenameColumnConvertor, PgAlterTableDropColumnConvertor, MySqlAlterTableDropColumnConvertor, SQLiteAlterTableDropColumnConvertor, PgAlterTableAddColumnConvertor, MySqlAlterTableAddColumnConvertor, SQLiteAlterTableAddColumnConvertor, PgAlterTableAlterColumnSetTypeConvertor, PgAlterTableAlterColumnSetDefaultConvertor, PgAlterTableAlterColumnDropDefaultConvertor, PgAlterTableAlterColumnDropGeneratedConvertor, PgAlterTableAlterColumnSetExpressionConvertor, PgAlterTableAlterColumnAlterrGeneratedConvertor, SqliteAlterTableAlterColumnDropGeneratedConvertor, SqliteAlterTableAlterColumnSetExpressionConvertor, SqliteAlterTableAlterColumnAlterGeneratedConvertor, MySqlAlterTableAlterColumnAlterrGeneratedConvertor, MySqlAlterTableAddPk, MySqlAlterTableDropPk, LibSQLModifyColumn, MySqlModifyColumn, PgAlterTableCreateCompositePrimaryKeyConvertor, PgAlterTableDeleteCompositePrimaryKeyConvertor, PgAlterTableAlterCompositePrimaryKeyConvertor, MySqlAlterTableCreateCompositePrimaryKeyConvertor, MySqlAlterTableDeleteCompositePrimaryKeyConvertor, MySqlAlterTableAlterCompositePrimaryKeyConvertor, PgAlterTableAlterColumnSetPrimaryKeyConvertor, PgAlterTableAlterColumnDropPrimaryKeyConvertor, PgAlterTableAlterColumnSetNotNullConvertor, PgAlterTableAlterColumnDropNotNullConvertor, PgCreateForeignKeyConvertor, LibSQLCreateForeignKeyConvertor, MySqlCreateForeignKeyConvertor, PgAlterForeignKeyConvertor, PgDeleteForeignKeyConvertor, MySqlDeleteForeignKeyConvertor, CreatePgIndexConvertor, CreateMySqlIndexConvertor, CreateSqliteIndexConvertor, PgDropIndexConvertor, PgCreateSchemaConvertor, PgRenameSchemaConvertor, PgDropSchemaConvertor, PgAlterTableSetSchemaConvertor, PgAlterTableSetNewSchemaConvertor, PgAlterTableRemoveFromSchemaConvertor, SqliteDropIndexConvertor, MySqlDropIndexConvertor, SQLiteRecreateTableConvertor, LibSQLRecreateTableConvertor, convertors;
23647
23109
  var init_sqlgenerator = __esm({
23648
23110
  "src/sqlgenerator.ts"() {
23649
23111
  "use strict";
@@ -23701,8 +23163,8 @@ var init_sqlgenerator = __esm({
23701
23163
  Convertor = class {
23702
23164
  };
23703
23165
  PgCreateTableConvertor = class extends Convertor {
23704
- can(statement, dialect7) {
23705
- return statement.type === "create_table" && dialect7 === "postgresql";
23166
+ can(statement, dialect4) {
23167
+ return statement.type === "create_table" && dialect4 === "postgresql";
23706
23168
  }
23707
23169
  convert(st) {
23708
23170
  const { tableName, schema: schema5, columns, compositePKs, uniqueConstraints } = st;
@@ -23746,8 +23208,8 @@ var init_sqlgenerator = __esm({
23746
23208
  }
23747
23209
  };
23748
23210
  MySqlCreateTableConvertor = class extends Convertor {
23749
- can(statement, dialect7) {
23750
- return statement.type === "create_table" && dialect7 === "mysql";
23211
+ can(statement, dialect4) {
23212
+ return statement.type === "create_table" && dialect4 === "mysql";
23751
23213
  }
23752
23214
  convert(st) {
23753
23215
  var _a, _b;
@@ -23797,8 +23259,8 @@ var init_sqlgenerator = __esm({
23797
23259
  }
23798
23260
  };
23799
23261
  SQLiteCreateTableConvertor = class extends Convertor {
23800
- can(statement, dialect7) {
23801
- return statement.type === "sqlite_create_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23262
+ can(statement, dialect4) {
23263
+ return statement.type === "sqlite_create_table" && (dialect4 === "sqlite" || dialect4 === "turso");
23802
23264
  }
23803
23265
  convert(st) {
23804
23266
  const {
@@ -23859,121 +23321,9 @@ var init_sqlgenerator = __esm({
23859
23321
  return statement;
23860
23322
  }
23861
23323
  };
23862
- PgCreateViewConvertor = class extends Convertor {
23863
- can(statement, dialect7) {
23864
- return statement.type === "create_view" && dialect7 === "postgresql";
23865
- }
23866
- convert(st) {
23867
- const { definition, name: viewName, schema: schema5, with: withOption, materialized, withNoData, tablespace, using } = st;
23868
- const name = schema5 ? `"${schema5}"."${viewName}"` : `"${viewName}"`;
23869
- let statement = materialized ? `CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`;
23870
- if (using)
23871
- statement += ` USING "${using}"`;
23872
- const options = [];
23873
- if (withOption) {
23874
- statement += ` WITH (`;
23875
- Object.entries(withOption).forEach(([key, value]) => {
23876
- if (typeof value === "undefined")
23877
- return;
23878
- options.push(`${key.snake_case()} = ${value}`);
23879
- });
23880
- statement += options.join(", ");
23881
- statement += `)`;
23882
- }
23883
- if (tablespace)
23884
- statement += ` TABLESPACE ${tablespace}`;
23885
- statement += ` AS (${definition})`;
23886
- if (withNoData)
23887
- statement += ` WITH NO DATA`;
23888
- statement += `;`;
23889
- return statement;
23890
- }
23891
- };
23892
- PgDropViewConvertor = class extends Convertor {
23893
- can(statement, dialect7) {
23894
- return statement.type === "drop_view" && dialect7 === "postgresql";
23895
- }
23896
- convert(st) {
23897
- const { name: viewName, schema: schema5, materialized } = st;
23898
- const name = schema5 ? `"${schema5}"."${viewName}"` : `"${viewName}"`;
23899
- return `DROP${materialized ? " MATERIALIZED" : ""} VIEW ${name};`;
23900
- }
23901
- };
23902
- PgRenameViewConvertor = class extends Convertor {
23903
- can(statement, dialect7) {
23904
- return statement.type === "rename_view" && dialect7 === "postgresql";
23905
- }
23906
- convert(st) {
23907
- const { nameFrom: from, nameTo: to, schema: schema5, materialized } = st;
23908
- const nameFrom = `"${schema5}"."${from}"`;
23909
- return `ALTER${materialized ? " MATERIALIZED" : ""} VIEW ${nameFrom} RENAME TO "${to}";`;
23910
- }
23911
- };
23912
- PgAlterViewSchemaConvertor = class extends Convertor {
23913
- can(statement, dialect7) {
23914
- return statement.type === "alter_view_alter_schema" && dialect7 === "postgresql";
23915
- }
23916
- convert(st) {
23917
- const { fromSchema, toSchema, name, materialized } = st;
23918
- const statement = `ALTER${materialized ? " MATERIALIZED" : ""} VIEW "${fromSchema}"."${name}" SET SCHEMA "${toSchema}";`;
23919
- return statement;
23920
- }
23921
- };
23922
- PgAlterViewAddWithOptionConvertor = class extends Convertor {
23923
- can(statement, dialect7) {
23924
- return statement.type === "alter_view_add_with_option" && dialect7 === "postgresql";
23925
- }
23926
- convert(st) {
23927
- const { schema: schema5, with: withOption, name, materialized } = st;
23928
- let statement = `ALTER${materialized ? " MATERIALIZED" : ""} VIEW "${schema5}"."${name}" SET (`;
23929
- const options = [];
23930
- Object.entries(withOption).forEach(([key, value]) => {
23931
- options.push(`${key.snake_case()} = ${value}`);
23932
- });
23933
- statement += options.join(", ");
23934
- statement += `);`;
23935
- return statement;
23936
- }
23937
- };
23938
- PgAlterViewDropWithOptionConvertor = class extends Convertor {
23939
- can(statement, dialect7) {
23940
- return statement.type === "alter_view_drop_with_option" && dialect7 === "postgresql";
23941
- }
23942
- convert(st) {
23943
- const { schema: schema5, name, materialized, with: withOptions } = st;
23944
- let statement = `ALTER${materialized ? " MATERIALIZED" : ""} VIEW "${schema5}"."${name}" RESET (`;
23945
- const options = [];
23946
- Object.entries(withOptions).forEach(([key, value]) => {
23947
- options.push(`${key.snake_case()}`);
23948
- });
23949
- statement += options.join(", ");
23950
- statement += ");";
23951
- return statement;
23952
- }
23953
- };
23954
- PgAlterViewAlterTablespaceConvertor = class extends Convertor {
23955
- can(statement, dialect7) {
23956
- return statement.type === "alter_view_alter_tablespace" && dialect7 === "postgresql";
23957
- }
23958
- convert(st) {
23959
- const { schema: schema5, name, toTablespace } = st;
23960
- const statement = `ALTER MATERIALIZED VIEW "${schema5}"."${name}" SET TABLESPACE ${toTablespace};`;
23961
- return statement;
23962
- }
23963
- };
23964
- PgAlterViewAlterUsingConvertor = class extends Convertor {
23965
- can(statement, dialect7) {
23966
- return statement.type === "alter_view_alter_using" && dialect7 === "postgresql";
23967
- }
23968
- convert(st) {
23969
- const { schema: schema5, name, toUsing } = st;
23970
- const statement = `ALTER MATERIALIZED VIEW "${schema5}"."${name}" SET ACCESS METHOD "${toUsing}";`;
23971
- return statement;
23972
- }
23973
- };
23974
23324
  PgAlterTableAlterColumnSetGenerated = class extends Convertor {
23975
- can(statement, dialect7) {
23976
- return statement.type === "alter_table_alter_column_set_identity" && dialect7 === "postgresql";
23325
+ can(statement, dialect4) {
23326
+ return statement.type === "alter_table_alter_column_set_identity" && dialect4 === "postgresql";
23977
23327
  }
23978
23328
  convert(statement) {
23979
23329
  const { identity, tableName, columnName, schema: schema5 } = statement;
@@ -23985,8 +23335,8 @@ var init_sqlgenerator = __esm({
23985
23335
  }
23986
23336
  };
23987
23337
  PgAlterTableAlterColumnDropGenerated = class extends Convertor {
23988
- can(statement, dialect7) {
23989
- return statement.type === "alter_table_alter_column_drop_identity" && dialect7 === "postgresql";
23338
+ can(statement, dialect4) {
23339
+ return statement.type === "alter_table_alter_column_drop_identity" && dialect4 === "postgresql";
23990
23340
  }
23991
23341
  convert(statement) {
23992
23342
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23995,8 +23345,8 @@ var init_sqlgenerator = __esm({
23995
23345
  }
23996
23346
  };
23997
23347
  PgAlterTableAlterColumnAlterGenerated = class extends Convertor {
23998
- can(statement, dialect7) {
23999
- return statement.type === "alter_table_alter_column_change_identity" && dialect7 === "postgresql";
23348
+ can(statement, dialect4) {
23349
+ return statement.type === "alter_table_alter_column_change_identity" && dialect4 === "postgresql";
24000
23350
  }
24001
23351
  convert(statement) {
24002
23352
  const { identity, oldIdentity, tableName, columnName, schema: schema5 } = statement;
@@ -24043,8 +23393,8 @@ var init_sqlgenerator = __esm({
24043
23393
  }
24044
23394
  };
24045
23395
  PgAlterTableAddUniqueConstraintConvertor = class extends Convertor {
24046
- can(statement, dialect7) {
24047
- return statement.type === "create_unique_constraint" && dialect7 === "postgresql";
23396
+ can(statement, dialect4) {
23397
+ return statement.type === "create_unique_constraint" && dialect4 === "postgresql";
24048
23398
  }
24049
23399
  convert(statement) {
24050
23400
  const unsquashed = PgSquasher.unsquashUnique(statement.data);
@@ -24053,8 +23403,8 @@ var init_sqlgenerator = __esm({
24053
23403
  }
24054
23404
  };
24055
23405
  PgAlterTableDropUniqueConstraintConvertor = class extends Convertor {
24056
- can(statement, dialect7) {
24057
- return statement.type === "delete_unique_constraint" && dialect7 === "postgresql";
23406
+ can(statement, dialect4) {
23407
+ return statement.type === "delete_unique_constraint" && dialect4 === "postgresql";
24058
23408
  }
24059
23409
  convert(statement) {
24060
23410
  const unsquashed = PgSquasher.unsquashUnique(statement.data);
@@ -24063,8 +23413,8 @@ var init_sqlgenerator = __esm({
24063
23413
  }
24064
23414
  };
24065
23415
  MySQLAlterTableAddUniqueConstraintConvertor = class extends Convertor {
24066
- can(statement, dialect7) {
24067
- return statement.type === "create_unique_constraint" && dialect7 === "mysql";
23416
+ can(statement, dialect4) {
23417
+ return statement.type === "create_unique_constraint" && dialect4 === "mysql";
24068
23418
  }
24069
23419
  convert(statement) {
24070
23420
  const unsquashed = MySqlSquasher.unsquashUnique(statement.data);
@@ -24072,8 +23422,8 @@ var init_sqlgenerator = __esm({
24072
23422
  }
24073
23423
  };
24074
23424
  MySQLAlterTableDropUniqueConstraintConvertor = class extends Convertor {
24075
- can(statement, dialect7) {
24076
- return statement.type === "delete_unique_constraint" && dialect7 === "mysql";
23425
+ can(statement, dialect4) {
23426
+ return statement.type === "delete_unique_constraint" && dialect4 === "mysql";
24077
23427
  }
24078
23428
  convert(statement) {
24079
23429
  const unsquashed = MySqlSquasher.unsquashUnique(statement.data);
@@ -24081,8 +23431,8 @@ var init_sqlgenerator = __esm({
24081
23431
  }
24082
23432
  };
24083
23433
  CreatePgSequenceConvertor = class extends Convertor {
24084
- can(statement, dialect7) {
24085
- return statement.type === "create_sequence" && dialect7 === "postgresql";
23434
+ can(statement, dialect4) {
23435
+ return statement.type === "create_sequence" && dialect4 === "postgresql";
24086
23436
  }
24087
23437
  convert(st) {
24088
23438
  const { name, values, schema: schema5 } = st;
@@ -24091,8 +23441,8 @@ var init_sqlgenerator = __esm({
24091
23441
  }
24092
23442
  };
24093
23443
  DropPgSequenceConvertor = class extends Convertor {
24094
- can(statement, dialect7) {
24095
- return statement.type === "drop_sequence" && dialect7 === "postgresql";
23444
+ can(statement, dialect4) {
23445
+ return statement.type === "drop_sequence" && dialect4 === "postgresql";
24096
23446
  }
24097
23447
  convert(st) {
24098
23448
  const { name, schema: schema5 } = st;
@@ -24101,8 +23451,8 @@ var init_sqlgenerator = __esm({
24101
23451
  }
24102
23452
  };
24103
23453
  RenamePgSequenceConvertor = class extends Convertor {
24104
- can(statement, dialect7) {
24105
- return statement.type === "rename_sequence" && dialect7 === "postgresql";
23454
+ can(statement, dialect4) {
23455
+ return statement.type === "rename_sequence" && dialect4 === "postgresql";
24106
23456
  }
24107
23457
  convert(st) {
24108
23458
  const { nameFrom, nameTo, schema: schema5 } = st;
@@ -24112,8 +23462,8 @@ var init_sqlgenerator = __esm({
24112
23462
  }
24113
23463
  };
24114
23464
  MovePgSequenceConvertor = class extends Convertor {
24115
- can(statement, dialect7) {
24116
- return statement.type === "move_sequence" && dialect7 === "postgresql";
23465
+ can(statement, dialect4) {
23466
+ return statement.type === "move_sequence" && dialect4 === "postgresql";
24117
23467
  }
24118
23468
  convert(st) {
24119
23469
  const { schemaFrom, schemaTo, name } = st;
@@ -24123,8 +23473,8 @@ var init_sqlgenerator = __esm({
24123
23473
  }
24124
23474
  };
24125
23475
  AlterPgSequenceConvertor = class extends Convertor {
24126
- can(statement, dialect7) {
24127
- return statement.type === "alter_sequence" && dialect7 === "postgresql";
23476
+ can(statement, dialect4) {
23477
+ return statement.type === "alter_sequence" && dialect4 === "postgresql";
24128
23478
  }
24129
23479
  convert(st) {
24130
23480
  const { name, schema: schema5, values } = st;
@@ -24167,8 +23517,8 @@ var init_sqlgenerator = __esm({
24167
23517
  }
24168
23518
  };
24169
23519
  PgDropTableConvertor = class extends Convertor {
24170
- can(statement, dialect7) {
24171
- return statement.type === "drop_table" && dialect7 === "postgresql";
23520
+ can(statement, dialect4) {
23521
+ return statement.type === "drop_table" && dialect4 === "postgresql";
24172
23522
  }
24173
23523
  convert(statement) {
24174
23524
  const { tableName, schema: schema5 } = statement;
@@ -24177,8 +23527,8 @@ var init_sqlgenerator = __esm({
24177
23527
  }
24178
23528
  };
24179
23529
  MySQLDropTableConvertor = class extends Convertor {
24180
- can(statement, dialect7) {
24181
- return statement.type === "drop_table" && dialect7 === "mysql";
23530
+ can(statement, dialect4) {
23531
+ return statement.type === "drop_table" && dialect4 === "mysql";
24182
23532
  }
24183
23533
  convert(statement) {
24184
23534
  const { tableName } = statement;
@@ -24186,8 +23536,8 @@ var init_sqlgenerator = __esm({
24186
23536
  }
24187
23537
  };
24188
23538
  SQLiteDropTableConvertor = class extends Convertor {
24189
- can(statement, dialect7) {
24190
- return statement.type === "drop_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23539
+ can(statement, dialect4) {
23540
+ return statement.type === "drop_table" && (dialect4 === "sqlite" || dialect4 === "turso");
24191
23541
  }
24192
23542
  convert(statement) {
24193
23543
  const { tableName } = statement;
@@ -24195,8 +23545,8 @@ var init_sqlgenerator = __esm({
24195
23545
  }
24196
23546
  };
24197
23547
  PgRenameTableConvertor = class extends Convertor {
24198
- can(statement, dialect7) {
24199
- return statement.type === "rename_table" && dialect7 === "postgresql";
23548
+ can(statement, dialect4) {
23549
+ return statement.type === "rename_table" && dialect4 === "postgresql";
24200
23550
  }
24201
23551
  convert(statement) {
24202
23552
  const { tableNameFrom, tableNameTo, toSchema, fromSchema } = statement;
@@ -24206,8 +23556,8 @@ var init_sqlgenerator = __esm({
24206
23556
  }
24207
23557
  };
24208
23558
  SqliteRenameTableConvertor = class extends Convertor {
24209
- can(statement, dialect7) {
24210
- return statement.type === "rename_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23559
+ can(statement, dialect4) {
23560
+ return statement.type === "rename_table" && (dialect4 === "sqlite" || dialect4 === "turso");
24211
23561
  }
24212
23562
  convert(statement) {
24213
23563
  const { tableNameFrom, tableNameTo } = statement;
@@ -24215,8 +23565,8 @@ var init_sqlgenerator = __esm({
24215
23565
  }
24216
23566
  };
24217
23567
  MySqlRenameTableConvertor = class extends Convertor {
24218
- can(statement, dialect7) {
24219
- return statement.type === "rename_table" && dialect7 === "mysql";
23568
+ can(statement, dialect4) {
23569
+ return statement.type === "rename_table" && dialect4 === "mysql";
24220
23570
  }
24221
23571
  convert(statement) {
24222
23572
  const { tableNameFrom, tableNameTo } = statement;
@@ -24224,8 +23574,8 @@ var init_sqlgenerator = __esm({
24224
23574
  }
24225
23575
  };
24226
23576
  PgAlterTableRenameColumnConvertor = class extends Convertor {
24227
- can(statement, dialect7) {
24228
- return statement.type === "alter_table_rename_column" && dialect7 === "postgresql";
23577
+ can(statement, dialect4) {
23578
+ return statement.type === "alter_table_rename_column" && dialect4 === "postgresql";
24229
23579
  }
24230
23580
  convert(statement) {
24231
23581
  const { tableName, oldColumnName, newColumnName, schema: schema5 } = statement;
@@ -24234,8 +23584,8 @@ var init_sqlgenerator = __esm({
24234
23584
  }
24235
23585
  };
24236
23586
  MySqlAlterTableRenameColumnConvertor = class extends Convertor {
24237
- can(statement, dialect7) {
24238
- return statement.type === "alter_table_rename_column" && dialect7 === "mysql";
23587
+ can(statement, dialect4) {
23588
+ return statement.type === "alter_table_rename_column" && dialect4 === "mysql";
24239
23589
  }
24240
23590
  convert(statement) {
24241
23591
  const { tableName, oldColumnName, newColumnName } = statement;
@@ -24243,8 +23593,8 @@ var init_sqlgenerator = __esm({
24243
23593
  }
24244
23594
  };
24245
23595
  SQLiteAlterTableRenameColumnConvertor = class extends Convertor {
24246
- can(statement, dialect7) {
24247
- return statement.type === "alter_table_rename_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23596
+ can(statement, dialect4) {
23597
+ return statement.type === "alter_table_rename_column" && (dialect4 === "sqlite" || dialect4 === "turso");
24248
23598
  }
24249
23599
  convert(statement) {
24250
23600
  const { tableName, oldColumnName, newColumnName } = statement;
@@ -24252,8 +23602,8 @@ var init_sqlgenerator = __esm({
24252
23602
  }
24253
23603
  };
24254
23604
  PgAlterTableDropColumnConvertor = class extends Convertor {
24255
- can(statement, dialect7) {
24256
- return statement.type === "alter_table_drop_column" && dialect7 === "postgresql";
23605
+ can(statement, dialect4) {
23606
+ return statement.type === "alter_table_drop_column" && dialect4 === "postgresql";
24257
23607
  }
24258
23608
  convert(statement) {
24259
23609
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24262,8 +23612,8 @@ var init_sqlgenerator = __esm({
24262
23612
  }
24263
23613
  };
24264
23614
  MySqlAlterTableDropColumnConvertor = class extends Convertor {
24265
- can(statement, dialect7) {
24266
- return statement.type === "alter_table_drop_column" && dialect7 === "mysql";
23615
+ can(statement, dialect4) {
23616
+ return statement.type === "alter_table_drop_column" && dialect4 === "mysql";
24267
23617
  }
24268
23618
  convert(statement) {
24269
23619
  const { tableName, columnName } = statement;
@@ -24271,8 +23621,8 @@ var init_sqlgenerator = __esm({
24271
23621
  }
24272
23622
  };
24273
23623
  SQLiteAlterTableDropColumnConvertor = class extends Convertor {
24274
- can(statement, dialect7) {
24275
- return statement.type === "alter_table_drop_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23624
+ can(statement, dialect4) {
23625
+ return statement.type === "alter_table_drop_column" && (dialect4 === "sqlite" || dialect4 === "turso");
24276
23626
  }
24277
23627
  convert(statement) {
24278
23628
  const { tableName, columnName } = statement;
@@ -24280,8 +23630,8 @@ var init_sqlgenerator = __esm({
24280
23630
  }
24281
23631
  };
24282
23632
  PgAlterTableAddColumnConvertor = class extends Convertor {
24283
- can(statement, dialect7) {
24284
- return statement.type === "alter_table_add_column" && dialect7 === "postgresql";
23633
+ can(statement, dialect4) {
23634
+ return statement.type === "alter_table_add_column" && dialect4 === "postgresql";
24285
23635
  }
24286
23636
  convert(statement) {
24287
23637
  const { tableName, column: column7, schema: schema5 } = statement;
@@ -24300,8 +23650,8 @@ var init_sqlgenerator = __esm({
24300
23650
  }
24301
23651
  };
24302
23652
  MySqlAlterTableAddColumnConvertor = class extends Convertor {
24303
- can(statement, dialect7) {
24304
- return statement.type === "alter_table_add_column" && dialect7 === "mysql";
23653
+ can(statement, dialect4) {
23654
+ return statement.type === "alter_table_add_column" && dialect4 === "mysql";
24305
23655
  }
24306
23656
  convert(statement) {
24307
23657
  const { tableName, column: column7 } = statement;
@@ -24324,8 +23674,8 @@ var init_sqlgenerator = __esm({
24324
23674
  }
24325
23675
  };
24326
23676
  SQLiteAlterTableAddColumnConvertor = class extends Convertor {
24327
- can(statement, dialect7) {
24328
- return statement.type === "sqlite_alter_table_add_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23677
+ can(statement, dialect4) {
23678
+ return statement.type === "sqlite_alter_table_add_column" && (dialect4 === "sqlite" || dialect4 === "turso");
24329
23679
  }
24330
23680
  convert(statement) {
24331
23681
  const { tableName, column: column7, referenceData } = statement;
@@ -24340,8 +23690,8 @@ var init_sqlgenerator = __esm({
24340
23690
  }
24341
23691
  };
24342
23692
  PgAlterTableAlterColumnSetTypeConvertor = class extends Convertor {
24343
- can(statement, dialect7) {
24344
- return statement.type === "alter_table_alter_column_set_type" && dialect7 === "postgresql";
23693
+ can(statement, dialect4) {
23694
+ return statement.type === "alter_table_alter_column_set_type" && dialect4 === "postgresql";
24345
23695
  }
24346
23696
  convert(statement) {
24347
23697
  const { tableName, columnName, newDataType, schema: schema5 } = statement;
@@ -24350,8 +23700,8 @@ var init_sqlgenerator = __esm({
24350
23700
  }
24351
23701
  };
24352
23702
  PgAlterTableAlterColumnSetDefaultConvertor = class extends Convertor {
24353
- can(statement, dialect7) {
24354
- return statement.type === "alter_table_alter_column_set_default" && dialect7 === "postgresql";
23703
+ can(statement, dialect4) {
23704
+ return statement.type === "alter_table_alter_column_set_default" && dialect4 === "postgresql";
24355
23705
  }
24356
23706
  convert(statement) {
24357
23707
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24360,8 +23710,8 @@ var init_sqlgenerator = __esm({
24360
23710
  }
24361
23711
  };
24362
23712
  PgAlterTableAlterColumnDropDefaultConvertor = class extends Convertor {
24363
- can(statement, dialect7) {
24364
- return statement.type === "alter_table_alter_column_drop_default" && dialect7 === "postgresql";
23713
+ can(statement, dialect4) {
23714
+ return statement.type === "alter_table_alter_column_drop_default" && dialect4 === "postgresql";
24365
23715
  }
24366
23716
  convert(statement) {
24367
23717
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24370,8 +23720,8 @@ var init_sqlgenerator = __esm({
24370
23720
  }
24371
23721
  };
24372
23722
  PgAlterTableAlterColumnDropGeneratedConvertor = class extends Convertor {
24373
- can(statement, dialect7) {
24374
- return statement.type === "alter_table_alter_column_drop_generated" && dialect7 === "postgresql";
23723
+ can(statement, dialect4) {
23724
+ return statement.type === "alter_table_alter_column_drop_generated" && dialect4 === "postgresql";
24375
23725
  }
24376
23726
  convert(statement) {
24377
23727
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24380,8 +23730,8 @@ var init_sqlgenerator = __esm({
24380
23730
  }
24381
23731
  };
24382
23732
  PgAlterTableAlterColumnSetExpressionConvertor = class extends Convertor {
24383
- can(statement, dialect7) {
24384
- return statement.type === "alter_table_alter_column_set_generated" && dialect7 === "postgresql";
23733
+ can(statement, dialect4) {
23734
+ return statement.type === "alter_table_alter_column_set_generated" && dialect4 === "postgresql";
24385
23735
  }
24386
23736
  convert(statement) {
24387
23737
  const {
@@ -24418,8 +23768,8 @@ var init_sqlgenerator = __esm({
24418
23768
  }
24419
23769
  };
24420
23770
  PgAlterTableAlterColumnAlterrGeneratedConvertor = class extends Convertor {
24421
- can(statement, dialect7) {
24422
- return statement.type === "alter_table_alter_column_alter_generated" && dialect7 === "postgresql";
23771
+ can(statement, dialect4) {
23772
+ return statement.type === "alter_table_alter_column_alter_generated" && dialect4 === "postgresql";
24423
23773
  }
24424
23774
  convert(statement) {
24425
23775
  const {
@@ -24456,8 +23806,8 @@ var init_sqlgenerator = __esm({
24456
23806
  }
24457
23807
  };
24458
23808
  SqliteAlterTableAlterColumnDropGeneratedConvertor = class extends Convertor {
24459
- can(statement, dialect7) {
24460
- return statement.type === "alter_table_alter_column_drop_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23809
+ can(statement, dialect4) {
23810
+ return statement.type === "alter_table_alter_column_drop_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
24461
23811
  }
24462
23812
  convert(statement) {
24463
23813
  const {
@@ -24497,8 +23847,8 @@ var init_sqlgenerator = __esm({
24497
23847
  }
24498
23848
  };
24499
23849
  SqliteAlterTableAlterColumnSetExpressionConvertor = class extends Convertor {
24500
- can(statement, dialect7) {
24501
- return statement.type === "alter_table_alter_column_set_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23850
+ can(statement, dialect4) {
23851
+ return statement.type === "alter_table_alter_column_set_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
24502
23852
  }
24503
23853
  convert(statement) {
24504
23854
  const {
@@ -24538,8 +23888,8 @@ var init_sqlgenerator = __esm({
24538
23888
  }
24539
23889
  };
24540
23890
  SqliteAlterTableAlterColumnAlterGeneratedConvertor = class extends Convertor {
24541
- can(statement, dialect7) {
24542
- return statement.type === "alter_table_alter_column_alter_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23891
+ can(statement, dialect4) {
23892
+ return statement.type === "alter_table_alter_column_alter_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
24543
23893
  }
24544
23894
  convert(statement) {
24545
23895
  const {
@@ -24579,8 +23929,8 @@ var init_sqlgenerator = __esm({
24579
23929
  }
24580
23930
  };
24581
23931
  MySqlAlterTableAlterColumnAlterrGeneratedConvertor = class extends Convertor {
24582
- can(statement, dialect7) {
24583
- return statement.type === "alter_table_alter_column_alter_generated" && dialect7 === "mysql";
23932
+ can(statement, dialect4) {
23933
+ return statement.type === "alter_table_alter_column_alter_generated" && dialect4 === "mysql";
24584
23934
  }
24585
23935
  convert(statement) {
24586
23936
  const {
@@ -24617,24 +23967,24 @@ var init_sqlgenerator = __esm({
24617
23967
  }
24618
23968
  };
24619
23969
  MySqlAlterTableAddPk = class extends Convertor {
24620
- can(statement, dialect7) {
24621
- return statement.type === "alter_table_alter_column_set_pk" && dialect7 === "mysql";
23970
+ can(statement, dialect4) {
23971
+ return statement.type === "alter_table_alter_column_set_pk" && dialect4 === "mysql";
24622
23972
  }
24623
23973
  convert(statement) {
24624
23974
  return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`;
24625
23975
  }
24626
23976
  };
24627
23977
  MySqlAlterTableDropPk = class extends Convertor {
24628
- can(statement, dialect7) {
24629
- return statement.type === "alter_table_alter_column_drop_pk" && dialect7 === "mysql";
23978
+ can(statement, dialect4) {
23979
+ return statement.type === "alter_table_alter_column_drop_pk" && dialect4 === "mysql";
24630
23980
  }
24631
23981
  convert(statement) {
24632
23982
  return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`;
24633
23983
  }
24634
23984
  };
24635
23985
  LibSQLModifyColumn = class extends Convertor {
24636
- can(statement, dialect7) {
24637
- return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default") && dialect7 === "turso";
23986
+ can(statement, dialect4) {
23987
+ return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default") && dialect4 === "turso";
24638
23988
  }
24639
23989
  convert(statement, json2) {
24640
23990
  const { tableName, columnName } = statement;
@@ -24694,8 +24044,8 @@ var init_sqlgenerator = __esm({
24694
24044
  }
24695
24045
  };
24696
24046
  MySqlModifyColumn = class extends Convertor {
24697
- can(statement, dialect7) {
24698
- return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_drop_on_update" || statement.type === "alter_table_alter_column_set_on_update" || statement.type === "alter_table_alter_column_set_autoincrement" || statement.type === "alter_table_alter_column_drop_autoincrement" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default" || statement.type === "alter_table_alter_column_set_generated" || statement.type === "alter_table_alter_column_drop_generated") && dialect7 === "mysql";
24047
+ can(statement, dialect4) {
24048
+ return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_drop_on_update" || statement.type === "alter_table_alter_column_set_on_update" || statement.type === "alter_table_alter_column_set_autoincrement" || statement.type === "alter_table_alter_column_drop_autoincrement" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default" || statement.type === "alter_table_alter_column_set_generated" || statement.type === "alter_table_alter_column_drop_generated") && dialect4 === "mysql";
24699
24049
  }
24700
24050
  convert(statement) {
24701
24051
  var _a, _b, _c, _d, _e, _f, _g;
@@ -24832,8 +24182,8 @@ var init_sqlgenerator = __esm({
24832
24182
  }
24833
24183
  };
24834
24184
  PgAlterTableCreateCompositePrimaryKeyConvertor = class extends Convertor {
24835
- can(statement, dialect7) {
24836
- return statement.type === "create_composite_pk" && dialect7 === "postgresql";
24185
+ can(statement, dialect4) {
24186
+ return statement.type === "create_composite_pk" && dialect4 === "postgresql";
24837
24187
  }
24838
24188
  convert(statement) {
24839
24189
  const { name, columns } = PgSquasher.unsquashPK(statement.data);
@@ -24842,8 +24192,8 @@ var init_sqlgenerator = __esm({
24842
24192
  }
24843
24193
  };
24844
24194
  PgAlterTableDeleteCompositePrimaryKeyConvertor = class extends Convertor {
24845
- can(statement, dialect7) {
24846
- return statement.type === "delete_composite_pk" && dialect7 === "postgresql";
24195
+ can(statement, dialect4) {
24196
+ return statement.type === "delete_composite_pk" && dialect4 === "postgresql";
24847
24197
  }
24848
24198
  convert(statement) {
24849
24199
  const { name, columns } = PgSquasher.unsquashPK(statement.data);
@@ -24852,8 +24202,8 @@ var init_sqlgenerator = __esm({
24852
24202
  }
24853
24203
  };
24854
24204
  PgAlterTableAlterCompositePrimaryKeyConvertor = class extends Convertor {
24855
- can(statement, dialect7) {
24856
- return statement.type === "alter_composite_pk" && dialect7 === "postgresql";
24205
+ can(statement, dialect4) {
24206
+ return statement.type === "alter_composite_pk" && dialect4 === "postgresql";
24857
24207
  }
24858
24208
  convert(statement) {
24859
24209
  const { name, columns } = PgSquasher.unsquashPK(statement.old);
@@ -24866,8 +24216,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24866
24216
  }
24867
24217
  };
24868
24218
  MySqlAlterTableCreateCompositePrimaryKeyConvertor = class extends Convertor {
24869
- can(statement, dialect7) {
24870
- return statement.type === "create_composite_pk" && dialect7 === "mysql";
24219
+ can(statement, dialect4) {
24220
+ return statement.type === "create_composite_pk" && dialect4 === "mysql";
24871
24221
  }
24872
24222
  convert(statement) {
24873
24223
  const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -24875,8 +24225,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24875
24225
  }
24876
24226
  };
24877
24227
  MySqlAlterTableDeleteCompositePrimaryKeyConvertor = class extends Convertor {
24878
- can(statement, dialect7) {
24879
- return statement.type === "delete_composite_pk" && dialect7 === "mysql";
24228
+ can(statement, dialect4) {
24229
+ return statement.type === "delete_composite_pk" && dialect4 === "mysql";
24880
24230
  }
24881
24231
  convert(statement) {
24882
24232
  const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -24884,8 +24234,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24884
24234
  }
24885
24235
  };
24886
24236
  MySqlAlterTableAlterCompositePrimaryKeyConvertor = class extends Convertor {
24887
- can(statement, dialect7) {
24888
- return statement.type === "alter_composite_pk" && dialect7 === "mysql";
24237
+ can(statement, dialect4) {
24238
+ return statement.type === "alter_composite_pk" && dialect4 === "mysql";
24889
24239
  }
24890
24240
  convert(statement) {
24891
24241
  const { name, columns } = MySqlSquasher.unsquashPK(statement.old);
@@ -24896,8 +24246,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24896
24246
  }
24897
24247
  };
24898
24248
  PgAlterTableAlterColumnSetPrimaryKeyConvertor = class extends Convertor {
24899
- can(statement, dialect7) {
24900
- return statement.type === "alter_table_alter_column_set_pk" && dialect7 === "postgresql";
24249
+ can(statement, dialect4) {
24250
+ return statement.type === "alter_table_alter_column_set_pk" && dialect4 === "postgresql";
24901
24251
  }
24902
24252
  convert(statement) {
24903
24253
  const { tableName, columnName } = statement;
@@ -24906,8 +24256,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24906
24256
  }
24907
24257
  };
24908
24258
  PgAlterTableAlterColumnDropPrimaryKeyConvertor = class extends Convertor {
24909
- can(statement, dialect7) {
24910
- return statement.type === "alter_table_alter_column_drop_pk" && dialect7 === "postgresql";
24259
+ can(statement, dialect4) {
24260
+ return statement.type === "alter_table_alter_column_drop_pk" && dialect4 === "postgresql";
24911
24261
  }
24912
24262
  convert(statement) {
24913
24263
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24930,8 +24280,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24930
24280
  }
24931
24281
  };
24932
24282
  PgAlterTableAlterColumnSetNotNullConvertor = class extends Convertor {
24933
- can(statement, dialect7) {
24934
- return statement.type === "alter_table_alter_column_set_notnull" && dialect7 === "postgresql";
24283
+ can(statement, dialect4) {
24284
+ return statement.type === "alter_table_alter_column_set_notnull" && dialect4 === "postgresql";
24935
24285
  }
24936
24286
  convert(statement) {
24937
24287
  const { tableName, columnName } = statement;
@@ -24940,8 +24290,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24940
24290
  }
24941
24291
  };
24942
24292
  PgAlterTableAlterColumnDropNotNullConvertor = class extends Convertor {
24943
- can(statement, dialect7) {
24944
- return statement.type === "alter_table_alter_column_drop_notnull" && dialect7 === "postgresql";
24293
+ can(statement, dialect4) {
24294
+ return statement.type === "alter_table_alter_column_drop_notnull" && dialect4 === "postgresql";
24945
24295
  }
24946
24296
  convert(statement) {
24947
24297
  const { tableName, columnName } = statement;
@@ -24950,8 +24300,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24950
24300
  }
24951
24301
  };
24952
24302
  PgCreateForeignKeyConvertor = class extends Convertor {
24953
- can(statement, dialect7) {
24954
- return statement.type === "create_reference" && dialect7 === "postgresql";
24303
+ can(statement, dialect4) {
24304
+ return statement.type === "create_reference" && dialect4 === "postgresql";
24955
24305
  }
24956
24306
  convert(statement) {
24957
24307
  const {
@@ -24980,8 +24330,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24980
24330
  }
24981
24331
  };
24982
24332
  LibSQLCreateForeignKeyConvertor = class extends Convertor {
24983
- can(statement, dialect7) {
24984
- return statement.type === "create_reference" && dialect7 === "turso";
24333
+ can(statement, dialect4) {
24334
+ return statement.type === "create_reference" && dialect4 === "turso";
24985
24335
  }
24986
24336
  convert(statement, json2, action) {
24987
24337
  const { columnsFrom, columnsTo, tableFrom, onDelete, onUpdate, tableTo } = action === "push" ? SQLiteSquasher.unsquashPushFK(statement.data) : SQLiteSquasher.unsquashFK(statement.data);
@@ -24997,8 +24347,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24997
24347
  }
24998
24348
  };
24999
24349
  MySqlCreateForeignKeyConvertor = class extends Convertor {
25000
- can(statement, dialect7) {
25001
- return statement.type === "create_reference" && dialect7 === "mysql";
24350
+ can(statement, dialect4) {
24351
+ return statement.type === "create_reference" && dialect4 === "mysql";
25002
24352
  }
25003
24353
  convert(statement) {
25004
24354
  const {
@@ -25018,8 +24368,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25018
24368
  }
25019
24369
  };
25020
24370
  PgAlterForeignKeyConvertor = class extends Convertor {
25021
- can(statement, dialect7) {
25022
- return statement.type === "alter_reference" && dialect7 === "postgresql";
24371
+ can(statement, dialect4) {
24372
+ return statement.type === "alter_reference" && dialect4 === "postgresql";
25023
24373
  }
25024
24374
  convert(statement) {
25025
24375
  const newFk = PgSquasher.unsquashFK(statement.data);
@@ -25043,8 +24393,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25043
24393
  }
25044
24394
  };
25045
24395
  PgDeleteForeignKeyConvertor = class extends Convertor {
25046
- can(statement, dialect7) {
25047
- return statement.type === "delete_reference" && dialect7 === "postgresql";
24396
+ can(statement, dialect4) {
24397
+ return statement.type === "delete_reference" && dialect4 === "postgresql";
25048
24398
  }
25049
24399
  convert(statement) {
25050
24400
  const tableFrom = statement.tableName;
@@ -25055,8 +24405,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25055
24405
  }
25056
24406
  };
25057
24407
  MySqlDeleteForeignKeyConvertor = class extends Convertor {
25058
- can(statement, dialect7) {
25059
- return statement.type === "delete_reference" && dialect7 === "mysql";
24408
+ can(statement, dialect4) {
24409
+ return statement.type === "delete_reference" && dialect4 === "mysql";
25060
24410
  }
25061
24411
  convert(statement) {
25062
24412
  const tableFrom = statement.tableName;
@@ -25066,8 +24416,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25066
24416
  }
25067
24417
  };
25068
24418
  CreatePgIndexConvertor = class extends Convertor {
25069
- can(statement, dialect7) {
25070
- return statement.type === "create_index_pg" && dialect7 === "postgresql";
24419
+ can(statement, dialect4) {
24420
+ return statement.type === "create_index_pg" && dialect4 === "postgresql";
25071
24421
  }
25072
24422
  convert(statement) {
25073
24423
  const {
@@ -25098,8 +24448,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25098
24448
  }
25099
24449
  };
25100
24450
  CreateMySqlIndexConvertor = class extends Convertor {
25101
- can(statement, dialect7) {
25102
- return statement.type === "create_index" && dialect7 === "mysql";
24451
+ can(statement, dialect4) {
24452
+ return statement.type === "create_index" && dialect4 === "mysql";
25103
24453
  }
25104
24454
  convert(statement) {
25105
24455
  const { name, columns, isUnique } = MySqlSquasher.unsquashIdx(
@@ -25114,8 +24464,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25114
24464
  }
25115
24465
  };
25116
24466
  CreateSqliteIndexConvertor = class extends Convertor {
25117
- can(statement, dialect7) {
25118
- return statement.type === "create_index" && (dialect7 === "sqlite" || dialect7 === "turso");
24467
+ can(statement, dialect4) {
24468
+ return statement.type === "create_index" && (dialect4 === "sqlite" || dialect4 === "turso");
25119
24469
  }
25120
24470
  convert(statement) {
25121
24471
  const { name, columns, isUnique, where } = SQLiteSquasher.unsquashIdx(
@@ -25131,8 +24481,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25131
24481
  }
25132
24482
  };
25133
24483
  PgDropIndexConvertor = class extends Convertor {
25134
- can(statement, dialect7) {
25135
- return statement.type === "drop_index" && dialect7 === "postgresql";
24484
+ can(statement, dialect4) {
24485
+ return statement.type === "drop_index" && dialect4 === "postgresql";
25136
24486
  }
25137
24487
  convert(statement) {
25138
24488
  const { name } = PgSquasher.unsquashIdx(statement.data);
@@ -25140,8 +24490,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25140
24490
  }
25141
24491
  };
25142
24492
  PgCreateSchemaConvertor = class extends Convertor {
25143
- can(statement, dialect7) {
25144
- return statement.type === "create_schema" && dialect7 === "postgresql";
24493
+ can(statement, dialect4) {
24494
+ return statement.type === "create_schema" && dialect4 === "postgresql";
25145
24495
  }
25146
24496
  convert(statement) {
25147
24497
  const { name } = statement;
@@ -25150,8 +24500,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25150
24500
  }
25151
24501
  };
25152
24502
  PgRenameSchemaConvertor = class extends Convertor {
25153
- can(statement, dialect7) {
25154
- return statement.type === "rename_schema" && dialect7 === "postgresql";
24503
+ can(statement, dialect4) {
24504
+ return statement.type === "rename_schema" && dialect4 === "postgresql";
25155
24505
  }
25156
24506
  convert(statement) {
25157
24507
  const { from, to } = statement;
@@ -25160,8 +24510,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25160
24510
  }
25161
24511
  };
25162
24512
  PgDropSchemaConvertor = class extends Convertor {
25163
- can(statement, dialect7) {
25164
- return statement.type === "drop_schema" && dialect7 === "postgresql";
24513
+ can(statement, dialect4) {
24514
+ return statement.type === "drop_schema" && dialect4 === "postgresql";
25165
24515
  }
25166
24516
  convert(statement) {
25167
24517
  const { name } = statement;
@@ -25170,8 +24520,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25170
24520
  }
25171
24521
  };
25172
24522
  PgAlterTableSetSchemaConvertor = class extends Convertor {
25173
- can(statement, dialect7) {
25174
- return statement.type === "alter_table_set_schema" && dialect7 === "postgresql";
24523
+ can(statement, dialect4) {
24524
+ return statement.type === "alter_table_set_schema" && dialect4 === "postgresql";
25175
24525
  }
25176
24526
  convert(statement) {
25177
24527
  const { tableName, schemaFrom, schemaTo } = statement;
@@ -25180,8 +24530,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25180
24530
  }
25181
24531
  };
25182
24532
  PgAlterTableSetNewSchemaConvertor = class extends Convertor {
25183
- can(statement, dialect7) {
25184
- return statement.type === "alter_table_set_new_schema" && dialect7 === "postgresql";
24533
+ can(statement, dialect4) {
24534
+ return statement.type === "alter_table_set_new_schema" && dialect4 === "postgresql";
25185
24535
  }
25186
24536
  convert(statement) {
25187
24537
  const { tableName, to, from } = statement;
@@ -25191,8 +24541,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25191
24541
  }
25192
24542
  };
25193
24543
  PgAlterTableRemoveFromSchemaConvertor = class extends Convertor {
25194
- can(statement, dialect7) {
25195
- return statement.type === "alter_table_remove_from_schema" && dialect7 === "postgresql";
24544
+ can(statement, dialect4) {
24545
+ return statement.type === "alter_table_remove_from_schema" && dialect4 === "postgresql";
25196
24546
  }
25197
24547
  convert(statement) {
25198
24548
  const { tableName, schema: schema5 } = statement;
@@ -25202,8 +24552,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25202
24552
  }
25203
24553
  };
25204
24554
  SqliteDropIndexConvertor = class extends Convertor {
25205
- can(statement, dialect7) {
25206
- return statement.type === "drop_index" && (dialect7 === "sqlite" || dialect7 === "turso");
24555
+ can(statement, dialect4) {
24556
+ return statement.type === "drop_index" && (dialect4 === "sqlite" || dialect4 === "turso");
25207
24557
  }
25208
24558
  convert(statement) {
25209
24559
  const { name } = PgSquasher.unsquashIdx(statement.data);
@@ -25211,8 +24561,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25211
24561
  }
25212
24562
  };
25213
24563
  MySqlDropIndexConvertor = class extends Convertor {
25214
- can(statement, dialect7) {
25215
- return statement.type === "drop_index" && dialect7 === "mysql";
24564
+ can(statement, dialect4) {
24565
+ return statement.type === "drop_index" && dialect4 === "mysql";
25216
24566
  }
25217
24567
  convert(statement) {
25218
24568
  const { name } = MySqlSquasher.unsquashIdx(statement.data);
@@ -25220,8 +24570,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25220
24570
  }
25221
24571
  };
25222
24572
  SQLiteRecreateTableConvertor = class extends Convertor {
25223
- can(statement, dialect7) {
25224
- return statement.type === "recreate_table" && dialect7 === "sqlite";
24573
+ can(statement, dialect4) {
24574
+ return statement.type === "recreate_table" && dialect4 === "sqlite";
25225
24575
  }
25226
24576
  convert(statement) {
25227
24577
  const { tableName, columns, compositePKs, referenceData } = statement;
@@ -25262,8 +24612,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25262
24612
  }
25263
24613
  };
25264
24614
  LibSQLRecreateTableConvertor = class extends Convertor {
25265
- can(statement, dialect7) {
25266
- return statement.type === "recreate_table" && dialect7 === "turso";
24615
+ can(statement, dialect4) {
24616
+ return statement.type === "recreate_table" && dialect4 === "turso";
25267
24617
  }
25268
24618
  convert(statement) {
25269
24619
  const { tableName, columns, compositePKs, referenceData } = statement;
@@ -25309,14 +24659,6 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25309
24659
  convertors.push(new SQLiteCreateTableConvertor());
25310
24660
  convertors.push(new SQLiteRecreateTableConvertor());
25311
24661
  convertors.push(new LibSQLRecreateTableConvertor());
25312
- convertors.push(new PgCreateViewConvertor());
25313
- convertors.push(new PgDropViewConvertor());
25314
- convertors.push(new PgRenameViewConvertor());
25315
- convertors.push(new PgAlterViewSchemaConvertor());
25316
- convertors.push(new PgAlterViewAddWithOptionConvertor());
25317
- convertors.push(new PgAlterViewDropWithOptionConvertor());
25318
- convertors.push(new PgAlterViewAlterTablespaceConvertor());
25319
- convertors.push(new PgAlterViewAlterUsingConvertor());
25320
24662
  convertors.push(new CreateTypeEnumConvertor());
25321
24663
  convertors.push(new CreatePgSequenceConvertor());
25322
24664
  convertors.push(new DropPgSequenceConvertor());
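All of the convertor classes touched above share the same contract: can() filters by statement type and dialect, convert() emits SQL, and init_sqlgenerator registers one instance of each in an ordered list (this version only renames the minified dialect parameter and drops the Pg view convertors from that list). A rough TypeScript sketch of the contract, with assumed type names and a simplified DROP TABLE body — an illustration, not drizzle-kit's actual code:

// Rough sketch of the convertor dispatch pattern shown in the hunks above.
// Dialect and JsonStatement are assumed shapes for illustration only.
type Dialect = "postgresql" | "mysql" | "sqlite" | "turso";

interface JsonStatement {
  type: string;
  [key: string]: unknown;
}

// Assumed payload for one statement kind; the real statement also carries more fields.
interface JsonDropTableStatement extends JsonStatement {
  type: "drop_table";
  tableName: string;
  schema?: string;
}

abstract class Convertor {
  abstract can(statement: JsonStatement, dialect: Dialect): boolean;
  abstract convert(statement: JsonStatement): string | string[];
}

class PgDropTableConvertor extends Convertor {
  can(statement: JsonStatement, dialect: Dialect): boolean {
    return statement.type === "drop_table" && dialect === "postgresql";
  }
  convert(statement: JsonStatement): string {
    const { tableName, schema } = statement as JsonDropTableStatement;
    const target = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`;
    return `DROP TABLE ${target};`;
  }
}

// The registry is just an ordered list; the generator uses the first match.
const convertors: Convertor[] = [new PgDropTableConvertor()];

function toSql(statement: JsonStatement, dialect: Dialect): string | string[] {
  const convertor = convertors.find((c) => c.can(statement, dialect));
  if (!convertor) throw new Error(`no convertor for "${statement.type}" (${dialect})`);
  return convertor.convert(statement);
}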
@@ -25651,7 +24993,7 @@ var init_sqlitePushUtils = __esm({
25651
24993
  });
25652
24994
 
25653
24995
  // src/jsonStatements.ts
25654
- var preparePgCreateTableJson, prepareMySqlCreateTableJson, prepareSQLiteCreateTable, prepareDropTableJson, prepareRenameTableJson, prepareCreateEnumJson, prepareAddValuesToEnumJson, prepareDropEnumJson, prepareMoveEnumJson, prepareRenameEnumJson, prepareCreateSequenceJson, prepareAlterSequenceJson, prepareDropSequenceJson, prepareMoveSequenceJson, prepareRenameSequenceJson, prepareCreateSchemasJson, prepareRenameSchemasJson, prepareDeleteSchemasJson, prepareRenameColumns, _prepareDropColumns, _prepareAddColumns, _prepareSqliteAddColumns, prepareAlterColumnsMysql, preparePgAlterColumns, prepareSqliteAlterColumns, preparePgCreateIndexesJson, prepareCreateIndexesJson, prepareCreateReferencesJson, prepareLibSQLCreateReferencesJson, prepareDropReferencesJson, prepareLibSQLDropReferencesJson, prepareAlterReferencesJson, prepareDropIndexesJson, prepareAddCompositePrimaryKeySqlite, prepareDeleteCompositePrimaryKeySqlite, prepareAlterCompositePrimaryKeySqlite, prepareAddCompositePrimaryKeyPg, prepareDeleteCompositePrimaryKeyPg, prepareAlterCompositePrimaryKeyPg, prepareAddUniqueConstraintPg, prepareDeleteUniqueConstraintPg, prepareAddCompositePrimaryKeyMySql, prepareDeleteCompositePrimaryKeyMySql, prepareAlterCompositePrimaryKeyMySql, preparePgCreateViewJson, preparePgDropViewJson, preparePgRenameViewJson, preparePgAlterViewAlterSchemaJson, preparePgAlterViewAddWithOptionJson, preparePgAlterViewDropWithOptionJson, preparePgAlterViewAlterTablespaceJson, preparePgAlterViewAlterUsingJson;
24996
+ var preparePgCreateTableJson, prepareMySqlCreateTableJson, prepareSQLiteCreateTable, prepareDropTableJson, prepareRenameTableJson, prepareCreateEnumJson, prepareAddValuesToEnumJson, prepareDropEnumJson, prepareMoveEnumJson, prepareRenameEnumJson, prepareCreateSequenceJson, prepareAlterSequenceJson, prepareDropSequenceJson, prepareMoveSequenceJson, prepareRenameSequenceJson, prepareCreateSchemasJson, prepareRenameSchemasJson, prepareDeleteSchemasJson, prepareRenameColumns, _prepareDropColumns, _prepareAddColumns, _prepareSqliteAddColumns, prepareAlterColumnsMysql, preparePgAlterColumns, prepareSqliteAlterColumns, preparePgCreateIndexesJson, prepareCreateIndexesJson, prepareCreateReferencesJson, prepareLibSQLCreateReferencesJson, prepareDropReferencesJson, prepareLibSQLDropReferencesJson, prepareAlterReferencesJson, prepareDropIndexesJson, prepareAddCompositePrimaryKeySqlite, prepareDeleteCompositePrimaryKeySqlite, prepareAlterCompositePrimaryKeySqlite, prepareAddCompositePrimaryKeyPg, prepareDeleteCompositePrimaryKeyPg, prepareAlterCompositePrimaryKeyPg, prepareAddUniqueConstraintPg, prepareDeleteUniqueConstraintPg, prepareAddCompositePrimaryKeyMySql, prepareDeleteCompositePrimaryKeyMySql, prepareAlterCompositePrimaryKeyMySql;
25655
24997
  var init_jsonStatements = __esm({
25656
24998
  "src/jsonStatements.ts"() {
25657
24999
  "use strict";
@@ -26990,81 +26332,6 @@ var init_jsonStatements = __esm({
26990
26332
  };
26991
26333
  });
26992
26334
  };
26993
- preparePgCreateViewJson = (name, schema5, definition, materialized, withNoData = false, withOption, using, tablespace) => {
26994
- return {
26995
- type: "create_view",
26996
- name,
26997
- schema: schema5,
26998
- definition,
26999
- with: withOption,
27000
- materialized,
27001
- withNoData,
27002
- using,
27003
- tablespace
27004
- };
27005
- };
27006
- preparePgDropViewJson = (name, schema5, materialized) => {
27007
- return {
27008
- type: "drop_view",
27009
- name,
27010
- schema: schema5,
27011
- materialized
27012
- };
27013
- };
27014
- preparePgRenameViewJson = (to, from, schema5, materialized) => {
27015
- return {
27016
- type: "rename_view",
27017
- nameTo: to,
27018
- nameFrom: from,
27019
- schema: schema5,
27020
- materialized
27021
- };
27022
- };
27023
- preparePgAlterViewAlterSchemaJson = (to, from, name, materialized) => {
27024
- return {
27025
- type: "alter_view_alter_schema",
27026
- fromSchema: from,
27027
- toSchema: to,
27028
- name,
27029
- materialized
27030
- };
27031
- };
27032
- preparePgAlterViewAddWithOptionJson = (name, schema5, materialized, withOption) => {
27033
- return {
27034
- type: "alter_view_add_with_option",
27035
- name,
27036
- schema: schema5,
27037
- materialized,
27038
- with: withOption
27039
- };
27040
- };
27041
- preparePgAlterViewDropWithOptionJson = (name, schema5, materialized, withOption) => {
27042
- return {
27043
- type: "alter_view_drop_with_option",
27044
- name,
27045
- schema: schema5,
27046
- materialized,
27047
- with: withOption
27048
- };
27049
- };
27050
- preparePgAlterViewAlterTablespaceJson = (name, schema5, materialized, to) => {
27051
- return {
27052
- type: "alter_view_alter_tablespace",
27053
- name,
27054
- schema: schema5,
27055
- materialized,
27056
- toTablespace: to
27057
- };
27058
- };
27059
- preparePgAlterViewAlterUsingJson = (name, schema5, materialized, to) => {
27060
- return {
27061
- type: "alter_view_alter_using",
27062
- name,
27063
- schema: schema5,
27064
- materialized,
27065
- toUsing: to
27066
- };
27067
- };
27068
26335
  }
27069
26336
  });
27070
26337
 
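The preparePg*ViewJson helpers removed above are typical of src/jsonStatements.ts: every prepare* function returns a plain object whose type field later drives convertor dispatch. A small TypeScript sketch of that intermediate representation, reusing field names visible in the removed helpers; the union below is an illustration and covers only two of the many statement kinds:

// Two members of the JSON-statement IR, modeled after the removed view helpers.
type JsonCreateViewStatement = {
  type: "create_view";
  name: string;
  schema: string;
  definition: string;
  materialized: boolean;
  withNoData?: boolean;
  using?: string;
  tablespace?: string;
};

type JsonDropViewStatement = {
  type: "drop_view";
  name: string;
  schema: string;
  materialized: boolean;
};

type JsonStatement = JsonCreateViewStatement | JsonDropViewStatement;

// prepare* helpers only assemble these objects; no SQL is produced at this stage.
const prepareDropViewJson = (
  name: string,
  schema: string,
  materialized: boolean,
): JsonDropViewStatement => ({ type: "drop_view", name, schema, materialized });

// Narrowing on `type` is what keeps each convertor's can() check a one-liner.
function describe(statement: JsonStatement): string {
  return statement.type === "create_view"
    ? `create ${statement.materialized ? "materialized " : ""}view ${statement.schema}.${statement.name}`
    : `drop view ${statement.schema}.${statement.name}`;
}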
@@ -27350,7 +26617,7 @@ var init_statementCombiner = __esm({
27350
26617
  });
27351
26618
 
27352
26619
  // src/snapshotsDiffer.ts
27353
- var makeChanged, makeSelfOrChanged, makePatched, columnSchema, alteredColumnSchema, enumSchema2, changedEnumSchema, tableScheme, alteredTableScheme, alteredViewSchema, diffResultScheme, diffResultSchemeMysql, diffResultSchemeSQLite, schemaChangeFor, nameChangeFor, nameSchemaChangeFor, columnChangeFor, applyPgSnapshotsDiff, applyMysqlSnapshotsDiff, applySqliteSnapshotsDiff, applyLibSQLSnapshotsDiff;
26620
+ var makeChanged, makeSelfOrChanged, makePatched, columnSchema, alteredColumnSchema, enumSchema2, changedEnumSchema, tableScheme, alteredTableScheme, diffResultScheme, diffResultSchemeMysql, diffResultSchemeSQLite, schemaChangeFor, nameChangeFor, nameSchemaChangeFor, columnChangeFor, applyPgSnapshotsDiff, applyMysqlSnapshotsDiff, applySqliteSnapshotsDiff, applyLibSQLSnapshotsDiff;
27354
26621
  var init_snapshotsDiffer = __esm({
27355
26622
  "src/snapshotsDiffer.ts"() {
27356
26623
  "use strict";
@@ -27498,42 +26765,10 @@ var init_snapshotsDiffer = __esm({
27498
26765
  })
27499
26766
  )
27500
26767
  }).strict();
27501
- alteredViewSchema = objectType({
27502
- name: stringType(),
27503
- schema: stringType(),
27504
- deletedWithOption: mergedViewWithOption.optional(),
27505
- addedWithOption: mergedViewWithOption.optional(),
27506
- alteredWith: objectType({
27507
- addedWith: mergedViewWithOption.optional(),
27508
- deletedWith: mergedViewWithOption.optional(),
27509
- alterWith: mergedViewWithOption.optional()
27510
- }).strict(),
27511
- alteredSchema: objectType({
27512
- __old: stringType(),
27513
- __new: stringType()
27514
- }).strict().optional(),
27515
- alteredDefinition: objectType({
27516
- __old: stringType(),
27517
- __new: stringType()
27518
- }).strict().optional(),
27519
- alteredExisting: objectType({
27520
- __old: booleanType(),
27521
- __new: booleanType()
27522
- }).strict().optional(),
27523
- alteredTablespace: objectType({
27524
- __old: stringType(),
27525
- __new: stringType()
27526
- }).strict().optional(),
27527
- alteredUsing: objectType({
27528
- __old: stringType(),
27529
- __new: stringType()
27530
- }).strict().optional()
27531
- }).strict();
27532
26768
  diffResultScheme = objectType({
27533
26769
  alteredTablesWithColumns: alteredTableScheme.array(),
27534
26770
  alteredEnums: changedEnumSchema.array(),
27535
- alteredSequences: sequenceSquashed.array(),
27536
- alteredViews: alteredViewSchema.array()
26771
+ alteredSequences: sequenceSquashed.array()
27537
26772
  }).strict();
27538
26773
  diffResultSchemeMysql = objectType({
27539
26774
  alteredTablesWithColumns: alteredTableScheme.array(),
@@ -27588,7 +26823,7 @@ var init_snapshotsDiffer = __esm({
27588
26823
  }
27589
26824
  return column7;
27590
26825
  };
27591
- applyPgSnapshotsDiff = async (json1, json2, schemasResolver2, enumsResolver2, sequencesResolver2, tablesResolver2, columnsResolver2, viewsResolver2, prevFull, curFull, action) => {
26826
+ applyPgSnapshotsDiff = async (json1, json2, schemasResolver2, enumsResolver2, sequencesResolver2, tablesResolver2, columnsResolver2, prevFull, curFull, action) => {
27592
26827
  const schemasDiff = diffSchemasOrTables(json1.schemas, json2.schemas);
27593
26828
  const {
27594
26829
  created: createdSchemas,
@@ -27816,16 +27051,6 @@ var init_snapshotsDiffer = __esm({
27816
27051
  return [tableKey2, tableValue];
27817
27052
  }
27818
27053
  );
27819
- const viewsDiff = diffSchemasOrTables(json1.views, json2.views);
27820
- const {
27821
- created: createdViews,
27822
- deleted: deletedViews,
27823
- renamed: renamedViews,
27824
- moved: movedViews
27825
- } = await viewsResolver2({
27826
- created: viewsDiff.added,
27827
- deleted: viewsDiff.deleted
27828
- });
27829
27054
  const diffResult = applyJsonDiff(columnsPatchedSnap1, json2);
27830
27055
  const typedResult = diffResultScheme.parse(diffResult);
27831
27056
  const jsonStatements = [];
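In applyPgSnapshotsDiff the raw structural diff is validated with diffResultScheme before any statements are generated; the change in this release is only that the schema loses its alteredViews field. A minimal sketch of that parse-then-build step, written with plain zod names rather than the bundle's minified aliases (objectType, stringType) and with a deliberately simplified table shape:

import { z } from "zod";

// Simplified stand-in for diffResultScheme; the real schema also validates
// altered columns, enums and sequences in far more detail.
const diffResultScheme = z
  .object({
    alteredTablesWithColumns: z
      .object({ name: z.string(), schema: z.string() })
      .strict()
      .array(),
    alteredEnums: z.object({ name: z.string() }).strict().array(),
    alteredSequences: z.object({ name: z.string() }).strict().array(),
  })
  .strict();

type DiffResult = z.infer<typeof diffResultScheme>;

// Validation happens before statement generation, so a malformed diff fails
// loudly here instead of producing broken SQL further down the pipeline.
function buildStatements(rawDiff: unknown): Array<{ type: string; tableName: string }> {
  const typed: DiffResult = diffResultScheme.parse(rawDiff);
  const jsonStatements: Array<{ type: string; tableName: string }> = [];
  for (const table of typed.alteredTablesWithColumns) {
    jsonStatements.push({ type: "alter_table", tableName: `${table.schema}.${table.name}` });
  }
  return jsonStatements;
}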
@@ -28078,137 +27303,6 @@ var init_snapshotsDiffer = __esm({
28078
27303
  const createTables = createdTables.map((it) => {
28079
27304
  return preparePgCreateTableJson(it, curFull);
28080
27305
  });
28081
- const createViews = [];
28082
- const dropViews = [];
28083
- const renameViews = [];
28084
- const alterViews = [];
28085
- createViews.push(
28086
- ...createdViews.filter((it) => !it.isExisting).map((it) => {
28087
- return preparePgCreateViewJson(
28088
- it.name,
28089
- it.schema,
28090
- it.definition,
28091
- it.materialized,
28092
- it.withNoData,
28093
- it.with,
28094
- it.using,
28095
- it.tablespace
28096
- );
28097
- })
28098
- );
28099
- dropViews.push(
28100
- ...deletedViews.filter((it) => !it.isExisting).map((it) => {
28101
- return preparePgDropViewJson(it.name, it.schema, it.materialized);
28102
- })
28103
- );
28104
- renameViews.push(
28105
- ...renamedViews.filter((it) => !it.to.isExisting).map((it) => {
28106
- return preparePgRenameViewJson(it.to.name, it.from.name, it.to.schema, it.to.materialized);
28107
- })
28108
- );
28109
- alterViews.push(
28110
- ...movedViews.filter((it) => !json2.views[`${it.schemaTo}.${it.name}`].isExisting).map((it) => {
28111
- return preparePgAlterViewAlterSchemaJson(
28112
- it.schemaTo,
28113
- it.schemaFrom,
28114
- it.name,
28115
- json2.views[`${it.schemaTo}.${it.name}`].materialized
28116
- );
28117
- })
28118
- );
28119
- const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[`${it.schema}.${it.name}`].isExisting);
28120
- for (const alteredView of alteredViews) {
28121
- const viewKey = `${alteredView.schema}.${alteredView.name}`;
28122
- const { materialized, with: withOption, definition, withNoData, using, tablespace } = json2.views[viewKey];
28123
- if (alteredView.alteredExisting || alteredView.alteredDefinition && action !== "push") {
28124
- dropViews.push(preparePgDropViewJson(alteredView.name, alteredView.schema, materialized));
28125
- createViews.push(
28126
- preparePgCreateViewJson(
28127
- alteredView.name,
28128
- alteredView.schema,
28129
- definition,
28130
- materialized,
28131
- withNoData,
28132
- withOption,
28133
- using,
28134
- tablespace
28135
- )
28136
- );
28137
- continue;
28138
- }
28139
- if (alteredView.addedWithOption) {
28140
- alterViews.push(
28141
- preparePgAlterViewAddWithOptionJson(
28142
- alteredView.name,
28143
- alteredView.schema,
28144
- materialized,
28145
- alteredView.addedWithOption
28146
- )
28147
- );
28148
- }
28149
- if (alteredView.deletedWithOption) {
28150
- alterViews.push(
28151
- preparePgAlterViewDropWithOptionJson(
28152
- alteredView.name,
28153
- alteredView.schema,
28154
- materialized,
28155
- alteredView.deletedWithOption
28156
- )
28157
- );
28158
- }
28159
- if (alteredView.alteredWith) {
28160
- if (alteredView.alteredWith.addedWith) {
28161
- alterViews.push(
28162
- preparePgAlterViewAddWithOptionJson(
28163
- alteredView.name,
28164
- alteredView.schema,
28165
- materialized,
28166
- alteredView.alteredWith.addedWith
28167
- )
28168
- );
28169
- }
28170
- if (alteredView.alteredWith.deletedWith) {
28171
- alterViews.push(
28172
- preparePgAlterViewDropWithOptionJson(
28173
- alteredView.name,
28174
- alteredView.schema,
28175
- materialized,
28176
- alteredView.alteredWith.deletedWith
28177
- )
28178
- );
28179
- }
28180
- if (alteredView.alteredWith.alterWith) {
28181
- alterViews.push(
28182
- preparePgAlterViewAddWithOptionJson(
28183
- alteredView.name,
28184
- alteredView.schema,
28185
- materialized,
28186
- alteredView.alteredWith.alterWith
28187
- )
28188
- );
28189
- }
28190
- }
28191
- if (alteredView.alteredTablespace) {
28192
- alterViews.push(
28193
- preparePgAlterViewAlterTablespaceJson(
28194
- alteredView.name,
28195
- alteredView.schema,
28196
- materialized,
28197
- alteredView.alteredTablespace.__new
28198
- )
28199
- );
28200
- }
28201
- if (alteredView.alteredUsing) {
28202
- alterViews.push(
28203
- preparePgAlterViewAlterUsingJson(
28204
- alteredView.name,
28205
- alteredView.schema,
28206
- materialized,
28207
- alteredView.alteredUsing.__new
28208
- )
28209
- );
28210
- }
28211
- }
28212
27306
  jsonStatements.push(...createSchemas);
28213
27307
  jsonStatements.push(...renameSchemas);
28214
27308
  jsonStatements.push(...createEnums);
@@ -28220,9 +27314,6 @@ var init_snapshotsDiffer = __esm({
28220
27314
  jsonStatements.push(...renameSequences);
28221
27315
  jsonStatements.push(...jsonAlterSequences);
28222
27316
  jsonStatements.push(...createTables);
28223
- jsonStatements.push(...dropViews);
28224
- jsonStatements.push(...renameViews);
28225
- jsonStatements.push(...alterViews);
28226
27317
  jsonStatements.push(...jsonDropTables);
28227
27318
  jsonStatements.push(...jsonSetTableSchemas);
28228
27319
  jsonStatements.push(...jsonRenameTables);
@@ -28242,7 +27333,6 @@ var init_snapshotsDiffer = __esm({
28242
27333
  jsonStatements.push(...jsonAlteredCompositePKs);
28243
27334
  jsonStatements.push(...jsonAddedUniqueConstraints);
28244
27335
  jsonStatements.push(...jsonAlteredUniqueConstraints);
28245
- jsonStatements.push(...createViews);
28246
27336
  jsonStatements.push(...dropEnums);
28247
27337
  jsonStatements.push(...dropSequences);
28248
27338
  jsonStatements.push(...dropSchemas);
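The long run of jsonStatements.push(...) calls above fixes the order in which DDL is emitted: schemas, enums and sequences are created before the tables that depend on them, and dependents are dropped before the schemas that hold them; this release only removes the view entries from that order. A toy sketch of the same phase-ordering idea, with invented phase contents:

// Hypothetical grouping; the real differ concatenates ~30 individually named arrays.
type Statement = { type: string; name: string };

const createSchemas: Statement[] = [{ type: "create_schema", name: "app" }];
const createTables: Statement[] = [{ type: "create_table", name: "app.users" }];
const dropTables: Statement[] = [];
const dropSchemas: Statement[] = [];

// Concatenation order *is* the dependency order: everything a later statement
// relies on must already have been emitted.
const jsonStatements: Statement[] = [
  ...createSchemas,
  ...createTables,
  ...dropTables,
  ...dropSchemas,
];

console.log(jsonStatements.map((s) => s.type).join(" -> "));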
@@ -30521,10 +29611,9 @@ __export(migrate_exports, {
30521
29611
  schemasResolver: () => schemasResolver,
30522
29612
  sequencesResolver: () => sequencesResolver,
30523
29613
  tablesResolver: () => tablesResolver,
30524
- viewsResolver: () => viewsResolver,
30525
29614
  writeResult: () => writeResult
30526
29615
  });
30527
- var import_fs5, import_hanji3, import_path4, schemasResolver, tablesResolver, viewsResolver, sequencesResolver, enumsResolver, columnsResolver, prepareAndMigratePg, preparePgPush, prepareMySQLPush, prepareAndMigrateMysql, prepareAndMigrateSqlite, prepareAndMigrateLibSQL, prepareSQLitePush, prepareLibSQLPush, promptColumnsConflicts, promptNamedWithSchemasConflict, promptSchemasConflict, BREAKPOINT, writeResult, embeddedMigrations, prepareSnapshotFolderName, two;
29616
+ var import_fs5, import_hanji3, import_path4, schemasResolver, tablesResolver, sequencesResolver, enumsResolver, columnsResolver, prepareAndMigratePg, preparePgPush, prepareMySQLPush, prepareAndMigrateMysql, prepareAndMigrateSqlite, prepareAndMigrateLibSQL, prepareSQLitePush, prepareLibSQLPush, promptColumnsConflicts, promptNamedWithSchemasConflict, promptSchemasConflict, BREAKPOINT, writeResult, embeddedMigrations, prepareSnapshotFolderName, two;
30528
29617
  var init_migrate = __esm({
30529
29618
  "src/cli/commands/migrate.ts"() {
30530
29619
  "use strict";
@@ -30571,24 +29660,6 @@ var init_migrate = __esm({
30571
29660
  throw e2;
30572
29661
  }
30573
29662
  };
30574
- viewsResolver = async (input) => {
30575
- try {
30576
- const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict(
30577
- input.created,
30578
- input.deleted,
30579
- "view"
30580
- );
30581
- return {
30582
- created,
30583
- deleted,
30584
- moved,
30585
- renamed
30586
- };
30587
- } catch (e2) {
30588
- console.error(e2);
30589
- throw e2;
30590
- }
30591
- };
30592
29663
  sequencesResolver = async (input) => {
30593
29664
  try {
30594
29665
  const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict(
@@ -30642,6 +29713,7 @@ var init_migrate = __esm({
30642
29713
  prepareAndMigratePg = async (config) => {
30643
29714
  const outFolder = config.out;
30644
29715
  const schemaPath = config.schema;
29716
+ const casing2 = config.casing;
30645
29717
  try {
30646
29718
  assertV1OutFolder(outFolder);
30647
29719
  const { snapshots, journal } = prepareMigrationFolder(
@@ -30650,7 +29722,8 @@ var init_migrate = __esm({
30650
29722
  );
30651
29723
  const { prev, cur, custom: custom2 } = await preparePgMigrationSnapshot(
30652
29724
  snapshots,
30653
- schemaPath
29725
+ schemaPath,
29726
+ casing2
30654
29727
  );
30655
29728
  const validatedPrev = pgSchema.parse(prev);
30656
29729
  const validatedCur = pgSchema.parse(cur);
@@ -30677,7 +29750,6 @@ var init_migrate = __esm({
30677
29750
  sequencesResolver,
30678
29751
  tablesResolver,
30679
29752
  columnsResolver,
30680
- viewsResolver,
30681
29753
  validatedPrev,
30682
29754
  validatedCur
30683
29755
  );
@@ -30694,10 +29766,11 @@ var init_migrate = __esm({
30694
29766
  console.error(e2);
30695
29767
  }
30696
29768
  };
30697
- preparePgPush = async (schemaPath, snapshot, schemaFilter) => {
29769
+ preparePgPush = async (schemaPath, snapshot, schemaFilter, casing2) => {
30698
29770
  const { prev, cur } = await preparePgDbPushSnapshot(
30699
29771
  snapshot,
30700
29772
  schemaPath,
29773
+ casing2,
30701
29774
  schemaFilter
30702
29775
  );
30703
29776
  const validatedPrev = pgSchema.parse(prev);
@@ -30712,18 +29785,18 @@ var init_migrate = __esm({
30712
29785
  sequencesResolver,
30713
29786
  tablesResolver,
30714
29787
  columnsResolver,
30715
- viewsResolver,
30716
29788
  validatedPrev,
30717
29789
  validatedCur,
30718
29790
  "push"
30719
29791
  );
30720
29792
  return { sqlStatements, statements, squashedPrev, squashedCur };
30721
29793
  };
30722
- prepareMySQLPush = async (schemaPath, snapshot) => {
29794
+ prepareMySQLPush = async (schemaPath, snapshot, casing2) => {
30723
29795
  try {
30724
29796
  const { prev, cur } = await prepareMySqlDbPushSnapshot(
30725
29797
  snapshot,
30726
- schemaPath
29798
+ schemaPath,
29799
+ casing2
30727
29800
  );
30728
29801
  const validatedPrev = mysqlSchema.parse(prev);
30729
29802
  const validatedCur = mysqlSchema.parse(cur);
@@ -30747,12 +29820,14 @@ var init_migrate = __esm({
30747
29820
  prepareAndMigrateMysql = async (config) => {
30748
29821
  const outFolder = config.out;
30749
29822
  const schemaPath = config.schema;
29823
+ const casing2 = config.casing;
30750
29824
  try {
30751
29825
  assertV1OutFolder(outFolder);
30752
29826
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "mysql");
30753
29827
  const { prev, cur, custom: custom2 } = await prepareMySqlMigrationSnapshot(
30754
29828
  snapshots,
30755
- schemaPath
29829
+ schemaPath,
29830
+ casing2
30756
29831
  );
30757
29832
  const validatedPrev = mysqlSchema.parse(prev);
30758
29833
  const validatedCur = mysqlSchema.parse(cur);
@@ -30796,12 +29871,14 @@ var init_migrate = __esm({
30796
29871
  prepareAndMigrateSqlite = async (config) => {
30797
29872
  const outFolder = config.out;
30798
29873
  const schemaPath = config.schema;
29874
+ const casing2 = config.casing;
30799
29875
  try {
30800
29876
  assertV1OutFolder(outFolder);
30801
29877
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "sqlite");
30802
29878
  const { prev, cur, custom: custom2 } = await prepareSqliteMigrationSnapshot(
30803
29879
  snapshots,
30804
- schemaPath
29880
+ schemaPath,
29881
+ casing2
30805
29882
  );
30806
29883
  const validatedPrev = sqliteSchema.parse(prev);
30807
29884
  const validatedCur = sqliteSchema.parse(cur);
@@ -30847,12 +29924,14 @@ var init_migrate = __esm({
30847
29924
  prepareAndMigrateLibSQL = async (config) => {
30848
29925
  const outFolder = config.out;
30849
29926
  const schemaPath = config.schema;
29927
+ const casing2 = config.casing;
30850
29928
  try {
30851
29929
  assertV1OutFolder(outFolder);
30852
29930
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "sqlite");
30853
29931
  const { prev, cur, custom: custom2 } = await prepareSqliteMigrationSnapshot(
30854
29932
  snapshots,
30855
- schemaPath
29933
+ schemaPath,
29934
+ casing2
30856
29935
  );
30857
29936
  const validatedPrev = sqliteSchema.parse(prev);
30858
29937
  const validatedCur = sqliteSchema.parse(cur);
@@ -30895,8 +29974,8 @@ var init_migrate = __esm({
30895
29974
  console.error(e2);
30896
29975
  }
30897
29976
  };
30898
- prepareSQLitePush = async (schemaPath, snapshot) => {
30899
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
29977
+ prepareSQLitePush = async (schemaPath, snapshot, casing2) => {
29978
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing2);
30900
29979
  const validatedPrev = sqliteSchema.parse(prev);
30901
29980
  const validatedCur = sqliteSchema.parse(cur);
30902
29981
  const squashedPrev = squashSqliteScheme(validatedPrev, "push");
@@ -30918,8 +29997,8 @@ var init_migrate = __esm({
30918
29997
  meta: _meta
30919
29998
  };
30920
29999
  };
30921
- prepareLibSQLPush = async (schemaPath, snapshot) => {
30922
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
30000
+ prepareLibSQLPush = async (schemaPath, snapshot, casing2) => {
30001
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing2);
30923
30002
  const validatedPrev = sqliteSchema.parse(prev);
30924
30003
  const validatedCur = sqliteSchema.parse(cur);
30925
30004
  const squashedPrev = squashSqliteScheme(validatedPrev, "push");
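Every migrate and push entry point in the hunks above gains the same mechanical change: read config.casing and pass it through to the snapshot-preparation call alongside the schema path. A minimal sketch of that threading, with assumed option values and a hypothetical stand-in for the snapshot helper (the real preparePgMigrationSnapshot signature is only partially visible in this diff):

// Assumed option values; the diff only shows that config.casing is forwarded.
type Casing = "camelCase" | "snake_case";

interface MigrateConfig {
  out: string;
  schema: string;
  casing?: Casing;
}

// Hypothetical stand-in for preparePgMigrationSnapshot: the relevant point is
// only that casing now rides along with the schema path.
async function prepareSnapshot(
  snapshots: string[],
  schemaPath: string,
  casing: Casing | undefined,
) {
  return {
    prev: { snapshots: snapshots.length },
    cur: { schemaPath, casing: casing ?? "camelCase" },
  };
}

export async function prepareAndMigrate(config: MigrateConfig) {
  const outFolder = config.out;
  const schemaPath = config.schema;
  const casing = config.casing; // newly read from config in this version
  const { prev, cur } = await prepareSnapshot([], schemaPath, casing);
  return { outFolder, prev, cur };
}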
@@ -35534,20 +34613,20 @@ var require_ponyfill_es2018 = __commonJS({
35534
34613
  ;
35535
34614
  ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);
35536
34615
  }
35537
- respondWithNewView(view2) {
34616
+ respondWithNewView(view) {
35538
34617
  if (!IsReadableStreamBYOBRequest(this)) {
35539
34618
  throw byobRequestBrandCheckException("respondWithNewView");
35540
34619
  }
35541
- assertRequiredArgument(view2, 1, "respondWithNewView");
35542
- if (!ArrayBuffer.isView(view2)) {
34620
+ assertRequiredArgument(view, 1, "respondWithNewView");
34621
+ if (!ArrayBuffer.isView(view)) {
35543
34622
  throw new TypeError("You can only respond with array buffer views");
35544
34623
  }
35545
34624
  if (this._associatedReadableByteStreamController === void 0) {
35546
34625
  throw new TypeError("This BYOB request has been invalidated");
35547
34626
  }
35548
- if (IsDetachedBuffer(view2.buffer))
34627
+ if (IsDetachedBuffer(view.buffer))
35549
34628
  ;
35550
- ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view2);
34629
+ ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);
35551
34630
  }
35552
34631
  }
35553
34632
  Object.defineProperties(ReadableStreamBYOBRequest.prototype, {
@@ -35648,8 +34727,8 @@ var require_ponyfill_es2018 = __commonJS({
35648
34727
  const entry = this._queue.shift();
35649
34728
  this._queueTotalSize -= entry.byteLength;
35650
34729
  ReadableByteStreamControllerHandleQueueDrain(this);
35651
- const view2 = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
35652
- readRequest._chunkSteps(view2);
34730
+ const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
34731
+ readRequest._chunkSteps(view);
35653
34732
  return;
35654
34733
  }
35655
34734
  const autoAllocateChunkSize = this._autoAllocateChunkSize;
@@ -35815,19 +34894,19 @@ var require_ponyfill_es2018 = __commonJS({
35815
34894
  }
35816
34895
  }
35817
34896
  }
35818
- function ReadableByteStreamControllerPullInto(controller, view2, readIntoRequest) {
34897
+ function ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {
35819
34898
  const stream = controller._controlledReadableByteStream;
35820
34899
  let elementSize = 1;
35821
- if (view2.constructor !== DataView) {
35822
- elementSize = view2.constructor.BYTES_PER_ELEMENT;
34900
+ if (view.constructor !== DataView) {
34901
+ elementSize = view.constructor.BYTES_PER_ELEMENT;
35823
34902
  }
35824
- const ctor = view2.constructor;
35825
- const buffer = TransferArrayBuffer(view2.buffer);
34903
+ const ctor = view.constructor;
34904
+ const buffer = TransferArrayBuffer(view.buffer);
35826
34905
  const pullIntoDescriptor = {
35827
34906
  buffer,
35828
34907
  bufferByteLength: buffer.byteLength,
35829
- byteOffset: view2.byteOffset,
35830
- byteLength: view2.byteLength,
34908
+ byteOffset: view.byteOffset,
34909
+ byteLength: view.byteLength,
35831
34910
  bytesFilled: 0,
35832
34911
  elementSize,
35833
34912
  viewConstructor: ctor,
@@ -35995,9 +35074,9 @@ var require_ponyfill_es2018 = __commonJS({
35995
35074
  function ReadableByteStreamControllerGetBYOBRequest(controller) {
35996
35075
  if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {
35997
35076
  const firstDescriptor = controller._pendingPullIntos.peek();
35998
- const view2 = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
35077
+ const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
35999
35078
  const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);
36000
- SetUpReadableStreamBYOBRequest(byobRequest, controller, view2);
35079
+ SetUpReadableStreamBYOBRequest(byobRequest, controller, view);
36001
35080
  controller._byobRequest = byobRequest;
36002
35081
  }
36003
35082
  return controller._byobRequest;
@@ -36030,29 +35109,29 @@ var require_ponyfill_es2018 = __commonJS({
36030
35109
  firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);
36031
35110
  ReadableByteStreamControllerRespondInternal(controller, bytesWritten);
36032
35111
  }
36033
- function ReadableByteStreamControllerRespondWithNewView(controller, view2) {
35112
+ function ReadableByteStreamControllerRespondWithNewView(controller, view) {
36034
35113
  const firstDescriptor = controller._pendingPullIntos.peek();
36035
35114
  const state = controller._controlledReadableByteStream._state;
36036
35115
  if (state === "closed") {
36037
- if (view2.byteLength !== 0) {
35116
+ if (view.byteLength !== 0) {
36038
35117
  throw new TypeError("The view's length must be 0 when calling respondWithNewView() on a closed stream");
36039
35118
  }
36040
35119
  } else {
36041
- if (view2.byteLength === 0) {
35120
+ if (view.byteLength === 0) {
36042
35121
  throw new TypeError("The view's length must be greater than 0 when calling respondWithNewView() on a readable stream");
36043
35122
  }
36044
35123
  }
36045
- if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view2.byteOffset) {
35124
+ if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {
36046
35125
  throw new RangeError("The region specified by view does not match byobRequest");
36047
35126
  }
36048
- if (firstDescriptor.bufferByteLength !== view2.buffer.byteLength) {
35127
+ if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {
36049
35128
  throw new RangeError("The buffer of view has different capacity than byobRequest");
36050
35129
  }
36051
- if (firstDescriptor.bytesFilled + view2.byteLength > firstDescriptor.byteLength) {
35130
+ if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {
36052
35131
  throw new RangeError("The region specified by view is larger than byobRequest");
36053
35132
  }
36054
- const viewByteLength = view2.byteLength;
36055
- firstDescriptor.buffer = TransferArrayBuffer(view2.buffer);
35133
+ const viewByteLength = view.byteLength;
35134
+ firstDescriptor.buffer = TransferArrayBuffer(view.buffer);
36056
35135
  ReadableByteStreamControllerRespondInternal(controller, viewByteLength);
36057
35136
  }
36058
35137
  function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {
@@ -36098,9 +35177,9 @@ var require_ponyfill_es2018 = __commonJS({
36098
35177
  }
36099
35178
  SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);
36100
35179
  }
36101
- function SetUpReadableStreamBYOBRequest(request, controller, view2) {
35180
+ function SetUpReadableStreamBYOBRequest(request, controller, view) {
36102
35181
  request._associatedReadableByteStreamController = controller;
36103
- request._view = view2;
35182
+ request._view = view;
36104
35183
  }
36105
35184
  function byobRequestBrandCheckException(name) {
36106
35185
  return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);
@@ -36176,20 +35255,20 @@ var require_ponyfill_es2018 = __commonJS({
36176
35255
  *
36177
35256
  * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.
36178
35257
  */
36179
- read(view2) {
35258
+ read(view) {
36180
35259
  if (!IsReadableStreamBYOBReader(this)) {
36181
35260
  return promiseRejectedWith(byobReaderBrandCheckException("read"));
36182
35261
  }
36183
- if (!ArrayBuffer.isView(view2)) {
35262
+ if (!ArrayBuffer.isView(view)) {
36184
35263
  return promiseRejectedWith(new TypeError("view must be an array buffer view"));
36185
35264
  }
36186
- if (view2.byteLength === 0) {
35265
+ if (view.byteLength === 0) {
36187
35266
  return promiseRejectedWith(new TypeError("view must have non-zero byteLength"));
36188
35267
  }
36189
- if (view2.buffer.byteLength === 0) {
35268
+ if (view.buffer.byteLength === 0) {
36190
35269
  return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));
36191
35270
  }
36192
- if (IsDetachedBuffer(view2.buffer))
35271
+ if (IsDetachedBuffer(view.buffer))
36193
35272
  ;
36194
35273
  if (this._ownerReadableStream === void 0) {
36195
35274
  return promiseRejectedWith(readerLockException("read from"));
@@ -36205,7 +35284,7 @@ var require_ponyfill_es2018 = __commonJS({
36205
35284
  _closeSteps: (chunk) => resolvePromise({ value: chunk, done: true }),
36206
35285
  _errorSteps: (e2) => rejectPromise(e2)
36207
35286
  };
36208
- ReadableStreamBYOBReaderRead(this, view2, readIntoRequest);
35287
+ ReadableStreamBYOBReaderRead(this, view, readIntoRequest);
36209
35288
  return promise;
36210
35289
  }
36211
35290
  /**
@@ -36251,13 +35330,13 @@ var require_ponyfill_es2018 = __commonJS({
36251
35330
  }
36252
35331
  return x2 instanceof ReadableStreamBYOBReader;
36253
35332
  }
36254
- function ReadableStreamBYOBReaderRead(reader, view2, readIntoRequest) {
35333
+ function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {
36255
35334
  const stream = reader._ownerReadableStream;
36256
35335
  stream._disturbed = true;
36257
35336
  if (stream._state === "errored") {
36258
35337
  readIntoRequest._errorSteps(stream._storedError);
36259
35338
  } else {
36260
- ReadableByteStreamControllerPullInto(stream._readableStreamController, view2, readIntoRequest);
35339
+ ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);
36261
35340
  }
36262
35341
  }
36263
35342
  function byobReaderBrandCheckException(name) {
@@ -37806,7 +36885,7 @@ var require_ponyfill_es2018 = __commonJS({
37806
36885
  };
37807
36886
  ReadableStreamDefaultReaderRead(reader, readRequest);
37808
36887
  }
37809
- function pullWithBYOBReader(view2, forBranch2) {
36888
+ function pullWithBYOBReader(view, forBranch2) {
37810
36889
  if (IsReadableStreamDefaultReader(reader)) {
37811
36890
  ReadableStreamReaderGenericRelease(reader);
37812
36891
  reader = AcquireReadableStreamBYOBReader(stream);
@@ -37872,7 +36951,7 @@ var require_ponyfill_es2018 = __commonJS({
37872
36951
  reading = false;
37873
36952
  }
37874
36953
  };
37875
- ReadableStreamBYOBReaderRead(reader, view2, readIntoRequest);
36954
+ ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);
37876
36955
  }
37877
36956
  function pull1Algorithm() {
37878
36957
  if (reading) {
@@ -52731,15 +51810,15 @@ var require_dist_cjs36 = __commonJS({
52731
51810
  throw new Error("Int64 buffers must be exactly 8 bytes");
52732
51811
  }
52733
51812
  }
52734
- static fromNumber(number2) {
52735
- if (number2 > 9223372036854776e3 || number2 < -9223372036854776e3) {
52736
- throw new Error(`${number2} is too large (or, if negative, too small) to represent as an Int64`);
51813
+ static fromNumber(number3) {
51814
+ if (number3 > 9223372036854776e3 || number3 < -9223372036854776e3) {
51815
+ throw new Error(`${number3} is too large (or, if negative, too small) to represent as an Int64`);
52737
51816
  }
52738
51817
  const bytes = new Uint8Array(8);
52739
- for (let i2 = 7, remaining = Math.abs(Math.round(number2)); i2 > -1 && remaining > 0; i2--, remaining /= 256) {
51818
+ for (let i2 = 7, remaining = Math.abs(Math.round(number3)); i2 > -1 && remaining > 0; i2--, remaining /= 256) {
52740
51819
  bytes[i2] = remaining;
52741
51820
  }
52742
- if (number2 < 0) {
51821
+ if (number3 < 0) {
52743
51822
  negate(bytes);
52744
51823
  }
52745
51824
  return new _Int642(bytes);
@@ -57241,9 +56320,9 @@ var require_dist_cjs46 = __commonJS({
57241
56320
  }
57242
56321
  });
57243
56322
 
57244
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js
56323
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js
57245
56324
  var require_httpAuthSchemeProvider3 = __commonJS({
57246
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
56325
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
57247
56326
  "use strict";
57248
56327
  Object.defineProperty(exports2, "__esModule", { value: true });
57249
56328
  exports2.resolveHttpAuthSchemeConfig = exports2.defaultSSOOIDCHttpAuthSchemeProvider = exports2.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0;
@@ -57310,9 +56389,9 @@ var require_httpAuthSchemeProvider3 = __commonJS({
57310
56389
  }
57311
56390
  });
57312
56391
 
57313
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json
56392
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json
57314
56393
  var require_package4 = __commonJS({
57315
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json"(exports2, module2) {
56394
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json"(exports2, module2) {
57316
56395
  module2.exports = {
57317
56396
  name: "@aws-sdk/client-sso-oidc",
57318
56397
  description: "AWS SDK for JavaScript Sso Oidc Client for Node.js, Browser and React Native",
@@ -57416,9 +56495,9 @@ var require_package4 = __commonJS({
57416
56495
  }
57417
56496
  });
57418
56497
 
57419
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js
56498
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js
57420
56499
  var require_ruleset2 = __commonJS({
57421
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js"(exports2) {
56500
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js"(exports2) {
57422
56501
  "use strict";
57423
56502
  Object.defineProperty(exports2, "__esModule", { value: true });
57424
56503
  exports2.ruleSet = void 0;
@@ -57451,9 +56530,9 @@ var require_ruleset2 = __commonJS({
57451
56530
  }
57452
56531
  });
57453
56532
 
57454
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js
56533
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js
57455
56534
  var require_endpointResolver2 = __commonJS({
57456
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js"(exports2) {
56535
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js"(exports2) {
57457
56536
  "use strict";
57458
56537
  Object.defineProperty(exports2, "__esModule", { value: true });
57459
56538
  exports2.defaultEndpointResolver = void 0;
@@ -57471,9 +56550,9 @@ var require_endpointResolver2 = __commonJS({
57471
56550
  }
57472
56551
  });
57473
56552
 
57474
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js
56553
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js
57475
56554
  var require_runtimeConfig_shared2 = __commonJS({
57476
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js"(exports2) {
56555
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js"(exports2) {
57477
56556
  "use strict";
57478
56557
  Object.defineProperty(exports2, "__esModule", { value: true });
57479
56558
  exports2.getRuntimeConfig = void 0;
@@ -57517,9 +56596,9 @@ var require_runtimeConfig_shared2 = __commonJS({
57517
56596
  }
57518
56597
  });
57519
56598
 
57520
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js
56599
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js
57521
56600
  var require_runtimeConfig2 = __commonJS({
57522
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js"(exports2) {
56601
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js"(exports2) {
57523
56602
  "use strict";
57524
56603
  Object.defineProperty(exports2, "__esModule", { value: true });
57525
56604
  exports2.getRuntimeConfig = void 0;
@@ -57570,9 +56649,9 @@ var require_runtimeConfig2 = __commonJS({
57570
56649
  }
57571
56650
  });
57572
56651
 
57573
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js
56652
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js
57574
56653
  var require_dist_cjs47 = __commonJS({
57575
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js"(exports2, module2) {
56654
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js"(exports2, module2) {
57576
56655
  "use strict";
57577
56656
  var __defProp3 = Object.defineProperty;
57578
56657
  var __getOwnPropDesc3 = Object.getOwnPropertyDescriptor;
@@ -58918,9 +57997,9 @@ Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.ht
58918
57997
  }
58919
57998
  });
58920
57999
 
58921
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js
58000
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js
58922
58001
  var require_httpAuthSchemeProvider4 = __commonJS({
58923
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
58002
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
58924
58003
  "use strict";
58925
58004
  Object.defineProperty(exports2, "__esModule", { value: true });
58926
58005
  exports2.resolveHttpAuthSchemeConfig = exports2.resolveStsAuthConfig = exports2.defaultSTSHttpAuthSchemeProvider = exports2.defaultSTSHttpAuthSchemeParametersProvider = void 0;
@@ -58990,9 +58069,9 @@ var require_httpAuthSchemeProvider4 = __commonJS({
58990
58069
  }
58991
58070
  });
58992
58071
 
58993
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js
58072
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js
58994
58073
  var require_EndpointParameters = __commonJS({
58995
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js"(exports2) {
58074
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js"(exports2) {
58996
58075
  "use strict";
58997
58076
  Object.defineProperty(exports2, "__esModule", { value: true });
58998
58077
  exports2.commonParams = exports2.resolveClientEndpointParameters = void 0;
@@ -59016,9 +58095,9 @@ var require_EndpointParameters = __commonJS({
59016
58095
  }
59017
58096
  });
59018
58097
 
59019
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/package.json
58098
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/package.json
59020
58099
  var require_package5 = __commonJS({
59021
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/package.json"(exports2, module2) {
58100
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/package.json"(exports2, module2) {
59022
58101
  module2.exports = {
59023
58102
  name: "@aws-sdk/client-sts",
59024
58103
  description: "AWS SDK for JavaScript Sts Client for Node.js, Browser and React Native",
@@ -59124,9 +58203,9 @@ var require_package5 = __commonJS({
59124
58203
  }
59125
58204
  });
59126
58205
 
59127
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js
58206
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js
59128
58207
  var require_ruleset3 = __commonJS({
59129
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js"(exports2) {
58208
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js"(exports2) {
59130
58209
  "use strict";
59131
58210
  Object.defineProperty(exports2, "__esModule", { value: true });
59132
58211
  exports2.ruleSet = void 0;
@@ -59171,9 +58250,9 @@ var require_ruleset3 = __commonJS({
59171
58250
  }
59172
58251
  });
59173
58252
 
59174
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js
58253
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js
59175
58254
  var require_endpointResolver3 = __commonJS({
59176
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js"(exports2) {
58255
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js"(exports2) {
59177
58256
  "use strict";
59178
58257
  Object.defineProperty(exports2, "__esModule", { value: true });
59179
58258
  exports2.defaultEndpointResolver = void 0;
@@ -59191,9 +58270,9 @@ var require_endpointResolver3 = __commonJS({
59191
58270
  }
59192
58271
  });
59193
58272
 
59194
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js
58273
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js
59195
58274
  var require_runtimeConfig_shared3 = __commonJS({
59196
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js"(exports2) {
58275
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js"(exports2) {
59197
58276
  "use strict";
59198
58277
  Object.defineProperty(exports2, "__esModule", { value: true });
59199
58278
  exports2.getRuntimeConfig = void 0;
@@ -59237,9 +58316,9 @@ var require_runtimeConfig_shared3 = __commonJS({
59237
58316
  }
59238
58317
  });
59239
58318
 
59240
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js
58319
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js
59241
58320
  var require_runtimeConfig3 = __commonJS({
59242
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js"(exports2) {
58321
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js"(exports2) {
59243
58322
  "use strict";
59244
58323
  Object.defineProperty(exports2, "__esModule", { value: true });
59245
58324
  exports2.getRuntimeConfig = void 0;
@@ -59303,9 +58382,9 @@ var require_runtimeConfig3 = __commonJS({
59303
58382
  }
59304
58383
  });
59305
58384
 
59306
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js
58385
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js
59307
58386
  var require_httpAuthExtensionConfiguration = __commonJS({
59308
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js"(exports2) {
58387
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js"(exports2) {
59309
58388
  "use strict";
59310
58389
  Object.defineProperty(exports2, "__esModule", { value: true });
59311
58390
  exports2.resolveHttpAuthRuntimeConfig = exports2.getHttpAuthExtensionConfiguration = void 0;
@@ -59351,9 +58430,9 @@ var require_httpAuthExtensionConfiguration = __commonJS({
59351
58430
  }
59352
58431
  });
59353
58432
 
59354
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js
58433
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js
59355
58434
  var require_runtimeExtensions = __commonJS({
59356
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js"(exports2) {
58435
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js"(exports2) {
59357
58436
  "use strict";
59358
58437
  Object.defineProperty(exports2, "__esModule", { value: true });
59359
58438
  exports2.resolveRuntimeExtensions = void 0;
@@ -59382,9 +58461,9 @@ var require_runtimeExtensions = __commonJS({
59382
58461
  }
59383
58462
  });
59384
58463
 
59385
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js
58464
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js
59386
58465
  var require_STSClient = __commonJS({
59387
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js"(exports2) {
58466
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js"(exports2) {
59388
58467
  "use strict";
59389
58468
  Object.defineProperty(exports2, "__esModule", { value: true });
59390
58469
  exports2.STSClient = exports2.__Client = void 0;
@@ -59446,9 +58525,9 @@ var require_STSClient = __commonJS({
59446
58525
  }
59447
58526
  });
59448
58527
 
59449
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js
58528
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js
59450
58529
  var require_dist_cjs50 = __commonJS({
59451
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js"(exports2, module2) {
58530
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js"(exports2, module2) {
59452
58531
  "use strict";
59453
58532
  var __defProp3 = Object.defineProperty;
59454
58533
  var __getOwnPropDesc3 = Object.getOwnPropertyDescriptor;
@@ -63606,7 +62685,7 @@ var init_studio2 = __esm({
63606
62685
  });
63607
62686
  };
63608
62687
  prepareServer = async ({
63609
- dialect: dialect7,
62688
+ dialect: dialect4,
63610
62689
  driver: driver2,
63611
62690
  proxy,
63612
62691
  customDefaults,
@@ -63656,7 +62735,7 @@ var init_studio2 = __esm({
63656
62735
  }));
63657
62736
  return c.json({
63658
62737
  version: "6",
63659
- dialect: dialect7,
62738
+ dialect: dialect4,
63660
62739
  driver: driver2,
63661
62740
  schemaFiles,
63662
62741
  customDefaults: preparedDefaults,
@@ -64170,16 +63249,16 @@ var require_map_stream = __commonJS({
64170
63249
  var writeQueue = {};
64171
63250
  stream.writable = true;
64172
63251
  stream.readable = true;
64173
- function queueData(data, number2) {
63252
+ function queueData(data, number3) {
64174
63253
  var nextToWrite = lastWritten + 1;
64175
- if (number2 === nextToWrite) {
63254
+ if (number3 === nextToWrite) {
64176
63255
  if (data !== void 0) {
64177
63256
  stream.emit.apply(stream, ["data", data]);
64178
63257
  }
64179
63258
  lastWritten++;
64180
63259
  nextToWrite++;
64181
63260
  } else {
64182
- writeQueue[number2] = data;
63261
+ writeQueue[number3] = data;
64183
63262
  }
64184
63263
  if (writeQueue.hasOwnProperty(nextToWrite)) {
64185
63264
  var dataToWrite = writeQueue[nextToWrite];
@@ -64194,21 +63273,21 @@ var require_map_stream = __commonJS({
64194
63273
  end();
64195
63274
  }
64196
63275
  }
64197
- function next(err2, data, number2) {
63276
+ function next(err2, data, number3) {
64198
63277
  if (destroyed)
64199
63278
  return;
64200
63279
  inNext = true;
64201
63280
  if (!err2 || self2.opts.failures) {
64202
- queueData(data, number2);
63281
+ queueData(data, number3);
64203
63282
  }
64204
63283
  if (err2) {
64205
63284
  stream.emit.apply(stream, [errorEventName, err2]);
64206
63285
  }
64207
63286
  inNext = false;
64208
63287
  }
64209
- function wrappedMapper(input, number2, callback) {
63288
+ function wrappedMapper(input, number3, callback) {
64210
63289
  return mapper.call(null, input, function(err2, data) {
64211
- callback(err2, data, number2);
63290
+ callback(err2, data, number3);
64212
63291
  });
64213
63292
  }
64214
63293
  stream.write = function(data) {
@@ -78040,7 +77119,7 @@ var init_selector_ui = __esm({
78040
77119
  });
78041
77120
 
78042
77121
  // src/cli/commands/libSqlPushUtils.ts
78043
- var getOldTableName2, _moveDataStatements2, libSqlLogSuggestionsAndReturn;
77122
+ var getOldTableName3, _moveDataStatements2, libSqlLogSuggestionsAndReturn;
78044
77123
  var init_libSqlPushUtils = __esm({
78045
77124
  "src/cli/commands/libSqlPushUtils.ts"() {
78046
77125
  "use strict";
@@ -78048,7 +77127,7 @@ var init_libSqlPushUtils = __esm({
78048
77127
  init_utils();
78049
77128
  init_sqliteSchema();
78050
77129
  init_sqlgenerator();
78051
- getOldTableName2 = (tableName, meta) => {
77130
+ getOldTableName3 = (tableName, meta) => {
78052
77131
  for (const key of Object.keys(meta.tables)) {
78053
77132
  const value = meta.tables[key];
78054
77133
  if (`"${tableName}"` === value) {
@@ -78203,7 +77282,7 @@ var init_libSqlPushUtils = __esm({
78203
77282
  } else if (statement.type === "recreate_table") {
78204
77283
  const tableName = statement.tableName;
78205
77284
  let dataLoss = false;
78206
- const oldTableName = getOldTableName2(tableName, meta);
77285
+ const oldTableName = getOldTableName3(tableName, meta);
78207
77286
  const prevColumnNames = Object.keys(json1.tables[oldTableName].columns);
78208
77287
  const currentColumnNames = Object.keys(json2.tables[tableName].columns);
78209
77288
  const { removedColumns, addedColumns } = findAddedAndRemoved(
@@ -79963,11 +79042,11 @@ var init_pgIntrospect = __esm({
79963
79042
  });
79964
79043
 
79965
79044
  // src/introspect-sqlite.ts
79966
- var import_casing, sqliteImportsList, indexName3, objToStatement2, relations, escapeColumnKey, withCasing, dbColumnName, schemaToTypeScript, isCyclic, isSelf, mapColumnDefault, column4, createTableColumns, createTableIndexes, createTableUniques, createTablePKs, createTableFKs;
79045
+ var import_casing3, sqliteImportsList, indexName3, objToStatement2, relations, escapeColumnKey, withCasing, dbColumnName, schemaToTypeScript, isCyclic, isSelf, mapColumnDefault, column4, createTableColumns, createTableIndexes, createTableUniques, createTablePKs, createTableFKs;
79967
79046
  var init_introspect_sqlite = __esm({
79968
79047
  "src/introspect-sqlite.ts"() {
79969
79048
  "use strict";
79970
- import_casing = require("drizzle-orm/casing");
79049
+ import_casing3 = require("drizzle-orm/casing");
79971
79050
  init_utils3();
79972
79051
  init_global();
79973
79052
  sqliteImportsList = /* @__PURE__ */ new Set([
@@ -80012,7 +79091,7 @@ var init_introspect_sqlite = __esm({
80012
79091
  return "";
80013
79092
  }
80014
79093
  if (casing2 === "camel") {
80015
- return (0, import_casing.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
79094
+ return (0, import_casing3.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80016
79095
  }
80017
79096
  assertUnreachable(casing2);
80018
79097
  };
@@ -80387,13 +79466,13 @@ var init_push = __esm({
80387
79466
  init_mysqlPushUtils();
80388
79467
  init_pgPushUtils();
80389
79468
  init_sqlitePushUtils();
80390
- mysqlPush = async (schemaPath, credentials2, tablesFilter, strict, verbose, force) => {
79469
+ mysqlPush = async (schemaPath, credentials2, tablesFilter, strict, verbose, force, casing2) => {
80391
79470
  const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80392
79471
  const { mysqlPushIntrospect: mysqlPushIntrospect2 } = await Promise.resolve().then(() => (init_mysqlIntrospect(), mysqlIntrospect_exports));
80393
79472
  const { db, database } = await connectToMySQL2(credentials2);
80394
79473
  const { schema: schema5 } = await mysqlPushIntrospect2(db, database, tablesFilter);
80395
79474
  const { prepareMySQLPush: prepareMySQLPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80396
- const statements = await prepareMySQLPush2(schemaPath, schema5);
79475
+ const statements = await prepareMySQLPush2(schemaPath, schema5, casing2);
80397
79476
  const filteredStatements = filterStatements(
80398
79477
  statements.statements ?? [],
80399
79478
  statements.validatedCur,
@@ -80488,13 +79567,13 @@ var init_push = __esm({
80488
79567
  console.log(e2);
80489
79568
  }
80490
79569
  };
80491
- pgPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, schemasFilter, force) => {
79570
+ pgPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, schemasFilter, force, casing2) => {
80492
79571
  const { preparePostgresDB: preparePostgresDB2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80493
79572
  const { pgPushIntrospect: pgPushIntrospect2 } = await Promise.resolve().then(() => (init_pgIntrospect(), pgIntrospect_exports));
80494
79573
  const db = await preparePostgresDB2(credentials2);
80495
79574
  const { schema: schema5 } = await pgPushIntrospect2(db, tablesFilter, schemasFilter);
80496
79575
  const { preparePgPush: preparePgPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80497
- const statements = await preparePgPush2(schemaPath, schema5, schemasFilter);
79576
+ const statements = await preparePgPush2(schemaPath, schema5, schemasFilter, casing2);
80498
79577
  try {
80499
79578
  if (statements.sqlStatements.length === 0) {
80500
79579
  (0, import_hanji11.render)(`[${source_default.blue("i")}] No changes detected`);
@@ -80562,13 +79641,13 @@ var init_push = __esm({
80562
79641
  console.error(e2);
80563
79642
  }
80564
79643
  };
80565
- sqlitePush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force) => {
79644
+ sqlitePush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force, casing2) => {
80566
79645
  const { connectToSQLite: connectToSQLite2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80567
79646
  const { sqlitePushIntrospect: sqlitePushIntrospect2 } = await Promise.resolve().then(() => (init_sqliteIntrospect(), sqliteIntrospect_exports));
80568
79647
  const db = await connectToSQLite2(credentials2);
80569
79648
  const { schema: schema5 } = await sqlitePushIntrospect2(db, tablesFilter);
80570
79649
  const { prepareSQLitePush: prepareSQLitePush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80571
- const statements = await prepareSQLitePush2(schemaPath, schema5);
79650
+ const statements = await prepareSQLitePush2(schemaPath, schema5, casing2);
80572
79651
  if (statements.sqlStatements.length === 0) {
80573
79652
  (0, import_hanji11.render)(`
80574
79653
  [${source_default.blue("i")}] No changes detected`);
@@ -80650,13 +79729,13 @@ var init_push = __esm({
80650
79729
  }
80651
79730
  }
80652
79731
  };
80653
- libSQLPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force) => {
79732
+ libSQLPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force, casing2) => {
80654
79733
  const { connectToLibSQL: connectToLibSQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80655
79734
  const { sqlitePushIntrospect: sqlitePushIntrospect2 } = await Promise.resolve().then(() => (init_sqliteIntrospect(), sqliteIntrospect_exports));
80656
79735
  const db = await connectToLibSQL2(credentials2);
80657
79736
  const { schema: schema5 } = await sqlitePushIntrospect2(db, tablesFilter);
80658
79737
  const { prepareLibSQLPush: prepareLibSQLPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80659
- const statements = await prepareLibSQLPush2(schemaPath, schema5);
79738
+ const statements = await prepareLibSQLPush2(schemaPath, schema5, casing2);
80660
79739
  if (statements.sqlStatements.length === 0) {
80661
79740
  (0, import_hanji11.render)(`
80662
79741
  [${source_default.blue("i")}] No changes detected`);
@@ -81086,11 +80165,11 @@ var require_pluralize = __commonJS({
81086
80165
  });
81087
80166
 
81088
80167
  // src/introspect-mysql.ts
81089
- var import_casing2, mysqlImportsList, objToStatement22, timeConfig, binaryConfig, importsPatch, relations2, escapeColumnKey2, prepareCasing, dbColumnName2, schemaToTypeScript2, isCyclic2, isSelf2, mapColumnDefault2, mapColumnDefaultForJson, column5, createTableColumns2, createTableIndexes2, createTableUniques2, createTablePKs2, createTableFKs2;
80168
+ var import_casing4, mysqlImportsList, objToStatement22, timeConfig, binaryConfig, importsPatch, relations2, escapeColumnKey2, prepareCasing, dbColumnName2, schemaToTypeScript2, isCyclic2, isSelf2, mapColumnDefault2, mapColumnDefaultForJson, column5, createTableColumns2, createTableIndexes2, createTableUniques2, createTablePKs2, createTableFKs2;
81090
80169
  var init_introspect_mysql = __esm({
81091
80170
  "src/introspect-mysql.ts"() {
81092
80171
  "use strict";
81093
- import_casing2 = require("drizzle-orm/casing");
80172
+ import_casing4 = require("drizzle-orm/casing");
81094
80173
  init_utils3();
81095
80174
  init_global();
81096
80175
  init_mysqlSerializer();
@@ -81179,7 +80258,7 @@ var init_introspect_mysql = __esm({
81179
80258
  return "";
81180
80259
  }
81181
80260
  if (casing2 === "camel") {
81182
- return (0, import_casing2.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80261
+ return (0, import_casing4.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
81183
80262
  }
81184
80263
  assertUnreachable(casing2);
81185
80264
  };
@@ -81677,14 +80756,14 @@ function generateIdentityParams(identity) {
81677
80756
  }
81678
80757
  return `.generatedByDefaultAsIdentity(${paramsObj})`;
81679
80758
  }
81680
- var import_drizzle_orm9, import_relations, import_casing3, pgImportsList, timeConfig2, possibleIntervals, intervalStrToObj, intervalConfig, mapColumnDefault3, importsPatch2, relations3, escapeColumnKey3, withCasing2, dbColumnName3, paramNameFor, schemaToTypeScript3, isCyclic3, isSelf3, buildArrayDefault, mapDefault, column6, dimensionsInArray, createTableColumns3, createTableIndexes3, createTablePKs3, createTableUniques3, createTableFKs3;
80759
+ var import_drizzle_orm9, import_relations, import_casing5, pgImportsList, timeConfig2, possibleIntervals, intervalStrToObj, intervalConfig, mapColumnDefault3, importsPatch2, relations3, escapeColumnKey3, withCasing2, dbColumnName3, paramNameFor, schemaToTypeScript3, isCyclic3, isSelf3, buildArrayDefault, mapDefault, column6, dimensionsInArray, createTableColumns3, createTableIndexes3, createTablePKs3, createTableUniques3, createTableFKs3;
81681
80760
  var init_introspect_pg = __esm({
81682
80761
  "src/introspect-pg.ts"() {
81683
80762
  "use strict";
81684
80763
  import_drizzle_orm9 = require("drizzle-orm");
81685
80764
  import_relations = require("drizzle-orm/relations");
81686
80765
  init_utils3();
81687
- import_casing3 = require("drizzle-orm/casing");
80766
+ import_casing5 = require("drizzle-orm/casing");
81688
80767
  init_vector();
81689
80768
  init_global();
81690
80769
  init_pgSerializer();
@@ -81815,7 +80894,7 @@ var init_introspect_pg = __esm({
81815
80894
  return "";
81816
80895
  }
81817
80896
  if (casing2 === "camel") {
81818
- return (0, import_casing3.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80897
+ return (0, import_casing5.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
81819
80898
  }
81820
80899
  assertUnreachable(casing2);
81821
80900
  };
@@ -81881,30 +80960,6 @@ var init_introspect_pg = __esm({
81881
80960
  },
81882
80961
  { pg: [] }
81883
80962
  );
81884
- Object.values(schema5.views).forEach((it) => {
81885
- if (it.schema && it.schema !== "public" && it.schema !== "") {
81886
- imports.pg.push("pgSchema");
81887
- } else if (it.schema === "public") {
81888
- it.materialized ? imports.pg.push("pgMaterializedView") : imports.pg.push("pgView");
81889
- }
81890
- Object.values(it.columns).forEach(() => {
81891
- const columnImports = Object.values(it.columns).map((col) => {
81892
- let patched = (importsPatch2[col.type] || col.type).replace("[]", "");
81893
- patched = patched === "double precision" ? "doublePrecision" : patched;
81894
- patched = patched.startsWith("varchar(") ? "varchar" : patched;
81895
- patched = patched.startsWith("char(") ? "char" : patched;
81896
- patched = patched.startsWith("numeric(") ? "numeric" : patched;
81897
- patched = patched.startsWith("time(") ? "time" : patched;
81898
- patched = patched.startsWith("timestamp(") ? "timestamp" : patched;
81899
- patched = patched.startsWith("vector(") ? "vector" : patched;
81900
- patched = patched.startsWith("geometry(") ? "geometry" : patched;
81901
- return patched;
81902
- }).filter((type) => {
81903
- return pgImportsList.has(type);
81904
- });
81905
- imports.pg.push(...columnImports);
81906
- });
81907
- });
81908
80963
  Object.values(schema5.sequences).forEach((it) => {
81909
80964
  if (it.schema && it.schema !== "public" && it.schema !== "") {
81910
80965
  imports.pg.push("pgSchema");
@@ -81999,28 +81054,6 @@ var init_introspect_pg = __esm({
81999
81054
  statement += ");";
82000
81055
  return statement;
82001
81056
  });
82002
- const viewsStatements = Object.values(schema5.views).map((it) => {
82003
- const viewSchema = schemas[it.schema];
82004
- const paramName = paramNameFor(it.name, viewSchema);
82005
- const func = viewSchema ? it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view` : it.materialized ? "pgMaterializedView" : "pgView";
82006
- const withOption = it.with ?? "";
82007
- const as = `sql\`${it.definition}\``;
82008
- const tablespace = it.tablespace ?? "";
82009
- const columns = createTableColumns3(
82010
- "",
82011
- Object.values(it.columns),
82012
- [],
82013
- enumTypes,
82014
- schemas,
82015
- casing2,
82016
- schema5.internal
82017
- );
82018
- let statement = `export const ${withCasing2(paramName, casing2)} = ${func}("${it.name}", {${columns}})`;
82019
- statement += tablespace ? `.tablespace("${tablespace}")` : "";
82020
- statement += withOption ? `.with(${JSON.stringify(withOption)})` : "";
82021
- statement += `.as(${as});`;
82022
- return statement;
82023
- }).join("\n\n");
82024
81057
  const uniquePgImports = ["pgTable", ...new Set(imports.pg)];
82025
81058
  const importsTs = `import { ${uniquePgImports.join(
82026
81059
  ", "
@@ -82033,8 +81066,6 @@ var init_introspect_pg = __esm({
82033
81066
  decalrations += sequencesStatements;
82034
81067
  decalrations += "\n";
82035
81068
  decalrations += tableStatements.join("\n\n");
82036
- decalrations += "\n";
82037
- decalrations += viewsStatements;
82038
81069
  const file = importsTs + decalrations;
82039
81070
  const schemaEntry = `
82040
81071
  {
@@ -82593,7 +81624,6 @@ var init_introspect = __esm({
82593
81624
  sequencesResolver,
82594
81625
  tablesResolver,
82595
81626
  columnsResolver,
82596
- viewsResolver,
82597
81627
  dryPg,
82598
81628
  schema5
82599
81629
  );
@@ -84449,9 +83479,9 @@ init_source();
84449
83479
 
84450
83480
  // src/cli/commands/check.ts
84451
83481
  init_utils();
84452
- var checkHandler = (out, dialect7) => {
84453
- const { snapshots } = prepareOutFolder(out, dialect7);
84454
- const report = validateWithReport(snapshots, dialect7);
83482
+ var checkHandler = (out, dialect4) => {
83483
+ const { snapshots } = prepareOutFolder(out, dialect4);
83484
+ const report = validateWithReport(snapshots, dialect4);
84455
83485
  if (report.nonLatest.length > 0) {
84456
83486
  console.log(
84457
83487
  report.nonLatest.map((it) => {
@@ -85738,12 +84768,14 @@ var optionBreakpoints = boolean().desc(
85738
84768
  `Prepare SQL statements with breakpoints`
85739
84769
  );
85740
84770
  var optionDriver = string().enum(...drivers).desc("Database driver");
84771
+ var optionCasing = string().enum("camelCase", "snake_case").desc("Casing for serialization");
85741
84772
  var generate = command({
85742
84773
  name: "generate",
85743
84774
  options: {
85744
84775
  config: optionConfig,
85745
84776
  dialect: optionDialect,
85746
84777
  driver: optionDriver,
84778
+ casing: optionCasing,
85747
84779
  schema: string().desc("Path to a schema file or folder"),
85748
84780
  out: optionOut,
85749
84781
  name: string().desc("Migration file name"),
@@ -85756,7 +84788,7 @@ var generate = command({
85756
84788
  "generate",
85757
84789
  opts,
85758
84790
  ["prefix", "name", "custom"],
85759
- ["driver", "breakpoints", "schema", "out", "dialect"]
84791
+ ["driver", "breakpoints", "schema", "out", "dialect", "casing"]
85760
84792
  );
85761
84793
  return prepareGenerateConfig(opts, from);
85762
84794
  },
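The two hunks above declare a new "casing" option ("camelCase" | "snake_case") and attach it to the generate command. A minimal usage sketch follows, assuming the published defineConfig helper accepts the same field and that the CLI flag is spelled --casing; neither detail is stated in this diff beyond the optionCasing declaration, so treat the names below as assumptions.

// drizzle.config.ts — hypothetical example; field and flag names mirror optionCasing above
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "postgresql",
  schema: "./src/schema.ts",
  out: "./drizzle",
  casing: "snake_case", // serialize identifiers declared in camelCase as snake_case SQL names
});

// equivalently, with the assumed flag syntax: npx drizzle-kit generate --casing snake_case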
@@ -85769,17 +84801,17 @@ var generate = command({
85769
84801
  prepareAndMigrateSqlite: prepareAndMigrateSqlite2,
85770
84802
  prepareAndMigrateLibSQL: prepareAndMigrateLibSQL2
85771
84803
  } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
85772
- const dialect7 = opts.dialect;
85773
- if (dialect7 === "postgresql") {
84804
+ const dialect4 = opts.dialect;
84805
+ if (dialect4 === "postgresql") {
85774
84806
  await prepareAndMigratePg2(opts);
85775
- } else if (dialect7 === "mysql") {
84807
+ } else if (dialect4 === "mysql") {
85776
84808
  await prepareAndMigrateMysql2(opts);
85777
- } else if (dialect7 === "sqlite") {
84809
+ } else if (dialect4 === "sqlite") {
85778
84810
  await prepareAndMigrateSqlite2(opts);
85779
- } else if (dialect7 === "turso") {
84811
+ } else if (dialect4 === "turso") {
85780
84812
  await prepareAndMigrateLibSQL2(opts);
85781
84813
  } else {
85782
- assertUnreachable(dialect7);
84814
+ assertUnreachable(dialect4);
85783
84815
  }
85784
84816
  }
85785
84817
  });
@@ -85794,9 +84826,9 @@ var migrate = command({
85794
84826
  handler: async (opts) => {
85795
84827
  await assertOrmCoreVersion();
85796
84828
  await assertPackages("drizzle-orm");
85797
- const { dialect: dialect7, schema: schema5, table: table4, out, credentials: credentials2 } = opts;
84829
+ const { dialect: dialect4, schema: schema5, table: table4, out, credentials: credentials2 } = opts;
85798
84830
  try {
85799
- if (dialect7 === "postgresql") {
84831
+ if (dialect4 === "postgresql") {
85800
84832
  if ("driver" in credentials2) {
85801
84833
  const { driver: driver2 } = credentials2;
85802
84834
  if (driver2 === "aws-data-api") {
@@ -85827,7 +84859,7 @@ var migrate = command({
85827
84859
  migrationsSchema: schema5
85828
84860
  })
85829
84861
  );
85830
- } else if (dialect7 === "mysql") {
84862
+ } else if (dialect4 === "mysql") {
85831
84863
  const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
85832
84864
  const { migrate: migrate2 } = await connectToMySQL2(credentials2);
85833
84865
  await (0, import_hanji13.renderWithTask)(
@@ -85838,7 +84870,7 @@ var migrate = command({
85838
84870
  migrationsSchema: schema5
85839
84871
  })
85840
84872
  );
85841
- } else if (dialect7 === "sqlite") {
84873
+ } else if (dialect4 === "sqlite") {
85842
84874
  const { connectToSQLite: connectToSQLite2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
85843
84875
  const { migrate: migrate2 } = await connectToSQLite2(credentials2);
85844
84876
  await (0, import_hanji13.renderWithTask)(
@@ -85849,7 +84881,7 @@ var migrate = command({
85849
84881
  migrationsSchema: schema5
85850
84882
  })
85851
84883
  );
85852
- } else if (dialect7 === "turso") {
84884
+ } else if (dialect4 === "turso") {
85853
84885
  const { connectToLibSQL: connectToLibSQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
85854
84886
  const { migrate: migrate2 } = await connectToLibSQL2(credentials2);
85855
84887
  await (0, import_hanji13.renderWithTask)(
@@ -85861,7 +84893,7 @@ var migrate = command({
85861
84893
  })
85862
84894
  );
85863
84895
  } else {
85864
- assertUnreachable(dialect7);
84896
+ assertUnreachable(dialect4);
85865
84897
  }
85866
84898
  } catch (e2) {
85867
84899
  console.error(e2);
@@ -85895,6 +84927,7 @@ var push = command({
85895
84927
  options: {
85896
84928
  config: optionConfig,
85897
84929
  dialect: optionDialect,
84930
+ casing: optionCasing,
85898
84931
  schema: string().desc("Path to a schema file or folder"),
85899
84932
  ...optionsFilters,
85900
84933
  ...optionsDatabaseCredentials,
@@ -85923,7 +84956,8 @@ var push = command({
85923
84956
  "authToken",
85924
84957
  "schemaFilters",
85925
84958
  "extensionsFilters",
85926
- "tablesFilter"
84959
+ "tablesFilter",
84960
+ "casing"
85927
84961
  ]
85928
84962
  );
85929
84963
  return preparePushConfig(opts, from);
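For push, the same option is threaded through preparePushConfig and on to mysqlPush, pgPush, sqlitePush and libSQLPush, each of which (per the earlier hunks) now forwards a casing2 argument into the serializer. A hypothetical schema fragment illustrating what the setting is for: with casing set to "snake_case", a column declared without an explicit database name would be expected to serialize as a snake_case identifier. This assumes a drizzle-orm version that allows omitting the column name, which this diff does not show.

// schema.ts — hypothetical illustration only
import { pgTable, serial, text } from "drizzle-orm/pg-core";

export const users = pgTable("users", {
  id: serial().primaryKey(),
  firstName: text(), // expected to map to "first_name" when casing is "snake_case"
});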
@@ -85932,17 +84966,18 @@ var push = command({
85932
84966
  await assertPackages("drizzle-orm");
85933
84967
  await assertOrmCoreVersion();
85934
84968
  const {
85935
- dialect: dialect7,
84969
+ dialect: dialect4,
85936
84970
  schemaPath,
85937
84971
  strict,
85938
84972
  verbose,
85939
84973
  credentials: credentials2,
85940
84974
  tablesFilter,
85941
84975
  schemasFilter,
85942
- force
84976
+ force,
84977
+ casing: casing2
85943
84978
  } = config;
85944
84979
  try {
85945
- if (dialect7 === "mysql") {
84980
+ if (dialect4 === "mysql") {
85946
84981
  const { mysqlPush: mysqlPush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
85947
84982
  await mysqlPush2(
85948
84983
  schemaPath,
@@ -85950,9 +84985,10 @@ var push = command({
85950
84985
  tablesFilter,
85951
84986
  strict,
85952
84987
  verbose,
85953
- force
84988
+ force,
84989
+ casing2
85954
84990
  );
85955
- } else if (dialect7 === "postgresql") {
84991
+ } else if (dialect4 === "postgresql") {
85956
84992
  if ("driver" in credentials2) {
85957
84993
  const { driver: driver2 } = credentials2;
85958
84994
  if (driver2 === "aws-data-api") {
@@ -85981,9 +85017,10 @@ var push = command({
85981
85017
  credentials2,
85982
85018
  tablesFilter,
85983
85019
  schemasFilter,
85984
- force
85020
+ force,
85021
+ casing2
85985
85022
  );
85986
- } else if (dialect7 === "sqlite") {
85023
+ } else if (dialect4 === "sqlite") {
85987
85024
  const { sqlitePush: sqlitePush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
85988
85025
  await sqlitePush2(
85989
85026
  schemaPath,
@@ -85991,9 +85028,10 @@ var push = command({
85991
85028
  strict,
85992
85029
  credentials2,
85993
85030
  tablesFilter,
85994
- force
85031
+ force,
85032
+ casing2
85995
85033
  );
85996
- } else if (dialect7 === "turso") {
85034
+ } else if (dialect4 === "turso") {
85997
85035
  const { libSQLPush: libSQLPush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
85998
85036
  await libSQLPush2(
85999
85037
  schemaPath,
@@ -86001,10 +85039,11 @@ var push = command({
86001
85039
  strict,
86002
85040
  credentials2,
86003
85041
  tablesFilter,
86004
- force
85042
+ force,
85043
+ casing2
86005
85044
  );
86006
85045
  } else {
86007
- assertUnreachable(dialect7);
85046
+ assertUnreachable(dialect4);
86008
85047
  }
86009
85048
  } catch (e2) {
86010
85049
  console.error(e2);
@@ -86025,8 +85064,8 @@ var check = command({
86025
85064
  },
86026
85065
  handler: async (config) => {
86027
85066
  await assertOrmCoreVersion();
86028
- const { out, dialect: dialect7 } = config;
86029
- checkHandler(out, dialect7);
85067
+ const { out, dialect: dialect4 } = config;
85068
+ checkHandler(out, dialect4);
86030
85069
  console.log("Everything's fine \u{1F436}\u{1F525}");
86031
85070
  }
86032
85071
  });
@@ -86043,15 +85082,15 @@ var up = command({
86043
85082
  },
86044
85083
  handler: async (config) => {
86045
85084
  await assertOrmCoreVersion();
86046
- const { out, dialect: dialect7 } = config;
85085
+ const { out, dialect: dialect4 } = config;
86047
85086
  await assertPackages("drizzle-orm");
86048
- if (dialect7 === "postgresql") {
85087
+ if (dialect4 === "postgresql") {
86049
85088
  upPgHandler(out);
86050
85089
  }
86051
- if (dialect7 === "mysql") {
85090
+ if (dialect4 === "mysql") {
86052
85091
  upMysqlHandler(out);
86053
85092
  }
86054
- if (dialect7 === "sqlite" || dialect7 === "turso") {
85093
+ if (dialect4 === "sqlite" || dialect4 === "turso") {
86055
85094
  upSqliteHandler(out);
86056
85095
  }
86057
85096
  }
@@ -86098,7 +85137,7 @@ var pull = command({
86098
85137
  await assertPackages("drizzle-orm");
86099
85138
  await assertOrmCoreVersion();
86100
85139
  const {
86101
- dialect: dialect7,
85140
+ dialect: dialect4,
86102
85141
  credentials: credentials2,
86103
85142
  out,
86104
85143
  casing: casing2,
@@ -86115,7 +85154,7 @@ var pull = command({
86115
85154
  );
86116
85155
  console.log();
86117
85156
  try {
86118
- if (dialect7 === "postgresql") {
85157
+ if (dialect4 === "postgresql") {
86119
85158
  if ("driver" in credentials2) {
86120
85159
  const { driver: driver2 } = credentials2;
86121
85160
  if (driver2 === "aws-data-api") {
@@ -86146,7 +85185,7 @@ var pull = command({
86146
85185
  schemasFilter,
86147
85186
  prefix2
86148
85187
  );
86149
- } else if (dialect7 === "mysql") {
85188
+ } else if (dialect4 === "mysql") {
86150
85189
  const { introspectMysql: introspectMysql2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
86151
85190
  await introspectMysql2(
86152
85191
  casing2,
@@ -86156,7 +85195,7 @@ var pull = command({
86156
85195
  tablesFilter,
86157
85196
  prefix2
86158
85197
  );
86159
- } else if (dialect7 === "sqlite") {
85198
+ } else if (dialect4 === "sqlite") {
86160
85199
  const { introspectSqlite: introspectSqlite2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
86161
85200
  await introspectSqlite2(
86162
85201
  casing2,
@@ -86166,7 +85205,7 @@ var pull = command({
86166
85205
  tablesFilter,
86167
85206
  prefix2
86168
85207
  );
86169
- } else if (dialect7 === "turso") {
85208
+ } else if (dialect4 === "turso") {
86170
85209
  const { introspectLibSQL: introspectLibSQL2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
86171
85210
  await introspectLibSQL2(
86172
85211
  casing2,
@@ -86177,7 +85216,7 @@ var pull = command({
86177
85216
  prefix2
86178
85217
  );
86179
85218
  } else {
86180
- assertUnreachable(dialect7);
85219
+ assertUnreachable(dialect4);
86181
85220
  }
86182
85221
  } catch (e2) {
86183
85222
  console.error(e2);
@@ -86215,7 +85254,7 @@ var studio = command({
86215
85254
  await assertPackages("drizzle-orm");
86216
85255
  assertStudioNodeVersion();
86217
85256
  const {
86218
- dialect: dialect7,
85257
+ dialect: dialect4,
86219
85258
  schema: schemaPath,
86220
85259
  port,
86221
85260
  host,
@@ -86231,7 +85270,7 @@ var studio = command({
86231
85270
  } = await Promise.resolve().then(() => (init_studio2(), studio_exports));
86232
85271
  let setup;
86233
85272
  try {
86234
- if (dialect7 === "postgresql") {
85273
+ if (dialect4 === "postgresql") {
86235
85274
  if ("driver" in credentials2) {
86236
85275
  const { driver: driver2 } = credentials2;
86237
85276
  if (driver2 === "aws-data-api") {
@@ -86254,17 +85293,17 @@ var studio = command({
86254
85293
  }
86255
85294
  const { schema: schema5, relations: relations4, files } = schemaPath ? await preparePgSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86256
85295
  setup = await drizzleForPostgres2(credentials2, schema5, relations4, files);
86257
- } else if (dialect7 === "mysql") {
85296
+ } else if (dialect4 === "mysql") {
86258
85297
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareMySqlSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86259
85298
  setup = await drizzleForMySQL2(credentials2, schema5, relations4, files);
86260
- } else if (dialect7 === "sqlite") {
85299
+ } else if (dialect4 === "sqlite") {
86261
85300
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareSQLiteSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86262
85301
  setup = await drizzleForSQLite2(credentials2, schema5, relations4, files);
86263
- } else if (dialect7 === "turso") {
85302
+ } else if (dialect4 === "turso") {
86264
85303
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareSQLiteSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86265
85304
  setup = await drizzleForLibSQL(credentials2, schema5, relations4, files);
86266
85305
  } else {
86267
- assertUnreachable(dialect7);
85306
+ assertUnreachable(dialect4);
86268
85307
  }
86269
85308
  const { prepareServer: prepareServer2 } = await Promise.resolve().then(() => (init_studio2(), studio_exports));
86270
85309
  const server = await prepareServer2(setup);
@@ -86315,7 +85354,7 @@ init_utils2();
86315
85354
  var version2 = async () => {
86316
85355
  const { npmVersion } = await ormCoreVersions();
86317
85356
  const ormVersion = npmVersion ? `drizzle-orm: v${npmVersion}` : "";
86318
- const envVersion = "0.25.0-a5a41e0";
85357
+ const envVersion = "0.25.0-a9aca5c";
86319
85358
  const kitVersion = envVersion ? `v${envVersion}` : "--";
86320
85359
  const versions = `drizzle-kit: ${kitVersion}
86321
85360
  ${ormVersion}`;