drizzle-kit 0.25.0-f5f3e49 → 0.25.0

package/bin.cjs CHANGED
@@ -922,8 +922,8 @@ var require_hanji = __commonJS({
  };
  exports2.deferred = deferred;
  var Terminal = class {
- constructor(view2, stdin, stdout, closable) {
- this.view = view2;
+ constructor(view, stdin, stdout, closable) {
+ this.view = view;
  this.stdin = stdin;
  this.stdout = stdout;
  this.closable = closable;
@@ -961,7 +961,7 @@ var require_hanji = __commonJS({
  this.resolve({ status: "submitted", data: this.view.result() });
  return;
  }
- view2.input(str, key);
+ view.input(str, key);
  };
  this.stdin.on("keypress", keypress);
  this.view.attach(this);
@@ -1023,8 +1023,8 @@ var require_hanji = __commonJS({
  };
  exports2.TaskView = TaskView2;
  var TaskTerminal = class {
- constructor(view2, stdout) {
- this.view = view2;
+ constructor(view, stdout) {
+ this.view = view;
  this.stdout = stdout;
  this.text = "";
  this.view.attach(this);
@@ -1043,22 +1043,22 @@ var require_hanji = __commonJS({
  }
  };
  exports2.TaskTerminal = TaskTerminal;
- function render9(view2) {
+ function render9(view) {
  const { stdin, stdout, closable } = (0, readline_1.prepareReadLine)();
- if (view2 instanceof Prompt3) {
- const terminal = new Terminal(view2, stdin, stdout, closable);
+ if (view instanceof Prompt3) {
+ const terminal = new Terminal(view, stdin, stdout, closable);
  terminal.requestLayout();
  return terminal.result();
  }
- stdout.write(`${view2}
+ stdout.write(`${view}
  `);
  closable.close();
  return;
  }
  exports2.render = render9;
- function renderWithTask6(view2, task) {
+ function renderWithTask6(view, task) {
  return __awaiter2(this, void 0, void 0, function* () {
- const terminal = new TaskTerminal(view2, process.stdout);
+ const terminal = new TaskTerminal(view, process.stdout);
  terminal.requestLayout();
  const result = yield task;
  terminal.clear();
@@ -5706,7 +5706,7 @@ var init_vector = __esm({
  });

  // src/serializer/pgSchema.ts
- var indexV2, columnV2, tableV2, enumSchemaV1, enumSchema, pgSchemaV2, references, columnV1, tableV1, pgSchemaV1, indexColumn, index2, indexV4, indexV5, indexV6, fk2, sequenceSchema, sequenceSquashed, columnV7, column2, columnSquashed, tableV32, compositePK2, uniqueConstraint2, viewWithOption, matViewWithOption, mergedViewWithOption, view, tableV42, tableV5, tableV6, tableV7, table2, schemaHash2, kitInternals2, pgSchemaInternalV3, pgSchemaInternalV4, pgSchemaInternalV5, pgSchemaInternalV6, pgSchemaExternal, pgSchemaInternalV7, pgSchemaInternal, tableSquashed2, tableSquashedV42, pgSchemaSquashedV4, pgSchemaSquashedV6, pgSchemaSquashed, pgSchemaV3, pgSchemaV4, pgSchemaV5, pgSchemaV6, pgSchemaV7, pgSchema, backwardCompatiblePgSchema, PgSquasher, squashPgScheme, dryPg;
+ var indexV2, columnV2, tableV2, enumSchemaV1, enumSchema, pgSchemaV2, references, columnV1, tableV1, pgSchemaV1, indexColumn, index2, indexV4, indexV5, indexV6, fk2, sequenceSchema, sequenceSquashed, columnV7, column2, columnSquashed, tableV32, compositePK2, uniqueConstraint2, tableV42, tableV5, tableV6, tableV7, table2, schemaHash2, kitInternals2, pgSchemaInternalV3, pgSchemaInternalV4, pgSchemaInternalV5, pgSchemaInternalV6, pgSchemaExternal, pgSchemaInternalV7, pgSchemaInternal, tableSquashed2, tableSquashedV42, pgSchemaSquashedV4, pgSchemaSquashedV6, pgSchemaSquashed, pgSchemaV3, pgSchemaV4, pgSchemaV5, pgSchemaV6, pgSchemaV7, pgSchema, backwardCompatiblePgSchema, PgSquasher, squashPgScheme, dryPg;
  var init_pgSchema = __esm({
  "src/serializer/pgSchema.ts"() {
  "use strict";
@@ -5901,44 +5901,6 @@ var init_pgSchema = __esm({
  columns: stringType().array(),
  nullsNotDistinct: booleanType()
  }).strict();
- viewWithOption = objectType({
- checkOption: enumType(["local", "cascaded"]).optional(),
- securityBarrier: booleanType().optional(),
- securityInvoker: booleanType().optional()
- }).strict();
- matViewWithOption = objectType({
- fillfactor: numberType().optional(),
- toastTupleTarget: numberType().optional(),
- parallelWorkers: numberType().optional(),
- autovacuumEnabled: booleanType().optional(),
- vacuumIndexCleanup: enumType(["auto", "off", "on"]).optional(),
- vacuumTruncate: booleanType().optional(),
- autovacuumVacuumThreshold: numberType().optional(),
- autovacuumVacuumScaleFactor: numberType().optional(),
- autovacuumVacuumCostDelay: numberType().optional(),
- autovacuumVacuumCostLimit: numberType().optional(),
- autovacuumFreezeMinAge: numberType().optional(),
- autovacuumFreezeMaxAge: numberType().optional(),
- autovacuumFreezeTableAge: numberType().optional(),
- autovacuumMultixactFreezeMinAge: numberType().optional(),
- autovacuumMultixactFreezeMaxAge: numberType().optional(),
- autovacuumMultixactFreezeTableAge: numberType().optional(),
- logAutovacuumMinDuration: numberType().optional(),
- userCatalogTable: booleanType().optional()
- }).strict();
- mergedViewWithOption = viewWithOption.merge(matViewWithOption);
- view = objectType({
- name: stringType(),
- schema: stringType(),
- columns: recordType(stringType(), column2),
- definition: stringType().optional(),
- materialized: booleanType(),
- with: mergedViewWithOption.optional(),
- isExisting: booleanType(),
- withNoData: booleanType().optional(),
- using: stringType().optional(),
- tablespace: stringType().optional()
- }).strict();
  tableV42 = objectType({
  name: stringType(),
  schema: stringType(),
@@ -6073,7 +6035,6 @@ var init_pgSchema = __esm({
  tables: recordType(stringType(), table2),
  enums: recordType(stringType(), enumSchema),
  schemas: recordType(stringType(), stringType()),
- views: recordType(stringType(), view).default({}),
  sequences: recordType(stringType(), sequenceSchema).default({}),
  _meta: objectType({
  schemas: recordType(stringType(), stringType()),
@@ -6118,7 +6079,6 @@ var init_pgSchema = __esm({
  tables: recordType(stringType(), tableSquashed2),
  enums: recordType(stringType(), enumSchema),
  schemas: recordType(stringType(), stringType()),
- views: recordType(stringType(), view),
  sequences: recordType(stringType(), sequenceSquashed)
  }).strict();
  pgSchemaV3 = pgSchemaInternalV3.merge(schemaHash2);
@@ -6336,7 +6296,6 @@ var init_pgSchema = __esm({
  tables: mappedTables,
  enums: json.enums,
  schemas: json.schemas,
- views: json.views,
  sequences: mappedSequences
  };
  };
@@ -6632,11 +6591,17 @@ function findAddedAndRemoved(columnNames1, columnNames2) {
  const removedColumns = columnNames1.filter((it) => !set2.has(it));
  return { addedColumns, removedColumns };
  }
- var import_fs, import_path, import_url, copy, objectValues, assertV1OutFolder, dryJournal, prepareOutFolder, validatorForDialect, validateWithReport, prepareMigrationFolder, prepareMigrationMeta, schemaRenameKey, tableRenameKey, columnRenameKey, normaliseSQLiteUrl, normalisePGliteUrl;
+ function getColumnCasing(column7, casing2) {
+ if (!column7.name)
+ return "";
+ return !column7.keyAsName || casing2 === void 0 ? column7.name : casing2 === "camelCase" ? (0, import_casing.toCamelCase)(column7.name) : (0, import_casing.toSnakeCase)(column7.name);
+ }
+ var import_casing, import_fs, import_path, import_url, copy, objectValues, assertV1OutFolder, dryJournal, prepareOutFolder, validatorForDialect, validateWithReport, prepareMigrationFolder, prepareMigrationMeta, schemaRenameKey, tableRenameKey, columnRenameKey, normaliseSQLiteUrl, normalisePGliteUrl;
  var init_utils = __esm({
  "src/utils.ts"() {
  "use strict";
  init_source();
+ import_casing = require("drizzle-orm/casing");
  import_fs = require("fs");
  import_path = require("path");
  import_url = require("url");
@@ -6666,27 +6631,27 @@ var init_utils = __esm({
  process.exit(1);
  }
  };
- dryJournal = (dialect7) => {
+ dryJournal = (dialect4) => {
  return {
  version: snapshotVersion,
- dialect: dialect7,
+ dialect: dialect4,
  entries: []
  };
  };
- prepareOutFolder = (out, dialect7) => {
+ prepareOutFolder = (out, dialect4) => {
  const meta = (0, import_path.join)(out, "meta");
  const journalPath = (0, import_path.join)(meta, "_journal.json");
  if (!(0, import_fs.existsSync)((0, import_path.join)(out, "meta"))) {
  (0, import_fs.mkdirSync)(meta, { recursive: true });
- (0, import_fs.writeFileSync)(journalPath, JSON.stringify(dryJournal(dialect7)));
+ (0, import_fs.writeFileSync)(journalPath, JSON.stringify(dryJournal(dialect4)));
  }
  const journal = JSON.parse((0, import_fs.readFileSync)(journalPath).toString());
  const snapshots = (0, import_fs.readdirSync)(meta).filter((it) => !it.startsWith("_")).map((it) => (0, import_path.join)(meta, it));
  snapshots.sort();
  return { meta, snapshots, journal };
  };
- validatorForDialect = (dialect7) => {
- switch (dialect7) {
+ validatorForDialect = (dialect4) => {
+ switch (dialect4) {
  case "postgresql":
  return { validator: backwardCompatiblePgSchema, version: 7 };
  case "sqlite":
@@ -6697,8 +6662,8 @@ var init_utils = __esm({
  return { validator: backwardCompatibleMysqlSchema, version: 5 };
  }
  };
- validateWithReport = (snapshots, dialect7) => {
- const { validator: validator2, version: version3 } = validatorForDialect(dialect7);
+ validateWithReport = (snapshots, dialect4) => {
+ const { validator: validator2, version: version3 } = validatorForDialect(dialect4);
  const result = snapshots.reduce(
  (accum, it) => {
  const raw2 = JSON.parse((0, import_fs.readFileSync)(`./${it}`).toString());
@@ -6739,9 +6704,9 @@ var init_utils = __esm({
  );
  return result;
  };
- prepareMigrationFolder = (outFolder = "drizzle", dialect7) => {
- const { snapshots, journal } = prepareOutFolder(outFolder, dialect7);
- const report = validateWithReport(snapshots, dialect7);
+ prepareMigrationFolder = (outFolder = "drizzle", dialect4) => {
+ const { snapshots, journal } = prepareOutFolder(outFolder, dialect4);
+ const report = validateWithReport(snapshots, dialect4);
  if (report.nonLatest.length > 0) {
  console.log(
  report.nonLatest.map((it) => {
@@ -8852,9 +8817,6 @@ var init_utils3 = __esm({
  String.prototype.concatIf = function(it, condition) {
  return condition ? `${this}${it}` : String(this);
  };
- String.prototype.snake_case = function() {
- return this && this.length > 0 ? `${this.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`)}` : String(this);
- };
  Array.prototype.random = function() {
  return this[~~(Math.random() * this.length)];
  };
@@ -11294,7 +11256,7 @@ var init_outputs = __esm({
  });

  // src/cli/validations/common.ts
- var assertCollisions, sqliteDriversLiterals, postgresqlDriversLiterals, prefixes, prefix, sqliteDriver, postgresDriver, driver, configMigrations, configCommonSchema, casing, introspectParams, configIntrospectCliSchema, configGenerateSchema, configPushSchema, drivers, wrapParam;
+ var assertCollisions, sqliteDriversLiterals, postgresqlDriversLiterals, prefixes, prefix, casingTypes, casingType, sqliteDriver, postgresDriver, driver, configMigrations, configCommonSchema, casing, introspectParams, configIntrospectCliSchema, configGenerateSchema, configPushSchema, drivers, wrapParam;
  var init_common = __esm({
  "src/cli/validations/common.ts"() {
  "use strict";
@@ -11338,6 +11300,8 @@ var init_common = __esm({
  {
  const _2 = "";
  }
+ casingTypes = ["snake_case", "camelCase"];
+ casingType = enumType(casingTypes);
  sqliteDriver = unionType(sqliteDriversLiterals);
  postgresDriver = unionType(postgresqlDriversLiterals);
  driver = unionType([sqliteDriver, postgresDriver]);
@@ -11356,7 +11320,8 @@ var init_common = __esm({
  tablesFilter: unionType([stringType(), stringType().array()]).optional(),
  schemaFilter: unionType([stringType(), stringType().array()]).default(["public"]),
  migrations: configMigrations,
- dbCredentials: anyType().optional()
+ dbCredentials: anyType().optional(),
+ casing: casingType.optional()
  }).passthrough();
  casing = unionType([literalType("camel"), literalType("preserve")]).default(
  "camel"
@@ -11438,6 +11403,7 @@ var init_cli = __esm({
  }).strict();
  pushParams = objectType({
  dialect: dialect3,
+ casing: casingType.optional(),
  schema: unionType([stringType(), stringType().array()]),
  tablesFilter: unionType([stringType(), stringType().array()]).optional(),
  schemaFilter: unionType([stringType(), stringType().array()]).optional().default(["public"]),
@@ -12551,11 +12517,11 @@ var require_node2 = __commonJS({
  };
  var require_base64 = __commonJS3((exports3) => {
  var intToCharMap = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");
- exports3.encode = function(number2) {
- if (0 <= number2 && number2 < intToCharMap.length) {
- return intToCharMap[number2];
+ exports3.encode = function(number3) {
+ if (0 <= number3 && number3 < intToCharMap.length) {
+ return intToCharMap[number3];
  }
- throw new TypeError("Must be between 0 and 63: " + number2);
+ throw new TypeError("Must be between 0 and 63: " + number3);
  };
  exports3.decode = function(charCode) {
  var bigA = 65;
@@ -17454,11 +17420,11 @@ var init_utils4 = __esm({
  prepareGenerateConfig = async (options, from) => {
  var _a;
  const config = from === "config" ? await drizzleConfigFromFile(options.config) : options;
- const { schema: schema5, out, breakpoints, dialect: dialect7, driver: driver2 } = config;
- if (!schema5 || !dialect7) {
+ const { schema: schema5, out, breakpoints, dialect: dialect4, driver: driver2, casing: casing2 } = config;
+ if (!schema5 || !dialect4) {
  console.log(error("Please provide required params:"));
  console.log(wrapParam("schema", schema5));
- console.log(wrapParam("dialect", dialect7));
+ console.log(wrapParam("dialect", dialect4));
  console.log(wrapParam("out", out, true));
  process.exit(1);
  }
@@ -17469,14 +17435,15 @@ var init_utils4 = __esm({
  }
  const prefix2 = ("migrations" in config ? (_a = config.migrations) == null ? void 0 : _a.prefix : options.prefix) || "index";
  return {
- dialect: dialect7,
+ dialect: dialect4,
  name: options.name,
  custom: options.custom || false,
  prefix: prefix2,
  breakpoints: breakpoints || true,
  schema: schema5,
  out: out || "drizzle",
- bundle: driver2 === "expo"
+ bundle: driver2 === "expo",
+ casing: casing2
  };
  };
  flattenDatabaseCredentials = (config) => {
@@ -17543,6 +17510,7 @@ var init_utils4 = __esm({
  verbose: config.verbose ?? false,
  force: options.force ?? false,
  credentials: parsed2.data,
+ casing: config.casing,
  tablesFilter,
  schemasFilter
  };
@@ -17560,6 +17528,7 @@ var init_utils4 = __esm({
  verbose: config.verbose ?? false,
  force: options.force ?? false,
  credentials: parsed2.data,
+ casing: config.casing,
  tablesFilter,
  schemasFilter
  };
@@ -17577,6 +17546,7 @@ var init_utils4 = __esm({
  verbose: config.verbose ?? false,
  force: options.force ?? false,
  credentials: parsed2.data,
+ casing: config.casing,
  tablesFilter,
  schemasFilter
  };
@@ -17594,6 +17564,7 @@ var init_utils4 = __esm({
  verbose: config.verbose ?? false,
  force: options.force ?? false,
  credentials: parsed2.data,
+ casing: config.casing,
  tablesFilter,
  schemasFilter
  };
@@ -17612,11 +17583,11 @@ var init_utils4 = __esm({
  process.exit(1);
  }
  const config = parsed.data;
- const dialect7 = config.dialect;
+ const dialect4 = config.dialect;
  const tablesFilterConfig = config.tablesFilter;
  const tablesFilter = tablesFilterConfig ? typeof tablesFilterConfig === "string" ? [tablesFilterConfig] : tablesFilterConfig : [];
  if (config.extensionsFilters) {
- if (config.extensionsFilters.includes("postgis") && dialect7 === "postgresql") {
+ if (config.extensionsFilters.includes("postgis") && dialect4 === "postgresql") {
  tablesFilter.push(
  ...["!geography_columns", "!geometry_columns", "!spatial_ref_sys"]
  );
@@ -17624,7 +17595,7 @@ var init_utils4 = __esm({
  }
  const schemasFilterConfig = config.schemaFilter;
  const schemasFilter = schemasFilterConfig ? typeof schemasFilterConfig === "string" ? [schemasFilterConfig] : schemasFilterConfig : [];
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  const parsed2 = postgresCredentials.safeParse(config);
  if (!parsed2.success) {
  printConfigConnectionIssues3(config);
@@ -17641,7 +17612,7 @@ var init_utils4 = __esm({
  prefix: ((_a = config.migrations) == null ? void 0 : _a.prefix) || "index"
  };
  }
- if (dialect7 === "mysql") {
+ if (dialect4 === "mysql") {
  const parsed2 = mysqlCredentials.safeParse(config);
  if (!parsed2.success) {
  printConfigConnectionIssues2(config);
@@ -17658,7 +17629,7 @@ var init_utils4 = __esm({
  prefix: ((_b = config.migrations) == null ? void 0 : _b.prefix) || "index"
  };
  }
- if (dialect7 === "sqlite") {
+ if (dialect4 === "sqlite") {
  const parsed2 = sqliteCredentials.safeParse(config);
  if (!parsed2.success) {
  printConfigConnectionIssues4(config, "pull");
@@ -17675,14 +17646,14 @@ var init_utils4 = __esm({
  prefix: ((_c = config.migrations) == null ? void 0 : _c.prefix) || "index"
  };
  }
- if (dialect7 === "turso") {
+ if (dialect4 === "turso") {
  const parsed2 = libSQLCredentials.safeParse(config);
  if (!parsed2.success) {
  printConfigConnectionIssues(config, "pull");
  process.exit(1);
  }
  return {
- dialect: dialect7,
+ dialect: dialect4,
  out: config.out,
  breakpoints: config.breakpoints,
  casing: config.casing,
@@ -17692,7 +17663,7 @@ var init_utils4 = __esm({
  prefix: ((_d = config.migrations) == null ? void 0 : _d.prefix) || "index"
  };
  }
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  };
  prepareStudioConfig = async (options) => {
  const params = studioCliParams.parse(options);
@@ -17709,9 +17680,9 @@ var init_utils4 = __esm({
  process.exit(1);
  }
  const { host, port } = params;
- const { dialect: dialect7, schema: schema5 } = result.data;
+ const { dialect: dialect4, schema: schema5 } = result.data;
  const flattened = flattenDatabaseCredentials(config);
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  const parsed = postgresCredentials.safeParse(flattened);
  if (!parsed.success) {
  printConfigConnectionIssues3(flattened);
@@ -17719,14 +17690,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  schema: schema5,
  host,
  port,
  credentials: credentials2
  };
  }
- if (dialect7 === "mysql") {
+ if (dialect4 === "mysql") {
  const parsed = mysqlCredentials.safeParse(flattened);
  if (!parsed.success) {
  printConfigConnectionIssues2(flattened);
@@ -17734,14 +17705,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  schema: schema5,
  host,
  port,
  credentials: credentials2
  };
  }
- if (dialect7 === "sqlite") {
+ if (dialect4 === "sqlite") {
  const parsed = sqliteCredentials.safeParse(flattened);
  if (!parsed.success) {
  printConfigConnectionIssues4(flattened, "studio");
@@ -17749,14 +17720,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  schema: schema5,
  host,
  port,
  credentials: credentials2
  };
  }
- if (dialect7 === "turso") {
+ if (dialect4 === "turso") {
  const parsed = libSQLCredentials.safeParse(flattened);
  if (!parsed.success) {
  printConfigConnectionIssues(flattened, "studio");
@@ -17764,14 +17735,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  schema: schema5,
  host,
  port,
  credentials: credentials2
  };
  }
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  };
  migrateConfig = objectType({
  dialect: dialect3,
@@ -17786,10 +17757,10 @@ var init_utils4 = __esm({
  console.log(wrapParam("dialect", config.dialect));
  process.exit(1);
  }
- const { dialect: dialect7, out } = parsed.data;
+ const { dialect: dialect4, out } = parsed.data;
  const { schema: schema5, table: table4 } = parsed.data.migrations || {};
  const flattened = flattenDatabaseCredentials(config);
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  const parsed2 = postgresCredentials.safeParse(flattened);
  if (!parsed2.success) {
  printConfigConnectionIssues3(flattened);
@@ -17797,14 +17768,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed2.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  out,
  credentials: credentials2,
  schema: schema5,
  table: table4
  };
  }
- if (dialect7 === "mysql") {
+ if (dialect4 === "mysql") {
  const parsed2 = mysqlCredentials.safeParse(flattened);
  if (!parsed2.success) {
  printConfigConnectionIssues2(flattened);
@@ -17812,14 +17783,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed2.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  out,
  credentials: credentials2,
  schema: schema5,
  table: table4
  };
  }
- if (dialect7 === "sqlite") {
+ if (dialect4 === "sqlite") {
  const parsed2 = sqliteCredentials.safeParse(flattened);
  if (!parsed2.success) {
  printConfigConnectionIssues4(flattened, "migrate");
@@ -17827,14 +17798,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed2.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  out,
  credentials: credentials2,
  schema: schema5,
  table: table4
  };
  }
- if (dialect7 === "turso") {
+ if (dialect4 === "turso") {
  const parsed2 = libSQLCredentials.safeParse(flattened);
  if (!parsed2.success) {
  printConfigConnectionIssues(flattened, "migrate");
@@ -17842,14 +17813,14 @@ var init_utils4 = __esm({
  }
  const credentials2 = parsed2.data;
  return {
- dialect: dialect7,
+ dialect: dialect4,
  out,
  credentials: credentials2,
  schema: schema5,
  table: table4
  };
  }
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  };
  drizzleConfigFromFile = async (configPath) => {
  const prefix2 = process.env.TEST_CONFIG_PATH_PREFIX || "";
@@ -17878,6 +17849,7 @@ var init_utils4 = __esm({
  unregister();
  const res = configCommonSchema.safeParse(content);
  if (!res.success) {
+ console.log(res.error);
  if (!("dialect" in content)) {
  console.log(error("Please specify 'dialect' param in config file"));
  }
@@ -17950,7 +17922,7 @@ function clearDefaults(defaultValue, collate) {
  return `(${resultDefault})`;
  }
  }
- var import_drizzle_orm2, import_drizzle_orm3, import_mysql_core2, import_mysql_core3, dialect4, indexName, generateMySqlSnapshot, fromDatabase;
+ var import_drizzle_orm2, import_drizzle_orm3, import_mysql_core2, import_mysql_core3, indexName, generateMySqlSnapshot, fromDatabase;
  var init_mysqlSerializer = __esm({
  "src/serializer/mysqlSerializer.ts"() {
  "use strict";
@@ -17960,12 +17932,13 @@ var init_mysqlSerializer = __esm({
  import_mysql_core2 = require("drizzle-orm/mysql-core");
  import_mysql_core3 = require("drizzle-orm/mysql-core");
  init_outputs();
+ init_utils();
  init_serializer();
- dialect4 = new import_mysql_core2.MySqlDialect();
  indexName = (tableName, columns) => {
  return `${tableName}_${columns.join("_")}_index`;
  };
- generateMySqlSnapshot = (tables) => {
+ generateMySqlSnapshot = (tables, casing2) => {
+ const dialect4 = new import_mysql_core2.MySqlDialect({ casing: casing2 });
  const result = {};
  const internal = { tables: {}, indexes: {} };
  for (const table4 of tables) {
@@ -17984,12 +17957,13 @@ var init_mysqlSerializer = __esm({
  const primaryKeysObject = {};
  const uniqueConstraintObject = {};
  columns.forEach((column7) => {
+ const name = getColumnCasing(column7, casing2);
  const notNull = column7.notNull;
  const sqlTypeLowered = column7.getSQLType().toLowerCase();
  const autoIncrement = typeof column7.autoIncrement === "undefined" ? false : column7.autoIncrement;
  const generated = column7.generated;
  const columnToSet = {
- name: column7.name,
+ name,
  type: column7.getSQLType(),
  primaryKey: false,
  // If field is autoincrement it's notNull by default
@@ -18003,9 +17977,9 @@ var init_mysqlSerializer = __esm({
  } : void 0
  };
  if (column7.primary) {
- primaryKeysObject[`${tableName}_${column7.name}`] = {
- name: `${tableName}_${column7.name}`,
- columns: [column7.name]
+ primaryKeysObject[`${tableName}_${name}`] = {
+ name: `${tableName}_${name}`,
+ columns: [name]
  };
  }
  if (column7.isUnique) {
@@ -18019,7 +17993,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  The unique constraint ${source_default.underline.blue(
  column7.uniqueName
  )} on the ${source_default.underline.blue(
- column7.name
+ name
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
  existingUnique.columns.join(",")
  )} columns
@@ -18034,7 +18008,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  }
  if (column7.default !== void 0) {
  if ((0, import_drizzle_orm2.is)(column7.default, import_drizzle_orm3.SQL)) {
- columnToSet.default = sqlToStr(column7.default);
+ columnToSet.default = sqlToStr(column7.default, casing2);
  } else {
  if (typeof column7.default === "string") {
  columnToSet.default = `'${column7.default}'`;
@@ -18056,20 +18030,27 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  }
  }
  }
- columnsObject[column7.name] = columnToSet;
+ columnsObject[name] = columnToSet;
  });
  primaryKeys.map((pk) => {
- const columnNames = pk.columns.map((c) => c.name);
- primaryKeysObject[pk.getName()] = {
- name: pk.getName(),
+ const originalColumnNames = pk.columns.map((c) => c.name);
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing2));
+ let name = pk.getName();
+ if (casing2 !== void 0) {
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
+ }
+ }
+ primaryKeysObject[name] = {
+ name,
  columns: columnNames
  };
  for (const column7 of pk.columns) {
- columnsObject[column7.name].notNull = true;
+ columnsObject[getColumnCasing(column7, casing2)].notNull = true;
  }
  });
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
- const columnNames = unq.columns.map((c) => c.name);
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
  const name = unq.name ?? (0, import_mysql_core2.uniqueKeyName)(table4, columnNames);
  const existingUnique = uniqueConstraintObject[name];
  if (typeof existingUnique !== "undefined") {
@@ -18097,15 +18078,25 @@ The unique constraint ${source_default.underline.blue(
  };
  });
  const fks = foreignKeys.map((fk4) => {
- const name = fk4.getName();
  const tableFrom = tableName;
  const onDelete = fk4.onDelete ?? "no action";
  const onUpdate = fk4.onUpdate ?? "no action";
  const reference = fk4.reference();
  const referenceFT = reference.foreignTable;
  const tableTo = (0, import_drizzle_orm2.getTableName)(referenceFT);
- const columnsFrom = reference.columns.map((it) => it.name);
- const columnsTo = reference.foreignColumns.map((it) => it.name);
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
+ let name = fk4.getName();
+ if (casing2 !== void 0) {
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
+ }
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
+ }
+ }
  return {
  name,
  tableFrom,
@@ -18145,7 +18136,7 @@ The unique constraint ${source_default.underline.blue(
  }
  return sql;
  } else {
- return `${it.name}`;
+ return `${getColumnCasing(it, casing2)}`;
  }
  });
  if (value.config.unique) {
@@ -18508,8 +18499,6 @@ var init_pgImports = __esm({
  const enums = [];
  const schemas = [];
  const sequences = [];
- const views = [];
- const matViews = [];
  const i0values = Object.values(exports2);
  i0values.forEach((t2) => {
  if ((0, import_pg_core.isPgEnum)(t2)) {
@@ -18522,25 +18511,17 @@ var init_pgImports = __esm({
  if ((0, import_drizzle_orm4.is)(t2, import_pg_core.PgSchema)) {
  schemas.push(t2);
  }
- if ((0, import_pg_core.isPgView)(t2)) {
- views.push(t2);
- }
- if ((0, import_pg_core.isPgMaterializedView)(t2)) {
- matViews.push(t2);
- }
  if ((0, import_pg_core.isPgSequence)(t2)) {
  sequences.push(t2);
  }
  });
- return { tables, enums, schemas, sequences, views, matViews };
+ return { tables, enums, schemas, sequences };
  };
  prepareFromPgImports = async (imports) => {
- const tables = [];
- const enums = [];
- const schemas = [];
- const sequences = [];
- const views = [];
- const matViews = [];
+ let tables = [];
+ let enums = [];
+ let schemas = [];
+ let sequences = [];
  const { unregister } = await safeRegister();
  for (let i2 = 0; i2 < imports.length; i2++) {
  const it = imports[i2];
@@ -18550,11 +18531,9 @@ var init_pgImports = __esm({
  enums.push(...prepared.enums);
  schemas.push(...prepared.schemas);
  sequences.push(...prepared.sequences);
- views.push(...prepared.views);
- matViews.push(...prepared.matViews);
  }
  unregister();
- return { tables: Array.from(new Set(tables)), enums, schemas, sequences, views, matViews };
+ return { tables: Array.from(new Set(tables)), enums, schemas, sequences };
  };
  }
  });
@@ -18602,7 +18581,7 @@ function buildArrayString(array, sqlType) {
  }).join(",");
  return `{${values}}`;
  }
- var import_drizzle_orm5, import_pg_core2, import_pg_core3, dialect5, indexName2, generatePgSnapshot, trimChar, fromDatabase2, defaultForColumn;
+ var import_drizzle_orm5, import_pg_core2, import_pg_core3, indexName2, generatePgSnapshot, trimChar, fromDatabase2, defaultForColumn;
  var init_pgSerializer = __esm({
  "src/serializer/pgSerializer.ts"() {
  "use strict";
@@ -18614,14 +18593,13 @@ var init_pgSerializer = __esm({
  init_outputs();
  init_utils();
  init_serializer();
- dialect5 = new import_pg_core2.PgDialect();
  indexName2 = (tableName, columns) => {
  return `${tableName}_${columns.join("_")}_index`;
  };
- generatePgSnapshot = (tables, enums, schemas, sequences, views, matViews, schemaFilter) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ generatePgSnapshot = (tables, enums, schemas, sequences, casing2, schemaFilter) => {
+ var _a, _b, _c, _d, _e, _f;
+ const dialect4 = new import_pg_core2.PgDialect({ casing: casing2 });
  const result = {};
- const resultViews = {};
  const sequencesToReturn = {};
  const indexesInSchema = {};
  for (const table4 of tables) {
@@ -18645,6 +18623,7 @@ var init_pgSerializer = __esm({
  const uniqueConstraintObject = {};
  columns.forEach((column7) => {
  var _a2, _b2, _c2, _d2, _e2, _f2;
+ const name = getColumnCasing(column7, casing2);
  const notNull = column7.notNull;
  const primaryKey = column7.primary;
  const sqlTypeLowered = column7.getSQLType().toLowerCase();
@@ -18657,18 +18636,18 @@ var init_pgSerializer = __esm({
  const startWith = stringFromIdentityProperty((_d2 = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _d2.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue);
  const cache = stringFromIdentityProperty((_e2 = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _e2.cache) ?? "1";
  const columnToSet = {
- name: column7.name,
+ name,
  type: column7.getSQLType(),
  typeSchema,
  primaryKey,
  notNull,
  generated: generated ? {
- as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect5.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect5.sqlToQuery(generated.as()).sql : generated.as,
+ as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect4.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect4.sqlToQuery(generated.as()).sql : generated.as,
  type: "stored"
  } : void 0,
  identity: identity ? {
  type: identity.type,
- name: identity.sequenceName ?? `${tableName}_${column7.name}_seq`,
+ name: identity.sequenceName ?? `${tableName}_${name}_seq`,
  schema: schema5 ?? "public",
  increment,
  startWith,
@@ -18689,7 +18668,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  The unique constraint ${source_default.underline.blue(
  column7.uniqueName
  )} on the ${source_default.underline.blue(
- column7.name
+ name
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
  existingUnique.columns.join(",")
  )} columns
@@ -18705,7 +18684,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  }
  if (column7.default !== void 0) {
  if ((0, import_drizzle_orm5.is)(column7.default, import_drizzle_orm5.SQL)) {
- columnToSet.default = sqlToStr(column7.default);
+ columnToSet.default = sqlToStr(column7.default, casing2);
  } else {
  if (typeof column7.default === "string") {
  columnToSet.default = `'${column7.default}'`;
@@ -18733,17 +18712,24 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  }
  }
  }
- columnsObject[column7.name] = columnToSet;
+ columnsObject[name] = columnToSet;
  });
  primaryKeys.map((pk) => {
- const columnNames = pk.columns.map((c) => c.name);
- primaryKeysObject[pk.getName()] = {
- name: pk.getName(),
+ const originalColumnNames = pk.columns.map((c) => c.name);
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing2));
+ let name = pk.getName();
+ if (casing2 !== void 0) {
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
+ }
+ }
+ primaryKeysObject[name] = {
+ name,
  columns: columnNames
  };
  });
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
- const columnNames = unq.columns.map((c) => c.name);
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
  const name = unq.name ?? (0, import_pg_core2.uniqueKeyName)(table4, columnNames);
  const existingUnique = uniqueConstraintObject[name];
  if (typeof existingUnique !== "undefined") {
@@ -18770,15 +18756,25 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  };
  });
  const fks = foreignKeys.map((fk4) => {
- const name = fk4.getName();
  const tableFrom = tableName;
  const onDelete = fk4.onDelete;
  const onUpdate = fk4.onUpdate;
  const reference = fk4.reference();
  const tableTo = (0, import_drizzle_orm5.getTableName)(reference.foreignTable);
  const schemaTo = (0, import_pg_core3.getTableConfig)(reference.foreignTable).schema;
- const columnsFrom = reference.columns.map((it) => it.name);
- const columnsTo = reference.foreignColumns.map((it) => it.name);
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
+ let name = fk4.getName();
+ if (casing2 !== void 0) {
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
+ }
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
+ }
+ }
  return {
  name,
  tableFrom,
@@ -18804,19 +18800,20 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  ${withStyle.errorWarning(
  `Please specify an index name in ${(0, import_drizzle_orm5.getTableName)(
  value.config.table
- )} table that has "${dialect5.sqlToQuery(it).sql}" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`
+ )} table that has "${dialect4.sqlToQuery(it).sql}" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`
  )}`
  );
  process.exit(1);
  }
  }
  it = it;
+ const name2 = getColumnCasing(it, casing2);
  if (!(0, import_drizzle_orm5.is)(it, import_drizzle_orm5.SQL) && it.type === "PgVector" && typeof it.indexConfig.opClass === "undefined") {
  console.log(
  `
  ${withStyle.errorWarning(
  `You are specifying an index on the ${source_default.blueBright(
- it.name
+ name2
  )} column inside the ${source_default.blueBright(
  tableName
  )} table with the ${source_default.blueBright(
@@ -18826,7 +18823,7 @@ ${withStyle.errorWarning(
  )}].

  You can specify it using current syntax: ${source_default.underline(
- `index("${value.config.name}").using("${value.config.method}", table.${it.name}.op("${vectorOps[0]}"))`
+ `index("${value.config.name}").using("${value.config.method}", table.${name2}.op("${vectorOps[0]}"))`
  )}

  You can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing
@@ -18835,7 +18832,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
  );
  process.exit(1);
  }
- indexColumnNames.push(it.name);
+ indexColumnNames.push(name2);
  });
  const name = value.config.name ? value.config.name : indexName2(tableName, indexColumnNames);
  let indexColumns = columns2.map(
@@ -18843,7 +18840,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
  var _a2, _b2, _c2, _d2, _e2;
  if ((0, import_drizzle_orm5.is)(it, import_drizzle_orm5.SQL)) {
  return {
- expression: dialect5.sqlToQuery(it, "indexes").sql,
+ expression: dialect4.sqlToQuery(it, "indexes").sql,
  asc: true,
  isExpression: true,
  nulls: "last"
@@ -18851,7 +18848,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
  } else {
  it = it;
  return {
- expression: it.name,
+ expression: getColumnCasing(it, casing2),
  isExpression: false,
  asc: ((_a2 = it.indexConfig) == null ? void 0 : _a2.order) === "asc",
  nulls: ((_b2 = it.indexConfig) == null ? void 0 : _b2.nulls) ? (_c2 = it.indexConfig) == null ? void 0 : _c2.nulls : ((_d2 = it.indexConfig) == null ? void 0 : _d2.order) === "desc" ? "first" : "last",
@@ -18882,7 +18879,7 @@ ${withStyle.errorWarning(
  name,
  columns: indexColumns,
  isUnique: value.config.unique ?? false,
- where: value.config.where ? dialect5.sqlToQuery(value.config.where).sql : void 0,
+ where: value.config.where ? dialect4.sqlToQuery(value.config.where).sql : void 0,
  concurrently: value.config.concurrently ?? false,
  method: value.config.method ?? "btree",
  with: value.config.with ?? {}
@@ -18920,149 +18917,6 @@ ${withStyle.errorWarning(
  } else {
  }
  }
- const combinedViews = [...views, ...matViews];
- for (const view2 of combinedViews) {
- let viewName;
- let schema5;
- let query;
- let selectedFields;
- let isExisting;
- let withOption;
- let tablespace;
- let using;
- let withNoData;
- let materialized = false;
- if ((0, import_drizzle_orm5.is)(view2, import_pg_core2.PgView)) {
- ({ name: viewName, schema: schema5, query, selectedFields, isExisting, with: withOption } = (0, import_pg_core2.getViewConfig)(view2));
- } else {
- ({ name: viewName, schema: schema5, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = (0, import_pg_core2.getMaterializedViewConfig)(
- view2
- ));
- materialized = true;
- }
- const viewSchema = schema5 ?? "public";
- const viewKey = `${viewSchema}.${viewName}`;
- const columnsObject = {};
- const uniqueConstraintObject = {};
- const existingView = resultViews[viewKey];
- if (typeof existingView !== "undefined") {
- console.log(
- `
- ${withStyle.errorWarning(
- `We've found duplicated view name across ${source_default.underline.blue(
- schema5 ?? "public"
- )} schema. Please rename your view`
- )}`
- );
- process.exit(1);
- }
- for (const key in selectedFields) {
- if ((0, import_drizzle_orm5.is)(selectedFields[key], import_pg_core2.PgColumn)) {
- const column7 = selectedFields[key];
- const notNull = column7.notNull;
- const primaryKey = column7.primary;
- const sqlTypeLowered = column7.getSQLType().toLowerCase();
- const typeSchema = (0, import_drizzle_orm5.is)(column7, import_pg_core2.PgEnumColumn) ? column7.enum.schema || "public" : void 0;
- const generated = column7.generated;
- const identity = column7.generatedIdentity;
- const increment = stringFromIdentityProperty((_g = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _g.increment) ?? "1";
- const minValue = stringFromIdentityProperty((_h = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _h.minValue) ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column7.columnType) : "1");
- const maxValue = stringFromIdentityProperty((_i = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _i.maxValue) ?? (parseFloat(increment) < 0 ? "-1" : maxRangeForIdentityBasedOn(column7.getSQLType()));
- const startWith = stringFromIdentityProperty((_j = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _j.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue);
- const cache = stringFromIdentityProperty((_k = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _k.cache) ?? "1";
- const columnToSet = {
- name: column7.name,
- type: column7.getSQLType(),
- typeSchema,
- primaryKey,
- notNull,
- generated: generated ? {
- as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect5.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect5.sqlToQuery(generated.as()).sql : generated.as,
- type: "stored"
- } : void 0,
- identity: identity ? {
- type: identity.type,
- name: identity.sequenceName ?? `${viewName}_${column7.name}_seq`,
- schema: schema5 ?? "public",
- increment,
- startWith,
- minValue,
- maxValue,
- cache,
- cycle: ((_l = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _l.cycle) ?? false
- } : void 0
- };
- if (column7.isUnique) {
- const existingUnique = uniqueConstraintObject[column7.uniqueName];
- if (typeof existingUnique !== "undefined") {
- console.log(
- `
- ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${source_default.underline.blue(
- viewName
- )} table.
- The unique constraint ${source_default.underline.blue(
- column7.uniqueName
- )} on the ${source_default.underline.blue(
- column7.name
- )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
- existingUnique.columns.join(",")
- )} columns
- `)}`
- );
- process.exit(1);
- }
- uniqueConstraintObject[column7.uniqueName] = {
- name: column7.uniqueName,
- nullsNotDistinct: column7.uniqueType === "not distinct",
- columns: [columnToSet.name]
- };
- }
- if (column7.default !== void 0) {
- if ((0, import_drizzle_orm5.is)(column7.default, import_drizzle_orm5.SQL)) {
- columnToSet.default = sqlToStr(column7.default);
- } else {
- if (typeof column7.default === "string") {
- columnToSet.default = `'${column7.default}'`;
- } else {
- if (sqlTypeLowered === "jsonb" || sqlTypeLowered === "json") {
- columnToSet.default = `'${JSON.stringify(
- column7.default
- )}'::${sqlTypeLowered}`;
- } else if (column7.default instanceof Date) {
- if (sqlTypeLowered === "date") {
- columnToSet.default = `'${column7.default.toISOString().split("T")[0]}'`;
- } else if (sqlTypeLowered === "timestamp") {
- columnToSet.default = `'${column7.default.toISOString().replace("T", " ").slice(0, 23)}'`;
- } else {
- columnToSet.default = `'${column7.default.toISOString()}'`;
- }
- } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column7.default)) {
- columnToSet.default = `'${buildArrayString(
- column7.default,
- sqlTypeLowered
- )}'`;
- } else {
- columnToSet.default = column7.default;
- }
- }
- }
- }
- columnsObject[column7.name] = columnToSet;
- }
- }
- resultViews[viewKey] = {
- columns: columnsObject,
- definition: isExisting ? void 0 : dialect5.sqlToQuery(query).sql,
- name: viewName,
- schema: viewSchema,
- isExisting,
- with: withOption,
- withNoData,
- materialized,
- tablespace,
- using
- };
- }
  const enumsToReturn = enums.reduce((map, obj) => {
  const enumSchema3 = obj.schema || "public";
  const key = `${enumSchema3}.${obj.enumName}`;
@@ -19089,7 +18943,6 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  enums: enumsToReturn,
  schemas: schemasObject,
  sequences: sequencesToReturn,
- views: resultViews,
  _meta: {
  schemas: {},
  tables: {},
@@ -19108,25 +18961,10 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
  };
  fromDatabase2 = async (db, tablesFilter = () => true, schemaFilters, progressCallback) => {
  const result = {};
- const views = {};
  const internals = { tables: {} };
- const where = schemaFilters.map((t2) => `n.nspname = '${t2}'`).join(" or ");
+ const where = schemaFilters.map((t2) => `table_schema = '${t2}'`).join(" or ");
  const allTables = await db.query(
- `SELECT
- n.nspname AS table_schema,
- c.relname AS table_name,
- CASE
- WHEN c.relkind = 'r' THEN 'table'
- WHEN c.relkind = 'v' THEN 'view'
- WHEN c.relkind = 'm' THEN 'materialized_view'
- END AS type
- FROM
- pg_catalog.pg_class c
- JOIN
- pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE
- c.relkind IN ('r', 'v', 'm')
- ${where === "" ? "" : ` AND ${where}`};`
+ `SELECT table_schema, table_name FROM information_schema.tables${where === "" ? "" : ` WHERE ${where}`};`
  );
  const schemas = new Set(allTables.map((it) => it.table_schema));
  schemas.delete("public");
@@ -19204,7 +19042,7 @@ WHERE
  progressCallback("enums", Object.keys(enumsToReturn).length, "done");
  }
  const sequencesInColumns = [];
- const all = allTables.filter((it) => it.type === "table").map((row) => {
+ const all = allTables.map((row) => {
  return new Promise(async (res, rej) => {
  var _a, _b, _c, _d, _e, _f;
  const tableName = row.table_name;
@@ -19479,7 +19317,7 @@ WHERE
  schema: tableSchema
  } : void 0
  };
- if (identityName) {
+ if (identityName && typeof identityName === "string") {
  delete sequencesToReturn[`${tableSchema}.${identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName}`];
  delete sequencesToReturn[identityName];
  }
@@ -19613,336 +19451,6 @@ WHERE
  }
  for await (const _2 of all) {
  }
- const allViews = allTables.filter((it) => it.type === "view" || it.type === "materialized_view").map((row) => {
- return new Promise(async (res, rej) => {
- var _a, _b, _c, _d;
- const viewName = row.table_name;
- if (!tablesFilter(viewName))
- return res("");
- tableCount += 1;
- const viewSchema = row.table_schema;
- try {
- const columnToReturn = {};
- const viewResponses = await db.query(`WITH view_columns AS (
- SELECT DISTINCT
- nv.nspname::information_schema.sql_identifier AS view_schema,
- v.relname::information_schema.sql_identifier AS view_name,
- nt.nspname::information_schema.sql_identifier AS table_schema,
- t.relname::information_schema.sql_identifier AS table_name,
- a.attname::information_schema.sql_identifier AS column_name
- FROM pg_namespace nv
- JOIN pg_class v ON nv.oid = v.relnamespace
- JOIN pg_depend dv ON v.oid = dv.refobjid
- JOIN pg_depend dt ON dv.objid = dt.objid
- JOIN pg_class t ON dt.refobjid = t.oid
- JOIN pg_namespace nt ON t.relnamespace = nt.oid
- JOIN pg_attribute a ON t.oid = a.attrelid
- WHERE (v.relkind = 'v'::"char" OR v.relkind = 'm'::"char")
- AND dv.refclassid = 'pg_class'::regclass::oid
- AND dv.classid = 'pg_rewrite'::regclass::oid
- AND dv.deptype = 'i'::"char"
- AND dv.objid = dt.objid
- AND dv.refobjid <> dt.refobjid
- AND dt.classid = 'pg_rewrite'::regclass::oid
- AND dt.refclassid = 'pg_class'::regclass::oid
- AND t.relkind = ANY (ARRAY['r'::"char", 'v'::"char", 'f'::"char", 'p'::"char"])
- AND dt.refobjsubid = a.attnum
- AND pg_has_role(t.relowner, 'USAGE'::text)
- AND nv.nspname::information_schema.sql_identifier = '${viewSchema}'
- AND v.relname::information_schema.sql_identifier = '${viewName}'
- ),
- column_descriptions AS (
- SELECT DISTINCT
- a.attrelid::regclass::text AS table_name,
- a.attname AS column_name,
- c.is_nullable,
- a.attndims AS array_dimensions,
- CASE
- WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS (
- SELECT FROM pg_attrdef ad
- WHERE ad.adrelid = a.attrelid
- AND ad.adnum = a.attnum
- AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)'
- )
- THEN CASE a.atttypid
- WHEN 'int'::regtype THEN 'serial'
- WHEN 'int8'::regtype THEN 'bigserial'
- WHEN 'int2'::regtype THEN 'smallserial'
- END
- ELSE format_type(a.atttypid, a.atttypmod)
- END AS data_type,
- pg_get_serial_sequence('"' || c.table_schema || '"."' || c.table_name || '"', a.attname)::regclass AS seq_name,
- c.column_default,
- c.data_type AS additional_dt,
- c.udt_name AS enum_name,
- c.is_generated,
- c.generation_expression,
- c.is_identity,
- c.identity_generation,
- c.identity_start,
- c.identity_increment,
- c.identity_maximum,
- c.identity_minimum,
- c.identity_cycle
- FROM pg_attribute a
- JOIN information_schema.columns c ON c.column_name = a.attname
- JOIN pg_type t ON t.oid = a.atttypid
- LEFT JOIN pg_namespace ns ON ns.oid = t.typnamespace
- WHERE a.attnum > 0
- AND NOT a.attisdropped
- ),
- table_constraints AS (
- SELECT DISTINCT ON (ccu.column_name)
- ccu.column_name,
- c.data_type,
- tc.constraint_type,
- tc.constraint_name,
- tc.constraint_schema,
- tc.table_name
- FROM information_schema.table_constraints tc
- JOIN information_schema.constraint_column_usage ccu USING (constraint_schema, constraint_name)
- JOIN information_schema.columns c ON c.table_schema = tc.constraint_schema
- AND tc.table_name = c.table_name
- AND ccu.column_name = c.column_name
- ),
- additional_column_info AS (
19709
- SELECT DISTINCT
19710
- a.attrelid::regclass::text AS table_name,
19711
- a.attname AS column_name,
19712
- is_nullable,
19713
- a.attndims AS array_dimensions,
19714
- CASE
19715
- WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS (
19716
- SELECT FROM pg_attrdef ad
19717
- WHERE ad.adrelid = a.attrelid
19718
- AND ad.adnum = a.attnum
19719
- AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)'
19720
- )
19721
- THEN CASE a.atttypid
19722
- WHEN 'int'::regtype THEN 'serial'
19723
- WHEN 'int8'::regtype THEN 'bigserial'
19724
- WHEN 'int2'::regtype THEN 'smallserial'
19725
- END
19726
- ELSE format_type(a.atttypid, a.atttypmod)
19727
- END AS data_type,
19728
- pg_get_serial_sequence('"' || c.table_schema || '"."' || c.table_name || '"', a.attname)::regclass AS seq_name,
19729
- c.column_default,
19730
- c.data_type AS additional_dt,
19731
- c.udt_name AS enum_name,
19732
- c.is_generated,
19733
- generation_expression,
19734
- is_identity,
19735
- identity_generation,
19736
- identity_start,
19737
- identity_increment,
19738
- identity_maximum,
19739
- identity_minimum,
19740
- identity_cycle
19741
- FROM pg_attribute a
19742
- JOIN information_schema.columns c ON c.column_name = a.attname
19743
- LEFT JOIN pg_type t ON t.oid = a.atttypid
19744
- LEFT JOIN pg_namespace ns ON ns.oid = t.typnamespace
19745
- WHERE a.attnum > 0
19746
- AND NOT a.attisdropped
19747
- )
19748
- SELECT DISTINCT ON (vc.table_name, vc.column_name)
19749
- vc.view_schema,
19750
- vc.view_name,
19751
- vc.table_schema,
19752
- vc.table_name,
19753
- vc.column_name,
19754
- COALESCE(cd.data_type, aci.data_type) AS data_type,
19755
- tc.constraint_type,
19756
- tc.constraint_name,
19757
- aci.is_nullable,
19758
- aci.array_dimensions,
19759
- aci.seq_name,
19760
- aci.column_default,
19761
- aci.additional_dt,
19762
- aci.enum_name,
19763
- aci.is_generated,
19764
- aci.generation_expression,
19765
- aci.is_identity,
19766
- aci.identity_generation,
19767
- aci.identity_start,
19768
- aci.identity_increment,
19769
- aci.identity_maximum,
19770
- aci.identity_minimum,
19771
- aci.identity_cycle
19772
- FROM view_columns vc
19773
- LEFT JOIN column_descriptions cd ON vc.table_name = cd.table_name AND vc.column_name = cd.column_name
19774
- LEFT JOIN table_constraints tc ON vc.table_name = tc.table_name AND vc.column_name = tc.column_name
19775
- LEFT JOIN additional_column_info aci ON vc.table_name = aci.table_name AND vc.column_name = aci.column_name
19776
- ORDER BY vc.table_name, vc.column_name;`);
19777
- for (const viewResponse of viewResponses) {
19778
- const columnName = viewResponse.column_name;
19779
- const columnAdditionalDT = viewResponse.additional_dt;
19780
- const columnDimensions = viewResponse.array_dimensions;
19781
- const enumType2 = viewResponse.enum_name;
19782
- let columnType = viewResponse.data_type;
19783
- const typeSchema = viewResponse.type_schema;
19784
- const isGenerated = viewResponse.is_generated === "ALWAYS";
19785
- const generationExpression = viewResponse.generation_expression;
19786
- const isIdentity = viewResponse.is_identity === "YES";
19787
- const identityGeneration = viewResponse.identity_generation === "ALWAYS" ? "always" : "byDefault";
19788
- const identityStart = viewResponse.identity_start;
19789
- const identityIncrement = viewResponse.identity_increment;
19790
- const identityMaximum = viewResponse.identity_maximum;
19791
- const identityMinimum = viewResponse.identity_minimum;
19792
- const identityCycle = viewResponse.identity_cycle === "YES";
19793
- const identityName = viewResponse.seq_name;
19794
- const defaultValueRes = viewResponse.column_default;
19795
- const primaryKey = viewResponse.constraint_type === "PRIMARY KEY";
19796
- let columnTypeMapped = columnType;
19797
- if (columnAdditionalDT === "ARRAY") {
19798
- if (typeof internals.tables[viewName] === "undefined") {
19799
- internals.tables[viewName] = {
19800
- columns: {
19801
- [columnName]: {
19802
- isArray: true,
19803
- dimensions: columnDimensions,
19804
- rawType: columnTypeMapped.substring(
19805
- 0,
19806
- columnTypeMapped.length - 2
19807
- )
19808
- }
19809
- }
19810
- };
19811
- } else {
19812
- if (typeof internals.tables[viewName].columns[columnName] === "undefined") {
19813
- internals.tables[viewName].columns[columnName] = {
19814
- isArray: true,
19815
- dimensions: columnDimensions,
19816
- rawType: columnTypeMapped.substring(
19817
- 0,
19818
- columnTypeMapped.length - 2
19819
- )
19820
- };
19821
- }
19822
- }
19823
- }
19824
- const defaultValue = defaultForColumn(
19825
- viewResponse,
19826
- internals,
19827
- viewName
19828
- );
19829
- if (defaultValue === "NULL" || defaultValueRes && defaultValueRes.startsWith("(") && defaultValueRes.endsWith(")")) {
19830
- if (typeof internals.tables[viewName] === "undefined") {
19831
- internals.tables[viewName] = {
19832
- columns: {
19833
- [columnName]: {
19834
- isDefaultAnExpression: true
19835
- }
19836
- }
19837
- };
19838
- } else {
19839
- if (typeof internals.tables[viewName].columns[columnName] === "undefined") {
19840
- internals.tables[viewName].columns[columnName] = {
19841
- isDefaultAnExpression: true
19842
- };
19843
- } else {
19844
- internals.tables[viewName].columns[columnName].isDefaultAnExpression = true;
19845
- }
19846
- }
19847
- }
19848
- const isSerial = columnType === "serial";
19849
- if (columnTypeMapped.startsWith("numeric(")) {
19850
- columnTypeMapped = columnTypeMapped.replace(",", ", ");
19851
- }
19852
- if (columnAdditionalDT === "ARRAY") {
19853
- for (let i2 = 1; i2 < Number(columnDimensions); i2++) {
19854
- columnTypeMapped += "[]";
19855
- }
19856
- }
19857
- columnTypeMapped = columnTypeMapped.replace("character varying", "varchar").replace(" without time zone", "").replace("character", "char");
19858
- columnTypeMapped = trimChar(columnTypeMapped, '"');
19859
- columnToReturn[columnName] = {
19860
- name: columnName,
19861
- type: (
19862
- // filter vectors, but in future we should filter any extension that was installed by user
19863
- columnAdditionalDT === "USER-DEFINED" && !["vector", "geometry"].includes(enumType2) ? enumType2 : columnTypeMapped
19864
- ),
19865
- typeSchema: enumsToReturn[`${typeSchema}.${enumType2}`] !== void 0 ? enumsToReturn[`${typeSchema}.${enumType2}`].schema : void 0,
19866
- primaryKey,
19867
- notNull: viewResponse.is_nullable === "NO",
19868
- generated: isGenerated ? { as: generationExpression, type: "stored" } : void 0,
19869
- identity: isIdentity ? {
19870
- type: identityGeneration,
19871
- name: identityName,
19872
- increment: stringFromDatabaseIdentityProperty(identityIncrement),
19873
- minValue: stringFromDatabaseIdentityProperty(identityMinimum),
19874
- maxValue: stringFromDatabaseIdentityProperty(identityMaximum),
19875
- startWith: stringFromDatabaseIdentityProperty(identityStart),
19876
- cache: ((_a = sequencesToReturn[identityName]) == null ? void 0 : _a.cache) ? (_b = sequencesToReturn[identityName]) == null ? void 0 : _b.cache : ((_c = sequencesToReturn[`${viewSchema}.${identityName}`]) == null ? void 0 : _c.cache) ? (_d = sequencesToReturn[`${viewSchema}.${identityName}`]) == null ? void 0 : _d.cache : void 0,
19877
- cycle: identityCycle,
19878
- schema: viewSchema
19879
- } : void 0
19880
- };
19881
- if (identityName) {
19882
- delete sequencesToReturn[`${viewSchema}.${identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName}`];
19883
- delete sequencesToReturn[identityName];
19884
- }
19885
- if (!isSerial && typeof defaultValue !== "undefined") {
19886
- columnToReturn[columnName].default = defaultValue;
19887
- }
19888
- }
19889
- const [viewInfo] = await db.query(`
19890
- SELECT
19891
- c.relname AS view_name,
19892
- n.nspname AS schema_name,
19893
- pg_get_viewdef(c.oid, true) AS definition,
19894
- ts.spcname AS tablespace_name,
19895
- c.reloptions AS options,
19896
- pg_tablespace_location(ts.oid) AS location
19897
- FROM
19898
- pg_class c
19899
- JOIN
19900
- pg_namespace n ON c.relnamespace = n.oid
19901
- LEFT JOIN
19902
- pg_tablespace ts ON c.reltablespace = ts.oid
19903
- WHERE
19904
- (c.relkind = 'm' OR c.relkind = 'v')
19905
- AND n.nspname = '${viewSchema}'
19906
- AND c.relname = '${viewName}';`);
19907
- const resultWith = {};
19908
- if (viewInfo.options) {
19909
- viewInfo.options.forEach((pair) => {
19910
- const splitted = pair.split("=");
19911
- const key = splitted[0];
19912
- const value = splitted[1];
19913
- if (value === "true") {
19914
- resultWith[key] = true;
19915
- } else if (value === "false") {
19916
- resultWith[key] = false;
19917
- } else if (!isNaN(Number(value))) {
19918
- resultWith[key] = Number(value);
19919
- } else {
19920
- resultWith[key] = value;
19921
- }
19922
- });
19923
- }
19924
- const definition = viewInfo.definition.replace(/\s+/g, " ").replace(";", "").trim();
19925
- const withOption = Object.values(resultWith).length ? Object.fromEntries(Object.entries(resultWith).map(([key, value]) => [key.camelCase(), value])) : void 0;
19926
- const materialized = row.type === "materialized_view";
19927
- views[`${viewSchema}.${viewName}`] = {
19928
- name: viewName,
19929
- schema: viewSchema,
19930
- columns: columnToReturn,
19931
- isExisting: false,
19932
- definition,
19933
- materialized,
19934
- with: withOption,
19935
- tablespace: viewInfo.tablespace_name ?? void 0
19936
- };
19937
- } catch (e2) {
19938
- rej(e2);
19939
- return;
19940
- }
19941
- res("");
19942
- });
19943
- });
19944
- for await (const _2 of allViews) {
19945
- }
19946
19454
  if (progressCallback) {
19947
19455
  progressCallback("columns", columnsCount, "done");
19948
19456
  progressCallback("indexes", indexesCount, "done");
@@ -19956,7 +19464,6 @@ WHERE
19956
19464
  enums: enumsToReturn,
19957
19465
  schemas: schemasObject,
19958
19466
  sequences: sequencesToReturn,
19959
- views,
19960
19467
  _meta: {
19961
19468
  schemas: {},
19962
19469
  tables: {},
@@ -20141,7 +19648,7 @@ function extractGeneratedColumns(input) {
20141
19648
  }
20142
19649
  return columns;
20143
19650
  }
20144
- var import_drizzle_orm7, import_sqlite_core2, dialect6, generateSqliteSnapshot, fromDatabase3;
19651
+ var import_drizzle_orm7, import_sqlite_core2, generateSqliteSnapshot, fromDatabase3;
20145
19652
  var init_sqliteSerializer = __esm({
20146
19653
  "src/serializer/sqliteSerializer.ts"() {
20147
19654
  "use strict";
@@ -20149,9 +19656,10 @@ var init_sqliteSerializer = __esm({
20149
19656
  import_drizzle_orm7 = require("drizzle-orm");
20150
19657
  import_sqlite_core2 = require("drizzle-orm/sqlite-core");
20151
19658
  init_outputs();
19659
+ init_utils();
20152
19660
  init_serializer();
20153
- dialect6 = new import_sqlite_core2.SQLiteSyncDialect();
20154
- generateSqliteSnapshot = (tables) => {
19661
+ generateSqliteSnapshot = (tables, casing2) => {
19662
+ const dialect4 = new import_sqlite_core2.SQLiteSyncDialect({ casing: casing2 });
20155
19663
  const result = {};
20156
19664
  const internal = { indexes: {} };
20157
19665
  for (const table4 of tables) {
@@ -20169,28 +19677,29 @@ var init_sqliteSerializer = __esm({
20169
19677
  uniqueConstraints
20170
19678
  } = (0, import_sqlite_core2.getTableConfig)(table4);
20171
19679
  columns.forEach((column7) => {
19680
+ const name = getColumnCasing(column7, casing2);
20172
19681
  const notNull = column7.notNull;
20173
19682
  const primaryKey = column7.primary;
20174
19683
  const generated = column7.generated;
20175
19684
  const columnToSet = {
20176
- name: column7.name,
19685
+ name,
20177
19686
  type: column7.getSQLType(),
20178
19687
  primaryKey,
20179
19688
  notNull,
20180
19689
  autoincrement: (0, import_drizzle_orm7.is)(column7, import_sqlite_core2.SQLiteBaseInteger) ? column7.autoIncrement : false,
20181
19690
  generated: generated ? {
20182
- as: (0, import_drizzle_orm7.is)(generated.as, import_drizzle_orm7.SQL) ? `(${dialect6.sqlToQuery(generated.as, "indexes").sql})` : typeof generated.as === "function" ? `(${dialect6.sqlToQuery(generated.as(), "indexes").sql})` : `(${generated.as})`,
19691
+ as: (0, import_drizzle_orm7.is)(generated.as, import_drizzle_orm7.SQL) ? `(${dialect4.sqlToQuery(generated.as, "indexes").sql})` : typeof generated.as === "function" ? `(${dialect4.sqlToQuery(generated.as(), "indexes").sql})` : `(${generated.as})`,
20183
19692
  type: generated.mode ?? "virtual"
20184
19693
  } : void 0
20185
19694
  };
20186
19695
  if (column7.default !== void 0) {
20187
19696
  if ((0, import_drizzle_orm7.is)(column7.default, import_drizzle_orm7.SQL)) {
20188
- columnToSet.default = sqlToStr(column7.default);
19697
+ columnToSet.default = sqlToStr(column7.default, casing2);
20189
19698
  } else {
20190
19699
  columnToSet.default = typeof column7.default === "string" ? `'${column7.default}'` : typeof column7.default === "object" || Array.isArray(column7.default) ? `'${JSON.stringify(column7.default)}'` : column7.default;
20191
19700
  }
20192
19701
  }
20193
- columnsObject[column7.name] = columnToSet;
19702
+ columnsObject[name] = columnToSet;
20194
19703
  if (column7.isUnique) {
20195
19704
  const existingUnique = indexesObject[column7.uniqueName];
20196
19705
  if (typeof existingUnique !== "undefined") {
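// Editor's sketch: getColumnCasing is pulled in via init_utils() and is not shown in this
// hunk; the snapshot now keys columns by its result instead of column.name. A plausible toy
// version of the casing resolution the change relies on (the real helper may also respect
// explicitly set column names):
function getColumnCasingSketch(name, casing) {
  if (casing === "snake_case") return name.replace(/([a-z0-9])([A-Z])/g, "$1_$2").toLowerCase();
  if (casing === "camelCase") return name.replace(/_+([a-zA-Z0-9])/g, (_, c) => c.toUpperCase());
  return name; // undefined casing keeps the name as written
}
// getColumnCasingSketch("createdAt", "snake_case") === "created_at"
// getColumnCasingSketch("created_at", "camelCase") === "createdAt"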
@@ -20202,7 +19711,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20202
19711
  The unique constraint ${source_default.underline.blue(
20203
19712
  column7.uniqueName
20204
19713
  )} on the ${source_default.underline.blue(
20205
- column7.name
19714
+ name
20206
19715
  )} column is conflicting with a unique constraint name already defined for ${source_default.underline.blue(
20207
19716
  existingUnique.columns.join(",")
20208
19717
  )} columns
@@ -20218,15 +19727,25 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20218
19727
  }
20219
19728
  });
20220
19729
  const foreignKeys = tableForeignKeys.map((fk4) => {
20221
- const name = fk4.getName();
20222
19730
  const tableFrom = tableName;
20223
19731
  const onDelete = fk4.onDelete ?? "no action";
20224
19732
  const onUpdate = fk4.onUpdate ?? "no action";
20225
19733
  const reference = fk4.reference();
20226
19734
  const referenceFT = reference.foreignTable;
20227
19735
  const tableTo = (0, import_drizzle_orm7.getTableName)(referenceFT);
20228
- const columnsFrom = reference.columns.map((it) => it.name);
20229
- const columnsTo = reference.foreignColumns.map((it) => it.name);
19736
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
19737
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
19738
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
19739
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
19740
+ let name = fk4.getName();
19741
+ if (casing2 !== void 0) {
19742
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
19743
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
19744
+ }
19745
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
19746
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
19747
+ }
19748
+ }
20230
19749
  return {
20231
19750
  name,
20232
19751
  tableFrom,
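// Editor's sketch: the loop added above re-derives the generated FK constraint name by
// substituting each original column name with its cased counterpart. Standalone
// reproduction with hypothetical names:
let fkNameExample = "users_profileId_profiles_id_fk"; // assumed shape of fk.getName()
const originalColumnsFromExample = ["profileId"];
const columnsFromExample = ["profile_id"]; // after snake_case casing
for (let i = 0; i < originalColumnsFromExample.length; i++) {
  fkNameExample = fkNameExample.replace(originalColumnsFromExample[i], columnsFromExample[i]);
}
// fkNameExample === "users_profile_id_profiles_id_fk"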
@@ -20246,7 +19765,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20246
19765
  let indexColumns = columns2.map((it) => {
20247
19766
  var _a;
20248
19767
  if ((0, import_drizzle_orm7.is)(it, import_drizzle_orm7.SQL)) {
20249
- const sql = dialect6.sqlToQuery(it, "indexes").sql;
19768
+ const sql = dialect4.sqlToQuery(it, "indexes").sql;
20250
19769
  if (typeof internal.indexes[name] === "undefined") {
20251
19770
  internal.indexes[name] = {
20252
19771
  columns: {
@@ -20266,13 +19785,13 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20266
19785
  }
20267
19786
  return sql;
20268
19787
  } else {
20269
- return it.name;
19788
+ return getColumnCasing(it, casing2);
20270
19789
  }
20271
19790
  });
20272
19791
  let where = void 0;
20273
19792
  if (value.config.where !== void 0) {
20274
19793
  if ((0, import_drizzle_orm7.is)(value.config.where, import_drizzle_orm7.SQL)) {
20275
- where = dialect6.sqlToQuery(value.config.where).sql;
19794
+ where = dialect4.sqlToQuery(value.config.where).sql;
20276
19795
  }
20277
19796
  }
20278
19797
  indexesObject[name] = {
@@ -20283,7 +19802,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
20283
19802
  };
20284
19803
  });
20285
19804
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
20286
- const columnNames = unq.columns.map((c) => c.name);
19805
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
20287
19806
  const name = unq.name ?? (0, import_sqlite_core2.uniqueKeyName)(table4, columnNames);
20288
19807
  const existingUnique = indexesObject[name];
20289
19808
  if (typeof existingUnique !== "undefined") {
@@ -20313,12 +19832,20 @@ The unique constraint ${source_default.underline.blue(
20313
19832
  });
20314
19833
  primaryKeys.forEach((it) => {
20315
19834
  if (it.columns.length > 1) {
20316
- primaryKeysObject[it.getName()] = {
20317
- columns: it.columns.map((it2) => it2.name),
20318
- name: it.getName()
19835
+ const originalColumnNames = it.columns.map((c) => c.name);
19836
+ const columnNames = it.columns.map((c) => getColumnCasing(c, casing2));
19837
+ let name = it.getName();
19838
+ if (casing2 !== void 0) {
19839
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
19840
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
19841
+ }
19842
+ }
19843
+ primaryKeysObject[name] = {
19844
+ columns: columnNames,
19845
+ name
20319
19846
  };
20320
19847
  } else {
20321
- columnsObject[it.columns[0].name].primaryKey = true;
19848
+ columnsObject[getColumnCasing(it.columns[0], casing2)].primaryKey = true;
20322
19849
  }
20323
19850
  });
20324
19851
  result[tableName] = {
@@ -20563,16 +20090,17 @@ WHERE
20563
20090
  });
20564
20091
 
20565
20092
  // src/serializer/index.ts
20566
- var import_fs3, glob, import_path3, sqlToStr, serializeMySql, serializePg, serializeSQLite, prepareFilenames;
20093
+ var import_casing2, import_fs3, glob, import_path3, sqlToStr, serializeMySql, serializePg, serializeSQLite, prepareFilenames;
20567
20094
  var init_serializer = __esm({
20568
20095
  "src/serializer/index.ts"() {
20569
20096
  "use strict";
20570
20097
  init_source();
20098
+ import_casing2 = require("drizzle-orm/casing");
20571
20099
  import_fs3 = __toESM(require("fs"));
20572
20100
  glob = __toESM(require_glob());
20573
20101
  import_path3 = __toESM(require("path"));
20574
20102
  init_views();
20575
- sqlToStr = (sql) => {
20103
+ sqlToStr = (sql, casing2) => {
20576
20104
  return sql.toQuery({
20577
20105
  escapeName: () => {
20578
20106
  throw new Error("we don't support params for `sql` default values");
@@ -20582,10 +20110,11 @@ var init_serializer = __esm({
20582
20110
  },
20583
20111
  escapeString: () => {
20584
20112
  throw new Error("we don't support params for `sql` default values");
20585
- }
20113
+ },
20114
+ casing: new import_casing2.CasingCache(casing2)
20586
20115
  }).sql;
20587
20116
  };
20588
- serializeMySql = async (path5) => {
20117
+ serializeMySql = async (path5, casing2) => {
20589
20118
  const filenames = prepareFilenames(path5);
20590
20119
  console.log(source_default.gray(`Reading schema files:
20591
20120
  ${filenames.join("\n")}
@@ -20593,23 +20122,23 @@ ${filenames.join("\n")}
20593
20122
  const { prepareFromMySqlImports: prepareFromMySqlImports2 } = await Promise.resolve().then(() => (init_mysqlImports(), mysqlImports_exports));
20594
20123
  const { generateMySqlSnapshot: generateMySqlSnapshot2 } = await Promise.resolve().then(() => (init_mysqlSerializer(), mysqlSerializer_exports));
20595
20124
  const { tables } = await prepareFromMySqlImports2(filenames);
20596
- return generateMySqlSnapshot2(tables);
20125
+ return generateMySqlSnapshot2(tables, casing2);
20597
20126
  };
20598
- serializePg = async (path5, schemaFilter) => {
20127
+ serializePg = async (path5, casing2, schemaFilter) => {
20599
20128
  const filenames = prepareFilenames(path5);
20600
20129
  const { prepareFromPgImports: prepareFromPgImports2 } = await Promise.resolve().then(() => (init_pgImports(), pgImports_exports));
20601
20130
  const { generatePgSnapshot: generatePgSnapshot2 } = await Promise.resolve().then(() => (init_pgSerializer(), pgSerializer_exports));
20602
- const { tables, enums, schemas, sequences, views, matViews } = await prepareFromPgImports2(
20131
+ const { tables, enums, schemas, sequences } = await prepareFromPgImports2(
20603
20132
  filenames
20604
20133
  );
20605
- return generatePgSnapshot2(tables, enums, schemas, sequences, views, matViews, schemaFilter);
20134
+ return generatePgSnapshot2(tables, enums, schemas, sequences, casing2, schemaFilter);
20606
20135
  };
20607
- serializeSQLite = async (path5) => {
20136
+ serializeSQLite = async (path5, casing2) => {
20608
20137
  const filenames = prepareFilenames(path5);
20609
20138
  const { prepareFromSqliteImports: prepareFromSqliteImports2 } = await Promise.resolve().then(() => (init_sqliteImports(), sqliteImports_exports));
20610
20139
  const { generateSqliteSnapshot: generateSqliteSnapshot2 } = await Promise.resolve().then(() => (init_sqliteSerializer(), sqliteSerializer_exports));
20611
20140
  const { tables } = await prepareFromSqliteImports2(filenames);
20612
- return generateSqliteSnapshot2(tables);
20141
+ return generateSqliteSnapshot2(tables, casing2);
20613
20142
  };
20614
20143
  prepareFilenames = (path5) => {
20615
20144
  if (typeof path5 === "string") {
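// Editor's sketch (names outside the diff are hypothetical): the hunks above thread one
// `casing` option ("snake_case" | "camelCase" | undefined) from the serialize* entry points
// into the snapshot generators, and sqlToStr now seeds drizzle-orm's CasingCache with the
// same value so `sql` default expressions render column names consistently. A toy version
// of that plumbing:
const serializeSketch = async (schemaPath, casing) => {
  const generateSnapshotSketch = (tables, casing2) =>
    tables.map((t) => ({ table: t, casing: casing2 ?? "as-written" }));
  return generateSnapshotSketch([schemaPath], casing);
};
// await serializeSketch("./schema.ts", "snake_case")
// -> [{ table: "./schema.ts", casing: "snake_case" }]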
@@ -20658,45 +20187,45 @@ var init_migrationPreparator = __esm({
20658
20187
  init_mysqlSchema();
20659
20188
  init_pgSchema();
20660
20189
  init_sqliteSchema();
20661
- prepareMySqlDbPushSnapshot = async (prev, schemaPath) => {
20662
- const serialized = await serializeMySql(schemaPath);
20190
+ prepareMySqlDbPushSnapshot = async (prev, schemaPath, casing2) => {
20191
+ const serialized = await serializeMySql(schemaPath, casing2);
20663
20192
  const id = (0, import_crypto.randomUUID)();
20664
20193
  const idPrev = prev.id;
20665
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20666
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20194
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20195
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20667
20196
  return { prev, cur: result };
20668
20197
  };
20669
- prepareSQLiteDbPushSnapshot = async (prev, schemaPath) => {
20670
- const serialized = await serializeSQLite(schemaPath);
20198
+ prepareSQLiteDbPushSnapshot = async (prev, schemaPath, casing2) => {
20199
+ const serialized = await serializeSQLite(schemaPath, casing2);
20671
20200
  const id = (0, import_crypto.randomUUID)();
20672
20201
  const idPrev = prev.id;
20673
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20202
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20674
20203
  const result = {
20675
20204
  version: version3,
20676
- dialect: dialect7,
20205
+ dialect: dialect4,
20677
20206
  id,
20678
20207
  prevId: idPrev,
20679
20208
  ...rest
20680
20209
  };
20681
20210
  return { prev, cur: result };
20682
20211
  };
20683
- preparePgDbPushSnapshot = async (prev, schemaPath, schemaFilter = ["public"]) => {
20684
- const serialized = await serializePg(schemaPath, schemaFilter);
20212
+ preparePgDbPushSnapshot = async (prev, schemaPath, casing2, schemaFilter = ["public"]) => {
20213
+ const serialized = await serializePg(schemaPath, casing2, schemaFilter);
20685
20214
  const id = (0, import_crypto.randomUUID)();
20686
20215
  const idPrev = prev.id;
20687
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20688
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20216
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20217
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20689
20218
  return { prev, cur: result };
20690
20219
  };
20691
- prepareMySqlMigrationSnapshot = async (migrationFolders, schemaPath) => {
20220
+ prepareMySqlMigrationSnapshot = async (migrationFolders, schemaPath, casing2) => {
20692
20221
  const prevSnapshot = mysqlSchema.parse(
20693
20222
  preparePrevSnapshot(migrationFolders, dryMySql)
20694
20223
  );
20695
- const serialized = await serializeMySql(schemaPath);
20224
+ const serialized = await serializeMySql(schemaPath, casing2);
20696
20225
  const id = (0, import_crypto.randomUUID)();
20697
20226
  const idPrev = prevSnapshot.id;
20698
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20699
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20227
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20228
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20700
20229
  const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot;
20701
20230
  const custom2 = {
20702
20231
  id,
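// Editor's sketch of the snapshot stamping done above (field values are hypothetical):
// every prepare*Snapshot helper keeps the serialized payload intact and only wraps it with
// a fresh id plus the previous snapshot's id.
const { randomUUID: randomUUIDSketch } = require("crypto");
function stampSnapshotSketch(prev, serialized) {
  const { version, dialect, ...rest } = serialized;
  return { version, dialect, id: randomUUIDSketch(), prevId: prev.id, ...rest };
}
// stampSnapshotSketch({ id: "0000-prev" }, { version: "7", dialect: "postgresql", tables: {} })
// -> { version: "7", dialect: "postgresql", id: "<new uuid>", prevId: "0000-prev", tables: {} }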
@@ -20705,17 +20234,17 @@ var init_migrationPreparator = __esm({
20705
20234
  };
20706
20235
  return { prev: prevSnapshot, cur: result, custom: custom2 };
20707
20236
  };
20708
- prepareSqliteMigrationSnapshot = async (snapshots, schemaPath) => {
20237
+ prepareSqliteMigrationSnapshot = async (snapshots, schemaPath, casing2) => {
20709
20238
  const prevSnapshot = sqliteSchema.parse(
20710
20239
  preparePrevSnapshot(snapshots, drySQLite)
20711
20240
  );
20712
- const serialized = await serializeSQLite(schemaPath);
20241
+ const serialized = await serializeSQLite(schemaPath, casing2);
20713
20242
  const id = (0, import_crypto.randomUUID)();
20714
20243
  const idPrev = prevSnapshot.id;
20715
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20244
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20716
20245
  const result = {
20717
20246
  version: version3,
20718
- dialect: dialect7,
20247
+ dialect: dialect4,
20719
20248
  id,
20720
20249
  prevId: idPrev,
20721
20250
  ...rest
@@ -20735,9 +20264,9 @@ var init_migrationPreparator = __esm({
20735
20264
  }) => {
20736
20265
  return { id, prevId: idPrev, ...serialized };
20737
20266
  };
20738
- preparePgMigrationSnapshot = async (snapshots, schemaPath) => {
20267
+ preparePgMigrationSnapshot = async (snapshots, schemaPath, casing2) => {
20739
20268
  const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg));
20740
- const serialized = await serializePg(schemaPath);
20269
+ const serialized = await serializePg(schemaPath, casing2);
20741
20270
  const id = (0, import_crypto.randomUUID)();
20742
20271
  const idPrev = prevSnapshot.id;
20743
20272
  const result = { id, prevId: idPrev, ...serialized };
@@ -23118,7 +22647,6 @@ function applyJsonDiff(json1, json2) {
23118
22647
  difference.tables = difference.tables || {};
23119
22648
  difference.enums = difference.enums || {};
23120
22649
  difference.sequences = difference.sequences || {};
23121
- difference.views = difference.views || {};
23122
22650
  const schemaKeys = Object.keys(difference.schemas);
23123
22651
  for (let key of schemaKeys) {
23124
22652
  if (key.endsWith("__added") || key.endsWith("__deleted")) {
@@ -23174,71 +22702,6 @@ function applyJsonDiff(json1, json2) {
23174
22702
  const alteredSequences = sequencesEntries.filter((it) => !(it[0].includes("__added") || it[0].includes("__deleted")) && "values" in it[1]).map((it) => {
23175
22703
  return json2.sequences[it[0]];
23176
22704
  });
23177
- const viewsEntries = Object.entries(difference.views);
23178
- const alteredViews = viewsEntries.filter((it) => !(it[0].includes("__added") || it[0].includes("__deleted"))).map(
23179
- ([nameWithSchema, view2]) => {
23180
- const deletedWithOption = view2.with__deleted;
23181
- const addedWithOption = view2.with__added;
23182
- const deletedWith = Object.fromEntries(
23183
- Object.entries(view2.with || {}).filter((it) => it[0].endsWith("__deleted")).map(([key, value]) => {
23184
- return [key.replace("__deleted", ""), value];
23185
- })
23186
- );
23187
- const addedWith = Object.fromEntries(
23188
- Object.entries(view2.with || {}).filter((it) => it[0].endsWith("__added")).map(([key, value]) => {
23189
- return [key.replace("__added", ""), value];
23190
- })
23191
- );
23192
- const alterWith = Object.fromEntries(
23193
- Object.entries(view2.with || {}).filter(
23194
- (it) => typeof it[1].__old !== "undefined" && typeof it[1].__new !== "undefined"
23195
- ).map(
23196
- (it) => {
23197
- return [it[0], it[1].__new];
23198
- }
23199
- )
23200
- );
23201
- const alteredSchema = view2.schema;
23202
- const alteredDefinition = view2.definition;
23203
- const alteredExisting = view2.isExisting;
23204
- const addedTablespace = view2.tablespace__added;
23205
- const droppedTablespace = view2.tablespace__deleted;
23206
- const alterTablespaceTo = view2.tablespace;
23207
- let alteredTablespace;
23208
- if (addedTablespace)
23209
- alteredTablespace = { __new: addedTablespace, __old: "pg_default" };
23210
- if (droppedTablespace)
23211
- alteredTablespace = { __new: "pg_default", __old: droppedTablespace };
23212
- if (alterTablespaceTo)
23213
- alteredTablespace = alterTablespaceTo;
23214
- const addedUsing = view2.using__added;
23215
- const droppedUsing = view2.using__deleted;
23216
- const alterUsingTo = view2.using;
23217
- let alteredUsing;
23218
- if (addedUsing)
23219
- alteredUsing = { __new: addedUsing, __old: "heap" };
23220
- if (droppedUsing)
23221
- alteredUsing = { __new: "heap", __old: droppedUsing };
23222
- if (alterUsingTo)
23223
- alteredUsing = alterUsingTo;
23224
- return {
23225
- name: json2.views[nameWithSchema].name,
23226
- schema: json2.views[nameWithSchema].schema,
23227
- deletedWithOption,
23228
- addedWithOption,
23229
- alteredWith: {
23230
- deletedWith: Object.keys(deletedWith).length ? deletedWith : void 0,
23231
- addedWith: Object.keys(addedWith).length ? addedWith : void 0,
23232
- alterWith: Object.keys(alterWith).length ? alterWith : void 0
23233
- },
23234
- alteredSchema,
23235
- alteredDefinition,
23236
- alteredExisting,
23237
- alteredTablespace,
23238
- alteredUsing
23239
- };
23240
- }
23241
- );
23242
22705
  const alteredTablesWithColumns = Object.values(difference.tables).map(
23243
22706
  (table4) => {
23244
22707
  return findAlternationsInTable(table4);
@@ -23247,8 +22710,7 @@ function applyJsonDiff(json1, json2) {
23247
22710
  return {
23248
22711
  alteredTablesWithColumns,
23249
22712
  alteredEnums,
23250
- alteredSequences,
23251
- alteredViews
22713
+ alteredSequences
23252
22714
  };
23253
22715
  }
23254
22716
  var import_json_diff, mapArraysDiff, findAlternationsInTable, alternationsInColumn;
@@ -23630,10 +23092,10 @@ var init_jsonDiffer = __esm({
23630
23092
  });
23631
23093
 
23632
23094
  // src/sqlgenerator.ts
23633
- function fromJson(statements, dialect7, action, json2) {
23095
+ function fromJson(statements, dialect4, action, json2) {
23634
23096
  const result = statements.flatMap((statement) => {
23635
23097
  const filtered = convertors.filter((it) => {
23636
- return it.can(statement, dialect7);
23098
+ return it.can(statement, dialect4);
23637
23099
  });
23638
23100
  const convertor = filtered.length === 1 ? filtered[0] : void 0;
23639
23101
  if (!convertor) {
@@ -23643,7 +23105,7 @@ function fromJson(statements, dialect7, action, json2) {
23643
23105
  }).filter((it) => it !== "");
23644
23106
  return result;
23645
23107
  }
23646
- var pgNativeTypes, isPgNativeType, Convertor, PgCreateTableConvertor, MySqlCreateTableConvertor, SQLiteCreateTableConvertor, PgCreateViewConvertor, PgDropViewConvertor, PgRenameViewConvertor, PgAlterViewSchemaConvertor, PgAlterViewAddWithOptionConvertor, PgAlterViewDropWithOptionConvertor, PgAlterViewAlterTablespaceConvertor, PgAlterViewAlterUsingConvertor, PgAlterTableAlterColumnSetGenerated, PgAlterTableAlterColumnDropGenerated, PgAlterTableAlterColumnAlterGenerated, PgAlterTableAddUniqueConstraintConvertor, PgAlterTableDropUniqueConstraintConvertor, MySQLAlterTableAddUniqueConstraintConvertor, MySQLAlterTableDropUniqueConstraintConvertor, CreatePgSequenceConvertor, DropPgSequenceConvertor, RenamePgSequenceConvertor, MovePgSequenceConvertor, AlterPgSequenceConvertor, CreateTypeEnumConvertor, AlterTypeAddValueConvertor, PgDropTableConvertor, MySQLDropTableConvertor, SQLiteDropTableConvertor, PgRenameTableConvertor, SqliteRenameTableConvertor, MySqlRenameTableConvertor, PgAlterTableRenameColumnConvertor, MySqlAlterTableRenameColumnConvertor, SQLiteAlterTableRenameColumnConvertor, PgAlterTableDropColumnConvertor, MySqlAlterTableDropColumnConvertor, SQLiteAlterTableDropColumnConvertor, PgAlterTableAddColumnConvertor, MySqlAlterTableAddColumnConvertor, SQLiteAlterTableAddColumnConvertor, PgAlterTableAlterColumnSetTypeConvertor, PgAlterTableAlterColumnSetDefaultConvertor, PgAlterTableAlterColumnDropDefaultConvertor, PgAlterTableAlterColumnDropGeneratedConvertor, PgAlterTableAlterColumnSetExpressionConvertor, PgAlterTableAlterColumnAlterrGeneratedConvertor, SqliteAlterTableAlterColumnDropGeneratedConvertor, SqliteAlterTableAlterColumnSetExpressionConvertor, SqliteAlterTableAlterColumnAlterGeneratedConvertor, MySqlAlterTableAlterColumnAlterrGeneratedConvertor, MySqlAlterTableAddPk, MySqlAlterTableDropPk, LibSQLModifyColumn, MySqlModifyColumn, PgAlterTableCreateCompositePrimaryKeyConvertor, PgAlterTableDeleteCompositePrimaryKeyConvertor, PgAlterTableAlterCompositePrimaryKeyConvertor, MySqlAlterTableCreateCompositePrimaryKeyConvertor, MySqlAlterTableDeleteCompositePrimaryKeyConvertor, MySqlAlterTableAlterCompositePrimaryKeyConvertor, PgAlterTableAlterColumnSetPrimaryKeyConvertor, PgAlterTableAlterColumnDropPrimaryKeyConvertor, PgAlterTableAlterColumnSetNotNullConvertor, PgAlterTableAlterColumnDropNotNullConvertor, PgCreateForeignKeyConvertor, LibSQLCreateForeignKeyConvertor, MySqlCreateForeignKeyConvertor, PgAlterForeignKeyConvertor, PgDeleteForeignKeyConvertor, MySqlDeleteForeignKeyConvertor, CreatePgIndexConvertor, CreateMySqlIndexConvertor, CreateSqliteIndexConvertor, PgDropIndexConvertor, PgCreateSchemaConvertor, PgRenameSchemaConvertor, PgDropSchemaConvertor, PgAlterTableSetSchemaConvertor, PgAlterTableSetNewSchemaConvertor, PgAlterTableRemoveFromSchemaConvertor, SqliteDropIndexConvertor, MySqlDropIndexConvertor, SQLiteRecreateTableConvertor, LibSQLRecreateTableConvertor, convertors;
23108
+ var pgNativeTypes, isPgNativeType, Convertor, PgCreateTableConvertor, MySqlCreateTableConvertor, SQLiteCreateTableConvertor, PgAlterTableAlterColumnSetGenerated, PgAlterTableAlterColumnDropGenerated, PgAlterTableAlterColumnAlterGenerated, PgAlterTableAddUniqueConstraintConvertor, PgAlterTableDropUniqueConstraintConvertor, MySQLAlterTableAddUniqueConstraintConvertor, MySQLAlterTableDropUniqueConstraintConvertor, CreatePgSequenceConvertor, DropPgSequenceConvertor, RenamePgSequenceConvertor, MovePgSequenceConvertor, AlterPgSequenceConvertor, CreateTypeEnumConvertor, AlterTypeAddValueConvertor, PgDropTableConvertor, MySQLDropTableConvertor, SQLiteDropTableConvertor, PgRenameTableConvertor, SqliteRenameTableConvertor, MySqlRenameTableConvertor, PgAlterTableRenameColumnConvertor, MySqlAlterTableRenameColumnConvertor, SQLiteAlterTableRenameColumnConvertor, PgAlterTableDropColumnConvertor, MySqlAlterTableDropColumnConvertor, SQLiteAlterTableDropColumnConvertor, PgAlterTableAddColumnConvertor, MySqlAlterTableAddColumnConvertor, SQLiteAlterTableAddColumnConvertor, PgAlterTableAlterColumnSetTypeConvertor, PgAlterTableAlterColumnSetDefaultConvertor, PgAlterTableAlterColumnDropDefaultConvertor, PgAlterTableAlterColumnDropGeneratedConvertor, PgAlterTableAlterColumnSetExpressionConvertor, PgAlterTableAlterColumnAlterrGeneratedConvertor, SqliteAlterTableAlterColumnDropGeneratedConvertor, SqliteAlterTableAlterColumnSetExpressionConvertor, SqliteAlterTableAlterColumnAlterGeneratedConvertor, MySqlAlterTableAlterColumnAlterrGeneratedConvertor, MySqlAlterTableAddPk, MySqlAlterTableDropPk, LibSQLModifyColumn, MySqlModifyColumn, PgAlterTableCreateCompositePrimaryKeyConvertor, PgAlterTableDeleteCompositePrimaryKeyConvertor, PgAlterTableAlterCompositePrimaryKeyConvertor, MySqlAlterTableCreateCompositePrimaryKeyConvertor, MySqlAlterTableDeleteCompositePrimaryKeyConvertor, MySqlAlterTableAlterCompositePrimaryKeyConvertor, PgAlterTableAlterColumnSetPrimaryKeyConvertor, PgAlterTableAlterColumnDropPrimaryKeyConvertor, PgAlterTableAlterColumnSetNotNullConvertor, PgAlterTableAlterColumnDropNotNullConvertor, PgCreateForeignKeyConvertor, LibSQLCreateForeignKeyConvertor, MySqlCreateForeignKeyConvertor, PgAlterForeignKeyConvertor, PgDeleteForeignKeyConvertor, MySqlDeleteForeignKeyConvertor, CreatePgIndexConvertor, CreateMySqlIndexConvertor, CreateSqliteIndexConvertor, PgDropIndexConvertor, PgCreateSchemaConvertor, PgRenameSchemaConvertor, PgDropSchemaConvertor, PgAlterTableSetSchemaConvertor, PgAlterTableSetNewSchemaConvertor, PgAlterTableRemoveFromSchemaConvertor, SqliteDropIndexConvertor, MySqlDropIndexConvertor, SQLiteRecreateTableConvertor, LibSQLRecreateTableConvertor, convertors;
23647
23109
  var init_sqlgenerator = __esm({
23648
23110
  "src/sqlgenerator.ts"() {
23649
23111
  "use strict";
@@ -23701,8 +23163,8 @@ var init_sqlgenerator = __esm({
23701
23163
  Convertor = class {
23702
23164
  };
23703
23165
  PgCreateTableConvertor = class extends Convertor {
23704
- can(statement, dialect7) {
23705
- return statement.type === "create_table" && dialect7 === "postgresql";
23166
+ can(statement, dialect4) {
23167
+ return statement.type === "create_table" && dialect4 === "postgresql";
23706
23168
  }
23707
23169
  convert(st) {
23708
23170
  const { tableName, schema: schema5, columns, compositePKs, uniqueConstraints } = st;
@@ -23746,8 +23208,8 @@ var init_sqlgenerator = __esm({
23746
23208
  }
23747
23209
  };
23748
23210
  MySqlCreateTableConvertor = class extends Convertor {
23749
- can(statement, dialect7) {
23750
- return statement.type === "create_table" && dialect7 === "mysql";
23211
+ can(statement, dialect4) {
23212
+ return statement.type === "create_table" && dialect4 === "mysql";
23751
23213
  }
23752
23214
  convert(st) {
23753
23215
  var _a, _b;
@@ -23797,8 +23259,8 @@ var init_sqlgenerator = __esm({
23797
23259
  }
23798
23260
  };
23799
23261
  SQLiteCreateTableConvertor = class extends Convertor {
23800
- can(statement, dialect7) {
23801
- return statement.type === "sqlite_create_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23262
+ can(statement, dialect4) {
23263
+ return statement.type === "sqlite_create_table" && (dialect4 === "sqlite" || dialect4 === "turso");
23802
23264
  }
23803
23265
  convert(st) {
23804
23266
  const {
@@ -23859,121 +23321,9 @@ var init_sqlgenerator = __esm({
23859
23321
  return statement;
23860
23322
  }
23861
23323
  };
23862
- PgCreateViewConvertor = class extends Convertor {
23863
- can(statement, dialect7) {
23864
- return statement.type === "create_view" && dialect7 === "postgresql";
23865
- }
23866
- convert(st) {
23867
- const { definition, name: viewName, schema: schema5, with: withOption, materialized, withNoData, tablespace, using } = st;
23868
- const name = schema5 ? `"${schema5}"."${viewName}"` : `"${viewName}"`;
23869
- let statement = materialized ? `CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`;
23870
- if (using)
23871
- statement += ` USING "${using}"`;
23872
- const options = [];
23873
- if (withOption) {
23874
- statement += ` WITH (`;
23875
- Object.entries(withOption).forEach(([key, value]) => {
23876
- if (typeof value === "undefined")
23877
- return;
23878
- options.push(`${key.snake_case()} = ${value}`);
23879
- });
23880
- statement += options.join(", ");
23881
- statement += `)`;
23882
- }
23883
- if (tablespace)
23884
- statement += ` TABLESPACE ${tablespace}`;
23885
- statement += ` AS (${definition})`;
23886
- if (withNoData)
23887
- statement += ` WITH NO DATA`;
23888
- statement += `;`;
23889
- return statement;
23890
- }
23891
- };
23892
- PgDropViewConvertor = class extends Convertor {
23893
- can(statement, dialect7) {
23894
- return statement.type === "drop_view" && dialect7 === "postgresql";
23895
- }
23896
- convert(st) {
23897
- const { name: viewName, schema: schema5, materialized } = st;
23898
- const name = schema5 ? `"${schema5}"."${viewName}"` : `"${viewName}"`;
23899
- return `DROP${materialized ? " MATERIALIZED" : ""} VIEW ${name};`;
23900
- }
23901
- };
23902
- PgRenameViewConvertor = class extends Convertor {
23903
- can(statement, dialect7) {
23904
- return statement.type === "rename_view" && dialect7 === "postgresql";
23905
- }
23906
- convert(st) {
23907
- const { nameFrom: from, nameTo: to, schema: schema5, materialized } = st;
23908
- const nameFrom = `"${schema5}"."${from}"`;
23909
- return `ALTER${materialized ? " MATERIALIZED" : ""} VIEW ${nameFrom} RENAME TO "${to}";`;
23910
- }
23911
- };
23912
- PgAlterViewSchemaConvertor = class extends Convertor {
23913
- can(statement, dialect7) {
23914
- return statement.type === "alter_view_alter_schema" && dialect7 === "postgresql";
23915
- }
23916
- convert(st) {
23917
- const { fromSchema, toSchema, name, materialized } = st;
23918
- const statement = `ALTER${materialized ? " MATERIALIZED" : ""} VIEW "${fromSchema}"."${name}" SET SCHEMA "${toSchema}";`;
23919
- return statement;
23920
- }
23921
- };
23922
- PgAlterViewAddWithOptionConvertor = class extends Convertor {
23923
- can(statement, dialect7) {
23924
- return statement.type === "alter_view_add_with_option" && dialect7 === "postgresql";
23925
- }
23926
- convert(st) {
23927
- const { schema: schema5, with: withOption, name, materialized } = st;
23928
- let statement = `ALTER${materialized ? " MATERIALIZED" : ""} VIEW "${schema5}"."${name}" SET (`;
23929
- const options = [];
23930
- Object.entries(withOption).forEach(([key, value]) => {
23931
- options.push(`${key.snake_case()} = ${value}`);
23932
- });
23933
- statement += options.join(", ");
23934
- statement += `);`;
23935
- return statement;
23936
- }
23937
- };
23938
- PgAlterViewDropWithOptionConvertor = class extends Convertor {
23939
- can(statement, dialect7) {
23940
- return statement.type === "alter_view_drop_with_option" && dialect7 === "postgresql";
23941
- }
23942
- convert(st) {
23943
- const { schema: schema5, name, materialized, with: withOptions } = st;
23944
- let statement = `ALTER${materialized ? " MATERIALIZED" : ""} VIEW "${schema5}"."${name}" RESET (`;
23945
- const options = [];
23946
- Object.entries(withOptions).forEach(([key, value]) => {
23947
- options.push(`${key.snake_case()}`);
23948
- });
23949
- statement += options.join(", ");
23950
- statement += ");";
23951
- return statement;
23952
- }
23953
- };
23954
- PgAlterViewAlterTablespaceConvertor = class extends Convertor {
23955
- can(statement, dialect7) {
23956
- return statement.type === "alter_view_alter_tablespace" && dialect7 === "postgresql";
23957
- }
23958
- convert(st) {
23959
- const { schema: schema5, name, toTablespace } = st;
23960
- const statement = `ALTER MATERIALIZED VIEW "${schema5}"."${name}" SET TABLESPACE ${toTablespace};`;
23961
- return statement;
23962
- }
23963
- };
23964
- PgAlterViewAlterUsingConvertor = class extends Convertor {
23965
- can(statement, dialect7) {
23966
- return statement.type === "alter_view_alter_using" && dialect7 === "postgresql";
23967
- }
23968
- convert(st) {
23969
- const { schema: schema5, name, toUsing } = st;
23970
- const statement = `ALTER MATERIALIZED VIEW "${schema5}"."${name}" SET ACCESS METHOD "${toUsing}";`;
23971
- return statement;
23972
- }
23973
- };
23974
23324
  PgAlterTableAlterColumnSetGenerated = class extends Convertor {
23975
- can(statement, dialect7) {
23976
- return statement.type === "alter_table_alter_column_set_identity" && dialect7 === "postgresql";
23325
+ can(statement, dialect4) {
23326
+ return statement.type === "alter_table_alter_column_set_identity" && dialect4 === "postgresql";
23977
23327
  }
23978
23328
  convert(statement) {
23979
23329
  const { identity, tableName, columnName, schema: schema5 } = statement;
@@ -23985,8 +23335,8 @@ var init_sqlgenerator = __esm({
23985
23335
  }
23986
23336
  };
23987
23337
  PgAlterTableAlterColumnDropGenerated = class extends Convertor {
23988
- can(statement, dialect7) {
23989
- return statement.type === "alter_table_alter_column_drop_identity" && dialect7 === "postgresql";
23338
+ can(statement, dialect4) {
23339
+ return statement.type === "alter_table_alter_column_drop_identity" && dialect4 === "postgresql";
23990
23340
  }
23991
23341
  convert(statement) {
23992
23342
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23995,8 +23345,8 @@ var init_sqlgenerator = __esm({
23995
23345
  }
23996
23346
  };
23997
23347
  PgAlterTableAlterColumnAlterGenerated = class extends Convertor {
23998
- can(statement, dialect7) {
23999
- return statement.type === "alter_table_alter_column_change_identity" && dialect7 === "postgresql";
23348
+ can(statement, dialect4) {
23349
+ return statement.type === "alter_table_alter_column_change_identity" && dialect4 === "postgresql";
24000
23350
  }
24001
23351
  convert(statement) {
24002
23352
  const { identity, oldIdentity, tableName, columnName, schema: schema5 } = statement;
@@ -24043,8 +23393,8 @@ var init_sqlgenerator = __esm({
24043
23393
  }
24044
23394
  };
24045
23395
  PgAlterTableAddUniqueConstraintConvertor = class extends Convertor {
24046
- can(statement, dialect7) {
24047
- return statement.type === "create_unique_constraint" && dialect7 === "postgresql";
23396
+ can(statement, dialect4) {
23397
+ return statement.type === "create_unique_constraint" && dialect4 === "postgresql";
24048
23398
  }
24049
23399
  convert(statement) {
24050
23400
  const unsquashed = PgSquasher.unsquashUnique(statement.data);
@@ -24053,8 +23403,8 @@ var init_sqlgenerator = __esm({
24053
23403
  }
24054
23404
  };
24055
23405
  PgAlterTableDropUniqueConstraintConvertor = class extends Convertor {
24056
- can(statement, dialect7) {
24057
- return statement.type === "delete_unique_constraint" && dialect7 === "postgresql";
23406
+ can(statement, dialect4) {
23407
+ return statement.type === "delete_unique_constraint" && dialect4 === "postgresql";
24058
23408
  }
24059
23409
  convert(statement) {
24060
23410
  const unsquashed = PgSquasher.unsquashUnique(statement.data);
@@ -24063,8 +23413,8 @@ var init_sqlgenerator = __esm({
24063
23413
  }
24064
23414
  };
24065
23415
  MySQLAlterTableAddUniqueConstraintConvertor = class extends Convertor {
24066
- can(statement, dialect7) {
24067
- return statement.type === "create_unique_constraint" && dialect7 === "mysql";
23416
+ can(statement, dialect4) {
23417
+ return statement.type === "create_unique_constraint" && dialect4 === "mysql";
24068
23418
  }
24069
23419
  convert(statement) {
24070
23420
  const unsquashed = MySqlSquasher.unsquashUnique(statement.data);
@@ -24072,8 +23422,8 @@ var init_sqlgenerator = __esm({
24072
23422
  }
24073
23423
  };
24074
23424
  MySQLAlterTableDropUniqueConstraintConvertor = class extends Convertor {
24075
- can(statement, dialect7) {
24076
- return statement.type === "delete_unique_constraint" && dialect7 === "mysql";
23425
+ can(statement, dialect4) {
23426
+ return statement.type === "delete_unique_constraint" && dialect4 === "mysql";
24077
23427
  }
24078
23428
  convert(statement) {
24079
23429
  const unsquashed = MySqlSquasher.unsquashUnique(statement.data);
@@ -24081,8 +23431,8 @@ var init_sqlgenerator = __esm({
24081
23431
  }
24082
23432
  };
24083
23433
  CreatePgSequenceConvertor = class extends Convertor {
24084
- can(statement, dialect7) {
24085
- return statement.type === "create_sequence" && dialect7 === "postgresql";
23434
+ can(statement, dialect4) {
23435
+ return statement.type === "create_sequence" && dialect4 === "postgresql";
24086
23436
  }
24087
23437
  convert(st) {
24088
23438
  const { name, values, schema: schema5 } = st;
@@ -24091,8 +23441,8 @@ var init_sqlgenerator = __esm({
24091
23441
  }
24092
23442
  };
24093
23443
  DropPgSequenceConvertor = class extends Convertor {
24094
- can(statement, dialect7) {
24095
- return statement.type === "drop_sequence" && dialect7 === "postgresql";
23444
+ can(statement, dialect4) {
23445
+ return statement.type === "drop_sequence" && dialect4 === "postgresql";
24096
23446
  }
24097
23447
  convert(st) {
24098
23448
  const { name, schema: schema5 } = st;
@@ -24101,8 +23451,8 @@ var init_sqlgenerator = __esm({
24101
23451
  }
24102
23452
  };
24103
23453
  RenamePgSequenceConvertor = class extends Convertor {
24104
- can(statement, dialect7) {
24105
- return statement.type === "rename_sequence" && dialect7 === "postgresql";
23454
+ can(statement, dialect4) {
23455
+ return statement.type === "rename_sequence" && dialect4 === "postgresql";
24106
23456
  }
24107
23457
  convert(st) {
24108
23458
  const { nameFrom, nameTo, schema: schema5 } = st;
@@ -24112,8 +23462,8 @@ var init_sqlgenerator = __esm({
24112
23462
  }
24113
23463
  };
24114
23464
  MovePgSequenceConvertor = class extends Convertor {
24115
- can(statement, dialect7) {
24116
- return statement.type === "move_sequence" && dialect7 === "postgresql";
23465
+ can(statement, dialect4) {
23466
+ return statement.type === "move_sequence" && dialect4 === "postgresql";
24117
23467
  }
24118
23468
  convert(st) {
24119
23469
  const { schemaFrom, schemaTo, name } = st;
@@ -24123,8 +23473,8 @@ var init_sqlgenerator = __esm({
24123
23473
  }
24124
23474
  };
24125
23475
  AlterPgSequenceConvertor = class extends Convertor {
24126
- can(statement, dialect7) {
24127
- return statement.type === "alter_sequence" && dialect7 === "postgresql";
23476
+ can(statement, dialect4) {
23477
+ return statement.type === "alter_sequence" && dialect4 === "postgresql";
24128
23478
  }
24129
23479
  convert(st) {
24130
23480
  const { name, schema: schema5, values } = st;
@@ -24167,8 +23517,8 @@ var init_sqlgenerator = __esm({
24167
23517
  }
24168
23518
  };
24169
23519
  PgDropTableConvertor = class extends Convertor {
24170
- can(statement, dialect7) {
24171
- return statement.type === "drop_table" && dialect7 === "postgresql";
23520
+ can(statement, dialect4) {
23521
+ return statement.type === "drop_table" && dialect4 === "postgresql";
24172
23522
  }
24173
23523
  convert(statement) {
24174
23524
  const { tableName, schema: schema5 } = statement;
@@ -24177,8 +23527,8 @@ var init_sqlgenerator = __esm({
24177
23527
  }
24178
23528
  };
24179
23529
  MySQLDropTableConvertor = class extends Convertor {
24180
- can(statement, dialect7) {
24181
- return statement.type === "drop_table" && dialect7 === "mysql";
23530
+ can(statement, dialect4) {
23531
+ return statement.type === "drop_table" && dialect4 === "mysql";
24182
23532
  }
24183
23533
  convert(statement) {
24184
23534
  const { tableName } = statement;
@@ -24186,8 +23536,8 @@ var init_sqlgenerator = __esm({
24186
23536
  }
24187
23537
  };
24188
23538
  SQLiteDropTableConvertor = class extends Convertor {
24189
- can(statement, dialect7) {
24190
- return statement.type === "drop_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23539
+ can(statement, dialect4) {
23540
+ return statement.type === "drop_table" && (dialect4 === "sqlite" || dialect4 === "turso");
24191
23541
  }
24192
23542
  convert(statement) {
24193
23543
  const { tableName } = statement;
@@ -24195,8 +23545,8 @@ var init_sqlgenerator = __esm({
24195
23545
  }
24196
23546
  };
24197
23547
  PgRenameTableConvertor = class extends Convertor {
24198
- can(statement, dialect7) {
24199
- return statement.type === "rename_table" && dialect7 === "postgresql";
23548
+ can(statement, dialect4) {
23549
+ return statement.type === "rename_table" && dialect4 === "postgresql";
24200
23550
  }
24201
23551
  convert(statement) {
24202
23552
  const { tableNameFrom, tableNameTo, toSchema, fromSchema } = statement;
@@ -24206,8 +23556,8 @@ var init_sqlgenerator = __esm({
24206
23556
  }
24207
23557
  };
24208
23558
  SqliteRenameTableConvertor = class extends Convertor {
24209
- can(statement, dialect7) {
24210
- return statement.type === "rename_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23559
+ can(statement, dialect4) {
23560
+ return statement.type === "rename_table" && (dialect4 === "sqlite" || dialect4 === "turso");
24211
23561
  }
24212
23562
  convert(statement) {
24213
23563
  const { tableNameFrom, tableNameTo } = statement;
@@ -24215,8 +23565,8 @@ var init_sqlgenerator = __esm({
24215
23565
  }
24216
23566
  };
24217
23567
  MySqlRenameTableConvertor = class extends Convertor {
24218
- can(statement, dialect7) {
24219
- return statement.type === "rename_table" && dialect7 === "mysql";
23568
+ can(statement, dialect4) {
23569
+ return statement.type === "rename_table" && dialect4 === "mysql";
24220
23570
  }
24221
23571
  convert(statement) {
24222
23572
  const { tableNameFrom, tableNameTo } = statement;
@@ -24224,8 +23574,8 @@ var init_sqlgenerator = __esm({
24224
23574
  }
24225
23575
  };
24226
23576
  PgAlterTableRenameColumnConvertor = class extends Convertor {
24227
- can(statement, dialect7) {
24228
- return statement.type === "alter_table_rename_column" && dialect7 === "postgresql";
23577
+ can(statement, dialect4) {
23578
+ return statement.type === "alter_table_rename_column" && dialect4 === "postgresql";
24229
23579
  }
24230
23580
  convert(statement) {
24231
23581
  const { tableName, oldColumnName, newColumnName, schema: schema5 } = statement;
@@ -24234,8 +23584,8 @@ var init_sqlgenerator = __esm({
24234
23584
  }
24235
23585
  };
24236
23586
  MySqlAlterTableRenameColumnConvertor = class extends Convertor {
24237
- can(statement, dialect7) {
24238
- return statement.type === "alter_table_rename_column" && dialect7 === "mysql";
23587
+ can(statement, dialect4) {
23588
+ return statement.type === "alter_table_rename_column" && dialect4 === "mysql";
24239
23589
  }
24240
23590
  convert(statement) {
24241
23591
  const { tableName, oldColumnName, newColumnName } = statement;
@@ -24243,8 +23593,8 @@ var init_sqlgenerator = __esm({
24243
23593
  }
24244
23594
  };
24245
23595
  SQLiteAlterTableRenameColumnConvertor = class extends Convertor {
24246
- can(statement, dialect7) {
24247
- return statement.type === "alter_table_rename_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23596
+ can(statement, dialect4) {
23597
+ return statement.type === "alter_table_rename_column" && (dialect4 === "sqlite" || dialect4 === "turso");
24248
23598
  }
24249
23599
  convert(statement) {
24250
23600
  const { tableName, oldColumnName, newColumnName } = statement;
@@ -24252,8 +23602,8 @@ var init_sqlgenerator = __esm({
24252
23602
  }
24253
23603
  };
24254
23604
  PgAlterTableDropColumnConvertor = class extends Convertor {
24255
- can(statement, dialect7) {
24256
- return statement.type === "alter_table_drop_column" && dialect7 === "postgresql";
23605
+ can(statement, dialect4) {
23606
+ return statement.type === "alter_table_drop_column" && dialect4 === "postgresql";
24257
23607
  }
24258
23608
  convert(statement) {
24259
23609
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24262,8 +23612,8 @@ var init_sqlgenerator = __esm({
24262
23612
  }
24263
23613
  };
24264
23614
  MySqlAlterTableDropColumnConvertor = class extends Convertor {
24265
- can(statement, dialect7) {
24266
- return statement.type === "alter_table_drop_column" && dialect7 === "mysql";
23615
+ can(statement, dialect4) {
23616
+ return statement.type === "alter_table_drop_column" && dialect4 === "mysql";
24267
23617
  }
24268
23618
  convert(statement) {
24269
23619
  const { tableName, columnName } = statement;
@@ -24271,8 +23621,8 @@ var init_sqlgenerator = __esm({
24271
23621
  }
24272
23622
  };
24273
23623
  SQLiteAlterTableDropColumnConvertor = class extends Convertor {
24274
- can(statement, dialect7) {
24275
- return statement.type === "alter_table_drop_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23624
+ can(statement, dialect4) {
23625
+ return statement.type === "alter_table_drop_column" && (dialect4 === "sqlite" || dialect4 === "turso");
24276
23626
  }
24277
23627
  convert(statement) {
24278
23628
  const { tableName, columnName } = statement;
@@ -24280,8 +23630,8 @@ var init_sqlgenerator = __esm({
24280
23630
  }
24281
23631
  };
24282
23632
  PgAlterTableAddColumnConvertor = class extends Convertor {
24283
- can(statement, dialect7) {
24284
- return statement.type === "alter_table_add_column" && dialect7 === "postgresql";
23633
+ can(statement, dialect4) {
23634
+ return statement.type === "alter_table_add_column" && dialect4 === "postgresql";
24285
23635
  }
24286
23636
  convert(statement) {
24287
23637
  const { tableName, column: column7, schema: schema5 } = statement;
@@ -24300,8 +23650,8 @@ var init_sqlgenerator = __esm({
24300
23650
  }
24301
23651
  };
24302
23652
  MySqlAlterTableAddColumnConvertor = class extends Convertor {
24303
- can(statement, dialect7) {
24304
- return statement.type === "alter_table_add_column" && dialect7 === "mysql";
23653
+ can(statement, dialect4) {
23654
+ return statement.type === "alter_table_add_column" && dialect4 === "mysql";
24305
23655
  }
24306
23656
  convert(statement) {
24307
23657
  const { tableName, column: column7 } = statement;
@@ -24324,8 +23674,8 @@ var init_sqlgenerator = __esm({
24324
23674
  }
24325
23675
  };
24326
23676
  SQLiteAlterTableAddColumnConvertor = class extends Convertor {
24327
- can(statement, dialect7) {
24328
- return statement.type === "sqlite_alter_table_add_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23677
+ can(statement, dialect4) {
23678
+ return statement.type === "sqlite_alter_table_add_column" && (dialect4 === "sqlite" || dialect4 === "turso");
24329
23679
  }
24330
23680
  convert(statement) {
24331
23681
  const { tableName, column: column7, referenceData } = statement;
@@ -24340,8 +23690,8 @@ var init_sqlgenerator = __esm({
24340
23690
  }
24341
23691
  };
24342
23692
  PgAlterTableAlterColumnSetTypeConvertor = class extends Convertor {
24343
- can(statement, dialect7) {
24344
- return statement.type === "alter_table_alter_column_set_type" && dialect7 === "postgresql";
23693
+ can(statement, dialect4) {
23694
+ return statement.type === "alter_table_alter_column_set_type" && dialect4 === "postgresql";
24345
23695
  }
24346
23696
  convert(statement) {
24347
23697
  const { tableName, columnName, newDataType, schema: schema5 } = statement;
@@ -24350,8 +23700,8 @@ var init_sqlgenerator = __esm({
24350
23700
  }
24351
23701
  };
24352
23702
  PgAlterTableAlterColumnSetDefaultConvertor = class extends Convertor {
24353
- can(statement, dialect7) {
24354
- return statement.type === "alter_table_alter_column_set_default" && dialect7 === "postgresql";
23703
+ can(statement, dialect4) {
23704
+ return statement.type === "alter_table_alter_column_set_default" && dialect4 === "postgresql";
24355
23705
  }
24356
23706
  convert(statement) {
24357
23707
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24360,8 +23710,8 @@ var init_sqlgenerator = __esm({
24360
23710
  }
24361
23711
  };
24362
23712
  PgAlterTableAlterColumnDropDefaultConvertor = class extends Convertor {
24363
- can(statement, dialect7) {
24364
- return statement.type === "alter_table_alter_column_drop_default" && dialect7 === "postgresql";
23713
+ can(statement, dialect4) {
23714
+ return statement.type === "alter_table_alter_column_drop_default" && dialect4 === "postgresql";
24365
23715
  }
24366
23716
  convert(statement) {
24367
23717
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24370,8 +23720,8 @@ var init_sqlgenerator = __esm({
24370
23720
  }
24371
23721
  };
24372
23722
  PgAlterTableAlterColumnDropGeneratedConvertor = class extends Convertor {
24373
- can(statement, dialect7) {
24374
- return statement.type === "alter_table_alter_column_drop_generated" && dialect7 === "postgresql";
23723
+ can(statement, dialect4) {
23724
+ return statement.type === "alter_table_alter_column_drop_generated" && dialect4 === "postgresql";
24375
23725
  }
24376
23726
  convert(statement) {
24377
23727
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24380,8 +23730,8 @@ var init_sqlgenerator = __esm({
24380
23730
  }
24381
23731
  };
24382
23732
  PgAlterTableAlterColumnSetExpressionConvertor = class extends Convertor {
24383
- can(statement, dialect7) {
24384
- return statement.type === "alter_table_alter_column_set_generated" && dialect7 === "postgresql";
23733
+ can(statement, dialect4) {
23734
+ return statement.type === "alter_table_alter_column_set_generated" && dialect4 === "postgresql";
24385
23735
  }
24386
23736
  convert(statement) {
24387
23737
  const {
@@ -24418,8 +23768,8 @@ var init_sqlgenerator = __esm({
24418
23768
  }
24419
23769
  };
24420
23770
  PgAlterTableAlterColumnAlterrGeneratedConvertor = class extends Convertor {
24421
- can(statement, dialect7) {
24422
- return statement.type === "alter_table_alter_column_alter_generated" && dialect7 === "postgresql";
23771
+ can(statement, dialect4) {
23772
+ return statement.type === "alter_table_alter_column_alter_generated" && dialect4 === "postgresql";
24423
23773
  }
24424
23774
  convert(statement) {
24425
23775
  const {
@@ -24456,8 +23806,8 @@ var init_sqlgenerator = __esm({
24456
23806
  }
24457
23807
  };
24458
23808
  SqliteAlterTableAlterColumnDropGeneratedConvertor = class extends Convertor {
24459
- can(statement, dialect7) {
24460
- return statement.type === "alter_table_alter_column_drop_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23809
+ can(statement, dialect4) {
23810
+ return statement.type === "alter_table_alter_column_drop_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
24461
23811
  }
24462
23812
  convert(statement) {
24463
23813
  const {
@@ -24497,8 +23847,8 @@ var init_sqlgenerator = __esm({
24497
23847
  }
24498
23848
  };
24499
23849
  SqliteAlterTableAlterColumnSetExpressionConvertor = class extends Convertor {
24500
- can(statement, dialect7) {
24501
- return statement.type === "alter_table_alter_column_set_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23850
+ can(statement, dialect4) {
23851
+ return statement.type === "alter_table_alter_column_set_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
24502
23852
  }
24503
23853
  convert(statement) {
24504
23854
  const {
@@ -24538,8 +23888,8 @@ var init_sqlgenerator = __esm({
24538
23888
  }
24539
23889
  };
24540
23890
  SqliteAlterTableAlterColumnAlterGeneratedConvertor = class extends Convertor {
24541
- can(statement, dialect7) {
24542
- return statement.type === "alter_table_alter_column_alter_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23891
+ can(statement, dialect4) {
23892
+ return statement.type === "alter_table_alter_column_alter_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
24543
23893
  }
24544
23894
  convert(statement) {
24545
23895
  const {
@@ -24579,8 +23929,8 @@ var init_sqlgenerator = __esm({
24579
23929
  }
24580
23930
  };
24581
23931
  MySqlAlterTableAlterColumnAlterrGeneratedConvertor = class extends Convertor {
24582
- can(statement, dialect7) {
24583
- return statement.type === "alter_table_alter_column_alter_generated" && dialect7 === "mysql";
23932
+ can(statement, dialect4) {
23933
+ return statement.type === "alter_table_alter_column_alter_generated" && dialect4 === "mysql";
24584
23934
  }
24585
23935
  convert(statement) {
24586
23936
  const {
@@ -24617,24 +23967,24 @@ var init_sqlgenerator = __esm({
24617
23967
  }
24618
23968
  };
24619
23969
  MySqlAlterTableAddPk = class extends Convertor {
24620
- can(statement, dialect7) {
24621
- return statement.type === "alter_table_alter_column_set_pk" && dialect7 === "mysql";
23970
+ can(statement, dialect4) {
23971
+ return statement.type === "alter_table_alter_column_set_pk" && dialect4 === "mysql";
24622
23972
  }
24623
23973
  convert(statement) {
24624
23974
  return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`;
24625
23975
  }
24626
23976
  };
24627
23977
  MySqlAlterTableDropPk = class extends Convertor {
24628
- can(statement, dialect7) {
24629
- return statement.type === "alter_table_alter_column_drop_pk" && dialect7 === "mysql";
23978
+ can(statement, dialect4) {
23979
+ return statement.type === "alter_table_alter_column_drop_pk" && dialect4 === "mysql";
24630
23980
  }
24631
23981
  convert(statement) {
24632
23982
  return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`;
24633
23983
  }
24634
23984
  };
24635
23985
  LibSQLModifyColumn = class extends Convertor {
24636
- can(statement, dialect7) {
24637
- return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default") && dialect7 === "turso";
23986
+ can(statement, dialect4) {
23987
+ return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default") && dialect4 === "turso";
24638
23988
  }
24639
23989
  convert(statement, json2) {
24640
23990
  const { tableName, columnName } = statement;
@@ -24694,8 +24044,8 @@ var init_sqlgenerator = __esm({
24694
24044
  }
24695
24045
  };
24696
24046
  MySqlModifyColumn = class extends Convertor {
24697
- can(statement, dialect7) {
24698
- return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_drop_on_update" || statement.type === "alter_table_alter_column_set_on_update" || statement.type === "alter_table_alter_column_set_autoincrement" || statement.type === "alter_table_alter_column_drop_autoincrement" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default" || statement.type === "alter_table_alter_column_set_generated" || statement.type === "alter_table_alter_column_drop_generated") && dialect7 === "mysql";
24047
+ can(statement, dialect4) {
24048
+ return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_drop_on_update" || statement.type === "alter_table_alter_column_set_on_update" || statement.type === "alter_table_alter_column_set_autoincrement" || statement.type === "alter_table_alter_column_drop_autoincrement" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default" || statement.type === "alter_table_alter_column_set_generated" || statement.type === "alter_table_alter_column_drop_generated") && dialect4 === "mysql";
24699
24049
  }
24700
24050
  convert(statement) {
24701
24051
  var _a, _b, _c, _d, _e, _f, _g;
@@ -24832,8 +24182,8 @@ var init_sqlgenerator = __esm({
24832
24182
  }
24833
24183
  };
24834
24184
  PgAlterTableCreateCompositePrimaryKeyConvertor = class extends Convertor {
24835
- can(statement, dialect7) {
24836
- return statement.type === "create_composite_pk" && dialect7 === "postgresql";
24185
+ can(statement, dialect4) {
24186
+ return statement.type === "create_composite_pk" && dialect4 === "postgresql";
24837
24187
  }
24838
24188
  convert(statement) {
24839
24189
  const { name, columns } = PgSquasher.unsquashPK(statement.data);
@@ -24842,8 +24192,8 @@ var init_sqlgenerator = __esm({
24842
24192
  }
24843
24193
  };
24844
24194
  PgAlterTableDeleteCompositePrimaryKeyConvertor = class extends Convertor {
24845
- can(statement, dialect7) {
24846
- return statement.type === "delete_composite_pk" && dialect7 === "postgresql";
24195
+ can(statement, dialect4) {
24196
+ return statement.type === "delete_composite_pk" && dialect4 === "postgresql";
24847
24197
  }
24848
24198
  convert(statement) {
24849
24199
  const { name, columns } = PgSquasher.unsquashPK(statement.data);
@@ -24852,8 +24202,8 @@ var init_sqlgenerator = __esm({
24852
24202
  }
24853
24203
  };
24854
24204
  PgAlterTableAlterCompositePrimaryKeyConvertor = class extends Convertor {
24855
- can(statement, dialect7) {
24856
- return statement.type === "alter_composite_pk" && dialect7 === "postgresql";
24205
+ can(statement, dialect4) {
24206
+ return statement.type === "alter_composite_pk" && dialect4 === "postgresql";
24857
24207
  }
24858
24208
  convert(statement) {
24859
24209
  const { name, columns } = PgSquasher.unsquashPK(statement.old);
@@ -24866,8 +24216,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24866
24216
  }
24867
24217
  };
24868
24218
  MySqlAlterTableCreateCompositePrimaryKeyConvertor = class extends Convertor {
24869
- can(statement, dialect7) {
24870
- return statement.type === "create_composite_pk" && dialect7 === "mysql";
24219
+ can(statement, dialect4) {
24220
+ return statement.type === "create_composite_pk" && dialect4 === "mysql";
24871
24221
  }
24872
24222
  convert(statement) {
24873
24223
  const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -24875,8 +24225,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24875
24225
  }
24876
24226
  };
24877
24227
  MySqlAlterTableDeleteCompositePrimaryKeyConvertor = class extends Convertor {
24878
- can(statement, dialect7) {
24879
- return statement.type === "delete_composite_pk" && dialect7 === "mysql";
24228
+ can(statement, dialect4) {
24229
+ return statement.type === "delete_composite_pk" && dialect4 === "mysql";
24880
24230
  }
24881
24231
  convert(statement) {
24882
24232
  const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -24884,8 +24234,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24884
24234
  }
24885
24235
  };
24886
24236
  MySqlAlterTableAlterCompositePrimaryKeyConvertor = class extends Convertor {
24887
- can(statement, dialect7) {
24888
- return statement.type === "alter_composite_pk" && dialect7 === "mysql";
24237
+ can(statement, dialect4) {
24238
+ return statement.type === "alter_composite_pk" && dialect4 === "mysql";
24889
24239
  }
24890
24240
  convert(statement) {
24891
24241
  const { name, columns } = MySqlSquasher.unsquashPK(statement.old);
@@ -24896,8 +24246,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24896
24246
  }
24897
24247
  };
24898
24248
  PgAlterTableAlterColumnSetPrimaryKeyConvertor = class extends Convertor {
24899
- can(statement, dialect7) {
24900
- return statement.type === "alter_table_alter_column_set_pk" && dialect7 === "postgresql";
24249
+ can(statement, dialect4) {
24250
+ return statement.type === "alter_table_alter_column_set_pk" && dialect4 === "postgresql";
24901
24251
  }
24902
24252
  convert(statement) {
24903
24253
  const { tableName, columnName } = statement;
@@ -24906,8 +24256,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24906
24256
  }
24907
24257
  };
24908
24258
  PgAlterTableAlterColumnDropPrimaryKeyConvertor = class extends Convertor {
24909
- can(statement, dialect7) {
24910
- return statement.type === "alter_table_alter_column_drop_pk" && dialect7 === "postgresql";
24259
+ can(statement, dialect4) {
24260
+ return statement.type === "alter_table_alter_column_drop_pk" && dialect4 === "postgresql";
24911
24261
  }
24912
24262
  convert(statement) {
24913
24263
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24930,8 +24280,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24930
24280
  }
24931
24281
  };
24932
24282
  PgAlterTableAlterColumnSetNotNullConvertor = class extends Convertor {
24933
- can(statement, dialect7) {
24934
- return statement.type === "alter_table_alter_column_set_notnull" && dialect7 === "postgresql";
24283
+ can(statement, dialect4) {
24284
+ return statement.type === "alter_table_alter_column_set_notnull" && dialect4 === "postgresql";
24935
24285
  }
24936
24286
  convert(statement) {
24937
24287
  const { tableName, columnName } = statement;
@@ -24940,8 +24290,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24940
24290
  }
24941
24291
  };
24942
24292
  PgAlterTableAlterColumnDropNotNullConvertor = class extends Convertor {
24943
- can(statement, dialect7) {
24944
- return statement.type === "alter_table_alter_column_drop_notnull" && dialect7 === "postgresql";
24293
+ can(statement, dialect4) {
24294
+ return statement.type === "alter_table_alter_column_drop_notnull" && dialect4 === "postgresql";
24945
24295
  }
24946
24296
  convert(statement) {
24947
24297
  const { tableName, columnName } = statement;
@@ -24950,8 +24300,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24950
24300
  }
24951
24301
  };
24952
24302
  PgCreateForeignKeyConvertor = class extends Convertor {
24953
- can(statement, dialect7) {
24954
- return statement.type === "create_reference" && dialect7 === "postgresql";
24303
+ can(statement, dialect4) {
24304
+ return statement.type === "create_reference" && dialect4 === "postgresql";
24955
24305
  }
24956
24306
  convert(statement) {
24957
24307
  const {
@@ -24980,8 +24330,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24980
24330
  }
24981
24331
  };
24982
24332
  LibSQLCreateForeignKeyConvertor = class extends Convertor {
24983
- can(statement, dialect7) {
24984
- return statement.type === "create_reference" && dialect7 === "turso";
24333
+ can(statement, dialect4) {
24334
+ return statement.type === "create_reference" && dialect4 === "turso";
24985
24335
  }
24986
24336
  convert(statement, json2, action) {
24987
24337
  const { columnsFrom, columnsTo, tableFrom, onDelete, onUpdate, tableTo } = action === "push" ? SQLiteSquasher.unsquashPushFK(statement.data) : SQLiteSquasher.unsquashFK(statement.data);
@@ -24997,8 +24347,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24997
24347
  }
24998
24348
  };
24999
24349
  MySqlCreateForeignKeyConvertor = class extends Convertor {
25000
- can(statement, dialect7) {
25001
- return statement.type === "create_reference" && dialect7 === "mysql";
24350
+ can(statement, dialect4) {
24351
+ return statement.type === "create_reference" && dialect4 === "mysql";
25002
24352
  }
25003
24353
  convert(statement) {
25004
24354
  const {
@@ -25018,8 +24368,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25018
24368
  }
25019
24369
  };
25020
24370
  PgAlterForeignKeyConvertor = class extends Convertor {
25021
- can(statement, dialect7) {
25022
- return statement.type === "alter_reference" && dialect7 === "postgresql";
24371
+ can(statement, dialect4) {
24372
+ return statement.type === "alter_reference" && dialect4 === "postgresql";
25023
24373
  }
25024
24374
  convert(statement) {
25025
24375
  const newFk = PgSquasher.unsquashFK(statement.data);
@@ -25043,8 +24393,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25043
24393
  }
25044
24394
  };
25045
24395
  PgDeleteForeignKeyConvertor = class extends Convertor {
25046
- can(statement, dialect7) {
25047
- return statement.type === "delete_reference" && dialect7 === "postgresql";
24396
+ can(statement, dialect4) {
24397
+ return statement.type === "delete_reference" && dialect4 === "postgresql";
25048
24398
  }
25049
24399
  convert(statement) {
25050
24400
  const tableFrom = statement.tableName;
@@ -25055,8 +24405,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25055
24405
  }
25056
24406
  };
25057
24407
  MySqlDeleteForeignKeyConvertor = class extends Convertor {
25058
- can(statement, dialect7) {
25059
- return statement.type === "delete_reference" && dialect7 === "mysql";
24408
+ can(statement, dialect4) {
24409
+ return statement.type === "delete_reference" && dialect4 === "mysql";
25060
24410
  }
25061
24411
  convert(statement) {
25062
24412
  const tableFrom = statement.tableName;
@@ -25066,8 +24416,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25066
24416
  }
25067
24417
  };
25068
24418
  CreatePgIndexConvertor = class extends Convertor {
25069
- can(statement, dialect7) {
25070
- return statement.type === "create_index_pg" && dialect7 === "postgresql";
24419
+ can(statement, dialect4) {
24420
+ return statement.type === "create_index_pg" && dialect4 === "postgresql";
25071
24421
  }
25072
24422
  convert(statement) {
25073
24423
  const {
@@ -25098,8 +24448,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25098
24448
  }
25099
24449
  };
25100
24450
  CreateMySqlIndexConvertor = class extends Convertor {
25101
- can(statement, dialect7) {
25102
- return statement.type === "create_index" && dialect7 === "mysql";
24451
+ can(statement, dialect4) {
24452
+ return statement.type === "create_index" && dialect4 === "mysql";
25103
24453
  }
25104
24454
  convert(statement) {
25105
24455
  const { name, columns, isUnique } = MySqlSquasher.unsquashIdx(
@@ -25114,8 +24464,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25114
24464
  }
25115
24465
  };
25116
24466
  CreateSqliteIndexConvertor = class extends Convertor {
25117
- can(statement, dialect7) {
25118
- return statement.type === "create_index" && (dialect7 === "sqlite" || dialect7 === "turso");
24467
+ can(statement, dialect4) {
24468
+ return statement.type === "create_index" && (dialect4 === "sqlite" || dialect4 === "turso");
25119
24469
  }
25120
24470
  convert(statement) {
25121
24471
  const { name, columns, isUnique, where } = SQLiteSquasher.unsquashIdx(
@@ -25131,8 +24481,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25131
24481
  }
25132
24482
  };
25133
24483
  PgDropIndexConvertor = class extends Convertor {
25134
- can(statement, dialect7) {
25135
- return statement.type === "drop_index" && dialect7 === "postgresql";
24484
+ can(statement, dialect4) {
24485
+ return statement.type === "drop_index" && dialect4 === "postgresql";
25136
24486
  }
25137
24487
  convert(statement) {
25138
24488
  const { name } = PgSquasher.unsquashIdx(statement.data);
@@ -25140,8 +24490,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25140
24490
  }
25141
24491
  };
25142
24492
  PgCreateSchemaConvertor = class extends Convertor {
25143
- can(statement, dialect7) {
25144
- return statement.type === "create_schema" && dialect7 === "postgresql";
24493
+ can(statement, dialect4) {
24494
+ return statement.type === "create_schema" && dialect4 === "postgresql";
25145
24495
  }
25146
24496
  convert(statement) {
25147
24497
  const { name } = statement;
@@ -25150,8 +24500,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25150
24500
  }
25151
24501
  };
25152
24502
  PgRenameSchemaConvertor = class extends Convertor {
25153
- can(statement, dialect7) {
25154
- return statement.type === "rename_schema" && dialect7 === "postgresql";
24503
+ can(statement, dialect4) {
24504
+ return statement.type === "rename_schema" && dialect4 === "postgresql";
25155
24505
  }
25156
24506
  convert(statement) {
25157
24507
  const { from, to } = statement;
@@ -25160,8 +24510,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25160
24510
  }
25161
24511
  };
25162
24512
  PgDropSchemaConvertor = class extends Convertor {
25163
- can(statement, dialect7) {
25164
- return statement.type === "drop_schema" && dialect7 === "postgresql";
24513
+ can(statement, dialect4) {
24514
+ return statement.type === "drop_schema" && dialect4 === "postgresql";
25165
24515
  }
25166
24516
  convert(statement) {
25167
24517
  const { name } = statement;
@@ -25170,8 +24520,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25170
24520
  }
25171
24521
  };
25172
24522
  PgAlterTableSetSchemaConvertor = class extends Convertor {
25173
- can(statement, dialect7) {
25174
- return statement.type === "alter_table_set_schema" && dialect7 === "postgresql";
24523
+ can(statement, dialect4) {
24524
+ return statement.type === "alter_table_set_schema" && dialect4 === "postgresql";
25175
24525
  }
25176
24526
  convert(statement) {
25177
24527
  const { tableName, schemaFrom, schemaTo } = statement;
@@ -25180,8 +24530,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25180
24530
  }
25181
24531
  };
25182
24532
  PgAlterTableSetNewSchemaConvertor = class extends Convertor {
25183
- can(statement, dialect7) {
25184
- return statement.type === "alter_table_set_new_schema" && dialect7 === "postgresql";
24533
+ can(statement, dialect4) {
24534
+ return statement.type === "alter_table_set_new_schema" && dialect4 === "postgresql";
25185
24535
  }
25186
24536
  convert(statement) {
25187
24537
  const { tableName, to, from } = statement;
@@ -25191,8 +24541,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25191
24541
  }
25192
24542
  };
25193
24543
  PgAlterTableRemoveFromSchemaConvertor = class extends Convertor {
25194
- can(statement, dialect7) {
25195
- return statement.type === "alter_table_remove_from_schema" && dialect7 === "postgresql";
24544
+ can(statement, dialect4) {
24545
+ return statement.type === "alter_table_remove_from_schema" && dialect4 === "postgresql";
25196
24546
  }
25197
24547
  convert(statement) {
25198
24548
  const { tableName, schema: schema5 } = statement;
@@ -25202,8 +24552,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25202
24552
  }
25203
24553
  };
25204
24554
  SqliteDropIndexConvertor = class extends Convertor {
25205
- can(statement, dialect7) {
25206
- return statement.type === "drop_index" && (dialect7 === "sqlite" || dialect7 === "turso");
24555
+ can(statement, dialect4) {
24556
+ return statement.type === "drop_index" && (dialect4 === "sqlite" || dialect4 === "turso");
25207
24557
  }
25208
24558
  convert(statement) {
25209
24559
  const { name } = PgSquasher.unsquashIdx(statement.data);
@@ -25211,8 +24561,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25211
24561
  }
25212
24562
  };
25213
24563
  MySqlDropIndexConvertor = class extends Convertor {
25214
- can(statement, dialect7) {
25215
- return statement.type === "drop_index" && dialect7 === "mysql";
24564
+ can(statement, dialect4) {
24565
+ return statement.type === "drop_index" && dialect4 === "mysql";
25216
24566
  }
25217
24567
  convert(statement) {
25218
24568
  const { name } = MySqlSquasher.unsquashIdx(statement.data);
@@ -25220,8 +24570,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25220
24570
  }
25221
24571
  };
25222
24572
  SQLiteRecreateTableConvertor = class extends Convertor {
25223
- can(statement, dialect7) {
25224
- return statement.type === "recreate_table" && dialect7 === "sqlite";
24573
+ can(statement, dialect4) {
24574
+ return statement.type === "recreate_table" && dialect4 === "sqlite";
25225
24575
  }
25226
24576
  convert(statement) {
25227
24577
  const { tableName, columns, compositePKs, referenceData } = statement;
@@ -25262,8 +24612,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25262
24612
  }
25263
24613
  };
25264
24614
  LibSQLRecreateTableConvertor = class extends Convertor {
25265
- can(statement, dialect7) {
25266
- return statement.type === "recreate_table" && dialect7 === "turso";
24615
+ can(statement, dialect4) {
24616
+ return statement.type === "recreate_table" && dialect4 === "turso";
25267
24617
  }
25268
24618
  convert(statement) {
25269
24619
  const { tableName, columns, compositePKs, referenceData } = statement;
@@ -25309,14 +24659,6 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
25309
24659
  convertors.push(new SQLiteCreateTableConvertor());
25310
24660
  convertors.push(new SQLiteRecreateTableConvertor());
25311
24661
  convertors.push(new LibSQLRecreateTableConvertor());
25312
- convertors.push(new PgCreateViewConvertor());
25313
- convertors.push(new PgDropViewConvertor());
25314
- convertors.push(new PgRenameViewConvertor());
25315
- convertors.push(new PgAlterViewSchemaConvertor());
25316
- convertors.push(new PgAlterViewAddWithOptionConvertor());
25317
- convertors.push(new PgAlterViewDropWithOptionConvertor());
25318
- convertors.push(new PgAlterViewAlterTablespaceConvertor());
25319
- convertors.push(new PgAlterViewAlterUsingConvertor());
25320
24662
  convertors.push(new CreateTypeEnumConvertor());
25321
24663
  convertors.push(new CreatePgSequenceConvertor());
25322
24664
  convertors.push(new DropPgSequenceConvertor());
@@ -25651,7 +24993,7 @@ var init_sqlitePushUtils = __esm({
25651
24993
  });
25652
24994
 
25653
24995
  // src/jsonStatements.ts
25654
- var preparePgCreateTableJson, prepareMySqlCreateTableJson, prepareSQLiteCreateTable, prepareDropTableJson, prepareRenameTableJson, prepareCreateEnumJson, prepareAddValuesToEnumJson, prepareDropEnumJson, prepareMoveEnumJson, prepareRenameEnumJson, prepareCreateSequenceJson, prepareAlterSequenceJson, prepareDropSequenceJson, prepareMoveSequenceJson, prepareRenameSequenceJson, prepareCreateSchemasJson, prepareRenameSchemasJson, prepareDeleteSchemasJson, prepareRenameColumns, _prepareDropColumns, _prepareAddColumns, _prepareSqliteAddColumns, prepareAlterColumnsMysql, preparePgAlterColumns, prepareSqliteAlterColumns, preparePgCreateIndexesJson, prepareCreateIndexesJson, prepareCreateReferencesJson, prepareLibSQLCreateReferencesJson, prepareDropReferencesJson, prepareLibSQLDropReferencesJson, prepareAlterReferencesJson, prepareDropIndexesJson, prepareAddCompositePrimaryKeySqlite, prepareDeleteCompositePrimaryKeySqlite, prepareAlterCompositePrimaryKeySqlite, prepareAddCompositePrimaryKeyPg, prepareDeleteCompositePrimaryKeyPg, prepareAlterCompositePrimaryKeyPg, prepareAddUniqueConstraintPg, prepareDeleteUniqueConstraintPg, prepareAddCompositePrimaryKeyMySql, prepareDeleteCompositePrimaryKeyMySql, prepareAlterCompositePrimaryKeyMySql, preparePgCreateViewJson, preparePgDropViewJson, preparePgRenameViewJson, preparePgAlterViewAlterSchemaJson, preparePgAlterViewAddWithOptionJson, preparePgAlterViewDropWithOptionJson, preparePgAlterViewAlterTablespaceJson, preparePgAlterViewAlterUsingJson;
24996
+ var preparePgCreateTableJson, prepareMySqlCreateTableJson, prepareSQLiteCreateTable, prepareDropTableJson, prepareRenameTableJson, prepareCreateEnumJson, prepareAddValuesToEnumJson, prepareDropEnumJson, prepareMoveEnumJson, prepareRenameEnumJson, prepareCreateSequenceJson, prepareAlterSequenceJson, prepareDropSequenceJson, prepareMoveSequenceJson, prepareRenameSequenceJson, prepareCreateSchemasJson, prepareRenameSchemasJson, prepareDeleteSchemasJson, prepareRenameColumns, _prepareDropColumns, _prepareAddColumns, _prepareSqliteAddColumns, prepareAlterColumnsMysql, preparePgAlterColumns, prepareSqliteAlterColumns, preparePgCreateIndexesJson, prepareCreateIndexesJson, prepareCreateReferencesJson, prepareLibSQLCreateReferencesJson, prepareDropReferencesJson, prepareLibSQLDropReferencesJson, prepareAlterReferencesJson, prepareDropIndexesJson, prepareAddCompositePrimaryKeySqlite, prepareDeleteCompositePrimaryKeySqlite, prepareAlterCompositePrimaryKeySqlite, prepareAddCompositePrimaryKeyPg, prepareDeleteCompositePrimaryKeyPg, prepareAlterCompositePrimaryKeyPg, prepareAddUniqueConstraintPg, prepareDeleteUniqueConstraintPg, prepareAddCompositePrimaryKeyMySql, prepareDeleteCompositePrimaryKeyMySql, prepareAlterCompositePrimaryKeyMySql;
25655
24997
  var init_jsonStatements = __esm({
25656
24998
  "src/jsonStatements.ts"() {
25657
24999
  "use strict";
@@ -26990,81 +26332,6 @@ var init_jsonStatements = __esm({
26990
26332
  };
26991
26333
  });
26992
26334
  };
26993
- preparePgCreateViewJson = (name, schema5, definition, materialized, withNoData = false, withOption, using, tablespace) => {
26994
- return {
26995
- type: "create_view",
26996
- name,
26997
- schema: schema5,
26998
- definition,
26999
- with: withOption,
27000
- materialized,
27001
- withNoData,
27002
- using,
27003
- tablespace
27004
- };
27005
- };
27006
- preparePgDropViewJson = (name, schema5, materialized) => {
27007
- return {
27008
- type: "drop_view",
27009
- name,
27010
- schema: schema5,
27011
- materialized
27012
- };
27013
- };
27014
- preparePgRenameViewJson = (to, from, schema5, materialized) => {
27015
- return {
27016
- type: "rename_view",
27017
- nameTo: to,
27018
- nameFrom: from,
27019
- schema: schema5,
27020
- materialized
27021
- };
27022
- };
27023
- preparePgAlterViewAlterSchemaJson = (to, from, name, materialized) => {
27024
- return {
27025
- type: "alter_view_alter_schema",
27026
- fromSchema: from,
27027
- toSchema: to,
27028
- name,
27029
- materialized
27030
- };
27031
- };
27032
- preparePgAlterViewAddWithOptionJson = (name, schema5, materialized, withOption) => {
27033
- return {
27034
- type: "alter_view_add_with_option",
27035
- name,
27036
- schema: schema5,
27037
- materialized,
27038
- with: withOption
27039
- };
27040
- };
27041
- preparePgAlterViewDropWithOptionJson = (name, schema5, materialized, withOption) => {
27042
- return {
27043
- type: "alter_view_drop_with_option",
27044
- name,
27045
- schema: schema5,
27046
- materialized,
27047
- with: withOption
27048
- };
27049
- };
27050
- preparePgAlterViewAlterTablespaceJson = (name, schema5, materialized, to) => {
27051
- return {
27052
- type: "alter_view_alter_tablespace",
27053
- name,
27054
- schema: schema5,
27055
- materialized,
27056
- toTablespace: to
27057
- };
27058
- };
27059
- preparePgAlterViewAlterUsingJson = (name, schema5, materialized, to) => {
27060
- return {
27061
- type: "alter_view_alter_using",
27062
- name,
27063
- schema: schema5,
27064
- materialized,
27065
- toUsing: to
27066
- };
27067
- };
27068
26335
  }
27069
26336
  });
27070
26337
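The hunk above removes the whole family of Postgres view statement builders from src/jsonStatements.ts, matching the Pg*ViewConvertor registrations dropped earlier in this diff. Each of these helpers only assembled a plain JSON statement whose type field is what a convertor's can() matches on; a small sketch of that shape, with field names copied from the removed preparePgCreateViewJson and hypothetical values:

// Shape of the JSON statement the removed preparePgCreateViewJson produced
// (field names from the deleted code above; the values here are hypothetical).
const createViewStatement = {
  type: "create_view",
  name: "active_users",
  schema: "public",
  definition: "select * from users where active",
  with: undefined,
  materialized: false,
  withNoData: false,
  using: undefined,
  tablespace: undefined,
};
// A matching convertor would have claimed it with:
//   statement.type === "create_view" && dialect === "postgresql"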
 
@@ -27350,7 +26617,7 @@ var init_statementCombiner = __esm({
27350
26617
  });
27351
26618
 
27352
26619
  // src/snapshotsDiffer.ts
27353
- var makeChanged, makeSelfOrChanged, makePatched, columnSchema, alteredColumnSchema, enumSchema2, changedEnumSchema, tableScheme, alteredTableScheme, alteredViewSchema, diffResultScheme, diffResultSchemeMysql, diffResultSchemeSQLite, schemaChangeFor, nameChangeFor, nameSchemaChangeFor, columnChangeFor, applyPgSnapshotsDiff, applyMysqlSnapshotsDiff, applySqliteSnapshotsDiff, applyLibSQLSnapshotsDiff;
26620
+ var makeChanged, makeSelfOrChanged, makePatched, columnSchema, alteredColumnSchema, enumSchema2, changedEnumSchema, tableScheme, alteredTableScheme, diffResultScheme, diffResultSchemeMysql, diffResultSchemeSQLite, schemaChangeFor, nameChangeFor, nameSchemaChangeFor, columnChangeFor, applyPgSnapshotsDiff, applyMysqlSnapshotsDiff, applySqliteSnapshotsDiff, applyLibSQLSnapshotsDiff;
27354
26621
  var init_snapshotsDiffer = __esm({
27355
26622
  "src/snapshotsDiffer.ts"() {
27356
26623
  "use strict";
@@ -27498,42 +26765,10 @@ var init_snapshotsDiffer = __esm({
27498
26765
  })
27499
26766
  )
27500
26767
  }).strict();
27501
- alteredViewSchema = objectType({
27502
- name: stringType(),
27503
- schema: stringType(),
27504
- deletedWithOption: mergedViewWithOption.optional(),
27505
- addedWithOption: mergedViewWithOption.optional(),
27506
- alteredWith: objectType({
27507
- addedWith: mergedViewWithOption.optional(),
27508
- deletedWith: mergedViewWithOption.optional(),
27509
- alterWith: mergedViewWithOption.optional()
27510
- }).strict(),
27511
- alteredSchema: objectType({
27512
- __old: stringType(),
27513
- __new: stringType()
27514
- }).strict().optional(),
27515
- alteredDefinition: objectType({
27516
- __old: stringType(),
27517
- __new: stringType()
27518
- }).strict().optional(),
27519
- alteredExisting: objectType({
27520
- __old: booleanType(),
27521
- __new: booleanType()
27522
- }).strict().optional(),
27523
- alteredTablespace: objectType({
27524
- __old: stringType(),
27525
- __new: stringType()
27526
- }).strict().optional(),
27527
- alteredUsing: objectType({
27528
- __old: stringType(),
27529
- __new: stringType()
27530
- }).strict().optional()
27531
- }).strict();
27532
26768
  diffResultScheme = objectType({
27533
26769
  alteredTablesWithColumns: alteredTableScheme.array(),
27534
26770
  alteredEnums: changedEnumSchema.array(),
27535
- alteredSequences: sequenceSquashed.array(),
27536
- alteredViews: alteredViewSchema.array()
26771
+ alteredSequences: sequenceSquashed.array()
27537
26772
  }).strict();
27538
26773
  diffResultSchemeMysql = objectType({
27539
26774
  alteredTablesWithColumns: alteredTableScheme.array(),
@@ -27588,7 +26823,7 @@ var init_snapshotsDiffer = __esm({
27588
26823
  }
27589
26824
  return column7;
27590
26825
  };
27591
- applyPgSnapshotsDiff = async (json1, json2, schemasResolver2, enumsResolver2, sequencesResolver2, tablesResolver2, columnsResolver2, viewsResolver2, prevFull, curFull, action) => {
26826
+ applyPgSnapshotsDiff = async (json1, json2, schemasResolver2, enumsResolver2, sequencesResolver2, tablesResolver2, columnsResolver2, prevFull, curFull, action) => {
27592
26827
  const schemasDiff = diffSchemasOrTables(json1.schemas, json2.schemas);
27593
26828
  const {
27594
26829
  created: createdSchemas,
@@ -27816,40 +27051,7 @@ var init_snapshotsDiffer = __esm({
27816
27051
  return [tableKey2, tableValue];
27817
27052
  }
27818
27053
  );
27819
- const viewsDiff = diffSchemasOrTables(json1.views, json2.views);
27820
- const {
27821
- created: createdViews,
27822
- deleted: deletedViews,
27823
- renamed: renamedViews,
27824
- moved: movedViews
27825
- } = await viewsResolver2({
27826
- created: viewsDiff.added,
27827
- deleted: viewsDiff.deleted
27828
- });
27829
- const renamesViewDic = {};
27830
- renamedViews.forEach((it) => {
27831
- renamesViewDic[`${it.from.schema}.${it.from.name}`] = { to: it.to.name, from: it.from.name };
27832
- });
27833
- const movedViewDic = {};
27834
- movedViews.forEach((it) => {
27835
- movedViewDic[`${it.schemaFrom}.${it.name}`] = { to: it.schemaTo, from: it.schemaFrom };
27836
- });
27837
- const viewsPatchedSnap1 = copy(columnsPatchedSnap1);
27838
- viewsPatchedSnap1.views = mapEntries(
27839
- viewsPatchedSnap1.views,
27840
- (viewKey, viewValue) => {
27841
- const rename = renamesViewDic[`${viewValue.schema}.${viewValue.name}`];
27842
- const moved = movedViewDic[`${viewValue.schema}.${viewValue.name}`];
27843
- if (rename) {
27844
- viewValue.name = rename.to;
27845
- viewKey = `${viewValue.schema}.${viewValue.name}`;
27846
- }
27847
- if (moved)
27848
- viewKey = `${moved.to}.${viewValue.name}`;
27849
- return [viewKey, viewValue];
27850
- }
27851
- );
27852
- const diffResult = applyJsonDiff(viewsPatchedSnap1, json2);
27054
+ const diffResult = applyJsonDiff(columnsPatchedSnap1, json2);
27853
27055
  const typedResult = diffResultScheme.parse(diffResult);
27854
27056
  const jsonStatements = [];
27855
27057
  const jsonCreateIndexesForCreatedTables = createdTables.map((it) => {
@@ -28101,137 +27303,6 @@ var init_snapshotsDiffer = __esm({
28101
27303
  const createTables = createdTables.map((it) => {
28102
27304
  return preparePgCreateTableJson(it, curFull);
28103
27305
  });
28104
- const createViews = [];
28105
- const dropViews = [];
28106
- const renameViews = [];
28107
- const alterViews = [];
28108
- createViews.push(
28109
- ...createdViews.filter((it) => !it.isExisting).map((it) => {
28110
- return preparePgCreateViewJson(
28111
- it.name,
28112
- it.schema,
28113
- it.definition,
28114
- it.materialized,
28115
- it.withNoData,
28116
- it.with,
28117
- it.using,
28118
- it.tablespace
28119
- );
28120
- })
28121
- );
28122
- dropViews.push(
28123
- ...deletedViews.filter((it) => !it.isExisting).map((it) => {
28124
- return preparePgDropViewJson(it.name, it.schema, it.materialized);
28125
- })
28126
- );
28127
- renameViews.push(
28128
- ...renamedViews.filter((it) => !it.to.isExisting).map((it) => {
28129
- return preparePgRenameViewJson(it.to.name, it.from.name, it.to.schema, it.to.materialized);
28130
- })
28131
- );
28132
- alterViews.push(
28133
- ...movedViews.filter((it) => !json2.views[`${it.schemaTo}.${it.name}`].isExisting).map((it) => {
28134
- return preparePgAlterViewAlterSchemaJson(
28135
- it.schemaTo,
28136
- it.schemaFrom,
28137
- it.name,
28138
- json2.views[`${it.schemaTo}.${it.name}`].materialized
28139
- );
28140
- })
28141
- );
28142
- const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[`${it.schema}.${it.name}`].isExisting);
28143
- for (const alteredView of alteredViews) {
28144
- const viewKey = `${alteredView.schema}.${alteredView.name}`;
28145
- const { materialized, with: withOption, definition, withNoData, using, tablespace } = json2.views[viewKey];
28146
- if (alteredView.alteredExisting || alteredView.alteredDefinition && action !== "push") {
28147
- dropViews.push(preparePgDropViewJson(alteredView.name, alteredView.schema, materialized));
28148
- createViews.push(
28149
- preparePgCreateViewJson(
28150
- alteredView.name,
28151
- alteredView.schema,
28152
- definition,
28153
- materialized,
28154
- withNoData,
28155
- withOption,
28156
- using,
28157
- tablespace
28158
- )
28159
- );
28160
- continue;
28161
- }
28162
- if (alteredView.addedWithOption) {
28163
- alterViews.push(
28164
- preparePgAlterViewAddWithOptionJson(
28165
- alteredView.name,
28166
- alteredView.schema,
28167
- materialized,
28168
- alteredView.addedWithOption
28169
- )
28170
- );
28171
- }
28172
- if (alteredView.deletedWithOption) {
28173
- alterViews.push(
28174
- preparePgAlterViewDropWithOptionJson(
28175
- alteredView.name,
28176
- alteredView.schema,
28177
- materialized,
28178
- alteredView.deletedWithOption
28179
- )
28180
- );
28181
- }
28182
- if (alteredView.alteredWith) {
28183
- if (alteredView.alteredWith.addedWith) {
28184
- alterViews.push(
28185
- preparePgAlterViewAddWithOptionJson(
28186
- alteredView.name,
28187
- alteredView.schema,
28188
- materialized,
28189
- alteredView.alteredWith.addedWith
28190
- )
28191
- );
28192
- }
28193
- if (alteredView.alteredWith.deletedWith) {
28194
- alterViews.push(
28195
- preparePgAlterViewDropWithOptionJson(
28196
- alteredView.name,
28197
- alteredView.schema,
28198
- materialized,
28199
- alteredView.alteredWith.deletedWith
28200
- )
28201
- );
28202
- }
28203
- if (alteredView.alteredWith.alterWith) {
28204
- alterViews.push(
28205
- preparePgAlterViewAddWithOptionJson(
28206
- alteredView.name,
28207
- alteredView.schema,
28208
- materialized,
28209
- alteredView.alteredWith.alterWith
28210
- )
28211
- );
28212
- }
28213
- }
28214
- if (alteredView.alteredTablespace) {
28215
- alterViews.push(
28216
- preparePgAlterViewAlterTablespaceJson(
28217
- alteredView.name,
28218
- alteredView.schema,
28219
- materialized,
28220
- alteredView.alteredTablespace.__new
28221
- )
28222
- );
28223
- }
28224
- if (alteredView.alteredUsing) {
28225
- alterViews.push(
28226
- preparePgAlterViewAlterUsingJson(
28227
- alteredView.name,
28228
- alteredView.schema,
28229
- materialized,
28230
- alteredView.alteredUsing.__new
28231
- )
28232
- );
28233
- }
28234
- }
28235
27306
  jsonStatements.push(...createSchemas);
28236
27307
  jsonStatements.push(...renameSchemas);
28237
27308
  jsonStatements.push(...createEnums);
@@ -28243,9 +27314,6 @@ var init_snapshotsDiffer = __esm({
28243
27314
  jsonStatements.push(...renameSequences);
28244
27315
  jsonStatements.push(...jsonAlterSequences);
28245
27316
  jsonStatements.push(...createTables);
28246
- jsonStatements.push(...dropViews);
28247
- jsonStatements.push(...renameViews);
28248
- jsonStatements.push(...alterViews);
28249
27317
  jsonStatements.push(...jsonDropTables);
28250
27318
  jsonStatements.push(...jsonSetTableSchemas);
28251
27319
  jsonStatements.push(...jsonRenameTables);
@@ -28265,7 +27333,6 @@ var init_snapshotsDiffer = __esm({
28265
27333
  jsonStatements.push(...jsonAlteredCompositePKs);
28266
27334
  jsonStatements.push(...jsonAddedUniqueConstraints);
28267
27335
  jsonStatements.push(...jsonAlteredUniqueConstraints);
28268
- jsonStatements.push(...createViews);
28269
27336
  jsonStatements.push(...dropEnums);
28270
27337
  jsonStatements.push(...dropSequences);
28271
27338
  jsonStatements.push(...dropSchemas);
@@ -30544,10 +29611,9 @@ __export(migrate_exports, {
30544
29611
  schemasResolver: () => schemasResolver,
30545
29612
  sequencesResolver: () => sequencesResolver,
30546
29613
  tablesResolver: () => tablesResolver,
30547
- viewsResolver: () => viewsResolver,
30548
29614
  writeResult: () => writeResult
30549
29615
  });
30550
- var import_fs5, import_hanji3, import_path4, schemasResolver, tablesResolver, viewsResolver, sequencesResolver, enumsResolver, columnsResolver, prepareAndMigratePg, preparePgPush, prepareMySQLPush, prepareAndMigrateMysql, prepareAndMigrateSqlite, prepareAndMigrateLibSQL, prepareSQLitePush, prepareLibSQLPush, promptColumnsConflicts, promptNamedWithSchemasConflict, promptSchemasConflict, BREAKPOINT, writeResult, embeddedMigrations, prepareSnapshotFolderName, two;
29616
+ var import_fs5, import_hanji3, import_path4, schemasResolver, tablesResolver, sequencesResolver, enumsResolver, columnsResolver, prepareAndMigratePg, preparePgPush, prepareMySQLPush, prepareAndMigrateMysql, prepareAndMigrateSqlite, prepareAndMigrateLibSQL, prepareSQLitePush, prepareLibSQLPush, promptColumnsConflicts, promptNamedWithSchemasConflict, promptSchemasConflict, BREAKPOINT, writeResult, embeddedMigrations, prepareSnapshotFolderName, two;
30551
29617
  var init_migrate = __esm({
30552
29618
  "src/cli/commands/migrate.ts"() {
30553
29619
  "use strict";
@@ -30594,24 +29660,6 @@ var init_migrate = __esm({
30594
29660
  throw e2;
30595
29661
  }
30596
29662
  };
30597
- viewsResolver = async (input) => {
30598
- try {
30599
- const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict(
30600
- input.created,
30601
- input.deleted,
30602
- "view"
30603
- );
30604
- return {
30605
- created,
30606
- deleted,
30607
- moved,
30608
- renamed
30609
- };
30610
- } catch (e2) {
30611
- console.error(e2);
30612
- throw e2;
30613
- }
30614
- };
30615
29663
  sequencesResolver = async (input) => {
30616
29664
  try {
30617
29665
  const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict(
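The viewsResolver removed above followed the same contract as the tablesResolver and sequencesResolver that remain: it receives the raw { created, deleted } diff, lets the user resolve renames and schema moves, and returns { created, deleted, moved, renamed }. A non-interactive sketch of that contract (a stand-in for illustration, not the real prompting logic):

// Minimal stand-in for the resolver contract shared by these resolvers:
// treat everything as newly created or deleted, detect no renames or moves.
const noopResolver = async (input) => ({
  created: input.created, // entities present only in the new snapshot
  deleted: input.deleted, // entities present only in the old snapshot
  moved: [],              // nothing reassigned to another schema
  renamed: [],            // nothing treated as a rename
});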
@@ -30665,6 +29713,7 @@ var init_migrate = __esm({
30665
29713
  prepareAndMigratePg = async (config) => {
30666
29714
  const outFolder = config.out;
30667
29715
  const schemaPath = config.schema;
29716
+ const casing2 = config.casing;
30668
29717
  try {
30669
29718
  assertV1OutFolder(outFolder);
30670
29719
  const { snapshots, journal } = prepareMigrationFolder(
@@ -30673,7 +29722,8 @@ var init_migrate = __esm({
30673
29722
  );
30674
29723
  const { prev, cur, custom: custom2 } = await preparePgMigrationSnapshot(
30675
29724
  snapshots,
30676
- schemaPath
29725
+ schemaPath,
29726
+ casing2
30677
29727
  );
30678
29728
  const validatedPrev = pgSchema.parse(prev);
30679
29729
  const validatedCur = pgSchema.parse(cur);
@@ -30700,7 +29750,6 @@ var init_migrate = __esm({
30700
29750
  sequencesResolver,
30701
29751
  tablesResolver,
30702
29752
  columnsResolver,
30703
- viewsResolver,
30704
29753
  validatedPrev,
30705
29754
  validatedCur
30706
29755
  );
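From this point on the migrate and push entry points start reading config.casing and forwarding it into every snapshot builder (preparePgMigrationSnapshot, preparePgDbPushSnapshot, prepareMySqlDbPushSnapshot, prepareSQLiteDbPushSnapshot, and so on). Assuming this corresponds to a casing option in the drizzle-kit config file (the option name is taken from config.casing in the hunk above; the "snake_case" value is an assumption), enabling it would look roughly like:

// drizzle.config.js — hypothetical usage of the casing option threaded through these hunks.
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "postgresql",
  schema: "./src/schema.js",
  out: "./drizzle",
  casing: "snake_case", // assumption: one of the casing values drizzle-kit documents elsewhere
});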
@@ -30717,10 +29766,11 @@ var init_migrate = __esm({
30717
29766
  console.error(e2);
30718
29767
  }
30719
29768
  };
30720
- preparePgPush = async (schemaPath, snapshot, schemaFilter) => {
29769
+ preparePgPush = async (schemaPath, snapshot, schemaFilter, casing2) => {
30721
29770
  const { prev, cur } = await preparePgDbPushSnapshot(
30722
29771
  snapshot,
30723
29772
  schemaPath,
29773
+ casing2,
30724
29774
  schemaFilter
30725
29775
  );
30726
29776
  const validatedPrev = pgSchema.parse(prev);
@@ -30735,18 +29785,18 @@ var init_migrate = __esm({
30735
29785
  sequencesResolver,
30736
29786
  tablesResolver,
30737
29787
  columnsResolver,
30738
- viewsResolver,
30739
29788
  validatedPrev,
30740
29789
  validatedCur,
30741
29790
  "push"
30742
29791
  );
30743
29792
  return { sqlStatements, statements, squashedPrev, squashedCur };
30744
29793
  };
30745
- prepareMySQLPush = async (schemaPath, snapshot) => {
29794
+ prepareMySQLPush = async (schemaPath, snapshot, casing2) => {
30746
29795
  try {
30747
29796
  const { prev, cur } = await prepareMySqlDbPushSnapshot(
30748
29797
  snapshot,
30749
- schemaPath
29798
+ schemaPath,
29799
+ casing2
30750
29800
  );
30751
29801
  const validatedPrev = mysqlSchema.parse(prev);
30752
29802
  const validatedCur = mysqlSchema.parse(cur);
@@ -30770,12 +29820,14 @@ var init_migrate = __esm({
30770
29820
  prepareAndMigrateMysql = async (config) => {
30771
29821
  const outFolder = config.out;
30772
29822
  const schemaPath = config.schema;
29823
+ const casing2 = config.casing;
30773
29824
  try {
30774
29825
  assertV1OutFolder(outFolder);
30775
29826
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "mysql");
30776
29827
  const { prev, cur, custom: custom2 } = await prepareMySqlMigrationSnapshot(
30777
29828
  snapshots,
30778
- schemaPath
29829
+ schemaPath,
29830
+ casing2
30779
29831
  );
30780
29832
  const validatedPrev = mysqlSchema.parse(prev);
30781
29833
  const validatedCur = mysqlSchema.parse(cur);
@@ -30819,12 +29871,14 @@ var init_migrate = __esm({
30819
29871
  prepareAndMigrateSqlite = async (config) => {
30820
29872
  const outFolder = config.out;
30821
29873
  const schemaPath = config.schema;
29874
+ const casing2 = config.casing;
30822
29875
  try {
30823
29876
  assertV1OutFolder(outFolder);
30824
29877
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "sqlite");
30825
29878
  const { prev, cur, custom: custom2 } = await prepareSqliteMigrationSnapshot(
30826
29879
  snapshots,
30827
- schemaPath
29880
+ schemaPath,
29881
+ casing2
30828
29882
  );
30829
29883
  const validatedPrev = sqliteSchema.parse(prev);
30830
29884
  const validatedCur = sqliteSchema.parse(cur);
@@ -30870,12 +29924,14 @@ var init_migrate = __esm({
30870
29924
  prepareAndMigrateLibSQL = async (config) => {
30871
29925
  const outFolder = config.out;
30872
29926
  const schemaPath = config.schema;
29927
+ const casing2 = config.casing;
30873
29928
  try {
30874
29929
  assertV1OutFolder(outFolder);
30875
29930
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "sqlite");
30876
29931
  const { prev, cur, custom: custom2 } = await prepareSqliteMigrationSnapshot(
30877
29932
  snapshots,
30878
- schemaPath
29933
+ schemaPath,
29934
+ casing2
30879
29935
  );
30880
29936
  const validatedPrev = sqliteSchema.parse(prev);
30881
29937
  const validatedCur = sqliteSchema.parse(cur);
@@ -30918,8 +29974,8 @@ var init_migrate = __esm({
30918
29974
  console.error(e2);
30919
29975
  }
30920
29976
  };
30921
- prepareSQLitePush = async (schemaPath, snapshot) => {
30922
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
29977
+ prepareSQLitePush = async (schemaPath, snapshot, casing2) => {
29978
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing2);
30923
29979
  const validatedPrev = sqliteSchema.parse(prev);
30924
29980
  const validatedCur = sqliteSchema.parse(cur);
30925
29981
  const squashedPrev = squashSqliteScheme(validatedPrev, "push");
@@ -30941,8 +29997,8 @@ var init_migrate = __esm({
30941
29997
  meta: _meta
30942
29998
  };
30943
29999
  };
30944
- prepareLibSQLPush = async (schemaPath, snapshot) => {
30945
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
30000
+ prepareLibSQLPush = async (schemaPath, snapshot, casing2) => {
30001
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing2);
30946
30002
  const validatedPrev = sqliteSchema.parse(prev);
30947
30003
  const validatedCur = sqliteSchema.parse(cur);
30948
30004
  const squashedPrev = squashSqliteScheme(validatedPrev, "push");
@@ -35557,20 +34613,20 @@ var require_ponyfill_es2018 = __commonJS({
35557
34613
  ;
35558
34614
  ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);
35559
34615
  }
35560
- respondWithNewView(view2) {
34616
+ respondWithNewView(view) {
35561
34617
  if (!IsReadableStreamBYOBRequest(this)) {
35562
34618
  throw byobRequestBrandCheckException("respondWithNewView");
35563
34619
  }
35564
- assertRequiredArgument(view2, 1, "respondWithNewView");
35565
- if (!ArrayBuffer.isView(view2)) {
34620
+ assertRequiredArgument(view, 1, "respondWithNewView");
34621
+ if (!ArrayBuffer.isView(view)) {
35566
34622
  throw new TypeError("You can only respond with array buffer views");
35567
34623
  }
35568
34624
  if (this._associatedReadableByteStreamController === void 0) {
35569
34625
  throw new TypeError("This BYOB request has been invalidated");
35570
34626
  }
35571
- if (IsDetachedBuffer(view2.buffer))
34627
+ if (IsDetachedBuffer(view.buffer))
35572
34628
  ;
35573
- ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view2);
34629
+ ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);
35574
34630
  }
35575
34631
  }
35576
34632
  Object.defineProperties(ReadableStreamBYOBRequest.prototype, {
@@ -35671,8 +34727,8 @@ var require_ponyfill_es2018 = __commonJS({
35671
34727
  const entry = this._queue.shift();
35672
34728
  this._queueTotalSize -= entry.byteLength;
35673
34729
  ReadableByteStreamControllerHandleQueueDrain(this);
35674
- const view2 = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
35675
- readRequest._chunkSteps(view2);
34730
+ const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
34731
+ readRequest._chunkSteps(view);
35676
34732
  return;
35677
34733
  }
35678
34734
  const autoAllocateChunkSize = this._autoAllocateChunkSize;
@@ -35838,19 +34894,19 @@ var require_ponyfill_es2018 = __commonJS({
35838
34894
  }
35839
34895
  }
35840
34896
  }
35841
- function ReadableByteStreamControllerPullInto(controller, view2, readIntoRequest) {
34897
+ function ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {
35842
34898
  const stream = controller._controlledReadableByteStream;
35843
34899
  let elementSize = 1;
35844
- if (view2.constructor !== DataView) {
35845
- elementSize = view2.constructor.BYTES_PER_ELEMENT;
34900
+ if (view.constructor !== DataView) {
34901
+ elementSize = view.constructor.BYTES_PER_ELEMENT;
35846
34902
  }
35847
- const ctor = view2.constructor;
35848
- const buffer = TransferArrayBuffer(view2.buffer);
34903
+ const ctor = view.constructor;
34904
+ const buffer = TransferArrayBuffer(view.buffer);
35849
34905
  const pullIntoDescriptor = {
35850
34906
  buffer,
35851
34907
  bufferByteLength: buffer.byteLength,
35852
- byteOffset: view2.byteOffset,
35853
- byteLength: view2.byteLength,
34908
+ byteOffset: view.byteOffset,
34909
+ byteLength: view.byteLength,
35854
34910
  bytesFilled: 0,
35855
34911
  elementSize,
35856
34912
  viewConstructor: ctor,
@@ -36018,9 +35074,9 @@ var require_ponyfill_es2018 = __commonJS({
36018
35074
  function ReadableByteStreamControllerGetBYOBRequest(controller) {
36019
35075
  if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {
36020
35076
  const firstDescriptor = controller._pendingPullIntos.peek();
36021
- const view2 = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
35077
+ const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
36022
35078
  const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);
36023
- SetUpReadableStreamBYOBRequest(byobRequest, controller, view2);
35079
+ SetUpReadableStreamBYOBRequest(byobRequest, controller, view);
36024
35080
  controller._byobRequest = byobRequest;
36025
35081
  }
36026
35082
  return controller._byobRequest;
@@ -36053,29 +35109,29 @@ var require_ponyfill_es2018 = __commonJS({
36053
35109
  firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);
36054
35110
  ReadableByteStreamControllerRespondInternal(controller, bytesWritten);
36055
35111
  }
36056
- function ReadableByteStreamControllerRespondWithNewView(controller, view2) {
35112
+ function ReadableByteStreamControllerRespondWithNewView(controller, view) {
36057
35113
  const firstDescriptor = controller._pendingPullIntos.peek();
36058
35114
  const state = controller._controlledReadableByteStream._state;
36059
35115
  if (state === "closed") {
36060
- if (view2.byteLength !== 0) {
35116
+ if (view.byteLength !== 0) {
36061
35117
  throw new TypeError("The view's length must be 0 when calling respondWithNewView() on a closed stream");
36062
35118
  }
36063
35119
  } else {
36064
- if (view2.byteLength === 0) {
35120
+ if (view.byteLength === 0) {
36065
35121
  throw new TypeError("The view's length must be greater than 0 when calling respondWithNewView() on a readable stream");
36066
35122
  }
36067
35123
  }
36068
- if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view2.byteOffset) {
35124
+ if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {
36069
35125
  throw new RangeError("The region specified by view does not match byobRequest");
36070
35126
  }
36071
- if (firstDescriptor.bufferByteLength !== view2.buffer.byteLength) {
35127
+ if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {
36072
35128
  throw new RangeError("The buffer of view has different capacity than byobRequest");
36073
35129
  }
36074
- if (firstDescriptor.bytesFilled + view2.byteLength > firstDescriptor.byteLength) {
35130
+ if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {
36075
35131
  throw new RangeError("The region specified by view is larger than byobRequest");
36076
35132
  }
36077
- const viewByteLength = view2.byteLength;
36078
- firstDescriptor.buffer = TransferArrayBuffer(view2.buffer);
35133
+ const viewByteLength = view.byteLength;
35134
+ firstDescriptor.buffer = TransferArrayBuffer(view.buffer);
36079
35135
  ReadableByteStreamControllerRespondInternal(controller, viewByteLength);
36080
35136
  }
36081
35137
  function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {
@@ -36121,9 +35177,9 @@ var require_ponyfill_es2018 = __commonJS({
36121
35177
  }
36122
35178
  SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);
36123
35179
  }
36124
- function SetUpReadableStreamBYOBRequest(request, controller, view2) {
35180
+ function SetUpReadableStreamBYOBRequest(request, controller, view) {
36125
35181
  request._associatedReadableByteStreamController = controller;
36126
- request._view = view2;
35182
+ request._view = view;
36127
35183
  }
36128
35184
  function byobRequestBrandCheckException(name) {
36129
35185
  return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);
@@ -36199,20 +35255,20 @@ var require_ponyfill_es2018 = __commonJS({
36199
35255
  *
36200
35256
  * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.
36201
35257
  */
36202
- read(view2) {
35258
+ read(view) {
36203
35259
  if (!IsReadableStreamBYOBReader(this)) {
36204
35260
  return promiseRejectedWith(byobReaderBrandCheckException("read"));
36205
35261
  }
36206
- if (!ArrayBuffer.isView(view2)) {
35262
+ if (!ArrayBuffer.isView(view)) {
36207
35263
  return promiseRejectedWith(new TypeError("view must be an array buffer view"));
36208
35264
  }
36209
- if (view2.byteLength === 0) {
35265
+ if (view.byteLength === 0) {
36210
35266
  return promiseRejectedWith(new TypeError("view must have non-zero byteLength"));
36211
35267
  }
36212
- if (view2.buffer.byteLength === 0) {
35268
+ if (view.buffer.byteLength === 0) {
36213
35269
  return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));
36214
35270
  }
36215
- if (IsDetachedBuffer(view2.buffer))
35271
+ if (IsDetachedBuffer(view.buffer))
36216
35272
  ;
36217
35273
  if (this._ownerReadableStream === void 0) {
36218
35274
  return promiseRejectedWith(readerLockException("read from"));
@@ -36228,7 +35284,7 @@ var require_ponyfill_es2018 = __commonJS({
36228
35284
  _closeSteps: (chunk) => resolvePromise({ value: chunk, done: true }),
36229
35285
  _errorSteps: (e2) => rejectPromise(e2)
36230
35286
  };
36231
- ReadableStreamBYOBReaderRead(this, view2, readIntoRequest);
35287
+ ReadableStreamBYOBReaderRead(this, view, readIntoRequest);
36232
35288
  return promise;
36233
35289
  }
36234
35290
  /**
@@ -36274,13 +35330,13 @@ var require_ponyfill_es2018 = __commonJS({
36274
35330
  }
36275
35331
  return x2 instanceof ReadableStreamBYOBReader;
36276
35332
  }
36277
- function ReadableStreamBYOBReaderRead(reader, view2, readIntoRequest) {
35333
+ function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {
36278
35334
  const stream = reader._ownerReadableStream;
36279
35335
  stream._disturbed = true;
36280
35336
  if (stream._state === "errored") {
36281
35337
  readIntoRequest._errorSteps(stream._storedError);
36282
35338
  } else {
36283
- ReadableByteStreamControllerPullInto(stream._readableStreamController, view2, readIntoRequest);
35339
+ ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);
36284
35340
  }
36285
35341
  }
36286
35342
  function byobReaderBrandCheckException(name) {
@@ -37829,7 +36885,7 @@ var require_ponyfill_es2018 = __commonJS({
37829
36885
  };
37830
36886
  ReadableStreamDefaultReaderRead(reader, readRequest);
37831
36887
  }
37832
- function pullWithBYOBReader(view2, forBranch2) {
36888
+ function pullWithBYOBReader(view, forBranch2) {
37833
36889
  if (IsReadableStreamDefaultReader(reader)) {
37834
36890
  ReadableStreamReaderGenericRelease(reader);
37835
36891
  reader = AcquireReadableStreamBYOBReader(stream);
@@ -37895,7 +36951,7 @@ var require_ponyfill_es2018 = __commonJS({
37895
36951
  reading = false;
37896
36952
  }
37897
36953
  };
37898
- ReadableStreamBYOBReaderRead(reader, view2, readIntoRequest);
36954
+ ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);
37899
36955
  }
37900
36956
  function pull1Algorithm() {
37901
36957
  if (reading) {
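The `view2` → `view` churn in these stream-ponyfill hunks (and the later `number2` → `number3`, `dialect7` → `dialect4`, `import_casing` → `import_casing3` renames) is most likely not a behavioural change: esbuild-style bundlers deduplicate colliding top-level identifiers by appending numeric suffixes, and the suffixes shift whenever modules are added to or removed from the bundle. A toy illustration of the collision renaming, under that assumption:

// Two source modules both declare `view`; once hoisted into a single bundle scope,
// one occurrence must be renamed (e.g. to `view2`). Dropping or adding a module
// changes which occurrence keeps the bare name, producing diffs like the above.
const view = "from module A";
const view2 = "from module B"; // would have stayed `view` if module A were removed
console.log(view, view2);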
@@ -52754,15 +51810,15 @@ var require_dist_cjs36 = __commonJS({
52754
51810
  throw new Error("Int64 buffers must be exactly 8 bytes");
52755
51811
  }
52756
51812
  }
52757
- static fromNumber(number2) {
52758
- if (number2 > 9223372036854776e3 || number2 < -9223372036854776e3) {
52759
- throw new Error(`${number2} is too large (or, if negative, too small) to represent as an Int64`);
51813
+ static fromNumber(number3) {
51814
+ if (number3 > 9223372036854776e3 || number3 < -9223372036854776e3) {
51815
+ throw new Error(`${number3} is too large (or, if negative, too small) to represent as an Int64`);
52760
51816
  }
52761
51817
  const bytes = new Uint8Array(8);
52762
- for (let i2 = 7, remaining = Math.abs(Math.round(number2)); i2 > -1 && remaining > 0; i2--, remaining /= 256) {
51818
+ for (let i2 = 7, remaining = Math.abs(Math.round(number3)); i2 > -1 && remaining > 0; i2--, remaining /= 256) {
52763
51819
  bytes[i2] = remaining;
52764
51820
  }
52765
- if (number2 < 0) {
51821
+ if (number3 < 0) {
52766
51822
  negate(bytes);
52767
51823
  }
52768
51824
  return new _Int642(bytes);
@@ -57264,9 +56320,9 @@ var require_dist_cjs46 = __commonJS({
57264
56320
  }
57265
56321
  });
57266
56322
 
57267
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js
56323
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js
57268
56324
  var require_httpAuthSchemeProvider3 = __commonJS({
57269
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
56325
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
57270
56326
  "use strict";
57271
56327
  Object.defineProperty(exports2, "__esModule", { value: true });
57272
56328
  exports2.resolveHttpAuthSchemeConfig = exports2.defaultSSOOIDCHttpAuthSchemeProvider = exports2.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0;
@@ -57333,9 +56389,9 @@ var require_httpAuthSchemeProvider3 = __commonJS({
57333
56389
  }
57334
56390
  });
57335
56391
 
57336
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json
56392
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json
57337
56393
  var require_package4 = __commonJS({
57338
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json"(exports2, module2) {
56394
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/package.json"(exports2, module2) {
57339
56395
  module2.exports = {
57340
56396
  name: "@aws-sdk/client-sso-oidc",
57341
56397
  description: "AWS SDK for JavaScript Sso Oidc Client for Node.js, Browser and React Native",
@@ -57439,9 +56495,9 @@ var require_package4 = __commonJS({
57439
56495
  }
57440
56496
  });
57441
56497
 
57442
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js
56498
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js
57443
56499
  var require_ruleset2 = __commonJS({
57444
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js"(exports2) {
56500
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/ruleset.js"(exports2) {
57445
56501
  "use strict";
57446
56502
  Object.defineProperty(exports2, "__esModule", { value: true });
57447
56503
  exports2.ruleSet = void 0;
@@ -57474,9 +56530,9 @@ var require_ruleset2 = __commonJS({
57474
56530
  }
57475
56531
  });
57476
56532
 
57477
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js
56533
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js
57478
56534
  var require_endpointResolver2 = __commonJS({
57479
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js"(exports2) {
56535
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/endpoint/endpointResolver.js"(exports2) {
57480
56536
  "use strict";
57481
56537
  Object.defineProperty(exports2, "__esModule", { value: true });
57482
56538
  exports2.defaultEndpointResolver = void 0;
@@ -57494,9 +56550,9 @@ var require_endpointResolver2 = __commonJS({
57494
56550
  }
57495
56551
  });
57496
56552
 
57497
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js
56553
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js
57498
56554
  var require_runtimeConfig_shared2 = __commonJS({
57499
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js"(exports2) {
56555
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.shared.js"(exports2) {
57500
56556
  "use strict";
57501
56557
  Object.defineProperty(exports2, "__esModule", { value: true });
57502
56558
  exports2.getRuntimeConfig = void 0;
@@ -57540,9 +56596,9 @@ var require_runtimeConfig_shared2 = __commonJS({
57540
56596
  }
57541
56597
  });
57542
56598
 
57543
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js
56599
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js
57544
56600
  var require_runtimeConfig2 = __commonJS({
57545
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js"(exports2) {
56601
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/runtimeConfig.js"(exports2) {
57546
56602
  "use strict";
57547
56603
  Object.defineProperty(exports2, "__esModule", { value: true });
57548
56604
  exports2.getRuntimeConfig = void 0;
@@ -57593,9 +56649,9 @@ var require_runtimeConfig2 = __commonJS({
57593
56649
  }
57594
56650
  });
57595
56651
 
57596
- // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js
56652
+ // ../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js
57597
56653
  var require_dist_cjs47 = __commonJS({
57598
- "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js"(exports2, module2) {
56654
+ "../node_modules/.pnpm/@aws-sdk+client-sso-oidc@3.583.0_@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sso-oidc/dist-cjs/index.js"(exports2, module2) {
57599
56655
  "use strict";
57600
56656
  var __defProp3 = Object.defineProperty;
57601
56657
  var __getOwnPropDesc3 = Object.getOwnPropertyDescriptor;
@@ -58941,9 +57997,9 @@ Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.ht
58941
57997
  }
58942
57998
  });
58943
57999
 
58944
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js
58000
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js
58945
58001
  var require_httpAuthSchemeProvider4 = __commonJS({
58946
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
58002
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthSchemeProvider.js"(exports2) {
58947
58003
  "use strict";
58948
58004
  Object.defineProperty(exports2, "__esModule", { value: true });
58949
58005
  exports2.resolveHttpAuthSchemeConfig = exports2.resolveStsAuthConfig = exports2.defaultSTSHttpAuthSchemeProvider = exports2.defaultSTSHttpAuthSchemeParametersProvider = void 0;
@@ -59013,9 +58069,9 @@ var require_httpAuthSchemeProvider4 = __commonJS({
59013
58069
  }
59014
58070
  });
59015
58071
 
59016
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js
58072
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js
59017
58073
  var require_EndpointParameters = __commonJS({
59018
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js"(exports2) {
58074
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/EndpointParameters.js"(exports2) {
59019
58075
  "use strict";
59020
58076
  Object.defineProperty(exports2, "__esModule", { value: true });
59021
58077
  exports2.commonParams = exports2.resolveClientEndpointParameters = void 0;
@@ -59039,9 +58095,9 @@ var require_EndpointParameters = __commonJS({
59039
58095
  }
59040
58096
  });
59041
58097
 
59042
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/package.json
58098
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/package.json
59043
58099
  var require_package5 = __commonJS({
59044
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/package.json"(exports2, module2) {
58100
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/package.json"(exports2, module2) {
59045
58101
  module2.exports = {
59046
58102
  name: "@aws-sdk/client-sts",
59047
58103
  description: "AWS SDK for JavaScript Sts Client for Node.js, Browser and React Native",
@@ -59147,9 +58203,9 @@ var require_package5 = __commonJS({
59147
58203
  }
59148
58204
  });
59149
58205
 
59150
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js
58206
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js
59151
58207
  var require_ruleset3 = __commonJS({
59152
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js"(exports2) {
58208
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/ruleset.js"(exports2) {
59153
58209
  "use strict";
59154
58210
  Object.defineProperty(exports2, "__esModule", { value: true });
59155
58211
  exports2.ruleSet = void 0;
@@ -59194,9 +58250,9 @@ var require_ruleset3 = __commonJS({
59194
58250
  }
59195
58251
  });
59196
58252
 
59197
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js
58253
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js
59198
58254
  var require_endpointResolver3 = __commonJS({
59199
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js"(exports2) {
58255
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/endpoint/endpointResolver.js"(exports2) {
59200
58256
  "use strict";
59201
58257
  Object.defineProperty(exports2, "__esModule", { value: true });
59202
58258
  exports2.defaultEndpointResolver = void 0;
@@ -59214,9 +58270,9 @@ var require_endpointResolver3 = __commonJS({
59214
58270
  }
59215
58271
  });
59216
58272
 
59217
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js
58273
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js
59218
58274
  var require_runtimeConfig_shared3 = __commonJS({
59219
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js"(exports2) {
58275
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.shared.js"(exports2) {
59220
58276
  "use strict";
59221
58277
  Object.defineProperty(exports2, "__esModule", { value: true });
59222
58278
  exports2.getRuntimeConfig = void 0;
@@ -59260,9 +58316,9 @@ var require_runtimeConfig_shared3 = __commonJS({
59260
58316
  }
59261
58317
  });
59262
58318
 
59263
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js
58319
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js
59264
58320
  var require_runtimeConfig3 = __commonJS({
59265
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js"(exports2) {
58321
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeConfig.js"(exports2) {
59266
58322
  "use strict";
59267
58323
  Object.defineProperty(exports2, "__esModule", { value: true });
59268
58324
  exports2.getRuntimeConfig = void 0;
@@ -59326,9 +58382,9 @@ var require_runtimeConfig3 = __commonJS({
59326
58382
  }
59327
58383
  });
59328
58384
 
59329
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js
58385
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js
59330
58386
  var require_httpAuthExtensionConfiguration = __commonJS({
59331
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js"(exports2) {
58387
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/auth/httpAuthExtensionConfiguration.js"(exports2) {
59332
58388
  "use strict";
59333
58389
  Object.defineProperty(exports2, "__esModule", { value: true });
59334
58390
  exports2.resolveHttpAuthRuntimeConfig = exports2.getHttpAuthExtensionConfiguration = void 0;
@@ -59374,9 +58430,9 @@ var require_httpAuthExtensionConfiguration = __commonJS({
59374
58430
  }
59375
58431
  });
59376
58432
 
59377
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js
58433
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js
59378
58434
  var require_runtimeExtensions = __commonJS({
59379
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js"(exports2) {
58435
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/runtimeExtensions.js"(exports2) {
59380
58436
  "use strict";
59381
58437
  Object.defineProperty(exports2, "__esModule", { value: true });
59382
58438
  exports2.resolveRuntimeExtensions = void 0;
@@ -59405,9 +58461,9 @@ var require_runtimeExtensions = __commonJS({
59405
58461
  }
59406
58462
  });
59407
58463
 
59408
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js
58464
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js
59409
58465
  var require_STSClient = __commonJS({
59410
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js"(exports2) {
58466
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/STSClient.js"(exports2) {
59411
58467
  "use strict";
59412
58468
  Object.defineProperty(exports2, "__esModule", { value: true });
59413
58469
  exports2.STSClient = exports2.__Client = void 0;
@@ -59469,9 +58525,9 @@ var require_STSClient = __commonJS({
59469
58525
  }
59470
58526
  });
59471
58527
 
59472
- // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js
58528
+ // ../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js
59473
58529
  var require_dist_cjs50 = __commonJS({
59474
- "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0_@aws-sdk+client-sso-oidc@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js"(exports2, module2) {
58530
+ "../node_modules/.pnpm/@aws-sdk+client-sts@3.583.0/node_modules/@aws-sdk/client-sts/dist-cjs/index.js"(exports2, module2) {
59475
58531
  "use strict";
59476
58532
  var __defProp3 = Object.defineProperty;
59477
58533
  var __getOwnPropDesc3 = Object.getOwnPropertyDescriptor;
@@ -63629,7 +62685,7 @@ var init_studio2 = __esm({
63629
62685
  });
63630
62686
  };
63631
62687
  prepareServer = async ({
63632
- dialect: dialect7,
62688
+ dialect: dialect4,
63633
62689
  driver: driver2,
63634
62690
  proxy,
63635
62691
  customDefaults,
@@ -63679,7 +62735,7 @@ var init_studio2 = __esm({
63679
62735
  }));
63680
62736
  return c.json({
63681
62737
  version: "6",
63682
- dialect: dialect7,
62738
+ dialect: dialect4,
63683
62739
  driver: driver2,
63684
62740
  schemaFiles,
63685
62741
  customDefaults: preparedDefaults,
@@ -64193,16 +63249,16 @@ var require_map_stream = __commonJS({
64193
63249
  var writeQueue = {};
64194
63250
  stream.writable = true;
64195
63251
  stream.readable = true;
64196
- function queueData(data, number2) {
63252
+ function queueData(data, number3) {
64197
63253
  var nextToWrite = lastWritten + 1;
64198
- if (number2 === nextToWrite) {
63254
+ if (number3 === nextToWrite) {
64199
63255
  if (data !== void 0) {
64200
63256
  stream.emit.apply(stream, ["data", data]);
64201
63257
  }
64202
63258
  lastWritten++;
64203
63259
  nextToWrite++;
64204
63260
  } else {
64205
- writeQueue[number2] = data;
63261
+ writeQueue[number3] = data;
64206
63262
  }
64207
63263
  if (writeQueue.hasOwnProperty(nextToWrite)) {
64208
63264
  var dataToWrite = writeQueue[nextToWrite];
@@ -64217,21 +63273,21 @@ var require_map_stream = __commonJS({
64217
63273
  end();
64218
63274
  }
64219
63275
  }
64220
- function next(err2, data, number2) {
63276
+ function next(err2, data, number3) {
64221
63277
  if (destroyed)
64222
63278
  return;
64223
63279
  inNext = true;
64224
63280
  if (!err2 || self2.opts.failures) {
64225
- queueData(data, number2);
63281
+ queueData(data, number3);
64226
63282
  }
64227
63283
  if (err2) {
64228
63284
  stream.emit.apply(stream, [errorEventName, err2]);
64229
63285
  }
64230
63286
  inNext = false;
64231
63287
  }
64232
- function wrappedMapper(input, number2, callback) {
63288
+ function wrappedMapper(input, number3, callback) {
64233
63289
  return mapper.call(null, input, function(err2, data) {
64234
- callback(err2, data, number2);
63290
+ callback(err2, data, number3);
64235
63291
  });
64236
63292
  }
64237
63293
  stream.write = function(data) {
@@ -78063,7 +77119,7 @@ var init_selector_ui = __esm({
78063
77119
  });
78064
77120
 
78065
77121
  // src/cli/commands/libSqlPushUtils.ts
78066
- var getOldTableName2, _moveDataStatements2, libSqlLogSuggestionsAndReturn;
77122
+ var getOldTableName3, _moveDataStatements2, libSqlLogSuggestionsAndReturn;
78067
77123
  var init_libSqlPushUtils = __esm({
78068
77124
  "src/cli/commands/libSqlPushUtils.ts"() {
78069
77125
  "use strict";
@@ -78071,7 +77127,7 @@ var init_libSqlPushUtils = __esm({
78071
77127
  init_utils();
78072
77128
  init_sqliteSchema();
78073
77129
  init_sqlgenerator();
78074
- getOldTableName2 = (tableName, meta) => {
77130
+ getOldTableName3 = (tableName, meta) => {
78075
77131
  for (const key of Object.keys(meta.tables)) {
78076
77132
  const value = meta.tables[key];
78077
77133
  if (`"${tableName}"` === value) {
@@ -78226,7 +77282,7 @@ var init_libSqlPushUtils = __esm({
78226
77282
  } else if (statement.type === "recreate_table") {
78227
77283
  const tableName = statement.tableName;
78228
77284
  let dataLoss = false;
78229
- const oldTableName = getOldTableName2(tableName, meta);
77285
+ const oldTableName = getOldTableName3(tableName, meta);
78230
77286
  const prevColumnNames = Object.keys(json1.tables[oldTableName].columns);
78231
77287
  const currentColumnNames = Object.keys(json2.tables[tableName].columns);
78232
77288
  const { removedColumns, addedColumns } = findAddedAndRemoved(
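`getOldTableName3` (only renamed in this hunk) recovers a table's pre-rename name by scanning the `meta.tables` rename map, whose entries are stored with surrounding double quotes. A hedged sketch of that lookup, assuming the branch elided from the hunk returns the matching key with its quotes stripped and that an unmatched name falls through unchanged:

// Illustrative sketch of the rename-map lookup used before recreating a table.
type Meta = { tables: Record<string, string> }; // '"old_name"' -> '"new_name"'

const getOldTableName = (tableName: string, meta: Meta): string => {
  for (const key of Object.keys(meta.tables)) {
    if (`"${tableName}"` === meta.tables[key]) {
      return key.replace(/^"|"$/g, ""); // assumed: strip the stored quotes
    }
  }
  return tableName; // assumed fallback: no recorded rename
};

// getOldTableName("users", { tables: { '"users_old"': '"users"' } }) === "users_old"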
@@ -79986,11 +79042,11 @@ var init_pgIntrospect = __esm({
79986
79042
  });
79987
79043
 
79988
79044
  // src/introspect-sqlite.ts
79989
- var import_casing, sqliteImportsList, indexName3, objToStatement2, relations, escapeColumnKey, withCasing, dbColumnName, schemaToTypeScript, isCyclic, isSelf, mapColumnDefault, column4, createTableColumns, createTableIndexes, createTableUniques, createTablePKs, createTableFKs;
79045
+ var import_casing3, sqliteImportsList, indexName3, objToStatement2, relations, escapeColumnKey, withCasing, dbColumnName, schemaToTypeScript, isCyclic, isSelf, mapColumnDefault, column4, createTableColumns, createTableIndexes, createTableUniques, createTablePKs, createTableFKs;
79990
79046
  var init_introspect_sqlite = __esm({
79991
79047
  "src/introspect-sqlite.ts"() {
79992
79048
  "use strict";
79993
- import_casing = require("drizzle-orm/casing");
79049
+ import_casing3 = require("drizzle-orm/casing");
79994
79050
  init_utils3();
79995
79051
  init_global();
79996
79052
  sqliteImportsList = /* @__PURE__ */ new Set([
@@ -80035,7 +79091,7 @@ var init_introspect_sqlite = __esm({
80035
79091
  return "";
80036
79092
  }
80037
79093
  if (casing2 === "camel") {
80038
- return (0, import_casing.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
79094
+ return (0, import_casing3.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80039
79095
  }
80040
79096
  assertUnreachable(casing2);
80041
79097
  };
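Only the import alias changes here (`import_casing` → `import_casing3`); the visible branch of `dbColumnName` still emits an explicit database name only when camel-casing would actually change it. A small stand-alone sketch of that branch, using a simplified stand-in for `toCamelCase` from "drizzle-orm/casing":

// Illustrative: quote the db name only when camel-casing would alter it.
const toCamelCase = (s: string): string =>
  s.replace(/_([a-z])/g, (_, ch: string) => ch.toUpperCase());

const dbColumnName = (name: string, withMode = false): string =>
  toCamelCase(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;

// dbColumnName("first_name") === '"first_name"'  // explicit name kept in output
// dbColumnName("createdAt")  === ""              // already camelCase, name omitted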
@@ -80410,13 +79466,13 @@ var init_push = __esm({
80410
79466
  init_mysqlPushUtils();
80411
79467
  init_pgPushUtils();
80412
79468
  init_sqlitePushUtils();
80413
- mysqlPush = async (schemaPath, credentials2, tablesFilter, strict, verbose, force) => {
79469
+ mysqlPush = async (schemaPath, credentials2, tablesFilter, strict, verbose, force, casing2) => {
80414
79470
  const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80415
79471
  const { mysqlPushIntrospect: mysqlPushIntrospect2 } = await Promise.resolve().then(() => (init_mysqlIntrospect(), mysqlIntrospect_exports));
80416
79472
  const { db, database } = await connectToMySQL2(credentials2);
80417
79473
  const { schema: schema5 } = await mysqlPushIntrospect2(db, database, tablesFilter);
80418
79474
  const { prepareMySQLPush: prepareMySQLPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80419
- const statements = await prepareMySQLPush2(schemaPath, schema5);
79475
+ const statements = await prepareMySQLPush2(schemaPath, schema5, casing2);
80420
79476
  const filteredStatements = filterStatements(
80421
79477
  statements.statements ?? [],
80422
79478
  statements.validatedCur,
@@ -80511,13 +79567,13 @@ var init_push = __esm({
80511
79567
  console.log(e2);
80512
79568
  }
80513
79569
  };
80514
- pgPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, schemasFilter, force) => {
79570
+ pgPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, schemasFilter, force, casing2) => {
80515
79571
  const { preparePostgresDB: preparePostgresDB2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80516
79572
  const { pgPushIntrospect: pgPushIntrospect2 } = await Promise.resolve().then(() => (init_pgIntrospect(), pgIntrospect_exports));
80517
79573
  const db = await preparePostgresDB2(credentials2);
80518
79574
  const { schema: schema5 } = await pgPushIntrospect2(db, tablesFilter, schemasFilter);
80519
79575
  const { preparePgPush: preparePgPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80520
- const statements = await preparePgPush2(schemaPath, schema5, schemasFilter);
79576
+ const statements = await preparePgPush2(schemaPath, schema5, schemasFilter, casing2);
80521
79577
  try {
80522
79578
  if (statements.sqlStatements.length === 0) {
80523
79579
  (0, import_hanji11.render)(`[${source_default.blue("i")}] No changes detected`);
@@ -80585,13 +79641,13 @@ var init_push = __esm({
80585
79641
  console.error(e2);
80586
79642
  }
80587
79643
  };
80588
- sqlitePush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force) => {
79644
+ sqlitePush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force, casing2) => {
80589
79645
  const { connectToSQLite: connectToSQLite2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80590
79646
  const { sqlitePushIntrospect: sqlitePushIntrospect2 } = await Promise.resolve().then(() => (init_sqliteIntrospect(), sqliteIntrospect_exports));
80591
79647
  const db = await connectToSQLite2(credentials2);
80592
79648
  const { schema: schema5 } = await sqlitePushIntrospect2(db, tablesFilter);
80593
79649
  const { prepareSQLitePush: prepareSQLitePush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80594
- const statements = await prepareSQLitePush2(schemaPath, schema5);
79650
+ const statements = await prepareSQLitePush2(schemaPath, schema5, casing2);
80595
79651
  if (statements.sqlStatements.length === 0) {
80596
79652
  (0, import_hanji11.render)(`
80597
79653
  [${source_default.blue("i")}] No changes detected`);
@@ -80673,13 +79729,13 @@ var init_push = __esm({
80673
79729
  }
80674
79730
  }
80675
79731
  };
80676
- libSQLPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force) => {
79732
+ libSQLPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force, casing2) => {
80677
79733
  const { connectToLibSQL: connectToLibSQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
80678
79734
  const { sqlitePushIntrospect: sqlitePushIntrospect2 } = await Promise.resolve().then(() => (init_sqliteIntrospect(), sqliteIntrospect_exports));
80679
79735
  const db = await connectToLibSQL2(credentials2);
80680
79736
  const { schema: schema5 } = await sqlitePushIntrospect2(db, tablesFilter);
80681
79737
  const { prepareLibSQLPush: prepareLibSQLPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
80682
- const statements = await prepareLibSQLPush2(schemaPath, schema5);
79738
+ const statements = await prepareLibSQLPush2(schemaPath, schema5, casing2);
80683
79739
  if (statements.sqlStatements.length === 0) {
80684
79740
  (0, import_hanji11.render)(`
80685
79741
  [${source_default.blue("i")}] No changes detected`);
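All four push entry points (`mysqlPush`, `pgPush`, `sqlitePush`, `libSQLPush`) now take the casing value as a trailing argument and forward it to the matching `prepare*Push` snapshot builder. Since the value originates from the resolved config (see `config.casing` in the migrate hunks above), a config file along these lines should be able to set it — the `casing` field name is inferred from this diff rather than from documentation:

// drizzle.config.ts — hedged sketch; only the `casing` field is new here.
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "sqlite",
  schema: "./src/schema.ts",
  out: "./drizzle",
  casing: "snake_case", // or "camelCase"; assumed from the option added below
});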
@@ -81109,11 +80165,11 @@ var require_pluralize = __commonJS({
81109
80165
  });
81110
80166
 
81111
80167
  // src/introspect-mysql.ts
81112
- var import_casing2, mysqlImportsList, objToStatement22, timeConfig, binaryConfig, importsPatch, relations2, escapeColumnKey2, prepareCasing, dbColumnName2, schemaToTypeScript2, isCyclic2, isSelf2, mapColumnDefault2, mapColumnDefaultForJson, column5, createTableColumns2, createTableIndexes2, createTableUniques2, createTablePKs2, createTableFKs2;
80168
+ var import_casing4, mysqlImportsList, objToStatement22, timeConfig, binaryConfig, importsPatch, relations2, escapeColumnKey2, prepareCasing, dbColumnName2, schemaToTypeScript2, isCyclic2, isSelf2, mapColumnDefault2, mapColumnDefaultForJson, column5, createTableColumns2, createTableIndexes2, createTableUniques2, createTablePKs2, createTableFKs2;
81113
80169
  var init_introspect_mysql = __esm({
81114
80170
  "src/introspect-mysql.ts"() {
81115
80171
  "use strict";
81116
- import_casing2 = require("drizzle-orm/casing");
80172
+ import_casing4 = require("drizzle-orm/casing");
81117
80173
  init_utils3();
81118
80174
  init_global();
81119
80175
  init_mysqlSerializer();
@@ -81202,7 +80258,7 @@ var init_introspect_mysql = __esm({
81202
80258
  return "";
81203
80259
  }
81204
80260
  if (casing2 === "camel") {
81205
- return (0, import_casing2.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80261
+ return (0, import_casing4.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
81206
80262
  }
81207
80263
  assertUnreachable(casing2);
81208
80264
  };
@@ -81700,14 +80756,14 @@ function generateIdentityParams(identity) {
81700
80756
  }
81701
80757
  return `.generatedByDefaultAsIdentity(${paramsObj})`;
81702
80758
  }
81703
- var import_drizzle_orm9, import_relations, import_casing3, pgImportsList, timeConfig2, possibleIntervals, intervalStrToObj, intervalConfig, mapColumnDefault3, importsPatch2, relations3, escapeColumnKey3, withCasing2, dbColumnName3, paramNameFor, schemaToTypeScript3, isCyclic3, isSelf3, buildArrayDefault, mapDefault, column6, dimensionsInArray, createTableColumns3, createTableIndexes3, createTablePKs3, createTableUniques3, createTableFKs3;
80759
+ var import_drizzle_orm9, import_relations, import_casing5, pgImportsList, timeConfig2, possibleIntervals, intervalStrToObj, intervalConfig, mapColumnDefault3, importsPatch2, relations3, escapeColumnKey3, withCasing2, dbColumnName3, paramNameFor, schemaToTypeScript3, isCyclic3, isSelf3, buildArrayDefault, mapDefault, column6, dimensionsInArray, createTableColumns3, createTableIndexes3, createTablePKs3, createTableUniques3, createTableFKs3;
81704
80760
  var init_introspect_pg = __esm({
81705
80761
  "src/introspect-pg.ts"() {
81706
80762
  "use strict";
81707
80763
  import_drizzle_orm9 = require("drizzle-orm");
81708
80764
  import_relations = require("drizzle-orm/relations");
81709
80765
  init_utils3();
81710
- import_casing3 = require("drizzle-orm/casing");
80766
+ import_casing5 = require("drizzle-orm/casing");
81711
80767
  init_vector();
81712
80768
  init_global();
81713
80769
  init_pgSerializer();
@@ -81838,7 +80894,7 @@ var init_introspect_pg = __esm({
81838
80894
  return "";
81839
80895
  }
81840
80896
  if (casing2 === "camel") {
81841
- return (0, import_casing3.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80897
+ return (0, import_casing5.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
81842
80898
  }
81843
80899
  assertUnreachable(casing2);
81844
80900
  };
@@ -81904,30 +80960,6 @@ var init_introspect_pg = __esm({
81904
80960
  },
81905
80961
  { pg: [] }
81906
80962
  );
81907
- Object.values(schema5.views).forEach((it) => {
81908
- if (it.schema && it.schema !== "public" && it.schema !== "") {
81909
- imports.pg.push("pgSchema");
81910
- } else if (it.schema === "public") {
81911
- it.materialized ? imports.pg.push("pgMaterializedView") : imports.pg.push("pgView");
81912
- }
81913
- Object.values(it.columns).forEach(() => {
81914
- const columnImports = Object.values(it.columns).map((col) => {
81915
- let patched = (importsPatch2[col.type] || col.type).replace("[]", "");
81916
- patched = patched === "double precision" ? "doublePrecision" : patched;
81917
- patched = patched.startsWith("varchar(") ? "varchar" : patched;
81918
- patched = patched.startsWith("char(") ? "char" : patched;
81919
- patched = patched.startsWith("numeric(") ? "numeric" : patched;
81920
- patched = patched.startsWith("time(") ? "time" : patched;
81921
- patched = patched.startsWith("timestamp(") ? "timestamp" : patched;
81922
- patched = patched.startsWith("vector(") ? "vector" : patched;
81923
- patched = patched.startsWith("geometry(") ? "geometry" : patched;
81924
- return patched;
81925
- }).filter((type) => {
81926
- return pgImportsList.has(type);
81927
- });
81928
- imports.pg.push(...columnImports);
81929
- });
81930
- });
81931
80963
  Object.values(schema5.sequences).forEach((it) => {
81932
80964
  if (it.schema && it.schema !== "public" && it.schema !== "") {
81933
80965
  imports.pg.push("pgSchema");
@@ -82022,28 +81054,6 @@ var init_introspect_pg = __esm({
82022
81054
  statement += ");";
82023
81055
  return statement;
82024
81056
  });
82025
- const viewsStatements = Object.values(schema5.views).map((it) => {
82026
- const viewSchema = schemas[it.schema];
82027
- const paramName = paramNameFor(it.name, viewSchema);
82028
- const func = viewSchema ? it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view` : it.materialized ? "pgMaterializedView" : "pgView";
82029
- const withOption = it.with ?? "";
82030
- const as = `sql\`${it.definition}\``;
82031
- const tablespace = it.tablespace ?? "";
82032
- const columns = createTableColumns3(
82033
- "",
82034
- Object.values(it.columns),
82035
- [],
82036
- enumTypes,
82037
- schemas,
82038
- casing2,
82039
- schema5.internal
82040
- );
82041
- let statement = `export const ${withCasing2(paramName, casing2)} = ${func}("${it.name}", {${columns}})`;
82042
- statement += tablespace ? `.tablespace("${tablespace}")` : "";
82043
- statement += withOption ? `.with(${JSON.stringify(withOption)})` : "";
82044
- statement += `.as(${as});`;
82045
- return statement;
82046
- }).join("\n\n");
82047
81057
  const uniquePgImports = ["pgTable", ...new Set(imports.pg)];
82048
81058
  const importsTs = `import { ${uniquePgImports.join(
82049
81059
  ", "
@@ -82056,8 +81066,6 @@ var init_introspect_pg = __esm({
82056
81066
  decalrations += sequencesStatements;
82057
81067
  decalrations += "\n";
82058
81068
  decalrations += tableStatements.join("\n\n");
82059
- decalrations += "\n";
82060
- decalrations += viewsStatements;
82061
81069
  const file = importsTs + decalrations;
82062
81070
  const schemaEntry = `
82063
81071
  {
@@ -82616,7 +81624,6 @@ var init_introspect = __esm({
82616
81624
  sequencesResolver,
82617
81625
  tablesResolver,
82618
81626
  columnsResolver,
82619
- viewsResolver,
82620
81627
  dryPg,
82621
81628
  schema5
82622
81629
  );
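The three removals above take the experimental Postgres view handling back out of this build: `pull` no longer collects `pgView`/`pgMaterializedView` imports, no longer appends view declarations to the generated schema file, and no longer passes a `viewsResolver` into the introspect pipeline. Based on the removed template string, the declarations that are no longer emitted looked roughly like this (names hypothetical):

// Shape of the generated view declaration removed in this build.
import { sql } from "drizzle-orm";
import { pgView } from "drizzle-orm/pg-core";

export const userSummary = pgView("user_summary", {
  // introspected columns would be listed here
}).as(sql`select "id", "name" from "users"`);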
@@ -84472,9 +83479,9 @@ init_source();
84472
83479
 
84473
83480
  // src/cli/commands/check.ts
84474
83481
  init_utils();
84475
- var checkHandler = (out, dialect7) => {
84476
- const { snapshots } = prepareOutFolder(out, dialect7);
84477
- const report = validateWithReport(snapshots, dialect7);
83482
+ var checkHandler = (out, dialect4) => {
83483
+ const { snapshots } = prepareOutFolder(out, dialect4);
83484
+ const report = validateWithReport(snapshots, dialect4);
84478
83485
  if (report.nonLatest.length > 0) {
84479
83486
  console.log(
84480
83487
  report.nonLatest.map((it) => {
@@ -85761,12 +84768,14 @@ var optionBreakpoints = boolean().desc(
85761
84768
  `Prepare SQL statements with breakpoints`
85762
84769
  );
85763
84770
  var optionDriver = string().enum(...drivers).desc("Database driver");
84771
+ var optionCasing = string().enum("camelCase", "snake_case").desc("Casing for serialization");
85764
84772
  var generate = command({
85765
84773
  name: "generate",
85766
84774
  options: {
85767
84775
  config: optionConfig,
85768
84776
  dialect: optionDialect,
85769
84777
  driver: optionDriver,
84778
+ casing: optionCasing,
85770
84779
  schema: string().desc("Path to a schema file or folder"),
85771
84780
  out: optionOut,
85772
84781
  name: string().desc("Migration file name"),
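`generate` (and, further down, `push`) gains a `casing` option restricted to `"camelCase"` and `"snake_case"`, and the next hunk adds it to the set of keys accepted from the config file. A sketch of the accepted values — the exact CLI flag spelling is assumed from the option key, not confirmed by this diff:

// Values accepted by the new option, per the enum above.
type Casing = "camelCase" | "snake_case";

// Presumably surfaced on the CLI roughly as:
//   drizzle-kit generate --dialect=sqlite --schema=./src/schema.ts --casing=snake_case
const casing: Casing = "snake_case";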
@@ -85779,7 +84788,7 @@ var generate = command({
85779
84788
  "generate",
85780
84789
  opts,
85781
84790
  ["prefix", "name", "custom"],
85782
- ["driver", "breakpoints", "schema", "out", "dialect"]
84791
+ ["driver", "breakpoints", "schema", "out", "dialect", "casing"]
85783
84792
  );
85784
84793
  return prepareGenerateConfig(opts, from);
85785
84794
  },
@@ -85792,17 +84801,17 @@ var generate = command({
85792
84801
  prepareAndMigrateSqlite: prepareAndMigrateSqlite2,
85793
84802
  prepareAndMigrateLibSQL: prepareAndMigrateLibSQL2
85794
84803
  } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
85795
- const dialect7 = opts.dialect;
85796
- if (dialect7 === "postgresql") {
84804
+ const dialect4 = opts.dialect;
84805
+ if (dialect4 === "postgresql") {
85797
84806
  await prepareAndMigratePg2(opts);
85798
- } else if (dialect7 === "mysql") {
84807
+ } else if (dialect4 === "mysql") {
85799
84808
  await prepareAndMigrateMysql2(opts);
85800
- } else if (dialect7 === "sqlite") {
84809
+ } else if (dialect4 === "sqlite") {
85801
84810
  await prepareAndMigrateSqlite2(opts);
85802
- } else if (dialect7 === "turso") {
84811
+ } else if (dialect4 === "turso") {
85803
84812
  await prepareAndMigrateLibSQL2(opts);
85804
84813
  } else {
85805
- assertUnreachable(dialect7);
84814
+ assertUnreachable(dialect4);
85806
84815
  }
85807
84816
  }
85808
84817
  });
@@ -85817,9 +84826,9 @@ var migrate = command({
85817
84826
  handler: async (opts) => {
85818
84827
  await assertOrmCoreVersion();
85819
84828
  await assertPackages("drizzle-orm");
85820
- const { dialect: dialect7, schema: schema5, table: table4, out, credentials: credentials2 } = opts;
84829
+ const { dialect: dialect4, schema: schema5, table: table4, out, credentials: credentials2 } = opts;
85821
84830
  try {
85822
- if (dialect7 === "postgresql") {
84831
+ if (dialect4 === "postgresql") {
85823
84832
  if ("driver" in credentials2) {
85824
84833
  const { driver: driver2 } = credentials2;
85825
84834
  if (driver2 === "aws-data-api") {
@@ -85850,7 +84859,7 @@ var migrate = command({
85850
84859
  migrationsSchema: schema5
85851
84860
  })
85852
84861
  );
85853
- } else if (dialect7 === "mysql") {
84862
+ } else if (dialect4 === "mysql") {
85854
84863
  const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
85855
84864
  const { migrate: migrate2 } = await connectToMySQL2(credentials2);
85856
84865
  await (0, import_hanji13.renderWithTask)(
@@ -85861,7 +84870,7 @@ var migrate = command({
85861
84870
  migrationsSchema: schema5
85862
84871
  })
85863
84872
  );
85864
- } else if (dialect7 === "sqlite") {
84873
+ } else if (dialect4 === "sqlite") {
85865
84874
  const { connectToSQLite: connectToSQLite2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
85866
84875
  const { migrate: migrate2 } = await connectToSQLite2(credentials2);
85867
84876
  await (0, import_hanji13.renderWithTask)(
@@ -85872,7 +84881,7 @@ var migrate = command({
85872
84881
  migrationsSchema: schema5
85873
84882
  })
85874
84883
  );
85875
- } else if (dialect7 === "turso") {
84884
+ } else if (dialect4 === "turso") {
85876
84885
  const { connectToLibSQL: connectToLibSQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
85877
84886
  const { migrate: migrate2 } = await connectToLibSQL2(credentials2);
85878
84887
  await (0, import_hanji13.renderWithTask)(
@@ -85884,7 +84893,7 @@ var migrate = command({
85884
84893
  })
85885
84894
  );
85886
84895
  } else {
85887
- assertUnreachable(dialect7);
84896
+ assertUnreachable(dialect4);
85888
84897
  }
85889
84898
  } catch (e2) {
85890
84899
  console.error(e2);
@@ -85918,6 +84927,7 @@ var push = command({
85918
84927
  options: {
85919
84928
  config: optionConfig,
85920
84929
  dialect: optionDialect,
84930
+ casing: optionCasing,
85921
84931
  schema: string().desc("Path to a schema file or folder"),
85922
84932
  ...optionsFilters,
85923
84933
  ...optionsDatabaseCredentials,
@@ -85946,7 +84956,8 @@ var push = command({
85946
84956
  "authToken",
85947
84957
  "schemaFilters",
85948
84958
  "extensionsFilters",
85949
- "tablesFilter"
84959
+ "tablesFilter",
84960
+ "casing"
85950
84961
  ]
85951
84962
  );
85952
84963
  return preparePushConfig(opts, from);
@@ -85955,17 +84966,18 @@ var push = command({
85955
84966
  await assertPackages("drizzle-orm");
85956
84967
  await assertOrmCoreVersion();
85957
84968
  const {
85958
- dialect: dialect7,
84969
+ dialect: dialect4,
85959
84970
  schemaPath,
85960
84971
  strict,
85961
84972
  verbose,
85962
84973
  credentials: credentials2,
85963
84974
  tablesFilter,
85964
84975
  schemasFilter,
85965
- force
84976
+ force,
84977
+ casing: casing2
85966
84978
  } = config;
85967
84979
  try {
85968
- if (dialect7 === "mysql") {
84980
+ if (dialect4 === "mysql") {
85969
84981
  const { mysqlPush: mysqlPush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
85970
84982
  await mysqlPush2(
85971
84983
  schemaPath,
@@ -85973,9 +84985,10 @@ var push = command({
85973
84985
  tablesFilter,
85974
84986
  strict,
85975
84987
  verbose,
85976
- force
84988
+ force,
84989
+ casing2
85977
84990
  );
85978
- } else if (dialect7 === "postgresql") {
84991
+ } else if (dialect4 === "postgresql") {
85979
84992
  if ("driver" in credentials2) {
85980
84993
  const { driver: driver2 } = credentials2;
85981
84994
  if (driver2 === "aws-data-api") {
@@ -86004,9 +85017,10 @@ var push = command({
86004
85017
  credentials2,
86005
85018
  tablesFilter,
86006
85019
  schemasFilter,
86007
- force
85020
+ force,
85021
+ casing2
86008
85022
  );
86009
- } else if (dialect7 === "sqlite") {
85023
+ } else if (dialect4 === "sqlite") {
86010
85024
  const { sqlitePush: sqlitePush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
86011
85025
  await sqlitePush2(
86012
85026
  schemaPath,
@@ -86014,9 +85028,10 @@ var push = command({
86014
85028
  strict,
86015
85029
  credentials2,
86016
85030
  tablesFilter,
86017
- force
85031
+ force,
85032
+ casing2
86018
85033
  );
86019
- } else if (dialect7 === "turso") {
85034
+ } else if (dialect4 === "turso") {
86020
85035
  const { libSQLPush: libSQLPush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
86021
85036
  await libSQLPush2(
86022
85037
  schemaPath,
@@ -86024,10 +85039,11 @@ var push = command({
86024
85039
  strict,
86025
85040
  credentials2,
86026
85041
  tablesFilter,
86027
- force
85042
+ force,
85043
+ casing2
86028
85044
  );
86029
85045
  } else {
86030
- assertUnreachable(dialect7);
85046
+ assertUnreachable(dialect4);
86031
85047
  }
86032
85048
  } catch (e2) {
86033
85049
  console.error(e2);
@@ -86048,8 +85064,8 @@ var check = command({
86048
85064
  },
86049
85065
  handler: async (config) => {
86050
85066
  await assertOrmCoreVersion();
86051
- const { out, dialect: dialect7 } = config;
86052
- checkHandler(out, dialect7);
85067
+ const { out, dialect: dialect4 } = config;
85068
+ checkHandler(out, dialect4);
86053
85069
  console.log("Everything's fine \u{1F436}\u{1F525}");
86054
85070
  }
86055
85071
  });
@@ -86066,15 +85082,15 @@ var up = command({
86066
85082
  },
86067
85083
  handler: async (config) => {
86068
85084
  await assertOrmCoreVersion();
86069
- const { out, dialect: dialect7 } = config;
85085
+ const { out, dialect: dialect4 } = config;
86070
85086
  await assertPackages("drizzle-orm");
86071
- if (dialect7 === "postgresql") {
85087
+ if (dialect4 === "postgresql") {
86072
85088
  upPgHandler(out);
86073
85089
  }
86074
- if (dialect7 === "mysql") {
85090
+ if (dialect4 === "mysql") {
86075
85091
  upMysqlHandler(out);
86076
85092
  }
86077
- if (dialect7 === "sqlite" || dialect7 === "turso") {
85093
+ if (dialect4 === "sqlite" || dialect4 === "turso") {
86078
85094
  upSqliteHandler(out);
86079
85095
  }
86080
85096
  }
@@ -86121,7 +85137,7 @@ var pull = command({
86121
85137
  await assertPackages("drizzle-orm");
86122
85138
  await assertOrmCoreVersion();
86123
85139
  const {
86124
- dialect: dialect7,
85140
+ dialect: dialect4,
86125
85141
  credentials: credentials2,
86126
85142
  out,
86127
85143
  casing: casing2,
@@ -86138,7 +85154,7 @@ var pull = command({
86138
85154
  );
86139
85155
  console.log();
86140
85156
  try {
86141
- if (dialect7 === "postgresql") {
85157
+ if (dialect4 === "postgresql") {
86142
85158
  if ("driver" in credentials2) {
86143
85159
  const { driver: driver2 } = credentials2;
86144
85160
  if (driver2 === "aws-data-api") {
@@ -86169,7 +85185,7 @@ var pull = command({
86169
85185
  schemasFilter,
86170
85186
  prefix2
86171
85187
  );
86172
- } else if (dialect7 === "mysql") {
85188
+ } else if (dialect4 === "mysql") {
86173
85189
  const { introspectMysql: introspectMysql2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
86174
85190
  await introspectMysql2(
86175
85191
  casing2,
@@ -86179,7 +85195,7 @@ var pull = command({
86179
85195
  tablesFilter,
86180
85196
  prefix2
86181
85197
  );
86182
- } else if (dialect7 === "sqlite") {
85198
+ } else if (dialect4 === "sqlite") {
86183
85199
  const { introspectSqlite: introspectSqlite2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
86184
85200
  await introspectSqlite2(
86185
85201
  casing2,
@@ -86189,7 +85205,7 @@ var pull = command({
86189
85205
  tablesFilter,
86190
85206
  prefix2
86191
85207
  );
86192
- } else if (dialect7 === "turso") {
85208
+ } else if (dialect4 === "turso") {
86193
85209
  const { introspectLibSQL: introspectLibSQL2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
86194
85210
  await introspectLibSQL2(
86195
85211
  casing2,
@@ -86200,7 +85216,7 @@ var pull = command({
86200
85216
  prefix2
86201
85217
  );
86202
85218
  } else {
86203
- assertUnreachable(dialect7);
85219
+ assertUnreachable(dialect4);
86204
85220
  }
86205
85221
  } catch (e2) {
86206
85222
  console.error(e2);
@@ -86238,7 +85254,7 @@ var studio = command({
86238
85254
  await assertPackages("drizzle-orm");
86239
85255
  assertStudioNodeVersion();
86240
85256
  const {
86241
- dialect: dialect7,
85257
+ dialect: dialect4,
86242
85258
  schema: schemaPath,
86243
85259
  port,
86244
85260
  host,
@@ -86254,7 +85270,7 @@ var studio = command({
86254
85270
  } = await Promise.resolve().then(() => (init_studio2(), studio_exports));
86255
85271
  let setup;
86256
85272
  try {
86257
- if (dialect7 === "postgresql") {
85273
+ if (dialect4 === "postgresql") {
86258
85274
  if ("driver" in credentials2) {
86259
85275
  const { driver: driver2 } = credentials2;
86260
85276
  if (driver2 === "aws-data-api") {
@@ -86277,17 +85293,17 @@ var studio = command({
86277
85293
  }
86278
85294
  const { schema: schema5, relations: relations4, files } = schemaPath ? await preparePgSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86279
85295
  setup = await drizzleForPostgres2(credentials2, schema5, relations4, files);
86280
- } else if (dialect7 === "mysql") {
85296
+ } else if (dialect4 === "mysql") {
86281
85297
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareMySqlSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86282
85298
  setup = await drizzleForMySQL2(credentials2, schema5, relations4, files);
86283
- } else if (dialect7 === "sqlite") {
85299
+ } else if (dialect4 === "sqlite") {
86284
85300
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareSQLiteSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86285
85301
  setup = await drizzleForSQLite2(credentials2, schema5, relations4, files);
86286
- } else if (dialect7 === "turso") {
85302
+ } else if (dialect4 === "turso") {
86287
85303
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareSQLiteSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
86288
85304
  setup = await drizzleForLibSQL(credentials2, schema5, relations4, files);
86289
85305
  } else {
86290
- assertUnreachable(dialect7);
85306
+ assertUnreachable(dialect4);
86291
85307
  }
86292
85308
  const { prepareServer: prepareServer2 } = await Promise.resolve().then(() => (init_studio2(), studio_exports));
86293
85309
  const server = await prepareServer2(setup);
@@ -86338,7 +85354,7 @@ init_utils2();
86338
85354
  var version2 = async () => {
86339
85355
  const { npmVersion } = await ormCoreVersions();
86340
85356
  const ormVersion = npmVersion ? `drizzle-orm: v${npmVersion}` : "";
86341
- const envVersion = "0.25.0-f5f3e49";
85357
+ const envVersion = "0.25.0";
86342
85358
  const kitVersion = envVersion ? `v${envVersion}` : "--";
86343
85359
  const versions = `drizzle-kit: ${kitVersion}
86344
85360
  ${ormVersion}`;