drizzle-kit 0.25.0-d1da3b8 → 0.25.0-f5d46d3
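This build threads a new `casing` option ("snake_case" or "camelCase") through drizzle-kit: it is validated in the common config schema (`casingTypes` / `casingType`), accepted by the generate and push commands, and passed down to the MySQL, PostgreSQL, and SQLite serializers, which now resolve column names with the new `getColumnCasing` helper (using drizzle-orm's `toCamelCase`/`toSnakeCase` when a column was defined via `keyAsName`) and construct their dialects with `{ casing }`. A minimal sketch of how the option would be supplied in a drizzle config file — the file name, paths, and surrounding values are illustrative, not taken from this diff:

    // drizzle.config.ts (illustrative)
    import { defineConfig } from 'drizzle-kit';

    export default defineConfig({
      dialect: 'postgresql',        // any dialect handled in this bundle: postgresql | mysql | sqlite | turso
      schema: './src/schema.ts',    // illustrative path
      out: './drizzle',
      casing: 'snake_case',         // validated against casingTypes = ['snake_case', 'camelCase']
    });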

package/bin.cjs CHANGED
@@ -6591,11 +6591,17 @@ function findAddedAndRemoved(columnNames1, columnNames2) {
6591
6591
  const removedColumns = columnNames1.filter((it) => !set2.has(it));
6592
6592
  return { addedColumns, removedColumns };
6593
6593
  }
6594
- var import_fs, import_path, import_url, copy, objectValues, assertV1OutFolder, dryJournal, prepareOutFolder, validatorForDialect, validateWithReport, prepareMigrationFolder, prepareMigrationMeta, schemaRenameKey, tableRenameKey, columnRenameKey, normaliseSQLiteUrl, normalisePGliteUrl;
6594
+ function getColumnCasing(column7, casing2) {
6595
+ if (!column7.name)
6596
+ return "";
6597
+ return !column7.keyAsName || casing2 === void 0 ? column7.name : casing2 === "camelCase" ? (0, import_casing.toCamelCase)(column7.name) : (0, import_casing.toSnakeCase)(column7.name);
6598
+ }
6599
+ var import_casing, import_fs, import_path, import_url, copy, objectValues, assertV1OutFolder, dryJournal, prepareOutFolder, validatorForDialect, validateWithReport, prepareMigrationFolder, prepareMigrationMeta, schemaRenameKey, tableRenameKey, columnRenameKey, normaliseSQLiteUrl, normalisePGliteUrl;
6595
6600
  var init_utils = __esm({
6596
6601
  "src/utils.ts"() {
6597
6602
  "use strict";
6598
6603
  init_source();
6604
+ import_casing = require("drizzle-orm/casing");
6599
6605
  import_fs = require("fs");
6600
6606
  import_path = require("path");
6601
6607
  import_url = require("url");
@@ -6625,27 +6631,27 @@ var init_utils = __esm({
6625
6631
  process.exit(1);
6626
6632
  }
6627
6633
  };
6628
- dryJournal = (dialect7) => {
6634
+ dryJournal = (dialect4) => {
6629
6635
  return {
6630
6636
  version: snapshotVersion,
6631
- dialect: dialect7,
6637
+ dialect: dialect4,
6632
6638
  entries: []
6633
6639
  };
6634
6640
  };
6635
- prepareOutFolder = (out, dialect7) => {
6641
+ prepareOutFolder = (out, dialect4) => {
6636
6642
  const meta = (0, import_path.join)(out, "meta");
6637
6643
  const journalPath = (0, import_path.join)(meta, "_journal.json");
6638
6644
  if (!(0, import_fs.existsSync)((0, import_path.join)(out, "meta"))) {
6639
6645
  (0, import_fs.mkdirSync)(meta, { recursive: true });
6640
- (0, import_fs.writeFileSync)(journalPath, JSON.stringify(dryJournal(dialect7)));
6646
+ (0, import_fs.writeFileSync)(journalPath, JSON.stringify(dryJournal(dialect4)));
6641
6647
  }
6642
6648
  const journal = JSON.parse((0, import_fs.readFileSync)(journalPath).toString());
6643
6649
  const snapshots = (0, import_fs.readdirSync)(meta).filter((it) => !it.startsWith("_")).map((it) => (0, import_path.join)(meta, it));
6644
6650
  snapshots.sort();
6645
6651
  return { meta, snapshots, journal };
6646
6652
  };
6647
- validatorForDialect = (dialect7) => {
6648
- switch (dialect7) {
6653
+ validatorForDialect = (dialect4) => {
6654
+ switch (dialect4) {
6649
6655
  case "postgresql":
6650
6656
  return { validator: backwardCompatiblePgSchema, version: 7 };
6651
6657
  case "sqlite":
@@ -6656,8 +6662,8 @@ var init_utils = __esm({
6656
6662
  return { validator: backwardCompatibleMysqlSchema, version: 5 };
6657
6663
  }
6658
6664
  };
6659
- validateWithReport = (snapshots, dialect7) => {
6660
- const { validator: validator2, version: version3 } = validatorForDialect(dialect7);
6665
+ validateWithReport = (snapshots, dialect4) => {
6666
+ const { validator: validator2, version: version3 } = validatorForDialect(dialect4);
6661
6667
  const result = snapshots.reduce(
6662
6668
  (accum, it) => {
6663
6669
  const raw2 = JSON.parse((0, import_fs.readFileSync)(`./${it}`).toString());
@@ -6698,9 +6704,9 @@ var init_utils = __esm({
6698
6704
  );
6699
6705
  return result;
6700
6706
  };
6701
- prepareMigrationFolder = (outFolder = "drizzle", dialect7) => {
6702
- const { snapshots, journal } = prepareOutFolder(outFolder, dialect7);
6703
- const report = validateWithReport(snapshots, dialect7);
6707
+ prepareMigrationFolder = (outFolder = "drizzle", dialect4) => {
6708
+ const { snapshots, journal } = prepareOutFolder(outFolder, dialect4);
6709
+ const report = validateWithReport(snapshots, dialect4);
6704
6710
  if (report.nonLatest.length > 0) {
6705
6711
  console.log(
6706
6712
  report.nonLatest.map((it) => {
@@ -11250,7 +11256,7 @@ var init_outputs = __esm({
11250
11256
  });
11251
11257
 
11252
11258
  // src/cli/validations/common.ts
11253
- var assertCollisions, sqliteDriversLiterals, postgresqlDriversLiterals, prefixes, prefix, sqliteDriver, postgresDriver, driver, configMigrations, configCommonSchema, casing, introspectParams, configIntrospectCliSchema, configGenerateSchema, configPushSchema, drivers, wrapParam;
11259
+ var assertCollisions, sqliteDriversLiterals, postgresqlDriversLiterals, prefixes, prefix, casingTypes, casingType, sqliteDriver, postgresDriver, driver, configMigrations, configCommonSchema, casing, introspectParams, configIntrospectCliSchema, configGenerateSchema, configPushSchema, drivers, wrapParam;
11254
11260
  var init_common = __esm({
11255
11261
  "src/cli/validations/common.ts"() {
11256
11262
  "use strict";
@@ -11294,6 +11300,8 @@ var init_common = __esm({
11294
11300
  {
11295
11301
  const _2 = "";
11296
11302
  }
11303
+ casingTypes = ["snake_case", "camelCase"];
11304
+ casingType = enumType(casingTypes);
11297
11305
  sqliteDriver = unionType(sqliteDriversLiterals);
11298
11306
  postgresDriver = unionType(postgresqlDriversLiterals);
11299
11307
  driver = unionType([sqliteDriver, postgresDriver]);
@@ -11312,7 +11320,8 @@ var init_common = __esm({
11312
11320
  tablesFilter: unionType([stringType(), stringType().array()]).optional(),
11313
11321
  schemaFilter: unionType([stringType(), stringType().array()]).default(["public"]),
11314
11322
  migrations: configMigrations,
11315
- dbCredentials: anyType().optional()
11323
+ dbCredentials: anyType().optional(),
11324
+ casing: casingType.optional()
11316
11325
  }).passthrough();
11317
11326
  casing = unionType([literalType("camel"), literalType("preserve")]).default(
11318
11327
  "camel"
@@ -11394,6 +11403,7 @@ var init_cli = __esm({
11394
11403
  }).strict();
11395
11404
  pushParams = objectType({
11396
11405
  dialect: dialect3,
11406
+ casing: casingType.optional(),
11397
11407
  schema: unionType([stringType(), stringType().array()]),
11398
11408
  tablesFilter: unionType([stringType(), stringType().array()]).optional(),
11399
11409
  schemaFilter: unionType([stringType(), stringType().array()]).optional().default(["public"]),
@@ -17410,11 +17420,11 @@ var init_utils4 = __esm({
17410
17420
  prepareGenerateConfig = async (options, from) => {
17411
17421
  var _a;
17412
17422
  const config = from === "config" ? await drizzleConfigFromFile(options.config) : options;
17413
- const { schema: schema5, out, breakpoints, dialect: dialect7, driver: driver2 } = config;
17414
- if (!schema5 || !dialect7) {
17423
+ const { schema: schema5, out, breakpoints, dialect: dialect4, driver: driver2, casing: casing2 } = config;
17424
+ if (!schema5 || !dialect4) {
17415
17425
  console.log(error("Please provide required params:"));
17416
17426
  console.log(wrapParam("schema", schema5));
17417
- console.log(wrapParam("dialect", dialect7));
17427
+ console.log(wrapParam("dialect", dialect4));
17418
17428
  console.log(wrapParam("out", out, true));
17419
17429
  process.exit(1);
17420
17430
  }
@@ -17425,14 +17435,15 @@ var init_utils4 = __esm({
17425
17435
  }
17426
17436
  const prefix2 = ("migrations" in config ? (_a = config.migrations) == null ? void 0 : _a.prefix : options.prefix) || "index";
17427
17437
  return {
17428
- dialect: dialect7,
17438
+ dialect: dialect4,
17429
17439
  name: options.name,
17430
17440
  custom: options.custom || false,
17431
17441
  prefix: prefix2,
17432
17442
  breakpoints: breakpoints || true,
17433
17443
  schema: schema5,
17434
17444
  out: out || "drizzle",
17435
- bundle: driver2 === "expo"
17445
+ bundle: driver2 === "expo",
17446
+ casing: casing2
17436
17447
  };
17437
17448
  };
17438
17449
  flattenDatabaseCredentials = (config) => {
@@ -17499,6 +17510,7 @@ var init_utils4 = __esm({
17499
17510
  verbose: config.verbose ?? false,
17500
17511
  force: options.force ?? false,
17501
17512
  credentials: parsed2.data,
17513
+ casing: config.casing,
17502
17514
  tablesFilter,
17503
17515
  schemasFilter
17504
17516
  };
@@ -17516,6 +17528,7 @@ var init_utils4 = __esm({
17516
17528
  verbose: config.verbose ?? false,
17517
17529
  force: options.force ?? false,
17518
17530
  credentials: parsed2.data,
17531
+ casing: config.casing,
17519
17532
  tablesFilter,
17520
17533
  schemasFilter
17521
17534
  };
@@ -17533,6 +17546,7 @@ var init_utils4 = __esm({
17533
17546
  verbose: config.verbose ?? false,
17534
17547
  force: options.force ?? false,
17535
17548
  credentials: parsed2.data,
17549
+ casing: config.casing,
17536
17550
  tablesFilter,
17537
17551
  schemasFilter
17538
17552
  };
@@ -17550,6 +17564,7 @@ var init_utils4 = __esm({
17550
17564
  verbose: config.verbose ?? false,
17551
17565
  force: options.force ?? false,
17552
17566
  credentials: parsed2.data,
17567
+ casing: config.casing,
17553
17568
  tablesFilter,
17554
17569
  schemasFilter
17555
17570
  };
@@ -17568,11 +17583,11 @@ var init_utils4 = __esm({
17568
17583
  process.exit(1);
17569
17584
  }
17570
17585
  const config = parsed.data;
17571
- const dialect7 = config.dialect;
17586
+ const dialect4 = config.dialect;
17572
17587
  const tablesFilterConfig = config.tablesFilter;
17573
17588
  const tablesFilter = tablesFilterConfig ? typeof tablesFilterConfig === "string" ? [tablesFilterConfig] : tablesFilterConfig : [];
17574
17589
  if (config.extensionsFilters) {
17575
- if (config.extensionsFilters.includes("postgis") && dialect7 === "postgresql") {
17590
+ if (config.extensionsFilters.includes("postgis") && dialect4 === "postgresql") {
17576
17591
  tablesFilter.push(
17577
17592
  ...["!geography_columns", "!geometry_columns", "!spatial_ref_sys"]
17578
17593
  );
@@ -17580,7 +17595,7 @@ var init_utils4 = __esm({
17580
17595
  }
17581
17596
  const schemasFilterConfig = config.schemaFilter;
17582
17597
  const schemasFilter = schemasFilterConfig ? typeof schemasFilterConfig === "string" ? [schemasFilterConfig] : schemasFilterConfig : [];
17583
- if (dialect7 === "postgresql") {
17598
+ if (dialect4 === "postgresql") {
17584
17599
  const parsed2 = postgresCredentials.safeParse(config);
17585
17600
  if (!parsed2.success) {
17586
17601
  printConfigConnectionIssues3(config);
@@ -17597,7 +17612,7 @@ var init_utils4 = __esm({
17597
17612
  prefix: ((_a = config.migrations) == null ? void 0 : _a.prefix) || "index"
17598
17613
  };
17599
17614
  }
17600
- if (dialect7 === "mysql") {
17615
+ if (dialect4 === "mysql") {
17601
17616
  const parsed2 = mysqlCredentials.safeParse(config);
17602
17617
  if (!parsed2.success) {
17603
17618
  printConfigConnectionIssues2(config);
@@ -17614,7 +17629,7 @@ var init_utils4 = __esm({
17614
17629
  prefix: ((_b = config.migrations) == null ? void 0 : _b.prefix) || "index"
17615
17630
  };
17616
17631
  }
17617
- if (dialect7 === "sqlite") {
17632
+ if (dialect4 === "sqlite") {
17618
17633
  const parsed2 = sqliteCredentials.safeParse(config);
17619
17634
  if (!parsed2.success) {
17620
17635
  printConfigConnectionIssues4(config, "pull");
@@ -17631,14 +17646,14 @@ var init_utils4 = __esm({
17631
17646
  prefix: ((_c = config.migrations) == null ? void 0 : _c.prefix) || "index"
17632
17647
  };
17633
17648
  }
17634
- if (dialect7 === "turso") {
17649
+ if (dialect4 === "turso") {
17635
17650
  const parsed2 = libSQLCredentials.safeParse(config);
17636
17651
  if (!parsed2.success) {
17637
17652
  printConfigConnectionIssues(config, "pull");
17638
17653
  process.exit(1);
17639
17654
  }
17640
17655
  return {
17641
- dialect: dialect7,
17656
+ dialect: dialect4,
17642
17657
  out: config.out,
17643
17658
  breakpoints: config.breakpoints,
17644
17659
  casing: config.casing,
@@ -17648,7 +17663,7 @@ var init_utils4 = __esm({
17648
17663
  prefix: ((_d = config.migrations) == null ? void 0 : _d.prefix) || "index"
17649
17664
  };
17650
17665
  }
17651
- assertUnreachable(dialect7);
17666
+ assertUnreachable(dialect4);
17652
17667
  };
17653
17668
  prepareStudioConfig = async (options) => {
17654
17669
  const params = studioCliParams.parse(options);
@@ -17665,9 +17680,9 @@ var init_utils4 = __esm({
17665
17680
  process.exit(1);
17666
17681
  }
17667
17682
  const { host, port } = params;
17668
- const { dialect: dialect7, schema: schema5 } = result.data;
17683
+ const { dialect: dialect4, schema: schema5 } = result.data;
17669
17684
  const flattened = flattenDatabaseCredentials(config);
17670
- if (dialect7 === "postgresql") {
17685
+ if (dialect4 === "postgresql") {
17671
17686
  const parsed = postgresCredentials.safeParse(flattened);
17672
17687
  if (!parsed.success) {
17673
17688
  printConfigConnectionIssues3(flattened);
@@ -17675,14 +17690,14 @@ var init_utils4 = __esm({
17675
17690
  }
17676
17691
  const credentials2 = parsed.data;
17677
17692
  return {
17678
- dialect: dialect7,
17693
+ dialect: dialect4,
17679
17694
  schema: schema5,
17680
17695
  host,
17681
17696
  port,
17682
17697
  credentials: credentials2
17683
17698
  };
17684
17699
  }
17685
- if (dialect7 === "mysql") {
17700
+ if (dialect4 === "mysql") {
17686
17701
  const parsed = mysqlCredentials.safeParse(flattened);
17687
17702
  if (!parsed.success) {
17688
17703
  printConfigConnectionIssues2(flattened);
@@ -17690,14 +17705,14 @@ var init_utils4 = __esm({
17690
17705
  }
17691
17706
  const credentials2 = parsed.data;
17692
17707
  return {
17693
- dialect: dialect7,
17708
+ dialect: dialect4,
17694
17709
  schema: schema5,
17695
17710
  host,
17696
17711
  port,
17697
17712
  credentials: credentials2
17698
17713
  };
17699
17714
  }
17700
- if (dialect7 === "sqlite") {
17715
+ if (dialect4 === "sqlite") {
17701
17716
  const parsed = sqliteCredentials.safeParse(flattened);
17702
17717
  if (!parsed.success) {
17703
17718
  printConfigConnectionIssues4(flattened, "studio");
@@ -17705,14 +17720,14 @@ var init_utils4 = __esm({
17705
17720
  }
17706
17721
  const credentials2 = parsed.data;
17707
17722
  return {
17708
- dialect: dialect7,
17723
+ dialect: dialect4,
17709
17724
  schema: schema5,
17710
17725
  host,
17711
17726
  port,
17712
17727
  credentials: credentials2
17713
17728
  };
17714
17729
  }
17715
- if (dialect7 === "turso") {
17730
+ if (dialect4 === "turso") {
17716
17731
  const parsed = libSQLCredentials.safeParse(flattened);
17717
17732
  if (!parsed.success) {
17718
17733
  printConfigConnectionIssues(flattened, "studio");
@@ -17720,14 +17735,14 @@ var init_utils4 = __esm({
17720
17735
  }
17721
17736
  const credentials2 = parsed.data;
17722
17737
  return {
17723
- dialect: dialect7,
17738
+ dialect: dialect4,
17724
17739
  schema: schema5,
17725
17740
  host,
17726
17741
  port,
17727
17742
  credentials: credentials2
17728
17743
  };
17729
17744
  }
17730
- assertUnreachable(dialect7);
17745
+ assertUnreachable(dialect4);
17731
17746
  };
17732
17747
  migrateConfig = objectType({
17733
17748
  dialect: dialect3,
@@ -17742,10 +17757,10 @@ var init_utils4 = __esm({
17742
17757
  console.log(wrapParam("dialect", config.dialect));
17743
17758
  process.exit(1);
17744
17759
  }
17745
- const { dialect: dialect7, out } = parsed.data;
17760
+ const { dialect: dialect4, out } = parsed.data;
17746
17761
  const { schema: schema5, table: table4 } = parsed.data.migrations || {};
17747
17762
  const flattened = flattenDatabaseCredentials(config);
17748
- if (dialect7 === "postgresql") {
17763
+ if (dialect4 === "postgresql") {
17749
17764
  const parsed2 = postgresCredentials.safeParse(flattened);
17750
17765
  if (!parsed2.success) {
17751
17766
  printConfigConnectionIssues3(flattened);
@@ -17753,14 +17768,14 @@ var init_utils4 = __esm({
17753
17768
  }
17754
17769
  const credentials2 = parsed2.data;
17755
17770
  return {
17756
- dialect: dialect7,
17771
+ dialect: dialect4,
17757
17772
  out,
17758
17773
  credentials: credentials2,
17759
17774
  schema: schema5,
17760
17775
  table: table4
17761
17776
  };
17762
17777
  }
17763
- if (dialect7 === "mysql") {
17778
+ if (dialect4 === "mysql") {
17764
17779
  const parsed2 = mysqlCredentials.safeParse(flattened);
17765
17780
  if (!parsed2.success) {
17766
17781
  printConfigConnectionIssues2(flattened);
@@ -17768,14 +17783,14 @@ var init_utils4 = __esm({
17768
17783
  }
17769
17784
  const credentials2 = parsed2.data;
17770
17785
  return {
17771
- dialect: dialect7,
17786
+ dialect: dialect4,
17772
17787
  out,
17773
17788
  credentials: credentials2,
17774
17789
  schema: schema5,
17775
17790
  table: table4
17776
17791
  };
17777
17792
  }
17778
- if (dialect7 === "sqlite") {
17793
+ if (dialect4 === "sqlite") {
17779
17794
  const parsed2 = sqliteCredentials.safeParse(flattened);
17780
17795
  if (!parsed2.success) {
17781
17796
  printConfigConnectionIssues4(flattened, "migrate");
@@ -17783,14 +17798,14 @@ var init_utils4 = __esm({
17783
17798
  }
17784
17799
  const credentials2 = parsed2.data;
17785
17800
  return {
17786
- dialect: dialect7,
17801
+ dialect: dialect4,
17787
17802
  out,
17788
17803
  credentials: credentials2,
17789
17804
  schema: schema5,
17790
17805
  table: table4
17791
17806
  };
17792
17807
  }
17793
- if (dialect7 === "turso") {
17808
+ if (dialect4 === "turso") {
17794
17809
  const parsed2 = libSQLCredentials.safeParse(flattened);
17795
17810
  if (!parsed2.success) {
17796
17811
  printConfigConnectionIssues(flattened, "migrate");
@@ -17798,14 +17813,14 @@ var init_utils4 = __esm({
17798
17813
  }
17799
17814
  const credentials2 = parsed2.data;
17800
17815
  return {
17801
- dialect: dialect7,
17816
+ dialect: dialect4,
17802
17817
  out,
17803
17818
  credentials: credentials2,
17804
17819
  schema: schema5,
17805
17820
  table: table4
17806
17821
  };
17807
17822
  }
17808
- assertUnreachable(dialect7);
17823
+ assertUnreachable(dialect4);
17809
17824
  };
17810
17825
  drizzleConfigFromFile = async (configPath) => {
17811
17826
  const prefix2 = process.env.TEST_CONFIG_PATH_PREFIX || "";
@@ -17834,6 +17849,7 @@ var init_utils4 = __esm({
17834
17849
  unregister();
17835
17850
  const res = configCommonSchema.safeParse(content);
17836
17851
  if (!res.success) {
17852
+ console.log(res.error);
17837
17853
  if (!("dialect" in content)) {
17838
17854
  console.log(error("Please specify 'dialect' param in config file"));
17839
17855
  }
@@ -17906,7 +17922,7 @@ function clearDefaults(defaultValue, collate) {
17906
17922
  return `(${resultDefault})`;
17907
17923
  }
17908
17924
  }
17909
- var import_drizzle_orm2, import_drizzle_orm3, import_mysql_core2, import_mysql_core3, dialect4, indexName, generateMySqlSnapshot, fromDatabase;
17925
+ var import_drizzle_orm2, import_drizzle_orm3, import_mysql_core2, import_mysql_core3, indexName, generateMySqlSnapshot, fromDatabase;
17910
17926
  var init_mysqlSerializer = __esm({
17911
17927
  "src/serializer/mysqlSerializer.ts"() {
17912
17928
  "use strict";
@@ -17916,12 +17932,13 @@ var init_mysqlSerializer = __esm({
17916
17932
  import_mysql_core2 = require("drizzle-orm/mysql-core");
17917
17933
  import_mysql_core3 = require("drizzle-orm/mysql-core");
17918
17934
  init_outputs();
17935
+ init_utils();
17919
17936
  init_serializer();
17920
- dialect4 = new import_mysql_core2.MySqlDialect();
17921
17937
  indexName = (tableName, columns) => {
17922
17938
  return `${tableName}_${columns.join("_")}_index`;
17923
17939
  };
17924
- generateMySqlSnapshot = (tables) => {
17940
+ generateMySqlSnapshot = (tables, casing2) => {
17941
+ const dialect4 = new import_mysql_core2.MySqlDialect({ casing: casing2 });
17925
17942
  const result = {};
17926
17943
  const internal = { tables: {}, indexes: {} };
17927
17944
  for (const table4 of tables) {
@@ -17940,12 +17957,13 @@ var init_mysqlSerializer = __esm({
17940
17957
  const primaryKeysObject = {};
17941
17958
  const uniqueConstraintObject = {};
17942
17959
  columns.forEach((column7) => {
17960
+ const name = getColumnCasing(column7, casing2);
17943
17961
  const notNull = column7.notNull;
17944
17962
  const sqlTypeLowered = column7.getSQLType().toLowerCase();
17945
17963
  const autoIncrement = typeof column7.autoIncrement === "undefined" ? false : column7.autoIncrement;
17946
17964
  const generated = column7.generated;
17947
17965
  const columnToSet = {
17948
- name: column7.name,
17966
+ name,
17949
17967
  type: column7.getSQLType(),
17950
17968
  primaryKey: false,
17951
17969
  // If field is autoincrement it's notNull by default
@@ -17959,9 +17977,9 @@ var init_mysqlSerializer = __esm({
17959
17977
  } : void 0
17960
17978
  };
17961
17979
  if (column7.primary) {
17962
- primaryKeysObject[`${tableName}_${column7.name}`] = {
17963
- name: `${tableName}_${column7.name}`,
17964
- columns: [column7.name]
17980
+ primaryKeysObject[`${tableName}_${name}`] = {
17981
+ name: `${tableName}_${name}`,
17982
+ columns: [name]
17965
17983
  };
17966
17984
  }
17967
17985
  if (column7.isUnique) {
@@ -17975,7 +17993,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
17975
17993
  The unique constraint ${source_default.underline.blue(
17976
17994
  column7.uniqueName
17977
17995
  )} on the ${source_default.underline.blue(
17978
- column7.name
17996
+ name
17979
17997
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
17980
17998
  existingUnique.columns.join(",")
17981
17999
  )} columns
@@ -17990,7 +18008,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
17990
18008
  }
17991
18009
  if (column7.default !== void 0) {
17992
18010
  if ((0, import_drizzle_orm2.is)(column7.default, import_drizzle_orm3.SQL)) {
17993
- columnToSet.default = sqlToStr(column7.default);
18011
+ columnToSet.default = sqlToStr(column7.default, casing2);
17994
18012
  } else {
17995
18013
  if (typeof column7.default === "string") {
17996
18014
  columnToSet.default = `'${column7.default}'`;
@@ -18012,20 +18030,27 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18012
18030
  }
18013
18031
  }
18014
18032
  }
18015
- columnsObject[column7.name] = columnToSet;
18033
+ columnsObject[name] = columnToSet;
18016
18034
  });
18017
18035
  primaryKeys.map((pk) => {
18018
- const columnNames = pk.columns.map((c) => c.name);
18019
- primaryKeysObject[pk.getName()] = {
18020
- name: pk.getName(),
18036
+ const originalColumnNames = pk.columns.map((c) => c.name);
18037
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing2));
18038
+ let name = pk.getName();
18039
+ if (casing2 !== void 0) {
18040
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
18041
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
18042
+ }
18043
+ }
18044
+ primaryKeysObject[name] = {
18045
+ name,
18021
18046
  columns: columnNames
18022
18047
  };
18023
18048
  for (const column7 of pk.columns) {
18024
- columnsObject[column7.name].notNull = true;
18049
+ columnsObject[getColumnCasing(column7, casing2)].notNull = true;
18025
18050
  }
18026
18051
  });
18027
18052
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
18028
- const columnNames = unq.columns.map((c) => c.name);
18053
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
18029
18054
  const name = unq.name ?? (0, import_mysql_core2.uniqueKeyName)(table4, columnNames);
18030
18055
  const existingUnique = uniqueConstraintObject[name];
18031
18056
  if (typeof existingUnique !== "undefined") {
@@ -18053,15 +18078,25 @@ The unique constraint ${source_default.underline.blue(
18053
18078
  };
18054
18079
  });
18055
18080
  const fks = foreignKeys.map((fk4) => {
18056
- const name = fk4.getName();
18057
18081
  const tableFrom = tableName;
18058
18082
  const onDelete = fk4.onDelete ?? "no action";
18059
18083
  const onUpdate = fk4.onUpdate ?? "no action";
18060
18084
  const reference = fk4.reference();
18061
18085
  const referenceFT = reference.foreignTable;
18062
18086
  const tableTo = (0, import_drizzle_orm2.getTableName)(referenceFT);
18063
- const columnsFrom = reference.columns.map((it) => it.name);
18064
- const columnsTo = reference.foreignColumns.map((it) => it.name);
18087
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
18088
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
18089
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
18090
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
18091
+ let name = fk4.getName();
18092
+ if (casing2 !== void 0) {
18093
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
18094
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
18095
+ }
18096
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
18097
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
18098
+ }
18099
+ }
18065
18100
  return {
18066
18101
  name,
18067
18102
  tableFrom,
@@ -18101,7 +18136,7 @@ The unique constraint ${source_default.underline.blue(
18101
18136
  }
18102
18137
  return sql;
18103
18138
  } else {
18104
- return `${it.name}`;
18139
+ return `${getColumnCasing(it, casing2)}`;
18105
18140
  }
18106
18141
  });
18107
18142
  if (value.config.unique) {
@@ -18546,7 +18581,7 @@ function buildArrayString(array, sqlType) {
18546
18581
  }).join(",");
18547
18582
  return `{${values}}`;
18548
18583
  }
18549
- var import_drizzle_orm5, import_pg_core2, import_pg_core3, dialect5, indexName2, generatePgSnapshot, trimChar, fromDatabase2, defaultForColumn;
18584
+ var import_drizzle_orm5, import_pg_core2, import_pg_core3, indexName2, generatePgSnapshot, trimChar, fromDatabase2, defaultForColumn;
18550
18585
  var init_pgSerializer = __esm({
18551
18586
  "src/serializer/pgSerializer.ts"() {
18552
18587
  "use strict";
@@ -18558,12 +18593,12 @@ var init_pgSerializer = __esm({
18558
18593
  init_outputs();
18559
18594
  init_utils();
18560
18595
  init_serializer();
18561
- dialect5 = new import_pg_core2.PgDialect();
18562
18596
  indexName2 = (tableName, columns) => {
18563
18597
  return `${tableName}_${columns.join("_")}_index`;
18564
18598
  };
18565
- generatePgSnapshot = (tables, enums, schemas, sequences, schemaFilter) => {
18599
+ generatePgSnapshot = (tables, enums, schemas, sequences, casing2, schemaFilter) => {
18566
18600
  var _a, _b, _c, _d, _e, _f;
18601
+ const dialect4 = new import_pg_core2.PgDialect({ casing: casing2 });
18567
18602
  const result = {};
18568
18603
  const sequencesToReturn = {};
18569
18604
  const indexesInSchema = {};
@@ -18588,6 +18623,7 @@ var init_pgSerializer = __esm({
18588
18623
  const uniqueConstraintObject = {};
18589
18624
  columns.forEach((column7) => {
18590
18625
  var _a2, _b2, _c2, _d2, _e2, _f2;
18626
+ const name = getColumnCasing(column7, casing2);
18591
18627
  const notNull = column7.notNull;
18592
18628
  const primaryKey = column7.primary;
18593
18629
  const sqlTypeLowered = column7.getSQLType().toLowerCase();
@@ -18600,18 +18636,18 @@ var init_pgSerializer = __esm({
18600
18636
  const startWith = stringFromIdentityProperty((_d2 = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _d2.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue);
18601
18637
  const cache = stringFromIdentityProperty((_e2 = identity == null ? void 0 : identity.sequenceOptions) == null ? void 0 : _e2.cache) ?? "1";
18602
18638
  const columnToSet = {
18603
- name: column7.name,
18639
+ name,
18604
18640
  type: column7.getSQLType(),
18605
18641
  typeSchema,
18606
18642
  primaryKey,
18607
18643
  notNull,
18608
18644
  generated: generated ? {
18609
- as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect5.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect5.sqlToQuery(generated.as()).sql : generated.as,
18645
+ as: (0, import_drizzle_orm5.is)(generated.as, import_drizzle_orm5.SQL) ? dialect4.sqlToQuery(generated.as).sql : typeof generated.as === "function" ? dialect4.sqlToQuery(generated.as()).sql : generated.as,
18610
18646
  type: "stored"
18611
18647
  } : void 0,
18612
18648
  identity: identity ? {
18613
18649
  type: identity.type,
18614
- name: identity.sequenceName ?? `${tableName}_${column7.name}_seq`,
18650
+ name: identity.sequenceName ?? `${tableName}_${name}_seq`,
18615
18651
  schema: schema5 ?? "public",
18616
18652
  increment,
18617
18653
  startWith,
@@ -18632,7 +18668,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18632
18668
  The unique constraint ${source_default.underline.blue(
18633
18669
  column7.uniqueName
18634
18670
  )} on the ${source_default.underline.blue(
18635
- column7.name
18671
+ name
18636
18672
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
18637
18673
  existingUnique.columns.join(",")
18638
18674
  )} columns
@@ -18648,7 +18684,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18648
18684
  }
18649
18685
  if (column7.default !== void 0) {
18650
18686
  if ((0, import_drizzle_orm5.is)(column7.default, import_drizzle_orm5.SQL)) {
18651
- columnToSet.default = sqlToStr(column7.default);
18687
+ columnToSet.default = sqlToStr(column7.default, casing2);
18652
18688
  } else {
18653
18689
  if (typeof column7.default === "string") {
18654
18690
  columnToSet.default = `'${column7.default}'`;
@@ -18676,17 +18712,24 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18676
18712
  }
18677
18713
  }
18678
18714
  }
18679
- columnsObject[column7.name] = columnToSet;
18715
+ columnsObject[name] = columnToSet;
18680
18716
  });
18681
18717
  primaryKeys.map((pk) => {
18682
- const columnNames = pk.columns.map((c) => c.name);
18683
- primaryKeysObject[pk.getName()] = {
18684
- name: pk.getName(),
18718
+ const originalColumnNames = pk.columns.map((c) => c.name);
18719
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing2));
18720
+ let name = pk.getName();
18721
+ if (casing2 !== void 0) {
18722
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
18723
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
18724
+ }
18725
+ }
18726
+ primaryKeysObject[name] = {
18727
+ name,
18685
18728
  columns: columnNames
18686
18729
  };
18687
18730
  });
18688
18731
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
18689
- const columnNames = unq.columns.map((c) => c.name);
18732
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
18690
18733
  const name = unq.name ?? (0, import_pg_core2.uniqueKeyName)(table4, columnNames);
18691
18734
  const existingUnique = uniqueConstraintObject[name];
18692
18735
  if (typeof existingUnique !== "undefined") {
@@ -18713,15 +18756,25 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18713
18756
  };
18714
18757
  });
18715
18758
  const fks = foreignKeys.map((fk4) => {
18716
- const name = fk4.getName();
18717
18759
  const tableFrom = tableName;
18718
18760
  const onDelete = fk4.onDelete;
18719
18761
  const onUpdate = fk4.onUpdate;
18720
18762
  const reference = fk4.reference();
18721
18763
  const tableTo = (0, import_drizzle_orm5.getTableName)(reference.foreignTable);
18722
18764
  const schemaTo = (0, import_pg_core3.getTableConfig)(reference.foreignTable).schema;
18723
- const columnsFrom = reference.columns.map((it) => it.name);
18724
- const columnsTo = reference.foreignColumns.map((it) => it.name);
18765
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
18766
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
18767
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
18768
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
18769
+ let name = fk4.getName();
18770
+ if (casing2 !== void 0) {
18771
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
18772
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
18773
+ }
18774
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
18775
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
18776
+ }
18777
+ }
18725
18778
  return {
18726
18779
  name,
18727
18780
  tableFrom,
@@ -18747,19 +18800,20 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
18747
18800
  ${withStyle.errorWarning(
18748
18801
  `Please specify an index name in ${(0, import_drizzle_orm5.getTableName)(
18749
18802
  value.config.table
18750
- )} table that has "${dialect5.sqlToQuery(it).sql}" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`
18803
+ )} table that has "${dialect4.sqlToQuery(it).sql}" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`
18751
18804
  )}`
18752
18805
  );
18753
18806
  process.exit(1);
18754
18807
  }
18755
18808
  }
18756
18809
  it = it;
18810
+ const name2 = getColumnCasing(it, casing2);
18757
18811
  if (!(0, import_drizzle_orm5.is)(it, import_drizzle_orm5.SQL) && it.type === "PgVector" && typeof it.indexConfig.opClass === "undefined") {
18758
18812
  console.log(
18759
18813
  `
18760
18814
  ${withStyle.errorWarning(
18761
18815
  `You are specifying an index on the ${source_default.blueBright(
18762
- it.name
18816
+ name2
18763
18817
  )} column inside the ${source_default.blueBright(
18764
18818
  tableName
18765
18819
  )} table with the ${source_default.blueBright(
@@ -18769,7 +18823,7 @@ ${withStyle.errorWarning(
18769
18823
  )}].
18770
18824
 
18771
18825
  You can specify it using current syntax: ${source_default.underline(
18772
- `index("${value.config.name}").using("${value.config.method}", table.${it.name}.op("${vectorOps[0]}"))`
18826
+ `index("${value.config.name}").using("${value.config.method}", table.${name2}.op("${vectorOps[0]}"))`
18773
18827
  )}
18774
18828
 
18775
18829
  You can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing
@@ -18778,7 +18832,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
18778
18832
  );
18779
18833
  process.exit(1);
18780
18834
  }
18781
- indexColumnNames.push(it.name);
18835
+ indexColumnNames.push(name2);
18782
18836
  });
18783
18837
  const name = value.config.name ? value.config.name : indexName2(tableName, indexColumnNames);
18784
18838
  let indexColumns = columns2.map(
@@ -18786,7 +18840,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
18786
18840
  var _a2, _b2, _c2, _d2, _e2;
18787
18841
  if ((0, import_drizzle_orm5.is)(it, import_drizzle_orm5.SQL)) {
18788
18842
  return {
18789
- expression: dialect5.sqlToQuery(it, "indexes").sql,
18843
+ expression: dialect4.sqlToQuery(it, "indexes").sql,
18790
18844
  asc: true,
18791
18845
  isExpression: true,
18792
18846
  nulls: "last"
@@ -18794,7 +18848,7 @@ You can check the "pg_vector" docs for more info: https://github.com/pgvector/pg
18794
18848
  } else {
18795
18849
  it = it;
18796
18850
  return {
18797
- expression: it.name,
18851
+ expression: getColumnCasing(it, casing2),
18798
18852
  isExpression: false,
18799
18853
  asc: ((_a2 = it.indexConfig) == null ? void 0 : _a2.order) === "asc",
18800
18854
  nulls: ((_b2 = it.indexConfig) == null ? void 0 : _b2.nulls) ? (_c2 = it.indexConfig) == null ? void 0 : _c2.nulls : ((_d2 = it.indexConfig) == null ? void 0 : _d2.order) === "desc" ? "first" : "last",
@@ -18825,7 +18879,7 @@ ${withStyle.errorWarning(
18825
18879
  name,
18826
18880
  columns: indexColumns,
18827
18881
  isUnique: value.config.unique ?? false,
18828
- where: value.config.where ? dialect5.sqlToQuery(value.config.where).sql : void 0,
18882
+ where: value.config.where ? dialect4.sqlToQuery(value.config.where).sql : void 0,
18829
18883
  concurrently: value.config.concurrently ?? false,
18830
18884
  method: value.config.method ?? "btree",
18831
18885
  with: value.config.with ?? {}
@@ -19594,7 +19648,7 @@ function extractGeneratedColumns(input) {
19594
19648
  }
19595
19649
  return columns;
19596
19650
  }
19597
- var import_drizzle_orm7, import_sqlite_core2, dialect6, generateSqliteSnapshot, fromDatabase3;
19651
+ var import_drizzle_orm7, import_sqlite_core2, generateSqliteSnapshot, fromDatabase3;
19598
19652
  var init_sqliteSerializer = __esm({
19599
19653
  "src/serializer/sqliteSerializer.ts"() {
19600
19654
  "use strict";
@@ -19602,9 +19656,10 @@ var init_sqliteSerializer = __esm({
19602
19656
  import_drizzle_orm7 = require("drizzle-orm");
19603
19657
  import_sqlite_core2 = require("drizzle-orm/sqlite-core");
19604
19658
  init_outputs();
19659
+ init_utils();
19605
19660
  init_serializer();
19606
- dialect6 = new import_sqlite_core2.SQLiteSyncDialect();
19607
- generateSqliteSnapshot = (tables) => {
19661
+ generateSqliteSnapshot = (tables, casing2) => {
19662
+ const dialect4 = new import_sqlite_core2.SQLiteSyncDialect({ casing: casing2 });
19608
19663
  const result = {};
19609
19664
  const internal = { indexes: {} };
19610
19665
  for (const table4 of tables) {
@@ -19622,28 +19677,29 @@ var init_sqliteSerializer = __esm({
19622
19677
  uniqueConstraints
19623
19678
  } = (0, import_sqlite_core2.getTableConfig)(table4);
19624
19679
  columns.forEach((column7) => {
19680
+ const name = getColumnCasing(column7, casing2);
19625
19681
  const notNull = column7.notNull;
19626
19682
  const primaryKey = column7.primary;
19627
19683
  const generated = column7.generated;
19628
19684
  const columnToSet = {
19629
- name: column7.name,
19685
+ name,
19630
19686
  type: column7.getSQLType(),
19631
19687
  primaryKey,
19632
19688
  notNull,
19633
19689
  autoincrement: (0, import_drizzle_orm7.is)(column7, import_sqlite_core2.SQLiteBaseInteger) ? column7.autoIncrement : false,
19634
19690
  generated: generated ? {
19635
- as: (0, import_drizzle_orm7.is)(generated.as, import_drizzle_orm7.SQL) ? `(${dialect6.sqlToQuery(generated.as, "indexes").sql})` : typeof generated.as === "function" ? `(${dialect6.sqlToQuery(generated.as(), "indexes").sql})` : `(${generated.as})`,
19691
+ as: (0, import_drizzle_orm7.is)(generated.as, import_drizzle_orm7.SQL) ? `(${dialect4.sqlToQuery(generated.as, "indexes").sql})` : typeof generated.as === "function" ? `(${dialect4.sqlToQuery(generated.as(), "indexes").sql})` : `(${generated.as})`,
19636
19692
  type: generated.mode ?? "virtual"
19637
19693
  } : void 0
19638
19694
  };
19639
19695
  if (column7.default !== void 0) {
19640
19696
  if ((0, import_drizzle_orm7.is)(column7.default, import_drizzle_orm7.SQL)) {
19641
- columnToSet.default = sqlToStr(column7.default);
19697
+ columnToSet.default = sqlToStr(column7.default, casing2);
19642
19698
  } else {
19643
19699
  columnToSet.default = typeof column7.default === "string" ? `'${column7.default}'` : typeof column7.default === "object" || Array.isArray(column7.default) ? `'${JSON.stringify(column7.default)}'` : column7.default;
19644
19700
  }
19645
19701
  }
19646
- columnsObject[column7.name] = columnToSet;
19702
+ columnsObject[name] = columnToSet;
19647
19703
  if (column7.isUnique) {
19648
19704
  const existingUnique = indexesObject[column7.uniqueName];
19649
19705
  if (typeof existingUnique !== "undefined") {
@@ -19655,7 +19711,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19655
19711
  The unique constraint ${source_default.underline.blue(
19656
19712
  column7.uniqueName
19657
19713
  )} on the ${source_default.underline.blue(
19658
- column7.name
19714
+ name
19659
19715
  )} column is confilcting with a unique constraint name already defined for ${source_default.underline.blue(
19660
19716
  existingUnique.columns.join(",")
19661
19717
  )} columns
@@ -19671,15 +19727,25 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19671
19727
  }
19672
19728
  });
19673
19729
  const foreignKeys = tableForeignKeys.map((fk4) => {
19674
- const name = fk4.getName();
19675
19730
  const tableFrom = tableName;
19676
19731
  const onDelete = fk4.onDelete ?? "no action";
19677
19732
  const onUpdate = fk4.onUpdate ?? "no action";
19678
19733
  const reference = fk4.reference();
19679
19734
  const referenceFT = reference.foreignTable;
19680
19735
  const tableTo = (0, import_drizzle_orm7.getTableName)(referenceFT);
19681
- const columnsFrom = reference.columns.map((it) => it.name);
19682
- const columnsTo = reference.foreignColumns.map((it) => it.name);
19736
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
19737
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing2));
19738
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
19739
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing2));
19740
+ let name = fk4.getName();
19741
+ if (casing2 !== void 0) {
19742
+ for (let i2 = 0; i2 < originalColumnsFrom.length; i2++) {
19743
+ name = name.replace(originalColumnsFrom[i2], columnsFrom[i2]);
19744
+ }
19745
+ for (let i2 = 0; i2 < originalColumnsTo.length; i2++) {
19746
+ name = name.replace(originalColumnsTo[i2], columnsTo[i2]);
19747
+ }
19748
+ }
19683
19749
  return {
19684
19750
  name,
19685
19751
  tableFrom,
@@ -19699,7 +19765,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19699
19765
  let indexColumns = columns2.map((it) => {
19700
19766
  var _a;
19701
19767
  if ((0, import_drizzle_orm7.is)(it, import_drizzle_orm7.SQL)) {
19702
- const sql = dialect6.sqlToQuery(it, "indexes").sql;
19768
+ const sql = dialect4.sqlToQuery(it, "indexes").sql;
19703
19769
  if (typeof internal.indexes[name] === "undefined") {
19704
19770
  internal.indexes[name] = {
19705
19771
  columns: {
@@ -19719,13 +19785,13 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19719
19785
  }
19720
19786
  return sql;
19721
19787
  } else {
19722
- return it.name;
19788
+ return getColumnCasing(it, casing2);
19723
19789
  }
19724
19790
  });
19725
19791
  let where = void 0;
19726
19792
  if (value.config.where !== void 0) {
19727
19793
  if ((0, import_drizzle_orm7.is)(value.config.where, import_drizzle_orm7.SQL)) {
19728
- where = dialect6.sqlToQuery(value.config.where).sql;
19794
+ where = dialect4.sqlToQuery(value.config.where).sql;
19729
19795
  }
19730
19796
  }
19731
19797
  indexesObject[name] = {
@@ -19736,7 +19802,7 @@ ${withStyle.errorWarning(`We've found duplicated unique constraint names in ${so
19736
19802
  };
19737
19803
  });
19738
19804
  uniqueConstraints == null ? void 0 : uniqueConstraints.map((unq) => {
19739
- const columnNames = unq.columns.map((c) => c.name);
19805
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing2));
19740
19806
  const name = unq.name ?? (0, import_sqlite_core2.uniqueKeyName)(table4, columnNames);
19741
19807
  const existingUnique = indexesObject[name];
19742
19808
  if (typeof existingUnique !== "undefined") {
@@ -19766,12 +19832,20 @@ The unique constraint ${source_default.underline.blue(
19766
19832
  });
19767
19833
  primaryKeys.forEach((it) => {
19768
19834
  if (it.columns.length > 1) {
19769
- primaryKeysObject[it.getName()] = {
19770
- columns: it.columns.map((it2) => it2.name),
19771
- name: it.getName()
19835
+ const originalColumnNames = it.columns.map((c) => c.name);
19836
+ const columnNames = it.columns.map((c) => getColumnCasing(c, casing2));
19837
+ let name = it.getName();
19838
+ if (casing2 !== void 0) {
19839
+ for (let i2 = 0; i2 < originalColumnNames.length; i2++) {
19840
+ name = name.replace(originalColumnNames[i2], columnNames[i2]);
19841
+ }
19842
+ }
19843
+ primaryKeysObject[name] = {
19844
+ columns: columnNames,
19845
+ name
19772
19846
  };
19773
19847
  } else {
19774
- columnsObject[it.columns[0].name].primaryKey = true;
19848
+ columnsObject[getColumnCasing(it.columns[0], casing2)].primaryKey = true;
19775
19849
  }
19776
19850
  });
19777
19851
  result[tableName] = {
@@ -20016,16 +20090,17 @@ WHERE
20016
20090
  });
20017
20091
 
20018
20092
  // src/serializer/index.ts
20019
- var import_fs3, glob, import_path3, sqlToStr, serializeMySql, serializePg, serializeSQLite, prepareFilenames;
20093
+ var import_casing2, import_fs3, glob, import_path3, sqlToStr, serializeMySql, serializePg, serializeSQLite, prepareFilenames;
20020
20094
  var init_serializer = __esm({
20021
20095
  "src/serializer/index.ts"() {
20022
20096
  "use strict";
20023
20097
  init_source();
20098
+ import_casing2 = require("drizzle-orm/casing");
20024
20099
  import_fs3 = __toESM(require("fs"));
20025
20100
  glob = __toESM(require_glob());
20026
20101
  import_path3 = __toESM(require("path"));
20027
20102
  init_views();
20028
- sqlToStr = (sql) => {
20103
+ sqlToStr = (sql, casing2) => {
20029
20104
  return sql.toQuery({
20030
20105
  escapeName: () => {
20031
20106
  throw new Error("we don't support params for `sql` default values");
@@ -20035,10 +20110,11 @@ var init_serializer = __esm({
20035
20110
  },
20036
20111
  escapeString: () => {
20037
20112
  throw new Error("we don't support params for `sql` default values");
20038
- }
20113
+ },
20114
+ casing: new import_casing2.CasingCache(casing2)
20039
20115
  }).sql;
20040
20116
  };
20041
- serializeMySql = async (path5) => {
20117
+ serializeMySql = async (path5, casing2) => {
20042
20118
  const filenames = prepareFilenames(path5);
20043
20119
  console.log(source_default.gray(`Reading schema files:
20044
20120
  ${filenames.join("\n")}
@@ -20046,23 +20122,23 @@ ${filenames.join("\n")}
20046
20122
  const { prepareFromMySqlImports: prepareFromMySqlImports2 } = await Promise.resolve().then(() => (init_mysqlImports(), mysqlImports_exports));
20047
20123
  const { generateMySqlSnapshot: generateMySqlSnapshot2 } = await Promise.resolve().then(() => (init_mysqlSerializer(), mysqlSerializer_exports));
20048
20124
  const { tables } = await prepareFromMySqlImports2(filenames);
20049
- return generateMySqlSnapshot2(tables);
20125
+ return generateMySqlSnapshot2(tables, casing2);
20050
20126
  };
20051
- serializePg = async (path5, schemaFilter) => {
20127
+ serializePg = async (path5, casing2, schemaFilter) => {
20052
20128
  const filenames = prepareFilenames(path5);
20053
20129
  const { prepareFromPgImports: prepareFromPgImports2 } = await Promise.resolve().then(() => (init_pgImports(), pgImports_exports));
20054
20130
  const { generatePgSnapshot: generatePgSnapshot2 } = await Promise.resolve().then(() => (init_pgSerializer(), pgSerializer_exports));
20055
20131
  const { tables, enums, schemas, sequences } = await prepareFromPgImports2(
20056
20132
  filenames
20057
20133
  );
20058
- return generatePgSnapshot2(tables, enums, schemas, sequences, schemaFilter);
20134
+ return generatePgSnapshot2(tables, enums, schemas, sequences, casing2, schemaFilter);
20059
20135
  };
20060
- serializeSQLite = async (path5) => {
20136
+ serializeSQLite = async (path5, casing2) => {
20061
20137
  const filenames = prepareFilenames(path5);
20062
20138
  const { prepareFromSqliteImports: prepareFromSqliteImports2 } = await Promise.resolve().then(() => (init_sqliteImports(), sqliteImports_exports));
20063
20139
  const { generateSqliteSnapshot: generateSqliteSnapshot2 } = await Promise.resolve().then(() => (init_sqliteSerializer(), sqliteSerializer_exports));
20064
20140
  const { tables } = await prepareFromSqliteImports2(filenames);
20065
- return generateSqliteSnapshot2(tables);
20141
+ return generateSqliteSnapshot2(tables, casing2);
20066
20142
  };
20067
20143
  prepareFilenames = (path5) => {
20068
20144
  if (typeof path5 === "string") {
@@ -20111,45 +20187,45 @@ var init_migrationPreparator = __esm({
20111
20187
  init_mysqlSchema();
20112
20188
  init_pgSchema();
20113
20189
  init_sqliteSchema();
20114
- prepareMySqlDbPushSnapshot = async (prev, schemaPath) => {
20115
- const serialized = await serializeMySql(schemaPath);
20190
+ prepareMySqlDbPushSnapshot = async (prev, schemaPath, casing2) => {
20191
+ const serialized = await serializeMySql(schemaPath, casing2);
20116
20192
  const id = (0, import_crypto.randomUUID)();
20117
20193
  const idPrev = prev.id;
20118
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20119
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20194
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20195
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20120
20196
  return { prev, cur: result };
20121
20197
  };
20122
- prepareSQLiteDbPushSnapshot = async (prev, schemaPath) => {
20123
- const serialized = await serializeSQLite(schemaPath);
20198
+ prepareSQLiteDbPushSnapshot = async (prev, schemaPath, casing2) => {
20199
+ const serialized = await serializeSQLite(schemaPath, casing2);
20124
20200
  const id = (0, import_crypto.randomUUID)();
20125
20201
  const idPrev = prev.id;
20126
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20202
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20127
20203
  const result = {
20128
20204
  version: version3,
20129
- dialect: dialect7,
20205
+ dialect: dialect4,
20130
20206
  id,
20131
20207
  prevId: idPrev,
20132
20208
  ...rest
20133
20209
  };
20134
20210
  return { prev, cur: result };
20135
20211
  };
20136
- preparePgDbPushSnapshot = async (prev, schemaPath, schemaFilter = ["public"]) => {
20137
- const serialized = await serializePg(schemaPath, schemaFilter);
20212
+ preparePgDbPushSnapshot = async (prev, schemaPath, casing2, schemaFilter = ["public"]) => {
20213
+ const serialized = await serializePg(schemaPath, casing2, schemaFilter);
20138
20214
  const id = (0, import_crypto.randomUUID)();
20139
20215
  const idPrev = prev.id;
20140
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20141
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20216
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20217
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20142
20218
  return { prev, cur: result };
20143
20219
  };
20144
- prepareMySqlMigrationSnapshot = async (migrationFolders, schemaPath) => {
20220
+ prepareMySqlMigrationSnapshot = async (migrationFolders, schemaPath, casing2) => {
20145
20221
  const prevSnapshot = mysqlSchema.parse(
20146
20222
  preparePrevSnapshot(migrationFolders, dryMySql)
20147
20223
  );
20148
- const serialized = await serializeMySql(schemaPath);
20224
+ const serialized = await serializeMySql(schemaPath, casing2);
20149
20225
  const id = (0, import_crypto.randomUUID)();
20150
20226
  const idPrev = prevSnapshot.id;
20151
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20152
- const result = { version: version3, dialect: dialect7, id, prevId: idPrev, ...rest };
20227
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20228
+ const result = { version: version3, dialect: dialect4, id, prevId: idPrev, ...rest };
20153
20229
  const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot;
20154
20230
  const custom2 = {
20155
20231
  id,
@@ -20158,17 +20234,17 @@ var init_migrationPreparator = __esm({
20158
20234
  };
20159
20235
  return { prev: prevSnapshot, cur: result, custom: custom2 };
20160
20236
  };
20161
- prepareSqliteMigrationSnapshot = async (snapshots, schemaPath) => {
20237
+ prepareSqliteMigrationSnapshot = async (snapshots, schemaPath, casing2) => {
20162
20238
  const prevSnapshot = sqliteSchema.parse(
20163
20239
  preparePrevSnapshot(snapshots, drySQLite)
20164
20240
  );
20165
- const serialized = await serializeSQLite(schemaPath);
20241
+ const serialized = await serializeSQLite(schemaPath, casing2);
20166
20242
  const id = (0, import_crypto.randomUUID)();
20167
20243
  const idPrev = prevSnapshot.id;
20168
- const { version: version3, dialect: dialect7, ...rest } = serialized;
20244
+ const { version: version3, dialect: dialect4, ...rest } = serialized;
20169
20245
  const result = {
20170
20246
  version: version3,
20171
- dialect: dialect7,
20247
+ dialect: dialect4,
20172
20248
  id,
20173
20249
  prevId: idPrev,
20174
20250
  ...rest
@@ -20188,9 +20264,9 @@ var init_migrationPreparator = __esm({
20188
20264
  }) => {
20189
20265
  return { id, prevId: idPrev, ...serialized };
20190
20266
  };
20191
- preparePgMigrationSnapshot = async (snapshots, schemaPath) => {
20267
+ preparePgMigrationSnapshot = async (snapshots, schemaPath, casing2) => {
20192
20268
  const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg));
20193
- const serialized = await serializePg(schemaPath);
20269
+ const serialized = await serializePg(schemaPath, casing2);
20194
20270
  const id = (0, import_crypto.randomUUID)();
20195
20271
  const idPrev = prevSnapshot.id;
20196
20272
  const result = { id, prevId: idPrev, ...serialized };
@@ -23016,10 +23092,10 @@ var init_jsonDiffer = __esm({
23016
23092
  });
23017
23093
 
23018
23094
  // src/sqlgenerator.ts
23019
- function fromJson(statements, dialect7, action, json2) {
23095
+ function fromJson(statements, dialect4, action, json2) {
23020
23096
  const result = statements.flatMap((statement) => {
23021
23097
  const filtered = convertors.filter((it) => {
23022
- return it.can(statement, dialect7);
23098
+ return it.can(statement, dialect4);
23023
23099
  });
23024
23100
  const convertor = filtered.length === 1 ? filtered[0] : void 0;
23025
23101
  if (!convertor) {
@@ -23087,8 +23163,8 @@ var init_sqlgenerator = __esm({
23087
23163
  Convertor = class {
23088
23164
  };
23089
23165
  PgCreateTableConvertor = class extends Convertor {
23090
- can(statement, dialect7) {
23091
- return statement.type === "create_table" && dialect7 === "postgresql";
23166
+ can(statement, dialect4) {
23167
+ return statement.type === "create_table" && dialect4 === "postgresql";
23092
23168
  }
23093
23169
  convert(st) {
23094
23170
  const { tableName, schema: schema5, columns, compositePKs, uniqueConstraints } = st;
@@ -23132,8 +23208,8 @@ var init_sqlgenerator = __esm({
23132
23208
  }
23133
23209
  };
23134
23210
  MySqlCreateTableConvertor = class extends Convertor {
23135
- can(statement, dialect7) {
23136
- return statement.type === "create_table" && dialect7 === "mysql";
23211
+ can(statement, dialect4) {
23212
+ return statement.type === "create_table" && dialect4 === "mysql";
23137
23213
  }
23138
23214
  convert(st) {
23139
23215
  var _a, _b;
@@ -23183,8 +23259,8 @@ var init_sqlgenerator = __esm({
23183
23259
  }
23184
23260
  };
23185
23261
  SQLiteCreateTableConvertor = class extends Convertor {
23186
- can(statement, dialect7) {
23187
- return statement.type === "sqlite_create_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23262
+ can(statement, dialect4) {
23263
+ return statement.type === "sqlite_create_table" && (dialect4 === "sqlite" || dialect4 === "turso");
23188
23264
  }
23189
23265
  convert(st) {
23190
23266
  const {
@@ -23246,8 +23322,8 @@ var init_sqlgenerator = __esm({
23246
23322
  }
23247
23323
  };
23248
23324
  PgAlterTableAlterColumnSetGenerated = class extends Convertor {
23249
- can(statement, dialect7) {
23250
- return statement.type === "alter_table_alter_column_set_identity" && dialect7 === "postgresql";
23325
+ can(statement, dialect4) {
23326
+ return statement.type === "alter_table_alter_column_set_identity" && dialect4 === "postgresql";
23251
23327
  }
23252
23328
  convert(statement) {
23253
23329
  const { identity, tableName, columnName, schema: schema5 } = statement;
@@ -23259,8 +23335,8 @@ var init_sqlgenerator = __esm({
23259
23335
  }
23260
23336
  };
23261
23337
  PgAlterTableAlterColumnDropGenerated = class extends Convertor {
23262
- can(statement, dialect7) {
23263
- return statement.type === "alter_table_alter_column_drop_identity" && dialect7 === "postgresql";
23338
+ can(statement, dialect4) {
23339
+ return statement.type === "alter_table_alter_column_drop_identity" && dialect4 === "postgresql";
23264
23340
  }
23265
23341
  convert(statement) {
23266
23342
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23269,8 +23345,8 @@ var init_sqlgenerator = __esm({
23269
23345
  }
23270
23346
  };
23271
23347
  PgAlterTableAlterColumnAlterGenerated = class extends Convertor {
23272
- can(statement, dialect7) {
23273
- return statement.type === "alter_table_alter_column_change_identity" && dialect7 === "postgresql";
23348
+ can(statement, dialect4) {
23349
+ return statement.type === "alter_table_alter_column_change_identity" && dialect4 === "postgresql";
23274
23350
  }
23275
23351
  convert(statement) {
23276
23352
  const { identity, oldIdentity, tableName, columnName, schema: schema5 } = statement;
@@ -23317,8 +23393,8 @@ var init_sqlgenerator = __esm({
23317
23393
  }
23318
23394
  };
23319
23395
  PgAlterTableAddUniqueConstraintConvertor = class extends Convertor {
23320
- can(statement, dialect7) {
23321
- return statement.type === "create_unique_constraint" && dialect7 === "postgresql";
23396
+ can(statement, dialect4) {
23397
+ return statement.type === "create_unique_constraint" && dialect4 === "postgresql";
23322
23398
  }
23323
23399
  convert(statement) {
23324
23400
  const unsquashed = PgSquasher.unsquashUnique(statement.data);
@@ -23327,8 +23403,8 @@ var init_sqlgenerator = __esm({
23327
23403
  }
23328
23404
  };
23329
23405
  PgAlterTableDropUniqueConstraintConvertor = class extends Convertor {
23330
- can(statement, dialect7) {
23331
- return statement.type === "delete_unique_constraint" && dialect7 === "postgresql";
23406
+ can(statement, dialect4) {
23407
+ return statement.type === "delete_unique_constraint" && dialect4 === "postgresql";
23332
23408
  }
23333
23409
  convert(statement) {
23334
23410
  const unsquashed = PgSquasher.unsquashUnique(statement.data);
@@ -23337,8 +23413,8 @@ var init_sqlgenerator = __esm({
23337
23413
  }
23338
23414
  };
23339
23415
  MySQLAlterTableAddUniqueConstraintConvertor = class extends Convertor {
23340
- can(statement, dialect7) {
23341
- return statement.type === "create_unique_constraint" && dialect7 === "mysql";
23416
+ can(statement, dialect4) {
23417
+ return statement.type === "create_unique_constraint" && dialect4 === "mysql";
23342
23418
  }
23343
23419
  convert(statement) {
23344
23420
  const unsquashed = MySqlSquasher.unsquashUnique(statement.data);
@@ -23346,8 +23422,8 @@ var init_sqlgenerator = __esm({
23346
23422
  }
23347
23423
  };
23348
23424
  MySQLAlterTableDropUniqueConstraintConvertor = class extends Convertor {
23349
- can(statement, dialect7) {
23350
- return statement.type === "delete_unique_constraint" && dialect7 === "mysql";
23425
+ can(statement, dialect4) {
23426
+ return statement.type === "delete_unique_constraint" && dialect4 === "mysql";
23351
23427
  }
23352
23428
  convert(statement) {
23353
23429
  const unsquashed = MySqlSquasher.unsquashUnique(statement.data);
@@ -23355,8 +23431,8 @@ var init_sqlgenerator = __esm({
23355
23431
  }
23356
23432
  };
23357
23433
  CreatePgSequenceConvertor = class extends Convertor {
23358
- can(statement, dialect7) {
23359
- return statement.type === "create_sequence" && dialect7 === "postgresql";
23434
+ can(statement, dialect4) {
23435
+ return statement.type === "create_sequence" && dialect4 === "postgresql";
23360
23436
  }
23361
23437
  convert(st) {
23362
23438
  const { name, values, schema: schema5 } = st;
@@ -23365,8 +23441,8 @@ var init_sqlgenerator = __esm({
23365
23441
  }
23366
23442
  };
23367
23443
  DropPgSequenceConvertor = class extends Convertor {
23368
- can(statement, dialect7) {
23369
- return statement.type === "drop_sequence" && dialect7 === "postgresql";
23444
+ can(statement, dialect4) {
23445
+ return statement.type === "drop_sequence" && dialect4 === "postgresql";
23370
23446
  }
23371
23447
  convert(st) {
23372
23448
  const { name, schema: schema5 } = st;
@@ -23375,8 +23451,8 @@ var init_sqlgenerator = __esm({
23375
23451
  }
23376
23452
  };
23377
23453
  RenamePgSequenceConvertor = class extends Convertor {
23378
- can(statement, dialect7) {
23379
- return statement.type === "rename_sequence" && dialect7 === "postgresql";
23454
+ can(statement, dialect4) {
23455
+ return statement.type === "rename_sequence" && dialect4 === "postgresql";
23380
23456
  }
23381
23457
  convert(st) {
23382
23458
  const { nameFrom, nameTo, schema: schema5 } = st;
@@ -23386,8 +23462,8 @@ var init_sqlgenerator = __esm({
23386
23462
  }
23387
23463
  };
23388
23464
  MovePgSequenceConvertor = class extends Convertor {
23389
- can(statement, dialect7) {
23390
- return statement.type === "move_sequence" && dialect7 === "postgresql";
23465
+ can(statement, dialect4) {
23466
+ return statement.type === "move_sequence" && dialect4 === "postgresql";
23391
23467
  }
23392
23468
  convert(st) {
23393
23469
  const { schemaFrom, schemaTo, name } = st;
@@ -23397,8 +23473,8 @@ var init_sqlgenerator = __esm({
23397
23473
  }
23398
23474
  };
23399
23475
  AlterPgSequenceConvertor = class extends Convertor {
23400
- can(statement, dialect7) {
23401
- return statement.type === "alter_sequence" && dialect7 === "postgresql";
23476
+ can(statement, dialect4) {
23477
+ return statement.type === "alter_sequence" && dialect4 === "postgresql";
23402
23478
  }
23403
23479
  convert(st) {
23404
23480
  const { name, schema: schema5, values } = st;
@@ -23441,8 +23517,8 @@ var init_sqlgenerator = __esm({
23441
23517
  }
23442
23518
  };
23443
23519
  PgDropTableConvertor = class extends Convertor {
23444
- can(statement, dialect7) {
23445
- return statement.type === "drop_table" && dialect7 === "postgresql";
23520
+ can(statement, dialect4) {
23521
+ return statement.type === "drop_table" && dialect4 === "postgresql";
23446
23522
  }
23447
23523
  convert(statement) {
23448
23524
  const { tableName, schema: schema5 } = statement;
@@ -23451,8 +23527,8 @@ var init_sqlgenerator = __esm({
23451
23527
  }
23452
23528
  };
23453
23529
  MySQLDropTableConvertor = class extends Convertor {
23454
- can(statement, dialect7) {
23455
- return statement.type === "drop_table" && dialect7 === "mysql";
23530
+ can(statement, dialect4) {
23531
+ return statement.type === "drop_table" && dialect4 === "mysql";
23456
23532
  }
23457
23533
  convert(statement) {
23458
23534
  const { tableName } = statement;
@@ -23460,8 +23536,8 @@ var init_sqlgenerator = __esm({
23460
23536
  }
23461
23537
  };
23462
23538
  SQLiteDropTableConvertor = class extends Convertor {
23463
- can(statement, dialect7) {
23464
- return statement.type === "drop_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23539
+ can(statement, dialect4) {
23540
+ return statement.type === "drop_table" && (dialect4 === "sqlite" || dialect4 === "turso");
23465
23541
  }
23466
23542
  convert(statement) {
23467
23543
  const { tableName } = statement;
@@ -23469,8 +23545,8 @@ var init_sqlgenerator = __esm({
23469
23545
  }
23470
23546
  };
23471
23547
  PgRenameTableConvertor = class extends Convertor {
23472
- can(statement, dialect7) {
23473
- return statement.type === "rename_table" && dialect7 === "postgresql";
23548
+ can(statement, dialect4) {
23549
+ return statement.type === "rename_table" && dialect4 === "postgresql";
23474
23550
  }
23475
23551
  convert(statement) {
23476
23552
  const { tableNameFrom, tableNameTo, toSchema, fromSchema } = statement;
@@ -23480,8 +23556,8 @@ var init_sqlgenerator = __esm({
23480
23556
  }
23481
23557
  };
23482
23558
  SqliteRenameTableConvertor = class extends Convertor {
23483
- can(statement, dialect7) {
23484
- return statement.type === "rename_table" && (dialect7 === "sqlite" || dialect7 === "turso");
23559
+ can(statement, dialect4) {
23560
+ return statement.type === "rename_table" && (dialect4 === "sqlite" || dialect4 === "turso");
23485
23561
  }
23486
23562
  convert(statement) {
23487
23563
  const { tableNameFrom, tableNameTo } = statement;
@@ -23489,8 +23565,8 @@ var init_sqlgenerator = __esm({
23489
23565
  }
23490
23566
  };
23491
23567
  MySqlRenameTableConvertor = class extends Convertor {
23492
- can(statement, dialect7) {
23493
- return statement.type === "rename_table" && dialect7 === "mysql";
23568
+ can(statement, dialect4) {
23569
+ return statement.type === "rename_table" && dialect4 === "mysql";
23494
23570
  }
23495
23571
  convert(statement) {
23496
23572
  const { tableNameFrom, tableNameTo } = statement;
@@ -23498,8 +23574,8 @@ var init_sqlgenerator = __esm({
23498
23574
  }
23499
23575
  };
23500
23576
  PgAlterTableRenameColumnConvertor = class extends Convertor {
23501
- can(statement, dialect7) {
23502
- return statement.type === "alter_table_rename_column" && dialect7 === "postgresql";
23577
+ can(statement, dialect4) {
23578
+ return statement.type === "alter_table_rename_column" && dialect4 === "postgresql";
23503
23579
  }
23504
23580
  convert(statement) {
23505
23581
  const { tableName, oldColumnName, newColumnName, schema: schema5 } = statement;
@@ -23508,8 +23584,8 @@ var init_sqlgenerator = __esm({
23508
23584
  }
23509
23585
  };
23510
23586
  MySqlAlterTableRenameColumnConvertor = class extends Convertor {
23511
- can(statement, dialect7) {
23512
- return statement.type === "alter_table_rename_column" && dialect7 === "mysql";
23587
+ can(statement, dialect4) {
23588
+ return statement.type === "alter_table_rename_column" && dialect4 === "mysql";
23513
23589
  }
23514
23590
  convert(statement) {
23515
23591
  const { tableName, oldColumnName, newColumnName } = statement;
@@ -23517,8 +23593,8 @@ var init_sqlgenerator = __esm({
23517
23593
  }
23518
23594
  };
23519
23595
  SQLiteAlterTableRenameColumnConvertor = class extends Convertor {
23520
- can(statement, dialect7) {
23521
- return statement.type === "alter_table_rename_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23596
+ can(statement, dialect4) {
23597
+ return statement.type === "alter_table_rename_column" && (dialect4 === "sqlite" || dialect4 === "turso");
23522
23598
  }
23523
23599
  convert(statement) {
23524
23600
  const { tableName, oldColumnName, newColumnName } = statement;
@@ -23526,8 +23602,8 @@ var init_sqlgenerator = __esm({
23526
23602
  }
23527
23603
  };
23528
23604
  PgAlterTableDropColumnConvertor = class extends Convertor {
23529
- can(statement, dialect7) {
23530
- return statement.type === "alter_table_drop_column" && dialect7 === "postgresql";
23605
+ can(statement, dialect4) {
23606
+ return statement.type === "alter_table_drop_column" && dialect4 === "postgresql";
23531
23607
  }
23532
23608
  convert(statement) {
23533
23609
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23536,8 +23612,8 @@ var init_sqlgenerator = __esm({
23536
23612
  }
23537
23613
  };
23538
23614
  MySqlAlterTableDropColumnConvertor = class extends Convertor {
23539
- can(statement, dialect7) {
23540
- return statement.type === "alter_table_drop_column" && dialect7 === "mysql";
23615
+ can(statement, dialect4) {
23616
+ return statement.type === "alter_table_drop_column" && dialect4 === "mysql";
23541
23617
  }
23542
23618
  convert(statement) {
23543
23619
  const { tableName, columnName } = statement;
@@ -23545,8 +23621,8 @@ var init_sqlgenerator = __esm({
23545
23621
  }
23546
23622
  };
23547
23623
  SQLiteAlterTableDropColumnConvertor = class extends Convertor {
23548
- can(statement, dialect7) {
23549
- return statement.type === "alter_table_drop_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23624
+ can(statement, dialect4) {
23625
+ return statement.type === "alter_table_drop_column" && (dialect4 === "sqlite" || dialect4 === "turso");
23550
23626
  }
23551
23627
  convert(statement) {
23552
23628
  const { tableName, columnName } = statement;
@@ -23554,8 +23630,8 @@ var init_sqlgenerator = __esm({
23554
23630
  }
23555
23631
  };
23556
23632
  PgAlterTableAddColumnConvertor = class extends Convertor {
23557
- can(statement, dialect7) {
23558
- return statement.type === "alter_table_add_column" && dialect7 === "postgresql";
23633
+ can(statement, dialect4) {
23634
+ return statement.type === "alter_table_add_column" && dialect4 === "postgresql";
23559
23635
  }
23560
23636
  convert(statement) {
23561
23637
  const { tableName, column: column7, schema: schema5 } = statement;
@@ -23574,8 +23650,8 @@ var init_sqlgenerator = __esm({
23574
23650
  }
23575
23651
  };
23576
23652
  MySqlAlterTableAddColumnConvertor = class extends Convertor {
23577
- can(statement, dialect7) {
23578
- return statement.type === "alter_table_add_column" && dialect7 === "mysql";
23653
+ can(statement, dialect4) {
23654
+ return statement.type === "alter_table_add_column" && dialect4 === "mysql";
23579
23655
  }
23580
23656
  convert(statement) {
23581
23657
  const { tableName, column: column7 } = statement;
@@ -23598,8 +23674,8 @@ var init_sqlgenerator = __esm({
23598
23674
  }
23599
23675
  };
23600
23676
  SQLiteAlterTableAddColumnConvertor = class extends Convertor {
23601
- can(statement, dialect7) {
23602
- return statement.type === "sqlite_alter_table_add_column" && (dialect7 === "sqlite" || dialect7 === "turso");
23677
+ can(statement, dialect4) {
23678
+ return statement.type === "sqlite_alter_table_add_column" && (dialect4 === "sqlite" || dialect4 === "turso");
23603
23679
  }
23604
23680
  convert(statement) {
23605
23681
  const { tableName, column: column7, referenceData } = statement;
@@ -23614,8 +23690,8 @@ var init_sqlgenerator = __esm({
23614
23690
  }
23615
23691
  };
23616
23692
  PgAlterTableAlterColumnSetTypeConvertor = class extends Convertor {
23617
- can(statement, dialect7) {
23618
- return statement.type === "alter_table_alter_column_set_type" && dialect7 === "postgresql";
23693
+ can(statement, dialect4) {
23694
+ return statement.type === "alter_table_alter_column_set_type" && dialect4 === "postgresql";
23619
23695
  }
23620
23696
  convert(statement) {
23621
23697
  const { tableName, columnName, newDataType, schema: schema5 } = statement;
@@ -23624,8 +23700,8 @@ var init_sqlgenerator = __esm({
23624
23700
  }
23625
23701
  };
23626
23702
  PgAlterTableAlterColumnSetDefaultConvertor = class extends Convertor {
23627
- can(statement, dialect7) {
23628
- return statement.type === "alter_table_alter_column_set_default" && dialect7 === "postgresql";
23703
+ can(statement, dialect4) {
23704
+ return statement.type === "alter_table_alter_column_set_default" && dialect4 === "postgresql";
23629
23705
  }
23630
23706
  convert(statement) {
23631
23707
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23634,8 +23710,8 @@ var init_sqlgenerator = __esm({
23634
23710
  }
23635
23711
  };
23636
23712
  PgAlterTableAlterColumnDropDefaultConvertor = class extends Convertor {
23637
- can(statement, dialect7) {
23638
- return statement.type === "alter_table_alter_column_drop_default" && dialect7 === "postgresql";
23713
+ can(statement, dialect4) {
23714
+ return statement.type === "alter_table_alter_column_drop_default" && dialect4 === "postgresql";
23639
23715
  }
23640
23716
  convert(statement) {
23641
23717
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23644,8 +23720,8 @@ var init_sqlgenerator = __esm({
23644
23720
  }
23645
23721
  };
23646
23722
  PgAlterTableAlterColumnDropGeneratedConvertor = class extends Convertor {
23647
- can(statement, dialect7) {
23648
- return statement.type === "alter_table_alter_column_drop_generated" && dialect7 === "postgresql";
23723
+ can(statement, dialect4) {
23724
+ return statement.type === "alter_table_alter_column_drop_generated" && dialect4 === "postgresql";
23649
23725
  }
23650
23726
  convert(statement) {
23651
23727
  const { tableName, columnName, schema: schema5 } = statement;
@@ -23654,8 +23730,8 @@ var init_sqlgenerator = __esm({
23654
23730
  }
23655
23731
  };
23656
23732
  PgAlterTableAlterColumnSetExpressionConvertor = class extends Convertor {
23657
- can(statement, dialect7) {
23658
- return statement.type === "alter_table_alter_column_set_generated" && dialect7 === "postgresql";
23733
+ can(statement, dialect4) {
23734
+ return statement.type === "alter_table_alter_column_set_generated" && dialect4 === "postgresql";
23659
23735
  }
23660
23736
  convert(statement) {
23661
23737
  const {
@@ -23692,8 +23768,8 @@ var init_sqlgenerator = __esm({
23692
23768
  }
23693
23769
  };
23694
23770
  PgAlterTableAlterColumnAlterrGeneratedConvertor = class extends Convertor {
23695
- can(statement, dialect7) {
23696
- return statement.type === "alter_table_alter_column_alter_generated" && dialect7 === "postgresql";
23771
+ can(statement, dialect4) {
23772
+ return statement.type === "alter_table_alter_column_alter_generated" && dialect4 === "postgresql";
23697
23773
  }
23698
23774
  convert(statement) {
23699
23775
  const {
@@ -23730,8 +23806,8 @@ var init_sqlgenerator = __esm({
23730
23806
  }
23731
23807
  };
23732
23808
  SqliteAlterTableAlterColumnDropGeneratedConvertor = class extends Convertor {
23733
- can(statement, dialect7) {
23734
- return statement.type === "alter_table_alter_column_drop_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23809
+ can(statement, dialect4) {
23810
+ return statement.type === "alter_table_alter_column_drop_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
23735
23811
  }
23736
23812
  convert(statement) {
23737
23813
  const {
@@ -23771,8 +23847,8 @@ var init_sqlgenerator = __esm({
23771
23847
  }
23772
23848
  };
23773
23849
  SqliteAlterTableAlterColumnSetExpressionConvertor = class extends Convertor {
23774
- can(statement, dialect7) {
23775
- return statement.type === "alter_table_alter_column_set_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23850
+ can(statement, dialect4) {
23851
+ return statement.type === "alter_table_alter_column_set_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
23776
23852
  }
23777
23853
  convert(statement) {
23778
23854
  const {
@@ -23812,8 +23888,8 @@ var init_sqlgenerator = __esm({
23812
23888
  }
23813
23889
  };
23814
23890
  SqliteAlterTableAlterColumnAlterGeneratedConvertor = class extends Convertor {
23815
- can(statement, dialect7) {
23816
- return statement.type === "alter_table_alter_column_alter_generated" && (dialect7 === "sqlite" || dialect7 === "turso");
23891
+ can(statement, dialect4) {
23892
+ return statement.type === "alter_table_alter_column_alter_generated" && (dialect4 === "sqlite" || dialect4 === "turso");
23817
23893
  }
23818
23894
  convert(statement) {
23819
23895
  const {
@@ -23853,8 +23929,8 @@ var init_sqlgenerator = __esm({
23853
23929
  }
23854
23930
  };
23855
23931
  MySqlAlterTableAlterColumnAlterrGeneratedConvertor = class extends Convertor {
23856
- can(statement, dialect7) {
23857
- return statement.type === "alter_table_alter_column_alter_generated" && dialect7 === "mysql";
23932
+ can(statement, dialect4) {
23933
+ return statement.type === "alter_table_alter_column_alter_generated" && dialect4 === "mysql";
23858
23934
  }
23859
23935
  convert(statement) {
23860
23936
  const {
@@ -23891,24 +23967,24 @@ var init_sqlgenerator = __esm({
23891
23967
  }
23892
23968
  };
23893
23969
  MySqlAlterTableAddPk = class extends Convertor {
23894
- can(statement, dialect7) {
23895
- return statement.type === "alter_table_alter_column_set_pk" && dialect7 === "mysql";
23970
+ can(statement, dialect4) {
23971
+ return statement.type === "alter_table_alter_column_set_pk" && dialect4 === "mysql";
23896
23972
  }
23897
23973
  convert(statement) {
23898
23974
  return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`;
23899
23975
  }
23900
23976
  };
23901
23977
  MySqlAlterTableDropPk = class extends Convertor {
23902
- can(statement, dialect7) {
23903
- return statement.type === "alter_table_alter_column_drop_pk" && dialect7 === "mysql";
23978
+ can(statement, dialect4) {
23979
+ return statement.type === "alter_table_alter_column_drop_pk" && dialect4 === "mysql";
23904
23980
  }
23905
23981
  convert(statement) {
23906
23982
  return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`;
23907
23983
  }
23908
23984
  };
23909
23985
  LibSQLModifyColumn = class extends Convertor {
23910
- can(statement, dialect7) {
23911
- return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default") && dialect7 === "turso";
23986
+ can(statement, dialect4) {
23987
+ return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default") && dialect4 === "turso";
23912
23988
  }
23913
23989
  convert(statement, json2) {
23914
23990
  const { tableName, columnName } = statement;
@@ -23968,8 +24044,8 @@ var init_sqlgenerator = __esm({
23968
24044
  }
23969
24045
  };
23970
24046
  MySqlModifyColumn = class extends Convertor {
23971
- can(statement, dialect7) {
23972
- return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_drop_on_update" || statement.type === "alter_table_alter_column_set_on_update" || statement.type === "alter_table_alter_column_set_autoincrement" || statement.type === "alter_table_alter_column_drop_autoincrement" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default" || statement.type === "alter_table_alter_column_set_generated" || statement.type === "alter_table_alter_column_drop_generated") && dialect7 === "mysql";
24047
+ can(statement, dialect4) {
24048
+ return (statement.type === "alter_table_alter_column_set_type" || statement.type === "alter_table_alter_column_set_notnull" || statement.type === "alter_table_alter_column_drop_notnull" || statement.type === "alter_table_alter_column_drop_on_update" || statement.type === "alter_table_alter_column_set_on_update" || statement.type === "alter_table_alter_column_set_autoincrement" || statement.type === "alter_table_alter_column_drop_autoincrement" || statement.type === "alter_table_alter_column_set_default" || statement.type === "alter_table_alter_column_drop_default" || statement.type === "alter_table_alter_column_set_generated" || statement.type === "alter_table_alter_column_drop_generated") && dialect4 === "mysql";
23973
24049
  }
23974
24050
  convert(statement) {
23975
24051
  var _a, _b, _c, _d, _e, _f, _g;
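`LibSQLModifyColumn` and `MySqlModifyColumn` stand out because a single convertor claims many statement types: both dialects restate most of the column definition instead of altering one attribute at a time, so type, nullability, default, autoincrement and on-update changes all funnel into one modify/rebuild path. The real `convert()` is largely elided from this hunk; the sketch below is only an assumption-level illustration of the kind of clause such a convertor assembles, with an invented `ColumnDef` shape:

```ts
interface ColumnDef {
  name: string;
  type: string;           // e.g. "varchar(255)"
  notNull?: boolean;
  default?: string;       // value already rendered as SQL by the caller
  autoincrement?: boolean;
  onUpdateNow?: boolean;  // MySQL's ON UPDATE CURRENT_TIMESTAMP
}

// Rebuild the full column clause, since MySQL's MODIFY COLUMN replaces the
// whole definition rather than patching a single attribute.
function mysqlModifyColumn(table: string, col: ColumnDef): string {
  const parts = [
    `\`${col.name}\``,
    col.type,
    col.notNull ? "NOT NULL" : "",
    col.default !== undefined ? `DEFAULT ${col.default}` : "",
    col.autoincrement ? "AUTO_INCREMENT" : "",
    col.onUpdateNow ? "ON UPDATE CURRENT_TIMESTAMP" : "",
  ].filter(Boolean);
  return `ALTER TABLE \`${table}\` MODIFY COLUMN ${parts.join(" ")};`;
}

console.log(mysqlModifyColumn("users", { name: "email", type: "varchar(255)", notNull: true }));
// ALTER TABLE `users` MODIFY COLUMN `email` varchar(255) NOT NULL;
```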
@@ -24106,8 +24182,8 @@ var init_sqlgenerator = __esm({
24106
24182
  }
24107
24183
  };
24108
24184
  PgAlterTableCreateCompositePrimaryKeyConvertor = class extends Convertor {
24109
- can(statement, dialect7) {
24110
- return statement.type === "create_composite_pk" && dialect7 === "postgresql";
24185
+ can(statement, dialect4) {
24186
+ return statement.type === "create_composite_pk" && dialect4 === "postgresql";
24111
24187
  }
24112
24188
  convert(statement) {
24113
24189
  const { name, columns } = PgSquasher.unsquashPK(statement.data);
@@ -24116,8 +24192,8 @@ var init_sqlgenerator = __esm({
24116
24192
  }
24117
24193
  };
24118
24194
  PgAlterTableDeleteCompositePrimaryKeyConvertor = class extends Convertor {
24119
- can(statement, dialect7) {
24120
- return statement.type === "delete_composite_pk" && dialect7 === "postgresql";
24195
+ can(statement, dialect4) {
24196
+ return statement.type === "delete_composite_pk" && dialect4 === "postgresql";
24121
24197
  }
24122
24198
  convert(statement) {
24123
24199
  const { name, columns } = PgSquasher.unsquashPK(statement.data);
@@ -24126,8 +24202,8 @@ var init_sqlgenerator = __esm({
24126
24202
  }
24127
24203
  };
24128
24204
  PgAlterTableAlterCompositePrimaryKeyConvertor = class extends Convertor {
24129
- can(statement, dialect7) {
24130
- return statement.type === "alter_composite_pk" && dialect7 === "postgresql";
24205
+ can(statement, dialect4) {
24206
+ return statement.type === "alter_composite_pk" && dialect4 === "postgresql";
24131
24207
  }
24132
24208
  convert(statement) {
24133
24209
  const { name, columns } = PgSquasher.unsquashPK(statement.old);
@@ -24140,8 +24216,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24140
24216
  }
24141
24217
  };
24142
24218
  MySqlAlterTableCreateCompositePrimaryKeyConvertor = class extends Convertor {
24143
- can(statement, dialect7) {
24144
- return statement.type === "create_composite_pk" && dialect7 === "mysql";
24219
+ can(statement, dialect4) {
24220
+ return statement.type === "create_composite_pk" && dialect4 === "mysql";
24145
24221
  }
24146
24222
  convert(statement) {
24147
24223
  const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -24149,8 +24225,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24149
24225
  }
24150
24226
  };
24151
24227
  MySqlAlterTableDeleteCompositePrimaryKeyConvertor = class extends Convertor {
24152
- can(statement, dialect7) {
24153
- return statement.type === "delete_composite_pk" && dialect7 === "mysql";
24228
+ can(statement, dialect4) {
24229
+ return statement.type === "delete_composite_pk" && dialect4 === "mysql";
24154
24230
  }
24155
24231
  convert(statement) {
24156
24232
  const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -24158,8 +24234,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24158
24234
  }
24159
24235
  };
24160
24236
  MySqlAlterTableAlterCompositePrimaryKeyConvertor = class extends Convertor {
24161
- can(statement, dialect7) {
24162
- return statement.type === "alter_composite_pk" && dialect7 === "mysql";
24237
+ can(statement, dialect4) {
24238
+ return statement.type === "alter_composite_pk" && dialect4 === "mysql";
24163
24239
  }
24164
24240
  convert(statement) {
24165
24241
  const { name, columns } = MySqlSquasher.unsquashPK(statement.old);
@@ -24170,8 +24246,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24170
24246
  }
24171
24247
  };
24172
24248
  PgAlterTableAlterColumnSetPrimaryKeyConvertor = class extends Convertor {
24173
- can(statement, dialect7) {
24174
- return statement.type === "alter_table_alter_column_set_pk" && dialect7 === "postgresql";
24249
+ can(statement, dialect4) {
24250
+ return statement.type === "alter_table_alter_column_set_pk" && dialect4 === "postgresql";
24175
24251
  }
24176
24252
  convert(statement) {
24177
24253
  const { tableName, columnName } = statement;
@@ -24180,8 +24256,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24180
24256
  }
24181
24257
  };
24182
24258
  PgAlterTableAlterColumnDropPrimaryKeyConvertor = class extends Convertor {
24183
- can(statement, dialect7) {
24184
- return statement.type === "alter_table_alter_column_drop_pk" && dialect7 === "postgresql";
24259
+ can(statement, dialect4) {
24260
+ return statement.type === "alter_table_alter_column_drop_pk" && dialect4 === "postgresql";
24185
24261
  }
24186
24262
  convert(statement) {
24187
24263
  const { tableName, columnName, schema: schema5 } = statement;
@@ -24204,8 +24280,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24204
24280
  }
24205
24281
  };
24206
24282
  PgAlterTableAlterColumnSetNotNullConvertor = class extends Convertor {
24207
- can(statement, dialect7) {
24208
- return statement.type === "alter_table_alter_column_set_notnull" && dialect7 === "postgresql";
24283
+ can(statement, dialect4) {
24284
+ return statement.type === "alter_table_alter_column_set_notnull" && dialect4 === "postgresql";
24209
24285
  }
24210
24286
  convert(statement) {
24211
24287
  const { tableName, columnName } = statement;
@@ -24214,8 +24290,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24214
24290
  }
24215
24291
  };
24216
24292
  PgAlterTableAlterColumnDropNotNullConvertor = class extends Convertor {
24217
- can(statement, dialect7) {
24218
- return statement.type === "alter_table_alter_column_drop_notnull" && dialect7 === "postgresql";
24293
+ can(statement, dialect4) {
24294
+ return statement.type === "alter_table_alter_column_drop_notnull" && dialect4 === "postgresql";
24219
24295
  }
24220
24296
  convert(statement) {
24221
24297
  const { tableName, columnName } = statement;
@@ -24224,8 +24300,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24224
24300
  }
24225
24301
  };
24226
24302
  PgCreateForeignKeyConvertor = class extends Convertor {
24227
- can(statement, dialect7) {
24228
- return statement.type === "create_reference" && dialect7 === "postgresql";
24303
+ can(statement, dialect4) {
24304
+ return statement.type === "create_reference" && dialect4 === "postgresql";
24229
24305
  }
24230
24306
  convert(statement) {
24231
24307
  const {
@@ -24254,8 +24330,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24254
24330
  }
24255
24331
  };
24256
24332
  LibSQLCreateForeignKeyConvertor = class extends Convertor {
24257
- can(statement, dialect7) {
24258
- return statement.type === "create_reference" && dialect7 === "turso";
24333
+ can(statement, dialect4) {
24334
+ return statement.type === "create_reference" && dialect4 === "turso";
24259
24335
  }
24260
24336
  convert(statement, json2, action) {
24261
24337
  const { columnsFrom, columnsTo, tableFrom, onDelete, onUpdate, tableTo } = action === "push" ? SQLiteSquasher.unsquashPushFK(statement.data) : SQLiteSquasher.unsquashFK(statement.data);
@@ -24271,8 +24347,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24271
24347
  }
24272
24348
  };
24273
24349
  MySqlCreateForeignKeyConvertor = class extends Convertor {
24274
- can(statement, dialect7) {
24275
- return statement.type === "create_reference" && dialect7 === "mysql";
24350
+ can(statement, dialect4) {
24351
+ return statement.type === "create_reference" && dialect4 === "mysql";
24276
24352
  }
24277
24353
  convert(statement) {
24278
24354
  const {
@@ -24292,8 +24368,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24292
24368
  }
24293
24369
  };
24294
24370
  PgAlterForeignKeyConvertor = class extends Convertor {
24295
- can(statement, dialect7) {
24296
- return statement.type === "alter_reference" && dialect7 === "postgresql";
24371
+ can(statement, dialect4) {
24372
+ return statement.type === "alter_reference" && dialect4 === "postgresql";
24297
24373
  }
24298
24374
  convert(statement) {
24299
24375
  const newFk = PgSquasher.unsquashFK(statement.data);
@@ -24317,8 +24393,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24317
24393
  }
24318
24394
  };
24319
24395
  PgDeleteForeignKeyConvertor = class extends Convertor {
24320
- can(statement, dialect7) {
24321
- return statement.type === "delete_reference" && dialect7 === "postgresql";
24396
+ can(statement, dialect4) {
24397
+ return statement.type === "delete_reference" && dialect4 === "postgresql";
24322
24398
  }
24323
24399
  convert(statement) {
24324
24400
  const tableFrom = statement.tableName;
@@ -24329,8 +24405,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24329
24405
  }
24330
24406
  };
24331
24407
  MySqlDeleteForeignKeyConvertor = class extends Convertor {
24332
- can(statement, dialect7) {
24333
- return statement.type === "delete_reference" && dialect7 === "mysql";
24408
+ can(statement, dialect4) {
24409
+ return statement.type === "delete_reference" && dialect4 === "mysql";
24334
24410
  }
24335
24411
  convert(statement) {
24336
24412
  const tableFrom = statement.tableName;
@@ -24340,8 +24416,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24340
24416
  }
24341
24417
  };
24342
24418
  CreatePgIndexConvertor = class extends Convertor {
24343
- can(statement, dialect7) {
24344
- return statement.type === "create_index_pg" && dialect7 === "postgresql";
24419
+ can(statement, dialect4) {
24420
+ return statement.type === "create_index_pg" && dialect4 === "postgresql";
24345
24421
  }
24346
24422
  convert(statement) {
24347
24423
  const {
@@ -24372,8 +24448,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24372
24448
  }
24373
24449
  };
24374
24450
  CreateMySqlIndexConvertor = class extends Convertor {
24375
- can(statement, dialect7) {
24376
- return statement.type === "create_index" && dialect7 === "mysql";
24451
+ can(statement, dialect4) {
24452
+ return statement.type === "create_index" && dialect4 === "mysql";
24377
24453
  }
24378
24454
  convert(statement) {
24379
24455
  const { name, columns, isUnique } = MySqlSquasher.unsquashIdx(
@@ -24388,8 +24464,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24388
24464
  }
24389
24465
  };
24390
24466
  CreateSqliteIndexConvertor = class extends Convertor {
24391
- can(statement, dialect7) {
24392
- return statement.type === "create_index" && (dialect7 === "sqlite" || dialect7 === "turso");
24467
+ can(statement, dialect4) {
24468
+ return statement.type === "create_index" && (dialect4 === "sqlite" || dialect4 === "turso");
24393
24469
  }
24394
24470
  convert(statement) {
24395
24471
  const { name, columns, isUnique, where } = SQLiteSquasher.unsquashIdx(
@@ -24405,8 +24481,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24405
24481
  }
24406
24482
  };
24407
24483
  PgDropIndexConvertor = class extends Convertor {
24408
- can(statement, dialect7) {
24409
- return statement.type === "drop_index" && dialect7 === "postgresql";
24484
+ can(statement, dialect4) {
24485
+ return statement.type === "drop_index" && dialect4 === "postgresql";
24410
24486
  }
24411
24487
  convert(statement) {
24412
24488
  const { name } = PgSquasher.unsquashIdx(statement.data);
@@ -24414,8 +24490,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24414
24490
  }
24415
24491
  };
24416
24492
  PgCreateSchemaConvertor = class extends Convertor {
24417
- can(statement, dialect7) {
24418
- return statement.type === "create_schema" && dialect7 === "postgresql";
24493
+ can(statement, dialect4) {
24494
+ return statement.type === "create_schema" && dialect4 === "postgresql";
24419
24495
  }
24420
24496
  convert(statement) {
24421
24497
  const { name } = statement;
@@ -24424,8 +24500,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24424
24500
  }
24425
24501
  };
24426
24502
  PgRenameSchemaConvertor = class extends Convertor {
24427
- can(statement, dialect7) {
24428
- return statement.type === "rename_schema" && dialect7 === "postgresql";
24503
+ can(statement, dialect4) {
24504
+ return statement.type === "rename_schema" && dialect4 === "postgresql";
24429
24505
  }
24430
24506
  convert(statement) {
24431
24507
  const { from, to } = statement;
@@ -24434,8 +24510,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24434
24510
  }
24435
24511
  };
24436
24512
  PgDropSchemaConvertor = class extends Convertor {
24437
- can(statement, dialect7) {
24438
- return statement.type === "drop_schema" && dialect7 === "postgresql";
24513
+ can(statement, dialect4) {
24514
+ return statement.type === "drop_schema" && dialect4 === "postgresql";
24439
24515
  }
24440
24516
  convert(statement) {
24441
24517
  const { name } = statement;
@@ -24444,8 +24520,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24444
24520
  }
24445
24521
  };
24446
24522
  PgAlterTableSetSchemaConvertor = class extends Convertor {
24447
- can(statement, dialect7) {
24448
- return statement.type === "alter_table_set_schema" && dialect7 === "postgresql";
24523
+ can(statement, dialect4) {
24524
+ return statement.type === "alter_table_set_schema" && dialect4 === "postgresql";
24449
24525
  }
24450
24526
  convert(statement) {
24451
24527
  const { tableName, schemaFrom, schemaTo } = statement;
@@ -24454,8 +24530,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24454
24530
  }
24455
24531
  };
24456
24532
  PgAlterTableSetNewSchemaConvertor = class extends Convertor {
24457
- can(statement, dialect7) {
24458
- return statement.type === "alter_table_set_new_schema" && dialect7 === "postgresql";
24533
+ can(statement, dialect4) {
24534
+ return statement.type === "alter_table_set_new_schema" && dialect4 === "postgresql";
24459
24535
  }
24460
24536
  convert(statement) {
24461
24537
  const { tableName, to, from } = statement;
@@ -24465,8 +24541,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24465
24541
  }
24466
24542
  };
24467
24543
  PgAlterTableRemoveFromSchemaConvertor = class extends Convertor {
24468
- can(statement, dialect7) {
24469
- return statement.type === "alter_table_remove_from_schema" && dialect7 === "postgresql";
24544
+ can(statement, dialect4) {
24545
+ return statement.type === "alter_table_remove_from_schema" && dialect4 === "postgresql";
24470
24546
  }
24471
24547
  convert(statement) {
24472
24548
  const { tableName, schema: schema5 } = statement;
@@ -24476,8 +24552,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24476
24552
  }
24477
24553
  };
24478
24554
  SqliteDropIndexConvertor = class extends Convertor {
24479
- can(statement, dialect7) {
24480
- return statement.type === "drop_index" && (dialect7 === "sqlite" || dialect7 === "turso");
24555
+ can(statement, dialect4) {
24556
+ return statement.type === "drop_index" && (dialect4 === "sqlite" || dialect4 === "turso");
24481
24557
  }
24482
24558
  convert(statement) {
24483
24559
  const { name } = PgSquasher.unsquashIdx(statement.data);
@@ -24485,8 +24561,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24485
24561
  }
24486
24562
  };
24487
24563
  MySqlDropIndexConvertor = class extends Convertor {
24488
- can(statement, dialect7) {
24489
- return statement.type === "drop_index" && dialect7 === "mysql";
24564
+ can(statement, dialect4) {
24565
+ return statement.type === "drop_index" && dialect4 === "mysql";
24490
24566
  }
24491
24567
  convert(statement) {
24492
24568
  const { name } = MySqlSquasher.unsquashIdx(statement.data);
@@ -24494,8 +24570,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24494
24570
  }
24495
24571
  };
24496
24572
  SQLiteRecreateTableConvertor = class extends Convertor {
24497
- can(statement, dialect7) {
24498
- return statement.type === "recreate_table" && dialect7 === "sqlite";
24573
+ can(statement, dialect4) {
24574
+ return statement.type === "recreate_table" && dialect4 === "sqlite";
24499
24575
  }
24500
24576
  convert(statement) {
24501
24577
  const { tableName, columns, compositePKs, referenceData } = statement;
@@ -24536,8 +24612,8 @@ ${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newCo
24536
24612
  }
24537
24613
  };
24538
24614
  LibSQLRecreateTableConvertor = class extends Convertor {
24539
- can(statement, dialect7) {
24540
- return statement.type === "recreate_table" && dialect7 === "turso";
24615
+ can(statement, dialect4) {
24616
+ return statement.type === "recreate_table" && dialect4 === "turso";
24541
24617
  }
24542
24618
  convert(statement) {
24543
24619
  const { tableName, columns, compositePKs, referenceData } = statement;
@@ -29637,6 +29713,7 @@ var init_migrate = __esm({
29637
29713
  prepareAndMigratePg = async (config) => {
29638
29714
  const outFolder = config.out;
29639
29715
  const schemaPath = config.schema;
29716
+ const casing2 = config.casing;
29640
29717
  try {
29641
29718
  assertV1OutFolder(outFolder);
29642
29719
  const { snapshots, journal } = prepareMigrationFolder(
@@ -29645,7 +29722,8 @@ var init_migrate = __esm({
29645
29722
  );
29646
29723
  const { prev, cur, custom: custom2 } = await preparePgMigrationSnapshot(
29647
29724
  snapshots,
29648
- schemaPath
29725
+ schemaPath,
29726
+ casing2
29649
29727
  );
29650
29728
  const validatedPrev = pgSchema.parse(prev);
29651
29729
  const validatedCur = pgSchema.parse(cur);
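From this point the generate pipeline reads `config.casing` and passes it into every snapshot-preparation call (Postgres here; MySQL, SQLite and libSQL in the hunks that follow), so the previous and the new snapshot are serialized under the same identifier policy. What that changes in practice, sketched with an ordinary drizzle schema; this assumes a drizzle-orm version that lets the column name be omitted so the TypeScript key is used:

```ts
import { pgTable, text } from "drizzle-orm/pg-core";

// With casing set to "snake_case" the generated migration names the first
// column "first_name"; with no casing configured it stays "firstName".
// Columns with an explicit name keep it either way.
export const users = pgTable("users", {
  firstName: text(),            // DB name derived from the TS key
  lastName: text("last_name"),  // explicit name, not re-cased
});
```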
@@ -29688,10 +29766,11 @@ var init_migrate = __esm({
29688
29766
  console.error(e2);
29689
29767
  }
29690
29768
  };
29691
- preparePgPush = async (schemaPath, snapshot, schemaFilter) => {
29769
+ preparePgPush = async (schemaPath, snapshot, schemaFilter, casing2) => {
29692
29770
  const { prev, cur } = await preparePgDbPushSnapshot(
29693
29771
  snapshot,
29694
29772
  schemaPath,
29773
+ casing2,
29695
29774
  schemaFilter
29696
29775
  );
29697
29776
  const validatedPrev = pgSchema.parse(prev);
@@ -29712,11 +29791,12 @@ var init_migrate = __esm({
29712
29791
  );
29713
29792
  return { sqlStatements, statements, squashedPrev, squashedCur };
29714
29793
  };
29715
- prepareMySQLPush = async (schemaPath, snapshot) => {
29794
+ prepareMySQLPush = async (schemaPath, snapshot, casing2) => {
29716
29795
  try {
29717
29796
  const { prev, cur } = await prepareMySqlDbPushSnapshot(
29718
29797
  snapshot,
29719
- schemaPath
29798
+ schemaPath,
29799
+ casing2
29720
29800
  );
29721
29801
  const validatedPrev = mysqlSchema.parse(prev);
29722
29802
  const validatedCur = mysqlSchema.parse(cur);
@@ -29740,12 +29820,14 @@ var init_migrate = __esm({
29740
29820
  prepareAndMigrateMysql = async (config) => {
29741
29821
  const outFolder = config.out;
29742
29822
  const schemaPath = config.schema;
29823
+ const casing2 = config.casing;
29743
29824
  try {
29744
29825
  assertV1OutFolder(outFolder);
29745
29826
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "mysql");
29746
29827
  const { prev, cur, custom: custom2 } = await prepareMySqlMigrationSnapshot(
29747
29828
  snapshots,
29748
- schemaPath
29829
+ schemaPath,
29830
+ casing2
29749
29831
  );
29750
29832
  const validatedPrev = mysqlSchema.parse(prev);
29751
29833
  const validatedCur = mysqlSchema.parse(cur);
@@ -29789,12 +29871,14 @@ var init_migrate = __esm({
29789
29871
  prepareAndMigrateSqlite = async (config) => {
29790
29872
  const outFolder = config.out;
29791
29873
  const schemaPath = config.schema;
29874
+ const casing2 = config.casing;
29792
29875
  try {
29793
29876
  assertV1OutFolder(outFolder);
29794
29877
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "sqlite");
29795
29878
  const { prev, cur, custom: custom2 } = await prepareSqliteMigrationSnapshot(
29796
29879
  snapshots,
29797
- schemaPath
29880
+ schemaPath,
29881
+ casing2
29798
29882
  );
29799
29883
  const validatedPrev = sqliteSchema.parse(prev);
29800
29884
  const validatedCur = sqliteSchema.parse(cur);
@@ -29840,12 +29924,14 @@ var init_migrate = __esm({
29840
29924
  prepareAndMigrateLibSQL = async (config) => {
29841
29925
  const outFolder = config.out;
29842
29926
  const schemaPath = config.schema;
29927
+ const casing2 = config.casing;
29843
29928
  try {
29844
29929
  assertV1OutFolder(outFolder);
29845
29930
  const { snapshots, journal } = prepareMigrationFolder(outFolder, "sqlite");
29846
29931
  const { prev, cur, custom: custom2 } = await prepareSqliteMigrationSnapshot(
29847
29932
  snapshots,
29848
- schemaPath
29933
+ schemaPath,
29934
+ casing2
29849
29935
  );
29850
29936
  const validatedPrev = sqliteSchema.parse(prev);
29851
29937
  const validatedCur = sqliteSchema.parse(cur);
@@ -29888,8 +29974,8 @@ var init_migrate = __esm({
29888
29974
  console.error(e2);
29889
29975
  }
29890
29976
  };
29891
- prepareSQLitePush = async (schemaPath, snapshot) => {
29892
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
29977
+ prepareSQLitePush = async (schemaPath, snapshot, casing2) => {
29978
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing2);
29893
29979
  const validatedPrev = sqliteSchema.parse(prev);
29894
29980
  const validatedCur = sqliteSchema.parse(cur);
29895
29981
  const squashedPrev = squashSqliteScheme(validatedPrev, "push");
@@ -29911,8 +29997,8 @@ var init_migrate = __esm({
29911
29997
  meta: _meta
29912
29998
  };
29913
29999
  };
29914
- prepareLibSQLPush = async (schemaPath, snapshot) => {
29915
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
30000
+ prepareLibSQLPush = async (schemaPath, snapshot, casing2) => {
30001
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing2);
29916
30002
  const validatedPrev = sqliteSchema.parse(prev);
29917
30003
  const validatedCur = sqliteSchema.parse(cur);
29918
30004
  const squashedPrev = squashSqliteScheme(validatedPrev, "push");
@@ -62599,7 +62685,7 @@ var init_studio2 = __esm({
62599
62685
  });
62600
62686
  };
62601
62687
  prepareServer = async ({
62602
- dialect: dialect7,
62688
+ dialect: dialect4,
62603
62689
  driver: driver2,
62604
62690
  proxy,
62605
62691
  customDefaults,
@@ -62649,7 +62735,7 @@ var init_studio2 = __esm({
62649
62735
  }));
62650
62736
  return c.json({
62651
62737
  version: "6",
62652
- dialect: dialect7,
62738
+ dialect: dialect4,
62653
62739
  driver: driver2,
62654
62740
  schemaFiles,
62655
62741
  customDefaults: preparedDefaults,
@@ -78956,11 +79042,11 @@ var init_pgIntrospect = __esm({
78956
79042
  });
78957
79043
 
78958
79044
  // src/introspect-sqlite.ts
78959
- var import_casing, sqliteImportsList, indexName3, objToStatement2, relations, escapeColumnKey, withCasing, dbColumnName, schemaToTypeScript, isCyclic, isSelf, mapColumnDefault, column4, createTableColumns, createTableIndexes, createTableUniques, createTablePKs, createTableFKs;
79045
+ var import_casing3, sqliteImportsList, indexName3, objToStatement2, relations, escapeColumnKey, withCasing, dbColumnName, schemaToTypeScript, isCyclic, isSelf, mapColumnDefault, column4, createTableColumns, createTableIndexes, createTableUniques, createTablePKs, createTableFKs;
78960
79046
  var init_introspect_sqlite = __esm({
78961
79047
  "src/introspect-sqlite.ts"() {
78962
79048
  "use strict";
78963
- import_casing = require("drizzle-orm/casing");
79049
+ import_casing3 = require("drizzle-orm/casing");
78964
79050
  init_utils3();
78965
79051
  init_global();
78966
79052
  sqliteImportsList = /* @__PURE__ */ new Set([
@@ -79005,7 +79091,7 @@ var init_introspect_sqlite = __esm({
79005
79091
  return "";
79006
79092
  }
79007
79093
  if (casing2 === "camel") {
79008
- return (0, import_casing.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
79094
+ return (0, import_casing3.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
79009
79095
  }
79010
79096
  assertUnreachable(casing2);
79011
79097
  };
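On the introspection side (`introspect-sqlite.ts` here; the MySQL and Postgres files below carry the same change, only with renumbered `import_casing` bindings), an explicit name argument is emitted only when the database name differs from its camelCase form, because in that case the TypeScript key alone can no longer round-trip back to the right identifier. The same rule as a standalone helper, with illustrative outputs in the comments:

```ts
import { toCamelCase } from "drizzle-orm/casing";

// Returns the name argument to place in a generated column builder call when
// camelCase output is requested, or "" when the camelCased DB name already
// equals the DB name. `withMode` mirrors the flag above: builders that take a
// mode after the name need a trailing comma and space.
function dbColumnNameArg(dbName: string, withMode = false): string {
  if (toCamelCase(dbName) === dbName) return "";
  return withMode ? `"${dbName}", ` : `"${dbName}"`;
}

// dbColumnNameArg("created_at") => '"created_at"'  → createdAt: timestamp("created_at")
// dbColumnNameArg("id")         => ""              → id: integer()
```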
@@ -79380,13 +79466,13 @@ var init_push = __esm({
79380
79466
  init_mysqlPushUtils();
79381
79467
  init_pgPushUtils();
79382
79468
  init_sqlitePushUtils();
79383
- mysqlPush = async (schemaPath, credentials2, tablesFilter, strict, verbose, force) => {
79469
+ mysqlPush = async (schemaPath, credentials2, tablesFilter, strict, verbose, force, casing2) => {
79384
79470
  const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
79385
79471
  const { mysqlPushIntrospect: mysqlPushIntrospect2 } = await Promise.resolve().then(() => (init_mysqlIntrospect(), mysqlIntrospect_exports));
79386
79472
  const { db, database } = await connectToMySQL2(credentials2);
79387
79473
  const { schema: schema5 } = await mysqlPushIntrospect2(db, database, tablesFilter);
79388
79474
  const { prepareMySQLPush: prepareMySQLPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
79389
- const statements = await prepareMySQLPush2(schemaPath, schema5);
79475
+ const statements = await prepareMySQLPush2(schemaPath, schema5, casing2);
79390
79476
  const filteredStatements = filterStatements(
79391
79477
  statements.statements ?? [],
79392
79478
  statements.validatedCur,
@@ -79481,13 +79567,13 @@ var init_push = __esm({
79481
79567
  console.log(e2);
79482
79568
  }
79483
79569
  };
79484
- pgPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, schemasFilter, force) => {
79570
+ pgPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, schemasFilter, force, casing2) => {
79485
79571
  const { preparePostgresDB: preparePostgresDB2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
79486
79572
  const { pgPushIntrospect: pgPushIntrospect2 } = await Promise.resolve().then(() => (init_pgIntrospect(), pgIntrospect_exports));
79487
79573
  const db = await preparePostgresDB2(credentials2);
79488
79574
  const { schema: schema5 } = await pgPushIntrospect2(db, tablesFilter, schemasFilter);
79489
79575
  const { preparePgPush: preparePgPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
79490
- const statements = await preparePgPush2(schemaPath, schema5, schemasFilter);
79576
+ const statements = await preparePgPush2(schemaPath, schema5, schemasFilter, casing2);
79491
79577
  try {
79492
79578
  if (statements.sqlStatements.length === 0) {
79493
79579
  (0, import_hanji11.render)(`[${source_default.blue("i")}] No changes detected`);
@@ -79555,13 +79641,13 @@ var init_push = __esm({
79555
79641
  console.error(e2);
79556
79642
  }
79557
79643
  };
79558
- sqlitePush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force) => {
79644
+ sqlitePush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force, casing2) => {
79559
79645
  const { connectToSQLite: connectToSQLite2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
79560
79646
  const { sqlitePushIntrospect: sqlitePushIntrospect2 } = await Promise.resolve().then(() => (init_sqliteIntrospect(), sqliteIntrospect_exports));
79561
79647
  const db = await connectToSQLite2(credentials2);
79562
79648
  const { schema: schema5 } = await sqlitePushIntrospect2(db, tablesFilter);
79563
79649
  const { prepareSQLitePush: prepareSQLitePush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
79564
- const statements = await prepareSQLitePush2(schemaPath, schema5);
79650
+ const statements = await prepareSQLitePush2(schemaPath, schema5, casing2);
79565
79651
  if (statements.sqlStatements.length === 0) {
79566
79652
  (0, import_hanji11.render)(`
79567
79653
  [${source_default.blue("i")}] No changes detected`);
@@ -79643,13 +79729,13 @@ var init_push = __esm({
79643
79729
  }
79644
79730
  }
79645
79731
  };
79646
- libSQLPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force) => {
79732
+ libSQLPush = async (schemaPath, verbose, strict, credentials2, tablesFilter, force, casing2) => {
79647
79733
  const { connectToLibSQL: connectToLibSQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
79648
79734
  const { sqlitePushIntrospect: sqlitePushIntrospect2 } = await Promise.resolve().then(() => (init_sqliteIntrospect(), sqliteIntrospect_exports));
79649
79735
  const db = await connectToLibSQL2(credentials2);
79650
79736
  const { schema: schema5 } = await sqlitePushIntrospect2(db, tablesFilter);
79651
79737
  const { prepareLibSQLPush: prepareLibSQLPush2 } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
79652
- const statements = await prepareLibSQLPush2(schemaPath, schema5);
79738
+ const statements = await prepareLibSQLPush2(schemaPath, schema5, casing2);
79653
79739
  if (statements.sqlStatements.length === 0) {
79654
79740
  (0, import_hanji11.render)(`
79655
79741
  [${source_default.blue("i")}] No changes detected`);
@@ -80079,11 +80165,11 @@ var require_pluralize = __commonJS({
80079
80165
  });
80080
80166
 
80081
80167
  // src/introspect-mysql.ts
80082
- var import_casing2, mysqlImportsList, objToStatement22, timeConfig, binaryConfig, importsPatch, relations2, escapeColumnKey2, prepareCasing, dbColumnName2, schemaToTypeScript2, isCyclic2, isSelf2, mapColumnDefault2, mapColumnDefaultForJson, column5, createTableColumns2, createTableIndexes2, createTableUniques2, createTablePKs2, createTableFKs2;
80168
+ var import_casing4, mysqlImportsList, objToStatement22, timeConfig, binaryConfig, importsPatch, relations2, escapeColumnKey2, prepareCasing, dbColumnName2, schemaToTypeScript2, isCyclic2, isSelf2, mapColumnDefault2, mapColumnDefaultForJson, column5, createTableColumns2, createTableIndexes2, createTableUniques2, createTablePKs2, createTableFKs2;
80083
80169
  var init_introspect_mysql = __esm({
80084
80170
  "src/introspect-mysql.ts"() {
80085
80171
  "use strict";
80086
- import_casing2 = require("drizzle-orm/casing");
80172
+ import_casing4 = require("drizzle-orm/casing");
80087
80173
  init_utils3();
80088
80174
  init_global();
80089
80175
  init_mysqlSerializer();
@@ -80172,7 +80258,7 @@ var init_introspect_mysql = __esm({
80172
80258
  return "";
80173
80259
  }
80174
80260
  if (casing2 === "camel") {
80175
- return (0, import_casing2.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80261
+ return (0, import_casing4.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80176
80262
  }
80177
80263
  assertUnreachable(casing2);
80178
80264
  };
@@ -80670,14 +80756,14 @@ function generateIdentityParams(identity) {
80670
80756
  }
80671
80757
  return `.generatedByDefaultAsIdentity(${paramsObj})`;
80672
80758
  }
80673
- var import_drizzle_orm9, import_relations, import_casing3, pgImportsList, timeConfig2, possibleIntervals, intervalStrToObj, intervalConfig, mapColumnDefault3, importsPatch2, relations3, escapeColumnKey3, withCasing2, dbColumnName3, paramNameFor, schemaToTypeScript3, isCyclic3, isSelf3, buildArrayDefault, mapDefault, column6, dimensionsInArray, createTableColumns3, createTableIndexes3, createTablePKs3, createTableUniques3, createTableFKs3;
80759
+ var import_drizzle_orm9, import_relations, import_casing5, pgImportsList, timeConfig2, possibleIntervals, intervalStrToObj, intervalConfig, mapColumnDefault3, importsPatch2, relations3, escapeColumnKey3, withCasing2, dbColumnName3, paramNameFor, schemaToTypeScript3, isCyclic3, isSelf3, buildArrayDefault, mapDefault, column6, dimensionsInArray, createTableColumns3, createTableIndexes3, createTablePKs3, createTableUniques3, createTableFKs3;
80674
80760
  var init_introspect_pg = __esm({
80675
80761
  "src/introspect-pg.ts"() {
80676
80762
  "use strict";
80677
80763
  import_drizzle_orm9 = require("drizzle-orm");
80678
80764
  import_relations = require("drizzle-orm/relations");
80679
80765
  init_utils3();
80680
- import_casing3 = require("drizzle-orm/casing");
80766
+ import_casing5 = require("drizzle-orm/casing");
80681
80767
  init_vector();
80682
80768
  init_global();
80683
80769
  init_pgSerializer();
@@ -80808,7 +80894,7 @@ var init_introspect_pg = __esm({
80808
80894
  return "";
80809
80895
  }
80810
80896
  if (casing2 === "camel") {
80811
- return (0, import_casing3.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80897
+ return (0, import_casing5.toCamelCase)(name) === name ? "" : withMode ? `"${name}", ` : `"${name}"`;
80812
80898
  }
80813
80899
  assertUnreachable(casing2);
80814
80900
  };
@@ -83393,9 +83479,9 @@ init_source();
83393
83479
 
83394
83480
  // src/cli/commands/check.ts
83395
83481
  init_utils();
83396
- var checkHandler = (out, dialect7) => {
83397
- const { snapshots } = prepareOutFolder(out, dialect7);
83398
- const report = validateWithReport(snapshots, dialect7);
83482
+ var checkHandler = (out, dialect4) => {
83483
+ const { snapshots } = prepareOutFolder(out, dialect4);
83484
+ const report = validateWithReport(snapshots, dialect4);
83399
83485
  if (report.nonLatest.length > 0) {
83400
83486
  console.log(
83401
83487
  report.nonLatest.map((it) => {
@@ -84682,12 +84768,14 @@ var optionBreakpoints = boolean().desc(
  `Prepare SQL statements with breakpoints`
  );
  var optionDriver = string().enum(...drivers).desc("Database driver");
+ var optionCasing = string().enum("camelCase", "snake_case").desc("Casing for serialization");
  var generate = command({
  name: "generate",
  options: {
  config: optionConfig,
  dialect: optionDialect,
  driver: optionDriver,
+ casing: optionCasing,
  schema: string().desc("Path to a schema file or folder"),
  out: optionOut,
  name: string().desc("Migration file name"),
@@ -84700,7 +84788,7 @@ var generate = command({
  "generate",
  opts,
  ["prefix", "name", "custom"],
- ["driver", "breakpoints", "schema", "out", "dialect"]
+ ["driver", "breakpoints", "schema", "out", "dialect", "casing"]
  );
  return prepareGenerateConfig(opts, from);
  },
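These hunks add the new casing option ("camelCase" | "snake_case") to the generate command and whitelist the key for validation before prepareGenerateConfig runs; judging by the option name, it should surface on the CLI as --casing. A hypothetical config-file equivalent, assuming the same key is accepted in drizzle.config.ts (the config-file path is not shown in this diff):

// hypothetical drizzle.config.ts; assumes the config file accepts the same
// "casing" key that this CLI option feeds into prepareGenerateConfig
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "postgresql",
  schema: "./src/schema.ts",
  out: "./drizzle",
  casing: "snake_case", // or "camelCase", per the enum declared above
});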
@@ -84713,17 +84801,17 @@ var generate = command({
  prepareAndMigrateSqlite: prepareAndMigrateSqlite2,
  prepareAndMigrateLibSQL: prepareAndMigrateLibSQL2
  } = await Promise.resolve().then(() => (init_migrate(), migrate_exports));
- const dialect7 = opts.dialect;
- if (dialect7 === "postgresql") {
+ const dialect4 = opts.dialect;
+ if (dialect4 === "postgresql") {
  await prepareAndMigratePg2(opts);
- } else if (dialect7 === "mysql") {
+ } else if (dialect4 === "mysql") {
  await prepareAndMigrateMysql2(opts);
- } else if (dialect7 === "sqlite") {
+ } else if (dialect4 === "sqlite") {
  await prepareAndMigrateSqlite2(opts);
- } else if (dialect7 === "turso") {
+ } else if (dialect4 === "turso") {
  await prepareAndMigrateLibSQL2(opts);
  } else {
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  }
  }
  });
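In this handler only the bundler-generated dialect binding was renumbered (dialect7 to dialect4); the dispatch still ends in assertUnreachable, the standard TypeScript exhaustiveness guard. A minimal sketch of that pattern, with an illustrative wrapper rather than the actual drizzle-kit source:

// illustrative exhaustiveness guard; the surrounding function and type alias are not from src/
type Dialect = "postgresql" | "mysql" | "sqlite" | "turso";

function assertUnreachable(value: never): never {
  throw new Error(`Unexpected dialect: ${value}`);
}

function dispatch(dialect: Dialect): string {
  if (dialect === "postgresql") return "pg";
  if (dialect === "mysql") return "mysql";
  if (dialect === "sqlite") return "sqlite";
  if (dialect === "turso") return "libsql";
  // adding a new member to Dialect makes this call fail to type-check,
  // because `dialect` is no longer narrowed to `never` here
  return assertUnreachable(dialect);
}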
@@ -84738,9 +84826,9 @@ var migrate = command({
  handler: async (opts) => {
  await assertOrmCoreVersion();
  await assertPackages("drizzle-orm");
- const { dialect: dialect7, schema: schema5, table: table4, out, credentials: credentials2 } = opts;
+ const { dialect: dialect4, schema: schema5, table: table4, out, credentials: credentials2 } = opts;
  try {
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  if ("driver" in credentials2) {
  const { driver: driver2 } = credentials2;
  if (driver2 === "aws-data-api") {
@@ -84771,7 +84859,7 @@ var migrate = command({
  migrationsSchema: schema5
  })
  );
- } else if (dialect7 === "mysql") {
+ } else if (dialect4 === "mysql") {
  const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
  const { migrate: migrate2 } = await connectToMySQL2(credentials2);
  await (0, import_hanji13.renderWithTask)(
@@ -84782,7 +84870,7 @@ var migrate = command({
  migrationsSchema: schema5
  })
  );
- } else if (dialect7 === "sqlite") {
+ } else if (dialect4 === "sqlite") {
  const { connectToSQLite: connectToSQLite2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
  const { migrate: migrate2 } = await connectToSQLite2(credentials2);
  await (0, import_hanji13.renderWithTask)(
@@ -84793,7 +84881,7 @@ var migrate = command({
  migrationsSchema: schema5
  })
  );
- } else if (dialect7 === "turso") {
+ } else if (dialect4 === "turso") {
  const { connectToLibSQL: connectToLibSQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
  const { migrate: migrate2 } = await connectToLibSQL2(credentials2);
  await (0, import_hanji13.renderWithTask)(
@@ -84805,7 +84893,7 @@ var migrate = command({
  })
  );
  } else {
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  }
  } catch (e2) {
  console.error(e2);
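The repeated `await Promise.resolve().then(() => (init_connections(), connections_exports))` calls above are esbuild's CommonJS lowering of lazy dynamic imports; nothing in these migrate hunks changed apart from the dialect7-to-dialect4 renumbering. For reference, a sketch of the un-bundled shape such a call typically corresponds to (the module path is an assumption):

// illustrative only: the pre-bundling form of the lazy-import pattern above
async function loadMySqlMigrator(credentials: unknown) {
  const { connectToMySQL } = await import("./cli/connections"); // path is an assumption
  const { migrate } = await connectToMySQL(credentials);
  return migrate;
}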
@@ -84839,6 +84927,7 @@ var push = command({
  options: {
  config: optionConfig,
  dialect: optionDialect,
+ casing: optionCasing,
  schema: string().desc("Path to a schema file or folder"),
  ...optionsFilters,
  ...optionsDatabaseCredentials,
@@ -84867,7 +84956,8 @@ var push = command({
  "authToken",
  "schemaFilters",
  "extensionsFilters",
- "tablesFilter"
+ "tablesFilter",
+ "casing"
  ]
  );
  return preparePushConfig(opts, from);
@@ -84876,17 +84966,18 @@ var push = command({
  await assertPackages("drizzle-orm");
  await assertOrmCoreVersion();
  const {
- dialect: dialect7,
+ dialect: dialect4,
  schemaPath,
  strict,
  verbose,
  credentials: credentials2,
  tablesFilter,
  schemasFilter,
- force
+ force,
+ casing: casing2
  } = config;
  try {
- if (dialect7 === "mysql") {
+ if (dialect4 === "mysql") {
  const { mysqlPush: mysqlPush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
  await mysqlPush2(
  schemaPath,
@@ -84894,9 +84985,10 @@ var push = command({
  tablesFilter,
  strict,
  verbose,
- force
+ force,
+ casing2
  );
- } else if (dialect7 === "postgresql") {
+ } else if (dialect4 === "postgresql") {
  if ("driver" in credentials2) {
  const { driver: driver2 } = credentials2;
  if (driver2 === "aws-data-api") {
@@ -84925,9 +85017,10 @@ var push = command({
  credentials2,
  tablesFilter,
  schemasFilter,
- force
+ force,
+ casing2
  );
- } else if (dialect7 === "sqlite") {
+ } else if (dialect4 === "sqlite") {
  const { sqlitePush: sqlitePush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
  await sqlitePush2(
  schemaPath,
@@ -84935,9 +85028,10 @@ var push = command({
  strict,
  credentials2,
  tablesFilter,
- force
+ force,
+ casing2
  );
- } else if (dialect7 === "turso") {
+ } else if (dialect4 === "turso") {
  const { libSQLPush: libSQLPush2 } = await Promise.resolve().then(() => (init_push(), push_exports));
  await libSQLPush2(
  schemaPath,
@@ -84945,10 +85039,11 @@ var push = command({
  strict,
  credentials2,
  tablesFilter,
- force
+ force,
+ casing2
  );
  } else {
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  }
  } catch (e2) {
  console.error(e2);
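Beyond the identifier renumbering, the substantive change in these push hunks is that the new casing value is destructured from the resolved config and threaded into each dialect's push implementation (mysqlPush2, sqlitePush2, libSQLPush2, and the PostgreSQL counterpart cut off by the hunk) as an extra trailing argument. A hedged sketch of what that value ultimately controls, assuming it drives drizzle-orm's toCamelCase/toSnakeCase mapping for columns declared without an explicit database name (that mapping lives outside this diff):

// sketch only; assumes drizzle-orm's casing feature, which is not part of this diff
import { integer, pgTable, text } from "drizzle-orm/pg-core";

export const users = pgTable("users", {
  id: integer().primaryKey(),     // serialized as "id" either way
  firstName: text(),              // "first_name" under casing: "snake_case"
  createdAt: text("created_at"),  // explicit names are never re-cased
});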
@@ -84969,8 +85064,8 @@ var check = command({
  },
  handler: async (config) => {
  await assertOrmCoreVersion();
- const { out, dialect: dialect7 } = config;
- checkHandler(out, dialect7);
+ const { out, dialect: dialect4 } = config;
+ checkHandler(out, dialect4);
  console.log("Everything's fine \u{1F436}\u{1F525}");
  }
  });
@@ -84987,15 +85082,15 @@ var up = command({
  },
  handler: async (config) => {
  await assertOrmCoreVersion();
- const { out, dialect: dialect7 } = config;
+ const { out, dialect: dialect4 } = config;
  await assertPackages("drizzle-orm");
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  upPgHandler(out);
  }
- if (dialect7 === "mysql") {
+ if (dialect4 === "mysql") {
  upMysqlHandler(out);
  }
- if (dialect7 === "sqlite" || dialect7 === "turso") {
+ if (dialect4 === "sqlite" || dialect4 === "turso") {
  upSqliteHandler(out);
  }
  }
@@ -85042,7 +85137,7 @@ var pull = command({
  await assertPackages("drizzle-orm");
  await assertOrmCoreVersion();
  const {
- dialect: dialect7,
+ dialect: dialect4,
  credentials: credentials2,
  out,
  casing: casing2,
@@ -85059,7 +85154,7 @@ var pull = command({
  );
  console.log();
  try {
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  if ("driver" in credentials2) {
  const { driver: driver2 } = credentials2;
  if (driver2 === "aws-data-api") {
@@ -85090,7 +85185,7 @@ var pull = command({
  schemasFilter,
  prefix2
  );
- } else if (dialect7 === "mysql") {
+ } else if (dialect4 === "mysql") {
  const { introspectMysql: introspectMysql2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
  await introspectMysql2(
  casing2,
@@ -85100,7 +85195,7 @@ var pull = command({
  tablesFilter,
  prefix2
  );
- } else if (dialect7 === "sqlite") {
+ } else if (dialect4 === "sqlite") {
  const { introspectSqlite: introspectSqlite2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
  await introspectSqlite2(
  casing2,
@@ -85110,7 +85205,7 @@ var pull = command({
  tablesFilter,
  prefix2
  );
- } else if (dialect7 === "turso") {
+ } else if (dialect4 === "turso") {
  const { introspectLibSQL: introspectLibSQL2 } = await Promise.resolve().then(() => (init_introspect(), introspect_exports));
  await introspectLibSQL2(
  casing2,
@@ -85121,7 +85216,7 @@ var pull = command({
  prefix2
  );
  } else {
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  }
  } catch (e2) {
  console.error(e2);
@@ -85159,7 +85254,7 @@ var studio = command({
  await assertPackages("drizzle-orm");
  assertStudioNodeVersion();
  const {
- dialect: dialect7,
+ dialect: dialect4,
  schema: schemaPath,
  port,
  host,
@@ -85175,7 +85270,7 @@ var studio = command({
  } = await Promise.resolve().then(() => (init_studio2(), studio_exports));
  let setup;
  try {
- if (dialect7 === "postgresql") {
+ if (dialect4 === "postgresql") {
  if ("driver" in credentials2) {
  const { driver: driver2 } = credentials2;
  if (driver2 === "aws-data-api") {
@@ -85198,17 +85293,17 @@ var studio = command({
  }
  const { schema: schema5, relations: relations4, files } = schemaPath ? await preparePgSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
  setup = await drizzleForPostgres2(credentials2, schema5, relations4, files);
- } else if (dialect7 === "mysql") {
+ } else if (dialect4 === "mysql") {
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareMySqlSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
  setup = await drizzleForMySQL2(credentials2, schema5, relations4, files);
- } else if (dialect7 === "sqlite") {
+ } else if (dialect4 === "sqlite") {
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareSQLiteSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
  setup = await drizzleForSQLite2(credentials2, schema5, relations4, files);
- } else if (dialect7 === "turso") {
+ } else if (dialect4 === "turso") {
  const { schema: schema5, relations: relations4, files } = schemaPath ? await prepareSQLiteSchema2(schemaPath) : { schema: {}, relations: {}, files: [] };
  setup = await drizzleForLibSQL(credentials2, schema5, relations4, files);
  } else {
- assertUnreachable(dialect7);
+ assertUnreachable(dialect4);
  }
  const { prepareServer: prepareServer2 } = await Promise.resolve().then(() => (init_studio2(), studio_exports));
  const server = await prepareServer2(setup);
@@ -85259,7 +85354,7 @@ init_utils2();
  var version2 = async () => {
  const { npmVersion } = await ormCoreVersions();
  const ormVersion = npmVersion ? `drizzle-orm: v${npmVersion}` : "";
- const envVersion = "0.25.0-d1da3b8";
+ const envVersion = "0.25.0-f5d46d3";
  const kitVersion = envVersion ? `v${envVersion}` : "--";
  const versions = `drizzle-kit: ${kitVersion}
  ${ormVersion}`;
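The final hunk only bumps the embedded build tag from d1da3b8 to f5d46d3, so the version banner assembled here should now read roughly as follows (the drizzle-orm line is a placeholder, since it is resolved at runtime from the installed package):

// illustrative value of `versions` after this change
const versions = `drizzle-kit: v0.25.0-f5d46d3
drizzle-orm: v<installed drizzle-orm version>`;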