durcno 1.0.0-alpha.0 → 1.0.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/README.md +11 -8
  2. package/dist/bin.cjs +84 -69
  3. package/dist/src/columns/bigint.d.mts +1 -0
  4. package/dist/src/columns/bigint.mjs +3 -0
  5. package/dist/src/columns/bigserial.d.mts +1 -0
  6. package/dist/src/columns/bigserial.mjs +3 -0
  7. package/dist/src/columns/boolean.d.mts +1 -0
  8. package/dist/src/columns/boolean.mjs +3 -0
  9. package/dist/src/columns/bytea.d.mts +1 -0
  10. package/dist/src/columns/bytea.mjs +3 -0
  11. package/dist/src/columns/char.d.mts +1 -0
  12. package/dist/src/columns/char.mjs +3 -0
  13. package/dist/src/columns/cidr.d.mts +1 -0
  14. package/dist/src/columns/cidr.mjs +3 -0
  15. package/dist/src/columns/common.d.mts +7 -1
  16. package/dist/src/columns/common.mjs +21 -7
  17. package/dist/src/columns/date.d.mts +1 -0
  18. package/dist/src/columns/date.mjs +3 -0
  19. package/dist/src/columns/enum.d.mts +1 -0
  20. package/dist/src/columns/enum.mjs +3 -0
  21. package/dist/src/columns/inet.d.mts +1 -0
  22. package/dist/src/columns/inet.mjs +3 -0
  23. package/dist/src/columns/integer.d.mts +1 -0
  24. package/dist/src/columns/integer.mjs +3 -0
  25. package/dist/src/columns/json.d.mts +1 -0
  26. package/dist/src/columns/json.mjs +3 -0
  27. package/dist/src/columns/jsonb.d.mts +1 -0
  28. package/dist/src/columns/jsonb.mjs +3 -0
  29. package/dist/src/columns/macaddr.d.mts +1 -0
  30. package/dist/src/columns/macaddr.mjs +3 -0
  31. package/dist/src/columns/numeric.d.mts +1 -0
  32. package/dist/src/columns/numeric.mjs +3 -0
  33. package/dist/src/columns/postgis/geography/linestring.d.mts +1 -0
  34. package/dist/src/columns/postgis/geography/linestring.mjs +3 -0
  35. package/dist/src/columns/postgis/geography/multilinestring.d.mts +1 -0
  36. package/dist/src/columns/postgis/geography/multilinestring.mjs +3 -0
  37. package/dist/src/columns/postgis/geography/multipoint.d.mts +1 -0
  38. package/dist/src/columns/postgis/geography/multipoint.mjs +3 -0
  39. package/dist/src/columns/postgis/geography/multipolygon.d.mts +1 -0
  40. package/dist/src/columns/postgis/geography/multipolygon.mjs +3 -0
  41. package/dist/src/columns/postgis/geography/point.d.mts +1 -0
  42. package/dist/src/columns/postgis/geography/point.mjs +3 -0
  43. package/dist/src/columns/postgis/geography/polygon.d.mts +1 -0
  44. package/dist/src/columns/postgis/geography/polygon.mjs +3 -0
  45. package/dist/src/columns/serial.d.mts +1 -0
  46. package/dist/src/columns/serial.mjs +3 -0
  47. package/dist/src/columns/smallint.d.mts +1 -0
  48. package/dist/src/columns/smallint.mjs +3 -0
  49. package/dist/src/columns/smallserial.d.mts +1 -0
  50. package/dist/src/columns/smallserial.mjs +3 -0
  51. package/dist/src/columns/text.d.mts +1 -0
  52. package/dist/src/columns/text.mjs +3 -0
  53. package/dist/src/columns/time.d.mts +1 -0
  54. package/dist/src/columns/time.mjs +3 -0
  55. package/dist/src/columns/timestamp.d.mts +1 -0
  56. package/dist/src/columns/timestamp.mjs +3 -0
  57. package/dist/src/columns/uuid.d.mts +1 -0
  58. package/dist/src/columns/uuid.mjs +3 -0
  59. package/dist/src/columns/varchar.d.mts +1 -0
  60. package/dist/src/columns/varchar.mjs +3 -0
  61. package/dist/src/connectors/bun.d.mts +3 -1
  62. package/dist/src/connectors/bun.mjs +5 -1
  63. package/dist/src/connectors/common.d.mts +2 -1
  64. package/dist/src/connectors/common.mjs +2 -1
  65. package/dist/src/connectors/pg.d.mts +3 -1
  66. package/dist/src/connectors/pg.mjs +5 -1
  67. package/dist/src/connectors/pglite.d.mts +6 -1
  68. package/dist/src/connectors/pglite.mjs +16 -7
  69. package/dist/src/connectors/postgres.d.mts +3 -1
  70. package/dist/src/connectors/postgres.mjs +8 -4
  71. package/dist/src/db.d.mts +1 -2
  72. package/dist/src/db.mjs +1 -3
  73. package/dist/src/index.d.mts +5 -5
  74. package/dist/src/index.mjs +7 -6
  75. package/dist/src/migration/ddl.d.mts +17 -0
  76. package/dist/src/migration/ddl.mjs +52 -1
  77. package/dist/src/migration/index.d.mts +2 -0
  78. package/dist/src/query-builders/insert-returning.d.mts +1 -1
  79. package/dist/src/query-builders/insert-returning.mjs +4 -26
  80. package/dist/src/query-builders/insert.d.mts +6 -6
  81. package/dist/src/query-builders/insert.mjs +25 -14
  82. package/dist/src/query-builders/pre.d.mts +3 -1
  83. package/dist/src/query-builders/pre.mjs +4 -1
  84. package/dist/src/query-builders/query.d.mts +3 -0
  85. package/dist/src/query-builders/query.mjs +9 -1
  86. package/dist/src/query-builders/rq.mjs +8 -2
  87. package/dist/src/query-builders/update.mjs +1 -5
  88. package/package.json +4 -2
package/README.md CHANGED
@@ -1,8 +1,11 @@
1
1
  <p align="center">
2
- <img src="https://img.shields.io/npm/v/durcno?style=flat&color=blue" alt="npm version" />
2
+ <a href="https://npmjs.com/package/durcno" target="_blank">
3
+ <img src="https://img.shields.io/npm/v/durcno?style=flat&logo=npm&color=339933" alt="npm version" />
4
+ </a>
5
+ <img src="https://img.shields.io/badge/Node.js-24%2B-339933?style=flat&logo=node.js&logoColor=white" alt="Node.js 24+" />
3
6
  <img src="https://img.shields.io/badge/PostgreSQL-14%2B-336791?style=flat&logo=postgresql&logoColor=white" alt="PostgreSQL 14+" />
4
- <img src="https://img.shields.io/badge/Node.js-25%2B-339933?style=flat&logo=node.js&logoColor=white" alt="Node.js" />
5
7
  <img src="https://img.shields.io/badge/License-Apache%202.0-blue?style=flat" alt="License" />
8
+ <img src="https://img.shields.io/badge/vitest--green?logo=vitest" alt="License" />
6
9
  </p>
7
10
 
8
11
  <h1 align="center">Durcno</h1>
@@ -17,7 +20,7 @@
17
20
 
18
21
  ---
19
22
 
20
- ## Features
23
+ ## Features
21
24
 
22
25
  - **🔗 Relation Mapping** — Intuitive `many`, `one`, and `fk` relations with full type inference.
23
26
  - **🦾 Robust Migrations** — Auto-generated, reversible, and squashable migrations for production applications.
@@ -25,17 +28,17 @@
25
28
  - **🔌 Multiple Drivers** — Support for `pg`, `postgres`, `bun`, and `pglite` drivers.
26
29
  - **🌍 PostGIS Support** — First-class geographic column types for spatial queries.
27
30
 
28
- ## 📦 Setup
31
+ ## Setup
29
32
 
30
33
  ```bash
31
34
  npm install durcno
32
35
  ```
33
36
 
34
37
  ```bash
35
- npx durcno init
38
+ npm exec durcno init
36
39
  ```
37
40
 
38
- ## 🚀 Getting Started
41
+ ## Getting Started
39
42
 
40
43
  Get started with Durcno by following our comprehensive documentation.
41
44
 
@@ -45,11 +48,11 @@ Get started with Durcno by following our comprehensive documentation.
45
48
  > Durcno is currently in the alpha stage.
46
49
  > Avoid using it in any critical or large project until it reaches beta. Expect bugs and breaking changes. However, your feedback is invaluable to help us shape the future of Durcno!
47
50
 
48
- ## 🤝 Contributing
51
+ ## Contributing
49
52
 
50
53
  Contributions are welcome! Please read our [Contributing Guide](CONTRIBUTING.md) before submitting PRs.
51
54
 
52
- ## 📄 License
55
+ ## License
53
56
 
54
57
  Apache License 2.0 - see [LICENSE](LICENSE) for details.
55
58
 
package/dist/bin.cjs CHANGED
@@ -8903,9 +8903,9 @@ function resolveConfigPath(argPath) {
8903
8903
  }
8904
8904
  return (0, import_node_path2.resolve)(process.cwd(), DURCNO_CONFIG_NAME);
8905
8905
  }
8906
- function getSetup(argPath) {
8906
+ async function getSetup(argPath) {
8907
8907
  const absPath = resolveConfigPath(argPath);
8908
- const mod = require(absPath);
8908
+ const mod = await import(absPath);
8909
8909
  const { default: setup } = mod;
8910
8910
  return setup;
8911
8911
  }
@@ -8914,7 +8914,7 @@ function getSetup(argPath) {
8914
8914
  var { bgGreen, dim, cyan: cyan2, yellow: yellow2, red: red2 } = source_default;
8915
8915
  async function down(m, options) {
8916
8916
  const configPath = resolveConfigPath(options.config);
8917
- const { connector, config: config2 } = getSetup(configPath);
8917
+ const { connector, config: config2 } = await getSetup(configPath);
8918
8918
  config2.pool = { ...config2.pool, max: 1 };
8919
8919
  const migrationsDir = (0, import_node_path3.resolve)(
8920
8920
  (0, import_node_path3.dirname)(configPath),
@@ -8962,13 +8962,20 @@ async function runDownMigration(migrationDirName, isFirstMigration, migrationsDi
8962
8962
  const statements = migrationModule.statements;
8963
8963
  const options = migrationModule.options ?? {};
8964
8964
  const useTransaction = options.transaction ?? true;
8965
+ const execution = options.execution ?? "joined";
8965
8966
  if (useTransaction) {
8966
8967
  await client.query("BEGIN;");
8967
8968
  }
8968
8969
  try {
8969
8970
  if (statements.length > 0) {
8970
- const sql = `${statements.map((st) => st.toSQL()).join(";\n")};`;
8971
- await client.query(sql);
8971
+ if (execution === "sequential") {
8972
+ for (const st of statements) {
8973
+ await client.query(st.toSQL());
8974
+ }
8975
+ } else {
8976
+ const sql = `${statements.map((st) => st.toSQL()).join("\n")}`;
8977
+ await client.query(sql);
8978
+ }
8972
8979
  }
8973
8980
  if (useTransaction) {
8974
8981
  await client.query("COMMIT;");
@@ -11688,8 +11695,11 @@ var Arg = class {
11688
11695
  index = 0;
11689
11696
  key = "";
11690
11697
  handler;
11691
- constructor(handler) {
11698
+ /** PostgreSQL cast type suffix (e.g. `"boolean"`, `"geography"`), or `null` if no cast needed. */
11699
+ cast = null;
11700
+ constructor(handler, cast = null) {
11692
11701
  this.handler = handler;
11702
+ this.cast = cast;
11693
11703
  }
11694
11704
  };
11695
11705
 
@@ -11795,6 +11805,14 @@ var Column = class {
11795
11805
  const suffix = this.config.dimension.map((d) => d === null ? "[]" : `[${d}]`).join("");
11796
11806
  return `${base}${suffix}`;
11797
11807
  }
11808
+ /** Returns the full PostgreSQL cast type including array dimensions, or `null` if no cast is needed. */
11809
+ get sqlCast() {
11810
+ const base = this.sqlCastScalar;
11811
+ if (base === null) return null;
11812
+ if (!this.config.dimension) return base;
11813
+ const suffix = this.config.dimension.map((d) => d === null ? "[]" : `[${d}]`).join("");
11814
+ return `${base}${suffix}`;
11815
+ }
11798
11816
  get zodType() {
11799
11817
  let schema = this.zodTypeScaler;
11800
11818
  if (!this.config.dimension) {
@@ -11845,29 +11863,33 @@ var Column = class {
11845
11863
  * Converts a JavaScript value to SQL string literal.
11846
11864
  * Handles array dimensions with ARRAY[...] syntax if configured.
11847
11865
  */
11848
- toSQL(value) {
11866
+ toSQL(value, options) {
11867
+ if (value === null) return "NULL";
11849
11868
  if (value instanceof Sql) return value.string;
11850
11869
  if (!this.config.dimension) {
11851
- return this.toSQLScalar(value);
11870
+ if (!options?.cast || !this.sqlCastScalar) return this.toSQLScalar(value);
11871
+ return `${this.toSQLScalar(value)}::${this.sqlCastScalar}`;
11852
11872
  }
11853
- return this.#toSQLArray(value, 0);
11873
+ return this.#toSQLArray(value, 0, options);
11854
11874
  }
11855
11875
  /**
11856
11876
  * Helper to recursively process multi-dimensional arrays for toSQL.
11857
11877
  */
11858
- #toSQLArray(arr, dimIndex) {
11878
+ #toSQLArray(arr, dimIndex, options) {
11859
11879
  const dimensions = this.config.dimension;
11860
11880
  if (arr.length === 0) {
11861
11881
  return "'{}'";
11862
11882
  }
11863
11883
  if (dimIndex >= dimensions.length - 1) {
11864
- const elements2 = arr.map(
11865
- (item) => this.toSQLScalar(item)
11866
- );
11884
+ const elements2 = arr.map((item) => {
11885
+ if (!options?.cast || !this.sqlCastScalar)
11886
+ return this.toSQLScalar(item);
11887
+ return `${this.toSQLScalar(item)}::${this.sqlCastScalar}`;
11888
+ });
11867
11889
  return `ARRAY[${elements2.join(", ")}]`;
11868
11890
  }
11869
11891
  const elements = arr.map(
11870
- (item) => this.#toSQLArray(item, dimIndex + 1)
11892
+ (item) => this.#toSQLArray(item, dimIndex + 1, options)
11871
11893
  );
11872
11894
  return `ARRAY[${elements.join(", ")}]`;
11873
11895
  }
@@ -12037,7 +12059,7 @@ var Column = class {
12037
12059
  * @returns an `Arg` instance with the type of this column
12038
12060
  */
12039
12061
  arg() {
12040
- return new Arg(this.toDriver.bind(this));
12062
+ return new Arg(this.toDriver.bind(this), this.sqlCast);
12041
12063
  }
12042
12064
  };
12043
12065
 
@@ -12212,7 +12234,7 @@ async function promptColumnRenames(prev, curr, renamedTables) {
12212
12234
  }
12213
12235
  async function generate(options) {
12214
12236
  const configPath = resolveConfigPath(options.config);
12215
- const { config: config2 } = getSetup(configPath);
12237
+ const { config: config2 } = await getSetup(configPath);
12216
12238
  const migrationsDir = (0, import_node_path4.resolve)(
12217
12239
  (0, import_node_path4.dirname)(configPath),
12218
12240
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -12224,7 +12246,7 @@ async function generate(options) {
12224
12246
  const ssPrevious = (0, import_migration2.createEmptySnapshot)();
12225
12247
  for (const migrationFolder of migrationFolderNames.sort()) {
12226
12248
  const upTsPath = (0, import_node_path4.resolve)(migrationsDir, migrationFolder, "up.ts");
12227
- const upModule = require(upTsPath);
12249
+ const upModule = await import(upTsPath);
12228
12250
  const statements = upModule.statements;
12229
12251
  for (const statement of statements) {
12230
12252
  statement.applyToSnapshot(ssPrevious);
@@ -12234,7 +12256,7 @@ async function generate(options) {
12234
12256
  configPath ? (0, import_node_path4.dirname)(configPath) : process.cwd(),
12235
12257
  config2.schema
12236
12258
  );
12237
- const exports2 = require(schemaFile);
12259
+ const exports2 = await import(schemaFile);
12238
12260
  ensureNoEntityCollisions(exports2);
12239
12261
  const entities = Object.values(exports2);
12240
12262
  const ssCurrent = (0, import_migration2.snapshot)(entities);
@@ -12577,6 +12599,22 @@ function generateAlterTableStmts(prevTable, currTable, tableName, curr, statemen
12577
12599
  alterStatements.push(`.dropDefault("${colName}")`);
12578
12600
  }
12579
12601
  }
12602
+ const prevRefJson = JSON.stringify(prevCol.references ?? null);
12603
+ const currRefJson = JSON.stringify(currCol.references ?? null);
12604
+ if (prevRefJson !== currRefJson) {
12605
+ if (prevCol.references) {
12606
+ const constraintName = `${currTable.name}_${colName}_fkey`;
12607
+ alterStatements.push(
12608
+ `.dropForeignKey("${constraintName}", "${colName}")`
12609
+ );
12610
+ }
12611
+ if (currCol.references) {
12612
+ const constraintName = `${currTable.name}_${colName}_fkey`;
12613
+ alterStatements.push(
12614
+ `.addForeignKey("${constraintName}", "${colName}", ${JSON.stringify(currCol.references)})`
12615
+ );
12616
+ }
12617
+ }
12580
12618
  }
12581
12619
  }
12582
12620
  for (const idxName in currTable.indexes) {
@@ -12733,11 +12771,11 @@ var CONNECTOR_PACKAGES = {
12733
12771
  bun: []
12734
12772
  // Built-in
12735
12773
  };
12736
- var CONNECTOR_CLASS_NAMES = {
12737
- pg: "PgConnector",
12738
- postgres: "PostgresConnector",
12739
- bun: "BunConnector",
12740
- pglite: "PgLiteConnector"
12774
+ var CONNECTOR_FUNCTION_NAMES = {
12775
+ pg: "pg",
12776
+ postgres: "postgres",
12777
+ bun: "bun",
12778
+ pglite: "pglite"
12741
12779
  };
12742
12780
  var LOADER_PACKAGES = {
12743
12781
  node: ["dotenv"],
@@ -12757,15 +12795,15 @@ var pm = (() => {
12757
12795
  })();
12758
12796
  function generateConfigFile(config2) {
12759
12797
  const { connector, connectionUrl, schemaPath, migrationsDir } = config2;
12760
- const className = CONNECTOR_CLASS_NAMES[connector];
12798
+ const funcName = CONNECTOR_FUNCTION_NAMES[connector];
12761
12799
  const urlValue = connectionUrl === "" ? "process.env.DATABASE_URL!" : `"${connectionUrl}"`;
12762
12800
  const envLoader = connectionUrl === "" ? runtime === "bun" ? "" : runtime === "deno" ? `import "@std/dotenv/load";
12763
12801
  ` : `import "dotenv/config";
12764
12802
  ` : "";
12765
12803
  return `${envLoader}import { defineConfig } from "durcno";
12766
- import { ${className} } from "durcno/connectors/${connector}";
12804
+ import { ${funcName} } from "durcno/connectors/${connector}";
12767
12805
 
12768
- export default defineConfig(${className}, {
12806
+ export default defineConfig(${funcName}(), {
12769
12807
  schema: "${schemaPath}",
12770
12808
  out: "${migrationsDir}",
12771
12809
  dbCredentials: {
@@ -12789,43 +12827,14 @@ export const Users = table("public", "users", {
12789
12827
  `;
12790
12828
  }
12791
12829
  function generateIndexFile(schemaPath) {
12792
- const schemaImport = schemaPath.replace(/\.ts$/, "").replace(/^db\//, "./");
12830
+ const schemaImport = schemaPath.replace(/^db\//, "./");
12793
12831
  return `import { database } from "durcno";
12794
12832
  import * as schema from "${schemaImport}";
12795
- import setup from "../durcno.config";
12833
+ import setup from "../durcno.config.ts";
12796
12834
 
12797
12835
  export const db = database(schema, setup);
12798
12836
  `;
12799
12837
  }
12800
- function setTypeModule() {
12801
- const pkgPath = (0, import_node_path5.resolve)(process.cwd(), "package.json");
12802
- if (!(0, import_node_fs3.existsSync)(pkgPath)) {
12803
- (0, import_node_fs3.writeFileSync)(pkgPath, `${JSON.stringify({ type: "module" }, null, 2)}
12804
- `);
12805
- console.log(
12806
- bold(
12807
- `${green("\u2714")} Created ${cyan4("package.json")} with "type": "module"`
12808
- )
12809
- );
12810
- return;
12811
- }
12812
- try {
12813
- const pkgContent = (0, import_node_fs3.readFileSync)(pkgPath, "utf-8");
12814
- const pkg = JSON.parse(pkgContent);
12815
- if (pkg.type !== "module") {
12816
- pkg.type = "module";
12817
- (0, import_node_fs3.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
12818
- `);
12819
- console.log(
12820
- bold(
12821
- `${green("\u2714")} Updated ${cyan4("package.json")} with "type": "module"`
12822
- )
12823
- );
12824
- }
12825
- } catch (err) {
12826
- console.log(yellow4(`Warning: Failed to update package.json: ${err}`));
12827
- }
12828
- }
12829
12838
  async function promptConfig() {
12830
12839
  const response = await (0, import_prompts2.default)(
12831
12840
  [
@@ -12918,7 +12927,6 @@ async function init(options) {
12918
12927
  config2 = DEFAULTS;
12919
12928
  }
12920
12929
  await writeFiles(config2, options);
12921
- setTypeModule();
12922
12930
  console.log(green.bold("\n\u{1F4E6} Setup dependencies\n"));
12923
12931
  const toInstall = [];
12924
12932
  const toInstallDrivers = [];
@@ -12972,7 +12980,7 @@ Installation failed with exit code ${exitCode}`)
12972
12980
  }
12973
12981
  console.log(green.bold("\n\u2728 Durcno Setuped!\n"));
12974
12982
  console.log(dim2("Next steps:"));
12975
- const execCmd = pm === "npm" ? "npx" : pm === "pnpm" ? "pnpm exec" : pm === "bun" ? "bun x" : pm;
12983
+ const execCmd = pm === "npm" ? "npm exec" : pm === "pnpm" ? "pnpm exec" : pm === "bun" ? "bunx" : pm;
12976
12984
  const nextSteps = [
12977
12985
  ["Edit your schema in", magenta.bold(config2.schemaPath)],
12978
12986
  ["Run", cyan4.bold(`${execCmd} durcno generate`), "to create migrations"],
@@ -12993,7 +13001,7 @@ var import_durcno3 = require("durcno");
12993
13001
  var { bgGreen: bgGreen3, bgYellow, dim: dim3, gray: gray4, yellow: yellow5, green: green2, cyan: cyan5 } = source_default;
12994
13002
  async function migrate(options) {
12995
13003
  const configPath = resolveConfigPath(options.config);
12996
- const { connector, config: config2 } = getSetup(configPath);
13004
+ const { connector, config: config2 } = await getSetup(configPath);
12997
13005
  config2.pool = { ...config2.pool, max: 1 };
12998
13006
  const migrationsDir = (0, import_node_path6.resolve)(
12999
13007
  (0, import_node_path6.dirname)(configPath),
@@ -13052,13 +13060,20 @@ async function runUpMigration(migrationDirName, migrationsDir, client, setup) {
13052
13060
  const statements = migrationModule.statements;
13053
13061
  const options = migrationModule.options ?? {};
13054
13062
  const useTransaction = options.transaction ?? true;
13063
+ const execution = options.execution ?? "joined";
13055
13064
  if (useTransaction) {
13056
13065
  await client.query("BEGIN;");
13057
13066
  }
13058
13067
  try {
13059
13068
  if (statements.length > 0) {
13060
- const sql = `${statements.map((st) => st.toSQL()).join(";\n")};`;
13061
- await client.query(sql);
13069
+ if (execution === "sequential") {
13070
+ for (const st of statements) {
13071
+ await client.query(st.toSQL());
13072
+ }
13073
+ } else {
13074
+ const sql = `${statements.map((st) => st.toSQL()).join("\n")}`;
13075
+ await client.query(sql);
13076
+ }
13062
13077
  }
13063
13078
  if (useTransaction) {
13064
13079
  await client.query("COMMIT;");
@@ -13091,7 +13106,7 @@ var import_node_readline = require("node:readline");
13091
13106
  var { cyan: cyan6, green: green3, red: red4, yellow: yellow6, gray: gray5, bgCyan, bold: bold2 } = source_default;
13092
13107
  async function shell(options) {
13093
13108
  const configPath = resolveConfigPath(options.config);
13094
- const { connector } = getSetup(configPath);
13109
+ const { connector } = await getSetup(configPath);
13095
13110
  const client = connector.getClient();
13096
13111
  console.log(gray5("Connecting to database..."));
13097
13112
  await client.connect();
@@ -13337,7 +13352,7 @@ var import_migration3 = require("durcno/migration");
13337
13352
  var { bgGreen: bgGreen4, bgRed: bgRed2, yellow: yellow7, red: red5, green: green4, cyan: cyan7, gray: gray6 } = source_default;
13338
13353
  async function squash(start, end, options) {
13339
13354
  const configPath = resolveConfigPath(options.config);
13340
- const { config: config2 } = getSetup(configPath);
13355
+ const { config: config2 } = await getSetup(configPath);
13341
13356
  const migrationsDir = (0, import_node_path7.resolve)(
13342
13357
  (0, import_node_path7.dirname)(configPath),
13343
13358
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -13375,7 +13390,7 @@ async function squash(start, end, options) {
13375
13390
  let hasCustomStatements = false;
13376
13391
  for (const migrationDirName of range) {
13377
13392
  const upPath = (0, import_node_path7.join)(migrationsDir, migrationDirName, "up.ts");
13378
- const upModule = require(upPath);
13393
+ const upModule = await import(upPath);
13379
13394
  const statements = upModule.statements;
13380
13395
  if (statements.some((st) => st.isCustom)) {
13381
13396
  hasCustomStatements = true;
@@ -13393,7 +13408,7 @@ async function squash(start, end, options) {
13393
13408
  const beforeSnapshot = (0, import_migration3.createEmptySnapshot)();
13394
13409
  for (const migrationDirName of before) {
13395
13410
  const upPath = (0, import_node_path7.join)(migrationsDir, migrationDirName, "up.ts");
13396
- const upModule = require(upPath);
13411
+ const upModule = await import(upPath);
13397
13412
  const statements = upModule.statements;
13398
13413
  for (const statement of statements) {
13399
13414
  statement.applyToSnapshot(beforeSnapshot);
@@ -13402,7 +13417,7 @@ async function squash(start, end, options) {
13402
13417
  const afterRangeSnapshot = (0, import_migration3.createEmptySnapshot)();
13403
13418
  for (const migrationDirName of [...before, ...range]) {
13404
13419
  const upPath = (0, import_node_path7.join)(migrationsDir, migrationDirName, "up.ts");
13405
- const upModule = require(upPath);
13420
+ const upModule = await import(upPath);
13406
13421
  const statements = upModule.statements;
13407
13422
  for (const statement of statements) {
13408
13423
  statement.applyToSnapshot(afterRangeSnapshot);
@@ -13444,7 +13459,7 @@ var import_durcno4 = require("durcno");
13444
13459
  var { dim: dim4, cyan: cyan8, yellow: yellow8, green: green5 } = source_default;
13445
13460
  async function status(options) {
13446
13461
  const configPath = resolveConfigPath(options.config);
13447
- const { connector, config: config2 } = getSetup(configPath);
13462
+ const { connector, config: config2 } = await getSetup(configPath);
13448
13463
  config2.pool = { ...config2.pool, max: 1 };
13449
13464
  const migrationsDir = (0, import_node_path8.resolve)(
13450
13465
  (0, import_node_path8.dirname)(configPath),
@@ -13489,7 +13504,7 @@ async function status(options) {
13489
13504
  }
13490
13505
 
13491
13506
  // src/cli/index.ts
13492
- program.version("1.0.0-alpha.0");
13507
+ program.version("1.0.0-alpha.1");
13493
13508
  var Options = {
13494
13509
  config: ["--config <path>", "Path to the config file"]
13495
13510
  };
@@ -8,6 +8,7 @@ type BigintConfig = ColumnConfig;
8
8
  declare class BigintColumn<TConfig extends BigintConfig> extends Column<TConfig, BigintValType> {
9
9
  static readonly id = "Column.Bigint";
10
10
  get sqlTypeScalar(): string;
11
+ get sqlCastScalar(): null;
11
12
  get zodTypeScaler(): z.ZodCoercedNumber<unknown>;
12
13
  toDriverScalar(value: BigintValType | Sql | null): string | number | null;
13
14
  toSQLScalar(value: number | Sql | null): string;
@@ -7,6 +7,9 @@ var BigintColumn = class extends Column {
7
7
  get sqlTypeScalar() {
8
8
  return "bigint";
9
9
  }
10
+ get sqlCastScalar() {
11
+ return null;
12
+ }
10
13
  get zodTypeScaler() {
11
14
  return z.coerce.number();
12
15
  }
@@ -13,6 +13,7 @@ declare class BigserialColumn<TConfig extends BigserialConfig> extends Column<Bi
13
13
  static readonly id = "Column.Bigserial";
14
14
  constructor(config: TConfig);
15
15
  get sqlTypeScalar(): string;
16
+ get sqlCastScalar(): null;
16
17
  get zodTypeScaler(): z.ZodCoercedNumber<unknown>;
17
18
  toDriverScalar(value: BigserialValType | Sql | null): string | number | null;
18
19
  toSQLScalar(value: number | Sql | null): string;
@@ -10,6 +10,9 @@ var BigserialColumn = class extends Column {
10
10
  get sqlTypeScalar() {
11
11
  return "bigserial";
12
12
  }
13
+ get sqlCastScalar() {
14
+ return null;
15
+ }
13
16
  get zodTypeScaler() {
14
17
  return z.coerce.number();
15
18
  }
@@ -8,6 +8,7 @@ type BooleanConfig = ColumnConfig;
8
8
  declare class BooleanColumn<TConfig extends BooleanConfig> extends Column<TConfig, BooleanValType> {
9
9
  static readonly id = "Column.Boolean";
10
10
  get sqlTypeScalar(): string;
11
+ get sqlCastScalar(): string;
11
12
  get zodTypeScaler(): z.ZodBoolean;
12
13
  toDriverScalar(value: BooleanValType | Sql | null): string | null;
13
14
  toSQLScalar(value: boolean | Sql | null): string;
@@ -7,6 +7,9 @@ var BooleanColumn = class extends Column {
7
7
  get sqlTypeScalar() {
8
8
  return "boolean";
9
9
  }
10
+ get sqlCastScalar() {
11
+ return "boolean";
12
+ }
10
13
  get zodTypeScaler() {
11
14
  return z.boolean();
12
15
  }
@@ -8,6 +8,7 @@ type ByteaConfig = ColumnConfig;
8
8
  declare class ByteaColumn<TConfig extends ByteaConfig> extends Column<TConfig, ByteaValType> {
9
9
  static readonly id = "Column.Bytea";
10
10
  get sqlTypeScalar(): string;
11
+ get sqlCastScalar(): string;
11
12
  get zodTypeScaler(): z.ZodCustom<Buffer<ArrayBufferLike>, Buffer<ArrayBufferLike>>;
12
13
  toDriverScalar(value: ByteaValType | Sql | null): string | null;
13
14
  toSQLScalar(value: ByteaValType | Sql | null): string;
@@ -7,6 +7,9 @@ var ByteaColumn = class extends Column {
7
7
  get sqlTypeScalar() {
8
8
  return "bytea";
9
9
  }
10
+ get sqlCastScalar() {
11
+ return "bytea";
12
+ }
10
13
  get zodTypeScaler() {
11
14
  return z.instanceof(Buffer);
12
15
  }
@@ -12,6 +12,7 @@ declare class CharColumn<TConfig extends CharConfig> extends Column<TConfig, Cha
12
12
  static readonly id = "Column.Char";
13
13
  constructor(config: TConfig);
14
14
  get sqlTypeScalar(): string;
15
+ get sqlCastScalar(): null;
15
16
  get zodTypeScaler(): z.ZodString;
16
17
  toDriverScalar(value: CharValType | Sql | null): string | null;
17
18
  toSQLScalar(value: string | Sql | null): string;
@@ -12,6 +12,9 @@ var CharColumn = class extends Column {
12
12
  get sqlTypeScalar() {
13
13
  return `char(${this.#length})`;
14
14
  }
15
+ get sqlCastScalar() {
16
+ return null;
17
+ }
15
18
  get zodTypeScaler() {
16
19
  return z.string().length(this.#length);
17
20
  }
@@ -8,6 +8,7 @@ type CidrConfig = ColumnConfig;
8
8
  declare class CidrColumn<TConfig extends CidrConfig> extends Column<TConfig, CidrValType> {
9
9
  static readonly id = "Column.Cidr";
10
10
  get sqlTypeScalar(): string;
11
+ get sqlCastScalar(): string;
11
12
  get zodTypeScaler(): z.ZodUnion<readonly [z.ZodCIDRv4, z.ZodCIDRv6]>;
12
13
  toDriverScalar(value: CidrValType | Sql | null): string | null;
13
14
  toSQLScalar(value: string | Sql | null): string;
@@ -7,6 +7,9 @@ var CidrColumn = class extends Column {
7
7
  get sqlTypeScalar() {
8
8
  return "cidr";
9
9
  }
10
+ get sqlCastScalar() {
11
+ return "cidr";
12
+ }
10
13
  get zodTypeScaler() {
11
14
  return z.union([z.cidrv4(), z.cidrv6()]);
12
15
  }
@@ -158,6 +158,10 @@ declare abstract class Column<TConfig extends ColumnConfig, TColVal> {
158
158
  } ? true : false;
159
159
  abstract get sqlTypeScalar(): string;
160
160
  get sqlType(): string;
161
+ /** Returns the PostgreSQL cast type for this column's scalar value, or `null` if no cast is needed. */
162
+ abstract get sqlCastScalar(): string | null;
163
+ /** Returns the full PostgreSQL cast type including array dimensions, or `null` if no cast is needed. */
164
+ get sqlCast(): string | null;
161
165
  abstract get zodTypeScaler(): z.ZodType;
162
166
  get zodType(): GetZodTypeArray<this["zodTypeScaler"], this["config"]>;
163
167
  /**
@@ -177,7 +181,9 @@ declare abstract class Column<TConfig extends ColumnConfig, TColVal> {
177
181
  * Converts a JavaScript value to SQL string literal.
178
182
  * Handles array dimensions with ARRAY[...] syntax if configured.
179
183
  */
180
- toSQL(value: this["ValType"] | Sql | null): string;
184
+ toSQL(value: this["ValType"] | Sql | null, options?: {
185
+ cast?: boolean;
186
+ }): string;
181
187
  /**
182
188
  * Converts a PostgreSQL result back to JavaScript value.
183
189
  * Handles array dimensions if configured, otherwise delegates to scalar implementation.
@@ -77,6 +77,13 @@ var Column = class {
77
77
  if (!this.config.dimension) return base;
78
78
  return `${base}${this.config.dimension.map((d) => d === null ? "[]" : `[${d}]`).join("")}`;
79
79
  }
80
+ /** Returns the full PostgreSQL cast type including array dimensions, or `null` if no cast is needed. */
81
+ get sqlCast() {
82
+ const base = this.sqlCastScalar;
83
+ if (base === null) return null;
84
+ if (!this.config.dimension) return base;
85
+ return `${base}${this.config.dimension.map((d) => d === null ? "[]" : `[${d}]`).join("")}`;
86
+ }
80
87
  get zodType() {
81
88
  let schema = this.zodTypeScaler;
82
89
  if (!this.config.dimension) return schema;
@@ -112,19 +119,26 @@ var Column = class {
112
119
  * Converts a JavaScript value to SQL string literal.
113
120
  * Handles array dimensions with ARRAY[...] syntax if configured.
114
121
  */
115
- toSQL(value) {
122
+ toSQL(value, options) {
123
+ if (value === null) return "NULL";
116
124
  if (value instanceof Sql) return value.string;
117
- if (!this.config.dimension) return this.toSQLScalar(value);
118
- return this.#toSQLArray(value, 0);
125
+ if (!this.config.dimension) {
126
+ if (!options?.cast || !this.sqlCastScalar) return this.toSQLScalar(value);
127
+ return `${this.toSQLScalar(value)}::${this.sqlCastScalar}`;
128
+ }
129
+ return this.#toSQLArray(value, 0, options);
119
130
  }
120
131
  /**
121
132
  * Helper to recursively process multi-dimensional arrays for toSQL.
122
133
  */
123
- #toSQLArray(arr, dimIndex) {
134
+ #toSQLArray(arr, dimIndex, options) {
124
135
  const dimensions = this.config.dimension;
125
136
  if (arr.length === 0) return "'{}'";
126
- if (dimIndex >= dimensions.length - 1) return `ARRAY[${arr.map((item) => this.toSQLScalar(item)).join(", ")}]`;
127
- return `ARRAY[${arr.map((item) => this.#toSQLArray(item, dimIndex + 1)).join(", ")}]`;
137
+ if (dimIndex >= dimensions.length - 1) return `ARRAY[${arr.map((item) => {
138
+ if (!options?.cast || !this.sqlCastScalar) return this.toSQLScalar(item);
139
+ return `${this.toSQLScalar(item)}::${this.sqlCastScalar}`;
140
+ }).join(", ")}]`;
141
+ return `ARRAY[${arr.map((item) => this.#toSQLArray(item, dimIndex + 1, options)).join(", ")}]`;
128
142
  }
129
143
  /**
130
144
  * Converts a PostgreSQL result back to JavaScript value.
@@ -269,7 +283,7 @@ var Column = class {
269
283
  * @returns an `Arg` instance with the type of this column
270
284
  */
271
285
  arg() {
272
- return new Arg(this.toDriver.bind(this));
286
+ return new Arg(this.toDriver.bind(this), this.sqlCast);
273
287
  }
274
288
  };
275
289
  //#endregion
@@ -8,6 +8,7 @@ type DateConfig = ColumnConfig;
8
8
  declare class DateColumn<TConfig extends DateConfig> extends Column<TConfig, DateValType> {
9
9
  static readonly id = "Column.Date";
10
10
  get sqlTypeScalar(): string;
11
+ get sqlCastScalar(): string;
11
12
  get zodTypeScaler(): z.ZodDate;
12
13
  toDriverScalar(value: DateValType | Sql | null): string | null;
13
14
  toSQLScalar(value: Date | Sql | null): string;
@@ -7,6 +7,9 @@ var DateColumn = class extends Column {
7
7
  get sqlTypeScalar() {
8
8
  return "date";
9
9
  }
10
+ get sqlCastScalar() {
11
+ return "date";
12
+ }
10
13
  get zodTypeScaler() {
11
14
  return z.date();
12
15
  }
@@ -10,6 +10,7 @@ declare class EnumedColumn<TValue extends string, TConfig extends EnumedConfig>
10
10
  static readonly id = "Column.Enumed";
11
11
  constructor(enm: Enum<TValue>, config: TConfig);
12
12
  get sqlTypeScalar(): string;
13
+ get sqlCastScalar(): string;
13
14
  get zodTypeScaler(): z.ZodEnum<{ [k_1 in TValue]: k_1 } extends infer T ? { [k in keyof T]: T[k] } : never>;
14
15
  toDriverScalar(value: TValue | Sql | null): string | null;
15
16
  toSQLScalar(value: TValue | Sql | null): string;
@@ -12,6 +12,9 @@ var EnumedColumn = class extends Column {
12
12
  get sqlTypeScalar() {
13
13
  return `"${this.#enum.schema}"."${this.#enum.name}"`;
14
14
  }
15
+ get sqlCastScalar() {
16
+ return this.sqlTypeScalar;
17
+ }
15
18
  get zodTypeScaler() {
16
19
  return z.enum(this.#enum.values);
17
20
  }