durcno 1.0.0-alpha.2 → 1.0.0-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/README.md +3 -5
  2. package/dist/bin.cjs +69 -65
  3. package/dist/src/columns/enum.d.mts +1 -1
  4. package/dist/src/connectors/bun.d.mts +2 -2
  5. package/dist/src/connectors/bun.mjs +15 -15
  6. package/dist/src/connectors/common.d.mts +104 -25
  7. package/dist/src/connectors/common.mjs +93 -28
  8. package/dist/src/connectors/pg.d.mts +3 -2
  9. package/dist/src/connectors/pg.mjs +18 -15
  10. package/dist/src/connectors/pglite.d.mts +12 -4
  11. package/dist/src/connectors/pglite.mjs +27 -19
  12. package/dist/src/connectors/postgres.d.mts +3 -2
  13. package/dist/src/connectors/postgres.mjs +17 -14
  14. package/dist/src/db.d.mts +6 -7
  15. package/dist/src/db.mjs +30 -39
  16. package/dist/src/index.d.mts +26 -51
  17. package/dist/src/index.mjs +14 -15
  18. package/dist/src/logger.d.mts +32 -0
  19. package/dist/src/logger.mjs +57 -0
  20. package/dist/src/migration/ddl/enum.d.mts +100 -0
  21. package/dist/src/migration/ddl/enum.mjs +138 -0
  22. package/dist/src/migration/ddl/index.d.mts +211 -0
  23. package/dist/src/migration/ddl/index.mjs +81 -0
  24. package/dist/src/migration/ddl/indexes.d.mts +110 -0
  25. package/dist/src/migration/ddl/indexes.mjs +151 -0
  26. package/dist/src/migration/ddl/schema.d.mts +56 -0
  27. package/dist/src/migration/ddl/schema.mjs +62 -0
  28. package/dist/src/migration/ddl/sequence.d.mts +77 -0
  29. package/dist/src/migration/ddl/sequence.mjs +86 -0
  30. package/dist/src/migration/{statement.d.mts → ddl/statement.d.mts} +13 -10
  31. package/dist/src/migration/{statement.mjs → ddl/statement.mjs} +4 -4
  32. package/dist/src/migration/ddl/table.d.mts +305 -0
  33. package/dist/src/migration/{ddl.mjs → ddl/table.mjs} +8 -493
  34. package/dist/src/migration/ddl/types.d.mts +117 -0
  35. package/dist/src/migration/ddl/types.mjs +187 -0
  36. package/dist/src/migration/index.d.mts +2 -2
  37. package/dist/src/migration/index.mjs +2 -2
  38. package/dist/src/migration/snapshot.d.mts +1 -1
  39. package/dist/src/models.d.mts +1 -1
  40. package/dist/src/query-builders/aggregates.d.mts +1 -2
  41. package/dist/src/query-builders/aggregates.mjs +2 -4
  42. package/dist/src/query-builders/count.d.mts +1 -2
  43. package/dist/src/query-builders/count.mjs +2 -4
  44. package/dist/src/query-builders/delete.d.mts +1 -2
  45. package/dist/src/query-builders/delete.mjs +4 -6
  46. package/dist/src/query-builders/distinct.d.mts +1 -2
  47. package/dist/src/query-builders/distinct.mjs +2 -4
  48. package/dist/src/query-builders/exists.d.mts +1 -2
  49. package/dist/src/query-builders/exists.mjs +2 -4
  50. package/dist/src/query-builders/first.d.mts +1 -2
  51. package/dist/src/query-builders/first.mjs +2 -4
  52. package/dist/src/query-builders/insert-returning.d.mts +2 -3
  53. package/dist/src/query-builders/insert-returning.mjs +3 -5
  54. package/dist/src/query-builders/insert.d.mts +2 -3
  55. package/dist/src/query-builders/insert.mjs +5 -9
  56. package/dist/src/query-builders/pre.d.mts +2 -4
  57. package/dist/src/query-builders/pre.mjs +3 -5
  58. package/dist/src/query-builders/query.d.mts +6 -4
  59. package/dist/src/query-builders/query.mjs +5 -0
  60. package/dist/src/query-builders/rq.d.mts +2 -3
  61. package/dist/src/query-builders/rq.mjs +10 -15
  62. package/dist/src/query-builders/select.d.mts +2 -3
  63. package/dist/src/query-builders/select.mjs +10 -14
  64. package/dist/src/query-builders/update.d.mts +2 -3
  65. package/dist/src/query-builders/update.mjs +6 -10
  66. package/dist/src/table.d.mts +2 -4
  67. package/dist/src/wkx/binarywriter.mjs +103 -108
  68. package/dist/src/wkx/geometry.mjs +88 -95
  69. package/dist/src/wkx/geometrycollection.mjs +3 -6
  70. package/dist/src/wkx/index.mjs +1 -4
  71. package/dist/src/wkx/linestring.mjs +4 -8
  72. package/dist/src/wkx/multilinestring.mjs +4 -8
  73. package/dist/src/wkx/multipoint.mjs +4 -8
  74. package/dist/src/wkx/multipolygon.mjs +4 -8
  75. package/dist/src/wkx/parser.mjs +4 -8
  76. package/dist/src/wkx/point.mjs +147 -157
  77. package/dist/src/wkx/polygon.mjs +4 -8
  78. package/dist/src/wkx/types.mjs +30 -35
  79. package/dist/src/wkx/wktparser.mjs +2 -5
  80. package/dist/src/wkx/zigzag.mjs +5 -10
  81. package/package.json +5 -2
  82. package/dist/src/_virtual/_rolldown/runtime.mjs +0 -28
  83. package/dist/src/cli/helpers.mjs +0 -16
  84. package/dist/src/migration/ddl.d.mts +0 -764
package/README.md CHANGED
@@ -1,8 +1,6 @@
1
1
  <p align="center">
2
- <a href="https://npmjs.com/package/durcno" target="_blank">
3
- <img src="https://img.shields.io/npm/v/durcno?style=flat&logo=npm&color=339933" alt="npm version" />
4
- </a>
5
- <img src="https://img.shields.io/badge/Node.js-24%2B-339933?style=flat&logo=node.js&logoColor=white" alt="Node.js 24+" />
2
+ <img alt="NPM Downloads" src="https://img.shields.io/npm/dw/durcno" alt="Downloads" />
3
+ <img src="https://img.shields.io/badge/Node.js-24%2B-339935?style=flat&logo=node.js&logoColor=white" alt="Node.js 24+" />
6
4
  <img src="https://img.shields.io/badge/PostgreSQL-14%2B-336791?style=flat&logo=postgresql&logoColor=white" alt="PostgreSQL 14+" />
7
5
  <img src="https://img.shields.io/badge/License-Apache%202.0-blue?style=flat" alt="License" />
8
6
  <img src="https://img.shields.io/badge/vitest--green?logo=vitest" alt="License" />
@@ -31,7 +29,7 @@
31
29
  ## Setup
32
30
 
33
31
  ```bash
34
- npm install durcno
32
+ npm add durcno@alpha
35
33
  ```
36
34
 
37
35
  ```bash
package/dist/bin.cjs CHANGED
@@ -8903,19 +8903,17 @@ function resolveConfigPath(argPath) {
8903
8903
  }
8904
8904
  return (0, import_node_path2.resolve)(process.cwd(), DURCNO_CONFIG_NAME);
8905
8905
  }
8906
- async function getSetup(argPath) {
8907
- const absPath = resolveConfigPath(argPath);
8906
+ async function loadConfig(absPath) {
8908
8907
  const mod = await import(absPath);
8909
- const { default: setup } = mod;
8910
- return setup;
8908
+ return mod.default;
8911
8909
  }
8912
8910
 
8913
8911
  // src/cli/commands/down.ts
8914
8912
  var { bgGreen, dim, cyan: cyan2, yellow: yellow2, red: red2 } = source_default;
8915
8913
  async function down(m, options) {
8916
8914
  const configPath = resolveConfigPath(options.config);
8917
- const { connector, config: config2 } = await getSetup(configPath);
8918
- config2.pool = { ...config2.pool, max: 1 };
8915
+ const config2 = await loadConfig(configPath);
8916
+ const { connector } = config2;
8919
8917
  const migrationsDir = (0, import_node_path3.resolve)(
8920
8918
  (0, import_node_path3.dirname)(configPath),
8921
8919
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -8925,7 +8923,7 @@ async function down(m, options) {
8925
8923
  const client = connector.getClient();
8926
8924
  await client.connect();
8927
8925
  if (await migrationsTableExists(client)) {
8928
- const db = (0, import_durcno2.database)({ Migrations: import_durcno2.Migrations }, { connector, config: config2 });
8926
+ const db = (0, import_durcno2.database)({ Migrations: import_durcno2.Migrations }, config2);
8929
8927
  const migrations = await db.from(import_durcno2.Migrations).select();
8930
8928
  const migrationDirsReversed = migrationDirNames.sort().reverse();
8931
8929
  for (let i = 0; i < migrationDirsReversed.length; i++) {
@@ -8939,7 +8937,7 @@ async function down(m, options) {
8939
8937
  migrationDirName,
8940
8938
  isFirstMigration,
8941
8939
  migrationsDir,
8942
- { connector, config: config2 },
8940
+ config2,
8943
8941
  client
8944
8942
  );
8945
8943
  if (migration.name === m) {
@@ -8954,7 +8952,7 @@ async function down(m, options) {
8954
8952
  await client.close();
8955
8953
  process.exit(0);
8956
8954
  }
8957
- async function runDownMigration(migrationDirName, isFirstMigration, migrationsDirPath, setup, client) {
8955
+ async function runDownMigration(migrationDirName, isFirstMigration, migrationsDirPath, config2, client) {
8958
8956
  const migrationName = (0, import_node_path3.basename)(migrationDirName);
8959
8957
  const downPath = (0, import_node_path3.join)(migrationsDirPath, migrationName, "down.ts");
8960
8958
  try {
@@ -8963,31 +8961,32 @@ async function runDownMigration(migrationDirName, isFirstMigration, migrationsDi
8963
8961
  const options = migrationModule.options ?? {};
8964
8962
  const useTransaction = options.transaction ?? true;
8965
8963
  const execution = options.execution ?? "joined";
8966
- if (useTransaction) {
8967
- await client.query("BEGIN;");
8968
- }
8969
8964
  try {
8970
8965
  if (statements.length > 0) {
8971
8966
  if (execution === "sequential") {
8967
+ if (useTransaction) await client.query("BEGIN;");
8972
8968
  for (const st of statements) {
8973
8969
  await client.query(st.toSQL());
8974
8970
  }
8971
+ if (useTransaction) await client.query("COMMIT;");
8975
8972
  } else {
8976
- const sql = `${statements.map((st) => st.toSQL()).join("\n")}`;
8973
+ let sql = "";
8974
+ if (useTransaction) sql += "BEGIN;\n";
8975
+ sql += statements.map((st) => st.toSQL()).join("\n");
8976
+ if (useTransaction) sql += "\nCOMMIT;";
8977
8977
  await client.query(sql);
8978
8978
  }
8979
8979
  }
8980
- if (useTransaction) {
8981
- await client.query("COMMIT;");
8982
- }
8983
8980
  } catch (e) {
8984
- if (useTransaction) {
8981
+ if (useTransaction && execution === "sequential") {
8985
8982
  await client.query("ROLLBACK;");
8986
8983
  }
8987
8984
  throw e;
8988
8985
  }
8989
8986
  if (!isFirstMigration) {
8990
- const db = (0, import_durcno2.database)({ Migrations: import_durcno2.Migrations }, setup);
8987
+ config2.connector.pool = { ...config2.connector.pool, max: 1 };
8988
+ config2.connector.logger = void 0;
8989
+ const db = (0, import_durcno2.database)({ Migrations: import_durcno2.Migrations }, config2);
8991
8990
  await db.delete(import_durcno2.Migrations).where((0, import_durcno2.eq)(import_durcno2.Migrations.name, migrationName));
8992
8991
  await db.close();
8993
8992
  }
@@ -12234,7 +12233,7 @@ async function promptColumnRenames(prev, curr, renamedTables) {
12234
12233
  }
12235
12234
  async function generate(options) {
12236
12235
  const configPath = resolveConfigPath(options.config);
12237
- const { config: config2 } = await getSetup(configPath);
12236
+ const config2 = await loadConfig(configPath);
12238
12237
  const migrationsDir = (0, import_node_path4.resolve)(
12239
12238
  (0, import_node_path4.dirname)(configPath),
12240
12239
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -12275,12 +12274,14 @@ async function generate(options) {
12275
12274
  ssCurrent,
12276
12275
  renamedTables
12277
12276
  );
12277
+ const connectorMigrationOpts = config2.connector.constructor.migrationOptions;
12278
12278
  const migrationUpTs = generateMigration(
12279
12279
  ssPrevious,
12280
12280
  ssCurrent,
12281
12281
  "up",
12282
12282
  renamedTables,
12283
- renamedColumns
12283
+ renamedColumns,
12284
+ connectorMigrationOpts
12284
12285
  );
12285
12286
  const reverseRenamedTables = {};
12286
12287
  for (const [oldKey, newKey] of Object.entries(renamedTables)) {
@@ -12299,7 +12300,8 @@ async function generate(options) {
12299
12300
  ssPrevious,
12300
12301
  "down",
12301
12302
  reverseRenamedTables,
12302
- reverseRenamedColumns
12303
+ reverseRenamedColumns,
12304
+ connectorMigrationOpts
12303
12305
  );
12304
12306
  if (migrationUpTs === null) {
12305
12307
  console.log(yellow3("No changes detected. Skipping migration creation."));
@@ -12311,14 +12313,14 @@ async function generate(options) {
12311
12313
  (0, import_node_fs2.writeFileSync)((0, import_node_path4.resolve)(migrationDir, "up.ts"), migrationUpTs);
12312
12314
  (0, import_node_fs2.writeFileSync)(
12313
12315
  (0, import_node_path4.resolve)(migrationDir, "down.ts"),
12314
- migrationDnTs ?? generateNoOpMigration()
12316
+ migrationDnTs ?? generateNoOpMigration(connectorMigrationOpts)
12315
12317
  );
12316
12318
  const migrationsRelativePath = (0, import_node_path4.relative)(process.cwd(), migrationsDir);
12317
12319
  console.log(
12318
12320
  `${bgGreen2.white.bold("[CREATED]")} ${cyan3(migrationName)} at ${cyan3(`${migrationsRelativePath}/`)}`
12319
12321
  );
12320
12322
  }
12321
- function generateMigration(prev, curr, direction, renamedTables = {}, renamedColumns = {}) {
12323
+ function generateMigration(prev, curr, direction, renamedTables = {}, renamedColumns = {}, defaultOptions) {
12322
12324
  const statements = [];
12323
12325
  const renamedFromKeys = new Set(Object.keys(renamedTables));
12324
12326
  const renamedToKeys = new Set(Object.values(renamedTables));
@@ -12351,7 +12353,7 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
12351
12353
  for (const enumName in prev.enums) {
12352
12354
  if (!(enumName in curr.enums)) {
12353
12355
  const enm = prev.enums[enumName];
12354
- statements.push(`ddl.dropEnum("${enm.schema}", "${enm.name}")`);
12356
+ statements.push(`ddl.dropType("${enm.schema}", "${enm.name}")`);
12355
12357
  }
12356
12358
  }
12357
12359
  for (const enumName in curr.enums) {
@@ -12359,7 +12361,7 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
12359
12361
  const enm = curr.enums[enumName];
12360
12362
  const values = enm.values.map((v) => `"${v}"`).join(", ");
12361
12363
  statements.push(
12362
- `ddl.createEnum("${enm.schema}", "${enm.name}", [${values}])`
12364
+ `ddl.createType("${enm.schema}", "${enm.name}", { asEnum: [${values}] })`
12363
12365
  );
12364
12366
  } else {
12365
12367
  const prevValues = prev.enums[enumName].values;
@@ -12417,15 +12419,15 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
12417
12419
  }
12418
12420
  if (afterValue !== null) {
12419
12421
  statements.push(
12420
- `ddl.alterEnumAddValue("${enm.schema}", "${enm.name}", "${addedValue}", { after: "${afterValue}" })`
12422
+ `ddl.alterType("${enm.schema}", "${enm.name}").addValue("${addedValue}", { after: "${afterValue}" })`
12421
12423
  );
12422
12424
  } else if (beforeValue !== null) {
12423
12425
  statements.push(
12424
- `ddl.alterEnumAddValue("${enm.schema}", "${enm.name}", "${addedValue}", { before: "${beforeValue}" })`
12426
+ `ddl.alterType("${enm.schema}", "${enm.name}").addValue("${addedValue}", { before: "${beforeValue}" })`
12425
12427
  );
12426
12428
  } else {
12427
12429
  statements.push(
12428
- `ddl.alterEnumAddValue("${enm.schema}", "${enm.name}", "${addedValue}")`
12430
+ `ddl.alterType("${enm.schema}", "${enm.name}").addValue("${addedValue}")`
12429
12431
  );
12430
12432
  }
12431
12433
  }
@@ -12524,25 +12526,25 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
12524
12526
  if (statements.length === 0) return null;
12525
12527
  return `import { type DDLStatement, ddl, type MigrationOptions } from "durcno/migration";
12526
12528
 
12527
- export const options: MigrationOptions = {
12528
- transaction: true,
12529
- };
12529
+ export const options: MigrationOptions = ${stringifyMigrationOpts(defaultOptions ?? { transaction: true })};
12530
12530
 
12531
12531
  export const statements: DDLStatement[] = [
12532
12532
  ${statements.join(",\n ")},
12533
12533
  ];
12534
12534
  `;
12535
12535
  }
12536
- function generateNoOpMigration() {
12536
+ function generateNoOpMigration(defaultOptions) {
12537
12537
  return `import { type DDLStatement, ddl, type MigrationOptions } from "durcno/migration";
12538
12538
 
12539
- export const options: MigrationOptions = {
12540
- transaction: true,
12541
- };
12539
+ export const options: MigrationOptions = ${stringifyMigrationOpts(defaultOptions ?? { transaction: true })};
12542
12540
 
12543
12541
  export const statements: DDLStatement[] = [];
12544
12542
  `;
12545
12543
  }
12544
+ function stringifyMigrationOpts(opts) {
12545
+ if (opts.transaction === void 0) opts.transaction = true;
12546
+ return JSON.stringify(opts, null, 2);
12547
+ }
12546
12548
  function generateAlterTableStmts(prevTable, currTable, tableName, curr, statements, columnRenames) {
12547
12549
  const alterStatements = [];
12548
12550
  const renamedFromCols = new Set(
@@ -12803,12 +12805,14 @@ function generateConfigFile(config2) {
12803
12805
  return `${envLoader}import { defineConfig } from "durcno";
12804
12806
  import { ${funcName} } from "durcno/connectors/${connector}";
12805
12807
 
12806
- export default defineConfig(${funcName}(), {
12808
+ export default defineConfig({
12807
12809
  schema: "${schemaPath}",
12808
12810
  out: "${migrationsDir}",
12809
- dbCredentials: {
12810
- url: ${urlValue},
12811
- },
12811
+ connector: ${funcName}({
12812
+ dbCredentials: {
12813
+ url: ${urlValue},
12814
+ },
12815
+ }),
12812
12816
  });
12813
12817
  `;
12814
12818
  }
@@ -12830,9 +12834,9 @@ function generateIndexFile(schemaPath) {
12830
12834
  const schemaImport = schemaPath.replace(/^db\//, "./");
12831
12835
  return `import { database } from "durcno";
12832
12836
  import * as schema from "${schemaImport}";
12833
- import setup from "../durcno.config.ts";
12837
+ import config from "../durcno.config.ts";
12834
12838
 
12835
- export const db = database(schema, setup);
12839
+ export const db = database(schema, config);
12836
12840
  `;
12837
12841
  }
12838
12842
  async function promptConfig() {
@@ -13001,8 +13005,8 @@ var import_durcno3 = require("durcno");
13001
13005
  var { bgGreen: bgGreen3, bgYellow, dim: dim3, gray: gray4, yellow: yellow5, green: green2, cyan: cyan5 } = source_default;
13002
13006
  async function migrate(options) {
13003
13007
  const configPath = resolveConfigPath(options.config);
13004
- const { connector, config: config2 } = await getSetup(configPath);
13005
- config2.pool = { ...config2.pool, max: 1 };
13008
+ const config2 = await loadConfig(configPath);
13009
+ const { connector } = config2;
13006
13010
  const migrationsDir = (0, import_node_path6.resolve)(
13007
13011
  (0, import_node_path6.dirname)(configPath),
13008
13012
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -13015,16 +13019,13 @@ async function migrate(options) {
13015
13019
  try {
13016
13020
  let previouslyApplied = [];
13017
13021
  if (await migrationsTableExists(client)) {
13018
- const db = (0, import_durcno3.database)({ Migrations: import_durcno3.Migrations }, { connector, config: config2 });
13022
+ const db = (0, import_durcno3.database)({ Migrations: import_durcno3.Migrations }, config2);
13019
13023
  const records = await db.from(import_durcno3.Migrations).select();
13020
13024
  previouslyApplied = records.map((r) => r.name);
13021
13025
  }
13022
13026
  for (const migrationDirName of migrationDirNames.sort()) {
13023
13027
  if (!previouslyApplied.includes(migrationDirName)) {
13024
- await runUpMigration(migrationDirName, migrationsDir, client, {
13025
- connector,
13026
- config: config2
13027
- });
13028
+ await runUpMigration(migrationDirName, migrationsDir, client, config2);
13028
13029
  appliedMigrations.push(migrationDirName);
13029
13030
  }
13030
13031
  }
@@ -13040,7 +13041,7 @@ async function migrate(options) {
13040
13041
  migrationFolder,
13041
13042
  isFirstMigration,
13042
13043
  migrationsDir,
13043
- { connector, config: config2 },
13044
+ config2,
13044
13045
  client
13045
13046
  );
13046
13047
  }
@@ -13054,41 +13055,42 @@ async function migrate(options) {
13054
13055
  await client.close();
13055
13056
  process.exit(0);
13056
13057
  }
13057
- async function runUpMigration(migrationDirName, migrationsDir, client, setup) {
13058
+ async function runUpMigration(migrationDirName, migrationsDir, client, config2) {
13058
13059
  const upPath = (0, import_node_path6.join)(migrationsDir, migrationDirName, "up.ts");
13059
13060
  const migrationModule = await import(upPath);
13060
13061
  const statements = migrationModule.statements;
13061
13062
  const options = migrationModule.options ?? {};
13062
13063
  const useTransaction = options.transaction ?? true;
13063
13064
  const execution = options.execution ?? "joined";
13064
- if (useTransaction) {
13065
- await client.query("BEGIN;");
13066
- }
13067
13065
  try {
13068
13066
  if (statements.length > 0) {
13069
13067
  if (execution === "sequential") {
13068
+ if (useTransaction) await client.query("BEGIN;");
13070
13069
  for (const st of statements) {
13071
13070
  await client.query(st.toSQL());
13072
13071
  }
13072
+ if (useTransaction) await client.query("COMMIT;");
13073
13073
  } else {
13074
- const sql = `${statements.map((st) => st.toSQL()).join("\n")}`;
13074
+ let sql = "";
13075
+ if (useTransaction) sql += "BEGIN;\n";
13076
+ sql += statements.map((st) => st.toSQL()).join("\n");
13077
+ if (useTransaction) sql += "\nCOMMIT;";
13075
13078
  await client.query(sql);
13076
13079
  }
13077
13080
  }
13078
- if (useTransaction) {
13079
- await client.query("COMMIT;");
13080
- }
13081
13081
  console.log(
13082
13082
  bgGreen3.white.bold("[APPLIED]") + " " + green2(`Migration ${cyan5(migrationDirName)}`) + dim3(".")
13083
13083
  );
13084
- const db = (0, import_durcno3.database)({ Migrations: import_durcno3.Migrations }, setup);
13084
+ config2.connector.pool = { ...config2.connector.pool, max: 1 };
13085
+ config2.connector.logger = void 0;
13086
+ const db = (0, import_durcno3.database)({ Migrations: import_durcno3.Migrations }, config2);
13085
13087
  await db.insert(import_durcno3.Migrations).values({
13086
13088
  name: migrationDirName,
13087
13089
  createdAt: /* @__PURE__ */ new Date()
13088
13090
  });
13089
13091
  await db.close();
13090
13092
  } catch (e) {
13091
- if (useTransaction) {
13093
+ if (useTransaction && execution === "sequential") {
13092
13094
  await client.query("ROLLBACK;");
13093
13095
  }
13094
13096
  throw e;
@@ -13106,7 +13108,7 @@ var import_node_readline = require("node:readline");
13106
13108
  var { cyan: cyan6, green: green3, red: red4, yellow: yellow6, gray: gray5, bgCyan, bold: bold2 } = source_default;
13107
13109
  async function shell(options) {
13108
13110
  const configPath = resolveConfigPath(options.config);
13109
- const { connector } = await getSetup(configPath);
13111
+ const { connector } = await loadConfig(configPath);
13110
13112
  const client = connector.getClient();
13111
13113
  console.log(gray5("Connecting to database..."));
13112
13114
  await client.connect();
@@ -13352,7 +13354,7 @@ var import_migration3 = require("durcno/migration");
13352
13354
  var { bgGreen: bgGreen4, bgRed: bgRed2, yellow: yellow7, red: red5, green: green4, cyan: cyan7, gray: gray6 } = source_default;
13353
13355
  async function squash(start, end, options) {
13354
13356
  const configPath = resolveConfigPath(options.config);
13355
- const { config: config2 } = await getSetup(configPath);
13357
+ const config2 = await loadConfig(configPath);
13356
13358
  const migrationsDir = (0, import_node_path7.resolve)(
13357
13359
  (0, import_node_path7.dirname)(configPath),
13358
13360
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -13459,8 +13461,8 @@ var import_durcno4 = require("durcno");
13459
13461
  var { dim: dim4, cyan: cyan8, yellow: yellow8, green: green5 } = source_default;
13460
13462
  async function status(options) {
13461
13463
  const configPath = resolveConfigPath(options.config);
13462
- const { connector, config: config2 } = await getSetup(configPath);
13463
- config2.pool = { ...config2.pool, max: 1 };
13464
+ const config2 = await loadConfig(configPath);
13465
+ const { connector } = config2;
13464
13466
  const migrationsDir = (0, import_node_path8.resolve)(
13465
13467
  (0, import_node_path8.dirname)(configPath),
13466
13468
  config2.out || DEFAULT_MIGRATIONS_DIR
@@ -13471,7 +13473,9 @@ async function status(options) {
13471
13473
  console.log(source_default.yellow("No migrations found."));
13472
13474
  process.exit(0);
13473
13475
  }
13474
- const db = (0, import_durcno4.database)({ Migrations: import_durcno4.Migrations }, { connector, config: config2 });
13476
+ connector.pool = { ...connector.pool, max: 1 };
13477
+ connector.logger = void 0;
13478
+ const db = (0, import_durcno4.database)({ Migrations: import_durcno4.Migrations }, config2);
13475
13479
  const migrationsQuery = db.from(import_durcno4.Migrations).select();
13476
13480
  let migrations;
13477
13481
  const client = connector.getClient();
@@ -13504,7 +13508,7 @@ async function status(options) {
13504
13508
  }
13505
13509
 
13506
13510
  // src/cli/index.ts
13507
- program.version("1.0.0-alpha.1");
13511
+ program.version("1.0.0-alpha.3");
13508
13512
  var Options = {
13509
13513
  config: ["--config <path>", "Path to the config file"]
13510
13514
  };
@@ -1,6 +1,6 @@
1
1
  import { Sql } from "../sql.mjs";
2
- import { Column, ColumnConfig } from "./common.mjs";
3
2
  import { Enum } from "../enumtype.mjs";
3
+ import { Column, ColumnConfig } from "./common.mjs";
4
4
  import * as z from "zod";
5
5
 
6
6
  //#region src/columns/enum.d.ts
@@ -1,4 +1,4 @@
1
- import { Connector } from "./common.mjs";
1
+ import { Connector, ConnectorOptions } from "./common.mjs";
2
2
 
3
3
  //#region src/connectors/bun.d.ts
4
4
  /**
@@ -15,6 +15,6 @@ declare class BunConnector extends Connector {
15
15
  getPool(): BunPool;
16
16
  }
17
17
  /** Creates a Bun SQL connector instance. */
18
- declare function bun(): BunConnector;
18
+ declare function bun(options: ConnectorOptions): BunConnector;
19
19
  //#endregion
20
20
  export { BunConnector, bun };
@@ -1,5 +1,5 @@
1
- import { $Client, $Pool, Connector } from "./common.mjs";
2
- import { SQL } from "bun";
1
+ import { $Client, $Pool, Connector, getUrlFromDbCredentials } from "./common.mjs";
2
+ import Bun from "bun";
3
3
  //#region src/connectors/bun.ts
4
4
  /**
5
5
  * Connector implementation for the Bun built-in SQL client.
@@ -12,15 +12,15 @@ import { SQL } from "bun";
12
12
  */
13
13
  var BunConnector = class extends Connector {
14
14
  getClient() {
15
- return new BunClient(this.url);
15
+ return new BunClient(this.options);
16
16
  }
17
17
  getPool() {
18
- return new BunPool(this.url, this.config.pool);
18
+ return new BunPool(this.options);
19
19
  }
20
20
  };
21
21
  /** Creates a Bun SQL connector instance. */
22
- function bun() {
23
- return new BunConnector();
22
+ function bun(options) {
23
+ return new BunConnector(options);
24
24
  }
25
25
  /**
26
26
  * Single-connection client wrapper for Bun's SQL API.
@@ -32,9 +32,9 @@ function bun() {
32
32
  */
33
33
  var BunClient = class extends $Client {
34
34
  #client;
35
- constructor(connectionString) {
36
- super();
37
- this.#client = new SQL(connectionString, { max: 1 });
35
+ constructor(options) {
36
+ super(options);
37
+ this.#client = new Bun.SQL(getUrlFromDbCredentials(options.dbCredentials), { max: 1 });
38
38
  this.query = this.#client.unsafe.bind(this.#client);
39
39
  }
40
40
  async connect() {
@@ -57,9 +57,9 @@ var BunClient = class extends $Client {
57
57
  */
58
58
  var BunPool = class extends $Pool {
59
59
  #pool;
60
- constructor(connectionString, pool) {
61
- super();
62
- this.#pool = new SQL(connectionString, { max: pool?.max ?? 10 });
60
+ constructor(options) {
61
+ super(options);
62
+ this.#pool = new Bun.SQL(getUrlFromDbCredentials(options.dbCredentials), { max: options.pool?.max ?? 10 });
63
63
  this.query = this.#pool.unsafe.bind(this.#pool);
64
64
  }
65
65
  async connect() {
@@ -72,7 +72,7 @@ var BunPool = class extends $Pool {
72
72
  await this.#pool.end();
73
73
  }
74
74
  async acquireClient() {
75
- return new BunPoolClient(await this.#pool.reserve());
75
+ return new BunPoolClient(await this.#pool.reserve(), this.options);
76
76
  }
77
77
  };
78
78
  /**
@@ -84,8 +84,8 @@ var BunPool = class extends $Pool {
84
84
  */
85
85
  var BunPoolClient = class extends $Client {
86
86
  #sql;
87
- constructor(sql) {
88
- super();
87
+ constructor(sql, options) {
88
+ super(options);
89
89
  this.#sql = sql;
90
90
  this.query = this.#sql.unsafe.bind(this.#sql);
91
91
  }
@@ -1,6 +1,50 @@
1
- import { Config } from "../index.mjs";
1
+ import { DurcnoLogger } from "../logger.mjs";
2
+ import { Query } from "../query-builders/query.mjs";
3
+ import { MigrationOptions } from "../migration/index.mjs";
4
+ import { ConnectionOptions } from "node:tls";
2
5
 
3
6
  //#region src/connectors/common.d.ts
7
+ /**
8
+ * Options passed to connector constructors containing database connection
9
+ * credentials, pool settings, and an optional logger.
10
+ *
11
+ * These options were previously part of the top-level `Config` type and are
12
+ * now scoped to the connector so that `Config` only contains schema/migration
13
+ * settings.
14
+ */
15
+ type ConnectorOptions = {
16
+ /**
17
+ * Database connection credentials — either a connection URL or individual
18
+ * host/user/password/database fields.
19
+ */
20
+ dbCredentials: ({
21
+ host: string;
22
+ port?: number;
23
+ user: string;
24
+ password?: string;
25
+ database: string;
26
+ ssl?: boolean | "require" | "allow" | "prefer" | "verify-full" | ConnectionOptions;
27
+ } & {}) | {
28
+ url: string;
29
+ };
30
+ /**
31
+ * Connection pool configuration.
32
+ */
33
+ pool?: {
34
+ /**
35
+ * Maximum number of connections in the pool.
36
+ * @default 10
37
+ */
38
+ max?: number;
39
+ };
40
+ /**
41
+ * Optional logger instance for query logging.
42
+ * Pass a Winston logger or any object with a compatible `info()` method.
43
+ * When set, all executed queries will be logged at the `info` level with
44
+ * structured `{ sql, arguments }` metadata.
45
+ */
46
+ logger?: DurcnoLogger;
47
+ };
4
48
  /**
5
49
  * Abstract base class for all database connectors.
6
50
  *
@@ -12,12 +56,35 @@ import { Config } from "../index.mjs";
12
56
  * @abstract
13
57
  */
14
58
  declare abstract class Connector {
15
- /** The configuration object containing file paths, database credentials, client configs etc. */
16
- config: Config;
17
- /** The PostgreSQL connection URL derived from the configuration. */
18
- url: string;
19
- /** Injects the configuration and derives the connection URL. Called by `defineConfig`. */
20
- _init(config: Config): void;
59
+ /**
60
+ * Default migration options applied to generated migration files for this
61
+ * connector. When set, the `generate` CLI command will use these values as
62
+ * the `options` export in the produced `up.ts` / `down.ts` files instead of
63
+ * the built-in defaults.
64
+ *
65
+ * @example
66
+ * ```typescript
67
+ * class MyConnector extends Connector {
68
+ * static override migrationOptions: MigrationOptions = {
69
+ * transaction: false,
70
+ * execution: "sequential",
71
+ * };
72
+ * }
73
+ * ```
74
+ */
75
+ static migrationOptions?: MigrationOptions;
76
+ /**
77
+ * The original options passed to the connector constructor.
78
+ * Provides full access to `dbCredentials`, `pool`, and `logger`.
79
+ * Note: `pool` and `logger` may be mutated on the connector instance after
80
+ * construction (e.g. by CLI commands); use the instance fields for current values.
81
+ */
82
+ options: ConnectorOptions;
83
+ /** Connection pool size override (can be mutated by CLI commands before `getPool()` is called). */
84
+ pool?: ConnectorOptions["pool"];
85
+ /** Optional logger instance for query logging. */
86
+ logger?: DurcnoLogger;
87
+ constructor(options: ConnectorOptions);
21
88
  /**
22
89
  * Creates a single-connection client.
23
90
  *
@@ -38,15 +105,19 @@ declare abstract class Connector {
38
105
  abstract getPool(): $Pool;
39
106
  }
40
107
  /**
41
- * Abstract base class for single-connection database clients.
108
+ * Abstract base class shared by {@link $Client} and {@link $Pool}.
42
109
  *
43
- * Implementations wrap specific PostgreSQL client libraries to provide
44
- * a unified interface for query execution, connection management, and
45
- * result parsing.
110
+ * Holds the common `options`, `logger`, `query`, and `execQuery` members so
111
+ * they only need to be defined once.
46
112
  *
47
113
  * @abstract
48
114
  */
49
- declare abstract class $Client {
115
+ declare abstract class $QueryExecutor {
116
+ /** The connector options used to create this executor. */
117
+ options: ConnectorOptions;
118
+ /** Optional logger instance for query logging. */
119
+ logger?: DurcnoLogger;
120
+ constructor(options: ConnectorOptions);
50
121
  /**
51
122
  * Executes a SQL query with optional parameterized arguments.
52
123
  *
@@ -55,6 +126,25 @@ declare abstract class $Client {
55
126
  * @returns A promise that resolves with the query result.
56
127
  */
57
128
  query: (query: string, args?: (string | number | null)[]) => Promise<unknown>;
129
+ /**
130
+ * Executes a {@link Query} object by forwarding its sql and arguments to {@link query}.
131
+ * When a logger is configured, logs the SQL, arguments, and query duration after execution.
132
+ *
133
+ * @param q - The {@link Query} object to execute.
134
+ * @returns A promise that resolves with the raw query result.
135
+ */
136
+ execQuery(q: Query<any>): Promise<unknown>;
137
+ }
138
+ /**
139
+ * Abstract base class for single-connection database clients.
140
+ *
141
+ * Implementations wrap specific PostgreSQL client libraries to provide
142
+ * a unified interface for query execution, connection management, and
143
+ * result parsing.
144
+ *
145
+ * @abstract
146
+ */
147
+ declare abstract class $Client extends $QueryExecutor {
58
148
  /**
59
149
  * Establishes a connection to the database.
60
150
  *
@@ -87,18 +177,7 @@ declare abstract class $Client {
87
177
  *
88
178
  * @abstract
89
179
  */
90
- declare abstract class $Pool {
91
- /**
92
- * Executes a SQL query with optional parameterized arguments.
93
- *
94
- * The pool automatically acquires a connection, executes the query,
95
- * and returns the connection to the pool.
96
- *
97
- * @param query - The SQL query string to execute.
98
- * @param args - Optional array of parameter values for parameterized queries.
99
- * @returns A promise that resolves with the query result.
100
- */
101
- query: (query: string, args?: (string | number | null)[]) => Promise<unknown>;
180
+ declare abstract class $Pool extends $QueryExecutor {
102
181
  /**
103
182
  * Initializes the connection pool.
104
183
  *
@@ -146,4 +225,4 @@ declare abstract class $Pool {
146
225
  */
147
226
  type QueryExecutor = $Client | $Pool;
148
227
  //#endregion
149
- export { $Pool, Connector, QueryExecutor };
228
+ export { $Client, $Pool, Connector, ConnectorOptions, QueryExecutor };