postgres-schema-migrations 6.1.0 → 7.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/bin/validate.js +12 -0
  2. package/create.d.ts +2 -0
  3. package/create.js +19 -0
  4. package/{dist/file-name-parser.js → file-name-parser.js} +1 -5
  5. package/{dist/files-loader.js → files-loader.js} +12 -14
  6. package/index.d.ts +3 -0
  7. package/index.js +3 -0
  8. package/load-sql-from-js.d.ts +1 -0
  9. package/{dist/load-sql-from-js.js → load-sql-from-js.js} +5 -7
  10. package/{dist/migrate.js → migrate.js} +27 -27
  11. package/migration-file.js +45 -0
  12. package/package.json +36 -32
  13. package/{dist/run-migration.js → run-migration.js} +8 -9
  14. package/{dist/types.d.ts → types.d.ts} +5 -12
  15. package/types.js +2 -0
  16. package/{dist/validation.js → validation.js} +2 -7
  17. package/{dist/with-connection.js → with-connection.js} +10 -8
  18. package/{dist/with-lock.js → with-lock.js} +11 -8
  19. package/.editorconfig +0 -9
  20. package/.fitcommitjsrc.yml +0 -22
  21. package/.gitattributes +0 -1
  22. package/.github/workflows/node.js.yml +0 -31
  23. package/.prettierignore +0 -1
  24. package/.prettierrc +0 -5
  25. package/CHANGELOG.md +0 -30
  26. package/LICENSE +0 -21
  27. package/README.md +0 -334
  28. package/ava.config.cjs +0 -4
  29. package/ava.config.integration.cjs +0 -6
  30. package/ava.config.unit.cjs +0 -6
  31. package/dist/bin/validate.js +0 -14
  32. package/dist/create.d.ts +0 -6
  33. package/dist/create.js +0 -64
  34. package/dist/index.d.ts +0 -4
  35. package/dist/index.js +0 -11
  36. package/dist/load-sql-from-js.d.ts +0 -1
  37. package/dist/migration-file.js +0 -46
  38. package/dist/package.json +0 -71
  39. package/dist/types.js +0 -6
  40. package/tsconfig-base.json +0 -17
  41. package/tsconfig-build.json +0 -5
  42. package/tsconfig.json +0 -4
  43. package/tslint.json +0 -42
  44. /package/{dist/bin → bin}/validate.d.ts +0 -0
  45. /package/{dist/file-name-parser.d.ts → file-name-parser.d.ts} +0 -0
  46. /package/{dist/files-loader.d.ts → files-loader.d.ts} +0 -0
  47. /package/{dist/migrate.d.ts → migrate.d.ts} +0 -0
  48. /package/{dist/migration-file.d.ts → migration-file.d.ts} +0 -0
  49. /package/{dist/migrations → migrations}/0_create-migrations-table.sql +0 -0
  50. /package/{dist/run-migration.d.ts → run-migration.d.ts} +0 -0
  51. /package/{dist/validation.d.ts → validation.d.ts} +0 -0
  52. /package/{dist/with-connection.d.ts → with-connection.d.ts} +0 -0
  53. /package/{dist/with-lock.d.ts → with-lock.d.ts} +0 -0
@@ -0,0 +1,12 @@
1
+ #!/usr/bin/env node
2
+ // tslint:disable no-console
3
+ import { argv } from "process";
4
+ import { loadMigrationFiles } from "../files-loader";
5
+ async function main(args) {
6
+ const directory = args[0];
7
+ await loadMigrationFiles(directory, (x) => console.error(x));
8
+ }
9
+ main(argv.slice(2)).catch((e) => {
10
+ console.error(`ERROR: ${e.message}`);
11
+ process.exit(1);
12
+ });
package/create.d.ts ADDED
@@ -0,0 +1,2 @@
1
+ import { BasicPgClient, Logger } from "./types";
2
+ export declare function runCreateQuery(dbName: string, log: Logger): (client: BasicPgClient) => Promise<void>;
package/create.js ADDED
@@ -0,0 +1,19 @@
1
+ const DUPLICATE_DATABASE = "42P04";
2
+ export function runCreateQuery(dbName, log) {
3
+ return async (client) => {
4
+ await client
5
+ .query(`CREATE DATABASE "${dbName.replace(/"/g, '""')}"`)
6
+ .catch((e) => {
7
+ switch (e.code) {
8
+ case DUPLICATE_DATABASE: {
9
+ log(`'${dbName}' database already exists`);
10
+ return;
11
+ }
12
+ default: {
13
+ log(e);
14
+ throw new Error(`Error creating database. Caused by: '${e.name}: ${e.message}'`);
15
+ }
16
+ }
17
+ });
18
+ };
19
+ }
@@ -1,6 +1,3 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.parseFileName = void 0;
4
1
  const parseId = (id) => {
5
2
  const parsed = parseInt(id, 10);
6
3
  if (isNaN(parsed)) {
@@ -8,7 +5,7 @@ const parseId = (id) => {
8
5
  }
9
6
  return parsed;
10
7
  };
11
- const parseFileName = (fileName) => {
8
+ export const parseFileName = (fileName) => {
12
9
  const result = /^(-?\d+)[-_]?(.*).(sql|js)$/gi.exec(fileName);
13
10
  if (!result) {
14
11
  throw new Error(`Invalid file name: '${fileName}'.`);
@@ -24,4 +21,3 @@ const parseFileName = (fileName) => {
24
21
  type: lowerType,
25
22
  };
26
23
  };
27
- exports.parseFileName = parseFileName;
@@ -1,12 +1,11 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.loadMigrationFiles = void 0;
4
- const fs = require("fs");
5
- const path = require("path");
6
- const util_1 = require("util");
7
- const migration_file_1 = require("./migration-file");
8
- const validation_1 = require("./validation");
9
- const readDir = util_1.promisify(fs.readdir);
1
+ import * as fs from "fs";
2
+ import * as path from "path";
3
+ import { promisify } from "util";
4
+ import { fileURLToPath } from "url";
5
+ import { loadMigrationFile } from "./migration-file";
6
+ import { validateMigrationOrdering } from "./validation";
7
+ const currentDir = path.dirname(fileURLToPath(import.meta.url));
8
+ const readDir = promisify(fs.readdir);
10
9
  const isValidFile = (fileName) => /\.(sql|js)$/gi.test(fileName);
11
10
  /**
12
11
  * Load the migration files and assert they are reasonably valid.
@@ -16,7 +15,7 @@ const isValidFile = (fileName) => /\.(sql|js)$/gi.test(fileName);
16
15
  *
17
16
  * No assertions are made about the validity of the SQL.
18
17
  */
19
- const loadMigrationFiles = async (directory,
18
+ export const loadMigrationFiles = async (directory,
20
19
  // tslint:disable-next-line no-empty
21
20
  log = () => { }, schemaName = "public") => {
22
21
  log(`Loading migrations from: ${directory}`);
@@ -26,14 +25,13 @@ log = () => { }, schemaName = "public") => {
26
25
  return [];
27
26
  }
28
27
  const migrationFiles = [
29
- path.join(__dirname, "migrations/0_create-migrations-table.sql"),
28
+ path.join(currentDir, "migrations/0_create-migrations-table.sql"),
30
29
  ...fileNames.map((fileName) => path.resolve(directory, fileName)),
31
30
  ].filter(isValidFile);
32
- const unorderedMigrations = await Promise.all(migrationFiles.map(migration_file_1.loadMigrationFile));
31
+ const unorderedMigrations = await Promise.all(migrationFiles.map(loadMigrationFile));
33
32
  // Arrange in ID order
34
33
  const orderedMigrations = unorderedMigrations.sort((a, b) => a.id - b.id);
35
- validation_1.validateMigrationOrdering(orderedMigrations);
34
+ validateMigrationOrdering(orderedMigrations);
36
35
  orderedMigrations[0].sql = orderedMigrations[0].sql.replace("CREATE TABLE IF NOT EXISTS migrations", `CREATE SCHEMA IF NOT EXISTS ${schemaName}; CREATE TABLE IF NOT EXISTS ${schemaName}.migrations`);
37
36
  return orderedMigrations;
38
37
  };
39
- exports.loadMigrationFiles = loadMigrationFiles;
package/index.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ export { migrate } from "./migrate";
2
+ export { loadMigrationFiles } from "./files-loader";
3
+ export { ConnectionParams, MigrateDBConfig, Logger, Config, MigrationError, } from "./types";
package/index.js ADDED
@@ -0,0 +1,3 @@
1
+ export { migrate } from "./migrate";
2
+ export { loadMigrationFiles } from "./files-loader";
3
+ export { MigrationError, } from "./types";
@@ -0,0 +1 @@
1
+ export declare const loadSqlFromJs: (filePath: string) => Promise<string>;
@@ -1,9 +1,8 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.loadSqlFromJs = void 0;
4
- const path = require("path");
5
- const loadSqlFromJs = (filePath) => {
6
- const migrationModule = require(filePath);
1
+ import * as path from "path";
2
+ import { pathToFileURL } from "url";
3
+ export const loadSqlFromJs = async (filePath) => {
4
+ const fileUrl = pathToFileURL(filePath).href;
5
+ const migrationModule = await import(fileUrl);
7
6
  if (!migrationModule.generateSql) {
8
7
  throw new Error(`Invalid javascript migration file: '${path.basename(filePath)}'.
9
8
  It must to export a 'generateSql' function.`);
@@ -15,4 +14,3 @@ It must to export a 'generateSql' function.`);
15
14
  }
16
15
  return generatedValue;
17
16
  };
18
- exports.loadSqlFromJs = loadSqlFromJs;
@@ -1,14 +1,11 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.doesTableExist = exports.migrate = void 0;
4
- const pg = require("pg");
5
- const sql_template_strings_1 = require("sql-template-strings");
6
- const create_1 = require("./create");
7
- const files_loader_1 = require("./files-loader");
8
- const run_migration_1 = require("./run-migration");
9
- const validation_1 = require("./validation");
10
- const with_connection_1 = require("./with-connection");
11
- const with_lock_1 = require("./with-lock");
1
+ import * as pg from "pg";
2
+ import SQL from "sql-template-strings";
3
+ import { runCreateQuery } from "./create";
4
+ import { loadMigrationFiles } from "./files-loader";
5
+ import { runMigration } from "./run-migration";
6
+ import { validateMigrationHashes } from "./validation";
7
+ import { withConnection } from "./with-connection";
8
+ import { withAdvisoryLock } from "./with-lock";
12
9
  const splitTableName = (tableName) => {
13
10
  const parts = tableName.split(".");
14
11
  if (parts.length > 1) {
@@ -29,7 +26,7 @@ const splitTableName = (tableName) => {
29
26
  * @param config Extra configuration
30
27
  * @returns Details about the migrations which were run
31
28
  */
32
- async function migrate(dbConfig, migrationsDirectory, config = {}) {
29
+ export async function migrate(dbConfig, migrationsDirectory, config = {}) {
33
30
  const log = config.logger != null
34
31
  ? config.logger
35
32
  : () => {
@@ -41,10 +38,10 @@ async function migrate(dbConfig, migrationsDirectory, config = {}) {
41
38
  if (typeof migrationsDirectory !== "string") {
42
39
  throw new Error("Must pass migrations directory as a string");
43
40
  }
44
- const intendedMigrations = await files_loader_1.loadMigrationFiles(migrationsDirectory, log, config.schema);
41
+ const intendedMigrations = await loadMigrationFiles(migrationsDirectory, log, config.schema);
45
42
  if ("client" in dbConfig) {
46
43
  // we have been given a client to use, it should already be connected
47
- return with_lock_1.withAdvisoryLock(log, runMigrations(intendedMigrations, log, config.schema))(dbConfig.client);
44
+ return withAdvisoryLock(log, runMigrations(intendedMigrations, log, config.schema))(dbConfig.client);
48
45
  }
49
46
  if (typeof dbConfig.database !== "string" ||
50
47
  typeof dbConfig.user !== "string" ||
@@ -65,13 +62,13 @@ async function migrate(dbConfig, migrationsDirectory, config = {}) {
65
62
  host,
66
63
  port,
67
64
  });
68
- const runWith = with_connection_1.withConnection(log, async (connectedClient) => {
65
+ const runWith = withConnection(log, async (connectedClient) => {
69
66
  const result = await connectedClient.query({
70
67
  text: "SELECT 1 FROM pg_database WHERE datname=$1",
71
68
  values: [dbConfig.database],
72
69
  });
73
70
  if (result.rowCount !== 1) {
74
- await create_1.runCreateQuery(dbConfig.database, log)(connectedClient);
71
+ await runCreateQuery(dbConfig.database, log)(connectedClient);
75
72
  }
76
73
  });
77
74
  await runWith(client);
@@ -81,23 +78,22 @@ async function migrate(dbConfig, migrationsDirectory, config = {}) {
81
78
  client.on("error", (err) => {
82
79
  log(`pg client emitted an error: ${err.message}`);
83
80
  });
84
- const runWith = with_connection_1.withConnection(log, with_lock_1.withAdvisoryLock(log, runMigrations(intendedMigrations, log, config.schema)));
81
+ const runWith = withConnection(log, withAdvisoryLock(log, runMigrations(intendedMigrations, log, config.schema)));
85
82
  return runWith(client);
86
83
  }
87
84
  }
88
- exports.migrate = migrate;
89
85
  function runMigrations(intendedMigrations, log, schemaName = "public") {
90
86
  return async (client) => {
91
87
  try {
92
88
  const migrationTableName = `${schemaName}.migrations`;
93
89
  log("Starting migrations");
94
90
  const appliedMigrations = await fetchAppliedMigrationFromDB(migrationTableName, client, log);
95
- validation_1.validateMigrationHashes(intendedMigrations, appliedMigrations);
91
+ validateMigrationHashes(intendedMigrations, appliedMigrations);
96
92
  const migrationsToRun = filterMigrations(intendedMigrations, appliedMigrations);
97
93
  const completedMigrations = [];
98
94
  for (const migration of migrationsToRun) {
99
95
  log(`Starting migration: ${migration.id} ${migration.name}`);
100
- const result = await run_migration_1.runMigration(migrationTableName, client, log)(migration);
96
+ const result = await runMigration(migrationTableName, client, log)(migration);
101
97
  log(`Finished migration: ${migration.id} ${migration.name}`);
102
98
  completedMigrations.push(result);
103
99
  }
@@ -106,9 +102,14 @@ function runMigrations(intendedMigrations, log, schemaName = "public") {
106
102
  return completedMigrations;
107
103
  }
108
104
  catch (e) {
109
- const exception = e;
110
- const error = new Error(`Migration failed. Reason: ${exception.message}`);
111
- error.cause = exception.message;
105
+ const reason = (() => {
106
+ if (e instanceof Error) {
107
+ return e.message;
108
+ }
109
+ return `${e}`;
110
+ })();
111
+ const error = new Error(`Migration failed. Reason: ${reason}`);
112
+ error.cause = reason;
112
113
  throw error;
113
114
  }
114
115
  };
@@ -142,13 +143,13 @@ function logResult(completedMigrations, log) {
142
143
  }
143
144
  }
144
145
  /** Check whether table exists in postgres - http://stackoverflow.com/a/24089729 */
145
- async function doesTableExist(client, tableName) {
146
+ export async function doesTableExist(client, tableName) {
146
147
  const [schema, table] = splitTableName(tableName);
147
- const result = await client.query(sql_template_strings_1.default `
148
+ const result = await client.query(SQL `
148
149
  SELECT EXISTS (
149
150
  SELECT 1
150
151
  FROM pg_catalog.pg_class c
151
- JOIN pg_catalog.pg_namespace n
152
+ JOIN pg_catalog.pg_namespace n
152
153
  ON n.oid = c.relnamespace
153
154
  WHERE c.relname = ${table}
154
155
  AND c.relkind = 'r'
@@ -157,4 +158,3 @@ async function doesTableExist(client, tableName) {
157
158
  `);
158
159
  return result.rows.length > 0 && result.rows[0].exists;
159
160
  }
160
- exports.doesTableExist = doesTableExist;
@@ -0,0 +1,45 @@
1
+ import { promisify } from "util";
2
+ import * as fs from "fs";
3
+ import * as path from "path";
4
+ import * as crypto from "crypto";
5
+ import { loadSqlFromJs } from "./load-sql-from-js";
6
+ import { parseFileName } from "./file-name-parser";
7
+ const readFile = promisify(fs.readFile);
8
+ const getFileName = (filePath) => path.basename(filePath);
9
+ const getFileContents = async (filePath) => readFile(filePath, "utf8");
10
+ const hashString = (s) => crypto.createHash("sha1").update(s, "utf8").digest("hex");
11
+ const getSqlStringLiteral = async (filePath, contents, type) => {
12
+ switch (type) {
13
+ case "sql":
14
+ return contents;
15
+ case "js":
16
+ return await loadSqlFromJs(filePath);
17
+ default: {
18
+ const exhaustiveCheck = type;
19
+ return exhaustiveCheck;
20
+ }
21
+ }
22
+ };
23
+ export const loadMigrationFile = async (filePath) => {
24
+ const fileName = getFileName(filePath);
25
+ try {
26
+ const { id, name, type } = parseFileName(fileName);
27
+ const contents = await getFileContents(filePath);
28
+ const sql = await getSqlStringLiteral(filePath, contents, type);
29
+ const hash = hashString(fileName + sql);
30
+ return {
31
+ id,
32
+ name,
33
+ contents,
34
+ fileName,
35
+ hash,
36
+ sql,
37
+ };
38
+ }
39
+ catch (err) {
40
+ if (err instanceof Error) {
41
+ throw new Error(`${err.message} - Offending file: '${fileName}'.`);
42
+ }
43
+ throw err;
44
+ }
45
+ };
package/package.json CHANGED
@@ -1,13 +1,17 @@
1
1
  {
2
2
  "name": "postgres-schema-migrations",
3
- "version": "6.1.0",
4
- "description": "Stack Overflow style database migrations for PostgreSQL",
3
+ "version": "7.0.0",
4
+ "description": "Database migrations for PostgreSQL",
5
+ "type": "module",
5
6
  "main": "dist/index.js",
6
7
  "types": "dist/index.d.ts",
7
8
  "bin": {
8
9
  "pg-validate-migrations": "./dist/bin/validate.js"
9
10
  },
10
- "authors": ["Thom Wright", "Zak Patterson"],
11
+ "authors": [
12
+ "Thom Wright",
13
+ "Zak Patterson"
14
+ ],
11
15
  "keywords": [
12
16
  "postgres",
13
17
  "postgresql",
@@ -29,43 +33,43 @@
29
33
  "engines": {
30
34
  "node": ">10.17.0"
31
35
  },
32
- "scripts": {
33
- "checkPushed": "[ \"$(git rev-list --count @{upstream}..HEAD)\" -eq 0 ] || (echo You have unpushed commits && exit 1)",
34
- "prepublishOnly": "npm run checkPushed && npm run build",
35
- "check-formatting": "./node_modules/.bin/prettier '**/*.ts' --list-different",
36
- "fix-formatting": "./node_modules/.bin/prettier '**/*.ts' --write",
37
- "lint": "npm run tslint && npm run check-formatting",
38
- "tslint": "tslint 'src/**/*.ts' --type-check --project tsconfig.json --format verbose",
39
- "test-integration": "ava --config ava.config.integration.cjs",
40
- "test-unit": "ava --config ava.config.unit.cjs",
41
- "test": "npm run test-unit && npm run lint && npm run test-integration",
42
- "preversion": "npm test",
43
- "build": "rm -rf ./dist && rsync -a --exclude='*.ts' --exclude='__tests__' --exclude='__unit__' --prune-empty-dirs src/ dist/ && tsc --project tsconfig-build.json && cp ./package.json ./dist/"
44
- },
45
36
  "husky": {
46
37
  "hooks": {
47
38
  "commit-msg": "node ./node_modules/fit-commit-js/lib/hook.js .git/COMMIT_EDITMSG",
48
- "pre-commit": "npm run lint",
49
- "pre-push": "npm test"
39
+ "pre-commit": "pnpm run lint",
40
+ "pre-push": "pnpm test"
50
41
  }
51
42
  },
52
43
  "dependencies": {
53
- "pg": "^8.6.0",
44
+ "pg": "^8.17.1",
54
45
  "sql-template-strings": "^2.2.2"
55
46
  },
56
47
  "devDependencies": {
57
- "@types/node": "^10.17.60",
58
- "@types/pg": "^8.6.0",
59
- "@types/sinon": "^9.0.11",
60
- "ava": "^3.15.0",
48
+ "@eslint/js": "^9.39.2",
49
+ "@types/node": "^25.0.9",
50
+ "@types/pg": "^8.16.0",
51
+ "@types/sinon": "^21.0.0",
52
+ "ava": "^6.4.1",
53
+ "eslint": "^9.39.2",
61
54
  "fit-commit-js": "^0.3.2",
62
- "husky": "^3.1.0",
63
- "prettier": "^2.3.1",
64
- "sinon": "^9.2.4",
65
- "ts-node": "^10.0.0",
66
- "tslint": "^6.1.3",
67
- "tslint-config-prettier": "^1.18.0",
68
- "typescript": "^4.3.4",
69
- "typescript-tslint-plugin": "^1.0.1"
55
+ "husky": "^9.1.7",
56
+ "prettier": "^3.8.0",
57
+ "sinon": "^21.0.1",
58
+ "ts-node": "^10.9.2",
59
+ "tsx": "^4.21.0",
60
+ "typescript": "^5.9.3",
61
+ "typescript-eslint": "^8.53.0"
62
+ },
63
+ "scripts": {
64
+ "checkPushed": "[ \"$(git rev-list --count @{upstream}..HEAD)\" -eq 0 ] || (echo You have unpushed commits && exit 1)",
65
+ "check-formatting": "./node_modules/.bin/prettier '**/*.ts' --list-different",
66
+ "fix-formatting": "./node_modules/.bin/prettier '**/*.ts' --write",
67
+ "lint": "pnpm run eslint && pnpm run check-formatting",
68
+ "eslint": "eslint 'src/**/*.ts'",
69
+ "test-integration": "ava --config ava.config.integration.cjs",
70
+ "test-unit": "ava --config ava.config.unit.cjs",
71
+ "test": "pnpm run test-unit && npm run lint && pnpm run test-integration",
72
+ "preversion": "pnpm test",
73
+ "build": "rm -rf ./dist && rsync -a --exclude='*.ts' --exclude='__tests__' --exclude='__unit__' --prune-empty-dirs src/ dist/ && tsc --project tsconfig-build.json && cp ./package.json ./dist/"
70
74
  }
71
- }
75
+ }
@@ -1,19 +1,16 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.runMigration = void 0;
4
- const sql_template_strings_1 = require("sql-template-strings");
1
+ import SQL from "sql-template-strings";
5
2
  const noop = () => {
6
3
  //
7
4
  };
8
5
  const asyncNoop = () => Promise.resolve();
9
6
  const insertMigration = async (migrationTableName, client, migration, log) => {
10
7
  log(`Saving migration to '${migrationTableName}': ${migration.id} | ${migration.name} | ${migration.hash}`);
11
- const sql = sql_template_strings_1.default `INSERT INTO `
8
+ const sql = SQL `INSERT INTO `
12
9
  .append(migrationTableName)
13
- .append(sql_template_strings_1.default ` ("id", "name", "hash") VALUES (${migration.id},${migration.name},${migration.hash})`);
10
+ .append(SQL ` ("id", "name", "hash") VALUES (${migration.id},${migration.name},${migration.hash})`);
14
11
  return client.query(sql);
15
12
  };
16
- const runMigration = (migrationTableName, client, log = noop) => async (migration) => {
13
+ export const runMigration = (migrationTableName, client, log = noop) => async (migration) => {
17
14
  const inTransaction = migration.sql.includes("-- postgres-migrations disable-transaction") ===
18
15
  false;
19
16
  log(`Running migration in transaction: ${inTransaction}`);
@@ -38,7 +35,9 @@ const runMigration = (migrationTableName, client, log = noop) => async (migratio
38
35
  catch (_a) {
39
36
  //
40
37
  }
41
- throw new Error(`An error occurred running '${migration.name}'. Rolled back this migration. No further migrations were run. Reason: ${err.message}`);
38
+ if (err instanceof Error) {
39
+ throw new Error(`An error occurred running '${migration.name}'. Rolled back this migration. No further migrations were run. Reason: ${err.message}`);
40
+ }
41
+ throw err;
42
42
  }
43
43
  };
44
- exports.runMigration = runMigration;
@@ -17,7 +17,7 @@ export interface ClientParams {
17
17
  /** A connected Client, or a Pool Client. The caller is responsible for connecting and cleaning up. */
18
18
  readonly client: pg.Client | pg.PoolClient | pg.Pool;
19
19
  }
20
- export declare type EnsureDatabase = {
20
+ export type EnsureDatabase = {
21
21
  /**
22
22
  * Might default to `true` in future versions
23
23
  * @default false
@@ -31,18 +31,11 @@ export declare type EnsureDatabase = {
31
31
  } | {
32
32
  readonly ensureDatabaseExists?: false;
33
33
  };
34
- /**
35
- * @deprecated Use `migrate` instead with `ensureDatabaseExists: true`.
36
- */
37
- export declare type CreateDBConfig = (ConnectionParams & {
38
- /** The database to connect to when creating the new database. */
39
- readonly defaultDatabase?: string;
40
- }) | ClientParams;
41
- export declare type MigrateDBConfig = (ConnectionParams & {
34
+ export type MigrateDBConfig = (ConnectionParams & {
42
35
  readonly database: string;
43
36
  } & EnsureDatabase) | ClientParams;
44
- export declare type Logger = (msg: string) => void;
45
- export declare type Config = Partial<FullConfig>;
37
+ export type Logger = (msg: string) => void;
38
+ export type Config = Partial<FullConfig>;
46
39
  export interface FullConfig {
47
40
  readonly logger: Logger;
48
41
  readonly schema: string;
@@ -50,7 +43,7 @@ export interface FullConfig {
50
43
  export declare class MigrationError extends Error {
51
44
  cause?: string;
52
45
  }
53
- export declare type FileType = "sql" | "js";
46
+ export type FileType = "sql" | "js";
54
47
  export interface BasicPgClient {
55
48
  query(queryTextOrConfig: string | pg.QueryConfig): Promise<pg.QueryResult>;
56
49
  }
package/types.js ADDED
@@ -0,0 +1,2 @@
1
+ export class MigrationError extends Error {
2
+ }
@@ -1,17 +1,13 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.validateMigrationHashes = exports.validateMigrationOrdering = void 0;
4
1
  const indexNotMatch = (migration, index) => migration.id !== index;
5
2
  /** Assert migration IDs are consecutive integers */
6
- function validateMigrationOrdering(migrations) {
3
+ export function validateMigrationOrdering(migrations) {
7
4
  const notMatchingId = migrations.find(indexNotMatch);
8
5
  if (notMatchingId) {
9
6
  throw new Error(`Found a non-consecutive migration ID on file: '${notMatchingId.fileName}'`);
10
7
  }
11
8
  }
12
- exports.validateMigrationOrdering = validateMigrationOrdering;
13
9
  /** Assert hashes match */
14
- function validateMigrationHashes(migrations, appliedMigrations) {
10
+ export function validateMigrationHashes(migrations, appliedMigrations) {
15
11
  const invalidHash = (migration) => {
16
12
  const appliedMigration = appliedMigrations[migration.id];
17
13
  return appliedMigration != null && appliedMigration.hash !== migration.hash;
@@ -25,4 +21,3 @@ function validateMigrationHashes(migrations, appliedMigrations) {
25
21
  This means that the scripts have changed since it was applied.`);
26
22
  }
27
23
  }
28
- exports.validateMigrationHashes = validateMigrationHashes;
@@ -1,7 +1,4 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.withConnection = void 0;
4
- function withConnection(log, f) {
1
+ export function withConnection(log, f) {
5
2
  return async (client) => {
6
3
  try {
7
4
  try {
@@ -10,14 +7,18 @@ function withConnection(log, f) {
10
7
  log("... connected to database");
11
8
  }
12
9
  catch (e) {
13
- log(`Error connecting to database: ${e.message}`);
10
+ if (e instanceof Error) {
11
+ log(`Error connecting to database: ${e.message}`);
12
+ }
14
13
  throw e;
15
14
  }
16
15
  const result = await f(client);
17
16
  return result;
18
17
  }
19
18
  catch (e) {
20
- log(`Error using connection: ${e.message}`);
19
+ if (e instanceof Error) {
20
+ log(`Error using connection: ${e.message}`);
21
+ }
21
22
  throw e;
22
23
  }
23
24
  finally {
@@ -28,9 +29,10 @@ function withConnection(log, f) {
28
29
  log("... connection closed");
29
30
  }
30
31
  catch (e) {
31
- log(`Error closing the connection: ${e.message}`);
32
+ if (e instanceof Error) {
33
+ log(`Error closing the connection: ${e.message}`);
34
+ }
32
35
  }
33
36
  }
34
37
  };
35
38
  }
36
- exports.withConnection = withConnection;
@@ -1,7 +1,4 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.withAdvisoryLock = void 0;
4
- function withAdvisoryLock(log, f) {
1
+ export function withAdvisoryLock(log, f) {
5
2
  return async (client) => {
6
3
  try {
7
4
  try {
@@ -19,14 +16,18 @@ function withAdvisoryLock(log, f) {
19
16
  log("... acquired advisory lock");
20
17
  }
21
18
  catch (e) {
22
- log(`Error acquiring advisory lock: ${e.message}`);
19
+ if (e instanceof Error) {
20
+ log(`Error acquiring advisory lock: ${e.message}`);
21
+ }
23
22
  throw e;
24
23
  }
25
24
  const result = await f(client);
26
25
  return result;
27
26
  }
28
27
  catch (e) {
29
- log(`Error while using lock: ${e.message}`);
28
+ if (e instanceof Error) {
29
+ log(`Error while using lock: ${e.message}`);
30
+ }
30
31
  throw e;
31
32
  }
32
33
  finally {
@@ -36,9 +37,11 @@ function withAdvisoryLock(log, f) {
36
37
  log("... released advisory lock");
37
38
  }
38
39
  catch (e) {
39
- log(`Error releasing advisory lock: ${e.message}`);
40
+ if (e instanceof Error) {
41
+ log(`Error releasing advisory lock: ${e.message}`);
42
+ }
43
+ // Don't re-throw - avoid masking the original error from the try block
40
44
  }
41
45
  }
42
46
  };
43
47
  }
44
- exports.withAdvisoryLock = withAdvisoryLock;
package/.editorconfig DELETED
@@ -1,9 +0,0 @@
1
- root = true
2
-
3
- [*]
4
- end_of_line = lf
5
- charset = utf-8
6
- trim_trailing_whitespace = true
7
- insert_final_newline = true
8
- indent_style = space
9
- indent_size = 2
@@ -1,22 +0,0 @@
1
- ---
2
- validators:
3
- lineLength:
4
- enabled: true
5
- maxLineLength: 72
6
- subjectMaxLength: 50
7
- ticketCode:
8
- enabled: false
9
- emptyLines:
10
- enabled: true
11
- emptyLines: 1
12
- tags:
13
- enabled: false
14
- subjectTense:
15
- enabled: true
16
- subjectPeriod:
17
- enabled: false
18
- capitalizedSubject:
19
- # to allow npm version commits e.g. 1.0.0
20
- enabled: false
21
- wip:
22
- enabled: true
package/.gitattributes DELETED
@@ -1 +0,0 @@
1
- package-lock.json -diff