postgres-schema-migrations 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.editorconfig +9 -0
- package/.fitcommitjsrc.yml +22 -0
- package/.gitattributes +1 -0
- package/.github/workflows/node.js.yml +31 -0
- package/.prettierignore +1 -0
- package/.prettierrc +5 -0
- package/CHANGELOG.md +30 -0
- package/LICENSE +21 -0
- package/README.md +336 -0
- package/ava.config.cjs +4 -0
- package/ava.config.integration.cjs +6 -0
- package/ava.config.unit.cjs +6 -0
- package/dist/bin/validate.d.ts +2 -0
- package/dist/bin/validate.js +14 -0
- package/dist/create.d.ts +6 -0
- package/dist/create.js +64 -0
- package/dist/file-name-parser.d.ts +7 -0
- package/dist/file-name-parser.js +27 -0
- package/dist/files-loader.d.ts +10 -0
- package/dist/files-loader.js +39 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +11 -0
- package/dist/load-sql-from-js.d.ts +1 -0
- package/dist/load-sql-from-js.js +18 -0
- package/dist/migrate.d.ts +13 -0
- package/dist/migrate.js +158 -0
- package/dist/migration-file.d.ts +8 -0
- package/dist/migration-file.js +46 -0
- package/dist/migrations/0_create-migrations-table.sql +6 -0
- package/dist/run-migration.d.ts +2 -0
- package/dist/run-migration.js +39 -0
- package/dist/types.d.ts +56 -0
- package/dist/types.js +6 -0
- package/dist/validation.d.ts +5 -0
- package/dist/validation.js +28 -0
- package/dist/with-connection.d.ts +3 -0
- package/dist/with-connection.js +36 -0
- package/dist/with-lock.d.ts +2 -0
- package/dist/with-lock.js +44 -0
- package/package.json +71 -0
- package/tsconfig-base.json +17 -0
- package/tsconfig-build.json +5 -0
- package/tsconfig.json +4 -0
- package/tslint.json +42 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MigrationError = exports.loadMigrationFiles = exports.migrate = exports.createDb = void 0;
// Public API surface. Each name is re-exported with a live getter so the
// binding stays in sync with its source module (same semantics as tsc's
// generated re-export output).
const reExport = (name, get) => {
    Object.defineProperty(exports, name, { enumerable: true, get });
};
const create_1 = require("./create");
reExport("createDb", () => create_1.createDb);
const migrate_1 = require("./migrate");
reExport("migrate", () => migrate_1.migrate);
const files_loader_1 = require("./files-loader");
reExport("loadMigrationFiles", () => files_loader_1.loadMigrationFiles);
const types_1 = require("./types");
reExport("MigrationError", () => types_1.MigrationError);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
/** Load a JS migration module from `filePath` and return the SQL string produced by its exported `generateSql` function; throws if the export is missing or returns a non-string. */
export declare const loadSqlFromJs: (filePath: string) => string;
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.loadSqlFromJs = void 0;
|
|
4
|
+
const path = require("path");
|
|
5
|
+
const loadSqlFromJs = (filePath) => {
|
|
6
|
+
const migrationModule = require(filePath);
|
|
7
|
+
if (!migrationModule.generateSql) {
|
|
8
|
+
throw new Error(`Invalid javascript migration file: '${path.basename(filePath)}'.
|
|
9
|
+
It must to export a 'generateSql' function.`);
|
|
10
|
+
}
|
|
11
|
+
const generatedValue = migrationModule.generateSql();
|
|
12
|
+
if (typeof generatedValue !== "string") {
|
|
13
|
+
throw new Error(`Invalid javascript migration file: '${path.basename(filePath)}'.
|
|
14
|
+
'generateSql' function must return a string literal.`);
|
|
15
|
+
}
|
|
16
|
+
return generatedValue;
|
|
17
|
+
};
|
|
18
|
+
exports.loadSqlFromJs = loadSqlFromJs;
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
import { Config, MigrateDBConfig, Migration } from "./types";
/**
 * Run the migrations.
 *
 * If `dbConfig.ensureDatabaseExists` is true then `dbConfig.database` will be created if it
 * does not exist.
 *
 * @param dbConfig Details about how to connect to the database
 * @param migrationsDirectory Directory containing the SQL migration files
 * @param config Extra configuration: `logger` for progress output and `schema`
 *   for the schema holding the migrations table (defaults to "public")
 * @returns Details about the migrations which were run
 */
export declare function migrate(dbConfig: MigrateDBConfig, migrationsDirectory: string, config?: Config): Promise<Array<Migration>>;
|
package/dist/migrate.js
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.migrate = void 0;
|
|
4
|
+
const pg = require("pg");
|
|
5
|
+
const sql_template_strings_1 = require("sql-template-strings");
|
|
6
|
+
const create_1 = require("./create");
|
|
7
|
+
const files_loader_1 = require("./files-loader");
|
|
8
|
+
const run_migration_1 = require("./run-migration");
|
|
9
|
+
const validation_1 = require("./validation");
|
|
10
|
+
const with_connection_1 = require("./with-connection");
|
|
11
|
+
const with_lock_1 = require("./with-lock");
|
|
12
|
// Split a possibly schema-qualified table name into [schema, table];
// bare names default to the "public" schema.
const splitTableName = (tableName) => {
    const [first, second] = tableName.split(".");
    return second !== undefined ? [first, second] : ["public", first];
};
|
|
21
|
/**
 * Run the migrations.
 *
 * If `dbConfig.ensureDatabaseExists` is true then `dbConfig.database` will be
 * created if it does not exist.
 *
 * @param dbConfig Connection parameters, or an already-connected client
 * @param migrationsDirectory Directory containing the SQL migration files
 * @param config Extra configuration (`logger`, `schema`)
 * @returns Details about the migrations which were run
 */
async function migrate(dbConfig, migrationsDirectory, config = {}) {
    const log = config.logger != null
        ? config.logger
        : () => {
            // default: discard log output
        };
    if (dbConfig == null) {
        throw new Error("No config object");
    }
    if (typeof migrationsDirectory !== "string") {
        throw new Error("Must pass migrations directory as a string");
    }
    const intendedMigrations = await files_loader_1.loadMigrationFiles(migrationsDirectory, log, config.schema);
    if ("client" in dbConfig) {
        // A connected client was supplied: just take the advisory lock and run.
        return with_lock_1.withAdvisoryLock(log, runMigrations(intendedMigrations, log, config.schema))(dbConfig.client);
    }
    const connectionParamsValid = typeof dbConfig.database === "string" &&
        typeof dbConfig.user === "string" &&
        typeof dbConfig.password === "string" &&
        typeof dbConfig.host === "string" &&
        typeof dbConfig.port === "number";
    if (!connectionParamsValid) {
        throw new Error("Database config problem");
    }
    if (dbConfig.ensureDatabaseExists === true) {
        // Connect to a maintenance database and create the target one when absent.
        const { user, password, host, port } = dbConfig;
        const maintenanceClient = new pg.Client({
            database: dbConfig.defaultDatabase != null
                ? dbConfig.defaultDatabase
                : "postgres",
            user,
            password,
            host,
            port,
        });
        const ensureDatabase = with_connection_1.withConnection(log, async (connectedClient) => {
            const existing = await connectedClient.query({
                text: "SELECT 1 FROM pg_database WHERE datname=$1",
                values: [dbConfig.database],
            });
            if (existing.rowCount !== 1) {
                await create_1.runCreateQuery(dbConfig.database, log)(connectedClient);
            }
        });
        await ensureDatabase(maintenanceClient);
    }
    const client = new pg.Client(dbConfig);
    client.on("error", (err) => {
        log(`pg client emitted an error: ${err.message}`);
    });
    const runWith = with_connection_1.withConnection(log, with_lock_1.withAdvisoryLock(log, runMigrations(intendedMigrations, log, config.schema)));
    return runWith(client);
}
|
|
88
|
+
exports.migrate = migrate;
|
|
89
|
/**
 * Build the async function that applies any not-yet-applied migrations to the
 * database reachable through `client`. Failures are wrapped in a single error
 * type with the original error attached as `cause`.
 */
function runMigrations(intendedMigrations, log, schemaName = "public") {
    return async (client) => {
        try {
            const migrationTableName = `${schemaName}.migrations`;
            log("Starting migrations");
            const alreadyApplied = await fetchAppliedMigrationFromDB(migrationTableName, client, log);
            validation_1.validateMigrationHashes(intendedMigrations, alreadyApplied);
            const pending = filterMigrations(intendedMigrations, alreadyApplied);
            const completed = [];
            for (const migration of pending) {
                log(`Starting migration: ${migration.id} ${migration.name}`);
                const applied = await run_migration_1.runMigration(migrationTableName, client, log)(migration);
                log(`Finished migration: ${migration.id} ${migration.name}`);
                completed.push(applied);
            }
            logResult(completed, log);
            log("Finished migrations");
            return completed;
        }
        catch (e) {
            // One consistent error type for callers; original failure kept as `cause`.
            const wrapped = new Error(`Migration failed. Reason: ${e.message}`);
            wrapped.cause = e;
            throw wrapped;
        }
    };
}
|
|
115
|
/** Queries the database for migrations table and retrieve it rows if exists */
async function fetchAppliedMigrationFromDB(migrationTableName, client, log) {
    if (!(await doesTableExist(client, migrationTableName))) {
        // No migrations table yet: fresh database, everything is pending.
        log(`Migrations table with name '${migrationTableName}' hasn't been created,
so the database is new and we need to run all migrations.`);
        return [];
    }
    log(`Migrations table with name '${migrationTableName}' exists, filtering not applied migrations.`);
    const { rows } = await client.query(`SELECT * FROM ${migrationTableName} ORDER BY id`);
    return rows;
}
|
|
129
|
/** Work out which migrations to apply: keep only those without an applied row at their ID. */
function filterMigrations(migrations, appliedMigrations) {
    return migrations.filter((migration) => !appliedMigrations[migration.id]);
}
|
|
134
|
+
/** Logs the result */
|
|
135
|
+
function logResult(completedMigrations, log) {
|
|
136
|
+
if (completedMigrations.length === 0) {
|
|
137
|
+
log("No migrations applied");
|
|
138
|
+
}
|
|
139
|
+
else {
|
|
140
|
+
log(`Successfully applied migrations: ${completedMigrations.map(({ name }) => name)}`);
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
/** Check whether table exists in postgres - http://stackoverflow.com/a/24089729 */
async function doesTableExist(client, tableName) {
    const [schema, table] = splitTableName(tableName);
    // Query pg_catalog directly; values are bound via the sql tagged template.
    const queryResult = await client.query(sql_template_strings_1.default `
SELECT EXISTS (
  SELECT 1
  FROM   pg_catalog.pg_class c
  JOIN   pg_catalog.pg_namespace n
  ON     n.oid = c.relnamespace
  WHERE  c.relname = ${table}
  AND    c.relkind = 'r'
  AND    n.nspname = ${schema}
);
`);
    const [firstRow] = queryResult.rows;
    return firstRow !== undefined && firstRow.exists;
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.loadMigrationFile = void 0;
|
|
4
|
+
const util_1 = require("util");
|
|
5
|
+
const fs = require("fs");
|
|
6
|
+
const path = require("path");
|
|
7
|
+
const crypto = require("crypto");
|
|
8
|
+
const load_sql_from_js_1 = require("./load-sql-from-js");
|
|
9
|
+
const file_name_parser_1 = require("./file-name-parser");
|
|
10
|
// Promisified fs.readFile so the loader can stay async/await based.
const readFile = util_1.promisify(fs.readFile);
// Strip the directory part of a migration path.
const getFileName = (filePath) => path.basename(filePath);
// Read a migration file as UTF-8 text.
const getFileContents = async (filePath) => readFile(filePath, "utf8");
// sha1 hex digest of a string; used to fingerprint migration files.
const hashString = (s) => crypto.createHash("sha1").update(s, "utf8").digest("hex");
|
|
14
|
/** Resolve the SQL text for a migration file: plain contents for .sql, generated output for .js. */
const getSqlStringLiteral = (filePath, contents, type) => {
    if (type === "sql") {
        return contents;
    }
    if (type === "js") {
        return load_sql_from_js_1.loadSqlFromJs(filePath);
    }
    // Unreachable for well-typed callers; mirrors the original exhaustiveness check.
    const exhaustiveCheck = type;
    return exhaustiveCheck;
};
|
|
26
|
/**
 * Load a single migration file from disk: parse its name into id/name/type,
 * read its SQL (directly, or via a JS file's generateSql), and hash the
 * file name + SQL so later runs can detect edits.
 */
const loadMigrationFile = async (filePath) => {
    const fileName = getFileName(filePath);
    try {
        const { id, name, type } = file_name_parser_1.parseFileName(fileName);
        const contents = await getFileContents(filePath);
        const sql = getSqlStringLiteral(filePath, contents, type);
        return {
            id,
            name,
            contents,
            fileName,
            hash: hashString(fileName + sql),
            sql,
        };
    }
    catch (err) {
        // Re-throw with the offending file name attached for easier debugging.
        throw new Error(`${err.message} - Offending file: '${fileName}'.`);
    }
};
|
|
46
|
+
exports.loadMigrationFile = loadMigrationFile;
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
-- Bookkeeping table: one row per migration that has been applied.
CREATE TABLE IF NOT EXISTS migrations (
  id integer PRIMARY KEY,
  name varchar(100) UNIQUE NOT NULL,
  hash varchar(40) NOT NULL, -- sha1 hex encoded hash of the file name and contents, to ensure it hasn't been altered since applying the migration
  executed_at timestamp DEFAULT current_timestamp
);
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.runMigration = void 0;
|
|
4
|
+
const sql_template_strings_1 = require("sql-template-strings");
|
|
5
|
// Shared no-op used when logging or transaction control is disabled.
const noop = () => {
    //
};
/** Record a successfully applied migration in the migrations table. */
const insertMigration = async (migrationTableName, client, migration, log) => {
    log(`Saving migration to '${migrationTableName}': ${migration.id} | ${migration.name} | ${migration.hash}`);
    const sql = sql_template_strings_1.default `INSERT INTO `
        .append(migrationTableName)
        .append(sql_template_strings_1.default ` ("id", "name", "hash") VALUES (${migration.id},${migration.name},${migration.hash})`);
    return client.query(sql);
};
/**
 * Run one migration and record it, inside a transaction unless the file
 * opts out with a "-- postgres-migrations disable-transaction" marker.
 */
const runMigration = (migrationTableName, client, log = noop) => async (migration) => {
    const inTransaction = !migration.sql.includes("-- postgres-migrations disable-transaction");
    log(`Running migration in transaction: ${inTransaction}`);
    const begin = inTransaction ? () => client.query("START TRANSACTION") : noop;
    const end = inTransaction ? () => client.query("COMMIT") : noop;
    const cleanup = inTransaction ? () => client.query("ROLLBACK") : noop;
    try {
        await begin();
        await client.query(migration.sql);
        await insertMigration(migrationTableName, client, migration, log);
        await end();
        return migration;
    }
    catch (err) {
        try {
            await cleanup();
        }
        catch (_a) {
            // Rollback failure is ignored; the original error is what matters.
        }
        throw new Error(`An error occurred running '${migration.name}'. Rolled back this migration. No further migrations were run. Reason: ${err.message}`);
    }
};
|
|
39
|
+
exports.runMigration = runMigration;
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
import * as pg from "pg";
/** A migration file after parsing and loading. */
export interface Migration {
    readonly id: number;
    readonly name: string;
    /** Raw file contents as read from disk. */
    readonly contents: string;
    readonly fileName: string;
    /** sha1 hex digest of file name + SQL, used to detect edited migrations. */
    readonly hash: string;
    /** The SQL to execute: file contents, or the output of a JS file's generateSql. */
    readonly sql: string;
}
export interface ConnectionParams {
    readonly user: string;
    readonly password: string;
    readonly host: string;
    readonly port: number;
}
export interface ClientParams {
    /** A connected Client, or a Pool Client. The caller is responsible for connecting and cleaning up. */
    readonly client: pg.Client | pg.PoolClient | pg.Pool;
}
export declare type EnsureDatabase = {
    /**
     * Might default to `true` in future versions
     * @default false
     */
    readonly ensureDatabaseExists: true;
    /**
     * The database to connect to when creating a database (if necessary).
     * @default postgres
     */
    readonly defaultDatabase?: string;
} | {
    readonly ensureDatabaseExists?: false;
};
/**
 * @deprecated Use `migrate` instead with `ensureDatabaseExists: true`.
 */
export declare type CreateDBConfig = (ConnectionParams & {
    /** The database to connect to when creating the new database. */
    readonly defaultDatabase?: string;
}) | ClientParams;
export declare type MigrateDBConfig = (ConnectionParams & {
    readonly database: string;
} & EnsureDatabase) | ClientParams;
export declare type Logger = (msg: string) => void;
export declare type Config = Partial<FullConfig>;
export interface FullConfig {
    readonly logger: Logger;
    /** Schema holding the migrations table. */
    readonly schema: string;
}
export declare class MigrationError extends Error {
    /* NOTE(review): dist/migrate.js assigns the original Error object to `cause`,
       not a string — the declared `string` type looks wrong; confirm before relying on it. */
    cause?: string;
}
export declare type FileType = "sql" | "js";
export interface BasicPgClient {
    query(queryTextOrConfig: string | pg.QueryConfig): Promise<pg.QueryResult>;
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import { Migration } from "./types";
|
|
2
|
+
/** Assert migration IDs are consecutive integers */
|
|
3
|
+
export declare function validateMigrationOrdering(migrations: Array<Migration>): void;
|
|
4
|
+
/** Assert hashes match */
|
|
5
|
+
export declare function validateMigrationHashes(migrations: Array<Migration>, appliedMigrations: Record<number, Migration | undefined>): void;
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.validateMigrationHashes = exports.validateMigrationOrdering = void 0;
|
|
4
|
/** Assert migration IDs are consecutive integers */
function validateMigrationOrdering(migrations) {
    // IDs must equal their position in the (id-sorted) list, i.e. 0, 1, 2, ...
    const mismatch = migrations.find((migration, index) => migration.id !== index);
    if (mismatch) {
        throw new Error(`Found a non-consecutive migration ID on file: '${mismatch.fileName}'`);
    }
}
|
|
12
|
+
exports.validateMigrationOrdering = validateMigrationOrdering;
|
|
13
|
/** Assert hashes match */
function validateMigrationHashes(migrations, appliedMigrations) {
    // A migration is invalid when it was already applied with a different hash,
    // i.e. the file changed after it ran.
    const changedSinceApplied = migrations.filter((migration) => {
        const applied = appliedMigrations[migration.id];
        return applied != null && applied.hash !== migration.hash;
    });
    if (changedSinceApplied.length > 0) {
        const invalidFiles = changedSinceApplied.map(({ fileName }) => fileName);
        throw new Error(`Hashes don't match for migrations '${invalidFiles}'.
This means that the scripts have changed since it was applied.`);
    }
}
|
|
28
|
+
exports.validateMigrationHashes = validateMigrationHashes;
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.withConnection = void 0;
|
|
4
|
+
function withConnection(log, f) {
|
|
5
|
+
return async (client) => {
|
|
6
|
+
try {
|
|
7
|
+
try {
|
|
8
|
+
log("Connecting to database...");
|
|
9
|
+
await client.connect();
|
|
10
|
+
log("... connected to database");
|
|
11
|
+
}
|
|
12
|
+
catch (e) {
|
|
13
|
+
log(`Error connecting to database: ${e.message}`);
|
|
14
|
+
throw e;
|
|
15
|
+
}
|
|
16
|
+
const result = await f(client);
|
|
17
|
+
return result;
|
|
18
|
+
}
|
|
19
|
+
catch (e) {
|
|
20
|
+
log(`Error using connection: ${e.message}`);
|
|
21
|
+
throw e;
|
|
22
|
+
}
|
|
23
|
+
finally {
|
|
24
|
+
// always try to close the connection
|
|
25
|
+
try {
|
|
26
|
+
log("Closing connection...");
|
|
27
|
+
await client.end();
|
|
28
|
+
log("... connection closed");
|
|
29
|
+
}
|
|
30
|
+
catch (e) {
|
|
31
|
+
log(`Error closing the connection: ${e.message}`);
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
};
|
|
35
|
+
}
|
|
36
|
+
exports.withConnection = withConnection;
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.withAdvisoryLock = void 0;
|
|
4
|
+
function withAdvisoryLock(log, f) {
|
|
5
|
+
return async (client) => {
|
|
6
|
+
try {
|
|
7
|
+
try {
|
|
8
|
+
log("Acquiring advisory lock...");
|
|
9
|
+
let acquired = false;
|
|
10
|
+
while (!acquired) {
|
|
11
|
+
const lockResult = await client.query("SELECT pg_try_advisory_lock(-8525285245963000605);");
|
|
12
|
+
if (lockResult.rows[0].pg_try_advisory_lock === true) {
|
|
13
|
+
acquired = true;
|
|
14
|
+
}
|
|
15
|
+
else {
|
|
16
|
+
await new Promise((res) => setTimeout(res, 1000));
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
log("... acquired advisory lock");
|
|
20
|
+
}
|
|
21
|
+
catch (e) {
|
|
22
|
+
log(`Error acquiring advisory lock: ${e.message}`);
|
|
23
|
+
throw e;
|
|
24
|
+
}
|
|
25
|
+
const result = await f(client);
|
|
26
|
+
return result;
|
|
27
|
+
}
|
|
28
|
+
catch (e) {
|
|
29
|
+
log(`Error while using lock: ${e.message}`);
|
|
30
|
+
throw e;
|
|
31
|
+
}
|
|
32
|
+
finally {
|
|
33
|
+
try {
|
|
34
|
+
log("Releasing advisory lock...");
|
|
35
|
+
await client.query("SELECT pg_advisory_unlock(-8525285245963000605);");
|
|
36
|
+
log("... released advisory lock");
|
|
37
|
+
}
|
|
38
|
+
catch (e) {
|
|
39
|
+
log(`Error releasing advisory lock: ${e.message}`);
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
};
|
|
43
|
+
}
|
|
44
|
+
exports.withAdvisoryLock = withAdvisoryLock;
|
package/package.json
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "postgres-schema-migrations",
|
|
3
|
+
"version": "6.0.0",
|
|
4
|
+
"description": "Stack Overflow style database migrations for PostgreSQL",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"bin": {
|
|
8
|
+
"pg-validate-migrations": "./dist/bin/validate.js"
|
|
9
|
+
},
|
|
10
|
+
"authors": ["Thom Wright", "Zak Patterson"],
|
|
11
|
+
"keywords": [
|
|
12
|
+
"postgres",
|
|
13
|
+
"postgresql",
|
|
14
|
+
"migration",
|
|
15
|
+
"migrations",
|
|
16
|
+
"sql",
|
|
17
|
+
"database",
|
|
18
|
+
"db"
|
|
19
|
+
],
|
|
20
|
+
"homepage": "https://github.com/zakpatterson/postgres-schema-migrations#readme",
|
|
21
|
+
"license": "MIT",
|
|
22
|
+
"repository": {
|
|
23
|
+
"type": "git",
|
|
24
|
+
"url": "git@github.com:zakpatterson/postgres-schema-migrations.git"
|
|
25
|
+
},
|
|
26
|
+
"bugs": {
|
|
27
|
+
"url": "https://github.com/zakpatterson/postgres-schema-migrations/issues"
|
|
28
|
+
},
|
|
29
|
+
"engines": {
|
|
30
|
+
"node": ">10.17.0"
|
|
31
|
+
},
|
|
32
|
+
"scripts": {
|
|
33
|
+
"checkPushed": "[ \"$(git rev-list --count @{upstream}..HEAD)\" -eq 0 ] || (echo You have unpushed commits && exit 1)",
|
|
34
|
+
"prepublishOnly": "npm run checkPushed && npm test && npm run build",
|
|
35
|
+
"check-formatting": "./node_modules/.bin/prettier '**/*.ts' --list-different",
|
|
36
|
+
"fix-formatting": "./node_modules/.bin/prettier '**/*.ts' --write",
|
|
37
|
+
"lint": "npm run tslint && npm run check-formatting",
|
|
38
|
+
"tslint": "tslint 'src/**/*.ts' --type-check --project tsconfig.json --format verbose",
|
|
39
|
+
"test-integration": "ava --config ava.config.integration.cjs",
|
|
40
|
+
"test-unit": "ava --config ava.config.unit.cjs",
|
|
41
|
+
"test": "npm run test-unit && npm run lint && npm run test-integration",
|
|
42
|
+
"preversion": "npm test",
|
|
43
|
+
"build": "rm -rf ./dist && rsync -a --exclude='*.ts' --exclude='__tests__' --exclude='__unit__' --prune-empty-dirs src/ dist/ && tsc --project tsconfig-build.json"
|
|
44
|
+
},
|
|
45
|
+
"husky": {
|
|
46
|
+
"hooks": {
|
|
47
|
+
"commit-msg": "node ./node_modules/fit-commit-js/lib/hook.js .git/COMMIT_EDITMSG",
|
|
48
|
+
"pre-commit": "npm run lint",
|
|
49
|
+
"pre-push": "npm test"
|
|
50
|
+
}
|
|
51
|
+
},
|
|
52
|
+
"dependencies": {
|
|
53
|
+
"pg": "^8.6.0",
|
|
54
|
+
"sql-template-strings": "^2.2.2"
|
|
55
|
+
},
|
|
56
|
+
"devDependencies": {
|
|
57
|
+
"@types/node": "^10.17.60",
|
|
58
|
+
"@types/pg": "^8.6.0",
|
|
59
|
+
"@types/sinon": "^9.0.11",
|
|
60
|
+
"ava": "^3.15.0",
|
|
61
|
+
"fit-commit-js": "^0.3.2",
|
|
62
|
+
"husky": "^3.1.0",
|
|
63
|
+
"prettier": "^2.3.1",
|
|
64
|
+
"sinon": "^9.2.4",
|
|
65
|
+
"ts-node": "^10.0.0",
|
|
66
|
+
"tslint": "^6.1.3",
|
|
67
|
+
"tslint-config-prettier": "^1.18.0",
|
|
68
|
+
"typescript": "^4.3.4",
|
|
69
|
+
"typescript-tslint-plugin": "^1.0.1"
|
|
70
|
+
}
|
|
71
|
+
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"plugins": [
|
|
4
|
+
{"name": "typescript-tslint-plugin"}
|
|
5
|
+
],
|
|
6
|
+
"typeRoots": ["./typings", "./node_modules/@types"],
|
|
7
|
+
"outDir": "./dist",
|
|
8
|
+
"target": "es2017",
|
|
9
|
+
"declaration": true,
|
|
10
|
+
"module": "commonjs",
|
|
11
|
+
"lib": ["es2017"],
|
|
12
|
+
"noUnusedLocals": true,
|
|
13
|
+
"noErrorTruncation": true,
|
|
14
|
+
"pretty": true,
|
|
15
|
+
"strict": true
|
|
16
|
+
}
|
|
17
|
+
}
|
package/tsconfig.json
ADDED
package/tslint.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"defaultSeverity": "error",
|
|
3
|
+
"extends": ["tslint:recommended", "tslint-config-prettier"],
|
|
4
|
+
"rules": {
|
|
5
|
+
"interface-name": false,
|
|
6
|
+
"object-literal-sort-keys": false,
|
|
7
|
+
"ordered-imports": false,
|
|
8
|
+
"no-unused-expression": false,
|
|
9
|
+
"only-arrow-functions": false,
|
|
10
|
+
"array-type": [true, "generic"],
|
|
11
|
+
"no-any": {
|
|
12
|
+
"severity": "warning"
|
|
13
|
+
},
|
|
14
|
+
"no-var-requires": {
|
|
15
|
+
"severity": "warning"
|
|
16
|
+
},
|
|
17
|
+
"no-console": {
|
|
18
|
+
"severity": "warning"
|
|
19
|
+
},
|
|
20
|
+
"no-switch-case-fall-through": true,
|
|
21
|
+
"callable-types": {
|
|
22
|
+
"severity": "warning"
|
|
23
|
+
},
|
|
24
|
+
"strict-boolean-expressions": [
|
|
25
|
+
true,
|
|
26
|
+
"allow-null-union",
|
|
27
|
+
"allow-undefined-union"
|
|
28
|
+
],
|
|
29
|
+
"no-unnecessary-initializer": false,
|
|
30
|
+
"no-implicit-dependencies": [true, "dev"],
|
|
31
|
+
"no-return-await": true,
|
|
32
|
+
"no-floating-promises": true,
|
|
33
|
+
"no-string-throw": true,
|
|
34
|
+
"no-this-assignment": true,
|
|
35
|
+
"no-unnecessary-class": true,
|
|
36
|
+
"no-unsafe-finally": true,
|
|
37
|
+
"radix": true,
|
|
38
|
+
"switch-default": true,
|
|
39
|
+
"restrict-plus-operands": true,
|
|
40
|
+
"no-invalid-template-strings": true
|
|
41
|
+
}
|
|
42
|
+
}
|