stepwise-migrations 1.0.24 → 1.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +33 -2
- package/dist/src/commands.js +80 -38
- package/dist/src/db/index.js +61 -0
- package/dist/src/db/pg.js +190 -0
- package/dist/src/index.js +14 -13
- package/dist/src/state.js +3 -4
- package/dist/src/utils.js +29 -20
- package/dist/test/index.test.js +6 -0
- package/package.json +3 -1
- package/src/commands.ts +95 -69
- package/src/db/index.ts +20 -0
- package/src/db/pg.ts +189 -0
- package/src/index.ts +16 -14
- package/src/state.ts +4 -6
- package/src/utils.ts +41 -24
- package/test/index.test.ts +11 -0
- package/dist/src/db.js +0 -167
- package/dist/src/validate.js +0 -1
- package/src/db.ts +0 -163
package/src/state.ts
CHANGED
@@ -1,7 +1,6 @@
 import gitDiff from "git-diff";
 import path from "path";
-import {
-import { dbEventHistory } from "./db";
+import { DbClient } from "./db";
 import { AppliedMigration, MigrationState } from "./types";
 import { readMigrationFiles } from "./utils";
 
@@ -64,11 +63,10 @@ export const validateMigrationFiles = (state: MigrationState) => {
 };
 
 export const loadState = async (
-  client:
-  schema: string,
+  client: DbClient,
   migrationPath: string
 ): Promise<MigrationState> => {
-  const events = await dbEventHistory(
+  const events = await client.dbEventHistory();
   const {
     appliedVersionedMigrations,
     appliedRepeatableMigrations,
@@ -97,7 +95,7 @@ export const loadState = async (
   );
 
   return {
-    schema,
+    schema: client.schema,
     current: {
       appliedVersionedMigrations,
       appliedRepeatableMigrations,
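Note: the `DbClient` referenced above is defined in the new `src/db/index.ts` and `src/db/pg.ts` files, which are listed in the summary but not expanded in this diff. Judging only from the call sites visible in `state.ts` and `utils.ts`, the client now carries the schema and exposes the former free functions as methods. The following is a rough, unofficial sketch inferred from those call sites, not the published source; the `AppliedEvent` stand-in type is hypothetical.

```ts
// Hypothetical sketch only: the real definition lives in src/db/index.ts and
// src/db/pg.ts, which this diff does not show.

// Stand-in for the rows parsed from stepwise_migration_events
// (the package uses an EventRow schema in src/types.ts for this).
type AppliedEvent = {
  type: string;
  filename: string;
  script: string;
};

export interface DbClient {
  // read back in loadState as `schema: client.schema`
  schema: string;
  // replaces the old free function dbEventHistory(client, schema)
  dbEventHistory(): Promise<AppliedEvent[]>;
  // replace dbSchemaExists(client, schema) / dbTableExists(client, schema)
  dbSchemaExists(): Promise<boolean>;
  dbTableExists(): Promise<boolean>;
}
```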
package/src/utils.ts
CHANGED
@@ -1,8 +1,7 @@
 import fs from "fs/promises";
 import gitDiff from "git-diff";
 import path from "path";
-import {
-import { dbSchemaExists, dbTableExists } from "./db";
+import { DbClient } from "./db";
 import {
   AppliedMigration,
   MigrationFile,
@@ -28,48 +27,69 @@ Commands:
     Drop all tables, schema and stepwise_migration_events table
   get-applied-script
     Get the script for the last applied migration
+  baseline
+    Without applying any migrations, set the migration table state to a specific version
 
 Options:
   --connection <connection>  The connection string to use to connect to the database
-  --schema <schema>          The schema to use for the migrations
+  --schema <schema>          The schema to use for the migrations (default: public)
   --path <path>              The path to the migrations directory
   --ssl true/false           Whether to use SSL for the connection (default: false)
   --napply                   Number of up migrations to apply (default: all)
   --nundo                    Number of undo migrations to apply (default: 1)
-  --filename                 The filename to get the script for (default: last applied migration)
+  --filename                 (get-applied-script) The filename to get the script for (default: last applied migration)
+  --filename                 (baseline) The filename to baseline (default: last unapplied versioned migration)
 
 Example:
-  npx stepwise-migrations migrate
-    --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydatabase
-    --schema=myschema
+  npx stepwise-migrations migrate \
+    --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydatabase \
+    --schema=myschema \
     --path=./test/migrations-template/
 `;
 
-export
+export type Args = {
+  schema: string;
+  command: string;
+  napply: number;
+  nundo: number;
+  filePath: string;
+  connection: string;
+  filename?: string;
+  ssl: string;
+};
+
+export const parseArgs = (argv: any): Args => {
   const schema = argv.schema ?? "public";
   const command = argv._[0];
   const napply = argv.napply || Infinity;
   const nundo = argv.nundo || 1;
   const filePath = argv.path;
+  const connection = argv.connection;
+  const ssl = argv.ssl ?? "false";
+  const filename = argv.filename;
 
-  return {
+  return {
+    schema,
+    command,
+    napply,
+    nundo,
+    filePath,
+    connection,
+    ssl,
+    filename,
+  };
 };
 
-export const validateArgs = (
-  const required = ["connection", "
-  if (required.some((key) => !(key in
+export const validateArgs = (args: Args) => {
+  const required = ["connection", "filePath", "command"];
+  if (required.some((key) => !(key in args))) {
     console.error(
       "Missing required arguments",
-      required.filter((key) => !(key in
+      required.filter((key) => !(key in args))
    );
     console.log(usage);
     process.exit(1);
   }
-  if (argv._.length !== 1) {
-    console.error(`Invalid number of arguments: ${argv._.length}`);
-    console.log(usage);
-    process.exit(1);
-  }
 };
 
 export const filenameToType = (filename: string): MigrationType => {
@@ -239,11 +259,8 @@ export const sliceFromFirstNull = <T>(array: (T | undefined)[]): T[] => {
     : (array.slice(0, indexOfFirstNull) as T[]);
 };
 
-export const checkSchemaAndTable = async (
-  client
-
-) => {
-  const schemaExists = await dbSchemaExists(client, schema);
-  const tableExists = await dbTableExists(client, schema);
+export const checkSchemaAndTable = async (client: DbClient) => {
+  const schemaExists = await client.dbSchemaExists();
+  const tableExists = await client.dbTableExists();
   return { schemaExists, tableExists };
 };
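For context on the new `parseArgs`/`validateArgs` pair and the `baseline` command documented in the usage text above, here is a small illustrative example (not part of the package) of how a baseline invocation would flow through them, assuming a minimist/yargs-style `argv` object as implied by the `argv._[0]` usage.

```ts
import { parseArgs, validateArgs } from "./utils";

// Roughly what
//   npx stepwise-migrations baseline --connection=... --schema=myschema --path=./test/migrations-template/
// produces after CLI parsing (object shape assumed from the argv._[0] access above).
const argv = {
  _: ["baseline"],
  connection: "postgresql://postgres:postgres@127.0.0.1:5432/mydatabase",
  schema: "myschema",
  path: "./test/migrations-template/",
};

const args = parseArgs(argv);
validateArgs(args);
// args.command === "baseline"; napply and nundo fall back to Infinity and 1;
// ssl falls back to "false"; args.filename is undefined, so per the usage text
// baseline targets the last unapplied versioned migration.
```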
package/test/index.test.ts
CHANGED
@@ -41,6 +41,17 @@ describe("valid migrations", async () => {
     assertIncludesAll(await execute("npm exec stepwise-migrations"), ["Usage"]);
   });
 
+  it("baseline", async () => {
+    assertIncludesAll(await executeCommand("baseline", paths.valid), [
+      "All done! (Shadow)-applied 3 migrations to baseline to v3_third.sql",
+    ]);
+    assertIncludesExcludesAll(
+      await executeCommand("status"),
+      ["v1_first.sql", "v2_second.sql", "v3_third.sql"],
+      ["v0_get_number.repeatable.sql"]
+    );
+  });
+
   it("migrate one versioned and undo, redo, undo", async () => {
     assertIncludesAll(await executeCommand("migrate", paths.valid), [
       "All done! Applied 4 migrations",
package/dist/src/db.js
DELETED
@@ -1,167 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.applyUndoMigration = exports.applyMigration = exports.dbGetAppliedScript = exports.dbCreateEventsTable = exports.dbEventHistory = exports.dbCreateSchema = exports.dbDropAll = exports.dbTableExists = exports.dbSchemaExists = exports.dbConnect = void 0;
-const pg_1 = __importStar(require("pg"));
-const types_1 = require("./types");
-pg_1.default.types.setTypeParser(1114, function (stringValue) {
-    return stringValue; //1114 for time without timezone type
-});
-pg_1.default.types.setTypeParser(1082, function (stringValue) {
-    return stringValue; //1082 for date type
-});
-const dbConnect = (argv) => __awaiter(void 0, void 0, void 0, function* () {
-    const pool = new pg_1.Pool({
-        connectionString: argv.connection,
-        ssl: argv.ssl === "true",
-    });
-    let client;
-    try {
-        client = yield pool.connect();
-        yield client.query("SELECT 1");
-    }
-    catch (error) {
-        console.error("Failed to connect to the database", error);
-        process.exit(1);
-    }
-    return client;
-});
-exports.dbConnect = dbConnect;
-const dbSchemaExists = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
-    const result = yield client.query(`SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = '${schema}')`);
-    return result.rows[0].exists;
-});
-exports.dbSchemaExists = dbSchemaExists;
-const dbTableExists = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
-    const tableExistsResult = yield client.query(`SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = 'stepwise_migration_events' and schemaname = '${schema}')`);
-    return tableExistsResult.rows[0].exists;
-});
-exports.dbTableExists = dbTableExists;
-const dbDropAll = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
-    yield client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
-});
-exports.dbDropAll = dbDropAll;
-const dbCreateSchema = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
-    process.stdout.write(`Creating schema ${schema}... `);
-    yield client.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);
-    console.log(`done!`);
-});
-exports.dbCreateSchema = dbCreateSchema;
-const dbEventHistory = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
-    try {
-        const eventQuery = yield client.query(`SELECT * FROM ${schema}.stepwise_migration_events`);
-        return eventQuery.rows.map((row) => types_1.EventRow.parse(row));
-    }
-    catch (error) {
-        console.error("Error fetching event history", error);
-        process.exit(1);
-    }
-});
-exports.dbEventHistory = dbEventHistory;
-const dbCreateEventsTable = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
-    process.stdout.write(`Creating stepwise_migration_events table... `);
-    yield client.query(`
-  CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migration_events (
-    id SERIAL PRIMARY KEY,
-    type TEXT NOT NULL,
-    filename TEXT NOT NULL,
-    script TEXT NOT NULL,
-    applied_by TEXT NOT NULL DEFAULT current_user,
-    applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
-  );
-  `);
-    console.log(`done!`);
-});
-exports.dbCreateEventsTable = dbCreateEventsTable;
-const dbGetAppliedScript = (state, filename) => __awaiter(void 0, void 0, void 0, function* () {
-    var _a;
-    return (_a = state.current.appliedVersionedMigrations
-        .concat(state.current.appliedRepeatableMigrations)
-        .find((file) => file.filename === filename)) === null || _a === void 0 ? void 0 : _a.script;
-});
-exports.dbGetAppliedScript = dbGetAppliedScript;
-const applyMigration = (client, schema, migration) => __awaiter(void 0, void 0, void 0, function* () {
-    try {
-        process.stdout.write(`Applying ${migration.type} migration ${migration.filename}... `);
-        yield client.query("BEGIN");
-        yield client.query(`SET search_path TO ${schema};
-  ${migration.script.toString()}`);
-        yield client.query(`INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`, [migration.type, migration.filename, migration.script]);
-        yield client.query("COMMIT");
-        console.log(`done!`);
-    }
-    catch (error) {
-        try {
-            yield client.query("ROLLBACK");
-        }
-        catch (error) {
-            console.error("Error rolling back transaction", error);
-        }
-        console.error("Error applying migration", error);
-        process.exit(1);
-    }
-});
-exports.applyMigration = applyMigration;
-const applyUndoMigration = (client, schema, filename, script) => __awaiter(void 0, void 0, void 0, function* () {
-    try {
-        process.stdout.write(`Applying undo migration ${filename}... `);
-        yield client.query("BEGIN");
-        yield client.query(`SET search_path TO ${schema};
-  ${script.toString()}`);
-        yield client.query(`INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`, ["undo", filename, script]);
-        yield client.query("COMMIT");
-        console.log(`done!`);
-    }
-    catch (error) {
-        try {
-            yield client.query("ROLLBACK");
-        }
-        catch (error) {
-            console.error("Error rolling back transaction", error);
-        }
-        console.error("Error applying undo migration", error);
-        process.exit(1);
-    }
-});
-exports.applyUndoMigration = applyUndoMigration;
package/dist/src/validate.js
DELETED
@@ -1 +0,0 @@
-"use strict";
package/src/db.ts
DELETED
@@ -1,163 +0,0 @@
-import pg, { Pool, PoolClient } from "pg";
-import { EventRow, MigrationFile, MigrationState } from "./types";
-
-pg.types.setTypeParser(1114, function (stringValue) {
-  return stringValue; //1114 for time without timezone type
-});
-
-pg.types.setTypeParser(1082, function (stringValue) {
-  return stringValue; //1082 for date type
-});
-
-export const dbConnect = async (argv: { connection: string; ssl?: string }) => {
-  const pool = new Pool({
-    connectionString: argv.connection,
-    ssl: argv.ssl === "true",
-  });
-
-  let client: PoolClient | undefined;
-  try {
-    client = await pool.connect();
-    await client.query("SELECT 1");
-  } catch (error) {
-    console.error("Failed to connect to the database", error);
-    process.exit(1);
-  }
-
-  return client;
-};
-
-export const dbSchemaExists = async (client: PoolClient, schema: string) => {
-  const result = await client.query(
-    `SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = '${schema}')`
-  );
-  return result.rows[0].exists;
-};
-
-export const dbTableExists = async (client: PoolClient, schema: string) => {
-  const tableExistsResult = await client.query(
-    `SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = 'stepwise_migration_events' and schemaname = '${schema}')`
-  );
-
-  return tableExistsResult.rows[0].exists;
-};
-
-export const dbDropAll = async (client: PoolClient, schema: string) => {
-  await client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
-};
-
-export const dbCreateSchema = async (client: PoolClient, schema: string) => {
-  process.stdout.write(`Creating schema ${schema}... `);
-  await client.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);
-  console.log(`done!`);
-};
-
-export const dbEventHistory = async (client: PoolClient, schema: string) => {
-  try {
-    const eventQuery = await client.query(
-      `SELECT * FROM ${schema}.stepwise_migration_events`
-    );
-    return eventQuery.rows.map((row) => EventRow.parse(row));
-  } catch (error) {
-    console.error("Error fetching event history", error);
-    process.exit(1);
-  }
-};
-
-export const dbCreateEventsTable = async (
-  client: PoolClient,
-  schema: string
-) => {
-  process.stdout.write(`Creating stepwise_migration_events table... `);
-  await client.query(
-    `
-  CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migration_events (
-    id SERIAL PRIMARY KEY,
-    type TEXT NOT NULL,
-    filename TEXT NOT NULL,
-    script TEXT NOT NULL,
-    applied_by TEXT NOT NULL DEFAULT current_user,
-    applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
-  );
-  `
-  );
-  console.log(`done!`);
-};
-
-export const dbGetAppliedScript = async (
-  state: MigrationState,
-  filename: string
-) => {
-  return state.current.appliedVersionedMigrations
-    .concat(state.current.appliedRepeatableMigrations)
-    .find((file) => file.filename === filename)?.script;
-};
-
-export const applyMigration = async (
-  client: PoolClient,
-  schema: string,
-  migration: MigrationFile
-) => {
-  try {
-    process.stdout.write(
-      `Applying ${migration.type} migration ${migration.filename}... `
-    );
-    await client.query("BEGIN");
-
-    await client.query(
-      `SET search_path TO ${schema};
-  ${migration.script.toString()}`
-    );
-
-    await client.query(
-      `INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`,
-      [migration.type, migration.filename, migration.script]
-    );
-
-    await client.query("COMMIT");
-
-    console.log(`done!`);
-  } catch (error) {
-    try {
-      await client.query("ROLLBACK");
-    } catch (error) {
-      console.error("Error rolling back transaction", error);
-    }
-    console.error("Error applying migration", error);
-    process.exit(1);
-  }
-};
-
-export const applyUndoMigration = async (
-  client: PoolClient,
-  schema: string,
-  filename: string,
-  script: string
-) => {
-  try {
-    process.stdout.write(`Applying undo migration ${filename}... `);
-    await client.query("BEGIN");
-
-    await client.query(
-      `SET search_path TO ${schema};
-  ${script.toString()}`
-    );
-
-    await client.query(
-      `INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`,
-      ["undo", filename, script]
-    );
-
-    await client.query("COMMIT");
-
-    console.log(`done!`);
-  } catch (error) {
-    try {
-      await client.query("ROLLBACK");
-    } catch (error) {
-      console.error("Error rolling back transaction", error);
-    }
-    console.error("Error applying undo migration", error);
-    process.exit(1);
-  }
-};