stepwise-migrations 1.0.24 → 1.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/src/state.js CHANGED
@@ -15,7 +15,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
15
15
  exports.eventsToApplied = exports.getUndoFilename = exports.loadState = exports.validateMigrationFiles = void 0;
16
16
  const git_diff_1 = __importDefault(require("git-diff"));
17
17
  const path_1 = __importDefault(require("path"));
18
- const db_1 = require("./db");
19
18
  const utils_1 = require("./utils");
20
19
  const validateMigrationFiles = (state) => {
21
20
  let errors = [];
@@ -55,8 +54,8 @@ const validateMigrationFiles = (state) => {
55
54
  return errors;
56
55
  };
57
56
  exports.validateMigrationFiles = validateMigrationFiles;
58
- const loadState = (client, schema, migrationPath) => __awaiter(void 0, void 0, void 0, function* () {
59
- const events = yield (0, db_1.dbEventHistory)(client, schema);
57
+ const loadState = (client, migrationPath) => __awaiter(void 0, void 0, void 0, function* () {
58
+ const events = yield client.dbEventHistory();
60
59
  const { appliedVersionedMigrations, appliedRepeatableMigrations, errors: appliedErrors, } = (0, exports.eventsToApplied)(events);
61
60
  const { files: allFiles, errors: readFileErrors } = yield (0, utils_1.readMigrationFiles)(path_1.default.join(process.cwd(), migrationPath), appliedVersionedMigrations);
62
61
  const unappliedVersionedFiles = allFiles
@@ -66,7 +65,7 @@ const loadState = (client, schema, migrationPath) => __awaiter(void 0, void 0, v
66
65
  .filter((file) => file.type === "repeatable")
67
66
  .filter((file) => !appliedRepeatableMigrations.find((event) => event.filename === file.filename && event.script === file.script));
68
67
  return {
69
- schema,
68
+ schema: client.schema,
70
69
  current: {
71
70
  appliedVersionedMigrations,
72
71
  appliedRepeatableMigrations,
package/dist/src/utils.js CHANGED
@@ -16,7 +16,6 @@ exports.checkSchemaAndTable = exports.sliceFromFirstNull = exports.exitIfNotInit
16
16
  const promises_1 = __importDefault(require("fs/promises"));
17
17
  const git_diff_1 = __importDefault(require("git-diff"));
18
18
  const path_1 = __importDefault(require("path"));
19
- const db_1 = require("./db");
20
19
  exports.usage = `
21
20
  Usage: stepwise-migrations [command] [options]
22
21
 
@@ -35,41 +34,51 @@ Commands:
35
34
  Drop all tables, schema and stepwise_migration_events table
36
35
  get-applied-script
37
36
  Get the script for the last applied migration
37
+ baseline
38
+ Without applying any migrations, set the migration table state to a specific version
38
39
 
39
40
  Options:
40
41
  --connection <connection> The connection string to use to connect to the database
41
- --schema <schema> The schema to use for the migrations
42
+ --schema <schema> The schema to use for the migrations (default: public)
42
43
  --path <path> The path to the migrations directory
43
44
  --ssl true/false Whether to use SSL for the connection (default: false)
44
45
  --napply Number of up migrations to apply (default: all)
45
46
  --nundo Number of undo migrations to apply (default: 1)
46
- --filename The filename to get the script for (default: last applied migration)
47
+ --filename (get-applied-script) The filename to get the script for (default: last applied migration)
48
+ --filename (baseline) The filename to baseline (default: last unapplied versioned migration)
47
49
 
48
50
  Example:
49
- npx stepwise-migrations migrate \\
50
- --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydatabase \\
51
- --schema=myschema \\
51
+ npx stepwise-migrations migrate \
52
+ --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydatabase \
53
+ --schema=myschema \
52
54
  --path=./test/migrations-template/
53
55
  `;
54
56
  const parseArgs = (argv) => {
55
- var _a;
57
+ var _a, _b;
56
58
  const schema = (_a = argv.schema) !== null && _a !== void 0 ? _a : "public";
57
59
  const command = argv._[0];
58
60
  const napply = argv.napply || Infinity;
59
61
  const nundo = argv.nundo || 1;
60
62
  const filePath = argv.path;
61
- return { schema, command, napply, nundo, filePath };
63
+ const connection = argv.connection;
64
+ const ssl = (_b = argv.ssl) !== null && _b !== void 0 ? _b : "false";
65
+ const filename = argv.filename;
66
+ return {
67
+ schema,
68
+ command,
69
+ napply,
70
+ nundo,
71
+ filePath,
72
+ connection,
73
+ ssl,
74
+ filename,
75
+ };
62
76
  };
63
77
  exports.parseArgs = parseArgs;
64
- const validateArgs = (argv) => {
65
- const required = ["connection", "path", "_"];
66
- if (required.some((key) => !(key in argv))) {
67
- console.error("Missing required arguments", required.filter((key) => !(key in argv)));
68
- console.log(exports.usage);
69
- process.exit(1);
70
- }
71
- if (argv._.length !== 1) {
72
- console.error(`Invalid number of arguments: ${argv._.length}`);
78
+ const validateArgs = (args) => {
79
+ const required = ["connection", "filePath", "command"];
80
+ if (required.some((key) => !(key in args))) {
81
+ console.error("Missing required arguments", required.filter((key) => !(key in args)));
73
82
  console.log(exports.usage);
74
83
  process.exit(1);
75
84
  }
@@ -205,9 +214,9 @@ const sliceFromFirstNull = (array) => {
205
214
  : array.slice(0, indexOfFirstNull);
206
215
  };
207
216
  exports.sliceFromFirstNull = sliceFromFirstNull;
208
- const checkSchemaAndTable = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
209
- const schemaExists = yield (0, db_1.dbSchemaExists)(client, schema);
210
- const tableExists = yield (0, db_1.dbTableExists)(client, schema);
217
+ const checkSchemaAndTable = (client) => __awaiter(void 0, void 0, void 0, function* () {
218
+ const schemaExists = yield client.dbSchemaExists();
219
+ const tableExists = yield client.dbTableExists();
211
220
  return { schemaExists, tableExists };
212
221
  });
213
222
  exports.checkSchemaAndTable = checkSchemaAndTable;
@@ -12,7 +12,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
12
12
  return (mod && mod.__esModule) ? mod : { "default": mod };
13
13
  };
14
14
  Object.defineProperty(exports, "__esModule", { value: true });
15
- const node_assert_1 = __importDefault(require("node:assert"));
16
15
  const node_fs_1 = __importDefault(require("node:fs"));
17
16
  const node_test_1 = require("node:test");
18
17
  const utils_1 = require("./utils");
@@ -29,16 +28,24 @@ const executeCommand = (command, path = "", extraArgs = "") => (0, utils_1.execu
29
28
  `);
30
29
  (0, node_test_1.describe)("valid migrations", () => __awaiter(void 0, void 0, void 0, function* () {
31
30
  (0, node_test_1.beforeEach)(() => __awaiter(void 0, void 0, void 0, function* () {
32
- const { output, error, exitCode } = yield executeCommand("drop", "");
33
- node_assert_1.default.ok(output.includes("Dropping the tables, schema and migration history table... done!"));
34
- node_assert_1.default.ok(exitCode === 0);
31
+ (0, utils_1.assertIncludesAll)(yield executeCommand("drop", ""), [
32
+ "Dropping the tables, schema and migration history table... done!",
33
+ ]);
35
34
  node_fs_1.default.rmSync(paths.valid, { recursive: true, force: true });
36
35
  node_fs_1.default.cpSync("./test/migrations-template", paths.valid, {
37
36
  recursive: true,
38
37
  });
39
38
  }));
40
39
  (0, node_test_1.it)("migrate without params", () => __awaiter(void 0, void 0, void 0, function* () {
41
- (0, utils_1.assertIncludesAll)(yield (0, utils_1.execute)("npm exec stepwise-migrations"), ["Usage"]);
40
+ (0, utils_1.assertIncludesAll)(yield (0, utils_1.execute)("npm exec stepwise-migrations"), [
41
+ "stepwise-migrations <command>",
42
+ ]);
43
+ }));
44
+ (0, node_test_1.it)("baseline", () => __awaiter(void 0, void 0, void 0, function* () {
45
+ (0, utils_1.assertIncludesAll)(yield executeCommand("baseline", paths.valid), [
46
+ "All done! (Shadow)-applied 3 migrations to baseline to v3_third.sql",
47
+ ]);
48
+ (0, utils_1.assertIncludesExcludesAll)(yield executeCommand("status"), ["v1_first.sql", "v2_second.sql", "v3_third.sql"], ["v0_get_number.repeatable.sql"]);
42
49
  }));
43
50
  (0, node_test_1.it)("migrate one versioned and undo, redo, undo", () => __awaiter(void 0, void 0, void 0, function* () {
44
51
  (0, utils_1.assertIncludesAll)(yield executeCommand("migrate", paths.valid), [
@@ -100,22 +107,22 @@ const executeCommand = (command, path = "", extraArgs = "") => (0, utils_1.execu
100
107
  ]);
101
108
  }));
102
109
  }));
103
- (0, node_test_1.describe)("invalid migrations", () => __awaiter(void 0, void 0, void 0, function* () {
110
+ node_test_1.describe.only("invalid migrations", () => __awaiter(void 0, void 0, void 0, function* () {
104
111
  (0, node_test_1.beforeEach)(() => __awaiter(void 0, void 0, void 0, function* () {
105
- const { output, error, exitCode } = yield executeCommand("drop", "");
106
- node_assert_1.default.ok(output.includes("Dropping the tables, schema and migration history table... done!"));
107
- node_assert_1.default.ok(exitCode === 0);
112
+ (0, utils_1.assertIncludesAll)(yield executeCommand("drop", ""), [
113
+ "Dropping the tables, schema and migration history table... done!",
114
+ ]);
108
115
  node_fs_1.default.rmSync(paths.invalid, { recursive: true, force: true });
109
116
  node_fs_1.default.cpSync("./test/migrations-template", paths.invalid, {
110
117
  recursive: true,
111
118
  });
112
119
  }));
113
- (0, node_test_1.it)("missing undo migration", () => __awaiter(void 0, void 0, void 0, function* () {
120
+ node_test_1.it.only("missing undo migration", () => __awaiter(void 0, void 0, void 0, function* () {
114
121
  (0, utils_1.assertIncludesAll)(yield executeCommand("migrate", paths.invalid), [
115
122
  "All done!",
116
123
  ]);
117
124
  node_fs_1.default.unlinkSync("./test/migrations-invalid/v3_third.undo.sql");
118
- (0, utils_1.assertIncludesAll)(yield executeCommand("undo", paths.invalid, "--nundos=2"), ["Error: not enough sequential (from last) undo migrations to apply"]);
125
+ (0, utils_1.assertIncludesAll)(yield executeCommand("undo", paths.invalid, "--nundo=2"), ["Error: not enough sequential (from last) undo migrations to apply"]);
119
126
  }));
120
127
  (0, node_test_1.it)("alter migration", () => __awaiter(void 0, void 0, void 0, function* () {
121
128
  (0, utils_1.assertIncludesAll)(yield executeCommand("migrate", paths.invalid), [
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "stepwise-migrations",
3
- "version": "1.0.24",
3
+ "version": "1.0.27",
4
4
  "description": "A JavaScript CLI tool for managing Raw SQL migrations in a Postgres database. Loosely based on flyway.",
5
5
  "main": "./dist/src/index.js",
6
6
  "scripts": {
@@ -18,6 +18,7 @@
18
18
  "license": "MIT",
19
19
  "devDependencies": {
20
20
  "@types/git-diff": "^2.0.7",
21
+ "@types/mysql": "^2.15.26",
21
22
  "@types/pg": "^8.11.10",
22
23
  "@types/yargs": "^17.0.33",
23
24
  "glob": "^11.0.0",
@@ -28,6 +29,7 @@
28
29
  },
29
30
  "dependencies": {
30
31
  "git-diff": "^2.0.6",
32
+ "mysql": "^2.18.1",
31
33
  "pg": "^8.13.1",
32
34
  "sqlite": "^5.1.1",
33
35
  "yargs": "^17.7.2",
package/src/commands.ts CHANGED
@@ -1,38 +1,31 @@
1
- import { PoolClient } from "pg";
2
- import {
3
- applyMigration,
4
- applyUndoMigration,
5
- dbCreateEventsTable,
6
- dbCreateSchema,
7
- dbDropAll,
8
- dbGetAppliedScript,
9
- } from "./db";
1
+ import assert from "node:assert";
2
+ import { DbClient, dbConnect } from "./db";
10
3
  import { getUndoFilename, loadState } from "./state";
11
4
  import {
12
5
  abortIfErrors,
6
+ Args,
13
7
  checkSchemaAndTable,
14
8
  exitIfNotInitialized,
15
- parseArgs,
16
9
  printMigrationHistory,
17
10
  printMigrationHistoryAndUnappliedMigrations,
18
11
  sliceFromFirstNull,
19
12
  } from "./utils";
20
13
 
21
- export const migrateCommand = async (client: PoolClient, argv: any) => {
22
- const { schema, napply, filePath } = parseArgs(argv);
23
- const { schemaExists, tableExists } = await checkSchemaAndTable(
24
- client,
25
- schema
26
- );
27
-
14
+ export const ensureTableInitialised = async (client: DbClient) => {
15
+ const { schemaExists, tableExists } = await checkSchemaAndTable(client);
28
16
  if (!schemaExists) {
29
- await dbCreateSchema(client, schema);
17
+ await client.dbCreateSchema();
30
18
  }
31
19
  if (!tableExists) {
32
- await dbCreateEventsTable(client, schema);
20
+ await client.dbCreateEventsTable();
33
21
  }
22
+ };
34
23
 
35
- const state = await loadState(client, schema, filePath);
24
+ export const migrateCommand = async (args: Args) => {
25
+ const { napply, filePath } = args;
26
+ const client = await dbConnect(args);
27
+ await ensureTableInitialised(client);
28
+ const state = await loadState(client, filePath);
36
29
 
37
30
  abortIfErrors(state);
38
31
 
@@ -50,7 +43,7 @@ export const migrateCommand = async (client: PoolClient, argv: any) => {
50
43
  ].slice(0, napply);
51
44
 
52
45
  for (const migration of migrationsToApply) {
53
- await applyMigration(client, schema, migration);
46
+ await client.dbApplyMigration(migration);
54
47
  }
55
48
 
56
49
  console.log(
@@ -60,16 +53,14 @@ export const migrateCommand = async (client: PoolClient, argv: any) => {
60
53
  );
61
54
 
62
55
  printMigrationHistoryAndUnappliedMigrations(
63
- await loadState(client, schema, filePath)
56
+ await loadState(client, filePath)
64
57
  );
65
58
  };
66
59
 
67
- export const infoCommand = async (client: PoolClient, argv: any) => {
68
- const { schema, filePath } = parseArgs(argv);
69
- const { schemaExists, tableExists } = await checkSchemaAndTable(
70
- client,
71
- schema
72
- );
60
+ export const infoCommand = async (args: Args) => {
61
+ const { connection, schema, filePath } = args;
62
+ const client = await dbConnect(args);
63
+ const { schemaExists, tableExists } = await checkSchemaAndTable(client);
73
64
 
74
65
  if (!schemaExists) {
75
66
  console.log("Schema does not exist");
@@ -83,17 +74,15 @@ export const infoCommand = async (client: PoolClient, argv: any) => {
83
74
 
84
75
  if (schemaExists && tableExists) {
85
76
  printMigrationHistoryAndUnappliedMigrations(
86
- await loadState(client, schema, filePath)
77
+ await loadState(client, filePath)
87
78
  );
88
79
  }
89
80
  };
90
81
 
91
- export const statusCommand = async (client: PoolClient, argv: any) => {
92
- const { schema, filePath } = parseArgs(argv);
93
- const { schemaExists, tableExists } = await checkSchemaAndTable(
94
- client,
95
- schema
96
- );
82
+ export const statusCommand = async (args: Args) => {
83
+ const { connection, schema, filePath } = args;
84
+ const client = await dbConnect(args);
85
+ const { schemaExists, tableExists } = await checkSchemaAndTable(client);
97
86
  if (!schemaExists) {
98
87
  console.log("Schema does not exist");
99
88
  }
@@ -105,19 +94,17 @@ export const statusCommand = async (client: PoolClient, argv: any) => {
105
94
  }
106
95
 
107
96
  if (schemaExists && tableExists) {
108
- printMigrationHistory(await loadState(client, schema, filePath));
97
+ printMigrationHistory(await loadState(client, filePath));
109
98
  }
110
99
  };
111
100
 
112
- export const validateCommand = async (client: PoolClient, argv: any) => {
113
- const { schema } = parseArgs(argv);
114
- const { schemaExists, tableExists } = await checkSchemaAndTable(
115
- client,
116
- schema
117
- );
101
+ export const validateCommand = async (args: Args) => {
102
+ const { connection, schema, filePath } = args;
103
+ const client = await dbConnect(args);
104
+ const { schemaExists, tableExists } = await checkSchemaAndTable(client);
118
105
  exitIfNotInitialized(schemaExists, tableExists);
119
106
 
120
- const state = await loadState(client, schema, argv.path);
107
+ const state = await loadState(client, filePath);
121
108
  if (schemaExists && tableExists) {
122
109
  abortIfErrors(state);
123
110
  }
@@ -126,18 +113,20 @@ export const validateCommand = async (client: PoolClient, argv: any) => {
126
113
  printMigrationHistoryAndUnappliedMigrations(state);
127
114
  };
128
115
 
129
- export const dropCommand = async (client: PoolClient, argv: any) => {
130
- const { schema } = parseArgs(argv);
116
+ export const dropCommand = async (args: Args) => {
117
+ const { connection, schema } = args;
118
+ const client = await dbConnect(args);
131
119
  process.stdout.write(
132
120
  `Dropping the tables, schema and migration history table... `
133
121
  );
134
- await dbDropAll(client, schema);
122
+ await client.dbDropAll();
135
123
  console.log(`done!`);
136
124
  };
137
125
 
138
- export const undoCommand = async (client: PoolClient, argv: any) => {
139
- const { schema, nundo, filePath } = parseArgs(argv);
140
- const state = await loadState(client, schema, filePath);
126
+ export const undoCommand = async (args: Args) => {
127
+ const { connection, schema, filePath, nundo } = args;
128
+ const client = await dbConnect(args);
129
+ const state = await loadState(client, filePath);
141
130
 
142
131
  abortIfErrors(state);
143
132
 
@@ -160,7 +149,7 @@ export const undoCommand = async (client: PoolClient, argv: any) => {
160
149
  }
161
150
 
162
151
  for (const { filename, script } of undosToApply) {
163
- await applyUndoMigration(client, schema, filename, script);
152
+ await client.dbApplyUndoMigration(filename, script);
164
153
  }
165
154
  console.log(
166
155
  `All done! Performed ${undosToApply.length} undo migration${
@@ -171,16 +160,14 @@ export const undoCommand = async (client: PoolClient, argv: any) => {
171
160
  printMigrationHistoryAndUnappliedMigrations(state);
172
161
  };
173
162
 
174
- export const auditCommand = async (client: PoolClient, argv: any) => {
175
- const { schema } = parseArgs(argv);
176
- const { schemaExists, tableExists } = await checkSchemaAndTable(
177
- client,
178
- schema
179
- );
163
+ export const auditCommand = async (args: Args) => {
164
+ const { connection, schema, filePath } = args;
165
+ const client = await dbConnect(args);
166
+ const { schemaExists, tableExists } = await checkSchemaAndTable(client);
180
167
 
181
168
  exitIfNotInitialized(schemaExists, tableExists);
182
169
 
183
- const state = await loadState(client, schema, argv.path);
170
+ const state = await loadState(client, filePath);
184
171
  console.log("Event history:");
185
172
  console.table(
186
173
  state.events.map((row) => ({
@@ -193,25 +180,64 @@ export const auditCommand = async (client: PoolClient, argv: any) => {
193
180
  );
194
181
  };
195
182
 
196
- export const getAppliedScriptCommand = async (
197
- client: PoolClient,
198
- argv: any
199
- ) => {
200
- const { schema } = parseArgs(argv);
201
- const { schemaExists, tableExists } = await checkSchemaAndTable(
202
- client,
203
- schema
204
- );
183
+ export const getAppliedScriptCommand = async (args: Args) => {
184
+ const { connection, schema, filePath, filename } = args;
185
+ assert.ok(filename, "filename is required for this command");
186
+ const client = await dbConnect(args);
187
+ const { schemaExists, tableExists } = await checkSchemaAndTable(client);
205
188
 
206
189
  exitIfNotInitialized(schemaExists, tableExists);
207
190
 
208
- const state = await loadState(client, schema, argv.path);
209
- const script = await dbGetAppliedScript(state, argv.filename);
191
+ const state = await loadState(client, filePath);
192
+ const script = await client.dbGetAppliedScript(state, filename);
210
193
  if (script) {
211
194
  console.log(script);
212
195
  } else {
213
196
  console.error(
214
- `Script for ${argv.filename} not found, use the audit command to check all applied migrations`
197
+ `Script for ${filename} not found, use the audit command to check all applied migrations`
215
198
  );
216
199
  }
217
200
  };
201
+
202
+ export const baselineCommand = async (args: Args) => {
203
+ const { connection, schema, filePath, filename: argvFilename } = args;
204
+ const client = await dbConnect(args);
205
+ await ensureTableInitialised(client);
206
+
207
+ const state = await loadState(client, filePath);
208
+
209
+ if (state.files.unappliedVersionedFiles.length === 0) {
210
+ console.error("Error: No unapplied versioned migrations, aborting.");
211
+ process.exit(1);
212
+ }
213
+
214
+ const filename =
215
+ argvFilename ??
216
+ state.files.unappliedVersionedFiles[
217
+ state.files.unappliedVersionedFiles.length - 1
218
+ ].filename;
219
+
220
+ if (
221
+ !state.files.unappliedVersionedFiles.find(
222
+ (file) => file.filename === filename
223
+ )
224
+ ) {
225
+ console.error(
226
+ `Error: '${filename}' is not an unapplied versioned migration, aborting.`
227
+ );
228
+ process.exit(1);
229
+ }
230
+
231
+ let appliedCount = 0;
232
+ for (const file of state.files.unappliedVersionedFiles) {
233
+ await client.dbBaseline(file);
234
+ appliedCount++;
235
+ if (file.filename === filename) {
236
+ break;
237
+ }
238
+ }
239
+
240
+ console.log(
241
+ `All done! (Shadow)-applied ${appliedCount} migrations to baseline to ${filename}`
242
+ );
243
+ };
@@ -0,0 +1,20 @@
1
+ import { Args } from "../utils";
2
+ import * as pg from "./pg";
3
+
4
+ export const dbConnect = async (args: Args) => {
5
+ if (args.connection.startsWith("postgresql://")) {
6
+ return await pg._dbConnect(args);
7
+ }
8
+ // else if (connection.startsWith("mysql://")) {
9
+ // const client = await mysql._dbConnect(argv);
10
+ // return {
11
+ // type: "mysql",
12
+ // client,
13
+ // ...mysql,
14
+ // };
15
+ // }
16
+
17
+ throw new Error("Invalid database connection string");
18
+ };
19
+
20
+ export type DbClient = Awaited<ReturnType<typeof dbConnect>>;
package/src/db/pg.ts ADDED
@@ -0,0 +1,189 @@
1
+ import pg, { Pool, PoolClient } from "pg";
2
+ import { EventRow, MigrationFile, MigrationState } from "../types";
3
+ import { Args } from "../utils";
4
+
5
+ pg.types.setTypeParser(1114, function (stringValue) {
6
+ return stringValue; //1114 for time without timezone type
7
+ });
8
+
9
+ pg.types.setTypeParser(1082, function (stringValue) {
10
+ return stringValue; //1082 for date type
11
+ });
12
+
13
+ export const _dbConnect = async ({ ssl, connection, schema }: Args) => {
14
+ const pool = new Pool({
15
+ connectionString: connection,
16
+ ssl: ssl === "true",
17
+ });
18
+
19
+ let client: PoolClient | undefined;
20
+ try {
21
+ client = await pool.connect();
22
+ await client.query("SELECT 1");
23
+ } catch (error) {
24
+ console.error("Failed to connect to the database", error);
25
+ process.exit(1);
26
+ }
27
+
28
+ const dbSchemaExists = async () => {
29
+ const result = await client.query(
30
+ `SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = '${schema}')`
31
+ );
32
+ return result.rows[0].exists;
33
+ };
34
+
35
+ const dbTableExists = async () => {
36
+ const tableExistsResult = await client.query(
37
+ `SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = 'stepwise_migration_events' and schemaname = '${schema}')`
38
+ );
39
+
40
+ return tableExistsResult.rows[0].exists;
41
+ };
42
+
43
+ const dbDropAll = async () => {
44
+ await client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
45
+ };
46
+
47
+ const dbCreateSchema = async () => {
48
+ process.stdout.write(`Creating schema ${schema}... `);
49
+ await client.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);
50
+ console.log(`done!`);
51
+ };
52
+
53
+ const dbEventHistory = async () => {
54
+ try {
55
+ const eventQuery = await client.query(
56
+ `SELECT * FROM ${schema}.stepwise_migration_events`
57
+ );
58
+ return eventQuery.rows.map((row) => EventRow.parse(row));
59
+ } catch (error) {
60
+ console.error("Error fetching event history", error);
61
+ process.exit(1);
62
+ }
63
+ };
64
+
65
+ const dbCreateEventsTable = async () => {
66
+ process.stdout.write(`Creating stepwise_migration_events table... `);
67
+ await client.query(
68
+ `
69
+ CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migration_events (
70
+ id SERIAL PRIMARY KEY,
71
+ type TEXT NOT NULL,
72
+ filename TEXT NOT NULL,
73
+ script TEXT NOT NULL,
74
+ applied_by TEXT NOT NULL DEFAULT current_user,
75
+ applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
76
+ );
77
+ `
78
+ );
79
+ console.log(`done!`);
80
+ };
81
+
82
+ const dbGetAppliedScript = async (
83
+ state: MigrationState,
84
+ filename: string
85
+ ) => {
86
+ return state.current.appliedVersionedMigrations
87
+ .concat(state.current.appliedRepeatableMigrations)
88
+ .find((file) => file.filename === filename)?.script;
89
+ };
90
+
91
+ const dbApplyMigration = async (migration: MigrationFile) => {
92
+ try {
93
+ process.stdout.write(
94
+ `Applying ${migration.type} migration ${migration.filename}... `
95
+ );
96
+ await client.query("BEGIN");
97
+
98
+ await client.query(
99
+ `SET search_path TO ${schema};
100
+ ${migration.script.toString()}`
101
+ );
102
+
103
+ await client.query(
104
+ `INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`,
105
+ [migration.type, migration.filename, migration.script]
106
+ );
107
+
108
+ await client.query("COMMIT");
109
+
110
+ console.log(`done!`);
111
+ } catch (error) {
112
+ try {
113
+ await client.query("ROLLBACK");
114
+ } catch (error) {
115
+ console.error("Error rolling back transaction", error);
116
+ }
117
+ console.error("Error applying migration", error);
118
+ process.exit(1);
119
+ }
120
+ };
121
+
122
+ const dbBaseline = async (migration: MigrationFile) => {
123
+ try {
124
+ process.stdout.write(`Baselining ${migration.filename}... `);
125
+ await client.query("BEGIN");
126
+
127
+ await client.query(
128
+ `INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`,
129
+ [migration.type, migration.filename, migration.script]
130
+ );
131
+
132
+ await client.query("COMMIT");
133
+
134
+ console.log(`done!`);
135
+ } catch (error) {
136
+ try {
137
+ await client.query("ROLLBACK");
138
+ } catch (error) {
139
+ console.error("Error rolling back transaction", error);
140
+ }
141
+ console.error("Error baselining migration", error);
142
+ process.exit(1);
143
+ }
144
+ };
145
+
146
+ const dbApplyUndoMigration = async (filename: string, script: string) => {
147
+ try {
148
+ process.stdout.write(`Applying undo migration ${filename}... `);
149
+ await client.query("BEGIN");
150
+
151
+ await client.query(
152
+ `SET search_path TO ${schema};
153
+ ${script.toString()}`
154
+ );
155
+
156
+ await client.query(
157
+ `INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`,
158
+ ["undo", filename, script]
159
+ );
160
+
161
+ await client.query("COMMIT");
162
+
163
+ console.log(`done!`);
164
+ } catch (error) {
165
+ try {
166
+ await client.query("ROLLBACK");
167
+ } catch (error) {
168
+ console.error("Error rolling back transaction", error);
169
+ }
170
+ console.error("Error applying undo migration", error);
171
+ process.exit(1);
172
+ }
173
+ };
174
+
175
+ return {
176
+ type: "pg",
177
+ schema,
178
+ dbSchemaExists,
179
+ dbTableExists,
180
+ dbDropAll,
181
+ dbCreateSchema,
182
+ dbEventHistory,
183
+ dbCreateEventsTable,
184
+ dbGetAppliedScript,
185
+ dbApplyMigration,
186
+ dbBaseline,
187
+ dbApplyUndoMigration,
188
+ };
189
+ };