stepwise-migrations 1.0.23 → 1.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -4
- package/dist/src/commands.js +80 -38
- package/dist/src/db/index.js +61 -0
- package/dist/src/db/pg.js +190 -0
- package/dist/src/index.js +14 -13
- package/dist/src/state.js +3 -4
- package/dist/src/utils.js +30 -20
- package/dist/test/index.test.js +6 -0
- package/package.json +3 -1
- package/src/commands.ts +95 -69
- package/src/db/index.ts +20 -0
- package/src/db/pg.ts +189 -0
- package/src/index.ts +16 -14
- package/src/state.ts +4 -6
- package/src/utils.ts +42 -25
- package/test/index.test.ts +11 -0
- package/dist/src/db.js +0 -167
- package/dist/src/validate.js +0 -1
- package/src/db.ts +0 -163
package/README.md
CHANGED
@@ -3,8 +3,8 @@
|
|
3
3
|
A JavaScript CLI tool for managing Raw SQL migrations in a Postgres database.
|
4
4
|
Loosely based on flyway.
|
5
5
|
|
6
|
-
|
7
|
-

|
7
|
+

|
8
8
|
|
9
9
|
## Table of Contents
|
10
10
|
|
@@ -79,15 +79,18 @@ Commands:
|
|
79
79
|
Drop all tables, schema and stepwise_migration_events table
|
80
80
|
get-applied-script
|
81
81
|
Get the script for the last applied migration
|
82
|
+
baseline
|
83
|
+
Without applying any migrations, set the migration table state to a specific version
|
82
84
|
|
83
85
|
Options:
|
84
86
|
--connection <connection> The connection string to use to connect to the database
|
85
|
-
--schema <schema> The schema to use for the migrations
|
87
|
+
--schema <schema> The schema to use for the migrations (default: public)
|
86
88
|
--path <path> The path to the migrations directory
|
87
89
|
--ssl true/false Whether to use SSL for the connection (default: false)
|
88
90
|
--napply Number of up migrations to apply (default: all)
|
89
91
|
--nundo Number of undo migrations to apply (default: 1)
|
90
|
-
--filename The filename to get the script for (default: last applied migration)
|
92
|
+
--filename (get-applied-script) The filename to get the script for (default: last applied migration)
|
93
|
+
--filename (baseline) The filename to baseline (default: last unapplied versioned migration)
|
91
94
|
|
92
95
|
Example:
|
93
96
|
npx stepwise-migrations migrate \
|
@@ -371,4 +374,32 @@ Dropping the tables, schema and migration history table... done!
|
|
371
374
|
|
372
375
|
</details>
|
373
376
|
|
377
|
+
### Baseline
|
378
|
+
|
379
|
+
Without applying any migrations, set the migration table state to a specific version.
|
380
|
+
|
381
|
+
Command:
|
382
|
+
|
383
|
+
```bash
|
384
|
+
npx stepwise-migrations baseline \
|
385
|
+
--connection=postgresql://postgres:postgres@127.0.0.1:5432/mydb \
|
386
|
+
--schema=myschema \
|
387
|
+
--path=./test/migrations-template/
|
388
|
+
```
|
389
|
+
|
390
|
+
<details>
|
391
|
+
|
392
|
+
<summary>Example output</summary>
|
393
|
+
|
394
|
+
```text
|
395
|
+
Creating schema myschema... done!
|
396
|
+
Creating stepwise_migration_events table... done!
|
397
|
+
Baselining v1_first.sql... done!
|
398
|
+
Baselining v2_second.sql... done!
|
399
|
+
Baselining v3_third.sql... done!
|
400
|
+
All done! (Shadow)-applied 3 migrations to baseline to v3_third.sql
|
401
|
+
```
|
402
|
+
|
403
|
+
</details>
|
404
|
+
|
374
405
|
[comment]: <> (End of examples)
|
package/dist/src/commands.js
CHANGED
@@ -8,21 +8,30 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
8
8
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
9
9
|
});
|
10
10
|
};
|
11
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
12
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
13
|
+
};
|
11
14
|
Object.defineProperty(exports, "__esModule", { value: true });
|
12
|
-
exports.getAppliedScriptCommand = exports.auditCommand = exports.undoCommand = exports.dropCommand = exports.validateCommand = exports.statusCommand = exports.infoCommand = exports.migrateCommand = void 0;
|
15
|
+
exports.baselineCommand = exports.getAppliedScriptCommand = exports.auditCommand = exports.undoCommand = exports.dropCommand = exports.validateCommand = exports.statusCommand = exports.infoCommand = exports.migrateCommand = exports.ensureTableInitialised = void 0;
|
16
|
+
const node_assert_1 = __importDefault(require("node:assert"));
|
13
17
|
const db_1 = require("./db");
|
14
18
|
const state_1 = require("./state");
|
15
19
|
const utils_1 = require("./utils");
|
16
|
-
const
|
17
|
-
const {
|
18
|
-
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client, schema);
|
20
|
+
const ensureTableInitialised = (client) => __awaiter(void 0, void 0, void 0, function* () {
|
21
|
+
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client);
|
19
22
|
if (!schemaExists) {
|
20
|
-
yield
|
23
|
+
yield client.dbCreateSchema();
|
21
24
|
}
|
22
25
|
if (!tableExists) {
|
23
|
-
yield
|
26
|
+
yield client.dbCreateEventsTable();
|
24
27
|
}
|
25
|
-
|
28
|
+
});
|
29
|
+
exports.ensureTableInitialised = ensureTableInitialised;
|
30
|
+
const migrateCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
31
|
+
const { napply, filePath } = args;
|
32
|
+
const client = yield (0, db_1.dbConnect)(args);
|
33
|
+
yield (0, exports.ensureTableInitialised)(client);
|
34
|
+
const state = yield (0, state_1.loadState)(client, filePath);
|
26
35
|
(0, utils_1.abortIfErrors)(state);
|
27
36
|
if (state.files.unappliedVersionedFiles.length === 0 &&
|
28
37
|
state.files.unappliedRepeatableFiles.length === 0) {
|
@@ -34,15 +43,16 @@ const migrateCommand = (client, argv) => __awaiter(void 0, void 0, void 0, funct
|
|
34
43
|
...state.files.unappliedRepeatableFiles,
|
35
44
|
].slice(0, napply);
|
36
45
|
for (const migration of migrationsToApply) {
|
37
|
-
yield
|
46
|
+
yield client.dbApplyMigration(migration);
|
38
47
|
}
|
39
48
|
console.log(`All done! Applied ${migrationsToApply.length} migration${migrationsToApply.length === 1 ? "" : "s"}`);
|
40
|
-
(0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, state_1.loadState)(client,
|
49
|
+
(0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, state_1.loadState)(client, filePath));
|
41
50
|
});
|
42
51
|
exports.migrateCommand = migrateCommand;
|
43
|
-
const infoCommand = (
|
44
|
-
const { schema, filePath } =
|
45
|
-
const
|
52
|
+
const infoCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
53
|
+
const { connection, schema, filePath } = args;
|
54
|
+
const client = yield (0, db_1.dbConnect)(args);
|
55
|
+
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client);
|
46
56
|
if (!schemaExists) {
|
47
57
|
console.log("Schema does not exist");
|
48
58
|
}
|
@@ -50,13 +60,14 @@ const infoCommand = (client, argv) => __awaiter(void 0, void 0, void 0, function
|
|
50
60
|
console.log("Migration table has not been initialised. Run migrate to begin.");
|
51
61
|
}
|
52
62
|
if (schemaExists && tableExists) {
|
53
|
-
(0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, state_1.loadState)(client,
|
63
|
+
(0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, state_1.loadState)(client, filePath));
|
54
64
|
}
|
55
65
|
});
|
56
66
|
exports.infoCommand = infoCommand;
|
57
|
-
const statusCommand = (
|
58
|
-
const { schema, filePath } =
|
59
|
-
const
|
67
|
+
const statusCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
68
|
+
const { connection, schema, filePath } = args;
|
69
|
+
const client = yield (0, db_1.dbConnect)(args);
|
70
|
+
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client);
|
60
71
|
if (!schemaExists) {
|
61
72
|
console.log("Schema does not exist");
|
62
73
|
}
|
@@ -64,15 +75,16 @@ const statusCommand = (client, argv) => __awaiter(void 0, void 0, void 0, functi
|
|
64
75
|
console.log("Migration table has not been initialised. Run migrate to begin.");
|
65
76
|
}
|
66
77
|
if (schemaExists && tableExists) {
|
67
|
-
(0, utils_1.printMigrationHistory)(yield (0, state_1.loadState)(client,
|
78
|
+
(0, utils_1.printMigrationHistory)(yield (0, state_1.loadState)(client, filePath));
|
68
79
|
}
|
69
80
|
});
|
70
81
|
exports.statusCommand = statusCommand;
|
71
|
-
const validateCommand = (
|
72
|
-
const { schema } =
|
73
|
-
const
|
82
|
+
const validateCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
83
|
+
const { connection, schema, filePath } = args;
|
84
|
+
const client = yield (0, db_1.dbConnect)(args);
|
85
|
+
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client);
|
74
86
|
(0, utils_1.exitIfNotInitialized)(schemaExists, tableExists);
|
75
|
-
const state = yield (0, state_1.loadState)(client,
|
87
|
+
const state = yield (0, state_1.loadState)(client, filePath);
|
76
88
|
if (schemaExists && tableExists) {
|
77
89
|
(0, utils_1.abortIfErrors)(state);
|
78
90
|
}
|
@@ -80,16 +92,18 @@ const validateCommand = (client, argv) => __awaiter(void 0, void 0, void 0, func
|
|
80
92
|
(0, utils_1.printMigrationHistoryAndUnappliedMigrations)(state);
|
81
93
|
});
|
82
94
|
exports.validateCommand = validateCommand;
|
83
|
-
const dropCommand = (
|
84
|
-
const { schema } =
|
95
|
+
const dropCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
96
|
+
const { connection, schema } = args;
|
97
|
+
const client = yield (0, db_1.dbConnect)(args);
|
85
98
|
process.stdout.write(`Dropping the tables, schema and migration history table... `);
|
86
|
-
yield
|
99
|
+
yield client.dbDropAll();
|
87
100
|
console.log(`done!`);
|
88
101
|
});
|
89
102
|
exports.dropCommand = dropCommand;
|
90
|
-
const undoCommand = (
|
91
|
-
const {
|
92
|
-
const
|
103
|
+
const undoCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
104
|
+
const { connection, schema, filePath, nundo } = args;
|
105
|
+
const client = yield (0, db_1.dbConnect)(args);
|
106
|
+
const state = yield (0, state_1.loadState)(client, filePath);
|
93
107
|
(0, utils_1.abortIfErrors)(state);
|
94
108
|
const reversedAppliedVersionedMigrations = state.current.appliedVersionedMigrations.slice().reverse();
|
95
109
|
const undosToApplyAll = reversedAppliedVersionedMigrations.map((migration) => state.files.undoFiles.find((file) => file.filename === (0, state_1.getUndoFilename)(migration.filename)));
|
@@ -99,17 +113,18 @@ const undoCommand = (client, argv) => __awaiter(void 0, void 0, void 0, function
|
|
99
113
|
process.exit(1);
|
100
114
|
}
|
101
115
|
for (const { filename, script } of undosToApply) {
|
102
|
-
yield
|
116
|
+
yield client.dbApplyUndoMigration(filename, script);
|
103
117
|
}
|
104
118
|
console.log(`All done! Performed ${undosToApply.length} undo migration${undosToApply.length === 1 ? "" : "s"}`);
|
105
119
|
(0, utils_1.printMigrationHistoryAndUnappliedMigrations)(state);
|
106
120
|
});
|
107
121
|
exports.undoCommand = undoCommand;
|
108
|
-
const auditCommand = (
|
109
|
-
const { schema } =
|
110
|
-
const
|
122
|
+
const auditCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
123
|
+
const { connection, schema, filePath } = args;
|
124
|
+
const client = yield (0, db_1.dbConnect)(args);
|
125
|
+
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client);
|
111
126
|
(0, utils_1.exitIfNotInitialized)(schemaExists, tableExists);
|
112
|
-
const state = yield (0, state_1.loadState)(client,
|
127
|
+
const state = yield (0, state_1.loadState)(client, filePath);
|
113
128
|
console.log("Event history:");
|
114
129
|
console.table(state.events.map((row) => ({
|
115
130
|
id: row.id,
|
@@ -120,17 +135,44 @@ const auditCommand = (client, argv) => __awaiter(void 0, void 0, void 0, functio
|
|
120
135
|
})));
|
121
136
|
});
|
122
137
|
exports.auditCommand = auditCommand;
|
123
|
-
const getAppliedScriptCommand = (
|
124
|
-
const { schema } =
|
125
|
-
|
138
|
+
const getAppliedScriptCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
139
|
+
const { connection, schema, filePath, filename } = args;
|
140
|
+
node_assert_1.default.ok(filename, "filename is required for this command");
|
141
|
+
const client = yield (0, db_1.dbConnect)(args);
|
142
|
+
const { schemaExists, tableExists } = yield (0, utils_1.checkSchemaAndTable)(client);
|
126
143
|
(0, utils_1.exitIfNotInitialized)(schemaExists, tableExists);
|
127
|
-
const state = yield (0, state_1.loadState)(client,
|
128
|
-
const script = yield
|
144
|
+
const state = yield (0, state_1.loadState)(client, filePath);
|
145
|
+
const script = yield client.dbGetAppliedScript(state, filename);
|
129
146
|
if (script) {
|
130
147
|
console.log(script);
|
131
148
|
}
|
132
149
|
else {
|
133
|
-
console.error(`Script for ${
|
150
|
+
console.error(`Script for ${filename} not found, use the audit command to check all applied migrations`);
|
134
151
|
}
|
135
152
|
});
|
136
153
|
exports.getAppliedScriptCommand = getAppliedScriptCommand;
|
154
|
+
const baselineCommand = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
155
|
+
const { connection, schema, filePath, filename: argvFilename } = args;
|
156
|
+
const client = yield (0, db_1.dbConnect)(args);
|
157
|
+
yield (0, exports.ensureTableInitialised)(client);
|
158
|
+
const state = yield (0, state_1.loadState)(client, filePath);
|
159
|
+
if (state.files.unappliedVersionedFiles.length === 0) {
|
160
|
+
console.error("Error: No unapplied versioned migrations, aborting.");
|
161
|
+
process.exit(1);
|
162
|
+
}
|
163
|
+
const filename = argvFilename !== null && argvFilename !== void 0 ? argvFilename : state.files.unappliedVersionedFiles[state.files.unappliedVersionedFiles.length - 1].filename;
|
164
|
+
if (!state.files.unappliedVersionedFiles.find((file) => file.filename === filename)) {
|
165
|
+
console.error(`Error: '${filename}' is not an unapplied versioned migration, aborting.`);
|
166
|
+
process.exit(1);
|
167
|
+
}
|
168
|
+
let appliedCount = 0;
|
169
|
+
for (const file of state.files.unappliedVersionedFiles) {
|
170
|
+
yield client.dbBaseline(file);
|
171
|
+
appliedCount++;
|
172
|
+
if (file.filename === filename) {
|
173
|
+
break;
|
174
|
+
}
|
175
|
+
}
|
176
|
+
console.log(`All done! (Shadow)-applied ${appliedCount} migrations to baseline to ${filename}`);
|
177
|
+
});
|
178
|
+
exports.baselineCommand = baselineCommand;
|
@@ -0,0 +1,61 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
+
if (k2 === undefined) k2 = k;
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
7
|
+
}
|
8
|
+
Object.defineProperty(o, k2, desc);
|
9
|
+
}) : (function(o, m, k, k2) {
|
10
|
+
if (k2 === undefined) k2 = k;
|
11
|
+
o[k2] = m[k];
|
12
|
+
}));
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
15
|
+
}) : function(o, v) {
|
16
|
+
o["default"] = v;
|
17
|
+
});
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
19
|
+
var ownKeys = function(o) {
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
21
|
+
var ar = [];
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
23
|
+
return ar;
|
24
|
+
};
|
25
|
+
return ownKeys(o);
|
26
|
+
};
|
27
|
+
return function (mod) {
|
28
|
+
if (mod && mod.__esModule) return mod;
|
29
|
+
var result = {};
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
31
|
+
__setModuleDefault(result, mod);
|
32
|
+
return result;
|
33
|
+
};
|
34
|
+
})();
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
42
|
+
});
|
43
|
+
};
|
44
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
45
|
+
exports.dbConnect = void 0;
|
46
|
+
const pg = __importStar(require("./pg"));
|
47
|
+
const dbConnect = (args) => __awaiter(void 0, void 0, void 0, function* () {
|
48
|
+
if (args.connection.startsWith("postgresql://")) {
|
49
|
+
return yield pg._dbConnect(args);
|
50
|
+
}
|
51
|
+
// else if (connection.startsWith("mysql://")) {
|
52
|
+
// const client = await mysql._dbConnect(argv);
|
53
|
+
// return {
|
54
|
+
// type: "mysql",
|
55
|
+
// client,
|
56
|
+
// ...mysql,
|
57
|
+
// };
|
58
|
+
// }
|
59
|
+
throw new Error("Invalid database connection string");
|
60
|
+
});
|
61
|
+
exports.dbConnect = dbConnect;
|
@@ -0,0 +1,190 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
+
if (k2 === undefined) k2 = k;
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
7
|
+
}
|
8
|
+
Object.defineProperty(o, k2, desc);
|
9
|
+
}) : (function(o, m, k, k2) {
|
10
|
+
if (k2 === undefined) k2 = k;
|
11
|
+
o[k2] = m[k];
|
12
|
+
}));
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
15
|
+
}) : function(o, v) {
|
16
|
+
o["default"] = v;
|
17
|
+
});
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
19
|
+
var ownKeys = function(o) {
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
21
|
+
var ar = [];
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
23
|
+
return ar;
|
24
|
+
};
|
25
|
+
return ownKeys(o);
|
26
|
+
};
|
27
|
+
return function (mod) {
|
28
|
+
if (mod && mod.__esModule) return mod;
|
29
|
+
var result = {};
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
31
|
+
__setModuleDefault(result, mod);
|
32
|
+
return result;
|
33
|
+
};
|
34
|
+
})();
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
42
|
+
});
|
43
|
+
};
|
44
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
45
|
+
exports._dbConnect = void 0;
|
46
|
+
const pg_1 = __importStar(require("pg"));
|
47
|
+
const types_1 = require("../types");
|
48
|
+
pg_1.default.types.setTypeParser(1114, function (stringValue) {
|
49
|
+
return stringValue; //1114 for time without timezone type
|
50
|
+
});
|
51
|
+
pg_1.default.types.setTypeParser(1082, function (stringValue) {
|
52
|
+
return stringValue; //1082 for date type
|
53
|
+
});
|
54
|
+
const _dbConnect = (_a) => __awaiter(void 0, [_a], void 0, function* ({ ssl, connection, schema }) {
|
55
|
+
const pool = new pg_1.Pool({
|
56
|
+
connectionString: connection,
|
57
|
+
ssl: ssl === "true",
|
58
|
+
});
|
59
|
+
let client;
|
60
|
+
try {
|
61
|
+
client = yield pool.connect();
|
62
|
+
yield client.query("SELECT 1");
|
63
|
+
}
|
64
|
+
catch (error) {
|
65
|
+
console.error("Failed to connect to the database", error);
|
66
|
+
process.exit(1);
|
67
|
+
}
|
68
|
+
const dbSchemaExists = () => __awaiter(void 0, void 0, void 0, function* () {
|
69
|
+
const result = yield client.query(`SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = '${schema}')`);
|
70
|
+
return result.rows[0].exists;
|
71
|
+
});
|
72
|
+
const dbTableExists = () => __awaiter(void 0, void 0, void 0, function* () {
|
73
|
+
const tableExistsResult = yield client.query(`SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = 'stepwise_migration_events' and schemaname = '${schema}')`);
|
74
|
+
return tableExistsResult.rows[0].exists;
|
75
|
+
});
|
76
|
+
const dbDropAll = () => __awaiter(void 0, void 0, void 0, function* () {
|
77
|
+
yield client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
|
78
|
+
});
|
79
|
+
const dbCreateSchema = () => __awaiter(void 0, void 0, void 0, function* () {
|
80
|
+
process.stdout.write(`Creating schema ${schema}... `);
|
81
|
+
yield client.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);
|
82
|
+
console.log(`done!`);
|
83
|
+
});
|
84
|
+
const dbEventHistory = () => __awaiter(void 0, void 0, void 0, function* () {
|
85
|
+
try {
|
86
|
+
const eventQuery = yield client.query(`SELECT * FROM ${schema}.stepwise_migration_events`);
|
87
|
+
return eventQuery.rows.map((row) => types_1.EventRow.parse(row));
|
88
|
+
}
|
89
|
+
catch (error) {
|
90
|
+
console.error("Error fetching event history", error);
|
91
|
+
process.exit(1);
|
92
|
+
}
|
93
|
+
});
|
94
|
+
const dbCreateEventsTable = () => __awaiter(void 0, void 0, void 0, function* () {
|
95
|
+
process.stdout.write(`Creating stepwise_migration_events table... `);
|
96
|
+
yield client.query(`
|
97
|
+
CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migration_events (
|
98
|
+
id SERIAL PRIMARY KEY,
|
99
|
+
type TEXT NOT NULL,
|
100
|
+
filename TEXT NOT NULL,
|
101
|
+
script TEXT NOT NULL,
|
102
|
+
applied_by TEXT NOT NULL DEFAULT current_user,
|
103
|
+
applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
|
104
|
+
);
|
105
|
+
`);
|
106
|
+
console.log(`done!`);
|
107
|
+
});
|
108
|
+
const dbGetAppliedScript = (state, filename) => __awaiter(void 0, void 0, void 0, function* () {
|
109
|
+
var _a;
|
110
|
+
return (_a = state.current.appliedVersionedMigrations
|
111
|
+
.concat(state.current.appliedRepeatableMigrations)
|
112
|
+
.find((file) => file.filename === filename)) === null || _a === void 0 ? void 0 : _a.script;
|
113
|
+
});
|
114
|
+
const dbApplyMigration = (migration) => __awaiter(void 0, void 0, void 0, function* () {
|
115
|
+
try {
|
116
|
+
process.stdout.write(`Applying ${migration.type} migration ${migration.filename}... `);
|
117
|
+
yield client.query("BEGIN");
|
118
|
+
yield client.query(`SET search_path TO ${schema};
|
119
|
+
${migration.script.toString()}`);
|
120
|
+
yield client.query(`INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`, [migration.type, migration.filename, migration.script]);
|
121
|
+
yield client.query("COMMIT");
|
122
|
+
console.log(`done!`);
|
123
|
+
}
|
124
|
+
catch (error) {
|
125
|
+
try {
|
126
|
+
yield client.query("ROLLBACK");
|
127
|
+
}
|
128
|
+
catch (error) {
|
129
|
+
console.error("Error rolling back transaction", error);
|
130
|
+
}
|
131
|
+
console.error("Error applying migration", error);
|
132
|
+
process.exit(1);
|
133
|
+
}
|
134
|
+
});
|
135
|
+
const dbBaseline = (migration) => __awaiter(void 0, void 0, void 0, function* () {
|
136
|
+
try {
|
137
|
+
process.stdout.write(`Baselining ${migration.filename}... `);
|
138
|
+
yield client.query("BEGIN");
|
139
|
+
yield client.query(`INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`, [migration.type, migration.filename, migration.script]);
|
140
|
+
yield client.query("COMMIT");
|
141
|
+
console.log(`done!`);
|
142
|
+
}
|
143
|
+
catch (error) {
|
144
|
+
try {
|
145
|
+
yield client.query("ROLLBACK");
|
146
|
+
}
|
147
|
+
catch (error) {
|
148
|
+
console.error("Error rolling back transaction", error);
|
149
|
+
}
|
150
|
+
console.error("Error baselining migration", error);
|
151
|
+
process.exit(1);
|
152
|
+
}
|
153
|
+
});
|
154
|
+
const dbApplyUndoMigration = (filename, script) => __awaiter(void 0, void 0, void 0, function* () {
|
155
|
+
try {
|
156
|
+
process.stdout.write(`Applying undo migration ${filename}... `);
|
157
|
+
yield client.query("BEGIN");
|
158
|
+
yield client.query(`SET search_path TO ${schema};
|
159
|
+
${script.toString()}`);
|
160
|
+
yield client.query(`INSERT INTO ${schema}.stepwise_migration_events (type, filename, script) VALUES ($1, $2, $3)`, ["undo", filename, script]);
|
161
|
+
yield client.query("COMMIT");
|
162
|
+
console.log(`done!`);
|
163
|
+
}
|
164
|
+
catch (error) {
|
165
|
+
try {
|
166
|
+
yield client.query("ROLLBACK");
|
167
|
+
}
|
168
|
+
catch (error) {
|
169
|
+
console.error("Error rolling back transaction", error);
|
170
|
+
}
|
171
|
+
console.error("Error applying undo migration", error);
|
172
|
+
process.exit(1);
|
173
|
+
}
|
174
|
+
});
|
175
|
+
return {
|
176
|
+
type: "pg",
|
177
|
+
schema,
|
178
|
+
dbSchemaExists,
|
179
|
+
dbTableExists,
|
180
|
+
dbDropAll,
|
181
|
+
dbCreateSchema,
|
182
|
+
dbEventHistory,
|
183
|
+
dbCreateEventsTable,
|
184
|
+
dbGetAppliedScript,
|
185
|
+
dbApplyMigration,
|
186
|
+
dbBaseline,
|
187
|
+
dbApplyUndoMigration,
|
188
|
+
};
|
189
|
+
});
|
190
|
+
exports._dbConnect = _dbConnect;
|
package/dist/src/index.js
CHANGED
@@ -15,43 +15,44 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
15
15
|
Object.defineProperty(exports, "__esModule", { value: true });
|
16
16
|
const yargs_1 = __importDefault(require("yargs"));
|
17
17
|
const commands_1 = require("./commands");
|
18
|
-
const db_1 = require("./db");
|
19
18
|
const utils_1 = require("./utils");
|
20
19
|
const main = () => __awaiter(void 0, void 0, void 0, function* () {
|
21
20
|
const argv = (0, yargs_1.default)(process.argv.slice(2)).argv;
|
22
|
-
(0, utils_1.
|
23
|
-
|
24
|
-
const
|
21
|
+
const args = (0, utils_1.parseArgs)(argv);
|
22
|
+
(0, utils_1.validateArgs)(args);
|
23
|
+
const command = args.command;
|
25
24
|
if (command === "migrate") {
|
26
|
-
yield (0, commands_1.migrateCommand)(
|
25
|
+
yield (0, commands_1.migrateCommand)(args);
|
27
26
|
}
|
28
27
|
else if (command === "info") {
|
29
|
-
yield (0, commands_1.infoCommand)(
|
28
|
+
yield (0, commands_1.infoCommand)(args);
|
30
29
|
}
|
31
30
|
else if (command === "status") {
|
32
|
-
yield (0, commands_1.statusCommand)(
|
31
|
+
yield (0, commands_1.statusCommand)(args);
|
33
32
|
}
|
34
33
|
else if (command === "validate") {
|
35
|
-
yield (0, commands_1.validateCommand)(
|
34
|
+
yield (0, commands_1.validateCommand)(args);
|
36
35
|
}
|
37
36
|
else if (command === "drop") {
|
38
|
-
yield (0, commands_1.dropCommand)(
|
37
|
+
yield (0, commands_1.dropCommand)(args);
|
39
38
|
}
|
40
39
|
else if (command === "undo") {
|
41
|
-
yield (0, commands_1.undoCommand)(
|
40
|
+
yield (0, commands_1.undoCommand)(args);
|
42
41
|
}
|
43
42
|
else if (command === "audit") {
|
44
|
-
yield (0, commands_1.auditCommand)(
|
43
|
+
yield (0, commands_1.auditCommand)(args);
|
45
44
|
}
|
46
45
|
else if (command === "get-applied-script") {
|
47
|
-
yield (0, commands_1.getAppliedScriptCommand)(
|
46
|
+
yield (0, commands_1.getAppliedScriptCommand)(args);
|
47
|
+
}
|
48
|
+
else if (command === "baseline") {
|
49
|
+
yield (0, commands_1.baselineCommand)(args);
|
48
50
|
}
|
49
51
|
else {
|
50
52
|
console.error(`Invalid command: ${command}`);
|
51
53
|
console.log(utils_1.usage);
|
52
54
|
process.exit(1);
|
53
55
|
}
|
54
|
-
client.release();
|
55
56
|
process.exit(0);
|
56
57
|
});
|
57
58
|
main();
|
package/dist/src/state.js
CHANGED
@@ -15,7 +15,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
15
|
exports.eventsToApplied = exports.getUndoFilename = exports.loadState = exports.validateMigrationFiles = void 0;
|
16
16
|
const git_diff_1 = __importDefault(require("git-diff"));
|
17
17
|
const path_1 = __importDefault(require("path"));
|
18
|
-
const db_1 = require("./db");
|
19
18
|
const utils_1 = require("./utils");
|
20
19
|
const validateMigrationFiles = (state) => {
|
21
20
|
let errors = [];
|
@@ -55,8 +54,8 @@ const validateMigrationFiles = (state) => {
|
|
55
54
|
return errors;
|
56
55
|
};
|
57
56
|
exports.validateMigrationFiles = validateMigrationFiles;
|
58
|
-
const loadState = (client,
|
59
|
-
const events = yield
|
57
|
+
const loadState = (client, migrationPath) => __awaiter(void 0, void 0, void 0, function* () {
|
58
|
+
const events = yield client.dbEventHistory();
|
60
59
|
const { appliedVersionedMigrations, appliedRepeatableMigrations, errors: appliedErrors, } = (0, exports.eventsToApplied)(events);
|
61
60
|
const { files: allFiles, errors: readFileErrors } = yield (0, utils_1.readMigrationFiles)(path_1.default.join(process.cwd(), migrationPath), appliedVersionedMigrations);
|
62
61
|
const unappliedVersionedFiles = allFiles
|
@@ -66,7 +65,7 @@ const loadState = (client, schema, migrationPath) => __awaiter(void 0, void 0, v
|
|
66
65
|
.filter((file) => file.type === "repeatable")
|
67
66
|
.filter((file) => !appliedRepeatableMigrations.find((event) => event.filename === file.filename && event.script === file.script));
|
68
67
|
return {
|
69
|
-
schema,
|
68
|
+
schema: client.schema,
|
70
69
|
current: {
|
71
70
|
appliedVersionedMigrations,
|
72
71
|
appliedRepeatableMigrations,
|