stepwise-migrations 1.0.8 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -24,10 +24,16 @@ Commands:
24
24
  Migrate the database to the latest version
25
25
  down
26
26
  Rollback the database to the previous version
27
+ validate
28
+ Validate the migration files and the migration history table
29
+ audit
30
+ Show the audit history for the migrations in the database
27
31
  info
28
32
  Show information about the current state of the migrations in the database
29
33
  drop
30
34
  Drop all tables, schema and migration history table
35
+ get-script
36
+ Get the script for the last applied migration
31
37
 
32
38
  Options:
33
39
  --connection <connection> The connection string to use to connect to the database
@@ -36,6 +42,7 @@ Options:
36
42
  --ssl true/false Whether to use SSL for the connection (default: false)
37
43
  --nup Number of up migrations to apply (default: all)
38
44
  --ndown Number of down migrations to apply (default: 1)
45
+ --filename The filename to get the script for (default: last applied migration)
39
46
 
40
47
  Example:
41
48
  npx stepwise-migrations migrate \
@@ -48,7 +55,7 @@ Example:
48
55
 
49
56
  ### Migrate
50
57
 
51
- Command:
58
+ If all files are in a valid state, runs all the "up" migrations that have not been applied yet.
52
59
 
53
60
  ```bash
54
61
  npx stepwise-migrations migrate \
@@ -57,25 +64,11 @@ npx stepwise-migrations migrate \
57
64
  --path=./db/migration/
58
65
  ```
59
66
 
60
- Outputs:
61
-
62
- ```
63
- Creating schema myschema... done!
64
- Creating migration history table... done!
65
- Applying migration v1_connect_session_table.sql... done!
66
- Applying migration v2_auth.sql... done!
67
- All done! Applied 2 migrations
68
- New migration history:
69
- ┌─────────┬────┬────────────────────────────────┬────────────────────────────────────────────────────────────────────┬────────────┬─────────────────────────────┐
70
- │ (index) │ id │ name │ hash │ applied_by │ applied_at │
71
- ├─────────┼────┼────────────────────────────────┼────────────────────────────────────────────────────────────────────┼────────────┼─────────────────────────────┤
72
- │ 0 │ 1 │ 'v1_connect_session_table.sql' │ 'f08638e58139ae0e2dda24b1bdba29f3f2128597066a23d2bb382d448bbe9d7e' │ 'postgres' │ '2024-11-23 18:29:16.1616' │
73
- │ 1 │ 2 │ 'v2_auth.sql' │ '0a4c5df39f03df85cb68ef0b297b913d7c15477fa9dcba13b6e0577d88258a8e' │ 'postgres' │ '2024-11-23 18:29:16.16533' │
74
- └─────────┴────┴────────────────────────────────┴────────────────────────────────────────────────────────────────────┴────────────┴─────────────────────────────┘
75
- ```
76
-
77
67
  ### Down
78
68
 
69
+ Runs a single down migration for the last applied migration.
70
+ Can run multiple down migrations if the `--ndown` option is provided.
71
+
79
72
  Command:
80
73
 
81
74
  ```bash
@@ -85,21 +78,32 @@ npx stepwise-migrations down \
85
78
  --path=./db/migration/
86
79
  ```
87
80
 
88
- Outputs:
81
+ ### Validate
89
82
 
83
+ Validates the migration files and the migration history table.
84
+
85
+ ```bash
86
+ npx stepwise-migrations validate \
87
+ --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydb \
88
+ --schema=myschema \
89
+ --path=./db/migration/
90
90
  ```
91
- Applying down migration v2_auth.down.sql... done!
92
- All done! Applied 1 down migration
93
- New migration history:
94
- ┌─────────┬────┬────────────────────────────────┬────────────────────────────────────────────────────────────────────┬────────────┬────────────────────────────┐
95
- │ (index) │ id │ name │ hash │ applied_by │ applied_at │
96
- ├─────────┼────┼────────────────────────────────┼────────────────────────────────────────────────────────────────────┼────────────┼────────────────────────────┤
97
- 0 │ 1 │ 'v1_connect_session_table.sql' │ 'f08638e58139ae0e2dda24b1bdba29f3f2128597066a23d2bb382d448bbe9d7e' │ 'postgres' │ '2024-11-23 18:29:16.1616'
98
- └─────────┴────┴────────────────────────────────┴────────────────────────────────────────────────────────────────────┴────────────┴────────────────────────────┘
91
+
92
+ ### Audit
93
+
94
+ Shows the audit history for the migrations in the database.
95
+
96
+ ```bash
97
+ npx stepwise-migrations audit \
98
+ --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydb \
99
+ --schema=myschema \
100
+ --path=./db/migration/
99
101
  ```
100
102
 
101
103
  ### Info
102
104
 
105
+ Shows the current state of the migrations in the database.
106
+
103
107
  Command:
104
108
 
105
109
  ```bash
@@ -109,19 +113,10 @@ npx stepwise-migrations info \
109
113
  --path=./db/migration/
110
114
  ```
111
115
 
112
- Outputs:
113
-
114
- ```
115
- Migration history:
116
- ┌─────────┬────┬────────────────────────────────┬────────────────────────────────────────────────────────────────────┬────────────┬────────────────────────────┐
117
- │ (index) │ id │ name │ hash │ applied_by │ applied_at │
118
- ├─────────┼────┼────────────────────────────────┼────────────────────────────────────────────────────────────────────┼────────────┼────────────────────────────┤
119
- │ 0 │ 1 │ 'v1_connect_session_table.sql' │ 'f08638e58139ae0e2dda24b1bdba29f3f2128597066a23d2bb382d448bbe9d7e' │ 'postgres' │ '2024-11-23 18:29:16.1616' │
120
- └─────────┴────┴────────────────────────────────┴────────────────────────────────────────────────────────────────────┴────────────┴────────────────────────────┘
121
- ```
122
-
123
116
  ### Drop
124
117
 
118
+ Drops the tables, schema and migration history table.
119
+
125
120
  Command:
126
121
 
127
122
  ```bash
@@ -130,8 +125,16 @@ npx stepwise-migrations drop \
130
125
  --schema=myschema
131
126
  ```
132
127
 
133
- Outputs:
128
+ ### Get Script
134
129
 
135
- ```
136
- Dropping the tables, schema and migration history table... done!
130
+ Gets the script for the last applied migration.
131
+ Can get the script for a specific migration if the `--filename` option is provided.
132
+
133
+ Command:
134
+
135
+ ```bash
136
+ npx stepwise-migrations get-script \
137
+ --filename=v1_users.sql \
138
+ --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydb \
139
+ --schema=myschema
137
140
  ```
package/dist/db.js CHANGED
@@ -42,7 +42,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
42
42
  });
43
43
  };
44
44
  Object.defineProperty(exports, "__esModule", { value: true });
45
- exports.dbCreateHistoryTable = exports.dbCreateSchema = exports.dbMigrationHistory = exports.dbTableExists = exports.dbHistorySchemaExists = exports.dbConnect = void 0;
45
+ exports.dbGetScript = exports.dbCreateHistoryTable = exports.dbAuditHistory = exports.dbCreateSchema = exports.dbMigrationHistory = exports.dbTableExists = exports.dbHistorySchemaExists = exports.dbConnect = void 0;
46
46
  const pg_1 = __importStar(require("pg"));
47
47
  pg_1.default.types.setTypeParser(1114, function (stringValue) {
48
48
  return stringValue; //1114 for time without timezone type
@@ -88,15 +88,35 @@ const dbCreateSchema = (client, schema) => __awaiter(void 0, void 0, void 0, fun
88
88
  console.log(`done!`);
89
89
  });
90
90
  exports.dbCreateSchema = dbCreateSchema;
91
+ const dbAuditHistory = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
92
+ const auditQuery = yield client.query(`SELECT * FROM ${schema}.stepwise_audit`);
93
+ return auditQuery.rows;
94
+ });
95
+ exports.dbAuditHistory = dbAuditHistory;
91
96
  const dbCreateHistoryTable = (client, schema) => __awaiter(void 0, void 0, void 0, function* () {
92
97
  process.stdout.write(`Creating migration history table... `);
93
- yield client.query(`CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migrations (
94
- id SERIAL PRIMARY KEY,
95
- name TEXT UNIQUE NOT NULL,
96
- hash TEXT NOT NULL,
97
- applied_by TEXT NOT NULL DEFAULT current_user,
98
- applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
99
- )`);
98
+ yield client.query(`
99
+ CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migrations (
100
+ id SERIAL PRIMARY KEY,
101
+ name TEXT UNIQUE NOT NULL,
102
+ script TEXT NOT NULL,
103
+ applied_by TEXT NOT NULL DEFAULT current_user,
104
+ applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
105
+ );
106
+ CREATE TABLE IF NOT EXISTS ${schema}.stepwise_audit (
107
+ id SERIAL PRIMARY KEY,
108
+ type TEXT NOT NULL,
109
+ name TEXT UNIQUE NOT NULL,
110
+ script TEXT NOT NULL,
111
+ applied_by TEXT NOT NULL DEFAULT current_user,
112
+ applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
113
+ );
114
+ `);
100
115
  console.log(`done!`);
101
116
  });
102
117
  exports.dbCreateHistoryTable = dbCreateHistoryTable;
118
+ const dbGetScript = (client, schema, filename) => __awaiter(void 0, void 0, void 0, function* () {
119
+ const script = yield client.query(`SELECT script FROM ${schema}.stepwise_audit WHERE name = $1`, [filename]);
120
+ return script.rows[0].script;
121
+ });
122
+ exports.dbGetScript = dbGetScript;
package/dist/index.js CHANGED
@@ -36,13 +36,16 @@ const main = () => __awaiter(void 0, void 0, void 0, function* () {
36
36
  const migrationHistory = yield (0, db_1.dbMigrationHistory)(client, schema);
37
37
  const migrationFiles = yield (0, utils_1.readMigrationFiles)(argv.path);
38
38
  (0, migrate_1.validateMigrationFiles)(migrationFiles, migrationHistory);
39
+ if (migrationFiles.length === migrationHistory.length) {
40
+ console.log("All migrations are already applied");
41
+ process.exit(0);
42
+ }
39
43
  const migrationsToApply = migrationFiles.slice(migrationHistory.length, migrationHistory.length + nUp);
40
- for (const { filename, contents, hash } of migrationsToApply) {
41
- yield (0, migrate_1.applyMigration)(client, schema, filename, contents, hash);
44
+ for (const { filename, script } of migrationsToApply) {
45
+ yield (0, migrate_1.applyMigration)(client, schema, filename, script);
42
46
  }
43
47
  console.log(`All done! Applied ${migrationsToApply.length} migrations`);
44
- console.log("New migration history:");
45
- console.table(yield (0, db_1.dbMigrationHistory)(client, schema));
48
+ (0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, utils_1.readMigrationFiles)(argv.path), yield (0, db_1.dbMigrationHistory)(client, schema));
46
49
  }
47
50
  else if (command === "info") {
48
51
  if (!historySchemaExists) {
@@ -52,10 +55,22 @@ const main = () => __awaiter(void 0, void 0, void 0, function* () {
52
55
  console.log("Migration history table does not exist");
53
56
  }
54
57
  if (historySchemaExists && tableExists) {
55
- console.log("Migration history:");
56
- console.table(yield (0, db_1.dbMigrationHistory)(client, schema));
58
+ (0, utils_1.printMigrationHistory)(yield (0, db_1.dbMigrationHistory)(client, schema));
57
59
  }
58
60
  }
61
+ else if (command === "validate") {
62
+ if (!historySchemaExists) {
63
+ console.log("Schema does not exist");
64
+ }
65
+ if (!tableExists) {
66
+ console.log("Migration history table does not exist");
67
+ }
68
+ if (historySchemaExists && tableExists) {
69
+ (0, migrate_1.validateMigrationFiles)(yield (0, utils_1.readMigrationFiles)(argv.path), yield (0, db_1.dbMigrationHistory)(client, schema));
70
+ }
71
+ console.log("Validation passed");
72
+ (0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, utils_1.readMigrationFiles)(argv.path), yield (0, db_1.dbMigrationHistory)(client, schema));
73
+ }
59
74
  else if (command === "drop") {
60
75
  process.stdout.write(`Dropping the tables, schema and migration history table... `);
61
76
  yield client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
@@ -68,12 +83,31 @@ const main = () => __awaiter(void 0, void 0, void 0, function* () {
68
83
  const reverseMigrationHistory = migrationHistory.reverse().slice(0, nDown);
69
84
  const downMigrationFilesToApply = yield (0, utils_1.readDownMigrationFiles)(argv.path, reverseMigrationHistory);
70
85
  (0, migrate_1.validateDownMigrationFiles)(downMigrationFilesToApply, reverseMigrationHistory);
71
- for (const { filename, contents, upFilename, } of downMigrationFilesToApply) {
72
- yield (0, migrate_1.applyDownMigration)(client, schema, filename, contents, upFilename);
86
+ for (const { filename, script, upFilename } of downMigrationFilesToApply) {
87
+ yield (0, migrate_1.applyDownMigration)(client, schema, filename, script, upFilename);
73
88
  }
74
- console.log(`All done! Applied ${downMigrationFilesToApply.length} down migrations`);
75
- console.log("New migration history:");
76
- console.table(yield (0, db_1.dbMigrationHistory)(client, schema));
89
+ console.log(`All done! Applied ${downMigrationFilesToApply.length} down migration${downMigrationFilesToApply.length === 1 ? "" : "s"}`);
90
+ (0, utils_1.printMigrationHistoryAndUnappliedMigrations)(yield (0, utils_1.readMigrationFiles)(argv.path), yield (0, db_1.dbMigrationHistory)(client, schema));
91
+ }
92
+ else if (command === "audit") {
93
+ const auditHistory = yield (0, db_1.dbAuditHistory)(client, schema);
94
+ console.log("Audit history:");
95
+ console.table(auditHistory.map((row) => ({
96
+ id: row.id,
97
+ type: row.type,
98
+ name: row.name,
99
+ applied_by: row.applied_by,
100
+ applied_at: row.applied_at,
101
+ })));
102
+ }
103
+ else if (command === "get-script") {
104
+ const script = yield (0, db_1.dbGetScript)(client, schema, argv.filename);
105
+ console.log(script);
106
+ }
107
+ else {
108
+ console.error(`Invalid command: ${argv._[0]}`);
109
+ console.log(utils_1.usage);
110
+ process.exit(1);
77
111
  }
78
112
  client.release();
79
113
  process.exit(0);
package/dist/migrate.js CHANGED
@@ -8,8 +8,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
8
8
  step((generator = generator.apply(thisArg, _arguments || [])).next());
9
9
  });
10
10
  };
11
+ var __importDefault = (this && this.__importDefault) || function (mod) {
12
+ return (mod && mod.__esModule) ? mod : { "default": mod };
13
+ };
11
14
  Object.defineProperty(exports, "__esModule", { value: true });
12
15
  exports.applyDownMigration = exports.validateDownMigrationFiles = exports.applyMigration = exports.validateMigrationFiles = void 0;
16
+ const git_diff_1 = __importDefault(require("git-diff"));
13
17
  const validateMigrationFiles = (migrationFiles, migrationHistory, isUp = true) => {
14
18
  if (migrationFiles.length === 0) {
15
19
  console.log("No migrations found");
@@ -19,12 +23,8 @@ const validateMigrationFiles = (migrationFiles, migrationHistory, isUp = true) =
19
23
  console.error("Error: migration history is longer than the number of migration files, aborting.");
20
24
  process.exit(1);
21
25
  }
22
- if (migrationFiles.length === migrationHistory.length && isUp) {
23
- console.log("All migrations are already applied");
24
- process.exit(0);
25
- }
26
26
  for (let i = 0; i < migrationFiles.length; i++) {
27
- const { filename, hash: migrationHash } = migrationFiles[i];
27
+ const { filename, script: migrationScript } = migrationFiles[i];
28
28
  if (i >= migrationHistory.length) {
29
29
  continue;
30
30
  }
@@ -32,20 +32,25 @@ const validateMigrationFiles = (migrationFiles, migrationHistory, isUp = true) =
32
32
  console.error(`Error: migration ${filename} has been renamed, aborting.`);
33
33
  process.exit(1);
34
34
  }
35
- if (migrationHistory[i].hash !== migrationHash) {
35
+ if (migrationHistory[i].script !== migrationScript) {
36
36
  console.error(`Error: migration ${filename} has been modified, aborting.`);
37
+ console.log((0, git_diff_1.default)(migrationHistory[i].script, migrationScript, {
38
+ color: true,
39
+ noHeaders: true,
40
+ }));
37
41
  process.exit(1);
38
42
  }
39
43
  }
40
44
  };
41
45
  exports.validateMigrationFiles = validateMigrationFiles;
42
- const applyMigration = (client, schema, filename, contents, hash) => __awaiter(void 0, void 0, void 0, function* () {
46
+ const applyMigration = (client, schema, filename, script) => __awaiter(void 0, void 0, void 0, function* () {
43
47
  try {
44
48
  process.stdout.write(`Applying migration ${filename}... `);
45
49
  yield client.query("BEGIN");
46
50
  yield client.query(`SET search_path TO ${schema};
47
- ${contents.toString()}`);
48
- yield client.query(`INSERT INTO ${schema}.stepwise_migrations (name, hash) VALUES ($1, $2)`, [filename, hash]);
51
+ ${script.toString()}`);
52
+ yield client.query(`INSERT INTO ${schema}.stepwise_migrations (name, script) VALUES ($1, $2)`, [filename, script]);
53
+ yield client.query(`INSERT INTO ${schema}.stepwise_audit (type, name, script) VALUES ($1, $2, $3)`, ["up", filename, script]);
49
54
  yield client.query("COMMIT");
50
55
  console.log(`done!`);
51
56
  }
@@ -72,13 +77,14 @@ const validateDownMigrationFiles = (downMigrationFilesToApply, reverseMigrationH
72
77
  }
73
78
  };
74
79
  exports.validateDownMigrationFiles = validateDownMigrationFiles;
75
- const applyDownMigration = (client, schema, filename, contents, upFilename) => __awaiter(void 0, void 0, void 0, function* () {
80
+ const applyDownMigration = (client, schema, filename, script, upFilename) => __awaiter(void 0, void 0, void 0, function* () {
76
81
  try {
77
82
  process.stdout.write(`Applying down migration ${filename}... `);
78
83
  yield client.query("BEGIN");
79
84
  yield client.query(`SET search_path TO ${schema};
80
- ${contents.toString()}`);
85
+ ${script.toString()}`);
81
86
  yield client.query(`DELETE FROM ${schema}.stepwise_migrations WHERE name = $1`, [upFilename]);
87
+ yield client.query(`INSERT INTO ${schema}.stepwise_audit (type, name, script) VALUES ($1, $2, $3)`, ["down", filename, script]);
82
88
  yield client.query("COMMIT");
83
89
  console.log(`done!`);
84
90
  }
package/dist/utils.js CHANGED
@@ -12,14 +12,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
12
12
  return (mod && mod.__esModule) ? mod : { "default": mod };
13
13
  };
14
14
  Object.defineProperty(exports, "__esModule", { value: true });
15
- exports.readDownMigrationFiles = exports.fileExists = exports.readMigrationFiles = exports.validateArgs = exports.usage = exports.calculateHash = void 0;
16
- const crypto_1 = __importDefault(require("crypto"));
15
+ exports.readDownMigrationFiles = exports.fileExists = exports.printMigrationHistory = exports.printMigrationHistoryAndUnappliedMigrations = exports.readMigrationFiles = exports.validateArgs = exports.usage = void 0;
17
16
  const promises_1 = __importDefault(require("fs/promises"));
18
17
  const path_1 = __importDefault(require("path"));
19
- const calculateHash = (contents) => {
20
- return crypto_1.default.createHash("sha256").update(contents).digest("hex");
21
- };
22
- exports.calculateHash = calculateHash;
23
18
  exports.usage = `
24
19
  Usage: stepwise-migrations [command] [options]
25
20
 
@@ -59,14 +54,6 @@ const validateArgs = (argv) => {
59
54
  console.log(exports.usage);
60
55
  process.exit(1);
61
56
  }
62
- if (argv._[0] !== "migrate" &&
63
- argv._[0] !== "info" &&
64
- argv._[0] !== "drop" &&
65
- argv._[0] !== "down") {
66
- console.error(`Invalid command: ${argv._[0]}`);
67
- console.log(exports.usage);
68
- process.exit(1);
69
- }
70
57
  };
71
58
  exports.validateArgs = validateArgs;
72
59
  const readMigrationFiles = (directory) => __awaiter(void 0, void 0, void 0, function* () {
@@ -79,18 +66,41 @@ const readMigrationFiles = (directory) => __awaiter(void 0, void 0, void 0, func
79
66
  migrationFiles.sort();
80
67
  const results = [];
81
68
  for (const fullFilePath of migrationFiles) {
82
- const contents = yield promises_1.default.readFile(fullFilePath, "utf8");
69
+ const script = yield promises_1.default.readFile(fullFilePath, "utf8");
83
70
  results.push({
84
71
  type: "up",
85
72
  fullFilePath,
86
73
  filename: path_1.default.basename(fullFilePath),
87
- hash: (0, exports.calculateHash)(contents),
88
- contents,
74
+ script,
89
75
  });
90
76
  }
91
77
  return results;
92
78
  });
93
79
  exports.readMigrationFiles = readMigrationFiles;
80
+ const printMigrationHistoryAndUnappliedMigrations = (migrationFiles, migrationHistory) => {
81
+ console.log("Migration history:");
82
+ console.table(migrationHistory.map((h) => ({
83
+ id: h.id,
84
+ name: h.name,
85
+ applied_by: h.applied_by,
86
+ applied_at: h.applied_at,
87
+ })));
88
+ console.log("Unapplied migrations:");
89
+ console.table(migrationFiles.slice(migrationHistory.length).map((m) => ({
90
+ filename: m.filename,
91
+ })));
92
+ };
93
+ exports.printMigrationHistoryAndUnappliedMigrations = printMigrationHistoryAndUnappliedMigrations;
94
+ const printMigrationHistory = (migrationHistory) => {
95
+ console.log("Migration history:");
96
+ console.table(migrationHistory.map((h) => ({
97
+ id: h.id,
98
+ name: h.name,
99
+ applied_by: h.applied_by,
100
+ applied_at: h.applied_at,
101
+ })));
102
+ };
103
+ exports.printMigrationHistory = printMigrationHistory;
94
104
  const fileExists = (path) => __awaiter(void 0, void 0, void 0, function* () {
95
105
  try {
96
106
  return (yield promises_1.default.stat(path)).isFile();
@@ -108,13 +118,13 @@ const readDownMigrationFiles = (directory, migrationHistory) => __awaiter(void 0
108
118
  console.error(`Down migration file not found: ${fullFilePath}`);
109
119
  process.exit(1);
110
120
  }
111
- const contents = yield promises_1.default.readFile(fullFilePath, "utf8");
121
+ const script = yield promises_1.default.readFile(fullFilePath, "utf8");
112
122
  results.push({
113
123
  type: "down",
114
124
  fullFilePath,
115
125
  filename: path_1.default.basename(fullFilePath),
116
126
  upFilename: migration.name,
117
- contents,
127
+ script,
118
128
  });
119
129
  }
120
130
  return results;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "stepwise-migrations",
3
- "version": "1.0.8",
3
+ "version": "1.0.9",
4
4
  "description": "",
5
5
  "main": "index.js",
6
6
  "scripts": {
@@ -15,6 +15,7 @@
15
15
  "author": "github.com/mj1618",
16
16
  "license": "MIT",
17
17
  "devDependencies": {
18
+ "@types/git-diff": "^2.0.7",
18
19
  "@types/pg": "^8.11.10",
19
20
  "@types/yargs": "^17.0.33"
20
21
  },
@@ -22,6 +23,7 @@
22
23
  "stepwise-migrations": "dist/index.js"
23
24
  },
24
25
  "dependencies": {
26
+ "git-diff": "^2.0.6",
25
27
  "pg": "^8.13.1",
26
28
  "yargs": "^17.7.2"
27
29
  }
package/src/db.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import pg, { Pool, PoolClient } from "pg";
2
- import { MigrationRow } from "./types";
2
+ import { AuditRow, MigrationRow } from "./types";
3
3
 
4
4
  pg.types.setTypeParser(1114, function (stringValue) {
5
5
  return stringValue; //1114 for time without timezone type
@@ -61,19 +61,48 @@ export const dbCreateSchema = async (client: PoolClient, schema: string) => {
61
61
  console.log(`done!`);
62
62
  };
63
63
 
64
+ export const dbAuditHistory = async (client: PoolClient, schema: string) => {
65
+ const auditQuery = await client.query(
66
+ `SELECT * FROM ${schema}.stepwise_audit`
67
+ );
68
+ return auditQuery.rows as AuditRow[];
69
+ };
70
+
64
71
  export const dbCreateHistoryTable = async (
65
72
  client: PoolClient,
66
73
  schema: string
67
74
  ) => {
68
75
  process.stdout.write(`Creating migration history table... `);
69
76
  await client.query(
70
- `CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migrations (
71
- id SERIAL PRIMARY KEY,
72
- name TEXT UNIQUE NOT NULL,
73
- hash TEXT NOT NULL,
74
- applied_by TEXT NOT NULL DEFAULT current_user,
75
- applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
76
- )`
77
+ `
78
+ CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migrations (
79
+ id SERIAL PRIMARY KEY,
80
+ name TEXT UNIQUE NOT NULL,
81
+ script TEXT NOT NULL,
82
+ applied_by TEXT NOT NULL DEFAULT current_user,
83
+ applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
84
+ );
85
+ CREATE TABLE IF NOT EXISTS ${schema}.stepwise_audit (
86
+ id SERIAL PRIMARY KEY,
87
+ type TEXT NOT NULL,
88
+ name TEXT UNIQUE NOT NULL,
89
+ script TEXT NOT NULL,
90
+ applied_by TEXT NOT NULL DEFAULT current_user,
91
+ applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
92
+ );
93
+ `
77
94
  );
78
95
  console.log(`done!`);
79
96
  };
97
+
98
+ export const dbGetScript = async (
99
+ client: PoolClient,
100
+ schema: string,
101
+ filename: string
102
+ ) => {
103
+ const script = await client.query(
104
+ `SELECT script FROM ${schema}.stepwise_audit WHERE name = $1`,
105
+ [filename]
106
+ );
107
+ return script.rows[0].script;
108
+ };
package/src/index.ts CHANGED
@@ -2,9 +2,11 @@
2
2
 
3
3
  import yargs from "yargs";
4
4
  import {
5
+ dbAuditHistory,
5
6
  dbConnect,
6
7
  dbCreateHistoryTable,
7
8
  dbCreateSchema,
9
+ dbGetScript,
8
10
  dbHistorySchemaExists,
9
11
  dbMigrationHistory,
10
12
  dbTableExists,
@@ -16,8 +18,11 @@ import {
16
18
  validateMigrationFiles,
17
19
  } from "./migrate";
18
20
  import {
21
+ printMigrationHistory,
22
+ printMigrationHistoryAndUnappliedMigrations,
19
23
  readDownMigrationFiles,
20
24
  readMigrationFiles,
25
+ usage,
21
26
  validateArgs,
22
27
  } from "./utils";
23
28
 
@@ -47,18 +52,26 @@ const main = async () => {
47
52
 
48
53
  validateMigrationFiles(migrationFiles, migrationHistory);
49
54
 
55
+ if (migrationFiles.length === migrationHistory.length) {
56
+ console.log("All migrations are already applied");
57
+ process.exit(0);
58
+ }
59
+
50
60
  const migrationsToApply = migrationFiles.slice(
51
61
  migrationHistory.length,
52
62
  migrationHistory.length + nUp
53
63
  );
54
64
 
55
- for (const { filename, contents, hash } of migrationsToApply) {
56
- await applyMigration(client, schema, filename, contents, hash);
65
+ for (const { filename, script } of migrationsToApply) {
66
+ await applyMigration(client, schema, filename, script);
57
67
  }
58
68
 
59
69
  console.log(`All done! Applied ${migrationsToApply.length} migrations`);
60
- console.log("New migration history:");
61
- console.table(await dbMigrationHistory(client, schema));
70
+
71
+ printMigrationHistoryAndUnappliedMigrations(
72
+ await readMigrationFiles(argv.path),
73
+ await dbMigrationHistory(client, schema)
74
+ );
62
75
  } else if (command === "info") {
63
76
  if (!historySchemaExists) {
64
77
  console.log("Schema does not exist");
@@ -69,9 +82,29 @@ const main = async () => {
69
82
  }
70
83
 
71
84
  if (historySchemaExists && tableExists) {
72
- console.log("Migration history:");
73
- console.table(await dbMigrationHistory(client, schema));
85
+ printMigrationHistory(await dbMigrationHistory(client, schema));
86
+ }
87
+ } else if (command === "validate") {
88
+ if (!historySchemaExists) {
89
+ console.log("Schema does not exist");
90
+ }
91
+
92
+ if (!tableExists) {
93
+ console.log("Migration history table does not exist");
94
+ }
95
+
96
+ if (historySchemaExists && tableExists) {
97
+ validateMigrationFiles(
98
+ await readMigrationFiles(argv.path),
99
+ await dbMigrationHistory(client, schema)
100
+ );
74
101
  }
102
+ console.log("Validation passed");
103
+
104
+ printMigrationHistoryAndUnappliedMigrations(
105
+ await readMigrationFiles(argv.path),
106
+ await dbMigrationHistory(client, schema)
107
+ );
75
108
  } else if (command === "drop") {
76
109
  process.stdout.write(
77
110
  `Dropping the tables, schema and migration history table... `
@@ -98,18 +131,38 @@ const main = async () => {
98
131
  downMigrationFilesToApply,
99
132
  reverseMigrationHistory
100
133
  );
101
- for (const {
102
- filename,
103
- contents,
104
- upFilename,
105
- } of downMigrationFilesToApply) {
106
- await applyDownMigration(client, schema, filename, contents, upFilename);
134
+ for (const { filename, script, upFilename } of downMigrationFilesToApply) {
135
+ await applyDownMigration(client, schema, filename, script, upFilename);
107
136
  }
108
137
  console.log(
109
- `All done! Applied ${downMigrationFilesToApply.length} down migrations`
138
+ `All done! Applied ${downMigrationFilesToApply.length} down migration${
139
+ downMigrationFilesToApply.length === 1 ? "" : "s"
140
+ }`
141
+ );
142
+
143
+ printMigrationHistoryAndUnappliedMigrations(
144
+ await readMigrationFiles(argv.path),
145
+ await dbMigrationHistory(client, schema)
146
+ );
147
+ } else if (command === "audit") {
148
+ const auditHistory = await dbAuditHistory(client, schema);
149
+ console.log("Audit history:");
150
+ console.table(
151
+ auditHistory.map((row) => ({
152
+ id: row.id,
153
+ type: row.type,
154
+ name: row.name,
155
+ applied_by: row.applied_by,
156
+ applied_at: row.applied_at,
157
+ }))
110
158
  );
111
- console.log("New migration history:");
112
- console.table(await dbMigrationHistory(client, schema));
159
+ } else if (command === "get-script") {
160
+ const script = await dbGetScript(client, schema, argv.filename);
161
+ console.log(script);
162
+ } else {
163
+ console.error(`Invalid command: ${argv._[0]}`);
164
+ console.log(usage);
165
+ process.exit(1);
113
166
  }
114
167
 
115
168
  client.release();
package/src/migrate.ts CHANGED
@@ -1,8 +1,9 @@
1
+ import gitDiff from "git-diff";
1
2
  import { PoolClient } from "pg";
2
3
  import { MigrationRow } from "./types";
3
4
 
4
5
  export const validateMigrationFiles = (
5
- migrationFiles: { fullFilePath: string; filename: string; hash: string }[],
6
+ migrationFiles: { fullFilePath: string; filename: string; script: string }[],
6
7
  migrationHistory: MigrationRow[],
7
8
  isUp: boolean = true
8
9
  ) => {
@@ -18,13 +19,8 @@ export const validateMigrationFiles = (
18
19
  process.exit(1);
19
20
  }
20
21
 
21
- if (migrationFiles.length === migrationHistory.length && isUp) {
22
- console.log("All migrations are already applied");
23
- process.exit(0);
24
- }
25
-
26
22
  for (let i = 0; i < migrationFiles.length; i++) {
27
- const { filename, hash: migrationHash } = migrationFiles[i];
23
+ const { filename, script: migrationScript } = migrationFiles[i];
28
24
  if (i >= migrationHistory.length) {
29
25
  continue;
30
26
  }
@@ -32,10 +28,18 @@ export const validateMigrationFiles = (
32
28
  console.error(`Error: migration ${filename} has been renamed, aborting.`);
33
29
  process.exit(1);
34
30
  }
35
- if (migrationHistory[i].hash !== migrationHash) {
31
+ if (migrationHistory[i].script !== migrationScript) {
36
32
  console.error(
37
33
  `Error: migration ${filename} has been modified, aborting.`
38
34
  );
35
+
36
+ console.log(
37
+ gitDiff(migrationHistory[i].script, migrationScript, {
38
+ color: true,
39
+ noHeaders: true,
40
+ })
41
+ );
42
+
39
43
  process.exit(1);
40
44
  }
41
45
  }
@@ -45,8 +49,7 @@ export const applyMigration = async (
45
49
  client: PoolClient,
46
50
  schema: string,
47
51
  filename: string,
48
- contents: string,
49
- hash: string
52
+ script: string
50
53
  ) => {
51
54
  try {
52
55
  process.stdout.write(`Applying migration ${filename}... `);
@@ -54,12 +57,17 @@ export const applyMigration = async (
54
57
 
55
58
  await client.query(
56
59
  `SET search_path TO ${schema};
57
- ${contents.toString()}`
60
+ ${script.toString()}`
58
61
  );
59
62
 
60
63
  await client.query(
61
- `INSERT INTO ${schema}.stepwise_migrations (name, hash) VALUES ($1, $2)`,
62
- [filename, hash]
64
+ `INSERT INTO ${schema}.stepwise_migrations (name, script) VALUES ($1, $2)`,
65
+ [filename, script]
66
+ );
67
+
68
+ await client.query(
69
+ `INSERT INTO ${schema}.stepwise_audit (type, name, script) VALUES ($1, $2, $3)`,
70
+ ["up", filename, script]
63
71
  );
64
72
 
65
73
  await client.query("COMMIT");
@@ -98,7 +106,7 @@ export const applyDownMigration = async (
98
106
  client: PoolClient,
99
107
  schema: string,
100
108
  filename: string,
101
- contents: string,
109
+ script: string,
102
110
  upFilename: string
103
111
  ) => {
104
112
  try {
@@ -107,7 +115,7 @@ export const applyDownMigration = async (
107
115
 
108
116
  await client.query(
109
117
  `SET search_path TO ${schema};
110
- ${contents.toString()}`
118
+ ${script.toString()}`
111
119
  );
112
120
 
113
121
  await client.query(
@@ -115,6 +123,11 @@ export const applyDownMigration = async (
115
123
  [upFilename]
116
124
  );
117
125
 
126
+ await client.query(
127
+ `INSERT INTO ${schema}.stepwise_audit (type, name, script) VALUES ($1, $2, $3)`,
128
+ ["down", filename, script]
129
+ );
130
+
118
131
  await client.query("COMMIT");
119
132
 
120
133
  console.log(`done!`);
package/src/types.ts CHANGED
@@ -1,7 +1,9 @@
1
1
  export interface MigrationRow {
2
2
  id: string;
3
3
  name: string;
4
- hash: string;
4
+ script: string;
5
5
  applied_by: string;
6
6
  applied_at: string;
7
7
  }
8
+
9
+ export type AuditRow = MigrationRow & { type: "up" | "down" };
package/src/utils.ts CHANGED
@@ -1,12 +1,7 @@
1
- import crypto from "crypto";
2
1
  import fs from "fs/promises";
3
2
  import path from "path";
4
3
  import { MigrationRow } from "./types";
5
4
 
6
- export const calculateHash = (contents: string) => {
7
- return crypto.createHash("sha256").update(contents).digest("hex");
8
- };
9
-
10
5
  export const usage = `
11
6
  Usage: stepwise-migrations [command] [options]
12
7
 
@@ -50,16 +45,6 @@ export const validateArgs = (argv: any) => {
50
45
  console.log(usage);
51
46
  process.exit(1);
52
47
  }
53
- if (
54
- argv._[0] !== "migrate" &&
55
- argv._[0] !== "info" &&
56
- argv._[0] !== "drop" &&
57
- argv._[0] !== "down"
58
- ) {
59
- console.error(`Invalid command: ${argv._[0]}`);
60
- console.log(usage);
61
- process.exit(1);
62
- }
63
48
  };
64
49
 
65
50
  export const readMigrationFiles = async (directory: string) => {
@@ -77,23 +62,54 @@ export const readMigrationFiles = async (directory: string) => {
77
62
  type: "up";
78
63
  fullFilePath: string;
79
64
  filename: string;
80
- hash: string;
81
- contents: string;
65
+ script: string;
82
66
  }[] = [];
83
67
  for (const fullFilePath of migrationFiles) {
84
- const contents = await fs.readFile(fullFilePath, "utf8");
68
+ const script = await fs.readFile(fullFilePath, "utf8");
85
69
 
86
70
  results.push({
87
71
  type: "up",
88
72
  fullFilePath,
89
73
  filename: path.basename(fullFilePath),
90
- hash: calculateHash(contents),
91
- contents,
74
+ script,
92
75
  });
93
76
  }
94
77
  return results;
95
78
  };
96
79
 
80
+ export const printMigrationHistoryAndUnappliedMigrations = (
81
+ migrationFiles: { filename: string }[],
82
+ migrationHistory: MigrationRow[]
83
+ ) => {
84
+ console.log("Migration history:");
85
+ console.table(
86
+ migrationHistory.map((h) => ({
87
+ id: h.id,
88
+ name: h.name,
89
+ applied_by: h.applied_by,
90
+ applied_at: h.applied_at,
91
+ }))
92
+ );
93
+ console.log("Unapplied migrations:");
94
+ console.table(
95
+ migrationFiles.slice(migrationHistory.length).map((m) => ({
96
+ filename: m.filename,
97
+ }))
98
+ );
99
+ };
100
+
101
+ export const printMigrationHistory = (migrationHistory: MigrationRow[]) => {
102
+ console.log("Migration history:");
103
+ console.table(
104
+ migrationHistory.map((h) => ({
105
+ id: h.id,
106
+ name: h.name,
107
+ applied_by: h.applied_by,
108
+ applied_at: h.applied_at,
109
+ }))
110
+ );
111
+ };
112
+
97
113
  export const fileExists = async (path: string) => {
98
114
  try {
99
115
  return (await fs.stat(path)).isFile();
@@ -112,7 +128,7 @@ export const readDownMigrationFiles = async (
112
128
  filename: string;
113
129
  upFilename: string;
114
130
 
115
- contents: string;
131
+ script: string;
116
132
  }[] = [];
117
133
  for (const migration of migrationHistory) {
118
134
  const fullFilePath = path.join(
@@ -123,13 +139,13 @@ export const readDownMigrationFiles = async (
123
139
  console.error(`Down migration file not found: ${fullFilePath}`);
124
140
  process.exit(1);
125
141
  }
126
- const contents = await fs.readFile(fullFilePath, "utf8");
142
+ const script = await fs.readFile(fullFilePath, "utf8");
127
143
  results.push({
128
144
  type: "down",
129
145
  fullFilePath,
130
146
  filename: path.basename(fullFilePath),
131
147
  upFilename: migration.name,
132
- contents,
148
+ script,
133
149
  });
134
150
  }
135
151
  return results;