stepwise-migrations 1.0.4 → 1.0.6

package/README.md CHANGED
@@ -1,17 +1,52 @@
  # Stepwise Migrations

+ [![npm version](https://badge.fury.io/js/stepwise-migrations.svg?icon=si%3Anpm)](https://badge.fury.io/js/stepwise-migrations)
+
  A tool for managing Raw SQL migrations in a Postgres database.
  Loosely based on flyway.
- Only "up" migrations are supported so far, but what more do you need?

  ## Notes

- All files ending in `.sql` in the migration directory will be applied.
+ Name up migration files with a `.sql` extension and down migration files with the same name but suffixed with `.down.sql`,
+ e.g. `v1_users.sql` and `v1_users.down.sql`.
+ Down migrations are optional.
+
  They are first sorted in ascending order based on filename.
  No subdirectories are read below the migration directory.
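As an illustration of the naming convention above, here is a minimal sketch that creates one such up/down pair for a hypothetical `users` table (the directory matches the `--path` value used in the examples below):

```bash
# Hypothetical example pair: v1_users.sql is the up migration,
# v1_users.down.sql is its optional rollback.
mkdir -p db/migration

cat > db/migration/v1_users.sql <<'SQL'
CREATE TABLE users (
  id SERIAL PRIMARY KEY,
  email TEXT UNIQUE NOT NULL
);
SQL

cat > db/migration/v1_users.down.sql <<'SQL'
DROP TABLE users;
SQL
```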

  ## Usage

+ ```
+ Usage: stepwise-migrations [command] [options]
+
+ Commands:
+ migrate
+ Migrate the database to the latest version
+ down
+ Rollback the database to the previous version
+ info
+ Show information about the current state of the migrations in the database
+ drop
+ Drop all tables, schema and migration history table
+
+ Options:
+ --connection <connection> The connection string to use to connect to the database
+ --schema <schema> The schema to use for the migrations
+ --path <path> The path to the migrations directory
+ --ssl true/false Whether to use SSL for the connection (default: false)
+ --nup Number of up migrations to apply (default: all)
+ --ndown Number of down migrations to apply (default: 1)
+
+ Example:
+ npx stepwise-migrations \
+ --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydatabase \
+ --schema=myschema \
+ --path=./db/migration/ \
+ migrate
+ ```
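For instance, the `--nup` option listed above limits how many pending up migrations are applied in one run; a sketch with placeholder connection details:

```bash
# Apply only the next pending up migration instead of all of them.
npx stepwise-migrations migrate \
  --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydatabase \
  --schema=myschema \
  --path=./db/migration/ \
  --nup=1
```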
+
+ ## Examples
+
  ### Migrate

  Command:
@@ -34,8 +69,33 @@ Migration history table created
  Found 2 migration files
  Applied migration V0_01__connect_session_table.sql
  Applied migration V0_02__auth.sql
+ All done!
+ ```
+
+ ### Down

+ Command:
+
+ ```bash
+ npx stepwise-migrations down \
+ --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydb \
+ --schema=myschema \
+ --path=./db/migration/
+ ```
+
+ Outputs:
+
+ ```
+
+ Connected to the database
+ Applied down migration v2_auth.down.sql
  All done!
+ New migration history:
+ ┌─────────┬────┬────────────────────────────────┬────────────────────────────────────────────────────────────────────┬────────────┬──────────────────────────────┐
+ │ (index) │ id │ name │ hash │ applied_by │ applied_at │
+ ├─────────┼────┼────────────────────────────────┼────────────────────────────────────────────────────────────────────┼────────────┼──────────────────────────────┤
+ │ 0 │ 1 │ 'v1_connect_session_table.sql' │ 'f08638e58139ae0e2dda24b1bdba29f3f2128597066a23d2bb382d448bbe9d7e' │ 'postgres' │ '2024-11-23 18:13:36.518495' │
+ └─────────┴────┴────────────────────────────────┴────────────────────────────────────────────────────────────────────┴────────────┴──────────────────────────────┘
  ```
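Similarly, the `--ndown` option (default: 1) controls how many of the most recently applied migrations are rolled back in one run; a sketch with the same placeholder connection details:

```bash
# Roll back the two most recently applied migrations (newest first),
# using their matching .down.sql files.
npx stepwise-migrations down \
  --connection=postgresql://postgres:postgres@127.0.0.1:5432/mydb \
  --schema=myschema \
  --path=./db/migration/ \
  --ndown=2
```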

  ### Info
@@ -76,6 +136,5 @@ Outputs:
  ```
  Connected to the database
  Dropping the tables, schema and migration history table
-
  All done!
  ```
package/dist/db.js CHANGED
@@ -93,7 +93,7 @@ const dbCreateHistoryTable = (client, schema) => __awaiter(void 0, void 0, void
  console.log(`Creating migration history table`);
  yield client.query(`CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migrations (
  id SERIAL PRIMARY KEY,
- name TEXT NOT NULL,
+ name TEXT UNIQUE NOT NULL,
  hash TEXT NOT NULL,
  applied_by TEXT NOT NULL DEFAULT current_user,
  applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
package/dist/index.js CHANGED
@@ -26,6 +26,7 @@ const main = () => __awaiter(void 0, void 0, void 0, function* () {
  const historySchemaExists = yield (0, db_1.dbHistorySchemaExists)(client, schema);
  const tableExists = yield (0, db_1.dbTableExists)(client, schema);
  if (command === "migrate") {
+ const nUp = argv.nup || Infinity;
  if (!historySchemaExists) {
  yield (0, db_1.dbCreateSchema)(client, schema);
  }
@@ -36,11 +37,13 @@ const main = () => __awaiter(void 0, void 0, void 0, function* () {
  const migrationFiles = yield (0, utils_1.readMigrationFiles)(argv.path);
  console.log(`Found ${migrationFiles.length} migration files`);
  (0, migrate_1.validateMigrationFiles)(migrationFiles, migrationHistory);
- const migrationsToApply = migrationFiles.slice(migrationHistory.length);
+ const migrationsToApply = migrationFiles.slice(migrationHistory.length, migrationHistory.length + nUp);
  for (const { filename, contents, hash } of migrationsToApply) {
  yield (0, migrate_1.applyMigration)(client, schema, filename, contents, hash);
  }
- console.log("\nAll done!");
+ console.log("All done!");
+ console.log("New migration history:");
+ console.table(yield (0, db_1.dbMigrationHistory)(client, schema));
  }
  else if (command === "info") {
  console.log("Showing information about the current state of the migrations in the database");
@@ -54,7 +57,21 @@ const main = () => __awaiter(void 0, void 0, void 0, function* () {
  else if (command === "drop") {
  console.log("Dropping the tables, schema and migration history table");
  yield client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
- console.log("\nAll done!");
+ console.log("All done!");
+ }
+ else if (command === "down") {
+ const nDown = argv.ndown || 1;
+ const migrationHistory = yield (0, db_1.dbMigrationHistory)(client, schema);
+ (0, migrate_1.validateMigrationFiles)(yield (0, utils_1.readMigrationFiles)(argv.path), migrationHistory, false);
+ const reverseMigrationHistory = migrationHistory.reverse().slice(0, nDown);
+ const downMigrationFilesToApply = yield (0, utils_1.readDownMigrationFiles)(argv.path, reverseMigrationHistory);
+ (0, migrate_1.validateDownMigrationFiles)(downMigrationFilesToApply, reverseMigrationHistory);
+ for (const { filename, contents, upFilename, } of downMigrationFilesToApply) {
+ yield (0, migrate_1.applyDownMigration)(client, schema, filename, contents, upFilename);
+ }
+ console.log("All done!");
+ console.log("New migration history:");
+ console.table(yield (0, db_1.dbMigrationHistory)(client, schema));
  }
  client.release();
  process.exit(0);
package/dist/migrate.js CHANGED
@@ -9,8 +9,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
  });
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.applyMigration = exports.validateMigrationFiles = void 0;
- const validateMigrationFiles = (migrationFiles, migrationHistory) => {
+ exports.applyDownMigration = exports.validateDownMigrationFiles = exports.applyMigration = exports.validateMigrationFiles = void 0;
+ const validateMigrationFiles = (migrationFiles, migrationHistory, isUp = true) => {
  if (migrationFiles.length === 0) {
  console.log("No migrations found");
  process.exit(0);
@@ -19,7 +19,7 @@ const validateMigrationFiles = (migrationFiles, migrationHistory) => {
  console.error("Error: migration history is longer than the number of migration files, aborting.");
  process.exit(1);
  }
- if (migrationFiles.length === migrationHistory.length) {
+ if (migrationFiles.length === migrationHistory.length && isUp) {
  console.log("All migrations are already applied");
  process.exit(0);
  }
@@ -60,3 +60,35 @@ const applyMigration = (client, schema, filename, contents, hash) => __awaiter(v
  }
  });
  exports.applyMigration = applyMigration;
+ const validateDownMigrationFiles = (downMigrationFilesToApply, reverseMigrationHistory) => {
+ for (let i = 0; i < downMigrationFilesToApply.length; i++) {
+ const { filename } = downMigrationFilesToApply[i];
+ if (filename.split(".down.sql")[0] !==
+ reverseMigrationHistory[i].name.split(".sql")[0]) {
+ console.error(`Migration ${filename} does not match the expected migration ${reverseMigrationHistory[i].name}`);
+ process.exit(1);
+ }
+ }
+ };
+ exports.validateDownMigrationFiles = validateDownMigrationFiles;
+ const applyDownMigration = (client, schema, filename, contents, upFilename) => __awaiter(void 0, void 0, void 0, function* () {
+ try {
+ yield client.query("BEGIN");
+ yield client.query(`SET search_path TO ${schema};
+ ${contents.toString()}`);
+ yield client.query(`DELETE FROM ${schema}.stepwise_migrations WHERE name = $1`, [upFilename]);
+ yield client.query("COMMIT");
+ console.log(`Applied down migration ${filename}`);
+ }
+ catch (error) {
+ try {
+ yield client.query("ROLLBACK");
+ }
+ catch (error) {
+ console.error("Error rolling back transaction", error);
+ }
+ console.error("Error applying down migration", error);
+ process.exit(1);
+ }
+ });
+ exports.applyDownMigration = applyDownMigration;
package/dist/utils.js CHANGED
@@ -12,21 +12,22 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.readMigrationFiles = exports.validateArgs = exports.usage = exports.hashFile = void 0;
+ exports.readDownMigrationFiles = exports.fileExists = exports.readMigrationFiles = exports.validateArgs = exports.usage = exports.calculateHash = void 0;
  const crypto_1 = __importDefault(require("crypto"));
  const promises_1 = __importDefault(require("fs/promises"));
  const path_1 = __importDefault(require("path"));
- const hashFile = (path) => __awaiter(void 0, void 0, void 0, function* () {
- const file = yield promises_1.default.readFile(path);
- return crypto_1.default.createHash("sha256").update(file).digest("hex");
- });
- exports.hashFile = hashFile;
+ const calculateHash = (contents) => {
+ return crypto_1.default.createHash("sha256").update(contents).digest("hex");
+ };
+ exports.calculateHash = calculateHash;
  exports.usage = `
  Usage: stepwise-migrations [command] [options]

  Commands:
  migrate
  Migrate the database to the latest version
+ down
+ Rollback the database to the previous version
  info
  Show information about the current state of the migrations in the database
  drop
@@ -37,6 +38,8 @@ Options:
  --schema <schema> The schema to use for the migrations
  --path <path> The path to the migrations directory
  --ssl true/false Whether to use SSL for the connection (default: false)
+ --nup Number of up migrations to apply (default: all)
+ --ndown Number of down migrations to apply (default: 1)

  Example:
  npx stepwise-migrations \
@@ -57,7 +60,10 @@ const validateArgs = (argv) => {
  console.log(exports.usage);
  process.exit(1);
  }
- if (argv._[0] !== "migrate" && argv._[0] !== "info" && argv._[0] !== "drop") {
+ if (argv._[0] !== "migrate" &&
+ argv._[0] !== "info" &&
+ argv._[0] !== "drop" &&
+ argv._[0] !== "down") {
  console.error(`Invalid command: ${argv._[0]}`);
  console.log(exports.usage);
  process.exit(1);
@@ -67,20 +73,51 @@ exports.validateArgs = validateArgs;
  const readMigrationFiles = (directory) => __awaiter(void 0, void 0, void 0, function* () {
  const files = yield promises_1.default.readdir(directory, { withFileTypes: true });
  const migrationFiles = files
- .filter((file) => file.isFile() && file.name.endsWith(".sql"))
+ .filter((file) => file.isFile() &&
+ file.name.endsWith(".sql") &&
+ !file.name.endsWith(".down.sql"))
  .map((file) => path_1.default.join(directory, file.name));
  migrationFiles.sort();
  const results = [];
  for (const fullFilePath of migrationFiles) {
- const hash = yield (0, exports.hashFile)(fullFilePath);
  const contents = yield promises_1.default.readFile(fullFilePath, "utf8");
  results.push({
+ type: "up",
  fullFilePath,
  filename: path_1.default.basename(fullFilePath),
- hash,
+ hash: (0, exports.calculateHash)(contents),
  contents,
  });
  }
  return results;
  });
  exports.readMigrationFiles = readMigrationFiles;
+ const fileExists = (path) => __awaiter(void 0, void 0, void 0, function* () {
+ try {
+ return (yield promises_1.default.stat(path)).isFile();
+ }
+ catch (error) {
+ return false;
+ }
+ });
+ exports.fileExists = fileExists;
+ const readDownMigrationFiles = (directory, migrationHistory) => __awaiter(void 0, void 0, void 0, function* () {
+ const results = [];
+ for (const migration of migrationHistory) {
+ const fullFilePath = path_1.default.join(directory, `${migration.name.split(".sql")[0]}.down.sql`);
+ if (!(yield (0, exports.fileExists)(fullFilePath))) {
+ console.error(`Down migration file not found: ${fullFilePath}`);
+ process.exit(1);
+ }
+ const contents = yield promises_1.default.readFile(fullFilePath, "utf8");
+ results.push({
+ type: "down",
+ fullFilePath,
+ filename: path_1.default.basename(fullFilePath),
+ upFilename: migration.name,
+ contents,
+ });
+ }
+ return results;
+ });
+ exports.readDownMigrationFiles = readDownMigrationFiles;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "stepwise-migrations",
- "version": "1.0.4",
+ "version": "1.0.6",
  "description": "",
  "main": "index.js",
  "scripts": {
package/src/db.ts CHANGED
@@ -1,4 +1,5 @@
  import pg, { Pool, PoolClient } from "pg";
+ import { MigrationRow } from "./types";

  pg.types.setTypeParser(1114, function (stringValue) {
  return stringValue; //1114 for time without timezone type
@@ -52,7 +53,7 @@ export const dbMigrationHistory = async (
  const migrationsQuery = await client.query(
  `SELECT * FROM ${schema}.stepwise_migrations`
  );
- return migrationsQuery.rows;
+ return migrationsQuery.rows as MigrationRow[];
  };

  export const dbCreateSchema = async (client: PoolClient, schema: string) => {
@@ -69,7 +70,7 @@ export const dbCreateHistoryTable = async (
  await client.query(
  `CREATE TABLE IF NOT EXISTS ${schema}.stepwise_migrations (
  id SERIAL PRIMARY KEY,
- name TEXT NOT NULL,
+ name TEXT UNIQUE NOT NULL,
  hash TEXT NOT NULL,
  applied_by TEXT NOT NULL DEFAULT current_user,
  applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
package/src/index.ts CHANGED
@@ -9,8 +9,17 @@ import {
  dbMigrationHistory,
  dbTableExists,
  } from "./db";
- import { applyMigration, validateMigrationFiles } from "./migrate";
- import { readMigrationFiles, validateArgs } from "./utils";
+ import {
+ applyDownMigration,
+ applyMigration,
+ validateDownMigrationFiles,
+ validateMigrationFiles,
+ } from "./migrate";
+ import {
+ readDownMigrationFiles,
+ readMigrationFiles,
+ validateArgs,
+ } from "./utils";

  const main = async () => {
  const argv: any = yargs(process.argv.slice(2)).argv;
@@ -25,6 +34,7 @@ const main = async () => {
  const tableExists = await dbTableExists(client, schema);

  if (command === "migrate") {
+ const nUp = argv.nup || Infinity;
  if (!historySchemaExists) {
  await dbCreateSchema(client, schema);
  }
@@ -38,12 +48,18 @@ const main = async () => {

  validateMigrationFiles(migrationFiles, migrationHistory);

- const migrationsToApply = migrationFiles.slice(migrationHistory.length);
+ const migrationsToApply = migrationFiles.slice(
+ migrationHistory.length,
+ migrationHistory.length + nUp
+ );

  for (const { filename, contents, hash } of migrationsToApply) {
  await applyMigration(client, schema, filename, contents, hash);
  }
- console.log("\nAll done!");
+
+ console.log("All done!");
+ console.log("New migration history:");
+ console.table(await dbMigrationHistory(client, schema));
  } else if (command === "info") {
  console.log(
  "Showing information about the current state of the migrations in the database"
@@ -61,7 +77,37 @@ const main = async () => {
  } else if (command === "drop") {
  console.log("Dropping the tables, schema and migration history table");
  await client.query(`DROP SCHEMA IF EXISTS ${schema} CASCADE`);
- console.log("\nAll done!");
+ console.log("All done!");
+ } else if (command === "down") {
+ const nDown = argv.ndown || 1;
+
+ const migrationHistory = await dbMigrationHistory(client, schema);
+ validateMigrationFiles(
+ await readMigrationFiles(argv.path),
+ migrationHistory,
+ false
+ );
+
+ const reverseMigrationHistory = migrationHistory.reverse().slice(0, nDown);
+ const downMigrationFilesToApply = await readDownMigrationFiles(
+ argv.path,
+ reverseMigrationHistory
+ );
+
+ validateDownMigrationFiles(
+ downMigrationFilesToApply,
+ reverseMigrationHistory
+ );
+ for (const {
+ filename,
+ contents,
+ upFilename,
+ } of downMigrationFilesToApply) {
+ await applyDownMigration(client, schema, filename, contents, upFilename);
+ }
+ console.log("All done!");
+ console.log("New migration history:");
+ console.table(await dbMigrationHistory(client, schema));
  }

  client.release();
package/src/migrate.ts CHANGED
@@ -3,7 +3,8 @@ import { MigrationRow } from "./types";

  export const validateMigrationFiles = (
  migrationFiles: { fullFilePath: string; filename: string; hash: string }[],
- migrationHistory: MigrationRow[]
+ migrationHistory: MigrationRow[],
+ isUp: boolean = true
  ) => {
  if (migrationFiles.length === 0) {
  console.log("No migrations found");
@@ -17,7 +18,7 @@ export const validateMigrationFiles = (
  process.exit(1);
  }

- if (migrationFiles.length === migrationHistory.length) {
+ if (migrationFiles.length === migrationHistory.length && isUp) {
  console.log("All migrations are already applied");
  process.exit(0);
  }
@@ -73,3 +74,55 @@ export const applyMigration = async (
  process.exit(1);
  }
  };
+
+ export const validateDownMigrationFiles = (
+ downMigrationFilesToApply: { filename: string }[],
+ reverseMigrationHistory: MigrationRow[]
+ ) => {
+ for (let i = 0; i < downMigrationFilesToApply.length; i++) {
+ const { filename } = downMigrationFilesToApply[i];
+ if (
+ filename.split(".down.sql")[0] !==
+ reverseMigrationHistory[i].name.split(".sql")[0]
+ ) {
+ console.error(
+ `Migration ${filename} does not match the expected migration ${reverseMigrationHistory[i].name}`
+ );
+ process.exit(1);
+ }
+ }
+ };
+
+ export const applyDownMigration = async (
+ client: PoolClient,
+ schema: string,
+ filename: string,
+ contents: string,
+ upFilename: string
+ ) => {
+ try {
+ await client.query("BEGIN");
+
+ await client.query(
+ `SET search_path TO ${schema};
+ ${contents.toString()}`
+ );
+
+ await client.query(
+ `DELETE FROM ${schema}.stepwise_migrations WHERE name = $1`,
+ [upFilename]
+ );
+
+ await client.query("COMMIT");
+
+ console.log(`Applied down migration ${filename}`);
+ } catch (error) {
+ try {
+ await client.query("ROLLBACK");
+ } catch (error) {
+ console.error("Error rolling back transaction", error);
+ }
+ console.error("Error applying down migration", error);
+ process.exit(1);
+ }
+ };
package/src/utils.ts CHANGED
@@ -1,10 +1,10 @@
  import crypto from "crypto";
  import fs from "fs/promises";
  import path from "path";
+ import { MigrationRow } from "./types";

- export const hashFile = async (path: string) => {
- const file = await fs.readFile(path);
- return crypto.createHash("sha256").update(file).digest("hex");
+ export const calculateHash = (contents: string) => {
+ return crypto.createHash("sha256").update(contents).digest("hex");
  };

  export const usage = `
@@ -13,6 +13,8 @@ Usage: stepwise-migrations [command] [options]
  Commands:
  migrate
  Migrate the database to the latest version
+ down
+ Rollback the database to the previous version
  info
  Show information about the current state of the migrations in the database
  drop
@@ -23,6 +25,8 @@ Options:
  --schema <schema> The schema to use for the migrations
  --path <path> The path to the migrations directory
  --ssl true/false Whether to use SSL for the connection (default: false)
+ --nup Number of up migrations to apply (default: all)
+ --ndown Number of down migrations to apply (default: 1)

  Example:
  npx stepwise-migrations \
@@ -47,7 +51,12 @@ export const validateArgs = (argv: any) => {
  console.log(usage);
  process.exit(1);
  }
- if (argv._[0] !== "migrate" && argv._[0] !== "info" && argv._[0] !== "drop") {
+ if (
+ argv._[0] !== "migrate" &&
+ argv._[0] !== "info" &&
+ argv._[0] !== "drop" &&
+ argv._[0] !== "down"
+ ) {
  console.error(`Invalid command: ${argv._[0]}`);
  console.log(usage);
  process.exit(1);
@@ -57,22 +66,70 @@ export const validateArgs = (argv: any) => {
  export const readMigrationFiles = async (directory: string) => {
  const files = await fs.readdir(directory, { withFileTypes: true });
  const migrationFiles = files
- .filter((file) => file.isFile() && file.name.endsWith(".sql"))
+ .filter(
+ (file) =>
+ file.isFile() &&
+ file.name.endsWith(".sql") &&
+ !file.name.endsWith(".down.sql")
+ )
  .map((file) => path.join(directory, file.name));
  migrationFiles.sort();
  const results: {
+ type: "up";
  fullFilePath: string;
  filename: string;
  hash: string;
  contents: string;
  }[] = [];
  for (const fullFilePath of migrationFiles) {
- const hash = await hashFile(fullFilePath);
+ const contents = await fs.readFile(fullFilePath, "utf8");
+
+ results.push({
+ type: "up",
+ fullFilePath,
+ filename: path.basename(fullFilePath),
+ hash: calculateHash(contents),
+ contents,
+ });
+ }
+ return results;
+ };
+
+ export const fileExists = async (path: string) => {
+ try {
+ return (await fs.stat(path)).isFile();
+ } catch (error) {
+ return false;
+ }
+ };
+
+ export const readDownMigrationFiles = async (
+ directory: string,
+ migrationHistory: MigrationRow[]
+ ) => {
+ const results: {
+ type: "down";
+ fullFilePath: string;
+ filename: string;
+ upFilename: string;
+
+ contents: string;
+ }[] = [];
+ for (const migration of migrationHistory) {
+ const fullFilePath = path.join(
+ directory,
+ `${migration.name.split(".sql")[0]}.down.sql`
+ );
+ if (!(await fileExists(fullFilePath))) {
+ console.error(`Down migration file not found: ${fullFilePath}`);
+ process.exit(1);
+ }
  const contents = await fs.readFile(fullFilePath, "utf8");
  results.push({
+ type: "down",
  fullFilePath,
  filename: path.basename(fullFilePath),
- hash,
+ upFilename: migration.name,
  contents,
  });
  }