postgres-schema-migrations 6.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/.editorconfig +9 -0
  2. package/.fitcommitjsrc.yml +22 -0
  3. package/.gitattributes +1 -0
  4. package/.github/workflows/node.js.yml +31 -0
  5. package/.prettierignore +1 -0
  6. package/.prettierrc +5 -0
  7. package/CHANGELOG.md +30 -0
  8. package/LICENSE +21 -0
  9. package/README.md +336 -0
  10. package/ava.config.cjs +4 -0
  11. package/ava.config.integration.cjs +6 -0
  12. package/ava.config.unit.cjs +6 -0
  13. package/dist/bin/validate.d.ts +2 -0
  14. package/dist/bin/validate.js +14 -0
  15. package/dist/create.d.ts +6 -0
  16. package/dist/create.js +64 -0
  17. package/dist/file-name-parser.d.ts +7 -0
  18. package/dist/file-name-parser.js +27 -0
  19. package/dist/files-loader.d.ts +10 -0
  20. package/dist/files-loader.js +39 -0
  21. package/dist/index.d.ts +4 -0
  22. package/dist/index.js +11 -0
  23. package/dist/load-sql-from-js.d.ts +1 -0
  24. package/dist/load-sql-from-js.js +18 -0
  25. package/dist/migrate.d.ts +13 -0
  26. package/dist/migrate.js +158 -0
  27. package/dist/migration-file.d.ts +8 -0
  28. package/dist/migration-file.js +46 -0
  29. package/dist/migrations/0_create-migrations-table.sql +6 -0
  30. package/dist/run-migration.d.ts +2 -0
  31. package/dist/run-migration.js +39 -0
  32. package/dist/types.d.ts +56 -0
  33. package/dist/types.js +6 -0
  34. package/dist/validation.d.ts +5 -0
  35. package/dist/validation.js +28 -0
  36. package/dist/with-connection.d.ts +3 -0
  37. package/dist/with-connection.js +36 -0
  38. package/dist/with-lock.d.ts +2 -0
  39. package/dist/with-lock.js +44 -0
  40. package/package.json +71 -0
  41. package/tsconfig-base.json +17 -0
  42. package/tsconfig-build.json +5 -0
  43. package/tsconfig.json +4 -0
  44. package/tslint.json +42 -0
package/.editorconfig ADDED
@@ -0,0 +1,9 @@
+ root = true
+
+ [*]
+ end_of_line = lf
+ charset = utf-8
+ trim_trailing_whitespace = true
+ insert_final_newline = true
+ indent_style = space
+ indent_size = 2
package/.fitcommitjsrc.yml ADDED
@@ -0,0 +1,22 @@
+ ---
+ validators:
+   lineLength:
+     enabled: true
+     maxLineLength: 72
+     subjectMaxLength: 50
+   ticketCode:
+     enabled: false
+   emptyLines:
+     enabled: true
+     emptyLines: 1
+   tags:
+     enabled: false
+   subjectTense:
+     enabled: true
+   subjectPeriod:
+     enabled: false
+   capitalizedSubject:
+     # to allow npm version commits e.g. 1.0.0
+     enabled: false
+   wip:
+     enabled: true
package/.gitattributes ADDED
@@ -0,0 +1 @@
+ package-lock.json -diff
package/.github/workflows/node.js.yml ADDED
@@ -0,0 +1,31 @@
+ # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
+ # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
+
+ name: Node.js CI
+
+ on:
+   push:
+     branches: [ master ]
+   pull_request:
+     branches: [ master ]
+
+ jobs:
+   build:
+
+     runs-on: ubuntu-latest
+
+     strategy:
+       matrix:
+         node-version: [12.x, 14.x, 16.x]
+         # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
+
+     steps:
+       - uses: actions/checkout@v2
+       - name: Use Node.js ${{ matrix.node-version }}
+         uses: actions/setup-node@v2
+         with:
+           node-version: ${{ matrix.node-version }}
+       - run: npm ci
+       - run: npm run
+       - run: docker pull postgres:9.4
+       - run: npm test
package/.prettierignore ADDED
@@ -0,0 +1 @@
+ dist
package/.prettierrc ADDED
@@ -0,0 +1,5 @@
+ {
+   "semi": false,
+   "trailingComma": "all",
+   "bracketSpacing": false
+ }
package/CHANGELOG.md ADDED
@@ -0,0 +1,30 @@
+ # Changelog
+
+ ## 5.3.0
+
+ - [DEPRECATION] Deprecate `createDb`
+ - Add `ensureDatabaseExists` to check/create database in `migrate`
+
+ ## 5.1.0
+
+ - Validate migration ordering when loading files (instead of when applying migrations)
+ - Expose `loadMigrationFiles` publicly, which can be used to validate files in e.g. a pre-push hook
+ - Add `pg-validate-migrations` bin script
+
+ ## 5.0.0
+
+ - [BREAKING] Update `pg` to version 8. See the [pg changelog](https://github.com/brianc/node-postgres/blob/master/CHANGELOG.md#pg800) for details.
+
+ ## 4.0.0
+
+ - [BREAKING] Updated whole project to TypeScript
+   - some types might differ, no functional change
+   - 21a7ee6
+ - [BREAKING] Increase required Node.js version to v10
+   - 24bf9b7
+ - [BREAKING] Ensure file extension includes `.`
+   - b8ed85a
+ - [BREAKING] Implement advisory locks to manage concurrency
+   - 73b5ade
+ - Optionally accept a `pg` client for database connections
+   - ad81ed9 c246ad3
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2016 Momentum Financial Technology
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,336 @@
+ # Postgres schema migrations
+
+ ![GitHub Actions](https://github.com/ThomWright/postgres-migrations/actions/workflows/node.js.yml/badge.svg)
+ [![npm](https://img.shields.io/npm/v/postgres-migrations.svg)](https://www.npmjs.com/package/postgres-migrations)
+ [![David](https://img.shields.io/david/ThomWright/postgres-migrations.svg)](https://david-dm.org/ThomWright/postgres-migrations)
+ [![David](https://img.shields.io/david/dev/ThomWright/postgres-migrations.svg)](https://david-dm.org/ThomWright/postgres-migrations)
+
+ This is a fork of Thom Wright's [PostgreSQL migration library](https://github.com/ThomWright/postgres-migrations) which allows a schema to be specified, so that separate migrations can be tracked per schema. Using Postgres schema namespaces makes it easy to reuse database code across projects, so it can be helpful to track migrations for different namespaces.
+
+ A PostgreSQL migration library inspired by the Stack Overflow system described in [Nick Craver's blog](http://nickcraver.com/blog/2016/05/03/stack-overflow-how-we-do-deployment-2016-edition/#database-migrations).
+
+ Migrations are defined in sequential SQL files, for example:
+
+ ```text
+ migrations
+ ├ 1_create-table.sql
+ ├ 2_alter-table.sql
+ └ 3_add-index.sql
+ ```
+
+ Requires Node 10.17.0+
+
+ Supports PostgreSQL 9.4+
+
+ ## API
+
+ There are two ways to use the API.
+
+ Either, pass a database connection config object:
+
+ ```typescript
+ import {migrate} from "postgres-migrations"
+
+ async function runMigrations() {
+   const dbConfig = {
+     database: "database-name",
+     user: "postgres",
+     password: "password",
+     host: "localhost",
+     port: 5432,
+
+     // Default: false for backwards-compatibility
+     // This might change!
+     ensureDatabaseExists: true,
+
+     // Default: "postgres"
+     // Used when checking/creating "database-name"
+     defaultDatabase: "postgres"
+   }
+
+   await migrate(dbConfig, "path/to/migration/files")
+ }
+ ```
+
+ Or, pass a `pg` client:
+
+ ```typescript
+ import * as pg from "pg"
+ import {migrate} from "postgres-migrations"
+
+ async function runMigrations() {
+   const dbConfig = {
+     database: "database-name",
+     user: "postgres",
+     password: "password",
+     host: "localhost",
+     port: 5432,
+   }
+
+   // Note: when passing a client, it is assumed that the database already exists
+   const client = new pg.Client(dbConfig) // or a Pool, or a PoolClient
+   await client.connect()
+   try {
+     await migrate({client}, "path/to/migration/files")
+   } finally {
+     await client.end()
+   }
+ }
+ ```
+
+ ### Namespaced migrations
+
+ To track migrations within a given schema, just pass an additional `{schema: "your_schema_name"}` parameter to `migrate`, as in:
+
+ ```typescript
+ try {
+   await migrate({client}, "path/to/schema/migration/files", {schema: "schema_name"})
+ } finally {
+   await client.end()
+ }
+ ```
+
+ Without specifying this parameter, this library will default to the `public` schema.
+
+ ### Validating migration files
+
+ Occasionally, if two people are working on the same codebase independently, they might both create a migration at the same time. For example, `5_add-table.sql` and `5_add-column.sql`. If these both get pushed, there will be a conflict.
+
+ While the migration system will notice this and refuse to apply the migrations, it can be useful to catch this as early as possible.
+
+ The `loadMigrationFiles` function can be used to check if the migration files satisfy the rules.
+
+ Alternatively, use the `pg-validate-migrations` bin script: `pg-validate-migrations "path/to/migration/files"`.
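+
+ For example, a pre-push hook could run a small script like the following sketch. The file name and error handling are illustrative; `loadMigrationFiles` itself is the function this package exports (see `dist/index.d.ts` later in this diff), and it throws if the naming or ordering rules are violated.
+
+ ```typescript
+ // validate-migrations.ts (illustrative script name)
+ import {loadMigrationFiles} from "postgres-migrations"
+
+ async function validateMigrations() {
+   // Throws if a file name is invalid, or if IDs are missing or duplicated
+   await loadMigrationFiles("path/to/migration/files", console.log)
+ }
+
+ validateMigrations().catch((e) => {
+   console.error(`Invalid migration files: ${e.message}`)
+   process.exit(1)
+ })
+ ```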
+
+ ## Design decisions
+
+ ### No down migrations
+
+ There is deliberately no concept of a 'down' migration. In the words of Nick Craver:
+
+ > If we needed to reverse something, we could just push another migration negating whatever we did that went boom ... Why roll back when you can roll forward?
+
+ ### Simple ordering
+
+ Migrations are guaranteed to run in the same order every time, on every system.
+
+ Some migration systems use timestamps for ordering migrations, where the timestamp represents when the migration file was created. This doesn't guarantee that the migrations will be run in the same order on every system.
+
+ For example, imagine Developer A creates a migration file in a branch. The next day, Developer B creates a migration in master, and deploys it to production. On day three Developer A merges in their branch and deploys to production.
+
+ The production database sees the migrations applied out of order with respect to their creation time. Any new development database will run the migrations in timestamp order.
+
+ ### The `migrations` table
+
+ A `migrations` table is created as the first migration, before any user-supplied migrations. This keeps track of all the migrations which have already been run.
+
+ ### Hash checks for previous migrations
+
+ Previously run migration scripts shouldn't be modified, since we want the process to be repeated in the same way for every new environment.
+
+ This is enforced by hashing the file contents of a migration script and storing this in the `migrations` table. Before running a migration, the previously run scripts are hashed and checked against the database to ensure they haven't changed.
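+
+ Conceptually, the check boils down to a content hash comparison, roughly like the sketch below. The hash algorithm and exact input are assumptions; the relevant code is not shown in this part of the diff.
+
+ ```typescript
+ import * as crypto from "crypto"
+ import * as fs from "fs"
+
+ // Illustrative only: hash a migration file's contents so the result can be
+ // compared with the hash recorded when the migration was first applied
+ const hashMigration = (filePath: string): string =>
+   crypto
+     .createHash("sha1") // assumption: the library's actual algorithm isn't shown here
+     .update(fs.readFileSync(filePath, "utf8"))
+     .digest("hex")
+ ```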
+
+ ### Each migration runs in a transaction
+
+ Running in a transaction ensures each migration is atomic. Either it completes successfully, or it is rolled back and the process is aborted.
+
+ An exception is made when `-- postgres-migrations disable-transaction` is included at the top of the migration file. This allows migrations such as `CREATE INDEX CONCURRENTLY` which cannot be run inside a transaction.
+
+ ### Abort on errors
+
+ If anything fails, the migration in progress is rolled back and an exception is thrown.
+
+ ## Concurrency
+
+ As of v4, [advisory locks](https://www.postgresql.org/docs/9.4/explicit-locking.html#ADVISORY-LOCKS) are used to control concurrency. If two migration runs are kicked off concurrently, one will wait for the other to finish before starting. Once a process has acquired a lock, it will run each of the pending migrations before releasing the lock again.
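+
+ Under the hood this relies on PostgreSQL's session-level advisory lock functions, roughly as in the sketch below. This is illustrative only; the helper name and lock key are placeholders, not the library's actual values.
+
+ ```typescript
+ import * as pg from "pg"
+
+ // Placeholder key: the actual advisory lock key used by the library is not shown here
+ const MIGRATION_LOCK_KEY = 1000000
+
+ async function withAdvisoryLock(client: pg.Client, run: () => Promise<void>) {
+   // Blocks until no other session holds a lock on the same key
+   await client.query("SELECT pg_advisory_lock($1)", [MIGRATION_LOCK_KEY])
+   try {
+     await run()
+   } finally {
+     await client.query("SELECT pg_advisory_unlock($1)", [MIGRATION_LOCK_KEY])
+   }
+ }
+ ```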
+
+ Logs from two processes `A` and `B` running concurrently should look something like the following.
+
+ ```text
+ B Connected to database
+ B Acquiring advisory lock...
+ A Connected to database
+ A Acquiring advisory lock...
+ B ... acquired advisory lock
+ B Starting migrations
+ B Starting migration: 2 migration-name
+ B Finished migration: 2 migration-name
+ B Starting migration: 3 another-migration-name
+ B Finished migration: 3 another-migration-name
+ B Successfully applied migrations: migration-name, another-migration-name
+ B Finished migrations
+ B Releasing advisory lock...
+ B ... released advisory lock
+ A ... acquired advisory lock
+ A Starting migrations
+ A No migrations applied
+ A Finished migrations
+ A Releasing advisory lock...
+ A ... released advisory lock
+ ```
+
+ Warning: the use of advisory locks will cause problems when using [transaction pooling or statement pooling in PgBouncer](http://www.pgbouncer.org/features.html). A similar system is used in Rails, [see this for an explanation of the problem](https://blog.saeloun.com/2019/09/09/rails-6-disable-advisory-locks.html).
+
+ ## Migration rules
+
+ ### Make migrations idempotent
+
+ Migrations should only be run once, but this is a good principle to follow regardless.
+
+ ### Migrations are immutable
+
+ Once applied (to production), a migration cannot be changed.
+
+ This is enforced by storing a hash of the file contents for each migration in the migrations table.
+
+ These hashes are checked when running migrations.
+
+ ### Migrations should be backwards compatible
+
+ Backwards incompatible changes can usually be made in a few stages.
+
+ For an example, see [this blog post](http://www.brunton-spall.co.uk/post/2014/05/06/database-migrations-done-right/).
+
+ ### File name
+
+ A migration file must match the following pattern:
+
+ `[id][separator][name][extension]`
+
+ | Section   | Accepted Values                     | Description                                                                                                                      |
+ | --------- | ----------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- |
+ | id        | Any integer, optionally zero-padded | Consecutive integer ID. <br />**Must start from 1 and be consecutive, e.g. if you have migrations 1-4, the next one must be 5.** |
+ | separator | `_` or `-` or nothing               |                                                                                                                                    |
+ | name      | Any length text                     |                                                                                                                                    |
+ | extension | `.sql` or `.js`                     | File extensions supported. **Case insensitive.**                                                                                  |
+
+ Example:
+
+ ```text
+ migrations
+ ├ 1_create-initial-tables.sql
+ ├ 1_create-initial-tables.md # Docs can go here
+ ├ 2-alter-initial-tables.SQL
+ └ 3-alter-initial-tables-again.js
+ ```
+
+ Or, if you want better ordering in your filesystem:
+
+ ```text
+ migrations
+ ├ 00001_create-initial-tables.sql
+ ├ 00001_create-initial-tables.md # Docs can go here
+ ├ 00002-alter-initial-tables.sql
+ └ 00003_alter-initial-tables-again.js
+ ```
+
+ Migrations will be performed in the order of the ids. If ids are not consecutive or if multiple migrations have the same id, the migration run will fail.
+
+ Note that file names cannot be changed later.
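+
+ As a quick sanity check, the naming rule can be expressed as a regular expression, as in this sketch. The helper name is illustrative; see `dist/file-name-parser.js` later in this diff for the parser the package actually ships, which uses essentially this pattern.
+
+ ```typescript
+ // Illustrative check against the [id][separator][name][extension] rule above
+ const MIGRATION_FILE_PATTERN = /^(\d+)[-_]?(.*)\.(sql|js)$/i
+
+ const isValidMigrationFileName = (fileName: string): boolean =>
+   MIGRATION_FILE_PATTERN.test(fileName)
+
+ isValidMigrationFileName("00002-alter-initial-tables.sql") // true
+ isValidMigrationFileName("alter-initial-tables.sql") // false: no leading ID
+ ```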
+
+ ### Javascript Migrations
+
+ By using the `.js` extension for your migration file, you gain access to all Node.js features; the file only needs to export a `generateSql` method that returns a `string` literal, like:
+
+ ```js
+ // ./migrations/helpers/create-main-table.js
+ module.exports = `
+ CREATE TABLE main (
+   id int primary key
+ );`
+
+ // ./migrations/helpers/create-secondary-table.js
+ module.exports = `
+ CREATE TABLE secondary (
+   id int primary key
+ );`
+
+ // ./migrations/1-init.js
+ const createMainTable = require("./helpers/create-main-table")
+ const createSecondaryTable = require("./helpers/create-secondary-table")
+
+ module.exports.generateSql = () => `${createMainTable}
+ ${createSecondaryTable}`
+ ```
+
+ ## Tips
+
+ ### Date handling
+
+ If you want sane date handling, it is recommended you use the following code snippet to fix a `node-postgres` [bug](https://github.com/brianc/node-postgres/issues/818):
+
+ ```js
+ const moment = require("moment")
+ const pg = require("pg")
+
+ const parseDate = (val) =>
+   val === null ? null : moment(val).format("YYYY-MM-DD")
+ const DATATYPE_DATE = 1082
+ pg.types.setTypeParser(DATATYPE_DATE, (val) => {
+   return val === null ? null : parseDate(val)
+ })
+ ```
+
+ ### Schema migrations vs data migrations
+
+ General rule: only change schemas and other static data in database migrations.
+
+ When writing a migration which affects data, consider whether the migration needs to be run for all possible environments or just some specific environment. Schema changes and static data need changing for all environments. Often, data changes only need to happen in dev or prod (to fix some data), and might be better off run as one-off jobs (manually or otherwise).
+
+ ### Making a column NOT NULL
+
+ ```sql
+ -- No no no nononono (at least for big tables)
+ ALTER TABLE my_table ALTER COLUMN currently_nullable SET NOT NULL;
+ ```
+
+ TL;DR don't do the above without [reading this](https://medium.com/doctolib/adding-a-not-null-constraint-on-pg-faster-with-minimal-locking-38b2c00c4d1c). It can be slow for big tables, and will lock out all writes to the table until it completes.
+
+ ### Creating indexes
+
+ When creating indexes, there are a few important considerations.
+
+ Creating an index should probably look like this:
+
+ ```sql
+ -- postgres-migrations disable-transaction
+ CREATE INDEX CONCURRENTLY IF NOT EXISTS name_of_idx
+   ON table_name (column_name);
+ ```
+
+ - `CONCURRENTLY` - without this, writes on the table will block until the index has finished being created. However, it can't be run inside a transaction.
+ - `-- postgres-migrations disable-transaction` - migrations are run inside a transaction by default. This disables that.
+ - `IF NOT EXISTS` - since the transaction is disabled, it's possible to end up in a partially applied state where the index exists but the migration wasn't recorded. In this case, the migration will probably get run again. This ensures that will succeed.
+
+ See the [Postgres docs on creating indexes](https://www.postgresql.org/docs/9.6/sql-createindex.html).
+
+ ### Avoid `IF NOT EXISTS`
+
+ _Most_ of the time using `IF NOT EXISTS` is not necessary (see above for an exception). In most cases, we would be better off with a failing migration script that tells us that we tried to create a table with a duplicate name.
+
+ ### Use separate markdown files for complex documentation
+
+ A comment that is added to a migration script can never be changed once the migration script has been deployed. For complex migration scripts, consider documenting them in a separate markdown file with the same file name as the migration script. This documentation can then be updated later if a better explanation becomes apparent.
+
+ Your file structure might look something like this:
+
+ ```text
+ - migrations
+   - 0001_complex_migration.md <--- Contains documentation that can be updated.
+   - 0001_complex_migration.sql
+   - 0002_simple_migration.sql
+ Rather than this:
+ - migrations
+   - 0001_complex_migration.sql <--- Contains documentation that can never be updated.
+   - 0002_simple_migration.sql
+ ```
+
+ ## Useful resources
+
+ [Stack Overflow: How We Do Deployment - 2016 Edition (Database Migrations)](http://nickcraver.com/blog/2016/05/03/stack-overflow-how-we-do-deployment-2016-edition/#database-migrations)
+
+ [Database Migrations Done Right](http://www.brunton-spall.co.uk/post/2014/05/06/database-migrations-done-right/)
+
+ [Database versioning best practices](http://enterprisecraftsmanship.com/2015/08/10/database-versioning-best-practices/)
+
+ ## Developing `postgres-migrations`
+
+ The tests require Docker to be installed. It probably helps to `docker pull postgres:9.4`.
package/ava.config.cjs ADDED
@@ -0,0 +1,4 @@
+ module.exports = {
+   extensions: ["ts"],
+   require: ["ts-node/register"],
+ }
package/ava.config.integration.cjs ADDED
@@ -0,0 +1,6 @@
+ const baseConfig = require("./ava.config.cjs")
+
+ module.exports = {
+   ...baseConfig,
+   files: ["src/**/__tests__/**/*.ts", "!src/**/__tests__/**/fixtures/**/*"],
+ }
package/ava.config.unit.cjs ADDED
@@ -0,0 +1,6 @@
+ const baseConfig = require("./ava.config.cjs")
+
+ module.exports = {
+   ...baseConfig,
+   files: ["src/**/__unit__/**/*.ts", "!src/**/__unit__/**/fixtures/**/*"],
+ }
package/dist/bin/validate.d.ts ADDED
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
package/dist/bin/validate.js ADDED
@@ -0,0 +1,14 @@
+ #!/usr/bin/env node
+ "use strict";
+ // tslint:disable no-console
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const process_1 = require("process");
+ const files_loader_1 = require("../files-loader");
+ async function main(args) {
+     const directory = args[0];
+     await files_loader_1.loadMigrationFiles(directory, (x) => console.error(x));
+ }
+ main(process_1.argv.slice(2)).catch((e) => {
+     console.error(`ERROR: ${e.message}`);
+     process.exit(1);
+ });
package/dist/create.d.ts ADDED
@@ -0,0 +1,6 @@
+ import { BasicPgClient, Config, CreateDBConfig, Logger } from "./types";
+ /**
+  * @deprecated Use `migrate` instead with `ensureDatabaseExists: true`.
+  */
+ export declare function createDb(dbName: string, dbConfig: CreateDBConfig, config?: Config): Promise<void>;
+ export declare function runCreateQuery(dbName: string, log: Logger): (client: BasicPgClient) => Promise<void>;
package/dist/create.js ADDED
@@ -0,0 +1,64 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.runCreateQuery = exports.createDb = void 0;
+ const pg = require("pg");
+ const with_connection_1 = require("./with-connection");
+ const DUPLICATE_DATABASE = "42P04";
+ /**
+  * @deprecated Use `migrate` instead with `ensureDatabaseExists: true`.
+  */
+ async function createDb(dbName, dbConfig, config = {}) {
+     if (typeof dbName !== "string") {
+         throw new Error("Must pass database name as a string");
+     }
+     const log = config.logger != null
+         ? config.logger
+         : () => {
+             //
+         };
+     if (dbConfig == null) {
+         throw new Error("No config object");
+     }
+     if ("client" in dbConfig) {
+         return runCreateQuery(dbName, log)(dbConfig.client);
+     }
+     if (typeof dbConfig.user !== "string" ||
+         typeof dbConfig.password !== "string" ||
+         typeof dbConfig.host !== "string" ||
+         typeof dbConfig.port !== "number") {
+         throw new Error("Database config problem");
+     }
+     const { user, password, host, port } = dbConfig;
+     const client = new pg.Client({
+         database: dbConfig.defaultDatabase != null ? dbConfig.defaultDatabase : "postgres",
+         user,
+         password,
+         host,
+         port,
+     });
+     client.on("error", (err) => {
+         log(`pg client emitted an error: ${err.message}`);
+     });
+     const runWith = with_connection_1.withConnection(log, runCreateQuery(dbName, log));
+     return runWith(client);
+ }
+ exports.createDb = createDb;
+ function runCreateQuery(dbName, log) {
+     return async (client) => {
+         await client
+             .query(`CREATE DATABASE "${dbName.replace(/\"/g, '""')}"`)
+             .catch((e) => {
+             switch (e.code) {
+                 case DUPLICATE_DATABASE: {
+                     log(`'${dbName}' database already exists`);
+                     return;
+                 }
+                 default: {
+                     log(e);
+                     throw new Error(`Error creating database. Caused by: '${e.name}: ${e.message}'`);
+                 }
+             }
+         });
+     };
+ }
+ exports.runCreateQuery = runCreateQuery;
package/dist/file-name-parser.d.ts ADDED
@@ -0,0 +1,7 @@
+ import { FileType } from "./types";
+ export interface FileInfo {
+     id: number;
+     name: string;
+     type: FileType;
+ }
+ export declare const parseFileName: (fileName: string) => FileInfo;
package/dist/file-name-parser.js ADDED
@@ -0,0 +1,27 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseFileName = void 0;
+ const parseId = (id) => {
+     const parsed = parseInt(id, 10);
+     if (isNaN(parsed)) {
+         throw new Error(`Migration file name should begin with an integer ID.'`);
+     }
+     return parsed;
+ };
+ const parseFileName = (fileName) => {
+     const result = /^(-?\d+)[-_]?(.*).(sql|js)$/gi.exec(fileName);
+     if (!result) {
+         throw new Error(`Invalid file name: '${fileName}'.`);
+     }
+     const [, id, name, type] = result;
+     const lowerType = type.toLowerCase();
+     if (lowerType !== "js" && lowerType !== "sql") {
+         throw new Error("Not a JS or SQL file");
+     }
+     return {
+         id: parseId(id),
+         name: name == null || name === "" ? fileName : name,
+         type: lowerType,
+     };
+ };
+ exports.parseFileName = parseFileName;
package/dist/files-loader.d.ts ADDED
@@ -0,0 +1,10 @@
+ import { Logger, Migration } from "./types";
+ /**
+  * Load the migration files and assert they are reasonably valid.
+  *
+  * 'Reasonably valid' in this case means obeying the file name and
+  * consecutive ordering rules.
+  *
+  * No assertions are made about the validity of the SQL.
+  */
+ export declare const loadMigrationFiles: (directory: string, log?: Logger, schemaName?: string) => Promise<Array<Migration>>;
package/dist/files-loader.js ADDED
@@ -0,0 +1,39 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.loadMigrationFiles = void 0;
+ const fs = require("fs");
+ const path = require("path");
+ const util_1 = require("util");
+ const migration_file_1 = require("./migration-file");
+ const validation_1 = require("./validation");
+ const readDir = util_1.promisify(fs.readdir);
+ const isValidFile = (fileName) => /\.(sql|js)$/gi.test(fileName);
+ /**
+  * Load the migration files and assert they are reasonably valid.
+  *
+  * 'Reasonably valid' in this case means obeying the file name and
+  * consecutive ordering rules.
+  *
+  * No assertions are made about the validity of the SQL.
+  */
+ const loadMigrationFiles = async (directory,
+ // tslint:disable-next-line no-empty
+ log = () => { }, schemaName = "public") => {
+     log(`Loading migrations from: ${directory}`);
+     const fileNames = await readDir(directory);
+     log(`Found migration files: ${fileNames}`);
+     if (fileNames == null) {
+         return [];
+     }
+     const migrationFiles = [
+         path.join(__dirname, "migrations/0_create-migrations-table.sql"),
+         ...fileNames.map((fileName) => path.resolve(directory, fileName)),
+     ].filter(isValidFile);
+     const unorderedMigrations = await Promise.all(migrationFiles.map(migration_file_1.loadMigrationFile));
+     // Arrange in ID order
+     const orderedMigrations = unorderedMigrations.sort((a, b) => a.id - b.id);
+     validation_1.validateMigrationOrdering(orderedMigrations);
+     orderedMigrations[0].sql = orderedMigrations[0].sql.replace("CREATE TABLE IF NOT EXISTS migrations", `CREATE SCHEMA IF NOT EXISTS ${schemaName}; CREATE TABLE IF NOT EXISTS ${schemaName}.migrations`);
+     return orderedMigrations;
+ };
+ exports.loadMigrationFiles = loadMigrationFiles;
package/dist/index.d.ts ADDED
@@ -0,0 +1,4 @@
+ export { createDb } from "./create";
+ export { migrate } from "./migrate";
+ export { loadMigrationFiles } from "./files-loader";
+ export { ConnectionParams, CreateDBConfig, MigrateDBConfig, Logger, Config, MigrationError, } from "./types";