@oliasoft-open-source/node-json-migrator 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,266 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.writePlan = exports.sortPlanEntries = exports.readPlan = exports.parsePlan = exports.getPlannedMigrations = exports.getNextSequenceString = exports.generatePlanEntries = void 0;
7
+
8
+ var _path = _interopRequireDefault(require("path"));
9
+
10
+ var _fs = require("fs");
11
+
12
+ var _moduleFromString = require("module-from-string");
13
+
14
+ var _hash = require("../hash/hash");
15
+
16
+ var _glob = require("../glob/glob");
17
+
18
+ var _validator = require("./validator");
19
+
20
+ var _database = require("../database/database");
21
+
22
+ var _cachedPlannedVersion = require("./cached-planned-version");
23
+
24
// Babel interop helper: passes native ES modules straight through and wraps
// CommonJS exports as `{ default: … }` so `.default` access is uniform.
function _interopRequireDefault(obj) {
  const isEsModule = Boolean(obj) && Boolean(obj.__esModule);
  if (isEsModule) {
    return obj;
  }
  return { default: obj };
}
25
+
26
//https://stackoverflow.com/a/31102605/942635
// Returns a shallow copy of `entry` whose keys are inserted in sorted order,
// so JSON.stringify produces stable, diff-friendly output.
const sortEntry = entry => Object.keys(entry).sort().reduce((obj, key) => {
  obj[key] = entry[key];
  return obj;
}, {});

// Offset added to every digit run before string comparison; valid for
// sequence numbers below this bound.
const maxSequence = 1000000;

/**
 * Sorts the plan entries by sequence string
 *
 * Dotted sequence strings (e.g. "1.2.10") are ordered numerically by padding
 * every digit run with a large constant so plain string comparison works.
 * //dotted sequence sort https://stackoverflow.com/a/40201629/942635
 *
 * Fixed: previously sorted the caller's array in place (Array.prototype.sort
 * mutates); now sorts a copy, leaving the input untouched.
 *
 * @param {Array<Object>} plannedMigrations
 * @returns {Array<Object>} new array sorted by sequence, each entry key-sorted
 */
const sortPlanEntries = plannedMigrations => [...plannedMigrations]
  .sort((a, b) => a.sequence.replace(/\d+/g, n => +n + maxSequence).localeCompare(b.sequence.replace(/\d+/g, n => +n + maxSequence)))
  .map(p => sortEntry(p));
43
+ /**
44
+ * Get next automatic sequence string
45
+ *
46
+ * @param {String} sortedPlannedMigrations
47
+ * returns String next sequence
48
+ */
49
+
50
+
51
+ exports.sortPlanEntries = sortPlanEntries;
52
+
53
/**
 * Get next automatic sequence string
 *
 * @param {Array<Object>} sortedPlannedMigrations migrations sorted by sequence
 * @returns {String} next top-level sequence number, as a string
 */
const getNextSequenceString = sortedPlannedMigrations => {
  const last = sortedPlannedMigrations[sortedPlannedMigrations.length - 1];

  if (!last) {
    // Empty plan — sequences start at "1".
    return '1';
  }

  // parseInt stops at the first non-digit, so a dotted sequence like "3.2"
  // yields 3 and the next top-level sequence is "4".
  const next = parseInt(`${last.sequence}`, 10) + 1;
  return `${next}`;
};
63
+ /**
64
+ * Read the plan.json file
65
+ *
66
+ * @param {String} directory path to migrations directory
67
+ * returns Promise<String> plan.json string
68
+ */
69
+
70
+
71
+ exports.getNextSequenceString = getNextSequenceString;
72
+
73
/**
 * Read the plan.json file
 *
 * @param {String} directory path to migrations directory
 * @returns {Promise<String>} plan.json contents, or '[]' when unreadable
 */
const readPlan = async directory => {
  try {
    // `return await` keeps read failures inside this try block.
    return await _fs.promises.readFile(`${directory}/plan.json`, {
      encoding: 'utf8'
    });
  } catch {
    // No plan yet (e.g. first run) — treat as an empty JSON array.
    return '[]';
  }
};
85
+ /**
86
+ * Parse the plan.json string into a sorted object
87
+ *
88
+ * @param {String} plan plan.json string
89
+ * returns Array<Object> migration entries from plan.json
90
+ */
91
+
92
+
93
+ exports.readPlan = readPlan;
94
+
95
/**
 * Parse the plan.json string into a sorted object
 *
 * @param {String} plan plan.json string
 * @returns {Array<Object>} migration entries from plan.json, sorted by sequence
 * @throws {Error} when the string is not valid JSON or does not match the schema
 */
const parsePlan = plan => {
  let plannedMigrations;

  try {
    plannedMigrations = JSON.parse(plan);
  } catch {
    // Fixed message: the plan file is plan.json, not plan.js.
    throw new Error('Invalid JSON for migrator plan.json (unable to parse file)');
  }

  if (!(0, _validator.validatePlan)(plannedMigrations)) {
    throw new Error('Invalid JSON for migrator plan.json (does not match schema)');
  }

  return sortPlanEntries(plannedMigrations);
};
110
+ /**
111
+ * Write the plan.json file
112
+ *
113
+ * @param {String} directory path to migrations directory
114
+ * @param {String} content string to be written to file
115
+ * returns Promise<>
116
+ */
117
+
118
+
119
+ exports.parsePlan = parsePlan;
120
+
121
/**
 * Write the plan.json file
 *
 * @param {String} directory path to migrations directory
 * @param {String} content string to be written to file
 * @returns {Promise<void>}
 */
const writePlan = async (directory, content) => {
  //todo this is not being written in the correct order
  const planPath = _path.default.resolve(`${directory}/plan.json`);
  await _fs.promises.writeFile(planPath, content, { encoding: 'utf8' });
};
127
+ /**
128
+ * Generate plan entries
129
+ *
130
+ * @param {Array<Object>} sortedMigrationEntries
131
+ * returns Array<Object>
132
+ */
133
+
134
+
135
+ exports.writePlan = writePlan;
136
+
137
/**
 * Generate plan entries
 *
 * Projects each migration entry down to the three fields persisted
 * in plan.json.
 *
 * @param {Array<Object>} sortedMigrationEntries
 * @returns {Array<Object>} entries with only fileHash, fileName and sequence
 */
const generatePlanEntries = sortedMigrationEntries =>
  sortedMigrationEntries.map(({ fileHash, fileName, sequence }) => ({
    fileHash,
    fileName,
    sequence
  }));
142
+
143
+ exports.generatePlanEntries = generatePlanEntries;
144
+
145
/**
 * Builds and validates the full migration plan.
 *
 * Reads plan.json, pairs each planned entry with its migration file on disk
 * (hashing file contents, optionally importing the module), cross-checks the
 * database migration history, runs all plan validations, and — unless `dry` —
 * persists the updated plan file and version records.
 *
 * @param {Object} options
 * @param {Object} options.config
 * @param {String} options.config.directory migrations directory
 * @param {Object} [options.config.database] database connection; validations run without persistence when absent
 * @param {String} [options.config.entity] identifier passed to the database helpers for the meta tables
 * @param {Boolean} [options.config.force] skip the change-detection validations
 * @param {Object} [options.config.pgpHelpers] pg-promise helpers used by the record writers
 * @param {Boolean} [options.config.dry] when true, performs no writes at all
 * @param {Boolean} [options.config.importModule=true] import migration scripts as modules
 * @returns {Promise<{plannedMigrations: Array<Object>, nextVersion: String}>}
 * @throws {Error} when plan.json is invalid or any validation fails
 */
const getPlannedMigrations = async ({
  config
}) => {
  const {
    directory,
    database,
    entity,
    force,
    pgpHelpers,
    dry,
    importModule = true
  } = config;

  if (database && !dry) {
    /*
    OW-9043 and OW-9238: The tool tries to self-create its own meta tables
    to simplify setup. That's useful for devs running on local machines,
    and when running via CLI jobs. But for environments without CREATE
    privileges, we don't want to execute CREATE TABLE IF NOT EXISTS
    because it throws errors.
    */
    const tablesExist = await (0, _database.migrationTablesExist)(database, entity);

    if (!tablesExist) {
      await (0, _database.createMigrationsTables)(database, entity);
    }
  }

  // Load current plan.json; the hash recorded in the plan is kept separately
  // as fileHashFromPlan for change detection against the on-disk file.
  const rawPlan = await readPlan(directory);
  const parsedPlan = parsePlan(rawPlan);
  const plannedMigrations = parsedPlan.map(m => ({
    fileHashFromPlan: m.fileHash,
    fileName: m.fileName,
    sequence: m.sequence
  }));
  const filePaths = await (0, _glob.getMigrationFilePaths)(directory);
  const skippedFilePaths = await (0, _glob.getSkippedFilePaths)(directory);
  // Pair every planned entry with its file: read the script, optionally
  // import it as a module, and hash its contents.
  const plannedMigrationsWithFileEntries = await Promise.all(plannedMigrations.map(async m => {
    var _await$importFromStri;

    const {
      fileName
    } = m;
    const isValidFileName = (0, _validator.validateFileName)(fileName);
    const filePath = filePaths.find(f => _path.default.parse(f).base === m.fileName);
    const script = filePath ? await _fs.promises.readFile(filePath, {
      encoding: 'utf8'
    }) : null;
    // Transpiled optional chaining: (await importFromString(script))?.default
    const migrator = script && importModule ? (_await$importFromStri = await (0, _moduleFromString.importFromString)(script)) === null || _await$importFromStri === void 0 ? void 0 : _await$importFromStri.default : null;
    const fileHash = script ? (0, _hash.hash)(script) : null; //skipped files

    // Collect hashes of skipped variants (name.skip.js / name.skipN.js) so a
    // file renamed to .skip.js still matches its recorded hash.
    // NOTE(review): nameMatch is interpolated unescaped and the dots in
    // skipMatch are unescaped regex metacharacters — harmless for kebab-case
    // names, but confirm no other name shapes reach this point.
    const skipMatch = '(.skip|.skip\\d+).js$';

    const nameMatch = _path.default.parse(m.fileName).name;

    const regex = new RegExp(`${nameMatch}${skipMatch}`, 'g');
    const matchingSkippedFilePaths = skippedFilePaths.filter(f => {
      const name = _path.default.parse(f).base;

      return name.match(regex);
    });
    const skippedFileHashes = await Promise.all(matchingSkippedFilePaths.map(async skippedFilePath => {
      const skippedScript = await _fs.promises.readFile(skippedFilePath, {
        encoding: 'utf8'
      });
      return skippedScript ? (0, _hash.hash)(skippedScript) : null;
    }));
    return { ...m,
      filePath,
      isValidFileName,
      script,
      migrator,
      fileHash,
      skippedFileHashes
    };
  }));
  // Attach the hash/sequence recorded in database history (null when absent).
  const historicalMigrations = database ? await (0, _database.getMigrationRecords)(database, entity) : [];
  const plannedMigrationsWithHistory = plannedMigrationsWithFileEntries.map(m => {
    const historicalMigration = historicalMigrations.find(h => h.fileName === m.fileName);
    return { ...m,
      fileHashFromHistory: (historicalMigration === null || historicalMigration === void 0 ? void 0 : historicalMigration.fileHash) || null,
      sequenceFromHistory: (historicalMigration === null || historicalMigration === void 0 ? void 0 : historicalMigration.sequence) || null
    };
  });
  // Hard validations — always enforced.
  (0, _validator.throwIfFilesNotFound)(plannedMigrationsWithHistory, importModule);
  (0, _validator.throwIfFileNamesInvalid)(plannedMigrationsWithHistory);
  (0, _validator.throwIfFileNamesNotUnique)(plannedMigrationsWithHistory);
  (0, _validator.throwIfSequenceNotUnique)(plannedMigrationsWithHistory);

  if (!force) {
    // Change-detection checks; `force` bypasses them for unreleased local work.
    (0, _validator.warnIfFilesHaveBeenRemovedFromPlan)(plannedMigrationsWithHistory, historicalMigrations);
    (0, _validator.throwIfFilesHaveChanged)(plannedMigrationsWithHistory);
    (0, _validator.throwIfSequenceHasChanged)(plannedMigrationsWithHistory);
  }

  (0, _validator.throwIfSequenceHasIntegerGaps)(plannedMigrationsWithHistory);
  // The plan version is the hash of the serialized, validated plan.
  const validatedPlannedMigrations = generatePlanEntries(plannedMigrationsWithHistory);
  const validatedPlan = JSON.stringify(validatedPlannedMigrations, null, 2);
  const nextVersion = (0, _hash.hash)(validatedPlan);
  const planHasChanged = validatedPlan !== rawPlan;

  if (!dry && planHasChanged) {
    await writePlan(directory, validatedPlan);
  }

  if (database && !dry) {
    // Persist the validated plan entries and the new version record.
    await (0, _database.replaceMigrationsRecords)(database, pgpHelpers, entity, validatedPlannedMigrations);
    const versionRecord = [{
      version: nextVersion,
      plan: validatedPlan
    }];
    await (0, _database.insertVersions)(database, pgpHelpers, entity, versionRecord);
  }

  (0, _cachedPlannedVersion.setCachedPlannedVersion)(nextVersion);
  return {
    plannedMigrations: plannedMigrationsWithHistory,
    nextVersion
  };
};
265
+
266
+ exports.getPlannedMigrations = getPlannedMigrations;
@@ -0,0 +1,20 @@
1
+ {
2
+ "type": "array",
3
+ "items": {
4
+ "type": "object",
5
+ "properties": {
6
+ "fileHash": {
7
+ "type": ["string"]
8
+ },
9
+ "fileName": {
10
+ "type": "string"
11
+ },
12
+ "sequence": {
13
+ "type": "string",
14
+ "pattern": "^[1-9][0-9]*(\\.[1-9][0-9]*)*$"
15
+ }
16
+ },
17
+ "required": ["fileHash", "fileName", "sequence"],
18
+ "additionalProperties": false
19
+ }
20
+ }
@@ -0,0 +1,34 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.getPlannedVersion = void 0;
7
+
8
+ var _fs = require("fs");
9
+
10
+ var _hash = require("../hash/hash");
11
+
12
+ var _plan = require("./plan");
13
+
14
+ var _cachedPlannedVersion = require("./cached-planned-version");
15
+
16
/**
 * Resolve the version that the current plan.json would produce.
 *
 * Hashes plan.json and, when the hash matches the cached planned version,
 * returns the cache; otherwise rebuilds the plan via getPlannedMigrations
 * (without importing migration modules) and returns its nextVersion.
 *
 * @param {Object} options
 * @param {Object} options.config migrator config (must include directory)
 * @returns {Promise<String>} planned version hash
 */
const getPlannedVersion = async ({
  config
}) => {
  const {
    directory
  } = config;
  const currentPlan = await _fs.promises.readFile(`${directory}/plan.json`, {
    encoding: 'utf8'
  });
  const plannedVersion = (0, _hash.hash)(currentPlan);
  const cached = (0, _cachedPlannedVersion.getCachedPlannedVersion)();

  if (cached === plannedVersion) {
    return cached;
  }

  // Cache is stale or empty — rebuild the plan; only the version hash is
  // needed, so module importing is skipped.
  const { nextVersion } = await (0, _plan.getPlannedMigrations)({
    config: { ...config,
      importModule: false
    }
  });
  return nextVersion;
};
33
+
34
+ exports.getPlannedVersion = getPlannedVersion;
@@ -0,0 +1,254 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.warnIfFilesHaveBeenRemovedFromPlan = exports.validatePlan = exports.validateFileName = exports.validateFileDescription = exports.throwIfSequenceNotUnique = exports.throwIfSequenceHasIntegerGaps = exports.throwIfSequenceHasChanged = exports.throwIfFilesNotFound = exports.throwIfFilesHaveChanged = exports.throwIfFileNamesNotUnique = exports.throwIfFileNamesInvalid = void 0;
7
+
8
+ var _chalk = _interopRequireDefault(require("chalk"));
9
+
10
+ var _lodash = require("lodash");
11
+
12
+ var _ajv = _interopRequireDefault(require("ajv"));
13
+
14
+ var _ajvErrors = _interopRequireDefault(require("ajv-errors"));
15
+
16
+ var _planSchema = _interopRequireDefault(require("./plan.schema.json"));
17
+
18
// Babel helper: normalises a require()'d module so `.default` always points
// at the module's default export (native ES modules pass through unchanged).
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
19
+
20
// True only for actual strings containing at least one character.
const isNonEmptyString = input => {
  if (!(0, _lodash.isString)(input)) {
    return false;
  }
  return input.length > 0;
};
21
+
22
// Single Ajv instance shared by this module; allErrors collects every schema
// violation instead of stopping at the first.
const ajv = new _ajv.default({
  allErrors: true
});
// Enables the custom errorMessage keyword on this Ajv instance.
(0, _ajvErrors.default)(ajv); //sequence pattern: https://regex101.com/r/4qHZLm/1

// Compiled once at module load; reused by validatePlan below.
const planValidator = ajv.compile(_planSchema.default);
28
+ /**
29
+ * Validates if a plan matches the schema
30
+ *
31
+ * @param {Array<Object>} plan
32
+ * @returns {Boolean} valid
33
+ */
34
+
35
/**
 * Validates if a plan matches the schema
 *
 * Logs every schema violation to the console before returning.
 *
 * @param {Array<Object>} plan
 * @returns {Boolean} valid
 */
const validatePlan = plan => {
  const valid = planValidator(plan);
  const {
    errors
  } = planValidator;

  if (errors !== null && errors !== void 0 && errors.length) {
    // Fixed: space before "at" so output reads "must be string at /0/fileHash"
    // instead of "must be stringat /0/fileHash".
    errors.forEach(e => console.error(`${e.message}${e.instancePath ? ` at ${e.instancePath}` : ''}`));
  }

  return valid;
};
47
+ /**
48
+ * Validates if file description format is valid
49
+ *
50
+ * @param {String} description
51
+ * @returns {Boolean} valid
52
+ */
53
+
54
+
55
+ exports.validatePlan = validatePlan;
56
+
57
/**
 * Validates if file description format is valid
 *
 * A valid description is lower-case kebab-case: one or more [a-z] words
 * separated by single hyphens, with no leading/trailing hyphen.
 *
 * @param {String} description
 * @returns {Boolean} valid
 */
const validateFileDescription = description => {
  if (!description) {
    return false;
  }

  //https://regex101.com/r/IO0dzF/1
  return /^([a-z]+|(([a-z]+-)+[a-z]+))$/g.test(description);
};
66
+ /**
67
+ * Validates if fileName format is valid
68
+ *
69
+ * @param {String} fileName
70
+ * @returns {Boolean} valid
71
+ */
72
+
73
+
74
+ exports.validateFileDescription = validateFileDescription;
75
+
76
/**
 * Validates if fileName format is valid
 *
 * Valid names are kebab-case with a literal `.js` or `.skip.js` extension,
 * e.g. `add-users.js` or `add-users.skip.js`.
 *
 * @param {String} fileName
 * @returns {Boolean} valid
 */
const validateFileName = fileName => {
  //https://regex101.com/r/EX2RlS/1
  // Fixed: dots are now escaped so `.js` only matches a literal ".js" —
  // previously `.` matched any character, so e.g. "add-usersxjs" passed.
  const validFileName = /^([a-z]+|(([a-z]+-)+[a-z]+))(\.js|\.skip\.js)$/;
  return validFileName.test(fileName);
};
81
+ /**
82
+ * Throws if file names are invalid
83
+ *
84
+ * @param {Array<Object>} migrationEntries
85
+ */
86
+
87
+
88
+ exports.validateFileName = validateFileName;
89
+
90
/**
 * Throws if file names are invalid
 *
 * Logs each offending file name in red before throwing.
 *
 * @param {Array<Object>} fileEntries entries carrying fileName/isValidFileName
 * @throws {Error} when any entry has an invalid file name
 */
const throwIfFileNamesInvalid = fileEntries => {
  const invalidFileNames = [];

  for (const entry of fileEntries) {
    if (!entry.isValidFileName) {
      invalidFileNames.push(entry.fileName);
    }
  }

  if (invalidFileNames.length) {
    invalidFileNames.forEach(name => console.error(_chalk.default.red(name)));
    throw new Error('Invalid migration filename format (use kebab-case)');
  }
};
98
+ /**
99
+ * Warns if files have been removed
100
+ *
101
+ * @param {Array<Object>} migrationEntries
102
+ * @param {Array<Object>} historicalMigrations
103
+ */
104
+
105
+
106
+ exports.throwIfFileNamesInvalid = throwIfFileNamesInvalid;
107
+
108
/**
 * Warns if files have been removed
 *
 * Compares the database history against the current plan and warns (does not
 * throw) about previously executed migrations missing from plan.json.
 *
 * @param {Array<Object>} migrationEntries
 * @param {Array<Object>} historicalMigrations
 */
const warnIfFilesHaveBeenRemovedFromPlan = (migrationEntries, historicalMigrations) => {
  const plannedNames = new Set(migrationEntries.map(f => f.fileName));
  const deletedFilesNames = historicalMigrations
    .map(e => e.fileName)
    .filter(name => !plannedNames.has(name));

  if (deletedFilesNames.length) {
    deletedFilesNames.forEach(name => console.warn(_chalk.default.yellow(name)));
    console.warn(_chalk.default.yellow('Previously executed migration files have been deleted from plan.json (rename file to .skip.js and replace with a new file instead)'));
  }
};
117
+ /**
118
+ * Throws error if any pre-existing file has changed
119
+ *
120
+ * @param {Array<Object>} migrationEntries
121
+ */
122
+
123
+
124
+ exports.warnIfFilesHaveBeenRemovedFromPlan = warnIfFilesHaveBeenRemovedFromPlan;
125
+
126
/**
 * Throws error if any pre-existing file has changed
 *
 * A migration's current file hash (or the hash of one of its .skip.js
 * variants) must match both the hash recorded in history and the one in
 * plan.json; entries with no recorded hash are accepted.
 *
 * @param {Array<Object>} migrationEntries
 * @throws {Error} when any previously recorded migration file has changed
 */
const throwIfFilesHaveChanged = migrationEntries => {
  const changedFilesNames = migrationEntries.filter(m => {
    const {
      fileHash,
      fileHashFromPlan,
      fileHashFromHistory,
      skippedFileHashes
    } = m;
    // A recorded hash "matches" when it equals the current file hash or any
    // matching skipped-variant hash; an absent record always matches.
    const matchesHistory = !isNonEmptyString(fileHashFromHistory) || [fileHash].concat(skippedFileHashes).includes(fileHashFromHistory);
    const matchesPlan = !isNonEmptyString(fileHashFromPlan) || [fileHash].concat(skippedFileHashes).includes(fileHashFromPlan);
    return !(matchesHistory && matchesPlan);
  }).map(f => f.fileName);

  if (changedFilesNames.length) {
    changedFilesNames.forEach(name => console.error(_chalk.default.red(name)));
    throw new Error('Not allowed to change migration files (for unreleased local work, you can use the `force` option)');
  }
};
144
+ /**
145
+ * Throws error if previous sequence numbers have changed
146
+ *
147
+ * @param {Array<Object>} migrationEntries
148
+ */
149
+
150
+
151
+ exports.throwIfFilesHaveChanged = throwIfFilesHaveChanged;
152
+
153
/**
 * Throws error if previous sequence numbers have changed
 *
 * @param {Array<Object>} migrationEntries
 * @throws {Error} when an entry's sequence differs from its historical sequence
 */
const throwIfSequenceHasChanged = migrationEntries => {
  // An entry only counts as changed when history actually recorded a
  // sequence string for it.
  const changeSequences = migrationEntries
    .filter(({ sequence, sequenceFromHistory }) =>
      (0, _lodash.isString)(sequenceFromHistory) && sequenceFromHistory !== sequence)
    .map(f => f.fileName);

  if (changeSequences.length) {
    changeSequences.forEach(name => console.error(_chalk.default.red(name)));
    throw new Error('Not allowed to change migration sequences in plan.json');
  }
};
168
+ /**
169
+ * Throws error if sequence numbers are not unique
170
+ *
171
+ * @param {Array<Object>} migrationEntries
172
+ */
173
+
174
+
175
+ exports.throwIfSequenceHasChanged = throwIfSequenceHasChanged;
176
+
177
/**
 * Throws error if sequence numbers are not unique
 *
 * @param {Array<Object>} migrationEntries
 * @throws {Error} when two or more entries share a sequence number
 */
const throwIfSequenceNotUnique = migrationEntries => {
  // Count occurrences of each sequence, then flag every entry whose
  // sequence occurs more than once.
  const counts = new Map();

  for (const { sequence } of migrationEntries) {
    counts.set(sequence, (counts.get(sequence) || 0) + 1);
  }

  const duplicates = migrationEntries
    .filter(f => counts.get(f.sequence) > 1)
    .map(f => f.fileName);

  if (duplicates.length) {
    duplicates.forEach(fileName => console.error(_chalk.default.red(fileName)));
    throw new Error('Migrations must have unique sequence numbers in plan.json');
  }
};
188
+ /**
189
+ * Throws error if sequence numbers have integer gaps
190
+ *
191
+ * @param {Array<Object>} migrationEntries
192
+ */
193
+
194
+
195
+ exports.throwIfSequenceNotUnique = throwIfSequenceNotUnique;
196
+
197
/**
 * Throws error if sequence numbers have integer gaps
 *
 * The leading integer of every dotted sequence ("2" in "2.3") must form a
 * contiguous run 1..max with no holes.
 *
 * @param {Array<Object>} migrationEntries
 * @throws {Error} when integers are missing from (or fall outside) 1..max
 */
const throwIfSequenceHasIntegerGaps = migrationEntries => {
  // Leading integer of a dotted sequence string ("2.3" -> 2).
  const toInteger = sequence => parseInt(sequence.split('.')[0], 10);

  const orderedUniqueSequenceIntegers = Array.from(
    new Set(migrationEntries.map(entry => toInteger(entry.sequence)))
  ).sort((a, b) => a - b);
  const max = orderedUniqueSequenceIntegers[orderedUniqueSequenceIntegers.length - 1];
  const expected = Array.from({
    length: max
  }, (v, k) => k + 1);
  // Symmetric difference (same semantics as lodash xor): values present in
  // exactly one of the two arrays, actual-side values listed first.
  const actualSet = new Set(orderedUniqueSequenceIntegers);
  const expectedSet = new Set(expected);
  const missing = [
    ...orderedUniqueSequenceIntegers.filter(v => !expectedSet.has(v)),
    ...expected.filter(v => !actualSet.has(v))
  ];

  if (missing.length) {
    throw new Error(`Migration sequence numbers in plan.json have unexpected gaps: ${missing.join(', ')}`);
  }
};
216
+ /**
217
+ * Throws error if file names are not unique
218
+ *
219
+ * @param {Array<Object>} migrationEntries
220
+ */
221
+
222
+
223
+ exports.throwIfSequenceHasIntegerGaps = throwIfSequenceHasIntegerGaps;
224
+
225
/**
 * Throws error if file names are not unique
 *
 * @param {Array<Object>} migrationEntries
 * @throws {Error} when the same fileName appears more than once
 */
const throwIfFileNamesNotUnique = migrationEntries => {
  const seen = new Set();
  const repeatedFileNames = [];

  // Collect the second and later occurrences of each file name.
  for (const { fileName } of migrationEntries) {
    if (seen.has(fileName)) {
      repeatedFileNames.push(fileName);
    } else {
      seen.add(fileName);
    }
  }

  if (repeatedFileNames.length) {
    repeatedFileNames.forEach(fileName => console.error(_chalk.default.red(fileName)));
    throw new Error('Migration file names must be unique');
  }
};
235
+ /**
236
+ * Throws error if files listed in plan.json are not found
237
+ *
238
+ * @param {Array<Object>} migrationEntries
239
+ * @param {Boolean} importModule
240
+ */
241
+
242
+
243
+ exports.throwIfFileNamesNotUnique = throwIfFileNamesNotUnique;
244
+
245
/**
 * Throws error if files listed in plan.json are not found
 *
 * @param {Array<Object>} migrationEntries
 * @param {Boolean} importModule whether migration modules were imported
 * @throws {Error} when a planned migration has no script on the filesystem
 */
const throwIfFilesNotFound = (migrationEntries, importModule) => {
  // NOTE(review): `importModule && !m.script && !m.migrator` is subsumed by
  // the leading `!m.script`, so the migrator check is dead code as written —
  // possibly intended as `!m.script || (importModule && !m.migrator)`; confirm.
  const migrationsWithoutFiles = migrationEntries.filter(m => !m.script || importModule && !m.script && !m.migrator);

  if (migrationsWithoutFiles.length) {
    migrationsWithoutFiles.forEach(migration => console.error(_chalk.default.red(migration.fileName)));
    throw new Error('Migration files from plan.json are missing from filesystem');
  }
};
253
+
254
+ exports.throwIfFilesNotFound = throwIfFilesNotFound;
package/index.js ADDED
@@ -0,0 +1 @@
1
+ module.exports = require('./dist/index.js');
package/package.json ADDED
@@ -0,0 +1,78 @@
1
+ {
2
+ "name": "@oliasoft-open-source/node-json-migrator",
3
+ "version": "2.0.0",
4
+ "description": "A library for JSON migrations",
5
+ "scripts": {
6
+ "build": "npx babel src --ignore 'src/**/*.test.js' --out-dir dist --copy-files --no-copy-ignored",
7
+ "test": "npm run prettier:check && npm run lint:check && npm run test:unit:coverage",
8
+ "test:unit": "jest --silent=true",
9
+ "test:unit:coverage": "jest --silent=true --collectCoverage=true",
10
+ "test:unit:verbose": "jest --silent=false --verbose",
11
+ "lint:check": "eslint \"**/*.{js,jsx}\"",
12
+ "lint:fix": "eslint --fix \"**/*.{js,jsx}\"",
13
+ "prettier:check": "prettier --check \"**/*.{js,jsx,json,css,less}\"",
14
+ "prettier:fix": "prettier --write \"**/*.{js,jsx,json,css,less}\"",
15
+ "prepare": "husky install"
16
+ },
17
+ "lint-staged": {
18
+ "*.js": "eslint --cache --fix",
19
+ "*.{js,jsx,json,css,less}": [
20
+ "prettier --write"
21
+ ]
22
+ },
23
+ "homepage": "https://oliasoft-open-source.gitlab.io/node-postgresql-migrator",
24
+ "repository": {
25
+ "type": "git",
26
+ "url": "git+https://gitlab.com/oliasoft-open-source/node-postgresql-migrator.git"
27
+ },
28
+ "author": "Oliasoft AS and contributors",
29
+ "license": "MIT",
30
+ "bugs": {
31
+ "url": "https://gitlab.com/oliasoft-open-source/node-postgresql-migrator/issues"
32
+ },
33
+ "devDependencies": {
34
+ "@babel/cli": "^7.15.7",
35
+ "@babel/core": "^7.15.0",
36
+ "@babel/eslint-parser": "^7.15.0",
37
+ "@babel/node": "^7.14.9",
38
+ "@babel/plugin-proposal-class-properties": "^7.14.5",
39
+ "@babel/plugin-proposal-decorators": "^7.14.5",
40
+ "@babel/plugin-proposal-export-namespace-from": "^7.14.5",
41
+ "@babel/plugin-proposal-function-sent": "^7.14.5",
42
+ "@babel/plugin-proposal-json-strings": "^7.14.5",
43
+ "@babel/plugin-proposal-numeric-separator": "^7.14.5",
44
+ "@babel/plugin-proposal-object-rest-spread": "^7.14.7",
45
+ "@babel/plugin-proposal-throw-expressions": "^7.14.5",
46
+ "@babel/plugin-syntax-dynamic-import": "^7.8.3",
47
+ "@babel/plugin-syntax-import-meta": "^7.10.4",
48
+ "@babel/preset-env": "^7.15.0",
49
+ "@babel/runtime": "^7.15.3",
50
+ "@types/lodash": "^4.14.172",
51
+ "babel-eslint": "^10.1.0",
52
+ "babel-jest": "^27.1.0",
53
+ "babel-loader": "^8.2.2",
54
+ "eslint": "^7.32.0",
55
+ "eslint-config-airbnb": "^18.2.1",
56
+ "eslint-config-prettier": "^8.3.0",
57
+ "eslint-import-resolver-alias": "^1.1.2",
58
+ "eslint-plugin-import": "^2.24.2",
59
+ "husky": "^7.0.2",
60
+ "jest": "^27.1.0",
61
+ "jest-diff": "^27.3.1",
62
+ "jest-matcher-utils": "^27.3.1",
63
+ "lint-staged": "^11.1.2",
64
+ "mock-fs": "^5.1.1",
65
+ "pg-mem": "^1.9.17",
66
+ "prettier": "2.3.2"
67
+ },
68
+ "dependencies": {
69
+ "ajv": "^8.6.3",
70
+ "ajv-errors": "^3.0.0",
71
+ "chalk": "^4.1.2",
72
+ "glob-promise": "^4.2.0",
73
+ "immer": "^9.0.6",
74
+ "lodash": "^4.17.21",
75
+ "module-from-string": "^3.1.1",
76
+ "pg-promise": "^10.11.0"
77
+ }
78
+ }
@@ -0,0 +1,21 @@
1
#!/bin/bash
set -eEuo pipefail

# Posts a message to a Mattermost channel via an incoming webhook.
#
# This script requires the following things to be installed:
# - bash
# - curl

# Required inputs:
MATTERMOST_BOT_KEY=$1
MSG=$2

# Optional inputs:
CHANNEL=${3:-'#releases'}
USERNAME=${4:-'Gitlab CI/CD'}
ICON_URL=${5:-'https://oliasoftstaticwebsite.blob.core.windows.net/helpguideimages/robot-icon.png'}
MATTERMOST_URL=${6:-'https://mm.oliasoft.com/hooks/'}

# Quote the webhook URL so word splitting / globbing cannot mangle it
# (previously the expansion was unquoted).
curl -X POST \
  --data-urlencode \
  "payload={\"channel\": \"$CHANNEL\", \"username\": \"$USERNAME\", \"icon_url\": \"$ICON_URL\", \"text\": \"$MSG\"}" \
  "$MATTERMOST_URL$MATTERMOST_BOT_KEY"