@webiny/data-migration 0.0.0-unstable.e3f4727c56
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/MigrationRunner.d.ts +15 -0
- package/MigrationRunner.js +160 -0
- package/MigrationRunner.js.map +1 -0
- package/README.md +6 -0
- package/createPinoLogger.d.ts +7 -0
- package/createPinoLogger.js +22 -0
- package/createPinoLogger.js.map +1 -0
- package/createTable.d.ts +7 -0
- package/createTable.js +29 -0
- package/createTable.js.map +1 -0
- package/handlers/createDdbEsProjectMigration.d.ts +15 -0
- package/handlers/createDdbEsProjectMigration.js +67 -0
- package/handlers/createDdbEsProjectMigration.js.map +1 -0
- package/handlers/createDdbProjectMigration.d.ts +12 -0
- package/handlers/createDdbProjectMigration.js +63 -0
- package/handlers/createDdbProjectMigration.js.map +1 -0
- package/handlers/createPatternMatcher.d.ts +2 -0
- package/handlers/createPatternMatcher.js +17 -0
- package/handlers/createPatternMatcher.js.map +1 -0
- package/handlers/devVersionErrorResponse.d.ts +5 -0
- package/handlers/devVersionErrorResponse.js +14 -0
- package/handlers/devVersionErrorResponse.js.map +1 -0
- package/index.d.ts +6 -0
- package/index.js +71 -0
- package/index.js.map +1 -0
- package/package.json +56 -0
- package/repository/migrations.entity.d.ts +4 -0
- package/repository/migrations.entity.js +36 -0
- package/repository/migrations.entity.js.map +1 -0
- package/repository/migrations.repository.d.ts +10 -0
- package/repository/migrations.repository.js +49 -0
- package/repository/migrations.repository.js.map +1 -0
- package/symbols.d.ts +6 -0
- package/symbols.js +18 -0
- package/symbols.js.map +1 -0
- package/types.d.ts +57 -0
- package/types.js +12 -0
- package/types.js.map +1 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) Webiny
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { Logger } from "pino";
|
|
2
|
+
import { ExecutedMigrationResponse, SkippedMigrationResponse, MigrationRepository, DataMigration } from "./types";
|
|
3
|
+
export declare type IsMigrationApplicable = (migration: DataMigration) => boolean;
|
|
4
|
+
export declare class MigrationRunner {
|
|
5
|
+
private readonly logger;
|
|
6
|
+
private readonly migrations;
|
|
7
|
+
private readonly repository;
|
|
8
|
+
constructor(repository: MigrationRepository, migrations: DataMigration[], logger: Logger | undefined);
|
|
9
|
+
execute(projectVersion: string, isApplicable?: IsMigrationApplicable): Promise<{
|
|
10
|
+
executed: ExecutedMigrationResponse[];
|
|
11
|
+
skipped: SkippedMigrationResponse[];
|
|
12
|
+
notApplicable: SkippedMigrationResponse[];
|
|
13
|
+
}>;
|
|
14
|
+
private validateIds;
|
|
15
|
+
}
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.MigrationRunner = void 0;
|
|
8
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
|
9
|
+
var _ioc = require("@webiny/ioc");
|
|
10
|
+
var _semver = require("semver");
|
|
11
|
+
var _symbols = require("./symbols");
|
|
12
|
+
var _createPinoLogger = require("./createPinoLogger");
|
|
13
|
+
class MigrationRunner {
|
|
14
|
+
constructor(repository, migrations, logger) {
|
|
15
|
+
(0, _defineProperty2.default)(this, "logger", void 0);
|
|
16
|
+
(0, _defineProperty2.default)(this, "migrations", void 0);
|
|
17
|
+
(0, _defineProperty2.default)(this, "repository", void 0);
|
|
18
|
+
this.repository = repository;
|
|
19
|
+
this.migrations = migrations || [];
|
|
20
|
+
if (!logger) {
|
|
21
|
+
logger = (0, _createPinoLogger.createPinoLogger)();
|
|
22
|
+
}
|
|
23
|
+
this.logger = logger;
|
|
24
|
+
}
|
|
25
|
+
async execute(projectVersion, isApplicable) {
|
|
26
|
+
this.validateIds(this.migrations);
|
|
27
|
+
const [latestMigration] = await this.repository.listMigrations({
|
|
28
|
+
limit: 1
|
|
29
|
+
});
|
|
30
|
+
this.logger.info(`Project version is %s.`, projectVersion);
|
|
31
|
+
|
|
32
|
+
// Get current version, and coerce it to a valid SemVer.
|
|
33
|
+
// With this, we can run migrations targeted for stable versions, released under a preid tag (e.g., `beta`).
|
|
34
|
+
const currentVersion = (0, _semver.coerce)(projectVersion) + "";
|
|
35
|
+
const startingId = latestMigration ? latestMigration.id : `${currentVersion}-000`;
|
|
36
|
+
const lastId = `${currentVersion}-999`;
|
|
37
|
+
|
|
38
|
+
// Create initial migration record.
|
|
39
|
+
if (!latestMigration) {
|
|
40
|
+
this.logger.info(`No migrations were ever executed. Creating initial migration record %s.`, startingId);
|
|
41
|
+
await this.repository.logMigration({
|
|
42
|
+
id: startingId,
|
|
43
|
+
description: "starting point for applicable migrations detection",
|
|
44
|
+
createdOn: new Date().toISOString(),
|
|
45
|
+
duration: 0,
|
|
46
|
+
reason: "initial migration"
|
|
47
|
+
});
|
|
48
|
+
} else {
|
|
49
|
+
this.logger.info(`Latest migration ID is %s.`, latestMigration.id);
|
|
50
|
+
}
|
|
51
|
+
if (isApplicable) {
|
|
52
|
+
this.logger.info(`Using custom "isApplicable" function.`);
|
|
53
|
+
} else {
|
|
54
|
+
this.logger.info(`Using migrations in the range of %s to %s.`, startingId, lastId);
|
|
55
|
+
}
|
|
56
|
+
const executed = [];
|
|
57
|
+
const skipped = [];
|
|
58
|
+
const notApplicable = [];
|
|
59
|
+
const defaultIsApplicable = mig => {
|
|
60
|
+
return mig.getId() > startingId && mig.getId() <= lastId;
|
|
61
|
+
};
|
|
62
|
+
const isMigrationApplicable = isApplicable || defaultIsApplicable;
|
|
63
|
+
const executableMigrations = this.migrations.filter(mig => {
|
|
64
|
+
if (!isMigrationApplicable(mig)) {
|
|
65
|
+
notApplicable.push({
|
|
66
|
+
id: mig.getId(),
|
|
67
|
+
description: mig.getDescription(),
|
|
68
|
+
reason: "not applicable"
|
|
69
|
+
});
|
|
70
|
+
return false;
|
|
71
|
+
}
|
|
72
|
+
return true;
|
|
73
|
+
}).sort((a, b) => a.getId() > b.getId() ? 1 : -1);
|
|
74
|
+
this.logger.info(`Found %s applicable migration(s).`, executableMigrations.length);
|
|
75
|
+
for (const migration of executableMigrations) {
|
|
76
|
+
const logger = (0, _createPinoLogger.getChildLogger)(this.logger, migration);
|
|
77
|
+
const context = {
|
|
78
|
+
projectVersion,
|
|
79
|
+
logger
|
|
80
|
+
};
|
|
81
|
+
const shouldExecute = await migration.shouldExecute(context);
|
|
82
|
+
if (!shouldExecute) {
|
|
83
|
+
this.logger.info(`Skipping migration %s.`, migration.getId());
|
|
84
|
+
skipped.push({
|
|
85
|
+
id: migration.getId(),
|
|
86
|
+
description: migration.getDescription(),
|
|
87
|
+
reason: "migration already applied"
|
|
88
|
+
});
|
|
89
|
+
await this.repository.logMigration({
|
|
90
|
+
id: migration.getId(),
|
|
91
|
+
description: migration.getDescription(),
|
|
92
|
+
createdOn: new Date().toISOString(),
|
|
93
|
+
duration: 0,
|
|
94
|
+
reason: "skipped"
|
|
95
|
+
});
|
|
96
|
+
continue;
|
|
97
|
+
}
|
|
98
|
+
const result = {
|
|
99
|
+
duration: 0,
|
|
100
|
+
logs: [],
|
|
101
|
+
success: true
|
|
102
|
+
};
|
|
103
|
+
const start = Date.now();
|
|
104
|
+
try {
|
|
105
|
+
this.logger.info(`Executing migration %s: %s.`, migration.getId(), migration.getDescription());
|
|
106
|
+
await migration.execute(context);
|
|
107
|
+
} catch (err) {
|
|
108
|
+
result.success = false;
|
|
109
|
+
this.logger.error(err, err.message);
|
|
110
|
+
} finally {
|
|
111
|
+
result.duration = Date.now() - start;
|
|
112
|
+
this.logger.info(`Finished executing migration %s in %sms.`, migration.getId(), result.duration);
|
|
113
|
+
}
|
|
114
|
+
executed.push({
|
|
115
|
+
id: migration.getId(),
|
|
116
|
+
description: migration.getDescription(),
|
|
117
|
+
result
|
|
118
|
+
});
|
|
119
|
+
if (result.success) {
|
|
120
|
+
await this.repository.logMigration({
|
|
121
|
+
id: migration.getId(),
|
|
122
|
+
description: migration.getDescription(),
|
|
123
|
+
createdOn: new Date().toISOString(),
|
|
124
|
+
duration: result.duration,
|
|
125
|
+
reason: "executed"
|
|
126
|
+
});
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
this.logger.info(`Finished processing applicable migrations.`);
|
|
130
|
+
return {
|
|
131
|
+
executed,
|
|
132
|
+
skipped,
|
|
133
|
+
notApplicable
|
|
134
|
+
};
|
|
135
|
+
}
|
|
136
|
+
validateIds(migrations) {
|
|
137
|
+
const ids = new Set();
|
|
138
|
+
for (const mig of migrations) {
|
|
139
|
+
const id = mig.getId();
|
|
140
|
+
if (id.endsWith("-000")) {
|
|
141
|
+
const error = new Error(`Migration ID must not end with "000": ${id}`);
|
|
142
|
+
this.logger.error(error);
|
|
143
|
+
throw error;
|
|
144
|
+
}
|
|
145
|
+
if (ids.has(id)) {
|
|
146
|
+
const error = new Error(`Duplicate migration ID found: ${id}`);
|
|
147
|
+
this.logger.error(error);
|
|
148
|
+
throw error;
|
|
149
|
+
}
|
|
150
|
+
ids.add(id);
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
exports.MigrationRunner = MigrationRunner;
|
|
155
|
+
(0, _ioc.makeInjectable)(MigrationRunner, [(0, _ioc.inject)(_symbols.MigrationRepositorySymbol), (0, _ioc.inject)(_symbols.MigrationSymbol, {
|
|
156
|
+
multi: true,
|
|
157
|
+
optional: true
|
|
158
|
+
}), (0, _ioc.inject)(_symbols.LoggerSymbol, {
|
|
159
|
+
optional: true
|
|
160
|
+
})]);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["MigrationRunner","constructor","repository","migrations","logger","createPinoLogger","execute","projectVersion","isApplicable","validateIds","latestMigration","listMigrations","limit","info","currentVersion","coerce","startingId","id","lastId","logMigration","description","createdOn","Date","toISOString","duration","reason","executed","skipped","notApplicable","defaultIsApplicable","mig","getId","isMigrationApplicable","executableMigrations","filter","push","getDescription","sort","a","b","length","migration","getChildLogger","context","shouldExecute","result","logs","success","start","now","err","error","message","ids","Set","endsWith","Error","has","add","makeInjectable","inject","MigrationRepositorySymbol","MigrationSymbol","multi","optional","LoggerSymbol"],"sources":["MigrationRunner.ts"],"sourcesContent":["import { Logger } from \"pino\";\nimport { inject, makeInjectable } from \"@webiny/ioc\";\nimport { coerce } from \"semver\";\nimport { MigrationRepositorySymbol, LoggerSymbol, MigrationSymbol } from \"./symbols\";\nimport { createPinoLogger, getChildLogger } from \"./createPinoLogger\";\nimport {\n MigrationResult,\n ExecutedMigrationResponse,\n SkippedMigrationResponse,\n MigrationRepository,\n DataMigration,\n DataMigrationContext\n} from \"~/types\";\n\nexport type IsMigrationApplicable = (migration: DataMigration) => boolean;\n\nexport class MigrationRunner {\n private readonly logger: Logger;\n private readonly migrations: DataMigration[];\n private readonly repository: MigrationRepository;\n\n constructor(\n repository: MigrationRepository,\n migrations: DataMigration[],\n logger: Logger | undefined\n ) {\n this.repository = repository;\n this.migrations = migrations || [];\n\n if (!logger) {\n logger = createPinoLogger();\n }\n this.logger = logger;\n }\n\n async execute(projectVersion: string, isApplicable?: IsMigrationApplicable) {\n this.validateIds(this.migrations);\n const [latestMigration] = await 
this.repository.listMigrations({ limit: 1 });\n\n this.logger.info(`Project version is %s.`, projectVersion);\n\n // Get current version, and coerce it to a valid SemVer.\n // With this, we can run migrations targeted for stable versions, released under a preid tag (e.g., `beta`).\n const currentVersion = coerce(projectVersion) + \"\";\n const startingId = latestMigration ? latestMigration.id : `${currentVersion}-000`;\n const lastId = `${currentVersion}-999`;\n\n // Create initial migration record.\n if (!latestMigration) {\n this.logger.info(\n `No migrations were ever executed. Creating initial migration record %s.`,\n startingId\n );\n await this.repository.logMigration({\n id: startingId,\n description: \"starting point for applicable migrations detection\",\n createdOn: new Date().toISOString(),\n duration: 0,\n reason: \"initial migration\"\n });\n } else {\n this.logger.info(`Latest migration ID is %s.`, latestMigration.id);\n }\n\n if (isApplicable) {\n this.logger.info(`Using custom \"isApplicable\" function.`);\n } else {\n this.logger.info(`Using migrations in the range of %s to %s.`, startingId, lastId);\n }\n\n const executed: ExecutedMigrationResponse[] = [];\n const skipped: SkippedMigrationResponse[] = [];\n const notApplicable: SkippedMigrationResponse[] = [];\n\n const defaultIsApplicable: IsMigrationApplicable = mig => {\n return mig.getId() > startingId && mig.getId() <= lastId;\n };\n\n const isMigrationApplicable = isApplicable || defaultIsApplicable;\n\n const executableMigrations = this.migrations\n .filter(mig => {\n if (!isMigrationApplicable(mig)) {\n notApplicable.push({\n id: mig.getId(),\n description: mig.getDescription(),\n reason: \"not applicable\"\n });\n\n return false;\n }\n return true;\n })\n .sort((a, b) => (a.getId() > b.getId() ? 
1 : -1));\n\n this.logger.info(`Found %s applicable migration(s).`, executableMigrations.length);\n\n for (const migration of executableMigrations) {\n const logger = getChildLogger(this.logger, migration);\n const context: DataMigrationContext = { projectVersion, logger };\n\n const shouldExecute = await migration.shouldExecute(context);\n\n if (!shouldExecute) {\n this.logger.info(`Skipping migration %s.`, migration.getId());\n skipped.push({\n id: migration.getId(),\n description: migration.getDescription(),\n reason: \"migration already applied\"\n });\n\n await this.repository.logMigration({\n id: migration.getId(),\n description: migration.getDescription(),\n createdOn: new Date().toISOString(),\n duration: 0,\n reason: \"skipped\"\n });\n\n continue;\n }\n\n const result: MigrationResult = {\n duration: 0,\n logs: [],\n success: true\n };\n\n const start = Date.now();\n try {\n this.logger.info(\n `Executing migration %s: %s.`,\n migration.getId(),\n migration.getDescription()\n );\n await migration.execute(context);\n } catch (err) {\n result.success = false;\n this.logger.error(err, err.message);\n } finally {\n result.duration = Date.now() - start;\n this.logger.info(\n `Finished executing migration %s in %sms.`,\n migration.getId(),\n result.duration\n );\n }\n\n executed.push({\n id: migration.getId(),\n description: migration.getDescription(),\n result\n });\n\n if (result.success) {\n await this.repository.logMigration({\n id: migration.getId(),\n description: migration.getDescription(),\n createdOn: new Date().toISOString(),\n duration: result.duration,\n reason: \"executed\"\n });\n }\n }\n\n this.logger.info(`Finished processing applicable migrations.`);\n\n return { executed, skipped, notApplicable };\n }\n\n private validateIds(migrations: DataMigration[]) {\n const ids = new Set();\n for (const mig of migrations) {\n const id = mig.getId();\n if (id.endsWith(\"-000\")) {\n const error = new Error(`Migration ID must not end with \"000\": 
${id}`);\n this.logger.error(error);\n throw error;\n }\n\n if (ids.has(id)) {\n const error = new Error(`Duplicate migration ID found: ${id}`);\n this.logger.error(error);\n throw error;\n }\n ids.add(id);\n }\n }\n}\n\nmakeInjectable(MigrationRunner, [\n inject(MigrationRepositorySymbol),\n inject(MigrationSymbol, { multi: true, optional: true }),\n inject(LoggerSymbol, { optional: true })\n]);\n"],"mappings":";;;;;;;;AACA;AACA;AACA;AACA;AAYO,MAAMA,eAAe,CAAC;EAKzBC,WAAW,CACPC,UAA+B,EAC/BC,UAA2B,EAC3BC,MAA0B,EAC5B;IAAA;IAAA;IAAA;IACE,IAAI,CAACF,UAAU,GAAGA,UAAU;IAC5B,IAAI,CAACC,UAAU,GAAGA,UAAU,IAAI,EAAE;IAElC,IAAI,CAACC,MAAM,EAAE;MACTA,MAAM,GAAG,IAAAC,kCAAgB,GAAE;IAC/B;IACA,IAAI,CAACD,MAAM,GAAGA,MAAM;EACxB;EAEA,MAAME,OAAO,CAACC,cAAsB,EAAEC,YAAoC,EAAE;IACxE,IAAI,CAACC,WAAW,CAAC,IAAI,CAACN,UAAU,CAAC;IACjC,MAAM,CAACO,eAAe,CAAC,GAAG,MAAM,IAAI,CAACR,UAAU,CAACS,cAAc,CAAC;MAAEC,KAAK,EAAE;IAAE,CAAC,CAAC;IAE5E,IAAI,CAACR,MAAM,CAACS,IAAI,CAAE,wBAAuB,EAAEN,cAAc,CAAC;;IAE1D;IACA;IACA,MAAMO,cAAc,GAAG,IAAAC,cAAM,EAACR,cAAc,CAAC,GAAG,EAAE;IAClD,MAAMS,UAAU,GAAGN,eAAe,GAAGA,eAAe,CAACO,EAAE,GAAI,GAAEH,cAAe,MAAK;IACjF,MAAMI,MAAM,GAAI,GAAEJ,cAAe,MAAK;;IAEtC;IACA,IAAI,CAACJ,eAAe,EAAE;MAClB,IAAI,CAACN,MAAM,CAACS,IAAI,CACX,yEAAwE,EACzEG,UAAU,CACb;MACD,MAAM,IAAI,CAACd,UAAU,CAACiB,YAAY,CAAC;QAC/BF,EAAE,EAAED,UAAU;QACdI,WAAW,EAAE,oDAAoD;QACjEC,SAAS,EAAE,IAAIC,IAAI,EAAE,CAACC,WAAW,EAAE;QACnCC,QAAQ,EAAE,CAAC;QACXC,MAAM,EAAE;MACZ,CAAC,CAAC;IACN,CAAC,MAAM;MACH,IAAI,CAACrB,MAAM,CAACS,IAAI,CAAE,4BAA2B,EAAEH,eAAe,CAACO,EAAE,CAAC;IACtE;IAEA,IAAIT,YAAY,EAAE;MACd,IAAI,CAACJ,MAAM,CAACS,IAAI,CAAE,uCAAsC,CAAC;IAC7D,CAAC,MAAM;MACH,IAAI,CAACT,MAAM,CAACS,IAAI,CAAE,4CAA2C,EAAEG,UAAU,EAAEE,MAAM,CAAC;IACtF;IAEA,MAAMQ,QAAqC,GAAG,EAAE;IAChD,MAAMC,OAAmC,GAAG,EAAE;IAC9C,MAAMC,aAAyC,GAAG,EAAE;IAEpD,MAAMC,mBAA0C,GAAGC,GAAG,IAAI;MACtD,OAAOA,GAAG,CAACC,KAAK,EAAE,GAAGf,UAAU,IAAIc,GAAG,CAACC,KAAK,EAAE,IAAIb,MAAM;IAC5D,CAAC;IAED,MAAMc,qBAAqB,GAAGxB,YAAY,IAAIqB,mBAAmB;IAEjE,MAAMI,oBAAoB,GAAG,IAAI,CAAC9B,UAAU,CACvC+B,MAAM,
CAACJ,GAAG,IAAI;MACX,IAAI,CAACE,qBAAqB,CAACF,GAAG,CAAC,EAAE;QAC7BF,aAAa,CAACO,IAAI,CAAC;UACflB,EAAE,EAAEa,GAAG,CAACC,KAAK,EAAE;UACfX,WAAW,EAAEU,GAAG,CAACM,cAAc,EAAE;UACjCX,MAAM,EAAE;QACZ,CAAC,CAAC;QAEF,OAAO,KAAK;MAChB;MACA,OAAO,IAAI;IACf,CAAC,CAAC,CACDY,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAMD,CAAC,CAACP,KAAK,EAAE,GAAGQ,CAAC,CAACR,KAAK,EAAE,GAAG,CAAC,GAAG,CAAC,CAAE,CAAC;IAErD,IAAI,CAAC3B,MAAM,CAACS,IAAI,CAAE,mCAAkC,EAAEoB,oBAAoB,CAACO,MAAM,CAAC;IAElF,KAAK,MAAMC,SAAS,IAAIR,oBAAoB,EAAE;MAC1C,MAAM7B,MAAM,GAAG,IAAAsC,gCAAc,EAAC,IAAI,CAACtC,MAAM,EAAEqC,SAAS,CAAC;MACrD,MAAME,OAA6B,GAAG;QAAEpC,cAAc;QAAEH;MAAO,CAAC;MAEhE,MAAMwC,aAAa,GAAG,MAAMH,SAAS,CAACG,aAAa,CAACD,OAAO,CAAC;MAE5D,IAAI,CAACC,aAAa,EAAE;QAChB,IAAI,CAACxC,MAAM,CAACS,IAAI,CAAE,wBAAuB,EAAE4B,SAAS,CAACV,KAAK,EAAE,CAAC;QAC7DJ,OAAO,CAACQ,IAAI,CAAC;UACTlB,EAAE,EAAEwB,SAAS,CAACV,KAAK,EAAE;UACrBX,WAAW,EAAEqB,SAAS,CAACL,cAAc,EAAE;UACvCX,MAAM,EAAE;QACZ,CAAC,CAAC;QAEF,MAAM,IAAI,CAACvB,UAAU,CAACiB,YAAY,CAAC;UAC/BF,EAAE,EAAEwB,SAAS,CAACV,KAAK,EAAE;UACrBX,WAAW,EAAEqB,SAAS,CAACL,cAAc,EAAE;UACvCf,SAAS,EAAE,IAAIC,IAAI,EAAE,CAACC,WAAW,EAAE;UACnCC,QAAQ,EAAE,CAAC;UACXC,MAAM,EAAE;QACZ,CAAC,CAAC;QAEF;MACJ;MAEA,MAAMoB,MAAuB,GAAG;QAC5BrB,QAAQ,EAAE,CAAC;QACXsB,IAAI,EAAE,EAAE;QACRC,OAAO,EAAE;MACb,CAAC;MAED,MAAMC,KAAK,GAAG1B,IAAI,CAAC2B,GAAG,EAAE;MACxB,IAAI;QACA,IAAI,CAAC7C,MAAM,CAACS,IAAI,CACX,6BAA4B,EAC7B4B,SAAS,CAACV,KAAK,EAAE,EACjBU,SAAS,CAACL,cAAc,EAAE,CAC7B;QACD,MAAMK,SAAS,CAACnC,OAAO,CAACqC,OAAO,CAAC;MACpC,CAAC,CAAC,OAAOO,GAAG,EAAE;QACVL,MAAM,CAACE,OAAO,GAAG,KAAK;QACtB,IAAI,CAAC3C,MAAM,CAAC+C,KAAK,CAACD,GAAG,EAAEA,GAAG,CAACE,OAAO,CAAC;MACvC,CAAC,SAAS;QACNP,MAAM,CAACrB,QAAQ,GAAGF,IAAI,CAAC2B,GAAG,EAAE,GAAGD,KAAK;QACpC,IAAI,CAAC5C,MAAM,CAACS,IAAI,CACX,0CAAyC,EAC1C4B,SAAS,CAACV,KAAK,EAAE,EACjBc,MAAM,CAACrB,QAAQ,CAClB;MACL;MAEAE,QAAQ,CAACS,IAAI,CAAC;QACVlB,EAAE,EAAEwB,SAAS,CAACV,KAAK,EAAE;QACrBX,WAAW,EAAEqB,SAAS,CAACL,cAAc,EAAE;QACvCS;MACJ,CAAC,CAAC;MAEF,IAAIA,MAAM,CAACE,OAAO,EAAE;QAChB,MAAM,IAAI,CAAC7C,UAAU,CAACiB,YAAY,CAAC;UAC/BF,EAAE
,EAAEwB,SAAS,CAACV,KAAK,EAAE;UACrBX,WAAW,EAAEqB,SAAS,CAACL,cAAc,EAAE;UACvCf,SAAS,EAAE,IAAIC,IAAI,EAAE,CAACC,WAAW,EAAE;UACnCC,QAAQ,EAAEqB,MAAM,CAACrB,QAAQ;UACzBC,MAAM,EAAE;QACZ,CAAC,CAAC;MACN;IACJ;IAEA,IAAI,CAACrB,MAAM,CAACS,IAAI,CAAE,4CAA2C,CAAC;IAE9D,OAAO;MAAEa,QAAQ;MAAEC,OAAO;MAAEC;IAAc,CAAC;EAC/C;EAEQnB,WAAW,CAACN,UAA2B,EAAE;IAC7C,MAAMkD,GAAG,GAAG,IAAIC,GAAG,EAAE;IACrB,KAAK,MAAMxB,GAAG,IAAI3B,UAAU,EAAE;MAC1B,MAAMc,EAAE,GAAGa,GAAG,CAACC,KAAK,EAAE;MACtB,IAAId,EAAE,CAACsC,QAAQ,CAAC,MAAM,CAAC,EAAE;QACrB,MAAMJ,KAAK,GAAG,IAAIK,KAAK,CAAE,yCAAwCvC,EAAG,EAAC,CAAC;QACtE,IAAI,CAACb,MAAM,CAAC+C,KAAK,CAACA,KAAK,CAAC;QACxB,MAAMA,KAAK;MACf;MAEA,IAAIE,GAAG,CAACI,GAAG,CAACxC,EAAE,CAAC,EAAE;QACb,MAAMkC,KAAK,GAAG,IAAIK,KAAK,CAAE,iCAAgCvC,EAAG,EAAC,CAAC;QAC9D,IAAI,CAACb,MAAM,CAAC+C,KAAK,CAACA,KAAK,CAAC;QACxB,MAAMA,KAAK;MACf;MACAE,GAAG,CAACK,GAAG,CAACzC,EAAE,CAAC;IACf;EACJ;AACJ;AAAC;AAED,IAAA0C,mBAAc,EAAC3D,eAAe,EAAE,CAC5B,IAAA4D,WAAM,EAACC,kCAAyB,CAAC,EACjC,IAAAD,WAAM,EAACE,wBAAe,EAAE;EAAEC,KAAK,EAAE,IAAI;EAAEC,QAAQ,EAAE;AAAK,CAAC,CAAC,EACxD,IAAAJ,WAAM,EAACK,qBAAY,EAAE;EAAED,QAAQ,EAAE;AAAK,CAAC,CAAC,CAC3C,CAAC"}
|
package/README.md
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
# @webiny/data-migration
|
|
2
|
+
|
|
3
|
+
[](https://www.npmjs.com/package/@webiny/data-migration)
|
|
4
|
+
[](https://www.npmjs.com/package/@webiny/data-migration)
|
|
5
|
+
[](https://github.com/prettier/prettier)
|
|
6
|
+
[](http://makeapullrequest.com)
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { pino, Logger } from "pino";
|
|
2
|
+
import pinoPretty from "pino-pretty";
|
|
3
|
+
import { DataMigration } from "./types";
|
|
4
|
+
export declare const createPinoLogger: () => Logger<pinoPretty.PrettyStream>;
|
|
5
|
+
export declare const getChildLogger: (logger: Logger, migration: DataMigration) => pino.Logger<import("pino").LoggerOptions & {
|
|
6
|
+
msgPrefix: string;
|
|
7
|
+
}>;
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.getChildLogger = exports.createPinoLogger = void 0;
|
|
8
|
+
var _chalk = _interopRequireDefault(require("chalk"));
|
|
9
|
+
var _pino = require("pino");
|
|
10
|
+
var _pinoPretty = _interopRequireDefault(require("pino-pretty"));
|
|
11
|
+
const createPinoLogger = () => {
|
|
12
|
+
return (0, _pino.pino)((0, _pinoPretty.default)({
|
|
13
|
+
ignore: "pid,hostname"
|
|
14
|
+
}));
|
|
15
|
+
};
|
|
16
|
+
exports.createPinoLogger = createPinoLogger;
|
|
17
|
+
const getChildLogger = (logger, migration) => {
|
|
18
|
+
return logger.child({}, {
|
|
19
|
+
msgPrefix: _chalk.default.blueBright(`[${migration.getId()}]`) + " "
|
|
20
|
+
});
|
|
21
|
+
};
|
|
22
|
+
exports.getChildLogger = getChildLogger;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createPinoLogger","pino","pinoPretty","ignore","getChildLogger","logger","migration","child","msgPrefix","chalk","blueBright","getId"],"sources":["createPinoLogger.ts"],"sourcesContent":["import chalk from \"chalk\";\nimport { pino, Logger } from \"pino\";\nimport pinoPretty from \"pino-pretty\";\nimport { DataMigration } from \"~/types\";\n\nexport const createPinoLogger = () => {\n return pino(\n pinoPretty({\n ignore: \"pid,hostname\"\n })\n );\n};\n\nexport const getChildLogger = (logger: Logger, migration: DataMigration) => {\n return logger.child({}, { msgPrefix: chalk.blueBright(`[${migration.getId()}]`) + \" \" });\n};\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AAGO,MAAMA,gBAAgB,GAAG,MAAM;EAClC,OAAO,IAAAC,UAAI,EACP,IAAAC,mBAAU,EAAC;IACPC,MAAM,EAAE;EACZ,CAAC,CAAC,CACL;AACL,CAAC;AAAC;AAEK,MAAMC,cAAc,GAAG,CAACC,MAAc,EAAEC,SAAwB,KAAK;EACxE,OAAOD,MAAM,CAACE,KAAK,CAAC,CAAC,CAAC,EAAE;IAAEC,SAAS,EAAEC,cAAK,CAACC,UAAU,CAAE,IAAGJ,SAAS,CAACK,KAAK,EAAG,GAAE,CAAC,GAAG;EAAI,CAAC,CAAC;AAC5F,CAAC;AAAC"}
|
package/createTable.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { DocumentClient } from "aws-sdk/clients/dynamodb";
|
|
2
|
+
import { Table } from "dynamodb-toolbox";
|
|
3
|
+
export interface CreateTableParams {
|
|
4
|
+
name: string;
|
|
5
|
+
documentClient: DocumentClient;
|
|
6
|
+
}
|
|
7
|
+
export declare const createTable: ({ name, documentClient }: CreateTableParams) => Table;
|
package/createTable.js
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.createTable = void 0;
|
|
7
|
+
var _dynamodbToolbox = require("dynamodb-toolbox");
|
|
8
|
+
const createTable = ({
|
|
9
|
+
name,
|
|
10
|
+
documentClient
|
|
11
|
+
}) => {
|
|
12
|
+
return new _dynamodbToolbox.Table({
|
|
13
|
+
name,
|
|
14
|
+
partitionKey: "PK",
|
|
15
|
+
sortKey: "SK",
|
|
16
|
+
DocumentClient: documentClient,
|
|
17
|
+
indexes: {
|
|
18
|
+
GSI1: {
|
|
19
|
+
partitionKey: "GSI1_PK",
|
|
20
|
+
sortKey: "GSI1_SK"
|
|
21
|
+
}
|
|
22
|
+
// GSI2: {
|
|
23
|
+
// partitionKey: "GSI2_PK",
|
|
24
|
+
// sortKey: "GSI2_SK"
|
|
25
|
+
// }
|
|
26
|
+
}
|
|
27
|
+
});
|
|
28
|
+
};
|
|
29
|
+
exports.createTable = createTable;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createTable","name","documentClient","Table","partitionKey","sortKey","DocumentClient","indexes","GSI1"],"sources":["createTable.ts"],"sourcesContent":["import { DocumentClient } from \"aws-sdk/clients/dynamodb\";\nimport { Table } from \"dynamodb-toolbox\";\n\nexport interface CreateTableParams {\n name: string;\n documentClient: DocumentClient;\n}\n\nexport const createTable = ({ name, documentClient }: CreateTableParams) => {\n return new Table({\n name,\n partitionKey: \"PK\",\n sortKey: \"SK\",\n DocumentClient: documentClient,\n indexes: {\n GSI1: {\n partitionKey: \"GSI1_PK\",\n sortKey: \"GSI1_SK\"\n }\n // GSI2: {\n // partitionKey: \"GSI2_PK\",\n // sortKey: \"GSI2_SK\"\n // }\n }\n });\n};\n"],"mappings":";;;;;;AACA;AAOO,MAAMA,WAAW,GAAG,CAAC;EAAEC,IAAI;EAAEC;AAAkC,CAAC,KAAK;EACxE,OAAO,IAAIC,sBAAK,CAAC;IACbF,IAAI;IACJG,YAAY,EAAE,IAAI;IAClBC,OAAO,EAAE,IAAI;IACbC,cAAc,EAAEJ,cAAc;IAC9BK,OAAO,EAAE;MACLC,IAAI,EAAE;QACFJ,YAAY,EAAE,SAAS;QACvBC,OAAO,EAAE;MACb;MACA;MACA;MACA;MACA;IACJ;EACJ,CAAC,CAAC;AACN,CAAC;AAAC"}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { Client as ElasticsearchClient } from "@elastic/elasticsearch";
|
|
2
|
+
import { Table } from "dynamodb-toolbox";
|
|
3
|
+
import { Constructor } from "@webiny/ioc";
|
|
4
|
+
import { DataMigration, MigrationEventHandlerResponse, MigrationEventPayload, MigrationRepository } from "../types";
|
|
5
|
+
import { IsMigrationApplicable } from "../MigrationRunner";
|
|
6
|
+
interface CreateDdbEsDataMigrationConfig {
|
|
7
|
+
elasticsearchClient: ElasticsearchClient;
|
|
8
|
+
primaryTable: Table;
|
|
9
|
+
dynamoToEsTable: Table;
|
|
10
|
+
migrations: Constructor<DataMigration>[];
|
|
11
|
+
isMigrationApplicable?: IsMigrationApplicable;
|
|
12
|
+
repository?: MigrationRepository;
|
|
13
|
+
}
|
|
14
|
+
export declare const createDdbEsProjectMigration: ({ migrations, elasticsearchClient, primaryTable, dynamoToEsTable, isMigrationApplicable, repository }: CreateDdbEsDataMigrationConfig) => import("@webiny/handler-aws").RawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>;
|
|
15
|
+
export {};
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.createDdbEsProjectMigration = void 0;
|
|
7
|
+
var _handlerAws = require("@webiny/handler-aws");
|
|
8
|
+
var _ioc = require("@webiny/ioc");
|
|
9
|
+
var _symbols = require("../symbols");
|
|
10
|
+
var _MigrationRunner = require("../MigrationRunner");
|
|
11
|
+
var _migrations = require("../repository/migrations.repository");
|
|
12
|
+
var _devVersionErrorResponse = require("./devVersionErrorResponse");
|
|
13
|
+
var _createPatternMatcher = require("./createPatternMatcher");
|
|
14
|
+
const createDdbEsProjectMigration = ({
|
|
15
|
+
migrations,
|
|
16
|
+
elasticsearchClient,
|
|
17
|
+
primaryTable,
|
|
18
|
+
dynamoToEsTable,
|
|
19
|
+
isMigrationApplicable = undefined,
|
|
20
|
+
repository = undefined
|
|
21
|
+
}) => {
|
|
22
|
+
return (0, _handlerAws.createRawEventHandler)(async ({
|
|
23
|
+
payload
|
|
24
|
+
}) => {
|
|
25
|
+
const projectVersion = String((payload === null || payload === void 0 ? void 0 : payload.version) || process.env.WEBINY_VERSION);
|
|
26
|
+
if (projectVersion === "0.0.0") {
|
|
27
|
+
return (0, _devVersionErrorResponse.devVersionErrorResponse)();
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
// COMPOSITION ROOT
|
|
31
|
+
const container = (0, _ioc.createContainer)();
|
|
32
|
+
container.bind(_symbols.PrimaryDynamoTableSymbol).toConstantValue(primaryTable);
|
|
33
|
+
container.bind(_symbols.ElasticsearchDynamoTableSymbol).toConstantValue(dynamoToEsTable);
|
|
34
|
+
container.bind(_symbols.ElasticsearchClientSymbol).toConstantValue(elasticsearchClient);
|
|
35
|
+
if (repository) {
|
|
36
|
+
// Repository implementation provided by the user.
|
|
37
|
+
container.bind(_symbols.MigrationRepositorySymbol).toConstantValue(repository);
|
|
38
|
+
} else {
|
|
39
|
+
// Default repository implementation.
|
|
40
|
+
container.bind(_symbols.MigrationRepositorySymbol).to(_migrations.MigrationRepositoryImpl);
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Bind the provided migrations.
|
|
44
|
+
migrations.forEach(migration => container.bind(_symbols.MigrationSymbol).to(migration));
|
|
45
|
+
|
|
46
|
+
// If handler was invoked with a `pattern`, filter migrations that match the pattern only.
|
|
47
|
+
let patternMatcher;
|
|
48
|
+
if (payload.pattern) {
|
|
49
|
+
patternMatcher = (0, _createPatternMatcher.createPatternMatcher)(payload.pattern);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// Inject dependencies and execute.
|
|
53
|
+
try {
|
|
54
|
+
const data = await container.resolve(_MigrationRunner.MigrationRunner).execute(projectVersion, patternMatcher || isMigrationApplicable);
|
|
55
|
+
return {
|
|
56
|
+
data
|
|
57
|
+
};
|
|
58
|
+
} catch (err) {
|
|
59
|
+
return {
|
|
60
|
+
error: {
|
|
61
|
+
message: err.message
|
|
62
|
+
}
|
|
63
|
+
};
|
|
64
|
+
}
|
|
65
|
+
});
|
|
66
|
+
};
|
|
67
|
+
exports.createDdbEsProjectMigration = createDdbEsProjectMigration;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createDdbEsProjectMigration","migrations","elasticsearchClient","primaryTable","dynamoToEsTable","isMigrationApplicable","undefined","repository","createRawEventHandler","payload","projectVersion","String","version","process","env","WEBINY_VERSION","devVersionErrorResponse","container","createContainer","bind","PrimaryDynamoTableSymbol","toConstantValue","ElasticsearchDynamoTableSymbol","ElasticsearchClientSymbol","MigrationRepositorySymbol","to","MigrationRepositoryImpl","forEach","migration","MigrationSymbol","patternMatcher","pattern","createPatternMatcher","data","resolve","MigrationRunner","execute","err","error","message"],"sources":["createDdbEsProjectMigration.ts"],"sourcesContent":["import { Client as ElasticsearchClient } from \"@elastic/elasticsearch\";\nimport { Table } from \"dynamodb-toolbox\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { createContainer, Constructor } from \"@webiny/ioc\";\nimport {\n DataMigration,\n MigrationEventHandlerResponse,\n MigrationEventPayload,\n MigrationRepository\n} from \"~/types\";\nimport {\n ElasticsearchClientSymbol,\n MigrationRepositorySymbol,\n PrimaryDynamoTableSymbol,\n ElasticsearchDynamoTableSymbol,\n MigrationSymbol\n} from \"~/symbols\";\nimport { IsMigrationApplicable, MigrationRunner } from \"~/MigrationRunner\";\nimport { MigrationRepositoryImpl } from \"~/repository/migrations.repository\";\nimport { devVersionErrorResponse } from \"~/handlers/devVersionErrorResponse\";\nimport { createPatternMatcher } from \"~/handlers/createPatternMatcher\";\n\ninterface CreateDdbEsDataMigrationConfig {\n elasticsearchClient: ElasticsearchClient;\n primaryTable: Table;\n dynamoToEsTable: Table;\n migrations: Constructor<DataMigration>[];\n isMigrationApplicable?: IsMigrationApplicable;\n repository?: MigrationRepository;\n}\n\nexport const createDdbEsProjectMigration = ({\n migrations,\n elasticsearchClient,\n primaryTable,\n dynamoToEsTable,\n 
isMigrationApplicable = undefined,\n repository = undefined\n}: CreateDdbEsDataMigrationConfig) => {\n return createRawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>(\n async ({ payload }) => {\n const projectVersion = String(payload?.version || process.env.WEBINY_VERSION);\n\n if (projectVersion === \"0.0.0\") {\n return devVersionErrorResponse();\n }\n\n // COMPOSITION ROOT\n const container = createContainer();\n container.bind(PrimaryDynamoTableSymbol).toConstantValue(primaryTable);\n container.bind(ElasticsearchDynamoTableSymbol).toConstantValue(dynamoToEsTable);\n container.bind(ElasticsearchClientSymbol).toConstantValue(elasticsearchClient);\n\n if (repository) {\n // Repository implementation provided by the user.\n container.bind(MigrationRepositorySymbol).toConstantValue(repository);\n } else {\n // Default repository implementation.\n container.bind(MigrationRepositorySymbol).to(MigrationRepositoryImpl);\n }\n\n // Bind the provided migrations.\n migrations.forEach(migration => container.bind(MigrationSymbol).to(migration));\n\n // If handler was invoked with a `pattern`, filter migrations that match the pattern only.\n let patternMatcher;\n if (payload.pattern) {\n patternMatcher = createPatternMatcher(payload.pattern);\n }\n\n // Inject dependencies and execute.\n try {\n const data = await container\n .resolve(MigrationRunner)\n .execute(projectVersion, patternMatcher || isMigrationApplicable);\n\n return { data };\n } catch (err) {\n return { error: { message: err.message } };\n }\n }\n 
);\n};\n"],"mappings":";;;;;;AAEA;AACA;AAOA;AAOA;AACA;AACA;AACA;AAWO,MAAMA,2BAA2B,GAAG,CAAC;EACxCC,UAAU;EACVC,mBAAmB;EACnBC,YAAY;EACZC,eAAe;EACfC,qBAAqB,GAAGC,SAAS;EACjCC,UAAU,GAAGD;AACe,CAAC,KAAK;EAClC,OAAO,IAAAE,iCAAqB,EACxB,OAAO;IAAEC;EAAQ,CAAC,KAAK;IACnB,MAAMC,cAAc,GAAGC,MAAM,CAAC,CAAAF,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEG,OAAO,KAAIC,OAAO,CAACC,GAAG,CAACC,cAAc,CAAC;IAE7E,IAAIL,cAAc,KAAK,OAAO,EAAE;MAC5B,OAAO,IAAAM,gDAAuB,GAAE;IACpC;;IAEA;IACA,MAAMC,SAAS,GAAG,IAAAC,oBAAe,GAAE;IACnCD,SAAS,CAACE,IAAI,CAACC,iCAAwB,CAAC,CAACC,eAAe,CAAClB,YAAY,CAAC;IACtEc,SAAS,CAACE,IAAI,CAACG,uCAA8B,CAAC,CAACD,eAAe,CAACjB,eAAe,CAAC;IAC/Ea,SAAS,CAACE,IAAI,CAACI,kCAAyB,CAAC,CAACF,eAAe,CAACnB,mBAAmB,CAAC;IAE9E,IAAIK,UAAU,EAAE;MACZ;MACAU,SAAS,CAACE,IAAI,CAACK,kCAAyB,CAAC,CAACH,eAAe,CAACd,UAAU,CAAC;IACzE,CAAC,MAAM;MACH;MACAU,SAAS,CAACE,IAAI,CAACK,kCAAyB,CAAC,CAACC,EAAE,CAACC,mCAAuB,CAAC;IACzE;;IAEA;IACAzB,UAAU,CAAC0B,OAAO,CAACC,SAAS,IAAIX,SAAS,CAACE,IAAI,CAACU,wBAAe,CAAC,CAACJ,EAAE,CAACG,SAAS,CAAC,CAAC;;IAE9E;IACA,IAAIE,cAAc;IAClB,IAAIrB,OAAO,CAACsB,OAAO,EAAE;MACjBD,cAAc,GAAG,IAAAE,0CAAoB,EAACvB,OAAO,CAACsB,OAAO,CAAC;IAC1D;;IAEA;IACA,IAAI;MACA,MAAME,IAAI,GAAG,MAAMhB,SAAS,CACvBiB,OAAO,CAACC,gCAAe,CAAC,CACxBC,OAAO,CAAC1B,cAAc,EAAEoB,cAAc,IAAIzB,qBAAqB,CAAC;MAErE,OAAO;QAAE4B;MAAK,CAAC;IACnB,CAAC,CAAC,OAAOI,GAAG,EAAE;MACV,OAAO;QAAEC,KAAK,EAAE;UAAEC,OAAO,EAAEF,GAAG,CAACE;QAAQ;MAAE,CAAC;IAC9C;EACJ,CAAC,CACJ;AACL,CAAC;AAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { Table } from "dynamodb-toolbox";
import { Constructor } from "@webiny/ioc";
import { IsMigrationApplicable } from "../MigrationRunner";
import { DataMigration, MigrationEventHandlerResponse, MigrationEventPayload, MigrationRepository } from "../types";
/**
 * Configuration for the DynamoDB-only project migration handler.
 */
interface CreateDdbDataMigrationConfig {
    /** Migration classes to register with the IOC container. */
    migrations: Constructor<DataMigration>[];
    /** Primary DynamoDB table, bound into the container for injection. */
    primaryTable: Table;
    /** Optional custom storage for the migration execution log; a default implementation is used when omitted. */
    repository?: MigrationRepository;
    /** Optional predicate deciding whether a given migration should be executed. */
    isMigrationApplicable?: IsMigrationApplicable;
}
/**
 * Creates a raw AWS Lambda event handler that executes data migrations
 * for a DynamoDB-only Webiny project.
 */
export declare const createDdbProjectMigration: ({ migrations, primaryTable, isMigrationApplicable, repository }: CreateDdbDataMigrationConfig) => import("@webiny/handler-aws").RawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>;
export {};
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.createDdbProjectMigration = void 0;
|
|
7
|
+
var _handlerAws = require("@webiny/handler-aws");
|
|
8
|
+
var _ioc = require("@webiny/ioc");
|
|
9
|
+
var _MigrationRunner = require("../MigrationRunner");
|
|
10
|
+
var _symbols = require("../symbols");
|
|
11
|
+
var _migrations = require("../repository/migrations.repository");
|
|
12
|
+
var _devVersionErrorResponse = require("./devVersionErrorResponse");
|
|
13
|
+
var _createPatternMatcher = require("./createPatternMatcher");
|
|
14
|
+
/**
 * Creates a raw Lambda event handler that runs data migrations against a
 * DynamoDB-only project. Responds with `{ data }` on success, `{ error }`
 * on failure — never both.
 */
const createDdbProjectMigration = ({
    migrations,
    primaryTable,
    isMigrationApplicable = undefined,
    repository = undefined
}) => {
    return (0, _handlerAws.createRawEventHandler)(async ({ payload }) => {
        const requestedVersion = payload == null ? undefined : payload.version;
        const projectVersion = String(requestedVersion || process.env.WEBINY_VERSION);

        // Development projects ("0.0.0") must not run migrations: that version
        // would make every migration eligible for execution.
        if (projectVersion === "0.0.0") {
            return (0, _devVersionErrorResponse.devVersionErrorResponse)();
        }

        // COMPOSITION ROOT: assemble the IOC container.
        const container = (0, _ioc.createContainer)();
        container.bind(_symbols.PrimaryDynamoTableSymbol).toConstantValue(primaryTable);

        if (repository) {
            // Repository implementation provided by the user.
            container.bind(_symbols.MigrationRepositorySymbol).toConstantValue(repository);
        } else {
            // Default repository implementation.
            container.bind(_symbols.MigrationRepositorySymbol).to(_migrations.MigrationRepositoryImpl);
        }

        // Register every provided migration class.
        for (const migration of migrations) {
            container.bind(_symbols.MigrationSymbol).to(migration);
        }

        // When invoked with a `pattern`, only run migrations matching it.
        const patternMatcher = payload.pattern
            ? (0, _createPatternMatcher.createPatternMatcher)(payload.pattern)
            : undefined;

        // Resolve the runner (dependencies injected by the container) and execute.
        try {
            const runner = container.resolve(_MigrationRunner.MigrationRunner);
            const data = await runner.execute(projectVersion, patternMatcher || isMigrationApplicable);
            return { data };
        } catch (err) {
            return { error: { message: err.message } };
        }
    });
};
exports.createDdbProjectMigration = createDdbProjectMigration;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createDdbProjectMigration","migrations","primaryTable","isMigrationApplicable","undefined","repository","createRawEventHandler","payload","projectVersion","String","version","process","env","WEBINY_VERSION","devVersionErrorResponse","container","createContainer","bind","PrimaryDynamoTableSymbol","toConstantValue","MigrationRepositorySymbol","to","MigrationRepositoryImpl","forEach","migration","MigrationSymbol","patternMatcher","pattern","createPatternMatcher","data","resolve","MigrationRunner","execute","err","error","message"],"sources":["createDdbProjectMigration.ts"],"sourcesContent":["import { Table } from \"dynamodb-toolbox\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { Constructor, createContainer } from \"@webiny/ioc\";\nimport { IsMigrationApplicable, MigrationRunner } from \"~/MigrationRunner\";\nimport { MigrationRepositorySymbol, MigrationSymbol, PrimaryDynamoTableSymbol } from \"~/symbols\";\nimport { MigrationRepositoryImpl } from \"~/repository/migrations.repository\";\nimport { devVersionErrorResponse } from \"./devVersionErrorResponse\";\nimport { createPatternMatcher } from \"./createPatternMatcher\";\nimport {\n DataMigration,\n MigrationEventHandlerResponse,\n MigrationEventPayload,\n MigrationRepository\n} from \"~/types\";\n\ninterface CreateDdbDataMigrationConfig {\n migrations: Constructor<DataMigration>[];\n primaryTable: Table;\n repository?: MigrationRepository;\n isMigrationApplicable?: IsMigrationApplicable;\n}\n\nexport const createDdbProjectMigration = ({\n migrations,\n primaryTable,\n isMigrationApplicable = undefined,\n repository = undefined\n}: CreateDdbDataMigrationConfig) => {\n return createRawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>(\n async ({ payload }) => {\n const projectVersion = String(payload?.version || process.env.WEBINY_VERSION);\n\n if (projectVersion === \"0.0.0\") {\n return devVersionErrorResponse();\n }\n\n // COMPOSITION ROOT\n 
const container = createContainer();\n container.bind(PrimaryDynamoTableSymbol).toConstantValue(primaryTable);\n\n if (repository) {\n // Repository implementation provided by the user.\n container.bind(MigrationRepositorySymbol).toConstantValue(repository);\n } else {\n // Default repository implementation.\n container.bind(MigrationRepositorySymbol).to(MigrationRepositoryImpl);\n }\n\n // Bind the provided migrations.\n migrations.forEach(migration => container.bind(MigrationSymbol).to(migration));\n\n // If handler was invoked with a `pattern`, filter migrations that match the pattern only.\n let patternMatcher;\n if (payload.pattern) {\n patternMatcher = createPatternMatcher(payload.pattern);\n }\n\n // Inject dependencies and execute.\n try {\n const data = await container\n .resolve(MigrationRunner)\n .execute(projectVersion, patternMatcher || isMigrationApplicable);\n\n return { data };\n } catch (err) {\n return { error: { message: err.message } };\n }\n }\n );\n};\n"],"mappings":";;;;;;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAeO,MAAMA,yBAAyB,GAAG,CAAC;EACtCC,UAAU;EACVC,YAAY;EACZC,qBAAqB,GAAGC,SAAS;EACjCC,UAAU,GAAGD;AACa,CAAC,KAAK;EAChC,OAAO,IAAAE,iCAAqB,EACxB,OAAO;IAAEC;EAAQ,CAAC,KAAK;IACnB,MAAMC,cAAc,GAAGC,MAAM,CAAC,CAAAF,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEG,OAAO,KAAIC,OAAO,CAACC,GAAG,CAACC,cAAc,CAAC;IAE7E,IAAIL,cAAc,KAAK,OAAO,EAAE;MAC5B,OAAO,IAAAM,gDAAuB,GAAE;IACpC;;IAEA;IACA,MAAMC,SAAS,GAAG,IAAAC,oBAAe,GAAE;IACnCD,SAAS,CAACE,IAAI,CAACC,iCAAwB,CAAC,CAACC,eAAe,CAACjB,YAAY,CAAC;IAEtE,IAAIG,UAAU,EAAE;MACZ;MACAU,SAAS,CAACE,IAAI,CAACG,kCAAyB,CAAC,CAACD,eAAe,CAACd,UAAU,CAAC;IACzE,CAAC,MAAM;MACH;MACAU,SAAS,CAACE,IAAI,CAACG,kCAAyB,CAAC,CAACC,EAAE,CAACC,mCAAuB,CAAC;IACzE;;IAEA;IACArB,UAAU,CAACsB,OAAO,CAACC,SAAS,IAAIT,SAAS,CAACE,IAAI,CAACQ,wBAAe,CAAC,CAACJ,EAAE,CAACG,SAAS,CAAC,CAAC;;IAE9E;IACA,IAAIE,cAAc;IAClB,IAAInB,OAAO,CAACoB,OAAO,EAAE;MACjBD,cAAc,GAAG,IAAAE,0CAAoB,EAACrB,OAAO,CAACoB,OAAO,CAAC;IAC1D;;IAEA;IACA,IAAI;MACA,MAAME,IAAI,GAAG,MAAMd,SAAS,CACvBe,OAAO,CAACC,gCAA
e,CAAC,CACxBC,OAAO,CAACxB,cAAc,EAAEkB,cAAc,IAAIvB,qBAAqB,CAAC;MAErE,OAAO;QAAE0B;MAAK,CAAC;IACnB,CAAC,CAAC,OAAOI,GAAG,EAAE;MACV,OAAO;QAAEC,KAAK,EAAE;UAAEC,OAAO,EAAEF,GAAG,CAACE;QAAQ;MAAE,CAAC;IAC9C;EACJ,CAAC,CACJ;AACL,CAAC;AAAC"}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.createPatternMatcher = void 0;
|
|
8
|
+
var _minimatch = _interopRequireDefault(require("minimatch"));
|
|
9
|
+
/**
 * Builds a predicate deciding whether a migration matches `pattern`.
 * Patterns containing "*" are matched as globs via minimatch; anything
 * else must equal the migration id exactly.
 */
const createPatternMatcher = pattern => {
    const isGlob = pattern.includes("*");
    return migration => {
        const id = migration.getId();
        return isGlob ? (0, _minimatch.default)(id, pattern) : id === pattern;
    };
};
exports.createPatternMatcher = createPatternMatcher;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createPatternMatcher","pattern","migration","includes","minimatch","getId"],"sources":["createPatternMatcher.ts"],"sourcesContent":["import minimatch from \"minimatch\";\nimport { IsMigrationApplicable } from \"~/MigrationRunner\";\n\nexport const createPatternMatcher = (pattern: string): IsMigrationApplicable => {\n return migration => {\n if (pattern.includes(\"*\")) {\n return minimatch(migration.getId(), pattern);\n }\n return migration.getId() === pattern;\n };\n};\n"],"mappings":";;;;;;;AAAA;AAGO,MAAMA,oBAAoB,GAAIC,OAAe,IAA4B;EAC5E,OAAOC,SAAS,IAAI;IAChB,IAAID,OAAO,CAACE,QAAQ,CAAC,GAAG,CAAC,EAAE;MACvB,OAAO,IAAAC,kBAAS,EAACF,SAAS,CAACG,KAAK,EAAE,EAAEJ,OAAO,CAAC;IAChD;IACA,OAAOC,SAAS,CAACG,KAAK,EAAE,KAAKJ,OAAO;EACxC,CAAC;AACL,CAAC;AAAC"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.devVersionErrorResponse = void 0;
|
|
7
|
+
/**
 * Error response returned when the project reports the development
 * version "0.0.0" — with that version every migration would be
 * considered eligible for execution, so the run is refused.
 */
const devVersionErrorResponse = () => {
    const message =
        `This project is using a development version 0.0.0!` +
        ` Migrations cannot be executed using version 0.0.0, as that makes them all eligible for execution.` +
        ` To trigger a particular set of migrations, set a WEBINY_VERSION variable in the .env file.`;
    return {
        error: {
            message
        }
    };
};
exports.devVersionErrorResponse = devVersionErrorResponse;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["devVersionErrorResponse","error","message","join"],"sources":["devVersionErrorResponse.ts"],"sourcesContent":["export const devVersionErrorResponse = () => {\n return {\n error: {\n message: [\n `This project is using a development version 0.0.0!`,\n `Migrations cannot be executed using version 0.0.0, as that makes them all eligible for execution.`,\n `To trigger a particular set of migrations, set a WEBINY_VERSION variable in the .env file.`\n ].join(\" \")\n }\n };\n};\n"],"mappings":";;;;;;AAAO,MAAMA,uBAAuB,GAAG,MAAM;EACzC,OAAO;IACHC,KAAK,EAAE;MACHC,OAAO,EAAE,CACJ,oDAAmD,EACnD,mGAAkG,EAClG,4FAA2F,CAC/F,CAACC,IAAI,CAAC,GAAG;IACd;EACJ,CAAC;AACL,CAAC;AAAC"}
|
package/index.d.ts
ADDED
package/index.js
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
var _createDdbProjectMigration = require("./handlers/createDdbProjectMigration");
|
|
7
|
+
Object.keys(_createDdbProjectMigration).forEach(function (key) {
|
|
8
|
+
if (key === "default" || key === "__esModule") return;
|
|
9
|
+
if (key in exports && exports[key] === _createDdbProjectMigration[key]) return;
|
|
10
|
+
Object.defineProperty(exports, key, {
|
|
11
|
+
enumerable: true,
|
|
12
|
+
get: function () {
|
|
13
|
+
return _createDdbProjectMigration[key];
|
|
14
|
+
}
|
|
15
|
+
});
|
|
16
|
+
});
|
|
17
|
+
var _createDdbEsProjectMigration = require("./handlers/createDdbEsProjectMigration");
|
|
18
|
+
Object.keys(_createDdbEsProjectMigration).forEach(function (key) {
|
|
19
|
+
if (key === "default" || key === "__esModule") return;
|
|
20
|
+
if (key in exports && exports[key] === _createDdbEsProjectMigration[key]) return;
|
|
21
|
+
Object.defineProperty(exports, key, {
|
|
22
|
+
enumerable: true,
|
|
23
|
+
get: function () {
|
|
24
|
+
return _createDdbEsProjectMigration[key];
|
|
25
|
+
}
|
|
26
|
+
});
|
|
27
|
+
});
|
|
28
|
+
var _symbols = require("./symbols");
|
|
29
|
+
Object.keys(_symbols).forEach(function (key) {
|
|
30
|
+
if (key === "default" || key === "__esModule") return;
|
|
31
|
+
if (key in exports && exports[key] === _symbols[key]) return;
|
|
32
|
+
Object.defineProperty(exports, key, {
|
|
33
|
+
enumerable: true,
|
|
34
|
+
get: function () {
|
|
35
|
+
return _symbols[key];
|
|
36
|
+
}
|
|
37
|
+
});
|
|
38
|
+
});
|
|
39
|
+
var _types = require("./types");
|
|
40
|
+
Object.keys(_types).forEach(function (key) {
|
|
41
|
+
if (key === "default" || key === "__esModule") return;
|
|
42
|
+
if (key in exports && exports[key] === _types[key]) return;
|
|
43
|
+
Object.defineProperty(exports, key, {
|
|
44
|
+
enumerable: true,
|
|
45
|
+
get: function () {
|
|
46
|
+
return _types[key];
|
|
47
|
+
}
|
|
48
|
+
});
|
|
49
|
+
});
|
|
50
|
+
var _createTable = require("./createTable");
|
|
51
|
+
Object.keys(_createTable).forEach(function (key) {
|
|
52
|
+
if (key === "default" || key === "__esModule") return;
|
|
53
|
+
if (key in exports && exports[key] === _createTable[key]) return;
|
|
54
|
+
Object.defineProperty(exports, key, {
|
|
55
|
+
enumerable: true,
|
|
56
|
+
get: function () {
|
|
57
|
+
return _createTable[key];
|
|
58
|
+
}
|
|
59
|
+
});
|
|
60
|
+
});
|
|
61
|
+
var _createPinoLogger = require("./createPinoLogger");
|
|
62
|
+
Object.keys(_createPinoLogger).forEach(function (key) {
|
|
63
|
+
if (key === "default" || key === "__esModule") return;
|
|
64
|
+
if (key in exports && exports[key] === _createPinoLogger[key]) return;
|
|
65
|
+
Object.defineProperty(exports, key, {
|
|
66
|
+
enumerable: true,
|
|
67
|
+
get: function () {
|
|
68
|
+
return _createPinoLogger[key];
|
|
69
|
+
}
|
|
70
|
+
});
|
|
71
|
+
});
|
package/index.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":[],"sources":["index.ts"],"sourcesContent":["export * from \"./handlers/createDdbProjectMigration\";\nexport * from \"./handlers/createDdbEsProjectMigration\";\nexport * from \"./symbols\";\nexport * from \"./types\";\nexport * from \"./createTable\";\nexport * from \"./createPinoLogger\";\n"],"mappings":";;;;;AAAA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA"}
|
package/package.json
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@webiny/data-migration",
|
|
3
|
+
"version": "0.0.0-unstable.e3f4727c56",
|
|
4
|
+
"main": "index.js",
|
|
5
|
+
"types": "types.ts",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "https://github.com/webiny/webiny-js.git"
|
|
10
|
+
},
|
|
11
|
+
"description": "Tools to author and execute data migrations.",
|
|
12
|
+
"author": "Webiny Ltd.",
|
|
13
|
+
"dependencies": {
|
|
14
|
+
"@babel/runtime": "7.20.13",
|
|
15
|
+
"@elastic/elasticsearch": "7.12.0",
|
|
16
|
+
"@types/pino": "7.0.5",
|
|
17
|
+
"@webiny/db-dynamodb": "0.0.0-unstable.e3f4727c56",
|
|
18
|
+
"@webiny/handler-aws": "0.0.0-unstable.e3f4727c56",
|
|
19
|
+
"@webiny/ioc": "0.0.0-unstable.e3f4727c56",
|
|
20
|
+
"chalk": "4.1.2",
|
|
21
|
+
"dynamodb-toolbox": "0.3.5",
|
|
22
|
+
"minimatch": "5.1.6",
|
|
23
|
+
"pino": "8.11.0",
|
|
24
|
+
"pino-pretty": "9.4.0",
|
|
25
|
+
"semver": "6.3.0"
|
|
26
|
+
},
|
|
27
|
+
"devDependencies": {
|
|
28
|
+
"@babel/cli": "^7.19.3",
|
|
29
|
+
"@babel/core": "^7.19.3",
|
|
30
|
+
"@babel/preset-env": "^7.19.4",
|
|
31
|
+
"@types/semver": "^7.3.4",
|
|
32
|
+
"@webiny/cli": "^0.0.0-unstable.e3f4727c56",
|
|
33
|
+
"@webiny/project-utils": "^0.0.0-unstable.e3f4727c56",
|
|
34
|
+
"jest": "^28.1.0",
|
|
35
|
+
"jest-dynalite": "^3.2.0",
|
|
36
|
+
"jest-mock-console": "^1.0.0",
|
|
37
|
+
"rimraf": "^3.0.2",
|
|
38
|
+
"typescript": "4.7.4"
|
|
39
|
+
},
|
|
40
|
+
"publishConfig": {
|
|
41
|
+
"access": "public",
|
|
42
|
+
"directory": "dist"
|
|
43
|
+
},
|
|
44
|
+
"scripts": {
|
|
45
|
+
"build": "yarn webiny run build",
|
|
46
|
+
"watch": "yarn webiny run watch"
|
|
47
|
+
},
|
|
48
|
+
"adio": {
|
|
49
|
+
"ignore": {
|
|
50
|
+
"dependencies": [
|
|
51
|
+
"@types/pino"
|
|
52
|
+
]
|
|
53
|
+
}
|
|
54
|
+
},
|
|
55
|
+
"gitHead": "e3f4727c567484dc53e1efceacfb37dbacd7f4de"
|
|
56
|
+
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.createMigrationsEntity = void 0;
|
|
7
|
+
var _dynamodbToolbox = require("dynamodb-toolbox");
|
|
8
|
+
// Builds the dynamodb-toolbox Entity describing how migration log records
// are stored in the given table.
const createMigrationsEntity = ({
    table
}) => {
    return new _dynamodbToolbox.Entity({
        name: "Migrations",
        table,
        attributes: {
            PK: {
                partitionKey: true
            },
            SK: {
                sortKey: true
            },
            // GSI1 keys: the repository lists all migrations via the "GSI1"
            // index, with GSI1_SK holding the migration id.
            GSI1_PK: {
                type: "string"
            },
            GSI1_SK: {
                type: "string"
            },
            TYPE: {
                type: "string"
            },
            // The full migration log record (see MigrationItem in types).
            data: {
                type: "map"
            }
        }
    });
};
exports.createMigrationsEntity = createMigrationsEntity;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createMigrationsEntity","table","Entity","name","attributes","PK","partitionKey","SK","sortKey","GSI1_PK","type","GSI1_SK","TYPE","data"],"sources":["migrations.entity.ts"],"sourcesContent":["import { Table, Entity } from \"dynamodb-toolbox\";\n\nexport const createMigrationsEntity = ({ table }: { table: Table }) => {\n return new Entity({\n name: \"Migrations\",\n table,\n attributes: {\n PK: {\n partitionKey: true\n },\n SK: {\n sortKey: true\n },\n GSI1_PK: {\n type: \"string\"\n },\n GSI1_SK: {\n type: \"string\"\n },\n TYPE: {\n type: \"string\"\n },\n data: {\n type: \"map\"\n }\n }\n });\n};\n"],"mappings":";;;;;;AAAA;AAEO,MAAMA,sBAAsB,GAAG,CAAC;EAAEC;AAAwB,CAAC,KAAK;EACnE,OAAO,IAAIC,uBAAM,CAAC;IACdC,IAAI,EAAE,YAAY;IAClBF,KAAK;IACLG,UAAU,EAAE;MACRC,EAAE,EAAE;QACAC,YAAY,EAAE;MAClB,CAAC;MACDC,EAAE,EAAE;QACAC,OAAO,EAAE;MACb,CAAC;MACDC,OAAO,EAAE;QACLC,IAAI,EAAE;MACV,CAAC;MACDC,OAAO,EAAE;QACLD,IAAI,EAAE;MACV,CAAC;MACDE,IAAI,EAAE;QACFF,IAAI,EAAE;MACV,CAAC;MACDG,IAAI,EAAE;QACFH,IAAI,EAAE;MACV;IACJ;EACJ,CAAC,CAAC;AACN,CAAC;AAAC"}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { Table } from "dynamodb-toolbox";
import { MigrationItem, MigrationRepository } from "../types";
/**
 * Default MigrationRepository implementation, backed by a DynamoDB table.
 */
export declare class MigrationRepositoryImpl implements MigrationRepository {
    private readonly entity;
    constructor(table: Table);
    /** Lists logged migrations, sorted by migration id in descending order. */
    listMigrations(params?: {
        limit: number;
    }): Promise<MigrationItem[]>;
    /** Persists a migration log record. */
    logMigration(migration: MigrationItem): Promise<void>;
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.MigrationRepositoryImpl = void 0;
|
|
8
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
|
9
|
+
var _query = require("@webiny/db-dynamodb/utils/query");
|
|
10
|
+
var _migrations = require("./migrations.entity");
|
|
11
|
+
var _ioc = require("@webiny/ioc");
|
|
12
|
+
var _symbols = require("../symbols");
|
|
13
|
+
// Default MigrationRepository implementation, storing the migration log in
// the primary DynamoDB table (injected via PrimaryDynamoTableSymbol below).
class MigrationRepositoryImpl {
    constructor(table) {
        // Babel class-field initializer for the private `entity` field.
        (0, _defineProperty2.default)(this, "entity", void 0);
        this.entity = (0, _migrations.createMigrationsEntity)({
            table
        });
    }
    // Lists logged migrations via the GSI1 index (partition "MIGRATIONS"),
    // unwrapping each item's `data` attribute into a MigrationItem.
    async listMigrations(params) {
        const {
            limit
        } = params || {};
        const result = await (0, _query.queryAll)({
            entity: this.entity,
            partitionKey: "MIGRATIONS",
            options: {
                index: "GSI1",
                // `gt: " "` matches any non-empty sort key value.
                gt: " ",
                limit,
                // Sort by GSI1_SK in descending order.
                reverse: true
            }
        });
        return result.map(item => item.data);
    }
    // Writes one migration log record; GSI1 keys make it listable above.
    async logMigration(migration) {
        await this.entity.put({
            PK: `MIGRATION#${migration.id}`,
            SK: "A",
            TYPE: "migration",
            GSI1_PK: "MIGRATIONS",
            GSI1_SK: migration.id,
            data: migration
        });
    }
}
exports.MigrationRepositoryImpl = MigrationRepositoryImpl;
// Register with the IOC container: the constructor's `table` parameter is
// resolved from the PrimaryDynamoTableSymbol binding.
(0, _ioc.makeInjectable)(MigrationRepositoryImpl, [(0, _ioc.inject)(_symbols.PrimaryDynamoTableSymbol)]);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["MigrationRepositoryImpl","constructor","table","entity","createMigrationsEntity","listMigrations","params","limit","result","queryAll","partitionKey","options","index","gt","reverse","map","item","data","logMigration","migration","put","PK","id","SK","TYPE","GSI1_PK","GSI1_SK","makeInjectable","inject","PrimaryDynamoTableSymbol"],"sources":["migrations.repository.ts"],"sourcesContent":["import { Table, Entity } from \"dynamodb-toolbox\";\nimport { queryAll } from \"@webiny/db-dynamodb/utils/query\";\nimport { MigrationItem, MigrationRepository } from \"~/types\";\nimport { createMigrationsEntity } from \"~/repository/migrations.entity\";\nimport { inject, makeInjectable } from \"@webiny/ioc\";\nimport { PrimaryDynamoTableSymbol } from \"~/symbols\";\n\nexport class MigrationRepositoryImpl implements MigrationRepository {\n private readonly entity: Entity<any>;\n\n constructor(table: Table) {\n this.entity = createMigrationsEntity({ table });\n }\n\n async listMigrations(params?: { limit: number }): Promise<MigrationItem[]> {\n const { limit } = params || {};\n const result = await queryAll<{ data: MigrationItem }>({\n entity: this.entity,\n partitionKey: \"MIGRATIONS\",\n options: {\n index: \"GSI1\",\n gt: \" \",\n limit,\n // Sort by GSI1_SK in descending order.\n reverse: true\n }\n });\n\n return result.map(item => item.data);\n }\n\n async logMigration(migration: MigrationItem): Promise<void> {\n await this.entity.put({\n PK: `MIGRATION#${migration.id}`,\n SK: \"A\",\n TYPE: \"migration\",\n GSI1_PK: \"MIGRATIONS\",\n GSI1_SK: migration.id,\n data: migration\n });\n }\n}\n\nmakeInjectable(MigrationRepositoryImpl, 
[inject(PrimaryDynamoTableSymbol)]);\n"],"mappings":";;;;;;;;AACA;AAEA;AACA;AACA;AAEO,MAAMA,uBAAuB,CAAgC;EAGhEC,WAAW,CAACC,KAAY,EAAE;IAAA;IACtB,IAAI,CAACC,MAAM,GAAG,IAAAC,kCAAsB,EAAC;MAAEF;IAAM,CAAC,CAAC;EACnD;EAEA,MAAMG,cAAc,CAACC,MAA0B,EAA4B;IACvE,MAAM;MAAEC;IAAM,CAAC,GAAGD,MAAM,IAAI,CAAC,CAAC;IAC9B,MAAME,MAAM,GAAG,MAAM,IAAAC,eAAQ,EAA0B;MACnDN,MAAM,EAAE,IAAI,CAACA,MAAM;MACnBO,YAAY,EAAE,YAAY;MAC1BC,OAAO,EAAE;QACLC,KAAK,EAAE,MAAM;QACbC,EAAE,EAAE,GAAG;QACPN,KAAK;QACL;QACAO,OAAO,EAAE;MACb;IACJ,CAAC,CAAC;IAEF,OAAON,MAAM,CAACO,GAAG,CAACC,IAAI,IAAIA,IAAI,CAACC,IAAI,CAAC;EACxC;EAEA,MAAMC,YAAY,CAACC,SAAwB,EAAiB;IACxD,MAAM,IAAI,CAAChB,MAAM,CAACiB,GAAG,CAAC;MAClBC,EAAE,EAAG,aAAYF,SAAS,CAACG,EAAG,EAAC;MAC/BC,EAAE,EAAE,GAAG;MACPC,IAAI,EAAE,WAAW;MACjBC,OAAO,EAAE,YAAY;MACrBC,OAAO,EAAEP,SAAS,CAACG,EAAE;MACrBL,IAAI,EAAEE;IACV,CAAC,CAAC;EACN;AACJ;AAAC;AAED,IAAAQ,mBAAc,EAAC3B,uBAAuB,EAAE,CAAC,IAAA4B,WAAM,EAACC,iCAAwB,CAAC,CAAC,CAAC"}
|
package/symbols.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
/** Well-known IOC injection tokens used across the data-migration package. */
export declare const LoggerSymbol: unique symbol;
export declare const MigrationSymbol: unique symbol;
export declare const MigrationRepositorySymbol: unique symbol;
export declare const ElasticsearchClientSymbol: unique symbol;
export declare const PrimaryDynamoTableSymbol: unique symbol;
export declare const ElasticsearchDynamoTableSymbol: unique symbol;
|
package/symbols.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.PrimaryDynamoTableSymbol = exports.MigrationSymbol = exports.MigrationRepositorySymbol = exports.LoggerSymbol = exports.ElasticsearchDynamoTableSymbol = exports.ElasticsearchClientSymbol = void 0;
|
|
7
|
+
const LoggerSymbol = Symbol.for("PinoLogger");
|
|
8
|
+
exports.LoggerSymbol = LoggerSymbol;
|
|
9
|
+
const MigrationSymbol = Symbol.for("Migration");
|
|
10
|
+
exports.MigrationSymbol = MigrationSymbol;
|
|
11
|
+
const MigrationRepositorySymbol = Symbol.for("MigrationRepository");
|
|
12
|
+
exports.MigrationRepositorySymbol = MigrationRepositorySymbol;
|
|
13
|
+
const ElasticsearchClientSymbol = Symbol.for("ElasticsearchClient");
|
|
14
|
+
exports.ElasticsearchClientSymbol = ElasticsearchClientSymbol;
|
|
15
|
+
const PrimaryDynamoTableSymbol = Symbol.for("PrimaryDynamoTable");
|
|
16
|
+
exports.PrimaryDynamoTableSymbol = PrimaryDynamoTableSymbol;
|
|
17
|
+
const ElasticsearchDynamoTableSymbol = Symbol.for("ElasticsearchDynamoTable");
|
|
18
|
+
exports.ElasticsearchDynamoTableSymbol = ElasticsearchDynamoTableSymbol;
|
package/symbols.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["LoggerSymbol","Symbol","for","MigrationSymbol","MigrationRepositorySymbol","ElasticsearchClientSymbol","PrimaryDynamoTableSymbol","ElasticsearchDynamoTableSymbol"],"sources":["symbols.ts"],"sourcesContent":["export const LoggerSymbol = Symbol.for(\"PinoLogger\");\nexport const MigrationSymbol = Symbol.for(\"Migration\");\nexport const MigrationRepositorySymbol = Symbol.for(\"MigrationRepository\");\nexport const ElasticsearchClientSymbol = Symbol.for(\"ElasticsearchClient\");\nexport const PrimaryDynamoTableSymbol = Symbol.for(\"PrimaryDynamoTable\");\nexport const ElasticsearchDynamoTableSymbol = Symbol.for(\"ElasticsearchDynamoTable\");\n"],"mappings":";;;;;;AAAO,MAAMA,YAAY,GAAGC,MAAM,CAACC,GAAG,CAAC,YAAY,CAAC;AAAC;AAC9C,MAAMC,eAAe,GAAGF,MAAM,CAACC,GAAG,CAAC,WAAW,CAAC;AAAC;AAChD,MAAME,yBAAyB,GAAGH,MAAM,CAACC,GAAG,CAAC,qBAAqB,CAAC;AAAC;AACpE,MAAMG,yBAAyB,GAAGJ,MAAM,CAACC,GAAG,CAAC,qBAAqB,CAAC;AAAC;AACpE,MAAMI,wBAAwB,GAAGL,MAAM,CAACC,GAAG,CAAC,oBAAoB,CAAC;AAAC;AAClE,MAAMK,8BAA8B,GAAGN,MAAM,CAACC,GAAG,CAAC,0BAA0B,CAAC;AAAC"}
|
package/types.d.ts
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import { Logger, LogEvent } from "pino";
export { Logger };
/** A single record in the migration execution log. */
export interface MigrationItem {
    id: string;
    description: string;
    createdOn: string;
    duration: number;
    reason: string;
}
/** Storage abstraction for reading and writing the migration log. */
export interface MigrationRepository {
    listMigrations(params?: {
        limit: number;
    }): Promise<MigrationItem[]>;
    logMigration(migration: MigrationItem): Promise<void>;
}
/** Contextual information passed to every migration. */
export interface DataMigrationContext {
    projectVersion: string;
    logger: Logger;
}
/** Contract implemented by every data migration. */
export interface DataMigration {
    getId(): string;
    getDescription(): string;
    /**
     * Checks if the migration needs to apply some changes to the system.
     * Returning `false` means "everything is ok, mark this migration as executed".
     */
    shouldExecute(context: DataMigrationContext): Promise<boolean>;
    execute(context: DataMigrationContext): Promise<void>;
}
/** Outcome of a single executed migration. */
export interface MigrationResult {
    success: boolean;
    logs: LogEvent[];
    duration: number;
}
export interface ExecutedMigrationResponse {
    id: string;
    description: string;
    result: MigrationResult;
}
export interface SkippedMigrationResponse {
    id: string;
    description: string;
    reason: string;
}
/** Payload accepted by the migration event handlers. */
export interface MigrationEventPayload {
    version?: string;
    pattern?: string;
}
/** Handler response: either `data` or `error`, never both. */
export declare type MigrationEventHandlerResponse = {
    error: {
        message: string;
    };
    data?: never;
} | {
    data: {
        /** Executed migrations. */
        executed: ExecutedMigrationResponse[];
        /** Applicable, but the migration itself decided it should not be executed. */
        skipped: SkippedMigrationResponse[];
        /** Not applicable; either out of version range, or already applied. */
        notApplicable: SkippedMigrationResponse[];
    };
    error?: never;
};
|
package/types.js
ADDED
package/types.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":[],"sources":["types.ts"],"sourcesContent":["import { Logger, LogEvent } from \"pino\";\n\nexport { Logger };\n\nexport interface MigrationItem {\n id: string;\n description: string;\n createdOn: string;\n duration: number;\n reason: string;\n}\n\nexport interface MigrationRepository {\n listMigrations(params?: { limit: number }): Promise<MigrationItem[]>;\n logMigration(migration: MigrationItem): Promise<void>;\n}\n\nexport interface DataMigrationContext {\n projectVersion: string;\n logger: Logger;\n}\n\nexport interface DataMigration {\n getId(): string;\n getDescription(): string;\n // This function should check of the migration needs to apply some changes to the system.\n // Returning `false` means \"everything is ok, mark this migration as executed\".\n shouldExecute(context: DataMigrationContext): Promise<boolean>;\n execute(context: DataMigrationContext): Promise<void>;\n}\n\nexport interface MigrationResult {\n success: boolean;\n logs: LogEvent[];\n duration: number;\n}\n\nexport interface ExecutedMigrationResponse {\n id: string;\n description: string;\n result: MigrationResult;\n}\n\nexport interface SkippedMigrationResponse {\n id: string;\n description: string;\n reason: string;\n}\n\nexport interface MigrationEventPayload {\n version?: string;\n pattern?: string;\n}\n\nexport type MigrationEventHandlerResponse =\n // We can either have a `data`, or `error`, but never both.\n | {\n error: {\n message: string;\n };\n data?: never;\n }\n | {\n data: {\n // Executed migrations\n executed: ExecutedMigrationResponse[];\n // Applicable, but the migration itself decided it should not be executed.\n skipped: SkippedMigrationResponse[];\n // Not applicable; either out of version range, or already applied.\n notApplicable: SkippedMigrationResponse[];\n };\n error?: never;\n };\n"],"mappings":";;;;;;;;;;;AAAA"}
|