@webiny/data-migration 0.0.0-unstable.1145e7667f
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/MigrationRunner.d.ts +18 -0
- package/MigrationRunner.js +276 -0
- package/MigrationRunner.js.map +1 -0
- package/README.md +6 -0
- package/cli/getDuration.d.ts +5 -0
- package/cli/getDuration.js +21 -0
- package/cli/getDuration.js.map +1 -0
- package/cli/getMigrationStatus.d.ts +9 -0
- package/cli/getMigrationStatus.js +23 -0
- package/cli/getMigrationStatus.js.map +1 -0
- package/cli/index.d.ts +3 -0
- package/cli/index.js +38 -0
- package/cli/index.js.map +1 -0
- package/cli/printReport.d.ts +9 -0
- package/cli/printReport.js +57 -0
- package/cli/printReport.js.map +1 -0
- package/cli/runMigration.d.ts +13 -0
- package/cli/runMigration.js +85 -0
- package/cli/runMigration.js.map +1 -0
- package/createId.d.ts +1 -0
- package/createId.js +14 -0
- package/createId.js.map +1 -0
- package/createPinoLogger.d.ts +7 -0
- package/createPinoLogger.js +22 -0
- package/createPinoLogger.js.map +1 -0
- package/createTable.d.ts +7 -0
- package/createTable.js +29 -0
- package/createTable.js.map +1 -0
- package/handlers/createDdbEsProjectMigration.d.ts +16 -0
- package/handlers/createDdbEsProjectMigration.js +81 -0
- package/handlers/createDdbEsProjectMigration.js.map +1 -0
- package/handlers/createDdbProjectMigration.d.ts +13 -0
- package/handlers/createDdbProjectMigration.js +77 -0
- package/handlers/createDdbProjectMigration.js.map +1 -0
- package/handlers/createPatternMatcher.d.ts +2 -0
- package/handlers/createPatternMatcher.js +17 -0
- package/handlers/createPatternMatcher.js.map +1 -0
- package/handlers/devVersionErrorResponse.d.ts +5 -0
- package/handlers/devVersionErrorResponse.js +14 -0
- package/handlers/devVersionErrorResponse.js.map +1 -0
- package/index.d.ts +7 -0
- package/index.js +71 -0
- package/index.js.map +1 -0
- package/package.json +60 -0
- package/repository/createStandardEntity.d.ts +5 -0
- package/repository/createStandardEntity.js +37 -0
- package/repository/createStandardEntity.js.map +1 -0
- package/repository/migrations.repository.d.ts +17 -0
- package/repository/migrations.repository.js +108 -0
- package/repository/migrations.repository.js.map +1 -0
- package/symbols.d.ts +7 -0
- package/symbols.js +20 -0
- package/symbols.js.map +1 -0
- package/types.d.ts +80 -0
- package/types.js +12 -0
- package/types.js.map +1 -0
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.runMigration = void 0;
|
|
8
|
+
var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
|
|
9
|
+
var _getMigrationStatus = require("./getMigrationStatus");
|
|
10
|
+
const getMigrationStatusReportInterval = () => {
|
|
11
|
+
const envKey = "MIGRATION_STATUS_REPORT_INTERVAL";
|
|
12
|
+
if (envKey in process.env) {
|
|
13
|
+
return parseInt(String(process.env[envKey]));
|
|
14
|
+
}
|
|
15
|
+
return 2000;
|
|
16
|
+
};
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* Run the migration Lambda, and re-run when resuming is requested.
|
|
20
|
+
*/
|
|
21
|
+
const runMigration = async ({
|
|
22
|
+
payload,
|
|
23
|
+
functionName,
|
|
24
|
+
lambdaClient,
|
|
25
|
+
statusCallback
|
|
26
|
+
}) => {
|
|
27
|
+
// We don't report status, if `stdout` is not TTY (usually in CIs, and child processes spawned programmatically).
|
|
28
|
+
const reportStatus = data => {
|
|
29
|
+
if (!process.stdout.isTTY || typeof statusCallback !== "function") {
|
|
30
|
+
return;
|
|
31
|
+
}
|
|
32
|
+
statusCallback(data);
|
|
33
|
+
};
|
|
34
|
+
const invokeMigration = async () => {
|
|
35
|
+
const response = await lambdaClient.invoke({
|
|
36
|
+
FunctionName: functionName,
|
|
37
|
+
InvocationType: "Event",
|
|
38
|
+
Payload: JSON.stringify((0, _objectSpread2.default)((0, _objectSpread2.default)({}, payload), {}, {
|
|
39
|
+
command: "execute"
|
|
40
|
+
}))
|
|
41
|
+
}).promise();
|
|
42
|
+
return response.StatusCode;
|
|
43
|
+
};
|
|
44
|
+
|
|
45
|
+
// Execute migration function.
|
|
46
|
+
await invokeMigration();
|
|
47
|
+
|
|
48
|
+
// Poll for status and re-execute when migration is in "pending" state.
|
|
49
|
+
let response;
|
|
50
|
+
while (true) {
|
|
51
|
+
await new Promise(resolve => setTimeout(resolve, getMigrationStatusReportInterval()));
|
|
52
|
+
response = await (0, _getMigrationStatus.getMigrationStatus)({
|
|
53
|
+
payload,
|
|
54
|
+
functionName,
|
|
55
|
+
lambdaClient
|
|
56
|
+
});
|
|
57
|
+
if (!response) {
|
|
58
|
+
continue;
|
|
59
|
+
}
|
|
60
|
+
const {
|
|
61
|
+
data,
|
|
62
|
+
error
|
|
63
|
+
} = response;
|
|
64
|
+
|
|
65
|
+
// If we received an error, it must be an unrecoverable error, and we don't retry.
|
|
66
|
+
if (error) {
|
|
67
|
+
return response;
|
|
68
|
+
}
|
|
69
|
+
switch (data.status) {
|
|
70
|
+
case "init":
|
|
71
|
+
reportStatus(data);
|
|
72
|
+
continue;
|
|
73
|
+
case "pending":
|
|
74
|
+
await invokeMigration();
|
|
75
|
+
break;
|
|
76
|
+
case "running":
|
|
77
|
+
reportStatus(data);
|
|
78
|
+
break;
|
|
79
|
+
case "done":
|
|
80
|
+
default:
|
|
81
|
+
return response;
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
};
|
|
85
|
+
exports.runMigration = runMigration;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["getMigrationStatusReportInterval","envKey","process","env","parseInt","String","runMigration","payload","functionName","lambdaClient","statusCallback","reportStatus","data","stdout","isTTY","invokeMigration","response","invoke","FunctionName","InvocationType","Payload","JSON","stringify","command","promise","StatusCode","Promise","resolve","setTimeout","getMigrationStatus","error","status"],"sources":["runMigration.ts"],"sourcesContent":["import LambdaClient from \"aws-sdk/clients/lambda\";\nimport {\n MigrationEventHandlerResponse,\n MigrationInvocationErrorResponse,\n MigrationRun,\n MigrationStatus,\n MigrationStatusResponse\n} from \"~/types\";\nimport { getMigrationStatus } from \"./getMigrationStatus\";\n\ninterface RunMigrationParams {\n lambdaClient: LambdaClient;\n functionName: string;\n payload?: Record<string, any>;\n statusCallback?: (status: MigrationRun) => void;\n}\n\nconst getMigrationStatusReportInterval = () => {\n const envKey = \"MIGRATION_STATUS_REPORT_INTERVAL\";\n if (envKey in process.env) {\n return parseInt(String(process.env[envKey]));\n }\n return 2000;\n};\n\n/**\n * Run the migration Lambda, and re-run when resuming is requested.\n */\nexport const runMigration = async ({\n payload,\n functionName,\n lambdaClient,\n statusCallback\n}: RunMigrationParams): Promise<MigrationStatusResponse | MigrationInvocationErrorResponse> => {\n // We don't report status, if `stdout` is not TTY (usually in CIs, and child processes spawned programmatically).\n const reportStatus = (data: MigrationStatus) => {\n if (!process.stdout.isTTY || typeof statusCallback !== \"function\") {\n return;\n }\n\n statusCallback(data);\n };\n\n const invokeMigration = async () => {\n const response = await lambdaClient\n .invoke({\n FunctionName: functionName,\n InvocationType: \"Event\",\n Payload: JSON.stringify({ ...payload, command: \"execute\" })\n })\n .promise();\n\n return response.StatusCode;\n };\n\n // Execute migration function.\n 
await invokeMigration();\n\n // Poll for status and re-execute when migration is in \"pending\" state.\n let response: MigrationEventHandlerResponse;\n while (true) {\n await new Promise(resolve => setTimeout(resolve, getMigrationStatusReportInterval()));\n\n response = await getMigrationStatus({\n payload,\n functionName,\n lambdaClient\n });\n\n if (!response) {\n continue;\n }\n\n const { data, error } = response;\n\n // If we received an error, it must be an unrecoverable error, and we don't retry.\n if (error) {\n return response;\n }\n\n switch (data.status) {\n case \"init\":\n reportStatus(data);\n continue;\n case \"pending\":\n await invokeMigration();\n break;\n case \"running\":\n reportStatus(data);\n break;\n case \"done\":\n default:\n return response;\n }\n }\n};\n"],"mappings":";;;;;;;;AAQA;AASA,MAAMA,gCAAgC,GAAG,MAAM;EAC3C,MAAMC,MAAM,GAAG,kCAAkC;EACjD,IAAIA,MAAM,IAAIC,OAAO,CAACC,GAAG,EAAE;IACvB,OAAOC,QAAQ,CAACC,MAAM,CAACH,OAAO,CAACC,GAAG,CAACF,MAAM,CAAC,CAAC,CAAC;EAChD;EACA,OAAO,IAAI;AACf,CAAC;;AAED;AACA;AACA;AACO,MAAMK,YAAY,GAAG,OAAO;EAC/BC,OAAO;EACPC,YAAY;EACZC,YAAY;EACZC;AACgB,CAAC,KAA0E;EAC3F;EACA,MAAMC,YAAY,GAAIC,IAAqB,IAAK;IAC5C,IAAI,CAACV,OAAO,CAACW,MAAM,CAACC,KAAK,IAAI,OAAOJ,cAAc,KAAK,UAAU,EAAE;MAC/D;IACJ;IAEAA,cAAc,CAACE,IAAI,CAAC;EACxB,CAAC;EAED,MAAMG,eAAe,GAAG,YAAY;IAChC,MAAMC,QAAQ,GAAG,MAAMP,YAAY,CAC9BQ,MAAM,CAAC;MACJC,YAAY,EAAEV,YAAY;MAC1BW,cAAc,EAAE,OAAO;MACvBC,OAAO,EAAEC,IAAI,CAACC,SAAS,6DAAMf,OAAO;QAAEgB,OAAO,EAAE;MAAS;IAC5D,CAAC,CAAC,CACDC,OAAO,EAAE;IAEd,OAAOR,QAAQ,CAACS,UAAU;EAC9B,CAAC;;EAED;EACA,MAAMV,eAAe,EAAE;;EAEvB;EACA,IAAIC,QAAuC;EAC3C,OAAO,IAAI,EAAE;IACT,MAAM,IAAIU,OAAO,CAACC,OAAO,IAAIC,UAAU,CAACD,OAAO,EAAE3B,gCAAgC,EAAE,CAAC,CAAC;IAErFgB,QAAQ,GAAG,MAAM,IAAAa,sCAAkB,EAAC;MAChCtB,OAAO;MACPC,YAAY;MACZC;IACJ,CAAC,CAAC;IAEF,IAAI,CAACO,QAAQ,EAAE;MACX;IACJ;IAEA,MAAM;MAAEJ,IAAI;MAAEkB;IAAM,CAAC,GAAGd,QAAQ;;IAEhC;IACA,IAAIc,KAAK,EAAE;MACP,OAAOd,QAAQ;IACnB;IAEA,QAAQJ,IAAI,CAACmB,MAAM;MACf,KAAK,MAAM;QACPpB,YAAY,CAACC,IAAI,CAAC;QAClB;
MACJ,KAAK,SAAS;QACV,MAAMG,eAAe,EAAE;QACvB;MACJ,KAAK,SAAS;QACVJ,YAAY,CAACC,IAAI,CAAC;QAClB;MACJ,KAAK,MAAM;MACX;QACI,OAAOI,QAAQ;IAAC;EAE5B;AACJ,CAAC;AAAC"}
|
package/createId.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare const createId: () => any;
|
package/createId.js
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.createId = void 0;
|
|
8
|
+
var _mdbid = _interopRequireDefault(require("mdbid"));
|
|
9
|
+
// @ts-ignore mdbid doesn't have TS types.
|
|
10
|
+
|
|
11
|
+
const createId = () => {
|
|
12
|
+
return (0, _mdbid.default)();
|
|
13
|
+
};
|
|
14
|
+
exports.createId = createId;
|
package/createId.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createId","mdbid"],"sources":["createId.ts"],"sourcesContent":["// @ts-ignore mdbid doesn't have TS types.\nimport mdbid from \"mdbid\";\n\nexport const createId = () => {\n return mdbid();\n};\n"],"mappings":";;;;;;;AACA;AADA;;AAGO,MAAMA,QAAQ,GAAG,MAAM;EAC1B,OAAO,IAAAC,cAAK,GAAE;AAClB,CAAC;AAAC"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { pino, Logger } from "pino";
|
|
2
|
+
import pinoPretty from "pino-pretty";
|
|
3
|
+
import { DataMigration } from "./types";
|
|
4
|
+
export declare const createPinoLogger: () => Logger<pinoPretty.PrettyStream>;
|
|
5
|
+
export declare const getChildLogger: (logger: Logger, migration: DataMigration) => pino.Logger<import("pino").LoggerOptions & {
|
|
6
|
+
msgPrefix: string;
|
|
7
|
+
}>;
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.getChildLogger = exports.createPinoLogger = void 0;
|
|
8
|
+
var _chalk = _interopRequireDefault(require("chalk"));
|
|
9
|
+
var _pino = require("pino");
|
|
10
|
+
var _pinoPretty = _interopRequireDefault(require("pino-pretty"));
|
|
11
|
+
const createPinoLogger = () => {
|
|
12
|
+
return (0, _pino.pino)((0, _pinoPretty.default)({
|
|
13
|
+
ignore: "pid,hostname"
|
|
14
|
+
}));
|
|
15
|
+
};
|
|
16
|
+
exports.createPinoLogger = createPinoLogger;
|
|
17
|
+
const getChildLogger = (logger, migration) => {
|
|
18
|
+
return logger.child({}, {
|
|
19
|
+
msgPrefix: _chalk.default.blueBright(`[${migration.getId()}]`) + " "
|
|
20
|
+
});
|
|
21
|
+
};
|
|
22
|
+
exports.getChildLogger = getChildLogger;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createPinoLogger","pino","pinoPretty","ignore","getChildLogger","logger","migration","child","msgPrefix","chalk","blueBright","getId"],"sources":["createPinoLogger.ts"],"sourcesContent":["import chalk from \"chalk\";\nimport { pino, Logger } from \"pino\";\nimport pinoPretty from \"pino-pretty\";\nimport { DataMigration } from \"~/types\";\n\nexport const createPinoLogger = () => {\n return pino(\n pinoPretty({\n ignore: \"pid,hostname\"\n })\n );\n};\n\nexport const getChildLogger = (logger: Logger, migration: DataMigration) => {\n return logger.child({}, { msgPrefix: chalk.blueBright(`[${migration.getId()}]`) + \" \" });\n};\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AAGO,MAAMA,gBAAgB,GAAG,MAAM;EAClC,OAAO,IAAAC,UAAI,EACP,IAAAC,mBAAU,EAAC;IACPC,MAAM,EAAE;EACZ,CAAC,CAAC,CACL;AACL,CAAC;AAAC;AAEK,MAAMC,cAAc,GAAG,CAACC,MAAc,EAAEC,SAAwB,KAAK;EACxE,OAAOD,MAAM,CAACE,KAAK,CAAC,CAAC,CAAC,EAAE;IAAEC,SAAS,EAAEC,cAAK,CAACC,UAAU,CAAE,IAAGJ,SAAS,CAACK,KAAK,EAAG,GAAE,CAAC,GAAG;EAAI,CAAC,CAAC;AAC5F,CAAC;AAAC"}
|
package/createTable.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { DocumentClient } from "aws-sdk/clients/dynamodb";
|
|
2
|
+
import { Table } from "dynamodb-toolbox";
|
|
3
|
+
export interface CreateTableParams {
|
|
4
|
+
name: string;
|
|
5
|
+
documentClient: DocumentClient;
|
|
6
|
+
}
|
|
7
|
+
export declare const createTable: ({ name, documentClient }: CreateTableParams) => Table;
|
package/createTable.js
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.createTable = void 0;
|
|
7
|
+
var _dynamodbToolbox = require("dynamodb-toolbox");
|
|
8
|
+
const createTable = ({
|
|
9
|
+
name,
|
|
10
|
+
documentClient
|
|
11
|
+
}) => {
|
|
12
|
+
return new _dynamodbToolbox.Table({
|
|
13
|
+
name,
|
|
14
|
+
partitionKey: "PK",
|
|
15
|
+
sortKey: "SK",
|
|
16
|
+
DocumentClient: documentClient,
|
|
17
|
+
indexes: {
|
|
18
|
+
GSI1: {
|
|
19
|
+
partitionKey: "GSI1_PK",
|
|
20
|
+
sortKey: "GSI1_SK"
|
|
21
|
+
}
|
|
22
|
+
// GSI2: {
|
|
23
|
+
// partitionKey: "GSI2_PK",
|
|
24
|
+
// sortKey: "GSI2_SK"
|
|
25
|
+
// }
|
|
26
|
+
}
|
|
27
|
+
});
|
|
28
|
+
};
|
|
29
|
+
exports.createTable = createTable;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createTable","name","documentClient","Table","partitionKey","sortKey","DocumentClient","indexes","GSI1"],"sources":["createTable.ts"],"sourcesContent":["import { DocumentClient } from \"aws-sdk/clients/dynamodb\";\nimport { Table } from \"dynamodb-toolbox\";\n\nexport interface CreateTableParams {\n name: string;\n documentClient: DocumentClient;\n}\n\nexport const createTable = ({ name, documentClient }: CreateTableParams) => {\n return new Table({\n name,\n partitionKey: \"PK\",\n sortKey: \"SK\",\n DocumentClient: documentClient,\n indexes: {\n GSI1: {\n partitionKey: \"GSI1_PK\",\n sortKey: \"GSI1_SK\"\n }\n // GSI2: {\n // partitionKey: \"GSI2_PK\",\n // sortKey: \"GSI2_SK\"\n // }\n }\n });\n};\n"],"mappings":";;;;;;AACA;AAOO,MAAMA,WAAW,GAAG,CAAC;EAAEC,IAAI;EAAEC;AAAkC,CAAC,KAAK;EACxE,OAAO,IAAIC,sBAAK,CAAC;IACbF,IAAI;IACJG,YAAY,EAAE,IAAI;IAClBC,OAAO,EAAE,IAAI;IACbC,cAAc,EAAEJ,cAAc;IAC9BK,OAAO,EAAE;MACLC,IAAI,EAAE;QACFJ,YAAY,EAAE,SAAS;QACvBC,OAAO,EAAE;MACb;MACA;MACA;MACA;MACA;IACJ;EACJ,CAAC,CAAC;AACN,CAAC;AAAC"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { Client as ElasticsearchClient } from "@elastic/elasticsearch";
|
|
2
|
+
import { Table } from "dynamodb-toolbox";
|
|
3
|
+
import { Constructor } from "@webiny/ioc";
|
|
4
|
+
import { DataMigration, ExecutionTimeLimiter, MigrationEventHandlerResponse, MigrationEventPayload, MigrationRepository } from "../types";
|
|
5
|
+
import { IsMigrationApplicable } from "../MigrationRunner";
|
|
6
|
+
interface CreateDdbEsDataMigrationConfig {
|
|
7
|
+
elasticsearchClient: ElasticsearchClient;
|
|
8
|
+
primaryTable: Table;
|
|
9
|
+
dynamoToEsTable: Table;
|
|
10
|
+
migrations: Constructor<DataMigration>[];
|
|
11
|
+
isMigrationApplicable?: IsMigrationApplicable;
|
|
12
|
+
repository?: MigrationRepository;
|
|
13
|
+
timeLimiter?: ExecutionTimeLimiter;
|
|
14
|
+
}
|
|
15
|
+
export declare const createDdbEsProjectMigration: ({ migrations, elasticsearchClient, primaryTable, dynamoToEsTable, isMigrationApplicable, repository, ...config }: CreateDdbEsDataMigrationConfig) => import("@webiny/handler-aws").RawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>;
|
|
16
|
+
export {};
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.createDdbEsProjectMigration = void 0;
|
|
8
|
+
var _objectWithoutProperties2 = _interopRequireDefault(require("@babel/runtime/helpers/objectWithoutProperties"));
|
|
9
|
+
var _handlerAws = require("@webiny/handler-aws");
|
|
10
|
+
var _ioc = require("@webiny/ioc");
|
|
11
|
+
var _symbols = require("../symbols");
|
|
12
|
+
var _MigrationRunner = require("../MigrationRunner");
|
|
13
|
+
var _migrations = require("../repository/migrations.repository");
|
|
14
|
+
var _devVersionErrorResponse = require("./devVersionErrorResponse");
|
|
15
|
+
var _createPatternMatcher = require("./createPatternMatcher");
|
|
16
|
+
var _semver = require("semver");
|
|
17
|
+
const _excluded = ["migrations", "elasticsearchClient", "primaryTable", "dynamoToEsTable", "isMigrationApplicable", "repository"];
|
|
18
|
+
const createDdbEsProjectMigration = _ref => {
|
|
19
|
+
let {
|
|
20
|
+
migrations,
|
|
21
|
+
elasticsearchClient,
|
|
22
|
+
primaryTable,
|
|
23
|
+
dynamoToEsTable,
|
|
24
|
+
isMigrationApplicable = undefined,
|
|
25
|
+
repository = undefined
|
|
26
|
+
} = _ref,
|
|
27
|
+
config = (0, _objectWithoutProperties2.default)(_ref, _excluded);
|
|
28
|
+
return (0, _handlerAws.createRawEventHandler)(async ({
|
|
29
|
+
payload,
|
|
30
|
+
lambdaContext
|
|
31
|
+
}) => {
|
|
32
|
+
const projectVersion = String((payload === null || payload === void 0 ? void 0 : payload.version) || process.env.WEBINY_VERSION);
|
|
33
|
+
const version = (0, _semver.coerce)(projectVersion);
|
|
34
|
+
if ((version === null || version === void 0 ? void 0 : version.version) === "0.0.0") {
|
|
35
|
+
return (0, _devVersionErrorResponse.devVersionErrorResponse)();
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
// COMPOSITION ROOT
|
|
39
|
+
const container = (0, _ioc.createContainer)();
|
|
40
|
+
container.bind(_symbols.PrimaryDynamoTableSymbol).toConstantValue(primaryTable);
|
|
41
|
+
container.bind(_symbols.ElasticsearchDynamoTableSymbol).toConstantValue(dynamoToEsTable);
|
|
42
|
+
container.bind(_symbols.ElasticsearchClientSymbol).toConstantValue(elasticsearchClient);
|
|
43
|
+
const timeLimiter = config.timeLimiter || (lambdaContext === null || lambdaContext === void 0 ? void 0 : lambdaContext.getRemainingTimeInMillis) || (() => 0);
|
|
44
|
+
container.bind(_symbols.ExecutionTimeLimiterSymbol).toConstantValue(timeLimiter);
|
|
45
|
+
if (repository) {
|
|
46
|
+
// Repository implementation provided by the user.
|
|
47
|
+
container.bind(_symbols.MigrationRepositorySymbol).toConstantValue(repository);
|
|
48
|
+
} else {
|
|
49
|
+
// Default repository implementation.
|
|
50
|
+
container.bind(_symbols.MigrationRepositorySymbol).to(_migrations.MigrationRepositoryImpl);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// Bind the provided migrations.
|
|
54
|
+
migrations.forEach(migration => container.bind(_symbols.MigrationSymbol).to(migration));
|
|
55
|
+
|
|
56
|
+
// If handler was invoked with a `pattern`, filter migrations that match the pattern only.
|
|
57
|
+
let patternMatcher;
|
|
58
|
+
if (payload.pattern) {
|
|
59
|
+
patternMatcher = (0, _createPatternMatcher.createPatternMatcher)(payload.pattern);
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
// Inject dependencies and execute.
|
|
63
|
+
try {
|
|
64
|
+
const runner = await container.resolve(_MigrationRunner.MigrationRunner);
|
|
65
|
+
if (payload.command === "execute") {
|
|
66
|
+
await runner.execute(projectVersion, patternMatcher || isMigrationApplicable);
|
|
67
|
+
return;
|
|
68
|
+
}
|
|
69
|
+
return {
|
|
70
|
+
data: await runner.getStatus()
|
|
71
|
+
};
|
|
72
|
+
} catch (err) {
|
|
73
|
+
return {
|
|
74
|
+
error: {
|
|
75
|
+
message: err.message
|
|
76
|
+
}
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
});
|
|
80
|
+
};
|
|
81
|
+
exports.createDdbEsProjectMigration = createDdbEsProjectMigration;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createDdbEsProjectMigration","migrations","elasticsearchClient","primaryTable","dynamoToEsTable","isMigrationApplicable","undefined","repository","config","createRawEventHandler","payload","lambdaContext","projectVersion","String","version","process","env","WEBINY_VERSION","semverCoerce","devVersionErrorResponse","container","createContainer","bind","PrimaryDynamoTableSymbol","toConstantValue","ElasticsearchDynamoTableSymbol","ElasticsearchClientSymbol","timeLimiter","getRemainingTimeInMillis","ExecutionTimeLimiterSymbol","MigrationRepositorySymbol","to","MigrationRepositoryImpl","forEach","migration","MigrationSymbol","patternMatcher","pattern","createPatternMatcher","runner","resolve","MigrationRunner","command","execute","data","getStatus","err","error","message"],"sources":["createDdbEsProjectMigration.ts"],"sourcesContent":["import { Client as ElasticsearchClient } from \"@elastic/elasticsearch\";\nimport { Table } from \"dynamodb-toolbox\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { Constructor, createContainer } from \"@webiny/ioc\";\nimport {\n DataMigration,\n ExecutionTimeLimiter,\n MigrationEventHandlerResponse,\n MigrationEventPayload,\n MigrationRepository\n} from \"~/types\";\nimport {\n ElasticsearchClientSymbol,\n ElasticsearchDynamoTableSymbol,\n ExecutionTimeLimiterSymbol,\n MigrationRepositorySymbol,\n MigrationSymbol,\n PrimaryDynamoTableSymbol\n} from \"~/symbols\";\nimport { IsMigrationApplicable, MigrationRunner } from \"~/MigrationRunner\";\nimport { MigrationRepositoryImpl } from \"~/repository/migrations.repository\";\nimport { devVersionErrorResponse } from \"~/handlers/devVersionErrorResponse\";\nimport { createPatternMatcher } from \"~/handlers/createPatternMatcher\";\nimport { coerce as semverCoerce } from \"semver\";\n\ninterface CreateDdbEsDataMigrationConfig {\n elasticsearchClient: ElasticsearchClient;\n primaryTable: Table;\n dynamoToEsTable: Table;\n migrations: 
Constructor<DataMigration>[];\n isMigrationApplicable?: IsMigrationApplicable;\n repository?: MigrationRepository;\n timeLimiter?: ExecutionTimeLimiter;\n}\n\nexport const createDdbEsProjectMigration = ({\n migrations,\n elasticsearchClient,\n primaryTable,\n dynamoToEsTable,\n isMigrationApplicable = undefined,\n repository = undefined,\n ...config\n}: CreateDdbEsDataMigrationConfig) => {\n return createRawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>(\n async ({ payload, lambdaContext }) => {\n const projectVersion = String(payload?.version || process.env.WEBINY_VERSION);\n\n const version = semverCoerce(projectVersion);\n if (version?.version === \"0.0.0\") {\n return devVersionErrorResponse();\n }\n\n // COMPOSITION ROOT\n const container = createContainer();\n container.bind(PrimaryDynamoTableSymbol).toConstantValue(primaryTable);\n container.bind(ElasticsearchDynamoTableSymbol).toConstantValue(dynamoToEsTable);\n container.bind(ElasticsearchClientSymbol).toConstantValue(elasticsearchClient);\n\n const timeLimiter: ExecutionTimeLimiter =\n config.timeLimiter || lambdaContext?.getRemainingTimeInMillis || (() => 0);\n container.bind(ExecutionTimeLimiterSymbol).toConstantValue(timeLimiter);\n\n if (repository) {\n // Repository implementation provided by the user.\n container.bind(MigrationRepositorySymbol).toConstantValue(repository);\n } else {\n // Default repository implementation.\n container.bind(MigrationRepositorySymbol).to(MigrationRepositoryImpl);\n }\n\n // Bind the provided migrations.\n migrations.forEach(migration => container.bind(MigrationSymbol).to(migration));\n\n // If handler was invoked with a `pattern`, filter migrations that match the pattern only.\n let patternMatcher;\n if (payload.pattern) {\n patternMatcher = createPatternMatcher(payload.pattern);\n }\n\n // Inject dependencies and execute.\n try {\n const runner = await container.resolve(MigrationRunner);\n\n if (payload.command === \"execute\") {\n await 
runner.execute(projectVersion, patternMatcher || isMigrationApplicable);\n return;\n }\n\n return { data: await runner.getStatus() };\n } catch (err) {\n return { error: { message: err.message } };\n }\n }\n );\n};\n"],"mappings":";;;;;;;;AAEA;AACA;AAQA;AAQA;AACA;AACA;AACA;AACA;AAAgD;AAYzC,MAAMA,2BAA2B,GAAG,QAQL;EAAA,IARM;MACxCC,UAAU;MACVC,mBAAmB;MACnBC,YAAY;MACZC,eAAe;MACfC,qBAAqB,GAAGC,SAAS;MACjCC,UAAU,GAAGD;IAEe,CAAC;IAD1BE,MAAM;EAET,OAAO,IAAAC,iCAAqB,EACxB,OAAO;IAAEC,OAAO;IAAEC;EAAc,CAAC,KAAK;IAClC,MAAMC,cAAc,GAAGC,MAAM,CAAC,CAAAH,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEI,OAAO,KAAIC,OAAO,CAACC,GAAG,CAACC,cAAc,CAAC;IAE7E,MAAMH,OAAO,GAAG,IAAAI,cAAY,EAACN,cAAc,CAAC;IAC5C,IAAI,CAAAE,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEA,OAAO,MAAK,OAAO,EAAE;MAC9B,OAAO,IAAAK,gDAAuB,GAAE;IACpC;;IAEA;IACA,MAAMC,SAAS,GAAG,IAAAC,oBAAe,GAAE;IACnCD,SAAS,CAACE,IAAI,CAACC,iCAAwB,CAAC,CAACC,eAAe,CAACrB,YAAY,CAAC;IACtEiB,SAAS,CAACE,IAAI,CAACG,uCAA8B,CAAC,CAACD,eAAe,CAACpB,eAAe,CAAC;IAC/EgB,SAAS,CAACE,IAAI,CAACI,kCAAyB,CAAC,CAACF,eAAe,CAACtB,mBAAmB,CAAC;IAE9E,MAAMyB,WAAiC,GACnCnB,MAAM,CAACmB,WAAW,KAAIhB,aAAa,aAAbA,aAAa,uBAAbA,aAAa,CAAEiB,wBAAwB,MAAK,MAAM,CAAC,CAAC;IAC9ER,SAAS,CAACE,IAAI,CAACO,mCAA0B,CAAC,CAACL,eAAe,CAACG,WAAW,CAAC;IAEvE,IAAIpB,UAAU,EAAE;MACZ;MACAa,SAAS,CAACE,IAAI,CAACQ,kCAAyB,CAAC,CAACN,eAAe,CAACjB,UAAU,CAAC;IACzE,CAAC,MAAM;MACH;MACAa,SAAS,CAACE,IAAI,CAACQ,kCAAyB,CAAC,CAACC,EAAE,CAACC,mCAAuB,CAAC;IACzE;;IAEA;IACA/B,UAAU,CAACgC,OAAO,CAACC,SAAS,IAAId,SAAS,CAACE,IAAI,CAACa,wBAAe,CAAC,CAACJ,EAAE,CAACG,SAAS,CAAC,CAAC;;IAE9E;IACA,IAAIE,cAAc;IAClB,IAAI1B,OAAO,CAAC2B,OAAO,EAAE;MACjBD,cAAc,GAAG,IAAAE,0CAAoB,EAAC5B,OAAO,CAAC2B,OAAO,CAAC;IAC1D;;IAEA;IACA,IAAI;MACA,MAAME,MAAM,GAAG,MAAMnB,SAAS,CAACoB,OAAO,CAACC,gCAAe,CAAC;MAEvD,IAAI/B,OAAO,CAACgC,OAAO,KAAK,SAAS,EAAE;QAC/B,MAAMH,MAAM,CAACI,OAAO,CAAC/B,cAAc,EAAEwB,cAAc,IAAI/B,qBAAqB,CAAC;QAC7E;MACJ;MAEA,OAAO;QAAEuC,IAAI,EAAE,MAAML,MAAM,CAACM,SAAS;MAAG,CAAC;IAC7C,CAAC,CAAC,OAAOC,GAAG,EAAE;MACV,OAAO;QAAEC,KAAK,EAAE;UAAEC,OAAO,EAAEF,GAAG,CAACE;QAAQ;MAAE,CAAC
;IAC9C;EACJ,CAAC,CACJ;AACL,CAAC;AAAC"}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { Table } from "dynamodb-toolbox";
|
|
2
|
+
import { Constructor } from "@webiny/ioc";
|
|
3
|
+
import { IsMigrationApplicable } from "../MigrationRunner";
|
|
4
|
+
import { DataMigration, ExecutionTimeLimiter, MigrationEventHandlerResponse, MigrationEventPayload, MigrationRepository } from "../types";
|
|
5
|
+
interface CreateDdbDataMigrationConfig {
|
|
6
|
+
migrations: Constructor<DataMigration>[];
|
|
7
|
+
primaryTable: Table;
|
|
8
|
+
repository?: MigrationRepository;
|
|
9
|
+
isMigrationApplicable?: IsMigrationApplicable;
|
|
10
|
+
timeLimiter?: ExecutionTimeLimiter;
|
|
11
|
+
}
|
|
12
|
+
export declare const createDdbProjectMigration: ({ migrations, primaryTable, isMigrationApplicable, repository, ...config }: CreateDdbDataMigrationConfig) => import("@webiny/handler-aws").RawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>;
|
|
13
|
+
export {};
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.createDdbProjectMigration = void 0;
|
|
8
|
+
var _objectWithoutProperties2 = _interopRequireDefault(require("@babel/runtime/helpers/objectWithoutProperties"));
|
|
9
|
+
var _handlerAws = require("@webiny/handler-aws");
|
|
10
|
+
var _ioc = require("@webiny/ioc");
|
|
11
|
+
var _MigrationRunner = require("../MigrationRunner");
|
|
12
|
+
var _symbols = require("../symbols");
|
|
13
|
+
var _migrations = require("../repository/migrations.repository");
|
|
14
|
+
var _devVersionErrorResponse = require("./devVersionErrorResponse");
|
|
15
|
+
var _createPatternMatcher = require("./createPatternMatcher");
|
|
16
|
+
var _semver = require("semver");
|
|
17
|
+
const _excluded = ["migrations", "primaryTable", "isMigrationApplicable", "repository"];
|
|
18
|
+
const createDdbProjectMigration = _ref => {
|
|
19
|
+
let {
|
|
20
|
+
migrations,
|
|
21
|
+
primaryTable,
|
|
22
|
+
isMigrationApplicable = undefined,
|
|
23
|
+
repository = undefined
|
|
24
|
+
} = _ref,
|
|
25
|
+
config = (0, _objectWithoutProperties2.default)(_ref, _excluded);
|
|
26
|
+
return (0, _handlerAws.createRawEventHandler)(async ({
|
|
27
|
+
payload,
|
|
28
|
+
lambdaContext
|
|
29
|
+
}) => {
|
|
30
|
+
const projectVersion = String((payload === null || payload === void 0 ? void 0 : payload.version) || process.env.WEBINY_VERSION);
|
|
31
|
+
const version = (0, _semver.coerce)(projectVersion);
|
|
32
|
+
if ((version === null || version === void 0 ? void 0 : version.version) === "0.0.0") {
|
|
33
|
+
return (0, _devVersionErrorResponse.devVersionErrorResponse)();
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// COMPOSITION ROOT
|
|
37
|
+
const container = (0, _ioc.createContainer)();
|
|
38
|
+
container.bind(_symbols.PrimaryDynamoTableSymbol).toConstantValue(primaryTable);
|
|
39
|
+
const timeLimiter = config.timeLimiter || (lambdaContext === null || lambdaContext === void 0 ? void 0 : lambdaContext.getRemainingTimeInMillis) || (() => 0);
|
|
40
|
+
container.bind(_symbols.ExecutionTimeLimiterSymbol).toConstantValue(timeLimiter);
|
|
41
|
+
if (repository) {
|
|
42
|
+
// Repository implementation provided by the user.
|
|
43
|
+
container.bind(_symbols.MigrationRepositorySymbol).toConstantValue(repository);
|
|
44
|
+
} else {
|
|
45
|
+
// Default repository implementation.
|
|
46
|
+
container.bind(_symbols.MigrationRepositorySymbol).to(_migrations.MigrationRepositoryImpl);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// Bind the provided migrations.
|
|
50
|
+
migrations.forEach(migration => container.bind(_symbols.MigrationSymbol).to(migration));
|
|
51
|
+
|
|
52
|
+
// If handler was invoked with a `pattern`, filter migrations that match the pattern only.
|
|
53
|
+
let patternMatcher;
|
|
54
|
+
if (payload.pattern) {
|
|
55
|
+
patternMatcher = (0, _createPatternMatcher.createPatternMatcher)(payload.pattern);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Inject dependencies and execute.
|
|
59
|
+
try {
|
|
60
|
+
const runner = await container.resolve(_MigrationRunner.MigrationRunner);
|
|
61
|
+
if (payload.command === "execute") {
|
|
62
|
+
await runner.execute(projectVersion, patternMatcher || isMigrationApplicable);
|
|
63
|
+
return;
|
|
64
|
+
}
|
|
65
|
+
return {
|
|
66
|
+
data: await runner.getStatus()
|
|
67
|
+
};
|
|
68
|
+
} catch (err) {
|
|
69
|
+
return {
|
|
70
|
+
error: {
|
|
71
|
+
message: err.message
|
|
72
|
+
}
|
|
73
|
+
};
|
|
74
|
+
}
|
|
75
|
+
});
|
|
76
|
+
};
|
|
77
|
+
exports.createDdbProjectMigration = createDdbProjectMigration;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createDdbProjectMigration","migrations","primaryTable","isMigrationApplicable","undefined","repository","config","createRawEventHandler","payload","lambdaContext","projectVersion","String","version","process","env","WEBINY_VERSION","semverCoerce","devVersionErrorResponse","container","createContainer","bind","PrimaryDynamoTableSymbol","toConstantValue","timeLimiter","getRemainingTimeInMillis","ExecutionTimeLimiterSymbol","MigrationRepositorySymbol","to","MigrationRepositoryImpl","forEach","migration","MigrationSymbol","patternMatcher","pattern","createPatternMatcher","runner","resolve","MigrationRunner","command","execute","data","getStatus","err","error","message"],"sources":["createDdbProjectMigration.ts"],"sourcesContent":["import { Table } from \"dynamodb-toolbox\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { Constructor, createContainer } from \"@webiny/ioc\";\nimport { IsMigrationApplicable, MigrationRunner } from \"~/MigrationRunner\";\nimport {\n ExecutionTimeLimiterSymbol,\n MigrationRepositorySymbol,\n MigrationSymbol,\n PrimaryDynamoTableSymbol\n} from \"~/symbols\";\nimport { MigrationRepositoryImpl } from \"~/repository/migrations.repository\";\nimport { devVersionErrorResponse } from \"./devVersionErrorResponse\";\nimport { createPatternMatcher } from \"./createPatternMatcher\";\nimport {\n DataMigration,\n ExecutionTimeLimiter,\n MigrationEventHandlerResponse,\n MigrationEventPayload,\n MigrationRepository\n} from \"~/types\";\nimport { coerce as semverCoerce } from \"semver\";\n\ninterface CreateDdbDataMigrationConfig {\n migrations: Constructor<DataMigration>[];\n primaryTable: Table;\n repository?: MigrationRepository;\n isMigrationApplicable?: IsMigrationApplicable;\n timeLimiter?: ExecutionTimeLimiter;\n}\n\nexport const createDdbProjectMigration = ({\n migrations,\n primaryTable,\n isMigrationApplicable = undefined,\n repository = undefined,\n ...config\n}: CreateDdbDataMigrationConfig) => 
{\n return createRawEventHandler<MigrationEventPayload, any, MigrationEventHandlerResponse>(\n async ({ payload, lambdaContext }) => {\n const projectVersion = String(payload?.version || process.env.WEBINY_VERSION);\n\n const version = semverCoerce(projectVersion);\n if (version?.version === \"0.0.0\") {\n return devVersionErrorResponse();\n }\n\n // COMPOSITION ROOT\n const container = createContainer();\n container.bind(PrimaryDynamoTableSymbol).toConstantValue(primaryTable);\n\n const timeLimiter: ExecutionTimeLimiter =\n config.timeLimiter || lambdaContext?.getRemainingTimeInMillis || (() => 0);\n container.bind(ExecutionTimeLimiterSymbol).toConstantValue(timeLimiter);\n\n if (repository) {\n // Repository implementation provided by the user.\n container.bind(MigrationRepositorySymbol).toConstantValue(repository);\n } else {\n // Default repository implementation.\n container.bind(MigrationRepositorySymbol).to(MigrationRepositoryImpl);\n }\n\n // Bind the provided migrations.\n migrations.forEach(migration => container.bind(MigrationSymbol).to(migration));\n\n // If handler was invoked with a `pattern`, filter migrations that match the pattern only.\n let patternMatcher;\n if (payload.pattern) {\n patternMatcher = createPatternMatcher(payload.pattern);\n }\n\n // Inject dependencies and execute.\n try {\n const runner = await container.resolve(MigrationRunner);\n\n if (payload.command === \"execute\") {\n await runner.execute(projectVersion, patternMatcher || isMigrationApplicable);\n return;\n }\n\n return { data: await runner.getStatus() };\n } catch (err) {\n return { error: { message: err.message } };\n }\n }\n 
);\n};\n"],"mappings":";;;;;;;;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAQA;AAAgD;AAUzC,MAAMA,yBAAyB,GAAG,QAML;EAAA,IANM;MACtCC,UAAU;MACVC,YAAY;MACZC,qBAAqB,GAAGC,SAAS;MACjCC,UAAU,GAAGD;IAEa,CAAC;IADxBE,MAAM;EAET,OAAO,IAAAC,iCAAqB,EACxB,OAAO;IAAEC,OAAO;IAAEC;EAAc,CAAC,KAAK;IAClC,MAAMC,cAAc,GAAGC,MAAM,CAAC,CAAAH,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEI,OAAO,KAAIC,OAAO,CAACC,GAAG,CAACC,cAAc,CAAC;IAE7E,MAAMH,OAAO,GAAG,IAAAI,cAAY,EAACN,cAAc,CAAC;IAC5C,IAAI,CAAAE,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEA,OAAO,MAAK,OAAO,EAAE;MAC9B,OAAO,IAAAK,gDAAuB,GAAE;IACpC;;IAEA;IACA,MAAMC,SAAS,GAAG,IAAAC,oBAAe,GAAE;IACnCD,SAAS,CAACE,IAAI,CAACC,iCAAwB,CAAC,CAACC,eAAe,CAACpB,YAAY,CAAC;IAEtE,MAAMqB,WAAiC,GACnCjB,MAAM,CAACiB,WAAW,KAAId,aAAa,aAAbA,aAAa,uBAAbA,aAAa,CAAEe,wBAAwB,MAAK,MAAM,CAAC,CAAC;IAC9EN,SAAS,CAACE,IAAI,CAACK,mCAA0B,CAAC,CAACH,eAAe,CAACC,WAAW,CAAC;IAEvE,IAAIlB,UAAU,EAAE;MACZ;MACAa,SAAS,CAACE,IAAI,CAACM,kCAAyB,CAAC,CAACJ,eAAe,CAACjB,UAAU,CAAC;IACzE,CAAC,MAAM;MACH;MACAa,SAAS,CAACE,IAAI,CAACM,kCAAyB,CAAC,CAACC,EAAE,CAACC,mCAAuB,CAAC;IACzE;;IAEA;IACA3B,UAAU,CAAC4B,OAAO,CAACC,SAAS,IAAIZ,SAAS,CAACE,IAAI,CAACW,wBAAe,CAAC,CAACJ,EAAE,CAACG,SAAS,CAAC,CAAC;;IAE9E;IACA,IAAIE,cAAc;IAClB,IAAIxB,OAAO,CAACyB,OAAO,EAAE;MACjBD,cAAc,GAAG,IAAAE,0CAAoB,EAAC1B,OAAO,CAACyB,OAAO,CAAC;IAC1D;;IAEA;IACA,IAAI;MACA,MAAME,MAAM,GAAG,MAAMjB,SAAS,CAACkB,OAAO,CAACC,gCAAe,CAAC;MAEvD,IAAI7B,OAAO,CAAC8B,OAAO,KAAK,SAAS,EAAE;QAC/B,MAAMH,MAAM,CAACI,OAAO,CAAC7B,cAAc,EAAEsB,cAAc,IAAI7B,qBAAqB,CAAC;QAC7E;MACJ;MAEA,OAAO;QAAEqC,IAAI,EAAE,MAAML,MAAM,CAACM,SAAS;MAAG,CAAC;IAC7C,CAAC,CAAC,OAAOC,GAAG,EAAE;MACV,OAAO;QAAEC,KAAK,EAAE;UAAEC,OAAO,EAAEF,GAAG,CAACE;QAAQ;MAAE,CAAC;IAC9C;EACJ,CAAC,CACJ;AACL,CAAC;AAAC"}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.createPatternMatcher = void 0;
|
|
8
|
+
var _minimatch = _interopRequireDefault(require("minimatch"));
|
|
9
|
+
// Builds a predicate that decides whether a data migration matches `pattern`.
// A pattern containing "*" is treated as a glob and matched with minimatch;
// any other pattern must equal the migration ID exactly.
const createPatternMatcher = pattern => {
  // The glob/exact decision depends only on the (immutable) pattern string,
  // so it can be made once, up front, instead of on every invocation.
  const isGlob = pattern.includes("*");
  return migration => {
    const id = migration.getId();
    if (isGlob) {
      return (0, _minimatch.default)(id, pattern);
    }
    return id === pattern;
  };
};
|
|
17
|
+
exports.createPatternMatcher = createPatternMatcher;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["createPatternMatcher","pattern","migration","includes","minimatch","getId"],"sources":["createPatternMatcher.ts"],"sourcesContent":["import minimatch from \"minimatch\";\nimport { IsMigrationApplicable } from \"~/MigrationRunner\";\n\nexport const createPatternMatcher = (pattern: string): IsMigrationApplicable => {\n return migration => {\n if (pattern.includes(\"*\")) {\n return minimatch(migration.getId(), pattern);\n }\n return migration.getId() === pattern;\n };\n};\n"],"mappings":";;;;;;;AAAA;AAGO,MAAMA,oBAAoB,GAAIC,OAAe,IAA4B;EAC5E,OAAOC,SAAS,IAAI;IAChB,IAAID,OAAO,CAACE,QAAQ,CAAC,GAAG,CAAC,EAAE;MACvB,OAAO,IAAAC,kBAAS,EAACF,SAAS,CAACG,KAAK,EAAE,EAAEJ,OAAO,CAAC;IAChD;IACA,OAAOC,SAAS,CAACG,KAAK,EAAE,KAAKJ,OAAO;EACxC,CAAC;AACL,CAAC;AAAC"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.devVersionErrorResponse = void 0;
|
|
7
|
+
// Error payload returned when the project reports the development version
// ("0.0.0"): running migrations under 0.0.0 would make every migration
// eligible for execution, so the run is refused with this explanation.
const devVersionErrorResponse = () => ({
  error: {
    message: [
      `This project is using a development version 0.0.0!`,
      `Migrations cannot be executed using version 0.0.0, as that makes them all eligible for execution.`,
      `To trigger a particular set of migrations, set a WEBINY_VERSION variable in the .env file.`
    ].join(" ")
  }
});
|
|
14
|
+
exports.devVersionErrorResponse = devVersionErrorResponse;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["devVersionErrorResponse","error","message","join"],"sources":["devVersionErrorResponse.ts"],"sourcesContent":["export const devVersionErrorResponse = () => {\n return {\n error: {\n message: [\n `This project is using a development version 0.0.0!`,\n `Migrations cannot be executed using version 0.0.0, as that makes them all eligible for execution.`,\n `To trigger a particular set of migrations, set a WEBINY_VERSION variable in the .env file.`\n ].join(\" \")\n }\n };\n};\n"],"mappings":";;;;;;AAAO,MAAMA,uBAAuB,GAAG,MAAM;EACzC,OAAO;IACHC,KAAK,EAAE;MACHC,OAAO,EAAE,CACJ,oDAAmD,EACnD,mGAAkG,EAClG,4FAA2F,CAC/F,CAACC,IAAI,CAAC,GAAG;IACd;EACJ,CAAC;AACL,CAAC;AAAC"}
|
package/index.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
export * from "./handlers/createDdbProjectMigration";
|
|
2
|
+
export * from "./handlers/createDdbEsProjectMigration";
|
|
3
|
+
export * from "./symbols";
|
|
4
|
+
export * from "./types";
|
|
5
|
+
export * from "./createTable";
|
|
6
|
+
export * from "./createPinoLogger";
|
|
7
|
+
export * from "./createPinoLogger";
|
package/index.js
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
var _createDdbProjectMigration = require("./handlers/createDdbProjectMigration");
|
|
7
|
+
Object.keys(_createDdbProjectMigration).forEach(function (key) {
|
|
8
|
+
if (key === "default" || key === "__esModule") return;
|
|
9
|
+
if (key in exports && exports[key] === _createDdbProjectMigration[key]) return;
|
|
10
|
+
Object.defineProperty(exports, key, {
|
|
11
|
+
enumerable: true,
|
|
12
|
+
get: function () {
|
|
13
|
+
return _createDdbProjectMigration[key];
|
|
14
|
+
}
|
|
15
|
+
});
|
|
16
|
+
});
|
|
17
|
+
var _createDdbEsProjectMigration = require("./handlers/createDdbEsProjectMigration");
|
|
18
|
+
Object.keys(_createDdbEsProjectMigration).forEach(function (key) {
|
|
19
|
+
if (key === "default" || key === "__esModule") return;
|
|
20
|
+
if (key in exports && exports[key] === _createDdbEsProjectMigration[key]) return;
|
|
21
|
+
Object.defineProperty(exports, key, {
|
|
22
|
+
enumerable: true,
|
|
23
|
+
get: function () {
|
|
24
|
+
return _createDdbEsProjectMigration[key];
|
|
25
|
+
}
|
|
26
|
+
});
|
|
27
|
+
});
|
|
28
|
+
var _symbols = require("./symbols");
|
|
29
|
+
Object.keys(_symbols).forEach(function (key) {
|
|
30
|
+
if (key === "default" || key === "__esModule") return;
|
|
31
|
+
if (key in exports && exports[key] === _symbols[key]) return;
|
|
32
|
+
Object.defineProperty(exports, key, {
|
|
33
|
+
enumerable: true,
|
|
34
|
+
get: function () {
|
|
35
|
+
return _symbols[key];
|
|
36
|
+
}
|
|
37
|
+
});
|
|
38
|
+
});
|
|
39
|
+
var _types = require("./types");
|
|
40
|
+
Object.keys(_types).forEach(function (key) {
|
|
41
|
+
if (key === "default" || key === "__esModule") return;
|
|
42
|
+
if (key in exports && exports[key] === _types[key]) return;
|
|
43
|
+
Object.defineProperty(exports, key, {
|
|
44
|
+
enumerable: true,
|
|
45
|
+
get: function () {
|
|
46
|
+
return _types[key];
|
|
47
|
+
}
|
|
48
|
+
});
|
|
49
|
+
});
|
|
50
|
+
var _createTable = require("./createTable");
|
|
51
|
+
Object.keys(_createTable).forEach(function (key) {
|
|
52
|
+
if (key === "default" || key === "__esModule") return;
|
|
53
|
+
if (key in exports && exports[key] === _createTable[key]) return;
|
|
54
|
+
Object.defineProperty(exports, key, {
|
|
55
|
+
enumerable: true,
|
|
56
|
+
get: function () {
|
|
57
|
+
return _createTable[key];
|
|
58
|
+
}
|
|
59
|
+
});
|
|
60
|
+
});
|
|
61
|
+
var _createPinoLogger = require("./createPinoLogger");
|
|
62
|
+
Object.keys(_createPinoLogger).forEach(function (key) {
|
|
63
|
+
if (key === "default" || key === "__esModule") return;
|
|
64
|
+
if (key in exports && exports[key] === _createPinoLogger[key]) return;
|
|
65
|
+
Object.defineProperty(exports, key, {
|
|
66
|
+
enumerable: true,
|
|
67
|
+
get: function () {
|
|
68
|
+
return _createPinoLogger[key];
|
|
69
|
+
}
|
|
70
|
+
});
|
|
71
|
+
});
|
package/index.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":[],"sources":["index.ts"],"sourcesContent":["export * from \"./handlers/createDdbProjectMigration\";\nexport * from \"./handlers/createDdbEsProjectMigration\";\nexport * from \"./symbols\";\nexport * from \"./types\";\nexport * from \"./createTable\";\nexport * from \"./createPinoLogger\";\nexport * from \"./createPinoLogger\";\n"],"mappings":";;;;;AAAA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AACA;AACA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA"}
|
package/package.json
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@webiny/data-migration",
|
|
3
|
+
"version": "0.0.0-unstable.1145e7667f",
|
|
4
|
+
"main": "index.js",
|
|
5
|
+
"types": "types.ts",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "https://github.com/webiny/webiny-js.git"
|
|
10
|
+
},
|
|
11
|
+
"description": "Tools to author and execute data migrations.",
|
|
12
|
+
"author": "Webiny Ltd.",
|
|
13
|
+
"dependencies": {
|
|
14
|
+
"@babel/runtime": "7.20.13",
|
|
15
|
+
"@elastic/elasticsearch": "7.12.0",
|
|
16
|
+
"@types/pino": "7.0.5",
|
|
17
|
+
"@webiny/db-dynamodb": "0.0.0-unstable.1145e7667f",
|
|
18
|
+
"@webiny/handler-aws": "0.0.0-unstable.1145e7667f",
|
|
19
|
+
"@webiny/ioc": "0.0.0-unstable.1145e7667f",
|
|
20
|
+
"@webiny/utils": "0.0.0-unstable.1145e7667f",
|
|
21
|
+
"center-align": "1.0.1",
|
|
22
|
+
"chalk": "4.1.2",
|
|
23
|
+
"dynamodb-toolbox": "0.3.5",
|
|
24
|
+
"mdbid": "1.0.0",
|
|
25
|
+
"minimatch": "5.1.6",
|
|
26
|
+
"pino": "8.11.0",
|
|
27
|
+
"pino-pretty": "9.4.0",
|
|
28
|
+
"semver": "6.3.0"
|
|
29
|
+
},
|
|
30
|
+
"devDependencies": {
|
|
31
|
+
"@babel/cli": "^7.19.3",
|
|
32
|
+
"@babel/core": "^7.19.3",
|
|
33
|
+
"@babel/preset-env": "^7.19.4",
|
|
34
|
+
"@types/center-align": "^1.0.0",
|
|
35
|
+
"@types/semver": "^7.3.4",
|
|
36
|
+
"@webiny/cli": "^0.0.0-unstable.1145e7667f",
|
|
37
|
+
"@webiny/project-utils": "^0.0.0-unstable.1145e7667f",
|
|
38
|
+
"jest": "^28.1.0",
|
|
39
|
+
"jest-dynalite": "^3.2.0",
|
|
40
|
+
"jest-mock-console": "^1.0.0",
|
|
41
|
+
"rimraf": "^3.0.2",
|
|
42
|
+
"typescript": "4.7.4"
|
|
43
|
+
},
|
|
44
|
+
"publishConfig": {
|
|
45
|
+
"access": "public",
|
|
46
|
+
"directory": "dist"
|
|
47
|
+
},
|
|
48
|
+
"scripts": {
|
|
49
|
+
"build": "yarn webiny run build",
|
|
50
|
+
"watch": "yarn webiny run watch"
|
|
51
|
+
},
|
|
52
|
+
"adio": {
|
|
53
|
+
"ignore": {
|
|
54
|
+
"dependencies": [
|
|
55
|
+
"@types/pino"
|
|
56
|
+
]
|
|
57
|
+
}
|
|
58
|
+
},
|
|
59
|
+
"gitHead": "1145e7667ffd3d18bfea1e73d6078cd6a35c1038"
|
|
60
|
+
}
|