@webiny/migrations 5.40.5 → 5.40.6-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/migrations/5.39.0/001/utils/getFallbackIdentity.d.ts +3 -1
- package/migrations/5.39.0/001/utils/getFallbackIdentity.js +13 -8
- package/migrations/5.39.0/001/utils/getFallbackIdentity.js.map +1 -1
- package/migrations/5.39.0/001/utils/getFirstLastPublishedOn.d.ts +2 -0
- package/migrations/5.39.0/001/utils/getFirstLastPublishedOn.js +9 -5
- package/migrations/5.39.0/001/utils/getFirstLastPublishedOn.js.map +1 -1
- package/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn.d.ts +2 -0
- package/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn.js +9 -5
- package/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn.js.map +1 -1
- package/migrations/5.39.6/001/ddb-es/worker.js +301 -188
- package/migrations/5.39.6/001/ddb-es/worker.js.map +1 -1
- package/package.json +17 -17

package/migrations/5.39.0/001/utils/getFallbackIdentity.d.ts
@@ -1,8 +1,10 @@
 import { CmsIdentity } from "@webiny/api-headless-cms/types";
 import { Entity } from "@webiny/db-dynamodb/toolbox";
+import { ExecuteWithRetryOptions } from "@webiny/utils";
 interface GetFallbackIdentityParams {
     entity: Entity;
     tenant: string;
+    retryOptions?: ExecuteWithRetryOptions;
 }
-export declare const getFallbackIdentity: ({ entity, tenant }: GetFallbackIdentityParams) => Promise<CmsIdentity>;
+export declare const getFallbackIdentity: ({ entity, tenant, retryOptions }: GetFallbackIdentityParams) => Promise<CmsIdentity>;
 export {};
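
The `retryOptions` parameter is optional, so existing callers are unaffected. A minimal sketch of a call that opts into retries, modelled on the usage the worker.js changes further down in this diff add; `ddbEntryEntity`, `item`, and `logger` are assumed to be in scope the way they are in that worker:

    const fallbackIdentity = await getFallbackIdentity({
        entity: ddbEntryEntity,
        tenant: item.tenant,
        retryOptions: {
            // Invoked by executeWithRetry whenever an attempt fails, before the next retry.
            onFailedAttempt: error => {
                logger.warn(
                    { error, item },
                    `getFallbackIdentity attempt #${error.attemptNumber} failed: ${error.message}`
                );
            }
        }
    });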

package/migrations/5.39.0/001/utils/getFallbackIdentity.js
@@ -5,6 +5,7 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.getFallbackIdentity = void 0;
 var _dbDynamodb = require("@webiny/db-dynamodb");
+var _utils = require("@webiny/utils");
 const NON_EXISTING_DATA_MIGRATION_IDENTITY = {
   id: "data-migration",
   type: "data-migration",
@@ -13,18 +14,22 @@ const NON_EXISTING_DATA_MIGRATION_IDENTITY = {
 const identitiesPerTenantCache = {};
 const getFallbackIdentity = async ({
   entity,
-  tenant
+  tenant,
+  retryOptions
 }) => {
   if (identitiesPerTenantCache[tenant]) {
     return identitiesPerTenantCache[tenant];
   }
-  const allAdminUsersRecords = await (0, _dbDynamodb.queryAll)({
-    entity,
-    partitionKey: `T#${tenant}#ADMIN_USERS`,
-    options: {
-      index: "GSI1"
-    }
-  });
+  const executeQueryAll = () => {
+    return (0, _dbDynamodb.queryAll)({
+      entity,
+      partitionKey: `T#${tenant}#ADMIN_USERS`,
+      options: {
+        index: "GSI1"
+      }
+    });
+  };
+  const allAdminUsersRecords = await (0, _utils.executeWithRetry)(executeQueryAll, retryOptions);
   if (allAdminUsersRecords.length === 0) {
     // Hopefully it doesn't come to this, but we still need to consider it.
     return NON_EXISTING_DATA_MIGRATION_IDENTITY;
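
For reference, the updated source map for this file embeds the new TypeScript source; the retry-related portion of getFallbackIdentity.ts reads:

    import { queryAll } from "@webiny/db-dynamodb";
    import { executeWithRetry, ExecuteWithRetryOptions } from "@webiny/utils";

    export const getFallbackIdentity = async ({
        entity,
        tenant,
        retryOptions
    }: GetFallbackIdentityParams): Promise<CmsIdentity> => {
        if (identitiesPerTenantCache[tenant]) {
            return identitiesPerTenantCache[tenant];
        }

        const executeQueryAll = () => {
            return queryAll<AdminUserRecord>({
                entity,
                partitionKey: `T#${tenant}#ADMIN_USERS`,
                options: {
                    index: "GSI1"
                }
            });
        };

        const allAdminUsersRecords = await executeWithRetry(executeQueryAll, retryOptions);
        // ... (remainder unchanged: pick the oldest admin user, cache and return it)
    };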

package/migrations/5.39.0/001/utils/getFallbackIdentity.js.map
@@ -1 +1 @@
 (regenerated single-line source map; JSON content omitted)

package/migrations/5.39.0/001/utils/getFirstLastPublishedOn.d.ts
@@ -1,11 +1,13 @@
 import { createDdbEntryEntity } from "./../entities/createEntryEntity";
 import { CmsEntry } from "../types";
+import { ExecuteWithRetryOptions } from "@webiny/utils";
 interface CmsEntryWithPK extends CmsEntry {
     PK: string;
 }
 export interface getFirstLastPublishedOnParams {
     entry: CmsEntryWithPK;
     entryEntity: ReturnType<typeof createDdbEntryEntity>;
+    retryOptions?: ExecuteWithRetryOptions;
 }
 export declare const getFirstLastPublishedOnBy: (params: getFirstLastPublishedOnParams) => Promise<Pick<CmsEntry<import("../types").CmsEntryValues>, "firstPublishedOn" | "lastPublishedOn" | "firstPublishedBy" | "lastPublishedBy">>;
 export {};

package/migrations/5.39.0/001/utils/getFirstLastPublishedOn.js
@@ -4,6 +4,7 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.getFirstLastPublishedOnBy = void 0;
+var _utils = require("@webiny/utils");
 const cachedEntryFirstLastPublishedOnBy = {};
 const getFirstLastPublishedOnBy = async params => {
   const {
@@ -19,11 +20,14 @@ const getFirstLastPublishedOnBy = async params => {
     firstPublishedBy: null,
     lastPublishedBy: null
   };
-  const result = await entryEntity.query(entry.PK, {
-    limit: 1,
-    eq: "P",
-    attributes: ["modifiedBy", "createdBy", "publishedOn"]
-  });
+  const executeQuery = () => {
+    return entryEntity.query(entry.PK, {
+      limit: 1,
+      eq: "P",
+      attributes: ["modifiedBy", "createdBy", "publishedOn"]
+    });
+  };
+  const result = await (0, _utils.executeWithRetry)(executeQuery, params.retryOptions);
   const publishedRecord = result.Items?.[0];
   if (publishedRecord) {
     cachedEntryFirstLastPublishedOnBy[entry.PK] = {

package/migrations/5.39.0/001/utils/getFirstLastPublishedOn.js.map
@@ -1 +1 @@
 (regenerated single-line source map; JSON content omitted)

package/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn.d.ts
@@ -1,11 +1,13 @@
 import { createDdbEntryEntity } from "./../entities/createEntryEntity";
 import { CmsEntry } from "../types";
+import { ExecuteWithRetryOptions } from "@webiny/utils";
 interface CmsEntryWithPK extends CmsEntry {
     PK: string;
 }
 export interface GetOldestRevisionCreatedOnParams {
     entry: CmsEntryWithPK;
     entryEntity: ReturnType<typeof createDdbEntryEntity>;
+    retryOptions?: ExecuteWithRetryOptions;
 }
 export declare const getOldestRevisionCreatedOn: (params: GetOldestRevisionCreatedOnParams) => Promise<string>;
 export {};

package/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn.js
@@ -4,6 +4,7 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.getOldestRevisionCreatedOn = void 0;
+var _utils = require("@webiny/utils");
 const cachedEntryCreatedOn = {};
 const getOldestRevisionCreatedOn = async params => {
   const {
@@ -16,11 +17,14 @@ const getOldestRevisionCreatedOn = async params => {
   if (entry.version === 1) {
     cachedEntryCreatedOn[entry.PK] = entry.createdOn;
   } else {
-    const result = await entryEntity.query(entry.PK, {
-      limit: 1,
-      beginsWith: "REV#",
-      attributes: ["createdOn"]
-    });
+    const executeQuery = () => {
+      return entryEntity.query(entry.PK, {
+        limit: 1,
+        beginsWith: "REV#",
+        attributes: ["createdOn"]
+      });
+    };
+    const result = await (0, _utils.executeWithRetry)(executeQuery, params.retryOptions);
     const oldestRevision = result.Items?.[0];
     if (oldestRevision) {
       cachedEntryCreatedOn[entry.PK] = oldestRevision.createdOn;

package/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn.js.map
@@ -1 +1 @@
 (regenerated single-line source map; JSON content omitted)

package/migrations/5.39.6/001/ddb-es/worker.js
@@ -90,6 +90,10 @@ const createInitialStatus = () => {
     }
   };
 };
+let BATCH_WRITE_MAX_CHUNK = 20;
+if (process.env.WEBINY_MIGRATION_5_39_6_001_BATCH_WRITE_MAX_CHUNK) {
+  BATCH_WRITE_MAX_CHUNK = parseInt(process.env.WEBINY_MIGRATION_5_39_6_001_BATCH_WRITE_MAX_CHUNK);
+}
 (async () => {
   const logger = (0, _logger.createPinoLogger)({
     level: (0, _logger.getLogLevel)(process.env.MIGRATIONS_LOG_LEVEL, "trace"),
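
The new `BATCH_WRITE_MAX_CHUNK` value defaults to 20 and is passed as the second argument to the `batchWriteAll` calls in the next hunk. A minimal sketch of overriding it, assuming the variable is set in the worker process's environment before this module is loaded (the value 10 is only an example):

    // Hypothetical override: write DynamoDB batches in chunks of 10 instead of the default 20.
    process.env.WEBINY_MIGRATION_5_39_6_001_BATCH_WRITE_MAX_CHUNK = "10";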
@@ -119,234 +123,343 @@ const createInitialStatus = () => {
     maxWaitingTime: argv.esHealthMaxWaitingTime,
     waitingTimeStep: argv.esHealthWaitingTimeStep
   });
-
-
-
-
-
-
-
-
-
-
-
-
-  }, async result => {
-    status.stats.iterationsCount++;
-    status.stats.recordsScanned += result.items.length;
-    if (status.stats.iterationsCount % 5 === 0) {
-      // We log every 5th iteration.
-      logger.trace(`[iteration #${status.stats.iterationsCount}] Reading ${result.items.length} record(s)...`);
-    }
-    const ddbItemsToBatchWrite = [];
-    const ddbEsItemsToBatchWrite = [];
-    const ddbEsItemsToBatchRead = {};
-    const fallbackDateTime = new Date().toISOString();
-
-    // Update records in primary DynamoDB table. Also do preparations for
-    // subsequent updates on DDB-ES DynamoDB table, and in Elasticsearch.
-    for (const item of result.items) {
-      const isFullyMigrated = (0, _isMigratedEntry.isMigratedEntry)(item) && (0, _hasValidTypeFieldValue.hasValidTypeFieldValue)(item) && (0, _hasAllNonNullableValues.hasAllNonNullableValues)(item);
-      if (isFullyMigrated) {
-        status.stats.recordsSkipped++;
-        continue;
-      }
-
-      // 1. Check if the data migration was ever performed. If not, let's perform it.
-      if (!(0, _isMigratedEntry.isMigratedEntry)(item)) {
-        // Get the oldest revision's `createdOn` value. We use that to set the entry-level `createdOn` value.
-        const createdOn = await (0, _getOldestRevisionCreatedOn.getOldestRevisionCreatedOn)({
-          entry: item,
-          entryEntity: ddbEntryEntity
-        });
-        const firstLastPublishedOnByFields = await (0, _getFirstLastPublishedOn.getFirstLastPublishedOnBy)({
-          entry: item,
-          entryEntity: ddbEntryEntity
-        });
-        (0, _assignNewMetaFields.assignNewMetaFields)(item, {
-          createdOn,
-          ...firstLastPublishedOnByFields
-        });
-      }
-
-      // 2. We've noticed some of the records had an invalid `TYPE` field value
-      // in the database. This step addresses this issue.
-      if (!(0, _hasValidTypeFieldValue.hasValidTypeFieldValue)(item)) {
-        // Fixes the value of the `TYPE` field, if it's not valid.
-        (0, _fixTypeFieldValue.fixTypeFieldValue)(item);
+  try {
+    await (0, _utils2.ddbScanWithCallback)({
+      entity: ddbEntryEntity,
+      options: {
+        segment: argv.segmentIndex,
+        segments: argv.totalSegments,
+        filters: [{
+          attr: "_et",
+          eq: "CmsEntries"
+        }],
+        startKey: status.lastEvaluatedKey || undefined,
+        limit: 100
       }
-
-
-
-      if (
-
-
-        const fallbackIdentity = await (0, _getFallbackIdentity.getFallbackIdentity)({
-          entity: ddbEntryEntity,
-          tenant: item.tenant
-        });
-        (0, _ensureAllNonNullableValues.ensureAllNonNullableValues)(item, {
-          dateTime: fallbackDateTime,
-          identity: fallbackIdentity
-        });
-        logger.trace(`Successfully ensured all non-nullable meta fields have values (${item.modelId}/${item.id}). Will be saving into the database soon.`);
-      } catch (e) {
-        logger.debug(`Failed to ensure all non-nullable meta fields have values (${item.modelId}/${item.id}): ${e.message}`);
-      }
+    }, async result => {
+      status.stats.iterationsCount++;
+      status.stats.recordsScanned += result.items.length;
+      if (status.stats.iterationsCount % 5 === 0) {
+        // We log every 5th iteration.
+        logger.trace(`[iteration #${status.stats.iterationsCount}] Reading ${result.items.length} record(s)...`);
       }
-      ddbItemsToBatchWrite
-
-
-
-      */
+      const ddbItemsToBatchWrite = [];
+      const ddbEsItemsToBatchWrite = [];
+      const ddbEsItemsToBatchRead = {};
+      const fallbackDateTime = new Date().toISOString();
 
-
-
-
-
-
-
-          SK: "L"
-        });
-        const ddbEsPublishedRecordKey = `${item.entryId}:P`;
-        if (item.status === "published" || !!item.locked) {
-          ddbEsItemsToBatchRead[ddbEsPublishedRecordKey] = ddbEsEntryEntity.getBatch({
-            PK: item.PK,
-            SK: "P"
-          });
-        }
-      }
-      if (Object.keys(ddbEsItemsToBatchRead).length > 0) {
-        /**
-         * Get all the records from DynamoDB Elasticsearch.
-         */
-        const ddbEsRecords = await (0, _utils2.batchReadAll)({
-          table: ddbEsEntryEntity.table,
-          items: Object.values(ddbEsItemsToBatchRead)
-        });
-        for (const ddbEsRecord of ddbEsRecords) {
-          const decompressedData = await (0, _getDecompressedData.getDecompressedData)(ddbEsRecord.data);
-          if (!decompressedData) {
-            logger.trace(`[DDB-ES Table] Skipping record "${ddbEsRecord.PK}" as it is not a valid CMS entry...`);
+      // Update records in primary DynamoDB table. Also do preparations for
+      // subsequent updates on DDB-ES DynamoDB table, and in Elasticsearch.
+      for (const item of result.items) {
+        const isFullyMigrated = (0, _isMigratedEntry.isMigratedEntry)(item) && (0, _hasValidTypeFieldValue.hasValidTypeFieldValue)(item) && (0, _hasAllNonNullableValues.hasAllNonNullableValues)(item);
+        if (isFullyMigrated) {
+          status.stats.recordsSkipped++;
          continue;
        }

        // 1. Check if the data migration was ever performed. If not, let's perform it.
-        if (!(0, _isMigratedEntry.isMigratedEntry)(
+        if (!(0, _isMigratedEntry.isMigratedEntry)(item)) {
          // Get the oldest revision's `createdOn` value. We use that to set the entry-level `createdOn` value.
          const createdOn = await (0, _getOldestRevisionCreatedOn.getOldestRevisionCreatedOn)({
-            entry:
-
-
-
-
+            entry: item,
+            entryEntity: ddbEntryEntity,
+            retryOptions: {
+              onFailedAttempt: error => {
+                logger.warn({
+                  error,
+                  item
+                }, `getOldestRevisionCreatedOn attempt #${error.attemptNumber} failed: ${error.message}`);
+              }
+            }
          });
          const firstLastPublishedOnByFields = await (0, _getFirstLastPublishedOn.getFirstLastPublishedOnBy)({
-            entry:
-
-
-
-
+            entry: item,
+            entryEntity: ddbEntryEntity,
+            retryOptions: {
+              onFailedAttempt: error => {
+                logger.warn({
+                  error,
+                  item
+                }, `getFirstLastPublishedOnBy attempt #${error.attemptNumber} failed: ${error.message}`);
+              }
+            }
          });
-          (0, _assignNewMetaFields.assignNewMetaFields)(
+          (0, _assignNewMetaFields.assignNewMetaFields)(item, {
            createdOn,
            ...firstLastPublishedOnByFields
          });
        }

-        // 2.
-
-
+        // 2. We've noticed some of the records had an invalid `TYPE` field value
+        // in the database. This step addresses this issue.
+        if (!(0, _hasValidTypeFieldValue.hasValidTypeFieldValue)(item)) {
+          // Fixes the value of the `TYPE` field, if it's not valid.
+          (0, _fixTypeFieldValue.fixTypeFieldValue)(item);
+        }
+
+        // 3. Finally, once both of the steps were performed, ensure that all
+        // new non-nullable meta fields have a value and nothing is missing.
+        if (!(0, _hasAllNonNullableValues.hasAllNonNullableValues)(item)) {
+          logger.trace((0, _getNonNullableFieldsWithMissingValues.getNonNullableFieldsWithMissingValues)(item), `Detected an entry with missing values for non-nullable meta fields (${item.modelId}/${item.id}).`);
          try {
            const fallbackIdentity = await (0, _getFallbackIdentity.getFallbackIdentity)({
              entity: ddbEntryEntity,
-              tenant:
+              tenant: item.tenant,
+              retryOptions: {
+                onFailedAttempt: error => {
+                  logger.warn({
+                    error,
+                    item
+                  }, `getFallbackIdentity attempt #${error.attemptNumber} failed: ${error.message}`);
+                }
+              }
            });
-            (0, _ensureAllNonNullableValues.ensureAllNonNullableValues)(
+            (0, _ensureAllNonNullableValues.ensureAllNonNullableValues)(item, {
              dateTime: fallbackDateTime,
              identity: fallbackIdentity
            });
-            logger.trace(
+            logger.trace(`Successfully ensured all non-nullable meta fields have values (${item.modelId}/${item.id}). Will be saving into the database soon.`);
          } catch (e) {
-            logger.
+            logger.debug(`Failed to ensure all non-nullable meta fields have values (${item.modelId}/${item.id}): ${e.message}`);
          }
        }
-
-
-
-
-
-
-
-
-
-        const execute = () => {
-          return (0, _utils2.batchWriteAll)({
-            table: ddbEntryEntity.table,
-            items: ddbItemsToBatchWrite
-          });
-        };
-        logger.trace(`Storing ${ddbItemsToBatchWrite.length} record(s) in primary DynamoDB table...`);
-        await (0, _utils.executeWithRetry)(execute, {
-          onFailedAttempt: error => {
-            logger.warn(`Batch write attempt #${error.attemptNumber} failed: ${error.message}`);
+        ddbItemsToBatchWrite.push(ddbEntryEntity.putBatch(item));
+
+        /**
+         * Prepare the loading of DynamoDB Elasticsearch part of the records.
+         */
+
+        const ddbEsLatestRecordKey = `${item.entryId}:L`;
+        if (ddbEsItemsToBatchRead[ddbEsLatestRecordKey]) {
+          continue;
        }
-
-
-
-        const results = await waitUntilHealthy.wait({
-          async onUnhealthy(params) {
-            const shouldWaitReason = params.waitingReason.name;
-            logger.warn(`Cluster is unhealthy (${shouldWaitReason}). Waiting for the cluster to become healthy...`, params);
-            if (status.stats.esHealthChecks.unhealthyReasons[shouldWaitReason]) {
-              status.stats.esHealthChecks.unhealthyReasons[shouldWaitReason]++;
-            } else {
-              status.stats.esHealthChecks.unhealthyReasons[shouldWaitReason] = 1;
-            }
-          }
+        ddbEsItemsToBatchRead[ddbEsLatestRecordKey] = ddbEsEntryEntity.getBatch({
+          PK: item.PK,
+          SK: "L"
        });
-
-        status
-
-
-
-
+        const ddbEsPublishedRecordKey = `${item.entryId}:P`;
+        if (item.status === "published" || !!item.locked) {
+          ddbEsItemsToBatchRead[ddbEsPublishedRecordKey] = ddbEsEntryEntity.getBatch({
+            PK: item.PK,
+            SK: "P"
+          });
+        }
+      }
+      if (Object.keys(ddbEsItemsToBatchRead).length > 0) {
+        /**
+         * Get all the records from DynamoDB Elasticsearch.
+         */
+        const executeBatchReadAll = () => {
+          return (0, _utils2.batchReadAll)({
            table: ddbEsEntryEntity.table,
-          items:
+            items: Object.values(ddbEsItemsToBatchRead)
          });
        };
-        await (0, _utils.executeWithRetry)(
+        const ddbEsRecords = await (0, _utils.executeWithRetry)(executeBatchReadAll, {
          onFailedAttempt: error => {
-            logger.warn(
+            logger.warn({
+              error,
+              items: Object.values(ddbEsItemsToBatchRead)
+            }, `[DDB-ES Table] Batch (ddbEsItemsToBatchRead) read attempt #${error.attemptNumber} failed: ${error.message}`);
          }
        });
+        for (const ddbEsRecord of ddbEsRecords) {
+          const decompressedData = await (0, _getDecompressedData.getDecompressedData)(ddbEsRecord.data);
+          if (!decompressedData) {
+            logger.trace(`[DDB-ES Table] Skipping record "${ddbEsRecord.PK}" as it is not a valid CMS entry...`);
+            continue;
+          }
+
+          // 1. Check if the data migration was ever performed. If not, let's perform it.
+          if (!(0, _isMigratedEntry.isMigratedEntry)(decompressedData)) {
+            // Get the oldest revision's `createdOn` value. We use that to set the entry-level `createdOn` value.
+            const createdOn = await (0, _getOldestRevisionCreatedOn.getOldestRevisionCreatedOn)({
+              entry: {
+                ...decompressedData,
+                PK: ddbEsRecord.PK
+              },
+              entryEntity: ddbEntryEntity,
+              retryOptions: {
+                onFailedAttempt: error => {
+                  logger.warn({
+                    error,
+                    item: {
+                      ...decompressedData,
+                      PK: ddbEsRecord.PK
+                    }
+                  }, `[DDB-ES Table] getOldestRevisionCreatedOn attempt #${error.attemptNumber} failed: ${error.message}`);
+                }
+              }
+            });
+            const firstLastPublishedOnByFields = await (0, _getFirstLastPublishedOn.getFirstLastPublishedOnBy)({
+              entry: {
+                ...decompressedData,
+                PK: ddbEsRecord.PK
+              },
+              entryEntity: ddbEntryEntity,
+              retryOptions: {
+                onFailedAttempt: error => {
+                  logger.warn({
+                    error,
+                    item: {
+                      ...decompressedData,
+                      PK: ddbEsRecord.PK
+                    }
+                  }, `[DDB-ES Table] getFirstLastPublishedOnBy attempt #${error.attemptNumber} failed: ${error.message}`);
+                }
+              }
+            });
+            (0, _assignNewMetaFields.assignNewMetaFields)(decompressedData, {
+              createdOn,
+              ...firstLastPublishedOnByFields
+            });
+          }
+
+          // 2. Ensure new non-nullable meta fields have a value and nothing is missing.
+          if (!(0, _hasAllNonNullableValues.hasAllNonNullableValues)(decompressedData)) {
+            logger.trace((0, _getNonNullableFieldsWithMissingValues.getNonNullableFieldsWithMissingValues)(decompressedData), [`[DDB-ES Table] Detected an entry with missing values for non-nullable meta fields`, `(${decompressedData.modelId}/${decompressedData.id}).`].join(" "));
+            try {
+              const fallbackIdentity = await (0, _getFallbackIdentity.getFallbackIdentity)({
+                entity: ddbEntryEntity,
+                tenant: decompressedData.tenant,
+                retryOptions: {
+                  onFailedAttempt: error => {
+                    logger.warn({
+                      error,
+                      item: ddbEntryEntity
+                    }, `[DDB-ES Table] getFallbackIdentity attempt #${error.attemptNumber} failed: ${error.message}`);
+                  }
+                }
+              });
+              (0, _ensureAllNonNullableValues.ensureAllNonNullableValues)(decompressedData, {
+                dateTime: fallbackDateTime,
+                identity: fallbackIdentity
+              });
+              logger.trace([`[DDB-ES Table] Successfully ensured all non-nullable meta fields`, `have values (${decompressedData.modelId}/${decompressedData.id}).`, "Will be saving the changes soon."].join(" "));
+            } catch (e) {
+              logger.error(["[DDB-ES Table] Failed to ensure all non-nullable meta fields have values", `(${decompressedData.modelId}/${decompressedData.id}): ${e.message}`].join(" "));
+            }
+          }
+          const compressedData = await (0, _getCompressedData.getCompressedData)(decompressedData);
+          ddbEsItemsToBatchWrite.push(ddbEsEntryEntity.putBatch({
+            ...ddbEsRecord,
+            data: compressedData
+          }));
+        }
      }
-
-
+      if (ddbItemsToBatchWrite.length) {
+        let ddbWriteError = false;
+        let ddbEsWriteError = false;

-
-
-
-
-
-
-
-
-
+        // Store data in primary DynamoDB table.
+        const execute = () => {
+          return (0, _utils2.batchWriteAll)({
+            table: ddbEntryEntity.table,
+            items: ddbItemsToBatchWrite
+          }, BATCH_WRITE_MAX_CHUNK);
+        };
+        logger.trace(`Storing ${ddbItemsToBatchWrite.length} record(s) in primary DynamoDB table...`);
+        try {
+          await (0, _utils.executeWithRetry)(execute, {
+            onFailedAttempt: error => {
+              logger.warn(`Batch write attempt #${error.attemptNumber} failed: ${error.message}`);
+            }
+          });
+        } catch (e) {
+          ddbWriteError = true;
+          logger.error({
+            error: e,
+            ddbItemsToBatchWrite
+          }, "After multiple retries, failed to batch-store records in primary DynamoDB table.");
+        }
+        if (ddbEsItemsToBatchWrite.length) {
+          logger.trace(`Storing ${ddbEsItemsToBatchWrite.length} record(s) in DDB-ES DynamoDB table...`);
+          try {
+            const results = await waitUntilHealthy.wait({
+              async onUnhealthy(params) {
+                const shouldWaitReason = params.waitingReason.name;
+                logger.warn(`Cluster is unhealthy (${shouldWaitReason}). Waiting for the cluster to become healthy...`, params);
+                if (status.stats.esHealthChecks.unhealthyReasons[shouldWaitReason]) {
+                  status.stats.esHealthChecks.unhealthyReasons[shouldWaitReason]++;
+                } else {
+                  status.stats.esHealthChecks.unhealthyReasons[shouldWaitReason] = 1;
+                }
+              }
+            });
+            status.stats.esHealthChecks.checksCount++;
+            status.stats.esHealthChecks.timeSpentWaiting += results.runningTime;

-
-
-
+            // Store data in DDB-ES DynamoDB table.
+            const executeDdbEs = () => {
+              return (0, _utils2.batchWriteAll)({
+                table: ddbEsEntryEntity.table,
+                items: ddbEsItemsToBatchWrite
+              }, BATCH_WRITE_MAX_CHUNK);
+            };
+            await (0, _utils.executeWithRetry)(executeDdbEs, {
+              onFailedAttempt: error => {
+                logger.warn(`[DDB-ES Table] Batch write attempt #${error.attemptNumber} failed: ${error.message}`);
+              }
+            });
+          } catch (e) {
+            ddbEsWriteError = true;
+            logger.error({
+              error: e,
+              ddbEsItemsToBatchWrite
+            }, "After multiple retries, failed to batch-store records in DDB-ES DynamoDB table.");
+          }
+        }
+        if (ddbEsWriteError || ddbWriteError) {
+          logger.warn('Not increasing the "recordsUpdated" count due to write errors.');
+        } else {
+          status.stats.recordsUpdated += ddbItemsToBatchWrite.length;
+        }
+      }

-
-
-
-
-
+      // Update checkpoint after every batch.
+      let lastEvaluatedKey = true;
+      if (result.lastEvaluatedKey) {
+        lastEvaluatedKey = result.lastEvaluatedKey;
+      }
+      status.lastEvaluatedKey = lastEvaluatedKey;
+      if (lastEvaluatedKey === true) {
+        return false;
+      }

-
-
-
+      // Continue further scanning.
+      return true;
+    }, {
+      retry: {
+        onFailedAttempt: error => {
+          logger.warn({
+            lastEvaluatedKey: status.lastEvaluatedKey,
+            error
+          }, `ddbScanWithCallback attempt #${error.attemptNumber} failed: ${error.message}`);
+        }
+      }
+    });
+
+    // Store status in tmp file.
+    logger.trace({
+      status
+    }, "Segment processing completed. Saving status to tmp file...");
+    const logFilePath = _path.default.join(_os.default.tmpdir(), `webiny-5-39-6-meta-fields-data-migration-log-${argv.runId}-${argv.segmentIndex}.log`);
+
+    // Save segment processing stats to a file.
+    _fs.default.writeFileSync(logFilePath, JSON.stringify(status.stats, null, 2));
+    logger.trace(`Segment processing stats saved in ${logFilePath}.`);
+  } catch (error) {
+    // Store status in tmp file.
+    logger.error({
+      status,
+      error
+    }, "Segment processing failed to complete. Saving status to tmp file...");
+    const logFilePath = _path.default.join(_os.default.tmpdir(), `webiny-5-39-6-meta-fields-data-migration-log-${argv.runId}-${argv.segmentIndex}.log`);
+
+    // Save segment processing stats to a file.
+    _fs.default.writeFileSync(logFilePath, JSON.stringify(status.stats, null, 2));
+    logger.trace(`Segment processing stats saved in ${logFilePath}.`);
+  }
 })();

 //# sourceMappingURL=worker.js.map

package/migrations/5.39.6/001/ddb-es/worker.js.map
@@ -1 +1 @@
 (regenerated single-line source map; JSON content omitted)
C3B,OAAQ,IAAG2B,gBAAgB,CAAC1B,EAAG,IAAG,CAC1D,CAAC6B,IAAI,CAAC,GAAG,CACd,CAAC;UAED,IAAI;YACA,MAAM5B,gBAAgB,GAAG,MAAM,IAAAC,wCAAmB,EAAC;cAC/CpC,MAAM,EAAEb,cAAc;cACtBkD,MAAM,EAAEuB,gBAAgB,CAACvB;YAC7B,CAAC,CAAC;YAEF,IAAAC,sDAA0B,EAACsB,gBAAgB,EAAE;cACzCrB,QAAQ,EAAEvB,gBAAgB;cAC1BwB,QAAQ,EAAEL;YACd,CAAC,CAAC;YAEFlE,MAAM,CAAC2C,KAAK,CACR,CACK,kEAAiE,EACjE,gBAAegD,gBAAgB,CAAC3B,OAAQ,IAAG2B,gBAAgB,CAAC1B,EAAG,IAAG,EACnE,kCAAkC,CACrC,CAAC6B,IAAI,CAAC,GAAG,CACd,CAAC;UACL,CAAC,CAAC,OAAOtB,CAAC,EAAE;YACRxE,MAAM,CAAC+F,KAAK,CACR,CACI,0EAA0E,EACzE,IAAGJ,gBAAgB,CAAC3B,OAAQ,IAAG2B,gBAAgB,CAAC1B,EAAG,MAAKO,CAAC,CAACE,OAAQ,EAAC,CACvE,CAACoB,IAAI,CAAC,GAAG,CACd,CAAC;UACL;QACJ;QAEA,MAAME,cAAc,GAAG,MAAM,IAAAC,oCAAiB,EAACN,gBAAgB,CAAC;QAEhE9C,sBAAsB,CAAC8B,IAAI,CACvBvD,gBAAgB,CAACwD,QAAQ,CAAC;UACtB,GAAGc,WAAW;UACdG,IAAI,EAAEG;QACV,CAAC,CACL,CAAC;MACL;IACJ;IAEA,IAAIpD,oBAAoB,CAACF,MAAM,EAAE;MAC7B;MACA,MAAMwD,OAAO,GAAGA,CAAA,KAAM;QAClB,OAAO,IAAAC,qBAAa,EAAC;UACjBX,KAAK,EAAEtE,cAAc,CAACsE,KAAK;UAC3B/C,KAAK,EAAEG;QACX,CAAC,CAAC;MACN,CAAC;MAED5C,MAAM,CAAC2C,KAAK,CACP,WAAUC,oBAAoB,CAACF,MAAO,yCAC3C,CAAC;MACD,MAAM,IAAA0D,uBAAgB,EAACF,OAAO,EAAE;QAC5BG,eAAe,EAAEN,KAAK,IAAI;UACtB/F,MAAM,CAACsG,IAAI,CACN,wBAAuBP,KAAK,CAACQ,aAAc,YAAWR,KAAK,CAACrB,OAAQ,EACzE,CAAC;QACL;MACJ,CAAC,CAAC;MAEF,IAAI7B,sBAAsB,CAACH,MAAM,EAAE;QAC/B1C,MAAM,CAAC2C,KAAK,CACP,WAAUE,sBAAsB,CAACH,MAAO,wCAC7C,CAAC;QACD,MAAM8D,OAAO,GAAG,MAAMjF,gBAAgB,CAACkF,IAAI,CAAC;UACxC,MAAMC,WAAWA,CAACC,MAAM,EAAE;YACtB,MAAMC,gBAAgB,GAAGD,MAAM,CAACE,aAAa,CAAC7F,IAAI;YAElDhB,MAAM,CAACsG,IAAI,CACN,yBAAwBM,gBAAiB,iDAAgD,EAC1FD,MACJ,CAAC;YAED,IAAIrF,MAAM,CAAC/B,KAAK,CAACK,cAAc,CAACG,gBAAgB,CAAC6G,gBAAgB,CAAC,EAAE;cAChEtF,MAAM,CAAC/B,KAAK,CAACK,cAAc,CAACG,gBAAgB,CAAC6G,gBAAgB,CAAC,EAAE;YACpE,CAAC,MAAM;cACHtF,MAAM,CAAC/B,KAAK,CAACK,cAAc,CAACG,gBAAgB,CAAC6G,gBAAgB,CAAC,GAAG,CAAC;YACtE;UACJ;QACJ,CAAC,CAAC;QAEFtF,MAAM,CAAC/B,KAAK,CAACK,cAAc,CAACE,WAAW,EAAE;QACzCwB,MAAM,CAAC/B,KAAK,CAACK,cAAc,CAACC,gBAAgB,IAAI2G,OAAO,CAACM,WAAW;;QAEnE;QACA,MAAMC,YAAY,GAAGA,CAAA,KAAM;UACvB,OAAO,IAAAZ,qBAAa,EAAC;YACjBX,KAAK,EAAEpE,gBAAgB,CAACoE,KAAK;YAC7B/C,KAAK,EAAEI;UACX,CAAC,CAAC;QACN,CAAC;QAED,MAAM,IAAAuD,uBAAgB,EAACW,YAAY,EAAE;UACjCV,eAAe,EAAEN,KAAK,IAAI;YACtB/F,MAAM,CAACsG,IAAI,CACN,uCAAsCP,KAAK,CAACQ,aAAc,YAAWR,KAAK,CAACrB,OAAQ,EACxF,CAAC;UACL;QACJ,CAAC,CAAC;MACN;MAEApD,MAAM,CAAC/B,KAAK,CAACG,cAAc,IAAIkD,oBAAoB,CAACF,MAAM;IAC9D;;IAEA;IACA,IAAIpD,gBAAkC,GAAG,IAAI;IAC7C,IAAIkD,MAAM,CAAClD,gBAAgB,EAAE;MACzBA,gBAAgB,GAAGkD,MAAM,CAAClD,gBAAqD;IACnF;IAEAgC,MAAM,CAAChC,gBAAgB,GAAGA,gBAAgB;IAE1C,IAAIA,gBAAgB,KAAK,IAAI,EAAE;MAC3B,OAAO,KAAK;IAChB;;IAEA;IACA,OAAO,IAAI;EACf,CACJ,CAAC;;EAED;EACAU,MAAM,CAAC2C,KAAK,CAAC;IAAErB;EAAO,CAAC,EAAE,4DAA4D,CAAC;EACtF,MAAM0F,WAAW,GAAGC,aAAI,CAACnB,IAAI,CACzBoB,WAAE,CAACC,MAAM,CAAC,CAAC,EACV,gDAA+CjJ,IAAI,CAACK,KAAM,IAAGL,IAAI,CAACW,YAAa,MACpF,CAAC;;EAED;EACAuI,WAAE,CAACC,aAAa,CAACL,WAAW,EAAEM,IAAI,CAACC,SAAS,CAACjG,MAAM,CAAC/B,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;EAEpES,MAAM,CAAC2C,KAAK,CAAE,qCAAoCqE,WAAY,GAAE,CAAC;AACrE,CAAC,EAAE,CAAC","ignoreList":[]}
|
|
1
|
+
{"version":3,"names":["_utils","require","_logger","_dataMigration","_clientDynamodb","_apiElasticsearch","_yargs","_interopRequireDefault","_helpers","_isMigratedEntry","_hasValidTypeFieldValue","_hasAllNonNullableValues","_getOldestRevisionCreatedOn","_getFirstLastPublishedOn","_assignNewMetaFields","_fixTypeFieldValue","_getFallbackIdentity","_ensureAllNonNullableValues","_getDecompressedData","_getCompressedData","_createEntryEntity","_utils2","_waitUntilHealthy","_pinoPretty","_path","_os","_fs","_getNonNullableFieldsWithMissingValues","argv","yargs","hideBin","process","options","runId","type","demandOption","ddbTable","ddbEsTable","esEndpoint","segmentIndex","totalSegments","esHealthMinClusterHealthStatus","esHealthMaxProcessorPercent","esHealthMaxRamPercent","esHealthMaxWaitingTime","esHealthWaitingTimeStep","parseSync","createInitialStatus","lastEvaluatedKey","stats","iterationsCount","recordsScanned","recordsUpdated","recordsSkipped","esHealthChecks","timeSpentWaiting","checksCount","unhealthyReasons","BATCH_WRITE_MAX_CHUNK","env","WEBINY_MIGRATION_5_39_6_001_BATCH_WRITE_MAX_CHUNK","parseInt","logger","createPinoLogger","level","getLogLevel","MIGRATIONS_LOG_LEVEL","msgPrefix","pinoPretty","ignore","documentClient","getDocumentClient","elasticsearchClient","createElasticsearchClient","endpoint","primaryTable","createTable","name","dynamoToEsTable","ddbEntryEntity","createDdbEntryEntity","ddbEsEntryEntity","createDdbEsEntryEntity","status","waitUntilHealthy","createWaitUntilHealthy","minClusterHealthStatus","maxProcessorPercent","maxRamPercent","maxWaitingTime","waitingTimeStep","ddbScanWithCallback","entity","segment","segments","filters","attr","eq","startKey","undefined","limit","result","items","length","trace","ddbItemsToBatchWrite","ddbEsItemsToBatchWrite","ddbEsItemsToBatchRead","fallbackDateTime","Date","toISOString","item","isFullyMigrated","isMigratedEntry","hasValidTypeFieldValue","hasAllNonNullableValues","createdOn","getOldestRevisionCreatedOn","entry","entryEntity","retryOptions","onFailedAttempt","error","warn","attemptNumber","message","firstLastPublishedOnByFields","getFirstLastPublishedOnBy","assignNewMetaFields","fixTypeFieldValue","getNonNullableFieldsWithMissingValues","modelId","id","fallbackIdentity","getFallbackIdentity","tenant","ensureAllNonNullableValues","dateTime","identity","e","debug","push","putBatch","ddbEsLatestRecordKey","entryId","getBatch","PK","SK","ddbEsPublishedRecordKey","locked","Object","keys","executeBatchReadAll","batchReadAll","table","values","ddbEsRecords","executeWithRetry","ddbEsRecord","decompressedData","getDecompressedData","data","join","compressedData","getCompressedData","ddbWriteError","ddbEsWriteError","execute","batchWriteAll","results","wait","onUnhealthy","params","shouldWaitReason","waitingReason","runningTime","executeDdbEs","retry","logFilePath","path","os","tmpdir","fs","writeFileSync","JSON","stringify"],"sources":["worker.ts"],"sourcesContent":["import { executeWithRetry } from \"@webiny/utils\";\nimport { createPinoLogger, getLogLevel } from \"@webiny/logger\";\nimport { createTable } from \"@webiny/data-migration\";\nimport { getDocumentClient } from \"@webiny/aws-sdk/client-dynamodb\";\nimport { createElasticsearchClient } from \"@webiny/api-elasticsearch\";\nimport yargs from \"yargs/yargs\";\nimport { hideBin } from \"yargs/helpers\";\nimport { isMigratedEntry } from \"~/migrations/5.39.0/001/utils/isMigratedEntry\";\nimport { hasValidTypeFieldValue } from 
\"~/migrations/5.39.0/001/utils/hasValidTypeFieldValue\";\nimport { hasAllNonNullableValues } from \"~/migrations/5.39.0/001/utils/hasAllNonNullableValues\";\nimport { getOldestRevisionCreatedOn } from \"~/migrations/5.39.0/001/utils/getOldestRevisionCreatedOn\";\nimport { getFirstLastPublishedOnBy } from \"~/migrations/5.39.0/001/utils/getFirstLastPublishedOn\";\nimport { assignNewMetaFields } from \"~/migrations/5.39.0/001/utils/assignNewMetaFields\";\nimport { fixTypeFieldValue } from \"~/migrations/5.39.0/001/utils/fixTypeFieldValue\";\nimport { getFallbackIdentity } from \"~/migrations/5.39.0/001/utils/getFallbackIdentity\";\nimport { ensureAllNonNullableValues } from \"~/migrations/5.39.0/001/utils/ensureAllNonNullableValues\";\nimport { getDecompressedData } from \"~/migrations/5.39.0/001/utils/getDecompressedData\";\nimport { getCompressedData } from \"~/migrations/5.39.0/001/utils/getCompressedData\";\nimport { CmsEntry } from \"~/migrations/5.39.0/001/types\";\nimport {\n createDdbEntryEntity,\n createDdbEsEntryEntity\n} from \"~/migrations/5.39.0/001/entities/createEntryEntity\";\nimport {\n batchReadAll,\n BatchReadItem,\n batchWriteAll,\n BatchWriteItem,\n ddbScanWithCallback\n} from \"~/utils\";\nimport { createWaitUntilHealthy } from \"@webiny/api-elasticsearch/utils/waitUntilHealthy\";\nimport pinoPretty from \"pino-pretty\";\nimport { EsHealthChecksParams } from \"~/migrations/5.39.6/001/ddb-es/utils\";\nimport path from \"path\";\nimport os from \"os\";\nimport fs from \"fs\";\nimport { getNonNullableFieldsWithMissingValues } from \"~/migrations/5.39.0/001/utils/getNonNullableFieldsWithMissingValues\";\n\nconst argv = yargs(hideBin(process.argv))\n .options({\n runId: { type: \"string\", demandOption: true },\n ddbTable: { type: \"string\", demandOption: true },\n ddbEsTable: { type: \"string\", demandOption: true },\n esEndpoint: { type: \"string\", demandOption: true },\n segmentIndex: { type: \"number\", demandOption: true },\n totalSegments: { type: \"number\", demandOption: true },\n\n // Elasticsearch health check options.\n esHealthMinClusterHealthStatus: { type: \"string\", demandOption: true },\n esHealthMaxProcessorPercent: { type: \"number\", demandOption: true },\n esHealthMaxRamPercent: { type: \"number\", demandOption: true },\n esHealthMaxWaitingTime: { type: \"number\", demandOption: true },\n esHealthWaitingTimeStep: { type: \"number\", demandOption: true }\n })\n .parseSync();\n\ninterface LastEvaluatedKeyObject {\n PK: string;\n SK: string;\n GSI1_PK: string;\n GSI1_SK: string;\n}\n\ntype LastEvaluatedKey = LastEvaluatedKeyObject | true | null;\n\ninterface MigrationStatus {\n lastEvaluatedKey: LastEvaluatedKey;\n stats: {\n iterationsCount: number;\n recordsScanned: number;\n recordsUpdated: number;\n recordsSkipped: number;\n esHealthChecks: {\n timeSpentWaiting: number;\n checksCount: number;\n unhealthyReasons: {\n [key: string]: number;\n };\n };\n };\n}\n\ninterface DynamoDbElasticsearchRecord {\n PK: string;\n SK: string;\n data: string;\n}\n\nconst createInitialStatus = (): MigrationStatus => {\n return {\n lastEvaluatedKey: null,\n stats: {\n iterationsCount: 0,\n recordsScanned: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n esHealthChecks: {\n timeSpentWaiting: 0,\n checksCount: 0,\n unhealthyReasons: {}\n }\n }\n };\n};\n\nlet BATCH_WRITE_MAX_CHUNK = 20;\nif (process.env.WEBINY_MIGRATION_5_39_6_001_BATCH_WRITE_MAX_CHUNK) {\n BATCH_WRITE_MAX_CHUNK = parseInt(process.env.WEBINY_MIGRATION_5_39_6_001_BATCH_WRITE_MAX_CHUNK);\n}\n\n(async () => 
{\n const logger = createPinoLogger(\n {\n level: getLogLevel(process.env.MIGRATIONS_LOG_LEVEL, \"trace\"),\n msgPrefix: `[segment #${argv.segmentIndex}] `\n },\n pinoPretty({ ignore: \"pid,hostname\" })\n );\n\n const documentClient = getDocumentClient();\n const elasticsearchClient = createElasticsearchClient({\n endpoint: `https://${argv.esEndpoint}`\n });\n\n const primaryTable = createTable({\n name: argv.ddbTable,\n documentClient\n });\n const dynamoToEsTable = createTable({\n name: argv.ddbEsTable,\n documentClient\n });\n\n const ddbEntryEntity = createDdbEntryEntity(primaryTable);\n const ddbEsEntryEntity = createDdbEsEntryEntity(dynamoToEsTable);\n\n const status = createInitialStatus();\n\n const waitUntilHealthy = createWaitUntilHealthy(elasticsearchClient, {\n minClusterHealthStatus:\n argv.esHealthMinClusterHealthStatus as EsHealthChecksParams[\"minClusterHealthStatus\"],\n maxProcessorPercent: argv.esHealthMaxProcessorPercent,\n maxRamPercent: argv.esHealthMaxRamPercent,\n maxWaitingTime: argv.esHealthMaxWaitingTime,\n waitingTimeStep: argv.esHealthWaitingTimeStep\n });\n\n try {\n await ddbScanWithCallback<CmsEntry>(\n {\n entity: ddbEntryEntity,\n options: {\n segment: argv.segmentIndex,\n segments: argv.totalSegments,\n filters: [\n {\n attr: \"_et\",\n eq: \"CmsEntries\"\n }\n ],\n startKey: status.lastEvaluatedKey || undefined,\n limit: 100\n }\n },\n async result => {\n status.stats.iterationsCount++;\n status.stats.recordsScanned += result.items.length;\n\n if (status.stats.iterationsCount % 5 === 0) {\n // We log every 5th iteration.\n logger.trace(\n `[iteration #${status.stats.iterationsCount}] Reading ${result.items.length} record(s)...`\n );\n }\n\n const ddbItemsToBatchWrite: BatchWriteItem[] = [];\n const ddbEsItemsToBatchWrite: BatchWriteItem[] = [];\n const ddbEsItemsToBatchRead: Record<string, BatchReadItem> = {};\n\n const fallbackDateTime = new Date().toISOString();\n\n // Update records in primary DynamoDB table. Also do preparations for\n // subsequent updates on DDB-ES DynamoDB table, and in Elasticsearch.\n for (const item of result.items) {\n const isFullyMigrated =\n isMigratedEntry(item) &&\n hasValidTypeFieldValue(item) &&\n hasAllNonNullableValues(item);\n\n if (isFullyMigrated) {\n status.stats.recordsSkipped++;\n continue;\n }\n\n // 1. Check if the data migration was ever performed. If not, let's perform it.\n if (!isMigratedEntry(item)) {\n // Get the oldest revision's `createdOn` value. We use that to set the entry-level `createdOn` value.\n const createdOn = await getOldestRevisionCreatedOn({\n entry: item,\n entryEntity: ddbEntryEntity,\n retryOptions: {\n onFailedAttempt: error => {\n logger.warn(\n { error, item },\n `getOldestRevisionCreatedOn attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n });\n\n const firstLastPublishedOnByFields = await getFirstLastPublishedOnBy({\n entry: item,\n entryEntity: ddbEntryEntity,\n retryOptions: {\n onFailedAttempt: error => {\n logger.warn(\n { error, item },\n `getFirstLastPublishedOnBy attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n });\n\n assignNewMetaFields(item, {\n createdOn,\n ...firstLastPublishedOnByFields\n });\n }\n\n // 2. We've noticed some of the records had an invalid `TYPE` field value\n // in the database. This step addresses this issue.\n if (!hasValidTypeFieldValue(item)) {\n // Fixes the value of the `TYPE` field, if it's not valid.\n fixTypeFieldValue(item);\n }\n\n // 3. 
Finally, once both of the steps were performed, ensure that all\n // new non-nullable meta fields have a value and nothing is missing.\n if (!hasAllNonNullableValues(item)) {\n logger.trace(\n getNonNullableFieldsWithMissingValues(item),\n `Detected an entry with missing values for non-nullable meta fields (${item.modelId}/${item.id}).`\n );\n\n try {\n const fallbackIdentity = await getFallbackIdentity({\n entity: ddbEntryEntity,\n tenant: item.tenant,\n retryOptions: {\n onFailedAttempt: error => {\n logger.warn(\n { error, item },\n `getFallbackIdentity attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n });\n\n ensureAllNonNullableValues(item, {\n dateTime: fallbackDateTime,\n identity: fallbackIdentity\n });\n\n logger.trace(\n `Successfully ensured all non-nullable meta fields have values (${item.modelId}/${item.id}). Will be saving into the database soon.`\n );\n } catch (e) {\n logger.debug(\n `Failed to ensure all non-nullable meta fields have values (${item.modelId}/${item.id}): ${e.message}`\n );\n }\n }\n\n ddbItemsToBatchWrite.push(ddbEntryEntity.putBatch(item));\n\n /**\n * Prepare the loading of DynamoDB Elasticsearch part of the records.\n */\n\n const ddbEsLatestRecordKey = `${item.entryId}:L`;\n if (ddbEsItemsToBatchRead[ddbEsLatestRecordKey]) {\n continue;\n }\n\n ddbEsItemsToBatchRead[ddbEsLatestRecordKey] = ddbEsEntryEntity.getBatch({\n PK: item.PK,\n SK: \"L\"\n });\n\n const ddbEsPublishedRecordKey = `${item.entryId}:P`;\n if (item.status === \"published\" || !!item.locked) {\n ddbEsItemsToBatchRead[ddbEsPublishedRecordKey] = ddbEsEntryEntity.getBatch({\n PK: item.PK,\n SK: \"P\"\n });\n }\n }\n\n if (Object.keys(ddbEsItemsToBatchRead).length > 0) {\n /**\n * Get all the records from DynamoDB Elasticsearch.\n */\n const executeBatchReadAll = () => {\n return batchReadAll<DynamoDbElasticsearchRecord>({\n table: ddbEsEntryEntity.table,\n items: Object.values(ddbEsItemsToBatchRead)\n });\n };\n\n const ddbEsRecords = await executeWithRetry(executeBatchReadAll, {\n onFailedAttempt: error => {\n logger.warn(\n { error, items: Object.values(ddbEsItemsToBatchRead) },\n `[DDB-ES Table] Batch (ddbEsItemsToBatchRead) read attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n });\n\n for (const ddbEsRecord of ddbEsRecords) {\n const decompressedData = await getDecompressedData<CmsEntry>(\n ddbEsRecord.data\n );\n if (!decompressedData) {\n logger.trace(\n `[DDB-ES Table] Skipping record \"${ddbEsRecord.PK}\" as it is not a valid CMS entry...`\n );\n continue;\n }\n\n // 1. Check if the data migration was ever performed. If not, let's perform it.\n if (!isMigratedEntry(decompressedData)) {\n // Get the oldest revision's `createdOn` value. 
We use that to set the entry-level `createdOn` value.\n const createdOn = await getOldestRevisionCreatedOn({\n entry: { ...decompressedData, PK: ddbEsRecord.PK },\n entryEntity: ddbEntryEntity,\n retryOptions: {\n onFailedAttempt: error => {\n logger.warn(\n {\n error,\n item: { ...decompressedData, PK: ddbEsRecord.PK }\n },\n `[DDB-ES Table] getOldestRevisionCreatedOn attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n });\n\n const firstLastPublishedOnByFields = await getFirstLastPublishedOnBy({\n entry: { ...decompressedData, PK: ddbEsRecord.PK },\n entryEntity: ddbEntryEntity,\n retryOptions: {\n onFailedAttempt: error => {\n logger.warn(\n {\n error,\n item: { ...decompressedData, PK: ddbEsRecord.PK }\n },\n `[DDB-ES Table] getFirstLastPublishedOnBy attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n });\n\n assignNewMetaFields(decompressedData, {\n createdOn,\n ...firstLastPublishedOnByFields\n });\n }\n\n // 2. Ensure new non-nullable meta fields have a value and nothing is missing.\n if (!hasAllNonNullableValues(decompressedData)) {\n logger.trace(\n getNonNullableFieldsWithMissingValues(decompressedData),\n [\n `[DDB-ES Table] Detected an entry with missing values for non-nullable meta fields`,\n `(${decompressedData.modelId}/${decompressedData.id}).`\n ].join(\" \")\n );\n\n try {\n const fallbackIdentity = await getFallbackIdentity({\n entity: ddbEntryEntity,\n tenant: decompressedData.tenant,\n retryOptions: {\n onFailedAttempt: error => {\n logger.warn(\n { error, item: ddbEntryEntity },\n `[DDB-ES Table] getFallbackIdentity attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n });\n\n ensureAllNonNullableValues(decompressedData, {\n dateTime: fallbackDateTime,\n identity: fallbackIdentity\n });\n\n logger.trace(\n [\n `[DDB-ES Table] Successfully ensured all non-nullable meta fields`,\n `have values (${decompressedData.modelId}/${decompressedData.id}).`,\n \"Will be saving the changes soon.\"\n ].join(\" \")\n );\n } catch (e) {\n logger.error(\n [\n \"[DDB-ES Table] Failed to ensure all non-nullable meta fields have values\",\n `(${decompressedData.modelId}/${decompressedData.id}): ${e.message}`\n ].join(\" \")\n );\n }\n }\n\n const compressedData = await getCompressedData(decompressedData);\n\n ddbEsItemsToBatchWrite.push(\n ddbEsEntryEntity.putBatch({\n ...ddbEsRecord,\n data: compressedData\n })\n );\n }\n }\n\n if (ddbItemsToBatchWrite.length) {\n let ddbWriteError = false;\n let ddbEsWriteError = false;\n\n // Store data in primary DynamoDB table.\n const execute = () => {\n return batchWriteAll(\n {\n table: ddbEntryEntity.table,\n items: ddbItemsToBatchWrite\n },\n BATCH_WRITE_MAX_CHUNK\n );\n };\n\n logger.trace(\n `Storing ${ddbItemsToBatchWrite.length} record(s) in primary DynamoDB table...`\n );\n\n try {\n await executeWithRetry(execute, {\n onFailedAttempt: error => {\n logger.warn(\n `Batch write attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n });\n } catch (e) {\n ddbWriteError = true;\n logger.error(\n {\n error: e,\n ddbItemsToBatchWrite\n },\n \"After multiple retries, failed to batch-store records in primary DynamoDB table.\"\n );\n }\n\n if (ddbEsItemsToBatchWrite.length) {\n logger.trace(\n `Storing ${ddbEsItemsToBatchWrite.length} record(s) in DDB-ES DynamoDB table...`\n );\n\n try {\n const results = await waitUntilHealthy.wait({\n async onUnhealthy(params) {\n const shouldWaitReason = params.waitingReason.name;\n\n logger.warn(\n `Cluster is unhealthy 
(${shouldWaitReason}). Waiting for the cluster to become healthy...`,\n params\n );\n\n if (\n status.stats.esHealthChecks.unhealthyReasons[\n shouldWaitReason\n ]\n ) {\n status.stats.esHealthChecks.unhealthyReasons[\n shouldWaitReason\n ]++;\n } else {\n status.stats.esHealthChecks.unhealthyReasons[\n shouldWaitReason\n ] = 1;\n }\n }\n });\n\n status.stats.esHealthChecks.checksCount++;\n status.stats.esHealthChecks.timeSpentWaiting += results.runningTime;\n\n // Store data in DDB-ES DynamoDB table.\n const executeDdbEs = () => {\n return batchWriteAll(\n {\n table: ddbEsEntryEntity.table,\n items: ddbEsItemsToBatchWrite\n },\n BATCH_WRITE_MAX_CHUNK\n );\n };\n\n await executeWithRetry(executeDdbEs, {\n onFailedAttempt: error => {\n logger.warn(\n `[DDB-ES Table] Batch write attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n });\n } catch (e) {\n ddbEsWriteError = true;\n logger.error(\n {\n error: e,\n ddbEsItemsToBatchWrite\n },\n \"After multiple retries, failed to batch-store records in DDB-ES DynamoDB table.\"\n );\n }\n }\n\n if (ddbEsWriteError || ddbWriteError) {\n logger.warn(\n 'Not increasing the \"recordsUpdated\" count due to write errors.'\n );\n } else {\n status.stats.recordsUpdated += ddbItemsToBatchWrite.length;\n }\n }\n\n // Update checkpoint after every batch.\n let lastEvaluatedKey: LastEvaluatedKey = true;\n if (result.lastEvaluatedKey) {\n lastEvaluatedKey = result.lastEvaluatedKey as unknown as LastEvaluatedKeyObject;\n }\n\n status.lastEvaluatedKey = lastEvaluatedKey;\n\n if (lastEvaluatedKey === true) {\n return false;\n }\n\n // Continue further scanning.\n return true;\n },\n {\n retry: {\n onFailedAttempt: error => {\n logger.warn(\n {\n lastEvaluatedKey: status.lastEvaluatedKey,\n error\n },\n `ddbScanWithCallback attempt #${error.attemptNumber} failed: ${error.message}`\n );\n }\n }\n }\n );\n\n // Store status in tmp file.\n logger.trace({ status }, \"Segment processing completed. Saving status to tmp file...\");\n const logFilePath = path.join(\n os.tmpdir(),\n `webiny-5-39-6-meta-fields-data-migration-log-${argv.runId}-${argv.segmentIndex}.log`\n );\n\n // Save segment processing stats to a file.\n fs.writeFileSync(logFilePath, JSON.stringify(status.stats, null, 2));\n\n logger.trace(`Segment processing stats saved in ${logFilePath}.`);\n } catch (error) {\n // Store status in tmp file.\n logger.error(\n { status, error },\n \"Segment processing failed to complete. 
Saving status to tmp file...\"\n );\n const logFilePath = path.join(\n os.tmpdir(),\n `webiny-5-39-6-meta-fields-data-migration-log-${argv.runId}-${argv.segmentIndex}.log`\n );\n\n // Save segment processing stats to a file.\n fs.writeFileSync(logFilePath, JSON.stringify(status.stats, null, 2));\n\n logger.trace(`Segment processing stats saved in ${logFilePath}.`);\n }\n})();\n"],"mappings":";;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,cAAA,GAAAF,OAAA;AACA,IAAAG,eAAA,GAAAH,OAAA;AACA,IAAAI,iBAAA,GAAAJ,OAAA;AACA,IAAAK,MAAA,GAAAC,sBAAA,CAAAN,OAAA;AACA,IAAAO,QAAA,GAAAP,OAAA;AACA,IAAAQ,gBAAA,GAAAR,OAAA;AACA,IAAAS,uBAAA,GAAAT,OAAA;AACA,IAAAU,wBAAA,GAAAV,OAAA;AACA,IAAAW,2BAAA,GAAAX,OAAA;AACA,IAAAY,wBAAA,GAAAZ,OAAA;AACA,IAAAa,oBAAA,GAAAb,OAAA;AACA,IAAAc,kBAAA,GAAAd,OAAA;AACA,IAAAe,oBAAA,GAAAf,OAAA;AACA,IAAAgB,2BAAA,GAAAhB,OAAA;AACA,IAAAiB,oBAAA,GAAAjB,OAAA;AACA,IAAAkB,kBAAA,GAAAlB,OAAA;AAEA,IAAAmB,kBAAA,GAAAnB,OAAA;AAIA,IAAAoB,OAAA,GAAApB,OAAA;AAOA,IAAAqB,iBAAA,GAAArB,OAAA;AACA,IAAAsB,WAAA,GAAAhB,sBAAA,CAAAN,OAAA;AAEA,IAAAuB,KAAA,GAAAjB,sBAAA,CAAAN,OAAA;AACA,IAAAwB,GAAA,GAAAlB,sBAAA,CAAAN,OAAA;AACA,IAAAyB,GAAA,GAAAnB,sBAAA,CAAAN,OAAA;AACA,IAAA0B,sCAAA,GAAA1B,OAAA;AAEA,MAAM2B,IAAI,GAAG,IAAAC,cAAK,EAAC,IAAAC,gBAAO,EAACC,OAAO,CAACH,IAAI,CAAC,CAAC,CACpCI,OAAO,CAAC;EACLC,KAAK,EAAE;IAAEC,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAC7CC,QAAQ,EAAE;IAAEF,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAChDE,UAAU,EAAE;IAAEH,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAClDG,UAAU,EAAE;IAAEJ,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAClDI,YAAY,EAAE;IAAEL,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EACpDK,aAAa,EAAE;IAAEN,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAErD;EACAM,8BAA8B,EAAE;IAAEP,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EACtEO,2BAA2B,EAAE;IAAER,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EACnEQ,qBAAqB,EAAE;IAAET,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAC7DS,sBAAsB,EAAE;IAAEV,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK,CAAC;EAC9DU,uBAAuB,EAAE;IAAEX,IAAI,EAAE,QAAQ;IAAEC,YAAY,EAAE;EAAK;AAClE,CAAC,CAAC,CACDW,SAAS,CAAC,CAAC;AAkChB,MAAMC,mBAAmB,GAAGA,CAAA,KAAuB;EAC/C,OAAO;IACHC,gBAAgB,EAAE,IAAI;IACtBC,KAAK,EAAE;MACHC,eAAe,EAAE,CAAC;MAClBC,cAAc,EAAE,CAAC;MACjBC,cAAc,EAAE,CAAC;MACjBC,cAAc,EAAE,CAAC;MACjBC,cAAc,EAAE;QACZC,gBAAgB,EAAE,CAAC;QACnBC,WAAW,EAAE,CAAC;QACdC,gBAAgB,EAAE,CAAC;MACvB;IACJ;EACJ,CAAC;AACL,CAAC;AAED,IAAIC,qBAAqB,GAAG,EAAE;AAC9B,IAAI3B,OAAO,CAAC4B,GAAG,CAACC,iDAAiD,EAAE;EAC/DF,qBAAqB,GAAGG,QAAQ,CAAC9B,OAAO,CAAC4B,GAAG,CAACC,iDAAiD,CAAC;AACnG;AAEA,CAAC,YAAY;EACT,MAAME,MAAM,GAAG,IAAAC,wBAAgB,EAC3B;IACIC,KAAK,EAAE,IAAAC,mBAAW,EAAClC,OAAO,CAAC4B,GAAG,CAACO,oBAAoB,EAAE,OAAO,CAAC;IAC7DC,SAAS,EAAG,aAAYvC,IAAI,CAACW,YAAa;EAC9C,CAAC,EACD,IAAA6B,mBAAU,EAAC;IAAEC,MAAM,EAAE;EAAe,CAAC,CACzC,CAAC;EAED,MAAMC,cAAc,GAAG,IAAAC,iCAAiB,EAAC,CAAC;EAC1C,MAAMC,mBAAmB,GAAG,IAAAC,2CAAyB,EAAC;IAClDC,QAAQ,EAAG,WAAU9C,IAAI,CAACU,UAAW;EACzC,CAAC,CAAC;EAEF,MAAMqC,YAAY,GAAG,IAAAC,0BAAW,EAAC;IAC7BC,IAAI,EAAEjD,IAAI,CAACQ,QAAQ;IACnBkC;EACJ,CAAC,CAAC;EACF,MAAMQ,eAAe,GAAG,IAAAF,0BAAW,EAAC;IAChCC,IAAI,EAAEjD,IAAI,CAACS,UAAU;IACrBiC;EACJ,CAAC,CAAC;EAEF,MAAMS,cAAc,GAAG,IAAAC,uCAAoB,EAACL,YAAY,CAAC;EACzD,MAAMM,gBAAgB,GAAG,IAAAC,yCAAsB,EAACJ,eAAe,CAAC;EAEhE,MAAMK,MAAM,GAAGpC,mBAAmB,CAAC,CAAC;EAEpC,MAAMqC,gBAAgB,GAAG,IAAAC,wCAAsB,EAACb,mBAAmB,EAAE;IACjEc,sBAAsB,EAClB1D,IAAI,CAACa,8BAAgF;IACzF8C,mBAAmB,EAAE3D,IAAI,CAACc,2BAA2B;IACrD8C,aAAa,EAAE5D,IAAI,CAACe,qBAAqB;IACzC8C,cAAc,EAAE7D,IAAI,CAACgB,sBAAsB;IAC3C8C,eAAe,EAAE9D,IAAI,CAACiB;EAC1B,CAAC,CAAC;EAEF,IAAI;IACA,MAAM,IAAA8C,2BAAmB,EACrB;MACIC,MAAM,EAAEb,cAAc;MACtB/C,OAAO,EAAE;QAC
L6D,OAAO,EAAEjE,IAAI,CAACW,YAAY;QAC1BuD,QAAQ,EAAElE,IAAI,CAACY,aAAa;QAC5BuD,OAAO,EAAE,CACL;UACIC,IAAI,EAAE,KAAK;UACXC,EAAE,EAAE;QACR,CAAC,CACJ;QACDC,QAAQ,EAAEf,MAAM,CAACnC,gBAAgB,IAAImD,SAAS;QAC9CC,KAAK,EAAE;MACX;IACJ,CAAC,EACD,MAAMC,MAAM,IAAI;MACZlB,MAAM,CAAClC,KAAK,CAACC,eAAe,EAAE;MAC9BiC,MAAM,CAAClC,KAAK,CAACE,cAAc,IAAIkD,MAAM,CAACC,KAAK,CAACC,MAAM;MAElD,IAAIpB,MAAM,CAAClC,KAAK,CAACC,eAAe,GAAG,CAAC,KAAK,CAAC,EAAE;QACxC;QACAY,MAAM,CAAC0C,KAAK,CACP,eAAcrB,MAAM,CAAClC,KAAK,CAACC,eAAgB,aAAYmD,MAAM,CAACC,KAAK,CAACC,MAAO,eAChF,CAAC;MACL;MAEA,MAAME,oBAAsC,GAAG,EAAE;MACjD,MAAMC,sBAAwC,GAAG,EAAE;MACnD,MAAMC,qBAAoD,GAAG,CAAC,CAAC;MAE/D,MAAMC,gBAAgB,GAAG,IAAIC,IAAI,CAAC,CAAC,CAACC,WAAW,CAAC,CAAC;;MAEjD;MACA;MACA,KAAK,MAAMC,IAAI,IAAIV,MAAM,CAACC,KAAK,EAAE;QAC7B,MAAMU,eAAe,GACjB,IAAAC,gCAAe,EAACF,IAAI,CAAC,IACrB,IAAAG,8CAAsB,EAACH,IAAI,CAAC,IAC5B,IAAAI,gDAAuB,EAACJ,IAAI,CAAC;QAEjC,IAAIC,eAAe,EAAE;UACjB7B,MAAM,CAAClC,KAAK,CAACI,cAAc,EAAE;UAC7B;QACJ;;QAEA;QACA,IAAI,CAAC,IAAA4D,gCAAe,EAACF,IAAI,CAAC,EAAE;UACxB;UACA,MAAMK,SAAS,GAAG,MAAM,IAAAC,sDAA0B,EAAC;YAC/CC,KAAK,EAAEP,IAAI;YACXQ,WAAW,EAAExC,cAAc;YAC3ByC,YAAY,EAAE;cACVC,eAAe,EAAEC,KAAK,IAAI;gBACtB5D,MAAM,CAAC6D,IAAI,CACP;kBAAED,KAAK;kBAAEX;gBAAK,CAAC,EACd,uCAAsCW,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACxF,CAAC;cACL;YACJ;UACJ,CAAC,CAAC;UAEF,MAAMC,4BAA4B,GAAG,MAAM,IAAAC,kDAAyB,EAAC;YACjET,KAAK,EAAEP,IAAI;YACXQ,WAAW,EAAExC,cAAc;YAC3ByC,YAAY,EAAE;cACVC,eAAe,EAAEC,KAAK,IAAI;gBACtB5D,MAAM,CAAC6D,IAAI,CACP;kBAAED,KAAK;kBAAEX;gBAAK,CAAC,EACd,sCAAqCW,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACvF,CAAC;cACL;YACJ;UACJ,CAAC,CAAC;UAEF,IAAAG,wCAAmB,EAACjB,IAAI,EAAE;YACtBK,SAAS;YACT,GAAGU;UACP,CAAC,CAAC;QACN;;QAEA;QACA;QACA,IAAI,CAAC,IAAAZ,8CAAsB,EAACH,IAAI,CAAC,EAAE;UAC/B;UACA,IAAAkB,oCAAiB,EAAClB,IAAI,CAAC;QAC3B;;QAEA;QACA;QACA,IAAI,CAAC,IAAAI,gDAAuB,EAACJ,IAAI,CAAC,EAAE;UAChCjD,MAAM,CAAC0C,KAAK,CACR,IAAA0B,4EAAqC,EAACnB,IAAI,CAAC,EAC1C,uEAAsEA,IAAI,CAACoB,OAAQ,IAAGpB,IAAI,CAACqB,EAAG,IACnG,CAAC;UAED,IAAI;YACA,MAAMC,gBAAgB,GAAG,MAAM,IAAAC,wCAAmB,EAAC;cAC/C1C,MAAM,EAAEb,cAAc;cACtBwD,MAAM,EAAExB,IAAI,CAACwB,MAAM;cACnBf,YAAY,EAAE;gBACVC,eAAe,EAAEC,KAAK,IAAI;kBACtB5D,MAAM,CAAC6D,IAAI,CACP;oBAAED,KAAK;oBAAEX;kBAAK,CAAC,EACd,gCAA+BW,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACjF,CAAC;gBACL;cACJ;YACJ,CAAC,CAAC;YAEF,IAAAW,sDAA0B,EAACzB,IAAI,EAAE;cAC7B0B,QAAQ,EAAE7B,gBAAgB;cAC1B8B,QAAQ,EAAEL;YACd,CAAC,CAAC;YAEFvE,MAAM,CAAC0C,KAAK,CACP,kEAAiEO,IAAI,CAACoB,OAAQ,IAAGpB,IAAI,CAACqB,EAAG,2CAC9F,CAAC;UACL,CAAC,CAAC,OAAOO,CAAC,EAAE;YACR7E,MAAM,CAAC8E,KAAK,CACP,8DAA6D7B,IAAI,CAACoB,OAAQ,IAAGpB,IAAI,CAACqB,EAAG,MAAKO,CAAC,CAACd,OAAQ,EACzG,CAAC;UACL;QACJ;QAEApB,oBAAoB,CAACoC,IAAI,CAAC9D,cAAc,CAAC+D,QAAQ,CAAC/B,IAAI,CAAC,CAAC;;QAExD;AACpB;AACA;;QAEoB,MAAMgC,oBAAoB,GAAI,GAAEhC,IAAI,CAACiC,OAAQ,IAAG;QAChD,IAAIrC,qBAAqB,CAACoC,oBAAoB,CAAC,EAAE;UAC7C;QACJ;QAEApC,qBAAqB,CAACoC,oBAAoB,CAAC,GAAG9D,gBAAgB,CAACgE,QAAQ,CAAC;UACpEC,EAAE,EAAEnC,IAAI,CAACmC,EAAE;UACXC,EAAE,EAAE;QACR,CAAC,CAAC;QAEF,MAAMC,uBAAuB,GAAI,GAAErC,IAAI,CAACiC,OAAQ,IAAG;QACnD,IAAIjC,IAAI,CAAC5B,MAAM,KAAK,WAAW,IAAI,CAAC,CAAC4B,IAAI,CAACsC,MAAM,EAAE;UAC9C1C,qBAAqB,CAACyC,uBAAuB,CAAC,GAAGnE,gBAAgB,CAACgE,QAAQ,CAAC;YACvEC,EAAE,EAAEnC,IAAI,CAACmC,EAAE;YACXC,EAAE,EAAE;UACR,CAAC,CAAC;QACN;MACJ;MAEA,IAAIG,MAAM,CAACC,IAAI,CAAC5C,qBAAqB,CAAC,CAACJ,MAAM,GAAG,CAAC,EAAE;QAC/C;AACpB;AACA;QACoB,MAAMiD,mBAAmB,GAAGA,CAAA,KAAM;UAC9B,OAAO,IAAAC,oBAAY,EAA8B;YAC7CC,KAAK,EAAEzE,gBAAgB,CAACyE,KAAK;YAC7BpD,KAAK,EAAEgD,MAAM,CAACK,MAAM,CAAChD,qBAAqB;UAC9C,CAAC,CAAC;QACN,CAAC;QAED,MAAMiD,YAAY,GAAG,MAAM,IAAAC,uBAAgB,EAACL,mBAAmB,EAAE;UAC7D/B,eAAe,EAAEC,KA
AK,IAAI;YACtB5D,MAAM,CAAC6D,IAAI,CACP;cAAED,KAAK;cAAEpB,KAAK,EAAEgD,MAAM,CAACK,MAAM,CAAChD,qBAAqB;YAAE,CAAC,EACrD,8DAA6De,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EAC/G,CAAC;UACL;QACJ,CAAC,CAAC;QAEF,KAAK,MAAMiC,WAAW,IAAIF,YAAY,EAAE;UACpC,MAAMG,gBAAgB,GAAG,MAAM,IAAAC,wCAAmB,EAC9CF,WAAW,CAACG,IAChB,CAAC;UACD,IAAI,CAACF,gBAAgB,EAAE;YACnBjG,MAAM,CAAC0C,KAAK,CACP,mCAAkCsD,WAAW,CAACZ,EAAG,qCACtD,CAAC;YACD;UACJ;;UAEA;UACA,IAAI,CAAC,IAAAjC,gCAAe,EAAC8C,gBAAgB,CAAC,EAAE;YACpC;YACA,MAAM3C,SAAS,GAAG,MAAM,IAAAC,sDAA0B,EAAC;cAC/CC,KAAK,EAAE;gBAAE,GAAGyC,gBAAgB;gBAAEb,EAAE,EAAEY,WAAW,CAACZ;cAAG,CAAC;cAClD3B,WAAW,EAAExC,cAAc;cAC3ByC,YAAY,EAAE;gBACVC,eAAe,EAAEC,KAAK,IAAI;kBACtB5D,MAAM,CAAC6D,IAAI,CACP;oBACID,KAAK;oBACLX,IAAI,EAAE;sBAAE,GAAGgD,gBAAgB;sBAAEb,EAAE,EAAEY,WAAW,CAACZ;oBAAG;kBACpD,CAAC,EACA,sDAAqDxB,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACvG,CAAC;gBACL;cACJ;YACJ,CAAC,CAAC;YAEF,MAAMC,4BAA4B,GAAG,MAAM,IAAAC,kDAAyB,EAAC;cACjET,KAAK,EAAE;gBAAE,GAAGyC,gBAAgB;gBAAEb,EAAE,EAAEY,WAAW,CAACZ;cAAG,CAAC;cAClD3B,WAAW,EAAExC,cAAc;cAC3ByC,YAAY,EAAE;gBACVC,eAAe,EAAEC,KAAK,IAAI;kBACtB5D,MAAM,CAAC6D,IAAI,CACP;oBACID,KAAK;oBACLX,IAAI,EAAE;sBAAE,GAAGgD,gBAAgB;sBAAEb,EAAE,EAAEY,WAAW,CAACZ;oBAAG;kBACpD,CAAC,EACA,qDAAoDxB,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACtG,CAAC;gBACL;cACJ;YACJ,CAAC,CAAC;YAEF,IAAAG,wCAAmB,EAAC+B,gBAAgB,EAAE;cAClC3C,SAAS;cACT,GAAGU;YACP,CAAC,CAAC;UACN;;UAEA;UACA,IAAI,CAAC,IAAAX,gDAAuB,EAAC4C,gBAAgB,CAAC,EAAE;YAC5CjG,MAAM,CAAC0C,KAAK,CACR,IAAA0B,4EAAqC,EAAC6B,gBAAgB,CAAC,EACvD,CACK,mFAAkF,EAClF,IAAGA,gBAAgB,CAAC5B,OAAQ,IAAG4B,gBAAgB,CAAC3B,EAAG,IAAG,CAC1D,CAAC8B,IAAI,CAAC,GAAG,CACd,CAAC;YAED,IAAI;cACA,MAAM7B,gBAAgB,GAAG,MAAM,IAAAC,wCAAmB,EAAC;gBAC/C1C,MAAM,EAAEb,cAAc;gBACtBwD,MAAM,EAAEwB,gBAAgB,CAACxB,MAAM;gBAC/Bf,YAAY,EAAE;kBACVC,eAAe,EAAEC,KAAK,IAAI;oBACtB5D,MAAM,CAAC6D,IAAI,CACP;sBAAED,KAAK;sBAAEX,IAAI,EAAEhC;oBAAe,CAAC,EAC9B,+CAA8C2C,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EAChG,CAAC;kBACL;gBACJ;cACJ,CAAC,CAAC;cAEF,IAAAW,sDAA0B,EAACuB,gBAAgB,EAAE;gBACzCtB,QAAQ,EAAE7B,gBAAgB;gBAC1B8B,QAAQ,EAAEL;cACd,CAAC,CAAC;cAEFvE,MAAM,CAAC0C,KAAK,CACR,CACK,kEAAiE,EACjE,gBAAeuD,gBAAgB,CAAC5B,OAAQ,IAAG4B,gBAAgB,CAAC3B,EAAG,IAAG,EACnE,kCAAkC,CACrC,CAAC8B,IAAI,CAAC,GAAG,CACd,CAAC;YACL,CAAC,CAAC,OAAOvB,CAAC,EAAE;cACR7E,MAAM,CAAC4D,KAAK,CACR,CACI,0EAA0E,EACzE,IAAGqC,gBAAgB,CAAC5B,OAAQ,IAAG4B,gBAAgB,CAAC3B,EAAG,MAAKO,CAAC,CAACd,OAAQ,EAAC,CACvE,CAACqC,IAAI,CAAC,GAAG,CACd,CAAC;YACL;UACJ;UAEA,MAAMC,cAAc,GAAG,MAAM,IAAAC,oCAAiB,EAACL,gBAAgB,CAAC;UAEhErD,sBAAsB,CAACmC,IAAI,CACvB5D,gBAAgB,CAAC6D,QAAQ,CAAC;YACtB,GAAGgB,WAAW;YACdG,IAAI,EAAEE;UACV,CAAC,CACL,CAAC;QACL;MACJ;MAEA,IAAI1D,oBAAoB,CAACF,MAAM,EAAE;QAC7B,IAAI8D,aAAa,GAAG,KAAK;QACzB,IAAIC,eAAe,GAAG,KAAK;;QAE3B;QACA,MAAMC,OAAO,GAAGA,CAAA,KAAM;UAClB,OAAO,IAAAC,qBAAa,EAChB;YACId,KAAK,EAAE3E,cAAc,CAAC2E,KAAK;YAC3BpD,KAAK,EAAEG;UACX,CAAC,EACD/C,qBACJ,CAAC;QACL,CAAC;QAEDI,MAAM,CAAC0C,KAAK,CACP,WAAUC,oBAAoB,CAACF,MAAO,yCAC3C,CAAC;QAED,IAAI;UACA,MAAM,IAAAsD,uBAAgB,EAACU,OAAO,EAAE;YAC5B9C,eAAe,EAAEC,KAAK,IAAI;cACtB5D,MAAM,CAAC6D,IAAI,CACN,wBAAuBD,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACzE,CAAC;YACL;UACJ,CAAC,CAAC;QACN,CAAC,CAAC,OAAOc,CAAC,EAAE;UACR0B,aAAa,GAAG,IAAI;UACpBvG,MAAM,CAAC4D,KAAK,CACR;YACIA,KAAK,EAAEiB,CAAC;YACRlC;UACJ,CAAC,EACD,kFACJ,CAAC;QACL;QAEA,IAAIC,sBAAsB,CAACH,MAAM,EAAE;UAC/BzC,MAAM,CAAC0C,KAAK,CACP,WAAUE,sBAAsB,CAACH,MAAO,wCAC7C,CAAC;UAED,IAAI;YACA,MAAMkE,OAAO,GAAG,MAAMrF,gBAAgB,CAACsF,IAAI,CAAC;cACxC,MAAMC,WAAWA,CAACC,MAAM,EAAE;gBACtB,MAAMC,gBAAgB,GAAGD,MAAM,CAACE,aAAa,CAACjG,IAAI;gBAElDf,MAAM,CAAC6D,IAAI,CACN,yBAAwBkD,gBAAiB,iDAAgD,EAC1FD,MACJ,CAAC;gBAED,IAC
IzF,MAAM,CAAClC,KAAK,CAACK,cAAc,CAACG,gBAAgB,CACxCoH,gBAAgB,CACnB,EACH;kBACE1F,MAAM,CAAClC,KAAK,CAACK,cAAc,CAACG,gBAAgB,CACxCoH,gBAAgB,CACnB,EAAE;gBACP,CAAC,MAAM;kBACH1F,MAAM,CAAClC,KAAK,CAACK,cAAc,CAACG,gBAAgB,CACxCoH,gBAAgB,CACnB,GAAG,CAAC;gBACT;cACJ;YACJ,CAAC,CAAC;YAEF1F,MAAM,CAAClC,KAAK,CAACK,cAAc,CAACE,WAAW,EAAE;YACzC2B,MAAM,CAAClC,KAAK,CAACK,cAAc,CAACC,gBAAgB,IAAIkH,OAAO,CAACM,WAAW;;YAEnE;YACA,MAAMC,YAAY,GAAGA,CAAA,KAAM;cACvB,OAAO,IAAAR,qBAAa,EAChB;gBACId,KAAK,EAAEzE,gBAAgB,CAACyE,KAAK;gBAC7BpD,KAAK,EAAEI;cACX,CAAC,EACDhD,qBACJ,CAAC;YACL,CAAC;YAED,MAAM,IAAAmG,uBAAgB,EAACmB,YAAY,EAAE;cACjCvD,eAAe,EAAEC,KAAK,IAAI;gBACtB5D,MAAM,CAAC6D,IAAI,CACN,uCAAsCD,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACxF,CAAC;cACL;YACJ,CAAC,CAAC;UACN,CAAC,CAAC,OAAOc,CAAC,EAAE;YACR2B,eAAe,GAAG,IAAI;YACtBxG,MAAM,CAAC4D,KAAK,CACR;cACIA,KAAK,EAAEiB,CAAC;cACRjC;YACJ,CAAC,EACD,iFACJ,CAAC;UACL;QACJ;QAEA,IAAI4D,eAAe,IAAID,aAAa,EAAE;UAClCvG,MAAM,CAAC6D,IAAI,CACP,gEACJ,CAAC;QACL,CAAC,MAAM;UACHxC,MAAM,CAAClC,KAAK,CAACG,cAAc,IAAIqD,oBAAoB,CAACF,MAAM;QAC9D;MACJ;;MAEA;MACA,IAAIvD,gBAAkC,GAAG,IAAI;MAC7C,IAAIqD,MAAM,CAACrD,gBAAgB,EAAE;QACzBA,gBAAgB,GAAGqD,MAAM,CAACrD,gBAAqD;MACnF;MAEAmC,MAAM,CAACnC,gBAAgB,GAAGA,gBAAgB;MAE1C,IAAIA,gBAAgB,KAAK,IAAI,EAAE;QAC3B,OAAO,KAAK;MAChB;;MAEA;MACA,OAAO,IAAI;IACf,CAAC,EACD;MACIiI,KAAK,EAAE;QACHxD,eAAe,EAAEC,KAAK,IAAI;UACtB5D,MAAM,CAAC6D,IAAI,CACP;YACI3E,gBAAgB,EAAEmC,MAAM,CAACnC,gBAAgB;YACzC0E;UACJ,CAAC,EACA,gCAA+BA,KAAK,CAACE,aAAc,YAAWF,KAAK,CAACG,OAAQ,EACjF,CAAC;QACL;MACJ;IACJ,CACJ,CAAC;;IAED;IACA/D,MAAM,CAAC0C,KAAK,CAAC;MAAErB;IAAO,CAAC,EAAE,4DAA4D,CAAC;IACtF,MAAM+F,WAAW,GAAGC,aAAI,CAACjB,IAAI,CACzBkB,WAAE,CAACC,MAAM,CAAC,CAAC,EACV,gDAA+CzJ,IAAI,CAACK,KAAM,IAAGL,IAAI,CAACW,YAAa,MACpF,CAAC;;IAED;IACA+I,WAAE,CAACC,aAAa,CAACL,WAAW,EAAEM,IAAI,CAACC,SAAS,CAACtG,MAAM,CAAClC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAEpEa,MAAM,CAAC0C,KAAK,CAAE,qCAAoC0E,WAAY,GAAE,CAAC;EACrE,CAAC,CAAC,OAAOxD,KAAK,EAAE;IACZ;IACA5D,MAAM,CAAC4D,KAAK,CACR;MAAEvC,MAAM;MAAEuC;IAAM,CAAC,EACjB,qEACJ,CAAC;IACD,MAAMwD,WAAW,GAAGC,aAAI,CAACjB,IAAI,CACzBkB,WAAE,CAACC,MAAM,CAAC,CAAC,EACV,gDAA+CzJ,IAAI,CAACK,KAAM,IAAGL,IAAI,CAACW,YAAa,MACpF,CAAC;;IAED;IACA+I,WAAE,CAACC,aAAa,CAACL,WAAW,EAAEM,IAAI,CAACC,SAAS,CAACtG,MAAM,CAAClC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAEpEa,MAAM,CAAC0C,KAAK,CAAE,qCAAoC0E,WAAY,GAAE,CAAC;EACrE;AACJ,CAAC,EAAE,CAAC","ignoreList":[]}
|
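Throughout the worker source embedded in the map above, every DynamoDB scan, batch read, and batch write is funneled through `executeWithRetry` from `@webiny/utils`, with an `onFailedAttempt` callback that logs the attempt number and error message before the next retry. A minimal sketch of that call shape, assuming a hypothetical `scanOnce` operation in place of the worker's real batch calls:

```ts
import { executeWithRetry } from "@webiny/utils";

// Hypothetical stand-in for one of the worker's DynamoDB batch operations
// (not part of the package; used here only to show the call shape).
const scanOnce = async () => {
    return { items: [] as unknown[] };
};

(async () => {
    const result = await executeWithRetry(scanOnce, {
        // The worker logs the attempt number and message of every failed attempt.
        onFailedAttempt: error => {
            console.warn(`Attempt #${error.attemptNumber} failed: ${error.message}`);
        }
    });

    console.log(`Scanned ${result.items.length} item(s).`);
})();
```

The same options object is what the worker passes down as `retryOptions` to helpers such as `getFallbackIdentity`, so each helper reports its own failed attempts through the shared logger.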
package/package.json
CHANGED
|
@@ -1,21 +1,21 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@webiny/migrations",
|
|
3
|
-
"version": "5.40.
|
|
3
|
+
"version": "5.40.6-beta.1",
|
|
4
4
|
"scripts": {
|
|
5
5
|
"build": "yarn webiny run build",
|
|
6
6
|
"watch": "yarn webiny run watch"
|
|
7
7
|
},
|
|
8
8
|
"dependencies": {
|
|
9
9
|
"@elastic/elasticsearch": "7.12.0",
|
|
10
|
-
"@webiny/api-elasticsearch": "5.40.
|
|
11
|
-
"@webiny/aws-sdk": "5.40.
|
|
12
|
-
"@webiny/cli-plugin-deploy-pulumi": "5.40.
|
|
13
|
-
"@webiny/data-migration": "5.40.
|
|
14
|
-
"@webiny/db-dynamodb": "5.40.
|
|
15
|
-
"@webiny/error": "5.40.
|
|
16
|
-
"@webiny/ioc": "5.40.
|
|
17
|
-
"@webiny/logger": "5.40.
|
|
18
|
-
"@webiny/utils": "5.40.
|
|
10
|
+
"@webiny/api-elasticsearch": "5.40.6-beta.1",
|
|
11
|
+
"@webiny/aws-sdk": "5.40.6-beta.1",
|
|
12
|
+
"@webiny/cli-plugin-deploy-pulumi": "5.40.6-beta.1",
|
|
13
|
+
"@webiny/data-migration": "5.40.6-beta.1",
|
|
14
|
+
"@webiny/db-dynamodb": "5.40.6-beta.1",
|
|
15
|
+
"@webiny/error": "5.40.6-beta.1",
|
|
16
|
+
"@webiny/ioc": "5.40.6-beta.1",
|
|
17
|
+
"@webiny/logger": "5.40.6-beta.1",
|
|
18
|
+
"@webiny/utils": "5.40.6-beta.1",
|
|
19
19
|
"execa": "5.1.1",
|
|
20
20
|
"fast-glob": "3.2.12",
|
|
21
21
|
"jsonpack": "1.1.5",
|
|
@@ -30,16 +30,16 @@
|
|
|
30
30
|
},
|
|
31
31
|
"devDependencies": {
|
|
32
32
|
"@types/execa": "2.0.0",
|
|
33
|
-
"@webiny/api-headless-cms": "5.40.
|
|
34
|
-
"@webiny/api-headless-cms-ddb-es": "5.40.
|
|
35
|
-
"@webiny/cli": "5.40.
|
|
36
|
-
"@webiny/handler-aws": "5.40.
|
|
37
|
-
"@webiny/plugins": "5.40.
|
|
38
|
-
"@webiny/project-utils": "5.40.
|
|
33
|
+
"@webiny/api-headless-cms": "5.40.6-beta.1",
|
|
34
|
+
"@webiny/api-headless-cms-ddb-es": "5.40.6-beta.1",
|
|
35
|
+
"@webiny/cli": "5.40.6-beta.1",
|
|
36
|
+
"@webiny/handler-aws": "5.40.6-beta.1",
|
|
37
|
+
"@webiny/plugins": "5.40.6-beta.1",
|
|
38
|
+
"@webiny/project-utils": "5.40.6-beta.1",
|
|
39
39
|
"elastic-ts": "0.8.0",
|
|
40
40
|
"jest-dynalite": "3.6.1",
|
|
41
41
|
"ttypescript": "1.5.15",
|
|
42
42
|
"typescript": "4.7.4"
|
|
43
43
|
},
|
|
44
|
-
"gitHead": "
|
|
44
|
+
"gitHead": "8e5c47c1cbad8da50813bea5257f9ab1eb8718c8"
|
|
45
45
|
}
|