@webiny/api-headless-cms-ddb-es 0.0.0-mt-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +35 -0
- package/configurations.d.ts +12 -0
- package/configurations.js +32 -0
- package/definitions/entry.d.ts +8 -0
- package/definitions/entry.js +91 -0
- package/definitions/entryElasticsearch.d.ts +8 -0
- package/definitions/entryElasticsearch.js +46 -0
- package/definitions/group.d.ts +8 -0
- package/definitions/group.js +74 -0
- package/definitions/model.d.ts +8 -0
- package/definitions/model.js +96 -0
- package/definitions/settings.d.ts +8 -0
- package/definitions/settings.js +62 -0
- package/definitions/system.d.ts +8 -0
- package/definitions/system.js +50 -0
- package/definitions/table.d.ts +8 -0
- package/definitions/table.js +24 -0
- package/definitions/tableElasticsearch.d.ts +8 -0
- package/definitions/tableElasticsearch.js +24 -0
- package/dynamoDb/index.d.ts +2 -0
- package/dynamoDb/index.js +18 -0
- package/dynamoDb/storage/date.d.ts +3 -0
- package/dynamoDb/storage/date.js +65 -0
- package/dynamoDb/storage/longText.d.ts +7 -0
- package/dynamoDb/storage/longText.js +83 -0
- package/dynamoDb/storage/richText.d.ts +8 -0
- package/dynamoDb/storage/richText.js +110 -0
- package/elasticsearch/index.d.ts +2 -0
- package/elasticsearch/index.js +16 -0
- package/elasticsearch/indexing/dateTimeIndexing.d.ts +3 -0
- package/elasticsearch/indexing/dateTimeIndexing.js +89 -0
- package/elasticsearch/indexing/defaultFieldIndexing.d.ts +3 -0
- package/elasticsearch/indexing/defaultFieldIndexing.js +47 -0
- package/elasticsearch/indexing/index.d.ts +2 -0
- package/elasticsearch/indexing/index.js +24 -0
- package/elasticsearch/indexing/longTextIndexing.d.ts +3 -0
- package/elasticsearch/indexing/longTextIndexing.js +36 -0
- package/elasticsearch/indexing/numberIndexing.d.ts +3 -0
- package/elasticsearch/indexing/numberIndexing.js +48 -0
- package/elasticsearch/indexing/objectIndexing.d.ts +3 -0
- package/elasticsearch/indexing/objectIndexing.js +200 -0
- package/elasticsearch/indexing/richTextIndexing.d.ts +3 -0
- package/elasticsearch/indexing/richTextIndexing.js +34 -0
- package/elasticsearch/search/index.d.ts +3 -0
- package/elasticsearch/search/index.js +16 -0
- package/elasticsearch/search/refSearch.d.ts +3 -0
- package/elasticsearch/search/refSearch.js +24 -0
- package/elasticsearch/search/timeSearch.d.ts +3 -0
- package/elasticsearch/search/timeSearch.js +25 -0
- package/helpers/createElasticsearchQueryBody.d.ts +11 -0
- package/helpers/createElasticsearchQueryBody.js +375 -0
- package/helpers/entryIndexHelpers.d.ts +18 -0
- package/helpers/entryIndexHelpers.js +189 -0
- package/helpers/fields.d.ts +77 -0
- package/helpers/fields.js +174 -0
- package/helpers/index.d.ts +2 -0
- package/helpers/index.js +31 -0
- package/helpers/operatorPluginsList.d.ts +7 -0
- package/helpers/operatorPluginsList.js +30 -0
- package/helpers/searchPluginsList.d.ts +6 -0
- package/helpers/searchPluginsList.js +26 -0
- package/helpers/transformValueForSearch.d.ts +9 -0
- package/helpers/transformValueForSearch.js +26 -0
- package/index.d.ts +2 -0
- package/index.js +171 -0
- package/operations/entry/dataLoaders.d.ts +47 -0
- package/operations/entry/dataLoaders.js +347 -0
- package/operations/entry/elasticsearchFields.d.ts +2 -0
- package/operations/entry/elasticsearchFields.js +32 -0
- package/operations/entry/fields.d.ts +3 -0
- package/operations/entry/fields.js +60 -0
- package/operations/entry/index.d.ts +13 -0
- package/operations/entry/index.js +1152 -0
- package/operations/entry/keys.d.ts +12 -0
- package/operations/entry/keys.js +40 -0
- package/operations/group/index.d.ts +8 -0
- package/operations/group/index.js +202 -0
- package/operations/model/index.d.ts +8 -0
- package/operations/model/index.js +205 -0
- package/operations/settings/index.d.ts +6 -0
- package/operations/settings/index.js +141 -0
- package/operations/system/createElasticsearchTemplate.d.ts +5 -0
- package/operations/system/createElasticsearchTemplate.js +62 -0
- package/operations/system/index.d.ts +6 -0
- package/operations/system/index.js +105 -0
- package/package.json +73 -0
- package/plugins/CmsEntryElasticsearchBodyModifierPlugin.d.ts +17 -0
- package/plugins/CmsEntryElasticsearchBodyModifierPlugin.js +24 -0
- package/plugins/CmsEntryElasticsearchFieldPlugin.d.ts +12 -0
- package/plugins/CmsEntryElasticsearchFieldPlugin.js +24 -0
- package/plugins/CmsEntryElasticsearchQueryModifierPlugin.d.ts +17 -0
- package/plugins/CmsEntryElasticsearchQueryModifierPlugin.js +24 -0
- package/plugins/CmsEntryElasticsearchSortModifierPlugin.d.ts +17 -0
- package/plugins/CmsEntryElasticsearchSortModifierPlugin.js +24 -0
- package/types.d.ts +191 -0
- package/types.js +60 -0
- package/upgrades/index.d.ts +2 -0
- package/upgrades/index.js +16 -0
- package/upgrades/utils.d.ts +1 -0
- package/upgrades/utils.js +16 -0
- package/upgrades/v5.0.0/cleanDatabaseRecord.d.ts +6 -0
- package/upgrades/v5.0.0/cleanDatabaseRecord.js +16 -0
- package/upgrades/v5.0.0/createOldVersionIndiceName.d.ts +2 -0
- package/upgrades/v5.0.0/createOldVersionIndiceName.js +12 -0
- package/upgrades/v5.0.0/entryValueFixer.d.ts +4 -0
- package/upgrades/v5.0.0/entryValueFixer.js +124 -0
- package/upgrades/v5.0.0/fieldFinder.d.ts +6 -0
- package/upgrades/v5.0.0/fieldFinder.js +42 -0
- package/upgrades/v5.0.0/helpers.d.ts +4 -0
- package/upgrades/v5.0.0/helpers.js +57 -0
- package/upgrades/v5.0.0/index.d.ts +4 -0
- package/upgrades/v5.0.0/index.js +232 -0
- package/upgrades/v5.8.0/index.d.ts +4 -0
- package/upgrades/v5.8.0/index.js +426 -0
|
@@ -0,0 +1,426 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
|
|
5
|
+
Object.defineProperty(exports, "__esModule", {
|
|
6
|
+
value: true
|
|
7
|
+
});
|
|
8
|
+
exports.default = void 0;
|
|
9
|
+
|
|
10
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
|
11
|
+
|
|
12
|
+
var _configurations = _interopRequireDefault(require("../../configurations"));
|
|
13
|
+
|
|
14
|
+
var _CmsContentEntryDynamoElastic = require("../../operations/entry/CmsContentEntryDynamoElastic");
|
|
15
|
+
|
|
16
|
+
var _error = _interopRequireDefault(require("@webiny/error"));
|
|
17
|
+
|
|
18
|
+
var _lodash = _interopRequireDefault(require("lodash.chunk"));
|
|
19
|
+
|
|
20
|
+
var _definitions = _interopRequireDefault(require("../../definitions"));
|
|
21
|
+
|
|
22
|
+
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
|
|
23
|
+
|
|
24
|
+
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
|
|
25
|
+
|
|
26
|
+
/**
 * Pause execution for the given number of milliseconds (default: 2000).
 * Resolves with undefined once the timeout elapses.
 */
const sleep = async (ms = 2000) => new Promise(done => setTimeout(done, ms));
|
|
29
|
+
/**
 * Entry ID is the first part of the ID, before the #.
 * Throws a MALFORMED_ID_ERROR when the value contains no "#" at all,
 * or when the part before the first "#" is empty.
 */
const extractEntryId = id => {
  if (!id.includes("#")) {
    throw new _error.default("Missing # in the given id value. Possibly not an ID.", "MALFORMED_ID_ERROR", {
      id
    });
  }

  // Everything before the first "#" is the entry ID.
  const [entryId] = id.split("#");

  if (!entryId) {
    throw new _error.default("Malformed entry ID.", "MALFORMED_ID_ERROR", {
      id
    });
  }

  return entryId;
};
|
|
51
|
+
|
|
52
|
+
/**
 * Build the DynamoDB partition key for a CMS entry record:
 * `T#<tenant>#L#<locale>#CMS#CME#<entryId>`.
 * Throws when `id`, `tenant` or `locale` is missing on the record data.
 */
const createPartitionKey = data => {
  if (!data.id) {
    throw new _error.default(`Missing ID in the EntryRecordData: ${JSON.stringify(data)}`, "RECORD_DATA_ID_ERROR", {
      data
    });
  }

  // The entry ID is derived from the full record ID (part before the "#").
  const entryId = extractEntryId(data.id);

  if (!data.tenant) {
    throw new _error.default("Missing tenant on entry record data.", "TENANT_ERROR", {
      data
    });
  }

  if (!data.locale) {
    throw new _error.default("Missing locale on entry record data.", "LOCALE_ERROR", {
      data
    });
  }

  return `T#${data.tenant}#L#${data.locale}#CMS#CME#${entryId}`;
};
|
|
73
|
+
/**
 * DynamoDB Toolbox Entity query executor.
 * This helps with paginating the result and loading all of the data that can be fetched by the query.
 *
 * @param query A pending DynamoDB Toolbox query (a Promise of a result page).
 * @returns All `Items` collected across every page of the query result.
 */
const executeQuery = async query => {
  const items = [];
  /**
   * First result is actually the real query result.
   */

  let previousResult = await query;

  if (!previousResult || Array.isArray(previousResult.Items) === false || previousResult.Items.length === 0) {
    return items;
  }

  items.push(...previousResult.Items);
  let result;
  /**
   * If previousResult.next method returns false it means there is nothing more to load.
   * Otherwise a result object is returned and we loop it.
   * This works only if autoParse is not false (it is true by default).
   *
   * FIX: `next()` returns a Promise in DynamoDB Toolbox, so it must be awaited.
   * Without the await, the (always truthy) Promise entered the loop, `result.Items`
   * was undefined, and pagination silently stopped after the first page.
   * `await` is a no-op for a synchronous falsy return (e.g. `false`).
   */

  while (typeof previousResult.next === "function" && (result = await previousResult.next())) {
    if (!result || Array.isArray(result.Items) === false || result.Items.length === 0) {
      return items;
    }

    items.push(...result.Items);
    previousResult = result;
  }

  return items;
};
|
|
110
|
+
|
|
111
|
+
/**
 * Load, in parallel, every DynamoDB record belonging to each of the given
 * entry records (all versions, latest and published records share the same
 * partition key), and flatten the results into a single array.
 *
 * @param args.records Map of entry records (must carry id, tenant, locale).
 * @param args.entity  DynamoDB Toolbox entity to run the queries against.
 * @throws FETCH_ENTRIES_ERROR when any of the queries fails.
 */
const fetchEntries = async args => {
  const {
    records,
    entity
  } = args;
  const queries = Object.values(records).map(record => {
    const partitionKey = createPartitionKey(record);
    return executeQuery(entity.query(partitionKey));
  });

  try {
    const results = await Promise.all(queries);
    return results.reduce((items, result) => {
      return items.concat(result);
    }, []);
  } catch (ex) {
    // FIX: the original message was garbled ("... table the error.") and the
    // caught exception was silently dropped; include it so failures are debuggable.
    throw new _error.default(`Error while fetching the entries from the "${entity.table.name}" table.`, "FETCH_ENTRIES_ERROR", {
      table: entity.table.name,
      entity: entity.name,
      error: ex.message
    });
  }
};
|
|
133
|
+
|
|
134
|
+
/**
 * Webiny 5.8.0 upgrade plugin for Headless CMS (DynamoDB + Elasticsearch):
 * backfills the `entryId` and `tenant` properties onto every existing CMS
 * entry record, in both the regular DynamoDB table and the Elasticsearch
 * streaming table. Entry IDs are discovered by querying the "latest entry"
 * documents in each model's Elasticsearch index.
 */
var _default = () => ({
  type: "api-upgrade",
  name: "api-upgrade-5.8.0",
  app: "headless-cms",
  version: "5.8.0",

  /**
   * Runs the whole migration. Throws (via @webiny/error) on any unrecoverable
   * inconsistency so the upgrade can be re-run.
   */
  async apply(context) {
    const {
      i18n
    } = context;
    const elasticsearch = context.elasticsearch;

    if (!elasticsearch) {
      throw new _error.default("Missing Elasticsearch client on the context.");
    }

    console.log("Started with the update of CMS entries.");
    /**
     * Define tables and entities via helper methods because tables and entities are created in multiple places in the code.
     */

    const table = _definitions.default.defineTable(context);

    const elasticTable = _definitions.default.defineElasticsearchTable(context);

    const modelEntity = _definitions.default.defineModel({
      context,
      table
    });

    const entryEntity = _definitions.default.defineEntry({
      context,
      table
    });

    const entryElasticsearchEntity = _definitions.default.defineElasticsearchEntry({
      context,
      table: elasticTable
    });

    // entryId -> { id, entryId, tenant, locale } for every latest entry found in Elasticsearch.
    const entryRecords = {};
    const esIndices = [];
    const locales = i18n.getLocales();
    /**
     * max dynamodb limit (maximum items per BatchWriteItem request)
     */

    const recordsInABatch = 25;
    /**
     * We need to find all the entries in each of the possible elasticsearch indexes.
     * For that we need a list of indexes + localeCode.
     * To get the index name we need models from in each locale.
     */

    for (const locale of locales) {
      /**
       * Need all the models to build the elasticsearch indexes.
       * NOTE(review): only the "root" tenant's models are queried here — see the
       * TODO below; confirm whether other tenants need to be covered too.
       */
      const models = await executeQuery( // TODO determine if required to loop through the tenants
      modelEntity.query(`T#root#L#${locale.code}#CMS#CM`));

      for (const model of models) {
        // configurations.es() resolves the Elasticsearch index name for a model.
        // The context is cloned with getLocale() overridden so the index name is
        // built for the locale currently being processed, not the request locale.
        const {
          index: esIndex
        } = _configurations.default.es(_objectSpread(_objectSpread({}, context), {}, {
          cms: _objectSpread(_objectSpread({}, context.cms), {}, {
            getLocale: () => {
              return locale;
            }
          })
        }), // @ts-ignore
        model);

        esIndices.push({
          esIndex,
          localeCode: locale.code
        });
      }
    }

    const limit = 100;
    /**
     * Get all the latest entries from all the indexes, we only need the ID of the entry from the Elasticsearch.
     * Other data we need is the tenant ID and locale code.
     */

    for (const esData of esIndices) {
      const {
        esIndex,
        localeCode
      } = esData;
      let hasMoreItems = true;
      let after;

      while (hasMoreItems) {
        const response = await elasticsearch.search({
          index: esIndex,
          body: {
            query: {
              bool: {
                must: [{
                  term: {
                    __type: _CmsContentEntryDynamoElastic.TYPE_ENTRY_LATEST
                  }
                }]
              }
            },
            sort: {
              createdOn: {
                order: "asc",
                // eslint-disable-next-line
                unmapped_type: "date"
              }
            },
            // One extra record is requested so we can tell whether another page exists.
            size: limit + 1,
            // NOTE(review): Elasticsearch's native cursor parameter is `search_after`,
            // not `after` — confirm this key is translated somewhere upstream, otherwise
            // pagination beyond the first page may never advance.
            after
          }
        });
        const {
          hits
        } = response.body.hits;

        for (const hit of hits) {
          /**
           * _source should be an property on the hit, but we check it just in case something wrong happens.
           * There are no Elasticsearch typings for this.
           */
          const source = hit._source || {};

          if (!source.id) {
            throw new _error.default(`Missing ID in source: ${JSON.stringify(source)}`, "SOURCE_ID_ERROR", {
              source
            });
          }

          const entryId = extractEntryId(source.id);
          entryRecords[entryId] = {
            id: source.id,
            entryId,
            // NOTE(review): the tenant is taken from the current request context,
            // not from the document — assumes the upgrade runs per-tenant; confirm.
            tenant: context.tenancy.getCurrentTenant().id,
            locale: localeCode
          };
        }

        hasMoreItems = hits.length > limit;
        after = hasMoreItems ? hits[limit - 1].sort : undefined;
      }
    }

    console.log(`[ELASTICSEARCH] Retrieved ${Object.keys(entryRecords).length} entry IDs to query directly from the DynamoDB.`);
    /**
     * Now we go into the DynamoDB regular table and find all the entries with the given IDs.
     * This is done via the partition key as it is a combination of tenant, locale and generated entry id.
     * We need to have a query for each of the entries because there are possible versions, published and latest records.
     * We can use a scan but at that point all the records in the table would be read,
     * which is possibly expensive if there are a lot of records from the other applications (file manager, form builder, ...)
     */

    const regularEntries = await fetchEntries({
      records: entryRecords,
      entity: entryEntity
    });
    /**
     * Then we create updates to the regular DynamoDB records.
     */

    const updates = [];

    for (const entry of regularEntries) {
      if (!entry.id) {
        throw new _error.default(`Missing ID in regular entry: ${JSON.stringify(entry)}`, "ENTRY_ID_ERROR", {
          entry
        });
      }

      const entryId = extractEntryId(entry.id);

      // Skip records whose entry ID was not discovered via Elasticsearch.
      if (!entryRecords[entryId]) {
        continue;
      }

      const {
        tenant
      } = entryRecords[entryId];
      /**
       * Remove the leftovers from the beta 5 to v5.0.0 upgrade
       */

      delete entry["ignore"];
      updates.push(entryEntity.putBatch(_objectSpread(_objectSpread({}, entry), {}, {
        entryId,
        tenant
      })));
    }

    try {
      // Split the updates into DynamoDB-sized batches (25 records max each).
      const chunks = (0, _lodash.default)(updates, recordsInABatch);

      for (const key in chunks) {
        if (!chunks.hasOwnProperty(key)) {
          continue;
        }

        const chunk = chunks[key];
        await table.batchWrite(chunk);
      }
    } catch (ex) {
      throw new _error.default("Error while writing a batch of records to regular table.", "REGULAR_TABLE_WRITE_ERROR", {
        length: updates.length,
        ex
      });
    }

    console.log(`[DYNAMODB] Table updated records: ${updates.length}`);
    /**
     * Time to go for the Elasticsearch data.
     */

    const elasticEntries = await fetchEntries({
      records: entryRecords,
      entity: entryElasticsearchEntity
    });
    /**
     * Then we create the updates to the Elasticsearch records.
     */

    const elasticUpdates = [];

    for (const entry of elasticEntries) {
      const data = entry.data;

      if (!data) {
        throw new _error.default("Missing data in elastic entry.", "DATA_ERROR", {
          entry
        });
      } else if (!data.id) {
        throw new _error.default(`Missing ID in elastic entry data: ${JSON.stringify(entry)}`, "DATA_ID_ERROR", {
          entry
        });
      }

      const entryId = extractEntryId(data.id);

      // Skip records whose entry ID was not discovered via Elasticsearch.
      if (!entryRecords[entryId]) {
        continue;
      }

      const {
        tenant
      } = entryRecords[entryId];
      /**
       * Remove the leftovers from the beta 5 to v5.0.0 upgrade
       */

      delete entry["ignore"];
      delete entry["savedOn"];
      delete entry["version"];
      // The Elasticsearch record keeps the entry payload under `data`,
      // so entryId/tenant are written into the nested object.
      elasticUpdates.push(entryElasticsearchEntity.putBatch(_objectSpread(_objectSpread({}, entry), {}, {
        data: _objectSpread(_objectSpread({}, data), {}, {
          entryId,
          tenant
        })
      })));
    }

    const breakMs = 200;
    /**
     * Updating the Elasticsearch table is a bit tricky because it can break if overwhelmed.
     * We will take breakMs ms break between each recordsInABatch records
     */

    const elasticUpdatesChunks = (0, _lodash.default)(elasticUpdates, recordsInABatch);
    console.log(`[ELASTICSEARCH] Total chunks to be written with ${recordsInABatch} records in a batch: ${elasticUpdatesChunks.length}`);

    for (const elasticUpdateChunk of elasticUpdatesChunks) {
      try {
        await elasticTable.batchWrite(elasticUpdateChunk);
      } catch (ex) {
        throw new _error.default("Error writing to Elasticsearch stream table.", "STREAM_TABLE_WRITE_ERROR", {
          ex,
          message: ex.message
        });
      }

      await sleep(breakMs);
    }

    console.log(`[ELASTICSEARCH] Streaming table updated records: ${elasticUpdates.length}`);
  }

});

exports.default = _default;
|