@certik/skynet 0.21.0 → 0.22.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/abi.ts +2 -4
- package/api.ts +7 -7
- package/app.ts +6 -12
- package/const.ts +1 -1
- package/date.ts +1 -1
- package/deploy.ts +4 -4
- package/dist/abi.d.ts +112 -0
- package/dist/abi.d.ts.map +1 -0
- package/dist/abi.js +565 -0
- package/dist/address.d.ts +3 -0
- package/dist/address.d.ts.map +1 -0
- package/dist/address.js +23 -0
- package/dist/api.d.ts +32 -0
- package/dist/api.d.ts.map +1 -0
- package/dist/api.js +146 -0
- package/dist/app.d.ts +102 -0
- package/dist/app.d.ts.map +1 -0
- package/dist/app.js +328 -0
- package/dist/availability.d.ts +24 -0
- package/dist/availability.d.ts.map +1 -0
- package/dist/availability.js +63 -0
- package/dist/cli.d.ts +6 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +37 -0
- package/dist/const.d.ts +35 -0
- package/dist/const.d.ts.map +1 -0
- package/dist/const.js +141 -0
- package/dist/databricks.d.ts +4 -0
- package/dist/databricks.d.ts.map +1 -0
- package/dist/databricks.js +68 -0
- package/dist/date.d.ts +6 -0
- package/dist/date.d.ts.map +1 -0
- package/dist/date.js +29 -0
- package/dist/deploy.d.ts +76 -0
- package/dist/deploy.d.ts.map +1 -0
- package/dist/deploy.js +454 -0
- package/dist/dynamodb.d.ts +17 -0
- package/dist/dynamodb.d.ts.map +1 -0
- package/dist/dynamodb.js +328 -0
- package/dist/env.d.ts +7 -0
- package/dist/env.d.ts.map +1 -0
- package/dist/env.js +19 -0
- package/dist/graphql.d.ts +6 -0
- package/dist/graphql.d.ts.map +1 -0
- package/dist/graphql.js +25 -0
- package/dist/indexer.d.ts +70 -0
- package/dist/indexer.d.ts.map +1 -0
- package/dist/indexer.js +492 -0
- package/dist/log.d.ts +14 -0
- package/dist/log.d.ts.map +1 -0
- package/dist/log.js +62 -0
- package/dist/object-hash.d.ts +2 -0
- package/dist/object-hash.d.ts.map +1 -0
- package/dist/object-hash.js +71 -0
- package/dist/opsgenie.d.ts +21 -0
- package/dist/opsgenie.d.ts.map +1 -0
- package/dist/opsgenie.js +38 -0
- package/dist/por.d.ts +38 -0
- package/dist/por.d.ts.map +1 -0
- package/dist/por.js +130 -0
- package/dist/s3.d.ts +21 -0
- package/dist/s3.d.ts.map +1 -0
- package/dist/s3.js +110 -0
- package/dist/search.d.ts +6 -0
- package/dist/search.d.ts.map +1 -0
- package/dist/search.js +30 -0
- package/dist/selector.d.ts +18 -0
- package/dist/selector.d.ts.map +1 -0
- package/dist/selector.js +48 -0
- package/dist/slack.d.ts +9 -0
- package/dist/slack.d.ts.map +1 -0
- package/dist/slack.js +26 -0
- package/dist/util.d.ts +5 -0
- package/dist/util.d.ts.map +1 -0
- package/dist/util.js +26 -0
- package/examples/api.ts +1 -1
- package/examples/indexer.ts +1 -1
- package/examples/mode-indexer.ts +1 -1
- package/indexer.ts +11 -11
- package/package.json +99 -4
- package/por.ts +181 -0
- package/search.ts +2 -2
- package/tsconfig.build.json +23 -0
- package/tsconfig.json +5 -5
- package/.vscode/settings.json +0 -5
package/dist/indexer.js
ADDED
|
@@ -0,0 +1,492 @@
|
|
|
1
|
+
import meow from "meow";
|
|
2
|
+
import { createRecord, getRecordByKey } from "./dynamodb";
|
|
3
|
+
import { getEnvironment } from "./env";
|
|
4
|
+
import { exponentialRetry } from "./availability";
|
|
5
|
+
import { range as numberRange, fillRange as fillNumberRange } from "./util";
|
|
6
|
+
import { getSelectorDesc, getSelectorFlags, toSelectorString } from "./selector";
|
|
7
|
+
import { getBinaryName } from "./cli";
|
|
8
|
+
import { inline } from "./log";
|
|
9
|
+
import { findDateAfter, dateRange, daysInRange as fillDateRange } from "./date";
|
|
10
|
+
const STATE_TABLE_NAME = "skynet-" + getEnvironment() + "-indexer-state";
|
|
11
|
+
// Reads the most recently indexed id (block height / date / …) for this
// indexer + selector combination from the shared DynamoDB state table.
// Returns undefined when no cursor has been committed yet.
async function getIndexerLatestId(name, selectorFlags) {
    const key = { name: `${name}Since(${toSelectorString(selectorFlags)})` };
    const record = await getRecordByKey(STATE_TABLE_NAME, key);
    return record == null ? undefined : record.value;
}
|
|
17
|
+
// Reads the validation cursor (highest id that was validated) for this
// indexer + selector combination; undefined when nothing was validated yet.
async function getIndexerValidatedId(name, selectorFlags) {
    const stateKey = `${name}Validate(${toSelectorString(selectorFlags)})`;
    const record = await getRecordByKey(STATE_TABLE_NAME, { name: stateKey });
    return record ? record.value : undefined;
}
|
|
26
|
+
// Advances an indexer cursor by `n` steps, dispatching on the state type:
// "date" cursors are date strings advanced via findDateAfter, every other
// type is treated as a plain numeric cursor.
function increaseId(type, currentId, n) {
    if (type === "date") {
        if (typeof currentId === "string") {
            return findDateAfter(currentId, n);
        }
        throw new Error("invalid type for date id");
    }
    if (typeof currentId === "number") {
        return currentId + n;
    }
    throw new Error("Invalid type for numeric id");
}
|
|
38
|
+
// for those indexers that can have progress tracked by a numeric state.type
|
|
39
|
+
// such as block height, or timestamp
|
|
40
|
+
// managing state would be helpful to reduce the build time
|
|
41
|
+
// and avoid unnecessary computation & storage
|
|
42
|
+
function createModeIndexerApp({ binaryName, name, selector = {}, build,
|
|
43
|
+
// number of items run in a batch, determines the { from, to } to the build function
|
|
44
|
+
buildBatchSize = 1,
|
|
45
|
+
// number of build functions calling at a time
|
|
46
|
+
buildConcurrency = 1,
|
|
47
|
+
// commit updates every rolling window = buildBatchSize * buildConcurrency
|
|
48
|
+
validate,
|
|
49
|
+
// number of items run in a batch, determines the { from, to } to the validate function
|
|
50
|
+
validateBatchSize = 1,
|
|
51
|
+
// number of validate functions calling at a time
|
|
52
|
+
validateConcurrency = 1,
|
|
53
|
+
// commit updates every rolling window = validateBatchSize * validateConcurrency
|
|
54
|
+
maxRetry = 2, state, }) {
|
|
55
|
+
const defaultState = {
|
|
56
|
+
type: "block",
|
|
57
|
+
getMinId: async () => 1,
|
|
58
|
+
getMaxId: async () => {
|
|
59
|
+
throw new Error("must implement getMaxId");
|
|
60
|
+
},
|
|
61
|
+
};
|
|
62
|
+
const finalState = {
|
|
63
|
+
...defaultState,
|
|
64
|
+
...state,
|
|
65
|
+
};
|
|
66
|
+
// type based range functions
|
|
67
|
+
// Type-aware windowing helper: date-typed state delegates to dateRange,
// numeric state to numberRange. Mixed or unsupported id types are rejected.
function range(from, to, step) {
    const bothStrings = typeof from === "string" && typeof to === "string";
    if (bothStrings) {
        if (finalState.type !== "date") {
            throw new Error("Invalid type for numeric range");
        }
        return dateRange(from, to, step);
    }
    const bothNumbers = typeof from === "number" && typeof to === "number";
    if (bothNumbers) {
        return numberRange(from, to, step);
    }
    throw new Error("Invalid type for range");
}
|
|
79
|
+
// Type-aware enumeration helper: expands [from, to] into every individual
// id, using the date expansion for date-typed state and the numeric one
// otherwise. Mixed or unsupported id types are rejected.
function fillRange(from, to) {
    const bothStrings = typeof from === "string" && typeof to === "string";
    if (bothStrings) {
        if (finalState.type !== "date") {
            throw new Error("Invalid type for numeric range");
        }
        return fillDateRange(from, to);
    }
    const bothNumbers = typeof from === "number" && typeof to === "number";
    if (bothNumbers) {
        return fillNumberRange(from, to);
    }
    throw new Error("Invalid type for range");
}
|
|
91
|
+
// Number of discrete ids covered by [from, to], inclusive.
function offsetRange(from, to) {
    const ids = fillRange(from, to);
    return ids.length;
}
|
|
94
|
+
/**
 * Dispatches parsed CLI flags to the requested indexer mode.
 *
 * Modes: reset, rebuild, resume-rebuild, validate/validation, one, range,
 * and the default delta (incremental build from the committed cursor).
 * Most branches terminate the process via the mode runner they delegate to.
 *
 * BUG FIX: the delta branch previously read `state.getMaxId(...)` from the
 * raw constructor option, which may be undefined (it has no default) or may
 * omit getMaxId; `finalState` merges the defaults and is used everywhere
 * else in this factory, so it is used here as well.
 */
async function runMode(flags) {
    const { mode, from: fromUntyped, to: toUntyped, status, verbose: verboseUntyped, ...untypeSelectorFlags } = flags;
    const from = fromUntyped;
    const to = toUntyped;
    const verbose = verboseUntyped;
    const selectorFlags = untypeSelectorFlags;
    // --status: print the three state-table cursors and exit without building.
    if (status) {
        const stateItem = await getRecordByKey(STATE_TABLE_NAME, {
            name: `${name}RebuildState(${toSelectorString(selectorFlags)})`,
        });
        const fromItem = await getRecordByKey(STATE_TABLE_NAME, {
            name: `${name}Since(${toSelectorString(selectorFlags)})`,
        });
        const validateItem = await getRecordByKey(STATE_TABLE_NAME, {
            name: `${name}Validate(${toSelectorString(selectorFlags)})`,
        });
        inline.log(`RebuildState=${stateItem?.value} Since=${fromItem?.value} Validated=${validateItem?.value}`);
        process.exit(0);
    }
    inline.log(`[MODE INDEXER] mode=${mode}, env=${getEnvironment()}, ${toSelectorString(selectorFlags, ", ")}`);
    if (mode === "reset") {
        await runReset(selectorFlags);
    }
    else if (mode === "rebuild") {
        // Full rebuild: wipe state first, then build the requested (or full) range.
        const rebuildFrom = from || (await finalState.getMinId(selectorFlags));
        const rebuildTo = to || (await finalState.getMaxId(selectorFlags));
        await runReset(selectorFlags);
        await runRebuild(selectorFlags, rebuildFrom, rebuildTo, verbose);
    }
    else if (mode === "resume-rebuild") {
        // Continue a partial rebuild from just past the last committed cursor.
        const previousRebuildEnds = await getIndexerLatestId(name, selectorFlags);
        const rebuildFrom = from ||
            (previousRebuildEnds !== undefined && increaseId(finalState.type, previousRebuildEnds, 1)) ||
            (await finalState.getMinId(selectorFlags));
        const rebuildTo = to || (await finalState.getMaxId(selectorFlags));
        await runRebuild(selectorFlags, rebuildFrom, rebuildTo, verbose);
    }
    else if (mode === "validate" || mode === "validation") {
        const previousRebuildEnds = await getIndexerLatestId(name, selectorFlags);
        if (!previousRebuildEnds) {
            inline.log(`[MODE INDEXER] cannot validate without a successful rebuild`);
            process.exit(0);
        }
        const previousValidatedTo = await getIndexerValidatedId(name, selectorFlags);
        const validateFrom = from || previousValidatedTo || (await finalState.getMinId(selectorFlags));
        const validateTo = to || previousRebuildEnds;
        const shouldSaveState = !to; // should not save state for manual validations, those are for testing
        await runValidate(selectorFlags, validateFrom, validateTo, shouldSaveState, verbose);
    }
    else if (mode === "one") {
        if (to) {
            inline.log("[MODE INDEXER] one mode ignores --to option. you may want to use range mode instead");
        }
        if (!from) {
            inline.log(`[MODE INDEXER] must provide --from option for one mode`);
            process.exit(1);
        }
        await runRange(selectorFlags, from, from, verbose);
    }
    else if (mode === "range") {
        if (!from || !to) {
            inline.log(`[MODE INDEXER] must provide --from and --to option for range mode`);
            process.exit(1);
        }
        await runRange(selectorFlags, from, to, verbose);
    }
    else {
        // Default mode: delta. Only runs after a rebuild has fully succeeded.
        const stateItem = await getRecordByKey(STATE_TABLE_NAME, {
            name: `${name}RebuildState(${toSelectorString(selectorFlags)})`,
        });
        // only build when rebuild succeed
        if (!stateItem || stateItem.value !== "succeed") {
            inline.log("[MODE INDEXER] skip because rebuild hasn't done yet");
            process.exit(0);
        }
        const latestId = await getIndexerLatestId(name, selectorFlags);
        if (!latestId) {
            throw new Error(`[MODE INDEXER] cannot find the latest ${finalState.type}`);
        }
        const deltaFrom = increaseId(finalState.type, latestId, 1);
        // was `state.getMaxId` — `state` may be undefined or lack getMaxId;
        // finalState carries the merged defaults.
        const deltaTo = await finalState.getMaxId(selectorFlags);
        await runDelta(selectorFlags, deltaFrom, deltaTo, verbose);
    }
}
|
|
178
|
+
// One-shot build of [from, to]; used by the `one` and `range` CLI modes.
// Does not persist any state. Exits the process: 1 when any id failed to
// build, 0 otherwise.
async function runRange(selectorFlags, from, to, verbose) {
    const startedAt = Date.now();
    inline.log(`[MODE INDEXER] building range, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${buildBatchSize}, concurrency=${buildConcurrency}`);
    const failedIds = await execBuild(selectorFlags, from, to, verbose, false);
    if (failedIds.length === 0) {
        inline.log(`[MODE INDEXER] built successfully in ${Date.now() - startedAt}ms`);
        process.exit(0);
    }
    inline.log(`[MODE INDEXER] built with some failed ${finalState.type}`, failedIds);
    process.exit(1);
}
|
|
191
|
+
/**
 * Runs the `validate` callback over [from, to] in rolling windows of
 * validateBatchSize * validateConcurrency ids. Each batch is retried up to
 * maxRetry times via exponentialRetry; a batch that keeps failing aborts
 * the run. When shouldSaveState is set, the validation cursor is committed
 * after each completed window so an interrupted run can resume.
 *
 * BUG FIX: the per-window commit previously stored `to` (the final target),
 * so a crash after the first window left later, unvalidated ranges marked
 * as validated. It now stores `windowEnd`, matching both the adjacent log
 * message and the analogous cursor commit in execBuild.
 */
async function runValidate(selectorFlags, from, to, shouldSaveState, verbose) {
    if (!validate) {
        inline.log(`[MODE INDEXER] the indexer doesn't support validate mode, validate function not implemented`);
        process.exit(1);
    }
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] validating, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${validateBatchSize}, concurrency=${validateConcurrency}`);
    const windows = range(from, to, validateBatchSize * validateConcurrency);
    inline.log(`[MODE INDEXER] from=${from}, to=${to}, batchSize=${validateBatchSize}, concurrency=${validateConcurrency}`);
    for (const [windowStart, windowEnd] of windows) {
        inline.log(`[MODE INDEXER] validating window ${windowStart}~${windowEnd}, concurrency=${validateConcurrency}`);
        const batches = range(windowStart, windowEnd, validateBatchSize);
        // add a retry for errors
        await Promise.all(batches.map(async ([batchStart, batchEnd]) => {
            const result = await exponentialRetry(async () => {
                try {
                    await validate({
                        ...selectorFlags,
                        from: batchStart,
                        to: batchEnd,
                        verbose,
                    });
                    return true;
                }
                catch (err) {
                    inline.error(`got error in validation`, err);
                    return false;
                }
            }, {
                maxRetry,
                test: (r) => r,
                verbose,
            });
            if (!result) {
                throw new Error(`Terminate validation due to critical errors, from=${batchStart}, to=${batchEnd}`);
            }
        }));
        if (shouldSaveState) {
            await createRecord(STATE_TABLE_NAME, {
                name: `${name}Validate(${toSelectorString(selectorFlags)})`,
                // was `to`: only the window just finished is actually validated.
                value: windowEnd,
            });
            if (verbose) {
                inline.log(`[MODE INDEXER] updated processed ${finalState.type} to ${windowEnd}`);
            }
        }
    }
    inline.log(`[MODE INDEXER] validated ${offsetRange(from, to)} ${finalState.type} successfully in ${Date.now() - startTime}ms`);
}
|
|
240
|
+
/**
 * Builds [from, to] in rolling windows of buildBatchSize * buildConcurrency
 * ids. Each batch invokes `build` (which returns the ids that failed, if
 * any) under exponentialRetry; a batch that still errors after retries
 * contributes its entire id range to the failure list. When shouldSaveState
 * is set, the Since cursor is committed after each window.
 *
 * @returns the sorted list of ids that failed to build (empty on success).
 *
 * BUG FIX: `failedIds.sort()` used the default comparator, which compares
 * string representations and mis-orders numeric ids (e.g. [9, 100] sorts to
 * [100, 9]); runDelta relies on failedIds[0] being the smallest failed id.
 * The explicit comparator orders numbers numerically and date strings
 * lexicographically (correct for ISO-style dates).
 */
async function execBuild(selectorFlags, from, to, verbose, shouldSaveState = false) {
    let failedIds = [];
    const windows = range(from, to, buildBatchSize * buildConcurrency);
    for (const [windowStart, windowEnd] of windows) {
        inline.log(`[MODE INDEXER] building window ${windowStart}~${windowEnd}, concurrency = ${buildConcurrency}`);
        const batches = range(windowStart, windowEnd, buildBatchSize);
        // add a retry for errors
        const batchResults = await Promise.all(batches.map(async ([batchStart, batchEnd]) => await exponentialRetry(async () => {
            try {
                const ids = await build({
                    ...selectorFlags,
                    from: batchStart,
                    to: batchEnd,
                    verbose,
                });
                if (ids && ids.length > 0) {
                    return ids;
                }
                else {
                    return false;
                }
            }
            catch (err) {
                // A thrown error marks the whole batch range as failed.
                inline.error(`[MODE INDEXER] got error in build`, err);
                return fillRange(batchStart, batchEnd);
            }
        }, {
            maxRetry,
            // retry while the result is truthy, i.e. while there are failed ids
            test: (r) => !r,
            verbose,
        })));
        if (shouldSaveState) {
            await createRecord(STATE_TABLE_NAME, {
                name: `${name}Since(${toSelectorString(selectorFlags)})`,
                value: windowEnd,
            });
            if (verbose) {
                inline.log(`[MODE INDEXER] updated processed ${finalState.type} to ${windowEnd}`);
            }
        }
        batchResults.forEach((ids) => {
            if (ids) {
                failedIds = failedIds.concat(ids);
            }
        });
    }
    // was failedIds.sort() — lexicographic on numbers; see doc comment.
    failedIds.sort((a, b) => (a < b ? -1 : a > b ? 1 : 0));
    return failedIds;
}
|
|
289
|
+
// Full rebuild of [from, to]: flags RebuildState=running (which stops delta
// runs from starting), builds, then commits the Since cursor and
// RebuildState=succeed before exiting the process (1 on partial failure,
// 0 on full success).
async function runRebuild(selectorFlags, from, to, verbose) {
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] rebuilding, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${buildBatchSize}, concurrency=${buildConcurrency}`);
    const rebuildStateKey = `${name}RebuildState(${toSelectorString(selectorFlags)})`;
    // add a flag to stop delta from running
    await createRecord(STATE_TABLE_NAME, { name: rebuildStateKey, value: "running" });
    const failedIds = await execBuild(selectorFlags, from, to, verbose, true);
    // even if some transactions failed we still commit, so the delta job can
    // keep running — rebuilds are usually heavy and not worth repeating.
    await createRecord(STATE_TABLE_NAME, {
        name: `${name}Since(${toSelectorString(selectorFlags)})`,
        value: to,
    });
    await createRecord(STATE_TABLE_NAME, { name: rebuildStateKey, value: "succeed" });
    if (failedIds.length === 0) {
        inline.log(`[MODE INDEXER] built ${offsetRange(from, to)} ${finalState.type}(s) successfully in ${Date.now() - startTime}ms`);
        process.exit(0);
    }
    inline.log(`[MODE INDEXER] built ${offsetRange(from, to)} ${finalState.type}(s) with some failed ${finalState.type}`, failedIds);
    process.exit(1);
}
|
|
317
|
+
// Incremental build from the last committed cursor + 1 up to the current
// max id. Commits DeltaState plus the Since cursor, then exits the process
// (returns early without exiting only when there is nothing to do).
async function runDelta(selectorFlags, from, to, verbose) {
    const startTime = Date.now();
    if (to < from) {
        inline.log(`[MODE INDEXER] skip delta, there're no more items need to be processed, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}`);
        return;
    }
    inline.log(`[MODE INDEXER] starting delta, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${buildBatchSize}, concurrency=${buildConcurrency}`);
    try {
        const failedIds = await execBuild(selectorFlags, from, to, verbose, true);
        if (failedIds.length > 0) {
            inline.log("[MODE INDEXER] built with some failed txs", failedIds);
            await createRecord(STATE_TABLE_NAME, {
                name: `${name}DeltaState(${toSelectorString(selectorFlags)})`,
                value: "failed",
            });
            // Cursor is set to min(to, first failed id). NOTE(review): the next
            // delta resumes at cursor + 1, which appears to skip the first failed
            // id itself — confirm whether that is intentional.
            await createRecord(STATE_TABLE_NAME, {
                name: `${name}Since(${toSelectorString(selectorFlags)})`,
                value: to < failedIds[0] ? to : failedIds[0],
            });
            process.exit(1);
        }
        await createRecord(STATE_TABLE_NAME, {
            name: `${name}DeltaState(${toSelectorString(selectorFlags)})`,
            value: "succeed",
        });
        await createRecord(STATE_TABLE_NAME, {
            name: `${name}Since(${toSelectorString(selectorFlags)})`,
            value: to,
        });
        inline.log(`[MODE INDEXER] built successfully in ${Date.now() - startTime}ms`);
        process.exit(0);
    }
    catch (err) {
        inline.error("[MODE INDEXER] delta build failed", from, to, err);
        process.exit(1);
    }
}
|
|
356
|
+
// Resets all three state-table cursors for this indexer + selector so the
// next rebuild starts from scratch.
async function runReset(selectorFlags) {
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] starting reset, ${toSelectorString(selectorFlags, ", ")}`);
    inline.log("[MODE INDEXER] reset state", STATE_TABLE_NAME);
    const suffix = `(${toSelectorString(selectorFlags)})`;
    await createRecord(STATE_TABLE_NAME, { name: `${name}Since${suffix}`, value: 0 });
    await createRecord(STATE_TABLE_NAME, { name: `${name}Validate${suffix}`, value: 0 });
    await createRecord(STATE_TABLE_NAME, { name: `${name}RebuildState${suffix}`, value: "init" });
    inline.log(`[MODE INDEXER] reset successfully in ${Date.now() - startTime}ms`);
}
|
|
374
|
+
/**
 * CLI entry point for a mode indexer: parses flags with meow and dispatches
 * to runMode. On failure, logs the error and exits non-zero.
 *
 * BUG FIX: runMode is now awaited inside the try block. Previously the
 * promise was returned un-awaited, so async rejections escaped the
 * try/catch entirely and went unhandled instead of being logged and turned
 * into exit code 1.
 */
async function run() {
    if (!binaryName) {
        binaryName = getBinaryName();
    }
    const cli = meow(`
  Usage

    $ ${binaryName} <options>

  Options
    ${selector ? getSelectorDesc(selector) : ""}
    --mode could be delta/rebuild/resume-rebuild/validate/one/range/reset
    --from min ${finalState.type} to build
    --to max ${finalState.type} to build
    --status print status of indexer and exit
    --verbose Output debug messages
`, {
        importMeta: import.meta,
        description: false,
        version: false,
        flags: {
            ...getSelectorFlags(selector),
            mode: {
                type: "string",
                default: "delta",
            },
            from: {
                aliases: ["since"],
                type: "string",
            },
            to: {
                aliases: ["until"],
                type: "string",
            },
            status: {
                type: "boolean",
                default: false,
            },
            verbose: {
                type: "boolean",
                default: false,
            },
        },
    });
    try {
        // was `return runMode(...)` — without await the catch below was dead.
        return await runMode(cli.flags);
    }
    catch (err) {
        inline.error(err);
        process.exit(1);
    }
}
|
|
426
|
+
return { run };
|
|
427
|
+
}
|
|
428
|
+
// for indexers that don't rely on a cursor
|
|
429
|
+
// e.g. should always rebuild everything from scratch
|
|
430
|
+
// or that the state can be easily inferred from existing data
|
|
431
|
+
// Factory for cursor-less indexers: every run rebuilds everything via the
// supplied `build` callback (no state table involvement, unlike
// createModeIndexerApp). Returns { run }, the CLI entry point.
//
// Params (options object):
//   binaryName - CLI name shown in help; derived via getBinaryName() if omitted
//   selector   - selector definition used to generate CLI flags and help text
//   build      - async callback invoked with the parsed flags
//   maxRetry   - retry budget passed to exponentialRetry (default 2)
function createIndexerApp({ binaryName, selector = {}, build, maxRetry = 2, }) {
    async function run() {
        if (!binaryName) {
            binaryName = getBinaryName();
        }
        // NOTE(review): help-text indentation reconstructed from a diff
        // rendering — display-only, but confirm against the original source.
        const cli = meow(`
  Usage
    $ ${binaryName} <options>

  Options
    ${selector ? getSelectorDesc(selector) : ""}
    --verbose Output debug messages
`, {
            importMeta: import.meta,
            description: false,
            version: false,
            flags: {
                ...getSelectorFlags(selector),
                verbose: {
                    type: "boolean",
                    default: false,
                },
            },
        });
        // Runs `build` once (with retries); throws when every attempt failed.
        async function runBuild(flags) {
            const { verbose: untypedVerbose, ...untypedSelectorFlags } = flags;
            const verbose = untypedVerbose;
            const selectorFlags = untypedSelectorFlags;
            const startTime = Date.now();
            if (Object.keys(selectorFlags).length > 0) {
                inline.log(`[INDEXER] starting build, ${toSelectorString(selectorFlags, ", ")}`);
            }
            else {
                inline.log(`[INDEXER] starting build`);
            }
            const result = await exponentialRetry(async () => {
                try {
                    await build(flags);
                    return true;
                }
                catch (err) {
                    inline.log(`[INDEXER] got error in build`, err);
                    return false;
                }
            }, {
                maxRetry,
                // success predicate: truthy result means the build attempt worked
                test: (r) => r,
                verbose: verbose,
            });
            if (!result) {
                throw new Error(`[INDEXER] Build failed due to critical errors`);
            }
            inline.log(`[INDEXER] build successfully in ${Date.now() - startTime}ms`);
        }
        // Any failure (after retries) is logged and becomes exit code 1.
        return runBuild(cli.flags).catch((err) => {
            inline.error(err);
            process.exit(1);
        });
    }
    return { run };
}
|
|
492
|
+
export { increaseId, createModeIndexerApp, createIndexerApp };
|
package/dist/log.d.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/** Renders an arbitrary value as a single human-readable string (arrays as
 * `[a, b]`, plain objects as `{k: v}`, everything else via template
 * stringification). */
declare function print(o: unknown): string;
/** Joins the printed forms of `params` into one line, replacing embedded
 * newlines — presumably to keep one log event per line (e.g. for AWS
 * Athena ingestion); confirm against log.ts. */
declare function getLine(params: unknown[]): string;
/** Single-line logger: each call produces one timestamped line. */
declare const inline: {
    debug: (...args: unknown[]) => void;
    log: (...args: unknown[]) => void;
    error: (...args: unknown[]) => void;
};
/** Console-style logger: timestamp prefix, args passed through unflattened. */
declare const logger: {
    debug: (...args: unknown[]) => void;
    log: (...args: unknown[]) => void;
    error: (...args: unknown[]) => void;
};
export { print, getLine, inline, logger };
//# sourceMappingURL=log.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../log.ts"],"names":[],"mappings":"AAIA,iBAAS,KAAK,CAAC,CAAC,EAAE,OAAO,GAAG,MAAM,CAYjC;AAED,iBAAS,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,UAWjC;AAMD,QAAA,MAAM,MAAM;qBACgB,OAAO,EAAE;mBAKX,OAAO,EAAE;qBAKP,OAAO,EAAE;CAKpC,CAAC;AAEF,QAAA,MAAM,MAAM;qBACgB,OAAO,EAAE;mBAKX,OAAO,EAAE;qBAKP,OAAO,EAAE;CAKpC,CAAC;AAEF,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC"}
|
package/dist/log.js
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// True only for plain objects (constructor === Object); null, arrays,
// class instances and primitives all yield false.
function isObject(a) {
    if (!a) {
        return false;
    }
    return a.constructor === Object;
}
|
|
4
|
+
// Renders a value as a single readable string: arrays as "[a, b]",
// plain objects as "{k: v}", anything else via template stringification.
function print(o) {
    if (Array.isArray(o)) {
        const parts = o.map(print);
        return `[${parts.join(", ")}]`;
    }
    if (isObject(o)) {
        const pairs = Object.keys(o).map((k) => `${k}: ${o[k]}`);
        return `{${pairs.join(", ")}}`;
    }
    return `${o}`;
}
|
|
15
|
+
// Joins the printed form of every param into one line, converting embedded
// newlines to tabs (AWS Athena-friendly). JSON.stringify is deliberately
// avoided — it can be slow for large objects.
function getLine(params) {
    const pieces = params.map((p) => `${print(p)} `.replace(/\n/gm, "\t"));
    return pieces.join("").trim();
}
|
|
25
|
+
// ISO-8601 UTC timestamp used as the log-line prefix.
function timestamp() {
    const now = new Date();
    return now.toISOString();
}
|
|
28
|
+
// Single-line logger: each call becomes one "<iso-timestamp> <line>" entry.
// debug only emits in development; log/error are silenced under NODE_ENV=test.
const inline = {
    debug(...args) {
        if (process.env.NODE_ENV !== "development") {
            return;
        }
        console.log(`${timestamp()} ${getLine(args)}`);
    },
    log(...args) {
        if (process.env.NODE_ENV === "test") {
            return;
        }
        console.log(`${timestamp()} ${getLine(args)}`);
    },
    error(...args) {
        if (process.env.NODE_ENV === "test") {
            return;
        }
        console.error(`${timestamp()} ${getLine(args)}`);
    },
};
|
|
45
|
+
// Console-style logger: "[iso-timestamp]" prefix, args passed straight
// through to console (objects keep their rich inspection). Same env gating
// as `inline`: debug only in development, log/error muted under test.
const logger = {
    debug(...args) {
        if (process.env.NODE_ENV !== "development") {
            return;
        }
        console.log(`[${timestamp()}]`, ...args);
    },
    log(...args) {
        if (process.env.NODE_ENV === "test") {
            return;
        }
        console.log(`[${timestamp()}]`, ...args);
    },
    error(...args) {
        if (process.env.NODE_ENV === "test") {
            return;
        }
        console.error(`[${timestamp()}]`, ...args);
    },
};
|
|
62
|
+
export { print, getLine, inline, logger };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"object-hash.d.ts","sourceRoot":"","sources":["../object-hash.ts"],"names":[],"mappings":"AAEA,wBAAgB,OAAO,CAAC,GAAG,EAAE,OAAO,UAInC"}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import xh from "@node-rs/xxhash";
|
|
2
|
+
// xxh3-based content hash of an arbitrary JSON-like value, as a hex string.
export function getHash(obj) {
    const hasher = xh.xxh3.Xxh3.withSeed();
    hash(obj, hasher);
    const digest = hasher.digest();
    return digest.toString(16);
}
|
|
7
|
+
// Feeds one value into the running xxh3 hasher, dispatching on its type.
// Guard order matters and mirrors the original: null/undefined first, then
// primitives, then Date before the container checks.
function hash(obj, xxh3) {
    if (obj === null) {
        xxh3.update("null");
        return;
    }
    if (obj === undefined) {
        xxh3.update("undefined");
        return;
    }
    const kind = typeof obj;
    if (kind === "string") {
        xxh3.update(obj);
        return;
    }
    if (kind === "number" || kind === "boolean" || kind === "bigint") {
        xxh3.update(obj.toString());
        return;
    }
    if (obj instanceof Date) {
        xxh3.update(obj.toISOString());
        return;
    }
    if (Array.isArray(obj)) {
        arrayHash(obj, xxh3);
        return;
    }
    if (obj instanceof Set) {
        setHash(obj, xxh3);
        return;
    }
    if (obj instanceof Map) {
        mapHash(obj, xxh3);
        return;
    }
    if (kind === "object") {
        objectHash(obj, xxh3);
        return;
    }
    throw new Error(`Unsupported type: ${obj}`);
}
|
|
45
|
+
// Hashes array contents between "[" / "]" delimiters so nesting is
// distinguishable from flattened content.
function arrayHash(array, xxh3) {
    xxh3.update("[");
    array.forEach((item) => hash(item, xxh3));
    xxh3.update("]");
}
|
|
52
|
+
// Hashing Sets is explicitly rejected rather than producing a digest with
// unspecified ordering semantics.
// eslint-disable-next-line @typescript-eslint/no-unused-vars
function setHash(_set, _xxh3) {
    const message = "Set hashing not implemented";
    throw new Error(message);
}
|
|
56
|
+
// Hashes a Map by feeding key/value pairs in sorted-key order.
// NOTE(review): keys are sorted with localeCompare, which assumes string
// keys and is locale-sensitive — digests could differ across runtimes with
// different locale data; confirm whether persisted hashes depend on this.
// Also note: unlike arrayHash, no delimiters are emitted around entries.
function mapHash(map, xxh3) {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const array = Array.from(map.entries()).sort(([aKey], [bKey]) => aKey.localeCompare(bKey));
    for (const [key, value] of array) {
        hash(key, xxh3);
        hash(value, xxh3);
    }
}
|
|
64
|
+
// Hashes a plain object by feeding key/value pairs in sorted-key order so
// that key insertion order does not affect the digest.
// NOTE(review): sorting via localeCompare is locale-sensitive — confirm
// digests are not persisted across environments with differing locale data.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function objectHash(obj, xxh3) {
    const array = Object.entries(obj).sort(([aKey], [bKey]) => aKey.localeCompare(bKey));
    for (const [key, value] of array) {
        hash(key, xxh3);
        hash(value, xxh3);
    }
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/** Response envelope returned by the Opsgenie alert API. */
type OpsgenieResponse = {
    data: {
        success: boolean;
        action: string;
        processedAt: string;
        integrationId: string;
        isSuccess: boolean;
        status: string;
        alertId: string;
        alias: string;
    };
    took: number;
    requestId: string;
};
/**
 * Posts an alert message to Opsgenie.
 *
 * @param body - alert payload; `alias` is optional (presumably used by
 *   Opsgenie for alert de-duplication — confirm against the implementation)
 * @param apiKey - Opsgenie API key; behavior when omitted is defined by the
 *   implementation (likely an env-provided default — verify in opsgenie.ts)
 * @param verbose - enable debug output
 */
export declare function postGenieMessage(body: {
    alias?: string;
    message: string;
    description?: string;
}, apiKey?: string, verbose?: boolean): Promise<OpsgenieResponse>;
export {};
//# sourceMappingURL=opsgenie.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"opsgenie.d.ts","sourceRoot":"","sources":["../opsgenie.ts"],"names":[],"mappings":"AAEA,KAAK,gBAAgB,GAAG;IACtB,IAAI,EAAE;QACJ,OAAO,EAAE,OAAO,CAAC;QACjB,MAAM,EAAE,MAAM,CAAC;QACf,WAAW,EAAE,MAAM,CAAC;QACpB,aAAa,EAAE,MAAM,CAAC;QACtB,SAAS,EAAE,OAAO,CAAC;QACnB,MAAM,EAAE,MAAM,CAAC;QACf,OAAO,EAAE,MAAM,CAAC;QAChB,KAAK,EAAE,MAAM,CAAC;KACf,CAAC;IACF,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAUF,wBAAsB,gBAAgB,CACpC,IAAI,EAAE;IACJ,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB,EACD,MAAM,CAAC,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,OAAO,6BAoClB"}
|