@certik/skynet 0.7.10 → 0.7.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.editorconfig +6 -0
- package/.eslintrc.js +13 -13
- package/CHANGELOG.md +18 -0
- package/README.md +23 -23
- package/abi.js +227 -227
- package/address.js +18 -18
- package/block.js +83 -83
- package/dynamodb.js +456 -456
- package/examples/deploy-consumer +0 -0
- package/examples/deploy-indexer +0 -0
- package/examples/deploy-mode-indexer +0 -0
- package/examples/deploy-producer +0 -0
- package/examples/indexer +0 -0
- package/examples/kafka-consumer +0 -0
- package/examples/kafka-producer +1 -1
- package/examples/mode-indexer +0 -0
- package/inquiry.js +14 -14
- package/kafka.js +129 -113
- package/labelling.js +40 -4
- package/metric.js +65 -65
- package/package.json +2 -2
- package/price.js +48 -48
- package/primitive.js +77 -77
- package/rateLimit.js +21 -21
- package/s3.js +93 -93
- package/scan.js +67 -67
- package/sqs.js +12 -12
- package/token.js +44 -44
- package/transaction.js +47 -47
- package/util.js +58 -58
package/examples/deploy-consumer
CHANGED
|
File without changes
|
package/examples/deploy-indexer
CHANGED
|
File without changes
|
package/examples/deploy-mode-indexer
CHANGED
|
File without changes
|
package/examples/deploy-producer
CHANGED
|
File without changes
|
package/examples/indexer
CHANGED
|
File without changes
|
package/examples/kafka-consumer
CHANGED
|
File without changes
|
package/examples/kafka-producer
CHANGED
package/examples/mode-indexer
CHANGED
|
File without changes
|
package/inquiry.js
CHANGED
|
@@ -1,15 +1,15 @@
|
|
|
1
|
-
const fetch = require("node-fetch");
|
|
2
|
-
const { getEnvironment } = require("./env");
|
|
3
|
-
|
|
4
|
-
const ADDRESS_INQUIRY_URL = getEnvironment() === "prd" ? (
|
|
5
|
-
"https://aml-inquiry.certik-skynet.com/"
|
|
6
|
-
) : (
|
|
7
|
-
"https://2kbfm0t10j.execute-api.us-east-1.amazonaws.com/dev/"
|
|
8
|
-
);
|
|
9
|
-
|
|
10
|
-
async function createAddressRecord(address) {
|
|
11
|
-
// Use skynet-aml-address-inquiry to add addresses as EOA or smart contract
|
|
12
|
-
return await fetch(`${ADDRESS_INQUIRY_URL}?address=${address}`);
|
|
13
|
-
}
|
|
14
|
-
|
|
1
|
+
const fetch = require("node-fetch");
|
|
2
|
+
const { getEnvironment } = require("./env");
|
|
3
|
+
|
|
4
|
+
const ADDRESS_INQUIRY_URL = getEnvironment() === "prd" ? (
|
|
5
|
+
"https://aml-inquiry.certik-skynet.com/"
|
|
6
|
+
) : (
|
|
7
|
+
"https://2kbfm0t10j.execute-api.us-east-1.amazonaws.com/dev/"
|
|
8
|
+
);
|
|
9
|
+
|
|
10
|
+
async function createAddressRecord(address) {
|
|
11
|
+
// Use skynet-aml-address-inquiry to add addresses as EOA or smart contract
|
|
12
|
+
return await fetch(`${ADDRESS_INQUIRY_URL}?address=${address}`);
|
|
13
|
+
}
|
|
14
|
+
|
|
15
15
|
module.exports = { createAddressRecord };
|
package/kafka.js
CHANGED
|
@@ -3,11 +3,7 @@ const meow = require("meow");
|
|
|
3
3
|
const { getEnvironment, getEnvOrThrow } = require("./env");
|
|
4
4
|
const { wait } = require("./availability");
|
|
5
5
|
const { Kafka, logLevel } = require("kafkajs");
|
|
6
|
-
const {
|
|
7
|
-
getSelectorFlags,
|
|
8
|
-
getSelectorDesc,
|
|
9
|
-
toSelectorString
|
|
10
|
-
} = require("./selector");
|
|
6
|
+
const { getSelectorFlags, getSelectorDesc, toSelectorString } = require("./selector");
|
|
11
7
|
const { createRecord, getRecordByKey, deleteRecordsByHashKey } = require("./dynamodb");
|
|
12
8
|
const { exponentialRetry } = require("./availability");
|
|
13
9
|
|
|
@@ -15,7 +11,7 @@ const STATE_TABLE_NAME = "skynet-" + getEnvironment() + "-indexer-state";
|
|
|
15
11
|
|
|
16
12
|
async function getProducerLatestId(name, selectorFlags) {
|
|
17
13
|
const record = await getRecordByKey(STATE_TABLE_NAME, {
|
|
18
|
-
name: `${name}At(${toSelectorString(selectorFlags)})`
|
|
14
|
+
name: `${name}At(${toSelectorString(selectorFlags)})`,
|
|
19
15
|
});
|
|
20
16
|
|
|
21
17
|
if (record) {
|
|
@@ -28,17 +24,12 @@ async function getProducerLatestId(name, selectorFlags) {
|
|
|
28
24
|
async function setProducerLatestId(name, selectorFlags, value) {
|
|
29
25
|
await createRecord(STATE_TABLE_NAME, {
|
|
30
26
|
name: `${name}At(${toSelectorString(selectorFlags)})`,
|
|
31
|
-
value
|
|
27
|
+
value,
|
|
32
28
|
});
|
|
33
29
|
}
|
|
34
30
|
|
|
35
31
|
async function deleteProducerLatestId(name, selectorFlags, verbose) {
|
|
36
|
-
await deleteRecordsByHashKey(
|
|
37
|
-
STATE_TABLE_NAME,
|
|
38
|
-
null,
|
|
39
|
-
`${name}At(${toSelectorString(selectorFlags)})`,
|
|
40
|
-
verbose
|
|
41
|
-
);
|
|
32
|
+
await deleteRecordsByHashKey(STATE_TABLE_NAME, null, `${name}At(${toSelectorString(selectorFlags)})`, verbose);
|
|
42
33
|
}
|
|
43
34
|
|
|
44
35
|
function sendToTopic(producer, topic, verbose) {
|
|
@@ -49,9 +40,9 @@ function sendToTopic(producer, topic, verbose) {
|
|
|
49
40
|
|
|
50
41
|
await producer.send({
|
|
51
42
|
topic,
|
|
52
|
-
messages: records.map(r => ({
|
|
53
|
-
value: Buffer.from(typeof r === "string" ? r : JSON.stringify(r))
|
|
54
|
-
}))
|
|
43
|
+
messages: records.map((r) => ({
|
|
44
|
+
value: Buffer.from(typeof r === "string" ? r : JSON.stringify(r)),
|
|
45
|
+
})),
|
|
55
46
|
});
|
|
56
47
|
}
|
|
57
48
|
|
|
@@ -62,8 +53,88 @@ function getDefaultKafkaCredential() {
|
|
|
62
53
|
return {
|
|
63
54
|
server: getEnvOrThrow("SKYNET_KAFKA_SERVER"),
|
|
64
55
|
username: getEnvOrThrow("SKYNET_KAFKA_USERNAME"),
|
|
65
|
-
password: getEnvOrThrow("SKYNET_KAFKA_PASSWORD")
|
|
56
|
+
password: getEnvOrThrow("SKYNET_KAFKA_PASSWORD"),
|
|
57
|
+
};
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async function initProducer(clientId, customKafkaCredential = undefined) {
|
|
61
|
+
const kafkaCredential = customKafkaCredential ?? getDefaultKafkaCredential();
|
|
62
|
+
const kafka = new Kafka({
|
|
63
|
+
clientId,
|
|
64
|
+
brokers: [kafkaCredential.server],
|
|
65
|
+
logLevel: logLevel.ERROR,
|
|
66
|
+
ssl: true,
|
|
67
|
+
sasl: {
|
|
68
|
+
mechanism: "plain",
|
|
69
|
+
username: kafkaCredential.username,
|
|
70
|
+
password: kafkaCredential.password,
|
|
71
|
+
},
|
|
72
|
+
});
|
|
73
|
+
const producerInstance = kafka.producer();
|
|
74
|
+
await producerInstance.connect();
|
|
75
|
+
|
|
76
|
+
return producerInstance;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
async function closeProducer(producerInstance) {
|
|
80
|
+
await producerInstance.disconnect();
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
async function initConsumer(clientId, customKafkaCredential = undefined) {
|
|
84
|
+
const kafkaCredential = customKafkaCredential ? customKafkaCredential : getDefaultKafkaCredential();
|
|
85
|
+
const kafka = new Kafka({
|
|
86
|
+
clientId,
|
|
87
|
+
brokers: [kafkaCredential.server],
|
|
88
|
+
logLevel: logLevel.ERROR,
|
|
89
|
+
ssl: true,
|
|
90
|
+
sasl: {
|
|
91
|
+
mechanism: "plain",
|
|
92
|
+
username: kafkaCredential.username,
|
|
93
|
+
password: kafkaCredential.password,
|
|
94
|
+
},
|
|
95
|
+
});
|
|
96
|
+
const consumerInstance = kafka.consumer({ groupId: clientId });
|
|
97
|
+
|
|
98
|
+
await consumerInstance.connect();
|
|
99
|
+
|
|
100
|
+
return consumerInstance;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
async function closeConsumer(consumerInstance) {
|
|
104
|
+
await consumerInstance.disconnect();
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
async function produceMessages(producerId, callback) {
|
|
108
|
+
const producerInstance = await initProducer(producerId);
|
|
109
|
+
const send = async (topic, messages) => {
|
|
110
|
+
let records;
|
|
111
|
+
if (Array.isArray(messages)) {
|
|
112
|
+
records = messages;
|
|
113
|
+
} else {
|
|
114
|
+
records = [messages];
|
|
115
|
+
}
|
|
116
|
+
return sendToTopic(producerInstance, topic, false)(records);
|
|
117
|
+
};
|
|
118
|
+
await callback(send);
|
|
119
|
+
await closeProducer(producerInstance);
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
async function consumeMessages(consumerId, topic, callback) {
|
|
123
|
+
const consumerInstance = await initConsumer(consumerId);
|
|
124
|
+
|
|
125
|
+
await consumerInstance.subscribe({ topic });
|
|
126
|
+
|
|
127
|
+
const stopConsumeMessages = async () => {
|
|
128
|
+
await closeConsumer(consumerInstance);
|
|
66
129
|
};
|
|
130
|
+
await consumerInstance.run({
|
|
131
|
+
eachMessage: async ({ topic: receivedTopic, message }) => {
|
|
132
|
+
if (topic === receivedTopic) {
|
|
133
|
+
await callback(message, stopConsumeMessages);
|
|
134
|
+
}
|
|
135
|
+
},
|
|
136
|
+
});
|
|
137
|
+
return stopConsumeMessages;
|
|
67
138
|
}
|
|
68
139
|
|
|
69
140
|
function createProducerApp({ name, selector = {}, producer, state }) {
|
|
@@ -78,7 +149,7 @@ function createProducerApp({ name, selector = {}, producer, state }) {
|
|
|
78
149
|
getMaxId: async () => {
|
|
79
150
|
throw new Error("must implement state.getMaxId");
|
|
80
151
|
},
|
|
81
|
-
...state
|
|
152
|
+
...state,
|
|
82
153
|
};
|
|
83
154
|
|
|
84
155
|
const cli = meow(
|
|
@@ -102,29 +173,28 @@ ${getSelectorDesc(selector)}
|
|
|
102
173
|
from: {
|
|
103
174
|
alias: "since",
|
|
104
175
|
type: "number",
|
|
105
|
-
default: 0
|
|
176
|
+
default: 0,
|
|
106
177
|
},
|
|
107
178
|
to: {
|
|
108
179
|
alias: "until",
|
|
109
180
|
type: "number",
|
|
110
|
-
default: 0
|
|
181
|
+
default: 0,
|
|
111
182
|
},
|
|
112
183
|
verbose: {
|
|
113
184
|
type: "boolean",
|
|
114
|
-
default: false
|
|
115
|
-
}
|
|
116
|
-
}
|
|
185
|
+
default: false,
|
|
186
|
+
},
|
|
187
|
+
},
|
|
117
188
|
}
|
|
118
189
|
);
|
|
119
190
|
|
|
120
191
|
const { topic, deadLetterTopic, produce, batchSize, maxRetry } = {
|
|
121
192
|
batchSize: 50,
|
|
122
193
|
maxRetry: 2,
|
|
123
|
-
...producer
|
|
194
|
+
...producer,
|
|
124
195
|
};
|
|
125
196
|
|
|
126
|
-
const kafkaCredential =
|
|
127
|
-
(producer && producer.kafkaCredential) || getDefaultKafkaCredential();
|
|
197
|
+
const kafkaCredential = (producer && producer.kafkaCredential) || getDefaultKafkaCredential();
|
|
128
198
|
|
|
129
199
|
async function build({ from, to, status, reset, verbose, ...selectorFlags }) {
|
|
130
200
|
if (reset) {
|
|
@@ -157,33 +227,16 @@ ${getSelectorDesc(selector)}
|
|
|
157
227
|
}
|
|
158
228
|
}
|
|
159
229
|
|
|
160
|
-
const finalTopic =
|
|
161
|
-
typeof topic === "function" ? topic(selectorFlags) : topic;
|
|
230
|
+
const finalTopic = typeof topic === "function" ? topic(selectorFlags) : topic;
|
|
162
231
|
const finalDeadLetterTopic =
|
|
163
|
-
typeof deadLetterTopic === "function"
|
|
164
|
-
? deadLetterTopic(selectorFlags)
|
|
165
|
-
: deadLetterTopic;
|
|
232
|
+
typeof deadLetterTopic === "function" ? deadLetterTopic(selectorFlags) : deadLetterTopic;
|
|
166
233
|
|
|
167
234
|
const updateInterval =
|
|
168
235
|
typeof finalState.updateInterval === "function"
|
|
169
236
|
? finalState.updateInterval(selectorFlags)
|
|
170
237
|
: finalState.updateInterval;
|
|
171
238
|
|
|
172
|
-
const kafka = new Kafka({
|
|
173
|
-
clientId: name,
|
|
174
|
-
brokers: [kafkaCredential.server],
|
|
175
|
-
logLevel: logLevel.ERROR,
|
|
176
|
-
ssl: true,
|
|
177
|
-
sasl: {
|
|
178
|
-
mechanism: "plain",
|
|
179
|
-
username: kafkaCredential.username,
|
|
180
|
-
password: kafkaCredential.password
|
|
181
|
-
}
|
|
182
|
-
});
|
|
183
|
-
|
|
184
|
-
const producerInstance = kafka.producer();
|
|
185
|
-
|
|
186
|
-
await producerInstance.connect();
|
|
239
|
+
const producerInstance = await initProducer(name, kafkaCredential);
|
|
187
240
|
|
|
188
241
|
console.log("producing to topic", finalTopic);
|
|
189
242
|
|
|
@@ -197,7 +250,7 @@ ${getSelectorDesc(selector)}
|
|
|
197
250
|
async function cleanUpForLongRunningMode() {
|
|
198
251
|
clearInterval(pollHandle);
|
|
199
252
|
|
|
200
|
-
await producerInstance.disconnect();
|
|
253
|
+
await closeProducer(producerInstance);
|
|
201
254
|
|
|
202
255
|
process.exit(0);
|
|
203
256
|
}
|
|
@@ -214,11 +267,7 @@ ${getSelectorDesc(selector)}
|
|
|
214
267
|
// the main loop
|
|
215
268
|
for (let i = from; isLongRunning || i <= lastId; ) {
|
|
216
269
|
if (i > lastId) {
|
|
217
|
-
console.log(
|
|
218
|
-
"no more items to process, sleep for",
|
|
219
|
-
updateInterval,
|
|
220
|
-
"ms"
|
|
221
|
-
);
|
|
270
|
+
console.log("no more items to process, sleep for", updateInterval, "ms");
|
|
222
271
|
|
|
223
272
|
await wait(updateInterval);
|
|
224
273
|
} else {
|
|
@@ -236,31 +285,24 @@ ${getSelectorDesc(selector)}
|
|
|
236
285
|
from: batchStart,
|
|
237
286
|
to: batchEnd,
|
|
238
287
|
verbose,
|
|
239
|
-
send: sendToTopic(producerInstance, finalTopic, verbose)
|
|
288
|
+
send: sendToTopic(producerInstance, finalTopic, verbose),
|
|
240
289
|
});
|
|
241
290
|
|
|
242
291
|
if (Array.isArray(failed) && failed.length > 0) {
|
|
243
|
-
await sendToTopic(
|
|
244
|
-
producerInstance,
|
|
245
|
-
finalDeadLetterTopic,
|
|
246
|
-
verbose
|
|
247
|
-
)(failed);
|
|
292
|
+
await sendToTopic(producerInstance, finalDeadLetterTopic, verbose)(failed);
|
|
248
293
|
}
|
|
249
294
|
|
|
250
295
|
return true;
|
|
251
296
|
} catch (err) {
|
|
252
|
-
console.error(
|
|
253
|
-
`Critical error processing batch ${batchStart}~${batchEnd}`,
|
|
254
|
-
err
|
|
255
|
-
);
|
|
297
|
+
console.error(`Critical error processing batch ${batchStart}~${batchEnd}`, err);
|
|
256
298
|
|
|
257
299
|
return false;
|
|
258
300
|
}
|
|
259
301
|
},
|
|
260
302
|
{
|
|
261
303
|
maxRetry,
|
|
262
|
-
test: r => r,
|
|
263
|
-
verbose
|
|
304
|
+
test: (r) => r,
|
|
305
|
+
verbose,
|
|
264
306
|
}
|
|
265
307
|
);
|
|
266
308
|
|
|
@@ -269,13 +311,11 @@ ${getSelectorDesc(selector)}
|
|
|
269
311
|
cleanUpForLongRunningMode();
|
|
270
312
|
}
|
|
271
313
|
|
|
272
|
-
throw new Error(
|
|
273
|
-
`Terminate producer due to critical errors, batch ${batchStart}~${batchEnd}`
|
|
274
|
-
);
|
|
314
|
+
throw new Error(`Terminate producer due to critical errors, batch ${batchStart}~${batchEnd}`);
|
|
275
315
|
}
|
|
276
316
|
|
|
277
317
|
await setProducerLatestId(name, selectorFlags, batchEnd);
|
|
278
|
-
|
|
318
|
+
|
|
279
319
|
if (i + batchSize <= lastId) i += batchSize;
|
|
280
320
|
else i = batchEnd + 1;
|
|
281
321
|
}
|
|
@@ -285,12 +325,10 @@ ${getSelectorDesc(selector)}
|
|
|
285
325
|
}
|
|
286
326
|
}
|
|
287
327
|
|
|
288
|
-
|
|
289
|
-
producerInstance.disconnect();
|
|
290
|
-
});
|
|
328
|
+
await closeProducer(producerInstance);
|
|
291
329
|
}
|
|
292
330
|
|
|
293
|
-
return build(cli.flags).catch(err => {
|
|
331
|
+
return build(cli.flags).catch((err) => {
|
|
294
332
|
console.error(err);
|
|
295
333
|
process.exit(1);
|
|
296
334
|
});
|
|
@@ -320,55 +358,36 @@ ${getSelectorDesc(selector)}
|
|
|
320
358
|
...getSelectorFlags(selector),
|
|
321
359
|
verbose: {
|
|
322
360
|
type: "boolean",
|
|
323
|
-
default: false
|
|
324
|
-
}
|
|
325
|
-
}
|
|
361
|
+
default: false,
|
|
362
|
+
},
|
|
363
|
+
},
|
|
326
364
|
}
|
|
327
365
|
);
|
|
328
366
|
|
|
329
367
|
const { topic, consume, maxRetry } = {
|
|
330
368
|
maxRetry: 2,
|
|
331
|
-
...consumer
|
|
369
|
+
...consumer,
|
|
332
370
|
};
|
|
333
371
|
|
|
334
|
-
const kafkaCredential =
|
|
335
|
-
(consume && consumer.kafkaCredential) || getDefaultKafkaCredential();
|
|
372
|
+
const kafkaCredential = (consume && consumer.kafkaCredential) || getDefaultKafkaCredential();
|
|
336
373
|
|
|
337
374
|
async function build({ verbose, ...selectorFlags }) {
|
|
338
|
-
const kafka = new Kafka({
|
|
339
|
-
clientId: name,
|
|
340
|
-
brokers: [kafkaCredential.server],
|
|
341
|
-
logLevel: logLevel.ERROR,
|
|
342
|
-
ssl: true,
|
|
343
|
-
sasl: {
|
|
344
|
-
mechanism: "plain",
|
|
345
|
-
username: kafkaCredential.username,
|
|
346
|
-
password: kafkaCredential.password
|
|
347
|
-
}
|
|
348
|
-
});
|
|
349
|
-
|
|
350
|
-
const consumerInstance = kafka.consumer({ groupId: name });
|
|
375
|
+
const consumerInstance = await initConsumer(name, kafkaCredential);
|
|
351
376
|
|
|
352
377
|
try {
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
const finalTopic =
|
|
356
|
-
typeof topic === "function" ? topic(selectorFlags) : topic;
|
|
378
|
+
const finalTopic = typeof topic === "function" ? topic(selectorFlags) : topic;
|
|
357
379
|
|
|
358
|
-
consumerInstance.subscribe({ topic: finalTopic });
|
|
380
|
+
await consumerInstance.subscribe({ topic: finalTopic });
|
|
359
381
|
|
|
360
382
|
console.log("subscribed to topic", finalTopic);
|
|
361
383
|
|
|
362
384
|
await consumerInstance.run({
|
|
363
385
|
eachBatch: async ({ batch }) => {
|
|
364
386
|
if (verbose) {
|
|
365
|
-
console.log(
|
|
366
|
-
"received batch, number of items =",
|
|
367
|
-
batch.messages.length
|
|
368
|
-
);
|
|
387
|
+
console.log("received batch, number of items =", batch.messages.length);
|
|
369
388
|
}
|
|
370
389
|
|
|
371
|
-
const messages = batch.messages.map(m => JSON.parse(m.value));
|
|
390
|
+
const messages = batch.messages.map((m) => JSON.parse(m.value));
|
|
372
391
|
|
|
373
392
|
// add a retry for errors
|
|
374
393
|
const result = await exponentialRetry(
|
|
@@ -385,31 +404,26 @@ ${getSelectorDesc(selector)}
|
|
|
385
404
|
},
|
|
386
405
|
{
|
|
387
406
|
maxRetry,
|
|
388
|
-
test: r => r,
|
|
389
|
-
verbose
|
|
407
|
+
test: (r) => r,
|
|
408
|
+
verbose,
|
|
390
409
|
}
|
|
391
410
|
);
|
|
392
411
|
|
|
393
412
|
if (!result) {
|
|
394
|
-
console.error(
|
|
395
|
-
"Terminate consumer due to consume errors, likely bug"
|
|
396
|
-
);
|
|
413
|
+
console.error("Terminate consumer due to consume errors, likely bug");
|
|
397
414
|
|
|
398
415
|
process.exit(1);
|
|
399
416
|
}
|
|
400
|
-
}
|
|
417
|
+
},
|
|
401
418
|
});
|
|
402
419
|
} catch (err) {
|
|
403
|
-
console.error(
|
|
404
|
-
"Terminate consumer due to critical kafka connection error",
|
|
405
|
-
err
|
|
406
|
-
);
|
|
420
|
+
console.error("Terminate consumer due to critical kafka connection error", err);
|
|
407
421
|
|
|
408
422
|
process.exit(1);
|
|
409
423
|
}
|
|
410
424
|
}
|
|
411
425
|
|
|
412
|
-
return build(cli.flags).catch(err => {
|
|
426
|
+
return build(cli.flags).catch((err) => {
|
|
413
427
|
console.error(err);
|
|
414
428
|
process.exit(1);
|
|
415
429
|
});
|
|
@@ -420,5 +434,7 @@ ${getSelectorDesc(selector)}
|
|
|
420
434
|
|
|
421
435
|
module.exports = {
|
|
422
436
|
createProducerApp,
|
|
423
|
-
createConsumerApp
|
|
437
|
+
createConsumerApp,
|
|
438
|
+
produceMessages,
|
|
439
|
+
consumeMessages,
|
|
424
440
|
};
|
package/labelling.js
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
const fetch = require("node-fetch");
|
|
1
2
|
const { getRecordByKey, updateRecordByKey } = require("./dynamodb");
|
|
2
3
|
const { getEnvironment } = require("./env");
|
|
3
4
|
|
|
@@ -9,11 +10,11 @@ async function addTagsToAddress(address, tags, verbose) {
|
|
|
9
10
|
let updateItemTags;
|
|
10
11
|
|
|
11
12
|
// Validate tags
|
|
12
|
-
tags.forEach(tag => {
|
|
13
|
+
tags.forEach((tag) => {
|
|
13
14
|
if (!tag.type || !tag.label) {
|
|
14
15
|
throw new Error("error: tag must be in format { type, label }");
|
|
15
16
|
}
|
|
16
|
-
})
|
|
17
|
+
});
|
|
17
18
|
|
|
18
19
|
// check if address is a smart contract
|
|
19
20
|
let queryResult = await getRecordByKey(SMART_CONTRACTS_TABLE_NAME, { address });
|
|
@@ -28,12 +29,23 @@ async function addTagsToAddress(address, tags, verbose) {
|
|
|
28
29
|
if (queryResult) {
|
|
29
30
|
tableName = ADDRESSES_TABLE_NAME;
|
|
30
31
|
updateItemTags = queryResult.tags ?? [];
|
|
32
|
+
} else {
|
|
33
|
+
// Unknown address type, inquiry endpoint to create a record
|
|
34
|
+
const url = `https://aml-inquiry.certik-skynet.com/address?address=${address}`;
|
|
35
|
+
const response = await fetch(url);
|
|
36
|
+
const json = await response.json();
|
|
37
|
+
if (!json.type) {
|
|
38
|
+
throw new Error(`Unknown address type in inquiry response: ${address}`);
|
|
39
|
+
}
|
|
40
|
+
const { type } = json;
|
|
41
|
+
tableName = type === "EOA" ? ADDRESSES_TABLE_NAME : SMART_CONTRACTS_TABLE_NAME;
|
|
42
|
+
updateItemTags = [];
|
|
31
43
|
}
|
|
32
44
|
}
|
|
33
45
|
|
|
34
46
|
if (tableName) {
|
|
35
47
|
// record available, add tag
|
|
36
|
-
const newTags = tags.filter(tag => !updateItemTags.includes(tag));
|
|
48
|
+
const newTags = tags.filter((tag) => !updateItemTags.includes(tag));
|
|
37
49
|
|
|
38
50
|
if (newTags.length + updateItemTags.length === 0) {
|
|
39
51
|
throw new Error("error: no tags to add and none already in database");
|
|
@@ -45,10 +57,34 @@ async function addTagsToAddress(address, tags, verbose) {
|
|
|
45
57
|
|
|
46
58
|
updateItemTags = [...updateItemTags, ...newTags];
|
|
47
59
|
|
|
60
|
+
// keep distinct tags
|
|
61
|
+
updateItemTags = updateItemTags.filter(
|
|
62
|
+
(value, index, self) => self.findIndex((t) => t.type === value.type && t.label === value.label) === index
|
|
63
|
+
);
|
|
64
|
+
|
|
48
65
|
await updateRecordByKey(tableName, { address: address }, { tags: updateItemTags }, null, verbose);
|
|
49
66
|
}
|
|
50
67
|
|
|
51
68
|
return { tableName, updateItemTags };
|
|
52
69
|
}
|
|
53
70
|
|
|
54
|
-
|
|
71
|
+
async function queryAddressTags(address) {
|
|
72
|
+
let updateItemTags = [];
|
|
73
|
+
|
|
74
|
+
let queryResult = await getRecordByKey(SMART_CONTRACTS_TABLE_NAME, { address });
|
|
75
|
+
|
|
76
|
+
if (queryResult) {
|
|
77
|
+
updateItemTags = queryResult.tags ?? [];
|
|
78
|
+
} else {
|
|
79
|
+
// check if address is an EOA
|
|
80
|
+
queryResult = await getRecordByKey(ADDRESSES_TABLE_NAME, { address });
|
|
81
|
+
|
|
82
|
+
if (queryResult) {
|
|
83
|
+
updateItemTags = queryResult.tags ?? [];
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
return updateItemTags;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
module.exports = { addTagsToAddress, queryAddressTags };
|
package/metric.js
CHANGED
|
@@ -1,65 +1,65 @@
|
|
|
1
|
-
const { getDocClient } = require("./dynamodb");
|
|
2
|
-
|
|
3
|
-
/* Assume table has name/timestamp/value fields */
|
|
4
|
-
|
|
5
|
-
async function getMetricAt(tableName, name, timestamp) {
|
|
6
|
-
const docClient = getDocClient();
|
|
7
|
-
|
|
8
|
-
const query = await docClient
|
|
9
|
-
.query({
|
|
10
|
-
TableName: tableName,
|
|
11
|
-
KeyConditionExpression: "#name = :name and #timestamp <= :timestamp",
|
|
12
|
-
ExpressionAttributeNames: {
|
|
13
|
-
"#name": "name",
|
|
14
|
-
"#timestamp": "timestamp"
|
|
15
|
-
},
|
|
16
|
-
ExpressionAttributeValues: {
|
|
17
|
-
":name": name,
|
|
18
|
-
":timestamp": timestamp
|
|
19
|
-
},
|
|
20
|
-
Limit: 1,
|
|
21
|
-
ScanIndexForward: false
|
|
22
|
-
})
|
|
23
|
-
.promise();
|
|
24
|
-
|
|
25
|
-
if (query.Count > 0) {
|
|
26
|
-
const { value } = query.Items[0];
|
|
27
|
-
|
|
28
|
-
return value;
|
|
29
|
-
} else {
|
|
30
|
-
return null;
|
|
31
|
-
}
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
async function getMetricPreviousValue(tableName, name, timestamp) {
|
|
35
|
-
const docClient = getDocClient();
|
|
36
|
-
|
|
37
|
-
const query = await docClient
|
|
38
|
-
.query({
|
|
39
|
-
TableName: tableName,
|
|
40
|
-
KeyConditionExpression: "#name = :name and #timestamp < :timestamp",
|
|
41
|
-
ExpressionAttributeNames: {
|
|
42
|
-
"#timestamp": "timestamp",
|
|
43
|
-
"#name": "name"
|
|
44
|
-
},
|
|
45
|
-
ExpressionAttributeValues: {
|
|
46
|
-
":timestamp": timestamp,
|
|
47
|
-
":name": name
|
|
48
|
-
},
|
|
49
|
-
Limit: 1,
|
|
50
|
-
ScanIndexForward: false
|
|
51
|
-
})
|
|
52
|
-
.promise();
|
|
53
|
-
|
|
54
|
-
if (query.Count === 0) {
|
|
55
|
-
return 0;
|
|
56
|
-
} else {
|
|
57
|
-
const { value } = query.Items[0];
|
|
58
|
-
return value;
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
|
|
62
|
-
module.exports = {
|
|
63
|
-
getMetricAt,
|
|
64
|
-
getMetricPreviousValue
|
|
65
|
-
}
|
|
1
|
+
const { getDocClient } = require("./dynamodb");
|
|
2
|
+
|
|
3
|
+
/* Assume table has name/timestamp/value fields */
|
|
4
|
+
|
|
5
|
+
async function getMetricAt(tableName, name, timestamp) {
|
|
6
|
+
const docClient = getDocClient();
|
|
7
|
+
|
|
8
|
+
const query = await docClient
|
|
9
|
+
.query({
|
|
10
|
+
TableName: tableName,
|
|
11
|
+
KeyConditionExpression: "#name = :name and #timestamp <= :timestamp",
|
|
12
|
+
ExpressionAttributeNames: {
|
|
13
|
+
"#name": "name",
|
|
14
|
+
"#timestamp": "timestamp"
|
|
15
|
+
},
|
|
16
|
+
ExpressionAttributeValues: {
|
|
17
|
+
":name": name,
|
|
18
|
+
":timestamp": timestamp
|
|
19
|
+
},
|
|
20
|
+
Limit: 1,
|
|
21
|
+
ScanIndexForward: false
|
|
22
|
+
})
|
|
23
|
+
.promise();
|
|
24
|
+
|
|
25
|
+
if (query.Count > 0) {
|
|
26
|
+
const { value } = query.Items[0];
|
|
27
|
+
|
|
28
|
+
return value;
|
|
29
|
+
} else {
|
|
30
|
+
return null;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
async function getMetricPreviousValue(tableName, name, timestamp) {
|
|
35
|
+
const docClient = getDocClient();
|
|
36
|
+
|
|
37
|
+
const query = await docClient
|
|
38
|
+
.query({
|
|
39
|
+
TableName: tableName,
|
|
40
|
+
KeyConditionExpression: "#name = :name and #timestamp < :timestamp",
|
|
41
|
+
ExpressionAttributeNames: {
|
|
42
|
+
"#timestamp": "timestamp",
|
|
43
|
+
"#name": "name"
|
|
44
|
+
},
|
|
45
|
+
ExpressionAttributeValues: {
|
|
46
|
+
":timestamp": timestamp,
|
|
47
|
+
":name": name
|
|
48
|
+
},
|
|
49
|
+
Limit: 1,
|
|
50
|
+
ScanIndexForward: false
|
|
51
|
+
})
|
|
52
|
+
.promise();
|
|
53
|
+
|
|
54
|
+
if (query.Count === 0) {
|
|
55
|
+
return 0;
|
|
56
|
+
} else {
|
|
57
|
+
const { value } = query.Items[0];
|
|
58
|
+
return value;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
module.exports = {
|
|
63
|
+
getMetricAt,
|
|
64
|
+
getMetricPreviousValue
|
|
65
|
+
}
|