@certik/skynet 0.8.9 → 0.8.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,19 @@
1
1
  # Changelog
2
2
 
3
- ## 0.8.9
3
+ ## 0.8.13
4
+
5
+ - Added `inline.error` function to `log` library
6
+
7
+ ## 0.8.12
8
+
9
+ - Added `log` library to print one line log
10
+
11
+ ## 0.8.11
12
+
13
+ - Improved `updateRecordByKeys` in `dynamodb` library to support fields deletion
14
+ - Added `opsgenie` library
15
+
16
+ ## 0.8.10
4
17
 
5
18
  - Improved `postMessage` in `slack` library to support private channels
6
19
 
package/dynamodb.js CHANGED
@@ -1,16 +1,12 @@
1
1
  const { DynamoDB } = require("aws-sdk");
2
- const {
3
- getAWSAccessKeyId,
4
- getAWSSecretAccessKey,
5
- getAWSRegion
6
- } = require("./env");
2
+ const { getAWSAccessKeyId, getAWSSecretAccessKey, getAWSRegion } = require("./env");
7
3
  const { wait } = require("./availability");
8
4
 
9
5
  function getDynamoDB() {
10
6
  return new DynamoDB({
11
7
  accessKeyId: getAWSAccessKeyId(),
12
8
  secretAccessKey: getAWSSecretAccessKey(),
13
- region: getAWSRegion()
9
+ region: getAWSRegion(),
14
10
  });
15
11
  }
16
12
 
@@ -18,7 +14,7 @@ function getDocClient() {
18
14
  return new DynamoDB.DocumentClient({
19
15
  accessKeyId: getAWSAccessKeyId(),
20
16
  secretAccessKey: getAWSSecretAccessKey(),
21
- region: getAWSRegion()
17
+ region: getAWSRegion(),
22
18
  });
23
19
  }
24
20
 
@@ -26,7 +22,7 @@ function mergeQueries(q1, q2) {
26
22
  return {
27
23
  Items: q1.Items.concat(q2.Items),
28
24
  Count: q1.Count + q2.Count,
29
- ScannedCount: q1.ScannedCount + q2.ScannedCount
25
+ ScannedCount: q1.ScannedCount + q2.ScannedCount,
30
26
  };
31
27
  }
32
28
 
@@ -36,7 +32,7 @@ async function scanWholeTable(options) {
36
32
  let items = {
37
33
  Items: [],
38
34
  Count: 0,
39
- ScannedCount: 0
35
+ ScannedCount: 0,
40
36
  };
41
37
 
42
38
  let data = await dynamodb.scan(options).promise();
@@ -47,7 +43,7 @@ async function scanWholeTable(options) {
47
43
  data = await dynamodb
48
44
  .scan({
49
45
  ExclusiveStartKey: data.LastEvaluatedKey,
50
- ...options
46
+ ...options,
51
47
  })
52
48
  .promise();
53
49
  }
@@ -67,12 +63,7 @@ function toSlices(items) {
67
63
  return slices;
68
64
  }
69
65
 
70
- async function batchCreateRecords(
71
- tableName,
72
- records,
73
- maxWritingCapacity,
74
- verbose = false
75
- ) {
66
+ async function batchCreateRecords(tableName, records, maxWritingCapacity, verbose = false) {
76
67
  if (verbose) {
77
68
  console.log(`creating ${records.length} items in ${tableName}`);
78
69
  }
@@ -91,20 +82,18 @@ async function batchCreateRecords(
91
82
  }
92
83
 
93
84
  if (factor >= 32) {
94
- console.log(
95
- `WARNING: no progress for a long time for batchCreateRecords, please check`
96
- );
85
+ console.log(`WARNING: no progress for a long time for batchCreateRecords, please check`);
97
86
  }
98
87
 
99
88
  const slices = toSlices(remainingItems.slice(0, maxWritingCapacity));
100
89
  const results = await Promise.allSettled(
101
- slices.map(rs => {
90
+ slices.map((rs) => {
102
91
  const params = {
103
92
  RequestItems: {
104
- [tableName]: rs.map(record => {
93
+ [tableName]: rs.map((record) => {
105
94
  return { PutRequest: { Item: record } };
106
- })
107
- }
95
+ }),
96
+ },
108
97
  };
109
98
 
110
99
  return docClient.batchWrite(params).promise();
@@ -120,39 +109,27 @@ async function batchCreateRecords(
120
109
  rejection = rs;
121
110
  } else if (Object.keys(rs.value.UnprocessedItems).length > 0) {
122
111
  // partially fails, redo unprocessedItems
123
- acc = acc.concat(
124
- rs.value.UnprocessedItems[tableName].map(it => it.PutRequest.Item)
125
- );
112
+ acc = acc.concat(rs.value.UnprocessedItems[tableName].map((it) => it.PutRequest.Item));
126
113
  }
127
114
 
128
115
  return acc;
129
116
  }, remainingItems.slice(maxWritingCapacity));
130
117
 
131
118
  if (verbose) {
132
- console.log(
133
- `processed=${prevRemainingCount - remainingItems.length}, remaining=${
134
- remainingItems.length
135
- }`
136
- );
119
+ console.log(`processed=${prevRemainingCount - remainingItems.length}, remaining=${remainingItems.length}`);
137
120
  }
138
121
  }
139
122
 
140
123
  if (rejection) {
141
124
  console.log("batchCreateRecords rejected", rejection);
142
125
 
143
- throw new Error(
144
- `batchCreateRecords rejected, failed items=${remainingItems.length}`
145
- );
126
+ throw new Error(`batchCreateRecords rejected, failed items=${remainingItems.length}`);
146
127
  }
147
128
 
148
129
  if (remainingItems.length > 0) {
149
- console.log(
150
- `failed batchCreateRecords, failed items=${remainingItems.length}`
151
- );
130
+ console.log(`failed batchCreateRecords, failed items=${remainingItems.length}`);
152
131
 
153
- throw new Error(
154
- `batchCreateRecords retry failed, failed items=${remainingItems.length}`
155
- );
132
+ throw new Error(`batchCreateRecords retry failed, failed items=${remainingItems.length}`);
156
133
  }
157
134
  }
158
135
 
@@ -164,7 +141,7 @@ async function createRecord(tableName, fields, verbose = false) {
164
141
  const docClient = getDocClient();
165
142
  const params = {
166
143
  TableName: tableName,
167
- Item: fields
144
+ Item: fields,
168
145
  };
169
146
 
170
147
  return docClient.put(params).promise();
@@ -174,23 +151,13 @@ async function getRecordsByKey(tableName, keys, indexName) {
174
151
  const docClient = getDocClient();
175
152
 
176
153
  const keyNames = Object.keys(keys);
177
- const conditionExpression = keyNames
178
- .map(key => `#${key} = :${key}`)
179
- .join(" and ");
180
- const expressionNames = keyNames.reduce((acc, key) => {
181
- acc[`#${key}`] = key;
182
- return acc;
183
- }, {});
184
- const expressionValues = keyNames.reduce((acc, key) => {
185
- acc[`:${key}`] = keys[key];
186
- return acc;
187
- }, {});
154
+ const conditionExpression = keyNames.map((key) => `#${key} = :${key}`).join(" and ");
188
155
 
189
156
  const params = {
190
157
  TableName: tableName,
191
158
  KeyConditionExpression: conditionExpression,
192
- ExpressionAttributeNames: expressionNames,
193
- ExpressionAttributeValues: expressionValues
159
+ ExpressionAttributeNames: generateExpressionNames(keyNames),
160
+ ExpressionAttributeValues: generateExpressionValues(keyNames, keys),
194
161
  };
195
162
 
196
163
  if (indexName) {
@@ -221,45 +188,77 @@ async function getRecordByKey(tableName, keys, indexName) {
221
188
  }
222
189
  }
223
190
 
224
- async function updateRecordByKey(
225
- tableName,
226
- idKey,
227
- fields,
228
- conditionExpressions = null,
229
- verbose = false
230
- ) {
231
- if (verbose) {
232
- console.log("update", tableName, idKey, fields);
233
- }
234
-
235
- const idKeyNames = Object.keys(idKey);
236
- const keys = Object.keys(fields).filter(k => !idKeyNames.includes(k));
237
- const updateExpressions = keys.map(key => `#${key} = :${key}`);
238
- const expressionNames = keys.reduce((acc, key) => {
191
+ function generateExpressionNames(keys) {
192
+ return keys.reduce((acc, key) => {
239
193
  acc[`#${key}`] = key;
240
194
  return acc;
241
195
  }, {});
242
- const expressionValues = keys.reduce((acc, key) => {
196
+ }
197
+
198
+ function generateExpressionValues(keys, fields) {
199
+ return keys.reduce((acc, key) => {
243
200
  acc[`:${key}`] = fields[key];
244
201
  return acc;
245
202
  }, {});
203
+ }
204
+
205
+ async function updateRecordByKey(tableName, idKey, fields, conditionExpressions = null, verbose = false) {
206
+ if (verbose) {
207
+ console.log("update", tableName, idKey, fields);
208
+ }
209
+
246
210
  const docClient = getDocClient();
247
- const params = {
248
- TableName: tableName,
249
- Key: idKey,
250
- ExpressionAttributeNames: expressionNames,
251
- ExpressionAttributeValues: expressionValues,
252
- UpdateExpression: `SET ${updateExpressions.join(", ")}`,
253
- ReturnValues: "ALL_NEW"
254
- };
255
211
 
256
- if (conditionExpressions) {
257
- params.ConditionExpression = conditionExpressions;
212
+ const idKeyNames = Object.keys(idKey);
213
+ const fieldsToDelete = Object.keys(fields).filter((f) => fields[f] === undefined);
214
+ const fieldsToUpdate = Object.keys(fields).filter((k) => !idKeyNames.includes(k) && !fieldsToDelete.includes(k));
215
+
216
+ let data;
217
+
218
+ if (fieldsToDelete.length > 0) {
219
+ if (verbose) {
220
+ console.log("delete fields", tableName, fieldsToDelete);
221
+ }
222
+
223
+ const deleteParams = {
224
+ TableName: tableName,
225
+ Key: idKey,
226
+ ExpressionAttributeNames: generateExpressionNames(fieldsToDelete),
227
+ UpdateExpression: `REMOVE ${fieldsToDelete.map((f) => `#${f}`).join(", ")}`,
228
+ ReturnValues: "ALL_NEW",
229
+ };
230
+
231
+ if (conditionExpressions) {
232
+ deleteParams.ConditionExpression = conditionExpressions;
233
+ }
234
+
235
+ data = await docClient.update(deleteParams).promise();
258
236
  }
259
237
 
260
- const data = await docClient.update(params).promise();
238
+ if (fieldsToUpdate.length > 0) {
239
+ if (verbose) {
240
+ console.log("update fields", tableName, fieldsToUpdate);
241
+ }
242
+
243
+ const updateExpressions = fieldsToUpdate.map((key) => `#${key} = :${key}`);
244
+
245
+ const params = {
246
+ TableName: tableName,
247
+ Key: idKey,
248
+ ExpressionAttributeNames: generateExpressionNames(fieldsToUpdate),
249
+ ExpressionAttributeValues: generateExpressionValues(fieldsToUpdate, fields),
250
+ UpdateExpression: `SET ${updateExpressions.join(", ")}`,
251
+ ReturnValues: "ALL_NEW",
252
+ };
261
253
 
262
- return data.Attributes;
254
+ if (conditionExpressions) {
255
+ params.ConditionExpression = conditionExpressions;
256
+ }
257
+
258
+ data = await docClient.update(params).promise();
259
+ }
260
+
261
+ return data && data.Attributes;
263
262
  }
264
263
 
265
264
  async function createTableIfNotExist(
@@ -267,14 +266,14 @@ async function createTableIfNotExist(
267
266
  attributeDefinitions = [
268
267
  {
269
268
  AttributeName: "id",
270
- AttributeType: "S"
271
- }
269
+ AttributeType: "S",
270
+ },
272
271
  ],
273
272
  keySchema = [
274
273
  {
275
274
  AttributeName: "id",
276
- KeyType: "HASH"
277
- }
275
+ KeyType: "HASH",
276
+ },
278
277
  ],
279
278
  otherOptions = {},
280
279
  verbose = false
@@ -282,9 +281,7 @@ async function createTableIfNotExist(
282
281
  const dynamodb = getDynamoDB();
283
282
 
284
283
  try {
285
- const table = await dynamodb
286
- .describeTable({ TableName: tableName })
287
- .promise();
284
+ const table = await dynamodb.describeTable({ TableName: tableName }).promise();
288
285
 
289
286
  if (verbose) {
290
287
  console.log(`table ${tableName} already exist`, table);
@@ -298,9 +295,9 @@ async function createTableIfNotExist(
298
295
  KeySchema: keySchema,
299
296
  ProvisionedThroughput: {
300
297
  ReadCapacityUnits: 5,
301
- WriteCapacityUnits: 5
298
+ WriteCapacityUnits: 5,
302
299
  },
303
- ...otherOptions
300
+ ...otherOptions,
304
301
  })
305
302
  .promise();
306
303
 
@@ -313,9 +310,7 @@ async function createTableIfNotExist(
313
310
  while (!isCreated) {
314
311
  await wait(5000);
315
312
 
316
- const t = await dynamodb
317
- .describeTable({ TableName: tableName })
318
- .promise();
313
+ const t = await dynamodb.describeTable({ TableName: tableName }).promise();
319
314
 
320
315
  console.log("waiting for table to become active");
321
316
 
@@ -335,38 +330,31 @@ async function batchDeleteRecords(tableName, keys) {
335
330
  await docClient
336
331
  .batchWrite({
337
332
  RequestItems: {
338
- [tableName]: slice.map(key => {
333
+ [tableName]: slice.map((key) => {
339
334
  return { DeleteRequest: { Key: key } };
340
- })
341
- }
335
+ }),
336
+ },
342
337
  })
343
338
  .promise();
344
339
  }
345
340
  }
346
341
 
347
342
  function getKeyName(keySchema, type) {
348
- const key = keySchema.find(k => k.KeyType === type);
343
+ const key = keySchema.find((k) => k.KeyType === type);
349
344
 
350
345
  return key && key.AttributeName;
351
346
  }
352
347
 
353
348
  function getIndexKeyName(globalSecondaryIndexes, indexName, type) {
354
- const idx = globalSecondaryIndexes.find(i => i.IndexName === indexName);
349
+ const idx = globalSecondaryIndexes.find((i) => i.IndexName === indexName);
355
350
 
356
351
  return idx && getKeyName(idx.KeySchema, type);
357
352
  }
358
353
 
359
- async function deleteRecordsByHashKey(
360
- tableName,
361
- indexName,
362
- hashKeyValue,
363
- verbose = false
364
- ) {
354
+ async function deleteRecordsByHashKey(tableName, indexName, hashKeyValue, verbose = false) {
365
355
  const docClient = getDocClient();
366
356
 
367
- const meta = await getDynamoDB()
368
- .describeTable({ TableName: tableName })
369
- .promise();
357
+ const meta = await getDynamoDB().describeTable({ TableName: tableName }).promise();
370
358
 
371
359
  const hashKeyName = indexName
372
360
  ? getIndexKeyName(meta.Table.GlobalSecondaryIndexes, indexName, "HASH")
@@ -385,7 +373,7 @@ async function deleteRecordsByHashKey(
385
373
  TableName: tableName,
386
374
  KeyConditionExpression: "#hashKeyName = :hashKeyValue",
387
375
  ExpressionAttributeNames: { "#hashKeyName": hashKeyName },
388
- ExpressionAttributeValues: { ":hashKeyValue": hashKeyValue }
376
+ ExpressionAttributeValues: { ":hashKeyValue": hashKeyValue },
389
377
  };
390
378
 
391
379
  if (indexName) {
@@ -396,14 +384,14 @@ async function deleteRecordsByHashKey(
396
384
 
397
385
  await batchDeleteRecords(
398
386
  tableName,
399
- data.Items.map(item =>
387
+ data.Items.map((item) =>
400
388
  mainRangeKeyName
401
389
  ? {
402
390
  [mainHashKeyName]: item[mainHashKeyName],
403
- [mainRangeKeyName]: item[mainRangeKeyName]
391
+ [mainRangeKeyName]: item[mainRangeKeyName],
404
392
  }
405
393
  : {
406
- [mainHashKeyName]: item[mainHashKeyName]
394
+ [mainHashKeyName]: item[mainHashKeyName],
407
395
  }
408
396
  )
409
397
  );
@@ -414,20 +402,20 @@ async function deleteRecordsByHashKey(
414
402
  data = await docClient
415
403
  .query({
416
404
  ...params,
417
- ExclusiveStartKey: data.LastEvaluatedKey
405
+ ExclusiveStartKey: data.LastEvaluatedKey,
418
406
  })
419
407
  .promise();
420
408
 
421
409
  await batchDeleteRecords(
422
410
  tableName,
423
- data.Items.map(item =>
411
+ data.Items.map((item) =>
424
412
  mainRangeKeyName
425
413
  ? {
426
414
  [mainHashKeyName]: item[mainHashKeyName],
427
- [mainRangeKeyName]: item[mainRangeKeyName]
415
+ [mainRangeKeyName]: item[mainRangeKeyName],
428
416
  }
429
417
  : {
430
- [mainHashKeyName]: item[mainHashKeyName]
418
+ [mainHashKeyName]: item[mainHashKeyName],
431
419
  }
432
420
  )
433
421
  );
@@ -452,5 +440,5 @@ module.exports = {
452
440
  updateRecordByKey,
453
441
  batchDeleteRecords,
454
442
  deleteRecordsByHashKey,
455
- createTableIfNotExist
443
+ createTableIfNotExist,
456
444
  };
package/examples/indexer CHANGED
@@ -36,7 +36,12 @@ async function check({ protocol, state, verbose }) {
36
36
 
37
37
  const app = indexer({
38
38
  name: "LibSkynetExampleIndexer",
39
- selector: { protocol: { type: "string", description: "which chain to index" } },
39
+ selector: {
40
+ protocol: {
41
+ type: "string",
42
+ description: "which chain to index"
43
+ }
44
+ },
40
45
 
41
46
  build: {
42
47
  func: build,
package/log.js ADDED
@@ -0,0 +1,25 @@
1
+ function getLine(params) {
2
+ let line = `${new Date().toISOString()}\t`;
3
+
4
+ // Convert each value to a string and replace newlines with tabs (for AWS Athena)
5
+ for (let i = 0, l = params.length; i < l; i++) {
6
+ // Certain objects don't get converted
7
+ // Note: using JSON.stringify may be too slow for large objects
8
+ line += `${params[i]} `.replace(/\n/gm, "\t");
9
+ }
10
+
11
+ return line.trim();
12
+ }
13
+
14
+ const inline = {
15
+ log: function (...args) {
16
+ console.log(getLine(args));
17
+ },
18
+ error: function (...args) {
19
+ console.error(getLine(args));
20
+ },
21
+ };
22
+
23
+ module.exports = {
24
+ inline,
25
+ };
package/monitor.js CHANGED
@@ -5,6 +5,7 @@ const { getBinaryName } = require("./cli");
5
5
  const { exponentialRetry } = require("./availability");
6
6
  const { getIndexerLatestId, getIndexerValidatedId, getIndexerState } = require("./indexer");
7
7
  const { postMessage } = require("./slack");
8
+ const { postGenieMessage } = require("./opsgenie");
8
9
  const { getJobName, getNomadAddr } = require("./deploy");
9
10
 
10
11
  const ERROR_LEVEL = {
@@ -235,6 +236,12 @@ ${
235
236
  },
236
237
  verbose
237
238
  );
239
+
240
+ // Also alert on opsgenie (func prevents duplicate alerts)
241
+ await postGenieMessage(
242
+ `${jobName} Monitor Errors: ${result.map((m) => m.message || m).join("\n")}`,
243
+ m.message
244
+ );
238
245
  }
239
246
 
240
247
  throw new Error(`failed due to critical errors`);
package/opsgenie.js ADDED
@@ -0,0 +1,60 @@
1
+ const fetch = require("node-fetch");
2
+ const hash = require("object-hash");
3
+
4
+ function getGenieKey() {
5
+ return process.env.OPSGENIE_API_KEY;
6
+ }
7
+
8
+ function getGenieEndPoint() {
9
+ const key = getGenieKey();
10
+
11
+ if (!key) {
12
+ throw new Error("Cannot communicate with opsgenie due to missing API key: process.env.OPSGENIE_API_KEY");
13
+ }
14
+
15
+ return process.env.OPSGENIE_END_POINT;
16
+ }
17
+
18
+ async function postGenieMessage(msg, desc, verbose) {
19
+ try {
20
+ const genieKey = getGenieKey();
21
+ const genieEndPoint = getGenieEndPoint();
22
+
23
+ const body = {
24
+ message: msg,
25
+ description: desc
26
+ };
27
+
28
+ // Prevents duplicate alerts (See Opsgenie doc about alias)
29
+ bodyHash = hash(body);
30
+ body.alias = bodyHash;
31
+
32
+ if (verbose) {
33
+ console.log(`Making API call to Opsgenie ${msg} (${desc}):`, JSON.stringify(body, null, 2));
34
+ }
35
+
36
+ // Makes the call using fetch and ENV variables
37
+ const response = await fetch(genieEndPoint, {
38
+ method: "POST",
39
+ headers: {
40
+ "Content-Type": "application/json",
41
+ "Authorization": "GenieKey " + genieKey
42
+ },
43
+ body: JSON.stringify(body)
44
+ });
45
+
46
+ const result = await response.json();
47
+ if (verbose) {
48
+ console.log(`Result of API call to Opsgenie... ${result}`);
49
+ }
50
+
51
+ return result
52
+
53
+ } catch (error) {
54
+ console.error("Failed to make opsgenie API call", error);
55
+
56
+ throw error;
57
+ }
58
+ }
59
+
60
+ module.exports = { postGenieMessage };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@certik/skynet",
3
- "version": "0.8.9",
3
+ "version": "0.8.13",
4
4
  "description": "Skynet Shared JS library",
5
5
  "main": "index.js",
6
6
  "author": "CertiK Engineering",
@@ -19,6 +19,7 @@
19
19
  "kafkajs": "^1.15.0",
20
20
  "meow": "^7.0.1",
21
21
  "node-fetch": "^2.6.1",
22
+ "object-hash": "^2.2.0",
22
23
  "snowflake-sdk": "^1.6.3",
23
24
  "web3": "^1.3.5",
24
25
  "which": "^2.0.2"
package/slack.js CHANGED
@@ -19,13 +19,26 @@ function getClient() {
19
19
  async function findConversation(client, name) {
20
20
  const { conversations } = client;
21
21
 
22
- // Call the conversations.list method using the built-in WebClient
22
+ let channels = [];
23
+
23
24
  let result = await conversations.list({
24
- types: "public_channel,private_channel,mpim,im",
25
+ types: "public_channel,private_channel",
25
26
  limit: 1000,
26
27
  });
27
28
 
28
- for (const channel of result.channels) {
29
+ channels = channels.concat(result.channels);
30
+
31
+ while (result.response_metadata.next_cursor) {
32
+ result = await conversations.list({
33
+ types: "public_channel,private_channel",
34
+ cursor: result.response_metadata.next_cursor,
35
+ limit: 1000,
36
+ });
37
+
38
+ channels = channels.concat(result.channels);
39
+ }
40
+
41
+ for (const channel of channels) {
29
42
  if (channel.name === name) {
30
43
  const conversationId = channel.id;
31
44
 
@@ -42,6 +55,12 @@ async function postMessage(channel, message, verbose) {
42
55
 
43
56
  const conversationId = await findConversation(client, channel);
44
57
 
58
+ if (!conversationId) {
59
+ throw new Error(
60
+ `cannot find slack public/private channel: ${channel}, you may have to invite the @CertiK Skynet bot to the channel`
61
+ );
62
+ }
63
+
45
64
  let post = {};
46
65
 
47
66
  if (typeof message === "string") {
@@ -59,9 +78,10 @@ async function postMessage(channel, message, verbose) {
59
78
  ...post,
60
79
  });
61
80
  } catch (error) {
62
- // no blocking
63
81
  console.error("failed to post slack message", error);
82
+
83
+ throw error;
64
84
  }
65
85
  }
66
86
 
67
- module.exports = { postMessage };
87
+ module.exports = { getClient, findConversation, postMessage };