@certik/skynet 0.22.1 → 0.22.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/build.ts +23 -0
- package/dist/abi.d.ts +1 -2
- package/dist/abi.js +569 -563
- package/dist/address.d.ts +0 -1
- package/dist/address.js +22 -21
- package/dist/api.d.ts +0 -1
- package/dist/api.js +235 -120
- package/dist/app.d.ts +1 -2
- package/dist/app.js +2030 -276
- package/dist/availability.d.ts +0 -1
- package/dist/availability.js +126 -56
- package/dist/cli.d.ts +0 -1
- package/dist/cli.js +28 -24
- package/dist/const.d.ts +0 -1
- package/dist/const.js +153 -132
- package/dist/databricks.d.ts +0 -1
- package/dist/databricks.js +198 -58
- package/dist/date.d.ts +0 -1
- package/dist/date.js +48 -21
- package/dist/deploy.d.ts +0 -1
- package/dist/deploy.js +427 -292
- package/dist/dynamodb.d.ts +3 -4
- package/dist/dynamodb.js +432 -281
- package/dist/env.d.ts +2 -3
- package/dist/env.js +16 -9
- package/dist/graphql.d.ts +0 -1
- package/dist/graphql.js +26 -23
- package/dist/indexer.d.ts +0 -1
- package/dist/indexer.js +1050 -441
- package/dist/log.d.ts +0 -1
- package/dist/log.js +53 -52
- package/dist/object-hash.d.ts +0 -1
- package/dist/object-hash.js +49 -59
- package/dist/opsgenie.d.ts +97 -19
- package/dist/opsgenie.js +35 -30
- package/dist/por.d.ts +0 -1
- package/dist/por.js +113 -123
- package/dist/s3.d.ts +7 -8
- package/dist/s3.js +103 -91
- package/dist/search.d.ts +0 -1
- package/dist/search.js +100 -25
- package/dist/selector.d.ts +0 -1
- package/dist/selector.js +34 -38
- package/dist/slack.d.ts +0 -1
- package/dist/slack.js +27 -21
- package/dist/util.d.ts +0 -1
- package/dist/util.js +21 -20
- package/examples/api.ts +1 -1
- package/examples/indexer.ts +1 -1
- package/examples/mode-indexer.ts +1 -1
- package/package.json +4 -3
- package/{graphql.ts → src/graphql.ts} +1 -1
- package/src/opsgenie.ts +176 -0
- package/tsconfig.build.json +2 -5
- package/tsconfig.json +11 -20
- package/dist/abi.d.ts.map +0 -1
- package/dist/address.d.ts.map +0 -1
- package/dist/api.d.ts.map +0 -1
- package/dist/app.d.ts.map +0 -1
- package/dist/availability.d.ts.map +0 -1
- package/dist/cli.d.ts.map +0 -1
- package/dist/const.d.ts.map +0 -1
- package/dist/databricks.d.ts.map +0 -1
- package/dist/date.d.ts.map +0 -1
- package/dist/deploy.d.ts.map +0 -1
- package/dist/dynamodb.d.ts.map +0 -1
- package/dist/env.d.ts.map +0 -1
- package/dist/graphql.d.ts.map +0 -1
- package/dist/indexer.d.ts.map +0 -1
- package/dist/log.d.ts.map +0 -1
- package/dist/object-hash.d.ts.map +0 -1
- package/dist/opsgenie.d.ts.map +0 -1
- package/dist/por.d.ts.map +0 -1
- package/dist/s3.d.ts.map +0 -1
- package/dist/search.d.ts.map +0 -1
- package/dist/selector.d.ts.map +0 -1
- package/dist/slack.d.ts.map +0 -1
- package/dist/util.d.ts.map +0 -1
- package/opsgenie.ts +0 -69
- /package/{abi.ts → src/abi.ts} +0 -0
- /package/{address.ts → src/address.ts} +0 -0
- /package/{api.ts → src/api.ts} +0 -0
- /package/{app.ts → src/app.ts} +0 -0
- /package/{availability.ts → src/availability.ts} +0 -0
- /package/{cli.ts → src/cli.ts} +0 -0
- /package/{const.ts → src/const.ts} +0 -0
- /package/{databricks.ts → src/databricks.ts} +0 -0
- /package/{date.ts → src/date.ts} +0 -0
- /package/{deploy.ts → src/deploy.ts} +0 -0
- /package/{dynamodb.ts → src/dynamodb.ts} +0 -0
- /package/{env.ts → src/env.ts} +0 -0
- /package/{indexer.ts → src/indexer.ts} +0 -0
- /package/{log.ts → src/log.ts} +0 -0
- /package/{object-hash.ts → src/object-hash.ts} +0 -0
- /package/{por.ts → src/por.ts} +0 -0
- /package/{s3.ts → src/s3.ts} +0 -0
- /package/{search.ts → src/search.ts} +0 -0
- /package/{selector.ts → src/selector.ts} +0 -0
- /package/{slack.ts → src/slack.ts} +0 -0
- /package/{util.ts → src/util.ts} +0 -0
package/dist/dynamodb.js
CHANGED
|
@@ -1,328 +1,479 @@
|
|
|
1
|
-
|
|
1
|
+
// src/util.ts
/**
 * Split the inclusive range [startAt, endAt] into [lo, hi] chunks of width `step`.
 * e.g. range(1, 10, 3) -> [[1, 3], [4, 6], [7, 9], [10, 10]].
 */
function range(startAt, endAt, step) {
  const chunks = [];
  let lo = startAt;
  while (lo <= endAt) {
    chunks.push([lo, Math.min(endAt, lo + step - 1)]);
    lo += step;
  }
  return chunks;
}
|
|
9
|
+
/**
 * Partition `array` into consecutive groups of at most `groupSize` elements.
 * The last group may be shorter.
 */
function arrayGroup(array, groupSize) {
  const out = [];
  let offset = 0;
  while (offset < array.length) {
    out.push(array.slice(offset, offset + groupSize));
    offset += groupSize;
  }
  return out;
}
|
|
16
|
+
/**
 * Inclusive integer range [start, end]; empty when start > end.
 */
function fillRange(start, end) {
  if (start > end) {
    return [];
  }
  return Array.from({ length: end - start + 1 }, (_, i) => start + i);
}
|
|
23
|
+
// src/object-hash.ts
|
|
24
|
+
import xh from "@node-rs/xxhash";
|
|
25
|
+
/**
 * Deterministic 64-bit xxh3 digest of an arbitrary JS value, returned as a
 * hex string. Structural: object key order does not affect the result
 * (see objectHash/mapHash sorting).
 */
function getHash(obj) {
  const hasher = xh.xxh3.Xxh3.withSeed();
  hash(obj, hasher);
  return hasher.digest().toString(16);
}
|
|
30
|
+
/**
 * Feed a structural encoding of `obj` into the running xxh3 hasher.
 * Dispatches on runtime type; throws for unsupported values
 * (functions, symbols).
 */
function hash(obj, xxh3) {
  if (obj === null) {
    xxh3.update("null");
    return;
  }
  if (obj === undefined) {
    xxh3.update("undefined");
    return;
  }
  switch (typeof obj) {
    case "string":
      xxh3.update(obj);
      return;
    case "number":
    case "boolean":
    case "bigint":
      xxh3.update(obj.toString());
      return;
  }
  if (obj instanceof Date) {
    xxh3.update(obj.toISOString());
  } else if (Array.isArray(obj)) {
    arrayHash(obj, xxh3);
  } else if (obj instanceof Set) {
    setHash(obj, xxh3);
  } else if (obj instanceof Map) {
    mapHash(obj, xxh3);
  } else if (typeof obj === "object") {
    objectHash(obj, xxh3);
  } else {
    throw new Error(`Unsupported type: ${obj}`);
  }
}
|
|
57
|
+
/**
 * Hash an array as "[" + element hashes + "]".
 * NOTE(review): elements are not delimited, so e.g. ["ab"] and ["a","b"]
 * collide — presumably acceptable for the cache-key use case; confirm
 * before reusing this for anything security-sensitive.
 */
function arrayHash(array, xxh3) {
  xxh3.update("[");
  array.forEach((element) => hash(element, xxh3));
  xxh3.update("]");
}
|
|
64
|
+
/**
 * Placeholder: Set values have no defined hashing order here yet.
 * @throws {Error} always
 */
function setHash(_set, _xxh3) {
  throw new Error("Set hashing not implemented");
}
|
|
67
|
+
/**
 * Hash Map entries in a stable order (sorted by key via localeCompare), so
 * insertion order does not affect the digest.
 * NOTE(review): assumes keys are strings — non-string keys would throw on
 * localeCompare; confirm callers only use string-keyed Maps.
 */
function mapHash(map, xxh3) {
  const sorted = [...map.entries()].sort((a, b) => a[0].localeCompare(b[0]));
  for (const [key, value] of sorted) {
    hash(key, xxh3);
    hash(value, xxh3);
  }
}
|
|
74
|
+
/**
 * Hash a plain object's own enumerable entries in sorted-key order, so key
 * insertion order does not affect the digest.
 */
function objectHash(obj, xxh3) {
  const sorted = Object.entries(obj).sort((a, b) => a[0].localeCompare(b[0]));
  for (const [key, value] of sorted) {
    hash(key, xxh3);
    hash(value, xxh3);
  }
}
|
|
81
|
+
|
|
82
|
+
// src/availability.ts
|
|
83
|
+
import pThrottle from "p-throttle";
|
|
84
|
+
import pMemoize from "p-memoize";
|
|
85
|
+
import QuickLRU from "quick-lru";
|
|
86
|
+
/**
 * Resolve after `time` milliseconds.
 * @param {number} time delay in ms
 * @returns {Promise<void>}
 */
async function wait(time) {
  return new Promise((resolve) => setTimeout(resolve, time));
}
|
|
91
|
+
/**
 * Call `func` until `test(result)` is truthy or the retry budget is spent,
 * sleeping between attempts with exponential backoff.
 *
 * Fixes: the final verbose log previously printed a stray "}" at the end of
 * the message; `||` defaults swallowed an explicit initialDuration/growFactor
 * of 0 — `??` now honors them.
 *
 * @param {() => Promise<any>} func operation to retry
 * @param {object} options
 * @param {number} options.maxRetry retries allowed after the first attempt
 * @param {number} [options.initialDuration=5000] first sleep in ms
 * @param {number} [options.growFactor=2] backoff multiplier
 * @param {(r: any) => boolean} options.test success predicate on the result
 * @param {boolean} [options.verbose] log failed attempts
 * @returns the last result, whether or not it passed `test`
 */
async function exponentialRetry(func, {
  maxRetry,
  initialDuration,
  growFactor,
  test,
  verbose
}) {
  let retries = maxRetry;
  // ?? instead of ||: an explicit 0 is respected.
  let duration = initialDuration ?? 5000;
  const growFactorFinal = growFactor ?? 2;
  let result = await func();
  while (!test(result) && retries > 0) {
    if (verbose) {
      console.log("failed attempt result", result);
      console.log(`sleep for ${duration}ms after failed attempt, remaining ${retries} attempts`);
    }
    retries = retries - 1;
    await wait(duration);
    result = await func();
    duration = duration * growFactorFinal;
  }
  if (verbose) {
    // fixed: message previously ended with a spurious "}"
    console.log(`function to retry ends with status ${test(result)}, number of retries done: ${maxRetry - retries}`);
  }
  return result;
}
|
|
117
|
+
/**
 * Wrap an async function so that thrown errors are retried with exponential
 * backoff.
 *
 * Fix: the retry state (attempts left, current delay) is now initialized per
 * invocation of the wrapped function. Previously it lived in the factory
 * closure and was shared across calls, so one call that exhausted its retries
 * permanently drained the budget (and inflated the delay) for every later
 * call to the same wrapped function.
 *
 * @param {(...args: any[]) => Promise<any>} func function to wrap
 * @param {{maxRetry?: number, initialDuration?: number, growFactor?: number}} [options]
 * @returns {(...args: any[]) => Promise<any>} wrapped function
 */
function withRetry(func, options) {
  const maxRetry = options?.maxRetry || 3;
  const initialDuration = options?.initialDuration || 500;
  const growFactorFinal = options?.growFactor || 2;
  return async (...args) => {
    let retries = maxRetry;
    let duration = initialDuration;
    do {
      try {
        return await func(...args);
      } catch (error) {
        retries = retries - 1;
        if (retries <= 0) {
          // budget spent: surface the last error to the caller
          throw error;
        }
        await wait(duration);
        duration = duration * growFactorFinal;
      }
    } while (retries > 0);
    throw new Error("unreachable");
  };
}
|
|
137
|
+
/**
 * Memoize an async function with an LRU cache keyed by a structural hash of
 * the arguments. Defaults: QuickLRU of `lruMaxSize` (10 000) entries and a
 * getHash-based cache key.
 *
 * Fix: the caller's `options` object is no longer mutated — defaults are
 * applied to a shallow copy before being handed to pMemoize.
 */
function memoize(func, options) {
  const opts = { ...options };
  if (!opts.cache) {
    opts.cache = new QuickLRU({ maxSize: opts.lruMaxSize || 1e4 });
  }
  if (!opts.cacheKey) {
    opts.cacheKey = (args) => getHash(args);
  }
  return pMemoize(func, opts);
}
|
|
149
|
+
// src/dynamodb.ts
|
|
150
|
+
import {
|
|
151
|
+
DynamoDBDocumentClient,
|
|
152
|
+
ScanCommand,
|
|
153
|
+
BatchWriteCommand,
|
|
154
|
+
GetCommand,
|
|
155
|
+
PutCommand,
|
|
156
|
+
QueryCommand,
|
|
157
|
+
UpdateCommand
|
|
158
|
+
} from "@aws-sdk/lib-dynamodb";
|
|
2
159
|
import { DynamoDBClient, DescribeTableCommand } from "@aws-sdk/client-dynamodb";
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
let _dynamoDB;
|
|
6
|
-
let _docClient;
|
|
160
|
+
// Lazily-initialized AWS client singletons (module-level cache; see
// getDynamoDB / getDocClient).
let _dynamoDB;
let _docClient;
|
|
7
162
|
/**
 * Return the shared low-level DynamoDBClient, creating it on first use.
 * @param {boolean} [forceNew=false] replace the cached client
 */
function getDynamoDB(forceNew = false) {
  if (forceNew || !_dynamoDB) {
    _dynamoDB = new DynamoDBClient();
  }
  return _dynamoDB;
}
|
|
13
168
|
/**
 * Return the shared DynamoDBDocumentClient, creating it on first use.
 * Marshalling: empty values converted, undefined values removed, class
 * instances mapped to plain objects; numbers are not wrapped on read.
 * NOTE(review): `forceNew` rebuilds the document client but calls
 * getDynamoDB() without forceNew, so the underlying client is reused —
 * confirm that is intended.
 * @param {boolean} [forceNew=false]
 */
function getDocClient(forceNew = false) {
  if (forceNew || !_docClient) {
    _docClient = DynamoDBDocumentClient.from(getDynamoDB(), {
      marshallOptions: {
        convertEmptyValues: true,
        removeUndefinedValues: true,
        convertClassInstanceToMap: true
      },
      unmarshallOptions: {
        wrapNumbers: false
      }
    });
  }
  return _docClient;
}
|
|
34
185
|
/**
 * Scan every page of a table and return the merged result.
 * @param {object} options ScanCommand input (TableName, filters, ...)
 * @returns {Promise<{Items: any[], Count: number, ScannedCount: number}>}
 */
async function scanWholeTable(options) {
  const dynamodb = getDocClient();
  const result = { Items: [], Count: 0, ScannedCount: 0 };
  // Fold one response page into the accumulated result.
  const absorb = (page) => {
    if (page.Items) {
      result.Items = result.Items.concat(page.Items);
    }
    result.Count += page.Count || 0;
    result.ScannedCount += page.ScannedCount || 0;
  };
  let data = await dynamodb.send(new ScanCommand(options));
  while (data.LastEvaluatedKey) {
    absorb(data);
    data = await dynamodb.send(new ScanCommand({ ...options, ExclusiveStartKey: data.LastEvaluatedKey }));
  }
  absorb(data);
  return result;
}
|
|
59
210
|
/**
 * Bulk-insert `records` into `tableName` via BatchWriteCommand.
 *
 * Each round writes at most `maxWritingCapacity` items, split into parallel
 * slices of 25 (the BatchWrite limit). Rejected slices and unprocessed items
 * are re-queued. When a round makes no progress, the loop sleeps with a
 * doubling backoff (5s base) and gives up once the factor exceeds 128.
 * Throws if any slice was rejected or items remain unwritten.
 */
async function batchCreateRecords(tableName, records, maxWritingCapacity, verbose = false) {
  if (verbose) {
    console.log(`creating ${records.length} items in ${tableName}`);
  }
  const docClient = getDocClient();
  const fulfilled = (p) => p.status === "fulfilled";
  const failed = (p) => p.status === "rejected";
  let pending = records;
  let previousPendingCount = pending.length + 1;
  let backoffFactor = 1;
  let rejection = undefined;
  while (pending.length > 0 && backoffFactor <= 128 && !rejection) {
    if (previousPendingCount === pending.length) {
      // no progress last round: back off before retrying
      await wait(5000 * backoffFactor);
      backoffFactor = backoffFactor * 2;
    }
    if (backoffFactor >= 32) {
      console.log(`WARNING: no progress for a long time for batchCreateRecords, please check`);
    }
    const slices = arrayGroup(pending.slice(0, maxWritingCapacity), 25);
    const outcomes = await Promise.allSettled(slices.map((slice) => docClient.send(new BatchWriteCommand({
      RequestItems: {
        [tableName]: slice.map((record) => ({ PutRequest: { Item: record } }))
      }
    }))));
    previousPendingCount = pending.length;
    pending = pending.slice(maxWritingCapacity);
    outcomes.forEach((outcome, idx) => {
      if (failed(outcome)) {
        // whole slice failed: re-queue it and remember the rejection
        pending = pending.concat(slices[idx]);
        rejection = outcome;
      } else if (fulfilled(outcome) && outcome.value.UnprocessedItems && Object.keys(outcome.value.UnprocessedItems).length > 0) {
        // partial failure: re-queue only the unprocessed items
        const unprocessed = outcome.value.UnprocessedItems[tableName].map((it) => it.PutRequest?.Item ?? []).flat();
        pending = pending.concat(unprocessed);
      }
    });
    if (verbose) {
      console.log(`processed=${previousPendingCount - pending.length}, remaining=${pending.length}`);
    }
  }
  if (rejection) {
    console.log("batchCreateRecords rejected", rejection);
    throw new Error(`batchCreateRecords rejected, failed items=${pending.length}`);
  }
  if (pending.length > 0) {
    console.log(`failed batchCreateRecords, failed items=${pending.length}`);
    throw new Error(`batchCreateRecords retry failed, failed items=${pending.length}`);
  }
}
|
|
112
259
|
/**
 * Put a single item into `tableName`.
 * @returns the PutCommand response promise
 */
async function createRecord(tableName, fields, verbose = false) {
  if (verbose) {
    console.log("creating", tableName, fields);
  }
  return getDocClient().send(new PutCommand({
    TableName: tableName,
    Item: fields
  }));
}
|
|
123
270
|
/**
 * Fetch one item by its primary key.
 * @returns {Promise<any>} the item, or undefined when absent
 */
async function readRecord(tableName, key, verbose = false) {
  if (verbose) {
    console.log("reading", tableName, key);
  }
  const response = await getDocClient().send(new GetCommand({
    TableName: tableName,
    Key: key
  }));
  return response.Item;
}
|
|
134
281
|
/**
 * Query all items matching `keys` (optionally against a secondary index),
 * following pagination. Returns the items, or null when the query fails with
 * a 400 status (logged); any other error is rethrown.
 */
async function getRecordsByKey(tableName, keys, indexName) {
  const docClient = getDocClient();
  const keyNames = Object.keys(keys);
  const params = {
    TableName: tableName,
    KeyConditionExpression: keyNames.map((key) => `#${key} = :${key}`).join(" and "),
    ExpressionAttributeNames: generateExpressionNames(keyNames),
    ExpressionAttributeValues: generateExpressionValues(keyNames, keys)
  };
  if (indexName) {
    params.IndexName = indexName;
  }
  try {
    let data = await docClient.send(new QueryCommand(params));
    let items = data.Items ?? [];
    while (data.LastEvaluatedKey) {
      data = await docClient.send(new QueryCommand({
        ...params,
        ExclusiveStartKey: data.LastEvaluatedKey
      }));
      if (data.Items) {
        items = items.concat(data.Items);
      }
    }
    return items;
  } catch (err) {
    console.log(err);
    if (err instanceof Error && "statusCode" in err && err.statusCode === 400) {
      return null;
    }
    throw err;
  }
}
|
|
169
|
-
// Dual purpose for compatibility. If indexName is provided, it will use query command to get the record; if not, use get command which is most efficient.
|
|
170
315
|
async function getRecordByKey(tableName, keys, indexName) {
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
return null;
|
|
178
|
-
}
|
|
179
|
-
}
|
|
180
|
-
else {
|
|
181
|
-
return readRecord(tableName, keys);
|
|
316
|
+
if (indexName) {
|
|
317
|
+
const records = await getRecordsByKey(tableName, keys, indexName);
|
|
318
|
+
if (records) {
|
|
319
|
+
return records[0];
|
|
320
|
+
} else {
|
|
321
|
+
return null;
|
|
182
322
|
}
|
|
323
|
+
} else {
|
|
324
|
+
return readRecord(tableName, keys);
|
|
325
|
+
}
|
|
183
326
|
}
|
|
184
327
|
/**
 * Map key names to DynamoDB ExpressionAttributeNames:
 * ["a"] -> { "#a": "a" }.
 */
function generateExpressionNames(keys) {
  return Object.fromEntries(keys.map((key) => [`#${key}`, key]));
}
|
|
187
330
|
/**
 * Map key names to DynamoDB ExpressionAttributeValues:
 * (["a"], { a: 1 }) -> { ":a": 1 }.
 */
function generateExpressionValues(keys, fields) {
  return Object.fromEntries(keys.map((key) => [`:${key}`, fields[key]]));
}
|
|
190
333
|
/**
 * Update an item by key: fields explicitly set to `undefined` are REMOVEd,
 * the remaining non-key fields are SET. The optional condition expression is
 * applied to both operations. Returns the updated attributes (ALL_NEW), or
 * undefined when there was nothing to change.
 */
async function updateRecordByKey(tableName, idKey, fields, conditionExpressions = null, verbose = false) {
  if (verbose) {
    console.log("update", tableName, idKey, fields);
  }
  const docClient = getDocClient();
  const idKeyNames = Object.keys(idKey);
  const fieldsToDelete = Object.keys(fields).filter((f) => fields[f] === undefined);
  const fieldsToUpdate = Object.keys(fields).filter((k) => !idKeyNames.includes(k) && !fieldsToDelete.includes(k));
  // Attach the condition expression only when one was supplied.
  const withCondition = (params) => conditionExpressions ? { ...params, ConditionExpression: conditionExpressions } : params;
  let data;
  if (fieldsToDelete.length > 0) {
    if (verbose) {
      console.log("delete fields", tableName, fieldsToDelete);
    }
    data = await docClient.send(new UpdateCommand(withCondition({
      TableName: tableName,
      Key: idKey,
      ExpressionAttributeNames: generateExpressionNames(fieldsToDelete),
      UpdateExpression: `REMOVE ${fieldsToDelete.map((f) => `#${f}`).join(", ")}`,
      ReturnValues: "ALL_NEW"
    })));
  }
  if (fieldsToUpdate.length > 0) {
    if (verbose) {
      console.log("update fields", tableName, fieldsToUpdate);
    }
    data = await docClient.send(new UpdateCommand(withCondition({
      TableName: tableName,
      Key: idKey,
      ExpressionAttributeNames: generateExpressionNames(fieldsToUpdate),
      ExpressionAttributeValues: generateExpressionValues(fieldsToUpdate, fields),
      UpdateExpression: `SET ${fieldsToUpdate.map((key) => `#${key} = :${key}`).join(", ")}`,
      ReturnValues: "ALL_NEW"
    })));
  }
  return data?.Attributes;
}
|
|
235
378
|
/**
 * Delete the given keys sequentially in BatchWrite chunks of 25 (the
 * DynamoDB per-request limit).
 */
async function batchDeleteRecords(tableName, keys) {
  const docClient = getDocClient();
  for (const chunk of arrayGroup(keys, 25)) {
    await docClient.send(new BatchWriteCommand({
      RequestItems: {
        [tableName]: chunk.map((key) => ({ DeleteRequest: { Key: key } }))
      }
    }));
  }
}
|
|
248
391
|
/**
 * Attribute name of the key of the given type ("HASH"/"RANGE") in a table
 * KeySchema, or undefined when absent.
 */
function getKeyName(keySchema, type) {
  return keySchema.find((k) => k.KeyType === type)?.AttributeName;
}
|
|
252
395
|
/**
 * Key attribute name of the given type for a named global secondary index;
 * undefined when the index (or its key schema) is not found.
 */
function getIndexKeyName(globalSecondaryIndexes, indexName, type) {
  const index = globalSecondaryIndexes.find((i) => i.IndexName === indexName);
  return index?.KeySchema && getKeyName(index.KeySchema, type);
}
|
|
256
399
|
/**
 * Delete every item whose (table or index) hash key equals `hashKeyValue`.
 * Queries page by page and batch-deletes by the table's primary key.
 *
 * Fixes: (1) tables without a RANGE key are now supported — the original
 * threw "cannot find main range key name" even though its delete-key
 * construction already contained an (unreachable) hash-only branch;
 * (2) the copy-pasted page-processing body is factored into helpers.
 *
 * @param {string}  tableName
 * @param {string}  [indexName] optional GSI to query against
 * @param {*}       hashKeyValue value of the hash key
 * @param {boolean} [verbose]
 * @returns {Promise<number>} number of items deleted
 */
async function deleteRecordsByHashKey(tableName, indexName, hashKeyValue, verbose = false) {
  const docClient = getDocClient();
  const meta = await getDynamoDB().send(new DescribeTableCommand({ TableName: tableName }));
  if (!meta.Table) {
    throw new Error(`cannot find table ${tableName}`);
  }
  if (indexName && !meta.Table.GlobalSecondaryIndexes) {
    throw new Error(`cannot find global secondary indexes for table ${tableName}`);
  }
  if (!meta.Table.KeySchema) {
    throw new Error(`cannot find key schema for table ${tableName}`);
  }
  const hashKeyName = indexName ? getIndexKeyName(meta.Table.GlobalSecondaryIndexes, indexName, "HASH") : getKeyName(meta.Table.KeySchema, "HASH");
  if (!hashKeyName) {
    throw new Error(`cannot find hash key name for table ${tableName}`);
  }
  const mainHashKeyName = getKeyName(meta.Table.KeySchema, "HASH");
  if (!mainHashKeyName) {
    throw new Error(`cannot find main hash key name for table ${tableName}`);
  }
  // Legitimately undefined for hash-only tables (no longer an error).
  const mainRangeKeyName = getKeyName(meta.Table.KeySchema, "RANGE");
  // Primary key of a queried item, used for the delete requests.
  const toPrimaryKey = (item) => mainRangeKeyName ? {
    [mainHashKeyName]: item[mainHashKeyName],
    [mainRangeKeyName]: item[mainRangeKeyName]
  } : {
    [mainHashKeyName]: item[mainHashKeyName]
  };
  let totalDeleted = 0;
  // Delete one page of query results; returns how many were deleted.
  const deletePage = async (page) => {
    if (!page.Items) {
      return 0;
    }
    await batchDeleteRecords(tableName, page.Items.map(toPrimaryKey));
    return page.Items.length;
  };
  const params = {
    TableName: tableName,
    KeyConditionExpression: "#hashKeyName = :hashKeyValue",
    ExpressionAttributeNames: { "#hashKeyName": hashKeyName },
    ExpressionAttributeValues: { ":hashKeyValue": hashKeyValue }
  };
  if (indexName) {
    params.IndexName = indexName;
  }
  let data = await docClient.send(new QueryCommand(params));
  totalDeleted += await deletePage(data);
  while (data.LastEvaluatedKey) {
    data = await docClient.send(new QueryCommand({
      ...params,
      ExclusiveStartKey: data.LastEvaluatedKey
    }));
    totalDeleted += await deletePage(data);
  }
  if (verbose) {
    console.log(`successfully delete ${totalDeleted} items`);
  }
  return totalDeleted;
}
|
|
326
|
-
export {
|
|
327
|
-
|
|
328
|
-
|
|
463
|
+
export {
|
|
464
|
+
updateRecordByKey,
|
|
465
|
+
scanWholeTable,
|
|
466
|
+
getRecordsByKey,
|
|
467
|
+
getRecordByKey,
|
|
468
|
+
getDocClient,
|
|
469
|
+
deleteRecordsByHashKey,
|
|
470
|
+
createRecord,
|
|
471
|
+
batchDeleteRecords,
|
|
472
|
+
batchCreateRecords,
|
|
473
|
+
UpdateCommand,
|
|
474
|
+
ScanCommand,
|
|
475
|
+
QueryCommand,
|
|
476
|
+
PutCommand,
|
|
477
|
+
GetCommand,
|
|
478
|
+
BatchWriteCommand
|
|
479
|
+
};
|