@ocap/resolver 1.19.4 → 1.19.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.js +15 -39
- package/lib/token-distribution.js +68 -40
- package/package.json +12 -12
package/lib/index.js
CHANGED
@@ -1046,11 +1046,11 @@ module.exports = class OCAPResolver {
       if (limit && paging.total > limit) {
         throw new CustomError('EXCEED_LIMIT', `Total exceeds limit ${limit}`);
       }
-      if (paging.total
+      if (paging.total <= pageSize) {
         return firstPage;
       }

-      const totalPage = Math.
+      const totalPage = Math.ceil(paging.total / pageSize) - 1;
       const cursors = new Array(totalPage).fill(true).map((_, i) => (i + 1) * pageSize);
       const step = process.env.RESOLVER_BATCH_CONCURRENCY || 3;
       let results = firstPage;
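
Note: a minimal, standalone sketch of the revised cursor math in this hunk. The total and pageSize values below are made-up; only the formulas come from the diff. The first page is returned directly when total <= pageSize, and the remaining pages are addressed by offset cursors.

// Assume a query that reported total = 250 with pageSize = 100.
const total = 250;
const pageSize = 100;

// Pages remaining after the first one: ceil(250 / 100) - 1 = 2
const totalPage = Math.ceil(total / pageSize) - 1;

// Offset cursors for those remaining pages: [100, 200]
const cursors = new Array(totalPage).fill(true).map((_, i) => (i + 1) * pageSize);

console.log(totalPage, cursors); // 2 [ 100, 200 ]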
@@ -1282,23 +1282,15 @@ module.exports = class OCAPResolver {
    * @param {number} [param.concurrency=3] Number of concurrent requests
    * @param {number} [param.chunkSize=2000] Maximum number of items to return in each next() call
    * @param {number} [param.pageSize=100] Number of items per page
-   * @param {
+   * @param {string} param.timeKey Key to access time in response
    * @param {string} param.dataKey Key to access data in response
    * @returns {Promise<Array>} Array of fetched items
    */
-  listChunks(fn, { concurrency = 3, chunkSize =
-    let totalPage;
-    let curPage;
+  listChunks(fn, { total, concurrency = 3, chunkSize = 2100, pageSize = 100, timeKey, dataKey }) {
     let done = false;
     let time;
-
-
-    const { paging, page, [dataKey]: list } = await fn({ size: pageSize, cursor: '0' });
-    const total = paging?.total || page?.total;
-    curPage = 1;
-    totalPage = Math.ceil(total / pageSize);
-    return list;
-    };
+    const totalPage = Math.ceil(total / pageSize);
+    let curPage = 0;

     const next = async () => {
       if (done) {
@@ -1307,28 +1299,10 @@ module.exports = class OCAPResolver {

       let results = [];

-      // first page
-      if (!totalPage) {
-        const data = await fetchFirstPage();
-        // only 1 page
-        if (data.length < pageSize) {
-          done = true;
-          return data;
-        }
-
-        time = getTime(data[data.length - 1]);
-        results = results.concat(data);
-
-        // limit
-        if (data.length >= chunkSize) {
-          return results;
-        }
-      }
-
       // next pages
       for (; curPage < totalPage; curPage += concurrency) {
         const batchResults = await Promise.all(
-          new Array(concurrency).fill(true).map(async (_, i) => {
+          new Array(Math.min(concurrency, totalPage - curPage)).fill(true).map(async (_, i) => {
             const { [dataKey]: list } = await fn({
               size: pageSize,
               cursor: i * pageSize,
@@ -1342,11 +1316,11 @@ module.exports = class OCAPResolver {
       // finish
       if (!flatResults.length) {
         done = true;
-        return
+        return results;
       }

       results = results.concat(flatResults);
-      time = results.length ?
+      time = results.length ? results[results.length - 1][timeKey] : null;

       // limit
       if (results.length >= chunkSize) {
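
Taken together, these hunks change listChunks from probing a first page to size itself into requiring the caller to pass total, timeKey, and pageSize up front, and they cap each batch at the number of pages actually remaining. Below is a simplified, self-contained model of that pattern; it is not the package's implementation, and the fetchPage helper, item shape, and cursor handling are assumptions for illustration only.

// Simplified model of the chunked pagination pattern shown above (NOT the package's code).
function listChunks(fn, { total, concurrency = 3, chunkSize = 2100, pageSize = 100, dataKey }) {
  const totalPage = Math.ceil(total / pageSize);
  let curPage = 0;
  let done = false;

  const next = async () => {
    if (done) return [];
    let results = [];
    while (curPage < totalPage) {
      // Only spawn as many concurrent requests as pages remain, mirroring the Math.min change.
      const batch = await Promise.all(
        new Array(Math.min(concurrency, totalPage - curPage))
          .fill(true)
          .map((_, i) => fn({ size: pageSize, cursor: (curPage + i) * pageSize }))
      );
      curPage += concurrency;
      const flat = batch.flatMap((page) => page[dataKey]);
      if (!flat.length) {
        done = true;
        return results;
      }
      results = results.concat(flat);
      if (results.length >= chunkSize) return results; // soft cap per next() call
    }
    done = true;
    return results;
  };

  return { next };
}

// Usage against a fake in-memory data source of 250 rows.
const rows = Array.from({ length: 250 }, (_, i) => ({ id: i }));
const fetchPage = async ({ size, cursor }) => ({ rows: rows.slice(cursor, cursor + size) });

(async () => {
  const { next } = listChunks(fetchPage, { total: rows.length, pageSize: 100, chunkSize: 200, dataKey: 'rows' });
  for (let chunk = await next(); chunk.length; chunk = await next()) {
    console.log('chunk of', chunk.length); // prints "chunk of 250" once, then stops
  }
})();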
@@ -1364,7 +1338,7 @@ module.exports = class OCAPResolver {
     };
   }

-  listTransactionsChunks(args = {}, { chunkSize =
+  async listTransactionsChunks(args = {}, { chunkSize = 2100, pageSize = 100 } = {}) {
     return this.listChunks(
       ({ size, time, cursor }) =>
         this.listTransactions({
@@ -1379,14 +1353,15 @@ module.exports = class OCAPResolver {
         }),
       {
         dataKey: 'transactions',
-
+        timeKey: 'time',
+        total: await this.indexdb.tx.count(),
         chunkSize,
         pageSize,
       }
     );
   }

-  listStakeChunks(args = {}, { chunkSize =
+  async listStakeChunks(args = {}, { chunkSize = 2100, pageSize = 100 } = {}) {
     return this.listChunks(
       ({ size, time, cursor }) =>
         this.listStakes({
@@ -1401,7 +1376,8 @@ module.exports = class OCAPResolver {
         }),
       {
         dataKey: 'stakes',
-
+        timeKey: 'renaissanceTime',
+        total: await this.indexdb.stake.count(),
         chunkSize,
         pageSize,
       }
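
Both wrappers are now async because they await this.indexdb.tx.count() / this.indexdb.stake.count() to supply total, so callers have to await them before destructuring next(). A hedged usage sketch follows; the resolver instance and logging are assumptions for illustration, not part of the diff.

// Sketch only: `resolver` is assumed to be an already-constructed OCAPResolver instance.
async function drainTransactions(resolver) {
  // listTransactionsChunks now resolves to the { next } iterator-like object.
  const { next } = await resolver.listTransactionsChunks({}, { chunkSize: 2100, pageSize: 100 });

  let txs = await next();
  while (txs.length) {
    // Items are expected to carry a `time` field, matching timeKey: 'time' above.
    console.log('chunk of', txs.length, 'ending at', txs[txs.length - 1].time);
    txs = await next();
  }
}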
package/lib/token-distribution.js
CHANGED

@@ -16,6 +16,7 @@ class TokenDistributionManager {
   constructor(resolver) {
     this.resolver = resolver;
     this.indexdb = resolver.indexdb;
+    this.isProcessing = false;
   }

   formatDistribution(distribution) {
@@ -40,7 +41,7 @@ class TokenDistributionManager {
     return data && createIndexedTokenDistribution(data);
   }

-  async saveDistribution(distribution) {
+  async saveDistribution(distribution, isEnsureLatest = true) {
     const data = createIndexedTokenDistribution(distribution);
     const indexdbDistribution = await this.getDistribution(data.tokenAddress);

@@ -48,8 +49,10 @@ class TokenDistributionManager {
       await this.indexdb.tokenDistribution.insert(data);
     } else {
       // ensure txTime is latest
-
-
+      if (isEnsureLatest) {
+        const latestTime = Math.max(new Date(indexdbDistribution.txTime).getTime(), new Date(data.txTime).getTime());
+        data.txTime = new Date(latestTime).toISOString();
+      }
       await this.indexdb.tokenDistribution.update(data.tokenAddress, data);
     }

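
A quick worked example of the latest-wins txTime merge added above (the timestamps are made-up):

// Existing record vs. incoming data; the later of the two ISO timestamps is kept.
const storedTxTime = '2024-05-01T10:00:00.000Z';
const incomingTxTime = '2024-04-30T09:00:00.000Z';

const latestTime = Math.max(new Date(storedTxTime).getTime(), new Date(incomingTxTime).getTime());
console.log(new Date(latestTime).toISOString()); // 2024-05-01T10:00:00.000Z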
@@ -67,6 +70,11 @@ class TokenDistributionManager {
   async updateByToken(tokenAddress, force) {
     const { logger, config } = this.resolver;

+    if (this.isProcessing) {
+      logger?.logger('Token distribution is already in progress', { tokenAddress, force });
+      return;
+    }
+
     const distribution = force
       ? this.formatDistribution({ tokenAddress })
       : this.formatDistribution((await this.getDistribution(tokenAddress)) || { tokenAddress });
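
The isProcessing flag is a simple re-entrancy guard: concurrent updateByToken / updateByTx calls bail out while an update is running, and the flag is released in a finally block further down. (The published code logs via logger?.logger(...), which looks like it may be intended as logger?.info or logger?.warn.) A self-contained sketch of the same pattern, with assumed names, not the package's code:

// Minimal re-entrancy guard sketch.
class Job {
  constructor() {
    this.isProcessing = false;
  }

  async run(payload) {
    if (this.isProcessing) {
      console.warn('already in progress, skipping', payload);
      return;
    }

    this.isProcessing = true;
    try {
      await new Promise((resolve) => setTimeout(resolve, 50)); // stand-in for the real work
      return 'done';
    } finally {
      this.isProcessing = false; // released even if the work throws
    }
  }
}

// Only the first of two overlapping calls does the work.
const job = new Job();
Promise.all([job.run('a'), job.run('b')]).then((results) => console.log(results)); // [ 'done', undefined ]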
@@ -76,64 +84,77 @@ class TokenDistributionManager {
       distribution: createIndexedTokenDistribution(distribution),
     });

-
-      this.handleModerator(distribution);
-    }
+    this.isProcessing = true;

-
-
-
-
-    });
-    let nextData = await next();
+    try {
+      if (force && isDefaultToken) {
+        this.handleModerator(distribution);
+      }

-
-
-
-
-        startTime: nextData[0].time,
-        startHash: nextData[0].hash,
-        endTime: nextData[nextData.length - 1].time,
+      const { next } = await this.resolver.listTransactionsChunks({
+        timeFilter: { startDateTime: distribution.txTime },
+        // Default token is used for gas payment and may appear in any transaction, so we cannot filter by tokenFilter
+        tokenFilter: isDefaultToken ? {} : { tokenFilter: { tokens: [tokenAddress] } },
       });
+      let nextData = await next();
+
+      // Process transactions in chunks and update indexdb
+      while (nextData.length) {
+        logger?.info('Updating token distribution in chunks', {
+          chunkSize: nextData.length,
+          startTime: nextData[0].time,
+          startHash: nextData[0].hash,
+          endTime: nextData[nextData.length - 1].time,
+        });
+
+        const handlePromises = nextData.map((tx) => this.handleTx(tx, distribution));
+        await Promise.all(handlePromises);
+
+        // update indexdb
+        await this.saveDistribution(distribution, false);
+        nextData = await next();
+      }

-
-
-
-
-      await this.saveDistribution(distribution);
-      nextData = await next();
-    }
-
-    // We cannot distinguish between revokedStake and stake from tx receipts here,
-    // so we need to read all stake transactions and recalculate token distribution based on their revokeTokens and tokens
-    await this.splitStake(distribution, force);
-    await this.saveDistribution(distribution);
-
-    const result = createIndexedTokenDistribution(distribution);
+      // We cannot distinguish between revokedStake and stake from tx receipts here,
+      // so we need to read all stake transactions and recalculate token distribution based on their revokeTokens and tokens
+      await this.splitStake(distribution);
+      await this.saveDistribution(distribution, false);

-
+      const result = createIndexedTokenDistribution(distribution);

-
+      logger.info(`Token distribution update completed (${tokenAddress})`, { distribution: result });
+      return result;
+    } catch (e) {
+      logger?.error('Token distribution update failed', { error: e });
+      return null;
+    } finally {
+      this.isProcessing = false;
+    }
   }

   /**
    * Split out revokedStake / gasStake from stake
    *
    * @param {Object} distribution
-   * @param {boolean} force If force is false, only process transactions after txTime in indexdb. Default is false
    * @returns {Promise<Object>}
    */
-  async splitStake(distribution
+  async splitStake(distribution) {
+    const { logger } = this.resolver;
     const { tokenAddress } = distribution;

-    const { next } = await this.resolver.listStakeChunks(
-      timeFilter: !force ? { startDateTime: distribution.txTime } : {},
-    });
+    const { next } = await this.resolver.listStakeChunks();

     let nextData = await next();

     // Process transactions in chunks and update indexdb
     while (nextData.length) {
+      logger?.info('Updating stake distribution in chunks', {
+        chunkSize: nextData.length,
+        startTime: nextData[0].renaissanceTime,
+        startAddress: nextData[0].address,
+        endTime: nextData[nextData.length - 1].renaissanceTime,
+      });
+
       nextData.forEach((stakeState) => {
         const isGasStake = this.isGasStake(stakeState);
         const token = stakeState.tokens.find((x) => x.address === tokenAddress);
@@ -168,6 +189,13 @@ class TokenDistributionManager {
    * @returns {Promise<Object>} The updated token distributions
    */
   async updateByTx(tx, context) {
+    const { logger } = this.resolver;
+
+    if (this.isProcessing) {
+      logger?.logger('Token distribution is already in progress', { tx });
+      return;
+    }
+
     const formattedTx = await this.resolver.formatTx(tx);
     const tokens = formattedTx.tokenSymbols.map(({ address }) => address);

package/package.json
CHANGED

@@ -3,7 +3,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.19.4",
+  "version": "1.19.6",
   "description": "GraphQL resolver built upon ocap statedb and GQL layer",
   "main": "lib/index.js",
   "files": [
@@ -22,18 +22,18 @@
     "jest": "^29.7.0"
   },
   "dependencies": {
-    "@arcblock/did": "1.19.
-    "@arcblock/did-util": "1.19.
-    "@arcblock/validator": "1.19.
-    "@ocap/config": "1.19.
-    "@ocap/indexdb": "1.19.
-    "@ocap/mcrypto": "1.19.
-    "@ocap/message": "1.19.
-    "@ocap/state": "1.19.
-    "@ocap/tx-protocols": "1.19.
-    "@ocap/util": "1.19.
+    "@arcblock/did": "1.19.6",
+    "@arcblock/did-util": "1.19.6",
+    "@arcblock/validator": "1.19.6",
+    "@ocap/config": "1.19.6",
+    "@ocap/indexdb": "1.19.6",
+    "@ocap/mcrypto": "1.19.6",
+    "@ocap/message": "1.19.6",
+    "@ocap/state": "1.19.6",
+    "@ocap/tx-protocols": "1.19.6",
+    "@ocap/util": "1.19.6",
     "debug": "^4.3.6",
     "lodash": "^4.17.21"
   },
-  "gitHead": "
+  "gitHead": "a884f23e66c8b498f2bf8d1efbbb3b0d024f4a9d"
 }