@ocap/resolver 1.18.152 → 1.18.153
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/lib/index.js +12 -2
- package/lib/migration-chain.js +21 -13
- package/lib/token-flow.js +36 -15
- package/package.json +12 -12
package/lib/index.js
CHANGED

```diff
@@ -682,7 +682,11 @@ module.exports = class OCAPResolver {
   }
 
   async verifyAccountRisk(args, ctx) {
-    return tokenFlow.verifyAccountRisk(
+    return tokenFlow.verifyAccountRisk(
+      { ...args, accountLimit: process.env.VERIFY_RISK_ACCOUNT_LIMIT, txLimit: process.env.VERIFY_RISK_TX_LIMIT },
+      this,
+      ctx
+    );
   }
 
   async listTokenFlows(args, ctx) {
@@ -1004,11 +1008,17 @@ module.exports = class OCAPResolver {
     return attachPaidTxGas(tx);
   }
 
-  async _getAllResults(dataKey, fn) {
+  async _getAllResults(dataKey, fn, limit) {
     const results = [];
     const pageSize = 100;
 
     const { paging, [dataKey]: firstPage } = await fn({ size: pageSize });
+
+    // Skip if total exceeds limit cause we cannot query full data
+    if (limit && paging.total > limit) {
+      throw new CustomError('EXCEED_LIMIT', `Total exceeds limit ${limit}`);
+    }
+
     if (paging.total < pageSize) {
       return firstPage;
     }
```
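The `verifyAccountRisk` resolver now forwards `VERIFY_RISK_ACCOUNT_LIMIT` and `VERIFY_RISK_TX_LIMIT` from the environment, and `_getAllResults` gained a `limit` parameter that throws `EXCEED_LIMIT` as soon as the first page reports a total above it. A minimal standalone sketch of that limit-aware pagination pattern, assuming a page function that returns `{ paging: { total, cursor }, [dataKey]: [...] }` (the cursor-based paging and the error class are illustrative, not the resolver's actual API):

```js
// Sketch of a limit-aware pagination helper in the spirit of _getAllResults(dataKey, fn, limit).
// The { paging: { total, cursor } } shape is assumed for illustration.
class LimitExceededError extends Error {
  constructor(limit) {
    super(`Total exceeds limit ${limit}`);
    this.code = 'EXCEED_LIMIT';
  }
}

async function getAllResults(dataKey, fn, limit) {
  const pageSize = 100;

  // The first page also reports the total, so the limit is enforced up front
  // instead of after paging through everything.
  const first = await fn({ size: pageSize });
  if (limit && first.paging.total > limit) {
    throw new LimitExceededError(limit);
  }

  const results = [...first[dataKey]];

  // Keep fetching while a cursor is returned and items remain (assumed paging shape).
  let cursor = first.paging.cursor;
  while (cursor && results.length < first.paging.total) {
    const page = await fn({ size: pageSize, cursor });
    results.push(...page[dataKey]);
    cursor = page.paging && page.paging.cursor;
  }

  return results;
}
```

Callers that cannot process unbounded result sets can catch the `EXCEED_LIMIT` code and treat the account as skipped, which is how `token-flow.js` consumes it below. Note that values read from `process.env` arrive as strings, so a caller wanting strict numeric comparison would coerce them first (e.g. `Number(process.env.VERIFY_RISK_TX_LIMIT)`).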
package/lib/migration-chain.js
CHANGED

```diff
@@ -1,4 +1,5 @@
 /* eslint-disable max-classes-per-file */
+const { isEthereumDid } = require('@arcblock/did');
 
 class MigrationNode {
   constructor(address, validFrom, validUntil = null, nextAddress = null) {
@@ -59,7 +60,7 @@ class MigrationChainManager {
    */
   _processMigration(fromAddr, toAddr, timestamp) {
     // Find or create the root address for this chain
-    const rootAddr = this.
+    const rootAddr = this.getRootAddress(fromAddr);
 
     // Get or create the chain
     if (!this.chains.has(rootAddr)) {
@@ -81,31 +82,29 @@ class MigrationChainManager {
     }
 
     // Update root address mapping for the new address
-    this.rootAddresses.set(toAddr, rootAddr);
+    this.rootAddresses.set(this.formatAddress(toAddr), rootAddr);
   }
 
   /**
    * Find the valid address at a specific timestamp
-   * @param {string}
+   * @param {string} address - Address to look up
    * @param {Date} timestamp - Timestamp to check
    * @returns {string} Valid address at the timestamp
    */
-  findAddressAtTime(
-
-    const rootAddr = this.rootAddresses.get(initialAddress) || initialAddress;
-    const chain = this.chains.get(rootAddr);
+  findAddressAtTime(address, timestamp) {
+    const chains = this.getMigrationHistory(address);
 
-    if (!
-      return
+    if (!chains?.length) {
+      return address;
     }
 
     // Binary search for the correct address
     let left = 0;
-    let right =
+    let right = chains.length - 1;
 
     while (left <= right) {
       const mid = Math.floor((left + right) / 2);
-      const node =
+      const node = chains[mid];
 
       if (node.validFrom <= timestamp && (!node.validUntil || timestamp < node.validUntil)) {
         return node.address;
@@ -117,7 +116,16 @@ class MigrationChainManager {
       }
     }
 
-    return
+    return chains[chains.length - 1].address;
+  }
+
+  formatAddress(address) {
+    return isEthereumDid(address) ? address.toLowerCase() : address;
+  }
+
+  getRootAddress(address) {
+    const formattedAddress = this.formatAddress(address);
+    return this.rootAddresses.get(formattedAddress) || formattedAddress;
   }
 
   /**
@@ -126,7 +134,7 @@ class MigrationChainManager {
    * @returns {Array} List of migration nodes
    */
   getMigrationHistory(address) {
-    const rootAddr = this.
+    const rootAddr = this.getRootAddress(address);
     return this.chains.get(rootAddr) || [];
   }
 }
```
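Two changes matter here: `rootAddresses` keys are now normalized through the new `formatAddress` (Ethereum DIDs are lowercased, detected via `isEthereumDid` from `@arcblock/did`, so lookups are case-insensitive), and `findAddressAtTime` binary-searches the chain returned by `getMigrationHistory`. Below is a standalone sketch of that interval lookup with nodes shaped like `MigrationNode` (`{ address, validFrom, validUntil }`); the chain data and addresses are invented for illustration:

```js
// Standalone sketch of the findAddressAtTime(address, timestamp) interval lookup.
// Nodes mirror MigrationNode: valid on [validFrom, validUntil); the last node has validUntil = null.
function findAddressAtTime(chain, timestamp) {
  if (!chain?.length) return null;

  let left = 0;
  let right = chain.length - 1;

  while (left <= right) {
    const mid = Math.floor((left + right) / 2);
    const node = chain[mid];

    if (node.validFrom <= timestamp && (!node.validUntil || timestamp < node.validUntil)) {
      return node.address; // timestamp falls inside this node's validity window
    }
    if (timestamp < node.validFrom) {
      right = mid - 1; // look at earlier migrations
    } else {
      left = mid + 1; // look at later migrations
    }
  }

  // Fallback for timestamps outside every recorded interval (e.g. before the first migration).
  return chain[chain.length - 1].address;
}

// Invented example: an account that migrated once on 2024-01-01.
const chain = [
  { address: 'addr-old', validFrom: new Date('2023-01-01'), validUntil: new Date('2024-01-01') },
  { address: 'addr-new', validFrom: new Date('2024-01-01'), validUntil: null },
];
console.log(findAddressAtTime(chain, new Date('2023-06-01'))); // addr-old
console.log(findAddressAtTime(chain, new Date('2024-06-01'))); // addr-new
```

Because migration nodes are contiguous and sorted by `validFrom`, half of the chain can be discarded at every step, which is what makes the binary search valid.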
package/lib/token-flow.js
CHANGED

```diff
@@ -1,6 +1,6 @@
 /* eslint-disable no-await-in-loop */
 
-const { BN, fromTokenToUnit } = require('@ocap/util');
+const { BN, fromTokenToUnit, isSameDid } = require('@ocap/util');
 const { schemas, Joi } = require('@arcblock/validator');
 const { CustomError } = require('@ocap/util/lib/error');
 const uniq = require('lodash/uniq');
@@ -126,6 +126,11 @@ const getVaultAccounts = (config) => {
   return Object.values(config.vaults).flat().concat(FORGE_TOKEN_HOLDER);
 };
 
+const getInitialBalance = (address, config) => {
+  const account = config?.accounts?.find((x) => isSameDid(x.address, address));
+  return account ? fromTokenToUnit(account.balance) : ZERO;
+};
+
 const fixMigrateReceipts = async ({ accountAddress, tx }, resolver) => {
   const migrationChain = await resolver.getMigrationChain();
   const address = migrationChain.findAddressAtTime(accountAddress, new Date(tx.time));
@@ -134,14 +139,18 @@ const fixMigrateReceipts = async ({ accountAddress, tx }, resolver) => {
   // fix receipts address
   if (address && migrations.length) {
     tx.receipts.forEach((receipt) => {
-      if (migrations.some((x) => x.address
+      if (migrations.some((x) => isSameDid(x.address, receipt.address))) {
         receipt.address = address;
       }
     });
   }
 };
 
-const verifyAccountRisk = async (
+const verifyAccountRisk = async (
+  { accountAddress, tokenAddress, accountLimit = 400, txLimit = 10000 },
+  resolver,
+  ctx = {}
+) => {
   // validate request params
   const { error } = paramsSchema.validate({ accountAddress, tokenAddress, resolver });
   if (error) {
@@ -151,12 +160,12 @@ const verifyAccountRisk = async ({ accountAddress, tokenAddress }, resolver, ctx
   const checkedAccounts = new Map();
   const checkedTx = new Map();
   const accountQueue = [accountAddress];
-
+
   const vaultAccounts = getVaultAccounts(resolver.config);
 
   while (accountQueue.length) {
     // limit
-    if (checkedAccounts.size >=
+    if (checkedAccounts.size >= accountLimit) {
       return {
         isRisky: false,
         reason: 'MAX_ACCOUNT_SIZE_LIMIT',
@@ -172,21 +181,33 @@ const verifyAccountRisk = async ({ accountAddress, tokenAddress }, resolver, ctx
     if (checkedAccounts.has(address)) continue;
     // skip trusted accounts
     if (await resolver.filter.isTrusted(address)) {
-      checkedAccounts.set(address,
+      checkedAccounts.set(address, true);
      continue;
     }
 
-
-
-
-
+    let transactions = [];
+    try {
+      transactions = await resolver._getAllResults(
+        'transactions',
+        (paging) => resolver.listTransactions({ paging, accountFilter: { accounts: [address] } }, ctx),
+        txLimit
+      );
+    } catch (e) {
+      // skip if tx limit exceeded
+      if (e.code === 'EXCEED_LIMIT') {
+        checkedAccounts.set(address, true);
+        continue;
+      }
+      throw e;
+    }
 
+    const accountState = await resolver.getAccountState({ address, traceMigration: false }, ctx);
     if (!accountState) {
       throw new CustomError('INVALID_REQUEST', `Invalid address ${address}`);
     }
-    const balance = accountState.tokens.find((item) => item.address
+    const balance = accountState.tokens.find((item) => isSameDid(item.address, tokenAddress))?.value || 0;
 
-    let transferIn =
+    let transferIn = getInitialBalance(address, resolver.config);
     let transferOut = ZERO;
 
     // Parse txs to get transfer amounts
@@ -205,20 +226,20 @@ const verifyAccountRisk = async ({ accountAddress, tokenAddress }, resolver, ctx
     // Calculate the total amount of transfer for this address
     transferIn = transferIn.add(
       transferInList
-        .filter((item) => item.address
+        .filter((item) => isSameDid(item.address, address))
         .map((item) => item.value)
         .reduce((prev, cur) => prev.add(cur), ZERO)
     );
 
     transferOut = transferOut.add(
       transferOutList
-        .filter((item) => item.address
+        .filter((item) => isSameDid(item.address, address))
         .map((item) => item.value)
         .reduce((prev, cur) => prev.add(cur), ZERO)
     );
 
     // push transferIn accounts to queue for next time check
-    if (transferInList.some((item) => item.address
+    if (transferInList.some((item) => isSameDid(item.address, address))) {
       const accountsToQueue = transferOutList
         .filter((item) => {
           if (accountQueue.includes(item.address)) return false;
```
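Most of this file's hunks are the same mechanical substitution: raw address comparisons become `isSameDid` from `@ocap/util`, so DIDs that differ only in Ethereum checksum casing still match, and `getInitialBalance` seeds `transferIn` with any genesis balance declared in `config.accounts`. A small illustration of why the comparison change matters, using a simplified stand-in for `isSameDid` that only lowercases `0x`-prefixed addresses (the addresses and receipts below are invented; the real helper lives in `@ocap/util`):

```js
// Simplified stand-in for isSameDid: assume the only normalization needed is
// lowercasing 0x-prefixed (Ethereum-style) addresses.
const sameAddress = (a, b) => {
  const norm = (x) => (typeof x === 'string' && x.startsWith('0x') ? x.toLowerCase() : x);
  return norm(a) === norm(b);
};

// A strict-equality filter silently drops entries whose address differs only in casing.
const transferInList = [
  { address: '0xAbC0000000000000000000000000000000000001', value: 10 },
  { address: 'z1SomeNonEthereumAddress', value: 5 },
];
const address = '0xabc0000000000000000000000000000000000001';

const strictMatches = transferInList.filter((item) => item.address === address);
const didMatches = transferInList.filter((item) => sameAddress(item.address, address));

console.log(strictMatches.length); // 0 - the checksum-cased entry is missed
console.log(didMatches.length); // 1 - matched after normalization
```

With strict equality, mixed-case receipts and token entries would drop out of the `transferIn`/`transferOut` sums; a DID-aware comparison keeps the risk accounting consistent for Ethereum-style accounts.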
package/package.json
CHANGED

```diff
@@ -3,7 +3,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.18.
+  "version": "1.18.153",
   "description": "GraphQL resolver built upon ocap statedb and GQL layer",
   "main": "lib/index.js",
   "files": [
@@ -22,18 +22,18 @@
     "jest": "^29.7.0"
   },
   "dependencies": {
-    "@arcblock/did": "1.18.
-    "@arcblock/did-util": "1.18.
-    "@arcblock/validator": "1.18.
-    "@ocap/config": "1.18.
-    "@ocap/indexdb": "1.18.
-    "@ocap/mcrypto": "1.18.
-    "@ocap/message": "1.18.
-    "@ocap/state": "1.18.
-    "@ocap/tx-protocols": "1.18.
-    "@ocap/util": "1.18.
+    "@arcblock/did": "1.18.153",
+    "@arcblock/did-util": "1.18.153",
+    "@arcblock/validator": "1.18.153",
+    "@ocap/config": "1.18.153",
+    "@ocap/indexdb": "1.18.153",
+    "@ocap/mcrypto": "1.18.153",
+    "@ocap/message": "1.18.153",
+    "@ocap/state": "1.18.153",
+    "@ocap/tx-protocols": "1.18.153",
+    "@ocap/util": "1.18.153",
     "debug": "^4.3.6",
     "lodash": "^4.17.21"
   },
-  "gitHead": "
+  "gitHead": "ad18c565becef73d6ee782502c3f4858de43b68b"
 }
```