@talismn/balances 0.0.0-pr2043-20250618091117 → 0.0.0-pr2043-20250619015240
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/declarations/src/BalanceModule.d.ts +1 -1
- package/dist/declarations/src/getMiniMetadata/getMiniMetadatas.d.ts +1 -1
- package/dist/declarations/src/getMiniMetadata/getUpdatedMiniMetadatas.d.ts +1 -1
- package/dist/declarations/src/getMiniMetadata/index.d.ts +1 -1
- package/dist/declarations/src/index.d.ts +0 -2
- package/dist/declarations/src/modules/SubstrateNativeModule/types.d.ts +0 -4
- package/dist/declarations/src/modules/SubstrateNativeModule/util/systemProperties.d.ts +22 -0
- package/dist/declarations/src/modules/util/buildStorageCoders.d.ts +10 -0
- package/dist/talismn-balances.cjs.dev.js +459 -771
- package/dist/talismn-balances.cjs.prod.js +459 -771
- package/dist/talismn-balances.esm.js +460 -771
- package/package.json +9 -7
- package/dist/declarations/src/MiniMetadataUpdater.d.ts +0 -43
- package/dist/declarations/src/util/hydrateChaindata.d.ts +0 -8
- package/dist/declarations/src/util/index.d.ts +0 -1
@@ -1,8 +1,6 @@
 'use strict';

-var chaindataProvider = require('@talismn/chaindata-provider');
 var dexie = require('dexie');
-var rxjs = require('rxjs');
 var anylogger = require('anylogger');
 var tokenRates = require('@talismn/token-rates');
 var util = require('@talismn/util');
@@ -10,12 +8,14 @@ var BigNumber = require('bignumber.js');
 var util$1 = require('@polkadot/util');
 var utilCrypto = require('@polkadot/util-crypto');
 var pako = require('pako');
+var chaindataProvider = require('@talismn/chaindata-provider');
 var viem = require('viem');
 var isEqual = require('lodash/isEqual');
 var txwrapperCore = require('@substrate/txwrapper-core');
 var scale = require('@talismn/scale');
 var lodash = require('lodash');
 var camelCase = require('lodash/camelCase');
+var PQueue = require('p-queue');
 var sapi = require('@talismn/sapi');
 var types = require('@polkadot/types');
 var groupBy = require('lodash/groupBy');
@@ -23,8 +23,10 @@ var utils = require('@polkadot-api/utils');
 var polkadotApi = require('polkadot-api');
 var PromisePool = require('@supercharge/promise-pool');
 var chainConnector = require('@talismn/chain-connector');
+var rxjs = require('rxjs');
 var scaleTs = require('scale-ts');
 var upperFirst = require('lodash/upperFirst');
+var z = require('zod/v4');
 var apiContract = require('@polkadot/api-contract');

 function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
@@ -34,9 +36,11 @@ var BigNumber__default = /*#__PURE__*/_interopDefault(BigNumber);
 var pako__default = /*#__PURE__*/_interopDefault(pako);
 var isEqual__default = /*#__PURE__*/_interopDefault(isEqual);
 var camelCase__default = /*#__PURE__*/_interopDefault(camelCase);
+var PQueue__default = /*#__PURE__*/_interopDefault(PQueue);
 var groupBy__default = /*#__PURE__*/_interopDefault(groupBy);
 var PromisePool__default = /*#__PURE__*/_interopDefault(PromisePool);
 var upperFirst__default = /*#__PURE__*/_interopDefault(upperFirst);
+var z__default = /*#__PURE__*/_interopDefault(z);

 // TODO: Document default balances module purpose/usage
 const DefaultBalanceModule = type => ({
@@ -120,13 +124,11 @@ class EvmTokenFetcher {
 // }
 }

-var packageJson = {
+var pkg = {
 name: "@talismn/balances",
-version: "0.0.0-pr2043-20250618091117"};
-
-const libVersion = packageJson.version;
+version: "0.0.0-pr2043-20250619015240"};

-var log = anylogger__default.default(packageJson.name);
+var log = anylogger__default.default(pkg.name);

 function excludeFromTransferableAmount(locks) {
 if (typeof locks === "string") return BigInt(locks);
@@ -336,20 +338,13 @@ class Balances {
 return new SumBalancesFormatter(this);
 }
 }
-
-// type BalanceJsonEvm = BalanceJson & { evmNetworkId: string }
-
-// const isBalanceEvm = (balance: BalanceJson): balance is BalanceJsonEvm => "evmNetworkId" in balance
-
 const getBalanceId = balance => {
 const {
 source,
 address,
-tokenId,
-networkId
+tokenId
 } = balance;
-
-return [source, address, networkId, tokenId].filter(util.isTruthy).join("::");
+return [source, address, tokenId].join("::");
 };

 /**
@@ -411,28 +406,6 @@ class Balance {
 get address() {
 return this.#storage.address;
 }
-
-// /** @deprecated */
-// get chainId() {
-// return isBalanceEvm(this.#storage) ? undefined : this.#storage.chainId
-// }
-// /** @deprecated */
-// get chain() {
-// return (this.#db?.networks && this.networkId && this.#db?.networks[this.networkId]) || null
-// }
-
-// /** @deprecated */
-// get evmNetworkId() {
-// return isBalanceEvm(this.#storage) ? this.#storage.evmNetworkId : undefined
-// }
-// /** @deprecated */
-// get evmNetwork() {
-// return (
-// (this.#db?.networks && this.networkId && this.#db?.networks[this.networkId]) ||
-// null
-// )
-// }
-
 get networkId() {
 return this.#storage.networkId;
 }
@@ -1061,257 +1034,6 @@ class TalismanBalancesDatabase extends dexie.Dexie {
 }
 const db = new TalismanBalancesDatabase();

-const minimumHydrationInterval = 300_000; // 300_000ms = 300s = 5 minutes
-
-/**
- * A substrate dapp needs access to a set of types when it wants to communicate with a blockchain node.
- *
- * These types are used to encode requests & decode responses via the SCALE codec.
- * Each chain generally has its own set of types.
- *
- * Substrate provides a construct to retrieve these types from a blockchain node.
- * The chain metadata.
- *
- * The metadata includes the types required for any communication with the chain,
- * including lots of methods which are not relevant to balance fetching.
- *
- * As such, the metadata can clock in at around 1-2MB per chain, which is a lot of storage
- * for browser-based dapps which want to connect to lots of chains.
- *
- * By utilizing the wonderful [scale-ts](https://github.com/unstoppablejs/unstoppablejs/tree/main/packages/scale-ts#readme) library,
- * we can trim the chain metadata down so that it only includes the types we need for balance fetching.
- *
- * Each balance module has a function to do just that, `BalanceModule::fetchSubstrateChainMeta`.
- *
- * But, we only want to run this operation when necessary.
- *
- * The purpose of this class, `MiniMetadataUpdater`, is to maintain a local cache of
- * trimmed-down metadatas, which we'll refer to as `MiniMetadatas`.
- */
-class MiniMetadataUpdater {
-#lastHydratedMiniMetadatasAt = 0;
-#lastHydratedCustomChainsAt = 0;
-#chainConnectors;
-#chaindataProvider;
-#balanceModules;
-constructor(chainConnectors, chaindataProvider, balanceModules) {
-this.#chainConnectors = chainConnectors;
-this.#chaindataProvider = chaindataProvider;
-this.#balanceModules = balanceModules;
-}
-
-/** Subscribe to the metadata for a chain */
-subscribe(chainId) {
-return rxjs.from(dexie.liveQuery(() => db.miniMetadatas.filter(m => m.chainId === chainId).toArray().then(array => array[0])));
-}
-async update(chainIds) {
-await this.updateSubstrateChains(chainIds);
-}
-async statuses(chains) {
-const ids = await db.miniMetadatas.orderBy("id").primaryKeys();
-const wantedIdsByChain = new Map(chains.flatMap(({
-id: chainId,
-specName,
-specVersion
-}) => {
-if (specName === null) return [];
-if (specVersion === null) return [];
-return [[chainId, this.#balanceModules.filter(m => m.type.startsWith("substrate-")).map(({
-type: source
-}) => deriveMiniMetadataId({
-source,
-chainId,
-specVersion,
-libVersion
-}))]];
-}));
-const statusesByChain = new Map(Array.from(wantedIdsByChain.entries()).map(([chainId, wantedIds]) => [chainId, wantedIds.every(wantedId => ids.includes(wantedId)) ? "good" : "none"]));
-return {
-wantedIdsByChain,
-statusesByChain
-};
-}
-async hydrateFromChaindata() {
-// TODO review this. feels unnecessary to fetch them all
-
-const now = Date.now();
-if (now - this.#lastHydratedMiniMetadatasAt < minimumHydrationInterval) return false;
-const dbHasMiniMetadatas = (await db.miniMetadatas.count()) > 0;
-try {
-try {
-var miniMetadatas = await this.#chaindataProvider.miniMetadatas(); // eslint-disable-line no-var
-if (miniMetadatas.length <= 0) throw new Error("Ignoring empty chaindata miniMetadatas response");
-} catch (error) {
-if (dbHasMiniMetadatas) throw error;
-log.warn("Failed to fetch miniMetadatas from chaindata", error);
-
-// On first start-up (db is empty), if we fail to fetch miniMetadatas then we should
-// initialize the DB with the list of miniMetadatas inside our init/mini-metadatas.json file.
-// This data will represent a relatively recent copy of what's in chaindata,
-// which will be better for our users than to have nothing at all.
-var miniMetadatas = await chaindataProvider.fetchInitMiniMetadatas(); // eslint-disable-line no-var
-}
-await db.miniMetadatas.bulkPut(miniMetadatas);
-this.#lastHydratedMiniMetadatasAt = now;
-return true;
-} catch (error) {
-log.warn(`Failed to hydrate miniMetadatas from chaindata`, error);
-return false;
-}
-}
-async hydrateCustomChains() {
-// TODO
-// const now = Date.now()
-// if (now - this.#lastHydratedCustomChainsAt < minimumHydrationInterval) return false
-// const chains = await this.#chaindataProvider.chains()
-// const customChains = chains.filter(
-// (chain): chain is CustomChain => "isCustom" in chain && chain.isCustom,
-// )
-// const updatedCustomChains: Array<CustomChain> = []
-// const concurrency = 4
-// ;(
-// await PromisePool.withConcurrency(concurrency)
-// .for(customChains)
-// .process(async (customChain) => {
-// const send = (method: string, params: unknown[]) =>
-// this.#chainConnectors.substrate?.send(customChain.id, method, params)
-// const [genesisHash, runtimeVersion, chainName, chainType] = await Promise.all([
-// send("chain_getBlockHash", [0]),
-// send("state_getRuntimeVersion", []),
-// send("system_chain", []),
-// send("system_chainType", []),
-// ])
-// // deconstruct rpc data
-// const { specName, implName } = runtimeVersion
-// const specVersion = String(runtimeVersion.specVersion)
-// const changed =
-// customChain.genesisHash !== genesisHash ||
-// customChain.chainName !== chainName ||
-// !isEqual(customChain.chainType, chainType) ||
-// customChain.implName !== implName ||
-// customChain.specName !== specName ||
-// customChain.specVersion !== specVersion
-// if (!changed) return
-// customChain.genesisHash = genesisHash
-// customChain.chainName = chainName
-// customChain.chainType = chainType
-// customChain.implName = implName
-// customChain.specName = specName
-// customChain.specVersion = specVersion
-// updatedCustomChains.push(customChain)
-// })
-// ).errors.forEach((error) => log.error("Error hydrating custom chains", error))
-// if (updatedCustomChains.length > 0) {
-// await this.#chaindataProvider.transaction("rw", ["chains"], async () => {
-// for (const updatedCustomChain of updatedCustomChains) {
-// await this.#chaindataProvider.removeCustomChain(updatedCustomChain.id)
-// await this.#chaindataProvider.addCustomChain(updatedCustomChain)
-// }
-// })
-// }
-// if (updatedCustomChains.length > 0) this.#lastHydratedCustomChainsAt = now
-// return true
-}
-async updateSubstrateChains(_chainIds) {
-// const chains = new Map(
-// (await this.#chaindataProvider.chains()).map((chain) => [chain.id, chain]),
-// )
-// const filteredChains = chainIds.flatMap((chainId) => chains.get(chainId) ?? [])
-// const ids = await balancesDb.miniMetadatas.orderBy("id").primaryKeys()
-// const { wantedIdsByChain, statusesByChain } = await this.statuses(filteredChains)
-// // clean up store
-// const wantedIds = Array.from(wantedIdsByChain.values()).flatMap((ids) => ids)
-// const unwantedIds = ids.filter((id) => !wantedIds.includes(id))
-// if (unwantedIds.length > 0) {
-// const chainIds = Array.from(
-// new Set((await balancesDb.miniMetadatas.bulkGet(unwantedIds)).map((m) => m?.chainId)),
-// )
-// log.info(`Pruning ${unwantedIds.length} miniMetadatas on chains ${chainIds.join(", ")}`)
-// await balancesDb.miniMetadatas.bulkDelete(unwantedIds)
-// }
-// const needUpdates = Array.from(statusesByChain.entries())
-// .filter(([, status]) => status !== "good")
-// .map(([chainId]) => chainId)
-// if (needUpdates.length > 0)
-// log.info(`${needUpdates.length} miniMetadatas need updates (${needUpdates.join(", ")})`)
-// const availableTokenLogos = await availableTokenLogoFilenames().catch((error) => {
-// log.error("Failed to fetch available token logos", error)
-// return []
-// })
-// const concurrency = 12
-// ;(
-// await PromisePool.withConcurrency(concurrency)
-// .for(needUpdates)
-// .process(async (chainId) => {
-// log.info(`Updating metadata for chain ${chainId}`)
-// const chain = chains.get(chainId)
-// if (!chain) return
-// const { specName, specVersion } = chain
-// if (specName === null) return
-// if (specVersion === null) return
-// const fetchMetadata = async () => {
-// try {
-// return await fetchBestMetadata(
-// (method, params, isCacheable) => {
-// if (!this.#chainConnectors.substrate)
-// throw new Error("Substrate connector is not available")
-// return this.#chainConnectors.substrate.send(chainId, method, params, isCacheable)
-// },
-// true, // allow v14 fallback
-// )
-// } catch (err) {
-// log.warn(`Failed to fetch metadata for chain ${chainId}`)
-// return undefined
-// }
-// }
-// const [metadataRpc, systemProperties] = await Promise.all([
-// fetchMetadata(),
-// this.#chainConnectors.substrate?.send(chainId, "system_properties", []),
-// ])
-// for (const mod of this.#balanceModules.filter((m) => m.type.startsWith("substrate-"))) {
-// const balancesConfig = (chain.balancesConfig ?? []).find(
-// ({ moduleType }) => moduleType === mod.type,
-// )
-// const moduleConfig = balancesConfig?.moduleConfig ?? {}
-// const chainMeta = await mod.fetchSubstrateChainMeta(
-// chainId,
-// moduleConfig,
-// metadataRpc,
-// systemProperties,
-// )
-// const tokens = await mod.fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig)
-// // update tokens in chaindata
-// await this.#chaindataProvider.updateChainTokens(
-// chainId,
-// mod.type,
-// Object.values(tokens),
-// availableTokenLogos,
-// )
-// // update miniMetadatas
-// const { miniMetadata: data, metadataVersion: version, ...extra } = chainMeta ?? {}
-// await balancesDb.miniMetadatas.put({
-// id: deriveMiniMetadataId({
-// source: mod.type,
-// chainId,
-// specName,
-// specVersion,
-// balancesConfig: JSON.stringify(moduleConfig),
-// }),
-// source: mod.type,
-// chainId,
-// specName,
-// specVersion,
-// balancesConfig: JSON.stringify(moduleConfig),
-// // TODO: Standardise return value from `fetchSubstrateChainMeta`
-// version,
-// data,
-// extra: JSON.stringify(extra),
-// })
-// }
-// })
-// ).errors.forEach((error) => log.error("Error updating chain metadata", error))
-}
-}
-
 const erc20Abi = [{
 constant: true,
 inputs: [],
@@ -2953,12 +2675,14 @@ async function getPoolBalance(publicClient, contractAddress, accountAddress) {
 }
 }

+const libVersion = pkg.version;
+
 // cache the promise so it can be shared across multiple calls
 const CACHE_GET_SPEC_VERSION = new Map();
 const fetchSpecVersion = async (chainConnector, networkId) => {
 const {
 specVersion
-} = await chainConnector.send(networkId, "state_getRuntimeVersion", [true
+} = await chainConnector.send(networkId, "state_getRuntimeVersion", [], true);
 return specVersion;
 };

@@ -3000,9 +2724,16 @@ const getMetadataRpc = async (chainConnector, networkId) => {

 // share requests as all modules will call this at once
 const CACHE = new Map();
-
+
+// ensures we dont fetch miniMetadatas on more than 4 chains at once
+const POOL = new PQueue__default.default({
+concurrency: 4
+});
+const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, specVersion, signal) => {
 if (CACHE.has(networkId)) return CACHE.get(networkId);
-const pResult = fetchMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion)
+const pResult = POOL.add(() => fetchMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion), {
+signal
+});
 CACHE.set(networkId, pResult);
 try {
 return await pResult;
@@ -3014,49 +2745,57 @@ const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, sp
 CACHE.delete(networkId);
 }
 };
-const fetchMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion) => {
-const
-
-
-
-
-chainConnectors
-
-
-
-const
-
-
-
+const fetchMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion, signal) => {
+const start = performance.now();
+log.debug("[miniMetadata] fetching minimetadatas for %s", chainId);
+try {
+const metadataRpc = await getMetadataRpc(chainConnector, chainId);
+signal?.throwIfAborted();
+const chainConnectors = {
+substrate: chainConnector,
+evm: {} // wont be used but workarounds error for module creation
+};
+const modules = defaultBalanceModules.map(mod => mod({
+chainConnectors,
+chaindataProvider
+})).filter(mod => mod.type.startsWith("substrate-"));
+return Promise.all(modules.map(async mod => {
+const source = mod.type;
+const chainMeta = await mod.fetchSubstrateChainMeta(chainId, {}, metadataRpc);
+return {
+id: deriveMiniMetadataId({
+source,
+chainId,
+specVersion,
+libVersion
+}),
 source,
 chainId,
 specVersion,
-libVersion
-
-
-
-
-
-
-};
-}));
+libVersion,
+data: chainMeta?.miniMetadata ?? null
+};
+}));
+} finally {
+log.debug("[miniMetadata] updated miniMetadatas for %s in %sms", chainId, performance.now() - start);
+}
 };

-const getUpdatedMiniMetadatas = async (chainConnector, chaindataProvider,
-const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider,
+const getUpdatedMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion, signal) => {
+const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion, signal);
+signal?.throwIfAborted();
 await db.transaction("readwrite", "miniMetadatas", async tx => {
 await tx.miniMetadatas.where({
-
+chainId
 }).delete();
 await tx.miniMetadatas.bulkPut(miniMetadatas);
 });
 return miniMetadatas;
 };

-const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, source) => {
+const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, source, signal) => {
 const specVersion = await getSpecVersion(chainConnector, chainId);
-
-// TODO when working a chaindata branch, need a way to pass the libVersion used to derive the miniMetadataId got github
+signal?.throwIfAborted();
 const miniMetadataId = deriveMiniMetadataId({
 source,
 chainId,
@@ -3066,11 +2805,13 @@ const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, sourc

 // lookup local ones
 const [dbMiniMetadata, ghMiniMetadata] = await Promise.all([db.miniMetadatas.get(miniMetadataId), chaindataProvider.miniMetadataById(miniMetadataId)]);
+signal?.throwIfAborted();
 const miniMetadata = dbMiniMetadata ?? ghMiniMetadata;
 if (miniMetadata) return miniMetadata;

 // update from live chain metadata and persist locally
-const miniMetadatas = await getUpdatedMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion);
+const miniMetadatas = await getUpdatedMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion, signal);
+signal?.throwIfAborted();
 const found = miniMetadatas.find(m => m.id === miniMetadataId);
 if (!found) {
 log.warn("MiniMetadata not found in updated miniMetadatas", {
@@ -3157,6 +2898,28 @@ const buildStorageCoders = ({
 return [];
 }
 }));
+const buildNetworkStorageCoders = (chainId, miniMetadata, coders) => {
+if (!miniMetadata.data) return null;
+const metadata = scale.unifyMetadata(scale.decAnyMetadata(miniMetadata.data));
+try {
+const scaleBuilder = scale.getDynamicBuilder(scale.getLookupFn(metadata));
+const builtCoders = Object.fromEntries(Object.entries(coders).flatMap(([key, moduleMethodOrFn]) => {
+const [module, method] = typeof moduleMethodOrFn === "function" ? moduleMethodOrFn({
+chainId
+}) : moduleMethodOrFn;
+try {
+return [[key, scaleBuilder.buildStorage(module, method)]];
+} catch (cause) {
+log.trace(`Failed to build SCALE coder for chain ${chainId} (${module}::${method})`, cause);
+return [];
+}
+}));
+return builtCoders;
+} catch (cause) {
+log.error(`Failed to build SCALE coders for chain ${chainId} (${JSON.stringify(coders)})`, cause);
+}
+return null;
+};

 /**
 * Decodes & unwraps outputs and errors of a given result, contract, and method.
@@ -3357,15 +3120,9 @@ const SubAssetsModule = hydrate => {
 util$1.assert(chainConnector, "This module requires a substrate chain connector");
 return {
 ...DefaultBalanceModule(moduleType$4),
+// TODO make synchronous at the module definition level ?
 async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
-
-if (metadataRpc === undefined) return {
-isTestnet
-};
-if ((moduleConfig?.tokens ?? []).length < 1) return {
-isTestnet
-};
-const metadataVersion = scale.getMetadataVersion(metadataRpc);
+if (!metadataRpc) return {};
 const metadata = scale.decAnyMetadata(metadataRpc);
 scale.compactMetadata(metadata, [{
 pallet: "Assets",
@@ -3373,9 +3130,7 @@ const SubAssetsModule = hydrate => {
 }]);
 const miniMetadata = scale.encodeMetadata(metadata);
 return {
-
-miniMetadata,
-metadataVersion
+miniMetadata
 };
 },
 async fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig) {
@@ -3440,44 +3195,32 @@ const SubAssetsModule = hydrate => {
 return acc;
 }, {});
 const controller = new AbortController();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const pUnsubs = Promise.all(lodash.toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
+try {
+const queries = await buildNetworkQueries$2(networkId, chainConnector, chaindataProvider$1, addressesByToken, controller.signal);
+if (controller.signal.aborted) return () => {};
+const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
+return await stateHelper.subscribe((error, result) => {
+// console.log("SubstrateAssetsModule.callback", { error, result })
+if (error) return callback(error);
+const balances = result?.filter(b => b !== null) ?? [];
+if (balances.length > 0) callback(null, new Balances(balances));
+});
+} catch (err) {
+if (!controller.signal.aborted) log.error(`Failed to subscribe balances for network ${networkId}`, err);
+return () => {};
+}
 }));
-
-// const networkIds = uniq(uniq(keys(addressesByToken)).map((tokenId) => parseSubAssetTokenId(tokenId).networkId))
-// const
-
-//console.log("SubstrateAssetsModule.subscribeBalances 1", { addressesByToken })
-// const queries = await buildQueries(chaindataProvider, addressesByToken)
-// //console.log("SubstrateAssetsModule.subscribeBalances 2", { queries, addressesByToken })
-// const unsubscribe = await new RpcStateQueryHelper(chainConnector, queries).subscribe(
-// (error, result) => {
-// // console.log("SubstrateAssetsModule.callback", { error, result })
-// if (error) return callback(error)
-// const balances = result?.filter((b): b is SubAssetsBalance => b !== null) ?? []
-// if (balances.length > 0) callback(null, new Balances(balances))
-// },
-// )
-
 return () => {
 controller.abort();
+pUnsubs.then(unsubs => {
+unsubs.forEach(unsubscribe => unsubscribe());
+});
 };
 },
 async fetchBalances(addressesByToken) {
 util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const queries = await buildQueries$3(chaindataProvider$1, addressesByToken);
+const queries = await buildQueries$3(chainConnector, chaindataProvider$1, addressesByToken);
 const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
 const balances = result?.filter(b => b !== null) ?? [];
 return new Balances(balances);
@@ -3547,23 +3290,14 @@ const SubAssetsModule = hydrate => {
 }
 };
 };
-async function buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken) {
-const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$4);
-
+async function buildNetworkQueries$2(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
+const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$4, signal);
+// console.log("Fetched miniMetadata for network", networkId, { miniMetadata })
+const chain = await chaindataProvider.chainById(networkId);
 const tokensById = await chaindataProvider.tokensById();
-
-const
-[
-} : {};
-const miniMetadatas = new Map([[miniMetadata.id, miniMetadata]]);
-const chainStorageCoders = buildStorageCoders({
-chainIds,
-chains,
-miniMetadatas,
-moduleType: moduleType$4,
-coders: {
-storage: ["Assets", "Account"]
-}
+signal?.throwIfAborted();
+const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
+storage: ["Assets", "Account"]
 });
 return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
 const token = tokensById[tokenId];
@@ -3575,19 +3309,14 @@ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider,
 log.debug(`This module doesn't handle tokens of type ${token.type}`);
 return [];
 }
-
-if (!networkId) {
-log.warn(`Token ${tokenId} has no chain`);
-return [];
-}
-const chain = chains[networkId];
+//
 if (!chain) {
 log.warn(`Chain ${networkId} for token ${tokenId} not found`);
 return [];
 }
 return addresses.flatMap(address => {
-const scaleCoder =
-const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, token.assetId, address);
+const scaleCoder = networkStorageCoders?.storage;
+const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, Number(token.assetId), address);
 if (!stateKey) {
 log.warn(`Invalid assetId / address in ${networkId} storage query ${token.assetId} / ${address}`);
 return [];
@@ -3642,102 +3371,16 @@ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider,
 });
 });
 }
-async function buildQueries$3(chaindataProvider, addressesByToken) {
-const
-
-
-
-
-
-
-
-
-const chains = Object.fromEntries(uniqueChainIds.map(chainId => [chainId, allChains[chainId]]));
-const chainStorageCoders = buildStorageCoders({
-chainIds: uniqueChainIds,
-chains,
-miniMetadatas,
-moduleType: "substrate-assets",
-coders: {
-storage: ["Assets", "Account"]
-}
-});
-return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
-const token = tokens[tokenId];
-if (!token) {
-log.warn(`Token ${tokenId} not found`);
-return [];
-}
-if (token.type !== "substrate-assets") {
-log.debug(`This module doesn't handle tokens of type ${token.type}`);
-return [];
-}
-const networkId = token.networkId;
-if (!networkId) {
-log.warn(`Token ${tokenId} has no chain`);
-return [];
-}
-const chain = chains[networkId];
-if (!chain) {
-log.warn(`Chain ${networkId} for token ${tokenId} not found`);
-return [];
-}
-return addresses.flatMap(address => {
-const scaleCoder = chainStorageCoders.get(networkId)?.storage;
-const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, token.assetId, address);
-if (!stateKey) {
-log.warn(`Invalid assetId / address in ${networkId} storage query ${token.assetId} / ${address}`);
-return [];
-}
-const decodeResult = change => {
-/** NOTE: This type is only a hint for typescript, the chain can actually return whatever it wants to */
-
-const decoded = scale.decodeScale(scaleCoder, change, `Failed to decode substrate-assets balance on chain ${networkId}`) ?? {
-balance: 0n,
-status: {
-type: "Liquid"
-}};
-const isFrozen = decoded?.status?.type === "Frozen";
-const amount = (decoded?.balance ?? 0n).toString();
-
-// due to the following balance calculations, which are made in the `Balance` type:
-//
-// total balance = (free balance) + (reserved balance)
-// transferable balance = (free balance) - (frozen balance)
-//
-// when `isFrozen` is true we need to set **both** the `free` and `frozen` amounts
-// of this balance to the value we received from the RPC.
-//
-// if we only set the `frozen` amount, then the `total` calculation will be incorrect!
-const free = amount;
-const frozen = token.isFrozen || isFrozen ? amount : "0";
-
-// include balance values even if zero, so that newly-zero values overwrite old values
-const balanceValues = [{
-type: "free",
-label: "free",
-amount: free.toString()
-}, {
-type: "locked",
-label: "frozen",
-amount: frozen.toString()
-}];
-return {
-source: "substrate-assets",
-status: "live",
-address,
-networkId,
-tokenId: token.id,
-values: balanceValues
-};
-};
-return {
-chainId: networkId,
-stateKey,
-decodeResult
-};
-});
-});
+async function buildQueries$3(chainConnector, chaindataProvider$1, addressesByToken, signal) {
+const byNetwork = lodash.keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = chaindataProvider.parseSubAssetTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+return (await Promise.all(lodash.toPairs(byNetwork).map(([networkId, addressesByToken]) => {
+return buildNetworkQueries$2(networkId, chainConnector, chaindataProvider$1, addressesByToken, signal);
+}))).flat();
 }
 // NOTE: Different chains need different formats for assetId when encoding the stateKey
 // E.g. Polkadot Asset Hub needs it to be a string, Astar needs it to be a bigint
@@ -3762,10 +3405,7 @@ const SubForeignAssetsModule = hydrate => {
 return {
 ...DefaultBalanceModule(moduleType$3),
 async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
-// const isTestnet = (await chaindataProvider.chainById(chainId))?.isTestnet || false
 if (metadataRpc === undefined) return {};
-// if ((moduleConfig?.tokens ?? []).length < 1) return { isTestnet }
-
 const metadataVersion = scale.getMetadataVersion(metadataRpc);
 if (metadataVersion < 14) return {};
 const metadata = scale.decAnyMetadata(metadataRpc);
@@ -3784,9 +3424,6 @@ const SubForeignAssetsModule = hydrate => {
 miniMetadata
 } = chainMeta;
 if (!miniMetadata) return {};
-// if (miniMetadata === undefined || metadataVersion === undefined) return {}
-// if (metadataVersion < 14) return {}
-
 const metadata = scale.decAnyMetadata(miniMetadata);
 const unifiedMetadata = scale.unifyMetadata(metadata);
 const scaleBuilder = scale.getDynamicBuilder(scale.getLookupFn(unifiedMetadata));
@@ -3840,17 +3477,38 @@ const SubForeignAssetsModule = hydrate => {
 async subscribeBalances({
 addressesByToken
 }, callback) {
-const
-
-if (
-
-
-});
-
+const byNetwork = lodash.keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = chaindataProvider.parseSubForeignAssetTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+const controller = new AbortController();
+const pUnsubs = Promise.all(lodash.toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
+try {
+const queries = await buildNetworkQueries$1(networkId, chainConnector, chaindataProvider$1, addressesByToken, controller.signal);
+if (controller.signal.aborted) return () => {};
+const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
+return await stateHelper.subscribe((error, result) => {
+if (error) return callback(error);
+const balances = result?.filter(b => b !== null) ?? [];
+if (balances.length > 0) callback(null, new Balances(balances));
+});
+} catch (err) {
+if (!controller.signal.aborted) log.error(`Failed to subscribe ${moduleType$3} balances for network ${networkId}`, err);
+return () => {};
+}
+}));
+return () => {
+controller.abort();
+pUnsubs.then(unsubs => {
+unsubs.forEach(unsubscribe => unsubscribe());
+});
+};
 },
 async fetchBalances(addressesByToken) {
 util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const queries = await buildQueries$2(chaindataProvider$1, addressesByToken);
+const queries = await buildQueries$2(chainConnector, chaindataProvider$1, addressesByToken);
 const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
 const balances = result?.filter(b => b !== null) ?? [];
 return new Balances(balances);
@@ -3906,23 +3564,16 @@ const SubForeignAssetsModule = hydrate => {
 }
 };
 };
-async function
-const
-const
-const
-
-const
-
-chainIds: uniqueChainIds,
-chains,
-miniMetadatas,
-moduleType: "substrate-foreignassets",
-coders: {
-storage: ["ForeignAssets", "Account"]
-}
+async function buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
+const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$3, signal);
+const chain = await chaindataProvider.chainById(networkId);
+const tokensById = await chaindataProvider.tokensById();
+signal?.throwIfAborted();
+const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
+storage: ["ForeignAssets", "Account"]
 });
 return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
-const token =
+const token = tokensById[tokenId];
 if (!token) {
 log.warn(`Token ${tokenId} not found`);
 return [];
return [];
|
@@ -3931,18 +3582,12 @@ async function buildQueries$2(chaindataProvider, addressesByToken) {
|
|
3931
3582
|
log.debug(`This module doesn't handle tokens of type ${token.type}`);
|
3932
3583
|
return [];
|
3933
3584
|
}
|
3934
|
-
const networkId = token.networkId;
|
3935
|
-
if (!networkId) {
|
3936
|
-
log.warn(`Token ${tokenId} has no chain`);
|
3937
|
-
return [];
|
3938
|
-
}
|
3939
|
-
const chain = chains[networkId];
|
3940
3585
|
if (!chain) {
|
3941
3586
|
log.warn(`Chain ${networkId} for token ${tokenId} not found`);
|
3942
3587
|
return [];
|
3943
3588
|
}
|
3944
3589
|
return addresses.flatMap(address => {
|
3945
|
-
const scaleCoder =
|
3590
|
+
const scaleCoder = networkStorageCoders?.storage;
|
3946
3591
|
const onChainId = (() => {
|
3947
3592
|
try {
|
3948
3593
|
return scale.papiParse(token.onChainId);
|
@@ -4002,6 +3647,17 @@ async function buildQueries$2(chaindataProvider, addressesByToken) {
 });
 });
 }
+async function buildQueries$2(chainConnector, chaindataProvider$1, addressesByToken, signal) {
+const byNetwork = lodash.keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = chaindataProvider.parseSubForeignAssetTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+return (await Promise.all(lodash.toPairs(byNetwork).map(([networkId, addressesByToken]) => {
+return buildNetworkQueries$1(networkId, chainConnector, chaindataProvider$1, addressesByToken, signal);
+}))).flat();
+}

 async function subscribeBase(queries, chainConnector, callback) {
 const unsubscribe = await new RpcStateQueryHelper(chainConnector, queries).subscribe((error, result) => {
@@ -5413,8 +5069,23 @@ class SubNativeBalanceError extends Error {
 }
 }

-const
-
+const DotNetworkPropertiesSimple = z__default.default.object({
+tokenDecimals: z__default.default.number().optional().default(0),
+tokenSymbol: z__default.default.string().optional().default("Unit")
+});
+const DotNetworkPropertiesArray = z__default.default.object({
+tokenDecimals: z__default.default.array(z__default.default.number()).nonempty(),
+tokenSymbol: z__default.default.array(z__default.default.string()).nonempty()
+});
+const DotNetworkProperties = z__default.default.union([DotNetworkPropertiesSimple, DotNetworkPropertiesArray]).transform(val => ({
+tokenDecimals: Array.isArray(val.tokenDecimals) ? val.tokenDecimals[0] : val.tokenDecimals,
+tokenSymbol: Array.isArray(val.tokenSymbol) ? val.tokenSymbol[0] : val.tokenSymbol
+}));
+const getChainProperties = async (chainConnector, networkId) => {
+const properties = await chainConnector.send(networkId, "system_properties", [], true);
+return DotNetworkProperties.parse(properties);
+};
+
 const POLLING_WINDOW_SIZE = 20;
 const MAX_SUBSCRIPTION_SIZE = 40;
 const SubNativeModule = hydrate => {
@@ -5428,29 +5099,187 @@ const SubNativeModule = hydrate => {
 const getModuleTokens = async () => {
 return await chaindataProvider$1.tokensByIdForType(moduleType$2);
 };
-return {
-...DefaultBalanceModule(moduleType$2),
-async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc, systemProperties) {
-const isTestnet = (await chaindataProvider$1.chainById(chainId))?.isTestnet || false;
-if (moduleConfig?.disable === true || metadataRpc === undefined) return {
-isTestnet
-};

-
-
-
+// subscribeBalances was split by network to prevent all subs to wait for all minimetadatas to be ready.
+// however the multichain logic in there is so deep in the function below that i had to keep it as-is, and call it by per-network chunks
+// TODO refactor this be actually network specific
+const subscribeChainBalances = async (chainId, opts, callback) => {
+const {
+addressesByToken,
+initialBalances
+} = opts;
+// full record of balances for this module
+const subNativeBalances = new rxjs.BehaviorSubject(Object.fromEntries(initialBalances?.map(b => [getBalanceId(b), b]) ?? []));
+// tokens which have a known positive balance
+const positiveBalanceTokens = subNativeBalances.pipe(rxjs.map(balances => Array.from(new Set(Object.values(balances).map(b => b.tokenId)))), rxjs.share());
+
+// tokens that will be subscribed to, simply a slice of the positive balance tokens of size MAX_SUBSCRIPTION_SIZE
+const subscriptionTokens = positiveBalanceTokens.pipe(rxjs.map(tokens => tokens.sort(sortChains).slice(0, MAX_SUBSCRIPTION_SIZE)));
+
+// an initialised balance is one where we have received a response for any type of 'subsource',
+// until then they are initialising. We only need to maintain one map of tokens to addresses for this
+const initialisingBalances = Object.entries(addressesByToken).reduce((acc, [tokenId, addresses]) => {
+acc.set(tokenId, new Set(addresses));
+return acc;
+}, new Map());
+
+// after thirty seconds, we need to kill the initialising balances
+const initBalancesTimeout = setTimeout(() => {
+initialisingBalances.clear();
+// manually call the callback to ensure the caller gets the correct status
+callback(null, {
+status: "live",
+data: Object.values(subNativeBalances.getValue())
+});
+}, 30_000);
+const _callbackSub = subNativeBalances.pipe(rxjs.debounceTime(100)).subscribe({
+next: balances => {
+callback(null, {
+status: initialisingBalances.size > 0 ? "initialising" : "live",
+data: Object.values(balances)
+});
+},
+error: error => callback(error),
+complete: () => {
+initialisingBalances.clear();
+clearTimeout(initBalancesTimeout);
+}
+});
+const unsubDeferred = util.Deferred();
+// we return this to the caller so that they can let us know when they're no longer interested in this subscription
+const callerUnsubscribe = () => {
+subNativeBalances.complete();
+_callbackSub.unsubscribe();
+return unsubDeferred.reject(new Error(`Caller unsubscribed`));
+};
+// we queue up our work to clean up our subscription when this promise rejects
+const callerUnsubscribed = unsubDeferred.promise;
+
+// The update handler is to allow us to merge balances with the same id, and manage initialising and positive balances state for each
+// balance type and network
+const handleUpdateForSource = source => (error, result) => {
+if (result) {
+const currentBalances = subNativeBalances.getValue();
+
+// first merge any balances with the same id within the result
+const accumulatedUpdates = result.filter(b => b.values.length > 0).reduce((acc, b) => {
+const bId = getBalanceId(b);
+acc[bId] = mergeBalances(acc[bId], b, source, false);
+return acc;
+}, {});
+
+// then merge these with the current balances
+const mergedBalances = {};
+Object.entries(accumulatedUpdates).forEach(([bId, b]) => {
+// merge the values from the new balance into the existing balance, if there is one
+mergedBalances[bId] = mergeBalances(currentBalances[bId], b, source, true);
+
+// update initialisingBalances to remove balances which have been updated
+const intialisingForToken = initialisingBalances.get(b.tokenId);
+if (intialisingForToken) {
+intialisingForToken.delete(b.address);
+if (intialisingForToken.size === 0) initialisingBalances.delete(b.tokenId);else initialisingBalances.set(b.tokenId, intialisingForToken);
+}
+});
+subNativeBalances.next({
+...currentBalances,
+...mergedBalances
+});
+}
+if (error) {
+if (error instanceof SubNativeBalanceError) {
+// this type of error doesn't need to be handled by the caller
+initialisingBalances.delete(error.tokenId);
+} else return callback(error);
+}
+};

-
-
-
-
-
-
+// subscribe to addresses and tokens for which we have a known positive balance
+const positiveSub = subscriptionTokens.pipe(rxjs.debounceTime(1000), rxjs.takeUntil(callerUnsubscribed), rxjs.map(tokenIds => tokenIds.reduce((acc, tokenId) => {
+acc[tokenId] = addressesByToken[tokenId];
+return acc;
+}, {})), rxjs.distinctUntilChanged(isEqual__default.default), rxjs.switchMap(newAddressesByToken => {
+return rxjs.from(queryCache.getQueries(newAddressesByToken)).pipe(rxjs.switchMap(baseQueries => {
+return new rxjs.Observable(subscriber => {
+if (!chainConnectors.substrate) return;
+const unsubSubtensorStaking = subscribeSubtensorStaking(chaindataProvider$1, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("subtensor-staking"));
+const unsubNompoolStaking = subscribeNompoolStaking(chaindataProvider$1, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("nompools-staking"));
+const unsubCrowdloans = subscribeCrowdloans(chaindataProvider$1, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("crowdloan"));
+const unsubBase = subscribeBase(baseQueries, chainConnectors.substrate, handleUpdateForSource("base"));
+subscriber.add(async () => (await unsubSubtensorStaking)());
+subscriber.add(async () => (await unsubNompoolStaking)());
+subscriber.add(async () => (await unsubCrowdloans)());
+subscriber.add(async () => (await unsubBase)());
+});
+}));
+})).subscribe();
+
+// for chains where we don't have a known positive balance, poll rather than subscribe
+const poll = async (addressesByToken = {}) => {
+const handleUpdate = handleUpdateForSource("base");
+try {
+const balances = await fetchBalances(addressesByToken);
+handleUpdate(null, Object.values(balances.toJSON()));
+} catch (error) {
+if (error instanceof chainConnector.ChainConnectionError) {
+// coerce ChainConnection errors into SubNativeBalance errors
+const errorChainId = error.chainId;
+Object.entries(await getModuleTokens()).filter(([, token]) => token.networkId === errorChainId).forEach(([tokenId]) => {
+const wrappedError = new SubNativeBalanceError(tokenId, error.message);
+handleUpdate(wrappedError);
+});
+} else {
+log.error("unknown substrate native balance error", error);
+handleUpdate(error);
+}
+}
+};
+// do one poll to get things started
+const currentBalances = subNativeBalances.getValue();
+const currentTokens = new Set(Object.values(currentBalances).map(b => b.tokenId));
+const nonCurrentTokens = Object.keys(addressesByToken).filter(tokenId => !currentTokens.has(tokenId)).sort(sortChains);
+
+// break nonCurrentTokens into chunks of POLLING_WINDOW_SIZE
+await PromisePool__default.default.withConcurrency(POLLING_WINDOW_SIZE).for(nonCurrentTokens).process(async nonCurrentTokenId => await poll({
+[nonCurrentTokenId]: addressesByToken[nonCurrentTokenId]
+}));
+
+// now poll every 30s on chains which are not subscriptionTokens
+// we chunk this observable into batches of positive token ids, to prevent eating all the websocket connections
+const pollingSub = rxjs.interval(30_000) // emit values every 30 seconds
+.pipe(rxjs.takeUntil(callerUnsubscribed), rxjs.withLatestFrom(subscriptionTokens),
+// Combine latest value from subscriptionTokens with each interval tick
+rxjs.map(([, subscribedTokenIds]) =>
+// Filter out tokens that are not subscribed
+Object.keys(addressesByToken).filter(tokenId => !subscribedTokenIds.includes(tokenId))), rxjs.exhaustMap(tokenIds => rxjs.from(util$1.arrayChunk(tokenIds, POLLING_WINDOW_SIZE)).pipe(rxjs.concatMap(async tokenChunk => {
+// tokenChunk is a chunk of tokenIds with size POLLING_WINDOW_SIZE
+const pollingTokenAddresses = Object.fromEntries(tokenChunk.map(tokenId => [tokenId, addressesByToken[tokenId]]));
+await poll(pollingTokenAddresses);
+return true;
+})))).subscribe();
+return () => {
+callerUnsubscribe();
+positiveSub.unsubscribe();
+pollingSub.unsubscribe();
+};
+};
+const fetchBalances = async addressesByToken => {
+util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
+const queries = await queryCache.getQueries(addressesByToken);
+util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
+const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
+return new Balances(result ?? []);
+};
+return {
+...DefaultBalanceModule(moduleType$2),
+async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
+if (!metadataRpc) return {};

 //
 // process metadata into SCALE encoders/decoders
 //
 const metadataVersion = scale.getMetadataVersion(metadataRpc);
+if (metadataVersion < 14) return {};
 const metadata = scale.decAnyMetadata(metadataRpc);
 const unifiedMetadata = scale.unifyMetadata(metadata);

@@ -5517,16 +5346,15 @@ const SubNativeModule = hydrate => {
  }) => name === "Freezes"));
  const useLegacyTransferableCalculation = !hasFreezesItem;
  const chainMeta = {
- isTestnet,
+ // isTestnet,
  useLegacyTransferableCalculation,
- symbol,
- decimals,
+ // symbol,
+ // decimals,
  existentialDeposit,
  nominationPoolsPalletId,
  crowdloanPalletId,
  hasSubtensorPallet,
- miniMetadata
- metadataVersion
+ miniMetadata
  };
  if (!useLegacyTransferableCalculation) delete chainMeta.useLegacyTransferableCalculation;
  if (!hasSubtensorPallet) delete chainMeta.hasSubtensorPallet;
@@ -5535,9 +5363,10 @@ const SubNativeModule = hydrate => {
  async fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig) {
  if (moduleConfig?.disable === true) return {};
  const {
-
-
-
+ tokenSymbol: symbol,
+ tokenDecimals: decimals
+ } = await getChainProperties(chainConnector$1, chainId);
+ const {
  existentialDeposit
  } = chainMeta;
  const id = chaindataProvider.subNativeTokenId(chainId);
@@ -5545,11 +5374,10 @@ const SubNativeModule = hydrate => {
  id,
  type: "substrate-native",
  platform: "polkadot",
- isTestnet,
  isDefault: moduleConfig?.isDefault ?? true,
- symbol: symbol
- name: moduleConfig?.name ?? symbol
- decimals: decimals
+ symbol: symbol,
+ name: moduleConfig?.name ?? symbol,
+ decimals: decimals,
  logo: moduleConfig?.logo,
  existentialDeposit: existentialDeposit ?? "0",
  networkId: chainId
@@ -5566,169 +5394,43 @@ const SubNativeModule = hydrate => {
  initialBalances
  }, callback) {
  util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-
-
-
-
- const positiveBalanceTokens = subNativeBalances.pipe(rxjs.map(balances => Array.from(new Set(Object.values(balances).map(b => b.tokenId)))), rxjs.share());
-
- // tokens that will be subscribed to, simply a slice of the positive balance tokens of size MAX_SUBSCRIPTION_SIZE
- const subscriptionTokens = positiveBalanceTokens.pipe(rxjs.map(tokens => tokens.sort(sortChains).slice(0, MAX_SUBSCRIPTION_SIZE)));
-
- // an initialised balance is one where we have received a response for any type of 'subsource',
- // until then they are initialising. We only need to maintain one map of tokens to addresses for this
- const initialisingBalances = Object.entries(addressesByToken).reduce((acc, [tokenId, addresses]) => {
- acc.set(tokenId, new Set(addresses));
+ const addressesByTokenByNetwork = lodash.keys(addressesByToken).reduce((acc, tokenId) => {
+ const networkId = chaindataProvider.parseSubNativeTokenId(tokenId).networkId;
+ if (!acc[networkId]) acc[networkId] = {};
+ acc[networkId][tokenId] = addressesByToken[tokenId];
  return acc;
- },
-
-
-
-
-
-
-
-
-
- }, 30_000);
- const _callbackSub = subNativeBalances.pipe(rxjs.debounceTime(100)).subscribe({
- next: balances => {
- callback(null, {
- status: initialisingBalances.size > 0 ? "initialising" : "live",
- data: Object.values(balances)
- });
- },
- error: error => callback(error),
- complete: () => {
- initialisingBalances.clear();
- clearTimeout(initBalancesTimeout);
- }
- });
- const unsubDeferred = util.Deferred();
- // we return this to the caller so that they can let us know when they're no longer interested in this subscription
- const callerUnsubscribe = () => {
- subNativeBalances.complete();
- _callbackSub.unsubscribe();
- return unsubDeferred.reject(new Error(`Caller unsubscribed`));
- };
- // we queue up our work to clean up our subscription when this promise rejects
- const callerUnsubscribed = unsubDeferred.promise;
-
- // The update handler is to allow us to merge balances with the same id, and manage initialising and positive balances state for each
- // balance type and network
- const handleUpdateForSource = source => (error, result) => {
- if (result) {
- const currentBalances = subNativeBalances.getValue();
-
- // first merge any balances with the same id within the result
- const accumulatedUpdates = result.filter(b => b.values.length > 0).reduce((acc, b) => {
- const bId = getBalanceId(b);
- acc[bId] = mergeBalances(acc[bId], b, source, false);
- return acc;
- }, {});
-
- // then merge these with the current balances
- const mergedBalances = {};
- Object.entries(accumulatedUpdates).forEach(([bId, b]) => {
- // merge the values from the new balance into the existing balance, if there is one
- mergedBalances[bId] = mergeBalances(currentBalances[bId], b, source, true);
-
- // update initialisingBalances to remove balances which have been updated
- const intialisingForToken = initialisingBalances.get(b.tokenId);
- if (intialisingForToken) {
- intialisingForToken.delete(b.address);
- if (intialisingForToken.size === 0) initialisingBalances.delete(b.tokenId);else initialisingBalances.set(b.tokenId, intialisingForToken);
- }
- });
- subNativeBalances.next({
- ...currentBalances,
- ...mergedBalances
- });
- }
- if (error) {
- if (error instanceof SubNativeBalanceError) {
- // this type of error doesn't need to be handled by the caller
- initialisingBalances.delete(error.tokenId);
- } else return callback(error);
- }
+ }, {});
+ const initialBalancesByNetwork = lodash.groupBy(initialBalances ?? [], "networkId");
+ const {
+ abort,
+ signal
+ } = new AbortController();
+ const safeCallback = (error, result) => {
+ if (signal.aborted) return;
+ // typescript isnt happy with fowarding parameters as is
+ return error ? callback(error, undefined) : callback(error, result);
  };
-
- // subscribe to addresses and tokens for which we have a known positive balance
- const positiveSub = subscriptionTokens.pipe(rxjs.debounceTime(1000), rxjs.takeUntil(callerUnsubscribed), rxjs.map(tokenIds => tokenIds.reduce((acc, tokenId) => {
- acc[tokenId] = addressesByToken[tokenId];
- return acc;
- }, {})), rxjs.distinctUntilChanged(isEqual__default.default), rxjs.switchMap(newAddressesByToken => {
- return rxjs.from(queryCache.getQueries(newAddressesByToken)).pipe(rxjs.switchMap(baseQueries => {
- return new rxjs.Observable(subscriber => {
- if (!chainConnectors.substrate) return;
- const unsubSubtensorStaking = subscribeSubtensorStaking(chaindataProvider$1, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("subtensor-staking"));
- const unsubNompoolStaking = subscribeNompoolStaking(chaindataProvider$1, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("nompools-staking"));
- const unsubCrowdloans = subscribeCrowdloans(chaindataProvider$1, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("crowdloan"));
- const unsubBase = subscribeBase(baseQueries, chainConnectors.substrate, handleUpdateForSource("base"));
- subscriber.add(async () => (await unsubSubtensorStaking)());
- subscriber.add(async () => (await unsubNompoolStaking)());
- subscriber.add(async () => (await unsubCrowdloans)());
- subscriber.add(async () => (await unsubBase)());
- });
- }));
- })).subscribe();
-
- // for chains where we don't have a known positive balance, poll rather than subscribe
- const poll = async (addressesByToken = {}) => {
- const handleUpdate = handleUpdateForSource("base");
+ const unsubsribeFns = Promise.all(lodash.keys(addressesByTokenByNetwork).map(async networkId => {
  try {
-
-
-
-
-
-
- Object.entries(await getModuleTokens()).filter(([, token]) => token.networkId === errorChainId).forEach(([tokenId]) => {
- const wrappedError = new SubNativeBalanceError(tokenId, error.message);
- handleUpdate(wrappedError);
- });
- } else {
- log.error("unknown substrate native balance error", error);
- handleUpdate(error);
- }
+ // this is what we want to be done separately for each network
+ // this will update the DB so minimetadata will be available when it's used, veeeeery far down the tree of subscribeChainBalances
+ await getMiniMetadata(chaindataProvider$1, chainConnector$1, networkId, moduleType$2, signal);
+ } catch (err) {
+ if (!signal.aborted) log.warn("Failed to get native token miniMetadata for network", networkId, err);
+ return () => {};
  }
-
-
-
-
-
-
- // break nonCurrentTokens into chunks of POLLING_WINDOW_SIZE
- await PromisePool__default.default.withConcurrency(POLLING_WINDOW_SIZE).for(nonCurrentTokens).process(async nonCurrentTokenId => await poll({
- [nonCurrentTokenId]: addressesByToken[nonCurrentTokenId]
+ if (signal.aborted) return () => {};
+ return subscribeChainBalances(networkId, {
+ addressesByToken: addressesByTokenByNetwork[networkId] ?? {},
+ initialBalances: initialBalancesByNetwork[networkId] ?? []
+ }, safeCallback);
  }));
-
- // now poll every 30s on chains which are not subscriptionTokens
- // we chunk this observable into batches of positive token ids, to prevent eating all the websocket connections
- const pollingSub = rxjs.interval(30_000) // emit values every 30 seconds
- .pipe(rxjs.takeUntil(callerUnsubscribed), rxjs.withLatestFrom(subscriptionTokens),
- // Combine latest value from subscriptionTokens with each interval tick
- rxjs.map(([, subscribedTokenIds]) =>
- // Filter out tokens that are not subscribed
- Object.keys(addressesByToken).filter(tokenId => !subscribedTokenIds.includes(tokenId))), rxjs.exhaustMap(tokenIds => rxjs.from(util$1.arrayChunk(tokenIds, POLLING_WINDOW_SIZE)).pipe(rxjs.concatMap(async tokenChunk => {
- // tokenChunk is a chunk of tokenIds with size POLLING_WINDOW_SIZE
- const pollingTokenAddresses = Object.fromEntries(tokenChunk.map(tokenId => [tokenId, addressesByToken[tokenId]]));
- await poll(pollingTokenAddresses);
- return true;
- })))).subscribe();
  return () => {
-
-
- pollingSub.unsubscribe();
+ abort();
+ unsubsribeFns.then(fns => fns.forEach(unsubscribe => unsubscribe()));
  };
  },
-
- util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
- const queries = await queryCache.getQueries(addressesByToken);
- util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
- const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
- return new Balances(result ?? []);
- },
+ fetchBalances,
  async transferToken({
  tokenId,
  from,
@@ -7200,7 +6902,6 @@ const SubTokensModule = hydrate => {
  ...DefaultBalanceModule(moduleType),
  async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
  if (metadataRpc === undefined) return {};
- if ((moduleConfig?.tokens ?? []).length < 1) return {};
  const metadata = scale.decAnyMetadata(metadataRpc);
  const palletId = moduleConfig?.palletId ?? defaultPalletId;
  scale.compactMetadata(metadata, [{
@@ -7252,17 +6953,39 @@ const SubTokensModule = hydrate => {
  async subscribeBalances({
  addressesByToken
  }, callback) {
- const
-
- if (
-
-
- });
-
+ const byNetwork = lodash.keys(addressesByToken).reduce((acc, tokenId) => {
+ const networkId = chaindataProvider.parseSubTokensTokenId(tokenId).networkId;
+ if (!acc[networkId]) acc[networkId] = {};
+ acc[networkId][tokenId] = addressesByToken[tokenId];
+ return acc;
+ }, {});
+ const controller = new AbortController();
+ const pUnsubs = Promise.all(lodash.toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
+ try {
+ const queries = await buildNetworkQueries(networkId, chainConnector, chaindataProvider$1, addressesByToken, controller.signal);
+ if (controller.signal.aborted) return () => {};
+ const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
+ return await stateHelper.subscribe((error, result) => {
+ // console.log("SubstrateAssetsModule.callback", { error, result })
+ if (error) return callback(error);
+ const balances = result?.filter(b => b !== null) ?? [];
+ if (balances.length > 0) callback(null, new Balances(balances));
+ });
+ } catch (err) {
+ if (!controller.signal.aborted) log.error(`Failed to subscribe balances for network ${networkId}`, err);
+ return () => {};
+ }
+ }));
+ return () => {
+ controller.abort();
+ pUnsubs.then(unsubs => {
+ unsubs.forEach(unsubscribe => unsubscribe());
+ });
+ };
  },
  async fetchBalances(addressesByToken) {
  util$1.assert(chainConnectors.substrate, "This module requires a substrate chain connector");
- const queries = await buildQueries(chaindataProvider$1, addressesByToken);
+ const queries = await buildQueries(chainConnector, chaindataProvider$1, addressesByToken);
  const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
  const balances = result?.filter(b => b !== null) ?? [];
  return new Balances(balances);
@@ -7381,23 +7104,16 @@ const SubTokensModule = hydrate => {
  }
  };
  };
- async function
- const
+ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
+ const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType, signal);
+ const chain = await chaindataProvider.chainById(networkId);
  const tokens = await chaindataProvider.tokensById();
-
-
- const
- const
- const
-
- chains,
- miniMetadatas,
- moduleType: "substrate-tokens",
- coders: {
- storage: ({
- chainId
- }) => [tokensPalletByChain.get(chainId) ?? defaultPalletId, "Accounts"]
- }
+ if (!chain) return [];
+ signal?.throwIfAborted();
+ const tokensMetadata = miniMetadata;
+ const palletId = tokensMetadata.palletId ?? defaultPalletId;
+ const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
+ storage: [palletId, "Accounts"]
  });
  return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
  const token = tokens[tokenId];
@@ -7409,18 +7125,8 @@ async function buildQueries(chaindataProvider, addressesByToken) {
  log.debug(`This module doesn't handle tokens of type ${token.type}`);
  return [];
  }
- const networkId = token.networkId;
- if (!networkId) {
- log.warn(`Token ${tokenId} has no chain`);
- return [];
- }
- const chain = chains[networkId];
- if (!chain) {
- log.warn(`Chain ${networkId} for token ${tokenId} not found`);
- return [];
- }
  return addresses.flatMap(address => {
- const scaleCoder =
+ const scaleCoder = networkStorageCoders?.storage;
  const onChainId = (() => {
  try {
  return scale.papiParse(token.onChainId);
@@ -7471,35 +7177,20 @@ async function buildQueries(chaindataProvider, addressesByToken) {
  });
  });
  }
+ async function buildQueries(chainConnector, chaindataProvider$1, addressesByToken, signal) {
+ const byNetwork = lodash.keys(addressesByToken).reduce((acc, tokenId) => {
+ const networkId = chaindataProvider.parseSubTokensTokenId(tokenId).networkId;
+ if (!acc[networkId]) acc[networkId] = {};
+ acc[networkId][tokenId] = addressesByToken[tokenId];
+ return acc;
+ }, {});
+ return (await Promise.all(lodash.toPairs(byNetwork).map(([networkId, addressesByToken]) => {
+ return buildNetworkQueries(networkId, chainConnector, chaindataProvider$1, addressesByToken, signal);
+ }))).flat();
+ }

  const defaultBalanceModules = [EvmErc20Module, EvmNativeModule, EvmUniswapV2Module, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule];

- /** Pulls the latest chaindata from https://github.com/TalismanSociety/chaindata */
- const hydrateChaindataAndMiniMetadata = async (chaindataProvider, miniMetadataUpdater) => {
- // need chains to be provisioned first, or substrate balances won't fetch on first subscription
- await chaindataProvider.hydrate();
- await Promise.all([miniMetadataUpdater.hydrateFromChaindata(), miniMetadataUpdater.hydrateCustomChains()]);
- const chains = await chaindataProvider.chains();
- const {
- statusesByChain
- } = await miniMetadataUpdater.statuses(chains);
- const goodChains = [...statusesByChain.entries()].flatMap(([chainId, status]) => status === "good" ? chainId : []);
- await chaindataProvider.hydrateSubstrateTokens(goodChains);
- };
-
- /** Builds any missing miniMetadatas (e.g. for the user's custom substrate chains) */
- const updateCustomMiniMetadata = async (chaindataProvider, miniMetadataUpdater) => {
- const chainIds = await chaindataProvider.chainIds();
- await miniMetadataUpdater.update(chainIds);
- };
-
- /** Fetches any missing Evm Tokens */
- const updateEvmTokens = async (chaindataProvider, evmTokenFetcher) => {
- await chaindataProvider.hydrate();
- const evmNetworkIds = await chaindataProvider.evmNetworkIds();
- await evmTokenFetcher.update(evmNetworkIds);
- };
-
  exports.Balance = Balance;
  exports.BalanceFormatter = BalanceFormatter;
  exports.BalanceValueGetter = BalanceValueGetter;
@@ -7511,7 +7202,6 @@ exports.EvmNativeModule = EvmNativeModule;
  exports.EvmTokenFetcher = EvmTokenFetcher;
  exports.EvmUniswapV2Module = EvmUniswapV2Module;
  exports.FiatSumBalancesFormatter = FiatSumBalancesFormatter;
- exports.MiniMetadataUpdater = MiniMetadataUpdater;
  exports.ONE_ALPHA_TOKEN = ONE_ALPHA_TOKEN;
  exports.PlanckSumBalancesFormatter = PlanckSumBalancesFormatter;
  exports.RpcStateQueryHelper = RpcStateQueryHelper;
@@ -7527,6 +7217,7 @@ exports.SumBalancesFormatter = SumBalancesFormatter;
  exports.TalismanBalancesDatabase = TalismanBalancesDatabase;
  exports.abiMulticall = abiMulticall;
  exports.balances = balances;
+ exports.buildNetworkStorageCoders = buildNetworkStorageCoders;
  exports.buildStorageCoders = buildStorageCoders;
  exports.calculateAlphaPrice = calculateAlphaPrice;
  exports.calculateTaoAmountFromAlpha = calculateTaoAmountFromAlpha;
@@ -7550,9 +7241,6 @@ exports.getBalanceId = getBalanceId;
  exports.getLockTitle = getLockTitle;
  exports.getUniqueChainIds = getUniqueChainIds;
  exports.getValueId = getValueId;
- exports.hydrateChaindataAndMiniMetadata = hydrateChaindataAndMiniMetadata;
  exports.includeInTotalExtraAmount = includeInTotalExtraAmount;
  exports.makeContractCaller = makeContractCaller;
  exports.uniswapV2PairAbi = uniswapV2PairAbi;
- exports.updateCustomMiniMetadata = updateCustomMiniMetadata;
- exports.updateEvmTokens = updateEvmTokens;