@talismn/balances 0.0.0-pr2043-20250618091117 → 0.0.0-pr2043-20250619015240
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/declarations/src/BalanceModule.d.ts +1 -1
- package/dist/declarations/src/getMiniMetadata/getMiniMetadatas.d.ts +1 -1
- package/dist/declarations/src/getMiniMetadata/getUpdatedMiniMetadatas.d.ts +1 -1
- package/dist/declarations/src/getMiniMetadata/index.d.ts +1 -1
- package/dist/declarations/src/index.d.ts +0 -2
- package/dist/declarations/src/modules/SubstrateNativeModule/types.d.ts +0 -4
- package/dist/declarations/src/modules/SubstrateNativeModule/util/systemProperties.d.ts +22 -0
- package/dist/declarations/src/modules/util/buildStorageCoders.d.ts +10 -0
- package/dist/talismn-balances.cjs.dev.js +459 -771
- package/dist/talismn-balances.cjs.prod.js +459 -771
- package/dist/talismn-balances.esm.js +460 -771
- package/package.json +9 -7
- package/dist/declarations/src/MiniMetadataUpdater.d.ts +0 -43
- package/dist/declarations/src/util/hydrateChaindata.d.ts +0 -8
- package/dist/declarations/src/util/index.d.ts +0 -1
@@ -1,19 +1,19 @@
-import { fetchInitMiniMetadatas, evmErc20TokenId as evmErc20TokenId$1, EvmErc20TokenSchema, evmNativeTokenId, evmUniswapV2TokenId, githubTokenLogoUrl, parseSubAssetTokenId, subAssetTokenId, subForeignAssetTokenId, subNativeTokenId, subPsp22TokenId, subTokensTokenId } from '@talismn/chaindata-provider';
 import { Dexie, liveQuery } from 'dexie';
-import { from, Observable, scan, share, map, switchAll, combineLatest, mergeMap, toArray, interval, startWith, exhaustMap, pipe, filter, shareReplay, combineLatestWith, distinctUntilChanged, firstValueFrom, BehaviorSubject, debounceTime, takeUntil, switchMap, withLatestFrom, concatMap } from 'rxjs';
 import anylogger from 'anylogger';
 import { newTokenRates } from '@talismn/token-rates';
-import { isBigInt, BigMath, planckToTokens,
+import { isBigInt, BigMath, planckToTokens, isArrayOf, isTruthy, isEthereumAddress, hasOwnProperty, decodeAnyAddress, isNotNil, blake2Concat, firstThenDebounce, Deferred } from '@talismn/util';
 import BigNumber from 'bignumber.js';
 import { u8aToHex, assert, stringCamelCase, u8aConcatStrict, u8aConcat, arrayChunk, u8aToString, hexToNumber, hexToU8a } from '@polkadot/util';
 import { xxhashAsU8a, blake2AsU8a } from '@polkadot/util-crypto';
 import pako from 'pako';
+import { evmErc20TokenId as evmErc20TokenId$1, EvmErc20TokenSchema, evmNativeTokenId, evmUniswapV2TokenId, githubTokenLogoUrl, parseSubAssetTokenId, subAssetTokenId, parseSubForeignAssetTokenId, subForeignAssetTokenId, parseSubNativeTokenId, subNativeTokenId, subPsp22TokenId, parseSubTokensTokenId, subTokensTokenId } from '@talismn/chaindata-provider';
 import { parseAbi, isHex, hexToBigInt } from 'viem';
 import isEqual from 'lodash/isEqual';
 import { defineMethod } from '@substrate/txwrapper-core';
-import { unifyMetadata, decAnyMetadata, getDynamicBuilder, getLookupFn,
-import { keys, toPairs } from 'lodash';
+import { unifyMetadata, decAnyMetadata, getDynamicBuilder, getLookupFn, compactMetadata, encodeMetadata, decodeScale, papiParse, getMetadataVersion, encodeStateKey } from '@talismn/scale';
+import { keys, toPairs, groupBy as groupBy$1 } from 'lodash';
 import camelCase from 'lodash/camelCase';
+import PQueue from 'p-queue';
 import { fetchBestMetadata, getScaleApi } from '@talismn/sapi';
 import { Metadata, TypeRegistry } from '@polkadot/types';
 import groupBy from 'lodash/groupBy';
@@ -21,8 +21,10 @@ import { mergeUint8, toHex } from '@polkadot-api/utils';
 import { Binary, AccountId } from 'polkadot-api';
 import PromisePool from '@supercharge/promise-pool';
 import { ChainConnectionError } from '@talismn/chain-connector';
+import { Observable, scan, share, map, switchAll, combineLatest, from, mergeMap, toArray, interval, startWith, exhaustMap, pipe, filter, shareReplay, combineLatestWith, distinctUntilChanged, firstValueFrom, BehaviorSubject, debounceTime, takeUntil, switchMap, withLatestFrom, concatMap } from 'rxjs';
 import { u32, u128, Struct } from 'scale-ts';
 import upperFirst from 'lodash/upperFirst';
+import z from 'zod/v4';
 import { Abi } from '@polkadot/api-contract';
 
 // TODO: Document default balances module purpose/usage
@@ -107,13 +109,11 @@ class EvmTokenFetcher {
 // }
 }
 
-var
+var pkg = {
 name: "@talismn/balances",
-version: "0.0.0-pr2043-
+version: "0.0.0-pr2043-20250619015240"};
 
-
-
-var log = anylogger(packageJson.name);
+var log = anylogger(pkg.name);
 
 function excludeFromTransferableAmount(locks) {
 if (typeof locks === "string") return BigInt(locks);
@@ -323,20 +323,13 @@ class Balances {
 return new SumBalancesFormatter(this);
 }
 }
-
-// type BalanceJsonEvm = BalanceJson & { evmNetworkId: string }
-
-// const isBalanceEvm = (balance: BalanceJson): balance is BalanceJsonEvm => "evmNetworkId" in balance
-
 const getBalanceId = balance => {
 const {
 source,
 address,
-tokenId
-networkId
+tokenId
 } = balance;
-
-return [source, address, networkId, tokenId].filter(isTruthy).join("::");
+return [source, address, tokenId].join("::");
 };
 
 /**
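Note: the hunk above drops `networkId` from the balance id. A minimal sketch of the new derivation is below; the `BalanceJsonLike` shape is a simplification for illustration, not the package's actual type.

    // Hypothetical minimal shape, for illustration only; the package types this differently.
    type BalanceJsonLike = { source: string; address: string; tokenId: string }

    // Mirrors the new behaviour: the id no longer embeds networkId,
    // since the tokenId itself already identifies the network (see parseSub*TokenId usage below).
    const getBalanceId = ({ source, address, tokenId }: BalanceJsonLike): string =>
      [source, address, tokenId].join("::")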
@@ -398,28 +391,6 @@ class Balance {
 get address() {
 return this.#storage.address;
 }
-
-// /** @deprecated */
-// get chainId() {
-// return isBalanceEvm(this.#storage) ? undefined : this.#storage.chainId
-// }
-// /** @deprecated */
-// get chain() {
-// return (this.#db?.networks && this.networkId && this.#db?.networks[this.networkId]) || null
-// }
-
-// /** @deprecated */
-// get evmNetworkId() {
-// return isBalanceEvm(this.#storage) ? this.#storage.evmNetworkId : undefined
-// }
-// /** @deprecated */
-// get evmNetwork() {
-// return (
-// (this.#db?.networks && this.networkId && this.#db?.networks[this.networkId]) ||
-// null
-// )
-// }
-
 get networkId() {
 return this.#storage.networkId;
 }
@@ -1048,257 +1019,6 @@ class TalismanBalancesDatabase extends Dexie {
 }
 const db = new TalismanBalancesDatabase();
 
-const minimumHydrationInterval = 300_000; // 300_000ms = 300s = 5 minutes
-
-/**
-* A substrate dapp needs access to a set of types when it wants to communicate with a blockchain node.
-*
-* These types are used to encode requests & decode responses via the SCALE codec.
-* Each chain generally has its own set of types.
-*
-* Substrate provides a construct to retrieve these types from a blockchain node.
-* The chain metadata.
-*
-* The metadata includes the types required for any communication with the chain,
-* including lots of methods which are not relevant to balance fetching.
-*
-* As such, the metadata can clock in at around 1-2MB per chain, which is a lot of storage
-* for browser-based dapps which want to connect to lots of chains.
-*
-* By utilizing the wonderful [scale-ts](https://github.com/unstoppablejs/unstoppablejs/tree/main/packages/scale-ts#readme) library,
-* we can trim the chain metadata down so that it only includes the types we need for balance fetching.
-*
-* Each balance module has a function to do just that, `BalanceModule::fetchSubstrateChainMeta`.
-*
-* But, we only want to run this operation when necessary.
-*
-* The purpose of this class, `MiniMetadataUpdater`, is to maintain a local cache of
-* trimmed-down metadatas, which we'll refer to as `MiniMetadatas`.
-*/
-class MiniMetadataUpdater {
-#lastHydratedMiniMetadatasAt = 0;
-#lastHydratedCustomChainsAt = 0;
-#chainConnectors;
-#chaindataProvider;
-#balanceModules;
-constructor(chainConnectors, chaindataProvider, balanceModules) {
-this.#chainConnectors = chainConnectors;
-this.#chaindataProvider = chaindataProvider;
-this.#balanceModules = balanceModules;
-}
-
-/** Subscribe to the metadata for a chain */
-subscribe(chainId) {
-return from(liveQuery(() => db.miniMetadatas.filter(m => m.chainId === chainId).toArray().then(array => array[0])));
-}
-async update(chainIds) {
-await this.updateSubstrateChains(chainIds);
-}
-async statuses(chains) {
-const ids = await db.miniMetadatas.orderBy("id").primaryKeys();
-const wantedIdsByChain = new Map(chains.flatMap(({
-id: chainId,
-specName,
-specVersion
-}) => {
-if (specName === null) return [];
-if (specVersion === null) return [];
-return [[chainId, this.#balanceModules.filter(m => m.type.startsWith("substrate-")).map(({
-type: source
-}) => deriveMiniMetadataId({
-source,
-chainId,
-specVersion,
-libVersion
-}))]];
-}));
-const statusesByChain = new Map(Array.from(wantedIdsByChain.entries()).map(([chainId, wantedIds]) => [chainId, wantedIds.every(wantedId => ids.includes(wantedId)) ? "good" : "none"]));
-return {
-wantedIdsByChain,
-statusesByChain
-};
-}
-async hydrateFromChaindata() {
-// TODO review this. feels unnecessary to fetch them all
-
-const now = Date.now();
-if (now - this.#lastHydratedMiniMetadatasAt < minimumHydrationInterval) return false;
-const dbHasMiniMetadatas = (await db.miniMetadatas.count()) > 0;
-try {
-try {
-var miniMetadatas = await this.#chaindataProvider.miniMetadatas(); // eslint-disable-line no-var
-if (miniMetadatas.length <= 0) throw new Error("Ignoring empty chaindata miniMetadatas response");
-} catch (error) {
-if (dbHasMiniMetadatas) throw error;
-log.warn("Failed to fetch miniMetadatas from chaindata", error);
-// On first start-up (db is empty), if we fail to fetch miniMetadatas then we should
-// initialize the DB with the list of miniMetadatas inside our init/mini-metadatas.json file.
-// This data will represent a relatively recent copy of what's in chaindata,
-// which will be better for our users than to have nothing at all.
-var miniMetadatas = await fetchInitMiniMetadatas(); // eslint-disable-line no-var
-}
-await db.miniMetadatas.bulkPut(miniMetadatas);
-this.#lastHydratedMiniMetadatasAt = now;
-return true;
-} catch (error) {
-log.warn(`Failed to hydrate miniMetadatas from chaindata`, error);
-return false;
-}
-}
-async hydrateCustomChains() {
-// TODO
-// const now = Date.now()
-// if (now - this.#lastHydratedCustomChainsAt < minimumHydrationInterval) return false
-// const chains = await this.#chaindataProvider.chains()
-// const customChains = chains.filter(
-// (chain): chain is CustomChain => "isCustom" in chain && chain.isCustom,
-// )
-// const updatedCustomChains: Array<CustomChain> = []
-// const concurrency = 4
-// ;(
-// await PromisePool.withConcurrency(concurrency)
-// .for(customChains)
-// .process(async (customChain) => {
-// const send = (method: string, params: unknown[]) =>
-// this.#chainConnectors.substrate?.send(customChain.id, method, params)
-// const [genesisHash, runtimeVersion, chainName, chainType] = await Promise.all([
-// send("chain_getBlockHash", [0]),
-// send("state_getRuntimeVersion", []),
-// send("system_chain", []),
-// send("system_chainType", []),
-// ])
-// // deconstruct rpc data
-// const { specName, implName } = runtimeVersion
-// const specVersion = String(runtimeVersion.specVersion)
-// const changed =
-// customChain.genesisHash !== genesisHash ||
-// customChain.chainName !== chainName ||
-// !isEqual(customChain.chainType, chainType) ||
-// customChain.implName !== implName ||
-// customChain.specName !== specName ||
-// customChain.specVersion !== specVersion
-// if (!changed) return
-// customChain.genesisHash = genesisHash
-// customChain.chainName = chainName
-// customChain.chainType = chainType
-// customChain.implName = implName
-// customChain.specName = specName
-// customChain.specVersion = specVersion
-// updatedCustomChains.push(customChain)
-// })
-// ).errors.forEach((error) => log.error("Error hydrating custom chains", error))
-// if (updatedCustomChains.length > 0) {
-// await this.#chaindataProvider.transaction("rw", ["chains"], async () => {
-// for (const updatedCustomChain of updatedCustomChains) {
-// await this.#chaindataProvider.removeCustomChain(updatedCustomChain.id)
-// await this.#chaindataProvider.addCustomChain(updatedCustomChain)
-// }
-// })
-// }
-// if (updatedCustomChains.length > 0) this.#lastHydratedCustomChainsAt = now
-// return true
-}
-async updateSubstrateChains(_chainIds) {
-// const chains = new Map(
-// (await this.#chaindataProvider.chains()).map((chain) => [chain.id, chain]),
-// )
-// const filteredChains = chainIds.flatMap((chainId) => chains.get(chainId) ?? [])
-// const ids = await balancesDb.miniMetadatas.orderBy("id").primaryKeys()
-// const { wantedIdsByChain, statusesByChain } = await this.statuses(filteredChains)
-// // clean up store
-// const wantedIds = Array.from(wantedIdsByChain.values()).flatMap((ids) => ids)
-// const unwantedIds = ids.filter((id) => !wantedIds.includes(id))
-// if (unwantedIds.length > 0) {
-// const chainIds = Array.from(
-// new Set((await balancesDb.miniMetadatas.bulkGet(unwantedIds)).map((m) => m?.chainId)),
-// )
-// log.info(`Pruning ${unwantedIds.length} miniMetadatas on chains ${chainIds.join(", ")}`)
-// await balancesDb.miniMetadatas.bulkDelete(unwantedIds)
-// }
-// const needUpdates = Array.from(statusesByChain.entries())
-// .filter(([, status]) => status !== "good")
-// .map(([chainId]) => chainId)
-// if (needUpdates.length > 0)
-// log.info(`${needUpdates.length} miniMetadatas need updates (${needUpdates.join(", ")})`)
-// const availableTokenLogos = await availableTokenLogoFilenames().catch((error) => {
-// log.error("Failed to fetch available token logos", error)
-// return []
-// })
-// const concurrency = 12
-// ;(
-// await PromisePool.withConcurrency(concurrency)
-// .for(needUpdates)
-// .process(async (chainId) => {
-// log.info(`Updating metadata for chain ${chainId}`)
-// const chain = chains.get(chainId)
-// if (!chain) return
-// const { specName, specVersion } = chain
-// if (specName === null) return
-// if (specVersion === null) return
-// const fetchMetadata = async () => {
-// try {
-// return await fetchBestMetadata(
-// (method, params, isCacheable) => {
-// if (!this.#chainConnectors.substrate)
-// throw new Error("Substrate connector is not available")
-// return this.#chainConnectors.substrate.send(chainId, method, params, isCacheable)
-// },
-// true, // allow v14 fallback
-// )
-// } catch (err) {
-// log.warn(`Failed to fetch metadata for chain ${chainId}`)
-// return undefined
-// }
-// }
-// const [metadataRpc, systemProperties] = await Promise.all([
-// fetchMetadata(),
-// this.#chainConnectors.substrate?.send(chainId, "system_properties", []),
-// ])
-// for (const mod of this.#balanceModules.filter((m) => m.type.startsWith("substrate-"))) {
-// const balancesConfig = (chain.balancesConfig ?? []).find(
-// ({ moduleType }) => moduleType === mod.type,
-// )
-// const moduleConfig = balancesConfig?.moduleConfig ?? {}
-// const chainMeta = await mod.fetchSubstrateChainMeta(
-// chainId,
-// moduleConfig,
-// metadataRpc,
-// systemProperties,
-// )
-// const tokens = await mod.fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig)
-// // update tokens in chaindata
-// await this.#chaindataProvider.updateChainTokens(
-// chainId,
-// mod.type,
-// Object.values(tokens),
-// availableTokenLogos,
-// )
-// // update miniMetadatas
-// const { miniMetadata: data, metadataVersion: version, ...extra } = chainMeta ?? {}
-// await balancesDb.miniMetadatas.put({
-// id: deriveMiniMetadataId({
-// source: mod.type,
-// chainId,
-// specName,
-// specVersion,
-// balancesConfig: JSON.stringify(moduleConfig),
-// }),
-// source: mod.type,
-// chainId,
-// specName,
-// specVersion,
-// balancesConfig: JSON.stringify(moduleConfig),
-// // TODO: Standardise return value from `fetchSubstrateChainMeta`
-// version,
-// data,
-// extra: JSON.stringify(extra),
-// })
-// }
-// })
-// ).errors.forEach((error) => log.error("Error updating chain metadata", error))
-}
-}
-
 const erc20Abi = [{
 constant: true,
 inputs: [],
@@ -2940,12 +2660,14 @@ async function getPoolBalance(publicClient, contractAddress, accountAddress) {
 }
 }
 
+const libVersion = pkg.version;
+
 // cache the promise so it can be shared across multiple calls
 const CACHE_GET_SPEC_VERSION = new Map();
 const fetchSpecVersion = async (chainConnector, networkId) => {
 const {
 specVersion
-} = await chainConnector.send(networkId, "state_getRuntimeVersion", [true
+} = await chainConnector.send(networkId, "state_getRuntimeVersion", [], true);
 return specVersion;
 };
 
@@ -2987,9 +2709,16 @@ const getMetadataRpc = async (chainConnector, networkId) => {
 
 // share requests as all modules will call this at once
 const CACHE = new Map();
-
+
+// ensures we dont fetch miniMetadatas on more than 4 chains at once
+const POOL = new PQueue({
+concurrency: 4
+});
+const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, specVersion, signal) => {
 if (CACHE.has(networkId)) return CACHE.get(networkId);
-const pResult = fetchMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion)
+const pResult = POOL.add(() => fetchMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion), {
+signal
+});
 CACHE.set(networkId, pResult);
 try {
 return await pResult;
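Note: the hunk above wraps the miniMetadata fetch in a `p-queue` pool so at most four chains are fetched concurrently, while the `Map` still shares the in-flight promise between callers for the same network. A standalone sketch of the same pattern follows; the names (`pool`, `inflight`, `getShared`) are placeholders, not the package's API.

    import PQueue from "p-queue"

    const pool = new PQueue({ concurrency: 4 }) // at most 4 fetches in flight at once
    const inflight = new Map<string, Promise<unknown>>()

    async function getShared<T>(key: string, fetcher: () => Promise<T>, signal?: AbortSignal): Promise<T> {
      if (inflight.has(key)) return inflight.get(key) as Promise<T>
      const pending = pool.add(fetcher, { signal }) as Promise<T> // aborting the signal rejects the queued task
      inflight.set(key, pending)
      try {
        return await pending
      } finally {
        inflight.delete(key) // the map only de-duplicates concurrent callers, it is not a long-lived cache
      }
    }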
@@ -3001,49 +2730,57 @@ const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, sp
 CACHE.delete(networkId);
 }
 };
-const fetchMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion) => {
-const
-
-
-
-
-chainConnectors
-
-
-
-const
-
-
-
+const fetchMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion, signal) => {
+const start = performance.now();
+log.debug("[miniMetadata] fetching minimetadatas for %s", chainId);
+try {
+const metadataRpc = await getMetadataRpc(chainConnector, chainId);
+signal?.throwIfAborted();
+const chainConnectors = {
+substrate: chainConnector,
+evm: {} // wont be used but workarounds error for module creation
+};
+const modules = defaultBalanceModules.map(mod => mod({
+chainConnectors,
+chaindataProvider
+})).filter(mod => mod.type.startsWith("substrate-"));
+return Promise.all(modules.map(async mod => {
+const source = mod.type;
+const chainMeta = await mod.fetchSubstrateChainMeta(chainId, {}, metadataRpc);
+return {
+id: deriveMiniMetadataId({
+source,
+chainId,
+specVersion,
+libVersion
+}),
 source,
 chainId,
 specVersion,
-libVersion
-
-
-
-
-
-
-};
-}));
+libVersion,
+data: chainMeta?.miniMetadata ?? null
+};
+}));
+} finally {
+log.debug("[miniMetadata] updated miniMetadatas for %s in %sms", chainId, performance.now() - start);
+}
 };
 
-const getUpdatedMiniMetadatas = async (chainConnector, chaindataProvider,
-const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider,
+const getUpdatedMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion, signal) => {
+const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion, signal);
+signal?.throwIfAborted();
 await db.transaction("readwrite", "miniMetadatas", async tx => {
 await tx.miniMetadatas.where({
-
+chainId
 }).delete();
 await tx.miniMetadatas.bulkPut(miniMetadatas);
 });
 return miniMetadatas;
 };
 
-const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, source) => {
+const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, source, signal) => {
 const specVersion = await getSpecVersion(chainConnector, chainId);
-
-// TODO when working a chaindata branch, need a way to pass the libVersion used to derive the miniMetadataId got github
+signal?.throwIfAborted();
 const miniMetadataId = deriveMiniMetadataId({
 source,
 chainId,
@@ -3053,11 +2790,13 @@ const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, sourc
 
 // lookup local ones
 const [dbMiniMetadata, ghMiniMetadata] = await Promise.all([db.miniMetadatas.get(miniMetadataId), chaindataProvider.miniMetadataById(miniMetadataId)]);
+signal?.throwIfAborted();
 const miniMetadata = dbMiniMetadata ?? ghMiniMetadata;
 if (miniMetadata) return miniMetadata;
 
 // update from live chain metadata and persist locally
-const miniMetadatas = await getUpdatedMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion);
+const miniMetadatas = await getUpdatedMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion, signal);
+signal?.throwIfAborted();
 const found = miniMetadatas.find(m => m.id === miniMetadataId);
 if (!found) {
 log.warn("MiniMetadata not found in updated miniMetadatas", {
@@ -3144,6 +2883,28 @@ const buildStorageCoders = ({
 return [];
 }
 }));
+const buildNetworkStorageCoders = (chainId, miniMetadata, coders) => {
+if (!miniMetadata.data) return null;
+const metadata = unifyMetadata(decAnyMetadata(miniMetadata.data));
+try {
+const scaleBuilder = getDynamicBuilder(getLookupFn(metadata));
+const builtCoders = Object.fromEntries(Object.entries(coders).flatMap(([key, moduleMethodOrFn]) => {
+const [module, method] = typeof moduleMethodOrFn === "function" ? moduleMethodOrFn({
+chainId
+}) : moduleMethodOrFn;
+try {
+return [[key, scaleBuilder.buildStorage(module, method)]];
+} catch (cause) {
+log.trace(`Failed to build SCALE coder for chain ${chainId} (${module}::${method})`, cause);
+return [];
+}
+}));
+return builtCoders;
+} catch (cause) {
+log.error(`Failed to build SCALE coders for chain ${chainId} (${JSON.stringify(coders)})`, cause);
+}
+return null;
+};
 
 /**
 * Decodes & unwraps outputs and errors of a given result, contract, and method.
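Note: `buildNetworkStorageCoders` above builds SCALE storage coders for a single network from its miniMetadata and silently skips any pallet/storage pair that the chain does not expose. A generic, self-contained sketch of that "build what you can, skip what you can't" pattern is below; `buildOne` is a stand-in for `scaleBuilder.buildStorage(pallet, item)`, and the coder's return type is left opaque on purpose.

    // Generic sketch of the pattern used above; buildOne is an assumed stand-in,
    // not an export of @talismn/balances or @talismn/scale.
    function buildCoders(
      specs: Record<string, [pallet: string, item: string]>,
      buildOne: (pallet: string, item: string) => unknown,
    ): Record<string, unknown> {
      const entries: Array<[string, unknown]> = []
      for (const [key, [pallet, item]] of Object.entries(specs)) {
        try {
          entries.push([key, buildOne(pallet, item)]) // keep coders that build cleanly
        } catch {
          // a chain without this pallet simply gets no coder for it
        }
      }
      return Object.fromEntries(entries)
    }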
@@ -3344,15 +3105,9 @@ const SubAssetsModule = hydrate => {
 assert(chainConnector, "This module requires a substrate chain connector");
 return {
 ...DefaultBalanceModule(moduleType$4),
+// TODO make synchronous at the module definition level ?
 async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
-
-if (metadataRpc === undefined) return {
-isTestnet
-};
-if ((moduleConfig?.tokens ?? []).length < 1) return {
-isTestnet
-};
-const metadataVersion = getMetadataVersion(metadataRpc);
+if (!metadataRpc) return {};
 const metadata = decAnyMetadata(metadataRpc);
 compactMetadata(metadata, [{
 pallet: "Assets",
@@ -3360,9 +3115,7 @@ const SubAssetsModule = hydrate => {
 }]);
 const miniMetadata = encodeMetadata(metadata);
 return {
-
-miniMetadata,
-metadataVersion
+miniMetadata
 };
 },
 async fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig) {
@@ -3427,44 +3180,32 @@ const SubAssetsModule = hydrate => {
 return acc;
 }, {});
 const controller = new AbortController();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const pUnsubs = Promise.all(toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
+try {
+const queries = await buildNetworkQueries$2(networkId, chainConnector, chaindataProvider, addressesByToken, controller.signal);
+if (controller.signal.aborted) return () => {};
+const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
+return await stateHelper.subscribe((error, result) => {
+// console.log("SubstrateAssetsModule.callback", { error, result })
+if (error) return callback(error);
+const balances = result?.filter(b => b !== null) ?? [];
+if (balances.length > 0) callback(null, new Balances(balances));
+});
+} catch (err) {
+if (!controller.signal.aborted) log.error(`Failed to subscribe balances for network ${networkId}`, err);
+return () => {};
+}
 }));
-
-// const networkIds = uniq(uniq(keys(addressesByToken)).map((tokenId) => parseSubAssetTokenId(tokenId).networkId))
-// const
-
-//console.log("SubstrateAssetsModule.subscribeBalances 1", { addressesByToken })
-// const queries = await buildQueries(chaindataProvider, addressesByToken)
-// //console.log("SubstrateAssetsModule.subscribeBalances 2", { queries, addressesByToken })
-// const unsubscribe = await new RpcStateQueryHelper(chainConnector, queries).subscribe(
-// (error, result) => {
-// // console.log("SubstrateAssetsModule.callback", { error, result })
-// if (error) return callback(error)
-// const balances = result?.filter((b): b is SubAssetsBalance => b !== null) ?? []
-// if (balances.length > 0) callback(null, new Balances(balances))
-// },
-// )
-
 return () => {
 controller.abort();
+pUnsubs.then(unsubs => {
+unsubs.forEach(unsubscribe => unsubscribe());
+});
 };
 },
 async fetchBalances(addressesByToken) {
 assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const queries = await buildQueries$3(chaindataProvider, addressesByToken);
+const queries = await buildQueries$3(chainConnector, chaindataProvider, addressesByToken);
 const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
 const balances = result?.filter(b => b !== null) ?? [];
 return new Balances(balances);
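Note: subscriptions are now started per network, with an AbortController so the teardown returned to the caller can cancel query building that is still in flight and then unwind whichever subscriptions did come up. A reduced sketch of that life cycle follows; `subscribeNetwork` is a placeholder for the real buildNetworkQueries/RpcStateQueryHelper calls above.

    type Unsubscribe = () => void

    // Placeholder for the per-network work done above (build queries, subscribe via RPC).
    declare function subscribeNetwork(networkId: string, signal: AbortSignal): Promise<Unsubscribe>

    function subscribeAll(networkIds: string[]): Unsubscribe {
      const controller = new AbortController()
      const pUnsubs = Promise.all(
        networkIds.map(async (networkId) => {
          try {
            return await subscribeNetwork(networkId, controller.signal)
          } catch (err) {
            // ignore failures caused by our own abort; surface the rest
            if (!controller.signal.aborted) console.error(`subscription failed for ${networkId}`, err)
            return () => {}
          }
        }),
      )
      return () => {
        controller.abort() // stop query building that is still in flight
        pUnsubs.then((unsubs) => unsubs.forEach((unsub) => unsub())) // tear down live subscriptions
      }
    }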
@@ -3534,23 +3275,14 @@ const SubAssetsModule = hydrate => {
 }
 };
 };
-async function buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken) {
-const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$4);
-
+async function buildNetworkQueries$2(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
+const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$4, signal);
+// console.log("Fetched miniMetadata for network", networkId, { miniMetadata })
+const chain = await chaindataProvider.chainById(networkId);
 const tokensById = await chaindataProvider.tokensById();
-
-const
-[
-} : {};
-const miniMetadatas = new Map([[miniMetadata.id, miniMetadata]]);
-const chainStorageCoders = buildStorageCoders({
-chainIds,
-chains,
-miniMetadatas,
-moduleType: moduleType$4,
-coders: {
-storage: ["Assets", "Account"]
-}
+signal?.throwIfAborted();
+const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
+storage: ["Assets", "Account"]
 });
 return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
 const token = tokensById[tokenId];
|
 log.debug(`This module doesn't handle tokens of type ${token.type}`);
 return [];
 }
-
-if (!networkId) {
-log.warn(`Token ${tokenId} has no chain`);
-return [];
-}
-const chain = chains[networkId];
+//
 if (!chain) {
 log.warn(`Chain ${networkId} for token ${tokenId} not found`);
 return [];
 }
 return addresses.flatMap(address => {
-const scaleCoder =
-const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, token.assetId, address);
+const scaleCoder = networkStorageCoders?.storage;
+const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, Number(token.assetId), address);
 if (!stateKey) {
 log.warn(`Invalid assetId / address in ${networkId} storage query ${token.assetId} / ${address}`);
 return [];
@@ -3629,102 +3356,16 @@ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider,
 });
 });
 }
-async function buildQueries$3(chaindataProvider, addressesByToken) {
-const
-
-
-
-
-
-
-
-
-const chains = Object.fromEntries(uniqueChainIds.map(chainId => [chainId, allChains[chainId]]));
-const chainStorageCoders = buildStorageCoders({
-chainIds: uniqueChainIds,
-chains,
-miniMetadatas,
-moduleType: "substrate-assets",
-coders: {
-storage: ["Assets", "Account"]
-}
-});
-return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
-const token = tokens[tokenId];
-if (!token) {
-log.warn(`Token ${tokenId} not found`);
-return [];
-}
-if (token.type !== "substrate-assets") {
-log.debug(`This module doesn't handle tokens of type ${token.type}`);
-return [];
-}
-const networkId = token.networkId;
-if (!networkId) {
-log.warn(`Token ${tokenId} has no chain`);
-return [];
-}
-const chain = chains[networkId];
-if (!chain) {
-log.warn(`Chain ${networkId} for token ${tokenId} not found`);
-return [];
-}
-return addresses.flatMap(address => {
-const scaleCoder = chainStorageCoders.get(networkId)?.storage;
-const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, token.assetId, address);
-if (!stateKey) {
-log.warn(`Invalid assetId / address in ${networkId} storage query ${token.assetId} / ${address}`);
-return [];
-}
-const decodeResult = change => {
-/** NOTE: This type is only a hint for typescript, the chain can actually return whatever it wants to */
-
-const decoded = decodeScale(scaleCoder, change, `Failed to decode substrate-assets balance on chain ${networkId}`) ?? {
-balance: 0n,
-status: {
-type: "Liquid"
-}};
-const isFrozen = decoded?.status?.type === "Frozen";
-const amount = (decoded?.balance ?? 0n).toString();
-
-// due to the following balance calculations, which are made in the `Balance` type:
-//
-// total balance = (free balance) + (reserved balance)
-// transferable balance = (free balance) - (frozen balance)
-//
-// when `isFrozen` is true we need to set **both** the `free` and `frozen` amounts
-// of this balance to the value we received from the RPC.
-//
-// if we only set the `frozen` amount, then the `total` calculation will be incorrect!
-const free = amount;
-const frozen = token.isFrozen || isFrozen ? amount : "0";
-
-// include balance values even if zero, so that newly-zero values overwrite old values
-const balanceValues = [{
-type: "free",
-label: "free",
-amount: free.toString()
-}, {
-type: "locked",
-label: "frozen",
-amount: frozen.toString()
-}];
-return {
-source: "substrate-assets",
-status: "live",
-address,
-networkId,
-tokenId: token.id,
-values: balanceValues
-};
-};
-return {
-chainId: networkId,
-stateKey,
-decodeResult
-};
-});
-});
+async function buildQueries$3(chainConnector, chaindataProvider, addressesByToken, signal) {
+const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = parseSubAssetTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+return (await Promise.all(toPairs(byNetwork).map(([networkId, addressesByToken]) => {
+return buildNetworkQueries$2(networkId, chainConnector, chaindataProvider, addressesByToken, signal);
+}))).flat();
 }
 // NOTE: Different chains need different formats for assetId when encoding the stateKey
 // E.g. Polkadot Asset Hub needs it to be a string, Astar needs it to be a bigint
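Note: the trailing comment explains why the state key is encoded twice: the new code tries `BigInt(assetId)` first and falls back to `Number(assetId)`. A self-contained sketch of that fallback is below; `StorageCoder` and its `enc` method are assumptions about the coder shape made for illustration, and `tryEncode`/`buildStateKey` here are stand-ins rather than the package's exports.

    // Assumed coder shape, for illustration only.
    type StorageCoder = { enc: (...args: unknown[]) => string }

    // Stand-in for the tryEncode helper in this file: return undefined instead of throwing
    // when the coder rejects a particular assetId representation.
    const tryEncode = (coder: StorageCoder | undefined, ...args: unknown[]): string | undefined => {
      try {
        return coder?.enc(...args)
      } catch {
        return undefined
      }
    }

    // Some chains expect assetId as a bigint, others as a number, so try both before giving up.
    const buildStateKey = (coder: StorageCoder | undefined, assetId: string, address: string) =>
      tryEncode(coder, BigInt(assetId), address) ?? tryEncode(coder, Number(assetId), address)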
@@ -3749,10 +3390,7 @@ const SubForeignAssetsModule = hydrate => {
 return {
 ...DefaultBalanceModule(moduleType$3),
 async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
-// const isTestnet = (await chaindataProvider.chainById(chainId))?.isTestnet || false
 if (metadataRpc === undefined) return {};
-// if ((moduleConfig?.tokens ?? []).length < 1) return { isTestnet }
-
 const metadataVersion = getMetadataVersion(metadataRpc);
 if (metadataVersion < 14) return {};
 const metadata = decAnyMetadata(metadataRpc);
@@ -3771,9 +3409,6 @@ const SubForeignAssetsModule = hydrate => {
 miniMetadata
 } = chainMeta;
 if (!miniMetadata) return {};
-// if (miniMetadata === undefined || metadataVersion === undefined) return {}
-// if (metadataVersion < 14) return {}
-
 const metadata = decAnyMetadata(miniMetadata);
 const unifiedMetadata = unifyMetadata(metadata);
 const scaleBuilder = getDynamicBuilder(getLookupFn(unifiedMetadata));
@@ -3827,17 +3462,38 @@ const SubForeignAssetsModule = hydrate => {
 async subscribeBalances({
 addressesByToken
 }, callback) {
-const
-
-if (
-
-
-});
-
+const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = parseSubForeignAssetTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+const controller = new AbortController();
+const pUnsubs = Promise.all(toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
+try {
+const queries = await buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, controller.signal);
+if (controller.signal.aborted) return () => {};
+const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
+return await stateHelper.subscribe((error, result) => {
+if (error) return callback(error);
+const balances = result?.filter(b => b !== null) ?? [];
+if (balances.length > 0) callback(null, new Balances(balances));
+});
+} catch (err) {
+if (!controller.signal.aborted) log.error(`Failed to subscribe ${moduleType$3} balances for network ${networkId}`, err);
+return () => {};
+}
+}));
+return () => {
+controller.abort();
+pUnsubs.then(unsubs => {
+unsubs.forEach(unsubscribe => unsubscribe());
+});
+};
 },
 async fetchBalances(addressesByToken) {
 assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const queries = await buildQueries$2(chaindataProvider, addressesByToken);
+const queries = await buildQueries$2(chainConnector, chaindataProvider, addressesByToken);
 const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
 const balances = result?.filter(b => b !== null) ?? [];
 return new Balances(balances);
@@ -3893,23 +3549,16 @@ const SubForeignAssetsModule = hydrate => {
 }
 };
 };
-async function
-const
-const
-const
-
-const
-
-chainIds: uniqueChainIds,
-chains,
-miniMetadatas,
-moduleType: "substrate-foreignassets",
-coders: {
-storage: ["ForeignAssets", "Account"]
-}
+async function buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
+const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$3, signal);
+const chain = await chaindataProvider.chainById(networkId);
+const tokensById = await chaindataProvider.tokensById();
+signal?.throwIfAborted();
+const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
+storage: ["ForeignAssets", "Account"]
 });
 return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
-const token =
+const token = tokensById[tokenId];
 if (!token) {
 log.warn(`Token ${tokenId} not found`);
 return [];
@@ -3918,18 +3567,12 @@ async function buildQueries$2(chaindataProvider, addressesByToken) {
 log.debug(`This module doesn't handle tokens of type ${token.type}`);
 return [];
 }
-const networkId = token.networkId;
-if (!networkId) {
-log.warn(`Token ${tokenId} has no chain`);
-return [];
-}
-const chain = chains[networkId];
 if (!chain) {
 log.warn(`Chain ${networkId} for token ${tokenId} not found`);
 return [];
 }
 return addresses.flatMap(address => {
-const scaleCoder =
+const scaleCoder = networkStorageCoders?.storage;
 const onChainId = (() => {
 try {
 return papiParse(token.onChainId);
@@ -3989,6 +3632,17 @@ async function buildQueries$2(chaindataProvider, addressesByToken) {
 });
 });
 }
+async function buildQueries$2(chainConnector, chaindataProvider, addressesByToken, signal) {
+const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = parseSubForeignAssetTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+return (await Promise.all(toPairs(byNetwork).map(([networkId, addressesByToken]) => {
+return buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, signal);
+}))).flat();
+}
 
 async function subscribeBase(queries, chainConnector, callback) {
 const unsubscribe = await new RpcStateQueryHelper(chainConnector, queries).subscribe((error, result) => {
@@ -5400,8 +5054,23 @@ class SubNativeBalanceError extends Error {
 }
 }
 
-const
-
+const DotNetworkPropertiesSimple = z.object({
+tokenDecimals: z.number().optional().default(0),
+tokenSymbol: z.string().optional().default("Unit")
+});
+const DotNetworkPropertiesArray = z.object({
+tokenDecimals: z.array(z.number()).nonempty(),
+tokenSymbol: z.array(z.string()).nonempty()
+});
+const DotNetworkProperties = z.union([DotNetworkPropertiesSimple, DotNetworkPropertiesArray]).transform(val => ({
+tokenDecimals: Array.isArray(val.tokenDecimals) ? val.tokenDecimals[0] : val.tokenDecimals,
+tokenSymbol: Array.isArray(val.tokenSymbol) ? val.tokenSymbol[0] : val.tokenSymbol
+}));
+const getChainProperties = async (chainConnector, networkId) => {
+const properties = await chainConnector.send(networkId, "system_properties", [], true);
+return DotNetworkProperties.parse(properties);
+};
+
 const POLLING_WINDOW_SIZE = 20;
 const MAX_SUBSCRIPTION_SIZE = 40;
 const SubNativeModule = hydrate => {
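Note: `system_properties` responses differ between chains — some return scalar `tokenSymbol`/`tokenDecimals`, others return arrays — and the zod schema added above normalises both shapes (and missing fields) to a single symbol and decimals. A usage sketch follows, assuming the `DotNetworkProperties` schema from the hunk above is in scope; the sample payloads are illustrative rather than taken from a specific chain.

    // Scalar shape, as returned by many chains.
    DotNetworkProperties.parse({ tokenSymbol: "DOT", tokenDecimals: 10 })
    // -> { tokenDecimals: 10, tokenSymbol: "DOT" }

    // Array shape: the transform keeps the first entry of each array.
    DotNetworkProperties.parse({ tokenSymbol: ["ACA", "AUSD"], tokenDecimals: [12, 12] })
    // -> { tokenDecimals: 12, tokenSymbol: "ACA" }

    // Missing fields fall back to the schema defaults.
    DotNetworkProperties.parse({})
    // -> { tokenDecimals: 0, tokenSymbol: "Unit" }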
@@ -5415,29 +5084,187 @@ const SubNativeModule = hydrate => {
|
|
5415
5084
|
const getModuleTokens = async () => {
|
5416
5085
|
return await chaindataProvider.tokensByIdForType(moduleType$2);
|
5417
5086
|
};
|
5418
|
-
return {
|
5419
|
-
...DefaultBalanceModule(moduleType$2),
|
5420
|
-
async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc, systemProperties) {
|
5421
|
-
const isTestnet = (await chaindataProvider.chainById(chainId))?.isTestnet || false;
|
5422
|
-
if (moduleConfig?.disable === true || metadataRpc === undefined) return {
|
5423
|
-
isTestnet
|
5424
|
-
};
|
5425
5087
|
|
5426
|
-
|
5427
|
-
|
5428
|
-
|
5088
|
+
// subscribeBalances was split by network to prevent all subs to wait for all minimetadatas to be ready.
|
5089
|
+
// however the multichain logic in there is so deep in the function below that i had to keep it as-is, and call it by per-network chunks
|
5090
|
+
// TODO refactor this be actually network specific
|
5091
|
+
const subscribeChainBalances = async (chainId, opts, callback) => {
|
5092
|
+
const {
|
5093
|
+
addressesByToken,
|
5094
|
+
initialBalances
|
5095
|
+
} = opts;
|
5096
|
+
// full record of balances for this module
|
5097
|
+
const subNativeBalances = new BehaviorSubject(Object.fromEntries(initialBalances?.map(b => [getBalanceId(b), b]) ?? []));
|
5098
|
+
// tokens which have a known positive balance
|
5099
|
+
const positiveBalanceTokens = subNativeBalances.pipe(map(balances => Array.from(new Set(Object.values(balances).map(b => b.tokenId)))), share());
|
5100
|
+
|
5101
|
+
// tokens that will be subscribed to, simply a slice of the positive balance tokens of size MAX_SUBSCRIPTION_SIZE
|
5102
|
+
const subscriptionTokens = positiveBalanceTokens.pipe(map(tokens => tokens.sort(sortChains).slice(0, MAX_SUBSCRIPTION_SIZE)));
|
5103
|
+
|
5104
|
+
// an initialised balance is one where we have received a response for any type of 'subsource',
|
5105
|
+
// until then they are initialising. We only need to maintain one map of tokens to addresses for this
|
5106
|
+
const initialisingBalances = Object.entries(addressesByToken).reduce((acc, [tokenId, addresses]) => {
|
5107
|
+
acc.set(tokenId, new Set(addresses));
|
5108
|
+
return acc;
|
5109
|
+
}, new Map());
|
5110
|
+
|
5111
|
+
// after thirty seconds, we need to kill the initialising balances
|
5112
|
+
const initBalancesTimeout = setTimeout(() => {
|
5113
|
+
initialisingBalances.clear();
|
5114
|
+
// manually call the callback to ensure the caller gets the correct status
|
5115
|
+
callback(null, {
|
5116
|
+
status: "live",
|
5117
|
+
data: Object.values(subNativeBalances.getValue())
|
5118
|
+
});
|
5119
|
+
}, 30_000);
|
5120
|
+
const _callbackSub = subNativeBalances.pipe(debounceTime(100)).subscribe({
|
5121
|
+
next: balances => {
|
5122
|
+
callback(null, {
|
5123
|
+
status: initialisingBalances.size > 0 ? "initialising" : "live",
|
5124
|
+
data: Object.values(balances)
|
5125
|
+
});
|
5126
|
+
},
|
5127
|
+
error: error => callback(error),
|
5128
|
+
complete: () => {
|
5129
|
+
initialisingBalances.clear();
|
5130
|
+
clearTimeout(initBalancesTimeout);
|
5131
|
+
}
|
5132
|
+
});
|
5133
|
+
const unsubDeferred = Deferred();
|
5134
|
+
// we return this to the caller so that they can let us know when they're no longer interested in this subscription
|
5135
|
+
const callerUnsubscribe = () => {
|
5136
|
+
subNativeBalances.complete();
|
5137
|
+
_callbackSub.unsubscribe();
|
5138
|
+
return unsubDeferred.reject(new Error(`Caller unsubscribed`));
|
5139
|
+
};
|
5140
|
+
// we queue up our work to clean up our subscription when this promise rejects
|
5141
|
+
const callerUnsubscribed = unsubDeferred.promise;
|
5142
|
+
|
5143
|
+
// The update handler is to allow us to merge balances with the same id, and manage initialising and positive balances state for each
|
5144
|
+
// balance type and network
|
5145
|
+
const handleUpdateForSource = source => (error, result) => {
|
5146
|
+
if (result) {
|
5147
|
+
const currentBalances = subNativeBalances.getValue();
|
5148
|
+
|
5149
|
+
// first merge any balances with the same id within the result
|
5150
|
+
const accumulatedUpdates = result.filter(b => b.values.length > 0).reduce((acc, b) => {
|
5151
|
+
const bId = getBalanceId(b);
|
5152
|
+
acc[bId] = mergeBalances(acc[bId], b, source, false);
|
5153
|
+
return acc;
|
5154
|
+
}, {});
|
5155
|
+
|
5156
|
+
// then merge these with the current balances
|
5157
|
+
const mergedBalances = {};
|
5158
|
+
Object.entries(accumulatedUpdates).forEach(([bId, b]) => {
|
5159
|
+
// merge the values from the new balance into the existing balance, if there is one
|
5160
|
+
mergedBalances[bId] = mergeBalances(currentBalances[bId], b, source, true);
|
5161
|
+
|
5162
|
+
// update initialisingBalances to remove balances which have been updated
|
5163
|
+
const intialisingForToken = initialisingBalances.get(b.tokenId);
|
5164
|
+
if (intialisingForToken) {
|
5165
|
+
intialisingForToken.delete(b.address);
|
5166
|
+
if (intialisingForToken.size === 0) initialisingBalances.delete(b.tokenId);else initialisingBalances.set(b.tokenId, intialisingForToken);
|
5167
|
+
}
|
5168
|
+
});
|
5169
|
+
subNativeBalances.next({
|
5170
|
+
...currentBalances,
|
5171
|
+
...mergedBalances
|
5172
|
+
});
|
5173
|
+
}
|
5174
|
+
if (error) {
|
5175
|
+
if (error instanceof SubNativeBalanceError) {
|
5176
|
+
// this type of error doesn't need to be handled by the caller
|
5177
|
+
initialisingBalances.delete(error.tokenId);
|
5178
|
+
} else return callback(error);
|
5179
|
+
}
|
5180
|
+
};
|
5429
5181
|
|
5430
|
-
|
5431
|
-
|
5432
|
-
|
5433
|
-
|
5434
|
-
|
5435
|
-
|
5182
|
+
// subscribe to addresses and tokens for which we have a known positive balance
|
5183
|
+
const positiveSub = subscriptionTokens.pipe(debounceTime(1000), takeUntil(callerUnsubscribed), map(tokenIds => tokenIds.reduce((acc, tokenId) => {
|
5184
|
+
acc[tokenId] = addressesByToken[tokenId];
|
5185
|
+
return acc;
|
5186
|
+
}, {})), distinctUntilChanged(isEqual), switchMap(newAddressesByToken => {
|
5187
|
+
return from(queryCache.getQueries(newAddressesByToken)).pipe(switchMap(baseQueries => {
|
5188
|
+
return new Observable(subscriber => {
|
5189
|
+
if (!chainConnectors.substrate) return;
|
5190
|
+
const unsubSubtensorStaking = subscribeSubtensorStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("subtensor-staking"));
|
5191
|
+
const unsubNompoolStaking = subscribeNompoolStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("nompools-staking"));
|
5192
|
+
const unsubCrowdloans = subscribeCrowdloans(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("crowdloan"));
|
5193
|
+
const unsubBase = subscribeBase(baseQueries, chainConnectors.substrate, handleUpdateForSource("base"));
|
5194
|
+
subscriber.add(async () => (await unsubSubtensorStaking)());
|
5195
|
+
subscriber.add(async () => (await unsubNompoolStaking)());
|
5196
|
+
subscriber.add(async () => (await unsubCrowdloans)());
|
5197
|
+
subscriber.add(async () => (await unsubBase)());
|
5198
|
+
});
|
5199
|
+
}));
+})).subscribe();
+
+// for chains where we don't have a known positive balance, poll rather than subscribe
+const poll = async (addressesByToken = {}) => {
+const handleUpdate = handleUpdateForSource("base");
+try {
+const balances = await fetchBalances(addressesByToken);
+handleUpdate(null, Object.values(balances.toJSON()));
+} catch (error) {
+if (error instanceof ChainConnectionError) {
+// coerce ChainConnection errors into SubNativeBalance errors
+const errorChainId = error.chainId;
+Object.entries(await getModuleTokens()).filter(([, token]) => token.networkId === errorChainId).forEach(([tokenId]) => {
+const wrappedError = new SubNativeBalanceError(tokenId, error.message);
+handleUpdate(wrappedError);
+});
+} else {
+log.error("unknown substrate native balance error", error);
+handleUpdate(error);
+}
+}
+};
+// do one poll to get things started
+const currentBalances = subNativeBalances.getValue();
+const currentTokens = new Set(Object.values(currentBalances).map(b => b.tokenId));
+const nonCurrentTokens = Object.keys(addressesByToken).filter(tokenId => !currentTokens.has(tokenId)).sort(sortChains);
+
+// break nonCurrentTokens into chunks of POLLING_WINDOW_SIZE
+await PromisePool.withConcurrency(POLLING_WINDOW_SIZE).for(nonCurrentTokens).process(async nonCurrentTokenId => await poll({
+[nonCurrentTokenId]: addressesByToken[nonCurrentTokenId]
+}));
+
+// now poll every 30s on chains which are not subscriptionTokens
+// we chunk this observable into batches of positive token ids, to prevent eating all the websocket connections
+const pollingSub = interval(30_000) // emit values every 30 seconds
+.pipe(takeUntil(callerUnsubscribed), withLatestFrom(subscriptionTokens),
+// Combine latest value from subscriptionTokens with each interval tick
+map(([, subscribedTokenIds]) =>
+// Filter out tokens that are not subscribed
+Object.keys(addressesByToken).filter(tokenId => !subscribedTokenIds.includes(tokenId))), exhaustMap(tokenIds => from(arrayChunk(tokenIds, POLLING_WINDOW_SIZE)).pipe(concatMap(async tokenChunk => {
+// tokenChunk is a chunk of tokenIds with size POLLING_WINDOW_SIZE
+const pollingTokenAddresses = Object.fromEntries(tokenChunk.map(tokenId => [tokenId, addressesByToken[tokenId]]));
+await poll(pollingTokenAddresses);
+return true;
+})))).subscribe();
+return () => {
+callerUnsubscribe();
+positiveSub.unsubscribe();
+pollingSub.unsubscribe();
+};
+};
+const fetchBalances = async addressesByToken => {
+assert(chainConnectors.substrate, "This module requires a substrate chain connector");
+const queries = await queryCache.getQueries(addressesByToken);
+assert(chainConnectors.substrate, "This module requires a substrate chain connector");
+const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
+return new Balances(result ?? []);
+};
+return {
+...DefaultBalanceModule(moduleType$2),
+async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
+if (!metadataRpc) return {};
 
 //
 // process metadata into SCALE encoders/decoders
 //
 const metadataVersion = getMetadataVersion(metadataRpc);
+if (metadataVersion < 14) return {};
 const metadata = decAnyMetadata(metadataRpc);
 const unifiedMetadata = unifyMetadata(metadata);
 
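The added polling path above fetches balances for tokens with no active subscription in windows of POLLING_WINDOW_SIZE, then re-polls every 30 seconds for whatever is still unsubscribed. A minimal, self-contained sketch of that pattern follows; the window size, the pollChunk callback, and the PromisePool import (assumed to be @supercharge/promise-pool, which the bundle appears to use) are illustrative assumptions rather than part of this diff.

    // Sketch only: poll "not yet subscribed" tokens in fixed-size windows,
    // then keep re-polling on a 30s interval. exhaustMap skips a tick while
    // a previous polling round is still in flight.
    import { PromisePool } from '@supercharge/promise-pool';
    import { from, interval, exhaustMap, concatMap } from 'rxjs';

    const POLLING_WINDOW_SIZE = 5; // illustrative value

    const arrayChunk = (items, size) =>
      items.reduce((chunks, item, i) => {
        if (i % size === 0) chunks.push([]);
        chunks[chunks.length - 1].push(item);
        return chunks;
      }, []);

    // One-off pass: at most POLLING_WINDOW_SIZE polls in flight at a time.
    export const pollOnce = (tokenIds, pollChunk) =>
      PromisePool.withConcurrency(POLLING_WINDOW_SIZE)
        .for(tokenIds)
        .process((tokenId) => pollChunk([tokenId]));

    // Recurring pass: every 30s, poll the still-unsubscribed tokens one window at a time.
    export const pollForever = (getUnsubscribedTokenIds, pollChunk) =>
      interval(30_000)
        .pipe(
          exhaustMap(() =>
            from(arrayChunk(getUnsubscribedTokenIds(), POLLING_WINDOW_SIZE)).pipe(
              concatMap((chunk) => pollChunk(chunk))
            )
          )
        )
        .subscribe();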
@@ -5504,16 +5331,15 @@ const SubNativeModule = hydrate => {
 }) => name === "Freezes"));
 const useLegacyTransferableCalculation = !hasFreezesItem;
 const chainMeta = {
-isTestnet,
+// isTestnet,
 useLegacyTransferableCalculation,
-symbol,
-decimals,
+// symbol,
+// decimals,
 existentialDeposit,
 nominationPoolsPalletId,
 crowdloanPalletId,
 hasSubtensorPallet,
-miniMetadata
-metadataVersion
+miniMetadata
 };
 if (!useLegacyTransferableCalculation) delete chainMeta.useLegacyTransferableCalculation;
 if (!hasSubtensorPallet) delete chainMeta.hasSubtensorPallet;
@@ -5522,9 +5348,10 @@ const SubNativeModule = hydrate => {
 async fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig) {
 if (moduleConfig?.disable === true) return {};
 const {
-
-
-
+tokenSymbol: symbol,
+tokenDecimals: decimals
+} = await getChainProperties(chainConnector, chainId);
+const {
 existentialDeposit
 } = chainMeta;
 const id = subNativeTokenId(chainId);
@@ -5532,11 +5359,10 @@ const SubNativeModule = hydrate => {
 id,
 type: "substrate-native",
 platform: "polkadot",
-isTestnet,
 isDefault: moduleConfig?.isDefault ?? true,
-symbol: symbol
-name: moduleConfig?.name ?? symbol
-decimals: decimals
+symbol: symbol,
+name: moduleConfig?.name ?? symbol,
+decimals: decimals,
 logo: moduleConfig?.logo,
 existentialDeposit: existentialDeposit ?? "0",
 networkId: chainId
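With symbol and decimals dropped from the cached chain metadata, fetchSubstrateChainTokens now resolves them at token-build time through getChainProperties(chainConnector, chainId). That helper is defined outside this diff; the sketch below shows one plausible shape for it, assuming a chain connector that exposes a send(chainId, method, params) call and the standard system_properties RPC. Treat the names and fallback values as illustrative only.

    // Hypothetical sketch of a getChainProperties-style helper (not the real implementation).
    // system_properties conventionally returns { ss58Format, tokenSymbol, tokenDecimals },
    // where the token fields may be scalars or arrays on multi-asset chains.
    export const getChainPropertiesSketch = async (chainConnector, chainId) => {
      const properties = await chainConnector.send(chainId, 'system_properties', []);
      const first = (value) => (Array.isArray(value) ? value[0] : value);
      return {
        tokenSymbol: first(properties?.tokenSymbol) ?? 'Unit', // placeholder fallback
        tokenDecimals: first(properties?.tokenDecimals) ?? 0, // placeholder fallback
      };
    };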
@@ -5553,169 +5379,43 @@ const SubNativeModule = hydrate => {
 initialBalances
 }, callback) {
 assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-
-
-
-
-const positiveBalanceTokens = subNativeBalances.pipe(map(balances => Array.from(new Set(Object.values(balances).map(b => b.tokenId)))), share());
-
-// tokens that will be subscribed to, simply a slice of the positive balance tokens of size MAX_SUBSCRIPTION_SIZE
-const subscriptionTokens = positiveBalanceTokens.pipe(map(tokens => tokens.sort(sortChains).slice(0, MAX_SUBSCRIPTION_SIZE)));
-
-// an initialised balance is one where we have received a response for any type of 'subsource',
-// until then they are initialising. We only need to maintain one map of tokens to addresses for this
-const initialisingBalances = Object.entries(addressesByToken).reduce((acc, [tokenId, addresses]) => {
-acc.set(tokenId, new Set(addresses));
+const addressesByTokenByNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = parseSubNativeTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
 return acc;
-},
-
-
-
-
-
-
-
-
-
-}, 30_000);
-const _callbackSub = subNativeBalances.pipe(debounceTime(100)).subscribe({
-next: balances => {
-callback(null, {
-status: initialisingBalances.size > 0 ? "initialising" : "live",
-data: Object.values(balances)
-});
-},
-error: error => callback(error),
-complete: () => {
-initialisingBalances.clear();
-clearTimeout(initBalancesTimeout);
-}
-});
-const unsubDeferred = Deferred();
-// we return this to the caller so that they can let us know when they're no longer interested in this subscription
-const callerUnsubscribe = () => {
-subNativeBalances.complete();
-_callbackSub.unsubscribe();
-return unsubDeferred.reject(new Error(`Caller unsubscribed`));
-};
-// we queue up our work to clean up our subscription when this promise rejects
-const callerUnsubscribed = unsubDeferred.promise;
-
-// The update handler is to allow us to merge balances with the same id, and manage initialising and positive balances state for each
-// balance type and network
-const handleUpdateForSource = source => (error, result) => {
-if (result) {
-const currentBalances = subNativeBalances.getValue();
-
-// first merge any balances with the same id within the result
-const accumulatedUpdates = result.filter(b => b.values.length > 0).reduce((acc, b) => {
-const bId = getBalanceId(b);
-acc[bId] = mergeBalances(acc[bId], b, source, false);
-return acc;
-}, {});
-
-// then merge these with the current balances
-const mergedBalances = {};
-Object.entries(accumulatedUpdates).forEach(([bId, b]) => {
-// merge the values from the new balance into the existing balance, if there is one
-mergedBalances[bId] = mergeBalances(currentBalances[bId], b, source, true);
-
-// update initialisingBalances to remove balances which have been updated
-const intialisingForToken = initialisingBalances.get(b.tokenId);
-if (intialisingForToken) {
-intialisingForToken.delete(b.address);
-if (intialisingForToken.size === 0) initialisingBalances.delete(b.tokenId);else initialisingBalances.set(b.tokenId, intialisingForToken);
-}
-});
-subNativeBalances.next({
-...currentBalances,
-...mergedBalances
-});
-}
-if (error) {
-if (error instanceof SubNativeBalanceError) {
-// this type of error doesn't need to be handled by the caller
-initialisingBalances.delete(error.tokenId);
-} else return callback(error);
-}
+}, {});
+const initialBalancesByNetwork = groupBy$1(initialBalances ?? [], "networkId");
+const {
+abort,
+signal
+} = new AbortController();
+const safeCallback = (error, result) => {
+if (signal.aborted) return;
+// typescript isnt happy with fowarding parameters as is
+return error ? callback(error, undefined) : callback(error, result);
 };
-
-// subscribe to addresses and tokens for which we have a known positive balance
-const positiveSub = subscriptionTokens.pipe(debounceTime(1000), takeUntil(callerUnsubscribed), map(tokenIds => tokenIds.reduce((acc, tokenId) => {
-acc[tokenId] = addressesByToken[tokenId];
-return acc;
-}, {})), distinctUntilChanged(isEqual), switchMap(newAddressesByToken => {
-return from(queryCache.getQueries(newAddressesByToken)).pipe(switchMap(baseQueries => {
-return new Observable(subscriber => {
-if (!chainConnectors.substrate) return;
-const unsubSubtensorStaking = subscribeSubtensorStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("subtensor-staking"));
-const unsubNompoolStaking = subscribeNompoolStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("nompools-staking"));
-const unsubCrowdloans = subscribeCrowdloans(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("crowdloan"));
-const unsubBase = subscribeBase(baseQueries, chainConnectors.substrate, handleUpdateForSource("base"));
-subscriber.add(async () => (await unsubSubtensorStaking)());
-subscriber.add(async () => (await unsubNompoolStaking)());
-subscriber.add(async () => (await unsubCrowdloans)());
-subscriber.add(async () => (await unsubBase)());
-});
-}));
-})).subscribe();
-
-// for chains where we don't have a known positive balance, poll rather than subscribe
-const poll = async (addressesByToken = {}) => {
-const handleUpdate = handleUpdateForSource("base");
+const unsubsribeFns = Promise.all(keys(addressesByTokenByNetwork).map(async networkId => {
 try {
-
-
-
-
-
-
-Object.entries(await getModuleTokens()).filter(([, token]) => token.networkId === errorChainId).forEach(([tokenId]) => {
-const wrappedError = new SubNativeBalanceError(tokenId, error.message);
-handleUpdate(wrappedError);
-});
-} else {
-log.error("unknown substrate native balance error", error);
-handleUpdate(error);
-}
+// this is what we want to be done separately for each network
+// this will update the DB so minimetadata will be available when it's used, veeeeery far down the tree of subscribeChainBalances
+await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$2, signal);
+} catch (err) {
+if (!signal.aborted) log.warn("Failed to get native token miniMetadata for network", networkId, err);
+return () => {};
 }
-
-
-
-
-
-
-// break nonCurrentTokens into chunks of POLLING_WINDOW_SIZE
-await PromisePool.withConcurrency(POLLING_WINDOW_SIZE).for(nonCurrentTokens).process(async nonCurrentTokenId => await poll({
-[nonCurrentTokenId]: addressesByToken[nonCurrentTokenId]
+if (signal.aborted) return () => {};
+return subscribeChainBalances(networkId, {
+addressesByToken: addressesByTokenByNetwork[networkId] ?? {},
+initialBalances: initialBalancesByNetwork[networkId] ?? []
+}, safeCallback);
 }));
-
-// now poll every 30s on chains which are not subscriptionTokens
-// we chunk this observable into batches of positive token ids, to prevent eating all the websocket connections
-const pollingSub = interval(30_000) // emit values every 30 seconds
-.pipe(takeUntil(callerUnsubscribed), withLatestFrom(subscriptionTokens),
-// Combine latest value from subscriptionTokens with each interval tick
-map(([, subscribedTokenIds]) =>
-// Filter out tokens that are not subscribed
-Object.keys(addressesByToken).filter(tokenId => !subscribedTokenIds.includes(tokenId))), exhaustMap(tokenIds => from(arrayChunk(tokenIds, POLLING_WINDOW_SIZE)).pipe(concatMap(async tokenChunk => {
-// tokenChunk is a chunk of tokenIds with size POLLING_WINDOW_SIZE
-const pollingTokenAddresses = Object.fromEntries(tokenChunk.map(tokenId => [tokenId, addressesByToken[tokenId]]));
-await poll(pollingTokenAddresses);
-return true;
-})))).subscribe();
 return () => {
-
-
-pollingSub.unsubscribe();
+abort();
+unsubsribeFns.then(fns => fns.forEach(unsubscribe => unsubscribe()));
 };
 },
-
-assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const queries = await queryCache.getQueries(addressesByToken);
-assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
-return new Balances(result ?? []);
-},
+fetchBalances,
 async transferToken({
 tokenId,
 from,
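The rewritten subscribeBalances above replaces the single shared BehaviorSubject pipeline with one subscription per network, all torn down through a single AbortController. The general shape of that fan-out/teardown pattern, extracted from the code shown above (subscribeNetwork here is a stand-in for subscribeChainBalances, which is not defined in this excerpt):

    // Sketch: start one async subscription per network and return a synchronous
    // unsubscribe that aborts pending setup and unwinds whatever did get created.
    export const subscribeAllNetworks = (addressesByTokenByNetwork, subscribeNetwork, callback) => {
      const controller = new AbortController();

      // drop results that arrive after the caller has unsubscribed
      const safeCallback = (error, result) => {
        if (controller.signal.aborted) return;
        callback(error, result);
      };

      const pUnsubs = Promise.all(
        Object.entries(addressesByTokenByNetwork).map(async ([networkId, addressesByToken]) => {
          try {
            if (controller.signal.aborted) return () => {};
            return await subscribeNetwork(networkId, addressesByToken, safeCallback, controller.signal);
          } catch (err) {
            if (!controller.signal.aborted) console.warn('subscription failed for network', networkId, err);
            return () => {}; // a no-op unsubscribe keeps the teardown loop uniform
          }
        })
      );

      return () => {
        controller.abort();
        pUnsubs.then((unsubs) => unsubs.forEach((unsubscribe) => unsubscribe()));
      };
    };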
@@ -7187,7 +6887,6 @@ const SubTokensModule = hydrate => {
 ...DefaultBalanceModule(moduleType),
 async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
 if (metadataRpc === undefined) return {};
-if ((moduleConfig?.tokens ?? []).length < 1) return {};
 const metadata = decAnyMetadata(metadataRpc);
 const palletId = moduleConfig?.palletId ?? defaultPalletId;
 compactMetadata(metadata, [{
@@ -7239,17 +6938,39 @@ const SubTokensModule = hydrate => {
 async subscribeBalances({
 addressesByToken
 }, callback) {
-const
-
-if (
-
-
-});
-
+const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = parseSubTokensTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+const controller = new AbortController();
+const pUnsubs = Promise.all(toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
+try {
+const queries = await buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, controller.signal);
+if (controller.signal.aborted) return () => {};
+const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
+return await stateHelper.subscribe((error, result) => {
+// console.log("SubstrateAssetsModule.callback", { error, result })
+if (error) return callback(error);
+const balances = result?.filter(b => b !== null) ?? [];
+if (balances.length > 0) callback(null, new Balances(balances));
+});
+} catch (err) {
+if (!controller.signal.aborted) log.error(`Failed to subscribe balances for network ${networkId}`, err);
+return () => {};
+}
+}));
+return () => {
+controller.abort();
+pUnsubs.then(unsubs => {
+unsubs.forEach(unsubscribe => unsubscribe());
+});
+};
 },
 async fetchBalances(addressesByToken) {
 assert(chainConnectors.substrate, "This module requires a substrate chain connector");
-const queries = await buildQueries(chaindataProvider, addressesByToken);
+const queries = await buildQueries(chainConnector, chaindataProvider, addressesByToken);
 const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
 const balances = result?.filter(b => b !== null) ?? [];
 return new Balances(balances);
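SubTokensModule (like SubNativeModule above) now splits the flat addressesByToken map into per-network maps before touching the chain. The grouping step in isolation, with a hypothetical example id (any token-id parser that exposes networkId, such as parseSubTokensTokenId, would work):

    // Sketch: split { tokenId: addresses[] } into one map per network.
    export const groupAddressesByNetwork = (addressesByToken, parseTokenId) =>
      Object.keys(addressesByToken).reduce((acc, tokenId) => {
        const { networkId } = parseTokenId(tokenId);
        if (!acc[networkId]) acc[networkId] = {};
        acc[networkId][tokenId] = addressesByToken[tokenId];
        return acc;
      }, {});

    // e.g. (hypothetical ids):
    // groupAddressesByNetwork({ 'kusama-tokens-xyz': ['5F…'] }, parseSubTokensTokenId)
    //   -> { kusama: { 'kusama-tokens-xyz': ['5F…'] } }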
@@ -7368,23 +7089,16 @@ const SubTokensModule = hydrate => {
 }
 };
 };
-async function
-const
+async function buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
+const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType, signal);
+const chain = await chaindataProvider.chainById(networkId);
 const tokens = await chaindataProvider.tokensById();
-
-
-const
-const
-const
-
-chains,
-miniMetadatas,
-moduleType: "substrate-tokens",
-coders: {
-storage: ({
-chainId
-}) => [tokensPalletByChain.get(chainId) ?? defaultPalletId, "Accounts"]
-}
+if (!chain) return [];
+signal?.throwIfAborted();
+const tokensMetadata = miniMetadata;
+const palletId = tokensMetadata.palletId ?? defaultPalletId;
+const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
+storage: [palletId, "Accounts"]
 });
 return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
 const token = tokens[tokenId];
@@ -7396,18 +7110,8 @@ async function buildQueries(chaindataProvider, addressesByToken) {
 log.debug(`This module doesn't handle tokens of type ${token.type}`);
 return [];
 }
-const networkId = token.networkId;
-if (!networkId) {
-log.warn(`Token ${tokenId} has no chain`);
-return [];
-}
-const chain = chains[networkId];
-if (!chain) {
-log.warn(`Chain ${networkId} for token ${tokenId} not found`);
-return [];
-}
 return addresses.flatMap(address => {
-const scaleCoder =
+const scaleCoder = networkStorageCoders?.storage;
 const onChainId = (() => {
 try {
 return papiParse(token.onChainId);
@@ -7458,33 +7162,18 @@ async function buildQueries(chaindataProvider, addressesByToken) {
 });
 });
 }
+async function buildQueries(chainConnector, chaindataProvider, addressesByToken, signal) {
+const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
+const networkId = parseSubTokensTokenId(tokenId).networkId;
+if (!acc[networkId]) acc[networkId] = {};
+acc[networkId][tokenId] = addressesByToken[tokenId];
+return acc;
+}, {});
+return (await Promise.all(toPairs(byNetwork).map(([networkId, addressesByToken]) => {
+return buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, signal);
+}))).flat();
+}
 
 const defaultBalanceModules = [EvmErc20Module, EvmNativeModule, EvmUniswapV2Module, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule];
 
-
-const hydrateChaindataAndMiniMetadata = async (chaindataProvider, miniMetadataUpdater) => {
-// need chains to be provisioned first, or substrate balances won't fetch on first subscription
-await chaindataProvider.hydrate();
-await Promise.all([miniMetadataUpdater.hydrateFromChaindata(), miniMetadataUpdater.hydrateCustomChains()]);
-const chains = await chaindataProvider.chains();
-const {
-statusesByChain
-} = await miniMetadataUpdater.statuses(chains);
-const goodChains = [...statusesByChain.entries()].flatMap(([chainId, status]) => status === "good" ? chainId : []);
-await chaindataProvider.hydrateSubstrateTokens(goodChains);
-};
-
-/** Builds any missing miniMetadatas (e.g. for the user's custom substrate chains) */
-const updateCustomMiniMetadata = async (chaindataProvider, miniMetadataUpdater) => {
-const chainIds = await chaindataProvider.chainIds();
-await miniMetadataUpdater.update(chainIds);
-};
-
-/** Fetches any missing Evm Tokens */
-const updateEvmTokens = async (chaindataProvider, evmTokenFetcher) => {
-await chaindataProvider.hydrate();
-const evmNetworkIds = await chaindataProvider.evmNetworkIds();
-await evmTokenFetcher.update(evmNetworkIds);
-};
-
-export { Balance, BalanceFormatter, BalanceValueGetter, Balances, Change24hCurrencyFormatter, DefaultBalanceModule, EvmErc20Module, EvmNativeModule, EvmTokenFetcher, EvmUniswapV2Module, FiatSumBalancesFormatter, MiniMetadataUpdater, ONE_ALPHA_TOKEN, PlanckSumBalancesFormatter, RpcStateQueryHelper, SCALE_FACTOR, SUBTENSOR_MIN_STAKE_AMOUNT_PLANK, SUBTENSOR_ROOT_NETUID, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule, SumBalancesFormatter, TalismanBalancesDatabase, abiMulticall, balances, buildStorageCoders, calculateAlphaPrice, calculateTaoAmountFromAlpha, calculateTaoFromDynamicInfo, compress, configureStore, db, decodeOutput, decompress, defaultBalanceModules, deriveMiniMetadataId, detectTransferMethod, erc20Abi, erc20BalancesAggregatorAbi, excludeFromFeePayableLocks, excludeFromTransferableAmount, filterBaseLocks, filterMirrorTokens, findChainMeta, getBalanceId, getLockTitle, getUniqueChainIds, getValueId, hydrateChaindataAndMiniMetadata, includeInTotalExtraAmount, makeContractCaller, uniswapV2PairAbi, updateCustomMiniMetadata, updateEvmTokens };
+export { Balance, BalanceFormatter, BalanceValueGetter, Balances, Change24hCurrencyFormatter, DefaultBalanceModule, EvmErc20Module, EvmNativeModule, EvmTokenFetcher, EvmUniswapV2Module, FiatSumBalancesFormatter, ONE_ALPHA_TOKEN, PlanckSumBalancesFormatter, RpcStateQueryHelper, SCALE_FACTOR, SUBTENSOR_MIN_STAKE_AMOUNT_PLANK, SUBTENSOR_ROOT_NETUID, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule, SumBalancesFormatter, TalismanBalancesDatabase, abiMulticall, balances, buildNetworkStorageCoders, buildStorageCoders, calculateAlphaPrice, calculateTaoAmountFromAlpha, calculateTaoFromDynamicInfo, compress, configureStore, db, decodeOutput, decompress, defaultBalanceModules, deriveMiniMetadataId, detectTransferMethod, erc20Abi, erc20BalancesAggregatorAbi, excludeFromFeePayableLocks, excludeFromTransferableAmount, filterBaseLocks, filterMirrorTokens, findChainMeta, getBalanceId, getLockTitle, getUniqueChainIds, getValueId, includeInTotalExtraAmount, makeContractCaller, uniswapV2PairAbi };