@talismn/balances 0.0.0-pr2043-20250618091117 → 0.0.0-pr2043-20250619003406

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
@@ -1,19 +1,19 @@
- import { fetchInitMiniMetadatas, evmErc20TokenId as evmErc20TokenId$1, EvmErc20TokenSchema, evmNativeTokenId, evmUniswapV2TokenId, githubTokenLogoUrl, parseSubAssetTokenId, subAssetTokenId, subForeignAssetTokenId, subNativeTokenId, subPsp22TokenId, subTokensTokenId } from '@talismn/chaindata-provider';
  import { Dexie, liveQuery } from 'dexie';
- import { from, Observable, scan, share, map, switchAll, combineLatest, mergeMap, toArray, interval, startWith, exhaustMap, pipe, filter, shareReplay, combineLatestWith, distinctUntilChanged, firstValueFrom, BehaviorSubject, debounceTime, takeUntil, switchMap, withLatestFrom, concatMap } from 'rxjs';
  import anylogger from 'anylogger';
  import { newTokenRates } from '@talismn/token-rates';
- import { isBigInt, BigMath, planckToTokens, isTruthy, isArrayOf, isEthereumAddress, hasOwnProperty, decodeAnyAddress, isNotNil, blake2Concat, firstThenDebounce, Deferred } from '@talismn/util';
+ import { isBigInt, BigMath, planckToTokens, isArrayOf, isTruthy, isEthereumAddress, hasOwnProperty, decodeAnyAddress, isNotNil, blake2Concat, firstThenDebounce, Deferred } from '@talismn/util';
  import BigNumber from 'bignumber.js';
  import { u8aToHex, assert, stringCamelCase, u8aConcatStrict, u8aConcat, arrayChunk, u8aToString, hexToNumber, hexToU8a } from '@polkadot/util';
  import { xxhashAsU8a, blake2AsU8a } from '@polkadot/util-crypto';
  import pako from 'pako';
+ import { evmErc20TokenId as evmErc20TokenId$1, EvmErc20TokenSchema, evmNativeTokenId, evmUniswapV2TokenId, githubTokenLogoUrl, parseSubAssetTokenId, subAssetTokenId, subForeignAssetTokenId, parseSubNativeTokenId, subNativeTokenId, subPsp22TokenId, parseSubTokensTokenId, subTokensTokenId } from '@talismn/chaindata-provider';
  import { parseAbi, isHex, hexToBigInt } from 'viem';
  import isEqual from 'lodash/isEqual';
  import { defineMethod } from '@substrate/txwrapper-core';
- import { unifyMetadata, decAnyMetadata, getDynamicBuilder, getLookupFn, getMetadataVersion, compactMetadata, encodeMetadata, decodeScale, papiParse, encodeStateKey } from '@talismn/scale';
- import { keys, toPairs } from 'lodash';
+ import { unifyMetadata, decAnyMetadata, getDynamicBuilder, getLookupFn, compactMetadata, encodeMetadata, decodeScale, papiParse, getMetadataVersion, encodeStateKey } from '@talismn/scale';
+ import { keys, toPairs, groupBy as groupBy$1 } from 'lodash';
  import camelCase from 'lodash/camelCase';
+ import PQueue from 'p-queue';
  import { fetchBestMetadata, getScaleApi } from '@talismn/sapi';
  import { Metadata, TypeRegistry } from '@polkadot/types';
  import groupBy from 'lodash/groupBy';
@@ -21,8 +21,10 @@ import { mergeUint8, toHex } from '@polkadot-api/utils';
  import { Binary, AccountId } from 'polkadot-api';
  import PromisePool from '@supercharge/promise-pool';
  import { ChainConnectionError } from '@talismn/chain-connector';
+ import { Observable, scan, share, map, switchAll, combineLatest, from, mergeMap, toArray, interval, startWith, exhaustMap, pipe, filter, shareReplay, combineLatestWith, distinctUntilChanged, firstValueFrom, BehaviorSubject, debounceTime, takeUntil, switchMap, withLatestFrom, concatMap } from 'rxjs';
  import { u32, u128, Struct } from 'scale-ts';
  import upperFirst from 'lodash/upperFirst';
+ import z from 'zod/v4';
  import { Abi } from '@polkadot/api-contract';
 
  // TODO: Document default balances module purpose/usage
@@ -107,13 +109,11 @@ class EvmTokenFetcher {
  // }
  }
 
- var packageJson = {
+ var pkg = {
  name: "@talismn/balances",
- version: "0.0.0-pr2043-20250618091117"};
+ version: "0.0.0-pr2043-20250619003406"};
 
- const libVersion = packageJson.version;
-
- var log = anylogger(packageJson.name);
+ var log = anylogger(pkg.name);
 
  function excludeFromTransferableAmount(locks) {
  if (typeof locks === "string") return BigInt(locks);
@@ -323,20 +323,13 @@ class Balances {
  return new SumBalancesFormatter(this);
  }
  }
-
- // type BalanceJsonEvm = BalanceJson & { evmNetworkId: string }
-
- // const isBalanceEvm = (balance: BalanceJson): balance is BalanceJsonEvm => "evmNetworkId" in balance
-
  const getBalanceId = balance => {
  const {
  source,
  address,
- tokenId,
- networkId
+ tokenId
  } = balance;
- //const locationId = isBalanceEvm(balance) ? balance.evmNetworkId : balance.chainId
- return [source, address, networkId, tokenId].filter(isTruthy).join("::");
+ return [source, address, tokenId].join("::");
  };
 
  /**
@@ -1048,257 +1041,6 @@ class TalismanBalancesDatabase extends Dexie {
1048
1041
  }
1049
1042
  const db = new TalismanBalancesDatabase();
1050
1043
 
1051
- const minimumHydrationInterval = 300_000; // 300_000ms = 300s = 5 minutes
1052
-
1053
- /**
1054
- * A substrate dapp needs access to a set of types when it wants to communicate with a blockchain node.
1055
- *
1056
- * These types are used to encode requests & decode responses via the SCALE codec.
1057
- * Each chain generally has its own set of types.
1058
- *
1059
- * Substrate provides a construct to retrieve these types from a blockchain node.
1060
- * The chain metadata.
1061
- *
1062
- * The metadata includes the types required for any communication with the chain,
1063
- * including lots of methods which are not relevant to balance fetching.
1064
- *
1065
- * As such, the metadata can clock in at around 1-2MB per chain, which is a lot of storage
1066
- * for browser-based dapps which want to connect to lots of chains.
1067
- *
1068
- * By utilizing the wonderful [scale-ts](https://github.com/unstoppablejs/unstoppablejs/tree/main/packages/scale-ts#readme) library,
1069
- * we can trim the chain metadata down so that it only includes the types we need for balance fetching.
1070
- *
1071
- * Each balance module has a function to do just that, `BalanceModule::fetchSubstrateChainMeta`.
1072
- *
1073
- * But, we only want to run this operation when necessary.
1074
- *
1075
- * The purpose of this class, `MiniMetadataUpdater`, is to maintain a local cache of
1076
- * trimmed-down metadatas, which we'll refer to as `MiniMetadatas`.
1077
- */
1078
- class MiniMetadataUpdater {
1079
- #lastHydratedMiniMetadatasAt = 0;
1080
- #lastHydratedCustomChainsAt = 0;
1081
- #chainConnectors;
1082
- #chaindataProvider;
1083
- #balanceModules;
1084
- constructor(chainConnectors, chaindataProvider, balanceModules) {
1085
- this.#chainConnectors = chainConnectors;
1086
- this.#chaindataProvider = chaindataProvider;
1087
- this.#balanceModules = balanceModules;
1088
- }
1089
-
1090
- /** Subscribe to the metadata for a chain */
1091
- subscribe(chainId) {
1092
- return from(liveQuery(() => db.miniMetadatas.filter(m => m.chainId === chainId).toArray().then(array => array[0])));
1093
- }
1094
- async update(chainIds) {
1095
- await this.updateSubstrateChains(chainIds);
1096
- }
1097
- async statuses(chains) {
1098
- const ids = await db.miniMetadatas.orderBy("id").primaryKeys();
1099
- const wantedIdsByChain = new Map(chains.flatMap(({
1100
- id: chainId,
1101
- specName,
1102
- specVersion
1103
- }) => {
1104
- if (specName === null) return [];
1105
- if (specVersion === null) return [];
1106
- return [[chainId, this.#balanceModules.filter(m => m.type.startsWith("substrate-")).map(({
1107
- type: source
1108
- }) => deriveMiniMetadataId({
1109
- source,
1110
- chainId,
1111
- specVersion,
1112
- libVersion
1113
- }))]];
1114
- }));
1115
- const statusesByChain = new Map(Array.from(wantedIdsByChain.entries()).map(([chainId, wantedIds]) => [chainId, wantedIds.every(wantedId => ids.includes(wantedId)) ? "good" : "none"]));
1116
- return {
1117
- wantedIdsByChain,
1118
- statusesByChain
1119
- };
1120
- }
1121
- async hydrateFromChaindata() {
1122
- // TODO review this. feels unnecessary to fetch them all
1123
-
1124
- const now = Date.now();
1125
- if (now - this.#lastHydratedMiniMetadatasAt < minimumHydrationInterval) return false;
1126
- const dbHasMiniMetadatas = (await db.miniMetadatas.count()) > 0;
1127
- try {
1128
- try {
1129
- var miniMetadatas = await this.#chaindataProvider.miniMetadatas(); // eslint-disable-line no-var
1130
- if (miniMetadatas.length <= 0) throw new Error("Ignoring empty chaindata miniMetadatas response");
1131
- } catch (error) {
1132
- if (dbHasMiniMetadatas) throw error;
1133
- log.warn("Failed to fetch miniMetadatas from chaindata", error);
1134
- // On first start-up (db is empty), if we fail to fetch miniMetadatas then we should
1135
- // initialize the DB with the list of miniMetadatas inside our init/mini-metadatas.json file.
1136
- // This data will represent a relatively recent copy of what's in chaindata,
1137
- // which will be better for our users than to have nothing at all.
1138
- var miniMetadatas = await fetchInitMiniMetadatas(); // eslint-disable-line no-var
1139
- }
1140
- await db.miniMetadatas.bulkPut(miniMetadatas);
1141
- this.#lastHydratedMiniMetadatasAt = now;
1142
- return true;
1143
- } catch (error) {
1144
- log.warn(`Failed to hydrate miniMetadatas from chaindata`, error);
1145
- return false;
1146
- }
1147
- }
1148
- async hydrateCustomChains() {
1149
- // TODO
1150
- // const now = Date.now()
1151
- // if (now - this.#lastHydratedCustomChainsAt < minimumHydrationInterval) return false
1152
- // const chains = await this.#chaindataProvider.chains()
1153
- // const customChains = chains.filter(
1154
- // (chain): chain is CustomChain => "isCustom" in chain && chain.isCustom,
1155
- // )
1156
- // const updatedCustomChains: Array<CustomChain> = []
1157
- // const concurrency = 4
1158
- // ;(
1159
- // await PromisePool.withConcurrency(concurrency)
1160
- // .for(customChains)
1161
- // .process(async (customChain) => {
1162
- // const send = (method: string, params: unknown[]) =>
1163
- // this.#chainConnectors.substrate?.send(customChain.id, method, params)
1164
- // const [genesisHash, runtimeVersion, chainName, chainType] = await Promise.all([
1165
- // send("chain_getBlockHash", [0]),
1166
- // send("state_getRuntimeVersion", []),
1167
- // send("system_chain", []),
1168
- // send("system_chainType", []),
1169
- // ])
1170
- // // deconstruct rpc data
1171
- // const { specName, implName } = runtimeVersion
1172
- // const specVersion = String(runtimeVersion.specVersion)
1173
- // const changed =
1174
- // customChain.genesisHash !== genesisHash ||
1175
- // customChain.chainName !== chainName ||
1176
- // !isEqual(customChain.chainType, chainType) ||
1177
- // customChain.implName !== implName ||
1178
- // customChain.specName !== specName ||
1179
- // customChain.specVersion !== specVersion
1180
- // if (!changed) return
1181
- // customChain.genesisHash = genesisHash
1182
- // customChain.chainName = chainName
1183
- // customChain.chainType = chainType
1184
- // customChain.implName = implName
1185
- // customChain.specName = specName
1186
- // customChain.specVersion = specVersion
1187
- // updatedCustomChains.push(customChain)
1188
- // })
1189
- // ).errors.forEach((error) => log.error("Error hydrating custom chains", error))
1190
- // if (updatedCustomChains.length > 0) {
1191
- // await this.#chaindataProvider.transaction("rw", ["chains"], async () => {
1192
- // for (const updatedCustomChain of updatedCustomChains) {
1193
- // await this.#chaindataProvider.removeCustomChain(updatedCustomChain.id)
1194
- // await this.#chaindataProvider.addCustomChain(updatedCustomChain)
1195
- // }
1196
- // })
1197
- // }
1198
- // if (updatedCustomChains.length > 0) this.#lastHydratedCustomChainsAt = now
1199
- // return true
1200
- }
1201
- async updateSubstrateChains(_chainIds) {
1202
- // const chains = new Map(
1203
- // (await this.#chaindataProvider.chains()).map((chain) => [chain.id, chain]),
1204
- // )
1205
- // const filteredChains = chainIds.flatMap((chainId) => chains.get(chainId) ?? [])
1206
- // const ids = await balancesDb.miniMetadatas.orderBy("id").primaryKeys()
1207
- // const { wantedIdsByChain, statusesByChain } = await this.statuses(filteredChains)
1208
- // // clean up store
1209
- // const wantedIds = Array.from(wantedIdsByChain.values()).flatMap((ids) => ids)
1210
- // const unwantedIds = ids.filter((id) => !wantedIds.includes(id))
1211
- // if (unwantedIds.length > 0) {
1212
- // const chainIds = Array.from(
1213
- // new Set((await balancesDb.miniMetadatas.bulkGet(unwantedIds)).map((m) => m?.chainId)),
1214
- // )
1215
- // log.info(`Pruning ${unwantedIds.length} miniMetadatas on chains ${chainIds.join(", ")}`)
1216
- // await balancesDb.miniMetadatas.bulkDelete(unwantedIds)
1217
- // }
1218
- // const needUpdates = Array.from(statusesByChain.entries())
1219
- // .filter(([, status]) => status !== "good")
1220
- // .map(([chainId]) => chainId)
1221
- // if (needUpdates.length > 0)
1222
- // log.info(`${needUpdates.length} miniMetadatas need updates (${needUpdates.join(", ")})`)
1223
- // const availableTokenLogos = await availableTokenLogoFilenames().catch((error) => {
1224
- // log.error("Failed to fetch available token logos", error)
1225
- // return []
1226
- // })
1227
- // const concurrency = 12
1228
- // ;(
1229
- // await PromisePool.withConcurrency(concurrency)
1230
- // .for(needUpdates)
1231
- // .process(async (chainId) => {
1232
- // log.info(`Updating metadata for chain ${chainId}`)
1233
- // const chain = chains.get(chainId)
1234
- // if (!chain) return
1235
- // const { specName, specVersion } = chain
1236
- // if (specName === null) return
1237
- // if (specVersion === null) return
1238
- // const fetchMetadata = async () => {
1239
- // try {
1240
- // return await fetchBestMetadata(
1241
- // (method, params, isCacheable) => {
1242
- // if (!this.#chainConnectors.substrate)
1243
- // throw new Error("Substrate connector is not available")
1244
- // return this.#chainConnectors.substrate.send(chainId, method, params, isCacheable)
1245
- // },
1246
- // true, // allow v14 fallback
1247
- // )
1248
- // } catch (err) {
1249
- // log.warn(`Failed to fetch metadata for chain ${chainId}`)
1250
- // return undefined
1251
- // }
1252
- // }
1253
- // const [metadataRpc, systemProperties] = await Promise.all([
1254
- // fetchMetadata(),
1255
- // this.#chainConnectors.substrate?.send(chainId, "system_properties", []),
1256
- // ])
1257
- // for (const mod of this.#balanceModules.filter((m) => m.type.startsWith("substrate-"))) {
1258
- // const balancesConfig = (chain.balancesConfig ?? []).find(
1259
- // ({ moduleType }) => moduleType === mod.type,
1260
- // )
1261
- // const moduleConfig = balancesConfig?.moduleConfig ?? {}
1262
- // const chainMeta = await mod.fetchSubstrateChainMeta(
1263
- // chainId,
1264
- // moduleConfig,
1265
- // metadataRpc,
1266
- // systemProperties,
1267
- // )
1268
- // const tokens = await mod.fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig)
1269
- // // update tokens in chaindata
1270
- // await this.#chaindataProvider.updateChainTokens(
1271
- // chainId,
1272
- // mod.type,
1273
- // Object.values(tokens),
1274
- // availableTokenLogos,
1275
- // )
1276
- // // update miniMetadatas
1277
- // const { miniMetadata: data, metadataVersion: version, ...extra } = chainMeta ?? {}
1278
- // await balancesDb.miniMetadatas.put({
1279
- // id: deriveMiniMetadataId({
1280
- // source: mod.type,
1281
- // chainId,
1282
- // specName,
1283
- // specVersion,
1284
- // balancesConfig: JSON.stringify(moduleConfig),
1285
- // }),
1286
- // source: mod.type,
1287
- // chainId,
1288
- // specName,
1289
- // specVersion,
1290
- // balancesConfig: JSON.stringify(moduleConfig),
1291
- // // TODO: Standardise return value from `fetchSubstrateChainMeta`
1292
- // version,
1293
- // data,
1294
- // extra: JSON.stringify(extra),
1295
- // })
1296
- // }
1297
- // })
1298
- // ).errors.forEach((error) => log.error("Error updating chain metadata", error))
1299
- }
1300
- }
1301
-
1302
1044
  const erc20Abi = [{
1303
1045
  constant: true,
1304
1046
  inputs: [],
@@ -2940,12 +2682,14 @@ async function getPoolBalance(publicClient, contractAddress, accountAddress) {
  }
  }
 
+ const libVersion = pkg.version;
+
  // cache the promise so it can be shared across multiple calls
  const CACHE_GET_SPEC_VERSION = new Map();
  const fetchSpecVersion = async (chainConnector, networkId) => {
  const {
  specVersion
- } = await chainConnector.send(networkId, "state_getRuntimeVersion", [true]);
+ } = await chainConnector.send(networkId, "state_getRuntimeVersion", [], true);
  return specVersion;
  };
 
@@ -2987,9 +2731,16 @@ const getMetadataRpc = async (chainConnector, networkId) => {
 
  // share requests as all modules will call this at once
  const CACHE = new Map();
- const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, specVersion) => {
+
+ // ensures we dont fetch miniMetadatas on more than 4 chains at once
+ const POOL = new PQueue({
+ concurrency: 4
+ });
+ const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, specVersion, signal) => {
  if (CACHE.has(networkId)) return CACHE.get(networkId);
- const pResult = fetchMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion);
+ const pResult = POOL.add(() => fetchMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion), {
+ signal
+ });
  CACHE.set(networkId, pResult);
  try {
  return await pResult;
@@ -3001,49 +2752,57 @@ const getMiniMetadatas = async (chainConnector, chaindataProvider, networkId, sp
3001
2752
  CACHE.delete(networkId);
3002
2753
  }
3003
2754
  };
3004
- const fetchMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion) => {
3005
- const metadataRpc = await getMetadataRpc(chainConnector, chainId);
3006
- const chainConnectors = {
3007
- substrate: chainConnector
3008
- };
3009
- const modules = defaultBalanceModules.map(mod => mod({
3010
- chainConnectors,
3011
- chaindataProvider
3012
- })).filter(mod => mod.type.startsWith("substrate-"));
3013
- return Promise.all(modules.map(async mod => {
3014
- const source = mod.type;
3015
- const chainMeta = await mod.fetchSubstrateChainMeta(chainId, {}, metadataRpc, {});
3016
- return {
3017
- id: deriveMiniMetadataId({
2755
+ const fetchMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion, signal) => {
2756
+ const start = performance.now();
2757
+ log.debug("[miniMetadata] fetching minimetadatas for %s", chainId);
2758
+ try {
2759
+ const metadataRpc = await getMetadataRpc(chainConnector, chainId);
2760
+ signal?.throwIfAborted();
2761
+ const chainConnectors = {
2762
+ substrate: chainConnector,
2763
+ evm: {} // wont be used but workarounds error for module creation
2764
+ };
2765
+ const modules = defaultBalanceModules.map(mod => mod({
2766
+ chainConnectors,
2767
+ chaindataProvider
2768
+ })).filter(mod => mod.type.startsWith("substrate-"));
2769
+ return Promise.all(modules.map(async mod => {
2770
+ const source = mod.type;
2771
+ const chainMeta = await mod.fetchSubstrateChainMeta(chainId, {}, metadataRpc);
2772
+ return {
2773
+ id: deriveMiniMetadataId({
2774
+ source,
2775
+ chainId,
2776
+ specVersion,
2777
+ libVersion
2778
+ }),
3018
2779
  source,
3019
2780
  chainId,
3020
2781
  specVersion,
3021
- libVersion
3022
- }),
3023
- source,
3024
- chainId,
3025
- specVersion,
3026
- libVersion,
3027
- data: chainMeta?.miniMetadata ?? null
3028
- };
3029
- }));
2782
+ libVersion,
2783
+ data: chainMeta?.miniMetadata ?? null
2784
+ };
2785
+ }));
2786
+ } finally {
2787
+ log.debug("[miniMetadata] updated miniMetadatas for %s in %sms", chainId, performance.now() - start);
2788
+ }
3030
2789
  };
3031
2790
 
3032
- const getUpdatedMiniMetadatas = async (chainConnector, chaindataProvider, networkId, specVersion) => {
3033
- const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider, networkId, specVersion);
2791
+ const getUpdatedMiniMetadatas = async (chainConnector, chaindataProvider, chainId, specVersion, signal) => {
2792
+ const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion, signal);
2793
+ signal?.throwIfAborted();
3034
2794
  await db.transaction("readwrite", "miniMetadatas", async tx => {
3035
2795
  await tx.miniMetadatas.where({
3036
- networkId
2796
+ chainId
3037
2797
  }).delete();
3038
2798
  await tx.miniMetadatas.bulkPut(miniMetadatas);
3039
2799
  });
3040
2800
  return miniMetadatas;
3041
2801
  };
3042
2802
 
3043
- const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, source) => {
2803
+ const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, source, signal) => {
3044
2804
  const specVersion = await getSpecVersion(chainConnector, chainId);
3045
-
3046
- // TODO when working a chaindata branch, need a way to pass the libVersion used to derive the miniMetadataId got github
2805
+ signal?.throwIfAborted();
3047
2806
  const miniMetadataId = deriveMiniMetadataId({
3048
2807
  source,
3049
2808
  chainId,
@@ -3053,11 +2812,13 @@ const getMiniMetadata = async (chaindataProvider, chainConnector, chainId, sourc
3053
2812
 
3054
2813
  // lookup local ones
3055
2814
  const [dbMiniMetadata, ghMiniMetadata] = await Promise.all([db.miniMetadatas.get(miniMetadataId), chaindataProvider.miniMetadataById(miniMetadataId)]);
2815
+ signal?.throwIfAborted();
3056
2816
  const miniMetadata = dbMiniMetadata ?? ghMiniMetadata;
3057
2817
  if (miniMetadata) return miniMetadata;
3058
2818
 
3059
2819
  // update from live chain metadata and persist locally
3060
- const miniMetadatas = await getUpdatedMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion);
2820
+ const miniMetadatas = await getUpdatedMiniMetadatas(chainConnector, chaindataProvider, chainId, specVersion, signal);
2821
+ signal?.throwIfAborted();
3061
2822
  const found = miniMetadatas.find(m => m.id === miniMetadataId);
3062
2823
  if (!found) {
3063
2824
  log.warn("MiniMetadata not found in updated miniMetadatas", {
@@ -3144,6 +2905,28 @@ const buildStorageCoders = ({
  return [];
  }
  }));
+ const buildNetworkStorageCoders = (chainId, miniMetadata, coders) => {
+ if (!miniMetadata.data) return null;
+ const metadata = unifyMetadata(decAnyMetadata(miniMetadata.data));
+ try {
+ const scaleBuilder = getDynamicBuilder(getLookupFn(metadata));
+ const builtCoders = Object.fromEntries(Object.entries(coders).flatMap(([key, moduleMethodOrFn]) => {
+ const [module, method] = typeof moduleMethodOrFn === "function" ? moduleMethodOrFn({
+ chainId
+ }) : moduleMethodOrFn;
+ try {
+ return [[key, scaleBuilder.buildStorage(module, method)]];
+ } catch (cause) {
+ log.trace(`Failed to build SCALE coder for chain ${chainId} (${module}::${method})`, cause);
+ return [];
+ }
+ }));
+ return builtCoders;
+ } catch (cause) {
+ log.error(`Failed to build SCALE coders for chain ${chainId} (${JSON.stringify(coders)})`, cause);
+ }
+ return null;
+ };
 
  /**
  * Decodes & unwraps outputs and errors of a given result, contract, and method.
@@ -3344,15 +3127,9 @@ const SubAssetsModule = hydrate => {
  assert(chainConnector, "This module requires a substrate chain connector");
  return {
  ...DefaultBalanceModule(moduleType$4),
+ // TODO make synchronous at the module definition level ?
  async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
- const isTestnet = (await chaindataProvider.chainById(chainId))?.isTestnet || false;
- if (metadataRpc === undefined) return {
- isTestnet
- };
- if ((moduleConfig?.tokens ?? []).length < 1) return {
- isTestnet
- };
- const metadataVersion = getMetadataVersion(metadataRpc);
+ if (!metadataRpc) return {};
  const metadata = decAnyMetadata(metadataRpc);
  compactMetadata(metadata, [{
  pallet: "Assets",
@@ -3360,9 +3137,7 @@ const SubAssetsModule = hydrate => {
  }]);
  const miniMetadata = encodeMetadata(metadata);
  return {
- isTestnet,
- miniMetadata,
- metadataVersion
+ miniMetadata
  };
  },
  async fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig) {
@@ -3427,44 +3202,32 @@ const SubAssetsModule = hydrate => {
3427
3202
  return acc;
3428
3203
  }, {});
3429
3204
  const controller = new AbortController();
3430
- await Promise.all(toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
3431
- const queries = await buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken);
3432
- if (controller.signal.aborted) return;
3433
- const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
3434
- const unsubscribe = await stateHelper.subscribe((error, result) => {
3435
- // console.log("SubstrateAssetsModule.callback", { error, result })
3436
- if (error) return callback(error);
3437
- const balances = result?.filter(b => b !== null) ?? [];
3438
- if (balances.length > 0) callback(null, new Balances(balances));
3439
- });
3440
- controller.signal.addEventListener("abort", () => {
3441
- log.debug("TMP subscribeBalances aborted, unsubscribing from network", networkId);
3442
- unsubscribe();
3443
- });
3205
+ const pUnsubs = Promise.all(toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
3206
+ try {
3207
+ const queries = await buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, controller.signal);
3208
+ if (controller.signal.aborted) return () => {};
3209
+ const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
3210
+ return await stateHelper.subscribe((error, result) => {
3211
+ // console.log("SubstrateAssetsModule.callback", { error, result })
3212
+ if (error) return callback(error);
3213
+ const balances = result?.filter(b => b !== null) ?? [];
3214
+ if (balances.length > 0) callback(null, new Balances(balances));
3215
+ });
3216
+ } catch (err) {
3217
+ if (!controller.signal.aborted) log.error(`Failed to subscribe balances for network ${networkId}`, err);
3218
+ return () => {};
3219
+ }
3444
3220
  }));
3445
-
3446
- // const networkIds = uniq(uniq(keys(addressesByToken)).map((tokenId) => parseSubAssetTokenId(tokenId).networkId))
3447
- // const
3448
-
3449
- //console.log("SubstrateAssetsModule.subscribeBalances 1", { addressesByToken })
3450
- // const queries = await buildQueries(chaindataProvider, addressesByToken)
3451
- // //console.log("SubstrateAssetsModule.subscribeBalances 2", { queries, addressesByToken })
3452
- // const unsubscribe = await new RpcStateQueryHelper(chainConnector, queries).subscribe(
3453
- // (error, result) => {
3454
- // // console.log("SubstrateAssetsModule.callback", { error, result })
3455
- // if (error) return callback(error)
3456
- // const balances = result?.filter((b): b is SubAssetsBalance => b !== null) ?? []
3457
- // if (balances.length > 0) callback(null, new Balances(balances))
3458
- // },
3459
- // )
3460
-
3461
3221
  return () => {
3462
3222
  controller.abort();
3223
+ pUnsubs.then(unsubs => {
3224
+ unsubs.forEach(unsubscribe => unsubscribe());
3225
+ });
3463
3226
  };
3464
3227
  },
3465
3228
  async fetchBalances(addressesByToken) {
3466
3229
  assert(chainConnectors.substrate, "This module requires a substrate chain connector");
3467
- const queries = await buildQueries$3(chaindataProvider, addressesByToken);
3230
+ const queries = await buildQueries$3(chainConnector, chaindataProvider, addressesByToken);
3468
3231
  const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
3469
3232
  const balances = result?.filter(b => b !== null) ?? [];
3470
3233
  return new Balances(balances);
@@ -3534,23 +3297,14 @@ const SubAssetsModule = hydrate => {
3534
3297
  }
3535
3298
  };
3536
3299
  };
3537
- async function buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken) {
3538
- const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$4);
3539
- const network = await chaindataProvider.chainById(networkId);
3300
+ async function buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
3301
+ const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$4, signal);
3302
+ // console.log("Fetched miniMetadata for network", networkId, { miniMetadata })
3303
+ const chain = await chaindataProvider.chainById(networkId);
3540
3304
  const tokensById = await chaindataProvider.tokensById();
3541
- const chainIds = [networkId];
3542
- const chains = network ? {
3543
- [networkId]: network
3544
- } : {};
3545
- const miniMetadatas = new Map([[miniMetadata.id, miniMetadata]]);
3546
- const chainStorageCoders = buildStorageCoders({
3547
- chainIds,
3548
- chains,
3549
- miniMetadatas,
3550
- moduleType: moduleType$4,
3551
- coders: {
3552
- storage: ["Assets", "Account"]
3553
- }
3305
+ signal?.throwIfAborted();
3306
+ const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
3307
+ storage: ["Assets", "Account"]
3554
3308
  });
3555
3309
  return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
3556
3310
  const token = tokensById[tokenId];
@@ -3562,19 +3316,14 @@ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider,
3562
3316
  log.debug(`This module doesn't handle tokens of type ${token.type}`);
3563
3317
  return [];
3564
3318
  }
3565
- const networkId = token.networkId;
3566
- if (!networkId) {
3567
- log.warn(`Token ${tokenId} has no chain`);
3568
- return [];
3569
- }
3570
- const chain = chains[networkId];
3319
+ //
3571
3320
  if (!chain) {
3572
3321
  log.warn(`Chain ${networkId} for token ${tokenId} not found`);
3573
3322
  return [];
3574
3323
  }
3575
3324
  return addresses.flatMap(address => {
3576
- const scaleCoder = chainStorageCoders.get(networkId)?.storage;
3577
- const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, token.assetId, address);
3325
+ const scaleCoder = networkStorageCoders?.storage;
3326
+ const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, Number(token.assetId), address);
3578
3327
  if (!stateKey) {
3579
3328
  log.warn(`Invalid assetId / address in ${networkId} storage query ${token.assetId} / ${address}`);
3580
3329
  return [];
@@ -3629,102 +3378,16 @@ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider,
3629
3378
  });
3630
3379
  });
3631
3380
  }
3632
- async function buildQueries$3(chaindataProvider, addressesByToken) {
3633
- const allChains = await chaindataProvider.chainsById();
3634
- const tokens = await chaindataProvider.tokensById();
3635
-
3636
- // const networkIds = Object.keys(addressesByToken)
3637
-
3638
- // const
3639
- // const miniMetadatas = await getMiniMetadatas(chainConnector, chaindataProvider, network)
3640
- const miniMetadatas = new Map((await db.miniMetadatas.toArray()).map(miniMetadata => [miniMetadata.id, miniMetadata]));
3641
- const uniqueChainIds = getUniqueChainIds(addressesByToken, tokens);
3642
- const chains = Object.fromEntries(uniqueChainIds.map(chainId => [chainId, allChains[chainId]]));
3643
- const chainStorageCoders = buildStorageCoders({
3644
- chainIds: uniqueChainIds,
3645
- chains,
3646
- miniMetadatas,
3647
- moduleType: "substrate-assets",
3648
- coders: {
3649
- storage: ["Assets", "Account"]
3650
- }
3651
- });
3652
- return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
3653
- const token = tokens[tokenId];
3654
- if (!token) {
3655
- log.warn(`Token ${tokenId} not found`);
3656
- return [];
3657
- }
3658
- if (token.type !== "substrate-assets") {
3659
- log.debug(`This module doesn't handle tokens of type ${token.type}`);
3660
- return [];
3661
- }
3662
- const networkId = token.networkId;
3663
- if (!networkId) {
3664
- log.warn(`Token ${tokenId} has no chain`);
3665
- return [];
3666
- }
3667
- const chain = chains[networkId];
3668
- if (!chain) {
3669
- log.warn(`Chain ${networkId} for token ${tokenId} not found`);
3670
- return [];
3671
- }
3672
- return addresses.flatMap(address => {
3673
- const scaleCoder = chainStorageCoders.get(networkId)?.storage;
3674
- const stateKey = tryEncode(scaleCoder, BigInt(token.assetId), address) ?? tryEncode(scaleCoder, token.assetId, address);
3675
- if (!stateKey) {
3676
- log.warn(`Invalid assetId / address in ${networkId} storage query ${token.assetId} / ${address}`);
3677
- return [];
3678
- }
3679
- const decodeResult = change => {
3680
- /** NOTE: This type is only a hint for typescript, the chain can actually return whatever it wants to */
3681
-
3682
- const decoded = decodeScale(scaleCoder, change, `Failed to decode substrate-assets balance on chain ${networkId}`) ?? {
3683
- balance: 0n,
3684
- status: {
3685
- type: "Liquid"
3686
- }};
3687
- const isFrozen = decoded?.status?.type === "Frozen";
3688
- const amount = (decoded?.balance ?? 0n).toString();
3689
-
3690
- // due to the following balance calculations, which are made in the `Balance` type:
3691
- //
3692
- // total balance = (free balance) + (reserved balance)
3693
- // transferable balance = (free balance) - (frozen balance)
3694
- //
3695
- // when `isFrozen` is true we need to set **both** the `free` and `frozen` amounts
3696
- // of this balance to the value we received from the RPC.
3697
- //
3698
- // if we only set the `frozen` amount, then the `total` calculation will be incorrect!
3699
- const free = amount;
3700
- const frozen = token.isFrozen || isFrozen ? amount : "0";
3701
-
3702
- // include balance values even if zero, so that newly-zero values overwrite old values
3703
- const balanceValues = [{
3704
- type: "free",
3705
- label: "free",
3706
- amount: free.toString()
3707
- }, {
3708
- type: "locked",
3709
- label: "frozen",
3710
- amount: frozen.toString()
3711
- }];
3712
- return {
3713
- source: "substrate-assets",
3714
- status: "live",
3715
- address,
3716
- networkId,
3717
- tokenId: token.id,
3718
- values: balanceValues
3719
- };
3720
- };
3721
- return {
3722
- chainId: networkId,
3723
- stateKey,
3724
- decodeResult
3725
- };
3726
- });
3727
- });
3381
+ async function buildQueries$3(chainConnector, chaindataProvider, addressesByToken, signal) {
3382
+ const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
3383
+ const networkId = parseSubAssetTokenId(tokenId).networkId;
3384
+ if (!acc[networkId]) acc[networkId] = {};
3385
+ acc[networkId][tokenId] = addressesByToken[tokenId];
3386
+ return acc;
3387
+ }, {});
3388
+ return (await Promise.all(toPairs(byNetwork).map(([networkId, addressesByToken]) => {
3389
+ return buildNetworkQueries$1(networkId, chainConnector, chaindataProvider, addressesByToken, signal);
3390
+ }))).flat();
3728
3391
  }
3729
3392
  // NOTE: Different chains need different formats for assetId when encoding the stateKey
3730
3393
  // E.g. Polkadot Asset Hub needs it to be a string, Astar needs it to be a bigint
@@ -5400,8 +5063,23 @@ class SubNativeBalanceError extends Error {
  }
  }
 
- const DEFAULT_SYMBOL = "Unit";
- const DEFAULT_DECIMALS = 0;
+ const DotNetworkPropertiesSimple = z.object({
+ tokenDecimals: z.number().optional().default(0),
+ tokenSymbol: z.string().optional().default("Unit")
+ });
+ const DotNetworkPropertiesArray = z.object({
+ tokenDecimals: z.array(z.number()).nonempty(),
+ tokenSymbol: z.array(z.string()).nonempty()
+ });
+ const DotNetworkProperties = z.union([DotNetworkPropertiesSimple, DotNetworkPropertiesArray]).transform(val => ({
+ tokenDecimals: Array.isArray(val.tokenDecimals) ? val.tokenDecimals[0] : val.tokenDecimals,
+ tokenSymbol: Array.isArray(val.tokenSymbol) ? val.tokenSymbol[0] : val.tokenSymbol
+ }));
+ const getChainProperties = async (chainConnector, networkId) => {
+ const properties = await chainConnector.send(networkId, "system_properties", [], true);
+ return DotNetworkProperties.parse(properties);
+ };
+
  const POLLING_WINDOW_SIZE = 20;
  const MAX_SUBSCRIPTION_SIZE = 40;
  const SubNativeModule = hydrate => {
@@ -5415,29 +5093,187 @@ const SubNativeModule = hydrate => {
5415
5093
  const getModuleTokens = async () => {
5416
5094
  return await chaindataProvider.tokensByIdForType(moduleType$2);
5417
5095
  };
5418
- return {
5419
- ...DefaultBalanceModule(moduleType$2),
5420
- async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc, systemProperties) {
5421
- const isTestnet = (await chaindataProvider.chainById(chainId))?.isTestnet || false;
5422
- if (moduleConfig?.disable === true || metadataRpc === undefined) return {
5423
- isTestnet
5424
- };
5425
5096
 
5426
- //
5427
- // extract system_properties
5428
- //
5097
+ // subscribeBalances was split by network to prevent all subs to wait for all minimetadatas to be ready.
5098
+ // however the multichain logic in there is so deep in the function below that i had to keep it as-is, and call it by per-network chunks
5099
+ // TODO refactor this be actually network specific
5100
+ const subscribeChainBalances = async (chainId, opts, callback) => {
5101
+ const {
5102
+ addressesByToken,
5103
+ initialBalances
5104
+ } = opts;
5105
+ // full record of balances for this module
5106
+ const subNativeBalances = new BehaviorSubject(Object.fromEntries(initialBalances?.map(b => [getBalanceId(b), b]) ?? []));
5107
+ // tokens which have a known positive balance
5108
+ const positiveBalanceTokens = subNativeBalances.pipe(map(balances => Array.from(new Set(Object.values(balances).map(b => b.tokenId)))), share());
5109
+
5110
+ // tokens that will be subscribed to, simply a slice of the positive balance tokens of size MAX_SUBSCRIPTION_SIZE
5111
+ const subscriptionTokens = positiveBalanceTokens.pipe(map(tokens => tokens.sort(sortChains).slice(0, MAX_SUBSCRIPTION_SIZE)));
5112
+
5113
+ // an initialised balance is one where we have received a response for any type of 'subsource',
5114
+ // until then they are initialising. We only need to maintain one map of tokens to addresses for this
5115
+ const initialisingBalances = Object.entries(addressesByToken).reduce((acc, [tokenId, addresses]) => {
5116
+ acc.set(tokenId, new Set(addresses));
5117
+ return acc;
5118
+ }, new Map());
5119
+
5120
+ // after thirty seconds, we need to kill the initialising balances
5121
+ const initBalancesTimeout = setTimeout(() => {
5122
+ initialisingBalances.clear();
5123
+ // manually call the callback to ensure the caller gets the correct status
5124
+ callback(null, {
5125
+ status: "live",
5126
+ data: Object.values(subNativeBalances.getValue())
5127
+ });
5128
+ }, 30_000);
5129
+ const _callbackSub = subNativeBalances.pipe(debounceTime(100)).subscribe({
5130
+ next: balances => {
5131
+ callback(null, {
5132
+ status: initialisingBalances.size > 0 ? "initialising" : "live",
5133
+ data: Object.values(balances)
5134
+ });
5135
+ },
5136
+ error: error => callback(error),
5137
+ complete: () => {
5138
+ initialisingBalances.clear();
5139
+ clearTimeout(initBalancesTimeout);
5140
+ }
5141
+ });
5142
+ const unsubDeferred = Deferred();
5143
+ // we return this to the caller so that they can let us know when they're no longer interested in this subscription
5144
+ const callerUnsubscribe = () => {
5145
+ subNativeBalances.complete();
5146
+ _callbackSub.unsubscribe();
5147
+ return unsubDeferred.reject(new Error(`Caller unsubscribed`));
5148
+ };
5149
+ // we queue up our work to clean up our subscription when this promise rejects
5150
+ const callerUnsubscribed = unsubDeferred.promise;
5151
+
5152
+ // The update handler is to allow us to merge balances with the same id, and manage initialising and positive balances state for each
5153
+ // balance type and network
5154
+ const handleUpdateForSource = source => (error, result) => {
5155
+ if (result) {
5156
+ const currentBalances = subNativeBalances.getValue();
5157
+
5158
+ // first merge any balances with the same id within the result
5159
+ const accumulatedUpdates = result.filter(b => b.values.length > 0).reduce((acc, b) => {
5160
+ const bId = getBalanceId(b);
5161
+ acc[bId] = mergeBalances(acc[bId], b, source, false);
5162
+ return acc;
5163
+ }, {});
5164
+
5165
+ // then merge these with the current balances
5166
+ const mergedBalances = {};
5167
+ Object.entries(accumulatedUpdates).forEach(([bId, b]) => {
5168
+ // merge the values from the new balance into the existing balance, if there is one
5169
+ mergedBalances[bId] = mergeBalances(currentBalances[bId], b, source, true);
5170
+
5171
+ // update initialisingBalances to remove balances which have been updated
5172
+ const intialisingForToken = initialisingBalances.get(b.tokenId);
5173
+ if (intialisingForToken) {
5174
+ intialisingForToken.delete(b.address);
5175
+ if (intialisingForToken.size === 0) initialisingBalances.delete(b.tokenId);else initialisingBalances.set(b.tokenId, intialisingForToken);
5176
+ }
5177
+ });
5178
+ subNativeBalances.next({
5179
+ ...currentBalances,
5180
+ ...mergedBalances
5181
+ });
5182
+ }
5183
+ if (error) {
5184
+ if (error instanceof SubNativeBalanceError) {
5185
+ // this type of error doesn't need to be handled by the caller
5186
+ initialisingBalances.delete(error.tokenId);
5187
+ } else return callback(error);
5188
+ }
5189
+ };
5429
5190
 
5430
- const {
5431
- tokenSymbol,
5432
- tokenDecimals
5433
- } = systemProperties ?? {};
5434
- const symbol = (Array.isArray(tokenSymbol) ? tokenSymbol[0] : tokenSymbol) ?? DEFAULT_SYMBOL;
5435
- const decimals = (Array.isArray(tokenDecimals) ? tokenDecimals[0] : tokenDecimals) ?? DEFAULT_DECIMALS;
5191
+ // subscribe to addresses and tokens for which we have a known positive balance
5192
+ const positiveSub = subscriptionTokens.pipe(debounceTime(1000), takeUntil(callerUnsubscribed), map(tokenIds => tokenIds.reduce((acc, tokenId) => {
5193
+ acc[tokenId] = addressesByToken[tokenId];
5194
+ return acc;
5195
+ }, {})), distinctUntilChanged(isEqual), switchMap(newAddressesByToken => {
5196
+ return from(queryCache.getQueries(newAddressesByToken)).pipe(switchMap(baseQueries => {
5197
+ return new Observable(subscriber => {
5198
+ if (!chainConnectors.substrate) return;
5199
+ const unsubSubtensorStaking = subscribeSubtensorStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("subtensor-staking"));
5200
+ const unsubNompoolStaking = subscribeNompoolStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("nompools-staking"));
5201
+ const unsubCrowdloans = subscribeCrowdloans(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("crowdloan"));
5202
+ const unsubBase = subscribeBase(baseQueries, chainConnectors.substrate, handleUpdateForSource("base"));
5203
+ subscriber.add(async () => (await unsubSubtensorStaking)());
5204
+ subscriber.add(async () => (await unsubNompoolStaking)());
5205
+ subscriber.add(async () => (await unsubCrowdloans)());
5206
+ subscriber.add(async () => (await unsubBase)());
5207
+ });
5208
+ }));
5209
+ })).subscribe();
5210
+
5211
+ // for chains where we don't have a known positive balance, poll rather than subscribe
5212
+ const poll = async (addressesByToken = {}) => {
5213
+ const handleUpdate = handleUpdateForSource("base");
5214
+ try {
5215
+ const balances = await fetchBalances(addressesByToken);
5216
+ handleUpdate(null, Object.values(balances.toJSON()));
5217
+ } catch (error) {
5218
+ if (error instanceof ChainConnectionError) {
5219
+ // coerce ChainConnection errors into SubNativeBalance errors
5220
+ const errorChainId = error.chainId;
5221
+ Object.entries(await getModuleTokens()).filter(([, token]) => token.networkId === errorChainId).forEach(([tokenId]) => {
5222
+ const wrappedError = new SubNativeBalanceError(tokenId, error.message);
5223
+ handleUpdate(wrappedError);
5224
+ });
5225
+ } else {
5226
+ log.error("unknown substrate native balance error", error);
5227
+ handleUpdate(error);
5228
+ }
5229
+ }
5230
+ };
5231
+ // do one poll to get things started
5232
+ const currentBalances = subNativeBalances.getValue();
5233
+ const currentTokens = new Set(Object.values(currentBalances).map(b => b.tokenId));
5234
+ const nonCurrentTokens = Object.keys(addressesByToken).filter(tokenId => !currentTokens.has(tokenId)).sort(sortChains);
5235
+
5236
+ // break nonCurrentTokens into chunks of POLLING_WINDOW_SIZE
5237
+ await PromisePool.withConcurrency(POLLING_WINDOW_SIZE).for(nonCurrentTokens).process(async nonCurrentTokenId => await poll({
5238
+ [nonCurrentTokenId]: addressesByToken[nonCurrentTokenId]
5239
+ }));
5240
+
5241
+ // now poll every 30s on chains which are not subscriptionTokens
5242
+ // we chunk this observable into batches of positive token ids, to prevent eating all the websocket connections
5243
+ const pollingSub = interval(30_000) // emit values every 30 seconds
5244
+ .pipe(takeUntil(callerUnsubscribed), withLatestFrom(subscriptionTokens),
5245
+ // Combine latest value from subscriptionTokens with each interval tick
5246
+ map(([, subscribedTokenIds]) =>
5247
+ // Filter out tokens that are not subscribed
5248
+ Object.keys(addressesByToken).filter(tokenId => !subscribedTokenIds.includes(tokenId))), exhaustMap(tokenIds => from(arrayChunk(tokenIds, POLLING_WINDOW_SIZE)).pipe(concatMap(async tokenChunk => {
5249
+ // tokenChunk is a chunk of tokenIds with size POLLING_WINDOW_SIZE
5250
+ const pollingTokenAddresses = Object.fromEntries(tokenChunk.map(tokenId => [tokenId, addressesByToken[tokenId]]));
5251
+ await poll(pollingTokenAddresses);
5252
+ return true;
5253
+ })))).subscribe();
5254
+ return () => {
5255
+ callerUnsubscribe();
5256
+ positiveSub.unsubscribe();
5257
+ pollingSub.unsubscribe();
5258
+ };
5259
+ };
5260
+ const fetchBalances = async addressesByToken => {
5261
+ assert(chainConnectors.substrate, "This module requires a substrate chain connector");
5262
+ const queries = await queryCache.getQueries(addressesByToken);
5263
+ assert(chainConnectors.substrate, "This module requires a substrate chain connector");
5264
+ const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
5265
+ return new Balances(result ?? []);
5266
+ };
5267
+ return {
5268
+ ...DefaultBalanceModule(moduleType$2),
5269
+ async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
5270
+ if (!metadataRpc) return {};
5436
5271
 
5437
5272
  //
5438
5273
  // process metadata into SCALE encoders/decoders
5439
5274
  //
5440
5275
  const metadataVersion = getMetadataVersion(metadataRpc);
5276
+ if (metadataVersion < 14) return {};
5441
5277
  const metadata = decAnyMetadata(metadataRpc);
5442
5278
  const unifiedMetadata = unifyMetadata(metadata);
5443
5279
 
@@ -5504,16 +5340,15 @@ const SubNativeModule = hydrate => {
  }) => name === "Freezes"));
  const useLegacyTransferableCalculation = !hasFreezesItem;
  const chainMeta = {
- isTestnet,
+ // isTestnet,
  useLegacyTransferableCalculation,
- symbol,
- decimals,
+ // symbol,
+ // decimals,
  existentialDeposit,
  nominationPoolsPalletId,
  crowdloanPalletId,
  hasSubtensorPallet,
- miniMetadata,
- metadataVersion
+ miniMetadata
  };
  if (!useLegacyTransferableCalculation) delete chainMeta.useLegacyTransferableCalculation;
  if (!hasSubtensorPallet) delete chainMeta.hasSubtensorPallet;
@@ -5522,9 +5357,10 @@ const SubNativeModule = hydrate => {
  async fetchSubstrateChainTokens(chainId, chainMeta, moduleConfig) {
  if (moduleConfig?.disable === true) return {};
  const {
- isTestnet,
- symbol,
- decimals,
+ tokenSymbol: symbol,
+ tokenDecimals: decimals
+ } = await getChainProperties(chainConnector, chainId);
+ const {
  existentialDeposit
  } = chainMeta;
  const id = subNativeTokenId(chainId);
@@ -5532,11 +5368,10 @@ const SubNativeModule = hydrate => {
  id,
  type: "substrate-native",
  platform: "polkadot",
- isTestnet,
  isDefault: moduleConfig?.isDefault ?? true,
- symbol: symbol ?? DEFAULT_SYMBOL,
- name: moduleConfig?.name ?? symbol ?? DEFAULT_SYMBOL,
- decimals: decimals ?? DEFAULT_DECIMALS,
+ symbol: symbol,
+ name: moduleConfig?.name ?? symbol,
+ decimals: decimals,
  logo: moduleConfig?.logo,
  existentialDeposit: existentialDeposit ?? "0",
  networkId: chainId
@@ -5553,169 +5388,43 @@ const SubNativeModule = hydrate => {
5553
5388
  initialBalances
5554
5389
  }, callback) {
5555
5390
  assert(chainConnectors.substrate, "This module requires a substrate chain connector");
5556
-
5557
- // full record of balances for this module
5558
- const subNativeBalances = new BehaviorSubject(Object.fromEntries(initialBalances?.map(b => [getBalanceId(b), b]) ?? []));
5559
- // tokens which have a known positive balance
5560
- const positiveBalanceTokens = subNativeBalances.pipe(map(balances => Array.from(new Set(Object.values(balances).map(b => b.tokenId)))), share());
5561
-
5562
- // tokens that will be subscribed to, simply a slice of the positive balance tokens of size MAX_SUBSCRIPTION_SIZE
5563
- const subscriptionTokens = positiveBalanceTokens.pipe(map(tokens => tokens.sort(sortChains).slice(0, MAX_SUBSCRIPTION_SIZE)));
5564
-
5565
- // an initialised balance is one where we have received a response for any type of 'subsource',
5566
- // until then they are initialising. We only need to maintain one map of tokens to addresses for this
5567
- const initialisingBalances = Object.entries(addressesByToken).reduce((acc, [tokenId, addresses]) => {
5568
- acc.set(tokenId, new Set(addresses));
5391
+ const addressesByTokenByNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
5392
+ const networkId = parseSubNativeTokenId(tokenId).networkId;
5393
+ if (!acc[networkId]) acc[networkId] = {};
5394
+ acc[networkId][tokenId] = addressesByToken[tokenId];
5569
5395
  return acc;
5570
- }, new Map());
5571
-
5572
- // after thirty seconds, we need to kill the initialising balances
5573
- const initBalancesTimeout = setTimeout(() => {
5574
- initialisingBalances.clear();
5575
- // manually call the callback to ensure the caller gets the correct status
5576
- callback(null, {
5577
- status: "live",
5578
- data: Object.values(subNativeBalances.getValue())
5579
- });
5580
- }, 30_000);
5581
- const _callbackSub = subNativeBalances.pipe(debounceTime(100)).subscribe({
5582
- next: balances => {
5583
- callback(null, {
5584
- status: initialisingBalances.size > 0 ? "initialising" : "live",
5585
- data: Object.values(balances)
5586
- });
5587
- },
5588
- error: error => callback(error),
5589
- complete: () => {
5590
- initialisingBalances.clear();
5591
- clearTimeout(initBalancesTimeout);
5592
- }
5593
- });
5594
- const unsubDeferred = Deferred();
5595
- // we return this to the caller so that they can let us know when they're no longer interested in this subscription
5596
- const callerUnsubscribe = () => {
5597
- subNativeBalances.complete();
5598
- _callbackSub.unsubscribe();
5599
- return unsubDeferred.reject(new Error(`Caller unsubscribed`));
5600
- };
5601
- // we queue up our work to clean up our subscription when this promise rejects
5602
- const callerUnsubscribed = unsubDeferred.promise;
5603
-
5604
- // The update handler is to allow us to merge balances with the same id, and manage initialising and positive balances state for each
5605
- // balance type and network
5606
- const handleUpdateForSource = source => (error, result) => {
5607
- if (result) {
5608
- const currentBalances = subNativeBalances.getValue();
5609
-
5610
- // first merge any balances with the same id within the result
5611
- const accumulatedUpdates = result.filter(b => b.values.length > 0).reduce((acc, b) => {
5612
- const bId = getBalanceId(b);
5613
- acc[bId] = mergeBalances(acc[bId], b, source, false);
5614
- return acc;
5615
- }, {});
5616
-
5617
- // then merge these with the current balances
5618
- const mergedBalances = {};
5619
- Object.entries(accumulatedUpdates).forEach(([bId, b]) => {
5620
- // merge the values from the new balance into the existing balance, if there is one
5621
- mergedBalances[bId] = mergeBalances(currentBalances[bId], b, source, true);
5622
-
5623
- // update initialisingBalances to remove balances which have been updated
5624
- const intialisingForToken = initialisingBalances.get(b.tokenId);
5625
- if (intialisingForToken) {
5626
- intialisingForToken.delete(b.address);
5627
- if (intialisingForToken.size === 0) initialisingBalances.delete(b.tokenId);else initialisingBalances.set(b.tokenId, intialisingForToken);
5628
- }
5629
- });
5630
- subNativeBalances.next({
5631
- ...currentBalances,
5632
- ...mergedBalances
5633
- });
5634
- }
5635
- if (error) {
5636
- if (error instanceof SubNativeBalanceError) {
5637
- // this type of error doesn't need to be handled by the caller
5638
- initialisingBalances.delete(error.tokenId);
5639
- } else return callback(error);
5640
- }
5396
+ }, {});
5397
+ const initialBalancesByNetwork = groupBy$1(initialBalances ?? [], "networkId");
5398
+ const {
5399
+ abort,
5400
+ signal
5401
+ } = new AbortController();
5402
+ const safeCallback = (error, result) => {
5403
+ if (signal.aborted) return;
5404
+ // typescript isnt happy with fowarding parameters as is
5405
+ return error ? callback(error, undefined) : callback(error, result);
5641
5406
  };
5642
-
5643
- // subscribe to addresses and tokens for which we have a known positive balance
5644
- const positiveSub = subscriptionTokens.pipe(debounceTime(1000), takeUntil(callerUnsubscribed), map(tokenIds => tokenIds.reduce((acc, tokenId) => {
5645
- acc[tokenId] = addressesByToken[tokenId];
5646
- return acc;
5647
- }, {})), distinctUntilChanged(isEqual), switchMap(newAddressesByToken => {
5648
- return from(queryCache.getQueries(newAddressesByToken)).pipe(switchMap(baseQueries => {
5649
- return new Observable(subscriber => {
5650
- if (!chainConnectors.substrate) return;
5651
- const unsubSubtensorStaking = subscribeSubtensorStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("subtensor-staking"));
5652
- const unsubNompoolStaking = subscribeNompoolStaking(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("nompools-staking"));
5653
- const unsubCrowdloans = subscribeCrowdloans(chaindataProvider, chainConnectors.substrate, newAddressesByToken, handleUpdateForSource("crowdloan"));
5654
- const unsubBase = subscribeBase(baseQueries, chainConnectors.substrate, handleUpdateForSource("base"));
5655
- subscriber.add(async () => (await unsubSubtensorStaking)());
5656
- subscriber.add(async () => (await unsubNompoolStaking)());
5657
- subscriber.add(async () => (await unsubCrowdloans)());
5658
- subscriber.add(async () => (await unsubBase)());
5659
- });
5660
- }));
5661
- })).subscribe();
5662
-
5663
- // for chains where we don't have a known positive balance, poll rather than subscribe
5664
- const poll = async (addressesByToken = {}) => {
5665
- const handleUpdate = handleUpdateForSource("base");
5407
+ const unsubsribeFns = Promise.all(keys(addressesByTokenByNetwork).map(async networkId => {
5666
5408
  try {
5667
- const balances = await this.fetchBalances(addressesByToken);
5668
- handleUpdate(null, Object.values(balances.toJSON()));
5669
- } catch (error) {
5670
- if (error instanceof ChainConnectionError) {
5671
- // coerce ChainConnection errors into SubNativeBalance errors
5672
- const errorChainId = error.chainId;
5673
- Object.entries(await getModuleTokens()).filter(([, token]) => token.networkId === errorChainId).forEach(([tokenId]) => {
5674
- const wrappedError = new SubNativeBalanceError(tokenId, error.message);
5675
- handleUpdate(wrappedError);
5676
- });
5677
- } else {
5678
- log.error("unknown substrate native balance error", error);
5679
- handleUpdate(error);
5680
- }
5409
+ // this is the work we want done separately for each network
5410
+ // this updates the DB so the miniMetadata is available when it's used, very far down the call tree of subscribeChainBalances
5411
+ await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType$2, signal);
5412
+ } catch (err) {
5413
+ if (!signal.aborted) log.warn("Failed to get native token miniMetadata for network", networkId, err);
5414
+ return () => {};
5681
5415
  }
5682
- };
5683
- // do one poll to get things started
5684
- const currentBalances = subNativeBalances.getValue();
5685
- const currentTokens = new Set(Object.values(currentBalances).map(b => b.tokenId));
5686
- const nonCurrentTokens = Object.keys(addressesByToken).filter(tokenId => !currentTokens.has(tokenId)).sort(sortChains);
5687
-
5688
- // break nonCurrentTokens into chunks of POLLING_WINDOW_SIZE
5689
- await PromisePool.withConcurrency(POLLING_WINDOW_SIZE).for(nonCurrentTokens).process(async nonCurrentTokenId => await poll({
5690
- [nonCurrentTokenId]: addressesByToken[nonCurrentTokenId]
5416
+ if (signal.aborted) return () => {};
5417
+ return subscribeChainBalances(networkId, {
5418
+ addressesByToken: addressesByTokenByNetwork[networkId] ?? {},
5419
+ initialBalances: initialBalancesByNetwork[networkId] ?? []
5420
+ }, safeCallback);
5691
5421
  }));
5692
-
5693
- // now poll every 30s on chains which are not subscriptionTokens
5694
- // we chunk this observable into batches of positive token ids, to prevent eating all the websocket connections
5695
- const pollingSub = interval(30_000) // emit values every 30 seconds
5696
- .pipe(takeUntil(callerUnsubscribed), withLatestFrom(subscriptionTokens),
5697
- // Combine latest value from subscriptionTokens with each interval tick
5698
- map(([, subscribedTokenIds]) =>
5699
- // Filter out tokens that are not subscribed
5700
- Object.keys(addressesByToken).filter(tokenId => !subscribedTokenIds.includes(tokenId))), exhaustMap(tokenIds => from(arrayChunk(tokenIds, POLLING_WINDOW_SIZE)).pipe(concatMap(async tokenChunk => {
5701
- // tokenChunk is a chunk of tokenIds with size POLLING_WINDOW_SIZE
5702
- const pollingTokenAddresses = Object.fromEntries(tokenChunk.map(tokenId => [tokenId, addressesByToken[tokenId]]));
5703
- await poll(pollingTokenAddresses);
5704
- return true;
5705
- })))).subscribe();
5706
5422
  return () => {
5707
- callerUnsubscribe();
5708
- positiveSub.unsubscribe();
5709
- pollingSub.unsubscribe();
5423
+ controller.abort();
5424
+ unsubscribeFns.then(fns => fns.forEach(unsubscribe => unsubscribe()));
5710
5425
  };
5711
5426
  },
5712
- async fetchBalances(addressesByToken) {
5713
- assert(chainConnectors.substrate, "This module requires a substrate chain connector");
5714
- const queries = await queryCache.getQueries(addressesByToken);
5715
- assert(chainConnectors.substrate, "This module requires a substrate chain connector");
5716
- const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
5717
- return new Balances(result ?? []);
5718
- },
5427
+ fetchBalances,
5719
5428
  async transferToken({
5720
5429
  tokenId,
5721
5430
  from,
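The subscribeBalances body above replaces the old single polling/subscription pipeline with one subscription per network, all tied to a single AbortController. A minimal self-contained sketch of that shape, assuming an illustrative subscribeNetwork(networkId, signal, callback) helper in place of the real per-network subscribe functions:

// Sketch of the per-network fan-out with AbortController-based teardown.
// `subscribeNetwork` is an illustrative stand-in, not a package API.
function subscribeAllNetworks(networkIds, subscribeNetwork, callback) {
  const controller = new AbortController();
  const safeCallback = (error, result) => {
    if (controller.signal.aborted) return; // drop updates that arrive after teardown
    return error ? callback(error, undefined) : callback(null, result);
  };
  const unsubscribeFns = Promise.all(
    networkIds.map(async (networkId) => {
      try {
        // each network sets up independently; a failure only disables that network
        return await subscribeNetwork(networkId, controller.signal, safeCallback);
      } catch (err) {
        if (!controller.signal.aborted) console.warn("subscription failed for", networkId, err);
        return () => {};
      }
    })
  );
  // the caller gets a synchronous teardown even though setup is still in flight
  return () => {
    controller.abort();
    unsubscribeFns.then((fns) => fns.forEach((unsubscribe) => unsubscribe()));
  };
}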
@@ -7187,7 +6896,6 @@ const SubTokensModule = hydrate => {
7187
6896
  ...DefaultBalanceModule(moduleType),
7188
6897
  async fetchSubstrateChainMeta(chainId, moduleConfig, metadataRpc) {
7189
6898
  if (metadataRpc === undefined) return {};
7190
- if ((moduleConfig?.tokens ?? []).length < 1) return {};
7191
6899
  const metadata = decAnyMetadata(metadataRpc);
7192
6900
  const palletId = moduleConfig?.palletId ?? defaultPalletId;
7193
6901
  compactMetadata(metadata, [{
@@ -7239,17 +6947,39 @@ const SubTokensModule = hydrate => {
7239
6947
  async subscribeBalances({
7240
6948
  addressesByToken
7241
6949
  }, callback) {
7242
- const queries = await buildQueries(chaindataProvider, addressesByToken);
7243
- const unsubscribe = await new RpcStateQueryHelper(chainConnector, queries).subscribe((error, result) => {
7244
- if (error) return callback(error);
7245
- const balances = result?.filter(b => b !== null) ?? [];
7246
- if (balances.length > 0) callback(null, new Balances(balances));
7247
- });
7248
- return unsubscribe;
6950
+ const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
6951
+ const networkId = parseSubTokensTokenId(tokenId).networkId;
6952
+ if (!acc[networkId]) acc[networkId] = {};
6953
+ acc[networkId][tokenId] = addressesByToken[tokenId];
6954
+ return acc;
6955
+ }, {});
6956
+ const controller = new AbortController();
6957
+ const pUnsubs = Promise.all(toPairs(byNetwork).map(async ([networkId, addressesByToken]) => {
6958
+ try {
6959
+ const queries = await buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, controller.signal);
6960
+ if (controller.signal.aborted) return () => {};
6961
+ const stateHelper = new RpcStateQueryHelper(chainConnector, queries);
6962
+ return await stateHelper.subscribe((error, result) => {
6963
+ // console.log("SubstrateAssetsModule.callback", { error, result })
6964
+ if (error) return callback(error);
6965
+ const balances = result?.filter(b => b !== null) ?? [];
6966
+ if (balances.length > 0) callback(null, new Balances(balances));
6967
+ });
6968
+ } catch (err) {
6969
+ if (!controller.signal.aborted) log.error(`Failed to subscribe balances for network ${networkId}`, err);
6970
+ return () => {};
6971
+ }
6972
+ }));
6973
+ return () => {
6974
+ controller.abort();
6975
+ pUnsubs.then(unsubs => {
6976
+ unsubs.forEach(unsubscribe => unsubscribe());
6977
+ });
6978
+ };
7249
6979
  },
7250
6980
  async fetchBalances(addressesByToken) {
7251
6981
  assert(chainConnectors.substrate, "This module requires a substrate chain connector");
7252
- const queries = await buildQueries(chaindataProvider, addressesByToken);
6982
+ const queries = await buildQueries(chainConnector, chaindataProvider, addressesByToken);
7253
6983
  const result = await new RpcStateQueryHelper(chainConnectors.substrate, queries).fetch();
7254
6984
  const balances = result?.filter(b => b !== null) ?? [];
7255
6985
  return new Balances(balances);
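From the caller's side the contract is unchanged: await the subscription, consume Balances batches from the callback, and invoke the returned function to stop. A hedged usage sketch, where tokensModule and the token/address ids are placeholders rather than real values:

// Illustrative only: subscribe to one token for one address, then tear down.
const addressesByToken = {
  "example-tokens-token-id": ["5ExamplePlaceholderAddress"], // placeholder ids
};
const unsubscribe = await tokensModule.subscribeBalances({ addressesByToken }, (error, balances) => {
  if (error) return console.error("balance subscription error", error);
  console.log("received a batch of balances", balances);
});
// aborts any per-network setup still in flight, so no further callbacks fire
unsubscribe();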
@@ -7368,23 +7098,16 @@ const SubTokensModule = hydrate => {
7368
7098
  }
7369
7099
  };
7370
7100
  };
7371
- async function buildQueries(chaindataProvider, addressesByToken) {
7372
- const allChains = await chaindataProvider.chainsById();
7101
+ async function buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, signal) {
7102
+ const miniMetadata = await getMiniMetadata(chaindataProvider, chainConnector, networkId, moduleType, signal);
7103
+ const chain = await chaindataProvider.chainById(networkId);
7373
7104
  const tokens = await chaindataProvider.tokensById();
7374
- const miniMetadatas = new Map((await db.miniMetadatas.toArray()).map(miniMetadata => [miniMetadata.id, miniMetadata]));
7375
- const tokensPalletByChain = new Map(Object.values(allChains).map(chain => [chain.id, findChainMeta(miniMetadatas, moduleType, chain)[0]?.palletId]));
7376
- const uniqueChainIds = getUniqueChainIds(addressesByToken, tokens);
7377
- const chains = Object.fromEntries(uniqueChainIds.map(chainId => [chainId, allChains[chainId]]));
7378
- const chainStorageCoders = buildStorageCoders({
7379
- chainIds: uniqueChainIds,
7380
- chains,
7381
- miniMetadatas,
7382
- moduleType: "substrate-tokens",
7383
- coders: {
7384
- storage: ({
7385
- chainId
7386
- }) => [tokensPalletByChain.get(chainId) ?? defaultPalletId, "Accounts"]
7387
- }
7105
+ if (!chain) return [];
7106
+ signal?.throwIfAborted();
7107
+ const tokensMetadata = miniMetadata;
7108
+ const palletId = tokensMetadata.palletId ?? defaultPalletId;
7109
+ const networkStorageCoders = buildNetworkStorageCoders(networkId, miniMetadata, {
7110
+ storage: [palletId, "Accounts"]
7388
7111
  });
7389
7112
  return Object.entries(addressesByToken).flatMap(([tokenId, addresses]) => {
7390
7113
  const token = tokens[tokenId];
@@ -7396,18 +7119,8 @@ async function buildQueries(chaindataProvider, addressesByToken) {
7396
7119
  log.debug(`This module doesn't handle tokens of type ${token.type}`);
7397
7120
  return [];
7398
7121
  }
7399
- const networkId = token.networkId;
7400
- if (!networkId) {
7401
- log.warn(`Token ${tokenId} has no chain`);
7402
- return [];
7403
- }
7404
- const chain = chains[networkId];
7405
- if (!chain) {
7406
- log.warn(`Chain ${networkId} for token ${tokenId} not found`);
7407
- return [];
7408
- }
7409
7122
  return addresses.flatMap(address => {
7410
- const scaleCoder = chainStorageCoders.get(networkId)?.storage;
7123
+ const scaleCoder = networkStorageCoders?.storage;
7411
7124
  const onChainId = (() => {
7412
7125
  try {
7413
7126
  return papiParse(token.onChainId);
@@ -7458,33 +7171,18 @@ async function buildQueries(chaindataProvider, addressesByToken) {
7458
7171
  });
7459
7172
  });
7460
7173
  }
7174
+ async function buildQueries(chainConnector, chaindataProvider, addressesByToken, signal) {
7175
+ const byNetwork = keys(addressesByToken).reduce((acc, tokenId) => {
7176
+ const networkId = parseSubTokensTokenId(tokenId).networkId;
7177
+ if (!acc[networkId]) acc[networkId] = {};
7178
+ acc[networkId][tokenId] = addressesByToken[tokenId];
7179
+ return acc;
7180
+ }, {});
7181
+ return (await Promise.all(toPairs(byNetwork).map(([networkId, addressesByToken]) => {
7182
+ return buildNetworkQueries(networkId, chainConnector, chaindataProvider, addressesByToken, signal);
7183
+ }))).flat();
7184
+ }
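The new buildQueries is essentially a partition step: group the { tokenId: addresses } map by the network encoded in each token id, build each network's queries in parallel, then flatten. The same shape as a self-contained sketch, where parseTokenId and buildForNetwork are illustrative parameters standing in for parseSubTokensTokenId and buildNetworkQueries:

// Partition-then-parallelise sketch of the per-network query building above.
async function buildAllQueries(addressesByToken, parseTokenId, buildForNetwork) {
  const byNetwork = {};
  for (const [tokenId, addresses] of Object.entries(addressesByToken)) {
    const { networkId } = parseTokenId(tokenId);
    if (!byNetwork[networkId]) byNetwork[networkId] = {};
    byNetwork[networkId][tokenId] = addresses;
  }
  // each network's queries can be built independently, then concatenated
  const perNetwork = await Promise.all(
    Object.entries(byNetwork).map(([networkId, tokens]) => buildForNetwork(networkId, tokens))
  );
  return perNetwork.flat();
}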
7461
7185
 
7462
7186
  const defaultBalanceModules = [EvmErc20Module, EvmNativeModule, EvmUniswapV2Module, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule];
7463
7187
 
7464
- /** Pulls the latest chaindata from https://github.com/TalismanSociety/chaindata */
7465
- const hydrateChaindataAndMiniMetadata = async (chaindataProvider, miniMetadataUpdater) => {
7466
- // need chains to be provisioned first, or substrate balances won't fetch on first subscription
7467
- await chaindataProvider.hydrate();
7468
- await Promise.all([miniMetadataUpdater.hydrateFromChaindata(), miniMetadataUpdater.hydrateCustomChains()]);
7469
- const chains = await chaindataProvider.chains();
7470
- const {
7471
- statusesByChain
7472
- } = await miniMetadataUpdater.statuses(chains);
7473
- const goodChains = [...statusesByChain.entries()].flatMap(([chainId, status]) => status === "good" ? chainId : []);
7474
- await chaindataProvider.hydrateSubstrateTokens(goodChains);
7475
- };
7476
-
7477
- /** Builds any missing miniMetadatas (e.g. for the user's custom substrate chains) */
7478
- const updateCustomMiniMetadata = async (chaindataProvider, miniMetadataUpdater) => {
7479
- const chainIds = await chaindataProvider.chainIds();
7480
- await miniMetadataUpdater.update(chainIds);
7481
- };
7482
-
7483
- /** Fetches any missing Evm Tokens */
7484
- const updateEvmTokens = async (chaindataProvider, evmTokenFetcher) => {
7485
- await chaindataProvider.hydrate();
7486
- const evmNetworkIds = await chaindataProvider.evmNetworkIds();
7487
- await evmTokenFetcher.update(evmNetworkIds);
7488
- };
7489
-
7490
- export { Balance, BalanceFormatter, BalanceValueGetter, Balances, Change24hCurrencyFormatter, DefaultBalanceModule, EvmErc20Module, EvmNativeModule, EvmTokenFetcher, EvmUniswapV2Module, FiatSumBalancesFormatter, MiniMetadataUpdater, ONE_ALPHA_TOKEN, PlanckSumBalancesFormatter, RpcStateQueryHelper, SCALE_FACTOR, SUBTENSOR_MIN_STAKE_AMOUNT_PLANK, SUBTENSOR_ROOT_NETUID, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule, SumBalancesFormatter, TalismanBalancesDatabase, abiMulticall, balances, buildStorageCoders, calculateAlphaPrice, calculateTaoAmountFromAlpha, calculateTaoFromDynamicInfo, compress, configureStore, db, decodeOutput, decompress, defaultBalanceModules, deriveMiniMetadataId, detectTransferMethod, erc20Abi, erc20BalancesAggregatorAbi, excludeFromFeePayableLocks, excludeFromTransferableAmount, filterBaseLocks, filterMirrorTokens, findChainMeta, getBalanceId, getLockTitle, getUniqueChainIds, getValueId, hydrateChaindataAndMiniMetadata, includeInTotalExtraAmount, makeContractCaller, uniswapV2PairAbi, updateCustomMiniMetadata, updateEvmTokens };
7188
+ export { Balance, BalanceFormatter, BalanceValueGetter, Balances, Change24hCurrencyFormatter, DefaultBalanceModule, EvmErc20Module, EvmNativeModule, EvmTokenFetcher, EvmUniswapV2Module, FiatSumBalancesFormatter, ONE_ALPHA_TOKEN, PlanckSumBalancesFormatter, RpcStateQueryHelper, SCALE_FACTOR, SUBTENSOR_MIN_STAKE_AMOUNT_PLANK, SUBTENSOR_ROOT_NETUID, SubAssetsModule, SubForeignAssetsModule, SubNativeModule, SubPsp22Module, SubTokensModule, SumBalancesFormatter, TalismanBalancesDatabase, abiMulticall, balances, buildNetworkStorageCoders, buildStorageCoders, calculateAlphaPrice, calculateTaoAmountFromAlpha, calculateTaoFromDynamicInfo, compress, configureStore, db, decodeOutput, decompress, defaultBalanceModules, deriveMiniMetadataId, detectTransferMethod, erc20Abi, erc20BalancesAggregatorAbi, excludeFromFeePayableLocks, excludeFromTransferableAmount, filterBaseLocks, filterMirrorTokens, findChainMeta, getBalanceId, getLockTitle, getUniqueChainIds, getValueId, includeInTotalExtraAmount, makeContractCaller, uniswapV2PairAbi };