@talismn/balances 1.2.2 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/declarations/src/modules/substrate-dtao/calculatePendingRootClaimable.d.ts +12 -0
- package/dist/declarations/src/modules/substrate-dtao/types.d.ts +13 -0
- package/dist/talismn-balances.cjs.dev.js +289 -16
- package/dist/talismn-balances.cjs.prod.js +289 -16
- package/dist/talismn-balances.esm.js +289 -16
- package/package.json +5 -5
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { GetDynamicInfosResult, SubDTaoBalance } from "./types";
|
|
2
|
+
type DynamicInfo = NonNullable<GetDynamicInfosResult[number]>;
|
|
3
|
+
export declare const calculatePendingRootClaimable: ({ stake, hotkey, address, networkId, validatorRootClaimableRate, dynamicInfoByNetuid, alreadyClaimedByNetuid, }: {
|
|
4
|
+
stake: bigint;
|
|
5
|
+
hotkey: string;
|
|
6
|
+
address: string;
|
|
7
|
+
networkId: string;
|
|
8
|
+
validatorRootClaimableRate: Map<number, bigint>;
|
|
9
|
+
dynamicInfoByNetuid: Record<number, DynamicInfo | undefined>;
|
|
10
|
+
alreadyClaimedByNetuid: Map<number, bigint>;
|
|
11
|
+
}) => SubDTaoBalance[];
|
|
12
|
+
export {};
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import type { bittensor } from "@polkadot-api/descriptors";
|
|
1
2
|
import z from "zod/v4";
|
|
2
3
|
export declare const SubDTaoTokenConfigSchema: z.ZodObject<{
|
|
3
4
|
symbol: z.ZodOptional<z.ZodString>;
|
|
@@ -15,3 +16,15 @@ export type SubDTaoTokenConfig = z.infer<typeof SubDTaoTokenConfigSchema>;
|
|
|
15
16
|
export type SubDTaoBalanceMeta = {
|
|
16
17
|
scaledAlphaPrice: string;
|
|
17
18
|
};
|
|
19
|
+
export type SubDTaoBalance = {
|
|
20
|
+
address: string;
|
|
21
|
+
tokenId: string;
|
|
22
|
+
baseTokenId: string;
|
|
23
|
+
stake: bigint;
|
|
24
|
+
pendingRootClaim?: bigint;
|
|
25
|
+
hotkey: string;
|
|
26
|
+
netuid: number;
|
|
27
|
+
scaledAlphaPrice: bigint;
|
|
28
|
+
};
|
|
29
|
+
export type GetDynamicInfosResult = (typeof bittensor)["descriptors"]["apis"]["SubnetInfoRuntimeApi"]["get_all_dynamic_info"][1];
|
|
30
|
+
export type GetStakeInfosResult = (typeof bittensor)["descriptors"]["apis"]["StakeInfoRuntimeApi"]["get_stake_info_for_coldkeys"][1];
|
|
@@ -2295,6 +2295,47 @@ const taoToAlpha = (tao, scaledAlphaPrice) => {
|
|
|
2295
2295
|
return tao * ALPHA_PRICE_SCALE / scaledAlphaPrice;
|
|
2296
2296
|
};
|
|
2297
2297
|
|
|
2298
|
+
const calculatePendingRootClaimable = ({
|
|
2299
|
+
stake,
|
|
2300
|
+
hotkey,
|
|
2301
|
+
address,
|
|
2302
|
+
networkId,
|
|
2303
|
+
validatorRootClaimableRate,
|
|
2304
|
+
dynamicInfoByNetuid,
|
|
2305
|
+
alreadyClaimedByNetuid
|
|
2306
|
+
}) => {
|
|
2307
|
+
const pendingRootClaimBalances = [];
|
|
2308
|
+
for (const [netuid, claimableRate] of validatorRootClaimableRate) {
|
|
2309
|
+
if (claimableRate === 0n) {
|
|
2310
|
+
continue;
|
|
2311
|
+
}
|
|
2312
|
+
const dynamicInfo = dynamicInfoByNetuid[netuid];
|
|
2313
|
+
const scaledAlphaPrice = dynamicInfo ? getScaledAlphaPrice(dynamicInfo.alpha_in, dynamicInfo.tao_in) : 0n;
|
|
2314
|
+
// Calculate claimable = claimable_rate * root_stake
|
|
2315
|
+
// Note: claimableRate is a I96F32, a fixed-point number format
|
|
2316
|
+
|
|
2317
|
+
// Multiply claimable_rate by root_stake
|
|
2318
|
+
// I96F32 multiplication: round((a * b) / 2^32)
|
|
2319
|
+
const totalClaimable = stake * claimableRate + (1n << 31n) >> 32n;
|
|
2320
|
+
|
|
2321
|
+
// Subtract already claimed amount to get net pending claimable
|
|
2322
|
+
const alreadyClaimed = alreadyClaimedByNetuid.get(netuid) ?? 0n;
|
|
2323
|
+
const pendingRootClaim = totalClaimable > alreadyClaimed ? totalClaimable - alreadyClaimed : 0n;
|
|
2324
|
+
pendingRootClaimBalances.push({
|
|
2325
|
+
address,
|
|
2326
|
+
tokenId: chaindataProvider.subDTaoTokenId(networkId, netuid, hotkey),
|
|
2327
|
+
baseTokenId: chaindataProvider.subDTaoTokenId(networkId, netuid),
|
|
2328
|
+
hotkey: hotkey,
|
|
2329
|
+
netuid: netuid,
|
|
2330
|
+
scaledAlphaPrice,
|
|
2331
|
+
pendingRootClaim,
|
|
2332
|
+
stake: 0n
|
|
2333
|
+
});
|
|
2334
|
+
}
|
|
2335
|
+
return pendingRootClaimBalances;
|
|
2336
|
+
};
|
|
2337
|
+
|
|
2338
|
+
const ROOT_NETUID = 0;
|
|
2298
2339
|
const fetchBalances$5 = async ({
|
|
2299
2340
|
networkId,
|
|
2300
2341
|
tokensWithAddresses,
|
|
@@ -2344,20 +2385,83 @@ const fetchBalances$5 = async ({
|
|
|
2344
2385
|
const addresses = lodashEs.uniq(balanceDefs.map(def => def.address));
|
|
2345
2386
|
try {
|
|
2346
2387
|
const [stakeInfos, dynamicInfos] = await Promise.all([fetchRuntimeCallResult(connector, networkId, miniMetadata.data, "StakeInfoRuntimeApi", "get_stake_info_for_coldkeys", [addresses]), fetchRuntimeCallResult(connector, networkId, miniMetadata.data, "SubnetInfoRuntimeApi", "get_all_dynamic_info", [])]);
|
|
2388
|
+
const rootHotkeys = lodashEs.uniq(stakeInfos.flatMap(([, stakes]) => stakes.filter(stake => stake.netuid === ROOT_NETUID).map(stake => stake.hotkey)));
|
|
2389
|
+
const rootClaimableRatesByHotkey = rootHotkeys.length && miniMetadata.data ? await fetchRootClaimableRates(connector, networkId, miniMetadata.data, rootHotkeys) : new Map();
|
|
2390
|
+
|
|
2391
|
+
// Collect all (address, hotkey, netuid) pairs for root stakes to fetch RootClaimed amounts
|
|
2392
|
+
const addressHotkeyNetuidPairs = [];
|
|
2393
|
+
for (const [address, stakes] of stakeInfos) {
|
|
2394
|
+
for (const stake of stakes) {
|
|
2395
|
+
if (stake.netuid === ROOT_NETUID) {
|
|
2396
|
+
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey);
|
|
2397
|
+
if (claimableRates) {
|
|
2398
|
+
// For each netuid that has a claimable rate, we need to check RootClaimed
|
|
2399
|
+
for (const netuid of claimableRates.keys()) {
|
|
2400
|
+
addressHotkeyNetuidPairs.push([address, stake.hotkey, netuid]);
|
|
2401
|
+
}
|
|
2402
|
+
}
|
|
2403
|
+
}
|
|
2404
|
+
}
|
|
2405
|
+
}
|
|
2406
|
+
const rootClaimedAmounts = addressHotkeyNetuidPairs.length && miniMetadata.data ? await fetchRootClaimedAmounts(connector, networkId, miniMetadata.data, addressHotkeyNetuidPairs) : new Map();
|
|
2347
2407
|
const dynamicInfoByNetuid = lodashEs.keyBy(dynamicInfos.filter(util.isNotNil), info => info.netuid);
|
|
2348
|
-
|
|
2349
|
-
|
|
2350
|
-
|
|
2351
|
-
|
|
2352
|
-
|
|
2353
|
-
|
|
2354
|
-
|
|
2355
|
-
|
|
2356
|
-
|
|
2357
|
-
|
|
2358
|
-
|
|
2359
|
-
|
|
2360
|
-
|
|
2408
|
+
|
|
2409
|
+
// Upserts a balance into the accumulator, merging stake values if the balance already exists.
|
|
2410
|
+
// Eg: Acc X has root staked with validator Y, but also staked on sn 45 with the same validator Y.
|
|
2411
|
+
// We merge the pending root claim of sn 45 and the sn 45 stake in the same balance.
|
|
2412
|
+
const upsertBalance = (acc, address, tokenId, balance) => {
|
|
2413
|
+
const key = `${address}:${tokenId}`;
|
|
2414
|
+
const recordedBalance = acc[key];
|
|
2415
|
+
if (recordedBalance) {
|
|
2416
|
+
acc[key] = {
|
|
2417
|
+
...recordedBalance,
|
|
2418
|
+
stake: recordedBalance.stake + balance.stake,
|
|
2419
|
+
// If the new balance has pendingRootClaim, use it (it's calculated from current state)
|
|
2420
|
+
...(balance.pendingRootClaim !== undefined && {
|
|
2421
|
+
pendingRootClaim: balance.pendingRootClaim
|
|
2422
|
+
})
|
|
2423
|
+
};
|
|
2424
|
+
} else {
|
|
2425
|
+
acc[key] = balance;
|
|
2426
|
+
}
|
|
2427
|
+
};
|
|
2428
|
+
const balancesRaw = stakeInfos.reduce((acc, [address, stakes]) => {
|
|
2429
|
+
for (const stake of stakes) {
|
|
2430
|
+
// Regular stake cases
|
|
2431
|
+
const dynamicInfo = dynamicInfoByNetuid[stake.netuid];
|
|
2432
|
+
const scaledAlphaPrice = dynamicInfo ? getScaledAlphaPrice(dynamicInfo.alpha_in, dynamicInfo.tao_in) : 0n;
|
|
2433
|
+
const balance = {
|
|
2434
|
+
address,
|
|
2435
|
+
tokenId: chaindataProvider.subDTaoTokenId(networkId, stake.netuid, stake.hotkey),
|
|
2436
|
+
baseTokenId: chaindataProvider.subDTaoTokenId(networkId, stake.netuid),
|
|
2437
|
+
stake: stake.stake,
|
|
2438
|
+
hotkey: stake.hotkey,
|
|
2439
|
+
netuid: stake.netuid,
|
|
2440
|
+
scaledAlphaPrice
|
|
2441
|
+
};
|
|
2442
|
+
upsertBalance(acc, address, balance.tokenId, balance);
|
|
2443
|
+
|
|
2444
|
+
// Root stake cases, we need to calculate the pending root claim and add to the balances
|
|
2445
|
+
if (stake.netuid === ROOT_NETUID) {
|
|
2446
|
+
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey) ?? new Map();
|
|
2447
|
+
const alreadyClaimedMap = rootClaimedAmounts.get(address)?.get(stake.hotkey) ?? new Map();
|
|
2448
|
+
const pendingRootClaimBalances = calculatePendingRootClaimable({
|
|
2449
|
+
stake: stake.stake,
|
|
2450
|
+
hotkey: stake.hotkey,
|
|
2451
|
+
address,
|
|
2452
|
+
networkId,
|
|
2453
|
+
validatorRootClaimableRate: claimableRates,
|
|
2454
|
+
dynamicInfoByNetuid,
|
|
2455
|
+
alreadyClaimedByNetuid: alreadyClaimedMap
|
|
2456
|
+
});
|
|
2457
|
+
pendingRootClaimBalances.forEach(balance => {
|
|
2458
|
+
upsertBalance(acc, address, balance.tokenId, balance);
|
|
2459
|
+
});
|
|
2460
|
+
}
|
|
2461
|
+
}
|
|
2462
|
+
return acc;
|
|
2463
|
+
}, {});
|
|
2464
|
+
const balances = Object.values(balancesRaw);
|
|
2361
2465
|
const tokensById = lodashEs.keyBy(tokensWithAddresses.map(([token]) => token), t => t.id);
|
|
2362
2466
|
const dynamicTokens = [];
|
|
2363
2467
|
|
|
@@ -2383,19 +2487,45 @@ const fetchBalances$5 = async ({
|
|
|
2383
2487
|
const meta = {
|
|
2384
2488
|
scaledAlphaPrice: stake?.scaledAlphaPrice.toString() ?? "0"
|
|
2385
2489
|
};
|
|
2490
|
+
const stakeAmount = BigInt(stake?.stake?.toString() ?? "0");
|
|
2491
|
+
const pendingRootClaimAmount = BigInt(stake?.pendingRootClaim?.toString() ?? "0");
|
|
2492
|
+
const hasZeroStake = stakeAmount === 0n;
|
|
2493
|
+
const hasPendingRootClaim = pendingRootClaimAmount > 0n;
|
|
2386
2494
|
const balanceValue = {
|
|
2387
2495
|
type: "free",
|
|
2388
2496
|
label: stake?.netuid === 0 ? "Root Staking" : `Subnet Staking`,
|
|
2389
|
-
amount:
|
|
2497
|
+
amount: stakeAmount.toString(),
|
|
2390
2498
|
meta
|
|
2391
2499
|
};
|
|
2500
|
+
const pendingRootClaimValue = {
|
|
2501
|
+
type: "locked",
|
|
2502
|
+
label: "Pending root claim",
|
|
2503
|
+
amount: pendingRootClaimAmount.toString(),
|
|
2504
|
+
meta
|
|
2505
|
+
};
|
|
2506
|
+
const values = [balanceValue, pendingRootClaimValue];
|
|
2507
|
+
|
|
2508
|
+
// If stake is 0n but there's a pendingRootClaim, add it as an extra amount
|
|
2509
|
+
// with includeInTotal: true so it counts toward the total balance.
|
|
2510
|
+
// This ensures the balance isn't filtered out when stake is 0n.
|
|
2511
|
+
// The total.planck calculation is: free + reserved + extra (with includeInTotal: true)
|
|
2512
|
+
// So by adding pendingRootClaim as extra, it will be included in total.planck.
|
|
2513
|
+
if (hasZeroStake && hasPendingRootClaim) {
|
|
2514
|
+
values.push({
|
|
2515
|
+
type: "extra",
|
|
2516
|
+
label: "Pending root claim",
|
|
2517
|
+
amount: pendingRootClaimAmount.toString(),
|
|
2518
|
+
includeInTotal: true,
|
|
2519
|
+
meta
|
|
2520
|
+
});
|
|
2521
|
+
}
|
|
2392
2522
|
return {
|
|
2393
2523
|
address: def.address,
|
|
2394
2524
|
networkId,
|
|
2395
2525
|
tokenId: def.token.id,
|
|
2396
2526
|
source: MODULE_TYPE$5,
|
|
2397
2527
|
status: "live",
|
|
2398
|
-
values
|
|
2528
|
+
values
|
|
2399
2529
|
};
|
|
2400
2530
|
});
|
|
2401
2531
|
return {
|
|
@@ -2418,6 +2548,149 @@ const fetchBalances$5 = async ({
|
|
|
2418
2548
|
};
|
|
2419
2549
|
}
|
|
2420
2550
|
};
|
|
2551
|
+
const buildStorageCoder = (metadataRpc, pallet, entry) => {
|
|
2552
|
+
const {
|
|
2553
|
+
builder
|
|
2554
|
+
} = scale.parseMetadataRpc(metadataRpc);
|
|
2555
|
+
return builder.buildStorage(pallet, entry);
|
|
2556
|
+
};
|
|
2557
|
+
const buildRootClaimableStorageCoder = async (connector, networkId, metadataRpc) => {
|
|
2558
|
+
let storageCoder = null;
|
|
2559
|
+
if (metadataRpc) {
|
|
2560
|
+
try {
|
|
2561
|
+
storageCoder = buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimable");
|
|
2562
|
+
} catch (cause) {
|
|
2563
|
+
log.warn(`Failed to build storage coder for SubtensorModule.RootClaimable using provided metadata on ${networkId}`, {
|
|
2564
|
+
cause
|
|
2565
|
+
});
|
|
2566
|
+
}
|
|
2567
|
+
}
|
|
2568
|
+
return storageCoder;
|
|
2569
|
+
};
|
|
2570
|
+
const buildRootClaimedStorageCoder = async (networkId, metadataRpc) => {
|
|
2571
|
+
let storageCoder = null;
|
|
2572
|
+
if (metadataRpc) {
|
|
2573
|
+
try {
|
|
2574
|
+
storageCoder = buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimed");
|
|
2575
|
+
} catch (cause) {
|
|
2576
|
+
log.warn(`Failed to build storage coder for SubtensorModule.RootClaimed using provided metadata on ${networkId}`, {
|
|
2577
|
+
cause
|
|
2578
|
+
});
|
|
2579
|
+
}
|
|
2580
|
+
}
|
|
2581
|
+
return storageCoder;
|
|
2582
|
+
};
|
|
2583
|
+
const buildRootClaimableQueries = (networkId, hotkeys, storageCoder) => {
|
|
2584
|
+
return hotkeys.map(hotkey => {
|
|
2585
|
+
let stateKey = null;
|
|
2586
|
+
try {
|
|
2587
|
+
stateKey = storageCoder.keys.enc(hotkey);
|
|
2588
|
+
} catch (cause) {
|
|
2589
|
+
log.warn(`Failed to encode storage key for hotkey ${hotkey} on ${networkId}`, {
|
|
2590
|
+
cause
|
|
2591
|
+
});
|
|
2592
|
+
}
|
|
2593
|
+
const decodeResult = changes => {
|
|
2594
|
+
const hexValue = changes[0];
|
|
2595
|
+
if (!hexValue) {
|
|
2596
|
+
return [hotkey, new Map()];
|
|
2597
|
+
}
|
|
2598
|
+
const decoded = scale.decodeScale(storageCoder, hexValue, `Failed to decode RootClaimable for hotkey ${hotkey} on ${networkId}`);
|
|
2599
|
+
return [hotkey, decoded ? new Map(decoded) : new Map()];
|
|
2600
|
+
};
|
|
2601
|
+
return {
|
|
2602
|
+
stateKeys: [stateKey],
|
|
2603
|
+
decodeResult
|
|
2604
|
+
};
|
|
2605
|
+
});
|
|
2606
|
+
};
|
|
2607
|
+
const fetchRootClaimableRates = async (connector, networkId, metadataRpc, hotkeys) => {
|
|
2608
|
+
if (!hotkeys.length) return new Map();
|
|
2609
|
+
const storageCoder = await buildRootClaimableStorageCoder(connector, networkId, metadataRpc);
|
|
2610
|
+
if (!storageCoder) {
|
|
2611
|
+
// Fallback: return empty map for all hotkeys
|
|
2612
|
+
return new Map(hotkeys.map(hotkey => [hotkey, new Map()]));
|
|
2613
|
+
}
|
|
2614
|
+
const queries = buildRootClaimableQueries(networkId, hotkeys, storageCoder);
|
|
2615
|
+
try {
|
|
2616
|
+
const results = await fetchRpcQueryPack(connector, networkId, queries);
|
|
2617
|
+
return new Map(results);
|
|
2618
|
+
} catch (cause) {
|
|
2619
|
+
log.warn(`Failed to fetch RootClaimable for hotkeys on ${networkId}`, {
|
|
2620
|
+
cause
|
|
2621
|
+
});
|
|
2622
|
+
// Fallback: return empty map for all hotkeys
|
|
2623
|
+
return new Map(hotkeys.map(hotkey => [hotkey, new Map()]));
|
|
2624
|
+
}
|
|
2625
|
+
};
|
|
2626
|
+
const buildRootClaimedQueries = (networkId, addressHotkeyNetuidPairs, storageCoder) => {
|
|
2627
|
+
return addressHotkeyNetuidPairs.map(([address, hotkey, netuid]) => {
|
|
2628
|
+
let stateKey = null;
|
|
2629
|
+
try {
|
|
2630
|
+
// RootClaimed storage takes params: [netuid, hotkey, coldkey_ss58]
|
|
2631
|
+
stateKey = storageCoder.keys.enc(netuid, hotkey, address);
|
|
2632
|
+
} catch (cause) {
|
|
2633
|
+
log.warn(`Failed to encode storage key for RootClaimed (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`, {
|
|
2634
|
+
cause
|
|
2635
|
+
});
|
|
2636
|
+
}
|
|
2637
|
+
const decodeResult = changes => {
|
|
2638
|
+
const hexValue = changes[0];
|
|
2639
|
+
if (!hexValue) {
|
|
2640
|
+
return [address, hotkey, netuid, 0n];
|
|
2641
|
+
}
|
|
2642
|
+
const decoded = scale.decodeScale(storageCoder, hexValue, `Failed to decode RootClaimed for (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`);
|
|
2643
|
+
return [address, hotkey, netuid, decoded ?? 0n];
|
|
2644
|
+
};
|
|
2645
|
+
return {
|
|
2646
|
+
stateKeys: [stateKey],
|
|
2647
|
+
decodeResult
|
|
2648
|
+
};
|
|
2649
|
+
});
|
|
2650
|
+
};
|
|
2651
|
+
const fetchRootClaimedAmounts = async (connector, networkId, metadataRpc, addressHotkeyNetuidPairs) => {
|
|
2652
|
+
if (!addressHotkeyNetuidPairs.length) {
|
|
2653
|
+
return new Map();
|
|
2654
|
+
}
|
|
2655
|
+
const storageCoder = await buildRootClaimedStorageCoder(networkId, metadataRpc);
|
|
2656
|
+
if (!storageCoder) {
|
|
2657
|
+
// Fallback: return empty map for all pairs
|
|
2658
|
+
const result = new Map();
|
|
2659
|
+
for (const [address, hotkey, netuid] of addressHotkeyNetuidPairs) {
|
|
2660
|
+
if (!result.has(address)) result.set(address, new Map());
|
|
2661
|
+
const addressMap = result.get(address);
|
|
2662
|
+
if (!addressMap.has(hotkey)) addressMap.set(hotkey, new Map());
|
|
2663
|
+
addressMap.get(hotkey).set(netuid, 0n);
|
|
2664
|
+
}
|
|
2665
|
+
return result;
|
|
2666
|
+
}
|
|
2667
|
+
const queries = buildRootClaimedQueries(networkId, addressHotkeyNetuidPairs, storageCoder);
|
|
2668
|
+
try {
|
|
2669
|
+
const results = await fetchRpcQueryPack(connector, networkId, queries);
|
|
2670
|
+
// Build a nested map: address -> hotkey -> netuid -> claimed amount
|
|
2671
|
+
const result = new Map();
|
|
2672
|
+
for (const [address, hotkey, netuid, claimed] of results) {
|
|
2673
|
+
if (!result.has(address)) result.set(address, new Map());
|
|
2674
|
+
const addressMap = result.get(address);
|
|
2675
|
+
if (!addressMap.has(hotkey)) addressMap.set(hotkey, new Map());
|
|
2676
|
+
addressMap.get(hotkey).set(netuid, claimed);
|
|
2677
|
+
}
|
|
2678
|
+
return result;
|
|
2679
|
+
} catch (cause) {
|
|
2680
|
+
log.warn(`Failed to fetch RootClaimed for address-hotkey-netuid pairs on ${networkId}`, {
|
|
2681
|
+
cause
|
|
2682
|
+
});
|
|
2683
|
+
// Fallback: return empty map for all pairs
|
|
2684
|
+
const result = new Map();
|
|
2685
|
+
for (const [address, hotkey, netuid] of addressHotkeyNetuidPairs) {
|
|
2686
|
+
if (!result.has(address)) result.set(address, new Map());
|
|
2687
|
+
const addressMap = result.get(address);
|
|
2688
|
+
if (!addressMap.has(hotkey)) addressMap.set(hotkey, new Map());
|
|
2689
|
+
addressMap.get(hotkey).set(netuid, 0n);
|
|
2690
|
+
}
|
|
2691
|
+
return result;
|
|
2692
|
+
}
|
|
2693
|
+
};
|
|
2421
2694
|
|
|
2422
2695
|
// hardcoded because we dont have access to native tokens from the balance module
|
|
2423
2696
|
const NATIVE_TOKEN_SYMBOLS = {
|
|
@@ -2517,7 +2790,7 @@ const getData$4 = metadataRpc => {
|
|
|
2517
2790
|
if (!isBittensor) return null;
|
|
2518
2791
|
scale.compactMetadata(metadata, [{
|
|
2519
2792
|
pallet: "SubtensorModule",
|
|
2520
|
-
items: ["TransferToggle"]
|
|
2793
|
+
items: ["TransferToggle", "RootClaimable", "RootClaimed"]
|
|
2521
2794
|
}], [{
|
|
2522
2795
|
runtimeApi: "StakeInfoRuntimeApi",
|
|
2523
2796
|
methods: ["get_stake_info_for_coldkeys"]
|
|
@@ -2295,6 +2295,47 @@ const taoToAlpha = (tao, scaledAlphaPrice) => {
|
|
|
2295
2295
|
return tao * ALPHA_PRICE_SCALE / scaledAlphaPrice;
|
|
2296
2296
|
};
|
|
2297
2297
|
|
|
2298
|
+
const calculatePendingRootClaimable = ({
|
|
2299
|
+
stake,
|
|
2300
|
+
hotkey,
|
|
2301
|
+
address,
|
|
2302
|
+
networkId,
|
|
2303
|
+
validatorRootClaimableRate,
|
|
2304
|
+
dynamicInfoByNetuid,
|
|
2305
|
+
alreadyClaimedByNetuid
|
|
2306
|
+
}) => {
|
|
2307
|
+
const pendingRootClaimBalances = [];
|
|
2308
|
+
for (const [netuid, claimableRate] of validatorRootClaimableRate) {
|
|
2309
|
+
if (claimableRate === 0n) {
|
|
2310
|
+
continue;
|
|
2311
|
+
}
|
|
2312
|
+
const dynamicInfo = dynamicInfoByNetuid[netuid];
|
|
2313
|
+
const scaledAlphaPrice = dynamicInfo ? getScaledAlphaPrice(dynamicInfo.alpha_in, dynamicInfo.tao_in) : 0n;
|
|
2314
|
+
// Calculate claimable = claimable_rate * root_stake
|
|
2315
|
+
// Note: claimableRate is a I96F32, a fixed-point number format
|
|
2316
|
+
|
|
2317
|
+
// Multiply claimable_rate by root_stake
|
|
2318
|
+
// I96F32 multiplication: round((a * b) / 2^32)
|
|
2319
|
+
const totalClaimable = stake * claimableRate + (1n << 31n) >> 32n;
|
|
2320
|
+
|
|
2321
|
+
// Subtract already claimed amount to get net pending claimable
|
|
2322
|
+
const alreadyClaimed = alreadyClaimedByNetuid.get(netuid) ?? 0n;
|
|
2323
|
+
const pendingRootClaim = totalClaimable > alreadyClaimed ? totalClaimable - alreadyClaimed : 0n;
|
|
2324
|
+
pendingRootClaimBalances.push({
|
|
2325
|
+
address,
|
|
2326
|
+
tokenId: chaindataProvider.subDTaoTokenId(networkId, netuid, hotkey),
|
|
2327
|
+
baseTokenId: chaindataProvider.subDTaoTokenId(networkId, netuid),
|
|
2328
|
+
hotkey: hotkey,
|
|
2329
|
+
netuid: netuid,
|
|
2330
|
+
scaledAlphaPrice,
|
|
2331
|
+
pendingRootClaim,
|
|
2332
|
+
stake: 0n
|
|
2333
|
+
});
|
|
2334
|
+
}
|
|
2335
|
+
return pendingRootClaimBalances;
|
|
2336
|
+
};
|
|
2337
|
+
|
|
2338
|
+
const ROOT_NETUID = 0;
|
|
2298
2339
|
const fetchBalances$5 = async ({
|
|
2299
2340
|
networkId,
|
|
2300
2341
|
tokensWithAddresses,
|
|
@@ -2344,20 +2385,83 @@ const fetchBalances$5 = async ({
|
|
|
2344
2385
|
const addresses = lodashEs.uniq(balanceDefs.map(def => def.address));
|
|
2345
2386
|
try {
|
|
2346
2387
|
const [stakeInfos, dynamicInfos] = await Promise.all([fetchRuntimeCallResult(connector, networkId, miniMetadata.data, "StakeInfoRuntimeApi", "get_stake_info_for_coldkeys", [addresses]), fetchRuntimeCallResult(connector, networkId, miniMetadata.data, "SubnetInfoRuntimeApi", "get_all_dynamic_info", [])]);
|
|
2388
|
+
const rootHotkeys = lodashEs.uniq(stakeInfos.flatMap(([, stakes]) => stakes.filter(stake => stake.netuid === ROOT_NETUID).map(stake => stake.hotkey)));
|
|
2389
|
+
const rootClaimableRatesByHotkey = rootHotkeys.length && miniMetadata.data ? await fetchRootClaimableRates(connector, networkId, miniMetadata.data, rootHotkeys) : new Map();
|
|
2390
|
+
|
|
2391
|
+
// Collect all (address, hotkey, netuid) pairs for root stakes to fetch RootClaimed amounts
|
|
2392
|
+
const addressHotkeyNetuidPairs = [];
|
|
2393
|
+
for (const [address, stakes] of stakeInfos) {
|
|
2394
|
+
for (const stake of stakes) {
|
|
2395
|
+
if (stake.netuid === ROOT_NETUID) {
|
|
2396
|
+
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey);
|
|
2397
|
+
if (claimableRates) {
|
|
2398
|
+
// For each netuid that has a claimable rate, we need to check RootClaimed
|
|
2399
|
+
for (const netuid of claimableRates.keys()) {
|
|
2400
|
+
addressHotkeyNetuidPairs.push([address, stake.hotkey, netuid]);
|
|
2401
|
+
}
|
|
2402
|
+
}
|
|
2403
|
+
}
|
|
2404
|
+
}
|
|
2405
|
+
}
|
|
2406
|
+
const rootClaimedAmounts = addressHotkeyNetuidPairs.length && miniMetadata.data ? await fetchRootClaimedAmounts(connector, networkId, miniMetadata.data, addressHotkeyNetuidPairs) : new Map();
|
|
2347
2407
|
const dynamicInfoByNetuid = lodashEs.keyBy(dynamicInfos.filter(util.isNotNil), info => info.netuid);
|
|
2348
|
-
|
|
2349
|
-
|
|
2350
|
-
|
|
2351
|
-
|
|
2352
|
-
|
|
2353
|
-
|
|
2354
|
-
|
|
2355
|
-
|
|
2356
|
-
|
|
2357
|
-
|
|
2358
|
-
|
|
2359
|
-
|
|
2360
|
-
|
|
2408
|
+
|
|
2409
|
+
// Upserts a balance into the accumulator, merging stake values if the balance already exists.
|
|
2410
|
+
// Eg: Acc X has root staked with validator Y, but also staked on sn 45 with the same validator Y.
|
|
2411
|
+
// We merge the pending root claim of sn 45 and the sn 45 stake in the same balance.
|
|
2412
|
+
const upsertBalance = (acc, address, tokenId, balance) => {
|
|
2413
|
+
const key = `${address}:${tokenId}`;
|
|
2414
|
+
const recordedBalance = acc[key];
|
|
2415
|
+
if (recordedBalance) {
|
|
2416
|
+
acc[key] = {
|
|
2417
|
+
...recordedBalance,
|
|
2418
|
+
stake: recordedBalance.stake + balance.stake,
|
|
2419
|
+
// If the new balance has pendingRootClaim, use it (it's calculated from current state)
|
|
2420
|
+
...(balance.pendingRootClaim !== undefined && {
|
|
2421
|
+
pendingRootClaim: balance.pendingRootClaim
|
|
2422
|
+
})
|
|
2423
|
+
};
|
|
2424
|
+
} else {
|
|
2425
|
+
acc[key] = balance;
|
|
2426
|
+
}
|
|
2427
|
+
};
|
|
2428
|
+
const balancesRaw = stakeInfos.reduce((acc, [address, stakes]) => {
|
|
2429
|
+
for (const stake of stakes) {
|
|
2430
|
+
// Regular stake cases
|
|
2431
|
+
const dynamicInfo = dynamicInfoByNetuid[stake.netuid];
|
|
2432
|
+
const scaledAlphaPrice = dynamicInfo ? getScaledAlphaPrice(dynamicInfo.alpha_in, dynamicInfo.tao_in) : 0n;
|
|
2433
|
+
const balance = {
|
|
2434
|
+
address,
|
|
2435
|
+
tokenId: chaindataProvider.subDTaoTokenId(networkId, stake.netuid, stake.hotkey),
|
|
2436
|
+
baseTokenId: chaindataProvider.subDTaoTokenId(networkId, stake.netuid),
|
|
2437
|
+
stake: stake.stake,
|
|
2438
|
+
hotkey: stake.hotkey,
|
|
2439
|
+
netuid: stake.netuid,
|
|
2440
|
+
scaledAlphaPrice
|
|
2441
|
+
};
|
|
2442
|
+
upsertBalance(acc, address, balance.tokenId, balance);
|
|
2443
|
+
|
|
2444
|
+
// Root stake cases, we need to calculate the pending root claim and add to the balances
|
|
2445
|
+
if (stake.netuid === ROOT_NETUID) {
|
|
2446
|
+
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey) ?? new Map();
|
|
2447
|
+
const alreadyClaimedMap = rootClaimedAmounts.get(address)?.get(stake.hotkey) ?? new Map();
|
|
2448
|
+
const pendingRootClaimBalances = calculatePendingRootClaimable({
|
|
2449
|
+
stake: stake.stake,
|
|
2450
|
+
hotkey: stake.hotkey,
|
|
2451
|
+
address,
|
|
2452
|
+
networkId,
|
|
2453
|
+
validatorRootClaimableRate: claimableRates,
|
|
2454
|
+
dynamicInfoByNetuid,
|
|
2455
|
+
alreadyClaimedByNetuid: alreadyClaimedMap
|
|
2456
|
+
});
|
|
2457
|
+
pendingRootClaimBalances.forEach(balance => {
|
|
2458
|
+
upsertBalance(acc, address, balance.tokenId, balance);
|
|
2459
|
+
});
|
|
2460
|
+
}
|
|
2461
|
+
}
|
|
2462
|
+
return acc;
|
|
2463
|
+
}, {});
|
|
2464
|
+
const balances = Object.values(balancesRaw);
|
|
2361
2465
|
const tokensById = lodashEs.keyBy(tokensWithAddresses.map(([token]) => token), t => t.id);
|
|
2362
2466
|
const dynamicTokens = [];
|
|
2363
2467
|
|
|
@@ -2383,19 +2487,45 @@ const fetchBalances$5 = async ({
|
|
|
2383
2487
|
const meta = {
|
|
2384
2488
|
scaledAlphaPrice: stake?.scaledAlphaPrice.toString() ?? "0"
|
|
2385
2489
|
};
|
|
2490
|
+
const stakeAmount = BigInt(stake?.stake?.toString() ?? "0");
|
|
2491
|
+
const pendingRootClaimAmount = BigInt(stake?.pendingRootClaim?.toString() ?? "0");
|
|
2492
|
+
const hasZeroStake = stakeAmount === 0n;
|
|
2493
|
+
const hasPendingRootClaim = pendingRootClaimAmount > 0n;
|
|
2386
2494
|
const balanceValue = {
|
|
2387
2495
|
type: "free",
|
|
2388
2496
|
label: stake?.netuid === 0 ? "Root Staking" : `Subnet Staking`,
|
|
2389
|
-
amount:
|
|
2497
|
+
amount: stakeAmount.toString(),
|
|
2390
2498
|
meta
|
|
2391
2499
|
};
|
|
2500
|
+
const pendingRootClaimValue = {
|
|
2501
|
+
type: "locked",
|
|
2502
|
+
label: "Pending root claim",
|
|
2503
|
+
amount: pendingRootClaimAmount.toString(),
|
|
2504
|
+
meta
|
|
2505
|
+
};
|
|
2506
|
+
const values = [balanceValue, pendingRootClaimValue];
|
|
2507
|
+
|
|
2508
|
+
// If stake is 0n but there's a pendingRootClaim, add it as an extra amount
|
|
2509
|
+
// with includeInTotal: true so it counts toward the total balance.
|
|
2510
|
+
// This ensures the balance isn't filtered out when stake is 0n.
|
|
2511
|
+
// The total.planck calculation is: free + reserved + extra (with includeInTotal: true)
|
|
2512
|
+
// So by adding pendingRootClaim as extra, it will be included in total.planck.
|
|
2513
|
+
if (hasZeroStake && hasPendingRootClaim) {
|
|
2514
|
+
values.push({
|
|
2515
|
+
type: "extra",
|
|
2516
|
+
label: "Pending root claim",
|
|
2517
|
+
amount: pendingRootClaimAmount.toString(),
|
|
2518
|
+
includeInTotal: true,
|
|
2519
|
+
meta
|
|
2520
|
+
});
|
|
2521
|
+
}
|
|
2392
2522
|
return {
|
|
2393
2523
|
address: def.address,
|
|
2394
2524
|
networkId,
|
|
2395
2525
|
tokenId: def.token.id,
|
|
2396
2526
|
source: MODULE_TYPE$5,
|
|
2397
2527
|
status: "live",
|
|
2398
|
-
values
|
|
2528
|
+
values
|
|
2399
2529
|
};
|
|
2400
2530
|
});
|
|
2401
2531
|
return {
|
|
@@ -2418,6 +2548,149 @@ const fetchBalances$5 = async ({
|
|
|
2418
2548
|
};
|
|
2419
2549
|
}
|
|
2420
2550
|
};
|
|
2551
|
+
const buildStorageCoder = (metadataRpc, pallet, entry) => {
|
|
2552
|
+
const {
|
|
2553
|
+
builder
|
|
2554
|
+
} = scale.parseMetadataRpc(metadataRpc);
|
|
2555
|
+
return builder.buildStorage(pallet, entry);
|
|
2556
|
+
};
|
|
2557
|
+
const buildRootClaimableStorageCoder = async (connector, networkId, metadataRpc) => {
|
|
2558
|
+
let storageCoder = null;
|
|
2559
|
+
if (metadataRpc) {
|
|
2560
|
+
try {
|
|
2561
|
+
storageCoder = buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimable");
|
|
2562
|
+
} catch (cause) {
|
|
2563
|
+
log.warn(`Failed to build storage coder for SubtensorModule.RootClaimable using provided metadata on ${networkId}`, {
|
|
2564
|
+
cause
|
|
2565
|
+
});
|
|
2566
|
+
}
|
|
2567
|
+
}
|
|
2568
|
+
return storageCoder;
|
|
2569
|
+
};
|
|
2570
|
+
const buildRootClaimedStorageCoder = async (networkId, metadataRpc) => {
|
|
2571
|
+
let storageCoder = null;
|
|
2572
|
+
if (metadataRpc) {
|
|
2573
|
+
try {
|
|
2574
|
+
storageCoder = buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimed");
|
|
2575
|
+
} catch (cause) {
|
|
2576
|
+
log.warn(`Failed to build storage coder for SubtensorModule.RootClaimed using provided metadata on ${networkId}`, {
|
|
2577
|
+
cause
|
|
2578
|
+
});
|
|
2579
|
+
}
|
|
2580
|
+
}
|
|
2581
|
+
return storageCoder;
|
|
2582
|
+
};
|
|
2583
|
+
const buildRootClaimableQueries = (networkId, hotkeys, storageCoder) => {
|
|
2584
|
+
return hotkeys.map(hotkey => {
|
|
2585
|
+
let stateKey = null;
|
|
2586
|
+
try {
|
|
2587
|
+
stateKey = storageCoder.keys.enc(hotkey);
|
|
2588
|
+
} catch (cause) {
|
|
2589
|
+
log.warn(`Failed to encode storage key for hotkey ${hotkey} on ${networkId}`, {
|
|
2590
|
+
cause
|
|
2591
|
+
});
|
|
2592
|
+
}
|
|
2593
|
+
const decodeResult = changes => {
|
|
2594
|
+
const hexValue = changes[0];
|
|
2595
|
+
if (!hexValue) {
|
|
2596
|
+
return [hotkey, new Map()];
|
|
2597
|
+
}
|
|
2598
|
+
const decoded = scale.decodeScale(storageCoder, hexValue, `Failed to decode RootClaimable for hotkey ${hotkey} on ${networkId}`);
|
|
2599
|
+
return [hotkey, decoded ? new Map(decoded) : new Map()];
|
|
2600
|
+
};
|
|
2601
|
+
return {
|
|
2602
|
+
stateKeys: [stateKey],
|
|
2603
|
+
decodeResult
|
|
2604
|
+
};
|
|
2605
|
+
});
|
|
2606
|
+
};
|
|
2607
|
+
const fetchRootClaimableRates = async (connector, networkId, metadataRpc, hotkeys) => {
|
|
2608
|
+
if (!hotkeys.length) return new Map();
|
|
2609
|
+
const storageCoder = await buildRootClaimableStorageCoder(connector, networkId, metadataRpc);
|
|
2610
|
+
if (!storageCoder) {
|
|
2611
|
+
// Fallback: return empty map for all hotkeys
|
|
2612
|
+
return new Map(hotkeys.map(hotkey => [hotkey, new Map()]));
|
|
2613
|
+
}
|
|
2614
|
+
const queries = buildRootClaimableQueries(networkId, hotkeys, storageCoder);
|
|
2615
|
+
try {
|
|
2616
|
+
const results = await fetchRpcQueryPack(connector, networkId, queries);
|
|
2617
|
+
return new Map(results);
|
|
2618
|
+
} catch (cause) {
|
|
2619
|
+
log.warn(`Failed to fetch RootClaimable for hotkeys on ${networkId}`, {
|
|
2620
|
+
cause
|
|
2621
|
+
});
|
|
2622
|
+
// Fallback: return empty map for all hotkeys
|
|
2623
|
+
return new Map(hotkeys.map(hotkey => [hotkey, new Map()]));
|
|
2624
|
+
}
|
|
2625
|
+
};
|
|
2626
|
+
/**
 * Builds one state query per (address, hotkey, netuid) triple for the
 * SubtensorModule.RootClaimed storage entry. Each query decodes into an
 * [address, hotkey, netuid, claimedAmount] tuple, defaulting the amount to 0n
 * when the storage value is absent or cannot be decoded.
 */
const buildRootClaimedQueries = (networkId, addressHotkeyNetuidPairs, storageCoder) =>
  addressHotkeyNetuidPairs.map(([address, hotkey, netuid]) => {
    let stateKey = null;
    try {
      // RootClaimed storage takes params: [netuid, hotkey, coldkey_ss58]
      stateKey = storageCoder.keys.enc(netuid, hotkey, address);
    } catch (cause) {
      log.warn(`Failed to encode storage key for RootClaimed (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`, {
        cause
      });
    }
    return {
      // NOTE(review): stateKey stays null when encoding failed — confirm the query layer tolerates null keys.
      stateKeys: [stateKey],
      decodeResult: changes => {
        const raw = changes[0];
        if (!raw) return [address, hotkey, netuid, 0n];
        const decoded = scale.decodeScale(storageCoder, raw, `Failed to decode RootClaimed for (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`);
        return [address, hotkey, netuid, decoded ?? 0n];
      }
    };
  });
|
|
2651
|
+
/**
 * Fetches the already-claimed root amounts (SubtensorModule.RootClaimed) for
 * each (coldkey address, hotkey, netuid) triple.
 *
 * Returns a nested Map: address -> hotkey -> netuid -> claimed amount (bigint).
 * On any failure (no storage coder, RPC error) every requested triple is
 * reported as 0n so downstream pending-claim math still runs.
 *
 * Refactor: the nested-map insertion was previously written out three times
 * and the zero fallback built twice; both are now local helpers. Behavior and
 * signature are unchanged.
 */
const fetchRootClaimedAmounts = async (connector, networkId, metadataRpc, addressHotkeyNetuidPairs) => {
  if (!addressHotkeyNetuidPairs.length) {
    return new Map();
  }
  // Inserts one (address, hotkey, netuid) -> claimed entry into the nested map.
  const setClaimed = (result, address, hotkey, netuid, claimed) => {
    if (!result.has(address)) result.set(address, new Map());
    const addressMap = result.get(address);
    if (!addressMap.has(hotkey)) addressMap.set(hotkey, new Map());
    addressMap.get(hotkey).set(netuid, claimed);
  };
  // Fallback: every requested triple maps to 0n (nothing claimed).
  const emptyResult = () => {
    const result = new Map();
    for (const [address, hotkey, netuid] of addressHotkeyNetuidPairs) {
      setClaimed(result, address, hotkey, netuid, 0n);
    }
    return result;
  };
  const storageCoder = await buildRootClaimedStorageCoder(networkId, metadataRpc);
  if (!storageCoder) return emptyResult();
  const queries = buildRootClaimedQueries(networkId, addressHotkeyNetuidPairs, storageCoder);
  try {
    const results = await fetchRpcQueryPack(connector, networkId, queries);
    // Build a nested map: address -> hotkey -> netuid -> claimed amount
    const result = new Map();
    for (const [address, hotkey, netuid, claimed] of results) {
      setClaimed(result, address, hotkey, netuid, claimed);
    }
    return result;
  } catch (cause) {
    log.warn(`Failed to fetch RootClaimed for address-hotkey-netuid pairs on ${networkId}`, {
      cause
    });
    return emptyResult();
  }
};
|
|
2421
2694
|
|
|
2422
2695
|
// hardcoded because we dont have access to native tokens from the balance module
|
|
2423
2696
|
const NATIVE_TOKEN_SYMBOLS = {
|
|
@@ -2517,7 +2790,7 @@ const getData$4 = metadataRpc => {
|
|
|
2517
2790
|
if (!isBittensor) return null;
|
|
2518
2791
|
scale.compactMetadata(metadata, [{
|
|
2519
2792
|
pallet: "SubtensorModule",
|
|
2520
|
-
items: ["TransferToggle"]
|
|
2793
|
+
items: ["TransferToggle", "RootClaimable", "RootClaimed"]
|
|
2521
2794
|
}], [{
|
|
2522
2795
|
runtimeApi: "StakeInfoRuntimeApi",
|
|
2523
2796
|
methods: ["get_stake_info_for_coldkeys"]
|
|
@@ -2286,6 +2286,47 @@ const taoToAlpha = (tao, scaledAlphaPrice) => {
|
|
|
2286
2286
|
return tao * ALPHA_PRICE_SCALE / scaledAlphaPrice;
|
|
2287
2287
|
};
|
|
2288
2288
|
|
|
2289
|
+
/**
 * Computes the pending (not yet claimed) root-claim balances that a root
 * stake earns across subnets — one balance entry per netuid that has a
 * non-zero claimable rate.
 *
 * claimableRate is an I96F32 fixed-point value, so the total claimable for a
 * stake is round((stake * claimableRate) / 2^32); the amount already claimed
 * on that netuid is then subtracted, clamped at zero.
 */
const calculatePendingRootClaimable = ({
  stake,
  hotkey,
  address,
  networkId,
  validatorRootClaimableRate,
  dynamicInfoByNetuid,
  alreadyClaimedByNetuid
}) => {
  const balances = [];
  for (const [netuid, claimableRate] of validatorRootClaimableRate) {
    if (claimableRate === 0n) continue;
    // I96F32 multiplication with rounding: ((a * b) + 2^31) >> 32
    const totalClaimable = ((stake * claimableRate) + (1n << 31n)) >> 32n;
    // Net pending = total claimable minus what was already claimed, floored at 0n.
    const alreadyClaimed = alreadyClaimedByNetuid.get(netuid) ?? 0n;
    const pendingRootClaim = totalClaimable > alreadyClaimed ? totalClaimable - alreadyClaimed : 0n;
    const dynamicInfo = dynamicInfoByNetuid[netuid];
    const scaledAlphaPrice = dynamicInfo ? getScaledAlphaPrice(dynamicInfo.alpha_in, dynamicInfo.tao_in) : 0n;
    balances.push({
      address,
      tokenId: subDTaoTokenId(networkId, netuid, hotkey),
      baseTokenId: subDTaoTokenId(networkId, netuid),
      hotkey: hotkey,
      netuid: netuid,
      scaledAlphaPrice,
      pendingRootClaim,
      // The pending claim carries no staked alpha of its own.
      stake: 0n
    });
  }
  return balances;
};

// Netuid of the root subnet; only root stakes accrue claimables.
const ROOT_NETUID = 0;
|
|
2289
2330
|
const fetchBalances$5 = async ({
|
|
2290
2331
|
networkId,
|
|
2291
2332
|
tokensWithAddresses,
|
|
@@ -2335,20 +2376,83 @@ const fetchBalances$5 = async ({
|
|
|
2335
2376
|
const addresses = uniq(balanceDefs.map(def => def.address));
|
|
2336
2377
|
try {
|
|
2337
2378
|
const [stakeInfos, dynamicInfos] = await Promise.all([fetchRuntimeCallResult(connector, networkId, miniMetadata.data, "StakeInfoRuntimeApi", "get_stake_info_for_coldkeys", [addresses]), fetchRuntimeCallResult(connector, networkId, miniMetadata.data, "SubnetInfoRuntimeApi", "get_all_dynamic_info", [])]);
|
|
2379
|
+
const rootHotkeys = uniq(stakeInfos.flatMap(([, stakes]) => stakes.filter(stake => stake.netuid === ROOT_NETUID).map(stake => stake.hotkey)));
|
|
2380
|
+
const rootClaimableRatesByHotkey = rootHotkeys.length && miniMetadata.data ? await fetchRootClaimableRates(connector, networkId, miniMetadata.data, rootHotkeys) : new Map();
|
|
2381
|
+
|
|
2382
|
+
// Collect all (address, hotkey, netuid) pairs for root stakes to fetch RootClaimed amounts
|
|
2383
|
+
const addressHotkeyNetuidPairs = [];
|
|
2384
|
+
for (const [address, stakes] of stakeInfos) {
|
|
2385
|
+
for (const stake of stakes) {
|
|
2386
|
+
if (stake.netuid === ROOT_NETUID) {
|
|
2387
|
+
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey);
|
|
2388
|
+
if (claimableRates) {
|
|
2389
|
+
// For each netuid that has a claimable rate, we need to check RootClaimed
|
|
2390
|
+
for (const netuid of claimableRates.keys()) {
|
|
2391
|
+
addressHotkeyNetuidPairs.push([address, stake.hotkey, netuid]);
|
|
2392
|
+
}
|
|
2393
|
+
}
|
|
2394
|
+
}
|
|
2395
|
+
}
|
|
2396
|
+
}
|
|
2397
|
+
const rootClaimedAmounts = addressHotkeyNetuidPairs.length && miniMetadata.data ? await fetchRootClaimedAmounts(connector, networkId, miniMetadata.data, addressHotkeyNetuidPairs) : new Map();
|
|
2338
2398
|
const dynamicInfoByNetuid = keyBy(dynamicInfos.filter(isNotNil), info => info.netuid);
|
|
2339
|
-
|
|
2340
|
-
|
|
2341
|
-
|
|
2342
|
-
|
|
2343
|
-
|
|
2344
|
-
|
|
2345
|
-
|
|
2346
|
-
|
|
2347
|
-
|
|
2348
|
-
|
|
2349
|
-
|
|
2350
|
-
|
|
2351
|
-
|
|
2399
|
+
|
|
2400
|
+
// Upserts a balance into the accumulator, merging stake values if the balance already exists.
|
|
2401
|
+
// Eg: Acc X has root staked with validator Y, but also staked on sn 45 with the same validator Y.
|
|
2402
|
+
// We merge the pending root claim of sn 45 and the sn 45 stake in the same balance.
|
|
2403
|
+
const upsertBalance = (acc, address, tokenId, balance) => {
|
|
2404
|
+
const key = `${address}:${tokenId}`;
|
|
2405
|
+
const recordedBalance = acc[key];
|
|
2406
|
+
if (recordedBalance) {
|
|
2407
|
+
acc[key] = {
|
|
2408
|
+
...recordedBalance,
|
|
2409
|
+
stake: recordedBalance.stake + balance.stake,
|
|
2410
|
+
// If the new balance has pendingRootClaim, use it (it's calculated from current state)
|
|
2411
|
+
...(balance.pendingRootClaim !== undefined && {
|
|
2412
|
+
pendingRootClaim: balance.pendingRootClaim
|
|
2413
|
+
})
|
|
2414
|
+
};
|
|
2415
|
+
} else {
|
|
2416
|
+
acc[key] = balance;
|
|
2417
|
+
}
|
|
2418
|
+
};
|
|
2419
|
+
const balancesRaw = stakeInfos.reduce((acc, [address, stakes]) => {
|
|
2420
|
+
for (const stake of stakes) {
|
|
2421
|
+
// Regular stake cases
|
|
2422
|
+
const dynamicInfo = dynamicInfoByNetuid[stake.netuid];
|
|
2423
|
+
const scaledAlphaPrice = dynamicInfo ? getScaledAlphaPrice(dynamicInfo.alpha_in, dynamicInfo.tao_in) : 0n;
|
|
2424
|
+
const balance = {
|
|
2425
|
+
address,
|
|
2426
|
+
tokenId: subDTaoTokenId(networkId, stake.netuid, stake.hotkey),
|
|
2427
|
+
baseTokenId: subDTaoTokenId(networkId, stake.netuid),
|
|
2428
|
+
stake: stake.stake,
|
|
2429
|
+
hotkey: stake.hotkey,
|
|
2430
|
+
netuid: stake.netuid,
|
|
2431
|
+
scaledAlphaPrice
|
|
2432
|
+
};
|
|
2433
|
+
upsertBalance(acc, address, balance.tokenId, balance);
|
|
2434
|
+
|
|
2435
|
+
// Root stake cases, we need to calculate the pending root claim and add to the balances
|
|
2436
|
+
if (stake.netuid === ROOT_NETUID) {
|
|
2437
|
+
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey) ?? new Map();
|
|
2438
|
+
const alreadyClaimedMap = rootClaimedAmounts.get(address)?.get(stake.hotkey) ?? new Map();
|
|
2439
|
+
const pendingRootClaimBalances = calculatePendingRootClaimable({
|
|
2440
|
+
stake: stake.stake,
|
|
2441
|
+
hotkey: stake.hotkey,
|
|
2442
|
+
address,
|
|
2443
|
+
networkId,
|
|
2444
|
+
validatorRootClaimableRate: claimableRates,
|
|
2445
|
+
dynamicInfoByNetuid,
|
|
2446
|
+
alreadyClaimedByNetuid: alreadyClaimedMap
|
|
2447
|
+
});
|
|
2448
|
+
pendingRootClaimBalances.forEach(balance => {
|
|
2449
|
+
upsertBalance(acc, address, balance.tokenId, balance);
|
|
2450
|
+
});
|
|
2451
|
+
}
|
|
2452
|
+
}
|
|
2453
|
+
return acc;
|
|
2454
|
+
}, {});
|
|
2455
|
+
const balances = Object.values(balancesRaw);
|
|
2352
2456
|
const tokensById = keyBy(tokensWithAddresses.map(([token]) => token), t => t.id);
|
|
2353
2457
|
const dynamicTokens = [];
|
|
2354
2458
|
|
|
@@ -2374,19 +2478,45 @@ const fetchBalances$5 = async ({
|
|
|
2374
2478
|
const meta = {
|
|
2375
2479
|
scaledAlphaPrice: stake?.scaledAlphaPrice.toString() ?? "0"
|
|
2376
2480
|
};
|
|
2481
|
+
const stakeAmount = BigInt(stake?.stake?.toString() ?? "0");
|
|
2482
|
+
const pendingRootClaimAmount = BigInt(stake?.pendingRootClaim?.toString() ?? "0");
|
|
2483
|
+
const hasZeroStake = stakeAmount === 0n;
|
|
2484
|
+
const hasPendingRootClaim = pendingRootClaimAmount > 0n;
|
|
2377
2485
|
const balanceValue = {
|
|
2378
2486
|
type: "free",
|
|
2379
2487
|
label: stake?.netuid === 0 ? "Root Staking" : `Subnet Staking`,
|
|
2380
|
-
amount:
|
|
2488
|
+
amount: stakeAmount.toString(),
|
|
2381
2489
|
meta
|
|
2382
2490
|
};
|
|
2491
|
+
const pendingRootClaimValue = {
|
|
2492
|
+
type: "locked",
|
|
2493
|
+
label: "Pending root claim",
|
|
2494
|
+
amount: pendingRootClaimAmount.toString(),
|
|
2495
|
+
meta
|
|
2496
|
+
};
|
|
2497
|
+
const values = [balanceValue, pendingRootClaimValue];
|
|
2498
|
+
|
|
2499
|
+
// If stake is 0n but there's a pendingRootClaim, add it as an extra amount
|
|
2500
|
+
// with includeInTotal: true so it counts toward the total balance.
|
|
2501
|
+
// This ensures the balance isn't filtered out when stake is 0n.
|
|
2502
|
+
// The total.planck calculation is: free + reserved + extra (with includeInTotal: true)
|
|
2503
|
+
// So by adding pendingRootClaim as extra, it will be included in total.planck.
|
|
2504
|
+
if (hasZeroStake && hasPendingRootClaim) {
|
|
2505
|
+
values.push({
|
|
2506
|
+
type: "extra",
|
|
2507
|
+
label: "Pending root claim",
|
|
2508
|
+
amount: pendingRootClaimAmount.toString(),
|
|
2509
|
+
includeInTotal: true,
|
|
2510
|
+
meta
|
|
2511
|
+
});
|
|
2512
|
+
}
|
|
2383
2513
|
return {
|
|
2384
2514
|
address: def.address,
|
|
2385
2515
|
networkId,
|
|
2386
2516
|
tokenId: def.token.id,
|
|
2387
2517
|
source: MODULE_TYPE$5,
|
|
2388
2518
|
status: "live",
|
|
2389
|
-
values
|
|
2519
|
+
values
|
|
2390
2520
|
};
|
|
2391
2521
|
});
|
|
2392
2522
|
return {
|
|
@@ -2409,6 +2539,149 @@ const fetchBalances$5 = async ({
|
|
|
2409
2539
|
};
|
|
2410
2540
|
}
|
|
2411
2541
|
};
|
|
2542
|
+
const buildStorageCoder = (metadataRpc, pallet, entry) => {
|
|
2543
|
+
const {
|
|
2544
|
+
builder
|
|
2545
|
+
} = parseMetadataRpc(metadataRpc);
|
|
2546
|
+
return builder.buildStorage(pallet, entry);
|
|
2547
|
+
};
|
|
2548
|
+
const buildRootClaimableStorageCoder = async (connector, networkId, metadataRpc) => {
|
|
2549
|
+
let storageCoder = null;
|
|
2550
|
+
if (metadataRpc) {
|
|
2551
|
+
try {
|
|
2552
|
+
storageCoder = buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimable");
|
|
2553
|
+
} catch (cause) {
|
|
2554
|
+
log.warn(`Failed to build storage coder for SubtensorModule.RootClaimable using provided metadata on ${networkId}`, {
|
|
2555
|
+
cause
|
|
2556
|
+
});
|
|
2557
|
+
}
|
|
2558
|
+
}
|
|
2559
|
+
return storageCoder;
|
|
2560
|
+
};
|
|
2561
|
+
/**
 * Builds the storage codec for SubtensorModule.RootClaimed.
 * Resolves to null when no metadata is available or the entry cannot be built
 * (e.g. a runtime without RootClaimed); callers then treat everything as
 * unclaimed.
 */
const buildRootClaimedStorageCoder = async (networkId, metadataRpc) => {
  if (!metadataRpc) return null;
  try {
    return buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimed");
  } catch (cause) {
    log.warn(`Failed to build storage coder for SubtensorModule.RootClaimed using provided metadata on ${networkId}`, {
      cause
    });
    return null;
  }
};
|
|
2574
|
+
/**
 * Builds one state query per hotkey for SubtensorModule.RootClaimable.
 * Each query decodes into a [hotkey, Map<netuid, rate>] entry; a missing or
 * undecodable storage value yields an empty Map.
 */
const buildRootClaimableQueries = (networkId, hotkeys, storageCoder) =>
  hotkeys.map(hotkey => {
    let stateKey = null;
    try {
      stateKey = storageCoder.keys.enc(hotkey);
    } catch (cause) {
      log.warn(`Failed to encode storage key for hotkey ${hotkey} on ${networkId}`, {
        cause
      });
    }
    return {
      // NOTE(review): stateKey stays null when encoding failed — confirm the query layer tolerates null keys.
      stateKeys: [stateKey],
      decodeResult: changes => {
        const raw = changes[0];
        if (!raw) return [hotkey, new Map()];
        const decoded = decodeScale(storageCoder, raw, `Failed to decode RootClaimable for hotkey ${hotkey} on ${networkId}`);
        return [hotkey, decoded ? new Map(decoded) : new Map()];
      }
    };
  });
|
|
2598
|
+
/**
 * Fetches RootClaimable rates for a list of validator hotkeys.
 * Resolves to Map<hotkey, Map<netuid, claimableRate>>.
 * Any failure (missing storage coder, RPC error) degrades to an empty rate
 * map per hotkey so balance fetching can continue without claimable data.
 */
const fetchRootClaimableRates = async (connector, networkId, metadataRpc, hotkeys) => {
  if (!hotkeys.length) return new Map();
  // Fallback result: every hotkey present, but with no claimable entries.
  const emptyRates = () => new Map(hotkeys.map(hotkey => [hotkey, new Map()]));
  const storageCoder = await buildRootClaimableStorageCoder(connector, networkId, metadataRpc);
  if (!storageCoder) return emptyRates();
  try {
    const entries = await fetchRpcQueryPack(connector, networkId, buildRootClaimableQueries(networkId, hotkeys, storageCoder));
    return new Map(entries);
  } catch (cause) {
    log.warn(`Failed to fetch RootClaimable for hotkeys on ${networkId}`, {
      cause
    });
    return emptyRates();
  }
};
|
|
2617
|
+
/**
 * Builds one state query per (address, hotkey, netuid) triple for the
 * SubtensorModule.RootClaimed storage entry. Each query decodes into an
 * [address, hotkey, netuid, claimedAmount] tuple, defaulting the amount to 0n
 * when the storage value is absent or cannot be decoded.
 */
const buildRootClaimedQueries = (networkId, addressHotkeyNetuidPairs, storageCoder) =>
  addressHotkeyNetuidPairs.map(([address, hotkey, netuid]) => {
    let stateKey = null;
    try {
      // RootClaimed storage takes params: [netuid, hotkey, coldkey_ss58]
      stateKey = storageCoder.keys.enc(netuid, hotkey, address);
    } catch (cause) {
      log.warn(`Failed to encode storage key for RootClaimed (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`, {
        cause
      });
    }
    return {
      // NOTE(review): stateKey stays null when encoding failed — confirm the query layer tolerates null keys.
      stateKeys: [stateKey],
      decodeResult: changes => {
        const raw = changes[0];
        if (!raw) return [address, hotkey, netuid, 0n];
        const decoded = decodeScale(storageCoder, raw, `Failed to decode RootClaimed for (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`);
        return [address, hotkey, netuid, decoded ?? 0n];
      }
    };
  });
|
|
2642
|
+
/**
 * Fetches the already-claimed root amounts (SubtensorModule.RootClaimed) for
 * each (coldkey address, hotkey, netuid) triple.
 *
 * Returns a nested Map: address -> hotkey -> netuid -> claimed amount (bigint).
 * On any failure (no storage coder, RPC error) every requested triple is
 * reported as 0n so downstream pending-claim math still runs.
 *
 * Refactor: the nested-map insertion was previously written out three times
 * and the zero fallback built twice; both are now local helpers. Behavior and
 * signature are unchanged.
 */
const fetchRootClaimedAmounts = async (connector, networkId, metadataRpc, addressHotkeyNetuidPairs) => {
  if (!addressHotkeyNetuidPairs.length) {
    return new Map();
  }
  // Inserts one (address, hotkey, netuid) -> claimed entry into the nested map.
  const setClaimed = (result, address, hotkey, netuid, claimed) => {
    if (!result.has(address)) result.set(address, new Map());
    const addressMap = result.get(address);
    if (!addressMap.has(hotkey)) addressMap.set(hotkey, new Map());
    addressMap.get(hotkey).set(netuid, claimed);
  };
  // Fallback: every requested triple maps to 0n (nothing claimed).
  const emptyResult = () => {
    const result = new Map();
    for (const [address, hotkey, netuid] of addressHotkeyNetuidPairs) {
      setClaimed(result, address, hotkey, netuid, 0n);
    }
    return result;
  };
  const storageCoder = await buildRootClaimedStorageCoder(networkId, metadataRpc);
  if (!storageCoder) return emptyResult();
  const queries = buildRootClaimedQueries(networkId, addressHotkeyNetuidPairs, storageCoder);
  try {
    const results = await fetchRpcQueryPack(connector, networkId, queries);
    // Build a nested map: address -> hotkey -> netuid -> claimed amount
    const result = new Map();
    for (const [address, hotkey, netuid, claimed] of results) {
      setClaimed(result, address, hotkey, netuid, claimed);
    }
    return result;
  } catch (cause) {
    log.warn(`Failed to fetch RootClaimed for address-hotkey-netuid pairs on ${networkId}`, {
      cause
    });
    return emptyResult();
  }
};
|
|
2412
2685
|
|
|
2413
2686
|
// hardcoded because we dont have access to native tokens from the balance module
|
|
2414
2687
|
const NATIVE_TOKEN_SYMBOLS = {
|
|
@@ -2508,7 +2781,7 @@ const getData$4 = metadataRpc => {
|
|
|
2508
2781
|
if (!isBittensor) return null;
|
|
2509
2782
|
compactMetadata(metadata, [{
|
|
2510
2783
|
pallet: "SubtensorModule",
|
|
2511
|
-
items: ["TransferToggle"]
|
|
2784
|
+
items: ["TransferToggle", "RootClaimable", "RootClaimed"]
|
|
2512
2785
|
}], [{
|
|
2513
2786
|
runtimeApi: "StakeInfoRuntimeApi",
|
|
2514
2787
|
methods: ["get_stake_info_for_coldkeys"]
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@talismn/balances",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.3.1",
|
|
4
4
|
"author": "Talisman",
|
|
5
5
|
"homepage": "https://talisman.xyz",
|
|
6
6
|
"license": "GPL-3.0-or-later",
|
|
@@ -38,14 +38,14 @@
|
|
|
38
38
|
"scale-ts": "^1.6.1",
|
|
39
39
|
"viem": "^2.27.3",
|
|
40
40
|
"zod": "^3.25.76",
|
|
41
|
+
"@talismn/chaindata-provider": "1.3.4",
|
|
41
42
|
"@talismn/crypto": "0.3.0",
|
|
43
|
+
"@talismn/chain-connectors": "0.0.13",
|
|
42
44
|
"@talismn/sapi": "0.1.0",
|
|
43
45
|
"@talismn/scale": "0.3.0",
|
|
44
46
|
"@talismn/solana": "0.0.5",
|
|
45
|
-
"@talismn/token-rates": "3.0.
|
|
46
|
-
"@talismn/util": "0.5.6"
|
|
47
|
-
"@talismn/chaindata-provider": "1.3.2",
|
|
48
|
-
"@talismn/chain-connectors": "0.0.11"
|
|
47
|
+
"@talismn/token-rates": "3.0.15",
|
|
48
|
+
"@talismn/util": "0.5.6"
|
|
49
49
|
},
|
|
50
50
|
"devDependencies": {
|
|
51
51
|
"@polkadot/api-contract": "16.1.2",
|