csm-dashboard 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- csm_dashboard-0.2.0.dist-info/METADATA +354 -0
- csm_dashboard-0.2.0.dist-info/RECORD +27 -0
- csm_dashboard-0.2.0.dist-info/WHEEL +4 -0
- csm_dashboard-0.2.0.dist-info/entry_points.txt +2 -0
- src/__init__.py +1 -0
- src/cli/__init__.py +1 -0
- src/cli/commands.py +624 -0
- src/core/__init__.py +1 -0
- src/core/config.py +42 -0
- src/core/contracts.py +19 -0
- src/core/types.py +153 -0
- src/data/__init__.py +1 -0
- src/data/beacon.py +370 -0
- src/data/cache.py +67 -0
- src/data/etherscan.py +78 -0
- src/data/ipfs_logs.py +267 -0
- src/data/known_cids.py +30 -0
- src/data/lido_api.py +35 -0
- src/data/onchain.py +258 -0
- src/data/rewards_tree.py +58 -0
- src/data/strikes.py +214 -0
- src/main.py +39 -0
- src/services/__init__.py +1 -0
- src/services/operator_service.py +320 -0
- src/web/__init__.py +1 -0
- src/web/app.py +576 -0
- src/web/routes.py +161 -0
src/data/strikes.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
"""Fetch and parse the strikes merkle tree from IPFS via CSStrikes contract."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import time
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
import httpx
|
|
10
|
+
from web3 import Web3
|
|
11
|
+
|
|
12
|
+
from ..core.config import get_settings
|
|
13
|
+
from .cache import cached
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass
class ValidatorStrikes:
    """Strike information for a single validator.

    Entries come from the CSStrikes merkle tree published on IPFS;
    validators with zero strikes do not appear in the tree at all.
    """

    pubkey: str  # Validator BLS public key as stored in the tree
    strikes: list[int]  # Array of 6 values (0 or 1) representing strikes per frame
    strike_count: int  # Total strikes in the 6-frame window
    at_ejection_risk: bool  # True if 3+ strikes (eligible for ejection)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class StrikesProvider:
    """Fetches strikes data from CSStrikes contract via IPFS.

    The CSStrikes contract exposes the CID of a merkle tree of validator
    strikes. This provider resolves the CID on-chain, downloads the tree
    from public IPFS gateways (with a persistent on-disk cache plus
    short-lived in-memory caching via ``@cached``), and exposes
    per-operator views of the data.
    """

    # IPFS gateways to try in order (same as ipfs_logs.py)
    GATEWAYS = [
        "https://dweb.link/ipfs/",
        "https://ipfs.io/ipfs/",
        "https://cloudflare-ipfs.com/ipfs/",
    ]

    # Rate limiting: minimum seconds between gateway requests
    MIN_REQUEST_INTERVAL = 1.0

    # CSStrikes contract ABI (only treeCid function needed)
    CSSTRIKES_ABI = [
        {
            "inputs": [],
            "name": "treeCid",
            "outputs": [{"internalType": "string", "name": "", "type": "string"}],
            "stateMutability": "view",
            "type": "function",
        }
    ]

    def __init__(self, rpc_url: str | None = None, cache_dir: Path | None = None):
        """
        Args:
            rpc_url: Ethereum JSON-RPC endpoint; falls back to the configured
                ``eth_rpc_url`` when None.
            cache_dir: Directory for the on-disk tree cache; defaults to
                ``~/.cache/csm-dashboard/strikes``.
        """
        self.settings = get_settings()
        self.w3 = Web3(Web3.HTTPProvider(rpc_url or self.settings.eth_rpc_url))
        self.cache_dir = cache_dir or Path.home() / ".cache" / "csm-dashboard" / "strikes"
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self._last_request_time = 0.0
        self._rate_limit_lock = asyncio.Lock()

        # Initialize CSStrikes contract
        self.csstrikes = self.w3.eth.contract(
            address=Web3.to_checksum_address(self.settings.csstrikes_address),
            abi=self.CSSTRIKES_ABI,
        )

    def _get_cache_path(self, cid: str) -> Path:
        """Get the cache file path for a CID."""
        return self.cache_dir / f"{cid}.json"

    def _load_from_cache(self, cid: str) -> dict | None:
        """Load tree data from local cache if available.

        A corrupt or unreadable cache file is deleted so the next fetch
        repopulates it from IPFS instead of failing repeatedly.
        """
        cache_path = self._get_cache_path(cid)
        if cache_path.exists():
            try:
                with open(cache_path) as f:
                    return json.load(f)
            except (json.JSONDecodeError, OSError):
                cache_path.unlink(missing_ok=True)
        return None

    def _save_to_cache(self, cid: str, data: dict) -> None:
        """Save tree data to local cache (best-effort)."""
        cache_path = self._get_cache_path(cid)
        try:
            with open(cache_path, "w") as f:
                json.dump(data, f)
        except OSError:
            # The cache is only an optimization; never fail the fetch over it.
            pass

    async def _rate_limit(self) -> None:
        """Ensure minimum interval between IPFS gateway requests.

        The lock serializes concurrent callers so the interval is honored
        across tasks, not just within one.
        """
        async with self._rate_limit_lock:
            now = time.time()
            elapsed = now - self._last_request_time
            if elapsed < self.MIN_REQUEST_INTERVAL:
                await asyncio.sleep(self.MIN_REQUEST_INTERVAL - elapsed)
            self._last_request_time = time.time()

    @cached(ttl=300)  # Cache CID for 5 minutes
    async def get_tree_cid(self) -> str:
        """Get the current strikes tree CID from the contract.

        The web3 contract call is synchronous, so it is executed in a
        worker thread to avoid blocking the event loop (the original code
        called it inline from this coroutine).
        """
        return await asyncio.to_thread(self.csstrikes.functions.treeCid().call)

    async def _fetch_tree_from_ipfs(self, cid: str) -> dict | None:
        """Fetch tree data from IPFS gateways.

        Checks the on-disk cache first; on a miss, tries each gateway in
        order and caches the first successful response. Returns None if
        every gateway fails.
        """
        # Check cache first
        cached_data = self._load_from_cache(cid)
        if cached_data is not None:
            return cached_data

        # Rate limit gateway requests
        await self._rate_limit()

        # Try each gateway
        async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
            for gateway in self.GATEWAYS:
                try:
                    url = f"{gateway}{cid}"
                    response = await client.get(url)
                    if response.status_code == 200:
                        data = response.json()
                        # Cache the successful result
                        self._save_to_cache(cid, data)
                        return data
                except Exception:
                    # Best-effort: any gateway failure just moves on to the next.
                    continue

        return None

    @cached(ttl=300)  # Cache parsed tree for 5 minutes
    async def fetch_strikes_tree(self) -> dict | None:
        """
        Fetch and return the full strikes tree.

        Returns dict with:
        - format: "standard-v1"
        - leafEncoding: ["uint256", "bytes", "uint256[]"]
        - tree: list of merkle tree nodes
        - values: list of {treeIndex, value: [operatorId, pubkey, strikesArray]}

        Returns None when the contract has no CID or IPFS retrieval fails.
        """
        cid = await self.get_tree_cid()
        if not cid:
            return None
        return await self._fetch_tree_from_ipfs(cid)

    async def get_operator_strikes(self, operator_id: int) -> list[ValidatorStrikes]:
        """
        Get strikes for all validators belonging to an operator.

        Args:
            operator_id: The CSM operator ID

        Returns:
            List of ValidatorStrikes for validators with any strikes.
            Validators with 0 strikes are not included in the tree.
        """
        tree_data = await self.fetch_strikes_tree()
        if not tree_data:
            return []

        values = tree_data.get("values", [])
        operator_strikes = []

        for entry in values:
            value = entry.get("value", [])
            # Malformed leaves (fewer than [operatorId, pubkey, strikes]) are skipped.
            if len(value) < 3:
                continue

            entry_operator_id = value[0]
            pubkey = value[1]
            strikes_array = value[2]

            if entry_operator_id != operator_id:
                continue

            # Count total strikes (sum of the 6-frame array); tolerate a
            # non-list payload by treating it as zero strikes.
            strike_count = sum(strikes_array) if isinstance(strikes_array, list) else 0

            operator_strikes.append(
                ValidatorStrikes(
                    pubkey=pubkey,
                    strikes=strikes_array if isinstance(strikes_array, list) else [],
                    strike_count=strike_count,
                    at_ejection_risk=strike_count >= 3,
                )
            )

        return operator_strikes

    async def get_operator_strike_summary(
        self, operator_id: int
    ) -> dict[str, int]:
        """
        Get a summary of strikes for an operator.

        Returns:
            Dict with:
            - total_validators_with_strikes: Count of validators with any strikes
            - validators_at_risk: Count of validators with 3+ strikes
            - validators_near_ejection: Count of validators with exactly 2 strikes
            - total_strikes: Sum of all strikes across all validators
            - max_strikes: Highest strike count on any single validator
        """
        strikes = await self.get_operator_strikes(operator_id)

        return {
            "total_validators_with_strikes": len(strikes),
            "validators_at_risk": sum(1 for s in strikes if s.at_ejection_risk),
            "validators_near_ejection": sum(1 for s in strikes if s.strike_count == 2),
            "total_strikes": sum(s.strike_count for s in strikes),
            "max_strikes": max((s.strike_count for s in strikes), default=0),
        }

    def clear_cache(self) -> None:
        """Clear all cached strikes data from the on-disk cache directory."""
        for cache_file in self.cache_dir.glob("*.json"):
            cache_file.unlink(missing_ok=True)
|
src/main.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""Main entry point for the CSM Dashboard application."""
|
|
2
|
+
|
|
3
|
+
import typer
|
|
4
|
+
|
|
5
|
+
from .cli.commands import app as cli_app
|
|
6
|
+
|
|
7
|
+
# Create main app that includes all CLI commands
app = typer.Typer(
    name="csm-dashboard",
    help="Lido CSM Operator Dashboard - Track your validator earnings",
)

# Add all CLI commands from the commands module.
# name="" merges them into the top level instead of nesting them
# under a sub-command group.
app.add_typer(cli_app, name="")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@app.command()
def serve(
    host: str = typer.Option("127.0.0.1", help="Host to bind to"),
    port: int = typer.Option(8080, help="Port to bind to"),
    reload: bool = typer.Option(False, "--reload", help="Enable auto-reload for development"),
):
    """Start the web dashboard server.

    In normal mode the FastAPI app is built once and handed to uvicorn.
    With --reload, uvicorn's reloader subprocess must re-import the app
    itself, so an import string plus factory=True is passed instead — and
    no app instance is built in this parent process (the original code
    called create_app() unconditionally, constructing a discarded app and
    running its startup side effects twice in reload mode).
    """
    # Imported lazily so plain CLI commands don't pay the web-stack cost.
    import uvicorn

    if reload:
        uvicorn.run(
            "src.web.app:create_app",
            host=host,
            port=port,
            reload=True,
            factory=True,
        )
    else:
        from .web.app import create_app

        uvicorn.run(create_app(), host=host, port=port)


if __name__ == "__main__":
    app()
|
src/services/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Service layer - business logic."""
|
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
"""Main service for computing operator rewards."""
|
|
2
|
+
|
|
3
|
+
from decimal import Decimal
|
|
4
|
+
|
|
5
|
+
from ..core.types import APYMetrics, BondSummary, HealthStatus, OperatorRewards, StrikeSummary
|
|
6
|
+
from ..data.beacon import (
|
|
7
|
+
BeaconDataProvider,
|
|
8
|
+
ValidatorInfo,
|
|
9
|
+
aggregate_validator_status,
|
|
10
|
+
calculate_avg_effectiveness,
|
|
11
|
+
count_at_risk_validators,
|
|
12
|
+
count_slashed_validators,
|
|
13
|
+
epoch_to_datetime,
|
|
14
|
+
get_earliest_activation,
|
|
15
|
+
)
|
|
16
|
+
from ..data.ipfs_logs import IPFSLogProvider
|
|
17
|
+
from ..data.lido_api import LidoAPIProvider
|
|
18
|
+
from ..data.onchain import OnChainDataProvider
|
|
19
|
+
from ..data.rewards_tree import RewardsTreeProvider
|
|
20
|
+
from ..data.strikes import StrikesProvider
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class OperatorService:
    """Orchestrates data from multiple sources to compute final rewards.

    Combines on-chain contract state, the rewards merkle tree, beacon-chain
    validator data, Lido API rates, IPFS distribution logs, and strikes data
    into a single OperatorRewards view.
    """

    def __init__(self, rpc_url: str | None = None):
        # rpc_url is forwarded to the providers that talk to an Ethereum
        # node; None lets each provider fall back to its configured default.
        self.onchain = OnChainDataProvider(rpc_url)
        self.rewards_tree = RewardsTreeProvider()
        self.beacon = BeaconDataProvider()
        self.lido_api = LidoAPIProvider()
        self.ipfs_logs = IPFSLogProvider()
        self.strikes = StrikesProvider(rpc_url)

    async def get_operator_by_address(
        self, address: str, include_validators: bool = False
    ) -> OperatorRewards | None:
        """
        Main entry point: get complete rewards data for an address.
        Returns None if address is not a CSM operator.
        """
        # Step 1: Find operator ID by address
        operator_id = await self.onchain.find_operator_by_address(address)
        if operator_id is None:
            return None

        return await self.get_operator_by_id(operator_id, include_validators)

    async def get_operator_by_id(
        self, operator_id: int, include_validators: bool = False
    ) -> OperatorRewards | None:
        """Get complete rewards data for an operator ID.

        Returns None when the operator ID does not exist on-chain. Validator
        details, APY, and health are only populated when include_validators
        is True and the operator has deposited keys; otherwise those fields
        stay None / empty in the returned OperatorRewards.
        """
        # Imported here to keep web3 off the module import path.
        from web3.exceptions import ContractLogicError

        # Step 1: Get operator info
        try:
            operator = await self.onchain.get_node_operator(operator_id)
        except ContractLogicError:
            # Operator ID doesn't exist on-chain
            return None

        # Step 2: Get bond summary
        bond = await self.onchain.get_bond_summary(operator_id)

        # Step 3: Get rewards from merkle tree
        rewards_info = await self.rewards_tree.get_operator_rewards(operator_id)

        # Step 4: Get already distributed (claimed) shares
        distributed = await self.onchain.get_distributed_shares(operator_id)

        # Step 5: Calculate unclaimed (clamped at 0 so a tree lagging behind
        # distributions can't produce a negative balance)
        cumulative_shares = (
            rewards_info.cumulative_fee_shares if rewards_info else 0
        )
        unclaimed_shares = max(0, cumulative_shares - distributed)

        # Step 6: Convert shares to ETH
        unclaimed_eth = await self.onchain.shares_to_eth(unclaimed_shares)
        cumulative_eth = await self.onchain.shares_to_eth(cumulative_shares)
        distributed_eth = await self.onchain.shares_to_eth(distributed)

        # Step 7: Calculate total claimable
        total_claimable = bond.excess_bond_eth + unclaimed_eth

        # Step 8: Get validator details if requested
        validator_details: list[ValidatorInfo] = []
        validators_by_status: dict[str, int] | None = None
        avg_effectiveness: float | None = None
        apy_metrics: APYMetrics | None = None
        active_since = None  # datetime of earliest validator activation, if any
        health_status: HealthStatus | None = None

        if include_validators and operator.total_deposited_keys > 0:
            # Get validator pubkeys
            pubkeys = await self.onchain.get_signing_keys(
                operator_id, 0, operator.total_deposited_keys
            )
            # Fetch validator status from beacon chain
            validator_details = await self.beacon.get_validators_by_pubkeys(pubkeys)
            validators_by_status = aggregate_validator_status(validator_details)
            avg_effectiveness = calculate_avg_effectiveness(validator_details)
            active_since = get_earliest_activation(validator_details)

            # Step 9: Calculate APY metrics (using historical IPFS data)
            apy_metrics = await self.calculate_apy_metrics(
                operator_id=operator_id,
                bond_eth=bond.current_bond_eth,
            )

            # Step 10: Calculate health status
            health_status = await self.calculate_health_status(
                operator_id=operator_id,
                bond=bond,
                stuck_validators_count=operator.stuck_validators_count,
                validator_details=validator_details,
            )

        return OperatorRewards(
            node_operator_id=operator_id,
            manager_address=operator.manager_address,
            reward_address=operator.reward_address,
            current_bond_eth=bond.current_bond_eth,
            required_bond_eth=bond.required_bond_eth,
            excess_bond_eth=bond.excess_bond_eth,
            cumulative_rewards_shares=cumulative_shares,
            cumulative_rewards_eth=cumulative_eth,
            distributed_shares=distributed,
            distributed_eth=distributed_eth,
            unclaimed_shares=unclaimed_shares,
            unclaimed_eth=unclaimed_eth,
            total_claimable_eth=total_claimable,
            total_validators=operator.total_deposited_keys,
            active_validators=operator.total_deposited_keys - operator.total_exited_keys,
            exited_validators=operator.total_exited_keys,
            validator_details=validator_details,
            validators_by_status=validators_by_status,
            avg_effectiveness=avg_effectiveness,
            apy=apy_metrics,
            active_since=active_since,
            health=health_status,
        )

    async def get_all_operators_with_rewards(self) -> list[int]:
        """Get list of all operator IDs that have rewards in the tree."""
        return await self.rewards_tree.get_all_operators_with_rewards()

    async def calculate_apy_metrics(
        self,
        operator_id: int,
        bond_eth: Decimal,
    ) -> APYMetrics:
        """Calculate APY metrics for an operator using historical IPFS data.

        Note: Validator APY (consensus rewards) is NOT calculated because CSM operators
        don't receive those rewards directly - they go to Lido protocol and are
        redistributed via CSM reward distributions (captured in reward_apy).

        Historical reward APY is best-effort: any failure while fetching or
        parsing the IPFS logs leaves the 28d/lifetime values as None, and the
        net APY then degrades to the bond APY alone.
        """
        historical_reward_apy_28d = None
        historical_reward_apy_ltd = None

        # 1. Try to get historical APY from IPFS distribution logs
        #    (skipped entirely when there is no bond to divide by)
        if bond_eth > 0:
            try:
                # Query historical log CIDs from contract events
                log_history = await self.onchain.get_distribution_log_history()

                if log_history:
                    # Fetch operator's historical frame data
                    frames = await self.ipfs_logs.get_operator_history(
                        operator_id, log_history
                    )

                    if frames:
                        # Calculate APY for 28-day and lifetime periods
                        apy_results = self.ipfs_logs.calculate_historical_apy(
                            frames=frames,
                            bond_eth=bond_eth,
                            periods=[28, None],  # 28-day and lifetime
                        )
                        historical_reward_apy_28d = apy_results.get("28d")
                        historical_reward_apy_ltd = apy_results.get("ltd")
            except Exception:
                # If historical APY calculation fails, continue without it
                pass

        # 2. Bond APY (stETH protocol rebase rate)
        steth_data = await self.lido_api.get_steth_apr()
        bond_apy = steth_data.get("apr")

        # 3. Net APY (Historical Reward APY + Bond APY); falls back to bond
        #    APY alone when historical data is unavailable.
        net_apy_28d = None
        net_apy_ltd = None

        if historical_reward_apy_28d is not None and bond_apy is not None:
            net_apy_28d = historical_reward_apy_28d + bond_apy
        elif bond_apy is not None:
            net_apy_28d = bond_apy

        if historical_reward_apy_ltd is not None and bond_apy is not None:
            net_apy_ltd = historical_reward_apy_ltd + bond_apy
        elif bond_apy is not None:
            net_apy_ltd = bond_apy

        return APYMetrics(
            historical_reward_apy_28d=historical_reward_apy_28d,
            historical_reward_apy_ltd=historical_reward_apy_ltd,
            bond_apy=bond_apy,
            net_apy_28d=net_apy_28d,
            net_apy_ltd=net_apy_ltd,
        )

    async def calculate_health_status(
        self,
        operator_id: int,
        bond: BondSummary,
        stuck_validators_count: int,
        validator_details: list[ValidatorInfo],
    ) -> HealthStatus:
        """Calculate health status for an operator.

        Includes bond health, stuck validators, slashing, at-risk validators, and strikes.

        Args:
            operator_id: CSM operator ID (used for the strikes lookup).
            bond: Current vs required bond amounts.
            stuck_validators_count: Stuck-key count from the on-chain operator record.
            validator_details: Beacon-chain info for the operator's validators.
        """
        # Bond health
        bond_healthy = bond.current_bond_eth >= bond.required_bond_eth
        bond_deficit = max(Decimal(0), bond.required_bond_eth - bond.current_bond_eth)

        # Count slashed and at-risk validators
        slashed_count = count_slashed_validators(validator_details)
        at_risk_count = count_at_risk_validators(validator_details)

        # Get strikes data (best-effort: an empty StrikeSummary is used if
        # the strikes provider fails, so health reporting never blocks on it)
        strike_summary = StrikeSummary()
        try:
            summary = await self.strikes.get_operator_strike_summary(operator_id)
            strike_summary = StrikeSummary(
                total_validators_with_strikes=summary.get("total_validators_with_strikes", 0),
                validators_at_risk=summary.get("validators_at_risk", 0),
                validators_near_ejection=summary.get("validators_near_ejection", 0),
                total_strikes=summary.get("total_strikes", 0),
                max_strikes=summary.get("max_strikes", 0),
            )
        except Exception:
            # If strikes fetch fails, continue with empty summary
            pass

        return HealthStatus(
            bond_healthy=bond_healthy,
            bond_deficit_eth=bond_deficit,
            stuck_validators_count=stuck_validators_count,
            slashed_validators_count=slashed_count,
            validators_at_risk_count=at_risk_count,
            strikes=strike_summary,
        )

    async def get_operator_strikes(self, operator_id: int):
        """Get detailed strikes for an operator's validators.

        Thin pass-through to StrikesProvider.get_operator_strikes.
        """
        return await self.strikes.get_operator_strikes(operator_id)

    async def get_recent_frame_dates(self, count: int = 6) -> list[dict]:
        """Get date ranges for the most recent N distribution frames.

        Returns list of {start, end} dicts with formatted date strings,
        ordered from oldest to newest (matching strikes array order).
        Missing frames are padded at the front with placeholder labels so
        the result always has exactly `count` entries.
        """
        try:
            log_history = await self.onchain.get_distribution_log_history()
        except Exception:
            return []

        if not log_history:
            return []

        # Get last N frames (log_history is already sorted oldest-first)
        recent_logs = log_history[-count:] if len(log_history) >= count else log_history

        frame_dates = []
        for entry in recent_logs:
            try:
                log_data = await self.ipfs_logs.fetch_log(entry["logCid"])
                if log_data:
                    start_epoch, end_epoch = self.ipfs_logs.get_frame_info(log_data)
                    start_date = epoch_to_datetime(start_epoch)
                    end_date = epoch_to_datetime(end_epoch)
                    frame_dates.append({
                        "start": start_date.strftime("%b %d"),
                        "end": end_date.strftime("%b %d"),
                    })
            except Exception:
                # Skip frames we can't fetch
                continue

        # Pad to ensure we always have `count` entries (for UI consistency)
        # Pad at the beginning since strikes array is ordered oldest to newest
        # NOTE(review): successive inserts at index 0 leave the placeholders
        # in reverse numeric order (e.g. "Frame 2" before "Frame 1") — confirm
        # this is the intended labeling.
        frame_number = 1
        while len(frame_dates) < count:
            frame_dates.insert(0, {"start": f"Frame {frame_number}", "end": ""})
            frame_number += 1

        return frame_dates

    async def get_operator_active_since(self, operator_id: int):
        """Get operator's first validator activation date (lightweight).

        Returns datetime or None if no validators have been activated.
        Only the first signing key is fetched to minimize beacon API calls;
        presumably keys are deposited in chronological order — verify, since
        otherwise this may not be the true earliest activation.
        """
        # NOTE(review): this local import appears unused in this method.
        from datetime import datetime

        try:
            operator = await self.onchain.get_node_operator(operator_id)
            if operator.total_deposited_keys == 0:
                return None

            # Get just the first pubkey to minimize beacon chain API calls
            pubkeys = await self.onchain.get_signing_keys(operator_id, 0, 1)
            if not pubkeys:
                return None

            validators = await self.beacon.get_validators_by_pubkeys(pubkeys)
            return get_earliest_activation(validators)
        except Exception:
            return None
|
src/web/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Web module - FastAPI application."""
|