csm-dashboard 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- csm_dashboard-0.2.0.dist-info/METADATA +354 -0
- csm_dashboard-0.2.0.dist-info/RECORD +27 -0
- csm_dashboard-0.2.0.dist-info/WHEEL +4 -0
- csm_dashboard-0.2.0.dist-info/entry_points.txt +2 -0
- src/__init__.py +1 -0
- src/cli/__init__.py +1 -0
- src/cli/commands.py +624 -0
- src/core/__init__.py +1 -0
- src/core/config.py +42 -0
- src/core/contracts.py +19 -0
- src/core/types.py +153 -0
- src/data/__init__.py +1 -0
- src/data/beacon.py +370 -0
- src/data/cache.py +67 -0
- src/data/etherscan.py +78 -0
- src/data/ipfs_logs.py +267 -0
- src/data/known_cids.py +30 -0
- src/data/lido_api.py +35 -0
- src/data/onchain.py +258 -0
- src/data/rewards_tree.py +58 -0
- src/data/strikes.py +214 -0
- src/main.py +39 -0
- src/services/__init__.py +1 -0
- src/services/operator_service.py +320 -0
- src/web/__init__.py +1 -0
- src/web/app.py +576 -0
- src/web/routes.py +161 -0
src/core/types.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
"""Data models for CSM Dashboard."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from decimal import Decimal
|
|
5
|
+
from typing import TYPE_CHECKING, Any
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel
|
|
8
|
+
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from ..data.beacon import ValidatorInfo
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class NodeOperator(BaseModel):
    """Node operator data from CSModule contract.

    Field names presumably mirror the struct returned by the CSModule
    ``getNodeOperator`` call — confirm against the contract ABI.
    All key counts are cumulative totals as reported on-chain.
    """

    node_operator_id: int
    # Cumulative key lifecycle counters.
    total_added_keys: int
    total_withdrawn_keys: int
    total_deposited_keys: int
    total_vetted_keys: int
    stuck_validators_count: int
    depositable_validators_count: int
    # Per-operator validator limit and its mode.
    # NOTE(review): mode semantics (e.g. 0 = disabled) not evident here — confirm.
    target_limit: int
    target_limit_mode: int
    total_exited_keys: int
    enqueued_count: int
    # Management / reward addresses, including pending change proposals.
    manager_address: str
    proposed_manager_address: str
    reward_address: str
    proposed_reward_address: str
    extended_manager_permissions: bool
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class BondSummary(BaseModel):
    """Bond information for an operator.

    Both wei and ETH denominations are carried so callers can display
    human-readable values without re-deriving them.
    """

    # Raw on-chain values (wei).
    current_bond_wei: int
    required_bond_wei: int
    # Derived values in ETH (Decimal to avoid float rounding).
    current_bond_eth: Decimal
    required_bond_eth: Decimal
    excess_bond_eth: Decimal
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class RewardsInfo(BaseModel):
    """Rewards data from merkle tree.

    Holds an operator's leaf value plus the proof needed to claim it.
    """

    # Cumulative fee shares allocated to the operator in the rewards tree.
    cumulative_fee_shares: int
    # Merkle proof (hex strings) for the operator's leaf.
    proof: list[str]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class APYMetrics(BaseModel):
    """APY calculations for an operator.

    Note: Validator APY (consensus rewards) is NOT included because CSM operators
    don't receive those rewards directly - they go to Lido protocol and are
    redistributed via CSM reward distributions (captured in reward_apy).

    Historical APY is calculated from actual distributed rewards in IPFS logs,
    which is more accurate than calculating from unclaimed amounts.

    All fields are optional; None means the metric could not be computed.
    NOTE(review): units (fraction vs percent) are not evident here — confirm
    at the producer before formatting for display.
    """

    # Historical Reward APY (from IPFS distribution logs) - most accurate
    historical_reward_apy_28d: float | None = None  # Last ~28 days (1 frame)
    historical_reward_apy_ltd: float | None = None  # Lifetime

    # Bond APY (stETH rebase appreciation)
    bond_apy: float | None = None

    # Net APY (Historical Reward APY + Bond APY)
    net_apy_28d: float | None = None
    net_apy_ltd: float | None = None

    # Legacy fields (deprecated, kept for backwards compatibility)
    reward_apy_7d: float | None = None
    reward_apy_28d: float | None = None
    net_apy_7d: float | None = None
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class StrikeSummary(BaseModel):
    """Summary of strikes for an operator.

    All counters default to zero so an empty summary means "no strikes".
    """

    total_validators_with_strikes: int = 0
    validators_at_risk: int = 0  # Validators with 3+ strikes (ejection eligible)
    validators_near_ejection: int = 0  # Validators with 2 strikes (one away from ejection)
    total_strikes: int = 0
    max_strikes: int = 0  # Highest strike count on any single validator
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
class HealthStatus(BaseModel):
    """Overall health status for an operator.

    Defaults describe a fully healthy operator; data providers overwrite
    the relevant fields when problems are detected.
    """

    bond_healthy: bool = True
    # Shortfall between required and current bond (0 when healthy).
    bond_deficit_eth: Decimal = Decimal(0)
    stuck_validators_count: int = 0
    slashed_validators_count: int = 0
    validators_at_risk_count: int = 0  # Validators with balance < 32 ETH
    # Pydantic copies field defaults per instance, so sharing this default
    # StrikeSummary() across models is safe (no shared mutable state).
    strikes: StrikeSummary = StrikeSummary()

    @property
    def has_issues(self) -> bool:
        """Check if there are any health issues."""
        return (
            not self.bond_healthy
            or self.stuck_validators_count > 0
            or self.slashed_validators_count > 0
            or self.validators_at_risk_count > 0
            or self.strikes.total_validators_with_strikes > 0  # Any strikes = warning
        )
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class OperatorRewards(BaseModel):
    """Complete rewards summary for display.

    Aggregates bond, rewards, validator and health data into the single
    object rendered by the CLI and web views. Fields marked optional are
    only populated by the detailed lookup path.
    """

    # Allows non-pydantic values (the ValidatorInfo objects in
    # validator_details) to pass validation unchanged.
    model_config = {"arbitrary_types_allowed": True}

    node_operator_id: int
    manager_address: str
    reward_address: str

    # Bond information
    current_bond_eth: Decimal
    required_bond_eth: Decimal
    excess_bond_eth: Decimal

    # Rewards information
    cumulative_rewards_shares: int
    cumulative_rewards_eth: Decimal
    distributed_shares: int
    distributed_eth: Decimal
    unclaimed_shares: int
    unclaimed_eth: Decimal

    # Total claimable
    total_claimable_eth: Decimal

    # Validator counts (from on-chain)
    total_validators: int
    active_validators: int
    exited_validators: int

    # Validator details (from beacon chain, optional).
    # Typed as list[Any] because ValidatorInfo is not a pydantic model;
    # pydantic copies the [] default per instance, so it is not shared.
    validator_details: list[Any] = []  # list[ValidatorInfo]
    validators_by_status: dict[str, int] | None = None
    avg_effectiveness: float | None = None

    # APY metrics (optional, requires detailed lookup)
    apy: APYMetrics | None = None

    # Operator activation date (from earliest validator activation)
    active_since: datetime | None = None

    # Health status (optional, requires detailed lookup)
    health: HealthStatus | None = None
|
src/data/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Data layer - on-chain and off-chain data fetching."""
|
src/data/beacon.py
ADDED
|
@@ -0,0 +1,370 @@
|
|
|
1
|
+
"""Beacon chain data fetching via beaconcha.in API."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime, timedelta, timezone
|
|
4
|
+
from decimal import Decimal
|
|
5
|
+
from enum import Enum
|
|
6
|
+
|
|
7
|
+
import httpx
|
|
8
|
+
|
|
9
|
+
from ..core.config import get_settings
|
|
10
|
+
from .cache import cached
|
|
11
|
+
|
|
12
|
+
# Beacon Chain constants
# Mainnet genesis time (slot 0 of epoch 0).
BEACON_GENESIS = datetime(2020, 12, 1, 12, 0, 23, tzinfo=timezone.utc)
SECONDS_PER_EPOCH = 32 * 12  # 384 seconds (32 slots × 12 seconds per slot)


def epoch_to_datetime(epoch: int) -> datetime:
    """Return the UTC wall-clock time at which the given beacon epoch starts."""
    elapsed = timedelta(seconds=SECONDS_PER_EPOCH) * epoch
    return BEACON_GENESIS + elapsed
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def get_earliest_activation(validators: list["ValidatorInfo"]) -> datetime | None:
|
|
23
|
+
"""Get the earliest activation date from a list of validators."""
|
|
24
|
+
epochs = [v.activation_epoch for v in validators if v.activation_epoch is not None]
|
|
25
|
+
if not epochs:
|
|
26
|
+
return None
|
|
27
|
+
return epoch_to_datetime(min(epochs))
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ValidatorStatus(str, Enum):
|
|
31
|
+
"""Validator lifecycle status on the beacon chain."""
|
|
32
|
+
|
|
33
|
+
PENDING_INITIALIZED = "pending_initialized"
|
|
34
|
+
PENDING_QUEUED = "pending_queued"
|
|
35
|
+
ACTIVE_ONGOING = "active_ongoing"
|
|
36
|
+
ACTIVE_EXITING = "active_exiting"
|
|
37
|
+
ACTIVE_SLASHED = "active_slashed"
|
|
38
|
+
EXITED_UNSLASHED = "exited_unslashed"
|
|
39
|
+
EXITED_SLASHED = "exited_slashed"
|
|
40
|
+
WITHDRAWAL_POSSIBLE = "withdrawal_possible"
|
|
41
|
+
WITHDRAWAL_DONE = "withdrawal_done"
|
|
42
|
+
UNKNOWN = "unknown"
|
|
43
|
+
|
|
44
|
+
@classmethod
|
|
45
|
+
def from_beaconcha(cls, status: str) -> "ValidatorStatus":
|
|
46
|
+
"""Convert beaconcha.in status string to enum."""
|
|
47
|
+
status_map = {
|
|
48
|
+
"pending": cls.PENDING_QUEUED,
|
|
49
|
+
"active_online": cls.ACTIVE_ONGOING,
|
|
50
|
+
"active_offline": cls.ACTIVE_ONGOING,
|
|
51
|
+
"active": cls.ACTIVE_ONGOING,
|
|
52
|
+
"exiting_online": cls.ACTIVE_EXITING,
|
|
53
|
+
"exiting_offline": cls.ACTIVE_EXITING,
|
|
54
|
+
"exiting": cls.ACTIVE_EXITING,
|
|
55
|
+
"slashing_online": cls.ACTIVE_SLASHED,
|
|
56
|
+
"slashing_offline": cls.ACTIVE_SLASHED,
|
|
57
|
+
"slashing": cls.ACTIVE_SLASHED,
|
|
58
|
+
"slashed": cls.EXITED_SLASHED,
|
|
59
|
+
"exited": cls.EXITED_UNSLASHED,
|
|
60
|
+
"withdrawable": cls.WITHDRAWAL_POSSIBLE,
|
|
61
|
+
"withdrawn": cls.WITHDRAWAL_DONE,
|
|
62
|
+
}
|
|
63
|
+
return status_map.get(status.lower(), cls.UNKNOWN)
|
|
64
|
+
|
|
65
|
+
@property
|
|
66
|
+
def display_name(self) -> str:
|
|
67
|
+
"""Human-readable display name."""
|
|
68
|
+
names = {
|
|
69
|
+
self.PENDING_INITIALIZED: "Pending (Init)",
|
|
70
|
+
self.PENDING_QUEUED: "Pending",
|
|
71
|
+
self.ACTIVE_ONGOING: "Active",
|
|
72
|
+
self.ACTIVE_EXITING: "Exiting",
|
|
73
|
+
self.ACTIVE_SLASHED: "Slashed",
|
|
74
|
+
self.EXITED_UNSLASHED: "Exited",
|
|
75
|
+
self.EXITED_SLASHED: "Slashed & Exited",
|
|
76
|
+
self.WITHDRAWAL_POSSIBLE: "Withdrawable",
|
|
77
|
+
self.WITHDRAWAL_DONE: "Withdrawn",
|
|
78
|
+
self.UNKNOWN: "Unknown",
|
|
79
|
+
}
|
|
80
|
+
return names.get(self, "Unknown")
|
|
81
|
+
|
|
82
|
+
@property
|
|
83
|
+
def is_active(self) -> bool:
|
|
84
|
+
"""Check if validator is currently active."""
|
|
85
|
+
return self in (self.ACTIVE_ONGOING, self.ACTIVE_EXITING, self.ACTIVE_SLASHED)
|
|
86
|
+
|
|
87
|
+
@property
|
|
88
|
+
def is_exited(self) -> bool:
|
|
89
|
+
"""Check if validator has exited."""
|
|
90
|
+
return self in (
|
|
91
|
+
self.EXITED_UNSLASHED,
|
|
92
|
+
self.EXITED_SLASHED,
|
|
93
|
+
self.WITHDRAWAL_POSSIBLE,
|
|
94
|
+
self.WITHDRAWAL_DONE,
|
|
95
|
+
)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class ValidatorInfo:
    """Snapshot of a single validator's beacon-chain state."""

    def __init__(
        self,
        pubkey: str,
        index: int | None = None,
        status: ValidatorStatus = ValidatorStatus.UNKNOWN,
        balance_gwei: int = 0,
        effectiveness: float | None = None,
        activation_epoch: int | None = None,
        exit_epoch: int | None = None,
    ):
        self.pubkey = pubkey
        self.index = index
        self.status = status
        self.balance_gwei = balance_gwei
        self.effectiveness = effectiveness
        self.activation_epoch = activation_epoch
        self.exit_epoch = exit_epoch

    @property
    def balance_eth(self) -> Decimal:
        """Balance in ETH."""
        gwei_per_eth = Decimal(10**9)
        return Decimal(self.balance_gwei) / gwei_per_eth

    @property
    def at_risk(self) -> bool:
        """
        Check if validator is at risk due to low balance.

        A validator with effective balance < 32 ETH may face withdrawal penalties
        when exiting, as the difference will be confiscated from the operator's bond.
        Only active validators count as at risk.
        """
        full_balance_gwei = 32_000_000_000  # 32 ETH in gwei
        return self.status.is_active and self.balance_gwei < full_balance_gwei

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        return dict(
            pubkey=self.pubkey,
            index=self.index,
            status=self.status.value,
            status_display=self.status.display_name,
            balance_eth=float(self.balance_eth),
            effectiveness=self.effectiveness,
            activation_epoch=self.activation_epoch,
            exit_epoch=self.exit_epoch,
            at_risk=self.at_risk,
        )
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
class BeaconDataProvider:
    """Fetches validator data from beaconcha.in API.

    Each call opens a short-lived httpx.AsyncClient; results are memoised
    via the ``cached`` decorator, so repeated lookups within the TTL do not
    hit the API again.
    """

    def __init__(self) -> None:
        # Settings supply the API base URL and the optional API key.
        self.settings = get_settings()
        self.base_url = self.settings.beacon_api_url.rstrip("/")

    def _get_headers(self) -> dict[str, str]:
        """Get headers for API requests, including API key if configured."""
        headers = {"accept": "application/json"}
        if self.settings.beacon_api_key:
            headers["apikey"] = self.settings.beacon_api_key
        return headers

    @cached(ttl=300)  # Cache for 5 minutes
    async def get_validators_by_pubkeys(
        self, pubkeys: list[str]
    ) -> list[ValidatorInfo]:
        """
        Fetch validator info for multiple pubkeys.

        beaconcha.in supports comma-separated pubkeys (up to 100).

        NOTE(review): for HTTP statuses other than 200/404 the batch is
        silently dropped (no placeholder entries appended), so the result
        can contain fewer entries than ``pubkeys`` — confirm callers
        tolerate that.
        """
        if not pubkeys:
            return []

        validators = []
        batch_size = 100  # beaconcha.in limit

        async with httpx.AsyncClient(timeout=30.0) as client:
            for i in range(0, len(pubkeys), batch_size):
                batch = pubkeys[i : i + batch_size]
                pubkeys_param = ",".join(batch)

                try:
                    response = await client.get(
                        f"{self.base_url}/validator/{pubkeys_param}",
                        headers=self._get_headers(),
                    )

                    if response.status_code == 200:
                        data = response.json().get("data", [])
                        # API returns single object if only one validator
                        if isinstance(data, dict):
                            data = [data]

                        for v in data:
                            validators.append(self._parse_validator(v))
                    elif response.status_code == 404:
                        # Validators not found - create placeholder entries
                        # (treated as not-yet-activated deposits).
                        for pubkey in batch:
                            validators.append(
                                ValidatorInfo(
                                    pubkey=pubkey,
                                    status=ValidatorStatus.PENDING_INITIALIZED,
                                )
                            )
                except Exception:
                    # On error, add unknown status for this batch so the
                    # caller still gets one entry per requested pubkey.
                    for pubkey in batch:
                        validators.append(
                            ValidatorInfo(pubkey=pubkey, status=ValidatorStatus.UNKNOWN)
                        )

        return validators

    def _parse_validator(self, data: dict) -> ValidatorInfo:
        """Parse beaconcha.in validator response.

        Missing fields fall back to empty/zero defaults rather than raising.
        """
        return ValidatorInfo(
            pubkey=data.get("pubkey", ""),
            index=data.get("validatorindex"),
            status=ValidatorStatus.from_beaconcha(data.get("status", "unknown")),
            balance_gwei=data.get("balance", 0),
            # NOTE(review): units of "effectiveness" (fraction vs percent)
            # are defined by the API — confirm before displaying.
            effectiveness=data.get("effectiveness"),
            activation_epoch=data.get("activationepoch"),
            # Negative exit epochs (presumably a "no exit scheduled"
            # sentinel — confirm against the API docs) are mapped to None.
            exit_epoch=data.get("exitepoch") if data.get("exitepoch") is not None and data.get("exitepoch") >= 0 else None,
        )

    @cached(ttl=300)
    async def get_validator_performance(
        self, validator_index: int
    ) -> dict | None:
        """Get detailed performance metrics for a validator.

        Returns the raw API payload, or None on any non-200 response or
        transport error (best-effort; errors are swallowed).
        """
        async with httpx.AsyncClient(timeout=30.0) as client:
            try:
                response = await client.get(
                    f"{self.base_url}/validator/{validator_index}/performance",
                    headers=self._get_headers(),
                )

                if response.status_code == 200:
                    return response.json().get("data")
            except Exception:
                pass

        return None

    @cached(ttl=300)
    async def get_validator_income(
        self, validator_indices: list[int], days: int = 28
    ) -> dict:
        """
        Fetch validator income for a period.

        Uses the /validator/{indices}/incomedetailhistory endpoint.
        Returns total consensus rewards in ETH for the period.

        Args:
            validator_indices: List of validator indices to query
            days: Number of days of history to fetch (7 or 28)

        Best-effort: failed batches are skipped, so the total may be a
        partial sum.
        """
        if not validator_indices:
            return {"total_income_eth": Decimal(0), "days": days}

        total_income_gwei = 0
        batch_size = 100  # beaconcha.in limit

        # Calculate epoch limit (~225 epochs per day)
        epoch_limit = days * 225

        async with httpx.AsyncClient(timeout=60.0) as client:
            for i in range(0, len(validator_indices), batch_size):
                batch = validator_indices[i : i + batch_size]
                indices_param = ",".join(str(idx) for idx in batch)

                try:
                    response = await client.get(
                        f"{self.base_url}/validator/{indices_param}/incomedetailhistory",
                        params={"limit": epoch_limit},
                        headers=self._get_headers(),
                    )

                    if response.status_code == 200:
                        data = response.json().get("data", [])
                        # Handle single validator response (dict instead of list)
                        if isinstance(data, dict):
                            data = [data]

                        for entry in data:
                            # Each entry has income breakdown by reward type
                            # API returns: attestation_source_reward, attestation_target_reward,
                            # attestation_head_reward (not a "total" field)
                            income = entry.get("income", {})
                            if isinstance(income, dict):
                                # Sum all reward types (values are in gwei).
                                # NOTE(review): assumes the values are numeric;
                                # a non-numeric value would raise inside the
                                # try and drop this batch.
                                total_income_gwei += sum(income.values())
                            elif isinstance(income, int):
                                total_income_gwei += income
                except Exception:
                    # On error, continue with partial data
                    pass

        return {
            "total_income_eth": Decimal(total_income_gwei) / Decimal(10**9),
            "days": days,
        }
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
def aggregate_validator_status(validators: list[ValidatorInfo]) -> dict[str, int]:
    """
    Aggregate validator statuses into counts.

    Returns dict like: {"active": 198, "pending": 1, "exited": 1, "slashed": 0}
    Slashed validators are counted as "slashed" whether still active or exited.
    """
    counts = dict.fromkeys(
        ("active", "pending", "exiting", "exited", "slashed", "unknown"), 0
    )

    pending_states = (
        ValidatorStatus.PENDING_INITIALIZED,
        ValidatorStatus.PENDING_QUEUED,
    )
    slashed_states = (
        ValidatorStatus.ACTIVE_SLASHED,
        ValidatorStatus.EXITED_SLASHED,
    )

    for validator in validators:
        state = validator.status
        if state in pending_states:
            counts["pending"] += 1
        elif state in slashed_states:
            counts["slashed"] += 1
        elif state == ValidatorStatus.ACTIVE_EXITING:
            counts["exiting"] += 1
        elif state == ValidatorStatus.ACTIVE_ONGOING:
            counts["active"] += 1
        elif state.is_exited:
            counts["exited"] += 1
        else:
            counts["unknown"] += 1

    return counts
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def calculate_avg_effectiveness(validators: list[ValidatorInfo]) -> float | None:
    """Calculate average attestation effectiveness across active validators.

    Returns None when no active validator reports an effectiveness value.
    """
    scores = [
        v.effectiveness
        for v in validators
        if v.status.is_active and v.effectiveness is not None
    ]
    return sum(scores) / len(scores) if scores else None
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
def count_at_risk_validators(validators: list[ValidatorInfo]) -> int:
    """Count validators with balance < 32 ETH (at risk of withdrawal penalty)."""
    return len([v for v in validators if v.at_risk])
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
def count_slashed_validators(validators: list[ValidatorInfo]) -> int:
    """Count slashed validators, whether still active or already exited."""
    slashed_states = (
        ValidatorStatus.ACTIVE_SLASHED,
        ValidatorStatus.EXITED_SLASHED,
    )
    return len([v for v in validators if v.status in slashed_states])
|
src/data/cache.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"""Simple in-memory cache with TTL support."""
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
from datetime import datetime, timedelta
|
|
5
|
+
from functools import wraps
|
|
6
|
+
from typing import Any, Callable
|
|
7
|
+
|
|
8
|
+
from ..core.config import get_settings
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class SimpleCache:
|
|
12
|
+
"""Simple in-memory cache with TTL. Safe for single-threaded async but not thread-safe."""
|
|
13
|
+
|
|
14
|
+
def __init__(self, default_ttl: int | None = None):
|
|
15
|
+
self._cache: dict[str, tuple[Any, datetime]] = {}
|
|
16
|
+
self._default_ttl = default_ttl or get_settings().cache_ttl_seconds
|
|
17
|
+
|
|
18
|
+
def get(self, key: str) -> Any | None:
|
|
19
|
+
"""Get value from cache if not expired."""
|
|
20
|
+
if key in self._cache:
|
|
21
|
+
value, expiry = self._cache[key]
|
|
22
|
+
if datetime.now() < expiry:
|
|
23
|
+
return value
|
|
24
|
+
del self._cache[key]
|
|
25
|
+
return None
|
|
26
|
+
|
|
27
|
+
def set(self, key: str, value: Any, ttl: int | None = None) -> None:
|
|
28
|
+
"""Set value in cache with TTL."""
|
|
29
|
+
expiry = datetime.now() + timedelta(seconds=ttl or self._default_ttl)
|
|
30
|
+
self._cache[key] = (value, expiry)
|
|
31
|
+
|
|
32
|
+
def clear(self) -> None:
|
|
33
|
+
"""Clear all cached values."""
|
|
34
|
+
self._cache.clear()
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# Global cache instance — module-level singleton shared by the cached()
# decorator below and exposed to other modules via get_cache().
_cache = SimpleCache()
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def cached(ttl: int | None = None) -> Callable:
    """Decorator for caching async function results.

    Args:
        ttl: Seconds before the entry expires; falls back to the cache's
            default TTL when None.

    The cache key is derived from the function's qualified name and the
    repr of its arguments, so arguments must have stable, deterministic
    reprs for hits to occur.
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Create cache key from function name and arguments.
            # Use repr() for proper tuple/list representation to avoid collisions.
            key_data = f"{func.__module__}.{func.__name__}:{repr(args)}:{repr(sorted(kwargs.items()))}"
            cache_key = hashlib.md5(key_data.encode()).hexdigest()

            # Values are stored wrapped in a 1-tuple so that a legitimately
            # cached result of None can be told apart from a cache miss
            # (SimpleCache.get returns None for both absent and expired keys).
            # Without this, functions returning None were re-executed on every
            # call, defeating the cache for e.g. failed lookups.
            hit = _cache.get(cache_key)
            if hit is not None:
                return hit[0]

            result = await func(*args, **kwargs)
            _cache.set(cache_key, (result,), ttl)
            return result

        return wrapper

    return decorator
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def get_cache() -> SimpleCache:
    """Get the global cache instance.

    Exposed so other modules can inspect or clear the shared cache used
    by the ``cached`` decorator.
    """
    return _cache
|
src/data/etherscan.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""Etherscan API client for event queries."""
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
from web3 import Web3
|
|
5
|
+
|
|
6
|
+
from ..core.config import get_settings
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class EtherscanProvider:
    """Query contract events via Etherscan API.

    Uses the v2 multichain endpoint (chainid=1, Ethereum mainnet). All
    methods degrade to empty results when no API key is configured.
    """

    BASE_URL = "https://api.etherscan.io/v2/api"

    def __init__(self) -> None:
        self.settings = get_settings()
        self.api_key = self.settings.etherscan_api_key

    def is_available(self) -> bool:
        """Check if Etherscan API key is configured."""
        return bool(self.api_key)

    async def get_distribution_log_events(
        self,
        contract_address: str,
        from_block: int,
        to_block: str | int = "latest",
    ) -> list[dict]:
        """Query DistributionLogUpdated events from Etherscan.

        Returns [{"block": int, "logCid": str}, ...] sorted by block number.
        Entries whose data field cannot be decoded are skipped.
        """
        if not self.api_key:
            return []

        # Event topic: keccak256("DistributionLogUpdated(string)")
        # NOTE(review): Web3.keccak returns HexBytes, and HexBytes.hex()
        # already includes a "0x" prefix in recent hexbytes releases — this
        # concatenation may double-prefix the topic. Verify against the
        # installed web3/hexbytes versions.
        topic0 = "0x" + Web3.keccak(text="DistributionLogUpdated(string)").hex()

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.get(
                self.BASE_URL,
                params={
                    "chainid": 1,
                    "module": "logs",
                    "action": "getLogs",
                    "address": contract_address,
                    "topic0": topic0,
                    "fromBlock": from_block,
                    "toBlock": to_block,
                    "apikey": self.api_key,
                },
            )

            data = response.json()
            # Non-"1" status covers both errors and the "no records found"
            # response; both are treated as "no events".
            if data.get("status") != "1":
                return []

            results = []
            for log in data.get("result", []):
                # Decode the logCid from the data field
                # The data is ABI-encoded string: offset (32 bytes) + length (32 bytes) + data
                raw_data = log["data"]
                # Skip the offset (0x40 = 64 chars after 0x) and length prefix
                # String data starts at byte 64 (128 hex chars after 0x)
                if len(raw_data) > 130:  # 0x + 128 chars minimum
                    # Extract length from bytes 32-64
                    length_hex = raw_data[66:130]
                    length = int(length_hex, 16)
                    # Extract string data starting at byte 64
                    string_data = raw_data[130 : 130 + length * 2]
                    try:
                        log_cid = bytes.fromhex(string_data).decode("utf-8")
                        results.append(
                            {
                                "block": int(log["blockNumber"], 16),
                                "logCid": log_cid,
                            }
                        )
                    except (ValueError, UnicodeDecodeError):
                        # Malformed hex or non-UTF-8 payload: skip this log.
                        continue

            return sorted(results, key=lambda x: x["block"])
|