csm-dashboard 0.3.6.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/METADATA +2 -1
- csm_dashboard-0.4.0.dist-info/RECORD +35 -0
- src/cli/commands.py +8 -0
- src/core/config.py +7 -0
- src/core/types.py +3 -6
- src/data/beacon.py +23 -9
- src/data/cache.py +53 -8
- src/data/database.py +189 -0
- src/data/etherscan.py +33 -7
- src/data/ipfs_logs.py +29 -20
- src/data/lido_api.py +38 -12
- src/data/onchain.py +111 -58
- src/data/price.py +46 -0
- src/data/rewards_tree.py +18 -3
- src/data/strikes.py +35 -13
- src/main.py +12 -0
- src/services/operator_service.py +65 -43
- src/web/app.py +794 -72
- src/web/routes.py +372 -0
- csm_dashboard-0.3.6.1.dist-info/RECORD +0 -33
- {csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/WHEEL +0 -0
- {csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/entry_points.txt +0 -0
src/data/strikes.py
CHANGED

@@ -2,6 +2,7 @@
 
 import asyncio
 import json
+import logging
 import time
 from dataclasses import dataclass
 from pathlib import Path
@@ -12,6 +13,8 @@ from web3 import Web3
 from ..core.config import get_settings
 from .cache import cached
 
+logger = logging.getLogger(__name__)
+
 
 # Strike thresholds by operator type (curve_id)
 # Default (Permissionless): 3 strikes till key exit
@@ -26,6 +29,8 @@ DEFAULT_STRIKE_THRESHOLD = 3
 
 def get_strike_threshold(curve_id: int) -> int:
     """Get the strike threshold for ejection based on operator curve_id."""
+    if curve_id not in STRIKE_THRESHOLDS:
+        logger.warning(f"Unknown curve_id {curve_id}, defaulting to strike threshold {DEFAULT_STRIKE_THRESHOLD}")
     return STRIKE_THRESHOLDS.get(curve_id, DEFAULT_STRIKE_THRESHOLD)
 
 
@@ -43,13 +48,6 @@ class ValidatorStrikes:
 class StrikesProvider:
     """Fetches strikes data from CSStrikes contract via IPFS."""
 
-    # IPFS gateways to try in order (same as ipfs_logs.py)
-    GATEWAYS = [
-        "https://dweb.link/ipfs/",
-        "https://ipfs.io/ipfs/",
-        "https://cloudflare-ipfs.com/ipfs/",
-    ]
-
     # Rate limiting: minimum seconds between gateway requests
     MIN_REQUEST_INTERVAL = 1.0
 
@@ -66,6 +64,8 @@ class StrikesProvider:
 
     def __init__(self, rpc_url: str | None = None, cache_dir: Path | None = None):
         self.settings = get_settings()
+        # Use configurable gateways from settings (comma-separated)
+        self.gateways = [g.strip() for g in self.settings.ipfs_gateways.split(",") if g.strip()]
         self.w3 = Web3(Web3.HTTPProvider(rpc_url or self.settings.eth_rpc_url))
         self.cache_dir = cache_dir or Path.home() / ".cache" / "csm-dashboard" / "strikes"
         self.cache_dir.mkdir(parents=True, exist_ok=True)
@@ -114,7 +114,9 @@ class StrikesProvider:
     @cached(ttl=300)  # Cache CID for 5 minutes
     async def get_tree_cid(self) -> str:
         """Get the current strikes tree CID from the contract."""
-        return
+        return await asyncio.to_thread(
+            self.csstrikes.functions.treeCid().call
+        )
 
     async def _fetch_tree_from_ipfs(self, cid: str) -> dict | None:
         """Fetch tree data from IPFS gateways."""
@@ -128,18 +130,24 @@
 
         # Try each gateway
        async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
-            for gateway in self.GATEWAYS:
+            for gateway in self.gateways:
                try:
                    url = f"{gateway}{cid}"
                    response = await client.get(url)
                    if response.status_code == 200:
-
+                        try:
+                            data = response.json()
+                        except json.JSONDecodeError as e:
+                            logger.warning(f"Failed to parse strikes tree JSON from {gateway}: {e}")
+                            continue
                        # Cache the successful result
                        self._save_to_cache(cid, data)
                        return data
-                except Exception:
+                except Exception as e:
+                    logger.debug(f"IPFS gateway {gateway} failed for strikes CID {cid}: {e}")
                    continue
 
+        logger.warning(f"All IPFS gateways failed for strikes CID {cid}")
        return None
 
    @cached(ttl=300)  # Cache parsed tree for 5 minutes
@@ -192,11 +200,25 @@
            pubkey = value[1]
            strikes_array = value[2]
 
+            # Validate types - operator_id must be an int
+            if not isinstance(entry_operator_id, int):
+                try:
+                    entry_operator_id = int(entry_operator_id)
+                except (ValueError, TypeError):
+                    continue
+
            if entry_operator_id != operator_id:
                continue
 
-            #
-
+            # Ensure pubkey is a string
+            if not isinstance(pubkey, str):
+                pubkey = str(pubkey) if pubkey else ""
+
+            # Count total strikes (sum of the 6-frame array), filtering non-numeric values
+            if isinstance(strikes_array, list):
+                strike_count = sum(s for s in strikes_array if isinstance(s, (int, float)))
+            else:
+                strike_count = 0
 
            operator_strikes.append(
                ValidatorStrikes(
src/main.py
CHANGED

@@ -1,9 +1,17 @@
 """Main entry point for the CSM Dashboard application."""
 
+import logging
+
 import typer
 
 from .cli.commands import app as cli_app
 
+# Configure root logger
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
+
 # Create main app that includes all CLI commands
 app = typer.Typer(
     name="csm-dashboard",
@@ -25,6 +33,9 @@ def serve(
 
    from .web.app import create_app
 
+    logger = logging.getLogger(__name__)
+    logger.info(f"Starting CSM Dashboard server on {host}:{port}")
+
    web_app = create_app()
    uvicorn.run(
        web_app if not reload else "src.web.app:create_app",
@@ -32,6 +43,7 @@ def serve(
        port=port,
        reload=reload,
        factory=reload,
+        log_level="info",
    )
 
 
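The serve command passes web_app if not reload else "src.web.app:create_app" with factory=reload because uvicorn's reloader re-imports the application in a subprocess and therefore needs an import string rather than a live object. A minimal sketch of the same pattern outside the package; the bare ASGI factory and the example_app module name are hypothetical stand-ins:

import uvicorn


def create_app():
    # Hypothetical ASGI app factory standing in for src.web.app:create_app
    async def app(scope, receive, send):
        if scope["type"] != "http":
            return
        await send({"type": "http.response.start", "status": 200,
                    "headers": [(b"content-type", b"text/plain")]})
        await send({"type": "http.response.body", "body": b"ok"})
    return app


if __name__ == "__main__":
    reload = False  # flip to True during development
    uvicorn.run(
        # A live app object only works when reload is off; the reloader needs
        # an import string it can re-import in the worker process.
        create_app() if not reload else "example_app:create_app",
        factory=reload,  # tells uvicorn the import-string target is a factory
        log_level="info",
    )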
src/services/operator_service.py
CHANGED

@@ -1,7 +1,11 @@
 """Main service for computing operator rewards."""
 
+from datetime import datetime, timezone
+import logging
 from decimal import Decimal
 
+logger = logging.getLogger(__name__)
+
 from ..core.types import (
     APYMetrics,
     BondSummary,
@@ -263,17 +267,23 @@ class OperatorService:
 
            # Estimate next distribution date (~28 days after current frame ends)
            # Frame duration ≈ 28 days = ~6300 epochs
+            # If IPFS logs are behind, keep advancing until we get a future date
+            now = datetime.now(timezone.utc)
            next_epoch = current_frame.end_epoch + 6300
-
+            next_dt = epoch_to_dt(next_epoch)
+            while next_dt < now:
+                next_epoch += 6300  # Add another ~28 days
+                next_dt = epoch_to_dt(next_epoch)
+            next_distribution_date = next_dt.isoformat()
 
            # Estimate next distribution ETH based on current daily rate
            if current_days > 0:
                daily_rate = current_eth / Decimal(current_days)
                next_distribution_est_eth = float(daily_rate * Decimal(28))
 
-        except Exception:
+        except Exception as e:
            # If historical APY calculation fails, continue without it
-
+            logger.warning(f"Historical APY calculation failed for operator {operator_id}: {e}")
 
        # 2. Bond APY (stETH protocol rebase rate)
        steth_data = await self.lido_api.get_steth_apr()
@@ -330,12 +340,13 @@
            prev_bond = self.onchain.calculate_required_bond(
                prev_frame.validator_count, curve_id
            )
+            # Keep calculation in Decimal for precision
            previous_bond_eth = round(
-                float(prev_bond
+                float(prev_bond * Decimal(prev_apr / 100) * Decimal(prev_days / 365)), 6
            )
        else:
            previous_bond_eth = round(
-                float(bond_eth
+                float(bond_eth * Decimal(prev_apr / 100) * Decimal(prev_days / 365)), 6
            )
 
        # Current frame bond earnings
@@ -353,21 +364,29 @@
            curr_apr = bond_apy
        if curr_apr is not None:
            current_bond_apr = round(curr_apr, 2)
+            # Keep calculation in Decimal for precision
            current_bond_eth = round(
-                float(bond_eth
+                float(bond_eth * Decimal(curr_apr / 100) * Decimal(curr_days / 365)), 6
            )
 
        # Lifetime bond earnings (sum of all frame durations with per-frame APR)
        # When include_history=True, calculate accurate lifetime APY with per-frame bond
+        # Also build frame_list here to avoid duplicate loop
        if frames:
            lifetime_bond_sum = 0.0
            # For accurate lifetime APY calculation (duration-weighted)
            frame_reward_apys = []
            frame_bond_apys = []
            frame_durations = []
+            frame_list = []  # Build frame_list here instead of separate loop
 
-            for f in frames:
+            for i, f in enumerate(frames):
                f_days = self.ipfs_logs.calculate_frame_duration_days(f)
+                f_eth = await self.onchain.shares_to_eth(f.distributed_rewards)
+                f_apy = None
+                f_bond_apy = None
+                f_net_apy = None
+
                if f_days > 0:
                    # Use average historical APR for each frame period
                    f_start_ts = BEACON_GENESIS + (f.start_epoch * 384)
@@ -384,17 +403,40 @@
                    f_bond = self.onchain.calculate_required_bond(
                        f.validator_count, curve_id
                    )
-
+                    # Keep calculations in Decimal for precision
+                    lifetime_bond_sum += float(f_bond * Decimal(f_apr / 100) * Decimal(f_days / 365))
 
-                    # Calculate per-frame reward APY
-                    f_eth = await self.onchain.shares_to_eth(f.distributed_rewards)
+                    # Calculate per-frame reward APY using accurate per-frame bond
                    if f_bond > 0:
-
-
+                        f_apy = round(float((f_eth / f_bond) * Decimal(365.0 / f_days) * Decimal(100)), 2)
+                        f_bond_apy = round(f_apr, 2)
+                        f_net_apy = round(f_apy + f_bond_apy, 2)
+
+                    frame_reward_apys.append(f_apy)
                    frame_bond_apys.append(f_apr)
                    frame_durations.append(f_days)
                else:
-                    lifetime_bond_sum += float(bond_eth
+                    lifetime_bond_sum += float(bond_eth * Decimal(f_apr / 100) * Decimal(f_days / 365))
+                    # Fallback: use current bond for APY calc
+                    if bond_eth >= MIN_BOND_ETH:
+                        f_apy = round(float(f_eth / bond_eth) * (365.0 / f_days) * 100, 2)
+
+                # Build frame_list entry if history requested
+                if include_history:
+                    frame_list.append(
+                        DistributionFrame(
+                            frame_number=i + 1,
+                            start_date=epoch_to_dt(f.start_epoch).isoformat(),
+                            end_date=epoch_to_dt(f.end_epoch).isoformat(),
+                            rewards_eth=float(f_eth),
+                            rewards_shares=f.distributed_rewards,
+                            duration_days=round(f_days, 1),
+                            validator_count=f.validator_count,
+                            apy=f_apy,
+                            bond_apy=f_bond_apy,
+                            net_apy=f_net_apy,
+                        )
+                    )
 
            if lifetime_bond_sum > 0:
                lifetime_bond_eth = round(lifetime_bond_sum, 6)
@@ -447,30 +489,6 @@
                (lifetime_distribution_eth or 0) + (lifetime_bond_eth or 0), 6
            )
 
-        # 6. Build frame history if requested
-        if include_history and frames:
-            frame_list = []
-            for i, f in enumerate(frames):
-                # Convert shares to ETH (not just dividing by 10^18)
-                f_eth = await self.onchain.shares_to_eth(f.distributed_rewards)
-                f_days = self.ipfs_logs.calculate_frame_duration_days(f)
-                f_apy = None
-                if f_days > 0 and bond_eth >= MIN_BOND_ETH:
-                    f_apy = round(float(f_eth / bond_eth) * (365.0 / f_days) * 100, 2)
-
-                frame_list.append(
-                    DistributionFrame(
-                        frame_number=i + 1,
-                        start_date=epoch_to_dt(f.start_epoch).isoformat(),
-                        end_date=epoch_to_dt(f.end_epoch).isoformat(),
-                        rewards_eth=float(f_eth),
-                        rewards_shares=f.distributed_rewards,
-                        duration_days=round(f_days, 1),
-                        validator_count=f.validator_count,
-                        apy=f_apy,
-                    )
-                )
-
        return APYMetrics(
            previous_distribution_eth=previous_distribution_eth,
            previous_distribution_apy=previous_distribution_apy,
@@ -532,9 +550,9 @@
                max_strikes=summary.get("max_strikes", 0),
                strike_threshold=summary.get("strike_threshold", 3),
            )
-        except Exception:
+        except Exception as e:
            # If strikes fetch fails, continue with empty summary
-
+            logger.warning(f"Failed to fetch strikes for operator {operator_id}: {e}")
 
        return HealthStatus(
            bond_healthy=bond_healthy,
@@ -562,7 +580,8 @@
        """
        try:
            log_history = await self.onchain.get_distribution_log_history()
-        except Exception:
+        except Exception as e:
+            logger.warning(f"Failed to fetch distribution log history: {e}")
            return []
 
        if not log_history:
@@ -583,8 +602,9 @@
                    "start": start_date.strftime("%b %d"),
                    "end": end_date.strftime("%b %d"),
                })
-            except Exception:
+            except Exception as e:
                # Skip frames we can't fetch
+                logger.debug(f"Failed to fetch frame data for CID {entry.get('logCid', 'unknown')}: {e}")
                continue
 
        # Pad to ensure we always have `count` entries (for UI consistency)
@@ -615,7 +635,8 @@
 
            validators = await self.beacon.get_validators_by_pubkeys(pubkeys)
            return get_earliest_activation(validators)
-        except Exception:
+        except Exception as e:
+            logger.debug(f"Failed to get active_since for operator {operator_id}: {e}")
            return None
 
    async def get_withdrawal_history(self, operator_id: int) -> list[WithdrawalEvent]:
@@ -643,5 +664,6 @@
                )
                for e in events
            ]
-        except Exception:
+        except Exception as e:
+            logger.warning(f"Failed to fetch withdrawal history for operator {operator_id}: {e}")
            return []
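The new next-distribution estimate rests on the ~6300-epoch frame constant: one epoch is 32 slots × 12 s = 384 s, so a 28-day frame is 2,419,200 s / 384 s = 6300 epochs, and the while-loop keeps adding whole frames until the estimate lands in the future. A small sketch of that arithmetic, assuming mainnet beacon genesis (2020-12-01 12:00:23 UTC) and 12-second slots; epoch_to_dt here is a local stand-in for the helper used in the diff:

from datetime import datetime, timedelta, timezone

# Mainnet beacon-chain constants (assumed here for illustration)
SECONDS_PER_EPOCH = 32 * 12  # 32 slots x 12 s = 384 s
BEACON_GENESIS = datetime(2020, 12, 1, 12, 0, 23, tzinfo=timezone.utc)

# ~28-day distribution frame expressed in epochs, matching the diff's 6300 constant
EPOCHS_PER_FRAME = int(timedelta(days=28).total_seconds() // SECONDS_PER_EPOCH)  # -> 6300


def epoch_to_dt(epoch: int) -> datetime:
    # Convert an epoch number to its wall-clock start time
    return BEACON_GENESIS + timedelta(seconds=epoch * SECONDS_PER_EPOCH)


def next_distribution(end_epoch: int) -> datetime:
    # Advance in whole frames until the estimate is in the future,
    # mirroring the new while-loop in the diff
    now = datetime.now(timezone.utc)
    next_epoch = end_epoch + EPOCHS_PER_FRAME
    next_dt = epoch_to_dt(next_epoch)
    while next_dt < now:
        next_epoch += EPOCHS_PER_FRAME
        next_dt = epoch_to_dt(next_epoch)
    return next_dt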