csm-dashboard 0.3.6__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
src/data/strikes.py CHANGED
@@ -2,6 +2,7 @@
 
 import asyncio
 import json
+import logging
 import time
 from dataclasses import dataclass
 from pathlib import Path
@@ -12,6 +13,8 @@ from web3 import Web3
 from ..core.config import get_settings
 from .cache import cached
 
+logger = logging.getLogger(__name__)
+
 
 # Strike thresholds by operator type (curve_id)
 # Default (Permissionless): 3 strikes till key exit
@@ -26,6 +29,8 @@ DEFAULT_STRIKE_THRESHOLD = 3
 
 def get_strike_threshold(curve_id: int) -> int:
     """Get the strike threshold for ejection based on operator curve_id."""
+    if curve_id not in STRIKE_THRESHOLDS:
+        logger.warning(f"Unknown curve_id {curve_id}, defaulting to strike threshold {DEFAULT_STRIKE_THRESHOLD}")
     return STRIKE_THRESHOLDS.get(curve_id, DEFAULT_STRIKE_THRESHOLD)
 
 
@@ -43,13 +48,6 @@ class ValidatorStrikes:
 class StrikesProvider:
     """Fetches strikes data from CSStrikes contract via IPFS."""
 
-    # IPFS gateways to try in order (same as ipfs_logs.py)
-    GATEWAYS = [
-        "https://dweb.link/ipfs/",
-        "https://ipfs.io/ipfs/",
-        "https://cloudflare-ipfs.com/ipfs/",
-    ]
-
     # Rate limiting: minimum seconds between gateway requests
     MIN_REQUEST_INTERVAL = 1.0
 
@@ -66,6 +64,8 @@ class StrikesProvider:
 
     def __init__(self, rpc_url: str | None = None, cache_dir: Path | None = None):
         self.settings = get_settings()
+        # Use configurable gateways from settings (comma-separated)
+        self.gateways = [g.strip() for g in self.settings.ipfs_gateways.split(",") if g.strip()]
        self.w3 = Web3(Web3.HTTPProvider(rpc_url or self.settings.eth_rpc_url))
        self.cache_dir = cache_dir or Path.home() / ".cache" / "csm-dashboard" / "strikes"
        self.cache_dir.mkdir(parents=True, exist_ok=True)
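
The hardcoded GATEWAYS class constant is replaced by a comma-separated `ipfs_gateways` setting, parsed in `__init__`. A minimal sketch of that parsing; the example string reuses the gateway URLs removed above, and how `get_settings()` actually sources the value (environment variable, config file) is not shown in this diff:

```python
# Example value only; the real source of settings.ipfs_gateways is not shown in this diff.
ipfs_gateways = "https://dweb.link/ipfs/, https://ipfs.io/ipfs/, https://cloudflare-ipfs.com/ipfs/"

# Same parsing as the new StrikesProvider.__init__: split on commas, trim whitespace, drop empties
gateways = [g.strip() for g in ipfs_gateways.split(",") if g.strip()]
print(gateways)
# ['https://dweb.link/ipfs/', 'https://ipfs.io/ipfs/', 'https://cloudflare-ipfs.com/ipfs/']
```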
@@ -114,7 +114,9 @@ class StrikesProvider:
     @cached(ttl=300)  # Cache CID for 5 minutes
     async def get_tree_cid(self) -> str:
         """Get the current strikes tree CID from the contract."""
-        return self.csstrikes.functions.treeCid().call()
+        return await asyncio.to_thread(
+            self.csstrikes.functions.treeCid().call
+        )
 
     async def _fetch_tree_from_ipfs(self, cid: str) -> dict | None:
         """Fetch tree data from IPFS gateways."""
@@ -128,18 +130,24 @@
 
         # Try each gateway
         async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
-            for gateway in self.GATEWAYS:
+            for gateway in self.gateways:
                 try:
                     url = f"{gateway}{cid}"
                     response = await client.get(url)
                     if response.status_code == 200:
-                        data = response.json()
+                        try:
+                            data = response.json()
+                        except json.JSONDecodeError as e:
+                            logger.warning(f"Failed to parse strikes tree JSON from {gateway}: {e}")
+                            continue
                         # Cache the successful result
                         self._save_to_cache(cid, data)
                         return data
-                except Exception:
+                except Exception as e:
+                    logger.debug(f"IPFS gateway {gateway} failed for strikes CID {cid}: {e}")
                     continue
 
+        logger.warning(f"All IPFS gateways failed for strikes CID {cid}")
        return None
 
    @cached(ttl=300)  # Cache parsed tree for 5 minutes
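
Two patterns above are worth calling out: `get_tree_cid` now wraps the synchronous web3 `treeCid().call()` in `asyncio.to_thread` so it does not block the event loop, and `_fetch_tree_from_ipfs` degrades gracefully when a gateway returns non-JSON. A runnable sketch of the `to_thread` pattern with a stand-in blocking function (names here are illustrative, not from the package):

```python
import asyncio
import time


def blocking_contract_call() -> str:
    # Stand-in for a synchronous web3 call such as contract.functions.treeCid().call()
    time.sleep(0.1)
    return "bafy...example-cid"


async def get_tree_cid() -> str:
    # Run the blocking call in a worker thread; the event loop stays free for other tasks
    return await asyncio.to_thread(blocking_contract_call)


print(asyncio.run(get_tree_cid()))
```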
@@ -192,11 +200,25 @@ class StrikesProvider:
             pubkey = value[1]
             strikes_array = value[2]
 
+            # Validate types - operator_id must be an int
+            if not isinstance(entry_operator_id, int):
+                try:
+                    entry_operator_id = int(entry_operator_id)
+                except (ValueError, TypeError):
+                    continue
+
             if entry_operator_id != operator_id:
                 continue
 
-            # Count total strikes (sum of the 6-frame array)
-            strike_count = sum(strikes_array) if isinstance(strikes_array, list) else 0
+            # Ensure pubkey is a string
+            if not isinstance(pubkey, str):
+                pubkey = str(pubkey) if pubkey else ""
+
+            # Count total strikes (sum of the 6-frame array), filtering non-numeric values
+            if isinstance(strikes_array, list):
+                strike_count = sum(s for s in strikes_array if isinstance(s, (int, float)))
+            else:
+                strike_count = 0
 
             operator_strikes.append(
                 ValidatorStrikes(
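
The parsing loop above no longer trusts the shape of tree entries: operator IDs are coerced to `int`, pubkeys to `str`, and only numeric values in the 6-frame strikes array are summed. A small illustration with a deliberately malformed array (values made up):

```python
# Made-up strikes array containing non-numeric noise
strikes_array = [1, 0, None, 2, "n/a", 1]

# 0.3.6 behaviour: sum(strikes_array) raises TypeError on the None / "n/a" entries
# 0.4.0 behaviour: non-numeric entries are simply skipped
strike_count = sum(s for s in strikes_array if isinstance(s, (int, float)))
print(strike_count)  # 4
```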
src/main.py CHANGED
@@ -1,9 +1,17 @@
 """Main entry point for the CSM Dashboard application."""
 
+import logging
+
 import typer
 
 from .cli.commands import app as cli_app
 
+# Configure root logger
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
+
 # Create main app that includes all CLI commands
 app = typer.Typer(
     name="csm-dashboard",
@@ -25,6 +33,9 @@ def serve(
 
     from .web.app import create_app
 
+    logger = logging.getLogger(__name__)
+    logger.info(f"Starting CSM Dashboard server on {host}:{port}")
+
     web_app = create_app()
     uvicorn.run(
         web_app if not reload else "src.web.app:create_app",
@@ -32,6 +43,7 @@ def serve(
         port=port,
         reload=reload,
         factory=reload,
+        log_level="info",
     )
 
 
@@ -1,7 +1,11 @@
 """Main service for computing operator rewards."""
 
+from datetime import datetime, timezone
+import logging
 from decimal import Decimal
 
+logger = logging.getLogger(__name__)
+
 from ..core.types import (
     APYMetrics,
     BondSummary,
@@ -263,38 +267,38 @@ class OperatorService:
 
             # Estimate next distribution date (~28 days after current frame ends)
             # Frame duration ≈ 28 days = ~6300 epochs
+            # If IPFS logs are behind, keep advancing until we get a future date
+            now = datetime.now(timezone.utc)
             next_epoch = current_frame.end_epoch + 6300
-            next_distribution_date = epoch_to_dt(next_epoch).isoformat()
+            next_dt = epoch_to_dt(next_epoch)
+            while next_dt < now:
+                next_epoch += 6300  # Add another ~28 days
+                next_dt = epoch_to_dt(next_epoch)
+            next_distribution_date = next_dt.isoformat()
 
             # Estimate next distribution ETH based on current daily rate
             if current_days > 0:
                 daily_rate = current_eth / Decimal(current_days)
                 next_distribution_est_eth = float(daily_rate * Decimal(28))
 
-        except Exception:
+        except Exception as e:
             # If historical APY calculation fails, continue without it
-            pass
+            logger.warning(f"Historical APY calculation failed for operator {operator_id}: {e}")
 
         # 2. Bond APY (stETH protocol rebase rate)
         steth_data = await self.lido_api.get_steth_apr()
         bond_apy = steth_data.get("apr")
 
-        # 3. Net APY calculations
+        # 3. Net APY calculations (initialized here, calculated after historical APR section)
         net_apy_28d = None
         net_apy_ltd = None
 
-        # Current frame net APY (historical_reward_apy_28d is basically current frame APY)
-        if historical_reward_apy_28d is not None and bond_apy is not None:
-            net_apy_28d = round(historical_reward_apy_28d + bond_apy, 2)
-        elif bond_apy is not None:
-            net_apy_28d = round(bond_apy, 2)
-
         # Lifetime net APY - intentionally NOT calculated
         # (same reason as historical_reward_apy_ltd - can't accurately calculate without historical bond)
         # net_apy_ltd remains None
 
-        # Previous frame net APY calculation is moved after we know previous_bond_apy
-        # (calculated in section 4 below)
+        # Current frame net APY and Previous frame net APY are calculated in section 4b/4c
+        # after we have historical APR values (current_bond_apr, previous_bond_apy)
 
         # 4. Calculate bond stETH earnings (from stETH rebasing)
         # Formula: bond_eth * (apr / 100) * (duration_days / 365)
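
The old estimate added a single ~28-day frame (6300 epochs) to the current frame's end and could land in the past if the latest IPFS log was stale; the new loop keeps adding frames until the estimate is in the future. A runnable sketch, assuming `epoch_to_dt` maps a beacon epoch to UTC time as `BEACON_GENESIS + epoch * 384` seconds (384 s per epoch, consistent with the `f_start_ts` computation later in this diff); the genesis constant and the stale end epoch below are illustrative:

```python
from datetime import datetime, timezone

BEACON_GENESIS = 1606824023  # mainnet beacon genesis timestamp (assumption for this sketch)


def epoch_to_dt(epoch: int) -> datetime:
    # Assumed helper: 32 slots * 12 s = 384 s per epoch
    return datetime.fromtimestamp(BEACON_GENESIS + epoch * 384, tz=timezone.utc)


now = datetime.now(timezone.utc)
end_epoch = 300_000  # illustrative end epoch of a stale frame, well in the past

next_epoch = end_epoch + 6300
next_dt = epoch_to_dt(next_epoch)
while next_dt < now:          # keep advancing ~28 days until the estimate is in the future
    next_epoch += 6300
    next_dt = epoch_to_dt(next_epoch)

print(next_dt.isoformat())
```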
@@ -336,12 +340,13 @@ class OperatorService:
                 prev_bond = self.onchain.calculate_required_bond(
                     prev_frame.validator_count, curve_id
                 )
+                # Keep calculation in Decimal for precision
                 previous_bond_eth = round(
-                    float(prev_bond) * (prev_apr / 100) * (prev_days / 365), 6
+                    float(prev_bond * Decimal(prev_apr / 100) * Decimal(prev_days / 365)), 6
                 )
             else:
                 previous_bond_eth = round(
-                    float(bond_eth) * (prev_apr / 100) * (prev_days / 365), 6
+                    float(bond_eth * Decimal(prev_apr / 100) * Decimal(prev_days / 365)), 6
                 )
 
         # Current frame bond earnings
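
The bond-earnings formula noted above, bond_eth * (apr / 100) * (duration_days / 365), is now evaluated with Decimal operands and converted to float only for the final rounding. A worked sketch with illustrative numbers:

```python
from decimal import Decimal

bond_eth = Decimal("12.8")  # illustrative bond size in ETH
apr = 3.2                   # illustrative stETH APR in percent
days = 28                   # illustrative frame duration in days

# Formula from the comment above: bond_eth * (apr / 100) * (duration_days / 365)
earnings = round(float(bond_eth * Decimal(apr / 100) * Decimal(days / 365)), 6)
print(earnings)  # 0.031421
```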
@@ -359,21 +364,29 @@ class OperatorService:
             curr_apr = bond_apy
         if curr_apr is not None:
             current_bond_apr = round(curr_apr, 2)
+            # Keep calculation in Decimal for precision
             current_bond_eth = round(
-                float(bond_eth) * (curr_apr / 100) * (curr_days / 365), 6
+                float(bond_eth * Decimal(curr_apr / 100) * Decimal(curr_days / 365)), 6
             )
 
         # Lifetime bond earnings (sum of all frame durations with per-frame APR)
         # When include_history=True, calculate accurate lifetime APY with per-frame bond
+        # Also build frame_list here to avoid duplicate loop
         if frames:
             lifetime_bond_sum = 0.0
             # For accurate lifetime APY calculation (duration-weighted)
             frame_reward_apys = []
             frame_bond_apys = []
             frame_durations = []
+            frame_list = []  # Build frame_list here instead of separate loop
 
-            for f in frames:
+            for i, f in enumerate(frames):
                 f_days = self.ipfs_logs.calculate_frame_duration_days(f)
+                f_eth = await self.onchain.shares_to_eth(f.distributed_rewards)
+                f_apy = None
+                f_bond_apy = None
+                f_net_apy = None
+
                 if f_days > 0:
                     # Use average historical APR for each frame period
                     f_start_ts = BEACON_GENESIS + (f.start_epoch * 384)
@@ -390,17 +403,40 @@ class OperatorService:
                         f_bond = self.onchain.calculate_required_bond(
                             f.validator_count, curve_id
                         )
-                        lifetime_bond_sum += float(f_bond) * (f_apr / 100) * (f_days / 365)
+                        # Keep calculations in Decimal for precision
+                        lifetime_bond_sum += float(f_bond * Decimal(f_apr / 100) * Decimal(f_days / 365))
 
-                        # Calculate per-frame reward APY for weighted average
-                        f_eth = await self.onchain.shares_to_eth(f.distributed_rewards)
+                        # Calculate per-frame reward APY using accurate per-frame bond
                         if f_bond > 0:
-                            f_reward_apy = float(f_eth / f_bond) * (365.0 / f_days) * 100
-                            frame_reward_apys.append(f_reward_apy)
+                            f_apy = round(float((f_eth / f_bond) * Decimal(365.0 / f_days) * Decimal(100)), 2)
+                            f_bond_apy = round(f_apr, 2)
+                            f_net_apy = round(f_apy + f_bond_apy, 2)
+
+                            frame_reward_apys.append(f_apy)
                             frame_bond_apys.append(f_apr)
                             frame_durations.append(f_days)
                     else:
-                        lifetime_bond_sum += float(bond_eth) * (f_apr / 100) * (f_days / 365)
+                        lifetime_bond_sum += float(bond_eth * Decimal(f_apr / 100) * Decimal(f_days / 365))
+                        # Fallback: use current bond for APY calc
+                        if bond_eth >= MIN_BOND_ETH:
+                            f_apy = round(float(f_eth / bond_eth) * (365.0 / f_days) * 100, 2)
+
+                # Build frame_list entry if history requested
+                if include_history:
+                    frame_list.append(
+                        DistributionFrame(
+                            frame_number=i + 1,
+                            start_date=epoch_to_dt(f.start_epoch).isoformat(),
+                            end_date=epoch_to_dt(f.end_epoch).isoformat(),
+                            rewards_eth=float(f_eth),
+                            rewards_shares=f.distributed_rewards,
+                            duration_days=round(f_days, 1),
+                            validator_count=f.validator_count,
+                            apy=f_apy,
+                            bond_apy=f_bond_apy,
+                            net_apy=f_net_apy,
+                        )
+                    )
 
             if lifetime_bond_sum > 0:
                 lifetime_bond_eth = round(lifetime_bond_sum, 6)
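
The loop now records `frame_reward_apys`, `frame_bond_apys`, and `frame_durations` so the lifetime figure can be duration-weighted rather than a plain mean; the aggregation itself happens outside the hunks shown here, so the sketch below only illustrates the duration-weighted average those lists enable, with made-up per-frame values:

```python
# Made-up per-frame reward APYs (%) and frame durations (days)
frame_reward_apys = [2.9, 3.4, 3.1]
frame_durations = [28.0, 27.5, 28.0]

# Duration-weighted average: longer frames contribute proportionally more
weighted_apy = sum(a * d for a, d in zip(frame_reward_apys, frame_durations)) / sum(frame_durations)
print(round(weighted_apy, 2))  # 3.13
```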
@@ -431,6 +467,14 @@ class OperatorService:
             if prev_bond_apy_to_use is not None:
                 previous_net_apy = round(previous_distribution_apy + prev_bond_apy_to_use, 2)
 
+        # 4c. Current frame net APY (using historical APR when available, like previous frame)
+        if historical_reward_apy_28d is not None:
+            curr_bond_apy_to_use = current_bond_apr if current_bond_apr is not None else bond_apy
+            if curr_bond_apy_to_use is not None:
+                net_apy_28d = round(historical_reward_apy_28d + curr_bond_apy_to_use, 2)
+        elif bond_apy is not None:
+            net_apy_28d = round(bond_apy, 2)
+
         # 5. Calculate net totals (Rewards + Bond)
         if previous_distribution_eth is not None or previous_bond_eth is not None:
             previous_net_total_eth = round(
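
Section 4c mirrors the previous-frame path: net APY is reward APY plus the bond APY for the same window, preferring the historical `current_bond_apr` over the live rate when it is available. For example, a 28-day reward APY of 2.75% and a bond APR of 3.10% give net_apy_28d = round(2.75 + 3.10, 2) = 5.85.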
@@ -445,30 +489,6 @@ class OperatorService:
                 (lifetime_distribution_eth or 0) + (lifetime_bond_eth or 0), 6
             )
 
-        # 6. Build frame history if requested
-        if include_history and frames:
-            frame_list = []
-            for i, f in enumerate(frames):
-                # Convert shares to ETH (not just dividing by 10^18)
-                f_eth = await self.onchain.shares_to_eth(f.distributed_rewards)
-                f_days = self.ipfs_logs.calculate_frame_duration_days(f)
-                f_apy = None
-                if f_days > 0 and bond_eth >= MIN_BOND_ETH:
-                    f_apy = round(float(f_eth / bond_eth) * (365.0 / f_days) * 100, 2)
-
-                frame_list.append(
-                    DistributionFrame(
-                        frame_number=i + 1,
-                        start_date=epoch_to_dt(f.start_epoch).isoformat(),
-                        end_date=epoch_to_dt(f.end_epoch).isoformat(),
-                        rewards_eth=float(f_eth),
-                        rewards_shares=f.distributed_rewards,
-                        duration_days=round(f_days, 1),
-                        validator_count=f.validator_count,
-                        apy=f_apy,
-                    )
-                )
-
         return APYMetrics(
             previous_distribution_eth=previous_distribution_eth,
             previous_distribution_apy=previous_distribution_apy,
@@ -530,9 +550,9 @@ class OperatorService:
                 max_strikes=summary.get("max_strikes", 0),
                 strike_threshold=summary.get("strike_threshold", 3),
             )
-        except Exception:
+        except Exception as e:
             # If strikes fetch fails, continue with empty summary
-            pass
+            logger.warning(f"Failed to fetch strikes for operator {operator_id}: {e}")
 
         return HealthStatus(
             bond_healthy=bond_healthy,
@@ -560,7 +580,8 @@ class OperatorService:
         """
         try:
             log_history = await self.onchain.get_distribution_log_history()
-        except Exception:
+        except Exception as e:
+            logger.warning(f"Failed to fetch distribution log history: {e}")
             return []
 
         if not log_history:
@@ -581,8 +602,9 @@ class OperatorService:
                     "start": start_date.strftime("%b %d"),
                     "end": end_date.strftime("%b %d"),
                 })
-            except Exception:
+            except Exception as e:
                 # Skip frames we can't fetch
+                logger.debug(f"Failed to fetch frame data for CID {entry.get('logCid', 'unknown')}: {e}")
                 continue
 
         # Pad to ensure we always have `count` entries (for UI consistency)
@@ -613,7 +635,8 @@ class OperatorService:
 
             validators = await self.beacon.get_validators_by_pubkeys(pubkeys)
             return get_earliest_activation(validators)
-        except Exception:
+        except Exception as e:
+            logger.debug(f"Failed to get active_since for operator {operator_id}: {e}")
             return None
 
     async def get_withdrawal_history(self, operator_id: int) -> list[WithdrawalEvent]:
@@ -641,5 +664,6 @@ class OperatorService:
                 )
                 for e in events
             ]
-        except Exception:
+        except Exception as e:
+            logger.warning(f"Failed to fetch withdrawal history for operator {operator_id}: {e}")
             return []