csm-dashboard 0.3.6__tar.gz → 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/PKG-INFO +2 -1
  2. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/docker-compose.yml +2 -0
  3. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/pyproject.toml +2 -1
  4. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/cli/commands.py +8 -0
  5. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/core/config.py +7 -0
  6. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/core/types.py +3 -6
  7. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/beacon.py +23 -9
  8. csm_dashboard-0.4.0/src/data/cache.py +112 -0
  9. csm_dashboard-0.4.0/src/data/database.py +189 -0
  10. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/etherscan.py +33 -7
  11. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/ipfs_logs.py +29 -20
  12. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/lido_api.py +38 -12
  13. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/onchain.py +111 -58
  14. csm_dashboard-0.4.0/src/data/price.py +46 -0
  15. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/rewards_tree.py +18 -3
  16. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/strikes.py +35 -13
  17. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/main.py +12 -0
  18. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/services/operator_service.py +76 -52
  19. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/web/app.py +794 -72
  20. csm_dashboard-0.4.0/src/web/routes.py +618 -0
  21. csm_dashboard-0.3.6/src/data/cache.py +0 -67
  22. csm_dashboard-0.3.6/src/web/routes.py +0 -246
  23. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/.dockerignore +0 -0
  24. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/.env.example +0 -0
  25. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/.github/workflows/docker-publish.yaml +0 -0
  26. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/.github/workflows/release.yaml +0 -0
  27. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/.gitignore +0 -0
  28. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/CHANGELOG.md +0 -0
  29. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/Dockerfile +0 -0
  30. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/README.md +0 -0
  31. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/img/csm-dash-cli.png +0 -0
  32. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/img/csm-dash-web.png +0 -0
  33. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/img/favicon.ico +0 -0
  34. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/img/logo.png +0 -0
  35. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/my-lido-csm-dashboard.xml +0 -0
  36. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/requirements.txt +0 -0
  37. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/__init__.py +0 -0
  38. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/abis/CSAccounting.json +0 -0
  39. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/abis/CSFeeDistributor.json +0 -0
  40. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/abis/CSModule.json +0 -0
  41. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/abis/WithdrawalQueueERC721.json +0 -0
  42. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/abis/__init__.py +0 -0
  43. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/abis/stETH.json +0 -0
  44. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/cli/__init__.py +0 -0
  45. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/core/__init__.py +0 -0
  46. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/core/contracts.py +0 -0
  47. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/__init__.py +0 -0
  48. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/known_cids.py +0 -0
  49. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/services/__init__.py +0 -0
  50. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/web/__init__.py +0 -0
  51. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/tests/__init__.py +0 -0
  52. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/tests/conftest.py +0 -0
  53. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/tests/unit/test_cache.py +0 -0
  54. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/tests/unit/test_config.py +0 -0
  55. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/tests/unit/test_strikes.py +0 -0
  56. {csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/tests/unit/test_types.py +0 -0
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/PKG-INFO
@@ -1,8 +1,9 @@
  Metadata-Version: 2.4
  Name: csm-dashboard
- Version: 0.3.6
+ Version: 0.4.0
  Summary: Lido CSM Operator Dashboard for tracking validator earnings
  Requires-Python: >=3.11
+ Requires-Dist: aiosqlite>=0.19
  Requires-Dist: fastapi>=0.104
  Requires-Dist: httpx>=0.25
  Requires-Dist: pydantic-settings>=2.0
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/docker-compose.yml
@@ -30,6 +30,8 @@ services:

      # Restart policy
      restart: unless-stopped
+     volumes:
+       - ./dashboard-data/:/root/.cache/csm-dashboard/

      # Logging configuration
      logging:
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "csm-dashboard"
- version = "0.3.6"
+ version = "0.4.0"
  description = "Lido CSM Operator Dashboard for tracking validator earnings"
  readme = "README.md"
  requires-python = ">=3.11"
@@ -14,6 +14,7 @@ dependencies = [
      "pydantic>=2.5",
      "pydantic-settings>=2.0",
      "python-dotenv>=1.0",
+     "aiosqlite>=0.19",
  ]

  [project.optional-dependencies]
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/cli/commands.py
@@ -110,6 +110,8 @@ def format_as_api_json(rewards: OperatorRewards, include_validators: bool = Fals
              "duration_days": f.duration_days,
              "validator_count": f.validator_count,
              "apy": f.apy,
+             "bond_apy": f.bond_apy,
+             "net_apy": f.net_apy,
          }
          for f in rewards.apy.frames
      ]
@@ -553,6 +555,9 @@ def rewards(
      history_table.add_column("Rewards (ETH)", style="green", justify="right")
      history_table.add_column("Vals", style="dim", justify="right")
      history_table.add_column("ETH/Val", style="green", justify="right")
+     history_table.add_column("Reward APY", style="green", justify="right")
+     history_table.add_column("Bond APY", style="green", justify="right")
+     history_table.add_column("Net APY", style="bold yellow", justify="right")

      # Display oldest first (chronological order)
      for frame in rewards.apy.frames:
@@ -571,6 +576,9 @@ def rewards(
          f"{frame.rewards_eth:.4f}",
          str(frame.validator_count),
          f"{eth_per_val:.6f}",
+         fmt_apy(frame.apy),
+         fmt_apy(frame.bond_apy),
+         fmt_apy(frame.net_apy),
      )

      console.print(history_table)
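
The fmt_apy helper used in the new rows is referenced but not shown in this diff; a plausible sketch (hypothetical, the real helper in commands.py may differ, and it assumes the APY fields already hold percentage values):

    def fmt_apy(value: float | None) -> str:
        # Render an optional APY for the Rich table; dash out missing values.
        return f"{value:.2f}%" if value is not None else "-"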
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/core/config.py
@@ -1,6 +1,7 @@
  """Configuration management using pydantic-settings."""

  from functools import lru_cache
+ from pathlib import Path

  from pydantic_settings import BaseSettings, SettingsConfigDict

@@ -31,6 +32,12 @@ class Settings(BaseSettings):
      # Cache Settings
      cache_ttl_seconds: int = 300  # 5 minutes

+     # Database Settings
+     database_path: Path = Path.home() / ".cache" / "csm-dashboard" / "operators.db"
+     # IPFS Gateway Configuration (comma-separated list)
+     # Default: dweb.link (IPFS Foundation), ipfs.io, w3s.link (web3.storage)
+     ipfs_gateways: str = "https://dweb.link/ipfs/,https://ipfs.io/ipfs/,https://w3s.link/ipfs/"
+
      # Contract Addresses (Mainnet)
      csmodule_address: str = "0xdA7dE2ECdDfccC6c3AF10108Db212ACBBf9EA83F"
      csaccounting_address: str = "0x4d72BFF1BeaC69925F8Bd12526a39BAAb069e5Da"
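
A minimal consumption sketch for the new settings (import path follows this package's src/ layout; the env-var override names DATABASE_PATH and IPFS_GATEWAYS assume pydantic-settings' default field-name mapping with no env_prefix):

    from src.core.config import get_settings

    settings = get_settings()
    # Defaults to ~/.cache/csm-dashboard/operators.db, the directory the new
    # docker-compose volume maps to ./dashboard-data/ on the host.
    print(settings.database_path)
    # Consumers are expected to split the comma-separated gateway list:
    gateways = [g.strip() for g in settings.ipfs_gateways.split(",")]
    print(gateways[0])  # https://dweb.link/ipfs/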
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/core/types.py
@@ -58,7 +58,9 @@ class DistributionFrame(BaseModel):
      rewards_shares: int
      duration_days: float
      validator_count: int = 0  # Number of validators in this frame
-     apy: float | None = None  # Annualized for this frame (kept for backwards compat)
+     apy: float | None = None  # Reward APY for this frame (kept for backwards compat)
+     bond_apy: float | None = None  # Bond APY for this frame
+     net_apy: float | None = None  # Net APY (reward + bond) for this frame


  class WithdrawalEvent(BaseModel):
@@ -144,11 +146,6 @@ class APYMetrics(BaseModel):
      current_net_total_eth: float | None = None
      lifetime_net_total_eth: float | None = None

-     # Legacy fields (deprecated, kept for backwards compatibility)
-     reward_apy_7d: float | None = None
-     reward_apy_28d: float | None = None
-     net_apy_7d: float | None = None
-

  class StrikeSummary(BaseModel):
      """Summary of strikes for an operator."""
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/beacon.py
@@ -1,12 +1,15 @@
  """Beacon chain data fetching via beaconcha.in API."""

  import asyncio
+ import logging
  from datetime import datetime, timedelta, timezone
  from decimal import Decimal
  from enum import Enum

  import httpx

+ logger = logging.getLogger(__name__)
+
  from ..core.config import get_settings
  from .cache import cached
@@ -256,14 +259,23 @@ class BeaconDataProvider:

      def _parse_validator(self, data: dict) -> ValidatorInfo:
          """Parse beaconcha.in validator response."""
+         # Validate epoch values - far-future values (like 2^64-1) indicate "not set"
+         activation_epoch = data.get("activationepoch")
+         if activation_epoch is not None and (activation_epoch < 0 or activation_epoch > 2**32):
+             activation_epoch = None
+
+         exit_epoch = data.get("exitepoch")
+         if exit_epoch is not None and (exit_epoch < 0 or exit_epoch > 2**32):
+             exit_epoch = None
+
          return ValidatorInfo(
              pubkey=data.get("pubkey", ""),
              index=data.get("validatorindex"),
              status=ValidatorStatus.from_beaconcha(data.get("status", "unknown")),
              balance_gwei=data.get("balance", 0),
              effectiveness=data.get("effectiveness"),
-             activation_epoch=data.get("activationepoch"),
-             exit_epoch=data.get("exitepoch") if data.get("exitepoch") is not None and data.get("exitepoch") >= 0 else None,
+             activation_epoch=activation_epoch,
+             exit_epoch=exit_epoch,
          )

      @cached(ttl=300)
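
The 2**32 cutoff works because the consensus-layer "not set" sentinel FAR_FUTURE_EPOCH is 2**64 - 1, orders of magnitude beyond any real epoch number; a quick illustration:

    FAR_FUTURE_EPOCH = 2**64 - 1  # beacon chain sentinel for "not scheduled"
    assert FAR_FUTURE_EPOCH > 2**32   # normalized to None by the new check
    assert 350_000 < 2**32            # realistic mainnet epochs pass through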
@@ -280,8 +292,8 @@ class BeaconDataProvider:

              if response.status_code == 200:
                  return response.json().get("data")
-         except Exception:
-             pass
+         except Exception as e:
+             logger.debug(f"Failed to get validator performance for index {validator_index}: {e}")

          return None

@@ -332,13 +344,15 @@ class BeaconDataProvider:
              # attestation_head_reward (not a "total" field)
              income = entry.get("income", {})
              if isinstance(income, dict):
-                 # Sum all reward types (values are in gwei)
-                 total_income_gwei += sum(income.values())
-             elif isinstance(income, int):
+                 # Sum all reward types (values are in gwei), filtering out non-numeric
+                 total_income_gwei += sum(
+                     v for v in income.values() if isinstance(v, (int, float))
+                 )
+             elif isinstance(income, (int, float)):
                  total_income_gwei += income
-         except Exception:
+         except Exception as e:
              # On error, continue with partial data
-             pass
+             logger.warning(f"Failed to fetch income for validator batch: {e}")

          return {
              "total_income_eth": Decimal(total_income_gwei) / Decimal(10**9),
csm_dashboard-0.4.0/src/data/cache.py (new file)
@@ -0,0 +1,112 @@
+ """Simple in-memory cache with TTL support and LRU eviction."""
+
+ import hashlib
+ import logging
+ from collections import OrderedDict
+ from datetime import datetime, timedelta
+ from functools import wraps
+ from typing import Any, Callable
+
+ from ..core.config import get_settings
+
+ logger = logging.getLogger(__name__)
+
+ # Default maximum cache entries to prevent unbounded memory growth
+ DEFAULT_MAX_SIZE = 1000
+
+
+ class SimpleCache:
+     """
+     Simple in-memory cache with TTL and LRU eviction.
+
+     Safe for single-threaded async but not thread-safe.
+     Uses OrderedDict for LRU eviction when max_size is reached.
+     """
+
+     def __init__(self, default_ttl: int | None = None, max_size: int = DEFAULT_MAX_SIZE):
+         self._cache: OrderedDict[str, tuple[Any, datetime]] = OrderedDict()
+         self._default_ttl = default_ttl or get_settings().cache_ttl_seconds
+         self._max_size = max_size
+
+     def get(self, key: str) -> Any | None:
+         """Get value from cache if not expired. Moves accessed key to end (LRU)."""
+         if key in self._cache:
+             value, expiry = self._cache[key]
+             if datetime.now() < expiry:
+                 # Move to end to mark as recently used
+                 self._cache.move_to_end(key)
+                 return value
+             # Expired - remove it
+             del self._cache[key]
+         return None
+
+     def set(self, key: str, value: Any, ttl: int | None = None) -> None:
+         """Set value in cache with TTL. Evicts LRU entries if at max size."""
+         # If key exists, remove it first (will be re-added at end)
+         if key in self._cache:
+             del self._cache[key]
+
+         # Evict oldest entries if at max size
+         while len(self._cache) >= self._max_size:
+             oldest_key, _ = self._cache.popitem(last=False)
+             logger.debug(f"Cache eviction: removed {oldest_key[:16]}...")
+
+         expiry = datetime.now() + timedelta(seconds=ttl or self._default_ttl)
+         self._cache[key] = (value, expiry)
+
+     def clear(self) -> None:
+         """Clear all cached values."""
+         self._cache.clear()
+
+     def cleanup_expired(self) -> int:
+         """Remove all expired entries. Returns count of removed entries."""
+         now = datetime.now()
+         expired_keys = [
+             key for key, (_, expiry) in self._cache.items() if now >= expiry
+         ]
+         for key in expired_keys:
+             del self._cache[key]
+         return len(expired_keys)
+
+     @property
+     def size(self) -> int:
+         """Current number of entries in cache."""
+         return len(self._cache)
+
+
+ # Global cache instance
+ _cache = SimpleCache()
+
+
+ def cached(ttl: int | None = None) -> Callable:
+     """Decorator for caching async function results."""
+
+     def decorator(func: Callable) -> Callable:
+         @wraps(func)
+         async def wrapper(*args: Any, **kwargs: Any) -> Any:
+             # Create cache key from function name and arguments
+             # Skip 'self' in args to allow cache sharing across instances (for methods)
+             # Detect 'self' by checking if first arg is an instance with the decorated method
+             cache_args = args
+             if args and hasattr(args[0], func.__name__):
+                 # First arg is likely 'self' - skip it for cache key
+                 cache_args = args[1:]
+             key_data = f"{func.__module__}.{func.__name__}:{repr(cache_args)}:{repr(sorted(kwargs.items()))}"
+             cache_key = hashlib.md5(key_data.encode()).hexdigest()
+
+             cached_result = _cache.get(cache_key)
+             if cached_result is not None:
+                 return cached_result
+
+             result = await func(*args, **kwargs)
+             _cache.set(cache_key, result, ttl)
+             return result
+
+         return wrapper
+
+     return decorator
+
+
+ def get_cache() -> SimpleCache:
+     """Get the global cache instance."""
+     return _cache
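
A short usage sketch for the decorator and the global cache (function name, TTL, and return value are illustrative only):

    import asyncio
    from src.data.cache import cached, get_cache

    @cached(ttl=60)
    async def fetch_validator_count(operator_id: int) -> int:
        ...  # an expensive HTTP call would go here
        return 5

    async def main() -> None:
        await fetch_validator_count(42)  # miss: runs the function, stores result
        await fetch_validator_count(42)  # hit: served from memory within the TTL
        print(get_cache().size)          # 1

    asyncio.run(main())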
csm_dashboard-0.4.0/src/data/database.py (new file)
@@ -0,0 +1,189 @@
+ """SQLite database for persisting saved operators."""
+
+ import json
+ import logging
+ from datetime import datetime
+ from pathlib import Path
+
+ import aiosqlite
+
+ from ..core.config import get_settings
+
+ logger = logging.getLogger(__name__)
+
+ _db_initialized = False
+
+ # Database connection timeout in seconds (prevents hanging on locks)
+ DB_TIMEOUT = 5.0
+
+
+ async def get_db_path() -> Path:
+     """Get the database file path, creating parent directories if needed."""
+     settings = get_settings()
+     db_path = settings.database_path
+     db_path.parent.mkdir(parents=True, exist_ok=True)
+     logger.debug(f"Database path: {db_path}")
+     return db_path
+
+
+ async def init_db() -> None:
+     """Initialize the database schema."""
+     global _db_initialized
+     if _db_initialized:
+         logger.debug("Database already initialized")
+         return
+
+     logger.info("Initializing database schema")
+     db_path = await get_db_path()
+     async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+         # Enable WAL mode for better concurrent access
+         await db.execute("PRAGMA journal_mode=WAL")
+         await db.execute("""
+             CREATE TABLE IF NOT EXISTS saved_operators (
+                 operator_id INTEGER PRIMARY KEY,
+                 manager_address TEXT NOT NULL,
+                 reward_address TEXT NOT NULL,
+                 data_json TEXT NOT NULL,
+                 saved_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                 updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+             )
+         """)
+         await db.commit()
+     _db_initialized = True
+     logger.info("Database initialized successfully")
+
+
+ async def save_operator(operator_id: int, data: dict) -> None:
+     """Save or update an operator in the database.
+
+     Args:
+         operator_id: The operator ID
+         data: The full operator data dictionary (from API response)
+     """
+     await init_db()
+     db_path = await get_db_path()
+
+     manager_address = data.get("manager_address", "")
+     reward_address = data.get("reward_address", "")
+     data_json = json.dumps(data)
+     now = datetime.utcnow().isoformat()
+
+     async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+         await db.execute("""
+             INSERT INTO saved_operators (operator_id, manager_address, reward_address, data_json, saved_at, updated_at)
+             VALUES (?, ?, ?, ?, ?, ?)
+             ON CONFLICT(operator_id) DO UPDATE SET
+                 manager_address = excluded.manager_address,
+                 reward_address = excluded.reward_address,
+                 data_json = excluded.data_json,
+                 updated_at = excluded.updated_at
+         """, (operator_id, manager_address, reward_address, data_json, now, now))
+         await db.commit()
+
+
+ async def get_saved_operators() -> list[dict]:
+     """Get all saved operators with their cached data.
+
+     Returns:
+         List of operator data dictionaries with added metadata (saved_at, updated_at)
+     """
+     try:
+         logger.debug("Getting saved operators from database")
+         await init_db()
+         db_path = await get_db_path()
+
+         async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+             db.row_factory = aiosqlite.Row
+             async with db.execute("""
+                 SELECT operator_id, data_json, saved_at, updated_at
+                 FROM saved_operators
+                 ORDER BY saved_at DESC
+             """) as cursor:
+                 rows = await cursor.fetchall()
+
+         result = []
+         for row in rows:
+             try:
+                 data = json.loads(row["data_json"])
+                 data["_saved_at"] = row["saved_at"]
+                 data["_updated_at"] = row["updated_at"]
+                 result.append(data)
+             except json.JSONDecodeError:
+                 logger.warning(f"Corrupted JSON for operator {row['operator_id']}, skipping")
+                 continue
+
+         logger.debug(f"Retrieved {len(result)} saved operators from database")
+         return result
+     except Exception as e:
+         logger.error(f"Database error in get_saved_operators: {e}", exc_info=True)
+         return []
+
+
+ async def delete_operator(operator_id: int) -> bool:
+     """Remove an operator from the saved list.
+
+     Args:
+         operator_id: The operator ID to remove
+
+     Returns:
+         True if the operator was deleted, False if not found
+     """
+     await init_db()
+     db_path = await get_db_path()
+
+     async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+         cursor = await db.execute(
+             "DELETE FROM saved_operators WHERE operator_id = ?",
+             (operator_id,)
+         )
+         await db.commit()
+         return cursor.rowcount > 0
+
+
+ async def is_operator_saved(operator_id: int) -> bool:
+     """Check if an operator is saved.
+
+     Args:
+         operator_id: The operator ID to check
+
+     Returns:
+         True if the operator is saved, False otherwise
+     """
+     await init_db()
+     db_path = await get_db_path()
+
+     async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+         async with db.execute(
+             "SELECT 1 FROM saved_operators WHERE operator_id = ?",
+             (operator_id,)
+         ) as cursor:
+             row = await cursor.fetchone()
+             return row is not None
+
+
+ async def update_operator_data(operator_id: int, data: dict) -> bool:
+     """Update the cached data for a saved operator.
+
+     Args:
+         operator_id: The operator ID
+         data: The new operator data dictionary
+
+     Returns:
+         True if updated, False if operator not found
+     """
+     await init_db()
+     db_path = await get_db_path()
+
+     manager_address = data.get("manager_address", "")
+     reward_address = data.get("reward_address", "")
+     data_json = json.dumps(data)
+     now = datetime.utcnow().isoformat()
+
+     async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+         cursor = await db.execute("""
+             UPDATE saved_operators
+             SET manager_address = ?, reward_address = ?, data_json = ?, updated_at = ?
+             WHERE operator_id = ?
+         """, (manager_address, reward_address, data_json, now, operator_id))
+         await db.commit()
+         return cursor.rowcount > 0
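
A minimal usage sketch for the persistence layer (operator ID and payload are illustrative; addresses are placeholders):

    import asyncio
    from src.data.database import delete_operator, get_saved_operators, save_operator

    async def main() -> None:
        await save_operator(42, {
            "manager_address": "0x0000000000000000000000000000000000000001",
            "reward_address": "0x0000000000000000000000000000000000000002",
        })
        ops = await get_saved_operators()
        print(len(ops), ops[0]["_saved_at"])  # read-side metadata added above
        await delete_operator(42)

    asyncio.run(main())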
{csm_dashboard-0.3.6 → csm_dashboard-0.4.0}/src/data/etherscan.py
@@ -1,10 +1,16 @@
  """Etherscan API client for event queries."""

+ import json
+ import logging
+ from decimal import Decimal
+
  import httpx
  from web3 import Web3

  from ..core.config import get_settings

+ logger = logging.getLogger(__name__)
+

  class EtherscanProvider:
      """Query contract events via Etherscan API."""
@@ -47,7 +53,12 @@ class EtherscanProvider:
              },
          )

-         data = response.json()
+         try:
+             data = response.json()
+         except json.JSONDecodeError as e:
+             logger.warning(f"Failed to parse Etherscan response: {e}")
+             return []
+
          if data.get("status") != "1":
              return []

@@ -115,7 +126,12 @@ class EtherscanProvider:
              },
          )

-         data = response.json()
+         try:
+             data = response.json()
+         except json.JSONDecodeError as e:
+             logger.warning(f"Failed to parse Etherscan transfer events response: {e}")
+             return []
+
          if data.get("status") != "1":
              return []

@@ -193,7 +209,12 @@ class EtherscanProvider:
              },
          )

-         data = response.json()
+         try:
+             data = response.json()
+         except json.JSONDecodeError as e:
+             logger.warning(f"Failed to parse Etherscan withdrawal requested events: {e}")
+             return []
+
          if data.get("status") != "1":
              return []

@@ -271,7 +292,12 @@ class EtherscanProvider:
              },
          )

-         data = response.json()
+         try:
+             data = response.json()
+         except json.JSONDecodeError as e:
+             logger.warning(f"Failed to parse Etherscan withdrawal claimed events: {e}")
+             return []
+
          if data.get("status") != "1":
              return []

@@ -280,14 +306,14 @@ class EtherscanProvider:
          try:
              # requestId is topic1 (indexed)
              request_id = int(log["topics"][1], 16)
-             # amountOfETH is in data field
-             amount_eth = int(log["data"], 16) / 10**18
+             # amountOfETH is in data field - use Decimal for precision
+             amount_eth = Decimal(int(log["data"], 16)) / Decimal(10**18)

              results.append(
                  {
                      "request_id": request_id,
                      "tx_hash": log["transactionHash"],
-                     "amount_eth": amount_eth,
+                     "amount_eth": float(amount_eth),  # Convert to float for JSON serialization
                      "block": int(log["blockNumber"], 16),
                  }
              )
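
The switch to Decimal above matters because dividing a raw wei amount as a float loses precision once the integer exceeds 2**53, while Decimal keeps the exact quotient until the final float() cast:

    from decimal import Decimal

    wei = 123456789012345678901  # hypothetical amountOfETH in wei
    print(wei / 10**18)                    # 123.45678901234568 (rounded float)
    print(Decimal(wei) / Decimal(10**18))  # 123.456789012345678901 (exact)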