csm-dashboard 0.3.6.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/METADATA +2 -1
- csm_dashboard-0.4.0.dist-info/RECORD +35 -0
- src/cli/commands.py +8 -0
- src/core/config.py +7 -0
- src/core/types.py +3 -6
- src/data/beacon.py +23 -9
- src/data/cache.py +53 -8
- src/data/database.py +189 -0
- src/data/etherscan.py +33 -7
- src/data/ipfs_logs.py +29 -20
- src/data/lido_api.py +38 -12
- src/data/onchain.py +111 -58
- src/data/price.py +46 -0
- src/data/rewards_tree.py +18 -3
- src/data/strikes.py +35 -13
- src/main.py +12 -0
- src/services/operator_service.py +65 -43
- src/web/app.py +794 -72
- src/web/routes.py +372 -0
- csm_dashboard-0.3.6.1.dist-info/RECORD +0 -33
- {csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/WHEEL +0 -0
- {csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/entry_points.txt +0 -0
{csm_dashboard-0.3.6.1.dist-info → csm_dashboard-0.4.0.dist-info}/METADATA
CHANGED

@@ -1,8 +1,9 @@
 Metadata-Version: 2.4
 Name: csm-dashboard
-Version: 0.3.6.1
+Version: 0.4.0
 Summary: Lido CSM Operator Dashboard for tracking validator earnings
 Requires-Python: >=3.11
+Requires-Dist: aiosqlite>=0.19
 Requires-Dist: fastapi>=0.104
 Requires-Dist: httpx>=0.25
 Requires-Dist: pydantic-settings>=2.0
csm_dashboard-0.4.0.dist-info/RECORD
ADDED

@@ -0,0 +1,35 @@
+src/__init__.py,sha256=Bfqpjo9Q1XaV8DNqkf1sADzWg2ACpfZWSGLtahZE3iU,35
+src/main.py,sha256=uzUJpgsicRwJ6e6xnGnVWSUkKIcaicpsP2ds8gtHX8U,1240
+src/abis/CSAccounting.json,sha256=-eBMqw3XqgMDzlVrG8mOrd7IYLf8nfsBIpjYqaKPYno,1281
+src/abis/CSFeeDistributor.json,sha256=unLBacJcCHq4xsmB4xOPlVXcOrxGWNf6KDmC3Ld5u-c,1517
+src/abis/CSModule.json,sha256=T6D6aInBoqVH3ZD6U6p3lrPa3t_ucA9V83IwE80kOuU,1687
+src/abis/WithdrawalQueueERC721.json,sha256=b25a5RQW5HGPH_KQcolecVToHTgYwqeikz0ZEZ9MGxU,1424
+src/abis/__init__.py,sha256=9HV2hKMGSoNAi8evsjzymTr4V4kowITNsX1-LPu6l98,20
+src/abis/stETH.json,sha256=ldxbIRrtt8ePVFewJ9Tnz4qUGFmuOXa41GN1t3tnWEg,1106
+src/cli/__init__.py,sha256=mgHAwomqzAhOHJnlWrWtCguhGzhDWlkCvkzKsfoFsds,35
+src/cli/commands.py,sha256=7Uw_jTygQ-pRTlNaaFnlM8ablZz9Vy3N-X9dvNSXhMg,37971
+src/core/__init__.py,sha256=ZDHZojANK1ZFpn5lQROERTo97MYQFAxqA9APvs7ruEQ,57
+src/core/config.py,sha256=Ue2PPYe_0Obh_Gh8jiRnk1hIqqS2oMtg0x_wSmi_gtM,1976
+src/core/contracts.py,sha256=u1KW0z-9V21Zpf7qxGBvRy2Ffdo4YfGusUdT4K_ggP4,582
+src/core/types.py,sha256=YHDnXMB4OZ8-oUru6quLf5Rk80d_PJF3as2mz-TmF-Y,7760
+src/data/__init__.py,sha256=DItA8aEp8Lbr0uFlJVppMaTtVEEznoA1hkRpH-vfHhk,57
+src/data/beacon.py,sha256=oYw4nkq_UbDo27tlU-5rtPiZ7rgiYsiOdKG279njJzw,16118
+src/data/cache.py,sha256=mMGKtjQ1AAmgWXmR0f36u0FDskMwURacoEvHj214vG0,3858
+src/data/database.py,sha256=M0iXfoiZNCdRPqg1N9mzhY-HfF4HuE48O_qMA4nZguE,6165
+src/data/etherscan.py,sha256=hjNKlvMLCIvFE4vJgwRI5RGVj3pwT-oLb0KpootH3ko,11856
+src/data/ipfs_logs.py,sha256=8NeiR_ixOPNx8xdvWuczrMivwAbjeGhSu3c7tgtFflw,11453
+src/data/known_cids.py,sha256=onwTTNvAv4h0-3LZgLhqMlKzuNH2VhBqQGZ9fm8GyoE,1705
+src/data/lido_api.py,sha256=OEvJ2iX6GeSOEVf-SNKYZeaRHKFQPm1EwCwt37KnDZQ,5781
+src/data/onchain.py,sha256=JSgZ5OCpPpQXpnTr4_Cw8v6z0yQkQISydSfpMGLXCIA,27379
+src/data/price.py,sha256=dbXrTl0bAL3fTmC1ZoKNqFhIe1xdz-yX4iNKGA2tFhY,1426
+src/data/rewards_tree.py,sha256=JxvN5tvhUYIAIjgs2shp4A4OYGn7UXLZ5K9lN7s7oxI,2442
+src/data/strikes.py,sha256=fHd8n7KyQOFN1EhxKgkFXym0pCkEAyc9EdG4Ay-axpc,10219
+src/services/__init__.py,sha256=MC7blFLAMazErCWuyYXvS6sO3uZm1z_RUOtnlIK0kSo,38
+src/services/operator_service.py,sha256=U9y0NBA8KkweH7sA_8uyWNCLNReG_m6o-AR1uHiVDeE,31946
+src/web/__init__.py,sha256=iI2c5xxXmzsNxIetm0P2qE3uVsT-ClsMfzn620r5YTU,40
+src/web/app.py,sha256=yGRNRXxD7NDxjxwWFbXtMlIchPF1x6JeFgJgfYdRtNs,86092
+src/web/routes.py,sha256=XIwkZWH_A7Pot5aiEMBhGWtEOATgz5BoYnDI55jihBw,26040
+csm_dashboard-0.4.0.dist-info/METADATA,sha256=KsQmuAMRPdm4yo7_cLbmDp01_dP1XL4hIZuRSnOdfOY,12583
+csm_dashboard-0.4.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+csm_dashboard-0.4.0.dist-info/entry_points.txt,sha256=P1Ul8ALIPBwDlVlXqTPuzJ64xxRpIJsYW8U73Tyjgtg,37
+csm_dashboard-0.4.0.dist-info/RECORD,,
src/cli/commands.py
CHANGED
@@ -110,6 +110,8 @@ def format_as_api_json(rewards: OperatorRewards, include_validators: bool = Fals
                 "duration_days": f.duration_days,
                 "validator_count": f.validator_count,
                 "apy": f.apy,
+                "bond_apy": f.bond_apy,
+                "net_apy": f.net_apy,
             }
             for f in rewards.apy.frames
         ]

@@ -553,6 +555,9 @@ def rewards(
     history_table.add_column("Rewards (ETH)", style="green", justify="right")
     history_table.add_column("Vals", style="dim", justify="right")
     history_table.add_column("ETH/Val", style="green", justify="right")
+    history_table.add_column("Reward APY", style="green", justify="right")
+    history_table.add_column("Bond APY", style="green", justify="right")
+    history_table.add_column("Net APY", style="bold yellow", justify="right")

     # Display oldest first (chronological order)
     for frame in rewards.apy.frames:

@@ -571,6 +576,9 @@ def rewards(
             f"{frame.rewards_eth:.4f}",
             str(frame.validator_count),
             f"{eth_per_val:.6f}",
+            fmt_apy(frame.apy),
+            fmt_apy(frame.bond_apy),
+            fmt_apy(frame.net_apy),
         )

     console.print(history_table)
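Note: the hunks above route the new per-frame APY fields through both the JSON output and the Rich history table. The `fmt_apy` helper is referenced but its definition is not part of this diff; a minimal sketch of what such a formatter might look like (hypothetical, not the package's actual implementation):

    def fmt_apy(value: float | None) -> str:
        # Render a fractional APY (e.g. 0.059) as a percentage, dash for missing data
        return f"{value * 100:.2f}%" if value is not None else "-"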
src/core/config.py
CHANGED
@@ -1,6 +1,7 @@
 """Configuration management using pydantic-settings."""

 from functools import lru_cache
+from pathlib import Path

 from pydantic_settings import BaseSettings, SettingsConfigDict


@@ -31,6 +32,12 @@ class Settings(BaseSettings):
     # Cache Settings
     cache_ttl_seconds: int = 300  # 5 minutes

+    # Database Settings
+    database_path: Path = Path.home() / ".cache" / "csm-dashboard" / "operators.db"
+    # IPFS Gateway Configuration (comma-separated list)
+    # Default: dweb.link (IPFS Foundation), ipfs.io, w3s.link (web3.storage)
+    ipfs_gateways: str = "https://dweb.link/ipfs/,https://ipfs.io/ipfs/,https://w3s.link/ipfs/"
+
     # Contract Addresses (Mainnet)
     csmodule_address: str = "0xdA7dE2ECdDfccC6c3AF10108Db212ACBBf9EA83F"
     csaccounting_address: str = "0x4d72BFF1BeaC69925F8Bd12526a39BAAb069e5Da"
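Note: because `Settings` is a pydantic-settings `BaseSettings` subclass, the new fields can be overridden through environment variables with no code changes. A sketch, assuming the model config sets no env_prefix (the `SettingsConfigDict` contents are not visible in this diff); the paths and gateway URL here are made-up examples:

    import os

    # Must be set before the first call, since get_settings() is lru_cache'd
    os.environ["DATABASE_PATH"] = "/var/lib/csm-dashboard/operators.db"
    os.environ["IPFS_GATEWAYS"] = "https://my-gateway.example.com/ipfs/"

    from src.core.config import get_settings
    print(get_settings().database_path)  # PosixPath('/var/lib/csm-dashboard/operators.db')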
src/core/types.py
CHANGED
@@ -58,7 +58,9 @@ class DistributionFrame(BaseModel):
     rewards_shares: int
     duration_days: float
     validator_count: int = 0  # Number of validators in this frame
-    apy: float | None = None  #
+    apy: float | None = None  # Reward APY for this frame (kept for backwards compat)
+    bond_apy: float | None = None  # Bond APY for this frame
+    net_apy: float | None = None  # Net APY (reward + bond) for this frame


 class WithdrawalEvent(BaseModel):

@@ -144,11 +146,6 @@ class APYMetrics(BaseModel):
     current_net_total_eth: float | None = None
     lifetime_net_total_eth: float | None = None

-    # Legacy fields (deprecated, kept for backwards compatibility)
-    reward_apy_7d: float | None = None
-    reward_apy_28d: float | None = None
-    net_apy_7d: float | None = None
-

 class StrikeSummary(BaseModel):
     """Summary of strikes for an operator."""
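Note: `DistributionFrame` now carries the per-frame APY decomposition directly, replacing the deprecated rolling-window fields dropped from `APYMetrics`. Going by the field comments, the relationship is simply additive; a tiny illustration with made-up numbers:

    reward_apy = 0.031               # 'apy': rewards earned by the frame's validators
    bond_apy = 0.028                 # 'bond_apy': yield accrued on the operator's bond
    net_apy = reward_apy + bond_apy  # 'net_apy': 0.059, per the "reward + bond" comment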
src/data/beacon.py
CHANGED
@@ -1,12 +1,15 @@
 """Beacon chain data fetching via beaconcha.in API."""

 import asyncio
+import logging
 from datetime import datetime, timedelta, timezone
 from decimal import Decimal
 from enum import Enum

 import httpx

+logger = logging.getLogger(__name__)
+
 from ..core.config import get_settings
 from .cache import cached

@@ -256,14 +259,23 @@ class BeaconDataProvider:

     def _parse_validator(self, data: dict) -> ValidatorInfo:
         """Parse beaconcha.in validator response."""
+        # Validate epoch values - far-future values (like 2^64-1) indicate "not set"
+        activation_epoch = data.get("activationepoch")
+        if activation_epoch is not None and (activation_epoch < 0 or activation_epoch > 2**32):
+            activation_epoch = None
+
+        exit_epoch = data.get("exitepoch")
+        if exit_epoch is not None and (exit_epoch < 0 or exit_epoch > 2**32):
+            exit_epoch = None
+
         return ValidatorInfo(
             pubkey=data.get("pubkey", ""),
             index=data.get("validatorindex"),
             status=ValidatorStatus.from_beaconcha(data.get("status", "unknown")),
             balance_gwei=data.get("balance", 0),
             effectiveness=data.get("effectiveness"),
-            activation_epoch=data.get("activationepoch"),
-            exit_epoch=data.get("exitepoch"),
+            activation_epoch=activation_epoch,
+            exit_epoch=exit_epoch,
         )

     @cached(ttl=300)
@@ -280,8 +292,8 @@ class BeaconDataProvider:

             if response.status_code == 200:
                 return response.json().get("data")
-        except Exception:
-            pass
+        except Exception as e:
+            logger.debug(f"Failed to get validator performance for index {validator_index}: {e}")

         return None

@@ -332,13 +344,15 @@ class BeaconDataProvider:
                 # attestation_head_reward (not a "total" field)
                 income = entry.get("income", {})
                 if isinstance(income, dict):
-                    # Sum all reward types (values are in gwei)
-                    total_income_gwei += sum(income.values())
-                else:
+                    # Sum all reward types (values are in gwei), filtering out non-numeric
+                    total_income_gwei += sum(
+                        v for v in income.values() if isinstance(v, (int, float))
+                    )
+                elif isinstance(income, (int, float)):
                     total_income_gwei += income
-        except Exception:
+        except Exception as e:
             # On error, continue with partial data
-            pass
+            logger.warning(f"Failed to fetch income for validator batch: {e}")

         return {
             "total_income_eth": Decimal(total_income_gwei) / Decimal(10**9),
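Note: the beacon chain uses FAR_FUTURE_EPOCH (2^64 - 1) as a sentinel for epochs that are not yet set, e.g. the exit epoch of a still-active validator; the new guard in `_parse_validator` treats anything outside a sane range as unset. A standalone sketch of the same check:

    FAR_FUTURE_EPOCH = 2**64 - 1  # beacon chain sentinel for "not set"

    def sanitize_epoch(value: int | None) -> int | None:
        # Mirrors the guard added above: reject negatives and implausibly
        # large values (the diff uses 2**32 as the upper bound)
        if value is not None and (value < 0 or value > 2**32):
            return None
        return value

    assert sanitize_epoch(FAR_FUTURE_EPOCH) is None
    assert sanitize_epoch(194048) == 194048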
src/data/cache.py
CHANGED
@@ -1,31 +1,56 @@
-"""Simple in-memory cache with TTL support."""
+"""Simple in-memory cache with TTL support and LRU eviction."""

 import hashlib
+import logging
+from collections import OrderedDict
 from datetime import datetime, timedelta
 from functools import wraps
 from typing import Any, Callable

 from ..core.config import get_settings

+logger = logging.getLogger(__name__)
+
+# Default maximum cache entries to prevent unbounded memory growth
+DEFAULT_MAX_SIZE = 1000
+

 class SimpleCache:
-    """
+    """
+    Simple in-memory cache with TTL and LRU eviction.
+
+    Safe for single-threaded async but not thread-safe.
+    Uses OrderedDict for LRU eviction when max_size is reached.
+    """

-    def __init__(self, default_ttl: int | None = None):
-        self._cache: dict[str, tuple[Any, datetime]] = {}
+    def __init__(self, default_ttl: int | None = None, max_size: int = DEFAULT_MAX_SIZE):
+        self._cache: OrderedDict[str, tuple[Any, datetime]] = OrderedDict()
         self._default_ttl = default_ttl or get_settings().cache_ttl_seconds
+        self._max_size = max_size

     def get(self, key: str) -> Any | None:
-        """Get value from cache if not expired."""
+        """Get value from cache if not expired. Moves accessed key to end (LRU)."""
         if key in self._cache:
             value, expiry = self._cache[key]
             if datetime.now() < expiry:
+                # Move to end to mark as recently used
+                self._cache.move_to_end(key)
                 return value
+            # Expired - remove it
             del self._cache[key]
         return None

     def set(self, key: str, value: Any, ttl: int | None = None) -> None:
-        """Set value in cache with TTL."""
+        """Set value in cache with TTL. Evicts LRU entries if at max size."""
+        # If key exists, remove it first (will be re-added at end)
+        if key in self._cache:
+            del self._cache[key]
+
+        # Evict oldest entries if at max size
+        while len(self._cache) >= self._max_size:
+            oldest_key, _ = self._cache.popitem(last=False)
+            logger.debug(f"Cache eviction: removed {oldest_key[:16]}...")
+
         expiry = datetime.now() + timedelta(seconds=ttl or self._default_ttl)
         self._cache[key] = (value, expiry)
@@ -33,6 +58,21 @@ class SimpleCache:
         """Clear all cached values."""
         self._cache.clear()

+    def cleanup_expired(self) -> int:
+        """Remove all expired entries. Returns count of removed entries."""
+        now = datetime.now()
+        expired_keys = [
+            key for key, (_, expiry) in self._cache.items() if now >= expiry
+        ]
+        for key in expired_keys:
+            del self._cache[key]
+        return len(expired_keys)
+
+    @property
+    def size(self) -> int:
+        """Current number of entries in cache."""
+        return len(self._cache)
+

 # Global cache instance
 _cache = SimpleCache()
@@ -45,8 +85,13 @@ def cached(ttl: int | None = None) -> Callable:
     @wraps(func)
     async def wrapper(*args: Any, **kwargs: Any) -> Any:
         # Create cache key from function name and arguments
-            #
-
+            # Skip 'self' in args to allow cache sharing across instances (for methods)
+            # Detect 'self' by checking if first arg is an instance with the decorated method
+            cache_args = args
+            if args and hasattr(args[0], func.__name__):
+                # First arg is likely 'self' - skip it for cache key
+                cache_args = args[1:]
+            key_data = f"{func.__module__}.{func.__name__}:{repr(cache_args)}:{repr(sorted(kwargs.items()))}"
             cache_key = hashlib.md5(key_data.encode()).hexdigest()

             cached_result = _cache.get(cache_key)
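Note: the reworked `SimpleCache` combines TTL expiry with LRU eviction, so the global cache can no longer grow without bound. A usage sketch of the eviction behavior, assuming the class as reconstructed above:

    cache = SimpleCache(default_ttl=300, max_size=2)
    cache.set("a", 1)
    cache.set("b", 2)
    cache.get("a")       # touching "a" moves it to the most-recently-used end
    cache.set("c", 3)    # at capacity: evicts "b", the least recently used
    assert cache.get("b") is None
    assert cache.size == 2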
src/data/database.py
ADDED
@@ -0,0 +1,189 @@
+"""SQLite database for persisting saved operators."""
+
+import json
+import logging
+from datetime import datetime
+from pathlib import Path
+
+import aiosqlite
+
+from ..core.config import get_settings
+
+logger = logging.getLogger(__name__)
+
+_db_initialized = False
+
+# Database connection timeout in seconds (prevents hanging on locks)
+DB_TIMEOUT = 5.0
+
+
+async def get_db_path() -> Path:
+    """Get the database file path, creating parent directories if needed."""
+    settings = get_settings()
+    db_path = settings.database_path
+    db_path.parent.mkdir(parents=True, exist_ok=True)
+    logger.debug(f"Database path: {db_path}")
+    return db_path
+
+
+async def init_db() -> None:
+    """Initialize the database schema."""
+    global _db_initialized
+    if _db_initialized:
+        logger.debug("Database already initialized")
+        return
+
+    logger.info("Initializing database schema")
+    db_path = await get_db_path()
+    async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+        # Enable WAL mode for better concurrent access
+        await db.execute("PRAGMA journal_mode=WAL")
+        await db.execute("""
+            CREATE TABLE IF NOT EXISTS saved_operators (
+                operator_id INTEGER PRIMARY KEY,
+                manager_address TEXT NOT NULL,
+                reward_address TEXT NOT NULL,
+                data_json TEXT NOT NULL,
+                saved_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        """)
+        await db.commit()
+    _db_initialized = True
+    logger.info("Database initialized successfully")
+
+
+async def save_operator(operator_id: int, data: dict) -> None:
+    """Save or update an operator in the database.
+
+    Args:
+        operator_id: The operator ID
+        data: The full operator data dictionary (from API response)
+    """
+    await init_db()
+    db_path = await get_db_path()
+
+    manager_address = data.get("manager_address", "")
+    reward_address = data.get("reward_address", "")
+    data_json = json.dumps(data)
+    now = datetime.utcnow().isoformat()
+
+    async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+        await db.execute("""
+            INSERT INTO saved_operators (operator_id, manager_address, reward_address, data_json, saved_at, updated_at)
+            VALUES (?, ?, ?, ?, ?, ?)
+            ON CONFLICT(operator_id) DO UPDATE SET
+                manager_address = excluded.manager_address,
+                reward_address = excluded.reward_address,
+                data_json = excluded.data_json,
+                updated_at = excluded.updated_at
+        """, (operator_id, manager_address, reward_address, data_json, now, now))
+        await db.commit()
+
+
+async def get_saved_operators() -> list[dict]:
+    """Get all saved operators with their cached data.
+
+    Returns:
+        List of operator data dictionaries with added metadata (saved_at, updated_at)
+    """
+    try:
+        logger.debug("Getting saved operators from database")
+        await init_db()
+        db_path = await get_db_path()
+
+        async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+            db.row_factory = aiosqlite.Row
+            async with db.execute("""
+                SELECT operator_id, data_json, saved_at, updated_at
+                FROM saved_operators
+                ORDER BY saved_at DESC
+            """) as cursor:
+                rows = await cursor.fetchall()
+
+        result = []
+        for row in rows:
+            try:
+                data = json.loads(row["data_json"])
+                data["_saved_at"] = row["saved_at"]
+                data["_updated_at"] = row["updated_at"]
+                result.append(data)
+            except json.JSONDecodeError:
+                logger.warning(f"Corrupted JSON for operator {row['operator_id']}, skipping")
+                continue
+
+        logger.debug(f"Retrieved {len(result)} saved operators from database")
+        return result
+    except Exception as e:
+        logger.error(f"Database error in get_saved_operators: {e}", exc_info=True)
+        return []
+
+
+async def delete_operator(operator_id: int) -> bool:
+    """Remove an operator from the saved list.
+
+    Args:
+        operator_id: The operator ID to remove
+
+    Returns:
+        True if the operator was deleted, False if not found
+    """
+    await init_db()
+    db_path = await get_db_path()
+
+    async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+        cursor = await db.execute(
+            "DELETE FROM saved_operators WHERE operator_id = ?",
+            (operator_id,)
+        )
+        await db.commit()
+        return cursor.rowcount > 0
+
+
+async def is_operator_saved(operator_id: int) -> bool:
+    """Check if an operator is saved.
+
+    Args:
+        operator_id: The operator ID to check
+
+    Returns:
+        True if the operator is saved, False otherwise
+    """
+    await init_db()
+    db_path = await get_db_path()
+
+    async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+        async with db.execute(
+            "SELECT 1 FROM saved_operators WHERE operator_id = ?",
+            (operator_id,)
+        ) as cursor:
+            row = await cursor.fetchone()
+            return row is not None
+
+
+async def update_operator_data(operator_id: int, data: dict) -> bool:
+    """Update the cached data for a saved operator.
+
+    Args:
+        operator_id: The operator ID
+        data: The new operator data dictionary
+
+    Returns:
+        True if updated, False if operator not found
+    """
+    await init_db()
+    db_path = await get_db_path()
+
+    manager_address = data.get("manager_address", "")
+    reward_address = data.get("reward_address", "")
+    data_json = json.dumps(data)
+    now = datetime.utcnow().isoformat()
+
+    async with aiosqlite.connect(db_path, timeout=DB_TIMEOUT) as db:
+        cursor = await db.execute("""
+            UPDATE saved_operators
+            SET manager_address = ?, reward_address = ?, data_json = ?, updated_at = ?
+            WHERE operator_id = ?
+        """, (manager_address, reward_address, data_json, now, operator_id))
+        await db.commit()
+        return cursor.rowcount > 0
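Note: the new module is a small self-initializing persistence layer; every public helper calls `init_db()` first, so there is no separate setup step. A usage sketch (the operator ID and addresses are placeholders):

    import asyncio
    from src.data.database import (
        save_operator, get_saved_operators, is_operator_saved, delete_operator,
    )

    async def demo() -> None:
        await save_operator(42, {"manager_address": "0xabc...", "reward_address": "0xdef..."})
        assert await is_operator_saved(42)
        for op in await get_saved_operators():
            print(op["_saved_at"], op.get("manager_address"))
        await delete_operator(42)

    asyncio.run(demo())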
src/data/etherscan.py
CHANGED
@@ -1,10 +1,16 @@
 """Etherscan API client for event queries."""

+import json
+import logging
+from decimal import Decimal
+
 import httpx
 from web3 import Web3

 from ..core.config import get_settings

+logger = logging.getLogger(__name__)
+

 class EtherscanProvider:
     """Query contract events via Etherscan API."""

@@ -47,7 +53,12 @@ class EtherscanProvider:
             },
         )

-        data = response.json()
+        try:
+            data = response.json()
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to parse Etherscan response: {e}")
+            return []
+
         if data.get("status") != "1":
             return []

@@ -115,7 +126,12 @@ class EtherscanProvider:
             },
         )

-        data = response.json()
+        try:
+            data = response.json()
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to parse Etherscan transfer events response: {e}")
+            return []
+
         if data.get("status") != "1":
             return []

@@ -193,7 +209,12 @@ class EtherscanProvider:
             },
         )

-        data = response.json()
+        try:
+            data = response.json()
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to parse Etherscan withdrawal requested events: {e}")
+            return []
+
         if data.get("status") != "1":
             return []

@@ -271,7 +292,12 @@ class EtherscanProvider:
             },
         )

-        data = response.json()
+        try:
+            data = response.json()
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to parse Etherscan withdrawal claimed events: {e}")
+            return []
+
         if data.get("status") != "1":
             return []

@@ -280,14 +306,14 @@ class EtherscanProvider:
             try:
                 # requestId is topic1 (indexed)
                 request_id = int(log["topics"][1], 16)
-                # amountOfETH is in data field
-                amount_eth = int(log["data"], 16) / 10**18
+                # amountOfETH is in data field - use Decimal for precision
+                amount_eth = Decimal(int(log["data"], 16)) / Decimal(10**18)

                 results.append(
                     {
                         "request_id": request_id,
                         "tx_hash": log["transactionHash"],
-                        "amount_eth": amount_eth,
+                        "amount_eth": float(amount_eth),  # Convert to float for JSON serialization
                         "block": int(log["blockNumber"], 16),
                     }
                 )