brawny-0.1.13-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. brawny/__init__.py +106 -0
  2. brawny/_context.py +232 -0
  3. brawny/_rpc/__init__.py +38 -0
  4. brawny/_rpc/broadcast.py +172 -0
  5. brawny/_rpc/clients.py +98 -0
  6. brawny/_rpc/context.py +49 -0
  7. brawny/_rpc/errors.py +252 -0
  8. brawny/_rpc/gas.py +158 -0
  9. brawny/_rpc/manager.py +982 -0
  10. brawny/_rpc/selector.py +156 -0
  11. brawny/accounts.py +534 -0
  12. brawny/alerts/__init__.py +132 -0
  13. brawny/alerts/abi_resolver.py +530 -0
  14. brawny/alerts/base.py +152 -0
  15. brawny/alerts/context.py +271 -0
  16. brawny/alerts/contracts.py +635 -0
  17. brawny/alerts/encoded_call.py +201 -0
  18. brawny/alerts/errors.py +267 -0
  19. brawny/alerts/events.py +680 -0
  20. brawny/alerts/function_caller.py +364 -0
  21. brawny/alerts/health.py +185 -0
  22. brawny/alerts/routing.py +118 -0
  23. brawny/alerts/send.py +364 -0
  24. brawny/api.py +660 -0
  25. brawny/chain.py +93 -0
  26. brawny/cli/__init__.py +16 -0
  27. brawny/cli/app.py +17 -0
  28. brawny/cli/bootstrap.py +37 -0
  29. brawny/cli/commands/__init__.py +41 -0
  30. brawny/cli/commands/abi.py +93 -0
  31. brawny/cli/commands/accounts.py +632 -0
  32. brawny/cli/commands/console.py +495 -0
  33. brawny/cli/commands/contract.py +139 -0
  34. brawny/cli/commands/health.py +112 -0
  35. brawny/cli/commands/init_project.py +86 -0
  36. brawny/cli/commands/intents.py +130 -0
  37. brawny/cli/commands/job_dev.py +254 -0
  38. brawny/cli/commands/jobs.py +308 -0
  39. brawny/cli/commands/logs.py +87 -0
  40. brawny/cli/commands/maintenance.py +182 -0
  41. brawny/cli/commands/migrate.py +51 -0
  42. brawny/cli/commands/networks.py +253 -0
  43. brawny/cli/commands/run.py +249 -0
  44. brawny/cli/commands/script.py +209 -0
  45. brawny/cli/commands/signer.py +248 -0
  46. brawny/cli/helpers.py +265 -0
  47. brawny/cli_templates.py +1445 -0
  48. brawny/config/__init__.py +74 -0
  49. brawny/config/models.py +404 -0
  50. brawny/config/parser.py +633 -0
  51. brawny/config/routing.py +55 -0
  52. brawny/config/validation.py +246 -0
  53. brawny/daemon/__init__.py +14 -0
  54. brawny/daemon/context.py +69 -0
  55. brawny/daemon/core.py +702 -0
  56. brawny/daemon/loops.py +327 -0
  57. brawny/db/__init__.py +78 -0
  58. brawny/db/base.py +986 -0
  59. brawny/db/base_new.py +165 -0
  60. brawny/db/circuit_breaker.py +97 -0
  61. brawny/db/global_cache.py +298 -0
  62. brawny/db/mappers.py +182 -0
  63. brawny/db/migrate.py +349 -0
  64. brawny/db/migrations/001_init.sql +186 -0
  65. brawny/db/migrations/002_add_included_block.sql +7 -0
  66. brawny/db/migrations/003_add_broadcast_at.sql +10 -0
  67. brawny/db/migrations/004_broadcast_binding.sql +20 -0
  68. brawny/db/migrations/005_add_retry_after.sql +9 -0
  69. brawny/db/migrations/006_add_retry_count_column.sql +11 -0
  70. brawny/db/migrations/007_add_gap_tracking.sql +18 -0
  71. brawny/db/migrations/008_add_transactions.sql +72 -0
  72. brawny/db/migrations/009_add_intent_metadata.sql +5 -0
  73. brawny/db/migrations/010_add_nonce_gap_index.sql +9 -0
  74. brawny/db/migrations/011_add_job_logs.sql +24 -0
  75. brawny/db/migrations/012_add_claimed_by.sql +5 -0
  76. brawny/db/ops/__init__.py +29 -0
  77. brawny/db/ops/attempts.py +108 -0
  78. brawny/db/ops/blocks.py +83 -0
  79. brawny/db/ops/cache.py +93 -0
  80. brawny/db/ops/intents.py +296 -0
  81. brawny/db/ops/jobs.py +110 -0
  82. brawny/db/ops/logs.py +97 -0
  83. brawny/db/ops/nonces.py +322 -0
  84. brawny/db/postgres.py +2535 -0
  85. brawny/db/postgres_new.py +196 -0
  86. brawny/db/queries.py +584 -0
  87. brawny/db/sqlite.py +2733 -0
  88. brawny/db/sqlite_new.py +191 -0
  89. brawny/history.py +126 -0
  90. brawny/interfaces.py +136 -0
  91. brawny/invariants.py +155 -0
  92. brawny/jobs/__init__.py +26 -0
  93. brawny/jobs/base.py +287 -0
  94. brawny/jobs/discovery.py +233 -0
  95. brawny/jobs/job_validation.py +111 -0
  96. brawny/jobs/kv.py +125 -0
  97. brawny/jobs/registry.py +283 -0
  98. brawny/keystore.py +484 -0
  99. brawny/lifecycle.py +551 -0
  100. brawny/logging.py +290 -0
  101. brawny/metrics.py +594 -0
  102. brawny/model/__init__.py +53 -0
  103. brawny/model/contexts.py +319 -0
  104. brawny/model/enums.py +70 -0
  105. brawny/model/errors.py +194 -0
  106. brawny/model/events.py +93 -0
  107. brawny/model/startup.py +20 -0
  108. brawny/model/types.py +483 -0
  109. brawny/networks/__init__.py +96 -0
  110. brawny/networks/config.py +269 -0
  111. brawny/networks/manager.py +423 -0
  112. brawny/obs/__init__.py +67 -0
  113. brawny/obs/emit.py +158 -0
  114. brawny/obs/health.py +175 -0
  115. brawny/obs/heartbeat.py +133 -0
  116. brawny/reconciliation.py +108 -0
  117. brawny/scheduler/__init__.py +19 -0
  118. brawny/scheduler/poller.py +472 -0
  119. brawny/scheduler/reorg.py +632 -0
  120. brawny/scheduler/runner.py +708 -0
  121. brawny/scheduler/shutdown.py +371 -0
  122. brawny/script_tx.py +297 -0
  123. brawny/scripting.py +251 -0
  124. brawny/startup.py +76 -0
  125. brawny/telegram.py +393 -0
  126. brawny/testing.py +108 -0
  127. brawny/tx/__init__.py +41 -0
  128. brawny/tx/executor.py +1071 -0
  129. brawny/tx/fees.py +50 -0
  130. brawny/tx/intent.py +423 -0
  131. brawny/tx/monitor.py +628 -0
  132. brawny/tx/nonce.py +498 -0
  133. brawny/tx/replacement.py +456 -0
  134. brawny/tx/utils.py +26 -0
  135. brawny/utils.py +205 -0
  136. brawny/validation.py +69 -0
  137. brawny-0.1.13.dist-info/METADATA +156 -0
  138. brawny-0.1.13.dist-info/RECORD +141 -0
  139. brawny-0.1.13.dist-info/WHEEL +5 -0
  140. brawny-0.1.13.dist-info/entry_points.txt +2 -0
  141. brawny-0.1.13.dist-info/top_level.txt +1 -0
brawny/db/base_new.py ADDED
@@ -0,0 +1,165 @@
+ """Slim database interface for brawny.
+
+ Provides 4 execution primitives + transaction + connect/close.
+ All domain operations live in db/ops/ modules.
+ """
+
+ from __future__ import annotations
+
+ from abc import ABC, abstractmethod
+ from contextlib import contextmanager
+ from dataclasses import dataclass
+ from datetime import datetime
+ from typing import Any, Iterator, Literal
+
+
+ Dialect = Literal["postgres", "sqlite"]
+ IsolationLevel = Literal["SERIALIZABLE", "READ COMMITTED", "REPEATABLE READ"]
+
+
+ @dataclass
+ class BlockState:
+     """Block processing state."""
+
+     chain_id: int
+     last_processed_block_number: int
+     last_processed_block_hash: str
+     created_at: datetime
+     updated_at: datetime
+
+
+ @dataclass
+ class BlockHashEntry:
+     """Block hash history entry for reorg detection."""
+
+     id: int
+     chain_id: int
+     block_number: int
+     block_hash: str
+     inserted_at: datetime
+
+
+ @dataclass
+ class ABICacheEntry:
+     """Cached ABI entry."""
+
+     chain_id: int
+     address: str
+     abi_json: str
+     source: str
+     resolved_at: datetime
+
+
+ @dataclass
+ class ProxyCacheEntry:
+     """Cached proxy resolution entry."""
+
+     chain_id: int
+     proxy_address: str
+     implementation_address: str
+     resolved_at: datetime
+
+
+ class Database(ABC):
+     """Database interface with 4 execution primitives.
+
+     Implementations provide connection management and query execution.
+     SQL queries are in db/queries.py, row mapping in db/mappers.py.
+     """
+
+     @property
+     @abstractmethod
+     def dialect(self) -> Dialect:
+         """Return dialect name for query selection."""
+         ...
+
+     @abstractmethod
+     def connect(self) -> None:
+         """Establish database connection."""
+         ...
+
+     @abstractmethod
+     def close(self) -> None:
+         """Close database connection and cleanup resources."""
+         ...
+
+     @abstractmethod
+     def is_connected(self) -> bool:
+         """Check if database is connected."""
+         ...
+
+     @abstractmethod
+     @contextmanager
+     def transaction(
+         self, isolation_level: IsolationLevel | None = None
+     ) -> Iterator[None]:
+         """Context manager for database transactions.
+
+         Args:
+             isolation_level: Optional isolation level (Postgres only, ignored on SQLite)
+
+         Usage:
+             with db.transaction():
+                 ops.intents.create_intent(db, ...)
+                 ops.nonces.reserve_nonce(db, ...)
+
+             # For atomic nonce reservation on Postgres
+             with db.transaction(isolation_level="SERIALIZABLE"):
+                 ...
+         """
+         ...
+
+     @abstractmethod
+     def execute(self, query: str, params: dict[str, Any] | None = None) -> None:
+         """Execute a query without returning results.
+
+         Args:
+             query: SQL with :name placeholders
+             params: Dict of parameter values
+         """
+         ...
+
+     @abstractmethod
+     def fetch_one(
+         self, query: str, params: dict[str, Any] | None = None
+     ) -> dict[str, Any] | None:
+         """Execute a query and return single result or None.
+
+         Args:
+             query: SQL with :name placeholders
+             params: Dict of parameter values
+
+         Returns:
+             Single row as dict, or None if no results
+         """
+         ...
+
+     @abstractmethod
+     def fetch_all(
+         self, query: str, params: dict[str, Any] | None = None
+     ) -> list[dict[str, Any]]:
+         """Execute a query and return all results.
+
+         Args:
+             query: SQL with :name placeholders
+             params: Dict of parameter values
+
+         Returns:
+             List of rows as dicts
+         """
+         ...
+
+     @abstractmethod
+     def execute_rowcount(
+         self, query: str, params: dict[str, Any] | None = None
+     ) -> int:
+         """Execute a query and return affected row count.
+
+         Args:
+             query: SQL with :name placeholders
+             params: Dict of parameter values
+
+         Returns:
+             Number of rows affected
+         """
+         ...
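Note: the interface above deliberately exposes only fetch_one / fetch_all / execute / execute_rowcount plus transaction(); domain logic is meant to live in the db/ops/ modules that compose them. A minimal sketch of that pattern, using an illustrative table and helper names rather than brawny's actual schema:

def get_block_state(db: Database, chain_id: int) -> dict | None:
    # fetch_one returns a plain dict (or None); mapping to dataclasses is a separate concern
    return db.fetch_one(
        "SELECT * FROM block_state WHERE chain_id = :chain_id",
        {"chain_id": chain_id},
    )

def advance_block(db: Database, chain_id: int, number: int, block_hash: str) -> int:
    # Group related writes in one transaction; the isolation level only applies on Postgres
    with db.transaction(isolation_level="SERIALIZABLE"):
        return db.execute_rowcount(
            "UPDATE block_state SET last_processed_block_number = :n, "
            "last_processed_block_hash = :h WHERE chain_id = :chain_id",
            {"n": number, "h": block_hash, "chain_id": chain_id},
        )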
brawny/db/circuit_breaker.py ADDED
@@ -0,0 +1,97 @@
+ """Database circuit breaker for outage protection."""
+
+ from __future__ import annotations
+
+ import time
+ from dataclasses import dataclass
+
+ from brawny.logging import get_logger
+ from brawny.metrics import (
+     DB_CIRCUIT_BREAKER_OPEN,
+     DB_CIRCUIT_BREAKER_STATE,
+     get_metrics,
+ )
+ from brawny.model.errors import DatabaseCircuitBreakerOpenError
+
+ logger = get_logger(__name__)
+
+
+ @dataclass
+ class CircuitBreakerState:
+     consecutive_failures: int = 0
+     open_until: float | None = None
+     alert_sent: bool = False
+
+
+ class DatabaseCircuitBreaker:
+     """Simple circuit breaker for database operations."""
+
+     def __init__(
+         self,
+         failure_threshold: int,
+         open_seconds: int,
+         backend: str,
+     ) -> None:
+         self._failure_threshold = max(1, failure_threshold)
+         self._open_seconds = max(1, open_seconds)
+         self._backend = backend
+         self._state = CircuitBreakerState()
+
+     def before_call(self) -> None:
+         """Raise if breaker is open."""
+         if self._is_open():
+             raise DatabaseCircuitBreakerOpenError(
+                 "Database circuit breaker is open."
+             )
+
+     def record_success(self) -> None:
+         """Reset breaker on successful call."""
+         if self._state.consecutive_failures or self._state.open_until is not None:
+             metrics = get_metrics()
+             metrics.gauge(DB_CIRCUIT_BREAKER_STATE).set(
+                 0,
+                 db_backend=self._backend,
+             )
+         self._state.consecutive_failures = 0
+         self._state.open_until = None
+         self._state.alert_sent = False
+
+     def record_failure(self, error: Exception) -> None:
+         """Record a failed DB call and open breaker if threshold is reached."""
+         self._state.consecutive_failures += 1
+         if self._state.consecutive_failures < self._failure_threshold:
+             return
+
+         now = time.time()
+         if self._state.open_until and now < self._state.open_until:
+             return
+
+         self._state.open_until = now + self._open_seconds
+         metrics = get_metrics()
+         metrics.counter(DB_CIRCUIT_BREAKER_OPEN).inc(
+             db_backend=self._backend,
+         )
+         metrics.gauge(DB_CIRCUIT_BREAKER_STATE).set(
+             1,
+             db_backend=self._backend,
+         )
+
+         if not self._state.alert_sent:
+             logger.error(
+                 "db.circuit_breaker.open",
+                 db_backend=self._backend,
+                 failure_threshold=self._failure_threshold,
+                 open_seconds=self._open_seconds,
+                 error=str(error)[:200],
+             )
+             self._state.alert_sent = True
+
+     def _is_open(self) -> bool:
+         if self._state.open_until is None:
+             return False
+         if time.time() >= self._state.open_until:
+             self._state.open_until = None
+             self._state.consecutive_failures = 0
+             self._state.alert_sent = False
+             return False
+         return True
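The breaker exposes just three hooks (before_call, record_failure, record_success); how brawny's database classes wire them in is not shown in this file, but the intended call pattern is roughly the following sketch (the guarded_fetch wrapper and its parameters are illustrative, not part of the package):

breaker = DatabaseCircuitBreaker(failure_threshold=5, open_seconds=30, backend="sqlite")

def guarded_fetch(db, query, params=None):
    breaker.before_call()            # raises DatabaseCircuitBreakerOpenError while the breaker is open
    try:
        rows = db.fetch_all(query, params)
    except Exception as exc:
        breaker.record_failure(exc)  # counts the failure and may open the breaker
        raise
    breaker.record_success()         # clears the failure count and the open window
    return rows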
brawny/db/global_cache.py ADDED
@@ -0,0 +1,298 @@
+ """Global ABI cache stored in ~/.brawny/abi_cache.db
+
+ This module provides a standalone SQLite database for caching contract ABIs
+ and proxy resolutions. Unlike the project database, this cache is shared
+ across all projects and persists in the user's home directory.
+
+ Mirrors eth-brownie's ~/.brownie/ pattern for global data storage.
+ """
+
+ from __future__ import annotations
+
+ import sqlite3
+ import threading
+ from dataclasses import dataclass
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     pass
+
+ # Global paths (following brownie's ~/.brownie/ pattern)
+ BRAWNY_DIR = Path.home() / ".brawny"
+ ABI_CACHE_DB = BRAWNY_DIR / "abi_cache.db"
+
+
+ @dataclass
+ class ABICacheEntry:
+     """Cached ABI entry."""
+
+     chain_id: int
+     address: str
+     abi_json: str
+     source: str
+     resolved_at: datetime
+
+
+ @dataclass
+ class ProxyCacheEntry:
+     """Cached proxy resolution."""
+
+     chain_id: int
+     proxy_address: str
+     implementation_address: str
+     resolved_at: datetime
+
+
+ class GlobalABICache:
+     """SQLite-backed global ABI cache.
+
+     Provides persistent storage for contract ABIs and proxy resolutions
+     in ~/.brawny/abi_cache.db. Auto-creates the database and schema
+     on first use.
+
+     Thread-safe for concurrent access within a single process.
+     """
+
+     def __init__(self, db_path: Path | None = None) -> None:
+         """Initialize the cache.
+
+         Args:
+             db_path: Override path for testing. Defaults to ~/.brawny/abi_cache.db
+         """
+         self._db_path = db_path or ABI_CACHE_DB
+         self._conn: sqlite3.Connection | None = None
+         self._lock = threading.Lock()
+
+     def _ensure_connected(self) -> sqlite3.Connection:
+         """Ensure database connection exists, creating if needed."""
+         if self._conn is None:
+             # Create directory if needed
+             self._db_path.parent.mkdir(parents=True, exist_ok=True)
+
+             # Connect with check_same_thread=False for multi-threaded use
+             self._conn = sqlite3.connect(
+                 str(self._db_path),
+                 check_same_thread=False,
+                 timeout=30.0,
+             )
+             self._conn.row_factory = sqlite3.Row
+             self._init_schema()
+
+         return self._conn
+
+     def _init_schema(self) -> None:
+         """Create tables if they don't exist."""
+         assert self._conn is not None
+
+         self._conn.executescript("""
+             CREATE TABLE IF NOT EXISTS abi_cache (
+                 chain_id INTEGER NOT NULL,
+                 address TEXT NOT NULL,
+                 abi_json TEXT NOT NULL,
+                 source TEXT NOT NULL,
+                 resolved_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+                 PRIMARY KEY (chain_id, address)
+             );
+
+             CREATE INDEX IF NOT EXISTS idx_abi_cache_resolved
+                 ON abi_cache(resolved_at);
+
+             CREATE TABLE IF NOT EXISTS proxy_cache (
+                 chain_id INTEGER NOT NULL,
+                 proxy_address TEXT NOT NULL,
+                 implementation_address TEXT NOT NULL,
+                 resolved_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+                 PRIMARY KEY (chain_id, proxy_address)
+             );
+         """)
+         self._conn.commit()
+
+     def get_cached_abi(self, chain_id: int, address: str) -> ABICacheEntry | None:
+         """Get cached ABI for a contract.
+
+         Args:
+             chain_id: Chain ID
+             address: Contract address (checksummed)
+
+         Returns:
+             ABICacheEntry if found, None otherwise
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             cursor = conn.execute(
+                 "SELECT * FROM abi_cache WHERE chain_id = ? AND address = ?",
+                 (chain_id, address),
+             )
+             row = cursor.fetchone()
+             if not row:
+                 return None
+
+             resolved_at = row["resolved_at"]
+             if isinstance(resolved_at, str):
+                 resolved_at = datetime.fromisoformat(resolved_at)
+
+             return ABICacheEntry(
+                 chain_id=row["chain_id"],
+                 address=row["address"],
+                 abi_json=row["abi_json"],
+                 source=row["source"],
+                 resolved_at=resolved_at,
+             )
+
+     def set_cached_abi(
+         self,
+         chain_id: int,
+         address: str,
+         abi_json: str,
+         source: str,
+     ) -> None:
+         """Cache an ABI for a contract.
+
+         Args:
+             chain_id: Chain ID
+             address: Contract address (checksummed)
+             abi_json: JSON-encoded ABI
+             source: Source of ABI ('etherscan', 'sourcify', 'manual', 'proxy_implementation')
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             conn.execute(
+                 """
+                 INSERT INTO abi_cache (chain_id, address, abi_json, source)
+                 VALUES (?, ?, ?, ?)
+                 ON CONFLICT(chain_id, address) DO UPDATE SET
+                     abi_json = excluded.abi_json,
+                     source = excluded.source,
+                     resolved_at = CURRENT_TIMESTAMP
+                 """,
+                 (chain_id, address, abi_json, source),
+             )
+             conn.commit()
+
+     def clear_cached_abi(self, chain_id: int, address: str) -> bool:
+         """Clear cached ABI for a contract.
+
+         Args:
+             chain_id: Chain ID
+             address: Contract address
+
+         Returns:
+             True if entry was deleted, False if not found
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             cursor = conn.execute(
+                 "DELETE FROM abi_cache WHERE chain_id = ? AND address = ?",
+                 (chain_id, address),
+             )
+             conn.commit()
+             return cursor.rowcount > 0
+
+     def cleanup_expired_abis(self, max_age_seconds: int) -> int:
+         """Delete ABIs older than max_age_seconds.
+
+         Args:
+             max_age_seconds: Maximum age in seconds
+
+         Returns:
+             Number of entries deleted
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             cursor = conn.execute(
+                 """
+                 DELETE FROM abi_cache
+                 WHERE resolved_at < datetime('now', ? || ' seconds')
+                 """,
+                 (f"-{max_age_seconds}",),
+             )
+             conn.commit()
+             return cursor.rowcount
+
+     def get_cached_proxy(
+         self, chain_id: int, proxy_address: str
+     ) -> ProxyCacheEntry | None:
+         """Get cached proxy implementation address.
+
+         Args:
+             chain_id: Chain ID
+             proxy_address: Proxy contract address
+
+         Returns:
+             ProxyCacheEntry if found, None otherwise
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             cursor = conn.execute(
+                 "SELECT * FROM proxy_cache WHERE chain_id = ? AND proxy_address = ?",
+                 (chain_id, proxy_address),
+             )
+             row = cursor.fetchone()
+             if not row:
+                 return None
+
+             resolved_at = row["resolved_at"]
+             if isinstance(resolved_at, str):
+                 resolved_at = datetime.fromisoformat(resolved_at)
+
+             return ProxyCacheEntry(
+                 chain_id=row["chain_id"],
+                 proxy_address=row["proxy_address"],
+                 implementation_address=row["implementation_address"],
+                 resolved_at=resolved_at,
+             )
+
+     def set_cached_proxy(
+         self,
+         chain_id: int,
+         proxy_address: str,
+         implementation_address: str,
+     ) -> None:
+         """Cache a proxy-to-implementation mapping.
+
+         Args:
+             chain_id: Chain ID
+             proxy_address: Proxy contract address
+             implementation_address: Implementation contract address
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             conn.execute(
+                 """
+                 INSERT INTO proxy_cache (chain_id, proxy_address, implementation_address)
+                 VALUES (?, ?, ?)
+                 ON CONFLICT(chain_id, proxy_address) DO UPDATE SET
+                     implementation_address = excluded.implementation_address,
+                     resolved_at = CURRENT_TIMESTAMP
+                 """,
+                 (chain_id, proxy_address, implementation_address),
+             )
+             conn.commit()
+
+     def clear_cached_proxy(self, chain_id: int, proxy_address: str) -> bool:
+         """Clear cached proxy resolution.
+
+         Args:
+             chain_id: Chain ID
+             proxy_address: Proxy contract address
+
+         Returns:
+             True if entry was deleted, False if not found
+         """
+         with self._lock:
+             conn = self._ensure_connected()
+             cursor = conn.execute(
+                 "DELETE FROM proxy_cache WHERE chain_id = ? AND proxy_address = ?",
+                 (chain_id, proxy_address),
+             )
+             conn.commit()
+             return cursor.rowcount > 0
+
+     def close(self) -> None:
+         """Close the database connection."""
+         with self._lock:
+             if self._conn is not None:
+                 self._conn.close()
+                 self._conn = None
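As a usage note: callers would typically treat this cache as read-through. A hedged sketch, where fetch_abi_from_explorer stands in for whichever resolver actually produces the ABI (presumably the code in brawny/alerts/abi_resolver.py, which is not reproduced here):

cache = GlobalABICache()

def get_abi(chain_id: int, address: str) -> str:
    entry = cache.get_cached_abi(chain_id, address)
    if entry is not None:
        return entry.abi_json                               # cache hit: reuse the stored JSON
    abi_json = fetch_abi_from_explorer(chain_id, address)   # hypothetical resolver
    cache.set_cached_abi(chain_id, address, abi_json, source="etherscan")
    return abi_json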