brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +2 -0
- brawny/_context.py +5 -5
- brawny/_rpc/__init__.py +36 -12
- brawny/_rpc/broadcast.py +14 -13
- brawny/_rpc/caller.py +243 -0
- brawny/_rpc/client.py +539 -0
- brawny/_rpc/clients.py +11 -11
- brawny/_rpc/context.py +23 -0
- brawny/_rpc/errors.py +465 -31
- brawny/_rpc/gas.py +7 -6
- brawny/_rpc/pool.py +18 -0
- brawny/_rpc/retry.py +266 -0
- brawny/_rpc/retry_policy.py +81 -0
- brawny/accounts.py +28 -9
- brawny/alerts/__init__.py +15 -18
- brawny/alerts/abi_resolver.py +212 -36
- brawny/alerts/base.py +2 -2
- brawny/alerts/contracts.py +77 -10
- brawny/alerts/errors.py +30 -3
- brawny/alerts/events.py +38 -5
- brawny/alerts/health.py +19 -13
- brawny/alerts/send.py +513 -55
- brawny/api.py +39 -11
- brawny/assets/AGENTS.md +325 -0
- brawny/async_runtime.py +48 -0
- brawny/chain.py +3 -3
- brawny/cli/commands/__init__.py +2 -0
- brawny/cli/commands/console.py +69 -19
- brawny/cli/commands/contract.py +2 -2
- brawny/cli/commands/controls.py +121 -0
- brawny/cli/commands/health.py +2 -2
- brawny/cli/commands/job_dev.py +6 -5
- brawny/cli/commands/jobs.py +99 -2
- brawny/cli/commands/maintenance.py +13 -29
- brawny/cli/commands/migrate.py +1 -0
- brawny/cli/commands/run.py +10 -3
- brawny/cli/commands/script.py +8 -3
- brawny/cli/commands/signer.py +143 -26
- brawny/cli/helpers.py +0 -3
- brawny/cli_templates.py +25 -349
- brawny/config/__init__.py +4 -1
- brawny/config/models.py +43 -57
- brawny/config/parser.py +268 -57
- brawny/config/validation.py +52 -15
- brawny/daemon/context.py +4 -2
- brawny/daemon/core.py +185 -63
- brawny/daemon/loops.py +166 -98
- brawny/daemon/supervisor.py +261 -0
- brawny/db/__init__.py +14 -26
- brawny/db/base.py +248 -151
- brawny/db/global_cache.py +11 -1
- brawny/db/migrate.py +175 -28
- brawny/db/migrations/001_init.sql +4 -3
- brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
- brawny/db/migrations/011_add_job_logs.sql +1 -2
- brawny/db/migrations/012_add_claimed_by.sql +2 -2
- brawny/db/migrations/013_attempt_unique.sql +10 -0
- brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
- brawny/db/migrations/015_add_signer_alias.sql +14 -0
- brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
- brawny/db/migrations/017_add_job_drain.sql +6 -0
- brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
- brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
- brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
- brawny/db/ops/__init__.py +3 -25
- brawny/db/ops/logs.py +1 -2
- brawny/db/queries.py +47 -91
- brawny/db/serialized.py +65 -0
- brawny/db/sqlite/__init__.py +1001 -0
- brawny/db/sqlite/connection.py +231 -0
- brawny/db/sqlite/execute.py +116 -0
- brawny/db/sqlite/mappers.py +190 -0
- brawny/db/sqlite/repos/attempts.py +372 -0
- brawny/db/sqlite/repos/block_state.py +102 -0
- brawny/db/sqlite/repos/cache.py +104 -0
- brawny/db/sqlite/repos/intents.py +1021 -0
- brawny/db/sqlite/repos/jobs.py +200 -0
- brawny/db/sqlite/repos/maintenance.py +182 -0
- brawny/db/sqlite/repos/signers_nonces.py +566 -0
- brawny/db/sqlite/tx.py +119 -0
- brawny/http.py +194 -0
- brawny/invariants.py +11 -24
- brawny/jobs/base.py +8 -0
- brawny/jobs/job_validation.py +2 -1
- brawny/keystore.py +83 -7
- brawny/lifecycle.py +64 -12
- brawny/logging.py +0 -2
- brawny/metrics.py +84 -12
- brawny/model/contexts.py +111 -9
- brawny/model/enums.py +1 -0
- brawny/model/errors.py +18 -0
- brawny/model/types.py +47 -131
- brawny/network_guard.py +133 -0
- brawny/networks/__init__.py +5 -5
- brawny/networks/config.py +1 -7
- brawny/networks/manager.py +14 -11
- brawny/runtime_controls.py +74 -0
- brawny/scheduler/poller.py +11 -7
- brawny/scheduler/reorg.py +95 -39
- brawny/scheduler/runner.py +442 -168
- brawny/scheduler/shutdown.py +3 -3
- brawny/script_tx.py +3 -3
- brawny/telegram.py +53 -7
- brawny/testing.py +1 -0
- brawny/timeout.py +38 -0
- brawny/tx/executor.py +922 -308
- brawny/tx/intent.py +54 -16
- brawny/tx/monitor.py +31 -12
- brawny/tx/nonce.py +212 -90
- brawny/tx/replacement.py +69 -18
- brawny/tx/retry_policy.py +24 -0
- brawny/tx/stages/types.py +75 -0
- brawny/types.py +18 -0
- brawny/utils.py +41 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
- brawny-0.1.22.dist-info/RECORD +163 -0
- brawny/_rpc/manager.py +0 -982
- brawny/_rpc/selector.py +0 -156
- brawny/db/base_new.py +0 -165
- brawny/db/mappers.py +0 -182
- brawny/db/migrations/008_add_transactions.sql +0 -72
- brawny/db/ops/attempts.py +0 -108
- brawny/db/ops/blocks.py +0 -83
- brawny/db/ops/cache.py +0 -93
- brawny/db/ops/intents.py +0 -296
- brawny/db/ops/jobs.py +0 -110
- brawny/db/ops/nonces.py +0 -322
- brawny/db/postgres.py +0 -2535
- brawny/db/postgres_new.py +0 -196
- brawny/db/sqlite.py +0 -2733
- brawny/db/sqlite_new.py +0 -191
- brawny-0.1.13.dist-info/RECORD +0 -141
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0
brawny/db/sqlite.py
DELETED
|
@@ -1,2733 +0,0 @@
|
|
|
1
|
-
"""SQLite database implementation for brawny.
|
|
2
|
-
|
|
3
|
-
SQLite is for development only. Production deployments must use PostgreSQL.
|
|
4
|
-
|
|
5
|
-
Key differences from PostgreSQL:
|
|
6
|
-
- Uses IMMEDIATE transaction mode for nonce reservation (app-level locking)
|
|
7
|
-
- Uses deterministic ordering with secondary sort for intent claiming
|
|
8
|
-
- No connection pooling (single connection)
|
|
9
|
-
- SERIAL becomes INTEGER PRIMARY KEY AUTOINCREMENT
|
|
10
|
-
"""
|
|
11
|
-
|
|
12
|
-
from __future__ import annotations
|
|
13
|
-
|
|
14
|
-
import json
|
|
15
|
-
import re
|
|
16
|
-
import sqlite3
|
|
17
|
-
import threading
|
|
18
|
-
from contextlib import contextmanager
|
|
19
|
-
from datetime import datetime, timezone
|
|
20
|
-
from pathlib import Path
|
|
21
|
-
from typing import Any, Iterator
|
|
22
|
-
from uuid import UUID
|
|
23
|
-
|
|
24
|
-
from brawny.db.base import (
|
|
25
|
-
ABICacheEntry,
|
|
26
|
-
BlockHashEntry,
|
|
27
|
-
BlockState,
|
|
28
|
-
Database,
|
|
29
|
-
IsolationLevel,
|
|
30
|
-
ProxyCacheEntry,
|
|
31
|
-
)
|
|
32
|
-
from brawny.db.circuit_breaker import DatabaseCircuitBreaker
|
|
33
|
-
from brawny.model.enums import AttemptStatus, IntentStatus, NonceStatus, TxStatus
|
|
34
|
-
from brawny.model.errors import DatabaseError, ErrorInfo, FailureType
|
|
35
|
-
from brawny.model.types import (
|
|
36
|
-
BroadcastInfo,
|
|
37
|
-
GasParams,
|
|
38
|
-
JobConfig,
|
|
39
|
-
NonceReservation,
|
|
40
|
-
SignerState,
|
|
41
|
-
Transaction,
|
|
42
|
-
TxAttempt,
|
|
43
|
-
TxHashRecord,
|
|
44
|
-
TxIntent,
|
|
45
|
-
)
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
def adapt_datetime(dt: datetime) -> str:
|
|
49
|
-
"""Adapt datetime to ISO format string for SQLite."""
|
|
50
|
-
if dt.tzinfo is None:
|
|
51
|
-
dt = dt.replace(tzinfo=timezone.utc)
|
|
52
|
-
return dt.isoformat()
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
def convert_datetime(val: bytes) -> datetime:
|
|
56
|
-
"""Convert ISO format string from SQLite to datetime."""
|
|
57
|
-
s = val.decode("utf-8")
|
|
58
|
-
# Handle various formats
|
|
59
|
-
try:
|
|
60
|
-
return datetime.fromisoformat(s)
|
|
61
|
-
except ValueError:
|
|
62
|
-
# Try parsing without timezone
|
|
63
|
-
return datetime.strptime(s, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
# Register adapters
|
|
67
|
-
sqlite3.register_adapter(datetime, adapt_datetime)
|
|
68
|
-
sqlite3.register_converter("TIMESTAMP", convert_datetime)
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
class SQLiteDatabase(Database):
|
|
72
|
-
"""SQLite implementation of the Database interface.
|
|
73
|
-
|
|
74
|
-
Thread-safety: Uses a per-thread connection model with a shared lock
|
|
75
|
-
for transaction isolation.
|
|
76
|
-
"""
|
|
77
|
-
|
|
78
|
-
def __init__(
|
|
79
|
-
self,
|
|
80
|
-
database_path: str,
|
|
81
|
-
circuit_breaker_failures: int = 5,
|
|
82
|
-
circuit_breaker_seconds: int = 30,
|
|
83
|
-
) -> None:
|
|
84
|
-
"""Initialize SQLite database.
|
|
85
|
-
|
|
86
|
-
Args:
|
|
87
|
-
database_path: Path to SQLite database file (or :memory:)
|
|
88
|
-
circuit_breaker_failures: Failures before opening breaker
|
|
89
|
-
circuit_breaker_seconds: Seconds to keep breaker open
|
|
90
|
-
"""
|
|
91
|
-
# Remove sqlite:/// prefix if present
|
|
92
|
-
if database_path.startswith("sqlite:///"):
|
|
93
|
-
database_path = database_path[10:]
|
|
94
|
-
|
|
95
|
-
self._database_path = database_path
|
|
96
|
-
self._conn: sqlite3.Connection | None = None
|
|
97
|
-
self._lock = threading.RLock()
|
|
98
|
-
self._in_transaction = False
|
|
99
|
-
self._circuit_breaker = DatabaseCircuitBreaker(
|
|
100
|
-
failure_threshold=circuit_breaker_failures,
|
|
101
|
-
open_seconds=circuit_breaker_seconds,
|
|
102
|
-
backend="sqlite",
|
|
103
|
-
)
|
|
104
|
-
|
|
105
|
-
@property
|
|
106
|
-
def dialect(self) -> str:
|
|
107
|
-
"""Return dialect name for query selection."""
|
|
108
|
-
return "sqlite"
|
|
109
|
-
|
|
110
|
-
def connect(self) -> None:
|
|
111
|
-
"""Establish database connection."""
|
|
112
|
-
if self._conn is not None:
|
|
113
|
-
return
|
|
114
|
-
|
|
115
|
-
# Create directory if needed
|
|
116
|
-
if self._database_path != ":memory:":
|
|
117
|
-
path = Path(self._database_path)
|
|
118
|
-
path.parent.mkdir(parents=True, exist_ok=True)
|
|
119
|
-
|
|
120
|
-
self._conn = sqlite3.connect(
|
|
121
|
-
self._database_path,
|
|
122
|
-
detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
|
|
123
|
-
check_same_thread=False,
|
|
124
|
-
timeout=30.0,
|
|
125
|
-
)
|
|
126
|
-
self._conn.row_factory = sqlite3.Row
|
|
127
|
-
# Enable foreign keys
|
|
128
|
-
self._conn.execute("PRAGMA foreign_keys = ON")
|
|
129
|
-
# Use WAL mode for better concurrency
|
|
130
|
-
self._conn.execute("PRAGMA journal_mode = WAL")
|
|
131
|
-
|
|
132
|
-
def close(self) -> None:
|
|
133
|
-
"""Close database connection."""
|
|
134
|
-
if self._conn:
|
|
135
|
-
self._conn.close()
|
|
136
|
-
self._conn = None
|
|
137
|
-
|
|
138
|
-
def is_connected(self) -> bool:
|
|
139
|
-
"""Check if database is connected."""
|
|
140
|
-
return self._conn is not None
|
|
141
|
-
|
|
142
|
-
def _ensure_connected(self) -> sqlite3.Connection:
|
|
143
|
-
"""Ensure connection exists and return it."""
|
|
144
|
-
if self._conn is None:
|
|
145
|
-
raise DatabaseError("Database not connected. Call connect() first.")
|
|
146
|
-
return self._conn
|
|
147
|
-
|
|
148
|
-
@contextmanager
|
|
149
|
-
def transaction(
|
|
150
|
-
self, isolation_level: IsolationLevel | None = None
|
|
151
|
-
) -> Iterator[None]:
|
|
152
|
-
"""Context manager for database transactions.
|
|
153
|
-
|
|
154
|
-
SQLite supports: DEFERRED, IMMEDIATE, EXCLUSIVE
|
|
155
|
-
We map:
|
|
156
|
-
- SERIALIZABLE -> EXCLUSIVE
|
|
157
|
-
- READ COMMITTED -> IMMEDIATE
|
|
158
|
-
- Others -> DEFERRED
|
|
159
|
-
"""
|
|
160
|
-
conn = self._ensure_connected()
|
|
161
|
-
|
|
162
|
-
# Map isolation levels to SQLite modes
|
|
163
|
-
if isolation_level == "SERIALIZABLE":
|
|
164
|
-
begin_cmd = "BEGIN EXCLUSIVE"
|
|
165
|
-
elif isolation_level in ("READ COMMITTED", "REPEATABLE READ"):
|
|
166
|
-
begin_cmd = "BEGIN IMMEDIATE"
|
|
167
|
-
else:
|
|
168
|
-
begin_cmd = "BEGIN DEFERRED"
|
|
169
|
-
|
|
170
|
-
with self._lock:
|
|
171
|
-
try:
|
|
172
|
-
conn.execute(begin_cmd)
|
|
173
|
-
self._in_transaction = True
|
|
174
|
-
yield
|
|
175
|
-
conn.commit()
|
|
176
|
-
except Exception:
|
|
177
|
-
conn.rollback()
|
|
178
|
-
raise
|
|
179
|
-
finally:
|
|
180
|
-
self._in_transaction = False
|
|
181
|
-
|
|
182
|
-
def _adapt_sql(self, query: str) -> str:
|
|
183
|
-
"""Adapt PostgreSQL-style SQL to SQLite.
|
|
184
|
-
|
|
185
|
-
Handles:
|
|
186
|
-
- $1, $2 -> ? (parameter placeholders)
|
|
187
|
-
- SERIAL -> INTEGER (type mapping)
|
|
188
|
-
- NOW() -> CURRENT_TIMESTAMP
|
|
189
|
-
"""
|
|
190
|
-
# Replace $N parameters with ?
|
|
191
|
-
query = re.sub(r"\$\d+", "?", query)
|
|
192
|
-
# Replace SERIAL with appropriate SQLite type
|
|
193
|
-
query = query.replace("SERIAL", "INTEGER")
|
|
194
|
-
# Replace NOW() with CURRENT_TIMESTAMP
|
|
195
|
-
query = query.replace("NOW()", "CURRENT_TIMESTAMP")
|
|
196
|
-
return query
|
|
197
|
-
|
|
198
|
-
def execute(
|
|
199
|
-
self,
|
|
200
|
-
query: str,
|
|
201
|
-
params: tuple[Any, ...] | dict[str, Any] | None = None,
|
|
202
|
-
) -> None:
|
|
203
|
-
"""Execute a query without returning results."""
|
|
204
|
-
conn = self._ensure_connected()
|
|
205
|
-
query = self._adapt_sql(query)
|
|
206
|
-
self._circuit_breaker.before_call()
|
|
207
|
-
|
|
208
|
-
with self._lock:
|
|
209
|
-
cursor = conn.cursor()
|
|
210
|
-
try:
|
|
211
|
-
if params is None:
|
|
212
|
-
cursor.execute(query)
|
|
213
|
-
elif isinstance(params, dict):
|
|
214
|
-
cursor.execute(query, params)
|
|
215
|
-
else:
|
|
216
|
-
cursor.execute(query, params)
|
|
217
|
-
if not self._in_transaction:
|
|
218
|
-
conn.commit()
|
|
219
|
-
self._circuit_breaker.record_success()
|
|
220
|
-
except sqlite3.Error as e:
|
|
221
|
-
self._circuit_breaker.record_failure(e)
|
|
222
|
-
raise DatabaseError(f"SQLite query failed: {e}") from e
|
|
223
|
-
finally:
|
|
224
|
-
cursor.close()
|
|
225
|
-
|
|
226
|
-
def execute_returning(
|
|
227
|
-
self,
|
|
228
|
-
query: str,
|
|
229
|
-
params: tuple[Any, ...] | dict[str, Any] | None = None,
|
|
230
|
-
) -> list[dict[str, Any]]:
|
|
231
|
-
"""Execute a query and return all results as dicts."""
|
|
232
|
-
conn = self._ensure_connected()
|
|
233
|
-
query = self._adapt_sql(query)
|
|
234
|
-
self._circuit_breaker.before_call()
|
|
235
|
-
|
|
236
|
-
with self._lock:
|
|
237
|
-
cursor = conn.cursor()
|
|
238
|
-
try:
|
|
239
|
-
if params is None:
|
|
240
|
-
cursor.execute(query)
|
|
241
|
-
elif isinstance(params, dict):
|
|
242
|
-
cursor.execute(query, params)
|
|
243
|
-
else:
|
|
244
|
-
cursor.execute(query, params)
|
|
245
|
-
|
|
246
|
-
rows = cursor.fetchall()
|
|
247
|
-
if not rows:
|
|
248
|
-
self._circuit_breaker.record_success()
|
|
249
|
-
return []
|
|
250
|
-
|
|
251
|
-
# Convert Row objects to dicts
|
|
252
|
-
self._circuit_breaker.record_success()
|
|
253
|
-
return [dict(row) for row in rows]
|
|
254
|
-
except sqlite3.Error as e:
|
|
255
|
-
self._circuit_breaker.record_failure(e)
|
|
256
|
-
raise DatabaseError(f"SQLite query failed: {e}") from e
|
|
257
|
-
finally:
|
|
258
|
-
cursor.close()
|
|
259
|
-
|
|
260
|
-
def execute_one(
|
|
261
|
-
self,
|
|
262
|
-
query: str,
|
|
263
|
-
params: tuple[Any, ...] | dict[str, Any] | None = None,
|
|
264
|
-
) -> dict[str, Any] | None:
|
|
265
|
-
"""Execute a query and return a single result or None."""
|
|
266
|
-
results = self.execute_returning(query, params)
|
|
267
|
-
return results[0] if results else None
|
|
268
|
-
|
|
269
|
-
def execute_returning_rowcount(
|
|
270
|
-
self,
|
|
271
|
-
query: str,
|
|
272
|
-
params: tuple[Any, ...] | dict[str, Any] | None = None,
|
|
273
|
-
) -> int:
|
|
274
|
-
"""Execute SQL and return rowcount.
|
|
275
|
-
|
|
276
|
-
IMPORTANT: Both sqlite3 and psycopg2 expose cursor.rowcount after execute.
|
|
277
|
-
This method ensures we capture it reliably.
|
|
278
|
-
"""
|
|
279
|
-
conn = self._ensure_connected()
|
|
280
|
-
query = self._adapt_sql(query)
|
|
281
|
-
self._circuit_breaker.before_call()
|
|
282
|
-
|
|
283
|
-
with self._lock:
|
|
284
|
-
cursor = conn.cursor()
|
|
285
|
-
try:
|
|
286
|
-
if params is None:
|
|
287
|
-
cursor.execute(query)
|
|
288
|
-
else:
|
|
289
|
-
cursor.execute(query, params)
|
|
290
|
-
rowcount = cursor.rowcount
|
|
291
|
-
if not self._in_transaction:
|
|
292
|
-
conn.commit()
|
|
293
|
-
self._circuit_breaker.record_success()
|
|
294
|
-
return rowcount
|
|
295
|
-
except sqlite3.Error as e:
|
|
296
|
-
self._circuit_breaker.record_failure(e)
|
|
297
|
-
raise DatabaseError(f"SQLite query failed: {e}") from e
|
|
298
|
-
finally:
|
|
299
|
-
cursor.close()
|
|
300
|
-
|
|
301
|
-
# =========================================================================
|
|
302
|
-
# Block State Operations
|
|
303
|
-
# =========================================================================
|
|
304
|
-
|
|
305
|
-
def get_block_state(self, chain_id: int) -> BlockState | None:
|
|
306
|
-
row = self.execute_one(
|
|
307
|
-
"SELECT * FROM block_state WHERE chain_id = ?",
|
|
308
|
-
(chain_id,),
|
|
309
|
-
)
|
|
310
|
-
if not row:
|
|
311
|
-
return None
|
|
312
|
-
return BlockState(
|
|
313
|
-
chain_id=row["chain_id"],
|
|
314
|
-
last_processed_block_number=row["last_processed_block_number"],
|
|
315
|
-
last_processed_block_hash=row["last_processed_block_hash"],
|
|
316
|
-
created_at=row["created_at"],
|
|
317
|
-
updated_at=row["updated_at"],
|
|
318
|
-
)
|
|
319
|
-
|
|
320
|
-
def upsert_block_state(
|
|
321
|
-
self,
|
|
322
|
-
chain_id: int,
|
|
323
|
-
block_number: int,
|
|
324
|
-
block_hash: str,
|
|
325
|
-
) -> None:
|
|
326
|
-
self.execute(
|
|
327
|
-
"""
|
|
328
|
-
INSERT INTO block_state (chain_id, last_processed_block_number, last_processed_block_hash)
|
|
329
|
-
VALUES (?, ?, ?)
|
|
330
|
-
ON CONFLICT(chain_id) DO UPDATE SET
|
|
331
|
-
last_processed_block_number = excluded.last_processed_block_number,
|
|
332
|
-
last_processed_block_hash = excluded.last_processed_block_hash,
|
|
333
|
-
updated_at = CURRENT_TIMESTAMP
|
|
334
|
-
""",
|
|
335
|
-
(chain_id, block_number, block_hash),
|
|
336
|
-
)
|
|
337
|
-
|
|
338
|
-
def get_block_hash_at_height(
|
|
339
|
-
self, chain_id: int, block_number: int
|
|
340
|
-
) -> str | None:
|
|
341
|
-
row = self.execute_one(
|
|
342
|
-
"SELECT block_hash FROM block_hash_history WHERE chain_id = ? AND block_number = ?",
|
|
343
|
-
(chain_id, block_number),
|
|
344
|
-
)
|
|
345
|
-
return row["block_hash"] if row else None
|
|
346
|
-
|
|
347
|
-
def insert_block_hash(
|
|
348
|
-
self, chain_id: int, block_number: int, block_hash: str
|
|
349
|
-
) -> None:
|
|
350
|
-
self.execute(
|
|
351
|
-
"""
|
|
352
|
-
INSERT INTO block_hash_history (chain_id, block_number, block_hash)
|
|
353
|
-
VALUES (?, ?, ?)
|
|
354
|
-
ON CONFLICT(chain_id, block_number) DO UPDATE SET
|
|
355
|
-
block_hash = excluded.block_hash,
|
|
356
|
-
inserted_at = CURRENT_TIMESTAMP
|
|
357
|
-
""",
|
|
358
|
-
(chain_id, block_number, block_hash),
|
|
359
|
-
)
|
|
360
|
-
|
|
361
|
-
def delete_block_hashes_above(self, chain_id: int, block_number: int) -> int:
|
|
362
|
-
conn = self._ensure_connected()
|
|
363
|
-
with self._lock:
|
|
364
|
-
cursor = conn.cursor()
|
|
365
|
-
cursor.execute(
|
|
366
|
-
"DELETE FROM block_hash_history WHERE chain_id = ? AND block_number > ?",
|
|
367
|
-
(chain_id, block_number),
|
|
368
|
-
)
|
|
369
|
-
count = cursor.rowcount
|
|
370
|
-
if not self._in_transaction:
|
|
371
|
-
conn.commit()
|
|
372
|
-
cursor.close()
|
|
373
|
-
return count
|
|
374
|
-
|
|
375
|
-
def delete_block_hash_at_height(self, chain_id: int, block_number: int) -> bool:
|
|
376
|
-
conn = self._ensure_connected()
|
|
377
|
-
with self._lock:
|
|
378
|
-
cursor = conn.cursor()
|
|
379
|
-
cursor.execute(
|
|
380
|
-
"DELETE FROM block_hash_history WHERE chain_id = ? AND block_number = ?",
|
|
381
|
-
(chain_id, block_number),
|
|
382
|
-
)
|
|
383
|
-
deleted = cursor.rowcount > 0
|
|
384
|
-
if not self._in_transaction:
|
|
385
|
-
conn.commit()
|
|
386
|
-
cursor.close()
|
|
387
|
-
return deleted
|
|
388
|
-
|
|
389
|
-
def cleanup_old_block_hashes(self, chain_id: int, keep_count: int) -> int:
|
|
390
|
-
# Get max block number
|
|
391
|
-
row = self.execute_one(
|
|
392
|
-
"SELECT MAX(block_number) as max_block FROM block_hash_history WHERE chain_id = ?",
|
|
393
|
-
(chain_id,),
|
|
394
|
-
)
|
|
395
|
-
if not row or row["max_block"] is None:
|
|
396
|
-
return 0
|
|
397
|
-
|
|
398
|
-
cutoff = row["max_block"] - keep_count + 1
|
|
399
|
-
conn = self._ensure_connected()
|
|
400
|
-
with self._lock:
|
|
401
|
-
cursor = conn.cursor()
|
|
402
|
-
cursor.execute(
|
|
403
|
-
"DELETE FROM block_hash_history WHERE chain_id = ? AND block_number < ?",
|
|
404
|
-
(chain_id, cutoff),
|
|
405
|
-
)
|
|
406
|
-
count = cursor.rowcount
|
|
407
|
-
if not self._in_transaction:
|
|
408
|
-
conn.commit()
|
|
409
|
-
cursor.close()
|
|
410
|
-
return count
|
|
411
|
-
|
|
412
|
-
def get_oldest_block_in_history(self, chain_id: int) -> int | None:
|
|
413
|
-
row = self.execute_one(
|
|
414
|
-
"SELECT MIN(block_number) as min_block FROM block_hash_history WHERE chain_id = ?",
|
|
415
|
-
(chain_id,),
|
|
416
|
-
)
|
|
417
|
-
return row["min_block"] if row else None
|
|
418
|
-
|
|
419
|
-
def get_latest_block_in_history(self, chain_id: int) -> int | None:
|
|
420
|
-
row = self.execute_one(
|
|
421
|
-
"SELECT MAX(block_number) as max_block FROM block_hash_history WHERE chain_id = ?",
|
|
422
|
-
(chain_id,),
|
|
423
|
-
)
|
|
424
|
-
return row["max_block"] if row else None
|
|
425
|
-
|
|
426
|
-
def get_inflight_intent_count(
|
|
427
|
-
self, chain_id: int, job_id: str, signer_address: str
|
|
428
|
-
) -> int:
|
|
429
|
-
row = self.execute_one(
|
|
430
|
-
"""
|
|
431
|
-
SELECT COUNT(*) as count
|
|
432
|
-
FROM tx_intents
|
|
433
|
-
WHERE chain_id = ?
|
|
434
|
-
AND job_id = ?
|
|
435
|
-
AND signer_address = ?
|
|
436
|
-
AND status IN ('created', 'claimed', 'sending', 'pending')
|
|
437
|
-
""",
|
|
438
|
-
(chain_id, job_id, signer_address.lower()),
|
|
439
|
-
)
|
|
440
|
-
return int(row["count"]) if row else 0
|
|
441
|
-
|
|
442
|
-
def get_inflight_intents_for_scope(
|
|
443
|
-
self,
|
|
444
|
-
chain_id: int,
|
|
445
|
-
job_id: str,
|
|
446
|
-
signer_address: str,
|
|
447
|
-
to_address: str,
|
|
448
|
-
) -> list[dict[str, Any]]:
|
|
449
|
-
rows = self.execute_returning(
|
|
450
|
-
"""
|
|
451
|
-
SELECT intent_id, status, claimed_at, created_at
|
|
452
|
-
FROM tx_intents
|
|
453
|
-
WHERE chain_id = ?
|
|
454
|
-
AND job_id = ?
|
|
455
|
-
AND signer_address = ?
|
|
456
|
-
AND to_address = ?
|
|
457
|
-
AND status IN ('created', 'claimed', 'sending', 'pending')
|
|
458
|
-
ORDER BY created_at ASC
|
|
459
|
-
""",
|
|
460
|
-
(chain_id, job_id, signer_address.lower(), to_address),
|
|
461
|
-
)
|
|
462
|
-
return [dict(row) for row in rows]
|
|
463
|
-
|
|
464
|
-
# =========================================================================
|
|
465
|
-
# Job Operations
|
|
466
|
-
# =========================================================================
|
|
467
|
-
|
|
468
|
-
def get_job(self, job_id: str) -> JobConfig | None:
|
|
469
|
-
row = self.execute_one("SELECT * FROM jobs WHERE job_id = ?", (job_id,))
|
|
470
|
-
if not row:
|
|
471
|
-
return None
|
|
472
|
-
return self._row_to_job_config(row)
|
|
473
|
-
|
|
474
|
-
def get_enabled_jobs(self) -> list[JobConfig]:
|
|
475
|
-
rows = self.execute_returning(
|
|
476
|
-
"SELECT * FROM jobs WHERE enabled = 1 ORDER BY job_id"
|
|
477
|
-
)
|
|
478
|
-
return [self._row_to_job_config(row) for row in rows]
|
|
479
|
-
|
|
480
|
-
def list_all_jobs(self) -> list[JobConfig]:
|
|
481
|
-
rows = self.execute_returning("SELECT * FROM jobs ORDER BY job_id")
|
|
482
|
-
return [self._row_to_job_config(row) for row in rows]
|
|
483
|
-
|
|
484
|
-
def _row_to_job_config(self, row: dict[str, Any]) -> JobConfig:
|
|
485
|
-
return JobConfig(
|
|
486
|
-
job_id=row["job_id"],
|
|
487
|
-
job_name=row["job_name"],
|
|
488
|
-
enabled=bool(row["enabled"]),
|
|
489
|
-
check_interval_blocks=row["check_interval_blocks"],
|
|
490
|
-
last_checked_block_number=row["last_checked_block_number"],
|
|
491
|
-
last_triggered_block_number=row["last_triggered_block_number"],
|
|
492
|
-
created_at=row["created_at"],
|
|
493
|
-
updated_at=row["updated_at"],
|
|
494
|
-
)
|
|
495
|
-
|
|
496
|
-
def upsert_job(
|
|
497
|
-
self,
|
|
498
|
-
job_id: str,
|
|
499
|
-
job_name: str,
|
|
500
|
-
check_interval_blocks: int,
|
|
501
|
-
enabled: bool = True,
|
|
502
|
-
) -> None:
|
|
503
|
-
self.execute(
|
|
504
|
-
"""
|
|
505
|
-
INSERT INTO jobs (job_id, job_name, check_interval_blocks, enabled)
|
|
506
|
-
VALUES (?, ?, ?, ?)
|
|
507
|
-
ON CONFLICT(job_id) DO UPDATE SET
|
|
508
|
-
job_name = excluded.job_name,
|
|
509
|
-
check_interval_blocks = excluded.check_interval_blocks,
|
|
510
|
-
updated_at = CURRENT_TIMESTAMP
|
|
511
|
-
""",
|
|
512
|
-
(job_id, job_name, check_interval_blocks, enabled),
|
|
513
|
-
)
|
|
514
|
-
|
|
515
|
-
def update_job_checked(
|
|
516
|
-
self, job_id: str, block_number: int, triggered: bool = False
|
|
517
|
-
) -> None:
|
|
518
|
-
if triggered:
|
|
519
|
-
self.execute(
|
|
520
|
-
"""
|
|
521
|
-
UPDATE jobs SET
|
|
522
|
-
last_checked_block_number = ?,
|
|
523
|
-
last_triggered_block_number = ?,
|
|
524
|
-
updated_at = CURRENT_TIMESTAMP
|
|
525
|
-
WHERE job_id = ?
|
|
526
|
-
""",
|
|
527
|
-
(block_number, block_number, job_id),
|
|
528
|
-
)
|
|
529
|
-
else:
|
|
530
|
-
self.execute(
|
|
531
|
-
"""
|
|
532
|
-
UPDATE jobs SET
|
|
533
|
-
last_checked_block_number = ?,
|
|
534
|
-
updated_at = CURRENT_TIMESTAMP
|
|
535
|
-
WHERE job_id = ?
|
|
536
|
-
""",
|
|
537
|
-
(block_number, job_id),
|
|
538
|
-
)
|
|
539
|
-
|
|
540
|
-
def set_job_enabled(self, job_id: str, enabled: bool) -> bool:
|
|
541
|
-
conn = self._ensure_connected()
|
|
542
|
-
with self._lock:
|
|
543
|
-
cursor = conn.cursor()
|
|
544
|
-
cursor.execute(
|
|
545
|
-
"UPDATE jobs SET enabled = ?, updated_at = CURRENT_TIMESTAMP WHERE job_id = ?",
|
|
546
|
-
(enabled, job_id),
|
|
547
|
-
)
|
|
548
|
-
updated = cursor.rowcount > 0
|
|
549
|
-
if not self._in_transaction:
|
|
550
|
-
conn.commit()
|
|
551
|
-
cursor.close()
|
|
552
|
-
return updated
|
|
553
|
-
|
|
554
|
-
def delete_job(self, job_id: str) -> bool:
|
|
555
|
-
conn = self._ensure_connected()
|
|
556
|
-
with self._lock:
|
|
557
|
-
cursor = conn.cursor()
|
|
558
|
-
# Delete job_kv entries first (foreign key)
|
|
559
|
-
cursor.execute("DELETE FROM job_kv WHERE job_id = ?", (job_id,))
|
|
560
|
-
cursor.execute("DELETE FROM jobs WHERE job_id = ?", (job_id,))
|
|
561
|
-
deleted = cursor.rowcount > 0
|
|
562
|
-
if not self._in_transaction:
|
|
563
|
-
conn.commit()
|
|
564
|
-
cursor.close()
|
|
565
|
-
return deleted
|
|
566
|
-
|
|
567
|
-
def get_job_kv(self, job_id: str, key: str) -> Any | None:
|
|
568
|
-
row = self.execute_one(
|
|
569
|
-
"SELECT value_json FROM job_kv WHERE job_id = ? AND key = ?",
|
|
570
|
-
(job_id, key),
|
|
571
|
-
)
|
|
572
|
-
if not row:
|
|
573
|
-
return None
|
|
574
|
-
return json.loads(row["value_json"])
|
|
575
|
-
|
|
576
|
-
def set_job_kv(self, job_id: str, key: str, value: Any) -> None:
|
|
577
|
-
value_json = json.dumps(value)
|
|
578
|
-
self.execute(
|
|
579
|
-
"""
|
|
580
|
-
INSERT INTO job_kv (job_id, key, value_json)
|
|
581
|
-
VALUES (?, ?, ?)
|
|
582
|
-
ON CONFLICT(job_id, key) DO UPDATE SET
|
|
583
|
-
value_json = excluded.value_json,
|
|
584
|
-
updated_at = CURRENT_TIMESTAMP
|
|
585
|
-
""",
|
|
586
|
-
(job_id, key, value_json),
|
|
587
|
-
)
|
|
588
|
-
|
|
589
|
-
def delete_job_kv(self, job_id: str, key: str) -> bool:
|
|
590
|
-
conn = self._ensure_connected()
|
|
591
|
-
with self._lock:
|
|
592
|
-
cursor = conn.cursor()
|
|
593
|
-
cursor.execute(
|
|
594
|
-
"DELETE FROM job_kv WHERE job_id = ? AND key = ?",
|
|
595
|
-
(job_id, key),
|
|
596
|
-
)
|
|
597
|
-
deleted = cursor.rowcount > 0
|
|
598
|
-
if not self._in_transaction:
|
|
599
|
-
conn.commit()
|
|
600
|
-
cursor.close()
|
|
601
|
-
return deleted
|
|
602
|
-
|
|
603
|
-
# =========================================================================
|
|
604
|
-
# Signer & Nonce Operations
|
|
605
|
-
# =========================================================================
|
|
606
|
-
|
|
607
|
-
def get_signer_state(self, chain_id: int, address: str) -> SignerState | None:
|
|
608
|
-
row = self.execute_one(
|
|
609
|
-
"SELECT * FROM signers WHERE chain_id = ? AND signer_address = ?",
|
|
610
|
-
(chain_id, address),
|
|
611
|
-
)
|
|
612
|
-
if not row:
|
|
613
|
-
return None
|
|
614
|
-
return self._row_to_signer_state(row)
|
|
615
|
-
|
|
616
|
-
def get_all_signers(self, chain_id: int) -> list[SignerState]:
|
|
617
|
-
rows = self.execute_returning(
|
|
618
|
-
"SELECT * FROM signers WHERE chain_id = ?", (chain_id,)
|
|
619
|
-
)
|
|
620
|
-
return [self._row_to_signer_state(row) for row in rows]
|
|
621
|
-
|
|
622
|
-
def _row_to_signer_state(self, row: dict[str, Any]) -> SignerState:
|
|
623
|
-
return SignerState(
|
|
624
|
-
chain_id=row["chain_id"],
|
|
625
|
-
signer_address=row["signer_address"],
|
|
626
|
-
next_nonce=row["next_nonce"],
|
|
627
|
-
last_synced_chain_nonce=row["last_synced_chain_nonce"],
|
|
628
|
-
created_at=row["created_at"],
|
|
629
|
-
updated_at=row["updated_at"],
|
|
630
|
-
gap_started_at=row.get("gap_started_at"),
|
|
631
|
-
alias=row.get("alias"),
|
|
632
|
-
)
|
|
633
|
-
|
|
634
|
-
def upsert_signer(
|
|
635
|
-
self,
|
|
636
|
-
chain_id: int,
|
|
637
|
-
address: str,
|
|
638
|
-
next_nonce: int,
|
|
639
|
-
last_synced_chain_nonce: int | None = None,
|
|
640
|
-
) -> None:
|
|
641
|
-
self.execute(
|
|
642
|
-
"""
|
|
643
|
-
INSERT INTO signers (chain_id, signer_address, next_nonce, last_synced_chain_nonce)
|
|
644
|
-
VALUES (?, ?, ?, ?)
|
|
645
|
-
ON CONFLICT(chain_id, signer_address) DO UPDATE SET
|
|
646
|
-
next_nonce = excluded.next_nonce,
|
|
647
|
-
last_synced_chain_nonce = excluded.last_synced_chain_nonce,
|
|
648
|
-
updated_at = CURRENT_TIMESTAMP
|
|
649
|
-
""",
|
|
650
|
-
(chain_id, address, next_nonce, last_synced_chain_nonce),
|
|
651
|
-
)
|
|
652
|
-
|
|
653
|
-
def update_signer_next_nonce(
|
|
654
|
-
self, chain_id: int, address: str, next_nonce: int
|
|
655
|
-
) -> None:
|
|
656
|
-
self.execute(
|
|
657
|
-
"""
|
|
658
|
-
UPDATE signers SET next_nonce = ?, updated_at = CURRENT_TIMESTAMP
|
|
659
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
660
|
-
""",
|
|
661
|
-
(next_nonce, chain_id, address),
|
|
662
|
-
)
|
|
663
|
-
|
|
664
|
-
def update_signer_chain_nonce(
|
|
665
|
-
self, chain_id: int, address: str, chain_nonce: int
|
|
666
|
-
) -> None:
|
|
667
|
-
self.execute(
|
|
668
|
-
"""
|
|
669
|
-
UPDATE signers SET last_synced_chain_nonce = ?, updated_at = CURRENT_TIMESTAMP
|
|
670
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
671
|
-
""",
|
|
672
|
-
(chain_nonce, chain_id, address),
|
|
673
|
-
)
|
|
674
|
-
|
|
675
|
-
def set_gap_started_at(
|
|
676
|
-
self, chain_id: int, address: str, started_at: datetime
|
|
677
|
-
) -> None:
|
|
678
|
-
"""Record when gap blocking started for a signer."""
|
|
679
|
-
self.execute(
|
|
680
|
-
"""
|
|
681
|
-
UPDATE signers SET gap_started_at = ?, updated_at = CURRENT_TIMESTAMP
|
|
682
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
683
|
-
""",
|
|
684
|
-
(started_at.isoformat() if started_at else None, chain_id, address),
|
|
685
|
-
)
|
|
686
|
-
|
|
687
|
-
def clear_gap_started_at(self, chain_id: int, address: str) -> None:
|
|
688
|
-
"""Clear gap tracking (gap resolved or force reset)."""
|
|
689
|
-
self.execute(
|
|
690
|
-
"""
|
|
691
|
-
UPDATE signers SET gap_started_at = NULL, updated_at = CURRENT_TIMESTAMP
|
|
692
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
693
|
-
""",
|
|
694
|
-
(chain_id, address),
|
|
695
|
-
)
|
|
696
|
-
|
|
697
|
-
def get_signer_by_alias(self, chain_id: int, alias: str) -> SignerState | None:
|
|
698
|
-
"""Get signer by alias. Returns None if not found."""
|
|
699
|
-
row = self.execute_one(
|
|
700
|
-
"""
|
|
701
|
-
SELECT * FROM signers
|
|
702
|
-
WHERE chain_id = ? AND alias = ?
|
|
703
|
-
""",
|
|
704
|
-
(chain_id, alias),
|
|
705
|
-
)
|
|
706
|
-
if not row:
|
|
707
|
-
return None
|
|
708
|
-
return self._row_to_signer_state(row)
|
|
709
|
-
|
|
710
|
-
def reserve_nonce_atomic(
|
|
711
|
-
self,
|
|
712
|
-
chain_id: int,
|
|
713
|
-
address: str,
|
|
714
|
-
chain_nonce: int | None,
|
|
715
|
-
intent_id: UUID | None = None,
|
|
716
|
-
) -> int:
|
|
717
|
-
conn = self._ensure_connected()
|
|
718
|
-
intent_id_str = str(intent_id) if intent_id else None
|
|
719
|
-
with self._lock:
|
|
720
|
-
try:
|
|
721
|
-
conn.execute("BEGIN IMMEDIATE")
|
|
722
|
-
self._in_transaction = True
|
|
723
|
-
|
|
724
|
-
conn.execute(
|
|
725
|
-
"""
|
|
726
|
-
INSERT INTO signers (chain_id, signer_address, next_nonce, last_synced_chain_nonce)
|
|
727
|
-
VALUES (?, ?, 0, NULL)
|
|
728
|
-
ON CONFLICT(chain_id, signer_address) DO NOTHING
|
|
729
|
-
""",
|
|
730
|
-
(chain_id, address),
|
|
731
|
-
)
|
|
732
|
-
|
|
733
|
-
cursor = conn.cursor()
|
|
734
|
-
cursor.execute(
|
|
735
|
-
"""
|
|
736
|
-
SELECT next_nonce FROM signers
|
|
737
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
738
|
-
""",
|
|
739
|
-
(chain_id, address),
|
|
740
|
-
)
|
|
741
|
-
row = cursor.fetchone()
|
|
742
|
-
cursor.close()
|
|
743
|
-
if row is None:
|
|
744
|
-
raise DatabaseError("Failed to lock signer row")
|
|
745
|
-
|
|
746
|
-
db_next_nonce = row["next_nonce"]
|
|
747
|
-
base_nonce = chain_nonce if chain_nonce is not None else db_next_nonce
|
|
748
|
-
|
|
749
|
-
cursor = conn.cursor()
|
|
750
|
-
cursor.execute(
|
|
751
|
-
"""
|
|
752
|
-
SELECT nonce FROM nonce_reservations
|
|
753
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
754
|
-
AND status != ?
|
|
755
|
-
AND nonce >= ?
|
|
756
|
-
ORDER BY nonce
|
|
757
|
-
""",
|
|
758
|
-
(chain_id, address, NonceStatus.RELEASED.value, base_nonce),
|
|
759
|
-
)
|
|
760
|
-
rows = cursor.fetchall()
|
|
761
|
-
cursor.close()
|
|
762
|
-
|
|
763
|
-
candidate = base_nonce
|
|
764
|
-
for res in rows:
|
|
765
|
-
if res["nonce"] == candidate:
|
|
766
|
-
candidate += 1
|
|
767
|
-
elif res["nonce"] > candidate:
|
|
768
|
-
break
|
|
769
|
-
|
|
770
|
-
if candidate - base_nonce > 100:
|
|
771
|
-
raise DatabaseError(
|
|
772
|
-
f"Could not find available nonce within 100 slots for signer {address}"
|
|
773
|
-
)
|
|
774
|
-
|
|
775
|
-
conn.execute(
|
|
776
|
-
"""
|
|
777
|
-
INSERT INTO nonce_reservations (chain_id, signer_address, nonce, status, intent_id)
|
|
778
|
-
VALUES (?, ?, ?, ?, ?)
|
|
779
|
-
ON CONFLICT(chain_id, signer_address, nonce) DO UPDATE SET
|
|
780
|
-
status = excluded.status,
|
|
781
|
-
intent_id = excluded.intent_id,
|
|
782
|
-
updated_at = CURRENT_TIMESTAMP
|
|
783
|
-
""",
|
|
784
|
-
(chain_id, address, candidate, NonceStatus.RESERVED.value, intent_id_str),
|
|
785
|
-
)
|
|
786
|
-
|
|
787
|
-
new_next_nonce = max(db_next_nonce, candidate + 1)
|
|
788
|
-
conn.execute(
|
|
789
|
-
"""
|
|
790
|
-
UPDATE signers SET next_nonce = ?, updated_at = CURRENT_TIMESTAMP
|
|
791
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
792
|
-
""",
|
|
793
|
-
(new_next_nonce, chain_id, address),
|
|
794
|
-
)
|
|
795
|
-
|
|
796
|
-
conn.commit()
|
|
797
|
-
return candidate
|
|
798
|
-
except Exception:
|
|
799
|
-
conn.rollback()
|
|
800
|
-
raise
|
|
801
|
-
finally:
|
|
802
|
-
self._in_transaction = False
|
|
803
|
-
|
|
804
|
-
def get_nonce_reservation(
|
|
805
|
-
self, chain_id: int, address: str, nonce: int
|
|
806
|
-
) -> NonceReservation | None:
|
|
807
|
-
row = self.execute_one(
|
|
808
|
-
"""
|
|
809
|
-
SELECT * FROM nonce_reservations
|
|
810
|
-
WHERE chain_id = ? AND signer_address = ? AND nonce = ?
|
|
811
|
-
""",
|
|
812
|
-
(chain_id, address, nonce),
|
|
813
|
-
)
|
|
814
|
-
if not row:
|
|
815
|
-
return None
|
|
816
|
-
return self._row_to_nonce_reservation(row)
|
|
817
|
-
|
|
818
|
-
def get_reservations_for_signer(
|
|
819
|
-
self, chain_id: int, address: str, status: str | None = None
|
|
820
|
-
) -> list[NonceReservation]:
|
|
821
|
-
if status:
|
|
822
|
-
rows = self.execute_returning(
|
|
823
|
-
"""
|
|
824
|
-
SELECT * FROM nonce_reservations
|
|
825
|
-
WHERE chain_id = ? AND signer_address = ? AND status = ?
|
|
826
|
-
ORDER BY nonce
|
|
827
|
-
""",
|
|
828
|
-
(chain_id, address, status),
|
|
829
|
-
)
|
|
830
|
-
else:
|
|
831
|
-
rows = self.execute_returning(
|
|
832
|
-
"""
|
|
833
|
-
SELECT * FROM nonce_reservations
|
|
834
|
-
WHERE chain_id = ? AND signer_address = ?
|
|
835
|
-
ORDER BY nonce
|
|
836
|
-
""",
|
|
837
|
-
(chain_id, address),
|
|
838
|
-
)
|
|
839
|
-
return [self._row_to_nonce_reservation(row) for row in rows]
|
|
840
|
-
|
|
841
|
-
def get_reservations_below_nonce(
|
|
842
|
-
self, chain_id: int, address: str, nonce: int
|
|
843
|
-
) -> list[NonceReservation]:
|
|
844
|
-
rows = self.execute_returning(
|
|
845
|
-
"""
|
|
846
|
-
SELECT * FROM nonce_reservations
|
|
847
|
-
WHERE chain_id = ? AND signer_address = ? AND nonce < ?
|
|
848
|
-
ORDER BY nonce
|
|
849
|
-
""",
|
|
850
|
-
(chain_id, address, nonce),
|
|
851
|
-
)
|
|
852
|
-
return [self._row_to_nonce_reservation(row) for row in rows]
|
|
853
|
-
|
|
854
|
-
def _row_to_nonce_reservation(self, row: dict[str, Any]) -> NonceReservation:
|
|
855
|
-
intent_id = row["intent_id"]
|
|
856
|
-
if intent_id and isinstance(intent_id, str):
|
|
857
|
-
intent_id = UUID(intent_id)
|
|
858
|
-
return NonceReservation(
|
|
859
|
-
id=row["id"],
|
|
860
|
-
chain_id=row["chain_id"],
|
|
861
|
-
signer_address=row["signer_address"],
|
|
862
|
-
nonce=row["nonce"],
|
|
863
|
-
status=NonceStatus(row["status"]),
|
|
864
|
-
intent_id=intent_id,
|
|
865
|
-
created_at=row["created_at"],
|
|
866
|
-
updated_at=row["updated_at"],
|
|
867
|
-
)
|
|
868
|
-
|
|
869
|
-
def create_nonce_reservation(
|
|
870
|
-
self,
|
|
871
|
-
chain_id: int,
|
|
872
|
-
address: str,
|
|
873
|
-
nonce: int,
|
|
874
|
-
status: str = "reserved",
|
|
875
|
-
intent_id: UUID | None = None,
|
|
876
|
-
) -> NonceReservation:
|
|
877
|
-
intent_id_str = str(intent_id) if intent_id else None
|
|
878
|
-
self.execute(
|
|
879
|
-
"""
|
|
880
|
-
INSERT INTO nonce_reservations (chain_id, signer_address, nonce, status, intent_id)
|
|
881
|
-
VALUES (?, ?, ?, ?, ?)
|
|
882
|
-
ON CONFLICT(chain_id, signer_address, nonce) DO UPDATE SET
|
|
883
|
-
status = excluded.status,
|
|
884
|
-
intent_id = excluded.intent_id,
|
|
885
|
-
updated_at = CURRENT_TIMESTAMP
|
|
886
|
-
""",
|
|
887
|
-
(chain_id, address, nonce, status, intent_id_str),
|
|
888
|
-
)
|
|
889
|
-
# Fetch and return the reservation
|
|
890
|
-
reservation = self.get_nonce_reservation(chain_id, address, nonce)
|
|
891
|
-
if not reservation:
|
|
892
|
-
raise DatabaseError("Failed to create nonce reservation")
|
|
893
|
-
return reservation
|
|
894
|
-
|
|
895
|
-
def update_nonce_reservation_status(
|
|
896
|
-
self,
|
|
897
|
-
chain_id: int,
|
|
898
|
-
address: str,
|
|
899
|
-
nonce: int,
|
|
900
|
-
status: str,
|
|
901
|
-
intent_id: UUID | None = None,
|
|
902
|
-
) -> bool:
|
|
903
|
-
conn = self._ensure_connected()
|
|
904
|
-
intent_id_str = str(intent_id) if intent_id else None
|
|
905
|
-
with self._lock:
|
|
906
|
-
cursor = conn.cursor()
|
|
907
|
-
if intent_id_str:
|
|
908
|
-
cursor.execute(
|
|
909
|
-
"""
|
|
910
|
-
UPDATE nonce_reservations SET status = ?, intent_id = ?, updated_at = CURRENT_TIMESTAMP
|
|
911
|
-
WHERE chain_id = ? AND signer_address = ? AND nonce = ?
|
|
912
|
-
""",
|
|
913
|
-
(status, intent_id_str, chain_id, address, nonce),
|
|
914
|
-
)
|
|
915
|
-
else:
|
|
916
|
-
cursor.execute(
|
|
917
|
-
"""
|
|
918
|
-
UPDATE nonce_reservations SET status = ?, updated_at = CURRENT_TIMESTAMP
|
|
919
|
-
WHERE chain_id = ? AND signer_address = ? AND nonce = ?
|
|
920
|
-
""",
|
|
921
|
-
(status, chain_id, address, nonce),
|
|
922
|
-
)
|
|
923
|
-
updated = cursor.rowcount > 0
|
|
924
|
-
if not self._in_transaction:
|
|
925
|
-
conn.commit()
|
|
926
|
-
cursor.close()
|
|
927
|
-
return updated
|
|
928
|
-
|
|
929
|
-
def release_nonce_reservation(
|
|
930
|
-
self, chain_id: int, address: str, nonce: int
|
|
931
|
-
) -> bool:
|
|
932
|
-
return self.update_nonce_reservation_status(
|
|
933
|
-
chain_id, address, nonce, "released"
|
|
934
|
-
)
|
|
935
|
-
|
|
936
|
-
def cleanup_orphaned_nonces(
|
|
937
|
-
self, chain_id: int, older_than_hours: int = 24
|
|
938
|
-
) -> int:
|
|
939
|
-
conn = self._ensure_connected()
|
|
940
|
-
with self._lock:
|
|
941
|
-
cursor = conn.cursor()
|
|
942
|
-
try:
|
|
943
|
-
cursor.execute(
|
|
944
|
-
"""
|
|
945
|
-
DELETE FROM nonce_reservations
|
|
946
|
-
WHERE chain_id = ?
|
|
947
|
-
AND status = 'orphaned'
|
|
948
|
-
AND updated_at < datetime('now', ? || ' hours')
|
|
949
|
-
""",
|
|
950
|
-
(chain_id, f"-{older_than_hours}"),
|
|
951
|
-
)
|
|
952
|
-
deleted = cursor.rowcount
|
|
953
|
-
if not self._in_transaction:
|
|
954
|
-
conn.commit()
|
|
955
|
-
return deleted
|
|
956
|
-
finally:
|
|
957
|
-
cursor.close()
|
|
958
|
-
|
|
959
|
-
# =========================================================================
|
|
960
|
-
# Intent Operations
|
|
961
|
-
# =========================================================================
|
|
962
|
-
|
|
963
|
-
def create_intent(
|
|
964
|
-
self,
|
|
965
|
-
intent_id: UUID,
|
|
966
|
-
job_id: str,
|
|
967
|
-
chain_id: int,
|
|
968
|
-
signer_address: str,
|
|
969
|
-
idempotency_key: str,
|
|
970
|
-
to_address: str,
|
|
971
|
-
data: str | None,
|
|
972
|
-
value_wei: str,
|
|
973
|
-
gas_limit: int | None,
|
|
974
|
-
max_fee_per_gas: str | None,
|
|
975
|
-
max_priority_fee_per_gas: str | None,
|
|
976
|
-
min_confirmations: int,
|
|
977
|
-
deadline_ts: datetime | None,
|
|
978
|
-
broadcast_group: str | None = None,
|
|
979
|
-
broadcast_endpoints: list[str] | None = None,
|
|
980
|
-
metadata: dict | None = None,
|
|
981
|
-
) -> TxIntent | None:
|
|
982
|
-
signer_address = signer_address.lower()
|
|
983
|
-
try:
|
|
984
|
-
self.execute(
|
|
985
|
-
"""
|
|
986
|
-
INSERT INTO tx_intents (
|
|
987
|
-
intent_id, job_id, chain_id, signer_address, idempotency_key,
|
|
988
|
-
to_address, data, value_wei, gas_limit, max_fee_per_gas,
|
|
989
|
-
max_priority_fee_per_gas, min_confirmations, deadline_ts,
|
|
990
|
-
broadcast_group, broadcast_endpoints_json, retry_after, status,
|
|
991
|
-
metadata_json
|
|
992
|
-
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, 'created', ?)
|
|
993
|
-
""",
|
|
994
|
-
(
|
|
995
|
-
str(intent_id),
|
|
996
|
-
job_id,
|
|
997
|
-
chain_id,
|
|
998
|
-
signer_address,
|
|
999
|
-
idempotency_key,
|
|
1000
|
-
to_address,
|
|
1001
|
-
data,
|
|
1002
|
-
value_wei,
|
|
1003
|
-
gas_limit,
|
|
1004
|
-
max_fee_per_gas,
|
|
1005
|
-
max_priority_fee_per_gas,
|
|
1006
|
-
min_confirmations,
|
|
1007
|
-
deadline_ts,
|
|
1008
|
-
broadcast_group,
|
|
1009
|
-
json.dumps(broadcast_endpoints) if broadcast_endpoints else None,
|
|
1010
|
-
json.dumps(metadata) if metadata else None,
|
|
1011
|
-
),
|
|
1012
|
-
)
|
|
1013
|
-
return self.get_intent(intent_id)
|
|
1014
|
-
except sqlite3.IntegrityError:
|
|
1015
|
-
# Idempotency key already exists
|
|
1016
|
-
return None
|
|
1017
|
-
except DatabaseError as e:
|
|
1018
|
-
if "UNIQUE constraint failed" in str(e):
|
|
1019
|
-
return None
|
|
1020
|
-
raise
|
|
1021
|
-
|
|
1022
|
-
def get_intent(self, intent_id: UUID) -> TxIntent | None:
|
|
1023
|
-
row = self.execute_one(
|
|
1024
|
-
"SELECT * FROM tx_intents WHERE intent_id = ?",
|
|
1025
|
-
(str(intent_id),),
|
|
1026
|
-
)
|
|
1027
|
-
if not row:
|
|
1028
|
-
return None
|
|
1029
|
-
return self._row_to_intent(row)
|
|
1030
|
-
|
|
1031
|
-
def get_intent_by_idempotency_key(
|
|
1032
|
-
self,
|
|
1033
|
-
chain_id: int,
|
|
1034
|
-
signer_address: str,
|
|
1035
|
-
idempotency_key: str,
|
|
1036
|
-
) -> TxIntent | None:
|
|
1037
|
-
row = self.execute_one(
|
|
1038
|
-
"SELECT * FROM tx_intents WHERE chain_id = ? AND signer_address = ? AND idempotency_key = ?",
|
|
1039
|
-
(chain_id, signer_address.lower(), idempotency_key),
|
|
1040
|
-
)
|
|
1041
|
-
if not row:
|
|
1042
|
-
return None
|
|
1043
|
-
return self._row_to_intent(row)
|
|
1044
|
-
|
|
1045
|
-
def _row_to_intent(self, row: dict[str, Any]) -> TxIntent:
|
|
1046
|
-
intent_id = row["intent_id"]
|
|
1047
|
-
if isinstance(intent_id, str):
|
|
1048
|
-
intent_id = UUID(intent_id)
|
|
1049
|
-
# Parse metadata_json
|
|
1050
|
-
metadata_json = row.get("metadata_json")
|
|
1051
|
-
metadata = json.loads(metadata_json) if metadata_json else {}
|
|
1052
|
-
return TxIntent(
|
|
1053
|
-
intent_id=intent_id,
|
|
1054
|
-
job_id=row["job_id"],
|
|
1055
|
-
chain_id=row["chain_id"],
|
|
1056
|
-
signer_address=row["signer_address"],
|
|
1057
|
-
idempotency_key=row["idempotency_key"],
|
|
1058
|
-
to_address=row["to_address"],
|
|
1059
|
-
data=row["data"],
|
|
1060
|
-
value_wei=row["value_wei"],
|
|
1061
|
-
gas_limit=row["gas_limit"],
|
|
1062
|
-
max_fee_per_gas=row["max_fee_per_gas"],
|
|
1063
|
-
max_priority_fee_per_gas=row["max_priority_fee_per_gas"],
|
|
1064
|
-
min_confirmations=row["min_confirmations"],
|
|
1065
|
-
deadline_ts=row["deadline_ts"],
|
|
1066
|
-
retry_after=row["retry_after"],
|
|
1067
|
-
retry_count=row.get("retry_count", 0),
|
|
1068
|
-
status=IntentStatus(row["status"]),
|
|
1069
|
-
claim_token=row["claim_token"],
|
|
1070
|
-
claimed_at=row["claimed_at"],
|
|
1071
|
-
created_at=row["created_at"],
|
|
1072
|
-
updated_at=row["updated_at"],
|
|
1073
|
-
# Broadcast binding (may be None if not yet broadcast)
|
|
1074
|
-
broadcast_group=row.get("broadcast_group"),
|
|
1075
|
-
broadcast_endpoints_json=row.get("broadcast_endpoints_json"),
|
|
1076
|
-
metadata=metadata,
|
|
1077
|
-
)
|
|
1078
|
-
|
|
1079
|
-
def get_intents_by_status(
|
|
1080
|
-
self,
|
|
1081
|
-
status: str | list[str],
|
|
1082
|
-
chain_id: int | None = None,
|
|
1083
|
-
job_id: str | None = None,
|
|
1084
|
-
limit: int = 100,
|
|
1085
|
-
) -> list[TxIntent]:
|
|
1086
|
-
if isinstance(status, str):
|
|
1087
|
-
status = [status]
|
|
1088
|
-
|
|
1089
|
-
placeholders = ",".join("?" * len(status))
|
|
1090
|
-
query = f"SELECT * FROM tx_intents WHERE status IN ({placeholders})"
|
|
1091
|
-
params: list[Any] = list(status)
|
|
1092
|
-
|
|
1093
|
-
if chain_id is not None:
|
|
1094
|
-
query += " AND chain_id = ?"
|
|
1095
|
-
params.append(chain_id)
|
|
1096
|
-
if job_id is not None:
|
|
1097
|
-
query += " AND job_id = ?"
|
|
1098
|
-
params.append(job_id)
|
|
1099
|
-
|
|
1100
|
-
query += " ORDER BY created_at ASC LIMIT ?"
|
|
1101
|
-
params.append(limit)
|
|
1102
|
-
|
|
1103
|
-
rows = self.execute_returning(query, tuple(params))
|
|
1104
|
-
return [self._row_to_intent(row) for row in rows]
|
|
1105
|
-
|
|
1106
|
-
def list_intents_filtered(
|
|
1107
|
-
self,
|
|
1108
|
-
status: str | None = None,
|
|
1109
|
-
job_id: str | None = None,
|
|
1110
|
-
limit: int = 50,
|
|
1111
|
-
) -> list[dict[str, Any]]:
|
|
1112
|
-
query = "SELECT * FROM tx_intents WHERE 1=1"
|
|
1113
|
-
params: list[Any] = []
|
|
1114
|
-
|
|
1115
|
-
if status is not None:
|
|
1116
|
-
query += " AND status = ?"
|
|
1117
|
-
params.append(status)
|
|
1118
|
-
if job_id is not None:
|
|
1119
|
-
query += " AND job_id = ?"
|
|
1120
|
-
params.append(job_id)
|
|
1121
|
-
|
|
1122
|
-
query += " ORDER BY created_at DESC LIMIT ?"
|
|
1123
|
-
params.append(limit)
|
|
1124
|
-
|
|
1125
|
-
return self.execute_returning(query, tuple(params))
|
|
1126
|
-
|
|
1127
|
-
def get_active_intent_count(self, job_id: str, chain_id: int | None = None) -> int:
|
|
1128
|
-
statuses = [
|
|
1129
|
-
IntentStatus.CREATED.value,
|
|
1130
|
-
IntentStatus.CLAIMED.value,
|
|
1131
|
-
IntentStatus.SENDING.value,
|
|
1132
|
-
IntentStatus.PENDING.value,
|
|
1133
|
-
]
|
|
1134
|
-
placeholders = ",".join("?" * len(statuses))
|
|
1135
|
-
query = f"SELECT COUNT(*) AS count FROM tx_intents WHERE status IN ({placeholders}) AND job_id = ?"
|
|
1136
|
-
params: list[Any] = list(statuses)
|
|
1137
|
-
params.append(job_id)
|
|
1138
|
-
if chain_id is not None:
|
|
1139
|
-
query += " AND chain_id = ?"
|
|
1140
|
-
params.append(chain_id)
|
|
1141
|
-
row = self.execute_one(query, tuple(params))
|
|
1142
|
-
return int(row["count"]) if row else 0
|
|
1143
|
-
|
|
1144
|
-
def get_pending_intent_count(self, chain_id: int | None = None) -> int:
|
|
1145
|
-
statuses = [
|
|
1146
|
-
IntentStatus.CREATED.value,
|
|
1147
|
-
IntentStatus.CLAIMED.value,
|
|
1148
|
-
IntentStatus.SENDING.value,
|
|
1149
|
-
IntentStatus.PENDING.value,
|
|
1150
|
-
]
|
|
1151
|
-
placeholders = ",".join("?" * len(statuses))
|
|
1152
|
-
query = f"SELECT COUNT(*) AS count FROM tx_intents WHERE status IN ({placeholders})"
|
|
1153
|
-
params: list[Any] = list(statuses)
|
|
1154
|
-
if chain_id is not None:
|
|
1155
|
-
query += " AND chain_id = ?"
|
|
1156
|
-
params.append(chain_id)
|
|
1157
|
-
row = self.execute_one(query, tuple(params))
|
|
1158
|
-
return int(row["count"]) if row else 0
|
|
1159
|
-
|
|
1160
|
-
def get_backing_off_intent_count(self, chain_id: int | None = None) -> int:
|
|
1161
|
-
query = "SELECT COUNT(*) AS count FROM tx_intents WHERE retry_after > CURRENT_TIMESTAMP"
|
|
1162
|
-
params: list[Any] = []
|
|
1163
|
-
if chain_id is not None:
|
|
1164
|
-
query += " AND chain_id = ?"
|
|
1165
|
-
params.append(chain_id)
|
|
1166
|
-
row = self.execute_one(query, tuple(params))
|
|
1167
|
-
return int(row["count"]) if row else 0
|
|
1168
|
-
|
|
1169
|
-
def get_oldest_pending_intent_age(self, chain_id: int) -> float | None:
|
|
1170
|
-
query = """
|
|
1171
|
-
SELECT (julianday('now') - julianday(MIN(created_at))) * 86400 AS age_seconds
|
|
1172
|
-
FROM tx_intents
|
|
1173
|
-
WHERE chain_id = ?
|
|
1174
|
-
AND status IN ('created', 'pending', 'claimed', 'sending')
|
|
1175
|
-
"""
|
|
1176
|
-
result = self.execute_one(query, (chain_id,))
|
|
1177
|
-
if result and result.get("age_seconds") is not None:
|
|
1178
|
-
return result["age_seconds"]
|
|
1179
|
-
return None
|
|
1180
|
-
|
|
1181
|
-
def list_intent_inconsistencies(
|
|
1182
|
-
self,
|
|
1183
|
-
max_age_seconds: int,
|
|
1184
|
-
limit: int = 100,
|
|
1185
|
-
chain_id: int | None = None,
|
|
1186
|
-
) -> list[dict[str, Any]]:
|
|
1187
|
-
chain_clause = ""
|
|
1188
|
-
chain_params: list[Any] = []
|
|
1189
|
-
if chain_id is not None:
|
|
1190
|
-
chain_clause = " AND chain_id = ?"
|
|
1191
|
-
chain_params = [chain_id] * 5
|
|
1192
|
-
|
|
1193
|
-
query = f"""
|
|
1194
|
-
SELECT intent_id, status, 'pending_no_attempt' AS reason
|
|
1195
|
-
FROM tx_intents
|
|
1196
|
-
WHERE status = 'pending'
|
|
1197
|
-
{chain_clause}
|
|
1198
|
-
AND NOT EXISTS (
|
|
1199
|
-
SELECT 1 FROM tx_attempts
|
|
1200
|
-
WHERE tx_attempts.intent_id = tx_intents.intent_id
|
|
1201
|
-
AND tx_attempts.tx_hash IS NOT NULL
|
|
1202
|
-
)
|
|
1203
|
-
|
|
1204
|
-
UNION ALL
|
|
1205
|
-
SELECT intent_id, status, 'confirmed_no_confirmed_attempt' AS reason
|
|
1206
|
-
FROM tx_intents
|
|
1207
|
-
WHERE status = 'confirmed'
|
|
1208
|
-
{chain_clause}
|
|
1209
|
-
AND NOT EXISTS (
|
|
1210
|
-
SELECT 1 FROM tx_attempts
|
|
1211
|
-
WHERE tx_attempts.intent_id = tx_intents.intent_id
|
|
1212
|
-
AND tx_attempts.status = 'confirmed'
|
|
1213
|
-
)
|
|
1214
|
-
|
|
1215
|
-
UNION ALL
|
|
1216
|
-
SELECT intent_id, status, 'claimed_missing_claim' AS reason
|
|
1217
|
-
FROM tx_intents
|
|
1218
|
-
WHERE status = 'claimed'
|
|
1219
|
-
{chain_clause}
|
|
1220
|
-
AND (claim_token IS NULL OR claimed_at IS NULL)
|
|
1221
|
-
|
|
1222
|
-
UNION ALL
|
|
1223
|
-
SELECT intent_id, status, 'nonclaimed_with_claim' AS reason
|
|
1224
|
-
FROM tx_intents
|
|
1225
|
-
WHERE status != 'claimed'
|
|
1226
|
-
{chain_clause}
|
|
1227
|
-
AND (claim_token IS NOT NULL OR claimed_at IS NOT NULL)
|
|
1228
|
-
|
|
1229
|
-
UNION ALL
|
|
1230
|
-
SELECT intent_id, status, 'sending_stuck' AS reason
|
|
1231
|
-
FROM tx_intents
|
|
1232
|
-
WHERE status = 'sending'
|
|
1233
|
-
{chain_clause}
|
|
1234
|
-
AND updated_at < datetime('now', ? || ' seconds')
|
|
1235
|
-
|
|
1236
|
-
LIMIT ?
|
|
1237
|
-
"""
|
|
1238
|
-
params_with_age = chain_params + [f"-{max_age_seconds}", limit]
|
|
1239
|
-
rows = self.execute_returning(query, tuple(params_with_age))
|
|
1240
|
-
return [dict(row) for row in rows]
|
|
1241
|
-
|
|
1242
|
-
def list_sending_intents_older_than(
|
|
1243
|
-
self,
|
|
1244
|
-
max_age_seconds: int,
|
|
1245
|
-
limit: int = 100,
|
|
1246
|
-
chain_id: int | None = None,
|
|
1247
|
-
) -> list[TxIntent]:
|
|
1248
|
-
query = """
|
|
1249
|
-
SELECT * FROM tx_intents
|
|
1250
|
-
WHERE status = 'sending'
|
|
1251
|
-
AND updated_at < datetime('now', ? || ' seconds')
|
|
1252
|
-
"""
|
|
1253
|
-
params: list[Any] = [f"-{max_age_seconds}"]
|
|
1254
|
-
if chain_id is not None:
|
|
1255
|
-
query += " AND chain_id = ?"
|
|
1256
|
-
params.append(chain_id)
|
|
1257
|
-
query += " ORDER BY updated_at ASC LIMIT ?"
|
|
1258
|
-
params.append(limit)
|
|
1259
|
-
rows = self.execute_returning(query, tuple(params))
|
|
1260
|
-
return [self._row_to_intent(row) for row in rows]
|
|
1261
|
-
|
|
1262
|
-
def claim_next_intent(
|
|
1263
|
-
self,
|
|
1264
|
-
claim_token: str,
|
|
1265
|
-
claimed_by: str | None = None,
|
|
1266
|
-
) -> TxIntent | None:
|
|
1267
|
-
"""Claim the next available intent for processing.
|
|
1268
|
-
|
|
1269
|
-
SQLite version uses deterministic ordering with immediate lock.
|
|
1270
|
-
"""
|
|
1271
|
-
conn = self._ensure_connected()
|
|
1272
|
-
with self._lock:
|
|
1273
|
-
# Use IMMEDIATE transaction for claiming
|
|
1274
|
-
conn.execute("BEGIN IMMEDIATE")
|
|
1275
|
-
try:
|
|
1276
|
-
cursor = conn.cursor()
|
|
1277
|
-
# Find and claim in one atomic operation
|
|
1278
|
-
cursor.execute(
|
|
1279
|
-
"""
|
|
1280
|
-
UPDATE tx_intents
|
|
1281
|
-
SET status = 'claimed', claim_token = ?, claimed_at = CURRENT_TIMESTAMP,
|
|
1282
|
-
claimed_by = ?,
|
|
1283
|
-
retry_after = NULL,
|
|
1284
|
-
updated_at = CURRENT_TIMESTAMP
|
|
1285
|
-
WHERE intent_id = (
|
|
1286
|
-
SELECT intent_id FROM tx_intents
|
|
1287
|
-
WHERE status = 'created'
|
|
1288
|
-
AND (deadline_ts IS NULL OR deadline_ts > CURRENT_TIMESTAMP)
|
|
1289
|
-
AND (retry_after IS NULL OR retry_after <= CURRENT_TIMESTAMP)
|
|
1290
|
-
ORDER BY created_at ASC, intent_id ASC
|
|
1291
|
-
LIMIT 1
|
|
1292
|
-
)
|
|
1293
|
-
AND status = 'created'
|
|
1294
|
-
""",
|
|
1295
|
-
(claim_token, claimed_by),
|
|
1296
|
-
)
|
|
1297
|
-
|
|
1298
|
-
if cursor.rowcount == 0:
|
|
1299
|
-
conn.rollback()
|
|
1300
|
-
cursor.close()
|
|
1301
|
-
return None
|
|
1302
|
-
|
|
1303
|
-
# Get the claimed intent
|
|
1304
|
-
cursor.execute(
|
|
1305
|
-
"SELECT * FROM tx_intents WHERE claim_token = ? AND status = 'claimed'",
|
|
1306
|
-
(claim_token,),
|
|
1307
|
-
)
|
|
1308
|
-
row = cursor.fetchone()
|
|
1309
|
-
conn.commit()
|
|
1310
|
-
cursor.close()
|
|
1311
|
-
|
|
1312
|
-
if row:
|
|
1313
|
-
return self._row_to_intent(dict(row))
|
|
1314
|
-
return None
|
|
1315
|
-
except Exception:
|
|
1316
|
-
conn.rollback()
|
|
1317
|
-
raise

    def update_intent_status(
        self,
        intent_id: UUID,
        status: str,
        claim_token: str | None = None,
    ) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            if claim_token:
                cursor.execute(
                    """
                    UPDATE tx_intents SET status = ?, claim_token = ?,
                        claimed_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP
                    WHERE intent_id = ?
                    """,
                    (status, claim_token, str(intent_id)),
                )
            else:
                cursor.execute(
                    """
                    UPDATE tx_intents SET status = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE intent_id = ?
                    """,
                    (status, str(intent_id)),
                )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def update_intent_status_if(
        self,
        intent_id: UUID,
        status: str,
        expected_status: str | list[str],
    ) -> bool:
        if isinstance(expected_status, str):
            expected_status = [expected_status]
        placeholders = ",".join("?" * len(expected_status))
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                f"""
                UPDATE tx_intents SET status = ?, updated_at = CURRENT_TIMESTAMP
                WHERE intent_id = ? AND status IN ({placeholders})
                """,
                (status, str(intent_id), *expected_status),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated
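
update_intent_status_if is a compare-and-set over the status column: the expected statuses are expanded into positional placeholders and the write only lands if the current status is still one of them, with rowcount telling the caller whether it won. A generic sketch of that placeholder expansion (illustrative table and columns, not the package's):

```python
import sqlite3
from collections.abc import Sequence

def update_status_if(
    conn: sqlite3.Connection,
    row_id: str,
    new_status: str,
    expected: Sequence[str],
) -> bool:
    """Set status only if it is currently one of `expected`; True if a row changed."""
    placeholders = ",".join("?" * len(expected))  # e.g. "?,?,?"
    cur = conn.execute(
        f"UPDATE items SET status = ? WHERE id = ? AND status IN ({placeholders})",
        (new_status, row_id, *expected),
    )
    conn.commit()
    return cur.rowcount > 0
```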

    def transition_intent_status(
        self,
        intent_id: UUID,
        from_statuses: list[str],
        to_status: str,
    ) -> tuple[bool, str | None]:
        """Atomic status transition with conditional claim clearing.

        SQLite version: uses BEGIN IMMEDIATE for fewer lock surprises,
        then SELECT + UPDATE with WHERE status guard.
        """
        conn = self._ensure_connected()
        placeholders = ",".join("?" * len(from_statuses))

        with self._lock:
            cursor = conn.cursor()
            started_tx = False
            if not self._in_transaction:
                # BEGIN IMMEDIATE to acquire write lock early
                cursor.execute("BEGIN IMMEDIATE")
                started_tx = True

            try:
                # Get current status (within transaction)
                cursor.execute(
                    "SELECT status FROM tx_intents WHERE intent_id = ?",
                    (str(intent_id),)
                )
                row = cursor.fetchone()
                if not row:
                    if started_tx:
                        conn.rollback()
                    cursor.close()
                    return (False, None)

                old_status = row[0]

                # Check if transition is allowed
                if old_status not in from_statuses:
                    if started_tx:
                        conn.rollback()
                    cursor.close()
                    return (False, None)

                # Clear claim only if leaving 'claimed' (not claimed->claimed)
                should_clear_claim = old_status == "claimed" and to_status != "claimed"

                if should_clear_claim:
                    cursor.execute(
                        f"""
                        UPDATE tx_intents
                        SET status = ?,
                            updated_at = CURRENT_TIMESTAMP,
                            claim_token = NULL,
                            claimed_at = NULL,
                            claimed_by = NULL
                        WHERE intent_id = ? AND status IN ({placeholders})
                        """,
                        (to_status, str(intent_id), *from_statuses),
                    )
                else:
                    cursor.execute(
                        f"""
                        UPDATE tx_intents
                        SET status = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE intent_id = ? AND status IN ({placeholders})
                        """,
                        (to_status, str(intent_id), *from_statuses),
                    )

                if cursor.rowcount == 0:
                    # Lost race - status changed between SELECT and UPDATE
                    if started_tx:
                        conn.rollback()
                    cursor.close()
                    return (False, None)

                if started_tx:
                    conn.commit()
                cursor.close()
                return (True, old_status)

            except Exception:
                if started_tx:
                    conn.rollback()
                cursor.close()
                raise
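
The (ok, old_status) return lets callers tell "wrong state or missing row" apart from a successful transition and log where the row came from. A hypothetical caller, assuming a db handle that exposes the removed method (the 'sending' target status is taken from the queries below, the wrapper itself is illustrative):

```python
from uuid import UUID

def mark_sending(db, intent_id: UUID) -> bool:
    # Only intents that are currently claimed may move forward.
    ok, old_status = db.transition_intent_status(
        intent_id,
        from_statuses=["claimed"],
        to_status="sending",
    )
    if not ok:
        # Either the intent vanished or another worker changed its status first.
        return False
    print(f"intent {intent_id}: {old_status} -> sending")
    return True
```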

    def update_intent_signer(self, intent_id: UUID, signer_address: str) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents SET signer_address = ?, updated_at = CURRENT_TIMESTAMP
                WHERE intent_id = ?
                """,
                (signer_address.lower(), str(intent_id)),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def release_intent_claim(self, intent_id: UUID) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents SET status = 'created', claim_token = NULL,
                    claimed_at = NULL, updated_at = CURRENT_TIMESTAMP
                WHERE intent_id = ? AND status = 'claimed'
                """,
                (str(intent_id),),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def release_intent_claim_if_token(self, intent_id: UUID, claim_token: str) -> bool:
        rowcount = self.execute_returning_rowcount(
            """
            UPDATE tx_intents
            SET status = 'created',
                claim_token = NULL,
                claimed_at = NULL,
                claimed_by = NULL,
                updated_at = CURRENT_TIMESTAMP
            WHERE intent_id = ? AND claim_token = ? AND status = 'claimed'
            """,
            (str(intent_id), claim_token),
        )
        return rowcount == 1
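
Conditioning the release on claim_token means a worker can only give back a claim it still owns; if a stale-claim sweeper has already re-issued the intent, the token no longer matches and the release is a no-op. A hypothetical worker loop tying claim and release together (db is an assumed handle; the claim parameter names mirror the values the removed SQL binds, and handle() is a stand-in processing step):

```python
import uuid

def process_one(db, worker_name: str) -> bool:
    token = uuid.uuid4().hex
    intent = db.claim_intent(claim_token=token, claimed_by=worker_name)
    if intent is None:
        return False  # nothing eligible right now
    try:
        handle(intent)  # hypothetical processing step
    except Exception:
        # Only release if we still hold the claim under this token.
        db.release_intent_claim_if_token(intent.intent_id, token)
        raise
    return True
```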

    def clear_intent_claim(self, intent_id: UUID) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents
                SET claim_token = NULL, claimed_at = NULL, updated_at = CURRENT_TIMESTAMP
                WHERE intent_id = ?
                """,
                (str(intent_id),),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def set_intent_retry_after(self, intent_id: UUID, retry_after: datetime | None) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents
                SET retry_after = ?, updated_at = CURRENT_TIMESTAMP
                WHERE intent_id = ?
                """,
                (retry_after, str(intent_id)),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def increment_intent_retry_count(self, intent_id: UUID) -> int:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents
                SET retry_count = retry_count + 1, updated_at = CURRENT_TIMESTAMP
                WHERE intent_id = ?
                """,
                (str(intent_id),),
            )
            if cursor.rowcount == 0:
                cursor.close()
                return 0
            cursor.execute(
                "SELECT retry_count FROM tx_intents WHERE intent_id = ?",
                (str(intent_id),),
            )
            row = cursor.fetchone()
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return row[0] if row else 0

    def release_stale_intent_claims(self, max_age_seconds: int) -> int:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents
                SET status = 'created', claim_token = NULL, claimed_at = NULL,
                    updated_at = CURRENT_TIMESTAMP
                WHERE status = 'claimed'
                AND claimed_at < datetime('now', ? || ' seconds')
                AND NOT EXISTS (
                    SELECT 1 FROM tx_attempts WHERE tx_attempts.intent_id = tx_intents.intent_id
                )
                """,
                (f"-{max_age_seconds}",),
            )
            count = cursor.rowcount
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return count
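
release_stale_intent_claims and several maintenance queries below lean on SQLite's relative datetime modifiers: the age is bound as a string such as '-300' and concatenated with ' seconds' inside SQL, which yields datetime('now', '-300 seconds') while keeping the statement parameterised. A quick standalone check of the idiom:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
max_age_seconds = 300

# The parameter feeds the modifier string, not the SQL text itself.
cutoff = conn.execute(
    "SELECT datetime('now', ? || ' seconds')",
    (f"-{max_age_seconds}",),
).fetchone()[0]
print(cutoff)  # UTC timestamp five minutes in the past
```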

    def abandon_intent(self, intent_id: UUID) -> bool:
        return self.update_intent_status(intent_id, "abandoned")

    def get_pending_intents_for_signer(
        self, chain_id: int, address: str
    ) -> list[TxIntent]:
        rows = self.execute_returning(
            """
            SELECT * FROM tx_intents
            WHERE chain_id = ? AND signer_address = ?
            AND status IN ('sending', 'pending')
            ORDER BY created_at
            """,
            (chain_id, address),
        )
        return [self._row_to_intent(row) for row in rows]

    # =========================================================================
    # Broadcast Binding Operations
    # =========================================================================

    def get_broadcast_binding(self, intent_id: UUID) -> tuple[str | None, list[str]] | None:
        """Get binding if exists, None for first broadcast.

        Returns:
            Tuple of (group_name or None, endpoints) or None if not bound yet

        Raises:
            ValueError: If binding is corrupt (wrong type, empty)
        """
        row = self.execute_one(
            """
            SELECT broadcast_group, broadcast_endpoints_json
            FROM tx_intents
            WHERE intent_id = ?
            """,
            (str(intent_id),),
        )

        if not row:
            return None

        has_endpoints = row["broadcast_endpoints_json"] is not None

        # No endpoints → not bound yet
        if not has_endpoints:
            return None

        # Parse and validate endpoints
        endpoints = json.loads(row["broadcast_endpoints_json"])
        if not isinstance(endpoints, list):
            raise ValueError(
                f"Corrupt binding for intent {intent_id}: "
                f"endpoints_json is {type(endpoints).__name__}, expected list"
            )
        if not endpoints:
            raise ValueError(
                f"Corrupt binding for intent {intent_id}: endpoints list is empty"
            )
        if not all(isinstance(ep, str) for ep in endpoints):
            raise ValueError(
                f"Corrupt binding for intent {intent_id}: endpoints contains non-string"
            )

        return row["broadcast_group"], endpoints
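
The binding lives as a JSON array inside a single TEXT column, so the read path re-validates its shape on every load rather than trusting the writer. A tiny round-trip illustration of the format the checks above expect (endpoint URLs are invented):

```python
import json

# What the write path stores on first broadcast (illustrative values).
stored = json.dumps(["https://rpc-a.example", "https://rpc-b.example"])

# What get_broadcast_binding expects when it loads the row back.
endpoints = json.loads(stored)
assert isinstance(endpoints, list) and endpoints
assert all(isinstance(ep, str) for ep in endpoints)
```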

    # =========================================================================
    # Attempt Operations
    # =========================================================================

    def create_attempt(
        self,
        attempt_id: UUID,
        intent_id: UUID,
        nonce: int,
        gas_params_json: str,
        status: str = "signed",
        tx_hash: str | None = None,
        replaces_attempt_id: UUID | None = None,
        broadcast_group: str | None = None,
        endpoint_url: str | None = None,
        binding: tuple[str | None, list[str]] | None = None,
    ) -> TxAttempt:
        """Create attempt, optionally setting binding atomically.

        Args:
            binding: If provided (first broadcast), persist binding atomically.
                Tuple of (group_name or None, endpoints)

        CRITICAL: Uses WHERE broadcast_endpoints_json IS NULL to prevent overwrites.
        """
        replaces_str = str(replaces_attempt_id) if replaces_attempt_id else None
        conn = self._ensure_connected()

        with self._lock:
            try:
                conn.execute("BEGIN IMMEDIATE")
                self._in_transaction = True

                if binding is not None:
                    # First broadcast: check existence + binding state for clear error messages
                    # (The UPDATE's WHERE clause is the true guard; this is for diagnostics)
                    cursor = conn.cursor()
                    cursor.execute(
                        "SELECT broadcast_endpoints_json FROM tx_intents WHERE intent_id = ?",
                        (str(intent_id),),
                    )
                    row = cursor.fetchone()
                    cursor.close()

                    if not row:
                        raise ValueError(f"Intent {intent_id} not found")
                    if row["broadcast_endpoints_json"] is not None:
                        raise ValueError(
                            f"Intent {intent_id} already bound. "
                            f"Cannot rebind — may indicate race condition."
                        )

                    group_name, endpoints = binding
                    # Defensive copy — don't persist a list that might be mutated elsewhere
                    endpoints_snapshot = list(endpoints)

                    rowcount = self.execute_returning_rowcount(
                        """
                        UPDATE tx_intents
                        SET broadcast_group = ?,
                            broadcast_endpoints_json = ?,
                            updated_at = CURRENT_TIMESTAMP
                        WHERE intent_id = ?
                        AND broadcast_endpoints_json IS NULL
                        """,
                        (
                            group_name,
                            json.dumps(endpoints_snapshot),
                            str(intent_id),
                        ),
                    )

                    # Rowcount check guards against TOCTOU race (SELECT passed but UPDATE lost)
                    if rowcount != 1:
                        raise ValueError(
                            f"Binding race condition for intent {intent_id}: "
                            f"another process bound it between SELECT and UPDATE"
                        )

                # Create attempt with broadcast audit fields
                conn.execute(
                    """
                    INSERT INTO tx_attempts (
                        attempt_id, intent_id, nonce, gas_params_json, status,
                        tx_hash, replaces_attempt_id, broadcast_group, endpoint_url
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        str(attempt_id),
                        str(intent_id),
                        nonce,
                        gas_params_json,
                        status,
                        tx_hash,
                        replaces_str,
                        broadcast_group,
                        endpoint_url,
                    ),
                )

                conn.commit()
            except Exception:
                conn.rollback()
                raise
            finally:
                self._in_transaction = False

        attempt = self.get_attempt(attempt_id)
        if not attempt:
            raise DatabaseError("Failed to create attempt")
        return attempt
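
The bind-once guarantee comes from the `WHERE ... AND broadcast_endpoints_json IS NULL` predicate plus the rowcount check; the earlier SELECT only exists to produce a friendlier error message. The same idea in isolation, with a hypothetical table and column names rather than the package's schema:

```python
import json
import sqlite3

def bind_once(conn: sqlite3.Connection, intent_id: str, endpoints: list[str]) -> None:
    """Persist endpoints exactly once; a second call for the same row fails loudly."""
    cur = conn.execute(
        """
        UPDATE intents
        SET endpoints_json = ?
        WHERE intent_id = ? AND endpoints_json IS NULL
        """,
        (json.dumps(list(endpoints)), intent_id),
    )
    conn.commit()
    if cur.rowcount != 1:
        # Either the row does not exist or another process bound it first.
        raise ValueError(f"intent {intent_id} is already bound (or missing)")
```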

    def get_attempt(self, attempt_id: UUID) -> TxAttempt | None:
        row = self.execute_one(
            "SELECT * FROM tx_attempts WHERE attempt_id = ?",
            (str(attempt_id),),
        )
        if not row:
            return None
        return self._row_to_attempt(row)

    def get_attempts_for_intent(self, intent_id: UUID) -> list[TxAttempt]:
        rows = self.execute_returning(
            "SELECT * FROM tx_attempts WHERE intent_id = ? ORDER BY created_at",
            (str(intent_id),),
        )
        return [self._row_to_attempt(row) for row in rows]

    def get_latest_attempt_for_intent(self, intent_id: UUID) -> TxAttempt | None:
        row = self.execute_one(
            """
            SELECT * FROM tx_attempts WHERE intent_id = ?
            ORDER BY created_at DESC LIMIT 1
            """,
            (str(intent_id),),
        )
        if not row:
            return None
        return self._row_to_attempt(row)

    def get_attempt_by_tx_hash(self, tx_hash: str) -> TxAttempt | None:
        row = self.execute_one(
            "SELECT * FROM tx_attempts WHERE tx_hash = ?",
            (tx_hash,),
        )
        if not row:
            return None
        return self._row_to_attempt(row)

    def _row_to_attempt(self, row: dict[str, Any]) -> TxAttempt:
        attempt_id = row["attempt_id"]
        if isinstance(attempt_id, str):
            attempt_id = UUID(attempt_id)
        intent_id = row["intent_id"]
        if isinstance(intent_id, str):
            intent_id = UUID(intent_id)
        replaces = row["replaces_attempt_id"]
        if replaces and isinstance(replaces, str):
            replaces = UUID(replaces)
        return TxAttempt(
            attempt_id=attempt_id,
            intent_id=intent_id,
            nonce=row["nonce"],
            tx_hash=row["tx_hash"],
            gas_params=GasParams.from_json(row["gas_params_json"]),
            status=AttemptStatus(row["status"]),
            error_code=row["error_code"],
            error_detail=row["error_detail"],
            replaces_attempt_id=replaces,
            broadcast_block=row["broadcast_block"],
            broadcast_at=row.get("broadcast_at"),
            included_block=row.get("included_block"),
            created_at=row["created_at"],
            updated_at=row["updated_at"],
            # Audit trail (may be None for older attempts)
            broadcast_group=row.get("broadcast_group"),
            endpoint_url=row.get("endpoint_url"),
        )

    def update_attempt_status(
        self,
        attempt_id: UUID,
        status: str,
        tx_hash: str | None = None,
        broadcast_block: int | None = None,
        broadcast_at: datetime | None = None,
        included_block: int | None = None,
        error_code: str | None = None,
        error_detail: str | None = None,
    ) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            # Build dynamic update
            updates = ["status = ?", "updated_at = CURRENT_TIMESTAMP"]
            params: list[Any] = [status]

            if tx_hash is not None:
                updates.append("tx_hash = ?")
                params.append(tx_hash)
            if broadcast_block is not None:
                updates.append("broadcast_block = ?")
                params.append(broadcast_block)
            if broadcast_at is not None:
                updates.append("broadcast_at = ?")
                params.append(broadcast_at)
            if included_block is not None:
                updates.append("included_block = ?")
                params.append(included_block)
            if error_code is not None:
                updates.append("error_code = ?")
                params.append(error_code)
            if error_detail is not None:
                updates.append("error_detail = ?")
                params.append(error_detail)

            params.append(str(attempt_id))
            query = f"UPDATE tx_attempts SET {', '.join(updates)} WHERE attempt_id = ?"
            cursor.execute(query, params)
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated
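
update_attempt_status assembles its SET clause from whichever optional fields were supplied; only hard-coded column names are interpolated, while every value stays a bound parameter. A standalone sketch of the same builder shape (generic table, illustrative fields; callers must pass trusted column names, exactly as the removed method does by hard-coding them):

```python
import sqlite3
from typing import Any

def update_fields(conn: sqlite3.Connection, table: str, row_id: str, **fields: Any) -> bool:
    """Update only the columns that were passed; True if a row changed."""
    if not fields:
        return False
    sets = [f"{name} = ?" for name in fields]   # column names are code-controlled
    params: list[Any] = list(fields.values())   # values remain bound parameters
    params.append(row_id)
    cur = conn.execute(
        f"UPDATE {table} SET {', '.join(sets)} WHERE id = ?",
        params,
    )
    conn.commit()
    return cur.rowcount > 0
```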

    # =========================================================================
    # Transaction Operations (NEW - replaces Intent/Attempt in Phase 2+)
    #
    # IMPORTANT: Transaction is the only durable execution model.
    # Do not add attempt-related methods here.
    # =========================================================================

    def create_tx(
        self,
        tx_id: UUID,
        job_id: str,
        chain_id: int,
        idempotency_key: str,
        signer_address: str,
        to_address: str,
        data: str | None,
        value_wei: str,
        min_confirmations: int,
        deadline_ts: datetime | None,
        gas_params: GasParams | None = None,
    ) -> Transaction | None:
        """Create a new transaction.

        Returns None if idempotency_key already exists (idempotency).
        """
        gas_params_json = gas_params.to_json() if gas_params else None
        conn = self._ensure_connected()

        with self._lock:
            cursor = conn.cursor()
            try:
                cursor.execute(
                    """
                    INSERT INTO transactions (
                        tx_id, job_id, chain_id, idempotency_key,
                        signer_address, to_address, data, value_wei,
                        min_confirmations, deadline_ts, status,
                        replacement_count, gas_params_json
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'created', 0, ?)
                    """,
                    (
                        str(tx_id),
                        job_id,
                        chain_id,
                        idempotency_key,
                        signer_address,
                        to_address,
                        data,
                        value_wei,
                        min_confirmations,
                        deadline_ts,
                        gas_params_json,
                    ),
                )
                if not self._in_transaction:
                    conn.commit()
                cursor.close()
                return self.get_tx(tx_id)
            except sqlite3.IntegrityError:
                # Idempotency key already exists
                cursor.close()
                return None
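
Idempotency here rides on a uniqueness constraint: the INSERT is attempted unconditionally, a duplicate key surfaces as sqlite3.IntegrityError, and that error is translated into a None return instead of an exception. A compact, runnable illustration of the pattern (schema is a stand-in):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE txs (id TEXT PRIMARY KEY, idem TEXT, UNIQUE (idem))")

def create_once(tx_id: str, idem: str) -> bool:
    """Insert the row, or report that an earlier call already did."""
    try:
        conn.execute("INSERT INTO txs (id, idem) VALUES (?, ?)", (tx_id, idem))
        conn.commit()
        return True
    except sqlite3.IntegrityError:
        return False  # same idempotency key seen before

assert create_once("a", "k1") is True
assert create_once("b", "k1") is False  # a repeat with the same key is a no-op
```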

    def get_tx(self, tx_id: UUID) -> Transaction | None:
        """Get a transaction by ID."""
        row = self.execute_one(
            "SELECT * FROM transactions WHERE tx_id = ?",
            (str(tx_id),),
        )
        if not row:
            return None
        return self._row_to_transaction(row)

    def get_tx_by_idempotency_key(
        self,
        chain_id: int,
        signer_address: str,
        idempotency_key: str,
    ) -> Transaction | None:
        """Get a transaction by idempotency key (scoped to chain and signer)."""
        row = self.execute_one(
            "SELECT * FROM transactions WHERE chain_id = ? AND signer_address = ? AND idempotency_key = ?",
            (chain_id, signer_address.lower(), idempotency_key),
        )
        if not row:
            return None
        return self._row_to_transaction(row)

    def get_tx_by_hash(self, tx_hash: str) -> Transaction | None:
        """Get a transaction by current tx hash.

        NOTE: Does NOT search tx_hash_history. Only matches current_tx_hash.
        """
        row = self.execute_one(
            "SELECT * FROM transactions WHERE current_tx_hash = ?",
            (tx_hash,),
        )
        if not row:
            return None
        return self._row_to_transaction(row)

    def list_pending_txs(
        self,
        chain_id: int | None = None,
        job_id: str | None = None,
    ) -> list[Transaction]:
        """List transactions in CREATED or BROADCAST status."""
        query = "SELECT * FROM transactions WHERE status IN ('created', 'broadcast')"
        params: list[Any] = []

        if chain_id is not None:
            query += " AND chain_id = ?"
            params.append(chain_id)
        if job_id is not None:
            query += " AND job_id = ?"
            params.append(job_id)

        query += " ORDER BY created_at ASC"
        rows = self.execute_returning(query, tuple(params))
        return [self._row_to_transaction(row) for row in rows]

    def claim_tx(self, claim_token: str) -> Transaction | None:
        """Claim the next CREATED transaction for processing.

        Status remains CREATED while claimed - no "claimed" status.
        """
        conn = self._ensure_connected()
        with self._lock:
            conn.execute("BEGIN IMMEDIATE")
            try:
                cursor = conn.cursor()
                # Find and claim atomically
                cursor.execute(
                    """
                    UPDATE transactions
                    SET claim_token = ?, claimed_at = CURRENT_TIMESTAMP,
                        updated_at = CURRENT_TIMESTAMP
                    WHERE tx_id = (
                        SELECT tx_id FROM transactions
                        WHERE status = 'created'
                        AND claim_token IS NULL
                        AND (deadline_ts IS NULL OR deadline_ts > CURRENT_TIMESTAMP)
                        ORDER BY created_at ASC, tx_id ASC
                        LIMIT 1
                    )
                    AND status = 'created'
                    AND claim_token IS NULL
                    """,
                    (claim_token,),
                )

                if cursor.rowcount == 0:
                    conn.rollback()
                    cursor.close()
                    return None

                # Get the claimed transaction
                cursor.execute(
                    "SELECT * FROM transactions WHERE claim_token = ? AND status = 'created'",
                    (claim_token,),
                )
                row = cursor.fetchone()
                conn.commit()
                cursor.close()

                if row:
                    return self._row_to_transaction(dict(row))
                return None
            except Exception:
                conn.rollback()
                raise
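
Unlike the intent queue, claim_tx never flips a status while claiming: ownership lives entirely in claim_token and claimed_at, which is why eligibility is expressed as `claim_token IS NULL` rather than a status change. A hypothetical polling loop over this method (db and stop are assumed objects, and the sleep interval is made up):

```python
import time
import uuid

def poll_transactions(db, stop) -> None:
    """Keep claiming CREATED transactions until `stop` is set."""
    while not stop.is_set():
        token = uuid.uuid4().hex                 # fresh token per claim
        tx = db.claim_tx(claim_token=token)
        if tx is None:
            time.sleep(1.0)                      # nothing eligible; back off briefly
            continue
        print(f"claimed {tx.tx_id} with token {token}")  # hand off to the broadcaster here
```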

    def set_tx_broadcast(
        self,
        tx_id: UUID,
        tx_hash: str,
        nonce: int,
        gas_params: GasParams,
        broadcast_block: int,
        broadcast_info: BroadcastInfo | None = None,
    ) -> bool:
        """Record initial broadcast.

        Sets status=BROADCAST, creates first tx_hash_history record.
        """
        conn = self._ensure_connected()
        now = datetime.now(timezone.utc)

        # Create first history record
        history_record = TxHashRecord(
            tx_hash=tx_hash,
            nonce=nonce,
            broadcast_at=now.isoformat(),
            broadcast_block=broadcast_block,
            gas_limit=gas_params.gas_limit,
            max_fee_per_gas=gas_params.max_fee_per_gas,
            max_priority_fee_per_gas=gas_params.max_priority_fee_per_gas,
            reason="initial",
            outcome=None,
        )
        tx_hash_history = json.dumps([history_record.to_dict()])

        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE transactions
                SET status = 'broadcast',
                    current_tx_hash = ?,
                    current_nonce = ?,
                    gas_params_json = ?,
                    broadcast_info_json = ?,
                    tx_hash_history = ?,
                    updated_at = CURRENT_TIMESTAMP
                WHERE tx_id = ?
                AND status = 'created'
                """,
                (
                    tx_hash,
                    nonce,
                    gas_params.to_json(),
                    broadcast_info.to_json() if broadcast_info else None,
                    tx_hash_history,
                    str(tx_id),
                ),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def set_tx_replaced(
        self,
        tx_id: UUID,
        new_tx_hash: str,
        gas_params: GasParams,
        broadcast_block: int,
        reason: str = "fee_bump",
    ) -> bool:
        """Record replacement broadcast.

        Appends to tx_hash_history, updates current_tx_hash, increments
        replacement_count. Status remains BROADCAST.
        """
        conn = self._ensure_connected()
        now = datetime.now(timezone.utc)

        with self._lock:
            # First, get current state to update history
            cursor = conn.cursor()
            cursor.execute(
                "SELECT current_nonce, tx_hash_history FROM transactions WHERE tx_id = ? AND status = 'broadcast'",
                (str(tx_id),),
            )
            row = cursor.fetchone()
            if not row:
                cursor.close()
                return False

            nonce = row["current_nonce"]
            existing_history = json.loads(row["tx_hash_history"]) if row["tx_hash_history"] else []

            # Mark previous entry as replaced
            if existing_history:
                existing_history[-1]["outcome"] = "replaced"

            # Add new history record
            new_record = TxHashRecord(
                tx_hash=new_tx_hash,
                nonce=nonce,
                broadcast_at=now.isoformat(),
                broadcast_block=broadcast_block,
                gas_limit=gas_params.gas_limit,
                max_fee_per_gas=gas_params.max_fee_per_gas,
                max_priority_fee_per_gas=gas_params.max_priority_fee_per_gas,
                reason=reason,
                outcome=None,
            )
            existing_history.append(new_record.to_dict())

            cursor.execute(
                """
                UPDATE transactions
                SET current_tx_hash = ?,
                    gas_params_json = ?,
                    tx_hash_history = ?,
                    replacement_count = replacement_count + 1,
                    updated_at = CURRENT_TIMESTAMP
                WHERE tx_id = ?
                AND status = 'broadcast'
                """,
                (
                    new_tx_hash,
                    gas_params.to_json(),
                    json.dumps(existing_history),
                    str(tx_id),
                ),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated
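
tx_hash_history is an append-only JSON log kept inside the row: a replacement closes out the previous entry's outcome and appends a new entry carrying the replacement hash and gas numbers. A plain-dict illustration of how one fee bump mutates that structure (all values invented, fields reduced to the ones named above):

```python
import json

history = [
    {"tx_hash": "0xaaa", "nonce": 7, "reason": "initial", "outcome": None},
]

# Fee bump: close out the old entry, append the replacement at the same nonce.
history[-1]["outcome"] = "replaced"
history.append({"tx_hash": "0xbbb", "nonce": 7, "reason": "fee_bump", "outcome": None})

stored = json.dumps(history)  # this string is what lands in the tx_hash_history column
```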

    def set_tx_confirmed(
        self,
        tx_id: UUID,
        included_block: int,
    ) -> bool:
        """Mark transaction confirmed.

        Sets status=CONFIRMED, included_block, confirmed_at.
        Updates tx_hash_history with outcome.
        """
        conn = self._ensure_connected()
        now = datetime.now(timezone.utc)

        with self._lock:
            # Update history outcome
            cursor = conn.cursor()
            cursor.execute(
                "SELECT tx_hash_history FROM transactions WHERE tx_id = ? AND status = 'broadcast'",
                (str(tx_id),),
            )
            row = cursor.fetchone()
            if not row:
                cursor.close()
                return False

            existing_history = json.loads(row["tx_hash_history"]) if row["tx_hash_history"] else []
            if existing_history:
                existing_history[-1]["outcome"] = "confirmed"

            cursor.execute(
                """
                UPDATE transactions
                SET status = 'confirmed',
                    included_block = ?,
                    confirmed_at = ?,
                    tx_hash_history = ?,
                    updated_at = CURRENT_TIMESTAMP
                WHERE tx_id = ?
                AND status = 'broadcast'
                """,
                (
                    included_block,
                    now,
                    json.dumps(existing_history),
                    str(tx_id),
                ),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def set_tx_failed(
        self,
        tx_id: UUID,
        failure_type: FailureType,
        error_info: ErrorInfo | None = None,
    ) -> bool:
        """Mark transaction failed.

        Sets status=FAILED, failure_type, error_info_json.
        Updates tx_hash_history with outcome if applicable.
        """
        conn = self._ensure_connected()

        # Serialize error_info
        error_info_json = None
        if error_info:
            error_info_json = json.dumps({
                "error_type": error_info.error_type,
                "message": error_info.message,
                "code": error_info.code,
            })

        with self._lock:
            # Get and update history if broadcast
            cursor = conn.cursor()
            cursor.execute(
                "SELECT status, tx_hash_history FROM transactions WHERE tx_id = ? AND status IN ('created', 'broadcast')",
                (str(tx_id),),
            )
            row = cursor.fetchone()
            if not row:
                cursor.close()
                return False

            existing_history = json.loads(row["tx_hash_history"]) if row["tx_hash_history"] else []
            if existing_history and row["status"] == "broadcast":
                existing_history[-1]["outcome"] = "failed"

            cursor.execute(
                """
                UPDATE transactions
                SET status = 'failed',
                    failure_type = ?,
                    error_info_json = ?,
                    tx_hash_history = ?,
                    updated_at = CURRENT_TIMESTAMP
                WHERE tx_id = ?
                AND status IN ('created', 'broadcast')
                """,
                (
                    failure_type.value,
                    error_info_json,
                    json.dumps(existing_history) if existing_history else None,
                    str(tx_id),
                ),
            )
            updated = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return updated

    def release_stale_tx_claims(self, max_age_seconds: int) -> int:
        """Release claims older than threshold. 0 = release all claims."""
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            if max_age_seconds == 0:
                # Release ALL claims
                cursor.execute(
                    """
                    UPDATE transactions
                    SET claim_token = NULL, claimed_at = NULL,
                        updated_at = CURRENT_TIMESTAMP
                    WHERE status = 'created'
                    AND claim_token IS NOT NULL
                    """
                )
            else:
                cursor.execute(
                    """
                    UPDATE transactions
                    SET claim_token = NULL, claimed_at = NULL,
                        updated_at = CURRENT_TIMESTAMP
                    WHERE status = 'created'
                    AND claim_token IS NOT NULL
                    AND claimed_at < datetime('now', ? || ' seconds')
                    """,
                    (f"-{max_age_seconds}",),
                )
            count = cursor.rowcount
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return count

    def _row_to_transaction(self, row: dict[str, Any]) -> Transaction:
        """Convert database row to Transaction object."""
        tx_id = row["tx_id"]
        if isinstance(tx_id, str):
            tx_id = UUID(tx_id)

        # Parse failure_type if present
        failure_type = None
        if row.get("failure_type"):
            failure_type = FailureType(row["failure_type"])

        return Transaction(
            tx_id=tx_id,
            job_id=row["job_id"],
            chain_id=row["chain_id"],
            idempotency_key=row["idempotency_key"],
            signer_address=row["signer_address"],
            to_address=row["to_address"],
            data=row["data"],
            value_wei=row["value_wei"],
            min_confirmations=row["min_confirmations"],
            deadline_ts=row["deadline_ts"],
            status=TxStatus(row["status"]),
            failure_type=failure_type,
            current_tx_hash=row["current_tx_hash"],
            current_nonce=row["current_nonce"],
            replacement_count=row["replacement_count"],
            claim_token=row["claim_token"],
            claimed_at=row["claimed_at"],
            included_block=row["included_block"],
            confirmed_at=row["confirmed_at"],
            created_at=row["created_at"],
            updated_at=row["updated_at"],
            gas_params_json=row["gas_params_json"],
            broadcast_info_json=row["broadcast_info_json"],
            error_info_json=row["error_info_json"],
            tx_hash_history=row["tx_hash_history"],
        )

    # =========================================================================
    # ABI Cache Operations
    # =========================================================================

    def get_cached_abi(self, chain_id: int, address: str) -> ABICacheEntry | None:
        row = self.execute_one(
            "SELECT * FROM abi_cache WHERE chain_id = ? AND address = ?",
            (chain_id, address),
        )
        if not row:
            return None
        return ABICacheEntry(
            chain_id=row["chain_id"],
            address=row["address"],
            abi_json=row["abi_json"],
            source=row["source"],
            resolved_at=row["resolved_at"],
        )

    def set_cached_abi(
        self,
        chain_id: int,
        address: str,
        abi_json: str,
        source: str,
    ) -> None:
        self.execute(
            """
            INSERT INTO abi_cache (chain_id, address, abi_json, source)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(chain_id, address) DO UPDATE SET
                abi_json = excluded.abi_json,
                source = excluded.source,
                resolved_at = CURRENT_TIMESTAMP
            """,
            (chain_id, address, abi_json, source),
        )
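
Both cache tables use SQLite's UPSERT form, INSERT ... ON CONFLICT(...) DO UPDATE with the excluded pseudo-table, so a refresh overwrites a stale entry in one statement. A minimal runnable demonstration (stand-in schema; requires SQLite 3.24+, which current Python builds bundle):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE cache (chain_id INTEGER, address TEXT, abi_json TEXT, "
    "PRIMARY KEY (chain_id, address))"
)

def put(chain_id: int, address: str, abi_json: str) -> None:
    conn.execute(
        """
        INSERT INTO cache (chain_id, address, abi_json)
        VALUES (?, ?, ?)
        ON CONFLICT(chain_id, address) DO UPDATE SET
            abi_json = excluded.abi_json
        """,
        (chain_id, address, abi_json),
    )
    conn.commit()

put(1, "0xabc", "[]")
put(1, "0xabc", '[{"type": "event"}]')  # second call updates the same row in place
print(conn.execute("SELECT abi_json FROM cache").fetchall())
```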

    def clear_cached_abi(self, chain_id: int, address: str) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                "DELETE FROM abi_cache WHERE chain_id = ? AND address = ?",
                (chain_id, address),
            )
            deleted = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return deleted

    def cleanup_expired_abis(self, max_age_seconds: int) -> int:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                DELETE FROM abi_cache
                WHERE resolved_at < datetime('now', ? || ' seconds')
                """,
                (f"-{max_age_seconds}",),
            )
            count = cursor.rowcount
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return count

    # =========================================================================
    # Proxy Cache Operations
    # =========================================================================

    def get_cached_proxy(
        self, chain_id: int, proxy_address: str
    ) -> ProxyCacheEntry | None:
        row = self.execute_one(
            "SELECT * FROM proxy_cache WHERE chain_id = ? AND proxy_address = ?",
            (chain_id, proxy_address),
        )
        if not row:
            return None
        return ProxyCacheEntry(
            chain_id=row["chain_id"],
            proxy_address=row["proxy_address"],
            implementation_address=row["implementation_address"],
            resolved_at=row["resolved_at"],
        )

    def set_cached_proxy(
        self,
        chain_id: int,
        proxy_address: str,
        implementation_address: str,
    ) -> None:
        self.execute(
            """
            INSERT INTO proxy_cache (chain_id, proxy_address, implementation_address)
            VALUES (?, ?, ?)
            ON CONFLICT(chain_id, proxy_address) DO UPDATE SET
                implementation_address = excluded.implementation_address,
                resolved_at = CURRENT_TIMESTAMP
            """,
            (chain_id, proxy_address, implementation_address),
        )

    def clear_cached_proxy(self, chain_id: int, proxy_address: str) -> bool:
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                "DELETE FROM proxy_cache WHERE chain_id = ? AND proxy_address = ?",
                (chain_id, proxy_address),
            )
            deleted = cursor.rowcount > 0
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return deleted

    # =========================================================================
    # Cleanup & Maintenance
    # =========================================================================

    def cleanup_old_intents(
        self,
        older_than_days: int,
        statuses: list[str] | None = None,
    ) -> int:
        if statuses is None:
            statuses = ["confirmed", "failed", "abandoned"]

        conn = self._ensure_connected()
        placeholders = ",".join("?" * len(statuses))

        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                f"""
                DELETE FROM tx_intents
                WHERE status IN ({placeholders})
                AND created_at < datetime('now', ? || ' days')
                """,
                (*statuses, f"-{older_than_days}"),
            )
            count = cursor.rowcount
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return count
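
cleanup_old_intents combines the two idioms already shown above: an expanded IN (...) list over terminal statuses and a relative datetime cutoff, here expressed in days. A hypothetical maintenance call, assuming a db object exposing the removed methods and purely illustrative retention numbers:

```python
def run_retention(db) -> None:
    # Keep a week of terminal intents and a day of cached ABIs (numbers are examples).
    removed_intents = db.cleanup_old_intents(older_than_days=7)
    expired_abis = db.cleanup_expired_abis(max_age_seconds=24 * 3600)
    print(f"retention: {removed_intents} intents, {expired_abis} cached ABIs removed")
```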

    def get_database_stats(self) -> dict[str, Any]:
        """Get database statistics for health checks."""
        stats: dict[str, Any] = {"type": "sqlite", "path": self._database_path}

        # Count intents by status
        rows = self.execute_returning(
            "SELECT status, COUNT(*) as count FROM tx_intents GROUP BY status"
        )
        stats["intents_by_status"] = {row["status"]: row["count"] for row in rows}

        # Count total jobs
        row = self.execute_one("SELECT COUNT(*) as count FROM jobs")
        stats["total_jobs"] = row["count"] if row else 0

        # Count enabled jobs
        row = self.execute_one("SELECT COUNT(*) as count FROM jobs WHERE enabled = 1")
        stats["enabled_jobs"] = row["count"] if row else 0

        # Get block state
        rows = self.execute_returning("SELECT * FROM block_state")
        stats["block_states"] = [
            {
                "chain_id": row["chain_id"],
                "last_block": row["last_processed_block_number"],
            }
            for row in rows
        ]

        return stats
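
The stats payload is keyed the way a health check would consume it: a backend type, per-status intent counts, job counts, and per-chain block progress. A hypothetical consumer that flattens it (db is an assumed handle; only keys produced by the removed method are read):

```python
def health_summary(db) -> dict:
    """Condense get_database_stats() into a flat health payload."""
    stats = db.get_database_stats()
    by_status = stats.get("intents_by_status", {})
    return {
        "backend": stats.get("type"),
        "pending_intents": by_status.get("pending", 0),
        "claimed_intents": by_status.get("claimed", 0),
        "enabled_jobs": stats.get("enabled_jobs", 0),
    }
```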

    # =========================================================================
    # Reconciliation Operations
    # =========================================================================

    def clear_orphaned_claims(self, chain_id: int, older_than_minutes: int = 2) -> int:
        """Clear claim fields where status != 'claimed' and claim is stale."""
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE tx_intents
                SET claim_token = NULL,
                    claimed_at = NULL,
                    claimed_by = NULL,
                    updated_at = CURRENT_TIMESTAMP
                WHERE chain_id = ?
                AND status != 'claimed'
                AND claim_token IS NOT NULL
                AND claimed_at IS NOT NULL
                AND claimed_at < datetime('now', ? || ' minutes')
                """,
                (chain_id, f"-{older_than_minutes}"),
            )
            count = cursor.rowcount
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return count

    def release_orphaned_nonces(self, chain_id: int, older_than_minutes: int = 5) -> int:
        """Release nonces for terminal intents that are stale."""
        # SQLite doesn't support UPDATE...FROM, use subquery
        conn = self._ensure_connected()
        with self._lock:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE nonce_reservations
                SET status = 'released',
                    updated_at = CURRENT_TIMESTAMP
                WHERE chain_id = ?
                AND status = 'reserved'
                AND updated_at < datetime('now', ? || ' minutes')
                AND intent_id IN (
                    SELECT intent_id FROM tx_intents
                    WHERE status IN ('failed', 'abandoned', 'reverted')
                    AND updated_at < datetime('now', ? || ' minutes')
                )
                """,
                (chain_id, f"-{older_than_minutes}", f"-{older_than_minutes}"),
            )
            count = cursor.rowcount
            if not self._in_transaction:
                conn.commit()
            cursor.close()
            return count

    def count_pending_without_attempts(self, chain_id: int) -> int:
        """Count pending intents with no attempt records (integrity issue)."""
        result = self.execute_one(
            """
            SELECT COUNT(*) as count
            FROM tx_intents ti
            LEFT JOIN tx_attempts ta ON ti.intent_id = ta.intent_id
            WHERE ti.chain_id = ?
            AND ti.status = 'pending'
            AND ta.attempt_id IS NULL
            """,
            (chain_id,),
        )
        return result["count"] if result else 0

    def count_stale_claims(self, chain_id: int, older_than_minutes: int = 10) -> int:
        """Count intents stuck in CLAIMED for too long."""
        result = self.execute_one(
            """
            SELECT COUNT(*) as count
            FROM tx_intents
            WHERE chain_id = ?
            AND status = 'claimed'
            AND claimed_at IS NOT NULL
            AND claimed_at < datetime('now', ? || ' minutes')
            """,
            (chain_id, f"-{older_than_minutes}"),
        )
        return result["count"] if result else 0

    # =========================================================================
    # Invariant Queries (Phase 2)
    # =========================================================================

    def count_stuck_claimed(self, chain_id: int, older_than_minutes: int = 10) -> int:
        """Count intents stuck in CLAIMED status for too long."""
        conn = self._ensure_connected()
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT COUNT(*) as count
            FROM tx_intents
            WHERE chain_id = ?
            AND status = 'claimed'
            AND datetime(claimed_at) < datetime('now', ? || ' minutes')
            """,
            (chain_id, -older_than_minutes),
        )
        row = cursor.fetchone()
        return row[0] if row else 0

    def count_orphaned_claims(self, chain_id: int) -> int:
        """Count intents with claim_token set but status != claimed."""
        conn = self._ensure_connected()
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT COUNT(*) as count
            FROM tx_intents
            WHERE chain_id = ?
            AND status != 'claimed'
            AND claim_token IS NOT NULL
            """,
            (chain_id,),
        )
        row = cursor.fetchone()
        return row[0] if row else 0

    def count_orphaned_nonces(self, chain_id: int) -> int:
        """Count reserved/in_flight nonces for failed/abandoned intents."""
        conn = self._ensure_connected()
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT COUNT(*) as count
            FROM nonce_reservations nr
            JOIN tx_intents ti ON nr.intent_id = ti.intent_id
            WHERE nr.chain_id = ?
            AND nr.status IN ('reserved', 'in_flight')
            AND ti.status IN ('failed', 'abandoned', 'reverted')
            """,
            (chain_id,),
        )
        row = cursor.fetchone()
        return row[0] if row else 0

    def get_oldest_nonce_gap_age_seconds(self, chain_id: int) -> float:
        """Get age in seconds of the oldest nonce gap.

        Anchors from signers (small table) for efficiency.
        Returns 0 if no gaps or if chain nonce not synced.
        """
        conn = self._ensure_connected()
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT COALESCE(
                (julianday('now') - julianday(datetime(MIN(nr.created_at)))) * 86400,
                0
            ) AS oldest_gap_seconds
            FROM signers s
            JOIN nonce_reservations nr
                ON nr.chain_id = s.chain_id
                AND nr.signer_address = s.signer_address
            WHERE s.chain_id = ?
            AND s.last_synced_chain_nonce IS NOT NULL
            AND nr.status IN ('reserved', 'in_flight')
            AND nr.nonce < s.last_synced_chain_nonce
            """,
            (chain_id,),
        )
        row = cursor.fetchone()
        return float(row[0]) if row else 0.0
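
The gap-age query turns a timestamp difference into seconds with julianday: julianday returns fractional days, so multiplying the difference by 86400 gives seconds. A standalone check of that arithmetic:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
# One hour ago, expressed through SQLite itself, should come back as roughly 3600 seconds.
age = conn.execute(
    "SELECT (julianday('now') - julianday(datetime('now', '-3600 seconds'))) * 86400"
).fetchone()[0]
print(round(age))  # ~3600
```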
|