brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +2 -0
- brawny/_context.py +5 -5
- brawny/_rpc/__init__.py +36 -12
- brawny/_rpc/broadcast.py +14 -13
- brawny/_rpc/caller.py +243 -0
- brawny/_rpc/client.py +539 -0
- brawny/_rpc/clients.py +11 -11
- brawny/_rpc/context.py +23 -0
- brawny/_rpc/errors.py +465 -31
- brawny/_rpc/gas.py +7 -6
- brawny/_rpc/pool.py +18 -0
- brawny/_rpc/retry.py +266 -0
- brawny/_rpc/retry_policy.py +81 -0
- brawny/accounts.py +28 -9
- brawny/alerts/__init__.py +15 -18
- brawny/alerts/abi_resolver.py +212 -36
- brawny/alerts/base.py +2 -2
- brawny/alerts/contracts.py +77 -10
- brawny/alerts/errors.py +30 -3
- brawny/alerts/events.py +38 -5
- brawny/alerts/health.py +19 -13
- brawny/alerts/send.py +513 -55
- brawny/api.py +39 -11
- brawny/assets/AGENTS.md +325 -0
- brawny/async_runtime.py +48 -0
- brawny/chain.py +3 -3
- brawny/cli/commands/__init__.py +2 -0
- brawny/cli/commands/console.py +69 -19
- brawny/cli/commands/contract.py +2 -2
- brawny/cli/commands/controls.py +121 -0
- brawny/cli/commands/health.py +2 -2
- brawny/cli/commands/job_dev.py +6 -5
- brawny/cli/commands/jobs.py +99 -2
- brawny/cli/commands/maintenance.py +13 -29
- brawny/cli/commands/migrate.py +1 -0
- brawny/cli/commands/run.py +10 -3
- brawny/cli/commands/script.py +8 -3
- brawny/cli/commands/signer.py +143 -26
- brawny/cli/helpers.py +0 -3
- brawny/cli_templates.py +25 -349
- brawny/config/__init__.py +4 -1
- brawny/config/models.py +43 -57
- brawny/config/parser.py +268 -57
- brawny/config/validation.py +52 -15
- brawny/daemon/context.py +4 -2
- brawny/daemon/core.py +185 -63
- brawny/daemon/loops.py +166 -98
- brawny/daemon/supervisor.py +261 -0
- brawny/db/__init__.py +14 -26
- brawny/db/base.py +248 -151
- brawny/db/global_cache.py +11 -1
- brawny/db/migrate.py +175 -28
- brawny/db/migrations/001_init.sql +4 -3
- brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
- brawny/db/migrations/011_add_job_logs.sql +1 -2
- brawny/db/migrations/012_add_claimed_by.sql +2 -2
- brawny/db/migrations/013_attempt_unique.sql +10 -0
- brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
- brawny/db/migrations/015_add_signer_alias.sql +14 -0
- brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
- brawny/db/migrations/017_add_job_drain.sql +6 -0
- brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
- brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
- brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
- brawny/db/ops/__init__.py +3 -25
- brawny/db/ops/logs.py +1 -2
- brawny/db/queries.py +47 -91
- brawny/db/serialized.py +65 -0
- brawny/db/sqlite/__init__.py +1001 -0
- brawny/db/sqlite/connection.py +231 -0
- brawny/db/sqlite/execute.py +116 -0
- brawny/db/sqlite/mappers.py +190 -0
- brawny/db/sqlite/repos/attempts.py +372 -0
- brawny/db/sqlite/repos/block_state.py +102 -0
- brawny/db/sqlite/repos/cache.py +104 -0
- brawny/db/sqlite/repos/intents.py +1021 -0
- brawny/db/sqlite/repos/jobs.py +200 -0
- brawny/db/sqlite/repos/maintenance.py +182 -0
- brawny/db/sqlite/repos/signers_nonces.py +566 -0
- brawny/db/sqlite/tx.py +119 -0
- brawny/http.py +194 -0
- brawny/invariants.py +11 -24
- brawny/jobs/base.py +8 -0
- brawny/jobs/job_validation.py +2 -1
- brawny/keystore.py +83 -7
- brawny/lifecycle.py +64 -12
- brawny/logging.py +0 -2
- brawny/metrics.py +84 -12
- brawny/model/contexts.py +111 -9
- brawny/model/enums.py +1 -0
- brawny/model/errors.py +18 -0
- brawny/model/types.py +47 -131
- brawny/network_guard.py +133 -0
- brawny/networks/__init__.py +5 -5
- brawny/networks/config.py +1 -7
- brawny/networks/manager.py +14 -11
- brawny/runtime_controls.py +74 -0
- brawny/scheduler/poller.py +11 -7
- brawny/scheduler/reorg.py +95 -39
- brawny/scheduler/runner.py +442 -168
- brawny/scheduler/shutdown.py +3 -3
- brawny/script_tx.py +3 -3
- brawny/telegram.py +53 -7
- brawny/testing.py +1 -0
- brawny/timeout.py +38 -0
- brawny/tx/executor.py +922 -308
- brawny/tx/intent.py +54 -16
- brawny/tx/monitor.py +31 -12
- brawny/tx/nonce.py +212 -90
- brawny/tx/replacement.py +69 -18
- brawny/tx/retry_policy.py +24 -0
- brawny/tx/stages/types.py +75 -0
- brawny/types.py +18 -0
- brawny/utils.py +41 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
- brawny-0.1.22.dist-info/RECORD +163 -0
- brawny/_rpc/manager.py +0 -982
- brawny/_rpc/selector.py +0 -156
- brawny/db/base_new.py +0 -165
- brawny/db/mappers.py +0 -182
- brawny/db/migrations/008_add_transactions.sql +0 -72
- brawny/db/ops/attempts.py +0 -108
- brawny/db/ops/blocks.py +0 -83
- brawny/db/ops/cache.py +0 -93
- brawny/db/ops/intents.py +0 -296
- brawny/db/ops/jobs.py +0 -110
- brawny/db/ops/nonces.py +0 -322
- brawny/db/postgres.py +0 -2535
- brawny/db/postgres_new.py +0 -196
- brawny/db/sqlite.py +0 -2733
- brawny/db/sqlite_new.py +0 -191
- brawny-0.1.13.dist-info/RECORD +0 -141
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0
brawny/db/sqlite/__init__.py (new file)
@@ -0,0 +1,1001 @@
+"""SQLite database implementation for brawny.
+
+SQLite is the supported production backend for this deployment model.
+
+Key characteristics:
+- Uses IMMEDIATE transaction mode for nonce reservation (app-level locking)
+- Uses deterministic ordering with secondary sort for intent claiming
+- No connection pooling (single connection)
+"""
+
+from __future__ import annotations
+
+import sqlite3
+import threading
+from contextlib import contextmanager
+from datetime import datetime
+from typing import Any, Iterator
+from uuid import UUID
+
+from brawny.db.base import (
+    ABICacheEntry,
+    BlockHashEntry,
+    BlockState,
+    Database,
+    IsolationLevel,
+    ProxyCacheEntry,
+)
+from brawny.db.circuit_breaker import DatabaseCircuitBreaker
+from brawny.logging import get_logger
+from brawny.model.errors import DatabaseError
+from brawny.model.types import (
+    JobConfig,
+    NonceReservation,
+    RuntimeControl,
+    SignerState,
+    TxAttempt,
+    TxIntent,
+)
+from brawny.types import ClaimedIntent
+from brawny.config.validation import InvalidEndpointError, canonicalize_endpoints
+from . import connection, execute as sqlite_execute, mappers, tx
+from .repos import attempts as attempts_repo
+from .repos import block_state as block_state_repo
+from .repos import cache as cache_repo
+from .repos import intents as intents_repo
+from .repos import jobs as jobs_repo
+from .repos import maintenance as maintenance_repo
+from .repos import signers_nonces as signers_nonces_repo
+from .connection import fcntl
+from .tx import SQLiteBeginMode, _resolve_begin_cmd
+
+
+# Register adapters
+sqlite3.register_adapter(datetime, mappers.adapt_datetime)
+sqlite3.register_converter("TIMESTAMP", mappers.convert_datetime)
+
+logger = get_logger(__name__)
+_warned_generic_isolation = False
+
+
+class SQLiteDatabase(Database):
+    """SQLite implementation of the Database interface.
+
+    Thread-safety: Uses a per-thread connection model with a shared lock
+    for transaction isolation.
+    """
+
+    def __init__(
+        self,
+        database_path: str,
+        circuit_breaker_failures: int = 5,
+        circuit_breaker_seconds: int = 30,
+    ) -> None:
+        """Initialize SQLite database.
+
+        Args:
+            database_path: Path to SQLite database file (or :memory:)
+            circuit_breaker_failures: Failures before opening breaker
+            circuit_breaker_seconds: Seconds to keep breaker open
+        """
+        # Remove sqlite:/// prefix if present
+        if database_path.startswith("sqlite:///"):
+            database_path = database_path[10:]
+
+        self._database_path = database_path
+        self._thread_local = threading.local()
+        self._conns: set[sqlite3.Connection] = set()
+        self._conns_lock = threading.RLock()
+        self._connected = False
+        self._closed = False
+        self._conn_generation = 0
+        self._memory_owner_thread_id: int | None = None
+        self._lock_handle: Any | None = None
+        self._lock_path: Path | None = None
+        self._lock = threading.RLock()
+        self._tx_depth = 0
+        self._tx_failed = False
+        self._version_checked = False
+        self._circuit_breaker = DatabaseCircuitBreaker(
+            failure_threshold=circuit_breaker_failures,
+            open_seconds=circuit_breaker_seconds,
+            backend="sqlite",
+        )
+
+    @contextmanager
+    def _locked(self) -> Iterator[None]:
+        with self._lock:
+            yield
+
+    @property
+    def dialect(self) -> str:
+        """Return dialect name for query selection."""
+        return "sqlite"
+
+    def connect(self) -> None:
+        """Establish database connection."""
+        connection.connect(self)
+
+    def close(self) -> None:
+        """Close database connection."""
+        connection.close(self)
+
+    def is_connected(self) -> bool:
+        """Check if database is connected."""
+        return connection.is_connected(self)
+
+    def _ensure_connected(self) -> sqlite3.Connection:
+        """Ensure connection exists and return it."""
+        with self._locked():
+            return connection.ensure_connected(self)
+
+    def _normalize_address(self, address: str) -> str:
+        return mappers.canonicalize_address(address)
+
+    def _canonicalize_endpoints(self, endpoints: list[str]) -> list[str]:
+        try:
+            return canonicalize_endpoints(endpoints)
+        except InvalidEndpointError as exc:
+            raise DatabaseError(f"Invalid endpoint(s): {exc}") from exc
+
+    def _begin_transaction(self, conn: sqlite3.Connection, begin_cmd: str) -> None:
+        tx.begin_transaction(self, conn, begin_cmd)
+
+    def _commit_transaction(self, conn: sqlite3.Connection) -> None:
+        tx.commit_transaction(self, conn)
+
+    def _rollback_transaction(self, conn: sqlite3.Connection) -> None:
+        tx.rollback_transaction(self, conn)
+
+    def transaction(
+        self, isolation_level: IsolationLevel | SQLiteBeginMode | None = None
+    ) -> Iterator[None]:
+        return tx.transaction(self, isolation_level)
+
+    def execute(
+        self,
+        query: str,
+        params: tuple[Any, ...] | dict[str, Any] | None = None,
+    ) -> None:
+        """Execute a query without returning results."""
+        sqlite_execute.execute(self, query, params)
+
+    def execute_returning(
+        self,
+        query: str,
+        params: tuple[Any, ...] | dict[str, Any] | None = None,
+    ) -> list[dict[str, Any]]:
+        """Execute a query and return all results as dicts."""
+        return sqlite_execute.execute_returning(self, query, params)
+
+    def execute_one(
+        self,
+        query: str,
+        params: tuple[Any, ...] | dict[str, Any] | None = None,
+    ) -> dict[str, Any] | None:
+        """Execute a query and return a single result or None."""
+        return sqlite_execute.execute_one(self, query, params)
+
+    def execute_returning_rowcount(
+        self,
+        query: str,
+        params: tuple[Any, ...] | dict[str, Any] | None = None,
+    ) -> int:
+        """Execute SQL and return rowcount."""
+        return sqlite_execute.execute_returning_rowcount(self, query, params)
+
+    # =========================================================================
+    # Block State Operations
+    # =========================================================================
+
+    def get_block_state(self, chain_id: int) -> BlockState | None:
+        return block_state_repo.get_block_state(self, chain_id)
+
+    def upsert_block_state(
+        self,
+        chain_id: int,
+        block_number: int,
+        block_hash: str,
+    ) -> None:
+        block_state_repo.upsert_block_state(self, chain_id, block_number, block_hash)
+
+    def get_block_hash_at_height(
+        self, chain_id: int, block_number: int
+    ) -> str | None:
+        return block_state_repo.get_block_hash_at_height(self, chain_id, block_number)
+
+    def insert_block_hash(
+        self, chain_id: int, block_number: int, block_hash: str
+    ) -> None:
+        block_state_repo.insert_block_hash(self, chain_id, block_number, block_hash)
+
+    def delete_block_hashes_above(self, chain_id: int, block_number: int) -> int:
+        return block_state_repo.delete_block_hashes_above(self, chain_id, block_number)
+
+    def delete_block_hash_at_height(self, chain_id: int, block_number: int) -> bool:
+        return block_state_repo.delete_block_hash_at_height(self, chain_id, block_number)
+
+    def cleanup_old_block_hashes(self, chain_id: int, keep_count: int) -> int:
+        return block_state_repo.cleanup_old_block_hashes(self, chain_id, keep_count)
+
+    def get_oldest_block_in_history(self, chain_id: int) -> int | None:
+        return block_state_repo.get_oldest_block_in_history(self, chain_id)
+
+    def get_latest_block_in_history(self, chain_id: int) -> int | None:
+        return block_state_repo.get_latest_block_in_history(self, chain_id)
+
+    def get_inflight_intent_count(
+        self, chain_id: int, job_id: str, signer_address: str
+    ) -> int:
+        return intents_repo.get_inflight_intent_count(
+            self, chain_id, job_id, signer_address
+        )
+
+    def get_inflight_intents_for_scope(
+        self,
+        chain_id: int,
+        job_id: str,
+        signer_address: str,
+        to_address: str,
+    ) -> list[dict[str, Any]]:
+        return intents_repo.get_inflight_intents_for_scope(
+            self, chain_id, job_id, signer_address, to_address
+        )
+
+    # =========================================================================
+    # Job Operations
+    # =========================================================================
+
+    def get_job(self, job_id: str) -> JobConfig | None:
+        return jobs_repo.get_job(self, job_id)
+
+    def get_enabled_jobs(self) -> list[JobConfig]:
+        return jobs_repo.get_enabled_jobs(self)
+
+    def list_all_jobs(self) -> list[JobConfig]:
+        return jobs_repo.list_all_jobs(self)
+
+    def upsert_job(
+        self,
+        job_id: str,
+        job_name: str,
+        check_interval_blocks: int,
+        enabled: bool = True,
+    ) -> None:
+        jobs_repo.upsert_job(self, job_id, job_name, check_interval_blocks, enabled)
+
+    def update_job_checked(
+        self, job_id: str, block_number: int, triggered: bool = False
+    ) -> None:
+        jobs_repo.update_job_checked(self, job_id, block_number, triggered)
+
+    def set_job_enabled(self, job_id: str, enabled: bool) -> bool:
+        return jobs_repo.set_job_enabled(self, job_id, enabled)
+
+    def set_job_drain(
+        self,
+        job_id: str,
+        drain_until: datetime,
+        reason: str | None = None,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return jobs_repo.set_job_drain(self, job_id, drain_until, reason, actor, source)
+
+    def clear_job_drain(
+        self,
+        job_id: str,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return jobs_repo.clear_job_drain(self, job_id, actor, source)
+
+    def delete_job(self, job_id: str) -> bool:
+        return jobs_repo.delete_job(self, job_id)
+
+    def get_job_kv(self, job_id: str, key: str) -> Any | None:
+        return jobs_repo.get_job_kv(self, job_id, key)
+
+    def set_job_kv(self, job_id: str, key: str, value: Any) -> None:
+        jobs_repo.set_job_kv(self, job_id, key, value)
+
+    def delete_job_kv(self, job_id: str, key: str) -> bool:
+        return jobs_repo.delete_job_kv(self, job_id, key)
+
+    # =========================================================================
+    # Signer & Nonce Operations
+    # =========================================================================
+
+    def get_signer_state(self, chain_id: int, address: str) -> SignerState | None:
+        return signers_nonces_repo.get_signer_state(self, chain_id, address)
+
+    def get_all_signers(self, chain_id: int) -> list[SignerState]:
+        return signers_nonces_repo.get_all_signers(self, chain_id)
+
+    def upsert_signer(
+        self,
+        chain_id: int,
+        address: str,
+        next_nonce: int,
+        last_synced_chain_nonce: int | None = None,
+    ) -> None:
+        signers_nonces_repo.upsert_signer(
+            self, chain_id, address, next_nonce, last_synced_chain_nonce
+        )
+
+    def update_signer_next_nonce(
+        self, chain_id: int, address: str, next_nonce: int
+    ) -> None:
+        signers_nonces_repo.update_signer_next_nonce(
+            self, chain_id, address, next_nonce
+        )
+
+    def update_signer_chain_nonce(
+        self, chain_id: int, address: str, chain_nonce: int
+    ) -> None:
+        signers_nonces_repo.update_signer_chain_nonce(
+            self, chain_id, address, chain_nonce
+        )
+
+    def set_gap_started_at(
+        self, chain_id: int, address: str, started_at: datetime
+    ) -> None:
+        """Record when gap blocking started for a signer."""
+        signers_nonces_repo.set_gap_started_at(self, chain_id, address, started_at)
+
+    def clear_gap_started_at(self, chain_id: int, address: str) -> None:
+        """Clear gap tracking (gap resolved or force reset)."""
+        signers_nonces_repo.clear_gap_started_at(self, chain_id, address)
+
+    def set_signer_quarantined(
+        self,
+        chain_id: int,
+        address: str,
+        reason: str,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return signers_nonces_repo.set_signer_quarantined(
+            self, chain_id, address, reason, actor, source
+        )
+
+    def clear_signer_quarantined(
+        self,
+        chain_id: int,
+        address: str,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return signers_nonces_repo.clear_signer_quarantined(
+            self, chain_id, address, actor, source
+        )
+
+    def set_replacements_paused(
+        self,
+        chain_id: int,
+        address: str,
+        paused: bool,
+        reason: str | None = None,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return signers_nonces_repo.set_replacements_paused(
+            self, chain_id, address, paused, reason, actor, source
+        )
+
+    # =========================================================================
+    # Runtime Controls (containment with TTL)
+    # =========================================================================
+
+    def set_runtime_control(
+        self,
+        control: str,
+        active: bool,
+        expires_at: datetime | None,
+        reason: str | None,
+        actor: str | None,
+        mode: str,
+    ) -> RuntimeControl:
+        return signers_nonces_repo.set_runtime_control(
+            self, control, active, expires_at, reason, actor, mode
+        )
+
+    def get_runtime_control(self, control: str) -> RuntimeControl | None:
+        return signers_nonces_repo.get_runtime_control(self, control)
+
+    def list_runtime_controls(self) -> list[RuntimeControl]:
+        return signers_nonces_repo.list_runtime_controls(self)
+
+    def record_nonce_reset_audit(
+        self,
+        chain_id: int,
+        signer_address: str,
+        old_next_nonce: int | None,
+        new_next_nonce: int,
+        released_reservations: int,
+        source: str,
+        reason: str | None,
+    ) -> None:
+        """Record a nonce force reset in the audit table."""
+        signers_nonces_repo.record_nonce_reset_audit(
+            self,
+            chain_id,
+            signer_address,
+            old_next_nonce,
+            new_next_nonce,
+            released_reservations,
+            source,
+            reason,
+        )
+
+    def record_mutation_audit(
+        self,
+        entity_type: str,
+        entity_id: str,
+        action: str,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+        metadata: dict[str, Any] | None = None,
+    ) -> None:
+        signers_nonces_repo.record_mutation_audit(
+            self, entity_type, entity_id, action, actor, reason, source, metadata
+        )
+
+    def get_signer_by_alias(self, chain_id: int, alias: str) -> SignerState | None:
+        """Get signer by alias. Returns None if not found."""
+        return signers_nonces_repo.get_signer_by_alias(self, chain_id, alias)
+
+    def reserve_nonce_atomic(
+        self,
+        chain_id: int,
+        address: str,
+        chain_nonce: int | None,
+        intent_id: UUID | None = None,
+    ) -> int:
+        return signers_nonces_repo.reserve_nonce_atomic(
+            self, chain_id, address, chain_nonce, intent_id
+        )
+
+    def get_nonce_reservation(
+        self, chain_id: int, address: str, nonce: int
+    ) -> NonceReservation | None:
+        return signers_nonces_repo.get_nonce_reservation(self, chain_id, address, nonce)
+
+    def get_reservations_for_signer(
+        self, chain_id: int, address: str, status: str | None = None
+    ) -> list[NonceReservation]:
+        return signers_nonces_repo.get_reservations_for_signer(
+            self, chain_id, address, status
+        )
+
+    def get_reservations_below_nonce(
+        self, chain_id: int, address: str, nonce: int
+    ) -> list[NonceReservation]:
+        return signers_nonces_repo.get_reservations_below_nonce(
+            self, chain_id, address, nonce
+        )
+
+    def create_nonce_reservation(
+        self,
+        chain_id: int,
+        address: str,
+        nonce: int,
+        status: str = "reserved",
+        intent_id: UUID | None = None,
+    ) -> NonceReservation:
+        return signers_nonces_repo.create_nonce_reservation(
+            self, chain_id, address, nonce, status, intent_id
+        )
+
+    def update_nonce_reservation_status(
+        self,
+        chain_id: int,
+        address: str,
+        nonce: int,
+        status: str,
+        intent_id: UUID | None = None,
+    ) -> bool:
+        return signers_nonces_repo.update_nonce_reservation_status(
+            self, chain_id, address, nonce, status, intent_id
+        )
+
+    def release_nonce_reservation(
+        self,
+        chain_id: int,
+        address: str,
+        nonce: int,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return signers_nonces_repo.release_nonce_reservation(
+            self, chain_id, address, nonce, actor, reason, source
+        )
+
+    def cleanup_orphaned_nonces(
+        self, chain_id: int, older_than_hours: int = 24
+    ) -> int:
+        return signers_nonces_repo.cleanup_orphaned_nonces(
+            self, chain_id, older_than_hours
+        )
+
+    # =========================================================================
+    # Intent Operations
+    # =========================================================================
+
+    def create_intent(
+        self,
+        intent_id: UUID,
+        job_id: str,
+        chain_id: int,
+        signer_address: str,
+        idempotency_key: str,
+        to_address: str,
+        data: str | None,
+        value_wei: str,
+        gas_limit: int | None,
+        max_fee_per_gas: str | None,
+        max_priority_fee_per_gas: str | None,
+        min_confirmations: int,
+        deadline_ts: datetime | None,
+        signer_alias: str | None = None,
+        broadcast_group: str | None = None,
+        broadcast_endpoints: list[str] | None = None,
+        metadata: dict | None = None,
+    ) -> TxIntent | None:
+        return intents_repo.create_intent(
+            self,
+            intent_id,
+            job_id,
+            chain_id,
+            signer_address,
+            idempotency_key,
+            to_address,
+            data,
+            value_wei,
+            gas_limit,
+            max_fee_per_gas,
+            max_priority_fee_per_gas,
+            min_confirmations,
+            deadline_ts,
+            signer_alias,
+            broadcast_group,
+            broadcast_endpoints,
+            metadata,
+        )
+
+    def get_intent(self, intent_id: UUID) -> TxIntent | None:
+        return intents_repo.get_intent(self, intent_id)
+
+    def get_intent_by_idempotency_key(
+        self,
+        chain_id: int,
+        signer_address: str,
+        idempotency_key: str,
+    ) -> TxIntent | None:
+        return intents_repo.get_intent_by_idempotency_key(
+            self, chain_id, signer_address, idempotency_key
+        )
+
+    def get_intents_by_status(
+        self,
+        status: str | list[str],
+        chain_id: int | None = None,
+        job_id: str | None = None,
+        limit: int = 100,
+    ) -> list[TxIntent]:
+        return intents_repo.get_intents_by_status(
+            self, status, chain_id, job_id, limit
+        )
+
+    def list_intents_filtered(
+        self,
+        status: str | None = None,
+        job_id: str | None = None,
+        limit: int = 50,
+    ) -> list[dict[str, Any]]:
+        return intents_repo.list_intents_filtered(self, status, job_id, limit)
+
+    def get_active_intent_count(self, job_id: str, chain_id: int | None = None) -> int:
+        return intents_repo.get_active_intent_count(self, job_id, chain_id)
+
+    def get_pending_intent_count(self, chain_id: int | None = None) -> int:
+        return intents_repo.get_pending_intent_count(self, chain_id)
+
+    def get_backing_off_intent_count(self, chain_id: int | None = None) -> int:
+        return intents_repo.get_backing_off_intent_count(self, chain_id)
+
+    def get_oldest_pending_intent_age(self, chain_id: int) -> float | None:
+        return intents_repo.get_oldest_pending_intent_age(self, chain_id)
+
+    def list_intent_inconsistencies(
+        self,
+        max_age_seconds: int,
+        limit: int = 100,
+        chain_id: int | None = None,
+    ) -> list[dict[str, Any]]:
+        return intents_repo.list_intent_inconsistencies(
+            self, max_age_seconds, limit, chain_id
+        )
+
+    def list_sending_intents_older_than(
+        self,
+        max_age_seconds: int,
+        limit: int = 100,
+        chain_id: int | None = None,
+    ) -> list[TxIntent]:
+        return intents_repo.list_sending_intents_older_than(
+            self, max_age_seconds, limit, chain_id
+        )
+
+    def list_claimed_intents_older_than(
+        self,
+        max_age_seconds: int,
+        limit: int = 100,
+        chain_id: int | None = None,
+    ) -> list[TxIntent]:
+        return intents_repo.list_claimed_intents_older_than(
+            self, max_age_seconds, limit, chain_id
+        )
+
+    def claim_next_intent(
+        self,
+        claim_token: str,
+        claimed_by: str | None = None,
+        lease_seconds: int | None = None,
+    ) -> ClaimedIntent | None:
+        return intents_repo.claim_next_intent(self, claim_token, claimed_by, lease_seconds)
+
+    def update_intent_status(
+        self,
+        intent_id: UUID,
+        status: str,
+        claim_token: str | None = None,
+    ) -> bool:
+        return intents_repo.update_intent_status(self, intent_id, status, claim_token)
+
+    def update_intent_status_if(
+        self,
+        intent_id: UUID,
+        status: str,
+        expected_status: str | list[str],
+    ) -> bool:
+        return intents_repo.update_intent_status_if(
+            self, intent_id, status, expected_status
+        )
+
+    def transition_intent_status(
+        self,
+        intent_id: UUID,
+        from_statuses: list[str],
+        to_status: str,
+    ) -> tuple[bool, str | None]:
+        return intents_repo.transition_intent_status_immediate(
+            self, intent_id, from_statuses, to_status
+        )
+
+    def update_intent_signer(self, intent_id: UUID, signer_address: str) -> bool:
+        return intents_repo.update_intent_signer(self, intent_id, signer_address)
+
+    def release_intent_claim(self, intent_id: UUID) -> bool:
+        return intents_repo.release_intent_claim(self, intent_id)
+
+    def release_intent_claim_if_token(self, intent_id: UUID, claim_token: str) -> bool:
+        return intents_repo.release_intent_claim_if_token(self, intent_id, claim_token)
+
+    def release_claim_if_token_and_no_attempts(
+        self, intent_id: UUID, claim_token: str
+    ) -> bool:
+        return intents_repo.release_claim_if_token_and_no_attempts(
+            self, intent_id, claim_token
+        )
+
+    def clear_intent_claim(self, intent_id: UUID) -> bool:
+        return intents_repo.clear_intent_claim(self, intent_id)
+
+    def set_intent_retry_after(
+        self, intent_id: UUID, retry_after: datetime | None
+    ) -> bool:
+        return intents_repo.set_intent_retry_after(self, intent_id, retry_after)
+
+    def increment_intent_retry_count(self, intent_id: UUID) -> int:
+        return intents_repo.increment_intent_retry_count(self, intent_id)
+
+    def should_create_intent(
+        self,
+        cooldown_key: str,
+        now: int,
+        cooldown_seconds: int,
+    ) -> tuple[bool, int | None]:
+        return intents_repo.should_create_intent(
+            self, cooldown_key, now, cooldown_seconds
+        )
+
+    def prune_job_cooldowns(self, older_than_days: int) -> int:
+        return intents_repo.prune_job_cooldowns(self, older_than_days)
+
+    def requeue_expired_claims_no_attempts(
+        self,
+        limit: int,
+        grace_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        return intents_repo.requeue_expired_claims_no_attempts(
+            self, limit, grace_seconds, chain_id
+        )
+
+    def count_expired_claims_with_attempts(
+        self,
+        limit: int,
+        grace_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        return intents_repo.count_expired_claims_with_attempts(
+            self, limit, grace_seconds, chain_id
+        )
+
+    def requeue_missing_lease_claims_no_attempts(
+        self,
+        limit: int,
+        cutoff_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        return intents_repo.requeue_missing_lease_claims_no_attempts(
+            self, limit, cutoff_seconds, chain_id
+        )
+
+    def count_missing_lease_claims_with_attempts(
+        self,
+        limit: int,
+        cutoff_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        return intents_repo.count_missing_lease_claims_with_attempts(
+            self, limit, cutoff_seconds, chain_id
+        )
+
+    def abandon_intent(self, intent_id: UUID) -> bool:
+        return intents_repo.abandon_intent(self, intent_id)
+
+    def get_pending_intents_for_signer(
+        self, chain_id: int, address: str
+    ) -> list[TxIntent]:
+        return intents_repo.get_pending_intents_for_signer(self, chain_id, address)
+
+    def bind_broadcast_endpoints(
+        self,
+        intent_id: UUID,
+        group_name: str | None,
+        endpoints: list[str],
+    ) -> tuple[str | None, list[str]]:
+        return intents_repo.bind_broadcast_endpoints(
+            self, intent_id, group_name, endpoints
+        )
+
+    def get_broadcast_binding(
+        self, intent_id: UUID
+    ) -> tuple[str | None, list[str]] | None:
+        return intents_repo.get_broadcast_binding(self, intent_id)
+
+    # =========================================================================
+    # Attempt Operations
+    # =========================================================================
+
+    def create_attempt(
+        self,
+        attempt_id: UUID,
+        intent_id: UUID,
+        nonce: int,
+        gas_params_json: str,
+        status: str = "pending_send",
+        tx_hash: str | None = None,
+        replaces_attempt_id: UUID | None = None,
+        broadcast_group: str | None = None,
+        endpoint_url: str | None = None,
+        binding: tuple[str | None, list[str]] | None = None,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+    ) -> TxAttempt:
+        return attempts_repo.create_attempt(
+            self,
+            attempt_id,
+            intent_id,
+            nonce,
+            gas_params_json,
+            status,
+            tx_hash,
+            replaces_attempt_id,
+            broadcast_group,
+            endpoint_url,
+            binding,
+            actor,
+            reason,
+            source,
+        )
+
+    def create_attempt_once(
+        self,
+        attempt_id: UUID,
+        intent_id: UUID,
+        nonce: int,
+        gas_params_json: str,
+        status: str = "pending_send",
+        tx_hash: str | None = None,
+        replaces_attempt_id: UUID | None = None,
+        broadcast_group: str | None = None,
+        endpoint_url: str | None = None,
+        binding: tuple[str | None, list[str]] | None = None,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+    ) -> TxAttempt:
+        return attempts_repo.create_attempt_once(
+            self,
+            attempt_id,
+            intent_id,
+            nonce,
+            gas_params_json,
+            status,
+            tx_hash,
+            replaces_attempt_id,
+            broadcast_group,
+            endpoint_url,
+            binding,
+            actor,
+            reason,
+            source,
+        )
+
+    def require_bound_and_attempt(
+        self,
+        intent_id: UUID,
+        nonce: int,
+        endpoints: list[str],
+    ) -> None:
+        attempts_repo.require_bound_and_attempt(self, intent_id, nonce, endpoints)
+
+    def get_attempt(self, attempt_id: UUID) -> TxAttempt | None:
+        return attempts_repo.get_attempt(self, attempt_id)
+
+    def get_attempts_for_intent(self, intent_id: UUID) -> list[TxAttempt]:
+        return attempts_repo.get_attempts_for_intent(self, intent_id)
+
+    def get_latest_attempt_for_intent(self, intent_id: UUID) -> TxAttempt | None:
+        return attempts_repo.get_latest_attempt_for_intent(self, intent_id)
+
+    def get_attempt_by_tx_hash(self, tx_hash: str) -> TxAttempt | None:
+        return attempts_repo.get_attempt_by_tx_hash(self, tx_hash)
+
+    def update_attempt_status(
+        self,
+        attempt_id: UUID,
+        status: str,
+        tx_hash: str | None = None,
+        broadcast_block: int | None = None,
+        broadcast_at: datetime | None = None,
+        included_block: int | None = None,
+        endpoint_url: str | None = None,
+        error_code: str | None = None,
+        error_detail: str | None = None,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        return attempts_repo.update_attempt_status(
+            self,
+            attempt_id,
+            status,
+            tx_hash,
+            broadcast_block,
+            broadcast_at,
+            included_block,
+            endpoint_url,
+            error_code,
+            error_detail,
+            actor,
+            reason,
+            source,
+        )
+
+    # =========================================================================
+    # ABI Cache Operations
+    # =========================================================================
+
+    def get_cached_abi(self, chain_id: int, address: str) -> ABICacheEntry | None:
+        return cache_repo.get_cached_abi(self, chain_id, address)
+
+    def set_cached_abi(
+        self,
+        chain_id: int,
+        address: str,
+        abi_json: str,
+        source: str,
+    ) -> None:
+        cache_repo.set_cached_abi(self, chain_id, address, abi_json, source)
+
+    def clear_cached_abi(self, chain_id: int, address: str) -> bool:
+        return cache_repo.clear_cached_abi(self, chain_id, address)
+
+    def cleanup_expired_abis(self, max_age_seconds: int) -> int:
+        return cache_repo.cleanup_expired_abis(self, max_age_seconds)
+
+    # =========================================================================
+    # Proxy Cache Operations
+    # =========================================================================
+
+    def get_cached_proxy(
+        self, chain_id: int, proxy_address: str
+    ) -> ProxyCacheEntry | None:
+        return cache_repo.get_cached_proxy(self, chain_id, proxy_address)
+
+    def set_cached_proxy(
+        self,
+        chain_id: int,
+        proxy_address: str,
+        implementation_address: str,
+    ) -> None:
+        cache_repo.set_cached_proxy(self, chain_id, proxy_address, implementation_address)
+
+    def clear_cached_proxy(self, chain_id: int, proxy_address: str) -> bool:
+        return cache_repo.clear_cached_proxy(self, chain_id, proxy_address)
+
+    # =========================================================================
+    # Cleanup & Maintenance
+    # =========================================================================
+
+    def cleanup_old_intents(
+        self,
+        older_than_days: int,
+        statuses: list[str] | None = None,
+    ) -> int:
+        return maintenance_repo.cleanup_old_intents(self, older_than_days, statuses)
+
+    def get_database_stats(self) -> dict[str, Any]:
+        """Get database statistics for health checks."""
+        return maintenance_repo.get_database_stats(self)
+
+    # =========================================================================
+    # Reconciliation Operations
+    # =========================================================================
+
+    def clear_orphaned_claims(self, chain_id: int, older_than_minutes: int = 2) -> int:
+        """Clear claim fields where status != 'claimed' and claim is stale."""
+        return maintenance_repo.clear_orphaned_claims(self, chain_id, older_than_minutes)
+
+    def release_orphaned_nonces(self, chain_id: int, older_than_minutes: int = 5) -> int:
+        """Release nonces for terminal intents that are stale."""
+        return maintenance_repo.release_orphaned_nonces(self, chain_id, older_than_minutes)
+
+    def count_pending_without_attempts(self, chain_id: int) -> int:
+        """Count pending intents with no attempt records (integrity issue)."""
+        return maintenance_repo.count_pending_without_attempts(self, chain_id)
+
+    def count_stale_claims(self, chain_id: int, older_than_minutes: int = 10) -> int:
+        """Count intents stuck in CLAIMED for too long."""
+        return maintenance_repo.count_stale_claims(self, chain_id, older_than_minutes)
+
+    # =========================================================================
+    # Invariant Queries (Phase 2)
+    # =========================================================================
+
+    def count_stuck_claimed(self, chain_id: int, older_than_minutes: int = 10) -> int:
+        """Count intents stuck in CLAIMED status for too long."""
+        return maintenance_repo.count_stuck_claimed(self, chain_id, older_than_minutes)
+
+    def count_orphaned_claims(self, chain_id: int) -> int:
+        """Count intents with claim_token set but status != claimed."""
+        return maintenance_repo.count_orphaned_claims(self, chain_id)
+
+    def count_orphaned_nonces(self, chain_id: int) -> int:
+        """Count reserved/in_flight nonces for failed/abandoned intents."""
+        return maintenance_repo.count_orphaned_nonces(self, chain_id)
+
+    def get_oldest_nonce_gap_age_seconds(self, chain_id: int) -> float:
+        """Get age in seconds of the oldest nonce gap.
+
+        Anchors from signers (small table) for efficiency.
+        Returns 0 if no gaps or if chain nonce not synced.
+        """
+        return maintenance_repo.get_oldest_nonce_gap_age_seconds(self, chain_id)
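The new `brawny/db/sqlite/__init__.py` replaces the monolithic `brawny/db/sqlite.py` removed in this release (-2733 lines): `SQLiteDatabase` is now a thin facade that delegates every operation to the per-domain modules under `brawny/db/sqlite/repos/`. A minimal usage sketch, based only on the constructor and method signatures visible in this diff (the path and direct instantiation below are illustrative; the daemon normally wires the database up from its own config):

```python
from brawny.db.sqlite import SQLiteDatabase

# Illustrative values: the "sqlite:///" prefix is stripped by __init__, and the
# circuit-breaker arguments shown are simply the defaults from the signature above.
db = SQLiteDatabase(
    "sqlite:///./brawny.db",
    circuit_breaker_failures=5,
    circuit_breaker_seconds=30,
)
db.connect()
try:
    jobs = db.get_enabled_jobs()                        # delegates to repos/jobs.py
    pending = db.get_pending_intent_count(chain_id=1)   # delegates to repos/intents.py
finally:
    db.close()
```

The split keeps the public `Database` interface in one place while the SQL itself lives in the domain-specific repo modules listed above (`attempts`, `block_state`, `cache`, `intents`, `jobs`, `maintenance`, `signers_nonces`).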