brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +2 -0
- brawny/_context.py +5 -5
- brawny/_rpc/__init__.py +36 -12
- brawny/_rpc/broadcast.py +14 -13
- brawny/_rpc/caller.py +243 -0
- brawny/_rpc/client.py +539 -0
- brawny/_rpc/clients.py +11 -11
- brawny/_rpc/context.py +23 -0
- brawny/_rpc/errors.py +465 -31
- brawny/_rpc/gas.py +7 -6
- brawny/_rpc/pool.py +18 -0
- brawny/_rpc/retry.py +266 -0
- brawny/_rpc/retry_policy.py +81 -0
- brawny/accounts.py +28 -9
- brawny/alerts/__init__.py +15 -18
- brawny/alerts/abi_resolver.py +212 -36
- brawny/alerts/base.py +2 -2
- brawny/alerts/contracts.py +77 -10
- brawny/alerts/errors.py +30 -3
- brawny/alerts/events.py +38 -5
- brawny/alerts/health.py +19 -13
- brawny/alerts/send.py +513 -55
- brawny/api.py +39 -11
- brawny/assets/AGENTS.md +325 -0
- brawny/async_runtime.py +48 -0
- brawny/chain.py +3 -3
- brawny/cli/commands/__init__.py +2 -0
- brawny/cli/commands/console.py +69 -19
- brawny/cli/commands/contract.py +2 -2
- brawny/cli/commands/controls.py +121 -0
- brawny/cli/commands/health.py +2 -2
- brawny/cli/commands/job_dev.py +6 -5
- brawny/cli/commands/jobs.py +99 -2
- brawny/cli/commands/maintenance.py +13 -29
- brawny/cli/commands/migrate.py +1 -0
- brawny/cli/commands/run.py +10 -3
- brawny/cli/commands/script.py +8 -3
- brawny/cli/commands/signer.py +143 -26
- brawny/cli/helpers.py +0 -3
- brawny/cli_templates.py +25 -349
- brawny/config/__init__.py +4 -1
- brawny/config/models.py +43 -57
- brawny/config/parser.py +268 -57
- brawny/config/validation.py +52 -15
- brawny/daemon/context.py +4 -2
- brawny/daemon/core.py +185 -63
- brawny/daemon/loops.py +166 -98
- brawny/daemon/supervisor.py +261 -0
- brawny/db/__init__.py +14 -26
- brawny/db/base.py +248 -151
- brawny/db/global_cache.py +11 -1
- brawny/db/migrate.py +175 -28
- brawny/db/migrations/001_init.sql +4 -3
- brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
- brawny/db/migrations/011_add_job_logs.sql +1 -2
- brawny/db/migrations/012_add_claimed_by.sql +2 -2
- brawny/db/migrations/013_attempt_unique.sql +10 -0
- brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
- brawny/db/migrations/015_add_signer_alias.sql +14 -0
- brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
- brawny/db/migrations/017_add_job_drain.sql +6 -0
- brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
- brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
- brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
- brawny/db/ops/__init__.py +3 -25
- brawny/db/ops/logs.py +1 -2
- brawny/db/queries.py +47 -91
- brawny/db/serialized.py +65 -0
- brawny/db/sqlite/__init__.py +1001 -0
- brawny/db/sqlite/connection.py +231 -0
- brawny/db/sqlite/execute.py +116 -0
- brawny/db/sqlite/mappers.py +190 -0
- brawny/db/sqlite/repos/attempts.py +372 -0
- brawny/db/sqlite/repos/block_state.py +102 -0
- brawny/db/sqlite/repos/cache.py +104 -0
- brawny/db/sqlite/repos/intents.py +1021 -0
- brawny/db/sqlite/repos/jobs.py +200 -0
- brawny/db/sqlite/repos/maintenance.py +182 -0
- brawny/db/sqlite/repos/signers_nonces.py +566 -0
- brawny/db/sqlite/tx.py +119 -0
- brawny/http.py +194 -0
- brawny/invariants.py +11 -24
- brawny/jobs/base.py +8 -0
- brawny/jobs/job_validation.py +2 -1
- brawny/keystore.py +83 -7
- brawny/lifecycle.py +64 -12
- brawny/logging.py +0 -2
- brawny/metrics.py +84 -12
- brawny/model/contexts.py +111 -9
- brawny/model/enums.py +1 -0
- brawny/model/errors.py +18 -0
- brawny/model/types.py +47 -131
- brawny/network_guard.py +133 -0
- brawny/networks/__init__.py +5 -5
- brawny/networks/config.py +1 -7
- brawny/networks/manager.py +14 -11
- brawny/runtime_controls.py +74 -0
- brawny/scheduler/poller.py +11 -7
- brawny/scheduler/reorg.py +95 -39
- brawny/scheduler/runner.py +442 -168
- brawny/scheduler/shutdown.py +3 -3
- brawny/script_tx.py +3 -3
- brawny/telegram.py +53 -7
- brawny/testing.py +1 -0
- brawny/timeout.py +38 -0
- brawny/tx/executor.py +922 -308
- brawny/tx/intent.py +54 -16
- brawny/tx/monitor.py +31 -12
- brawny/tx/nonce.py +212 -90
- brawny/tx/replacement.py +69 -18
- brawny/tx/retry_policy.py +24 -0
- brawny/tx/stages/types.py +75 -0
- brawny/types.py +18 -0
- brawny/utils.py +41 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
- brawny-0.1.22.dist-info/RECORD +163 -0
- brawny/_rpc/manager.py +0 -982
- brawny/_rpc/selector.py +0 -156
- brawny/db/base_new.py +0 -165
- brawny/db/mappers.py +0 -182
- brawny/db/migrations/008_add_transactions.sql +0 -72
- brawny/db/ops/attempts.py +0 -108
- brawny/db/ops/blocks.py +0 -83
- brawny/db/ops/cache.py +0 -93
- brawny/db/ops/intents.py +0 -296
- brawny/db/ops/jobs.py +0 -110
- brawny/db/ops/nonces.py +0 -322
- brawny/db/postgres.py +0 -2535
- brawny/db/postgres_new.py +0 -196
- brawny/db/sqlite.py +0 -2733
- brawny/db/sqlite_new.py +0 -191
- brawny-0.1.13.dist-info/RECORD +0 -141
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0

brawny/db/sqlite/repos/attempts.py
@@ -0,0 +1,372 @@
+from __future__ import annotations
+
+import json
+import sqlite3
+from datetime import datetime
+from typing import Any
+from uuid import UUID
+
+from brawny.db.sqlite import mappers, tx
+from brawny.model.errors import DatabaseError, InvariantViolation
+from brawny.model.types import TxAttempt
+
+
+def create_attempt(
+    db: Any,
+    attempt_id: UUID,
+    intent_id: UUID,
+    nonce: int,
+    gas_params_json: str,
+    status: str = "pending_send",
+    tx_hash: str | None = None,
+    replaces_attempt_id: UUID | None = None,
+    broadcast_group: str | None = None,
+    endpoint_url: str | None = None,
+    binding: tuple[str | None, list[str]] | None = None,
+    actor: str | None = None,
+    reason: str | None = None,
+    source: str | None = None,
+) -> TxAttempt:
+    """Create attempt, optionally setting binding atomically.
+
+    Args:
+        binding: If provided (first broadcast), persist binding atomically.
+            Tuple of (group_name or None, endpoints)
+
+    CRITICAL: Uses WHERE broadcast_endpoints_json IS NULL to prevent overwrites.
+    """
+    replaces_str = str(replaces_attempt_id) if replaces_attempt_id else None
+    with tx.transaction_conn(db, tx.SQLiteBeginMode.IMMEDIATE) as conn:
+        binding_id: str | None = None
+
+        if binding is not None:
+            group_name, endpoints = binding
+            db.bind_broadcast_endpoints(intent_id, group_name, endpoints)
+
+        cursor = conn.cursor()
+        try:
+            cursor.execute(
+                "SELECT broadcast_binding_id FROM tx_intents WHERE intent_id = ?",
+                (str(intent_id),),
+            )
+            row = cursor.fetchone()
+            if row is not None:
+                binding_id = row["broadcast_binding_id"]
+        finally:
+            cursor.close()
+
+        conn.execute(
+            """
+            INSERT INTO tx_attempts (
+                attempt_id, intent_id, nonce, gas_params_json, status,
+                tx_hash, replaces_attempt_id, broadcast_group, endpoint_url,
+                endpoint_binding_id
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                str(attempt_id),
+                str(intent_id),
+                nonce,
+                gas_params_json,
+                status,
+                tx_hash,
+                replaces_str,
+                broadcast_group,
+                endpoint_url,
+                binding_id,
+            ),
+        )
+
+    attempt = db.get_attempt(attempt_id)
+    if not attempt:
+        raise DatabaseError("Failed to create attempt")
+    db.record_mutation_audit(
+        entity_type="attempt",
+        entity_id=str(attempt_id),
+        action="create_attempt",
+        actor=actor,
+        reason=reason,
+        source=source,
+        metadata={"intent_id": str(intent_id), "nonce": nonce, "status": status},
+    )
+    return attempt
+
+
+def create_attempt_once(
+    db: Any,
+    attempt_id: UUID,
+    intent_id: UUID,
+    nonce: int,
+    gas_params_json: str,
+    status: str = "pending_send",
+    tx_hash: str | None = None,
+    replaces_attempt_id: UUID | None = None,
+    broadcast_group: str | None = None,
+    endpoint_url: str | None = None,
+    binding: tuple[str | None, list[str]] | None = None,
+    actor: str | None = None,
+    reason: str | None = None,
+    source: str | None = None,
+) -> TxAttempt:
+    if replaces_attempt_id is not None:
+        return create_attempt(
+            db,
+            attempt_id=attempt_id,
+            intent_id=intent_id,
+            nonce=nonce,
+            gas_params_json=gas_params_json,
+            status=status,
+            tx_hash=tx_hash,
+            replaces_attempt_id=replaces_attempt_id,
+            broadcast_group=broadcast_group,
+            endpoint_url=endpoint_url,
+            binding=binding,
+            actor=actor,
+            reason=reason,
+            source=source,
+        )
+
+    try:
+        with tx.transaction_conn(db, tx.SQLiteBeginMode.IMMEDIATE) as conn:
+            cursor = conn.cursor()
+            try:
+                if binding is not None:
+                    group_name, endpoints = binding
+                    try:
+                        db.bind_broadcast_endpoints(intent_id, group_name, endpoints)
+                    except (DatabaseError, InvariantViolation) as exc:
+                        raise InvariantViolation(
+                            f"Intent {intent_id} binding failed: {exc}"
+                        ) from exc
+
+                cursor.execute(
+                    """
+                    SELECT * FROM tx_attempts
+                    WHERE intent_id = ? AND nonce = ? AND replaces_attempt_id IS NULL
+                    ORDER BY created_at ASC LIMIT 1
+                    """,
+                    (str(intent_id), nonce),
+                )
+                row = cursor.fetchone()
+                if row:
+                    attempt = mappers._row_to_attempt(dict(row))
+                    if tx_hash and attempt.tx_hash and attempt.tx_hash.lower() != tx_hash.lower():
+                        raise InvariantViolation(
+                            f"Attempt already exists for intent {intent_id} nonce {nonce}"
+                        )
+                    return attempt
+
+                cursor.execute(
+                    "SELECT broadcast_binding_id FROM tx_intents WHERE intent_id = ?",
+                    (str(intent_id),),
+                )
+                binding_row = cursor.fetchone()
+                binding_id = binding_row["broadcast_binding_id"] if binding_row else None
+
+                conn.execute(
+                    """
+                    INSERT INTO tx_attempts (
+                        attempt_id, intent_id, nonce, gas_params_json, status,
+                        tx_hash, replaces_attempt_id, broadcast_group, endpoint_url,
+                        endpoint_binding_id
+                    ) VALUES (?, ?, ?, ?, ?, ?, NULL, ?, ?, ?)
+                    """,
+                    (
+                        str(attempt_id),
+                        str(intent_id),
+                        nonce,
+                        gas_params_json,
+                        status,
+                        tx_hash,
+                        broadcast_group,
+                        endpoint_url,
+                        binding_id,
+                    ),
+                )
+            finally:
+                cursor.close()
+    except sqlite3.IntegrityError:
+        row = db.execute_one(
+            """
+            SELECT * FROM tx_attempts
+            WHERE intent_id = ? AND nonce = ? AND replaces_attempt_id IS NULL
+            ORDER BY created_at ASC LIMIT 1
+            """,
+            (str(intent_id), nonce),
+        )
+        if row:
+            attempt = mappers._row_to_attempt(row)
+            if tx_hash and attempt.tx_hash and attempt.tx_hash.lower() != tx_hash.lower():
+                raise InvariantViolation(
+                    f"Attempt already exists for intent {intent_id} nonce {nonce}"
+                )
+            return attempt
+        raise
+
+    attempt = db.get_attempt(attempt_id)
+    if not attempt:
+        raise DatabaseError("Failed to create attempt")
+    db.record_mutation_audit(
+        entity_type="attempt",
+        entity_id=str(attempt_id),
+        action="create_attempt",
+        actor=actor,
+        reason=reason,
+        source=source,
+        metadata={"intent_id": str(intent_id), "nonce": nonce, "status": status},
+    )
+    return attempt
+
+
+def require_bound_and_attempt(
+    db: Any,
+    intent_id: UUID,
+    nonce: int,
+    endpoints: list[str],
+) -> None:
+    canonical = db._canonicalize_endpoints(endpoints)
+    row = db.execute_one(
+        """
+        SELECT broadcast_binding_id, broadcast_endpoints_json
+        FROM tx_intents WHERE intent_id = ?
+        """,
+        (str(intent_id),),
+    )
+    if not row or row["broadcast_endpoints_json"] is None:
+        raise InvariantViolation(f"Intent {intent_id} has no broadcast binding")
+
+    stored = json.loads(row["broadcast_endpoints_json"])
+    stored_canonical = db._canonicalize_endpoints(stored)
+    if stored_canonical != canonical:
+        raise InvariantViolation(
+            f"Intent {intent_id} binding does not match endpoints"
+        )
+
+    binding_id = row["broadcast_binding_id"]
+    attempt_row = db.execute_one(
+        """
+        SELECT attempt_id, endpoint_binding_id
+        FROM tx_attempts
+        WHERE intent_id = ? AND nonce = ?
+        ORDER BY created_at ASC LIMIT 1
+        """,
+        (str(intent_id), nonce),
+    )
+    if not attempt_row:
+        raise InvariantViolation(f"Intent {intent_id} missing attempt for nonce {nonce}")
+
+    if binding_id is not None:
+        attempt_binding = attempt_row.get("endpoint_binding_id")
+        if attempt_binding is None or str(attempt_binding) != str(binding_id):
+            raise InvariantViolation(
+                f"Intent {intent_id} attempt binding mismatch for nonce {nonce}"
+            )
+
+
+def get_attempt(db: Any, attempt_id: UUID) -> TxAttempt | None:
+    row = db.execute_one(
+        "SELECT * FROM tx_attempts WHERE attempt_id = ?",
+        (str(attempt_id),),
+    )
+    if not row:
+        return None
+    return mappers._row_to_attempt(row)
+
+
+def get_attempts_for_intent(db: Any, intent_id: UUID) -> list[TxAttempt]:
+    rows = db.execute_returning(
+        "SELECT * FROM tx_attempts WHERE intent_id = ? ORDER BY created_at",
+        (str(intent_id),),
+    )
+    return [mappers._row_to_attempt(row) for row in rows]
+
+
+def get_latest_attempt_for_intent(db: Any, intent_id: UUID) -> TxAttempt | None:
+    row = db.execute_one(
+        """
+        SELECT * FROM tx_attempts WHERE intent_id = ?
+        ORDER BY created_at DESC LIMIT 1
+        """,
+        (str(intent_id),),
+    )
+    if not row:
+        return None
+    return mappers._row_to_attempt(row)
+
+
+def get_attempt_by_tx_hash(db: Any, tx_hash: str) -> TxAttempt | None:
+    row = db.execute_one(
+        "SELECT * FROM tx_attempts WHERE tx_hash = ?",
+        (tx_hash,),
+    )
+    if not row:
+        return None
+    return mappers._row_to_attempt(row)
+
+
+def update_attempt_status(
+    db: Any,
+    attempt_id: UUID,
+    status: str,
+    tx_hash: str | None = None,
+    broadcast_block: int | None = None,
+    broadcast_at: datetime | None = None,
+    included_block: int | None = None,
+    endpoint_url: str | None = None,
+    error_code: str | None = None,
+    error_detail: str | None = None,
+    actor: str | None = None,
+    reason: str | None = None,
+    source: str | None = None,
+) -> bool:
+    with tx.transaction_conn(db) as conn:
+        cursor = conn.cursor()
+        try:
+            updates = ["status = ?", "updated_at = CURRENT_TIMESTAMP"]
+            params: list[Any] = [status]
+
+            if tx_hash is not None:
+                updates.append("tx_hash = ?")
+                params.append(tx_hash)
+            if broadcast_block is not None:
+                updates.append("broadcast_block = ?")
+                params.append(broadcast_block)
+            if broadcast_at is not None:
+                updates.append("broadcast_at = ?")
+                params.append(broadcast_at)
+            if included_block is not None:
+                updates.append("included_block = ?")
+                params.append(included_block)
+            if endpoint_url is not None:
+                updates.append("endpoint_url = ?")
+                params.append(endpoint_url)
+            if error_code is not None:
+                updates.append("error_code = ?")
+                params.append(error_code)
+            if error_detail is not None:
+                updates.append("error_detail = ?")
+                params.append(error_detail)
+
+            params.append(str(attempt_id))
+            query = f"UPDATE tx_attempts SET {', '.join(updates)} WHERE attempt_id = ?"
+            cursor.execute(query, params)
+            updated = cursor.rowcount > 0
+        finally:
+            cursor.close()
+
+    if updated:
+        db.record_mutation_audit(
+            entity_type="attempt",
+            entity_id=str(attempt_id),
+            action=f"status:{status}",
+            actor=actor,
+            reason=reason,
+            source=source,
+            metadata={
+                "tx_hash": tx_hash,
+                "broadcast_block": broadcast_block,
+                "included_block": included_block,
+                "error_code": error_code,
+            },
+        )
+    return updated
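
The new attempts repository is written as free functions over a `db` handle rather than methods on a class. The sketch below is a hypothetical caller, not code from the package: the module path is taken from the file listing above, while the function name, the `db` object (assumed to expose `execute_one`, `get_attempt`, `bind_broadcast_endpoints`, `record_mutation_audit`, and the connection used by `tx.transaction_conn`), and the gas parameters are illustrative assumptions.

    # Hypothetical usage sketch; "db" stands in for whatever SQLite-backed
    # handle brawny constructs internally.
    import json
    from uuid import uuid4

    from brawny.db.sqlite.repos import attempts


    def broadcast_first_attempt(db, intent_id, nonce, endpoints):
        # create_attempt_once is idempotent per (intent_id, nonce): a concurrent
        # duplicate insert is absorbed by the sqlite3.IntegrityError fallback and
        # the existing row is returned instead of raising.
        attempt = attempts.create_attempt_once(
            db,
            attempt_id=uuid4(),
            intent_id=intent_id,
            nonce=nonce,
            gas_params_json=json.dumps({"maxFeePerGas": 30_000_000_000}),  # illustrative
            binding=(None, endpoints),  # first broadcast: persist endpoints atomically
            actor="executor",
            reason="initial broadcast",
            source="sketch",
        )
        # Re-check that the persisted binding still matches before broadcasting.
        attempts.require_bound_and_attempt(db, intent_id, nonce, endpoints)
        return attempt

Under this reading, a caller does not need its own existence check before inserting an attempt; the duplicate-detection path in `create_attempt_once` either returns the earlier row or raises `InvariantViolation` when a conflicting `tx_hash` is already recorded.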

brawny/db/sqlite/repos/block_state.py
@@ -0,0 +1,102 @@
+from __future__ import annotations
+
+from typing import Any
+
+from brawny.db.base import BlockState
+
+
+def get_block_state(db: Any, chain_id: int) -> BlockState | None:
+    row = db.execute_one(
+        "SELECT * FROM block_state WHERE chain_id = ?",
+        (chain_id,),
+    )
+    if not row:
+        return None
+    return BlockState(
+        chain_id=row["chain_id"],
+        last_processed_block_number=row["last_processed_block_number"],
+        last_processed_block_hash=row["last_processed_block_hash"],
+        created_at=row["created_at"],
+        updated_at=row["updated_at"],
+    )
+
+
+def upsert_block_state(db: Any, chain_id: int, block_number: int, block_hash: str) -> None:
+    db.execute(
+        """
+        INSERT INTO block_state (chain_id, last_processed_block_number, last_processed_block_hash)
+        VALUES (?, ?, ?)
+        ON CONFLICT(chain_id) DO UPDATE SET
+            last_processed_block_number = excluded.last_processed_block_number,
+            last_processed_block_hash = excluded.last_processed_block_hash,
+            updated_at = CURRENT_TIMESTAMP
+        """,
+        (chain_id, block_number, block_hash),
+    )
+
+
+def get_block_hash_at_height(db: Any, chain_id: int, block_number: int) -> str | None:
+    row = db.execute_one(
+        "SELECT block_hash FROM block_hash_history WHERE chain_id = ? AND block_number = ?",
+        (chain_id, block_number),
+    )
+    return row["block_hash"] if row else None
+
+
+def insert_block_hash(db: Any, chain_id: int, block_number: int, block_hash: str) -> None:
+    db.execute(
+        """
+        INSERT INTO block_hash_history (chain_id, block_number, block_hash)
+        VALUES (?, ?, ?)
+        ON CONFLICT(chain_id, block_number) DO UPDATE SET
+            block_hash = excluded.block_hash,
+            inserted_at = CURRENT_TIMESTAMP
+        """,
+        (chain_id, block_number, block_hash),
+    )
+
+
+def delete_block_hashes_above(db: Any, chain_id: int, block_number: int) -> int:
+    return db.execute_returning_rowcount(
+        "DELETE FROM block_hash_history WHERE chain_id = ? AND block_number > ?",
+        (chain_id, block_number),
+    )
+
+
+def delete_block_hash_at_height(db: Any, chain_id: int, block_number: int) -> bool:
+    rowcount = db.execute_returning_rowcount(
+        "DELETE FROM block_hash_history WHERE chain_id = ? AND block_number = ?",
+        (chain_id, block_number),
+    )
+    return rowcount > 0
+
+
+def cleanup_old_block_hashes(db: Any, chain_id: int, keep_count: int) -> int:
+    row = db.execute_one(
+        "SELECT MAX(block_number) as max_block FROM block_hash_history WHERE chain_id = ?",
+        (chain_id,),
+    )
+    if not row or row["max_block"] is None:
+        return 0
+
+    cutoff = row["max_block"] - keep_count + 1
+    return db.execute_returning_rowcount(
+        "DELETE FROM block_hash_history WHERE chain_id = ? AND block_number < ?",
+        (chain_id, cutoff),
+    )
+
+
+def get_oldest_block_in_history(db: Any, chain_id: int) -> int | None:
+    row = db.execute_one(
+        "SELECT MIN(block_number) as min_block FROM block_hash_history WHERE chain_id = ?",
+        (chain_id,),
+    )
+    return row["min_block"] if row else None
+
+
+def get_latest_block_in_history(db: Any, chain_id: int) -> int | None:
+    row = db.execute_one(
+        "SELECT MAX(block_number) as max_block FROM block_hash_history WHERE chain_id = ?",
+        (chain_id,),
+    )
+    return row["max_block"] if row else None
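
block_state.py keeps two pieces of state: the poller cursor in `block_state` and a per-height hash history in `block_hash_history`, which is the kind of record a reorg handler needs to find a common ancestor. The following is a hedged sketch of how these helpers might compose for a rollback; the `rewind_to_common_ancestor` name, the `get_canonical_hash` callback, and the `db` handle are placeholders, and only the repo functions come from the diff.

    from brawny.db.sqlite.repos import block_state


    def rewind_to_common_ancestor(db, chain_id, tip_number, get_canonical_hash):
        # Walk back from the tip until the stored hash matches the canonical chain
        # (or we run out of recorded history).
        number = tip_number
        while number > 0:
            stored = block_state.get_block_hash_at_height(db, chain_id, number)
            if stored is None or stored == get_canonical_hash(number):
                break
            number -= 1
        # Drop orphaned hashes above the ancestor and move the poller cursor back.
        block_state.delete_block_hashes_above(db, chain_id, number)
        block_state.upsert_block_state(db, chain_id, number, get_canonical_hash(number))
        return number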

brawny/db/sqlite/repos/cache.py
@@ -0,0 +1,104 @@
+from __future__ import annotations
+
+from typing import Any
+
+from brawny.db.base import ABICacheEntry, ProxyCacheEntry
+
+
+def get_cached_abi(db: Any, chain_id: int, address: str) -> ABICacheEntry | None:
+    address = db._normalize_address(address)
+    row = db.execute_one(
+        "SELECT * FROM abi_cache WHERE chain_id = ? AND address = ?",
+        (chain_id, address),
+    )
+    if not row:
+        return None
+    return ABICacheEntry(
+        chain_id=row["chain_id"],
+        address=row["address"],
+        abi_json=row["abi_json"],
+        source=row["source"],
+        resolved_at=row["resolved_at"],
+    )
+
+
+def set_cached_abi(
+    db: Any,
+    chain_id: int,
+    address: str,
+    abi_json: str,
+    source: str,
+) -> None:
+    address = db._normalize_address(address)
+    db.execute(
+        """
+        INSERT INTO abi_cache (chain_id, address, abi_json, source)
+        VALUES (?, ?, ?, ?)
+        ON CONFLICT(chain_id, address) DO UPDATE SET
+            abi_json = excluded.abi_json,
+            source = excluded.source,
+            resolved_at = CURRENT_TIMESTAMP
+        """,
+        (chain_id, address, abi_json, source),
+    )
+
+
+def clear_cached_abi(db: Any, chain_id: int, address: str) -> bool:
+    address = db._normalize_address(address)
+    rowcount = db.execute_returning_rowcount(
+        "DELETE FROM abi_cache WHERE chain_id = ? AND address = ?",
+        (chain_id, address),
+    )
+    return rowcount > 0
+
+
+def cleanup_expired_abis(db: Any, max_age_seconds: int) -> int:
+    return db.execute_returning_rowcount(
+        "DELETE FROM abi_cache WHERE resolved_at < datetime('now', ? || ' seconds')",
+        (f"-{max_age_seconds}",),
+    )
+
+
+def get_cached_proxy(db: Any, chain_id: int, proxy_address: str) -> ProxyCacheEntry | None:
+    proxy_address = db._normalize_address(proxy_address)
+    row = db.execute_one(
+        "SELECT * FROM proxy_cache WHERE chain_id = ? AND proxy_address = ?",
+        (chain_id, proxy_address),
+    )
+    if not row:
+        return None
+    return ProxyCacheEntry(
+        chain_id=row["chain_id"],
+        proxy_address=row["proxy_address"],
+        implementation_address=row["implementation_address"],
+        resolved_at=row["resolved_at"],
+    )
+
+
+def set_cached_proxy(
+    db: Any,
+    chain_id: int,
+    proxy_address: str,
+    implementation_address: str,
+) -> None:
+    proxy_address = db._normalize_address(proxy_address)
+    implementation_address = db._normalize_address(implementation_address)
+    db.execute(
+        """
+        INSERT INTO proxy_cache (chain_id, proxy_address, implementation_address)
+        VALUES (?, ?, ?)
+        ON CONFLICT(chain_id, proxy_address) DO UPDATE SET
+            implementation_address = excluded.implementation_address,
+            resolved_at = CURRENT_TIMESTAMP
+        """,
+        (chain_id, proxy_address, implementation_address),
+    )
+
+
+def clear_cached_proxy(db: Any, chain_id: int, proxy_address: str) -> bool:
+    proxy_address = db._normalize_address(proxy_address)
+    rowcount = db.execute_returning_rowcount(
+        "DELETE FROM proxy_cache WHERE chain_id = ? AND proxy_address = ?",
+        (chain_id, proxy_address),
+    )
+    return rowcount > 0
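
cache.py stores resolved ABIs and proxy implementations keyed by (chain_id, address) with upsert semantics. A minimal read-through sketch follows, assuming `ABICacheEntry` exposes its fields as attributes; `load_abi`, `fetch_abi_from_explorer`, and the `db` handle are hypothetical names for illustration only.

    # Hypothetical read-through cache sketch; only the repo functions and the
    # ABICacheEntry shape come from the diff above.
    import json

    from brawny.db.sqlite.repos import cache


    def load_abi(db, chain_id, address, fetch_abi_from_explorer):
        entry = cache.get_cached_abi(db, chain_id, address)
        if entry is not None:
            return json.loads(entry.abi_json)
        # Cache miss: resolve once, persist, and return the parsed ABI.
        abi = fetch_abi_from_explorer(chain_id, address)
        cache.set_cached_abi(db, chain_id, address, json.dumps(abi), source="explorer")
        return abi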