brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl
- brawny/__init__.py +2 -0
- brawny/_context.py +5 -5
- brawny/_rpc/__init__.py +36 -12
- brawny/_rpc/broadcast.py +14 -13
- brawny/_rpc/caller.py +243 -0
- brawny/_rpc/client.py +539 -0
- brawny/_rpc/clients.py +11 -11
- brawny/_rpc/context.py +23 -0
- brawny/_rpc/errors.py +465 -31
- brawny/_rpc/gas.py +7 -6
- brawny/_rpc/pool.py +18 -0
- brawny/_rpc/retry.py +266 -0
- brawny/_rpc/retry_policy.py +81 -0
- brawny/accounts.py +28 -9
- brawny/alerts/__init__.py +15 -18
- brawny/alerts/abi_resolver.py +212 -36
- brawny/alerts/base.py +2 -2
- brawny/alerts/contracts.py +77 -10
- brawny/alerts/errors.py +30 -3
- brawny/alerts/events.py +38 -5
- brawny/alerts/health.py +19 -13
- brawny/alerts/send.py +513 -55
- brawny/api.py +39 -11
- brawny/assets/AGENTS.md +325 -0
- brawny/async_runtime.py +48 -0
- brawny/chain.py +3 -3
- brawny/cli/commands/__init__.py +2 -0
- brawny/cli/commands/console.py +69 -19
- brawny/cli/commands/contract.py +2 -2
- brawny/cli/commands/controls.py +121 -0
- brawny/cli/commands/health.py +2 -2
- brawny/cli/commands/job_dev.py +6 -5
- brawny/cli/commands/jobs.py +99 -2
- brawny/cli/commands/maintenance.py +13 -29
- brawny/cli/commands/migrate.py +1 -0
- brawny/cli/commands/run.py +10 -3
- brawny/cli/commands/script.py +8 -3
- brawny/cli/commands/signer.py +143 -26
- brawny/cli/helpers.py +0 -3
- brawny/cli_templates.py +25 -349
- brawny/config/__init__.py +4 -1
- brawny/config/models.py +43 -57
- brawny/config/parser.py +268 -57
- brawny/config/validation.py +52 -15
- brawny/daemon/context.py +4 -2
- brawny/daemon/core.py +185 -63
- brawny/daemon/loops.py +166 -98
- brawny/daemon/supervisor.py +261 -0
- brawny/db/__init__.py +14 -26
- brawny/db/base.py +248 -151
- brawny/db/global_cache.py +11 -1
- brawny/db/migrate.py +175 -28
- brawny/db/migrations/001_init.sql +4 -3
- brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
- brawny/db/migrations/011_add_job_logs.sql +1 -2
- brawny/db/migrations/012_add_claimed_by.sql +2 -2
- brawny/db/migrations/013_attempt_unique.sql +10 -0
- brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
- brawny/db/migrations/015_add_signer_alias.sql +14 -0
- brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
- brawny/db/migrations/017_add_job_drain.sql +6 -0
- brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
- brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
- brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
- brawny/db/ops/__init__.py +3 -25
- brawny/db/ops/logs.py +1 -2
- brawny/db/queries.py +47 -91
- brawny/db/serialized.py +65 -0
- brawny/db/sqlite/__init__.py +1001 -0
- brawny/db/sqlite/connection.py +231 -0
- brawny/db/sqlite/execute.py +116 -0
- brawny/db/sqlite/mappers.py +190 -0
- brawny/db/sqlite/repos/attempts.py +372 -0
- brawny/db/sqlite/repos/block_state.py +102 -0
- brawny/db/sqlite/repos/cache.py +104 -0
- brawny/db/sqlite/repos/intents.py +1021 -0
- brawny/db/sqlite/repos/jobs.py +200 -0
- brawny/db/sqlite/repos/maintenance.py +182 -0
- brawny/db/sqlite/repos/signers_nonces.py +566 -0
- brawny/db/sqlite/tx.py +119 -0
- brawny/http.py +194 -0
- brawny/invariants.py +11 -24
- brawny/jobs/base.py +8 -0
- brawny/jobs/job_validation.py +2 -1
- brawny/keystore.py +83 -7
- brawny/lifecycle.py +64 -12
- brawny/logging.py +0 -2
- brawny/metrics.py +84 -12
- brawny/model/contexts.py +111 -9
- brawny/model/enums.py +1 -0
- brawny/model/errors.py +18 -0
- brawny/model/types.py +47 -131
- brawny/network_guard.py +133 -0
- brawny/networks/__init__.py +5 -5
- brawny/networks/config.py +1 -7
- brawny/networks/manager.py +14 -11
- brawny/runtime_controls.py +74 -0
- brawny/scheduler/poller.py +11 -7
- brawny/scheduler/reorg.py +95 -39
- brawny/scheduler/runner.py +442 -168
- brawny/scheduler/shutdown.py +3 -3
- brawny/script_tx.py +3 -3
- brawny/telegram.py +53 -7
- brawny/testing.py +1 -0
- brawny/timeout.py +38 -0
- brawny/tx/executor.py +922 -308
- brawny/tx/intent.py +54 -16
- brawny/tx/monitor.py +31 -12
- brawny/tx/nonce.py +212 -90
- brawny/tx/replacement.py +69 -18
- brawny/tx/retry_policy.py +24 -0
- brawny/tx/stages/types.py +75 -0
- brawny/types.py +18 -0
- brawny/utils.py +41 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
- brawny-0.1.22.dist-info/RECORD +163 -0
- brawny/_rpc/manager.py +0 -982
- brawny/_rpc/selector.py +0 -156
- brawny/db/base_new.py +0 -165
- brawny/db/mappers.py +0 -182
- brawny/db/migrations/008_add_transactions.sql +0 -72
- brawny/db/ops/attempts.py +0 -108
- brawny/db/ops/blocks.py +0 -83
- brawny/db/ops/cache.py +0 -93
- brawny/db/ops/intents.py +0 -296
- brawny/db/ops/jobs.py +0 -110
- brawny/db/ops/nonces.py +0 -322
- brawny/db/postgres.py +0 -2535
- brawny/db/postgres_new.py +0 -196
- brawny/db/sqlite.py +0 -2733
- brawny/db/sqlite_new.py +0 -191
- brawny-0.1.13.dist-info/RECORD +0 -141
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0
brawny/db/__init__.py
CHANGED

@@ -1,4 +1,4 @@
-"""Database layer with
+"""Database layer with SQLite-only support."""

 from brawny.db.base import (
     ABICacheEntry,
@@ -9,11 +9,8 @@ from brawny.db.base import (
     ProxyCacheEntry,
 )
 from brawny.db.migrate import Migrator, discover_migrations, get_pending_migrations
-try:
-    from brawny.db.postgres import PostgresDatabase
-except ModuleNotFoundError:
-    PostgresDatabase = None  # type: ignore
 from brawny.db.sqlite import SQLiteDatabase
+from brawny.db.serialized import SerializedDatabase

 __all__ = [
     # Base classes
@@ -26,7 +23,7 @@ __all__ = [
     "ProxyCacheEntry",
     # Implementations
     "SQLiteDatabase",
-    "
+    "SerializedDatabase",
     # Migration
     "Migrator",
     "discover_migrations",
@@ -40,13 +37,11 @@ def create_database(database_url: str, **kwargs: object) -> Database:
     """Factory function to create a database instance based on URL.

     Args:
-        database_url: Database connection URL
-            - sqlite:///path/to/db.sqlite
-            - postgresql://user:pass@host:port/dbname
+        database_url: Database connection URL (sqlite:///path/to/db.sqlite)
         **kwargs: Additional arguments passed to the database constructor

     Returns:
-        Database instance (SQLiteDatabase
+        Database instance (SQLiteDatabase)

     Raises:
         ValueError: If database URL scheme is not supported
@@ -54,25 +49,18 @@ def create_database(database_url: str, **kwargs: object) -> Database:
     circuit_breaker_failures = int(kwargs.pop("circuit_breaker_failures", 5))
     circuit_breaker_seconds = int(kwargs.pop("circuit_breaker_seconds", 30))
     if database_url.startswith("sqlite:///"):
-
-
-
-
-
-
-
-
-            "Postgres support requires psycopg and psycopg-pool. "
-            "Install with: pip install psycopg[binary] psycopg-pool"
+        kwargs.pop("db_op_timeout_seconds", None)
+        kwargs.pop("db_busy_retries", None)
+        kwargs.pop("db_busy_backoff_seconds", None)
+        return SerializedDatabase(
+            SQLiteDatabase(
+                database_url,
+                circuit_breaker_failures=circuit_breaker_failures,
+                circuit_breaker_seconds=circuit_breaker_seconds,
             )
-        return PostgresDatabase(  # type: ignore
-            database_url,
-            circuit_breaker_failures=circuit_breaker_failures,
-            circuit_breaker_seconds=circuit_breaker_seconds,
-            **kwargs,
         )
     else:
         raise ValueError(
             f"Unsupported database URL: {database_url}. "
-            "Must start with 'sqlite:///'
+            "Must start with 'sqlite:///'"
         )
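With Postgres support removed, create_database now has a single code path: every sqlite:/// URL is wrapped as SerializedDatabase(SQLiteDatabase(...)), and anything else raises ValueError. A minimal usage sketch, assuming only the factory behavior shown in the diff above (the file path and keyword values are illustrative, not package defaults):

# Usage sketch for the 0.1.22 factory; path and keyword values are illustrative.
from brawny.db import create_database

db = create_database(
    "sqlite:///var/brawny/brawny.sqlite",   # only sqlite:/// URLs are accepted now
    circuit_breaker_failures=5,             # popped by the factory before construction
    circuit_breaker_seconds=30,
)
# The returned object is a SerializedDatabase wrapping a SQLiteDatabase,
# so callers keep programming against the Database ABC.

try:
    create_database("postgresql://user:pass@host:5432/dbname")
except ValueError as exc:
    print(exc)  # "Unsupported database URL: ... Must start with 'sqlite:///'"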
brawny/db/base.py
CHANGED

@@ -1,7 +1,4 @@
-"""Database abstraction layer for brawny.
-
-Provides a unified interface for both PostgreSQL and SQLite backends.
-"""
+"""Database abstraction layer for brawny (SQLite-only)."""

 from __future__ import annotations

@@ -19,11 +16,12 @@ if TYPE_CHECKING:
     GasParams,
     JobConfig,
     NonceReservation,
+    RuntimeControl,
     SignerState,
-    Transaction,
     TxAttempt,
     TxIntent,
 )
+from brawny.types import ClaimedIntent


 IsolationLevel = Literal["SERIALIZABLE", "REPEATABLE READ", "READ COMMITTED", "READ UNCOMMITTED"]
@@ -260,6 +258,28 @@ class Database(ABC):
         """Enable or disable a job. Returns True if job exists."""
         ...

+    @abstractmethod
+    def set_job_drain(
+        self,
+        job_id: str,
+        drain_until: datetime,
+        reason: str | None = None,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        """Drain a job until a timestamp. Returns True if job exists."""
+        ...
+
+    @abstractmethod
+    def clear_job_drain(
+        self,
+        job_id: str,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        """Clear job drain. Returns True if job exists."""
+        ...
+
     @abstractmethod
     def delete_job(self, job_id: str) -> bool:
         """Delete a job from the database. Returns True if job existed."""
@@ -331,6 +351,59 @@ class Database(ABC):
         """Clear gap tracking (gap resolved or force reset)."""
         ...

+    @abstractmethod
+    def set_signer_quarantined(
+        self,
+        chain_id: int,
+        address: str,
+        reason: str,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        """Quarantine signer (block nonce reservations/broadcast)."""
+        ...
+
+    @abstractmethod
+    def clear_signer_quarantined(
+        self,
+        chain_id: int,
+        address: str,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        """Clear signer quarantine."""
+        ...
+
+    @abstractmethod
+    def set_replacements_paused(
+        self,
+        chain_id: int,
+        address: str,
+        paused: bool,
+        reason: str | None = None,
+        actor: str | None = None,
+        source: str | None = None,
+    ) -> bool:
+        """Pause or resume replacements for a signer."""
+        ...
+
+    @abstractmethod
+    def record_nonce_reset_audit(
+        self,
+        chain_id: int,
+        signer_address: str,
+        old_next_nonce: int | None,
+        new_next_nonce: int,
+        released_reservations: int,
+        source: str,
+        reason: str | None,
+    ) -> None:
+        """Record a nonce force reset in the audit table.
+
+        Provides durable record for incident investigation.
+        """
+        ...
+
     @abstractmethod
     def get_signer_by_alias(self, chain_id: int, alias: str) -> SignerState | None:
         """Get signer by alias. Returns None if not found."""
@@ -394,11 +467,54 @@ class Database(ABC):

     @abstractmethod
     def release_nonce_reservation(
-        self,
+        self,
+        chain_id: int,
+        address: str,
+        nonce: int,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
     ) -> bool:
         """Release (mark as released) a nonce reservation."""
         ...

+    @abstractmethod
+    def record_mutation_audit(
+        self,
+        entity_type: str,
+        entity_id: str,
+        action: str,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+        metadata: dict[str, Any] | None = None,
+    ) -> None:
+        """Record durable mutation audit entry."""
+        ...
+
+    @abstractmethod
+    def set_runtime_control(
+        self,
+        control: str,
+        active: bool,
+        expires_at: datetime | None,
+        reason: str | None,
+        actor: str | None,
+        mode: str,
+    ) -> "RuntimeControl":
+        """Create or update runtime control with TTL."""
+        ...
+
+    @abstractmethod
+    def get_runtime_control(self, control: str) -> "RuntimeControl | None":
+        """Fetch runtime control by name."""
+        ...
+
+    @abstractmethod
+    def list_runtime_controls(self) -> list["RuntimeControl"]:
+        """List all runtime controls."""
+        ...
+
     @abstractmethod
     def cleanup_orphaned_nonces(
         self, chain_id: int, older_than_hours: int = 24
@@ -434,6 +550,7 @@ class Database(ABC):
         max_priority_fee_per_gas: str | None,
         min_confirmations: int,
         deadline_ts: datetime | None,
+        signer_alias: str | None = None,
         broadcast_group: str | None = None,
         broadcast_endpoints: list[str] | None = None,
     ) -> TxIntent | None:
@@ -491,6 +608,46 @@ class Database(ABC):
         """Count intents with retry_after in the future."""
         ...

+    @abstractmethod
+    def bind_broadcast_endpoints(
+        self,
+        intent_id: UUID,
+        group_name: str | None,
+        endpoints: list[str],
+    ) -> tuple[str | None, list[str]]:
+        """Bind broadcast endpoints to an intent (idempotent)."""
+        ...
+
+    @abstractmethod
+    def create_attempt_once(
+        self,
+        attempt_id: UUID,
+        intent_id: UUID,
+        nonce: int,
+        gas_params_json: str,
+        status: str = "pending_send",
+        tx_hash: str | None = None,
+        replaces_attempt_id: UUID | None = None,
+        broadcast_group: str | None = None,
+        endpoint_url: str | None = None,
+        binding: tuple[str | None, list[str]] | None = None,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
+    ) -> TxAttempt:
+        """Create attempt once per intent+nonce; return existing if present."""
+        ...
+
+    @abstractmethod
+    def require_bound_and_attempt(
+        self,
+        intent_id: UUID,
+        nonce: int,
+        endpoints: list[str],
+    ) -> None:
+        """Assert broadcast binding and attempt existence before side effects."""
+        ...
+
     @abstractmethod
     def get_oldest_pending_intent_age(self, chain_id: int) -> float | None:
         """Get age in seconds of the oldest pending intent.
@@ -522,12 +679,23 @@ class Database(ABC):
         """List sending intents older than a threshold."""
         ...

+    @abstractmethod
+    def list_claimed_intents_older_than(
+        self,
+        max_age_seconds: int,
+        limit: int = 100,
+        chain_id: int | None = None,
+    ) -> list[TxIntent]:
+        """List claimed intents older than a threshold (with attempts)."""
+        ...
+
     @abstractmethod
     def claim_next_intent(
         self,
         claim_token: str,
         claimed_by: str | None = None,
-
+        lease_seconds: int | None = None,
+    ) -> ClaimedIntent | None:
         """Claim the next available intent for processing."""
         ...

@@ -588,6 +756,18 @@ class Database(ABC):
         """Release claim only if claim_token matches. Returns True if released."""
         ...

+    @abstractmethod
+    def release_claim_if_token_and_no_attempts(
+        self, intent_id: UUID, claim_token: str
+    ) -> bool:
+        """Atomically release claim only if token matches AND no attempts exist.
+
+        Safe primitive for pre-attempt failure handling:
+        - Returns True iff release succeeded (ownership + no work started)
+        - Returns False if token mismatch, attempts exist, or not claimed
+        """
+        ...
+
     @abstractmethod
     def clear_intent_claim(self, intent_id: UUID) -> bool:
         """Clear claim token and claimed_at without changing status."""
@@ -604,8 +784,58 @@ class Database(ABC):
         ...

     @abstractmethod
-    def
-
+    def requeue_expired_claims_no_attempts(
+        self,
+        limit: int,
+        grace_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        """Requeue expired claimed intents with no attempts. Returns count requeued."""
+        ...
+
+    @abstractmethod
+    def count_expired_claims_with_attempts(
+        self,
+        limit: int,
+        grace_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        """Count expired claimed intents that have attempts."""
+        ...
+
+    @abstractmethod
+    def requeue_missing_lease_claims_no_attempts(
+        self,
+        limit: int,
+        cutoff_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        """Requeue claimed intents with NULL lease_expires_at and no attempts."""
+        ...
+
+    @abstractmethod
+    def count_missing_lease_claims_with_attempts(
+        self,
+        limit: int,
+        cutoff_seconds: int,
+        chain_id: int | None = None,
+    ) -> int:
+        """Count claimed intents with NULL lease_expires_at that have attempts."""
+        ...
+
+    @abstractmethod
+    def should_create_intent(
+        self,
+        cooldown_key: str,
+        now: int,
+        cooldown_seconds: int,
+    ) -> tuple[bool, int | None]:
+        """Check cooldown key and update if allowed. Returns (allowed, last_intent_at)."""
+        ...
+
+    @abstractmethod
+    def prune_job_cooldowns(self, older_than_days: int) -> int:
+        """Delete stale cooldown keys older than N days. Returns count deleted."""
         ...

     @abstractmethod
@@ -631,12 +861,15 @@ class Database(ABC):
         intent_id: UUID,
         nonce: int,
         gas_params_json: str,
-        status: str = "
+        status: str = "pending_send",
         tx_hash: str | None = None,
         replaces_attempt_id: UUID | None = None,
         broadcast_group: str | None = None,
         endpoint_url: str | None = None,
         binding: tuple[str, list[str]] | None = None,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
     ) -> TxAttempt:
         """Create a new transaction attempt.

@@ -645,7 +878,7 @@ class Database(ABC):
             intent_id: Parent intent ID
             nonce: Transaction nonce
             gas_params_json: Gas parameters as JSON
-            status: Initial status (default: "
+            status: Initial status (default: "pending_send")
             tx_hash: Transaction hash if known
             replaces_attempt_id: ID of attempt being replaced
             broadcast_group: RPC group used for broadcast
@@ -684,152 +917,16 @@ class Database(ABC):
         broadcast_block: int | None = None,
         broadcast_at: datetime | None = None,
         included_block: int | None = None,
+        endpoint_url: str | None = None,
         error_code: str | None = None,
         error_detail: str | None = None,
+        actor: str | None = None,
+        reason: str | None = None,
+        source: str | None = None,
     ) -> bool:
         """Update attempt status and related fields."""
         ...

-    # =========================================================================
-    # Transaction Operations (NEW - replaces Intent/Attempt in Phase 2+)
-    #
-    # IMPORTANT: Transaction is the only durable execution model.
-    # Do not add attempt-related methods here.
-    # =========================================================================
-
-    @abstractmethod
-    def create_tx(
-        self,
-        tx_id: UUID,
-        job_id: str,
-        chain_id: int,
-        idempotency_key: str,
-        signer_address: str,
-        to_address: str,
-        data: str | None,
-        value_wei: str,
-        min_confirmations: int,
-        deadline_ts: datetime | None,
-        gas_params: GasParams | None = None,
-    ) -> Transaction | None:
-        """Create a new transaction.
-
-        Returns None if idempotency_key already exists (idempotency).
-        Initial status is always 'created'.
-        """
-        ...
-
-    @abstractmethod
-    def get_tx(self, tx_id: UUID) -> Transaction | None:
-        """Get a transaction by ID."""
-        ...
-
-    @abstractmethod
-    def get_tx_by_idempotency_key(
-        self,
-        chain_id: int,
-        signer_address: str,
-        idempotency_key: str,
-    ) -> Transaction | None:
-        """Get a transaction by idempotency key (scoped to chain and signer)."""
-        ...
-
-    @abstractmethod
-    def get_tx_by_hash(self, tx_hash: str) -> Transaction | None:
-        """Get a transaction by current tx hash.
-
-        NOTE: Does NOT search tx_hash_history. Only matches current_tx_hash.
-        """
-        ...
-
-    @abstractmethod
-    def list_pending_txs(
-        self,
-        chain_id: int | None = None,
-        job_id: str | None = None,
-    ) -> list[Transaction]:
-        """List transactions in CREATED or BROADCAST status."""
-        ...
-
-    @abstractmethod
-    def claim_tx(self, claim_token: str) -> Transaction | None:
-        """Claim the next CREATED transaction for processing.
-
-        This is a lease, not ownership. The claim gates execution only.
-        Status remains CREATED while claimed - no "claimed" status.
-        """
-        ...
-
-    @abstractmethod
-    def set_tx_broadcast(
-        self,
-        tx_id: UUID,
-        tx_hash: str,
-        nonce: int,
-        gas_params: GasParams,
-        broadcast_block: int,
-        broadcast_info: BroadcastInfo | None = None,
-    ) -> bool:
-        """Record initial broadcast.
-
-        Sets status=BROADCAST, creates first tx_hash_history record.
-        Returns True if successful, False if tx not found or wrong status.
-        """
-        ...
-
-    @abstractmethod
-    def set_tx_replaced(
-        self,
-        tx_id: UUID,
-        new_tx_hash: str,
-        gas_params: GasParams,
-        broadcast_block: int,
-        reason: str = "fee_bump",
-    ) -> bool:
-        """Record replacement broadcast.
-
-        Appends to tx_hash_history, updates current_tx_hash, increments
-        replacement_count. Status remains BROADCAST.
-
-        Returns True if successful, False if tx not found or wrong status.
-        """
-        ...
-
-    @abstractmethod
-    def set_tx_confirmed(
-        self,
-        tx_id: UUID,
-        included_block: int,
-    ) -> bool:
-        """Mark transaction confirmed.
-
-        Sets status=CONFIRMED, included_block, confirmed_at.
-        Returns True if successful, False if tx not found or wrong status.
-        """
-        ...
-
-    @abstractmethod
-    def set_tx_failed(
-        self,
-        tx_id: UUID,
-        failure_type: FailureType,
-        error_info: ErrorInfo | None = None,
-    ) -> bool:
-        """Mark transaction failed.
-
-        Sets status=FAILED, failure_type, error_info_json.
-        Returns True if successful, False if tx not found or already terminal.
-        """
-        ...
-
-    @abstractmethod
-    def release_stale_tx_claims(self, max_age_seconds: int) -> int:
-        """Release claims older than threshold.
-
-        Returns count of claims released.
-        """
-        ...
-
     # =========================================================================
     # ABI Cache Operations
     # =========================================================================
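The new abstract methods formalize a lease-based claim protocol: claim_next_intent now takes lease_seconds and returns a ClaimedIntent, release_claim_if_token_and_no_attempts gives workers a safe pre-attempt rollback, and the requeue_*/count_* pair lets a supervisor sweep expired leases. A hedged worker-loop sketch against those signatures (the ClaimedIntent attribute names and the process_intent helper are assumptions for illustration; only the Database method names and parameters come from this diff):

# Hedged sketch of how a worker might drive the new claim/lease primitives.
import uuid

def run_once(db, worker_name: str) -> None:
    claim_token = str(uuid.uuid4())
    claimed = db.claim_next_intent(
        claim_token,
        claimed_by=worker_name,
        lease_seconds=120,          # new in 0.1.22: claims carry a lease
    )
    if claimed is None:
        return                      # nothing queued

    try:
        process_intent(claimed)     # hypothetical helper: build, bind, broadcast
    except Exception:
        # Safe pre-attempt rollback: only releases if this worker still owns
        # the claim and no attempt row was ever created for the intent.
        db.release_claim_if_token_and_no_attempts(claimed.intent.intent_id, claim_token)
        raise

def sweep_stale_claims(db) -> None:
    # Supervisor-side housekeeping using the new requeue/count primitives.
    requeued = db.requeue_expired_claims_no_attempts(limit=100, grace_seconds=60)
    stuck = db.count_expired_claims_with_attempts(limit=100, grace_seconds=60)
    print(f"requeued={requeued} expired-with-attempts={stuck}")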
brawny/db/global_cache.py
CHANGED

@@ -16,6 +16,8 @@ from datetime import datetime, timezone
 from pathlib import Path
 from typing import TYPE_CHECKING

+from brawny.utils import db_address
+
 if TYPE_CHECKING:
     pass

@@ -84,7 +86,8 @@ class GlobalABICache:

     def _init_schema(self) -> None:
         """Create tables if they don't exist."""
-
+        if self._conn is None:
+            raise RuntimeError("Global cache connection not initialized")

         self._conn.executescript("""
             CREATE TABLE IF NOT EXISTS abi_cache (
@@ -119,6 +122,7 @@ class GlobalABICache:
         Returns:
             ABICacheEntry if found, None otherwise
         """
+        address = db_address(address)
         with self._lock:
             conn = self._ensure_connected()
             cursor = conn.execute(
@@ -156,6 +160,7 @@ class GlobalABICache:
             abi_json: JSON-encoded ABI
             source: Source of ABI ('etherscan', 'sourcify', 'manual', 'proxy_implementation')
         """
+        address = db_address(address)
         with self._lock:
             conn = self._ensure_connected()
             conn.execute(
@@ -181,6 +186,7 @@ class GlobalABICache:
         Returns:
             True if entry was deleted, False if not found
         """
+        address = db_address(address)
         with self._lock:
             conn = self._ensure_connected()
             cursor = conn.execute(
@@ -223,6 +229,7 @@ class GlobalABICache:
         Returns:
             ProxyCacheEntry if found, None otherwise
         """
+        proxy_address = db_address(proxy_address)
         with self._lock:
             conn = self._ensure_connected()
             cursor = conn.execute(
@@ -257,6 +264,8 @@ class GlobalABICache:
             proxy_address: Proxy contract address
             implementation_address: Implementation contract address
         """
+        proxy_address = db_address(proxy_address)
+        implementation_address = db_address(implementation_address)
         with self._lock:
             conn = self._ensure_connected()
             conn.execute(
@@ -281,6 +290,7 @@ class GlobalABICache:
         Returns:
             True if entry was deleted, False if not found
         """
+        proxy_address = db_address(proxy_address)
         with self._lock:
             conn = self._ensure_connected()
             cursor = conn.execute(