brawny 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +106 -0
- brawny/_context.py +232 -0
- brawny/_rpc/__init__.py +38 -0
- brawny/_rpc/broadcast.py +172 -0
- brawny/_rpc/clients.py +98 -0
- brawny/_rpc/context.py +49 -0
- brawny/_rpc/errors.py +252 -0
- brawny/_rpc/gas.py +158 -0
- brawny/_rpc/manager.py +982 -0
- brawny/_rpc/selector.py +156 -0
- brawny/accounts.py +534 -0
- brawny/alerts/__init__.py +132 -0
- brawny/alerts/abi_resolver.py +530 -0
- brawny/alerts/base.py +152 -0
- brawny/alerts/context.py +271 -0
- brawny/alerts/contracts.py +635 -0
- brawny/alerts/encoded_call.py +201 -0
- brawny/alerts/errors.py +267 -0
- brawny/alerts/events.py +680 -0
- brawny/alerts/function_caller.py +364 -0
- brawny/alerts/health.py +185 -0
- brawny/alerts/routing.py +118 -0
- brawny/alerts/send.py +364 -0
- brawny/api.py +660 -0
- brawny/chain.py +93 -0
- brawny/cli/__init__.py +16 -0
- brawny/cli/app.py +17 -0
- brawny/cli/bootstrap.py +37 -0
- brawny/cli/commands/__init__.py +41 -0
- brawny/cli/commands/abi.py +93 -0
- brawny/cli/commands/accounts.py +632 -0
- brawny/cli/commands/console.py +495 -0
- brawny/cli/commands/contract.py +139 -0
- brawny/cli/commands/health.py +112 -0
- brawny/cli/commands/init_project.py +86 -0
- brawny/cli/commands/intents.py +130 -0
- brawny/cli/commands/job_dev.py +254 -0
- brawny/cli/commands/jobs.py +308 -0
- brawny/cli/commands/logs.py +87 -0
- brawny/cli/commands/maintenance.py +182 -0
- brawny/cli/commands/migrate.py +51 -0
- brawny/cli/commands/networks.py +253 -0
- brawny/cli/commands/run.py +249 -0
- brawny/cli/commands/script.py +209 -0
- brawny/cli/commands/signer.py +248 -0
- brawny/cli/helpers.py +265 -0
- brawny/cli_templates.py +1445 -0
- brawny/config/__init__.py +74 -0
- brawny/config/models.py +404 -0
- brawny/config/parser.py +633 -0
- brawny/config/routing.py +55 -0
- brawny/config/validation.py +246 -0
- brawny/daemon/__init__.py +14 -0
- brawny/daemon/context.py +69 -0
- brawny/daemon/core.py +702 -0
- brawny/daemon/loops.py +327 -0
- brawny/db/__init__.py +78 -0
- brawny/db/base.py +986 -0
- brawny/db/base_new.py +165 -0
- brawny/db/circuit_breaker.py +97 -0
- brawny/db/global_cache.py +298 -0
- brawny/db/mappers.py +182 -0
- brawny/db/migrate.py +349 -0
- brawny/db/migrations/001_init.sql +186 -0
- brawny/db/migrations/002_add_included_block.sql +7 -0
- brawny/db/migrations/003_add_broadcast_at.sql +10 -0
- brawny/db/migrations/004_broadcast_binding.sql +20 -0
- brawny/db/migrations/005_add_retry_after.sql +9 -0
- brawny/db/migrations/006_add_retry_count_column.sql +11 -0
- brawny/db/migrations/007_add_gap_tracking.sql +18 -0
- brawny/db/migrations/008_add_transactions.sql +72 -0
- brawny/db/migrations/009_add_intent_metadata.sql +5 -0
- brawny/db/migrations/010_add_nonce_gap_index.sql +9 -0
- brawny/db/migrations/011_add_job_logs.sql +24 -0
- brawny/db/migrations/012_add_claimed_by.sql +5 -0
- brawny/db/ops/__init__.py +29 -0
- brawny/db/ops/attempts.py +108 -0
- brawny/db/ops/blocks.py +83 -0
- brawny/db/ops/cache.py +93 -0
- brawny/db/ops/intents.py +296 -0
- brawny/db/ops/jobs.py +110 -0
- brawny/db/ops/logs.py +97 -0
- brawny/db/ops/nonces.py +322 -0
- brawny/db/postgres.py +2535 -0
- brawny/db/postgres_new.py +196 -0
- brawny/db/queries.py +584 -0
- brawny/db/sqlite.py +2733 -0
- brawny/db/sqlite_new.py +191 -0
- brawny/history.py +126 -0
- brawny/interfaces.py +136 -0
- brawny/invariants.py +155 -0
- brawny/jobs/__init__.py +26 -0
- brawny/jobs/base.py +287 -0
- brawny/jobs/discovery.py +233 -0
- brawny/jobs/job_validation.py +111 -0
- brawny/jobs/kv.py +125 -0
- brawny/jobs/registry.py +283 -0
- brawny/keystore.py +484 -0
- brawny/lifecycle.py +551 -0
- brawny/logging.py +290 -0
- brawny/metrics.py +594 -0
- brawny/model/__init__.py +53 -0
- brawny/model/contexts.py +319 -0
- brawny/model/enums.py +70 -0
- brawny/model/errors.py +194 -0
- brawny/model/events.py +93 -0
- brawny/model/startup.py +20 -0
- brawny/model/types.py +483 -0
- brawny/networks/__init__.py +96 -0
- brawny/networks/config.py +269 -0
- brawny/networks/manager.py +423 -0
- brawny/obs/__init__.py +67 -0
- brawny/obs/emit.py +158 -0
- brawny/obs/health.py +175 -0
- brawny/obs/heartbeat.py +133 -0
- brawny/reconciliation.py +108 -0
- brawny/scheduler/__init__.py +19 -0
- brawny/scheduler/poller.py +472 -0
- brawny/scheduler/reorg.py +632 -0
- brawny/scheduler/runner.py +708 -0
- brawny/scheduler/shutdown.py +371 -0
- brawny/script_tx.py +297 -0
- brawny/scripting.py +251 -0
- brawny/startup.py +76 -0
- brawny/telegram.py +393 -0
- brawny/testing.py +108 -0
- brawny/tx/__init__.py +41 -0
- brawny/tx/executor.py +1071 -0
- brawny/tx/fees.py +50 -0
- brawny/tx/intent.py +423 -0
- brawny/tx/monitor.py +628 -0
- brawny/tx/nonce.py +498 -0
- brawny/tx/replacement.py +456 -0
- brawny/tx/utils.py +26 -0
- brawny/utils.py +205 -0
- brawny/validation.py +69 -0
- brawny-0.1.13.dist-info/METADATA +156 -0
- brawny-0.1.13.dist-info/RECORD +141 -0
- brawny-0.1.13.dist-info/WHEEL +5 -0
- brawny-0.1.13.dist-info/entry_points.txt +2 -0
- brawny-0.1.13.dist-info/top_level.txt +1 -0
brawny/db/sqlite_new.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
"""SQLite database implementation.
|
|
2
|
+
|
|
3
|
+
Slim execution layer with 4 primitives. All business operations live in db/ops/.
|
|
4
|
+
Uses single connection with WAL mode.
|
|
5
|
+
|
|
6
|
+
SQLite supports :name placeholders natively with dict params.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import sqlite3
|
|
12
|
+
import threading
|
|
13
|
+
from contextlib import contextmanager
|
|
14
|
+
from datetime import datetime, timezone
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Any, Iterator
|
|
17
|
+
|
|
18
|
+
from brawny.db.base_new import Database, Dialect, IsolationLevel
|
|
19
|
+
from brawny.model.errors import DatabaseError
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _adapt_datetime(dt: datetime) -> str:
|
|
23
|
+
"""Adapt datetime to ISO format string for SQLite."""
|
|
24
|
+
if dt.tzinfo is None:
|
|
25
|
+
dt = dt.replace(tzinfo=timezone.utc)
|
|
26
|
+
return dt.isoformat()
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _convert_datetime(val: bytes) -> datetime:
|
|
30
|
+
"""Convert ISO format string from SQLite to datetime."""
|
|
31
|
+
s = val.decode("utf-8")
|
|
32
|
+
try:
|
|
33
|
+
return datetime.fromisoformat(s)
|
|
34
|
+
except ValueError:
|
|
35
|
+
# Handle format without timezone
|
|
36
|
+
return datetime.strptime(s, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# Register adapters globally. sqlite3 adapter/converter registration is
# process-wide, so every sqlite3 connection in this process serializes
# datetime values via _adapt_datetime, and columns declared TIMESTAMP are
# decoded via _convert_datetime (requires detect_types=PARSE_DECLTYPES,
# which SQLiteDatabase.connect() passes below).
sqlite3.register_adapter(datetime, _adapt_datetime)
sqlite3.register_converter("TIMESTAMP", _convert_datetime)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _dict_factory(cursor: sqlite3.Cursor, row: tuple) -> dict[str, Any]:
|
|
45
|
+
"""Row factory that returns dict rows."""
|
|
46
|
+
cols = [d[0] for d in cursor.description]
|
|
47
|
+
return dict(zip(cols, row))
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class SQLiteDatabase(Database):
    """SQLite implementation with single connection.

    Uses WAL mode for better concurrency. Thread safety via lock.
    Queries use :name placeholders natively.

    All four primitives (execute / fetch_one / fetch_all /
    execute_rowcount) serialize on one re-entrant lock, so a thread that
    holds the lock inside transaction() can still issue statements on the
    same connection.
    """

    def __init__(self, database_path: str) -> None:
        """Initialize SQLite database.

        Args:
            database_path: Path to SQLite database file (or :memory:).
                A "sqlite:///" URL prefix is stripped if present.
        """
        # Remove sqlite:/// prefix if present
        if database_path.startswith("sqlite:///"):
            database_path = database_path[10:]

        self._database_path = database_path
        self._conn: sqlite3.Connection | None = None
        # RLock: transaction() holds it for the whole block while the
        # primitives (called from inside that block) re-acquire it.
        self._lock = threading.RLock()
        # True while a transaction() block is active; primitives skip
        # their per-statement commit in that case.
        self._in_transaction = False

    @property
    def dialect(self) -> Dialect:
        """Return dialect name for query selection."""
        return "sqlite"

    def connect(self) -> None:
        """Establish database connection.

        Idempotent: returns immediately when already connected.
        """
        if self._conn is not None:
            return

        # Create directory if needed
        if self._database_path != ":memory:":
            path = Path(self._database_path)
            path.parent.mkdir(parents=True, exist_ok=True)

        self._conn = sqlite3.connect(
            self._database_path,
            # PARSE_DECLTYPES activates the module-level TIMESTAMP converter.
            detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
            # Safe because all access is serialized through self._lock.
            check_same_thread=False,
            timeout=30.0,
            isolation_level=None,  # Autocommit mode - we manage transactions manually
        )
        self._conn.row_factory = _dict_factory
        # Enable foreign keys and WAL mode
        self._conn.execute("PRAGMA foreign_keys = ON")
        self._conn.execute("PRAGMA journal_mode = WAL")

    def close(self) -> None:
        """Close database connection (no-op when not connected)."""
        if self._conn:
            self._conn.close()
            self._conn = None

    def is_connected(self) -> bool:
        """Check if database is connected."""
        return self._conn is not None

    def _ensure_connected(self) -> sqlite3.Connection:
        """Ensure connection exists and return it.

        Raises:
            DatabaseError: If connect() has not been called.
        """
        if self._conn is None:
            raise DatabaseError("Database not connected. Call connect() first.")
        return self._conn

    @contextmanager
    def transaction(
        self, isolation_level: IsolationLevel | None = None
    ) -> Iterator[None]:
        """Context manager for database transactions.

        Uses BEGIN IMMEDIATE for write transactions to avoid
        SQLITE_BUSY errors on concurrent writes.

        The lock is held for the entire body, so no other thread can
        interleave statements. Commits on success; rolls back and
        re-raises on any exception.

        Args:
            isolation_level: Ignored on SQLite (BEGIN IMMEDIATE provides isolation)

        Raises:
            DatabaseError: If a transaction is already active on this
                instance (nesting is not supported).
        """
        conn = self._ensure_connected()

        with self._lock:
            if self._in_transaction:
                raise DatabaseError("Nested transactions are not supported")

            try:
                conn.execute("BEGIN IMMEDIATE")
                self._in_transaction = True
                yield
                conn.commit()
            except Exception:
                # NOTE(review): sqlite3.Error escaping here propagates
                # unwrapped, unlike the primitives below which wrap it in
                # DatabaseError — confirm callers handle both.
                conn.rollback()
                raise
            finally:
                # Always reset the flag, even when BEGIN itself failed.
                self._in_transaction = False

    def execute(self, query: str, params: dict[str, Any] | None = None) -> None:
        """Execute a query without returning results.

        Commits immediately unless running inside transaction().

        Raises:
            DatabaseError: Wrapping any sqlite3.Error.
        """
        conn = self._ensure_connected()
        with self._lock:
            try:
                conn.execute(query, params or {})
                if not self._in_transaction:
                    conn.commit()
            except sqlite3.Error as e:
                raise DatabaseError(f"SQLite query failed: {e}") from e

    def fetch_one(
        self, query: str, params: dict[str, Any] | None = None
    ) -> dict[str, Any] | None:
        """Execute a query and return single result or None.

        Raises:
            DatabaseError: Wrapping any sqlite3.Error.
        """
        conn = self._ensure_connected()
        with self._lock:
            try:
                cursor = conn.execute(query, params or {})
                return cursor.fetchone()
            except sqlite3.Error as e:
                raise DatabaseError(f"SQLite query failed: {e}") from e

    def fetch_all(
        self, query: str, params: dict[str, Any] | None = None
    ) -> list[dict[str, Any]]:
        """Execute a query and return all results.

        Raises:
            DatabaseError: Wrapping any sqlite3.Error.
        """
        conn = self._ensure_connected()
        with self._lock:
            try:
                cursor = conn.execute(query, params or {})
                return cursor.fetchall()
            except sqlite3.Error as e:
                raise DatabaseError(f"SQLite query failed: {e}") from e

    def execute_rowcount(
        self, query: str, params: dict[str, Any] | None = None
    ) -> int:
        """Execute a query and return affected row count.

        Commits immediately unless running inside transaction().

        Raises:
            DatabaseError: Wrapping any sqlite3.Error.
        """
        conn = self._ensure_connected()
        with self._lock:
            try:
                cursor = conn.execute(query, params or {})
                if not self._in_transaction:
                    conn.commit()
                return cursor.rowcount
            except sqlite3.Error as e:
                raise DatabaseError(f"SQLite query failed: {e}") from e
|
brawny/history.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
"""Transaction history tracking.
|
|
2
|
+
|
|
3
|
+
Usage:
|
|
4
|
+
from brawny import history
|
|
5
|
+
|
|
6
|
+
history[-1] # Last transaction
|
|
7
|
+
history.filter(sender="0x...") # Filter by attribute
|
|
8
|
+
len(history) # Count
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
from typing import TYPE_CHECKING, Any, Callable, Iterator
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
from brawny.jobs.base import TxReceipt
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
_history: "TxHistory | None" = None
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class TxHistory:
|
|
23
|
+
"""Container for transaction receipts in current session.
|
|
24
|
+
|
|
25
|
+
Brownie-compatible interface for tracking transactions.
|
|
26
|
+
Note: Scripts are single-threaded, so no locking is needed.
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
def __init__(self) -> None:
|
|
30
|
+
self._receipts: list["TxReceipt"] = []
|
|
31
|
+
|
|
32
|
+
def _add(self, receipt: "TxReceipt") -> None:
|
|
33
|
+
"""Add receipt to history (internal use)."""
|
|
34
|
+
self._receipts.append(receipt)
|
|
35
|
+
|
|
36
|
+
def __getitem__(self, index: int) -> "TxReceipt":
|
|
37
|
+
return self._receipts[index]
|
|
38
|
+
|
|
39
|
+
def __len__(self) -> int:
|
|
40
|
+
return len(self._receipts)
|
|
41
|
+
|
|
42
|
+
def __iter__(self) -> Iterator["TxReceipt"]:
|
|
43
|
+
return iter(self._receipts)
|
|
44
|
+
|
|
45
|
+
def filter(
|
|
46
|
+
self,
|
|
47
|
+
key: Callable[["TxReceipt"], bool] | None = None,
|
|
48
|
+
**kwargs: Any,
|
|
49
|
+
) -> list["TxReceipt"]:
|
|
50
|
+
"""Filter transactions by attribute or function.
|
|
51
|
+
|
|
52
|
+
Args:
|
|
53
|
+
key: Optional filter function
|
|
54
|
+
**kwargs: Attribute filters (e.g., sender="0x...")
|
|
55
|
+
|
|
56
|
+
Returns:
|
|
57
|
+
List of matching receipts
|
|
58
|
+
"""
|
|
59
|
+
results = list(self._receipts)
|
|
60
|
+
|
|
61
|
+
if key:
|
|
62
|
+
results = [r for r in results if key(r)]
|
|
63
|
+
|
|
64
|
+
for attr, value in kwargs.items():
|
|
65
|
+
results = [r for r in results if getattr(r, attr, None) == value]
|
|
66
|
+
|
|
67
|
+
return results
|
|
68
|
+
|
|
69
|
+
def clear(self) -> None:
|
|
70
|
+
"""Clear transaction history."""
|
|
71
|
+
self._receipts.clear()
|
|
72
|
+
|
|
73
|
+
def copy(self) -> list["TxReceipt"]:
|
|
74
|
+
"""Get copy of receipts as list."""
|
|
75
|
+
return list(self._receipts)
|
|
76
|
+
|
|
77
|
+
def __repr__(self) -> str:
|
|
78
|
+
return f"<TxHistory [{len(self)} txs]>"
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def _init_history() -> None:
    """Initialize the global history singleton.

    Rebinds the module-level ``_history``. Invoked lazily by
    _get_history() on first access.
    """
    global _history
    _history = TxHistory()
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _get_history() -> TxHistory:
    """Return the global history singleton, creating it on first use."""
    if _history is not None:
        return _history
    _init_history()
    return _history
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def _add_to_history(receipt: "TxReceipt") -> None:
    """Append a receipt to the global session history."""
    session = _get_history()
    session._add(receipt)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
# Proxy for import-time access
|
|
100
|
+
class _HistoryProxy:
    """Forwarding facade over the lazily created history singleton.

    Allows ``from brawny import history`` at import time while the
    underlying TxHistory is only constructed on first use.
    """

    def __getitem__(self, index: int) -> "TxReceipt":
        target = _get_history()
        return target[index]

    def __len__(self) -> int:
        target = _get_history()
        return len(target)

    def __iter__(self) -> Iterator["TxReceipt"]:
        target = _get_history()
        return iter(target)

    def filter(self, key: Callable | None = None, **kwargs: Any) -> list["TxReceipt"]:
        """Delegate to TxHistory.filter with identical semantics."""
        target = _get_history()
        return target.filter(key, **kwargs)

    def clear(self) -> None:
        """Drop all recorded receipts."""
        _get_history().clear()

    def copy(self) -> list["TxReceipt"]:
        """Snapshot the receipts as a plain list."""
        target = _get_history()
        return target.copy()

    def __repr__(self) -> str:
        return repr(_get_history())
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
# Global proxy instance
|
|
126
|
+
history = _HistoryProxy()
|
brawny/interfaces.py
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
"""Project interface support (Brownie-style JSON ABI interfaces)."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
from brawny.logging import get_logger
|
|
11
|
+
|
|
12
|
+
logger = get_logger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _load_json_abi(path: Path) -> list[dict[str, Any]]:
|
|
16
|
+
"""Load ABI list from a JSON file.
|
|
17
|
+
|
|
18
|
+
Accepts either a raw ABI list or an artifact with an "abi" field.
|
|
19
|
+
"""
|
|
20
|
+
data = json.loads(path.read_text(encoding="utf-8"))
|
|
21
|
+
if isinstance(data, list):
|
|
22
|
+
return data
|
|
23
|
+
if isinstance(data, dict) and isinstance(data.get("abi"), list):
|
|
24
|
+
return data["abi"]
|
|
25
|
+
raise ValueError("Interface JSON must be ABI list or object with 'abi' list")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass(frozen=True)
class InterfaceConstructor:
    """Constructor used to create Contract handles from a JSON ABI.

    Calling the instance with an address returns a Contract bound to this
    ABI. ``selectors`` is derived from the ABI once in __post_init__ and
    maps each function's 4-byte selector to its name.
    """

    name: str
    abi: list[dict[str, Any]]
    # Derived, not caller-supplied: 4-byte selector -> function name.
    selectors: dict[bytes, str] = field(init=False)

    def __post_init__(self) -> None:
        # Deferred imports — presumably to keep module import light and
        # avoid circular imports with brawny.alerts; confirm before moving.
        from eth_utils import function_signature_to_4byte_selector
        from brawny.alerts.abi_resolver import get_function_signature

        selectors: dict[bytes, str] = {}
        for item in self.abi:
            # Only function entries have selectors; skip events, errors, etc.
            if item.get("type") != "function":
                continue
            signature = get_function_signature(item["name"], item.get("inputs", []))
            selectors[function_signature_to_4byte_selector(signature)] = item["name"]
        # object.__setattr__ is required to populate a field on a frozen
        # dataclass from inside __post_init__.
        object.__setattr__(self, "selectors", selectors)

    def __call__(self, address: str):
        """Return a Contract handle at *address* using this ABI."""
        from brawny.api import Contract

        return Contract(address, abi=self.abi)

    def __repr__(self) -> str:
        return f"<InterfaceConstructor '{self.name}'>"

    def decode_input(self, calldata: str | bytes) -> tuple[str, list[Any]]:
        """Decode calldata for this interface.

        Args:
            calldata: Hex string or bytes; the first 4 bytes select the
                target function.

        Returns:
            (function_signature, decoded_args)

        Raises:
            ValueError: If no function in this ABI matches the selector.
        """
        from eth_abi import decode as abi_decode
        from eth_utils import function_signature_to_4byte_selector
        from hexbytes import HexBytes
        from brawny.alerts.abi_resolver import get_function_signature

        data = HexBytes(calldata)
        fn_selector = data[:4]

        # Linear scan over the ABI: recompute each function's selector and
        # take the first match (None when nothing matches).
        abi = next(
            (
                item
                for item in self.abi
                if item.get("type") == "function"
                and function_signature_to_4byte_selector(
                    get_function_signature(item["name"], item.get("inputs", []))
                )
                == fn_selector
            ),
            None,
        )
        if abi is None:
            raise ValueError("Four byte selector does not match the ABI for this contract")

        function_sig = get_function_signature(abi["name"], abi.get("inputs", []))
        types_list = [inp["type"] for inp in abi.get("inputs", [])]
        # Zero-argument functions decode to an empty list without calling eth_abi.
        decoded = list(abi_decode(types_list, data[4:])) if types_list else []
        return function_sig, decoded
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class InterfaceContainer:
|
|
92
|
+
"""Container providing access to interfaces within ./interfaces."""
|
|
93
|
+
|
|
94
|
+
def __init__(self, interfaces_dir: Path | None = None) -> None:
|
|
95
|
+
self._interfaces_dir = interfaces_dir or (Path.cwd() / "interfaces")
|
|
96
|
+
self._loaded = False
|
|
97
|
+
|
|
98
|
+
def _load(self) -> None:
|
|
99
|
+
if self._loaded:
|
|
100
|
+
return
|
|
101
|
+
self._loaded = True
|
|
102
|
+
if not self._interfaces_dir.is_dir():
|
|
103
|
+
return
|
|
104
|
+
|
|
105
|
+
for path in sorted(self._interfaces_dir.rglob("*.json")):
|
|
106
|
+
name = path.stem
|
|
107
|
+
try:
|
|
108
|
+
abi = _load_json_abi(path)
|
|
109
|
+
except Exception as exc:
|
|
110
|
+
logger.warning(
|
|
111
|
+
"interface.load_failed",
|
|
112
|
+
name=name,
|
|
113
|
+
path=str(path),
|
|
114
|
+
error=str(exc),
|
|
115
|
+
)
|
|
116
|
+
continue
|
|
117
|
+
self._add(name, abi)
|
|
118
|
+
|
|
119
|
+
def _add(self, name: str, abi: list[dict[str, Any]]) -> None:
|
|
120
|
+
constructor = InterfaceConstructor(name, abi)
|
|
121
|
+
setattr(self, name, constructor)
|
|
122
|
+
|
|
123
|
+
def __getattr__(self, name: str):
|
|
124
|
+
self._load()
|
|
125
|
+
try:
|
|
126
|
+
return self.__dict__[name]
|
|
127
|
+
except KeyError as exc:
|
|
128
|
+
raise AttributeError(name) from exc
|
|
129
|
+
|
|
130
|
+
def __dir__(self) -> list[str]:
|
|
131
|
+
self._load()
|
|
132
|
+
return sorted(set(self.__dict__.keys()))
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
# Singleton instance (Brownie-style)
|
|
136
|
+
interface = InterfaceContainer()
|
brawny/invariants.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
"""System invariants exposed as metrics.
|
|
2
|
+
|
|
3
|
+
These queries should return 0 in a healthy system. Non-zero values
|
|
4
|
+
indicate potential issues that need investigation.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from dataclasses import asdict, dataclass
|
|
10
|
+
from typing import TYPE_CHECKING, Any
|
|
11
|
+
|
|
12
|
+
from brawny.alerts.health import health_alert
|
|
13
|
+
from brawny.logging import get_logger
|
|
14
|
+
from brawny.metrics import (
|
|
15
|
+
INVARIANT_NONCE_GAP_AGE,
|
|
16
|
+
INVARIANT_ORPHANED_CLAIMS,
|
|
17
|
+
INVARIANT_ORPHANED_NONCES,
|
|
18
|
+
INVARIANT_PENDING_NO_ATTEMPTS,
|
|
19
|
+
INVARIANT_STUCK_CLAIMED,
|
|
20
|
+
get_metrics,
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
if TYPE_CHECKING:
|
|
24
|
+
from brawny.db.base import Database
|
|
25
|
+
|
|
26
|
+
logger = get_logger(__name__)
|
|
27
|
+
|
|
28
|
+
# Threshold for "stuck" claimed intents (minutes)
|
|
29
|
+
STUCK_CLAIM_THRESHOLD_MINUTES = 10
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _get_stuck_claim_details(
    db: "Database",
    chain_id: int,
    older_than_minutes: int = STUCK_CLAIM_THRESHOLD_MINUTES,
    limit: int = 20,
) -> list[dict[str, Any]]:
    """Return a bounded sample of intents stuck in 'claimed', oldest first.

    Each row carries intent_id, job_id and age_seconds; used to enrich
    invariant-violation logs and alerts with debugging context.
    """
    if db.dialect == "sqlite":
        # SQLite has no interval type: compute age via julianday math and
        # build the cutoff by concatenating a negative minute offset.
        query = """
            SELECT intent_id, job_id,
                   (julianday('now') - julianday(claimed_at)) * 86400 as age_seconds
            FROM tx_intents
            WHERE chain_id = :chain_id
              AND status = 'claimed'
              AND datetime(claimed_at) < datetime('now', :offset || ' minutes')
            ORDER BY claimed_at ASC
            LIMIT :limit
        """
        params: dict[str, Any] = {
            "chain_id": chain_id,
            "offset": -older_than_minutes,
            "limit": limit,
        }
    else:
        query = """
            SELECT intent_id, job_id,
                   EXTRACT(EPOCH FROM (NOW() - claimed_at)) as age_seconds
            FROM tx_intents
            WHERE chain_id = :chain_id
              AND status = 'claimed'
              AND claimed_at < NOW() - make_interval(mins => :older_than_minutes)
            ORDER BY claimed_at ASC
            LIMIT :limit
        """
        params = {
            "chain_id": chain_id,
            "older_than_minutes": older_than_minutes,
            "limit": limit,
        }

    return db.execute_returning(query, params)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@dataclass
class InvariantMetrics:
    """Current values of all invariant checks.

    All counters should be 0 (and the age 0.0) in a healthy system;
    each field is populated from the corresponding Database method in
    collect_invariants().
    """

    # db.count_stuck_claimed(chain_id): intents stuck in 'claimed' status.
    stuck_claimed_intents: int
    # db.get_oldest_nonce_gap_age_seconds(chain_id): age of oldest gap.
    nonce_gap_oldest_age_seconds: float
    # db.count_pending_without_attempts(chain_id).
    pending_without_attempts: int
    # db.count_orphaned_claims(chain_id); semantics defined by the DB layer.
    orphaned_claims: int
    # db.count_orphaned_nonces(chain_id); semantics defined by the DB layer.
    orphaned_nonces: int
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def collect_invariants(
    db: Database,
    chain_id: int,
    health_send_fn: Any = None,
    health_chat_id: str | None = None,
    health_cooldown: int = 1800,
) -> InvariantMetrics:
    """Collect all invariant metrics for a chain.

    Call periodically (e.g., every 30 seconds) to update Prometheus gauges.
    Uses DB methods added in Phase 1 (count_pending_without_attempts) and
    Phase 2 (count_stuck_claimed, etc.).

    Args:
        db: Database handle providing the invariant count queries.
        chain_id: Chain to inspect; also used as the metric label.
        health_send_fn: Optional sender passed through to health_alert().
        health_chat_id: Optional chat id passed through to health_alert().
        health_cooldown: Alert cooldown in seconds (default 30 minutes).

    Returns:
        The freshly collected InvariantMetrics snapshot.
    """
    m = InvariantMetrics(
        stuck_claimed_intents=db.count_stuck_claimed(chain_id),
        nonce_gap_oldest_age_seconds=db.get_oldest_nonce_gap_age_seconds(chain_id),
        pending_without_attempts=db.count_pending_without_attempts(chain_id),
        orphaned_claims=db.count_orphaned_claims(chain_id),
        orphaned_nonces=db.count_orphaned_nonces(chain_id),
    )

    # Export to Prometheus using metric constants
    metrics = get_metrics()
    metrics.gauge(INVARIANT_STUCK_CLAIMED).set(
        m.stuck_claimed_intents, chain_id=chain_id
    )
    metrics.gauge(INVARIANT_NONCE_GAP_AGE).set(
        m.nonce_gap_oldest_age_seconds, chain_id=chain_id
    )
    metrics.gauge(INVARIANT_PENDING_NO_ATTEMPTS).set(
        m.pending_without_attempts, chain_id=chain_id
    )
    metrics.gauge(INVARIANT_ORPHANED_CLAIMS).set(
        m.orphaned_claims, chain_id=chain_id
    )
    metrics.gauge(INVARIANT_ORPHANED_NONCES).set(
        m.orphaned_nonces, chain_id=chain_id
    )

    # Log if any non-zero
    # NOTE(review): nonce_gap_oldest_age_seconds is absent from this check,
    # so a nonce gap alone triggers neither the log nor the health alert
    # below — confirm that is intentional.
    if any([
        m.stuck_claimed_intents,
        m.pending_without_attempts,
        m.orphaned_claims,
        m.orphaned_nonces,
    ]):
        extra: dict[str, Any] = {}
        if m.stuck_claimed_intents:
            details = _get_stuck_claim_details(db, chain_id)
            extra = {
                "stuck_intents_sample": [d["intent_id"] for d in details],
                "stuck_jobs_sample": list(set(d["job_id"] for d in details)),
                "oldest_claim_age_seconds": details[0]["age_seconds"] if details else 0,
            }

        # Send health alert for stuck claimed intents
        # NOTE(review): this alert fires for *any* violation above, yet its
        # message/fingerprint only describe stuck_claimed_intents (possibly
        # 0). Also, if the count and the detail query disagree (race) the
        # samples can be empty lists and the [0] subscripts below would
        # raise IndexError — verify both.
        health_alert(
            component="brawny.invariants",
            chain_id=chain_id,
            error=f"stuck_claimed_intents={m.stuck_claimed_intents}, oldest_age={extra.get('oldest_claim_age_seconds', 0):.0f}s",
            fingerprint_key="invariant.stuck_claimed",
            job_id=extra.get("stuck_jobs_sample", [None])[0],
            intent_id=extra.get("stuck_intents_sample", [None])[0],
            action="Run: brawny intents clear-stuck",
            db_dialect=db.dialect,
            send_fn=health_send_fn,
            health_chat_id=health_chat_id,
            cooldown_seconds=health_cooldown,
        )

        logger.warning(
            "invariants.violations_detected",
            **asdict(m),
            chain_id=chain_id,
            **extra,
        )

    return m
|
brawny/jobs/__init__.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""Job framework with base class, registry, and discovery."""
|
|
2
|
+
|
|
3
|
+
from brawny.jobs.base import Job, TxInfo, TxReceipt, BlockInfo
|
|
4
|
+
from brawny.jobs.registry import job, registry, get_registry, JobRegistry
|
|
5
|
+
from brawny.jobs.discovery import discover_jobs, discover_jobs_from_path, auto_discover_jobs
|
|
6
|
+
from brawny.jobs.job_validation import validate_job, validate_all_jobs
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
# Base classes
|
|
10
|
+
"Job",
|
|
11
|
+
"TxInfo",
|
|
12
|
+
"TxReceipt",
|
|
13
|
+
"BlockInfo",
|
|
14
|
+
# Registry
|
|
15
|
+
"job",
|
|
16
|
+
"registry",
|
|
17
|
+
"get_registry",
|
|
18
|
+
"JobRegistry",
|
|
19
|
+
# Discovery
|
|
20
|
+
"discover_jobs",
|
|
21
|
+
"discover_jobs_from_path",
|
|
22
|
+
"auto_discover_jobs",
|
|
23
|
+
# Validation
|
|
24
|
+
"validate_job",
|
|
25
|
+
"validate_all_jobs",
|
|
26
|
+
]
|