brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. brawny/__init__.py +2 -0
  2. brawny/_context.py +5 -5
  3. brawny/_rpc/__init__.py +36 -12
  4. brawny/_rpc/broadcast.py +14 -13
  5. brawny/_rpc/caller.py +243 -0
  6. brawny/_rpc/client.py +539 -0
  7. brawny/_rpc/clients.py +11 -11
  8. brawny/_rpc/context.py +23 -0
  9. brawny/_rpc/errors.py +465 -31
  10. brawny/_rpc/gas.py +7 -6
  11. brawny/_rpc/pool.py +18 -0
  12. brawny/_rpc/retry.py +266 -0
  13. brawny/_rpc/retry_policy.py +81 -0
  14. brawny/accounts.py +28 -9
  15. brawny/alerts/__init__.py +15 -18
  16. brawny/alerts/abi_resolver.py +212 -36
  17. brawny/alerts/base.py +2 -2
  18. brawny/alerts/contracts.py +77 -10
  19. brawny/alerts/errors.py +30 -3
  20. brawny/alerts/events.py +38 -5
  21. brawny/alerts/health.py +19 -13
  22. brawny/alerts/send.py +513 -55
  23. brawny/api.py +39 -11
  24. brawny/assets/AGENTS.md +325 -0
  25. brawny/async_runtime.py +48 -0
  26. brawny/chain.py +3 -3
  27. brawny/cli/commands/__init__.py +2 -0
  28. brawny/cli/commands/console.py +69 -19
  29. brawny/cli/commands/contract.py +2 -2
  30. brawny/cli/commands/controls.py +121 -0
  31. brawny/cli/commands/health.py +2 -2
  32. brawny/cli/commands/job_dev.py +6 -5
  33. brawny/cli/commands/jobs.py +99 -2
  34. brawny/cli/commands/maintenance.py +13 -29
  35. brawny/cli/commands/migrate.py +1 -0
  36. brawny/cli/commands/run.py +10 -3
  37. brawny/cli/commands/script.py +8 -3
  38. brawny/cli/commands/signer.py +143 -26
  39. brawny/cli/helpers.py +0 -3
  40. brawny/cli_templates.py +25 -349
  41. brawny/config/__init__.py +4 -1
  42. brawny/config/models.py +43 -57
  43. brawny/config/parser.py +268 -57
  44. brawny/config/validation.py +52 -15
  45. brawny/daemon/context.py +4 -2
  46. brawny/daemon/core.py +185 -63
  47. brawny/daemon/loops.py +166 -98
  48. brawny/daemon/supervisor.py +261 -0
  49. brawny/db/__init__.py +14 -26
  50. brawny/db/base.py +248 -151
  51. brawny/db/global_cache.py +11 -1
  52. brawny/db/migrate.py +175 -28
  53. brawny/db/migrations/001_init.sql +4 -3
  54. brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
  55. brawny/db/migrations/011_add_job_logs.sql +1 -2
  56. brawny/db/migrations/012_add_claimed_by.sql +2 -2
  57. brawny/db/migrations/013_attempt_unique.sql +10 -0
  58. brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
  59. brawny/db/migrations/015_add_signer_alias.sql +14 -0
  60. brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
  61. brawny/db/migrations/017_add_job_drain.sql +6 -0
  62. brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
  63. brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
  64. brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
  65. brawny/db/ops/__init__.py +3 -25
  66. brawny/db/ops/logs.py +1 -2
  67. brawny/db/queries.py +47 -91
  68. brawny/db/serialized.py +65 -0
  69. brawny/db/sqlite/__init__.py +1001 -0
  70. brawny/db/sqlite/connection.py +231 -0
  71. brawny/db/sqlite/execute.py +116 -0
  72. brawny/db/sqlite/mappers.py +190 -0
  73. brawny/db/sqlite/repos/attempts.py +372 -0
  74. brawny/db/sqlite/repos/block_state.py +102 -0
  75. brawny/db/sqlite/repos/cache.py +104 -0
  76. brawny/db/sqlite/repos/intents.py +1021 -0
  77. brawny/db/sqlite/repos/jobs.py +200 -0
  78. brawny/db/sqlite/repos/maintenance.py +182 -0
  79. brawny/db/sqlite/repos/signers_nonces.py +566 -0
  80. brawny/db/sqlite/tx.py +119 -0
  81. brawny/http.py +194 -0
  82. brawny/invariants.py +11 -24
  83. brawny/jobs/base.py +8 -0
  84. brawny/jobs/job_validation.py +2 -1
  85. brawny/keystore.py +83 -7
  86. brawny/lifecycle.py +64 -12
  87. brawny/logging.py +0 -2
  88. brawny/metrics.py +84 -12
  89. brawny/model/contexts.py +111 -9
  90. brawny/model/enums.py +1 -0
  91. brawny/model/errors.py +18 -0
  92. brawny/model/types.py +47 -131
  93. brawny/network_guard.py +133 -0
  94. brawny/networks/__init__.py +5 -5
  95. brawny/networks/config.py +1 -7
  96. brawny/networks/manager.py +14 -11
  97. brawny/runtime_controls.py +74 -0
  98. brawny/scheduler/poller.py +11 -7
  99. brawny/scheduler/reorg.py +95 -39
  100. brawny/scheduler/runner.py +442 -168
  101. brawny/scheduler/shutdown.py +3 -3
  102. brawny/script_tx.py +3 -3
  103. brawny/telegram.py +53 -7
  104. brawny/testing.py +1 -0
  105. brawny/timeout.py +38 -0
  106. brawny/tx/executor.py +922 -308
  107. brawny/tx/intent.py +54 -16
  108. brawny/tx/monitor.py +31 -12
  109. brawny/tx/nonce.py +212 -90
  110. brawny/tx/replacement.py +69 -18
  111. brawny/tx/retry_policy.py +24 -0
  112. brawny/tx/stages/types.py +75 -0
  113. brawny/types.py +18 -0
  114. brawny/utils.py +41 -0
  115. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
  116. brawny-0.1.22.dist-info/RECORD +163 -0
  117. brawny/_rpc/manager.py +0 -982
  118. brawny/_rpc/selector.py +0 -156
  119. brawny/db/base_new.py +0 -165
  120. brawny/db/mappers.py +0 -182
  121. brawny/db/migrations/008_add_transactions.sql +0 -72
  122. brawny/db/ops/attempts.py +0 -108
  123. brawny/db/ops/blocks.py +0 -83
  124. brawny/db/ops/cache.py +0 -93
  125. brawny/db/ops/intents.py +0 -296
  126. brawny/db/ops/jobs.py +0 -110
  127. brawny/db/ops/nonces.py +0 -322
  128. brawny/db/postgres.py +0 -2535
  129. brawny/db/postgres_new.py +0 -196
  130. brawny/db/sqlite.py +0 -2733
  131. brawny/db/sqlite_new.py +0 -191
  132. brawny-0.1.13.dist-info/RECORD +0 -141
  133. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
  134. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
  135. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0
brawny/http.py ADDED
@@ -0,0 +1,194 @@
1
+ """Approved HTTP client for job code with allowlist enforcement."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import time
6
+ from dataclasses import dataclass, field
7
+ from typing import Any
8
+ from urllib.parse import urlparse
9
+
10
+ import httpx
11
+
12
+ from brawny.logging import get_logger
13
+ from brawny.network_guard import allow_network_calls
14
+
15
+ logger = get_logger(__name__)
16
+
17
+ _RETRY_STATUS = {429, 500, 502, 503, 504}
18
+
19
+
20
@dataclass(frozen=True)
class HttpConfig:
    """Immutable HTTP policy applied to job-originated requests.

    An empty ``allowed_domains`` list denies all outbound HTTP.
    """

    # Hostnames/IPs jobs may contact ("*" = allow all; "*.x" / ".x" = subdomains).
    allowed_domains: list[str] = field(default_factory=list)
    # Seconds allowed to establish a connection.
    connect_timeout_seconds: float = 5.0
    # Seconds allowed while waiting for response data.
    read_timeout_seconds: float = 10.0
    # Extra attempts after the first request (0 disables retries).
    max_retries: int = 2
    # Initial backoff delay; doubled on each subsequent retry.
    backoff_base_seconds: float = 0.5
29
+
30
+
31
class ApprovedHttpClient:
    """HTTP client with retries, timeouts, and domain allowlist.

    Retries apply to GET/HEAD by default. Non-idempotent methods require
    explicit opt-in (``retry_non_idempotent``) or an idempotency key.
    """

    def __init__(self, config: HttpConfig) -> None:
        self._config = config

    @property
    def config(self) -> HttpConfig:
        """The policy this client enforces."""
        return self._config

    def _is_allowed_host(self, host: str) -> bool:
        """Return True if *host* matches the configured allowlist.

        Supported entry forms: exact hostname, "*.x" / ".x" subdomain
        wildcards, literal IP addresses, and "*" (allow everything).
        An empty allowlist denies all hosts.
        """
        if not host:
            return False
        allowed = self._config.allowed_domains
        if not allowed:
            return False
        if "*" in allowed:
            return True
        host = host.lower().rstrip(".")
        try:
            import ipaddress

            ip = ipaddress.ip_address(host)
            # IP literals must match an allowlist entry exactly.
            return any(entry.strip().lower() == str(ip) for entry in allowed)
        except ValueError:
            pass  # not an IP literal; fall through to hostname matching
        for entry in allowed:
            entry = entry.lower().strip()
            if not entry:
                continue
            if entry.startswith("*."):
                # "*.example.com" matches subdomains, not the bare domain.
                suffix = entry[1:]
                if host.endswith(suffix):
                    return True
            elif entry.startswith("."):
                if host.endswith(entry):
                    return True
            elif host == entry:
                return True
        return False

    def _validate_url(self, url: str) -> None:
        """Raise ValueError unless *url* is http(s) and its host is allowed."""
        parsed = urlparse(url)
        if parsed.scheme not in ("http", "https"):
            raise ValueError(f"Only http/https URLs are allowed: {url}")
        host = parsed.hostname or ""
        if not self._is_allowed_host(host):
            raise ValueError(f"HTTP domain not allowed: {host or url}")

    def _prepare(
        self,
        method: str,
        timeout: float | None,
        retry_non_idempotent: bool,
        idempotency_key: str | None,
        kwargs: dict[str, Any],
    ) -> tuple[Any, int, bool]:
        """Shared pre-flight for the sync and async request paths.

        Mutates *kwargs* in place (injects the headers dict) and returns
        ``(timeout, total_attempts, allow_retry)``.
        """
        effective_timeout = timeout or self._timeout()
        attempts = max(0, self._config.max_retries) + 1
        # Only idempotent methods are retried automatically; others need opt-in.
        allow_retry = (
            method.upper() in ("GET", "HEAD")
            or retry_non_idempotent
            or idempotency_key is not None
        )
        headers = dict(kwargs.pop("headers", {}) or {})
        if idempotency_key:
            headers["Idempotency-Key"] = idempotency_key
        kwargs["headers"] = headers
        return effective_timeout, attempts, allow_retry

    def request(
        self,
        method: str,
        url: str,
        *,
        timeout: float | None = None,
        retry_non_idempotent: bool = False,
        idempotency_key: str | None = None,
        **kwargs: Any,
    ) -> httpx.Response:
        """Perform *method* on *url* with allowlist, timeout and retry policy.

        Raises:
            ValueError: if the URL scheme or host is not allowed.
            httpx.HTTPStatusError: for non-retryable (or exhausted) 4xx/5xx.
            httpx.RequestError: for transport failures after retries.
        """
        self._validate_url(url)
        timeout, attempts, allow_retry = self._prepare(
            method, timeout, retry_non_idempotent, idempotency_key, kwargs
        )
        last_error: Exception | None = None
        for attempt in range(attempts):
            try:
                with allow_network_calls(reason="approved_http_client"):
                    with httpx.Client(timeout=timeout) as client:
                        resp = client.request(method, url, **kwargs)
                # Back off outside the client context so the connection pool
                # is not held open while sleeping.
                if resp.status_code in _RETRY_STATUS and attempt < attempts - 1 and allow_retry:
                    self._sleep_backoff(attempt)
                    continue
                resp.raise_for_status()
                return resp
            except (httpx.RequestError, httpx.HTTPStatusError) as exc:
                last_error = exc
                if attempt < attempts - 1 and allow_retry:
                    self._sleep_backoff(attempt)
                    continue
                raise
        # Defensive: the loop always returns or raises on its final pass.
        if last_error:
            raise last_error
        raise RuntimeError("HTTP request failed without error")

    async def async_request(
        self,
        method: str,
        url: str,
        *,
        timeout: float | None = None,
        retry_non_idempotent: bool = False,
        idempotency_key: str | None = None,
        **kwargs: Any,
    ) -> httpx.Response:
        """Async counterpart of ``request()`` with identical policy handling."""
        self._validate_url(url)
        timeout, attempts, allow_retry = self._prepare(
            method, timeout, retry_non_idempotent, idempotency_key, kwargs
        )
        last_error: Exception | None = None
        for attempt in range(attempts):
            try:
                with allow_network_calls(reason="approved_http_client"):
                    async with httpx.AsyncClient(timeout=timeout) as client:
                        resp = await client.request(method, url, **kwargs)
                if resp.status_code in _RETRY_STATUS and attempt < attempts - 1 and allow_retry:
                    await self._async_sleep_backoff(attempt)
                    continue
                resp.raise_for_status()
                return resp
            except (httpx.RequestError, httpx.HTTPStatusError) as exc:
                last_error = exc
                if attempt < attempts - 1 and allow_retry:
                    await self._async_sleep_backoff(attempt)
                    continue
                raise
        # Defensive: the loop always returns or raises on its final pass.
        if last_error:
            raise last_error
        raise RuntimeError("HTTP request failed without error")

    def get(self, url: str, **kwargs: Any) -> httpx.Response:
        """Convenience wrapper for a sync GET (retried by default)."""
        return self.request("GET", url, **kwargs)

    def post(self, url: str, **kwargs: Any) -> httpx.Response:
        """Convenience wrapper for a sync POST (not retried by default)."""
        return self.request("POST", url, **kwargs)

    async def async_get(self, url: str, **kwargs: Any) -> httpx.Response:
        """Convenience wrapper for an async GET (retried by default)."""
        return await self.async_request("GET", url, **kwargs)

    async def async_post(self, url: str, **kwargs: Any) -> httpx.Response:
        """Convenience wrapper for an async POST (not retried by default)."""
        return await self.async_request("POST", url, **kwargs)

    def _sleep_backoff(self, attempt: int) -> None:
        """Block for the exponential backoff delay of *attempt* (0-based)."""
        time.sleep(self._config.backoff_base_seconds * (2 ** attempt))

    async def _async_sleep_backoff(self, attempt: int) -> None:
        """Await the exponential backoff delay of *attempt* (0-based)."""
        import asyncio

        await asyncio.sleep(self._config.backoff_base_seconds * (2 ** attempt))

    def _timeout(self) -> httpx.Timeout:
        """Build the default httpx.Timeout from the configured policy."""
        return httpx.Timeout(
            connect=self._config.connect_timeout_seconds,
            read=self._config.read_timeout_seconds,
            write=self._config.read_timeout_seconds,
            pool=self._config.connect_timeout_seconds,
        )
brawny/invariants.py CHANGED
@@ -36,30 +36,17 @@ def _get_stuck_claim_details(
36
36
  limit: int = 20,
37
37
  ) -> list[dict[str, Any]]:
38
38
  """Fetch sample of stuck claimed intents for debugging context."""
39
- if db.dialect == "sqlite":
40
- query = """
41
- SELECT intent_id, job_id,
42
- (julianday('now') - julianday(claimed_at)) * 86400 as age_seconds
43
- FROM tx_intents
44
- WHERE chain_id = :chain_id
45
- AND status = 'claimed'
46
- AND datetime(claimed_at) < datetime('now', :offset || ' minutes')
47
- ORDER BY claimed_at ASC
48
- LIMIT :limit
49
- """
50
- params = {"chain_id": chain_id, "offset": -older_than_minutes, "limit": limit}
51
- else:
52
- query = """
53
- SELECT intent_id, job_id,
54
- EXTRACT(EPOCH FROM (NOW() - claimed_at)) as age_seconds
55
- FROM tx_intents
56
- WHERE chain_id = :chain_id
57
- AND status = 'claimed'
58
- AND claimed_at < NOW() - make_interval(mins => :older_than_minutes)
59
- ORDER BY claimed_at ASC
60
- LIMIT :limit
61
- """
62
- params = {"chain_id": chain_id, "older_than_minutes": older_than_minutes, "limit": limit}
39
+ query = """
40
+ SELECT intent_id, job_id,
41
+ (julianday('now') - julianday(claimed_at)) * 86400 as age_seconds
42
+ FROM tx_intents
43
+ WHERE chain_id = :chain_id
44
+ AND status = 'claimed'
45
+ AND datetime(claimed_at) < datetime('now', :offset || ' minutes')
46
+ ORDER BY claimed_at ASC
47
+ LIMIT :limit
48
+ """
49
+ params = {"chain_id": chain_id, "offset": -older_than_minutes, "limit": limit}
63
50
 
64
51
  return db.execute_returning(query, params)
65
52
 
brawny/jobs/base.py CHANGED
@@ -104,6 +104,7 @@ class Job(ABC):
104
104
  check_timeout_seconds: int = 30
105
105
  build_timeout_seconds: int = 10
106
106
  max_in_flight_intents: int | None = None
107
+ cooldown_seconds: int | None = None
107
108
 
108
109
  # Simulation config
109
110
  disable_simulation: bool = False
@@ -165,6 +166,13 @@ class Job(ABC):
165
166
  elif cls.telegram_chat_ids is None:
166
167
  cls.telegram_chat_ids = []
167
168
 
169
+ def cooldown_key(self, trigger: Trigger) -> str | None:
170
+ """Optional cooldown key for this trigger.
171
+
172
+ Return None for job-wide cooldown.
173
+ """
174
+ return None
175
+
168
176
  def check(self, *args: Any, **kwargs: Any) -> Trigger | None:
169
177
  """Check if job should trigger.
170
178
 
@@ -79,7 +79,8 @@ def validate_job(job: "Job", keystore: "Keystore | None" = None) -> list[str]:
79
79
  signer = getattr(job, "_signer_name", None)
80
80
  if signer and keystore:
81
81
  if not keystore.has_key(signer):
82
- available = keystore.list_keys()
82
+ # Show aliases if available (what user can type), else addresses
83
+ available = keystore.list_aliases() or keystore.list_keys()
83
84
  if available:
84
85
  errors.append(f"signer '{signer}' not found in keystore (available: {', '.join(available)})")
85
86
  else:
brawny/keystore.py CHANGED
@@ -31,6 +31,16 @@ load_dotenv()
31
31
 
32
32
  logger = get_logger(__name__)
33
33
 
34
+
35
def _normalize_addr(addr: str) -> str:
    """Return *addr* as a checksummed address, prepending "0x" when missing."""
    prefixed = addr if addr.startswith("0x") else "0x" + addr
    return Web3.to_checksum_address(prefixed)
43
+
34
44
  if TYPE_CHECKING:
35
45
  from eth_account.datastructures import SignedTransaction
36
46
 
@@ -80,13 +90,35 @@ class Keystore(ABC):
80
90
 
81
91
  @abstractmethod
82
92
  def list_keys(self) -> list[str]:
83
- """List available key identifiers.
93
+ """Return list of signer addresses (checksummed, sorted, de-duplicated).
84
94
 
85
95
  Returns:
86
- List of key identifiers (addresses)
96
+ Sorted list of unique checksummed Ethereum addresses.
87
97
  """
88
98
  ...
89
99
 
100
+ def list_aliases(self) -> list[str]:
101
+ """Return list of signer aliases/names (sorted).
102
+
103
+ Default implementation returns empty list.
104
+ Override in subclasses that support aliases.
105
+
106
+ Returns:
107
+ Sorted list of human-readable aliases, or empty if no aliases configured.
108
+ """
109
+ return []
110
+
111
+ def list_keys_with_aliases(self) -> list[tuple[str, str | None]]:
112
+ """Return list of (address, alias) tuples, sorted by address.
113
+
114
+ Default implementation returns addresses with None alias.
115
+ Override in subclasses that support aliases.
116
+
117
+ Returns:
118
+ List of tuples where each tuple is (checksummed_address, alias_or_none).
119
+ """
120
+ return [(addr, None) for addr in self.list_keys()]
121
+
90
122
  def has_key(self, key_id: str) -> bool:
91
123
  """Check if a key exists.
92
124
 
@@ -196,8 +228,8 @@ class EnvKeystore(Keystore):
196
228
  raise KeystoreError(f"Signing failed: {type(e).__name__}: {e}")
197
229
 
198
230
  def list_keys(self) -> list[str]:
199
- """List all available signer addresses."""
200
- return list(self._accounts.keys())
231
+ """Return list of signer addresses (checksummed, sorted)."""
232
+ return sorted(self._accounts.keys())
201
233
 
202
234
 
203
235
  class FileKeystore(Keystore):
@@ -430,13 +462,57 @@ class FileKeystore(Keystore):
430
462
  raise KeystoreError(f"Signing failed: {type(e).__name__}: {e}")
431
463
 
432
464
  def list_keys(self) -> list[str]:
433
- """List all available signer addresses."""
465
+ """Return list of signer addresses (checksummed, sorted, de-duplicated).
466
+
467
+ **Source of truth behavior:**
468
+ - When _name_to_address mapping exists, it is the SOLE source of truth.
469
+ The keystore directory is NOT scanned. This means addresses only in
470
+ keystore files (without aliases) will NOT appear.
471
+ - When _name_to_address is empty/None, addresses are extracted from
472
+ the loaded accounts.
473
+
474
+ Returns:
475
+ Sorted list of unique checksummed Ethereum addresses.
476
+ """
477
+ if self._name_to_address:
478
+ # Mapping is source of truth - normalize, de-dupe, and sort
479
+ # (multiple aliases may map to same address)
480
+ return sorted({_normalize_addr(a) for a in self._name_to_address.values()})
481
+
482
+ # Return addresses from loaded accounts
483
+ return sorted(self._accounts.keys())
484
+
485
+ def list_aliases(self) -> list[str]:
486
+ """Return list of signer aliases/names (sorted).
487
+
488
+ Returns:
489
+ Sorted list of human-readable aliases, or empty if no aliases configured.
490
+ """
434
491
  if self._name_to_address:
435
492
  return sorted(self._name_to_address.keys())
436
- return list(self._accounts.keys())
493
+ return []
494
+
495
+ def list_keys_with_aliases(self) -> list[tuple[str, str | None]]:
496
+ """Return list of (address, alias) tuples, sorted by address.
497
+
498
+ Returns:
499
+ List of tuples where each tuple is (checksummed_address, alias_or_none).
500
+ """
501
+ if self._name_to_address:
502
+ pairs = [
503
+ (_normalize_addr(addr), name)
504
+ for name, addr in self._name_to_address.items()
505
+ ]
506
+ return sorted(pairs, key=lambda x: x[0]) # Sort by address
507
+
508
+ # No aliases: return addresses with None alias
509
+ return [(addr, None) for addr in self.list_keys()]
437
510
 
438
511
  def list_named_keys(self) -> dict[str, str]:
439
- """Return mapping of wallet name to address."""
512
+ """Return mapping of wallet name to address.
513
+
514
+ DEPRECATED: Use list_aliases() or list_keys_with_aliases() instead.
515
+ """
440
516
  return dict(self._name_to_address)
441
517
 
442
518
  def get_warnings(self) -> list[StartupMessage]:
brawny/lifecycle.py CHANGED
@@ -10,9 +10,11 @@ from contextlib import contextmanager
10
10
  from typing import TYPE_CHECKING, Any
11
11
  from uuid import UUID
12
12
 
13
- from brawny.alerts.send import AlertConfig, AlertEvent, AlertPayload
13
+ from brawny.alerts.send import AlertConfig, AlertEvent, AlertPayload, JobAlertSender
14
+ from brawny.http import ApprovedHttpClient
14
15
  from brawny.jobs.kv import DatabaseJobKVStore, DatabaseJobKVReader
15
16
  from brawny.logging import LogEvents, get_logger
17
+ from brawny.metrics import BACKGROUND_TASK_ERRORS, get_metrics
16
18
  from brawny.model.contexts import (
17
19
  AlertContext,
18
20
  BlockContext,
@@ -29,13 +31,14 @@ from brawny.model.errors import (
29
31
  )
30
32
  from brawny.model.events import DecodedEvent
31
33
  from brawny.model.types import BlockInfo, Trigger, HookName
34
+ from brawny.network_guard import job_network_guard
32
35
 
33
36
  if TYPE_CHECKING:
34
37
  from brawny.config import Config
35
38
  from brawny.db.base import Database
36
39
  from brawny.jobs.base import Job, TxInfo, TxReceipt, BlockInfo as AlertBlockInfo
37
40
  from brawny.model.types import TxAttempt, TxIntent
38
- from brawny._rpc.manager import RPCManager
41
+ from brawny._rpc.clients import ReadClient
39
42
  from brawny.alerts.contracts import ContractSystem, SimpleContractFactory
40
43
  from brawny.telegram import TelegramBot
41
44
 
@@ -57,7 +60,7 @@ class LifecycleDispatcher:
57
60
  def __init__(
58
61
  self,
59
62
  db: Database,
60
- rpc: RPCManager,
63
+ rpc: ReadClient,
61
64
  config: Config,
62
65
  jobs: dict[str, Job],
63
66
  contract_system: ContractSystem | None = None,
@@ -69,6 +72,7 @@ class LifecycleDispatcher:
69
72
  self._jobs = jobs
70
73
  self._contract_system = contract_system
71
74
  self._telegram_bot = telegram_bot
75
+ self._http_client = ApprovedHttpClient(config.http)
72
76
  self._global_alert_config = self._build_global_alert_config()
73
77
 
74
78
  # =========================================================================
@@ -95,7 +99,8 @@ class LifecycleDispatcher:
95
99
  try:
96
100
  with self._alert_context(ctx):
97
101
  set_job_context(True)
98
- hook_fn(ctx)
102
+ with job_network_guard():
103
+ hook_fn(ctx)
99
104
  except Exception as e:
100
105
  logger.error(
101
106
  f"job.{hook}_crashed",
@@ -143,6 +148,7 @@ class LifecycleDispatcher:
143
148
  block=block_ctx,
144
149
  kv=DatabaseJobKVStore(self._db, job.job_id),
145
150
  logger=logger.bind(job_id=job.job_id, chain_id=self._config.chain_id),
151
+ http=self._http_client,
146
152
  job_id=job.job_id,
147
153
  job_name=job.name,
148
154
  chain_id=self._config.chain_id,
@@ -150,6 +156,7 @@ class LifecycleDispatcher:
150
156
  telegram_config=self._config.telegram,
151
157
  telegram_bot=self._telegram_bot,
152
158
  job_alert_to=getattr(job, "_alert_to", None),
159
+ _alert_sender=self._make_job_alert_sender(job),
153
160
  )
154
161
  self.dispatch_hook(job, "on_trigger", ctx)
155
162
 
@@ -188,6 +195,7 @@ class LifecycleDispatcher:
188
195
  block=block_ctx,
189
196
  kv=DatabaseJobKVReader(self._db, job.job_id),
190
197
  logger=logger.bind(job_id=job.job_id, chain_id=self._config.chain_id),
198
+ http=self._http_client,
191
199
  job_id=job.job_id,
192
200
  job_name=job.name,
193
201
  chain_id=self._config.chain_id,
@@ -195,6 +203,7 @@ class LifecycleDispatcher:
195
203
  telegram_config=self._config.telegram,
196
204
  telegram_bot=self._telegram_bot,
197
205
  job_alert_to=getattr(job, "_alert_to", None),
206
+ _alert_sender=self._make_job_alert_sender(job),
198
207
  )
199
208
  self.dispatch_hook(job, "on_success", ctx)
200
209
 
@@ -223,6 +232,7 @@ class LifecycleDispatcher:
223
232
  block=block_ctx,
224
233
  kv=DatabaseJobKVReader(self._db, job.job_id),
225
234
  logger=logger.bind(job_id=job.job_id, chain_id=self._config.chain_id),
235
+ http=self._http_client,
226
236
  job_id=job.job_id,
227
237
  job_name=job.name,
228
238
  chain_id=self._config.chain_id,
@@ -230,6 +240,7 @@ class LifecycleDispatcher:
230
240
  telegram_config=self._config.telegram,
231
241
  telegram_bot=self._telegram_bot,
232
242
  job_alert_to=getattr(job, "_alert_to", None),
243
+ _alert_sender=self._make_job_alert_sender(job),
233
244
  )
234
245
  self.dispatch_hook(job, "on_failure", ctx)
235
246
 
@@ -256,6 +267,7 @@ class LifecycleDispatcher:
256
267
  block=block_ctx,
257
268
  kv=DatabaseJobKVReader(self._db, job.job_id),
258
269
  logger=logger.bind(job_id=job.job_id, chain_id=self._config.chain_id),
270
+ http=self._http_client,
259
271
  job_id=job.job_id,
260
272
  job_name=job.name,
261
273
  chain_id=self._config.chain_id,
@@ -263,6 +275,7 @@ class LifecycleDispatcher:
263
275
  telegram_config=self._config.telegram,
264
276
  telegram_bot=self._telegram_bot,
265
277
  job_alert_to=getattr(job, "_alert_to", None),
278
+ _alert_sender=self._make_job_alert_sender(job),
266
279
  )
267
280
  self.dispatch_hook(job, "on_failure", ctx)
268
281
 
@@ -290,6 +303,7 @@ class LifecycleDispatcher:
290
303
  block=block_ctx,
291
304
  kv=DatabaseJobKVReader(self._db, job.job_id),
292
305
  logger=logger.bind(job_id=job.job_id, chain_id=self._config.chain_id),
306
+ http=self._http_client,
293
307
  job_id=job.job_id,
294
308
  job_name=job.name,
295
309
  chain_id=self._config.chain_id,
@@ -297,6 +311,7 @@ class LifecycleDispatcher:
297
311
  telegram_config=self._config.telegram,
298
312
  telegram_bot=self._telegram_bot,
299
313
  job_alert_to=getattr(job, "_alert_to", None),
314
+ _alert_sender=self._make_job_alert_sender(job),
300
315
  )
301
316
  self.dispatch_hook(job, "on_failure", ctx)
302
317
 
@@ -342,15 +357,28 @@ class LifecycleDispatcher:
342
357
 
343
358
  def _fire_alert(self, payload: AlertPayload, config: AlertConfig) -> None:
344
359
  """Fire alert asynchronously. Fire-and-forget."""
345
- import asyncio
346
360
  from brawny.alerts import send as alerts_send
347
361
 
348
362
  try:
349
- loop = asyncio.get_running_loop()
350
- loop.create_task(alerts_send.send_alert(payload, config))
351
- except RuntimeError:
352
- # No running loop - run synchronously
353
- asyncio.run(alerts_send.send_alert(payload, config))
363
+ alerts_send.enqueue_alert(payload, config)
364
+ except Exception as exc:
365
+ metrics = get_metrics()
366
+ metrics.counter(BACKGROUND_TASK_ERRORS).inc(task="alert_send")
367
+ logger.warning("alert.enqueue_failed", error=str(exc)[:200])
368
+
369
+ def _handle_alert_task_result(self, task: "asyncio.Task[object]") -> None:
370
+ import asyncio
371
+
372
+ if task.cancelled():
373
+ return
374
+ try:
375
+ task.result()
376
+ except asyncio.CancelledError:
377
+ raise
378
+ except Exception as exc:
379
+ metrics = get_metrics()
380
+ metrics.counter(BACKGROUND_TASK_ERRORS).inc(task="alert_send")
381
+ self._log.warning("alert.task_failed", error=str(exc)[:200])
354
382
 
355
383
  def _build_global_alert_config(self) -> AlertConfig:
356
384
  """Build global AlertConfig from application config (legacy compatibility)."""
@@ -390,6 +418,18 @@ class LifecycleDispatcher:
390
418
  and self._global_alert_config.telegram_chat_ids
391
419
  )
392
420
 
421
+ def _make_job_alert_sender(self, job: Job) -> JobAlertSender:
422
+ """Create alert sender for ctx.alert() in lifecycle hooks.
423
+
424
+ Returns a JobAlertSender that routes to job-specific destinations.
425
+ """
426
+ return JobAlertSender(
427
+ telegram_bot=self._telegram_bot,
428
+ telegram_config=self._config.telegram,
429
+ job_alert_to=getattr(job, "_alert_to", None),
430
+ job_id=job.job_id,
431
+ )
432
+
393
433
  # =========================================================================
394
434
  # Helpers
395
435
  # =========================================================================
@@ -453,7 +493,12 @@ class LifecycleDispatcher:
453
493
  return None
454
494
  try:
455
495
  block = self._rpc.get_block(block_number)
456
- except Exception:
496
+ except Exception as e:
497
+ logger.warning(
498
+ "alerts.block_fetch_failed",
499
+ block_number=block_number,
500
+ error=str(e)[:200],
501
+ )
457
502
  return None
458
503
  return self._to_alert_block(
459
504
  BlockInfo(
@@ -463,6 +508,7 @@ class LifecycleDispatcher:
463
508
  if hasattr(block["hash"], "hex")
464
509
  else block["hash"],
465
510
  timestamp=block["timestamp"],
511
+ base_fee=block.get("baseFeePerGas", 0),
466
512
  )
467
513
  )
468
514
 
@@ -470,7 +516,12 @@ class LifecycleDispatcher:
470
516
  """Get BlockInfo model from block number."""
471
517
  try:
472
518
  block = self._rpc.get_block(block_number)
473
- except Exception:
519
+ except Exception as e:
520
+ logger.warning(
521
+ "alerts.block_model_failed",
522
+ block_number=block_number,
523
+ error=str(e)[:200],
524
+ )
474
525
  return None
475
526
  return BlockInfo(
476
527
  chain_id=self._config.chain_id,
@@ -479,6 +530,7 @@ class LifecycleDispatcher:
479
530
  if hasattr(block["hash"], "hex")
480
531
  else block["hash"],
481
532
  timestamp=block["timestamp"],
533
+ base_fee=block.get("baseFeePerGas", 0),
482
534
  )
483
535
 
484
536
  def _to_alert_block(self, block: BlockInfo) -> AlertBlockInfo:
brawny/logging.py CHANGED
@@ -282,8 +282,6 @@ class LogEvents:
282
282
  RPC_REQUEST = "rpc.request"
283
283
  RPC_ERROR = "rpc.error"
284
284
  RPC_ALL_ENDPOINTS_FAILED = "rpc.all_endpoints_failed"
285
- RPC_CIRCUIT_BREAKER_OPEN = "rpc.circuit_breaker_open"
286
- RPC_CIRCUIT_BREAKER_CLOSED = "rpc.circuit_breaker_closed"
287
285
 
288
286
  # Shutdown
289
287
  SHUTDOWN_INITIATED = "shutdown.initiated"