brawny 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +106 -0
- brawny/_context.py +232 -0
- brawny/_rpc/__init__.py +38 -0
- brawny/_rpc/broadcast.py +172 -0
- brawny/_rpc/clients.py +98 -0
- brawny/_rpc/context.py +49 -0
- brawny/_rpc/errors.py +252 -0
- brawny/_rpc/gas.py +158 -0
- brawny/_rpc/manager.py +982 -0
- brawny/_rpc/selector.py +156 -0
- brawny/accounts.py +534 -0
- brawny/alerts/__init__.py +132 -0
- brawny/alerts/abi_resolver.py +530 -0
- brawny/alerts/base.py +152 -0
- brawny/alerts/context.py +271 -0
- brawny/alerts/contracts.py +635 -0
- brawny/alerts/encoded_call.py +201 -0
- brawny/alerts/errors.py +267 -0
- brawny/alerts/events.py +680 -0
- brawny/alerts/function_caller.py +364 -0
- brawny/alerts/health.py +185 -0
- brawny/alerts/routing.py +118 -0
- brawny/alerts/send.py +364 -0
- brawny/api.py +660 -0
- brawny/chain.py +93 -0
- brawny/cli/__init__.py +16 -0
- brawny/cli/app.py +17 -0
- brawny/cli/bootstrap.py +37 -0
- brawny/cli/commands/__init__.py +41 -0
- brawny/cli/commands/abi.py +93 -0
- brawny/cli/commands/accounts.py +632 -0
- brawny/cli/commands/console.py +495 -0
- brawny/cli/commands/contract.py +139 -0
- brawny/cli/commands/health.py +112 -0
- brawny/cli/commands/init_project.py +86 -0
- brawny/cli/commands/intents.py +130 -0
- brawny/cli/commands/job_dev.py +254 -0
- brawny/cli/commands/jobs.py +308 -0
- brawny/cli/commands/logs.py +87 -0
- brawny/cli/commands/maintenance.py +182 -0
- brawny/cli/commands/migrate.py +51 -0
- brawny/cli/commands/networks.py +253 -0
- brawny/cli/commands/run.py +249 -0
- brawny/cli/commands/script.py +209 -0
- brawny/cli/commands/signer.py +248 -0
- brawny/cli/helpers.py +265 -0
- brawny/cli_templates.py +1445 -0
- brawny/config/__init__.py +74 -0
- brawny/config/models.py +404 -0
- brawny/config/parser.py +633 -0
- brawny/config/routing.py +55 -0
- brawny/config/validation.py +246 -0
- brawny/daemon/__init__.py +14 -0
- brawny/daemon/context.py +69 -0
- brawny/daemon/core.py +702 -0
- brawny/daemon/loops.py +327 -0
- brawny/db/__init__.py +78 -0
- brawny/db/base.py +986 -0
- brawny/db/base_new.py +165 -0
- brawny/db/circuit_breaker.py +97 -0
- brawny/db/global_cache.py +298 -0
- brawny/db/mappers.py +182 -0
- brawny/db/migrate.py +349 -0
- brawny/db/migrations/001_init.sql +186 -0
- brawny/db/migrations/002_add_included_block.sql +7 -0
- brawny/db/migrations/003_add_broadcast_at.sql +10 -0
- brawny/db/migrations/004_broadcast_binding.sql +20 -0
- brawny/db/migrations/005_add_retry_after.sql +9 -0
- brawny/db/migrations/006_add_retry_count_column.sql +11 -0
- brawny/db/migrations/007_add_gap_tracking.sql +18 -0
- brawny/db/migrations/008_add_transactions.sql +72 -0
- brawny/db/migrations/009_add_intent_metadata.sql +5 -0
- brawny/db/migrations/010_add_nonce_gap_index.sql +9 -0
- brawny/db/migrations/011_add_job_logs.sql +24 -0
- brawny/db/migrations/012_add_claimed_by.sql +5 -0
- brawny/db/ops/__init__.py +29 -0
- brawny/db/ops/attempts.py +108 -0
- brawny/db/ops/blocks.py +83 -0
- brawny/db/ops/cache.py +93 -0
- brawny/db/ops/intents.py +296 -0
- brawny/db/ops/jobs.py +110 -0
- brawny/db/ops/logs.py +97 -0
- brawny/db/ops/nonces.py +322 -0
- brawny/db/postgres.py +2535 -0
- brawny/db/postgres_new.py +196 -0
- brawny/db/queries.py +584 -0
- brawny/db/sqlite.py +2733 -0
- brawny/db/sqlite_new.py +191 -0
- brawny/history.py +126 -0
- brawny/interfaces.py +136 -0
- brawny/invariants.py +155 -0
- brawny/jobs/__init__.py +26 -0
- brawny/jobs/base.py +287 -0
- brawny/jobs/discovery.py +233 -0
- brawny/jobs/job_validation.py +111 -0
- brawny/jobs/kv.py +125 -0
- brawny/jobs/registry.py +283 -0
- brawny/keystore.py +484 -0
- brawny/lifecycle.py +551 -0
- brawny/logging.py +290 -0
- brawny/metrics.py +594 -0
- brawny/model/__init__.py +53 -0
- brawny/model/contexts.py +319 -0
- brawny/model/enums.py +70 -0
- brawny/model/errors.py +194 -0
- brawny/model/events.py +93 -0
- brawny/model/startup.py +20 -0
- brawny/model/types.py +483 -0
- brawny/networks/__init__.py +96 -0
- brawny/networks/config.py +269 -0
- brawny/networks/manager.py +423 -0
- brawny/obs/__init__.py +67 -0
- brawny/obs/emit.py +158 -0
- brawny/obs/health.py +175 -0
- brawny/obs/heartbeat.py +133 -0
- brawny/reconciliation.py +108 -0
- brawny/scheduler/__init__.py +19 -0
- brawny/scheduler/poller.py +472 -0
- brawny/scheduler/reorg.py +632 -0
- brawny/scheduler/runner.py +708 -0
- brawny/scheduler/shutdown.py +371 -0
- brawny/script_tx.py +297 -0
- brawny/scripting.py +251 -0
- brawny/startup.py +76 -0
- brawny/telegram.py +393 -0
- brawny/testing.py +108 -0
- brawny/tx/__init__.py +41 -0
- brawny/tx/executor.py +1071 -0
- brawny/tx/fees.py +50 -0
- brawny/tx/intent.py +423 -0
- brawny/tx/monitor.py +628 -0
- brawny/tx/nonce.py +498 -0
- brawny/tx/replacement.py +456 -0
- brawny/tx/utils.py +26 -0
- brawny/utils.py +205 -0
- brawny/validation.py +69 -0
- brawny-0.1.13.dist-info/METADATA +156 -0
- brawny-0.1.13.dist-info/RECORD +141 -0
- brawny-0.1.13.dist-info/WHEEL +5 -0
- brawny-0.1.13.dist-info/entry_points.txt +2 -0
- brawny-0.1.13.dist-info/top_level.txt +1 -0
brawny/jobs/base.py
ADDED
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
"""Base Job class for brawny.
|
|
2
|
+
|
|
3
|
+
Jobs are the core abstraction for scheduling and executing Ethereum transactions
|
|
4
|
+
based on block events. Jobs implement check() to evaluate conditions and
|
|
5
|
+
build_tx() to create transactions.
|
|
6
|
+
|
|
7
|
+
Phase-specific contexts (OE7):
|
|
8
|
+
- CheckContext: Read chain state, return Trigger. KV is read+write.
|
|
9
|
+
- BuildContext: Produces TxSpec. Has trigger + signer. KV is read-only.
|
|
10
|
+
- AlertContext: Receives immutable snapshots. KV is read-only.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
from abc import ABC
|
|
16
|
+
from typing import TYPE_CHECKING, Any
|
|
17
|
+
|
|
18
|
+
if TYPE_CHECKING:
|
|
19
|
+
from brawny.model.contexts import (
|
|
20
|
+
CheckContext,
|
|
21
|
+
BuildContext,
|
|
22
|
+
AlertContext,
|
|
23
|
+
TriggerContext,
|
|
24
|
+
SuccessContext,
|
|
25
|
+
FailureContext,
|
|
26
|
+
)
|
|
27
|
+
from brawny.model.types import Trigger, TxIntent, TxIntentSpec, TxAttempt
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TxInfo:
    """Transaction info for alert context.

    A plain attribute holder: every constructor argument is stored
    verbatim under the same name, with no validation or conversion.
    """

    def __init__(
        self,
        hash: str,
        nonce: int,
        from_address: str,
        to_address: str,
        gas_limit: int,
        max_fee_per_gas: int,
        max_priority_fee_per_gas: int,
    ) -> None:
        # Bulk-assign via one unpacking statement; attribute order
        # mirrors the constructor signature exactly.
        (
            self.hash,
            self.nonce,
            self.from_address,
            self.to_address,
            self.gas_limit,
            self.max_fee_per_gas,
            self.max_priority_fee_per_gas,
        ) = (
            hash,
            nonce,
            from_address,
            to_address,
            gas_limit,
            max_fee_per_gas,
            max_priority_fee_per_gas,
        )
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class TxReceipt:
    """Transaction receipt for alert context.

    Attribute names are camelCase (transactionHash, blockNumber, ...),
    presumably to mirror web3-style receipt objects -- note that they
    deliberately differ from the snake_case constructor parameters.
    """

    def __init__(
        self,
        transaction_hash: str,
        block_number: int,
        block_hash: str,
        status: int,
        gas_used: int,
        logs: list[dict[str, Any]],
    ) -> None:
        # Map snake_case parameters onto camelCase attributes.
        self.logs = logs
        self.status = status
        self.gasUsed = gas_used
        self.blockHash = block_hash
        self.blockNumber = block_number
        self.transactionHash = transaction_hash
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class BlockInfo:
    """Block info for alert context.

    Minimal snapshot of a block -- height, hash, and timestamp --
    stored exactly as supplied.
    """

    def __init__(self, number: int, hash: str, timestamp: int) -> None:
        self.number = number
        self.hash = hash
        self.timestamp = timestamp
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class Job(ABC):
    """Base class for all jobs.

    Jobs are the core abstraction for scheduling and executing Ethereum
    transactions based on block events.

    Attributes:
        job_id: Stable identifier, must not change across deployments
        name: Human-readable name for logging and alerts
        check_interval_blocks: Minimum blocks between check() calls
        check_timeout_seconds: Timeout for check() execution
        build_timeout_seconds: Timeout for build_intent() execution
        max_in_flight_intents: Optional cap on active intents for this job
    """

    job_id: str
    name: str
    check_interval_blocks: int = 1
    check_timeout_seconds: int = 30
    build_timeout_seconds: int = 10
    max_in_flight_intents: int | None = None

    # Simulation config
    disable_simulation: bool = False
    rpc: str | None = None  # Override global RPC URL for simulation

    # Gas overrides (None = inherit from config, all values in wei)
    max_fee: int | None = None
    priority_fee: int | None = None

    # Alert config
    # NOTE: Use None as sentinel to avoid mutable default sharing across subclasses
    # NOTE(review): __init_subclass__ below replaces a None telegram_chat_ids
    # with [] on every subclass, so subclasses never retain the None sentinel
    # at class level -- confirm whether "None = use global" is still reachable.
    telegram_chat_ids: list[str] | None = None  # Override global alert targets (None = use global)

    # Signer config (set by @job(signer="...") decorator)
    _signer_name: str | None = None

    # Alert routing (set by @job(alert_to="...") decorator)
    _alert_to: list[str] | None = None

    @property
    def signer(self) -> str | None:
        """Signer alias from @job(signer="..."), or None if not set."""
        return self._signer_name

    @signer.setter
    def signer(self, value: str | None) -> None:
        """Allow setting signer dynamically (tests/dev flows)."""
        self._signer_name = value

    @property
    def signer_address(self) -> str:
        """Resolved checksummed address for this job's signer.

        Raises:
            RuntimeError: If no signer configured.
            KeystoreError: If signer not found in keystore.
        """
        if self._signer_name is None:
            raise RuntimeError(f"Job '{self.job_id}' has no signer configured.")
        # A raw 0x-prefixed, 42-char value is treated as an address literal
        # and checksummed directly instead of being looked up as an alias.
        if self._signer_name.startswith("0x") and len(self._signer_name) == 42:
            from web3 import Web3

            return Web3.to_checksum_address(self._signer_name)
        # Otherwise resolve the alias through the API helper; imported
        # lazily, presumably to avoid a circular import -- confirm.
        from brawny.api import get_address_from_alias
        return get_address_from_alias(self._signer_name)

    def __init_subclass__(cls, **kwargs: Any) -> None:
        """Ensure each subclass has its own mutable containers.

        This prevents the Python mutable default argument bug where all
        subclasses would share the same dict/list instance.
        """
        super().__init_subclass__(**kwargs)

        # Create fresh containers for each subclass if not explicitly defined
        # Check if the attribute is inherited from Job (shared) vs defined on cls
        if "telegram_chat_ids" not in cls.__dict__:
            cls.telegram_chat_ids = []
        elif cls.telegram_chat_ids is None:
            # Explicitly declared as None on the subclass: normalize to a
            # fresh list so later code can append without None checks.
            cls.telegram_chat_ids = []

    def check(self, *args: Any, **kwargs: Any) -> Trigger | None:
        """Check if job should trigger.

        Called at most once per check_interval_blocks.

        Supported signatures:
            def check(self) -> Trigger | None       # Implicit context
            def check(self, ctx) -> Trigger | None  # Explicit context

        When using implicit context, access via:
            - block.number, block.timestamp (from brawny.api)
            - kv.get(), kv.set() (from brawny.api)
            - Contract() (from brawny.api)
            - ctx() for full context access

        Note:
            Explicit style requires the parameter to be named 'ctx' so the
            runner can detect it safely. Using a different name will be
            treated as implicit style.

        Returns:
            Trigger if action needed, None otherwise
        """
        raise NotImplementedError(f"{self.__class__.__name__} must implement check()")

    def build_tx(self, *args: Any, **kwargs: Any) -> TxIntentSpec:
        """Build transaction spec from trigger.

        Only called if trigger.tx_required is True. Trigger is available
        via ctx.trigger (explicit) or via the ctx() helper (implicit).

        Supported signatures:
            def build_tx(self) -> TxIntentSpec       # Implicit context
            def build_tx(self, ctx) -> TxIntentSpec  # Explicit context

        Use ctx.contracts.at(name, addr) for 'latest' reads.
        Safety-critical predicates should be computed in check() and
        encoded in ctx.trigger.reason or intent.metadata.

        Note:
            Explicit style requires the parameter to be named 'ctx' so the
            runner can detect it safely.

        Returns:
            Transaction intent specification

        Raises:
            NotImplementedError: For monitor-only jobs
        """
        raise NotImplementedError(f"{self.__class__.__name__} must implement build_tx()")

    def validate_simulation(self, output: str) -> bool:
        """Validate simulation output before broadcast.

        Called only if simulation succeeds (no revert). Override to add
        custom checks like verifying return values or slippage.

        Args:
            output: Hex-encoded return data from simulation (0x...)

        Returns:
            True to proceed with broadcast.
            False to fail (executor raises SimulationReverted).

        Example:
            def validate_simulation(self, output):
                decoded = self._decode_output(output)
                return decoded >= self.min_output
        """
        # Default: accept any successful simulation result.
        return True

    # =========================================================================
    # Lifecycle Hooks (New Simplified API)
    # =========================================================================

    def on_trigger(self, ctx: "TriggerContext") -> None:
        """Called when check() returns a Trigger, BEFORE build_tx().

        Use for:
        - Monitor-only jobs (tx_required=False) - your only hook
        - Pre-transaction alerts/logging
        - KV updates before intent creation

        Note: No intent exists yet. After this hook, trigger is gone -
        only intent.metadata persists.

        To send alerts, use:
            from brawny import alert
            alert(f"Triggered: {ctx.trigger.reason}")
        """
        pass

    def on_success(self, ctx: "SuccessContext") -> None:
        """Called when transaction confirms.

        ctx.intent.metadata["reason"] = original trigger.reason
        ctx.intent.metadata[...] = your custom data from build_tx()

        To send alerts, use:
            from brawny import alert
            alert(f"Confirmed: {ctx.intent.metadata['reason']}")
        """
        pass

    def on_failure(self, ctx: "FailureContext") -> None:
        """Called on failures. ctx.intent may be None for pre-intent failures.

        Pre-intent failures include:
        - check() exception
        - build_tx() exception
        - intent creation failure

        To send alerts, use:
            from brawny import alert
            if ctx.intent:
                alert(f"Failed: {ctx.intent.metadata['reason']}")
            else:
                alert(f"Pre-intent failure: {ctx.error}")
        """
        pass
|
brawny/jobs/discovery.py
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
"""Job discovery for brawny.
|
|
2
|
+
|
|
3
|
+
Provides module and path-based job discovery mechanisms.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import importlib
|
|
9
|
+
import importlib.util
|
|
10
|
+
import pkgutil
|
|
11
|
+
import sys
|
|
12
|
+
import traceback
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
|
|
16
|
+
from brawny.logging import get_logger
|
|
17
|
+
|
|
18
|
+
# Module-level structured logger shared by all discovery helpers below.
logger = get_logger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass(frozen=True)
class JobLoadError:
    """Immutable record of one job module that failed to import."""

    path: str  # module path or file path that failed to load
    message: str  # short error text (str(e)) for quick scanning
    traceback: str  # full formatted traceback for debugging
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class JobDiscoveryFailed(Exception):
    """Raised when one or more job modules fail to load."""

    def __init__(self, errors: list[JobLoadError]) -> None:
        # Build the summary message first, then keep the detailed
        # per-module errors available on the instance.
        count = len(errors)
        super().__init__(f"Failed to load {count} job module(s)")
        self.errors = errors
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _import_module_tree(module_path: str) -> list[JobLoadError]:
|
|
39
|
+
"""Import a module and all submodules if it's a package.
|
|
40
|
+
|
|
41
|
+
Returns:
|
|
42
|
+
List of JobLoadError for any modules that failed to load.
|
|
43
|
+
"""
|
|
44
|
+
load_errors: list[JobLoadError] = []
|
|
45
|
+
|
|
46
|
+
try:
|
|
47
|
+
module = importlib.import_module(module_path)
|
|
48
|
+
except Exception as e:
|
|
49
|
+
load_errors.append(
|
|
50
|
+
JobLoadError(
|
|
51
|
+
path=module_path,
|
|
52
|
+
message=str(e),
|
|
53
|
+
traceback=traceback.format_exc(),
|
|
54
|
+
)
|
|
55
|
+
)
|
|
56
|
+
logger.error(
|
|
57
|
+
"job.module_load_failed",
|
|
58
|
+
module=module_path,
|
|
59
|
+
error=str(e),
|
|
60
|
+
error_type=type(e).__name__,
|
|
61
|
+
)
|
|
62
|
+
return load_errors
|
|
63
|
+
|
|
64
|
+
# If it's a package, walk submodules recursively.
|
|
65
|
+
if hasattr(module, "__path__"):
|
|
66
|
+
for _, name, _ in pkgutil.walk_packages(module.__path__, module.__name__ + "."):
|
|
67
|
+
if name.split(".")[-1].startswith("_"):
|
|
68
|
+
continue
|
|
69
|
+
try:
|
|
70
|
+
importlib.import_module(name)
|
|
71
|
+
except Exception as e:
|
|
72
|
+
load_errors.append(
|
|
73
|
+
JobLoadError(
|
|
74
|
+
path=name,
|
|
75
|
+
message=str(e),
|
|
76
|
+
traceback=traceback.format_exc(),
|
|
77
|
+
)
|
|
78
|
+
)
|
|
79
|
+
logger.error(
|
|
80
|
+
"job.module_load_failed",
|
|
81
|
+
module=name,
|
|
82
|
+
error=str(e),
|
|
83
|
+
error_type=type(e).__name__,
|
|
84
|
+
)
|
|
85
|
+
|
|
86
|
+
return load_errors
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def discover_jobs(module_paths: list[str]) -> tuple[list[str], list[JobLoadError]]:
    """Discover and import job modules.

    Imports the specified modules to trigger @job decorators.

    Args:
        module_paths: List of Python module paths to import

    Returns:
        Tuple of (discovered job IDs, list of JobLoadError for failed modules)

    Note:
        Newly discovered job IDs are reported in sorted order so the result
        is deterministic (set difference has no stable iteration order),
        matching the behavior of discover_jobs_from_path().
    """
    # Lazy import to avoid circular dependency
    from brawny.jobs.registry import get_registry

    registry = get_registry()
    discovered: list[str] = []
    load_errors: list[JobLoadError] = []

    for module_path in module_paths:
        # Snapshot registered jobs before import so new ones can be diffed out.
        before = set(registry.list_job_ids())

        # Import module and any submodules (package tree)
        load_errors.extend(_import_module_tree(module_path))

        # Find newly registered jobs; sorted for deterministic output.
        new_jobs = set(registry.list_job_ids()) - before
        discovered.extend(sorted(new_jobs))

    return discovered, load_errors
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def discover_jobs_from_path(jobs_dir: str | Path) -> tuple[list[str], list[JobLoadError]]:
    """Discover jobs by scanning a directory for Python files.

    Recursively finds all .py files and imports them to trigger @job.
    Does NOT require __init__.py files in subdirectories.

    Args:
        jobs_dir: Path to jobs directory

    Returns:
        Tuple of (discovered job IDs, list of JobLoadError for failed modules)
    """
    # Lazy import to avoid circular dependency
    from brawny.jobs.registry import get_registry

    registry = get_registry()
    jobs_path = Path(jobs_dir).resolve()
    load_errors: list[JobLoadError] = []

    if not jobs_path.is_dir():
        logger.error("job.discovery.not_a_directory", path=str(jobs_path))
        return [], []

    discovered: list[str] = []
    before = set(registry.list_job_ids())

    # Find all .py files recursively
    for py_file in jobs_path.rglob("*.py"):
        # Skip private files and __init__.py
        if py_file.name.startswith("_"):
            continue

        # Skip examples directory (reference code, not to be registered)
        if "examples" in py_file.parts:
            continue

        try:
            # Create a unique module name based on path
            rel_path = py_file.relative_to(jobs_path)
            module_name = f"_jobs_.{rel_path.with_suffix('').as_posix().replace('/', '.')}"

            # Import the file directly
            spec = importlib.util.spec_from_file_location(module_name, py_file)
            if spec and spec.loader:
                module = importlib.util.module_from_spec(spec)
                # Register before exec so intra-module imports resolve.
                sys.modules[module_name] = module
                try:
                    spec.loader.exec_module(module)
                except BaseException:
                    # Don't leave a half-initialized module importable
                    # (per the importlib docs' recommended pattern).
                    sys.modules.pop(module_name, None)
                    raise

        except Exception as e:
            load_errors.append(
                JobLoadError(
                    path=str(py_file),
                    message=str(e),
                    traceback=traceback.format_exc(),
                )
            )
            logger.error(
                "job.module_load_failed",
                file=str(py_file),
                error=str(e),
                error_type=type(e).__name__,
            )

    # Find newly registered jobs; sorted for deterministic output.
    new_jobs = set(registry.list_job_ids()) - before
    discovered.extend(sorted(new_jobs))

    return discovered, load_errors
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def auto_discover_jobs() -> tuple[list[str], list[JobLoadError]]:
    """Auto-discover jobs from conventional locations.

    Checks in order:
    1. ./jobs/ directory
    2. ./src/*/jobs/ directories

    This enables zero-config job discovery for projects that follow conventions.

    Returns:
        Tuple of (discovered job IDs, list of JobLoadError for failed modules)
    """
    discovered: list[str] = []
    load_errors: list[JobLoadError] = []
    cwd = Path.cwd()

    def _scan(directory: Path) -> None:
        # Accumulate results from one candidate jobs directory.
        jobs, errors = discover_jobs_from_path(directory)
        discovered.extend(jobs)
        load_errors.extend(errors)

    # Check ./jobs/
    top_level = cwd / "jobs"
    if top_level.is_dir():
        _scan(top_level)

    # Fall back to ./src/<pkg>/jobs/ (setuptools src-layout convention)
    # only when the top-level scan produced no jobs.
    if not discovered:
        src_dir = cwd / "src"
        if src_dir.is_dir():
            for pkg_dir in src_dir.iterdir():
                if pkg_dir.is_dir() and not pkg_dir.name.startswith("_"):
                    candidate = pkg_dir / "jobs"
                    if candidate.is_dir():
                        _scan(candidate)

    return discovered, load_errors
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""Job validation for brawny.
|
|
2
|
+
|
|
3
|
+
Provides structural validation for job instances.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
from typing import TYPE_CHECKING
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from brawny.jobs.base import Job
|
|
12
|
+
from brawny.keystore import Keystore
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _is_valid_address(address: str) -> bool:
|
|
16
|
+
"""Check if a string looks like a valid Ethereum address.
|
|
17
|
+
|
|
18
|
+
Args:
|
|
19
|
+
address: String to validate
|
|
20
|
+
|
|
21
|
+
Returns:
|
|
22
|
+
True if valid address format
|
|
23
|
+
"""
|
|
24
|
+
if not isinstance(address, str):
|
|
25
|
+
return False
|
|
26
|
+
if not address.startswith("0x"):
|
|
27
|
+
return False
|
|
28
|
+
if len(address) != 42:
|
|
29
|
+
return False
|
|
30
|
+
try:
|
|
31
|
+
int(address, 16)
|
|
32
|
+
return True
|
|
33
|
+
except ValueError:
|
|
34
|
+
return False
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def validate_job(job: "Job", keystore: "Keystore | None" = None) -> list[str]:
    """Validate job structure and configuration.

    Checks:
    - Required attributes (job_id, name)
    - Required methods (check)
    - check_interval_blocks is positive
    - Signer exists in keystore (if configured and keystore provided)

    Does NOT:
    - Call check() or build_intent()
    - Make RPC calls
    - Validate runtime behavior

    Args:
        job: Job instance to validate
        keystore: Optional keystore for signer validation

    Returns:
        List of error messages (empty if valid)
    """
    problems: list[str] = []

    # Required attributes must be present and non-empty.
    if not getattr(job, "job_id", None):
        problems.append("missing job_id attribute")
    if not getattr(job, "name", None):
        problems.append("missing name attribute")

    # check() must exist and be callable.
    if not callable(getattr(job, "check", None)):
        problems.append("missing check() method")

    # check_interval_blocks must be a positive integer (defaults to 1).
    interval = getattr(job, "check_interval_blocks", 1)
    if not isinstance(interval, int) or interval < 1:
        problems.append(f"check_interval_blocks must be positive integer, got {interval}")

    # If a signer is configured and a keystore is available, it must resolve.
    signer = getattr(job, "_signer_name", None)
    if signer and keystore and not keystore.has_key(signer):
        available = keystore.list_keys()
        if available:
            problems.append(f"signer '{signer}' not found in keystore (available: {', '.join(available)})")
        else:
            problems.append(f"signer '{signer}' not found (keystore is empty)")

    return problems
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def validate_all_jobs(
    jobs: dict[str, "Job"],
    keystore: "Keystore | None" = None,
) -> dict[str, list[str]]:
    """Validate all jobs and return errors by job_id.

    Args:
        jobs: Dict of job_id -> Job instance
        keystore: Optional keystore for signer validation

    Returns:
        Dict of job_id -> list of errors (only jobs with errors included)
    """
    # Keep only the entries whose validation produced at least one error.
    return {
        job_id: problems
        for job_id, job in jobs.items()
        if (problems := validate_job(job, keystore))
    }
|