brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +2 -0
- brawny/_context.py +5 -5
- brawny/_rpc/__init__.py +36 -12
- brawny/_rpc/broadcast.py +14 -13
- brawny/_rpc/caller.py +243 -0
- brawny/_rpc/client.py +539 -0
- brawny/_rpc/clients.py +11 -11
- brawny/_rpc/context.py +23 -0
- brawny/_rpc/errors.py +465 -31
- brawny/_rpc/gas.py +7 -6
- brawny/_rpc/pool.py +18 -0
- brawny/_rpc/retry.py +266 -0
- brawny/_rpc/retry_policy.py +81 -0
- brawny/accounts.py +28 -9
- brawny/alerts/__init__.py +15 -18
- brawny/alerts/abi_resolver.py +212 -36
- brawny/alerts/base.py +2 -2
- brawny/alerts/contracts.py +77 -10
- brawny/alerts/errors.py +30 -3
- brawny/alerts/events.py +38 -5
- brawny/alerts/health.py +19 -13
- brawny/alerts/send.py +513 -55
- brawny/api.py +39 -11
- brawny/assets/AGENTS.md +325 -0
- brawny/async_runtime.py +48 -0
- brawny/chain.py +3 -3
- brawny/cli/commands/__init__.py +2 -0
- brawny/cli/commands/console.py +69 -19
- brawny/cli/commands/contract.py +2 -2
- brawny/cli/commands/controls.py +121 -0
- brawny/cli/commands/health.py +2 -2
- brawny/cli/commands/job_dev.py +6 -5
- brawny/cli/commands/jobs.py +99 -2
- brawny/cli/commands/maintenance.py +13 -29
- brawny/cli/commands/migrate.py +1 -0
- brawny/cli/commands/run.py +10 -3
- brawny/cli/commands/script.py +8 -3
- brawny/cli/commands/signer.py +143 -26
- brawny/cli/helpers.py +0 -3
- brawny/cli_templates.py +25 -349
- brawny/config/__init__.py +4 -1
- brawny/config/models.py +43 -57
- brawny/config/parser.py +268 -57
- brawny/config/validation.py +52 -15
- brawny/daemon/context.py +4 -2
- brawny/daemon/core.py +185 -63
- brawny/daemon/loops.py +166 -98
- brawny/daemon/supervisor.py +261 -0
- brawny/db/__init__.py +14 -26
- brawny/db/base.py +248 -151
- brawny/db/global_cache.py +11 -1
- brawny/db/migrate.py +175 -28
- brawny/db/migrations/001_init.sql +4 -3
- brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
- brawny/db/migrations/011_add_job_logs.sql +1 -2
- brawny/db/migrations/012_add_claimed_by.sql +2 -2
- brawny/db/migrations/013_attempt_unique.sql +10 -0
- brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
- brawny/db/migrations/015_add_signer_alias.sql +14 -0
- brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
- brawny/db/migrations/017_add_job_drain.sql +6 -0
- brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
- brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
- brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
- brawny/db/ops/__init__.py +3 -25
- brawny/db/ops/logs.py +1 -2
- brawny/db/queries.py +47 -91
- brawny/db/serialized.py +65 -0
- brawny/db/sqlite/__init__.py +1001 -0
- brawny/db/sqlite/connection.py +231 -0
- brawny/db/sqlite/execute.py +116 -0
- brawny/db/sqlite/mappers.py +190 -0
- brawny/db/sqlite/repos/attempts.py +372 -0
- brawny/db/sqlite/repos/block_state.py +102 -0
- brawny/db/sqlite/repos/cache.py +104 -0
- brawny/db/sqlite/repos/intents.py +1021 -0
- brawny/db/sqlite/repos/jobs.py +200 -0
- brawny/db/sqlite/repos/maintenance.py +182 -0
- brawny/db/sqlite/repos/signers_nonces.py +566 -0
- brawny/db/sqlite/tx.py +119 -0
- brawny/http.py +194 -0
- brawny/invariants.py +11 -24
- brawny/jobs/base.py +8 -0
- brawny/jobs/job_validation.py +2 -1
- brawny/keystore.py +83 -7
- brawny/lifecycle.py +64 -12
- brawny/logging.py +0 -2
- brawny/metrics.py +84 -12
- brawny/model/contexts.py +111 -9
- brawny/model/enums.py +1 -0
- brawny/model/errors.py +18 -0
- brawny/model/types.py +47 -131
- brawny/network_guard.py +133 -0
- brawny/networks/__init__.py +5 -5
- brawny/networks/config.py +1 -7
- brawny/networks/manager.py +14 -11
- brawny/runtime_controls.py +74 -0
- brawny/scheduler/poller.py +11 -7
- brawny/scheduler/reorg.py +95 -39
- brawny/scheduler/runner.py +442 -168
- brawny/scheduler/shutdown.py +3 -3
- brawny/script_tx.py +3 -3
- brawny/telegram.py +53 -7
- brawny/testing.py +1 -0
- brawny/timeout.py +38 -0
- brawny/tx/executor.py +922 -308
- brawny/tx/intent.py +54 -16
- brawny/tx/monitor.py +31 -12
- brawny/tx/nonce.py +212 -90
- brawny/tx/replacement.py +69 -18
- brawny/tx/retry_policy.py +24 -0
- brawny/tx/stages/types.py +75 -0
- brawny/types.py +18 -0
- brawny/utils.py +41 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
- brawny-0.1.22.dist-info/RECORD +163 -0
- brawny/_rpc/manager.py +0 -982
- brawny/_rpc/selector.py +0 -156
- brawny/db/base_new.py +0 -165
- brawny/db/mappers.py +0 -182
- brawny/db/migrations/008_add_transactions.sql +0 -72
- brawny/db/ops/attempts.py +0 -108
- brawny/db/ops/blocks.py +0 -83
- brawny/db/ops/cache.py +0 -93
- brawny/db/ops/intents.py +0 -296
- brawny/db/ops/jobs.py +0 -110
- brawny/db/ops/nonces.py +0 -322
- brawny/db/postgres.py +0 -2535
- brawny/db/postgres_new.py +0 -196
- brawny/db/sqlite.py +0 -2733
- brawny/db/sqlite_new.py +0 -191
- brawny-0.1.13.dist-info/RECORD +0 -141
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
- {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0
"""Block state and hash history operations."""

from __future__ import annotations

from brawny.db.base_new import Database, BlockState
from brawny.db import queries as Q
from brawny.db import mappers as M


def get_block_state(db: Database, chain_id: int) -> BlockState | None:
    """Return the current block-processing state for *chain_id*, or None."""
    row = db.fetch_one(Q.GET_BLOCK_STATE, {"chain_id": chain_id})
    if not row:
        return None
    return M.row_to_block_state(row)


def upsert_block_state(
    db: Database, chain_id: int, block_number: int, block_hash: str
) -> None:
    """Insert or update the block-processing state for a chain."""
    params = {
        "chain_id": chain_id,
        "block_number": block_number,
        "block_hash": block_hash,
    }
    db.execute(Q.UPSERT_BLOCK_STATE, params)


def get_block_hash_at_height(
    db: Database, chain_id: int, block_number: int
) -> str | None:
    """Return the stored block hash at *block_number*, or None if unknown."""
    params = {"chain_id": chain_id, "block_number": block_number}
    row = db.fetch_one(Q.GET_BLOCK_HASH_AT_HEIGHT, params)
    if not row:
        return None
    return row["block_hash"]


def insert_block_hash(
    db: Database, chain_id: int, block_number: int, block_hash: str
) -> None:
    """Record a block hash in the hash-history table."""
    params = {
        "chain_id": chain_id,
        "block_number": block_number,
        "block_hash": block_hash,
    }
    db.execute(Q.INSERT_BLOCK_HASH, params)


def delete_block_hashes_above(db: Database, chain_id: int, block_number: int) -> int:
    """Drop every hash above *block_number* (reorg rewind); return rows removed."""
    params = {"chain_id": chain_id, "block_number": block_number}
    return db.execute_rowcount(Q.DELETE_BLOCK_HASHES_ABOVE, params)


def delete_block_hash_at_height(db: Database, chain_id: int, block_number: int) -> bool:
    """Remove the hash stored at exactly *block_number*; True if one existed."""
    params = {"chain_id": chain_id, "block_number": block_number}
    removed = db.execute_rowcount(Q.DELETE_BLOCK_HASH_AT_HEIGHT, params)
    return removed > 0


def cleanup_old_block_hashes(db: Database, chain_id: int, keep_count: int) -> int:
    """Trim hash history to roughly the newest *keep_count* entries.

    Returns the number of rows deleted (0 when the history is empty).
    """
    # The deletion cutoff is anchored to the highest block currently recorded.
    row = db.fetch_one(Q.GET_MAX_BLOCK_IN_HISTORY, {"chain_id": chain_id})
    if not row or row["max_block"] is None:
        return 0
    cutoff = row["max_block"] - keep_count
    return db.execute_rowcount(
        Q.DELETE_BLOCK_HASHES_BELOW, {"chain_id": chain_id, "cutoff": cutoff}
    )


def get_oldest_block_in_history(db: Database, chain_id: int) -> int | None:
    """Return the lowest block number present in hash history, or None."""
    row = db.fetch_one(Q.GET_OLDEST_BLOCK_IN_HISTORY, {"chain_id": chain_id})
    if not row:
        return None
    return row["min_block"]
"""ABI and proxy cache operations."""

from __future__ import annotations

from brawny.db.base_new import Database, ABICacheEntry, ProxyCacheEntry
from brawny.db import queries as Q
from brawny.db import mappers as M


# =============================================================================
# ABI Cache
# =============================================================================


def get_cached_abi(db: Database, chain_id: int, address: str) -> ABICacheEntry | None:
    """Return the cached ABI entry for *address*, or None on a miss."""
    # Addresses are stored lowercased so lookups are case-insensitive.
    params = {"chain_id": chain_id, "address": address.lower()}
    row = db.fetch_one(Q.GET_ABI_CACHE, params)
    if not row:
        return None
    return M.row_to_abi_cache(row)


def set_cached_abi(
    db: Database,
    chain_id: int,
    address: str,
    abi_json: str,
    source: str,
) -> None:
    """Cache an ABI for a contract address.

    Args:
        db: Database instance
        chain_id: Chain ID
        address: Contract address
        abi_json: JSON-encoded ABI
        source: Source of ABI ('etherscan', 'sourcify', 'manual', 'proxy_implementation')
    """
    params = {
        "chain_id": chain_id,
        "address": address.lower(),
        "abi_json": abi_json,
        "source": source,
    }
    db.execute(Q.UPSERT_ABI_CACHE, params)


def clear_cached_abi(db: Database, chain_id: int, address: str) -> bool:
    """Drop the cached ABI for *address*; True if an entry was removed."""
    params = {"chain_id": chain_id, "address": address.lower()}
    removed = db.execute_rowcount(Q.DELETE_ABI_CACHE, params)
    return removed > 0


# =============================================================================
# Proxy Cache
# =============================================================================


def get_cached_proxy(
    db: Database, chain_id: int, proxy_address: str
) -> ProxyCacheEntry | None:
    """Return the cached proxy→implementation entry, or None on a miss."""
    params = {"chain_id": chain_id, "proxy_address": proxy_address.lower()}
    row = db.fetch_one(Q.GET_PROXY_CACHE, params)
    if not row:
        return None
    return M.row_to_proxy_cache(row)


def set_cached_proxy(
    db: Database,
    chain_id: int,
    proxy_address: str,
    implementation_address: str,
) -> None:
    """Cache a proxy-to-implementation mapping (both addresses lowercased)."""
    params = {
        "chain_id": chain_id,
        "proxy_address": proxy_address.lower(),
        "implementation_address": implementation_address.lower(),
    }
    db.execute(Q.UPSERT_PROXY_CACHE, params)


def clear_cached_proxy(db: Database, chain_id: int, proxy_address: str) -> bool:
    """Drop the cached proxy resolution; True if an entry was removed."""
    params = {"chain_id": chain_id, "proxy_address": proxy_address.lower()}
    removed = db.execute_rowcount(Q.DELETE_PROXY_CACHE, params)
    return removed > 0
"""Transaction intent operations."""

from __future__ import annotations

import json
from datetime import datetime
from typing import Any
from uuid import UUID

from brawny.db.base_new import Database
from brawny.db import queries as Q
from brawny.db import mappers as M
from brawny.model.types import TxIntent, JSONValue
from brawny.model.enums import IntentStatus

# Non-terminal intent statuses. Shared by the active/pending counters so the
# two lists can never drift apart.
_NON_TERMINAL_STATUSES: list[str] = [
    IntentStatus.CREATED.value,
    IntentStatus.CLAIMED.value,
    IntentStatus.SENDING.value,
    IntentStatus.PENDING.value,
]


def create_intent(
    db: Database,
    intent_id: UUID,
    job_id: str,
    chain_id: int,
    signer_address: str,
    idempotency_key: str,
    to_address: str,
    data: str | None,
    value_wei: str,
    gas_limit: int | None,
    max_fee_per_gas: str | None,
    max_priority_fee_per_gas: str | None,
    min_confirmations: int,
    deadline_ts: datetime | None,
    broadcast_group: str | None = None,
    broadcast_endpoints: list[str] | None = None,
    metadata: dict[str, JSONValue] | None = None,
) -> TxIntent | None:
    """Create a new transaction intent.

    Returns None if idempotency_key already exists (ON CONFLICT DO NOTHING).

    Args:
        metadata: Per-intent context for alerts. Must be JSON-serializable.

    Raises:
        ValueError: If metadata is not JSON-serializable.
    """
    # Validate and serialize metadata up front so a bad payload fails fast,
    # before anything touches the database.
    metadata_json: str | None = None
    if metadata:
        try:
            metadata_json = json.dumps(metadata)
        except TypeError as e:
            # Chain the original TypeError so the offending value stays
            # visible in the traceback.
            raise ValueError(f"intent.metadata must be JSON-serializable: {e}") from e

    row = db.fetch_one(Q.CREATE_INTENT, {
        "intent_id": str(intent_id),
        "job_id": job_id,
        "chain_id": chain_id,
        "signer_address": signer_address,
        "idempotency_key": idempotency_key,
        "to_address": to_address,
        "data": data,
        "value_wei": value_wei,
        "gas_limit": gas_limit,
        "max_fee_per_gas": max_fee_per_gas,
        "max_priority_fee_per_gas": max_priority_fee_per_gas,
        "min_confirmations": min_confirmations,
        "deadline_ts": deadline_ts,
        "broadcast_group": broadcast_group,
        "broadcast_endpoints_json": json.dumps(broadcast_endpoints) if broadcast_endpoints else None,
        "metadata_json": metadata_json,
    })
    return M.row_to_intent(row) if row else None


def get_intent(db: Database, intent_id: UUID) -> TxIntent | None:
    """Get intent by ID."""
    row = db.fetch_one(Q.GET_INTENT, {"intent_id": str(intent_id)})
    return M.row_to_intent(row) if row else None


def get_intent_by_idempotency_key(
    db: Database,
    chain_id: int,
    signer_address: str,
    idempotency_key: str,
) -> TxIntent | None:
    """Get intent by idempotency key (scoped to chain and signer)."""
    row = db.fetch_one(Q.GET_INTENT_BY_IDEMPOTENCY_KEY, {
        "chain_id": chain_id,
        "signer_address": signer_address.lower(),
        "idempotency_key": idempotency_key,
    })
    return M.row_to_intent(row) if row else None


def claim_next_intent(
    db: Database,
    claim_token: str,
    claimed_by: str | None = None,
) -> TxIntent | None:
    """Claim the next available intent for processing.

    Uses dialect-specific query (FOR UPDATE SKIP LOCKED on Postgres).
    """
    query = Q.CLAIM_NEXT_INTENT[db.dialect]
    row = db.fetch_one(query, {"claim_token": claim_token, "claimed_by": claimed_by})
    return M.row_to_intent(row) if row else None


def update_intent_status(db: Database, intent_id: UUID, status: str) -> bool:
    """Update intent status. Returns True if a row was updated."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_STATUS, {
        "intent_id": str(intent_id),
        "status": status,
    })
    return count > 0


def update_intent_to_sending(db: Database, intent_id: UUID, claim_token: str) -> bool:
    """Transition intent from claimed to sending (validates claim token)."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_TO_SENDING, {
        "intent_id": str(intent_id),
        "claim_token": claim_token,
    })
    return count > 0


def update_intent_to_pending(db: Database, intent_id: UUID) -> bool:
    """Transition intent to pending (broadcast successful)."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_TO_PENDING, {
        "intent_id": str(intent_id),
    })
    return count > 0


def update_intent_to_confirmed(db: Database, intent_id: UUID) -> bool:
    """Transition intent to confirmed."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_TO_CONFIRMED, {
        "intent_id": str(intent_id),
    })
    return count > 0


def update_intent_to_failed(db: Database, intent_id: UUID) -> bool:
    """Transition intent to failed."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_TO_FAILED, {
        "intent_id": str(intent_id),
    })
    return count > 0


def update_intent_to_reverted(db: Database, intent_id: UUID) -> bool:
    """Transition intent to reverted."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_TO_REVERTED, {
        "intent_id": str(intent_id),
    })
    return count > 0


def set_intent_retry_after(db: Database, intent_id: UUID, retry_after: datetime) -> bool:
    """Set retry_after and increment retry_count."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_RETRY_AFTER, {
        "intent_id": str(intent_id),
        "retry_after": retry_after,
    })
    return count > 0


def release_intent_claim(
    db: Database, intent_id: UUID, claim_token: str, retry_after: datetime | None = None
) -> bool:
    """Release a claimed intent back to created state."""
    count = db.execute_rowcount(Q.RELEASE_INTENT_CLAIM, {
        "intent_id": str(intent_id),
        "claim_token": claim_token,
        "retry_after": retry_after,
    })
    return count > 0


def update_intent_broadcast_binding(
    db: Database,
    intent_id: UUID,
    broadcast_group: str,
    broadcast_endpoints: list[str],
) -> bool:
    """Set the broadcast binding for an intent (RPC endpoints to use)."""
    count = db.execute_rowcount(Q.UPDATE_INTENT_BROADCAST_BINDING, {
        "intent_id": str(intent_id),
        "broadcast_group": broadcast_group,
        "broadcast_endpoints_json": json.dumps(broadcast_endpoints),
    })
    return count > 0


def get_intents_by_status(
    db: Database,
    status: str | list[str],
    chain_id: int | None = None,
    job_id: str | None = None,
    limit: int = 100,
) -> list[TxIntent]:
    """Get intents by status with optional filters.

    The SQL is assembled dynamically, but only from internal status strings
    and named placeholders — user values never reach the query text.
    """
    if isinstance(status, str):
        status = [status]

    # Build query dynamically based on filters
    placeholders = ", ".join(f":status_{i}" for i in range(len(status)))
    query = f"SELECT * FROM tx_intents WHERE status IN ({placeholders})"
    params: dict[str, str | int] = {f"status_{i}": s for i, s in enumerate(status)}

    if chain_id is not None:
        query += " AND chain_id = :chain_id"
        params["chain_id"] = chain_id
    if job_id is not None:
        query += " AND job_id = :job_id"
        params["job_id"] = job_id

    query += " ORDER BY created_at ASC LIMIT :limit"
    params["limit"] = limit

    rows = db.fetch_all(query, params)
    return [M.row_to_intent(row) for row in rows]


def get_active_intent_count(db: Database, job_id: str, chain_id: int | None = None) -> int:
    """Get count of active (non-terminal) intents for a job."""
    statuses = _NON_TERMINAL_STATUSES
    placeholders = ", ".join(f":status_{i}" for i in range(len(statuses)))
    query = f"SELECT COUNT(*) AS count FROM tx_intents WHERE status IN ({placeholders}) AND job_id = :job_id"
    params: dict[str, str | int] = {f"status_{i}": s for i, s in enumerate(statuses)}
    params["job_id"] = job_id

    if chain_id is not None:
        query += " AND chain_id = :chain_id"
        params["chain_id"] = chain_id

    row = db.fetch_one(query, params)
    return int(row["count"]) if row else 0


def get_pending_intent_count(db: Database, chain_id: int | None = None) -> int:
    """Get count of non-terminal intents across all jobs."""
    statuses = _NON_TERMINAL_STATUSES
    placeholders = ", ".join(f":status_{i}" for i in range(len(statuses)))
    query = f"SELECT COUNT(*) AS count FROM tx_intents WHERE status IN ({placeholders})"
    params: dict[str, str | int] = {f"status_{i}": s for i, s in enumerate(statuses)}

    if chain_id is not None:
        query += " AND chain_id = :chain_id"
        params["chain_id"] = chain_id

    row = db.fetch_one(query, params)
    return int(row["count"]) if row else 0


def get_backing_off_intent_count(db: Database, chain_id: int | None = None) -> int:
    """Get count of intents in backoff (retry_after still in the future)."""
    query = "SELECT COUNT(*) AS count FROM tx_intents WHERE retry_after > CURRENT_TIMESTAMP"
    params: dict[str, int] = {}

    if chain_id is not None:
        query += " AND chain_id = :chain_id"
        params["chain_id"] = chain_id

    row = db.fetch_one(query, params)
    return int(row["count"]) if row else 0


def get_stuck_sending_intents(db: Database, cutoff_time: datetime) -> list[TxIntent]:
    """Get intents stuck in SENDING state since before *cutoff_time*."""
    rows = db.fetch_all(Q.GET_STUCK_SENDING_INTENTS, {"cutoff_time": cutoff_time})
    return [M.row_to_intent(row) for row in rows]


def get_stuck_pending_intents(db: Database, cutoff_time: datetime) -> list[TxIntent]:
    """Get intents stuck in PENDING state since before *cutoff_time*."""
    rows = db.fetch_all(Q.GET_STUCK_PENDING_INTENTS, {"cutoff_time": cutoff_time})
    return [M.row_to_intent(row) for row in rows]


def delete_old_confirmed_intents(db: Database, cutoff_time: datetime) -> int:
    """Delete old terminal intents; return the number of rows removed."""
    return db.execute_rowcount(Q.DELETE_OLD_CONFIRMED_INTENTS, {"cutoff_time": cutoff_time})


def delete_abandoned_intents(db: Database, cutoff_time: datetime) -> int:
    """Delete old abandoned intents; return the number of rows removed."""
    return db.execute_rowcount(Q.DELETE_ABANDONED_INTENTS, {"cutoff_time": cutoff_time})
"""Job configuration and KV store operations."""

from __future__ import annotations

import json
from typing import Any

from brawny.db.base_new import Database
from brawny.db import queries as Q
from brawny.db import mappers as M
from brawny.model.types import JobConfig


def get_job(db: Database, job_id: str) -> JobConfig | None:
    """Look up a single job's configuration by its ID."""
    row = db.fetch_one(Q.GET_JOB, {"job_id": job_id})
    if not row:
        return None
    return M.row_to_job_config(row)


def get_enabled_jobs(db: Database) -> list[JobConfig]:
    """Return every enabled job, ordered by job_id."""
    return [M.row_to_job_config(r) for r in db.fetch_all(Q.GET_ENABLED_JOBS)]


def list_all_jobs(db: Database) -> list[JobConfig]:
    """Return every job (enabled or not), ordered by job_id."""
    return [M.row_to_job_config(r) for r in db.fetch_all(Q.LIST_ALL_JOBS)]


def upsert_job(
    db: Database,
    job_id: str,
    job_name: str,
    check_interval_blocks: int,
    enabled: bool = True,
) -> None:
    """Insert or update a job's configuration row."""
    params = {
        "job_id": job_id,
        "job_name": job_name,
        "check_interval_blocks": check_interval_blocks,
        # Stored as an integer flag for cross-dialect compatibility.
        "enabled": 1 if enabled else 0,
    }
    db.execute(Q.UPSERT_JOB, params)


def set_job_enabled(db: Database, job_id: str, enabled: bool) -> bool:
    """Flip a job's enabled flag; True if the job existed."""
    affected = db.execute_rowcount(Q.UPDATE_JOB_ENABLED, {
        "job_id": job_id,
        "enabled": 1 if enabled else 0,
    })
    return affected > 0


def update_job_checked(
    db: Database, job_id: str, block_number: int, triggered: bool = False
) -> bool:
    """Record that a job was checked (and optionally triggered) at a block."""
    # The triggered variant updates the last-triggered bookkeeping too.
    query = Q.UPDATE_JOB_TRIGGERED if triggered else Q.UPDATE_JOB_CHECKED
    affected = db.execute_rowcount(query, {
        "job_id": job_id,
        "block_number": block_number,
    })
    return affected > 0


def delete_job(db: Database, job_id: str) -> bool:
    """Remove a job configuration; True if a row was deleted."""
    affected = db.execute_rowcount(Q.DELETE_JOB, {"job_id": job_id})
    return affected > 0


# =============================================================================
# Job KV Store
# =============================================================================


def get_job_kv(db: Database, job_id: str, key: str, default: Any = None) -> Any:
    """Read a JSON-decoded value from a job's KV store, or *default* on miss."""
    row = db.fetch_one(Q.GET_JOB_KV, {"job_id": job_id, "key": key})
    if row is None:
        return default
    return json.loads(row["value_json"])


def set_job_kv(db: Database, job_id: str, key: str, value: Any) -> None:
    """Write a JSON-serializable value into a job's KV store."""
    params = {
        "job_id": job_id,
        "key": key,
        "value_json": json.dumps(value),
    }
    db.execute(Q.UPSERT_JOB_KV, params)


def delete_job_kv(db: Database, job_id: str, key: str) -> bool:
    """Remove one key from a job's KV store; True if it existed."""
    affected = db.execute_rowcount(Q.DELETE_JOB_KV, {"job_id": job_id, "key": key})
    return affected > 0


def delete_all_job_kv(db: Database, job_id: str) -> int:
    """Remove every KV entry for a job; return the number of rows deleted."""
    return db.execute_rowcount(Q.DELETE_ALL_JOB_KV, {"job_id": job_id})