brawny 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brawny/__init__.py +106 -0
- brawny/_context.py +232 -0
- brawny/_rpc/__init__.py +38 -0
- brawny/_rpc/broadcast.py +172 -0
- brawny/_rpc/clients.py +98 -0
- brawny/_rpc/context.py +49 -0
- brawny/_rpc/errors.py +252 -0
- brawny/_rpc/gas.py +158 -0
- brawny/_rpc/manager.py +982 -0
- brawny/_rpc/selector.py +156 -0
- brawny/accounts.py +534 -0
- brawny/alerts/__init__.py +132 -0
- brawny/alerts/abi_resolver.py +530 -0
- brawny/alerts/base.py +152 -0
- brawny/alerts/context.py +271 -0
- brawny/alerts/contracts.py +635 -0
- brawny/alerts/encoded_call.py +201 -0
- brawny/alerts/errors.py +267 -0
- brawny/alerts/events.py +680 -0
- brawny/alerts/function_caller.py +364 -0
- brawny/alerts/health.py +185 -0
- brawny/alerts/routing.py +118 -0
- brawny/alerts/send.py +364 -0
- brawny/api.py +660 -0
- brawny/chain.py +93 -0
- brawny/cli/__init__.py +16 -0
- brawny/cli/app.py +17 -0
- brawny/cli/bootstrap.py +37 -0
- brawny/cli/commands/__init__.py +41 -0
- brawny/cli/commands/abi.py +93 -0
- brawny/cli/commands/accounts.py +632 -0
- brawny/cli/commands/console.py +495 -0
- brawny/cli/commands/contract.py +139 -0
- brawny/cli/commands/health.py +112 -0
- brawny/cli/commands/init_project.py +86 -0
- brawny/cli/commands/intents.py +130 -0
- brawny/cli/commands/job_dev.py +254 -0
- brawny/cli/commands/jobs.py +308 -0
- brawny/cli/commands/logs.py +87 -0
- brawny/cli/commands/maintenance.py +182 -0
- brawny/cli/commands/migrate.py +51 -0
- brawny/cli/commands/networks.py +253 -0
- brawny/cli/commands/run.py +249 -0
- brawny/cli/commands/script.py +209 -0
- brawny/cli/commands/signer.py +248 -0
- brawny/cli/helpers.py +265 -0
- brawny/cli_templates.py +1445 -0
- brawny/config/__init__.py +74 -0
- brawny/config/models.py +404 -0
- brawny/config/parser.py +633 -0
- brawny/config/routing.py +55 -0
- brawny/config/validation.py +246 -0
- brawny/daemon/__init__.py +14 -0
- brawny/daemon/context.py +69 -0
- brawny/daemon/core.py +702 -0
- brawny/daemon/loops.py +327 -0
- brawny/db/__init__.py +78 -0
- brawny/db/base.py +986 -0
- brawny/db/base_new.py +165 -0
- brawny/db/circuit_breaker.py +97 -0
- brawny/db/global_cache.py +298 -0
- brawny/db/mappers.py +182 -0
- brawny/db/migrate.py +349 -0
- brawny/db/migrations/001_init.sql +186 -0
- brawny/db/migrations/002_add_included_block.sql +7 -0
- brawny/db/migrations/003_add_broadcast_at.sql +10 -0
- brawny/db/migrations/004_broadcast_binding.sql +20 -0
- brawny/db/migrations/005_add_retry_after.sql +9 -0
- brawny/db/migrations/006_add_retry_count_column.sql +11 -0
- brawny/db/migrations/007_add_gap_tracking.sql +18 -0
- brawny/db/migrations/008_add_transactions.sql +72 -0
- brawny/db/migrations/009_add_intent_metadata.sql +5 -0
- brawny/db/migrations/010_add_nonce_gap_index.sql +9 -0
- brawny/db/migrations/011_add_job_logs.sql +24 -0
- brawny/db/migrations/012_add_claimed_by.sql +5 -0
- brawny/db/ops/__init__.py +29 -0
- brawny/db/ops/attempts.py +108 -0
- brawny/db/ops/blocks.py +83 -0
- brawny/db/ops/cache.py +93 -0
- brawny/db/ops/intents.py +296 -0
- brawny/db/ops/jobs.py +110 -0
- brawny/db/ops/logs.py +97 -0
- brawny/db/ops/nonces.py +322 -0
- brawny/db/postgres.py +2535 -0
- brawny/db/postgres_new.py +196 -0
- brawny/db/queries.py +584 -0
- brawny/db/sqlite.py +2733 -0
- brawny/db/sqlite_new.py +191 -0
- brawny/history.py +126 -0
- brawny/interfaces.py +136 -0
- brawny/invariants.py +155 -0
- brawny/jobs/__init__.py +26 -0
- brawny/jobs/base.py +287 -0
- brawny/jobs/discovery.py +233 -0
- brawny/jobs/job_validation.py +111 -0
- brawny/jobs/kv.py +125 -0
- brawny/jobs/registry.py +283 -0
- brawny/keystore.py +484 -0
- brawny/lifecycle.py +551 -0
- brawny/logging.py +290 -0
- brawny/metrics.py +594 -0
- brawny/model/__init__.py +53 -0
- brawny/model/contexts.py +319 -0
- brawny/model/enums.py +70 -0
- brawny/model/errors.py +194 -0
- brawny/model/events.py +93 -0
- brawny/model/startup.py +20 -0
- brawny/model/types.py +483 -0
- brawny/networks/__init__.py +96 -0
- brawny/networks/config.py +269 -0
- brawny/networks/manager.py +423 -0
- brawny/obs/__init__.py +67 -0
- brawny/obs/emit.py +158 -0
- brawny/obs/health.py +175 -0
- brawny/obs/heartbeat.py +133 -0
- brawny/reconciliation.py +108 -0
- brawny/scheduler/__init__.py +19 -0
- brawny/scheduler/poller.py +472 -0
- brawny/scheduler/reorg.py +632 -0
- brawny/scheduler/runner.py +708 -0
- brawny/scheduler/shutdown.py +371 -0
- brawny/script_tx.py +297 -0
- brawny/scripting.py +251 -0
- brawny/startup.py +76 -0
- brawny/telegram.py +393 -0
- brawny/testing.py +108 -0
- brawny/tx/__init__.py +41 -0
- brawny/tx/executor.py +1071 -0
- brawny/tx/fees.py +50 -0
- brawny/tx/intent.py +423 -0
- brawny/tx/monitor.py +628 -0
- brawny/tx/nonce.py +498 -0
- brawny/tx/replacement.py +456 -0
- brawny/tx/utils.py +26 -0
- brawny/utils.py +205 -0
- brawny/validation.py +69 -0
- brawny-0.1.13.dist-info/METADATA +156 -0
- brawny-0.1.13.dist-info/RECORD +141 -0
- brawny-0.1.13.dist-info/WHEEL +5 -0
- brawny-0.1.13.dist-info/entry_points.txt +2 -0
- brawny-0.1.13.dist-info/top_level.txt +1 -0
brawny/db/mappers.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
"""Row to model mappers for database results.
|
|
2
|
+
|
|
3
|
+
Centralized conversion from database rows (dicts) to domain models.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
from typing import Any
|
|
10
|
+
from uuid import UUID
|
|
11
|
+
|
|
12
|
+
from brawny.db.base_new import BlockState, BlockHashEntry, ABICacheEntry, ProxyCacheEntry
|
|
13
|
+
from brawny.model.types import (
|
|
14
|
+
JobConfig,
|
|
15
|
+
TxIntent,
|
|
16
|
+
TxAttempt,
|
|
17
|
+
SignerState,
|
|
18
|
+
NonceReservation,
|
|
19
|
+
GasParams,
|
|
20
|
+
)
|
|
21
|
+
from brawny.model.enums import IntentStatus, AttemptStatus, NonceStatus
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def row_to_block_state(row: dict[str, Any]) -> BlockState:
    """Convert database row to BlockState."""
    keys = (
        "chain_id",
        "last_processed_block_number",
        "last_processed_block_hash",
        "created_at",
        "updated_at",
    )
    # All fields are required columns; a missing key raises KeyError, same
    # as direct row["..."] access would.
    return BlockState(**{key: row[key] for key in keys})
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def row_to_block_hash_entry(row: dict[str, Any]) -> BlockHashEntry:
    """Convert database row to BlockHashEntry."""
    keys = ("id", "chain_id", "block_number", "block_hash", "inserted_at")
    # Every column is mandatory, so plain indexing semantics are preserved.
    return BlockHashEntry(**{key: row[key] for key in keys})
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def row_to_job_config(row: dict[str, Any]) -> JobConfig:
    """Convert database row to JobConfig."""
    keys = (
        "job_id",
        "job_name",
        "check_interval_blocks",
        "last_checked_block_number",
        "last_triggered_block_number",
        "created_at",
        "updated_at",
    )
    kwargs = {key: row[key] for key in keys}
    # Normalize to a real bool (the driver may return 0/1 for this column).
    kwargs["enabled"] = bool(row["enabled"])
    return JobConfig(**kwargs)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def row_to_signer_state(row: dict[str, Any]) -> SignerState:
    """Convert database row to SignerState."""
    required = (
        "chain_id",
        "signer_address",
        "next_nonce",
        "last_synced_chain_nonce",
        "created_at",
        "updated_at",
    )
    kwargs = {key: row[key] for key in required}
    # These columns were added by later migrations; default to None when absent.
    for optional in ("gap_started_at", "alias"):
        kwargs[optional] = row.get(optional)
    return SignerState(**kwargs)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def row_to_nonce_reservation(row: dict[str, Any]) -> NonceReservation:
    """Convert database row to NonceReservation."""
    raw_intent_id = row["intent_id"]
    # SQLite stores UUIDs as strings; only convert non-empty string values,
    # leaving None (and non-string driver types) untouched.
    if raw_intent_id and isinstance(raw_intent_id, str):
        raw_intent_id = UUID(raw_intent_id)

    keys = ("id", "chain_id", "signer_address", "nonce", "created_at", "updated_at")
    kwargs = {key: row[key] for key in keys}
    return NonceReservation(
        status=NonceStatus(row["status"]),
        intent_id=raw_intent_id,
        **kwargs,
    )
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def row_to_intent(row: dict[str, Any]) -> TxIntent:
    """Convert database row to TxIntent."""

    def _as_uuid(value: Any) -> Any:
        # SQLite stores UUIDs as strings; non-string values pass through.
        return UUID(value) if isinstance(value, str) else value

    # Decode metadata JSON exactly once, here at the DB boundary.
    raw_metadata = row.get("metadata_json")
    parsed_metadata = json.loads(raw_metadata) if raw_metadata else {}

    required = (
        "job_id",
        "chain_id",
        "signer_address",
        "idempotency_key",
        "to_address",
        "data",
        "value_wei",
        "gas_limit",
        "max_fee_per_gas",
        "max_priority_fee_per_gas",
        "min_confirmations",
        "deadline_ts",
        "claim_token",
        "claimed_at",
        "created_at",
        "updated_at",
    )
    kwargs = {key: row[key] for key in required}
    return TxIntent(
        intent_id=_as_uuid(row["intent_id"]),
        status=IntentStatus(row["status"]),
        retry_after=row.get("retry_after"),
        retry_count=row.get("retry_count", 0),
        broadcast_group=row.get("broadcast_group"),
        broadcast_endpoints_json=row.get("broadcast_endpoints_json"),
        metadata=parsed_metadata,
        **kwargs,
    )
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def row_to_attempt(row: dict[str, Any]) -> TxAttempt:
    """Convert database row to TxAttempt."""

    def _as_uuid(value: Any) -> Any:
        # SQLite stores UUIDs as strings; non-string values pass through.
        return UUID(value) if isinstance(value, str) else value

    # replaces_attempt_id is optional: only convert non-empty string values.
    replaces = row.get("replaces_attempt_id")
    if replaces and isinstance(replaces, str):
        replaces = UUID(replaces)

    return TxAttempt(
        attempt_id=_as_uuid(row["attempt_id"]),
        intent_id=_as_uuid(row["intent_id"]),
        nonce=row["nonce"],
        tx_hash=row["tx_hash"],
        gas_params=GasParams.from_json(row["gas_params_json"]),
        status=AttemptStatus(row["status"]),
        error_code=row.get("error_code"),
        error_detail=row.get("error_detail"),
        replaces_attempt_id=replaces,
        broadcast_block=row.get("broadcast_block"),
        broadcast_at=row.get("broadcast_at"),
        included_block=row.get("included_block"),
        created_at=row["created_at"],
        updated_at=row["updated_at"],
        broadcast_group=row.get("broadcast_group"),
        endpoint_url=row.get("endpoint_url"),
    )
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def row_to_abi_cache(row: dict[str, Any]) -> ABICacheEntry:
    """Convert database row to ABICacheEntry."""
    keys = ("chain_id", "address", "abi_json", "source", "resolved_at")
    # All columns are required; indexing semantics match direct access.
    return ABICacheEntry(**{key: row[key] for key in keys})
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def row_to_proxy_cache(row: dict[str, Any]) -> ProxyCacheEntry:
    """Convert database row to ProxyCacheEntry."""
    keys = ("chain_id", "proxy_address", "implementation_address", "resolved_at")
    # All columns are required; indexing semantics match direct access.
    return ProxyCacheEntry(**{key: row[key] for key in keys})
|
brawny/db/migrate.py
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
"""Database migration management for brawny.
|
|
2
|
+
|
|
3
|
+
Handles schema migrations for both PostgreSQL and SQLite.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import re
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import TYPE_CHECKING
|
|
13
|
+
|
|
14
|
+
from brawny.model.errors import DatabaseError
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from brawny.db.base import Database
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class Migration:
    """Represents a database migration.

    One instance corresponds to a single ``NNN_name.sql`` file in the
    migrations directory; ``applied_at`` is populated from the
    ``schema_migrations`` table when known.
    """

    # Numeric filename prefix, e.g. "001" for "001_init.sql".
    version: str
    filename: str
    # Raw SQL text read from the migration file.
    sql: str
    # When this migration was recorded as applied; None while pending.
    applied_at: datetime | None = None

    @property
    def is_applied(self) -> bool:
        """True once applied_at has been set (migration recorded as applied)."""
        return self.applied_at is not None
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def discover_migrations() -> list[Migration]:
    """Discover all migration files in the migrations directory.

    Returns:
        List of Migration objects sorted by version

    Raises:
        DatabaseError: If duplicate migration versions are found
    """
    found: list[Migration] = []
    versions_seen: dict[str, str] = {}

    if not MIGRATIONS_DIR.exists():
        return found

    for sql_file in sorted(MIGRATIONS_DIR.glob("*.sql")):
        # Version is the numeric filename prefix ("001_init.sql" -> "001");
        # files that don't match the pattern are ignored.
        match = re.match(r"^(\d+)_.*\.sql$", sql_file.name)
        if not match:
            continue
        version = match.group(1)

        # Two files sharing a version number is a packaging error.
        if version in versions_seen:
            raise DatabaseError(
                f"Duplicate migration version {version}: "
                f"{versions_seen[version]} and {sql_file.name}"
            )
        versions_seen[version] = sql_file.name

        found.append(
            Migration(
                version=version,
                filename=sql_file.name,
                sql=sql_file.read_text(),
            )
        )

    return sorted(found, key=lambda m: m.version)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def get_applied_migrations(db: Database) -> set[str]:
    """Get set of applied migration versions.

    Args:
        db: Database connection

    Returns:
        Set of version strings that have been applied
    """
    try:
        rows = db.execute_returning(
            """
            SELECT version, applied_at
            FROM schema_migrations
            ORDER BY version
            """
        )
        return {record["version"] for record in rows}
    except Exception:
        # schema_migrations doesn't exist yet (fresh database):
        # nothing has been applied.
        return set()
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def get_pending_migrations(db: Database) -> list[Migration]:
    """Get list of migrations that haven't been applied.

    Args:
        db: Database connection

    Returns:
        List of pending Migration objects
    """
    already_applied = get_applied_migrations(db)
    return [
        migration
        for migration in discover_migrations()
        if migration.version not in already_applied
    ]
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def get_migration_status(db: Database) -> list[Migration]:
    """Get status of all migrations.

    Args:
        db: Database connection

    Returns:
        List of all migrations with applied_at set if applied
    """
    applied_at_by_version: dict[str, datetime] = {}

    try:
        rows = db.execute_returning(
            """
            SELECT version, applied_at
            FROM schema_migrations
            ORDER BY version
            """
        )
        for record in rows:
            applied_at_by_version[record["version"]] = record["applied_at"]
    except Exception:
        # Table missing (fresh database): report everything as unapplied.
        pass

    migrations = discover_migrations()
    for migration in migrations:
        if migration.version in applied_at_by_version:
            migration.applied_at = applied_at_by_version[migration.version]

    return migrations
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def run_migration(db: Database, migration: Migration) -> None:
    """Run a single migration.

    Args:
        db: Database connection
        migration: Migration to run

    Raises:
        DatabaseError: If migration fails
    """
    try:
        # Migration 012 (claimed_by column) is special-cased on SQLite:
        # older databases may already have the column, and SQLite has no
        # "ADD COLUMN IF NOT EXISTS", so probe the schema first.
        if db.dialect == "sqlite" and migration.version == "012":
            with db.transaction():
                existing = {
                    r["name"].lower()
                    for r in db.execute_returning("PRAGMA table_info(tx_intents)")
                }
                if "claimed_by" in existing:
                    # Column already present: just record the version as applied.
                    db.execute(
                        "INSERT OR IGNORE INTO schema_migrations (version) VALUES (?)",
                        (migration.version,),
                    )
                    return
                try:
                    db.execute(
                        "ALTER TABLE tx_intents ADD COLUMN claimed_by VARCHAR(200)"
                    )
                except Exception as exc:
                    # Tolerate a racing/previous ADD COLUMN; anything else is fatal.
                    if "duplicate column name" not in str(exc).lower():
                        raise
                db.execute(
                    "INSERT OR IGNORE INTO schema_migrations (version) VALUES (?)",
                    (migration.version,),
                )
                return

        with db.transaction():
            # Split SQL into individual statements for SQLite compatibility
            # SQLite can only execute one statement at a time
            # NOTE(review): this generic path never inserts into
            # schema_migrations itself — presumably each migration .sql file
            # records its own version; confirm against the migration files.
            statements = _split_sql_statements(migration.sql)
            for stmt in statements:
                stmt = stmt.strip()
                if not stmt:
                    continue
                try:
                    db.execute(stmt)
                except DatabaseError as e:
                    if _is_duplicate_column_error(e, stmt):
                        # Idempotent safety for already-applied schema changes.
                        continue
                    raise
    except Exception as e:
        # Chain the original exception (PEP 3134) so the root cause is
        # preserved in tracebacks instead of being flattened into a string.
        raise DatabaseError(
            f"Migration {migration.version} ({migration.filename}) failed: {e}"
        ) from e
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def _is_duplicate_column_error(error: Exception, stmt: str) -> bool:
|
|
206
|
+
"""Return True if the error indicates an already-existing column."""
|
|
207
|
+
message = str(error).lower()
|
|
208
|
+
if "duplicate column name" in message or "already exists" in message:
|
|
209
|
+
if "add column" in stmt.lower():
|
|
210
|
+
return True
|
|
211
|
+
return False
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _split_sql_statements(sql: str) -> list[str]:
|
|
215
|
+
"""Split SQL into individual statements.
|
|
216
|
+
|
|
217
|
+
Handles semicolons, comments, and multi-line statements.
|
|
218
|
+
|
|
219
|
+
Args:
|
|
220
|
+
sql: SQL text with multiple statements
|
|
221
|
+
|
|
222
|
+
Returns:
|
|
223
|
+
List of individual statements
|
|
224
|
+
"""
|
|
225
|
+
# Remove SQL comments
|
|
226
|
+
lines = []
|
|
227
|
+
for line in sql.split('\n'):
|
|
228
|
+
# Remove -- comments
|
|
229
|
+
if '--' in line:
|
|
230
|
+
line = line[:line.index('--')]
|
|
231
|
+
lines.append(line)
|
|
232
|
+
sql = '\n'.join(lines)
|
|
233
|
+
|
|
234
|
+
# Split on semicolons, but be careful about strings
|
|
235
|
+
# This is a simple split - for production, use a proper SQL parser
|
|
236
|
+
statements = []
|
|
237
|
+
current = []
|
|
238
|
+
in_string = False
|
|
239
|
+
string_char = None
|
|
240
|
+
|
|
241
|
+
for char in sql:
|
|
242
|
+
if char in ('"', "'") and not in_string:
|
|
243
|
+
in_string = True
|
|
244
|
+
string_char = char
|
|
245
|
+
current.append(char)
|
|
246
|
+
elif char == string_char and in_string:
|
|
247
|
+
in_string = False
|
|
248
|
+
string_char = None
|
|
249
|
+
current.append(char)
|
|
250
|
+
elif char == ';' and not in_string:
|
|
251
|
+
stmt = ''.join(current).strip()
|
|
252
|
+
if stmt:
|
|
253
|
+
statements.append(stmt)
|
|
254
|
+
current = []
|
|
255
|
+
else:
|
|
256
|
+
current.append(char)
|
|
257
|
+
|
|
258
|
+
# Don't forget the last statement if no trailing semicolon
|
|
259
|
+
stmt = ''.join(current).strip()
|
|
260
|
+
if stmt:
|
|
261
|
+
statements.append(stmt)
|
|
262
|
+
|
|
263
|
+
return statements
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def run_pending_migrations(db: Database) -> list[Migration]:
    """Run all pending migrations.

    Args:
        db: Database connection

    Returns:
        List of migrations that were applied

    Raises:
        DatabaseError: If any migration fails
    """
    to_apply = get_pending_migrations(db)

    for migration in to_apply:
        run_migration(db, migration)
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
        # datetime.now(timezone.utc) is the replacement, but it returns an
        # aware datetime — confirm downstream consumers before switching.
        migration.applied_at = datetime.utcnow()

    # Fail fast if the schema still lacks critical columns after migrating.
    verify_critical_schema(db)

    return to_apply
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
def verify_critical_schema(db: Database) -> None:
    """Hard-fail if critical columns missing. Runs for daemon + CLI."""
    # If earlier schemas lacked any of these, reduce to the minimum needed for safe operation.
    required = {"intent_id", "status", "claim_token", "claimed_at", "claimed_by"}

    if db.dialect == "sqlite":
        columns = db.execute_returning("PRAGMA table_info(tx_intents)")
        present = {col["name"].lower() for col in columns}
    else:
        # PostgreSQL path: introspect via information_schema.
        columns = db.execute_returning(
            "SELECT column_name FROM information_schema.columns "
            "WHERE table_schema = current_schema() AND table_name = 'tx_intents'"
        )
        present = {col["column_name"].lower() for col in columns}

    missing = required - present
    if missing:
        raise RuntimeError(
            f"FATAL: tx_intents missing columns {missing}. "
            f"Migration 012 may have failed. Check DB manually."
        )
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
class Migrator:
    """High-level migration management interface."""

    def __init__(self, db: Database) -> None:
        self.db = db

    def status(self) -> list[dict[str, str | bool]]:
        """Get migration status as a list of dicts.

        Returns:
            List of dicts with version, filename, applied, and applied_at
        """
        report = []
        for migration in get_migration_status(self.db):
            applied_at = migration.applied_at
            report.append(
                {
                    "version": migration.version,
                    "filename": migration.filename,
                    "applied": migration.is_applied,
                    "applied_at": applied_at.isoformat() if applied_at else None,
                }
            )
        return report

    def pending(self) -> list[Migration]:
        """Get pending migrations."""
        return get_pending_migrations(self.db)

    def migrate(self) -> list[Migration]:
        """Run all pending migrations.

        Returns:
            List of applied migrations
        """
        return run_pending_migrations(self.db)

    def has_pending(self) -> bool:
        """Check if there are pending migrations."""
        return bool(self.pending())
|