brawny 0.1.13__py3-none-any.whl → 0.1.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135) hide show
  1. brawny/__init__.py +2 -0
  2. brawny/_context.py +5 -5
  3. brawny/_rpc/__init__.py +36 -12
  4. brawny/_rpc/broadcast.py +14 -13
  5. brawny/_rpc/caller.py +243 -0
  6. brawny/_rpc/client.py +539 -0
  7. brawny/_rpc/clients.py +11 -11
  8. brawny/_rpc/context.py +23 -0
  9. brawny/_rpc/errors.py +465 -31
  10. brawny/_rpc/gas.py +7 -6
  11. brawny/_rpc/pool.py +18 -0
  12. brawny/_rpc/retry.py +266 -0
  13. brawny/_rpc/retry_policy.py +81 -0
  14. brawny/accounts.py +28 -9
  15. brawny/alerts/__init__.py +15 -18
  16. brawny/alerts/abi_resolver.py +212 -36
  17. brawny/alerts/base.py +2 -2
  18. brawny/alerts/contracts.py +77 -10
  19. brawny/alerts/errors.py +30 -3
  20. brawny/alerts/events.py +38 -5
  21. brawny/alerts/health.py +19 -13
  22. brawny/alerts/send.py +513 -55
  23. brawny/api.py +39 -11
  24. brawny/assets/AGENTS.md +325 -0
  25. brawny/async_runtime.py +48 -0
  26. brawny/chain.py +3 -3
  27. brawny/cli/commands/__init__.py +2 -0
  28. brawny/cli/commands/console.py +69 -19
  29. brawny/cli/commands/contract.py +2 -2
  30. brawny/cli/commands/controls.py +121 -0
  31. brawny/cli/commands/health.py +2 -2
  32. brawny/cli/commands/job_dev.py +6 -5
  33. brawny/cli/commands/jobs.py +99 -2
  34. brawny/cli/commands/maintenance.py +13 -29
  35. brawny/cli/commands/migrate.py +1 -0
  36. brawny/cli/commands/run.py +10 -3
  37. brawny/cli/commands/script.py +8 -3
  38. brawny/cli/commands/signer.py +143 -26
  39. brawny/cli/helpers.py +0 -3
  40. brawny/cli_templates.py +25 -349
  41. brawny/config/__init__.py +4 -1
  42. brawny/config/models.py +43 -57
  43. brawny/config/parser.py +268 -57
  44. brawny/config/validation.py +52 -15
  45. brawny/daemon/context.py +4 -2
  46. brawny/daemon/core.py +185 -63
  47. brawny/daemon/loops.py +166 -98
  48. brawny/daemon/supervisor.py +261 -0
  49. brawny/db/__init__.py +14 -26
  50. brawny/db/base.py +248 -151
  51. brawny/db/global_cache.py +11 -1
  52. brawny/db/migrate.py +175 -28
  53. brawny/db/migrations/001_init.sql +4 -3
  54. brawny/db/migrations/010_add_nonce_gap_index.sql +1 -1
  55. brawny/db/migrations/011_add_job_logs.sql +1 -2
  56. brawny/db/migrations/012_add_claimed_by.sql +2 -2
  57. brawny/db/migrations/013_attempt_unique.sql +10 -0
  58. brawny/db/migrations/014_add_lease_expires_at.sql +5 -0
  59. brawny/db/migrations/015_add_signer_alias.sql +14 -0
  60. brawny/db/migrations/016_runtime_controls_and_quarantine.sql +32 -0
  61. brawny/db/migrations/017_add_job_drain.sql +6 -0
  62. brawny/db/migrations/018_add_nonce_reset_audit.sql +20 -0
  63. brawny/db/migrations/019_add_job_cooldowns.sql +8 -0
  64. brawny/db/migrations/020_attempt_unique_initial.sql +7 -0
  65. brawny/db/ops/__init__.py +3 -25
  66. brawny/db/ops/logs.py +1 -2
  67. brawny/db/queries.py +47 -91
  68. brawny/db/serialized.py +65 -0
  69. brawny/db/sqlite/__init__.py +1001 -0
  70. brawny/db/sqlite/connection.py +231 -0
  71. brawny/db/sqlite/execute.py +116 -0
  72. brawny/db/sqlite/mappers.py +190 -0
  73. brawny/db/sqlite/repos/attempts.py +372 -0
  74. brawny/db/sqlite/repos/block_state.py +102 -0
  75. brawny/db/sqlite/repos/cache.py +104 -0
  76. brawny/db/sqlite/repos/intents.py +1021 -0
  77. brawny/db/sqlite/repos/jobs.py +200 -0
  78. brawny/db/sqlite/repos/maintenance.py +182 -0
  79. brawny/db/sqlite/repos/signers_nonces.py +566 -0
  80. brawny/db/sqlite/tx.py +119 -0
  81. brawny/http.py +194 -0
  82. brawny/invariants.py +11 -24
  83. brawny/jobs/base.py +8 -0
  84. brawny/jobs/job_validation.py +2 -1
  85. brawny/keystore.py +83 -7
  86. brawny/lifecycle.py +64 -12
  87. brawny/logging.py +0 -2
  88. brawny/metrics.py +84 -12
  89. brawny/model/contexts.py +111 -9
  90. brawny/model/enums.py +1 -0
  91. brawny/model/errors.py +18 -0
  92. brawny/model/types.py +47 -131
  93. brawny/network_guard.py +133 -0
  94. brawny/networks/__init__.py +5 -5
  95. brawny/networks/config.py +1 -7
  96. brawny/networks/manager.py +14 -11
  97. brawny/runtime_controls.py +74 -0
  98. brawny/scheduler/poller.py +11 -7
  99. brawny/scheduler/reorg.py +95 -39
  100. brawny/scheduler/runner.py +442 -168
  101. brawny/scheduler/shutdown.py +3 -3
  102. brawny/script_tx.py +3 -3
  103. brawny/telegram.py +53 -7
  104. brawny/testing.py +1 -0
  105. brawny/timeout.py +38 -0
  106. brawny/tx/executor.py +922 -308
  107. brawny/tx/intent.py +54 -16
  108. brawny/tx/monitor.py +31 -12
  109. brawny/tx/nonce.py +212 -90
  110. brawny/tx/replacement.py +69 -18
  111. brawny/tx/retry_policy.py +24 -0
  112. brawny/tx/stages/types.py +75 -0
  113. brawny/types.py +18 -0
  114. brawny/utils.py +41 -0
  115. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/METADATA +3 -3
  116. brawny-0.1.22.dist-info/RECORD +163 -0
  117. brawny/_rpc/manager.py +0 -982
  118. brawny/_rpc/selector.py +0 -156
  119. brawny/db/base_new.py +0 -165
  120. brawny/db/mappers.py +0 -182
  121. brawny/db/migrations/008_add_transactions.sql +0 -72
  122. brawny/db/ops/attempts.py +0 -108
  123. brawny/db/ops/blocks.py +0 -83
  124. brawny/db/ops/cache.py +0 -93
  125. brawny/db/ops/intents.py +0 -296
  126. brawny/db/ops/jobs.py +0 -110
  127. brawny/db/ops/nonces.py +0 -322
  128. brawny/db/postgres.py +0 -2535
  129. brawny/db/postgres_new.py +0 -196
  130. brawny/db/sqlite.py +0 -2733
  131. brawny/db/sqlite_new.py +0 -191
  132. brawny-0.1.13.dist-info/RECORD +0 -141
  133. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/WHEEL +0 -0
  134. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/entry_points.txt +0 -0
  135. {brawny-0.1.13.dist-info → brawny-0.1.22.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,231 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import socket
5
+ import sqlite3
6
+ import subprocess
7
+ import sys
8
+ from pathlib import Path
9
+ from typing import Any
10
+
11
+ try:
12
+ import fcntl
13
+ except ImportError: # pragma: no cover - non-Unix platforms
14
+ fcntl = None # type: ignore
15
+
16
+ import threading
17
+
18
+ from brawny.logging import get_logger
19
+ from brawny.model.errors import DatabaseError
20
+
21
+ logger = get_logger("brawny.db.sqlite")
22
+
23
+
24
def connect(db: Any) -> None:
    """Bring *db* into the connected state.

    For file-backed databases this: creates the parent directory, refuses
    network filesystems, and takes an exclusive flock-based runner lock so
    only one process uses the file. It then opens the first (per-thread)
    connection and logs the effective journal mode.

    Raises:
        DatabaseError: if the database was closed, the filesystem is
            unsuitable, or the runner lock is already held.

    On any failure, everything acquired so far (connection, per-thread
    cache, runner lock, lock file) is rolled back before re-raising.
    """
    if db._closed:
        raise DatabaseError("Database is closed")
    if db._connected:
        # Idempotent: connecting twice is a no-op.
        return

    lock_acquired = False
    conn = None
    try:
        if db._database_path != ":memory:":
            path = Path(db._database_path)
            path.parent.mkdir(parents=True, exist_ok=True)
            _assert_local_filesystem(path)
            _acquire_db_lock(db, path)
            lock_acquired = True

        db._closed = False
        conn = _open_connection(db)
        db._connected = True
        # Surface the journal mode actually in effect (WAL requested at open).
        journal_mode_row = conn.execute("PRAGMA journal_mode").fetchone()
        journal_mode = journal_mode_row[0] if journal_mode_row else "unknown"
        logger.info("sqlite.journal_mode", mode=str(journal_mode).lower())
    except Exception:
        # Undo partial setup; each step is best-effort so the original
        # exception always propagates.
        if conn is not None:
            try:
                conn.close()
            except Exception:
                pass
            with db._conns_lock:
                db._conns.discard(conn)
            _clear_thread_local_conn(db)
        db._connected = False
        if lock_acquired and db._lock_handle is not None:
            try:
                if fcntl is not None:
                    fcntl.flock(db._lock_handle.fileno(), fcntl.LOCK_UN)
            finally:
                db._lock_handle.close()
                db._lock_handle = None
            if db._lock_path is not None:
                try:
                    db._lock_path.unlink(missing_ok=True)
                except OSError:
                    pass
                db._lock_path = None
        raise
70
+
71
+
72
def close(db: Any) -> None:
    """Close every connection and release the runner lock.

    Marks the database closed first so concurrent callers fail fast, then
    closes all tracked connections, bumps the generation counter so cached
    per-thread connections are invalidated, and finally unlocks and removes
    the flock lock file. Safe to call more than once.
    """
    db._closed = True
    db._connected = False
    with db._conns_lock:
        for conn in list(db._conns):
            try:
                conn.close()
            except Exception:
                # Best-effort: a connection that fails to close must not
                # prevent releasing the rest.
                pass
        db._conns.clear()
        # Invalidate any connection still cached in other threads' locals.
        db._conn_generation += 1
        db._memory_owner_thread_id = None
    _clear_thread_local_conn(db)

    if db._lock_handle is not None:
        try:
            if fcntl is not None:
                fcntl.flock(db._lock_handle.fileno(), fcntl.LOCK_UN)
        finally:
            db._lock_handle.close()
            db._lock_handle = None
    if db._lock_path is not None:
        try:
            db._lock_path.unlink(missing_ok=True)
        except OSError:
            pass
        db._lock_path = None
99
+
100
+
101
def is_connected(db: Any) -> bool:
    """Report whether the database is currently usable (connected, not closed)."""
    if db._closed:
        return False
    return db._connected
103
+
104
+
105
def ensure_connected(db: Any) -> sqlite3.Connection:
    """Return this thread's cached connection, opening a fresh one when the
    cache is empty or was invalidated by a generation bump."""
    if db._closed:
        raise DatabaseError("Database is closed")
    if not db._connected:
        raise DatabaseError("Database not connected. Call connect() first.")
    cached = getattr(db._thread_local, "conn", None)
    cached_gen = getattr(db._thread_local, "generation", None)
    if cached is None or cached_gen != db._conn_generation:
        return _open_connection(db)
    return cached
115
+
116
+
117
def _open_connection(db: Any) -> sqlite3.Connection:
    """Open, configure, and cache a per-thread SQLite connection.

    In-memory databases are pinned to the first thread that opens them:
    each ":memory:" connection is an independent database, so a second
    thread opening one would silently see different state.
    """
    if db._database_path == ":memory:":
        current_thread_id = threading.get_ident()
        owner_thread_id = db._memory_owner_thread_id
        if owner_thread_id is None:
            # First opener becomes the owner thread.
            db._memory_owner_thread_id = current_thread_id
        elif owner_thread_id != current_thread_id:
            raise DatabaseError(
                "SQLite in-memory databases cannot be shared across threads. "
                f"path={db._database_path} owner_thread_id={owner_thread_id} "
                f"current_thread_id={current_thread_id}"
            )
    conn = sqlite3.connect(
        db._database_path,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
        check_same_thread=True,
        timeout=30.0,
    )
    conn.row_factory = sqlite3.Row  # rows are mapping-like, enabling dict(row)
    _assert_minimum_sqlite_version(db)
    conn.execute("PRAGMA foreign_keys = ON")
    conn.execute("PRAGMA journal_mode = WAL")
    conn.execute("PRAGMA synchronous = NORMAL")
    conn.execute("PRAGMA busy_timeout = 5000")
    conn.execute("PRAGMA temp_store = MEMORY")
    # Cache per thread, tagged with the generation so close() invalidates it.
    db._thread_local.conn = conn
    db._thread_local.generation = db._conn_generation
    with db._conns_lock:
        db._conns.add(conn)
    return conn
147
+
148
+
149
+ def _clear_thread_local_conn(db: Any) -> None:
150
+ if isinstance(db._thread_local, threading.local):
151
+ if hasattr(db._thread_local, "conn"):
152
+ try:
153
+ del db._thread_local.conn
154
+ except Exception:
155
+ pass
156
+ if hasattr(db._thread_local, "generation"):
157
+ try:
158
+ del db._thread_local.generation
159
+ except Exception:
160
+ pass
161
+
162
+
163
+ def _assert_minimum_sqlite_version(db: Any) -> None:
164
+ if db._version_checked:
165
+ return
166
+ minimum = (3, 35, 0)
167
+ current = sqlite3.sqlite_version_info
168
+ if current < minimum:
169
+ raise DatabaseError(
170
+ "SQLite >= 3.35 is required (for RETURNING support); "
171
+ f"found {sqlite3.sqlite_version}"
172
+ )
173
+ db._version_checked = True
174
+
175
+
176
def _acquire_db_lock(db: Any, db_path: Path) -> None:
    """Take an exclusive, non-blocking flock next to the database file.

    The lock file (``<db>.lock``) is stamped with ``hostname:pid`` so a
    failed acquisition can report who holds it. On success the open handle
    and path are stashed on *db* for release in close().

    Raises:
        DatabaseError: on non-Unix platforms (no fcntl), or when another
            process already holds the lock.
    """
    if fcntl is None:
        raise DatabaseError("SQLite runner lock requires fcntl (Unix-only).")

    lock_path = db_path.with_suffix(db_path.suffix + ".lock")
    # "a+" creates the file if missing without truncating an existing stamp.
    lock_handle = lock_path.open("a+")
    try:
        fcntl.flock(lock_handle.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError as exc:
        # Lock is held elsewhere: read the holder's stamp for the error.
        lock_handle.seek(0)
        existing = lock_handle.read().strip()
        lock_handle.close()
        detail = f" Existing lock: {existing}" if existing else ""
        raise DatabaseError(
            f"Database lock already held for {db_path}.{detail}"
        ) from exc

    # We own the lock: overwrite any stale stamp with ours.
    lock_handle.seek(0)
    lock_handle.truncate()
    lock_handle.write(f"{socket.gethostname()}:{os.getpid()}\n")
    lock_handle.flush()
    db._lock_handle = lock_handle
    db._lock_path = lock_path
199
+
200
+
201
def _assert_local_filesystem(db_path: Path) -> None:
    """Reject known network filesystems for the database path.

    When the filesystem type cannot be determined, only a warning is
    logged and execution continues.
    """
    fs_type = _detect_fs_type(db_path)
    if fs_type is None:
        logger.warning(
            "sqlite.fs_type_unknown",
            path=str(db_path),
            hint="Ensure the database is on a local filesystem (no NFS/SMB).",
        )
        return

    network_fs = {"nfs", "nfs4", "smbfs", "cifs", "afpfs", "fuse.sshfs", "sshfs"}
    if fs_type.lower() in network_fs:
        raise DatabaseError(
            f"SQLite database must be on a local filesystem; detected {fs_type} at {db_path}"
        )
217
+
218
+
219
+ def _detect_fs_type(db_path: Path) -> str | None:
220
+ try:
221
+ if sys.platform == "darwin":
222
+ output = subprocess.check_output(["stat", "-f", "%T", str(db_path)])
223
+ return output.decode().strip()
224
+ if sys.platform.startswith("linux"):
225
+ output = subprocess.check_output(
226
+ ["stat", "-f", "-c", "%T", str(db_path)]
227
+ )
228
+ return output.decode().strip()
229
+ except Exception:
230
+ return None
231
+ return None
@@ -0,0 +1,116 @@
1
+ from __future__ import annotations
2
+
3
+ import sqlite3
4
+ import time
5
+ from typing import Any, Literal
6
+
7
+ from brawny.logging import get_logger
8
+ from brawny.model.errors import DatabaseError
9
+
10
+
11
+ FetchMode = Literal["none", "all", "one", "rowcount"]
12
+ logger = get_logger("brawny.db.sqlite")
13
+
14
+
15
+ def _run(
16
+ db: Any,
17
+ query: str,
18
+ params: tuple[Any, ...] | dict[str, Any] | None,
19
+ *,
20
+ fetch: FetchMode,
21
+ commit: bool,
22
+ ) -> Any:
23
+ max_retries = 5
24
+ backoff = 0.05
25
+ attempt = 0
26
+
27
+ while True:
28
+ conn = db._ensure_connected()
29
+ db._circuit_breaker.before_call()
30
+
31
+ locked_error: sqlite3.OperationalError | None = None
32
+ with db._locked():
33
+ cursor = conn.cursor()
34
+ try:
35
+ if params is None:
36
+ cursor.execute(query)
37
+ elif isinstance(params, dict):
38
+ cursor.execute(query, params)
39
+ else:
40
+ cursor.execute(query, params)
41
+
42
+ if fetch == "all":
43
+ rows = cursor.fetchall()
44
+ result: Any = [dict(row) for row in rows] if rows else []
45
+ elif fetch == "one":
46
+ row = cursor.fetchone()
47
+ result = dict(row) if row else None
48
+ elif fetch == "rowcount":
49
+ result = cursor.rowcount
50
+ else:
51
+ result = None
52
+
53
+ if commit and db._tx_depth == 0:
54
+ conn.commit()
55
+ db._circuit_breaker.record_success()
56
+ return result
57
+ except sqlite3.OperationalError as e:
58
+ if "database is locked" in str(e).lower():
59
+ locked_error = e
60
+ else:
61
+ db._circuit_breaker.record_failure(e)
62
+ raise DatabaseError(f"SQLite query failed: {e}") from e
63
+ except sqlite3.Error as e:
64
+ db._circuit_breaker.record_failure(e)
65
+ raise DatabaseError(f"SQLite query failed: {e}") from e
66
+ finally:
67
+ cursor.close()
68
+
69
+ if locked_error is not None:
70
+ if db._tx_depth > 0:
71
+ db._circuit_breaker.record_failure(locked_error)
72
+ raise DatabaseError(f"SQLite query failed: {locked_error}") from locked_error
73
+ attempt += 1
74
+ if attempt <= max_retries:
75
+ logger.debug(
76
+ "sqlite.lock_retry",
77
+ attempt=attempt,
78
+ backoff_seconds=backoff,
79
+ )
80
+ time.sleep(backoff)
81
+ backoff = min(backoff * 2, 0.2)
82
+ continue
83
+ db._circuit_breaker.record_failure(locked_error)
84
+ raise DatabaseError(f"SQLite query failed: {locked_error}") from locked_error
85
+
86
+
87
def execute(
    db: Any,
    query: str,
    params: tuple[Any, ...] | dict[str, Any] | None = None,
) -> None:
    """Run a statement for its side effect, committing outside transactions."""
    _run(db, query, params, fetch="none", commit=True)
93
+
94
+
95
def execute_returning(
    db: Any,
    query: str,
    params: tuple[Any, ...] | dict[str, Any] | None = None,
) -> list[dict[str, Any]]:
    """Run a query and return all rows as dicts (no auto-commit)."""
    return _run(db, query, params, fetch="all", commit=False)
101
+
102
+
103
def execute_one(
    db: Any,
    query: str,
    params: tuple[Any, ...] | dict[str, Any] | None = None,
) -> dict[str, Any] | None:
    """Run a query and return the first row as a dict, or None (no auto-commit)."""
    return _run(db, query, params, fetch="one", commit=False)
109
+
110
+
111
def execute_returning_rowcount(
    db: Any,
    query: str,
    params: tuple[Any, ...] | dict[str, Any] | None = None,
) -> int:
    """Run a statement and return the affected-row count, committing outside transactions."""
    return _run(db, query, params, fetch="rowcount", commit=True)
@@ -0,0 +1,190 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from datetime import datetime, timezone
5
+ from typing import Any
6
+ from uuid import UUID
7
+
8
+ from brawny.model.enums import AttemptStatus, IntentStatus, NonceStatus
9
+ from brawny.model.types import (
10
+ GasParams,
11
+ JobConfig,
12
+ NonceReservation,
13
+ RuntimeControl,
14
+ SignerState,
15
+ TxAttempt,
16
+ TxIntent,
17
+ )
18
+ from brawny.types import ClaimedIntent
19
+ from brawny.utils import db_address
20
+
21
+
22
def adapt_datetime(dt: datetime) -> str:
    """Serialize *dt* to an ISO-8601 string for SQLite; naive values are
    interpreted as UTC."""
    aware = dt if dt.tzinfo is not None else dt.replace(tzinfo=timezone.utc)
    return aware.isoformat()
27
+
28
+
29
def convert_datetime(val: bytes) -> datetime:
    """Deserialize a SQLite timestamp column back to datetime.

    ISO-8601 strings are parsed directly; legacy "YYYY-MM-DD HH:MM:SS"
    values fall back to strptime and are stamped as UTC.
    """
    text = val.decode("utf-8")
    try:
        return datetime.fromisoformat(text)
    except ValueError:
        return datetime.strptime(text, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
36
+
37
+
38
def parse_uuid(value: str | UUID | None) -> UUID | None:
    """Coerce a stored value to UUID; UUID and None pass through unchanged."""
    if value is None or isinstance(value, UUID):
        return value
    return UUID(value)
44
+
45
+
46
def dump_uuid(value: UUID | None) -> str | None:
    """Serialize a UUID for storage; None stays None."""
    return None if value is None else str(value)
50
+
51
+
52
def parse_json(raw: Any, default: Any | None = None) -> Any:
    """Decode a JSON column: strings are loaded, None becomes *default*,
    anything already decoded is passed through."""
    if raw is None:
        return default
    return json.loads(raw) if isinstance(raw, str) else raw
58
+
59
+
60
def parse_datetime(value: datetime | str | None) -> datetime | None:
    """Coerce a stored value to datetime; ISO strings are parsed, datetime
    and None pass through unchanged."""
    if value is None or isinstance(value, datetime):
        return value
    return datetime.fromisoformat(value)
66
+
67
+
68
def canonicalize_address(address: str) -> str:
    """Normalize *address* to the project's canonical DB form via db_address."""
    return db_address(address)
70
+
71
+
72
def _row_to_job_config(row: dict[str, Any]) -> JobConfig:
    """Hydrate a JobConfig from a jobs-table row dict."""
    return JobConfig(
        job_id=row["job_id"],
        job_name=row["job_name"],
        enabled=bool(row["enabled"]),  # stored as SQLite integer 0/1
        check_interval_blocks=row["check_interval_blocks"],
        last_checked_block_number=row["last_checked_block_number"],
        last_triggered_block_number=row["last_triggered_block_number"],
        # .get(): presumably columns added by a later migration — may be
        # absent in older row shapes (TODO confirm against schema history).
        drain_until=row.get("drain_until"),
        drain_reason=row.get("drain_reason"),
        created_at=row["created_at"],
        updated_at=row["updated_at"],
    )
85
+
86
+
87
def _row_to_signer_state(row: dict[str, Any]) -> SignerState:
    """Hydrate a SignerState from a signers-table row dict."""
    return SignerState(
        chain_id=row["chain_id"],
        signer_address=row["signer_address"],
        next_nonce=row["next_nonce"],
        last_synced_chain_nonce=row["last_synced_chain_nonce"],
        created_at=row["created_at"],
        updated_at=row["updated_at"],
        # .get(): optional/newer columns that may be missing from the row.
        gap_started_at=row.get("gap_started_at"),
        alias=row.get("alias"),
        quarantined_at=row.get("quarantined_at"),
        quarantine_reason=row.get("quarantine_reason"),
        # SQLite integer flag; absent column defaults to not-paused.
        replacements_paused=bool(row.get("replacements_paused", 0)),
    )
101
+
102
+
103
def _row_to_runtime_control(row: dict[str, Any]) -> RuntimeControl:
    """Hydrate a RuntimeControl from a runtime_controls row dict."""
    return RuntimeControl(
        control=row["control"],
        active=bool(row["active"]),  # stored as SQLite integer 0/1
        expires_at=parse_datetime(row.get("expires_at")),
        reason=row.get("reason"),
        actor=row.get("actor"),
        mode=row.get("mode") or "auto",  # NULL/missing mode falls back to "auto"
        updated_at=row["updated_at"],
    )
113
+
114
+
115
def _row_to_nonce_reservation(row: dict[str, Any]) -> NonceReservation:
    """Hydrate a NonceReservation from a nonce-reservation row dict."""
    return NonceReservation(
        id=row["id"],
        chain_id=row["chain_id"],
        signer_address=row["signer_address"],
        nonce=row["nonce"],
        status=NonceStatus(row["status"]),  # raises ValueError on unknown status
        intent_id=parse_uuid(row["intent_id"]),
        created_at=row["created_at"],
        updated_at=row["updated_at"],
    )
126
+
127
+
128
def _row_to_intent(row: dict[str, Any]) -> TxIntent:
    """Hydrate a TxIntent from a tx_intents row dict.

    JSON metadata is decoded up front; absent/NULL metadata becomes {}.
    Columns read with ``.get()`` are optional or later-migration additions
    and tolerate missing keys.
    """
    metadata = parse_json(row.get("metadata_json"), default={})
    return TxIntent(
        intent_id=parse_uuid(row["intent_id"]),
        job_id=row["job_id"],
        chain_id=row["chain_id"],
        signer_address=row["signer_address"],
        signer_alias=row.get("signer_alias"),
        idempotency_key=row["idempotency_key"],
        to_address=row["to_address"],
        data=row["data"],
        value_wei=row["value_wei"],
        gas_limit=row["gas_limit"],
        max_fee_per_gas=row["max_fee_per_gas"],
        max_priority_fee_per_gas=row["max_priority_fee_per_gas"],
        min_confirmations=row["min_confirmations"],
        deadline_ts=row["deadline_ts"],
        retry_after=row["retry_after"],
        retry_count=row.get("retry_count", 0),
        status=IntentStatus(row["status"]),  # raises ValueError on unknown status
        claim_token=row["claim_token"],
        claimed_at=row["claimed_at"],
        created_at=row["created_at"],
        updated_at=row["updated_at"],
        claimed_by=row.get("claimed_by"),
        lease_expires_at=row.get("lease_expires_at"),
        broadcast_group=row.get("broadcast_group"),
        broadcast_endpoints_json=row.get("broadcast_endpoints_json"),
        broadcast_binding_id=parse_uuid(row.get("broadcast_binding_id")),
        metadata=metadata,
    )
159
+
160
+
161
def _row_to_claimed_intent(row: dict[str, Any]) -> ClaimedIntent:
    """Hydrate the lightweight ClaimedIntent view from a claim-query row."""
    return ClaimedIntent(
        intent_id=parse_uuid(row["intent_id"]),
        claim_token=row["claim_token"],
        claimed_by=row.get("claimed_by"),
        lease_expires_at=row.get("lease_expires_at"),
        claimed_at=row["claimed_at"],
    )
169
+
170
+
171
def _row_to_attempt(row: dict[str, Any]) -> TxAttempt:
    """Hydrate a TxAttempt from a tx_attempts row dict."""
    return TxAttempt(
        attempt_id=parse_uuid(row["attempt_id"]),
        intent_id=parse_uuid(row["intent_id"]),
        nonce=row["nonce"],
        tx_hash=row["tx_hash"],
        # Gas parameters are stored as a JSON blob and decoded by the model.
        gas_params=GasParams.from_json(row["gas_params_json"]),
        status=AttemptStatus(row["status"]),  # raises ValueError on unknown status
        error_code=row["error_code"],
        error_detail=row["error_detail"],
        replaces_attempt_id=parse_uuid(row.get("replaces_attempt_id")),
        broadcast_block=row["broadcast_block"],
        broadcast_at=row.get("broadcast_at"),
        included_block=row.get("included_block"),
        created_at=row["created_at"],
        updated_at=row["updated_at"],
        # .get(): optional broadcast-routing columns that may be absent.
        broadcast_group=row.get("broadcast_group"),
        endpoint_url=row.get("endpoint_url"),
        endpoint_binding_id=parse_uuid(row.get("endpoint_binding_id")),
    )