astreum 0.2.29__py3-none-any.whl → 0.2.61__py3-none-any.whl
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- astreum/__init__.py +9 -1
- astreum/_communication/__init__.py +11 -0
- astreum/{models → _communication}/message.py +101 -64
- astreum/_communication/peer.py +23 -0
- astreum/_communication/ping.py +33 -0
- astreum/_communication/route.py +95 -0
- astreum/_communication/setup.py +322 -0
- astreum/_communication/util.py +42 -0
- astreum/_consensus/__init__.py +20 -0
- astreum/_consensus/account.py +95 -0
- astreum/_consensus/accounts.py +38 -0
- astreum/_consensus/block.py +311 -0
- astreum/_consensus/chain.py +66 -0
- astreum/_consensus/fork.py +100 -0
- astreum/_consensus/genesis.py +72 -0
- astreum/_consensus/receipt.py +136 -0
- astreum/_consensus/setup.py +115 -0
- astreum/_consensus/transaction.py +215 -0
- astreum/_consensus/workers/__init__.py +9 -0
- astreum/_consensus/workers/discovery.py +48 -0
- astreum/_consensus/workers/validation.py +125 -0
- astreum/_consensus/workers/verify.py +63 -0
- astreum/_lispeum/__init__.py +16 -0
- astreum/_lispeum/environment.py +13 -0
- astreum/_lispeum/expression.py +190 -0
- astreum/_lispeum/high_evaluation.py +236 -0
- astreum/_lispeum/low_evaluation.py +123 -0
- astreum/_lispeum/meter.py +18 -0
- astreum/_lispeum/parser.py +51 -0
- astreum/_lispeum/tokenizer.py +22 -0
- astreum/_node.py +198 -0
- astreum/_storage/__init__.py +7 -0
- astreum/_storage/atom.py +109 -0
- astreum/_storage/patricia.py +478 -0
- astreum/_storage/setup.py +35 -0
- astreum/models/block.py +48 -39
- astreum/node.py +755 -563
- astreum/utils/bytes.py +24 -0
- astreum/utils/integer.py +25 -0
- astreum/utils/logging.py +219 -0
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/METADATA +50 -14
- astreum-0.2.61.dist-info/RECORD +57 -0
- astreum/lispeum/__init__.py +0 -2
- astreum/lispeum/environment.py +0 -40
- astreum/lispeum/expression.py +0 -86
- astreum/lispeum/parser.py +0 -41
- astreum/lispeum/tokenizer.py +0 -52
- astreum/models/account.py +0 -91
- astreum/models/accounts.py +0 -34
- astreum/models/transaction.py +0 -106
- astreum/relay/__init__.py +0 -0
- astreum/relay/peer.py +0 -9
- astreum/relay/route.py +0 -25
- astreum/relay/setup.py +0 -58
- astreum-0.2.29.dist-info/RECORD +0 -33
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/WHEEL +0 -0
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/licenses/LICENSE +0 -0
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/top_level.txt +0 -0

astreum/_communication/util.py
@@ -0,0 +1,42 @@
+from typing import Tuple
+
+
+def address_str_to_host_and_port(address: str) -> Tuple[str, int]:
+    """Parse `host:port` (or `[ipv6]:port`) into a tuple."""
+    addr = address.strip()
+    if not addr:
+        raise ValueError("address cannot be empty")
+
+    host: str
+    port_str: str
+
+    if addr.startswith('['):
+        end = addr.find(']')
+        if end == -1:
+            raise ValueError("missing closing ']' in IPv6 address")
+        host = addr[1:end]
+        remainder = addr[end + 1 :]
+        if not remainder.startswith(':'):
+            raise ValueError("missing port separator after IPv6 address")
+        port_str = remainder[1:]
+    else:
+        if ':' not in addr:
+            raise ValueError("address must contain ':' separating host and port")
+        host, port_str = addr.rsplit(':', 1)
+
+    host = host.strip()
+    if not host:
+        raise ValueError("host cannot be empty")
+    port_str = port_str.strip()
+    if not port_str:
+        raise ValueError("port cannot be empty")
+
+    try:
+        port = int(port_str, 10)
+    except ValueError as exc:
+        raise ValueError(f"invalid port number: {port_str}") from exc
+
+    if not (0 < port < 65536):
+        raise ValueError(f"port out of range: {port}")
+
+    return host, port
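
A quick usage sketch of the new `address_str_to_host_and_port` helper (module path inferred from the file list above; assumes the 0.2.61 wheel is installed):

```python
from astreum._communication.util import address_str_to_host_and_port

host, port = address_str_to_host_and_port("127.0.0.1:7373")
assert (host, port) == ("127.0.0.1", 7373)

host, port = address_str_to_host_and_port("[::1]:7373")  # bracketed IPv6 form
assert (host, port) == ("::1", 7373)
```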

astreum/_consensus/__init__.py
@@ -0,0 +1,20 @@
+from .account import Account
+from .accounts import Accounts
+from .block import Block
+from .chain import Chain
+from .fork import Fork
+from .receipt import Receipt
+from .transaction import Transaction
+from .setup import consensus_setup
+
+
+__all__ = [
+    "Block",
+    "Chain",
+    "Fork",
+    "Receipt",
+    "Transaction",
+    "Account",
+    "Accounts",
+    "consensus_setup",
+]

astreum/_consensus/account.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, List, Optional, Tuple
+
+from .._storage.atom import Atom, ZERO32
+from .._storage.patricia import PatriciaTrie
+from ..utils.integer import bytes_to_int, int_to_bytes
+
+
+@dataclass
+class Account:
+    balance: int
+    code: bytes
+    counter: int
+    data_hash: bytes
+    data: PatriciaTrie
+    hash: bytes = ZERO32
+    body_hash: bytes = ZERO32
+    atoms: List[Atom] = field(default_factory=list)
+
+    @classmethod
+    def create(cls, balance: int = 0, data_hash: bytes = ZERO32, code: bytes = ZERO32, counter: int = 0) -> "Account":
+        account = cls(
+            balance=int(balance),
+            code=bytes(code),
+            counter=int(counter),
+            data_hash=bytes(data_hash),
+            data=PatriciaTrie(root_hash=bytes(data_hash)),
+        )
+        account.to_atom()
+        return account
+
+    @classmethod
+    def from_atom(cls, node: Any, account_id: bytes) -> "Account":
+        storage_get = node.storage_get
+
+        type_atom = storage_get(account_id)
+        if type_atom is None or type_atom.data != b"account":
+            raise ValueError("not an account (type mismatch)")
+
+        def _read_atom(atom_id: Optional[bytes]) -> Optional[Atom]:
+            if not atom_id or atom_id == ZERO32:
+                return None
+            return storage_get(atom_id)
+
+        balance_atom = _read_atom(type_atom.next)
+        if balance_atom is None:
+            raise ValueError("malformed account (balance missing)")
+
+        code_atom = _read_atom(balance_atom.next)
+        if code_atom is None:
+            raise ValueError("malformed account (code missing)")
+
+        counter_atom = _read_atom(code_atom.next)
+        if counter_atom is None:
+            raise ValueError("malformed account (counter missing)")
+
+        data_atom = _read_atom(counter_atom.next)
+        if data_atom is None:
+            raise ValueError("malformed account (data missing)")
+
+        account = cls.create(
+            balance=bytes_to_int(balance_atom.data),
+            data_hash=data_atom.data,
+            counter=bytes_to_int(counter_atom.data),
+            code=code_atom.data,
+        )
+        if account.hash != account_id:
+            raise ValueError("account hash mismatch while decoding")
+        return account
+
+    def to_atom(self) -> Tuple[bytes, List[Atom]]:
+        # Build a single forward chain: account -> balance -> code -> counter -> data.
+        data_atom = Atom.from_data(data=bytes(self.data_hash))
+        counter_atom = Atom.from_data(
+            data=int_to_bytes(self.counter),
+            next_hash=data_atom.object_id(),
+        )
+        code_atom = Atom.from_data(
+            data=bytes(self.code),
+            next_hash=counter_atom.object_id(),
+        )
+        balance_atom = Atom.from_data(
+            data=int_to_bytes(self.balance),
+            next_hash=code_atom.object_id(),
+        )
+        type_atom = Atom.from_data(data=b"account", next_hash=balance_atom.object_id())
+
+        atoms = [data_atom, counter_atom, code_atom, balance_atom, type_atom]
+        account_hash = type_atom.object_id()
+        self.hash = account_hash
+        self.body_hash = account_hash
+        self.atoms = atoms
+        return account_hash, list(atoms)
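
A minimal round-trip sketch for the new Account atom encoding (paths inferred from the file list; the `_Node` class below is a stand-in exposing the `storage_get` hook that `from_atom` expects):

```python
from astreum._consensus.account import Account

acct = Account.create(balance=1000)        # create() eagerly builds the atom chain
acct_id, atoms = acct.to_atom()            # account -> balance -> code -> counter -> data
store = {a.object_id(): a for a in atoms}  # toy in-memory atom store

class _Node:                               # minimal stand-in for a node with storage_get
    def storage_get(self, atom_id):
        return store.get(atom_id)

decoded = Account.from_atom(_Node(), acct_id)
assert decoded.balance == 1000 and decoded.hash == acct_id
```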

astreum/_consensus/accounts.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+
+from .._storage.patricia import PatriciaTrie
+from .account import Account
+
+
+class Accounts:
+    def __init__(
+        self,
+        root_hash: Optional[bytes] = None,
+    ) -> None:
+        self._trie = PatriciaTrie(root_hash=root_hash)
+        self._cache: Dict[bytes, Account] = {}
+
+    @property
+    def root_hash(self) -> Optional[bytes]:
+        return self._trie.root_hash
+
+    def get_account(self, address: bytes, node: Optional[Any] = None) -> Optional[Account]:
+        cached = self._cache.get(address)
+        if cached is not None:
+            return cached
+
+        if node is None:
+            raise ValueError("Accounts requires a node reference for trie access")
+
+        account_id: Optional[bytes] = self._trie.get(node, address)
+        if account_id is None:
+            return None
+
+        account = Account.from_atom(node, account_id)
+        self._cache[address] = account
+        return account
+
+    def set_account(self, address: bytes, account: Account) -> None:
+        self._cache[address] = account
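
A small sketch of the Accounts write-through cache; as long as an address was set locally first, `get_account` is served from the cache and no node or trie read is needed (the 32-byte address here is a placeholder):

```python
from astreum._consensus.account import Account
from astreum._consensus.accounts import Accounts

accs = Accounts()                         # empty root; nothing is read from the trie below
addr = bytes(32)                          # placeholder 32-byte address
accs.set_account(addr, Account.create(balance=7))

cached = accs.get_account(addr)           # cache hit, so no node argument is required
assert cached is not None and cached.balance == 7
```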

astreum/_consensus/block.py
@@ -0,0 +1,311 @@
+
+from typing import Any, Callable, List, Optional, Tuple, TYPE_CHECKING
+
+from .._storage.atom import Atom, AtomKind, ZERO32
+
+if TYPE_CHECKING:
+    from .._storage.patricia import PatriciaTrie
+    from .transaction import Transaction
+    from .receipt import Receipt
+from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey
+from cryptography.exceptions import InvalidSignature
+
+
+def _int_to_be_bytes(n: Optional[int]) -> bytes:
+    if n is None:
+        return b""
+    n = int(n)
+    if n == 0:
+        return b"\x00"
+    size = (n.bit_length() + 7) // 8
+    return n.to_bytes(size, "big")
+
+
+def _be_bytes_to_int(b: Optional[bytes]) -> int:
+    if not b:
+        return 0
+    return int.from_bytes(b, "big")
+
+
+class Block:
+    """Validation Block representation using Atom storage.
+
+    Top-level encoding:
+      block_id = type_atom.object_id()
+      chain: type_atom --next--> signature_atom --next--> body_list_atom --next--> ZERO32
+      where: type_atom = Atom(kind=AtomKind.SYMBOL, data=b"block")
+             signature_atom = Atom(kind=AtomKind.BYTES, data=<signature-bytes>)
+             body_list_atom = Atom(kind=AtomKind.LIST, data=<body_head_id>)
+
+    Details order in body_list:
+      0: previous_block_hash (bytes)
+      1: number (int -> big-endian bytes)
+      2: timestamp (int -> big-endian bytes)
+      3: accounts_hash (bytes)
+      4: transactions_total_fees (int -> big-endian bytes)
+      5: transactions_hash (bytes)
+      6: receipts_hash (bytes)
+      7: delay_difficulty (int -> big-endian bytes)
+      8: delay_output (bytes)
+      9: validator_public_key (bytes)
+
+    Notes:
+      - "body tree" is represented here by the body_list id (self.body_hash), not
+        embedded again as a field to avoid circular references.
+      - "signature" is a field on the class but is not required for validation
+        navigation; include it in the instance but it is not encoded in atoms
+        unless explicitly provided via details extension in the future.
+    """
+
+    # essential identifiers
+    hash: bytes
+    previous_block_hash: bytes
+    previous_block: Optional["Block"]
+
+    # block details
+    number: Optional[int]
+    timestamp: Optional[int]
+    accounts_hash: Optional[bytes]
+    transactions_total_fees: Optional[int]
+    transactions_hash: Optional[bytes]
+    receipts_hash: Optional[bytes]
+    delay_difficulty: Optional[int]
+    delay_output: Optional[bytes]
+    validator_public_key: Optional[bytes]
+
+    # additional
+    body_hash: Optional[bytes]
+    signature: Optional[bytes]
+
+    # structures
+    accounts: Optional["PatriciaTrie"]
+    transactions: Optional[List["Transaction"]]
+    receipts: Optional[List["Receipt"]]
+
+
+
+    def __init__(self) -> None:
+        # defaults for safety
+        self.hash = b""
+        self.previous_block_hash = ZERO32
+        self.previous_block = None
+        self.number = None
+        self.timestamp = None
+        self.accounts_hash = None
+        self.transactions_total_fees = None
+        self.transactions_hash = None
+        self.receipts_hash = None
+        self.delay_difficulty = None
+        self.delay_output = None
+        self.validator_public_key = None
+        self.body_hash = None
+        self.signature = None
+        self.accounts = None
+        self.transactions = None
+        self.receipts = None
+
+    def to_atom(self) -> Tuple[bytes, List[Atom]]:
+        # Build body details as direct byte atoms, in defined order
+        details_ids: List[bytes] = []
+        block_atoms: List[Atom] = []
+
+        def _emit(detail_bytes: bytes) -> None:
+            atom = Atom.from_data(data=detail_bytes, kind=AtomKind.BYTES)
+            details_ids.append(atom.object_id())
+            block_atoms.append(atom)
+
+        # 0: previous_block_hash
+        _emit(self.previous_block_hash)
+        # 1: number
+        _emit(_int_to_be_bytes(self.number))
+        # 2: timestamp
+        _emit(_int_to_be_bytes(self.timestamp))
+        # 3: accounts_hash
+        _emit(self.accounts_hash or b"")
+        # 4: transactions_total_fees
+        _emit(_int_to_be_bytes(self.transactions_total_fees))
+        # 5: transactions_hash
+        _emit(self.transactions_hash or b"")
+        # 6: receipts_hash
+        _emit(self.receipts_hash or b"")
+        # 7: delay_difficulty
+        _emit(_int_to_be_bytes(self.delay_difficulty))
+        # 8: delay_output
+        _emit(self.delay_output or b"")
+        # 9: validator_public_key
+        _emit(self.validator_public_key or b"")
+
+        # Build body list chain (head points to the first detail atom id)
+        body_atoms: List[Atom] = []
+        body_head = ZERO32
+        for child_id in reversed(details_ids):
+            node = Atom.from_data(data=child_id, next_hash=body_head, kind=AtomKind.BYTES)
+            body_head = node.object_id()
+            body_atoms.append(node)
+        body_atoms.reverse()
+
+        block_atoms.extend(body_atoms)
+
+        body_list_atom = Atom.from_data(data=body_head, kind=AtomKind.LIST)
+        self.body_hash = body_list_atom.object_id()
+
+        # Signature atom links to body list atom; type atom links to signature atom
+        sig_atom = Atom.from_data(data=self.signature, next_hash=self.body_hash, kind=AtomKind.BYTES)
+        type_atom = Atom.from_data(data=b"block", next_hash=sig_atom.object_id(), kind=AtomKind.SYMBOL)
+
+        block_atoms.append(body_list_atom)
+        block_atoms.append(sig_atom)
+        block_atoms.append(type_atom)
+
+        self.hash = type_atom.object_id()
+        return self.hash, block_atoms
+
+    @classmethod
+    def from_atom(cls, source: Any, block_id: bytes) -> "Block":
+        storage_get: Optional[Callable[[bytes], Optional[Atom]]]
+        if callable(source):
+            storage_get = source
+        else:
+            storage_get = getattr(source, "storage_get", None)
+            if not callable(storage_get):
+                raise TypeError(
+                    "Block.from_atom requires a node with 'storage_get' or a callable storage getter"
+                )
+
+        def _atom_kind(atom: Optional[Atom]) -> Optional[AtomKind]:
+            kind_value = getattr(atom, "kind", None)
+            if isinstance(kind_value, AtomKind):
+                return kind_value
+            if isinstance(kind_value, int):
+                try:
+                    return AtomKind(kind_value)
+                except ValueError:
+                    return None
+            return None
+
+        def _require_atom(atom_id: Optional[bytes], context: str, expected_kind: Optional[AtomKind] = None) -> Atom:
+            if not atom_id or atom_id == ZERO32:
+                raise ValueError(f"missing {context}")
+            atom = storage_get(atom_id)
+            if atom is None:
+                raise ValueError(f"missing {context}")
+            if expected_kind is not None:
+                kind = _atom_kind(atom)
+                if kind is not expected_kind:
+                    raise ValueError(f"malformed {context}")
+            return atom
+
+        def _read_list(head_id: Optional[bytes], context: str) -> List[bytes]:
+            entries: List[bytes] = []
+            current = head_id
+            if not current or current == ZERO32:
+                return entries
+            while current and current != ZERO32:
+                node = storage_get(current)
+                if node is None:
+                    raise ValueError(f"missing list node while decoding {context}")
+                node_kind = _atom_kind(node)
+                if node_kind is not AtomKind.BYTES:
+                    raise ValueError(f"list element must be bytes while decoding {context}")
+                if len(node.data) != len(ZERO32):
+                    raise ValueError(f"list element payload has unexpected length while decoding {context}")
+                entries.append(node.data)
+                current = node.next
+            return entries
+
+        type_atom = _require_atom(block_id, "block type atom", AtomKind.SYMBOL)
+        if type_atom.data != b"block":
+            raise ValueError("not a block (type atom payload)")
+
+        sig_atom = _require_atom(type_atom.next, "block signature atom", AtomKind.BYTES)
+        body_list_id = sig_atom.next
+        body_list_atom = _require_atom(body_list_id, "block body list atom", AtomKind.LIST)
+        if body_list_atom.next and body_list_atom.next != ZERO32:
+            raise ValueError("malformed block (body list tail)")
+
+        body_child_ids = _read_list(body_list_atom.data, "block body")
+
+        details: List[bytes] = []
+        for idx, child_id in enumerate(body_child_ids):
+            if idx >= 10:
+                break
+            if not child_id or child_id == ZERO32:
+                details.append(b"")
+                continue
+            detail_atom = storage_get(child_id)
+            details.append(detail_atom.data if detail_atom is not None else b"")
+
+        if len(details) < 10:
+            details.extend([b""] * (10 - len(details)))
+
+        b = cls()
+        b.hash = block_id
+        b.body_hash = body_list_id
+
+        get = lambda i: details[i] if i < len(details) else b""
+        b.previous_block_hash = get(0) or ZERO32
+        b.previous_block = None
+        b.number = _be_bytes_to_int(get(1))
+        b.timestamp = _be_bytes_to_int(get(2))
+        b.accounts_hash = get(3) or None
+        b.transactions_total_fees = _be_bytes_to_int(get(4))
+        b.transactions_hash = get(5) or None
+        b.receipts_hash = get(6) or None
+        b.delay_difficulty = _be_bytes_to_int(get(7))
+        b.delay_output = get(8) or None
+        b.validator_public_key = get(9) or None
+
+        b.signature = sig_atom.data if sig_atom is not None else None
+
+        return b
+
+    def validate(self, storage_get: Callable[[bytes], Optional[Atom]]) -> bool:
+        """Validate this block against storage.
+
+        Checks:
+        - Signature: signature must verify over the body list id using the
+          validator's public key.
+        - Timestamp monotonicity: if previous block exists (not ZERO32), this
+          block's timestamp must be >= previous.timestamp + 1.
+        """
+        # Unverifiable if critical fields are missing
+        if not self.body_hash:
+            return False
+        if not self.signature:
+            return False
+        if not self.validator_public_key:
+            return False
+        if self.timestamp is None:
+            return False
+
+        # 1) Signature check over body hash
+        try:
+            pub = Ed25519PublicKey.from_public_bytes(bytes(self.validator_public_key))
+            pub.verify(self.signature, self.body_hash)
+        except InvalidSignature as e:
+            raise ValueError("invalid signature") from e
+
+        # 2) Timestamp monotonicity against previous block
+        prev_ts: Optional[int] = None
+        prev_hash = self.previous_block_hash or ZERO32
+
+        if self.previous_block is not None:
+            prev_ts = int(self.previous_block.timestamp or 0)
+            prev_hash = self.previous_block.hash or prev_hash or ZERO32
+
+        if prev_hash and prev_hash != ZERO32 and prev_ts is None:
+            # If previous block cannot be loaded, treat as unverifiable, not malicious
+            try:
+                prev = Block.from_atom(storage_get, prev_hash)
+            except Exception:
+                return False
+            prev_ts = int(prev.timestamp or 0)
+
+        if prev_hash and prev_hash != ZERO32:
+            if prev_ts is None:
+                return False
+            cur_ts = int(self.timestamp or 0)
+            if cur_ts < prev_ts + 1:
+                raise ValueError("timestamp must be at least prev+1")
+
+        return True
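
An encode/decode sketch for the Block atom layout above; since `from_atom` accepts a plain callable, a dict's `get` works as a toy storage backend (the placeholder signature would not pass `validate()`):

```python
from astreum._consensus.block import Block
from astreum._storage.atom import ZERO32

blk = Block()
blk.previous_block_hash = ZERO32      # genesis-style: no previous block
blk.number = 1
blk.timestamp = 1_700_000_000
blk.signature = b"\x00" * 64          # placeholder signature, for encoding only

block_id, atoms = blk.to_atom()
store = {a.object_id(): a for a in atoms}

decoded = Block.from_atom(store.get, block_id)
assert decoded.number == 1 and decoded.timestamp == blk.timestamp
```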

astreum/_consensus/chain.py
@@ -0,0 +1,66 @@
+# chain.py
+from typing import Callable, Dict, Optional
+from .block import Block
+from .._storage.atom import ZERO32, Atom
+
+class Chain:
+    def __init__(self, head_block: Block):
+        self.head_block = head_block
+        self.validated_upto_block = None
+        # Root (genesis) hash for this chain; set by validation setup when known
+        self.root: Optional[bytes] = None
+        # Fork position: the head hash of the default/current fork for this chain
+        self.fork_position: Optional[bytes] = getattr(head_block, "hash", None)
+        # Mark the first malicious block encountered during validation; None means not found
+        self.malicious_block_hash: Optional[bytes] = None
+
+    def validate(self, storage_get: Callable[[bytes], Atom]) -> Block:
+        """Validate the chain from head to genesis and return the root block.
+
+        Incorporates per-block validation (signature on body and timestamp
+        monotonicity). Uses a simple cache to avoid duplicate Atom fetches and
+        duplicate block decoding during the backward walk.
+        """
+        # Atom and Block caches for this validation pass
+        atom_cache: Dict[bytes, Optional[Atom]] = {}
+        block_cache: Dict[bytes, Block] = {}
+
+        def get_cached(k: bytes) -> Optional[Atom]:
+            if k in atom_cache:
+                return atom_cache[k]
+            a = storage_get(k)
+            atom_cache[k] = a
+            return a
+
+        def load_block(bid: bytes) -> Block:
+            if bid in block_cache:
+                return block_cache[bid]
+            b = Block.from_atom(get_cached, bid)
+            block_cache[bid] = b
+            return b
+
+        blk = self.head_block
+        # Ensure head is in cache if it has a hash
+        if getattr(blk, "hash", None):
+            block_cache[blk.hash] = blk  # type: ignore[attr-defined]
+
+        # Walk back, validating each block
+        while True:
+            # Validate current block (signature over body, timestamp rule)
+            try:
+                blk.validate(get_cached)  # may decode previous but uses cached atoms
+            except Exception:
+                # record first failure point then propagate
+                self.malicious_block_hash = getattr(blk, "hash", None)
+                raise
+
+            prev_hash = blk.previous_block_hash if hasattr(blk, "previous_block_hash") else ZERO32
+            if prev_hash == ZERO32:
+                break
+            # Move to previous block using cache-aware loader
+            prev_blk = load_block(prev_hash)
+            blk.previous_block = prev_blk  # cache the object for any downstream use
+            blk = prev_blk
+
+        self.validated_upto_block = blk
+        return blk
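
A sketch of the intended sign-then-validate flow for a single genesis-style block (made-up key and timestamp values; a placeholder signature is used for the first encode so that `body_hash` can then be signed, and it assumes the Ed25519 primitives used by `Block.validate` are importable at runtime):

```python
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from astreum._consensus.block import Block
from astreum._consensus.chain import Chain
from astreum._storage.atom import ZERO32

key = Ed25519PrivateKey.generate()

blk = Block()
blk.previous_block_hash = ZERO32          # head == genesis, so the timestamp rule is skipped
blk.number = 0
blk.timestamp = 1
blk.validator_public_key = key.public_key().public_bytes(Encoding.Raw, PublicFormat.Raw)
blk.signature = b"\x00" * 64              # placeholder so the first encode succeeds
blk.to_atom()                             # fixes body_hash (the signature is not part of the body)
blk.signature = key.sign(blk.body_hash)   # sign the body list id
block_id, atoms = blk.to_atom()           # re-encode with the real signature

store = {a.object_id(): a for a in atoms}
root = Chain(blk).validate(store.get)     # walks back toward genesis; here head is genesis
assert root is blk and root.hash == block_id
```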

astreum/_consensus/fork.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+from typing import Optional, Set, Any, Callable, Dict
+from .block import Block
+from .._storage.atom import ZERO32, Atom
+
+
+class Fork:
+    """A branch head within a Chain (same root).
+
+    - head: current tip block id (bytes)
+    - peers: identifiers (e.g., peer pubkey objects) following this head
+    - root: genesis block id for this chain (optional)
+    - validated_upto: earliest verified ancestor (optional)
+    - chain_fork_position: the chain's fork anchor relevant to this fork
+    """
+
+    def __init__(
+        self,
+        head: bytes,
+    ) -> None:
+        self.head: bytes = head
+        self.peers: Set[Any] = set()
+        self.root: Optional[bytes] = None
+        self.validated_upto: Optional[bytes] = None
+        self.chain_fork_position: Optional[bytes] = None
+        # Mark the first block found malicious during validation; None means not found
+        self.malicious_block_hash: Optional[bytes] = None
+
+    def add_peer(self, peer_id: Any) -> None:
+        self.peers.add(peer_id)
+
+    def remove_peer(self, peer_id: Any) -> None:
+        self.peers.discard(peer_id)
+
+    def validate(
+        self,
+        storage_get: Callable[[bytes], Optional[object]],
+        stop_heads: Optional[Set[bytes]] = None,
+    ) -> bool:
+        """Validate only up to the chain fork position, not genesis.
+
+        Returns True if self.head descends from self.chain_fork_position (or if
+        chain_fork_position is None/equals head), and updates validated_upto to
+        that anchor. If stop_heads is provided, returns True early if ancestry
+        reaches any of those heads, setting validated_upto to the matched head.
+        Returns False if ancestry cannot be confirmed.
+        """
+        if self.chain_fork_position is None or self.chain_fork_position == self.head:
+            self.validated_upto = self.head
+            return True
+        # Caches to avoid double fetching/decoding
+        atom_cache: Dict[bytes, Optional[Atom]] = {}
+        block_cache: Dict[bytes, Block] = {}
+
+        def get_cached(k: bytes) -> Optional[Atom]:
+            if k in atom_cache:
+                return atom_cache[k]
+            a = storage_get(k)  # type: ignore[call-arg]
+            atom_cache[k] = a  # may be None if missing
+            return a
+
+        def load_block(bid: bytes) -> Optional[Block]:
+            if bid in block_cache:
+                return block_cache[bid]
+            try:
+                b = Block.from_atom(get_cached, bid)
+            except Exception:
+                return None
+            block_cache[bid] = b
+            return b
+
+        blk = load_block(self.head)
+        if blk is None:
+            # Missing head data: unverifiable, not malicious
+            return False
+        # Walk up to fork anchor, validating each block signature + timestamp
+        while True:
+            try:
+                blk.validate(get_cached)  # type: ignore[arg-type]
+            except Exception:
+                # mark the first failure point
+                self.malicious_block_hash = blk.hash
+                return False
+
+            # Early-exit if we met another known fork head
+            if stop_heads and blk.hash in stop_heads:
+                self.validated_upto = blk.hash
+                return True
+
+            if blk.hash == self.chain_fork_position:
+                self.validated_upto = blk.hash
+                return True
+
+            prev_hash = blk.previous_block_hash if hasattr(blk, "previous_block_hash") else ZERO32
+            nxt = load_block(prev_hash)
+            if nxt is None:
+                return False
+            blk.previous_block = nxt  # cache for future use
+            blk = nxt