astreum 0.3.16__py3-none-any.whl → 0.3.46__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- astreum/__init__.py +1 -2
- astreum/communication/__init__.py +15 -11
- astreum/communication/difficulty.py +39 -0
- astreum/communication/disconnect.py +57 -0
- astreum/communication/handlers/handshake.py +105 -62
- astreum/communication/handlers/object_request.py +179 -149
- astreum/communication/handlers/object_response.py +7 -1
- astreum/communication/handlers/ping.py +9 -0
- astreum/communication/handlers/route_request.py +7 -1
- astreum/communication/handlers/route_response.py +7 -1
- astreum/communication/incoming_queue.py +96 -0
- astreum/communication/message_pow.py +36 -0
- astreum/communication/models/peer.py +4 -0
- astreum/communication/models/ping.py +27 -6
- astreum/communication/models/route.py +4 -0
- astreum/communication/{start.py → node.py} +10 -11
- astreum/communication/outgoing_queue.py +108 -0
- astreum/communication/processors/incoming.py +110 -37
- astreum/communication/processors/outgoing.py +35 -2
- astreum/communication/processors/peer.py +133 -58
- astreum/communication/setup.py +272 -113
- astreum/communication/util.py +14 -0
- astreum/node.py +99 -92
- astreum/storage/actions/get.py +79 -48
- astreum/storage/actions/set.py +171 -156
- astreum/storage/providers.py +24 -0
- astreum/storage/setup.py +23 -22
- astreum/utils/config.py +234 -45
- astreum/utils/logging.py +1 -1
- astreum/{consensus → validation}/__init__.py +0 -4
- astreum/validation/constants.py +2 -0
- astreum/{consensus → validation}/genesis.py +4 -6
- astreum/validation/models/block.py +544 -0
- astreum/validation/models/fork.py +511 -0
- astreum/{consensus → validation}/models/receipt.py +17 -4
- astreum/{consensus → validation}/models/transaction.py +45 -3
- astreum/validation/node.py +190 -0
- astreum/{consensus → validation}/validator.py +1 -1
- astreum/validation/workers/__init__.py +8 -0
- astreum/{consensus → validation}/workers/validation.py +360 -333
- astreum/verification/__init__.py +4 -0
- astreum/{consensus/workers/discovery.py → verification/discover.py} +1 -1
- astreum/verification/node.py +61 -0
- astreum/verification/worker.py +183 -0
- {astreum-0.3.16.dist-info → astreum-0.3.46.dist-info}/METADATA +43 -9
- astreum-0.3.46.dist-info/RECORD +79 -0
- astreum/consensus/models/block.py +0 -364
- astreum/consensus/models/chain.py +0 -66
- astreum/consensus/models/fork.py +0 -100
- astreum/consensus/setup.py +0 -83
- astreum/consensus/start.py +0 -67
- astreum/consensus/workers/__init__.py +0 -9
- astreum/consensus/workers/verify.py +0 -90
- astreum-0.3.16.dist-info/RECORD +0 -72
- /astreum/{consensus → validation}/models/__init__.py +0 -0
- /astreum/{consensus → validation}/models/account.py +0 -0
- /astreum/{consensus → validation}/models/accounts.py +0 -0
- {astreum-0.3.16.dist-info → astreum-0.3.46.dist-info}/WHEEL +0 -0
- {astreum-0.3.16.dist-info → astreum-0.3.46.dist-info}/licenses/LICENSE +0 -0
- {astreum-0.3.16.dist-info → astreum-0.3.46.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,511 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Optional, Set, Any
|
|
4
|
+
from cryptography.exceptions import InvalidSignature
|
|
5
|
+
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey
|
|
6
|
+
from .block import Block
|
|
7
|
+
from ...storage.models.atom import ZERO32
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Fork:
    """A branch head within a Chain (same root).

    - head: current tip block id (bytes)
    - peers: identifiers (e.g., peer pubkey objects) following this head
    - root: genesis block id for this chain (optional)
    - validated_upto: earliest verified ancestor (optional)
    - chain_fork_position: the chain's fork anchor relevant to this fork
    """

    def __init__(
        self,
        head: bytes,
    ) -> None:
        # Tip block id this fork currently points at.
        self.head: bytes = head
        # Identifiers of peers observed following this head.
        self.peers: Set[Any] = set()
        self.root: Optional[bytes] = None
        self.validated_upto: Optional[bytes] = None
        self.chain_fork_position: Optional[bytes] = None
        # Mark the first block found malicious during validation; None means not found
        self.malicious_block_hash: Optional[bytes] = None

    def add_peer(self, peer_id: Any) -> None:
        """Record that peer_id follows this fork head."""
        self.peers.add(peer_id)

    def remove_peer(self, peer_id: Any) -> None:
        """Forget peer_id; silently ignores unknown peers (set.discard)."""
        self.peers.discard(peer_id)

    def verify(self, node: Any) -> bool:
        """Verify this fork using the node to manage fork splits/joins.

        Two passes over the ancestry starting at ``self.head``:

        1. Light pass: header-only validation (linkage, height, timestamp,
           signature, difficulty/PoW) while searching for an *anchor* — the
           point where this fork meets already-known state: another fork's
           head ("fork_head"), a block on another fork's path
           ("intersection"), or genesis ("genesis").
        2. Heavy pass: full ``Block.verify`` from head back to the anchor.

        On success the fork (and any fork split at an intersection) is
        committed into ``node.forks`` and True is returned.  On failure the
        first offending block id is recorded in ``self.malicious_block_hash``
        and False is returned.

        Raises:
            ValueError: if ``node`` is None.
        """
        if node is None:
            raise ValueError("node required for fork validation")

        # Logging is optional: node may not carry a logger.
        logger = getattr(node, "logger", None)

        def _hex(value: Optional[bytes]) -> str:
            # Render bytes as hex for log lines; pass anything else through str().
            if isinstance(value, (bytes, bytearray)):
                return value.hex()
            return str(value)

        def _log_debug(message: str, *args: object) -> None:
            if logger:
                logger.debug(message, *args)

        def _log_warning(message: str, *args: object) -> None:
            if logger:
                logger.warning(message, *args)

        _log_debug("Fork verify start head=%s", _hex(self.head))

        # State shared by the light pass below.
        visited_set: Set[bytes] = set()
        anchor_hash: Optional[bytes] = None
        anchor_kind: Optional[str] = None  # "fork_head" | "intersection" | "genesis"
        intersection_fork_head: Optional[bytes] = None
        anchor_validated = False

        def validate_header(child: Block, parent: Optional[Block]) -> bool:
            """
            Lightweight/header validation without tx/receipt/account checks.

            The caller supplies the parent block (or None for genesis) so we can
            verify linkage, height, timestamps, and difficulty in a single pass.
            """
            # A block is treated as genesis when it has no parent or links to ZERO32.
            is_genesis = parent is None or (child.previous_block_hash or ZERO32) == ZERO32

            # Reject blocks from a different chain than the node is configured for.
            node_chain = getattr(node, "chain", None)
            if node_chain is not None and child.chain_id != node_chain:
                _log_debug(
                    "Header verify failed chain_id=%s expected=%s block=%s",
                    child.chain_id,
                    node_chain,
                    _hex(child.atom_hash),
                )
                return False

            # Basic field presence
            if child.timestamp is None:
                _log_debug(
                    "Header verify failed missing timestamp block=%s",
                    _hex(child.atom_hash),
                )
                return False
            if not is_genesis:
                if not child.body_hash or not child.signature or not child.validator_public_key_bytes:
                    _log_debug(
                        "Header verify failed missing body/signature/validator block=%s",
                        _hex(child.atom_hash),
                    )
                    return False

            # Linkage rules
            if is_genesis:
                # Genesis must link to ZERO32 and sit at height 0; signature and
                # difficulty checks are skipped for it (no parent to derive from).
                if (child.previous_block_hash or ZERO32) != ZERO32:
                    _log_debug(
                        "Header verify failed genesis prev_hash=%s block=%s",
                        _hex(child.previous_block_hash),
                        _hex(child.atom_hash),
                    )
                    return False
                if child.number not in (0,):
                    _log_debug(
                        "Header verify failed genesis number=%s block=%s",
                        child.number,
                        _hex(child.atom_hash),
                    )
                    return False
            else:
                parent_hash = parent.atom_hash or ZERO32
                if (child.previous_block_hash or ZERO32) != parent_hash:
                    _log_debug(
                        "Header verify failed prev hash mismatch block=%s prev=%s expected=%s",
                        _hex(child.atom_hash),
                        _hex(child.previous_block_hash),
                        _hex(parent_hash),
                    )
                    return False
                expected_number = (parent.number or 0) + 1
                if child.number != expected_number:
                    _log_debug(
                        "Header verify failed number mismatch block=%s number=%s expected=%s",
                        _hex(child.atom_hash),
                        child.number,
                        expected_number,
                    )
                    return False

                # Child must be at least one time unit after its parent.
                parent_ts = parent.timestamp
                if parent_ts is not None and int(child.timestamp) < int(parent_ts) + 1:
                    _log_debug(
                        "Header verify failed timestamp block=%s ts=%s parent_ts=%s",
                        _hex(child.atom_hash),
                        child.timestamp,
                        parent_ts,
                    )
                    return False

                # Signature over body hash
                try:
                    pub = Ed25519PublicKey.from_public_bytes(
                        bytes(child.validator_public_key_bytes)
                    )
                    pub.verify(child.signature, child.body_hash)  # type: ignore[arg-type]
                except InvalidSignature:
                    _log_debug(
                        "Header verify failed signature block=%s",
                        _hex(child.atom_hash),
                    )
                    return False
                except Exception:
                    # Malformed key/signature material (e.g. wrong length) also fails.
                    _log_debug(
                        "Header verify failed signature error block=%s",
                        _hex(child.atom_hash),
                    )
                    return False

                # Difficulty and PoW
                expected_diff = Block.calculate_delay_difficulty(
                    previous_timestamp=parent.timestamp,
                    current_timestamp=child.timestamp,
                    previous_difficulty=parent.delay_difficulty,
                )
                if child.delay_difficulty is None or int(child.delay_difficulty) != int(
                    expected_diff
                ):
                    _log_debug(
                        "Header verify failed difficulty block=%s diff=%s expected=%s",
                        _hex(child.atom_hash),
                        child.delay_difficulty,
                        expected_diff,
                    )
                    return False

                # PoW target comes from the PARENT's difficulty (work required to extend it).
                required_work = max(1, int(parent.delay_difficulty or 1))
                block_hash = child.atom_hash or b""
                if not block_hash:
                    _log_debug(
                        "Header verify failed missing hash block=%s",
                        _hex(child.atom_hash),
                    )
                    return False
                if Block._leading_zero_bits(block_hash) < required_work:
                    _log_debug(
                        "Header verify failed pow block=%s zeros=%s required=%s",
                        _hex(child.atom_hash),
                        Block._leading_zero_bits(block_hash),
                        required_work,
                    )
                    return False

            return True

        def is_on_other_fork_path(target_hash: bytes) -> Optional[bytes]:
            """Return the head of a fork whose ancestry includes target_hash."""
            for other_head in node.forks:
                if other_head == self.head:
                    continue
                blk_hash = other_head
                # Guard against cycles in malformed ancestry.
                seen: Set[bytes] = set()
                while blk_hash and blk_hash not in seen:
                    seen.add(blk_hash)
                    if blk_hash == target_hash:
                        return other_head
                    try:
                        blk = Block.from_atom(node, blk_hash)
                    except Exception:
                        # Missing storage for another fork's ancestor is not fatal here;
                        # just stop walking that fork.
                        _log_debug(
                            "Fork path lookup failed loading block=%s",
                            _hex(blk_hash),
                        )
                        blk = None
                    if blk is None:
                        break
                    prev = getattr(blk, "previous_block_hash", ZERO32) or ZERO32
                    if prev == ZERO32:
                        break
                    blk_hash = prev
            return None

        # --- Pass 1 (light): walk from head toward genesis, validating headers
        # and looking for the anchor where we meet known state. pending_child is
        # the block one step ahead of cursor (i.e. cursor's child).
        cursor = self.head
        pending_child: Optional[Block] = None
        while cursor and cursor not in visited_set:
            try:
                blk = Block.from_atom(node, cursor)
            except Exception:
                _log_debug("Fork verify failed loading block=%s", _hex(cursor))
                blk = None
            if blk is None:
                # Missing ancestor: blame the child that referenced it (or the
                # cursor itself when we are still at the head).
                self.malicious_block_hash = (
                    pending_child.atom_hash if pending_child else cursor
                )
                _log_warning(
                    "Fork verify failed missing block=%s pending=%s",
                    _hex(cursor),
                    _hex(pending_child.atom_hash) if pending_child else None,
                )
                return False

            if pending_child is not None:
                if not validate_header(pending_child, blk):
                    # Fall back through whatever identifying bytes are available.
                    self.malicious_block_hash = (
                        pending_child.atom_hash
                        or pending_child.body_hash
                        or pending_child.previous_block_hash
                        or cursor
                    )
                    _log_warning(
                        "Fork verify failed header block=%s parent=%s",
                        _hex(pending_child.atom_hash),
                        _hex(blk.atom_hash),
                    )
                    return False
                if not pending_child.atom_hash:
                    self.malicious_block_hash = (
                        pending_child.body_hash
                        or pending_child.previous_block_hash
                        or cursor
                    )
                    _log_warning(
                        "Fork verify failed missing hash block=%s",
                        _hex(pending_child.body_hash),
                    )
                    return False
                if anchor_hash is not None and pending_child.atom_hash == anchor_hash:
                    # Headers are clean all the way down to the anchor; stop the light pass.
                    anchor_validated = True
                    _log_debug(
                        "Fork verify reached anchor=%s kind=%s",
                        _hex(anchor_hash),
                        anchor_kind,
                    )
                    break

            visited_set.add(cursor)

            # Anchor detection (only until the first anchor is found).
            if anchor_hash is None:
                if cursor in node.forks and cursor != self.head:
                    # We landed exactly on another fork's head.
                    anchor_hash = cursor
                    anchor_kind = "fork_head"
                    _log_debug(
                        "Fork verify anchor fork_head=%s",
                        _hex(anchor_hash),
                    )
                else:
                    other_head = is_on_other_fork_path(cursor)
                    if other_head:
                        # We intersect another fork mid-path -> a split point.
                        anchor_hash = cursor
                        anchor_kind = "intersection"
                        intersection_fork_head = other_head
                        _log_debug(
                            "Fork verify anchor intersection=%s other_head=%s",
                            _hex(anchor_hash),
                            _hex(other_head),
                        )
                    else:
                        prev_hash = getattr(blk, "previous_block_hash", ZERO32) or ZERO32
                        if prev_hash == ZERO32:
                            # Reached the chain root.
                            anchor_hash = cursor
                            anchor_kind = "genesis"
                            _log_debug(
                                "Fork verify anchor genesis=%s",
                                _hex(anchor_hash),
                            )

            pending_child = blk
            prev_hash = getattr(blk, "previous_block_hash", ZERO32) or ZERO32
            if prev_hash == ZERO32:
                break
            cursor = prev_hash

        # Tail case: the loop ended (genesis or revisit) with the last block's
        # own header not yet validated against its parent.
        if pending_child is not None and not anchor_validated:
            parent_blk: Optional[Block] = None
            prev_hash = getattr(pending_child, "previous_block_hash", ZERO32) or ZERO32
            if prev_hash not in (None, ZERO32, b""):
                try:
                    parent_blk = Block.from_atom(node, prev_hash)
                except Exception:
                    _log_debug(
                        "Fork verify failed loading parent block=%s",
                        _hex(prev_hash),
                    )
                    parent_blk = None
            if not validate_header(pending_child, parent_blk):
                self.malicious_block_hash = (
                    pending_child.atom_hash
                    or pending_child.body_hash
                    or pending_child.previous_block_hash
                    or self.head
                )
                _log_warning(
                    "Fork verify failed header block=%s parent=%s",
                    _hex(pending_child.atom_hash),
                    _hex(parent_blk.atom_hash) if parent_blk else None,
                )
                return False
            if not pending_child.atom_hash:
                self.malicious_block_hash = (
                    pending_child.body_hash
                    or pending_child.previous_block_hash
                    or self.head
                )
                _log_warning(
                    "Fork verify failed missing hash block=%s",
                    _hex(pending_child.body_hash),
                )
                return False
            if anchor_hash is None:
                # No anchor found during the walk: treat the deepest block as genesis.
                anchor_hash = pending_child.atom_hash
                anchor_kind = "genesis"
                _log_debug(
                    "Fork verify anchor genesis=%s",
                    _hex(anchor_hash),
                )
            if pending_child.atom_hash == anchor_hash:
                anchor_validated = True

        if anchor_hash is None or not anchor_validated:
            _log_warning(
                "Fork verify failed anchor validated=%s anchor=%s",
                anchor_validated,
                _hex(anchor_hash),
            )
            return False

        # --- Pass 2 (heavy): full block verification from head down to the anchor.
        _log_debug(
            "Fork verify heavy pass head=%s anchor=%s",
            _hex(self.head),
            _hex(anchor_hash),
        )
        heavy_cursor = self.head
        heavy_pending: Optional[Block] = None
        heavy_seen: Set[bytes] = set()
        heavy_anchor_verified = False
        while heavy_cursor and heavy_cursor not in heavy_seen:
            heavy_seen.add(heavy_cursor)
            try:
                blk = Block.from_atom(node, heavy_cursor)
            except Exception:
                self.malicious_block_hash = (
                    heavy_pending.atom_hash if heavy_pending else heavy_cursor
                )
                _log_warning(
                    "Fork verify failed heavy load block=%s pending=%s",
                    _hex(heavy_cursor),
                    _hex(heavy_pending.atom_hash) if heavy_pending else None,
                )
                return False

            if heavy_pending is not None:
                # Give the child its loaded parent so Block.verify can cross-check.
                heavy_pending.previous_block = blk
                if not heavy_pending.verify(node):
                    self.malicious_block_hash = (
                        heavy_pending.atom_hash
                        or heavy_pending.previous_block_hash
                        or heavy_cursor
                    )
                    _log_warning(
                        "Fork verify failed heavy block=%s parent=%s",
                        _hex(heavy_pending.atom_hash),
                        _hex(blk.atom_hash),
                    )
                    return False
                if heavy_pending.atom_hash == anchor_hash:
                    heavy_anchor_verified = True
                    _log_debug(
                        "Fork verify heavy reached anchor=%s",
                        _hex(anchor_hash),
                    )
                    break

            prev_hash = getattr(blk, "previous_block_hash", ZERO32) or ZERO32
            heavy_pending = blk
            if prev_hash == ZERO32:
                break
            heavy_cursor = prev_hash

        # The anchor itself may be the deepest block (e.g. genesis) and so was
        # never verified as a "pending" child above; verify it standalone.
        if not heavy_anchor_verified and heavy_pending is not None:
            if heavy_pending.atom_hash == anchor_hash:
                heavy_pending.previous_block = None
                if not heavy_pending.verify(node):
                    self.malicious_block_hash = (
                        heavy_pending.atom_hash
                        or heavy_pending.previous_block_hash
                        or self.head
                    )
                    _log_warning(
                        "Fork verify failed heavy anchor block=%s",
                        _hex(heavy_pending.atom_hash),
                    )
                    return False
                heavy_anchor_verified = True

        if not heavy_anchor_verified:
            _log_warning(
                "Fork verify failed heavy anchor verified=%s anchor=%s",
                heavy_anchor_verified,
                _hex(anchor_hash),
            )
            return False

        # Commit staged fork edits
        if anchor_kind == "fork_head":
            # We extend an existing fork head: inherit its root/anchor position.
            ref = node.forks.get(anchor_hash)
            chain_anchor = ref.chain_fork_position if ref else anchor_hash
            base_root = ref.root if ref and ref.root else anchor_hash
            self.validated_upto = anchor_hash
            self.chain_fork_position = chain_anchor or anchor_hash
            self.root = base_root
            self.malicious_block_hash = None
            node.forks[self.head] = self
            _log_debug(
                "Fork verify committed fork_head head=%s anchor=%s",
                _hex(self.head),
                _hex(anchor_hash),
            )
            return True

        if anchor_kind == "intersection":
            # We split off another fork mid-path: materialize a fork at the
            # split point and re-anchor both the existing fork and ourselves.
            base_root = anchor_hash
            existing = node.forks.get(intersection_fork_head) if intersection_fork_head else None
            if existing and existing.root:
                base_root = existing.root

            base_fork = node.forks.get(anchor_hash)
            if base_fork is None:
                base_fork = Fork(head=anchor_hash)
            base_fork.root = base_root
            base_fork.chain_fork_position = anchor_hash
            base_fork.validated_upto = anchor_hash

            if existing is not None:
                existing.chain_fork_position = anchor_hash
                existing.validated_upto = anchor_hash
                existing.root = base_root
                node.forks[existing.head] = existing

            self.chain_fork_position = anchor_hash
            self.validated_upto = anchor_hash
            self.root = base_root
            self.malicious_block_hash = None

            node.forks[base_fork.head] = base_fork
            node.forks[self.head] = self
            _log_debug(
                "Fork verify committed intersection head=%s anchor=%s",
                _hex(self.head),
                _hex(anchor_hash),
            )
            return True

        if anchor_kind == "genesis":
            # Whole ancestry validated back to the chain root.
            self.validated_upto = anchor_hash
            self.chain_fork_position = anchor_hash
            self.root = anchor_hash
            self.malicious_block_hash = None
            node.forks[self.head] = self
            _log_debug(
                "Fork verify committed genesis head=%s anchor=%s",
                _hex(self.head),
                _hex(anchor_hash),
            )
            return True

        return False
|
|
@@ -31,7 +31,9 @@ class Receipt:
|
|
|
31
31
|
cost: int,
|
|
32
32
|
status: int,
|
|
33
33
|
logs_hash: bytes = ZERO32,
|
|
34
|
+
version: int = 1,
|
|
34
35
|
) -> None:
|
|
36
|
+
self.version = int(version)
|
|
35
37
|
self.transaction_hash = bytes(transaction_hash)
|
|
36
38
|
self.cost = int(cost)
|
|
37
39
|
self.logs_hash = bytes(logs_hash)
|
|
@@ -58,21 +60,31 @@ class Receipt:
|
|
|
58
60
|
next_hash = atom.object_id()
|
|
59
61
|
detail_atoms.reverse()
|
|
60
62
|
|
|
61
|
-
|
|
63
|
+
version_atom = Atom(
|
|
64
|
+
data=_int_to_be_bytes(self.version),
|
|
65
|
+
next_id=next_hash,
|
|
66
|
+
kind=AtomKind.BYTES,
|
|
67
|
+
)
|
|
68
|
+
type_atom = Atom(data=b"receipt", next_id=version_atom.object_id(), kind=AtomKind.SYMBOL)
|
|
62
69
|
|
|
63
|
-
atoms = detail_atoms + [type_atom]
|
|
70
|
+
atoms = detail_atoms + [version_atom, type_atom]
|
|
64
71
|
receipt_id = type_atom.object_id()
|
|
65
72
|
return receipt_id, atoms
|
|
66
73
|
|
|
67
74
|
@classmethod
|
|
68
75
|
def from_atom(cls, node: Any, receipt_id: bytes) -> Receipt:
|
|
69
76
|
atom_chain = node.get_atom_list_from_storage(receipt_id)
|
|
70
|
-
if atom_chain is None or len(atom_chain) !=
|
|
77
|
+
if atom_chain is None or len(atom_chain) != 6:
|
|
71
78
|
raise ValueError("malformed receipt atom chain")
|
|
72
79
|
|
|
73
|
-
type_atom, tx_atom, status_atom, cost_atom, logs_atom = atom_chain
|
|
80
|
+
type_atom, version_atom, tx_atom, status_atom, cost_atom, logs_atom = atom_chain
|
|
74
81
|
if type_atom.kind is not AtomKind.SYMBOL or type_atom.data != b"receipt":
|
|
75
82
|
raise ValueError("not a receipt (type atom)")
|
|
83
|
+
if version_atom.kind is not AtomKind.BYTES:
|
|
84
|
+
raise ValueError("malformed receipt (version atom)")
|
|
85
|
+
version_value = _be_bytes_to_int(version_atom.data)
|
|
86
|
+
if version_value != 1:
|
|
87
|
+
raise ValueError("unsupported receipt version")
|
|
76
88
|
if tx_atom.kind is not AtomKind.LIST:
|
|
77
89
|
raise ValueError("receipt transaction hash must be list-kind")
|
|
78
90
|
if status_atom.kind is not AtomKind.BYTES or cost_atom.kind is not AtomKind.BYTES or logs_atom.kind is not AtomKind.LIST:
|
|
@@ -92,6 +104,7 @@ class Receipt:
|
|
|
92
104
|
cost=_be_bytes_to_int(cost_bytes),
|
|
93
105
|
logs_hash=logs_bytes,
|
|
94
106
|
status=status_value,
|
|
107
|
+
version=version_value,
|
|
95
108
|
)
|
|
96
109
|
receipt.atom_hash = bytes(receipt_id)
|
|
97
110
|
receipt.atoms = atom_chain
|
|
@@ -6,7 +6,7 @@ from typing import Any, List, Optional, Tuple
|
|
|
6
6
|
from ...storage.models.atom import Atom, AtomKind, ZERO32
|
|
7
7
|
from ...utils.integer import bytes_to_int, int_to_bytes
|
|
8
8
|
from .account import Account
|
|
9
|
-
from ..
|
|
9
|
+
from ..constants import TREASURY_ADDRESS
|
|
10
10
|
from .receipt import STATUS_FAILED, Receipt, STATUS_SUCCESS
|
|
11
11
|
|
|
12
12
|
@dataclass
|
|
@@ -14,6 +14,7 @@ class Transaction:
|
|
|
14
14
|
chain_id: int
|
|
15
15
|
amount: int
|
|
16
16
|
counter: int
|
|
17
|
+
version: int = 1
|
|
17
18
|
data: bytes = b""
|
|
18
19
|
recipient: bytes = b""
|
|
19
20
|
sender: bytes = b""
|
|
@@ -53,13 +54,19 @@ class Transaction:
|
|
|
53
54
|
next_id=body_list_id,
|
|
54
55
|
kind=AtomKind.BYTES,
|
|
55
56
|
)
|
|
57
|
+
version_atom = Atom(
|
|
58
|
+
data=int_to_bytes(self.version),
|
|
59
|
+
next_id=signature_atom.object_id(),
|
|
60
|
+
kind=AtomKind.BYTES,
|
|
61
|
+
)
|
|
56
62
|
type_atom = Atom(
|
|
57
63
|
data=b"transaction",
|
|
58
|
-
next_id=
|
|
64
|
+
next_id=version_atom.object_id(),
|
|
59
65
|
kind=AtomKind.SYMBOL,
|
|
60
66
|
)
|
|
61
67
|
|
|
62
68
|
acc.append(signature_atom)
|
|
69
|
+
acc.append(version_atom)
|
|
63
70
|
acc.append(type_atom)
|
|
64
71
|
|
|
65
72
|
self.hash = type_atom.object_id()
|
|
@@ -106,7 +113,16 @@ class Transaction:
|
|
|
106
113
|
if type_atom.data != b"transaction":
|
|
107
114
|
raise ValueError("not a transaction (type atom payload)")
|
|
108
115
|
|
|
109
|
-
|
|
116
|
+
version_atom = _require_atom(type_atom.next_id, "transaction version atom", AtomKind.BYTES)
|
|
117
|
+
version = bytes_to_int(version_atom.data)
|
|
118
|
+
if version != 1:
|
|
119
|
+
raise ValueError("unsupported transaction version")
|
|
120
|
+
|
|
121
|
+
signature_atom = _require_atom(
|
|
122
|
+
version_atom.next_id,
|
|
123
|
+
"transaction signature atom",
|
|
124
|
+
AtomKind.BYTES,
|
|
125
|
+
)
|
|
110
126
|
body_list_atom = _require_atom(signature_atom.next_id, "transaction body list atom", AtomKind.LIST)
|
|
111
127
|
if body_list_atom.next_id and body_list_atom.next_id != ZERO32:
|
|
112
128
|
raise ValueError("malformed transaction (body list tail)")
|
|
@@ -141,8 +157,34 @@ class Transaction:
|
|
|
141
157
|
sender=sender_bytes,
|
|
142
158
|
signature=signature_atom.data,
|
|
143
159
|
hash=bytes(transaction_id),
|
|
160
|
+
version=version,
|
|
144
161
|
)
|
|
145
162
|
|
|
163
|
+
@classmethod
|
|
164
|
+
def get_atoms(
|
|
165
|
+
cls,
|
|
166
|
+
node: Any,
|
|
167
|
+
transaction_id: bytes,
|
|
168
|
+
) -> Optional[List[Atom]]:
|
|
169
|
+
"""Load the transaction atom chain from storage, returning the atoms or None."""
|
|
170
|
+
atoms = node.get_atom_list_from_storage(transaction_id)
|
|
171
|
+
if atoms is None or len(atoms) < 4:
|
|
172
|
+
return None
|
|
173
|
+
type_atom = atoms[0]
|
|
174
|
+
if type_atom.kind is not AtomKind.SYMBOL or type_atom.data != b"transaction":
|
|
175
|
+
return None
|
|
176
|
+
version_atom = atoms[1]
|
|
177
|
+
if version_atom.kind is not AtomKind.BYTES or bytes_to_int(version_atom.data) != 1:
|
|
178
|
+
return None
|
|
179
|
+
|
|
180
|
+
body_list_atom = atoms[-1]
|
|
181
|
+
detail_atoms = node.get_atom_list_from_storage(body_list_atom.data)
|
|
182
|
+
if detail_atoms is None:
|
|
183
|
+
return None
|
|
184
|
+
atoms.extend(detail_atoms)
|
|
185
|
+
|
|
186
|
+
return atoms
|
|
187
|
+
|
|
146
188
|
|
|
147
189
|
def apply_transaction(node: Any, block: object, transaction_hash: bytes) -> int:
|
|
148
190
|
"""Apply transaction to the candidate block and return the collected fee."""
|