astreum 0.2.39__py3-none-any.whl → 0.2.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. astreum/_communication/__init__.py +2 -0
  2. astreum/{models → _communication}/message.py +100 -64
  3. astreum/_communication/ping.py +33 -0
  4. astreum/_communication/route.py +53 -20
  5. astreum/_communication/setup.py +240 -99
  6. astreum/_communication/util.py +42 -0
  7. astreum/_consensus/__init__.py +6 -0
  8. astreum/_consensus/account.py +170 -0
  9. astreum/_consensus/accounts.py +67 -0
  10. astreum/_consensus/block.py +84 -52
  11. astreum/_consensus/chain.py +65 -62
  12. astreum/_consensus/fork.py +99 -97
  13. astreum/_consensus/genesis.py +141 -0
  14. astreum/_consensus/receipt.py +177 -0
  15. astreum/_consensus/setup.py +21 -162
  16. astreum/_consensus/transaction.py +43 -23
  17. astreum/_consensus/workers/__init__.py +9 -0
  18. astreum/_consensus/workers/discovery.py +48 -0
  19. astreum/_consensus/workers/validation.py +122 -0
  20. astreum/_consensus/workers/verify.py +63 -0
  21. astreum/_storage/atom.py +24 -7
  22. astreum/_storage/patricia.py +443 -0
  23. astreum/models/block.py +10 -10
  24. astreum/node.py +755 -753
  25. {astreum-0.2.39.dist-info → astreum-0.2.41.dist-info}/METADATA +1 -1
  26. astreum-0.2.41.dist-info/RECORD +53 -0
  27. astreum/lispeum/__init__.py +0 -0
  28. astreum/lispeum/environment.py +0 -40
  29. astreum/lispeum/expression.py +0 -86
  30. astreum/lispeum/parser.py +0 -41
  31. astreum/lispeum/tokenizer.py +0 -52
  32. astreum/models/account.py +0 -91
  33. astreum/models/accounts.py +0 -34
  34. astreum/models/transaction.py +0 -106
  35. astreum/relay/__init__.py +0 -0
  36. astreum/relay/peer.py +0 -9
  37. astreum/relay/route.py +0 -25
  38. astreum/relay/setup.py +0 -58
  39. astreum-0.2.39.dist-info/RECORD +0 -55
  40. {astreum-0.2.39.dist-info → astreum-0.2.41.dist-info}/WHEEL +0 -0
  41. {astreum-0.2.39.dist-info → astreum-0.2.41.dist-info}/licenses/LICENSE +0 -0
  42. {astreum-0.2.39.dist-info → astreum-0.2.41.dist-info}/top_level.txt +0 -0
astreum/_storage/patricia.py ADDED
@@ -0,0 +1,443 @@
+ import blake3
+ from typing import Dict, List, Optional, Tuple, TYPE_CHECKING
+
+ from .atom import Atom, ZERO32
+
+ if TYPE_CHECKING:
+     from .._node import Node
+
+ class PatriciaNode:
+     """
+     A node in a compressed-key Patricia trie.
+
+     Attributes:
+         key_len (int): Number of bits in the `key` prefix that are meaningful.
+         key (bytes): The MSB-aligned bit prefix (zero-padded in last byte).
+         value (Optional[bytes]): Stored payload (None for internal nodes).
+         child_0 (Optional[bytes]): Hash pointer for next-bit == 0.
+         child_1 (Optional[bytes]): Hash pointer for next-bit == 1.
+     """
+
+     def __init__(
+         self,
+         key_len: int,
+         key: bytes,
+         value: Optional[bytes],
+         child_0: Optional[bytes],
+         child_1: Optional[bytes]
+     ):
+         self.key_len = key_len
+         self.key = key
+         self.value = value
+         self.child_0 = child_0
+         self.child_1 = child_1
+         self._hash: Optional[bytes] = None
+
+     def hash(self) -> bytes:
+         """
+         Compute and cache the BLAKE3 hash of this node's serialized form.
+         """
+         if self._hash is None:
+             self._hash = blake3.blake3(self.to_bytes()).digest()
+         return self._hash
+
+     def to_atoms(self) -> Tuple[bytes, List[Atom]]:
+         """
+         Materialise this node as a flat atom chain containing the fields in a
+         traversal-friendly order: key length prefix (single byte) + key bytes,
+         child_0 hash, child_1 hash, and value bytes. Returns the head atom hash
+         and the atom list.
+         """
+         if self.key_len > 255:
+             raise ValueError("Patricia key length > 255 bits cannot be encoded in a single atom field")
+
+         entries: List[bytes] = [
+             bytes([self.key_len]) + self.key,
+             self.child_0 or ZERO32,
+             self.child_1 or ZERO32,
+             self.value or b"",
+         ]
+
+         atoms: List[Atom] = []
+         next_hash = ZERO32
+         for payload in reversed(entries):
+             atom = Atom.from_data(data=payload, next_hash=next_hash)
+             atoms.append(atom)
+             next_hash = atom.object_id()
+
+         head = next_hash
+         atoms.reverse()
+         return head, atoms
+
+     @classmethod
+     def from_atoms(
+         cls,
+         node: "Node",
+         head_hash: bytes,
+     ) -> "PatriciaNode":
+         """
+         Reconstruct a node from the atom chain rooted at `head_hash`, using the
+         supplied `node` instance to resolve atom object ids.
+         """
+         if head_hash == ZERO32:
+             raise ValueError("empty atom chain for Patricia node")
+
+         entries: List[bytes] = []
+         current = head_hash
+         hops = 0
+
+         while current != ZERO32 and hops < 4:
+             atom = node._local_get(current)
+             if atom is None:
+                 raise ValueError("missing atom while decoding Patricia node")
+             entries.append(atom.data)
+             current = atom.next
+             hops += 1
+
+         if current != ZERO32:
+             raise ValueError("too many fields while decoding Patricia node")
+
+         if len(entries) != 4:
+             raise ValueError("incomplete atom sequence for Patricia node")
+
+         key_entry = entries[0]
+         if not key_entry:
+             raise ValueError("missing key entry while decoding Patricia node")
+         key_len = key_entry[0]
+         key = key_entry[1:]
+         child_0 = entries[1] if entries[1] != ZERO32 else None
+         child_1 = entries[2] if entries[2] != ZERO32 else None
+         value = entries[3]
+
+         return cls(key_len=key_len, key=key, value=value, child_0=child_0, child_1=child_1)
+
+ class PatriciaTrie:
+     """
+     A compressed-key Patricia trie supporting get and put.
+     """
+
+     def __init__(
+         self,
+         root_hash: Optional[bytes] = None,
+     ) -> None:
+         """
+         :param root_hash: optional hash of existing root node
+         """
+         self.nodes: Dict[bytes, PatriciaNode] = {}
+         self.root_hash = root_hash
+
+     @staticmethod
+     def _bit(buf: bytes, idx: int) -> bool:
+         """
+         Return the bit at position `idx` (MSB-first) from `buf`.
+         """
+         byte_i, offset = divmod(idx, 8)
+         return ((buf[byte_i] >> (7 - offset)) & 1) == 1
+
+     @classmethod
+     def _match_prefix(
+         cls,
+         prefix: bytes,
+         prefix_len: int,
+         key: bytes,
+         key_bit_offset: int,
+     ) -> bool:
+         """
+         Check whether the `prefix_len` bits of `prefix` match
+         bits in `key` starting at `key_bit_offset`.
+         """
+         total_bits = len(key) * 8
+         if key_bit_offset + prefix_len > total_bits:
+             return False
+         for i in range(prefix_len):
+             if cls._bit(prefix, i) != cls._bit(key, key_bit_offset + i):
+                 return False
+         return True
+
+     def _fetch(self, storage_node: "Node", h: bytes) -> Optional[PatriciaNode]:
+         """
+         Fetch a node by hash, consulting the in-memory cache first and falling
+         back to the atom storage provided by `storage_node`.
+         """
+         cached = self.nodes.get(h)
+         if cached is not None:
+             return cached
+
+         if storage_node._local_get(h) is None:
+             return None
+
+         pat_node = PatriciaNode.from_atoms(storage_node, h)
+         self.nodes[h] = pat_node
+         return pat_node
+
+     def get(self, storage_node: "Node", key: bytes) -> Optional[bytes]:
+         """
+         Return the stored value for `key`, or None if absent.
+         """
+         # Empty trie?
+         if self.root_hash is None:
+             return None
+
+         current = self._fetch(storage_node, self.root_hash)
+         if current is None:
+             return None
+
+         key_pos = 0 # bit offset into key
+
+         while current is not None:
+             # 1) Check that this node's prefix matches the key here
+             if not self._match_prefix(current.key, current.key_len, key, key_pos):
+                 return None
+             key_pos += current.key_len
+
+             # 2) If we've consumed all bits of the search key:
+             if key_pos == len(key) * 8:
+                 # Return value only if this node actually stores one
+                 return current.value
+
+             # 3) Decide which branch to follow via next bit
+             try:
+                 next_bit = self._bit(key, key_pos)
+             except IndexError:
+                 return None
+
+             child_hash = current.child_1 if next_bit else current.child_0
+             if child_hash is None:
+                 return None # dead end
+
+             # 4) Fetch child and continue descent
+             current = self._fetch(storage_node, child_hash)
+             if current is None:
+                 return None # dangling pointer
+
+             key_pos += 1 # consumed routing bit
+
+         return None
+
+     def put(self, storage_node: "Node", key: bytes, value: bytes) -> None:
+         """
+         Insert or update `key` with `value` in-place.
+         """
+         total_bits = len(key) * 8
+
+         # S1 – Empty trie → create root leaf
+         if self.root_hash is None:
+             leaf = self._make_node(key, total_bits, value, None, None)
+             self.root_hash = leaf.hash()
+             return
+
+         # S2 – traversal bookkeeping
+         stack: List[Tuple[PatriciaNode, bytes, int]] = [] # (parent, parent_hash, dir_bit)
+         current = self._fetch(storage_node, self.root_hash)
+         assert current is not None
+         key_pos = 0
+
+         # S4 – main descent loop
+         while True:
+             # 4.1 – prefix mismatch? → split
+             if not self._match_prefix(current.key, current.key_len, key, key_pos):
+                 self._split_and_insert(current, stack, key, key_pos, value)
+                 return
+
+             # 4.2 – consume this prefix
+             key_pos += current.key_len
+
+             # 4.3 – matched entire key → update value
+             if key_pos == total_bits:
+                 old_hash = current.hash()
+                 current.value = value
+                 self._invalidate_hash(current)
+                 new_hash = current.hash()
+                 if new_hash != old_hash:
+                     self.nodes.pop(old_hash, None)
+                     self.nodes[new_hash] = current
+                 self._bubble(stack, new_hash)
+                 return
+
+             # 4.4 – routing bit
+             next_bit = self._bit(key, key_pos)
+             child_hash = current.child_1 if next_bit else current.child_0
+
+             # 4.6 – no child → easy append leaf
+             if child_hash is None:
+                 self._append_leaf(current, next_bit, key, key_pos, value, stack)
+                 return
+
+             # 4.7 – push current node onto stack
+             stack.append((current, current.hash(), int(next_bit)))
+
+             # 4.8 – fetch child and continue
+             child = self._fetch(storage_node, child_hash)
+             if child is None:
+                 # Dangling pointer: treat as missing child
+                 parent, _, _ = stack[-1]
+                 self._append_leaf(parent, next_bit, key, key_pos, value, stack[:-1])
+                 return
+
+             current = child
+             key_pos += 1 # consumed routing bit
+
+     def _append_leaf(
+         self,
+         parent: PatriciaNode,
+         dir_bit: bool,
+         key: bytes,
+         key_pos: int,
+         value: bytes,
+         stack: List[Tuple[PatriciaNode, bytes, int]],
+     ) -> None:
+         tail_len = len(key) * 8 - (key_pos + 1)
+         tail_bits, tail_len = self._bit_slice(key, key_pos + 1, tail_len)
+         leaf = self._make_node(tail_bits, tail_len, value, None, None)
+
+         old_parent_hash = parent.hash()
+
+         if dir_bit:
+             parent.child_1 = leaf.hash()
+         else:
+             parent.child_0 = leaf.hash()
+
+         self._invalidate_hash(parent)
+         new_parent_hash = parent.hash()
+         if new_parent_hash != old_parent_hash:
+             self.nodes.pop(old_parent_hash, None)
+             self.nodes[new_parent_hash] = parent
+         self._bubble(stack, new_parent_hash)
+
+
+     def _split_and_insert(
+         self,
+         node: PatriciaNode,
+         stack: List[Tuple[PatriciaNode, bytes, int]],
+         key: bytes,
+         key_pos: int,
+         value: bytes,
+     ) -> None:
+         # ➊—find longest-common-prefix (lcp) as before …
+         max_lcp = min(node.key_len, len(key) * 8 - key_pos)
+         lcp = 0
+         while lcp < max_lcp and self._bit(node.key, lcp) == self._bit(key, key_pos + lcp):
+             lcp += 1
+
+         # divergence bit values (taken **before** we mutate node.key)
+         old_div_bit = self._bit(node.key, lcp)
+         new_div_bit = self._bit(key, key_pos + lcp)
+
+         # ➋—internal node that holds the common prefix
+         common_bits, common_len = self._bit_slice(node.key, 0, lcp)
+         internal = self._make_node(common_bits, common_len, None, None, None)
+
+         # ➌—trim the *existing* node’s prefix **after** the divergence bit
+         old_suffix_bits, old_suffix_len = self._bit_slice(
+             node.key,
+             lcp + 1, # start *after* divergence bit
+             node.key_len - lcp - 1 # may be zero
+         )
+         old_node_hash = node.hash()
+
+         node.key = old_suffix_bits
+         node.key_len = old_suffix_len
+         self._invalidate_hash(node)
+         new_node_hash = node.hash()
+         if new_node_hash != old_node_hash:
+             self.nodes.pop(old_node_hash, None)
+             self.nodes[new_node_hash] = node
+
+         # ➍—new leaf for the key being inserted (unchanged)
+         new_tail_len = len(key) * 8 - (key_pos + lcp + 1)
+         new_tail_bits, _ = self._bit_slice(key, key_pos + lcp + 1, new_tail_len)
+         leaf = self._make_node(new_tail_bits, new_tail_len, value, None, None)
+
+         # ➎—hang the two children off the internal node
+         if old_div_bit:
+             internal.child_1 = new_node_hash
+             internal.child_0 = leaf.hash()
+         else:
+             internal.child_0 = new_node_hash
+             internal.child_1 = leaf.hash()
+
+         # ➏—rehash up to the root (unchanged)
+         self._invalidate_hash(internal)
+         internal_hash = internal.hash()
+         self.nodes[internal_hash] = internal
+
+         if not stack:
+             self.root_hash = internal_hash
+             return
+
+         parent, _, dir_bit = stack.pop()
+         if dir_bit == 0:
+             parent.child_0 = internal_hash
+         else:
+             parent.child_1 = internal_hash
+         self._invalidate_hash(parent)
+         self._bubble(stack, parent.hash())
+
+
+     def _make_node(
+         self,
+         prefix_bits: bytes,
+         prefix_len: int,
+         value: Optional[bytes],
+         child0: Optional[bytes],
+         child1: Optional[bytes],
+     ) -> PatriciaNode:
+         node = PatriciaNode(prefix_len, prefix_bits, value, child0, child1)
+         self.nodes[node.hash()] = node
+         return node
+
+     def _invalidate_hash(self, node: PatriciaNode) -> None:
+         """Clear cached hash so next .hash() recomputes."""
+         node._hash = None # type: ignore
+
+     def _bubble(
+         self,
+         stack: List[Tuple[PatriciaNode, bytes, int]],
+         new_hash: bytes
+     ) -> None:
+         """
+         Propagate updated child-hash `new_hash` up the ancestor stack,
+         rebasing each parent's pointer, invalidating and re-hashing.
+         """
+         while stack:
+             parent, old_hash, dir_bit = stack.pop()
+
+             if dir_bit == 0:
+                 parent.child_0 = new_hash
+             else:
+                 parent.child_1 = new_hash
+
+             self._invalidate_hash(parent)
+             new_hash = parent.hash()
+             if new_hash != old_hash:
+                 self.nodes.pop(old_hash, None)
+                 self.nodes[new_hash] = parent
+
+         self.root_hash = new_hash
+
+
+     def _bit_slice(
+         self,
+         buf: bytes,
+         start_bit: int,
+         length: int
+     ) -> tuple[bytes, int]:
+         """
+         Extract `length` bits from `buf` starting at `start_bit` (MSB-first),
+         returning (bytes, bit_len) with zero-padding.
+         """
+         if length == 0:
+             return b"", 0
+
+         total = int.from_bytes(buf, "big")
+         bits_in_buf = len(buf) * 8
+
+         # shift so slice ends at LSB
+         shift = bits_in_buf - (start_bit + length)
+         slice_int = (total >> shift) & ((1 << length) - 1)
+
+         # left-align to MSB of first byte
+         pad = (8 - (length % 8)) % 8
+         slice_int <<= pad
+         byte_len = (length + 7) // 8
+         return slice_int.to_bytes(byte_len, "big"), length
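
The new trie addresses keys bit-by-bit, MSB-first, with each node's prefix stored left-aligned and zero-padded in its final byte. As a quick illustration of that addressing, here is a minimal standalone sketch that mirrors the `_bit` and `_bit_slice` helpers outside the package (no `blake3` or `Atom` dependency); the function names and example byte values are chosen for the demo and are not part of the release.

# Standalone sketch of the MSB-first bit addressing used by PatriciaTrie.
# Mirrors _bit and _bit_slice; not imported from the astreum package.

def bit(buf: bytes, idx: int) -> bool:
    # Bit `idx` of `buf`, MSB-first: bit 0 is the high bit of buf[0].
    byte_i, offset = divmod(idx, 8)
    return ((buf[byte_i] >> (7 - offset)) & 1) == 1

def bit_slice(buf: bytes, start_bit: int, length: int) -> tuple[bytes, int]:
    # `length` bits starting at `start_bit`, left-aligned and zero-padded
    # in the final byte, returned together with the bit length.
    if length == 0:
        return b"", 0
    total = int.from_bytes(buf, "big")
    shift = len(buf) * 8 - (start_bit + length)
    value = (total >> shift) & ((1 << length) - 1)
    value <<= (8 - (length % 8)) % 8  # left-align to the MSB of the first byte
    return value.to_bytes((length + 7) // 8, "big"), length

# 0x0f = 0000 1111: bit 0 is 0, bit 4 is 1 under MSB-first addressing.
assert not bit(b"\x0f", 0)
assert bit(b"\x0f", 4)

# Dropping the first (routing) bit of 0x0f leaves the 7-bit suffix 000 1111,
# stored left-aligned as 0x1e, the same trimming _split_and_insert performs
# on an existing node's prefix past the divergence bit.
assert bit_slice(b"\x0f", 1, 7) == (b"\x1e", 7)
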
astreum/models/block.py CHANGED
@@ -4,8 +4,8 @@ from threading import Thread
  from typing import List, Dict, Any, Optional, Union
 
  from astreum.crypto.wesolowski import vdf_generate, vdf_verify
- from astreum.models.account import Account
- from astreum.models.accounts import Accounts
+ from astreum._consensus.account import Account
+ from astreum._consensus.accounts import Accounts
  from astreum.models.patricia import PatriciaTrie
  from astreum.models.transaction import Transaction
  from ..crypto import ed25519
@@ -32,7 +32,7 @@ class Block:
  accounts: Optional[Accounts] = None,
  transaction_limit: Optional[int] = None,
  transactions_total_fees: Optional[int] = None,
- transactions_root_hash: Optional[bytes] = None,
+ transactions_hash: Optional[bytes] = None,
  transactions_count: Optional[int] = None,
  delay_difficulty: Optional[int] = None,
  delay_output: Optional[bytes] = None,
@@ -49,7 +49,7 @@ class Block:
  self.accounts = accounts
  self.transaction_limit = transaction_limit
  self.transactions_total_fees = transactions_total_fees
- self.transactions_root_hash = transactions_root_hash
+ self.transactions_hash = transactions_hash
  self.transactions_count = transactions_count
  self.delay_difficulty = delay_difficulty
  self.delay_output = delay_output
@@ -125,7 +125,7 @@ class Block:
  accounts = accts,
  transactions_total_fees = 0,
  transaction_limit = 1,
- transactions_root_hash = b"\x00" * 32,
+ transactions_hash = b"\x00" * 32,
  transactions_count = 0,
  delay_difficulty = 1,
  delay_output = b"",
@@ -219,7 +219,7 @@ class Block:
  # ------------------ timing & roots ----------------------------------
  blk.block_time = blk.timestamp - previous_block.timestamp
  blk.accounts_hash = blk.accounts.root_hash
- blk.transactions_root_hash = MerkleTree.from_leaves(blk.tx_hashes).root_hash
+ blk.transactions_hash = MerkleTree.from_leaves(blk.tx_hashes).root_hash
  blk.transactions_total_fees = blk.total_fees
 
  # ------------------ build full body root ----------------------------
@@ -234,7 +234,7 @@ class Block:
  "timestamp": blk.timestamp,
  "transaction_limit": blk.transaction_limit,
  "transactions_count": blk.transactions_count,
- "transactions_root_hash": blk.transactions_root_hash,
+ "transactions_hash": blk.transactions_hash,
  "transactions_total_fees": blk.transactions_total_fees,
  "validator_pk": blk.validator_pk,
  }
@@ -362,7 +362,7 @@ class Block:
  f_names = (
  "accounts_hash","block_time","delay_difficulty","delay_output","delay_proof",
  "number","prev_block_hash","timestamp","transaction_limit",
- "transactions_count","transactions_root_hash","transactions_total_fees",
+ "transactions_count","transactions_hash","transactions_total_fees",
  "validator_pk",
  )
  leaves = [
@@ -391,8 +391,8 @@ class Block:
  # ---------- 4. replay all txs -------------------------------------
  accs = Accounts(root_hash=prev_blk.get_field("accounts_hash"),
  node_get=remote_get_fn)
- tx_mt = MerkleTree(node_get=remote_get_fn,
- root_hash=self.transactions_root_hash)
+ tx_mt = MerkleTree(node_get=remote_get_fn,
+ root_hash=self.transactions_hash)
  if tx_mt.leaf_count() != self.transactions_count:
  raise ValueError("transactions_count mismatch")