astreum 0.2.29__py3-none-any.whl → 0.2.61__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- astreum/__init__.py +9 -1
- astreum/_communication/__init__.py +11 -0
- astreum/{models → _communication}/message.py +101 -64
- astreum/_communication/peer.py +23 -0
- astreum/_communication/ping.py +33 -0
- astreum/_communication/route.py +95 -0
- astreum/_communication/setup.py +322 -0
- astreum/_communication/util.py +42 -0
- astreum/_consensus/__init__.py +20 -0
- astreum/_consensus/account.py +95 -0
- astreum/_consensus/accounts.py +38 -0
- astreum/_consensus/block.py +311 -0
- astreum/_consensus/chain.py +66 -0
- astreum/_consensus/fork.py +100 -0
- astreum/_consensus/genesis.py +72 -0
- astreum/_consensus/receipt.py +136 -0
- astreum/_consensus/setup.py +115 -0
- astreum/_consensus/transaction.py +215 -0
- astreum/_consensus/workers/__init__.py +9 -0
- astreum/_consensus/workers/discovery.py +48 -0
- astreum/_consensus/workers/validation.py +125 -0
- astreum/_consensus/workers/verify.py +63 -0
- astreum/_lispeum/__init__.py +16 -0
- astreum/_lispeum/environment.py +13 -0
- astreum/_lispeum/expression.py +190 -0
- astreum/_lispeum/high_evaluation.py +236 -0
- astreum/_lispeum/low_evaluation.py +123 -0
- astreum/_lispeum/meter.py +18 -0
- astreum/_lispeum/parser.py +51 -0
- astreum/_lispeum/tokenizer.py +22 -0
- astreum/_node.py +198 -0
- astreum/_storage/__init__.py +7 -0
- astreum/_storage/atom.py +109 -0
- astreum/_storage/patricia.py +478 -0
- astreum/_storage/setup.py +35 -0
- astreum/models/block.py +48 -39
- astreum/node.py +755 -563
- astreum/utils/bytes.py +24 -0
- astreum/utils/integer.py +25 -0
- astreum/utils/logging.py +219 -0
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/METADATA +50 -14
- astreum-0.2.61.dist-info/RECORD +57 -0
- astreum/lispeum/__init__.py +0 -2
- astreum/lispeum/environment.py +0 -40
- astreum/lispeum/expression.py +0 -86
- astreum/lispeum/parser.py +0 -41
- astreum/lispeum/tokenizer.py +0 -52
- astreum/models/account.py +0 -91
- astreum/models/accounts.py +0 -34
- astreum/models/transaction.py +0 -106
- astreum/relay/__init__.py +0 -0
- astreum/relay/peer.py +0 -9
- astreum/relay/route.py +0 -25
- astreum/relay/setup.py +0 -58
- astreum-0.2.29.dist-info/RECORD +0 -33
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/WHEEL +0 -0
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/licenses/LICENSE +0 -0
- {astreum-0.2.29.dist-info → astreum-0.2.61.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,478 @@
|
|
|
1
|
+
import blake3
|
|
2
|
+
from typing import Dict, List, Optional, Tuple, TYPE_CHECKING
|
|
3
|
+
|
|
4
|
+
from .atom import Atom, AtomKind, ZERO32
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from .._node import Node
|
|
8
|
+
|
|
9
|
+
class PatriciaNode:
    """
    A node in a compressed-key Patricia trie.

    Attributes:
        key_len (int): Number of bits in the `key` prefix that are meaningful.
        key (bytes): The MSB-aligned bit prefix (zero-padded in last byte).
        value (Optional[bytes]): Stored payload (None for internal nodes).
        child_0 (Optional[bytes]): Hash pointer for next-bit == 0.
        child_1 (Optional[bytes]): Hash pointer for next-bit == 1.
    """

    def __init__(
        self,
        key_len: int,
        key: bytes,
        value: Optional[bytes],
        child_0: Optional[bytes],
        child_1: Optional[bytes]
    ):
        self.key_len = key_len
        self.key = key
        self.value = value
        self.child_0 = child_0
        self.child_1 = child_1
        # Lazily computed by hash(); cleared by the trie when the node mutates.
        self._hash: Optional[bytes] = None

    def to_bytes(self) -> bytes:
        """
        Serialize this node deterministically for in-memory hashing.

        Layout: 1 byte key_len, 2-byte big-endian length of `key`, the key
        bytes, child_0 and child_1 (ZERO32 when absent, so always 32 bytes
        each), then the value bytes. The fixed-width length prefixes make
        the encoding injective for any field contents.

        NOTE(review): `hash()` referenced `to_bytes()` but no such method
        existed in this module, so `hash()` always raised AttributeError.
        This layout is deterministic and sufficient for in-process node
        identity, but it intentionally differs from the atom encoding in
        `to_atoms()` — confirm which serialization is meant to be canonical.
        """
        return b"".join([
            bytes([self.key_len]),
            len(self.key).to_bytes(2, "big"),
            self.key,
            self.child_0 or ZERO32,
            self.child_1 or ZERO32,
            self.value or b"",
        ])

    def hash(self) -> bytes:
        """
        Compute and cache the BLAKE3 hash of this node's serialized form.
        """
        if self._hash is None:
            self._hash = blake3.blake3(self.to_bytes()).digest()
        return self._hash

    def to_atoms(self) -> "Tuple[bytes, List[Atom]]":
        """
        Materialise this node with the canonical atom layout used by the
        storage layer: a leading SYMBOL atom with payload ``b"radix"`` whose
        ``next`` pointer links to four BYTES atoms containing, in order:
        key (len byte + key payload), child_0 hash, child_1 hash, value bytes.
        Returns the top atom hash and the emitted atoms.

        Raises:
            ValueError: if `key_len` does not fit in the single length byte.
        """
        if self.key_len > 255:
            raise ValueError("Patricia key length > 255 bits cannot be encoded in a single atom field")

        entries: List[bytes] = [
            bytes([self.key_len]) + self.key,
            self.child_0 or ZERO32,
            self.child_1 or ZERO32,
            self.value or b"",
        ]

        # Build the chain back-to-front so each atom can point at its successor.
        data_atoms: List[Atom] = []
        next_hash = ZERO32
        for payload in reversed(entries):
            atom = Atom.from_data(data=payload, next_hash=next_hash, kind=AtomKind.BYTES)
            data_atoms.append(atom)
            next_hash = atom.object_id()

        data_atoms.reverse()

        type_atom = Atom.from_data(
            data=b"radix",
            next_hash=next_hash,
            kind=AtomKind.SYMBOL,
        )

        atoms = data_atoms + [type_atom]
        return type_atom.object_id(), atoms

    @classmethod
    def from_atoms(
        cls,
        node: "Node",
        head_hash: bytes,
    ) -> "PatriciaNode":
        """
        Reconstruct a node from the atom chain rooted at `head_hash`, using the
        supplied `node` instance to resolve atom object ids.

        Raises:
            ValueError: on an empty chain, missing atoms, wrong atom kinds,
                a non-"radix" type atom, or a field count other than four.
        """
        if head_hash == ZERO32:
            raise ValueError("empty atom chain for Patricia node")

        def _atom_kind(atom: Optional[Atom]) -> Optional[AtomKind]:
            # Tolerate atoms whose `kind` is stored as a raw int.
            kind_value = getattr(atom, "kind", None)
            if isinstance(kind_value, AtomKind):
                return kind_value
            if isinstance(kind_value, int):
                try:
                    return AtomKind(kind_value)
                except ValueError:
                    return None
            return None

        def _require_atom(atom_hash: Optional[bytes], context: str) -> Atom:
            # Resolve an atom hash, rejecting null/absent pointers.
            if not atom_hash or atom_hash == ZERO32:
                raise ValueError(f"missing {context}")
            atom = node.storage_get(atom_hash)
            if atom is None:
                raise ValueError(f"missing {context}")
            return atom

        type_atom = _require_atom(head_hash, "Patricia type atom")
        if _atom_kind(type_atom) is not AtomKind.SYMBOL:
            raise ValueError("malformed Patricia node (type atom kind)")
        if type_atom.data != b"radix":
            raise ValueError("not a Patricia node (type mismatch)")

        entries: List[bytes] = []
        current = type_atom.next
        hops = 0

        # Exactly four detail atoms are expected; cap the walk so a corrupt
        # chain cannot loop forever.
        while current and current != ZERO32 and hops < 4:
            atom = node.storage_get(current)
            if atom is None:
                raise ValueError("missing atom while decoding Patricia node")
            if _atom_kind(atom) is not AtomKind.BYTES:
                raise ValueError("Patricia node detail atoms must be bytes")
            entries.append(atom.data)
            current = atom.next
            hops += 1

        if current and current != ZERO32:
            raise ValueError("too many fields while decoding Patricia node")

        if len(entries) != 4:
            raise ValueError("incomplete atom sequence for Patricia node")

        key_entry = entries[0]
        if not key_entry:
            raise ValueError("missing key entry while decoding Patricia node")
        key_len = key_entry[0]
        key = key_entry[1:]
        child_0 = entries[1] if entries[1] != ZERO32 else None
        child_1 = entries[2] if entries[2] != ZERO32 else None
        # to_atoms() encodes value=None as b"", so decode empty back to None;
        # the encoding cannot distinguish the two, and None keeps internal
        # nodes round-tripping as value-less.
        value = entries[3] if entries[3] else None

        return cls(key_len=key_len, key=key, value=value, child_0=child_0, child_1=child_1)
|
|
148
|
+
|
|
149
|
+
class PatriciaTrie:
    """
    A compressed-key Patricia trie supporting get and put.

    Nodes are addressed by hash; `self.nodes` is an in-memory cache, with
    the supplied storage node's `storage_get` as the persistent fallback.
    """

    def __init__(
        self,
        root_hash: Optional[bytes] = None,
    ) -> None:
        """
        :param root_hash: optional hash of existing root node
        """
        self.nodes: "Dict[bytes, PatriciaNode]" = {}
        self.root_hash = root_hash

    @staticmethod
    def _bit(buf: bytes, idx: int) -> bool:
        """
        Return the bit at position `idx` (MSB-first) from `buf`.
        """
        byte_i, offset = divmod(idx, 8)
        return ((buf[byte_i] >> (7 - offset)) & 1) == 1

    @classmethod
    def _match_prefix(
        cls,
        prefix: bytes,
        prefix_len: int,
        key: bytes,
        key_bit_offset: int,
    ) -> bool:
        """
        Check whether the `prefix_len` bits of `prefix` match
        bits in `key` starting at `key_bit_offset`.

        Returns False (rather than raising) when the prefix would run
        past the end of `key`.
        """
        total_bits = len(key) * 8
        if key_bit_offset + prefix_len > total_bits:
            return False
        for i in range(prefix_len):
            if cls._bit(prefix, i) != cls._bit(key, key_bit_offset + i):
                return False
        return True

    def _fetch(self, storage_node: "Node", h: bytes) -> "Optional[PatriciaNode]":
        """
        Fetch a node by hash, consulting the in-memory cache first and falling
        back to the atom storage provided by `storage_node`.
        """
        cached = self.nodes.get(h)
        if cached is not None:
            return cached

        if storage_node.storage_get(h) is None:
            return None

        pat_node = PatriciaNode.from_atoms(storage_node, h)
        self.nodes[h] = pat_node
        return pat_node

    def get(self, storage_node: "Node", key: bytes) -> Optional[bytes]:
        """
        Return the stored value for `key`, or None if absent.
        """
        # Empty trie?
        if self.root_hash is None:
            return None

        current = self._fetch(storage_node, self.root_hash)
        if current is None:
            return None

        key_pos = 0  # bit offset into key

        while current is not None:
            # 1) Check that this node's prefix matches the key here
            if not self._match_prefix(current.key, current.key_len, key, key_pos):
                return None
            key_pos += current.key_len

            # 2) If we've consumed all bits of the search key:
            if key_pos == len(key) * 8:
                # Return value only if this node actually stores one
                return current.value

            # 3) Decide which branch to follow via next bit
            try:
                next_bit = self._bit(key, key_pos)
            except IndexError:
                return None

            child_hash = current.child_1 if next_bit else current.child_0
            if child_hash is None:
                return None  # dead end

            # 4) Fetch child and continue descent
            current = self._fetch(storage_node, child_hash)
            if current is None:
                return None  # dangling pointer

            key_pos += 1  # consumed routing bit

        return None

    def put(self, storage_node: "Node", key: bytes, value: bytes) -> None:
        """
        Insert or update `key` with `value` in-place.
        """
        total_bits = len(key) * 8

        # S1 – Empty trie → create root leaf
        if self.root_hash is None:
            leaf = self._make_node(key, total_bits, value, None, None)
            self.root_hash = leaf.hash()
            return

        # S2 – traversal bookkeeping: (ancestor, its pre-update hash, branch taken)
        stack: "List[Tuple[PatriciaNode, bytes, int]]" = []
        current = self._fetch(storage_node, self.root_hash)
        assert current is not None
        key_pos = 0

        # S4 – main descent loop
        while True:
            # 4.1 – prefix mismatch? → split
            if not self._match_prefix(current.key, current.key_len, key, key_pos):
                self._split_and_insert(current, stack, key, key_pos, value)
                return

            # 4.2 – consume this prefix
            key_pos += current.key_len

            # 4.3 – matched entire key → update value
            if key_pos == total_bits:
                old_hash = current.hash()
                current.value = value
                self._invalidate_hash(current)
                new_hash = current.hash()
                if new_hash != old_hash:
                    self.nodes.pop(old_hash, None)
                    self.nodes[new_hash] = current
                    self._bubble(stack, new_hash)
                return

            # 4.4 – routing bit
            next_bit = self._bit(key, key_pos)
            child_hash = current.child_1 if next_bit else current.child_0

            # 4.6 – no child → easy append leaf
            if child_hash is None:
                self._append_leaf(current, next_bit, key, key_pos, value, stack)
                return

            # 4.7 – push current node onto stack
            stack.append((current, current.hash(), int(next_bit)))

            # 4.8 – fetch child and continue
            child = self._fetch(storage_node, child_hash)
            if child is None:
                # Dangling pointer: treat as missing child
                parent, _, _ = stack[-1]
                self._append_leaf(parent, next_bit, key, key_pos, value, stack[:-1])
                return

            current = child
            key_pos += 1  # consumed routing bit

    def _append_leaf(
        self,
        parent: "PatriciaNode",
        dir_bit: bool,
        key: bytes,
        key_pos: int,
        value: bytes,
        stack: "List[Tuple[PatriciaNode, bytes, int]]",
    ) -> None:
        """
        Attach a new leaf under `parent` on branch `dir_bit`, carrying the
        remainder of `key` after the routing bit, then rehash up `stack`.
        """
        tail_len = len(key) * 8 - (key_pos + 1)
        tail_bits, tail_len = self._bit_slice(key, key_pos + 1, tail_len)
        leaf = self._make_node(tail_bits, tail_len, value, None, None)

        old_parent_hash = parent.hash()

        if dir_bit:
            parent.child_1 = leaf.hash()
        else:
            parent.child_0 = leaf.hash()

        self._invalidate_hash(parent)
        new_parent_hash = parent.hash()
        if new_parent_hash != old_parent_hash:
            self.nodes.pop(old_parent_hash, None)
            self.nodes[new_parent_hash] = parent
            self._bubble(stack, new_parent_hash)

    def _split_and_insert(
        self,
        node: "PatriciaNode",
        stack: "List[Tuple[PatriciaNode, bytes, int]]",
        key: bytes,
        key_pos: int,
        value: bytes,
    ) -> None:
        """
        Split `node` where its prefix diverges from `key` (starting at bit
        `key_pos`): an internal node takes the common prefix, the trimmed
        old node and (when the key extends past the split) a new leaf hang
        off it, and hashes are rebubbled to the root.

        Also handles the case where `key` is a strict prefix of the node's
        path: the internal node then stores `value` itself and only the old
        node is re-attached. (The original computed an unused divergence
        bit of `key` here, which raised IndexError in exactly this case.)
        """
        total_bits = len(key) * 8

        # ➊ — longest common prefix between node.key and the rest of key
        max_lcp = min(node.key_len, total_bits - key_pos)
        lcp = 0
        while lcp < max_lcp and self._bit(node.key, lcp) == self._bit(key, key_pos + lcp):
            lcp += 1

        # Split is only called on a mismatch, so the old node always
        # diverges: lcp < node.key_len and this bit read is in range.
        old_div_bit = self._bit(node.key, lcp)
        # Key exhausted at the split point → no routing bit, no new leaf.
        key_exhausted = key_pos + lcp == total_bits

        # ➋ — internal node that holds the common prefix (and the inserted
        # value when the key terminates exactly here)
        common_bits, common_len = self._bit_slice(node.key, 0, lcp)
        internal = self._make_node(
            common_bits, common_len, value if key_exhausted else None, None, None
        )

        # ➌ — trim the *existing* node's prefix **after** the divergence bit
        # (the bit itself is encoded by which child slot the node occupies)
        old_suffix_bits, old_suffix_len = self._bit_slice(
            node.key,
            lcp + 1,                 # start *after* divergence bit
            node.key_len - lcp - 1   # may be zero
        )
        old_node_hash = node.hash()

        node.key = old_suffix_bits
        node.key_len = old_suffix_len
        self._invalidate_hash(node)
        new_node_hash = node.hash()
        if new_node_hash != old_node_hash:
            self.nodes.pop(old_node_hash, None)
            self.nodes[new_node_hash] = node

        # ➍/➎ — hang the children off the internal node
        if key_exhausted:
            if old_div_bit:
                internal.child_1 = new_node_hash
            else:
                internal.child_0 = new_node_hash
        else:
            # new leaf for the remainder of the inserted key
            new_tail_len = total_bits - (key_pos + lcp + 1)
            new_tail_bits, _ = self._bit_slice(key, key_pos + lcp + 1, new_tail_len)
            leaf = self._make_node(new_tail_bits, new_tail_len, value, None, None)
            if old_div_bit:
                internal.child_1 = new_node_hash
                internal.child_0 = leaf.hash()
            else:
                internal.child_0 = new_node_hash
                internal.child_1 = leaf.hash()

        # ➏ — rehash up to the root
        self._invalidate_hash(internal)
        internal_hash = internal.hash()
        self.nodes[internal_hash] = internal

        if not stack:
            self.root_hash = internal_hash
            return

        parent, _, dir_bit = stack.pop()
        if dir_bit == 0:
            parent.child_0 = internal_hash
        else:
            parent.child_1 = internal_hash
        self._invalidate_hash(parent)
        self._bubble(stack, parent.hash())

    def _make_node(
        self,
        prefix_bits: bytes,
        prefix_len: int,
        value: Optional[bytes],
        child0: Optional[bytes],
        child1: Optional[bytes],
    ) -> "PatriciaNode":
        """Create a PatriciaNode and register it in the cache under its hash."""
        node = PatriciaNode(prefix_len, prefix_bits, value, child0, child1)
        self.nodes[node.hash()] = node
        return node

    def _invalidate_hash(self, node: "PatriciaNode") -> None:
        """Clear cached hash so next .hash() recomputes."""
        node._hash = None  # type: ignore

    def _bubble(
        self,
        stack: "List[Tuple[PatriciaNode, bytes, int]]",
        new_hash: bytes
    ) -> None:
        """
        Propagate updated child-hash `new_hash` up the ancestor stack,
        rebasing each parent's pointer, invalidating and re-hashing.
        The final hash becomes the new root hash.
        """
        while stack:
            parent, old_hash, dir_bit = stack.pop()

            if dir_bit == 0:
                parent.child_0 = new_hash
            else:
                parent.child_1 = new_hash

            self._invalidate_hash(parent)
            new_hash = parent.hash()
            if new_hash != old_hash:
                self.nodes.pop(old_hash, None)
                self.nodes[new_hash] = parent

        self.root_hash = new_hash

    def _bit_slice(
        self,
        buf: bytes,
        start_bit: int,
        length: int
    ) -> tuple[bytes, int]:
        """
        Extract `length` bits from `buf` starting at `start_bit` (MSB-first),
        returning (bytes, bit_len) with zero-padding.
        """
        if length == 0:
            return b"", 0

        total = int.from_bytes(buf, "big")
        bits_in_buf = len(buf) * 8

        # shift so slice ends at LSB
        shift = bits_in_buf - (start_bit + length)
        slice_int = (total >> shift) & ((1 << length) - 1)

        # left-align to MSB of first byte
        pad = (8 - (length % 8)) % 8
        slice_int <<= pad
        byte_len = (length + 7) // 8
        return slice_int.to_bytes(byte_len, "big"), length
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def storage_setup(node: Any, config: dict) -> None:
    """Initialize hot/cold storage helpers on the node.

    Attaches in-memory hot-cache structures and byte budgets for hot and
    cold storage. Configured limits are coerced to int, falling back to
    the defaults (1 GiB hot, 10 GiB cold) when the value is absent or not
    coercible. When a cold-storage path is configured, its directory is
    created best-effort; on failure the path is cleared to None so the
    node degrades to memory-only cold storage.

    :param node: object to attach storage attributes to
    :param config: configuration mapping (may be empty)
    """
    node.hot_storage = {}
    node.hot_storage_hits = {}
    node.storage_index = {}
    node.hot_storage_size = 0
    node.hot_storage_limit = _limit_from_config(config, "hot_storage_limit", 1 << 30)

    node.cold_storage_size = 0
    node.cold_storage_limit = _limit_from_config(config, "cold_storage_limit", 10 << 30)

    cold_storage_path = config.get("cold_storage_path")
    if cold_storage_path:
        try:
            Path(cold_storage_path).mkdir(parents=True, exist_ok=True)
        except OSError:
            # Unusable path: disable on-disk cold storage rather than fail setup.
            cold_storage_path = None
    node.cold_storage_path = cold_storage_path


def _limit_from_config(config: dict, key: str, default: int) -> int:
    """Read an integer limit from `config`, returning `default` when the
    value is missing or cannot be coerced to int."""
    try:
        return int(config.get(key, default))
    except (TypeError, ValueError):
        return default
|