astreum 0.1.14__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of astreum might be problematic; consult the package registry's advisory page for details.

@@ -1,15 +1,16 @@
1
- import socket
2
- from pathlib import Path
3
- from typing import Optional, Tuple, Dict
4
- from astreum.machine import AstreumMachine
5
- from .relay import Relay
6
- from .relay.message import Topic
7
- from .relay.route import RouteTable
8
- from .relay.peer import Peer
9
- import os
10
- import struct
1
+ """
2
+ Storage implementation for the Astreum node.
3
+
4
+ This module provides a Storage class that manages persistent storage
5
+ of data using either in-memory dictionaries or file system storage.
6
+ It supports network-based retrieval for missing data.
7
+ """
8
+
11
9
  import threading
12
10
  import time
11
+ from pathlib import Path
12
+ from typing import Optional, Dict, Set, Tuple, List, Any
13
+
13
14
 
14
15
  class Storage:
15
16
  def __init__(self, config: dict):
@@ -212,98 +213,39 @@ class Storage:
212
213
  # Try to get the object (which may start a network request)
213
214
  obj_data = self.get(current_hash, timeout)
214
215
  if obj_data is None:
215
- # Failed to get this object, but we continue with the rest
216
- print(f"Warning: Failed to get object {current_hash.hex()}")
216
+ # Object not found, continue with other objects
217
217
  continue
218
218
 
219
- # Store the object in our result
219
+ # Store the object
220
220
  objects[current_hash] = obj_data
221
221
 
222
- # Only process non-leaf nodes for recursion
223
- try:
224
- # Extract leaf flag and type
225
- is_leaf = struct.unpack("?", obj_data[0:1])[0]
226
- if is_leaf:
227
- # Leaf node, no need to recurse
228
- continue
229
-
230
- type_indicator = obj_data[1:2]
231
- next_depth = current_depth + 1
232
-
233
- if type_indicator == b'L': # List
234
- # Non-leaf list has child element hashes
235
- elements_bytes = obj_data[2:]
236
- element_hashes = [elements_bytes[i:i+32] for i in range(0, len(elements_bytes), 32)]
237
-
238
- # Add each element hash to the queue
239
- for elem_hash in element_hashes:
240
- pending_queue.append((elem_hash, next_depth))
241
-
242
- elif type_indicator == b'F': # Function
243
- # Non-leaf function has body hash
244
- remaining_bytes = obj_data[2:]
245
-
246
- # Find the separator between params and body hash
247
- params_end = remaining_bytes.find(b',', remaining_bytes.rfind(b','))
248
- if params_end == -1:
249
- params_end = 0 # No params
250
-
251
- body_hash = remaining_bytes[params_end+1:]
252
-
253
- # Add body hash to the queue
254
- pending_queue.append((body_hash, next_depth))
255
-
256
- except Exception as e:
257
- print(f"Error processing object {current_hash.hex()}: {e}")
258
- continue
259
-
222
+ # Queue child objects if not at max depth
223
+ if current_depth < max_depth:
224
+ # Try to detect child objects in the data
225
+ # This depends on the data format, so this is just a placeholder
226
+ # In a real implementation, you would parse the data based on its format
227
+ # and extract references to other objects
228
+ child_hashes = self._extract_child_hashes(obj_data)
229
+ for child_hash in child_hashes:
230
+ pending_queue.append((child_hash, current_depth + 1))
231
+
260
232
  return objects
261
-
262
- class Account:
263
- def __init__(self, public_key: bytes, balance: int, counter: int):
264
- self.public_key = public_key
265
- self.balance = balance
266
- self.counter = counter
267
-
268
- class Block:
269
- def __init__(
270
- self,
271
- accounts: bytes,
272
- chain: Chain,
273
- difficulty: int,
274
- delay: int,
275
- number: int,
276
- previous: Block,
277
- receipts: bytes,
278
- aster: int,
279
- time: int,
280
- transactions: bytes,
281
- validator: Account,
282
- signature: bytes
283
- ):
284
- self.accounts = accounts
285
- self.chain = chain
286
- self.difficulty = difficulty
287
- self.delay = delay
288
- self.number = number
289
- self.previous = previous
290
- self.receipts = receipts
291
- self.aster = aster
292
- self.time = time
293
- self.transactions = transactions
294
- self.validator = validator
295
- self.signature = signature
296
-
297
- class Chain:
298
- def __init__(self, latest_block: Block):
299
- self.latest_block = latest_block
233
+
234
+ def _extract_child_hashes(self, data: bytes) -> List[bytes]:
235
+ """
236
+ Extract child object hashes from object data.
237
+ This is a placeholder method that should be overridden or adapted based on the object format.
238
+
239
+ Args:
240
+ data: The object data
241
+
242
+ Returns:
243
+ List of child object hashes
244
+ """
245
+ # In a real implementation, this would parse the data based on its format
246
+ # and extract references to other objects
247
+ # For example, if the data is a serialized Merkle node, you might extract
248
+ # left and right child hashes
300
249
 
301
- class Transaction:
302
- def __init__(self, chain: Chain, receipient: Account, sender: Account, counter: int, amount: int, signature: bytes, data: bytes):
303
- self.chain = chain
304
- self.receipient = receipient
305
- self.sender = sender
306
- self.counter = counter
307
- self.amount = amount
308
- self.signature = signature
309
- self.data = data
250
+ # For now, return an empty list
251
+ return []
@@ -0,0 +1,146 @@
1
+ from astreum.utils.bytes_format import encode, decode
2
+ from astreum.utils.hash import hash_data
3
+ from typing import Optional, List
4
+ from ..storage import Storage
5
+
6
+
7
class Trie:
    """
    A byte-keyed trie backed by a Storage instance.

    Attributes:
        root: The root TrieNode, or None for an empty trie.
        storage: Storage used to resolve child nodes by their 32-byte hash.
    """

    def __init__(self, root: 'TrieNode', storage: Storage):
        # 'TrieNode' is quoted because the class is defined later in this
        # module; an unquoted annotation raised NameError at import time.
        self.root = root
        self.storage = storage

    def insert(self, key: bytes, data: bytes):
        """Insert *key* -> *data*, replacing any existing value for the key."""
        self.root = self._insert(self.root, key, data)

    def _insert(self, node: 'TrieNode', key: bytes, data: bytes) -> 'TrieNode':
        """
        Recursive insert helper; returns the (possibly new) subtree root.

        NOTE(review): both the ``<`` and ``>`` branches recurse into the
        single ``children`` attribute, so in-memory inserts behave like a
        degenerate BST — confirm against the intended on-disk trie layout.
        """
        if node is None:
            return TrieNode(key, data)
        if key < node.key:
            node.children = self._insert(node.children, key, data)
        elif key > node.key:
            node.children = self._insert(node.children, key, data)
        else:
            # Exact key match: overwrite the stored value.
            node.data = data
        return node

    def lookup(self, key: bytes) -> Optional[bytes]:
        """
        Look up a key in the trie.

        Args:
            key: The key to look up.

        Returns:
            The data associated with the key, or None if not found.
        """
        return self._lookup(self.root, key)

    def _lookup(self, node: Optional['TrieNode'], key: bytes) -> Optional[bytes]:
        """
        Recursive helper for looking up a key in the trie.

        Args:
            node: The current node being examined.
            key: The key to look up.

        Returns:
            The data associated with the key, or None if not found.
        """
        if node is None:
            return None

        # Exact match: return the stored data.
        if node.key == key:
            return node.data

        # Propagate the trie-level storage reference so the node can
        # resolve its children by hash.
        if node.storage is None:
            node.storage = self.storage

        # child_lookup already returns a deserialized TrieNode (or None),
        # so traverse into it directly. (Bug fix: the previous code then
        # called TrieNode.from_bytes on an undefined `child_data` variable,
        # which raised NameError on every non-root traversal.)
        child_node = node.child_lookup(key)
        if child_node is None:
            return None
        return self._lookup(child_node, key)
68
+
69
class TrieNode:
    """
    A node in a trie.

    Attributes:
        key: The key of the node.
        data: The data stored in the node (None for internal nodes).
        children: Concatenated 32-byte hashes of child nodes, or None.
        storage: Reference to the storage service (not serialized).
    """

    def __init__(self, key: bytes, data: Optional[bytes] = None,
                 children: Optional[bytes] = None, storage=None):
        """
        Initialize a new TrieNode.

        Args:
            key: The key of the node.
            data: The data stored in the node.
            children: A byte string of child hashes, each 32 bytes long.
            storage: Storage instance for retrieving child nodes
                (not serialized).
        """
        self.key = key
        self.data = data
        self.children = children
        self.storage = storage

    def to_bytes(self) -> bytes:
        """Serialize the node's persistent fields (storage is excluded)."""
        return encode([self.key, self.data, self.children])

    @classmethod
    def from_bytes(cls, data: bytes, storage=None) -> 'TrieNode':
        """
        Deserialize node data and optionally attach a storage reference.

        Args:
            data: The serialized node data.
            storage: Optional storage instance to attach to the node.

        Returns:
            A new TrieNode instance.
        """
        key, node_data, children = decode(data)
        return cls(key, node_data, children, storage)

    def hash(self) -> bytes:
        """Return the content hash of the serialized node (via hash_data)."""
        return hash_data(self.to_bytes())

    def child_lookup(self, key: bytes) -> Optional['TrieNode']:
        """
        Resolve this node's children via storage and return the first child
        whose key shares a full common prefix with *key*, or None.

        The return annotation is quoted: an unquoted ``Optional[TrieNode]``
        inside the class body raised NameError at class-creation time,
        because ``TrieNode`` is not bound until the class statement
        completes. (Bug fix.)

        NOTE(review): despite the original docstring's claim of a binary
        lookup, the scan over child hashes is linear — confirm intent.
        """
        if self.children is None or self.storage is None:
            return None

        # Split the concatenated child-hash blob into complete 32-byte
        # hashes, dropping any truncated trailing fragment.
        children_hashes = [self.children[i:i + 32]
                           for i in range(0, len(self.children), 32)
                           if i + 32 <= len(self.children)]

        for child_hash in children_hashes:
            child_bytes = self.storage.get(child_hash)
            if child_bytes is None:
                continue  # unresolvable child: skip rather than fail

            child_node = TrieNode.from_bytes(child_bytes, self.storage)

            # Match when either key is a prefix of the other.
            shared = min(len(child_node.key), len(key))
            if child_node.key[:shared] == key[:shared]:
                return child_node

        return None
@@ -0,0 +1,137 @@
1
+ """
2
+ Utility functions for working with the storage module and Merkle trees,
3
+ with special focus on validator stake operations and binary searches.
4
+ """
5
+
6
+ from typing import List, Dict, Optional, Tuple, Any, Callable, TypeVar
7
+ from .merkle import MerkleTree, MerkleProof, MerkleNode, MerkleNodeType
8
+ from .merkle import find_first, find_all, map, binary_search
9
+
10
+ T = TypeVar('T')
11
+
12
+
13
def create_ordered_merkle_tree(items: List[bytes], storage=None) -> Tuple[bytes, MerkleTree]:
    """
    Build an ordered Merkle tree from *items*.

    Args:
        items: Items to include in the tree; they are sorted by the tree.
        storage: Optional storage backend used to persist the tree.

    Returns:
        A (root_hash, merkle_tree) tuple.
    """
    merkle = MerkleTree(storage)
    return merkle.add_sorted(items), merkle
27
+
28
+
29
def query_validator_stake(storage, stake_root_hash: bytes, validator_address: bytes) -> Optional[int]:
    """
    Query a validator's stake by their address from a stake Merkle tree.

    Args:
        storage: Storage instance used by the Merkle tree.
        stake_root_hash: Root hash of the stake Merkle tree.
        validator_address: Address of the validator to look up.

    Returns:
        The validator's stake amount as an integer, or None if not found.
    """
    addr_len = len(validator_address)

    def compare_address(entry: bytes) -> int:
        # Entry layout is assumed to be [address][stake]; compare on the
        # address prefix only.
        prefix = entry[:addr_len]
        if prefix < validator_address:
            return 1   # entry sorts before the target
        if prefix > validator_address:
            return -1  # entry sorts after the target
        return 0       # exact match

    entry = binary_search(storage, stake_root_hash, compare_address)
    if not entry:
        return None

    # Stake amount is assumed to be the 8-byte big-endian integer that
    # immediately follows the address.
    return int.from_bytes(entry[addr_len:addr_len + 8], byteorder='big')
65
+
66
+
67
def find_validator_stakes(storage, stake_root_hash: bytes, min_stake: int = 0) -> List[Tuple[bytes, int]]:
    """
    Find all validators with stakes above a minimum threshold.

    Args:
        storage: Storage instance used by the Merkle tree.
        stake_root_hash: Root hash of the stake Merkle tree.
        min_stake: Minimum stake threshold (default: 0).

    Returns:
        List of (validator_address, stake_amount) tuples.
    """
    def split_entry(entry: bytes) -> Tuple[bytes, int]:
        # Entry layout is assumed to be [address][8-byte big-endian stake];
        # adjust the split point if the actual format differs.
        cut = len(entry) - 8
        return entry[:cut], int.from_bytes(entry[cut:], byteorder='big')

    def has_min_stake(entry: bytes) -> bool:
        # Keep only entries whose stake meets the threshold.
        return split_entry(entry)[1] >= min_stake

    matches = find_all(storage, stake_root_hash, has_min_stake)
    return [split_entry(entry) for entry in matches]
97
+
98
+
99
def get_total_stake(storage, stake_root_hash: bytes) -> int:
    """
    Calculate the total stake across all validators.

    Args:
        storage: Storage instance used by the Merkle tree.
        stake_root_hash: Root hash of the stake Merkle tree.

    Returns:
        Total stake amount.
    """
    def stake_of(entry: bytes) -> int:
        # Entry layout is assumed to be [address][8-byte big-endian stake];
        # adjust the offset if the actual format differs.
        return int.from_bytes(entry[len(entry) - 8:], byteorder='big')

    # NOTE: `map` here is the Merkle-tree traversal imported from .merkle,
    # which shadows the builtin of the same name in this module.
    return sum(map(storage, stake_root_hash, stake_of))
119
+
120
+
121
def query_with_custom_resolver(storage, root_hash: bytes,
                               resolver_fn: Callable[[bytes], T]) -> T:
    """
    Query a Merkle tree using a custom resolver function.

    A general-purpose hook that lets callers apply arbitrary traversal and
    extraction logic to the tree identified by *root_hash*.

    Args:
        storage: Storage instance used by the Merkle tree.
        root_hash: Root hash of the Merkle tree.
        resolver_fn: Function that takes a root hash and returns a result.

    Returns:
        Whatever the resolver function returns.
    """
    # `storage` is accepted only for interface symmetry with the other
    # query helpers; the resolver receives just the root hash.
    result = resolver_fn(root_hash)
    return result
astreum/node/utils.py ADDED
@@ -0,0 +1,34 @@
1
+ """
2
+ Utility functions for the Astreum blockchain.
3
+ """
4
+
5
+ import blake3
6
+
7
def hash_data(data: bytes) -> bytes:
    """
    Compute the 32-byte BLAKE3 digest of *data*.

    Args:
        data: Raw bytes to hash.

    Returns:
        The 32-byte BLAKE3 hash.
    """
    hasher = blake3.blake3(data)
    return hasher.digest()
18
+
19
def hash_object(obj) -> bytes:
    """
    Hash an arbitrary Python object.

    Bytes are hashed directly; strings are UTF-8 encoded first; anything
    else is converted via ``str()`` and then UTF-8 encoded.

    Args:
        obj: Python object to hash.

    Returns:
        32-byte BLAKE3 hash.
    """
    # Normalize the input to bytes once, then hash a single time.
    if isinstance(obj, bytes):
        raw = obj
    elif isinstance(obj, str):
        raw = obj.encode('utf-8')
    else:
        raw = str(obj).encode('utf-8')
    return hash_data(raw)
@@ -0,0 +1,84 @@
1
+ """
2
+ Validation module for the Astreum blockchain.
3
+
4
+ This module provides functions for validating blocks and transactions,
5
+ computing and verifying VDFs, and selecting validators.
6
+ """
7
+
8
+ # Export validation functions
9
+ from .block import (
10
+ validate_block,
11
+ create_block,
12
+ create_genesis_block,
13
+ select_validator,
14
+ select_validator_for_slot
15
+ )
16
+
17
+ # Export VDF functions
18
+ from .vdf import (
19
+ compute_vdf,
20
+ verify_vdf,
21
+ validate_block_vdf
22
+ )
23
+
24
+ # Export account functions
25
+ from .account import (
26
+ Account,
27
+ get_validator_stake,
28
+ is_validator
29
+ )
30
+
31
+ # Export constants
32
+ from .constants import (
33
+ VALIDATION_ADDRESS,
34
+ BURN_ADDRESS,
35
+ MIN_STAKE_AMOUNT,
36
+ SLOT_DURATION,
37
+ VDF_DIFFICULTY
38
+ )
39
+
40
+ # Export blockchain state functions
41
+ from .state import (
42
+ add_block_to_state,
43
+ validate_and_apply_block,
44
+ create_account_state,
45
+ get_validator_for_slot,
46
+ select_best_chain,
47
+ compare_chains,
48
+ get_validator_set
49
+ )
50
+
51
+ __all__ = [
52
+ # Block validation
53
+ 'validate_block',
54
+ 'create_block',
55
+ 'create_genesis_block',
56
+ 'select_validator',
57
+ 'select_validator_for_slot',
58
+
59
+ # VDF functions
60
+ 'compute_vdf',
61
+ 'verify_vdf',
62
+ 'validate_block_vdf',
63
+
64
+ # Account functions
65
+ 'Account',
66
+ 'get_validator_stake',
67
+ 'is_validator',
68
+
69
+ # Constants
70
+ 'VALIDATION_ADDRESS',
71
+ 'BURN_ADDRESS',
72
+ 'MIN_STAKE_AMOUNT',
73
+ 'SLOT_DURATION',
74
+ 'VDF_DIFFICULTY',
75
+
76
+ # Blockchain state
77
+ 'add_block_to_state',
78
+ 'validate_and_apply_block',
79
+ 'create_account_state',
80
+ 'get_validator_for_slot',
81
+ 'select_best_chain',
82
+ 'compare_chains',
83
+ 'get_validator_set'
84
+ ]