astreum 0.1.13__tar.gz → 0.1.15__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of astreum might be problematic.
- {astreum-0.1.13/src/astreum.egg-info → astreum-0.1.15}/PKG-INFO +2 -1
- {astreum-0.1.13 → astreum-0.1.15}/pyproject.toml +2 -1
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/storage.py +10 -10
- astreum-0.1.15/src/astreum/lispeum/utils.py +17 -0
- astreum-0.1.15/src/astreum/node/__init__.py +480 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/node/relay/__init__.py +72 -30
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/node/relay/envelope.py +5 -5
- astreum-0.1.15/src/astreum/node/storage/__init__.py +13 -0
- astreum-0.1.15/src/astreum/node/storage/merkle.py +734 -0
- astreum-0.1.13/src/astreum/node/models.py → astreum-0.1.15/src/astreum/node/storage/storage.py +41 -99
- astreum-0.1.15/src/astreum/node/storage/trie.py +146 -0
- astreum-0.1.15/src/astreum/node/storage/utils.py +137 -0
- astreum-0.1.15/src/astreum/node/utils.py +34 -0
- astreum-0.1.15/src/astreum/node/validation/__init__.py +84 -0
- astreum-0.1.15/src/astreum/node/validation/account.py +874 -0
- astreum-0.1.15/src/astreum/node/validation/block/__init__.py +12 -0
- astreum-0.1.15/src/astreum/node/validation/block/create.py +98 -0
- astreum-0.1.15/src/astreum/node/validation/block/model.py +81 -0
- astreum-0.1.15/src/astreum/node/validation/block/validate.py +196 -0
- astreum-0.1.15/src/astreum/node/validation/constants.py +15 -0
- astreum-0.1.15/src/astreum/node/validation/stake.py +229 -0
- astreum-0.1.15/src/astreum/node/validation/state.py +230 -0
- astreum-0.1.15/src/astreum/node/validation/vdf.py +80 -0
- {astreum-0.1.13 → astreum-0.1.15/src/astreum.egg-info}/PKG-INFO +2 -1
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum.egg-info/SOURCES.txt +17 -1
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum.egg-info/requires.txt +1 -0
- astreum-0.1.13/src/astreum/node/__init__.py +0 -592
- {astreum-0.1.13 → astreum-0.1.15}/LICENSE +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/README.md +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/setup.cfg +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/expression.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/parser.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/definition.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/all.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/any.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/fold.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/get.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/insert.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/map.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/position.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/list/remove.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/number/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/special/number/addition.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/tokenizer.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/machine/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/machine/environment.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/machine/error.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/node/relay/bucket.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/node/relay/message.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/node/relay/peer.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/node/relay/route.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/utils/__init__.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum/utils/bytes_format.py +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum.egg-info/dependency_links.txt +0 -0
- {astreum-0.1.13 → astreum-0.1.15}/src/astreum.egg-info/top_level.txt +0 -0
{astreum-0.1.13/src/astreum.egg-info → astreum-0.1.15}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: astreum
-Version: 0.1.13
+Version: 0.1.15
 Summary: Python library to interact with the Astreum blockchain and its Lispeum virtual machine.
 Author-email: "Roy R. O. Okello" <roy@stelar.xyz>
 Project-URL: Homepage, https://github.com/astreum/lib
@@ -13,6 +13,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: pycryptodomex==3.21.0
 Requires-Dist: cryptography==44.0.2
+Requires-Dist: blake3==1.0.4
 
 # lib
 
{astreum-0.1.13 → astreum-0.1.15}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "astreum"
-version = "0.1.13"
+version = "0.1.15"
 authors = [
     { name="Roy R. O. Okello", email="roy@stelar.xyz" },
 ]
@@ -15,6 +15,7 @@ classifiers = [
 dependencies = [
     "pycryptodomex==3.21.0",
     "cryptography==44.0.2",
+    "blake3==1.0.4"
 ]
 
 [project.urls]
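Once 0.1.15 is installed, the new pins above can be confirmed from Python with the standard library alone; a minimal sketch:

# Sketch: confirm the versions pulled in by this release.
from importlib.metadata import version

print(version("astreum"))  # expected: 0.1.15
print(version("blake3"))   # expected: 1.0.4 per the new pin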
{astreum-0.1.13 → astreum-0.1.15}/src/astreum/lispeum/storage.py
@@ -5,11 +5,11 @@ This module provides functions to convert Lispeum expressions to an
 object-based Merkle tree representation for storage and retrieval.
 """
 
-import hashlib
 import struct
 from typing import Dict, Tuple, Any, List, Optional
 
 from astreum.lispeum.expression import Expr
+from .utils import hash_data
 
 
 def expr_to_objects(expr: Any) -> Tuple[bytes, Dict[bytes, bytes]]:
@@ -61,7 +61,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -82,7 +82,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + value_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -95,7 +95,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + value_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -108,7 +108,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + value_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -121,7 +121,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + value_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -134,7 +134,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + value_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -155,7 +155,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + params_bytes + body_hash
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -172,7 +172,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + category_bytes + b'\0' + message_bytes + b'\0' + details_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
@@ -185,7 +185,7 @@ def _serialize_expr(expr: Any, objects: Dict[bytes, bytes]) -> bytes:
 
         # Create the object with leaf flag and body
         object_bytes = struct.pack("?", is_leaf) + type_bytes + value_bytes
-        object_hash =
+        object_hash = hash_data(object_bytes)
         objects[object_hash] = object_bytes
 
         return object_hash
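The practical effect of this change is that every object emitted by _serialize_expr is now keyed by the BLAKE3 digest of its serialized bytes instead of a hashlib digest. The sketch below checks that invariant; it assumes expr_to_objects returns the root hash followed by the object map, as its Tuple[bytes, Dict[bytes, bytes]] annotation suggests.

# Sketch: verify the BLAKE3 content-addressing invariant for a Lispeum expression.
import blake3
from astreum.lispeum.storage import expr_to_objects

def check_blake3_keys(expr) -> bytes:
    """Assert that each stored object is keyed by the BLAKE3 digest of its
    bytes, then return the root hash (expr is any astreum Lispeum Expr value)."""
    root_hash, objects = expr_to_objects(expr)
    for obj_hash, obj_bytes in objects.items():
        assert obj_hash == blake3.blake3(obj_bytes).digest()
    return root_hash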
astreum-0.1.15/src/astreum/lispeum/utils.py (new file)
@@ -0,0 +1,17 @@
+"""
+Utility functions for the Lispeum module.
+"""
+
+import blake3
+
+def hash_data(data: bytes) -> bytes:
+    """
+    Hash data using BLAKE3.
+
+    Args:
+        data: Data to hash
+
+    Returns:
+        32-byte BLAKE3 hash
+    """
+    return blake3.blake3(data).digest()
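For reference, a quick sanity check of the new helper; this is a minimal sketch that assumes astreum 0.1.15 and its pinned blake3==1.0.4 dependency are installed:

# Sketch: hash_data should return the 32-byte BLAKE3 digest of its input.
import blake3
from astreum.lispeum.utils import hash_data

digest = hash_data(b"astreum")
assert digest == blake3.blake3(b"astreum").digest()
assert len(digest) == 32
print(digest.hex())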
astreum-0.1.15/src/astreum/node/__init__.py (new file)
@@ -0,0 +1,480 @@
+import os
+import time
+import threading
+import random
+from typing import Tuple, Optional, List, Dict
+import json
+from cryptography.hazmat.primitives.asymmetric import ed25519
+from cryptography.hazmat.primitives import serialization
+
+from .relay import Relay, Topic
+from .relay.peer import Peer
+from .models import Storage, Block, Transaction
+from .machine import AstreumMachine
+from .utils import encode, decode, hash_data
+from astreum.lispeum.storage import store_expr, get_expr_from_storage
+
+# Import our validation components using the new functional approach
+from .validation import (
+    validate_block,
+    create_block,
+    create_genesis_block,
+    compute_vdf,
+    verify_vdf,
+    select_validator,
+    select_validator_for_slot,
+    Account,
+    get_validator_stake,
+    is_validator,
+    VALIDATION_ADDRESS,
+    BURN_ADDRESS,
+    MIN_STAKE_AMOUNT,
+    SLOT_DURATION,
+    VDF_DIFFICULTY
+)
+from .validation.state import (
+    add_block_to_state,
+    validate_and_apply_block,
+    create_account_state,
+    get_validator_for_slot,
+    select_best_chain,
+    compare_chains,
+    get_validator_set
+)
+from .validation.adapter import BlockAdapter, TransactionAdapter, AccountAdapter
+
+class Node:
+    def __init__(self, config: dict):
+        # Ensure config is a dictionary, but allow it to be None
+        self.config = config if config is not None else {}
+
+        # Handle validation key if provided
+        self.validation_private_key = None
+        self.validation_public_key = None
+        self.is_validator = False
+
+        # Extract validation private key from config
+        if 'validation_private_key' in self.config:
+            try:
+                key_bytes = bytes.fromhex(self.config['validation_private_key'])
+                self.validation_private_key = ed25519.Ed25519PrivateKey.from_private_bytes(key_bytes)
+                self.validation_public_key = self.validation_private_key.public_key()
+                self.is_validator = True
+
+                # Set validation_route to True in config so relay will join validation route
+                self.config['validation_route'] = True
+                print(f"Node is configured as a validator with validation key")
+            except Exception as e:
+                print(f"Error loading validation private key: {e}")
+
+        # Initialize relay with our config
+        self.relay = Relay(self.config)
+
+        # Get the node_id from relay
+        self.node_id = self.relay.node_id
+
+        # Initialize storage
+        self.storage = Storage(self.config)
+        self.storage.node = self  # Set the storage node reference to self
+
+        # Initialize blockchain state
+        self.blockchain = create_account_state(self.config)
+
+        # Store our validator info if we're a validator
+        if self.is_validator and self.validation_public_key:
+            self.validator_address = self.validation_public_key.public_bytes(
+                encoding=serialization.Encoding.Raw,
+                format=serialization.PublicFormat.Raw
+            )
+            self.validator_private_bytes = self.validation_private_key.private_bytes(
+                encoding=serialization.Encoding.Raw,
+                format=serialization.PrivateFormat.Raw,
+                encryption_algorithm=serialization.NoEncryption()
+            )
+            print(f"Registered validator with address: {self.validator_address.hex()}")
+        else:
+            self.validator_address = None
+            self.validator_private_bytes = None
+
+        # Latest block of the chain this node is following
+        self.latest_block = None
+        self.followed_chain_id = self.config.get('followed_chain_id', None)
+
+        # Initialize machine
+        self.machine = AstreumMachine(node=self)
+
+        # Register message handlers
+        self.relay.message_handlers[Topic.PEER_ROUTE] = self._handle_peer_route
+        self.relay.message_handlers[Topic.PING] = self._handle_ping
+        self.relay.message_handlers[Topic.PONG] = self._handle_pong
+        self.relay.message_handlers[Topic.OBJECT_REQUEST] = self._handle_object_request
+        self.relay.message_handlers[Topic.OBJECT_RESPONSE] = self._handle_object_response
+        self.relay.message_handlers[Topic.ROUTE_REQUEST] = self._handle_route_request
+        self.relay.message_handlers[Topic.ROUTE] = self._handle_route
+        self.relay.message_handlers[Topic.LATEST_BLOCK_REQUEST] = self._handle_latest_block_request
+        self.relay.message_handlers[Topic.LATEST_BLOCK] = self._handle_latest_block
+        self.relay.message_handlers[Topic.TRANSACTION] = self._handle_transaction
+        self.relay.message_handlers[Topic.BLOCK_REQUEST] = self._handle_block_request
+        self.relay.message_handlers[Topic.BLOCK_RESPONSE] = self._handle_block_response
+
+        # Initialize latest block from storage if available
+        self._initialize_latest_block()
+
+        # Candidate chains that might be adopted
+        self.candidate_chains = {}  # chain_id -> {'latest_block': block, 'timestamp': time.time()}
+        self.pending_blocks = {}  # block_hash -> {'block': block, 'timestamp': time.time()}
+
+        # Threads for validation and chain monitoring
+        self.running = False
+        self.main_chain_validation_thread = None
+        self.candidate_chain_validation_thread = None
+
+        # Pending transactions for a block
+        self.pending_transactions = {}  # tx_hash -> {'transaction': tx, 'timestamp': time.time()}
+
+        # Last block production attempt time
+        self.last_block_attempt_time = 0
+
+    def start(self):
+        """Start the node."""
+        self.running = True
+
+        # Start relay
+        self.relay.start()
+
+        # Start chain monitoring thread
+        self.main_chain_validation_thread = threading.Thread(
+            target=self._main_chain_validation_loop,
+            name="MainChainValidation"
+        )
+        self.main_chain_validation_thread.daemon = True
+        self.main_chain_validation_thread.start()
+
+        self.candidate_chain_validation_thread = threading.Thread(
+            target=self._candidate_chain_validation_loop,
+            name="CandidateChainValidation"
+        )
+        self.candidate_chain_validation_thread.daemon = True
+        self.candidate_chain_validation_thread.start()
+
+        # Set up recurring block query tasks
+        main_query_thread = threading.Thread(
+            target=self._block_query_loop,
+            args=('main',),
+            daemon=True
+        )
+        main_query_thread.start()
+
+        validation_query_thread = threading.Thread(
+            target=self._block_query_loop,
+            args=('validation',),
+            daemon=True
+        )
+        validation_query_thread.start()
+
+        print(f"Node started with ID {self.node_id.hex()}")
+
+    def stop(self):
+        """Stop the node and all its services."""
+        self.running = False
+
+        # Stop all threads
+        if self.main_chain_validation_thread and self.main_chain_validation_thread.is_alive():
+            self.main_chain_validation_thread.join(timeout=1.0)
+
+        if self.candidate_chain_validation_thread and self.candidate_chain_validation_thread.is_alive():
+            self.candidate_chain_validation_thread.join(timeout=1.0)
+
+        # Stop relay last
+        if self.relay:
+            self.relay.stop()
+
+        print("Node stopped")
+
+    def _main_chain_validation_loop(self):
+        """
+        Main validation loop for the primary blockchain.
+        This thread prioritizes validating blocks on the main chain we're following.
+        """
+        while self.running:
+            try:
+                # Update latest block if we don't have one yet
+                if not self.latest_block and hasattr(self.blockchain, 'get_latest_block'):
+                    self.latest_block = self.blockchain.get_latest_block()
+
+                # Process any blocks that extend our main chain immediately
+                self._process_main_chain_blocks()
+
+                # Attempt block production if we are a validator
+                if self.is_validator and self.validator_address:
+                    self._attempt_block_production()
+
+                # Cleanup old items
+                self._prune_pending_items()
+
+                # Sleep to prevent high CPU usage
+                time.sleep(0.1)  # Short sleep for main chain validation
+            except Exception as e:
+                print(f"Error in main chain validation loop: {e}")
+                time.sleep(1)  # Longer sleep on error
+
+    def _candidate_chain_validation_loop(self):
+        """
+        Validation loop for candidate chains (potential forks).
+        This thread handles validation of blocks from alternate chains
+        without slowing down the main chain processing.
+        """
+        while self.running:
+            try:
+                # Process candidate chains
+                self._evaluate_candidate_chains()
+
+                # Prune old candidate chains
+                self._prune_candidate_chains()
+
+                # Sleep longer for candidate chain validation (lower priority)
+                time.sleep(1)  # Longer sleep for candidate chain validation
+            except Exception as e:
+                print(f"Error in candidate chain validation loop: {e}")
+                time.sleep(2)  # Even longer sleep on error
+
+    def _prune_pending_items(self):
+        """Remove old pending blocks and transactions."""
+        current_time = time.time()
+
+        # Prune old pending blocks (older than 1 hour)
+        blocks_to_remove = [
+            block_hash for block_hash, data in self.pending_blocks.items()
+            if current_time - data['timestamp'] > 3600  # 1 hour
+        ]
+        for block_hash in blocks_to_remove:
+            del self.pending_blocks[block_hash]
+
+        # Prune old pending transactions (older than 30 minutes)
+        txs_to_remove = [
+            tx_hash for tx_hash, data in self.pending_transactions.items()
+            if current_time - data['timestamp'] > 1800  # 30 minutes
+        ]
+        for tx_hash in txs_to_remove:
+            del self.pending_transactions[tx_hash]
+
+    def _process_main_chain_blocks(self):
+        """
+        Process blocks that extend our current main chain.
+        Prioritizes blocks that build on our latest block.
+        """
+        # Skip if we don't have a latest block yet
+        if not self.latest_block:
+            return
+
+        # Get the hash of our latest block
+        latest_hash = self.latest_block.get_hash()
+
+        # Find any pending blocks that build on our latest block
+        main_chain_blocks = []
+        for block_hash, data in list(self.pending_blocks.items()):
+            block = data['block']
+
+            # Check if this block extends our latest block
+            if block.previous == latest_hash:
+                main_chain_blocks.append(block)
+
+        # Process found blocks
+        for block in main_chain_blocks:
+            self._validate_and_process_main_chain_block(block)
+
+    def _validate_and_process_main_chain_block(self, block: Block):
+        """
+        Validate and process a block that extends our main chain.
+
+        Args:
+            block: Block to validate and process
+        """
+        try:
+            # Validate block
+            is_valid = validate_block(block, self.blockchain.get_accounts_at_block(block.previous), self.blockchain.get_blocks())
+
+            if is_valid:
+                # Apply block to our state
+                success = validate_and_apply_block(self.blockchain, block)
+                if success:
+                    print(f"Applied valid block {block.number} to blockchain state")
+                    self._update_latest_block(block)
+                    blocks_to_remove = [block.get_hash()]
+                    for block_hash in blocks_to_remove:
+                        if block_hash in self.pending_blocks:
+                            del self.pending_blocks[block_hash]
+                    print(f"Added block {block.number} to blockchain")
+                    return True
+        except Exception as e:
+            print(f"Error validating main chain block {block.number}: {e}")
+
+        return False
+
+    def _evaluate_candidate_chains(self):
+        """
+        Evaluate candidate chains to determine if any should become our main chain.
+        This will validate pending blocks and look for chains with higher cumulative difficulty.
+        """
+        # Skip if no candidate chains
+        if not self.candidate_chains:
+            return
+
+        # For each candidate chain, validate blocks and calculate metrics
+        for chain_id, data in list(self.candidate_chains.items()):
+            latest_candidate_block = data['latest_block']
+
+            # Build the chain backwards
+            chain_blocks = self._build_chain_from_latest(latest_candidate_block)
+
+            # Skip if we couldn't build a complete chain
+            if not chain_blocks:
+                continue
+
+            # Validate the entire chain
+            valid_chain = self._validate_candidate_chain(chain_blocks)
+
+            # If valid and better than our current chain, switch to it
+            if valid_chain and self._is_better_chain(chain_blocks):
+                self._switch_to_new_chain(chain_blocks)
+
+    def _build_chain_from_latest(self, latest_block: Block) -> List[Block]:
+        """
+        Build a chain from the latest block back to a known point in our blockchain.
+
+        Args:
+            latest_block: Latest block in the candidate chain
+
+        Returns:
+            List of blocks in the chain, ordered from oldest to newest
+        """
+        chain_blocks = [latest_block]
+        current_block = latest_block
+
+        # Track visited blocks to avoid cycles
+        visited = {current_block.get_hash()}
+
+        # Build chain backwards until we either:
+        # 1. Find a block in our main chain
+        # 2. Run out of blocks
+        # 3. Detect a cycle
+        while current_block.number > 0:
+            previous_hash = current_block.previous
+
+            # Check if we have this block in our blockchain
+            if hasattr(self.blockchain, 'has_block') and self.blockchain.has_block(previous_hash):
+                # Found connection to our main chain
+                previous_block = self.blockchain.get_block(previous_hash)
+                chain_blocks.insert(0, previous_block)
+                break
+
+            # Check if block is in pending blocks
+            elif previous_hash in self.pending_blocks:
+                previous_block = self.pending_blocks[previous_hash]['block']
+
+                # Check for cycles
+                if previous_hash in visited:
+                    print(f"Cycle detected in candidate chain at block {previous_block.number}")
+                    return []
+
+                visited.add(previous_hash)
+                chain_blocks.insert(0, previous_block)
+                current_block = previous_block
+            else:
+                # Missing block, cannot validate the chain
+                print(f"Missing block {previous_hash.hex()} in candidate chain")
+                return []
+
+        return chain_blocks
+
+    def _validate_candidate_chain(self, chain_blocks: List[Block]) -> bool:
+        """
+        Validate a candidate chain of blocks.
+
+        Args:
+            chain_blocks: List of blocks in the chain (oldest to newest)
+
+        Returns:
+            True if the chain is valid, False otherwise
+        """
+        # Validate each block in the chain
+        for i, block in enumerate(chain_blocks):
+            # Skip first block, it's either genesis or a block we already have
+            if i == 0:
+                continue
+
+            # Validate block connections
+            if block.previous != chain_blocks[i-1].get_hash():
+                print(f"Invalid chain: block {block.number} does not reference previous block")
+                return False
+
+            # Validate block
+            is_valid = validate_block(block, self.blockchain.get_accounts_at_block(block.previous), self.blockchain.get_blocks())
+            if not is_valid:
+                print(f"Invalid chain: block {block.number} is invalid")
+                return False
+
+        return True
+
+    def _is_better_chain(self, chain_blocks: List[Block]) -> bool:
+        """
+        Determine if a candidate chain is better than our current chain.
+
+        Args:
+            chain_blocks: List of blocks in the candidate chain
+
+        Returns:
+            True if the candidate chain is better, False otherwise
+        """
+        # Get the latest block from the candidate chain
+        candidate_latest = chain_blocks[-1]
+
+        # If we don't have a latest block, any valid chain is better
+        if not self.latest_block:
+            return True
+
+        # Compare block numbers (longest chain rule)
+        if candidate_latest.number > self.latest_block.number:
+            print(f"Candidate chain is longer: {candidate_latest.number} vs {self.latest_block.number}")
+            return True
+
+        return False
+
+    def _switch_to_new_chain(self, chain_blocks: List[Block]):
+        """
+        Switch to a new chain by adding all blocks to our blockchain.
+
+        Args:
+            chain_blocks: List of blocks in the chain (oldest to newest)
+        """
+        # Find the point where the chains diverge
+        divergence_point = 0
+        for i, block in enumerate(chain_blocks):
+            # Check if we have this block in our blockchain
+            if hasattr(self.blockchain, 'has_block') and self.blockchain.has_block(block.get_hash()):
+                divergence_point = i + 1
+            else:
+                break
+
+        # Add all blocks after the divergence point
+        for i in range(divergence_point, len(chain_blocks)):
+            block = chain_blocks[i]
+
+            # Add block to blockchain
+            if hasattr(self.blockchain, 'add_block'):
+                try:
+                    self.blockchain.add_block(block)
+
+                    # Remove from pending blocks
+                    block_hash = block.get_hash()
+                    if block_hash in self.pending_blocks:
+                        del self.pending_blocks[block_hash]
+
+                    print(f"Added block {block.number} to blockchain")
+                except Exception as e:
+                    print(f"Error adding block {block.number} to blockchain: {e}")
+                    return
+
+        # Update latest block
+        self._update_latest_block(chain_blocks[-1])
+        print(f"Switched to new chain, latest block: {self.latest_block.number}")