astreum 0.1.14__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of astreum might be problematic; review the changes shown in the diff below for more details.

astreum/node/__init__.py CHANGED
@@ -1,19 +1,48 @@
1
1
  import os
2
- import hashlib
3
2
  import time
4
3
  import threading
5
4
  import random
6
- from typing import Tuple, Optional, List
5
+ from typing import Tuple, Optional, List, Dict
7
6
  import json
8
7
  from cryptography.hazmat.primitives.asymmetric import ed25519
8
+ from cryptography.hazmat.primitives import serialization
9
9
 
10
10
  from .relay import Relay, Topic
11
11
  from .relay.peer import Peer
12
12
  from .models import Storage, Block, Transaction
13
13
  from .machine import AstreumMachine
14
- from .utils import encode, decode
14
+ from .utils import encode, decode, hash_data
15
15
  from astreum.lispeum.storage import store_expr, get_expr_from_storage
16
16
 
17
+ # Import our validation components using the new functional approach
18
+ from .validation import (
19
+ validate_block,
20
+ create_block,
21
+ create_genesis_block,
22
+ compute_vdf,
23
+ verify_vdf,
24
+ select_validator,
25
+ select_validator_for_slot,
26
+ Account,
27
+ get_validator_stake,
28
+ is_validator,
29
+ VALIDATION_ADDRESS,
30
+ BURN_ADDRESS,
31
+ MIN_STAKE_AMOUNT,
32
+ SLOT_DURATION,
33
+ VDF_DIFFICULTY
34
+ )
35
+ from .validation.state import (
36
+ add_block_to_state,
37
+ validate_and_apply_block,
38
+ create_account_state,
39
+ get_validator_for_slot,
40
+ select_best_chain,
41
+ compare_chains,
42
+ get_validator_set
43
+ )
44
+ from .validation.adapter import BlockAdapter, TransactionAdapter, AccountAdapter
45
+
17
46
  class Node:
18
47
  def __init__(self, config: dict):
19
48
  # Ensure config is a dictionary, but allow it to be None
@@ -48,6 +77,25 @@ class Node:
48
77
  self.storage = Storage(self.config)
49
78
  self.storage.node = self # Set the storage node reference to self
50
79
 
80
+ # Initialize blockchain state
81
+ self.blockchain = create_account_state(self.config)
82
+
83
+ # Store our validator info if we're a validator
84
+ if self.is_validator and self.validation_public_key:
85
+ self.validator_address = self.validation_public_key.public_bytes(
86
+ encoding=serialization.Encoding.Raw,
87
+ format=serialization.PublicFormat.Raw
88
+ )
89
+ self.validator_private_bytes = self.validation_private_key.private_bytes(
90
+ encoding=serialization.Encoding.Raw,
91
+ format=serialization.PrivateFormat.Raw,
92
+ encryption_algorithm=serialization.NoEncryption()
93
+ )
94
+ print(f"Registered validator with address: {self.validator_address.hex()}")
95
+ else:
96
+ self.validator_address = None
97
+ self.validator_private_bytes = None
98
+
51
99
  # Latest block of the chain this node is following
52
100
  self.latest_block = None
53
101
  self.followed_chain_id = self.config.get('followed_chain_id', None)
@@ -66,634 +114,367 @@ class Node:
66
114
  self.relay.message_handlers[Topic.LATEST_BLOCK_REQUEST] = self._handle_latest_block_request
67
115
  self.relay.message_handlers[Topic.LATEST_BLOCK] = self._handle_latest_block
68
116
  self.relay.message_handlers[Topic.TRANSACTION] = self._handle_transaction
117
+ self.relay.message_handlers[Topic.BLOCK_REQUEST] = self._handle_block_request
118
+ self.relay.message_handlers[Topic.BLOCK_RESPONSE] = self._handle_block_response
69
119
 
70
120
  # Initialize latest block from storage if available
71
121
  self._initialize_latest_block()
72
122
 
73
123
  # Candidate chains that might be adopted
74
124
  self.candidate_chains = {} # chain_id -> {'latest_block': block, 'timestamp': time.time()}
125
+ self.pending_blocks = {} # block_hash -> {'block': block, 'timestamp': time.time()}
75
126
 
76
- # Block query timers for different routes
127
+ # Threads for validation and chain monitoring
77
128
  self.running = False
78
- self.block_query_threads = []
129
+ self.main_chain_validation_thread = None
130
+ self.candidate_chain_validation_thread = None
131
+
132
+ # Pending transactions for a block
133
+ self.pending_transactions = {} # tx_hash -> {'transaction': tx, 'timestamp': time.time()}
79
134
 
135
+ # Last block production attempt time
136
+ self.last_block_attempt_time = 0
137
+
80
138
  def start(self):
81
- """Start the node and all its services."""
139
+ """Start the node."""
82
140
  self.running = True
83
141
 
84
- # Start periodic block query thread for validation route
142
+ # Start relay
143
+ self.relay.start()
144
+
145
+ # Start chain monitoring thread
146
+ self.main_chain_validation_thread = threading.Thread(
147
+ target=self._main_chain_validation_loop,
148
+ name="MainChainValidation"
149
+ )
150
+ self.main_chain_validation_thread.daemon = True
151
+ self.main_chain_validation_thread.start()
152
+
153
+ self.candidate_chain_validation_thread = threading.Thread(
154
+ target=self._candidate_chain_validation_loop,
155
+ name="CandidateChainValidation"
156
+ )
157
+ self.candidate_chain_validation_thread.daemon = True
158
+ self.candidate_chain_validation_thread.start()
159
+
160
+ # Set up recurring block query tasks
161
+ main_query_thread = threading.Thread(
162
+ target=self._block_query_loop,
163
+ args=('main',),
164
+ daemon=True
165
+ )
166
+ main_query_thread.start()
167
+
85
168
  validation_query_thread = threading.Thread(
86
- target=self._periodic_validation_route_query,
169
+ target=self._block_query_loop,
170
+ args=('validation',),
87
171
  daemon=True
88
172
  )
89
173
  validation_query_thread.start()
90
- self.block_query_threads.append(validation_query_thread)
91
174
 
92
175
  print(f"Node started with ID {self.node_id.hex()}")
93
- print(f"Listening on port {self.relay.incoming_port}")
94
-
176
+
95
177
  def stop(self):
96
178
  """Stop the node and all its services."""
97
179
  self.running = False
98
180
 
99
181
  # Stop all threads
100
- for thread in self.block_query_threads:
101
- if thread.is_alive():
102
- thread.join(timeout=1.0) # Give threads 1 second to shut down
103
-
182
+ if self.main_chain_validation_thread and self.main_chain_validation_thread.is_alive():
183
+ self.main_chain_validation_thread.join(timeout=1.0)
184
+
185
+ if self.candidate_chain_validation_thread and self.candidate_chain_validation_thread.is_alive():
186
+ self.candidate_chain_validation_thread.join(timeout=1.0)
187
+
188
+ # Stop relay last
189
+ if self.relay:
190
+ self.relay.stop()
191
+
104
192
  print("Node stopped")
105
-
106
- def _periodic_validation_route_query(self):
107
- """Periodically query random peers in the validation route for latest blocks."""
193
+
194
+ def _main_chain_validation_loop(self):
195
+ """
196
+ Main validation loop for the primary blockchain.
197
+ This thread prioritizes validating blocks on the main chain we're following.
198
+ """
108
199
  while self.running:
109
200
  try:
110
- # Query 3 random peers from validation route for latest block
111
- self._query_random_peers_for_latest_block(route_type=1, count=3)
201
+ # Update latest block if we don't have one yet
202
+ if not self.latest_block and hasattr(self.blockchain, 'get_latest_block'):
203
+ self.latest_block = self.blockchain.get_latest_block()
112
204
 
113
- # Prune old candidate chains periodically
114
- self._prune_candidate_chains()
205
+ # Process any blocks that extend our main chain immediately
206
+ self._process_main_chain_blocks()
207
+
208
+ # Attempt block production if we are a validator
209
+ if self.is_validator and self.validator_address:
210
+ self._attempt_block_production()
211
+
212
+ # Cleanup old items
213
+ self._prune_pending_items()
115
214
 
116
- # Sleep according to validator status
117
- if self.is_validator:
118
- # Validators check more frequently (every second)
119
- time.sleep(1)
120
- else:
121
- # Regular nodes check less frequently (every 3 seconds)
122
- time.sleep(3)
215
+ # Sleep to prevent high CPU usage
216
+ time.sleep(0.1) # Short sleep for main chain validation
123
217
  except Exception as e:
124
- print(f"Error in validation route query: {e}")
125
- time.sleep(1) # Sleep briefly before retrying
218
+ print(f"Error in main chain validation loop: {e}")
219
+ time.sleep(1) # Longer sleep on error
126
220
 
127
- def _query_random_peers_for_latest_block(self, route_type: int, count: int = 3):
221
+ def _candidate_chain_validation_loop(self):
128
222
  """
129
- Query random peers from specified route for latest block.
130
-
131
- Args:
132
- route_type (int): Route type (0 for peer, 1 for validation)
133
- count (int): Number of random peers to query
223
+ Validation loop for candidate chains (potential forks).
224
+ This thread handles validation of blocks from alternate chains
225
+ without slowing down the main chain processing.
134
226
  """
135
- # Only continue if we're tracking this route
136
- if not self.relay.is_tracking_route(route_type):
137
- return
138
-
139
- # Get random peers from the route
140
- random_peers = self.relay.get_random_peers_from_route(route_type, count)
141
-
142
- # Query each peer for latest block
143
- for peer in random_peers:
227
+ while self.running:
144
228
  try:
145
- # Create empty request message
146
- request_data = b''
229
+ # Process candidate chains
230
+ self._evaluate_candidate_chains()
147
231
 
148
- # Send request to peer
149
- addr = (peer.ip, peer.port)
150
- self.relay.send_message(request_data, Topic.LATEST_BLOCK_REQUEST, addr)
232
+ # Prune old candidate chains
233
+ self._prune_candidate_chains()
234
+
235
+ # Sleep longer for candidate chain validation (lower priority)
236
+ time.sleep(1) # Longer sleep for candidate chain validation
151
237
  except Exception as e:
152
- print(f"Error querying peer {peer.node_id.hex()}: {e}")
153
-
154
- def _handle_ping(self, body: bytes, addr: Tuple[str, int], envelope):
155
- """
156
- Handle ping messages by storing peer info and responding with a pong.
157
-
158
- The ping message contains:
159
- - public_key: The sender's public key
160
- - difficulty: The sender's preferred proof-of-work difficulty
161
- - routes: The sender's available routes
162
- """
163
- try:
164
- # Parse peer information from the ping message
165
- parts = decode(body)
166
- if len(parts) != 3:
167
- return
238
+ print(f"Error in candidate chain validation loop: {e}")
239
+ time.sleep(2) # Even longer sleep on error
168
240
 
169
- public_key, difficulty_bytes, routes_data = parts
170
- difficulty = int.from_bytes(difficulty_bytes, byteorder='big')
171
-
172
- # Store peer information in routing table
173
- peer = self.relay.add_peer(addr, public_key, difficulty)
174
-
175
- # Process the routes the sender is participating in
176
- if routes_data:
177
- # routes_data is a simple list like [0, 1] meaning peer route and validation route
178
- # Add peer to each route they participate in
179
- self.relay.add_peer_to_route(peer, list(routes_data))
241
+ def _prune_pending_items(self):
242
+ """Remove old pending blocks and transactions."""
243
+ current_time = time.time()
244
+
245
+ # Prune old pending blocks (older than 1 hour)
246
+ blocks_to_remove = [
247
+ block_hash for block_hash, data in self.pending_blocks.items()
248
+ if current_time - data['timestamp'] > 3600 # 1 hour
249
+ ]
250
+ for block_hash in blocks_to_remove:
251
+ del self.pending_blocks[block_hash]
252
+
253
+ # Prune old pending transactions (older than 30 minutes)
254
+ txs_to_remove = [
255
+ tx_hash for tx_hash, data in self.pending_transactions.items()
256
+ if current_time - data['timestamp'] > 1800 # 30 minutes
257
+ ]
258
+ for tx_hash in txs_to_remove:
259
+ del self.pending_transactions[tx_hash]
260
+
261
+ def _process_main_chain_blocks(self):
262
+ """
263
+ Process blocks that extend our current main chain.
264
+ Prioritizes blocks that build on our latest block.
265
+ """
266
+ # Skip if we don't have a latest block yet
267
+ if not self.latest_block:
268
+ return
180
269
 
181
- # Create response with our public key, difficulty and routes we participate in
182
- pong_data = encode([
183
- self.node_id, # Our public key
184
- self.config.get('difficulty', 1).to_bytes(4, byteorder='big'), # Our difficulty
185
- self.relay.get_routes() # Our routes as bytes([0, 1]) for peer and validation
186
- ])
270
+ # Get the hash of our latest block
271
+ latest_hash = self.latest_block.get_hash()
272
+
273
+ # Find any pending blocks that build on our latest block
274
+ main_chain_blocks = []
275
+ for block_hash, data in list(self.pending_blocks.items()):
276
+ block = data['block']
187
277
 
188
- self.relay.send_message(pong_data, Topic.PONG, addr)
189
- except Exception as e:
190
- print(f"Error handling ping message: {e}")
191
-
192
- def _handle_pong(self, body: bytes, addr: Tuple[str, int], envelope):
193
- """
194
- Handle pong messages by updating peer information.
195
- No response is sent to a pong message.
196
- """
197
- try:
198
- # Parse peer information from the pong message
199
- parts = decode(body)
200
- if len(parts) != 3:
201
- return
278
+ # Check if this block extends our latest block
279
+ if block.previous == latest_hash:
280
+ main_chain_blocks.append(block)
202
281
 
203
- public_key, difficulty_bytes, routes_data = parts
204
- difficulty = int.from_bytes(difficulty_bytes, byteorder='big')
205
-
206
- # Update peer information in routing table
207
- peer = self.relay.add_peer(addr, public_key, difficulty)
282
+ # Process found blocks
283
+ for block in main_chain_blocks:
284
+ self._validate_and_process_main_chain_block(block)
208
285
 
209
- # Process the routes the sender is participating in
210
- if routes_data:
211
- # routes_data is a simple list like [0, 1] meaning peer route and validation route
212
- # Add peer to each route they participate in
213
- self.relay.add_peer_to_route(peer, list(routes_data))
214
- except Exception as e:
215
- print(f"Error handling pong message: {e}")
216
-
217
- def _handle_object_request(self, body: bytes, addr: Tuple[str, int], envelope):
286
+ def _validate_and_process_main_chain_block(self, block: Block):
218
287
  """
219
- Handle an object request from a peer.
288
+ Validate and process a block that extends our main chain.
220
289
 
221
290
  Args:
222
- body: Message body containing the object hash
223
- addr: Address of the requesting peer
224
- envelope: Full message envelope
291
+ block: Block to validate and process
225
292
  """
226
293
  try:
227
- # Decode the request
228
- request = json.loads(body.decode('utf-8'))
229
- object_hash = bytes.fromhex(request.get('hash'))
230
-
231
- # Check if we have the requested object
232
- if not self.storage.contains(object_hash):
233
- # We don't have the object, ignore the request
234
- return
235
-
236
- # Get the object data
237
- object_data = self.storage._local_get(object_hash)
238
- if not object_data:
239
- return
240
-
241
- # Create a response message
242
- response = {
243
- 'hash': object_hash.hex(),
244
- 'data': object_data.hex()
245
- }
294
+ # Validate block
295
+ is_valid = validate_block(block, self.blockchain.get_accounts_at_block(block.previous), self.blockchain.get_blocks())
296
+
297
+ if is_valid:
298
+ # Apply block to our state
299
+ success = validate_and_apply_block(self.blockchain, block)
300
+ if success:
301
+ print(f"Applied valid block {block.number} to blockchain state")
302
+ self._update_latest_block(block)
303
+ blocks_to_remove = [block.get_hash()]
304
+ for block_hash in blocks_to_remove:
305
+ if block_hash in self.pending_blocks:
306
+ del self.pending_blocks[block_hash]
307
+ print(f"Added block {block.number} to blockchain")
308
+ return True
309
+ except Exception as e:
310
+ print(f"Error validating main chain block {block.number}: {e}")
246
311
 
247
- # Send the response
248
- self.relay.send_message_to_addr(
249
- addr,
250
- Topic.OBJECT_RESPONSE,
251
- json.dumps(response).encode('utf-8')
252
- )
312
+ return False
253
313
 
254
- except Exception as e:
255
- print(f"Error handling object request: {e}")
256
-
257
- def _handle_object_response(self, body: bytes, addr: Tuple[str, int], envelope):
314
+ def _evaluate_candidate_chains(self):
258
315
  """
259
- Handle an object response from a peer.
260
-
261
- Args:
262
- body: Message body containing the object hash and data
263
- addr: Address of the responding peer
264
- envelope: Full message envelope
316
+ Evaluate candidate chains to determine if any should become our main chain.
317
+ This will validate pending blocks and look for chains with higher cumulative difficulty.
265
318
  """
266
- try:
267
- # Decode the response
268
- response = json.loads(body.decode('utf-8'))
269
- object_hash = bytes.fromhex(response.get('hash'))
270
- object_data = bytes.fromhex(response.get('data'))
319
+ # Skip if no candidate chains
320
+ if not self.candidate_chains:
321
+ return
271
322
 
272
- # Store the object
273
- self.storage.put(object_hash, object_data)
323
+ # For each candidate chain, validate blocks and calculate metrics
324
+ for chain_id, data in list(self.candidate_chains.items()):
325
+ latest_candidate_block = data['latest_block']
274
326
 
275
- except Exception as e:
276
- print(f"Error handling object response: {e}")
277
-
278
- def _handle_object(self, body: bytes, addr: Tuple[str, int], envelope):
279
- """
280
- Handle receipt of an object.
281
- If not in storage, verify the hash and put in storage.
282
- """
283
- try:
284
- # Verify hash matches the object
285
- object_hash = hashlib.sha256(body).digest()
327
+ # Build the chain backwards
328
+ chain_blocks = self._build_chain_from_latest(latest_candidate_block)
286
329
 
287
- # Check if we already have this object
288
- if not self.storage.exists(object_hash):
289
- # Store the object
290
- self.storage.put(object_hash, body)
291
- except Exception as e:
292
- print(f"Error handling object: {e}")
293
-
294
- def request_object(self, object_hash: bytes, max_attempts: int = 3) -> Optional[bytes]:
330
+ # Skip if we couldn't build a complete chain
331
+ if not chain_blocks:
332
+ continue
333
+
334
+ # Validate the entire chain
335
+ valid_chain = self._validate_candidate_chain(chain_blocks)
336
+
337
+ # If valid and better than our current chain, switch to it
338
+ if valid_chain and self._is_better_chain(chain_blocks):
339
+ self._switch_to_new_chain(chain_blocks)
340
+
341
+ def _build_chain_from_latest(self, latest_block: Block) -> List[Block]:
295
342
  """
296
- Request an object from the network by its hash.
297
-
298
- This method sends an object request to peers closest to the object hash
299
- and waits for a response until timeout.
343
+ Build a chain from the latest block back to a known point in our blockchain.
300
344
 
301
345
  Args:
302
- object_hash: The hash of the object to request
303
- max_attempts: Maximum number of request attempts
346
+ latest_block: Latest block in the candidate chain
304
347
 
305
348
  Returns:
306
- The object data if found, None otherwise
307
- """
308
- # First check if we already have the object
309
- if self.storage.contains(object_hash):
310
- return self.storage._local_get(object_hash)
311
-
312
- # Find the bucket containing the peers closest to the object's hash
313
- closest_peers = self.relay.get_closest_peers(object_hash, count=3)
314
- if not closest_peers:
315
- return None
316
-
317
- # Create a message to request the object
318
- topic = Topic.OBJECT_REQUEST
319
- object_request_msg = {
320
- 'hash': object_hash.hex()
321
- }
322
-
323
- # Track which peers we've already tried
324
- attempted_peers = set()
325
-
326
- # We'll try up to max_attempts times
327
- for _ in range(max_attempts):
328
- # Find peers we haven't tried yet
329
- untried_peers = [p for p in closest_peers if p.id not in attempted_peers]
330
- if not untried_peers:
331
- break
332
-
333
- # Send the request to all untried peers
334
- request_sent = False
335
- for peer in untried_peers:
336
- try:
337
- self.relay.send_message_to_peer(peer, topic, object_request_msg)
338
- attempted_peers.add(peer.id)
339
- request_sent = True
340
- except Exception as e:
341
- print(f"Failed to send object request to peer {peer.id.hex()}: {e}")
342
-
343
- if not request_sent:
349
+ List of blocks in the chain, ordered from oldest to newest
350
+ """
351
+ chain_blocks = [latest_block]
352
+ current_block = latest_block
353
+
354
+ # Track visited blocks to avoid cycles
355
+ visited = {current_block.get_hash()}
356
+
357
+ # Build chain backwards until we either:
358
+ # 1. Find a block in our main chain
359
+ # 2. Run out of blocks
360
+ # 3. Detect a cycle
361
+ while current_block.number > 0:
362
+ previous_hash = current_block.previous
363
+
364
+ # Check if we have this block in our blockchain
365
+ if hasattr(self.blockchain, 'has_block') and self.blockchain.has_block(previous_hash):
366
+ # Found connection to our main chain
367
+ previous_block = self.blockchain.get_block(previous_hash)
368
+ chain_blocks.insert(0, previous_block)
344
369
  break
345
370
 
346
- # Short wait to allow for response
347
- time.sleep(0.5)
348
-
349
- # Check if any of the requests succeeded
350
- if self.storage.contains(object_hash):
351
- return self.storage._local_get(object_hash)
371
+ # Check if block is in pending blocks
372
+ elif previous_hash in self.pending_blocks:
373
+ previous_block = self.pending_blocks[previous_hash]['block']
352
374
 
353
- # If we get here, we couldn't get the object
354
- return None
355
-
356
- def _handle_route_request(self, body: bytes, addr: Tuple[str, int], envelope):
357
- """
358
- Handle request for routing information.
359
- Seed route to peer with one peer per bucket in the route table.
360
- """
361
- try:
362
- # Create a list to store one peer from each bucket
363
- route_peers = []
364
-
365
- # Get one peer from each bucket
366
- for bucket_index in range(self.relay.num_buckets):
367
- peers = self.relay.get_bucket_peers(bucket_index)
368
- if peers and len(peers) > 0:
369
- # Add one peer from this bucket
370
- route_peers.append(peers[0])
371
-
372
- # Serialize the peer list
373
- # Format: List of [peer_addr, peer_port, peer_key]
374
- peer_data = []
375
- for peer in route_peers:
376
- peer_addr, peer_port = peer.address
377
- peer_data.append(encode([
378
- peer_addr.encode('utf-8'),
379
- peer_port.to_bytes(2, byteorder='big'),
380
- peer.node_id
381
- ]))
382
-
383
- # Encode the complete route data
384
- route_data = encode(peer_data)
385
-
386
- # Send routing information back
387
- self.relay.send_message(route_data, Topic.ROUTE, addr)
388
- except Exception as e:
389
- print(f"Error handling route request: {e}")
390
-
391
- def _handle_route(self, body: bytes, addr: Tuple[str, int], envelope):
392
- """
393
- Handle receipt of a route message containing a list of IP addresses to ping.
394
- """
395
- try:
396
- # Decode the list of peers
397
- peer_entries = decode(body)
398
-
399
- # Process each peer
400
- for peer_data in peer_entries:
401
- try:
402
- peer_parts = decode(peer_data)
403
- if len(peer_parts) != 3:
404
- continue
405
-
406
- peer_addr_bytes, peer_port_bytes, peer_id = peer_parts
407
- peer_addr = peer_addr_bytes.decode('utf-8')
408
- peer_port = int.from_bytes(peer_port_bytes, byteorder='big')
409
-
410
- # Create peer address tuple
411
- peer_address = (peer_addr, peer_port)
375
+ # Check for cycles
376
+ if previous_hash in visited:
377
+ print(f"Cycle detected in candidate chain at block {previous_block.number}")
378
+ return []
412
379
 
413
- # Ping this peer if it's not already in our routing table
414
- # and it's not our own address
415
- if (not self.relay.has_peer(peer_address) and
416
- peer_address != self.relay.get_address()):
417
- # Create ping message with our info and routes
418
- # Encode our peer and validation routes
419
- peer_routes_list = self.relay.get_routes()
420
-
421
- # Combine into a single list of routes with type flags
422
- # For each route: [is_validation_route, route_id]
423
- routes = []
424
-
425
- # Add peer routes (type flag = 0)
426
- for route in peer_routes_list:
427
- routes.append(encode([bytes([0]), route]))
428
-
429
- # Encode the complete routes list
430
- all_routes = encode(routes)
431
-
432
- ping_data = encode([
433
- self.node_id, # Our public key
434
- self.config.get('difficulty', 1).to_bytes(4, byteorder='big'), # Our difficulty
435
- all_routes # All routes we participate in
436
- ])
437
-
438
- # Send ping to the peer
439
- self.relay.send_message(ping_data, Topic.PING, peer_address)
440
- except Exception as e:
441
- print(f"Error processing peer in route: {e}")
442
- continue
443
- except Exception as e:
444
- print(f"Error handling route message: {e}")
445
-
446
- def _handle_latest_block_request(self, body: bytes, addr: Tuple[str, int], envelope):
447
- """
448
- Handle request for the latest block from the chain currently following.
449
- Any node can request the latest block for syncing purposes.
450
- """
451
- try:
452
- # Return our latest block from the followed chain
453
- if self.latest_block:
454
- # Send latest block to the requester
455
- self.relay.send_message(self.latest_block.to_bytes(), Topic.LATEST_BLOCK, addr)
456
- except Exception as e:
457
- print(f"Error handling latest block request: {e}")
458
-
459
- def _handle_latest_block(self, body: bytes, addr: Tuple[str, int], envelope):
460
- """
461
- Handle receipt of a latest block message.
462
- Identify chain, validate if following chain, only accept if latest block
463
- in chain is in the previous field.
464
- """
465
- try:
466
- # All nodes can process latest blocks now, regardless of route membership
467
-
468
- # Deserialize the block
469
- block = Block.from_bytes(body)
470
- if not block:
471
- return
472
-
473
- # Check if we're following this chain
474
- if not self.machine.is_following_chain(block.chain_id):
475
- # Store as a potential candidate chain if it has a higher height
476
- if not self.followed_chain_id or block.chain_id != self.followed_chain_id:
477
- self._add_candidate_chain(block)
478
- return
479
-
480
- # Only proceed if block chain_id matches what we're following
481
- if self.followed_chain_id and block.chain_id != self.followed_chain_id:
482
- return
483
-
484
- if not self.latest_block:
485
- # We don't have a latest block, so this might be the first one we've seen
486
- # Store it as our latest
487
- self._update_latest_block(block)
488
- return
380
+ visited.add(previous_hash)
381
+ chain_blocks.insert(0, previous_block)
382
+ current_block = previous_block
383
+ else:
384
+ # Missing block, cannot validate the chain
385
+ print(f"Missing block {previous_hash.hex()} in candidate chain")
386
+ return []
489
387
 
490
- # If this block is newer than our latest, update our latest block
491
- if block.height > self.latest_block.height:
492
- # Verify chain continuity
493
- if self.latest_block.hash_bytes in block.previous_blocks:
494
- # This is a valid continuation of our chain
495
- self._update_latest_block(block)
496
- else:
497
- # This block doesn't build on our latest, check for forking
498
- # but continue tracking it as a candidate
499
- self._add_candidate_chain(block)
500
-
501
- except Exception as e:
502
- print(f"Error handling latest block: {e}")
503
-
504
- def _handle_transaction(self, body: bytes, addr: Tuple[str, int], envelope):
505
- """
506
- Handle incoming transaction messages.
507
-
508
- This method is called when we receive a transaction from the network.
509
- Transactions should only be processed by validator nodes.
388
+ return chain_blocks
510
389
 
511
- Args:
512
- body: Transaction data
513
- addr: Source address
514
- envelope: Full message envelope
515
- """
516
- # Ignore if we're not a validator (don't have a validation key)
517
- if not self.is_validator or not self.relay.is_in_validation_route():
518
- print("Ignoring transaction as we're not a validator")
519
- return
520
-
521
- try:
522
- # Parse transaction data
523
- tx_data = json.loads(body.decode('utf-8'))
524
-
525
- # Store the transaction in our local storage
526
- tx_hash = bytes.fromhex(tx_data.get('hash'))
527
- tx_raw = bytes.fromhex(tx_data.get('data'))
528
-
529
- # Create transaction entry in storage
530
- if not self.storage.contains(tx_hash):
531
- self.storage.put(tx_hash, tx_raw)
532
- print(f"Stored transaction {tx_hash.hex()}")
533
-
534
- # Process the transaction as a validator
535
- self._process_transaction_as_validator(tx_hash, tx_raw)
536
-
537
- except Exception as e:
538
- print(f"Error handling transaction: {e}")
539
-
540
- def _process_transaction_as_validator(self, tx_hash: bytes, tx_raw: bytes):
390
+ def _validate_candidate_chain(self, chain_blocks: List[Block]) -> bool:
541
391
  """
542
- Process a transaction as a validator node.
543
-
544
- This method is called when we receive a transaction and we're a validator.
545
- It verifies the transaction and may include it in a future block.
392
+ Validate a candidate chain of blocks.
546
393
 
547
394
  Args:
548
- tx_hash: Transaction hash
549
- tx_raw: Raw transaction data
550
- """
551
- try:
552
- print(f"Processing transaction {tx_hash.hex()} as validator")
553
- # Here we would verify the transaction and potentially queue it
554
- # for inclusion in the next block we create
555
-
556
- # For now, just log that we processed it
557
- print(f"Verified transaction {tx_hash.hex()}")
558
-
559
- # TODO: Implement transaction validation and queueing for block creation
560
-
561
- except Exception as e:
562
- print(f"Error processing transaction as validator: {e}")
395
+ chain_blocks: List of blocks in the chain (oldest to newest)
563
396
 
564
- def _initialize_latest_block(self):
565
- """Initialize latest block from storage if available."""
566
- # Implementation would load the latest block from storage
567
- pass
568
-
569
- def set_followed_chain(self, chain_id):
570
- """
571
- Set the chain that this node follows.
572
-
573
- Args:
574
- chain_id: The ID of the chain to follow
575
- """
576
- self.followed_chain_id = chain_id
577
- self.latest_block = self.machine.get_latest_block(chain_id)
578
-
579
- def get_latest_block(self):
580
- """
581
- Get the latest block of the chain this node is following.
582
-
583
397
  Returns:
584
- The latest block, or None if not available
585
- """
586
- return self.latest_block
587
-
588
- def _add_candidate_chain(self, block):
589
- """
590
- Add a block to the candidate chains.
591
-
592
- Args:
593
- block: Block to add
398
+ True if the chain is valid, False otherwise
594
399
  """
595
- chain_id = block.chain_id
596
-
597
- # Check if we already have this chain as a candidate
598
- if chain_id in self.candidate_chains:
599
- existing_block = self.candidate_chains[chain_id]['latest_block']
600
-
601
- # Only update if this block is newer
602
- if block.height > existing_block.height:
603
- self.candidate_chains[chain_id] = {
604
- 'latest_block': block,
605
- 'timestamp': time.time()
606
- }
607
- else:
608
- # Add as a new candidate chain
609
- self.candidate_chains[chain_id] = {
610
- 'latest_block': block,
611
- 'timestamp': time.time()
612
- }
613
-
614
- print(f"Added candidate chain {chain_id.hex()} with height {block.height}")
615
-
616
- def _update_latest_block(self, block):
617
- """
618
- Update our latest block and process it.
619
-
620
- Args:
621
- block: New latest block to set
622
- """
623
- # Process the block if it's new
624
- if not self.latest_block or block.hash_bytes != self.latest_block.hash_bytes:
625
- # Process block logic in the machine
626
- self.machine.process_block(block)
627
-
628
- # Update our latest block reference
629
- self.latest_block = block
630
-
631
- # Update followed chain ID if needed
632
- if not self.followed_chain_id:
633
- self.followed_chain_id = block.chain_id
400
+ # Validate each block in the chain
401
+ for i, block in enumerate(chain_blocks):
402
+ # Skip first block, it's either genesis or a block we already have
403
+ if i == 0:
404
+ continue
634
405
 
635
- print(f"Updated latest block to height {block.height}, hash {block.hash}")
636
-
637
- # Save latest block to storage for persistence
638
- if self.storage:
639
- self.storage.put_latest_block(block)
406
+ # Validate block connections
407
+ if block.previous != chain_blocks[i-1].get_hash():
408
+ print(f"Invalid chain: block {block.number} does not reference previous block")
409
+ return False
640
410
 
641
- def _prune_candidate_chains(self):
642
- """Remove candidate chains that are older than 1 hour."""
643
- current_time = time.time()
644
- chains_to_remove = []
645
-
646
- for chain_id, data in self.candidate_chains.items():
647
- if current_time - data['timestamp'] > 3600: # 1 hour in seconds
648
- chains_to_remove.append(chain_id)
411
+ # Validate block
412
+ is_valid = validate_block(block, self.blockchain.get_accounts_at_block(block.previous), self.blockchain.get_blocks())
413
+ if not is_valid:
414
+ print(f"Invalid chain: block {block.number} is invalid")
415
+ return False
649
416
 
650
- for chain_id in chains_to_remove:
651
- del self.candidate_chains[chain_id]
652
-
653
- def evaluate_candidate_chains(self):
654
- """
655
- Evaluate all candidate chains to see if we should switch to one.
656
- This is a placeholder for now - in a real implementation, you would
657
- verify the chain and potentially switch to it if it's valid and better.
658
- """
659
- # TODO: Implement chain evaluation logic
660
- pass
661
-
662
- def post_global_storage(self, name: str, value):
663
- """
664
- Store a global variable in node storage.
665
-
666
- Args:
667
- name: Name of the variable
668
- value: Value to store
669
- """
670
- # Store the expression directly in node storage using DAG representation
671
- root_hash = store_expr(value, self.storage)
672
-
673
- # Create a key for this variable name (without special prefixes)
674
- key = hashlib.sha256(name.encode()).digest()
675
-
676
- # Store the root hash reference
677
- self.storage.put(key, root_hash)
417
+ return True
678
418
 
679
- def query_global_storage(self, name: str):
419
+ def _is_better_chain(self, chain_blocks: List[Block]) -> bool:
680
420
  """
681
- Retrieve a global variable from node storage.
421
+ Determine if a candidate chain is better than our current chain.
682
422
 
683
423
  Args:
684
- name: Name of the variable to retrieve
424
+ chain_blocks: List of blocks in the candidate chain
685
425
 
686
426
  Returns:
687
- The stored expression, or None if not found
427
+ True if the candidate chain is better, False otherwise
688
428
  """
689
- # Create the key for this variable name
690
- key = hashlib.sha256(name.encode()).digest()
429
+ # Get the latest block from the candidate chain
430
+ candidate_latest = chain_blocks[-1]
691
431
 
692
- # Try to retrieve the root hash
693
- root_hash = self.storage.get(key)
432
+ # If we don't have a latest block, any valid chain is better
433
+ if not self.latest_block:
434
+ return True
435
+
436
+ # Compare block numbers (longest chain rule)
437
+ if candidate_latest.number > self.latest_block.number:
438
+ print(f"Candidate chain is longer: {candidate_latest.number} vs {self.latest_block.number}")
439
+ return True
440
+
441
+ return False
694
442
 
695
- if root_hash:
696
- # Load the expression using its root hash
697
- return get_expr_from_storage(root_hash, self.storage)
443
+ def _switch_to_new_chain(self, chain_blocks: List[Block]):
444
+ """
445
+ Switch to a new chain by adding all blocks to our blockchain.
698
446
 
699
- return None
447
+ Args:
448
+ chain_blocks: List of blocks in the chain (oldest to newest)
449
+ """
450
+ # Find the point where the chains diverge
451
+ divergence_point = 0
452
+ for i, block in enumerate(chain_blocks):
453
+ # Check if we have this block in our blockchain
454
+ if hasattr(self.blockchain, 'has_block') and self.blockchain.has_block(block.get_hash()):
455
+ divergence_point = i + 1
456
+ else:
457
+ break
458
+
459
+ # Add all blocks after the divergence point
460
+ for i in range(divergence_point, len(chain_blocks)):
461
+ block = chain_blocks[i]
462
+
463
+ # Add block to blockchain
464
+ if hasattr(self.blockchain, 'add_block'):
465
+ try:
466
+ self.blockchain.add_block(block)
467
+
468
+ # Remove from pending blocks
469
+ block_hash = block.get_hash()
470
+ if block_hash in self.pending_blocks:
471
+ del self.pending_blocks[block_hash]
472
+
473
+ print(f"Added block {block.number} to blockchain")
474
+ except Exception as e:
475
+ print(f"Error adding block {block.number} to blockchain: {e}")
476
+ return
477
+
478
+ # Update latest block
479
+ self._update_latest_block(chain_blocks[-1])
480
+ print(f"Switched to new chain, latest block: {self.latest_block.number}")