astreum 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of astreum has been flagged as possibly problematic.

astreum/node/__init__.py CHANGED
@@ -1,17 +1,48 @@
  import os
- import hashlib
  import time
- from typing import Tuple, Optional
+ import threading
+ import random
+ from typing import Tuple, Optional, List, Dict
  import json
  from cryptography.hazmat.primitives.asymmetric import ed25519
+ from cryptography.hazmat.primitives import serialization

  from .relay import Relay, Topic
  from .relay.peer import Peer
  from .models import Storage, Block, Transaction
  from .machine import AstreumMachine
- from .utils import encode, decode
+ from .utils import encode, decode, hash_data
  from astreum.lispeum.storage import store_expr, get_expr_from_storage

+ # Import our validation components using the new functional approach
+ from .validation import (
+ validate_block,
+ create_block,
+ create_genesis_block,
+ compute_vdf,
+ verify_vdf,
+ select_validator,
+ select_validator_for_slot,
+ Account,
+ get_validator_stake,
+ is_validator,
+ VALIDATION_ADDRESS,
+ BURN_ADDRESS,
+ MIN_STAKE_AMOUNT,
+ SLOT_DURATION,
+ VDF_DIFFICULTY
+ )
+ from .validation.state import (
+ add_block_to_state,
+ validate_and_apply_block,
+ create_account_state,
+ get_validator_for_slot,
+ select_best_chain,
+ compare_chains,
+ get_validator_set
+ )
+ from .validation.adapter import BlockAdapter, TransactionAdapter, AccountAdapter
+
  class Node:
  def __init__(self, config: dict):
  # Ensure config is a dictionary, but allow it to be None
@@ -46,6 +77,25 @@ class Node:
  self.storage = Storage(self.config)
  self.storage.node = self # Set the storage node reference to self

+ # Initialize blockchain state
+ self.blockchain = create_account_state(self.config)
+
+ # Store our validator info if we're a validator
+ if self.is_validator and self.validation_public_key:
+ self.validator_address = self.validation_public_key.public_bytes(
+ encoding=serialization.Encoding.Raw,
+ format=serialization.PublicFormat.Raw
+ )
+ self.validator_private_bytes = self.validation_private_key.private_bytes(
+ encoding=serialization.Encoding.Raw,
+ format=serialization.PrivateFormat.Raw,
+ encryption_algorithm=serialization.NoEncryption()
+ )
+ print(f"Registered validator with address: {self.validator_address.hex()}")
+ else:
+ self.validator_address = None
+ self.validator_private_bytes = None
+
  # Latest block of the chain this node is following
  self.latest_block = None
  self.followed_chain_id = self.config.get('followed_chain_id', None)
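
The validator setup added above relies on the cryptography package's raw Ed25519 serialization. As a standalone sketch (independent of the Node class, and using a freshly generated throwaway key rather than the node's configured validation key), the same byte-extraction pattern looks like this:

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed25519

# Illustration only: generate a throwaway Ed25519 key pair.
private_key = ed25519.Ed25519PrivateKey.generate()
public_key = private_key.public_key()

# 32-byte raw public key, used above as the validator address.
validator_address = public_key.public_bytes(
    encoding=serialization.Encoding.Raw,
    format=serialization.PublicFormat.Raw,
)

# 32-byte raw private seed, extracted without encryption.
validator_private_bytes = private_key.private_bytes(
    encoding=serialization.Encoding.Raw,
    format=serialization.PrivateFormat.Raw,
    encryption_algorithm=serialization.NoEncryption(),
)

print(validator_address.hex())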
@@ -64,529 +114,367 @@ class Node:
  self.relay.message_handlers[Topic.LATEST_BLOCK_REQUEST] = self._handle_latest_block_request
  self.relay.message_handlers[Topic.LATEST_BLOCK] = self._handle_latest_block
  self.relay.message_handlers[Topic.TRANSACTION] = self._handle_transaction
+ self.relay.message_handlers[Topic.BLOCK_REQUEST] = self._handle_block_request
+ self.relay.message_handlers[Topic.BLOCK_RESPONSE] = self._handle_block_response

  # Initialize latest block from storage if available
  self._initialize_latest_block()

  # Candidate chains that might be adopted
  self.candidate_chains = {} # chain_id -> {'latest_block': block, 'timestamp': time.time()}
+ self.pending_blocks = {} # block_hash -> {'block': block, 'timestamp': time.time()}

- def _handle_ping(self, body: bytes, addr: Tuple[str, int], envelope):
- """
- Handle ping messages by storing peer info and responding with a pong.
+ # Threads for validation and chain monitoring
+ self.running = False
+ self.main_chain_validation_thread = None
+ self.candidate_chain_validation_thread = None

- The ping message contains:
- - public_key: The sender's public key
- - difficulty: The sender's preferred proof-of-work difficulty
- - routes: The sender's available routes
- """
- try:
- # Parse peer information from the ping message
- parts = decode(body)
- if len(parts) != 3:
- return
-
- public_key, difficulty_bytes, routes_data = parts
- difficulty = int.from_bytes(difficulty_bytes, byteorder='big')
-
- # Store peer information in routing table
- peer = self.relay.add_peer(addr, public_key, difficulty)
+ # Pending transactions for a block
+ self.pending_transactions = {} # tx_hash -> {'transaction': tx, 'timestamp': time.time()}
+
+ # Last block production attempt time
+ self.last_block_attempt_time = 0
+
+ def start(self):
+ """Start the node."""
+ self.running = True
+
+ # Start relay
+ self.relay.start()
+
+ # Start chain monitoring thread
+ self.main_chain_validation_thread = threading.Thread(
+ target=self._main_chain_validation_loop,
+ name="MainChainValidation"
+ )
+ self.main_chain_validation_thread.daemon = True
+ self.main_chain_validation_thread.start()
+
+ self.candidate_chain_validation_thread = threading.Thread(
+ target=self._candidate_chain_validation_loop,
+ name="CandidateChainValidation"
+ )
+ self.candidate_chain_validation_thread.daemon = True
+ self.candidate_chain_validation_thread.start()
+
+ # Set up recurring block query tasks
+ main_query_thread = threading.Thread(
+ target=self._block_query_loop,
+ args=('main',),
+ daemon=True
+ )
+ main_query_thread.start()
+
+ validation_query_thread = threading.Thread(
+ target=self._block_query_loop,
+ args=('validation',),
+ daemon=True
+ )
+ validation_query_thread.start()
+
+ print(f"Node started with ID {self.node_id.hex()}")
+
+ def stop(self):
+ """Stop the node and all its services."""
+ self.running = False
+
+ # Stop all threads
+ if self.main_chain_validation_thread and self.main_chain_validation_thread.is_alive():
+ self.main_chain_validation_thread.join(timeout=1.0)

- # Process the routes the sender is participating in
- if routes_data:
- # routes_data is a simple list like [0, 1] meaning peer route and validation route
- # Add peer to each route they participate in
- self.relay.add_peer_to_route(peer, list(routes_data))
+ if self.candidate_chain_validation_thread and self.candidate_chain_validation_thread.is_alive():
+ self.candidate_chain_validation_thread.join(timeout=1.0)

- # Create response with our public key, difficulty and routes we participate in
- pong_data = encode([
- self.node_id, # Our public key
- self.config.get('difficulty', 1).to_bytes(4, byteorder='big'), # Our difficulty
- self.relay.get_routes() # Our routes as bytes([0, 1]) for peer and validation
- ])
+ # Stop relay last
+ if self.relay:
+ self.relay.stop()

- self.relay.send_message(pong_data, Topic.PONG, addr)
- except Exception as e:
- print(f"Error handling ping message: {e}")
+ print("Node stopped")

- def _handle_pong(self, body: bytes, addr: Tuple[str, int], envelope):
+ def _main_chain_validation_loop(self):
  """
- Handle pong messages by updating peer information.
- No response is sent to a pong message.
+ Main validation loop for the primary blockchain.
+ This thread prioritizes validating blocks on the main chain we're following.
  """
- try:
- # Parse peer information from the pong message
- parts = decode(body)
- if len(parts) != 3:
- return
+ while self.running:
+ try:
+ # Update latest block if we don't have one yet
+ if not self.latest_block and hasattr(self.blockchain, 'get_latest_block'):
+ self.latest_block = self.blockchain.get_latest_block()

- public_key, difficulty_bytes, routes_data = parts
- difficulty = int.from_bytes(difficulty_bytes, byteorder='big')
-
- # Update peer information in routing table
- peer = self.relay.add_peer(addr, public_key, difficulty)
-
- # Process the routes the sender is participating in
- if routes_data:
- # routes_data is a simple list like [0, 1] meaning peer route and validation route
- # Add peer to each route they participate in
- self.relay.add_peer_to_route(peer, list(routes_data))
- except Exception as e:
- print(f"Error handling pong message: {e}")
-
- def _handle_object_request(self, body: bytes, addr: Tuple[str, int], envelope):
- """
- Handle an object request from a peer.
-
- Args:
- body: Message body containing the object hash
- addr: Address of the requesting peer
- envelope: Full message envelope
- """
- try:
- # Decode the request
- request = json.loads(body.decode('utf-8'))
- object_hash = bytes.fromhex(request.get('hash'))
-
- # Check if we have the requested object
- if not self.storage.contains(object_hash):
- # We don't have the object, ignore the request
- return
+ # Process any blocks that extend our main chain immediately
+ self._process_main_chain_blocks()

- # Get the object data
- object_data = self.storage._local_get(object_hash)
- if not object_data:
- return
+ # Attempt block production if we are a validator
+ if self.is_validator and self.validator_address:
+ self._attempt_block_production()

- # Create a response message
- response = {
- 'hash': object_hash.hex(),
- 'data': object_data.hex()
- }
-
- # Send the response
- self.relay.send_message_to_addr(
- addr,
- Topic.OBJECT_RESPONSE,
- json.dumps(response).encode('utf-8')
- )
-
- except Exception as e:
- print(f"Error handling object request: {e}")
-
- def _handle_object_response(self, body: bytes, addr: Tuple[str, int], envelope):
- """
- Handle an object response from a peer.
-
- Args:
- body: Message body containing the object hash and data
- addr: Address of the responding peer
- envelope: Full message envelope
- """
- try:
- # Decode the response
- response = json.loads(body.decode('utf-8'))
- object_hash = bytes.fromhex(response.get('hash'))
- object_data = bytes.fromhex(response.get('data'))
-
- # Store the object
- self.storage.put(object_hash, object_data)
-
- except Exception as e:
- print(f"Error handling object response: {e}")
-
- def _handle_object(self, body: bytes, addr: Tuple[str, int], envelope):
+ # Cleanup old items
+ self._prune_pending_items()
+
+ # Sleep to prevent high CPU usage
+ time.sleep(0.1) # Short sleep for main chain validation
+ except Exception as e:
+ print(f"Error in main chain validation loop: {e}")
+ time.sleep(1) # Longer sleep on error
+
+ def _candidate_chain_validation_loop(self):
  """
- Handle receipt of an object.
- If not in storage, verify the hash and put in storage.
+ Validation loop for candidate chains (potential forks).
+ This thread handles validation of blocks from alternate chains
+ without slowing down the main chain processing.
  """
- try:
- # Verify hash matches the object
- object_hash = hashlib.sha256(body).digest()
-
- # Check if we already have this object
- if not self.storage.exists(object_hash):
- # Store the object
- self.storage.put(object_hash, body)
- except Exception as e:
- print(f"Error handling object: {e}")
-
- def request_object(self, object_hash: bytes, max_attempts: int = 3) -> Optional[bytes]:
- """
- Request an object from the network by its hash.
-
- This method sends an object request to peers closest to the object hash
- and waits for a response until timeout.
-
- Args:
- object_hash: The hash of the object to request
- max_attempts: Maximum number of request attempts
-
- Returns:
- The object data if found, None otherwise
- """
- # First check if we already have the object
- if self.storage.contains(object_hash):
- return self.storage._local_get(object_hash)
-
- # Find the bucket containing the peers closest to the object's hash
- closest_peers = self.relay.get_closest_peers(object_hash, count=3)
- if not closest_peers:
- return None
-
- # Create a message to request the object
- topic = Topic.OBJECT_REQUEST
- object_request_msg = {
- 'hash': object_hash.hex()
- }
-
- # Track which peers we've already tried
- attempted_peers = set()
-
- # We'll try up to max_attempts times
- for _ in range(max_attempts):
- # Find peers we haven't tried yet
- untried_peers = [p for p in closest_peers if p.id not in attempted_peers]
- if not untried_peers:
- break
+ while self.running:
+ try:
+ # Process candidate chains
+ self._evaluate_candidate_chains()

- # Send the request to all untried peers
- request_sent = False
- for peer in untried_peers:
- try:
- self.relay.send_message_to_peer(peer, topic, object_request_msg)
- attempted_peers.add(peer.id)
- request_sent = True
- except Exception as e:
- print(f"Failed to send object request to peer {peer.id.hex()}: {e}")
-
- if not request_sent:
- break
+ # Prune old candidate chains
+ self._prune_candidate_chains()

- # Short wait to allow for response
- time.sleep(0.5)
-
- # Check if any of the requests succeeded
- if self.storage.contains(object_hash):
- return self.storage._local_get(object_hash)
+ # Sleep longer for candidate chain validation (lower priority)
+ time.sleep(1) # Longer sleep for candidate chain validation
+ except Exception as e:
+ print(f"Error in candidate chain validation loop: {e}")
+ time.sleep(2) # Even longer sleep on error

- # If we get here, we couldn't get the object
- return None
-
- def _handle_route_request(self, body: bytes, addr: Tuple[str, int], envelope):
- """
- Handle request for routing information.
- Seed route to peer with one peer per bucket in the route table.
- """
- try:
- # Create a list to store one peer from each bucket
- route_peers = []
-
- # Get one peer from each bucket
- for bucket_index in range(self.relay.num_buckets):
- peers = self.relay.get_bucket_peers(bucket_index)
- if peers and len(peers) > 0:
- # Add one peer from this bucket
- route_peers.append(peers[0])
-
- # Serialize the peer list
- # Format: List of [peer_addr, peer_port, peer_key]
- peer_data = []
- for peer in route_peers:
- peer_addr, peer_port = peer.address
- peer_data.append(encode([
- peer_addr.encode('utf-8'),
- peer_port.to_bytes(2, byteorder='big'),
- peer.node_id
- ]))
+ def _prune_pending_items(self):
+ """Remove old pending blocks and transactions."""
+ current_time = time.time()
+
+ # Prune old pending blocks (older than 1 hour)
+ blocks_to_remove = [
+ block_hash for block_hash, data in self.pending_blocks.items()
+ if current_time - data['timestamp'] > 3600 # 1 hour
+ ]
+ for block_hash in blocks_to_remove:
+ del self.pending_blocks[block_hash]
+
+ # Prune old pending transactions (older than 30 minutes)
+ txs_to_remove = [
+ tx_hash for tx_hash, data in self.pending_transactions.items()
+ if current_time - data['timestamp'] > 1800 # 30 minutes
+ ]
+ for tx_hash in txs_to_remove:
+ del self.pending_transactions[tx_hash]
+
+ def _process_main_chain_blocks(self):
+ """
+ Process blocks that extend our current main chain.
+ Prioritizes blocks that build on our latest block.
+ """
+ # Skip if we don't have a latest block yet
+ if not self.latest_block:
+ return

- # Encode the complete route data
- route_data = encode(peer_data)
+ # Get the hash of our latest block
+ latest_hash = self.latest_block.get_hash()
+
+ # Find any pending blocks that build on our latest block
+ main_chain_blocks = []
+ for block_hash, data in list(self.pending_blocks.items()):
+ block = data['block']

- # Send routing information back
- self.relay.send_message(route_data, Topic.ROUTE, addr)
- except Exception as e:
- print(f"Error handling route request: {e}")
-
- def _handle_route(self, body: bytes, addr: Tuple[str, int], envelope):
- """
- Handle receipt of a route message containing a list of IP addresses to ping.
- """
- try:
- # Decode the list of peers
- peer_entries = decode(body)
+ # Check if this block extends our latest block
+ if block.previous == latest_hash:
+ main_chain_blocks.append(block)
+
+ # Process found blocks
+ for block in main_chain_blocks:
+ self._validate_and_process_main_chain_block(block)

- # Process each peer
- for peer_data in peer_entries:
- try:
- peer_parts = decode(peer_data)
- if len(peer_parts) != 3:
- continue
-
- peer_addr_bytes, peer_port_bytes, peer_id = peer_parts
- peer_addr = peer_addr_bytes.decode('utf-8')
- peer_port = int.from_bytes(peer_port_bytes, byteorder='big')
-
- # Create peer address tuple
- peer_address = (peer_addr, peer_port)
-
- # Ping this peer if it's not already in our routing table
- # and it's not our own address
- if (not self.relay.has_peer(peer_address) and
- peer_address != self.relay.get_address()):
- # Create ping message with our info and routes
- # Encode our peer and validation routes
- peer_routes_list = self.relay.get_routes()
-
- # Combine into a single list of routes with type flags
- # For each route: [is_validation_route, route_id]
- routes = []
-
- # Add peer routes (type flag = 0)
- for route in peer_routes_list:
- routes.append(encode([bytes([0]), route]))
-
- # Encode the complete routes list
- all_routes = encode(routes)
-
- ping_data = encode([
- self.node_id, # Our public key
- self.config.get('difficulty', 1).to_bytes(4, byteorder='big'), # Our difficulty
- all_routes # All routes we participate in
- ])
-
- # Send ping to the peer
- self.relay.send_message(ping_data, Topic.PING, peer_address)
- except Exception as e:
- print(f"Error processing peer in route: {e}")
- continue
- except Exception as e:
- print(f"Error handling route message: {e}")
-
- def _handle_latest_block_request(self, body: bytes, addr: Tuple[str, int], envelope):
+ def _validate_and_process_main_chain_block(self, block: Block):
  """
- Handle request for the latest block from the chain currently following.
- Any node can request the latest block for syncing purposes.
+ Validate and process a block that extends our main chain.
+
+ Args:
+ block: Block to validate and process
  """
  try:
- # Return our latest block from the followed chain
- if self.latest_block:
- # Send latest block to the requester
- self.relay.send_message(self.latest_block.to_bytes(), Topic.LATEST_BLOCK, addr)
+ # Validate block
+ is_valid = validate_block(block, self.blockchain.get_accounts_at_block(block.previous), self.blockchain.get_blocks())
+
+ if is_valid:
+ # Apply block to our state
+ success = validate_and_apply_block(self.blockchain, block)
+ if success:
+ print(f"Applied valid block {block.number} to blockchain state")
+ self._update_latest_block(block)
+ blocks_to_remove = [block.get_hash()]
+ for block_hash in blocks_to_remove:
+ if block_hash in self.pending_blocks:
+ del self.pending_blocks[block_hash]
+ print(f"Added block {block.number} to blockchain")
+ return True
  except Exception as e:
- print(f"Error handling latest block request: {e}")
-
- def _handle_latest_block(self, body: bytes, addr: Tuple[str, int], envelope):
- """
- Handle receipt of a latest block message.
- Identify chain, validate if following chain, only accept if latest block
- in chain is in the previous field.
- """
- try:
- # Check if we're in the validation route
- # This is now already checked by the relay's _handle_message method
- if not self.relay.is_in_validation_route():
- return
+ print(f"Error validating main chain block {block.number}: {e}")

- # Deserialize the block
- block = Block.from_bytes(body)
- if not block:
- return
-
- # Check if we're following this chain
- if not self.machine.is_following_chain(block.chain_id):
- # Store as a potential candidate chain if it has a higher height
- if not self.followed_chain_id or block.chain_id != self.followed_chain_id:
- self._add_candidate_chain(block)
- return
-
- # Get our current latest block
- our_latest = self.latest_block
+ return False

- # Verify block hash links to our latest block
- if our_latest and block.previous_hash == our_latest.hash:
- # Process the valid block
- self.machine.process_block(block)
-
- # Update our latest block
- self.latest_block = block
- # Check if this block is ahead of our current chain
- elif our_latest and block.height > our_latest.height:
- # Block is ahead but doesn't link directly to our latest
- # Add to candidate chains for potential future adoption
- self._add_candidate_chain(block)
-
- # No automatic broadcasting - nodes will request latest blocks when needed
- except Exception as e:
- print(f"Error handling latest block: {e}")
-
- def _handle_transaction(self, body: bytes, addr: Tuple[str, int], envelope):
+ def _evaluate_candidate_chains(self):
  """
- Handle incoming transaction messages.
-
- This method is called when we receive a transaction from the network.
- Transactions should only be processed by validator nodes.
-
- Args:
- body: Transaction data
- addr: Source address
- envelope: Full message envelope
+ Evaluate candidate chains to determine if any should become our main chain.
+ This will validate pending blocks and look for chains with higher cumulative difficulty.
  """
- # Ignore if we're not a validator (don't have a validation key)
- if not self.is_validator or not self.relay.is_in_validation_route():
- print("Ignoring transaction as we're not a validator")
+ # Skip if no candidate chains
+ if not self.candidate_chains:
  return

- try:
- # Parse transaction data
- tx_data = json.loads(body.decode('utf-8'))
+ # For each candidate chain, validate blocks and calculate metrics
+ for chain_id, data in list(self.candidate_chains.items()):
+ latest_candidate_block = data['latest_block']

- # Store the transaction in our local storage
- tx_hash = bytes.fromhex(tx_data.get('hash'))
- tx_raw = bytes.fromhex(tx_data.get('data'))
+ # Build the chain backwards
+ chain_blocks = self._build_chain_from_latest(latest_candidate_block)

- # Create transaction entry in storage
- if not self.storage.contains(tx_hash):
- self.storage.put(tx_hash, tx_raw)
- print(f"Stored transaction {tx_hash.hex()}")
+ # Skip if we couldn't build a complete chain
+ if not chain_blocks:
+ continue

- # Process the transaction as a validator
- self._process_transaction_as_validator(tx_hash, tx_raw)
+ # Validate the entire chain
+ valid_chain = self._validate_candidate_chain(chain_blocks)

- except Exception as e:
- print(f"Error handling transaction: {e}")
-
- def _process_transaction_as_validator(self, tx_hash: bytes, tx_raw: bytes):
+ # If valid and better than our current chain, switch to it
+ if valid_chain and self._is_better_chain(chain_blocks):
+ self._switch_to_new_chain(chain_blocks)
+
+ def _build_chain_from_latest(self, latest_block: Block) -> List[Block]:
  """
- Process a transaction as a validator node.
-
- This method is called when we receive a transaction and we're a validator.
- It verifies the transaction and may include it in a future block.
+ Build a chain from the latest block back to a known point in our blockchain.

  Args:
- tx_hash: Transaction hash
- tx_raw: Raw transaction data
- """
- try:
- print(f"Processing transaction {tx_hash.hex()} as validator")
- # Here we would verify the transaction and potentially queue it
- # for inclusion in the next block we create
+ latest_block: Latest block in the candidate chain

- # For now, just log that we processed it
- print(f"Verified transaction {tx_hash.hex()}")
-
- # TODO: Implement transaction validation and queueing for block creation
-
- except Exception as e:
- print(f"Error processing transaction as validator: {e}")
-
- def _initialize_latest_block(self):
- """Initialize latest block from storage if available."""
- # Implementation would load the latest block from storage
- pass
-
- def set_followed_chain(self, chain_id):
- """
- Set the chain that this node follows.
-
- Args:
- chain_id: The ID of the chain to follow
- """
- self.followed_chain_id = chain_id
- self.latest_block = self.machine.get_latest_block(chain_id)
-
- def get_latest_block(self):
- """
- Get the latest block of the chain this node is following.
-
  Returns:
- The latest block, or None if not available
- """
- return self.latest_block
-
- def _add_candidate_chain(self, block):
- """
- Add a block to candidate chains for potential future adoption.
-
- Args:
- block: The block to add as a candidate
- """
- chain_id = block.chain_id
-
- # If we already have this chain as a candidate, only update if this block is newer
- if chain_id in self.candidate_chains:
- current_candidate = self.candidate_chains[chain_id]['latest_block']
- if block.height > current_candidate.height:
- self.candidate_chains[chain_id] = {
- 'latest_block': block,
- 'timestamp': time.time()
- }
- else:
- # Add as a new candidate chain
- self.candidate_chains[chain_id] = {
- 'latest_block': block,
- 'timestamp': time.time()
- }
-
- # Prune old candidates (older than 1 hour)
- self._prune_candidate_chains()
-
- def _prune_candidate_chains(self):
- """Remove candidate chains that are older than 1 hour."""
- current_time = time.time()
- chains_to_remove = []
-
- for chain_id, data in self.candidate_chains.items():
- if current_time - data['timestamp'] > 3600: # 1 hour in seconds
- chains_to_remove.append(chain_id)
+ List of blocks in the chain, ordered from oldest to newest
+ """
+ chain_blocks = [latest_block]
+ current_block = latest_block
+
+ # Track visited blocks to avoid cycles
+ visited = {current_block.get_hash()}
+
+ # Build chain backwards until we either:
+ # 1. Find a block in our main chain
+ # 2. Run out of blocks
+ # 3. Detect a cycle
+ while current_block.number > 0:
+ previous_hash = current_block.previous
+
+ # Check if we have this block in our blockchain
+ if hasattr(self.blockchain, 'has_block') and self.blockchain.has_block(previous_hash):
+ # Found connection to our main chain
+ previous_block = self.blockchain.get_block(previous_hash)
+ chain_blocks.insert(0, previous_block)
+ break

- for chain_id in chains_to_remove:
- del self.candidate_chains[chain_id]
-
- def evaluate_candidate_chains(self):
- """
- Evaluate all candidate chains to see if we should switch to one.
- This is a placeholder for now - in a real implementation, you would
- verify the chain and potentially switch to it if it's valid and better.
- """
- # TODO: Implement chain evaluation logic
- pass
-
- def post_global_storage(self, name: str, value):
+ # Check if block is in pending blocks
+ elif previous_hash in self.pending_blocks:
+ previous_block = self.pending_blocks[previous_hash]['block']
+
+ # Check for cycles
+ if previous_hash in visited:
+ print(f"Cycle detected in candidate chain at block {previous_block.number}")
+ return []
+
+ visited.add(previous_hash)
+ chain_blocks.insert(0, previous_block)
+ current_block = previous_block
+ else:
+ # Missing block, cannot validate the chain
+ print(f"Missing block {previous_hash.hex()} in candidate chain")
+ return []
+
+ return chain_blocks
+
+ def _validate_candidate_chain(self, chain_blocks: List[Block]) -> bool:
  """
- Store a global variable in node storage.
+ Validate a candidate chain of blocks.

  Args:
- name: Name of the variable
- value: Value to store
+ chain_blocks: List of blocks in the chain (oldest to newest)
+
+ Returns:
+ True if the chain is valid, False otherwise
  """
- # Store the expression directly in node storage using DAG representation
- root_hash = store_expr(value, self.storage)
-
- # Create a key for this variable name (without special prefixes)
- key = hashlib.sha256(name.encode()).digest()
-
- # Store the root hash reference
- self.storage.put(key, root_hash)
+ # Validate each block in the chain
+ for i, block in enumerate(chain_blocks):
+ # Skip first block, it's either genesis or a block we already have
+ if i == 0:
+ continue
+
+ # Validate block connections
+ if block.previous != chain_blocks[i-1].get_hash():
+ print(f"Invalid chain: block {block.number} does not reference previous block")
+ return False
+
+ # Validate block
+ is_valid = validate_block(block, self.blockchain.get_accounts_at_block(block.previous), self.blockchain.get_blocks())
+ if not is_valid:
+ print(f"Invalid chain: block {block.number} is invalid")
+ return False
+
+ return True

- def query_global_storage(self, name: str):
+ def _is_better_chain(self, chain_blocks: List[Block]) -> bool:
  """
- Retrieve a global variable from node storage.
+ Determine if a candidate chain is better than our current chain.

  Args:
- name: Name of the variable to retrieve
+ chain_blocks: List of blocks in the candidate chain

  Returns:
- The stored expression, or None if not found
+ True if the candidate chain is better, False otherwise
  """
- # Create the key for this variable name
- key = hashlib.sha256(name.encode()).digest()
+ # Get the latest block from the candidate chain
+ candidate_latest = chain_blocks[-1]

- # Try to retrieve the root hash
- root_hash = self.storage.get(key)
+ # If we don't have a latest block, any valid chain is better
+ if not self.latest_block:
+ return True
+
+ # Compare block numbers (longest chain rule)
+ if candidate_latest.number > self.latest_block.number:
+ print(f"Candidate chain is longer: {candidate_latest.number} vs {self.latest_block.number}")
+ return True
+
+ return False

- if root_hash:
- # Load the expression using its root hash
- return get_expr_from_storage(root_hash, self.storage)
+ def _switch_to_new_chain(self, chain_blocks: List[Block]):
+ """
+ Switch to a new chain by adding all blocks to our blockchain.

- return None
+ Args:
+ chain_blocks: List of blocks in the chain (oldest to newest)
+ """
+ # Find the point where the chains diverge
+ divergence_point = 0
+ for i, block in enumerate(chain_blocks):
+ # Check if we have this block in our blockchain
+ if hasattr(self.blockchain, 'has_block') and self.blockchain.has_block(block.get_hash()):
+ divergence_point = i + 1
+ else:
+ break
+
+ # Add all blocks after the divergence point
+ for i in range(divergence_point, len(chain_blocks)):
+ block = chain_blocks[i]
+
+ # Add block to blockchain
+ if hasattr(self.blockchain, 'add_block'):
+ try:
+ self.blockchain.add_block(block)
+
+ # Remove from pending blocks
+ block_hash = block.get_hash()
+ if block_hash in self.pending_blocks:
+ del self.pending_blocks[block_hash]
+
+ print(f"Added block {block.number} to blockchain")
+ except Exception as e:
+ print(f"Error adding block {block.number} to blockchain: {e}")
+ return
+
+ # Update latest block
+ self._update_latest_block(chain_blocks[-1])
+ print(f"Switched to new chain, latest block: {self.latest_block.number}")