lunalib 1.5.2__py3-none-any.whl → 1.6.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lunalib/core/p2p.py ADDED
@@ -0,0 +1,346 @@
1
+ # lunalib/core/p2p.py
2
+ import time
3
+ import requests
4
+ import threading
5
+ import json
6
+ from typing import List, Dict, Optional, Callable
7
+ from queue import Queue
8
+ import hashlib
9
+
10
+
11
class P2PClient:
    """
    P2P client for blockchain synchronization with hybrid primary/peer architecture.
    Downloads initial state from primary node, then syncs via P2P with periodic validation.
    """

    def __init__(self, primary_node_url: str, node_id: Optional[str] = None):
        """Create a client bound to *primary_node_url*.

        node_id: stable peer identifier; derived from hostname + time if omitted.
        """
        self.primary_node = primary_node_url
        self.node_id = node_id or self._generate_node_id()
        self.peers: List[Dict] = []
        # Epoch timestamps of the last maintenance actions.
        self.last_primary_check = 0
        self.last_peer_update = 0
        self.peer_update_interval = 300  # 5 minutes
        self.primary_check_interval = 3600  # 1 hour

        # Callbacks for events (see set_callbacks()).
        self.on_new_block_callback: Optional[Callable] = None
        self.on_new_transaction_callback: Optional[Callable] = None
        self.on_peer_update_callback: Optional[Callable] = None

        # P2P message queue, drained by the background sync loop.
        self.message_queue = Queue()
        self.is_running = False
        self.sync_thread: Optional[threading.Thread] = None

    def _generate_node_id(self) -> str:
        """Generate unique node ID (first 16 hex chars of SHA-256(hostname + time))."""
        import socket
        hostname = socket.gethostname()
        timestamp = str(time.time())
        return hashlib.sha256(f"{hostname}{timestamp}".encode()).hexdigest()[:16]

    def start(self):
        """Start P2P client: initial sync, registration, and background sync loop."""
        if self.is_running:
            return  # idempotent: ignore repeated start() calls

        self.is_running = True

        # Initial setup against the primary node.
        self._initial_sync()
        self._register_with_primary()
        self._update_peer_list()

        # Background sync thread (daemon so it never blocks interpreter exit).
        self.sync_thread = threading.Thread(target=self._sync_loop, daemon=True)
        self.sync_thread.start()

        print(f"✅ P2P Client started (Node ID: {self.node_id})")

    def stop(self):
        """Stop P2P client and wait briefly for the sync thread to exit."""
        self.is_running = False
        if self.sync_thread:
            self.sync_thread.join(timeout=5)
        print("🛑 P2P Client stopped")

    def _initial_sync(self) -> Optional[Dict]:
        """Download initial blockchain and mempool from primary node.

        Returns the decoded blockchain payload, or None on failure.
        """
        try:
            print(f"📥 Initial sync from primary node: {self.primary_node}")

            # Download blockchain (generous timeout: payload can be large).
            response = requests.get(f"{self.primary_node}/api/blockchain/full", timeout=30)
            if response.status_code == 200:
                blockchain_data = response.json()
                print(f"✅ Downloaded blockchain: {len(blockchain_data.get('blocks', []))} blocks")
                return blockchain_data

        except Exception as e:
            print(f"❌ Initial sync failed: {e}")
        return None

    def _register_with_primary(self) -> bool:
        """Register this node with the primary daemon. True on HTTP 200."""
        try:
            peer_info = {
                'node_id': self.node_id,
                'timestamp': time.time(),
                'capabilities': ['sync', 'relay']
            }

            response = requests.post(
                f"{self.primary_node}/api/peers/register",
                json=peer_info,
                timeout=10
            )

            if response.status_code == 200:
                print(f"✅ Registered with primary node as peer: {self.node_id}")
                return True
            print(f"⚠️ Registration failed: {response.status_code}")
            return False

        except Exception as e:
            print(f"❌ Registration error: {e}")
            return False

    def _update_peer_list(self) -> List[Dict]:
        """Fetch the peer list from the primary daemon, excluding this node."""
        try:
            response = requests.get(f"{self.primary_node}/api/peers/list", timeout=10)

            if response.status_code == 200:
                peer_data = response.json()
                new_peers = peer_data.get('peers', [])

                # Filter out self so we never sync against our own node.
                self.peers = [p for p in new_peers if p.get('node_id') != self.node_id]

                print(f"📋 Updated peer list: {len(self.peers)} peers")
                self.last_peer_update = time.time()

                if self.on_peer_update_callback:
                    self.on_peer_update_callback(self.peers)

                return self.peers

        except Exception as e:
            print(f"❌ Peer list update failed: {e}")
        return []

    def _sync_loop(self):
        """Background loop: peer refresh, primary validation, peer sync, and
        message processing, every 5 seconds until stop() is called."""
        while self.is_running:
            try:
                current_time = time.time()

                # Periodic peer list update
                if current_time - self.last_peer_update > self.peer_update_interval:
                    self._update_peer_list()

                # Periodic primary node validation
                if current_time - self.last_primary_check > self.primary_check_interval:
                    self._validate_with_primary()

                # Sync with peers
                self._sync_from_peers()

                # Process message queue
                self._process_messages()

                time.sleep(5)  # Check every 5 seconds

            except Exception as e:
                # Keep the loop alive on transient errors; back off briefly.
                print(f"❌ Sync loop error: {e}")
                time.sleep(10)

    def _sync_from_peers(self):
        """Best-effort pull of the latest block from up to 5 peers."""
        for peer in self.peers[:5]:  # Sync with first 5 peers only
            peer_url = peer.get('url')
            if not peer_url:
                continue
            try:
                # Get latest block from peer
                response = requests.get(f"{peer_url}/api/blockchain/latest", timeout=5)
                if response.status_code == 200 and self.on_new_block_callback:
                    self.on_new_block_callback(response.json())
            except Exception:
                # Deliberate best-effort: unreachable peers are skipped silently.
                pass

    def _validate_with_primary(self) -> Optional[Dict]:
        """Validate local blockchain state against the primary node's tip."""
        try:
            print("🔍 Validating with primary node...")

            response = requests.get(f"{self.primary_node}/api/blockchain/latest", timeout=10)
            if response.status_code == 200:
                primary_latest = response.json()
                # Guard against a malformed payload: 'hash' may be missing or
                # None, and None[:16] would raise TypeError.
                primary_hash = primary_latest.get('hash') or ''
                primary_height = primary_latest.get('index')

                print(f"✅ Primary validation: Block #{primary_height}, Hash: {primary_hash[:16]}...")
                self.last_primary_check = time.time()

                return primary_latest

        except Exception as e:
            print(f"❌ Primary validation failed: {e}")
        return None

    def _process_messages(self):
        """Drain queued P2P messages and dispatch them to the callbacks."""
        while True:
            # EAFP drain: Queue.empty() is racy with concurrent producers, so
            # pop until get_nowait() reports the queue is exhausted.
            try:
                message = self.message_queue.get_nowait()
            except Exception:  # queue.Empty
                break
            try:
                msg_type = message.get('type')

                if msg_type == 'new_block':
                    if self.on_new_block_callback:
                        self.on_new_block_callback(message.get('data'))
                elif msg_type == 'new_transaction':
                    if self.on_new_transaction_callback:
                        self.on_new_transaction_callback(message.get('data'))

            except Exception as e:
                print(f"❌ Message processing error: {e}")

    def broadcast_block(self, block: Dict):
        """Broadcast new block to all known peers (best-effort)."""
        self._broadcast('/api/blocks/new', block)

    def broadcast_transaction(self, transaction: Dict):
        """Broadcast new transaction to all known peers (best-effort)."""
        self._broadcast('/api/transactions/new', transaction)

    def _broadcast(self, endpoint: str, payload: Dict):
        """POST *payload* to *endpoint* on every peer, ignoring failures.

        Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt propagate.
        """
        for peer in self.peers:
            peer_url = peer.get('url')
            if not peer_url:
                continue
            try:
                requests.post(f"{peer_url}{endpoint}", json=payload, timeout=3)
            except Exception:
                pass  # Silent fail for broadcasts (deliberate best-effort)

    def get_peers(self) -> List[Dict]:
        """Get current peer list (as last fetched from the primary)."""
        return self.peers

    def set_callbacks(self, on_new_block=None, on_new_transaction=None, on_peer_update=None):
        """Set event callbacks for new blocks, new transactions, and peer updates."""
        self.on_new_block_callback = on_new_block
        self.on_new_transaction_callback = on_new_transaction
        self.on_peer_update_callback = on_peer_update
254
+
255
+
256
class HybridBlockchainClient:
    """
    Hybrid blockchain client that combines primary node trust with P2P scalability.
    - Initial sync from primary node
    - P2P updates from peers
    - Periodic validation against primary node
    """

    def __init__(self, primary_node_url: str, blockchain_manager, mempool_manager):
        """Wire a P2P client to the local blockchain and mempool managers."""
        self.primary_node = primary_node_url
        self.blockchain = blockchain_manager
        self.mempool = mempool_manager

        # Initialize P2P client and route its events to local handlers.
        self.p2p = P2PClient(primary_node_url)
        self.p2p.set_callbacks(
            on_new_block=self._handle_new_block,
            on_new_transaction=self._handle_new_transaction,
            on_peer_update=self._handle_peer_update
        )

    def start(self):
        """Start hybrid client (delegates to the underlying P2P client)."""
        print("🚀 Starting Hybrid Blockchain Client...")
        self.p2p.start()

    def stop(self):
        """Stop hybrid client."""
        self.p2p.stop()

    def _handle_new_block(self, block: Dict):
        """Handle new block from P2P network: validate with primary, then accept."""
        try:
            # Validate block before adding
            if self._validate_block_with_primary(block):
                print(f"📦 New block from P2P: #{block.get('index')}")
                # Add to local blockchain
                # self.blockchain.add_block(block)
        except Exception as e:
            print(f"❌ Block handling error: {e}")

    def _handle_new_transaction(self, transaction: Dict):
        """Handle new transaction from P2P network: validate, then accept."""
        try:
            # Validate transaction with primary node
            if self._validate_transaction_with_primary(transaction):
                # Guard: 'hash' may be present with value None; None[:16] would
                # raise TypeError with the previous .get('hash', 'unknown') form.
                tx_hash = transaction.get('hash') or 'unknown'
                print(f"💳 New transaction from P2P: {tx_hash[:16]}...")
                # Add to local mempool
                # self.mempool.add_transaction(transaction)
        except Exception as e:
            print(f"❌ Transaction handling error: {e}")

    def _handle_peer_update(self, peers: List[Dict]):
        """Handle peer list update (informational only)."""
        print(f"👥 Peer list updated: {len(peers)} peers available")

    def _validate_block_with_primary(self, block: Dict) -> bool:
        """Validate block against primary node; False on any error."""
        return self._validate_with_primary('/api/blocks/validate', block)

    def _validate_transaction_with_primary(self, transaction: Dict) -> bool:
        """Validate transaction against primary node; False on any error."""
        return self._validate_with_primary('/api/transactions/validate', transaction)

    def _validate_with_primary(self, endpoint: str, payload: Dict) -> bool:
        """POST *payload* to the primary's *endpoint*; True iff it reports valid.

        Shared helper for the two validators above. Narrowed from a bare
        ``except:`` so SystemExit/KeyboardInterrupt propagate.
        """
        try:
            response = requests.post(
                f"{self.primary_node}{endpoint}",
                json=payload,
                timeout=5
            )
            return response.status_code == 200 and response.json().get('valid', False)
        except Exception:
            return False

    def broadcast_block(self, block: Dict):
        """Broadcast block to P2P network."""
        self.p2p.broadcast_block(block)

    def broadcast_transaction(self, transaction: Dict):
        """Broadcast transaction to P2P network."""
        self.p2p.broadcast_transaction(transaction)

    def get_peers(self) -> List[Dict]:
        """Get current peer list."""
        return self.p2p.get_peers()
@@ -1,3 +1,11 @@
1
+ import sys
2
def safe_print(*args, **kwargs):
    """Print that never raises UnicodeEncodeError on narrow consoles.

    On encoding failure, each argument is degraded with errors='replace'
    using the stdout encoding (UTF-8 when stdout reports none) and reprinted.
    """
    encoding = sys.stdout.encoding or 'utf-8'
    try:
        print(*args, **kwargs)
    except UnicodeEncodeError:
        degraded = [str(arg).encode(encoding, errors='replace').decode(encoding) for arg in args]
        print(*degraded, **kwargs)
8
# NOTE(review): removed an unconditional
# safe_print("Warning: cryptography library not available. Using fallback methods.")
# call here. It fired on every import — even when the cryptography library IS
# available — duplicating the identical warning already emitted inside the
# `except ImportError` handler below, which is the correct, guarded location.
1
9
  import time
2
10
  import hashlib
3
11
  import secrets
@@ -12,7 +20,7 @@ try:
12
20
  from cryptography.exceptions import InvalidSignature
13
21
  CRYPTOGRAPHY_AVAILABLE = True
14
22
  except ImportError:
15
- print("Warning: cryptography library not available. Using fallback methods.")
23
+ safe_print("Warning: cryptography library not available. Using fallback methods.")
16
24
  CRYPTOGRAPHY_AVAILABLE = False
17
25
 
18
26
  from .bill_registry import BillRegistry
@@ -182,7 +190,7 @@ class DigitalBill:
182
190
 
183
191
  return self.signature
184
192
  except Exception as e:
185
- print(f"Cryptographic signing failed, using fallback: {e}")
193
+ safe_print(f"Cryptographic signing failed, using fallback: {e}")
186
194
  return self._sign_fallback(private_key)
187
195
 
188
196
  def _sign_fallback(self, private_key):
lunalib/gtx/genesis.py CHANGED
@@ -48,7 +48,7 @@ class GTXGenesis:
48
48
  return {'valid': False, 'error': 'Bill not found in registry'}
49
49
 
50
50
  # DEBUG: Print what we received
51
- print(f"DEBUG: Full bill record: {bill_record}")
51
+ safe_print(f"DEBUG: Full bill record: {bill_record}")
52
52
 
53
53
  # Extract the actual bill_data from the metadata field
54
54
  bill_data = bill_record.get('metadata', {})
@@ -56,7 +56,7 @@ class GTXGenesis:
56
56
  return {'valid': False, 'error': 'No bill data found in metadata'}
57
57
 
58
58
  # DEBUG: Print the extracted bill_data
59
- print(f"DEBUG: Extracted bill_data: {bill_data}")
59
+ safe_print(f"DEBUG: Extracted bill_data: {bill_data}")
60
60
 
61
61
  # Extract signature components from bill_data (not from bill_record)
62
62
  public_key = bill_data.get('public_key')
@@ -68,10 +68,10 @@ class GTXGenesis:
68
68
  timestamp = bill_data.get('timestamp', 0)
69
69
  bill_type = bill_data.get('type', 'GTX_Genesis')
70
70
 
71
- print(f"🔍 GTXGenesis.verify_bill() for {front_serial}:")
72
- print(f" Signature: {signature}")
73
- print(f" Public Key: {public_key}")
74
- print(f" Metadata Hash: {metadata_hash}")
71
+ safe_print(f"🔍 GTXGenesis.verify_bill() for {front_serial}:")
72
+ safe_print(f" Signature: {signature}")
73
+ safe_print(f" Public Key: {public_key}")
74
+ safe_print(f" Metadata Hash: {metadata_hash}")
75
75
 
76
76
  # Use the same verification logic as the endpoint
77
77
  verification_method = "unknown"
@@ -81,7 +81,7 @@ class GTXGenesis:
81
81
  if metadata_hash and signature == metadata_hash:
82
82
  signature_valid = True
83
83
  verification_method = "signature_is_metadata_hash"
84
- print(f"✅ Verified: signature matches metadata_hash")
84
+ safe_print(f"✅ Verified: signature matches metadata_hash")
85
85
 
86
86
  # METHOD 2: Check hash of public_key + metadata_hash
87
87
  elif signature_valid is None and metadata_hash and public_key and signature:
@@ -90,7 +90,7 @@ class GTXGenesis:
90
90
  if signature == expected_signature:
91
91
  signature_valid = True
92
92
  verification_method = "metadata_hash_signature"
93
- print(f"Verified: hash(public_key + metadata_hash)")
93
+ safe_print(f"Verified: hash(public_key + metadata_hash)")
94
94
 
95
95
  # METHOD 3: Check DigitalBill calculated hash
96
96
  elif signature_valid is None:
@@ -122,32 +122,31 @@ class GTXGenesis:
122
122
  if signature == calculated_hash:
123
123
  signature_valid = True
124
124
  verification_method = "digital_bill_calculate_hash"
125
- print(f"Verified: DigitalBill.calculate_hash()")
125
+ safe_print(f"Verified: DigitalBill.calculate_hash()")
126
126
  print(f" Calculated hash: {calculated_hash}")
127
127
 
128
128
  # Approach 2: Use the verify() method (checks all signature types)
129
129
  elif digital_bill.verify():
130
130
  signature_valid = True
131
131
  verification_method = "digital_bill_verify_method"
132
- print(f"Verified: DigitalBill.verify()")
132
+ safe_print(f"Verified: DigitalBill.verify()")
133
133
 
134
134
  # Approach 3: Check if signature matches metadata_hash generation
135
135
  elif signature == digital_bill._generate_metadata_hash():
136
136
  signature_valid = True
137
137
  verification_method = "digital_bill_metadata_hash"
138
- print(f"Verified: matches generated metadata_hash")
138
+ safe_print(f"Verified: matches generated metadata_hash")
139
139
 
140
140
  else:
141
- print(f"DigitalBill verification failed:")
142
- print(f" Calculated hash: {calculated_hash}")
143
- print(f" Signature: {signature}")
144
- print(f" Metadata hash: {metadata_hash}")
145
- print(f" Public key: {public_key}")
146
-
141
+ safe_print(f"DigitalBill verification failed:")
142
+ safe_print(f" Calculated hash: {calculated_hash}")
143
+ safe_print(f" Signature: {signature}")
144
+ safe_print(f" Metadata hash: {metadata_hash}")
145
+ safe_print(f" Public key: {public_key}")
147
146
  except Exception as e:
148
- print(f"DigitalBill verification error: {e}")
147
+ safe_print(f"DigitalBill verification error: {e}")
149
148
  import traceback
150
- print(f"Traceback: {traceback.format_exc()}")
149
+ safe_print(f"Traceback: {traceback.format_exc()}")
151
150
 
152
151
  # METHOD 4: Check simple concatenation hash
153
152
  elif signature_valid is None and signature:
@@ -156,7 +155,7 @@ class GTXGenesis:
156
155
  if signature == expected_simple_hash:
157
156
  signature_valid = True
158
157
  verification_method = "simple_hash"
159
- print(f"✅ Verified: hash(serial+denom+issued+timestamp)")
158
+ safe_print(f"✅ Verified: hash(serial+denom+issued+timestamp)")
160
159
 
161
160
  # METHOD 5: Check bill JSON hash
162
161
  elif signature_valid is None:
@@ -173,19 +172,19 @@ class GTXGenesis:
173
172
  if signature == bill_json_hash:
174
173
  signature_valid = True
175
174
  verification_method = "bill_json_hash"
176
- print(f"Verified: hash(bill_data_json)")
175
+ safe_print(f"Verified: hash(bill_data_json)")
177
176
 
178
177
  # Final fallback: accept any non-empty signature temporarily
179
178
  if signature_valid is None and signature and len(signature) > 10:
180
179
  signature_valid = True
181
180
  verification_method = "fallback_accept"
182
- print(f"Using fallback acceptance for signature")
181
+ safe_print(f"Using fallback acceptance for signature")
183
182
 
184
183
  # If all methods failed
185
184
  if signature_valid is None:
186
185
  signature_valid = False
187
186
  verification_method = "all_failed"
188
- print(f"All verification methods failed")
187
+ safe_print(f"All verification methods failed")
189
188
 
190
189
  # Return result in same format as endpoint
191
190
  if signature_valid:
@@ -337,7 +336,7 @@ class GTXGenesis:
337
336
  return computed_hash == bill_info['hash']
338
337
 
339
338
  except Exception as e:
340
- print(f"Bill verification error: {e}")
339
+ safe_print(f"Bill verification error: {e}")
341
340
  return False
342
341
 
343
342
  def _get_denomination_breakdown(self, bills: List[Dict]) -> Dict[int, int]:
@@ -0,0 +1,5 @@
1
+ from .miner import GenesisMiner, Miner
2
+ from .difficulty import DifficultySystem
3
+ from .cuda_manager import CUDAManager
4
+
5
+ __all__ = ['GenesisMiner', 'Miner', 'DifficultySystem', 'CUDAManager']
@@ -47,7 +47,7 @@ class CUDAManager:
47
47
  self.cuda_available = False
48
48
 
49
49
  def cuda_mine_batch(self, mining_data: Dict, difficulty: int, batch_size: int = 100000) -> Optional[Dict]:
50
- """Mine using CUDA acceleration"""
50
+ """Mine using CUDA acceleration with CPU-side hash computation"""
51
51
  if not self.cuda_available:
52
52
  return None
53
53
 
@@ -56,24 +56,27 @@ class CUDAManager:
56
56
  nonce_start = 0
57
57
  start_time = time.time()
58
58
 
59
+ # Pre-compute the base string without nonce for efficiency
60
+ base_data = {k: v for k, v in mining_data.items() if k != 'nonce'}
61
+
59
62
  while True:
60
- # Prepare batch data for GPU
61
- nonces = cp.arange(nonce_start, nonce_start + batch_size, dtype=cp.uint64)
62
- mining_strings = self._prepare_mining_batch(mining_data, nonces)
63
+ # Generate nonces on GPU for parallel processing
64
+ nonces_gpu = cp.arange(nonce_start, nonce_start + batch_size, dtype=cp.int64)
65
+ nonces_cpu = cp.asnumpy(nonces_gpu) # Transfer to CPU for hashing
63
66
 
64
- # Compute hashes on GPU
65
- hashes = self._compute_hashes_gpu(mining_strings)
67
+ # Compute hashes in parallel on CPU (GPU hash acceleration requires custom CUDA kernels)
68
+ hashes = self._compute_hashes_parallel(base_data, nonces_cpu)
66
69
 
67
70
  # Check for successful hash
68
71
  for i, hash_hex in enumerate(hashes):
69
72
  if hash_hex.startswith(target):
70
73
  mining_time = time.time() - start_time
71
- successful_nonce = nonce_start + i
74
+ successful_nonce = int(nonces_cpu[i])
72
75
 
73
76
  return {
74
77
  "success": True,
75
78
  "hash": hash_hex,
76
- "nonce": int(successful_nonce),
79
+ "nonce": successful_nonce,
77
80
  "mining_time": mining_time,
78
81
  "method": "cuda"
79
82
  }
@@ -85,36 +88,28 @@ class CUDAManager:
85
88
  current_time = time.time()
86
89
  hashrate = nonce_start / (current_time - start_time)
87
90
  print(f"⏳ CUDA: {nonce_start:,} attempts | {hashrate:,.0f} H/s")
91
+
92
+ # Timeout check
93
+ if time.time() - start_time > 300: # 5 minutes timeout
94
+ break
88
95
 
89
96
  except Exception as e:
90
97
  print(f"CUDA mining error: {e}")
91
98
 
92
99
  return None
93
100
 
94
- def _prepare_mining_batch(self, mining_data: Dict, nonces) -> Any:
95
- """Prepare batch mining data for GPU"""
96
- mining_strings = []
101
+ def _compute_hashes_parallel(self, base_data: Dict, nonces: list) -> list:
102
+ """Compute SHA256 hashes in parallel on CPU (string operations not supported on GPU)"""
103
+ hashes = []
97
104
 
98
105
  for nonce in nonces:
106
+ # Create mining data with current nonce
107
+ mining_data = base_data.copy()
99
108
  mining_data["nonce"] = int(nonce)
100
- data_string = json.dumps(mining_data, sort_keys=True)
101
- mining_strings.append(data_string.encode())
102
109
 
103
- return cp.array(mining_strings)
104
-
105
- def _compute_hashes_gpu(self, mining_strings) -> list:
106
- """Compute SHA256 hashes on GPU"""
107
- # Convert to CuPy array if needed
108
- if not isinstance(mining_strings, cp.ndarray):
109
- mining_strings = cp.array(mining_strings)
110
-
111
- # This is a simplified implementation
112
- # In a real implementation, you'd use proper CUDA kernels
113
- hashes = []
114
- for data in mining_strings:
115
- # For now, fall back to CPU hashing
116
- # A real implementation would use CUDA-accelerated hashing
117
- hash_obj = hashlib.sha256(data.tobytes())
110
+ # Compute hash
111
+ data_string = json.dumps(mining_data, sort_keys=True)
112
+ hash_obj = hashlib.sha256(data_string.encode())
118
113
  hashes.append(hash_obj.hexdigest())
119
114
 
120
115
  return hashes
@@ -65,6 +65,44 @@ class DifficultySystem:
65
65
 
66
66
  return base_reward * (1 + time_bonus)
67
67
 
68
+ def calculate_block_reward(self, difficulty: int) -> float:
69
+ """Calculate block reward based on difficulty using exponential system
70
+
71
+ Reward = 10^(difficulty - 1)
72
+ difficulty 1 = 1 LKC (10^0)
73
+ difficulty 2 = 10 LKC (10^1)
74
+ difficulty 3 = 100 LKC (10^2)
75
+ difficulty 9 = 100,000,000 LKC (10^8)
76
+ """
77
+ if difficulty < 1:
78
+ difficulty = 1
79
+ elif difficulty > 9:
80
+ difficulty = 9
81
+
82
+ return 10 ** (difficulty - 1)
83
+
84
def validate_block_hash(self, block_hash: str, difficulty: int) -> bool:
    """Validate that block hash meets difficulty requirement.

    A hash qualifies when it begins with *difficulty* leading zeros.
    Empty/missing hashes and difficulties below 1 never qualify.
    """
    if difficulty < 1 or not block_hash:
        return False
    return block_hash[:difficulty] == "0" * difficulty
91
+
92
def validate_block_structure(self, block: dict) -> tuple:
    """Validate block has all required fields

    Returns: (is_valid, error_message) — error names the first missing
    field (in canonical order), or "" when the block is complete.
    """
    required_fields = ('index', 'previous_hash', 'timestamp', 'transactions',
                       'miner', 'difficulty', 'nonce', 'hash', 'reward')

    missing = next((name for name in required_fields if name not in block), None)
    if missing is not None:
        return False, f"Missing required field: {missing}"
    return True, ""
105
+
68
106
  def get_difficulty_name(self, difficulty_level):
69
107
  """Get human-readable name for difficulty level"""
70
108
  names = {