lunalib 1.6.0__py3-none-any.whl → 1.6.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lunalib/core/__init__.py +14 -0
- lunalib/core/daemon.py +391 -0
- lunalib/core/p2p.py +346 -0
- lunalib/mining/__init__.py +5 -1
- lunalib/mining/cuda_manager.py +23 -28
- lunalib/mining/difficulty.py +38 -0
- lunalib/mining/miner.py +436 -45
- {lunalib-1.6.0.dist-info → lunalib-1.6.6.dist-info}/METADATA +1 -1
- {lunalib-1.6.0.dist-info → lunalib-1.6.6.dist-info}/RECORD +11 -9
- {lunalib-1.6.0.dist-info → lunalib-1.6.6.dist-info}/WHEEL +0 -0
- {lunalib-1.6.0.dist-info → lunalib-1.6.6.dist-info}/top_level.txt +0 -0
lunalib/core/p2p.py
ADDED
|
@@ -0,0 +1,346 @@
|
|
|
1
|
+
# lunalib/core/p2p.py
|
|
2
|
+
import time
|
|
3
|
+
import requests
|
|
4
|
+
import threading
|
|
5
|
+
import json
|
|
6
|
+
from typing import List, Dict, Optional, Callable
|
|
7
|
+
from queue import Queue
|
|
8
|
+
import hashlib
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class P2PClient:
    """
    P2P client for blockchain synchronization with hybrid primary/peer architecture.

    Downloads initial state from the primary node, then syncs via P2P with
    periodic validation against the primary node.
    """

    def __init__(self, primary_node_url: str, node_id: Optional[str] = None):
        """Create a client bound to a primary node.

        Args:
            primary_node_url: Base URL of the trusted primary daemon.
            node_id: Stable identifier for this node; generated when omitted.
        """
        self.primary_node = primary_node_url
        self.node_id = node_id or self._generate_node_id()
        self.peers = []  # peer-info dicts fetched from the primary node
        self.last_primary_check = 0
        self.last_peer_update = 0
        self.peer_update_interval = 300  # 5 minutes
        self.primary_check_interval = 3600  # 1 hour

        # Callbacks for events (set via set_callbacks)
        self.on_new_block_callback = None
        self.on_new_transaction_callback = None
        self.on_peer_update_callback = None

        # P2P message queue, drained by the background sync loop
        self.message_queue = Queue()
        self.is_running = False
        self.sync_thread = None

    def _generate_node_id(self) -> str:
        """Generate a unique node ID (16 hex chars) from hostname + current time."""
        import socket
        hostname = socket.gethostname()
        timestamp = str(time.time())
        return hashlib.sha256(f"{hostname}{timestamp}".encode()).hexdigest()[:16]

    def start(self):
        """Start P2P client: initial sync, registration, and background loop."""
        if self.is_running:
            return  # idempotent: ignore repeated start() calls

        self.is_running = True

        # Initial setup
        self._initial_sync()
        self._register_with_primary()
        self._update_peer_list()

        # Start background sync thread (daemon: exits with the process)
        self.sync_thread = threading.Thread(target=self._sync_loop, daemon=True)
        self.sync_thread.start()

        print(f"✅ P2P Client started (Node ID: {self.node_id})")

    def stop(self):
        """Stop P2P client and wait briefly for the sync thread to exit."""
        self.is_running = False
        if self.sync_thread:
            self.sync_thread.join(timeout=5)
        print("🛑 P2P Client stopped")

    def _initial_sync(self):
        """Download initial blockchain and mempool from primary node.

        Returns:
            The decoded blockchain payload dict, or None on any failure
            (non-200 response or network error).
        """
        try:
            print(f"📥 Initial sync from primary node: {self.primary_node}")

            # Download blockchain
            response = requests.get(f"{self.primary_node}/api/blockchain/full", timeout=30)
            if response.status_code == 200:
                blockchain_data = response.json()
                print(f"✅ Downloaded blockchain: {len(blockchain_data.get('blocks', []))} blocks")
                return blockchain_data

        except Exception as e:
            print(f"❌ Initial sync failed: {e}")
            return None

    def _register_with_primary(self):
        """Register this node with the primary daemon.

        Returns:
            True when the primary accepted the registration, else False.
        """
        try:
            peer_info = {
                'node_id': self.node_id,
                'timestamp': time.time(),
                'capabilities': ['sync', 'relay']
            }

            response = requests.post(
                f"{self.primary_node}/api/peers/register",
                json=peer_info,
                timeout=10
            )

            if response.status_code == 200:
                print(f"✅ Registered with primary node as peer: {self.node_id}")
                return True
            else:
                print(f"⚠️ Registration failed: {response.status_code}")
                return False

        except Exception as e:
            print(f"❌ Registration error: {e}")
            return False

    def _update_peer_list(self):
        """Get updated peer list from the primary daemon.

        Returns:
            The new peer list (excluding this node), or [] on failure.
        """
        try:
            response = requests.get(f"{self.primary_node}/api/peers/list", timeout=10)

            if response.status_code == 200:
                peer_data = response.json()
                new_peers = peer_data.get('peers', [])

                # Filter out self so we never sync with / broadcast to ourselves
                self.peers = [p for p in new_peers if p.get('node_id') != self.node_id]

                print(f"📋 Updated peer list: {len(self.peers)} peers")
                self.last_peer_update = time.time()

                if self.on_peer_update_callback:
                    self.on_peer_update_callback(self.peers)

                return self.peers

        except Exception as e:
            print(f"❌ Peer list update failed: {e}")
            return []

    def _sync_loop(self):
        """Background sync loop: peer refresh, primary validation, peer sync."""
        while self.is_running:
            try:
                current_time = time.time()

                # Periodic peer list update
                if current_time - self.last_peer_update > self.peer_update_interval:
                    self._update_peer_list()

                # Periodic primary node validation
                if current_time - self.last_primary_check > self.primary_check_interval:
                    self._validate_with_primary()

                # Sync with peers
                self._sync_from_peers()

                # Process message queue
                self._process_messages()

                time.sleep(5)  # Check every 5 seconds

            except Exception as e:
                print(f"❌ Sync loop error: {e}")
                time.sleep(10)  # back off after an unexpected failure

    def _sync_from_peers(self):
        """Sync new blocks and transactions from a handful of peers."""
        for peer in self.peers[:5]:  # Sync with first 5 peers only, to bound latency
            try:
                peer_url = peer.get('url')
                if not peer_url:
                    continue

                # Get latest block from peer
                response = requests.get(f"{peer_url}/api/blockchain/latest", timeout=5)
                if response.status_code == 200:
                    latest_block = response.json()

                    if self.on_new_block_callback:
                        self.on_new_block_callback(latest_block)

            except Exception:
                # Deliberate best-effort: an unreachable peer is not an error
                pass

    def _validate_with_primary(self):
        """Validate local blockchain state against the primary node.

        Returns:
            The primary's latest-block payload, or None on failure.
        """
        try:
            print("🔍 Validating with primary node...")

            response = requests.get(f"{self.primary_node}/api/blockchain/latest", timeout=10)
            if response.status_code == 200:
                primary_latest = response.json()
                primary_hash = primary_latest.get('hash')
                primary_height = primary_latest.get('index')

                print(f"✅ Primary validation: Block #{primary_height}, Hash: {primary_hash[:16]}...")
                self.last_primary_check = time.time()

                return primary_latest

        except Exception as e:
            print(f"❌ Primary validation failed: {e}")
            return None

    def _process_messages(self):
        """Drain the P2P message queue, dispatching to the registered callbacks."""
        while not self.message_queue.empty():
            try:
                message = self.message_queue.get_nowait()
                msg_type = message.get('type')

                if msg_type == 'new_block':
                    if self.on_new_block_callback:
                        self.on_new_block_callback(message.get('data'))
                elif msg_type == 'new_transaction':
                    if self.on_new_transaction_callback:
                        self.on_new_transaction_callback(message.get('data'))

            except Exception as e:
                print(f"❌ Message processing error: {e}")

    def broadcast_block(self, block: Dict):
        """Broadcast a new block to all known peers (best-effort, fire-and-forget)."""
        for peer in self.peers:
            try:
                peer_url = peer.get('url')
                if peer_url:
                    requests.post(
                        f"{peer_url}/api/blocks/new",
                        json=block,
                        timeout=3
                    )
            except Exception:
                pass  # Silent fail for broadcasts (bare except would also trap SystemExit)

    def broadcast_transaction(self, transaction: Dict):
        """Broadcast a new transaction to all known peers (best-effort)."""
        for peer in self.peers:
            try:
                peer_url = peer.get('url')
                if peer_url:
                    requests.post(
                        f"{peer_url}/api/transactions/new",
                        json=transaction,
                        timeout=3
                    )
            except Exception:
                pass  # Silent fail for broadcasts

    def get_peers(self) -> List[Dict]:
        """Get the current peer list."""
        return self.peers

    def set_callbacks(self, on_new_block=None, on_new_transaction=None, on_peer_update=None):
        """Set event callbacks for new blocks, new transactions, and peer updates."""
        self.on_new_block_callback = on_new_block
        self.on_new_transaction_callback = on_new_transaction
        self.on_peer_update_callback = on_peer_update
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
class HybridBlockchainClient:
    """
    Hybrid blockchain client that combines primary node trust with P2P scalability.

    - Initial sync from primary node
    - P2P updates from peers
    - Periodic validation against primary node
    """

    def __init__(self, primary_node_url: str, blockchain_manager, mempool_manager):
        """Wire a P2P client to local blockchain/mempool managers.

        Args:
            primary_node_url: Base URL of the trusted primary daemon.
            blockchain_manager: Local blockchain store (project type).
            mempool_manager: Local mempool store (project type).
        """
        self.primary_node = primary_node_url
        self.blockchain = blockchain_manager
        self.mempool = mempool_manager

        # Initialize P2P client and route its events into our handlers
        self.p2p = P2PClient(primary_node_url)
        self.p2p.set_callbacks(
            on_new_block=self._handle_new_block,
            on_new_transaction=self._handle_new_transaction,
            on_peer_update=self._handle_peer_update
        )

    def start(self):
        """Start hybrid client."""
        print("🚀 Starting Hybrid Blockchain Client...")
        self.p2p.start()

    def stop(self):
        """Stop hybrid client."""
        self.p2p.stop()

    def _handle_new_block(self, block: Dict):
        """Handle a new block received from the P2P network."""
        try:
            # Validate against the primary node before accepting
            if self._validate_block_with_primary(block):
                print(f"📦 New block from P2P: #{block.get('index')}")
                # TODO: persist to local blockchain once the manager API is wired up:
                # self.blockchain.add_block(block)
        except Exception as e:
            print(f"❌ Block handling error: {e}")

    def _handle_new_transaction(self, transaction: Dict):
        """Handle a new transaction received from the P2P network."""
        try:
            # Validate against the primary node before accepting
            if self._validate_transaction_with_primary(transaction):
                print(f"💳 New transaction from P2P: {transaction.get('hash', 'unknown')[:16]}...")
                # TODO: persist to local mempool once the manager API is wired up:
                # self.mempool.add_transaction(transaction)
        except Exception as e:
            print(f"❌ Transaction handling error: {e}")

    def _handle_peer_update(self, peers: List[Dict]):
        """Handle a peer list update (informational only)."""
        print(f"👥 Peer list updated: {len(peers)} peers available")

    def _validate_block_with_primary(self, block: Dict) -> bool:
        """Ask the primary node whether *block* is valid.

        Returns:
            True only when the primary responds 200 with {'valid': true};
            any network/parse failure yields False.
        """
        try:
            response = requests.post(
                f"{self.primary_node}/api/blocks/validate",
                json=block,
                timeout=5
            )
            return response.status_code == 200 and response.json().get('valid', False)
        except Exception:
            # Narrowed from a bare except (which would also trap SystemExit)
            return False

    def _validate_transaction_with_primary(self, transaction: Dict) -> bool:
        """Ask the primary node whether *transaction* is valid.

        Returns:
            True only when the primary responds 200 with {'valid': true};
            any network/parse failure yields False.
        """
        try:
            response = requests.post(
                f"{self.primary_node}/api/transactions/validate",
                json=transaction,
                timeout=5
            )
            return response.status_code == 200 and response.json().get('valid', False)
        except Exception:
            return False

    def broadcast_block(self, block: Dict):
        """Broadcast block to P2P network."""
        self.p2p.broadcast_block(block)

    def broadcast_transaction(self, transaction: Dict):
        """Broadcast transaction to P2P network."""
        self.p2p.broadcast_transaction(transaction)

    def get_peers(self) -> List[Dict]:
        """Get current peer list."""
        return self.p2p.get_peers()
|
lunalib/mining/__init__.py
CHANGED
lunalib/mining/cuda_manager.py
CHANGED
|
@@ -47,7 +47,7 @@ class CUDAManager:
|
|
|
47
47
|
self.cuda_available = False
|
|
48
48
|
|
|
49
49
|
def cuda_mine_batch(self, mining_data: Dict, difficulty: int, batch_size: int = 100000) -> Optional[Dict]:
|
|
50
|
-
"""Mine using CUDA acceleration"""
|
|
50
|
+
"""Mine using CUDA acceleration with CPU-side hash computation"""
|
|
51
51
|
if not self.cuda_available:
|
|
52
52
|
return None
|
|
53
53
|
|
|
@@ -56,24 +56,27 @@ class CUDAManager:
|
|
|
56
56
|
nonce_start = 0
|
|
57
57
|
start_time = time.time()
|
|
58
58
|
|
|
59
|
+
# Pre-compute the base string without nonce for efficiency
|
|
60
|
+
base_data = {k: v for k, v in mining_data.items() if k != 'nonce'}
|
|
61
|
+
|
|
59
62
|
while True:
|
|
60
|
-
#
|
|
61
|
-
|
|
62
|
-
|
|
63
|
+
# Generate nonces on GPU for parallel processing
|
|
64
|
+
nonces_gpu = cp.arange(nonce_start, nonce_start + batch_size, dtype=cp.int64)
|
|
65
|
+
nonces_cpu = cp.asnumpy(nonces_gpu) # Transfer to CPU for hashing
|
|
63
66
|
|
|
64
|
-
# Compute hashes on GPU
|
|
65
|
-
hashes = self.
|
|
67
|
+
# Compute hashes in parallel on CPU (GPU hash acceleration requires custom CUDA kernels)
|
|
68
|
+
hashes = self._compute_hashes_parallel(base_data, nonces_cpu)
|
|
66
69
|
|
|
67
70
|
# Check for successful hash
|
|
68
71
|
for i, hash_hex in enumerate(hashes):
|
|
69
72
|
if hash_hex.startswith(target):
|
|
70
73
|
mining_time = time.time() - start_time
|
|
71
|
-
successful_nonce =
|
|
74
|
+
successful_nonce = int(nonces_cpu[i])
|
|
72
75
|
|
|
73
76
|
return {
|
|
74
77
|
"success": True,
|
|
75
78
|
"hash": hash_hex,
|
|
76
|
-
"nonce":
|
|
79
|
+
"nonce": successful_nonce,
|
|
77
80
|
"mining_time": mining_time,
|
|
78
81
|
"method": "cuda"
|
|
79
82
|
}
|
|
@@ -85,36 +88,28 @@ class CUDAManager:
|
|
|
85
88
|
current_time = time.time()
|
|
86
89
|
hashrate = nonce_start / (current_time - start_time)
|
|
87
90
|
print(f"⏳ CUDA: {nonce_start:,} attempts | {hashrate:,.0f} H/s")
|
|
91
|
+
|
|
92
|
+
# Timeout check
|
|
93
|
+
if time.time() - start_time > 300: # 5 minutes timeout
|
|
94
|
+
break
|
|
88
95
|
|
|
89
96
|
except Exception as e:
|
|
90
97
|
print(f"CUDA mining error: {e}")
|
|
91
98
|
|
|
92
99
|
return None
|
|
93
100
|
|
|
94
|
-
def
|
|
95
|
-
"""
|
|
96
|
-
|
|
101
|
+
def _compute_hashes_parallel(self, base_data: Dict, nonces: list) -> list:
|
|
102
|
+
"""Compute SHA256 hashes in parallel on CPU (string operations not supported on GPU)"""
|
|
103
|
+
hashes = []
|
|
97
104
|
|
|
98
105
|
for nonce in nonces:
|
|
106
|
+
# Create mining data with current nonce
|
|
107
|
+
mining_data = base_data.copy()
|
|
99
108
|
mining_data["nonce"] = int(nonce)
|
|
100
|
-
data_string = json.dumps(mining_data, sort_keys=True)
|
|
101
|
-
mining_strings.append(data_string.encode())
|
|
102
109
|
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
"""Compute SHA256 hashes on GPU"""
|
|
107
|
-
# Convert to CuPy array if needed
|
|
108
|
-
if not isinstance(mining_strings, cp.ndarray):
|
|
109
|
-
mining_strings = cp.array(mining_strings)
|
|
110
|
-
|
|
111
|
-
# This is a simplified implementation
|
|
112
|
-
# In a real implementation, you'd use proper CUDA kernels
|
|
113
|
-
hashes = []
|
|
114
|
-
for data in mining_strings:
|
|
115
|
-
# For now, fall back to CPU hashing
|
|
116
|
-
# A real implementation would use CUDA-accelerated hashing
|
|
117
|
-
hash_obj = hashlib.sha256(data.tobytes())
|
|
110
|
+
# Compute hash
|
|
111
|
+
data_string = json.dumps(mining_data, sort_keys=True)
|
|
112
|
+
hash_obj = hashlib.sha256(data_string.encode())
|
|
118
113
|
hashes.append(hash_obj.hexdigest())
|
|
119
114
|
|
|
120
115
|
return hashes
|
lunalib/mining/difficulty.py
CHANGED
|
@@ -65,6 +65,44 @@ class DifficultySystem:
|
|
|
65
65
|
|
|
66
66
|
return base_reward * (1 + time_bonus)
|
|
67
67
|
|
|
68
|
+
def calculate_block_reward(self, difficulty: int) -> float:
    """Calculate block reward based on difficulty using the exponential system.

    Reward = 10^(difficulty - 1):
        difficulty 1 = 1 LKC (10^0)
        difficulty 2 = 10 LKC (10^1)
        difficulty 3 = 100 LKC (10^2)
        difficulty 9 = 100,000,000 LKC (10^8)

    Args:
        difficulty: Mining difficulty level; values outside [1, 9] are clamped.

    Returns:
        Block reward in LKC as a float (the original returned an int,
        contradicting the declared return type).
    """
    # Clamp to the supported range instead of mutating the parameter in branches
    clamped = max(1, min(difficulty, 9))
    return float(10 ** (clamped - 1))
|
|
83
|
+
|
|
84
|
+
def validate_block_hash(self, block_hash: str, difficulty: int) -> bool:
    """Return True when the hash satisfies the difficulty target.

    A hash qualifies only if it starts with at least `difficulty` leading
    '0' characters; empty hashes or a difficulty below 1 never validate.
    """
    if difficulty < 1 or not block_hash:
        return False
    required_prefix = difficulty * "0"
    return block_hash.startswith(required_prefix)
|
|
91
|
+
|
|
92
|
+
def validate_block_structure(self, block: dict) -> tuple:
    """Check that *block* carries every field a well-formed block needs.

    Returns:
        (is_valid, error_message) — error_message names the first missing
        field in declaration order, or is "" when the block is complete.
    """
    expected = ('index', 'previous_hash', 'timestamp', 'transactions',
                'miner', 'difficulty', 'nonce', 'hash', 'reward')

    missing = next((name for name in expected if name not in block), None)
    if missing is not None:
        return False, f"Missing required field: {missing}"

    return True, ""
|
|
105
|
+
|
|
68
106
|
def get_difficulty_name(self, difficulty_level):
|
|
69
107
|
"""Get human-readable name for difficulty level"""
|
|
70
108
|
names = {
|