lunalib 1.6.0__py3-none-any.whl → 1.6.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lunalib/core/p2p.py ADDED
@@ -0,0 +1,361 @@
1
+ # lunalib/core/p2p.py
2
+ import time
3
+ import requests
4
+ import threading
5
+ import json
6
+ from typing import List, Dict, Optional, Callable
7
+ from queue import Queue
8
+ import hashlib
9
+
10
+
11
class P2PClient:
    """
    P2P client for blockchain synchronization with hybrid primary/peer architecture.

    Downloads initial state from the trusted primary node, registers itself as
    a peer, then syncs via peer-to-peer polling with periodic re-validation
    against the primary.
    """

    def __init__(self, primary_node_url: str, node_id: Optional[str] = None, peer_url: Optional[str] = None):
        """
        Args:
            primary_node_url: Base URL of the trusted primary node.
            node_id: Unique node identifier; derived from hostname+time if omitted.
            peer_url: Publicly reachable URL of this node; guessed from the
                local outbound IP if omitted (override for public nodes).
        """
        self.primary_node = primary_node_url
        self.node_id = node_id or self._generate_node_id()
        self.peer_url = peer_url or self._generate_peer_url()
        self.peers = []
        self.last_primary_check = 0
        self.last_peer_update = 0
        self.peer_update_interval = 300     # refresh peer list every 5 minutes
        self.primary_check_interval = 3600  # re-validate against primary hourly

        # Event callbacks (registered via set_callbacks())
        self.on_new_block_callback = None
        self.on_new_transaction_callback = None
        self.on_peer_update_callback = None

        # Incoming P2P messages, drained by the background sync loop
        self.message_queue = Queue()
        self.is_running = False
        self.sync_thread = None

    def _generate_node_id(self) -> str:
        """Generate a unique 16-hex-character node ID from hostname and time."""
        import socket
        hostname = socket.gethostname()
        timestamp = str(time.time())
        return hashlib.sha256(f"{hostname}{timestamp}".encode()).hexdigest()[:16]

    def _generate_peer_url(self) -> str:
        """Generate peer URL (defaults to localhost, should be overridden for public nodes)"""
        import socket
        s = None
        try:
            # Connecting a UDP socket sends no traffic; it only selects the
            # local interface that would route to the target, which gives us
            # this host's outbound IP address.
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect(("8.8.8.8", 80))
            local_ip = s.getsockname()[0]
            return f"http://{local_ip}:8080"
        except OSError:
            # No usable route: fall back to localhost.
            return "http://localhost:8080"
        finally:
            # Close the socket on every path (previously leaked on failure).
            if s is not None:
                s.close()

    def start(self):
        """Start P2P client: initial sync, registration, and background loop."""
        if self.is_running:
            return

        self.is_running = True

        # Initial setup against the primary node. The downloaded snapshot is
        # returned by _initial_sync(); callers that need it should call that
        # method directly.
        self._initial_sync()
        self._register_with_primary()
        self._update_peer_list()

        # Daemon thread so a forgotten stop() never blocks interpreter exit.
        self.sync_thread = threading.Thread(target=self._sync_loop, daemon=True)
        self.sync_thread.start()

        print(f"✅ P2P Client started (Node ID: {self.node_id})")

    def stop(self):
        """Stop P2P client and wait briefly for the sync thread to finish."""
        self.is_running = False
        if self.sync_thread:
            self.sync_thread.join(timeout=5)
        print("🛑 P2P Client stopped")

    def _initial_sync(self):
        """Download initial blockchain state from the primary node.

        Returns:
            The decoded blockchain payload, or None on HTTP/network failure.
        """
        try:
            print(f"📥 Initial sync from primary node: {self.primary_node}")

            response = requests.get(f"{self.primary_node}/api/blockchain/full", timeout=30)
            if response.status_code == 200:
                blockchain_data = response.json()
                print(f"✅ Downloaded blockchain: {len(blockchain_data.get('blocks', []))} blocks")
                return blockchain_data

        except Exception as e:
            print(f"❌ Initial sync failed: {e}")
        return None

    def _register_with_primary(self):
        """Register this node with the primary daemon.

        Returns:
            True on HTTP 200, False on any other status or error.
        """
        try:
            peer_info = {
                'node_id': self.node_id,
                'peer_url': self.peer_url,
                'timestamp': time.time(),
                'capabilities': ['sync', 'relay']
            }

            response = requests.post(
                f"{self.primary_node}/api/peers/register",
                json=peer_info,
                timeout=10
            )

            if response.status_code == 200:
                print(f"✅ Registered with primary node as peer: {self.node_id} ({self.peer_url})")
                return True
            print(f"⚠️ Registration failed: {response.status_code}")
            return False

        except Exception as e:
            print(f"❌ Registration error: {e}")
            return False

    def _update_peer_list(self):
        """Fetch the current peer list from the primary daemon.

        Returns:
            The new peer list with this node filtered out, or [] on failure
            (previously a non-200 response implicitly returned None).
        """
        try:
            response = requests.get(f"{self.primary_node}/api/peers/list", timeout=10)

            if response.status_code == 200:
                peer_data = response.json()
                new_peers = peer_data.get('peers', [])

                # Never keep ourselves in the peer list.
                self.peers = [p for p in new_peers if p.get('node_id') != self.node_id]

                print(f"📋 Updated peer list: {len(self.peers)} peers")
                self.last_peer_update = time.time()

                if self.on_peer_update_callback:
                    self.on_peer_update_callback(self.peers)

                return self.peers

        except Exception as e:
            print(f"❌ Peer list update failed: {e}")
        return []

    def _sync_loop(self):
        """Background loop: refresh peers, validate with primary, poll peers,
        and drain the message queue while the client is running."""
        while self.is_running:
            try:
                current_time = time.time()

                # Periodic peer list update
                if current_time - self.last_peer_update > self.peer_update_interval:
                    self._update_peer_list()

                # Periodic primary node validation
                if current_time - self.last_primary_check > self.primary_check_interval:
                    self._validate_with_primary()

                # Sync with peers and process queued messages
                self._sync_from_peers()
                self._process_messages()

                time.sleep(5)  # poll every 5 seconds

            except Exception as e:
                print(f"❌ Sync loop error: {e}")
                time.sleep(10)  # back off after an unexpected error

    def _sync_from_peers(self):
        """Poll a handful of peers for their latest block (best effort)."""
        for peer in self.peers[:5]:  # limit fan-out to the first 5 peers
            try:
                # Registration publishes 'peer_url' while some peer lists use
                # 'url'; accept either key so entries are not silently skipped.
                peer_url = peer.get('url') or peer.get('peer_url')
                if not peer_url:
                    continue

                # Get latest block from peer
                response = requests.get(f"{peer_url}/api/blockchain/latest", timeout=5)
                if response.status_code == 200:
                    latest_block = response.json()

                    if self.on_new_block_callback:
                        self.on_new_block_callback(latest_block)

            except Exception:
                # Peers are unreliable by nature; ignore individual failures.
                continue

    def _validate_with_primary(self):
        """Validate local blockchain state against the primary node.

        Returns:
            The primary's latest block payload, or None on failure.
        """
        try:
            print("🔍 Validating with primary node...")

            response = requests.get(f"{self.primary_node}/api/blockchain/latest", timeout=10)
            if response.status_code == 200:
                primary_latest = response.json()
                # Guard against a missing 'hash' field so logging can't crash
                # (None[:16] previously raised inside this method).
                primary_hash = primary_latest.get('hash') or ''
                primary_height = primary_latest.get('index')

                print(f"✅ Primary validation: Block #{primary_height}, Hash: {primary_hash[:16]}...")
                self.last_primary_check = time.time()

                return primary_latest

        except Exception as e:
            print(f"❌ Primary validation failed: {e}")
        return None

    def _process_messages(self):
        """Drain the message queue, dispatching to the registered callbacks."""
        while not self.message_queue.empty():
            try:
                message = self.message_queue.get_nowait()
                msg_type = message.get('type')

                if msg_type == 'new_block':
                    if self.on_new_block_callback:
                        self.on_new_block_callback(message.get('data'))
                elif msg_type == 'new_transaction':
                    if self.on_new_transaction_callback:
                        self.on_new_transaction_callback(message.get('data'))

            except Exception as e:
                print(f"❌ Message processing error: {e}")

    def broadcast_block(self, block: Dict):
        """Broadcast new block to peers (fire-and-forget)."""
        self._broadcast('/api/blocks/new', block)

    def broadcast_transaction(self, transaction: Dict):
        """Broadcast new transaction to peers (fire-and-forget)."""
        self._broadcast('/api/transactions/new', transaction)

    def _broadcast(self, endpoint: str, payload: Dict):
        """POST payload to endpoint on every peer, ignoring individual failures."""
        for peer in self.peers:
            try:
                # Accept either peer-URL key (see _sync_from_peers).
                peer_url = peer.get('url') or peer.get('peer_url')
                if peer_url:
                    requests.post(f"{peer_url}{endpoint}", json=payload, timeout=3)
            except Exception:
                pass  # Silent fail for broadcasts

    def get_peers(self) -> List[Dict]:
        """Get current peer list"""
        return self.peers

    def set_callbacks(self, on_new_block=None, on_new_transaction=None, on_peer_update=None):
        """Set event callbacks for new blocks, new transactions, and peer updates."""
        self.on_new_block_callback = on_new_block
        self.on_new_transaction_callback = on_new_transaction
        self.on_peer_update_callback = on_peer_update
269
+
270
+
271
class HybridBlockchainClient:
    """
    Hybrid blockchain client that combines primary node trust with P2P scalability.

    - Initial sync from primary node
    - P2P updates from peers
    - Periodic validation against primary node
    """

    def __init__(self, primary_node_url: str, blockchain_manager, mempool_manager, peer_url: Optional[str] = None):
        """
        Args:
            primary_node_url: Base URL of the trusted primary node.
            blockchain_manager: Local blockchain store (integration pending).
            mempool_manager: Local mempool store (integration pending).
            peer_url: Publicly reachable URL of this node, forwarded to P2PClient.
        """
        self.primary_node = primary_node_url
        self.blockchain = blockchain_manager
        self.mempool = mempool_manager

        # Wire the P2P client's events into our validation handlers.
        self.p2p = P2PClient(primary_node_url, peer_url=peer_url)
        self.p2p.set_callbacks(
            on_new_block=self._handle_new_block,
            on_new_transaction=self._handle_new_transaction,
            on_peer_update=self._handle_peer_update
        )

    def start(self):
        """Start hybrid client"""
        print("🚀 Starting Hybrid Blockchain Client...")
        self.p2p.start()

    def stop(self):
        """Stop hybrid client"""
        self.p2p.stop()

    def _handle_new_block(self, block: Dict):
        """Validate a P2P-received block with the primary before accepting it."""
        try:
            if self._validate_block_with_primary(block):
                print(f"📦 New block from P2P: #{block.get('index')}")
                # TODO: integrate with the local chain once the manager API lands.
                # self.blockchain.add_block(block)
        except Exception as e:
            print(f"❌ Block handling error: {e}")

    def _handle_new_transaction(self, transaction: Dict):
        """Validate a P2P-received transaction with the primary before accepting it."""
        try:
            if self._validate_transaction_with_primary(transaction):
                # `or 'unknown'` also covers a present-but-None 'hash' value,
                # which previously raised on the [:16] slice.
                tx_hash = transaction.get('hash') or 'unknown'
                print(f"💳 New transaction from P2P: {tx_hash[:16]}...")
                # TODO: integrate with the local mempool once the manager API lands.
                # self.mempool.add_transaction(transaction)
        except Exception as e:
            print(f"❌ Transaction handling error: {e}")

    def _handle_peer_update(self, peers: List[Dict]):
        """Log peer-list changes coming from the P2P client."""
        print(f"👥 Peer list updated: {len(peers)} peers available")

    def _validate_block_with_primary(self, block: Dict) -> bool:
        """Ask the primary node whether a block is valid (fail closed)."""
        return self._validate_with_primary('/api/blocks/validate', block)

    def _validate_transaction_with_primary(self, transaction: Dict) -> bool:
        """Ask the primary node whether a transaction is valid (fail closed)."""
        return self._validate_with_primary('/api/transactions/validate', transaction)

    def _validate_with_primary(self, endpoint: str, payload: Dict) -> bool:
        """Shared helper: POST payload to the primary and read its 'valid' flag.

        Returns:
            True only when the primary answers 200 with {'valid': true}; any
            network or parsing error counts as invalid (fail closed).
        """
        try:
            response = requests.post(
                f"{self.primary_node}{endpoint}",
                json=payload,
                timeout=5
            )
            return response.status_code == 200 and response.json().get('valid', False)
        except Exception:
            return False

    def broadcast_block(self, block: Dict):
        """Broadcast block to P2P network"""
        self.p2p.broadcast_block(block)

    def broadcast_transaction(self, transaction: Dict):
        """Broadcast transaction to P2P network"""
        self.p2p.broadcast_transaction(transaction)

    def get_peers(self) -> List[Dict]:
        """Get current peer list"""
        return self.p2p.get_peers()
@@ -1 +1,5 @@
1
- from .miner import GenesisMiner
1
+ from .miner import GenesisMiner, Miner
2
+ from .difficulty import DifficultySystem
3
+ from .cuda_manager import CUDAManager
4
+
5
+ __all__ = ['GenesisMiner', 'Miner', 'DifficultySystem', 'CUDAManager']
@@ -47,7 +47,7 @@ class CUDAManager:
47
47
  self.cuda_available = False
48
48
 
49
49
  def cuda_mine_batch(self, mining_data: Dict, difficulty: int, batch_size: int = 100000) -> Optional[Dict]:
50
- """Mine using CUDA acceleration"""
50
+ """Mine using CUDA acceleration with CPU-side hash computation"""
51
51
  if not self.cuda_available:
52
52
  return None
53
53
 
@@ -56,24 +56,27 @@ class CUDAManager:
56
56
  nonce_start = 0
57
57
  start_time = time.time()
58
58
 
59
+ # Pre-compute the base string without nonce for efficiency
60
+ base_data = {k: v for k, v in mining_data.items() if k != 'nonce'}
61
+
59
62
  while True:
60
- # Prepare batch data for GPU
61
- nonces = cp.arange(nonce_start, nonce_start + batch_size, dtype=cp.uint64)
62
- mining_strings = self._prepare_mining_batch(mining_data, nonces)
63
+ # Generate nonces on GPU for parallel processing
64
+ nonces_gpu = cp.arange(nonce_start, nonce_start + batch_size, dtype=cp.int64)
65
+ nonces_cpu = cp.asnumpy(nonces_gpu) # Transfer to CPU for hashing
63
66
 
64
- # Compute hashes on GPU
65
- hashes = self._compute_hashes_gpu(mining_strings)
67
+ # Compute hashes in parallel on CPU (GPU hash acceleration requires custom CUDA kernels)
68
+ hashes = self._compute_hashes_parallel(base_data, nonces_cpu)
66
69
 
67
70
  # Check for successful hash
68
71
  for i, hash_hex in enumerate(hashes):
69
72
  if hash_hex.startswith(target):
70
73
  mining_time = time.time() - start_time
71
- successful_nonce = nonce_start + i
74
+ successful_nonce = int(nonces_cpu[i])
72
75
 
73
76
  return {
74
77
  "success": True,
75
78
  "hash": hash_hex,
76
- "nonce": int(successful_nonce),
79
+ "nonce": successful_nonce,
77
80
  "mining_time": mining_time,
78
81
  "method": "cuda"
79
82
  }
@@ -85,36 +88,28 @@ class CUDAManager:
85
88
  current_time = time.time()
86
89
  hashrate = nonce_start / (current_time - start_time)
87
90
  print(f"⏳ CUDA: {nonce_start:,} attempts | {hashrate:,.0f} H/s")
91
+
92
+ # Timeout check
93
+ if time.time() - start_time > 300: # 5 minutes timeout
94
+ break
88
95
 
89
96
  except Exception as e:
90
97
  print(f"CUDA mining error: {e}")
91
98
 
92
99
  return None
93
100
 
94
- def _prepare_mining_batch(self, mining_data: Dict, nonces) -> Any:
95
- """Prepare batch mining data for GPU"""
96
- mining_strings = []
101
+ def _compute_hashes_parallel(self, base_data: Dict, nonces: list) -> list:
102
+ """Compute SHA256 hashes in parallel on CPU (string operations not supported on GPU)"""
103
+ hashes = []
97
104
 
98
105
  for nonce in nonces:
106
+ # Create mining data with current nonce
107
+ mining_data = base_data.copy()
99
108
  mining_data["nonce"] = int(nonce)
100
- data_string = json.dumps(mining_data, sort_keys=True)
101
- mining_strings.append(data_string.encode())
102
109
 
103
- return cp.array(mining_strings)
104
-
105
- def _compute_hashes_gpu(self, mining_strings) -> list:
106
- """Compute SHA256 hashes on GPU"""
107
- # Convert to CuPy array if needed
108
- if not isinstance(mining_strings, cp.ndarray):
109
- mining_strings = cp.array(mining_strings)
110
-
111
- # This is a simplified implementation
112
- # In a real implementation, you'd use proper CUDA kernels
113
- hashes = []
114
- for data in mining_strings:
115
- # For now, fall back to CPU hashing
116
- # A real implementation would use CUDA-accelerated hashing
117
- hash_obj = hashlib.sha256(data.tobytes())
110
+ # Compute hash
111
+ data_string = json.dumps(mining_data, sort_keys=True)
112
+ hash_obj = hashlib.sha256(data_string.encode())
118
113
  hashes.append(hash_obj.hexdigest())
119
114
 
120
115
  return hashes
@@ -65,6 +65,44 @@ class DifficultySystem:
65
65
 
66
66
  return base_reward * (1 + time_bonus)
67
67
 
68
def calculate_block_reward(self, difficulty: int) -> float:
    """Return the block reward for a difficulty level.

    Rewards grow exponentially: reward = 10^(difficulty - 1), so
    difficulty 1 pays 1 LKC, difficulty 2 pays 10 LKC, difficulty 3 pays
    100 LKC, up to the cap at difficulty 9 paying 100,000,000 LKC.
    Out-of-range difficulties are clamped into [1, 9].
    """
    clamped = min(max(difficulty, 1), 9)
    return 10 ** (clamped - 1)
83
+
84
def validate_block_hash(self, block_hash: str, difficulty: int) -> bool:
    """Return True when the hash carries at least `difficulty` leading zeros.

    Empty/None hashes and difficulties below 1 are always invalid.
    """
    if difficulty < 1 or not block_hash:
        return False
    return block_hash[:difficulty] == "0" * difficulty
91
+
92
def validate_block_structure(self, block: dict) -> tuple:
    """Check that a block carries every required field.

    Returns:
        (True, "") when the block is structurally complete, otherwise
        (False, message) naming the first missing field.
    """
    required = ('index', 'previous_hash', 'timestamp', 'transactions',
                'miner', 'difficulty', 'nonce', 'hash', 'reward')

    missing = next((name for name in required if name not in block), None)
    if missing is not None:
        return False, f"Missing required field: {missing}"
    return True, ""
105
+
68
106
  def get_difficulty_name(self, difficulty_level):
69
107
  """Get human-readable name for difficulty level"""
70
108
  names = {