nexaroa 0.0.111__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. neuroshard/__init__.py +93 -0
  2. neuroshard/__main__.py +4 -0
  3. neuroshard/cli.py +466 -0
  4. neuroshard/core/__init__.py +92 -0
  5. neuroshard/core/consensus/verifier.py +252 -0
  6. neuroshard/core/crypto/__init__.py +20 -0
  7. neuroshard/core/crypto/ecdsa.py +392 -0
  8. neuroshard/core/economics/__init__.py +52 -0
  9. neuroshard/core/economics/constants.py +387 -0
  10. neuroshard/core/economics/ledger.py +2111 -0
  11. neuroshard/core/economics/market.py +975 -0
  12. neuroshard/core/economics/wallet.py +168 -0
  13. neuroshard/core/governance/__init__.py +74 -0
  14. neuroshard/core/governance/proposal.py +561 -0
  15. neuroshard/core/governance/registry.py +545 -0
  16. neuroshard/core/governance/versioning.py +332 -0
  17. neuroshard/core/governance/voting.py +453 -0
  18. neuroshard/core/model/__init__.py +30 -0
  19. neuroshard/core/model/dynamic.py +4186 -0
  20. neuroshard/core/model/llm.py +905 -0
  21. neuroshard/core/model/registry.py +164 -0
  22. neuroshard/core/model/scaler.py +387 -0
  23. neuroshard/core/model/tokenizer.py +568 -0
  24. neuroshard/core/network/__init__.py +56 -0
  25. neuroshard/core/network/connection_pool.py +72 -0
  26. neuroshard/core/network/dht.py +130 -0
  27. neuroshard/core/network/dht_plan.py +55 -0
  28. neuroshard/core/network/dht_proof_store.py +516 -0
  29. neuroshard/core/network/dht_protocol.py +261 -0
  30. neuroshard/core/network/dht_service.py +506 -0
  31. neuroshard/core/network/encrypted_channel.py +141 -0
  32. neuroshard/core/network/nat.py +201 -0
  33. neuroshard/core/network/nat_traversal.py +695 -0
  34. neuroshard/core/network/p2p.py +929 -0
  35. neuroshard/core/network/p2p_data.py +150 -0
  36. neuroshard/core/swarm/__init__.py +106 -0
  37. neuroshard/core/swarm/aggregation.py +729 -0
  38. neuroshard/core/swarm/buffers.py +643 -0
  39. neuroshard/core/swarm/checkpoint.py +709 -0
  40. neuroshard/core/swarm/compute.py +624 -0
  41. neuroshard/core/swarm/diloco.py +844 -0
  42. neuroshard/core/swarm/factory.py +1288 -0
  43. neuroshard/core/swarm/heartbeat.py +669 -0
  44. neuroshard/core/swarm/logger.py +487 -0
  45. neuroshard/core/swarm/router.py +658 -0
  46. neuroshard/core/swarm/service.py +640 -0
  47. neuroshard/core/training/__init__.py +29 -0
  48. neuroshard/core/training/checkpoint.py +600 -0
  49. neuroshard/core/training/distributed.py +1602 -0
  50. neuroshard/core/training/global_tracker.py +617 -0
  51. neuroshard/core/training/production.py +276 -0
  52. neuroshard/governance_cli.py +729 -0
  53. neuroshard/grpc_server.py +895 -0
  54. neuroshard/runner.py +3223 -0
  55. neuroshard/sdk/__init__.py +92 -0
  56. neuroshard/sdk/client.py +990 -0
  57. neuroshard/sdk/errors.py +101 -0
  58. neuroshard/sdk/types.py +282 -0
  59. neuroshard/tracker/__init__.py +0 -0
  60. neuroshard/tracker/server.py +864 -0
  61. neuroshard/ui/__init__.py +0 -0
  62. neuroshard/ui/app.py +102 -0
  63. neuroshard/ui/templates/index.html +1052 -0
  64. neuroshard/utils/__init__.py +0 -0
  65. neuroshard/utils/autostart.py +81 -0
  66. neuroshard/utils/hardware.py +121 -0
  67. neuroshard/utils/serialization.py +90 -0
  68. neuroshard/version.py +1 -0
  69. nexaroa-0.0.111.dist-info/METADATA +283 -0
  70. nexaroa-0.0.111.dist-info/RECORD +78 -0
  71. nexaroa-0.0.111.dist-info/WHEEL +5 -0
  72. nexaroa-0.0.111.dist-info/entry_points.txt +4 -0
  73. nexaroa-0.0.111.dist-info/licenses/LICENSE +190 -0
  74. nexaroa-0.0.111.dist-info/top_level.txt +2 -0
  75. protos/__init__.py +0 -0
  76. protos/neuroshard.proto +651 -0
  77. protos/neuroshard_pb2.py +160 -0
  78. protos/neuroshard_pb2_grpc.py +1298 -0
@@ -0,0 +1,2111 @@
"""
NEURO Token Ledger System

This module implements the NEURO token economics for NeuroShard:
- Proof of Neural Work (PoNW) verification
- Token minting through verified work
- Fee burn mechanism (5% deflationary)
- Anti-cheat measures and rate limiting
- ECDSA cryptographic signature verification (trustless)

Security Model:
1. Nodes cannot claim arbitrary rewards - all rewards require verifiable proof
2. Proofs are signed with ECDSA - ANYONE can verify without shared secrets
3. Replay attacks prevented via signature deduplication
4. Plausibility checks prevent inflated claims
5. Cross-validation via gossip consensus

Based on: docs/whitepaper/neuroshard_whitepaper.tex
"""

import sqlite3
import time
import json
import logging
import threading
import hashlib
import os
from typing import Dict, List, Optional, Tuple, Any
from dataclasses import dataclass, asdict
from enum import Enum

# Import ECDSA crypto module - REQUIRED
from neuroshard.core.crypto.ecdsa import (
    NodeCrypto,
    verify_signature,
    register_public_key,
    get_public_key,
    is_valid_signature_format,
    is_valid_node_id_format
)

# Import ProofVerifier for semantic validation
from neuroshard.core.consensus.verifier import ProofVerifier

logger = logging.getLogger(__name__)

# ============================================================================
# CONSTANTS - Token Economics (from Centralized Economics Module)
# ============================================================================
# All economic constants are defined in neuroshard/core/economics/constants.py.
# Import from there to ensure consistency across the codebase.
# ============================================================================

from neuroshard.core.economics.constants import (
    # Reward rates
    UPTIME_REWARD_PER_MINUTE,
    TRAINING_REWARD_PER_BATCH,
    DATA_REWARD_PER_SAMPLE,

    # Dynamic inference pricing (PURE MARKET - no caps)
    INFERENCE_MARKET_PRICE_SMOOTHING,
    INFERENCE_MARKET_CAPACITY_TIMEOUT,
    INFERENCE_MARKET_TARGET_RESPONSE_TIME,
    INFERENCE_MARKET_BASE_PRICE,

    # Role distribution
    DRIVER_SHARE,
    WORKER_SHARE,
    VALIDATOR_SHARE,
    DRIVER_BONUS,
    VALIDATOR_BONUS,
    WORKER_LAYER_BONUS,
    MAX_LAYER_BONUS,
    TRAINING_BONUS,

    # Staking
    STAKING_BASE_BONUS,
    STAKING_UNIT,
    STAKING_DIMINISHING,
    MIN_STAKE_AMOUNT,
    MAX_STAKE_AMOUNT,

    # Validator requirements
    VALIDATOR_MIN_STAKE,
    VALIDATOR_MIN_MEMORY_MB,
    VALIDATION_FEE_PER_PROOF,
    VALIDATION_CONSENSUS_THRESHOLD,
    VALIDATOR_ROTATION_ENABLED,
    VALIDATOR_SELECTION_RANDOMNESS,
    REMOTE_STAKE_MULTIPLIER_CAP,

    # Fees and burns
    FEE_BURN_RATE,
    BURN_ADDRESS,

    # Anti-cheat limits
    MAX_UPTIME_PER_PROOF,
    MAX_TOKENS_PER_MINUTE,
    MAX_PROOFS_PER_HOUR,
    PROOF_FRESHNESS_WINDOW,
    MAX_REWARD_PER_PROOF,

    # Slashing
    SLASH_AMOUNT,
    WHISTLEBLOWER_REWARD_RATE,
    VALIDATOR_SLASH_MULTIPLIER,

    # Helper functions
    calculate_stake_multiplier,
    is_valid_stake_amount,
    is_valid_stake_duration,
)


class ProofType(Enum):
    """Types of Proof of Neural Work."""
    UPTIME = "uptime"
    INFERENCE = "inference"
    TRAINING = "training"
    DATA = "data"


@dataclass
class PoNWProof:
    """
    Proof of Neural Work - Cryptographically signed proof of contribution.

    Security Properties:
    1. node_id: Derived from node_token (cannot be forged)
    2. timestamp: Must be recent (prevents replay)
    3. signature: ECDSA signature over canonical_payload()
    4. nonce: Random value to prevent signature collision
    5. request_id: Links to InferenceRequest for price locking (NEW: marketplace)
    """
    node_id: str
    proof_type: str
    timestamp: float
    nonce: str

    # Work metrics
    uptime_seconds: float = 0.0
    tokens_processed: int = 0
    training_batches: int = 0
    data_samples: int = 0

    # NEW: Marketplace - links to InferenceRequest for price locking
    request_id: Optional[str] = None  # If inference, which request was this?

    # Context for verification
    model_hash: str = ""          # Hash of model state (for training proofs)
    layers_held: int = 0          # Number of layers this node holds
    has_embedding: bool = False   # Driver node
    has_lm_head: bool = False     # Validator node

    # Training metrics (for global loss tracking)
    current_loss: Optional[float] = None  # Current training loss (for aggregation)

    # Signature
    signature: str = ""

    def canonical_payload(self) -> str:
        """Create canonical string for signing (deterministic ordering).

        CRITICAL: All float fields must use consistent formatting to ensure
        the same payload is generated on sender and receiver.

        NOTE: request_id is included in the signature to prevent proof stealing.
        """
        return (
            f"{self.node_id}:{self.proof_type}:{self.timestamp:.6f}:{self.nonce}:"
            f"{float(self.uptime_seconds):.1f}:{self.tokens_processed}:{self.training_batches}:"
            f"{self.data_samples}:{self.request_id if self.request_id else ''}:"
            f"{self.model_hash}:{self.layers_held}"
        )

    def to_dict(self) -> Dict:
        return asdict(self)

    @classmethod
    def from_dict(cls, data: Dict) -> 'PoNWProof':
        return cls(**{k: v for k, v in data.items() if k in cls.__dataclass_fields__})


def sign_proof(proof: 'PoNWProof', node_token: str) -> 'PoNWProof':
    """
    Sign a Proof of Neural Work using ECDSA.

    Args:
        proof: The proof to sign
        node_token: The node's secret token for signing

    Returns:
        The same proof with the signature field populated
    """
    from neuroshard.core.crypto.ecdsa import sign_message
    payload = proof.canonical_payload()
    proof.signature = sign_message(payload, node_token)
    return proof
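

# Illustrative sketch: a minimal signing round trip. The token below is a
# hypothetical placeholder; NodeCrypto.sign/verify are used the same way as
# elsewhere in this module.
def _example_sign_and_verify():
    crypto = NodeCrypto("example-node-token")  # hypothetical secret token
    proof = PoNWProof(
        node_id=crypto.node_id,
        proof_type=ProofType.UPTIME.value,
        timestamp=time.time(),
        nonce="0123456789abcdef",  # normally random; see NEUROLedger.create_proof
        uptime_seconds=60.0,
    )
    payload = proof.canonical_payload()
    proof.signature = crypto.sign(payload)
    # Anyone holding the public key can check this without shared secrets.
    assert crypto.verify(payload, proof.signature)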


@dataclass
class Transaction:
    """NEURO transfer transaction with fee burn."""
    tx_id: str
    from_id: str
    to_id: str
    amount: float
    fee: float          # Total fee (5% of amount)
    burn_amount: float  # The full fee is burned
    timestamp: float
    signature: str
    memo: str = ""

    def canonical_payload(self) -> str:
        return f"{self.from_id}:{self.to_id}:{self.amount}:{self.fee}:{self.timestamp}:{self.memo}"


@dataclass
class LedgerStats:
    """Global ledger statistics."""
    total_minted: float = 0.0
    total_burned: float = 0.0
    total_transferred: float = 0.0
    circulating_supply: float = 0.0
    total_proofs_processed: int = 0
    total_transactions: int = 0


class NEUROLedger:
    """
    Secure NEURO Token Ledger with Proof of Neural Work verification.

    Security Features:
    1. ECDSA signatures on all proofs (trustless verification)
    2. Rate limiting to prevent reward inflation
    3. Plausibility checks on claimed work
    4. Replay attack prevention via signature deduplication
    5. Fee burn mechanism (5% deflationary)
    6. Slashing for detected fraud

    Cryptography:
    - Uses ECDSA with the secp256k1 curve (same as Bitcoin/Ethereum)
    - Anyone can verify signatures with just the public key
    - No shared secrets needed for verification
    """

    def __init__(
        self,
        db_path: str = "neuro_ledger.db",
        node_id: Optional[str] = None,
        node_token: Optional[str] = None,
        model_interface: Optional[Any] = None
    ):
        self.db_path = db_path
        self.lock = threading.Lock()

        # Initialize ECDSA crypto - REQUIRED
        self.crypto: Optional[NodeCrypto] = None
        if node_token:
            self.crypto = NodeCrypto(node_token)
            self.node_id = self.crypto.node_id
            logger.info(f"ECDSA crypto initialized for node {self.node_id[:16]}...")
        else:
            self.node_id = node_id or "unknown"

        self.node_token = node_token

        # Initialize the inference market (PURE MARKET PRICING - no caps).
        # Quality emerges naturally: a weak model sees no demand and a low
        # price; an excellent model sees high demand and a high price (the
        # market finds true value).
        from neuroshard.core.economics.market import InferenceMarket
        self.inference_market = InferenceMarket(
            price_smoothing=INFERENCE_MARKET_PRICE_SMOOTHING,
            capacity_timeout=INFERENCE_MARKET_CAPACITY_TIMEOUT,
            base_price=INFERENCE_MARKET_BASE_PRICE
        )
        logger.info("Dynamic inference pricing enabled: PURE MARKET (no artificial caps)")

        # Initialize database
        self._init_db()

        # Initialize verifier
        self.verifier = ProofVerifier(model_interface=model_interface)

        # Role verification callback (set by the runner after the layer pool
        # is initialized). This prevents nodes from claiming Validator/Driver
        # role bonuses they don't have.
        self._role_verifier = None

        # Cache of verified roles (signature -> (verified_embed, verified_head)),
        # used to ensure rewards are based on VERIFIED roles, not claimed ones.
        self._verified_roles: Dict[str, Tuple[bool, bool]] = {}

        logger.info(f"NEUROLedger initialized: node={self.node_id[:16]}...")

    def set_role_verifier(self, verifier_fn):
        """
        Set the role verification callback.

        The callback should accept (node_id, claimed_has_embedding, claimed_has_lm_head)
        and return (is_valid, actual_has_embedding, actual_has_lm_head).

        This prevents nodes from claiming Validator bonuses they don't deserve.
        """
        self._role_verifier = verifier_fn
        logger.info("Role verifier registered - fake role claims will be rejected")

    def set_model_interface(self, model_interface):
        """
        Set the model interface for training work verification.

        This is called after the NeuroNode is initialized, since the ledger
        is created before the node (in P2P setup).

        Args:
            model_interface: Object implementing verify_training_work(proof).
                Typically a SwarmEnabledDynamicNode.
        """
        self.verifier.model_interface = model_interface
        logger.info("Model interface registered - training work verification enabled")
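
    # Illustrative sketch: wiring up a role verifier. `layer_pool` and its
    # `holds_embedding`/`holds_lm_head` methods are hypothetical stand-ins
    # for whatever component actually tracks layer assignments.
    def _example_register_role_verifier(self, layer_pool):
        def verify_role(node_id, claimed_embedding, claimed_lm_head):
            actual_embedding = layer_pool.holds_embedding(node_id)  # hypothetical API
            actual_lm_head = layer_pool.holds_lm_head(node_id)      # hypothetical API
            # A claim is valid only if every claimed role is actually held.
            is_valid = ((not claimed_embedding or actual_embedding) and
                        (not claimed_lm_head or actual_lm_head))
            return is_valid, actual_embedding, actual_lm_head

        self.set_role_verifier(verify_role)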

    def _init_db(self):
        """Initialize the SQLite database with all required tables."""
        with self.lock:
            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
                # Enable Write-Ahead Logging for better concurrency
                try:
                    conn.execute("PRAGMA journal_mode=WAL;")
                except Exception as e:
                    logger.warning(f"Failed to enable WAL mode: {e}")

                # Main balances table
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS balances (
                        node_id TEXT PRIMARY KEY,
                        balance REAL DEFAULT 0.0,
                        total_earned REAL DEFAULT 0.0,
                        total_spent REAL DEFAULT 0.0,
                        last_proof_time REAL DEFAULT 0.0,
                        proof_count INTEGER DEFAULT 0,
                        created_at REAL DEFAULT 0.0
                    )
                """)

                # Proof history (for replay prevention and audit)
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS proof_history (
                        signature TEXT PRIMARY KEY,
                        node_id TEXT NOT NULL,
                        proof_type TEXT NOT NULL,
                        timestamp REAL NOT NULL,
                        uptime_seconds REAL DEFAULT 0.0,
                        tokens_processed INTEGER DEFAULT 0,
                        training_batches INTEGER DEFAULT 0,
                        data_samples INTEGER DEFAULT 0,
                        reward_amount REAL DEFAULT 0.0,
                        received_at REAL NOT NULL,
                        verified BOOLEAN DEFAULT 1,
                        current_loss REAL DEFAULT NULL,
                        has_lm_head BOOLEAN DEFAULT 0
                    )
                """)
                # Add columns if missing (migration for existing DBs)
                try:
                    conn.execute("ALTER TABLE proof_history ADD COLUMN current_loss REAL DEFAULT NULL")
                except sqlite3.OperationalError:
                    pass  # Column already exists
                try:
                    conn.execute("ALTER TABLE proof_history ADD COLUMN has_lm_head BOOLEAN DEFAULT 0")
                except sqlite3.OperationalError:
                    pass  # Column already exists
                conn.execute("CREATE INDEX IF NOT EXISTS idx_proof_node ON proof_history(node_id)")
                conn.execute("CREATE INDEX IF NOT EXISTS idx_proof_time ON proof_history(timestamp)")

                # Transaction history
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS transactions (
                        tx_id TEXT PRIMARY KEY,
                        from_id TEXT NOT NULL,
                        to_id TEXT NOT NULL,
                        amount REAL NOT NULL,
                        fee REAL DEFAULT 0.0,
                        burn_amount REAL DEFAULT 0.0,
                        timestamp REAL NOT NULL,
                        memo TEXT DEFAULT '',
                        signature TEXT NOT NULL
                    )
                """)
                conn.execute("CREATE INDEX IF NOT EXISTS idx_tx_from ON transactions(from_id)")
                conn.execute("CREATE INDEX IF NOT EXISTS idx_tx_to ON transactions(to_id)")

                # Stakes (for reward multiplier)
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS stakes (
                        node_id TEXT PRIMARY KEY,
                        amount REAL DEFAULT 0.0,
                        locked_until REAL DEFAULT 0.0,
                        updated_at REAL DEFAULT 0.0
                    )
                """)

                # Global stats
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS global_stats (
                        id INTEGER PRIMARY KEY CHECK (id = 1),
                        total_minted REAL DEFAULT 0.0,
                        total_burned REAL DEFAULT 0.0,
                        total_transferred REAL DEFAULT 0.0,
                        total_proofs INTEGER DEFAULT 0,
                        total_transactions INTEGER DEFAULT 0,
                        updated_at REAL DEFAULT 0.0
                    )
                """)

                # Initialize global stats if not exists (Genesis Block)
                #
                # TRANSPARENCY NOTICE:
                # ====================
                # This is the Genesis Block initialization. The ledger starts with:
                #   - total_minted = 0.0 (no pre-mine)
                #   - total_burned = 0.0
                #   - total_transferred = 0.0
                #   - total_proofs = 0
                #   - total_transactions = 0
                #
                # ALL NEURO tokens must be earned through verified Proof of Neural Work.
                # There is NO pre-allocation, NO founder tokens, NO ICO.
                # Even the project creators must run nodes and do real work to earn NEURO.
                #
                # This can be independently verified by any node by checking:
                #   SELECT * FROM global_stats WHERE id = 1;
                #
                conn.execute("""
                    INSERT OR IGNORE INTO global_stats (id, total_minted, total_burned, updated_at)
                    VALUES (1, 0.0, 0.0, ?)
                """, (time.time(),))

                # Create a genesis record in proof_history for auditability
                genesis_exists = conn.execute(
                    "SELECT 1 FROM proof_history WHERE signature = 'GENESIS_BLOCK'"
                ).fetchone()

                if not genesis_exists:
                    conn.execute("""
                        INSERT INTO proof_history
                        (signature, node_id, proof_type, timestamp, uptime_seconds,
                         tokens_processed, training_batches, data_samples, reward_amount, received_at)
                        VALUES ('GENESIS_BLOCK', 'GENESIS', 'GENESIS', ?, 0, 0, 0, 0, 0.0, ?)
                    """, (time.time(), time.time()))
                    logger.info("Genesis Block created - Ledger initialized with zero supply")

                # Rate limiting table
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS rate_limits (
                        node_id TEXT PRIMARY KEY,
                        proofs_last_hour INTEGER DEFAULT 0,
                        tokens_last_minute INTEGER DEFAULT 0,
                        last_reset_hour REAL DEFAULT 0.0,
                        last_reset_minute REAL DEFAULT 0.0
                    )
                """)

                # Fraud reports (for slashing)
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS fraud_reports (
                        report_id TEXT PRIMARY KEY,
                        reporter_id TEXT NOT NULL,
                        accused_id TEXT NOT NULL,
                        proof_signature TEXT,
                        reason TEXT NOT NULL,
                        evidence TEXT,
                        status TEXT DEFAULT 'pending',
                        slash_amount REAL DEFAULT 0.0,
                        created_at REAL NOT NULL
                    )
                """)

    # ========================================================================
    # SIGNATURE & VERIFICATION
    # ========================================================================

    def _sign(self, payload: str) -> str:
        """
        Sign a payload using ECDSA with secp256k1.

        ECDSA signatures enable trustless verification by any node:
        anyone can verify using our public key.
        """
        if not self.crypto:
            raise ValueError("Cannot sign without crypto initialized (need node_token)")

        return self.crypto.sign(payload)

    def get_public_key_hex(self) -> str:
        """Get this node's public key in hex format for sharing."""
        if not self.crypto:
            raise ValueError("Crypto not initialized")
        return self.crypto.get_public_key_hex()

    def get_public_key_bytes(self) -> bytes:
        """Get this node's public key bytes for verification."""
        if not self.crypto:
            raise ValueError("Crypto not initialized")
        return self.crypto.get_public_key_bytes()

    def _verify_signature(self, proof: PoNWProof) -> bool:
        """
        Verify a proof signature using ECDSA.

        Security Model:
        ===============
        ECDSA enables TRUSTLESS verification:
        - The signature can be verified by ANYONE with the public key
        - No shared secret needed
        - Full cryptographic verification
        - Same curve as Bitcoin/Ethereum (secp256k1)

        TRANSPARENCY GUARANTEE:
        =======================
        There is NO admin backdoor. The ONLY way to get NEURO is:
        1. Run a node that does real work (training, inference, uptime)
        2. Create a proof of that work, signed with ECDSA
        3. Submit the proof, which passes ALL verification checks
        4. Receive rewards proportional to verified work

        Even the project creators must run nodes and do real work to earn NEURO.
        """
        if not proof.signature or proof.signature == "unsigned":
            logger.warning("Missing or unsigned signature")
            return False

        # Validate signature format
        if not is_valid_signature_format(proof.signature):
            logger.warning(f"Invalid signature format: {proof.signature[:20]}...")
            return False

        # Validate node_id format (32 hex chars)
        if not is_valid_node_id_format(proof.node_id):
            logger.warning(f"Invalid node_id format: {proof.node_id}")
            return False

        # For our own proofs, verify with our crypto
        if proof.node_id == self.node_id and self.crypto:
            payload = proof.canonical_payload()
            return self.crypto.verify(payload, proof.signature)

        # For external proofs, use the public key registry
        payload = proof.canonical_payload()
        result = verify_signature(proof.node_id, payload, proof.signature)
        if not result:
            # Signature mismatches are common during version transitions,
            # where different nodes may have different canonical_payload formats.
            logger.warning(f"Signature verification failed for {proof.node_id[:16]}... (likely version mismatch)")
        return result

    # ========================================================================
    # PROOF CREATION & PROCESSING
    # ========================================================================

    def create_proof(
        self,
        proof_type: ProofType,
        uptime_seconds: float = 0.0,
        tokens_processed: int = 0,
        training_batches: int = 0,
        data_samples: int = 0,
        model_hash: str = "",
        layers_held: int = 0,
        has_embedding: bool = False,
        has_lm_head: bool = False,
        current_loss: Optional[float] = None
    ) -> PoNWProof:
        """
        Create a signed Proof of Neural Work.

        Rate Limiting Applied:
        - uptime_seconds capped at MAX_UPTIME_PER_PROOF
        - tokens_processed checked against rate limits
        """
        # Apply rate limits
        uptime_seconds = min(uptime_seconds, MAX_UPTIME_PER_PROOF)

        # Generate a unique nonce
        nonce = hashlib.sha256(f"{time.time()}:{os.urandom(16).hex()}".encode()).hexdigest()[:16]

        # CRITICAL: the field values set here must match what canonical_payload()
        # serializes, or signature verification will fail on the receiver.
        proof = PoNWProof(
            node_id=self.node_id,
            proof_type=proof_type.value,
            timestamp=time.time(),
            nonce=nonce,
            uptime_seconds=uptime_seconds,
            tokens_processed=tokens_processed,
            training_batches=training_batches,
            data_samples=data_samples,
            model_hash=model_hash,
            layers_held=layers_held,
            has_embedding=has_embedding,
            has_lm_head=has_lm_head,
            current_loss=current_loss
        )

        # Sign the proof
        payload = proof.canonical_payload()
        proof.signature = self._sign(payload)

        return proof

    def verify_proof(self, proof: PoNWProof) -> Tuple[bool, str]:
        """
        Verify a Proof of Neural Work.

        Checks:
        1. Signature validity
        2. Timestamp freshness
        3. Replay prevention (signature not seen before)
        4. Rate limiting
        5. Plausibility of claimed work

        Returns: (is_valid, reason)
        """
        # 1. Check signature
        if not self._verify_signature(proof):
            return False, "Invalid signature"

        # 2. Check timestamp freshness
        age = time.time() - proof.timestamp
        if age > PROOF_FRESHNESS_WINDOW:
            return False, f"Proof too old ({age:.0f}s > {PROOF_FRESHNESS_WINDOW}s)"
        if age < -60:  # Allow 1 minute clock skew
            return False, "Proof timestamp in future"

        with self.lock:
            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
                # 3. Check for replay
                existing = conn.execute(
                    "SELECT 1 FROM proof_history WHERE signature = ?",
                    (proof.signature,)
                ).fetchone()
                if existing:
                    return False, "Duplicate proof (replay)"

                # 4. Rate limiting
                is_limited, limit_reason = self._check_rate_limits(conn, proof)
                if is_limited:
                    return False, limit_reason

                # 5. Plausibility checks
                is_plausible, plausibility_reason = self._check_plausibility(proof)
                if not is_plausible:
                    return False, plausibility_reason

                # 6. Work content verification (semantic check). This verifies
                # that the work was ACTUALLY done (e.g. gradients reduce loss).
                is_work_valid, work_reason = self.verifier.verify_work_content(proof)
                if not is_work_valid:
                    return False, f"Work validation failed: {work_reason}"

                return True, "Valid"

    def _check_rate_limits(self, conn, proof: PoNWProof) -> Tuple[bool, str]:
        """Check if a node is within rate limits."""
        now = time.time()

        # Get or create the rate limit record
        row = conn.execute(
            "SELECT proofs_last_hour, tokens_last_minute, last_reset_hour, last_reset_minute FROM rate_limits WHERE node_id = ?",
            (proof.node_id,)
        ).fetchone()

        if row:
            proofs_last_hour, tokens_last_minute, last_reset_hour, last_reset_minute = row

            # Reset the hourly counter if needed
            if now - last_reset_hour > 3600:
                proofs_last_hour = 0
                last_reset_hour = now

            # Reset the minute counter if needed
            if now - last_reset_minute > 60:
                tokens_last_minute = 0
                last_reset_minute = now
        else:
            proofs_last_hour = 0
            tokens_last_minute = 0
            last_reset_hour = now
            last_reset_minute = now

        # Check limits
        if proofs_last_hour >= MAX_PROOFS_PER_HOUR:
            return True, f"Rate limit: max {MAX_PROOFS_PER_HOUR} proofs/hour"

        new_tokens = tokens_last_minute + proof.tokens_processed
        # Allow generous burst headroom: the per-minute counter may accumulate
        # up to an hour's worth of the per-minute limit before rejecting.
        if new_tokens > MAX_TOKENS_PER_MINUTE * 60:
            return True, f"Rate limit: max {MAX_TOKENS_PER_MINUTE} tokens/minute"

        # Update rate limits
        conn.execute("""
            INSERT INTO rate_limits (node_id, proofs_last_hour, tokens_last_minute, last_reset_hour, last_reset_minute)
            VALUES (?, ?, ?, ?, ?)
            ON CONFLICT(node_id) DO UPDATE SET
                proofs_last_hour = ?,
                tokens_last_minute = ?,
                last_reset_hour = ?,
                last_reset_minute = ?
        """, (
            proof.node_id,
            proofs_last_hour + 1, new_tokens, last_reset_hour, last_reset_minute,
            proofs_last_hour + 1, new_tokens, last_reset_hour, last_reset_minute
        ))

        return False, ""

    def _check_plausibility(self, proof: PoNWProof) -> Tuple[bool, str]:
        """Check if the claimed work is plausible."""
        # CRITICAL: Verify role claims against actual layer assignments.
        # This prevents nodes from claiming Validator/Driver bonuses they
        # don't deserve.
        if self._role_verifier and (proof.has_embedding or proof.has_lm_head):
            is_role_valid, actual_embed, actual_head = self._role_verifier(
                proof.node_id,
                proof.has_embedding,
                proof.has_lm_head
            )

            # Store VERIFIED roles for reward calculation. Even if we accept
            # the proof, we only pay bonuses for VERIFIED roles.
            self._verified_roles[proof.signature] = (actual_embed, actual_head)

            if not is_role_valid:
                if proof.has_lm_head and not actual_head:
                    logger.warning(f"FAKE VALIDATOR DETECTED: {proof.node_id[:16]}... claimed has_lm_head=True but is NOT a Validator")
                    return False, "Invalid role claim: not a Validator"
                if proof.has_embedding and not actual_embed:
                    logger.warning(f"FAKE DRIVER DETECTED: {proof.node_id[:16]}... claimed has_embedding=True but is NOT a Driver")
                    return False, "Invalid role claim: not a Driver"
        else:
            # No role verifier or no role claims - use the claimed values
            self._verified_roles[proof.signature] = (proof.has_embedding, proof.has_lm_head)

        # Uptime check
        if proof.uptime_seconds > MAX_UPTIME_PER_PROOF:
            return False, f"Uptime too high ({proof.uptime_seconds}s > {MAX_UPTIME_PER_PROOF}s)"

        # Token rate check (tokens per second)
        if proof.uptime_seconds > 0:
            tokens_per_second = proof.tokens_processed / proof.uptime_seconds
            max_tps = MAX_TOKENS_PER_MINUTE / 60
            if tokens_per_second > max_tps * 2:  # Allow 2x buffer
                return False, f"Token rate implausible ({tokens_per_second:.0f} > {max_tps * 2:.0f} tps)"

        # Training batches check (max ~120 per minute, i.e. 2 batches/second)
        if proof.uptime_seconds > 0:
            batches_per_minute = (proof.training_batches / proof.uptime_seconds) * 60
            if batches_per_minute > 120:
                return False, f"Training rate implausible ({batches_per_minute:.0f} batches/min)"

        return True, ""
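
    # Example of the training-rate check above: a proof claiming 600 batches
    # over 60 seconds of uptime works out to (600 / 60) * 60 = 600 batches/min,
    # well over the 120 batches/min ceiling, so the proof is rejected.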

    def process_proof(self, proof: PoNWProof) -> Tuple[bool, float, str]:
        """
        Process a verified proof and credit rewards.

        Returns: (success, reward_amount, message)
        """
        # Verify first
        is_valid, reason = self.verify_proof(proof)
        if not is_valid:
            return False, 0.0, reason

        # Calculate reward
        reward = self._calculate_reward(proof)

        with self.lock:
            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
                # Double-check replay (in case of a race)
                existing = conn.execute(
                    "SELECT 1 FROM proof_history WHERE signature = ?",
                    (proof.signature,)
                ).fetchone()
                if existing:
                    return False, 0.0, "Duplicate proof"

                # Record the proof (including current_loss and has_lm_head for
                # training stats aggregation)
                conn.execute("""
                    INSERT INTO proof_history
                    (signature, node_id, proof_type, timestamp, uptime_seconds,
                     tokens_processed, training_batches, data_samples, reward_amount, received_at, current_loss, has_lm_head)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    proof.signature, proof.node_id, proof.proof_type, proof.timestamp,
                    proof.uptime_seconds, proof.tokens_processed, proof.training_batches,
                    proof.data_samples, reward, time.time(), proof.current_loss, proof.has_lm_head
                ))

                # Credit balance
                conn.execute("""
                    INSERT INTO balances (node_id, balance, total_earned, last_proof_time, proof_count, created_at)
                    VALUES (?, ?, ?, ?, 1, ?)
                    ON CONFLICT(node_id) DO UPDATE SET
                        balance = balance + ?,
                        total_earned = total_earned + ?,
                        last_proof_time = ?,
                        proof_count = proof_count + 1
                """, (
                    proof.node_id, reward, reward, proof.timestamp, time.time(),
                    reward, reward, proof.timestamp
                ))

                # Update global stats
                conn.execute("""
                    UPDATE global_stats SET
                        total_minted = total_minted + ?,
                        total_proofs = total_proofs + 1,
                        updated_at = ?
                    WHERE id = 1
                """, (reward, time.time()))

                # If this was a marketplace request, register that a proof was
                # received (DISTRIBUTED: multiple nodes - driver, workers,
                # validator - submit proofs for the same request).
                if proof.request_id and self.inference_market:
                    is_complete, error = self.inference_market.register_proof_received(
                        request_id=proof.request_id,
                        node_id=proof.node_id,
                        is_driver=proof.has_embedding,
                        is_validator=proof.has_lm_head
                    )
                    if error:
                        logger.warning(f"Failed to register proof for {proof.request_id[:8]}...: {error}")
                    elif is_complete:
                        logger.info(f"Request {proof.request_id[:8]}... COMPLETED (all proofs received)")

                logger.info(f"PoNW: {proof.node_id[:12]}... earned {reward:.6f} NEURO "
                            f"(type={proof.proof_type}, uptime={proof.uptime_seconds:.0f}s, "
                            f"tokens={proof.tokens_processed})"
                            f"{f', request={proof.request_id[:8]}...' if proof.request_id else ''}")

                return True, reward, "Proof processed"
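
    # Illustrative sketch: the end-to-end flow a node drives for its own work
    # (the metric values below are hypothetical).
    def _example_submit_own_proof(self):
        proof = self.create_proof(
            proof_type=ProofType.UPTIME,
            uptime_seconds=300.0,  # five minutes online
            layers_held=4,
        )
        ok, reward, msg = self.process_proof(proof)
        logger.debug("example proof: ok=%s reward=%.6f msg=%s", ok, reward, msg)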

    def _calculate_reward(self, proof: PoNWProof) -> float:
        """
        Calculate the NEURO reward for a Proof of Neural Work.

        Reward Structure:
        ================

        1. UPTIME REWARD (all nodes):
           - 0.1 NEURO per minute of uptime
           - Incentivizes nodes to stay online

        2. INFERENCE REWARD (PURE MARKET-BASED):
           - Total pool: DYNAMIC (based on supply/demand)
           - Worthless model -> ~0 NEURO (no demand)
           - Good model -> market price rises naturally
           - Distributed by role:
             * DRIVER (has_embedding=True): see DRIVER_SHARE
             * WORKER (middle layers): see WORKER_SHARE
             * VALIDATOR (has_lm_head=True): see VALIDATOR_SHARE

        3. TRAINING REWARD:
           - See TRAINING_REWARD_PER_BATCH in economics/constants.py

        4. DATA REWARD:
           - See DATA_REWARD_PER_SAMPLE in economics/constants.py

        5. MULTIPLIERS:
           - Staking: logarithmic curve (see calculate_stake_multiplier)
           - Role bonus: defined in economics/constants.py
           - Layer bonus: defined in economics/constants.py
        """
        # =====================================================================
        # 1. UPTIME REWARD (same for all roles)
        # =====================================================================
        uptime_reward = (proof.uptime_seconds / 60.0) * UPTIME_REWARD_PER_MINUTE

        # =====================================================================
        # 2. INFERENCE REWARD (MARKETPLACE with REQUEST MATCHING)
        # =====================================================================
        # The price is LOCKED at request submission time (prevents timing attacks):
        # - If request_id is present: use the locked price from the InferenceRequest
        # - If no request_id: use the current market price (legacy/direct inference)

        if proof.request_id and self.inference_market:
            # NEW MARKETPLACE: use the locked price from the request
            request = self.inference_market.get_request(proof.request_id)

            if not request:
                raise ValueError(f"Request {proof.request_id} not found")

            if request.claimed_by != proof.node_id:
                raise ValueError(f"Request {proof.request_id} was claimed by {request.claimed_by}, "
                                 f"but proof submitted by {proof.node_id}")

            if request.completed:
                raise ValueError(f"Request {proof.request_id} already completed")

            # Use the LOCKED price from request submission time
            market_price = request.locked_price
            logger.debug(f"Using locked price {market_price:.6f} from request {proof.request_id[:8]}...")
        else:
            # Legacy mode or direct inference: use the current market price
            market_price = self.inference_market.get_current_price()
            logger.debug(f"Using current market price {market_price:.6f} (no request_id)")

        inference_pool = (proof.tokens_processed / 1_000_000.0) * market_price

        # Determine role and calculate share.
        # CRITICAL: use VERIFIED roles, not claimed roles! This ensures nodes
        # can't collect bonuses they haven't earned.
        if proof.signature in self._verified_roles:
            is_driver, is_validator = self._verified_roles[proof.signature]
            # Clean up the cache entry
            del self._verified_roles[proof.signature]
        else:
            # Fall back to claimed roles (for self-proofs or legacy)
            is_driver = proof.has_embedding
            is_validator = proof.has_lm_head

        is_worker = proof.layers_held > 0 and not (is_driver and is_validator)

        inference_reward = 0.0

        if is_driver:
            # Driver receives DRIVER_SHARE of the inference pool
            inference_reward += inference_pool * DRIVER_SHARE

        if is_validator:
            # Validator receives VALIDATOR_SHARE of the inference pool
            inference_reward += inference_pool * VALIDATOR_SHARE

        if is_worker or proof.layers_held > 0:
            # Workers are rewarded for the layers they process.
            #
            # WORKER_SHARE (70%) represents the total computation work. Each
            # layer does roughly equal work, so in principle:
            #
            #     worker_reward = (layers_held / total_layers) * worker_pool
            #
            # But total_layers across the network is unknown at proof time.
            # Instead, the full worker share is granted to any node holding
            # layers, because each node only claims for tokens IT processed
            # through ITS layers - tokens_processed already reflects its work.
            #
            # In multi-node inference:
            # - Driver processes 100K tokens -> claims Driver share for 100K
            # - Worker1 processes 100K tokens -> claims Worker share for 100K
            # - Worker2 processes 100K tokens -> claims Worker share for 100K
            # - Validator processes 100K tokens -> claims Validator share for 100K
            #
            # Each node's tokens_processed equals the tokens it actually
            # computed, so it is already the accurate measure of contribution.
            worker_pool = inference_pool * WORKER_SHARE
            inference_reward += worker_pool  # Full share - tokens_processed is already per-node

        # =====================================================================
        # 3. TRAINING REWARD
        # =====================================================================
        training_reward = proof.training_batches * TRAINING_REWARD_PER_BATCH

        # =====================================================================
        # 4. DATA REWARD
        # =====================================================================
        data_reward = proof.data_samples * DATA_REWARD_PER_SAMPLE

        # =====================================================================
        # 5. CALCULATE BASE REWARD
        # =====================================================================
        base_reward = uptime_reward + inference_reward + training_reward + data_reward

        # =====================================================================
        # 6. STAKING MULTIPLIER (with diminishing returns)
        # =====================================================================
        # SECURITY: for LOCAL proofs, use our verified stake. For REMOTE
        # proofs, use the claimed stake but cap the multiplier.
        stake = self._get_stake(proof.node_id)

        # If this is a REMOTE proof (not from us), cap the stake multiplier
        # to prevent fake stake claims from inflating rewards.
        is_local_proof = (proof.node_id == self.node_id)

        if is_local_proof:
            # Our own proof - use the full stake multiplier
            stake_multiplier = self._calculate_stake_multiplier(stake)
        else:
            # Remote proof - cap the multiplier for security (see
            # economics/constants.py). This limits the impact of fake stake claims.
            stake_multiplier = min(REMOTE_STAKE_MULTIPLIER_CAP, self._calculate_stake_multiplier(stake))

        # =====================================================================
        # 7. ROLE BONUS MULTIPLIER (applied to the base reward)
        # =====================================================================
        role_multiplier = 1.0

        if is_driver:
            role_multiplier *= DRIVER_BONUS  # See economics/constants.py for the rate

        if is_validator:
            role_multiplier *= VALIDATOR_BONUS  # See economics/constants.py for the rate

        # Layer bonus for workers
        if proof.layers_held > 0:
            layer_bonus = min(MAX_LAYER_BONUS, proof.layers_held * WORKER_LAYER_BONUS)
            role_multiplier *= (1.0 + layer_bonus)

        # Training bonus
        if proof.training_batches > 0:
            role_multiplier *= TRAINING_BONUS

        # =====================================================================
        # 8. FINAL REWARD
        # =====================================================================
        total_reward = base_reward * stake_multiplier * role_multiplier

        return total_reward
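
    # Worked example (with assumed numbers; the real constants live in
    # neuroshard/core/economics/constants.py and may differ):
    #   - 10 minutes of uptime at UPTIME_REWARD_PER_MINUTE = 0.1:
    #       uptime_reward = 10 * 0.1 = 1.0 NEURO
    #   - 100,000 tokens at an assumed market price of 2.0 NEURO per 1M tokens,
    #     worker role with WORKER_SHARE = 0.7:
    #       inference_reward = (100_000 / 1_000_000) * 2.0 * 0.7 = 0.14 NEURO
    #   - no training/data work, stake_multiplier = 1.0, role_multiplier = 1.0:
    #       total_reward = (1.0 + 0.14) * 1.0 * 1.0 = 1.14 NEURO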

    def _get_stake(self, node_id: str) -> float:
        """Get the staked amount for a node."""
        with sqlite3.connect(self.db_path, timeout=60.0) as conn:
            row = conn.execute(
                "SELECT amount FROM stakes WHERE node_id = ? AND locked_until > ?",
                (node_id, time.time())
            ).fetchone()
            return row[0] if row else 0.0

    def _calculate_stake_multiplier(self, stake: float) -> float:
        """
        Calculate the stake multiplier using the centralized economics function.

        See neuroshard/core/economics/constants.py for the formula and examples.
        """
        return calculate_stake_multiplier(stake)
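
    # First-order intuition from the staking docs: roughly a 10% bonus per
    # 1000 NEURO staked, with diminishing returns at larger stakes (the exact
    # logarithmic curve is calculate_stake_multiplier in economics/constants.py).
    # Under that linear approximation, a 2000 NEURO stake gives:
    #   multiplier ~= 1.0 + 2 * 0.10 = 1.2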

    # ========================================================================
    # VALIDATOR ELIGIBILITY
    # ========================================================================

    def is_eligible_validator(self, node_id: str = None) -> Tuple[bool, str]:
        """
        Check if a node is eligible to be a Validator.

        Requirements:
        1. Minimum stake of VALIDATOR_MIN_STAKE (100 NEURO)
        2. Memory requirements checked at layer assignment time

        Returns: (eligible, reason)
        """
        node_id = node_id or self.node_id
        stake = self._get_stake(node_id)

        if stake < VALIDATOR_MIN_STAKE:
            return False, f"Insufficient stake: {stake:.2f} < {VALIDATOR_MIN_STAKE} NEURO required"

        return True, f"Eligible with {stake:.2f} NEURO staked"

    def get_validator_info(self, node_id: str = None) -> dict:
        """Get validator status and info for a node."""
        node_id = node_id or self.node_id
        stake = self._get_stake(node_id)
        eligible, reason = self.is_eligible_validator(node_id)

        return {
            "node_id": node_id,
            "stake": stake,
            "stake_multiplier": self._calculate_stake_multiplier(stake),
            "is_eligible_validator": eligible,
            "eligibility_reason": reason,
            "min_stake_required": VALIDATOR_MIN_STAKE,
            "validation_fee_per_proof": VALIDATION_FEE_PER_PROOF,
        }

    # ========================================================================
    # DYNAMIC INFERENCE MARKET
    # ========================================================================

    def register_inference_capacity(
        self,
        tokens_per_second: int,
        min_price: float = 0.0
    ):
        """
        Register this node's available inference capacity with the market.

        Args:
            tokens_per_second: Processing capacity (tokens/sec)
            min_price: Minimum NEURO per 1M tokens this node will accept
        """
        if not self.inference_market:
            return

        self.inference_market.register_capacity(
            node_id=self.node_id,
            tokens_per_second=tokens_per_second,
            min_price=min_price
        )
        logger.debug(f"Registered inference capacity: {tokens_per_second} t/s, min_price={min_price:.4f}")

    def withdraw_inference_capacity(self):
        """
        Withdraw this node from the inference market (e.g., to focus on training).
        """
        if not self.inference_market:
            return

        self.inference_market.withdraw_capacity(self.node_id)
        logger.debug("Withdrew from inference market")

    def get_inference_market_stats(self) -> dict:
        """
        Get current inference market statistics.

        Returns:
            Market stats including price, supply, demand, utilization
        """
        # ALWAYS use dynamic market pricing (no fallback)
        stats = self.inference_market.get_market_stats()
        stats["mode"] = "pure_market"
        return stats

    def submit_inference_request(
        self,
        request_id: str,
        user_id: str,
        tokens_requested: int,
        max_price: float,
        priority: int = 0
    ) -> bool:
        """
        Submit an inference request to the market.

        Args:
            request_id: Unique request identifier
            user_id: User submitting the request
            tokens_requested: Number of tokens to generate
            max_price: Maximum NEURO per 1M tokens the user will pay
            priority: Request priority (higher = more urgent)

        Returns:
            True if the request was accepted, False if the market price
            exceeds max_price
        """
        return self.inference_market.submit_request(
            request_id=request_id,
            user_id=user_id,
            tokens_requested=tokens_requested,
            max_price=max_price,
            priority=priority
        )
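
    # Illustrative sketch: how a node and a user interact with the market
    # through this ledger (the id values are hypothetical placeholders).
    def _example_market_flow(self):
        # Offer capacity: 500 tokens/sec, accept any price.
        self.register_inference_capacity(tokens_per_second=500, min_price=0.0)

        # A user submits a request, capped at 2.0 NEURO per 1M tokens.
        accepted = self.submit_inference_request(
            request_id="req-0001",  # hypothetical
            user_id="user-alice",   # hypothetical
            tokens_requested=100_000,
            max_price=2.0,
        )
        if not accepted:
            logger.debug("market price currently exceeds the user's max_price")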

    # ========================================================================
    # PROOF VALIDATION (Stake-Weighted Consensus)
    # ========================================================================

    def validate_proof_as_validator(
        self,
        proof: PoNWProof,
        vote: bool,
        validation_details: str = ""
    ) -> Tuple[bool, float, str]:
        """
        Cast a validation vote on a proof as a Validator.

        Only nodes meeting VALIDATOR_MIN_STAKE can validate.
        Validators earn VALIDATION_FEE_PER_PROOF for each validation.
        Bad validators (voting against consensus) can be slashed.

        Args:
            proof: The PoNW proof to validate
            vote: True = valid, False = invalid
            validation_details: Optional details about the validation

        Returns: (success, fee_earned, message)
        """
        # Check eligibility
        eligible, reason = self.is_eligible_validator()
        if not eligible:
            return False, 0.0, f"Not eligible to validate: {reason}"

        # Record the validation vote
        validation_id = hashlib.sha256(
            f"{self.node_id}:{proof.signature}:{time.time()}".encode()
        ).hexdigest()[:32]

        my_stake = self._get_stake(self.node_id)

        with self.lock:
            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
                # Create the validation_votes table if it does not exist
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS validation_votes (
                        validation_id TEXT PRIMARY KEY,
                        proof_signature TEXT NOT NULL,
                        validator_id TEXT NOT NULL,
                        validator_stake REAL NOT NULL,
                        vote INTEGER NOT NULL,
                        details TEXT,
                        timestamp REAL NOT NULL,
                        fee_earned REAL DEFAULT 0.0,
                        UNIQUE(proof_signature, validator_id)
                    )
                """)

                # Check if we already voted
                existing = conn.execute(
                    "SELECT validation_id FROM validation_votes WHERE proof_signature = ? AND validator_id = ?",
                    (proof.signature, self.node_id)
                ).fetchone()

                if existing:
                    return False, 0.0, "Already voted on this proof"

                # Record the vote
                conn.execute("""
                    INSERT INTO validation_votes
                    (validation_id, proof_signature, validator_id, validator_stake, vote, details, timestamp, fee_earned)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    validation_id,
                    proof.signature,
                    self.node_id,
                    my_stake,
                    1 if vote else 0,
                    validation_details,
                    time.time(),
                    VALIDATION_FEE_PER_PROOF
                ))

                # Credit the validation fee
                conn.execute("""
                    UPDATE balances SET
                        balance = balance + ?,
                        total_earned = total_earned + ?
                    WHERE node_id = ?
                """, (VALIDATION_FEE_PER_PROOF, VALIDATION_FEE_PER_PROOF, self.node_id))

                logger.info(f"Validation vote recorded: {self.node_id[:16]}... voted {'VALID' if vote else 'INVALID'} "
                            f"on proof {proof.signature[:16]}... (stake: {my_stake:.2f})")

                return True, VALIDATION_FEE_PER_PROOF, f"Vote recorded, earned {VALIDATION_FEE_PER_PROOF} NEURO"

    def get_proof_validation_status(self, proof_signature: str) -> dict:
        """
        Get the current validation status of a proof.

        Returns stake-weighted vote tallies and consensus status.
        """
        with sqlite3.connect(self.db_path, timeout=60.0) as conn:
            # Check if the validation_votes table exists
            table_exists = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name='validation_votes'"
            ).fetchone()

            if not table_exists:
                return {
                    "proof_signature": proof_signature,
                    "total_votes": 0,
                    "valid_votes": 0,
                    "invalid_votes": 0,
                    "valid_stake": 0.0,
                    "invalid_stake": 0.0,
                    "total_stake": 0.0,
                    "consensus_reached": False,
                    "consensus_result": None,
                }

            # Get all votes for this proof
            votes = conn.execute("""
                SELECT validator_id, validator_stake, vote
                FROM validation_votes
                WHERE proof_signature = ?
            """, (proof_signature,)).fetchall()

            valid_stake = 0.0
            invalid_stake = 0.0
            valid_count = 0
            invalid_count = 0

            for _, stake, vote in votes:
                if vote:
                    valid_stake += stake
                    valid_count += 1
                else:
                    invalid_stake += stake
                    invalid_count += 1

            total_stake = valid_stake + invalid_stake

            # Check consensus
            consensus_reached = False
            consensus_result = None

            if total_stake > 0:
                valid_ratio = valid_stake / total_stake
                if valid_ratio >= VALIDATION_CONSENSUS_THRESHOLD:
                    consensus_reached = True
                    consensus_result = True  # Valid
                elif (1 - valid_ratio) >= VALIDATION_CONSENSUS_THRESHOLD:
                    consensus_reached = True
                    consensus_result = False  # Invalid

            return {
                "proof_signature": proof_signature,
                "total_votes": len(votes),
                "valid_votes": valid_count,
                "invalid_votes": invalid_count,
                "valid_stake": valid_stake,
                "invalid_stake": invalid_stake,
                "total_stake": total_stake,
                "valid_ratio": valid_stake / total_stake if total_stake > 0 else 0,
                "consensus_reached": consensus_reached,
                "consensus_result": consensus_result,
                "threshold": VALIDATION_CONSENSUS_THRESHOLD,
            }
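
    # Worked example of stake-weighted consensus, assuming a threshold of 0.66
    # (the real value is VALIDATION_CONSENSUS_THRESHOLD):
    #   votes: A (stake 300, valid), B (stake 100, valid), C (stake 100, invalid)
    #   valid_ratio = 400 / 500 = 0.8 >= 0.66  ->  consensus reached: VALID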

    def select_validators_for_proof(self, proof: PoNWProof, num_validators: int = 3) -> List[str]:
        """
        Select validators for a proof using stake-weighted random selection.

        Selection algorithm:
        1. Get all eligible validators (stake >= VALIDATOR_MIN_STAKE)
        2. Weight by stake with a randomness factor
        3. Select the top N by weighted score

        This ensures:
        - Higher stake = higher chance of selection
        - But randomness prevents monopoly
        - Small stakers still get opportunities
        """
        import random

        with sqlite3.connect(self.db_path, timeout=60.0) as conn:
            # Get all nodes with sufficient stake
            eligible = conn.execute("""
                SELECT node_id, amount
                FROM stakes
                WHERE amount >= ? AND locked_until > ?
            """, (VALIDATOR_MIN_STAKE, time.time())).fetchall()

            if not eligible:
                return []

            # Exclude the proof submitter
            eligible = [(nid, stake) for nid, stake in eligible if nid != proof.node_id]

            if not eligible:
                return []

            # Calculate selection scores (stake-weighted with randomness):
            #   score = stake * (1 - randomness) + random * randomness * max_stake
            max_stake = max(s for _, s in eligible)
            scores = []
            for node_id, stake in eligible:
                stake_component = stake * (1 - VALIDATOR_SELECTION_RANDOMNESS)
                random_component = random.random() * VALIDATOR_SELECTION_RANDOMNESS * max_stake
                score = stake_component + random_component
                scores.append((node_id, score, stake))

            # Sort by score and select the top N
            scores.sort(key=lambda x: x[1], reverse=True)
            selected = [node_id for node_id, _, _ in scores[:num_validators]]

            logger.debug(f"Selected {len(selected)} validators for proof {proof.signature[:16]}...")

            return selected
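
    # Worked example of the selection score, assuming
    # VALIDATOR_SELECTION_RANDOMNESS = 0.3 and max_stake = 1000:
    #   a node with stake 800 and random draw 0.5 scores
    #   800 * 0.7 + 0.5 * 0.3 * 1000 = 560 + 150 = 710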
+    # ========================================================================
+    # TRANSACTIONS & FEE BURN
+    # ========================================================================
+
+    def transfer(
+        self,
+        to_id: str,
+        amount: float,
+        memo: str = ""
+    ) -> Tuple[bool, str, Optional[Transaction]]:
+        """
+        Transfer NEURO to another node with 5% fee burn.
+
+        Fee Structure:
+        - 5% of amount is burned (deflationary)
+        - Recipient receives full amount
+        - Sender pays amount + fee
+
+        Returns: (success, message, transaction)
+        """
+        if amount <= 0:
+            return False, "Amount must be positive", None
+
+        # Calculate fee and burn
+        fee = amount * FEE_BURN_RATE
+        burn_amount = fee  # 100% of fee is burned
+        total_deduction = amount + fee
+
+        # Generate transaction ID
+        tx_id = hashlib.sha256(
+            f"{self.node_id}:{to_id}:{amount}:{time.time()}:{os.urandom(8).hex()}".encode()
+        ).hexdigest()
+
+        tx = Transaction(
+            tx_id=tx_id,
+            from_id=self.node_id,
+            to_id=to_id,
+            amount=amount,
+            fee=fee,
+            burn_amount=burn_amount,
+            timestamp=time.time(),
+            signature="",
+            memo=memo
+        )
+
+        # Sign transaction
+        tx.signature = self._sign(tx.canonical_payload())
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Check sender balance
+                row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (self.node_id,)
+                ).fetchone()
+
+                current_balance = row[0] if row else 0.0
+
+                if current_balance < total_deduction:
+                    return False, f"Insufficient balance ({current_balance:.6f} < {total_deduction:.6f})", None
+
+                # Deduct from sender (amount + fee)
+                conn.execute("""
+                    UPDATE balances SET
+                        balance = balance - ?,
+                        total_spent = total_spent + ?
+                    WHERE node_id = ?
+                """, (total_deduction, total_deduction, self.node_id))
+
+                # Credit to recipient
+                conn.execute("""
+                    INSERT INTO balances (node_id, balance, total_earned, created_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        balance = balance + ?,
+                        total_earned = total_earned + ?
+                """, (to_id, amount, amount, time.time(), amount, amount))
+
+                # Record burn (to special burn address)
+                conn.execute("""
+                    INSERT INTO balances (node_id, balance, total_earned, created_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        balance = balance + ?,
+                        total_earned = total_earned + ?
+                """, (BURN_ADDRESS, burn_amount, burn_amount, time.time(), burn_amount, burn_amount))
+
+                # Record transaction
+                conn.execute("""
+                    INSERT INTO transactions
+                        (tx_id, from_id, to_id, amount, fee, burn_amount, timestamp, memo, signature)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """, (
+                    tx.tx_id, tx.from_id, tx.to_id, tx.amount, tx.fee,
+                    tx.burn_amount, tx.timestamp, tx.memo, tx.signature
+                ))
+
+                # Update global stats
+                conn.execute("""
+                    UPDATE global_stats SET
+                        total_burned = total_burned + ?,
+                        total_transferred = total_transferred + ?,
+                        total_transactions = total_transactions + 1,
+                        updated_at = ?
+                    WHERE id = 1
+                """, (burn_amount, amount, time.time()))
+
+        logger.info(f"Transfer: {self.node_id[:12]}... → {to_id[:12]}... "
+                    f"amount={amount:.6f} fee={fee:.6f} burned={burn_amount:.6f}")
+
+        return True, "Transfer complete", tx
+
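To make the fee accounting concrete: with the 5% burn described in the docstring, sending 100 NEURO debits the sender 105, credits the recipient the full 100, and moves 5 to the burn address. A quick check, with FEE_BURN_RATE = 0.05 assumed to match the documented 5%:

    FEE_BURN_RATE = 0.05  # assumed to match the 5% documented above

    amount = 100.0
    fee = amount * FEE_BURN_RATE       # 5.0 NEURO, all of it burned
    total_deduction = amount + fee     # 105.0 debited from the sender

    # Conservation: sender -105, recipient +100, burn address +5.
    assert fee == 5.0 and total_deduction == 105.0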
+    def spend_for_inference(self, tokens_requested: int) -> Tuple[bool, float, str]:
+        """
+        Spend NEURO for inference (with 5% burn).
+
+        Cost: 1 NEURO per 1M tokens (from whitepaper)
+        Fee: 5% burned
+
+        Returns: (success, cost, message)
+        """
+        # Calculate cost
+        cost = (tokens_requested / 1_000_000.0) * 1.0
+        cost = max(0.0001, cost)  # Minimum cost
+
+        fee = cost * FEE_BURN_RATE
+        total_cost = cost + fee
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Check balance
+                row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (self.node_id,)
+                ).fetchone()
+
+                current_balance = row[0] if row else 0.0
+
+                if current_balance < total_cost:
+                    return False, total_cost, f"Insufficient NEURO ({current_balance:.6f} < {total_cost:.6f})"
+
+                # Deduct cost
+                conn.execute("""
+                    UPDATE balances SET
+                        balance = balance - ?,
+                        total_spent = total_spent + ?
+                    WHERE node_id = ?
+                """, (total_cost, total_cost, self.node_id))
+
+                # Burn the fee
+                conn.execute("""
+                    INSERT INTO balances (node_id, balance, total_earned, created_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        balance = balance + ?
+                """, (BURN_ADDRESS, fee, fee, time.time(), fee))
+
+                # Update global stats
+                conn.execute("""
+                    UPDATE global_stats SET
+                        total_burned = total_burned + ?,
+                        updated_at = ?
+                    WHERE id = 1
+                """, (fee, time.time()))
+
+        logger.info(f"Inference spend: {self.node_id[:12]}... cost={cost:.6f} fee={fee:.6f} burned")
+
+        return True, total_cost, "Inference authorized"
+
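The pricing rule is linear in tokens with a small floor, so tiny requests still cost something. A worked example using the same constants as the method above:

    def inference_cost(tokens_requested, fee_burn_rate=0.05):
        # 1 NEURO per 1M tokens, floored at 0.0001, plus the 5% burn fee
        cost = max(0.0001, tokens_requested / 1_000_000.0)
        return cost + cost * fee_burn_rate

    assert abs(inference_cost(2_000_000) - 2.1) < 1e-9     # 2 NEURO + 5% fee
    assert abs(inference_cost(10) - 0.000105) < 1e-12      # the floor applies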
+    # ========================================================================
+    # STAKING
+    # ========================================================================
+
+    def stake(self, amount: float, duration_days: int = 30) -> Tuple[bool, str]:
+        """
+        Stake NEURO for reward multiplier.
+
+        Staking provides:
+        - 10% bonus per 1000 NEURO staked
+        - Locked for specified duration
+        """
+        if amount <= 0:
+            return False, "Amount must be positive"
+
+        lock_until = time.time() + (duration_days * 24 * 3600)
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Check balance
+                row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (self.node_id,)
+                ).fetchone()
+
+                current_balance = row[0] if row else 0.0
+
+                if current_balance < amount:
+                    return False, f"Insufficient balance ({current_balance:.6f} < {amount:.6f})"
+
+                # Get current stake
+                row = conn.execute(
+                    "SELECT amount FROM stakes WHERE node_id = ?",
+                    (self.node_id,)
+                ).fetchone()
+                current_stake = row[0] if row else 0.0
+
+                # Deduct from balance
+                conn.execute("""
+                    UPDATE balances SET balance = balance - ? WHERE node_id = ?
+                """, (amount, self.node_id))
+
+                # Add to stake
+                conn.execute("""
+                    INSERT INTO stakes (node_id, amount, locked_until, updated_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        amount = amount + ?,
+                        locked_until = MAX(locked_until, ?),
+                        updated_at = ?
+                """, (
+                    self.node_id, amount, lock_until, time.time(),
+                    amount, lock_until, time.time()
+                ))
+
+        new_stake = current_stake + amount
+        multiplier = calculate_stake_multiplier(new_stake)
+
+        logger.info(f"Staked: {self.node_id[:12]}... amount={amount:.6f} "
+                    f"total_stake={new_stake:.6f} multiplier={multiplier:.2f}x")
+
+        return True, f"Staked {amount:.6f} NEURO (new multiplier: {multiplier:.2f}x)"
+
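calculate_stake_multiplier is defined elsewhere in the economics package and is not part of this diff. Going only by the docstring's "10% bonus per 1000 NEURO staked", a plausible sketch looks like the following; the 2.0x cap is purely an assumption, and the package's actual constants may differ:

    def calculate_stake_multiplier(stake: float, cap: float = 2.0) -> float:
        # 1.0x base, +0.1x per 1000 NEURO staked, capped (cap is assumed)
        return min(cap, 1.0 + (stake / 1000.0) * 0.1)

    assert calculate_stake_multiplier(0.0) == 1.0
    assert abs(calculate_stake_multiplier(1000.0) - 1.1) < 1e-12
    assert abs(calculate_stake_multiplier(5000.0) - 1.5) < 1e-12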
+    def unstake(self) -> Tuple[bool, float, str]:
+        """
+        Unstake NEURO (if lock period expired).
+
+        Returns: (success, amount_unstaked, message)
+        """
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                row = conn.execute(
+                    "SELECT amount, locked_until FROM stakes WHERE node_id = ?",
+                    (self.node_id,)
+                ).fetchone()
+
+                if not row or row[0] == 0:
+                    return False, 0.0, "No stake found"
+
+                amount, locked_until = row
+
+                if time.time() < locked_until:
+                    remaining = (locked_until - time.time()) / 3600
+                    return False, 0.0, f"Stake locked for {remaining:.1f} more hours"
+
+                # Return stake to balance
+                conn.execute("""
+                    UPDATE balances SET balance = balance + ? WHERE node_id = ?
+                """, (amount, self.node_id))
+
+                # Clear stake
+                conn.execute("""
+                    UPDATE stakes SET amount = 0, updated_at = ? WHERE node_id = ?
+                """, (time.time(), self.node_id))
+
+        logger.info(f"Unstaked: {self.node_id[:12]}... amount={amount:.6f}")
+
+        return True, amount, f"Unstaked {amount:.6f} NEURO"
+
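Putting stake() and unstake() together, the expected lifecycle looks like this (hypothetical usage: `ledger` is a NEUROLedger with sufficient balance; return shapes follow the docstrings above):

    # Lock 2000 NEURO for 30 days.
    ok, msg = ledger.stake(2000.0, duration_days=30)
    print(msg)          # e.g. "Staked 2000.000000 NEURO (new multiplier: 1.20x)"

    # Attempting to unstake before the lock expires fails gracefully.
    ok, amount, msg = ledger.unstake()
    if not ok:
        print(msg)      # "Stake locked for N more hours"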
+    def update_stake(self, node_id: str, amount: float, locked_until: Optional[float] = None) -> bool:
+        """
+        Update stake record for a REMOTE node (from P2P gossip).
+
+        SECURITY MODEL:
+        ===============
+        This does NOT directly affect reward calculations for the remote node.
+        It only maintains a local VIEW of what other nodes claim to have staked.
+
+        The actual reward multiplier is calculated based on:
+        1. For LOCAL node: our own stake (from the stake() method, which requires balance)
+        2. For REMOTE proofs: we can verify their claimed multiplier is reasonable
+
+        A malicious node can claim any stake, but:
+        - It cannot earn MORE than the base reward without actual work
+        - Validators cross-check stake claims during proof validation
+        - Consensus rejects proofs with implausible multipliers
+
+        This gossip sync is primarily for:
+        - Validator selection (who can validate proofs)
+        - Network visibility (dashboard displays)
+
+        Returns True if the update was applied.
+        """
+        if locked_until is None:
+            locked_until = time.time() + 86400 * 30  # Default 30-day lock
+
+        # Validate stake amount using centralized economics
+        is_valid, error_msg = is_valid_stake_amount(amount)
+        if not is_valid:
+            logger.warning(f"Rejected stake claim from {node_id[:16]}...: {error_msg}")
+            return False
+
+        try:
+            with self.lock:
+                with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                    # Mark this as a REMOTE stake (not locally verified)
+                    conn.execute("""
+                        INSERT INTO stakes (node_id, amount, locked_until, updated_at)
+                        VALUES (?, ?, ?, ?)
+                        ON CONFLICT(node_id) DO UPDATE SET
+                            amount = ?,
+                            locked_until = ?,
+                            updated_at = ?
+                    """, (node_id, amount, locked_until, time.time(), amount, locked_until, time.time()))
+            return True
+        except Exception as e:
+            logger.error(f"Failed to update stake for {node_id[:16]}...: {e}")
+            return False
+
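is_valid_stake_amount comes from the centralized economics module and is not shown in this diff. A minimal sketch of what such a validator plausibly checks, under assumed bounds (MAX_PLAUSIBLE_STAKE is invented for illustration and is not the package's real limit):

    import math
    from typing import Tuple

    MAX_PLAUSIBLE_STAKE = 1_000_000.0  # assumed upper bound, for illustration

    def is_valid_stake_amount(amount: float) -> Tuple[bool, str]:
        # Reject non-finite, negative, or implausibly large stake claims.
        if not isinstance(amount, (int, float)) or not math.isfinite(amount):
            return False, "Stake must be a finite number"
        if amount < 0:
            return False, "Stake cannot be negative"
        if amount > MAX_PLAUSIBLE_STAKE:
            return False, f"Stake exceeds plausible maximum ({MAX_PLAUSIBLE_STAKE})"
        return True, ""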
+    def get_local_stake(self, node_id: str) -> float:
+        """Get stake for a specific node."""
+        return self._get_stake(node_id)
+
+    def create_transaction(self, from_id: str, to_id: str, amount: float, signature: str) -> bool:
+        """
+        Create a transaction from gossip (external source).
+
+        Note: This only processes transactions where we are the sender,
+        as we can't spend other nodes' balances. The supplied signature is
+        not used here; transfer() signs the transaction locally.
+        """
+        # Only allow if we're the sender
+        if from_id != self.node_id:
+            logger.debug(f"Cannot create transaction for another node: {from_id[:12]}...")
+            return False
+
+        success, _, _ = self.transfer(to_id, amount)
+        return success
+
+    # ========================================================================
+    # SLASHING (Fraud Prevention)
+    # ========================================================================
+
+    def report_fraud(
+        self,
+        accused_id: str,
+        reason: str,
+        proof_signature: Optional[str] = None,
+        evidence: Optional[str] = None
+    ) -> Tuple[bool, str]:
+        """
+        Report suspected fraud for slashing.
+
+        If verified, the accused node loses SLASH_AMOUNT NEURO:
+        - 50% goes to reporter (whistleblower reward)
+        - 50% is burned
+        """
+        report_id = hashlib.sha256(
+            f"{self.node_id}:{accused_id}:{time.time()}:{os.urandom(8).hex()}".encode()
+        ).hexdigest()
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                conn.execute("""
+                    INSERT INTO fraud_reports
+                        (report_id, reporter_id, accused_id, proof_signature, reason, evidence, created_at)
+                    VALUES (?, ?, ?, ?, ?, ?, ?)
+                """, (
+                    report_id, self.node_id, accused_id, proof_signature,
+                    reason, evidence, time.time()
+                ))
+
+        logger.warning(f"Fraud report: {self.node_id[:12]}... reported {accused_id[:12]}... "
+                       f"reason={reason}")
+
+        return True, f"Fraud report submitted (ID: {report_id[:16]}...)"
+
+    def execute_slash(self, accused_id: str, reporter_id: str) -> Tuple[bool, str]:
+        """
+        Execute slashing after fraud verification.
+
+        Called by the consensus mechanism after fraud is confirmed.
+        """
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Get accused balance
+                row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (accused_id,)
+                ).fetchone()
+
+                current_balance = row[0] if row else 0.0
+                slash_amount = min(SLASH_AMOUNT, current_balance)
+
+                if slash_amount <= 0:
+                    return False, "No balance to slash"
+
+                whistleblower_reward = slash_amount * WHISTLEBLOWER_REWARD_RATE
+                burn_amount = slash_amount - whistleblower_reward
+
+                # Deduct from accused
+                conn.execute("""
+                    UPDATE balances SET balance = balance - ? WHERE node_id = ?
+                """, (slash_amount, accused_id))
+
+                # Reward whistleblower
+                conn.execute("""
+                    INSERT INTO balances (node_id, balance, total_earned, created_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        balance = balance + ?,
+                        total_earned = total_earned + ?
+                """, (reporter_id, whistleblower_reward, whistleblower_reward, time.time(),
+                      whistleblower_reward, whistleblower_reward))
+
+                # Burn remainder
+                conn.execute("""
+                    INSERT INTO balances (node_id, balance, total_earned, created_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        balance = balance + ?
+                """, (BURN_ADDRESS, burn_amount, burn_amount, time.time(), burn_amount))
+
+                # Update global stats
+                conn.execute("""
+                    UPDATE global_stats SET
+                        total_burned = total_burned + ?,
+                        updated_at = ?
+                    WHERE id = 1
+                """, (burn_amount, time.time()))
+
+        logger.warning(f"Slashed: {accused_id[:12]}... lost {slash_amount:.6f} NEURO "
+                       f"(whistleblower={whistleblower_reward:.6f}, burned={burn_amount:.6f})")
+
+        return True, f"Slashed {slash_amount:.6f} NEURO"
+
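With WHISTLEBLOWER_REWARD_RATE at the 50% stated in report_fraud's docstring, the split is straightforward; note the slash is capped at whatever the accused actually holds. SLASH_AMOUNT = 100 is an assumed value for the example:

    SLASH_AMOUNT = 100.0              # assumed; the real constant lives in economics constants
    WHISTLEBLOWER_REWARD_RATE = 0.5   # 50/50 split per the report_fraud docstring

    accused_balance = 80.0
    slash = min(SLASH_AMOUNT, accused_balance)    # 80.0 - capped at the balance
    reward = slash * WHISTLEBLOWER_REWARD_RATE    # 40.0 to the reporter
    burned = slash - reward                       # 40.0 permanently burned

    assert reward == 40.0 and burned == 40.0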
+    def slash_bad_validator(self, validator_id: str, proof_signature: str, reason: str) -> Tuple[bool, str]:
+        """
+        Slash a validator who voted against consensus.
+
+        Validators are held to a higher standard: they are slashed 2x the normal
+        amount for voting incorrectly (VALIDATOR_SLASH_MULTIPLIER).
+
+        This is called when consensus is reached and a validator's vote differs.
+        """
+        slash_amount = SLASH_AMOUNT * VALIDATOR_SLASH_MULTIPLIER
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Get validator's stake (they lose from stake first)
+                stake_row = conn.execute(
+                    "SELECT amount FROM stakes WHERE node_id = ?",
+                    (validator_id,)
+                ).fetchone()
+
+                stake = stake_row[0] if stake_row else 0.0
+
+                # Get balance
+                balance_row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (validator_id,)
+                ).fetchone()
+
+                balance = balance_row[0] if balance_row else 0.0
+
+                total_available = stake + balance
+                actual_slash = min(slash_amount, total_available)
+
+                if actual_slash <= 0:
+                    return False, "No funds to slash"
+
+                # Deduct from stake first, then balance
+                stake_deduction = min(actual_slash, stake)
+                balance_deduction = actual_slash - stake_deduction
+
+                if stake_deduction > 0:
+                    conn.execute("""
+                        UPDATE stakes SET amount = amount - ? WHERE node_id = ?
+                    """, (stake_deduction, validator_id))
+
+                if balance_deduction > 0:
+                    conn.execute("""
+                        UPDATE balances SET balance = balance - ? WHERE node_id = ?
+                    """, (balance_deduction, validator_id))
+
+                # Burn the slashed amount
+                conn.execute("""
+                    INSERT INTO balances (node_id, balance, total_earned, created_at)
+                    VALUES (?, ?, ?, ?)
+                    ON CONFLICT(node_id) DO UPDATE SET
+                        balance = balance + ?
+                """, (BURN_ADDRESS, actual_slash, actual_slash, time.time(), actual_slash))
+
+                # Update global stats
+                conn.execute("""
+                    UPDATE global_stats SET
+                        total_burned = total_burned + ?,
+                        updated_at = ?
+                    WHERE id = 1
+                """, (actual_slash, time.time()))
+
+                # Record the slash
+                conn.execute("""
+                    INSERT INTO fraud_reports
+                        (report_id, reporter_id, accused_id, proof_signature, reason, evidence, created_at, status)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+                """, (
+                    hashlib.sha256(f"validator_slash:{validator_id}:{proof_signature}:{time.time()}".encode()).hexdigest()[:32],
+                    "CONSENSUS",  # Reporter is the consensus mechanism
+                    validator_id,
+                    proof_signature,
+                    reason,
+                    f"Slashed {actual_slash:.6f} NEURO (stake: {stake_deduction:.6f}, balance: {balance_deduction:.6f})",
+                    time.time(),
+                    "executed"
+                ))
+
+        logger.warning(f"Validator slashed: {validator_id[:12]}... lost {actual_slash:.6f} NEURO "
+                       f"for bad validation on {proof_signature[:16]}... Reason: {reason}")
+
+        return True, f"Validator slashed {actual_slash:.6f} NEURO"
+
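The stake-first deduction order matters when a validator's locked stake doesn't cover the full 2x penalty: the remainder comes out of the liquid balance. A worked case, again with SLASH_AMOUNT assumed to be 100:

    SLASH_AMOUNT = 100.0
    VALIDATOR_SLASH_MULTIPLIER = 2.0   # 2x penalty, per the docstring above

    stake, balance = 150.0, 500.0
    slash = SLASH_AMOUNT * VALIDATOR_SLASH_MULTIPLIER   # 200.0 total penalty
    actual = min(slash, stake + balance)                # 200.0 (funds suffice)
    stake_cut = min(actual, stake)                      # 150.0 taken from stake first
    balance_cut = actual - stake_cut                    # 50.0 taken from balance

    assert (stake_cut, balance_cut) == (150.0, 50.0)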
+    def check_and_slash_bad_validators(self, proof_signature: str) -> List[str]:
+        """
+        Check if consensus was reached and slash validators who voted wrong.
+
+        Called after a proof reaches consensus.
+        Returns list of slashed validator IDs.
+        """
+        status = self.get_proof_validation_status(proof_signature)
+
+        if not status["consensus_reached"]:
+            return []
+
+        consensus_result = status["consensus_result"]
+        slashed = []
+
+        with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+            # Get all votes for this proof
+            votes = conn.execute("""
+                SELECT validator_id, vote
+                FROM validation_votes
+                WHERE proof_signature = ?
+            """, (proof_signature,)).fetchall()
+
+            for validator_id, vote in votes:
+                vote_bool = bool(vote)
+                if vote_bool != consensus_result:
+                    # This validator voted against consensus
+                    success, msg = self.slash_bad_validator(
+                        validator_id=validator_id,
+                        proof_signature=proof_signature,
+                        reason=f"Voted {'VALID' if vote_bool else 'INVALID'} but consensus was {'VALID' if consensus_result else 'INVALID'}"
+                    )
+                    if success:
+                        slashed.append(validator_id)
+
+        if slashed:
+            logger.info(f"Slashed {len(slashed)} validators for proof {proof_signature[:16]}...")
+
+        return slashed
+
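In the wider validation flow, this method is the post-consensus hook: once get_proof_validation_status reports consensus, a single call both detects and punishes every dissenting vote. A hypothetical driver, assuming `ledger` is a NEUROLedger:

    # Hypothetical wiring - called by whatever component tallies validation votes.
    def on_consensus_reached(ledger, proof_signature: str) -> None:
        slashed = ledger.check_and_slash_bad_validators(proof_signature)
        for validator_id in slashed:
            # A slashed validator loses stake first, so its selection score
            # (stake-weighted, see above) drops in future rounds too.
            print(f"validator {validator_id[:12]}... slashed over {proof_signature[:16]}...")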
+    # ========================================================================
+    # QUERIES
+    # ========================================================================
+
+    def get_balance(self, node_id: Optional[str] = None) -> float:
+        """Get balance for a node."""
+        node_id = node_id or self.node_id
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (node_id,)
+                ).fetchone()
+                return row[0] if row else 0.0
+
+    def get_balance_details(self, node_id: Optional[str] = None) -> Dict:
+        """
+        Get detailed balance information including confirmation status.
+
+        IMPORTANT: Understanding Local vs Network Balance
+        =================================================
+
+        LOCAL BALANCE (what this node's ledger shows):
+        - All proofs YOU generated that YOU witnessed
+        - Includes solo-earned NEURO
+
+        NETWORK BALANCE (what other nodes see):
+        - Only proofs gossiped within PROOF_FRESHNESS_WINDOW (5 min)
+        - If you ran solo, the network didn't witness your work
+
+        WHY THE DIFFERENCE?
+        - Security: prevents fabricating work while alone
+        - Like Bitcoin: unmined blocks don't count until the network sees them
+        - Solo NEURO is "unconfirmed" - it needs witnesses to be network-confirmed
+
+        HOW TO CONFIRM:
+        - Keep running with peers online
+        - All new proofs will be gossiped and confirmed
+        - Historical solo-earned NEURO stays LOCAL only
+        """
+        node_id = node_id or self.node_id
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Get balance info
+                row = conn.execute("""
+                    SELECT balance, total_earned, proof_count
+                    FROM balances WHERE node_id = ?
+                """, (node_id,)).fetchone()
+
+                local_balance = row[0] if row else 0.0
+                total_earned = row[1] if row else 0.0
+                proof_count = row[2] if row else 0
+
+                # Get count of proofs that could have been gossiped
+                # (proofs where we had peers at the time).
+                # This is approximate - we track via proof timestamps.
+                fresh_proofs = conn.execute("""
+                    SELECT COUNT(*) FROM proof_history
+                    WHERE node_id = ?
+                """, (node_id,)).fetchone()[0]
+
+        return {
+            "node_id": node_id,
+            "local_balance": local_balance,
+            "total_earned": total_earned,
+            "proof_count": proof_count,
+            "proof_history_count": fresh_proofs,
+            "note": (
+                "Local balance includes all proofs you generated. "
+                "Network balance only counts proofs witnessed by peers within 5 min. "
+                "If you ran solo, that NEURO is LOCAL only."
+            )
+        }
+
+    def get_account_info(self, node_id: Optional[str] = None) -> Dict:
+        """Get full account information."""
+        node_id = node_id or self.node_id
+
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Balance info
+                row = conn.execute("""
+                    SELECT balance, total_earned, total_spent, last_proof_time, proof_count, created_at
+                    FROM balances WHERE node_id = ?
+                """, (node_id,)).fetchone()
+
+                if not row:
+                    return {
+                        "node_id": node_id,
+                        "balance": 0.0,
+                        "total_earned": 0.0,
+                        "total_spent": 0.0,
+                        "stake": 0.0,
+                        "stake_multiplier": 1.0,
+                        "proof_count": 0
+                    }
+
+                balance, total_earned, total_spent, last_proof_time, proof_count, created_at = row
+
+                # Stake info
+                stake_row = conn.execute(
+                    "SELECT amount, locked_until FROM stakes WHERE node_id = ?",
+                    (node_id,)
+                ).fetchone()
+
+        stake = stake_row[0] if stake_row else 0.0
+        stake_locked_until = stake_row[1] if stake_row else 0.0
+        stake_multiplier = calculate_stake_multiplier(stake)
+
+        return {
+            "node_id": node_id,
+            "balance": balance,
+            "total_earned": total_earned,
+            "total_spent": total_spent,
+            "stake": stake,
+            "stake_locked_until": stake_locked_until,
+            "stake_multiplier": stake_multiplier,
+            "proof_count": proof_count,
+            "last_proof_time": last_proof_time,
+            "created_at": created_at
+        }
+
+    def get_global_stats(self) -> LedgerStats:
+        """Get global ledger statistics."""
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                row = conn.execute("""
+                    SELECT total_minted, total_burned, total_transferred, total_proofs, total_transactions
+                    FROM global_stats WHERE id = 1
+                """).fetchone()
+
+        if not row:
+            return LedgerStats()
+
+        total_minted, total_burned, total_transferred, total_proofs, total_transactions = row
+
+        return LedgerStats(
+            total_minted=total_minted,
+            total_burned=total_burned,
+            total_transferred=total_transferred,
+            circulating_supply=total_minted - total_burned,
+            total_proofs_processed=total_proofs,
+            total_transactions=total_transactions
+        )
+
+    def get_burn_stats(self) -> Dict:
+        """Get burn statistics."""
+        with self.lock:
+            with sqlite3.connect(self.db_path, timeout=60.0) as conn:
+                # Total burned (balance accumulated at the burn address)
+                row = conn.execute(
+                    "SELECT balance FROM balances WHERE node_id = ?",
+                    (BURN_ADDRESS,)
+                ).fetchone()
+                total_burned = row[0] if row else 0.0
+
+                # Global stats (only total_minted is needed here)
+                stats_row = conn.execute(
+                    "SELECT total_minted FROM global_stats WHERE id = 1"
+                ).fetchone()
+
+                total_minted = stats_row[0] if stats_row else 0.0
+
+        return {
+            "total_burned": total_burned,
+            "total_minted": total_minted,
+            "burn_rate": FEE_BURN_RATE,
+            "circulating_supply": total_minted - total_burned,
+            "burn_percentage": (total_burned / total_minted * 100) if total_minted > 0 else 0.0
+        }
+
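Reading the two stats methods together gives the deflation picture: circulating supply is simply minted minus burned, and burn_percentage expresses the burn as a share of everything ever minted. Hypothetical usage, with `ledger` a NEUROLedger instance:

    stats = ledger.get_global_stats()
    burn = ledger.get_burn_stats()

    print(f"minted={stats.total_minted:.2f} burned={stats.total_burned:.2f} "
          f"circulating={stats.circulating_supply:.2f}")
    print(f"{burn['burn_percentage']:.2f}% of all minted NEURO has been burned")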
+
+# ============================================================================
+# BACKWARD COMPATIBILITY ALIASES
+# ============================================================================
+
+# LedgerManager is now just an alias for NEUROLedger
+LedgerManager = NEUROLedger
+
+# Legacy ProofOfWork alias for any remaining references
+ProofOfWork = PoNWProof