clawrtc 1.5.0 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/clawrtc/data/fingerprint_checks.py +557 -450
- package/clawrtc.egg-info/PKG-INFO +1 -1
- package/dist/{clawrtc-1.5.0-py3-none-any.whl → clawrtc-1.6.0-py3-none-any.whl} +0 -0
- package/dist/clawrtc-1.6.0.tar.gz +0 -0
- package/package.json +2 -2
- package/pyproject.toml +1 -1
- package/dist/clawrtc-1.5.0.tar.gz +0 -0
|
@@ -1,450 +1,557 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
"""
|
|
3
|
-
RIP-PoA Hardware Fingerprint Validation
|
|
4
|
-
========================================
|
|
5
|
-
7 Required Checks for RTC Reward Approval
|
|
6
|
-
ALL MUST PASS for antiquity multiplier rewards
|
|
7
|
-
|
|
8
|
-
Checks:
|
|
9
|
-
1. Clock-Skew & Oscillator Drift
|
|
10
|
-
2. Cache Timing Fingerprint
|
|
11
|
-
3. SIMD Unit Identity
|
|
12
|
-
4. Thermal Drift Entropy
|
|
13
|
-
5. Instruction Path Jitter
|
|
14
|
-
6. Anti-Emulation Behavioral Checks
|
|
15
|
-
7. ROM Fingerprint (retro platforms only)
|
|
16
|
-
"""
|
|
17
|
-
|
|
18
|
-
import hashlib
|
|
19
|
-
import os
|
|
20
|
-
import platform
|
|
21
|
-
import statistics
|
|
22
|
-
import subprocess
|
|
23
|
-
import time
|
|
24
|
-
from typing import Dict, List, Optional, Tuple
|
|
25
|
-
|
|
26
|
-
# Import ROM fingerprint database if available
|
|
27
|
-
try:
|
|
28
|
-
from rom_fingerprint_db import (
|
|
29
|
-
identify_rom,
|
|
30
|
-
is_known_emulator_rom,
|
|
31
|
-
compute_file_hash,
|
|
32
|
-
detect_platform_roms,
|
|
33
|
-
get_real_hardware_rom_signature,
|
|
34
|
-
)
|
|
35
|
-
ROM_DB_AVAILABLE = True
|
|
36
|
-
except ImportError:
|
|
37
|
-
ROM_DB_AVAILABLE = False
|
|
38
|
-
|
|
39
|
-
def check_clock_drift(samples: int = 200) -> Tuple[bool, Dict]:
|
|
40
|
-
"""Check 1: Clock-Skew & Oscillator Drift"""
|
|
41
|
-
intervals = []
|
|
42
|
-
reference_ops = 5000
|
|
43
|
-
|
|
44
|
-
for i in range(samples):
|
|
45
|
-
data = "drift_{}".format(i).encode()
|
|
46
|
-
start = time.perf_counter_ns()
|
|
47
|
-
for _ in range(reference_ops):
|
|
48
|
-
hashlib.sha256(data).digest()
|
|
49
|
-
elapsed = time.perf_counter_ns() - start
|
|
50
|
-
intervals.append(elapsed)
|
|
51
|
-
if i % 50 == 0:
|
|
52
|
-
time.sleep(0.001)
|
|
53
|
-
|
|
54
|
-
mean_ns = statistics.mean(intervals)
|
|
55
|
-
stdev_ns = statistics.stdev(intervals)
|
|
56
|
-
cv = stdev_ns / mean_ns if mean_ns > 0 else 0
|
|
57
|
-
|
|
58
|
-
drift_pairs = [intervals[i] - intervals[i-1] for i in range(1, len(intervals))]
|
|
59
|
-
drift_stdev = statistics.stdev(drift_pairs) if len(drift_pairs) > 1 else 0
|
|
60
|
-
|
|
61
|
-
data = {
|
|
62
|
-
"mean_ns": int(mean_ns),
|
|
63
|
-
"stdev_ns": int(stdev_ns),
|
|
64
|
-
"cv": round(cv, 6),
|
|
65
|
-
"drift_stdev": int(drift_stdev),
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
valid = True
|
|
69
|
-
if cv < 0.0001:
|
|
70
|
-
valid = False
|
|
71
|
-
data["fail_reason"] = "synthetic_timing"
|
|
72
|
-
elif drift_stdev == 0:
|
|
73
|
-
valid = False
|
|
74
|
-
data["fail_reason"] = "no_drift"
|
|
75
|
-
|
|
76
|
-
return valid, data
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
def check_cache_timing(iterations: int = 100) -> Tuple[bool, Dict]:
|
|
80
|
-
"""Check 2: Cache Timing Fingerprint (L1/L2/L3 Latency)"""
|
|
81
|
-
l1_size = 8 * 1024
|
|
82
|
-
l2_size = 128 * 1024
|
|
83
|
-
l3_size = 4 * 1024 * 1024
|
|
84
|
-
|
|
85
|
-
def measure_access_time(buffer_size: int, accesses: int = 1000) -> float:
|
|
86
|
-
buf = bytearray(buffer_size)
|
|
87
|
-
for i in range(0, buffer_size, 64):
|
|
88
|
-
buf[i] = i % 256
|
|
89
|
-
start = time.perf_counter_ns()
|
|
90
|
-
for i in range(accesses):
|
|
91
|
-
_ = buf[(i * 64) % buffer_size]
|
|
92
|
-
elapsed = time.perf_counter_ns() - start
|
|
93
|
-
return elapsed / accesses
|
|
94
|
-
|
|
95
|
-
l1_times = [measure_access_time(l1_size) for _ in range(iterations)]
|
|
96
|
-
l2_times = [measure_access_time(l2_size) for _ in range(iterations)]
|
|
97
|
-
l3_times = [measure_access_time(l3_size) for _ in range(iterations)]
|
|
98
|
-
|
|
99
|
-
l1_avg = statistics.mean(l1_times)
|
|
100
|
-
l2_avg = statistics.mean(l2_times)
|
|
101
|
-
l3_avg = statistics.mean(l3_times)
|
|
102
|
-
|
|
103
|
-
l2_l1_ratio = l2_avg / l1_avg if l1_avg > 0 else 0
|
|
104
|
-
l3_l2_ratio = l3_avg / l2_avg if l2_avg > 0 else 0
|
|
105
|
-
|
|
106
|
-
data = {
|
|
107
|
-
"l1_ns": round(l1_avg, 2),
|
|
108
|
-
"l2_ns": round(l2_avg, 2),
|
|
109
|
-
"l3_ns": round(l3_avg, 2),
|
|
110
|
-
"l2_l1_ratio": round(l2_l1_ratio, 3),
|
|
111
|
-
"l3_l2_ratio": round(l3_l2_ratio, 3),
|
|
112
|
-
}
|
|
113
|
-
|
|
114
|
-
valid = True
|
|
115
|
-
if l2_l1_ratio < 1.01 and l3_l2_ratio < 1.01:
|
|
116
|
-
valid = False
|
|
117
|
-
data["fail_reason"] = "no_cache_hierarchy"
|
|
118
|
-
elif l1_avg == 0 or l2_avg == 0 or l3_avg == 0:
|
|
119
|
-
valid = False
|
|
120
|
-
data["fail_reason"] = "zero_latency"
|
|
121
|
-
|
|
122
|
-
return valid, data
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
def check_simd_identity() -> Tuple[bool, Dict]:
|
|
126
|
-
"""Check 3: SIMD Unit Identity (SSE/AVX/AltiVec/NEON)"""
|
|
127
|
-
flags = []
|
|
128
|
-
arch = platform.machine().lower()
|
|
129
|
-
|
|
130
|
-
try:
|
|
131
|
-
with open("/proc/cpuinfo", "r") as f:
|
|
132
|
-
for line in f:
|
|
133
|
-
if "flags" in line.lower() or "features" in line.lower():
|
|
134
|
-
parts = line.split(":")
|
|
135
|
-
if len(parts) > 1:
|
|
136
|
-
flags = parts[1].strip().split()
|
|
137
|
-
break
|
|
138
|
-
except:
|
|
139
|
-
pass
|
|
140
|
-
|
|
141
|
-
if not flags:
|
|
142
|
-
try:
|
|
143
|
-
result = subprocess.run(
|
|
144
|
-
["sysctl", "-a"],
|
|
145
|
-
capture_output=True, text=True, timeout=5
|
|
146
|
-
)
|
|
147
|
-
for line in result.stdout.split("\n"):
|
|
148
|
-
if "feature" in line.lower() or "altivec" in line.lower():
|
|
149
|
-
flags.append(line.split(":")[-1].strip())
|
|
150
|
-
except:
|
|
151
|
-
pass
|
|
152
|
-
|
|
153
|
-
has_sse = any("sse" in f.lower() for f in flags)
|
|
154
|
-
has_avx = any("avx" in f.lower() for f in flags)
|
|
155
|
-
has_altivec = any("altivec" in f.lower() for f in flags) or "ppc" in arch
|
|
156
|
-
has_neon = any("neon" in f.lower() for f in flags) or "arm" in arch
|
|
157
|
-
|
|
158
|
-
data = {
|
|
159
|
-
"arch": arch,
|
|
160
|
-
"simd_flags_count": len(flags),
|
|
161
|
-
"has_sse": has_sse,
|
|
162
|
-
"has_avx": has_avx,
|
|
163
|
-
"has_altivec": has_altivec,
|
|
164
|
-
"has_neon": has_neon,
|
|
165
|
-
"sample_flags": flags[:10] if flags else [],
|
|
166
|
-
}
|
|
167
|
-
|
|
168
|
-
valid = has_sse or has_avx or has_altivec or has_neon or len(flags) > 0
|
|
169
|
-
if not valid:
|
|
170
|
-
data["fail_reason"] = "no_simd_detected"
|
|
171
|
-
|
|
172
|
-
return valid, data
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
def check_thermal_drift(samples: int = 50) -> Tuple[bool, Dict]:
|
|
176
|
-
"""Check 4: Thermal Drift Entropy"""
|
|
177
|
-
cold_times = []
|
|
178
|
-
for i in range(samples):
|
|
179
|
-
start = time.perf_counter_ns()
|
|
180
|
-
for _ in range(10000):
|
|
181
|
-
hashlib.sha256("cold_{}".format(i).encode()).digest()
|
|
182
|
-
cold_times.append(time.perf_counter_ns() - start)
|
|
183
|
-
|
|
184
|
-
for _ in range(100):
|
|
185
|
-
for __ in range(50000):
|
|
186
|
-
hashlib.sha256(b"warmup").digest()
|
|
187
|
-
|
|
188
|
-
hot_times = []
|
|
189
|
-
for i in range(samples):
|
|
190
|
-
start = time.perf_counter_ns()
|
|
191
|
-
for _ in range(10000):
|
|
192
|
-
hashlib.sha256("hot_{}".format(i).encode()).digest()
|
|
193
|
-
hot_times.append(time.perf_counter_ns() - start)
|
|
194
|
-
|
|
195
|
-
cold_avg = statistics.mean(cold_times)
|
|
196
|
-
hot_avg = statistics.mean(hot_times)
|
|
197
|
-
cold_stdev = statistics.stdev(cold_times)
|
|
198
|
-
hot_stdev = statistics.stdev(hot_times)
|
|
199
|
-
drift_ratio = hot_avg / cold_avg if cold_avg > 0 else 0
|
|
200
|
-
|
|
201
|
-
data = {
|
|
202
|
-
"cold_avg_ns": int(cold_avg),
|
|
203
|
-
"hot_avg_ns": int(hot_avg),
|
|
204
|
-
"cold_stdev": int(cold_stdev),
|
|
205
|
-
"hot_stdev": int(hot_stdev),
|
|
206
|
-
"drift_ratio": round(drift_ratio, 4),
|
|
207
|
-
}
|
|
208
|
-
|
|
209
|
-
valid = True
|
|
210
|
-
if cold_stdev == 0 and hot_stdev == 0:
|
|
211
|
-
valid = False
|
|
212
|
-
data["fail_reason"] = "no_thermal_variance"
|
|
213
|
-
|
|
214
|
-
return valid, data
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
def check_instruction_jitter(samples: int = 100) -> Tuple[bool, Dict]:
|
|
218
|
-
"""Check 5: Instruction Path Jitter"""
|
|
219
|
-
def measure_int_ops(count: int = 10000) -> float:
|
|
220
|
-
start = time.perf_counter_ns()
|
|
221
|
-
x = 1
|
|
222
|
-
for i in range(count):
|
|
223
|
-
x = (x * 7 + 13) % 65537
|
|
224
|
-
return time.perf_counter_ns() - start
|
|
225
|
-
|
|
226
|
-
def measure_fp_ops(count: int = 10000) -> float:
|
|
227
|
-
start = time.perf_counter_ns()
|
|
228
|
-
x = 1.5
|
|
229
|
-
for i in range(count):
|
|
230
|
-
x = (x * 1.414 + 0.5) % 1000.0
|
|
231
|
-
return time.perf_counter_ns() - start
|
|
232
|
-
|
|
233
|
-
def measure_branch_ops(count: int = 10000) -> float:
|
|
234
|
-
start = time.perf_counter_ns()
|
|
235
|
-
x = 0
|
|
236
|
-
for i in range(count):
|
|
237
|
-
if i % 2 == 0:
|
|
238
|
-
x += 1
|
|
239
|
-
else:
|
|
240
|
-
x -= 1
|
|
241
|
-
return time.perf_counter_ns() - start
|
|
242
|
-
|
|
243
|
-
int_times = [measure_int_ops() for _ in range(samples)]
|
|
244
|
-
fp_times = [measure_fp_ops() for _ in range(samples)]
|
|
245
|
-
branch_times = [measure_branch_ops() for _ in range(samples)]
|
|
246
|
-
|
|
247
|
-
int_avg = statistics.mean(int_times)
|
|
248
|
-
fp_avg = statistics.mean(fp_times)
|
|
249
|
-
branch_avg = statistics.mean(branch_times)
|
|
250
|
-
|
|
251
|
-
int_stdev = statistics.stdev(int_times)
|
|
252
|
-
fp_stdev = statistics.stdev(fp_times)
|
|
253
|
-
branch_stdev = statistics.stdev(branch_times)
|
|
254
|
-
|
|
255
|
-
data = {
|
|
256
|
-
"int_avg_ns": int(int_avg),
|
|
257
|
-
"fp_avg_ns": int(fp_avg),
|
|
258
|
-
"branch_avg_ns": int(branch_avg),
|
|
259
|
-
"int_stdev": int(int_stdev),
|
|
260
|
-
"fp_stdev": int(fp_stdev),
|
|
261
|
-
"branch_stdev": int(branch_stdev),
|
|
262
|
-
}
|
|
263
|
-
|
|
264
|
-
valid = True
|
|
265
|
-
if int_stdev == 0 and fp_stdev == 0 and branch_stdev == 0:
|
|
266
|
-
valid = False
|
|
267
|
-
data["fail_reason"] = "no_jitter"
|
|
268
|
-
|
|
269
|
-
return valid, data
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
def check_anti_emulation() -> Tuple[bool, Dict]:
|
|
273
|
-
"""Check 6: Anti-Emulation Behavioral Checks
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
"
|
|
307
|
-
|
|
308
|
-
"
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
#
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
if
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
"
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
if
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
RIP-PoA Hardware Fingerprint Validation
|
|
4
|
+
========================================
|
|
5
|
+
7 Required Checks for RTC Reward Approval
|
|
6
|
+
ALL MUST PASS for antiquity multiplier rewards
|
|
7
|
+
|
|
8
|
+
Checks:
|
|
9
|
+
1. Clock-Skew & Oscillator Drift
|
|
10
|
+
2. Cache Timing Fingerprint
|
|
11
|
+
3. SIMD Unit Identity
|
|
12
|
+
4. Thermal Drift Entropy
|
|
13
|
+
5. Instruction Path Jitter
|
|
14
|
+
6. Anti-Emulation Behavioral Checks
|
|
15
|
+
7. ROM Fingerprint (retro platforms only)
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
import hashlib
|
|
19
|
+
import os
|
|
20
|
+
import platform
|
|
21
|
+
import statistics
|
|
22
|
+
import subprocess
|
|
23
|
+
import time
|
|
24
|
+
from typing import Dict, List, Optional, Tuple
|
|
25
|
+
|
|
26
|
+
# Import ROM fingerprint database if available
|
|
27
|
+
try:
|
|
28
|
+
from rom_fingerprint_db import (
|
|
29
|
+
identify_rom,
|
|
30
|
+
is_known_emulator_rom,
|
|
31
|
+
compute_file_hash,
|
|
32
|
+
detect_platform_roms,
|
|
33
|
+
get_real_hardware_rom_signature,
|
|
34
|
+
)
|
|
35
|
+
ROM_DB_AVAILABLE = True
|
|
36
|
+
except ImportError:
|
|
37
|
+
ROM_DB_AVAILABLE = False
|
|
38
|
+
|
|
39
|
+
def check_clock_drift(samples: int = 200) -> Tuple[bool, Dict]:
    """Check 1: Clock-Skew & Oscillator Drift.

    Times a fixed batch of SHA-256 digests `samples` times and inspects the
    spread of the measurements.  Real oscillators wander, so a perfectly flat
    timing profile (coefficient of variation near zero, or zero
    sample-to-sample drift) is treated as synthetic.

    Returns (valid, metrics); on failure metrics carries a "fail_reason".
    Note: requires samples >= 2 (statistics.stdev needs two data points).
    """
    reference_ops = 5000
    intervals = []

    for idx in range(samples):
        payload = "drift_{}".format(idx).encode()
        begin = time.perf_counter_ns()
        for _ in range(reference_ops):
            hashlib.sha256(payload).digest()
        intervals.append(time.perf_counter_ns() - begin)
        # Yield briefly every 50 samples so the scheduler can inject
        # natural timing noise between measurements.
        if idx % 50 == 0:
            time.sleep(0.001)

    mean_ns = statistics.mean(intervals)
    stdev_ns = statistics.stdev(intervals)
    cv = stdev_ns / mean_ns if mean_ns > 0 else 0

    # Successive differences between adjacent measurements.
    deltas = [later - earlier for earlier, later in zip(intervals, intervals[1:])]
    drift_stdev = statistics.stdev(deltas) if len(deltas) > 1 else 0

    metrics = {
        "mean_ns": int(mean_ns),
        "stdev_ns": int(stdev_ns),
        "cv": round(cv, 6),
        "drift_stdev": int(drift_stdev),
    }

    if cv < 0.0001:
        metrics["fail_reason"] = "synthetic_timing"
        return False, metrics
    if drift_stdev == 0:
        metrics["fail_reason"] = "no_drift"
        return False, metrics

    return True, metrics
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def check_cache_timing(iterations: int = 100) -> Tuple[bool, Dict]:
    """Check 2: Cache Timing Fingerprint (L1/L2/L3 Latency).

    Samples average byte-access latency over three buffers sized to land in
    (roughly) L1, L2 and L3, then compares level-to-level latency ratios.
    On real silicon larger working sets are slower; a flat profile suggests
    an emulated memory hierarchy.

    Returns (valid, report); on failure report carries a "fail_reason".
    """
    l1_size = 8 * 1024
    l2_size = 128 * 1024
    l3_size = 4 * 1024 * 1024

    def timed_walk(size: int, touches: int = 1000) -> float:
        # Touch one byte per 64-byte cache line to populate, then time
        # strided reads and return the per-access average in ns.
        scratch = bytearray(size)
        for off in range(0, size, 64):
            scratch[off] = off % 256
        t0 = time.perf_counter_ns()
        for step in range(touches):
            _ = scratch[(step * 64) % size]
        return (time.perf_counter_ns() - t0) / touches

    l1_avg, l2_avg, l3_avg = (
        statistics.mean(timed_walk(size) for _ in range(iterations))
        for size in (l1_size, l2_size, l3_size)
    )

    l2_l1_ratio = l2_avg / l1_avg if l1_avg > 0 else 0
    l3_l2_ratio = l3_avg / l2_avg if l2_avg > 0 else 0

    report = {
        "l1_ns": round(l1_avg, 2),
        "l2_ns": round(l2_avg, 2),
        "l3_ns": round(l3_avg, 2),
        "l2_l1_ratio": round(l2_l1_ratio, 3),
        "l3_l2_ratio": round(l3_l2_ratio, 3),
    }

    # Same precedence as before: the hierarchy test is evaluated first.
    if l2_l1_ratio < 1.01 and l3_l2_ratio < 1.01:
        report["fail_reason"] = "no_cache_hierarchy"
        return False, report
    if l1_avg == 0 or l2_avg == 0 or l3_avg == 0:
        report["fail_reason"] = "zero_latency"
        return False, report

    return True, report
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def check_simd_identity() -> Tuple[bool, Dict]:
    """Check 3: SIMD Unit Identity (SSE/AVX/AltiVec/NEON).

    Collects CPU feature flags from /proc/cpuinfo (Linux) or, as a
    fallback, from `sysctl -a` (BSD/macOS), and reports which SIMD
    families are visible.

    Returns (valid, data); valid is False only when no SIMD capability
    and no feature flags could be detected at all.

    Fix: the bare `except:` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to `except Exception`.
    """
    flags: List[str] = []
    arch = platform.machine().lower()

    # Linux: parse the first "flags"/"features" line of /proc/cpuinfo.
    try:
        with open("/proc/cpuinfo", "r") as f:
            for line in f:
                if "flags" in line.lower() or "features" in line.lower():
                    parts = line.split(":")
                    if len(parts) > 1:
                        flags = parts[1].strip().split()
                    break
    except Exception:
        pass  # not Linux, or /proc unavailable

    # Fallback: scrape feature lines out of `sysctl -a`.
    if not flags:
        try:
            result = subprocess.run(
                ["sysctl", "-a"],
                capture_output=True, text=True, timeout=5
            )
            for line in result.stdout.split("\n"):
                if "feature" in line.lower() or "altivec" in line.lower():
                    flags.append(line.split(":")[-1].strip())
        except Exception:
            pass  # sysctl missing or timed out

    has_sse = any("sse" in f.lower() for f in flags)
    has_avx = any("avx" in f.lower() for f in flags)
    # Architecture name alone is accepted as evidence for AltiVec/NEON.
    has_altivec = any("altivec" in f.lower() for f in flags) or "ppc" in arch
    has_neon = any("neon" in f.lower() for f in flags) or "arm" in arch

    data = {
        "arch": arch,
        "simd_flags_count": len(flags),
        "has_sse": has_sse,
        "has_avx": has_avx,
        "has_altivec": has_altivec,
        "has_neon": has_neon,
        "sample_flags": flags[:10] if flags else [],
    }

    valid = has_sse or has_avx or has_altivec or has_neon or len(flags) > 0
    if not valid:
        data["fail_reason"] = "no_simd_detected"

    return valid, data
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def check_thermal_drift(samples: int = 50, warmup_rounds: int = 100,
                        warmup_ops: int = 50000, work_ops: int = 10000) -> Tuple[bool, Dict]:
    """Check 4: Thermal Drift Entropy.

    Times a hashing workload "cold", burns CPU to warm the part up, then
    times the same workload "hot".  Real hardware shows run-to-run
    variance; zero variance in both phases is treated as emulated timing.

    Args:
        samples: timed measurements per phase (must be >= 2 for stdev).
        warmup_rounds, warmup_ops: shape of the warm-up burn loop; the
            defaults preserve the original fixed 100 x 50000 behavior.
        work_ops: hash operations per timed sample (default 10000, as before).

    Returns (valid, data) with timing stats and an optional "fail_reason".
    """
    def timed_phase(tag: str) -> List[int]:
        # One timed sample = `work_ops` SHA-256 digests over a per-sample payload.
        times = []
        for i in range(samples):
            start = time.perf_counter_ns()
            for _ in range(work_ops):
                hashlib.sha256("{}_{}".format(tag, i).encode()).digest()
            times.append(time.perf_counter_ns() - start)
        return times

    cold_times = timed_phase("cold")

    # Warm-up burn: raise core temperature / clock state before re-measuring.
    for _ in range(warmup_rounds):
        for _ in range(warmup_ops):
            hashlib.sha256(b"warmup").digest()

    hot_times = timed_phase("hot")

    cold_avg = statistics.mean(cold_times)
    hot_avg = statistics.mean(hot_times)
    cold_stdev = statistics.stdev(cold_times)
    hot_stdev = statistics.stdev(hot_times)
    drift_ratio = hot_avg / cold_avg if cold_avg > 0 else 0

    data = {
        "cold_avg_ns": int(cold_avg),
        "hot_avg_ns": int(hot_avg),
        "cold_stdev": int(cold_stdev),
        "hot_stdev": int(hot_stdev),
        "drift_ratio": round(drift_ratio, 4),
    }

    valid = True
    if cold_stdev == 0 and hot_stdev == 0:
        valid = False
        data["fail_reason"] = "no_thermal_variance"

    return valid, data
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def check_instruction_jitter(samples: int = 100) -> Tuple[bool, Dict]:
    """Check 5: Instruction Path Jitter.

    Times three micro-workloads (integer ALU, floating point, branching)
    `samples` times each and records means and standard deviations.
    Zero jitter across all three paths indicates synthetic timing.
    Requires samples >= 2 (statistics.stdev).
    """
    def time_integer(rounds: int = 10000) -> float:
        t0 = time.perf_counter_ns()
        acc = 1
        for _ in range(rounds):
            acc = (acc * 7 + 13) % 65537
        return time.perf_counter_ns() - t0

    def time_float(rounds: int = 10000) -> float:
        t0 = time.perf_counter_ns()
        acc = 1.5
        for _ in range(rounds):
            acc = (acc * 1.414 + 0.5) % 1000.0
        return time.perf_counter_ns() - t0

    def time_branches(rounds: int = 10000) -> float:
        t0 = time.perf_counter_ns()
        acc = 0
        for n in range(rounds):
            acc = acc + 1 if n % 2 == 0 else acc - 1
        return time.perf_counter_ns() - t0

    int_times = [time_integer() for _ in range(samples)]
    fp_times = [time_float() for _ in range(samples)]
    branch_times = [time_branches() for _ in range(samples)]

    # Compare the raw (float) stdevs; the report stores them truncated.
    int_stdev = statistics.stdev(int_times)
    fp_stdev = statistics.stdev(fp_times)
    branch_stdev = statistics.stdev(branch_times)

    report = {
        "int_avg_ns": int(statistics.mean(int_times)),
        "fp_avg_ns": int(statistics.mean(fp_times)),
        "branch_avg_ns": int(statistics.mean(branch_times)),
        "int_stdev": int(int_stdev),
        "fp_stdev": int(fp_stdev),
        "branch_stdev": int(branch_stdev),
    }

    if int_stdev == 0 and fp_stdev == 0 and branch_stdev == 0:
        report["fail_reason"] = "no_jitter"
        return False, report

    return True, report
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
def check_anti_emulation() -> Tuple[bool, Dict]:
    """Check 6: Anti-Emulation Behavioral Checks

    Detects traditional hypervisors AND cloud provider VMs:
    - VMware, VirtualBox, KVM, QEMU, Xen, Hyper-V, Parallels
    - AWS EC2 (Nitro/Xen), GCP, Azure, DigitalOcean
    - Linode, Vultr, Hetzner, Oracle Cloud, OVH
    - Cloud metadata endpoints (169.254.169.254)

    Updated 2026-02-21: Added cloud provider detection after
    discovering AWS t3.medium instances attempting to mine.

    Returns (valid, data); valid is True only when NO virtualization
    indicator was found.  All probes are best-effort: every failure
    (missing file, missing binary, network timeout) is treated as
    "no indicator".

    Fix: bare `except:` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to `except Exception`.
    """
    vm_indicators = []

    # --- DMI paths to check ---
    vm_paths = [
        "/sys/class/dmi/id/product_name",
        "/sys/class/dmi/id/sys_vendor",
        "/sys/class/dmi/id/board_vendor",
        "/sys/class/dmi/id/board_name",
        "/sys/class/dmi/id/bios_vendor",
        "/sys/class/dmi/id/chassis_vendor",
        "/sys/class/dmi/id/chassis_asset_tag",
        "/proc/scsi/scsi",
    ]

    # --- VM and cloud provider strings to match ---
    # Traditional hypervisors
    vm_strings = [
        "vmware", "virtualbox", "kvm", "qemu", "xen",
        "hyperv", "hyper-v", "parallels", "bhyve",
        # AWS EC2 (Nitro and Xen instances)
        "amazon", "amazon ec2", "ec2", "nitro",
        # Google Cloud Platform
        "google", "google compute engine", "gce",
        # Microsoft Azure
        "microsoft corporation", "azure",
        # DigitalOcean
        "digitalocean",
        # Linode (now Akamai)
        "linode", "akamai",
        # Vultr
        "vultr",
        # Hetzner
        "hetzner",
        # Oracle Cloud
        "oracle", "oraclecloud",
        # OVH
        "ovh", "ovhcloud",
        # Alibaba Cloud
        "alibaba", "alicloud",
        # Generic cloud/VM indicators
        "bochs",    # BIOS often seen in cloud VMs
        "innotek",  # VirtualBox parent company
        "seabios",  # Common VM BIOS
    ]

    for path in vm_paths:
        try:
            with open(path, "r") as f:
                content = f.read().strip().lower()
            for vm in vm_strings:
                if vm in content:
                    vm_indicators.append("{}:{}".format(path, vm))
        except Exception:
            pass  # path absent or unreadable (non-Linux, permissions)

    # --- Environment variable checks ---
    for key in ["KUBERNETES", "DOCKER", "VIRTUAL", "container",
                "AWS_EXECUTION_ENV", "ECS_CONTAINER_METADATA_URI",
                "GOOGLE_CLOUD_PROJECT", "AZURE_FUNCTIONS_ENVIRONMENT",
                "WEBSITE_INSTANCE_ID"]:
        if key in os.environ:
            vm_indicators.append("ENV:{}".format(key))

    # --- CPU hypervisor flag check ---
    try:
        with open("/proc/cpuinfo", "r") as f:
            if "hypervisor" in f.read().lower():
                vm_indicators.append("cpuinfo:hypervisor")
    except Exception:
        pass

    # --- /sys/hypervisor check (Xen-based cloud VMs expose this) ---
    try:
        if os.path.exists("/sys/hypervisor/type"):
            with open("/sys/hypervisor/type", "r") as f:
                hv_type = f.read().strip().lower()
            if hv_type:
                vm_indicators.append("sys_hypervisor:{}".format(hv_type))
    except Exception:
        pass

    # --- Cloud metadata endpoint check ---
    # AWS, GCP, Azure, DigitalOcean all use 169.254.169.254.
    # A real bare-metal machine will NOT have this endpoint.
    # NOTE(review): this is a reachability probe only — it reads at most
    # 512 bytes and sends no credentials.
    try:
        import urllib.request
        req = urllib.request.Request(
            "http://169.254.169.254/",
            headers={"Metadata": "true"}  # Azure header
        )
        resp = urllib.request.urlopen(req, timeout=1)
        # If we get ANY response, we're on a cloud VM
        cloud_body = resp.read(512).decode("utf-8", errors="replace").lower()
        cloud_provider = "unknown_cloud"
        if "latest" in cloud_body or "meta-data" in cloud_body:
            cloud_provider = "aws_or_gcp"
        if "azure" in cloud_body or "microsoft" in cloud_body:
            cloud_provider = "azure"
        vm_indicators.append("cloud_metadata:{}".format(cloud_provider))
    except Exception:
        pass  # Timeout or connection refused = good (not cloud)

    # --- AWS IMDSv2 check (token-based, t3/t4 instances) ---
    try:
        import urllib.request
        token_req = urllib.request.Request(
            "http://169.254.169.254/latest/api/token",
            headers={"X-aws-ec2-metadata-token-ttl-seconds": "5"},
            method="PUT"
        )
        token_resp = urllib.request.urlopen(token_req, timeout=1)
        if token_resp.status == 200:
            vm_indicators.append("cloud_metadata:aws_imdsv2")
    except Exception:
        pass

    # --- systemd-detect-virt (if available) ---
    try:
        result = subprocess.run(
            ["systemd-detect-virt"], capture_output=True, text=True, timeout=5
        )
        virt_type = result.stdout.strip().lower()
        if virt_type and virt_type != "none":
            vm_indicators.append("systemd_detect_virt:{}".format(virt_type))
    except Exception:
        pass

    data = {
        "vm_indicators": vm_indicators,
        "indicator_count": len(vm_indicators),
        "is_likely_vm": len(vm_indicators) > 0,
    }

    valid = len(vm_indicators) == 0
    if not valid:
        data["fail_reason"] = "vm_detected"

    return valid, data
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
|
|
425
|
+
def check_rom_fingerprint() -> Tuple[bool, Dict]:
    """
    Check 7: ROM Fingerprint (for retro platforms)

    Detects if running with a known emulator ROM dump.
    Real vintage hardware should have unique/variant ROMs.
    Emulators all use the same pirated ROM packs.

    Returns (valid, data).  Passes trivially when the ROM database is not
    available or the architecture is not a retro platform.

    Fix: the m68k branch iterated detect_platform_roms() without the
    falsy-result guard the PPC branch has; both branches now tolerate
    the helper returning None/empty.
    """
    if not ROM_DB_AVAILABLE:
        # Skip for modern hardware or if DB not available
        return True, {"skipped": True, "reason": "rom_db_not_available_or_modern_hw"}

    arch = platform.machine().lower()
    rom_hashes = {}
    emulator_detected = False
    detection_details = []

    def _flag_if_known(platform_name, rom_hash, algo):
        # Record a detection when rom_hash matches a known emulator ROM pack.
        nonlocal emulator_detected
        if is_known_emulator_rom(rom_hash, algo):
            emulator_detected = True
            detection_details.append({
                "platform": platform_name,
                "hash": rom_hash,
                "known_as": identify_rom(rom_hash, algo),
            })

    # Check for PowerPC (Mac emulation target)
    if "ppc" in arch or "powerpc" in arch:
        # Try to get real hardware ROM signature
        real_rom = get_real_hardware_rom_signature()
        if real_rom:
            rom_hashes["real_hardware"] = real_rom
        else:
            # Check if running under emulator with known ROM
            for platform_name, rom_hash in (detect_platform_roms() or {}).items():
                _flag_if_known(platform_name, rom_hash, "md5")

    # Check for 68K (Amiga, Atari ST, old Mac)
    elif "m68k" in arch or "68000" in arch:
        for platform_name, rom_hash in (detect_platform_roms() or {}).items():
            if "amiga" in platform_name.lower():
                _flag_if_known(platform_name, rom_hash, "sha1")
            elif "mac" in platform_name.lower():
                _flag_if_known(platform_name, rom_hash, "apple")

    # For modern hardware, report "N/A" but pass
    else:
        return True, {
            "skipped": False,
            "arch": arch,
            "is_retro_platform": False,
            "rom_check": "not_applicable_modern_hw",
        }

    data = {
        "arch": arch,
        "is_retro_platform": True,
        "rom_hashes": rom_hashes,
        "emulator_detected": emulator_detected,
        "detection_details": detection_details,
    }

    if emulator_detected:
        data["fail_reason"] = "known_emulator_rom"
        return False, data

    return True, data
|
|
507
|
+
|
|
508
|
+
|
|
509
|
+
def validate_all_checks(include_rom_check: bool = True) -> Tuple[bool, Dict]:
    """Run all 7 fingerprint checks. ALL MUST PASS for RTC approval.

    Executes each check in order, printing per-check progress, and
    collects {key: {"passed": bool, "data": dict}} for every check.
    A check that raises is recorded as failed with the error string.

    Returns (all_passed, results).
    """
    suite = [
        ("clock_drift", "Clock-Skew & Oscillator Drift", check_clock_drift),
        ("cache_timing", "Cache Timing Fingerprint", check_cache_timing),
        ("simd_identity", "SIMD Unit Identity", check_simd_identity),
        ("thermal_drift", "Thermal Drift Entropy", check_thermal_drift),
        ("instruction_jitter", "Instruction Path Jitter", check_instruction_jitter),
        ("anti_emulation", "Anti-Emulation Checks", check_anti_emulation),
    ]

    # The ROM check only applies when the fingerprint DB could be imported.
    if include_rom_check and ROM_DB_AVAILABLE:
        suite.append(("rom_fingerprint", "ROM Fingerprint (Retro)", check_rom_fingerprint))

    print(f"Running {len(suite)} Hardware Fingerprint Checks...")
    print("=" * 50)

    results = {}
    all_passed = True
    total = len(suite)
    for position, (key, label, runner) in enumerate(suite, 1):
        print(f"\n[{position}/{total}] {label}...")
        try:
            check_ok, check_data = runner()
        except Exception as exc:
            check_ok, check_data = False, {"error": str(exc)}
        results[key] = {"passed": check_ok, "data": check_data}
        all_passed = all_passed and check_ok
        print(" Result: {}".format("PASS" if check_ok else "FAIL"))

    print("\n" + "=" * 50)
    print("OVERALL RESULT: {}".format("ALL CHECKS PASSED" if all_passed else "FAILED"))

    if not all_passed:
        failed = [name for name, outcome in results.items() if not outcome["passed"]]
        print("Failed checks: {}".format(failed))

    return all_passed, results
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
if __name__ == "__main__":
    import json

    # Run the full fingerprint suite and dump machine-readable results.
    overall_ok, check_results = validate_all_checks()
    print("\n\nDetailed Results:")
    print(json.dumps(check_results, indent=2, default=str))
|
|
Binary file
|
|
Binary file
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "clawrtc",
|
|
3
|
-
"version": "1.5.0",
|
|
3
|
+
"version": "1.6.0",
|
|
4
4
|
"description": "Mine RTC tokens with your AI agent using Proof-of-Antiquity consensus. Coinbase wallet + x402 payments.",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"clawrtc",
|
|
@@ -34,6 +34,6 @@
|
|
|
34
34
|
"platforms": ["rustchain"],
|
|
35
35
|
"author": "Elyan Labs",
|
|
36
36
|
"homepage": "https://rustchain.org",
|
|
37
|
-
"version": "1.5.0"
|
|
37
|
+
"version": "1.6.0"
|
|
38
38
|
}
|
|
39
39
|
}
|
package/pyproject.toml
CHANGED
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "clawrtc"
|
|
7
|
-
version = "1.5.0"
|
|
7
|
+
version = "1.6.0"
|
|
8
8
|
description = "ClawRTC — Let your AI agent mine RTC tokens on any modern hardware. Built-in wallet, VM-penalized."
|
|
9
9
|
readme = "README.md"
|
|
10
10
|
license = {text = "MIT"}
|
|
Binary file
|