@svrnsec/pulse 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4906 @@
1
+ 'use strict';
2
+
3
+ var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
4
+ /**
5
+ * @sovereign/pulse — Statistical Jitter Analysis
6
+ *
7
+ * Analyses the timing distribution from the entropy probe to classify
8
+ * the host as a real consumer device or a sanitised datacenter VM.
9
+ *
10
+ * Core insight:
11
+ * Real hardware → thermal throttling, OS context switches, DRAM refresh
12
+ * cycles create a characteristic "noisy" but physically
13
+ * plausible timing distribution.
14
+ * Datacenter VM → hypervisor scheduler presents a nearly-flat execution
15
+ * curve; thermal feedback is absent; timer may be
16
+ * quantised to the host's scheduler quantum.
17
+ */
18
+
19
+ // ---------------------------------------------------------------------------
20
+ // Public API
21
+ // ---------------------------------------------------------------------------
22
+
23
+ /**
24
+ * Full statistical analysis of a timing vector.
25
+ *
26
+ * @param {number[]} timings - per-iteration millisecond deltas from WASM probe
27
+ * @param {object} [opts]
28
+ * @param {object} [opts.autocorrelations] - pre-computed { lag1 … lag10 }
29
+ * @returns {JitterAnalysis}
30
+ */
31
function classifyJitter(timings, opts = {}) {
  // Guard: fewer than 10 samples makes every estimator below meaningless,
  // so return the canonical "no data" result object instead of NaN scores.
  if (!timings || timings.length < 10) {
    return _insufficientData();
  }

  // Gather all sub-metrics up front; callers may supply pre-computed
  // autocorrelations (e.g. from the WASM probe) to skip the local fallback.
  const stats = computeStats(timings);
  const autocorr = opts.autocorrelations ?? _computeLocalAutocorr(timings);
  const hurst = computeHurst(timings);
  const quantEnt = detectQuantizationEntropy(timings);
  const thermal = detectThermalSignature(timings);
  const outlierRate = _outlierRate(timings, stats);

  // ── Scoring rubric ───────────────────────────────────────────────────────
  // Each criterion contributes 0–1 to a weighted sum.
  // Weights sum to 1.0; final score is in [0, 1].
  //   1.0 = almost certainly a real consumer device + real silicon
  //   0.0 = almost certainly a sanitised VM / AI instance

  const components = {};
  const flags = []; // diagnostic flag strings, appended in rubric order

  // 1. Coefficient of Variation (weight 0.25)
  //    Real hardware: CV ∈ [0.04, 0.35]
  //    VM: CV often < 0.02 ("too flat") or > 0.5 (scheduler bursts)
  let cvScore = 0;
  if (stats.cv >= 0.04 && stats.cv <= 0.35) {
    cvScore = 1.0;
  } else if (stats.cv >= 0.02 && stats.cv < 0.04) {
    cvScore = (stats.cv - 0.02) / 0.02; // linear ramp up
    flags.push('LOW_CV_BORDERLINE');
  } else if (stats.cv > 0.35 && stats.cv < 0.5) {
    cvScore = 1.0 - (stats.cv - 0.35) / 0.15; // ramp down
    flags.push('HIGH_CV_POSSIBLE_SCHEDULER_BURST');
  } else if (stats.cv < 0.02) {
    cvScore = 0;
    flags.push('CV_TOO_FLAT_VM_INDICATOR');
  } else {
    // Remaining case: stats.cv >= 0.5 — extreme variance; keeps a small
    // residual credit rather than zero.
    cvScore = 0.2;
    flags.push('CV_TOO_HIGH_SCHEDULER_BURST');
  }
  components.cv = { score: cvScore, weight: 0.25, value: stats.cv };

  // 2. Autocorrelation profile (weight 0.20)
  //    Real thermal noise → all lags near 0 (i.i.d. / Brownian)
  //    VM hypervisor scheduler → positive autocorr (periodic steal-time bursts)
  //    We use the maximum absolute autocorrelation across all measured lags
  //    to catch both lag-1 and longer-period scheduler artifacts.
  const acVals = Object.values(autocorr).filter(v => v != null);
  const maxAbsAC = acVals.length ? Math.max(...acVals.map(Math.abs)) : 0;
  const meanAbsAC = acVals.length ? acVals.reduce((s, v) => s + Math.abs(v), 0) / acVals.length : 0;
  const acStat = (maxAbsAC + meanAbsAC) / 2; // blend: worst + average

  let ac1Score = 0;
  if (acStat < 0.12) {
    ac1Score = 1.0;
  } else if (acStat < 0.28) {
    ac1Score = 1.0 - (acStat - 0.12) / 0.16; // linear ramp 1.0 → 0.0
    flags.push('MODERATE_AUTOCORR_POSSIBLE_SCHEDULER');
  } else {
    ac1Score = 0;
    flags.push('HIGH_AUTOCORR_VM_SCHEDULER_DETECTED');
  }
  components.autocorr = { score: ac1Score, weight: 0.20, value: acStat };

  // 3. Quantization Entropy (weight 0.20)
  //    High entropy → timings are spread, not clustered on fixed boundaries
  //    Low entropy → values cluster on integer-ms ticks (legacy VM timer)
  //
  //    Scale:
  //      QE ≥ 4.5 → 1.00 (strongly physical)
  //      QE 3.0–4.5 → 0.00–1.00 (linear ramp, healthy range)
  //      QE 2.0–3.0 → 0.00–0.20 (borderline; still gives partial credit so one
  //                   weak metric doesn't zero-out the whole score)
  //      QE < 2.0 → 0.00 (clearly synthetic/quantised timer)
  let qeScore = 0;
  if (quantEnt >= 4.5) {
    qeScore = 1.0;
  } else if (quantEnt >= 3.0) {
    qeScore = (quantEnt - 3.0) / 1.5; // 0.00 → 1.00
  } else if (quantEnt >= 2.0) {
    // Partial credit — not obviously VM but not clearly physical.
    // Lets other strong signals (CV, autocorr, Hurst) still carry the device
    // over the physical threshold instead of being zeroed by a single weak metric.
    qeScore = ((quantEnt - 2.0) / 1.0) * 0.20; // 0.00 → 0.20
    flags.push('LOW_QUANTIZATION_ENTROPY_BORDERLINE');
  } else {
    qeScore = 0;
    flags.push('LOW_QUANTIZATION_ENTROPY_SYNTHETIC_TIMER');
  }
  components.quantization = { score: qeScore, weight: 0.20, value: quantEnt };

  // 4. Hurst Exponent (weight 0.15)
  //    Genuine white thermal noise → H ≈ 0.5
  //    VM scheduler periodicity → H > 0.7 (persistent / self-similar)
  //    Synthetic / replayed → H near 0 or 1
  let hurstScore = 0;
  const hurstDev = Math.abs(hurst - 0.5); // distance from the "random walk" ideal
  if (hurstDev < 0.10) {
    hurstScore = 1.0;
  } else if (hurstDev < 0.25) {
    hurstScore = 1.0 - (hurstDev - 0.10) / 0.15;
    if (hurst > 0.7) flags.push('HIGH_HURST_VM_SCHEDULER_PERIODICITY');
  } else {
    hurstScore = 0;
    if (hurst > 0.7) flags.push('VERY_HIGH_HURST_VM');
    else if (hurst < 0.3) flags.push('VERY_LOW_HURST_ANTIPERSISTENT');
  }
  components.hurst = { score: hurstScore, weight: 0.15, value: hurst };

  // 5. Thermal signature (weight 0.10)
  //    Real CPU under sustained load → upward drift or sawtooth (fan cycling)
  //    VM: flat timing regardless of simulated load (no thermal feedback loop)
  let thermalScore = 0;
  if (thermal.pattern === 'rising' || thermal.pattern === 'sawtooth') {
    thermalScore = 1.0;
  } else if (Math.abs(thermal.slope) > 5e-5) {
    thermalScore = 0.5; // some drift present
    flags.push('WEAK_THERMAL_SIGNATURE');
  } else {
    thermalScore = 0;
    flags.push('FLAT_THERMAL_PROFILE_VM_INDICATOR');
  }
  components.thermal = { score: thermalScore, weight: 0.10, value: thermal.slope };

  // 6. Outlier rate (weight 0.10)
  //    Context switches on real OS → occasional timing spikes (> 3σ)
  //    VMs: far fewer OS-level interruptions visible to guest
  let outlierScore = 0;
  if (outlierRate >= 0.02 && outlierRate <= 0.15) {
    outlierScore = 1.0;
  } else if (outlierRate > 0 && outlierRate < 0.02) {
    outlierScore = outlierRate / 0.02;
    flags.push('FEW_OUTLIERS_POSSIBLY_VM');
  } else if (outlierRate > 0.15) {
    outlierScore = Math.max(0, 1.0 - (outlierRate - 0.15) / 0.15);
    flags.push('EXCESSIVE_OUTLIERS_UNSTABLE');
  }
  // NOTE(review): outlierRate === 0 falls through with score 0 and no flag —
  // presumably intentional (zero spikes itself reads VM-like); confirm.
  components.outliers = { score: outlierScore, weight: 0.10, value: outlierRate };

  // ── Weighted aggregate ────────────────────────────────────────────────────
  const score = Object.values(components)
    .reduce((sum, c) => sum + c.score * c.weight, 0);

  return {
    score: Math.max(0, Math.min(1, score)), // clamp against float drift
    flags,
    components,
    stats,
    autocorrelations: autocorr,
    hurstExponent: hurst,
    quantizationEntropy: quantEnt,
    thermalSignature: thermal,
    outlierRate,
  };
}
186
+
187
+ // ---------------------------------------------------------------------------
188
+ // computeStats
189
+ // ---------------------------------------------------------------------------
190
+
191
+ /**
192
+ * Descriptive statistics for a timing vector.
193
+ * @param {number[]} arr
194
+ * @returns {TimingStats}
195
+ */
196
/**
 * Descriptive statistics for a timing vector.
 *
 * Robustness fixes vs. the naive formulas:
 *  - n === 1 would compute sample variance as 0/0 (NaN) → report 0.
 *  - std === 0 (constant input) would make skewness/kurtosis NaN → report 0.
 *  - mean === 0 would make CV 0/0 (NaN) → report 0.
 *
 * @param {number[]} arr - non-empty numeric vector
 * @returns {TimingStats}
 */
function computeStats(arr) {
  const sorted = [...arr].sort((a, b) => a - b);
  const n = arr.length;
  const mean = arr.reduce((s, v) => s + v, 0) / n;
  // Sample (n-1) variance; guard n === 1 to avoid 0/0 → NaN.
  const varr = n > 1 ? arr.reduce((s, v) => s + (v - mean) ** 2, 0) / (n - 1) : 0;
  const std = Math.sqrt(varr);

  // Linearly-interpolated percentile over the sorted copy.
  const pct = (p) => {
    const idx = (p / 100) * (n - 1);
    const lo = Math.floor(idx);
    const hi = Math.ceil(idx);
    return sorted[lo] + (sorted[hi] - sorted[lo]) * (idx - lo);
  };

  // Skewness (Fisher-Pearson); undefined for n < 3 or zero spread.
  const skew = (n < 3 || std === 0) ? 0 :
    arr.reduce((s, v) => s + ((v - mean) / std) ** 3, 0) *
    (n / ((n - 1) * (n - 2)));

  // Excess kurtosis; undefined for n < 4 or zero spread.
  const kurt = (n < 4 || std === 0) ? 0 :
    (arr.reduce((s, v) => s + ((v - mean) / std) ** 4, 0) *
      (n * (n + 1)) / ((n - 1) * (n - 2) * (n - 3))) -
    (3 * (n - 1) ** 2) / ((n - 2) * (n - 3));

  return {
    n, mean, std,
    cv: mean !== 0 ? std / mean : 0, // guard zero mean → NaN
    min: sorted[0],
    max: sorted[n - 1],
    p5: pct(5),
    p25: pct(25),
    p50: pct(50),
    p75: pct(75),
    p95: pct(95),
    p99: pct(99),
    skewness: skew,
    kurtosis: kurt,
  };
}
236
+
237
+ /**
238
+ * @typedef {object} TimingStats
239
+ * @property {number} n
240
+ * @property {number} mean
241
+ * @property {number} std
242
+ * @property {number} cv
243
+ * @property {number} min
244
+ * @property {number} max
245
+ * @property {number} p5
246
+ * @property {number} p25
247
+ * @property {number} p50
248
+ * @property {number} p75
249
+ * @property {number} p95
250
+ * @property {number} p99
251
+ * @property {number} skewness
252
+ * @property {number} kurtosis
253
+ */
254
+
255
+ // ---------------------------------------------------------------------------
256
+ // computeHurst
257
+ // ---------------------------------------------------------------------------
258
+
259
+ /**
260
+ * Estimates the Hurst exponent via Rescaled Range (R/S) analysis.
261
+ * Covers 4 sub-series sizes (n/4, n/3, n/2, n) to get a log-log slope.
262
+ *
263
+ * H ≈ 0.5 → random walk (Brownian, thermal noise)
264
+ * H > 0.5 → persistent (VM hypervisor periodicity)
265
+ * H < 0.5 → anti-persistent
266
+ *
267
+ * @param {number[]} arr
268
+ * @returns {number}
269
+ */
270
/**
 * Estimates the Hurst exponent via Rescaled Range (R/S) analysis: compute
 * R/S for four prefix lengths (n/4, n/3, n/2, n) and take the slope of the
 * log-log regression.
 *
 * H ≈ 0.5 → random walk (Brownian, thermal noise)
 * H > 0.5 → persistent (VM hypervisor periodicity)
 * H < 0.5 → anti-persistent
 *
 * @param {number[]} arr
 * @returns {number} H clamped to [0, 1]; 0.5 when the series is too short
 */
function computeHurst(arr) {
  const n = arr.length;
  if (n < 16) return 0.5; // too few samples — fall back to "random walk"

  // Prefix lengths for the regression; anything shorter than 8 is dropped.
  const sizes = [Math.floor(n / 4), Math.floor(n / 3), Math.floor(n / 2), n]
    .filter((s) => s >= 8);

  // One (log size, log R/S) point per prefix.
  const xs = [];
  const ys = [];
  for (const s of sizes) {
    xs.push(Math.log(s));
    ys.push(Math.log(_rescaledRange(arr.slice(0, s))));
  }

  // Ordinary least squares: slope of ys against xs.
  const k = xs.length;
  const xBar = xs.reduce((a, b) => a + b, 0) / k;
  const yBar = ys.reduce((a, b) => a + b, 0) / k;
  let cov = 0;
  let varX = 0;
  for (let i = 0; i < k; i++) {
    cov += (xs[i] - xBar) * (ys[i] - yBar);
    varX += (xs[i] - xBar) ** 2;
  }
  const slope = varX === 0 ? 0.5 : cov / varX;
  return Math.min(1, Math.max(0, slope));
}
297
+
298
/**
 * Rescaled range (R/S) statistic of a series: the range of its cumulative
 * mean-deviation walk divided by its population standard deviation.
 * Returns 1 for a zero-variance series (avoids 0/0).
 *
 * @param {number[]} arr
 * @returns {number}
 */
function _rescaledRange(arr) {
  const len = arr.length;
  const mu = arr.reduce((total, v) => total + v, 0) / len;

  // Walk the cumulative deviation series, tracking its extremes.
  let running = 0;
  let hi = -Infinity;
  let lo = Infinity;
  for (const v of arr) {
    running += v - mu;
    if (running > hi) hi = running;
    if (running < lo) lo = running;
  }

  const spread = hi - lo; // R: range of the cumulative-deviation walk
  const sigma = Math.sqrt(arr.reduce((total, v) => total + (v - mu) ** 2, 0) / len); // S
  return sigma === 0 ? 1 : spread / sigma;
}
312
+
313
+ // ---------------------------------------------------------------------------
314
+ // detectQuantizationEntropy
315
+ // ---------------------------------------------------------------------------
316
+
317
+ /**
318
+ * Computes Shannon entropy of a histogram of timing values.
319
+ * Low entropy (< 3 bits) indicates clustered / quantised timings (VM timer).
320
+ *
321
+ * @param {number[]} arr
322
+ * @param {number} [binWidthMs=0.2]
323
+ * @returns {number} entropy in bits
324
+ */
325
+ function detectQuantizationEntropy(arr, binWidthMs = 0.2) {
326
+ if (!arr.length) return 0;
327
+ const bins = new Map();
328
+ for (const v of arr) {
329
+ const bin = Math.round(v / binWidthMs);
330
+ bins.set(bin, (bins.get(bin) ?? 0) + 1);
331
+ }
332
+ const n = arr.length;
333
+ let H = 0;
334
+ for (const count of bins.values()) {
335
+ const p = count / n;
336
+ H -= p * Math.log2(p);
337
+ }
338
+ return H;
339
+ }
340
+
341
+ // ---------------------------------------------------------------------------
342
+ // detectThermalSignature
343
+ // ---------------------------------------------------------------------------
344
+
345
+ /**
346
+ * Analyses whether the timing series shows a thermal throttle pattern:
347
+ * a rising trend (CPU heating up) or sawtooth (fan intervention).
348
+ *
349
+ * @param {number[]} arr
350
+ * @returns {{ slope: number, pattern: 'rising'|'falling'|'sawtooth'|'flat', r2: number }}
351
+ */
352
/**
 * Looks for a thermal-throttle fingerprint in the timing series: a rising
 * linear trend (CPU warming up) or a sawtooth (periodic fan intervention).
 *
 * @param {number[]} arr
 * @returns {{ slope: number, pattern: 'rising'|'falling'|'sawtooth'|'flat', r2: number }}
 *          plus `sawtoothCount` when n >= 10
 */
function detectThermalSignature(arr) {
  const count = arr.length;
  if (count < 10) return { slope: 0, pattern: 'flat', r2: 0 };

  // OLS regression of timing against sample index.
  const midX = (count - 1) / 2;
  const avgY = arr.reduce((acc, v) => acc + v, 0) / count;
  let sxy = 0;
  let sxx = 0;
  arr.forEach((v, i) => {
    sxy += (i - midX) * (v - avgY);
    sxx += (i - midX) ** 2;
  });
  const slope = sxx === 0 ? 0 : sxy / sxx;

  // Goodness of fit (R²) for the linear model; 0 when the data has no variance.
  let residual = 0;
  arr.forEach((v, i) => {
    residual += (v - (avgY + slope * (i - midX))) ** 2;
  });
  const totalVar = arr.reduce((acc, v) => acc + (v - avgY) ** 2, 0);
  const r2 = totalVar === 0 ? 0 : 1 - residual / totalVar;

  // Count sharp drops (> 2σ between neighbours) — fan-cycle sawtooth teeth.
  const sigma = Math.sqrt(totalVar / count);
  let sawtoothCount = 0;
  for (let i = 1; i < count; i++) {
    if (arr[i - 1] - arr[i] > 2 * sigma) sawtoothCount++;
  }

  // Classify: two or more teeth beats any monotone trend.
  let pattern;
  if (sawtoothCount >= 2) pattern = 'sawtooth';
  else if (slope > 5e-5) pattern = 'rising';
  else if (slope < -5e-5) pattern = 'falling';
  else pattern = 'flat';

  return { slope, pattern, r2, sawtoothCount };
}
389
+
390
+ // ---------------------------------------------------------------------------
391
+ // Internal helpers
392
+ // ---------------------------------------------------------------------------
393
+
394
// Fraction of samples more than 3σ above the mean. High-side only: the
// caller treats these as context-switch spikes, which lengthen timings.
function _outlierRate(arr, stats) {
  const cutoff = stats.mean + 3 * stats.std;
  let spikes = 0;
  for (const v of arr) {
    if (v > cutoff) spikes++;
  }
  return spikes / arr.length;
}
398
+
399
// Local fallback used when the caller did not supply pre-computed
// autocorrelations: Pearson AC at a fixed set of diagnostic lags,
// keyed as { lag1, lag2, lag3, lag5, lag10 }.
function _computeLocalAutocorr(arr) {
  const lags = [1, 2, 3, 5, 10];
  return Object.fromEntries(lags.map((lag) => [`lag${lag}`, _pearsonAC(arr, lag)]));
}
406
+
407
/**
 * Pearson autocorrelation of a series against itself shifted by `lag`.
 * Deviations are taken from the full-series mean (not per-window means).
 *
 * @param {number[]} arr
 * @param {number} lag
 * @returns {number} correlation in [-1, 1]; 0 when undefined (lag too long
 *                   or zero variance in either window)
 */
function _pearsonAC(arr, lag) {
  const n = arr.length;
  if (lag >= n) return 0; // shift longer than the series — undefined

  const mu = arr.reduce((acc, v) => acc + v, 0) / n;
  let cross = 0;
  let varHead = 0;
  let varTail = 0;
  for (let i = 0; i + lag < n; i++) {
    const head = arr[i] - mu;
    const tail = arr[i + lag] - mu;
    cross += head * tail;
    varHead += head * head;
    varTail += tail * tail;
  }
  const norm = Math.sqrt(varHead * varTail);
  return norm < 1e-14 ? 0 : cross / norm;
}
423
+
424
// Canonical result for a timing vector too short to analyse. Mirrors the
// full JitterAnalysis shape so callers never need a special case.
function _insufficientData() {
  const emptyResult = {
    score: 0,
    flags: ['INSUFFICIENT_DATA'],
    components: {},
    stats: null,
    autocorrelations: {},
    hurstExponent: 0.5, // neutral "random walk" prior
    quantizationEntropy: 0,
    thermalSignature: { slope: 0, pattern: 'flat', r2: 0 },
    outlierRate: 0,
  };
  return emptyResult;
}
437
+
438
+ /**
439
+ * @typedef {object} JitterAnalysis
440
+ * @property {number} score - [0,1], 1 = real hardware
441
+ * @property {string[]} flags - diagnostic flags
442
+ * @property {object} components - per-criterion scores and weights
443
+ * @property {TimingStats} stats
444
+ * @property {object} autocorrelations
445
+ * @property {number} hurstExponent
446
+ * @property {number} quantizationEntropy
447
+ * @property {object} thermalSignature
448
+ * @property {number} outlierRate
449
+ */
450
+
451
// Bundler-generated namespace object re-exporting the jitter-analysis API;
// frozen (null prototype) so consumers cannot patch the analysis functions.
var jitter = /*#__PURE__*/Object.freeze({
  __proto__: null,
  classifyJitter: classifyJitter,
  computeHurst: computeHurst,
  computeStats: computeStats,
  detectQuantizationEntropy: detectQuantizationEntropy,
  detectThermalSignature: detectThermalSignature
});
459
+
460
+ /**
461
+ * @sovereign/pulse — Adaptive Entropy Probe
462
+ *
463
+ * Runs the WASM probe in batches and stops early once the signal is decisive.
464
+ *
465
+ * Why this works:
466
+ * A KVM VM with QE=1.27 and lag-1 autocorr=0.67 is unambiguously a VM after
467
+ * just 50 iterations. Running 200 iterations confirms what was already obvious
468
+ * at 50 — it adds no new information but wastes 3 seconds of user time.
469
+ *
470
+ * Conversely, a physical device with healthy entropy needs more data to
471
+ * rule out edge cases, so it runs longer.
472
+ *
473
+ * Speed profile:
474
+ * Obvious VM (QE < 1.5, lag1 > 0.60) → stops at 50 iters → ~0.9s (75% faster)
475
+ * Clear HW (QE > 3.5, lag1 < 0.10) → stops at ~100 iters → ~1.8s (50% faster)
476
+ * Ambiguous (borderline metrics) → runs full 200 iters → ~3.5s (same)
477
+ */
478
+
479
+
480
+ // ---------------------------------------------------------------------------
481
+ // Quick classifier (cheap, runs after every batch)
482
+ // ---------------------------------------------------------------------------
483
+
484
+ /**
485
+ * Fast signal-quality check. No Hurst, no thermal analysis — just the three
486
+ * metrics that converge quickest: QE, CV, and lag-1 autocorrelation.
487
+ *
488
+ * @param {number[]} timings
489
+ * @returns {{ vmConf: number, hwConf: number, qe: number, cv: number, lag1: number }}
490
+ */
491
/**
 * Fast signal-quality check run between probe batches. Uses only the three
 * metrics that converge quickest — quantization entropy, coefficient of
 * variation, and lag-1 autocorrelation — no Hurst, no thermal analysis.
 *
 * @param {number[]} timings
 * @returns {{ vmConf: number, hwConf: number, qe: number, cv: number, lag1: number }}
 */
function quickSignal(timings) {
  const count = timings.length;
  const avg = timings.reduce((acc, v) => acc + v, 0) / count;

  // Population variance → coefficient of variation (0 unless the mean is positive).
  const sumSq = timings.reduce((acc, v) => acc + (v - avg) ** 2, 0);
  const cv = avg > 0 ? Math.sqrt(sumSq / count) / avg : 0;

  const qe = detectQuantizationEntropy(timings);

  // Lag-1 Pearson autocorrelation in a single O(n) pass.
  let cross = 0;
  let varHead = 0;
  let varTail = 0;
  for (let i = 0; i + 1 < count; i++) {
    const head = timings[i] - avg;
    const tail = timings[i + 1] - avg;
    cross += head * tail;
    varHead += head * head;
    varTail += tail * tail;
  }
  const norm = Math.sqrt(varHead * varTail);
  const lag1 = norm < 1e-14 ? 0 : cross / norm;

  // VM confidence: each factor independently identifies the hypervisor
  // footprint; contributions are summed and capped at 1.
  const vmConf = Math.min(1,
    (qe < 1.50 ? 0.40 : qe < 2.00 ? 0.20 : 0.0) +
    (lag1 > 0.60 ? 0.35 : lag1 > 0.40 ? 0.18 : 0.0) +
    (cv < 0.04 ? 0.25 : cv < 0.07 ? 0.10 : 0.0)
  );

  // HW confidence: needs all three positive signals together to saturate.
  const hwConf = Math.min(1,
    (qe > 3.50 ? 0.38 : qe > 3.00 ? 0.22 : 0.0) +
    (Math.abs(lag1) < 0.10 ? 0.32 : Math.abs(lag1) < 0.20 ? 0.15 : 0.0) +
    (cv > 0.10 ? 0.30 : cv > 0.07 ? 0.14 : 0.0)
  );

  return { vmConf, hwConf, qe, cv, lag1 };
}
525
+
526
+ // ---------------------------------------------------------------------------
527
+ // collectEntropyAdaptive
528
+ // ---------------------------------------------------------------------------
529
+
530
+ /**
531
+ * @param {object} opts
532
+ * @param {number} [opts.minIterations=50] - never stop before this
533
+ * @param {number} [opts.maxIterations=200] - hard cap
534
+ * @param {number} [opts.batchSize=25] - WASM call granularity
535
+ * @param {number} [opts.vmThreshold=0.85] - stop early if VM confidence ≥ this
536
+ * @param {number} [opts.hwThreshold=0.80] - stop early if HW confidence ≥ this
537
+ * @param {number} [opts.hwMinIterations=75] - physical needs more data to confirm
538
+ * @param {number} [opts.matrixSize=64]
539
+ * @param {Function} [opts.onBatch] - called after each batch with interim signal
540
+ * @param {string} [opts.wasmPath]
541
+ * @param {Function} wasmModule - pre-initialised WASM module
542
+ * @returns {Promise<AdaptiveEntropyResult>}
543
+ */
544
async function collectEntropyAdaptive(wasmModule, opts = {}) {
  const {
    minIterations = 50,
    maxIterations = 200,
    batchSize = 25,
    vmThreshold = 0.85,
    hwThreshold = 0.80,
    hwMinIterations = 75,
    matrixSize = 64,
    onBatch,
  } = opts;

  const wasm = wasmModule;
  const allTimings = []; // accumulated per-iteration ms deltas across batches
  const batches = []; // per-batch timing snapshots
  let stoppedAt = null; // { reason, iterations, vmConf, hwConf }
  // NOTE(review): assumes result.checksum is a plain number; if the WASM
  // binding ever returns a BigInt, `+=` below would throw — confirm.
  let checksum = 0;

  const t_start = Date.now();

  // Run in batches of `batchSize`, re-evaluating the quick signal after each.
  while (allTimings.length < maxIterations) {
    const n = Math.min(batchSize, maxIterations - allTimings.length);
    const result = wasm.run_entropy_probe(n, matrixSize);
    const chunk = Array.from(result.timings);

    allTimings.push(...chunk);
    checksum += result.checksum;

    const sig = quickSignal(allTimings);
    batches.push({ iterations: allTimings.length, ...sig });

    // Fire progress callback with live signal so callers can stream to UI
    if (typeof onBatch === 'function') {
      // try/catch shields the collection loop from a throwing user callback.
      try {
        onBatch({
          iterations: allTimings.length,
          maxIterations,
          pct: Math.round(allTimings.length / maxIterations * 100),
          vmConf: sig.vmConf,
          hwConf: sig.hwConf,
          qe: sig.qe,
          cv: sig.cv,
          lag1: sig.lag1,
          // Thresholds: 0.70 — high enough that a legitimate device won't be
          // shown a false early verdict from a noisy first batch.
          // 'borderline' surfaces when one axis is moderate but not decisive.
          earlyVerdict: sig.vmConf > 0.70 ? 'vm'
            : sig.hwConf > 0.70 ? 'physical'
            : (sig.vmConf > 0.45 || sig.hwConf > 0.45) ? 'borderline'
            : 'uncertain',
        });
      } catch {}
    }

    // ── Early-exit checks ──────────────────────────────────────────────────
    // Never stop before minIterations, regardless of how decisive the signal is.
    if (allTimings.length < minIterations) continue;

    if (sig.vmConf >= vmThreshold) {
      stoppedAt = { reason: 'VM_SIGNAL_DECISIVE', vmConf: sig.vmConf, hwConf: sig.hwConf };
      break;
    }

    // Physical verdicts need extra samples (hwMinIterations) before trusting.
    if (allTimings.length >= hwMinIterations && sig.hwConf >= hwThreshold) {
      stoppedAt = { reason: 'PHYSICAL_SIGNAL_DECISIVE', vmConf: sig.vmConf, hwConf: sig.hwConf };
      break;
    }
  }

  const elapsed = Date.now() - t_start;
  const iterationsRan = allTimings.length;
  const iterationsSaved = maxIterations - iterationsRan;
  const speedupFactor = maxIterations / iterationsRan;

  // ── Resolution probe using cached WASM call ────────────────────────────
  const resResult = wasm.run_entropy_probe(1, 4); // tiny probe for resolution
  const resProbe = Array.from(resResult.resolution_probe ?? []);

  // Positive successive deltas only; zero deltas carry no resolution info.
  const resDeltas = [];
  for (let i = 1; i < resProbe.length; i++) {
    const d = resProbe[i] - resProbe[i - 1];
    if (d > 0) resDeltas.push(d);
  }

  return {
    timings: allTimings,
    iterations: iterationsRan,
    maxIterations,
    checksum: checksum.toString(),
    resolutionProbe: resProbe,
    // Smallest positive delta ≈ effective timer granularity; null if none seen.
    timerGranularityMs: resDeltas.length
      ? resDeltas.reduce((a, b) => Math.min(a, b), Infinity)
      : null,
    earlyExit: stoppedAt ? {
      ...stoppedAt,
      iterationsSaved,
      // Extrapolate per-iteration cost from the iterations that actually ran.
      timeSavedMs: Math.round(iterationsSaved * (elapsed / iterationsRan)),
      speedupFactor: +speedupFactor.toFixed(2),
    } : null,
    batches,
    elapsedMs: elapsed,
    collectedAt: t_start,
    matrixSize,
    phased: false, // adaptive replaces phased for speed
  };
}
649
+
650
+ /**
651
+ * @typedef {object} AdaptiveEntropyResult
652
+ * @property {number[]} timings
653
+ * @property {number} iterations - how many actually ran
654
+ * @property {number} maxIterations - cap that was set
655
+ * @property {object|null} earlyExit - null if ran to completion
656
+ * @property {object[]} batches - per-batch signal snapshots
657
+ * @property {number} elapsedMs
658
+ */
659
+
660
+ /**
661
+ * @sovereign/pulse — Entropy Collector
662
+ *
663
+ * Bridges the Rust/WASM matrix-multiply probe into JavaScript.
664
+ * The WASM module is lazily initialised once and cached for subsequent calls.
665
+ */
666
+
667
+
668
+ // ---------------------------------------------------------------------------
669
+ // WASM loader (lazy singleton)
670
+ // ---------------------------------------------------------------------------
671
// Lazy-singleton state: the initialised WASM exports, and the in-flight
// initialisation promise so concurrent callers share a single init.
let _wasmModule = null;
let _initPromise = null;
673
+
674
+ /**
675
+ * Initialise (or return the cached) WASM module.
676
+ * Works in browsers (via fetch), in Electron (Node.js context), and in
677
+ * Jest/Vitest via a manual WASM path override.
678
+ *
679
+ * @param {string} [wasmPath] – override path/URL to the .wasm binary
680
+ */
681
async function initWasm(wasmPath) {
  // Fast paths: already initialised, or an init is already in flight
  // (concurrent callers await the same promise instead of double-loading).
  if (_wasmModule) return _wasmModule;
  if (_initPromise) return _initPromise;

  // NOTE(review): if init fails, _initPromise stays rejected forever and
  // later calls can never retry — consider clearing it on failure.
  _initPromise = (async () => {
    // Dynamic import so bundlers can tree-shake this for server-only builds.
    const { default: init, run_entropy_probe, run_memory_probe, compute_autocorrelation } =
      await Promise.resolve().then(function () { return pulse_core; });

    // Bundler-generated import.meta.url shim: Node resolves relative to
    // __filename; browsers use the current <script> src or document.baseURI.
    const url = wasmPath ?? new URL('../../pkg/pulse_core_bg.wasm', (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('pulse.cjs.js', document.baseURI).href))).href;
    await init(url);

    // Cache only the probe entry points the rest of the package uses.
    _wasmModule = { run_entropy_probe, run_memory_probe, compute_autocorrelation };
    return _wasmModule;
  })();

  return _initPromise;
}
699
+
700
+ // ---------------------------------------------------------------------------
701
+ // collectEntropy
702
+ // ---------------------------------------------------------------------------
703
+
704
+ /**
705
+ * Run the WASM entropy probe and return raw timing data.
706
+ *
707
+ * @param {object} opts
708
+ * @param {number} [opts.iterations=200] - number of matrix-multiply rounds
709
+ * @param {number} [opts.matrixSize=64] - N for the N×N matrices
710
+ * @param {number} [opts.memSizeKb=512] - size of the memory bandwidth probe
711
+ * @param {number} [opts.memIterations=50]
712
+ * @param {boolean} [opts.phased=true] - run cold/load/hot phases for entropy-jitter ratio
713
+ * @param {string} [opts.wasmPath] - optional custom WASM binary path
714
+ *
715
+ * @returns {Promise<EntropyResult>}
716
+ */
717
async function collectEntropy(opts = {}) {
  const {
    iterations = 200,
    matrixSize = 64,
    memSizeKb = 512,
    memIterations = 50,
    phased = true,
    // NOTE(review): `adaptive`, `adaptiveThreshold` and `onBatch` are read
    // here but missing from the JSDoc above — document them there too.
    adaptive = false,
    adaptiveThreshold = 0.85,
    onBatch,
    wasmPath,
  } = opts;

  const wasm = await initWasm(wasmPath);
  const t_start = Date.now();

  let phases = null; // only populated by the phased branch
  let timings, resolutionProbe, checksum, timerGranularityMs;
  let _adaptiveInfo = null; // only populated by the adaptive branch

  // ── Adaptive mode: smart early exit, fastest for obvious VMs ──────────
  if (adaptive) {
    const r = await collectEntropyAdaptive(wasm, {
      minIterations: 50,
      maxIterations: iterations,
      batchSize: 25,
      vmThreshold: adaptiveThreshold,
      hwThreshold: 0.80,
      hwMinIterations: 75,
      matrixSize,
      onBatch,
    });
    timings = r.timings;
    resolutionProbe = r.resolutionProbe ?? [];
    checksum = r.checksum;
    timerGranularityMs = r.timerGranularityMs; // adaptive computes its own
    _adaptiveInfo = { earlyExit: r.earlyExit, batches: r.batches, elapsedMs: r.elapsedMs };

  // ── Phased collection: cold → load → hot ──────────────────────────────
  // Each phase runs a separate WASM probe. On real hardware, sustained load
  // increases thermal noise so Phase 3 (hot) entropy is measurably higher
  // than Phase 1 (cold). A VM's hypervisor clock is insensitive to guest
  // thermal state, so all three phases return nearly identical entropy.
  } else if (phased && iterations >= 60) {
    const coldN = Math.floor(iterations * 0.25); // ~25% cold
    const loadN = Math.floor(iterations * 0.50); // ~50% sustained load
    const hotN = iterations - coldN - loadN; // ~25% hot (remainder-exact)

    const cold = wasm.run_entropy_probe(coldN, matrixSize);
    const load = wasm.run_entropy_probe(loadN, matrixSize);
    const hot = wasm.run_entropy_probe(hotN, matrixSize);

    const coldTimings = Array.from(cold.timings);
    const loadTimings = Array.from(load.timings);
    const hotTimings = Array.from(hot.timings);

    timings = [...coldTimings, ...loadTimings, ...hotTimings];
    resolutionProbe = Array.from(cold.resolution_probe);
    checksum = (cold.checksum + load.checksum + hot.checksum).toString();

    // Bundler-rewritten lazy import of the sibling jitter module.
    const { detectQuantizationEntropy } = await Promise.resolve().then(function () { return jitter; });
    const coldQE = detectQuantizationEntropy(coldTimings);
    const hotQE = detectQuantizationEntropy(hotTimings);

    phases = {
      cold: { n: coldN, timings: coldTimings, qe: coldQE, mean: _mean$1(coldTimings) },
      load: { n: loadN, timings: loadTimings, qe: detectQuantizationEntropy(loadTimings), mean: _mean$1(loadTimings) },
      hot: { n: hotN, timings: hotTimings, qe: hotQE, mean: _mean$1(hotTimings) },
      // The key signal: entropy growth under load.
      //   Real silicon: hotQE / coldQE typically 1.05 – 1.40
      //   VM: hotQE / coldQE typically 0.95 – 1.05 (flat)
      // Neutral 1.0 when coldQE is 0 (ratio undefined).
      entropyJitterRatio: coldQE > 0 ? hotQE / coldQE : 1.0,
    };
  } else {
    // Single-phase fallback (fewer iterations or phased disabled)
    const result = wasm.run_entropy_probe(iterations, matrixSize);
    timings = Array.from(result.timings);
    resolutionProbe = Array.from(result.resolution_probe);
    checksum = result.checksum.toString();
  }

  // ── Timer resolution (non-adaptive path only — adaptive computes its own) ─
  if (!adaptive) {
    // Smallest positive delta between successive probe readings ≈ granularity.
    const resDeltas = [];
    for (let i = 1; i < resolutionProbe.length; i++) {
      const d = resolutionProbe[i] - resolutionProbe[i - 1];
      if (d > 0) resDeltas.push(d);
    }
    timerGranularityMs = resDeltas.length
      ? resDeltas.reduce((a, b) => Math.min(a, b), Infinity)
      : null;
  }

  // ── Autocorrelation at diagnostic lags ────────────────────────────────
  // Extended lags catch long-period steal-time rhythms (Xen: ~150 iters)
  const lags = [1, 2, 3, 5, 10, 25, 50];
  const autocorrelations = {};
  for (const lag of lags) {
    if (lag < timings.length) { // skip lags longer than the series
      autocorrelations[`lag${lag}`] = wasm.compute_autocorrelation(timings, lag);
    }
  }

  // ── Secondary probe: memory bandwidth jitter ───────────────────────────
  const memTimings = Array.from(wasm.run_memory_probe(memSizeKb, memIterations));

  return {
    timings,
    resolutionProbe,
    timerGranularityMs,
    autocorrelations,
    memTimings,
    phases,
    checksum,
    collectedAt: t_start,
    iterations: timings.length, // actual count (adaptive may differ from requested)
    matrixSize,
    adaptive: _adaptiveInfo, // null in non-adaptive mode
  };
}
837
+
838
/** Arithmetic mean of a numeric array; 0 for an empty array. */
function _mean$1(arr) {
  if (arr.length === 0) return 0;
  let total = 0;
  for (const v of arr) total += v;
  return total / arr.length;
}
841
+
842
+ /**
843
+ * @typedef {object} EntropyResult
844
+ * @property {number[]} timings - per-iteration wall-clock deltas (ms)
845
+ * @property {number[]} resolutionProbe - raw successive perf.now() readings
846
+ * @property {number|null} timerGranularityMs - effective timer resolution
847
 * @property {object} autocorrelations - { lag1, lag2, lag3, lag5, lag10, lag25, lag50 } (only lags smaller than timings.length are present)
848
+ * @property {number[]} memTimings - memory-probe timings (ms)
849
+ * @property {string} checksum - proof the computation ran
850
+ * @property {number} collectedAt - Date.now() at probe start
851
+ * @property {number} iterations
852
+ * @property {number} matrixSize
853
+ */
854
+
855
+ /**
856
+ * @sovereign/pulse — Bio-Binding Layer
857
+ *
858
+ * Captures mouse-movement micro-stutters and keystroke-cadence dynamics
859
+ * WHILE the hardware entropy probe is running. Computes the
860
+ * "Interference Coefficient": how much human input jitters hardware timing.
861
+ *
862
+ * PRIVACY NOTE: Only timing deltas are retained. No key labels, no raw
863
+ * (x, y) coordinates, no content of any kind is stored or transmitted.
864
+ */
865
+
866
+ // ---------------------------------------------------------------------------
867
+ // Internal state
868
+ // ---------------------------------------------------------------------------
869
const MAX_EVENTS = 500; // rolling buffer cap — bounds memory held by the mouse/key buffers during long sessions
870
+
871
+ // ---------------------------------------------------------------------------
872
+ // BioCollector class
873
+ // ---------------------------------------------------------------------------
874
/**
 * Collects privacy-preserving human-input micro-timing signals while the
 * entropy probe runs: pointer-move deltas (dt/dx/dy, pressure, pointerType)
 * and key dwell times. Absolute coordinates and key identities are never
 * buffered. Buffers are capped at MAX_EVENTS each.
 */
class BioCollector {
  constructor() {
    this._mouseEvents = []; // { t: DOMHighResTimeStamp, dx, dy } — deltas only, no absolute positions
    this._keyEvents = []; // { t, type: 'down'|'up', dwell: ms|null }
    this._lastKey = {}; // keyCode → { downAt: t } — keyed by e.code, value holds only the down timestamp
    this._lastMouse = null; // { t, x, y } — kept transiently to compute the next delta; never exported
    this._startTime = null;
    this._active = false;

    // Bound handlers (needed for removeEventListener)
    // Rebinding replaces the prototype methods on the instance so that the
    // exact same function reference is passed to add/removeEventListener.
    this._onMouseMove = this._onMouseMove.bind(this);
    this._onKeyDown = this._onKeyDown.bind(this);
    this._onKeyUp = this._onKeyUp.bind(this);
  }

  // ── Lifecycle ────────────────────────────────────────────────────────────

  // Begin listening; idempotent while already active. Listeners are passive
  // so collection never delays input handling.
  start() {
    if (this._active) return;
    this._active = true;
    this._startTime = performance.now();

    if (typeof window !== 'undefined') {
      window.addEventListener('pointermove', this._onMouseMove, { passive: true });
      window.addEventListener('keydown', this._onKeyDown, { passive: true });
      window.addEventListener('keyup', this._onKeyUp, { passive: true });
    }
  }

  // Stop listening; idempotent while inactive. Buffered events are retained
  // so snapshot() can still be called afterwards.
  stop() {
    if (!this._active) return;
    this._active = false;

    if (typeof window !== 'undefined') {
      window.removeEventListener('pointermove', this._onMouseMove);
      window.removeEventListener('keydown', this._onKeyDown);
      window.removeEventListener('keyup', this._onKeyUp);
    }
  }

  // ── Event handlers ────────────────────────────────────────────────────────

  _onMouseMove(e) {
    if (!this._active) return;
    const t = e.timeStamp ?? performance.now();
    const cur = { t, x: e.clientX, y: e.clientY };

    // First sample only seeds _lastMouse — a delta needs two points.
    if (this._lastMouse) {
      const dt = t - this._lastMouse.t;
      const dx = cur.x - this._lastMouse.x;
      const dy = cur.y - this._lastMouse.y;
      // Only store the delta, not absolute position (privacy)
      if (this._mouseEvents.length < MAX_EVENTS) {
        this._mouseEvents.push({ t, dt, dx, dy,
          pressure: e.pressure ?? 0,
          pointerType: e.pointerType ?? 'mouse' });
      }
    }
    this._lastMouse = cur;
  }

  _onKeyDown(e) {
    if (!this._active) return;
    const t = e.timeStamp ?? performance.now();
    // Store timestamp keyed by code (NOT key label)
    this._lastKey[e.code] = { downAt: t };
  }

  _onKeyUp(e) {
    if (!this._active) return;
    const t = e.timeStamp ?? performance.now();
    const rec = this._lastKey[e.code];
    // dwell is null when the matching keydown was not observed (e.g. key was
    // already down when collection started).
    const dwell = rec ? (t - rec.downAt) : null;
    delete this._lastKey[e.code];

    if (this._keyEvents.length < MAX_EVENTS) {
      // Only dwell time; key identity NOT stored.
      this._keyEvents.push({ t, dwell });
    }
  }

  // ── snapshot ─────────────────────────────────────────────────────────────

  /**
   * Returns a privacy-preserving statistical snapshot of collected bio signals.
   * Raw events are summarised; nothing identifiable is included in the output.
   *
   * @param {number[]} computationTimings - entropy probe timing array
   * @returns {BioSnapshot}
   */
  snapshot(computationTimings = []) {
    const now = performance.now();
    const durationMs = this._startTime != null ? (now - this._startTime) : 0;

    // ── Mouse statistics ────────────────────────────────────────────────
    // iei = inter-event intervals (ms between successive pointer samples)
    const iei = this._mouseEvents.map(e => e.dt);
    const velocities = this._mouseEvents.map(e =>
      e.dt > 0 ? Math.hypot(e.dx, e.dy) / e.dt : 0
    );
    const pressure = this._mouseEvents.map(e => e.pressure);
    const angJerk = _computeAngularJerk(this._mouseEvents);

    const mouseStats = {
      sampleCount: iei.length,
      ieiMean: _mean(iei),
      ieiCV: _cv(iei),
      velocityP50: _percentile$1(velocities, 50),
      velocityP95: _percentile$1(velocities, 95),
      angularJerkMean: _mean(angJerk),
      pressureVariance: _variance(pressure),
    };

    // ── Keyboard statistics ───────────────────────────────────────────────
    // iki = inter-key intervals between successive keyups (dwell may be null;
    // those events still contribute to iki but not to dwell stats).
    const dwellTimes = this._keyEvents.filter(e => e.dwell != null).map(e => e.dwell);
    const iki = [];
    for (let i = 1; i < this._keyEvents.length; i++) {
      iki.push(this._keyEvents[i].t - this._keyEvents[i - 1].t);
    }

    const keyStats = {
      sampleCount: dwellTimes.length,
      dwellMean: _mean(dwellTimes),
      dwellCV: _cv(dwellTimes),
      ikiMean: _mean(iki),
      ikiCV: _cv(iki),
    };

    // ── Interference Coefficient ──────────────────────────────────────────
    // Cross-correlate input event density with computation timing deviations.
    // A real human on real hardware creates measurable CPU-scheduling pressure
    // that perturbs the entropy probe's timing.
    const interferenceCoefficient = _computeInterference(
      this._mouseEvents,
      this._keyEvents,
      computationTimings,
    );

    return {
      mouse: mouseStats,
      keyboard: keyStats,
      interferenceCoefficient,
      durationMs,
      hasActivity: iei.length > 5 || dwellTimes.length > 2,
    };
  }
}
1020
+
1021
+ /**
1022
+ * @typedef {object} BioSnapshot
1023
+ * @property {object} mouse
1024
+ * @property {object} keyboard
1025
+ * @property {number} interferenceCoefficient – [−1, 1]; higher = more human
1026
+ * @property {number} durationMs
1027
+ * @property {boolean} hasActivity
1028
+ */
1029
+
1030
+ // ---------------------------------------------------------------------------
1031
+ // Statistical helpers (private)
1032
+ // ---------------------------------------------------------------------------
1033
+
1034
/** Arithmetic mean; returns 0 for an empty array. */
function _mean(arr) {
  if (arr.length === 0) return 0;
  let sum = 0;
  for (const x of arr) sum += x;
  return sum / arr.length;
}
1038
+
1039
/** Sample variance (n−1 denominator); 0 for fewer than two samples. */
function _variance(arr) {
  const n = arr.length;
  if (n < 2) return 0;
  let total = 0;
  for (const v of arr) total += v;
  const m = total / n;
  let ss = 0;
  for (const v of arr) ss += (v - m) ** 2;
  return ss / (n - 1);
}
1044
+
1045
/**
 * Coefficient of variation: sample std-dev divided by |mean|.
 * Returns 0 for empty input or a zero mean (mean and variance inlined so the
 * function is self-contained).
 */
function _cv(arr) {
  const n = arr.length;
  if (n === 0) return 0;
  let total = 0;
  for (const v of arr) total += v;
  const m = total / n;
  if (m === 0) return 0;
  let variance = 0;
  if (n >= 2) {
    for (const v of arr) variance += (v - m) ** 2;
    variance /= n - 1;
  }
  return Math.sqrt(variance) / Math.abs(m);
}
1051
+
1052
/**
 * Linear-interpolated percentile. The input need not be pre-sorted: a sorted
 * copy is made internally, so the caller's array is never mutated.
 */
function _percentile$1(sorted, p) {
  const arr = Array.from(sorted).sort((x, y) => x - y);
  if (arr.length === 0) return 0;
  const pos = (p / 100) * (arr.length - 1);
  const below = Math.floor(pos);
  const above = Math.ceil(pos);
  const frac = pos - below;
  return arr[below] + (arr[above] - arr[below]) * frac;
}
1060
+
1061
/** Angular jerk: second derivative of movement direction (radians / s²). */
function _computeAngularJerk(events) {
  if (events.length < 3) return [];
  // Heading of each stored delta vector.
  const headings = events.map(({ dx, dy }) => Math.atan2(dy, dx));
  // First difference: angular velocity over each inter-event interval.
  const angularVel = [];
  for (let i = 1; i < headings.length; i++) {
    angularVel.push((headings[i] - headings[i - 1]) / (events[i].dt || 1));
  }
  // Second difference: absolute angular jerk.
  const jerk = [];
  for (let i = 1; i < angularVel.length; i++) {
    jerk.push(Math.abs((angularVel[i] - angularVel[i - 1]) / (events[i].dt || 1)));
  }
  return jerk;
}
1081
+
1082
/**
 * Interference Coefficient
 *
 * For each computation sample, check whether an input event occurred within
 * ±16 ms (one animation frame). Build two parallel series:
 *   X[i] = 1 if input near sample i, else 0
 *   Y[i] = deviation of timing[i] from mean timing
 * Return the Pearson correlation between X and Y.
 * A real human on real hardware produces positive correlation (input events
 * cause measurable CPU scheduling perturbations).
 *
 * @param {Array<{t:number}>} mouseEvents
 * @param {Array<{t:number}>} keyEvents
 * @param {number[]} timings - entropy probe per-iteration timings
 * @returns {number} Pearson correlation in [-1, 1]; 0 when no data
 */
function _computeInterference(mouseEvents, keyEvents, timings) {
  if (!timings.length) return 0;

  const allInputTimes = [
    ...mouseEvents.map(e => e.t),
    ...keyEvents.map(e => e.t),
  ].sort((a, b) => a - b);

  if (!allInputTimes.length) return 0;

  const WINDOW_MS = 16;
  const meanTiming = _mean(timings);

  // We need absolute timestamps for the probe samples.
  // We don't have them directly – use relative index spacing as a proxy.
  // The entropy probe runs for ~(mean * n) ms starting at collectedAt.
  // This is a statistical approximation; the exact alignment improves
  // when callers pass `collectedAt` from the entropy result.
  // For now we distribute samples evenly across the collection window.
  const first = allInputTimes[0];
  const last = allInputTimes[allInputTimes.length - 1];
  const span = Math.max(last - first, 1);

  // Binary search over the (already sorted) input times instead of the former
  // O(m) linear `some()` scan per sample — O((n+m)·log m) overall, identical
  // result: only the nearest neighbours on each side can fall in the window.
  const hasNearbyInput = (tSample) => {
    let lo = 0;
    let hi = allInputTimes.length;
    while (lo < hi) {
      const mid = (lo + hi) >> 1;
      if (allInputTimes[mid] < tSample) lo = mid + 1;
      else hi = mid;
    }
    // lo is the first index with time >= tSample.
    if (lo < allInputTimes.length && allInputTimes[lo] - tSample < WINDOW_MS) return true;
    if (lo > 0 && tSample - allInputTimes[lo - 1] < WINDOW_MS) return true;
    return false;
  };

  const X = timings.map((_, i) => {
    const tSample = first + (i / timings.length) * span;
    return hasNearbyInput(tSample) ? 1 : 0;
  });

  const Y = timings.map(t => t - meanTiming);

  return _pearson(X, Y);
}
1125
+
1126
/**
 * Pearson correlation coefficient of two equal-length series.
 * Returns 0 for fewer than two points or a (near-)zero denominator.
 */
function _pearson(X, Y) {
  const n = X.length;
  if (n < 2) return 0;
  let sx = 0;
  let sy = 0;
  for (let i = 0; i < n; i++) {
    sx += X[i];
    sy += Y[i];
  }
  const mx = sx / n;
  const my = sy / n;
  let cov = 0;
  let vx = 0;
  let vy = 0;
  for (let i = 0; i < n; i++) {
    const dx = X[i] - mx;
    const dy = Y[i] - my;
    cov += dx * dy;
    vx += dx * dx;
    vy += dy * dy;
  }
  const denom = Math.sqrt(vx * vy);
  return denom < 1e-14 ? 0 : cov / denom;
}
1142
+
1143
+ /**
1144
+ * Utilities for hex, bytes, CSPRNG.
1145
+ * @module
1146
+ */
1147
+ /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */
1148
+ // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+.
1149
+ // node.js versions earlier than v19 don't declare it in global scope.
1150
+ // For node.js, package.json#exports field mapping rewrites import
1151
+ // from `crypto` to `cryptoNode`, which imports native module.
1152
+ // Makes the utils un-importable in browsers without a bundler.
1153
+ // Once node.js 18 is deprecated (2025-04-30), we can just drop the import.
1154
/** Checks if something is Uint8Array. Be careful: nodejs Buffer will return true. */
function isBytes(a) {
    if (a instanceof Uint8Array) return true;
    return ArrayBuffer.isView(a) && a.constructor.name === 'Uint8Array';
}
1158
/** Asserts something is positive integer. */
function anumber(n) {
    const valid = Number.isSafeInteger(n) && n >= 0;
    if (!valid)
        throw new Error('positive integer expected, got ' + n);
}
1163
/** Asserts something is Uint8Array. */
function abytes(b, ...lengths) {
    // Uint8Array check inlined (mirrors isBytes): real Uint8Array or a
    // cross-realm view whose constructor is named Uint8Array.
    const bytesLike = b instanceof Uint8Array ||
        (ArrayBuffer.isView(b) && b.constructor.name === 'Uint8Array');
    if (!bytesLike)
        throw new Error('Uint8Array expected');
    if (lengths.length > 0 && !lengths.includes(b.length))
        throw new Error('Uint8Array expected of length ' + lengths + ', got length=' + b.length);
}
1170
/** Asserts a hash instance has not been destroyed / finished */
function aexists(instance, checkFinished = true) {
    const { destroyed, finished } = instance;
    if (destroyed)
        throw new Error('Hash instance has been destroyed');
    if (finished && checkFinished)
        throw new Error('Hash#digest() has already been called');
}
1177
/** Asserts output is properly-sized byte array */
function aoutput(out, instance) {
    // Byte-array check inlined (same predicate as abytes without lengths).
    const bytesLike = out instanceof Uint8Array ||
        (ArrayBuffer.isView(out) && out.constructor.name === 'Uint8Array');
    if (!bytesLike)
        throw new Error('Uint8Array expected');
    const min = instance.outputLen;
    if (out.length < min) {
        throw new Error('digestInto() expects output buffer of length at least ' + min);
    }
}
1185
/** Cast u8 / u16 / u32 to u8. */
function u8(arr) {
    const { buffer, byteOffset, byteLength } = arr;
    return new Uint8Array(buffer, byteOffset, byteLength);
}
1189
/** Cast u8 / u16 / u32 to u32 (trailing bytes that don't fill a word are dropped). */
function u32(arr) {
    const { buffer, byteOffset, byteLength } = arr;
    return new Uint32Array(buffer, byteOffset, Math.floor(byteLength / 4));
}
1193
/** Zeroize a byte array. Warning: JS provides no guarantees. */
function clean(...arrays) {
    for (const arr of arrays) {
        arr.fill(0);
    }
}
1199
/** The rotate right (circular right shift) operation for uint32 */
function rotr(word, shift) {
    return (word >>> shift) | (word << (32 - shift));
}
1203
/**
 * Is current platform little-endian? Most are. Big-Endian platform: IBM
 * Detected by writing one known u32 and inspecting its first byte.
 */
const isLE = /* @__PURE__ */ (() => new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44)();
1205
/** The byte swap operation for uint32 */
function byteSwap(word) {
    const lowToHigh = (word & 0xff) << 24;
    const midLow = (word & 0xff00) << 8;
    const midHigh = (word >>> 8) & 0xff00;
    const highToLow = (word >>> 24) & 0xff;
    return lowToHigh | midLow | midHigh | highToLow;
}
1212
/**
 * Conditionally byte swap if on a big-endian platform.
 * Identity function on little-endian, so hot paths pay nothing there.
 */
const swap8IfBE = isLE
    ? (n) => n
    : (n) => byteSwap(n);
1216
/** In place byte swap for Uint32Array (byteSwap inlined per element); returns the same array. */
function byteSwap32(arr) {
    for (let i = 0; i < arr.length; i++) {
        const w = arr[i];
        arr[i] =
            ((w << 24) & 0xff000000) |
            ((w << 8) & 0xff0000) |
            ((w >>> 8) & 0xff00) |
            ((w >>> 24) & 0xff);
    }
    return arr;
}
1223
/** Conditionally byte-swap a whole Uint32Array in place on big-endian platforms (identity on little-endian). */
const swap32IfBE = isLE
    ? (u) => u
    : byteSwap32;
1226
// Built-in hex conversion https://caniuse.com/mdn-javascript_builtins_uint8array_fromhex
// Feature-detected once at module load; bytesToHex prefers the native path.
const hasHexBuiltin = /* @__PURE__ */ (() =>
// @ts-ignore
typeof Uint8Array.from([]).toHex === 'function' && typeof Uint8Array.fromHex === 'function')();
// Array where index 0xf0 (240) is mapped to string 'f0'
const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0'));
1232
/**
 * Convert byte array to hex string. Uses built-in function, when available.
 * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123'
 */
function bytesToHex(bytes) {
    abytes(bytes);
    // @ts-ignore
    if (hasHexBuiltin)
        return bytes.toHex();
    // pre-caching improves the speed 6x
    let hex = '';
    for (const byte of bytes) {
        hex += hexes[byte];
    }
    return hex;
}
1248
/**
 * Converts string to bytes using UTF8 encoding.
 * @example utf8ToBytes('abc') // Uint8Array.from([97, 98, 99])
 */
function utf8ToBytes(str) {
    if (typeof str !== 'string')
        throw new Error('string expected');
    const encoded = new TextEncoder().encode(str);
    return new Uint8Array(encoded); // https://bugzil.la/1681809
}
1257
/**
 * Normalizes (non-hex) string or Uint8Array to Uint8Array.
 * Warning: when Uint8Array is passed, it would NOT get copied.
 * Keep in mind for future mutable operations.
 */
function toBytes(data) {
    const bytes = typeof data === 'string' ? utf8ToBytes(data) : data;
    abytes(bytes);
    return bytes;
}
1268
/**
 * For runtime check if class implements interface.
 * Empty marker base class: concrete hashes (e.g. BLAKE2 below) extend it so
 * consumers can test `x instanceof Hash`.
 */
class Hash {
}
1271
/**
 * Wraps a hash constructor into a callable one-shot function, with
 * `outputLen` / `blockLen` (probed from a throwaway instance) and a
 * `create(opts)` factory attached for incremental use.
 */
function createXOFer(hashCons) {
    const oneShot = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest();
    const probe = hashCons({});
    return Object.assign(oneShot, {
        outputLen: probe.outputLen,
        blockLen: probe.blockLen,
        create: (opts) => hashCons(opts),
    });
}
1279
+
1280
/**
 * Internal Merkle-Damgard hash utils.
 * @module
 */
/**
 * Initial SHA-2 state: fractional parts of square roots of first 16 primes 2..53.
 * Check out `test/misc/sha2-gen-iv.js` for recomputation guide.
 */
/** Initial SHA256 state. Bits 0..32 of frac part of sqrt of primes 2..19 */
// NOTE: BLAKE3 below reuses this IV (see `B3_IV = SHA256_IV.slice()`).
const SHA256_IV = /* @__PURE__ */ Uint32Array.from([
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
]);
1292
+
1293
+ /**
1294
+ * Internal helpers for u64. BigUint64Array is too slow as per 2025, so we implement it using Uint32Array.
1295
+ * @todo re-check https://issues.chromium.org/issues/42212588
1296
+ * @module
1297
+ */
1298
const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1);
const _32n = /* @__PURE__ */ BigInt(32);
/**
 * Split a u64 BigInt into two 32-bit halves { h, l }.
 * Default: h = high word, l = low word, both coerced to signed i32 (`| 0`).
 * With `le = true`: halves are swapped (h = low, l = high) and left unsigned.
 */
function fromBig(n, le = false) {
    const lo = n & U32_MASK64;
    const hi = (n >> _32n) & U32_MASK64;
    if (le)
        return { h: Number(lo), l: Number(hi) };
    return { h: Number(hi) | 0, l: Number(lo) | 0 };
}
1305
+
1306
+ /**
1307
+ * Internal helpers for blake hash.
1308
+ * @module
1309
+ */
1310
// Mixing function G split into two halves
1311
/** First half of the BLAKE G mixing step (rotations 16 and 12); rotr inlined. */
function G1s(a, b, c, d, x) {
    a = (a + b + x) | 0;
    const t1 = d ^ a;
    d = (t1 << 16) | (t1 >>> 16); // rotr(d ^ a, 16)
    c = (c + d) | 0;
    const t2 = b ^ c;
    b = (t2 << 20) | (t2 >>> 12); // rotr(b ^ c, 12)
    return { a, b, c, d };
}
1318
/** Second half of the BLAKE G mixing step (rotations 8 and 7); rotr inlined. */
function G2s(a, b, c, d, x) {
    a = (a + b + x) | 0;
    const t1 = d ^ a;
    d = (t1 << 24) | (t1 >>> 8); // rotr(d ^ a, 8)
    c = (c + d) | 0;
    const t2 = b ^ c;
    b = (t2 << 25) | (t2 >>> 7); // rotr(b ^ c, 7)
    return { a, b, c, d };
}
1325
+
1326
+ /**
1327
+ * blake2b (64-bit) & blake2s (8 to 32-bit) hash functions.
1328
+ * b could have been faster, but there is no fast u64 in js, so s is 1.5x faster.
1329
+ * @module
1330
+ */
1331
/** Class, from which others are subclassed. */
class BLAKE2 extends Hash {
    /**
     * @param {number} blockLen - compression-function block size in bytes
     * @param {number} outputLen - digest size in bytes
     */
    constructor(blockLen, outputLen) {
        super();
        this.finished = false;
        this.destroyed = false;
        this.length = 0; // total bytes absorbed so far
        this.pos = 0; // write position inside `buffer`
        anumber(blockLen);
        anumber(outputLen);
        this.blockLen = blockLen;
        this.outputLen = outputLen;
        this.buffer = new Uint8Array(blockLen);
        this.buffer32 = u32(this.buffer); // u32 view over the same bytes as `buffer`
    }
    /** Absorb more input (string or Uint8Array); returns `this` for chaining. */
    update(data) {
        aexists(this);
        data = toBytes(data);
        abytes(data);
        // Main difference with other hashes: there is flag for last block,
        // so we cannot process current block before we know that there
        // is the next one. This significantly complicates logic and reduces ability
        // to do zero-copy processing
        const { blockLen, buffer, buffer32 } = this;
        const len = data.length;
        const offset = data.byteOffset;
        const buf = data.buffer;
        for (let pos = 0; pos < len;) {
            // If buffer is full and we still have input (don't process last block, same as blake2s)
            if (this.pos === blockLen) {
                swap32IfBE(buffer32);
                this.compress(buffer32, 0, false);
                swap32IfBE(buffer32);
                this.pos = 0;
            }
            const take = Math.min(blockLen - this.pos, len - pos);
            const dataOffset = offset + pos;
            // full block && aligned to 4 bytes && not last in input
            if (take === blockLen && !(dataOffset % 4) && pos + take < len) {
                // Fast path: compress straight from a u32 view of the input, no copy.
                const data32 = new Uint32Array(buf, dataOffset, Math.floor((len - pos) / 4));
                swap32IfBE(data32);
                for (let pos32 = 0; pos + blockLen < len; pos32 += buffer32.length, pos += blockLen) {
                    this.length += blockLen;
                    this.compress(data32, pos32, false);
                }
                // Restore the caller's bytes on big-endian platforms.
                swap32IfBE(data32);
                continue;
            }
            // Slow path: stage a partial/unaligned chunk through the internal buffer.
            buffer.set(data.subarray(pos, pos + take), this.pos);
            this.pos += take;
            this.length += take;
            pos += take;
        }
        return this;
    }
    /** Finalize into a caller-provided buffer (checked to be >= outputLen bytes). */
    digestInto(out) {
        aexists(this);
        aoutput(out, this);
        const { pos, buffer32 } = this;
        this.finished = true;
        // Padding
        clean(this.buffer.subarray(pos));
        swap32IfBE(buffer32);
        this.compress(buffer32, 0, true);
        swap32IfBE(buffer32);
        const out32 = u32(out);
        this.get().forEach((v, i) => (out32[i] = swap8IfBE(v)));
    }
    /** Finalize and return a fresh digest; destroys this instance afterwards. */
    digest() {
        const { buffer, outputLen } = this;
        this.digestInto(buffer);
        const res = buffer.slice(0, outputLen);
        this.destroy();
        return res;
    }
    /** Copy internal state into `to` (or a new instance) so hashing can fork. */
    _cloneInto(to) {
        const { buffer, length, finished, destroyed, outputLen, pos } = this;
        to || (to = new this.constructor({ dkLen: outputLen }));
        to.set(...this.get());
        to.buffer.set(buffer);
        to.destroyed = destroyed;
        to.finished = finished;
        to.length = length;
        to.pos = pos;
        // @ts-ignore
        to.outputLen = outputLen;
        return to;
    }
    clone() {
        return this._cloneInto();
    }
}
1423
/**
 * Unrolled BLAKE round function: for each of `rounds` rounds, apply the
 * G1s/G2s mixing halves to the four columns and then the four diagonals of
 * the 4x4 state (v0..v15), pulling message words via the schedule `s`
 * (consumed sequentially, 16 indices per round). Returns the mixed state.
 */
// prettier-ignore
function compress(s, offset, msg, rounds, v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) {
    let j = 0; // running cursor into the schedule `s`
    for (let i = 0; i < rounds; i++) {
        // Columns
        ({ a: v0, b: v4, c: v8, d: v12 } = G1s(v0, v4, v8, v12, msg[offset + s[j++]]));
        ({ a: v0, b: v4, c: v8, d: v12 } = G2s(v0, v4, v8, v12, msg[offset + s[j++]]));
        ({ a: v1, b: v5, c: v9, d: v13 } = G1s(v1, v5, v9, v13, msg[offset + s[j++]]));
        ({ a: v1, b: v5, c: v9, d: v13 } = G2s(v1, v5, v9, v13, msg[offset + s[j++]]));
        ({ a: v2, b: v6, c: v10, d: v14 } = G1s(v2, v6, v10, v14, msg[offset + s[j++]]));
        ({ a: v2, b: v6, c: v10, d: v14 } = G2s(v2, v6, v10, v14, msg[offset + s[j++]]));
        ({ a: v3, b: v7, c: v11, d: v15 } = G1s(v3, v7, v11, v15, msg[offset + s[j++]]));
        ({ a: v3, b: v7, c: v11, d: v15 } = G2s(v3, v7, v11, v15, msg[offset + s[j++]]));
        // Diagonals
        ({ a: v0, b: v5, c: v10, d: v15 } = G1s(v0, v5, v10, v15, msg[offset + s[j++]]));
        ({ a: v0, b: v5, c: v10, d: v15 } = G2s(v0, v5, v10, v15, msg[offset + s[j++]]));
        ({ a: v1, b: v6, c: v11, d: v12 } = G1s(v1, v6, v11, v12, msg[offset + s[j++]]));
        ({ a: v1, b: v6, c: v11, d: v12 } = G2s(v1, v6, v11, v12, msg[offset + s[j++]]));
        ({ a: v2, b: v7, c: v8, d: v13 } = G1s(v2, v7, v8, v13, msg[offset + s[j++]]));
        ({ a: v2, b: v7, c: v8, d: v13 } = G2s(v2, v7, v8, v13, msg[offset + s[j++]]));
        ({ a: v3, b: v4, c: v9, d: v14 } = G1s(v3, v4, v9, v14, msg[offset + s[j++]]));
        ({ a: v3, b: v4, c: v9, d: v14 } = G2s(v3, v4, v9, v14, msg[offset + s[j++]]));
    }
    return { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 };
}
1446
+
1447
+ /**
1448
+ * Blake3 fast hash is Blake2 with reduced security (round count). Can also be used as MAC & KDF.
1449
+ *
1450
+ * It is advertised as "the fastest cryptographic hash". However, it isn't true in JS.
1451
+ * Why is this so slow? While it should be 6x faster than blake2b, perf diff is only 20%:
1452
+ *
1453
+ * * There is only 30% reduction in number of rounds from blake2s
1454
+ * * Speed-up comes from tree structure, which is parallelized using SIMD & threading.
1455
+ * These features are not present in JS, so we only get overhead from trees.
1456
+ * * Parallelization only happens on 1024-byte chunks: there is no benefit for small inputs.
1457
+ * * It is still possible to make it faster using: a) loop unrolling b) web workers c) wasm
1458
+ * @module
1459
+ */
1460
// Flag bitset
const B3_Flags = {
    CHUNK_START: 0b1,
    CHUNK_END: 0b10,
    PARENT: 0b100,
    ROOT: 0b1000,
    KEYED_HASH: 0b10000,
    DERIVE_KEY_CONTEXT: 0b100000,
    DERIVE_KEY_MATERIAL: 0b1000000,
};
// BLAKE3 reuses the SHA-256 IV (copied, so later mutation is safe).
const B3_IV = SHA256_IV.slice();
// Flattened message schedules for the 7 rounds: the identity permutation,
// then the fixed BLAKE3 permutation applied repeatedly (16 indices per round).
const B3_SIGMA = /* @__PURE__ */ (() => {
    const Id = Array.from({ length: 16 }, (_, i) => i);
    const permute = (arr) => [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8].map((i) => arr[i]);
    const res = [];
    for (let i = 0, v = Id; i < 7; i++, v = permute(v))
        res.push(...v);
    return Uint8Array.from(res);
})();
1479
+ /** Blake3 hash. Can be used as MAC and KDF. */
1480
+ class BLAKE3 extends BLAKE2 {
1481
+ constructor(opts = {}, flags = 0) {
1482
+ super(64, opts.dkLen === undefined ? 32 : opts.dkLen);
1483
+ this.chunkPos = 0; // Position of current block in chunk
1484
+ this.chunksDone = 0; // How many chunks we already have
1485
+ this.flags = 0 | 0;
1486
+ this.stack = [];
1487
+ // Output
1488
+ this.posOut = 0;
1489
+ this.bufferOut32 = new Uint32Array(16);
1490
+ this.chunkOut = 0; // index of output chunk
1491
+ this.enableXOF = true;
1492
+ const { key, context } = opts;
1493
+ const hasContext = context !== undefined;
1494
+ if (key !== undefined) {
1495
+ if (hasContext)
1496
+ throw new Error('Only "key" or "context" can be specified at same time');
1497
+ const k = toBytes(key).slice();
1498
+ abytes(k, 32);
1499
+ this.IV = u32(k);
1500
+ swap32IfBE(this.IV);
1501
+ this.flags = flags | B3_Flags.KEYED_HASH;
1502
+ }
1503
+ else if (hasContext) {
1504
+ const ctx = toBytes(context);
1505
+ const contextKey = new BLAKE3({ dkLen: 32 }, B3_Flags.DERIVE_KEY_CONTEXT)
1506
+ .update(ctx)
1507
+ .digest();
1508
+ this.IV = u32(contextKey);
1509
+ swap32IfBE(this.IV);
1510
+ this.flags = flags | B3_Flags.DERIVE_KEY_MATERIAL;
1511
+ }
1512
+ else {
1513
+ this.IV = B3_IV.slice();
1514
+ this.flags = flags;
1515
+ }
1516
+ this.state = this.IV.slice();
1517
+ this.bufferOut = u8(this.bufferOut32);
1518
+ }
1519
+ // Unused
1520
+ get() {
1521
+ return [];
1522
+ }
1523
+ set() { }
1524
+ b2Compress(counter, flags, buf, bufPos = 0) {
1525
+ const { state: s, pos } = this;
1526
+ const { h, l } = fromBig(BigInt(counter), true);
1527
+ // prettier-ignore
1528
+ const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(B3_SIGMA, bufPos, buf, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], h, l, pos, flags);
1529
+ s[0] = v0 ^ v8;
1530
+ s[1] = v1 ^ v9;
1531
+ s[2] = v2 ^ v10;
1532
+ s[3] = v3 ^ v11;
1533
+ s[4] = v4 ^ v12;
1534
+ s[5] = v5 ^ v13;
1535
+ s[6] = v6 ^ v14;
1536
+ s[7] = v7 ^ v15;
1537
+ }
1538
+ compress(buf, bufPos = 0, isLast = false) {
1539
+ // Compress last block
1540
+ let flags = this.flags;
1541
+ if (!this.chunkPos)
1542
+ flags |= B3_Flags.CHUNK_START;
1543
+ if (this.chunkPos === 15 || isLast)
1544
+ flags |= B3_Flags.CHUNK_END;
1545
+ if (!isLast)
1546
+ this.pos = this.blockLen;
1547
+ this.b2Compress(this.chunksDone, flags, buf, bufPos);
1548
+ this.chunkPos += 1;
1549
+ // If current block is last in chunk (16 blocks), then compress chunks
1550
+ if (this.chunkPos === 16 || isLast) {
1551
+ let chunk = this.state;
1552
+ this.state = this.IV.slice();
1553
+ // If not the last one, compress only when there are trailing zeros in chunk counter
1554
+ // chunks used as binary tree where current stack is path. Zero means current leaf is finished and can be compressed.
1555
+ // 1 (001) - leaf not finished (just push current chunk to stack)
1556
+ // 2 (010) - leaf finished at depth=1 (merge with last elm on stack and push back)
1557
+ // 3 (011) - last leaf not finished
1558
+ // 4 (100) - leafs finished at depth=1 and depth=2
1559
+ for (let last, chunks = this.chunksDone + 1; isLast || !(chunks & 1); chunks >>= 1) {
1560
+ if (!(last = this.stack.pop()))
1561
+ break;
1562
+ this.buffer32.set(last, 0);
1563
+ this.buffer32.set(chunk, 8);
1564
+ this.pos = this.blockLen;
1565
+ this.b2Compress(0, this.flags | B3_Flags.PARENT, this.buffer32, 0);
1566
+ chunk = this.state;
1567
+ this.state = this.IV.slice();
1568
+ }
1569
+ this.chunksDone++;
1570
+ this.chunkPos = 0;
1571
+ this.stack.push(chunk);
1572
+ }
1573
+ this.pos = 0;
1574
+ }
1575
+ _cloneInto(to) {
1576
+ to = super._cloneInto(to);
1577
+ const { IV, flags, state, chunkPos, posOut, chunkOut, stack, chunksDone } = this;
1578
+ to.state.set(state.slice());
1579
+ to.stack = stack.map((i) => Uint32Array.from(i));
1580
+ to.IV.set(IV);
1581
+ to.flags = flags;
1582
+ to.chunkPos = chunkPos;
1583
+ to.chunksDone = chunksDone;
1584
+ to.posOut = posOut;
1585
+ to.chunkOut = chunkOut;
1586
+ to.enableXOF = this.enableXOF;
1587
+ to.bufferOut32.set(this.bufferOut32);
1588
+ return to;
1589
+ }
1590
+ destroy() {
1591
+ this.destroyed = true;
1592
+ clean(this.state, this.buffer32, this.IV, this.bufferOut32);
1593
+ clean(...this.stack);
1594
+ }
1595
+ // Same as b2Compress, but doesn't modify state and returns 16 u32 array (instead of 8)
1596
+ b2CompressOut() {
1597
+ const { state: s, pos, flags, buffer32, bufferOut32: out32 } = this;
1598
+ const { h, l } = fromBig(BigInt(this.chunkOut++));
1599
+ swap32IfBE(buffer32);
1600
+ // prettier-ignore
1601
+ const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(B3_SIGMA, 0, buffer32, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], l, h, pos, flags);
1602
+ out32[0] = v0 ^ v8;
1603
+ out32[1] = v1 ^ v9;
1604
+ out32[2] = v2 ^ v10;
1605
+ out32[3] = v3 ^ v11;
1606
+ out32[4] = v4 ^ v12;
1607
+ out32[5] = v5 ^ v13;
1608
+ out32[6] = v6 ^ v14;
1609
+ out32[7] = v7 ^ v15;
1610
+ out32[8] = s[0] ^ v8;
1611
+ out32[9] = s[1] ^ v9;
1612
+ out32[10] = s[2] ^ v10;
1613
+ out32[11] = s[3] ^ v11;
1614
+ out32[12] = s[4] ^ v12;
1615
+ out32[13] = s[5] ^ v13;
1616
+ out32[14] = s[6] ^ v14;
1617
+ out32[15] = s[7] ^ v15;
1618
+ swap32IfBE(buffer32);
1619
+ swap32IfBE(out32);
1620
+ this.posOut = 0;
1621
+ }
1622
+ finish() {
1623
+ if (this.finished)
1624
+ return;
1625
+ this.finished = true;
1626
+ // Padding
1627
+ clean(this.buffer.subarray(this.pos));
1628
+ // Process last chunk
1629
+ let flags = this.flags | B3_Flags.ROOT;
1630
+ if (this.stack.length) {
1631
+ flags |= B3_Flags.PARENT;
1632
+ swap32IfBE(this.buffer32);
1633
+ this.compress(this.buffer32, 0, true);
1634
+ swap32IfBE(this.buffer32);
1635
+ this.chunksDone = 0;
1636
+ this.pos = this.blockLen;
1637
+ }
1638
+ else {
1639
+ flags |= (!this.chunkPos ? B3_Flags.CHUNK_START : 0) | B3_Flags.CHUNK_END;
1640
+ }
1641
+ this.flags = flags;
1642
+ this.b2CompressOut();
1643
+ }
1644
+ writeInto(out) {
1645
+ aexists(this, false);
1646
+ abytes(out);
1647
+ this.finish();
1648
+ const { blockLen, bufferOut } = this;
1649
+ for (let pos = 0, len = out.length; pos < len;) {
1650
+ if (this.posOut >= blockLen)
1651
+ this.b2CompressOut();
1652
+ const take = Math.min(blockLen - this.posOut, len - pos);
1653
+ out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos);
1654
+ this.posOut += take;
1655
+ pos += take;
1656
+ }
1657
+ return out;
1658
+ }
1659
+ xofInto(out) {
1660
+ if (!this.enableXOF)
1661
+ throw new Error('XOF is not possible after digest call');
1662
+ return this.writeInto(out);
1663
+ }
1664
+ xof(bytes) {
1665
+ anumber(bytes);
1666
+ return this.xofInto(new Uint8Array(bytes));
1667
+ }
1668
    /**
     * One-shot digest into a caller-provided buffer. Disables further XOF
     * reads, writes the digest, then wipes internal state via destroy().
     * Throws if digest() was already called on this instance.
     * @param {Uint8Array} out
     * @returns {Uint8Array} the filled `out` buffer
     */
    digestInto(out) {
        aoutput(out, this);
        if (this.finished)
            throw new Error('digest() was already called');
        // After a digest the stream position is fixed — forbid xof/xofInto.
        this.enableXOF = false;
        this.writeInto(out);
        this.destroy();
        return out;
    }
1677
+ digest() {
1678
+ return this.digestInto(new Uint8Array(this.outputLen));
1679
+ }
1680
+ }
1681
/**
 * BLAKE3 hash function. Also usable as a MAC (keyed mode) and KDF
 * (context mode).
 * @param msg - message to be hashed
 * @param opts - `dkLen` for output length, `key` for MAC mode, `context` for KDF mode
 * @example
 * const data = new Uint8Array(32);
 * const hash = blake3(data);
 * const mac = blake3(data, { key: new Uint8Array(32) });
 * const kdf = blake3(data, { context: 'application name' });
 */
const blake3 = /* @__PURE__ */ createXOFer((opts) => new BLAKE3(opts));
1692
+
1693
+ /**
1694
+ * @sovereign/pulse — Hardware Fingerprint & Proof Builder
1695
+ *
1696
+ * Assembles all collected signals into a canonical ProofPayload, then
1697
+ * produces a BLAKE3 commitment: BLAKE3(canonicalJSON(payload)).
1698
+ *
1699
+ * The commitment is what gets sent to the server. The server recomputes
1700
+ * the hash from the payload to detect tampering. Raw timing arrays and
1701
+ * pixel buffers are NOT included — only statistical summaries.
1702
+ *
1703
+ * Zero-Knowledge property: the server learns only that the device passes
1704
+ * statistical thresholds. It never sees raw hardware telemetry.
1705
+ */
1706
+
1707
+
1708
+ // ---------------------------------------------------------------------------
1709
+ // BLAKE3 helpers (re-exported for use by canvas.js etc.)
1710
+ // ---------------------------------------------------------------------------
1711
+
1712
/**
 * BLAKE3 digest of raw bytes, rendered as a hex string.
 * @param {Uint8Array} data - bytes to hash
 * @returns {string} hex-encoded digest
 */
function blake3Hex(data) {
    const digest = blake3(data);
    return bytesToHex(digest);
}
1720
+
1721
/**
 * BLAKE3 digest of a string (UTF-8 encoded), rendered as a hex string.
 * @param {string} str
 * @returns {string} hex-encoded digest
 */
function blake3HexStr(str) {
    const bytes = new TextEncoder().encode(str);
    return blake3Hex(bytes);
}
1729
+
1730
+ // ---------------------------------------------------------------------------
1731
+ // buildProof
1732
+ // ---------------------------------------------------------------------------
1733
+
1734
+ /**
1735
+ * Assembles a ProofPayload from all collected signals.
1736
+ * This is the canonical structure that gets hashed into the commitment.
1737
+ *
1738
+ * @param {object} p
1739
+ * @param {import('../collector/entropy.js').EntropyResult} p.entropy
1740
+ * @param {import('../analysis/jitter.js').JitterAnalysis} p.jitter
1741
+ * @param {import('../collector/bio.js').BioSnapshot} p.bio
1742
+ * @param {import('../collector/canvas.js').CanvasFingerprint} p.canvas
1743
+ * @param {import('../analysis/audio.js').AudioJitter} p.audio
1744
+ * @param {string} p.nonce – server-issued challenge nonce (hex)
1745
+ * @returns {ProofPayload}
1746
+ */
1747
function buildProof({ entropy, jitter, bio, canvas, audio, nonce }) {
    // The nonce binds this proof to a single server challenge (anti-replay).
    if (!nonce || typeof nonce !== 'string') {
        throw new Error('@sovereign/pulse: nonce is required for anti-replay protection');
    }

    // Hash the raw timing arrays IN-BROWSER so we can prove their integrity
    // without transmitting the raw data.
    const timingsHash = blake3HexStr(JSON.stringify(entropy.timings));
    const memHash = blake3HexStr(JSON.stringify(entropy.memTimings));

    // Every numeric metric goes through _round$1, which maps null/undefined
    // and non-finite values to null — so optional-chained reads on missing
    // signals serialise as explicit nulls rather than being dropped.
    const payload = {
        version: 1, // payload schema version
        timestamp: entropy.collectedAt,
        nonce,

        signals: {
            // ── Entropy probe ───────────────────────────────────────────────────
            entropy: {
                timingsMean: _round$1(jitter.stats?.mean, 4),
                timingsCV: _round$1(jitter.stats?.cv, 4),
                timingsP50: _round$1(jitter.stats?.p50, 4),
                timingsP95: _round$1(jitter.stats?.p95, 4),
                timingsSkewness: _round$1(jitter.stats?.skewness, 4),
                timingsKurtosis: _round$1(jitter.stats?.kurtosis, 4),
                autocorr_lag1: _round$1(jitter.autocorrelations?.lag1, 4),
                autocorr_lag2: _round$1(jitter.autocorrelations?.lag2, 4),
                autocorr_lag5: _round$1(jitter.autocorrelations?.lag5, 4),
                autocorr_lag10: _round$1(jitter.autocorrelations?.lag10, 4),
                hurstExponent: _round$1(jitter.hurstExponent, 4),
                quantizationEntropy: _round$1(jitter.quantizationEntropy, 4),
                // 8 decimals here (vs 4 elsewhere) — the drift slope is kept
                // at higher precision than the other metrics.
                thermalDrift: _round$1(jitter.thermalSignature?.slope, 8),
                thermalPattern: jitter.thermalSignature?.pattern ?? 'unknown',
                outlierRate: _round$1(jitter.outlierRate, 4),
                timerGranularityMs: _round$1(entropy.timerGranularityMs, 6),
                checksum: entropy.checksum, // proves computation ran
                timingsHash, // proves timing array integrity
                memTimingsHash: memHash,
                iterations: entropy.iterations,
                matrixSize: entropy.matrixSize,
            },

            // ── Bio signals ─────────────────────────────────────────────────────
            bio: {
                mouseSampleCount: bio.mouse.sampleCount,
                mouseIEIMean: _round$1(bio.mouse.ieiMean, 3),
                mouseIEICV: _round$1(bio.mouse.ieiCV, 4),
                mouseVelocityP50: _round$1(bio.mouse.velocityP50, 3),
                mouseVelocityP95: _round$1(bio.mouse.velocityP95, 3),
                mouseAngularJerkMean: _round$1(bio.mouse.angularJerkMean, 4),
                pressureVariance: _round$1(bio.mouse.pressureVariance, 6),
                keyboardSampleCount: bio.keyboard.sampleCount,
                keyboardDwellMean: _round$1(bio.keyboard.dwellMean, 3),
                keyboardDwellCV: _round$1(bio.keyboard.dwellCV, 4),
                keyboardIKIMean: _round$1(bio.keyboard.ikiMean, 3),
                keyboardIKICV: _round$1(bio.keyboard.ikiCV, 4),
                interferenceCoefficient: _round$1(bio.interferenceCoefficient, 4),
                hasActivity: bio.hasActivity,
                durationMs: _round$1(bio.durationMs, 1),
            },

            // ── Canvas fingerprint ───────────────────────────────────────────────
            // Passed through verbatim — already summary values / content hashes.
            canvas: {
                webglRenderer: canvas.webglRenderer,
                webglVendor: canvas.webglVendor,
                webglVersion: canvas.webglVersion,
                webglPixelHash: canvas.webglPixelHash,
                canvas2dHash: canvas.canvas2dHash,
                extensionCount: canvas.extensionCount,
                isSoftwareRenderer: canvas.isSoftwareRenderer,
                available: canvas.available,
            },

            // ── Audio jitter ─────────────────────────────────────────────────────
            audio: {
                available: audio.available,
                workletAvailable: audio.workletAvailable,
                callbackJitterCV: _round$1(audio.callbackJitterCV, 4),
                noiseFloorMean: _round$1(audio.noiseFloorMean, 6),
                noiseFloorStd: _round$1(audio.noiseFloorStd, 6),
                sampleRate: audio.sampleRate,
                callbackCount: audio.callbackCount,
                jitterMeanMs: _round$1(audio.jitterMeanMs, 4),
                jitterP95Ms: _round$1(audio.jitterP95Ms, 4),
            },
        },

        // Top-level classification summary
        classification: {
            jitterScore: _round$1(jitter.score, 4),
            flags: jitter.flags ?? [],
        },
    };

    return payload;
}
1842
+
1843
+ /**
1844
+ * @typedef {object} ProofPayload
1845
+ * @property {number} version
1846
+ * @property {number} timestamp
1847
+ * @property {string} nonce
1848
+ * @property {object} signals
1849
+ * @property {object} classification
1850
+ */
1851
+
1852
+ // ---------------------------------------------------------------------------
1853
+ // buildCommitment
1854
+ // ---------------------------------------------------------------------------
1855
+
1856
+ /**
1857
+ * Hashes a ProofPayload into a BLAKE3 commitment.
1858
+ * Uses a deterministic canonical JSON serialiser (sorted keys) to ensure
1859
+ * byte-identical output across JS engines.
1860
+ *
1861
+ * @param {ProofPayload} payload
1862
+ * @returns {{ payload: ProofPayload, hash: string }}
1863
+ */
1864
/**
 * Produces the BLAKE3 commitment for a ProofPayload.
 * The payload is canonicalised (sorted keys, no whitespace) before hashing
 * so the digest is byte-identical across JS engines.
 * @param {ProofPayload} payload
 * @returns {{ payload: ProofPayload, hash: string }}
 */
function buildCommitment(payload) {
    const hash = blake3HexStr(canonicalJson(payload));
    return { payload, hash };
}
1869
+
1870
+ // ---------------------------------------------------------------------------
1871
+ // canonicalJson
1872
+ //
1873
+ // JSON.stringify with sorted keys — ensures the hash is engine-independent.
1874
// Note: numbers are serialised with JavaScript's default Number→string
// conversion; producers pre-round metrics (see _round$1 in buildProof),
// which keeps the emitted digits deterministic.
1876
+ // ---------------------------------------------------------------------------
1877
+
1878
/**
 * Deterministic JSON serialisation: plain-object keys are emitted in sorted
 * order (via _replacer) and no indentation is added, so the byte stream is
 * identical across JS engines for the same payload.
 * @param {object} obj
 * @returns {string}
 */
function canonicalJson(obj) {
    return JSON.stringify(obj, _replacer, 0);
}
1881
+
1882
/**
 * JSON.stringify replacer that re-emits every plain object with its keys in
 * lexicographic order. Arrays and primitives pass through unchanged.
 * Only a shallow re-keyed copy is made per node — nested objects are
 * canonicalised on their own visit by JSON.stringify.
 */
function _replacer(key, value) {
    const isPlainObject =
        value !== null && typeof value === 'object' && !Array.isArray(value);
    if (!isPlainObject) {
        return value;
    }
    const ordered = {};
    for (const k of Object.keys(value).sort()) {
        ordered[k] = value[k];
    }
    return ordered;
}
1893
+
1894
+ // ---------------------------------------------------------------------------
1895
+ // Internal utilities
1896
+ // ---------------------------------------------------------------------------
1897
+
1898
/**
 * Rounds `v` to `decimals` decimal places for payload serialisation.
 * Returns null for anything that is not a finite number (null, undefined,
 * NaN, ±Infinity, non-numeric values) so missing metrics serialise as
 * explicit nulls.
 * @param {number|null|undefined} v
 * @param {number} decimals
 * @returns {number|null}
 */
function _round$1(v, decimals) {
    // Number.isFinite (unlike the coercing global isFinite) rejects numeric
    // strings and other coercible junk instead of silently "rounding" them.
    if (typeof v !== 'number' || !Number.isFinite(v)) return null;
    const factor = 10 ** decimals;
    return Math.round(v * factor) / factor;
}
1903
+
1904
+ /**
1905
+ * @sovereign/pulse — GPU Canvas Fingerprint
1906
+ *
1907
+ * Collects device-class signals from WebGL and 2D Canvas rendering.
1908
+ * The exact pixel values of GPU-rendered scenes are vendor/driver-specific
1909
+ * due to floating-point rounding in shader execution. Virtual machines
1910
+ * expose software renderers (LLVMpipe, SwiftShader, Microsoft Basic Render
1911
+ * Driver) whose strings and output pixels are well-known and enumerable.
1912
+ *
1913
+ * NO persistent identifier is generated – only a content hash is retained.
1914
+ */
1915
+
1916
+
1917
+ // ---------------------------------------------------------------------------
1918
+ // Known software-renderer substrings (VM / headless environment indicators)
1919
+ // ---------------------------------------------------------------------------
1920
// Matched case-insensitively (substring) against the unmasked WEBGL renderer
// string. FIX: removed the bare 'angle (' pattern — ANGLE is Chrome's
// standard translation layer on Windows and wraps real hardware GPUs
// (e.g. "ANGLE (NVIDIA GeForce RTX 3080 Direct3D11 ...)"), so matching it
// flagged virtually every Windows Chrome user as a software renderer.
// SwiftShader-behind-ANGLE strings still match via 'swiftshader'.
const SOFTWARE_RENDERER_PATTERNS = [
    'llvmpipe', 'swiftshader', 'softpipe', 'mesa offscreen',
    'microsoft basic render', 'vmware svga', 'virtualbox',
    'parallels', 'google swiftshader',
];
1925
+
1926
+ // ---------------------------------------------------------------------------
1927
+ // collectCanvasFingerprint
1928
+ // ---------------------------------------------------------------------------
1929
+
1930
+ /**
1931
+ * @returns {Promise<CanvasFingerprint>}
1932
+ */
1933
/**
 * Collects device-class rendering signals from WebGL and 2D Canvas.
 * Only content hashes and renderer metadata are retained — raw pixel
 * buffers never leave this function.
 * Degrades gracefully: in DOM-less environments (Node.js) and when the
 * browser blocks a context, the corresponding fields stay null/false.
 * @returns {Promise<CanvasFingerprint>}
 */
async function collectCanvasFingerprint() {
    const result = {
        webglRenderer: null,
        webglVendor: null,
        webglVersion: null,
        webglPixelHash: null,
        canvas2dHash: null,
        extensionCount: 0,
        extensions: [],
        isSoftwareRenderer: false,
        available: false,
    };

    if (typeof document === 'undefined' && typeof OffscreenCanvas === 'undefined') {
        // Node.js / server-side with no DOM – skip gracefully.
        return result;
    }

    // ── WebGL fingerprint ────────────────────────────────────────────────────
    try {
        const canvas = _createCanvas(512, 512);
        let gl = canvas.getContext('webgl2') || canvas.getContext('webgl');

        if (gl) {
            // FIX: guard the WebGL2RenderingContext global before `instanceof`.
            // On engines that expose only WebGL1 the bare instanceof threw a
            // ReferenceError, which the outer catch swallowed — losing the
            // entire WebGL fingerprint even though a usable context existed.
            result.webglVersion =
                typeof WebGL2RenderingContext !== 'undefined' &&
                gl instanceof WebGL2RenderingContext ? 2 : 1;
            result.available = true;

            // Renderer info (unmasked strings require the debug extension)
            const dbgInfo = gl.getExtension('WEBGL_debug_renderer_info');
            if (dbgInfo) {
                result.webglRenderer = gl.getParameter(dbgInfo.UNMASKED_RENDERER_WEBGL);
                result.webglVendor = gl.getParameter(dbgInfo.UNMASKED_VENDOR_WEBGL);
            }

            // Extension list (fingerprints driver capabilities)
            const exts = gl.getSupportedExtensions() ?? [];
            result.extensions = exts;
            result.extensionCount = exts.length;

            // Software-renderer detection (substring match, case-insensitive)
            const rendererLc = (result.webglRenderer ?? '').toLowerCase();
            result.isSoftwareRenderer = SOFTWARE_RENDERER_PATTERNS.some(p =>
                rendererLc.includes(p)
            );

            // ── Render a Mandelbrot fragment scene ───────────────────────────────
            // Floating-point precision differences in the GPU's shader ALU cause
            // per-pixel rounding variations that are stable per device but differ
            // across GPU vendors, driver versions, and software renderers.
            const pixels = _renderMandelbrot(gl, canvas);
            result.webglPixelHash = pixels ? blake3Hex(pixels) : null;

            // Free the GPU context promptly rather than waiting for GC.
            gl.getExtension('WEBGL_lose_context')?.loseContext();
        }
    } catch (_) {
        // WebGL blocked (privacy settings, etc.) – continue with 2D canvas.
    }

    // ── 2D Canvas fingerprint ────────────────────────────────────────────────
    try {
        const c2 = _createCanvas(200, 50);
        const ctx2 = c2.getContext('2d');

        if (ctx2) {
            // Text rendering differences: font hinting, subpixel AA, emoji rasterisation
            ctx2.textBaseline = 'top';
            ctx2.font = '14px Arial, sans-serif';
            ctx2.fillStyle = 'rgba(102,204,0,0.7)';
            ctx2.fillText('Cwm fjordbank glyphs vext quiz 🎯', 2, 5);

            // Shadow compositing (driver-specific blur kernel)
            ctx2.shadowBlur = 10;
            ctx2.shadowColor = 'blue';
            ctx2.fillStyle = 'rgba(255,0,255,0.5)';
            ctx2.fillRect(100, 25, 80, 20);

            // Bezier curve (Bézier precision varies per 2D canvas implementation)
            ctx2.beginPath();
            ctx2.moveTo(10, 40);
            ctx2.bezierCurveTo(30, 0, 70, 80, 160, 30);
            ctx2.strokeStyle = 'rgba(0,0,255,0.8)';
            ctx2.lineWidth = 1.5;
            ctx2.stroke();

            const dataUrl = c2.toDataURL('image/png');
            // Hash the data URL (not storing raw image data)
            const enc = new TextEncoder().encode(dataUrl);
            result.canvas2dHash = blake3Hex(enc);

            result.available = true;
        }
    } catch (_) {
        // 2D canvas blocked.
    }

    return result;
}
2030
+
2031
+ /**
2032
+ * @typedef {object} CanvasFingerprint
2033
+ * @property {string|null} webglRenderer
2034
+ * @property {string|null} webglVendor
2035
+ * @property {1|2|null} webglVersion
2036
+ * @property {string|null} webglPixelHash
2037
+ * @property {string|null} canvas2dHash
2038
+ * @property {number} extensionCount
2039
+ * @property {string[]} extensions
2040
+ * @property {boolean} isSoftwareRenderer
2041
+ * @property {boolean} available
2042
+ */
2043
+
2044
+ // ---------------------------------------------------------------------------
2045
+ // Internal helpers
2046
+ // ---------------------------------------------------------------------------
2047
+
2048
/**
 * Creates a w×h drawing surface, preferring OffscreenCanvas (works in
 * workers and headless contexts) over a DOM <canvas> element.
 * @param {number} w
 * @param {number} h
 * @returns {OffscreenCanvas|HTMLCanvasElement}
 */
function _createCanvas(w, h) {
    if (typeof OffscreenCanvas === 'undefined') {
        const el = document.createElement('canvas');
        el.width = w;
        el.height = h;
        return el;
    }
    return new OffscreenCanvas(w, h);
}
2057
+
2058
+ /**
2059
+ * Render a Mandelbrot set fragment using WebGL and read back pixels.
2060
+ * The number of iterations is fixed (100) so that rounding differences in
2061
+ * the smooth-colouring formula are the primary source of per-GPU variation.
2062
+ *
2063
+ * @param {WebGLRenderingContext} gl
2064
+ * @param {HTMLCanvasElement|OffscreenCanvas} canvas
2065
+ * @returns {Uint8Array|null}
2066
+ */
2067
function _renderMandelbrot(gl, canvas) {
    const W = canvas.width;
    const H = canvas.height;

    // Vertex shader – full-screen quad
    const vsSource = `
    attribute vec4 a_pos;
    void main() { gl_Position = a_pos; }
  `;

    // Fragment shader – Mandelbrot with smooth colouring
    // Floating-point precision in the escape-radius and log() calls differs
    // between GPU vendors / drivers, producing per-device pixel signatures.
    const fsSource = `
    precision highp float;
    uniform vec2 u_res;
    void main() {
      vec2 uv = (gl_FragCoord.xy / u_res - 0.5) * 3.5;
      uv.x -= 0.5;
      vec2 c = uv;
      vec2 z = vec2(0.0);
      float n = 0.0;
      for (int i = 0; i < 100; i++) {
        if (dot(z, z) > 4.0) break;
        z = vec2(z.x*z.x - z.y*z.y, 2.0*z.x*z.y) + c;
        n += 1.0;
      }
      float smooth_n = n - log2(log2(dot(z,z))) + 4.0;
      float t = smooth_n / 100.0;
      gl_FragColor = vec4(0.5 + 0.5*cos(6.28318*t + vec3(0.0, 0.4, 0.7)), 1.0);
    }
  `;

    // Compile both stages; bail out with null on any compile failure.
    const vs = _compileShader(gl, gl.VERTEX_SHADER, vsSource);
    const fs = _compileShader(gl, gl.FRAGMENT_SHADER, fsSource);
    if (!vs || !fs) return null;

    // Link the program; null on link failure.
    const prog = gl.createProgram();
    gl.attachShader(prog, vs);
    gl.attachShader(prog, fs);
    gl.linkProgram(prog);
    if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) return null;

    gl.useProgram(prog);

    // Full-screen quad (two triangles as a strip)
    const buf = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, buf);
    gl.bufferData(gl.ARRAY_BUFFER,
        new Float32Array([-1,-1, 1,-1, -1,1, 1,1]), gl.STATIC_DRAW);
    const loc = gl.getAttribLocation(prog, 'a_pos');
    gl.enableVertexAttribArray(loc);
    gl.vertexAttribPointer(loc, 2, gl.FLOAT, false, 0, 0);

    // Resolution uniform drives the fragment-space → complex-plane mapping.
    const resLoc = gl.getUniformLocation(prog, 'u_res');
    gl.uniform2f(resLoc, W, H);

    gl.viewport(0, 0, W, H);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

    // Read back a 64×64 centre crop (reduces data without losing discriminating power)
    const x0 = Math.floor((W - 64) / 2);
    const y0 = Math.floor((H - 64) / 2);
    const pixels = new Uint8Array(64 * 64 * 4); // RGBA, 4 bytes per pixel
    gl.readPixels(x0, y0, 64, 64, gl.RGBA, gl.UNSIGNED_BYTE, pixels);

    return pixels;
}
2135
+
2136
/**
 * Compiles a shader of the given type from source.
 * @param {WebGLRenderingContext} gl
 * @param {number} type - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @param {string} source - GLSL source text
 * @returns {WebGLShader|null} the shader, or null on compile failure
 */
function _compileShader(gl, type, source) {
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
        return shader;
    }
    return null;
}
2142
+
2143
+ /**
2144
+ * @sovereign/pulse — AudioContext Oscillator Jitter
2145
+ *
2146
+ * Measures the scheduling jitter of the browser's audio pipeline.
2147
+ * Real audio hardware callbacks are driven by a hardware interrupt (IRQ)
2148
+ * from the sound card; the timing reflects the actual interrupt latency
2149
+ * of the physical device. VM audio drivers (if present at all) are
2150
+ * emulated and show either unrealistically low jitter or burst-mode
2151
+ * scheduling artefacts that are statistically distinguishable.
2152
+ */
2153
+
2154
+ /**
2155
+ * @param {object} [opts]
2156
+ * @param {number} [opts.durationMs=2000] - how long to collect audio callbacks
2157
+ * @param {number} [opts.bufferSize=256] - ScriptProcessorNode buffer size
2158
+ * @returns {Promise<AudioJitter>}
2159
+ */
2160
/**
 * Measures the scheduling jitter of the browser's audio pipeline.
 * Prefers an AudioWorklet probe (dedicated real-time thread) and falls
 * back to a ScriptProcessorNode. Returns summary statistics only.
 * @param {object} [opts]
 * @param {number} [opts.durationMs=2000] - how long to collect audio callbacks
 * @param {number} [opts.bufferSize=256] - ScriptProcessorNode buffer size
 * @returns {Promise<AudioJitter>}
 */
async function collectAudioJitter(opts = {}) {
    const { durationMs = 2000, bufferSize = 256 } = opts;

    // Fallback result for environments where audio cannot run.
    // FIX: now carries every summary field the success path returns
    // (noiseFloorStd, expectedIntervalMs, jitterMeanMs, jitterP95Ms) so
    // consumers such as buildProof always see a consistent shape instead
    // of reading `undefined` on the fallback path.
    const base = {
        available: false,
        workletAvailable: false,
        callbackJitterCV: 0,
        noiseFloorMean: 0,
        noiseFloorStd: 0,
        sampleRate: 0,
        callbackCount: 0,
        expectedIntervalMs: 0,
        jitterMeanMs: 0,
        jitterP95Ms: 0,
        jitterTimings: [],
    };

    if (typeof AudioContext === 'undefined' && typeof webkitAudioContext === 'undefined') {
        return base; // Node.js / server environment
    }

    let ctx;
    try {
        ctx = new (window.AudioContext || window.webkitAudioContext)();
    } catch (_) {
        return base;
    }

    // Some browsers require a user gesture before AudioContext can run.
    if (ctx.state === 'suspended') {
        try {
            await ctx.resume();
        } catch (_) {
            await ctx.close().catch(() => {});
            return base;
        }
    }

    const sampleRate = ctx.sampleRate;
    const expectedInterval = (bufferSize / sampleRate) * 1000; // ms per callback

    const jitterTimings = []; // absolute AudioContext.currentTime (ms) at each callback
    const callbackDeltas = [];

    await new Promise((resolve) => {
        // ── AudioWorklet (preferred — runs on dedicated real-time thread) ──────
        const useWorklet = typeof AudioWorkletNode !== 'undefined';
        base.workletAvailable = useWorklet;

        if (useWorklet) {
            // Inline worklet: send currentTime back via MessagePort every buffer
            const workletCode = `
        class PulseProbe extends AudioWorkletProcessor {
          process(inputs, outputs) {
            this.port.postMessage({ t: currentTime });
            // Pass-through silence
            for (const out of outputs)
              for (const ch of out) ch.fill(0);
            return true;
          }
        }
        registerProcessor('pulse-probe', PulseProbe);
      `;
            const blob = new Blob([workletCode], { type: 'application/javascript' });
            const blobUrl = URL.createObjectURL(blob);

            ctx.audioWorklet.addModule(blobUrl).then(() => {
                const node = new AudioWorkletNode(ctx, 'pulse-probe');
                node.port.onmessage = (e) => {
                    jitterTimings.push(e.data.t * 1000); // convert to ms
                };
                node.connect(ctx.destination);

                setTimeout(() => {
                    node.disconnect();
                    URL.revokeObjectURL(blobUrl);
                    resolve(node);
                }, durationMs);
            }).catch(() => {
                // Worklet module failed to load — fall back to ScriptProcessorNode.
                URL.revokeObjectURL(blobUrl);
                _fallbackScriptProcessor(ctx, bufferSize, durationMs, jitterTimings, resolve);
            });

        } else {
            _fallbackScriptProcessor(ctx, bufferSize, durationMs, jitterTimings, resolve);
        }
    });

    // ── Compute deltas between successive callback times ────────────────────
    for (let i = 1; i < jitterTimings.length; i++) {
        callbackDeltas.push(jitterTimings[i] - jitterTimings[i - 1]);
    }

    // ── Noise floor via AnalyserNode ─────────────────────────────────────────
    // Feed a silent source through an analyser; the FFT magnitude at silence
    // reveals the hardware's noise floor.
    const noiseFloor = await _measureNoiseFloor(ctx);

    await ctx.close().catch(() => {});

    // ── Statistics: mean, sample variance, coefficient of variation ─────────
    const mean = callbackDeltas.length
        ? callbackDeltas.reduce((s, v) => s + v, 0) / callbackDeltas.length
        : 0;
    const variance = callbackDeltas.length > 1
        ? callbackDeltas.reduce((s, v) => s + (v - mean) ** 2, 0) / (callbackDeltas.length - 1)
        : 0;
    const jitterCV = mean > 0 ? Math.sqrt(variance) / mean : 0;

    return {
        available: true,
        workletAvailable: base.workletAvailable,
        callbackJitterCV: jitterCV,
        noiseFloorMean: noiseFloor.mean,
        noiseFloorStd: noiseFloor.std,
        sampleRate,
        callbackCount: jitterTimings.length,
        expectedIntervalMs: expectedInterval,
        // Only include summary stats, not raw timings (privacy / size)
        jitterMeanMs: mean,
        jitterP95Ms: _percentile(callbackDeltas, 95),
    };
}
2279
+
2280
+ /**
2281
+ * @typedef {object} AudioJitter
2282
+ * @property {boolean} available
2283
+ * @property {boolean} workletAvailable
2284
+ * @property {number} callbackJitterCV
2285
+ * @property {number} noiseFloorMean
2286
+ * @property {number} sampleRate
2287
+ * @property {number} callbackCount
2288
+ */
2289
+
2290
+ // ---------------------------------------------------------------------------
2291
+ // Internal helpers
2292
+ // ---------------------------------------------------------------------------
2293
+
2294
/**
 * Legacy probe path: samples audio-callback times with a (deprecated but
 * universally supported) ScriptProcessorNode, pushing ctx.currentTime in
 * milliseconds into `jitterTimings` until `durationMs` elapses, then
 * tears the graph down and resolves.
 */
function _fallbackScriptProcessor(ctx, bufferSize, durationMs, jitterTimings, resolve) {
    const processor = ctx.createScriptProcessor(bufferSize, 1, 1);
    processor.onaudioprocess = () => {
        jitterTimings.push(ctx.currentTime * 1000);
    };

    // A running oscillator keeps the audio graph alive for the whole window.
    const oscillator = ctx.createOscillator();
    oscillator.frequency.value = 1; // sub-audible
    oscillator.connect(processor);
    processor.connect(ctx.destination);
    oscillator.start();

    setTimeout(() => {
        oscillator.stop();
        oscillator.disconnect();
        processor.disconnect();
        resolve(processor);
    }, durationMs);
}
2314
+
2315
/**
 * Estimates the audio noise floor: plays a silent buffer through an
 * AnalyserNode, then converts the first 32 FFT bins from dB to linear
 * magnitude and returns their mean and standard deviation.
 * Any failure (blocked audio, bad context) yields { mean: 0, std: 0 }.
 * @param {AudioContext} ctx
 * @returns {Promise<{mean: number, std: number}>}
 */
async function _measureNoiseFloor(ctx) {
    try {
        const analyser = ctx.createAnalyser();
        analyser.fftSize = 256;
        analyser.connect(ctx.destination);

        // Silent source (zero-filled 100 ms buffer)
        const silent = ctx.createBuffer(1, ctx.sampleRate * 0.1, ctx.sampleRate);
        const source = ctx.createBufferSource();
        source.buffer = silent;
        source.connect(analyser);
        source.start();

        await new Promise((done) => setTimeout(done, 150));

        const bins = new Float32Array(analyser.frequencyBinCount);
        analyser.getFloatFrequencyData(bins);
        analyser.disconnect();

        // Keep only 32 bins (payload size) and convert dB → linear magnitude.
        const linear = Array.from(bins.slice(0, 32), (v) =>
            isFinite(v) ? Math.pow(10, v / 20) : 0
        );
        const mean = linear.reduce((acc, v) => acc + v, 0) / linear.length;
        const variance =
            linear.reduce((acc, v) => acc + (v - mean) ** 2, 0) / linear.length;
        return { mean, std: Math.sqrt(variance) };
    } catch (_) {
        return { mean: 0, std: 0 };
    }
}
2347
+
2348
/**
 * Linear-interpolated percentile of `arr`.
 * @param {number[]} arr - unsorted samples (not mutated)
 * @param {number} p - percentile in [0, 100]
 * @returns {number} interpolated value, or 0 for empty input
 */
function _percentile(arr, p) {
    if (arr.length === 0) return 0;
    const sorted = arr.slice().sort((x, y) => x - y);
    const rank = (p / 100) * (sorted.length - 1);
    const below = Math.floor(rank);
    const above = Math.ceil(rank);
    const frac = rank - below;
    return sorted[below] + (sorted[above] - sorted[below]) * frac;
}
2356
+
2357
+ /**
2358
+ * @sovereign/pulse — Cross-Metric Heuristic Engine
2359
+ *
2360
+ * Instead of checking individual thresholds in isolation, this module looks
2361
+ * at the *relationships* between metrics. A sophisticated adversary can spoof
2362
+ * any single number. Spoofing six metrics so they remain mutually consistent
2363
+ * with physical laws is exponentially harder.
2364
+ *
2365
+ * Three core insights drive this engine:
2366
+ *
2367
+ * 1. Entropy-Jitter Coherence
2368
+ * Real silicon gets noisier as it heats up. Under sustained load, the
2369
+ * Quantization Entropy of the timing distribution grows because thermal
2370
+ * fluctuations add variance. A VM's hypervisor clock doesn't care about
2371
+ * guest temperature — its entropy is flat across all load phases.
2372
+ *
2373
+ * 2. Hurst-Autocorrelation Coherence
2374
+ * Genuine Brownian noise has Hurst ≈ 0.5 and near-zero autocorrelation
2375
+ * at all lags. These two values are physically linked. If they diverge —
2376
+ * high autocorrelation but Hurst near 0.5, or vice versa — the timings
2377
+ * were generated, not measured.
2378
+ *
2379
+ * 3. CV-Entropy Coherence
2380
+ * High variance (CV) must come from somewhere. On real hardware, high CV
2381
+ * means the timing distribution is spread out, which also means high
2382
+ * entropy. A VM that inflates CV without inflating entropy (e.g. by
2383
+ * adding synthetic outliers at fixed offsets) produces a coherence gap.
2384
+ */
2385
+
2386
+
2387
+ // ---------------------------------------------------------------------------
2388
+ // runHeuristicEngine
2389
+ // ---------------------------------------------------------------------------
2390
+
2391
+ /**
2392
+ * @param {object} p
2393
+ * @param {import('./jitter.js').JitterAnalysis} p.jitter
2394
+ * @param {object|null} p.phases - from entropy collector
2395
+ * @param {object} p.autocorrelations
2396
+ * @returns {HeuristicReport}
2397
+ */
2398
+ function runHeuristicEngine({ jitter, phases, autocorrelations }) {
2399
+ const findings = [];
2400
+ const bonuses = [];
2401
+ let penalty = 0; // accumulated penalty [0, 1]
2402
+ let bonus = 0; // accumulated bonus [0, 1]
2403
+ let hardOverride = null; // 'vm' | null — bypasses score entirely when set
2404
+
2405
+ const stats = jitter.stats;
2406
+ if (!stats) return _empty$1();
2407
+
2408
+ // ── 1. Entropy-Jitter Ratio (phases required) ────────────────────────────
2409
+ let entropyJitterRatio = null;
2410
+ let entropyJitterScore = 0.5; // neutral if no phased data
2411
+
2412
+ if (phases) {
2413
+ entropyJitterRatio = phases.entropyJitterRatio;
2414
+
2415
+ const coldQE = phases.cold?.qe ?? null;
2416
+ const hotQE = phases.hot?.qe ?? null;
2417
+
2418
+ // ── HARD KILL: Phase trajectory mathematical contradiction ──────────────
2419
+ //
2420
+ // EJR is defined as: entropyJitterRatio = hot_QE / cold_QE
2421
+ //
2422
+ // Before trusting any EJR value, verify it is internally consistent with
2423
+ // the QE measurements it purports to summarise. Two forgery vectors exist:
2424
+ //
2425
+ // Attack A — EJR field overwritten independently:
2426
+ // Attacker sets entropyJitterRatio = 1.15 to claim thermal growth,
2427
+ // but leaves cold_QE = 3.50, hot_QE = 3.00 unchanged.
2428
+ // Computed EJR = 3.00 / 3.50 = 0.857.
2429
+ // Discrepancy 1.15 − 0.857 = 0.293 >> 0.005 tolerance → HARD KILL.
2430
+ //
2431
+ // Attack B — QE values also faked but left inconsistent:
2432
+ // Attacker overwrites both QE fields carelessly: cold_QE = 3.5,
2433
+ // hot_QE = 3.0, but EJR = 1.15 is still written.
2434
+ // cold_QE ≥ hot_QE with EJR ≥ 1.08 is a mathematical impossibility —
2435
+ // if hot ≤ cold then hot/cold ≤ 1.0, which can never be ≥ 1.08.
2436
+ //
2437
+ // Tolerance of 0.005 accounts for floating-point rounding in the entropy
2438
+ // collector (detectQuantizationEntropy uses discrete histogram bins).
2439
+ //
2440
+ // When HARD KILL fires:
2441
+ // • hardOverride = 'vm' — fingerprint.js short-circuits isSynthetic
2442
+ // • entropyJitterScore = 0.0 — no EJR contribution to stage-2 bonus
2443
+ // • penalty += 1.0 — overwhelms the physical floor cap
2444
+ // • No further EJR evaluation runs (the data cannot be trusted)
2445
+ // • The physical floor protection is explicitly bypassed (see aggregate)
2446
+
2447
+ if (coldQE !== null && hotQE !== null) {
2448
+ const computedEJR = coldQE > 0 ? hotQE / coldQE : null;
2449
+ const fieldTampered = computedEJR !== null &&
2450
+ Math.abs(entropyJitterRatio - computedEJR) > 0.005;
2451
+ const qeContradicts = entropyJitterRatio >= 1.08 && coldQE >= hotQE;
2452
+
2453
+ if (fieldTampered || qeContradicts) {
2454
+ hardOverride = 'vm';
2455
+ entropyJitterScore = 0.0;
2456
+ findings.push({
2457
+ id: 'EJR_PHASE_HARD_KILL',
2458
+ label: fieldTampered
2459
+ ? 'HARD KILL: stored EJR is inconsistent with cold/hot QE values — phase data tampered'
2460
+ : 'HARD KILL: EJR ≥ 1.08 claims entropy growth but cold_QE ≥ hot_QE — physically impossible',
2461
+ detail: `ejr_stored=${entropyJitterRatio.toFixed(4)} ` +
2462
+ `ejr_computed=${computedEJR?.toFixed(4) ?? 'n/a'} ` +
2463
+ `cold_QE=${coldQE.toFixed(4)} hot_QE=${hotQE.toFixed(4)} ` +
2464
+ `delta=${computedEJR != null ? Math.abs(entropyJitterRatio - computedEJR).toFixed(4) : 'n/a'}`,
2465
+ severity: 'critical',
2466
+ penalty: 1.0,
2467
+ });
2468
+ penalty += 1.0;
2469
+
2470
+ } else {
2471
+ // QE values confirmed consistent — proceed with normal EJR evaluation.
2472
+ _evaluateEJR(entropyJitterRatio, coldQE, hotQE, findings, bonuses,
2473
+ (p) => { penalty += p; }, (b) => { bonus += b; },
2474
+ (s) => { entropyJitterScore = s; });
2475
+ }
2476
+
2477
+ } else {
2478
+ // QE values unavailable — evaluate EJR ratio in isolation.
2479
+ _evaluateEJR(entropyJitterRatio, null, null, findings, bonuses,
2480
+ (p) => { penalty += p; }, (b) => { bonus += b; },
2481
+ (s) => { entropyJitterScore = s; });
2482
+ }
2483
+
2484
+ // Phase mean drift: real CPU heats up → iterations get slower.
2485
+ // Only apply if hard kill wasn't already triggered.
2486
+ if (!hardOverride) {
2487
+ const coldToHotDrift = (phases.hot?.mean ?? 0) - (phases.cold?.mean ?? 0);
2488
+ if (coldToHotDrift > 0.05) {
2489
+ bonuses.push({
2490
+ id: 'THERMAL_DRIFT_CONFIRMED',
2491
+ label: 'CPU mean timing increased from cold to hot phase (thermal drift)',
2492
+ detail: `cold=${phases.cold.mean.toFixed(3)}ms hot=${phases.hot.mean.toFixed(3)}ms Δ=${coldToHotDrift.toFixed(3)}ms`,
2493
+ value: 0.08,
2494
+ });
2495
+ bonus += 0.08;
2496
+ }
2497
+ }
2498
+ }
2499
+
2500
+ // ── 2. Hurst-Autocorrelation Coherence ───────────────────────────────────
2501
+ const h = jitter.hurstExponent ?? 0.5;
2502
+ const ac1 = Math.abs(autocorrelations?.lag1 ?? 0);
2503
+ const ac5 = Math.abs(autocorrelations?.lag5 ?? 0);
2504
+ Math.abs(autocorrelations?.lag50 ?? 0);
2505
+
2506
+ // Physical law: Brownian noise (H≈0.5) must have low autocorrelation.
2507
+ // Divergence between these two means the data wasn't generated by physics.
2508
+ const hurstExpectedAC = Math.abs(2 * h - 1); // theoretical max |autocorr| for given H
2509
+ const actualAC = (ac1 + ac5) / 2;
2510
+ const acHurstDivergence = Math.abs(actualAC - hurstExpectedAC);
2511
+
2512
+ if (acHurstDivergence > 0.35) {
2513
+ findings.push({
2514
+ id: 'HURST_AUTOCORR_INCOHERENT',
2515
+ label: 'Hurst exponent and autocorrelation are physically inconsistent',
2516
+ detail: `H=${h.toFixed(3)} expected_AC≈${hurstExpectedAC.toFixed(3)} actual_AC=${actualAC.toFixed(3)} divergence=${acHurstDivergence.toFixed(3)}`,
2517
+ severity: 'high',
2518
+ penalty: 0.12,
2519
+ });
2520
+ penalty += 0.12;
2521
+ } else if (h > 0.45 && h < 0.55 && ac1 < 0.15) {
2522
+ // Ideal Brownian + low autocorr — physically coherent
2523
+ bonuses.push({
2524
+ id: 'BROWNIAN_COHERENCE_CONFIRMED',
2525
+ label: 'Hurst ≈ 0.5 and autocorrelation near zero — genuine Brownian noise',
2526
+ detail: `H=${h.toFixed(3)} lag1_AC=${ac1.toFixed(3)}`,
2527
+ value: 0.10,
2528
+ });
2529
+ bonus += 0.10;
2530
+ }
2531
+
2532
+ // ── 3. CV-Entropy Coherence ───────────────────────────────────────────────
2533
+ // High CV should correlate with high QE. If CV is high but QE is low,
2534
+ // the variance was added artificially (fixed-offset outliers, synthetic spikes).
2535
+ const cv = stats.cv;
2536
+ const qe = jitter.quantizationEntropy;
2537
+
2538
+ // Expected QE given CV, assuming roughly normal distribution
2539
+ // Normal dist with σ/μ = CV: entropy ≈ log2(σ * sqrt(2πe)) + log2(n/binWidth)
2540
+ // We use a simplified linear proxy calibrated against real benchmarks.
2541
+ const expectedQE = Math.max(0, 1.5 + cv * 16); // empirical: CV=0.15 → QE≈3.9
2542
+ const qeDivergence = expectedQE - qe; // positive = QE lower than expected
2543
+
2544
+ if (qeDivergence > 1.8 && cv > 0.05) {
2545
+ // High variance but low entropy: synthetic outliers at fixed offsets
2546
+ findings.push({
2547
+ id: 'CV_ENTROPY_INCOHERENT',
2548
+ label: 'High CV but low entropy — variance appears synthetic (fixed-offset outliers)',
2549
+ detail: `CV=${cv.toFixed(4)} QE=${qe.toFixed(3)} bits expected_QE≈${expectedQE.toFixed(3)} gap=${qeDivergence.toFixed(3)}`,
2550
+ severity: 'high',
2551
+ penalty: 0.10,
2552
+ });
2553
+ penalty += 0.10;
2554
+ } else if (qeDivergence < 0.5 && cv > 0.08) {
2555
+ // CV and QE are coherent — timings come from a real distribution
2556
+ bonuses.push({
2557
+ id: 'CV_ENTROPY_COHERENT',
2558
+ label: 'Variance and entropy are physically coherent',
2559
+ detail: `CV=${cv.toFixed(4)} QE=${qe.toFixed(3)} expected≈${expectedQE.toFixed(3)}`,
2560
+ value: 0.06,
2561
+ });
2562
+ bonus += 0.06;
2563
+ }
2564
+
2565
+ // ── 4. Steal-time periodicity (the "Picket Fence" detector) ─────────────
2566
+ // VM steal-time bursts create a periodic signal in the autocorrelation.
2567
+ // If lag-50 autocorrelation is significantly higher than lag-5,
2568
+ // the scheduler quantum is approximately 50× the mean iteration time.
2569
+ const picketFence = _detectPicketFence(autocorrelations);
2570
+ if (picketFence.detected) {
2571
+ findings.push({
2572
+ id: 'PICKET_FENCE_DETECTED',
2573
+ label: `"Picket Fence" steal-time rhythm detected at lag ${picketFence.dominantLag}`,
2574
+ detail: picketFence.detail,
2575
+ severity: 'high',
2576
+ penalty: 0.08,
2577
+ });
2578
+ penalty += 0.08;
2579
+ }
2580
+
2581
+ // ── 5. Skewness-Kurtosis coherence ───────────────────────────────────────
2582
+ // Real hardware timing is right-skewed (occasional slow outliers from OS preemption).
2583
+ // VMs that add synthetic outliers at fixed offsets produce wrong skew/kurtosis.
2584
+ const skew = stats.skewness ?? 0;
2585
+ const kurt = stats.kurtosis ?? 0;
2586
+
2587
+ if (skew > 0.3 && kurt > 0) {
2588
+ // Right-skewed, leptokurtic — consistent with OS preemption on real hardware
2589
+ bonuses.push({
2590
+ id: 'NATURAL_SKEW_CONFIRMED',
2591
+ label: 'Right-skewed distribution with positive kurtosis — OS preemption pattern',
2592
+ detail: `skew=${skew.toFixed(3)} kurtosis=${kurt.toFixed(3)}`,
2593
+ value: 0.06,
2594
+ });
2595
+ bonus += 0.06;
2596
+ } else if (skew < 0 && Math.abs(kurt) > 1) {
2597
+ // Negative skew with high kurtosis: inconsistent with physical timing noise
2598
+ findings.push({
2599
+ id: 'SKEW_KURTOSIS_ANOMALY',
2600
+ label: 'Left-skewed distribution — inconsistent with natural hardware timing',
2601
+ detail: `skew=${skew.toFixed(3)} kurtosis=${kurt.toFixed(3)}`,
2602
+ severity: 'medium',
2603
+ penalty: 0.06,
2604
+ });
2605
+ penalty += 0.06;
2606
+ }
2607
+
2608
+ // ── Physical floor protection (anti-compounding) ──────────────────────────
2609
+ // When the three PRIMARY timing metrics are clearly consistent with real
2610
+ // silicon, cap the penalty so that marginal secondary signals (weak Picket
2611
+ // Fence, mild EJR, slight skew anomaly) cannot compound into a rejection.
2612
+ //
2613
+ // Why: a modern i7 laptop running heavy browser extensions may show:
2614
+ // EJR = 1.01 → -0.10 penalty (just under the 1.02 threshold)
2615
+ // lag50 = 0.31 → picket fence → -0.08 penalty (background process rhythm)
2616
+ // slight negative skew → -0.06 penalty
2617
+ // total: -0.24, drops score from 0.73 → 0.49 → wrongly flagged as synthetic
2618
+ //
2619
+ // Solution: if ≥ 2 of the 3 primary metrics are unambiguously physical,
2620
+ // treat the device as "probably physical with some noise" and limit the
2621
+ // penalty to 0.22 (enough to lower confidence but not enough to reject).
2622
+ const clearQE = jitter.quantizationEntropy > 3.2;
2623
+ const clearCV = stats.cv >= 0.05 && stats.cv <= 0.30;
2624
+ const clearLag1 = Math.abs(autocorrelations?.lag1 ?? 1) < 0.22;
2625
+ const clearPhysicalCount = [clearQE, clearCV, clearLag1].filter(Boolean).length;
2626
+
2627
+ // Also check: if at least one metric is a HARD VM indicator (QE < 2.0 or
2628
+ // lag1 > 0.65), override the floor — the floor is for borderline noise, not
2629
+ // for devices that are clearly VMs on at least one axis.
2630
+ const hardVmSignal =
2631
+ jitter.quantizationEntropy < 2.0 ||
2632
+ Math.abs(autocorrelations?.lag1 ?? 0) > 0.65;
2633
+
2634
+ const penaltyCap = (!hardVmSignal && clearPhysicalCount >= 2)
2635
+ ? 0.22 // physical floor: cap compounding for clearly physical devices
2636
+ : 0.60; // default: full penalty range for ambiguous or VM-like signals
2637
+
2638
+ // HARD KILL overrides the physical floor protection entirely.
2639
+ // The floor was designed to protect legitimate hardware with multiple
2640
+ // marginal-but-honest signals — it must never shelter a forged proof.
2641
+ const totalPenalty = hardOverride === 'vm'
2642
+ ? Math.min(1.0, penalty) // hard kill: uncapped, overwhelms all bonuses
2643
+ : Math.min(penaltyCap, penalty); // normal: apply floor protection
2644
+
2645
+ // When a hard kill is active, strip all bonuses — they were earned on
2646
+ // data that has been proved untrustworthy.
2647
+ const totalBonus = hardOverride === 'vm' ? 0 : Math.min(0.35, bonus);
2648
+
2649
+ return {
2650
+ penalty: totalPenalty,
2651
+ bonus: totalBonus,
2652
+ netAdjustment: totalBonus - totalPenalty,
2653
+ findings,
2654
+ bonuses: hardOverride === 'vm' ? [] : bonuses,
2655
+ entropyJitterRatio,
2656
+ entropyJitterScore,
2657
+ picketFence,
2658
+ hardOverride,
2659
+ coherenceFlags: findings.map(f => f.id),
2660
+ };
2661
+ }
2662
+
2663
/**
 * @typedef {object} HeuristicReport
 * @property {number} penalty - total score penalty [0, 1.0]
 * @property {number} bonus - total score bonus [0, 0.35]
 * @property {number} netAdjustment - bonus - penalty
 * @property {object[]} findings - detected anomalies
 * @property {object[]} bonuses - confirmed physical properties
 * @property {number|null} entropyJitterRatio
 * @property {number} entropyJitterScore - EJR classification score [0, 1] (0.5 = neutral)
 * @property {'vm'|null} hardOverride - set when a mathematical impossibility is detected
 * @property {object} picketFence
 * @property {string[]} coherenceFlags
 */
2675
+
2676
+ // ---------------------------------------------------------------------------
2677
+ // EJR evaluation helper (extracted so it can run with or without QE values)
2678
+ // ---------------------------------------------------------------------------
2679
+
2680
/**
 * Applies the normal EJR classification logic (called only after the hard-kill
 * check passes, meaning the EJR value has been verified as consistent).
 *
 * Classification bands:
 *   ejr ≥ 1.08 → entropy grew under load (thermal feedback) → score 1.0, +0.12 bonus
 *   ejr ≥ 1.02 → weak growth, informational only            → score 0.7, no adjustment
 *   ejr > 0.95 → flat entropy (hypervisor clock suspected)  → score 0.2, 0.10 penalty
 *   else       → entropy shrank (clock rounding confirmed)  → score 0.0, 0.18 penalty
 *
 * @param {number} ejr - entropy-jitter ratio (hot_QE / cold_QE)
 * @param {number|null} coldQE - cold-phase quantization entropy, if available
 * @param {number|null} hotQE - hot-phase quantization entropy, if available
 * @param {object[]} findings - mutated: anomaly records are appended
 * @param {object[]} bonuses - mutated: physical-property records are appended
 * @param {function(number): void} addPenalty - accumulates penalty in caller scope
 * @param {function(number): void} addBonus - accumulates bonus in caller scope
 * @param {function(number): void} setScore - publishes entropyJitterScore [0, 1]
 */
function _evaluateEJR(ejr, coldQE, hotQE, findings, bonuses, addPenalty, addBonus, setScore) {
  // Robustness fix: only format the QE detail when BOTH values are present.
  // Previously a non-null coldQE paired with a null hotQE threw on .toFixed().
  const qeDetail = (coldQE != null && hotQE != null)
    ? `cold_QE=${coldQE.toFixed(3)} hot_QE=${hotQE.toFixed(3)}`
    : '';

  if (ejr >= 1.08) {
    setScore(1.0);
    bonuses.push({
      id: 'ENTROPY_GROWS_WITH_LOAD',
      label: 'Entropy grew under load — thermal feedback confirmed',
      detail: `ratio=${ejr.toFixed(3)} ${qeDetail}`,
      value: 0.12,
    });
    addBonus(0.12);

  } else if (ejr >= 1.02) {
    // Weak growth — recorded for diagnostics, no score adjustment.
    setScore(0.7);
    findings.push({
      id: 'ENTROPY_MILD_GROWTH',
      label: 'Weak entropy growth under load',
      detail: `ratio=${ejr.toFixed(3)} ${qeDetail}`,
      severity: 'info',
      penalty: 0,
    });

  } else if (ejr > 0.95) {
    // Flat entropy — hypervisor clock unresponsive to guest load
    setScore(0.2);
    findings.push({
      id: 'ENTROPY_FLAT_UNDER_LOAD',
      label: 'Entropy did not grow under load — hypervisor clock suspected',
      detail: `ratio=${ejr.toFixed(3)} (expected ≥ 1.08 for real hardware) ${qeDetail}`,
      severity: 'high',
      penalty: 0.10,
    });
    addPenalty(0.10);

  } else {
    // Entropy DECREASED — hypervisor clock rounding became more aggressive
    setScore(0.0);
    findings.push({
      id: 'ENTROPY_DECREASES_UNDER_LOAD',
      label: 'Entropy shrank under load — hypervisor clock-rounding confirmed',
      detail: `ratio=${ejr.toFixed(3)} (clock rounding more aggressive at high load) ${qeDetail}`,
      severity: 'critical',
      penalty: 0.18,
    });
    addPenalty(0.18);
  }
}
2734
+
2735
+ // ---------------------------------------------------------------------------
2736
+ // Picket Fence detector
2737
+ // ---------------------------------------------------------------------------
2738
+
2739
/**
 * Detects periodic steal-time bursts by finding the lag with the highest
 * autocorrelation beyond lag-5. A strong periodic peak indicates the
 * hypervisor is scheduling the guest on a fixed quantum.
 *
 * Named "Picket Fence" because of how the timing histogram looks: dense
 * clusters at fixed intervals with empty space between them — like fence posts.
 *
 * @param {object|null|undefined} autocorrelations - map of lagN → coefficient
 * @returns {{detected: boolean, dominantLag: number|null, peakAC: number,
 *            baseline: number, detail: string}}
 */
function _detectPicketFence(autocorrelations) {
  // Absolute autocorrelation at a given lag; missing lags count as zero.
  const absAt = (lag) => Math.abs(autocorrelations?.[`lag${lag}`] ?? 0);

  // Baseline = mean of the short-lag coefficients (lags 3 and 5).
  const baseline = (absAt(5) + absAt(3)) / 2;

  // Scan the long lags for the strongest peak; strict '>' keeps the
  // earliest candidate on ties (same tie-breaking as a reduce with '>').
  let peakLag = 0;
  let peakAC = 0;
  for (const lag of [10, 25, 50]) {
    const coefficient = absAt(lag);
    if (coefficient > peakAC) {
      peakLag = lag;
      peakAC = coefficient;
    }
  }

  // "Picket fence" condition: a long-lag autocorr significantly exceeds baseline
  const isPeriodic = peakAC > 0.30 && peakAC > baseline + 0.20;
  if (!isPeriodic) {
    return { detected: false, dominantLag: null, peakAC, baseline, detail: '' };
  }

  return {
    detected: true,
    dominantLag: peakLag,
    peakAC,
    baseline,
    detail: `lag${peakLag}_AC=${peakAC.toFixed(3)} baseline_AC=${baseline.toFixed(3)} ` +
            `estimated_quantum≈${(peakLag * 5).toFixed(0)}ms (at 5ms/iter)`,
  };
}
2773
+
2774
/**
 * Builds the neutral HeuristicReport used when fewer than 10 timing samples
 * are available: no penalties, no bonuses, and a midpoint EJR score of 0.5
 * (neither physical nor VM evidence).
 *
 * @returns {HeuristicReport}
 */
function _empty$1() {
  const neutralReport = {
    penalty: 0,
    bonus: 0,
    netAdjustment: 0,
    findings: [],
    bonuses: [],
    entropyJitterRatio: null,
    entropyJitterScore: 0.5, // neutral midpoint
    hardOverride: null,
    picketFence: { detected: false },
    coherenceFlags: [],
  };
  return neutralReport;
}
2784
+
2785
+ /**
2786
+ * @sovereign/pulse — Zero-Latency Second-Stage Coherence Analysis
2787
+ *
2788
+ * Runs entirely on data already collected by the entropy probe, bio
2789
+ * collector, canvas fingerprinter, and audio analyser.
2790
+ * Adds approximately 1–3 ms of CPU time. Zero WASM, zero network.
2791
+ *
2792
+ * Architecture:
2793
+ * Stage 1 — classifyJitter() → rawScore [0, 1]
2794
+ * Stage 2 — runHeuristicEngine() → netAdjustment (physics coherence)
2795
+ * Stage 3 — runCoherenceAnalysis() → THIS MODULE
2796
+ * ↳ small score refinement [-0.15, +0.18]
2797
+ * ↳ dynamic threshold [0.55, 0.67]
2798
+ * ↳ hard override 'vm' | null
2799
+ *
2800
+ * Why a third stage?
2801
+ * Stage 1 checks individual metrics in isolation.
2802
+ * Stage 2 checks pairwise relationships between metrics.
2803
+ * Stage 3 checks STRUCTURAL properties of the entire time-series and
2804
+ * signal evolution that require the full dataset to evaluate — and that
2805
+ * a sophisticated attacker cannot spoof without also spoofing every
2806
+ * other correlated signal simultaneously.
2807
+ *
2808
+ * The six checks cover orthogonal signal dimensions so they are hard to
2809
+ * spoof together even if one is individually defeated:
2810
+ *
2811
+ * 1. Timing distinctness — frequency domain (quantization density)
2812
+ * 2. AC decay shape — temporal domain (Brownian vs harmonic)
2813
+ * 3. Chunk CV stability — stationarity axis (thermal non-stationarity)
2814
+ * 4. Level-dependent noise — noise model axis (multiplicative vs additive)
2815
+ * 5. Batch convergence — measurement stability (adaptive mode)
2816
+ * 6. Phase trajectory — EJR monotonicity (thermal sequence integrity)
2817
+ *
2818
+ * Dynamic threshold:
2819
+ * The evidence weight reflects how much data was actually collected.
2820
+ * An early-exit proof with 50 iterations and no bio activity has far less
2821
+ * support than a 200-iteration proof with bio, audio, and phased data.
2822
+ * The threshold rises automatically as evidence decreases:
2823
+ * Full evidence → threshold 0.55 (standard)
2824
+ * Minimal proof → threshold 0.67 (conservative gate)
2825
+ * This prevents low-evidence proofs from passing the same bar as full ones.
2826
+ */
2827
+
2828
+ // ---------------------------------------------------------------------------
2829
+ // runCoherenceAnalysis
2830
+ // ---------------------------------------------------------------------------
2831
+
2832
/**
 * Stage-3 structural coherence analysis. Runs six orthogonal checks over the
 * already-collected timing series and companion signals, then returns a small
 * score refinement (capped penalty/bonus), an evidence-based dynamic
 * threshold, and an optional hard override.
 *
 * Mutates nothing it receives; all findings are accumulated locally.
 *
 * @param {object} p
 * @param {number[]} p.timings — raw timing array (already collected)
 * @param {object} p.jitter — JitterAnalysis from classifyJitter()
 * @param {object|null} p.phases — phased entropy result (optional)
 * @param {object[]|null} p.batches — adaptive batch snapshots (optional)
 * @param {object} p.bio — bio snapshot
 * @param {object} p.canvas — canvas fingerprint
 * @param {object} p.audio — audio jitter result
 * @returns {CoherenceReport}
 */
function runCoherenceAnalysis({ timings, jitter, phases, batches, bio, canvas, audio }) {
  if (!timings || timings.length < 10) {
    // Insufficient data — return a conservative threshold and no adjustments.
    // (_empty is this module's neutral-report factory, defined elsewhere in the file.)
    return _empty(0.64);
  }

  const checks = []; // anomalies found (each may carry a penalty)
  const bonuses = []; // physical properties confirmed (each carries a bonus)
  let penalty = 0;
  let bonus = 0;
  let hardOverride = null; // 'vm' | null

  const n = timings.length;
  // Missing autocorrelation data degrades gracefully: every lag reads as 0.
  const ac = jitter.autocorrelations ?? {};

  // ── Check 1: Timing Distinctness Ratio ──────────────────────────────────────
  // VM quantized timers repeat the same integer-millisecond values.
  // Real silicon at sub-ms resolution produces mostly unique values.
  //
  // Bin width: 0.2 ms (matches detectQuantizationEntropy for consistency).
  // Normalized by sample count → iteration-count-independent.
  //
  //   distinctRatio > 0.65             → sub-ms resolution confirmed → physical bonus
  //   distinctRatio < 0.30             → heavy quantization → VM penalty
  //   distinctRatio < 0.45 at n ≥ 100  → mild VM penalty
  {
    const bins = new Set(timings.map(t => Math.round(t / 0.2)));
    const distinctRatio = bins.size / n;

    // Below 50 samples the ratio estimate is too noisy to act on.
    if (n >= 50) {
      if (distinctRatio > 0.65) {
        bonuses.push({
          id: 'HIGH_TIMING_DISTINCTNESS',
          label: 'Timer produces mostly unique values — sub-ms resolution confirmed',
          detail: `ratio=${distinctRatio.toFixed(3)} (${bins.size}/${n} distinct 0.2ms bins)`,
          value: 0.06,
        });
        bonus += 0.06;

      } else if (distinctRatio < 0.30) {
        // Severely quantized — VM with integer-ms timer emulation
        checks.push({
          id: 'LOW_TIMING_DISTINCTNESS',
          label: 'Heavy timer quantization — integer-ms VM timer suspected',
          detail: `ratio=${distinctRatio.toFixed(3)} (only ${bins.size}/${n} distinct 0.2ms bins)`,
          severity: 'high',
          penalty: 0.12,
        });
        penalty += 0.12;

      } else if (distinctRatio < 0.45 && n >= 100) {
        // At 100+ iterations we expect more spread; below 0.45 is suspicious
        checks.push({
          id: 'BORDERLINE_TIMING_DISTINCTNESS',
          label: 'Below-expected timer resolution — coarse-grained timer suspected',
          detail: `ratio=${distinctRatio.toFixed(3)} at n=${n}`,
          severity: 'medium',
          penalty: 0.05,
        });
        penalty += 0.05;
      }
    }
  }

  // ── Check 2: Autocorrelation Decay Shape ────────────────────────────────────
  // Genuine Brownian noise decays monotonically: lag1 > lag2 > lag5 > lag10 > …
  // VM scheduler rhythms create harmonic revivals: lag25 or lag50 elevated
  // above lag10 because steal-time bursts recur at the scheduler quantum period.
  //
  // This is structurally orthogonal to the Picket Fence detector (stage 2),
  // which checks absolute magnitude — this checks the SHAPE of the decay curve.
  {
    const l1 = Math.abs(ac.lag1 ?? 0);
    const l2 = Math.abs(ac.lag2 ?? 0);
    const l3 = Math.abs(ac.lag3 ?? 0);
    const l5 = Math.abs(ac.lag5 ?? 0);
    const l10 = Math.abs(ac.lag10 ?? 0);
    const l25 = Math.abs(ac.lag25 ?? 0);
    const l50 = Math.abs(ac.lag50 ?? 0);

    // Strict Brownian decay: each successive lag is no higher than the previous
    // (+0.03 tolerance for estimation noise; +0.05 at the long, noisier lags)
    const isBrownianDecay =
      l1 < 0.20 &&
      l2 <= l1 + 0.03 &&
      l5 <= l2 + 0.03 &&
      l10 <= l5 + 0.03 &&
      l25 <= l10 + 0.05 &&
      l50 <= l10 + 0.05;

    // Harmonic revival: a long lag significantly exceeds medium lags
    // (scheduler quantum footprint)
    const revival25 = l25 > l5 + 0.12 && l25 > 0.18;
    const revival50 = l50 > l5 + 0.12 && l50 > 0.18;

    if (isBrownianDecay && l1 < 0.15) {
      bonuses.push({
        id: 'BROWNIAN_DECAY_SHAPE',
        label: 'AC decays monotonically at all measured lags — genuine Brownian noise structure',
        detail: `lag1=${l1.toFixed(3)} lag3=${l3.toFixed(3)} lag5=${l5.toFixed(3)} lag10=${l10.toFixed(3)} lag50=${l50.toFixed(3)}`,
        value: 0.09,
      });
      bonus += 0.09;
    }

    // Note: decay bonus and revival penalty are not mutually exclusive by
    // construction, but a revival lag violates the decay condition in practice.
    if (revival25 || revival50) {
      const peakLag = revival25 ? 25 : 50;
      const peakVal = revival25 ? l25 : l50;
      checks.push({
        id: 'HARMONIC_AUTOCORR_REVIVAL',
        label: `Long-lag AC revival at lag ${peakLag} — VM scheduler harmonic footprint`,
        detail: `lag5=${l5.toFixed(3)} lag${peakLag}=${peakVal.toFixed(3)} Δ=${(peakVal - l5).toFixed(3)}`,
        severity: 'high',
        penalty: 0.10,
      });
      penalty += 0.10;
    }
  }

  // ── Check 3: Chunk CV Stability (temporal stationarity test) ─────────────────
  // Split the timing series into 4 equal windows and compute CV per window.
  // Real hardware: CV varies across chunks — CPU temperature changes, workload
  // varies, OS scheduling fluctuates — making the process non-stationary.
  // VM hypervisor: CV is nearly identical in every chunk because the hypervisor's
  // scheduling behaviour is constant — a stationary process.
  //
  // Metric: CV of the 4 chunk CVs (CV-of-CVs).
  //   > 0.15 → non-stationary noise → physical bonus
  //   < 0.06 → suspiciously constant → VM penalty
  if (n >= 40) {
    // Trailing samples beyond 4 × chunkSize are deliberately ignored.
    const chunkSize = Math.floor(n / 4);
    const chunkCVs = [];

    for (let c = 0; c < 4; c++) {
      const chunk = timings.slice(c * chunkSize, (c + 1) * chunkSize);
      const m = chunk.reduce((a, b) => a + b, 0) / chunk.length;
      const s = Math.sqrt(chunk.reduce((acc, v) => acc + (v - m) ** 2, 0) / chunk.length);
      // Skip degenerate windows (non-positive mean) rather than divide by zero.
      if (m > 0) chunkCVs.push(s / m);
    }

    // Only evaluate when all four windows produced a valid CV.
    if (chunkCVs.length === 4) {
      const cvMean = chunkCVs.reduce((a, b) => a + b, 0) / 4;
      const cvStd = Math.sqrt(chunkCVs.reduce((s, v) => s + (v - cvMean) ** 2, 0) / 4);
      const cvOfCVs = cvMean > 1e-9 ? cvStd / cvMean : 0;

      if (cvOfCVs > 0.15) {
        bonuses.push({
          id: 'TEMPORAL_NON_STATIONARITY',
          label: 'Noise level varies across time windows — thermal non-stationarity confirmed',
          detail: `CV-of-CVs=${cvOfCVs.toFixed(3)} windows=[${chunkCVs.map(v => v.toFixed(3)).join(', ')}]`,
          value: 0.07,
        });
        bonus += 0.07;

      } else if (cvOfCVs < 0.06 && cvMean > 0.01) {
        // cvMean guard: near-zero variance everywhere is handled by other checks.
        checks.push({
          id: 'STATIONARY_NOISE_PROCESS',
          label: 'Noise level constant across all time windows — hypervisor stationarity suspected',
          detail: `CV-of-CVs=${cvOfCVs.toFixed(3)} windows=[${chunkCVs.map(v => v.toFixed(3)).join(', ')}]`,
          severity: 'high',
          penalty: 0.09,
        });
        penalty += 0.09;
      }
    }
  }

  // ── Check 4: Level-Dependent Volatility (noise model test) ──────────────────
  // Thermal noise is multiplicative: the physical process that adds jitter
  // (electron thermal motion, gate capacitance variation) scales with the
  // operating conditions that also drive longer execution times.
  // Consequence: larger timing values tend to have more incremental variance.
  // This produces a positive Pearson correlation between:
  //   — timing[i]              (level — how long that iteration took)
  //   — |timing[i+1]-timing[i]| (volatility — how much it changed)
  //
  // VM hypervisor noise is additive: a constant scheduling jitter is applied
  // regardless of the iteration's timing level → near-zero correlation.
  //
  //   r > 0.15            → multiplicative noise → physical bonus
  //   r < 0.04 at n ≥ 80  → additive noise → VM penalty
  if (n >= 30) {
    const levels = timings.slice(0, n - 1);
    const deltas = timings.slice(1).map((v, i) => Math.abs(v - timings[i]));
    const lMean = levels.reduce((a, b) => a + b, 0) / levels.length;
    const dMean = deltas.reduce((a, b) => a + b, 0) / deltas.length;

    // Single-pass Pearson correlation between level and volatility series.
    let cov = 0, lVar = 0, dVar = 0;
    for (let i = 0; i < levels.length; i++) {
      const ld = levels[i] - lMean;
      const dd = deltas[i] - dMean;
      cov += ld * dd;
      lVar += ld * ld;
      dVar += dd * dd;
    }
    // Guard against division by ~zero when either series is constant.
    const denom = Math.sqrt(lVar * dVar);
    const levelVolCorr = denom < 1e-14 ? 0 : cov / denom;

    if (levelVolCorr > 0.15) {
      bonuses.push({
        id: 'MULTIPLICATIVE_NOISE_MODEL',
        label: 'Timing variance scales with level — multiplicative thermal noise confirmed',
        detail: `level-volatility r=${levelVolCorr.toFixed(3)} (expected >0.15 for real silicon)`,
        value: 0.07,
      });
      bonus += 0.07;

    } else if (levelVolCorr < 0.04 && n >= 80) {
      // Enough samples to trust the estimate; near-zero = additive hypervisor noise
      checks.push({
        id: 'ADDITIVE_NOISE_MODEL',
        label: 'Timing variance independent of level — additive hypervisor noise suspected',
        detail: `level-volatility r=${levelVolCorr.toFixed(3)} (expected >0.15 for real silicon)`,
        severity: 'medium',
        penalty: 0.07,
      });
      penalty += 0.07;
    }
  }

  // ── Check 5: Batch Convergence Variance (adaptive mode only) ─────────────────
  // In adaptive mode each batch of 25 iterations produces a vmConf estimate.
  // Real hardware: these estimates wander batch-to-batch because the underlying
  // physical source is genuinely stochastic.
  // VM hypervisor: estimates lock in immediately — deterministic scheduling means
  // each batch produces essentially the same picture.
  //
  // Most diagnostic in the ambiguous zone (vmConf 0.25–0.70) where stability
  // is suspicious. A clearly obvious VM (vmConf 0.90 every batch) is expected
  // to be stable. A borderline device (vmConf 0.45 across 6 identical batches)
  // is exhibiting VM-like stability despite claiming ambiguity.
  //
  // Uses only batches collected after iteration 75 to avoid early-sample noise.
  if (batches && batches.length >= 4) {
    const stableBatches = batches.filter(b => b.iterations >= 75);

    if (stableBatches.length >= 3) {
      const vmConfs = stableBatches.map(b => b.vmConf);
      const hwConfs = stableBatches.map(b => b.hwConf);
      const vmMean = vmConfs.reduce((a, b) => a + b, 0) / vmConfs.length;
      const hwMean = hwConfs.reduce((a, b) => a + b, 0) / hwConfs.length;
      const vmStd = Math.sqrt(
        vmConfs.reduce((s, v) => s + (v - vmMean) ** 2, 0) / vmConfs.length
      );

      // Only meaningful in the ambiguous zone
      const isAmbiguous = vmMean > 0.25 && vmMean < 0.70 && hwMean < 0.55;

      if (isAmbiguous) {
        if (vmStd > 0.06) {
          bonuses.push({
            id: 'SIGNAL_FLUCTUATES_STOCHASTICALLY',
            label: 'Batch-by-batch signal variance confirms genuine stochastic noise source',
            detail: `vmConf σ=${vmStd.toFixed(3)} across ${stableBatches.length} stable batches (μ=${vmMean.toFixed(3)})`,
            value: 0.05,
          });
          bonus += 0.05;

        } else if (vmStd < 0.025 && stableBatches.length >= 4) {
          checks.push({
            id: 'SIGNAL_DETERMINISTICALLY_STABLE',
            label: 'Signal locked-in immediately — deterministic hypervisor suspected',
            detail: `vmConf σ=${vmStd.toFixed(3)} across ${stableBatches.length} stable batches (μ=${vmMean.toFixed(3)})`,
            severity: 'medium',
            penalty: 0.06,
          });
          penalty += 0.06;
        }
      }
    }
  }

  // ── Check 6: Phase Entropy Trajectory ────────────────────────────────────────
  // The EJR (hot_QE / cold_QE) captures the endpoint ratio, but misses the
  // intermediate trajectory. We additionally verify monotonic growth:
  //   cold_QE < load_QE < hot_QE
  //
  // If all three phases are available, monotonic growth is a strong bonus.
  // Non-monotonic trajectory (entropy dropped then recovered) is suspicious.
  //
  // HARD OVERRIDE: if EJR ≥ 1.08 (claims entropy grew from cold to hot)
  // but cold_QE ≥ hot_QE (entropy provably didn't grow), the proof is
  // mathematically self-contradictory → forgery attempt.
  if (phases) {
    const coldQE = phases.cold?.qe ?? null;
    const loadQE = phases.load?.qe ?? null;
    const hotQE = phases.hot?.qe ?? null;
    const ejr = phases.entropyJitterRatio ?? null;

    // Mathematical contradiction: EJR = hot_QE / cold_QE, so EJR ≥ 1.08
    // implies hot_QE ≥ 1.08 × cold_QE > cold_QE. Violation = tampered proof.
    if (ejr !== null && ejr >= 1.08 && coldQE !== null && hotQE !== null) {
      if (coldQE >= hotQE) {
        hardOverride = 'vm';
        checks.push({
          id: 'EJR_QE_CONTRADICTION',
          label: 'HARD OVERRIDE: EJR claims entropy growth but cold_QE ≥ hot_QE — mathematically impossible',
          detail: `ejr=${ejr.toFixed(4)} cold_QE=${coldQE.toFixed(3)} hot_QE=${hotQE.toFixed(3)} (ejr=hot/cold requires hot>cold)`,
          severity: 'critical',
          // NOTE: the 0.60 here is informational; the numeric effect is limited
          // by the stage-3 penalty cap (0.15) below. The decisive signal is
          // hardOverride='vm', which downstream consumers act on directly.
          penalty: 0.60,
        });
        penalty += 0.60;
      }
    }

    // Monotonic trajectory check (requires all three phases)
    if (!hardOverride && coldQE !== null && loadQE !== null && hotQE !== null) {
      if (coldQE < loadQE && loadQE < hotQE) {
        bonuses.push({
          id: 'MONOTONIC_ENTROPY_TRAJECTORY',
          label: 'QE increased continuously cold→load→hot — unbroken thermal feedback confirmed',
          detail: `${coldQE.toFixed(3)} → ${loadQE.toFixed(3)} → ${hotQE.toFixed(3)}`,
          value: 0.09,
        });
        bonus += 0.09;

      } else if (coldQE >= loadQE || loadQE >= hotQE) {
        // Entropy stalled or reversed mid-run — unusual for real silicon
        checks.push({
          id: 'NON_MONOTONIC_ENTROPY_TRAJECTORY',
          label: 'Entropy did not increase monotonically across load phases',
          detail: `cold=${coldQE.toFixed(3)} load=${loadQE.toFixed(3)} hot=${hotQE.toFixed(3)}`,
          severity: 'medium',
          penalty: 0.06,
        });
        penalty += 0.06;
      }
    }
  }

  // ── Dynamic threshold ─────────────────────────────────────────────────────────
  // A proof built from more evidence earns a more permissive (lower) passing bar.
  // Weights:
  //   iterations (0→200):  up to 0.65 of the evidence score
  //   phased collection:   +0.15  (gold standard of thermal measurement)
  //   bio activity:        +0.10  (human presence confirmed)
  //   audio available:     +0.05  (additional timing channel)
  //   canvas available:    +0.05  (hardware renderer identified)
  //
  //   dynamicThreshold = 0.55 + (1 − evidenceWeight) × 0.12
  //   Full evidence → 0.55 (standard gate)
  //   Minimal proof → 0.67 (tightened gate for low-evidence submissions)
  const iterFraction = Math.min(1.0, n / 200);
  const phasedBonus = phases ? 0.15 : 0.0;
  const bioBonus = bio?.hasActivity ? 0.10 : 0.0;
  const audioBonus = audio?.available ? 0.05 : 0.0;
  const canvasBonus = canvas?.available ? 0.05 : 0.0;

  const evidenceWeight = Math.min(1.0,
    iterFraction * 0.65 + phasedBonus + bioBonus + audioBonus + canvasBonus
  );

  // Unary + after toFixed: round to 4 decimal places while keeping a number.
  const dynamicThreshold = +(0.55 + (1 - evidenceWeight) * 0.12).toFixed(4);

  // ── Stage-3 caps ─────────────────────────────────────────────────────────────
  // Stage 3 is a REFINEMENT, not the primary classifier. The caps are smaller
  // than stage 2 to prevent triple-compounding across all three stages on
  // legitimate hardware with multiple marginal-but-not-damning signals.
  const totalPenalty = Math.min(0.15, penalty); // hard floor: stage 3 can't reject alone
  const totalBonus = Math.min(0.18, bonus);

  return {
    penalty: totalPenalty,
    bonus: totalBonus,
    netAdjustment: +(totalBonus - totalPenalty).toFixed(4),
    checks,
    bonuses,
    hardOverride,     // 'vm' | null
    dynamicThreshold, // [0.55, 0.67]
    evidenceWeight: +evidenceWeight.toFixed(4),
    coherenceFlags: checks.map(c => c.id),
    physicalFlags: bonuses.map(b => b.id),
  };
}
3217
+
3218
+ // ---------------------------------------------------------------------------
3219
+ // computeServerDynamicThreshold
3220
+ // ---------------------------------------------------------------------------
3221
+
3222
+ /**
3223
+ * Server-side recomputation of the dynamic threshold.
3224
+ * The server NEVER trusts the client's dynamicThreshold value; it recomputes
3225
+ * from known payload fields.
3226
+ *
3227
+ * @param {object} payload - validated ProofPayload
3228
+ * @returns {number} - minimum passing score for this proof [0.50, 0.62]
3229
+ */
3230
+ function computeServerDynamicThreshold(payload) {
3231
+ const entropy = payload?.signals?.entropy;
3232
+ const bio = payload?.signals?.bio;
3233
+ const audio = payload?.signals?.audio;
3234
+ const canvas = payload?.signals?.canvas;
3235
+
3236
+ const n = entropy?.iterations ?? 0;
3237
+ const hasPhases = payload?.heuristic?.entropyJitterRatio != null;
3238
+ const hasBio = bio?.hasActivity === true;
3239
+ const hasAudio = audio?.available === true;
3240
+ const hasCanvas = canvas?.available === true;
3241
+
3242
+ const iterFraction = Math.min(1.0, n / 200);
3243
+ const evidenceWeight = Math.min(1.0,
3244
+ iterFraction * 0.65 +
3245
+ (hasPhases ? 0.15 : 0) +
3246
+ (hasBio ? 0.10 : 0) +
3247
+ (hasAudio ? 0.05 : 0) +
3248
+ (hasCanvas ? 0.05 : 0)
3249
+ );
3250
+
3251
+ // Server threshold: [0.50, 0.62]
3252
+ // Slightly more lenient than client [0.55, 0.67] because the server already
3253
+ // applies minJitterScore as an independent check. The dynamic component
3254
+ // adds an ADDITIONAL evidence-proportional tightening on top.
3255
+ return +(0.50 + (1 - evidenceWeight) * 0.12).toFixed(4);
3256
+ }
3257
+
3258
+ // ---------------------------------------------------------------------------
3259
+ // Internal helpers
3260
+ // ---------------------------------------------------------------------------
3261
+
3262
/**
 * Build an all-zero coherence report for the case where there is nothing
 * to analyse. Carries the supplied dynamic threshold through unchanged.
 *
 * @param {number} threshold - dynamic passing threshold to embed
 * @returns {CoherenceReport}
 */
function _empty(threshold) {
  const blank = {
    penalty: 0,
    bonus: 0,
    netAdjustment: 0,
    checks: [],
    bonuses: [],
    hardOverride: null,
    dynamicThreshold: threshold,
    evidenceWeight: 0,
    coherenceFlags: [],
    physicalFlags: [],
  };
  return blank;
}
3273
+
3274
+ /**
3275
+ * @typedef {object} CoherenceReport
3276
+ * @property {number} penalty - total score penalty [0, 0.15]
3277
+ * @property {number} bonus - total score bonus [0, 0.18]
3278
+ * @property {number} netAdjustment - bonus − penalty [-0.15, +0.18]
3279
+ * @property {object[]} checks - anomalies found (with penalty values)
3280
+ * @property {object[]} bonuses - physical properties confirmed
3281
+ * @property {'vm'|null} hardOverride - overrides score when set
3282
+ * @property {number} dynamicThreshold - computed passing threshold [0.55, 0.67]
3283
+ * @property {number} evidenceWeight - how much evidence was collected [0, 1]
3284
+ * @property {string[]} coherenceFlags - check IDs for logging
3285
+ * @property {string[]} physicalFlags - bonus IDs for logging
3286
+ */
3287
+
3288
+ /**
3289
+ * @sovereign/pulse — Hypervisor & Cloud Provider Fingerprinter
3290
+ *
3291
+ * Each hypervisor has a distinct "steal-time rhythm" — a characteristic
3292
+ * pattern in how it schedules guest vCPUs on host physical cores.
3293
+ * This creates detectable signatures in the timing autocorrelation profile.
3294
+ *
3295
+ * Think of it like a heartbeat EKG:
3296
+ * KVM → regular 50-iteration bursts (~250ms quantum at 5ms/iter)
3297
+ * Xen → longer 150-iteration bursts (~750ms credit scheduler quantum)
3298
+ * VMware → irregular bursts, memory balloon noise
3299
+ * Hyper-V → 78-iteration bursts (~390ms at 5ms/iter, 15.6ms quantum)
3300
+ * Nitro → almost none — SR-IOV passthrough is nearly invisible
3301
+ * Physical → no rhythm at all
3302
+ *
3303
+ * Canvas renderer strings give a second, independent signal that we cross-
3304
+ * reference to increase confidence in the provider classification.
3305
+ */
3306
+
3307
+ // ---------------------------------------------------------------------------
3308
+ // Provider profile database
3309
+ // ---------------------------------------------------------------------------
3310
+ // Each profile is calibrated from real benchmark data.
3311
+ // Fields: lag1_range, lag50_range, qe_range, cv_range, renderer_hints
3312
+
3313
// True when the space-joined hint tokens contain `phrase` as a substring.
// rendererHints are whitespace-split tokens (see _rendererHints), so a
// multi-word phrase such as 'nvidia t4' can NEVER match an individual token;
// it must be tested against the re-joined string. Single-word phrases behave
// identically either way (a spaceless phrase cannot span a token boundary).
const _hintsInclude = (hints, phrase) => hints.join(' ').includes(phrase);

// Each profile is a named classifier: match(signals) returns true when the
// observed timing/renderer signal bundle fits that environment's calibrated
// thresholds. Profiles are evaluated in array order by detectProvider().
// Fields: id, label, profile (timing-shape name), match predicate, and
// optional providerHints.
const PROVIDER_PROFILES = [
  {
    id: 'physical',
    label: 'Physical Hardware',
    profile: 'analog-fog',
    // NOTE(review): detectProvider() computes confidence itself and never
    // reads this field in visible code — confirm before removing.
    confidence: 0, // set dynamically
    match: ({ lag1, lag50, qe, cv, entropyJitterRatio, isSoftwareRenderer }) =>
      !isSoftwareRenderer &&
      Math.abs(lag1) < 0.20 &&
      Math.abs(lag50) < 0.15 &&
      qe > 3.0 &&
      cv > 0.06 &&
      // Missing phased data (null EJR) is treated as compatible with physical.
      (entropyJitterRatio === null || entropyJitterRatio >= 1.02),
  },
  {
    id: 'kvm-generic',
    label: 'KVM Hypervisor (generic)',
    profile: 'picket-fence',
    match: ({ lag1, lag50, qe, cv }) =>
      lag1 > 0.40 && qe < 2.5 && cv < 0.15 && Math.abs(lag50) > 0.25,
    providerHints: ['digitalocean', 'linode', 'vultr', 'hetzner', 'ovh'],
  },
  {
    id: 'kvm-digitalocean',
    label: 'DigitalOcean Droplet (KVM)',
    profile: 'picket-fence',
    // Exact-token comparison is correct here: these hints are single words.
    match: ({ lag1, lag50, qe, cv, rendererHints }) =>
      lag1 > 0.55 && qe < 2.0 && cv < 0.12 &&
      (rendererHints.some(r => ['llvmpipe', 'virtio', 'qxl'].includes(r)) ||
        lag50 > 0.30),
  },
  {
    id: 'kvm-aws-ec2-xen',
    label: 'AWS EC2 (Xen/older generation)',
    profile: 'picket-fence',
    // Xen credit scheduler has longer period (~150 iters)
    match: ({ lag1, lag25, lag50, qe, cv }) =>
      qe < 2.2 && cv < 0.13 &&
      lag25 > 0.20 && lag50 > 0.20 &&
      lag1 < 0.50, // lag-1 less pronounced than KVM
  },
  {
    id: 'nitro-aws',
    label: 'AWS EC2 Nitro (near-baremetal)',
    profile: 'near-physical',
    // Nitro uses SR-IOV and dedicated hardware — steal-time is very low.
    // Looks almost physical but canvas renderer gives it away.
    // FIX: multi-word phrases are matched against the joined hint string;
    // the previous per-token .includes('nvidia t4') could never match.
    match: ({ lag1, lag50, qe, cv, isSoftwareRenderer, rendererHints }) =>
      qe > 2.5 && cv > 0.05 &&
      lag1 < 0.25 && lag50 < 0.20 &&
      (isSoftwareRenderer ||
        _hintsInclude(rendererHints, 'nvidia t4') ||
        _hintsInclude(rendererHints, 'nvidia a10')),
  },
  {
    id: 'vmware-esxi',
    label: 'VMware ESXi',
    profile: 'burst-scheduler',
    // VMware balloon driver creates irregular memory pressure bursts
    match: ({ lag1, lag50, qe, cv, rendererHints }) =>
      qe < 2.5 &&
      (_hintsInclude(rendererHints, 'vmware') ||
        (lag1 > 0.30 && lag50 < lag1 * 0.7 && cv < 0.14)),
  },
  {
    id: 'hyperv',
    label: 'Microsoft Hyper-V',
    profile: 'picket-fence',
    // 15.6ms scheduler quantum → burst every ~78 iters
    // FIX: 'microsoft basic render' is multi-word — joined-string match.
    match: ({ lag1, lag25, qe, cv, rendererHints }) =>
      qe < 2.3 &&
      (_hintsInclude(rendererHints, 'microsoft basic render') ||
        _hintsInclude(rendererHints, 'warp') ||
        (lag25 > 0.25 && lag1 > 0.35 && cv < 0.12)),
  },
  {
    id: 'gcp-kvm',
    label: 'Google Cloud (KVM)',
    profile: 'picket-fence',
    match: ({ lag1, lag50, qe, cv, rendererHints }) =>
      qe < 2.3 && lag1 > 0.45 &&
      (_hintsInclude(rendererHints, 'swiftshader') ||
        _hintsInclude(rendererHints, 'google') ||
        (lag50 > 0.28 && cv < 0.11)),
  },
  {
    id: 'gh200-datacenter',
    label: 'NVIDIA GH200 / HPC Datacenter',
    profile: 'hypervisor-flat',
    // Even with massive compute, still trapped by hypervisor clock.
    // GH200 shows near-zero Hurst (extreme quantization) + very high lag-1.
    // FIX: 'grace hopper', 'nvidia a100', 'nvidia h100' are multi-word.
    match: ({ lag1, qe, hurst, cv, rendererHints }) =>
      (_hintsInclude(rendererHints, 'gh200') ||
        _hintsInclude(rendererHints, 'grace hopper') ||
        _hintsInclude(rendererHints, 'nvidia a100') ||
        _hintsInclude(rendererHints, 'nvidia h100') ||
        (hurst < 0.10 && lag1 > 0.60 && qe < 1.8 && cv < 0.10)),
  },
  {
    id: 'generic-vm',
    label: 'Virtual Machine (unclassified)',
    profile: 'picket-fence',
    match: ({ lag1, qe, cv, isSoftwareRenderer }) =>
      isSoftwareRenderer ||
      (qe < 2.0 && lag1 > 0.35) ||
      (cv < 0.02),
  },
];
3416
+
3417
+ // ---------------------------------------------------------------------------
3418
+ // detectProvider
3419
+ // ---------------------------------------------------------------------------
3420
+
3421
/**
 * Classifies the host environment based on timing + canvas signals.
 *
 * @param {object} p
 * @param {import('./jitter.js').JitterAnalysis} p.jitter
 * @param {object} p.autocorrelations - extended lags including lag25, lag50
 * @param {import('../collector/canvas.js').CanvasFingerprint} p.canvas
 * @param {object|null} p.phases
 * @returns {ProviderResult}
 */
function detectProvider({ jitter, autocorrelations, canvas, phases }) {
  const rendererHints = _rendererHints(canvas?.webglRenderer, canvas?.webglVendor);

  // Normalised signal bundle handed to every profile's match() predicate.
  const signals = {
    lag1: Math.abs(autocorrelations?.lag1 ?? 0),
    lag25: Math.abs(autocorrelations?.lag25 ?? 0),
    lag50: Math.abs(autocorrelations?.lag50 ?? 0),
    qe: jitter.quantizationEntropy,
    cv: jitter.stats?.cv ?? 0,
    hurst: jitter.hurstExponent ?? 0.5,
    isSoftwareRenderer: canvas?.isSoftwareRenderer ?? false,
    rendererHints,
    entropyJitterRatio: phases?.entropyJitterRatio ?? null,
  };

  // Collect every matching profile; a throwing matcher counts as no match.
  const scored = [];
  for (const profile of PROVIDER_PROFILES) {
    let matched = false;
    try {
      matched = profile.match(signals);
    } catch {
      matched = false;
    }
    if (matched) {
      // 'physical' is the fallback interpretation: when any VM profile also
      // matches, the VM profile wins (higher priority sorts first; the sort
      // is stable, so ties keep the profile-database ordering).
      scored.push({ ...profile, priority: profile.id === 'physical' ? 0 : 1 });
    }
  }
  scored.sort((a, b) => b.priority - a.priority);

  const best = scored[0] ?? { id: 'unknown', label: 'Unknown', profile: 'unknown' };

  // Confidence: how many independent "VM indicator" thresholds are crossed.
  const indicators = [
    signals.qe < 2.5,
    signals.lag1 > 0.35,
    signals.lag50 > 0.20,
    signals.cv < 0.04,
    signals.isSoftwareRenderer,
    signals.hurst < 0.15,
    phases?.entropyJitterRatio != null && phases.entropyJitterRatio < 1.02,
  ];
  const vmIndicatorCount = indicators.filter(Boolean).length;

  const isPhysical = best.id === 'physical';
  const confidence = isPhysical
    ? Math.max(20, 95 - vmIndicatorCount * 15)
    : Math.min(95, 40 + vmIndicatorCount * 12);

  return {
    providerId: best.id,
    providerLabel: best.label,
    profile: best.profile,
    confidence,
    isVirtualized: best.id !== 'physical',
    signals,
    alternatives: scored.slice(1, 3).map(p => ({ id: p.id, label: p.label })),
    rendererHints,
    schedulerQuantumMs: _estimateQuantum(signals),
  };
}
3490
+
3491
+ /**
3492
+ * @typedef {object} ProviderResult
3493
+ * @property {string} providerId
3494
+ * @property {string} providerLabel
3495
+ * @property {string} profile 'analog-fog' | 'picket-fence' | 'burst-scheduler' | 'near-physical' | 'hypervisor-flat' | 'unknown'
3496
+ * @property {number} confidence 0–100
3497
+ * @property {boolean} isVirtualized
3498
+ * @property {object} signals
3499
+ * @property {object[]} alternatives
3500
+ * @property {string[]} rendererHints
3501
+ * @property {number|null} schedulerQuantumMs
3502
+ */
3503
+
3504
+ // ---------------------------------------------------------------------------
3505
+ // Internal helpers
3506
+ // ---------------------------------------------------------------------------
3507
+
3508
/**
 * Extract lowercase hint tokens from WebGL renderer string for pattern matching.
 *
 * @param {string} [renderer=''] - WebGL renderer string
 * @param {string} [vendor=''] - WebGL vendor string
 * @returns {string[]} lowercase tokens longer than two characters
 */
function _rendererHints(renderer = '', vendor = '') {
  const haystack = `${renderer} ${vendor}`.toLowerCase();
  // Split on whitespace and common renderer-string punctuation, then drop
  // short noise tokens (empty strings, stray digits, connectives).
  const tokens = haystack.split(/[\s\/(),]+/);
  return tokens.filter((token) => token.length > 2);
}
3516
+
3517
/**
 * Estimate the hypervisor's scheduler quantum from the dominant autocorrelation lag.
 * Returns null if the device appears to be physical.
 *
 * @param {{ lag1: number, lag25: number, lag50: number, qe: number }} signals
 * @returns {number|null} estimated quantum in milliseconds, or null
 */
function _estimateQuantum({ lag1, lag25, lag50, qe }) {
  // High quantization entropy reads as physical hardware — no quantum.
  if (qe > 3.2) return null;

  // Scan the long-range lags for the strongest autocorrelation peak
  // (checked in order: lag-50, then lag-25; strict > keeps the first on ties).
  let peakLag = 0;
  let peakAc = 0;
  for (const candidate of [{ lag: 50, ac: lag50 }, { lag: 25, ac: lag25 }]) {
    if (candidate.ac > peakAc) {
      peakLag = candidate.lag;
      peakAc = candidate.ac;
    }
  }

  // Peak too weak → no convincing scheduler rhythm.
  if (peakAc < 0.20) return null;

  // Quantum (ms) ≈ dominant_lag × estimated_iteration_time (≈5ms)
  return peakLag * 5;
}
3536
+
3537
+ /**
3538
+ * @sovereign/pulse — High-Level Fingerprint Class
3539
+ *
3540
+ * The developer-facing API. Instead of forcing devs to understand Hurst
3541
+ * Exponents and Quantization Entropy, they get a Fingerprint object with
3542
+ * plain-language properties and one critical boolean: isSynthetic.
3543
+ *
3544
+ * Usage:
3545
+ *
3546
+ * import { Fingerprint } from '@sovereign/pulse';
3547
+ *
3548
+ * const fp = await Fingerprint.collect({ nonce });
3549
+ *
3550
+ * if (fp.isSynthetic) {
3551
+ * console.log(`Blocked: ${fp.providerLabel} detected (${fp.confidence}% confidence)`);
3552
+ * console.log(`Profile: ${fp.profile}`); // 'picket-fence'
3553
+ * console.log(`Reason: ${fp.topFlag}`); // 'LOW_QE + HIGH_LAG1_AUTOCORR'
3554
+ * } else {
3555
+ * console.log(`Verified: ${fp.hardwareId()}`);
3556
+ * console.log(`Score: ${fp.score}`); // 0.0 – 1.0
3557
+ * }
3558
+ *
3559
+ * // Always send to server for final validation:
3560
+ * const { payload, hash } = fp.toCommitment();
3561
+ */
3562
+
3563
+
3564
+ // ---------------------------------------------------------------------------
3565
+ // Fingerprint class
3566
+ // ---------------------------------------------------------------------------
3567
+
3568
class Fingerprint {
  /**
   * @private — use the static factory {@link Fingerprint.collect} instead.
   * @param {object} raw - full internal collection/analysis state assembled
   *   by Fingerprint.collect()
   */
  constructor(raw) {
    this._raw = raw; // full internal data
    // NOTE(review): toCommitment() returns this._raw.commitment, which is
    // built eagerly inside collect(); this field appears unused in visible
    // code — confirm before assuming lazy construction actually happens.
    this._commitment = null; // lazy-built on first toCommitment() call
  }

  // ── Static factory ────────────────────────────────────────────────────────

  /**
   * Collect all hardware signals and return a Fingerprint instance.
   *
   * Runs entropy, canvas, and audio collection in parallel while a bio
   * (input-activity) collector observes the page, then feeds everything
   * through the three-stage scoring pipeline and builds the commitment.
   *
   * @param {object} opts
   * @param {string} opts.nonce - server-issued challenge nonce (required)
   * @param {number} [opts.iterations=200]
   * @param {number} [opts.bioWindowMs=3000]
   * @param {boolean} [opts.phased=true] - run cold/load/hot phases
   * @param {boolean} [opts.adaptive=true] - forwarded to collectEntropy
   * @param {number} [opts.adaptiveThreshold=0.85] - forwarded to collectEntropy
   * @param {Function} [opts.onProgress] - (stage: string) => void
   * @param {string} [opts.wasmPath]
   * @returns {Promise<Fingerprint>}
   * @throws {Error} when opts.nonce is missing
   */
  static async collect(opts = {}) {
    const {
      nonce,
      iterations = 200,
      bioWindowMs = 3000,
      phased = true,
      adaptive = true,
      adaptiveThreshold = 0.85,
      onProgress,
      wasmPath,
    } = opts;

    if (!nonce) throw new Error('Fingerprint.collect() requires opts.nonce');

    // Progress callbacks must never break collection — errors are swallowed.
    const emit = (stage, meta) => { try { onProgress?.(stage, meta); } catch {} };

    emit('start');

    // ── Parallel collection ────────────────────────────────────────────────
    const bio = new BioCollector();
    bio.start();

    const [entropy, canvas, audio] = await Promise.all([
      collectEntropy({
        iterations, phased, adaptive, adaptiveThreshold, wasmPath,
        onBatch: (meta) => emit('entropy_batch', meta),
      }).then(r => { emit('entropy_done'); return r; }),
      collectCanvasFingerprint()
        .then(r => { emit('canvas_done'); return r; }),
      collectAudioJitter({ durationMs: Math.min(bioWindowMs, 2000) })
        .then(r => { emit('audio_done'); return r; }),
    ]);

    // Wait out the bio window
    // NOTE(review): assumes entropy.collectedAt is an epoch-ms timestamp
    // taken when entropy collection began — confirm in the entropy collector.
    const elapsed = Date.now() - entropy.collectedAt;
    const remain = Math.max(0, bioWindowMs - elapsed);
    if (remain > 0) await new Promise(r => setTimeout(r, remain));

    bio.stop();
    const bioSnapshot = bio.snapshot(entropy.timings);
    emit('bio_done');

    // ── Analysis pipeline ─────────────────────────────────────────────────
    const jitter = classifyJitter(entropy.timings, { autocorrelations: entropy.autocorrelations });
    const heuristic = runHeuristicEngine({ jitter, phases: entropy.phases, autocorrelations: entropy.autocorrelations });
    const provider = detectProvider({ jitter, autocorrelations: entropy.autocorrelations, canvas, phases: entropy.phases });

    // ── Three-stage scoring pipeline ──────────────────────────────────────
    // Stage 1: base jitter score from timing distribution analysis
    const rawScore = jitter.score;
    // Stage 2: heuristic cross-metric coherence adjustment (clamped to [0, 1])
    const adjScore = Math.max(0, Math.min(1, rawScore + heuristic.netAdjustment));
    // Stage 3: zero-latency structural coherence analysis on already-collected data
    const coherence = runCoherenceAnalysis({
      timings: entropy.timings,
      jitter,
      phases: entropy.phases ?? null,
      batches: entropy.batches ?? null,
      bio: bioSnapshot,
      canvas,
      audio,
    });
    // Final score: stage-2 adjusted score refined by stage-3 coherence,
    // clamped again so compounding adjustments stay in [0, 1].
    const finalScore = Math.max(0, Math.min(1, adjScore + coherence.netAdjustment));

    emit('analysis_done');

    // ── Build commitment ──────────────────────────────────────────────────
    const payload = buildProof({ entropy, jitter, bio: bioSnapshot, canvas, audio, nonce });
    // Inject heuristic + provider into proof payload for server-side reference
    payload.heuristic = {
      penalty: heuristic.penalty,
      bonus: heuristic.bonus,
      entropyJitterRatio: heuristic.entropyJitterRatio,
      picketFence: heuristic.picketFence.detected,
      coherenceFlags: heuristic.coherenceFlags,
      hardOverride: heuristic.hardOverride, // 'vm' | null
    };
    payload.provider = {
      id: provider.providerId,
      label: provider.providerLabel,
      profile: provider.profile,
      confidence: provider.confidence,
      schedulerQuantum: provider.schedulerQuantumMs,
    };
    // Stage-3 coherence summary (server uses these for logging + dynamic threshold)
    payload.coherence = {
      netAdjustment: coherence.netAdjustment,
      dynamicThreshold: coherence.dynamicThreshold,
      evidenceWeight: coherence.evidenceWeight,
      coherenceFlags: coherence.coherenceFlags,
      physicalFlags: coherence.physicalFlags,
      hardOverride: coherence.hardOverride,
    };
    // NOTE(review): assumes buildProof() always creates payload.classification
    // — confirm, otherwise these assignments would throw.
    payload.classification.adjustedScore = _round(adjScore, 4);
    payload.classification.finalScore = _round(finalScore, 4);
    payload.classification.dynamicThreshold = coherence.dynamicThreshold;

    const commitment = buildCommitment(payload);
    emit('complete');

    return new Fingerprint({
      entropy, canvas, audio,
      bioSnapshot, jitter, heuristic, coherence, provider,
      rawScore, adjScore, finalScore,
      nonce, commitment,
    });
  }

  // ── Primary API ────────────────────────────────────────────────────────────

  /**
   * True if the device is likely a VM, AI inference endpoint, or sanitised
   * cloud environment. Uses the adjusted score (base + heuristic bonuses/penalties).
   * @type {boolean}
   */
  get isSynthetic() {
    // Stage-2 hard kill: EJR/QE mathematical contradiction detected in the
    // heuristic engine before any bonuses could accumulate.
    if (this._raw.heuristic.hardOverride === 'vm') return true;
    // Stage-3 hard kill: EJR/QE contradiction or phase forgery detected in
    // the coherence analyser (second line of defence).
    if (this._raw.coherence.hardOverride === 'vm') return true;
    // Normal path: final score vs dynamic threshold.
    return this._raw.finalScore < this._raw.coherence.dynamicThreshold;
  }

  /**
   * Confidence in the isSynthetic verdict, 0–100.
   * @type {number}
   */
  get confidence() {
    const s = this._raw.finalScore;
    const t = this._raw.coherence.dynamicThreshold;
    // Map distance from threshold to confidence percentage.
    // At the threshold: 0% confident. Far above/below: approaching 100%.
    // A 0.2 gap from the threshold saturates at 100%.
    const distance = Math.abs(s - t);
    return Math.min(100, Math.round(distance * 500));
  }

  /**
   * Normalised score [0.0, 1.0]. Higher = more physical.
   * This is the FINAL score after all three analysis stages.
   * @type {number}
   */
  get score() {
    return _round(this._raw.finalScore, 4);
  }

  /**
   * The dynamic passing threshold for this proof [0.55, 0.67].
   * Reflects how much evidence was collected — a full-evidence proof has a
   * lower (more permissive) threshold; a minimal-evidence proof has a higher
   * (more conservative) threshold.
   * @type {number}
   */
  get threshold() {
    return this._raw.coherence.dynamicThreshold;
  }

  /**
   * How much evidence was collected [0, 1].
   * 1.0 = 200 iterations + phased + bio + audio + canvas
   * 0.0 = minimal proof
   * @type {number}
   */
  get evidenceWeight() {
    return this._raw.coherence.evidenceWeight;
  }

  /**
   * Human-readable confidence tier derived from `confidence`.
   * @type {'high'|'medium'|'low'|'uncertain'}
   */
  get tier() {
    const c = this.confidence;
    if (c >= 70) return 'high';
    if (c >= 40) return 'medium';
    if (c >= 20) return 'low';
    return 'uncertain';
  }

  /**
   * Detected timing profile name.
   * 'analog-fog' → real hardware, natural Brownian noise
   * 'picket-fence' → VM steal-time bursts at regular intervals
   * 'burst-scheduler' → irregular VM scheduling (VMware-style)
   * 'hypervisor-flat' → flat timing, hypervisor completely irons out noise
   * 'near-physical' → hard to classify (Nitro, GPU passthrough)
   * 'unknown'
   * @type {string}
   */
  get profile() {
    return this._raw.provider.profile;
  }

  /**
   * Detected cloud provider / hypervisor.
   * @type {string} e.g. 'kvm-digitalocean', 'nitro-aws', 'physical', 'generic-vm'
   */
  get providerId() {
    return this._raw.provider.providerId;
  }

  /**
   * Human-readable provider label.
   * @type {string} e.g. 'DigitalOcean Droplet (KVM)', 'Physical Hardware'
   */
  get providerLabel() {
    return this._raw.provider.providerLabel;
  }

  /**
   * Estimated hypervisor scheduler quantum in milliseconds.
   * Null if the device appears to be physical.
   * @type {number|null}
   */
  get schedulerQuantumMs() {
    return this._raw.provider.schedulerQuantumMs;
  }

  /**
   * Entropy-Jitter Ratio — the key signal distinguishing real silicon from VMs.
   * Values ≥ 1.08 confirm thermal feedback (real hardware).
   * Values near 1.0 indicate a hypervisor clock (VM).
   * Null if phased collection was not run.
   * @type {number|null}
   */
  get entropyJitterRatio() {
    return this._raw.heuristic.entropyJitterRatio;
  }

  /**
   * The most diagnostic flag from the heuristic engine.
   * Heuristic findings are listed ahead of base-classifier flags;
   * presumably findings are ordered most-diagnostic first — confirm
   * in runHeuristicEngine.
   * @type {string}
   */
  get topFlag() {
    const flags = [
      ...this._raw.heuristic.findings.map(f => f.id),
      ...this._raw.jitter.flags,
    ];
    return flags[0] ?? 'NONE';
  }

  /**
   * All flags from both the base classifier and heuristic engine.
   * NOTE(review): reads heuristic.coherenceFlags while topFlag reads
   * heuristic.findings — confirm the two lists are meant to differ.
   * @type {string[]}
   */
  get flags() {
    return [
      ...this._raw.heuristic.coherenceFlags,
      ...this._raw.jitter.flags,
    ];
  }

  /**
   * Summary of heuristic findings with human-readable labels.
   * @type {Array<{id, label, severity, detail}>}
   */
  get findings() {
    return this._raw.heuristic.findings;
  }

  /**
   * Confirmed physical properties (positive evidence for real hardware).
   * @type {Array<{id, label, detail}>}
   */
  get physicalEvidence() {
    return this._raw.heuristic.bonuses;
  }

  // ── Hardware ID ────────────────────────────────────────────────────────────

  /**
   * A stable, privacy-preserving hardware identifier derived from the GPU
   * canvas fingerprint, audio sample rate, and WebGL extension set.
   *
   * Properties:
   *   - Stable: same device → same ID across sessions
   *   - Not uniquely identifying: changes if GPU or driver changes
   *   - Not reversible: BLAKE3 hash, cannot recover original signals
   *   - Not a tracking cookie: no PII, no cross-origin data
   *
   * Missing components fall back to '' so partial collection still yields
   * a deterministic ID.
   *
   * @returns {string} 16-character hex ID
   */
  hardwareId() {
    const { canvas, audio } = this._raw;
    const components = [
      canvas?.webglRenderer ?? '',
      canvas?.webglVendor ?? '',
      canvas?.extensionCount?.toString() ?? '',
      audio?.sampleRate?.toString() ?? '',
      canvas?.webglVersion?.toString() ?? '',
    ].join('|');
    return blake3HexStr(components).slice(0, 16);
  }

  // ── Diagnostic data ────────────────────────────────────────────────────────

  /**
   * Key metrics summary — useful for logging and debugging.
   * Flat object of all verdict, pipeline, timing, coherence, provider, and
   * hardware signals; non-finite values are rendered as null by _round.
   * @returns {object}
   */
  metrics() {
    const { jitter, heuristic, coherence, provider } = this._raw;
    return {
      // ── Final verdict ──────────────────────────────────────────────────
      score: this.score, // final (stage 3)
      threshold: this.threshold, // dynamic passing bar
      evidenceWeight: this.evidenceWeight,
      isSynthetic: this.isSynthetic,
      // ── Score pipeline breakdown ───────────────────────────────────────
      rawScore: _round(this._raw.rawScore, 4), // stage 1
      adjustedScore: _round(this._raw.adjScore, 4), // stage 2
      finalScore: _round(this._raw.finalScore, 4), // stage 3
      heuristicAdjustment: _round(heuristic.netAdjustment, 4),
      coherenceAdjustment: _round(coherence.netAdjustment, 4),
      // ── Timing signals ─────────────────────────────────────────────────
      cv: _round(jitter.stats?.cv, 4),
      hurstExponent: _round(jitter.hurstExponent, 4),
      quantizationEntropy: _round(jitter.quantizationEntropy, 4),
      autocorrLag1: _round(jitter.autocorrelations?.lag1, 4),
      autocorrLag50: _round(this._raw.entropy.autocorrelations?.lag50, 4),
      outlierRate: _round(jitter.outlierRate, 4),
      thermalPattern: jitter.thermalSignature?.pattern,
      entropyJitterRatio: _round(heuristic.entropyJitterRatio, 4),
      picketFence: heuristic.picketFence.detected,
      // ── Coherence signals ──────────────────────────────────────────────
      coherenceFlags: coherence.coherenceFlags,
      physicalFlags: coherence.physicalFlags,
      hardOverride: coherence.hardOverride,
      // ── Provider ───────────────────────────────────────────────────────
      provider: provider.providerLabel,
      providerConfidence: provider.confidence,
      schedulerQuantumMs: provider.schedulerQuantumMs,
      // ── Hardware ───────────────────────────────────────────────────────
      webglRenderer: this._raw.canvas?.webglRenderer,
      isSoftwareRenderer: this._raw.canvas?.isSoftwareRenderer,
      hardwareId: this.hardwareId(),
    };
  }

  /**
   * Full diagnostic report for debugging / integration testing.
   * Superset of metrics(): adds the verdict block, pipeline breakdown,
   * per-check coherence detail, and phased-collection summary (null when
   * phased collection was not run).
   * @returns {object}
   */
  report() {
    const { coherence } = this._raw;
    return {
      verdict: {
        isSynthetic: this.isSynthetic,
        score: this.score,
        threshold: this.threshold,
        confidence: this.confidence,
        tier: this.tier,
        profile: this.profile,
        provider: this.providerLabel,
        topFlag: this.topFlag,
        hardOverride: coherence.hardOverride,
        evidenceWeight: this.evidenceWeight,
      },
      pipeline: {
        rawScore: _round(this._raw.rawScore, 4),
        adjustedScore: _round(this._raw.adjScore, 4),
        finalScore: _round(this._raw.finalScore, 4),
        heuristicAdjustment: _round(this._raw.heuristic.netAdjustment, 4),
        coherenceAdjustment: _round(coherence.netAdjustment, 4),
        dynamicThreshold: coherence.dynamicThreshold,
      },
      metrics: this.metrics(),
      findings: this.findings,
      physicalEvidence: this.physicalEvidence,
      coherenceChecks: coherence.checks,
      coherenceBonuses: coherence.bonuses,
      phases: this._raw.entropy.phases ? {
        cold: { qe: _round(this._raw.entropy.phases.cold.qe, 4), mean: _round(this._raw.entropy.phases.cold.mean, 4) },
        hot: { qe: _round(this._raw.entropy.phases.hot.qe, 4), mean: _round(this._raw.entropy.phases.hot.mean, 4) },
        entropyJitterRatio: _round(this._raw.entropy.phases.entropyJitterRatio, 4),
      } : null,
    };
  }

  // ── Proof commitment ───────────────────────────────────────────────────────

  /**
   * Returns the BLAKE3 commitment to send to the server for validation.
   * Built once, eagerly, during collect().
   * @returns {{ payload: object, hash: string }}
   */
  toCommitment() {
    return this._raw.commitment;
  }

  // ── String representations ─────────────────────────────────────────────────

  /** One-line human-readable verdict summary. */
  toString() {
    const icon = this.isSynthetic ? '🚩' : '✅';
    const verb = this.isSynthetic ? 'Synthetic' : 'Physical';
    return `${icon} ${verb} | ${this.providerLabel} | score=${this.score} | conf=${this.confidence}% | profile=${this.profile}`;
  }

  /** JSON.stringify() serialises the full diagnostic report. */
  toJSON() {
    return this.report();
  }
}
3993
+
3994
+ // ---------------------------------------------------------------------------
3995
+ // Internal helpers
3996
+ // ---------------------------------------------------------------------------
3997
+
3998
/**
 * Round a number to `d` decimal places.
 *
 * @param {number|null|undefined} v - value to round
 * @param {number} d - number of decimal places (non-negative integer)
 * @returns {number|null} the rounded value, or null when `v` is null,
 *   undefined, NaN, ±Infinity, or not a number at all
 */
function _round(v, d) {
  // Number.isFinite (unlike the coercing global isFinite) rejects non-number
  // inputs such as strings and booleans instead of silently coercing them,
  // so a bad upstream value surfaces as null in metrics/reports rather than
  // as a plausible-looking fake number.
  if (v == null || !Number.isFinite(v)) return null;
  const f = 10 ** d;
  return Math.round(v * f) / f;
}
4003
+
4004
+ /**
4005
+ * @sovereign/pulse — Server-Side Validator
4006
+ *
4007
+ * Verifies a ProofPayload + BLAKE3 commitment received from the client.
4008
+ * This module is for NODE.JS / SERVER use only. It should NOT be bundled
4009
+ * into the browser build (see package.json "exports" field).
4010
+ *
4011
+ * Trust model:
4012
+ * • The server issues a challenge `nonce` before the client runs pulse().
4013
+ * • The client returns { payload, hash }.
4014
+ * • The server calls validateProof(payload, hash, options) to:
4015
+ * 1. Verify hash integrity (no tampering).
4016
+ * 2. Verify nonce freshness (no replay).
4017
+ * 3. Verify timestamp recency.
4018
+ * 4. Check jitter score against thresholds.
4019
+ * 5. Check canvas fingerprint against software-renderer blocklist.
4020
+ * 6. Cross-validate signal consistency.
4021
+ *
4022
+ * NOTE: The server NEVER sees raw timing arrays or mouse coordinates.
4023
+ * Only statistical summaries are transmitted. This is the ZK property.
4024
+ */
4025
+
4026
+
4027
+ // ---------------------------------------------------------------------------
4028
+ // Known software / virtual renderer substring patterns (lowercase)
4029
+ // ---------------------------------------------------------------------------
4030
// Each entry is matched as a lowercase SUBSTRING of the reported WebGL
// renderer string (see the rendererLc.includes(pattern) loop in
// validateProof), so broad prefixes like 'amd instinct' cover whole families.
const VM_RENDERER_BLOCKLIST = [
  // Software / virtual renderers
  'llvmpipe', 'swiftshader', 'softpipe', 'mesa offscreen',
  'microsoft basic render', 'vmware svga', 'vmware', 'virtualbox',
  'parallels', 'chromium swiftshader', 'google swiftshader',
  // NOTE(review): 'cirrussm' looks like a typo — QEMU's emulated Cirrus VGA
  // is usually reported as 'cirrus'; confirm the intended substring.
  'angle (', 'cirrussm', 'qxl', 'virtio', 'bochs',
  // NVIDIA datacenter / inference — no consumer unit has these
  'nvidia t4',    // AWS/GCP inference VM
  'nvidia a10g',  // AWS g5 inference
  'nvidia a100',  // Datacenter A100
  'nvidia h100',  // Hopper — datacenter only
  'nvidia h200',  // Hopper successor — datacenter only
  'nvidia b100',  // Blackwell — datacenter only
  'nvidia b200',  // Blackwell Ultra — datacenter only
  'nvidia gh200', // Grace-Hopper superchip
  // AMD datacenter / HPC — no consumer has these
  'amd instinct', // covers mi100, mi200, mi250, mi300 family
  'amd mi300',
  'amd mi250',
  'amd mi200',
  // Cloud-specific AI accelerators
  'aws inferentia',
  'aws trainium',
  'google tpu',
];
4055
+
4056
+ // ---------------------------------------------------------------------------
4057
+ // validateProof
4058
+ // ---------------------------------------------------------------------------
4059
+
4060
+ /**
4061
+ * Validates a client-submitted proof.
4062
+ *
4063
+ * @param {import('./fingerprint.js').ProofPayload} payload
4064
+ * @param {string} receivedHash - hex BLAKE3 from the client
4065
+ * @param {object} [opts]
4066
+ * @param {number} [opts.minJitterScore=0.55] - minimum acceptable jitter score
4067
+ * @param {number} [opts.maxAgeMs=300_000] - max payload age (5 min)
4068
+ * @param {number} [opts.clockSkewMs=30_000] - tolerated future timestamp drift
4069
+ * @param {boolean} [opts.requireBio=false] - reject if no bio activity
4070
+ * @param {boolean} [opts.blockSoftwareRenderer=true] - reject software WebGL
4071
+ * @param {Function} [opts.checkNonce] - async fn(nonce) → boolean
4072
+ * Called to verify the nonce was issued by this server and not yet consumed.
4073
+ * Should mark the nonce as consumed atomically (e.g. Redis SET NX with TTL).
4074
+ * If omitted, nonce freshness is NOT checked (not recommended for production).
4075
+ *
4076
+ * @returns {Promise<ValidationResult>}
4077
+ */
4078
async function validateProof(payload, receivedHash, opts = {}) {
  const {
    minJitterScore = 0.55,
    maxAgeMs = 300_000,
    clockSkewMs = 30_000,
    requireBio = false,
    blockSoftwareRenderer = true,
    checkNonce = null,
  } = opts;

  // Hard failures call _reject() and return immediately; soft failures set
  // valid=false and accumulate human-readable reasons; non-blocking oddities
  // go into riskFlags and only influence the confidence rating.
  const reasons = [];
  const riskFlags = [];
  let valid = true;

  // ── 0. Strict payload structure validation ────────────────────────────────
  if (!payload || typeof payload !== 'object' || Array.isArray(payload)) {
    return _reject(['INVALID_PAYLOAD_STRUCTURE']);
  }

  // Prototype pollution guard — reject any payload with __proto__ / constructor tricks
  if (
    Object.prototype.hasOwnProperty.call(payload, '__proto__') ||
    Object.prototype.hasOwnProperty.call(payload, 'constructor') ||
    Object.prototype.hasOwnProperty.call(payload, 'prototype')
  ) {
    return _reject(['PROTOTYPE_POLLUTION_ATTEMPT']);
  }

  // Required top-level fields
  const REQUIRED_TOP = ['version', 'timestamp', 'nonce', 'signals', 'classification'];
  for (const field of REQUIRED_TOP) {
    if (!(field in payload)) {
      return _reject([`MISSING_REQUIRED_FIELD:${field}`]);
    }
  }

  // Type assertions on top-level scalars
  if (typeof payload.version !== 'number') return _reject(['INVALID_TYPE:version']);
  if (typeof payload.timestamp !== 'number') return _reject(['INVALID_TYPE:timestamp']);
  if (typeof payload.nonce !== 'string') return _reject(['INVALID_TYPE:nonce']);
  if (typeof payload.signals !== 'object' || Array.isArray(payload.signals)) {
    return _reject(['INVALID_TYPE:signals']);
  }
  if (typeof payload.classification !== 'object' || Array.isArray(payload.classification)) {
    return _reject(['INVALID_TYPE:classification']);
  }

  // Nonce must be a 64-character lowercase hex string (32 bytes)
  if (!/^[0-9a-f]{64}$/.test(payload.nonce)) {
    return _reject(['INVALID_NONCE_FORMAT']);
  }

  // Timestamp must be a plausible Unix ms value (> year 2020, < year 2100)
  const TS_MIN = 1_577_836_800_000; // 2020-01-01
  const TS_MAX = 4_102_444_800_000; // 2100-01-01
  if (payload.timestamp < TS_MIN || payload.timestamp > TS_MAX) {
    return _reject(['TIMESTAMP_OUT_OF_RANGE']);
  }

  if (payload.version !== 1) {
    return _reject(['UNSUPPORTED_PROOF_VERSION']);
  }

  // ── 1. Hash integrity ─────────────────────────────────────────────────────
  // receivedHash must be exactly 64 lowercase hex characters
  if (typeof receivedHash !== 'string' || !/^[0-9a-f]{64}$/.test(receivedHash)) {
    return _reject(['INVALID_HASH_FORMAT']);
  }
  const canonical = canonicalJson(payload);
  const enc = new TextEncoder().encode(canonical);
  const computed = bytesToHex(blake3(enc));

  // NOTE(review): this is not a constant-time comparison. Presumably
  // acceptable because the hash commits to data the server fully sees (no
  // secret is being compared) — confirm against the threat model.
  if (computed !== receivedHash) {
    return _reject(['HASH_MISMATCH_PAYLOAD_TAMPERED']);
  }

  // ── 2. Timestamp recency ──────────────────────────────────────────────────
  // `now` and `age` are also surfaced in the returned meta block below.
  const now = Date.now();
  const age = now - payload.timestamp;
  if (age > maxAgeMs) {
    valid = false;
    reasons.push(`PROOF_EXPIRED: age=${Math.round(age / 1000)}s, max=${maxAgeMs / 1000}s`);
  }
  if (payload.timestamp > now + clockSkewMs) {
    valid = false;
    reasons.push('PROOF_FROM_FUTURE');
  }

  // ── 3. Nonce freshness ────────────────────────────────────────────────────
  if (checkNonce) {
    const nonceOk = await checkNonce(payload.nonce);
    if (!nonceOk) {
      valid = false;
      reasons.push('NONCE_INVALID_OR_REPLAYED');
    }
  } else {
    // Without a checkNonce callback replays cannot be detected — flagged,
    // not rejected, so development setups still work.
    riskFlags.push('NONCE_FRESHNESS_NOT_CHECKED');
  }

  // ── 4. Jitter score ───────────────────────────────────────────────────────
  // Missing/absent score defaults to 0, which always fails the gate below.
  const jitterScore = payload.classification?.jitterScore ?? 0;
  if (jitterScore < minJitterScore) {
    valid = false;
    reasons.push(`JITTER_SCORE_TOO_LOW: ${jitterScore} < ${minJitterScore}`);
  }

  // ── 4b. Dynamic threshold (evidence-proportional gate) ──────────────────
  // The server independently computes the minimum passing score based on how
  // much evidence the proof contains. The client's dynamicThreshold field is
  // NEVER trusted — it is only used for logging/auditing.
  //
  // Logic: a proof with only 50 iterations and no bio/audio faces a higher bar
  // (0.62) than a full 200-iteration proof with phased data (0.50).
  // This makes replay attacks with minimal proofs automatically fail the gate.
  const serverDynamicMin = computeServerDynamicThreshold(payload);

  // We check the FINAL client score (which includes stage-3 coherence adjustment)
  // if it was included, otherwise fall back to the base jitterScore.
  const finalClientScore = payload.classification?.finalScore ?? jitterScore;
  if (finalClientScore < serverDynamicMin) {
    valid = false;
    reasons.push(
      `DYNAMIC_THRESHOLD_NOT_MET: score=${finalClientScore} < ` +
      `serverMin=${serverDynamicMin} (evidenceWeight=${
        _computeEvidenceWeight(payload).toFixed(3)
      })`
    );
  }

  // Surface diagnostic flags from the client's classifier
  for (const flag of (payload.classification?.flags ?? [])) {
    if (flag.includes('VM') || flag.includes('FLAT') || flag.includes('SYNTHETIC')) {
      riskFlags.push(`CLIENT_FLAG:${flag}`);
    }
  }

  // Hard override from the client heuristic engine (stage 2).
  // EJR_PHASE_HARD_KILL fires when the stored entropyJitterRatio is mathematically
  // inconsistent with the stored cold_QE / hot_QE values — proof of tampering.
  // A legitimate SDK running on real hardware never triggers this.
  if (payload.heuristic?.hardOverride === 'vm') {
    valid = false;
    reasons.push(
      `HEURISTIC_HARD_OVERRIDE: stage-2 EJR/QE mathematical contradiction — ` +
      `${(payload.heuristic.coherenceFlags ?? []).join(', ')}`
    );
  }

  // Hard override from the client coherence stage (stage 3).
  // Second line of defence — catches the same contradiction via a different
  // code path and also catches the phase-trajectory forgery variant.
  if (payload.coherence?.hardOverride === 'vm') {
    valid = false;
    reasons.push(
      `COHERENCE_HARD_OVERRIDE: stage-3 analysis detected a mathematical ` +
      `impossibility — ${(payload.coherence.coherenceFlags ?? []).join(', ')}`
    );
  }

  // Surface all coherence flags for risk tracking / audit logs
  for (const flag of (payload.heuristic?.coherenceFlags ?? [])) {
    riskFlags.push(`HEURISTIC:${flag}`);
  }
  for (const flag of (payload.coherence?.coherenceFlags ?? [])) {
    riskFlags.push(`COHERENCE:${flag}`);
  }

  // ── 5. Canvas / WebGL renderer check ──────────────────────────────────────
  const canvas = payload.signals?.canvas;
  if (canvas) {
    if (canvas.isSoftwareRenderer && blockSoftwareRenderer) {
      valid = false;
      reasons.push(`SOFTWARE_RENDERER_DETECTED: ${canvas.webglRenderer}`);
    }
    // Substring match against the lowercase blocklist; first hit wins.
    const rendererLc = (canvas.webglRenderer ?? '').toLowerCase();
    for (const pattern of VM_RENDERER_BLOCKLIST) {
      if (rendererLc.includes(pattern)) {
        valid = false;
        reasons.push(`BLOCKLISTED_RENDERER: ${canvas.webglRenderer}`);
        riskFlags.push(`RENDERER_MATCH:${pattern}`);
        break;
      }
    }
    if (!canvas.available) {
      riskFlags.push('CANVAS_UNAVAILABLE');
    }
  }

  // ── 6. Bio activity ───────────────────────────────────────────────────────
  const bio = payload.signals?.bio;
  if (bio) {
    if (requireBio && !bio.hasActivity) {
      valid = false;
      reasons.push('NO_BIO_ACTIVITY_DETECTED');
    }
    if (bio.mouseSampleCount === 0 && bio.keyboardSampleCount === 0) {
      riskFlags.push('ZERO_BIO_SAMPLES');
    }
    // Interference coefficient check: real human+hardware shows measurable correlation
    if (bio.interferenceCoefficient < -0.3) {
      riskFlags.push('NEGATIVE_INTERFERENCE_COEFFICIENT');
    }
  }

  // ── 7. Internal consistency checks ────────────────────────────────────────
  const entropy = payload.signals?.entropy;
  if (entropy) {
    // CV and jitter score should be directionally consistent
    if (entropy.timingsCV < 0.01 && jitterScore > 0.7) {
      riskFlags.push('INCONSISTENCY:LOW_CV_BUT_HIGH_SCORE');
    }
    // Timer granularity should not be exactly 0 (no real device has infinite resolution)
    if (entropy.timerGranularityMs === 0) {
      riskFlags.push('SUSPICIOUS_ZERO_TIMER_GRANULARITY');
    }
    // Extreme thermal patterns inconsistent with score
    if (entropy.thermalPattern === 'flat' && jitterScore > 0.8) {
      riskFlags.push('INCONSISTENCY:FLAT_THERMAL_BUT_HIGH_SCORE');
    }
    // Hurst exponent way out of range
    if (entropy.hurstExponent != null) {
      if (entropy.hurstExponent < 0.2 || entropy.hurstExponent > 0.85) {
        riskFlags.push(`EXTREME_HURST:${entropy.hurstExponent}`);
      }
    }
  }

  // ── 7b. Cross-signal physics forgery detection ────────────────────────────
  // BLAKE3 prevents tampering with a payload that was legitimately generated by
  // the SDK. However, a determined attacker can:
  //   1. Obtain a valid server nonce
  //   2. Craft a fake payload with forged statistics
  //   3. Compute BLAKE3(forgedPayload) themselves (BLAKE3 is public)
  //   4. Submit { payload: forgedPayload, hash: selfComputedHash }
  //
  // These checks detect statistically impossible metric combinations that no
  // real device would ever produce, catching crafted payloads even though the
  // hash integrity check passes.
  //
  // All three thresholds are set conservatively: they only fire when the
  // combination is physically IMPOSSIBLE, not just unlikely, to avoid false
  // positives on unusual-but-legitimate hardware.
  if (entropy) {
    const cv = entropy.timingsCV ?? null;
    const qe = entropy.quantizationEntropy ?? null;
    const lag1 = entropy.autocorr_lag1 ?? null;

    // Impossibly flat CV + high physical score
    // Real explanation: CV < 0.015 means timing jitter < 1.5% — hypervisor-flat.
    // No real-silicon CPU running a WASM matrix multiply achieves this.
    // A high jitterScore (> 0.65) is physically incompatible with CV < 0.015.
    if (cv !== null && cv < 0.015 && jitterScore > 0.65) {
      valid = false;
      reasons.push(
        `FORGED_SIGNAL:CV_SCORE_IMPOSSIBLE cv=${cv.toFixed(5)} is hypervisor-flat ` +
        `but jitterScore=${jitterScore.toFixed(3)} claims physical hardware`
      );
    }

    // VM-grade autocorrelation + high physical score
    // lag1 > 0.70 is a hypervisor scheduler rhythm — unambiguous VM signature.
    // A device with that level of autocorrelation cannot score > 0.70 on the
    // physical scale; the jitter classifier would have penalised it heavily.
    if (lag1 !== null && lag1 > 0.70 && jitterScore > 0.70) {
      valid = false;
      reasons.push(
        `FORGED_SIGNAL:AUTOCORR_SCORE_IMPOSSIBLE lag1=${lag1.toFixed(3)} is VM-level ` +
        `but jitterScore=${jitterScore.toFixed(3)} claims physical hardware`
      );
    }

    // VM-grade quantization entropy + high physical score
    // QE < 2.0 means timings cluster on a small number of distinct values —
    // the classic integer-millisecond quantisation of an emulated/virtual timer.
    // A device producing QE < 2.0 cannot legitimately score > 0.65 as physical.
    if (qe !== null && qe < 2.0 && jitterScore > 0.65) {
      valid = false;
      reasons.push(
        `FORGED_SIGNAL:QE_SCORE_IMPOSSIBLE qe=${qe.toFixed(3)} bits is VM-level ` +
        `but jitterScore=${jitterScore.toFixed(3)} claims physical hardware`
      );
    }
  }

  // ── 8. Audio signal check ─────────────────────────────────────────────────
  const audio = payload.signals?.audio;
  if (audio?.available) {
    // Impossibly low jitter CV may indicate a synthetic audio driver
    if (audio.callbackJitterCV < 0.001) {
      riskFlags.push('AUDIO_JITTER_TOO_FLAT');
    }
  }

  // ── Confidence rating ─────────────────────────────────────────────────────
  let confidence;
  if (!valid) {
    confidence = 'rejected';
  } else if (riskFlags.length === 0 && jitterScore >= 0.75) {
    confidence = 'high';
  } else if (riskFlags.length <= 2 && jitterScore >= 0.60) {
    confidence = 'medium';
  } else {
    confidence = 'low';
  }

  return {
    valid,
    score: jitterScore,
    confidence,
    reasons,
    riskFlags,
    meta: {
      receivedAt: now,
      proofAge: age,
      jitterScore,
      canvasRenderer: canvas?.webglRenderer ?? null,
      bioActivity: bio?.hasActivity ?? false,
    },
  };
}
4398
+
4399
+ /**
4400
+ * @typedef {object} ValidationResult
4401
+ * @property {boolean} valid
4402
+ * @property {number} score
4403
+ * @property {'high'|'medium'|'low'|'rejected'} confidence
4404
+ * @property {string[]} reasons - human-readable rejection reasons
4405
+ * @property {string[]} riskFlags - non-blocking risk indicators
4406
+ * @property {object} meta
4407
+ */
4408
+
4409
+ // ---------------------------------------------------------------------------
4410
+ // generateNonce (convenience helper for the server challenge flow)
4411
+ // ---------------------------------------------------------------------------
4412
+
4413
+ /**
4414
+ * Generate a cryptographically random 32-byte nonce for the server challenge.
4415
+ * The server should store this nonce with a TTL before issuing it to the client.
4416
+ *
4417
+ * @returns {string} hex nonce
4418
+ */
4419
+ async function generateNonce() {
4420
+ const buf = new Uint8Array(32);
4421
+ if (typeof globalThis.crypto?.getRandomValues === 'function') {
4422
+ // Browser + Node.js ≥ 19
4423
+ globalThis.crypto.getRandomValues(buf);
4424
+ } else {
4425
+ // Node.js 18 — webcrypto is at `crypto.webcrypto`
4426
+ const { webcrypto } = await import('node:crypto');
4427
+ webcrypto.getRandomValues(buf);
4428
+ }
4429
+ return bytesToHex(buf);
4430
+ }
4431
+
4432
+ // ---------------------------------------------------------------------------
4433
+ // Internal helpers
4434
+ // ---------------------------------------------------------------------------
4435
+
4436
/**
 * Build an immediate-rejection ValidationResult.
 * Shape mirrors the success path of validateProof() so callers can handle
 * both uniformly.
 *
 * @param {string[]} reasons - rejection reason codes (kept by reference)
 * @returns {ValidationResult}
 */
function _reject(reasons) {
  const base = { valid: false, score: 0, confidence: 'rejected' };
  return { ...base, reasons, riskFlags: [], meta: {} };
}
4446
+
4447
/**
 * Compute how much evidence a proof carries, in [0, 1].
 * Iteration count dominates (65%); phased data, bio, audio and canvas
 * availability contribute fixed bonuses, and the total is capped at 1.0.
 *
 * @param {object|null|undefined} payload
 * @returns {number} evidence weight in [0, 1]
 */
function _computeEvidenceWeight(payload) {
  const signals = payload?.signals;
  const iterations = signals?.entropy?.iterations ?? 0;

  let weight = Math.min(1.0, iterations / 200) * 0.65;
  if (payload?.heuristic?.entropyJitterRatio != null) weight += 0.15;
  if (signals?.bio?.hasActivity === true) weight += 0.10;
  if (signals?.audio?.available === true) weight += 0.05;
  if (signals?.canvas?.available === true) weight += 0.05;

  return Math.min(1.0, weight);
}
4461
+
4462
+ /**
4463
+ * @sovereign/pulse
4464
+ *
4465
+ * Physical Turing Test — distinguishes a real consumer device with a human
4466
+ * operator from a sanitised Datacenter VM / AI Instance.
4467
+ *
4468
+ * Usage (client-side):
4469
+ *
4470
+ * import { pulse } from '@sovereign/pulse';
4471
+ *
4472
+ * // 1. Get a server-issued nonce (prevents replay attacks)
4473
+ * const { nonce } = await fetch('/api/pulse-challenge').then(r => r.json());
4474
+ *
4475
+ * // 2. Run the probe (takes ~3-5 seconds)
4476
+ * const { payload, hash } = await pulse({ nonce });
4477
+ *
4478
+ * // 3. Send to your server
4479
+ * const verdict = await fetch('/api/pulse-verify', {
4480
+ * method: 'POST',
4481
+ * body: JSON.stringify({ payload, hash }),
4482
+ * }).then(r => r.json());
4483
+ *
4484
+ * Usage (server-side):
4485
+ *
4486
+ * import { validateProof, generateNonce } from '@sovereign/pulse/validator';
4487
+ *
4488
+ * // Challenge endpoint
4489
+ * app.get('/api/pulse-challenge', (req, res) => {
4490
+ * const nonce = generateNonce();
4491
+ * await redis.set(`pulse:nonce:${nonce}`, '1', 'EX', 300); // 5-min TTL
4492
+ * res.json({ nonce });
4493
+ * });
4494
+ *
4495
+ * // Verify endpoint
4496
+ * app.post('/api/pulse-verify', async (req, res) => {
4497
+ * const { payload, hash } = req.body;
4498
+ * const result = await validateProof(payload, hash, {
4499
+ * checkNonce: async (n) => {
4500
+ * const ok = await redis.del(`pulse:nonce:${n}`);
4501
+ * return ok === 1; // true only if nonce existed and was consumed
4502
+ * },
4503
+ * });
4504
+ * res.json(result);
4505
+ * });
4506
+ */
4507
+
4508
+
4509
+ // ---------------------------------------------------------------------------
4510
+ // Hosted API mode — pulse({ apiKey }) with zero server setup
4511
+ // ---------------------------------------------------------------------------
4512
+
4513
+ /**
4514
+ * Run pulse() against the sovereign hosted API.
4515
+ * Fetches nonce, runs probe locally (WASM still on device), submits proof.
4516
+ *
4517
+ * @param {object} opts — same as pulse(), plus apiKey + apiUrl
4518
+ * @returns {Promise<{ payload, hash, result }>}
4519
+ */
4520
+ async function _pulseHosted(opts) {
4521
+ const {
4522
+ apiKey,
4523
+ apiUrl = 'https://api.sovereign.dev',
4524
+ iterations = 200,
4525
+ matrixSize = 64,
4526
+ bioWindowMs = 3_000,
4527
+ phased = true,
4528
+ adaptive = true,
4529
+ adaptiveThreshold = 0.85,
4530
+ requireBio = false,
4531
+ wasmPath,
4532
+ onProgress,
4533
+ verifyOptions = {},
4534
+ } = opts;
4535
+
4536
+ // 1. Fetch nonce from hosted challenge endpoint
4537
+ const challengeRes = await fetch(`${apiUrl}/v1/challenge`, {
4538
+ headers: { 'Authorization': `Bearer ${apiKey}` },
4539
+ });
4540
+ if (!challengeRes.ok) {
4541
+ const body = await challengeRes.json().catch(() => ({}));
4542
+ throw new Error(`[pulse] Challenge failed (${challengeRes.status}): ${body.message ?? 'unknown error'}`);
4543
+ }
4544
+ const { nonce } = await challengeRes.json();
4545
+
4546
+ // 2. Run the local probe (WASM, bio, canvas, audio — all on device)
4547
+ const commitment = await _runProbe({
4548
+ nonce, iterations, matrixSize, bioWindowMs,
4549
+ phased, adaptive, adaptiveThreshold, requireBio,
4550
+ wasmPath, onProgress,
4551
+ });
4552
+
4553
+ // 3. Submit proof to hosted verify endpoint
4554
+ const verifyRes = await fetch(`${apiUrl}/v1/verify`, {
4555
+ method: 'POST',
4556
+ headers: {
4557
+ 'Content-Type': 'application/json',
4558
+ 'Authorization': `Bearer ${apiKey}`,
4559
+ },
4560
+ body: JSON.stringify({
4561
+ payload: commitment.payload,
4562
+ hash: commitment.hash,
4563
+ options: verifyOptions,
4564
+ }),
4565
+ });
4566
+
4567
+ const result = await verifyRes.json();
4568
+
4569
+ // Return commitment + server result for convenience
4570
+ return { ...commitment, result };
4571
+ }
4572
+
4573
+ // ---------------------------------------------------------------------------
4574
+ // pulse() — main entry point
4575
+ // ---------------------------------------------------------------------------
4576
+
4577
+ /**
4578
+ * Run the full @sovereign/pulse probe and return a signed commitment.
4579
+ *
4580
+ * Two modes:
4581
+ * - pulse({ nonce }) — self-hosted (you manage the nonce server)
4582
+ * - pulse({ apiKey }) — hosted API (zero server setup required)
4583
+ *
4584
+ * @param {PulseOptions} opts
4585
+ * @returns {Promise<PulseCommitment>}
4586
+ */
4587
+ async function pulse(opts = {}) {
4588
+ // ── Hosted API mode ────────────────────────────────────────────────────────
4589
+ if (opts.apiKey) {
4590
+ return _pulseHosted(opts);
4591
+ }
4592
+
4593
+ // ── Self-hosted mode ───────────────────────────────────────────────────────
4594
+ const { nonce } = opts;
4595
+ if (!nonce || typeof nonce !== 'string') {
4596
+ throw new Error(
4597
+ '@sovereign/pulse: opts.nonce is required (self-hosted), or pass opts.apiKey for zero-config hosted mode.'
4598
+ );
4599
+ }
4600
+
4601
+ return _runProbe(opts);
4602
+ }
4603
+
4604
+ /**
4605
+ * Internal probe runner — shared between self-hosted and hosted API modes.
4606
+ * @private
4607
+ */
4608
/**
 * Internal probe runner — shared between self-hosted and hosted API modes.
 * Orchestrates: bio collection → parallel entropy/canvas/audio probes (with
 * an overall timeout) → jitter analysis → proof + commitment construction.
 *
 * @param {object} opts - see pulse() for option semantics
 * @returns {Promise<PulseCommitment>}
 * @throws {Error} on timeout, probe failure, or requireBio with no activity
 * @private
 */
async function _runProbe(opts) {
  const {
    nonce,
    timeout = 8_000,
    iterations = 200,
    matrixSize = 64,
    bioWindowMs = 3_000,
    phased = true,
    adaptive = true,
    adaptiveThreshold = 0.85,
    requireBio = false,
    wasmPath,
    onProgress,
  } = opts;

  _emit(onProgress, 'start');

  // ── Phase 1: Start bio collector immediately (collects events over time) ──
  const bio = new BioCollector();
  bio.start();

  // ── Phase 2: Parallel collection ──────────────────────────────────────────
  // FIX: keep the timer id so it can be cancelled once the race settles.
  // Previously the timeout was never cleared — after a successful run it
  // would still fire, rejecting the loser promise (an unhandled rejection)
  // and keeping a Node.js process alive until the full timeout elapsed.
  let timeoutId;
  const raceTimeout = new Promise((_, reject) => {
    timeoutId = setTimeout(() => reject(new Error('pulse() timed out')), timeout);
  });

  let entropyResult, canvasResult, audioResult;

  try {
    [entropyResult, canvasResult, audioResult] = await Promise.race([
      Promise.all([
        collectEntropy({
          iterations, matrixSize, phased, adaptive, adaptiveThreshold, wasmPath,
          onBatch: (meta) => _emit(onProgress, 'entropy_batch', meta),
        }).then(r => { _emit(onProgress, 'entropy_done'); return r; }),
        collectCanvasFingerprint()
          .then(r => { _emit(onProgress, 'canvas_done'); return r; }),
        collectAudioJitter({ durationMs: Math.min(bioWindowMs, 2_000) })
          .then(r => { _emit(onProgress, 'audio_done'); return r; }),
      ]),
      raceTimeout,
    ]);
  } catch (err) {
    bio.stop();
    throw err;
  } finally {
    // Cancel the pending timeout on both success and failure paths.
    clearTimeout(timeoutId);
  }

  // ── Phase 3: Bio snapshot ─────────────────────────────────────────────────
  // Let the bio collector run for the remainder of its window if the probes
  // finished early, so human input has time to accumulate.
  // NOTE(review): assumes entropyResult.collectedAt is a Date.now() epoch —
  // confirm against collectEntropy().
  const bioElapsed = Date.now() - entropyResult.collectedAt;
  const bioRemain = Math.max(0, bioWindowMs - bioElapsed);
  if (bioRemain > 0) await _sleep(bioRemain);

  bio.stop();
  const bioSnapshot = bio.snapshot(entropyResult.timings);

  if (requireBio && !bioSnapshot.hasActivity) {
    throw new Error('@sovereign/pulse: no bio activity detected (requireBio=true)');
  }

  _emit(onProgress, 'bio_done');

  // ── Phase 4: Jitter analysis ───────────────────────────────────────────────
  const jitterAnalysis = classifyJitter(entropyResult.timings, {
    autocorrelations: entropyResult.autocorrelations,
  });

  _emit(onProgress, 'analysis_done');

  // ── Phase 5: Build proof & commitment ─────────────────────────────────────
  const payload = buildProof({
    entropy: entropyResult,
    jitter: jitterAnalysis,
    bio: bioSnapshot,
    canvas: canvasResult,
    audio: audioResult,
    nonce,
  });

  const commitment = buildCommitment(payload);

  _emit(onProgress, 'complete', {
    score: jitterAnalysis.score,
    confidence: _scoreToLabel(jitterAnalysis.score),
    flags: jitterAnalysis.flags,
  });

  return commitment;
}
4696
+
4697
+ // ---------------------------------------------------------------------------
4698
+ // Internal helpers
4699
+ // ---------------------------------------------------------------------------
4700
+
4701
/** Promise that resolves after `ms` milliseconds. */
function _sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
4704
+
4705
/**
 * Safely invoke an optional progress callback with (stage, meta).
 * Non-function listeners are ignored; listener exceptions are swallowed so
 * they can never break the probe pipeline.
 */
function _emit(fn, stage, meta = {}) {
  if (typeof fn !== 'function') return;
  try {
    fn(stage, meta);
  } catch (_) {
    // Progress listeners must never break the probe.
  }
}
4710
+
4711
/**
 * Map a jitter score to a confidence label.
 * Bands: >=0.75 high, >=0.55 medium, >=0.35 low, otherwise rejected.
 */
function _scoreToLabel(score) {
  const bands = [
    [0.75, 'high'],
    [0.55, 'medium'],
    [0.35, 'low'],
  ];
  for (const [min, label] of bands) {
    if (score >= min) return label;
  }
  return 'rejected';
}
4716
+ }
4717
+
4718
+ /**
4719
+ * pulse_core — pure-JavaScript probe engine
4720
+ *
4721
+ * This module ships the entropy probe as portable JS so the package works
4722
+ * out-of-the-box without a Rust toolchain. When a compiled .wasm binary is
4723
+ * present (dropped in via `build.sh`) this file is replaced by the wasm-pack
4724
+ * output and the native engine runs instead.
4725
+ *
4726
+ * Physics model
4727
+ * ─────────────
4728
+ * Real silicon: DRAM refresh cycles, branch-predictor misses, and L3-cache
4729
+ * evictions inject sub-microsecond noise into any tight compute loop.
4730
+ * Hypervisors virtualise the TSC and smooth those interrupts out, leaving
4731
+ * a near-flat timing distribution that our QE/EJR checks catch.
4732
+ *
4733
+ * The JS loop below is a faithful port of the Rust matrix-multiply probe:
4734
+ * same work unit (N×N DGEMM-style loop), same checksum accumulation to
4735
+ * prevent dead-code elimination, same resolution micro-probe.
4736
+ */
4737
+
4738
+ /* ─── clock ─────────────────────────────────────────────────────────────── */
4739
+
4740
// Millisecond-resolution monotonic-ish clock. Prefers performance.now();
// falls back to process.hrtime.bigint() (converted to ms) on Node.js builds
// without a global `performance`.
const _now = (() => {
  if (typeof performance !== 'undefined' && typeof performance.now === 'function') {
    return () => performance.now();
  }
  // Node.js fallback: process.hrtime.bigint() → milliseconds
  const hrtime = process.hrtime.bigint;
  return () => Number(hrtime()) / 1_000_000;
})();
4747
+
4748
+ /* ─── init (no-op for the JS engine) ───────────────────────────────────── */
4749
+
4750
+ /**
4751
+ * Initialise the engine. When a real .wasm binary is supplied the wasm-pack
4752
+ * glue calls WebAssembly.instantiateStreaming here. The JS engine is already
4753
+ * "compiled", so we return immediately.
4754
+ *
4755
+ * @param {string|URL|Request|BufferSource|WebAssembly.Module} [_source]
4756
+ * @returns {Promise<void>}
4757
+ */
4758
async function init(_source) {
  // Pure-JS engine: nothing to stream or compile — resolves immediately.
}
4761
+
4762
+ /* ─── run_entropy_probe ─────────────────────────────────────────────────── */
4763
+
4764
+ /**
4765
+ * Run N iterations of a matrix-multiply work unit and record wall-clock time
4766
+ * per iteration. The distribution of those times is what the heuristic
4767
+ * engine analyses.
4768
+ *
4769
+ * @param {number} iterations – number of timing samples to collect
4770
+ * @param {number} matrixSize – N for the N×N multiply (default 64)
4771
+ * @returns {{ timings: Float64Array, checksum: number, resolution_probe: Float64Array }}
4772
+ */
4773
function run_entropy_probe(iterations, matrixSize = 64) {
  // Coerce to int32; N×N is the matrix dimension for the work unit.
  const N = matrixSize | 0;

  // Persistent working matrices — allocated once per probe to avoid GC noise.
  const A = new Float64Array(N * N);
  const B = new Float64Array(N * N);
  const C = new Float64Array(N * N);

  // Seed matrices with pseudo-random data (deterministic per call for
  // reproducibility, but different each run due to xorshift seeding from time).
  // The `|| 0xdeadbeef` guards against a zero seed, which would lock the
  // xorshift generator at zero forever.
  let seed = (_now() * 1e6) | 0 || 0xdeadbeef;
  // NOTE(review): `seed >> 17` is a signed shift — canonical xorshift32 uses
  // `>>>`. Harmless here since the output is only used to fill the matrices,
  // but worth confirming if reproducibility across ports matters.
  const xr = () => { seed ^= seed << 13; seed ^= seed >> 17; seed ^= seed << 5; return (seed >>> 0) / 4294967296; };
  for (let i = 0; i < N * N; i++) { A[i] = xr(); B[i] = xr(); }

  const timings = new Float64Array(iterations);
  const resolution_probe = new Float64Array(32);
  let checksum = 0;

  for (let iter = 0; iter < iterations; iter++) {
    // Zero accumulator each round (realistic cache pressure).
    // Deliberately OUTSIDE the timed region so only the multiply is measured.
    C.fill(0);

    const t0 = _now();

    // N×N matrix multiply: C = A · B (ikj loop order for cache friendliness)
    for (let i = 0; i < N; i++) {
      const rowA = i * N;
      const rowC = i * N;
      for (let k = 0; k < N; k++) {
        const aik = A[rowA + k];
        const rowBk = k * N;
        for (let j = 0; j < N; j++) {
          C[rowC + j] += aik * B[rowBk + j];
        }
      }
    }

    const t1 = _now();
    timings[iter] = t1 - t0;

    // Accumulate one element so the compiler cannot eliminate the work.
    checksum += C[0];
  }

  // Resolution micro-probe: fire 32 back-to-back timestamps.
  // The minimum non-zero delta reveals timer granularity.
  for (let i = 0; i < resolution_probe.length; i++) {
    resolution_probe[i] = _now();
  }

  return { timings, checksum, resolution_probe };
}
4825
+
4826
+ /* ─── run_memory_probe ──────────────────────────────────────────────────── */
4827
+
4828
/**
 * Sequential read/write bandwidth probe over a large buffer.
 * Memory latency variance is a secondary signal (NUMA, DRAM refresh).
 *
 * @param {number} memSizeKb – buffer size in kibibytes
 * @param {number} memIterations – number of timed passes to record
 * @returns {{ timings: Float64Array, checksum: number }}
 */
function run_memory_probe(memSizeKb = 512, memIterations = 50) {
  // Size the buffer in 8-byte (64-bit float) elements.
  const elementCount = (memSizeKb * 1024 / 8) | 0;
  const buffer = new Float64Array(elementCount);
  const timings = new Float64Array(memIterations);
  let checksum = 0;

  // Warm-up pass (fills TLB, avoids first-access bias)
  for (let idx = 0; idx < elementCount; idx++) {
    buffer[idx] = idx;
  }

  for (let pass = 0; pass < memIterations; pass++) {
    const started = _now();
    // Sequential read-modify-write over the whole buffer.
    for (let idx = 0; idx < elementCount; idx++) {
      buffer[idx] = buffer[idx] * 1.0000001;
    }
    const finished = _now();

    timings[pass] = finished - started;
    // Fold one element into the checksum so the work cannot be optimised away.
    checksum += buffer[0];
  }

  return { timings, checksum };
}
4857
+
4858
+ /* ─── compute_autocorrelation ───────────────────────────────────────────── */
4859
+
4860
/**
 * Pearson autocorrelation of `data` for lags 1..maxLag.
 * Cost is O(n·maxLag) over the input vector.
 *
 * @param {ArrayLike<number>} data
 * @param {number} maxLag
 * @returns {Float64Array} length = maxLag, index 0 = lag-1
 */
function compute_autocorrelation(data, maxLag) {
  const n = data.length;
  const result = new Float64Array(maxLag);

  // Sample mean.
  let total = 0;
  for (let i = 0; i < n; i++) total += data[i];
  const mean = total / n;

  // Population variance (divide by n; matches the per-lag normalisation below).
  let sqSum = 0;
  for (let i = 0; i < n; i++) {
    const dev = data[i] - mean;
    sqSum += dev * dev;
  }
  const variance = sqSum / n;

  // Degenerate input (all samples identical): correlation is undefined, so
  // report zeros instead of dividing by ~0.
  if (variance < 1e-14) return result;

  for (let lag = 1; lag <= maxLag; lag++) {
    const limit = n - lag;
    let cov = 0;
    for (let i = 0; i < limit; i++) {
      cov += (data[i] - mean) * (data[i + lag] - mean);
    }
    result[lag - 1] = cov / (limit * variance);
  }

  return result;
}
4891
+
4892
// Bundler-generated namespace object mirroring the pulse-core module's exports.
// Frozen with a null prototype so it behaves like an ES-module namespace
// record. NOTE(review): `default: init` references an `init` defined earlier
// in the bundle (outside this chunk) — presumably the module/WASM initialiser;
// confirm against the full build output.
var pulse_core = /*#__PURE__*/Object.freeze({
  __proto__: null,
  compute_autocorrelation: compute_autocorrelation,
  default: init,
  run_entropy_probe: run_entropy_probe,
  run_memory_probe: run_memory_probe
});
4899
+
4900
// CommonJS public surface of the bundle. Every exported name is defined
// earlier in the file (outside this chunk).
exports.Fingerprint = Fingerprint;
exports.detectProvider = detectProvider;
exports.generateNonce = generateNonce;
exports.pulse = pulse;
exports.runHeuristicEngine = runHeuristicEngine;
exports.validateProof = validateProof;
//# sourceMappingURL=pulse.cjs.js.map