@svrnsec/pulse 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4898 @@
1
+ /**
2
+ * @sovereign/pulse — Statistical Jitter Analysis
3
+ *
4
+ * Analyses the timing distribution from the entropy probe to classify
5
+ * the host as a real consumer device or a sanitised datacenter VM.
6
+ *
7
+ * Core insight:
8
+ * Real hardware → thermal throttling, OS context switches, DRAM refresh
9
+ * cycles create a characteristic "noisy" but physically
10
+ * plausible timing distribution.
11
+ * Datacenter VM → hypervisor scheduler presents a nearly-flat execution
12
+ * curve; thermal feedback is absent; timer may be
13
+ * quantised to the host's scheduler quantum.
14
+ */
15
+
16
+ // ---------------------------------------------------------------------------
17
+ // Public API
18
+ // ---------------------------------------------------------------------------
19
+
20
/**
 * Full statistical analysis of a timing vector.
 *
 * Produces a weighted [0, 1] plausibility score (1 ≈ real consumer silicon,
 * 0 ≈ sanitised VM) from six criteria: coefficient of variation,
 * autocorrelation profile, quantization entropy, Hurst exponent, thermal
 * signature, and outlier rate. Weights sum to 1.0.
 *
 * @param {number[]} timings - per-iteration millisecond deltas from WASM probe
 * @param {object} [opts]
 * @param {object} [opts.autocorrelations] - pre-computed { lag1 … lag10 }
 * @returns {JitterAnalysis}
 */
function classifyJitter(timings, opts = {}) {
  if (!timings || timings.length < 10) {
    return _insufficientData();
  }

  const stats = computeStats(timings);
  const autocorr = opts.autocorrelations ?? _computeLocalAutocorr(timings);
  const hurst = computeHurst(timings);
  const quantEnt = detectQuantizationEntropy(timings);
  const thermal = detectThermalSignature(timings);
  const outlierRate = _outlierRate(timings, stats);

  // Each criterion contributes a [0, 1] sub-score; the final score is the
  // weighted sum of all sub-scores.
  const components = {};
  const flags = [];

  // 1. Coefficient of variation (weight 0.25).
  //    Healthy band is [0.04, 0.35]; linear ramps on either side.
  const cv = stats.cv;
  let cvScore;
  if (cv >= 0.04 && cv <= 0.35) {
    cvScore = 1.0;
  } else if (cv >= 0.02 && cv < 0.04) {
    cvScore = (cv - 0.02) / 0.02; // ramp up toward the healthy band
    flags.push('LOW_CV_BORDERLINE');
  } else if (cv > 0.35 && cv < 0.5) {
    cvScore = 1.0 - (cv - 0.35) / 0.15; // ramp down past the healthy band
    flags.push('HIGH_CV_POSSIBLE_SCHEDULER_BURST');
  } else if (cv < 0.02) {
    cvScore = 0;
    flags.push('CV_TOO_FLAT_VM_INDICATOR');
  } else {
    cvScore = 0.2;
    flags.push('CV_TOO_HIGH_SCHEDULER_BURST');
  }
  components.cv = { score: cvScore, weight: 0.25, value: cv };

  // 2. Autocorrelation profile (weight 0.20).
  //    Blends worst-case and average |AC| across the measured lags so both
  //    lag-1 and longer-period artifacts are caught.
  const acVals = Object.values(autocorr).filter((v) => v != null);
  const maxAbsAC = acVals.length ? Math.max(...acVals.map(Math.abs)) : 0;
  const meanAbsAC = acVals.length
    ? acVals.reduce((sum, v) => sum + Math.abs(v), 0) / acVals.length
    : 0;
  const acStat = (maxAbsAC + meanAbsAC) / 2;

  let acScore;
  if (acStat < 0.12) {
    acScore = 1.0;
  } else if (acStat < 0.28) {
    acScore = 1.0 - (acStat - 0.12) / 0.16;
    flags.push('MODERATE_AUTOCORR_POSSIBLE_SCHEDULER');
  } else {
    acScore = 0;
    flags.push('HIGH_AUTOCORR_VM_SCHEDULER_DETECTED');
  }
  components.autocorr = { score: acScore, weight: 0.20, value: acStat };

  // 3. Quantization entropy (weight 0.20).
  //    High = spread timings; low = clustering on fixed timer ticks.
  //    The 2.0–3.0 band earns partial credit (max 0.20) so one weak metric
  //    cannot single-handedly zero the aggregate.
  let qeScore;
  if (quantEnt >= 4.5) {
    qeScore = 1.0;
  } else if (quantEnt >= 3.0) {
    qeScore = (quantEnt - 3.0) / 1.5; // 0.00 → 1.00 across the healthy range
  } else if (quantEnt >= 2.0) {
    qeScore = ((quantEnt - 2.0) / 1.0) * 0.20; // 0.00 → 0.20 partial credit
    flags.push('LOW_QUANTIZATION_ENTROPY_BORDERLINE');
  } else {
    qeScore = 0;
    flags.push('LOW_QUANTIZATION_ENTROPY_SYNTHETIC_TIMER');
  }
  components.quantization = { score: qeScore, weight: 0.20, value: quantEnt };

  // 4. Hurst exponent (weight 0.15). Scored by distance from 0.5; extra
  //    flags distinguish persistent (high H) from anti-persistent (low H).
  const hurstDev = Math.abs(hurst - 0.5);
  let hurstScore;
  if (hurstDev < 0.10) {
    hurstScore = 1.0;
  } else if (hurstDev < 0.25) {
    hurstScore = 1.0 - (hurstDev - 0.10) / 0.15;
    if (hurst > 0.7) flags.push('HIGH_HURST_VM_SCHEDULER_PERIODICITY');
  } else {
    hurstScore = 0;
    if (hurst > 0.7) flags.push('VERY_HIGH_HURST_VM');
    else if (hurst < 0.3) flags.push('VERY_LOW_HURST_ANTIPERSISTENT');
  }
  components.hurst = { score: hurstScore, weight: 0.15, value: hurst };

  // 5. Thermal signature (weight 0.10). Rising / sawtooth earns full marks;
  //    measurable drift half marks; a flat profile none.
  let thermalScore;
  if (thermal.pattern === 'rising' || thermal.pattern === 'sawtooth') {
    thermalScore = 1.0;
  } else if (Math.abs(thermal.slope) > 5e-5) {
    thermalScore = 0.5; // some drift present
    flags.push('WEAK_THERMAL_SIGNATURE');
  } else {
    thermalScore = 0;
    flags.push('FLAT_THERMAL_PROFILE_VM_INDICATOR');
  }
  components.thermal = { score: thermalScore, weight: 0.10, value: thermal.slope };

  // 6. Outlier rate (weight 0.10). A rate of exactly 0 scores 0 without a
  //    flag; [0.02, 0.15] is the full-credit band.
  let outlierScore = 0;
  if (outlierRate >= 0.02 && outlierRate <= 0.15) {
    outlierScore = 1.0;
  } else if (outlierRate > 0 && outlierRate < 0.02) {
    outlierScore = outlierRate / 0.02;
    flags.push('FEW_OUTLIERS_POSSIBLY_VM');
  } else if (outlierRate > 0.15) {
    outlierScore = Math.max(0, 1.0 - (outlierRate - 0.15) / 0.15);
    flags.push('EXCESSIVE_OUTLIERS_UNSTABLE');
  }
  components.outliers = { score: outlierScore, weight: 0.10, value: outlierRate };

  // Weighted aggregate, clamped defensively to [0, 1].
  const total = Object.values(components)
    .reduce((sum, c) => sum + c.score * c.weight, 0);

  return {
    score: Math.max(0, Math.min(1, total)),
    flags,
    components,
    stats,
    autocorrelations: autocorr,
    hurstExponent: hurst,
    quantizationEntropy: quantEnt,
    thermalSignature: thermal,
    outlierRate,
  };
}
183
+
184
+ // ---------------------------------------------------------------------------
185
+ // computeStats
186
+ // ---------------------------------------------------------------------------
187
+
188
/**
 * Descriptive statistics for a timing vector.
 *
 * Fixes vs. the previous version: a zero-variance input no longer produces
 * NaN skewness/kurtosis (0/0), a single-element input gets variance 0
 * instead of 0/0, and a zero mean yields cv 0 instead of NaN/Infinity.
 *
 * @param {number[]} arr - non-empty array of timing samples (ms)
 * @returns {TimingStats}
 */
function computeStats(arr) {
  const sorted = [...arr].sort((a, b) => a - b);
  const n = arr.length;
  const mean = arr.reduce((s, v) => s + v, 0) / n;
  // Bessel-corrected sample variance; 0 for n === 1 (was 0/0 = NaN).
  const varr = n > 1 ? arr.reduce((s, v) => s + (v - mean) ** 2, 0) / (n - 1) : 0;
  const std = Math.sqrt(varr);

  // Linear-interpolated percentile on the sorted copy.
  const pct = (p) => {
    const idx = (p / 100) * (n - 1);
    const lo = Math.floor(idx);
    const hi = Math.ceil(idx);
    return sorted[lo] + (sorted[hi] - sorted[lo]) * (idx - lo);
  };

  // Skewness (Fisher-Pearson); std === 0 would otherwise divide by zero.
  const skew = n < 3 || std === 0 ? 0 :
    arr.reduce((s, v) => s + ((v - mean) / std) ** 3, 0) *
    (n / ((n - 1) * (n - 2)));

  // Excess kurtosis (same zero-variance guard).
  const kurt = n < 4 || std === 0 ? 0 :
    (arr.reduce((s, v) => s + ((v - mean) / std) ** 4, 0) *
     (n * (n + 1)) / ((n - 1) * (n - 2) * (n - 3))) -
    (3 * (n - 1) ** 2) / ((n - 2) * (n - 3));

  return {
    n, mean, std,
    cv: mean === 0 ? 0 : std / mean, // guard all-zero input
    min: sorted[0],
    max: sorted[n - 1],
    p5: pct(5),
    p25: pct(25),
    p50: pct(50),
    p75: pct(75),
    p95: pct(95),
    p99: pct(99),
    skewness: skew,
    kurtosis: kurt,
  };
}
233
+
234
+ /**
235
+ * @typedef {object} TimingStats
236
+ * @property {number} n
237
+ * @property {number} mean
238
+ * @property {number} std
239
+ * @property {number} cv
240
+ * @property {number} min
241
+ * @property {number} max
242
+ * @property {number} p5
243
+ * @property {number} p25
244
+ * @property {number} p50
245
+ * @property {number} p75
246
+ * @property {number} p95
247
+ * @property {number} p99
248
+ * @property {number} skewness
249
+ * @property {number} kurtosis
250
+ */
251
+
252
+ // ---------------------------------------------------------------------------
253
+ // computeHurst
254
+ // ---------------------------------------------------------------------------
255
+
256
/**
 * Estimates the Hurst exponent via Rescaled Range (R/S) analysis over four
 * prefix windows (n/4, n/3, n/2, n), taking the log-log OLS slope.
 *
 * H ≈ 0.5 → random walk (Brownian, thermal noise)
 * H > 0.5 → persistent (VM hypervisor periodicity)
 * H < 0.5 → anti-persistent
 *
 * @param {number[]} arr
 * @returns {number} estimate clamped to [0, 1]; 0.5 when data is too short
 */
function computeHurst(arr) {
  const n = arr.length;
  if (n < 16) return 0.5; // too little data for a stable slope

  const windowSizes = [
    Math.floor(n / 4),
    Math.floor(n / 3),
    Math.floor(n / 2),
    n,
  ].filter((size) => size >= 8);

  // One (log size, log R/S) point per prefix window.
  const xs = [];
  const ys = [];
  for (const size of windowSizes) {
    xs.push(Math.log(size));
    ys.push(Math.log(_rescaledRange(arr.slice(0, size))));
  }

  // OLS slope of log(R/S) on log(size) is the Hurst estimate.
  const k = xs.length;
  const mx = xs.reduce((s, x) => s + x, 0) / k;
  const my = ys.reduce((s, y) => s + y, 0) / k;
  let cov = 0;
  let varX = 0;
  for (let i = 0; i < k; i++) {
    cov += (xs[i] - mx) * (ys[i] - my);
    varX += (xs[i] - mx) ** 2;
  }
  const slope = varX === 0 ? 0.5 : cov / varX;
  return Math.max(0, Math.min(1, slope));
}
294
+
295
// Rescaled range (R/S) of a series: range of the cumulative mean-deviation
// walk divided by the population standard deviation. Returns 1 when the
// series has zero variance (avoids 0/0).
function _rescaledRange(series) {
  const count = series.length;
  const avg = series.reduce((sum, v) => sum + v, 0) / count;

  // Single pass: track the running deviation sum's extremes and the
  // squared-deviation total at the same time.
  let running = 0;
  let hi = -Infinity;
  let lo = Infinity;
  let sqSum = 0;
  for (const v of series) {
    const d = v - avg;
    running += d;
    if (running > hi) hi = running;
    if (running < lo) lo = running;
    sqSum += d * d;
  }

  const range = hi - lo;
  const sd = Math.sqrt(sqSum / count);
  return sd === 0 ? 1 : range / sd;
}
309
+
310
+ // ---------------------------------------------------------------------------
311
+ // detectQuantizationEntropy
312
+ // ---------------------------------------------------------------------------
313
+
314
/**
 * Shannon entropy (in bits) of a fixed-width histogram of the timing values.
 * Low entropy (< 3 bits) indicates clustered / quantised timings (VM timer).
 *
 * @param {number[]} arr
 * @param {number} [binWidthMs=0.2]
 * @returns {number} entropy in bits; 0 for an empty input
 */
function detectQuantizationEntropy(arr, binWidthMs = 0.2) {
  const total = arr.length;
  if (total === 0) return 0;

  // Histogram: each sample keyed by its nearest bin index.
  const counts = new Map();
  for (const sample of arr) {
    const key = Math.round(sample / binWidthMs);
    counts.set(key, (counts.get(key) ?? 0) + 1);
  }

  // H = -Σ p·log2(p) over occupied bins.
  let entropy = 0;
  for (const c of counts.values()) {
    const p = c / total;
    entropy -= p * Math.log2(p);
  }
  return entropy;
}
337
+
338
+ // ---------------------------------------------------------------------------
339
+ // detectThermalSignature
340
+ // ---------------------------------------------------------------------------
341
+
342
/**
 * Analyses whether the timing series shows a thermal-throttle pattern:
 * a rising linear trend (CPU heating up) or a sawtooth (repeated sharp
 * drops after rising segments, e.g. fan intervention).
 *
 * Fix: the short-input early return now includes `sawtoothCount: 0` so the
 * result shape matches the main path (callers can rely on the field).
 *
 * @param {number[]} arr
 * @returns {{ slope: number, pattern: 'rising'|'falling'|'sawtooth'|'flat', r2: number, sawtoothCount: number }}
 */
function detectThermalSignature(arr) {
  const n = arr.length;
  if (n < 10) return { slope: 0, pattern: 'flat', r2: 0, sawtoothCount: 0 };

  // OLS linear regression of timing against sample index.
  const xMean = (n - 1) / 2;
  const yMean = arr.reduce((s, v) => s + v, 0) / n;
  let num = 0, den = 0;
  for (let i = 0; i < n; i++) {
    num += (i - xMean) * (arr[i] - yMean);
    den += (i - xMean) ** 2;
  }
  const slope = den === 0 ? 0 : num / den;

  // R² of the linear fit (how much a pure trend explains the series).
  const ss_res = arr.reduce((s, v, i) => {
    const pred = yMean + slope * (i - xMean);
    return s + (v - pred) ** 2;
  }, 0);
  const ss_tot = arr.reduce((s, v) => s + (v - yMean) ** 2, 0);
  const r2 = ss_tot === 0 ? 0 : 1 - ss_res / ss_tot;

  // Sawtooth detection: count sample-to-sample drops larger than 2σ
  // (population standard deviation).
  const std = Math.sqrt(arr.reduce((s, v) => s + (v - yMean) ** 2, 0) / n);
  let sawtoothCount = 0;
  for (let i = 1; i < n; i++) {
    if (arr[i - 1] - arr[i] > 2 * std) sawtoothCount++;
  }

  let pattern;
  if (sawtoothCount >= 2) pattern = 'sawtooth';
  else if (slope > 5e-5) pattern = 'rising';
  else if (slope < -5e-5) pattern = 'falling';
  else pattern = 'flat';

  return { slope, pattern, r2, sawtoothCount };
}
386
+
387
+ // ---------------------------------------------------------------------------
388
+ // Internal helpers
389
+ // ---------------------------------------------------------------------------
390
+
391
// Fraction of samples above mean + 3σ. Only high-side spikes are counted,
// matching the context-switch-spike heuristic.
function _outlierRate(samples, stats) {
  const cutoff = stats.mean + 3 * stats.std;
  let spikes = 0;
  for (const v of samples) {
    if (v > cutoff) spikes++;
  }
  return spikes / samples.length;
}
395
+
396
// Autocorrelation at the standard diagnostic lags, keyed "lag<k>".
function _computeLocalAutocorr(series) {
  const lags = [1, 2, 3, 5, 10];
  return Object.fromEntries(lags.map((k) => [`lag${k}`, _pearsonAC(series, k)]));
}
403
+
404
// Pearson-style autocorrelation of a series against itself shifted by `lag`.
// Both halves are centred on the full-series mean. Returns 0 when the lag is
// out of range or the denominator is numerically zero.
function _pearsonAC(series, lag) {
  const n = series.length;
  if (lag >= n) return 0;

  const mean = series.reduce((total, v) => total + v, 0) / n;
  let cross = 0;
  let headSq = 0;
  let tailSq = 0;
  for (let i = 0; i < n - lag; i++) {
    const head = series[i] - mean;
    const tail = series[i + lag] - mean;
    cross += head * tail;
    headSq += head * head;
    tailSq += tail * tail;
  }

  const denom = Math.sqrt(headSq * tailSq);
  return denom < 1e-14 ? 0 : cross / denom;
}
420
+
421
// Neutral analysis result returned when there are too few samples to
// classify (fewer than 10 timings).
function _insufficientData() {
  const flatThermal = { slope: 0, pattern: 'flat', r2: 0 };
  return {
    score: 0,
    flags: ['INSUFFICIENT_DATA'],
    components: {},
    stats: null,
    autocorrelations: {},
    hurstExponent: 0.5,
    quantizationEntropy: 0,
    thermalSignature: flatThermal,
    outlierRate: 0,
  };
}
434
+
435
+ /**
436
+ * @typedef {object} JitterAnalysis
437
+ * @property {number} score - [0,1], 1 = real hardware
438
+ * @property {string[]} flags - diagnostic flags
439
+ * @property {object} components - per-criterion scores and weights
440
+ * @property {TimingStats} stats
441
+ * @property {object} autocorrelations
442
+ * @property {number} hurstExponent
443
+ * @property {number} quantizationEntropy
444
+ * @property {object} thermalSignature
445
+ * @property {number} outlierRate
446
+ */
447
+
448
// Frozen namespace object for the jitter module (bundler-generated rollup
// of this file's exported analysis functions).
var jitter = /*#__PURE__*/ Object.freeze({
  __proto__: null,
  classifyJitter,
  computeHurst,
  computeStats,
  detectQuantizationEntropy,
  detectThermalSignature,
});
456
+
457
+ /**
458
+ * @sovereign/pulse — Adaptive Entropy Probe
459
+ *
460
+ * Runs the WASM probe in batches and stops early once the signal is decisive.
461
+ *
462
+ * Why this works:
463
+ * A KVM VM with QE=1.27 and lag-1 autocorr=0.67 is unambiguously a VM after
464
+ * just 50 iterations. Running 200 iterations confirms what was already obvious
465
+ * at 50 — it adds no new information but wastes 3 seconds of user time.
466
+ *
467
+ * Conversely, a physical device with healthy entropy needs more data to
468
+ * rule out edge cases, so it runs longer.
469
+ *
470
+ * Speed profile:
471
+ * Obvious VM (QE < 1.5, lag1 > 0.60) → stops at 50 iters → ~0.9s (75% faster)
472
+ * Clear HW (QE > 3.5, lag1 < 0.10) → stops at ~100 iters → ~1.8s (50% faster)
473
+ * Ambiguous (borderline metrics) → runs full 200 iters → ~3.5s (same)
474
+ */
475
+
476
+
477
+ // ---------------------------------------------------------------------------
478
+ // Quick classifier (cheap, runs after every batch)
479
+ // ---------------------------------------------------------------------------
480
+
481
/**
 * Fast signal-quality check, run between batches. Skips Hurst and thermal
 * analysis — only the three quickest-converging metrics: quantization
 * entropy, coefficient of variation, and lag-1 autocorrelation.
 *
 * @param {number[]} timings
 * @returns {{ vmConf: number, hwConf: number, qe: number, cv: number, lag1: number }}
 */
function quickSignal(timings) {
  const n = timings.length;
  const mean = timings.reduce((s, v) => s + v, 0) / n;
  const variance = timings.reduce((s, v) => s + (v - mean) ** 2, 0) / n;
  const cv = mean > 0 ? Math.sqrt(variance) / mean : 0;
  const qe = detectQuantizationEntropy(timings);

  // Lag-1 Pearson autocorrelation, computed in a single O(n) pass.
  let num = 0;
  let da = 0;
  let db = 0;
  for (let i = 0; i < n - 1; i++) {
    const a = timings[i] - mean;
    const b = timings[i + 1] - mean;
    num += a * b;
    da += a * a;
    db += b * b;
  }
  const denom = Math.sqrt(da * db);
  const lag1 = denom < 1e-14 ? 0 : num / denom;

  // VM confidence: tiered contributions per axis, capped at 1. Each factor
  // independently points at the hypervisor footprint.
  let vmConf = 0;
  if (qe < 1.50) vmConf += 0.40;
  else if (qe < 2.00) vmConf += 0.20;
  if (lag1 > 0.60) vmConf += 0.35;
  else if (lag1 > 0.40) vmConf += 0.18;
  if (cv < 0.04) vmConf += 0.25;
  else if (cv < 0.07) vmConf += 0.10;
  vmConf = Math.min(1, vmConf);

  // HW confidence: needs all three positive signals together to saturate.
  let hwConf = 0;
  if (qe > 3.50) hwConf += 0.38;
  else if (qe > 3.00) hwConf += 0.22;
  const absLag1 = Math.abs(lag1);
  if (absLag1 < 0.10) hwConf += 0.32;
  else if (absLag1 < 0.20) hwConf += 0.15;
  if (cv > 0.10) hwConf += 0.30;
  else if (cv > 0.07) hwConf += 0.14;
  hwConf = Math.min(1, hwConf);

  return { vmConf, hwConf, qe, cv, lag1 };
}
522
+
523
+ // ---------------------------------------------------------------------------
524
+ // collectEntropyAdaptive
525
+ // ---------------------------------------------------------------------------
526
+
527
/**
 * Batched entropy collection with early exit once the quick signal is
 * decisive (see quickSignal). Early-exit checks only begin after
 * `minIterations` samples; a physical verdict additionally requires
 * `hwMinIterations`.
 *
 * @param {object} wasmModule - pre-initialised WASM module
 * @param {object} [opts]
 * @param {number} [opts.minIterations=50] - never stop before this many
 * @param {number} [opts.maxIterations=200] - hard cap
 * @param {number} [opts.batchSize=25] - WASM call granularity
 * @param {number} [opts.vmThreshold=0.85] - stop early if VM confidence ≥ this
 * @param {number} [opts.hwThreshold=0.80] - stop early if HW confidence ≥ this
 * @param {number} [opts.hwMinIterations=75] - physical needs more data to confirm
 * @param {number} [opts.matrixSize=64]
 * @param {Function} [opts.onBatch] - called after each batch with interim signal
 * @returns {Promise<AdaptiveEntropyResult>}
 */
async function collectEntropyAdaptive(wasmModule, opts = {}) {
  const {
    minIterations = 50,
    maxIterations = 200,
    batchSize = 25,
    vmThreshold = 0.85,
    hwThreshold = 0.80,
    hwMinIterations = 75,
    matrixSize = 64,
    onBatch,
  } = opts;

  const timings = [];
  const batches = []; // per-batch signal snapshots
  let earlyStop = null; // { reason, vmConf, hwConf } once a verdict is decisive
  let checksumTotal = 0;
  const startedAt = Date.now();

  while (timings.length < maxIterations) {
    const batchN = Math.min(batchSize, maxIterations - timings.length);
    const probe = wasmModule.run_entropy_probe(batchN, matrixSize);
    timings.push(...Array.from(probe.timings));
    checksumTotal += probe.checksum;

    const sig = quickSignal(timings);
    batches.push({ iterations: timings.length, ...sig });

    // Stream the interim signal to the caller; progress reporting is
    // best-effort and must never abort collection.
    if (typeof onBatch === 'function') {
      try {
        onBatch({
          iterations: timings.length,
          maxIterations,
          pct: Math.round(timings.length / maxIterations * 100),
          vmConf: sig.vmConf,
          hwConf: sig.hwConf,
          qe: sig.qe,
          cv: sig.cv,
          lag1: sig.lag1,
          // 0.70 is high enough that a noisy first batch won't surface a
          // false early verdict; 'borderline' marks a moderate single axis.
          earlyVerdict: sig.vmConf > 0.70 ? 'vm'
            : sig.hwConf > 0.70 ? 'physical'
            : (sig.vmConf > 0.45 || sig.hwConf > 0.45) ? 'borderline'
            : 'uncertain',
        });
      } catch { /* deliberately swallowed: UI callback errors are non-fatal */ }
    }

    // Early-exit checks apply only once the minimum sample size is reached.
    if (timings.length < minIterations) continue;

    if (sig.vmConf >= vmThreshold) {
      earlyStop = { reason: 'VM_SIGNAL_DECISIVE', vmConf: sig.vmConf, hwConf: sig.hwConf };
      break;
    }
    if (timings.length >= hwMinIterations && sig.hwConf >= hwThreshold) {
      earlyStop = { reason: 'PHYSICAL_SIGNAL_DECISIVE', vmConf: sig.vmConf, hwConf: sig.hwConf };
      break;
    }
  }

  const elapsed = Date.now() - startedAt;
  const iterationsRan = timings.length;
  const iterationsSaved = maxIterations - iterationsRan;
  const speedupFactor = maxIterations / iterationsRan;

  // Tiny extra probe purely to sample successive raw timer readings.
  const resResult = wasmModule.run_entropy_probe(1, 4);
  const resProbe = Array.from(resResult.resolution_probe ?? []);

  // Positive deltas between successive readings; their minimum approximates
  // the effective timer granularity.
  const resDeltas = [];
  for (let i = 1; i < resProbe.length; i++) {
    const d = resProbe[i] - resProbe[i - 1];
    if (d > 0) resDeltas.push(d);
  }

  return {
    timings,
    iterations: iterationsRan,
    maxIterations,
    checksum: checksumTotal.toString(),
    resolutionProbe: resProbe,
    timerGranularityMs: resDeltas.length
      ? resDeltas.reduce((a, b) => Math.min(a, b), Infinity)
      : null,
    earlyExit: earlyStop ? {
      ...earlyStop,
      iterationsSaved,
      timeSavedMs: Math.round(iterationsSaved * (elapsed / iterationsRan)),
      speedupFactor: +speedupFactor.toFixed(2),
    } : null,
    batches,
    elapsedMs: elapsed,
    collectedAt: startedAt,
    matrixSize,
    phased: false, // adaptive mode replaces the phased strategy
  };
}
646
+
647
+ /**
648
+ * @typedef {object} AdaptiveEntropyResult
649
+ * @property {number[]} timings
650
+ * @property {number} iterations - how many actually ran
651
+ * @property {number} maxIterations - cap that was set
652
+ * @property {object|null} earlyExit - null if ran to completion
653
+ * @property {object[]} batches - per-batch signal snapshots
654
+ * @property {number} elapsedMs
655
+ */
656
+
657
+ /**
658
+ * @sovereign/pulse — Entropy Collector
659
+ *
660
+ * Bridges the Rust/WASM matrix-multiply probe into JavaScript.
661
+ * The WASM module is lazily initialised once and cached for subsequent calls.
662
+ */
663
+
664
+
665
+ // ---------------------------------------------------------------------------
666
+ // WASM loader (lazy singleton)
667
+ // ---------------------------------------------------------------------------
668
+ // Lazy-singleton state for initWasm: the resolved module table, and the
+ // in-flight initialisation promise shared by concurrent callers.
+ let _wasmModule = null;
669
+ let _initPromise = null;
670
+
671
/**
 * Initialise (or return the cached) WASM module.
 * Works in browsers (via fetch), in Electron (Node.js context), and in
 * Jest/Vitest via a manual WASM path override.
 *
 * Concurrent callers share a single in-flight init promise. Fix: a failed
 * init now clears the cached promise so a later call can retry, instead of
 * permanently caching the rejection (a poisoned singleton).
 *
 * @param {string} [wasmPath] – override path/URL to the .wasm binary
 * @returns {Promise<object>} table of probe functions
 */
async function initWasm(wasmPath) {
  if (_wasmModule) return _wasmModule;
  if (_initPromise) return _initPromise;

  _initPromise = (async () => {
    try {
      // Dynamic import so bundlers can tree-shake this for server-only builds.
      const { default: init, run_entropy_probe, run_memory_probe, compute_autocorrelation } =
        await Promise.resolve().then(function () { return pulse_core; });

      const url = wasmPath ?? new URL('../../pkg/pulse_core_bg.wasm', import.meta.url).href;
      await init(url);

      _wasmModule = { run_entropy_probe, run_memory_probe, compute_autocorrelation };
      return _wasmModule;
    } catch (err) {
      // Reset so the next call retries rather than replaying this rejection.
      _initPromise = null;
      throw err;
    }
  })();

  return _initPromise;
}
696
+
697
+ // ---------------------------------------------------------------------------
698
+ // collectEntropy
699
+ // ---------------------------------------------------------------------------
700
+
701
/**
 * Run the WASM entropy probe and return raw timing data.
 *
 * Three collection strategies:
 *   adaptive — batched with smart early exit (collectEntropyAdaptive);
 *   phased   — cold → load → hot segments to expose entropy growth under load;
 *   single   — one straight probe run (few iterations, or phased disabled).
 *
 * @param {object} [opts]
 * @param {number} [opts.iterations=200] - number of matrix-multiply rounds
 * @param {number} [opts.matrixSize=64] - N for the N×N matrices
 * @param {number} [opts.memSizeKb=512] - size of the memory bandwidth probe
 * @param {number} [opts.memIterations=50]
 * @param {boolean} [opts.phased=true] - run cold/load/hot phases for entropy-jitter ratio
 * @param {boolean} [opts.adaptive=false] - enable early-exit batching
 * @param {number} [opts.adaptiveThreshold=0.85] - VM-confidence stop threshold
 * @param {Function} [opts.onBatch] - adaptive-mode progress callback
 * @param {string} [opts.wasmPath] - optional custom WASM binary path
 * @returns {Promise<EntropyResult>}
 */
async function collectEntropy(opts = {}) {
  const {
    iterations = 200,
    matrixSize = 64,
    memSizeKb = 512,
    memIterations = 50,
    phased = true,
    adaptive = false,
    adaptiveThreshold = 0.85,
    onBatch,
    wasmPath,
  } = opts;

  const wasm = await initWasm(wasmPath);
  const startedAt = Date.now();

  let phases = null;
  let timings;
  let resolutionProbe;
  let checksum;
  let timerGranularityMs;
  let adaptiveInfo = null;

  if (adaptive) {
    // ── Adaptive mode: smart early exit, fastest for obvious VMs ─────────
    const run = await collectEntropyAdaptive(wasm, {
      minIterations: 50,
      maxIterations: iterations,
      batchSize: 25,
      vmThreshold: adaptiveThreshold,
      hwThreshold: 0.80,
      hwMinIterations: 75,
      matrixSize,
      onBatch,
    });
    timings = run.timings;
    resolutionProbe = run.resolutionProbe ?? [];
    checksum = run.checksum;
    timerGranularityMs = run.timerGranularityMs;
    adaptiveInfo = { earlyExit: run.earlyExit, batches: run.batches, elapsedMs: run.elapsedMs };
  } else if (phased && iterations >= 60) {
    // ── Phased collection: cold → load → hot ─────────────────────────────
    // Each phase is a separate WASM probe. On real hardware sustained load
    // raises thermal noise, so hot-phase entropy measurably exceeds the
    // cold phase's; a hypervisor clock is insensitive to guest thermal
    // state and stays flat across all three.
    const coldN = Math.floor(iterations * 0.25); // ~25% cold
    const loadN = Math.floor(iterations * 0.50); // ~50% sustained load
    const hotN = iterations - coldN - loadN;     // remainder (~25%) hot

    const cold = wasm.run_entropy_probe(coldN, matrixSize);
    const load = wasm.run_entropy_probe(loadN, matrixSize);
    const hot = wasm.run_entropy_probe(hotN, matrixSize);

    const coldTimings = Array.from(cold.timings);
    const loadTimings = Array.from(load.timings);
    const hotTimings = Array.from(hot.timings);

    timings = [...coldTimings, ...loadTimings, ...hotTimings];
    resolutionProbe = Array.from(cold.resolution_probe);
    checksum = (cold.checksum + load.checksum + hot.checksum).toString();

    const { detectQuantizationEntropy } = await Promise.resolve().then(function () { return jitter; });
    const coldQE = detectQuantizationEntropy(coldTimings);
    const hotQE = detectQuantizationEntropy(hotTimings);

    phases = {
      cold: { n: coldN, timings: coldTimings, qe: coldQE, mean: _mean$1(coldTimings) },
      load: { n: loadN, timings: loadTimings, qe: detectQuantizationEntropy(loadTimings), mean: _mean$1(loadTimings) },
      hot: { n: hotN, timings: hotTimings, qe: hotQE, mean: _mean$1(hotTimings) },
      // Key signal: entropy growth under load.
      // Real silicon: hotQE / coldQE typically 1.05 – 1.40
      // VM: hotQE / coldQE typically 0.95 – 1.05 (flat)
      entropyJitterRatio: coldQE > 0 ? hotQE / coldQE : 1.0,
    };
  } else {
    // ── Single-phase fallback (fewer iterations or phased disabled) ──────
    const result = wasm.run_entropy_probe(iterations, matrixSize);
    timings = Array.from(result.timings);
    resolutionProbe = Array.from(result.resolution_probe);
    checksum = result.checksum.toString();
  }

  if (!adaptive) {
    // Timer resolution from successive raw readings: the smallest positive
    // delta approximates the granularity (adaptive mode computed its own).
    const resDeltas = [];
    for (let i = 1; i < resolutionProbe.length; i++) {
      const d = resolutionProbe[i] - resolutionProbe[i - 1];
      if (d > 0) resDeltas.push(d);
    }
    timerGranularityMs = resDeltas.length
      ? resDeltas.reduce((a, b) => Math.min(a, b), Infinity)
      : null;
  }

  // Autocorrelation at diagnostic lags; extended lags catch long-period
  // steal-time rhythms (Xen: ~150 iters).
  const autocorrelations = {};
  for (const lag of [1, 2, 3, 5, 10, 25, 50]) {
    if (lag < timings.length) {
      autocorrelations[`lag${lag}`] = wasm.compute_autocorrelation(timings, lag);
    }
  }

  // Secondary probe: memory bandwidth jitter.
  const memTimings = Array.from(wasm.run_memory_probe(memSizeKb, memIterations));

  return {
    timings,
    resolutionProbe,
    timerGranularityMs,
    autocorrelations,
    memTimings,
    phases,
    checksum,
    collectedAt: startedAt,
    iterations: timings.length, // actual count (adaptive may stop early)
    matrixSize,
    adaptive: adaptiveInfo, // null outside adaptive mode
  };
}
834
+
835
// Arithmetic mean; returns 0 for an empty array (avoids 0/0 = NaN).
function _mean$1(values) {
  if (values.length === 0) return 0;
  let total = 0;
  for (const v of values) total += v;
  return total / values.length;
}
838
+
839
+ /**
840
+ * @typedef {object} EntropyResult
841
+ * @property {number[]} timings - per-iteration wall-clock deltas (ms)
842
+ * @property {number[]} resolutionProbe - raw successive perf.now() readings
843
+ * @property {number|null} timerGranularityMs - effective timer resolution
844
 * @property {object} autocorrelations - { lag1, lag2, lag3, lag5, lag10, lag25, lag50 }; lags ≥ the sample count are omitted
845
+ * @property {number[]} memTimings - memory-probe timings (ms)
846
+ * @property {string} checksum - proof the computation ran
847
+ * @property {number} collectedAt - Date.now() at probe start
848
+ * @property {number} iterations
849
+ * @property {number} matrixSize
850
+ */
851
+
852
+ /**
853
+ * @sovereign/pulse — Bio-Binding Layer
854
+ *
855
+ * Captures mouse-movement micro-stutters and keystroke-cadence dynamics
856
+ * WHILE the hardware entropy probe is running. Computes the
857
+ * "Interference Coefficient": how much human input jitters hardware timing.
858
+ *
859
+ * PRIVACY NOTE: Only timing deltas are retained. No key labels, no raw
860
+ * (x, y) coordinates, no content of any kind is stored or transmitted.
861
+ */
862
+
863
+ // ---------------------------------------------------------------------------
864
+ // Internal state
865
+ // ---------------------------------------------------------------------------
866
const MAX_EVENTS = 500; // rolling buffer cap — mouse and key buffers each stop growing at this size
867
+
868
+ // ---------------------------------------------------------------------------
869
+ // BioCollector class
870
+ // ---------------------------------------------------------------------------
871
/**
 * Captures pointer-movement and keystroke timing while the entropy probe
 * runs. Only timing/movement deltas are buffered (each buffer capped at
 * MAX_EVENTS); key identities and absolute cursor positions are never stored.
 * Usage: start() → (user interacts) → snapshot(timings) → stop().
 */
class BioCollector {
  constructor() {
    // Stored mouse records: { t, dt, dx, dy, pressure, pointerType }
    this._mouseEvents = [];
    // Stored key records: { t, dwell } — dwell is null if keyup had no matching keydown
    this._keyEvents = [];
    this._lastKey = {}; // e.code → { downAt: t } for keys currently held down
    this._lastMouse = null; // previous pointer sample { t, x, y }
    this._startTime = null; // performance.now() at start(); null before first start
    this._active = false;

    // Bound handlers (needed for removeEventListener)
    this._onMouseMove = this._onMouseMove.bind(this);
    this._onKeyDown = this._onKeyDown.bind(this);
    this._onKeyUp = this._onKeyUp.bind(this);
  }

  // ── Lifecycle ────────────────────────────────────────────────────────────

  /** Attach pointer/keyboard listeners. Idempotent; no-op outside a browser. */
  start() {
    if (this._active) return;
    this._active = true;
    this._startTime = performance.now();

    if (typeof window !== 'undefined') {
      // passive: true — handlers never call preventDefault, so input stays smooth
      window.addEventListener('pointermove', this._onMouseMove, { passive: true });
      window.addEventListener('keydown', this._onKeyDown, { passive: true });
      window.addEventListener('keyup', this._onKeyUp, { passive: true });
    }
  }

  /** Detach listeners. Buffers are kept, so snapshot() still works after stop(). */
  stop() {
    if (!this._active) return;
    this._active = false;

    if (typeof window !== 'undefined') {
      window.removeEventListener('pointermove', this._onMouseMove);
      window.removeEventListener('keydown', this._onKeyDown);
      window.removeEventListener('keyup', this._onKeyUp);
    }
  }

  // ── Event handlers ────────────────────────────────────────────────────────

  /** Record inter-event interval + movement delta for a pointer move. */
  _onMouseMove(e) {
    if (!this._active) return;
    const t = e.timeStamp ?? performance.now();
    const cur = { t, x: e.clientX, y: e.clientY };

    // First event only seeds _lastMouse; deltas need a predecessor.
    if (this._lastMouse) {
      const dt = t - this._lastMouse.t;
      const dx = cur.x - this._lastMouse.x;
      const dy = cur.y - this._lastMouse.y;
      // Only store the delta, not absolute position (privacy)
      if (this._mouseEvents.length < MAX_EVENTS) {
        this._mouseEvents.push({ t, dt, dx, dy,
          pressure: e.pressure ?? 0,
          pointerType: e.pointerType ?? 'mouse' });
      }
    }
    this._lastMouse = cur;
  }

  /** Mark key-down time so the matching keyup can compute dwell. */
  _onKeyDown(e) {
    if (!this._active) return;
    const t = e.timeStamp ?? performance.now();
    // Store timestamp keyed by code (NOT key label)
    this._lastKey[e.code] = { downAt: t };
  }

  /** Compute dwell (hold) time for the released key; key identity is discarded. */
  _onKeyUp(e) {
    if (!this._active) return;
    const t = e.timeStamp ?? performance.now();
    const rec = this._lastKey[e.code];
    const dwell = rec ? (t - rec.downAt) : null;
    delete this._lastKey[e.code];

    if (this._keyEvents.length < MAX_EVENTS) {
      // Only dwell time; key identity NOT stored.
      this._keyEvents.push({ t, dwell });
    }
  }

  // ── snapshot ─────────────────────────────────────────────────────────────

  /**
   * Returns a privacy-preserving statistical snapshot of collected bio signals.
   * Raw events are summarised; nothing identifiable is included in the output.
   *
   * @param {number[]} computationTimings - entropy probe timing array
   * @returns {BioSnapshot}
   */
  snapshot(computationTimings = []) {
    const now = performance.now();
    const durationMs = this._startTime != null ? (now - this._startTime) : 0;

    // ── Mouse statistics ────────────────────────────────────────────────
    const iei = this._mouseEvents.map(e => e.dt); // inter-event intervals (ms)
    const velocities = this._mouseEvents.map(e =>
      e.dt > 0 ? Math.hypot(e.dx, e.dy) / e.dt : 0
    );
    const pressure = this._mouseEvents.map(e => e.pressure);
    const angJerk = _computeAngularJerk(this._mouseEvents);

    const mouseStats = {
      sampleCount: iei.length,
      ieiMean: _mean(iei),
      ieiCV: _cv(iei), // coefficient of variation — human input is bursty
      velocityP50: _percentile$1(velocities, 50),
      velocityP95: _percentile$1(velocities, 95),
      angularJerkMean: _mean(angJerk),
      pressureVariance: _variance(pressure),
    };

    // ── Keyboard statistics ───────────────────────────────────────────────
    const dwellTimes = this._keyEvents.filter(e => e.dwell != null).map(e => e.dwell);
    const iki = []; // inter-keystroke intervals (keyup-to-keyup, ms)
    for (let i = 1; i < this._keyEvents.length; i++) {
      iki.push(this._keyEvents[i].t - this._keyEvents[i - 1].t);
    }

    const keyStats = {
      sampleCount: dwellTimes.length,
      dwellMean: _mean(dwellTimes),
      dwellCV: _cv(dwellTimes),
      ikiMean: _mean(iki),
      ikiCV: _cv(iki),
    };

    // ── Interference Coefficient ──────────────────────────────────────────
    // Cross-correlate input event density with computation timing deviations.
    // A real human on real hardware creates measurable CPU-scheduling pressure
    // that perturbs the entropy probe's timing.
    const interferenceCoefficient = _computeInterference(
      this._mouseEvents,
      this._keyEvents,
      computationTimings,
    );

    return {
      mouse: mouseStats,
      keyboard: keyStats,
      interferenceCoefficient,
      durationMs,
      // Minimum signal to trust the stats: >5 pointer deltas or >2 dwell samples
      hasActivity: iei.length > 5 || dwellTimes.length > 2,
    };
  }
}
1017
+
1018
+ /**
1019
+ * @typedef {object} BioSnapshot
1020
+ * @property {object} mouse
1021
+ * @property {object} keyboard
1022
+ * @property {number} interferenceCoefficient – [−1, 1]; higher = more human
1023
+ * @property {number} durationMs
1024
+ * @property {boolean} hasActivity
1025
+ */
1026
+
1027
+ // ---------------------------------------------------------------------------
1028
+ // Statistical helpers (private)
1029
+ // ---------------------------------------------------------------------------
1030
+
1031
/** Arithmetic mean; returns 0 for an empty array. */
function _mean(arr) {
  const n = arr.length;
  if (n === 0) return 0;
  let sum = 0;
  for (let i = 0; i < n; i++) sum += arr[i];
  return sum / n;
}
1035
+
1036
/** Sample (n−1) variance; 0 when fewer than two samples. */
function _variance(arr) {
  const n = arr.length;
  if (n < 2) return 0;
  const mu = _mean(arr);
  let ss = 0;
  for (const v of arr) {
    const d = v - mu;
    ss += d * d;
  }
  return ss / (n - 1);
}
1041
+
1042
/** Coefficient of variation: stddev / |mean|; 0 for empty input or zero mean. */
function _cv(arr) {
  if (arr.length === 0) return 0;
  const mu = _mean(arr);
  if (mu === 0) return 0;
  return Math.sqrt(_variance(arr)) / Math.abs(mu);
}
1048
+
1049
/**
 * Linear-interpolated percentile. The input need not be pre-sorted — a
 * sorted copy is made internally. Returns 0 for an empty array.
 */
function _percentile$1(sorted, p) {
  const values = Array.from(sorted).sort((x, y) => x - y);
  if (values.length === 0) return 0;
  const pos = (p / 100) * (values.length - 1);
  const below = Math.floor(pos);
  const above = Math.ceil(pos);
  const frac = pos - below;
  return values[below] + (values[above] - values[below]) * frac;
}
1057
+
1058
/**
 * Angular jerk: second derivative of movement direction (radians / s²).
 *
 * The direction of each movement step is atan2(dy, dx) ∈ (-π, π]. Raw
 * differences of that angle jump by ~2π whenever the direction crosses the
 * ±π branch cut (e.g. small wiggles while moving left), which produced huge
 * spurious jerk spikes. Each angle delta is therefore wrapped into (-π, π]
 * before differentiating.
 *
 * @param {Array<{dx: number, dy: number, dt: number}>} events - movement deltas
 * @returns {number[]} |angular jerk| per interior sample; empty if < 3 events
 */
function _computeAngularJerk(events) {
  if (events.length < 3) return [];

  // Movement direction per event.
  const angles = events.map(({ dx, dy }) => Math.atan2(dy, dx));

  // Smallest signed angular difference, wrapped into (-π, π].
  const wrapDelta = (delta) => {
    let d = delta % (2 * Math.PI);
    if (d > Math.PI) d -= 2 * Math.PI;
    else if (d <= -Math.PI) d += 2 * Math.PI;
    return d;
  };

  // First derivative: angular velocity over the interval ending at event i
  // (dt of 0 is guarded to 1, matching the original behavior).
  const d1 = [];
  for (let i = 1; i < angles.length; i++) {
    const dt = events[i].dt || 1;
    d1.push(wrapDelta(angles[i] - angles[i - 1]) / dt);
  }

  // Second derivative: |angular jerk|. d1[i] spans the interval ending at
  // event i+1, so the matching dt is events[i + 1].dt (was off by one).
  const d2 = [];
  for (let i = 1; i < d1.length; i++) {
    const dt = events[i + 1].dt || 1;
    d2.push(Math.abs((d1[i] - d1[i - 1]) / dt));
  }
  return d2;
}
1078
+
1079
/**
 * Interference Coefficient.
 *
 * Builds two parallel series over the probe samples:
 *   X[i] = 1 if any input event fell within ±16 ms (one animation frame) of
 *          sample i's estimated timestamp, else 0
 *   Y[i] = timing[i] minus the mean timing
 * and returns their Pearson correlation. A real human on real hardware
 * produces positive correlation: input events cause measurable CPU
 * scheduling perturbations in the probe.
 *
 * Probe samples lack absolute timestamps, so they are spread evenly across
 * the span of observed input events as a statistical approximation.
 */
function _computeInterference(mouseEvents, keyEvents, timings) {
  if (!timings.length) return 0;

  const inputTimes = mouseEvents
    .map((e) => e.t)
    .concat(keyEvents.map((e) => e.t))
    .sort((a, b) => a - b);
  if (!inputTimes.length) return 0;

  const WINDOW_MS = 16;
  const avg = _mean(timings);
  const first = inputTimes[0];
  const span = Math.max(inputTimes[inputTimes.length - 1] - first, 1);

  const X = [];
  const Y = [];
  for (let i = 0; i < timings.length; i++) {
    const tSample = first + (i / timings.length) * span;
    let near = 0;
    for (const t of inputTimes) {
      if (Math.abs(t - tSample) < WINDOW_MS) {
        near = 1;
        break;
      }
    }
    X.push(near);
    Y.push(timings[i] - avg);
  }

  return _pearson(X, Y);
}
1122
+
1123
/** Pearson correlation of two equal-length series; 0 if degenerate. */
function _pearson(X, Y) {
  const n = X.length;
  if (n < 2) return 0;
  const meanX = _mean(X);
  const meanY = _mean(Y);
  let cov = 0;
  let varX = 0;
  let varY = 0;
  for (let i = 0; i < n; i++) {
    const dx = X[i] - meanX;
    const dy = Y[i] - meanY;
    cov += dx * dy;
    varX += dx * dx;
    varY += dy * dy;
  }
  const norm = Math.sqrt(varX * varY);
  // Guard against division by ~0 when either series is constant.
  return norm < 1e-14 ? 0 : cov / norm;
}
1139
+
1140
+ /**
1141
+ * Utilities for hex, bytes, CSPRNG.
1142
+ * @module
1143
+ */
1144
+ /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */
1145
+ // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+.
1146
+ // node.js versions earlier than v19 don't declare it in global scope.
1147
+ // For node.js, package.json#exports field mapping rewrites import
1148
+ // from `crypto` to `cryptoNode`, which imports native module.
1149
+ // Makes the utils un-importable in browsers without a bundler.
1150
+ // Once node.js 18 is deprecated (2025-04-30), we can just drop the import.
1151
/** Checks if something is Uint8Array. Be careful: nodejs Buffer will return true. */
function isBytes(a) {
  if (a instanceof Uint8Array) return true;
  return ArrayBuffer.isView(a) && a.constructor.name === 'Uint8Array';
}
1155
/** Asserts something is positive integer. */
function anumber(n) {
  const valid = Number.isSafeInteger(n) && n >= 0;
  if (!valid)
    throw new Error('positive integer expected, got ' + n);
}
1160
/** Asserts something is Uint8Array, optionally of one of the given lengths. */
function abytes(b, ...lengths) {
  if (!isBytes(b)) {
    throw new Error('Uint8Array expected');
  }
  const restricted = lengths.length > 0;
  if (restricted && !lengths.includes(b.length)) {
    throw new Error('Uint8Array expected of length ' + lengths + ', got length=' + b.length);
  }
}
1167
/** Asserts a hash instance has not been destroyed / finished */
function aexists(instance, checkFinished = true) {
  const { destroyed, finished } = instance;
  if (destroyed)
    throw new Error('Hash instance has been destroyed');
  if (checkFinished && finished)
    throw new Error('Hash#digest() has already been called');
}
1174
/** Asserts output is properly-sized byte array */
function aoutput(out, instance) {
  abytes(out);
  const minLen = instance.outputLen;
  if (out.length < minLen) {
    throw new Error('digestInto() expects output buffer of length at least ' + minLen);
  }
}
1182
/** Cast u8 / u16 / u32 to u8 (a view over the same buffer, no copy). */
function u8(arr) {
  const { buffer, byteOffset, byteLength } = arr;
  return new Uint8Array(buffer, byteOffset, byteLength);
}
1186
/** Cast u8 / u16 / u32 to u32 (view over the same buffer; trailing bytes dropped). */
function u32(arr) {
  const wordCount = Math.floor(arr.byteLength / 4);
  return new Uint32Array(arr.buffer, arr.byteOffset, wordCount);
}
1190
/** Zeroize typed arrays in place. Warning: JS provides no guarantees. */
function clean(...arrays) {
  for (const arr of arrays) {
    arr.fill(0);
  }
}
1196
/** The rotate right (circular right shift) operation for uint32 */
function rotr(word, shift) {
  return (word >>> shift) | (word << (32 - shift));
}
1200
/** Is current platform little-endian? Most are. Big-Endian platform: IBM */
const isLE = /* @__PURE__ */ (() => new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44)();
/** The byte swap operation for uint32 */
function byteSwap(word) {
    return (((word << 24) & 0xff000000) |
        ((word << 8) & 0xff0000) |
        ((word >>> 8) & 0xff00) |
        ((word >>> 24) & 0xff));
}
/** Conditionally byte swap if on a big-endian platform */
const swap8IfBE = isLE
    ? (n) => n
    : (n) => byteSwap(n);
/** In place byte swap for Uint32Array */
function byteSwap32(arr) {
    for (let i = 0; i < arr.length; i++) {
        arr[i] = byteSwap(arr[i]);
    }
    return arr;
}
/** In-place conditional byte swap for Uint32Array; identity on little-endian hosts */
const swap32IfBE = isLE
    ? (u) => u
    : byteSwap32;
1223
// Built-in hex conversion https://caniuse.com/mdn-javascript_builtins_uint8array_fromhex
// True when the runtime ships native Uint8Array.prototype.toHex / Uint8Array.fromHex.
const hasHexBuiltin = /* @__PURE__ */ (() =>
// @ts-ignore
typeof Uint8Array.from([]).toHex === 'function' && typeof Uint8Array.fromHex === 'function')();
// Array where index 0xf0 (240) is mapped to string 'f0'
const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0'));
1229
/**
 * Convert byte array to hex string. Uses built-in function, when available.
 * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123'
 */
function bytesToHex(bytes) {
  abytes(bytes);
  // @ts-ignore
  if (hasHexBuiltin)
    return bytes.toHex();
  // Table lookup (hexes) is much faster than per-byte toString(16).
  const parts = [];
  for (const b of bytes) {
    parts.push(hexes[b]);
  }
  return parts.join('');
}
1245
/**
 * Converts string to bytes using UTF8 encoding.
 * @example utf8ToBytes('abc') // Uint8Array.from([97, 98, 99])
 */
function utf8ToBytes(str) {
  if (typeof str !== 'string')
    throw new Error('string expected');
  // Re-wrap to a plain Uint8Array (see https://bugzil.la/1681809).
  const encoded = new TextEncoder().encode(str);
  return new Uint8Array(encoded);
}
1254
/**
 * Normalizes (non-hex) string or Uint8Array to Uint8Array.
 * Warning: when Uint8Array is passed, it would NOT get copied —
 * keep that in mind for future mutable operations.
 */
function toBytes(data) {
  const bytes = typeof data === 'string' ? utf8ToBytes(data) : data;
  abytes(bytes);
  return bytes;
}
1265
/** For runtime check if class implements interface (via `instanceof Hash`). Carries no behavior. */
class Hash {
}
1268
/**
 * Wraps a hash constructor into a one-shot callable `(msg, opts) => digest`
 * that also exposes `outputLen`, `blockLen` and a streaming `create(opts)`
 * factory (metadata is read from a probe instance built with `{}`).
 */
function createXOFer(hashCons) {
  const oneShot = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest();
  const probe = hashCons({});
  oneShot.outputLen = probe.outputLen;
  oneShot.blockLen = probe.blockLen;
  oneShot.create = (opts) => hashCons(opts);
  return oneShot;
}
1276
+
1277
/**
 * Internal Merkle-Damgard hash utils.
 * @module
 */
/**
 * Initial SHA-2 state: fractional parts of square roots of primes.
 * (Upstream note covers SHA-512's 16 primes 2..53; only the SHA-256
 * constants — 8 primes 2..19 — are vendored here.)
 * Check out `test/misc/sha2-gen-iv.js` for recomputation guide.
 */
/** Initial SHA256 state. Bits 0..32 of frac part of sqrt of primes 2..19 */
const SHA256_IV = /* @__PURE__ */ Uint32Array.from([
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
]);
1289
+
1290
/**
 * Internal helpers for u64. BigUint64Array is too slow as per 2025, so we implement it using Uint32Array.
 * @todo re-check https://issues.chromium.org/issues/42212588
 * @module
 */
const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1);
const _32n = /* @__PURE__ */ BigInt(32);
/**
 * Split a BigInt into two 32-bit halves. With `le`, `h` holds the LOW word
 * (no sign coercion); without it, `h`/`l` are high/low coerced to int32.
 */
function fromBig(n, le = false) {
  const lowWord = n & U32_MASK64;
  const highWord = (n >> _32n) & U32_MASK64;
  if (le)
    return { h: Number(lowWord), l: Number(highWord) };
  return { h: Number(highWord) | 0, l: Number(lowWord) | 0 };
}
1302
+
1303
+ /**
1304
+ * Internal helpers for blake hash.
1305
+ * @module
1306
+ */
1307
// Mixing function G splitted in two halfs
/** First half of the BLAKE G mixing function (rotations 16 and 12). */
function G1s(a, b, c, d, x) {
  const a2 = (a + b + x) | 0;
  const d2 = rotr(d ^ a2, 16);
  const c2 = (c + d2) | 0;
  const b2 = rotr(b ^ c2, 12);
  return { a: a2, b: b2, c: c2, d: d2 };
}
/** Second half of the BLAKE G mixing function (rotations 8 and 7). */
function G2s(a, b, c, d, x) {
  const a2 = (a + b + x) | 0;
  const d2 = rotr(d ^ a2, 8);
  const c2 = (c + d2) | 0;
  const b2 = rotr(b ^ c2, 7);
  return { a: a2, b: b2, c: c2, d: d2 };
}
1322
+
1323
+ /**
1324
+ * blake2b (64-bit) & blake2s (8 to 32-bit) hash functions.
1325
+ * b could have been faster, but there is no fast u64 in js, so s is 1.5x faster.
1326
+ * @module
1327
+ */
1328
/** Class, from which others are subclassed. Handles buffering/streaming; subclasses supply compress()/get()/set(). */
class BLAKE2 extends Hash {
    /**
     * @param {number} blockLen  - compression block size in bytes
     * @param {number} outputLen - digest size in bytes
     */
    constructor(blockLen, outputLen) {
        super();
        this.finished = false;
        this.destroyed = false;
        this.length = 0;   // total bytes absorbed so far
        this.pos = 0;      // fill level of `buffer`
        anumber(blockLen);
        anumber(outputLen);
        this.blockLen = blockLen;
        this.outputLen = outputLen;
        this.buffer = new Uint8Array(blockLen);
        this.buffer32 = u32(this.buffer); // u32 view over the same bytes
    }
    /** Absorb `data` (string or Uint8Array). Returns `this` for chaining. */
    update(data) {
        aexists(this);
        data = toBytes(data);
        abytes(data);
        // Main difference with other hashes: there is flag for last block,
        // so we cannot process current block before we know that there
        // is the next one. This significantly complicates logic and reduces ability
        // to do zero-copy processing
        const { blockLen, buffer, buffer32 } = this;
        const len = data.length;
        const offset = data.byteOffset;
        const buf = data.buffer;
        for (let pos = 0; pos < len;) {
            // If buffer is full and we still have input (don't process last block, same as blake2s)
            if (this.pos === blockLen) {
                swap32IfBE(buffer32);
                this.compress(buffer32, 0, false);
                swap32IfBE(buffer32);
                this.pos = 0;
            }
            const take = Math.min(blockLen - this.pos, len - pos);
            const dataOffset = offset + pos;
            // full block && aligned to 4 bytes && not last in input
            if (take === blockLen && !(dataOffset % 4) && pos + take < len) {
                // Fast path: compress directly from the input's buffer, zero-copy.
                const data32 = new Uint32Array(buf, dataOffset, Math.floor((len - pos) / 4));
                swap32IfBE(data32);
                for (let pos32 = 0; pos + blockLen < len; pos32 += buffer32.length, pos += blockLen) {
                    this.length += blockLen;
                    this.compress(data32, pos32, false);
                }
                swap32IfBE(data32);
                continue;
            }
            // Slow path: stage partial/final data through the internal buffer.
            buffer.set(data.subarray(pos, pos + take), this.pos);
            this.pos += take;
            this.length += take;
            pos += take;
        }
        return this;
    }
    /** Finalize into caller-provided `out` (must be >= outputLen bytes). */
    digestInto(out) {
        aexists(this);
        aoutput(out, this);
        const { pos, buffer32 } = this;
        this.finished = true;
        // Padding
        clean(this.buffer.subarray(pos));
        swap32IfBE(buffer32);
        this.compress(buffer32, 0, true);
        swap32IfBE(buffer32);
        const out32 = u32(out);
        this.get().forEach((v, i) => (out32[i] = swap8IfBE(v)));
    }
    /** Finalize and return a fresh Uint8Array digest; destroys the instance. */
    digest() {
        const { buffer, outputLen } = this;
        this.digestInto(buffer);
        const res = buffer.slice(0, outputLen);
        this.destroy();
        return res;
    }
    /** Copy full hashing state into `to` (or a new instance). */
    _cloneInto(to) {
        const { buffer, length, finished, destroyed, outputLen, pos } = this;
        to || (to = new this.constructor({ dkLen: outputLen }));
        to.set(...this.get());
        to.buffer.set(buffer);
        to.destroyed = destroyed;
        to.finished = finished;
        to.length = length;
        to.pos = pos;
        // @ts-ignore
        to.outputLen = outputLen;
        return to;
    }
    /** Convenience wrapper around _cloneInto(). */
    clone() {
        return this._cloneInto();
    }
}
1420
/**
 * Unrolled BLAKE2s/BLAKE3 round function: for each of `rounds` rounds,
 * applies the G mixing function to the four columns of the 4×4 state and
 * then to the four diagonals, reading message words via schedule `s`.
 * Returns the full 16-word working state; the caller folds it into the
 * chaining value.
 */
// prettier-ignore
function compress(s, offset, msg, rounds, v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) {
    let j = 0;
    for (let i = 0; i < rounds; i++) {
        ({ a: v0, b: v4, c: v8, d: v12 } = G1s(v0, v4, v8, v12, msg[offset + s[j++]]));
        ({ a: v0, b: v4, c: v8, d: v12 } = G2s(v0, v4, v8, v12, msg[offset + s[j++]]));
        ({ a: v1, b: v5, c: v9, d: v13 } = G1s(v1, v5, v9, v13, msg[offset + s[j++]]));
        ({ a: v1, b: v5, c: v9, d: v13 } = G2s(v1, v5, v9, v13, msg[offset + s[j++]]));
        ({ a: v2, b: v6, c: v10, d: v14 } = G1s(v2, v6, v10, v14, msg[offset + s[j++]]));
        ({ a: v2, b: v6, c: v10, d: v14 } = G2s(v2, v6, v10, v14, msg[offset + s[j++]]));
        ({ a: v3, b: v7, c: v11, d: v15 } = G1s(v3, v7, v11, v15, msg[offset + s[j++]]));
        ({ a: v3, b: v7, c: v11, d: v15 } = G2s(v3, v7, v11, v15, msg[offset + s[j++]]));
        ({ a: v0, b: v5, c: v10, d: v15 } = G1s(v0, v5, v10, v15, msg[offset + s[j++]]));
        ({ a: v0, b: v5, c: v10, d: v15 } = G2s(v0, v5, v10, v15, msg[offset + s[j++]]));
        ({ a: v1, b: v6, c: v11, d: v12 } = G1s(v1, v6, v11, v12, msg[offset + s[j++]]));
        ({ a: v1, b: v6, c: v11, d: v12 } = G2s(v1, v6, v11, v12, msg[offset + s[j++]]));
        ({ a: v2, b: v7, c: v8, d: v13 } = G1s(v2, v7, v8, v13, msg[offset + s[j++]]));
        ({ a: v2, b: v7, c: v8, d: v13 } = G2s(v2, v7, v8, v13, msg[offset + s[j++]]));
        ({ a: v3, b: v4, c: v9, d: v14 } = G1s(v3, v4, v9, v14, msg[offset + s[j++]]));
        ({ a: v3, b: v4, c: v9, d: v14 } = G2s(v3, v4, v9, v14, msg[offset + s[j++]]));
    }
    return { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 };
}
1443
+
1444
+ /**
1445
+ * Blake3 fast hash is Blake2 with reduced security (round count). Can also be used as MAC & KDF.
1446
+ *
1447
+ * It is advertised as "the fastest cryptographic hash". However, it isn't true in JS.
1448
+ * Why is this so slow? While it should be 6x faster than blake2b, perf diff is only 20%:
1449
+ *
1450
+ * * There is only 30% reduction in number of rounds from blake2s
1451
+ * * Speed-up comes from tree structure, which is parallelized using SIMD & threading.
1452
+ * These features are not present in JS, so we only get overhead from trees.
1453
+ * * Parallelization only happens on 1024-byte chunks: there is no benefit for small inputs.
1454
+ * * It is still possible to make it faster using: a) loop unrolling b) web workers c) wasm
1455
+ * @module
1456
+ */
1457
// Flag bitset
const B3_Flags = {
    CHUNK_START: 0b1,
    CHUNK_END: 0b10,
    PARENT: 0b100,
    ROOT: 0b1000,
    KEYED_HASH: 0b10000,
    DERIVE_KEY_CONTEXT: 0b100000,
    DERIVE_KEY_MATERIAL: 0b1000000,
};
// BLAKE3 reuses the SHA-256 IV words as its default initial chaining value.
const B3_IV = SHA256_IV.slice();
// Message word schedule: 7 rounds × 16 indices; each round's order is the
// fixed permutation of the previous round's.
const B3_SIGMA = /* @__PURE__ */ (() => {
    const Id = Array.from({ length: 16 }, (_, i) => i);
    const permute = (arr) => [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8].map((i) => arr[i]);
    const res = [];
    for (let i = 0, v = Id; i < 7; i++, v = permute(v))
        res.push(...v);
    return Uint8Array.from(res);
})();
1476
/** Blake3 hash. Can be used as MAC and KDF. Streams 1024-byte chunks through a binary tree of parent nodes. */
class BLAKE3 extends BLAKE2 {
    /**
     * @param {object} [opts]  - { dkLen?, key? (32 bytes, MAC mode), context? (KDF mode) }
     * @param {number} [flags] - internal flag bits (used for recursive context-key derivation)
     */
    constructor(opts = {}, flags = 0) {
        super(64, opts.dkLen === undefined ? 32 : opts.dkLen);
        this.chunkPos = 0; // Position of current block in chunk
        this.chunksDone = 0; // How many chunks we already have
        this.flags = 0 | 0;
        this.stack = []; // chaining values of unmerged subtrees (tree path)
        // Output
        this.posOut = 0;
        this.bufferOut32 = new Uint32Array(16);
        this.chunkOut = 0; // index of output chunk
        this.enableXOF = true;
        const { key, context } = opts;
        const hasContext = context !== undefined;
        if (key !== undefined) {
            // Keyed (MAC) mode: IV is the 32-byte key.
            if (hasContext)
                throw new Error('Only "key" or "context" can be specified at same time');
            const k = toBytes(key).slice();
            abytes(k, 32);
            this.IV = u32(k);
            swap32IfBE(this.IV);
            this.flags = flags | B3_Flags.KEYED_HASH;
        }
        else if (hasContext) {
            // KDF mode: IV is BLAKE3(context) computed with DERIVE_KEY_CONTEXT.
            const ctx = toBytes(context);
            const contextKey = new BLAKE3({ dkLen: 32 }, B3_Flags.DERIVE_KEY_CONTEXT)
                .update(ctx)
                .digest();
            this.IV = u32(contextKey);
            swap32IfBE(this.IV);
            this.flags = flags | B3_Flags.DERIVE_KEY_MATERIAL;
        }
        else {
            // Plain hashing mode.
            this.IV = B3_IV.slice();
            this.flags = flags;
        }
        this.state = this.IV.slice();
        this.bufferOut = u8(this.bufferOut32);
    }
    // Unused
    get() {
        return [];
    }
    set() { }
    /** One compression of a 64-byte block into `state`, with 64-bit chunk counter. */
    b2Compress(counter, flags, buf, bufPos = 0) {
        const { state: s, pos } = this;
        const { h, l } = fromBig(BigInt(counter), true);
        // prettier-ignore
        const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(B3_SIGMA, bufPos, buf, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], h, l, pos, flags);
        s[0] = v0 ^ v8;
        s[1] = v1 ^ v9;
        s[2] = v2 ^ v10;
        s[3] = v3 ^ v11;
        s[4] = v4 ^ v12;
        s[5] = v5 ^ v13;
        s[6] = v6 ^ v14;
        s[7] = v7 ^ v15;
    }
    /** Absorb one buffered block; on chunk boundary (16 blocks) merge subtrees. */
    compress(buf, bufPos = 0, isLast = false) {
        // Compress last block
        let flags = this.flags;
        if (!this.chunkPos)
            flags |= B3_Flags.CHUNK_START;
        if (this.chunkPos === 15 || isLast)
            flags |= B3_Flags.CHUNK_END;
        if (!isLast)
            this.pos = this.blockLen;
        this.b2Compress(this.chunksDone, flags, buf, bufPos);
        this.chunkPos += 1;
        // If current block is last in chunk (16 blocks), then compress chunks
        if (this.chunkPos === 16 || isLast) {
            let chunk = this.state;
            this.state = this.IV.slice();
            // If not the last one, compress only when there are trailing zeros in chunk counter
            // chunks used as binary tree where current stack is path. Zero means current leaf is finished and can be compressed.
            // 1 (001) - leaf not finished (just push current chunk to stack)
            // 2 (010) - leaf finished at depth=1 (merge with last elm on stack and push back)
            // 3 (011) - last leaf not finished
            // 4 (100) - leafs finished at depth=1 and depth=2
            for (let last, chunks = this.chunksDone + 1; isLast || !(chunks & 1); chunks >>= 1) {
                if (!(last = this.stack.pop()))
                    break;
                this.buffer32.set(last, 0);
                this.buffer32.set(chunk, 8);
                this.pos = this.blockLen;
                this.b2Compress(0, this.flags | B3_Flags.PARENT, this.buffer32, 0);
                chunk = this.state;
                this.state = this.IV.slice();
            }
            this.chunksDone++;
            this.chunkPos = 0;
            this.stack.push(chunk);
        }
        this.pos = 0;
    }
    /** Deep-copy full tree state into `to` (or a fresh instance). */
    _cloneInto(to) {
        to = super._cloneInto(to);
        const { IV, flags, state, chunkPos, posOut, chunkOut, stack, chunksDone } = this;
        to.state.set(state.slice());
        to.stack = stack.map((i) => Uint32Array.from(i));
        to.IV.set(IV);
        to.flags = flags;
        to.chunkPos = chunkPos;
        to.chunksDone = chunksDone;
        to.posOut = posOut;
        to.chunkOut = chunkOut;
        to.enableXOF = this.enableXOF;
        to.bufferOut32.set(this.bufferOut32);
        return to;
    }
    /** Zeroize all secret-bearing buffers; instance becomes unusable. */
    destroy() {
        this.destroyed = true;
        clean(this.state, this.buffer32, this.IV, this.bufferOut32);
        clean(...this.stack);
    }
    // Same as b2Compress, but doesn't modify state and returns 16 u32 array (instead of 8)
    b2CompressOut() {
        const { state: s, pos, flags, buffer32, bufferOut32: out32 } = this;
        const { h, l } = fromBig(BigInt(this.chunkOut++));
        swap32IfBE(buffer32);
        // prettier-ignore
        const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(B3_SIGMA, 0, buffer32, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], l, h, pos, flags);
        out32[0] = v0 ^ v8;
        out32[1] = v1 ^ v9;
        out32[2] = v2 ^ v10;
        out32[3] = v3 ^ v11;
        out32[4] = v4 ^ v12;
        out32[5] = v5 ^ v13;
        out32[6] = v6 ^ v14;
        out32[7] = v7 ^ v15;
        out32[8] = s[0] ^ v8;
        out32[9] = s[1] ^ v9;
        out32[10] = s[2] ^ v10;
        out32[11] = s[3] ^ v11;
        out32[12] = s[4] ^ v12;
        out32[13] = s[5] ^ v13;
        out32[14] = s[6] ^ v14;
        out32[15] = s[7] ^ v15;
        swap32IfBE(buffer32);
        swap32IfBE(out32);
        this.posOut = 0;
    }
    /** Finalize the tree to the ROOT node; safe to call more than once. */
    finish() {
        if (this.finished)
            return;
        this.finished = true;
        // Padding
        clean(this.buffer.subarray(this.pos));
        // Process last chunk
        let flags = this.flags | B3_Flags.ROOT;
        if (this.stack.length) {
            flags |= B3_Flags.PARENT;
            swap32IfBE(this.buffer32);
            this.compress(this.buffer32, 0, true);
            swap32IfBE(this.buffer32);
            this.chunksDone = 0;
            this.pos = this.blockLen;
        }
        else {
            flags |= (!this.chunkPos ? B3_Flags.CHUNK_START : 0) | B3_Flags.CHUNK_END;
        }
        this.flags = flags;
        this.b2CompressOut();
    }
    /** Squeeze output bytes into `out`, recompressing per 64-byte output block. */
    writeInto(out) {
        aexists(this, false);
        abytes(out);
        this.finish();
        const { blockLen, bufferOut } = this;
        for (let pos = 0, len = out.length; pos < len;) {
            if (this.posOut >= blockLen)
                this.b2CompressOut();
            const take = Math.min(blockLen - this.posOut, len - pos);
            out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos);
            this.posOut += take;
            pos += take;
        }
        return out;
    }
    /** XOF squeeze into a caller buffer; forbidden after digest(). */
    xofInto(out) {
        if (!this.enableXOF)
            throw new Error('XOF is not possible after digest call');
        return this.writeInto(out);
    }
    /** XOF squeeze of `bytes` fresh output bytes. */
    xof(bytes) {
        anumber(bytes);
        return this.xofInto(new Uint8Array(bytes));
    }
    /** One-shot finalize into `out`; disables further XOF and destroys state. */
    digestInto(out) {
        aoutput(out, this);
        if (this.finished)
            throw new Error('digest() was already called');
        this.enableXOF = false;
        this.writeInto(out);
        this.destroy();
        return out;
    }
    /** One-shot finalize returning a fresh Uint8Array of outputLen bytes. */
    digest() {
        return this.digestInto(new Uint8Array(this.outputLen));
    }
}
1678
/**
 * BLAKE3 hash function. Can be used as MAC and KDF.
 * @param msg - message that would be hashed
 * @param opts - `dkLen` for output length, `key` for MAC mode, `context` for KDF mode
 * @example
 * const data = new Uint8Array(32);
 * const hash = blake3(data);
 * const mac = blake3(data, { key: new Uint8Array(32) });
 * const kdf = blake3(data, { context: 'application name' });
 */
// NOTE(review): createXOFer presumably wraps the BLAKE3 class into a callable
// exposing .create()/.xof()-style helpers — confirm against the shared utils
// earlier in this bundle.
const blake3 = /* @__PURE__ */ createXOFer((opts) => new BLAKE3(opts));
1689
+
1690
+ /**
1691
+ * @sovereign/pulse — Hardware Fingerprint & Proof Builder
1692
+ *
1693
+ * Assembles all collected signals into a canonical ProofPayload, then
1694
+ * produces a BLAKE3 commitment: BLAKE3(canonicalJSON(payload)).
1695
+ *
1696
+ * The commitment is what gets sent to the server. The server recomputes
1697
+ * the hash from the payload to detect tampering. Raw timing arrays and
1698
+ * pixel buffers are NOT included — only statistical summaries.
1699
+ *
1700
+ * Zero-Knowledge property: the server learns only that the device passes
1701
+ * statistical thresholds. It never sees raw hardware telemetry.
1702
+ */
1703
+
1704
+
1705
+ // ---------------------------------------------------------------------------
1706
+ // BLAKE3 helpers (re-exported for use by canvas.js etc.)
1707
+ // ---------------------------------------------------------------------------
1708
+
1709
/**
 * Compute BLAKE3 of a Uint8Array and return hex string.
 * @param {Uint8Array} data - raw bytes to hash
 * @returns {string} hex-encoded BLAKE3 digest
 */
function blake3Hex(data) {
    const digest = blake3(data);
    return bytesToHex(digest);
}
1717
+
1718
/**
 * Compute BLAKE3 of a UTF-8 string and return hex string.
 * @param {string} str - text to hash (UTF-8 encoded before hashing)
 * @returns {string} hex-encoded BLAKE3 digest
 */
function blake3HexStr(str) {
    const bytes = new TextEncoder().encode(str);
    return blake3Hex(bytes);
}
1726
+
1727
+ // ---------------------------------------------------------------------------
1728
+ // buildProof
1729
+ // ---------------------------------------------------------------------------
1730
+
1731
+ /**
1732
+ * Assembles a ProofPayload from all collected signals.
1733
+ * This is the canonical structure that gets hashed into the commitment.
1734
+ *
1735
+ * @param {object} p
1736
+ * @param {import('../collector/entropy.js').EntropyResult} p.entropy
1737
+ * @param {import('../analysis/jitter.js').JitterAnalysis} p.jitter
1738
+ * @param {import('../collector/bio.js').BioSnapshot} p.bio
1739
+ * @param {import('../collector/canvas.js').CanvasFingerprint} p.canvas
1740
+ * @param {import('../analysis/audio.js').AudioJitter} p.audio
1741
+ * @param {string} p.nonce – server-issued challenge nonce (hex)
1742
+ * @returns {ProofPayload}
1743
+ */
1744
function buildProof({ entropy, jitter, bio, canvas, audio, nonce }) {
    // The server-issued nonce binds this proof to a single challenge, so a
    // recorded payload cannot be replayed.
    if (!nonce || typeof nonce !== 'string') {
        throw new Error('@sovereign/pulse: nonce is required for anti-replay protection');
    }

    // Hash the raw timing arrays IN-BROWSER so we can prove their integrity
    // without transmitting the raw data.
    // NOTE(review): plain JSON.stringify (not canonicalJson) is used here;
    // that is fine for flat arrays of numbers, whose serialisation has no
    // key-ordering ambiguity.
    const timingsHash = blake3HexStr(JSON.stringify(entropy.timings));
    const memHash = blake3HexStr(JSON.stringify(entropy.memTimings));

    // Every float goes through _round$1, which caps decimal precision and
    // maps missing / non-finite metrics to null — keeps the payload small
    // and the canonical serialisation stable across engines.
    const payload = {
        version: 1,
        timestamp: entropy.collectedAt,
        nonce,

        signals: {
            // ── Entropy probe ───────────────────────────────────────────────────
            entropy: {
                timingsMean: _round$1(jitter.stats?.mean, 4),
                timingsCV: _round$1(jitter.stats?.cv, 4),
                timingsP50: _round$1(jitter.stats?.p50, 4),
                timingsP95: _round$1(jitter.stats?.p95, 4),
                timingsSkewness: _round$1(jitter.stats?.skewness, 4),
                timingsKurtosis: _round$1(jitter.stats?.kurtosis, 4),
                autocorr_lag1: _round$1(jitter.autocorrelations?.lag1, 4),
                autocorr_lag2: _round$1(jitter.autocorrelations?.lag2, 4),
                autocorr_lag5: _round$1(jitter.autocorrelations?.lag5, 4),
                autocorr_lag10: _round$1(jitter.autocorrelations?.lag10, 4),
                hurstExponent: _round$1(jitter.hurstExponent, 4),
                quantizationEntropy: _round$1(jitter.quantizationEntropy, 4),
                // Thermal slope keeps 8 decimals — drift per iteration is tiny.
                thermalDrift: _round$1(jitter.thermalSignature?.slope, 8),
                thermalPattern: jitter.thermalSignature?.pattern ?? 'unknown',
                outlierRate: _round$1(jitter.outlierRate, 4),
                timerGranularityMs: _round$1(entropy.timerGranularityMs, 6),
                checksum: entropy.checksum, // proves computation ran
                timingsHash, // proves timing array integrity
                memTimingsHash: memHash,
                iterations: entropy.iterations,
                matrixSize: entropy.matrixSize,
            },

            // ── Bio signals ─────────────────────────────────────────────────────
            bio: {
                mouseSampleCount: bio.mouse.sampleCount,
                mouseIEIMean: _round$1(bio.mouse.ieiMean, 3),
                mouseIEICV: _round$1(bio.mouse.ieiCV, 4),
                mouseVelocityP50: _round$1(bio.mouse.velocityP50, 3),
                mouseVelocityP95: _round$1(bio.mouse.velocityP95, 3),
                mouseAngularJerkMean: _round$1(bio.mouse.angularJerkMean, 4),
                pressureVariance: _round$1(bio.mouse.pressureVariance, 6),
                keyboardSampleCount: bio.keyboard.sampleCount,
                keyboardDwellMean: _round$1(bio.keyboard.dwellMean, 3),
                keyboardDwellCV: _round$1(bio.keyboard.dwellCV, 4),
                keyboardIKIMean: _round$1(bio.keyboard.ikiMean, 3),
                keyboardIKICV: _round$1(bio.keyboard.ikiCV, 4),
                interferenceCoefficient: _round$1(bio.interferenceCoefficient, 4),
                hasActivity: bio.hasActivity,
                durationMs: _round$1(bio.durationMs, 1),
            },

            // ── Canvas fingerprint ───────────────────────────────────────────────
            // Only hashes and summary fields are forwarded — never raw pixels.
            canvas: {
                webglRenderer: canvas.webglRenderer,
                webglVendor: canvas.webglVendor,
                webglVersion: canvas.webglVersion,
                webglPixelHash: canvas.webglPixelHash,
                canvas2dHash: canvas.canvas2dHash,
                extensionCount: canvas.extensionCount,
                isSoftwareRenderer: canvas.isSoftwareRenderer,
                available: canvas.available,
            },

            // ── Audio jitter ─────────────────────────────────────────────────────
            audio: {
                available: audio.available,
                workletAvailable: audio.workletAvailable,
                callbackJitterCV: _round$1(audio.callbackJitterCV, 4),
                noiseFloorMean: _round$1(audio.noiseFloorMean, 6),
                noiseFloorStd: _round$1(audio.noiseFloorStd, 6),
                sampleRate: audio.sampleRate,
                callbackCount: audio.callbackCount,
                jitterMeanMs: _round$1(audio.jitterMeanMs, 4),
                jitterP95Ms: _round$1(audio.jitterP95Ms, 4),
            },
        },

        // Top-level classification summary
        classification: {
            jitterScore: _round$1(jitter.score, 4),
            flags: jitter.flags ?? [],
        },
    };

    return payload;
}
1839
+
1840
+ /**
1841
+ * @typedef {object} ProofPayload
1842
+ * @property {number} version
1843
+ * @property {number} timestamp
1844
+ * @property {string} nonce
1845
+ * @property {object} signals
1846
+ * @property {object} classification
1847
+ */
1848
+
1849
+ // ---------------------------------------------------------------------------
1850
+ // buildCommitment
1851
+ // ---------------------------------------------------------------------------
1852
+
1853
+ /**
1854
+ * Hashes a ProofPayload into a BLAKE3 commitment.
1855
+ * Uses a deterministic canonical JSON serialiser (sorted keys) to ensure
1856
+ * byte-identical output across JS engines.
1857
+ *
1858
+ * @param {ProofPayload} payload
1859
+ * @returns {{ payload: ProofPayload, hash: string }}
1860
+ */
1861
function buildCommitment(payload) {
    // Canonical (sorted-key) serialisation makes the hash engine-independent.
    const canonical = canonicalJson(payload);
    return { payload, hash: blake3HexStr(canonical) };
}
1866
+
1867
+ // ---------------------------------------------------------------------------
1868
+ // canonicalJson
1869
+ //
1870
+ // JSON.stringify with sorted keys — ensures the hash is engine-independent.
1871
+ // Numbers are serialised with fixed precision to avoid cross-platform float
1872
+ // formatting differences.
1873
+ // ---------------------------------------------------------------------------
1874
+
1875
function canonicalJson(obj) {
    // Serialise with sorted keys (via _replacer) so the byte stream — and
    // therefore the BLAKE3 commitment — is identical across JS engines.
    // A space argument of 0 is equivalent to compact (no-whitespace) output.
    const canonical = JSON.stringify(obj, _replacer, 0);
    return canonical;
}
1878
+
1879
function _replacer(key, value) {
    // JSON.stringify replacer: rebuild plain objects with alphabetically
    // ordered keys so serialisation is deterministic across engines.
    // Arrays, null, and primitives pass through untouched.
    const isPlainObject =
        value !== null && typeof value === 'object' && !Array.isArray(value);
    if (!isPlainObject) return value;
    const ordered = {};
    for (const k of Object.keys(value).sort()) ordered[k] = value[k];
    return ordered;
}
1890
+
1891
+ // ---------------------------------------------------------------------------
1892
+ // Internal utilities
1893
+ // ---------------------------------------------------------------------------
1894
+
1895
/**
 * Round a numeric value to `decimals` decimal places.
 * Missing or non-finite values (null, undefined, NaN, ±Infinity) — and any
 * non-number type — normalise to `null` so the proof payload stays JSON-clean.
 * @param {number|null|undefined} v
 * @param {number} decimals - number of decimal places to keep
 * @returns {number|null}
 */
function _round$1(v, decimals) {
    // FIX: the global isFinite() coerces, so booleans and numeric strings
    // slipped through the guard and were "rounded". Number.isFinite plus an
    // explicit type check only admits genuine finite numbers.
    if (typeof v !== 'number' || !Number.isFinite(v)) return null;
    const factor = 10 ** decimals;
    return Math.round(v * factor) / factor;
}
1900
+
1901
+ /**
1902
+ * @sovereign/pulse — GPU Canvas Fingerprint
1903
+ *
1904
+ * Collects device-class signals from WebGL and 2D Canvas rendering.
1905
+ * The exact pixel values of GPU-rendered scenes are vendor/driver-specific
1906
+ * due to floating-point rounding in shader execution. Virtual machines
1907
+ * expose software renderers (LLVMpipe, SwiftShader, Microsoft Basic Render
1908
+ * Driver) whose strings and output pixels are well-known and enumerable.
1909
+ *
1910
+ * NO persistent identifier is generated – only a content hash is retained.
1911
+ */
1912
+
1913
+
1914
// ---------------------------------------------------------------------------
// Known software-renderer substrings (VM / headless environment indicators)
//
// Matched case-insensitively: collectCanvasFingerprint lowercases the renderer
// string before the substring test, so every entry here must be lowercase.
//
// FIX: removed 'angle ('. ANGLE is the GL translation layer used by ALL
// Chrome / Edge builds on Windows, including on real GPUs (renderer strings
// like "ANGLE (NVIDIA, NVIDIA GeForce ... Direct3D11 ...)"), so matching it
// flagged every Windows Chrome user as a software renderer. Actual software
// rendering through ANGLE still matches via 'swiftshader'.
// ---------------------------------------------------------------------------
const SOFTWARE_RENDERER_PATTERNS = [
    'llvmpipe', 'swiftshader', 'softpipe', 'mesa offscreen',
    'microsoft basic render', 'vmware svga', 'virtualbox',
    'parallels', 'google swiftshader',
];
1922
+
1923
+ // ---------------------------------------------------------------------------
1924
+ // collectCanvasFingerprint
1925
+ // ---------------------------------------------------------------------------
1926
+
1927
+ /**
1928
+ * @returns {Promise<CanvasFingerprint>}
1929
+ */
1930
async function collectCanvasFingerprint() {
    // Result skeleton: every field has a safe default so server-side or
    // API-blocked environments still yield a well-formed object.
    const result = {
        webglRenderer: null,
        webglVendor: null,
        webglVersion: null,
        webglPixelHash: null,
        canvas2dHash: null,
        extensionCount: 0,
        extensions: [],
        isSoftwareRenderer: false,
        available: false,
    };

    if (typeof document === 'undefined' && typeof OffscreenCanvas === 'undefined') {
        // Node.js / server-side with no DOM – skip gracefully.
        return result;
    }

    // ── WebGL fingerprint ────────────────────────────────────────────────────
    try {
        const canvas = _createCanvas(512, 512);
        let gl = canvas.getContext('webgl2') || canvas.getContext('webgl');

        if (gl) {
            // FIX: on engines that expose WebGL 1 but not a global
            // WebGL2RenderingContext constructor, the bare `instanceof` threw a
            // ReferenceError which the catch below silently swallowed — losing
            // the entire WebGL fingerprint. Guard the constructor lookup first.
            result.webglVersion =
                (typeof WebGL2RenderingContext !== 'undefined' &&
                 gl instanceof WebGL2RenderingContext) ? 2 : 1;
            result.available = true;

            // Renderer info (unmasked strings require the debug extension)
            const dbgInfo = gl.getExtension('WEBGL_debug_renderer_info');
            if (dbgInfo) {
                result.webglRenderer = gl.getParameter(dbgInfo.UNMASKED_RENDERER_WEBGL);
                result.webglVendor = gl.getParameter(dbgInfo.UNMASKED_VENDOR_WEBGL);
            }

            // Extension list (fingerprints driver capabilities)
            const exts = gl.getSupportedExtensions() ?? [];
            result.extensions = exts;
            result.extensionCount = exts.length;

            // Software-renderer detection (patterns are lowercase substrings)
            const rendererLc = (result.webglRenderer ?? '').toLowerCase();
            result.isSoftwareRenderer = SOFTWARE_RENDERER_PATTERNS.some(p =>
                rendererLc.includes(p)
            );

            // ── Render a Mandelbrot fragment scene ───────────────────────────────
            // Floating-point precision differences in the GPU's shader ALU cause
            // per-pixel rounding variations that are stable per device but differ
            // across GPU vendors, driver versions, and software renderers.
            const pixels = _renderMandelbrot(gl, canvas);
            result.webglPixelHash = pixels ? blake3Hex(pixels) : null;

            // Release the GL context explicitly rather than waiting for GC.
            gl.getExtension('WEBGL_lose_context')?.loseContext();
        }
    } catch (_) {
        // WebGL blocked (privacy settings, etc.) – continue with 2D canvas.
    }

    // ── 2D Canvas fingerprint ────────────────────────────────────────────────
    try {
        const c2 = _createCanvas(200, 50);
        const ctx2 = c2.getContext('2d');

        if (ctx2) {
            // Text rendering differences: font hinting, subpixel AA, emoji rasterisation
            ctx2.textBaseline = 'top';
            ctx2.font = '14px Arial, sans-serif';
            ctx2.fillStyle = 'rgba(102,204,0,0.7)';
            ctx2.fillText('Cwm fjordbank glyphs vext quiz 🎯', 2, 5);

            // Shadow compositing (driver-specific blur kernel)
            ctx2.shadowBlur = 10;
            ctx2.shadowColor = 'blue';
            ctx2.fillStyle = 'rgba(255,0,255,0.5)';
            ctx2.fillRect(100, 25, 80, 20);

            // Bezier curve (Bézier precision varies per 2D canvas implementation)
            ctx2.beginPath();
            ctx2.moveTo(10, 40);
            ctx2.bezierCurveTo(30, 0, 70, 80, 160, 30);
            ctx2.strokeStyle = 'rgba(0,0,255,0.8)';
            ctx2.lineWidth = 1.5;
            ctx2.stroke();

            // NOTE(review): OffscreenCanvas has no toDataURL (convertToBlob
            // instead) — in worker contexts this line throws and is swallowed
            // by the catch below, so canvas2dHash stays null there.
            const dataUrl = c2.toDataURL('image/png');
            // Hash the data URL (not storing raw image data)
            const enc = new TextEncoder().encode(dataUrl);
            result.canvas2dHash = blake3Hex(enc);

            result.available = true;
        }
    } catch (_) {
        // 2D canvas blocked.
    }

    return result;
}
2027
+
2028
+ /**
2029
+ * @typedef {object} CanvasFingerprint
2030
+ * @property {string|null} webglRenderer
2031
+ * @property {string|null} webglVendor
2032
+ * @property {1|2|null} webglVersion
2033
+ * @property {string|null} webglPixelHash
2034
+ * @property {string|null} canvas2dHash
2035
+ * @property {number} extensionCount
2036
+ * @property {string[]} extensions
2037
+ * @property {boolean} isSoftwareRenderer
2038
+ * @property {boolean} available
2039
+ */
2040
+
2041
+ // ---------------------------------------------------------------------------
2042
+ // Internal helpers
2043
+ // ---------------------------------------------------------------------------
2044
+
2045
function _createCanvas(w, h) {
    // Prefer OffscreenCanvas: works in workers and needs no DOM insertion.
    if (typeof OffscreenCanvas !== 'undefined') {
        return new OffscreenCanvas(w, h);
    }
    // Fall back to a detached <canvas> element.
    const el = document.createElement('canvas');
    el.width = w;
    el.height = h;
    return el;
}
2054
+
2055
+ /**
2056
+ * Render a Mandelbrot set fragment using WebGL and read back pixels.
2057
+ * The number of iterations is fixed (100) so that rounding differences in
2058
+ * the smooth-colouring formula are the primary source of per-GPU variation.
2059
+ *
2060
+ * @param {WebGLRenderingContext} gl
2061
+ * @param {HTMLCanvasElement|OffscreenCanvas} canvas
2062
+ * @returns {Uint8Array|null}
2063
+ */
2064
function _renderMandelbrot(gl, canvas) {
    const W = canvas.width;
    const H = canvas.height;

    // Vertex shader – full-screen quad
    const vsSource = `
        attribute vec4 a_pos;
        void main() { gl_Position = a_pos; }
    `;

    // Fragment shader – Mandelbrot with smooth colouring
    // Floating-point precision in the escape-radius and log() calls differs
    // between GPU vendors / drivers, producing per-device pixel signatures.
    const fsSource = `
        precision highp float;
        uniform vec2 u_res;
        void main() {
            vec2 uv = (gl_FragCoord.xy / u_res - 0.5) * 3.5;
            uv.x -= 0.5;
            vec2 c = uv;
            vec2 z = vec2(0.0);
            float n = 0.0;
            for (int i = 0; i < 100; i++) {
                if (dot(z, z) > 4.0) break;
                z = vec2(z.x*z.x - z.y*z.y, 2.0*z.x*z.y) + c;
                n += 1.0;
            }
            float smooth_n = n - log2(log2(dot(z,z))) + 4.0;
            float t = smooth_n / 100.0;
            gl_FragColor = vec4(0.5 + 0.5*cos(6.28318*t + vec3(0.0, 0.4, 0.7)), 1.0);
        }
    `;

    // Returning null at any failure point lets the caller fall back to a
    // null pixel hash instead of throwing.
    const vs = _compileShader(gl, gl.VERTEX_SHADER, vsSource);
    const fs = _compileShader(gl, gl.FRAGMENT_SHADER, fsSource);
    if (!vs || !fs) return null;

    const prog = gl.createProgram();
    gl.attachShader(prog, vs);
    gl.attachShader(prog, fs);
    gl.linkProgram(prog);
    if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) return null;

    gl.useProgram(prog);

    // Full-screen quad
    const buf = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, buf);
    gl.bufferData(gl.ARRAY_BUFFER,
        new Float32Array([-1,-1, 1,-1, -1,1, 1,1]), gl.STATIC_DRAW);
    const loc = gl.getAttribLocation(prog, 'a_pos');
    gl.enableVertexAttribArray(loc);
    gl.vertexAttribPointer(loc, 2, gl.FLOAT, false, 0, 0);

    // Pass the canvas resolution so the fragment shader can map fragment
    // coordinates into the complex plane.
    const resLoc = gl.getUniformLocation(prog, 'u_res');
    gl.uniform2f(resLoc, W, H);

    gl.viewport(0, 0, W, H);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

    // Read back a 64×64 centre crop (reduces data without losing discriminating power)
    // readPixels is synchronous and forces the GPU pipeline to flush.
    const x0 = Math.floor((W - 64) / 2);
    const y0 = Math.floor((H - 64) / 2);
    const pixels = new Uint8Array(64 * 64 * 4);
    gl.readPixels(x0, y0, 64, 64, gl.RGBA, gl.UNSIGNED_BYTE, pixels);

    // NOTE(review): shaders/program/buffer are not deleted here — the caller
    // tears the whole context down via WEBGL_lose_context.loseContext().
    return pixels;
}
2132
+
2133
function _compileShader(gl, type, source) {
    // Compile a single shader stage. Returns null (rather than throwing) on
    // failure so the caller can abandon the WebGL scene gracefully.
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
        return shader;
    }
    return null;
}
2139
+
2140
+ /**
2141
+ * @sovereign/pulse — AudioContext Oscillator Jitter
2142
+ *
2143
+ * Measures the scheduling jitter of the browser's audio pipeline.
2144
+ * Real audio hardware callbacks are driven by a hardware interrupt (IRQ)
2145
+ * from the sound card; the timing reflects the actual interrupt latency
2146
+ * of the physical device. VM audio drivers (if present at all) are
2147
+ * emulated and show either unrealistically low jitter or burst-mode
2148
+ * scheduling artefacts that are statistically distinguishable.
2149
+ */
2150
+
2151
+ /**
2152
+ * @param {object} [opts]
2153
+ * @param {number} [opts.durationMs=2000] - how long to collect audio callbacks
2154
+ * @param {number} [opts.bufferSize=256] - ScriptProcessorNode buffer size
2155
+ * @returns {Promise<AudioJitter>}
2156
+ */
2157
async function collectAudioJitter(opts = {}) {
    const { durationMs = 2000, bufferSize = 256 } = opts;

    // Fallback result for unavailable / blocked environments.
    // FIX: now carries the same summary fields as the success path
    // (noiseFloorStd, expectedIntervalMs, jitterMeanMs, jitterP95Ms) so
    // downstream consumers (e.g. buildProof) never read `undefined` when
    // audio collection failed.
    const base = {
        available: false,
        workletAvailable: false,
        callbackJitterCV: 0,
        noiseFloorMean: 0,
        noiseFloorStd: 0,
        sampleRate: 0,
        callbackCount: 0,
        expectedIntervalMs: 0,
        jitterMeanMs: 0,
        jitterP95Ms: 0,
        jitterTimings: [],
    };

    if (typeof AudioContext === 'undefined' && typeof webkitAudioContext === 'undefined') {
        return base; // Node.js / server environment
    }

    let ctx;
    try {
        ctx = new (window.AudioContext || window.webkitAudioContext)();
    } catch (_) {
        return base;
    }

    // Some browsers require a user gesture before AudioContext can run.
    if (ctx.state === 'suspended') {
        try {
            await ctx.resume();
        } catch (_) {
            await ctx.close().catch(() => {});
            return base;
        }
    }

    const sampleRate = ctx.sampleRate;
    const expectedInterval = (bufferSize / sampleRate) * 1000; // ms per callback

    const jitterTimings = []; // absolute AudioContext.currentTime at each callback
    const callbackDeltas = [];

    await new Promise((resolve) => {
        // ── AudioWorklet (preferred — runs on dedicated real-time thread) ──────
        const useWorklet = typeof AudioWorkletNode !== 'undefined';
        base.workletAvailable = useWorklet;

        if (useWorklet) {
            // Inline worklet: send currentTime back via MessagePort every buffer
            const workletCode = `
        class PulseProbe extends AudioWorkletProcessor {
          process(inputs, outputs) {
            this.port.postMessage({ t: currentTime });
            // Pass-through silence
            for (const out of outputs)
              for (const ch of out) ch.fill(0);
            return true;
          }
        }
        registerProcessor('pulse-probe', PulseProbe);
      `;
            const blob = new Blob([workletCode], { type: 'application/javascript' });
            const blobUrl = URL.createObjectURL(blob);

            ctx.audioWorklet.addModule(blobUrl).then(() => {
                const node = new AudioWorkletNode(ctx, 'pulse-probe');
                node.port.onmessage = (e) => {
                    jitterTimings.push(e.data.t * 1000); // convert to ms
                };
                node.connect(ctx.destination);

                // FIX: dropped the pointless `async` on this callback — it
                // contained no await and the returned promise was discarded.
                setTimeout(() => {
                    node.disconnect();
                    URL.revokeObjectURL(blobUrl);
                    resolve(node);
                }, durationMs);
            }).catch(() => {
                // Worklet module failed to load — fall back to ScriptProcessorNode.
                URL.revokeObjectURL(blobUrl);
                _fallbackScriptProcessor(ctx, bufferSize, durationMs, jitterTimings, resolve);
            });

        } else {
            _fallbackScriptProcessor(ctx, bufferSize, durationMs, jitterTimings, resolve);
        }
    });

    // ── Compute deltas between successive callback times ────────────────────
    for (let i = 1; i < jitterTimings.length; i++) {
        callbackDeltas.push(jitterTimings[i] - jitterTimings[i - 1]);
    }

    // ── Noise floor via AnalyserNode ─────────────────────────────────────────
    // Feed a silent oscillator through an analyser; the FFT magnitude at silence
    // reveals the hardware's thermal noise floor (varies per ADC/DAC chipset).
    const noiseFloor = await _measureNoiseFloor(ctx);

    await ctx.close().catch(() => {});

    // ── Statistics ────────────────────────────────────────────────────────────
    const mean = callbackDeltas.length
        ? callbackDeltas.reduce((s, v) => s + v, 0) / callbackDeltas.length
        : 0;
    // Sample variance (n − 1 denominator).
    const variance = callbackDeltas.length > 1
        ? callbackDeltas.reduce((s, v) => s + (v - mean) ** 2, 0) / (callbackDeltas.length - 1)
        : 0;
    const jitterCV = mean > 0 ? Math.sqrt(variance) / mean : 0;

    return {
        available: true,
        workletAvailable: base.workletAvailable,
        callbackJitterCV: jitterCV,
        noiseFloorMean: noiseFloor.mean,
        noiseFloorStd: noiseFloor.std,
        sampleRate,
        callbackCount: jitterTimings.length,
        expectedIntervalMs: expectedInterval,
        // Only include summary stats, not raw timings (privacy / size)
        jitterMeanMs: mean,
        jitterP95Ms: _percentile(callbackDeltas, 95),
    };
}
2276
+
2277
/**
 * @typedef {object} AudioJitter
 * @property {boolean} available
 * @property {boolean} workletAvailable
 * @property {number} callbackJitterCV
 * @property {number} noiseFloorMean
 * @property {number} [noiseFloorStd]
 * @property {number} sampleRate
 * @property {number} callbackCount
 * @property {number} [expectedIntervalMs]
 * @property {number} [jitterMeanMs]
 * @property {number} [jitterP95Ms]
 */
2286
+
2287
+ // ---------------------------------------------------------------------------
2288
+ // Internal helpers
2289
+ // ---------------------------------------------------------------------------
2290
+
2291
function _fallbackScriptProcessor(ctx, bufferSize, durationMs, jitterTimings, resolve) {
    // ScriptProcessorNode is deprecated but universally supported.
    const processor = ctx.createScriptProcessor(bufferSize, 1, 1);
    processor.onaudioprocess = () => {
        // Record the audio-clock timestamp (in ms) of every callback.
        jitterTimings.push(ctx.currentTime * 1000);
    };

    // A sub-audible oscillator keeps the audio graph alive for the duration.
    const keepAlive = ctx.createOscillator();
    keepAlive.frequency.value = 1; // sub-audible
    keepAlive.connect(processor);
    processor.connect(ctx.destination);
    keepAlive.start();

    setTimeout(() => {
        keepAlive.stop();
        keepAlive.disconnect();
        processor.disconnect();
        resolve(processor);
    }, durationMs);
}
2311
+
2312
async function _measureNoiseFloor(ctx) {
    try {
        const analyser = ctx.createAnalyser();
        analyser.fftSize = 256;
        analyser.connect(ctx.destination);

        // Drive the analyser with a 100 ms buffer of pure silence; whatever
        // the FFT reports is the pipeline's inherent noise floor.
        const silent = ctx.createBuffer(1, ctx.sampleRate * 0.1, ctx.sampleRate);
        const source = ctx.createBufferSource();
        source.buffer = silent;
        source.connect(analyser);
        source.start();

        await new Promise((r) => setTimeout(r, 150));

        const bins = new Float32Array(analyser.frequencyBinCount);
        analyser.getFloatFrequencyData(bins);
        analyser.disconnect();

        // Limit to 32 bins to keep the payload small; convert dB → linear.
        const linear = Array.from(bins.slice(0, 32), (v) =>
            isFinite(v) ? Math.pow(10, v / 20) : 0
        );
        const mean = linear.reduce((acc, v) => acc + v, 0) / linear.length;
        const variance = linear.reduce((acc, v) => acc + (v - mean) ** 2, 0) / linear.length;
        return { mean, std: Math.sqrt(variance) };
    } catch (_) {
        // Any failure (blocked API, bad ctx) degrades to a zeroed floor.
        return { mean: 0, std: 0 };
    }
}
2344
+
2345
function _percentile(arr, p) {
    // Linear-interpolation percentile over a sorted copy (input untouched).
    if (!arr.length) return 0;
    const sorted = arr.slice().sort((x, y) => x - y);
    const rank = (p / 100) * (sorted.length - 1);
    const below = Math.floor(rank);
    const above = Math.ceil(rank);
    const frac = rank - below;
    return sorted[below] + (sorted[above] - sorted[below]) * frac;
}
2353
+
2354
+ /**
2355
+ * @sovereign/pulse — Cross-Metric Heuristic Engine
2356
+ *
2357
+ * Instead of checking individual thresholds in isolation, this module looks
2358
+ * at the *relationships* between metrics. A sophisticated adversary can spoof
2359
+ * any single number. Spoofing six metrics so they remain mutually consistent
2360
+ * with physical laws is exponentially harder.
2361
+ *
2362
+ * Three core insights drive this engine:
2363
+ *
2364
+ * 1. Entropy-Jitter Coherence
2365
+ * Real silicon gets noisier as it heats up. Under sustained load, the
2366
+ * Quantization Entropy of the timing distribution grows because thermal
2367
+ * fluctuations add variance. A VM's hypervisor clock doesn't care about
2368
+ * guest temperature — its entropy is flat across all load phases.
2369
+ *
2370
+ * 2. Hurst-Autocorrelation Coherence
2371
+ * Genuine Brownian noise has Hurst ≈ 0.5 and near-zero autocorrelation
2372
+ * at all lags. These two values are physically linked. If they diverge —
2373
+ * high autocorrelation but Hurst near 0.5, or vice versa — the timings
2374
+ * were generated, not measured.
2375
+ *
2376
+ * 3. CV-Entropy Coherence
2377
+ * High variance (CV) must come from somewhere. On real hardware, high CV
2378
+ * means the timing distribution is spread out, which also means high
2379
+ * entropy. A VM that inflates CV without inflating entropy (e.g. by
2380
+ * adding synthetic outliers at fixed offsets) produces a coherence gap.
2381
+ */
2382
+
2383
+
2384
+ // ---------------------------------------------------------------------------
2385
+ // runHeuristicEngine
2386
+ // ---------------------------------------------------------------------------
2387
+
2388
+ /**
2389
+ * @param {object} p
2390
+ * @param {import('./jitter.js').JitterAnalysis} p.jitter
2391
+ * @param {object|null} p.phases - from entropy collector
2392
+ * @param {object} p.autocorrelations
2393
+ * @returns {HeuristicReport}
2394
+ */
2395
+ function runHeuristicEngine({ jitter, phases, autocorrelations }) {
2396
+ const findings = [];
2397
+ const bonuses = [];
2398
+ let penalty = 0; // accumulated penalty [0, 1]
2399
+ let bonus = 0; // accumulated bonus [0, 1]
2400
+ let hardOverride = null; // 'vm' | null — bypasses score entirely when set
2401
+
2402
+ const stats = jitter.stats;
2403
+ if (!stats) return _empty$1();
2404
+
2405
+ // ── 1. Entropy-Jitter Ratio (phases required) ────────────────────────────
2406
+ let entropyJitterRatio = null;
2407
+ let entropyJitterScore = 0.5; // neutral if no phased data
2408
+
2409
+ if (phases) {
2410
+ entropyJitterRatio = phases.entropyJitterRatio;
2411
+
2412
+ const coldQE = phases.cold?.qe ?? null;
2413
+ const hotQE = phases.hot?.qe ?? null;
2414
+
2415
+ // ── HARD KILL: Phase trajectory mathematical contradiction ──────────────
2416
+ //
2417
+ // EJR is defined as: entropyJitterRatio = hot_QE / cold_QE
2418
+ //
2419
+ // Before trusting any EJR value, verify it is internally consistent with
2420
+ // the QE measurements it purports to summarise. Two forgery vectors exist:
2421
+ //
2422
+ // Attack A — EJR field overwritten independently:
2423
+ // Attacker sets entropyJitterRatio = 1.15 to claim thermal growth,
2424
+ // but leaves cold_QE = 3.50, hot_QE = 3.00 unchanged.
2425
+ // Computed EJR = 3.00 / 3.50 = 0.857.
2426
+ // Discrepancy 1.15 − 0.857 = 0.293 >> 0.005 tolerance → HARD KILL.
2427
+ //
2428
+ // Attack B — QE values also faked but left inconsistent:
2429
+ // Attacker overwrites both QE fields carelessly: cold_QE = 3.5,
2430
+ // hot_QE = 3.0, but EJR = 1.15 is still written.
2431
+ // cold_QE ≥ hot_QE with EJR ≥ 1.08 is a mathematical impossibility —
2432
+ // if hot ≤ cold then hot/cold ≤ 1.0, which can never be ≥ 1.08.
2433
+ //
2434
+ // Tolerance of 0.005 accounts for floating-point rounding in the entropy
2435
+ // collector (detectQuantizationEntropy uses discrete histogram bins).
2436
+ //
2437
+ // When HARD KILL fires:
2438
+ // • hardOverride = 'vm' — fingerprint.js short-circuits isSynthetic
2439
+ // • entropyJitterScore = 0.0 — no EJR contribution to stage-2 bonus
2440
+ // • penalty += 1.0 — overwhelms the physical floor cap
2441
+ // • No further EJR evaluation runs (the data cannot be trusted)
2442
+ // • The physical floor protection is explicitly bypassed (see aggregate)
2443
+
2444
+ if (coldQE !== null && hotQE !== null) {
2445
+ const computedEJR = coldQE > 0 ? hotQE / coldQE : null;
2446
+ const fieldTampered = computedEJR !== null &&
2447
+ Math.abs(entropyJitterRatio - computedEJR) > 0.005;
2448
+ const qeContradicts = entropyJitterRatio >= 1.08 && coldQE >= hotQE;
2449
+
2450
+ if (fieldTampered || qeContradicts) {
2451
+ hardOverride = 'vm';
2452
+ entropyJitterScore = 0.0;
2453
+ findings.push({
2454
+ id: 'EJR_PHASE_HARD_KILL',
2455
+ label: fieldTampered
2456
+ ? 'HARD KILL: stored EJR is inconsistent with cold/hot QE values — phase data tampered'
2457
+ : 'HARD KILL: EJR ≥ 1.08 claims entropy growth but cold_QE ≥ hot_QE — physically impossible',
2458
+ detail: `ejr_stored=${entropyJitterRatio.toFixed(4)} ` +
2459
+ `ejr_computed=${computedEJR?.toFixed(4) ?? 'n/a'} ` +
2460
+ `cold_QE=${coldQE.toFixed(4)} hot_QE=${hotQE.toFixed(4)} ` +
2461
+ `delta=${computedEJR != null ? Math.abs(entropyJitterRatio - computedEJR).toFixed(4) : 'n/a'}`,
2462
+ severity: 'critical',
2463
+ penalty: 1.0,
2464
+ });
2465
+ penalty += 1.0;
2466
+
2467
+ } else {
2468
+ // QE values confirmed consistent — proceed with normal EJR evaluation.
2469
+ _evaluateEJR(entropyJitterRatio, coldQE, hotQE, findings, bonuses,
2470
+ (p) => { penalty += p; }, (b) => { bonus += b; },
2471
+ (s) => { entropyJitterScore = s; });
2472
+ }
2473
+
2474
+ } else {
2475
+ // QE values unavailable — evaluate EJR ratio in isolation.
2476
+ _evaluateEJR(entropyJitterRatio, null, null, findings, bonuses,
2477
+ (p) => { penalty += p; }, (b) => { bonus += b; },
2478
+ (s) => { entropyJitterScore = s; });
2479
+ }
2480
+
2481
+ // Phase mean drift: real CPU heats up → iterations get slower.
2482
+ // Only apply if hard kill wasn't already triggered.
2483
+ if (!hardOverride) {
2484
+ const coldToHotDrift = (phases.hot?.mean ?? 0) - (phases.cold?.mean ?? 0);
2485
+ if (coldToHotDrift > 0.05) {
2486
+ bonuses.push({
2487
+ id: 'THERMAL_DRIFT_CONFIRMED',
2488
+ label: 'CPU mean timing increased from cold to hot phase (thermal drift)',
2489
+ detail: `cold=${phases.cold.mean.toFixed(3)}ms hot=${phases.hot.mean.toFixed(3)}ms Δ=${coldToHotDrift.toFixed(3)}ms`,
2490
+ value: 0.08,
2491
+ });
2492
+ bonus += 0.08;
2493
+ }
2494
+ }
2495
+ }
2496
+
2497
+ // ── 2. Hurst-Autocorrelation Coherence ───────────────────────────────────
2498
+ const h = jitter.hurstExponent ?? 0.5;
2499
+ const ac1 = Math.abs(autocorrelations?.lag1 ?? 0);
2500
+ const ac5 = Math.abs(autocorrelations?.lag5 ?? 0);
2501
+ Math.abs(autocorrelations?.lag50 ?? 0);
2502
+
2503
+ // Physical law: Brownian noise (H≈0.5) must have low autocorrelation.
2504
+ // Divergence between these two means the data wasn't generated by physics.
2505
+ const hurstExpectedAC = Math.abs(2 * h - 1); // theoretical max |autocorr| for given H
2506
+ const actualAC = (ac1 + ac5) / 2;
2507
+ const acHurstDivergence = Math.abs(actualAC - hurstExpectedAC);
2508
+
2509
+ if (acHurstDivergence > 0.35) {
2510
+ findings.push({
2511
+ id: 'HURST_AUTOCORR_INCOHERENT',
2512
+ label: 'Hurst exponent and autocorrelation are physically inconsistent',
2513
+ detail: `H=${h.toFixed(3)} expected_AC≈${hurstExpectedAC.toFixed(3)} actual_AC=${actualAC.toFixed(3)} divergence=${acHurstDivergence.toFixed(3)}`,
2514
+ severity: 'high',
2515
+ penalty: 0.12,
2516
+ });
2517
+ penalty += 0.12;
2518
+ } else if (h > 0.45 && h < 0.55 && ac1 < 0.15) {
2519
+ // Ideal Brownian + low autocorr — physically coherent
2520
+ bonuses.push({
2521
+ id: 'BROWNIAN_COHERENCE_CONFIRMED',
2522
+ label: 'Hurst ≈ 0.5 and autocorrelation near zero — genuine Brownian noise',
2523
+ detail: `H=${h.toFixed(3)} lag1_AC=${ac1.toFixed(3)}`,
2524
+ value: 0.10,
2525
+ });
2526
+ bonus += 0.10;
2527
+ }
2528
+
2529
+ // ── 3. CV-Entropy Coherence ───────────────────────────────────────────────
2530
+ // High CV should correlate with high QE. If CV is high but QE is low,
2531
+ // the variance was added artificially (fixed-offset outliers, synthetic spikes).
2532
+ const cv = stats.cv;
2533
+ const qe = jitter.quantizationEntropy;
2534
+
2535
+ // Expected QE given CV, assuming roughly normal distribution
2536
+ // Normal dist with σ/μ = CV: entropy ≈ log2(σ * sqrt(2πe)) + log2(n/binWidth)
2537
+ // We use a simplified linear proxy calibrated against real benchmarks.
2538
+ const expectedQE = Math.max(0, 1.5 + cv * 16); // empirical: CV=0.15 → QE≈3.9
2539
+ const qeDivergence = expectedQE - qe; // positive = QE lower than expected
2540
+
2541
+ if (qeDivergence > 1.8 && cv > 0.05) {
2542
+ // High variance but low entropy: synthetic outliers at fixed offsets
2543
+ findings.push({
2544
+ id: 'CV_ENTROPY_INCOHERENT',
2545
+ label: 'High CV but low entropy — variance appears synthetic (fixed-offset outliers)',
2546
+ detail: `CV=${cv.toFixed(4)} QE=${qe.toFixed(3)} bits expected_QE≈${expectedQE.toFixed(3)} gap=${qeDivergence.toFixed(3)}`,
2547
+ severity: 'high',
2548
+ penalty: 0.10,
2549
+ });
2550
+ penalty += 0.10;
2551
+ } else if (qeDivergence < 0.5 && cv > 0.08) {
2552
+ // CV and QE are coherent — timings come from a real distribution
2553
+ bonuses.push({
2554
+ id: 'CV_ENTROPY_COHERENT',
2555
+ label: 'Variance and entropy are physically coherent',
2556
+ detail: `CV=${cv.toFixed(4)} QE=${qe.toFixed(3)} expected≈${expectedQE.toFixed(3)}`,
2557
+ value: 0.06,
2558
+ });
2559
+ bonus += 0.06;
2560
+ }
2561
+
2562
+ // ── 4. Steal-time periodicity (the "Picket Fence" detector) ─────────────
2563
+ // VM steal-time bursts create a periodic signal in the autocorrelation.
2564
+ // If lag-50 autocorrelation is significantly higher than lag-5,
2565
+ // the scheduler quantum is approximately 50× the mean iteration time.
2566
+ const picketFence = _detectPicketFence(autocorrelations);
2567
+ if (picketFence.detected) {
2568
+ findings.push({
2569
+ id: 'PICKET_FENCE_DETECTED',
2570
+ label: `"Picket Fence" steal-time rhythm detected at lag ${picketFence.dominantLag}`,
2571
+ detail: picketFence.detail,
2572
+ severity: 'high',
2573
+ penalty: 0.08,
2574
+ });
2575
+ penalty += 0.08;
2576
+ }
2577
+
2578
+ // ── 5. Skewness-Kurtosis coherence ───────────────────────────────────────
2579
+ // Real hardware timing is right-skewed (occasional slow outliers from OS preemption).
2580
+ // VMs that add synthetic outliers at fixed offsets produce wrong skew/kurtosis.
2581
+ const skew = stats.skewness ?? 0;
2582
+ const kurt = stats.kurtosis ?? 0;
2583
+
2584
+ if (skew > 0.3 && kurt > 0) {
2585
+ // Right-skewed, leptokurtic — consistent with OS preemption on real hardware
2586
+ bonuses.push({
2587
+ id: 'NATURAL_SKEW_CONFIRMED',
2588
+ label: 'Right-skewed distribution with positive kurtosis — OS preemption pattern',
2589
+ detail: `skew=${skew.toFixed(3)} kurtosis=${kurt.toFixed(3)}`,
2590
+ value: 0.06,
2591
+ });
2592
+ bonus += 0.06;
2593
+ } else if (skew < 0 && Math.abs(kurt) > 1) {
2594
+ // Negative skew with high kurtosis: inconsistent with physical timing noise
2595
+ findings.push({
2596
+ id: 'SKEW_KURTOSIS_ANOMALY',
2597
+ label: 'Left-skewed distribution — inconsistent with natural hardware timing',
2598
+ detail: `skew=${skew.toFixed(3)} kurtosis=${kurt.toFixed(3)}`,
2599
+ severity: 'medium',
2600
+ penalty: 0.06,
2601
+ });
2602
+ penalty += 0.06;
2603
+ }
2604
+
2605
+ // ── Physical floor protection (anti-compounding) ──────────────────────────
2606
+ // When the three PRIMARY timing metrics are clearly consistent with real
2607
+ // silicon, cap the penalty so that marginal secondary signals (weak Picket
2608
+ // Fence, mild EJR, slight skew anomaly) cannot compound into a rejection.
2609
+ //
2610
+ // Why: a modern i7 laptop running heavy browser extensions may show:
2611
+ // EJR = 1.01 → -0.10 penalty (just under the 1.02 threshold)
2612
+ // lag50 = 0.31 → picket fence → -0.08 penalty (background process rhythm)
2613
+ // slight negative skew → -0.06 penalty
2614
+ // total: -0.24, drops score from 0.73 → 0.49 → wrongly flagged as synthetic
2615
+ //
2616
+ // Solution: if ≥ 2 of the 3 primary metrics are unambiguously physical,
2617
+ // treat the device as "probably physical with some noise" and limit the
2618
+ // penalty to 0.22 (enough to lower confidence but not enough to reject).
2619
+ const clearQE = jitter.quantizationEntropy > 3.2;
2620
+ const clearCV = stats.cv >= 0.05 && stats.cv <= 0.30;
2621
+ const clearLag1 = Math.abs(autocorrelations?.lag1 ?? 1) < 0.22;
2622
+ const clearPhysicalCount = [clearQE, clearCV, clearLag1].filter(Boolean).length;
2623
+
2624
+ // Also check: if at least one metric is a HARD VM indicator (QE < 2.0 or
2625
+ // lag1 > 0.65), override the floor — the floor is for borderline noise, not
2626
+ // for devices that are clearly VMs on at least one axis.
2627
+ const hardVmSignal =
2628
+ jitter.quantizationEntropy < 2.0 ||
2629
+ Math.abs(autocorrelations?.lag1 ?? 0) > 0.65;
2630
+
2631
+ const penaltyCap = (!hardVmSignal && clearPhysicalCount >= 2)
2632
+ ? 0.22 // physical floor: cap compounding for clearly physical devices
2633
+ : 0.60; // default: full penalty range for ambiguous or VM-like signals
2634
+
2635
+ // HARD KILL overrides the physical floor protection entirely.
2636
+ // The floor was designed to protect legitimate hardware with multiple
2637
+ // marginal-but-honest signals — it must never shelter a forged proof.
2638
+ const totalPenalty = hardOverride === 'vm'
2639
+ ? Math.min(1.0, penalty) // hard kill: uncapped, overwhelms all bonuses
2640
+ : Math.min(penaltyCap, penalty); // normal: apply floor protection
2641
+
2642
+ // When a hard kill is active, strip all bonuses — they were earned on
2643
+ // data that has been proved untrustworthy.
2644
+ const totalBonus = hardOverride === 'vm' ? 0 : Math.min(0.35, bonus);
2645
+
2646
+ return {
2647
+ penalty: totalPenalty,
2648
+ bonus: totalBonus,
2649
+ netAdjustment: totalBonus - totalPenalty,
2650
+ findings,
2651
+ bonuses: hardOverride === 'vm' ? [] : bonuses,
2652
+ entropyJitterRatio,
2653
+ entropyJitterScore,
2654
+ picketFence,
2655
+ hardOverride,
2656
+ coherenceFlags: findings.map(f => f.id),
2657
+ };
2658
+ }
2659
+
2660
+ /**
2661
+ * @typedef {object} HeuristicReport
2662
+ * @property {number} penalty - total score penalty [0, 1.0]
2663
+ * @property {number} bonus - total score bonus [0, 0.35]
2664
+ * @property {number} netAdjustment - bonus - penalty
2665
+ * @property {object[]} findings - detected anomalies
2666
+ * @property {object[]} bonuses - confirmed physical properties
2667
+ * @property {number|null} entropyJitterRatio
+ * @property {number} entropyJitterScore - EJR sub-score in [0, 1] (0.5 when unknown)
2668
+ * @property {'vm'|null} hardOverride - set when a mathematical impossibility is detected
2669
+ * @property {object} picketFence
2670
+ * @property {string[]} coherenceFlags
2671
+ */
2672
+
2673
+ // ---------------------------------------------------------------------------
2674
+ // EJR evaluation helper (extracted so it can run with or without QE values)
2675
+ // ---------------------------------------------------------------------------
2676
+
2677
/**
 * Applies the normal EJR (entropy-jitter-ratio) classification logic.
 *
 * Called only after the hard-kill consistency check passes (or when no QE
 * values are available at all), meaning the EJR value itself can be trusted.
 *
 * @param {number} ejr - entropy jitter ratio (hot_QE / cold_QE)
 * @param {number|null} coldQE - cold-phase quantization entropy (may be null)
 * @param {number|null} hotQE - hot-phase quantization entropy (may be null)
 * @param {object[]} findings - mutated: anomaly records appended here
 * @param {object[]} bonuses - mutated: physical-confirmation records appended here
 * @param {function(number):void} addPenalty - accumulates penalty into caller state
 * @param {function(number):void} addBonus - accumulates bonus into caller state
 * @param {function(number):void} setScore - records the EJR sub-score [0, 1]
 */
function _evaluateEJR(ejr, coldQE, hotQE, findings, bonuses, addPenalty, addBonus, setScore) {
  // Guard BOTH values: the original only checked coldQE, so a non-null
  // coldQE with a null hotQE would throw on hotQE.toFixed(3).
  const qeDetail = (coldQE != null && hotQE != null)
    ? `cold_QE=${coldQE.toFixed(3)} hot_QE=${hotQE.toFixed(3)}`
    : '';

  if (ejr >= 1.08) {
    // Strong entropy growth under load — the thermal-feedback signature.
    setScore(1.0);
    bonuses.push({
      id: 'ENTROPY_GROWS_WITH_LOAD',
      label: 'Entropy grew under load — thermal feedback confirmed',
      detail: `ratio=${ejr.toFixed(3)} ${qeDetail}`,
      value: 0.12,
    });
    addBonus(0.12);

  } else if (ejr >= 1.02) {
    // Mild growth — neither confirms nor penalises; recorded for audit only.
    setScore(0.7);
    findings.push({
      id: 'ENTROPY_MILD_GROWTH',
      label: 'Weak entropy growth under load',
      detail: `ratio=${ejr.toFixed(3)} ${qeDetail}`,
      severity: 'info',
      penalty: 0,
    });

  } else if (ejr > 0.95) {
    // Flat entropy — hypervisor clock unresponsive to guest load
    setScore(0.2);
    findings.push({
      id: 'ENTROPY_FLAT_UNDER_LOAD',
      label: 'Entropy did not grow under load — hypervisor clock suspected',
      detail: `ratio=${ejr.toFixed(3)} (expected ≥ 1.08 for real hardware) ${qeDetail}`,
      severity: 'high',
      penalty: 0.10,
    });
    addPenalty(0.10);

  } else {
    // Entropy DECREASED — hypervisor clock rounding became more aggressive
    setScore(0.0);
    findings.push({
      id: 'ENTROPY_DECREASES_UNDER_LOAD',
      label: 'Entropy shrank under load — hypervisor clock-rounding confirmed',
      detail: `ratio=${ejr.toFixed(3)} (clock rounding more aggressive at high load) ${qeDetail}`,
      severity: 'critical',
      penalty: 0.18,
    });
    addPenalty(0.18);
  }
}
2731
+
2732
+ // ---------------------------------------------------------------------------
2733
+ // Picket Fence detector
2734
+ // ---------------------------------------------------------------------------
2735
+
2736
/**
 * "Picket Fence" detector: looks for periodic steal-time bursts by finding
 * the strongest long-lag autocorrelation (lags 10, 25, 50) and comparing it
 * against a short-lag baseline (mean of |lag3| and |lag5|). A pronounced
 * long-lag peak indicates the hypervisor schedules the guest on a fixed
 * quantum — the timing histogram shows dense clusters at fixed intervals
 * with empty space between them, like fence posts.
 *
 * @param {object} autocorrelations - map of `lag<N>` → autocorrelation value
 * @returns {{detected: boolean, dominantLag: (number|null), peakAC: number, baseline: number, detail: string}}
 */
function _detectPicketFence(autocorrelations) {
  // Short-lag baseline from lags 3 and 5.
  const shortBaseline = (Math.abs(autocorrelations?.lag3 ?? 0) +
                         Math.abs(autocorrelations?.lag5 ?? 0)) / 2;

  // Strongest long-lag magnitude; ties keep the earliest lag.
  let peak = { lag: 0, ac: 0 };
  for (const lag of [10, 25, 50]) {
    const magnitude = Math.abs(autocorrelations?.[`lag${lag}`] ?? 0);
    if (magnitude > peak.ac) {
      peak = { lag, ac: magnitude };
    }
  }

  // "Picket fence" condition: a long-lag autocorr significantly exceeds baseline
  const isPicketFence = peak.ac > 0.30 && peak.ac > shortBaseline + 0.20;
  if (isPicketFence) {
    return {
      detected: true,
      dominantLag: peak.lag,
      peakAC: peak.ac,
      baseline: shortBaseline,
      detail: `lag${peak.lag}_AC=${peak.ac.toFixed(3)} baseline_AC=${shortBaseline.toFixed(3)} ` +
              `estimated_quantum≈${(peak.lag * 5).toFixed(0)}ms (at 5ms/iter)`,
    };
  }

  return { detected: false, dominantLag: null, peakAC: peak.ac, baseline: shortBaseline, detail: '' };
}
2770
+
2771
/**
 * Neutral heuristic report used when there is not enough data to analyse:
 * zero penalty/bonus, no findings, and an uncommitted EJR sub-score of 0.5.
 */
function _empty$1() {
  const neutralReport = {
    penalty: 0,
    bonus: 0,
    netAdjustment: 0,
    findings: [],
    bonuses: [],
    entropyJitterRatio: null,
    entropyJitterScore: 0.5,
    hardOverride: null,
    picketFence: { detected: false },
    coherenceFlags: [],
  };
  return neutralReport;
}
2781
+
2782
+ /**
2783
+ * @sovereign/pulse — Zero-Latency Second-Stage Coherence Analysis
2784
+ *
2785
+ * Runs entirely on data already collected by the entropy probe, bio
2786
+ * collector, canvas fingerprinter, and audio analyser.
2787
+ * Adds approximately 1–3 ms of CPU time. Zero WASM, zero network.
2788
+ *
2789
+ * Architecture:
2790
+ * Stage 1 — classifyJitter() → rawScore [0, 1]
2791
+ * Stage 2 — runHeuristicEngine() → netAdjustment (physics coherence)
2792
+ * Stage 3 — runCoherenceAnalysis() → THIS MODULE
2793
+ * ↳ small score refinement [-0.15, +0.18]
2794
+ * ↳ dynamic threshold [0.55, 0.67]
2795
+ * ↳ hard override 'vm' | null
2796
+ *
2797
+ * Why a third stage?
2798
+ * Stage 1 checks individual metrics in isolation.
2799
+ * Stage 2 checks pairwise relationships between metrics.
2800
+ * Stage 3 checks STRUCTURAL properties of the entire time-series and
2801
+ * signal evolution that require the full dataset to evaluate — and that
2802
+ * a sophisticated attacker cannot spoof without also spoofing every
2803
+ * other correlated signal simultaneously.
2804
+ *
2805
+ * The six checks cover orthogonal signal dimensions so they are hard to
2806
+ * spoof together even if one is individually defeated:
2807
+ *
2808
+ * 1. Timing distinctness — frequency domain (quantization density)
2809
+ * 2. AC decay shape — temporal domain (Brownian vs harmonic)
2810
+ * 3. Chunk CV stability — stationarity axis (thermal non-stationarity)
2811
+ * 4. Level-dependent noise — noise model axis (multiplicative vs additive)
2812
+ * 5. Batch convergence — measurement stability (adaptive mode)
2813
+ * 6. Phase trajectory — EJR monotonicity (thermal sequence integrity)
2814
+ *
2815
+ * Dynamic threshold:
2816
+ * The evidence weight reflects how much data was actually collected.
2817
+ * An early-exit proof with 50 iterations and no bio activity has far less
2818
+ * support than a 200-iteration proof with bio, audio, and phased data.
2819
+ * The threshold rises automatically as evidence decreases:
2820
+ * Full evidence → threshold 0.55 (standard)
2821
+ * Minimal proof → threshold 0.67 (conservative gate)
2822
+ * This prevents low-evidence proofs from passing the same bar as full ones.
2823
+ */
2824
+
2825
+ // ---------------------------------------------------------------------------
2826
+ // runCoherenceAnalysis
2827
+ // ---------------------------------------------------------------------------
2828
+
2829
/**
 * Stage-3 zero-latency coherence analysis over already-collected signals.
 *
 * Runs six orthogonal structural checks (timing distinctness, AC decay shape,
 * chunk-CV stationarity, level-dependent volatility, batch convergence, phase
 * entropy trajectory), computes an evidence-weighted dynamic threshold, and
 * returns a small score refinement plus an optional 'vm' hard override.
 *
 * FIX vs. previous revision: the EJR/QE contradiction hard override carried a
 * 0.60 penalty documented as "overwhelms any bonus", but the unconditional
 * stage-3 cap Math.min(0.15, penalty) clamped it below the 0.18 bonus cap —
 * a provably forged proof could net a POSITIVE adjustment. The caps now
 * mirror stage 2: on hard override the penalty is uncapped and all bonuses
 * are stripped (they were earned on data proven untrustworthy).
 *
 * @param {object} p
 * @param {number[]} p.timings - raw timing array (already collected)
 * @param {object} p.jitter - JitterAnalysis from classifyJitter()
 * @param {object|null} p.phases - phased entropy result (optional)
 * @param {object[]|null} p.batches - adaptive batch snapshots (optional)
 * @param {object} p.bio - bio snapshot
 * @param {object} p.canvas - canvas fingerprint
 * @param {object} p.audio - audio jitter result
 * @returns {CoherenceReport}
 */
function runCoherenceAnalysis({ timings, jitter, phases, batches, bio, canvas, audio }) {
  if (!timings || timings.length < 10) {
    // Insufficient data — return a conservative threshold and no adjustments.
    return _empty(0.64);
  }

  const checks = []; // anomalies found (each may carry a penalty)
  const bonuses = []; // physical properties confirmed (each carries a bonus)
  let penalty = 0;
  let bonus = 0;
  let hardOverride = null; // 'vm' | null

  const n = timings.length;
  const ac = jitter.autocorrelations ?? {};

  // ── Check 1: Timing Distinctness Ratio ──────────────────────────────────
  // VM quantized timers repeat the same integer-millisecond values; real
  // silicon at sub-ms resolution produces mostly unique values.
  // Bin width 0.2 ms (matches detectQuantizationEntropy); ratio is
  // normalized by sample count → iteration-count-independent.
  {
    const bins = new Set(timings.map(t => Math.round(t / 0.2)));
    const distinctRatio = bins.size / n;

    if (n >= 50) {
      if (distinctRatio > 0.65) {
        bonuses.push({
          id: 'HIGH_TIMING_DISTINCTNESS',
          label: 'Timer produces mostly unique values — sub-ms resolution confirmed',
          detail: `ratio=${distinctRatio.toFixed(3)} (${bins.size}/${n} distinct 0.2ms bins)`,
          value: 0.06,
        });
        bonus += 0.06;

      } else if (distinctRatio < 0.30) {
        // Severely quantized — VM with integer-ms timer emulation
        checks.push({
          id: 'LOW_TIMING_DISTINCTNESS',
          label: 'Heavy timer quantization — integer-ms VM timer suspected',
          detail: `ratio=${distinctRatio.toFixed(3)} (only ${bins.size}/${n} distinct 0.2ms bins)`,
          severity: 'high',
          penalty: 0.12,
        });
        penalty += 0.12;

      } else if (distinctRatio < 0.45 && n >= 100) {
        // At 100+ iterations we expect more spread; below 0.45 is suspicious
        checks.push({
          id: 'BORDERLINE_TIMING_DISTINCTNESS',
          label: 'Below-expected timer resolution — coarse-grained timer suspected',
          detail: `ratio=${distinctRatio.toFixed(3)} at n=${n}`,
          severity: 'medium',
          penalty: 0.05,
        });
        penalty += 0.05;
      }
    }
  }

  // ── Check 2: Autocorrelation Decay Shape ────────────────────────────────
  // Genuine Brownian noise decays monotonically: lag1 > lag2 > lag5 > lag10…
  // VM scheduler rhythms create harmonic revivals (lag25/lag50 elevated above
  // lag10). Orthogonal to the Picket Fence detector: this checks the SHAPE
  // of the decay curve, not its absolute magnitude.
  {
    const l1 = Math.abs(ac.lag1 ?? 0);
    const l2 = Math.abs(ac.lag2 ?? 0);
    const l3 = Math.abs(ac.lag3 ?? 0);
    const l5 = Math.abs(ac.lag5 ?? 0);
    const l10 = Math.abs(ac.lag10 ?? 0);
    const l25 = Math.abs(ac.lag25 ?? 0);
    const l50 = Math.abs(ac.lag50 ?? 0);

    // Strict Brownian decay: each successive lag no higher than the previous
    // (+0.03 tolerance for estimation noise, +0.05 at the longest lags)
    const isBrownianDecay =
      l1 < 0.20 &&
      l2 <= l1 + 0.03 &&
      l5 <= l2 + 0.03 &&
      l10 <= l5 + 0.03 &&
      l25 <= l10 + 0.05 &&
      l50 <= l10 + 0.05;

    // Harmonic revival: a long lag significantly exceeds medium lags
    // (scheduler quantum footprint)
    const revival25 = l25 > l5 + 0.12 && l25 > 0.18;
    const revival50 = l50 > l5 + 0.12 && l50 > 0.18;

    if (isBrownianDecay && l1 < 0.15) {
      bonuses.push({
        id: 'BROWNIAN_DECAY_SHAPE',
        label: 'AC decays monotonically at all measured lags — genuine Brownian noise structure',
        detail: `lag1=${l1.toFixed(3)} lag3=${l3.toFixed(3)} lag5=${l5.toFixed(3)} lag10=${l10.toFixed(3)} lag50=${l50.toFixed(3)}`,
        value: 0.09,
      });
      bonus += 0.09;
    }

    if (revival25 || revival50) {
      const peakLag = revival25 ? 25 : 50;
      const peakVal = revival25 ? l25 : l50;
      checks.push({
        id: 'HARMONIC_AUTOCORR_REVIVAL',
        label: `Long-lag AC revival at lag ${peakLag} — VM scheduler harmonic footprint`,
        detail: `lag5=${l5.toFixed(3)} lag${peakLag}=${peakVal.toFixed(3)} Δ=${(peakVal - l5).toFixed(3)}`,
        severity: 'high',
        penalty: 0.10,
      });
      penalty += 0.10;
    }
  }

  // ── Check 3: Chunk CV Stability (temporal stationarity test) ────────────
  // Split the series into 4 equal windows and compute CV per window.
  // Real hardware is non-stationary (thermal/scheduling drift) → CV varies;
  // hypervisor scheduling is constant → CV nearly identical per chunk.
  // Metric: CV of the 4 chunk CVs (CV-of-CVs).
  if (n >= 40) {
    const chunkSize = Math.floor(n / 4);
    const chunkCVs = [];

    for (let c = 0; c < 4; c++) {
      const chunk = timings.slice(c * chunkSize, (c + 1) * chunkSize);
      const m = chunk.reduce((a, b) => a + b, 0) / chunk.length;
      const s = Math.sqrt(chunk.reduce((acc, v) => acc + (v - m) ** 2, 0) / chunk.length);
      if (m > 0) chunkCVs.push(s / m);
    }

    if (chunkCVs.length === 4) {
      const cvMean = chunkCVs.reduce((a, b) => a + b, 0) / 4;
      const cvStd = Math.sqrt(chunkCVs.reduce((s, v) => s + (v - cvMean) ** 2, 0) / 4);
      const cvOfCVs = cvMean > 1e-9 ? cvStd / cvMean : 0;

      if (cvOfCVs > 0.15) {
        bonuses.push({
          id: 'TEMPORAL_NON_STATIONARITY',
          label: 'Noise level varies across time windows — thermal non-stationarity confirmed',
          detail: `CV-of-CVs=${cvOfCVs.toFixed(3)} windows=[${chunkCVs.map(v => v.toFixed(3)).join(', ')}]`,
          value: 0.07,
        });
        bonus += 0.07;

      } else if (cvOfCVs < 0.06 && cvMean > 0.01) {
        checks.push({
          id: 'STATIONARY_NOISE_PROCESS',
          label: 'Noise level constant across all time windows — hypervisor stationarity suspected',
          detail: `CV-of-CVs=${cvOfCVs.toFixed(3)} windows=[${chunkCVs.map(v => v.toFixed(3)).join(', ')}]`,
          severity: 'high',
          penalty: 0.09,
        });
        penalty += 0.09;
      }
    }
  }

  // ── Check 4: Level-Dependent Volatility (noise model test) ──────────────
  // Thermal noise is multiplicative: larger timing values carry more
  // incremental variance → positive Pearson correlation between timing[i]
  // (level) and |timing[i+1]-timing[i]| (volatility). Hypervisor noise is
  // additive → near-zero correlation.
  if (n >= 30) {
    const levels = timings.slice(0, n - 1);
    const deltas = timings.slice(1).map((v, i) => Math.abs(v - timings[i]));
    const lMean = levels.reduce((a, b) => a + b, 0) / levels.length;
    const dMean = deltas.reduce((a, b) => a + b, 0) / deltas.length;

    let cov = 0, lVar = 0, dVar = 0;
    for (let i = 0; i < levels.length; i++) {
      const ld = levels[i] - lMean;
      const dd = deltas[i] - dMean;
      cov += ld * dd;
      lVar += ld * ld;
      dVar += dd * dd;
    }
    const denom = Math.sqrt(lVar * dVar);
    const levelVolCorr = denom < 1e-14 ? 0 : cov / denom;

    if (levelVolCorr > 0.15) {
      bonuses.push({
        id: 'MULTIPLICATIVE_NOISE_MODEL',
        label: 'Timing variance scales with level — multiplicative thermal noise confirmed',
        detail: `level-volatility r=${levelVolCorr.toFixed(3)} (expected >0.15 for real silicon)`,
        value: 0.07,
      });
      bonus += 0.07;

    } else if (levelVolCorr < 0.04 && n >= 80) {
      // Enough samples to trust the estimate; near-zero = additive hypervisor noise
      checks.push({
        id: 'ADDITIVE_NOISE_MODEL',
        label: 'Timing variance independent of level — additive hypervisor noise suspected',
        detail: `level-volatility r=${levelVolCorr.toFixed(3)} (expected >0.15 for real silicon)`,
        severity: 'medium',
        penalty: 0.07,
      });
      penalty += 0.07;
    }
  }

  // ── Check 5: Batch Convergence Variance (adaptive mode only) ────────────
  // Real hardware: per-batch vmConf estimates wander (genuinely stochastic).
  // VM: estimates lock in immediately (deterministic scheduling).
  // Only diagnostic in the ambiguous zone (vmConf 0.25–0.70); uses batches
  // collected after iteration 75 to avoid early-sample noise.
  if (batches && batches.length >= 4) {
    const stableBatches = batches.filter(b => b.iterations >= 75);

    if (stableBatches.length >= 3) {
      const vmConfs = stableBatches.map(b => b.vmConf);
      const hwConfs = stableBatches.map(b => b.hwConf);
      const vmMean = vmConfs.reduce((a, b) => a + b, 0) / vmConfs.length;
      const hwMean = hwConfs.reduce((a, b) => a + b, 0) / hwConfs.length;
      const vmStd = Math.sqrt(
        vmConfs.reduce((s, v) => s + (v - vmMean) ** 2, 0) / vmConfs.length
      );

      // Only meaningful in the ambiguous zone
      const isAmbiguous = vmMean > 0.25 && vmMean < 0.70 && hwMean < 0.55;

      if (isAmbiguous) {
        if (vmStd > 0.06) {
          bonuses.push({
            id: 'SIGNAL_FLUCTUATES_STOCHASTICALLY',
            label: 'Batch-by-batch signal variance confirms genuine stochastic noise source',
            detail: `vmConf σ=${vmStd.toFixed(3)} across ${stableBatches.length} stable batches (μ=${vmMean.toFixed(3)})`,
            value: 0.05,
          });
          bonus += 0.05;

        } else if (vmStd < 0.025 && stableBatches.length >= 4) {
          checks.push({
            id: 'SIGNAL_DETERMINISTICALLY_STABLE',
            label: 'Signal locked-in immediately — deterministic hypervisor suspected',
            detail: `vmConf σ=${vmStd.toFixed(3)} across ${stableBatches.length} stable batches (μ=${vmMean.toFixed(3)})`,
            severity: 'medium',
            penalty: 0.06,
          });
          penalty += 0.06;
        }
      }
    }
  }

  // ── Check 6: Phase Entropy Trajectory ───────────────────────────────────
  // Verify monotonic growth cold_QE < load_QE < hot_QE beyond the EJR
  // endpoint ratio. HARD OVERRIDE: EJR ≥ 1.08 (claims growth) with
  // cold_QE ≥ hot_QE is mathematically self-contradictory → forgery.
  if (phases) {
    const coldQE = phases.cold?.qe ?? null;
    const loadQE = phases.load?.qe ?? null;
    const hotQE = phases.hot?.qe ?? null;
    const ejr = phases.entropyJitterRatio ?? null;

    // Mathematical contradiction: EJR = hot_QE / cold_QE, so EJR ≥ 1.08
    // implies hot_QE ≥ 1.08 × cold_QE > cold_QE. Violation = tampered proof.
    if (ejr !== null && ejr >= 1.08 && coldQE !== null && hotQE !== null) {
      if (coldQE >= hotQE) {
        hardOverride = 'vm';
        checks.push({
          id: 'EJR_QE_CONTRADICTION',
          label: 'HARD OVERRIDE: EJR claims entropy growth but cold_QE ≥ hot_QE — mathematically impossible',
          detail: `ejr=${ejr.toFixed(4)} cold_QE=${coldQE.toFixed(3)} hot_QE=${hotQE.toFixed(3)} (ejr=hot/cold requires hot>cold)`,
          severity: 'critical',
          penalty: 0.60, // overwhelms any bonus (caps bypassed on hard override below)
        });
        penalty += 0.60;
      }
    }

    // Monotonic trajectory check (requires all three phases)
    if (!hardOverride && coldQE !== null && loadQE !== null && hotQE !== null) {
      if (coldQE < loadQE && loadQE < hotQE) {
        bonuses.push({
          id: 'MONOTONIC_ENTROPY_TRAJECTORY',
          label: 'QE increased continuously cold→load→hot — unbroken thermal feedback confirmed',
          detail: `${coldQE.toFixed(3)} → ${loadQE.toFixed(3)} → ${hotQE.toFixed(3)}`,
          value: 0.09,
        });
        bonus += 0.09;

      } else if (coldQE >= loadQE || loadQE >= hotQE) {
        // Entropy stalled or reversed mid-run — unusual for real silicon
        checks.push({
          id: 'NON_MONOTONIC_ENTROPY_TRAJECTORY',
          label: 'Entropy did not increase monotonically across load phases',
          detail: `cold=${coldQE.toFixed(3)} load=${loadQE.toFixed(3)} hot=${hotQE.toFixed(3)}`,
          severity: 'medium',
          penalty: 0.06,
        });
        penalty += 0.06;
      }
    }
  }

  // ── Dynamic threshold ───────────────────────────────────────────────────
  // A proof built from more evidence earns a more permissive (lower) bar:
  //   iterations (0→200): up to 0.65 of the evidence score
  //   phased collection: +0.15, bio: +0.10, audio: +0.05, canvas: +0.05
  // dynamicThreshold = 0.55 + (1 − evidenceWeight) × 0.12 → [0.55, 0.67]
  const iterFraction = Math.min(1.0, n / 200);
  const phasedBonus = phases ? 0.15 : 0.0;
  const bioBonus = bio?.hasActivity ? 0.10 : 0.0;
  const audioBonus = audio?.available ? 0.05 : 0.0;
  const canvasBonus = canvas?.available ? 0.05 : 0.0;

  const evidenceWeight = Math.min(1.0,
    iterFraction * 0.65 + phasedBonus + bioBonus + audioBonus + canvasBonus
  );

  const dynamicThreshold = +(0.55 + (1 - evidenceWeight) * 0.12).toFixed(4);

  // ── Stage-3 caps ────────────────────────────────────────────────────────
  // Stage 3 is a REFINEMENT: caps are smaller than stage 2 to prevent
  // triple-compounding across stages on legitimate-but-noisy hardware.
  // EXCEPTION (fix): on a 'vm' hard override the penalty is NOT capped and
  // all bonuses are stripped — mirroring stage 2. Without this, the 0.60
  // contradiction penalty was clamped to 0.15 while bonuses could reach
  // 0.18, letting a forged proof earn a net POSITIVE adjustment.
  const killed = hardOverride === 'vm';
  const totalPenalty = killed
    ? Math.min(1.0, penalty) // hard override: uncapped, overwhelms all bonuses
    : Math.min(0.15, penalty); // normal: stage 3 can't reject alone
  const totalBonus = killed ? 0 : Math.min(0.18, bonus);
  const keptBonuses = killed ? [] : bonuses; // bonuses earned on untrustworthy data are void

  return {
    penalty: totalPenalty,
    bonus: totalBonus,
    netAdjustment: +(totalBonus - totalPenalty).toFixed(4),
    checks,
    bonuses: keptBonuses,
    hardOverride, // 'vm' | null
    dynamicThreshold, // [0.55, 0.67]
    evidenceWeight: +evidenceWeight.toFixed(4),
    coherenceFlags: checks.map(c => c.id),
    physicalFlags: keptBonuses.map(b => b.id),
  };
}
3214
+
3215
+ // ---------------------------------------------------------------------------
3216
+ // computeServerDynamicThreshold
3217
+ // ---------------------------------------------------------------------------
3218
+
3219
/**
 * Server-side recomputation of the dynamic threshold.
 * The server NEVER trusts the client's dynamicThreshold value; it recomputes
 * from known payload fields.
 *
 * Evidence weighting (mirrors the client-side rubric):
 *   iterations (0→200): up to 0.65
 *   phased collection:  +0.15
 *   bio activity:       +0.10
 *   audio available:    +0.05
 *   canvas available:   +0.05
 *
 * @param {object} payload - validated ProofPayload
 * @returns {number} - minimum passing score for this proof [0.50, 0.62]
 */
function computeServerDynamicThreshold(payload) {
  const signals = payload?.signals ?? {};
  const iterations = signals.entropy?.iterations ?? 0;

  // Accumulate evidence weight in the same order as the client rubric.
  let weight = Math.min(1.0, iterations / 200) * 0.65;
  if (payload?.heuristic?.entropyJitterRatio != null) weight += 0.15;
  if (signals.bio?.hasActivity === true) weight += 0.10;
  if (signals.audio?.available === true) weight += 0.05;
  if (signals.canvas?.available === true) weight += 0.05;
  const evidenceWeight = Math.min(1.0, weight);

  // Server threshold: [0.50, 0.62]
  // Slightly more lenient than client [0.55, 0.67] because the server already
  // applies minJitterScore as an independent check. The dynamic component
  // adds an ADDITIONAL evidence-proportional tightening on top.
  return +(0.50 + (1 - evidenceWeight) * 0.12).toFixed(4);
}
3254
+
3255
+ // ---------------------------------------------------------------------------
3256
+ // Internal helpers
3257
+ // ---------------------------------------------------------------------------
3258
+
3259
/**
 * Neutral CoherenceReport used when there is no data to analyse.
 * All adjustments are zero; only the caller-supplied threshold is carried over.
 *
 * @param {number} threshold - dynamic threshold to embed in the empty report
 * @returns {CoherenceReport}
 */
function _empty(threshold) {
  const report = {
    penalty: 0,
    bonus: 0,
    netAdjustment: 0,
    checks: [],
    bonuses: [],
    hardOverride: null,
    dynamicThreshold: threshold,
    evidenceWeight: 0,
    coherenceFlags: [],
    physicalFlags: [],
  };
  return report;
}
3270
+
3271
+ /**
3272
+ * @typedef {object} CoherenceReport
3273
+ * @property {number} penalty - total score penalty [0, 0.15]
3274
+ * @property {number} bonus - total score bonus [0, 0.18]
3275
+ * @property {number} netAdjustment - bonus − penalty [-0.15, +0.18]
3276
+ * @property {object[]} checks - anomalies found (with penalty values)
3277
+ * @property {object[]} bonuses - physical properties confirmed
3278
+ * @property {'vm'|null} hardOverride - overrides score when set
3279
+ * @property {number} dynamicThreshold - computed passing threshold [0.55, 0.67]
3280
+ * @property {number} evidenceWeight - how much evidence was collected [0, 1]
3281
+ * @property {string[]} coherenceFlags - check IDs for logging
3282
+ * @property {string[]} physicalFlags - bonus IDs for logging
3283
+ */
3284
+
3285
+ /**
3286
+ * @sovereign/pulse — Hypervisor & Cloud Provider Fingerprinter
3287
+ *
3288
+ * Each hypervisor has a distinct "steal-time rhythm" — a characteristic
3289
+ * pattern in how it schedules guest vCPUs on host physical cores.
3290
+ * This creates detectable signatures in the timing autocorrelation profile.
3291
+ *
3292
+ * Think of it like a heartbeat EKG:
3293
+ * KVM → regular 50-iteration bursts (~250ms quantum at 5ms/iter)
3294
+ * Xen → longer 150-iteration bursts (~750ms credit scheduler quantum)
3295
+ * VMware → irregular bursts, memory balloon noise
3296
+ * Hyper-V → 78-iteration bursts (~390ms at 5ms/iter, 15.6ms quantum)
3297
+ * Nitro → almost none — SR-IOV passthrough is nearly invisible
3298
+ * Physical → no rhythm at all
3299
+ *
3300
+ * Canvas renderer strings give a second, independent signal that we cross-
3301
+ * reference to increase confidence in the provider classification.
3302
+ */
3303
+
3304
+ // ---------------------------------------------------------------------------
3305
+ // Provider profile database
3306
+ // ---------------------------------------------------------------------------
3307
+ // Each profile is calibrated from real benchmark data.
3308
+ // Fields: lag1_range, lag50_range, qe_range, cv_range, renderer_hints
3309
+
3310
// Provider profile table. Each entry's `match` predicate is evaluated against
// the `signals` object built in detectProvider() (which already passes
// absolute-valued lags; any Math.abs inside a predicate is defensive).
// Threshold constants are calibration data — do not tweak without re-running
// the benchmarks they were derived from.
const PROVIDER_PROFILES = [
  {
    id: 'physical',
    label: 'Physical Hardware',
    profile: 'analog-fog',
    confidence: 0, // placeholder — detectProvider() computes confidence dynamically
    // Physical: weak autocorrelation, high quantization entropy, visible
    // timing variance, hardware renderer, and (if phased) rising entropy.
    match: ({ lag1, lag50, qe, cv, entropyJitterRatio, isSoftwareRenderer }) =>
      !isSoftwareRenderer &&
      Math.abs(lag1) < 0.20 &&
      Math.abs(lag50) < 0.15 &&
      qe > 3.0 &&
      cv > 0.06 &&
      (entropyJitterRatio === null || entropyJitterRatio >= 1.02),
  },
  {
    id: 'kvm-generic',
    label: 'KVM Hypervisor (generic)',
    profile: 'picket-fence',
    // KVM: strong short- and long-lag autocorrelation with flattened entropy.
    match: ({ lag1, lag50, qe, cv }) =>
      lag1 > 0.40 && qe < 2.5 && cv < 0.15 && Math.abs(lag50) > 0.25,
    // NOTE(review): providerHints is not read by detectProvider() in this file —
    // confirm whether another consumer uses it before relying on it.
    providerHints: ['digitalocean', 'linode', 'vultr', 'hetzner', 'ovh'],
  },
  {
    id: 'kvm-digitalocean',
    label: 'DigitalOcean Droplet (KVM)',
    profile: 'picket-fence',
    // DO droplets: very strong lag-1 plus virtual-renderer token or lag-50 burst.
    match: ({ lag1, lag50, qe, cv, rendererHints }) =>
      lag1 > 0.55 && qe < 2.0 && cv < 0.12 &&
      (rendererHints.some(r => ['llvmpipe', 'virtio', 'qxl'].includes(r)) ||
       lag50 > 0.30),
  },
  {
    id: 'kvm-aws-ec2-xen',
    label: 'AWS EC2 (Xen/older generation)',
    profile: 'picket-fence',
    // Xen credit scheduler has longer period (~150 iters)
    match: ({ lag1, lag25, lag50, qe, cv }) =>
      qe < 2.2 && cv < 0.13 &&
      lag25 > 0.20 && lag50 > 0.20 &&
      lag1 < 0.50, // lag-1 less pronounced than KVM
  },
  {
    id: 'nitro-aws',
    label: 'AWS EC2 Nitro (near-baremetal)',
    profile: 'near-physical',
    // Nitro uses SR-IOV and dedicated hardware — steal-time is very low.
    // Looks almost physical but canvas renderer gives it away.
    match: ({ lag1, lag50, qe, cv, isSoftwareRenderer, rendererHints }) =>
      qe > 2.5 && cv > 0.05 &&
      lag1 < 0.25 && lag50 < 0.20 &&
      (isSoftwareRenderer ||
       rendererHints.some(r => r.includes('nvidia t4') || r.includes('nvidia a10'))),
  },
  {
    id: 'vmware-esxi',
    label: 'VMware ESXi',
    profile: 'burst-scheduler',
    // VMware balloon driver creates irregular memory pressure bursts
    match: ({ lag1, lag50, qe, cv, rendererHints }) =>
      qe < 2.5 &&
      (rendererHints.some(r => r.includes('vmware')) ||
       (lag1 > 0.30 && lag50 < lag1 * 0.7 && cv < 0.14)),
  },
  {
    id: 'hyperv',
    label: 'Microsoft Hyper-V',
    profile: 'picket-fence',
    // 15.6ms scheduler quantum → burst every ~78 iters
    match: ({ lag1, lag25, qe, cv, rendererHints }) =>
      qe < 2.3 &&
      (rendererHints.some(r => r.includes('microsoft basic render') || r.includes('warp')) ||
       (lag25 > 0.25 && lag1 > 0.35 && cv < 0.12)),
  },
  {
    id: 'gcp-kvm',
    label: 'Google Cloud (KVM)',
    profile: 'picket-fence',
    // GCE: KVM-style lag-1 plus SwiftShader renderer or long-lag burst.
    match: ({ lag1, lag50, qe, cv, rendererHints }) =>
      qe < 2.3 && lag1 > 0.45 &&
      (rendererHints.some(r => r.includes('swiftshader') || r.includes('google')) ||
       (lag50 > 0.28 && cv < 0.11)),
  },
  {
    id: 'gh200-datacenter',
    label: 'NVIDIA GH200 / HPC Datacenter',
    profile: 'hypervisor-flat',
    // Even with massive compute, still trapped by hypervisor clock.
    // GH200 shows near-zero Hurst (extreme quantization) + very high lag-1.
    match: ({ lag1, qe, hurst, cv, rendererHints }) =>
      (rendererHints.some(r => r.includes('gh200') || r.includes('grace hopper') ||
                               r.includes('nvidia a100') || r.includes('nvidia h100')) ||
       (hurst < 0.10 && lag1 > 0.60 && qe < 1.8 && cv < 0.10)),
  },
  {
    id: 'generic-vm',
    label: 'Virtual Machine (unclassified)',
    profile: 'picket-fence',
    // Catch-all: software renderer, flattened entropy, or implausibly low CV.
    match: ({ lag1, qe, cv, isSoftwareRenderer }) =>
      isSoftwareRenderer ||
      (qe < 2.0 && lag1 > 0.35) ||
      (cv < 0.02),
  },
];
3413
+
3414
+ // ---------------------------------------------------------------------------
3415
+ // detectProvider
3416
+ // ---------------------------------------------------------------------------
3417
+
3418
/**
 * Classifies the host environment based on timing + canvas signals.
 *
 * @param {object} p
 * @param {import('./jitter.js').JitterAnalysis} p.jitter
 * @param {object} p.autocorrelations - extended lags including lag25, lag50
 * @param {import('../collector/canvas.js').CanvasFingerprint} p.canvas
 * @param {object|null} p.phases
 * @returns {ProviderResult}
 */
function detectProvider({ jitter, autocorrelations, canvas, phases }) {
  const rendererHints = _rendererHints(canvas?.webglRenderer, canvas?.webglVendor);

  // Profiles see absolute-valued lags; missing signals default to neutral values.
  const absLag = (v) => Math.abs(v ?? 0);
  const signals = {
    lag1: absLag(autocorrelations?.lag1),
    lag25: absLag(autocorrelations?.lag25),
    lag50: absLag(autocorrelations?.lag50),
    qe: jitter.quantizationEntropy,
    cv: jitter.stats?.cv ?? 0,
    hurst: jitter.hurstExponent ?? 0.5,
    isSoftwareRenderer: canvas?.isSoftwareRenderer ?? false,
    rendererHints,
    entropyJitterRatio: phases?.entropyJitterRatio ?? null,
  };

  // Evaluate every profile; a throwing predicate counts as a non-match.
  const matched = [];
  for (const profile of PROVIDER_PROFILES) {
    let hit = false;
    try { hit = profile.match(signals); } catch { hit = false; }
    if (hit) {
      matched.push({ ...profile, priority: profile.id === 'physical' ? 0 : 1 });
    }
  }

  // Physical hardware is the last resort: any matching VM profile outranks it,
  // otherwise profiles keep their declaration order.
  const scored = [
    ...matched.filter(p => p.priority === 1),
    ...matched.filter(p => p.priority === 0),
  ];

  const best = scored[0] ?? { id: 'unknown', label: 'Unknown', profile: 'unknown' };

  // Confidence: how many "VM indicator" thresholds the signals cross.
  let vmIndicatorCount = 0;
  if (signals.qe < 2.5) vmIndicatorCount += 1;
  if (signals.lag1 > 0.35) vmIndicatorCount += 1;
  if (signals.lag50 > 0.20) vmIndicatorCount += 1;
  if (signals.cv < 0.04) vmIndicatorCount += 1;
  if (signals.isSoftwareRenderer) vmIndicatorCount += 1;
  if (signals.hurst < 0.15) vmIndicatorCount += 1;
  if (phases?.entropyJitterRatio != null && phases.entropyJitterRatio < 1.02) {
    vmIndicatorCount += 1;
  }

  // Physical verdicts lose confidence with each VM indicator present;
  // VM verdicts gain confidence with each one.
  const isPhysical = best.id === 'physical';
  const confidence = isPhysical
    ? Math.max(20, 95 - vmIndicatorCount * 15)
    : Math.min(95, 40 + vmIndicatorCount * 12);

  return {
    providerId: best.id,
    providerLabel: best.label,
    profile: best.profile,
    confidence,
    isVirtualized: best.id !== 'physical',
    signals,
    alternatives: scored.slice(1, 3).map(p => ({ id: p.id, label: p.label })),
    rendererHints,
    schedulerQuantumMs: _estimateQuantum(signals),
  };
}
3487
+
3488
+ /**
3489
+ * @typedef {object} ProviderResult
3490
+ * @property {string} providerId
3491
+ * @property {string} providerLabel
3492
+ * @property {string} profile 'analog-fog' | 'picket-fence' | 'burst-scheduler' | 'near-physical' | 'hypervisor-flat' | 'unknown'
3493
+ * @property {number} confidence 0–100
3494
+ * @property {boolean} isVirtualized
3495
+ * @property {object} signals
3496
+ * @property {object[]} alternatives
3497
+ * @property {string[]} rendererHints
3498
+ * @property {number|null} schedulerQuantumMs
3499
+ */
3500
+
3501
+ // ---------------------------------------------------------------------------
3502
+ // Internal helpers
3503
+ // ---------------------------------------------------------------------------
3504
+
3505
/**
 * Tokenise the WebGL renderer/vendor strings into lowercase hint words.
 * Splits on whitespace, slashes, parentheses, and commas; tokens of two
 * characters or fewer are discarded as noise.
 */
function _rendererHints(renderer = '', vendor = '') {
  const combined = `${renderer} ${vendor}`.toLowerCase();
  const tokens = combined.split(/[\s\/(),]+/);
  return tokens.filter((token) => token.length > 2);
}
3513
+
3514
/**
 * Estimate the hypervisor's scheduler quantum from the dominant autocorrelation lag.
 * Returns null if the device appears to be physical or no lag is pronounced.
 */
function _estimateQuantum({ lag1, lag25, lag50, qe }) {
  // High quantization entropy → likely physical, no quantum to estimate.
  if (qe > 3.2) return null;

  // Track the stronger of the two long-range lags; ties go to lag-50.
  let peakLag = 0;
  let peakAc = 0;
  if (lag50 > peakAc) { peakLag = 50; peakAc = lag50; }
  if (lag25 > peakAc) { peakLag = 25; peakAc = lag25; }

  // A weak peak is not evidence of a scheduler rhythm.
  if (peakAc < 0.20) return null;

  // Quantum (ms) ≈ dominant_lag × estimated_iteration_time (≈5ms)
  return peakLag * 5;
}
3533
+
3534
+ /**
3535
+ * @sovereign/pulse — High-Level Fingerprint Class
3536
+ *
3537
+ * The developer-facing API. Instead of forcing devs to understand Hurst
3538
+ * Exponents and Quantization Entropy, they get a Fingerprint object with
3539
+ * plain-language properties and one critical boolean: isSynthetic.
3540
+ *
3541
+ * Usage:
3542
+ *
3543
+ * import { Fingerprint } from '@sovereign/pulse';
3544
+ *
3545
+ * const fp = await Fingerprint.collect({ nonce });
3546
+ *
3547
+ * if (fp.isSynthetic) {
3548
+ * console.log(`Blocked: ${fp.providerLabel} detected (${fp.confidence}% confidence)`);
3549
+ * console.log(`Profile: ${fp.profile}`); // 'picket-fence'
3550
+ * console.log(`Reason: ${fp.topFlag}`); // 'LOW_QE + HIGH_LAG1_AUTOCORR'
3551
+ * } else {
3552
+ * console.log(`Verified: ${fp.hardwareId()}`);
3553
+ * console.log(`Score: ${fp.score}`); // 0.0 – 1.0
3554
+ * }
3555
+ *
3556
+ * // Always send to server for final validation:
3557
+ * const { payload, hash } = fp.toCommitment();
3558
+ */
3559
+
3560
+
3561
+ // ---------------------------------------------------------------------------
3562
+ // Fingerprint class
3563
+ // ---------------------------------------------------------------------------
3564
+
3565
class Fingerprint {
  /**
   * @private — construct via the static factory `Fingerprint.collect()`.
   * @param {object} raw - fully-analysed signal/score bundle assembled in collect()
   */
  constructor(raw) {
    this._raw = raw; // full internal data
    // NOTE(review): _commitment is never read or written anywhere else in this
    // class — the commitment is built eagerly in collect() and toCommitment()
    // returns this._raw.commitment. Confirm it is unused before removing.
    this._commitment = null; // lazy-built on first toCommitment() call
  }

  // ── Static factory ────────────────────────────────────────────────────────

  /**
   * Collect all hardware signals and return a Fingerprint instance.
   *
   * Runs the entropy probe, canvas, and audio collectors in parallel while a
   * BioCollector records activity in the background, then feeds everything
   * through the three-stage scoring pipeline (jitter → heuristic → coherence).
   *
   * @param {object} opts
   * @param {string} opts.nonce - server-issued challenge nonce (required)
   * @param {number} [opts.iterations=200]
   * @param {number} [opts.bioWindowMs=3000]
   * @param {boolean} [opts.phased=true] - run cold/load/hot phases
   * @param {boolean} [opts.adaptive=true] - passed through to collectEntropy
   * @param {number} [opts.adaptiveThreshold=0.85] - passed through to collectEntropy
   * @param {Function} [opts.onProgress] - (stage: string, meta?: object) => void
   * @param {string} [opts.wasmPath]
   * @returns {Promise<Fingerprint>}
   * @throws {Error} when opts.nonce is missing
   */
  static async collect(opts = {}) {
    const {
      nonce,
      iterations = 200,
      bioWindowMs = 3000,
      phased = true,
      adaptive = true,
      adaptiveThreshold = 0.85,
      onProgress,
      wasmPath,
    } = opts;

    if (!nonce) throw new Error('Fingerprint.collect() requires opts.nonce');

    // Progress reporting is best-effort: a throwing observer must not abort collection.
    const emit = (stage, meta) => { try { onProgress?.(stage, meta); } catch {} };

    emit('start');

    // ── Parallel collection ────────────────────────────────────────────────
    // Bio capture runs in the background while the three async collectors
    // execute concurrently.
    const bio = new BioCollector();
    bio.start();

    const [entropy, canvas, audio] = await Promise.all([
      collectEntropy({
        iterations, phased, adaptive, adaptiveThreshold, wasmPath,
        onBatch: (meta) => emit('entropy_batch', meta),
      }).then(r => { emit('entropy_done'); return r; }),
      collectCanvasFingerprint()
        .then(r => { emit('canvas_done'); return r; }),
      collectAudioJitter({ durationMs: Math.min(bioWindowMs, 2000) })
        .then(r => { emit('audio_done'); return r; }),
    ]);

    // Wait out the bio window
    // NOTE(review): assumes entropy.collectedAt is an epoch-ms timestamp taken
    // when entropy collection began — confirm against the collector module.
    const elapsed = Date.now() - entropy.collectedAt;
    const remain = Math.max(0, bioWindowMs - elapsed);
    if (remain > 0) await new Promise(r => setTimeout(r, remain));

    bio.stop();
    const bioSnapshot = bio.snapshot(entropy.timings);
    emit('bio_done');

    // ── Analysis pipeline ─────────────────────────────────────────────────
    const jitter = classifyJitter(entropy.timings, { autocorrelations: entropy.autocorrelations });
    const heuristic = runHeuristicEngine({ jitter, phases: entropy.phases, autocorrelations: entropy.autocorrelations });
    const provider = detectProvider({ jitter, autocorrelations: entropy.autocorrelations, canvas, phases: entropy.phases });

    // ── Three-stage scoring pipeline ──────────────────────────────────────
    // Stage 1: base jitter score from timing distribution analysis
    const rawScore = jitter.score;
    // Stage 2: heuristic cross-metric coherence adjustment (clamped to [0, 1])
    const adjScore = Math.max(0, Math.min(1, rawScore + heuristic.netAdjustment));
    // Stage 3: zero-latency structural coherence analysis on already-collected data
    const coherence = runCoherenceAnalysis({
      timings: entropy.timings,
      jitter,
      phases: entropy.phases ?? null,
      batches: entropy.batches ?? null,
      bio: bioSnapshot,
      canvas,
      audio,
    });
    // Final score: stage-2 adjusted score refined by stage-3 coherence (clamped to [0, 1])
    const finalScore = Math.max(0, Math.min(1, adjScore + coherence.netAdjustment));

    emit('analysis_done');

    // ── Build commitment ──────────────────────────────────────────────────
    const payload = buildProof({ entropy, jitter, bio: bioSnapshot, canvas, audio, nonce });
    // Inject heuristic + provider into proof payload for server-side reference
    payload.heuristic = {
      penalty: heuristic.penalty,
      bonus: heuristic.bonus,
      entropyJitterRatio: heuristic.entropyJitterRatio,
      picketFence: heuristic.picketFence.detected,
      coherenceFlags: heuristic.coherenceFlags,
      hardOverride: heuristic.hardOverride, // 'vm' | null
    };
    payload.provider = {
      id: provider.providerId,
      label: provider.providerLabel,
      profile: provider.profile,
      confidence: provider.confidence,
      schedulerQuantum: provider.schedulerQuantumMs,
    };
    // Stage-3 coherence summary (server uses these for logging + dynamic threshold)
    payload.coherence = {
      netAdjustment: coherence.netAdjustment,
      dynamicThreshold: coherence.dynamicThreshold,
      evidenceWeight: coherence.evidenceWeight,
      coherenceFlags: coherence.coherenceFlags,
      physicalFlags: coherence.physicalFlags,
      hardOverride: coherence.hardOverride,
    };
    payload.classification.adjustedScore = _round(adjScore, 4);
    payload.classification.finalScore = _round(finalScore, 4);
    payload.classification.dynamicThreshold = coherence.dynamicThreshold;

    const commitment = buildCommitment(payload);
    emit('complete');

    return new Fingerprint({
      entropy, canvas, audio,
      bioSnapshot, jitter, heuristic, coherence, provider,
      rawScore, adjScore, finalScore,
      nonce, commitment,
    });
  }

  // ── Primary API ────────────────────────────────────────────────────────────

  /**
   * True if the device is likely a VM, AI inference endpoint, or sanitised
   * cloud environment. Uses the adjusted score (base + heuristic bonuses/penalties).
   * @type {boolean}
   */
  get isSynthetic() {
    // Stage-2 hard kill: EJR/QE mathematical contradiction detected in the
    // heuristic engine before any bonuses could accumulate.
    if (this._raw.heuristic.hardOverride === 'vm') return true;
    // Stage-3 hard kill: EJR/QE contradiction or phase forgery detected in
    // the coherence analyser (second line of defence).
    if (this._raw.coherence.hardOverride === 'vm') return true;
    // Normal path: final score vs dynamic threshold.
    return this._raw.finalScore < this._raw.coherence.dynamicThreshold;
  }

  /**
   * Confidence in the isSynthetic verdict, 0–100.
   * @type {number}
   */
  get confidence() {
    const s = this._raw.finalScore;
    const t = this._raw.coherence.dynamicThreshold;
    // Map distance from threshold to confidence percentage.
    // At the threshold: 0% confident. Far above/below: approaching 100%.
    // (A distance of 0.2 or more saturates at 100.)
    const distance = Math.abs(s - t);
    return Math.min(100, Math.round(distance * 500));
  }

  /**
   * Normalised score [0.0, 1.0]. Higher = more physical.
   * This is the FINAL score after all three analysis stages.
   * @type {number}
   */
  get score() {
    return _round(this._raw.finalScore, 4);
  }

  /**
   * The dynamic passing threshold for this proof [0.55, 0.67].
   * Reflects how much evidence was collected — a full-evidence proof has a
   * lower (more permissive) threshold; a minimal-evidence proof has a higher
   * (more conservative) threshold.
   * @type {number}
   */
  get threshold() {
    return this._raw.coherence.dynamicThreshold;
  }

  /**
   * How much evidence was collected [0, 1].
   * 1.0 = 200 iterations + phased + bio + audio + canvas
   * 0.0 = minimal proof
   * @type {number}
   */
  get evidenceWeight() {
    return this._raw.coherence.evidenceWeight;
  }

  /**
   * Human-readable confidence tier derived from `confidence`.
   * @type {'high'|'medium'|'low'|'uncertain'}
   */
  get tier() {
    const c = this.confidence;
    if (c >= 70) return 'high';
    if (c >= 40) return 'medium';
    if (c >= 20) return 'low';
    return 'uncertain';
  }

  /**
   * Detected timing profile name.
   * 'analog-fog'       → real hardware, natural Brownian noise
   * 'picket-fence'     → VM steal-time bursts at regular intervals
   * 'burst-scheduler'  → irregular VM scheduling (VMware-style)
   * 'hypervisor-flat'  → flat timing, hypervisor completely irons out noise
   * 'near-physical'    → hard to classify (Nitro, GPU passthrough)
   * 'unknown'
   * @type {string}
   */
  get profile() {
    return this._raw.provider.profile;
  }

  /**
   * Detected cloud provider / hypervisor.
   * @type {string} e.g. 'kvm-digitalocean', 'nitro-aws', 'physical', 'generic-vm'
   */
  get providerId() {
    return this._raw.provider.providerId;
  }

  /**
   * Human-readable provider label.
   * @type {string} e.g. 'DigitalOcean Droplet (KVM)', 'Physical Hardware'
   */
  get providerLabel() {
    return this._raw.provider.providerLabel;
  }

  /**
   * Estimated hypervisor scheduler quantum in milliseconds.
   * Null if the device appears to be physical.
   * @type {number|null}
   */
  get schedulerQuantumMs() {
    return this._raw.provider.schedulerQuantumMs;
  }

  /**
   * Entropy-Jitter Ratio — the key signal distinguishing real silicon from VMs.
   * Values ≥ 1.08 confirm thermal feedback (real hardware).
   * Values near 1.0 indicate a hypervisor clock (VM).
   * Null if phased collection was not run.
   * @type {number|null}
   */
  get entropyJitterRatio() {
    return this._raw.heuristic.entropyJitterRatio;
  }

  /**
   * The most diagnostic flag from the heuristic engine.
   * Heuristic findings take priority over base-classifier flags.
   * @type {string}
   */
  get topFlag() {
    const flags = [
      ...this._raw.heuristic.findings.map(f => f.id),
      ...this._raw.jitter.flags,
    ];
    return flags[0] ?? 'NONE';
  }

  /**
   * All flags from both the base classifier and heuristic engine.
   * @type {string[]}
   */
  get flags() {
    return [
      ...this._raw.heuristic.coherenceFlags,
      ...this._raw.jitter.flags,
    ];
  }

  /**
   * Summary of heuristic findings with human-readable labels.
   * @type {Array<{id, label, severity, detail}>}
   */
  get findings() {
    return this._raw.heuristic.findings;
  }

  /**
   * Confirmed physical properties (positive evidence for real hardware).
   * @type {Array<{id, label, detail}>}
   */
  get physicalEvidence() {
    return this._raw.heuristic.bonuses;
  }

  // ── Hardware ID ────────────────────────────────────────────────────────────

  /**
   * A stable, privacy-preserving hardware identifier derived from the GPU
   * canvas fingerprint, audio sample rate, and WebGL extension set.
   *
   * Properties:
   *   - Stable: same device → same ID across sessions
   *   - Not uniquely identifying: changes if GPU or driver changes
   *   - Not reversible: BLAKE3 hash, cannot recover original signals
   *   - Not a tracking cookie: no PII, no cross-origin data
   *
   * @returns {string} 16-character hex ID
   */
  hardwareId() {
    const { canvas, audio } = this._raw;
    // Missing signals contribute empty strings so the ID stays deterministic.
    const components = [
      canvas?.webglRenderer ?? '',
      canvas?.webglVendor ?? '',
      canvas?.extensionCount?.toString() ?? '',
      audio?.sampleRate?.toString() ?? '',
      canvas?.webglVersion?.toString() ?? '',
    ].join('|');
    return blake3HexStr(components).slice(0, 16);
  }

  // ── Diagnostic data ────────────────────────────────────────────────────────

  /**
   * Key metrics summary — useful for logging and debugging.
   * @returns {object}
   */
  metrics() {
    const { jitter, heuristic, coherence, provider } = this._raw;
    return {
      // ── Final verdict ──────────────────────────────────────────────────
      score: this.score,                 // final (stage 3)
      threshold: this.threshold,         // dynamic passing bar
      evidenceWeight: this.evidenceWeight,
      isSynthetic: this.isSynthetic,
      // ── Score pipeline breakdown ───────────────────────────────────────
      rawScore: _round(this._raw.rawScore, 4),       // stage 1
      adjustedScore: _round(this._raw.adjScore, 4),  // stage 2
      finalScore: _round(this._raw.finalScore, 4),   // stage 3
      heuristicAdjustment: _round(heuristic.netAdjustment, 4),
      coherenceAdjustment: _round(coherence.netAdjustment, 4),
      // ── Timing signals ─────────────────────────────────────────────────
      cv: _round(jitter.stats?.cv, 4),
      hurstExponent: _round(jitter.hurstExponent, 4),
      quantizationEntropy: _round(jitter.quantizationEntropy, 4),
      autocorrLag1: _round(jitter.autocorrelations?.lag1, 4),
      autocorrLag50: _round(this._raw.entropy.autocorrelations?.lag50, 4),
      outlierRate: _round(jitter.outlierRate, 4),
      thermalPattern: jitter.thermalSignature?.pattern,
      entropyJitterRatio: _round(heuristic.entropyJitterRatio, 4),
      picketFence: heuristic.picketFence.detected,
      // ── Coherence signals ──────────────────────────────────────────────
      coherenceFlags: coherence.coherenceFlags,
      physicalFlags: coherence.physicalFlags,
      hardOverride: coherence.hardOverride,
      // ── Provider ───────────────────────────────────────────────────────
      provider: provider.providerLabel,
      providerConfidence: provider.confidence,
      schedulerQuantumMs: provider.schedulerQuantumMs,
      // ── Hardware ───────────────────────────────────────────────────────
      webglRenderer: this._raw.canvas?.webglRenderer,
      isSoftwareRenderer: this._raw.canvas?.isSoftwareRenderer,
      hardwareId: this.hardwareId(),
    };
  }

  /**
   * Full diagnostic report for debugging / integration testing.
   * @returns {object}
   */
  report() {
    const { coherence } = this._raw;
    return {
      verdict: {
        isSynthetic: this.isSynthetic,
        score: this.score,
        threshold: this.threshold,
        confidence: this.confidence,
        tier: this.tier,
        profile: this.profile,
        provider: this.providerLabel,
        topFlag: this.topFlag,
        hardOverride: coherence.hardOverride,
        evidenceWeight: this.evidenceWeight,
      },
      pipeline: {
        rawScore: _round(this._raw.rawScore, 4),
        adjustedScore: _round(this._raw.adjScore, 4),
        finalScore: _round(this._raw.finalScore, 4),
        heuristicAdjustment: _round(this._raw.heuristic.netAdjustment, 4),
        coherenceAdjustment: _round(coherence.netAdjustment, 4),
        dynamicThreshold: coherence.dynamicThreshold,
      },
      metrics: this.metrics(),
      findings: this.findings,
      physicalEvidence: this.physicalEvidence,
      coherenceChecks: coherence.checks,
      coherenceBonuses: coherence.bonuses,
      // Phase summary is only present when phased collection ran.
      phases: this._raw.entropy.phases ? {
        cold: { qe: _round(this._raw.entropy.phases.cold.qe, 4), mean: _round(this._raw.entropy.phases.cold.mean, 4) },
        hot: { qe: _round(this._raw.entropy.phases.hot.qe, 4), mean: _round(this._raw.entropy.phases.hot.mean, 4) },
        entropyJitterRatio: _round(this._raw.entropy.phases.entropyJitterRatio, 4),
      } : null,
    };
  }

  // ── Proof commitment ───────────────────────────────────────────────────────

  /**
   * Returns the BLAKE3 commitment to send to the server for validation.
   * The commitment was built eagerly in collect(); this is a plain accessor.
   * @returns {{ payload: object, hash: string }}
   */
  toCommitment() {
    return this._raw.commitment;
  }

  // ── String representations ─────────────────────────────────────────────────

  toString() {
    const icon = this.isSynthetic ? '🚩' : '✅';
    const verb = this.isSynthetic ? 'Synthetic' : 'Physical';
    return `${icon} ${verb} | ${this.providerLabel} | score=${this.score} | conf=${this.confidence}% | profile=${this.profile}`;
  }

  toJSON() {
    return this.report();
  }
}
3990
+
3991
+ // ---------------------------------------------------------------------------
3992
+ // Internal helpers
3993
+ // ---------------------------------------------------------------------------
3994
+
3995
+ function _round(v, d) {
3996
+ if (v == null || !isFinite(v)) return null;
3997
+ const f = 10 ** d;
3998
+ return Math.round(v * f) / f;
3999
+ }
4000
+
4001
+ /**
4002
+ * @sovereign/pulse — Server-Side Validator
4003
+ *
4004
+ * Verifies a ProofPayload + BLAKE3 commitment received from the client.
4005
+ * This module is for NODE.JS / SERVER use only. It should NOT be bundled
4006
+ * into the browser build (see package.json "exports" field).
4007
+ *
4008
+ * Trust model:
4009
+ * • The server issues a challenge `nonce` before the client runs pulse().
4010
+ * • The client returns { payload, hash }.
4011
+ * • The server calls validateProof(payload, hash, options) to:
4012
+ * 1. Verify hash integrity (no tampering).
4013
+ * 2. Verify nonce freshness (no replay).
4014
+ * 3. Verify timestamp recency.
4015
+ * 4. Check jitter score against thresholds.
4016
+ * 5. Check canvas fingerprint against software-renderer blocklist.
4017
+ * 6. Cross-validate signal consistency.
4018
+ *
4019
+ * NOTE: The server NEVER sees raw timing arrays or mouse coordinates.
4020
+ * Only statistical summaries are transmitted. This is the ZK property.
4021
+ */
4022
+
4023
+
4024
+ // ---------------------------------------------------------------------------
4025
+ // Known software / virtual renderer substring patterns (lowercase)
4026
+ // ---------------------------------------------------------------------------
4027
+ const VM_RENDERER_BLOCKLIST = [
4028
+ // Software / virtual renderers
4029
+ 'llvmpipe', 'swiftshader', 'softpipe', 'mesa offscreen',
4030
+ 'microsoft basic render', 'vmware svga', 'vmware', 'virtualbox',
4031
+ 'parallels', 'chromium swiftshader', 'google swiftshader',
4032
+ 'angle (', 'cirrussm', 'qxl', 'virtio', 'bochs',
4033
+ // NVIDIA datacenter / inference — no consumer unit has these
4034
+ 'nvidia t4', // AWS/GCP inference VM
4035
+ 'nvidia a10g', // AWS g5 inference
4036
+ 'nvidia a100', // Datacenter A100
4037
+ 'nvidia h100', // Hopper — datacenter only
4038
+ 'nvidia h200', // Hopper successor — datacenter only
4039
+ 'nvidia b100', // Blackwell — datacenter only
4040
+ 'nvidia b200', // Blackwell Ultra — datacenter only
4041
+ 'nvidia gh200', // Grace-Hopper superchip
4042
+ // AMD datacenter / HPC — no consumer has these
4043
+ 'amd instinct', // covers mi100, mi200, mi250, mi300 family
4044
+ 'amd mi300',
4045
+ 'amd mi250',
4046
+ 'amd mi200',
4047
+ // Cloud-specific AI accelerators
4048
+ 'aws inferentia',
4049
+ 'aws trainium',
4050
+ 'google tpu',
4051
+ ];
4052
+
4053
+ // ---------------------------------------------------------------------------
4054
+ // validateProof
4055
+ // ---------------------------------------------------------------------------
4056
+
4057
+ /**
4058
+ * Validates a client-submitted proof.
4059
+ *
4060
+ * @param {import('./fingerprint.js').ProofPayload} payload
4061
+ * @param {string} receivedHash - hex BLAKE3 from the client
4062
+ * @param {object} [opts]
4063
+ * @param {number} [opts.minJitterScore=0.55] - minimum acceptable jitter score
4064
+ * @param {number} [opts.maxAgeMs=300_000] - max payload age (5 min)
4065
+ * @param {number} [opts.clockSkewMs=30_000] - tolerated future timestamp drift
4066
+ * @param {boolean} [opts.requireBio=false] - reject if no bio activity
4067
+ * @param {boolean} [opts.blockSoftwareRenderer=true] - reject software WebGL
4068
+ * @param {Function} [opts.checkNonce] - async fn(nonce) → boolean
4069
+ * Called to verify the nonce was issued by this server and not yet consumed.
4070
+ * Should mark the nonce as consumed atomically (e.g. Redis SET NX with TTL).
4071
+ * If omitted, nonce freshness is NOT checked (not recommended for production).
4072
+ *
4073
+ * @returns {Promise<ValidationResult>}
4074
+ */
4075
+ async function validateProof(payload, receivedHash, opts = {}) {
4076
+ const {
4077
+ minJitterScore = 0.55,
4078
+ maxAgeMs = 300_000,
4079
+ clockSkewMs = 30_000,
4080
+ requireBio = false,
4081
+ blockSoftwareRenderer = true,
4082
+ checkNonce = null,
4083
+ } = opts;
4084
+
4085
+ const reasons = [];
4086
+ const riskFlags = [];
4087
+ let valid = true;
4088
+
4089
+ // ── 0. Strict payload structure validation ────────────────────────────────
4090
+ if (!payload || typeof payload !== 'object' || Array.isArray(payload)) {
4091
+ return _reject(['INVALID_PAYLOAD_STRUCTURE']);
4092
+ }
4093
+
4094
+ // Prototype pollution guard — reject any payload with __proto__ / constructor tricks
4095
+ if (
4096
+ Object.prototype.hasOwnProperty.call(payload, '__proto__') ||
4097
+ Object.prototype.hasOwnProperty.call(payload, 'constructor') ||
4098
+ Object.prototype.hasOwnProperty.call(payload, 'prototype')
4099
+ ) {
4100
+ return _reject(['PROTOTYPE_POLLUTION_ATTEMPT']);
4101
+ }
4102
+
4103
+ // Required top-level fields
4104
+ const REQUIRED_TOP = ['version', 'timestamp', 'nonce', 'signals', 'classification'];
4105
+ for (const field of REQUIRED_TOP) {
4106
+ if (!(field in payload)) {
4107
+ return _reject([`MISSING_REQUIRED_FIELD:${field}`]);
4108
+ }
4109
+ }
4110
+
4111
+ // Type assertions on top-level scalars
4112
+ if (typeof payload.version !== 'number') return _reject(['INVALID_TYPE:version']);
4113
+ if (typeof payload.timestamp !== 'number') return _reject(['INVALID_TYPE:timestamp']);
4114
+ if (typeof payload.nonce !== 'string') return _reject(['INVALID_TYPE:nonce']);
4115
+ if (typeof payload.signals !== 'object' || Array.isArray(payload.signals)) {
4116
+ return _reject(['INVALID_TYPE:signals']);
4117
+ }
4118
+ if (typeof payload.classification !== 'object' || Array.isArray(payload.classification)) {
4119
+ return _reject(['INVALID_TYPE:classification']);
4120
+ }
4121
+
4122
+ // Nonce must be a 64-character lowercase hex string (32 bytes)
4123
+ if (!/^[0-9a-f]{64}$/.test(payload.nonce)) {
4124
+ return _reject(['INVALID_NONCE_FORMAT']);
4125
+ }
4126
+
4127
+ // Timestamp must be a plausible Unix ms value (> year 2020, < year 2100)
4128
+ const TS_MIN = 1_577_836_800_000; // 2020-01-01
4129
+ const TS_MAX = 4_102_444_800_000; // 2100-01-01
4130
+ if (payload.timestamp < TS_MIN || payload.timestamp > TS_MAX) {
4131
+ return _reject(['TIMESTAMP_OUT_OF_RANGE']);
4132
+ }
4133
+
4134
+ if (payload.version !== 1) {
4135
+ return _reject(['UNSUPPORTED_PROOF_VERSION']);
4136
+ }
4137
+
4138
+ // ── 1. Hash integrity ─────────────────────────────────────────────────────
4139
+ // receivedHash must be exactly 64 lowercase hex characters
4140
+ if (typeof receivedHash !== 'string' || !/^[0-9a-f]{64}$/.test(receivedHash)) {
4141
+ return _reject(['INVALID_HASH_FORMAT']);
4142
+ }
4143
+ const canonical = canonicalJson(payload);
4144
+ const enc = new TextEncoder().encode(canonical);
4145
+ const computed = bytesToHex(blake3(enc));
4146
+
4147
+ if (computed !== receivedHash) {
4148
+ return _reject(['HASH_MISMATCH_PAYLOAD_TAMPERED']);
4149
+ }
4150
+
4151
+ // ── 2. Timestamp recency ──────────────────────────────────────────────────
4152
+ const now = Date.now();
4153
+ const age = now - payload.timestamp;
4154
+ if (age > maxAgeMs) {
4155
+ valid = false;
4156
+ reasons.push(`PROOF_EXPIRED: age=${Math.round(age / 1000)}s, max=${maxAgeMs / 1000}s`);
4157
+ }
4158
+ if (payload.timestamp > now + clockSkewMs) {
4159
+ valid = false;
4160
+ reasons.push('PROOF_FROM_FUTURE');
4161
+ }
4162
+
4163
+ // ── 3. Nonce freshness ────────────────────────────────────────────────────
4164
+ if (checkNonce) {
4165
+ const nonceOk = await checkNonce(payload.nonce);
4166
+ if (!nonceOk) {
4167
+ valid = false;
4168
+ reasons.push('NONCE_INVALID_OR_REPLAYED');
4169
+ }
4170
+ } else {
4171
+ riskFlags.push('NONCE_FRESHNESS_NOT_CHECKED');
4172
+ }
4173
+
4174
+ // ── 4. Jitter score ───────────────────────────────────────────────────────
4175
+ const jitterScore = payload.classification?.jitterScore ?? 0;
4176
+ if (jitterScore < minJitterScore) {
4177
+ valid = false;
4178
+ reasons.push(`JITTER_SCORE_TOO_LOW: ${jitterScore} < ${minJitterScore}`);
4179
+ }
4180
+
4181
+ // ── 4b. Dynamic threshold (evidence-proportional gate) ──────────────────
4182
+ // The server independently computes the minimum passing score based on how
4183
+ // much evidence the proof contains. The client's dynamicThreshold field is
4184
+ // NEVER trusted — it is only used for logging/auditing.
4185
+ //
4186
+ // Logic: a proof with only 50 iterations and no bio/audio faces a higher bar
4187
+ // (0.62) than a full 200-iteration proof with phased data (0.50).
4188
+ // This makes replay attacks with minimal proofs automatically fail the gate.
4189
+ const serverDynamicMin = computeServerDynamicThreshold(payload);
4190
+
4191
+ // We check the FINAL client score (which includes stage-3 coherence adjustment)
4192
+ // if it was included, otherwise fall back to the base jitterScore.
4193
+ const finalClientScore = payload.classification?.finalScore ?? jitterScore;
4194
+ if (finalClientScore < serverDynamicMin) {
4195
+ valid = false;
4196
+ reasons.push(
4197
+ `DYNAMIC_THRESHOLD_NOT_MET: score=${finalClientScore} < ` +
4198
+ `serverMin=${serverDynamicMin} (evidenceWeight=${
4199
+ _computeEvidenceWeight(payload).toFixed(3)
4200
+ })`
4201
+ );
4202
+ }
4203
+
4204
+ // Surface diagnostic flags from the client's classifier
4205
+ for (const flag of (payload.classification?.flags ?? [])) {
4206
+ if (flag.includes('VM') || flag.includes('FLAT') || flag.includes('SYNTHETIC')) {
4207
+ riskFlags.push(`CLIENT_FLAG:${flag}`);
4208
+ }
4209
+ }
4210
+
4211
+ // Hard override from the client heuristic engine (stage 2).
4212
+ // EJR_PHASE_HARD_KILL fires when the stored entropyJitterRatio is mathematically
4213
+ // inconsistent with the stored cold_QE / hot_QE values — proof of tampering.
4214
+ // A legitimate SDK running on real hardware never triggers this.
4215
+ if (payload.heuristic?.hardOverride === 'vm') {
4216
+ valid = false;
4217
+ reasons.push(
4218
+ `HEURISTIC_HARD_OVERRIDE: stage-2 EJR/QE mathematical contradiction — ` +
4219
+ `${(payload.heuristic.coherenceFlags ?? []).join(', ')}`
4220
+ );
4221
+ }
4222
+
4223
+ // Hard override from the client coherence stage (stage 3).
4224
+ // Second line of defence — catches the same contradiction via a different
4225
+ // code path and also catches the phase-trajectory forgery variant.
4226
+ if (payload.coherence?.hardOverride === 'vm') {
4227
+ valid = false;
4228
+ reasons.push(
4229
+ `COHERENCE_HARD_OVERRIDE: stage-3 analysis detected a mathematical ` +
4230
+ `impossibility — ${(payload.coherence.coherenceFlags ?? []).join(', ')}`
4231
+ );
4232
+ }
4233
+
4234
+ // Surface all coherence flags for risk tracking / audit logs
4235
+ for (const flag of (payload.heuristic?.coherenceFlags ?? [])) {
4236
+ riskFlags.push(`HEURISTIC:${flag}`);
4237
+ }
4238
+ for (const flag of (payload.coherence?.coherenceFlags ?? [])) {
4239
+ riskFlags.push(`COHERENCE:${flag}`);
4240
+ }
4241
+
4242
+ // ── 5. Canvas / WebGL renderer check ──────────────────────────────────────
4243
+ const canvas = payload.signals?.canvas;
4244
+ if (canvas) {
4245
+ if (canvas.isSoftwareRenderer && blockSoftwareRenderer) {
4246
+ valid = false;
4247
+ reasons.push(`SOFTWARE_RENDERER_DETECTED: ${canvas.webglRenderer}`);
4248
+ }
4249
+ const rendererLc = (canvas.webglRenderer ?? '').toLowerCase();
4250
+ for (const pattern of VM_RENDERER_BLOCKLIST) {
4251
+ if (rendererLc.includes(pattern)) {
4252
+ valid = false;
4253
+ reasons.push(`BLOCKLISTED_RENDERER: ${canvas.webglRenderer}`);
4254
+ riskFlags.push(`RENDERER_MATCH:${pattern}`);
4255
+ break;
4256
+ }
4257
+ }
4258
+ if (!canvas.available) {
4259
+ riskFlags.push('CANVAS_UNAVAILABLE');
4260
+ }
4261
+ }
4262
+
4263
+ // ── 6. Bio activity ───────────────────────────────────────────────────────
4264
+ const bio = payload.signals?.bio;
4265
+ if (bio) {
4266
+ if (requireBio && !bio.hasActivity) {
4267
+ valid = false;
4268
+ reasons.push('NO_BIO_ACTIVITY_DETECTED');
4269
+ }
4270
+ if (bio.mouseSampleCount === 0 && bio.keyboardSampleCount === 0) {
4271
+ riskFlags.push('ZERO_BIO_SAMPLES');
4272
+ }
4273
+ // Interference coefficient check: real human+hardware shows measurable correlation
4274
+ if (bio.interferenceCoefficient < -0.3) {
4275
+ riskFlags.push('NEGATIVE_INTERFERENCE_COEFFICIENT');
4276
+ }
4277
+ }
4278
+
4279
+ // ── 7. Internal consistency checks ────────────────────────────────────────
4280
+ const entropy = payload.signals?.entropy;
4281
+ if (entropy) {
4282
+ // CV and jitter score should be directionally consistent
4283
+ if (entropy.timingsCV < 0.01 && jitterScore > 0.7) {
4284
+ riskFlags.push('INCONSISTENCY:LOW_CV_BUT_HIGH_SCORE');
4285
+ }
4286
+ // Timer granularity should not be exactly 0 (no real device has infinite resolution)
4287
+ if (entropy.timerGranularityMs === 0) {
4288
+ riskFlags.push('SUSPICIOUS_ZERO_TIMER_GRANULARITY');
4289
+ }
4290
+ // Extreme thermal patterns inconsistent with score
4291
+ if (entropy.thermalPattern === 'flat' && jitterScore > 0.8) {
4292
+ riskFlags.push('INCONSISTENCY:FLAT_THERMAL_BUT_HIGH_SCORE');
4293
+ }
4294
+ // Hurst exponent way out of range
4295
+ if (entropy.hurstExponent != null) {
4296
+ if (entropy.hurstExponent < 0.2 || entropy.hurstExponent > 0.85) {
4297
+ riskFlags.push(`EXTREME_HURST:${entropy.hurstExponent}`);
4298
+ }
4299
+ }
4300
+ }
4301
+
4302
+ // ── 7b. Cross-signal physics forgery detection ────────────────────────────
4303
+ // BLAKE3 prevents tampering with a payload that was legitimately generated by
4304
+ // the SDK. However, a determined attacker can:
4305
+ // 1. Obtain a valid server nonce
4306
+ // 2. Craft a fake payload with forged statistics
4307
+ // 3. Compute BLAKE3(forgedPayload) themselves (BLAKE3 is public)
4308
+ // 4. Submit { payload: forgedPayload, hash: selfComputedHash }
4309
+ //
4310
+ // These checks detect statistically impossible metric combinations that no
4311
+ // real device would ever produce, catching crafted payloads even though the
4312
+ // hash integrity check passes.
4313
+ //
4314
+ // All three thresholds are set conservatively: they only fire when the
4315
+ // combination is physically IMPOSSIBLE, not just unlikely, to avoid false
4316
+ // positives on unusual-but-legitimate hardware.
4317
+ if (entropy) {
4318
+ const cv = entropy.timingsCV ?? null;
4319
+ const qe = entropy.quantizationEntropy ?? null;
4320
+ const lag1 = entropy.autocorr_lag1 ?? null;
4321
+
4322
+ // Impossibly flat CV + high physical score
4323
+ // Real explanation: CV < 0.015 means timing jitter < 1.5% — hypervisor-flat.
4324
+ // No real-silicon CPU running a WASM matrix multiply achieves this.
4325
+ // A high jitterScore (> 0.65) is physically incompatible with CV < 0.015.
4326
+ if (cv !== null && cv < 0.015 && jitterScore > 0.65) {
4327
+ valid = false;
4328
+ reasons.push(
4329
+ `FORGED_SIGNAL:CV_SCORE_IMPOSSIBLE cv=${cv.toFixed(5)} is hypervisor-flat ` +
4330
+ `but jitterScore=${jitterScore.toFixed(3)} claims physical hardware`
4331
+ );
4332
+ }
4333
+
4334
+ // VM-grade autocorrelation + high physical score
4335
+ // lag1 > 0.70 is a hypervisor scheduler rhythm — unambiguous VM signature.
4336
+ // A device with that level of autocorrelation cannot score > 0.70 on the
4337
+ // physical scale; the jitter classifier would have penalised it heavily.
4338
+ if (lag1 !== null && lag1 > 0.70 && jitterScore > 0.70) {
4339
+ valid = false;
4340
+ reasons.push(
4341
+ `FORGED_SIGNAL:AUTOCORR_SCORE_IMPOSSIBLE lag1=${lag1.toFixed(3)} is VM-level ` +
4342
+ `but jitterScore=${jitterScore.toFixed(3)} claims physical hardware`
4343
+ );
4344
+ }
4345
+
4346
+ // VM-grade quantization entropy + high physical score
4347
+ // QE < 2.0 means timings cluster on a small number of distinct values —
4348
+ // the classic integer-millisecond quantisation of an emulated/virtual timer.
4349
+ // A device producing QE < 2.0 cannot legitimately score > 0.65 as physical.
4350
+ if (qe !== null && qe < 2.0 && jitterScore > 0.65) {
4351
+ valid = false;
4352
+ reasons.push(
4353
+ `FORGED_SIGNAL:QE_SCORE_IMPOSSIBLE qe=${qe.toFixed(3)} bits is VM-level ` +
4354
+ `but jitterScore=${jitterScore.toFixed(3)} claims physical hardware`
4355
+ );
4356
+ }
4357
+ }
4358
+
4359
+ // ── 8. Audio signal check ─────────────────────────────────────────────────
4360
+ const audio = payload.signals?.audio;
4361
+ if (audio?.available) {
4362
+ // Impossibly low jitter CV may indicate a synthetic audio driver
4363
+ if (audio.callbackJitterCV < 0.001) {
4364
+ riskFlags.push('AUDIO_JITTER_TOO_FLAT');
4365
+ }
4366
+ }
4367
+
4368
+ // ── Confidence rating ─────────────────────────────────────────────────────
4369
+ let confidence;
4370
+ if (!valid) {
4371
+ confidence = 'rejected';
4372
+ } else if (riskFlags.length === 0 && jitterScore >= 0.75) {
4373
+ confidence = 'high';
4374
+ } else if (riskFlags.length <= 2 && jitterScore >= 0.60) {
4375
+ confidence = 'medium';
4376
+ } else {
4377
+ confidence = 'low';
4378
+ }
4379
+
4380
+ return {
4381
+ valid,
4382
+ score: jitterScore,
4383
+ confidence,
4384
+ reasons,
4385
+ riskFlags,
4386
+ meta: {
4387
+ receivedAt: now,
4388
+ proofAge: age,
4389
+ jitterScore,
4390
+ canvasRenderer: canvas?.webglRenderer ?? null,
4391
+ bioActivity: bio?.hasActivity ?? false,
4392
+ },
4393
+ };
4394
+ }
4395
+
4396
+ /**
4397
+ * @typedef {object} ValidationResult
4398
+ * @property {boolean} valid
4399
+ * @property {number} score
4400
+ * @property {'high'|'medium'|'low'|'rejected'} confidence
4401
+ * @property {string[]} reasons - human-readable rejection reasons
4402
+ * @property {string[]} riskFlags - non-blocking risk indicators
4403
+ * @property {object} meta
4404
+ */
4405
+
4406
+ // ---------------------------------------------------------------------------
4407
+ // generateNonce (convenience helper for the server challenge flow)
4408
+ // ---------------------------------------------------------------------------
4409
+
4410
+ /**
4411
+ * Generate a cryptographically random 32-byte nonce for the server challenge.
4412
+ * The server should store this nonce with a TTL before issuing it to the client.
4413
+ *
4414
+ * @returns {string} hex nonce
4415
+ */
4416
+ async function generateNonce() {
4417
+ const buf = new Uint8Array(32);
4418
+ if (typeof globalThis.crypto?.getRandomValues === 'function') {
4419
+ // Browser + Node.js ≥ 19
4420
+ globalThis.crypto.getRandomValues(buf);
4421
+ } else {
4422
+ // Node.js 18 — webcrypto is at `crypto.webcrypto`
4423
+ const { webcrypto } = await import('node:crypto');
4424
+ webcrypto.getRandomValues(buf);
4425
+ }
4426
+ return bytesToHex(buf);
4427
+ }
4428
+
4429
+ // ---------------------------------------------------------------------------
4430
+ // Internal helpers
4431
+ // ---------------------------------------------------------------------------
4432
+
4433
+ function _reject(reasons) {
4434
+ return {
4435
+ valid: false,
4436
+ score: 0,
4437
+ confidence: 'rejected',
4438
+ reasons,
4439
+ riskFlags: [],
4440
+ meta: {},
4441
+ };
4442
+ }
4443
+
4444
+ function _computeEvidenceWeight(payload) {
4445
+ const n = payload?.signals?.entropy?.iterations ?? 0;
4446
+ const hasPhases = payload?.heuristic?.entropyJitterRatio != null;
4447
+ const hasBio = payload?.signals?.bio?.hasActivity === true;
4448
+ const hasAudio = payload?.signals?.audio?.available === true;
4449
+ const hasCanvas = payload?.signals?.canvas?.available === true;
4450
+ return Math.min(1.0,
4451
+ Math.min(1.0, n / 200) * 0.65 +
4452
+ (hasPhases ? 0.15 : 0) +
4453
+ (hasBio ? 0.10 : 0) +
4454
+ (hasAudio ? 0.05 : 0) +
4455
+ (hasCanvas ? 0.05 : 0)
4456
+ );
4457
+ }
4458
+
4459
+ /**
4460
+ * @sovereign/pulse
4461
+ *
4462
+ * Physical Turing Test — distinguishes a real consumer device with a human
4463
+ * operator from a sanitised Datacenter VM / AI Instance.
4464
+ *
4465
+ * Usage (client-side):
4466
+ *
4467
+ * import { pulse } from '@sovereign/pulse';
4468
+ *
4469
+ * // 1. Get a server-issued nonce (prevents replay attacks)
4470
+ * const { nonce } = await fetch('/api/pulse-challenge').then(r => r.json());
4471
+ *
4472
+ * // 2. Run the probe (takes ~3-5 seconds)
4473
+ * const { payload, hash } = await pulse({ nonce });
4474
+ *
4475
+ * // 3. Send to your server
4476
+ * const verdict = await fetch('/api/pulse-verify', {
4477
+ * method: 'POST',
4478
+ * body: JSON.stringify({ payload, hash }),
4479
+ * }).then(r => r.json());
4480
+ *
4481
+ * Usage (server-side):
4482
+ *
4483
+ * import { validateProof, generateNonce } from '@sovereign/pulse/validator';
4484
+ *
4485
+ * // Challenge endpoint
4486
+  *   app.get('/api/pulse-challenge', async (req, res) => {
 4487
+  *     const nonce = await generateNonce();
4488
+ * await redis.set(`pulse:nonce:${nonce}`, '1', 'EX', 300); // 5-min TTL
4489
+ * res.json({ nonce });
4490
+ * });
4491
+ *
4492
+ * // Verify endpoint
4493
+ * app.post('/api/pulse-verify', async (req, res) => {
4494
+ * const { payload, hash } = req.body;
4495
+ * const result = await validateProof(payload, hash, {
4496
+ * checkNonce: async (n) => {
4497
+ * const ok = await redis.del(`pulse:nonce:${n}`);
4498
+ * return ok === 1; // true only if nonce existed and was consumed
4499
+ * },
4500
+ * });
4501
+ * res.json(result);
4502
+ * });
4503
+ */
4504
+
4505
+
4506
+ // ---------------------------------------------------------------------------
4507
+ // Hosted API mode — pulse({ apiKey }) with zero server setup
4508
+ // ---------------------------------------------------------------------------
4509
+
4510
+ /**
4511
+ * Run pulse() against the sovereign hosted API.
4512
+ * Fetches nonce, runs probe locally (WASM still on device), submits proof.
4513
+ *
4514
+ * @param {object} opts — same as pulse(), plus apiKey + apiUrl
4515
+ * @returns {Promise<{ payload, hash, result }>}
4516
+ */
4517
+ async function _pulseHosted(opts) {
4518
+ const {
4519
+ apiKey,
4520
+ apiUrl = 'https://api.sovereign.dev',
4521
+ iterations = 200,
4522
+ matrixSize = 64,
4523
+ bioWindowMs = 3_000,
4524
+ phased = true,
4525
+ adaptive = true,
4526
+ adaptiveThreshold = 0.85,
4527
+ requireBio = false,
4528
+ wasmPath,
4529
+ onProgress,
4530
+ verifyOptions = {},
4531
+ } = opts;
4532
+
4533
+ // 1. Fetch nonce from hosted challenge endpoint
4534
+ const challengeRes = await fetch(`${apiUrl}/v1/challenge`, {
4535
+ headers: { 'Authorization': `Bearer ${apiKey}` },
4536
+ });
4537
+ if (!challengeRes.ok) {
4538
+ const body = await challengeRes.json().catch(() => ({}));
4539
+ throw new Error(`[pulse] Challenge failed (${challengeRes.status}): ${body.message ?? 'unknown error'}`);
4540
+ }
4541
+ const { nonce } = await challengeRes.json();
4542
+
4543
+ // 2. Run the local probe (WASM, bio, canvas, audio — all on device)
4544
+ const commitment = await _runProbe({
4545
+ nonce, iterations, matrixSize, bioWindowMs,
4546
+ phased, adaptive, adaptiveThreshold, requireBio,
4547
+ wasmPath, onProgress,
4548
+ });
4549
+
4550
+ // 3. Submit proof to hosted verify endpoint
4551
+ const verifyRes = await fetch(`${apiUrl}/v1/verify`, {
4552
+ method: 'POST',
4553
+ headers: {
4554
+ 'Content-Type': 'application/json',
4555
+ 'Authorization': `Bearer ${apiKey}`,
4556
+ },
4557
+ body: JSON.stringify({
4558
+ payload: commitment.payload,
4559
+ hash: commitment.hash,
4560
+ options: verifyOptions,
4561
+ }),
4562
+ });
4563
+
4564
+ const result = await verifyRes.json();
4565
+
4566
+ // Return commitment + server result for convenience
4567
+ return { ...commitment, result };
4568
+ }
4569
+
4570
+ // ---------------------------------------------------------------------------
4571
+ // pulse() — main entry point
4572
+ // ---------------------------------------------------------------------------
4573
+
4574
+ /**
4575
+ * Run the full @sovereign/pulse probe and return a signed commitment.
4576
+ *
4577
+ * Two modes:
4578
+ * - pulse({ nonce }) — self-hosted (you manage the nonce server)
4579
+ * - pulse({ apiKey }) — hosted API (zero server setup required)
4580
+ *
4581
+ * @param {PulseOptions} opts
4582
+ * @returns {Promise<PulseCommitment>}
4583
+ */
4584
+ async function pulse(opts = {}) {
4585
+ // ── Hosted API mode ────────────────────────────────────────────────────────
4586
+ if (opts.apiKey) {
4587
+ return _pulseHosted(opts);
4588
+ }
4589
+
4590
+ // ── Self-hosted mode ───────────────────────────────────────────────────────
4591
+ const { nonce } = opts;
4592
+ if (!nonce || typeof nonce !== 'string') {
4593
+ throw new Error(
4594
+ '@sovereign/pulse: opts.nonce is required (self-hosted), or pass opts.apiKey for zero-config hosted mode.'
4595
+ );
4596
+ }
4597
+
4598
+ return _runProbe(opts);
4599
+ }
4600
+
4601
+ /**
4602
+ * Internal probe runner — shared between self-hosted and hosted API modes.
4603
+ * @private
4604
+ */
4605
+ async function _runProbe(opts) {
4606
+ const {
4607
+ nonce,
4608
+ timeout = 8_000,
4609
+ iterations = 200,
4610
+ matrixSize = 64,
4611
+ bioWindowMs = 3_000,
4612
+ phased = true,
4613
+ adaptive = true,
4614
+ adaptiveThreshold = 0.85,
4615
+ requireBio = false,
4616
+ wasmPath,
4617
+ onProgress,
4618
+ } = opts;
4619
+
4620
+ _emit(onProgress, 'start');
4621
+
4622
+ // ── Phase 1: Start bio collector immediately (collects events over time) ──
4623
+ const bio = new BioCollector();
4624
+ bio.start();
4625
+
4626
+ // ── Phase 2: Parallel collection ──────────────────────────────────────────
4627
+ const raceTimeout = new Promise((_, reject) =>
4628
+ setTimeout(() => reject(new Error('pulse() timed out')), timeout)
4629
+ );
4630
+
4631
+ let entropyResult, canvasResult, audioResult;
4632
+
4633
+ try {
4634
+ [entropyResult, canvasResult, audioResult] = await Promise.race([
4635
+ Promise.all([
4636
+ collectEntropy({
4637
+ iterations, matrixSize, phased, adaptive, adaptiveThreshold, wasmPath,
4638
+ onBatch: (meta) => _emit(onProgress, 'entropy_batch', meta),
4639
+ }).then(r => { _emit(onProgress, 'entropy_done'); return r; }),
4640
+ collectCanvasFingerprint()
4641
+ .then(r => { _emit(onProgress, 'canvas_done'); return r; }),
4642
+ collectAudioJitter({ durationMs: Math.min(bioWindowMs, 2_000) })
4643
+ .then(r => { _emit(onProgress, 'audio_done'); return r; }),
4644
+ ]),
4645
+ raceTimeout,
4646
+ ]);
4647
+ } catch (err) {
4648
+ bio.stop();
4649
+ throw err;
4650
+ }
4651
+
4652
+ // ── Phase 3: Bio snapshot ─────────────────────────────────────────────────
4653
+ const bioElapsed = Date.now() - entropyResult.collectedAt;
4654
+ const bioRemain = Math.max(0, bioWindowMs - bioElapsed);
4655
+ if (bioRemain > 0) await _sleep(bioRemain);
4656
+
4657
+ bio.stop();
4658
+ const bioSnapshot = bio.snapshot(entropyResult.timings);
4659
+
4660
+ if (requireBio && !bioSnapshot.hasActivity) {
4661
+ throw new Error('@sovereign/pulse: no bio activity detected (requireBio=true)');
4662
+ }
4663
+
4664
+ _emit(onProgress, 'bio_done');
4665
+
4666
+ // ── Phase 4: Jitter analysis ───────────────────────────────────────────────
4667
+ const jitterAnalysis = classifyJitter(entropyResult.timings, {
4668
+ autocorrelations: entropyResult.autocorrelations,
4669
+ });
4670
+
4671
+ _emit(onProgress, 'analysis_done');
4672
+
4673
+ // ── Phase 5: Build proof & commitment ─────────────────────────────────────
4674
+ const payload = buildProof({
4675
+ entropy: entropyResult,
4676
+ jitter: jitterAnalysis,
4677
+ bio: bioSnapshot,
4678
+ canvas: canvasResult,
4679
+ audio: audioResult,
4680
+ nonce,
4681
+ });
4682
+
4683
+ const commitment = buildCommitment(payload);
4684
+
4685
+ _emit(onProgress, 'complete', {
4686
+ score: jitterAnalysis.score,
4687
+ confidence: _scoreToLabel(jitterAnalysis.score),
4688
+ flags: jitterAnalysis.flags,
4689
+ });
4690
+
4691
+ return commitment;
4692
+ }
4693
+
4694
+ // ---------------------------------------------------------------------------
4695
+ // Internal helpers
4696
+ // ---------------------------------------------------------------------------
4697
+
4698
+ function _sleep(ms) {
4699
+ return new Promise(r => setTimeout(r, ms));
4700
+ }
4701
+
4702
+ function _emit(fn, stage, meta = {}) {
4703
+ if (typeof fn === 'function') {
4704
+ try { fn(stage, meta); } catch (_) {}
4705
+ }
4706
+ }
4707
+
4708
+ function _scoreToLabel(score) {
4709
+ if (score >= 0.75) return 'high';
4710
+ if (score >= 0.55) return 'medium';
4711
+ if (score >= 0.35) return 'low';
4712
+ return 'rejected';
4713
+ }
4714
+
4715
+ /**
4716
+ * pulse_core — pure-JavaScript probe engine
4717
+ *
4718
+ * This module ships the entropy probe as portable JS so the package works
4719
+ * out-of-the-box without a Rust toolchain. When a compiled .wasm binary is
4720
+ * present (dropped in via `build.sh`) this file is replaced by the wasm-pack
4721
+ * output and the native engine runs instead.
4722
+ *
4723
+ * Physics model
4724
+ * ─────────────
4725
+ * Real silicon: DRAM refresh cycles, branch-predictor misses, and L3-cache
4726
+ * evictions inject sub-microsecond noise into any tight compute loop.
4727
+ * Hypervisors virtualise the TSC and smooth those interrupts out, leaving
4728
+ * a near-flat timing distribution that our QE/EJR checks catch.
4729
+ *
4730
+ * The JS loop below is a faithful port of the Rust matrix-multiply probe:
4731
+ * same work unit (N×N DGEMM-style loop), same checksum accumulation to
4732
+ * prevent dead-code elimination, same resolution micro-probe.
4733
+ */
4734
+
4735
+ /* ─── clock ─────────────────────────────────────────────────────────────── */
4736
+
4737
+ const _now = (typeof performance !== 'undefined' && typeof performance.now === 'function')
4738
+ ? () => performance.now()
4739
+ : (() => {
4740
+ // Node.js fallback: process.hrtime.bigint() → milliseconds
4741
+ const _hr = process.hrtime.bigint;
4742
+ return () => Number(_hr()) / 1_000_000;
4743
+ })();
4744
+
4745
+ /* ─── init (no-op for the JS engine) ───────────────────────────────────── */
4746
+
4747
+ /**
4748
+ * Initialise the engine. When a real .wasm binary is supplied the wasm-pack
4749
+ * glue calls WebAssembly.instantiateStreaming here. The JS engine is already
4750
+ * "compiled", so we return immediately.
4751
+ *
4752
+ * @param {string|URL|Request|BufferSource|WebAssembly.Module} [_source]
4753
+ * @returns {Promise<void>}
4754
+ */
4755
+ async function init(_source) {
4756
+ // JS engine is ready synchronously — nothing to stream or compile.
4757
+ }
4758
+
4759
+ /* ─── run_entropy_probe ─────────────────────────────────────────────────── */
4760
+
4761
+ /**
4762
+ * Run N iterations of a matrix-multiply work unit and record wall-clock time
4763
+ * per iteration. The distribution of those times is what the heuristic
4764
+ * engine analyses.
4765
+ *
4766
+ * @param {number} iterations – number of timing samples to collect
4767
+ * @param {number} matrixSize – N for the N×N multiply (default 64)
4768
+ * @returns {{ timings: Float64Array, checksum: number, resolution_probe: Float64Array }}
4769
+ */
4770
/**
 * CPU entropy probe: times `iterations` repeated N×N matrix multiplies
 * and returns the per-iteration millisecond deltas plus a 32-sample
 * back-to-back timestamp burst used to estimate timer granularity.
 *
 * @param {number} iterations - number of timed multiply rounds
 * @param {number} matrixSize - N for the N×N multiply (default 64)
 * @returns {{ timings: Float64Array, checksum: number, resolution_probe: Float64Array }}
 */
function run_entropy_probe(iterations, matrixSize = 64) {
  const N = matrixSize | 0;

  // Persistent working matrices — allocated once per probe to avoid GC noise.
  const A = new Float64Array(N * N);
  const B = new Float64Array(N * N);
  const C = new Float64Array(N * N);

  // Seed matrices with pseudo-random data via xorshift32, seeded from the
  // clock (deterministic within a call, different across runs). Note the
  // UNSIGNED right shift: canonical xorshift32 is (<<13, >>>17, <<5); the
  // previous signed `>>` sign-extended negative states and weakened the
  // generator's distribution.
  let seed = (_now() * 1e6) | 0 || 0xdeadbeef;
  const xr = () => {
    seed ^= seed << 13;
    seed ^= seed >>> 17; // fixed: was signed `>>`
    seed ^= seed << 5;
    return (seed >>> 0) / 4294967296; // map to [0, 1)
  };
  for (let i = 0; i < N * N; i++) {
    A[i] = xr();
    B[i] = xr();
  }

  const timings = new Float64Array(iterations);
  const resolution_probe = new Float64Array(32);
  let checksum = 0;

  for (let iter = 0; iter < iterations; iter++) {
    // Zero accumulator each round (realistic cache pressure).
    C.fill(0);

    const t0 = _now();

    // N×N matrix multiply: C = A · B (ikj loop order for cache friendliness)
    for (let i = 0; i < N; i++) {
      const rowA = i * N;
      const rowC = i * N;
      for (let k = 0; k < N; k++) {
        const aik = A[rowA + k];
        const rowBk = k * N;
        for (let j = 0; j < N; j++) {
          C[rowC + j] += aik * B[rowBk + j];
        }
      }
    }

    const t1 = _now();
    timings[iter] = t1 - t0;

    // Accumulate one element so the compiler cannot eliminate the work.
    checksum += C[0];
  }

  // Resolution micro-probe: fire 32 back-to-back timestamps.
  // The minimum non-zero delta reveals timer granularity.
  for (let i = 0; i < resolution_probe.length; i++) {
    resolution_probe[i] = _now();
  }

  return { timings, checksum, resolution_probe };
}
4822
+
4823
+ /* ─── run_memory_probe ──────────────────────────────────────────────────── */
4824
+
4825
/**
 * Sequential read/write bandwidth probe over a large buffer.
 * Memory latency variance is a secondary signal (NUMA, DRAM refresh).
 *
 * @param {number} memSizeKb - buffer size in kibibytes
 * @param {number} memIterations - number of timed passes over the buffer
 * @returns {{ timings: Float64Array, checksum: number }}
 */
function run_memory_probe(memSizeKb = 512, memIterations = 50) {
  const len = (memSizeKb * 1024 / 8) | 0; // 64-bit elements
  const buf = new Float64Array(len);
  const timings = new Float64Array(memIterations);
  let checksum = 0;

  // Warm-up pass (fills TLB, avoids first-access bias)
  for (let i = 0; i < len; i++) buf[i] = i;

  for (let iter = 0; iter < memIterations; iter++) {
    const t0 = _now();
    // Sequential read-modify-write
    for (let i = 0; i < len; i++) buf[i] = buf[i] * 1.0000001;
    const t1 = _now();

    timings[iter] = t1 - t0;
    // Fold the LAST element into the checksum so the pass cannot be
    // dead-code-eliminated. The previous code read buf[0], which is
    // seeded with 0 and stays 0 under multiplication — a constant-zero
    // checksum defeats the anti-elimination guard. Guard len === 0 so
    // an empty buffer doesn't inject NaN via buf[-1].
    checksum += len > 0 ? buf[len - 1] : 0;
  }

  return { timings, checksum };
}
4854
+
4855
+ /* ─── compute_autocorrelation ───────────────────────────────────────────── */
4856
+
4857
/**
 * Pearson autocorrelation for lags 1..maxLag. O(n·maxLag).
 *
 * @param {ArrayLike<number>} data
 * @param {number} maxLag
 * @returns {Float64Array} length = maxLag, index 0 = lag-1. Entries are 0
 *   for degenerate input (empty, near-constant) and for lags >= data.length.
 */
function compute_autocorrelation(data, maxLag) {
  const n = data.length;
  const result = new Float64Array(maxLag);
  // Empty input previously flowed NaN (0/0 mean) through every entry.
  if (n === 0) return result;

  let mean = 0;
  for (let i = 0; i < n; i++) mean += data[i];
  mean /= n;

  let variance = 0;
  for (let i = 0; i < n; i++) variance += (data[i] - mean) ** 2;
  variance /= n;

  // NaN-safe degenerate check: `!(x > eps)` is also true for NaN,
  // unlike the previous `x < eps` comparison.
  if (!(variance > 1e-14)) return result; // degenerate — all identical

  for (let lag = 1; lag <= maxLag; lag++) {
    // At lag >= n the divisor (n - lag) is <= 0, which previously
    // produced a NaN entry at lag === n; leave those entries 0 instead.
    if (lag >= n) break;
    let cov = 0;
    for (let i = 0; i < n - lag; i++) {
      cov += (data[i] - mean) * (data[i + lag] - mean);
    }
    result[lag - 1] = cov / ((n - lag) * variance);
  }

  return result;
}
4888
+
4889
// Bundler-generated (Rollup-style) frozen namespace object exposing the raw
// probe primitives as the `pulse_core` module. `init` is defined elsewhere
// in this bundle and serves as the namespace's default export; `__proto__:
// null` strips the prototype so the namespace cannot be polluted.
var pulse_core = /*#__PURE__*/Object.freeze({
  __proto__: null,
  compute_autocorrelation: compute_autocorrelation,
  default: init,
  run_entropy_probe: run_entropy_probe,
  run_memory_probe: run_memory_probe
});

// Public ESM surface of the bundle; these names are defined in earlier
// (out-of-view) sections of this file.
export { Fingerprint, detectProvider, generateNonce, pulse, runHeuristicEngine, validateProof };
//# sourceMappingURL=pulse.esm.js.map