deaf-intelligence 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,499 @@
1
+ /**
2
+ * Constrained Temporal Disaggregation with Continuity
3
+ *
4
+ * Solves: given N days split into EST [0..B-1] and S4A_PREV [B..N-1],
5
+ * produce daily values where:
6
+ * - sum(EST) = estTotal
7
+ * - sum(S4A_PREV) = prevAgg
8
+ * - v[B-1] ≈ v[B] (no jump at boundary)
9
+ * - v[N-1] ≈ firstS4aVal (anchor at end)
10
+ * - release spike preserved (Denton proportional)
11
+ * - all values >= 0
12
+ *
13
+ * Method: Proportional Denton scaling + boundary blending + deficit absorption.
14
+ *
15
+ * Classical Denton/Chow-Lin handle EITHER sum OR endpoint constraints, not both.
16
+ * (Denton 1971, Dagum & Cholette 2006, Chow-Lin 1971 — all use a single
17
+ * aggregation matrix encoding one constraint type.)
18
+ *
19
+ * We solve the custom QP with dual constraints:
20
+ * min (y - s)' W (y - s) [shape preservation]
21
+ * s.t. sum(y) = T [sum constraint]
22
+ * y[n] = E [endpoint constraint]
23
+ * y >= 0 [non-negativity]
24
+ *
25
+ * Practical implementation:
26
+ * 1. Build indicator shape s (spike + decay + ramp)
27
+ * 2. Proportional scale: y = s × (T / sum(s)) → preserves spike ratios
28
+ * 3. Blend last K days toward endpoint E → smooth boundary
29
+ * 4. Absorb sum deficit into interior → exact sum
30
+ *
31
+ * References:
32
+ * - Denton (1971), "Adjustment of Monthly or Quarterly Series to Annual Totals"
33
+ * - Dagum & Cholette (2006), "Benchmarking, Temporal Distribution, and Reconciliation"
34
+ * - Dergachev et al. (2019), "Analyzing the Spotify Top 200 Through a Point Process Lens"
35
+ * - tempdisagg R package (Sax & Steiner, 2013)
36
+ */
37
+ // ─── Shape Builder ───
38
/**
 * Build the indicator shape: a linear ramp from startVal to endVal with an
 * optional transient release spike (exponential decay over 14 days).
 *
 * @param {number} n              total days in the window
 * @param {number} startVal       ramp value on the first active day
 * @param {number} endVal         ramp value on the last day
 * @param {number} releaseDayIdx  release day index, or < 0 for "no release here"
 * @param {number} spikeA         spike amplitude on the release day
 * @param {number} spikeLambda    exponential decay rate of the spike
 * @returns {number[]} non-negative shape of length n
 */
function buildShape(n, startVal, endVal, releaseDayIdx, spikeA, spikeLambda) {
    // When the release falls inside this window, days before it stay at zero
    // and the ramp spans only the active portion — so downstream proportional
    // scaling and sum constraints apply solely to days the track existed,
    // with no post-hoc zeroing that would break the sums.
    const firstActive = releaseDayIdx >= 0 && releaseDayIdx < n ? releaseDayIdx : 0;
    const span = n - firstActive;
    return Array.from({ length: n }, (_, day) => {
        if (day < firstActive)
            return 0;
        // Transient spike: active for 14 days starting at the release day.
        const inSpikeWindow =
            releaseDayIdx >= 0 && day >= releaseDayIdx && day - releaseDayIdx < 14;
        const transient = inSpikeWindow
            ? spikeA * Math.exp(-spikeLambda * (day - releaseDayIdx))
            : 0;
        // Linear ramp over the active portion (degenerate span pins to endVal).
        const frac = span > 1 ? (day - firstActive) / (span - 1) : 1;
        const ramp = startVal + (endVal - startVal) * frac;
        return Math.max(0, ramp + transient);
    });
}
66
+ // ─── Proportional Denton Scale + Boundary Blend ───
67
/**
 * Scale shape to target sum while blending toward endpoint(s).
 *
 * 1. Proportional scale: y = shape × (target / sum(shape))
 *    - Preserves spike/baseline ratios (Denton proportional principle)
 * 2. Blend toward endpoints over K days (linear interpolation)
 *    - Smooth boundaries without distorting interior
 * 3. Absorb sum deficit into interior (proportional adjustment)
 *    - Exact sum after correction
 *
 * @param {number[]} shape  non-negative indicator shape
 * @param {number} target   required sum of the output
 * @param {{blendK?: number, startVal?: number, endVal?: number}} [opts]
 * @returns {number[]} non-negative values of shape.length, summing to ≈target
 */
function scaleToTarget(shape, target, opts = {}) {
    const n = shape.length;
    if (n === 0)
        return [];
    if (target <= 0)
        return new Array(n).fill(0);
    // Count trailing/leading active days for adaptive blend sizing
    let trailingActive = 0;
    for (let i = n - 1; i >= 0; i--) {
        if (shape[i] > 0)
            trailingActive++;
        else
            break;
    }
    let leadingActive = 0;
    for (let i = 0; i < n; i++) {
        if (shape[i] > 0)
            leadingActive++;
        else
            break;
    }
    const rawK = opts.blendK ?? 14;
    // Blend max 20% of active period — preserves shape for short periods
    const endK = Math.min(rawK, Math.floor(n / 3), Math.max(1, Math.floor(trailingActive / 5)));
    const startK = Math.min(rawK, Math.floor(n / 3), Math.max(1, Math.floor(leadingActive / 5)));
    // Step 1: Proportional Denton scale — preserves spike/baseline ratios
    const result = [...shape];
    let shapeSum = 0;
    for (const v of result)
        shapeSum += v;
    if (shapeSum > 0) {
        const scale = target / shapeSum;
        for (let i = 0; i < n; i++)
            result[i] = Math.max(0, result[i] * scale);
    }
    else {
        // Degenerate all-zero shape: fall back to a flat distribution.
        const perDay = target / n;
        for (let i = 0; i < n; i++)
            result[i] = perDay;
    }
    // Step 2: Blend toward endpoints (smooth float transitions)
    if (opts.startVal != null && startK > 0) {
        const sv = opts.startVal;
        for (let i = 0; i < startK; i++) {
            // BUG FIX: the weight was `1 - (i + 1) / startK`, which is 0 at
            // i = 0 when startK is 1 and never reaches 1, so the boundary day
            // was never actually pinned to sv — unlike the end blend below,
            // which reaches w = 1 on the final day. `1 - i / startK` mirrors
            // the end blend: w = 1 at the boundary, decaying to 1/startK.
            const w = 1 - i / startK;
            result[i] = result[i] * (1 - w) + sv * w;
        }
    }
    if (opts.endVal != null && endK > 0) {
        const ev = opts.endVal;
        for (let i = 0; i < endK; i++) {
            const w = (i + 1) / endK;      // ramps up to exactly 1 on the last day
            const idx = n - endK + i;
            result[idx] = result[idx] * (1 - w) + ev * w;
        }
    }
    // Step 3: Absorb float deficit into interior (up to 8 corrective passes;
    // the blended boundary days are excluded so the anchors stay put).
    const intStart = opts.startVal != null ? startK : 0;
    const intEnd = opts.endVal != null ? n - endK : n;
    for (let pass = 0; pass < 8; pass++) {
        let newSum = 0;
        for (const v of result)
            newSum += v;
        const deficit = target - newSum;
        if (Math.abs(deficit) < 0.5)
            break;
        if (intEnd > intStart) {
            let intSum = 0;
            for (let i = intStart; i < intEnd; i++)
                intSum += result[i];
            if (intSum > 0) {
                const adj = (intSum + deficit) / intSum;
                for (let i = intStart; i < intEnd; i++)
                    result[i] = Math.max(0, result[i] * adj);
            }
        }
    }
    // Final safety clamp against negative endpoint blends.
    for (let i = 0; i < n; i++)
        result[i] = Math.max(0, result[i]);
    return result;
}
158
+ // ─── Core Algorithm ───
159
/**
 * Disaggregate two adjacent aggregate totals into one continuous daily series.
 *
 * Days [0..B-1] ("EST") must sum to estTotal; days [B..N-1] ("S4A_PREV") must
 * sum to prevAgg; the last day is anchored near firstS4aVal; the boundary
 * B-1 → B should have no visible jump. See the file header for the method.
 *
 * @param {object} input
 * @param {number} input.totalDays      N — total days covered
 * @param {number} input.boundaryIdx    B — first day of the S4A_PREV segment
 * @param {number} input.estTotal       required sum of days [0..B-1]
 * @param {number} input.prevAgg        required sum of days [B..N-1]
 * @param {number} input.firstS4aVal    anchor value for the final day
 * @param {number} input.releaseDayIdx  release day (absolute index), or < 0
 * @param {number} input.spikeA         release-spike amplitude
 * @param {number} input.spikeLambda    release-spike decay rate
 * @param {number} input.baseline       ramp start value for the shapes
 * @returns {{daily: number[], boundaryValue: number, estSum: number,
 *            prevSum: number, maxError: number}}
 */
export function constrainedDisaggregate(input) {
    const { totalDays, boundaryIdx, estTotal, prevAgg, firstS4aVal, releaseDayIdx, spikeA, spikeLambda, baseline } = input;
    const B = boundaryIdx;
    const N = totalDays;
    const prevDays = N - B;
    // Edge: no EST period — the whole window is S4A_PREV.
    if (B <= 0) {
        const shape = buildShape(N, baseline, firstS4aVal, releaseDayIdx, spikeA, spikeLambda);
        const daily = scaleToTarget(shape, prevAgg, { endVal: firstS4aVal });
        let sum = 0;
        for (const v of daily)
            sum += v;
        return { daily, boundaryValue: daily[0], estSum: 0, prevSum: sum, maxError: Math.abs(sum - prevAgg) };
    }
    // Edge: no S4A_PREV — the whole window is EST.
    if (prevDays <= 0) {
        const shape = buildShape(B, baseline, firstS4aVal, releaseDayIdx, spikeA, spikeLambda);
        const daily = scaleToTarget(shape, estTotal, { endVal: firstS4aVal });
        let sum = 0;
        for (const v of daily)
            sum += v;
        return { daily, boundaryValue: daily[B - 1], estSum: sum, prevSum: 0, maxError: Math.abs(sum - estTotal) };
    }
    // estTotal <= 0: zero EST, solve S4A_PREV only
    if (estTotal <= 0) {
        const daily = new Array(N).fill(0);
        // Translate the release day into the S4A_PREV segment's local index.
        const releaseInPrev = releaseDayIdx >= B ? releaseDayIdx - B : -1;
        const prevShape = buildShape(prevDays, baseline, firstS4aVal, releaseInPrev, spikeA, spikeLambda);
        const prevScaled = scaleToTarget(prevShape, prevAgg, { endVal: firstS4aVal });
        let prevSum = 0;
        for (let t = 0; t < prevDays; t++) {
            daily[B + t] = prevScaled[t];
            prevSum += prevScaled[t];
        }
        return {
            daily, boundaryValue: 0, estSum: 0, prevSum,
            maxError: Math.abs(prevSum - prevAgg),
        };
    }
    // ── Main case: both EST and S4A_PREV ──
    //
    // Find v_b (boundary value) iteratively:
    //   1. Start with harmonic mean of daily averages
    //   2. Build shapes, scale, check boundary match
    //   3. Adjust v_b toward the mean of both sides' boundary values
    //   4. Converges in 3-5 iterations typically
    const avgEst = estTotal / B;
    const avgPrev = prevAgg / prevDays;
    // Harmonic mean biases the starting guess toward the smaller side, which
    // keeps the initial candidate conservative.
    let vb = (avgEst + avgPrev) > 0
        ? 2 * avgEst * avgPrev / (avgEst + avgPrev) // harmonic mean
        : Math.min(avgEst, avgPrev);
    if (!isFinite(vb) || vb <= 0)
        vb = Math.min(avgEst, avgPrev);
    // Release day expressed in each segment's local index (-1 = not in segment).
    const releaseInEst = releaseDayIdx >= 0 && releaseDayIdx < B ? releaseDayIdx : -1;
    const releaseInPrev = releaseDayIdx >= B ? releaseDayIdx - B : -1;
    let bestDaily = new Array(N).fill(0);
    let bestEstSum = 0, bestPrevSum = 0;
    for (let iter = 0; iter < 15; iter++) {
        // EST ramps baseline → vb; S4A_PREV ramps vb → anchor, so both sides
        // are built to meet at the candidate boundary value.
        const estShape = buildShape(B, baseline, vb, releaseInEst, spikeA, spikeLambda);
        const prevShape = buildShape(prevDays, vb, firstS4aVal, releaseInPrev, spikeA, spikeLambda);
        const estScaled = scaleToTarget(estShape, estTotal, { endVal: vb });
        const prevScaled = scaleToTarget(prevShape, prevAgg, { startVal: vb, endVal: firstS4aVal });
        // Copy into daily
        const daily = new Array(N).fill(0);
        let estSum = 0, prevSum = 0;
        for (let t = 0; t < B; t++) {
            daily[t] = estScaled[t];
            estSum += estScaled[t];
        }
        for (let t = 0; t < prevDays; t++) {
            daily[B + t] = prevScaled[t];
            prevSum += prevScaled[t];
        }
        // Keep the latest candidate unconditionally; the loop only refines vb.
        bestDaily = daily;
        bestEstSum = estSum;
        bestPrevSum = prevSum;
        // Check boundary: EST's last value vs S4A_PREV's first value
        const estEnd = estScaled[B - 1];
        const prevStart = prevScaled[0];
        const gap = Math.abs(estEnd - prevStart);
        if (gap < 1)
            break;
        // Steer v_b: the blend ensures estEnd ≈ vb and prevStart ≈ vb,
        // but if the scale factors differ greatly, there's residual gap.
        // Adjust vb to compensate.
        const newVb = (estEnd + prevStart) / 2;
        if (Math.abs(newVb - vb) < 0.5)
            break;
        vb = Math.max(0.01, newVb);
    }
    return {
        daily: bestDaily,
        boundaryValue: vb,
        estSum: bestEstSum,
        prevSum: bestPrevSum,
        maxError: Math.max(Math.abs(bestEstSum - estTotal), Math.abs(bestPrevSum - prevAgg)),
    };
}
257
// ─── Largest Remainder rounding (Hamilton's method) ───
//
// Same algorithm as electoral apportionment (seat allocation).
// Proven optimal for all L^p norms (Cont & Heidari, arXiv:1501.00014).
//
// 1. Floor all values
// 2. Compute shortfall = target - sum(floors)
// 3. Sort by fractional part descending
// 4. Round up the top `shortfall` entries
//
// Guarantees: sum = target EXACTLY. Each value = floor or ceil (max ±1 error).
/**
 * Legacy wrapper kept for callers of the old diffusion-rounding API.
 * The target is the rounded total of the inputs; the actual distribution
 * is delegated to the Largest Remainder core.
 *
 * @param {number[]} values  real-valued daily quotas
 * @returns {number[]} integers summing to round(sum(values))
 */
export function roundWithDiffusion(values) {
    const roundedTotal = Math.round(values.reduce((a, b) => a + b, 0));
    return largestRemainderRound(values, roundedTotal);
}
273
/**
 * Largest Remainder rounding with a pinned anchor.
 * Pins values[anchorIdx] = anchorVal, then distributes (target - anchorVal)
 * across the remaining positions using Hamilton's method.
 *
 * Guarantees:
 * - result[anchorIdx] = anchorVal (exact)
 * - sum(result) = target (exact)
 * - each result[i] = floor(x[i]) or ceil(x[i])
 *
 * @param {number[]} values       real-valued quotas
 * @param {number} anchorIdx      position to pin
 * @param {number} anchorVal      exact integer value for the pinned position
 * @param {number} [explicitTarget]  overall integer target; defaults to
 *                                   round(sum(values))
 * @returns {number[]} integer allocation with the anchor pinned
 */
export function roundWithAnchor(values, anchorIdx, anchorVal, explicitTarget) {
    const n = values.length;
    const target = explicitTarget ?? Math.round(values.reduce((a, b) => a + b, 0));
    // Whatever the anchor doesn't take is shared by every other position.
    const remaining = target - anchorVal;
    const otherIndices = [];
    const otherValues = [];
    values.forEach((v, i) => {
        if (i !== anchorIdx) {
            otherValues.push(v);
            otherIndices.push(i);
        }
    });
    // Rescale the others so they sum to `remaining` while preserving ratios;
    // a zero-sum group falls back to an even split.
    const otherSum = otherValues.reduce((a, b) => a + b, 0);
    const rescaled = otherSum > 0
        ? otherValues.map(v => v * remaining / otherSum)
        : otherValues.map(() => remaining / otherValues.length);
    const rounded = largestRemainderRound(rescaled, remaining);
    // Reassemble in original positions with the anchor pinned exactly.
    const result = new Array(n).fill(0);
    result[anchorIdx] = anchorVal;
    otherIndices.forEach((origIdx, k) => {
        result[origIdx] = rounded[k];
    });
    return result;
}
312
/**
 * Core Largest Remainder (Hamilton) algorithm.
 * Distributes integer `target` across positions proportional to `values`.
 *
 * @param {number[]} values  real-valued quotas (negatives are clamped to 0)
 * @param {number} target    integer total to distribute
 * @returns {number[]} non-negative integers summing to `target` whenever that
 *   is reachable without going negative
 */
function largestRemainderRound(values, target) {
    const n = values.length;
    if (n === 0)
        return [];
    if (target <= 0)
        return new Array(n).fill(0);
    // Floor everything (clamped so we never emit a negative count)
    const floors = values.map(v => Math.max(0, Math.floor(v)));
    let floorSum = 0;
    for (const f of floors)
        floorSum += f;
    // How many +1s needed?
    let shortfall = target - floorSum;
    if (shortfall <= 0) {
        // Overshot — remove units, taking first from entries with the
        // smallest fractional parts (those closest to rounding down).
        const indexed = values.map((v, i) => ({ frac: v - Math.floor(v), i }));
        indexed.sort((a, b) => a.frac - b.frac);
        const result = [...floors];
        let excess = -shortfall;
        // BUG FIX: the original made a single pass over `indexed`, which left
        // sum(result) > target whenever most entries were already 0
        // (e.g. values=[3, 0], target=1 returned [2, 0]). Sweep repeatedly
        // until the excess is gone or nothing can be decremented.
        while (excess > 0) {
            let removedAny = false;
            for (let k = 0; k < indexed.length && excess > 0; k++) {
                if (result[indexed[k].i] > 0) {
                    result[indexed[k].i]--;
                    excess--;
                    removedAny = true;
                }
            }
            if (!removedAny)
                break; // all zeros — target unreachable without negatives
        }
        return result;
    }
    // Sort by fractional part DESCENDING (biggest fractions get +1);
    // ties broken by larger floor for a deterministic order.
    const indexed = values.map((v, i) => ({
        frac: v - Math.floor(v),
        floor: Math.floor(v),
        i,
    }));
    indexed.sort((a, b) => b.frac - a.frac || b.floor - a.floor);
    const result = [...floors];
    // BUG FIX: the original capped the +1 loop at n entries, so when
    // shortfall > n (possible after negative clamping, or a target far above
    // sum(floors)) the result under-filled. Distribute round-robin in
    // remainder order until the sum is exact.
    let k = 0;
    while (shortfall > 0) {
        result[indexed[k % n].i]++;
        shortfall--;
        k++;
    }
    return result;
}
357
// ─── Self-test ───
// Runs only when this module is executed directly (file URL matches argv[1],
// or the script name matches the pre-compilation .ts filename).
if (import.meta.url === `file://${process.argv[1]}` || process.argv[1]?.endsWith("constrained-disagg.ts")) {
    // Run one scenario through the solver and print constraint diagnostics:
    // segment sums vs targets, boundary jump, end anchor error, spike
    // preservation ratio, and any negative outputs.
    function verify(label, input) {
        const r = constrainedDisaggregate(input);
        const B = input.boundaryIdx;
        const N = input.totalDays;
        // Recompute both segment sums from the returned series itself.
        let es = 0, ps = 0;
        for (let i = 0; i < B; i++)
            es += r.daily[i];
        for (let i = B; i < N; i++)
            ps += r.daily[i];
        // Boundary continuity: jump size relative to the local average level.
        const jump = Math.abs(r.daily[B - 1] - r.daily[B]);
        const avgBound = (r.daily[B - 1] + r.daily[B]) / 2;
        const relJump = avgBound > 0 ? jump / avgBound * 100 : 0;
        const anchorErr = Math.abs(r.daily[N - 1] - input.firstS4aVal);
        const negCount = r.daily.filter(v => v < -0.01).length;
        // Find spike (max in first 30 days after release)
        let spikeVal = 0, spikeDay = -1;
        const searchStart = Math.max(0, input.releaseDayIdx);
        for (let i = searchStart; i < Math.min(searchStart + 30, N); i++) {
            if (r.daily[i] > spikeVal) {
                spikeVal = r.daily[i];
                spikeDay = i;
            }
        }
        // Baseline (average of days 60-90 after release or last 30 days)
        const baseStart = Math.min(searchStart + 60, N - 30);
        let baseSum = 0, baseCount = 0;
        for (let i = baseStart; i < Math.min(baseStart + 30, N); i++) {
            if (r.daily[i] > 0) {
                baseSum += r.daily[i];
                baseCount++;
            }
        }
        const baseAvg = baseCount > 0 ? baseSum / baseCount : 0;
        const spikeRatio = baseAvg > 0 ? spikeVal / baseAvg : 0;
        console.log(`\n=== ${label} ===`);
        console.log(`EST: sum=${es.toFixed(0)} target=${input.estTotal} err=${Math.abs(es - input.estTotal).toFixed(1)}`);
        console.log(`PREV: sum=${ps.toFixed(0)} target=${input.prevAgg} err=${Math.abs(ps - input.prevAgg).toFixed(1)}`);
        console.log(`Boundary: day${B - 1}=${r.daily[B - 1].toFixed(1)} → day${B}=${r.daily[B].toFixed(1)} jump=${relJump.toFixed(1)}%`);
        console.log(`Anchor: day${N - 1}=${r.daily[N - 1].toFixed(1)} target=${input.firstS4aVal} err=${anchorErr.toFixed(1)}`);
        console.log(`Spike: day${spikeDay}=${spikeVal.toFixed(0)} base=${baseAvg.toFixed(0)} ratio=${spikeRatio.toFixed(1)}× ${spikeRatio > 1.5 ? '✅' : '⚠️ flat'}`);
        console.log(`Negatives: ${negCount}`);
    }
    verify("Standard (spike in EST)", {
        totalDays: 730, boundaryIdx: 365, estTotal: 50000, prevAgg: 30000,
        firstS4aVal: 80, releaseDayIdx: 0, spikeA: 200, spikeLambda: 0.05, baseline: 40,
    });
    verify("Growing Track", {
        totalDays: 730, boundaryIdx: 365, estTotal: 10000, prevAgg: 40000,
        firstS4aVal: 150, releaseDayIdx: 0, spikeA: 50, spikeLambda: 0.05, baseline: 20,
    });
    verify("Release in S4A_PREV (spike at day 400)", {
        totalDays: 730, boundaryIdx: 365, estTotal: 0, prevAgg: 25000,
        firstS4aVal: 60, releaseDayIdx: 400, spikeA: 300, spikeLambda: 0.07, baseline: 30,
    });
    verify("KAYO 6am-like (release in S4A_PREV, estTotal=0)", {
        totalDays: 391, boundaryIdx: 26, estTotal: 0, prevAgg: 111046,
        firstS4aVal: 432, releaseDayIdx: 111, spikeA: 1254, spikeLambda: 0.06, baseline: 418,
    });
    verify("KAYO Realistic (big track, spike in EST)", {
        totalDays: 600, boundaryIdx: 235, estTotal: 420000, prevAgg: 380000,
        firstS4aVal: 850, releaseDayIdx: 0, spikeA: 3000, spikeLambda: 0.05, baseline: 700,
    });
    // ── New tests (2026-03-29) ──
    // Small integer metrics: playlist_adds with low counts.
    // Tests that Largest Remainder rounding handles small targets correctly
    // and doesn't produce excessive zeros or negative values.
    verify("Small Integers (playlist_adds: target=95, 42 active days)", {
        totalDays: 400, boundaryIdx: 358, estTotal: 0, prevAgg: 95,
        firstS4aVal: 2, releaseDayIdx: 358, spikeA: 8, spikeLambda: 0.10, baseline: 1,
    });
    verify("Tiny prevAgg (saves: target=12, 30 active days)", {
        totalDays: 395, boundaryIdx: 365, estTotal: 5, prevAgg: 12,
        firstS4aVal: 0, releaseDayIdx: 350, spikeA: 3, spikeLambda: 0.15, baseline: 0,
    });
    // UTC edge case: release date at exact period boundary.
    // Tests that buildShape correctly handles releaseDayIdx == boundaryIdx
    // (all EST days should be zero, entire prevAgg allocated to S4A_PREV).
    verify("UTC Edge: release at boundary (releaseDayIdx == boundaryIdx)", {
        totalDays: 730, boundaryIdx: 365, estTotal: 0, prevAgg: 50000,
        firstS4aVal: 120, releaseDayIdx: 365, spikeA: 500, spikeLambda: 0.06, baseline: 50,
    });
    verify("UTC Edge: release 1 day before boundary", {
        totalDays: 730, boundaryIdx: 365, estTotal: 80, prevAgg: 45000,
        firstS4aVal: 100, releaseDayIdx: 364, spikeA: 400, spikeLambda: 0.06, baseline: 45,
    });
    // Listener overlap ratio scenario: prevAgg is inflated by overlap multiplier.
    // In real usage, scraper multiplies prevAgg × overlapRatio (2.5-3.5×) before
    // calling disaggregate. This tests large prevAgg relative to daily values.
    verify("Listener Overlap (prevAgg=85000 from 28000×3.0, anchor=80)", {
        totalDays: 730, boundaryIdx: 365, estTotal: 25000, prevAgg: 85000,
        firstS4aVal: 80, releaseDayIdx: 0, spikeA: 150, spikeLambda: 0.05, baseline: 30,
    });
    verify("Listener Overlap High Ratio (prevAgg=140000 from 40000×3.5)", {
        totalDays: 600, boundaryIdx: 235, estTotal: 100000, prevAgg: 140000,
        firstS4aVal: 200, releaseDayIdx: 0, spikeA: 300, spikeLambda: 0.07, baseline: 100,
    });
    // Stress test
    console.log("\n=== Stress Test (200 random combos) ===");
    let passCount = 0, worstRelJump = 0, worstSumErr = 0, spikeKills = 0;
    for (let trial = 0; trial < 200; trial++) {
        // Randomize window, boundary, targets, anchor, release day and spike.
        const N = 400 + Math.floor(Math.random() * 600);
        const B = 100 + Math.floor(Math.random() * (N - 200));
        const est = Math.floor(Math.random() * 100000);
        const prev = Math.floor(Math.random() * 100000) + 1;
        const anchor = Math.random() * 200 + 1;
        const relDay = Math.floor(Math.random() * N);
        const spA = Math.random() * 500;
        const r = constrainedDisaggregate({
            totalDays: N, boundaryIdx: B, estTotal: est, prevAgg: prev,
            firstS4aVal: anchor, releaseDayIdx: relDay,
            spikeA: spA, spikeLambda: 0.03 + Math.random() * 0.1, baseline: Math.random() * 50,
        });
        let es = 0, ps = 0;
        for (let i = 0; i < B; i++)
            es += r.daily[i];
        for (let i = B; i < N; i++)
            ps += r.daily[i];
        const jump = Math.abs(r.daily[B - 1] - r.daily[B]);
        const avgB = (r.daily[B - 1] + r.daily[B]) / 2;
        const relJump = avgB > 0 ? jump / avgB : 0;
        const sumErr = Math.abs(es - est) + Math.abs(ps - prev);
        const hasNeg = r.daily.some(v => v < -0.01);
        worstRelJump = Math.max(worstRelJump, relJump);
        worstSumErr = Math.max(worstSumErr, sumErr);
        if (sumErr < 1 && !hasNeg)
            passCount++;
        // Check spike preservation
        if (spA > 50 && relDay >= 0 && relDay < N) {
            // Compare peak in the 14-day spike window to the post-spike level.
            const spikeRegion = r.daily.slice(relDay, Math.min(relDay + 14, N));
            const postRegion = r.daily.slice(Math.min(relDay + 30, N - 10), Math.min(relDay + 60, N));
            const maxSpike = Math.max(...spikeRegion);
            const avgPost = postRegion.length > 0 ? postRegion.reduce((a, b) => a + b, 0) / postRegion.length : 0;
            if (avgPost > 0 && maxSpike / avgPost < 1.2)
                spikeKills++;
        }
    }
    console.log(`Passed: ${passCount}/200 (sum err < 1, no negatives)`);
    console.log(`Worst rel jump: ${(worstRelJump * 100).toFixed(1)}%`);
    console.log(`Worst sum err: ${worstSumErr.toFixed(2)}`);
    console.log(`Spike killed: ${spikeKills}/200 (should be near 0)`);
}
package/dist/db.d.ts ADDED
@@ -0,0 +1,34 @@
1
+ /**
2
+ * SQLite compatibility layer — wraps node-sqlite3-wasm to match better-sqlite3 API.
3
+ *
4
+ * WHY: better-sqlite3 is a C++ native addon that fails to install without build tools
5
+ * (gcc/xcode/Visual Studio). node-sqlite3-wasm is pure WASM — `npm install` always works.
6
+ *
7
+ * This wrapper maps better-sqlite3's spread-parameter API to node-sqlite3-wasm's
8
+ * array-parameter API, so the rest of the codebase doesn't need to change.
9
+ *
10
+ * better-sqlite3: stmt.run(a, b, c) → node-sqlite3-wasm: stmt.run([a, b, c])
11
+ * better-sqlite3: stmt.get(a, b) → node-sqlite3-wasm: stmt.get([a, b])
12
+ * better-sqlite3: new Database(p, {readonly:true}) → new Database(p, {readOnly:true})
13
+ *
14
+ * DB file format is standard SQLite — files created by either library are compatible.
15
+ */
16
/**
 * Prepared-statement wrapper: accepts better-sqlite3-style spread parameters
 * and forwards them as a single array to the underlying node-sqlite3-wasm
 * statement (see dist/db.js for the implementation).
 */
declare class WrappedStatement {
    private stmt;
    constructor(stmt: any);
    // Execute the statement (INSERT/UPDATE/DELETE style usage).
    run(...params: any[]): any;
    // First matching row; undefined when no row matches.
    get(...params: any[]): any;
    // Every matching row.
    all(...params: any[]): any[];
}
23
/**
 * Drop-in replacement for better-sqlite3's Database class, backed by
 * node-sqlite3-wasm (see the file header for the rationale).
 */
export default class Database {
    private db;
    // Accepts both better-sqlite3's `readonly` and node-sqlite3-wasm's
    // `readOnly` spellings (see dist/db.js for the mapping).
    constructor(path: string, options?: {
        readonly?: boolean;
        readOnly?: boolean;
    });
    prepare(sql: string): WrappedStatement;
    exec(sql: string): void;
    close(): void;
    // Wraps fn in BEGIN/COMMIT with ROLLBACK on throw (see dist/db.js).
    transaction<T>(fn: () => T): () => T;
}
34
+ export {};
package/dist/db.js ADDED
@@ -0,0 +1,65 @@
1
+ /**
2
+ * SQLite compatibility layer — wraps node-sqlite3-wasm to match better-sqlite3 API.
3
+ *
4
+ * WHY: better-sqlite3 is a C++ native addon that fails to install without build tools
5
+ * (gcc/xcode/Visual Studio). node-sqlite3-wasm is pure WASM — `npm install` always works.
6
+ *
7
+ * This wrapper maps better-sqlite3's spread-parameter API to node-sqlite3-wasm's
8
+ * array-parameter API, so the rest of the codebase doesn't need to change.
9
+ *
10
+ * better-sqlite3: stmt.run(a, b, c) → node-sqlite3-wasm: stmt.run([a, b, c])
11
+ * better-sqlite3: stmt.get(a, b) → node-sqlite3-wasm: stmt.get([a, b])
12
+ * better-sqlite3: new Database(p, {readonly:true}) → new Database(p, {readOnly:true})
13
+ *
14
+ * DB file format is standard SQLite — files created by either library are compatible.
15
+ */
16
+ // node-sqlite3-wasm is CJS with default export containing .Database
17
+ // @ts-ignore — CJS interop with ESM
18
+ import sqlite3 from "node-sqlite3-wasm";
19
+ const WasmDatabase = sqlite3.Database;
20
/**
 * Statement adapter: better-sqlite3 callers pass bind parameters as spread
 * arguments, while node-sqlite3-wasm expects one array. Each method collects
 * the spread args and hands them to the wrapped statement as a single array.
 */
class WrappedStatement {
    stmt;
    constructor(stmt) {
        this.stmt = stmt;
    }
    /** Execute the statement; returns the driver's run info. */
    run(...params) {
        return this.stmt.run(params);
    }
    /** First matching row, or undefined when nothing matches. */
    get(...params) {
        const row = this.stmt.get(params);
        return row ?? undefined;
    }
    /** Every matching row. */
    all(...params) {
        return this.stmt.all(params);
    }
}
35
/**
 * Drop-in replacement for better-sqlite3's Database, backed by
 * node-sqlite3-wasm (pure WASM — no native build toolchain required).
 */
export default class Database {
    db;
    /**
     * @param {string} path - filesystem path to the SQLite database file
     * @param {{readonly?: boolean, readOnly?: boolean}} [options]
     *   Accepts better-sqlite3's `readonly` spelling and maps it onto
     *   node-sqlite3-wasm's `readOnly`.
     */
    constructor(path, options) {
        this.db = new WasmDatabase(path, {
            readOnly: options?.readonly ?? options?.readOnly ?? false,
        });
    }
    /** Compile a SQL statement; returns a spread-parameter wrapper. */
    prepare(sql) {
        return new WrappedStatement(this.db.prepare(sql));
    }
    /** Execute one or more SQL statements with no result. */
    exec(sql) {
        this.db.exec(sql);
    }
    /** Close the underlying database handle. */
    close() {
        this.db.close();
    }
    /**
     * better-sqlite3-compatible transaction helper: returns a function that
     * runs `fn` inside BEGIN/COMMIT, rolling back (and rethrowing) on error.
     *
     * BUG FIX: better-sqlite3 forwards the returned function's arguments to
     * `fn`; the previous wrapper silently dropped them, so any caller using
     * `const insertMany = db.transaction((rows) => ...); insertMany(rows)`
     * got `rows === undefined`. Arguments are now passed through.
     */
    transaction(fn) {
        return (...args) => {
            this.db.exec("BEGIN");
            try {
                const result = fn(...args);
                this.db.exec("COMMIT");
                return result;
            }
            catch (e) {
                // Roll back on any failure, then surface the original error.
                this.db.exec("ROLLBACK");
                throw e;
            }
        };
    }
}
@@ -0,0 +1,2 @@
1
+ #!/usr/bin/env node
2
+ export {};