cbrowser 18.24.0 → 18.25.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +30 -0
- package/dist/cli.js.map +1 -1
- package/dist/mcp-tools/base/index.d.ts +1 -1
- package/dist/mcp-tools/base/index.js +1 -1
- package/dist/mcp-tools/base/visual-testing-tools.d.ts.map +1 -1
- package/dist/mcp-tools/base/visual-testing-tools.js +112 -0
- package/dist/mcp-tools/base/visual-testing-tools.js.map +1 -1
- package/dist/visual/distance-metrics.d.ts +260 -0
- package/dist/visual/distance-metrics.d.ts.map +1 -0
- package/dist/visual/distance-metrics.js +914 -0
- package/dist/visual/distance-metrics.js.map +1 -0
- package/dist/visual/index.d.ts +1 -0
- package/dist/visual/index.d.ts.map +1 -1
- package/dist/visual/index.js +1 -0
- package/dist/visual/index.js.map +1 -1
- package/dist/visual/regression.d.ts +89 -0
- package/dist/visual/regression.d.ts.map +1 -1
- package/dist/visual/regression.js +446 -5
- package/dist/visual/regression.js.map +1 -1
- package/package.json +2 -1
|
@@ -0,0 +1,914 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Distance metrics for visual comparison
|
|
3
|
+
*
|
|
4
|
+
* Implements Sliced Wasserstein Distance (SWD) for robust image comparison.
|
|
5
|
+
* Based on optimal transport theory — measures minimum cost to transform
|
|
6
|
+
* one image distribution into another.
|
|
7
|
+
*
|
|
8
|
+
* Key advantages over byte-diff / SSIM:
|
|
9
|
+
* - Robust to sub-pixel shifts, anti-aliasing, font rendering differences
|
|
10
|
+
* - Cross-resolution capable (compares distributions, not pixel grids)
|
|
11
|
+
* - Decomposable (can show WHERE visual mass moved)
|
|
12
|
+
* - Supports barycenter computation (consensus baselines from N captures)
|
|
13
|
+
*
|
|
14
|
+
* @version 1.0.0
|
|
15
|
+
* @see https://github.com/alexandriashai/cbrowser/issues/158
|
|
16
|
+
*/
|
|
17
|
+
import { readFileSync } from 'fs';
|
|
18
|
+
/**
 * Load an image file into a raw RGBA pixel buffer, optionally downscaled.
 *
 * `sharp` is loaded lazily via dynamic import so this module can be imported
 * even before sharp is available; the import only fails on first use.
 *
 * @param {string} path - Path to an image file readable by sharp (PNG/JPEG).
 * @param {number} [downscale=1.0] - Scale factor applied to both dimensions.
 * @returns {Promise<{data: Uint8Array, width: number, height: number, channels: number}>}
 *   Raw RGBA bytes, 4 channels per pixel, row-major order.
 */
async function loadImage(path, downscale = 1.0) {
    // Dynamic import handles both ESM default and CJS module.exports
    const sharpModule = await import('sharp');
    const sharpFn = sharpModule.default ?? sharpModule;
    const img = sharpFn(path);
    const metadata = await img.metadata();
    // Fallback dimensions 800x600 when metadata lacks width/height —
    // presumably only hit for corrupt inputs; TODO confirm.
    const targetWidth = Math.round((metadata.width || 800) * downscale);
    const targetHeight = Math.round((metadata.height || 600) * downscale);
    const { data, info } = await img
        .resize(targetWidth, targetHeight, { fit: 'fill' }) // 'fill' ignores aspect ratio
        .ensureAlpha() // guarantee a 4th (alpha) channel so idx*4 indexing below is safe
        .raw()
        .toBuffer({ resolveWithObject: true });
    return {
        // Zero-copy view over the Buffer's backing memory.
        data: new Uint8Array(data.buffer, data.byteOffset, data.byteLength),
        width: info.width,
        height: info.height,
        channels: 4,
    };
}
|
|
38
|
+
// ── Core: 1D Wasserstein Distance ──
|
|
39
|
+
/**
 * Exact 1D Wasserstein-1 (earth mover's) distance between two sample sets.
 *
 * W1 between 1D empirical distributions equals the mean absolute difference
 * of their quantile functions, i.e. of the sorted samples. When the inputs
 * differ in length, only the first min(|a|,|b|) sorted values are compared.
 *
 * @param {ArrayLike<number>} a - First sample set (unsorted; not mutated).
 * @param {ArrayLike<number>} b - Second sample set (unsorted; not mutated).
 * @returns {number} Mean |quantile difference|; 0 when either set is empty.
 */
function wasserstein1D(a, b) {
    const count = Math.min(a.length, b.length);
    if (count === 0) {
        return 0;
    }
    // Copy into typed arrays before sorting so the callers' data is untouched;
    // Float64Array.prototype.sort is numeric by default.
    const quantA = Float64Array.from(a).sort();
    const quantB = Float64Array.from(b).sort();
    let total = 0;
    let i = 0;
    while (i < count) {
        total += Math.abs(quantA[i] - quantB[i]);
        i += 1;
    }
    return total / count;
}
|
|
58
|
+
// ── Core: Sliced Wasserstein Distance ──
|
|
59
|
+
/**
 * Sliced Wasserstein Distance between two point clouds.
 *
 * Algorithm:
 * 1. Draw `numProjections` random unit vectors.
 * 2. Project both clouds onto each direction.
 * 3. Compute the exact 1D Wasserstein distance per projection.
 * 4. Average over all projections.
 *
 * Complexity: O(K * N * log N), K = projections, N = points.
 * Non-deterministic: directions come from Math.random().
 *
 * @param {Float64Array[]} pointsA - Array of d-dimensional points.
 * @param {Float64Array[]} pointsB - Array of d-dimensional points.
 * @param {number} [numProjections=64] - Number of random slices.
 * @param {number} [dim=3] - Dimensionality of the points.
 * @returns {number} Average sliced distance; 1 when either cloud is empty.
 */
function slicedWassersteinDistance(pointsA, // Array of d-dimensional points
pointsB, // Array of d-dimensional points
numProjections = 64, dim = 3) {
    if (pointsA.length === 0 || pointsB.length === 0) {
        return 1; // maximal distance sentinel for a missing distribution
    }
    // Cap the working set at 50k points and equalize both cloud sizes.
    const n = Math.min(pointsA.length, pointsB.length, 50000);
    const cloudA = subsample(pointsA, n);
    const cloudB = subsample(pointsB, n);
    let accumulated = 0;
    for (let slice = 0; slice < numProjections; slice++) {
        const axis = randomUnitVector(dim);
        const lineA = new Float64Array(n);
        const lineB = new Float64Array(n);
        for (let i = 0; i < n; i++) {
            lineA[i] = dot(cloudA[i], axis);
            lineB[i] = dot(cloudB[i], axis);
        }
        accumulated += wasserstein1D(lineA, lineB);
    }
    return accumulated / numProjections;
}
|
|
95
|
+
// ── Helper: Random projections ──
|
|
96
|
+
/**
 * Generate a uniformly distributed random unit vector of the given dimension.
 *
 * Samples each component from a standard normal via the Box-Muller transform,
 * then normalizes — the classic recipe for uniform directions on the sphere.
 *
 * Fix: the previous version divided by the norm unconditionally; if every
 * component happened to be exactly 0 the result was all-NaN. We now retry
 * until the norm is non-zero (astronomically rare, but the guard is free).
 *
 * @param {number} dim - Number of components.
 * @returns {Float64Array} Vector of length `dim` with Euclidean norm 1.
 */
function randomUnitVector(dim) {
    const v = new Float64Array(dim);
    let norm = 0;
    while (norm === 0) {
        for (let i = 0; i < dim; i++) {
            // Box-Muller: two uniforms -> one standard normal.
            // Guard u1 against 0 so Math.log never produces -Infinity.
            const u1 = Math.random();
            const u2 = Math.random();
            v[i] = Math.sqrt(-2 * Math.log(u1 || 1e-10)) * Math.cos(2 * Math.PI * u2);
            norm += v[i] * v[i];
        }
    }
    norm = Math.sqrt(norm);
    for (let i = 0; i < dim; i++)
        v[i] /= norm;
    return v;
}
|
|
111
|
+
/**
 * Dot product of two numeric vectors; iterates over the length of `a`
 * (callers supply equal-length vectors).
 *
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b
 * @returns {number} Sum of componentwise products (0 for empty input).
 */
function dot(a, b) {
    let acc = 0;
    let i = 0;
    while (i < a.length) {
        acc += a[i] * b[i];
        i += 1;
    }
    return acc;
}
|
|
117
|
+
/**
 * Evenly subsample an array down to `n` elements.
 *
 * Picks indices floor(i * arr.length / n) for i in [0, n). When the array
 * already has at most `n` elements it is returned as-is (no copy).
 *
 * @param {Array} arr - Source array.
 * @param {number} n - Target element count.
 * @returns {Array} The original array, or a new array of length `n`.
 */
function subsample(arr, n) {
    if (arr.length <= n) {
        return arr;
    }
    const stride = arr.length / n;
    return Array.from({ length: n }, (_, i) => arr[Math.floor(i * stride)]);
}
|
|
127
|
+
// ── Image → Point Cloud Conversion ──
|
|
128
|
+
/**
 * Convert an RGBA image into a color point cloud.
 *
 * Each opaque pixel becomes a 3D point (R, G, B) normalized to [0, 1].
 * Pixels inside an ignore region, or with alpha below 10%, are skipped.
 *
 * @param {{data: Uint8Array, width: number, height: number}} img - Raw RGBA image.
 * @param {Array<{x:number,y:number,width:number,height:number}>} [ignoreRegions]
 *   Rectangles (pixel coords) to exclude from the cloud.
 * @returns {Float64Array[]} One 3-element point per retained pixel.
 */
// eslint-disable-next-line @typescript-eslint/no-unused-vars
function imageToColorPoints(img, ignoreRegions) {
    const points = [];
    const isMasked = (px, py) => ignoreRegions?.some(r => px >= r.x && px < r.x + r.width && py >= r.y && py < r.y + r.height) ?? false;
    for (let y = 0; y < img.height; y++) {
        const rowBase = y * img.width;
        for (let x = 0; x < img.width; x++) {
            if (isMasked(x, y)) {
                continue;
            }
            const idx = (rowBase + x) * 4;
            // Alpha under 10% → treat as transparent background and drop it.
            if (img.data[idx + 3] / 255 < 0.1) {
                continue;
            }
            points.push(new Float64Array([
                img.data[idx] / 255,
                img.data[idx + 1] / 255,
                img.data[idx + 2] / 255,
            ]));
        }
    }
    return points;
}
|
|
153
|
+
/**
 * Convert an RGBA image into a spatial-color point cloud.
 *
 * Each opaque pixel becomes a 5D point (x, y, R, G, B), all normalized to
 * [0, 1] — capturing both WHERE colors sit and WHAT they are. This is the
 * representation used for layout-aware comparison.
 *
 * @param {{data: Uint8Array, width: number, height: number}} img - Raw RGBA image.
 * @param {Array<{x:number,y:number,width:number,height:number}>} [ignoreRegions]
 *   Rectangles (pixel coords) to exclude.
 * @returns {Float64Array[]} One 5-element point per retained pixel.
 */
function imageToSpatialColorPoints(img, ignoreRegions) {
    const points = [];
    const isMasked = (px, py) => ignoreRegions?.some(r => px >= r.x && px < r.x + r.width && py >= r.y && py < r.y + r.height) ?? false;
    for (let y = 0; y < img.height; y++) {
        const rowBase = y * img.width;
        for (let x = 0; x < img.width; x++) {
            if (isMasked(x, y)) {
                continue;
            }
            const idx = (rowBase + x) * 4;
            // Alpha under 10% → transparent background, skip.
            if (img.data[idx + 3] / 255 < 0.1) {
                continue;
            }
            points.push(new Float64Array([
                x / img.width,
                y / img.height,
                img.data[idx] / 255,
                img.data[idx + 1] / 255,
                img.data[idx + 2] / 255,
            ]));
        }
    }
    return points;
}
|
|
179
|
+
/**
 * Per-channel color histogram distance between two RGBA images.
 *
 * Builds a 64-bin probability histogram per channel (skipping pixels with
 * alpha < 25/255), then measures the per-channel 1D Wasserstein distance.
 *
 * NOTE(review): wasserstein1D sorts its inputs, so this compares the sorted
 * bin masses rather than the binned color distributions via their CDFs —
 * confirm this is the intended semantics before changing thresholds.
 *
 * @param {{data: Uint8Array}} imgA - First raw RGBA image.
 * @param {{data: Uint8Array}} imgB - Second raw RGBA image.
 * @returns {{r:number,g:number,b:number,combined:number}} Per-channel
 *   distances plus their mean.
 */
function colorHistogramDistance(imgA, imgB) {
    const bins = 64;
    const makeHist = () => ({ r: new Float64Array(bins), g: new Float64Array(bins), b: new Float64Array(bins) });
    const histA = makeHist();
    const histB = makeHist();
    const fillHist = (img, hist) => {
        // Map a 0-255 channel value to one of the 64 bins (clamped).
        const bin = (val) => Math.min(Math.floor(val / 256 * bins), bins - 1);
        let count = 0;
        for (let i = 0; i < img.data.length; i += 4) {
            if (img.data[i + 3] < 25) {
                continue; // Skip transparent
            }
            hist.r[bin(img.data[i])]++;
            hist.g[bin(img.data[i + 1])]++;
            hist.b[bin(img.data[i + 2])]++;
            count++;
        }
        // Normalize counts into a probability distribution.
        if (count > 0) {
            for (let j = 0; j < bins; j++) {
                hist.r[j] /= count;
                hist.g[j] /= count;
                hist.b[j] /= count;
            }
        }
    };
    fillHist(imgA, histA);
    fillHist(imgB, histB);
    const r = wasserstein1D(histA.r, histB.r);
    const g = wasserstein1D(histA.g, histB.g);
    const b = wasserstein1D(histA.b, histB.b);
    return { r, g, b, combined: (r + g + b) / 3 };
}
|
|
214
|
+
// ── Public API ──
|
|
215
|
+
/**
 * Compute Sliced Wasserstein Distance between two images.
 *
 * Loads both images (downscaled), optionally masks ignore regions, then
 * compares them in one of three modes: 'color' (histogram only, fastest),
 * 'spatial' (5D point-cloud SWD), or 'combined' (weighted blend, default).
 *
 * @param baselinePath - Path to baseline PNG/JPEG
 * @param currentPath - Path to current PNG/JPEG
 * @param config - { numProjections, compareMode, downscale, ignoreRegions }
 * @returns DistanceMetricResult with similarity score (0-1); note that in
 *   'spatial' mode `details.channelDistances` is left undefined.
 */
export async function computeWassersteinDistance(baselinePath, currentPath, config = {}) {
    const startTime = performance.now();
    const { numProjections = 64, compareMode = 'combined', downscale = 0.5, ignoreRegions, } = config;
    // Load both images in parallel at the reduced resolution.
    const [imgA, imgB] = await Promise.all([
        loadImage(baselinePath, downscale),
        loadImage(currentPath, downscale),
    ]);
    // Ignore regions are given in original-image coordinates; scale them to
    // match the downscaled pixel grid.
    const scaledRegions = ignoreRegions?.map(r => ({
        x: Math.round(r.x * downscale),
        y: Math.round(r.y * downscale),
        width: Math.round(r.width * downscale),
        height: Math.round(r.height * downscale),
    }));
    let distance;
    let channelDistances;
    if (compareMode === 'color') {
        // Color histogram comparison (fastest)
        const hist = colorHistogramDistance(imgA, imgB);
        distance = hist.combined;
        channelDistances = { ...hist, spatial: 0 };
    }
    else if (compareMode === 'spatial') {
        // Spatial-color point cloud (most comprehensive).
        // NOTE: channelDistances is intentionally not populated here.
        const pointsA = imageToSpatialColorPoints(imgA, scaledRegions);
        const pointsB = imageToSpatialColorPoints(imgB, scaledRegions);
        distance = slicedWassersteinDistance(pointsA, pointsB, numProjections, 5);
    }
    else {
        // Combined: both color histogram AND spatial
        const hist = colorHistogramDistance(imgA, imgB);
        const pointsA = imageToSpatialColorPoints(imgA, scaledRegions);
        const pointsB = imageToSpatialColorPoints(imgB, scaledRegions);
        const spatialDist = slicedWassersteinDistance(pointsA, pointsB, numProjections, 5);
        // Weighted combination: spatial is more informative but noisier
        distance = hist.combined * 0.3 + spatialDist * 0.7;
        channelDistances = { ...hist, spatial: spatialDist };
    }
    // Map distance to a 0-1 similarity score. Distances for real images fall
    // roughly in [0, 0.25], so a linear 1 - 4*d clamped to [0, 1] is used
    // (not an actual sigmoid, despite the original comment).
    const normalizedScore = Math.max(0, Math.min(1, 1 - distance * 4));
    const computeTimeMs = performance.now() - startTime;
    return {
        metric: 'sliced-wasserstein',
        distance,
        normalizedScore,
        details: {
            computeTimeMs,
            // Dimensions reported are the baseline's (post-downscale).
            dimensions: { width: imgA.width, height: imgA.height },
            numProjections,
            channelDistances,
            transportCost: distance,
        },
    };
}
|
|
280
|
+
/**
 * Compute the legacy byte-diff metric (for comparison / fallback).
 *
 * Compares the two files byte-for-byte; any length difference counts as
 * fully differing bytes. Note this operates on the *encoded* file bytes,
 * not decoded pixels, so it is sensitive to compression settings.
 *
 * Fix: when both files are empty, the old code computed 0/0 = NaN for the
 * distance and score; that case now reports identical files (distance 0).
 *
 * @param {string} baselinePath - Path to the baseline file.
 * @param {string} currentPath - Path to the current file.
 * @returns {{metric:'byte-diff',distance:number,normalizedScore:number,details:{computeTimeMs:number}}}
 * @throws If either file cannot be read.
 */
export function computeByteDiff(baselinePath, currentPath) {
    const startTime = performance.now();
    const bufA = readFileSync(baselinePath);
    const bufB = readFileSync(currentPath);
    const minLen = Math.min(bufA.length, bufB.length);
    const maxLen = Math.max(bufA.length, bufB.length);
    let diff = 0;
    for (let i = 0; i < minLen; i++) {
        if (bufA[i] !== bufB[i])
            diff++;
    }
    diff += maxLen - minLen; // Size difference = all different
    // Guard against 0/0 when both files are empty.
    const diffRatio = maxLen === 0 ? 0 : diff / maxLen;
    return {
        metric: 'byte-diff',
        distance: diffRatio,
        normalizedScore: 1 - diffRatio,
        details: { computeTimeMs: performance.now() - startTime },
    };
}
|
|
303
|
+
/**
 * Compute a combined distance from multiple metrics.
 *
 * Runs the fast byte-diff and the robust Wasserstein metric, then blends
 * their similarity scores with the given weights (default 0.2 / 0.8).
 *
 * @param {string} baselinePath - Baseline image path.
 * @param {string} currentPath - Current image path.
 * @param {{byteDiff?:number, wasserstein?:number}} [weights] - Blend weights.
 * @param {object} [wassersteinConfig] - Forwarded to computeWassersteinDistance.
 * @returns {Promise<object>} Combined result (metric: 'combined').
 */
export async function computeCombinedDistance(baselinePath, currentPath, weights = {}, wassersteinConfig) {
    const startTime = performance.now();
    const { byteDiff: bw = 0.2, wasserstein: ww = 0.8 } = weights;
    // Byte-diff is synchronous — run it directly, then await the heavy metric.
    const byteResult = computeByteDiff(baselinePath, currentPath);
    const wassersteinResult = await computeWassersteinDistance(baselinePath, currentPath, wassersteinConfig);
    const combinedScore = byteResult.normalizedScore * bw + wassersteinResult.normalizedScore * ww;
    return {
        metric: 'combined',
        distance: 1 - combinedScore,
        normalizedScore: combinedScore,
        details: {
            computeTimeMs: performance.now() - startTime,
            channelDistances: wassersteinResult.details.channelDistances,
            dimensions: wassersteinResult.details.dimensions,
            numProjections: wassersteinResult.details.numProjections,
        },
    };
}
|
|
327
|
+
// ── Barycenter (Phase 2 foundation) ──
|
|
328
|
+
/**
 * Compute color histogram barycenter of multiple images.
 * Returns the "average" color distribution — useful for creating
 * consensus baselines from multiple captures.
 *
 * The 1D Wasserstein barycenter of histograms is obtained by averaging
 * their CDFs and differencing back to a PDF, done per color channel.
 *
 * For Phase 2: full Wasserstein barycenter via Sinkhorn/IBP.
 *
 * @param {string[]} imagePaths - Paths of the images to average. NOTE(review):
 *   an empty array yields meanDistance = NaN (division by 0) — callers are
 *   presumably expected to pass at least one path; confirm.
 * @param {{downscale?: number}} [config] - Downscale factor for loading.
 * @returns {Promise<{histogram: object, meanDistance: number}>} Barycenter
 *   histograms (64 bins per channel) and mean distance of inputs to it.
 */
export async function computeHistogramBarycenter(imagePaths, config = {}) {
    const { downscale = 0.5 } = config;
    const bins = 64;
    // Load all images (sequentially) and accumulate per-channel histograms.
    const histograms = [];
    for (const path of imagePaths) {
        const img = await loadImage(path, downscale);
        const hist = { r: new Float64Array(bins), g: new Float64Array(bins), b: new Float64Array(bins) };
        let count = 0;
        for (let i = 0; i < img.data.length; i += 4) {
            // Skip near-transparent pixels (alpha < 25/255).
            if (img.data[i + 3] < 25)
                continue;
            const bin = (val) => Math.min(Math.floor(val / 256 * bins), bins - 1);
            hist.r[bin(img.data[i])]++;
            hist.g[bin(img.data[i + 1])]++;
            hist.b[bin(img.data[i + 2])]++;
            count++;
        }
        // Normalize counts to a probability distribution.
        if (count > 0) {
            for (let j = 0; j < bins; j++) {
                hist.r[j] /= count;
                hist.g[j] /= count;
                hist.b[j] /= count;
            }
        }
        histograms.push(hist);
    }
    // Compute barycenter (quantile averaging for 1D Wasserstein barycenter)
    const barycenter = { r: new Float64Array(bins), g: new Float64Array(bins), b: new Float64Array(bins) };
    const n = histograms.length;
    for (const channel of ['r', 'g', 'b']) {
        // Convert histograms to CDFs (running sums).
        const cdfs = histograms.map(h => {
            const cdf = new Float64Array(bins);
            cdf[0] = h[channel][0];
            for (let i = 1; i < bins; i++)
                cdf[i] = cdf[i - 1] + h[channel][i];
            return cdf;
        });
        // Average CDFs (this gives the Wasserstein barycenter in 1D!)
        const avgCdf = new Float64Array(bins);
        for (let i = 0; i < bins; i++) {
            for (const cdf of cdfs)
                avgCdf[i] += cdf[i] / n;
        }
        // Difference the averaged CDF back into a PDF; clamp at 0 to absorb
        // floating-point round-off that could make a bin slightly negative.
        barycenter[channel][0] = avgCdf[0];
        for (let i = 1; i < bins; i++) {
            barycenter[channel][i] = Math.max(0, avgCdf[i] - avgCdf[i - 1]);
        }
    }
    // Compute mean distance from each image to the barycenter
    let totalDist = 0;
    for (const hist of histograms) {
        const dr = wasserstein1D(hist.r, barycenter.r);
        const dg = wasserstein1D(hist.g, barycenter.g);
        const db = wasserstein1D(hist.b, barycenter.b);
        totalDist += (dr + dg + db) / 3;
    }
    return {
        histogram: barycenter,
        meanDistance: totalDist / n,
    };
}
|
|
401
|
+
/**
 * Compute a Smart Barycenter Baseline from multiple screenshot paths.
 *
 * Algorithm:
 * 1. Compute per-image color histograms
 * 2. Compute Wasserstein barycenter (optimal consensus distribution)
 * 3. Measure each capture's distance to the barycenter
 * 4. Reject outliers (captures too far from consensus)
 * 5. Recompute barycenter without outliers
 * 6. Select the median capture (closest to barycenter) as reference image
 * 7. Compute adaptive threshold based on observed variance
 *
 * The result: a baseline that represents the "typical" rendering,
 * robust to timing variations, dynamic content, and animation states.
 *
 * @param screenshotPaths - Array of PNG paths (pre-captured); at least 2.
 * @param name - Baseline name (slugged into the generated id)
 * @param url - Source URL
 * @param config - { downscale, outlierThreshold (in stddevs), outputDir }
 * @throws Error when fewer than 2 screenshot paths are supplied.
 */
export async function computeSmartBaseline(screenshotPaths, name, url, config = {}) {
    const startTime = performance.now();
    const { downscale = 0.5, outlierThreshold = 2.0, outputDir: _outputDir, } = config;
    if (screenshotPaths.length < 2) {
        throw new Error('Smart baseline requires at least 2 captures');
    }
    const bins = 64;
    // Step 1: Load all images and compute histograms
    // NOTE(review): `images` is filled but never read afterwards — it keeps
    // every decoded bitmap alive for the whole call; confirm it's needed.
    const images = [];
    const histograms = [];
    for (const path of screenshotPaths) {
        const img = await loadImage(path, downscale);
        images.push(img);
        const hist = { r: new Float64Array(bins), g: new Float64Array(bins), b: new Float64Array(bins) };
        let count = 0;
        for (let i = 0; i < img.data.length; i += 4) {
            // Skip near-transparent pixels (alpha < 25/255).
            if (img.data[i + 3] < 25)
                continue;
            const bin = (val) => Math.min(Math.floor(val / 256 * bins), bins - 1);
            hist.r[bin(img.data[i])]++;
            hist.g[bin(img.data[i + 1])]++;
            hist.b[bin(img.data[i + 2])]++;
            count++;
        }
        // Normalize to a probability distribution.
        if (count > 0) {
            for (let j = 0; j < bins; j++) {
                hist.r[j] /= count;
                hist.g[j] /= count;
                hist.b[j] /= count;
            }
        }
        histograms.push(hist);
    }
    // Step 2: Compute initial barycenter (per-channel CDF averaging — the
    // exact 1D Wasserstein barycenter construction).
    const computeBarycenter = (hists) => {
        const bary = { r: new Float64Array(bins), g: new Float64Array(bins), b: new Float64Array(bins) };
        const n = hists.length;
        for (const ch of ['r', 'g', 'b']) {
            const cdfs = hists.map(h => {
                const cdf = new Float64Array(bins);
                cdf[0] = h[ch][0];
                for (let i = 1; i < bins; i++)
                    cdf[i] = cdf[i - 1] + h[ch][i];
                return cdf;
            });
            const avgCdf = new Float64Array(bins);
            for (let i = 0; i < bins; i++) {
                for (const cdf of cdfs)
                    avgCdf[i] += cdf[i] / n;
            }
            // Difference the averaged CDF back into a PDF (clamped >= 0).
            bary[ch][0] = avgCdf[0];
            for (let i = 1; i < bins; i++) {
                bary[ch][i] = Math.max(0, avgCdf[i] - avgCdf[i - 1]);
            }
        }
        return bary;
    };
    // Mean per-channel Wasserstein distance of one histogram to a barycenter.
    const distToBarycenter = (hist, bary) => {
        return (wasserstein1D(hist.r, bary.r) + wasserstein1D(hist.g, bary.g) + wasserstein1D(hist.b, bary.b)) / 3;
    };
    let bary = computeBarycenter(histograms);
    // Step 3: Measure distances
    let distances = histograms.map(h => distToBarycenter(h, bary));
    // Step 4: Outlier rejection — a capture is an outlier when its distance
    // deviates from the mean by more than outlierThreshold stddevs. When
    // stdDev is ~0 every capture is an inlier.
    const mean = distances.reduce((a, b) => a + b, 0) / distances.length;
    const variance = distances.reduce((a, d) => a + (d - mean) ** 2, 0) / distances.length;
    const stdDev = Math.sqrt(variance);
    const inlierIndices = [];
    const outlierIndices = [];
    for (let i = 0; i < distances.length; i++) {
        if (stdDev < 1e-10 || Math.abs(distances[i] - mean) <= outlierThreshold * stdDev) {
            inlierIndices.push(i);
        }
        else {
            outlierIndices.push(i);
        }
    }
    // Step 5: Recompute barycenter without outliers (if any were rejected)
    if (outlierIndices.length > 0 && inlierIndices.length >= 2) {
        const inlierHists = inlierIndices.map(i => histograms[i]);
        bary = computeBarycenter(inlierHists);
        distances = histograms.map(h => distToBarycenter(h, bary));
    }
    // Step 6: Select median capture (closest to barycenter among inliers)
    let medianIdx = inlierIndices[0];
    let minDist = distances[medianIdx];
    for (const idx of inlierIndices) {
        if (distances[idx] < minDist) {
            minDist = distances[idx];
            medianIdx = idx;
        }
    }
    // Step 7: Compute adaptive threshold
    // The threshold should be generous enough to accommodate the observed variance
    // but tight enough to catch real changes
    const inlierDistances = inlierIndices.map(i => distances[i]);
    const inlierMean = inlierDistances.reduce((a, b) => a + b, 0) / inlierDistances.length;
    const inlierMax = Math.max(...inlierDistances);
    const inlierStdDev = Math.sqrt(inlierDistances.reduce((a, d) => a + (d - inlierMean) ** 2, 0) / inlierDistances.length);
    // Adaptive threshold = max observed variance + 3 stddev margin,
    // converted to a similarity score via the shared 1 - 4*distance mapping
    // and clamped into [0.7, 0.98].
    const adaptiveDistance = inlierMax + 3 * inlierStdDev;
    const adaptiveThreshold = Math.max(0.7, Math.min(0.98, 1 - adaptiveDistance * 4));
    const computeTimeMs = performance.now() - startTime;
    return {
        // Slug the name into a URL-safe id suffixed with the creation time.
        id: `smart-${name.toLowerCase().replace(/[^a-z0-9]+/g, '-')}-${Date.now()}`,
        name,
        url,
        referencePath: screenshotPaths[medianIdx],
        capturePaths: screenshotPaths,
        barycenter: bary,
        captureDistances: distances,
        meanDistance: inlierMean,
        stdDevDistance: inlierStdDev,
        numCaptures: inlierIndices.length,
        numOutliers: outlierIndices.length,
        adaptiveThreshold,
        timestamp: new Date().toISOString(),
        computeTimeMs,
    };
}
|
|
542
|
+
/**
 * Compare a new screenshot against a Smart Baseline.
 *
 * Uses the barycenter's color distribution as the reference rather than
 * a single screenshot — this makes the comparison robust to the natural
 * variance observed during baseline creation. A spatial comparison against
 * the baseline's reference image is blended in (40% histogram / 60% spatial).
 *
 * @param screenshotPath - Path to the new screenshot
 * @param baseline - Smart baseline to compare against (needs `barycenter`,
 *   `referencePath`, `stdDevDistance`)
 * @param config - Wasserstein configuration (downscale, numProjections, ...)
 * @returns Result with `withinVariance` (distance within 3 baseline stddevs)
 *   and `varianceRatio` in addition to the usual metric fields.
 */
export async function compareAgainstSmartBaseline(screenshotPath, baseline, config = {}) {
    const startTime = performance.now();
    const { downscale = 0.5 } = config;
    const bins = 64;
    // Load new image and compute its per-channel probability histogram
    // (same 64-bin / alpha>=25 construction used when building the baseline).
    const img = await loadImage(screenshotPath, downscale);
    const hist = { r: new Float64Array(bins), g: new Float64Array(bins), b: new Float64Array(bins) };
    let count = 0;
    for (let i = 0; i < img.data.length; i += 4) {
        if (img.data[i + 3] < 25)
            continue;
        const bin = (val) => Math.min(Math.floor(val / 256 * bins), bins - 1);
        hist.r[bin(img.data[i])]++;
        hist.g[bin(img.data[i + 1])]++;
        hist.b[bin(img.data[i + 2])]++;
        count++;
    }
    if (count > 0) {
        for (let j = 0; j < bins; j++) {
            hist.r[j] /= count;
            hist.g[j] /= count;
            hist.b[j] /= count;
        }
    }
    // Compare against barycenter (per-channel Wasserstein, averaged).
    const distR = wasserstein1D(hist.r, baseline.barycenter.r);
    const distG = wasserstein1D(hist.g, baseline.barycenter.g);
    const distB = wasserstein1D(hist.b, baseline.barycenter.b);
    const distance = (distR + distG + distB) / 3;
    // Also run full Wasserstein against the reference image for spatial comparison
    const spatialResult = await computeWassersteinDistance(baseline.referencePath, screenshotPath, { ...config, compareMode: 'combined' });
    // Combined score: 40% histogram-vs-barycenter + 60% spatial-vs-reference
    const combinedDistance = distance * 0.4 + spatialResult.distance * 0.6;
    // Same linear 1 - 4*d clamp used throughout this module.
    const normalizedScore = Math.max(0, Math.min(1, 1 - combinedDistance * 4));
    // Is this within the observed variance of the baseline captures?
    // When the baseline had effectively zero variance, treat tiny distances
    // as ratio 0 and anything else as far out of range (100).
    const varianceRatio = baseline.stdDevDistance > 1e-10
        ? distance / baseline.stdDevDistance
        : distance < 0.001 ? 0 : 100;
    const withinVariance = varianceRatio <= 3; // Within 3 stddev of observed variance
    return {
        metric: 'sliced-wasserstein',
        distance: combinedDistance,
        normalizedScore,
        details: {
            computeTimeMs: performance.now() - startTime,
            dimensions: { width: img.width, height: img.height },
            channelDistances: { r: distR, g: distG, b: distB, spatial: spatialResult.distance },
            // transportCost reports the histogram-vs-barycenter component only.
            transportCost: distance,
        },
        withinVariance,
        varianceRatio,
    };
}
|
|
606
|
+
/**
|
|
607
|
+
* Compute a Visual Transport Map between two images.
|
|
608
|
+
*
|
|
609
|
+
* Divides both images into a grid, computes the color distribution
|
|
610
|
+
* per cell, then solves a transport problem to determine how
|
|
611
|
+
* visual mass flows from the baseline to the current state.
|
|
612
|
+
*
|
|
613
|
+
* The result shows:
|
|
614
|
+
* - WHERE changes happened (heatmap)
|
|
615
|
+
* - HOW visual content moved (flow arrows)
|
|
616
|
+
* - WHAT changed most (hotspots)
|
|
617
|
+
* - A rendered SVG overlay visualization
|
|
618
|
+
*
|
|
619
|
+
* @since v18.0.0 (Phase 3)
|
|
620
|
+
* @see https://github.com/alexandriashai/cbrowser/issues/158
|
|
621
|
+
*/
|
|
622
|
+
export async function computeTransportMap(baselinePath, currentPath, config = {}) {
|
|
623
|
+
const startTime = performance.now();
|
|
624
|
+
const { cellSize = 32, minFlowMass = 0.01, maxFlows = 100, downscale = 0.5, numHotspots = 5, } = config;
|
|
625
|
+
// Load images
|
|
626
|
+
const [imgA, imgB] = await Promise.all([
|
|
627
|
+
loadImage(baselinePath, downscale),
|
|
628
|
+
loadImage(currentPath, downscale),
|
|
629
|
+
]);
|
|
630
|
+
const scaledCellSize = Math.max(4, Math.round(cellSize * downscale));
|
|
631
|
+
const rows = Math.ceil(imgA.height / scaledCellSize);
|
|
632
|
+
const cols = Math.ceil(imgA.width / scaledCellSize);
|
|
633
|
+
const numCells = rows * cols;
|
|
634
|
+
// Compute per-cell color distributions
|
|
635
|
+
const cellHistA = computeCellHistograms(imgA, rows, cols, scaledCellSize);
|
|
636
|
+
const cellHistB = computeCellHistograms(imgB, rows, cols, scaledCellSize);
|
|
637
|
+
// Compute per-cell Wasserstein distance (change magnitude)
|
|
638
|
+
const heatmap = new Float64Array(numCells);
|
|
639
|
+
for (let i = 0; i < numCells; i++) {
|
|
640
|
+
const dR = wasserstein1D(cellHistA[i].r, cellHistB[i].r);
|
|
641
|
+
const dG = wasserstein1D(cellHistA[i].g, cellHistB[i].g);
|
|
642
|
+
const dB = wasserstein1D(cellHistA[i].b, cellHistB[i].b);
|
|
643
|
+
heatmap[i] = (dR + dG + dB) / 3;
|
|
644
|
+
}
|
|
645
|
+
// Compute transport flows between cells
|
|
646
|
+
// For each cell with significant change, find where the visual mass went
|
|
647
|
+
const flows = [];
|
|
648
|
+
const totalMassA = new Float64Array(numCells);
|
|
649
|
+
const totalMassB = new Float64Array(numCells);
|
|
650
|
+
for (let i = 0; i < numCells; i++) {
|
|
651
|
+
totalMassA[i] = cellHistA[i].totalMass;
|
|
652
|
+
totalMassB[i] = cellHistB[i].totalMass;
|
|
653
|
+
}
|
|
654
|
+
// Normalize masses
|
|
655
|
+
const sumA = totalMassA.reduce((a, b) => a + b, 0) || 1;
|
|
656
|
+
const sumB = totalMassB.reduce((a, b) => a + b, 0) || 1;
|
|
657
|
+
for (let i = 0; i < numCells; i++) {
|
|
658
|
+
totalMassA[i] /= sumA;
|
|
659
|
+
totalMassB[i] /= sumB;
|
|
660
|
+
}
|
|
661
|
+
// Greedy transport: for cells that lost mass, find nearest cells that gained mass
|
|
662
|
+
const massDiff = new Float64Array(numCells);
|
|
663
|
+
for (let i = 0; i < numCells; i++) {
|
|
664
|
+
massDiff[i] = totalMassB[i] - totalMassA[i];
|
|
665
|
+
}
|
|
666
|
+
// Sources (lost mass) and sinks (gained mass)
|
|
667
|
+
const sources = [];
|
|
668
|
+
const sinks = [];
|
|
669
|
+
for (let i = 0; i < numCells; i++) {
|
|
670
|
+
const row = Math.floor(i / cols);
|
|
671
|
+
const col = i % cols;
|
|
672
|
+
if (massDiff[i] < -minFlowMass * 0.1) {
|
|
673
|
+
sources.push({ idx: i, mass: -massDiff[i], row, col });
|
|
674
|
+
}
|
|
675
|
+
else if (massDiff[i] > minFlowMass * 0.1) {
|
|
676
|
+
sinks.push({ idx: i, mass: massDiff[i], row, col });
|
|
677
|
+
}
|
|
678
|
+
}
|
|
679
|
+
// Match sources to nearest sinks (greedy nearest-neighbor transport)
|
|
680
|
+
let totalCost = 0;
|
|
681
|
+
for (const src of sources) {
|
|
682
|
+
if (src.mass < minFlowMass * 0.1)
|
|
683
|
+
continue;
|
|
684
|
+
// Find nearest sink with available capacity
|
|
685
|
+
let bestSink = null;
|
|
686
|
+
let bestDist = Infinity;
|
|
687
|
+
for (const sink of sinks) {
|
|
688
|
+
if (sink.mass < minFlowMass * 0.1)
|
|
689
|
+
continue;
|
|
690
|
+
const dist = Math.sqrt((src.row - sink.row) ** 2 + (src.col - sink.col) ** 2);
|
|
691
|
+
if (dist < bestDist) {
|
|
692
|
+
bestDist = dist;
|
|
693
|
+
bestSink = sink;
|
|
694
|
+
}
|
|
695
|
+
}
|
|
696
|
+
if (!bestSink)
|
|
697
|
+
continue;
|
|
698
|
+
const transportedMass = Math.min(src.mass, bestSink.mass);
|
|
699
|
+
if (transportedMass < minFlowMass)
|
|
700
|
+
continue;
|
|
701
|
+
const srcX = (src.col + 0.5) * scaledCellSize / downscale;
|
|
702
|
+
const srcY = (src.row + 0.5) * scaledCellSize / downscale;
|
|
703
|
+
const dstX = (bestSink.col + 0.5) * scaledCellSize / downscale;
|
|
704
|
+
const dstY = (bestSink.row + 0.5) * scaledCellSize / downscale;
|
|
705
|
+
flows.push({
|
|
706
|
+
from: { x: srcX, y: srcY, row: src.row, col: src.col },
|
|
707
|
+
to: { x: dstX, y: dstY, row: bestSink.row, col: bestSink.col },
|
|
708
|
+
mass: transportedMass,
|
|
709
|
+
distance: bestDist * scaledCellSize / downscale,
|
|
710
|
+
colorFrom: cellHistA[src.idx].dominantColor,
|
|
711
|
+
colorTo: cellHistB[bestSink.idx].dominantColor,
|
|
712
|
+
});
|
|
713
|
+
totalCost += transportedMass * bestDist;
|
|
714
|
+
src.mass -= transportedMass;
|
|
715
|
+
bestSink.mass -= transportedMass;
|
|
716
|
+
}
|
|
717
|
+
// Sort flows by mass and limit
|
|
718
|
+
flows.sort((a, b) => b.mass - a.mass);
|
|
719
|
+
const topFlows = flows.slice(0, maxFlows);
|
|
720
|
+
// Identify hotspots (regions with highest change)
|
|
721
|
+
const hotspots = identifyHotspots(heatmap, rows, cols, scaledCellSize, downscale, numHotspots);
|
|
722
|
+
// Generate SVG visualization
|
|
723
|
+
const origWidth = imgA.width / downscale;
|
|
724
|
+
const origHeight = imgA.height / downscale;
|
|
725
|
+
const svg = generateTransportSVG(topFlows, heatmap, rows, cols, scaledCellSize, downscale, origWidth, origHeight, hotspots);
|
|
726
|
+
return {
|
|
727
|
+
gridSize: { rows, cols },
|
|
728
|
+
flows: topFlows,
|
|
729
|
+
heatmap,
|
|
730
|
+
totalCost,
|
|
731
|
+
hotspots,
|
|
732
|
+
svg,
|
|
733
|
+
dimensions: { width: origWidth, height: origHeight },
|
|
734
|
+
computeTimeMs: performance.now() - startTime,
|
|
735
|
+
};
|
|
736
|
+
}
|
|
737
|
+
/**
 * Build per-cell RGB colour histograms for a grid laid over an RGBA image.
 *
 * The image is divided into `rows` × `cols` cells of `cellSize` pixels
 * (edge cells are clipped to the image bounds). For each cell, in
 * row-major order, the result records:
 *   - r/g/b:         16-bin per-channel histograms, normalised to probabilities
 *   - totalMass:     number of opaque pixels counted in the cell
 *   - dominantColor: hex string of the cell's *average* colour
 *                    ('#000000' when the cell has no opaque pixels)
 *
 * Pixels with alpha < 25 are treated as transparent and carry no mass.
 *
 * @param {{width:number,height:number,data:Uint8ClampedArray}} img RGBA image.
 * @param {number} rows Grid rows.
 * @param {number} cols Grid columns.
 * @param {number} cellSize Cell edge length in pixels.
 * @returns {Array<object>} `rows * cols` cell records.
 */
function computeCellHistograms(img, rows, cols, cellSize) {
    const BINS = 16; // coarse binning keeps the per-cell cost low
    const { width, height, data } = img;
    const toHex = (v) => v.toString(16).padStart(2, '0');
    const cells = [];
    for (let gridRow = 0; gridRow < rows; gridRow++) {
        for (let gridCol = 0; gridCol < cols; gridCol++) {
            const cell = {
                r: new Float64Array(BINS),
                g: new Float64Array(BINS),
                b: new Float64Array(BINS),
                totalMass: 0,
                dominantColor: '#000000',
            };
            const yLo = gridRow * cellSize;
            const xLo = gridCol * cellSize;
            const yHi = Math.min(yLo + cellSize, height);
            const xHi = Math.min(xLo + cellSize, width);
            let sumR = 0;
            let sumG = 0;
            let sumB = 0;
            let opaque = 0;
            for (let y = yLo; y < yHi; y++) {
                for (let x = xLo; x < xHi; x++) {
                    const p = (y * width + x) * 4;
                    // Near-transparent pixels contribute no visual mass.
                    if (data[p + 3] < 25) {
                        continue;
                    }
                    const red = data[p];
                    const green = data[p + 1];
                    const blue = data[p + 2];
                    cell.r[Math.min(Math.floor((red / 256) * BINS), BINS - 1)]++;
                    cell.g[Math.min(Math.floor((green / 256) * BINS), BINS - 1)]++;
                    cell.b[Math.min(Math.floor((blue / 256) * BINS), BINS - 1)]++;
                    sumR += red;
                    sumG += green;
                    sumB += blue;
                    opaque++;
                }
            }
            if (opaque > 0) {
                // Turn raw counts into a probability distribution.
                for (let bin = 0; bin < BINS; bin++) {
                    cell.r[bin] /= opaque;
                    cell.g[bin] /= opaque;
                    cell.b[bin] /= opaque;
                }
                cell.totalMass = opaque;
                cell.dominantColor = `#${toHex(Math.round(sumR / opaque))}${toHex(Math.round(sumG / opaque))}${toHex(Math.round(sumB / opaque))}`;
            }
            cells.push(cell);
        }
    }
    return cells;
}
|
|
789
|
+
// ── Hotspot identification ──
|
|
790
|
+
/**
 * Identify the top changed regions ("hotspots") in a per-cell change heatmap.
 *
 * Cells are ranked by change magnitude; each unclaimed seed cell is grown
 * into a rectangle covering all 4-connected neighbours whose magnitude is at
 * least half the seed's. Cells below 0.005 are treated as noise.
 *
 * Fix vs previous version: the region-growing step used unbounded recursion,
 * which could overflow the call stack when one connected changed region spans
 * many cells (large screenshots with small cells). It now uses an explicit
 * stack; the visited set and bounding box are order-independent, so results
 * are unchanged.
 *
 * @param {Float64Array} heatmap Per-cell change magnitudes, row-major.
 * @param {number} rows Grid rows.
 * @param {number} cols Grid columns.
 * @param {number} cellSize Cell edge length in (downscaled) pixels.
 * @param {number} downscale Downscale factor used when the grid was built;
 *   regions are reported in original-image pixel coordinates.
 * @param {number} count Maximum number of hotspots to return.
 * @returns {Array<{region:{x:number,y:number,width:number,height:number},magnitude:number,description:string}>}
 */
function identifyHotspots(heatmap, rows, cols, cellSize, downscale, count) {
    // Rank cells by change magnitude, descending.
    const indexed = Array.from(heatmap).map((mag, i) => ({ mag, i }));
    indexed.sort((a, b) => b.mag - a.mag);
    const used = new Set();
    const hotspots = [];
    for (const { mag, i } of indexed) {
        if (hotspots.length >= count)
            break;
        if (mag < 0.005)
            break; // everything after this is below the noise floor
        if (used.has(i))
            continue; // already absorbed into an earlier hotspot
        const row = Math.floor(i / cols);
        const col = i % cols;
        // Grow the hotspot to include adjacent high-change cells.
        let minRow = row, maxRow = row, minCol = col, maxCol = col;
        const threshold = mag * 0.5;
        used.add(i);
        // Iterative flood fill (explicit stack) — recursion here could blow
        // the call stack on very large grids.
        const stack = [[row - 1, col], [row + 1, col], [row, col - 1], [row, col + 1]];
        while (stack.length > 0) {
            const [r, c] = stack.pop();
            if (r < 0 || r >= rows || c < 0 || c >= cols)
                continue;
            const idx = r * cols + c;
            if (used.has(idx) || heatmap[idx] < threshold)
                continue;
            used.add(idx);
            minRow = Math.min(minRow, r);
            maxRow = Math.max(maxRow, r);
            minCol = Math.min(minCol, c);
            maxCol = Math.max(maxCol, c);
            stack.push([r - 1, c], [r + 1, c], [r, c - 1], [r, c + 1]);
        }
        // Map the cell bounding box back to original-image pixels.
        const x = (minCol * cellSize) / downscale;
        const y = (minRow * cellSize) / downscale;
        const w = ((maxCol - minCol + 1) * cellSize) / downscale;
        const h = ((maxRow - minRow + 1) * cellSize) / downscale;
        // Describe what kind of change, based on the covered cell area.
        const area = (maxRow - minRow + 1) * (maxCol - minCol + 1);
        let description;
        if (area > rows * cols * 0.3) {
            description = 'Major layout change across large area';
        }
        else if (area > 5) {
            description = `Significant visual change in ${w.toFixed(0)}×${h.toFixed(0)}px region`;
        }
        else {
            description = `Localized visual change at (${x.toFixed(0)}, ${y.toFixed(0)})`;
        }
        hotspots.push({
            region: { x: Math.round(x), y: Math.round(y), width: Math.round(w), height: Math.round(h) },
            magnitude: mag,
            description,
        });
    }
    return hotspots;
}
|
|
853
|
+
// ── SVG Visualization ──
|
|
854
|
+
/**
 * Render the transport map as an SVG overlay string.
 *
 * Layers, back to front:
 *   1. per-cell change heatmap (blue → yellow → red ramp, 40% opacity)
 *   2. transport-flow arrows, thickness/opacity scaled by moved mass
 *   3. dashed hotspot outlines with text labels
 *
 * Fix vs previous version: `Math.max(...heatmap)` spread the entire cell
 * grid as call arguments; for large grids this throws a RangeError (engine
 * argument-count / stack limit). Maxima are now computed with plain loops.
 *
 * @param {Array<object>} flows Transport flows ({from, to, mass, ...}).
 * @param {Float64Array} heatmap Per-cell change magnitudes, row-major.
 * @param {number} rows Grid rows.
 * @param {number} cols Grid columns.
 * @param {number} cellSize Cell edge length in (downscaled) pixels.
 * @param {number} downscale Downscale factor; geometry is emitted in
 *   original-image coordinates.
 * @param {number} width SVG viewBox width (original-image pixels).
 * @param {number} height SVG viewBox height (original-image pixels).
 * @param {Array<object>} hotspots Hotspot records from identifyHotspots().
 * @returns {string} Complete standalone SVG document.
 */
function generateTransportSVG(flows, heatmap, rows, cols, cellSize, downscale, width, height, hotspots) {
    // Floor of 0.001 avoids division by zero on an all-quiet heatmap.
    let maxHeat = 0.001;
    for (let i = 0; i < heatmap.length; i++) {
        if (heatmap[i] > maxHeat)
            maxHeat = heatmap[i];
    }
    let svg = `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 ${width} ${height}" width="${width}" height="${height}">\n`;
    svg += `  <defs>\n`;
    svg += `    <marker id="arrowhead" markerWidth="6" markerHeight="4" refX="5" refY="2" orient="auto">\n`;
    svg += `      <polygon points="0 0, 6 2, 0 4" fill="#ff6b6b" opacity="0.8"/>\n`;
    svg += `    </marker>\n`;
    svg += `  </defs>\n`;
    // Heatmap overlay
    svg += `  <g opacity="0.4">\n`;
    for (let i = 0; i < heatmap.length; i++) {
        if (heatmap[i] < 0.005)
            continue; // below the noise floor — don't draw
        const row = Math.floor(i / cols);
        const col = i % cols;
        const x = (col * cellSize) / downscale;
        const y = (row * cellSize) / downscale;
        const w = cellSize / downscale;
        const h = cellSize / downscale;
        const intensity = Math.min(heatmap[i] / maxHeat, 1);
        // Blue → Yellow → Red color ramp
        const r = Math.round(intensity > 0.5 ? 255 : intensity * 2 * 255);
        const g = Math.round(intensity > 0.5 ? (1 - intensity) * 2 * 255 : intensity * 2 * 200);
        const b = Math.round(intensity > 0.5 ? 0 : (1 - intensity * 2) * 255);
        svg += `    <rect x="${x}" y="${y}" width="${w}" height="${h}" fill="rgb(${r},${g},${b})" rx="1"/>\n`;
    }
    svg += `  </g>\n`;
    // Transport flow arrows
    svg += `  <g>\n`;
    let maxMass = 0.001;
    for (const f of flows) {
        if (f.mass > maxMass)
            maxMass = f.mass;
    }
    for (const flow of flows) {
        const opacity = Math.min(0.3 + (flow.mass / maxMass) * 0.6, 0.9);
        const strokeWidth = Math.max(1, (flow.mass / maxMass) * 4);
        svg += `    <line x1="${flow.from.x}" y1="${flow.from.y}" x2="${flow.to.x}" y2="${flow.to.y}" `;
        svg += `stroke="#ff6b6b" stroke-width="${strokeWidth.toFixed(1)}" opacity="${opacity.toFixed(2)}" `;
        svg += `marker-end="url(#arrowhead)"/>\n`;
    }
    svg += `  </g>\n`;
    // Hotspot outlines
    svg += `  <g fill="none" stroke="#ffd43b" stroke-width="2" stroke-dasharray="6,3">\n`;
    for (const hs of hotspots) {
        svg += `    <rect x="${hs.region.x}" y="${hs.region.y}" width="${hs.region.width}" height="${hs.region.height}" rx="4"/>\n`;
    }
    svg += `  </g>\n`;
    // Hotspot labels
    svg += `  <g font-family="system-ui, sans-serif" font-size="11" font-weight="600">\n`;
    for (let i = 0; i < hotspots.length; i++) {
        const hs = hotspots[i];
        const labelX = hs.region.x + hs.region.width + 4;
        const labelY = hs.region.y + 14;
        // Background pill sized roughly by character count (~6px per char).
        svg += `    <rect x="${labelX - 2}" y="${labelY - 11}" width="${hs.description.length * 6 + 8}" height="16" fill="rgba(0,0,0,0.7)" rx="3"/>\n`;
        svg += `    <text x="${labelX + 2}" y="${labelY}" fill="#ffd43b">${escapeXml(hs.description)}</text>\n`;
    }
    svg += `  </g>\n`;
    svg += `</svg>`;
    return svg;
}
|
|
911
|
+
/**
 * Escape a string for safe embedding in XML/SVG text content or attributes.
 *
 * Fix: the previous version replaced each special character with itself
 * (the entity names were missing), making it a no-op — unescaped `<`, `&`
 * or `"` in hotspot descriptions would corrupt the generated SVG markup.
 * Ampersand is escaped first so already-produced entities are not mangled.
 *
 * @param {string} s Raw text.
 * @returns {string} XML-safe text.
 */
function escapeXml(s) {
    return s
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;');
}
|
|
914
|
+
//# sourceMappingURL=distance-metrics.js.map
|