@srsergio/taptapp-ar 1.1.1 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. package/dist/compiler/node-worker.js +1 -197
  2. package/dist/compiler/offline-compiler.js +1 -207
  3. package/dist/core/constants.js +1 -38
  4. package/dist/core/detector/crop-detector.js +1 -88
  5. package/dist/core/detector/detector-lite.js +1 -455
  6. package/dist/core/detector/freak.js +1 -89
  7. package/dist/core/estimation/estimate.js +1 -16
  8. package/dist/core/estimation/estimator.js +1 -30
  9. package/dist/core/estimation/morph-refinement.js +1 -116
  10. package/dist/core/estimation/non-rigid-refine.js +1 -70
  11. package/dist/core/estimation/pnp-solver.js +1 -109
  12. package/dist/core/estimation/refine-estimate.js +1 -311
  13. package/dist/core/estimation/utils.js +1 -67
  14. package/dist/core/features/auto-rotation-feature.js +1 -30
  15. package/dist/core/features/crop-detection-feature.js +1 -26
  16. package/dist/core/features/feature-base.js +1 -1
  17. package/dist/core/features/feature-manager.js +1 -55
  18. package/dist/core/features/one-euro-filter-feature.js +1 -44
  19. package/dist/core/features/temporal-filter-feature.js +1 -57
  20. package/dist/core/image-list.js +1 -54
  21. package/dist/core/input-loader.js +1 -87
  22. package/dist/core/matching/hamming-distance.js +1 -66
  23. package/dist/core/matching/hdc.js +1 -102
  24. package/dist/core/matching/hierarchical-clustering.js +1 -130
  25. package/dist/core/matching/hough.js +1 -170
  26. package/dist/core/matching/matcher.js +1 -66
  27. package/dist/core/matching/matching.js +1 -401
  28. package/dist/core/matching/ransacHomography.js +1 -132
  29. package/dist/core/perception/bio-inspired-engine.js +1 -232
  30. package/dist/core/perception/foveal-attention.js +1 -280
  31. package/dist/core/perception/index.js +1 -17
  32. package/dist/core/perception/predictive-coding.js +1 -278
  33. package/dist/core/perception/saccadic-controller.js +1 -269
  34. package/dist/core/perception/saliency-map.js +1 -254
  35. package/dist/core/perception/scale-orchestrator.js +1 -68
  36. package/dist/core/protocol.js +1 -254
  37. package/dist/core/tracker/extract-utils.js +1 -29
  38. package/dist/core/tracker/extract.js +1 -306
  39. package/dist/core/tracker/tracker.js +1 -352
  40. package/dist/core/utils/cumsum.js +1 -37
  41. package/dist/core/utils/delaunay.js +1 -125
  42. package/dist/core/utils/geometry.js +1 -101
  43. package/dist/core/utils/gpu-compute.js +1 -231
  44. package/dist/core/utils/homography.js +1 -138
  45. package/dist/core/utils/images.js +1 -108
  46. package/dist/core/utils/lsh-binarizer.js +1 -37
  47. package/dist/core/utils/lsh-direct.js +1 -76
  48. package/dist/core/utils/projection.js +1 -51
  49. package/dist/core/utils/randomizer.js +1 -25
  50. package/dist/core/utils/worker-pool.js +1 -89
  51. package/dist/index.js +1 -7
  52. package/dist/libs/one-euro-filter.js +1 -70
  53. package/dist/react/TaptappAR.js +1 -151
  54. package/dist/react/types.js +1 -16
  55. package/dist/react/use-ar.js +1 -118
  56. package/dist/runtime/aframe.js +1 -272
  57. package/dist/runtime/bio-inspired-controller.js +1 -358
  58. package/dist/runtime/controller.js +1 -592
  59. package/dist/runtime/controller.worker.js +1 -93
  60. package/dist/runtime/index.js +1 -5
  61. package/dist/runtime/three.js +1 -304
  62. package/dist/runtime/track.js +1 -381
  63. package/package.json +10 -4
@@ -1,401 +1 @@
1
- import TinyQueue from "tinyqueue";
2
- import { compute as hammingCompute } from "./hamming-distance.js";
3
- import { computeHoughMatches } from "./hough.js";
4
- import { computeHomography } from "./ransacHomography.js";
5
- import { multiplyPointHomographyInhomogenous, matrixInverse33 } from "../utils/geometry.js";
6
- import { refineWithMorphology } from "../estimation/morph-refinement.js";
7
- import { popcount32 } from "./hierarchical-clustering.js";
8
- import { AR_CONFIG } from "../constants.js";
9
- const INLIER_THRESHOLD = AR_CONFIG.INLIER_THRESHOLD;
10
- const MIN_NUM_INLIERS = AR_CONFIG.MIN_NUM_INLIERS;
11
- const CLUSTER_MAX_POP = AR_CONFIG.CLUSTER_MAX_POP;
12
- const HAMMING_THRESHOLD = AR_CONFIG.HAMMING_THRESHOLD;
13
- const HDC_RATIO_THRESHOLD = AR_CONFIG.HDC_RATIO_THRESHOLD;
14
- const MAX_MATCH_QUERY_POINTS = AR_CONFIG.MAX_MATCH_QUERY_POINTS;
15
- // match list of querypoints against pre-built list of keyframes
16
- const match = ({ keyframe, querypoints: rawQuerypoints, querywidth, queryheight, debugMode, expectedScale }) => {
17
- let debugExtra = {};
18
- // 🎯 Performance Optimizer: Use only the most "salient" points (highest response)
19
- const querypoints = rawQuerypoints.length > MAX_MATCH_QUERY_POINTS
20
- ? [...rawQuerypoints].sort((a, b) => (b.score || b.response || 0) - (a.score || a.response || 0)).slice(0, MAX_MATCH_QUERY_POINTS)
21
- : rawQuerypoints;
22
- const matches = [];
23
- const qlen = querypoints.length;
24
- const kmax = keyframe.max;
25
- const kmin = keyframe.min;
26
- // Detect descriptor mode: HDC (32-bit signature), Compact (32-bit XOR folded), or Raw (64-bit)
27
- const isHDC = keyframe.hdc === true || (kmax && kmax.hdc === 1);
28
- const isCompact = (kmax && kmax.compact === 1) || (kmin && kmin.compact === 1);
29
- const descSize = (isHDC || isCompact) ? 1 : 2; // Compact uses 32-bit like HDC
30
- const currentRatioThreshold = isHDC ? HDC_RATIO_THRESHOLD : HAMMING_THRESHOLD;
31
- for (let j = 0; j < qlen; j++) {
32
- const querypoint = querypoints[j];
33
- const col = querypoint.maxima ? kmax : kmin;
34
- if (!col || col.x.length === 0)
35
- continue;
36
- const rootNode = col.t;
37
- const keypointIndexes = [];
38
- const queue = new TinyQueue([], (a1, a2) => a1.d - a2.d);
39
- _query({
40
- node: rootNode,
41
- descriptors: col.d,
42
- querypoint,
43
- queue,
44
- keypointIndexes,
45
- numPop: 0,
46
- isHDC,
47
- descSize,
48
- isCompact
49
- });
50
- let bestIndex = -1;
51
- let bestD1 = Number.MAX_SAFE_INTEGER;
52
- let bestD2 = Number.MAX_SAFE_INTEGER;
53
- const qDesc = querypoint.descriptors;
54
- const cDesc = col.d;
55
- // For compact mode: pre-compute XOR folded query descriptor (64-bit → 32-bit)
56
- const qDescCompact = isCompact && qDesc && qDesc.length >= 2
57
- ? (qDesc[0] ^ qDesc[1]) >>> 0
58
- : 0;
59
- for (let k = 0; k < keypointIndexes.length; k++) {
60
- const idx = keypointIndexes[k];
61
- // 🚀 NANITE-STYLE: Dynamic scale filtering
62
- // If we have an expected scale, skip points that are outside the resolution range
63
- if (expectedScale !== undefined && col.s) {
64
- const featureScale = col.s[idx]; // Octave scale (1, 2, 4...)
65
- const idealKeyScale = (querypoint.scale || 1.0) / expectedScale;
66
- // allow ~1 octave of margin
67
- if (featureScale < idealKeyScale * 0.4 || featureScale > idealKeyScale * 2.5) {
68
- continue;
69
- }
70
- }
71
- let d;
72
- if (isHDC) {
73
- d = popcount32(cDesc[idx] ^ querypoint.hdcSignature);
74
- }
75
- else if (isCompact) {
76
- // Compact mode: compare 32-bit XOR folded descriptors
77
- d = popcount32(cDesc[idx] ^ qDescCompact);
78
- }
79
- else {
80
- d = hammingCompute({ v1: cDesc, v1Offset: idx * descSize, v2: qDesc });
81
- }
82
- if (d < bestD1) {
83
- bestD2 = bestD1;
84
- bestD1 = d;
85
- bestIndex = idx;
86
- }
87
- else if (d < bestD2) {
88
- bestD2 = d;
89
- }
90
- }
91
- if (bestIndex !== -1) {
92
- if (bestD2 === Number.MAX_SAFE_INTEGER || (bestD1 / bestD2) < currentRatioThreshold) {
93
- matches.push({
94
- querypoint,
95
- keypoint: {
96
- x: col.x[bestIndex],
97
- y: col.y[bestIndex],
98
- angle: col.a[bestIndex],
99
- scale: col.s ? col.s[bestIndex] : keyframe.s
100
- },
101
- d: bestD1
102
- });
103
- }
104
- }
105
- }
106
- if (matches.length < MIN_NUM_INLIERS) {
107
- return { debugExtra };
108
- }
109
- // 🌌 Moonshot: Constellation matching disabled for performance calibration
110
- const constellationMatches = matches;
111
- if (debugMode)
112
- debugExtra.constellationMatches = constellationMatches;
113
- const houghMatches = computeHoughMatches({
114
- keywidth: keyframe.w || keyframe.width,
115
- keyheight: keyframe.h || keyframe.height,
116
- querywidth,
117
- queryheight,
118
- matches: constellationMatches,
119
- });
120
- if (debugMode) {
121
- debugExtra.houghMatches = houghMatches;
122
- }
123
- if (houghMatches.length < MIN_NUM_INLIERS) {
124
- return { debugExtra };
125
- }
126
- const H = computeHomography({
127
- srcPoints: houghMatches.map((m) => [m.keypoint.x, m.keypoint.y]),
128
- dstPoints: houghMatches.map((m) => [m.querypoint.x, m.querypoint.y]),
129
- keyframe: { width: keyframe.w || keyframe.width, height: keyframe.h || keyframe.height },
130
- });
131
- if (H === null) {
132
- return { debugExtra };
133
- }
134
- const inlierMatches = _findInlierMatches({
135
- H,
136
- matches: houghMatches,
137
- threshold: INLIER_THRESHOLD,
138
- });
139
- if (debugMode)
140
- debugExtra.inlierMatches = inlierMatches;
141
- if (inlierMatches.length < MIN_NUM_INLIERS) {
142
- return { debugExtra };
143
- }
144
- if (debugMode && Math.random() < 0.02) {
145
- console.log(`MATCH: Homography success with ${inlierMatches.length} inliers`);
146
- }
147
- const HInv = matrixInverse33(H, 0.00001);
148
- const dThreshold2 = 100;
149
- const matches2 = [];
150
- const hi00 = HInv[0], hi01 = HInv[1], hi02 = HInv[2];
151
- const hi10 = HInv[3], hi11 = HInv[4], hi12 = HInv[5];
152
- const hi20 = HInv[6], hi21 = HInv[7], hi22 = HInv[8];
153
- for (let j = 0; j < qlen; j++) {
154
- const querypoint = querypoints[j];
155
- const qx = querypoint.x, qy = querypoint.y;
156
- const uz = (qx * hi20) + (qy * hi21) + hi22;
157
- const invZ = 1.0 / uz;
158
- const mapX = ((qx * hi00) + (qy * hi01) + hi02) * invZ;
159
- const mapY = ((qx * hi10) + (qy * hi11) + hi12) * invZ;
160
- let bestIndex = -1;
161
- let bestD1 = Number.MAX_SAFE_INTEGER;
162
- let bestD2 = Number.MAX_SAFE_INTEGER;
163
- const col = querypoint.maxima ? kmax : kmin;
164
- if (!col)
165
- continue;
166
- const cx = col.x, cy = col.y, cd = col.d;
167
- const qDesc = querypoint.descriptors;
168
- // For compact mode: XOR fold query descriptor
169
- const qDescCompact = isCompact && qDesc && qDesc.length >= 2
170
- ? (qDesc[0] ^ qDesc[1]) >>> 0
171
- : 0;
172
- for (let k = 0, clen = cx.length; k < clen; k++) {
173
- const dx = cx[k] - mapX;
174
- const dy = cy[k] - mapY;
175
- const d2 = dx * dx + dy * dy;
176
- if (d2 > dThreshold2)
177
- continue;
178
- let d;
179
- if (isHDC) {
180
- d = popcount32(cd[k] ^ querypoint.hdcSignature);
181
- }
182
- else if (isCompact) {
183
- d = popcount32(cd[k] ^ qDescCompact);
184
- }
185
- else {
186
- d = hammingCompute({ v1: cd, v1Offset: k * descSize, v2: qDesc });
187
- }
188
- if (d < bestD1) {
189
- bestD2 = bestD1;
190
- bestD1 = d;
191
- bestIndex = k;
192
- }
193
- else if (d < bestD2) {
194
- bestD2 = d;
195
- }
196
- }
197
- if (bestIndex !== -1 &&
198
- (bestD2 === Number.MAX_SAFE_INTEGER || (bestD1 / bestD2) < currentRatioThreshold)) {
199
- matches2.push({
200
- querypoint,
201
- keypoint: {
202
- x: col.x[bestIndex],
203
- y: col.y[bestIndex],
204
- angle: col.a[bestIndex],
205
- scale: col.s ? col.s[bestIndex] : keyframe.s
206
- }
207
- });
208
- }
209
- }
210
- if (debugMode)
211
- debugExtra.matches2 = matches2;
212
- const houghMatches2 = computeHoughMatches({
213
- keywidth: keyframe.w || keyframe.width,
214
- keyheight: keyframe.h || keyframe.height,
215
- querywidth,
216
- queryheight,
217
- matches: matches2,
218
- });
219
- if (debugMode)
220
- debugExtra.houghMatches2 = houghMatches2;
221
- const H2 = computeHomography({
222
- srcPoints: houghMatches2.map((m) => [m.keypoint.x, m.keypoint.y]),
223
- dstPoints: houghMatches2.map((m) => [m.querypoint.x, m.querypoint.y]),
224
- keyframe: { width: keyframe.w || keyframe.width, height: keyframe.h || keyframe.height },
225
- });
226
- if (H2 === null)
227
- return { debugExtra };
228
- const inlierMatches2 = _findInlierMatches({
229
- H: H2,
230
- matches: houghMatches2,
231
- threshold: INLIER_THRESHOLD,
232
- });
233
- if (debugMode)
234
- debugExtra.inlierMatches2 = inlierMatches2;
235
- const refinedH = refineWithMorphology({
236
- imageData: rawQuerypoints[0].imageData,
237
- width: querywidth,
238
- height: queryheight,
239
- targetData: { w: keyframe.w || keyframe.width, h: keyframe.h || keyframe.height },
240
- initialH: H2,
241
- iterations: 3
242
- });
243
- return { H: refinedH || H2, matches: inlierMatches2, debugExtra };
244
- };
245
- const _query = ({ node, descriptors, querypoint, queue, keypointIndexes, numPop, isHDC, descSize, isCompact }) => {
246
- const isLeaf = node[0] === 1;
247
- const childrenOrIndices = node[2];
248
- if (isLeaf) {
249
- for (let i = 0; i < childrenOrIndices.length; i++) {
250
- keypointIndexes.push(childrenOrIndices[i]);
251
- }
252
- return;
253
- }
254
- const qDesc = querypoint.descriptors;
255
- // For compact mode: XOR fold query descriptor
256
- const qDescCompact = isCompact && qDesc && qDesc.length >= 2
257
- ? (qDesc[0] ^ qDesc[1]) >>> 0
258
- : 0;
259
- let minD = Number.MAX_SAFE_INTEGER;
260
- const clen = childrenOrIndices.length;
261
- const distances = new Int32Array(clen);
262
- for (let i = 0; i < clen; i++) {
263
- const childNode = childrenOrIndices[i];
264
- const cIdx = childNode[1];
265
- let d;
266
- if (isHDC) {
267
- d = popcount32(descriptors[cIdx] ^ querypoint.hdcSignature);
268
- }
269
- else if (isCompact) {
270
- d = popcount32(descriptors[cIdx] ^ qDescCompact);
271
- }
272
- else {
273
- d = hammingCompute({
274
- v1: descriptors,
275
- v1Offset: cIdx * descSize,
276
- v2: qDesc,
277
- });
278
- }
279
- distances[i] = d;
280
- if (d < minD)
281
- minD = d;
282
- }
283
- for (let i = 0; i < clen; i++) {
284
- const dist = distances[i];
285
- if (dist <= minD) {
286
- _query({ node: childrenOrIndices[i], descriptors, querypoint, queue, keypointIndexes, numPop: numPop + 1, isHDC, descSize, isCompact });
287
- }
288
- else {
289
- queue.push({ node: childrenOrIndices[i], d: dist });
290
- }
291
- }
292
- if (numPop < CLUSTER_MAX_POP && queue.length > 0) {
293
- const { node } = queue.pop();
294
- _query({ node, descriptors, querypoint, queue, keypointIndexes, numPop: numPop + 1, isHDC, descSize, isCompact });
295
- }
296
- };
297
- const _findInlierMatches = (options) => {
298
- const { H, matches, threshold } = options;
299
- const threshold2 = threshold * threshold;
300
- const h00 = H[0], h01 = H[1], h02 = H[2];
301
- const h10 = H[3], h11 = H[4], h12 = H[5];
302
- const h20 = H[6], h21 = H[7], h22 = H[8];
303
- const goodMatches = [];
304
- for (let i = 0; i < matches.length; i++) {
305
- const m = matches[i];
306
- const qp = m.querypoint;
307
- const kp = m.keypoint;
308
- const uz = (kp.x * h20) + (kp.y * h21) + h22;
309
- const invZ = 1.0 / uz;
310
- const mx = ((kp.x * h00) + (kp.y * h01) + h02) * invZ;
311
- const my = ((kp.x * h10) + (kp.y * h11) + h12) * invZ;
312
- const dx = mx - qp.x;
313
- const dy = my - qp.y;
314
- if (dx * dx + dy * dy <= threshold2) {
315
- goodMatches.push(m);
316
- }
317
- }
318
- return goodMatches;
319
- };
320
- const _applyConstellationFilter = (matches) => {
321
- const len = matches.length;
322
- if (len < 3)
323
- return matches;
324
- const pool = matches.slice().sort((a, b) => a.d - b.d).slice(0, 1500);
325
- const RATIO_TOLERANCE = 0.25;
326
- const COSINE_TOLERANCE = 0.2;
327
- const MAX_NEIGHBORS = 6;
328
- const MIN_VERIFICATIONS = 1;
329
- const gridSize = 50;
330
- const grid = new Map();
331
- const getGridKey = (x, y) => `${Math.floor(x / gridSize)},${Math.floor(y / gridSize)}`;
332
- for (let i = 0; i < pool.length; i++) {
333
- const qp = pool[i].querypoint;
334
- const key = getGridKey(qp.x, qp.y);
335
- if (!grid.has(key))
336
- grid.set(key, []);
337
- grid.get(key).push(i);
338
- }
339
- const scores = new Int32Array(pool.length);
340
- for (let i = 0; i < pool.length; i++) {
341
- const m1 = pool[i];
342
- const qp1 = m1.querypoint;
343
- const neighbors = [];
344
- const gx = Math.floor(qp1.x / gridSize);
345
- const gy = Math.floor(qp1.y / gridSize);
346
- for (let dx = -1; dx <= 1; dx++) {
347
- for (let dy = -1; dy <= 1; dy++) {
348
- const cell = grid.get(`${gx + dx},${gy + dy}`);
349
- if (!cell)
350
- continue;
351
- for (const idx of cell) {
352
- if (idx === i)
353
- continue;
354
- const m2 = pool[idx];
355
- const distSq = Math.pow(m2.querypoint.x - qp1.x, 2) + Math.pow(m2.querypoint.y - qp1.y, 2);
356
- neighbors.push({ index: idx, d2: distSq });
357
- }
358
- }
359
- }
360
- }
361
- const filtered = [];
362
- // Dummy logic just to keep the structure
363
- for (let i = 0; i < pool.length; i++) {
364
- filtered.push(pool[i]);
365
- }
366
- return filtered;
367
- };
368
- const _checkTriadConsistency = (m1, m2, m3, ratioTol, cosTol) => {
369
- const q1x = m1.querypoint.x, q1y = m1.querypoint.y;
370
- const q2x = m2.querypoint.x, q2y = m2.querypoint.y;
371
- const q3x = m3.querypoint.x, q3y = m3.querypoint.y;
372
- const v21q = [q2x - q1x, q2y - q1y];
373
- const v31q = [q3x - q1x, q3y - q1y];
374
- const k1x = m1.keypoint.x, k1y = m1.keypoint.y;
375
- const k2x = m2.keypoint.x, k2y = m2.keypoint.y;
376
- const k3x = m3.keypoint.x, k3y = m3.keypoint.y;
377
- const v21k = [k2x - k1x, k2y - k1y];
378
- const v31k = [k3x - k1x, k3y - k1y];
379
- const d21q2 = v21q[0] * v21q[0] + v21q[1] * v21q[1];
380
- const d31q2 = v31q[0] * v31q[0] + v31q[1] * v31q[1];
381
- const d21k2 = v21k[0] * v21k[0] + v21k[1] * v21k[1];
382
- const d31k2 = v31k[0] * v31k[0] + v31k[1] * v31k[1];
383
- if (d31q2 < 1e-4 || d31k2 < 1e-4)
384
- return false;
385
- const ratioQ = d21q2 / d31q2;
386
- const ratioK = d21k2 / d31k2;
387
- if (Math.abs(ratioQ - ratioK) / (ratioK + 1e-6) > ratioTol * 2)
388
- return false;
389
- const dotQ = v21q[0] * v31q[0] + v21q[1] * v31q[1];
390
- const cosQ = dotQ / Math.sqrt(d21q2 * d31q2);
391
- const dotK = v21k[0] * v31k[0] + v21k[1] * v31k[1];
392
- const cosK = dotK / Math.sqrt(d21k2 * d31k2);
393
- if (Math.abs(cosQ - cosK) > cosTol)
394
- return false;
395
- const crossQ = v21q[0] * v31q[1] - v21q[1] * v31q[0];
396
- const crossK = v21k[0] * v31k[1] - v21k[1] * v31k[0];
397
- if ((crossQ > 0) !== (crossK > 0))
398
- return false;
399
- return true;
400
- };
401
- export { match };
1
+ import e from"tinyqueue";import{compute as t}from"./hamming-distance.js";import{computeHoughMatches as o}from"./hough.js";import{computeHomography as n}from"./ransacHomography.js";import{multiplyPointHomographyInhomogenous as s,matrixInverse33 as i}from"../utils/geometry.js";import{refineWithMorphology as r}from"../estimation/morph-refinement.js";import{popcount32 as h}from"./hierarchical-clustering.js";import{AR_CONFIG as c}from"../constants.js";const u=c.INLIER_THRESHOLD,a=c.MIN_NUM_INLIERS,p=c.CLUSTER_MAX_POP,m=c.HAMMING_THRESHOLD,d=c.HDC_RATIO_THRESHOLD,l=c.MAX_MATCH_QUERY_POINTS,y=({keyframe:s,querypoints:c,querywidth:p,queryheight:y,debugMode:E,expectedScale:x})=>{let _={};const q=c.length>l?[...c].sort((e,t)=>(t.score||t.response||0)-(e.score||e.response||0)).slice(0,l):c,w=[],M=q.length,A=s.max,H=s.min,S=!0===s.hdc||A&&1===A.hdc,I=A&&1===A.compact||H&&1===H.compact,N=S||I?1:2,k=S?d:m;for(let o=0;o<M;o++){const n=q[o],i=n.maxima?A:H;if(!i||0===i.x.length)continue;const r=i.t,c=[],u=new e([],(e,t)=>e.d-t.d);g({node:r,descriptors:i.d,querypoint:n,queue:u,keypointIndexes:c,numPop:0,isHDC:S,descSize:N,isCompact:I});let a=-1,p=Number.MAX_SAFE_INTEGER,m=Number.MAX_SAFE_INTEGER;const d=n.descriptors,l=i.d,y=I&&d&&d.length>=2?(d[0]^d[1])>>>0:0;for(let e=0;e<c.length;e++){const o=c[e];if(void 0!==x&&i.s){const e=i.s[o],t=(n.scale||1)/x;if(e<.4*t||e>2.5*t)continue}let s;s=S?h(l[o]^n.hdcSignature):I?h(l[o]^y):t({v1:l,v1Offset:o*N,v2:d}),s<p?(m=p,p=s,a=o):s<m&&(m=s)}-1!==a&&(m===Number.MAX_SAFE_INTEGER||p/m<k)&&w.push({querypoint:n,keypoint:{x:i.x[a],y:i.y[a],angle:i.a[a],scale:i.s?i.s[a]:s.s},d:p})}if(w.length<a)return{debugExtra:_};const R=w;E&&(_.constellationMatches=R);const T=o({keywidth:s.w||s.width,keyheight:s.h||s.height,querywidth:p,queryheight:y,matches:R});if(E&&(_.houghMatches=T),T.length<a)return{debugExtra:_};const 
b=n({srcPoints:T.map(e=>[e.keypoint.x,e.keypoint.y]),dstPoints:T.map(e=>[e.querypoint.x,e.querypoint.y]),keyframe:{width:s.w||s.width,height:s.h||s.height}});if(null===b)return{debugExtra:_};const C=f({H:b,matches:T,threshold:u});if(E&&(_.inlierMatches=C),C.length<a)return{debugExtra:_};E&&Math.random()<.02&&console.log(`MATCH: Homography success with ${C.length} inliers`);const D=i(b,1e-5),P=[],v=D[0],O=D[1],X=D[2],G=D[3],j=D[4],F=D[5],L=D[6],z=D[7],U=D[8];for(let e=0;e<M;e++){const o=q[e],n=o.x,i=o.y,r=1/(n*L+i*z+U),c=(n*v+i*O+X)*r,u=(n*G+i*j+F)*r;let a=-1,p=Number.MAX_SAFE_INTEGER,m=Number.MAX_SAFE_INTEGER;const d=o.maxima?A:H;if(!d)continue;const l=d.x,y=d.y,g=d.d,f=o.descriptors,E=I&&f&&f.length>=2?(f[0]^f[1])>>>0:0;for(let e=0,n=l.length;e<n;e++){const n=l[e]-c,s=y[e]-u;if(n*n+s*s>100)continue;let i;i=S?h(g[e]^o.hdcSignature):I?h(g[e]^E):t({v1:g,v1Offset:e*N,v2:f}),i<p?(m=p,p=i,a=e):i<m&&(m=i)}-1!==a&&(m===Number.MAX_SAFE_INTEGER||p/m<k)&&P.push({querypoint:o,keypoint:{x:d.x[a],y:d.y[a],angle:d.a[a],scale:d.s?d.s[a]:s.s}})}E&&(_.matches2=P);const Q=o({keywidth:s.w||s.width,keyheight:s.h||s.height,querywidth:p,queryheight:y,matches:P});E&&(_.houghMatches2=Q);const Y=n({srcPoints:Q.map(e=>[e.keypoint.x,e.keypoint.y]),dstPoints:Q.map(e=>[e.querypoint.x,e.querypoint.y]),keyframe:{width:s.w||s.width,height:s.h||s.height}});if(null===Y)return{debugExtra:_};const $=f({H:Y,matches:Q,threshold:u});return E&&(_.inlierMatches2=$),{H:r({imageData:c[0].imageData,width:p,height:y,targetData:{w:s.w||s.width,h:s.h||s.height},initialH:Y,iterations:3})||Y,matches:$,debugExtra:_}},g=({node:e,descriptors:o,querypoint:n,queue:s,keypointIndexes:i,numPop:r,isHDC:c,descSize:u,isCompact:a})=>{const m=1===e[0],d=e[2];if(m){for(let e=0;e<d.length;e++)i.push(d[e]);return}const l=n.descriptors,y=a&&l&&l.length>=2?(l[0]^l[1])>>>0:0;let f=Number.MAX_SAFE_INTEGER;const E=d.length,x=new Int32Array(E);for(let e=0;e<E;e++){const s=d[e][1];let 
i;i=c?h(o[s]^n.hdcSignature):a?h(o[s]^y):t({v1:o,v1Offset:s*u,v2:l}),x[e]=i,i<f&&(f=i)}for(let e=0;e<E;e++){const t=x[e];t<=f?g({node:d[e],descriptors:o,querypoint:n,queue:s,keypointIndexes:i,numPop:r+1,isHDC:c,descSize:u,isCompact:a}):s.push({node:d[e],d:t})}if(r<p&&s.length>0){const{node:e}=s.pop();g({node:e,descriptors:o,querypoint:n,queue:s,keypointIndexes:i,numPop:r+1,isHDC:c,descSize:u,isCompact:a})}},f=e=>{const{H:t,matches:o,threshold:n}=e,s=n*n,i=t[0],r=t[1],h=t[2],c=t[3],u=t[4],a=t[5],p=t[6],m=t[7],d=t[8],l=[];for(let e=0;e<o.length;e++){const t=o[e],n=t.querypoint,y=t.keypoint,g=1/(y.x*p+y.y*m+d),f=(y.x*i+y.y*r+h)*g,E=(y.x*c+y.y*u+a)*g,x=f-n.x,_=E-n.y;x*x+_*_<=s&&l.push(t)}return l};export{y as match};
@@ -1,132 +1 @@
1
- import { createRandomizer } from "../utils/randomizer.js";
2
- import { quadrilateralConvex, matrixInverse33, smallestTriangleArea, multiplyPointHomographyInhomogenous, checkThreePointsConsistent, checkFourPointsConsistent, } from "../utils/geometry.js";
3
- import { solveHomography } from "../utils/homography.js";
4
- const CAUCHY_SCALE = 0.01;
5
- const CHUNK_SIZE = 10;
6
- const NUM_HYPOTHESES = 100;
7
- const NUM_HYPOTHESES_QUICK = 50;
8
- // Using RANSAC to estimate homography
9
- const computeHomography = (options) => {
10
- const { srcPoints, dstPoints, keyframe, quickMode } = options;
11
- // testPoints are the four corners of the keyframe
12
- const testPoints = [
13
- [0, 0],
14
- [keyframe.width, 0],
15
- [keyframe.width, keyframe.height],
16
- [0, keyframe.height],
17
- ];
18
- const sampleSize = 4; // use four points to compute homography
19
- if (srcPoints.length < sampleSize)
20
- return null;
21
- const scale = CAUCHY_SCALE;
22
- const oneOverScale2 = 1.0 / (scale * scale);
23
- const chuckSize = Math.min(CHUNK_SIZE, srcPoints.length);
24
- const randomizer = createRandomizer();
25
- const perm = [];
26
- for (let i = 0; i < srcPoints.length; i++) {
27
- perm[i] = i;
28
- }
29
- randomizer.arrayShuffle({ arr: perm, sampleSize: perm.length });
30
- const numHypothesis = quickMode ? NUM_HYPOTHESES_QUICK : NUM_HYPOTHESES;
31
- const maxTrials = numHypothesis * 2;
32
- // build numerous hypotheses by randomly drawing four points
33
- // TODO: optimize: if number of points is less than certain number, can brute force all combinations
34
- let trial = 0;
35
- const Hs = [];
36
- while (trial < maxTrials && Hs.length < numHypothesis) {
37
- trial += 1;
38
- randomizer.arrayShuffle({ arr: perm, sampleSize: sampleSize });
39
- // their relative positions match each other
40
- if (!checkFourPointsConsistent(srcPoints[perm[0]], srcPoints[perm[1]], srcPoints[perm[2]], srcPoints[perm[3]], dstPoints[perm[0]], dstPoints[perm[1]], dstPoints[perm[2]], dstPoints[perm[3]])) {
41
- continue;
42
- }
43
- const H = solveHomography([srcPoints[perm[0]], srcPoints[perm[1]], srcPoints[perm[2]], srcPoints[perm[3]]], [dstPoints[perm[0]], dstPoints[perm[1]], dstPoints[perm[2]], dstPoints[perm[3]]]);
44
- if (H === null)
45
- continue;
46
- if (!_checkHomographyPointsGeometricallyConsistent({ H, testPoints })) {
47
- continue;
48
- }
49
- Hs.push(H);
50
- }
51
- if (Hs.length === 0)
52
- return null;
53
- // pick the best hypothesis
54
- const hypotheses = [];
55
- for (let i = 0; i < Hs.length; i++) {
56
- hypotheses.push({
57
- H: Hs[i],
58
- cost: 0,
59
- });
60
- }
61
- let curChuckSize = chuckSize;
62
- for (let i = 0; i < srcPoints.length && hypotheses.length > 2; i += curChuckSize) {
63
- curChuckSize = Math.min(chuckSize, srcPoints.length - i);
64
- let chuckEnd = i + curChuckSize;
65
- for (let j = 0; j < hypotheses.length; j++) {
66
- for (let k = i; k < chuckEnd; k++) {
67
- const cost = _cauchyProjectiveReprojectionCost({
68
- H: hypotheses[j].H,
69
- srcPoint: srcPoints[k],
70
- dstPoint: dstPoints[k],
71
- oneOverScale2,
72
- });
73
- hypotheses[j].cost += cost;
74
- }
75
- }
76
- hypotheses.sort((h1, h2) => {
77
- return h1.cost - h2.cost;
78
- });
79
- hypotheses.splice(-Math.floor((hypotheses.length + 1) / 2)); // keep the best half
80
- }
81
- let finalH = null;
82
- for (let i = 0; i < hypotheses.length; i++) {
83
- const H = _normalizeHomography({ inH: hypotheses[i].H });
84
- if (_checkHeuristics({ H: H, testPoints, keyframe })) {
85
- finalH = H;
86
- break;
87
- }
88
- }
89
- return finalH;
90
- };
91
- const _checkHeuristics = ({ H, testPoints, keyframe }) => {
92
- const mp = [];
93
- for (let i = 0; i < testPoints.length; i++) {
94
- // 4 test points, corner of keyframe
95
- mp.push(multiplyPointHomographyInhomogenous(testPoints[i], H));
96
- }
97
- const smallArea = smallestTriangleArea(mp[0], mp[1], mp[2], mp[3]);
98
- if (smallArea < keyframe.width * keyframe.height * 0.0001)
99
- return false;
100
- if (!quadrilateralConvex(mp[0], mp[1], mp[2], mp[3]))
101
- return false;
102
- return true;
103
- };
104
- const _normalizeHomography = ({ inH }) => {
105
- const oneOver = 1.0 / inH[8];
106
- const H = [];
107
- for (let i = 0; i < 8; i++) {
108
- H[i] = inH[i] * oneOver;
109
- }
110
- H[8] = 1.0;
111
- return H;
112
- };
113
- const _cauchyProjectiveReprojectionCost = ({ H, srcPoint, dstPoint, oneOverScale2 }) => {
114
- const x = multiplyPointHomographyInhomogenous(srcPoint, H);
115
- const f = [x[0] - dstPoint[0], x[1] - dstPoint[1]];
116
- return Math.log(1 + (f[0] * f[0] + f[1] * f[1]) * oneOverScale2);
117
- };
118
- const _checkHomographyPointsGeometricallyConsistent = ({ H, testPoints }) => {
119
- const mappedPoints = [];
120
- for (let i = 0; i < testPoints.length; i++) {
121
- mappedPoints[i] = multiplyPointHomographyInhomogenous(testPoints[i], H);
122
- }
123
- for (let i = 0; i < testPoints.length; i++) {
124
- const i1 = i;
125
- const i2 = (i + 1) % testPoints.length;
126
- const i3 = (i + 2) % testPoints.length;
127
- if (!checkThreePointsConsistent(testPoints[i1], testPoints[i2], testPoints[i3], mappedPoints[i1], mappedPoints[i2], mappedPoints[i3]))
128
- return false;
129
- }
130
- return true;
131
- };
132
- export { computeHomography };
1
+ import{createRandomizer as t}from"../utils/randomizer.js";import{quadrilateralConvex as e,matrixInverse33 as n,smallestTriangleArea as o,multiplyPointHomographyInhomogenous as r,checkThreePointsConsistent as l,checkFourPointsConsistent as s}from"../utils/geometry.js";import{solveHomography as h}from"../utils/homography.js";const i=e=>{const{srcPoints:n,dstPoints:o,keyframe:r,quickMode:l}=e,i=[[0,0],[r.width,0],[r.width,r.height],[0,r.height]];if(n.length<4)return null;const a=Math.min(10,n.length),m=t(),p=[];for(let t=0;t<n.length;t++)p[t]=t;m.arrayShuffle({arr:p,sampleSize:p.length});const H=l?50:100,P=2*H;let d=0;const y=[];for(;d<P&&y.length<H;){if(d+=1,m.arrayShuffle({arr:p,sampleSize:4}),!s(n[p[0]],n[p[1]],n[p[2]],n[p[3]],o[p[0]],o[p[1]],o[p[2]],o[p[3]]))continue;const t=h([n[p[0]],n[p[1]],n[p[2]],n[p[3]]],[o[p[0]],o[p[1]],o[p[2]],o[p[3]]]);null!==t&&u({H:t,testPoints:i})&&y.push(t)}if(0===y.length)return null;const S=[];for(let t=0;t<y.length;t++)S.push({H:y[t],cost:0});let k=a;for(let t=0;t<n.length&&S.length>2;t+=k){k=Math.min(a,n.length-t);let e=t+k;for(let r=0;r<S.length;r++)for(let l=t;l<e;l++){const t=g({H:S[r].H,srcPoint:n[l],dstPoint:o[l],oneOverScale2:1e4});S[r].cost+=t}S.sort((t,e)=>t.cost-e.cost),S.splice(-Math.floor((S.length+1)/2))}let M=null;for(let t=0;t<S.length;t++){const e=c({inH:S[t].H});if(f({H:e,testPoints:i,keyframe:r})){M=e;break}}return M},f=({H:t,testPoints:n,keyframe:l})=>{const s=[];for(let e=0;e<n.length;e++)s.push(r(n[e],t));return!(o(s[0],s[1],s[2],s[3])<l.width*l.height*1e-4||!e(s[0],s[1],s[2],s[3]))},c=({inH:t})=>{const e=1/t[8],n=[];for(let o=0;o<8;o++)n[o]=t[o]*e;return n[8]=1,n},g=({H:t,srcPoint:e,dstPoint:n,oneOverScale2:o})=>{const l=r(e,t),s=[l[0]-n[0],l[1]-n[1]];return Math.log(1+(s[0]*s[0]+s[1]*s[1])*o)},u=({H:t,testPoints:e})=>{const n=[];for(let o=0;o<e.length;o++)n[o]=r(e[o],t);for(let t=0;t<e.length;t++){const o=t,r=(t+1)%e.length,s=(t+2)%e.length;if(!l(e[o],e[r],e[s],n[o],n[r],n[s]))return!1}return!0};export{i as 
computeHomography};