@srsergio/taptapp-ar 1.0.89 → 1.0.90

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +8 -6
  2. package/dist/compiler/offline-compiler.js +16 -4
  3. package/dist/core/detector/detector-lite.d.ts +1 -0
  4. package/dist/core/detector/detector-lite.js +31 -15
  5. package/dist/core/estimation/estimate.d.ts +7 -0
  6. package/dist/core/estimation/estimate.js +13 -48
  7. package/dist/core/estimation/morph-refinement.d.ts +8 -0
  8. package/dist/core/estimation/morph-refinement.js +116 -0
  9. package/dist/core/estimation/pnp-solver.d.ts +5 -0
  10. package/dist/core/estimation/pnp-solver.js +109 -0
  11. package/dist/core/input-loader.js +19 -2
  12. package/dist/core/matching/hdc.d.ts +27 -0
  13. package/dist/core/matching/hdc.js +102 -0
  14. package/dist/core/matching/hierarchical-clustering.d.ts +1 -3
  15. package/dist/core/matching/hierarchical-clustering.js +30 -29
  16. package/dist/core/matching/hough.js +12 -11
  17. package/dist/core/matching/matcher.d.ts +4 -0
  18. package/dist/core/matching/matcher.js +23 -8
  19. package/dist/core/matching/matching.d.ts +22 -2
  20. package/dist/core/matching/matching.js +169 -39
  21. package/dist/core/matching/ransacHomography.js +3 -6
  22. package/dist/core/protocol.d.ts +5 -3
  23. package/dist/core/protocol.js +28 -6
  24. package/dist/runtime/controller.js +19 -14
  25. package/dist/runtime/controller.worker.js +4 -1
  26. package/package.json +3 -2
  27. package/src/compiler/offline-compiler.ts +17 -4
  28. package/src/core/detector/detector-lite.js +32 -15
  29. package/src/core/estimation/estimate.js +14 -63
  30. package/src/core/estimation/morph-refinement.js +139 -0
  31. package/src/core/estimation/pnp-solver.js +131 -0
  32. package/src/core/input-loader.js +21 -2
  33. package/src/core/matching/hdc.ts +117 -0
  34. package/src/core/matching/hierarchical-clustering.js +30 -29
  35. package/src/core/matching/hough.js +12 -11
  36. package/src/core/matching/matcher.js +27 -9
  37. package/src/core/matching/matching.js +192 -39
  38. package/src/core/matching/ransacHomography.js +3 -6
  39. package/src/core/protocol.ts +26 -6
  40. package/src/runtime/controller.ts +20 -14
  41. package/src/runtime/controller.worker.js +4 -1
/**
 * Hyperdimensional Computing (HDC) Core for AR
 *
 * Provides ultra-fast, ultra-compressed feature matching using
 * High-Dimensional Random Vectors.
 */
// Width of every hypervector, in bits.
export const HDC_DIMENSION = 1024;
// The same width expressed in 32-bit storage words (1024 / 32 = 32).
export const HDC_WORDS = HDC_DIMENSION >>> 5;
/**
 * Deterministic pseudo-random number generator (linear congruential
 * recurrence with the classic Numerical-Recipes constants).
 *
 * The same seed always reproduces the same sequence, which keeps the
 * generated hypervector basis identical between compile time and runtime.
 */
class PRNG {
    state;
    constructor(seed) {
        this.state = seed;
    }
    /** Advances the generator; returns a float in [0, 1]. */
    next() {
        // Math.imul(...) + c, reduced with >>> 0, equals the original
        // (state * 1664525 + 1013904223) >>> 0 modulo 2^32.
        const advanced = Math.imul(this.state, 1664525) + 1013904223;
        this.state = advanced >>> 0;
        return this.state / 0xFFFFFFFF;
    }
}
/**
 * Generates a deterministic basis of hypervectors.
 *
 * @param seed  PRNG seed; identical seeds reproduce the identical basis.
 * @param count number of basis hypervectors to create.
 * @returns array of `count` Uint32Array hypervectors, HDC_WORDS words each,
 *          filled with uniformly random 32-bit words.
 */
export function generateBasis(seed, count) {
    const prng = new PRNG(seed);
    return Array.from({ length: count }, () => {
        const hv = new Uint32Array(HDC_WORDS);
        for (let word = 0; word < HDC_WORDS; word++) {
            hv[word] = (prng.next() * 0xFFFFFFFF) >>> 0;
        }
        return hv;
    });
}
/**
 * Projects a 64-bit binary descriptor into the hyperdimensional space via a
 * simplified random projection (an LSH-style scheme): each output bit "fires"
 * when the descriptor overlaps one basis vector strongly enough.
 *
 * @param desc  Uint32Array(2) carrying the 64 descriptor bits.
 * @param basis hypervector basis from generateBasis(); cycled via modulo when
 *              it has fewer than HDC_DIMENSION entries.
 * @returns Uint32Array(HDC_WORDS) hypervector.
 */
export function projectDescriptor(desc, basis) {
    const projected = new Uint32Array(HDC_WORDS);
    for (let bit = 0; bit < HDC_DIMENSION; bit++) {
        // Count how many of the 64 descriptor bits this basis vector shares
        // (only the first two words of the basis vector participate).
        const basisVector = basis[bit % basis.length];
        const overlap = popcount(desc[0] & basisVector[0]) +
            popcount(desc[1] & basisVector[1]);
        // Fire the output bit when the overlap clears the voting threshold
        // (16 — presumably about the chance-level overlap for a half-set
        // descriptor; tuning constant, not derived here).
        if (overlap > 16) {
            projected[bit >>> 5] |= 1 << (bit & 31);
        }
    }
    return projected;
}
/**
 * Compresses a hypervector into an "Ultra-Short Signature" — a single 32-bit
 * FNV-1a hash folded over the vector's words. This allows storing 1000 points
 * in just 4KB of descriptors.
 *
 * @param hv Uint32Array hypervector (any length; empty yields the offset basis).
 * @returns unsigned 32-bit signature.
 */
export function compressToSignature(hv) {
    const FNV_PRIME = 0x01000193;
    let hash = 0x811c9dc5; // FNV-1a offset basis
    for (const word of hv) {
        hash = Math.imul(hash ^ word, FNV_PRIME);
    }
    return hash >>> 0;
}
/**
 * Counts the set bits of a 32-bit integer (branch-free SWAR popcount).
 *
 * @param n value treated as a 32-bit integer (bitwise-op result).
 * @returns number of 1-bits, 0..32.
 */
function popcount(n) {
    let bits = n - ((n >> 1) & 0x55555555);                  // 2-bit sums
    bits = (bits & 0x33333333) + ((bits >> 2) & 0x33333333); // 4-bit sums
    bits = (bits + (bits >> 4)) & 0x0F0F0F0F;                // per-byte sums
    return (bits * 0x01010101) >> 24;                        // total in top byte
}
/**
 * Bundles multiple point hypervectors into one global hypervector (the
 * "Image DNA") by per-bit majority vote: bit i of the result is set when
 * strictly more than half of the inputs have bit i set. This allows checking
 * if an image is present with ONE vector comparison.
 *
 * NOTE(review): vote counters are Uint16, so more than 65535 input vectors
 * would overflow them — confirm point counts stay below that.
 *
 * @param hvs array of Uint32Array hypervectors (HDC_WORDS words each).
 * @returns Uint32Array(HDC_WORDS) majority-vote hypervector.
 */
export function bundle(hvs) {
    const votes = new Uint16Array(HDC_DIMENSION);
    for (const hv of hvs) {
        for (let bit = 0; bit < HDC_DIMENSION; bit++) {
            if (hv[bit >>> 5] & (1 << (bit & 31))) {
                votes[bit]++;
            }
        }
    }
    const majority = hvs.length / 2;
    const global = new Uint32Array(HDC_WORDS);
    for (let bit = 0; bit < HDC_DIMENSION; bit++) {
        if (votes[bit] > majority) {
            global[bit >>> 5] |= 1 << (bit & 31);
        }
    }
    return global;
}
@@ -1,6 +1,4 @@
1
- /**
2
- * Build hierarchical clusters
3
- */
1
+ export function popcount32(n: any): number;
4
2
  export function build({ points }: {
5
3
  points: any;
6
4
  }): {
@@ -1,18 +1,15 @@
1
1
  import { compute64 as hammingCompute64 } from "./hamming-distance.js";
2
2
  import { createRandomizer } from "../utils/randomizer.js";
3
- const MIN_FEATURE_PER_NODE = 32; // Increased from 16 for speed
4
- const NUM_ASSIGNMENT_HYPOTHESES = 12; // Reduced from 16 for speed
3
+ const MIN_FEATURE_PER_NODE = 32;
4
+ const NUM_ASSIGNMENT_HYPOTHESES = 12;
5
5
  const NUM_CENTERS = 8;
6
- /**
7
- * 🚀 Moonshot Optimized K-Medoids
8
- *
9
- * Major Optimizations:
10
- * 1. Flattened Memory: Operates on a single Uint32Array block instead of objects.
11
- * 2. Zero Property Access: Avoids .descriptors lookup in the tightest loop.
12
- * 3. Cache-Friendly: Accesses contiguous descriptor data.
13
- */
6
+ export function popcount32(n) {
7
+ n = n - ((n >> 1) & 0x55555555);
8
+ n = (n & 0x33333333) + ((n >> 2) & 0x33333333);
9
+ return (((n + (n >> 4)) & 0x0F0F0F0F) * 0x01010101) >> 24;
10
+ }
14
11
  const _computeKMedoids = (options) => {
15
- const { descriptors, pointIndexes, randomizer } = options;
12
+ const { descriptors, pointIndexes, randomizer, useHDC } = options;
16
13
  const numPointIndexes = pointIndexes.length;
17
14
  const randomPointIndexes = new Int32Array(numPointIndexes);
18
15
  for (let i = 0; i < numPointIndexes; i++) {
@@ -20,11 +17,9 @@ const _computeKMedoids = (options) => {
20
17
  }
21
18
  let bestSumD = Number.MAX_SAFE_INTEGER;
22
19
  let bestAssignment = null;
23
- // Pre-fetch centers indices to avoid nested index lookups
24
20
  const centerPointIndices = new Int32Array(NUM_CENTERS);
25
21
  for (let i = 0; i < NUM_ASSIGNMENT_HYPOTHESES; i++) {
26
22
  randomizer.arrayShuffle({ arr: randomPointIndexes, sampleSize: NUM_CENTERS });
27
- // Set centers for this hypothesis
28
23
  for (let k = 0; k < NUM_CENTERS; k++) {
29
24
  centerPointIndices[k] = pointIndexes[randomPointIndexes[k]];
30
25
  }
@@ -32,14 +27,17 @@ const _computeKMedoids = (options) => {
32
27
  const currentAssignment = new Int32Array(numPointIndexes);
33
28
  for (let j = 0; j < numPointIndexes; j++) {
34
29
  const pIdx = pointIndexes[j];
35
- const pOffset = pIdx * 2;
36
- let bestD = 255; // Max possible Hamming for 64-bit is 64, but let's be safe
30
+ let bestD = 255;
37
31
  let bestCenterIdx = -1;
38
32
  for (let k = 0; k < NUM_CENTERS; k++) {
39
33
  const cIdx = centerPointIndices[k];
40
- const cOffset = cIdx * 2;
41
- // DIRECT CALL TO INLINED HAMMING
42
- const d = hammingCompute64(descriptors, pOffset, descriptors, cOffset);
34
+ let d;
35
+ if (useHDC) {
36
+ d = popcount32(descriptors[pIdx] ^ descriptors[cIdx]);
37
+ }
38
+ else {
39
+ d = hammingCompute64(descriptors, pIdx * 2, descriptors, cIdx * 2);
40
+ }
43
41
  if (d < bestD) {
44
42
  bestCenterIdx = randomPointIndexes[k];
45
43
  bestD = d;
@@ -55,20 +53,21 @@ const _computeKMedoids = (options) => {
55
53
  }
56
54
  return bestAssignment;
57
55
  };
58
- /**
59
- * Build hierarchical clusters
60
- */
61
56
  const build = ({ points }) => {
62
57
  const numPoints = points.length;
63
58
  if (numPoints === 0)
64
59
  return { rootNode: { leaf: true, pointIndexes: [], centerPointIndex: null } };
65
- // 🚀 MOONSHOT: Flatten all descriptors into a single Uint32Array
66
- // This is the key to sub-second performance.
67
- const descriptors = new Uint32Array(numPoints * 2);
60
+ const useHDC = points[0] && points[0].hdcSignature !== undefined;
61
+ const descriptors = new Uint32Array(useHDC ? numPoints : numPoints * 2);
68
62
  for (let i = 0; i < numPoints; i++) {
69
- const d = points[i].descriptors;
70
- descriptors[i * 2] = d[0];
71
- descriptors[i * 2 + 1] = d[1];
63
+ if (useHDC) {
64
+ descriptors[i] = points[i].hdcSignature;
65
+ }
66
+ else {
67
+ const d = points[i].descriptors;
68
+ descriptors[i * 2] = d[0];
69
+ descriptors[i * 2 + 1] = d[1];
70
+ }
72
71
  }
73
72
  const pointIndexes = new Int32Array(numPoints);
74
73
  for (let i = 0; i < numPoints; i++) {
@@ -80,11 +79,12 @@ const build = ({ points }) => {
80
79
  pointIndexes,
81
80
  centerPointIndex: null,
82
81
  randomizer,
82
+ useHDC
83
83
  });
84
84
  return { rootNode };
85
85
  };
86
86
  const _build = (options) => {
87
- const { descriptors, pointIndexes, centerPointIndex, randomizer } = options;
87
+ const { descriptors, pointIndexes, centerPointIndex, randomizer, useHDC } = options;
88
88
  const numPoints = pointIndexes.length;
89
89
  let isLeaf = false;
90
90
  if (numPoints <= NUM_CENTERS || numPoints <= MIN_FEATURE_PER_NODE) {
@@ -92,7 +92,7 @@ const _build = (options) => {
92
92
  }
93
93
  const clusters = new Map();
94
94
  if (!isLeaf) {
95
- const assignment = _computeKMedoids({ descriptors, pointIndexes, randomizer });
95
+ const assignment = _computeKMedoids({ descriptors, pointIndexes, randomizer, useHDC });
96
96
  for (let i = 0; i < assignment.length; i++) {
97
97
  const centerIdx = pointIndexes[assignment[i]];
98
98
  let cluster = clusters.get(centerIdx);
@@ -122,6 +122,7 @@ const _build = (options) => {
122
122
  pointIndexes: new Int32Array(clusterPoints),
123
123
  centerPointIndex: cIdx,
124
124
  randomizer,
125
+ useHDC
125
126
  }));
126
127
  }
127
128
  return node;
@@ -7,9 +7,9 @@ const computeHoughMatches = (options) => {
7
7
  const maxY = queryheight * 1.2;
8
8
  const minY = -maxY;
9
9
  const numAngleBins = 12;
10
- const numScaleBins = 10;
11
- const minScale = -1;
12
- const maxScale = 1;
10
+ const numScaleBins = 12; // 🚀 Increased bins
11
+ const minScale = -2; // 📐 Support 1% scale (10^-2)
12
+ const maxScale = 1; // 📐 Support 1000% scale (10^1)
13
13
  const scaleK = 10.0;
14
14
  const scaleOneOverLogK = 1.0 / Math.log(scaleK);
15
15
  const maxDim = Math.max(keywidth, keyheight);
@@ -31,9 +31,9 @@ const computeHoughMatches = (options) => {
31
31
  return a1 - a2;
32
32
  });
33
33
  const medianProjectedDim = projectedDims[Math.floor(projectedDims.length / 2) - (projectedDims.length % 2 == 0 ? 1 : 0) - 1];
34
- const binSize = 0.25 * medianProjectedDim;
35
- const numXBins = Math.max(5, Math.ceil((maxX - minX) / binSize));
36
- const numYBins = Math.max(5, Math.ceil((maxY - minY) / binSize));
34
+ const binSize = Math.max(20, 0.25 * medianProjectedDim); // 🚀 Ensure bins aren't too small for noise
35
+ const numXBins = Math.max(5, Math.min(40, Math.ceil((maxX - minX) / binSize))); // 🎯 Cap bins to keep voting dense
36
+ const numYBins = Math.max(5, Math.min(40, Math.ceil((maxY - minY) / binSize)));
37
37
  const numXYBins = numXBins * numYBins;
38
38
  const numXYAngleBins = numXYBins * numAngleBins;
39
39
  // do voting
@@ -120,25 +120,26 @@ const computeHoughMatches = (options) => {
120
120
  const binY = Math.floor((((maxVoteIndex - binX) % numXYAngleBins) % numXYBins) / numXBins);
121
121
  const binAngle = Math.floor(((maxVoteIndex - binX - binY * numXBins) % numXYAngleBins) / numXYBins);
122
122
  const binScale = Math.floor((maxVoteIndex - binX - binY * numXBins - binAngle * numXYBins) / numXYAngleBins);
123
- //console.log("hough voted: ", {binX, binY, binAngle, binScale, maxVoteIndex});
123
+ // console.log(`[Hough] Peak votes: ${maxVotes} out of ${matches.length} matches.`);
124
124
  const houghMatches = [];
125
+ const relaxedDelta = 2.0; // 🚀 Increased for better cluster robustness
125
126
  for (let i = 0; i < matches.length; i++) {
126
127
  if (!querypointValids[i])
127
128
  continue;
128
129
  const queryBins = querypointBinLocations[i];
129
130
  // compute bin difference
130
131
  const distBinX = Math.abs(queryBins.binX - (binX + 0.5));
131
- if (distBinX >= kHoughBinDelta)
132
+ if (distBinX >= relaxedDelta)
132
133
  continue;
133
134
  const distBinY = Math.abs(queryBins.binY - (binY + 0.5));
134
- if (distBinY >= kHoughBinDelta)
135
+ if (distBinY >= relaxedDelta)
135
136
  continue;
136
137
  const distBinScale = Math.abs(queryBins.binScale - (binScale + 0.5));
137
- if (distBinScale >= kHoughBinDelta)
138
+ if (distBinScale >= relaxedDelta)
138
139
  continue;
139
140
  const temp = Math.abs(queryBins.binAngle - (binAngle + 0.5));
140
141
  const distBinAngle = Math.min(temp, numAngleBins - temp);
141
- if (distBinAngle >= kHoughBinDelta)
142
+ if (distBinAngle >= relaxedDelta)
142
143
  continue;
143
144
  houghMatches.push(matches[i]);
144
145
  }
@@ -4,12 +4,14 @@ export class Matcher {
4
4
  queryHeight: any;
5
5
  debugMode: boolean;
6
6
  matchDetection(keyframes: any, featurePoints: any): {
7
+ targetIndex: number;
7
8
  keyframeIndex: number;
8
9
  debugExtra: {
9
10
  frames: never[];
10
11
  };
11
12
  screenCoords?: undefined;
12
13
  worldCoords?: undefined;
14
+ H?: undefined;
13
15
  } | {
14
16
  screenCoords: {
15
17
  x: any;
@@ -20,7 +22,9 @@ export class Matcher {
20
22
  y: number;
21
23
  z: number;
22
24
  }[];
25
+ targetIndex: number;
23
26
  keyframeIndex: number;
27
+ H: any[];
24
28
  debugExtra: {
25
29
  frames: never[];
26
30
  };
@@ -8,27 +8,35 @@ class Matcher {
8
8
  matchDetection(keyframes, featurePoints) {
9
9
  let debugExtra = { frames: [] };
10
10
  let bestResult = null;
11
- for (let i = 0; i < keyframes.length; i++) {
11
+ // keyframes is actually the matchingData array for a single target
12
+ if (!keyframes || !Array.isArray(keyframes)) {
13
+ return { targetIndex: -1, keyframeIndex: -1, debugExtra };
14
+ }
15
+ for (let j = 0; j < keyframes.length; j++) {
12
16
  const { H, matches, debugExtra: frameDebugExtra, } = match({
13
- keyframe: keyframes[i],
17
+ keyframe: keyframes[j],
14
18
  querypoints: featurePoints,
15
19
  querywidth: this.queryWidth,
16
20
  queryheight: this.queryHeight,
17
21
  debugMode: this.debugMode,
18
22
  });
19
- debugExtra.frames.push(frameDebugExtra);
23
+ if (frameDebugExtra) {
24
+ frameDebugExtra.keyframeIndex = j;
25
+ debugExtra.frames.push(frameDebugExtra);
26
+ }
20
27
  if (H) {
21
28
  if (bestResult === null || bestResult.matches.length < matches.length) {
22
- bestResult = { keyframeIndex: i, H, matches };
29
+ bestResult = { keyframeIndex: j, H, matches };
23
30
  }
24
31
  }
25
32
  }
26
33
  if (bestResult === null) {
27
- return { keyframeIndex: -1, debugExtra };
34
+ return { targetIndex: -1, keyframeIndex: -1, debugExtra };
28
35
  }
29
36
  const screenCoords = [];
30
37
  const worldCoords = [];
31
38
  const keyframe = keyframes[bestResult.keyframeIndex];
39
+ const scale = keyframe.s || keyframe.scale || 1.0;
32
40
  for (let i = 0; i < bestResult.matches.length; i++) {
33
41
  const querypoint = bestResult.matches[i].querypoint;
34
42
  const keypoint = bestResult.matches[i].keypoint;
@@ -37,12 +45,19 @@ class Matcher {
37
45
  y: querypoint.y,
38
46
  });
39
47
  worldCoords.push({
40
- x: (keypoint.x + 0.5) / (keyframe.s || keyframe.scale || 1.0),
41
- y: (keypoint.y + 0.5) / (keyframe.s || keyframe.scale || 1.0),
48
+ x: (keypoint.x + 0.5) / scale,
49
+ y: (keypoint.y + 0.5) / scale,
42
50
  z: 0,
43
51
  });
44
52
  }
45
- return { screenCoords, worldCoords, keyframeIndex: bestResult.keyframeIndex, debugExtra };
53
+ return {
54
+ screenCoords,
55
+ worldCoords,
56
+ targetIndex: -1, // Caller knows the targetIndex
57
+ keyframeIndex: bestResult.keyframeIndex,
58
+ H: bestResult.H,
59
+ debugExtra
60
+ };
46
61
  }
47
62
  }
48
63
  export { Matcher };
@@ -1,4 +1,4 @@
1
- export function match({ keyframe, querypoints, querywidth, queryheight, debugMode }: {
1
+ export function match({ keyframe, querypoints: rawQuerypoints, querywidth, queryheight, debugMode }: {
2
2
  keyframe: any;
3
3
  querypoints: any;
4
4
  querywidth: any;
@@ -6,6 +6,16 @@ export function match({ keyframe, querypoints, querywidth, queryheight, debugMod
6
6
  debugMode: any;
7
7
  }): {
8
8
  debugExtra: {
9
+ constellationMatches: {
10
+ querypoint: any;
11
+ keypoint: {
12
+ x: any;
13
+ y: any;
14
+ angle: any;
15
+ scale: any;
16
+ };
17
+ d: number;
18
+ }[];
9
19
  houghMatches: any[];
10
20
  inlierMatches: any[];
11
21
  matches2: {
@@ -23,9 +33,19 @@ export function match({ keyframe, querypoints, querywidth, queryheight, debugMod
23
33
  H?: undefined;
24
34
  matches?: undefined;
25
35
  } | {
26
- H: number[];
36
+ H: any[];
27
37
  matches: any[];
28
38
  debugExtra: {
39
+ constellationMatches: {
40
+ querypoint: any;
41
+ keypoint: {
42
+ x: any;
43
+ y: any;
44
+ angle: any;
45
+ scale: any;
46
+ };
47
+ d: number;
48
+ }[];
29
49
  houghMatches: any[];
30
50
  inlierMatches: any[];
31
51
  matches2: {