@srsergio/taptapp-ar 1.0.0 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/README.md +102 -26
  2. package/dist/compiler/aframe.js +0 -3
  3. package/dist/compiler/compiler-base.d.ts +3 -7
  4. package/dist/compiler/compiler-base.js +28 -14
  5. package/dist/compiler/compiler.js +1 -1
  6. package/dist/compiler/compiler.worker.js +1 -1
  7. package/dist/compiler/controller.js +4 -5
  8. package/dist/compiler/controller.worker.js +0 -2
  9. package/dist/compiler/detector/crop-detector.js +0 -2
  10. package/dist/compiler/detector/detector-lite.d.ts +73 -0
  11. package/dist/compiler/detector/detector-lite.js +430 -0
  12. package/dist/compiler/detector/detector.js +236 -243
  13. package/dist/compiler/detector/kernels/cpu/binomialFilter.js +0 -1
  14. package/dist/compiler/detector/kernels/cpu/computeLocalization.js +0 -4
  15. package/dist/compiler/detector/kernels/cpu/computeOrientationHistograms.js +0 -18
  16. package/dist/compiler/detector/kernels/cpu/fakeShader.js +1 -1
  17. package/dist/compiler/detector/kernels/cpu/prune.d.ts +7 -1
  18. package/dist/compiler/detector/kernels/cpu/prune.js +1 -42
  19. package/dist/compiler/detector/kernels/webgl/upsampleBilinear.js +2 -2
  20. package/dist/compiler/estimation/refine-estimate.js +0 -1
  21. package/dist/compiler/estimation/utils.d.ts +1 -1
  22. package/dist/compiler/estimation/utils.js +1 -14
  23. package/dist/compiler/image-list.js +4 -4
  24. package/dist/compiler/input-loader.js +2 -2
  25. package/dist/compiler/matching/hamming-distance.js +13 -13
  26. package/dist/compiler/matching/hierarchical-clustering.js +1 -1
  27. package/dist/compiler/matching/matching.d.ts +20 -4
  28. package/dist/compiler/matching/matching.js +67 -41
  29. package/dist/compiler/matching/ransacHomography.js +1 -2
  30. package/dist/compiler/node-worker.d.ts +1 -0
  31. package/dist/compiler/node-worker.js +84 -0
  32. package/dist/compiler/offline-compiler.d.ts +171 -6
  33. package/dist/compiler/offline-compiler.js +303 -421
  34. package/dist/compiler/tensorflow-setup.js +27 -1
  35. package/dist/compiler/three.js +3 -5
  36. package/dist/compiler/tracker/extract.d.ts +1 -0
  37. package/dist/compiler/tracker/extract.js +200 -244
  38. package/dist/compiler/tracker/tracker.d.ts +1 -1
  39. package/dist/compiler/tracker/tracker.js +13 -18
  40. package/dist/compiler/utils/cumsum.d.ts +4 -2
  41. package/dist/compiler/utils/cumsum.js +17 -19
  42. package/dist/compiler/utils/gpu-compute.d.ts +57 -0
  43. package/dist/compiler/utils/gpu-compute.js +262 -0
  44. package/dist/compiler/utils/images.d.ts +4 -4
  45. package/dist/compiler/utils/images.js +67 -53
  46. package/dist/compiler/utils/worker-pool.d.ts +14 -0
  47. package/dist/compiler/utils/worker-pool.js +84 -0
  48. package/dist/index.d.ts +0 -2
  49. package/dist/index.js +0 -2
  50. package/package.json +19 -13
  51. package/src/compiler/aframe.js +2 -4
  52. package/src/compiler/compiler-base.js +29 -14
  53. package/src/compiler/compiler.js +1 -1
  54. package/src/compiler/compiler.worker.js +1 -1
  55. package/src/compiler/controller.js +4 -5
  56. package/src/compiler/controller.worker.js +0 -2
  57. package/src/compiler/detector/crop-detector.js +0 -2
  58. package/src/compiler/detector/detector-lite.js +494 -0
  59. package/src/compiler/detector/detector.js +1052 -1063
  60. package/src/compiler/detector/kernels/cpu/binomialFilter.js +0 -1
  61. package/src/compiler/detector/kernels/cpu/computeLocalization.js +0 -4
  62. package/src/compiler/detector/kernels/cpu/computeOrientationHistograms.js +0 -17
  63. package/src/compiler/detector/kernels/cpu/fakeShader.js +1 -1
  64. package/src/compiler/detector/kernels/cpu/prune.js +1 -37
  65. package/src/compiler/detector/kernels/webgl/upsampleBilinear.js +2 -2
  66. package/src/compiler/estimation/refine-estimate.js +0 -1
  67. package/src/compiler/estimation/utils.js +9 -24
  68. package/src/compiler/image-list.js +4 -4
  69. package/src/compiler/input-loader.js +2 -2
  70. package/src/compiler/matching/hamming-distance.js +11 -15
  71. package/src/compiler/matching/hierarchical-clustering.js +1 -1
  72. package/src/compiler/matching/matching.js +72 -42
  73. package/src/compiler/matching/ransacHomography.js +0 -2
  74. package/src/compiler/node-worker.js +93 -0
  75. package/src/compiler/offline-compiler.js +339 -504
  76. package/src/compiler/tensorflow-setup.js +29 -1
  77. package/src/compiler/three.js +3 -5
  78. package/src/compiler/tracker/extract.js +211 -267
  79. package/src/compiler/tracker/tracker.js +13 -22
  80. package/src/compiler/utils/cumsum.js +17 -19
  81. package/src/compiler/utils/gpu-compute.js +303 -0
  82. package/src/compiler/utils/images.js +84 -53
  83. package/src/compiler/utils/worker-pool.js +89 -0
  84. package/src/index.ts +0 -2
  85. package/src/compiler/estimation/esimate-experiment.js +0 -316
  86. package/src/compiler/estimation/refine-estimate-experiment.js +0 -512
  87. package/src/react/AREditor.tsx +0 -394
  88. package/src/react/ProgressDialog.tsx +0 -185
@@ -2,7 +2,6 @@ import * as FakeShader from "./fakeShader.js";
 
 function GetKernels(image) {
   const imageWidth = image.shape[1];
-  const key = "w" + imageWidth;
 
   const imageHeight = image.shape[0];
   const kernel1 = {
@@ -38,10 +38,6 @@ function GetProgram(numDogPyramidImages, extremasListLength) {
   return program;
 }
 
-const int = Math.trunc;
-function clamp(n, min, max) {
-  return Math.min(Math.max(min, n), max - 1);
-}
 
 export const computeLocalization = (args) => {
   /** @type {import('@tensorflow/tfjs').TensorInfo} */
@@ -2,24 +2,12 @@ import * as FakeShader from "./fakeShader.js";
 const oneOver2PI = 0.159154943091895;
 const ORIENTATION_NUM_BINS = 36;
 
-const cache = {};
 function GetPrograms(prunedExtremasT, radialPropertiesT, pyramidImagesLength) {
-  const key = `${pyramidImagesLength}|${prunedExtremasT.shape[0]}|${radialPropertiesT.shape[0]}`;
-  //if (!cache.hasOwnProperty(key)) {
   const imageVariableNames = [];
   for (let i = 1; i < pyramidImagesLength; i++) {
     imageVariableNames.push("image" + i);
   }
 
-  /* let kernel1SubCodes = `float getPixel(int octave, int y, int x) {`;
-  for (let i = 1; i < pyramidImagesLength; i++) {
-    kernel1SubCodes += `
-      if (octave == ${i}) {
-        return getImage${i}(y, x);
-      }
-    `;
-  }
-  kernel1SubCodes += `}`; */
 
   const kernel1 = {
     variableNames: [...imageVariableNames, "extrema", "radial"],
@@ -34,11 +22,6 @@ function GetPrograms(prunedExtremasT, radialPropertiesT, pyramidImagesLength) {
       }
       return this[k](y, x);
     };
-    /** replicated undefined behavior like you have on OpenGL */
-    function atan(x, y) {
-      if (x == 0 && y == 0) return 1.57;
-      return Math.atan2(x, y);
-    }
     //void main() {
     const coords = this.getOutputCoords();
     const featureIndex = coords[0];
@@ -38,7 +38,7 @@ function runCode(backend, kernel, inputs, dtype) {
   //const temp = new Matrix();
   //console.log("Creating output shape:",kernel.outputShape);
   const temp = zeros(kernel.outputShape); //reshape([0,0,0],kernel.outputShape);
-  const output = map(temp, (value, index, matrix) => {
+  const output = map(temp, (_value, index) => {
    tempData.getOutputCoords = () => {
      return index;
    };
@@ -1,40 +1,4 @@
-import * as FakeShader from "./fakeShader.js";
-/*
-const kernel = {
-  variableNames: ['extrema'],
-  outputShape: [Math.floor(extremaHeight/2), Math.floor(extremaWidth/2)],
-  userCode: `
-    void main() {
-      ivec2 coords = getOutputCoords();
-      int y = coords[0] * 2;
-      int x = coords[1] * 2;
 
-      float location = 0.0;
-      float values = getExtrema(y, x);
-
-      if (getExtrema(y+1, x) != 0.0) {
-        location = 1.0;
-        values = getExtrema(y+1, x);
-      }
-      else if (getExtrema(y, x+1) != 0.0) {
-        location = 2.0;
-        values = getExtrema(y, x+1);
-      }
-      else if (getExtrema(y+1, x+1) != 0.0) {
-        location = 3.0;
-        values = getExtrema(y+1, x+1);
-      }
-
-      if (values < 0.0) {
-        setOutput(location * -1000.0 + values);
-      } else {
-        setOutput(location * 1000.0 + values);
-      }
-    }
-  `
-}
-
-*/
 function clamp(n, min, max) {
   return Math.min(Math.max(min, n), max - 1);
 }
@@ -107,7 +71,7 @@ const pruneConfig = {
   kernelFunc: prune, // as {} as KernelFunc,
 };
 
-module.exports = {
+export {
   pruneConfig,
   prune,
   pruneImpl,
@@ -1,7 +1,7 @@
 import { MathBackendWebGL } from "@tensorflow/tfjs-backend-webgl";
 
 const cache = {};
-function GetProgram(image, targetImage) {
+function GetProgram(targetImage) {
   const targetImageWidth = targetImage.shape[1];
   const targetImageHeight = targetImage.shape[0];
   const kernelKey = "w" + targetImageWidth + "h" + targetImageHeight;
@@ -50,7 +50,7 @@ export const upsampleBilinear = (args) => {
   /** @type {MathBackendWebGL} */
   const backend = args.backend;
 
-  const program = GetProgram(image, targetImage);
+  const program = GetProgram(targetImage);
   return backend.runWebGLProgram(program, [image], image.dtype);
 };
 
@@ -10,7 +10,6 @@ const K2_FACTOR = 4.0; // Question: should it be relative to the size of the scr
 const ICP_MAX_LOOP = 10;
 const ICP_BREAK_LOOP_ERROR_THRESH = 0.1;
 const ICP_BREAK_LOOP_ERROR_RATIO_THRESH = 0.99;
-const ICP_BREAK_LOOP_ERROR_THRESH2 = 4.0;
 
 // some temporary/intermediate variables used later. Declare them beforehand to reduce new object allocations
 let mat = [[], [], []];
@@ -6,23 +6,23 @@ const buildModelViewProjectionTransform = (projectionTransform, modelViewTransfo
   const modelViewProjectionTransform = [
     [
       projectionTransform[0][0] * modelViewTransform[0][0] +
-        projectionTransform[0][2] * modelViewTransform[2][0],
+        projectionTransform[0][2] * modelViewTransform[2][0],
       projectionTransform[0][0] * modelViewTransform[0][1] +
-        projectionTransform[0][2] * modelViewTransform[2][1],
+        projectionTransform[0][2] * modelViewTransform[2][1],
       projectionTransform[0][0] * modelViewTransform[0][2] +
-        projectionTransform[0][2] * modelViewTransform[2][2],
+        projectionTransform[0][2] * modelViewTransform[2][2],
      projectionTransform[0][0] * modelViewTransform[0][3] +
-        projectionTransform[0][2] * modelViewTransform[2][3],
+        projectionTransform[0][2] * modelViewTransform[2][3],
     ],
     [
       projectionTransform[1][1] * modelViewTransform[1][0] +
-        projectionTransform[1][2] * modelViewTransform[2][0],
+        projectionTransform[1][2] * modelViewTransform[2][0],
       projectionTransform[1][1] * modelViewTransform[1][1] +
-        projectionTransform[1][2] * modelViewTransform[2][1],
+        projectionTransform[1][2] * modelViewTransform[2][1],
       projectionTransform[1][1] * modelViewTransform[1][2] +
-        projectionTransform[1][2] * modelViewTransform[2][2],
+        projectionTransform[1][2] * modelViewTransform[2][2],
       projectionTransform[1][1] * modelViewTransform[1][3] +
-        projectionTransform[1][2] * modelViewTransform[2][3],
+        projectionTransform[1][2] * modelViewTransform[2][3],
     ],
     [
       modelViewTransform[2][0],
@@ -48,7 +48,7 @@ const buildModelViewProjectionTransform = (projectionTransform, modelViewTransfo
   */
 };
 
-const applyModelViewProjectionTransform = (modelViewProjectionTransform, x, y, z) => {
+const applyModelViewProjectionTransform = (modelViewProjectionTransform, x, y, _z) => {
   // assume z is zero
   const ux =
     modelViewProjectionTransform[0][0] * x +
@@ -75,21 +75,6 @@ const computeScreenCoordiate = (modelViewProjectionTransform, x, y, z) => {
   return { x: ux / uz, y: uy / uz };
 };
 
-const screenToMarkerCoordinate = (modelViewProjectionTransform, sx, sy) => {
-  const c11 = modelViewProjectionTransform[2][0] * sx - modelViewProjectionTransform[0][0];
-  const c12 = modelViewProjectionTransform[2][1] * sx - modelViewProjectionTransform[0][1];
-  const c21 = modelViewProjectionTransform[2][0] * sy - modelViewProjectionTransform[1][0];
-  const c22 = modelViewProjectionTransform[2][1] * sy - modelViewProjectionTransform[1][1];
-  const b1 = modelViewProjectionTransform[0][3] - modelViewProjectionTransform[2][3] * sx;
-  const b2 = modelViewProjectionTransform[1][3] - modelViewProjectionTransform[2][3] * sy;
-
-  const m = c11 * c22 - c12 * c21;
-  return {
-    x: (c22 * b1 - c12 * b2) / m,
-    y: (c11 * b2 - c21 * b1) / m,
-  };
-};
-
 export {
   buildModelViewProjectionTransform,
   applyModelViewProjectionTransform,
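For context on the composition in the hunk above: the code only reads projectionTransform[0][0], [0][2], [1][1] and [1][2], and copies the third row of modelViewTransform unchanged. That is what the product works out to when the projection matrix has the pinhole form sketched below (an assumption implied by the code, not stated anywhere in this diff):

P = \begin{bmatrix} p_{00} & 0 & p_{02} \\ 0 & p_{11} & p_{12} \\ 0 & 0 & 1 \end{bmatrix}, \qquad
(PM)_{0j} = p_{00} M_{0j} + p_{02} M_{2j}, \quad
(PM)_{1j} = p_{11} M_{1j} + p_{12} M_{2j}, \quad
(PM)_{2j} = M_{2j}

with M the 3x4 model-view matrix, matching the three rows built in buildModelViewProjectionTransform.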
@@ -19,7 +19,9 @@ const buildImageList = (inputImage) => {
   let c = minScale;
   while (true) {
     scaleList.push(c);
-    c *= Math.pow(2.0, 1.0 / 3.0);
+    // Optimization: balanced step (approx 1.5)
+    // Better coverage than 2.0, but much lighter than 1.41 or 1.26
+    c *= Math.pow(2.0, 0.6);
     if (c >= 0.95) {
       c = 1;
       break;
@@ -30,8 +32,6 @@ const buildImageList = (inputImage) => {
 
   const imageList = [];
   for (let i = 0; i < scaleList.length; i++) {
-    const w = inputImage.width * scaleList[i];
-    const h = inputImage.height * scaleList[i];
     imageList.push(
       Object.assign(resize({ image: inputImage, ratio: scaleList[i] }), { scale: scaleList[i] }),
     );
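To see what the larger scale step does to the pyramid, here is a small standalone counter mirroring the loop above (illustration only; 0.2 is an assumed minScale, not a value taken from image-list.js):

// Count how many scales the while-loop above pushes before it stops,
// ignoring any final scale the surrounding code may append.
const countScales = (minScale, exponent) => {
  let c = minScale;
  let count = 0;
  while (true) {
    count++;               // mirrors scaleList.push(c)
    c *= Math.pow(2.0, exponent);
    if (c >= 0.95) break;  // the real code also sets c = 1 here
  }
  return count;
};

console.log(countScales(0.2, 1.0 / 3.0)); // old step (~1.26x per level) -> 7 scales
console.log(countScales(0.2, 0.6));       // new step (~1.52x per level) -> 4 scales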
@@ -49,7 +49,7 @@ const buildTrackingImageList = (inputImage) => {
   const minDimension = Math.min(inputImage.width, inputImage.height);
   const scaleList = [];
   const imageList = [];
-  scaleList.push(256.0 / minDimension);
+  // Only generate the 128px version to save space (previously both 256px and 128px were generated)
   scaleList.push(128.0 / minDimension);
   for (let i = 0; i < scaleList.length; i++) {
     imageList.push(
@@ -95,12 +95,12 @@ class InputLoader {
 
   _compileAndRun(program, inputs) {
     const outInfo = tf.backend().compileAndRun(program, inputs);
-    return tf.engine().makeTensorFromDataId(outInfo.dataId, outInfo.shape, outInfo.dtype);
+    return tf.engine().makeTensor(outInfo.dataId, outInfo.shape, outInfo.dtype);
   }
 
   _runWebGLProgram(program, inputs, outputType) {
     const outInfo = tf.backend().runWebGLProgram(program, inputs, outputType);
-    return tf.engine().makeTensorFromDataId(outInfo.dataId, outInfo.shape, outInfo.dtype);
+    return tf.engine().makeTensor(outInfo.dataId, outInfo.shape, outInfo.dtype);
   }
 }
 
@@ -1,23 +1,19 @@
-// Fast computation on number of bit sets
-// Ref: https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
+// Precomputed bit count lookup table for Uint8Array (Much faster than bit manipulation)
+const BIT_COUNT_8 = new Uint8Array(256);
+for (let i = 0; i < 256; i++) {
+  let c = 0, n = i;
+  while (n > 0) { n &= (n - 1); c++; }
+  BIT_COUNT_8[i] = c;
+}
+
 const compute = (options) => {
   const { v1, v2 } = options;
   let d = 0;
-
-  for (let i = 0; i < v1.length; i++) {
-    let x = (v1[i] ^ v2[i]) >>> 0;
-    d += bitCount(x);
+  const len = v1.length;
+  for (let i = 0; i < len; i++) {
+    d += BIT_COUNT_8[v1[i] ^ v2[i]];
   }
   return d;
 };
 
-const bitCount = (v) => {
-  var c = v - ((v >> 1) & 0x55555555);
-  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
-  c = ((c >> 4) + c) & 0x0f0f0f0f;
-  c = ((c >> 8) + c) & 0x00ff00ff;
-  c = ((c >> 16) + c) & 0x0000ffff;
-  return c;
-};
-
 export { compute };
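The new version trades the classic parallel bit-count trick for a 256-entry popcount table indexed by each XOR'd byte. A standalone sketch of the same idea (illustration only, not code from the package; the 84-byte descriptor length matches how matching.js slices descriptors later in this diff):

// Rebuild the 256-entry popcount table from the hunk above, then use it.
const BIT_COUNT_8 = Uint8Array.from({ length: 256 }, (_, i) => {
  let c = 0;
  for (let n = i; n > 0; n &= n - 1) c++; // Kernighan popcount of the byte value
  return c;
});

const a = new Uint8Array(84);
const b = new Uint8Array(84);
a[0] = 0xff; // 11111111
b[0] = 0x0f; // 00001111

let d = 0;
for (let i = 0; i < a.length; i++) d += BIT_COUNT_8[a[i] ^ b[i]]; // XOR, then table lookup
console.log(d); // 4: only the upper four bits of byte 0 differ, all other bytes are zero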
@@ -2,7 +2,7 @@ import { compute as hammingCompute } from "./hamming-distance.js";
 import { createRandomizer } from "../utils/randomizer.js";
 
 const MIN_FEATURE_PER_NODE = 16;
-const NUM_ASSIGNMENT_HYPOTHESES = 128;
+const NUM_ASSIGNMENT_HYPOTHESES = 64;
 const NUM_CENTERS = 8;
 
 const _computeKMedoids = (options) => {
@@ -17,42 +17,58 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
   const matches = [];
   for (let j = 0; j < querypoints.length; j++) {
     const querypoint = querypoints[j];
-    const keypoints = querypoint.maxima ? keyframe.maximaPoints : keyframe.minimaPoints;
-    if (keypoints.length === 0) continue;
+    const col = querypoint.maxima ? keyframe.max : keyframe.min;
+    if (!col || col.x.length === 0) continue;
 
-    const rootNode = querypoint.maxima
-      ? keyframe.maximaPointsCluster.rootNode
-      : keyframe.minimaPointsCluster.rootNode;
+    const rootNode = col.t;
 
     const keypointIndexes = [];
     const queue = new TinyQueue([], (a1, a2) => {
       return a1.d - a2.d;
     });
 
-    // query all potential keypoints
-    _query({ node: rootNode, keypoints, querypoint, queue, keypointIndexes, numPop: 0 });
+    // query potential candidates from the columnar tree
+    _query({
+      node: rootNode,
+      descriptors: col.d,
+      querypoint,
+      queue,
+      keypointIndexes,
+      numPop: 0
+    });
 
     let bestIndex = -1;
     let bestD1 = Number.MAX_SAFE_INTEGER;
     let bestD2 = Number.MAX_SAFE_INTEGER;
 
     for (let k = 0; k < keypointIndexes.length; k++) {
-      const keypoint = keypoints[keypointIndexes[k]];
+      const idx = keypointIndexes[k];
+
+      // Access descriptor directly from binary buffer (Zero-copy)
+      const keypointDescriptor = col.d.subarray(idx * 84, (idx + 1) * 84);
 
-      const d = hammingCompute({ v1: keypoint.descriptors, v2: querypoint.descriptors });
+      const d = hammingCompute({ v1: keypointDescriptor, v2: querypoint.descriptors });
       if (d < bestD1) {
        bestD2 = bestD1;
        bestD1 = d;
-        bestIndex = keypointIndexes[k];
+        bestIndex = idx;
      } else if (d < bestD2) {
        bestD2 = d;
      }
    }
+
    if (
      bestIndex !== -1 &&
      (bestD2 === Number.MAX_SAFE_INTEGER || (1.0 * bestD1) / bestD2 < HAMMING_THRESHOLD)
    ) {
-      matches.push({ querypoint, keypoint: keypoints[bestIndex] });
+      matches.push({
+        querypoint,
+        keypoint: {
+          x: col.x[bestIndex],
+          y: col.y[bestIndex],
+          angle: col.a[bestIndex]
+        }
+      });
    }
  }
 
@@ -63,8 +79,8 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
   if (matches.length < MIN_NUM_INLIERS) return { debugExtra };
 
   const houghMatches = computeHoughMatches({
-    keywidth: keyframe.width,
-    keyheight: keyframe.height,
+    keywidth: keyframe.w, // Protocol V3 uses .w, .h
+    keyheight: keyframe.h,
     querywidth,
     queryheight,
     matches,
@@ -77,7 +93,7 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
   const H = computeHomography({
     srcPoints: houghMatches.map((m) => [m.keypoint.x, m.keypoint.y]),
     dstPoints: houghMatches.map((m) => [m.querypoint.x, m.querypoint.y]),
-    keyframe,
+    keyframe: { width: keyframe.w, height: keyframe.h },
   });
 
   if (H === null) return { debugExtra };
@@ -94,10 +110,11 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
 
   if (inlierMatches.length < MIN_NUM_INLIERS) return { debugExtra };
 
-  // do another loop of match using the homography
+  // Second pass with homography guided matching
   const HInv = matrixInverse33(H, 0.00001);
   const dThreshold2 = 10 * 10;
   const matches2 = [];
+
   for (let j = 0; j < querypoints.length; j++) {
     const querypoint = querypoints[j];
     const mapquerypoint = multiplyPointHomographyInhomogenous([querypoint.x, querypoint.y], HInv);
@@ -106,18 +123,19 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
     let bestD1 = Number.MAX_SAFE_INTEGER;
     let bestD2 = Number.MAX_SAFE_INTEGER;
 
-    const keypoints = querypoint.maxima ? keyframe.maximaPoints : keyframe.minimaPoints;
+    const col = querypoint.maxima ? keyframe.max : keyframe.min;
+    if (!col) continue;
 
-    for (let k = 0; k < keypoints.length; k++) {
-      const keypoint = keypoints[k];
+    for (let k = 0; k < col.x.length; k++) {
+      const dx = col.x[k] - mapquerypoint[0];
+      const dy = col.y[k] - mapquerypoint[1];
+      const d2 = dx * dx + dy * dy;
 
-      // check distance threshold
-      const d2 =
-        (keypoint.x - mapquerypoint[0]) * (keypoint.x - mapquerypoint[0]) +
-        (keypoint.y - mapquerypoint[1]) * (keypoint.y - mapquerypoint[1]);
       if (d2 > dThreshold2) continue;
 
-      const d = hammingCompute({ v1: keypoint.descriptors, v2: querypoint.descriptors });
+      const keypointDescriptor = col.d.subarray(k * 84, (k + 1) * 84);
+      const d = hammingCompute({ v1: keypointDescriptor, v2: querypoint.descriptors });
+
       if (d < bestD1) {
        bestD2 = bestD1;
        bestD1 = d;
@@ -131,7 +149,14 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
      bestIndex !== -1 &&
      (bestD2 === Number.MAX_SAFE_INTEGER || (1.0 * bestD1) / bestD2 < HAMMING_THRESHOLD)
    ) {
-      matches2.push({ querypoint, keypoint: keypoints[bestIndex] });
+      matches2.push({
+        querypoint,
+        keypoint: {
+          x: col.x[bestIndex],
+          y: col.y[bestIndex],
+          angle: col.a[bestIndex]
+        }
+      });
    }
  }
 
@@ -140,8 +165,8 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
   }
 
   const houghMatches2 = computeHoughMatches({
-    keywidth: keyframe.width,
-    keyheight: keyframe.height,
+    keywidth: keyframe.w,
+    keyheight: keyframe.h,
     querywidth,
     queryheight,
     matches: matches2,
@@ -154,7 +179,7 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
   const H2 = computeHomography({
     srcPoints: houghMatches2.map((m) => [m.keypoint.x, m.keypoint.y]),
     dstPoints: houghMatches2.map((m) => [m.querypoint.x, m.querypoint.y]),
-    keyframe,
+    keyframe: { width: keyframe.w, height: keyframe.h },
   });
 
   if (H2 === null) return { debugExtra };
@@ -172,45 +197,50 @@ const match = ({ keyframe, querypoints, querywidth, queryheight, debugMode }) =>
   return { H: H2, matches: inlierMatches2, debugExtra };
 };
 
-const _query = ({ node, keypoints, querypoint, queue, keypointIndexes, numPop }) => {
-  if (node.leaf) {
-    for (let i = 0; i < node.pointIndexes.length; i++) {
-      keypointIndexes.push(node.pointIndexes[i]);
+const _query = ({ node, descriptors, querypoint, queue, keypointIndexes, numPop }) => {
+  const isLeaf = node[0] === 1;
+  const centerIdx = node[1];
+  const childrenOrIndices = node[2];
+
+  if (isLeaf) {
+    for (let i = 0; i < childrenOrIndices.length; i++) {
+      keypointIndexes.push(childrenOrIndices[i]);
     }
     return;
   }
 
   const distances = [];
-  for (let i = 0; i < node.children.length; i++) {
-    const childNode = node.children[i];
-    const centerPointIndex = childNode.centerPointIndex;
+  for (let i = 0; i < childrenOrIndices.length; i++) {
+    const childNode = childrenOrIndices[i];
+    const cIdx = childNode[1];
+
    const d = hammingCompute({
-      v1: keypoints[centerPointIndex].descriptors,
+      v1: descriptors.subarray(cIdx * 84, (cIdx + 1) * 84),
      v2: querypoint.descriptors,
    });
    distances.push(d);
  }
 
   let minD = Number.MAX_SAFE_INTEGER;
-  for (let i = 0; i < node.children.length; i++) {
+  for (let i = 0; i < childrenOrIndices.length; i++) {
     minD = Math.min(minD, distances[i]);
   }
 
-  for (let i = 0; i < node.children.length; i++) {
+  for (let i = 0; i < childrenOrIndices.length; i++) {
     if (distances[i] !== minD) {
-      queue.push({ node: node.children[i], d: distances[i] });
+      queue.push({ node: childrenOrIndices[i], d: distances[i] });
     }
   }
-  for (let i = 0; i < node.children.length; i++) {
+  for (let i = 0; i < childrenOrIndices.length; i++) {
     if (distances[i] === minD) {
-      _query({ node: node.children[i], keypoints, querypoint, queue, keypointIndexes, numPop });
+      _query({ node: childrenOrIndices[i], descriptors, querypoint, queue, keypointIndexes, numPop });
     }
   }
 
   if (numPop < CLUSTER_MAX_POP && queue.length > 0) {
-    const { node, d } = queue.pop();
+    const { node } = queue.pop();
     numPop += 1;
-    _query({ node, keypoints, querypoint, queue, keypointIndexes, numPop });
+    _query({ node, descriptors, querypoint, queue, keypointIndexes, numPop });
   }
 };
 
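These matching hunks read keyframes in a columnar layout: keyframe.w/h for dimensions, and one column set per polarity (keyframe.max / keyframe.min) with parallel x/y/a arrays, a packed descriptor buffer d, and a cluster tree t. The builder for that layout is elsewhere in the package (offline-compiler / detector-lite, not shown in these hunks); a hypothetical example of the shape implied by the accesses above would be roughly:

// Hypothetical example only: a keyframe in the columnar layout the matcher expects.
// Field names come from the reads above; array contents are made up.
const exampleKeyframe = {
  w: 256,                                // keyframe.w / keyframe.h replace width / height
  h: 170,
  max: {                                 // maxima columns; `min` would have the same shape
    x: new Float32Array([12.5, 80.0]),   // col.x[i], col.y[i], col.a[i] are parallel arrays
    y: new Float32Array([34.0, 61.5]),
    a: new Float32Array([0.12, 2.71]),
    d: new Uint8Array(2 * 84),           // packed descriptors: point i is d.subarray(i * 84, (i + 1) * 84)
    t: [0, 1, [                          // tree node: [isLeaf, centerIndex, childrenOrLeafIndices]
      [1, 0, [0]],                       // leaf: indices into the parallel arrays
      [1, 1, [1]],
    ]],
  },
  min: null,                             // the matcher tolerates a missing column (`if (!col) continue`)
};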
@@ -1,4 +1,3 @@
-import { Matrix, inverse } from "ml-matrix";
 import { createRandomizer } from "../utils/randomizer.js";
 import {
   quadrilateralConvex,
@@ -7,7 +6,6 @@ import {
   multiplyPointHomographyInhomogenous,
   checkThreePointsConsistent,
   checkFourPointsConsistent,
-  determinant,
 } from "../utils/geometry.js";
 import { solveHomography } from "../utils/homography.js";
 
@@ -0,0 +1,93 @@
+/**
+ * Node.js worker for AR image compilation
+ *
+ * OPTIMIZED: runs without TensorFlow to avoid initialization stalls.
+ * Uses pure JavaScript for maximum speed.
+ */
+import { parentPort } from 'node:worker_threads';
+import { extractTrackingFeatures } from './tracker/extract-utils.js';
+import { buildTrackingImageList, buildImageList } from './image-list.js';
+import { DetectorLite } from './detector/detector-lite.js';
+import { build as hierarchicalClusteringBuild } from './matching/hierarchical-clustering.js';
+
+if (!parentPort) {
+  throw new Error('This file must be run as a worker thread.');
+}
+
+parentPort.on('message', async (msg) => {
+  if (msg.type === 'compile') {
+    const { targetImage, percentPerImage, basePercent } = msg;
+
+    try {
+      const imageList = buildTrackingImageList(targetImage);
+      const percentPerAction = percentPerImage / imageList.length;
+      let localPercent = 0;
+
+      const trackingData = extractTrackingFeatures(imageList, (index) => {
+        localPercent += percentPerAction;
+        parentPort.postMessage({
+          type: 'progress',
+          percent: basePercent + localPercent
+        });
+      });
+
+      parentPort.postMessage({
+        type: 'compileDone',
+        trackingData
+      });
+    } catch (error) {
+      parentPort.postMessage({
+        type: 'error',
+        error: error.message
+      });
+    }
+  } else if (msg.type === 'match') {
+    const { targetImage, percentPerImage, basePercent } = msg;
+
+    try {
+      const imageList = buildImageList(targetImage);
+      const percentPerAction = percentPerImage / imageList.length;
+      let localPercent = 0;
+
+      const keyframes = [];
+      for (let i = 0; i < imageList.length; i++) {
+        const image = imageList[i];
+        const detector = new DetectorLite(image.width, image.height);
+
+        // Detect features using pure JS (no TensorFlow)
+        const { featurePoints: ps } = detector.detect(image.data);
+
+        const maximaPoints = ps.filter((p) => p.maxima);
+        const minimaPoints = ps.filter((p) => !p.maxima);
+        const maximaPointsCluster = hierarchicalClusteringBuild({ points: maximaPoints });
+        const minimaPointsCluster = hierarchicalClusteringBuild({ points: minimaPoints });
+
+        keyframes.push({
+          maximaPoints,
+          minimaPoints,
+          maximaPointsCluster,
+          minimaPointsCluster,
+          width: image.width,
+          height: image.height,
+          scale: image.scale,
+        });
+
+        localPercent += percentPerAction;
+        parentPort.postMessage({
+          type: 'progress',
+          percent: basePercent + localPercent
+        });
+      }
+
+      parentPort.postMessage({
+        type: 'matchDone',
+        matchingData: keyframes
+      });
+    } catch (error) {
+      parentPort.postMessage({
+        type: 'error',
+        error: error.message
+      });
+    }
+  }
+});
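The new worker answers 'compile' and 'match' messages and reports 'progress', 'compileDone', 'matchDone', or 'error' back to its parent. A minimal sketch of driving it from the main thread (illustration only; the worker path and the shape of targetImage are assumptions, and the real caller is presumably the reworked offline-compiler.js, which is not shown here):

import { Worker } from 'node:worker_threads';

// Hypothetical path; the real compiler resolves its own worker file.
const worker = new Worker(new URL('./node-worker.js', import.meta.url));

worker.on('message', (msg) => {
  if (msg.type === 'progress') console.log(`progress: ${msg.percent.toFixed(1)}%`);
  else if (msg.type === 'matchDone') console.log(`keyframes: ${msg.matchingData.length}`);
  else if (msg.type === 'error') console.error(msg.error);
});

// targetImage is assumed to be a { width, height, data } greyscale image,
// matching what buildImageList / DetectorLite consume above.
worker.postMessage({
  type: 'match',
  targetImage: { width: 640, height: 480, data: new Uint8Array(640 * 480) },
  percentPerImage: 50,
  basePercent: 0,
});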