@srsergio/taptapp-ar 1.0.0 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/README.md +102 -26
  2. package/dist/compiler/aframe.js +0 -3
  3. package/dist/compiler/compiler-base.d.ts +3 -7
  4. package/dist/compiler/compiler-base.js +28 -14
  5. package/dist/compiler/compiler.js +1 -1
  6. package/dist/compiler/compiler.worker.js +1 -1
  7. package/dist/compiler/controller.js +4 -5
  8. package/dist/compiler/controller.worker.js +0 -2
  9. package/dist/compiler/detector/crop-detector.js +0 -2
  10. package/dist/compiler/detector/detector-lite.d.ts +73 -0
  11. package/dist/compiler/detector/detector-lite.js +430 -0
  12. package/dist/compiler/detector/detector.js +236 -243
  13. package/dist/compiler/detector/kernels/cpu/binomialFilter.js +0 -1
  14. package/dist/compiler/detector/kernels/cpu/computeLocalization.js +0 -4
  15. package/dist/compiler/detector/kernels/cpu/computeOrientationHistograms.js +0 -18
  16. package/dist/compiler/detector/kernels/cpu/fakeShader.js +1 -1
  17. package/dist/compiler/detector/kernels/cpu/prune.d.ts +7 -1
  18. package/dist/compiler/detector/kernels/cpu/prune.js +1 -42
  19. package/dist/compiler/detector/kernels/webgl/upsampleBilinear.js +2 -2
  20. package/dist/compiler/estimation/refine-estimate.js +0 -1
  21. package/dist/compiler/estimation/utils.d.ts +1 -1
  22. package/dist/compiler/estimation/utils.js +1 -14
  23. package/dist/compiler/image-list.js +4 -4
  24. package/dist/compiler/input-loader.js +2 -2
  25. package/dist/compiler/matching/hamming-distance.js +13 -13
  26. package/dist/compiler/matching/hierarchical-clustering.js +1 -1
  27. package/dist/compiler/matching/matching.d.ts +20 -4
  28. package/dist/compiler/matching/matching.js +67 -41
  29. package/dist/compiler/matching/ransacHomography.js +1 -2
  30. package/dist/compiler/node-worker.d.ts +1 -0
  31. package/dist/compiler/node-worker.js +84 -0
  32. package/dist/compiler/offline-compiler.d.ts +171 -6
  33. package/dist/compiler/offline-compiler.js +303 -421
  34. package/dist/compiler/tensorflow-setup.js +27 -1
  35. package/dist/compiler/three.js +3 -5
  36. package/dist/compiler/tracker/extract.d.ts +1 -0
  37. package/dist/compiler/tracker/extract.js +200 -244
  38. package/dist/compiler/tracker/tracker.d.ts +1 -1
  39. package/dist/compiler/tracker/tracker.js +13 -18
  40. package/dist/compiler/utils/cumsum.d.ts +4 -2
  41. package/dist/compiler/utils/cumsum.js +17 -19
  42. package/dist/compiler/utils/gpu-compute.d.ts +57 -0
  43. package/dist/compiler/utils/gpu-compute.js +262 -0
  44. package/dist/compiler/utils/images.d.ts +4 -4
  45. package/dist/compiler/utils/images.js +67 -53
  46. package/dist/compiler/utils/worker-pool.d.ts +14 -0
  47. package/dist/compiler/utils/worker-pool.js +84 -0
  48. package/dist/index.d.ts +0 -2
  49. package/dist/index.js +0 -2
  50. package/package.json +19 -13
  51. package/src/compiler/aframe.js +2 -4
  52. package/src/compiler/compiler-base.js +29 -14
  53. package/src/compiler/compiler.js +1 -1
  54. package/src/compiler/compiler.worker.js +1 -1
  55. package/src/compiler/controller.js +4 -5
  56. package/src/compiler/controller.worker.js +0 -2
  57. package/src/compiler/detector/crop-detector.js +0 -2
  58. package/src/compiler/detector/detector-lite.js +494 -0
  59. package/src/compiler/detector/detector.js +1052 -1063
  60. package/src/compiler/detector/kernels/cpu/binomialFilter.js +0 -1
  61. package/src/compiler/detector/kernels/cpu/computeLocalization.js +0 -4
  62. package/src/compiler/detector/kernels/cpu/computeOrientationHistograms.js +0 -17
  63. package/src/compiler/detector/kernels/cpu/fakeShader.js +1 -1
  64. package/src/compiler/detector/kernels/cpu/prune.js +1 -37
  65. package/src/compiler/detector/kernels/webgl/upsampleBilinear.js +2 -2
  66. package/src/compiler/estimation/refine-estimate.js +0 -1
  67. package/src/compiler/estimation/utils.js +9 -24
  68. package/src/compiler/image-list.js +4 -4
  69. package/src/compiler/input-loader.js +2 -2
  70. package/src/compiler/matching/hamming-distance.js +11 -15
  71. package/src/compiler/matching/hierarchical-clustering.js +1 -1
  72. package/src/compiler/matching/matching.js +72 -42
  73. package/src/compiler/matching/ransacHomography.js +0 -2
  74. package/src/compiler/node-worker.js +93 -0
  75. package/src/compiler/offline-compiler.js +339 -504
  76. package/src/compiler/tensorflow-setup.js +29 -1
  77. package/src/compiler/three.js +3 -5
  78. package/src/compiler/tracker/extract.js +211 -267
  79. package/src/compiler/tracker/tracker.js +13 -22
  80. package/src/compiler/utils/cumsum.js +17 -19
  81. package/src/compiler/utils/gpu-compute.js +303 -0
  82. package/src/compiler/utils/images.js +84 -53
  83. package/src/compiler/utils/worker-pool.js +89 -0
  84. package/src/index.ts +0 -2
  85. package/src/compiler/estimation/esimate-experiment.js +0 -316
  86. package/src/compiler/estimation/refine-estimate-experiment.js +0 -512
  87. package/src/react/AREditor.tsx +0 -394
  88. package/src/react/ProgressDialog.tsx +0 -185
@@ -1,18 +1,29 @@
  import { Cumsum } from "../utils/cumsum.js";
+ import { gpuCompute } from "../utils/gpu-compute.js";
 
  const SEARCH_SIZE1 = 10;
  const SEARCH_SIZE2 = 2;
 
- //const TEMPLATE_SIZE = 22 // DEFAULT
+ // Template parameters - adjusted for more points
  const TEMPLATE_SIZE = 6;
- const TEMPLATE_SD_THRESH = 5.0;
+ const TEMPLATE_SD_THRESH = 4.0; // Reduced from 5.0 to accept more candidates
  const MAX_SIM_THRESH = 0.95;
 
  const MAX_THRESH = 0.9;
- //const MIN_THRESH = 0.55;
  const MIN_THRESH = 0.2;
  const SD_THRESH = 8.0;
- const OCCUPANCY_SIZE = (24 * 2) / 3;
+ const OCCUPANCY_SIZE = 10; // Reduced from 16 to allow closer points
+
+ // GPU mode flag - set to false to use original JS implementation
+ let useGPU = true;
+
+ /**
+ * Set GPU mode for extraction
+ * @param {boolean} enabled - Whether to use GPU acceleration
+ */
+ export const setGPUMode = (enabled) => {
+ useGPU = enabled;
+ };
 
  /*
  * Input image is in grey format. the imageData array size is width * height. value range from 0-255
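Note on the new setGPUMode export above: it toggles the GPU path at runtime (useGPU defaults to true). A minimal usage sketch, assuming the module is imported directly from its source path inside the package; the actual entry point may re-export it differently:

    // Hypothetical consumer code - the import path is an assumption, not documented by the package.
    import { setGPUMode } from "@srsergio/taptapp-ar/src/compiler/tracker/extract.js";

    // Fall back to the original pure-JS edge detection and feature selection.
    setGPUMode(false);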
@@ -23,292 +34,241 @@ const OCCUPANCY_SIZE = (24 * 2) / 3;
  * @param {int} options.height image height
  */
  const extract = (image) => {
- const { data: imageData, width, height, scale } = image;
-
- // Step 1 - filter out interesting points. Interesting points have strong pixel value changed across neighbours
- const isPixelSelected = [width * height];
- for (let i = 0; i < isPixelSelected.length; i++) isPixelSelected[i] = false;
-
- // Step 1.1 consider a pixel at position (x, y). compute:
- // dx = ((data[x+1, y-1] - data[x-1, y-1]) + (data[x+1, y] - data[x-1, y]) + (data[x+1, y+1] - data[x-1, y-1])) / 256 / 3
- // dy = ((data[x+1, y+1] - data[x+1, y-1]) + (data[x, y+1] - data[x, y-1]) + (data[x-1, y+1] - data[x-1, y-1])) / 256 / 3
- // dValue = sqrt(dx^2 + dy^2) / 2;
- const dValue = new Float32Array(imageData.length);
- for (let i = 0; i < width; i++) {
- dValue[i] = -1;
- dValue[width * (height - 1) + i] = -1;
- }
- for (let j = 0; j < height; j++) {
- dValue[j * width] = -1;
- dValue[j * width + width - 1] = -1;
- }
+ const { data: imageData, width, height } = image;
+
+ let dValue, isCandidate;
+
+ if (useGPU) {
+ // GPU-accelerated edge detection
+ const result = gpuCompute.edgeDetection(imageData, width, height);
+ dValue = result.dValue;
+ isCandidate = result.isCandidate;
+ } else {
+ // Original JS implementation
+ dValue = new Float32Array(imageData.length);
+ isCandidate = new Uint8Array(imageData.length);
 
- for (let i = 1; i < width - 1; i++) {
  for (let j = 1; j < height - 1; j++) {
- let pos = i + width * j;
+ const rowOffset = j * width;
+ const prevRowOffset = (j - 1) * width;
+ const nextRowOffset = (j + 1) * width;
+
+ for (let i = 1; i < width - 1; i++) {
+ const pos = rowOffset + i;
 
- let dx = 0.0;
- let dy = 0.0;
- for (let k = -1; k <= 1; k++) {
- dx += imageData[pos + width * k + 1] - imageData[pos + width * k - 1];
- dy += imageData[pos + width + k] - imageData[pos - width + k];
+ // dx/dy with tight loops
+ let dx = (imageData[prevRowOffset + i + 1] - imageData[prevRowOffset + i - 1] +
+ imageData[rowOffset + i + 1] - imageData[rowOffset + i - 1] +
+ imageData[nextRowOffset + i + 1] - imageData[nextRowOffset + i - 1]) / 768;
+
+ let dy = (imageData[nextRowOffset + i - 1] - imageData[prevRowOffset + i - 1] +
+ imageData[nextRowOffset + i] - imageData[prevRowOffset + i] +
+ imageData[nextRowOffset + i + 1] - imageData[prevRowOffset + i + 1]) / 768;
+
+ dValue[pos] = Math.sqrt((dx * dx + dy * dy) / 2);
  }
- dx /= 3 * 256;
- dy /= 3 * 256;
- dValue[pos] = Math.sqrt((dx * dx + dy * dy) / 2);
  }
- }
 
- // Step 1.2 - select all pixel which is dValue largest than all its neighbour as "potential" candidate
- // the number of selected points is still too many, so we use the value to further filter (e.g. largest the dValue, the better)
- const dValueHist = new Uint32Array(1000); // histogram of dvalue scaled to [0, 1000)
- for (let i = 0; i < 1000; i++) dValueHist[i] = 0;
- const neighbourOffsets = [-1, 1, -width, width];
- let allCount = 0;
- for (let i = 1; i < width - 1; i++) {
+ // Step 1.2 - Local Maxima (for JS path)
  for (let j = 1; j < height - 1; j++) {
- let pos = i + width * j;
- let isMax = true;
- for (let d = 0; d < neighbourOffsets.length; d++) {
- if (dValue[pos] <= dValue[pos + neighbourOffsets[d]]) {
- isMax = false;
- break;
+ const rowOffset = j * width;
+ for (let i = 1; i < width - 1; i++) {
+ const pos = rowOffset + i;
+ const val = dValue[pos];
+ if (val > 0 &&
+ val >= dValue[pos - 1] && val >= dValue[pos + 1] &&
+ val >= dValue[pos - width] && val >= dValue[pos + width]) {
+ isCandidate[pos] = 1;
  }
  }
- if (isMax) {
- let k = Math.floor(dValue[pos] * 1000);
- if (k > 999) k = 999; // k>999 should not happen if computaiton is correction
- if (k < 0) k = 0; // k<0 should not happen if computaiton is correction
- dValueHist[k] += 1;
- allCount += 1;
- isPixelSelected[pos] = true;
+ }
+ }
+
+ // Step 1.2 - Build Histogram from detected candidates
+ const dValueHist = new Uint32Array(1000);
+ let allCount = 0;
+ for (let j = 1; j < height - 1; j++) {
+ const rowOffset = j * width;
+ for (let i = 1; i < width - 1; i++) {
+ const pos = rowOffset + i;
+ if (isCandidate[pos]) {
+ const val = dValue[pos];
+ let k = Math.floor(val * 1000);
+ if (k > 999) k = 999;
+ dValueHist[k]++;
+ allCount++;
  }
  }
  }
 
- // reduce number of points according to dValue.
- // actually, the whole Step 1. might be better to just sort the dvalues and pick the top (0.02 * width * height) points
- const maxPoints = 0.02 * width * height;
- let k = 999;
+ // Determine dValue threshold for top 5% (raised from 2% for more candidates)
+ const maxPoints = 0.05 * width * height;
+ let kThresh = 999;
  let filteredCount = 0;
- while (k >= 0) {
- filteredCount += dValueHist[k];
+ while (kThresh >= 0) {
+ filteredCount += dValueHist[kThresh];
  if (filteredCount > maxPoints) break;
- k--;
- }
-
- //console.log("image size: ", width * height);
- //console.log("extracted featues: ", allCount);
- //console.log("filtered featues: ", filteredCount);
-
- for (let i = 0; i < isPixelSelected.length; i++) {
- if (isPixelSelected[i]) {
- if (dValue[i] * 1000 < k) isPixelSelected[i] = false;
- }
+ kThresh--;
  }
-
- //console.log("selected count: ", isPixelSelected.reduce((a, b) => {return a + (b?1:0);}, 0));
+ const minDValue = kThresh / 1000;
 
  // Step 2
- // prebuild cumulative sum matrix for fast computation
- const imageDataSqr = [];
+ const imageDataSqr = new Float32Array(imageData.length);
  for (let i = 0; i < imageData.length; i++) {
  imageDataSqr[i] = imageData[i] * imageData[i];
  }
  const imageDataCumsum = new Cumsum(imageData, width, height);
  const imageDataSqrCumsum = new Cumsum(imageDataSqr, width, height);
 
- // holds the max similariliy value computed within SEARCH area of each pixel
- // idea: if there is high simliarity with another pixel in nearby area, then it's not a good feature point
- // next step is to find pixel with low similarity
- const featureMap = new Float32Array(imageData.length);
-
- for (let i = 0; i < width; i++) {
- for (let j = 0; j < height; j++) {
- const pos = j * width + i;
- if (!isPixelSelected[pos]) {
- featureMap[pos] = 1.0;
- continue;
- }
-
- const vlen = _templateVar({
- image,
- cx: i,
- cy: j,
- sdThresh: TEMPLATE_SD_THRESH,
- imageDataCumsum,
- imageDataSqrCumsum,
+ // Collect candidates above threshold
+ const candidates = [];
+ for (let i = 0; i < imageData.length; i++) {
+ if (isCandidate[i] && dValue[i] >= minDValue) {
+ candidates.push({
+ pos: i,
+ dval: dValue[i],
+ x: i % width,
+ y: Math.floor(i / width)
  });
- if (vlen === null) {
- featureMap[pos] = 1.0;
- continue;
- }
-
- let max = -1.0;
- for (let jj = -SEARCH_SIZE1; jj <= SEARCH_SIZE1; jj++) {
- for (let ii = -SEARCH_SIZE1; ii <= SEARCH_SIZE1; ii++) {
- if (ii * ii + jj * jj <= SEARCH_SIZE2 * SEARCH_SIZE2) continue;
- const sim = _getSimilarity({
- image,
- cx: i + ii,
- cy: j + jj,
- vlen: vlen,
- tx: i,
- ty: j,
- imageDataCumsum,
- imageDataSqrCumsum,
- });
-
- if (sim === null) continue;
-
- if (sim > max) {
- max = sim;
- if (max > MAX_SIM_THRESH) break;
- }
- }
- if (max > MAX_SIM_THRESH) break;
- }
- featureMap[pos] = max;
  }
  }
+ // Sort by dValue DESCENDING
+ candidates.sort((a, b) => b.dval - a.dval);
 
- // Step 2.2 select feature
- const coords = _selectFeature({
- image,
- featureMap,
- templateSize: TEMPLATE_SIZE,
- searchSize: SEARCH_SIZE2,
- occSize: OCCUPANCY_SIZE,
- maxSimThresh: MAX_THRESH,
- minSimThresh: MIN_THRESH,
- sdThresh: SD_THRESH,
- imageDataCumsum,
- imageDataSqrCumsum,
- });
+ // Step 3 - On-Demand Feature Selection (The 10x Win)
+ const divSize = (TEMPLATE_SIZE * 2 + 1) * 3;
+ const maxFeatureNum = Math.floor(width / OCCUPANCY_SIZE) * Math.floor(height / OCCUPANCY_SIZE) +
+ Math.floor(width / divSize) * Math.floor(height / divSize);
 
- return coords;
- };
+ const coords = [];
+ const invalidated = new Uint8Array(width * height);
+ const templateWidth = 2 * TEMPLATE_SIZE + 1;
+ const nPixels = templateWidth * templateWidth;
 
- const _selectFeature = (options) => {
- let {
- image,
- featureMap,
- templateSize,
- searchSize,
- occSize,
- maxSimThresh,
- minSimThresh,
- sdThresh,
- imageDataCumsum,
- imageDataSqrCumsum,
- } = options;
- const { data: imageData, width, height, scale } = image;
-
- //console.log("params: ", templateSize, templateSize, occSize, maxSimThresh, minSimThresh, sdThresh);
-
- //occSize *= 2;
- occSize = Math.floor(Math.min(image.width, image.height) / 10);
-
- const divSize = (templateSize * 2 + 1) * 3;
- const xDiv = Math.floor(width / divSize);
- const yDiv = Math.floor(height / divSize);
-
- let maxFeatureNum = Math.floor(width / occSize) * Math.floor(height / occSize) + xDiv * yDiv;
- //console.log("max feature num: ", maxFeatureNum);
+ const actualOccSize = Math.floor(Math.min(width, height) / 12); // Reduced from 10 for more density
 
- const coords = [];
- const image2 = new Float32Array(imageData.length);
- for (let i = 0; i < image2.length; i++) {
- image2[i] = featureMap[i];
- }
+ for (let i = 0; i < candidates.length; i++) {
+ const { x, y, pos } = candidates[i];
+ if (invalidated[pos]) continue;
 
- let num = 0;
- while (num < maxFeatureNum) {
- let minSim = maxSimThresh;
- let cx = -1;
- let cy = -1;
- for (let j = 0; j < height; j++) {
- for (let i = 0; i < width; i++) {
- if (image2[j * width + i] < minSim) {
- minSim = image2[j * width + i];
- cx = i;
- cy = j;
- }
- }
+ // Boundary safety for template
+ if (x < TEMPLATE_SIZE + SEARCH_SIZE1 || x >= width - TEMPLATE_SIZE - SEARCH_SIZE1 ||
+ y < TEMPLATE_SIZE + SEARCH_SIZE1 || y >= height - TEMPLATE_SIZE - SEARCH_SIZE1) {
+ continue;
  }
- if (cx === -1) break;
 
  const vlen = _templateVar({
  image,
- cx: cx,
- cy: cy,
- sdThresh: 0,
+ cx: x,
+ cy: y,
+ sdThresh: TEMPLATE_SD_THRESH,
  imageDataCumsum,
  imageDataSqrCumsum,
  });
- if (vlen === null) {
- image2[cy * width + cx] = 1.0;
- continue;
- }
- if (vlen / (templateSize * 2 + 1) < sdThresh) {
- image2[cy * width + cx] = 1.0;
- continue;
+ if (vlen === null) continue;
+
+ const templateAvg = imageDataCumsum.query(
+ x - TEMPLATE_SIZE,
+ y - TEMPLATE_SIZE,
+ x + TEMPLATE_SIZE,
+ y + TEMPLATE_SIZE,
+ ) / nPixels;
+
+ // Optimization: Cache template once per candidate
+ const templateData = new Uint8Array(templateWidth * templateWidth);
+ let tidx = 0;
+ const tStart = (y - TEMPLATE_SIZE) * width + (x - TEMPLATE_SIZE);
+ for (let tj = 0; tj < templateWidth; tj++) {
+ const rowOffset = tStart + tj * width;
+ for (let ti = 0; ti < templateWidth; ti++) {
+ templateData[tidx++] = imageData[rowOffset + ti];
+ }
  }
 
- let min = 1.0;
+ // Step 2.1: Find max similarity in search area (On demand!)
  let max = -1.0;
+ for (let jj = -SEARCH_SIZE1; jj <= SEARCH_SIZE1; jj++) {
+ for (let ii = -SEARCH_SIZE1; ii <= SEARCH_SIZE1; ii++) {
+ if (ii * ii + jj * jj <= SEARCH_SIZE2 * SEARCH_SIZE2) continue;
 
- for (let j = -searchSize; j <= searchSize; j++) {
- for (let i = -searchSize; i <= searchSize; i++) {
- if (i * i + j * j > searchSize * searchSize) continue;
- if (i === 0 && j === 0) continue;
-
- const sim = _getSimilarity({
+ const sim = _getSimilarityOptimized({
  image,
- vlen,
- cx: cx + i,
- cy: cy + j,
- tx: cx,
- ty: cy,
+ cx: x + ii,
+ cy: y + jj,
+ vlen: vlen,
+ templateData,
+ templateAvg,
+ templateWidth,
  imageDataCumsum,
  imageDataSqrCumsum,
+ width,
+ height
  });
- if (sim === null) continue;
 
- if (sim < min) {
- min = sim;
- if (min < minSimThresh && min < minSim) break;
- }
- if (sim > max) {
+ if (sim !== null && sim > max) {
  max = sim;
- if (max > 0.99) break;
+ if (max > MAX_THRESH) break;
  }
  }
- if ((min < minSimThresh && min < minSim) || max > 0.99) break;
+ if (max > MAX_THRESH) break;
  }
 
- if ((min < minSimThresh && min < minSim) || max > 0.99) {
- image2[cy * width + cx] = 1.0;
- continue;
- }
+ // Now decide if we select it
+ if (max < MAX_THRESH) {
+ // Uniqueness check (Step 2.2 sub-loop)
+ let minUnique = 1.0;
+ let maxUnique = -1.0;
+ let failedUnique = false;
 
- coords.push({ x: cx, y: cy });
- //coords.push({
- //mx: 1.0 * cx / scale,
- //my: 1.0 * (height - cy) / scale,
- //})
+ for (let jj = -SEARCH_SIZE2; jj <= SEARCH_SIZE2; jj++) {
+ for (let ii = -SEARCH_SIZE2; ii <= SEARCH_SIZE2; ii++) {
+ if (ii * ii + jj * jj > SEARCH_SIZE2 * SEARCH_SIZE2) continue;
+ if (ii === 0 && jj === 0) continue;
 
- num += 1;
- //console.log(num, '(', cx, ',', cy, ')', minSim, 'min = ', min, 'max = ', max, 'sd = ', vlen/(templateSize*2+1));
+ const sim = _getSimilarityOptimized({
+ image,
+ vlen,
+ cx: x + ii,
+ cy: y + jj,
+ templateData,
+ templateAvg,
+ templateWidth,
+ imageDataCumsum,
+ imageDataSqrCumsum,
+ width,
+ height
+ });
 
- // no other feature points within occSize square
- for (let j = -occSize; j <= occSize; j++) {
- for (let i = -occSize; i <= occSize; i++) {
- if (cy + j < 0 || cy + j >= height || cx + i < 0 || cx + i >= width) continue;
- image2[(cy + j) * width + (cx + i)] = 1.0;
+ if (sim === null) continue;
+ if (sim < minUnique) minUnique = sim;
+ if (sim > maxUnique) maxUnique = sim;
+ if (minUnique < MIN_THRESH || maxUnique > 0.99) {
+ failedUnique = true;
+ break;
+ }
+ }
+ if (failedUnique) break;
+ }
+
+ if (!failedUnique) {
+ coords.push({ x, y });
+ // Invalidate neighbors
+ for (let jj = -actualOccSize; jj <= actualOccSize; jj++) {
+ const yy = y + jj;
+ if (yy < 0 || yy >= height) continue;
+ const rowStart = yy * width;
+ for (let ii = -actualOccSize; ii <= actualOccSize; ii++) {
+ const xx = x + ii;
+ if (xx < 0 || xx >= width) continue;
+ invalidated[rowStart + xx] = 1;
+ }
+ }
  }
  }
+
+ if (coords.length >= maxFeatureNum) break;
  }
+
  return coords;
  };
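For scale, the maxFeatureNum cap in the new selection loop can be instantiated numerically; a worked example with an assumed 480x480 input (illustrative only, not taken from the package):

    // With TEMPLATE_SIZE = 6 and OCCUPANCY_SIZE = 10:
    // divSize       = (6 * 2 + 1) * 3 = 39
    // maxFeatureNum = floor(480 / 10) * floor(480 / 10)   // 48 * 48 = 2304
    //               + floor(480 / 39) * floor(480 / 39)   // 12 * 12 =  144
    //               = 2448 selected features at most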
 
@@ -353,67 +313,51 @@ const _templateVar = ({ image, cx, cy, sdThresh, imageDataCumsum, imageDataSqrCu
  return vlen;
  };
 
- const _getSimilarity = (options) => {
- const { image, cx, cy, vlen, tx, ty, imageDataCumsum, imageDataSqrCumsum } = options;
- const { data: imageData, width, height } = image;
- const templateSize = TEMPLATE_SIZE;
+ const _getSimilarityOptimized = (options) => {
+ const { cx, cy, vlen, templateData, templateAvg, templateWidth, imageDataCumsum, imageDataSqrCumsum, width, height } = options;
+ const imageData = options.image.data;
+ const templateSize = (templateWidth - 1) / 2;
 
  if (cx - templateSize < 0 || cx + templateSize >= width) return null;
  if (cy - templateSize < 0 || cy + templateSize >= height) return null;
 
- const templateWidth = 2 * templateSize + 1;
-
- let sx = imageDataCumsum.query(
+ const nP = templateWidth * templateWidth;
+ const sx = imageDataCumsum.query(
  cx - templateSize,
  cy - templateSize,
  cx + templateSize,
  cy + templateSize,
  );
- let sxx = imageDataSqrCumsum.query(
+ const sxx = imageDataSqrCumsum.query(
  cx - templateSize,
  cy - templateSize,
  cx + templateSize,
  cy + templateSize,
  );
- let sxy = 0;
 
- // !! This loop is the performance bottleneck. Use moving pointers to optimize
- //
- // for (let i = cx - templateSize, i2 = tx - templateSize; i <= cx + templateSize; i++, i2++) {
- // for (let j = cy - templateSize, j2 = ty - templateSize; j <= cy + templateSize; j++, j2++) {
- // sxy += imageData[j*width + i] * imageData[j2*width + i2];
- // }
- // }
- //
+ // Full calculation
+ let sxy = 0;
  let p1 = (cy - templateSize) * width + (cx - templateSize);
- let p2 = (ty - templateSize) * width + (tx - templateSize);
- let nextRowOffset = width - templateWidth;
+ let p2 = 0;
+ const nextRowOffset = width - templateWidth;
+
  for (let j = 0; j < templateWidth; j++) {
  for (let i = 0; i < templateWidth; i++) {
- sxy += imageData[p1] * imageData[p2];
- p1 += 1;
- p2 += 1;
+ sxy += imageData[p1++] * templateData[p2++];
  }
  p1 += nextRowOffset;
- p2 += nextRowOffset;
  }
 
- let templateAverage = imageDataCumsum.query(
- tx - templateSize,
- ty - templateSize,
- tx + templateSize,
- ty + templateSize,
- );
- templateAverage /= templateWidth * templateWidth;
- sxy -= templateAverage * sx;
+ // Covariance check
+ // E[(X-EX)(Y-EY)] = E[XY] - EX*EY
+ // sum((Xi - avgX)(Yi - avgY)) = sum(XiYi) - avgY * sum(Xi)
+ const sxy_final = sxy - templateAvg * sx;
 
- let vlen2 = sxx - (sx * sx) / (templateWidth * templateWidth);
- if (vlen2 == 0) return null;
+ let vlen2 = sxx - (sx * sx) / (nP);
+ if (vlen2 <= 0) return null;
  vlen2 = Math.sqrt(vlen2);
 
- // covariance between template and current pixel
- const sim = (1.0 * sxy) / (vlen * vlen2);
- return sim;
+ return (1.0 * sxy_final) / (vlen * vlen2);
  };
 
  export { extract };
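The covariance shortcut commented in _getSimilarityOptimized is the standard normalized cross-correlation identity. Written out (notation mine, not from the package): with y_i the cached template pixels, x_i the pixels of the candidate window centred at (cx, cy), and n = templateWidth^2,

    \sum_i (x_i - \bar{x})(y_i - \bar{y}) = \sum_i x_i y_i - \bar{y} \sum_i x_i
    \qquad \text{(the remaining terms cancel because } \sum_i x_i = n\bar{x}\text{)}

    \mathrm{sim} = \frac{\sum_i x_i y_i - \bar{y} \sum_i x_i}
                        {\sqrt{\sum_i y_i^2 - (\sum_i y_i)^2 / n}\;\sqrt{\sum_i x_i^2 - (\sum_i x_i)^2 / n}}

which is what the code returns as sxy_final / (vlen * vlen2), with vlen (computed earlier by _templateVar) and vlen2 supplying the two denominator factors.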