@srsergio/taptapp-ar 1.0.43 → 1.0.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +42 -45
  2. package/dist/compiler/aframe.js +8 -8
  3. package/dist/compiler/controller.d.ts +50 -76
  4. package/dist/compiler/controller.js +72 -116
  5. package/dist/compiler/detector/detector-lite.js +82 -99
  6. package/dist/compiler/index.js +3 -3
  7. package/dist/compiler/matching/hamming-distance.d.ts +8 -0
  8. package/dist/compiler/matching/hamming-distance.js +35 -16
  9. package/dist/compiler/matching/hierarchical-clustering.d.ts +9 -0
  10. package/dist/compiler/matching/hierarchical-clustering.js +76 -56
  11. package/dist/compiler/matching/matching.js +3 -3
  12. package/dist/compiler/node-worker.js +144 -18
  13. package/dist/compiler/offline-compiler.d.ts +34 -83
  14. package/dist/compiler/offline-compiler.js +92 -96
  15. package/dist/compiler/simple-ar.d.ts +31 -57
  16. package/dist/compiler/simple-ar.js +32 -73
  17. package/dist/compiler/three.d.ts +13 -8
  18. package/dist/compiler/three.js +6 -6
  19. package/dist/compiler/tracker/extract.js +17 -14
  20. package/dist/compiler/utils/images.js +11 -16
  21. package/dist/compiler/utils/lsh-direct.d.ts +12 -0
  22. package/dist/compiler/utils/lsh-direct.js +76 -0
  23. package/dist/compiler/utils/worker-pool.js +10 -1
  24. package/dist/index.d.ts +2 -2
  25. package/dist/index.js +2 -2
  26. package/dist/react/types.d.ts +1 -1
  27. package/dist/react/types.js +1 -1
  28. package/package.json +2 -1
  29. package/src/compiler/aframe.js +8 -8
  30. package/src/compiler/controller.ts +512 -0
  31. package/src/compiler/detector/detector-lite.js +87 -107
  32. package/src/compiler/index.js +3 -3
  33. package/src/compiler/matching/hamming-distance.js +39 -16
  34. package/src/compiler/matching/hierarchical-clustering.js +85 -57
  35. package/src/compiler/matching/matching.js +3 -3
  36. package/src/compiler/node-worker.js +163 -18
  37. package/src/compiler/offline-compiler.ts +513 -0
  38. package/src/compiler/{simple-ar.js → simple-ar.ts} +64 -91
  39. package/src/compiler/three.js +6 -6
  40. package/src/compiler/tracker/extract.js +18 -15
  41. package/src/compiler/utils/images.js +11 -21
  42. package/src/compiler/utils/lsh-direct.js +86 -0
  43. package/src/compiler/utils/worker-pool.js +9 -1
  44. package/src/index.ts +2 -2
  45. package/src/react/types.ts +2 -2
  46. package/src/compiler/controller.js +0 -554
  47. package/src/compiler/offline-compiler.js +0 -515
@@ -5,21 +5,49 @@ import { InputLoader } from "./input-loader.js";
5
5
  import { OneEuroFilter } from "../libs/one-euro-filter.js";
6
6
  let ControllerWorker;
7
7
  // Conditional import for worker to avoid crash in non-vite environments
8
- try {
9
- const workerModule = await import("./controller.worker.js?worker&inline");
10
- ControllerWorker = workerModule.default;
11
- }
12
- catch (e) {
13
- // Fallback for tests or other environments
14
- ControllerWorker = null;
15
- }
16
- const DEFAULT_FILTER_CUTOFF = 0.1; // Menor cutoff para filtrar más ruidos cuando está quieto
17
- const DEFAULT_FILTER_BETA = 0.01; // Beta bajo para suavizar movimientos rápidos
18
- const DEFAULT_WARMUP_TOLERANCE = 8; // Más frames de calentamiento para asegurar estabilidad inicial
19
- const DEFAULT_MISS_TOLERANCE = 2; // Reducido para que el objeto desaparezca más rápido tras pérdida
8
+ const getControllerWorker = async () => {
9
+ if (typeof Worker === 'undefined')
10
+ return null;
11
+ try {
12
+ // @ts-ignore
13
+ const workerModule = await import("./controller.worker.js?worker&inline");
14
+ return workerModule.default;
15
+ }
16
+ catch (e) {
17
+ return null;
18
+ }
19
+ };
20
+ ControllerWorker = await getControllerWorker();
21
+ const DEFAULT_FILTER_CUTOFF = 0.1;
22
+ const DEFAULT_FILTER_BETA = 0.01;
23
+ const DEFAULT_WARMUP_TOLERANCE = 8;
24
+ const DEFAULT_MISS_TOLERANCE = 2;
20
25
  class Controller {
21
- constructor({ inputWidth, inputHeight, onUpdate = null, debugMode = false, maxTrack = 1, warmupTolerance = null, missTolerance = null, filterMinCF = null, filterBeta = null, worker = null, // Allow custom worker injection
22
- }) {
26
+ inputWidth;
27
+ inputHeight;
28
+ maxTrack;
29
+ filterMinCF;
30
+ filterBeta;
31
+ warmupTolerance;
32
+ missTolerance;
33
+ cropDetector;
34
+ inputLoader;
35
+ markerDimensions = null;
36
+ onUpdate;
37
+ debugMode;
38
+ processingVideo = false;
39
+ interestedTargetIndex = -1;
40
+ trackingStates = [];
41
+ worker;
42
+ projectionTransform;
43
+ projectionMatrix;
44
+ tracker = null;
45
+ matchingDataList;
46
+ workerMatchDone = null;
47
+ workerTrackDone = null;
48
+ mainThreadMatcher;
49
+ mainThreadEstimator;
50
+ constructor({ inputWidth, inputHeight, onUpdate = null, debugMode = false, maxTrack = 1, warmupTolerance = null, missTolerance = null, filterMinCF = null, filterBeta = null, worker = null, }) {
23
51
  this.inputWidth = inputWidth;
24
52
  this.inputHeight = inputHeight;
25
53
  this.maxTrack = maxTrack;
@@ -27,14 +55,10 @@ class Controller {
27
55
  this.filterBeta = filterBeta === null ? DEFAULT_FILTER_BETA : filterBeta;
28
56
  this.warmupTolerance = warmupTolerance === null ? DEFAULT_WARMUP_TOLERANCE : warmupTolerance;
29
57
  this.missTolerance = missTolerance === null ? DEFAULT_MISS_TOLERANCE : missTolerance;
30
- this.cropDetector = new CropDetector(this.inputWidth, this.inputHeight, debugMode, true);
58
+ this.cropDetector = new CropDetector(this.inputWidth, this.inputHeight, debugMode);
31
59
  this.inputLoader = new InputLoader(this.inputWidth, this.inputHeight);
32
- this.markerDimensions = null;
33
60
  this.onUpdate = onUpdate;
34
61
  this.debugMode = debugMode;
35
- this.processingVideo = false;
36
- this.interestedTargetIndex = -1;
37
- this.trackingStates = [];
38
62
  this.worker = worker;
39
63
  if (this.worker)
40
64
  this._setupWorkerListener();
@@ -70,37 +94,27 @@ class Controller {
70
94
  _ensureWorker() {
71
95
  if (this.worker)
72
96
  return;
73
- if (ControllerWorker) {
97
+ if (ControllerWorker && typeof Worker !== 'undefined') {
74
98
  this.worker = new ControllerWorker();
75
99
  this._setupWorkerListener();
76
100
  }
77
101
  }
78
- /**
79
- * Load image targets from one or multiple .mind files
80
- * @param {string|string[]} fileURLs - Single URL or array of URLs to .mind files
81
- * @returns {Promise<{dimensions, matchingDataList, trackingDataList}>}
82
- */
83
102
  async addImageTargets(fileURLs) {
84
103
  const urls = Array.isArray(fileURLs) ? fileURLs : [fileURLs];
85
- // Fetch all .mind files in parallel
86
104
  const buffers = await Promise.all(urls.map(async (url) => {
87
105
  const response = await fetch(url);
88
106
  return response.arrayBuffer();
89
107
  }));
90
- // Combine all buffers into a single target list
91
108
  return this.addImageTargetsFromBuffers(buffers);
92
109
  }
93
- /**
94
- * Load image targets from multiple ArrayBuffers
95
- * @param {ArrayBuffer[]} buffers - Array of .mind file buffers
96
- */
97
110
  addImageTargetsFromBuffers(buffers) {
98
111
  const allTrackingData = [];
99
112
  const allMatchingData = [];
100
113
  const allDimensions = [];
101
114
  for (const buffer of buffers) {
102
115
  const compiler = new Compiler();
103
- const { dataList } = compiler.importData(buffer);
116
+ const result = compiler.importData(buffer);
117
+ const dataList = result.dataList || [];
104
118
  for (const item of dataList) {
105
119
  allMatchingData.push(item.matchingData);
106
120
  allTrackingData.push(item.trackingData);
@@ -120,13 +134,9 @@ class Controller {
120
134
  });
121
135
  }
122
136
  this.markerDimensions = allDimensions;
123
- this.matchingDataList = allMatchingData; // Store for main-thread fallback
137
+ this.matchingDataList = allMatchingData;
124
138
  return { dimensions: allDimensions, matchingDataList: allMatchingData, trackingDataList: allTrackingData };
125
139
  }
126
- /**
127
- * Load image targets from a single ArrayBuffer (backward compatible)
128
- * @param {ArrayBuffer} buffer - Single .mind file buffer
129
- */
130
140
  addImageTargetsFromBuffer(buffer) {
131
141
  return this.addImageTargetsFromBuffers([buffer]);
132
142
  }
@@ -146,31 +156,12 @@ class Controller {
146
156
  return this.projectionMatrix;
147
157
  }
148
158
  getRotatedZ90Matrix(m) {
149
- // rotate 90 degree along z-axis
150
- // rotation matrix
151
- // | 0 -1 0 0 |
152
- // | 1 0 0 0 |
153
- // | 0 0 1 0 |
154
- // | 0 0 0 1 |
155
- const rotatedMatrix = [
156
- -m[1],
157
- m[0],
158
- m[2],
159
- m[3],
160
- -m[5],
161
- m[4],
162
- m[6],
163
- m[7],
164
- -m[9],
165
- m[8],
166
- m[10],
167
- m[11],
168
- -m[13],
169
- m[12],
170
- m[14],
171
- m[15],
159
+ return [
160
+ -m[1], m[0], m[2], m[3],
161
+ -m[5], m[4], m[6], m[7],
162
+ -m[9], m[8], m[10], m[11],
163
+ -m[13], m[12], m[14], m[15],
172
164
  ];
173
- return rotatedMatrix;
174
165
  }
175
166
  getWorldMatrix(modelViewTransform, targetIndex) {
176
167
  return this._glModelViewMatrix(modelViewTransform, targetIndex);
@@ -183,7 +174,7 @@ class Controller {
183
174
  async _trackAndUpdate(inputData, lastModelViewTransform, targetIndex) {
184
175
  const { worldCoords, screenCoords } = this.tracker.track(inputData, lastModelViewTransform, targetIndex);
185
176
  if (worldCoords.length < 6)
186
- return null; // Umbral de puntos mínimos para mantener el seguimiento
177
+ return null;
187
178
  const modelViewTransform = await this._workerTrackUpdate(lastModelViewTransform, {
188
179
  worldCoords,
189
180
  screenCoords,
@@ -195,7 +186,7 @@ class Controller {
195
186
  return;
196
187
  this.processingVideo = true;
197
188
  this.trackingStates = [];
198
- for (let i = 0; i < this.markerDimensions.length; i++) {
189
+ for (let i = 0; i < (this.markerDimensions?.length || 0); i++) {
199
190
  this.trackingStates.push({
200
191
  showing: false,
201
192
  isTracking: false,
@@ -210,10 +201,7 @@ class Controller {
210
201
  if (!this.processingVideo)
211
202
  break;
212
203
  const inputData = this.inputLoader.loadInput(input);
213
- const nTracking = this.trackingStates.reduce((acc, s) => {
214
- return acc + (!!s.isTracking ? 1 : 0);
215
- }, 0);
216
- // detect and match only if less then maxTrack
204
+ const nTracking = this.trackingStates.reduce((acc, s) => acc + (!!s.isTracking ? 1 : 0), 0);
217
205
  if (nTracking < this.maxTrack) {
218
206
  const matchingIndexes = [];
219
207
  for (let i = 0; i < this.trackingStates.length; i++) {
@@ -230,7 +218,6 @@ class Controller {
230
218
  this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
231
219
  }
232
220
  }
233
- // tracking update
234
221
  for (let i = 0; i < this.trackingStates.length; i++) {
235
222
  const trackingState = this.trackingStates[i];
236
223
  if (trackingState.isTracking) {
@@ -242,7 +229,6 @@ class Controller {
242
229
  trackingState.currentModelViewTransform = modelViewTransform;
243
230
  }
244
231
  }
245
- // if not showing, then show it once it reaches warmup number of frames
246
232
  if (!trackingState.showing) {
247
233
  if (trackingState.isTracking) {
248
234
  trackingState.trackMiss = 0;
@@ -254,7 +240,6 @@ class Controller {
254
240
  }
255
241
  }
256
242
  }
257
- // if showing, then count miss, and hide it when reaches tolerance
258
243
  if (trackingState.showing) {
259
244
  if (!trackingState.isTracking) {
260
245
  trackingState.trackCount = 0;
@@ -262,32 +247,30 @@ class Controller {
262
247
  if (trackingState.trackMiss > this.missTolerance) {
263
248
  trackingState.showing = false;
264
249
  trackingState.trackingMatrix = null;
265
- this.onUpdate &&
266
- this.onUpdate({ type: "updateMatrix", targetIndex: i, worldMatrix: null });
250
+ this.onUpdate && this.onUpdate({ type: "updateMatrix", targetIndex: i, worldMatrix: null });
267
251
  }
268
252
  }
269
253
  else {
270
254
  trackingState.trackMiss = 0;
271
255
  }
272
256
  }
273
- // if showing, then call onUpdate, with world matrix
274
257
  if (trackingState.showing) {
275
258
  const worldMatrix = this._glModelViewMatrix(trackingState.currentModelViewTransform, i);
276
259
  trackingState.trackingMatrix = trackingState.filter.filter(Date.now(), worldMatrix);
277
- let clone = [];
278
- for (let j = 0; j < trackingState.trackingMatrix.length; j++) {
279
- clone[j] = trackingState.trackingMatrix[j];
280
- }
260
+ let clone = [...trackingState.trackingMatrix];
281
261
  const isInputRotated = input.width === this.inputHeight && input.height === this.inputWidth;
282
262
  if (isInputRotated) {
283
263
  clone = this.getRotatedZ90Matrix(clone);
284
264
  }
285
- this.onUpdate &&
286
- this.onUpdate({ type: "updateMatrix", targetIndex: i, worldMatrix: clone, modelViewTransform: trackingState.currentModelViewTransform });
265
+ this.onUpdate && this.onUpdate({
266
+ type: "updateMatrix",
267
+ targetIndex: i,
268
+ worldMatrix: clone,
269
+ modelViewTransform: trackingState.currentModelViewTransform
270
+ });
287
271
  }
288
272
  }
289
273
  this.onUpdate && this.onUpdate({ type: "processDone" });
290
- // Use requestAnimationFrame if available, otherwise just wait briefly
291
274
  if (typeof requestAnimationFrame !== "undefined") {
292
275
  await new Promise(requestAnimationFrame);
293
276
  }
@@ -314,18 +297,15 @@ class Controller {
314
297
  }
315
298
  async track(input, modelViewTransform, targetIndex) {
316
299
  const inputData = this.inputLoader.loadInput(input);
317
- const result = this.tracker.track(inputData, modelViewTransform, targetIndex);
318
- return result;
300
+ return this.tracker.track(inputData, modelViewTransform, targetIndex);
319
301
  }
320
302
  async trackUpdate(modelViewTransform, trackFeatures) {
321
303
  if (trackFeatures.worldCoords.length < 4)
322
304
  return null;
323
- const modelViewTransform2 = await this._workerTrackUpdate(modelViewTransform, trackFeatures);
324
- return modelViewTransform2;
305
+ return this._workerTrackUpdate(modelViewTransform, trackFeatures);
325
306
  }
326
307
  _workerMatch(featurePoints, targetIndexes) {
327
308
  return new Promise((resolve) => {
328
- // If no worker available, process on main thread
329
309
  if (!this.worker) {
330
310
  this._matchOnMainThread(featurePoints, targetIndexes).then(resolve);
331
311
  return;
@@ -343,7 +323,6 @@ class Controller {
343
323
  });
344
324
  }
345
325
  async _matchOnMainThread(featurePoints, targetIndexes) {
346
- // Lazy initialize Matcher and Estimator for main thread
347
326
  if (!this.mainThreadMatcher) {
348
327
  const { Matcher } = await import("./matching/matcher.js");
349
328
  const { Estimator } = await import("./estimation/estimator.js");
@@ -380,7 +359,6 @@ class Controller {
380
359
  }
381
360
  _workerTrackUpdate(modelViewTransform, trackingFeatures) {
382
361
  return new Promise((resolve) => {
383
- // If no worker available, process on main thread
384
362
  if (!this.worker) {
385
363
  this._trackUpdateOnMainThread(modelViewTransform, trackingFeatures).then(resolve);
386
364
  return;
@@ -398,55 +376,33 @@ class Controller {
398
376
  });
399
377
  }
400
378
  async _trackUpdateOnMainThread(modelViewTransform, trackingFeatures) {
401
- // Lazy initialize Estimator for main thread
402
379
  if (!this.mainThreadEstimator) {
403
380
  const { Estimator } = await import("./estimation/estimator.js");
404
381
  this.mainThreadEstimator = new Estimator(this.projectionTransform);
405
382
  }
406
383
  const { worldCoords, screenCoords } = trackingFeatures;
407
- const finalModelViewTransform = this.mainThreadEstimator.refineEstimate({
384
+ return this.mainThreadEstimator.refineEstimate({
408
385
  initialModelViewTransform: modelViewTransform,
409
386
  worldCoords,
410
387
  screenCoords,
411
388
  });
412
- return finalModelViewTransform;
413
389
  }
414
390
  _glModelViewMatrix(modelViewTransform, targetIndex) {
415
391
  const height = this.markerDimensions[targetIndex][1];
416
- const openGLWorldMatrix = [
417
- modelViewTransform[0][0],
418
- -modelViewTransform[1][0],
419
- -modelViewTransform[2][0],
420
- 0,
421
- -modelViewTransform[0][1],
422
- modelViewTransform[1][1],
423
- modelViewTransform[2][1],
424
- 0,
425
- -modelViewTransform[0][2],
426
- modelViewTransform[1][2],
427
- modelViewTransform[2][2],
428
- 0,
392
+ return [
393
+ modelViewTransform[0][0], -modelViewTransform[1][0], -modelViewTransform[2][0], 0,
394
+ -modelViewTransform[0][1], modelViewTransform[1][1], modelViewTransform[2][1], 0,
395
+ -modelViewTransform[0][2], modelViewTransform[1][2], modelViewTransform[2][2], 0,
429
396
  modelViewTransform[0][1] * height + modelViewTransform[0][3],
430
397
  -(modelViewTransform[1][1] * height + modelViewTransform[1][3]),
431
398
  -(modelViewTransform[2][1] * height + modelViewTransform[2][3]),
432
399
  1,
433
400
  ];
434
- return openGLWorldMatrix;
435
401
  }
436
402
  _glProjectionMatrix({ projectionTransform, width, height, near, far }) {
437
403
  const proj = [
438
- [
439
- (2 * projectionTransform[0][0]) / width,
440
- 0,
441
- -((2 * projectionTransform[0][2]) / width - 1),
442
- 0,
443
- ],
444
- [
445
- 0,
446
- (2 * projectionTransform[1][1]) / height,
447
- -((2 * projectionTransform[1][2]) / height - 1),
448
- 0,
449
- ],
404
+ [(2 * projectionTransform[0][0]) / width, 0, -((2 * projectionTransform[0][2]) / width - 1), 0],
405
+ [0, (2 * projectionTransform[1][1]) / height, -((2 * projectionTransform[1][2]) / height - 1), 0],
450
406
  [0, 0, -(far + near) / (far - near), (-2 * far * near) / (far - near)],
451
407
  [0, 0, -1, 0],
452
408
  ];
@@ -11,11 +11,11 @@
11
11
  */
12
12
  import { FREAKPOINTS } from "./freak.js";
13
13
  import { gpuCompute } from "../utils/gpu-compute.js";
14
- import { binarizeFREAK32 } from "../utils/lsh-binarizer.js";
15
- const PYRAMID_MIN_SIZE = 4; // Reducido de 8 a 4 para exprimir al máximo la resolución
14
+ import { computeLSH64, computeFullFREAK, packLSHIntoDescriptor } from "../utils/lsh-direct.js";
15
+ const PYRAMID_MIN_SIZE = 4; // Restored to 4 for better small-scale detection
16
16
  // PYRAMID_MAX_OCTAVE ya no es necesario, el límite lo da PYRAMID_MIN_SIZE
17
- const NUM_BUCKETS_PER_DIMENSION = 8;
18
- const MAX_FEATURES_PER_BUCKET = 12; // Ajustado para un equilibrio óptimo entre densidad y estabilidad
17
+ const NUM_BUCKETS_PER_DIMENSION = 10;
18
+ const MAX_FEATURES_PER_BUCKET = 30; // Maximized to ensure robustness in Moonshot mode
19
19
  const ORIENTATION_NUM_BINS = 36;
20
20
  const FREAK_EXPANSION_FACTOR = 7.0;
21
21
  // Global GPU mode flag
@@ -47,7 +47,7 @@ export class DetectorLite {
47
47
  if (numOctaves === 10)
48
48
  break;
49
49
  }
50
- this.numOctaves = numOctaves;
50
+ this.numOctaves = options.maxOctaves !== undefined ? Math.min(numOctaves, options.maxOctaves) : numOctaves;
51
51
  }
52
52
  /**
53
53
  * Detecta características en una imagen en escala de grises
@@ -79,15 +79,18 @@ export class DetectorLite {
79
79
  // 6. Calcular descriptores FREAK
80
80
  this._computeFreakDescriptors(prunedExtremas, pyramidImages);
81
81
  // Convertir a formato de salida
82
- const featurePoints = prunedExtremas.map(ext => ({
83
- maxima: ext.score > 0,
84
- x: ext.x * Math.pow(2, ext.octave) + Math.pow(2, ext.octave - 1) - 0.5,
85
- y: ext.y * Math.pow(2, ext.octave) + Math.pow(2, ext.octave - 1) - 0.5,
86
- scale: Math.pow(2, ext.octave),
87
- angle: ext.angle || 0,
88
- descriptors: (this.useLSH && ext.lsh) ? ext.lsh : (ext.descriptors || [])
89
- }));
90
- return { featurePoints };
82
+ const featurePoints = prunedExtremas.map(ext => {
83
+ const scale = Math.pow(2, ext.octave);
84
+ return {
85
+ maxima: ext.score > 0,
86
+ x: ext.x * scale + scale * 0.5 - 0.5,
87
+ y: ext.y * scale + scale * 0.5 - 0.5,
88
+ scale: scale,
89
+ angle: ext.angle || 0,
90
+ descriptors: (this.useLSH && ext.lsh) ? ext.descriptors : (ext.descriptors || [])
91
+ };
92
+ });
93
+ return { featurePoints, pyramid: pyramidImages };
91
94
  }
92
95
  /**
93
96
  * Construye una pirámide gaussiana
@@ -122,12 +125,15 @@ export class DetectorLite {
122
125
  let currentHeight = height;
123
126
  for (let i = 0; i < this.numOctaves; i++) {
124
127
  const img1 = this._applyGaussianFilter(currentData, currentWidth, currentHeight);
128
+ // Only need the second blur if we are going to compute DoG with the NEXT layer
129
+ // or if we need it for this octave's DoG.
130
+ // Actually, for maxOctaves=1, we only need img1 and maybe img2 for one DoG layer.
125
131
  const img2 = this._applyGaussianFilter(img1.data, currentWidth, currentHeight);
126
132
  pyramid.push([
127
133
  { data: img1.data, width: currentWidth, height: currentHeight },
128
134
  { data: img2.data, width: currentWidth, height: currentHeight }
129
135
  ]);
130
- // Downsample para siguiente octava
136
+ // Downsample para siguiente octava - Only if we have more octaves to go
131
137
  if (i < this.numOctaves - 1) {
132
138
  const downsampled = this._downsample(img2.data, currentWidth, currentHeight);
133
139
  currentData = downsampled.data;
@@ -143,40 +149,40 @@ export class DetectorLite {
143
149
  _applyGaussianFilter(data, width, height) {
144
150
  const output = new Float32Array(width * height);
145
151
  const temp = new Float32Array(width * height);
146
- const k0 = 1 / 16, k1 = 4 / 16, k2 = 6 / 16;
152
+ const k0 = 0.0625, k1 = 0.25, k2 = 0.375; // 1/16, 4/16, 6/16
147
153
  const w1 = width - 1;
148
- const h1 = height - 1;
149
- // Horizontal pass - unrolled kernel
154
+ // Horizontal pass - Speed optimized with manual border handling
150
155
  for (let y = 0; y < height; y++) {
151
156
  const rowOffset = y * width;
152
- for (let x = 0; x < width; x++) {
153
- const x0 = x < 2 ? 0 : x - 2;
154
- const x1 = x < 1 ? 0 : x - 1;
155
- const x3 = x > w1 - 1 ? w1 : x + 1;
156
- const x4 = x > w1 - 2 ? w1 : x + 2;
157
- temp[rowOffset + x] =
158
- data[rowOffset + x0] * k0 +
159
- data[rowOffset + x1] * k1 +
160
- data[rowOffset + x] * k2 +
161
- data[rowOffset + x3] * k1 +
162
- data[rowOffset + x4] * k0;
157
+ // Left border
158
+ temp[rowOffset] = data[rowOffset] * (k0 + k1 + k2) + data[rowOffset + 1] * k1 + data[rowOffset + 2] * k0;
159
+ temp[rowOffset + 1] = data[rowOffset] * k1 + data[rowOffset + 1] * k2 + data[rowOffset + 2] * k1 + data[rowOffset + 3] * k0;
160
+ // Main loop - NO boundary checks
161
+ for (let x = 2; x < width - 2; x++) {
162
+ const pos = rowOffset + x;
163
+ temp[pos] = data[pos - 2] * k0 + data[pos - 1] * k1 + data[pos] * k2 + data[pos + 1] * k1 + data[pos + 2] * k0;
163
164
  }
165
+ // Right border
166
+ const r2 = rowOffset + width - 2;
167
+ const r1 = rowOffset + width - 1;
168
+ temp[r2] = data[r2 - 2] * k0 + data[r2 - 1] * k1 + data[r2] * k2 + data[r1] * k1;
169
+ temp[r1] = data[r1 - 2] * k0 + data[r1 - 1] * k1 + data[r1] * (k2 + k1 + k0);
164
170
  }
165
- // Vertical pass - unrolled kernel
166
- for (let y = 0; y < height; y++) {
167
- const y0 = (y < 2 ? 0 : y - 2) * width;
168
- const y1 = (y < 1 ? 0 : y - 1) * width;
169
- const y2 = y * width;
170
- const y3 = (y > h1 - 1 ? h1 : y + 1) * width;
171
- const y4 = (y > h1 - 2 ? h1 : y + 2) * width;
172
- for (let x = 0; x < width; x++) {
173
- output[y2 + x] =
174
- temp[y0 + x] * k0 +
175
- temp[y1 + x] * k1 +
176
- temp[y2 + x] * k2 +
177
- temp[y3 + x] * k1 +
178
- temp[y4 + x] * k0;
171
+ // Vertical pass - Speed optimized
172
+ for (let x = 0; x < width; x++) {
173
+ // Top border
174
+ output[x] = temp[x] * (k0 + k1 + k2) + temp[x + width] * k1 + temp[x + width * 2] * k0;
175
+ output[x + width] = temp[x] * k1 + temp[x + width] * k2 + temp[x + width * 2] * k1 + temp[x + width * 3] * k0;
176
+ // Main loop - NO boundary checks
177
+ for (let y = 2; y < height - 2; y++) {
178
+ const p = y * width + x;
179
+ output[p] = temp[p - width * 2] * k0 + temp[p - width] * k1 + temp[p] * k2 + temp[p + width] * k1 + temp[p + width * 2] * k0;
179
180
  }
181
+ // Bottom border
182
+ const b2 = (height - 2) * width + x;
183
+ const b1 = (height - 1) * width + x;
184
+ output[b2] = temp[b2 - width * 2] * k0 + temp[b2 - width] * k1 + temp[b2] * k2 + temp[b1] * k1;
185
+ output[b1] = temp[b1 - width * 2] * k0 + temp[b1 - width] * k1 + temp[b1] * (k2 + k1 + k0);
180
186
  }
181
187
  return { data: output, width, height };
182
188
  }
@@ -184,29 +190,16 @@ export class DetectorLite {
184
190
  * Downsample imagen por factor de 2
185
191
  */
186
192
  _downsample(data, width, height) {
187
- const newWidth = Math.floor(width / 2);
188
- const newHeight = Math.floor(height / 2);
193
+ const newWidth = width >> 1;
194
+ const newHeight = height >> 1;
189
195
  const output = new Float32Array(newWidth * newHeight);
190
196
  for (let y = 0; y < newHeight; y++) {
197
+ const r0 = (y * 2) * width;
198
+ const r1 = r0 + width;
199
+ const dr = y * newWidth;
191
200
  for (let x = 0; x < newWidth; x++) {
192
- // Interpolación bilinear
193
- const srcX = x * 2 + 0.5;
194
- const srcY = y * 2 + 0.5;
195
- const x0 = Math.floor(srcX);
196
- const y0 = Math.floor(srcY);
197
- const x1 = Math.min(x0 + 1, width - 1);
198
- const y1 = Math.min(y0 + 1, height - 1);
199
- const fx = srcX - x0;
200
- const fy = srcY - y0;
201
- const v00 = data[y0 * width + x0];
202
- const v10 = data[y0 * width + x1];
203
- const v01 = data[y1 * width + x0];
204
- const v11 = data[y1 * width + x1];
205
- output[y * newWidth + x] =
206
- v00 * (1 - fx) * (1 - fy) +
207
- v10 * fx * (1 - fy) +
208
- v01 * (1 - fx) * fy +
209
- v11 * fx * fy;
201
+ const i2 = x * 2;
202
+ output[dr + x] = (data[r0 + i2] + data[r0 + i2 + 1] + data[r1 + i2] + data[r1 + i2 + 1]) * 0.25;
210
203
  }
211
204
  }
212
205
  return { data: output, width: newWidth, height: newHeight };
@@ -234,19 +227,17 @@ export class DetectorLite {
234
227
  */
235
228
  _findExtremas(dogPyramid, pyramidImages) {
236
229
  const extremas = [];
237
- for (let octave = 1; octave < dogPyramid.length - 1; octave++) {
230
+ for (let octave = 0; octave < dogPyramid.length; octave++) {
238
231
  const curr = dogPyramid[octave];
239
- const prev = dogPyramid[octave - 1];
240
- const next = dogPyramid[octave + 1];
232
+ const prev = octave > 0 ? dogPyramid[octave - 1] : null;
233
+ const next = octave < dogPyramid.length - 1 ? dogPyramid[octave + 1] : null;
241
234
  const width = curr.width;
242
235
  const height = curr.height;
243
- const prevWidth = prev.width;
244
- const nextWidth = next.width;
245
236
  for (let y = 1; y < height - 1; y++) {
246
237
  for (let x = 1; x < width - 1; x++) {
247
238
  const val = curr.data[y * width + x];
248
- if (Math.abs(val) < 0.01)
249
- continue; // Threshold reducido de 0.015 a 0.01 para mayor sensibilidad
239
+ if (Math.abs(val) < 0.003)
240
+ continue; // Aggressively lowered threshold to 0.003 for max sensitivity
250
241
  let isMaxima = true;
251
242
  let isMinima = true;
252
243
  // Check 3x3 neighborhood in current scale
@@ -261,10 +252,11 @@ export class DetectorLite {
261
252
  isMinima = false;
262
253
  }
263
254
  }
264
- // Check previous scale (scaled coordinates)
265
- if (isMaxima || isMinima) {
266
- const px = Math.floor(x * 2);
267
- const py = Math.floor(y * 2);
255
+ // Check previous scale (scaled coordinates) - skip if no prev layer
256
+ if ((isMaxima || isMinima) && prev) {
257
+ const px = x << 1;
258
+ const py = y << 1;
259
+ const prevWidth = prev.width;
268
260
  for (let dy = -1; dy <= 1 && (isMaxima || isMinima); dy++) {
269
261
  for (let dx = -1; dx <= 1 && (isMaxima || isMinima); dx++) {
270
262
  const xx = Math.max(0, Math.min(prevWidth - 1, px + dx));
@@ -277,10 +269,11 @@ export class DetectorLite {
277
269
  }
278
270
  }
279
271
  }
280
- // Check next scale (scaled coordinates)
281
- if (isMaxima || isMinima) {
282
- const nx = Math.floor(x / 2);
283
- const ny = Math.floor(y / 2);
272
+ // Check next scale (scaled coordinates) - skip if no next layer
273
+ if ((isMaxima || isMinima) && next) {
274
+ const nx = x >> 1;
275
+ const ny = y >> 1;
276
+ const nextWidth = next.width;
284
277
  for (let dy = -1; dy <= 1 && (isMaxima || isMinima); dy++) {
285
278
  for (let dx = -1; dx <= 1 && (isMaxima || isMinima); dx++) {
286
279
  const xx = Math.max(0, Math.min(nextWidth - 1, nx + dx));
@@ -341,7 +334,7 @@ export class DetectorLite {
341
334
  */
342
335
  _computeOrientations(extremas, pyramidImages) {
343
336
  for (const ext of extremas) {
344
- if (ext.octave < 1 || ext.octave >= pyramidImages.length) {
337
+ if (ext.octave < 0 || ext.octave >= pyramidImages.length) {
345
338
  ext.angle = 0;
346
339
  continue;
347
340
  }
@@ -384,8 +377,8 @@ export class DetectorLite {
384
377
  */
385
378
  _computeFreakDescriptors(extremas, pyramidImages) {
386
379
  for (const ext of extremas) {
387
- if (ext.octave < 1 || ext.octave >= pyramidImages.length) {
388
- ext.descriptors = [];
380
+ if (ext.octave < 0 || ext.octave >= pyramidImages.length) {
381
+ ext.descriptors = new Uint8Array(8);
389
382
  continue;
390
383
  }
391
384
  const img = pyramidImages[ext.octave][1];
@@ -412,26 +405,16 @@ export class DetectorLite {
412
405
  data[y1 * width + x0] * (1 - fracX) * fracY +
413
406
  data[y1 * width + x1] * fracX * fracY;
414
407
  }
415
- // Pack pairs into Uint8Array (84 bytes per descriptor)
416
- const descriptor = new Uint8Array(84);
417
- let bitCount = 0;
418
- let byteIdx = 0;
419
- for (let i = 0; i < FREAKPOINTS.length; i++) {
420
- for (let j = i + 1; j < FREAKPOINTS.length; j++) {
421
- if (samples[i] < samples[j]) {
422
- descriptor[byteIdx] |= (1 << (7 - bitCount));
423
- }
424
- bitCount++;
425
- if (bitCount === 8) {
426
- byteIdx++;
427
- bitCount = 0;
428
- }
429
- }
430
- }
408
+ // 🚀 MOONSHOT: Direct LSH computation
409
+ // Avoids computing 672 bits of FREAK just to sample 64.
431
410
  if (this.useLSH) {
432
- ext.lsh = binarizeFREAK32(descriptor);
411
+ ext.lsh = computeLSH64(samples);
412
+ // Pack LSH into 8-byte descriptors for compatibility
413
+ ext.descriptors = packLSHIntoDescriptor(ext.lsh);
414
+ }
415
+ else {
416
+ ext.descriptors = computeFullFREAK(samples);
433
417
  }
434
- ext.descriptors = descriptor;
435
418
  }
436
419
  }
437
420
  }
@@ -1,8 +1,8 @@
1
1
  import { OfflineCompiler } from "./offline-compiler.js";
2
2
  export { OfflineCompiler };
3
- if (!window.MINDAR) {
4
- window.MINDAR = {};
3
+ if (!window.TAAR) {
4
+ window.TAAR = {};
5
5
  }
6
- window.MINDAR.IMAGE = {
6
+ window.TAAR.IMAGE = {
7
7
  OfflineCompiler,
8
8
  };