@srsergio/taptapp-ar 1.0.93 → 1.0.95

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/README.md +16 -14
  2. package/dist/compiler/offline-compiler.d.ts +3 -3
  3. package/dist/compiler/offline-compiler.js +50 -33
  4. package/dist/core/constants.d.ts +2 -0
  5. package/dist/core/constants.js +4 -1
  6. package/dist/core/detector/detector-lite.d.ts +6 -5
  7. package/dist/core/detector/detector-lite.js +46 -16
  8. package/dist/core/matching/matcher.d.ts +1 -1
  9. package/dist/core/matching/matcher.js +7 -4
  10. package/dist/core/matching/matching.d.ts +2 -1
  11. package/dist/core/matching/matching.js +43 -11
  12. package/dist/core/perception/bio-inspired-engine.d.ts +130 -0
  13. package/dist/core/perception/bio-inspired-engine.js +232 -0
  14. package/dist/core/perception/foveal-attention.d.ts +142 -0
  15. package/dist/core/perception/foveal-attention.js +280 -0
  16. package/dist/core/perception/index.d.ts +6 -0
  17. package/dist/core/perception/index.js +17 -0
  18. package/dist/core/perception/predictive-coding.d.ts +92 -0
  19. package/dist/core/perception/predictive-coding.js +278 -0
  20. package/dist/core/perception/saccadic-controller.d.ts +126 -0
  21. package/dist/core/perception/saccadic-controller.js +269 -0
  22. package/dist/core/perception/saliency-map.d.ts +74 -0
  23. package/dist/core/perception/saliency-map.js +254 -0
  24. package/dist/core/perception/scale-orchestrator.d.ts +28 -0
  25. package/dist/core/perception/scale-orchestrator.js +68 -0
  26. package/dist/core/protocol.d.ts +14 -1
  27. package/dist/core/protocol.js +33 -1
  28. package/dist/runtime/bio-inspired-controller.d.ts +135 -0
  29. package/dist/runtime/bio-inspired-controller.js +358 -0
  30. package/dist/runtime/controller.d.ts +11 -2
  31. package/dist/runtime/controller.js +20 -8
  32. package/dist/runtime/controller.worker.js +2 -2
  33. package/dist/runtime/simple-ar.d.ts +24 -20
  34. package/dist/runtime/simple-ar.js +172 -156
  35. package/package.json +1 -1
  36. package/src/compiler/offline-compiler.ts +56 -36
  37. package/src/core/constants.ts +5 -1
  38. package/src/core/detector/detector-lite.js +46 -16
  39. package/src/core/matching/matcher.js +8 -4
  40. package/src/core/matching/matching.js +51 -12
  41. package/src/core/perception/bio-inspired-engine.js +275 -0
  42. package/src/core/perception/foveal-attention.js +306 -0
  43. package/src/core/perception/index.js +18 -0
  44. package/src/core/perception/predictive-coding.js +327 -0
  45. package/src/core/perception/saccadic-controller.js +303 -0
  46. package/src/core/perception/saliency-map.js +296 -0
  47. package/src/core/perception/scale-orchestrator.js +80 -0
  48. package/src/core/protocol.ts +38 -1
  49. package/src/runtime/bio-inspired-controller.ts +448 -0
  50. package/src/runtime/controller.ts +22 -7
  51. package/src/runtime/controller.worker.js +2 -1
  52. package/src/runtime/simple-ar.ts +197 -171
@@ -1,5 +1,54 @@
1
- import { Controller } from "./controller.js";
1
+ import { BioInspiredController } from "./bio-inspired-controller.js";
2
2
  import { projectToScreen } from "../core/utils/projection.js";
3
+ /**
4
+ * 🕵️ Internal Smoothing Manager
5
+ * Applies Median + Adaptive Alpha filtering for sub-pixel stability.
6
+ */
7
+ class SmoothingManager {
8
+ history = new Map();
9
+ lastFiltered = new Map();
10
+ medianSize = 3;
11
+ deadZone = 0.2;
12
+ smooth(id, raw, reliability) {
13
+ if (!this.history.has(id))
14
+ this.history.set(id, []);
15
+ const h = this.history.get(id);
16
+ h.push(raw);
17
+ if (h.length > this.medianSize)
18
+ h.shift();
19
+ // Get median
20
+ const sortedX = [...h].map(p => p.x).sort((a, b) => a - b);
21
+ const sortedY = [...h].map(p => p.y).sort((a, b) => a - b);
22
+ const median = {
23
+ x: sortedX[Math.floor(sortedX.length / 2)],
24
+ y: sortedY[Math.floor(sortedY.length / 2)]
25
+ };
26
+ // Adaptive Alpha based on reliability
27
+ const baseAlpha = 0.15;
28
+ const alpha = baseAlpha + (reliability * (1.0 - baseAlpha));
29
+ const last = this.lastFiltered.get(id) || median;
30
+ let filteredX = last.x * (1 - alpha) + median.x * alpha;
31
+ let filteredY = last.y * (1 - alpha) + median.y * alpha;
32
+ // Dead-zone to kill jitter at rest
33
+ if (Math.abs(filteredX - last.x) < this.deadZone)
34
+ filteredX = last.x;
35
+ if (Math.abs(filteredY - last.y) < this.deadZone)
36
+ filteredY = last.y;
37
+ const result = { x: filteredX, y: filteredY };
38
+ this.lastFiltered.set(id, result);
39
+ return result;
40
+ }
41
+ reset(id) {
42
+ if (id !== undefined) {
43
+ this.history.delete(id);
44
+ this.lastFiltered.delete(id);
45
+ }
46
+ else {
47
+ this.history.clear();
48
+ this.lastFiltered.clear();
49
+ }
50
+ }
51
+ }
3
52
  class SimpleAR {
4
53
  container;
5
54
  targetSrc;
@@ -10,16 +59,17 @@ class SimpleAR {
10
59
  onUpdateCallback;
11
60
  cameraConfig;
12
61
  debug;
13
- lastTime;
14
- frameCount;
15
- fps;
16
- debugPanel = null;
17
62
  video = null;
18
63
  controller = null;
64
+ smoother = new SmoothingManager();
19
65
  isTracking = false;
20
- lastMatrix = null;
21
- filters = [];
22
66
  markerDimensions = [];
67
+ debugPanel = null;
68
+ debugCanvas = null;
69
+ debugCtx = null;
70
+ lastTime = 0;
71
+ fps = 0;
72
+ frameCount = 0;
23
73
  constructor({ container, targetSrc, overlay, scale = 1.0, onFound = null, onLost = null, onUpdate = null, cameraConfig = { facingMode: 'environment', width: 1280, height: 720 }, debug = false, }) {
24
74
  this.container = container;
25
75
  this.targetSrc = targetSrc;
@@ -30,23 +80,19 @@ class SimpleAR {
30
80
  this.onUpdateCallback = onUpdate;
31
81
  this.cameraConfig = cameraConfig;
32
82
  this.debug = debug;
33
- // @ts-ignore
34
- if (this.debug)
35
- window.AR_DEBUG = true;
36
- this.lastTime = performance.now();
37
- this.frameCount = 0;
38
- this.fps = 0;
39
83
  }
40
84
  async start() {
41
85
  this._createVideo();
42
86
  await this._startCamera();
43
87
  this._initController();
44
- if (this.debug)
88
+ if (this.debug) {
45
89
  this._createDebugPanel();
90
+ this._createDebugCanvas();
91
+ }
46
92
  const targets = Array.isArray(this.targetSrc) ? this.targetSrc : [this.targetSrc];
47
93
  const result = await this.controller.addImageTargets(targets);
48
94
  this.markerDimensions = result.dimensions;
49
- console.log("Targets loaded. Dimensions:", this.markerDimensions);
95
+ // Kick off loop
50
96
  this.controller.processVideo(this.video);
51
97
  return this;
52
98
  }
@@ -61,7 +107,7 @@ class SimpleAR {
61
107
  this.video = null;
62
108
  }
63
109
  this.isTracking = false;
64
- this.markerDimensions = [];
110
+ this.smoother.reset();
65
111
  }
66
112
  _createVideo() {
67
113
  this.video = document.createElement('video');
@@ -70,21 +116,15 @@ class SimpleAR {
70
116
  this.video.setAttribute('muted', '');
71
117
  this.video.style.cssText = `
72
118
  position: absolute;
73
- top: 0;
74
- left: 0;
75
- width: 100%;
76
- height: 100%;
77
- object-fit: cover;
78
- z-index: 0;
119
+ top: 0; left: 0; width: 100%; height: 100%;
120
+ object-fit: cover; z-index: 0;
79
121
  `;
80
122
  this.container.style.position = 'relative';
81
123
  this.container.style.overflow = 'hidden';
82
124
  this.container.insertBefore(this.video, this.container.firstChild);
83
125
  }
84
126
  async _startCamera() {
85
- const stream = await navigator.mediaDevices.getUserMedia({
86
- video: this.cameraConfig
87
- });
127
+ const stream = await navigator.mediaDevices.getUserMedia({ video: this.cameraConfig });
88
128
  this.video.srcObject = stream;
89
129
  await this.video.play();
90
130
  await new Promise(resolve => {
@@ -94,114 +134,76 @@ class SimpleAR {
94
134
  });
95
135
  }
96
136
  _initController() {
97
- this.controller = new Controller({
137
+ this.controller = new BioInspiredController({
98
138
  inputWidth: this.video.videoWidth,
99
139
  inputHeight: this.video.videoHeight,
100
140
  debugMode: this.debug,
101
- warmupTolerance: 3, // 🚀 Faster lock than default
102
- missTolerance: 10, // 🛡️ More resilient to temporary occlusion
141
+ bioInspired: {
142
+ enabled: true,
143
+ aggressiveSkipping: false
144
+ },
103
145
  onUpdate: (data) => this._handleUpdate(data)
104
146
  });
105
147
  }
106
148
  _handleUpdate(data) {
107
- if (data.type !== 'updateMatrix')
149
+ if (data.type !== 'updateMatrix') {
150
+ if (data.type === 'featurePoints' && this.debugCtx) {
151
+ this._drawDebugFeatures(data.featurePoints);
152
+ }
108
153
  return;
154
+ }
155
+ // FPS Meter
109
156
  const now = performance.now();
110
157
  this.frameCount++;
111
158
  if (now - this.lastTime >= 1000) {
112
159
  this.fps = Math.round((this.frameCount * 1000) / (now - this.lastTime));
113
160
  this.frameCount = 0;
114
161
  this.lastTime = now;
115
- if (this.debug)
116
- this._updateDebugPanel(this.isTracking);
117
162
  }
118
- const { targetIndex, worldMatrix, modelViewTransform, screenCoords, reliabilities, stabilities, detectionPoints } = data;
119
- // Project points to screen coordinates
120
- let projectedPoints = [];
163
+ const { targetIndex, worldMatrix, modelViewTransform, reliabilities, stabilities, screenCoords, pixelsSaved } = data;
164
+ // Apply Smoothing
165
+ let smoothedCoords = screenCoords || [];
121
166
  if (screenCoords && screenCoords.length > 0) {
122
- const containerRect = this.container.getBoundingClientRect();
123
- const videoW = this.video.videoWidth;
124
- const videoH = this.video.videoHeight;
125
- const isPortrait = containerRect.height > containerRect.width;
126
- const isVideoLandscape = videoW > videoH;
127
- const needsRotation = isPortrait && isVideoLandscape;
128
- const proj = this.controller.projectionTransform;
129
- const vW = needsRotation ? videoH : videoW;
130
- const vH = needsRotation ? videoW : videoH;
131
- const pScale = Math.max(containerRect.width / vW, containerRect.height / vH);
132
- const dW = vW * pScale;
133
- const dH = vH * pScale;
134
- const oX = (containerRect.width - dW) / 2;
135
- const oY = (containerRect.height - dH) / 2;
136
- projectedPoints = screenCoords.map((p) => {
137
- let sx, sy;
138
- if (needsRotation) {
139
- sx = oX + (dW / 2) - (p.y - proj[1][2]) * pScale;
140
- sy = oY + (dH / 2) + (p.x - proj[0][2]) * pScale;
141
- }
142
- else {
143
- sx = oX + (dW / 2) + (p.x - proj[0][2]) * pScale;
144
- sy = oY + (dH / 2) + (p.y - proj[1][2]) * pScale;
145
- }
146
- return { x: sx, y: sy };
147
- });
148
- }
149
- let projectedDetectionPoints = [];
150
- if (detectionPoints && detectionPoints.length > 0) {
151
- const containerRect = this.container.getBoundingClientRect();
152
- const videoW = this.video.videoWidth;
153
- const videoH = this.video.videoHeight;
154
- const isPortrait = containerRect.height > containerRect.width;
155
- const isVideoLandscape = videoW > videoH;
156
- const needsRotation = isPortrait && isVideoLandscape;
157
- const proj = this.controller.projectionTransform;
158
- const vW = needsRotation ? videoH : videoW;
159
- const vH = needsRotation ? videoW : videoH;
160
- const pScale = Math.max(containerRect.width / vW, containerRect.height / vH);
161
- const dW = vW * pScale;
162
- const dH = vH * pScale;
163
- const oX = (containerRect.width - dW) / 2;
164
- const oY = (containerRect.height - dH) / 2;
165
- projectedDetectionPoints = detectionPoints.map((p) => {
166
- let sx, sy;
167
- if (needsRotation) {
168
- sx = oX + (dW / 2) - (p.y - proj[1][2]) * pScale;
169
- sy = oY + (dH / 2) + (p.x - proj[0][2]) * pScale;
170
- }
171
- else {
172
- sx = oX + (dW / 2) + (p.x - proj[0][2]) * pScale;
173
- sy = oY + (dH / 2) + (p.y - proj[1][2]) * pScale;
174
- }
175
- return { x: sx, y: sy };
167
+ smoothedCoords = screenCoords.map((p) => {
168
+ const rel = reliabilities ? (reliabilities[p.id] || 0.5) : 0.5;
169
+ const sm = this.smoother.smooth(p.id, p, rel);
170
+ return { ...sm, id: p.id };
176
171
  });
177
172
  }
178
173
  if (worldMatrix) {
179
174
  if (!this.isTracking) {
180
175
  this.isTracking = true;
181
- this.overlay && (this.overlay.style.opacity = '1');
176
+ if (this.overlay)
177
+ this.overlay.style.opacity = '1';
182
178
  this.onFound && this.onFound({ targetIndex });
183
179
  }
184
- this.lastMatrix = worldMatrix;
185
180
  this._positionOverlay(modelViewTransform, targetIndex);
186
181
  }
187
182
  else {
188
183
  if (this.isTracking) {
189
184
  this.isTracking = false;
190
- this.overlay && (this.overlay.style.opacity = '0');
185
+ if (this.overlay)
186
+ this.overlay.style.opacity = '0';
191
187
  this.onLost && this.onLost({ targetIndex });
188
+ this.smoother.reset();
192
189
  }
193
190
  }
194
- // Always notify the callback if we have points, or if we just lost tracking
195
- if (projectedPoints.length > 0 || projectedDetectionPoints.length > 0 || (worldMatrix === null && data.type === 'updateMatrix')) {
196
- this.onUpdateCallback && this.onUpdateCallback({
191
+ // Notify callback
192
+ if (this.onUpdateCallback) {
193
+ this.onUpdateCallback({
197
194
  targetIndex,
198
195
  worldMatrix,
199
- screenCoords: projectedPoints,
196
+ screenCoords: smoothedCoords,
200
197
  reliabilities: reliabilities || [],
201
198
  stabilities: stabilities || [],
202
- detectionPoints: projectedDetectionPoints
199
+ detectionPoints: data.featurePoints
203
200
  });
204
201
  }
202
+ // Draw Debug UI
203
+ if (this.debug) {
204
+ this._updateHUD(data);
205
+ this._drawDebugPoints(smoothedCoords, stabilities);
206
+ }
205
207
  }
206
208
  _positionOverlay(mVT, targetIndex) {
207
209
  if (!this.overlay || !this.markerDimensions[targetIndex])
@@ -210,51 +212,16 @@ class SimpleAR {
210
212
  const containerRect = this.container.getBoundingClientRect();
211
213
  const videoW = this.video.videoWidth;
212
214
  const videoH = this.video.videoHeight;
215
+ const proj = this.controller.projectionTransform;
216
+ // Handle portrait rotation for mobile
213
217
  const isPortrait = containerRect.height > containerRect.width;
214
218
  const isVideoLandscape = videoW > videoH;
215
219
  const needsRotation = isPortrait && isVideoLandscape;
216
- const proj = this.controller.projectionTransform;
217
220
  const pUL = projectToScreen(0, 0, 0, mVT, proj, videoW, videoH, containerRect, needsRotation);
218
221
  const pUR = projectToScreen(markerW, 0, 0, mVT, proj, videoW, videoH, containerRect, needsRotation);
219
222
  const pLL = projectToScreen(0, markerH, 0, mVT, proj, videoW, videoH, containerRect, needsRotation);
220
223
  const pLR = projectToScreen(markerW, markerH, 0, mVT, proj, videoW, videoH, containerRect, needsRotation);
221
- const solveHomography = (w, h, p1, p2, p3, p4) => {
222
- const x1 = p1.sx, y1 = p1.sy;
223
- const x2 = p2.sx, y2 = p2.sy;
224
- const x3 = p3.sx, y3 = p3.sy;
225
- const x4 = p4.sx, y4 = p4.sy;
226
- const dx1 = x2 - x4, dx2 = x3 - x4, dx3 = x1 - x2 + x4 - x3;
227
- const dy1 = y2 - y4, dy2 = y3 - y4, dy3 = y1 - y2 + y4 - y3;
228
- let a, b, c, d, e, f, g, h_coeff;
229
- if (dx3 === 0 && dy3 === 0) {
230
- a = x2 - x1;
231
- b = x3 - x1;
232
- c = x1;
233
- d = y2 - y1;
234
- e = y3 - y1;
235
- f = y1;
236
- g = 0;
237
- h_coeff = 0;
238
- }
239
- else {
240
- const det = dx1 * dy2 - dx2 * dy1;
241
- g = (dx3 * dy2 - dx2 * dy3) / det;
242
- h_coeff = (dx1 * dy3 - dx3 * dy1) / det;
243
- a = x2 - x1 + g * x2;
244
- b = x3 - x1 + h_coeff * x3;
245
- c = x1;
246
- d = y2 - y1 + g * y2;
247
- e = y3 - y1 + h_coeff * y3;
248
- f = y1;
249
- }
250
- return [
251
- a / w, d / w, 0, g / w,
252
- b / h, e / h, 0, h_coeff / h,
253
- 0, 0, 1, 0,
254
- c, f, 0, 1
255
- ];
256
- };
257
- const matrix = solveHomography(markerW, markerH, pUL, pUR, pLL, pLR);
224
+ const matrix = this._solveHomography(markerW, markerH, pUL, pUR, pLL, pLR);
258
225
  this.overlay.style.maxWidth = 'none';
259
226
  this.overlay.style.width = `${markerW}px`;
260
227
  this.overlay.style.height = `${markerH}px`;
@@ -270,38 +237,87 @@ class SimpleAR {
270
237
  translate(${-markerW / 2}px, ${-markerH / 2}px)
271
238
  `;
272
239
  }
240
+ _solveHomography(w, h, p1, p2, p3, p4) {
241
+ const x1 = p1.sx, y1 = p1.sy;
242
+ const x2 = p2.sx, y2 = p2.sy;
243
+ const x3 = p3.sx, y3 = p3.sy;
244
+ const x4 = p4.sx, y4 = p4.sy;
245
+ const dx1 = x2 - x4, dx2 = x3 - x4, dx3 = x1 - x2 + x4 - x3;
246
+ const dy1 = y2 - y4, dy2 = y3 - y4, dy3 = y1 - y2 + y4 - y3;
247
+ const det = dx1 * dy2 - dx2 * dy1;
248
+ const g = (dx3 * dy2 - dx2 * dy3) / det;
249
+ const h_coeff = (dx1 * dy3 - dx3 * dy1) / det;
250
+ const a = x2 - x1 + g * x2;
251
+ const b = x3 - x1 + h_coeff * x3;
252
+ const c = x1;
253
+ const d = y2 - y1 + g * y2;
254
+ const e = y3 - y1 + h_coeff * y3;
255
+ const f = y1;
256
+ return [
257
+ a / w, d / w, 0, g / w,
258
+ b / h, e / h, 0, h_coeff / h,
259
+ 0, 0, 1, 0,
260
+ c, f, 0, 1
261
+ ];
262
+ }
263
+ // --- DEBUG METHODS ---
273
264
  _createDebugPanel() {
274
265
  this.debugPanel = document.createElement('div');
275
266
  this.debugPanel.style.cssText = `
276
- position: absolute;
277
- top: 10px;
278
- left: 10px;
279
- background: rgba(0, 0, 0, 0.8);
280
- color: #0f0;
281
- font-family: monospace;
282
- font-size: 12px;
283
- padding: 8px;
284
- border-radius: 4px;
285
- z-index: 99999;
286
- pointer-events: none;
287
- line-height: 1.5;
267
+ position: absolute; top: 10px; left: 10px;
268
+ background: rgba(0, 0, 0, 0.7); color: #0f0;
269
+ font-family: monospace; font-size: 11px; padding: 10px;
270
+ border-radius: 5px; z-index: 100; pointer-events: none;
271
+ line-height: 1.4; border-left: 3px solid #0f0;
288
272
  `;
289
273
  this.container.appendChild(this.debugPanel);
290
274
  }
291
- _updateDebugPanel(isTracking) {
275
+ _createDebugCanvas() {
276
+ this.debugCanvas = document.createElement('canvas');
277
+ this.debugCanvas.width = this.container.clientWidth;
278
+ this.debugCanvas.height = this.container.clientHeight;
279
+ this.debugCanvas.style.cssText = `
280
+ position: absolute; top: 0; left: 0; width: 100%; height: 100%;
281
+ pointer-events: none; z-index: 99;
282
+ `;
283
+ this.container.appendChild(this.debugCanvas);
284
+ this.debugCtx = this.debugCanvas.getContext('2d');
285
+ }
286
+ _updateHUD(data) {
292
287
  if (!this.debugPanel)
293
288
  return;
294
- // @ts-ignore
295
- const memory = performance.memory ? Math.round(performance.memory.usedJSHeapSize / 1024 / 1024) : '?';
296
- const color = isTracking ? '#0f0' : '#f00';
297
- const status = isTracking ? 'TRACKING' : 'SEARCHING';
289
+ const rel = data.reliabilities ? (data.reliabilities.reduce((a, b) => a + b, 0) / data.reliabilities.length).toFixed(2) : "0.00";
290
+ const stab = data.stabilities ? (data.stabilities.reduce((a, b) => a + b, 0) / data.stabilities.length).toFixed(2) : "0.00";
291
+ const savings = data.pixelsSaved ? ((data.pixelsSaved / (this.video.videoWidth * this.video.videoHeight)) * 100).toFixed(0) : "0";
298
292
  this.debugPanel.innerHTML = `
299
- <div>HEAD-UP DISPLAY</div>
300
- <div>----------------</div>
301
- <div>FPS: ${this.fps}</div>
302
- <div>STATUS: <span style="color:${color}">${status}</span></div>
303
- <div>MEM: ${memory} MB</div>
293
+ <b>TapTapp AR HUD</b><br>
294
+ ------------------<br>
295
+ STATUS: <span style="color:${this.isTracking ? '#0f0' : '#f00'}">${this.isTracking ? 'TRACKING' : 'SEARCHING'}</span><br>
296
+ FPS: ${this.fps}<br>
297
+ RELIAB: ${rel}<br>
298
+ STABIL: ${stab}<br>
299
+ SAVINGS: ${savings}% Pixels<br>
300
+ POINTS: ${data.screenCoords?.length || 0}
304
301
  `;
305
302
  }
303
+ _drawDebugPoints(coords, stabilities) {
304
+ if (!this.debugCtx)
305
+ return;
306
+ this.debugCtx.clearRect(0, 0, this.debugCanvas.width, this.debugCanvas.height);
307
+ coords.forEach((p, i) => {
308
+ const s = stabilities ? (stabilities[i] || 0) : 0.5;
309
+ this.debugCtx.fillStyle = `rgba(0, 255, 0, ${0.4 + s * 0.6})`;
310
+ this.debugCtx.fillRect(p.x - 1, p.y - 1, 2, 2);
311
+ });
312
+ }
313
+ _drawDebugFeatures(points) {
314
+ if (!this.debugCtx || this.isTracking)
315
+ return;
316
+ this.debugCtx.clearRect(0, 0, this.debugCanvas.width, this.debugCanvas.height);
317
+ this.debugCtx.fillStyle = 'rgba(255, 255, 0, 0.4)';
318
+ points.slice(0, 200).forEach(p => {
319
+ this.debugCtx.fillRect(p.x - 1, p.y - 1, 2, 2);
320
+ });
321
+ }
306
322
  }
307
323
  export { SimpleAR };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@srsergio/taptapp-ar",
3
- "version": "1.0.93",
3
+ "version": "1.0.95",
4
4
  "description": "Ultra-fast Augmented Reality (AR) SDK for Node.js and Browser. Image tracking with 100% pure JavaScript, zero-dependencies, and high-performance compilation.",
5
5
  "keywords": [
6
6
  "augmented reality",
@@ -94,36 +94,51 @@ export class OfflineCompiler {
94
94
  const results = [];
95
95
  for (let i = 0; i < targetImages.length; i++) {
96
96
  const targetImage = targetImages[i];
97
- const fullImageList = buildImageList(targetImage);
98
- // 🚀 MOONSHOT: Keep many scales for better robustness
99
- const imageList = fullImageList;
100
- const percentPerImageScale = percentPerImage / imageList.length;
101
-
102
- const keyframes = [];
103
-
104
- for (const image of imageList as any[]) {
105
- const detector = new DetectorLite(image.width, image.height, { useLSH: AR_CONFIG.USE_LSH, maxFeaturesPerBucket: AR_CONFIG.MAX_FEATURES_PER_BUCKET });
106
- const { featurePoints: ps } = detector.detect(image.data);
107
-
108
- const maximaPoints = ps.filter((p: any) => p.maxima);
109
- const minimaPoints = ps.filter((p: any) => !p.maxima);
110
- const maximaPointsCluster = hierarchicalClusteringBuild({ points: maximaPoints });
111
- const minimaPointsCluster = hierarchicalClusteringBuild({ points: minimaPoints });
112
-
113
- keyframes.push({
114
- maximaPoints,
115
- minimaPoints,
116
- maximaPointsCluster,
117
- minimaPointsCluster,
118
- width: image.width,
119
- height: image.height,
120
- scale: image.scale,
121
- });
122
- currentPercent += percentPerImageScale;
123
- progressCallback(currentPercent);
97
+
98
+ // 🚀 NANITE-STYLE: Only process the target at scale 1.0
99
+ // The DetectorLite already builds its own pyramid and finds features at all octaves (virtualized LOD)
100
+ const detector = new DetectorLite(targetImage.width, targetImage.height, {
101
+ useLSH: AR_CONFIG.USE_LSH,
102
+ maxFeaturesPerBucket: AR_CONFIG.MAX_FEATURES_PER_BUCKET
103
+ });
104
+ const { featurePoints: rawPs } = detector.detect(targetImage.data);
105
+
106
+ // 🎯 Stratified Sampling: Ensure we have features from ALL scales
107
+ // We take the top N features per octave to guarantee scale coverage (Nanite-style)
108
+ const octaves = [0, 1, 2, 3, 4, 5];
109
+ const ps: any[] = [];
110
+ const featuresPerOctave = 300;
111
+
112
+ for (const oct of octaves) {
113
+ const octScale = Math.pow(2, oct);
114
+ const octFeatures = rawPs
115
+ .filter(p => Math.abs(p.scale - octScale) < 0.1)
116
+ .sort((a, b) => (b.score || 0) - (a.score || 0))
117
+ .slice(0, featuresPerOctave);
118
+ ps.push(...octFeatures);
124
119
  }
125
120
 
126
- results.push(keyframes);
121
+ const maximaPoints = ps.filter((p: any) => p.maxima);
122
+ const minimaPoints = ps.filter((p: any) => !p.maxima);
123
+ const maximaPointsCluster = hierarchicalClusteringBuild({ points: maximaPoints });
124
+ const minimaPointsCluster = hierarchicalClusteringBuild({ points: minimaPoints });
125
+
126
+ const keyframe = {
127
+ maximaPoints,
128
+ minimaPoints,
129
+ maximaPointsCluster,
130
+ minimaPointsCluster,
131
+ width: targetImage.width,
132
+ height: targetImage.height,
133
+ scale: 1.0,
134
+ };
135
+
136
+ // Wrapped in array because the protocol expects matchingData to be an array of keyframes
137
+ // We provide only one keyframe containing features from all octaves
138
+ results.push([keyframe]);
139
+
140
+ currentPercent += percentPerImage;
141
+ progressCallback(currentPercent);
127
142
  }
128
143
 
129
144
  return results;
@@ -204,14 +219,19 @@ export class OfflineCompiler {
204
219
  }
205
220
  };
206
221
  }),
207
- matchingData: item.matchingData.map((kf: any) => ({
208
- w: kf.width,
209
- h: kf.height,
210
- s: kf.scale,
211
- hdc: false,
212
- max: protocol.columnarize(kf.maximaPoints, kf.maximaPointsCluster, kf.width, kf.height, false),
213
- min: protocol.columnarize(kf.minimaPoints, kf.minimaPointsCluster, kf.width, kf.height, false),
214
- })),
222
+ matchingData: item.matchingData.map((kf: any) => {
223
+ const useCompact = AR_CONFIG.USE_COMPACT_DESCRIPTORS;
224
+ const columnarizeFn = useCompact ? protocol.columnarizeCompact : protocol.columnarize;
225
+ return {
226
+ w: kf.width,
227
+ h: kf.height,
228
+ s: kf.scale,
229
+ hdc: false,
230
+ max: columnarizeFn(kf.maximaPoints, kf.maximaPointsCluster, kf.width, kf.height),
231
+ min: columnarizeFn(kf.minimaPoints, kf.minimaPointsCluster, kf.width, kf.height),
232
+ };
233
+ }),
234
+
215
235
  };
216
236
  });
217
237
 
@@ -29,7 +29,7 @@ export const AR_CONFIG = {
29
29
 
30
30
  // Image processing / Scale list
31
31
  MIN_IMAGE_PIXEL_SIZE: 32,
32
- SCALE_STEP_EXPONENT: 0.6,
32
+ SCALE_STEP_EXPONENT: 1.0, // Optimized: was 0.6, now 1.0 (reduces scales from ~7 to ~4)
33
33
  TRACKING_DOWNSCALE_LEVEL_1: 256.0,
34
34
  TRACKING_DOWNSCALE_LEVEL_2: 128.0,
35
35
 
@@ -38,4 +38,8 @@ export const AR_CONFIG = {
38
38
  MISS_TOLERANCE: 1,
39
39
  ONE_EURO_FILTER_CUTOFF: 0.5,
40
40
  ONE_EURO_FILTER_BETA: 0.1,
41
+
42
+ // TAAR Size Optimization
43
+ USE_COMPACT_DESCRIPTORS: true, // 32-bit XOR folded descriptors vs 64-bit raw
44
+ COMPACT_HAMMING_THRESHOLD: 8, // Threshold for 32-bit descriptors (vs 15 for 64-bit)
41
45
  };