@srsergio/taptapp-ar 1.0.37 → 1.0.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -65,7 +65,7 @@ export class Controller {
65
65
  dummyRun(input: any): void;
66
66
  getProjectionMatrix(): number[];
67
67
  getRotatedZ90Matrix(m: any): any[];
68
- getWorldMatrix(modelViewTransform: any, targetIndex: any): any[];
68
+ getWorldMatrix(modelViewTransform: any, targetIndex: any): any[] | null;
69
69
  _detectAndMatch(inputData: any, targetIndexes: any): Promise<{
70
70
  targetIndex: any;
71
71
  modelViewTransform: any;
@@ -73,7 +73,7 @@ export class Controller {
73
73
  _trackAndUpdate(inputData: any, lastModelViewTransform: any, targetIndex: any): Promise<{
74
74
  modelViewTransform: any;
75
75
  inliers: number;
76
- octaveIndex: any;
76
+ octaveIndex: number;
77
77
  } | null>;
78
78
  processVideo(input: any): void;
79
79
  stopProcessVideo(): void;
@@ -102,6 +102,7 @@ export class Controller {
102
102
  x: number;
103
103
  y: number;
104
104
  }[];
105
+ octaveIndex: number;
105
106
  debugExtra: {};
106
107
  }>;
107
108
  trackUpdate(modelViewTransform: any, trackFeatures: any): Promise<any>;
@@ -128,7 +129,7 @@ export class Controller {
128
129
  _workerTrackUpdate(modelViewTransform: any, trackingFeatures: any): Promise<any>;
129
130
  workerTrackDone: ((data: any) => void) | undefined;
130
131
  _trackUpdateOnMainThread(modelViewTransform: any, trackingFeatures: any): Promise<never[][] | null>;
131
- _glModelViewMatrix(modelViewTransform: any, targetIndex: any): any[];
132
+ _glModelViewMatrix(modelViewTransform: any, targetIndex: any): any[] | null;
132
133
  _glProjectionMatrix({ projectionTransform, width, height, near, far }: {
133
134
  projectionTransform: any;
134
135
  width: any;
@@ -181,17 +181,17 @@ class Controller {
181
181
  return { targetIndex: matchedTargetIndex, modelViewTransform };
182
182
  }
183
183
  async _trackAndUpdate(inputData, lastModelViewTransform, targetIndex) {
184
- const { worldCoords, screenCoords, debugExtra } = this.tracker.track(inputData, lastModelViewTransform, targetIndex);
185
- if (worldCoords.length < 6)
184
+ const result = this.tracker.track(inputData, lastModelViewTransform, targetIndex);
185
+ if (result.worldCoords.length < 6)
186
186
  return null; // Umbral de puntos mínimos para mantener el seguimiento
187
187
  const modelViewTransform = await this._workerTrackUpdate(lastModelViewTransform, {
188
- worldCoords,
189
- screenCoords,
188
+ worldCoords: result.worldCoords,
189
+ screenCoords: result.screenCoords,
190
190
  });
191
191
  return {
192
192
  modelViewTransform,
193
- inliers: worldCoords.length,
194
- octaveIndex: debugExtra.octaveIndex
193
+ inliers: result.worldCoords.length,
194
+ octaveIndex: result.octaveIndex
195
195
  };
196
196
  }
197
197
  processVideo(input) {
@@ -219,18 +219,22 @@ class Controller {
219
219
  return acc + (!!s.isTracking ? 1 : 0);
220
220
  }, 0);
221
221
  // detect and match only if less than maxTrack
222
+ // BUG FIX: Only match if we are NOT in a "ghosting" period for a target
223
+ // to prevent the "found but immediately lost" loop that keeps opacity at 1.
222
224
  if (nTracking < this.maxTrack) {
223
225
  const matchingIndexes = [];
224
226
  for (let i = 0; i < this.trackingStates.length; i++) {
225
227
  const trackingState = this.trackingStates[i];
226
228
  if (trackingState.isTracking === true)
227
229
  continue;
230
+ if (trackingState.showing === true)
231
+ continue; // Don't try to re-detect if we are still showing the last buffered position
228
232
  if (this.interestedTargetIndex !== -1 && this.interestedTargetIndex !== i)
229
233
  continue;
230
234
  matchingIndexes.push(i);
231
235
  }
232
236
  const { targetIndex: matchedTargetIndex, modelViewTransform } = await this._detectAndMatch(inputData, matchingIndexes);
233
- if (matchedTargetIndex !== -1) {
237
+ if (matchedTargetIndex !== -1 && modelViewTransform) {
234
238
  this.trackingStates[matchedTargetIndex].isTracking = true;
235
239
  this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
236
240
  }
@@ -248,13 +252,14 @@ class Controller {
248
252
  trackingState.currentModelViewTransform = result.modelViewTransform;
249
253
  // --- LIVE MODEL ADAPTATION LOGIC ---
250
254
  // Si el tracking es muy sólido (muchos inliers) y estable, refinamos el modelo
251
- if (result.inliers > 25) {
255
+ // Requisito: > 35 inliers (muy exigente) para evitar polución por ruido
256
+ if (result.inliers > 35) {
252
257
  trackingState.stabilityCount++;
253
- if (trackingState.stabilityCount > 20) { // 20 frames de estabilidad absoluta
254
- this.tracker.applyLiveFeedback(i, result.octaveIndex, 0.1); // 10% de mezcla real
258
+ if (trackingState.stabilityCount > 30) { // 30 frames (~1s) de estabilidad absoluta
259
+ this.tracker.applyLiveFeedback(i, result.octaveIndex, 0.05); // Menor alpha (5%) para ser más conservador
255
260
  if (this.debugMode)
256
- console.log(`✨ Live Reification: Target ${i} (Octave ${result.octaveIndex}) updated with real-world textures.`);
257
- trackingState.stabilityCount = 0; // Reset para la siguiente actualización
261
+ console.log(`✨ Live Reification: Target ${i} (Octave ${result.octaveIndex}) updated.`);
262
+ trackingState.stabilityCount = 0;
258
263
  }
259
264
  }
260
265
  else {
@@ -292,7 +297,7 @@ class Controller {
292
297
  }
293
298
  }
294
299
  // if showing, then call onUpdate, with world matrix
295
- if (trackingState.showing) {
300
+ if (trackingState.showing && trackingState.currentModelViewTransform) {
296
301
  const worldMatrix = this._glModelViewMatrix(trackingState.currentModelViewTransform, i);
297
302
  trackingState.trackingMatrix = trackingState.filter.filter(Date.now(), worldMatrix);
298
303
  let clone = [];
@@ -433,6 +438,8 @@ class Controller {
433
438
  return finalModelViewTransform;
434
439
  }
435
440
  _glModelViewMatrix(modelViewTransform, targetIndex) {
441
+ if (!modelViewTransform)
442
+ return null;
436
443
  const height = this.markerDimensions[targetIndex][1];
437
444
  const openGLWorldMatrix = [
438
445
  modelViewTransform[0][0],
@@ -20,6 +20,7 @@ export class Tracker {
20
20
  x: number;
21
21
  y: number;
22
22
  }[];
23
+ octaveIndex: number;
23
24
  debugExtra: {};
24
25
  };
25
26
  /**
@@ -87,16 +87,7 @@ class Tracker {
87
87
  });
88
88
  }
89
89
  }
90
- if (this.debugMode) {
91
- debugExtra = {
92
- octaveIndex,
93
- projectedImage: Array.from(projectedImage),
94
- matchingPoints,
95
- goodTrack,
96
- trackedPoints: screenCoords,
97
- };
98
- }
99
- return { worldCoords, screenCoords, debugExtra };
90
+ return { worldCoords, screenCoords, octaveIndex, debugExtra };
100
91
  }
101
92
  /**
102
93
  * Pure JS implementation of NCC matching
@@ -239,8 +230,13 @@ class Tracker {
239
230
  * @param {number} alpha - Blending factor (e.g. 0.1 for 10% new data)
240
231
  */
241
232
  applyLiveFeedback(targetIndex, octaveIndex, alpha) {
242
- const prebuilt = this.prebuiltData[targetIndex][octaveIndex];
243
- if (!prebuilt || !prebuilt.projectedImage)
233
+ if (targetIndex === undefined || octaveIndex === undefined)
234
+ return;
235
+ const targetPrebuilts = this.prebuiltData[targetIndex];
236
+ if (!targetPrebuilts)
237
+ return;
238
+ const prebuilt = targetPrebuilts[octaveIndex];
239
+ if (!prebuilt || !prebuilt.projectedImage || !prebuilt.data)
244
240
  return;
245
241
  const markerPixels = prebuilt.data;
246
242
  const projectedPixels = prebuilt.projectedImage;
@@ -248,8 +244,11 @@ class Tracker {
248
244
  // Blend the projected (camera-sourced) pixels into the marker reference data
249
245
  // This allows the NCC matching to adapt to real-world lighting and print quality
250
246
  for (let i = 0; i < count; i++) {
247
+ const val = projectedPixels[i];
248
+ if (isNaN(val))
249
+ continue; // Don't pollute with NaN
251
250
  // Simple linear blend
252
- markerPixels[i] = (1 - alpha) * markerPixels[i] + alpha * projectedPixels[i];
251
+ markerPixels[i] = (1 - alpha) * markerPixels[i] + alpha * val;
253
252
  }
254
253
  }
255
254
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@srsergio/taptapp-ar",
3
- "version": "1.0.37",
3
+ "version": "1.0.38",
4
4
  "description": "AR Compiler for Node.js and Browser",
5
5
  "repository": {
6
6
  "type": "git",
@@ -226,20 +226,20 @@ class Controller {
226
226
  return { targetIndex: matchedTargetIndex, modelViewTransform };
227
227
  }
228
228
  async _trackAndUpdate(inputData, lastModelViewTransform, targetIndex) {
229
- const { worldCoords, screenCoords, debugExtra } = this.tracker.track(
229
+ const result = this.tracker.track(
230
230
  inputData,
231
231
  lastModelViewTransform,
232
232
  targetIndex,
233
233
  );
234
- if (worldCoords.length < 6) return null; // Umbral de puntos mínimos para mantener el seguimiento
234
+ if (result.worldCoords.length < 6) return null; // Umbral de puntos mínimos para mantener el seguimiento
235
235
  const modelViewTransform = await this._workerTrackUpdate(lastModelViewTransform, {
236
- worldCoords,
237
- screenCoords,
236
+ worldCoords: result.worldCoords,
237
+ screenCoords: result.screenCoords,
238
238
  });
239
239
  return {
240
240
  modelViewTransform,
241
- inliers: worldCoords.length,
242
- octaveIndex: debugExtra.octaveIndex
241
+ inliers: result.worldCoords.length,
242
+ octaveIndex: result.octaveIndex
243
243
  };
244
244
  }
245
245
 
@@ -272,11 +272,14 @@ class Controller {
272
272
  }, 0);
273
273
 
274
274
  // detect and match only if less than maxTrack
275
+ // BUG FIX: Only match if we are NOT in a "ghosting" period for a target
276
+ // to prevent the "found but immediately lost" loop that keeps opacity at 1.
275
277
  if (nTracking < this.maxTrack) {
276
278
  const matchingIndexes = [];
277
279
  for (let i = 0; i < this.trackingStates.length; i++) {
278
280
  const trackingState = this.trackingStates[i];
279
281
  if (trackingState.isTracking === true) continue;
282
+ if (trackingState.showing === true) continue; // Don't try to re-detect if we are still showing the last buffered position
280
283
  if (this.interestedTargetIndex !== -1 && this.interestedTargetIndex !== i) continue;
281
284
 
282
285
  matchingIndexes.push(i);
@@ -285,7 +288,7 @@ class Controller {
285
288
  const { targetIndex: matchedTargetIndex, modelViewTransform } =
286
289
  await this._detectAndMatch(inputData, matchingIndexes);
287
290
 
288
- if (matchedTargetIndex !== -1) {
291
+ if (matchedTargetIndex !== -1 && modelViewTransform) {
289
292
  this.trackingStates[matchedTargetIndex].isTracking = true;
290
293
  this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
291
294
  }
@@ -309,12 +312,13 @@ class Controller {
309
312
 
310
313
  // --- LIVE MODEL ADAPTATION LOGIC ---
311
314
  // Si el tracking es muy sólido (muchos inliers) y estable, refinamos el modelo
312
- if (result.inliers > 25) {
315
+ // Requisito: > 35 inliers (muy exigente) para evitar polución por ruido
316
+ if (result.inliers > 35) {
313
317
  trackingState.stabilityCount++;
314
- if (trackingState.stabilityCount > 20) { // 20 frames de estabilidad absoluta
315
- this.tracker.applyLiveFeedback(i, result.octaveIndex, 0.1); // 10% de mezcla real
316
- if (this.debugMode) console.log(`✨ Live Reification: Target ${i} (Octave ${result.octaveIndex}) updated with real-world textures.`);
317
- trackingState.stabilityCount = 0; // Reset para la siguiente actualización
318
+ if (trackingState.stabilityCount > 30) { // 30 frames (~1s) de estabilidad absoluta
319
+ this.tracker.applyLiveFeedback(i, result.octaveIndex, 0.05); // Menor alpha (5%) para ser más conservador
320
+ if (this.debugMode) console.log(`✨ Live Reification: Target ${i} (Octave ${result.octaveIndex}) updated.`);
321
+ trackingState.stabilityCount = 0;
318
322
  }
319
323
  } else {
320
324
  trackingState.stabilityCount = Math.max(0, trackingState.stabilityCount - 1);
@@ -354,7 +358,7 @@ class Controller {
354
358
  }
355
359
 
356
360
  // if showing, then call onUpdate, with world matrix
357
- if (trackingState.showing) {
361
+ if (trackingState.showing && trackingState.currentModelViewTransform) {
358
362
  const worldMatrix = this._glModelViewMatrix(trackingState.currentModelViewTransform, i);
359
363
  trackingState.trackingMatrix = trackingState.filter.filter(Date.now(), worldMatrix);
360
364
 
@@ -521,6 +525,7 @@ class Controller {
521
525
  }
522
526
 
523
527
  _glModelViewMatrix(modelViewTransform, targetIndex) {
528
+ if (!modelViewTransform) return null;
524
529
  const height = this.markerDimensions[targetIndex][1];
525
530
 
526
531
  const openGLWorldMatrix = [
@@ -127,17 +127,7 @@ class Tracker {
127
127
  }
128
128
  }
129
129
 
130
- if (this.debugMode) {
131
- debugExtra = {
132
- octaveIndex,
133
- projectedImage: Array.from(projectedImage),
134
- matchingPoints,
135
- goodTrack,
136
- trackedPoints: screenCoords,
137
- };
138
- }
139
-
140
- return { worldCoords, screenCoords, debugExtra };
130
+ return { worldCoords, screenCoords, octaveIndex, debugExtra };
141
131
  }
142
132
 
143
133
  /**
@@ -308,8 +298,12 @@ class Tracker {
308
298
  * @param {number} alpha - Blending factor (e.g. 0.1 for 10% new data)
309
299
  */
310
300
  applyLiveFeedback(targetIndex, octaveIndex, alpha) {
311
- const prebuilt = this.prebuiltData[targetIndex][octaveIndex];
312
- if (!prebuilt || !prebuilt.projectedImage) return;
301
+ if (targetIndex === undefined || octaveIndex === undefined) return;
302
+ const targetPrebuilts = this.prebuiltData[targetIndex];
303
+ if (!targetPrebuilts) return;
304
+
305
+ const prebuilt = targetPrebuilts[octaveIndex];
306
+ if (!prebuilt || !prebuilt.projectedImage || !prebuilt.data) return;
313
307
 
314
308
  const markerPixels = prebuilt.data;
315
309
  const projectedPixels = prebuilt.projectedImage;
@@ -318,8 +312,10 @@ class Tracker {
318
312
  // Blend the projected (camera-sourced) pixels into the marker reference data
319
313
  // This allows the NCC matching to adapt to real-world lighting and print quality
320
314
  for (let i = 0; i < count; i++) {
315
+ const val = projectedPixels[i];
316
+ if (isNaN(val)) continue; // Don't pollute with NaN
321
317
  // Simple linear blend
322
- markerPixels[i] = (1 - alpha) * markerPixels[i] + alpha * projectedPixels[i];
318
+ markerPixels[i] = (1 - alpha) * markerPixels[i] + alpha * val;
323
319
  }
324
320
  }
325
321
  }