@srsergio/taptapp-ar 1.0.36 → 1.0.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -65,12 +65,16 @@ export class Controller {
65
65
  dummyRun(input: any): void;
66
66
  getProjectionMatrix(): number[];
67
67
  getRotatedZ90Matrix(m: any): any[];
68
- getWorldMatrix(modelViewTransform: any, targetIndex: any): any[];
68
+ getWorldMatrix(modelViewTransform: any, targetIndex: any): any[] | null;
69
69
  _detectAndMatch(inputData: any, targetIndexes: any): Promise<{
70
70
  targetIndex: any;
71
71
  modelViewTransform: any;
72
72
  }>;
73
- _trackAndUpdate(inputData: any, lastModelViewTransform: any, targetIndex: any): Promise<any>;
73
+ _trackAndUpdate(inputData: any, lastModelViewTransform: any, targetIndex: any): Promise<{
74
+ modelViewTransform: any;
75
+ inliers: number;
76
+ octaveIndex: number;
77
+ } | null>;
74
78
  processVideo(input: any): void;
75
79
  stopProcessVideo(): void;
76
80
  detect(input: any): Promise<{
@@ -98,6 +102,7 @@ export class Controller {
98
102
  x: number;
99
103
  y: number;
100
104
  }[];
105
+ octaveIndex: number;
101
106
  debugExtra: {};
102
107
  }>;
103
108
  trackUpdate(modelViewTransform: any, trackFeatures: any): Promise<any>;
@@ -124,7 +129,7 @@ export class Controller {
124
129
  _workerTrackUpdate(modelViewTransform: any, trackingFeatures: any): Promise<any>;
125
130
  workerTrackDone: ((data: any) => void) | undefined;
126
131
  _trackUpdateOnMainThread(modelViewTransform: any, trackingFeatures: any): Promise<never[][] | null>;
127
- _glModelViewMatrix(modelViewTransform: any, targetIndex: any): any[];
132
+ _glModelViewMatrix(modelViewTransform: any, targetIndex: any): any[] | null;
128
133
  _glProjectionMatrix({ projectionTransform, width, height, near, far }: {
129
134
  projectionTransform: any;
130
135
  width: any;
@@ -181,14 +181,18 @@ class Controller {
181
181
  return { targetIndex: matchedTargetIndex, modelViewTransform };
182
182
  }
183
183
  async _trackAndUpdate(inputData, lastModelViewTransform, targetIndex) {
184
- const { worldCoords, screenCoords } = this.tracker.track(inputData, lastModelViewTransform, targetIndex);
185
- if (worldCoords.length < 6)
184
+ const result = this.tracker.track(inputData, lastModelViewTransform, targetIndex);
185
+ if (result.worldCoords.length < 6)
186
186
  return null; // Umbral de puntos mínimos para mantener el seguimiento
187
187
  const modelViewTransform = await this._workerTrackUpdate(lastModelViewTransform, {
188
- worldCoords,
189
- screenCoords,
188
+ worldCoords: result.worldCoords,
189
+ screenCoords: result.screenCoords,
190
190
  });
191
- return modelViewTransform;
191
+ return {
192
+ modelViewTransform,
193
+ inliers: result.worldCoords.length,
194
+ octaveIndex: result.octaveIndex
195
+ };
192
196
  }
193
197
  processVideo(input) {
194
198
  if (this.processingVideo)
@@ -202,6 +206,7 @@ class Controller {
202
206
  currentModelViewTransform: null,
203
207
  trackCount: 0,
204
208
  trackMiss: 0,
209
+ stabilityCount: 0, // Nuevo: Contador para Live Adaptation
205
210
  filter: new OneEuroFilter({ minCutOff: this.filterMinCF, beta: this.filterBeta }),
206
211
  });
207
212
  }
@@ -214,18 +219,22 @@ class Controller {
214
219
  return acc + (!!s.isTracking ? 1 : 0);
215
220
  }, 0);
216
221
  // detect and match only if less than maxTrack
222
+ // BUG FIX: Only match if we are NOT in a "ghosting" period for a target
223
+ // to prevent the "found but immediately lost" loop that keeps opacity at 1.
217
224
  if (nTracking < this.maxTrack) {
218
225
  const matchingIndexes = [];
219
226
  for (let i = 0; i < this.trackingStates.length; i++) {
220
227
  const trackingState = this.trackingStates[i];
221
228
  if (trackingState.isTracking === true)
222
229
  continue;
230
+ if (trackingState.showing === true)
231
+ continue; // Don't try to re-detect if we are still showing the last buffered position
223
232
  if (this.interestedTargetIndex !== -1 && this.interestedTargetIndex !== i)
224
233
  continue;
225
234
  matchingIndexes.push(i);
226
235
  }
227
236
  const { targetIndex: matchedTargetIndex, modelViewTransform } = await this._detectAndMatch(inputData, matchingIndexes);
228
- if (matchedTargetIndex !== -1) {
237
+ if (matchedTargetIndex !== -1 && modelViewTransform) {
229
238
  this.trackingStates[matchedTargetIndex].isTracking = true;
230
239
  this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
231
240
  }
@@ -234,12 +243,29 @@ class Controller {
234
243
  for (let i = 0; i < this.trackingStates.length; i++) {
235
244
  const trackingState = this.trackingStates[i];
236
245
  if (trackingState.isTracking) {
237
- let modelViewTransform = await this._trackAndUpdate(inputData, trackingState.currentModelViewTransform, i);
238
- if (modelViewTransform === null) {
246
+ let result = await this._trackAndUpdate(inputData, trackingState.currentModelViewTransform, i);
247
+ if (result === null) {
239
248
  trackingState.isTracking = false;
249
+ trackingState.stabilityCount = 0;
240
250
  }
241
251
  else {
242
- trackingState.currentModelViewTransform = modelViewTransform;
252
+ trackingState.currentModelViewTransform = result.modelViewTransform;
253
+ // --- LIVE MODEL ADAPTATION LOGIC ---
254
+ // Si el tracking es muy sólido (muchos inliers) y estable, refinamos el modelo
255
+ // Requisito: > 35 inliers (muy exigente) para evitar polución por ruido
256
+ if (result.inliers > 35) {
257
+ trackingState.stabilityCount++;
258
+ if (trackingState.stabilityCount > 30) { // 30 frames (~1s) de estabilidad absoluta
259
+ this.tracker.applyLiveFeedback(i, result.octaveIndex, 0.05); // Menor alpha (5%) para ser más conservador
260
+ if (this.debugMode)
261
+ console.log(`✨ Live Reification: Target ${i} (Octave ${result.octaveIndex}) updated.`);
262
+ trackingState.stabilityCount = 0;
263
+ }
264
+ }
265
+ else {
266
+ trackingState.stabilityCount = Math.max(0, trackingState.stabilityCount - 1);
267
+ }
268
+ // -----------------------------------
243
269
  }
244
270
  }
245
271
  // if not showing, then show it once it reaches warmup number of frames
@@ -271,7 +297,7 @@ class Controller {
271
297
  }
272
298
  }
273
299
  // if showing, then call onUpdate, with world matrix
274
- if (trackingState.showing) {
300
+ if (trackingState.showing && trackingState.currentModelViewTransform) {
275
301
  const worldMatrix = this._glModelViewMatrix(trackingState.currentModelViewTransform, i);
276
302
  trackingState.trackingMatrix = trackingState.filter.filter(Date.now(), worldMatrix);
277
303
  let clone = [];
@@ -412,6 +438,8 @@ class Controller {
412
438
  return finalModelViewTransform;
413
439
  }
414
440
  _glModelViewMatrix(modelViewTransform, targetIndex) {
441
+ if (!modelViewTransform)
442
+ return null;
415
443
  const height = this.markerDimensions[targetIndex][1];
416
444
  const openGLWorldMatrix = [
417
445
  modelViewTransform[0][0],
@@ -20,6 +20,7 @@ export class Tracker {
20
20
  x: number;
21
21
  y: number;
22
22
  }[];
23
+ octaveIndex: number;
23
24
  debugExtra: {};
24
25
  };
25
26
  /**
@@ -33,4 +34,11 @@ export class Tracker {
33
34
  * Pure JS implementation of Bilinear Warping
34
35
  */
35
36
  _computeProjection(M: any, inputData: any, prebuilt: any): void;
37
+ /**
38
+ * Refines the target data (Living Mind Map) using actual camera feedback
39
+ * @param {number} targetIndex
40
+ * @param {number} octaveIndex
41
+ * @param {number} alpha - Blending factor (e.g. 0.1 for 10% new data)
42
+ */
43
+ applyLiveFeedback(targetIndex: number, octaveIndex: number, alpha: number): void;
36
44
  }
@@ -87,16 +87,7 @@ class Tracker {
87
87
  });
88
88
  }
89
89
  }
90
- if (this.debugMode) {
91
- debugExtra = {
92
- octaveIndex,
93
- projectedImage: Array.from(projectedImage),
94
- matchingPoints,
95
- goodTrack,
96
- trackedPoints: screenCoords,
97
- };
98
- }
99
- return { worldCoords, screenCoords, debugExtra };
90
+ return { worldCoords, screenCoords, octaveIndex, debugExtra };
100
91
  }
101
92
  /**
102
93
  * Pure JS implementation of NCC matching
@@ -232,5 +223,33 @@ class Tracker {
232
223
  }
233
224
  }
234
225
  }
226
+ /**
227
+ * Refines the target data (Living Mind Map) using actual camera feedback
228
+ * @param {number} targetIndex
229
+ * @param {number} octaveIndex
230
+ * @param {number} alpha - Blending factor (e.g. 0.1 for 10% new data)
231
+ */
232
+ applyLiveFeedback(targetIndex, octaveIndex, alpha) {
233
+ if (targetIndex === undefined || octaveIndex === undefined)
234
+ return;
235
+ const targetPrebuilts = this.prebuiltData[targetIndex];
236
+ if (!targetPrebuilts)
237
+ return;
238
+ const prebuilt = targetPrebuilts[octaveIndex];
239
+ if (!prebuilt || !prebuilt.projectedImage || !prebuilt.data)
240
+ return;
241
+ const markerPixels = prebuilt.data;
242
+ const projectedPixels = prebuilt.projectedImage;
243
+ const count = markerPixels.length;
244
+ // Blend the projected (camera-sourced) pixels into the marker reference data
245
+ // This allows the NCC matching to adapt to real-world lighting and print quality
246
+ for (let i = 0; i < count; i++) {
247
+ const val = projectedPixels[i];
248
+ if (isNaN(val))
249
+ continue; // Don't pollute with NaN
250
+ // Simple linear blend
251
+ markerPixels[i] = (1 - alpha) * markerPixels[i] + alpha * val;
252
+ }
253
+ }
235
254
  }
236
255
  export { Tracker };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@srsergio/taptapp-ar",
3
- "version": "1.0.36",
3
+ "version": "1.0.38",
4
4
  "description": "AR Compiler for Node.js and Browser",
5
5
  "repository": {
6
6
  "type": "git",
@@ -226,17 +226,21 @@ class Controller {
226
226
  return { targetIndex: matchedTargetIndex, modelViewTransform };
227
227
  }
228
228
  async _trackAndUpdate(inputData, lastModelViewTransform, targetIndex) {
229
- const { worldCoords, screenCoords } = this.tracker.track(
229
+ const result = this.tracker.track(
230
230
  inputData,
231
231
  lastModelViewTransform,
232
232
  targetIndex,
233
233
  );
234
- if (worldCoords.length < 6) return null; // Umbral de puntos mínimos para mantener el seguimiento
234
+ if (result.worldCoords.length < 6) return null; // Umbral de puntos mínimos para mantener el seguimiento
235
235
  const modelViewTransform = await this._workerTrackUpdate(lastModelViewTransform, {
236
- worldCoords,
237
- screenCoords,
236
+ worldCoords: result.worldCoords,
237
+ screenCoords: result.screenCoords,
238
238
  });
239
- return modelViewTransform;
239
+ return {
240
+ modelViewTransform,
241
+ inliers: result.worldCoords.length,
242
+ octaveIndex: result.octaveIndex
243
+ };
240
244
  }
241
245
 
242
246
  processVideo(input) {
@@ -252,6 +256,7 @@ class Controller {
252
256
  currentModelViewTransform: null,
253
257
  trackCount: 0,
254
258
  trackMiss: 0,
259
+ stabilityCount: 0, // Nuevo: Contador para Live Adaptation
255
260
  filter: new OneEuroFilter({ minCutOff: this.filterMinCF, beta: this.filterBeta }),
256
261
  });
257
262
  }
@@ -267,11 +272,14 @@ class Controller {
267
272
  }, 0);
268
273
 
269
274
  // detect and match only if less than maxTrack
275
+ // BUG FIX: Only match if we are NOT in a "ghosting" period for a target
276
+ // to prevent the "found but immediately lost" loop that keeps opacity at 1.
270
277
  if (nTracking < this.maxTrack) {
271
278
  const matchingIndexes = [];
272
279
  for (let i = 0; i < this.trackingStates.length; i++) {
273
280
  const trackingState = this.trackingStates[i];
274
281
  if (trackingState.isTracking === true) continue;
282
+ if (trackingState.showing === true) continue; // Don't try to re-detect if we are still showing the last buffered position
275
283
  if (this.interestedTargetIndex !== -1 && this.interestedTargetIndex !== i) continue;
276
284
 
277
285
  matchingIndexes.push(i);
@@ -280,7 +288,7 @@ class Controller {
280
288
  const { targetIndex: matchedTargetIndex, modelViewTransform } =
281
289
  await this._detectAndMatch(inputData, matchingIndexes);
282
290
 
283
- if (matchedTargetIndex !== -1) {
291
+ if (matchedTargetIndex !== -1 && modelViewTransform) {
284
292
  this.trackingStates[matchedTargetIndex].isTracking = true;
285
293
  this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
286
294
  }
@@ -291,15 +299,31 @@ class Controller {
291
299
  const trackingState = this.trackingStates[i];
292
300
 
293
301
  if (trackingState.isTracking) {
294
- let modelViewTransform = await this._trackAndUpdate(
302
+ let result = await this._trackAndUpdate(
295
303
  inputData,
296
304
  trackingState.currentModelViewTransform,
297
305
  i,
298
306
  );
299
- if (modelViewTransform === null) {
307
+ if (result === null) {
300
308
  trackingState.isTracking = false;
309
+ trackingState.stabilityCount = 0;
301
310
  } else {
302
- trackingState.currentModelViewTransform = modelViewTransform;
311
+ trackingState.currentModelViewTransform = result.modelViewTransform;
312
+
313
+ // --- LIVE MODEL ADAPTATION LOGIC ---
314
+ // Si el tracking es muy sólido (muchos inliers) y estable, refinamos el modelo
315
+ // Requisito: > 35 inliers (muy exigente) para evitar polución por ruido
316
+ if (result.inliers > 35) {
317
+ trackingState.stabilityCount++;
318
+ if (trackingState.stabilityCount > 30) { // 30 frames (~1s) de estabilidad absoluta
319
+ this.tracker.applyLiveFeedback(i, result.octaveIndex, 0.05); // Menor alpha (5%) para ser más conservador
320
+ if (this.debugMode) console.log(`✨ Live Reification: Target ${i} (Octave ${result.octaveIndex}) updated.`);
321
+ trackingState.stabilityCount = 0;
322
+ }
323
+ } else {
324
+ trackingState.stabilityCount = Math.max(0, trackingState.stabilityCount - 1);
325
+ }
326
+ // -----------------------------------
303
327
  }
304
328
  }
305
329
 
@@ -334,7 +358,7 @@ class Controller {
334
358
  }
335
359
 
336
360
  // if showing, then call onUpdate, with world matrix
337
- if (trackingState.showing) {
361
+ if (trackingState.showing && trackingState.currentModelViewTransform) {
338
362
  const worldMatrix = this._glModelViewMatrix(trackingState.currentModelViewTransform, i);
339
363
  trackingState.trackingMatrix = trackingState.filter.filter(Date.now(), worldMatrix);
340
364
 
@@ -501,6 +525,7 @@ class Controller {
501
525
  }
502
526
 
503
527
  _glModelViewMatrix(modelViewTransform, targetIndex) {
528
+ if (!modelViewTransform) return null;
504
529
  const height = this.markerDimensions[targetIndex][1];
505
530
 
506
531
  const openGLWorldMatrix = [
@@ -127,17 +127,7 @@ class Tracker {
127
127
  }
128
128
  }
129
129
 
130
- if (this.debugMode) {
131
- debugExtra = {
132
- octaveIndex,
133
- projectedImage: Array.from(projectedImage),
134
- matchingPoints,
135
- goodTrack,
136
- trackedPoints: screenCoords,
137
- };
138
- }
139
-
140
- return { worldCoords, screenCoords, debugExtra };
130
+ return { worldCoords, screenCoords, octaveIndex, debugExtra };
141
131
  }
142
132
 
143
133
  /**
@@ -300,6 +290,34 @@ class Tracker {
300
290
  }
301
291
  }
302
292
  }
293
+
294
+ /**
295
+ * Refines the target data (Living Mind Map) using actual camera feedback
296
+ * @param {number} targetIndex
297
+ * @param {number} octaveIndex
298
+ * @param {number} alpha - Blending factor (e.g. 0.1 for 10% new data)
299
+ */
300
+ applyLiveFeedback(targetIndex, octaveIndex, alpha) {
301
+ if (targetIndex === undefined || octaveIndex === undefined) return;
302
+ const targetPrebuilts = this.prebuiltData[targetIndex];
303
+ if (!targetPrebuilts) return;
304
+
305
+ const prebuilt = targetPrebuilts[octaveIndex];
306
+ if (!prebuilt || !prebuilt.projectedImage || !prebuilt.data) return;
307
+
308
+ const markerPixels = prebuilt.data;
309
+ const projectedPixels = prebuilt.projectedImage;
310
+ const count = markerPixels.length;
311
+
312
+ // Blend the projected (camera-sourced) pixels into the marker reference data
313
+ // This allows the NCC matching to adapt to real-world lighting and print quality
314
+ for (let i = 0; i < count; i++) {
315
+ const val = projectedPixels[i];
316
+ if (isNaN(val)) continue; // Don't pollute with NaN
317
+ // Simple linear blend
318
+ markerPixels[i] = (1 - alpha) * markerPixels[i] + alpha * val;
319
+ }
320
+ }
303
321
  }
304
322
 
305
323
  export { Tracker };