@srsergio/taptapp-ar 1.0.62 → 1.0.64

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@ class CropDetector {
8
8
  let minDimension = Math.min(width, height) / 2;
9
9
  let cropSize = Math.pow(2, Math.round(Math.log(minDimension) / Math.log(2)));
10
10
  this.cropSize = cropSize;
11
- this.detector = new DetectorLite(cropSize, cropSize, { useLSH: true, maxOctaves: 1 });
11
+ this.detector = new DetectorLite(cropSize, cropSize, { useLSH: true });
12
12
  this.lastRandomIndex = 4;
13
13
  }
14
14
  detect(input) {
@@ -9,7 +9,6 @@ export class DetectorLite {
9
9
  useGPU: any;
10
10
  useLSH: any;
11
11
  numOctaves: number;
12
- maxFeaturesPerBucket: any;
13
12
  /**
14
13
  * Detecta características en una imagen en escala de grises
15
14
  * @param {Float32Array|Uint8Array} imageData - Datos de imagen (width * height)
@@ -15,6 +15,7 @@ import { computeLSH64, computeFullFREAK, packLSHIntoDescriptor } from "../utils/
15
15
  const PYRAMID_MIN_SIZE = 4; // Restored to 4 for better small-scale detection
16
16
  // PYRAMID_MAX_OCTAVE ya no es necesario, el límite lo da PYRAMID_MIN_SIZE
17
17
  const NUM_BUCKETS_PER_DIMENSION = 10;
18
+ const MAX_FEATURES_PER_BUCKET = 30; // Raised to 30 to preserve robustness in single-pass (Moonshot) mode
18
19
  const ORIENTATION_NUM_BINS = 36;
19
20
  const FREAK_EXPANSION_FACTOR = 7.0;
20
21
  // Global GPU mode flag
@@ -47,9 +48,6 @@ export class DetectorLite {
47
48
  break;
48
49
  }
49
50
  this.numOctaves = options.maxOctaves !== undefined ? Math.min(numOctaves, options.maxOctaves) : numOctaves;
50
- // 🚀 SMART BITRATE (VBR): Internal logic to decide feature density based on scale
51
- const scale = options.scale !== undefined ? options.scale : 1.0;
52
- this.maxFeaturesPerBucket = options.maxFeaturesPerBucket || Math.max(4, Math.floor(12 * Math.sqrt(scale)));
53
51
  }
54
52
  /**
55
53
  * Detecta características en una imagen en escala de grises
@@ -307,7 +305,7 @@ export class DetectorLite {
307
305
  */
308
306
  _applyPrune(extremas) {
309
307
  const nBuckets = NUM_BUCKETS_PER_DIMENSION;
310
- const nFeatures = this.maxFeaturesPerBucket;
308
+ const nFeatures = MAX_FEATURES_PER_BUCKET;
311
309
  // Agrupar por buckets
312
310
  const buckets = [];
313
311
  for (let i = 0; i < nBuckets * nBuckets; i++) {
@@ -4,7 +4,7 @@ import { resize } from "./utils/images.js";
4
4
  * Un valor más bajo permite detectar imágenes más pequeñas pero aumenta el tiempo de procesamiento
5
5
  * @constant {number}
6
6
  */
7
- const MIN_IMAGE_PIXEL_SIZE = 40; // Increased to 40 to skip extremely small, noisy layers and reduce size
7
+ const MIN_IMAGE_PIXEL_SIZE = 32;
8
8
  /**
9
9
  * Construye una lista de imágenes con diferentes escalas para detección de características
10
10
  * @param {Object} inputImage - Imagen de entrada con propiedades width, height y data
@@ -16,8 +16,9 @@ const buildImageList = (inputImage) => {
16
16
  let c = minScale;
17
17
  while (true) {
18
18
  scaleList.push(c);
19
- // Optimization: More aggressive step (pow(2, 0.75) approx 1.68) for smaller exports
20
- c *= Math.pow(2.0, 0.75);
19
+ // Optimization: Paso balanceado (aprox 1.5)
20
+ // Mejor cobertura que 2.0, pero mucho más ligero que 1.41 o 1.26
21
+ c *= Math.pow(2.0, 0.6);
21
22
  if (c >= 0.95) {
22
23
  c = 1;
23
24
  break;
@@ -64,19 +64,40 @@ parentPort.on('message', async (msg) => {
64
64
  else if (msg.type === 'match') {
65
65
  const { targetImage, percentPerImage, basePercent } = msg;
66
66
  try {
67
- const { buildImageList } = await import('./image-list.js');
68
- const imageList = buildImageList(targetImage);
69
- const percentPerScale = percentPerImage / imageList.length;
70
- const keyframes = [];
71
- for (let i = 0; i < imageList.length; i++) {
72
- const image = imageList[i];
73
- // 🚀 SMART BITRATE (VBR): Now handled internally by DetectorLite via 'scale'
74
- const detector = new DetectorLite(image.width, image.height, {
75
- useLSH: true,
76
- maxOctaves: 1,
77
- scale: image.scale
67
+ // 🚀 MOONSHOT: Only run detector ONCE on full-res image.
68
+ // DetectorLite internally builds a pyramid (octaves 1.0, 0.5, 0.25, etc.)
69
+ const detector = new DetectorLite(targetImage.width, targetImage.height, {
70
+ useLSH: true
71
+ });
72
+ parentPort.postMessage({ type: 'progress', percent: basePercent + percentPerImage * 0.1 });
73
+ const { featurePoints: allPoints } = detector.detect(targetImage.data);
74
+ parentPort.postMessage({ type: 'progress', percent: basePercent + percentPerImage * 0.5 });
75
+ // Group points by their scale (octave)
76
+ const scalesMap = new Map();
77
+ for (const p of allPoints) {
78
+ const octaveScale = p.scale;
79
+ let list = scalesMap.get(octaveScale);
80
+ if (!list) {
81
+ list = [];
82
+ scalesMap.set(octaveScale, list);
83
+ }
84
+ // Coordinates in p are already full-res.
85
+ // We need them relative to the scaled image for the keyframe.
86
+ list.push({
87
+ ...p,
88
+ x: p.x / octaveScale,
89
+ y: p.y / octaveScale,
90
+ scale: 1.0 // Keypoint scale is always 1.0 relative to its own keyframe image
78
91
  });
79
- const { featurePoints: ps } = detector.detect(image.data);
92
+ }
93
+ // Optional: Run another detector pass at an intermediate scale to improve coverage
94
+ // (e.g. at 1/1.41 ratio) if tracking robustness suffers.
95
+ // For now, let's stick to octaves for MAXIMUM speed.
96
+ const keyframes = [];
97
+ const sortedScales = Array.from(scalesMap.keys()).sort((a, b) => a - b);
98
+ const percentPerScale = (percentPerImage * 0.4) / sortedScales.length;
99
+ for (const s of sortedScales) {
100
+ const ps = scalesMap.get(s);
80
101
  const sortedPs = sortPoints(ps);
81
102
  const maximaPoints = sortedPs.filter((p) => p.maxima);
82
103
  const minimaPoints = sortedPs.filter((p) => !p.maxima);
@@ -87,13 +108,13 @@ parentPort.on('message', async (msg) => {
87
108
  minimaPoints,
88
109
  maximaPointsCluster,
89
110
  minimaPointsCluster,
90
- width: image.width,
91
- height: image.height,
92
- scale: image.scale,
111
+ width: Math.round(targetImage.width / s),
112
+ height: Math.round(targetImage.height / s),
113
+ scale: 1.0 / s, // keyframe.scale is relative to full target image
93
114
  });
94
115
  parentPort.postMessage({
95
116
  type: 'progress',
96
- percent: basePercent + (i + 1) * percentPerScale
117
+ percent: basePercent + percentPerImage * 0.6 + keyframes.length * percentPerScale
97
118
  });
98
119
  }
99
120
  parentPort.postMessage({
@@ -12,10 +12,7 @@ export declare class OfflineCompiler {
12
12
  constructor();
13
13
  _initNodeWorkers(): Promise<void>;
14
14
  compileImageTargets(images: any[], progressCallback: (p: number) => void): Promise<any>;
15
- _compileTarget(targetImages: any[], progressCallback: (p: number) => void): Promise<{
16
- matchingData: any;
17
- trackingData: any;
18
- }[]>;
15
+ _compileTarget(targetImages: any[], progressCallback: (p: number) => void): Promise<any[]>;
19
16
  _compileMatch(targetImages: any[], progressCallback: (p: number) => void): Promise<any[]>;
20
17
  _compileTrack(targetImages: any[], progressCallback: (p: number) => void): Promise<any[]>;
21
18
  compileTrack({ progressCallback, targetImages, basePercent }: {
@@ -88,8 +88,22 @@ export class OfflineCompiler {
88
88
  async _compileTarget(targetImages, progressCallback) {
89
89
  if (isNode)
90
90
  await this._initNodeWorkers();
91
- // Reverted: 'compile-all' combined task was causing issues with pyramid processing
92
- // We go back to sequential match and track for reliability
91
+ if (this.workerPool) {
92
+ const progressMap = new Float32Array(targetImages.length);
93
+ const wrappedPromises = targetImages.map((targetImage, index) => {
94
+ return this.workerPool.runTask({
95
+ type: 'compile-all', // 🚀 MOONSHOT: Combined task
96
+ targetImage,
97
+ onProgress: (p) => {
98
+ progressMap[index] = p;
99
+ const sum = progressMap.reduce((a, b) => a + b, 0);
100
+ progressCallback(sum / targetImages.length);
101
+ }
102
+ });
103
+ });
104
+ return Promise.all(wrappedPromises);
105
+ }
106
+ // Fallback or non-worker implementation: run match and track sequentially
93
107
  const matchingResults = await this._compileMatch(targetImages, (p) => progressCallback(p * 0.5));
94
108
  const trackingResults = await this._compileTrack(targetImages, (p) => progressCallback(50 + p * 0.5));
95
109
  return targetImages.map((_, i) => ({
@@ -126,12 +140,7 @@ export class OfflineCompiler {
126
140
  const percentPerImageScale = percentPerImage / imageList.length;
127
141
  const keyframes = [];
128
142
  for (const image of imageList) {
129
- // 🚀 SMART BITRATE (VBR): Internalized in DetectorLite
130
- const detector = new DetectorLite(image.width, image.height, {
131
- useLSH: true,
132
- maxOctaves: 1,
133
- scale: image.scale
134
- });
143
+ const detector = new DetectorLite(image.width, image.height, { useLSH: true });
135
144
  const { featurePoints: ps } = detector.detect(image.data);
136
145
  const maximaPoints = ps.filter((p) => p.maxima);
137
146
  const minimaPoints = ps.filter((p) => !p.maxima);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@srsergio/taptapp-ar",
3
- "version": "1.0.62",
3
+ "version": "1.0.64",
4
4
  "author": "Sergio Lazaro <srsergiolazaro@gmail.com>",
5
5
  "license": "GPL-3.0",
6
6
  "description": "Ultra-fast, lightweight Augmented Reality Image Tracking SDK for the web. Features an optimized offline compiler, React components, and compatibility with Three.js/A-Frame. No heavy ML frameworks required.",
@@ -11,7 +11,7 @@ class CropDetector {
11
11
  let cropSize = Math.pow(2, Math.round(Math.log(minDimension) / Math.log(2)));
12
12
  this.cropSize = cropSize;
13
13
 
14
- this.detector = new DetectorLite(cropSize, cropSize, { useLSH: true, maxOctaves: 1 });
14
+ this.detector = new DetectorLite(cropSize, cropSize, { useLSH: true });
15
15
 
16
16
  this.lastRandomIndex = 4;
17
17
  }
@@ -19,6 +19,7 @@ const PYRAMID_MIN_SIZE = 4; // Restored to 4 for better small-scale detection
19
19
 
20
20
 
21
21
  const NUM_BUCKETS_PER_DIMENSION = 10;
22
+ const MAX_FEATURES_PER_BUCKET = 30; // Raised to 30 to preserve robustness in single-pass (Moonshot) mode
22
23
 
23
24
 
24
25
  const ORIENTATION_NUM_BINS = 36;
@@ -57,10 +58,6 @@ export class DetectorLite {
57
58
  }
58
59
 
59
60
  this.numOctaves = options.maxOctaves !== undefined ? Math.min(numOctaves, options.maxOctaves) : numOctaves;
60
-
61
- // 🚀 SMART BITRATE (VBR): Internal logic to decide feature density based on scale
62
- const scale = options.scale !== undefined ? options.scale : 1.0;
63
- this.maxFeaturesPerBucket = options.maxFeaturesPerBucket || Math.max(4, Math.floor(12 * Math.sqrt(scale)));
64
61
  }
65
62
 
66
63
  /**
@@ -353,7 +350,7 @@ export class DetectorLite {
353
350
  */
354
351
  _applyPrune(extremas) {
355
352
  const nBuckets = NUM_BUCKETS_PER_DIMENSION;
356
- const nFeatures = this.maxFeaturesPerBucket;
353
+ const nFeatures = MAX_FEATURES_PER_BUCKET;
357
354
 
358
355
  // Agrupar por buckets
359
356
  const buckets = [];
@@ -5,7 +5,7 @@ import { resize } from "./utils/images.js";
5
5
  * Un valor más bajo permite detectar imágenes más pequeñas pero aumenta el tiempo de procesamiento
6
6
  * @constant {number}
7
7
  */
8
- const MIN_IMAGE_PIXEL_SIZE = 40; // Increased to 40 to skip extremely small, noisy layers and reduce size
8
+ const MIN_IMAGE_PIXEL_SIZE = 32;
9
9
 
10
10
 
11
11
 
@@ -21,8 +21,9 @@ const buildImageList = (inputImage) => {
21
21
  let c = minScale;
22
22
  while (true) {
23
23
  scaleList.push(c);
24
- // Optimization: More aggressive step (pow(2, 0.75) approx 1.68) for smaller exports
25
- c *= Math.pow(2.0, 0.75);
24
+ // Optimization: Paso balanceado (aprox 1.5)
25
+ // Mejor cobertura que 2.0, pero mucho más ligero que 1.41 o 1.26
26
+ c *= Math.pow(2.0, 0.6);
26
27
  if (c >= 0.95) {
27
28
  c = 1;
28
29
  break;
@@ -71,22 +71,49 @@ parentPort.on('message', async (msg) => {
71
71
  const { targetImage, percentPerImage, basePercent } = msg;
72
72
 
73
73
  try {
74
- const { buildImageList } = await import('./image-list.js');
75
- const imageList = buildImageList(targetImage);
76
- const percentPerScale = percentPerImage / imageList.length;
77
- const keyframes = [];
74
+ // 🚀 MOONSHOT: Only run detector ONCE on full-res image.
75
+ // DetectorLite internally builds a pyramid (octaves 1.0, 0.5, 0.25, etc.)
76
+ const detector = new DetectorLite(targetImage.width, targetImage.height, {
77
+ useLSH: true
78
+ });
79
+
80
+ parentPort.postMessage({ type: 'progress', percent: basePercent + percentPerImage * 0.1 });
81
+
82
+ const { featurePoints: allPoints } = detector.detect(targetImage.data);
83
+
84
+ parentPort.postMessage({ type: 'progress', percent: basePercent + percentPerImage * 0.5 });
78
85
 
79
- for (let i = 0; i < imageList.length; i++) {
80
- const image = imageList[i];
86
+ // Group points by their scale (octave)
87
+ const scalesMap = new Map();
88
+ for (const p of allPoints) {
89
+ const octaveScale = p.scale;
90
+ let list = scalesMap.get(octaveScale);
91
+ if (!list) {
92
+ list = [];
93
+ scalesMap.set(octaveScale, list);
94
+ }
81
95
 
82
- // 🚀 SMART BITRATE (VBR): Now handled internally by DetectorLite via 'scale'
83
- const detector = new DetectorLite(image.width, image.height, {
84
- useLSH: true,
85
- maxOctaves: 1,
86
- scale: image.scale
96
+ // Coordinates in p are already full-res.
97
+ // We need them relative to the scaled image for the keyframe.
98
+ list.push({
99
+ ...p,
100
+ x: p.x / octaveScale,
101
+ y: p.y / octaveScale,
102
+ scale: 1.0 // Keypoint scale is always 1.0 relative to its own keyframe image
87
103
  });
88
- const { featurePoints: ps } = detector.detect(image.data);
104
+ }
89
105
 
106
+ // Optional: Run another detector pass at an intermediate scale to improve coverage
107
+ // (e.g. at 1/1.41 ratio) if tracking robustness suffers.
108
+ // For now, let's stick to octaves for MAXIMUM speed.
109
+
110
+ const keyframes = [];
111
+ const sortedScales = Array.from(scalesMap.keys()).sort((a, b) => a - b);
112
+
113
+ const percentPerScale = (percentPerImage * 0.4) / sortedScales.length;
114
+
115
+ for (const s of sortedScales) {
116
+ const ps = scalesMap.get(s);
90
117
  const sortedPs = sortPoints(ps);
91
118
  const maximaPoints = sortedPs.filter((p) => p.maxima);
92
119
  const minimaPoints = sortedPs.filter((p) => !p.maxima);
@@ -99,14 +126,14 @@ parentPort.on('message', async (msg) => {
99
126
  minimaPoints,
100
127
  maximaPointsCluster,
101
128
  minimaPointsCluster,
102
- width: image.width,
103
- height: image.height,
104
- scale: image.scale,
129
+ width: Math.round(targetImage.width / s),
130
+ height: Math.round(targetImage.height / s),
131
+ scale: 1.0 / s, // keyframe.scale is relative to full target image
105
132
  });
106
133
 
107
134
  parentPort.postMessage({
108
135
  type: 'progress',
109
- percent: basePercent + (i + 1) * percentPerScale
136
+ percent: basePercent + percentPerImage * 0.6 + keyframes.length * percentPerScale
110
137
  });
111
138
  }
112
139
 
@@ -110,8 +110,23 @@ export class OfflineCompiler {
110
110
  async _compileTarget(targetImages: any[], progressCallback: (p: number) => void) {
111
111
  if (isNode) await this._initNodeWorkers();
112
112
 
113
- // Reverted: 'compile-all' combined task was causing issues with pyramid processing
114
- // We go back to sequential match and track for reliability
113
+ if (this.workerPool) {
114
+ const progressMap = new Float32Array(targetImages.length);
115
+ const wrappedPromises = targetImages.map((targetImage: any, index: number) => {
116
+ return this.workerPool!.runTask({
117
+ type: 'compile-all', // 🚀 MOONSHOT: Combined task
118
+ targetImage,
119
+ onProgress: (p: number) => {
120
+ progressMap[index] = p;
121
+ const sum = progressMap.reduce((a, b) => a + b, 0);
122
+ progressCallback(sum / targetImages.length);
123
+ }
124
+ });
125
+ });
126
+ return Promise.all(wrappedPromises);
127
+ }
128
+
129
+ // Fallback or non-worker implementation: run match and track sequentially
115
130
  const matchingResults = await this._compileMatch(targetImages, (p) => progressCallback(p * 0.5));
116
131
  const trackingResults = await this._compileTrack(targetImages, (p) => progressCallback(50 + p * 0.5));
117
132
 
@@ -155,12 +170,7 @@ export class OfflineCompiler {
155
170
  const keyframes = [];
156
171
 
157
172
  for (const image of imageList as any[]) {
158
- // 🚀 SMART BITRATE (VBR): Internalized in DetectorLite
159
- const detector = new DetectorLite(image.width, image.height, {
160
- useLSH: true,
161
- maxOctaves: 1,
162
- scale: image.scale
163
- });
173
+ const detector = new DetectorLite(image.width, image.height, { useLSH: true });
164
174
  const { featurePoints: ps } = detector.detect(image.data);
165
175
 
166
176
  const maximaPoints = ps.filter((p: any) => p.maxima);