@srsergio/taptapp-ar 1.0.2 → 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99) hide show
  1. package/README.md +47 -45
  2. package/dist/compiler/aframe.js +0 -3
  3. package/dist/compiler/compiler-base.d.ts +3 -7
  4. package/dist/compiler/compiler-base.js +28 -14
  5. package/dist/compiler/compiler.js +1 -1
  6. package/dist/compiler/compiler.worker.js +1 -1
  7. package/dist/compiler/controller.d.ts +4 -4
  8. package/dist/compiler/controller.js +4 -5
  9. package/dist/compiler/controller.worker.js +0 -2
  10. package/dist/compiler/detector/crop-detector.d.ts +12 -12
  11. package/dist/compiler/detector/crop-detector.js +0 -2
  12. package/dist/compiler/detector/detector-lite.d.ts +73 -0
  13. package/dist/compiler/detector/detector-lite.js +430 -0
  14. package/dist/compiler/detector/detector.d.ts +20 -21
  15. package/dist/compiler/detector/detector.js +236 -243
  16. package/dist/compiler/detector/kernels/cpu/binomialFilter.js +0 -1
  17. package/dist/compiler/detector/kernels/cpu/computeExtremaAngles.d.ts +1 -1
  18. package/dist/compiler/detector/kernels/cpu/computeLocalization.js +0 -4
  19. package/dist/compiler/detector/kernels/cpu/computeOrientationHistograms.js +0 -18
  20. package/dist/compiler/detector/kernels/cpu/fakeShader.js +1 -1
  21. package/dist/compiler/detector/kernels/cpu/prune.d.ts +7 -1
  22. package/dist/compiler/detector/kernels/cpu/prune.js +1 -42
  23. package/dist/compiler/detector/kernels/webgl/upsampleBilinear.d.ts +1 -1
  24. package/dist/compiler/detector/kernels/webgl/upsampleBilinear.js +2 -2
  25. package/dist/compiler/estimation/refine-estimate.js +0 -1
  26. package/dist/compiler/estimation/utils.d.ts +1 -1
  27. package/dist/compiler/estimation/utils.js +1 -14
  28. package/dist/compiler/image-list.js +4 -4
  29. package/dist/compiler/input-loader.d.ts +4 -5
  30. package/dist/compiler/input-loader.js +2 -2
  31. package/dist/compiler/matching/hamming-distance.js +13 -13
  32. package/dist/compiler/matching/hierarchical-clustering.js +1 -1
  33. package/dist/compiler/matching/matching.d.ts +20 -4
  34. package/dist/compiler/matching/matching.js +67 -41
  35. package/dist/compiler/matching/ransacHomography.js +1 -2
  36. package/dist/compiler/node-worker.d.ts +1 -0
  37. package/dist/compiler/node-worker.js +84 -0
  38. package/dist/compiler/offline-compiler.d.ts +171 -6
  39. package/dist/compiler/offline-compiler.js +303 -421
  40. package/dist/compiler/tensorflow-setup.d.ts +0 -1
  41. package/dist/compiler/tensorflow-setup.js +27 -1
  42. package/dist/compiler/three.d.ts +7 -12
  43. package/dist/compiler/three.js +3 -5
  44. package/dist/compiler/tracker/extract.d.ts +1 -0
  45. package/dist/compiler/tracker/extract.js +200 -244
  46. package/dist/compiler/tracker/tracker.d.ts +9 -17
  47. package/dist/compiler/tracker/tracker.js +13 -18
  48. package/dist/compiler/utils/cumsum.d.ts +4 -2
  49. package/dist/compiler/utils/cumsum.js +17 -19
  50. package/dist/compiler/utils/gpu-compute.d.ts +57 -0
  51. package/dist/compiler/utils/gpu-compute.js +262 -0
  52. package/dist/compiler/utils/images.d.ts +4 -4
  53. package/dist/compiler/utils/images.js +67 -53
  54. package/dist/compiler/utils/worker-pool.d.ts +13 -0
  55. package/dist/compiler/utils/worker-pool.js +84 -0
  56. package/package.json +12 -14
  57. package/src/compiler/aframe.js +2 -4
  58. package/src/compiler/compiler-base.js +29 -14
  59. package/src/compiler/compiler.js +1 -1
  60. package/src/compiler/compiler.worker.js +1 -1
  61. package/src/compiler/controller.js +4 -5
  62. package/src/compiler/controller.worker.js +0 -2
  63. package/src/compiler/detector/crop-detector.js +0 -2
  64. package/src/compiler/detector/detector-lite.js +494 -0
  65. package/src/compiler/detector/detector.js +1052 -1063
  66. package/src/compiler/detector/kernels/cpu/binomialFilter.js +0 -1
  67. package/src/compiler/detector/kernels/cpu/computeLocalization.js +0 -4
  68. package/src/compiler/detector/kernels/cpu/computeOrientationHistograms.js +0 -17
  69. package/src/compiler/detector/kernels/cpu/fakeShader.js +1 -1
  70. package/src/compiler/detector/kernels/cpu/prune.js +1 -37
  71. package/src/compiler/detector/kernels/webgl/upsampleBilinear.js +2 -2
  72. package/src/compiler/estimation/refine-estimate.js +0 -1
  73. package/src/compiler/estimation/utils.js +9 -24
  74. package/src/compiler/image-list.js +4 -4
  75. package/src/compiler/input-loader.js +2 -2
  76. package/src/compiler/matching/hamming-distance.js +11 -15
  77. package/src/compiler/matching/hierarchical-clustering.js +1 -1
  78. package/src/compiler/matching/matching.js +72 -42
  79. package/src/compiler/matching/ransacHomography.js +0 -2
  80. package/src/compiler/node-worker.js +93 -0
  81. package/src/compiler/offline-compiler.js +339 -504
  82. package/src/compiler/tensorflow-setup.js +29 -1
  83. package/src/compiler/three.js +3 -5
  84. package/src/compiler/tracker/extract.js +211 -267
  85. package/src/compiler/tracker/tracker.js +13 -22
  86. package/src/compiler/utils/cumsum.js +17 -19
  87. package/src/compiler/utils/gpu-compute.js +303 -0
  88. package/src/compiler/utils/images.js +84 -53
  89. package/src/compiler/utils/worker-pool.js +89 -0
  90. package/dist/compiler/estimation/esimate-experiment.d.ts +0 -5
  91. package/dist/compiler/estimation/esimate-experiment.js +0 -267
  92. package/dist/compiler/estimation/refine-estimate-experiment.d.ts +0 -6
  93. package/dist/compiler/estimation/refine-estimate-experiment.js +0 -429
  94. package/dist/react/AREditor.d.ts +0 -5
  95. package/dist/react/AREditor.js +0 -159
  96. package/dist/react/ProgressDialog.d.ts +0 -13
  97. package/dist/react/ProgressDialog.js +0 -57
  98. package/src/compiler/estimation/esimate-experiment.js +0 -316
  99. package/src/compiler/estimation/refine-estimate-experiment.js +0 -512
package/README.md CHANGED
@@ -2,16 +2,17 @@
2
2
 
3
3
  🚀 **TapTapp AR** is a high-performance Augmented Reality (AR) toolkit specifically designed for **Astro** and **Node.js** environments. It provides a seamless way to integrate image tracking, video overlays, and an offline compiler for image targets.
4
4
 
5
- Built on top of **MindAR**, **A-Frame**, and **TensorFlow.js**, this package is optimized for both frontend visualization in Astro and backend/serverless image target compilation.
5
+ Built on top of **MindAR** and **A-Frame**, this package features a **pure JavaScript offline compiler** that requires **no TensorFlow** for backend compilation, while still supporting TensorFlow.js for real-time tracking in the browser.
6
6
 
7
7
  ---
8
8
 
9
9
  ## 🌟 Key Features
10
10
 
11
11
  - 🚀 **Astro Native**: Optimized components for Astro's Islands architecture.
12
- - 🖼️ **Offline Compiler**: A powerful server-side compiler that generates `.mind` target files without a browser.
13
- - ⚡ **Optimized Performance**: Pre-warmed TensorFlow backends and adaptive memory management for serverless environments (Vercel, AWS Lambda).
14
- - 📱 **Mobile First**: Designed for smooth performance on iOS (Safari) and Android.
12
+ - 🖼️ **Ultra-Fast Offline Compiler**: Pure JavaScript compiler that generates `.mind` target files in **~1.3s per image**.
13
+ - ⚡ **Zero TensorFlow for Compilation**: The offline compiler uses optimized pure JS algorithms - no TensorFlow installation required.
14
+ - 🧵 **Multi-threaded Engine**: Truly parallel processing using Node.js `worker_threads` for bulk image compilation.
15
+ - 🚀 **Serverless Ready**: Lightweight compiler with minimal dependencies, perfect for Vercel, AWS Lambda, and Netlify.
15
16
 
16
17
  ---
17
18
 
@@ -21,15 +22,11 @@ Built on top of **MindAR**, **A-Frame**, and **TensorFlow.js**, this package is
21
22
  npm install @srsergio/taptapp-ar
22
23
  ```
23
24
 
24
- ### 📦 Peer Dependencies
25
+ ### 📦 Optional Dependencies
25
26
 
26
- Make sure you have the following packages installed in your host project:
27
+ > **Note:** TensorFlow is **NOT required** for the offline compiler. It only uses pure JavaScript.
27
28
 
28
- ```bash
29
- npm install three aframe astro
30
- ```
31
-
32
- Note: If you are using the `OfflineCompiler` in a Node.js environment, ensure you have the necessary TensorFlow.js backends installed.
29
+ For real-time AR tracking in the browser, TensorFlow.js is loaded automatically via CDN.
33
30
 
34
31
  ---
35
32
 
@@ -57,53 +54,58 @@ const config = {
57
54
  <ARVideoTrigger config={config} />
58
55
  ```
59
56
 
60
- ### `ARVideoTrigger` Props (Config)
57
+ ---
61
58
 
62
- | Prop | Type | Description |
63
- | :--- | :--- | :--- |
64
- | `cardId` | `string` | Unique identifier for tracking/session. |
65
- | `targetImageSrc` | `string` | URL of the image being tracked. |
66
- | `targetMindSrc` | `string` | URL of the compiled `.mind` target file. |
67
- | `videoSrc` | `string` | URL of the video to overlay on the target. |
68
- | `videoWidth` | `number` | Original width of the video. |
69
- | `videoHeight` | `number` | Original height of the video. |
70
- | `scale` | `number` | Scaling factor for the video overlay (Default: `1`). |
59
+ ## 🖼️ High-Performance Compiler (Protocol V3)
71
60
 
72
- ---
61
+ TaptApp AR features the industry's most advanced **pure JavaScript** offline compiler. With the introduction of **Protocol V3 (Columnar Binary Format)**, it sets a new standard for AR asset management.
62
+
63
+ ### ⚡ Industry-Leading Benchmarks
73
64
 
74
- ## 🖼 Offline Compiler Guide
65
+ | Metric | Official MindAR | TapTapp AR (v3) | Improvement |
66
+ | :--- | :--- | :--- | :--- |
67
+ | **Compilation Time** | ~23.50s | **~1.71s** | 🚀 **13.7x Faster** |
68
+ | **Output Size (.mind)** | ~770 KB | **~127 KB** | 📉 **83.5% Smaller** |
69
+ | **Loading Latency** | >100ms | **2.6ms** | ⚡ **Zero-Copy** |
70
+ | **Memory Footprint** | Heavy (JSON Objects) | **Minimal (Binary)** | 🧠 **CPU-Aligned** |
75
71
 
76
- The `OfflineCompiler` allows you to compile image targets on the backend. This is the heart of the TapTapp asset pipeline.
72
+ > *Tested on 1024x1024 high-detail image target.*
77
73
 
78
- ### Why use the Offline Compiler?
79
- Standard MindAR tools require a browser canvas to compile images. This compiler uses **TensorFlow.js** backends (CPU/WebGL/Node) to perform the computation as a background task.
74
+ ### 🚀 Key Technical Breakthroughs
80
75
 
81
- ### Basic Usage
76
+ - **Protocol V3 (Columnar Binary)**: Uses TypedArrays to store coordinates, angles, and descriptors in a cache-aligned layout. No more thousands of slow JavaScript objects.
77
+ - **Zero-Copy Loading**: The runtime reads directly from the binary buffer. Initialization is now virtualy instant.
78
+ - **Aggressive Matching Optimization**: Tree-based hierarchical clustering compacted into a flattened binary format.
79
+ - **No Dependencies**: Works in Node.js and Browser with zero external requirements for the compilation core.
82
80
 
83
- ```typescript
84
- import { OfflineCompiler } from '@srsergio/taptapp-ar';
81
+ ### 🖥️ Usage (Node.js & Serverless)
82
+
83
+ Optimized for server-side compilation with multi-core parallelism:
84
+
85
+ ```javascript
86
+ import { OfflineCompiler } from '@srsergio/taptapp-ar/compiler/offline-compiler.js';
85
87
 
86
88
  const compiler = new OfflineCompiler();
87
89
 
88
- async function compile(imageBuffer: Buffer) {
89
- // targetImages is an array of images to compile into the same .mind file
90
- const result = await compiler.compileTrack({
91
- targetImages: [imageBuffer],
92
- progressCallback: (progress) => console.log(`Compiling: ${progress}%`),
93
- basePercent: 0
94
- });
95
-
96
- // result is the compiled target data
97
- return result;
98
- }
90
+ // Compile target image
91
+ const compiledData = await compiler.compileImageTargets(
92
+ [{ width, height, data: grayscaleUint8Array }],
93
+ (progress) => console.log(`Compiling: ${progress}%`)
94
+ );
95
+
96
+ // Export to Protocol V3 binary format
97
+ const binaryBuffer = compiler.exportData(); // Yields a much smaller .mind file
99
98
  ```
100
99
 
101
- ### Serverless Optimization
102
- The compiler is optimized for environments like Vercel Functions:
103
- - **Early Initialization**: TensorFlow is pre-warmed on module import.
104
- - **Memory Management**: Aggressive garbage collection (`tf.dispose()`) and tensor cleanup.
105
- - **Batch Processing**: Automatically splits work to avoid memory spikes.
100
+ ### 🌐 Frontend (Zero-Latency Loading)
106
101
 
102
+ ```javascript
103
+ import { OfflineCompiler } from '@srsergio/taptapp-ar/compiler/offline-compiler.js';
104
+
105
+ const compiler = new OfflineCompiler();
106
+ // Loading 127KB instead of 800KB saves bandwidth and CPU parsing time
107
+ compiler.importData(binaryBuffer);
108
+ ```
107
109
  ---
108
110
 
109
111
  ## ❓ Troubleshooting
@@ -1,5 +1,4 @@
1
1
  import { Controller, UI } from "./index.js";
2
- const needsDOMRefresh = document.readyState === "complete" || document.readyState == "interactive";
3
2
  AFRAME.registerSystem("mindar-image-system", {
4
3
  container: null,
5
4
  video: null,
@@ -93,7 +92,6 @@ AFRAME.registerSystem("mindar-image-system", {
93
92
  },
94
93
  _startAR: async function () {
95
94
  const video = this.video;
96
- const container = this.container;
97
95
  this.controller = new Controller({
98
96
  inputWidth: video.videoWidth,
99
97
  inputHeight: video.videoHeight,
@@ -159,7 +157,6 @@ AFRAME.registerSystem("mindar-image-system", {
159
157
  const fov = (2 * Math.atan((1 / proj[5] / vh) * container.clientHeight) * 180) / Math.PI; // vertical fov
160
158
  const near = proj[14] / (proj[10] - 1.0);
161
159
  const far = proj[14] / (proj[10] + 1.0);
162
- const ratio = proj[5] / proj[0]; // (r-l) / (t-b)
163
160
  //console.log("loaded proj: ", proj, ". fov: ", fov, ". near: ", near, ". far: ", far, ". ratio: ", ratio);
164
161
  const newAspect = container.clientWidth / container.clientHeight;
165
162
  const cameraEle = container.getElementsByTagName("a-camera")[0];
@@ -1,12 +1,8 @@
1
1
  export class CompilerBase {
2
2
  data: any[] | null;
3
3
  compileImageTargets(images: any, progressCallback: any): Promise<any>;
4
- exportData(): Uint8Array<ArrayBufferLike>;
4
+ exportData(): any;
5
5
  importData(buffer: any): any[];
6
- createProcessCanvas(img: any): void;
7
- compileTrack({ progressCallback, targetImages, basePercent }: {
8
- progressCallback: any;
9
- targetImages: any;
10
- basePercent: any;
11
- }): void;
6
+ createProcessCanvas(): void;
7
+ compileTrack(): Promise<never[]>;
12
8
  }
@@ -47,21 +47,34 @@ class CompilerBase {
47
47
  targetImages.push(targetImage);
48
48
  }
49
49
  // compute matching data: 50% progress
50
- const percentPerImage = 50.0 / targetImages.length;
51
- let percent = 0.0;
50
+ let matchingDataList;
51
+ if (this.compileMatch) {
52
+ matchingDataList = await this.compileMatch({
53
+ progressCallback,
54
+ targetImages,
55
+ basePercent: 0,
56
+ });
57
+ }
58
+ else {
59
+ const percentPerImage = 50.0 / targetImages.length;
60
+ let percent = 0.0;
61
+ const matchingPromises = targetImages.map(async (targetImage, i) => {
62
+ const imageList = buildImageList(targetImage);
63
+ const percentPerAction = percentPerImage / imageList.length;
64
+ const matchingData = await _extractMatchingFeatures(imageList, () => {
65
+ percent += percentPerAction;
66
+ progressCallback(percent);
67
+ });
68
+ return matchingData;
69
+ });
70
+ matchingDataList = await Promise.all(matchingPromises);
71
+ }
52
72
  this.data = [];
53
73
  for (let i = 0; i < targetImages.length; i++) {
54
- const targetImage = targetImages[i];
55
- const imageList = buildImageList(targetImage);
56
- const percentPerAction = percentPerImage / imageList.length;
57
- const matchingData = await _extractMatchingFeatures(imageList, () => {
58
- percent += percentPerAction;
59
- progressCallback(percent);
60
- });
61
74
  this.data.push({
62
- targetImage: targetImage,
63
- imageList: imageList,
64
- matchingData: matchingData,
75
+ targetImage: targetImages[i],
76
+ imageList: buildImageList(targetImages[i]),
77
+ matchingData: matchingDataList[i],
65
78
  });
66
79
  }
67
80
  for (let i = 0; i < targetImages.length; i++) {
@@ -121,13 +134,14 @@ class CompilerBase {
121
134
  }
122
135
  return this.data;
123
136
  }
124
- createProcessCanvas(img) {
137
+ createProcessCanvas() {
125
138
  // sub-class implements
126
139
  console.warn("missing createProcessCanvas implementation");
127
140
  }
128
- compileTrack({ progressCallback, targetImages, basePercent }) {
141
+ compileTrack() {
129
142
  // sub-class implements
130
143
  console.warn("missing compileTrack implementation");
144
+ return Promise.resolve([]);
131
145
  }
132
146
  }
133
147
  const _extractMatchingFeatures = async (imageList, doneCallback) => {
@@ -8,7 +8,7 @@ export class Compiler extends CompilerBase {
8
8
  return processCanvas;
9
9
  }
10
10
  compileTrack({ progressCallback, targetImages, basePercent }) {
11
- return new Promise((resolve, reject) => {
11
+ return new Promise((resolve) => {
12
12
  const worker = new CompilerWorker();
13
13
  worker.onmessage = (e) => {
14
14
  if (e.data.type === "progress") {
@@ -13,7 +13,7 @@ onmessage = (msg) => {
13
13
  const imageList = buildTrackingImageList(targetImage);
14
14
  const percentPerAction = percentPerImage / imageList.length;
15
15
  //console.log("compiling tracking...", i);
16
- const trackingData = extractTrackingFeatures(imageList, (index) => {
16
+ const trackingData = extractTrackingFeatures(imageList, () => {
17
17
  //console.log("done tracking", i, index);
18
18
  percent += percentPerAction;
19
19
  postMessage({ type: "progress", percent });
@@ -60,12 +60,12 @@ export class Controller {
60
60
  descriptors: any[];
61
61
  }[];
62
62
  debugExtra: {
63
- pyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[][];
64
- dogPyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][] | null)[];
65
- extremasResults: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[];
63
+ pyramidImages: any[][];
64
+ dogPyramidImages: any[];
65
+ extremasResults: any[];
66
66
  extremaAngles: any;
67
67
  prunedExtremas: number[][];
68
- localizedExtremas: number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][];
68
+ localizedExtremas: any;
69
69
  } | null;
70
70
  }>;
71
71
  match(featurePoints: any, targetIndex: any): Promise<{
@@ -63,7 +63,7 @@ class Controller {
63
63
  console.table(tf.memory());
64
64
  }
65
65
  addImageTargets(fileURL) {
66
- return new Promise(async (resolve, reject) => {
66
+ return new Promise(async (resolve) => {
67
67
  const content = await fetch(fileURL);
68
68
  const buffer = await content.arrayBuffer();
69
69
  const result = this.addImageTargetsFromBuffer(buffer);
@@ -75,7 +75,6 @@ class Controller {
75
75
  const dataList = compiler.importData(buffer);
76
76
  const trackingDataList = [];
77
77
  const matchingDataList = [];
78
- const imageListList = [];
79
78
  const dimensions = [];
80
79
  for (let i = 0; i < dataList.length; i++) {
81
80
  matchingDataList.push(dataList[i].matchingData);
@@ -264,7 +263,7 @@ class Controller {
264
263
  }
265
264
  async detect(input) {
266
265
  const inputT = this.inputLoader.loadInput(input);
267
- const { featurePoints, debugExtra } = await this.cropDetector.detect(inputT);
266
+ const { featurePoints, debugExtra } = this.cropDetector.detect(inputT);
268
267
  inputT.dispose();
269
268
  return { featurePoints, debugExtra };
270
269
  }
@@ -287,7 +286,7 @@ class Controller {
287
286
  return modelViewTransform2;
288
287
  }
289
288
  _workerMatch(featurePoints, targetIndexes) {
290
- return new Promise(async (resolve, reject) => {
289
+ return new Promise((resolve) => {
291
290
  this.workerMatchDone = (data) => {
292
291
  resolve({
293
292
  targetIndex: data.targetIndex,
@@ -299,7 +298,7 @@ class Controller {
299
298
  });
300
299
  }
301
300
  _workerTrackUpdate(modelViewTransform, trackingFeatures) {
302
- return new Promise(async (resolve, reject) => {
301
+ return new Promise((resolve) => {
303
302
  this.workerTrackDone = (data) => {
304
303
  resolve(data.modelViewTransform);
305
304
  };
@@ -1,6 +1,5 @@
1
1
  import { Matcher } from "./matching/matcher.js";
2
2
  import { Estimator } from "./estimation/estimator.js";
3
- let projectionTransform = null;
4
3
  let matchingDataList = null;
5
4
  let debugMode = false;
6
5
  let matcher = null;
@@ -9,7 +8,6 @@ onmessage = (msg) => {
9
8
  const { data } = msg;
10
9
  switch (data.type) {
11
10
  case "setup":
12
- projectionTransform = data.projectionTransform;
13
11
  matchingDataList = data.matchingDataList;
14
12
  debugMode = data.debugMode;
15
13
  matcher = new Matcher(data.inputWidth, data.inputHeight, debugMode);
@@ -17,12 +17,12 @@ export class CropDetector {
17
17
  descriptors: any[];
18
18
  }[];
19
19
  debugExtra: {
20
- pyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[][];
21
- dogPyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][] | null)[];
22
- extremasResults: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[];
20
+ pyramidImages: any[][];
21
+ dogPyramidImages: any[];
22
+ extremasResults: any[];
23
23
  extremaAngles: any;
24
24
  prunedExtremas: number[][];
25
- localizedExtremas: number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][];
25
+ localizedExtremas: any;
26
26
  } | null;
27
27
  };
28
28
  detectMoving(inputImageT: any): {
@@ -35,12 +35,12 @@ export class CropDetector {
35
35
  descriptors: any[];
36
36
  }[];
37
37
  debugExtra: {
38
- pyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[][];
39
- dogPyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][] | null)[];
40
- extremasResults: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[];
38
+ pyramidImages: any[][];
39
+ dogPyramidImages: any[];
40
+ extremasResults: any[];
41
41
  extremaAngles: any;
42
42
  prunedExtremas: number[][];
43
- localizedExtremas: number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][];
43
+ localizedExtremas: any;
44
44
  } | null;
45
45
  };
46
46
  _detect(inputImageT: any, startX: any, startY: any): {
@@ -53,12 +53,12 @@ export class CropDetector {
53
53
  descriptors: any[];
54
54
  }[];
55
55
  debugExtra: {
56
- pyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[][];
57
- dogPyramidImages: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][] | null)[];
58
- extremasResults: (number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][])[];
56
+ pyramidImages: any[][];
57
+ dogPyramidImages: any[];
58
+ extremasResults: any[];
59
59
  extremaAngles: any;
60
60
  prunedExtremas: number[][];
61
- localizedExtremas: number | number[] | number[][] | number[][][] | number[][][][] | number[][][][][] | number[][][][][][];
61
+ localizedExtremas: any;
62
62
  } | null;
63
63
  };
64
64
  }
@@ -1,6 +1,4 @@
1
- import * as tf from "@tensorflow/tfjs";
2
1
  import { Detector } from "./detector.js";
3
- import { buildModelViewProjectionTransform, computeScreenCoordiate } from "../estimation/utils.js";
4
2
  class CropDetector {
5
3
  constructor(width, height, debugMode = false) {
6
4
  this.debugMode = debugMode;
@@ -0,0 +1,73 @@
1
+ export function setDetectorGPUMode(enabled: boolean): void;
2
+ /**
3
+ * Detector de características sin TensorFlow
4
+ */
5
+ export class DetectorLite {
6
+ constructor(width: any, height: any, options?: {});
7
+ width: any;
8
+ height: any;
9
+ useGPU: any;
10
+ numOctaves: number;
11
+ /**
12
+ * Detecta características en una imagen en escala de grises
13
+ * @param {Float32Array|Uint8Array} imageData - Datos de imagen (width * height)
14
+ * @returns {{featurePoints: Array}} Puntos de características detectados
15
+ */
16
+ detect(imageData: Float32Array | Uint8Array): {
17
+ featurePoints: any[];
18
+ };
19
+ /**
20
+ * Construye una pirámide gaussiana
21
+ */
22
+ _buildGaussianPyramid(data: any, width: any, height: any): {
23
+ data: Float32Array<ArrayBuffer>;
24
+ width: any;
25
+ height: any;
26
+ }[][];
27
+ /**
28
+ * Aplica un filtro gaussiano binomial [1,4,6,4,1] - Optimizado
29
+ */
30
+ _applyGaussianFilter(data: any, width: any, height: any): {
31
+ data: Float32Array<ArrayBuffer>;
32
+ width: any;
33
+ height: any;
34
+ };
35
+ /**
36
+ * Downsample imagen por factor de 2
37
+ */
38
+ _downsample(data: any, width: any, height: any): {
39
+ data: Float32Array<ArrayBuffer>;
40
+ width: number;
41
+ height: number;
42
+ };
43
+ /**
44
+ * Construye pirámide de diferencia de gaussianas
45
+ */
46
+ _buildDogPyramid(pyramidImages: any): {
47
+ data: Float32Array<ArrayBuffer>;
48
+ width: any;
49
+ height: any;
50
+ }[];
51
+ /**
52
+ * Encuentra extremos locales en la pirámide DoG
53
+ */
54
+ _findExtremas(dogPyramid: any, pyramidImages: any): {
55
+ score: number;
56
+ octave: number;
57
+ x: number;
58
+ y: number;
59
+ absScore: number;
60
+ }[];
61
+ /**
62
+ * Aplica pruning para mantener solo los mejores features por bucket
63
+ */
64
+ _applyPrune(extremas: any): any[];
65
+ /**
66
+ * Calcula la orientación de cada feature
67
+ */
68
+ _computeOrientations(extremas: any, pyramidImages: any): void;
69
+ /**
70
+ * Calcula descriptores FREAK
71
+ */
72
+ _computeFreakDescriptors(extremas: any, pyramidImages: any): void;
73
+ }