@srsergio/taptapp-ar 1.1.1 → 1.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/compiler/node-worker.js +1 -197
- package/dist/compiler/offline-compiler.js +1 -207
- package/dist/core/constants.js +1 -38
- package/dist/core/detector/crop-detector.js +1 -88
- package/dist/core/detector/detector-lite.js +1 -455
- package/dist/core/detector/freak.js +1 -89
- package/dist/core/estimation/estimate.js +1 -16
- package/dist/core/estimation/estimator.js +1 -30
- package/dist/core/estimation/morph-refinement.js +1 -116
- package/dist/core/estimation/non-rigid-refine.js +1 -70
- package/dist/core/estimation/pnp-solver.js +1 -109
- package/dist/core/estimation/refine-estimate.js +1 -311
- package/dist/core/estimation/utils.js +1 -67
- package/dist/core/features/auto-rotation-feature.js +1 -30
- package/dist/core/features/crop-detection-feature.js +1 -26
- package/dist/core/features/feature-base.js +1 -1
- package/dist/core/features/feature-manager.js +1 -55
- package/dist/core/features/one-euro-filter-feature.js +1 -44
- package/dist/core/features/temporal-filter-feature.js +1 -57
- package/dist/core/image-list.js +1 -54
- package/dist/core/input-loader.js +1 -87
- package/dist/core/matching/hamming-distance.js +1 -66
- package/dist/core/matching/hdc.js +1 -102
- package/dist/core/matching/hierarchical-clustering.js +1 -130
- package/dist/core/matching/hough.js +1 -170
- package/dist/core/matching/matcher.js +1 -66
- package/dist/core/matching/matching.js +1 -401
- package/dist/core/matching/ransacHomography.js +1 -132
- package/dist/core/perception/bio-inspired-engine.js +1 -232
- package/dist/core/perception/foveal-attention.js +1 -280
- package/dist/core/perception/index.js +1 -17
- package/dist/core/perception/predictive-coding.js +1 -278
- package/dist/core/perception/saccadic-controller.js +1 -269
- package/dist/core/perception/saliency-map.js +1 -254
- package/dist/core/perception/scale-orchestrator.js +1 -68
- package/dist/core/protocol.js +1 -254
- package/dist/core/tracker/extract-utils.js +1 -29
- package/dist/core/tracker/extract.js +1 -306
- package/dist/core/tracker/tracker.js +1 -352
- package/dist/core/utils/cumsum.js +1 -37
- package/dist/core/utils/delaunay.js +1 -125
- package/dist/core/utils/geometry.js +1 -101
- package/dist/core/utils/gpu-compute.js +1 -231
- package/dist/core/utils/homography.js +1 -138
- package/dist/core/utils/images.js +1 -108
- package/dist/core/utils/lsh-binarizer.js +1 -37
- package/dist/core/utils/lsh-direct.js +1 -76
- package/dist/core/utils/projection.js +1 -51
- package/dist/core/utils/randomizer.js +1 -25
- package/dist/core/utils/worker-pool.js +1 -89
- package/dist/index.js +1 -7
- package/dist/libs/one-euro-filter.js +1 -70
- package/dist/react/TaptappAR.js +1 -151
- package/dist/react/types.js +1 -16
- package/dist/react/use-ar.js +1 -118
- package/dist/runtime/aframe.js +1 -272
- package/dist/runtime/bio-inspired-controller.js +1 -358
- package/dist/runtime/controller.js +1 -592
- package/dist/runtime/controller.worker.js +1 -93
- package/dist/runtime/index.js +1 -5
- package/dist/runtime/three.js +1 -304
- package/dist/runtime/track.js +1 -381
- package/package.json +9 -3
|
@@ -1,197 +1 @@
|
|
|
1
|
-
|
|
2
|
-
* Worker Node.js para compilación de imágenes AR
|
|
3
|
-
*
|
|
4
|
-
* OPTIMIZADO: Sin TensorFlow para evitar bloqueos de inicialización.
|
|
5
|
-
* Usa JavaScript puro para máxima velocidad.
|
|
6
|
-
*/
|
|
7
|
-
import { parentPort } from 'node:worker_threads';
|
|
8
|
-
import { extractTrackingFeatures } from '../core/tracker/extract-utils.js';
|
|
9
|
-
import { buildTrackingImageList } from '../core/image-list.js';
|
|
10
|
-
import { DetectorLite } from '../core/detector/detector-lite.js';
|
|
11
|
-
import { build as hierarchicalClusteringBuild } from '../core/matching/hierarchical-clustering.js';
|
|
12
|
-
import { getMorton } from '../core/protocol.js';
|
|
13
|
-
if (!parentPort) {
|
|
14
|
-
throw new Error('This file must be run as a worker thread.');
|
|
15
|
-
}
|
|
16
|
-
const mortonCache = new Int32Array(2048); // Cache for sorting stability
|
|
17
|
-
function sortPoints(points) {
|
|
18
|
-
if (points.length <= 1)
|
|
19
|
-
return points;
|
|
20
|
-
// Sort in-place to avoid allocations
|
|
21
|
-
return points.sort((a, b) => {
|
|
22
|
-
return getMorton(a.x, a.y) - getMorton(b.x, b.y);
|
|
23
|
-
});
|
|
24
|
-
}
|
|
25
|
-
parentPort.on('message', async (msg) => {
|
|
26
|
-
if (msg.type === 'compile') {
|
|
27
|
-
const { targetImage, percentPerImage, basePercent } = msg;
|
|
28
|
-
try {
|
|
29
|
-
const imageList = buildTrackingImageList(targetImage);
|
|
30
|
-
const percentPerAction = percentPerImage / imageList.length;
|
|
31
|
-
let localPercent = 0;
|
|
32
|
-
const trackingData = extractTrackingFeatures(imageList, (index) => {
|
|
33
|
-
localPercent += percentPerAction;
|
|
34
|
-
parentPort.postMessage({
|
|
35
|
-
type: 'progress',
|
|
36
|
-
percent: basePercent + localPercent
|
|
37
|
-
});
|
|
38
|
-
});
|
|
39
|
-
parentPort.postMessage({
|
|
40
|
-
type: 'compileDone',
|
|
41
|
-
trackingData
|
|
42
|
-
});
|
|
43
|
-
}
|
|
44
|
-
catch (error) {
|
|
45
|
-
parentPort.postMessage({
|
|
46
|
-
type: 'error',
|
|
47
|
-
error: error.message + '\n' + error.stack
|
|
48
|
-
});
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
else if (msg.type === 'match') {
|
|
52
|
-
const { targetImage, percentPerImage, basePercent } = msg;
|
|
53
|
-
try {
|
|
54
|
-
// 🚀 MOONSHOT: Only run detector ONCE on full-res image.
|
|
55
|
-
// DetectorLite internally builds a pyramid (octaves 1.0, 0.5, 0.25, etc.)
|
|
56
|
-
const detector = new DetectorLite(targetImage.width, targetImage.height, {
|
|
57
|
-
useLSH: true
|
|
58
|
-
});
|
|
59
|
-
parentPort.postMessage({ type: 'progress', percent: basePercent + percentPerImage * 0.1 });
|
|
60
|
-
const { featurePoints: allPoints } = detector.detect(targetImage.data);
|
|
61
|
-
parentPort.postMessage({ type: 'progress', percent: basePercent + percentPerImage * 0.5 });
|
|
62
|
-
// Group points by their scale (octave)
|
|
63
|
-
const scalesMap = new Map();
|
|
64
|
-
for (const p of allPoints) {
|
|
65
|
-
const octaveScale = p.scale;
|
|
66
|
-
let list = scalesMap.get(octaveScale);
|
|
67
|
-
if (!list) {
|
|
68
|
-
list = [];
|
|
69
|
-
scalesMap.set(octaveScale, list);
|
|
70
|
-
}
|
|
71
|
-
// Coordinates in p are already full-res.
|
|
72
|
-
// We need them relative to the scaled image for the keyframe.
|
|
73
|
-
list.push({
|
|
74
|
-
...p,
|
|
75
|
-
x: p.x / octaveScale,
|
|
76
|
-
y: p.y / octaveScale,
|
|
77
|
-
scale: 1.0 // Keypoint scale is always 1.0 relative to its own keyframe image
|
|
78
|
-
});
|
|
79
|
-
}
|
|
80
|
-
// Optional: Run another detector pass at an intermediate scale to improve coverage
|
|
81
|
-
// (e.g. at 1/1.41 ratio) if tracking robustness suffers.
|
|
82
|
-
// For now, let's stick to octaves for MAXIMUM speed.
|
|
83
|
-
const keyframes = [];
|
|
84
|
-
const sortedScales = Array.from(scalesMap.keys()).sort((a, b) => a - b);
|
|
85
|
-
const percentPerScale = (percentPerImage * 0.4) / sortedScales.length;
|
|
86
|
-
for (const s of sortedScales) {
|
|
87
|
-
const ps = scalesMap.get(s);
|
|
88
|
-
const sortedPs = sortPoints(ps);
|
|
89
|
-
const maximaPoints = sortedPs.filter((p) => p.maxima);
|
|
90
|
-
const minimaPoints = sortedPs.filter((p) => !p.maxima);
|
|
91
|
-
const maximaPointsCluster = hierarchicalClusteringBuild({ points: maximaPoints });
|
|
92
|
-
const minimaPointsCluster = hierarchicalClusteringBuild({ points: minimaPoints });
|
|
93
|
-
keyframes.push({
|
|
94
|
-
maximaPoints,
|
|
95
|
-
minimaPoints,
|
|
96
|
-
maximaPointsCluster,
|
|
97
|
-
minimaPointsCluster,
|
|
98
|
-
width: Math.round(targetImage.width / s),
|
|
99
|
-
height: Math.round(targetImage.height / s),
|
|
100
|
-
scale: 1.0 / s, // keyframe.scale is relative to full target image
|
|
101
|
-
});
|
|
102
|
-
parentPort.postMessage({
|
|
103
|
-
type: 'progress',
|
|
104
|
-
percent: basePercent + percentPerImage * 0.6 + keyframes.length * percentPerScale
|
|
105
|
-
});
|
|
106
|
-
}
|
|
107
|
-
parentPort.postMessage({
|
|
108
|
-
type: 'matchDone',
|
|
109
|
-
matchingData: keyframes
|
|
110
|
-
});
|
|
111
|
-
}
|
|
112
|
-
catch (error) {
|
|
113
|
-
parentPort.postMessage({
|
|
114
|
-
type: 'error',
|
|
115
|
-
error: error.message + '\n' + error.stack
|
|
116
|
-
});
|
|
117
|
-
}
|
|
118
|
-
}
|
|
119
|
-
else if (msg.type === 'compile-all') {
|
|
120
|
-
const { targetImage } = msg;
|
|
121
|
-
try {
|
|
122
|
-
// 1. Single Pass Detection + Pyramid Generation
|
|
123
|
-
const detector = new DetectorLite(targetImage.width, targetImage.height, { useLSH: true });
|
|
124
|
-
parentPort.postMessage({ type: 'progress', percent: 10 });
|
|
125
|
-
const { featurePoints, pyramid } = detector.detect(targetImage.data);
|
|
126
|
-
parentPort.postMessage({ type: 'progress', percent: 40 });
|
|
127
|
-
// 2. Extract Tracking Data using the ALREADY BLURRED pyramid
|
|
128
|
-
// We need 2 levels closest to 256 and 128
|
|
129
|
-
const trackingImageList = [];
|
|
130
|
-
// Octave 0 is Original blured. Octave 1 is 0.5x. Octave 2 is 0.25x.
|
|
131
|
-
// We'll pick the best ones.
|
|
132
|
-
const targetSizes = [256, 128];
|
|
133
|
-
for (const targetSize of targetSizes) {
|
|
134
|
-
let bestLevel = 0;
|
|
135
|
-
let minDiff = Math.abs(Math.min(targetImage.width, targetImage.height) - targetSize);
|
|
136
|
-
for (let l = 1; l < pyramid.length; l++) {
|
|
137
|
-
const img = pyramid[l][0];
|
|
138
|
-
const diff = Math.abs(Math.min(img.width, img.height) - targetSize);
|
|
139
|
-
if (diff < minDiff) {
|
|
140
|
-
minDiff = diff;
|
|
141
|
-
bestLevel = l;
|
|
142
|
-
}
|
|
143
|
-
}
|
|
144
|
-
const levelImg = pyramid[bestLevel][0];
|
|
145
|
-
trackingImageList.push({
|
|
146
|
-
data: levelImg.data,
|
|
147
|
-
width: levelImg.width,
|
|
148
|
-
height: levelImg.height,
|
|
149
|
-
scale: levelImg.width / targetImage.width
|
|
150
|
-
});
|
|
151
|
-
}
|
|
152
|
-
const trackingData = extractTrackingFeatures(trackingImageList, () => { });
|
|
153
|
-
parentPort.postMessage({ type: 'progress', percent: 60 });
|
|
154
|
-
// 3. Build Keyframes for Matching
|
|
155
|
-
const scalesMap = new Map();
|
|
156
|
-
for (const p of featurePoints) {
|
|
157
|
-
const s = p.scale;
|
|
158
|
-
let list = scalesMap.get(s);
|
|
159
|
-
if (!list) {
|
|
160
|
-
list = [];
|
|
161
|
-
scalesMap.set(s, list);
|
|
162
|
-
}
|
|
163
|
-
list.push({ ...p, x: p.x / s, y: p.y / s, scale: 1.0 });
|
|
164
|
-
}
|
|
165
|
-
const keyframes = [];
|
|
166
|
-
const sortedScales = Array.from(scalesMap.keys()).sort((a, b) => a - b);
|
|
167
|
-
for (const s of sortedScales) {
|
|
168
|
-
const ps = scalesMap.get(s);
|
|
169
|
-
const sortedPs = sortPoints(ps);
|
|
170
|
-
const maximaPoints = sortedPs.filter((p) => p.maxima);
|
|
171
|
-
const minimaPoints = sortedPs.filter((p) => !p.maxima);
|
|
172
|
-
const maximaPointsCluster = hierarchicalClusteringBuild({ points: maximaPoints });
|
|
173
|
-
const minimaPointsCluster = hierarchicalClusteringBuild({ points: minimaPoints });
|
|
174
|
-
keyframes.push({
|
|
175
|
-
maximaPoints,
|
|
176
|
-
minimaPoints,
|
|
177
|
-
maximaPointsCluster,
|
|
178
|
-
minimaPointsCluster,
|
|
179
|
-
width: Math.round(targetImage.width / s),
|
|
180
|
-
height: Math.round(targetImage.height / s),
|
|
181
|
-
scale: 1.0 / s,
|
|
182
|
-
});
|
|
183
|
-
}
|
|
184
|
-
parentPort.postMessage({
|
|
185
|
-
type: 'compileDone', // Reusing message type for compatibility with WorkerPool
|
|
186
|
-
matchingData: keyframes,
|
|
187
|
-
trackingData: trackingData
|
|
188
|
-
});
|
|
189
|
-
}
|
|
190
|
-
catch (error) {
|
|
191
|
-
parentPort.postMessage({
|
|
192
|
-
type: 'error',
|
|
193
|
-
error: error.message + '\n' + error.stack
|
|
194
|
-
});
|
|
195
|
-
}
|
|
196
|
-
}
|
|
197
|
-
});
|
|
1
|
+
import{parentPort as t}from"node:worker_threads";import{extractTrackingFeatures as e}from"../core/tracker/extract-utils.js";import{buildTrackingImageList as s}from"../core/image-list.js";import{DetectorLite as r}from"../core/detector/detector-lite.js";import{build as o}from"../core/matching/hierarchical-clustering.js";import{getMorton as a}from"../core/protocol.js";if(!t)throw new Error("This file must be run as a worker thread.");function n(t){return t.length<=1?t:t.sort((t,e)=>a(t.x,t.y)-a(e.x,e.y))}new Int32Array(2048),t.on("message",async a=>{if("compile"===a.type){const{targetImage:r,percentPerImage:o,basePercent:n}=a;try{const a=s(r),i=o/a.length;let c=0;const p=e(a,e=>{c+=i,t.postMessage({type:"progress",percent:n+c})});t.postMessage({type:"compileDone",trackingData:p})}catch(e){t.postMessage({type:"error",error:e.message+"\n"+e.stack})}}else if("match"===a.type){const{targetImage:e,percentPerImage:s,basePercent:i}=a;try{const a=new r(e.width,e.height,{useLSH:!0});t.postMessage({type:"progress",percent:i+.1*s});const{featurePoints:c}=a.detect(e.data);t.postMessage({type:"progress",percent:i+.5*s});const p=new Map;for(const t of c){const e=t.scale;let s=p.get(e);s||(s=[],p.set(e,s)),s.push({...t,x:t.x/e,y:t.y/e,scale:1})}const h=[],m=Array.from(p.keys()).sort((t,e)=>t-e),g=.4*s/m.length;for(const r of m){const a=n(p.get(r)),c=a.filter(t=>t.maxima),m=a.filter(t=>!t.maxima),l=o({points:c}),y=o({points:m});h.push({maximaPoints:c,minimaPoints:m,maximaPointsCluster:l,minimaPointsCluster:y,width:Math.round(e.width/r),height:Math.round(e.height/r),scale:1/r}),t.postMessage({type:"progress",percent:i+.6*s+h.length*g})}t.postMessage({type:"matchDone",matchingData:h})}catch(e){t.postMessage({type:"error",error:e.message+"\n"+e.stack})}}else if("compile-all"===a.type){const{targetImage:s}=a;try{const a=new 
r(s.width,s.height,{useLSH:!0});t.postMessage({type:"progress",percent:10});const{featurePoints:i,pyramid:c}=a.detect(s.data);t.postMessage({type:"progress",percent:40});const p=[],h=[256,128];for(const t of h){let e=0,r=Math.abs(Math.min(s.width,s.height)-t);for(let s=1;s<c.length;s++){const o=c[s][0],a=Math.abs(Math.min(o.width,o.height)-t);a<r&&(r=a,e=s)}const o=c[e][0];p.push({data:o.data,width:o.width,height:o.height,scale:o.width/s.width})}const m=e(p,()=>{});t.postMessage({type:"progress",percent:60});const g=new Map;for(const t of i){const e=t.scale;let s=g.get(e);s||(s=[],g.set(e,s)),s.push({...t,x:t.x/e,y:t.y/e,scale:1})}const l=[],y=Array.from(g.keys()).sort((t,e)=>t-e);for(const t of y){const e=n(g.get(t)),r=e.filter(t=>t.maxima),a=e.filter(t=>!t.maxima),i=o({points:r}),c=o({points:a});l.push({maximaPoints:r,minimaPoints:a,maximaPointsCluster:i,minimaPointsCluster:c,width:Math.round(s.width/t),height:Math.round(s.height/t),scale:1/t})}t.postMessage({type:"compileDone",matchingData:l,trackingData:m})}catch(e){t.postMessage({type:"error",error:e.message+"\n"+e.stack})}}});
|
|
@@ -1,207 +1 @@
|
|
|
1
|
-
|
|
2
|
-
* @fileoverview Compilador Offline Optimizado - Sin TensorFlow para máxima velocidad
|
|
3
|
-
*
|
|
4
|
-
* Este módulo implementa un compilador de imágenes AR ultrarrápido
|
|
5
|
-
* que NO depende de TensorFlow, eliminando todos los problemas de
|
|
6
|
-
* inicialización, bloqueos y compatibilidad.
|
|
7
|
-
*/
|
|
8
|
-
import { buildTrackingImageList } from "../core/image-list.js";
|
|
9
|
-
import { extractTrackingFeatures } from "../core/tracker/extract-utils.js";
|
|
10
|
-
import { DetectorLite } from "../core/detector/detector-lite.js";
|
|
11
|
-
import { build as hierarchicalClusteringBuild } from "../core/matching/hierarchical-clustering.js";
|
|
12
|
-
import * as protocol from "../core/protocol.js";
|
|
13
|
-
import { triangulate, getEdges } from "../core/utils/delaunay.js";
|
|
14
|
-
import { AR_CONFIG } from "../core/constants.js";
|
|
15
|
-
// Detect environment
|
|
16
|
-
const isNode = typeof process !== "undefined" &&
|
|
17
|
-
process.versions != null &&
|
|
18
|
-
process.versions.node != null;
|
|
19
|
-
export class OfflineCompiler {
|
|
20
|
-
data = null;
|
|
21
|
-
constructor() {
|
|
22
|
-
console.log("⚡ OfflineCompiler: Main thread mode (no workers)");
|
|
23
|
-
}
|
|
24
|
-
async compileImageTargets(images, progressCallback) {
|
|
25
|
-
console.time("⏱️ Compilación total");
|
|
26
|
-
const targetImages = [];
|
|
27
|
-
// Preparar imágenes
|
|
28
|
-
for (let i = 0; i < images.length; i++) {
|
|
29
|
-
const img = images[i];
|
|
30
|
-
if (!img || !img.width || !img.height || !img.data) {
|
|
31
|
-
throw new Error(`Imagen inválida en posición ${i}. Debe tener propiedades width, height y data.`);
|
|
32
|
-
}
|
|
33
|
-
const greyImageData = new Uint8Array(img.width * img.height);
|
|
34
|
-
if (img.data.length === img.width * img.height) {
|
|
35
|
-
greyImageData.set(img.data);
|
|
36
|
-
}
|
|
37
|
-
else if (img.data.length === img.width * img.height * 4) {
|
|
38
|
-
for (let j = 0; j < greyImageData.length; j++) {
|
|
39
|
-
const offset = j * 4;
|
|
40
|
-
greyImageData[j] = Math.floor((img.data[offset] + img.data[offset + 1] + img.data[offset + 2]) / 3);
|
|
41
|
-
}
|
|
42
|
-
}
|
|
43
|
-
else {
|
|
44
|
-
throw new Error(`Formato de datos de imagen no soportado en posición ${i}`);
|
|
45
|
-
}
|
|
46
|
-
targetImages.push({
|
|
47
|
-
data: greyImageData,
|
|
48
|
-
width: img.width,
|
|
49
|
-
height: img.height,
|
|
50
|
-
});
|
|
51
|
-
}
|
|
52
|
-
const results = await this._compileTarget(targetImages, progressCallback);
|
|
53
|
-
this.data = targetImages.map((img, i) => ({
|
|
54
|
-
targetImage: img,
|
|
55
|
-
matchingData: results[i].matchingData,
|
|
56
|
-
trackingData: results[i].trackingData,
|
|
57
|
-
}));
|
|
58
|
-
console.timeEnd("⏱️ Compilación total");
|
|
59
|
-
return this.data;
|
|
60
|
-
}
|
|
61
|
-
async _compileTarget(targetImages, progressCallback) {
|
|
62
|
-
// Run match and track sequentially to match browser behavior exactly
|
|
63
|
-
const matchingResults = await this._compileMatch(targetImages, (p) => progressCallback(p * 0.5));
|
|
64
|
-
const trackingResults = await this._compileTrack(targetImages, (p) => progressCallback(50 + p * 0.5));
|
|
65
|
-
return targetImages.map((_, i) => ({
|
|
66
|
-
matchingData: matchingResults[i],
|
|
67
|
-
trackingData: trackingResults[i]
|
|
68
|
-
}));
|
|
69
|
-
}
|
|
70
|
-
async _compileMatch(targetImages, progressCallback) {
|
|
71
|
-
const percentPerImage = 100 / targetImages.length;
|
|
72
|
-
let currentPercent = 0;
|
|
73
|
-
const results = [];
|
|
74
|
-
for (let i = 0; i < targetImages.length; i++) {
|
|
75
|
-
const targetImage = targetImages[i];
|
|
76
|
-
// 🚀 NANITE-STYLE: Only process the target at scale 1.0
|
|
77
|
-
// The DetectorLite already builds its own pyramid and finds features at all octaves (virtualized LOD)
|
|
78
|
-
const detector = new DetectorLite(targetImage.width, targetImage.height, {
|
|
79
|
-
useLSH: AR_CONFIG.USE_LSH,
|
|
80
|
-
maxFeaturesPerBucket: AR_CONFIG.MAX_FEATURES_PER_BUCKET
|
|
81
|
-
});
|
|
82
|
-
const { featurePoints: rawPs } = detector.detect(targetImage.data);
|
|
83
|
-
// 🎯 Stratified Sampling: Ensure we have features from ALL scales
|
|
84
|
-
// We take the top N features per octave to guarantee scale coverage (Nanite-style)
|
|
85
|
-
const octaves = [0, 1, 2, 3, 4, 5];
|
|
86
|
-
const ps = [];
|
|
87
|
-
const featuresPerOctave = 300;
|
|
88
|
-
for (const oct of octaves) {
|
|
89
|
-
const octScale = Math.pow(2, oct);
|
|
90
|
-
const octFeatures = rawPs
|
|
91
|
-
.filter(p => Math.abs(p.scale - octScale) < 0.1)
|
|
92
|
-
.sort((a, b) => (b.score || 0) - (a.score || 0))
|
|
93
|
-
.slice(0, featuresPerOctave);
|
|
94
|
-
ps.push(...octFeatures);
|
|
95
|
-
}
|
|
96
|
-
const maximaPoints = ps.filter((p) => p.maxima);
|
|
97
|
-
const minimaPoints = ps.filter((p) => !p.maxima);
|
|
98
|
-
const maximaPointsCluster = hierarchicalClusteringBuild({ points: maximaPoints });
|
|
99
|
-
const minimaPointsCluster = hierarchicalClusteringBuild({ points: minimaPoints });
|
|
100
|
-
const keyframe = {
|
|
101
|
-
maximaPoints,
|
|
102
|
-
minimaPoints,
|
|
103
|
-
maximaPointsCluster,
|
|
104
|
-
minimaPointsCluster,
|
|
105
|
-
width: targetImage.width,
|
|
106
|
-
height: targetImage.height,
|
|
107
|
-
scale: 1.0,
|
|
108
|
-
};
|
|
109
|
-
// Wrapped in array because the protocol expects matchingData to be an array of keyframes
|
|
110
|
-
// We provide only one keyframe containing features from all octaves
|
|
111
|
-
results.push([keyframe]);
|
|
112
|
-
currentPercent += percentPerImage;
|
|
113
|
-
progressCallback(currentPercent);
|
|
114
|
-
}
|
|
115
|
-
return results;
|
|
116
|
-
}
|
|
117
|
-
async _compileTrack(targetImages, progressCallback) {
|
|
118
|
-
const percentPerImage = 100 / targetImages.length;
|
|
119
|
-
let currentPercent = 0;
|
|
120
|
-
const results = [];
|
|
121
|
-
for (let i = 0; i < targetImages.length; i++) {
|
|
122
|
-
const targetImage = targetImages[i];
|
|
123
|
-
const imageList = buildTrackingImageList(targetImage);
|
|
124
|
-
const percentPerScale = percentPerImage / imageList.length;
|
|
125
|
-
const trackingData = extractTrackingFeatures(imageList, () => {
|
|
126
|
-
currentPercent += percentPerScale;
|
|
127
|
-
progressCallback(currentPercent);
|
|
128
|
-
});
|
|
129
|
-
results.push(trackingData);
|
|
130
|
-
}
|
|
131
|
-
return results;
|
|
132
|
-
}
|
|
133
|
-
async compileTrack({ progressCallback, targetImages, basePercent = 0 }) {
|
|
134
|
-
return this._compileTrack(targetImages, (percent) => {
|
|
135
|
-
progressCallback(basePercent + percent * (100 - basePercent) / 100);
|
|
136
|
-
});
|
|
137
|
-
}
|
|
138
|
-
async compileMatch({ progressCallback, targetImages, basePercent = 0 }) {
|
|
139
|
-
return this._compileMatch(targetImages, (percent) => {
|
|
140
|
-
progressCallback(basePercent + percent * (50 - basePercent) / 100);
|
|
141
|
-
});
|
|
142
|
-
}
|
|
143
|
-
exportData() {
|
|
144
|
-
if (!this.data) {
|
|
145
|
-
throw new Error("No hay datos compilados para exportar");
|
|
146
|
-
}
|
|
147
|
-
const dataList = this.data.map((item) => {
|
|
148
|
-
return {
|
|
149
|
-
targetImage: {
|
|
150
|
-
width: item.targetImage.width,
|
|
151
|
-
height: item.targetImage.height,
|
|
152
|
-
},
|
|
153
|
-
trackingData: item.trackingData.map((td) => {
|
|
154
|
-
const count = td.points.length;
|
|
155
|
-
const px = new Float32Array(count);
|
|
156
|
-
const py = new Float32Array(count);
|
|
157
|
-
for (let i = 0; i < count; i++) {
|
|
158
|
-
px[i] = td.points[i].x;
|
|
159
|
-
py[i] = td.points[i].y;
|
|
160
|
-
}
|
|
161
|
-
const triangles = triangulate(td.points);
|
|
162
|
-
const edges = getEdges(triangles);
|
|
163
|
-
const restLengths = new Float32Array(edges.length);
|
|
164
|
-
for (let j = 0; j < edges.length; j++) {
|
|
165
|
-
const p1 = td.points[edges[j][0]];
|
|
166
|
-
const p2 = td.points[edges[j][1]];
|
|
167
|
-
restLengths[j] = Math.sqrt((p1.x - p2.x) ** 2 + (p1.y - p2.y) ** 2);
|
|
168
|
-
}
|
|
169
|
-
return {
|
|
170
|
-
w: td.width,
|
|
171
|
-
h: td.height,
|
|
172
|
-
s: td.scale,
|
|
173
|
-
px,
|
|
174
|
-
py,
|
|
175
|
-
d: td.data,
|
|
176
|
-
mesh: {
|
|
177
|
-
t: new Uint16Array(triangles.flat()),
|
|
178
|
-
e: new Uint16Array(edges.flat()),
|
|
179
|
-
rl: restLengths
|
|
180
|
-
}
|
|
181
|
-
};
|
|
182
|
-
}),
|
|
183
|
-
matchingData: item.matchingData.map((kf) => {
|
|
184
|
-
const useCompact = AR_CONFIG.USE_COMPACT_DESCRIPTORS;
|
|
185
|
-
const columnarizeFn = useCompact ? protocol.columnarizeCompact : protocol.columnarize;
|
|
186
|
-
return {
|
|
187
|
-
w: kf.width,
|
|
188
|
-
h: kf.height,
|
|
189
|
-
s: kf.scale,
|
|
190
|
-
hdc: false,
|
|
191
|
-
max: columnarizeFn(kf.maximaPoints, kf.maximaPointsCluster, kf.width, kf.height),
|
|
192
|
-
min: columnarizeFn(kf.minimaPoints, kf.minimaPointsCluster, kf.width, kf.height),
|
|
193
|
-
};
|
|
194
|
-
}),
|
|
195
|
-
};
|
|
196
|
-
});
|
|
197
|
-
return protocol.encodeTaar(dataList);
|
|
198
|
-
}
|
|
199
|
-
importData(buffer) {
|
|
200
|
-
const result = protocol.decodeTaar(buffer);
|
|
201
|
-
this.data = result.dataList;
|
|
202
|
-
return result;
|
|
203
|
-
}
|
|
204
|
-
async destroy() {
|
|
205
|
-
// No workers to destroy
|
|
206
|
-
}
|
|
207
|
-
}
|
|
1
|
+
import{buildTrackingImageList as t}from"../core/image-list.js";import{extractTrackingFeatures as a}from"../core/tracker/extract-utils.js";import{DetectorLite as e}from"../core/detector/detector-lite.js";import{build as i}from"../core/matching/hierarchical-clustering.js";import*as o from"../core/protocol.js";import{triangulate as r,getEdges as n}from"../core/utils/delaunay.js";import{AR_CONFIG as s}from"../core/constants.js";"undefined"!=typeof process&&null!=process.versions&&process.versions.node;export class OfflineCompiler{data=null;constructor(){console.log("⚡ OfflineCompiler: Main thread mode (no workers)")}async compileImageTargets(t,a){console.time("⏱️ Compilación total");const e=[];for(let a=0;a<t.length;a++){const i=t[a];if(!(i&&i.width&&i.height&&i.data))throw new Error(`Imagen inválida en posición ${a}. Debe tener propiedades width, height y data.`);const o=new Uint8Array(i.width*i.height);if(i.data.length===i.width*i.height)o.set(i.data);else{if(i.data.length!==i.width*i.height*4)throw new Error(`Formato de datos de imagen no soportado en posición ${a}`);for(let t=0;t<o.length;t++){const a=4*t;o[t]=Math.floor((i.data[a]+i.data[a+1]+i.data[a+2])/3)}}e.push({data:o,width:i.width,height:i.height})}const i=await this._compileTarget(e,a);return this.data=e.map((t,a)=>({targetImage:t,matchingData:i[a].matchingData,trackingData:i[a].trackingData})),console.timeEnd("⏱️ Compilación total"),this.data}async _compileTarget(t,a){const e=await this._compileMatch(t,t=>a(.5*t)),i=await this._compileTrack(t,t=>a(50+.5*t));return t.map((t,a)=>({matchingData:e[a],trackingData:i[a]}))}async _compileMatch(t,a){const o=100/t.length;let r=0;const n=[];for(let c=0;c<t.length;c++){const h=t[c],l=new e(h.width,h.height,{useLSH:s.USE_LSH,maxFeaturesPerBucket:s.MAX_FEATURES_PER_BUCKET}),{featurePoints:m}=l.detect(h.data),g=[0,1,2,3,4,5],p=[],d=300;for(const t of g){const 
a=Math.pow(2,t),e=m.filter(t=>Math.abs(t.scale-a)<.1).sort((t,a)=>(a.score||0)-(t.score||0)).slice(0,d);p.push(...e)}const w=p.filter(t=>t.maxima),u=p.filter(t=>!t.maxima),f={maximaPoints:w,minimaPoints:u,maximaPointsCluster:i({points:w}),minimaPointsCluster:i({points:u}),width:h.width,height:h.height,scale:1};n.push([f]),r+=o,a(r)}return n}async _compileTrack(e,i){const o=100/e.length;let r=0;const n=[];for(let s=0;s<e.length;s++){const c=e[s],h=t(c),l=o/h.length,m=a(h,()=>{r+=l,i(r)});n.push(m)}return n}async compileTrack({progressCallback:t,targetImages:a,basePercent:e=0}){return this._compileTrack(a,a=>{t(e+a*(100-e)/100)})}async compileMatch({progressCallback:t,targetImages:a,basePercent:e=0}){return this._compileMatch(a,a=>{t(e+a*(50-e)/100)})}exportData(){if(!this.data)throw new Error("No hay datos compilados para exportar");const t=this.data.map(t=>({targetImage:{width:t.targetImage.width,height:t.targetImage.height},trackingData:t.trackingData.map(t=>{const a=t.points.length,e=new Float32Array(a),i=new Float32Array(a);for(let o=0;o<a;o++)e[o]=t.points[o].x,i[o]=t.points[o].y;const o=r(t.points),s=n(o),c=new Float32Array(s.length);for(let a=0;a<s.length;a++){const e=t.points[s[a][0]],i=t.points[s[a][1]];c[a]=Math.sqrt((e.x-i.x)**2+(e.y-i.y)**2)}return{w:t.width,h:t.height,s:t.scale,px:e,py:i,d:t.data,mesh:{t:new Uint16Array(o.flat()),e:new Uint16Array(s.flat()),rl:c}}}),matchingData:t.matchingData.map(t=>{const a=s.USE_COMPACT_DESCRIPTORS?o.columnarizeCompact:o.columnarize;return{w:t.width,h:t.height,s:t.scale,hdc:!1,max:a(t.maximaPoints,t.maximaPointsCluster,t.width,t.height),min:a(t.minimaPoints,t.minimaPointsCluster,t.width,t.height)}})}));return o.encodeTaar(t)}importData(t){const a=o.decodeTaar(t);return this.data=a.dataList,a}async destroy(){}}
|
package/dist/core/constants.js
CHANGED
|
@@ -1,38 +1 @@
|
|
|
1
|
-
|
|
2
|
-
* @fileoverview Centralized constants for the AR Engine
|
|
3
|
-
*/
|
|
4
|
-
export const AR_CONFIG = {
|
|
5
|
-
// Camera settings
|
|
6
|
-
VIEWPORT_WIDTH: 640,
|
|
7
|
-
VIEWPORT_HEIGHT: 480,
|
|
8
|
-
DEFAULT_FOVY: 60.0,
|
|
9
|
-
DEFAULT_NEAR: 1.0,
|
|
10
|
-
DEFAULT_FAR: 10000.0,
|
|
11
|
-
// Detection settings
|
|
12
|
-
MAX_FEATURES_PER_BUCKET: 24,
|
|
13
|
-
USE_LSH: true,
|
|
14
|
-
// Matching settings
|
|
15
|
-
HAMMING_THRESHOLD: 0.85,
|
|
16
|
-
HDC_RATIO_THRESHOLD: 0.85,
|
|
17
|
-
INLIER_THRESHOLD: 15.0,
|
|
18
|
-
MIN_NUM_INLIERS: 6,
|
|
19
|
-
MAX_MATCH_QUERY_POINTS: 800,
|
|
20
|
-
CLUSTER_MAX_POP: 25,
|
|
21
|
-
// Tracker / NCC settings
|
|
22
|
-
TRACKER_TEMPLATE_SIZE: 6,
|
|
23
|
-
TRACKER_SEARCH_SIZE: 12,
|
|
24
|
-
TRACKER_SIMILARITY_THRESHOLD: 0.65,
|
|
25
|
-
// Image processing / Scale list
|
|
26
|
-
MIN_IMAGE_PIXEL_SIZE: 32,
|
|
27
|
-
SCALE_STEP_EXPONENT: 1.0, // Optimized: was 0.6, now 1.0 (reduces scales from ~7 to ~4)
|
|
28
|
-
TRACKING_DOWNSCALE_LEVEL_1: 256.0,
|
|
29
|
-
TRACKING_DOWNSCALE_LEVEL_2: 128.0,
|
|
30
|
-
// Tracker settings
|
|
31
|
-
WARMUP_TOLERANCE: 2,
|
|
32
|
-
MISS_TOLERANCE: 1,
|
|
33
|
-
ONE_EURO_FILTER_CUTOFF: 0.5,
|
|
34
|
-
ONE_EURO_FILTER_BETA: 0.1,
|
|
35
|
-
// TAAR Size Optimization
|
|
36
|
-
USE_COMPACT_DESCRIPTORS: true, // 32-bit XOR folded descriptors vs 64-bit raw
|
|
37
|
-
COMPACT_HAMMING_THRESHOLD: 8, // Threshold for 32-bit descriptors (vs 15 for 64-bit)
|
|
38
|
-
};
|
|
1
|
+
export const AR_CONFIG={VIEWPORT_WIDTH:640,VIEWPORT_HEIGHT:480,DEFAULT_FOVY:60,DEFAULT_NEAR:1,DEFAULT_FAR:1e4,MAX_FEATURES_PER_BUCKET:24,USE_LSH:!0,HAMMING_THRESHOLD:.85,HDC_RATIO_THRESHOLD:.85,INLIER_THRESHOLD:15,MIN_NUM_INLIERS:6,MAX_MATCH_QUERY_POINTS:800,CLUSTER_MAX_POP:25,TRACKER_TEMPLATE_SIZE:6,TRACKER_SEARCH_SIZE:12,TRACKER_SIMILARITY_THRESHOLD:.65,MIN_IMAGE_PIXEL_SIZE:32,SCALE_STEP_EXPONENT:1,TRACKING_DOWNSCALE_LEVEL_1:256,TRACKING_DOWNSCALE_LEVEL_2:128,WARMUP_TOLERANCE:2,MISS_TOLERANCE:1,ONE_EURO_FILTER_CUTOFF:.5,ONE_EURO_FILTER_BETA:.1,USE_COMPACT_DESCRIPTORS:!0,COMPACT_HAMMING_THRESHOLD:8};
|
|
@@ -1,88 +1 @@
|
|
|
1
|
-
import
|
|
2
|
-
class CropDetector {
|
|
3
|
-
constructor(width, height, debugMode = false) {
|
|
4
|
-
this.debugMode = debugMode;
|
|
5
|
-
this.width = width;
|
|
6
|
-
this.height = height;
|
|
7
|
-
// nearest power of 2, min dimensions
|
|
8
|
-
let minDimension = Math.min(width, height) / 2;
|
|
9
|
-
let cropSize = Math.pow(2, Math.round(Math.log(minDimension) / Math.log(2)));
|
|
10
|
-
this.cropSize = cropSize;
|
|
11
|
-
this.detector = new DetectorLite(cropSize, cropSize, { useLSH: true });
|
|
12
|
-
this.lastRandomIndex = 4;
|
|
13
|
-
}
|
|
14
|
-
detect(input) {
|
|
15
|
-
const imageData = input;
|
|
16
|
-
// crop center
|
|
17
|
-
const startY = Math.floor(this.height / 2 - this.cropSize / 2);
|
|
18
|
-
const startX = Math.floor(this.width / 2 - this.cropSize / 2);
|
|
19
|
-
const result = this._detect(imageData, startX, startY);
|
|
20
|
-
if (this.debugMode) {
|
|
21
|
-
result.debugExtra.crop = { startX, startY, cropSize: this.cropSize };
|
|
22
|
-
}
|
|
23
|
-
return result;
|
|
24
|
-
}
|
|
25
|
-
detectMoving(input) {
|
|
26
|
-
const imageData = input;
|
|
27
|
-
if (!this.frameCounter)
|
|
28
|
-
this.frameCounter = 0;
|
|
29
|
-
this.frameCounter++;
|
|
30
|
-
// Scan full screen every 2 frames
|
|
31
|
-
if (this.frameCounter % 2 === 0) {
|
|
32
|
-
return this._detectGlobal(imageData);
|
|
33
|
-
}
|
|
34
|
-
// Local crops: ensure we visit every single cell
|
|
35
|
-
const gridSize = 5;
|
|
36
|
-
const dx = this.lastRandomIndex % gridSize;
|
|
37
|
-
const dy = Math.floor(this.lastRandomIndex / gridSize);
|
|
38
|
-
const stepX = this.cropSize / 3;
|
|
39
|
-
const stepY = this.cropSize / 3;
|
|
40
|
-
let startY = Math.floor(this.height / 2 - this.cropSize / 2 + (dy - 2) * stepY);
|
|
41
|
-
let startX = Math.floor(this.width / 2 - this.cropSize / 2 + (dx - 2) * stepX);
|
|
42
|
-
startX = Math.max(0, Math.min(this.width - this.cropSize - 1, startX));
|
|
43
|
-
startY = Math.max(0, Math.min(this.height - this.cropSize - 1, startY));
|
|
44
|
-
this.lastRandomIndex = (this.lastRandomIndex + 1) % (gridSize * gridSize);
|
|
45
|
-
return this._detect(imageData, startX, startY);
|
|
46
|
-
}
|
|
47
|
-
_detectGlobal(imageData) {
|
|
48
|
-
const croppedData = new Float32Array(this.cropSize * this.cropSize);
|
|
49
|
-
const scaleX = this.width / this.cropSize;
|
|
50
|
-
const scaleY = this.height / this.cropSize;
|
|
51
|
-
// Use sharp sampling for better descriptors
|
|
52
|
-
for (let y = 0; y < this.cropSize; y++) {
|
|
53
|
-
const srcY = Math.floor(y * scaleY) * this.width;
|
|
54
|
-
const dstY = y * this.cropSize;
|
|
55
|
-
for (let x = 0; x < this.cropSize; x++) {
|
|
56
|
-
croppedData[dstY + x] = imageData[srcY + Math.floor(x * scaleX)];
|
|
57
|
-
}
|
|
58
|
-
}
|
|
59
|
-
const { featurePoints } = this.detector.detect(croppedData);
|
|
60
|
-
featurePoints.forEach((p) => {
|
|
61
|
-
p.x *= scaleX;
|
|
62
|
-
p.y *= scaleY;
|
|
63
|
-
});
|
|
64
|
-
return {
|
|
65
|
-
featurePoints,
|
|
66
|
-
debugExtra: this.debugMode ? { projectedImage: Array.from(croppedData), isGlobal: true } : {}
|
|
67
|
-
};
|
|
68
|
-
}
|
|
69
|
-
_detect(imageData, startX, startY) {
|
|
70
|
-
// Crop manually since imageData is now a flat array (width * height)
|
|
71
|
-
const croppedData = new Float32Array(this.cropSize * this.cropSize);
|
|
72
|
-
for (let y = 0; y < this.cropSize; y++) {
|
|
73
|
-
for (let x = 0; x < this.cropSize; x++) {
|
|
74
|
-
croppedData[y * this.cropSize + x] = imageData[(startY + y) * this.width + (startX + x)];
|
|
75
|
-
}
|
|
76
|
-
}
|
|
77
|
-
const { featurePoints } = this.detector.detect(croppedData);
|
|
78
|
-
featurePoints.forEach((p) => {
|
|
79
|
-
p.x += startX;
|
|
80
|
-
p.y += startY;
|
|
81
|
-
});
|
|
82
|
-
return {
|
|
83
|
-
featurePoints,
|
|
84
|
-
debugExtra: this.debugMode ? { projectedImage: Array.from(croppedData) } : {}
|
|
85
|
-
};
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
export { CropDetector };
|
|
1
|
+
import { DetectorLite } from "./detector-lite.js";

/**
 * Runs feature detection on square crops of a flat grayscale frame
 * (a width * height array of intensity values).
 */
class CropDetector {
  /**
   * @param {number} width - full frame width in pixels
   * @param {number} height - full frame height in pixels
   * @param {boolean} [debugMode=false] - when true, results carry debug data
   */
  constructor(width, height, debugMode = false) {
    this.debugMode = debugMode;
    this.width = width;
    this.height = height;
    // Crop side length: the power of two nearest to half the smaller dimension.
    const halfMin = Math.min(width, height) / 2;
    const side = Math.pow(2, Math.round(Math.log(halfMin) / Math.log(2)));
    this.cropSize = side;
    this.detector = new DetectorLite(side, side, { useLSH: true });
    // Next cell index of the 5x5 scan grid walked by detectMoving().
    this.lastRandomIndex = 4;
  }

  /**
   * Detect features inside the centered crop of the frame.
   * @param {Float32Array|number[]} input - flat grayscale frame
   * @returns {{featurePoints: Array, debugExtra: Object}}
   */
  detect(input) {
    const top = Math.floor(this.height / 2 - this.cropSize / 2);
    const left = Math.floor(this.width / 2 - this.cropSize / 2);
    const result = this._detect(input, left, top);
    if (this.debugMode) {
      result.debugExtra.crop = { startX: left, startY: top, cropSize: this.cropSize };
    }
    return result;
  }

  /**
   * Alternate between a full-frame downscaled scan (every even frame) and a
   * local crop that walks a 5x5 grid of offsets around the frame center, so
   * every region of the frame is eventually visited.
   */
  detectMoving(input) {
    if (!this.frameCounter) {
      this.frameCounter = 0;
    }
    this.frameCounter += 1;
    if (this.frameCounter % 2 === 0) {
      return this._detectGlobal(input);
    }
    const gridSize = 5;
    const col = this.lastRandomIndex % gridSize;
    const row = Math.floor(this.lastRandomIndex / gridSize);
    // Grid cells are spaced a third of the crop size apart, centered on cell (2, 2).
    const step = this.cropSize / 3;
    let top = Math.floor(this.height / 2 - this.cropSize / 2 + (row - 2) * step);
    let left = Math.floor(this.width / 2 - this.cropSize / 2 + (col - 2) * step);
    left = Math.max(0, Math.min(this.width - this.cropSize - 1, left));
    top = Math.max(0, Math.min(this.height - this.cropSize - 1, top));
    this.lastRandomIndex = (this.lastRandomIndex + 1) % (gridSize * gridSize);
    return this._detect(input, left, top);
  }

  /**
   * Nearest-neighbor downsample the whole frame into a cropSize x cropSize
   * buffer, detect, then scale feature coordinates back to frame space.
   */
  _detectGlobal(imageData) {
    const size = this.cropSize;
    const buffer = new Float32Array(size * size);
    const scaleX = this.width / size;
    const scaleY = this.height / size;
    for (let y = 0; y < size; y++) {
      const srcRow = Math.floor(y * scaleY) * this.width;
      const dstRow = y * size;
      for (let x = 0; x < size; x++) {
        buffer[dstRow + x] = imageData[srcRow + Math.floor(x * scaleX)];
      }
    }
    const { featurePoints } = this.detector.detect(buffer);
    for (const point of featurePoints) {
      point.x *= scaleX;
      point.y *= scaleY;
    }
    return {
      featurePoints,
      debugExtra: this.debugMode ? { projectedImage: Array.from(buffer), isGlobal: true } : {},
    };
  }

  /**
   * Copy the cropSize x cropSize window at (startX, startY) out of the flat
   * frame, detect, then translate feature coordinates back to frame space.
   */
  _detect(imageData, startX, startY) {
    const size = this.cropSize;
    const buffer = new Float32Array(size * size);
    for (let y = 0; y < size; y++) {
      for (let x = 0; x < size; x++) {
        buffer[y * size + x] = imageData[(startY + y) * this.width + (startX + x)];
      }
    }
    const { featurePoints } = this.detector.detect(buffer);
    for (const point of featurePoints) {
      point.x += startX;
      point.y += startY;
    }
    return {
      featurePoints,
      debugExtra: this.debugMode ? { projectedImage: Array.from(buffer) } : {},
    };
  }
}

export { CropDetector };
|