@srsergio/taptapp-ar 1.1.1 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/compiler/node-worker.js +1 -197
- package/dist/compiler/offline-compiler.js +1 -207
- package/dist/core/constants.js +1 -38
- package/dist/core/detector/crop-detector.js +1 -88
- package/dist/core/detector/detector-lite.js +1 -455
- package/dist/core/detector/freak.js +1 -89
- package/dist/core/estimation/estimate.js +1 -16
- package/dist/core/estimation/estimator.js +1 -30
- package/dist/core/estimation/morph-refinement.js +1 -116
- package/dist/core/estimation/non-rigid-refine.js +1 -70
- package/dist/core/estimation/pnp-solver.js +1 -109
- package/dist/core/estimation/refine-estimate.js +1 -311
- package/dist/core/estimation/utils.js +1 -67
- package/dist/core/features/auto-rotation-feature.js +1 -30
- package/dist/core/features/crop-detection-feature.js +1 -26
- package/dist/core/features/feature-base.js +1 -1
- package/dist/core/features/feature-manager.js +1 -55
- package/dist/core/features/one-euro-filter-feature.js +1 -44
- package/dist/core/features/temporal-filter-feature.js +1 -57
- package/dist/core/image-list.js +1 -54
- package/dist/core/input-loader.js +1 -87
- package/dist/core/matching/hamming-distance.js +1 -66
- package/dist/core/matching/hdc.js +1 -102
- package/dist/core/matching/hierarchical-clustering.js +1 -130
- package/dist/core/matching/hough.js +1 -170
- package/dist/core/matching/matcher.js +1 -66
- package/dist/core/matching/matching.js +1 -401
- package/dist/core/matching/ransacHomography.js +1 -132
- package/dist/core/perception/bio-inspired-engine.js +1 -232
- package/dist/core/perception/foveal-attention.js +1 -280
- package/dist/core/perception/index.js +1 -17
- package/dist/core/perception/predictive-coding.js +1 -278
- package/dist/core/perception/saccadic-controller.js +1 -269
- package/dist/core/perception/saliency-map.js +1 -254
- package/dist/core/perception/scale-orchestrator.js +1 -68
- package/dist/core/protocol.js +1 -254
- package/dist/core/tracker/extract-utils.js +1 -29
- package/dist/core/tracker/extract.js +1 -306
- package/dist/core/tracker/tracker.js +1 -352
- package/dist/core/utils/cumsum.js +1 -37
- package/dist/core/utils/delaunay.js +1 -125
- package/dist/core/utils/geometry.js +1 -101
- package/dist/core/utils/gpu-compute.js +1 -231
- package/dist/core/utils/homography.js +1 -138
- package/dist/core/utils/images.js +1 -108
- package/dist/core/utils/lsh-binarizer.js +1 -37
- package/dist/core/utils/lsh-direct.js +1 -76
- package/dist/core/utils/projection.js +1 -51
- package/dist/core/utils/randomizer.js +1 -25
- package/dist/core/utils/worker-pool.js +1 -89
- package/dist/index.js +1 -7
- package/dist/libs/one-euro-filter.js +1 -70
- package/dist/react/TaptappAR.js +1 -151
- package/dist/react/types.js +1 -16
- package/dist/react/use-ar.js +1 -118
- package/dist/runtime/aframe.js +1 -272
- package/dist/runtime/bio-inspired-controller.js +1 -358
- package/dist/runtime/controller.js +1 -592
- package/dist/runtime/controller.worker.js +1 -93
- package/dist/runtime/index.js +1 -5
- package/dist/runtime/three.js +1 -304
- package/dist/runtime/track.js +1 -381
- package/package.json +10 -4
|
@@ -1,455 +1 @@
|
|
|
1
|
-
|
|
2
|
-
* Detector Lite - Pure JavaScript Feature Detector
|
|
3
|
-
*
|
|
4
|
-
* Un detector de características simplificado que no depende de TensorFlow.
|
|
5
|
-
* Optimizado para velocidad en compilación offline.
|
|
6
|
-
*
|
|
7
|
-
* Implementa:
|
|
8
|
-
* - Construcción de pirámide gaussiana (con aceleración GPU opcional)
|
|
9
|
-
* - Diferencia de Gaussianas (DoG) para detección de extremos
|
|
10
|
-
* - Descriptores FREAK simplificados
|
|
11
|
-
*/
|
|
12
|
-
import { FREAKPOINTS } from "./freak.js";
|
|
13
|
-
import { gpuCompute } from "../utils/gpu-compute.js";
|
|
14
|
-
import { computeLSH64, computeFullFREAK, packLSHIntoDescriptor } from "../utils/lsh-direct.js";
|
|
15
|
-
import { HDC_SEED } from "../protocol.js";
|
|
16
|
-
const PYRAMID_MIN_SIZE = 4; // Restored to 4 for better small-scale detection
|
|
17
|
-
// PYRAMID_MAX_OCTAVE ya no es necesario, el límite lo da PYRAMID_MIN_SIZE
|
|
18
|
-
const NUM_BUCKETS_PER_DIMENSION = 15; // Increased from 10 to 15 for better local detail
|
|
19
|
-
const DEFAULT_MAX_FEATURES_PER_BUCKET = 12; // Increased from 8 to 12
|
|
20
|
-
const ORIENTATION_NUM_BINS = 36;
|
|
21
|
-
const FREAK_EXPANSION_FACTOR = 7.0;
|
|
22
|
-
// Global GPU mode flag
|
|
23
|
-
let globalUseGPU = true;
|
|
24
|
-
/**
|
|
25
|
-
* Set global GPU mode for all DetectorLite instances
|
|
26
|
-
* @param {boolean} enabled - Whether to use GPU acceleration
|
|
27
|
-
*/
|
|
28
|
-
export const setDetectorGPUMode = (enabled) => {
|
|
29
|
-
globalUseGPU = enabled;
|
|
30
|
-
};
|
|
31
|
-
/**
|
|
32
|
-
* Detector de características sin TensorFlow
|
|
33
|
-
*/
|
|
34
|
-
export class DetectorLite {
|
|
35
|
-
constructor(width, height, options = {}) {
|
|
36
|
-
this.width = width;
|
|
37
|
-
this.height = height;
|
|
38
|
-
this.useGPU = options.useGPU !== undefined ? options.useGPU : globalUseGPU;
|
|
39
|
-
// Protocol V6 (Moonshot): 64-bit LSH is the standard descriptor format
|
|
40
|
-
this.useLSH = options.useLSH !== undefined ? options.useLSH : true;
|
|
41
|
-
this.useHDC = options.useHDC !== undefined ? options.useHDC : true; // Enabled by default for Moonshot
|
|
42
|
-
this.maxFeaturesPerBucket = options.maxFeaturesPerBucket !== undefined ? options.maxFeaturesPerBucket : DEFAULT_MAX_FEATURES_PER_BUCKET;
|
|
43
|
-
let numOctaves = 0;
|
|
44
|
-
let w = width, h = height;
|
|
45
|
-
while (w >= PYRAMID_MIN_SIZE && h >= PYRAMID_MIN_SIZE) {
|
|
46
|
-
w = Math.floor(w / 2);
|
|
47
|
-
h = Math.floor(h / 2);
|
|
48
|
-
numOctaves++;
|
|
49
|
-
// Límite de seguridad razonable para evitar bucles infinitos en imágenes gigantes
|
|
50
|
-
if (numOctaves === 10)
|
|
51
|
-
break;
|
|
52
|
-
}
|
|
53
|
-
this.numOctaves = options.maxOctaves !== undefined ? Math.min(numOctaves, options.maxOctaves) : numOctaves;
|
|
54
|
-
}
|
|
55
|
-
/**
|
|
56
|
-
* Detecta características en una imagen en escala de grises
|
|
57
|
-
* @param {Float32Array|Uint8Array} imageData - Datos de imagen (width * height)
|
|
58
|
-
* @param {Object} options - Opciones de detección (ej. octavesToProcess)
|
|
59
|
-
* @returns {{featurePoints: Array}} Puntos de características detectados
|
|
60
|
-
*/
|
|
61
|
-
detect(imageData, options = {}) {
|
|
62
|
-
const octavesToProcess = options.octavesToProcess || Array.from({ length: this.numOctaves }, (_, i) => i);
|
|
63
|
-
// Normalizar a Float32Array si es necesario
|
|
64
|
-
let data;
|
|
65
|
-
if (imageData instanceof Float32Array) {
|
|
66
|
-
data = imageData;
|
|
67
|
-
}
|
|
68
|
-
else {
|
|
69
|
-
data = new Float32Array(imageData.length);
|
|
70
|
-
for (let i = 0; i < imageData.length; i++) {
|
|
71
|
-
data[i] = imageData[i];
|
|
72
|
-
}
|
|
73
|
-
}
|
|
74
|
-
// 1. Construir pirámide gaussiana (solo octavas solicitadas)
|
|
75
|
-
const pyramidImages = this._buildGaussianPyramid(data, this.width, this.height, octavesToProcess);
|
|
76
|
-
// 2. Construir pirámide DoG (Difference of Gaussians)
|
|
77
|
-
const dogPyramid = this._buildDogPyramid(pyramidImages, octavesToProcess);
|
|
78
|
-
// 3. Encontrar extremos locales
|
|
79
|
-
const extremas = this._findExtremas(dogPyramid, pyramidImages);
|
|
80
|
-
// 4. Aplicar pruning por buckets
|
|
81
|
-
const prunedExtremas = this._applyPrune(extremas);
|
|
82
|
-
// 5. Calcular orientaciones
|
|
83
|
-
this._computeOrientations(prunedExtremas, pyramidImages);
|
|
84
|
-
// 6. Calcular descriptores FREAK
|
|
85
|
-
this._computeFreakDescriptors(prunedExtremas, pyramidImages);
|
|
86
|
-
// Convertir a formato de salida
|
|
87
|
-
const featurePoints = prunedExtremas.map(ext => {
|
|
88
|
-
const scale = Math.pow(2, ext.octave);
|
|
89
|
-
return {
|
|
90
|
-
maxima: ext.score > 0,
|
|
91
|
-
x: ext.x * scale + scale * 0.5 - 0.5,
|
|
92
|
-
y: ext.y * scale + scale * 0.5 - 0.5,
|
|
93
|
-
scale: scale,
|
|
94
|
-
angle: ext.angle || 0,
|
|
95
|
-
score: ext.absScore, // Pass through score for sorting in Matcher
|
|
96
|
-
descriptors: (this.useLSH && ext.lsh) ? ext.lsh : (ext.descriptors || []),
|
|
97
|
-
imageData: data // Pass source image for refinement
|
|
98
|
-
};
|
|
99
|
-
});
|
|
100
|
-
return { featurePoints, pyramid: pyramidImages };
|
|
101
|
-
}
|
|
102
|
-
/**
|
|
103
|
-
* Construye una pirámide gaussiana
|
|
104
|
-
*/
|
|
105
|
-
_buildGaussianPyramid(data, width, height, octavesToProcess = null) {
|
|
106
|
-
// Use GPU-accelerated pyramid if available
|
|
107
|
-
if (this.useGPU) {
|
|
108
|
-
try {
|
|
109
|
-
const gpuPyramid = gpuCompute.buildPyramid(data, width, height, this.numOctaves);
|
|
110
|
-
// Convert GPU pyramid format to expected format
|
|
111
|
-
const pyramid = [];
|
|
112
|
-
for (let i = 0; i < gpuPyramid.length && i < this.numOctaves; i++) {
|
|
113
|
-
if (octavesToProcess && !octavesToProcess.includes(i)) {
|
|
114
|
-
pyramid.push(null);
|
|
115
|
-
continue;
|
|
116
|
-
}
|
|
117
|
-
const level = gpuPyramid[i];
|
|
118
|
-
// Apply second blur for DoG computation
|
|
119
|
-
const img2 = this._applyGaussianFilter(level.data, level.width, level.height);
|
|
120
|
-
pyramid.push([
|
|
121
|
-
{ data: level.data, width: level.width, height: level.height },
|
|
122
|
-
{ data: img2.data, width: level.width, height: level.height }
|
|
123
|
-
]);
|
|
124
|
-
}
|
|
125
|
-
return pyramid;
|
|
126
|
-
}
|
|
127
|
-
catch (e) {
|
|
128
|
-
// Fall back to CPU if GPU fails
|
|
129
|
-
console.warn("GPU pyramid failed, falling back to CPU:", e.message);
|
|
130
|
-
}
|
|
131
|
-
}
|
|
132
|
-
// Buffer management: Reuse arrays if dimensions match to reduce GC
|
|
133
|
-
if (!this._pyramidBuffers || this._pyramidBuffers.width !== width || this._pyramidBuffers.height !== height) {
|
|
134
|
-
this._pyramidBuffers = { width, height, temp: new Float32Array(width * height) };
|
|
135
|
-
}
|
|
136
|
-
const pyramid = [];
|
|
137
|
-
let currentData = data;
|
|
138
|
-
let currentWidth = width;
|
|
139
|
-
let currentHeight = height;
|
|
140
|
-
for (let i = 0; i < this.numOctaves; i++) {
|
|
141
|
-
const shouldProcess = !octavesToProcess || octavesToProcess.includes(i);
|
|
142
|
-
if (shouldProcess) {
|
|
143
|
-
const img1 = this._applyGaussianFilter(currentData, currentWidth, currentHeight);
|
|
144
|
-
const img2 = this._applyGaussianFilter(img1.data, currentWidth, currentHeight);
|
|
145
|
-
pyramid.push([
|
|
146
|
-
{ data: img1.data, width: currentWidth, height: currentHeight },
|
|
147
|
-
{ data: img2.data, width: currentWidth, height: currentHeight }
|
|
148
|
-
]);
|
|
149
|
-
}
|
|
150
|
-
else {
|
|
151
|
-
pyramid.push(null);
|
|
152
|
-
}
|
|
153
|
-
if (i < this.numOctaves - 1) {
|
|
154
|
-
// For CPU downsampling, we STILL need to downsample even if we skip processing the current octave
|
|
155
|
-
// UNLESS the next octave is also skipped. But for simplicity and safety, we downsample if needed by ANY future octave.
|
|
156
|
-
const needsDownsample = !octavesToProcess || octavesToProcess.some(o => o > i);
|
|
157
|
-
if (needsDownsample) {
|
|
158
|
-
// If current octave was processed, we use img1.data (or original data if i=0 and not processed?).
|
|
159
|
-
// Wait, standard is to downsample from the blurred image of previous octave.
|
|
160
|
-
const sourceData = shouldProcess ? pyramid[i][0].data : currentData;
|
|
161
|
-
const downsampled = this._downsample(sourceData, currentWidth, currentHeight);
|
|
162
|
-
currentData = downsampled.data;
|
|
163
|
-
currentWidth = downsampled.width;
|
|
164
|
-
currentHeight = downsampled.height;
|
|
165
|
-
}
|
|
166
|
-
else {
|
|
167
|
-
// Optimization: if no more octaves are needed, we can stop here
|
|
168
|
-
break;
|
|
169
|
-
}
|
|
170
|
-
}
|
|
171
|
-
}
|
|
172
|
-
return pyramid;
|
|
173
|
-
}
|
|
174
|
-
/**
|
|
175
|
-
* Aplica un filtro gaussiano binomial [1,4,6,4,1] - Optimizado
|
|
176
|
-
*/
|
|
177
|
-
_applyGaussianFilter(data, width, height) {
|
|
178
|
-
const output = new Float32Array(width * height);
|
|
179
|
-
const temp = this._pyramidBuffers?.temp || new Float32Array(width * height);
|
|
180
|
-
const k0 = 0.0625, k1 = 0.25, k2 = 0.375; // 1/16, 4/16, 6/16
|
|
181
|
-
const w1 = width - 1;
|
|
182
|
-
// Horizontal pass - Speed optimized with manual border handling
|
|
183
|
-
for (let y = 0; y < height; y++) {
|
|
184
|
-
const rowOffset = y * width;
|
|
185
|
-
// Left border (Normalized)
|
|
186
|
-
const sumL0 = k0 + k1 + k2 + k1 + k0; // Ideal sum
|
|
187
|
-
temp[rowOffset] = (data[rowOffset] * (k0 + k1 + k2) + data[rowOffset + 1] * k1 + data[rowOffset + 2] * k0) * (1.0 / (k0 + k1 + k2));
|
|
188
|
-
temp[rowOffset + 1] = (data[rowOffset] * k1 + data[rowOffset + 1] * k2 + data[rowOffset + 2] * k1 + data[rowOffset + 3] * k0) * (1.0 / (k1 + k2 + k1 + k0));
|
|
189
|
-
// Main loop - NO boundary checks
|
|
190
|
-
for (let x = 2; x < width - 2; x++) {
|
|
191
|
-
const pos = rowOffset + x;
|
|
192
|
-
temp[pos] = data[pos - 2] * k0 + data[pos - 1] * k1 + data[pos] * k2 + data[pos + 1] * k1 + data[pos + 2] * k0;
|
|
193
|
-
}
|
|
194
|
-
// Right border (Normalized)
|
|
195
|
-
const r2 = rowOffset + width - 2;
|
|
196
|
-
const r1 = rowOffset + width - 1;
|
|
197
|
-
temp[r2] = (data[r2 - 2] * k0 + data[r2 - 1] * k1 + data[r2] * k2 + data[r1] * k1) * (1.0 / (k0 + k1 + k2 + k1));
|
|
198
|
-
temp[r1] = (data[r1 - 2] * k0 + data[r1 - 1] * k1 + data[r1] * (k2 + k1 + k0)) * (1.0 / (k0 + k1 + k2));
|
|
199
|
-
}
|
|
200
|
-
// Vertical pass - Speed optimized
|
|
201
|
-
for (let x = 0; x < width; x++) {
|
|
202
|
-
// Top border (Normalized)
|
|
203
|
-
output[x] = (temp[x] * (k0 + k1 + k2) + temp[x + width] * k1 + temp[x + width * 2] * k0) * (1.0 / (k0 + k1 + k2));
|
|
204
|
-
output[x + width] = (temp[x] * k1 + temp[x + width] * k2 + temp[x + width * 2] * k1 + temp[x + width * 3] * k0) * (1.0 / (k1 + k2 + k1 + k0));
|
|
205
|
-
// Main loop - NO boundary checks
|
|
206
|
-
for (let y = 2; y < height - 2; y++) {
|
|
207
|
-
const p = y * width + x;
|
|
208
|
-
output[p] = temp[p - width * 2] * k0 + temp[p - width] * k1 + temp[p] * k2 + temp[p + width] * k1 + temp[p + width * 2] * k0;
|
|
209
|
-
}
|
|
210
|
-
// Bottom border (Normalized)
|
|
211
|
-
const b2 = (height - 2) * width + x;
|
|
212
|
-
const b1 = (height - 1) * width + x;
|
|
213
|
-
output[b2] = (temp[b2 - width * 2] * k0 + temp[b2 - width] * k1 + temp[b2] * k2 + temp[b1] * k1) * (1.0 / (k0 + k1 + k2 + k1));
|
|
214
|
-
output[b1] = (temp[b1 - width * 2] * k0 + temp[b1 - width] * k1 + temp[b1] * (k2 + k1 + k0)) * (1.0 / (k0 + k1 + k2));
|
|
215
|
-
}
|
|
216
|
-
return { data: output, width, height };
|
|
217
|
-
}
|
|
218
|
-
/**
|
|
219
|
-
* Downsample imagen por factor de 2
|
|
220
|
-
*/
|
|
221
|
-
_downsample(data, width, height) {
|
|
222
|
-
const newWidth = width >> 1;
|
|
223
|
-
const newHeight = height >> 1;
|
|
224
|
-
const output = new Float32Array(newWidth * newHeight);
|
|
225
|
-
for (let y = 0; y < newHeight; y++) {
|
|
226
|
-
const r0 = (y * 2) * width;
|
|
227
|
-
const r1 = r0 + width;
|
|
228
|
-
const dr = y * newWidth;
|
|
229
|
-
for (let x = 0; x < newWidth; x++) {
|
|
230
|
-
const i2 = x * 2;
|
|
231
|
-
output[dr + x] = (data[r0 + i2] + data[r0 + i2 + 1] + data[r1 + i2] + data[r1 + i2 + 1]) * 0.25;
|
|
232
|
-
}
|
|
233
|
-
}
|
|
234
|
-
return { data: output, width: newWidth, height: newHeight };
|
|
235
|
-
}
|
|
236
|
-
/**
|
|
237
|
-
* Construye pirámide de diferencia de gaussianas
|
|
238
|
-
*/
|
|
239
|
-
_buildDogPyramid(pyramidImages, octavesToProcess = null) {
|
|
240
|
-
const dogPyramid = [];
|
|
241
|
-
for (let i = 0; i < pyramidImages.length; i++) {
|
|
242
|
-
if (!pyramidImages[i]) {
|
|
243
|
-
dogPyramid.push(null);
|
|
244
|
-
continue;
|
|
245
|
-
}
|
|
246
|
-
const img1 = pyramidImages[i][0];
|
|
247
|
-
const img2 = pyramidImages[i][1];
|
|
248
|
-
const width = img1.width;
|
|
249
|
-
const height = img1.height;
|
|
250
|
-
const dog = new Float32Array(width * height);
|
|
251
|
-
for (let j = 0; j < dog.length; j++) {
|
|
252
|
-
dog[j] = img2.data[j] - img1.data[j];
|
|
253
|
-
}
|
|
254
|
-
dogPyramid.push({ data: dog, width, height });
|
|
255
|
-
}
|
|
256
|
-
return dogPyramid;
|
|
257
|
-
}
|
|
258
|
-
/**
|
|
259
|
-
* Encuentra extremos locales en la pirámide DoG
|
|
260
|
-
*/
|
|
261
|
-
_findExtremas(dogPyramid, pyramidImages) {
|
|
262
|
-
const extremas = [];
|
|
263
|
-
for (let octave = 0; octave < dogPyramid.length; octave++) {
|
|
264
|
-
const curr = dogPyramid[octave];
|
|
265
|
-
if (!curr)
|
|
266
|
-
continue;
|
|
267
|
-
const prev = octave > 0 ? dogPyramid[octave - 1] : null;
|
|
268
|
-
const next = octave < dogPyramid.length - 1 ? dogPyramid[octave + 1] : null;
|
|
269
|
-
const width = curr.width;
|
|
270
|
-
const height = curr.height;
|
|
271
|
-
for (let y = 1; y < height - 1; y++) {
|
|
272
|
-
for (let x = 1; x < width - 1; x++) {
|
|
273
|
-
const val = curr.data[y * width + x];
|
|
274
|
-
if (Math.abs(val) < 0.003)
|
|
275
|
-
continue; // Aggressively lowered threshold to 0.003 for max sensitivity
|
|
276
|
-
let isMaxima = true;
|
|
277
|
-
let isMinima = true;
|
|
278
|
-
// Check 3x3 neighborhood in current scale
|
|
279
|
-
for (let dy = -1; dy <= 1 && (isMaxima || isMinima); dy++) {
|
|
280
|
-
for (let dx = -1; dx <= 1 && (isMaxima || isMinima); dx++) {
|
|
281
|
-
if (dx === 0 && dy === 0)
|
|
282
|
-
continue;
|
|
283
|
-
const neighbor = curr.data[(y + dy) * width + (x + dx)];
|
|
284
|
-
if (neighbor >= val)
|
|
285
|
-
isMaxima = false;
|
|
286
|
-
if (neighbor <= val)
|
|
287
|
-
isMinima = false;
|
|
288
|
-
}
|
|
289
|
-
}
|
|
290
|
-
// Check previous scale (scaled coordinates) - skip if no prev layer
|
|
291
|
-
if ((isMaxima || isMinima) && prev) {
|
|
292
|
-
const px = x << 1;
|
|
293
|
-
const py = y << 1;
|
|
294
|
-
const prevWidth = prev.width;
|
|
295
|
-
for (let dy = -1; dy <= 1 && (isMaxima || isMinima); dy++) {
|
|
296
|
-
for (let dx = -1; dx <= 1 && (isMaxima || isMinima); dx++) {
|
|
297
|
-
const xx = Math.max(0, Math.min(prevWidth - 1, px + dx));
|
|
298
|
-
const yy = Math.max(0, Math.min(prev.height - 1, py + dy));
|
|
299
|
-
const neighbor = prev.data[yy * prevWidth + xx];
|
|
300
|
-
if (neighbor >= val)
|
|
301
|
-
isMaxima = false;
|
|
302
|
-
if (neighbor <= val)
|
|
303
|
-
isMinima = false;
|
|
304
|
-
}
|
|
305
|
-
}
|
|
306
|
-
}
|
|
307
|
-
// Check next scale (scaled coordinates) - skip if no next layer
|
|
308
|
-
if ((isMaxima || isMinima) && next) {
|
|
309
|
-
const nx = x >> 1;
|
|
310
|
-
const ny = y >> 1;
|
|
311
|
-
const nextWidth = next.width;
|
|
312
|
-
for (let dy = -1; dy <= 1 && (isMaxima || isMinima); dy++) {
|
|
313
|
-
for (let dx = -1; dx <= 1 && (isMaxima || isMinima); dx++) {
|
|
314
|
-
const xx = Math.max(0, Math.min(nextWidth - 1, nx + dx));
|
|
315
|
-
const yy = Math.max(0, Math.min(next.height - 1, ny + dy));
|
|
316
|
-
const neighbor = next.data[yy * nextWidth + xx];
|
|
317
|
-
if (neighbor >= val)
|
|
318
|
-
isMaxima = false;
|
|
319
|
-
if (neighbor <= val)
|
|
320
|
-
isMinima = false;
|
|
321
|
-
}
|
|
322
|
-
}
|
|
323
|
-
}
|
|
324
|
-
if (isMaxima || isMinima) {
|
|
325
|
-
extremas.push({
|
|
326
|
-
score: isMaxima ? Math.abs(val) : -Math.abs(val),
|
|
327
|
-
octave,
|
|
328
|
-
x,
|
|
329
|
-
y,
|
|
330
|
-
absScore: Math.abs(val)
|
|
331
|
-
});
|
|
332
|
-
}
|
|
333
|
-
}
|
|
334
|
-
}
|
|
335
|
-
}
|
|
336
|
-
return extremas;
|
|
337
|
-
}
|
|
338
|
-
/**
|
|
339
|
-
* Aplica pruning para mantener solo los mejores features por bucket
|
|
340
|
-
*/
|
|
341
|
-
_applyPrune(extremas) {
|
|
342
|
-
const nBuckets = NUM_BUCKETS_PER_DIMENSION;
|
|
343
|
-
const nFeatures = this.maxFeaturesPerBucket;
|
|
344
|
-
// Agrupar por buckets
|
|
345
|
-
const buckets = [];
|
|
346
|
-
for (let i = 0; i < nBuckets * nBuckets; i++) {
|
|
347
|
-
buckets.push([]);
|
|
348
|
-
}
|
|
349
|
-
for (const ext of extremas) {
|
|
350
|
-
const bucketX = Math.min(nBuckets - 1, Math.floor(ext.x / (this.width / Math.pow(2, ext.octave)) * nBuckets));
|
|
351
|
-
const bucketY = Math.min(nBuckets - 1, Math.floor(ext.y / (this.height / Math.pow(2, ext.octave)) * nBuckets));
|
|
352
|
-
const bucketIdx = bucketY * nBuckets + bucketX;
|
|
353
|
-
if (bucketIdx >= 0 && bucketIdx < buckets.length) {
|
|
354
|
-
buckets[bucketIdx].push(ext);
|
|
355
|
-
}
|
|
356
|
-
}
|
|
357
|
-
// Seleccionar top features por bucket
|
|
358
|
-
const result = [];
|
|
359
|
-
for (const bucket of buckets) {
|
|
360
|
-
bucket.sort((a, b) => b.absScore - a.absScore);
|
|
361
|
-
for (let i = 0; i < Math.min(nFeatures, bucket.length); i++) {
|
|
362
|
-
result.push(bucket[i]);
|
|
363
|
-
}
|
|
364
|
-
}
|
|
365
|
-
return result;
|
|
366
|
-
}
|
|
367
|
-
/**
|
|
368
|
-
* Calcula la orientación de cada feature
|
|
369
|
-
*/
|
|
370
|
-
_computeOrientations(extremas, pyramidImages) {
|
|
371
|
-
for (const ext of extremas) {
|
|
372
|
-
if (ext.octave < 0 || ext.octave >= pyramidImages.length) {
|
|
373
|
-
ext.angle = 0;
|
|
374
|
-
continue;
|
|
375
|
-
}
|
|
376
|
-
const img = pyramidImages[ext.octave][1];
|
|
377
|
-
const width = img.width;
|
|
378
|
-
const height = img.height;
|
|
379
|
-
const data = img.data;
|
|
380
|
-
const x = Math.floor(ext.x);
|
|
381
|
-
const y = Math.floor(ext.y);
|
|
382
|
-
// Compute gradient histogram
|
|
383
|
-
const histogram = new Float32Array(ORIENTATION_NUM_BINS);
|
|
384
|
-
const radius = 4;
|
|
385
|
-
for (let dy = -radius; dy <= radius; dy++) {
|
|
386
|
-
for (let dx = -radius; dx <= radius; dx++) {
|
|
387
|
-
const yy = y + dy;
|
|
388
|
-
const xx = x + dx;
|
|
389
|
-
if (yy <= 0 || yy >= height - 1 || xx <= 0 || xx >= width - 1)
|
|
390
|
-
continue;
|
|
391
|
-
const gradY = data[(yy + 1) * width + xx] - data[(yy - 1) * width + xx];
|
|
392
|
-
const gradX = data[yy * width + xx + 1] - data[yy * width + xx - 1];
|
|
393
|
-
const mag = Math.sqrt(gradX * gradX + gradY * gradY);
|
|
394
|
-
const angle = Math.atan2(gradY, gradX) + Math.PI; // 0 to 2*PI
|
|
395
|
-
const bin = Math.floor(angle / (2 * Math.PI) * ORIENTATION_NUM_BINS) % ORIENTATION_NUM_BINS;
|
|
396
|
-
const weight = Math.exp(-(dx * dx + dy * dy) / (2 * radius * radius));
|
|
397
|
-
histogram[bin] += mag * weight;
|
|
398
|
-
}
|
|
399
|
-
}
|
|
400
|
-
// Find peak
|
|
401
|
-
let maxBin = 0;
|
|
402
|
-
for (let i = 1; i < ORIENTATION_NUM_BINS; i++) {
|
|
403
|
-
if (histogram[i] > histogram[maxBin]) {
|
|
404
|
-
maxBin = i;
|
|
405
|
-
}
|
|
406
|
-
}
|
|
407
|
-
ext.angle = (maxBin + 0.5) * 2 * Math.PI / ORIENTATION_NUM_BINS - Math.PI;
|
|
408
|
-
}
|
|
409
|
-
}
|
|
410
|
-
/**
|
|
411
|
-
* Calcula descriptores FREAK
|
|
412
|
-
*/
|
|
413
|
-
_computeFreakDescriptors(extremas, pyramidImages) {
|
|
414
|
-
for (const ext of extremas) {
|
|
415
|
-
if (ext.octave < 0 || ext.octave >= pyramidImages.length) {
|
|
416
|
-
ext.descriptors = new Uint8Array(8);
|
|
417
|
-
continue;
|
|
418
|
-
}
|
|
419
|
-
const img = pyramidImages[ext.octave][1];
|
|
420
|
-
const width = img.width;
|
|
421
|
-
const height = img.height;
|
|
422
|
-
const data = img.data;
|
|
423
|
-
const cos = Math.cos(ext.angle || 0) * FREAK_EXPANSION_FACTOR;
|
|
424
|
-
const sin = Math.sin(ext.angle || 0) * FREAK_EXPANSION_FACTOR;
|
|
425
|
-
// Sample FREAK points
|
|
426
|
-
const samples = new Float32Array(FREAKPOINTS.length);
|
|
427
|
-
for (let i = 0; i < FREAKPOINTS.length; i++) {
|
|
428
|
-
const [, fx, fy] = FREAKPOINTS[i];
|
|
429
|
-
const xp = ext.x + fx * cos - fy * sin;
|
|
430
|
-
const yp = ext.y + fx * sin + fy * cos;
|
|
431
|
-
const x0 = Math.max(0, Math.min(width - 2, Math.floor(xp)));
|
|
432
|
-
const y0 = Math.max(0, Math.min(height - 2, Math.floor(yp)));
|
|
433
|
-
const x1 = x0 + 1;
|
|
434
|
-
const y1 = y0 + 1;
|
|
435
|
-
const fracX = xp - x0;
|
|
436
|
-
const fracY = yp - y0;
|
|
437
|
-
samples[i] =
|
|
438
|
-
data[y0 * width + x0] * (1 - fracX) * (1 - fracY) +
|
|
439
|
-
data[y0 * width + x1] * fracX * (1 - fracY) +
|
|
440
|
-
data[y1 * width + x0] * (1 - fracX) * fracY +
|
|
441
|
-
data[y1 * width + x1] * fracX * fracY;
|
|
442
|
-
}
|
|
443
|
-
// 🚀 MOONSHOT: Direct LSH computation
|
|
444
|
-
// Avoids computing 672 bits of FREAK just to sample 64.
|
|
445
|
-
if (this.useLSH) {
|
|
446
|
-
ext.lsh = computeLSH64(samples);
|
|
447
|
-
// Pack LSH into 8-byte descriptors for compatibility
|
|
448
|
-
ext.descriptors = packLSHIntoDescriptor(ext.lsh);
|
|
449
|
-
}
|
|
450
|
-
else {
|
|
451
|
-
ext.descriptors = computeFullFREAK(samples);
|
|
452
|
-
}
|
|
453
|
-
}
|
|
454
|
-
}
|
|
455
|
-
}
|
|
1
|
+
import{FREAKPOINTS as t}from"./freak.js";import{gpuCompute as e}from"../utils/gpu-compute.js";import{computeLSH64 as a,computeFullFREAK as s,packLSHIntoDescriptor as o}from"../utils/lsh-direct.js";import{HDC_SEED as i}from"../protocol.js";let h=!0;export const setDetectorGPUMode=t=>{h=t};export class DetectorLite{constructor(t,e,a={}){this.width=t,this.height=e,this.useGPU=void 0!==a.useGPU?a.useGPU:h,this.useLSH=void 0===a.useLSH||a.useLSH,this.useHDC=void 0===a.useHDC||a.useHDC,this.maxFeaturesPerBucket=void 0!==a.maxFeaturesPerBucket?a.maxFeaturesPerBucket:12;let s=0,o=t,i=e;for(;o>=4&&i>=4&&(o=Math.floor(o/2),i=Math.floor(i/2),s++,10!==s););this.numOctaves=void 0!==a.maxOctaves?Math.min(s,a.maxOctaves):s}detect(t,e={}){const a=e.octavesToProcess||Array.from({length:this.numOctaves},(t,e)=>e);let s;if(t instanceof Float32Array)s=t;else{s=new Float32Array(t.length);for(let e=0;e<t.length;e++)s[e]=t[e]}const o=this._buildGaussianPyramid(s,this.width,this.height,a),i=this._buildDogPyramid(o,a),h=this._findExtremas(i,o),r=this._applyPrune(h);return this._computeOrientations(r,o),this._computeFreakDescriptors(r,o),{featurePoints:r.map(t=>{const e=Math.pow(2,t.octave);return{maxima:t.score>0,x:t.x*e+.5*e-.5,y:t.y*e+.5*e-.5,scale:e,angle:t.angle||0,score:t.absScore,descriptors:this.useLSH&&t.lsh?t.lsh:t.descriptors||[],imageData:s}}),pyramid:o}}_buildGaussianPyramid(t,a,s,o=null){if(this.useGPU)try{const i=e.buildPyramid(t,a,s,this.numOctaves),h=[];for(let t=0;t<i.length&&t<this.numOctaves;t++){if(o&&!o.includes(t)){h.push(null);continue}const e=i[t],a=this._applyGaussianFilter(e.data,e.width,e.height);h.push([{data:e.data,width:e.width,height:e.height},{data:a.data,width:e.width,height:e.height}])}return h}catch(t){console.warn("GPU pyramid failed, falling back to CPU:",t.message)}this._pyramidBuffers&&this._pyramidBuffers.width===a&&this._pyramidBuffers.height===s||(this._pyramidBuffers={width:a,height:s,temp:new Float32Array(a*s)});const i=[];let h=t,r=a,n=s;for(let 
t=0;t<this.numOctaves;t++){const e=!o||o.includes(t);if(e){const t=this._applyGaussianFilter(h,r,n),e=this._applyGaussianFilter(t.data,r,n);i.push([{data:t.data,width:r,height:n},{data:e.data,width:r,height:n}])}else i.push(null);if(t<this.numOctaves-1){if(o&&!o.some(e=>e>t))break;{const a=e?i[t][0].data:h,s=this._downsample(a,r,n);h=s.data,r=s.width,n=s.height}}}return i}_applyGaussianFilter(t,e,a){const s=new Float32Array(e*a),o=this._pyramidBuffers?.temp||new Float32Array(e*a),i=.0625,h=.25,r=.375;for(let s=0;s<a;s++){const a=s*e;o[a]=(.6875*t[a]+t[a+1]*h+t[a+2]*i)*(1/.6875),o[a+1]=(t[a]*h+t[a+1]*r+t[a+2]*h+t[a+3]*i)*(1/.9375);for(let s=2;s<e-2;s++){const e=a+s;o[e]=t[e-2]*i+t[e-1]*h+t[e]*r+t[e+1]*h+t[e+2]*i}const n=a+e-2,l=a+e-1;o[n]=(t[n-2]*i+t[n-1]*h+t[n]*r+t[l]*h)*(1/.9375),o[l]=(t[l-2]*i+t[l-1]*h+.6875*t[l])*(1/.6875)}for(let t=0;t<e;t++){s[t]=(.6875*o[t]+o[t+e]*h+o[t+2*e]*i)*(1/.6875),s[t+e]=(o[t]*h+o[t+e]*r+o[t+2*e]*h+o[t+3*e]*i)*(1/.9375);for(let n=2;n<a-2;n++){const a=n*e+t;s[a]=o[a-2*e]*i+o[a-e]*h+o[a]*r+o[a+e]*h+o[a+2*e]*i}const n=(a-2)*e+t,l=(a-1)*e+t;s[n]=(o[n-2*e]*i+o[n-e]*h+o[n]*r+o[l]*h)*(1/.9375),s[l]=(o[l-2*e]*i+o[l-e]*h+.6875*o[l])*(1/.6875)}return{data:s,width:e,height:a}}_downsample(t,e,a){const s=e>>1,o=a>>1,i=new Float32Array(s*o);for(let a=0;a<o;a++){const o=2*a*e,h=o+e,r=a*s;for(let e=0;e<s;e++){const a=2*e;i[r+e]=.25*(t[o+a]+t[o+a+1]+t[h+a]+t[h+a+1])}}return{data:i,width:s,height:o}}_buildDogPyramid(t,e=null){const a=[];for(let e=0;e<t.length;e++){if(!t[e]){a.push(null);continue}const s=t[e][0],o=t[e][1],i=s.width,h=s.height,r=new Float32Array(i*h);for(let t=0;t<r.length;t++)r[t]=o.data[t]-s.data[t];a.push({data:r,width:i,height:h})}return a}_findExtremas(t,e){const a=[];for(let e=0;e<t.length;e++){const s=t[e];if(!s)continue;const o=e>0?t[e-1]:null,i=e<t.length-1?t[e+1]:null,h=s.width,r=s.height;for(let t=1;t<r-1;t++)for(let r=1;r<h-1;r++){const n=s.data[t*h+r];if(Math.abs(n)<.003)continue;let l=!0,c=!0;for(let 
e=-1;e<=1&&(l||c);e++)for(let a=-1;a<=1&&(l||c);a++){if(0===a&&0===e)continue;const o=s.data[(t+e)*h+(r+a)];o>=n&&(l=!1),o<=n&&(c=!1)}if((l||c)&&o){const e=r<<1,a=t<<1,s=o.width;for(let t=-1;t<=1&&(l||c);t++)for(let i=-1;i<=1&&(l||c);i++){const h=Math.max(0,Math.min(s-1,e+i)),r=Math.max(0,Math.min(o.height-1,a+t)),u=o.data[r*s+h];u>=n&&(l=!1),u<=n&&(c=!1)}}if((l||c)&&i){const e=r>>1,a=t>>1,s=i.width;for(let t=-1;t<=1&&(l||c);t++)for(let o=-1;o<=1&&(l||c);o++){const h=Math.max(0,Math.min(s-1,e+o)),r=Math.max(0,Math.min(i.height-1,a+t)),u=i.data[r*s+h];u>=n&&(l=!1),u<=n&&(c=!1)}}(l||c)&&a.push({score:l?Math.abs(n):-Math.abs(n),octave:e,x:r,y:t,absScore:Math.abs(n)})}}return a}_applyPrune(t){const e=this.maxFeaturesPerBucket,a=[];for(let t=0;t<225;t++)a.push([]);for(const e of t){const t=Math.min(14,Math.floor(e.x/(this.width/Math.pow(2,e.octave))*15)),s=15*Math.min(14,Math.floor(e.y/(this.height/Math.pow(2,e.octave))*15))+t;s>=0&&s<a.length&&a[s].push(e)}const s=[];for(const t of a){t.sort((t,e)=>e.absScore-t.absScore);for(let a=0;a<Math.min(e,t.length);a++)s.push(t[a])}return s}_computeOrientations(t,e){for(const a of t){if(a.octave<0||a.octave>=e.length){a.angle=0;continue}const t=e[a.octave][1],s=t.width,o=t.height,i=t.data,h=Math.floor(a.x),r=Math.floor(a.y),n=new Float32Array(36),l=4;for(let t=-l;t<=l;t++)for(let e=-l;e<=l;e++){const a=r+t,c=h+e;if(a<=0||a>=o-1||c<=0||c>=s-1)continue;const u=i[(a+1)*s+c]-i[(a-1)*s+c],d=i[a*s+c+1]-i[a*s+c-1],f=Math.sqrt(d*d+u*u),m=Math.atan2(u,d)+Math.PI,p=Math.floor(m/(2*Math.PI)*36)%36,g=Math.exp(-(e*e+t*t)/(2*l*l));n[p]+=f*g}let c=0;for(let t=1;t<36;t++)n[t]>n[c]&&(c=t);a.angle=2*(c+.5)*Math.PI/36-Math.PI}}_computeFreakDescriptors(e,i){for(const h of e){if(h.octave<0||h.octave>=i.length){h.descriptors=new Uint8Array(8);continue}const e=i[h.octave][1],r=e.width,n=e.height,l=e.data,c=7*Math.cos(h.angle||0),u=7*Math.sin(h.angle||0),d=new Float32Array(t.length);for(let 
e=0;e<t.length;e++){const[,a,s]=t[e],o=h.x+a*c-s*u,i=h.y+a*u+s*c,f=Math.max(0,Math.min(r-2,Math.floor(o))),m=Math.max(0,Math.min(n-2,Math.floor(i))),p=f+1,g=m+1,M=o-f,w=i-m;d[e]=l[m*r+f]*(1-M)*(1-w)+l[m*r+p]*M*(1-w)+l[g*r+f]*(1-M)*w+l[g*r+p]*M*w}this.useLSH?(h.lsh=a(d),h.descriptors=o(h.lsh)):h.descriptors=s(d)}}}
|
|
@@ -1,89 +1 @@
|
|
|
1
|
-
|
|
2
|
-
const FREAK_RINGS = [
|
|
3
|
-
// ring 5
|
|
4
|
-
{
|
|
5
|
-
sigma: 0.55,
|
|
6
|
-
points: [
|
|
7
|
-
[-1.0, 0.0],
|
|
8
|
-
[-0.5, -0.866025],
|
|
9
|
-
[0.5, -0.866025],
|
|
10
|
-
[1.0, -0.0],
|
|
11
|
-
[0.5, 0.866025],
|
|
12
|
-
[-0.5, 0.866025],
|
|
13
|
-
],
|
|
14
|
-
},
|
|
15
|
-
// ring 4
|
|
16
|
-
{
|
|
17
|
-
sigma: 0.475,
|
|
18
|
-
points: [
|
|
19
|
-
[0.0, 0.930969],
|
|
20
|
-
[-0.806243, 0.465485],
|
|
21
|
-
[-0.806243, -0.465485],
|
|
22
|
-
[-0.0, -0.930969],
|
|
23
|
-
[0.806243, -0.465485],
|
|
24
|
-
[0.806243, 0.465485],
|
|
25
|
-
],
|
|
26
|
-
},
|
|
27
|
-
// ring 3
|
|
28
|
-
{
|
|
29
|
-
sigma: 0.4,
|
|
30
|
-
points: [
|
|
31
|
-
[0.847306, -0.0],
|
|
32
|
-
[0.423653, 0.733789],
|
|
33
|
-
[-0.423653, 0.733789],
|
|
34
|
-
[-0.847306, 0.0],
|
|
35
|
-
[-0.423653, -0.733789],
|
|
36
|
-
[0.423653, -0.733789],
|
|
37
|
-
],
|
|
38
|
-
},
|
|
39
|
-
// ring 2
|
|
40
|
-
{
|
|
41
|
-
sigma: 0.325,
|
|
42
|
-
points: [
|
|
43
|
-
[-0.0, -0.741094],
|
|
44
|
-
[0.641806, -0.370547],
|
|
45
|
-
[0.641806, 0.370547],
|
|
46
|
-
[0.0, 0.741094],
|
|
47
|
-
[-0.641806, 0.370547],
|
|
48
|
-
[-0.641806, -0.370547],
|
|
49
|
-
],
|
|
50
|
-
},
|
|
51
|
-
// ring 1
|
|
52
|
-
{
|
|
53
|
-
sigma: 0.25,
|
|
54
|
-
points: [
|
|
55
|
-
[-0.595502, 0.0],
|
|
56
|
-
[-0.297751, -0.51572],
|
|
57
|
-
[0.297751, -0.51572],
|
|
58
|
-
[0.595502, -0.0],
|
|
59
|
-
[0.297751, 0.51572],
|
|
60
|
-
[-0.297751, 0.51572],
|
|
61
|
-
],
|
|
62
|
-
},
|
|
63
|
-
// ring 0
|
|
64
|
-
{
|
|
65
|
-
sigma: 0.175,
|
|
66
|
-
points: [
|
|
67
|
-
[0.0, 0.362783],
|
|
68
|
-
[-0.314179, 0.181391],
|
|
69
|
-
[-0.314179, -0.181391],
|
|
70
|
-
[-0.0, -0.362783],
|
|
71
|
-
[0.314179, -0.181391],
|
|
72
|
-
[0.314179, 0.181391],
|
|
73
|
-
],
|
|
74
|
-
},
|
|
75
|
-
// center
|
|
76
|
-
{
|
|
77
|
-
sigma: 0.1,
|
|
78
|
-
points: [[0, 0]],
|
|
79
|
-
},
|
|
80
|
-
];
|
|
81
|
-
const FREAKPOINTS = [];
|
|
82
|
-
for (let r = 0; r < FREAK_RINGS.length; r++) {
|
|
83
|
-
const sigma = FREAK_RINGS[r].sigma;
|
|
84
|
-
for (let i = 0; i < FREAK_RINGS[r].points.length; i++) {
|
|
85
|
-
const point = FREAK_RINGS[r].points[i];
|
|
86
|
-
FREAKPOINTS.push([sigma, point[0], point[1]]);
|
|
87
|
-
}
|
|
88
|
-
}
|
|
89
|
-
export { FREAKPOINTS };
|
|
1
|
+
// FREAK receptive-field layout: seven concentric rings (outermost first),
// each with a Gaussian smoothing sigma and its sample-point coordinates on
// the unit pattern. Signed zeros are kept exactly as generated.
const s = [
  { sigma: 0.55,  points: [[-1, 0], [-0.5, -0.866025], [0.5, -0.866025], [1, -0], [0.5, 0.866025], [-0.5, 0.866025]] },
  { sigma: 0.475, points: [[0, 0.930969], [-0.806243, 0.465485], [-0.806243, -0.465485], [-0, -0.930969], [0.806243, -0.465485], [0.806243, 0.465485]] },
  { sigma: 0.4,   points: [[0.847306, -0], [0.423653, 0.733789], [-0.423653, 0.733789], [-0.847306, 0], [-0.423653, -0.733789], [0.423653, -0.733789]] },
  { sigma: 0.325, points: [[-0, -0.741094], [0.641806, -0.370547], [0.641806, 0.370547], [0, 0.741094], [-0.641806, 0.370547], [-0.641806, -0.370547]] },
  { sigma: 0.25,  points: [[-0.595502, 0], [-0.297751, -0.51572], [0.297751, -0.51572], [0.595502, -0], [0.297751, 0.51572], [-0.297751, 0.51572]] },
  { sigma: 0.175, points: [[0, 0.362783], [-0.314179, 0.181391], [-0.314179, -0.181391], [-0, -0.362783], [0.314179, -0.181391], [0.314179, 0.181391]] },
  { sigma: 0.1,   points: [[0, 0]] },
];

// Flatten the rings into [sigma, x, y] triples, ring order preserved,
// which is the shape the descriptor sampler consumes.
const i = [];
for (const ring of s) {
  for (const [x, y] of ring.points) {
    i.push([ring.sigma, x, y]);
  }
}

export { i as FREAKPOINTS };
|
|
@@ -1,16 +1 @@
|
|
|
1
|
-
import
|
|
2
|
-
/**
|
|
3
|
-
* 🚀 MOONSHOT: Direct PnP Solver for AR
|
|
4
|
-
*
|
|
5
|
-
* Instead of estimating a 2D Homography and decomposing it,
|
|
6
|
-
* we solve for the 3D Pose [R|t] directly using the
|
|
7
|
-
* Perspective-n-Point algorithm.
|
|
8
|
-
*/
|
|
9
|
-
const estimate = ({ screenCoords, worldCoords, projectionTransform }) => {
|
|
10
|
-
return solvePosePnP({
|
|
11
|
-
screenCoords,
|
|
12
|
-
worldCoords,
|
|
13
|
-
projectionTransform
|
|
14
|
-
});
|
|
15
|
-
};
|
|
16
|
-
export { estimate };
|
|
1
|
+
// Pose estimation entry point: instead of estimating a 2D homography and
// decomposing it, delegate directly to the Perspective-n-Point solver,
// which recovers the 3D pose [R|t] from screen/world correspondences.
import { solvePosePnP } from "./pnp-solver.js";

const estimate = ({ screenCoords, worldCoords, projectionTransform }) =>
  solvePosePnP({ screenCoords, worldCoords, projectionTransform });

export { estimate };
|
|
@@ -1,30 +1 @@
|
|
|
1
|
-
import
|
|
2
|
-
import { refineEstimate } from "./refine-estimate.js";
|
|
3
|
-
class Estimator {
|
|
4
|
-
constructor(projectionTransform) {
|
|
5
|
-
this.projectionTransform = projectionTransform;
|
|
6
|
-
}
|
|
7
|
-
// Solve homography between screen points and world points using Direct Linear Transformation
|
|
8
|
-
// then decompose homography into rotation and translation matrix (i.e. modelViewTransform)
|
|
9
|
-
estimate({ screenCoords, worldCoords }) {
|
|
10
|
-
const modelViewTransform = estimate({
|
|
11
|
-
screenCoords,
|
|
12
|
-
worldCoords,
|
|
13
|
-
projectionTransform: this.projectionTransform,
|
|
14
|
-
});
|
|
15
|
-
return modelViewTransform;
|
|
16
|
-
}
|
|
17
|
-
// Given an initial guess of the modelViewTransform and new pairs of screen-world coordinates,
|
|
18
|
-
// use Iterative Closest Point to refine the transformation
|
|
19
|
-
//refineEstimate({initialModelViewTransform, screenCoords, worldCoords}) {
|
|
20
|
-
refineEstimate({ initialModelViewTransform, worldCoords, screenCoords }) {
|
|
21
|
-
const updatedModelViewTransform = refineEstimate({
|
|
22
|
-
initialModelViewTransform,
|
|
23
|
-
worldCoords,
|
|
24
|
-
screenCoords,
|
|
25
|
-
projectionTransform: this.projectionTransform,
|
|
26
|
-
});
|
|
27
|
-
return updatedModelViewTransform;
|
|
28
|
-
}
|
|
29
|
-
}
|
|
30
|
-
export { Estimator };
|
|
1
|
+
// Pose-estimation facade: captures the camera projection transform once,
// then forwards coordinate correspondences to the standalone solver and
// refinement routines.
import { estimate } from "./estimate.js";
import { refineEstimate } from "./refine-estimate.js";

class Estimator {
  constructor(projectionTransform) {
    this.projectionTransform = projectionTransform;
  }

  // Solve screen/world correspondences for an initial modelViewTransform.
  estimate({ screenCoords, worldCoords }) {
    return estimate({
      screenCoords,
      worldCoords,
      projectionTransform: this.projectionTransform,
    });
  }

  // Refine an initial modelViewTransform against new correspondence pairs.
  refineEstimate({ initialModelViewTransform, worldCoords, screenCoords }) {
    return refineEstimate({
      initialModelViewTransform,
      worldCoords,
      screenCoords,
      projectionTransform: this.projectionTransform,
    });
  }
}

export { Estimator };
|