@srsergio/taptapp-ar 1.0.92 → 1.0.94

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/README.md +16 -14
  2. package/dist/compiler/offline-compiler.d.ts +3 -3
  3. package/dist/compiler/offline-compiler.js +50 -33
  4. package/dist/core/constants.d.ts +2 -0
  5. package/dist/core/constants.js +4 -1
  6. package/dist/core/detector/detector-lite.d.ts +6 -5
  7. package/dist/core/detector/detector-lite.js +46 -16
  8. package/dist/core/image-list.d.ts +24 -6
  9. package/dist/core/image-list.js +4 -4
  10. package/dist/core/matching/matcher.d.ts +1 -1
  11. package/dist/core/matching/matcher.js +7 -4
  12. package/dist/core/matching/matching.d.ts +2 -1
  13. package/dist/core/matching/matching.js +43 -11
  14. package/dist/core/perception/bio-inspired-engine.d.ts +130 -0
  15. package/dist/core/perception/bio-inspired-engine.js +232 -0
  16. package/dist/core/perception/foveal-attention.d.ts +142 -0
  17. package/dist/core/perception/foveal-attention.js +280 -0
  18. package/dist/core/perception/index.d.ts +6 -0
  19. package/dist/core/perception/index.js +17 -0
  20. package/dist/core/perception/predictive-coding.d.ts +92 -0
  21. package/dist/core/perception/predictive-coding.js +278 -0
  22. package/dist/core/perception/saccadic-controller.d.ts +126 -0
  23. package/dist/core/perception/saccadic-controller.js +269 -0
  24. package/dist/core/perception/saliency-map.d.ts +74 -0
  25. package/dist/core/perception/saliency-map.js +254 -0
  26. package/dist/core/perception/scale-orchestrator.d.ts +28 -0
  27. package/dist/core/perception/scale-orchestrator.js +68 -0
  28. package/dist/core/protocol.d.ts +14 -1
  29. package/dist/core/protocol.js +33 -1
  30. package/dist/runtime/bio-inspired-controller.d.ts +135 -0
  31. package/dist/runtime/bio-inspired-controller.js +358 -0
  32. package/dist/runtime/controller.d.ts +11 -2
  33. package/dist/runtime/controller.js +20 -8
  34. package/dist/runtime/controller.worker.js +2 -2
  35. package/package.json +1 -1
  36. package/src/compiler/offline-compiler.ts +56 -36
  37. package/src/core/constants.ts +5 -1
  38. package/src/core/detector/detector-lite.js +46 -16
  39. package/src/core/image-list.js +4 -4
  40. package/src/core/matching/matcher.js +8 -4
  41. package/src/core/matching/matching.js +51 -12
  42. package/src/core/perception/bio-inspired-engine.js +275 -0
  43. package/src/core/perception/foveal-attention.js +306 -0
  44. package/src/core/perception/index.js +18 -0
  45. package/src/core/perception/predictive-coding.js +327 -0
  46. package/src/core/perception/saccadic-controller.js +303 -0
  47. package/src/core/perception/saliency-map.js +296 -0
  48. package/src/core/perception/scale-orchestrator.js +80 -0
  49. package/src/core/protocol.ts +38 -1
  50. package/src/runtime/bio-inspired-controller.ts +448 -0
  51. package/src/runtime/controller.ts +22 -7
  52. package/src/runtime/controller.worker.js +2 -1
@@ -1,6 +1,6 @@
1
1
  import * as msgpack from "@msgpack/msgpack";
2
2
 
3
- export const CURRENT_VERSION = 9; // Bumped for HDC support
3
+ export const CURRENT_VERSION = 11; // Bumped for Nanite virtualized features support
4
4
  export const HDC_SEED = 0x1337BEEF; // Default system seed
5
5
 
6
6
  /**
@@ -104,6 +104,43 @@ export function columnarize(points: any[], tree: any, width: number, height: num
104
104
  };
105
105
  }
106
106
 
107
+ /**
108
+ * Columnarizes point data with COMPACT 32-bit descriptors (XOR folding)
109
+ * Reduces descriptor storage by 50% with minimal accuracy loss
110
+ */
111
+ export function columnarizeCompact(points: any[], tree: any, width: number, height: number) {
112
+ const count = points.length;
113
+ const x = new Uint16Array(count);
114
+ const y = new Uint16Array(count);
115
+ const angle = new Int16Array(count);
116
+ const scale = new Uint8Array(count);
117
+ const descriptors = new Uint32Array(count); // 32-bit compact descriptors
118
+
119
+ for (let i = 0; i < count; i++) {
120
+ x[i] = Math.round((points[i].x / width) * 65535);
121
+ y[i] = Math.round((points[i].y / height) * 65535);
122
+ angle[i] = Math.round((points[i].angle / Math.PI) * 32767);
123
+ scale[i] = Math.round(Math.log2(points[i].scale || 1));
124
+
125
+ if (points[i].descriptors && points[i].descriptors.length >= 2) {
126
+ // XOR folding: Combine two 32-bit values into one 32-bit value
127
+ // This preserves discriminative power while halving storage
128
+ descriptors[i] = (points[i].descriptors[0] ^ points[i].descriptors[1]) >>> 0;
129
+ }
130
+ }
131
+
132
+ return {
133
+ x,
134
+ y,
135
+ a: angle,
136
+ s: scale,
137
+ d: descriptors,
138
+ compact: 1, // Flag to indicate compact 32-bit descriptors
139
+ t: compactTree(tree.rootNode),
140
+ };
141
+ }
142
+
143
+
107
144
  /**
108
145
  * Compacts hierarchical clustering tree into a minimal array structure
109
146
  */
@@ -0,0 +1,448 @@
1
+ /**
2
+ * Bio-Inspired Controller Adapter
3
+ *
4
+ * Wraps the standard Controller with Bio-Inspired Perception capabilities.
5
+ * Provides significant performance improvements while maintaining API compatibility.
6
+ *
7
+ * Key features:
8
+ * - Foveal attention: Processes only regions of interest at full resolution
9
+ * - Predictive coding: Skips processing when scene is static
10
+ * - Saccadic sampling: Strategic "glances" at high-saliency regions
11
+ *
12
+ * Usage:
13
+ * ```javascript
14
+ * import { BioInspiredController } from './bio-inspired-controller.js';
15
+ *
16
+ * const controller = new BioInspiredController({
17
+ * inputWidth: 640,
18
+ * inputHeight: 480,
19
+ * onUpdate: (data) => console.log(data),
20
+ * bioInspired: {
21
+ * enabled: true,
22
+ * aggressiveSkipping: true,
23
+ * }
24
+ * });
25
+ * ```
26
+ */
27
+
28
+ import { Controller, ControllerOptions } from './controller.js';
29
+ import { BioInspiredEngine, BIO_CONFIG } from '../core/perception/index.js';
30
+ import { AutoRotationFeature } from '../core/features/auto-rotation-feature.js';
31
+
32
+ /**
33
+ * Result from BioInspiredEngine.process()
34
+ */
35
+ interface BioResult {
36
+ skipped: boolean;
37
+ prediction?: {
38
+ worldMatrix?: Float32Array;
39
+ };
40
+ attentionRegions?: any[];
41
+ foveaCenter?: { x: number; y: number };
42
+ pixelsSaved?: number;
43
+ octavesToProcess?: number[]; // Added for scale orchestration
44
+ }
45
+
46
+ /**
47
+ * Extended options for Bio-Inspired Controller
48
+ */
49
+ export interface BioInspiredControllerOptions extends ControllerOptions {
50
+ bioInspired?: {
51
+ enabled?: boolean;
52
+ aggressiveSkipping?: boolean;
53
+ foveaRadiusRatio?: number;
54
+ maxSaccades?: number;
55
+ };
56
+ }
57
+
58
+ /**
59
+ * Bio-Inspired Controller
60
+ *
61
+ * Extends the standard Controller with bio-inspired perception capabilities.
62
+ */
63
+ class BioInspiredController extends Controller {
64
+ private bioEngine: BioInspiredEngine | null = null;
65
+ private bioEnabled: boolean = true;
66
+ private bioMetricsInterval: number | null = null;
67
+ private lastBioResult: any = null;
68
+
69
+ constructor(options: BioInspiredControllerOptions) {
70
+ super(options);
71
+
72
+ const bioOptions = options.bioInspired || {};
73
+ this.bioEnabled = bioOptions.enabled !== false;
74
+
75
+ if (this.bioEnabled) {
76
+ // Initialize Bio-Inspired Engine
77
+ const bioConfig: any = {};
78
+
79
+ if (bioOptions.foveaRadiusRatio !== undefined) {
80
+ bioConfig.FOVEA_RADIUS_RATIO = bioOptions.foveaRadiusRatio;
81
+ }
82
+ if (bioOptions.maxSaccades !== undefined) {
83
+ bioConfig.MAX_SACCADES_PER_FRAME = bioOptions.maxSaccades;
84
+ }
85
+ if (bioOptions.aggressiveSkipping !== undefined) {
86
+ bioConfig.ENABLE_SKIP_FRAMES = bioOptions.aggressiveSkipping;
87
+ if (bioOptions.aggressiveSkipping) {
88
+ bioConfig.CHANGE_THRESHOLD = 0.03; // More aggressive
89
+ }
90
+ }
91
+
92
+ this.bioEngine = new BioInspiredEngine(
93
+ options.inputWidth,
94
+ options.inputHeight,
95
+ bioConfig
96
+ );
97
+ }
98
+ }
99
+
100
+ /**
101
+ * Override processVideo to add bio-inspired perception
102
+ */
103
+ processVideo(input: any) {
104
+ if (!this.bioEnabled || !this.bioEngine) {
105
+ return super.processVideo(input);
106
+ }
107
+
108
+ if (this.processingVideo) return;
109
+ this.processingVideo = true;
110
+
111
+ // Reset tracking states
112
+ this.trackingStates = [];
113
+ for (let i = 0; i < (this.markerDimensions?.length || 0); i++) {
114
+ this.trackingStates.push({
115
+ showing: false,
116
+ isTracking: false,
117
+ currentModelViewTransform: null,
118
+ trackCount: 0,
119
+ trackMiss: 0,
120
+ });
121
+ }
122
+
123
+ const startProcessing = async () => {
124
+ while (this.processingVideo) {
125
+ const inputData = this.inputLoader.loadInput(input);
126
+
127
+ // Get current tracking state for bio engine
128
+ const activeTracking = this.trackingStates.find(s => s.isTracking);
129
+ const trackingState = activeTracking ? {
130
+ isTracking: true,
131
+ activeOctave: activeTracking.lastOctaveIndex, // Tracked octave index
132
+ worldMatrix: activeTracking.currentModelViewTransform
133
+ ? this._flattenMatrix(activeTracking.currentModelViewTransform)
134
+ : null
135
+ } : null;
136
+
137
+ // Process through bio-inspired engine
138
+ const bioResult = this.bioEngine!.process(inputData, (trackingState as any) || undefined) as BioResult;
139
+ this.lastBioResult = bioResult;
140
+
141
+ // If bio engine says we can skip, use prediction
142
+ if (bioResult.skipped && activeTracking?.isTracking) {
143
+ // Use predicted state
144
+ this._handleSkippedFrame(activeTracking, bioResult);
145
+ } else {
146
+ // Normal processing with attention regions
147
+ await this._processWithAttention(input, inputData, bioResult);
148
+ }
149
+
150
+ // Wait for next frame
151
+ if (typeof requestAnimationFrame !== 'undefined') {
152
+ await new Promise(requestAnimationFrame);
153
+ } else {
154
+ await new Promise(resolve => setTimeout(resolve, 16));
155
+ }
156
+ }
157
+ };
158
+
159
+ startProcessing();
160
+ }
161
+
162
+ /**
163
+ * Handle a skipped frame using prediction
164
+ * @private
165
+ */
166
+ private _handleSkippedFrame(trackingState: any, bioResult: any) {
167
+ // Use predicted matrix
168
+ if (bioResult.prediction && bioResult.prediction.worldMatrix) {
169
+ trackingState.currentModelViewTransform = this._unflattenMatrix(bioResult.prediction.worldMatrix);
170
+ }
171
+
172
+ // Notify with skipped status
173
+ const worldMatrix = trackingState.currentModelViewTransform
174
+ ? this._glModelViewMatrix(trackingState.currentModelViewTransform, 0)
175
+ : null;
176
+
177
+ this.onUpdate?.({
178
+ type: 'updateMatrix',
179
+ targetIndex: 0,
180
+ worldMatrix: worldMatrix ? this.featureManager.applyWorldMatrixFilters(0, worldMatrix, { stability: 0.9 }) : null,
181
+ skipped: true,
182
+ bioMetrics: this.bioEngine?.getMetrics(),
183
+ });
184
+
185
+ this.onUpdate?.({ type: 'processDone' });
186
+ }
187
+
188
+ /**
189
+ * Process frame using bio-inspired attention regions
190
+ * @private
191
+ */
192
+ private async _processWithAttention(input: any, inputData: Uint8Array, bioResult: BioResult) {
193
+ const nTracking = this.trackingStates.reduce((acc, s) => acc + (s.isTracking ? 1 : 0), 0);
194
+
195
+ // Detection phase - use primary attention region for efficiency
196
+ if (nTracking < this.maxTrack) {
197
+ const matchingIndexes = this.trackingStates
198
+ .map((s, i) => ({ state: s, index: i }))
199
+ .filter(({ state, index }) =>
200
+ !state.isTracking &&
201
+ (this.interestedTargetIndex === -1 || this.interestedTargetIndex === index)
202
+ )
203
+ .map(({ index }) => index);
204
+
205
+ if (matchingIndexes.length > 0) {
206
+ // Use full input for detection (bio engine already optimized upstream processing)
207
+ const { targetIndex: matchedTargetIndex, modelViewTransform, featurePoints } =
208
+ await this._detectAndMatch(inputData, matchingIndexes, bioResult.octavesToProcess || null);
209
+
210
+ if (matchedTargetIndex !== -1) {
211
+ this.trackingStates[matchedTargetIndex].isTracking = true;
212
+ this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
213
+
214
+ // Update bio engine fovea to focus on detected target
215
+ if (bioResult.attentionRegions?.[0]) {
216
+ this.bioEngine?.reset();
217
+ }
218
+ }
219
+
220
+ this.onUpdate?.({ type: 'featurePoints', featurePoints });
221
+ }
222
+ }
223
+
224
+ // Tracking phase
225
+ for (let i = 0; i < this.trackingStates.length; i++) {
226
+ const trackingState = this.trackingStates[i];
227
+
228
+ if (trackingState.isTracking) {
229
+ const result = await this._trackAndUpdate(
230
+ inputData,
231
+ trackingState.currentModelViewTransform,
232
+ i
233
+ );
234
+
235
+ if (!result || !result.modelViewTransform) {
236
+ trackingState.isTracking = false;
237
+ trackingState.screenCoords = result?.screenCoords || [];
238
+ trackingState.reliabilities = result?.reliabilities || [];
239
+ trackingState.stabilities = result?.stabilities || [];
240
+ } else {
241
+ trackingState.currentModelViewTransform = result.modelViewTransform;
242
+ trackingState.screenCoords = result.screenCoords;
243
+ trackingState.reliabilities = result.reliabilities;
244
+ trackingState.stabilities = result.stabilities;
245
+ (trackingState as any).deformedMesh = result.deformedMesh;
246
+ }
247
+ }
248
+
249
+ const wasShowing = trackingState.showing;
250
+ trackingState.showing = this.featureManager.shouldShow(i, trackingState.isTracking);
251
+
252
+ if (wasShowing && !trackingState.showing) {
253
+ trackingState.trackingMatrix = null;
254
+ this.featureManager.notifyUpdate({ type: 'reset', targetIndex: i });
255
+ }
256
+
257
+ // Emit update
258
+ if (trackingState.showing || trackingState.screenCoords?.length > 0 || (wasShowing && !trackingState.showing)) {
259
+ const worldMatrix = trackingState.showing
260
+ ? this._glModelViewMatrix(trackingState.currentModelViewTransform, i)
261
+ : null;
262
+
263
+ let finalMatrix = null;
264
+ if (worldMatrix) {
265
+ const stabilities = trackingState.stabilities || [];
266
+ const avgStability = stabilities.length > 0
267
+ ? stabilities.reduce((a: number, b: number) => a + b, 0) / stabilities.length
268
+ : 0;
269
+
270
+ finalMatrix = this.featureManager.applyWorldMatrixFilters(i, worldMatrix, { stability: avgStability });
271
+ trackingState.trackingMatrix = finalMatrix;
272
+
273
+ const isInputRotated = input.width === this.inputHeight && input.height === this.inputWidth;
274
+ if (isInputRotated) {
275
+ const rotationFeature = this.featureManager.getFeature<AutoRotationFeature>('auto-rotation');
276
+ if (rotationFeature) {
277
+ finalMatrix = rotationFeature.rotate(finalMatrix);
278
+ }
279
+ }
280
+ }
281
+
282
+ this.onUpdate?.({
283
+ type: 'updateMatrix',
284
+ targetIndex: i,
285
+ worldMatrix: finalMatrix,
286
+ modelViewTransform: trackingState.currentModelViewTransform,
287
+ screenCoords: trackingState.screenCoords,
288
+ reliabilities: trackingState.reliabilities,
289
+ stabilities: trackingState.stabilities,
290
+ deformedMesh: (trackingState as any).deformedMesh,
291
+ bioMetrics: this.bioEngine?.getMetrics(),
292
+ foveaCenter: bioResult.foveaCenter,
293
+ pixelsSaved: bioResult.pixelsSaved,
294
+ });
295
+ }
296
+ }
297
+
298
+ this.onUpdate?.({ type: 'processDone' });
299
+ }
300
+
301
+ /**
302
+ * Detect and match features, optionally limited to specific octaves
303
+ */
304
+ async _detectAndMatch(inputData: any, targetIndexes: number[], octavesToProcess: number[] | null = null) {
305
+ // 🚀 NANITE-STYLE: Estimate scale for filtered matching
306
+ let predictedScale: number | undefined = undefined;
307
+ for (const state of this.trackingStates) {
308
+ if (state.isTracking && state.currentModelViewTransform) {
309
+ const m = state.currentModelViewTransform;
310
+ predictedScale = Math.sqrt(m[0][0] ** 2 + m[1][0] ** 2 + m[2][0] ** 2);
311
+ break;
312
+ }
313
+ }
314
+
315
+ const { targetIndex, modelViewTransform, screenCoords, worldCoords, featurePoints } = await this._workerMatch(
316
+ null, // No feature points, worker will detect from inputData
317
+ targetIndexes,
318
+ inputData,
319
+ predictedScale,
320
+ octavesToProcess
321
+ );
322
+ return { targetIndex, modelViewTransform, screenCoords, worldCoords, featurePoints };
323
+ }
324
+
325
+ /**
326
+ * Communicate with worker for matching phase
327
+ */
328
+ _workerMatch(featurePoints: any, targetIndexes: number[], inputData: any = null, expectedScale?: number, octavesToProcess: number[] | null = null): Promise<any> {
329
+ return new Promise((resolve) => {
330
+ if (!this.worker) {
331
+ // If no feature points but we have input data, detect first
332
+ let fpPromise;
333
+ if (!featurePoints && inputData) {
334
+ fpPromise = Promise.resolve(this.fullDetector!.detect(inputData, { octavesToProcess }).featurePoints);
335
+ } else {
336
+ fpPromise = Promise.resolve(featurePoints);
337
+ }
338
+
339
+ fpPromise.then(fp => {
340
+ this._matchOnMainThread(fp, targetIndexes, expectedScale).then(resolve);
341
+ }).catch(() => resolve({ targetIndex: -1 }));
342
+ return;
343
+ }
344
+
345
+ const timeout = setTimeout(() => {
346
+ (this as any).workerMatchDone = null;
347
+ resolve({ targetIndex: -1 });
348
+ }, 1000);
349
+
350
+ (this as any).workerMatchDone = (data: any) => {
351
+ clearTimeout(timeout);
352
+ (this as any).workerMatchDone = null;
353
+ resolve(data);
354
+ };
355
+
356
+ if (inputData) {
357
+ this.worker.postMessage({ type: "match", inputData, targetIndexes, octavesToProcess, expectedScale });
358
+ } else {
359
+ this.worker.postMessage({ type: "match", featurePoints: featurePoints, targetIndexes, expectedScale });
360
+ }
361
+ });
362
+ }
363
+
364
+ /**
365
+ * Override _trackAndUpdate to capture active octave for the next frame's orchestration
366
+ */
367
+ async _trackAndUpdate(inputData: any, lastModelViewTransform: number[][], targetIndex: number) {
368
+ const result = await super._trackAndUpdate(inputData, lastModelViewTransform, targetIndex);
369
+ if (result && (result as any).octaveIndex !== undefined) {
370
+ this.trackingStates[targetIndex].lastOctaveIndex = (result as any).octaveIndex;
371
+ }
372
+ return result;
373
+ }
374
+
375
+ /**
376
+ * Flatten a 3x4 matrix to Float32Array
377
+ * @private
378
+ */
379
+ private _flattenMatrix(matrix: number[][]): Float32Array {
380
+ const result = new Float32Array(16);
381
+ for (let i = 0; i < 3; i++) {
382
+ for (let j = 0; j < 4; j++) {
383
+ result[i * 4 + j] = matrix[i][j];
384
+ }
385
+ }
386
+ result[12] = 0;
387
+ result[13] = 0;
388
+ result[14] = 0;
389
+ result[15] = 1;
390
+ return result;
391
+ }
392
+
393
+ /**
394
+ * Unflatten Float32Array to 3x4 matrix
395
+ * @private
396
+ */
397
+ private _unflattenMatrix(flat: Float32Array): number[][] {
398
+ return [
399
+ [flat[0], flat[1], flat[2], flat[3]],
400
+ [flat[4], flat[5], flat[6], flat[7]],
401
+ [flat[8], flat[9], flat[10], flat[11]],
402
+ ];
403
+ }
404
+
405
+ /**
406
+ * Get bio-inspired engine metrics
407
+ */
408
+ getBioMetrics() {
409
+ return this.bioEngine?.getMetrics() || null;
410
+ }
411
+
412
+ /**
413
+ * Get last bio processing result
414
+ */
415
+ getLastBioResult() {
416
+ return this.lastBioResult;
417
+ }
418
+
419
+ /**
420
+ * Enable/disable bio-inspired processing dynamically
421
+ */
422
+ setBioEnabled(enabled: boolean) {
423
+ this.bioEnabled = enabled;
424
+ if (enabled && !this.bioEngine) {
425
+ this.bioEngine = new BioInspiredEngine(this.inputWidth, this.inputHeight);
426
+ }
427
+ }
428
+
429
+ /**
430
+ * Configure bio-inspired engine at runtime
431
+ */
432
+ configureBio(options: Partial<typeof BIO_CONFIG>) {
433
+ this.bioEngine?.configure(options);
434
+ }
435
+
436
+ /**
437
+ * Override dispose to clean up bio engine
438
+ */
439
+ dispose() {
440
+ super.dispose();
441
+ this.bioEngine = null;
442
+ if (this.bioMetricsInterval) {
443
+ clearInterval(this.bioMetricsInterval);
444
+ }
445
+ }
446
+ }
447
+
448
+ export { BioInspiredController };
@@ -257,10 +257,23 @@ class Controller {
257
257
  }
258
258
 
259
259
  async _detectAndMatch(inputData: any, targetIndexes: number[]) {
260
+ // 🚀 NANITE-STYLE: Estimate scale for filtered matching
261
+ // If we were already tracking a target, use its scale as a hint for faster matching
262
+ let predictedScale: number | undefined = undefined;
263
+ for (const state of this.trackingStates) {
264
+ if (state.isTracking && state.currentModelViewTransform) {
265
+ const m = state.currentModelViewTransform;
266
+ // Vector magnitude of the first column is a good approximation of the scale
267
+ predictedScale = Math.sqrt(m[0][0] ** 2 + m[1][0] ** 2 + m[2][0] ** 2);
268
+ break;
269
+ }
270
+ }
271
+
260
272
  const { targetIndex, modelViewTransform, screenCoords, worldCoords, featurePoints } = await this._workerMatch(
261
273
  null, // No feature points, worker will detect from inputData
262
274
  targetIndexes,
263
- inputData
275
+ inputData,
276
+ predictedScale
264
277
  );
265
278
  return { targetIndex, modelViewTransform, screenCoords, worldCoords, featurePoints };
266
279
  }
@@ -361,7 +374,8 @@ class Controller {
361
374
  screenCoords: finalScreenCoords,
362
375
  reliabilities: finalReliabilities,
363
376
  stabilities: finalStabilities,
364
- deformedMesh
377
+ deformedMesh,
378
+ octaveIndex // Pass this up for the orchestrator
365
379
  };
366
380
  }
367
381
 
@@ -521,7 +535,7 @@ class Controller {
521
535
  return this._workerTrackUpdate(modelViewTransform, trackFeatures);
522
536
  }
523
537
 
524
- _workerMatch(featurePoints: any, targetIndexes: number[], inputData: any = null): Promise<any> {
538
+ _workerMatch(featurePoints: any, targetIndexes: number[], inputData: any = null, expectedScale?: number): Promise<any> {
525
539
  return new Promise((resolve) => {
526
540
  if (!this.worker) {
527
541
  // If no feature points but we have input data, detect first
@@ -533,7 +547,7 @@ class Controller {
533
547
  }
534
548
 
535
549
  fpPromise.then(fp => {
536
- this._matchOnMainThread(fp, targetIndexes).then(resolve);
550
+ this._matchOnMainThread(fp, targetIndexes, expectedScale).then(resolve);
537
551
  }).catch(() => resolve({ targetIndex: -1 }));
538
552
  return;
539
553
  }
@@ -557,9 +571,9 @@ class Controller {
557
571
  };
558
572
 
559
573
  if (inputData) {
560
- this.worker.postMessage({ type: "match", inputData, targetIndexes });
574
+ this.worker.postMessage({ type: "match", inputData, targetIndexes, expectedScale });
561
575
  } else {
562
- this.worker.postMessage({ type: "match", featurePoints: featurePoints, targetIndexes });
576
+ this.worker.postMessage({ type: "match", featurePoints: featurePoints, targetIndexes, expectedScale });
563
577
  }
564
578
  });
565
579
  }
@@ -591,7 +605,7 @@ class Controller {
591
605
  });
592
606
  }
593
607
 
594
- async _matchOnMainThread(featurePoints: any, targetIndexes: number[]) {
608
+ async _matchOnMainThread(featurePoints: any, targetIndexes: number[], expectedScale?: number) {
595
609
  if (!this.mainThreadMatcher) {
596
610
  const { Matcher } = await import("../core/matching/matcher.js");
597
611
  const { Estimator } = await import("../core/estimation/estimator.js");
@@ -610,6 +624,7 @@ class Controller {
610
624
  const { keyframeIndex, screenCoords, worldCoords, debugExtra } = this.mainThreadMatcher.matchDetection(
611
625
  this.matchingDataList[matchingIndex],
612
626
  featurePoints,
627
+ expectedScale
613
628
  );
614
629
  matchedDebugExtra = debugExtra;
615
630
 
@@ -49,7 +49,7 @@ onmessage = (msg) => {
49
49
  // New: If the worker received image data, run detector here too
50
50
  let featurePoints = data.featurePoints;
51
51
  if (data.inputData) {
52
- const detectionResult = detector.detect(data.inputData);
52
+ const detectionResult = detector.detect(data.inputData, { octavesToProcess: data.octavesToProcess });
53
53
  featurePoints = detectionResult.featurePoints;
54
54
  }
55
55
 
@@ -59,6 +59,7 @@ onmessage = (msg) => {
59
59
  const { keyframeIndex, screenCoords, worldCoords, debugExtra } = matcher.matchDetection(
60
60
  matchingDataList[matchingIndex],
61
61
  featurePoints,
62
+ data.expectedScale
62
63
  );
63
64
  matchedDebugExtra = debugExtra;
64
65