@srsergio/taptapp-ar 1.0.101 → 1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/dist/compiler/node-worker.js +1 -197
  2. package/dist/compiler/offline-compiler.js +1 -207
  3. package/dist/core/constants.js +1 -38
  4. package/dist/core/detector/crop-detector.js +1 -88
  5. package/dist/core/detector/detector-lite.js +1 -455
  6. package/dist/core/detector/freak.js +1 -89
  7. package/dist/core/estimation/estimate.js +1 -16
  8. package/dist/core/estimation/estimator.js +1 -30
  9. package/dist/core/estimation/morph-refinement.js +1 -116
  10. package/dist/core/estimation/non-rigid-refine.js +1 -70
  11. package/dist/core/estimation/pnp-solver.js +1 -109
  12. package/dist/core/estimation/refine-estimate.js +1 -311
  13. package/dist/core/estimation/utils.js +1 -67
  14. package/dist/core/features/auto-rotation-feature.js +1 -30
  15. package/dist/core/features/crop-detection-feature.js +1 -26
  16. package/dist/core/features/feature-base.js +1 -1
  17. package/dist/core/features/feature-manager.js +1 -55
  18. package/dist/core/features/one-euro-filter-feature.js +1 -44
  19. package/dist/core/features/temporal-filter-feature.js +1 -57
  20. package/dist/core/image-list.js +1 -54
  21. package/dist/core/input-loader.js +1 -87
  22. package/dist/core/matching/hamming-distance.js +1 -66
  23. package/dist/core/matching/hdc.js +1 -102
  24. package/dist/core/matching/hierarchical-clustering.js +1 -130
  25. package/dist/core/matching/hough.js +1 -170
  26. package/dist/core/matching/matcher.js +1 -66
  27. package/dist/core/matching/matching.js +1 -401
  28. package/dist/core/matching/ransacHomography.js +1 -132
  29. package/dist/core/perception/bio-inspired-engine.js +1 -232
  30. package/dist/core/perception/foveal-attention.js +1 -280
  31. package/dist/core/perception/index.js +1 -17
  32. package/dist/core/perception/predictive-coding.js +1 -278
  33. package/dist/core/perception/saccadic-controller.js +1 -269
  34. package/dist/core/perception/saliency-map.js +1 -254
  35. package/dist/core/perception/scale-orchestrator.js +1 -68
  36. package/dist/core/protocol.js +1 -254
  37. package/dist/core/tracker/extract-utils.js +1 -29
  38. package/dist/core/tracker/extract.js +1 -306
  39. package/dist/core/tracker/tracker.js +1 -352
  40. package/dist/core/utils/cumsum.js +1 -37
  41. package/dist/core/utils/delaunay.js +1 -125
  42. package/dist/core/utils/geometry.js +1 -101
  43. package/dist/core/utils/gpu-compute.js +1 -231
  44. package/dist/core/utils/homography.js +1 -138
  45. package/dist/core/utils/images.js +1 -108
  46. package/dist/core/utils/lsh-binarizer.js +1 -37
  47. package/dist/core/utils/lsh-direct.js +1 -76
  48. package/dist/core/utils/projection.js +1 -51
  49. package/dist/core/utils/randomizer.js +1 -25
  50. package/dist/core/utils/worker-pool.js +1 -89
  51. package/dist/index.js +1 -7
  52. package/dist/libs/one-euro-filter.js +1 -70
  53. package/dist/react/TaptappAR.js +1 -151
  54. package/dist/react/types.js +1 -16
  55. package/dist/react/use-ar.js +1 -118
  56. package/dist/runtime/aframe.js +1 -272
  57. package/dist/runtime/bio-inspired-controller.js +1 -358
  58. package/dist/runtime/controller.js +1 -592
  59. package/dist/runtime/controller.worker.js +1 -93
  60. package/dist/runtime/index.js +1 -5
  61. package/dist/runtime/three.js +1 -304
  62. package/dist/runtime/track.js +1 -381
  63. package/package.json +9 -3
@@ -1,592 +1 @@
1
import { Tracker } from "../core/tracker/tracker.js";
import { InputLoader } from "../core/input-loader.js";
import { FeatureManager } from "../core/features/feature-manager.js";
import { OneEuroFilterFeature } from "../core/features/one-euro-filter-feature.js";
import { TemporalFilterFeature } from "../core/features/temporal-filter-feature.js";
import { AutoRotationFeature } from "../core/features/auto-rotation-feature.js";
import { DetectorLite } from "../core/detector/detector-lite.js";
import * as protocol from "../core/protocol.js";
import { AR_CONFIG } from "../core/constants.js";

/**
 * Resolve the worker constructor, if one can be loaded in this environment.
 *
 * The import is dynamic (and deliberately best-effort) so that bundler-specific
 * specifiers like `?worker&inline` do not crash non-Vite environments; any
 * failure simply yields `null` and the controller falls back to the main
 * thread.
 *
 * @returns {Promise<(new () => Worker) | null>} worker class, or null when
 *   Workers are unavailable or the module cannot be loaded.
 */
const getControllerWorker = async () => {
  if (typeof Worker === 'undefined') {
    return null;
  }
  try {
    // @ts-ignore
    const workerModule = await import("./controller.worker.js?worker&inline");
    return workerModule.default;
  }
  catch (e) {
    // Best-effort: no worker support in this environment.
    return null;
  }
};

// Resolved once at module load (top-level await; this module must be loaded
// as an ES module in an environment that supports it).
let ControllerWorker;
ControllerWorker = await getControllerWorker();

// Filtering / tolerance defaults, sourced from the shared AR configuration.
const DEFAULT_FILTER_CUTOFF = AR_CONFIG.ONE_EURO_FILTER_CUTOFF;
const DEFAULT_FILTER_BETA = AR_CONFIG.ONE_EURO_FILTER_BETA;
const DEFAULT_WARMUP_TOLERANCE = AR_CONFIG.WARMUP_TOLERANCE;
const DEFAULT_MISS_TOLERANCE = AR_CONFIG.MISS_TOLERANCE;

// Worker round-trip budget; a hung worker must not stall the video loop.
const WORKER_TIMEOUT_MS = 1000;

// Monotonic id used to invalidate stale processVideo loops.
let loopIdCounter = 0;
31
/**
 * Orchestrates the AR pipeline: frame loading, target detection/matching,
 * per-target tracking, pose filtering, and GL matrix construction.
 *
 * Heavy work (matching, tracking, pose refinement) is offloaded to a Web
 * Worker when one is available; otherwise lazily-imported main-thread
 * implementations are used. All worker round-trips are guarded by
 * WORKER_TIMEOUT_MS so a hung worker cannot stall the video loop.
 */
class Controller {
    inputWidth;
    inputHeight;
    maxTrack;                    // max number of targets tracked simultaneously
    inputLoader;
    markerDimensions = null;     // [[width, height], ...] per target, set by addImageTargets*
    onUpdate;                    // user callback receiving pipeline events
    debugMode;
    processingVideo = false;
    interestedTargetIndex = -1;  // -1 = consider all targets for matching
    trackingStates = [];         // per-target mutable tracking state
    worker;
    projectionTransform;         // 3x3 pinhole intrinsics
    projectionMatrix;            // column-major 4x4 GL projection matrix
    tracker = null;
    matchingDataList;
    workerMatchDone = null;      // pending worker-reply callbacks (one slot per message type)
    workerTrackDone = null;
    workerFullTrackDone = null;
    mainThreadMatcher;
    mainThreadEstimator;
    featureManager;
    fullDetector = null;
    /**
     * @param {object} options
     * @param {number} options.inputWidth  - input frame width in pixels
     * @param {number} options.inputHeight - input frame height in pixels
     * @param {Function|null} options.onUpdate - event callback ({type, ...})
     * @param {boolean} options.debugMode
     * @param {number} options.maxTrack - max simultaneously tracked targets
     * @param {number|null} options.warmupTolerance - null = AR_CONFIG default
     * @param {number|null} options.missTolerance   - null = AR_CONFIG default
     * @param {number|null} options.filterMinCF     - One-Euro min cutoff; null = default
     * @param {number|null} options.filterBeta      - One-Euro beta; null = default
     * @param {Worker|null} options.worker - externally supplied worker (optional)
     */
    constructor({ inputWidth, inputHeight, onUpdate = null, debugMode = false, maxTrack = 1, warmupTolerance = null, missTolerance = null, filterMinCF = null, filterBeta = null, worker = null, }) {
        this.inputWidth = inputWidth;
        this.inputHeight = inputHeight;
        this.maxTrack = maxTrack;
        this.featureManager = new FeatureManager();
        // Destructuring defaults turn `undefined` into `null`, so ?? is
        // equivalent to the explicit `=== null` checks here.
        this.featureManager.addFeature(new OneEuroFilterFeature(filterMinCF ?? DEFAULT_FILTER_CUTOFF, filterBeta ?? DEFAULT_FILTER_BETA));
        this.featureManager.addFeature(new TemporalFilterFeature(warmupTolerance ?? DEFAULT_WARMUP_TOLERANCE, missTolerance ?? DEFAULT_MISS_TOLERANCE));
        this.featureManager.addFeature(new AutoRotationFeature());
        // User wants "sin recortes" (no cropping), so CropDetectionFeature is not added.
        this.inputLoader = new InputLoader(this.inputWidth, this.inputHeight);
        this.onUpdate = onUpdate;
        this.debugMode = debugMode;
        this.worker = worker;
        if (this.worker)
            this._setupWorkerListener();
        // Full-frame detector for better sensitivity.
        this.fullDetector = new DetectorLite(this.inputWidth, this.inputHeight, {
            useLSH: AR_CONFIG.USE_LSH,
            maxFeaturesPerBucket: AR_CONFIG.MAX_FEATURES_PER_BUCKET
        });
        // Build the pinhole intrinsics first so the feature manager is
        // initialized exactly once with the real projection transform.
        // (Previously init() was called twice, the first time with a
        // placeholder empty transform.)
        const near = AR_CONFIG.DEFAULT_NEAR;
        const far = AR_CONFIG.DEFAULT_FAR;
        const fovy = (AR_CONFIG.DEFAULT_FOVY * Math.PI) / 180;
        const f = this.inputHeight / 2 / Math.tan(fovy / 2);
        this.projectionTransform = [
            [f, 0, this.inputWidth / 2],
            [0, f, this.inputHeight / 2],
            [0, 0, 1],
        ];
        this.featureManager.init({
            inputWidth: this.inputWidth,
            inputHeight: this.inputHeight,
            projectionTransform: this.projectionTransform,
            debugMode: this.debugMode
        });
        this.projectionMatrix = this._glProjectionMatrix({
            projectionTransform: this.projectionTransform,
            width: this.inputWidth,
            height: this.inputHeight,
            near: near,
            far: far,
        });
    }
    /** Wire worker replies to the pending per-message-type callback slots. */
    _setupWorkerListener() {
        if (!this.worker)
            return;
        this.worker.onmessage = (e) => {
            if (e.data.type === "matchDone" && this.workerMatchDone !== null) {
                this.workerMatchDone(e.data);
            }
            if (e.data.type === "trackDone" && this.workerFullTrackDone !== null) {
                this.workerFullTrackDone(e.data);
            }
            if (e.data.type === "trackUpdateDone" && this.workerTrackDone !== null) {
                this.workerTrackDone(e.data);
            }
        };
    }
    /** Lazily create the worker when the module-level constructor resolved. */
    _ensureWorker() {
        if (this.worker)
            return;
        if (ControllerWorker && typeof Worker !== 'undefined') {
            this.worker = new ControllerWorker();
            this._setupWorkerListener();
        }
    }
    /**
     * Fetch one or more .taar files and register them as image targets.
     * @param {string|string[]} fileURLs
     */
    async addImageTargets(fileURLs) {
        const urls = Array.isArray(fileURLs) ? fileURLs : [fileURLs];
        const buffers = await Promise.all(urls.map(async (url) => {
            const response = await fetch(url);
            return response.arrayBuffer();
        }));
        return this.addImageTargetsFromBuffers(buffers);
    }
    /**
     * Decode target buffers, build the tracker, and push setup to the worker.
     * @param {ArrayBuffer[]} buffers
     * @returns {{dimensions: number[][], matchingDataList: any[], trackingDataList: any[]}}
     */
    addImageTargetsFromBuffers(buffers) {
        const allTrackingData = [];
        const allMatchingData = [];
        const allDimensions = [];
        for (const buffer of buffers) {
            const result = protocol.decodeTaar(buffer);
            const dataList = result.dataList || [];
            for (const item of dataList) {
                allMatchingData.push(item.matchingData);
                allTrackingData.push(item.trackingData);
                allDimensions.push([item.targetImage.width, item.targetImage.height]);
            }
        }
        this.tracker = new Tracker(allDimensions, allTrackingData, this.projectionTransform, this.inputWidth, this.inputHeight, this.debugMode);
        this._ensureWorker();
        if (this.worker) {
            this.worker.postMessage({
                type: "setup",
                inputWidth: this.inputWidth,
                inputHeight: this.inputHeight,
                projectionTransform: this.projectionTransform,
                debugMode: this.debugMode,
                matchingDataList: allMatchingData,
                trackingDataList: allTrackingData,
                markerDimensions: allDimensions
            });
        }
        this.markerDimensions = allDimensions;
        this.matchingDataList = allMatchingData;
        return { dimensions: allDimensions, matchingDataList: allMatchingData, trackingDataList: allTrackingData };
    }
    /** Single-buffer convenience wrapper around addImageTargetsFromBuffers. */
    addImageTargetsFromBuffer(buffer) {
        return this.addImageTargetsFromBuffers([buffer]);
    }
    /** Stop processing and release the worker. */
    dispose() {
        this.stopProcessVideo();
        if (this.worker) {
            this.worker.postMessage({ type: "dispose" });
            this.worker = null;
        }
    }
    /**
     * Warm up the detector/tracker pipelines with one throwaway frame.
     * Guarded with ?. so it is safe to call before addImageTargets()
     * (previously `this.tracker.dummyRun` would throw in that case).
     */
    dummyRun(input) {
        const inputData = this.inputLoader.loadInput(input);
        this.fullDetector?.detect(inputData);
        this.tracker?.dummyRun(inputData);
    }
    getProjectionMatrix() {
        return this.projectionMatrix;
    }
    /** Rotate a column-major 4x4 matrix 90 degrees about Z. */
    getRotatedZ90Matrix(m) {
        return [
            -m[1], m[0], m[2], m[3],
            -m[5], m[4], m[6], m[7],
            -m[9], m[8], m[10], m[11],
            -m[13], m[12], m[14], m[15],
        ];
    }
    getWorldMatrix(modelViewTransform, targetIndex) {
        return this._glModelViewMatrix(modelViewTransform, targetIndex);
    }
    /**
     * Detect features in the frame and match them against candidate targets.
     * If a target is already being tracked, its scale is passed as a hint so
     * the matcher can filter by expected scale.
     */
    async _detectAndMatch(inputData, targetIndexes) {
        let predictedScale = undefined;
        for (const state of this.trackingStates) {
            if (state.isTracking && state.currentModelViewTransform) {
                const m = state.currentModelViewTransform;
                // Magnitude of the first column approximates the pose scale.
                predictedScale = Math.sqrt(m[0][0] ** 2 + m[1][0] ** 2 + m[2][0] ** 2);
                break;
            }
        }
        // null feature points: the worker detects from inputData itself.
        const { targetIndex, modelViewTransform, screenCoords, worldCoords, featurePoints } = await this._workerMatch(null, targetIndexes, inputData, predictedScale);
        return { targetIndex, modelViewTransform, screenCoords, worldCoords, featurePoints };
    }
    /**
     * Track one target for the current frame, update per-point stability
     * (fast attack / slow decay), and refine the pose from the stable points.
     *
     * Returns a null modelViewTransform when tracking failed or too few points
     * survive; the screen coords of still-"hibernating" points are returned
     * regardless so the UI can keep showing them.
     */
    async _trackAndUpdate(inputData, lastModelViewTransform, targetIndex) {
        const { worldCoords, screenCoords, reliabilities, indices = [], octaveIndex = 0, deformedMesh } = await this._workerTrack(inputData, lastModelViewTransform, targetIndex);
        if (!worldCoords || worldCoords.length === 0) {
            return { modelViewTransform: null, screenCoords: [], reliabilities: [], stabilities: [], deformedMesh: null };
        }
        const state = this.trackingStates[targetIndex];
        if (!state.pointStabilities)
            state.pointStabilities = [];
        if (!state.lastScreenCoords)
            state.lastScreenCoords = [];
        if (!state.pointStabilities[octaveIndex]) {
            const numPoints = this.tracker.prebuiltData[targetIndex][octaveIndex].px.length;
            state.pointStabilities[octaveIndex] = new Float32Array(numPoints).fill(0);
            state.lastScreenCoords[octaveIndex] = new Array(numPoints).fill(null);
        }
        const stabilities = state.pointStabilities[octaveIndex];
        const lastCoords = state.lastScreenCoords[octaveIndex];
        // Map global point index -> index in the tracker result. O(1) lookups
        // replace the original includes()/indexOf() pairs inside the loops.
        const trackedIdx = new Map();
        for (let r = 0; r < indices.length; r++)
            trackedIdx.set(indices[r], r);
        // Update stability for ALL points in the current octave.
        for (let i = 0; i < stabilities.length; i++) {
            const resultIdx = trackedIdx.get(i);
            if (resultIdx !== undefined) {
                stabilities[i] = Math.min(1.0, stabilities[i] + 0.4); // fast attack
                lastCoords[i] = screenCoords[resultIdx];              // remember last position
            }
            else {
                stabilities[i] = Math.max(0.0, stabilities[i] - 0.08); // slow decay (~12 frames / 0.2s)
            }
        }
        // Collect UI points (tracked AND hibernating), and — in the same pass —
        // the aligned world/screen/stability triples fed to the pose solver.
        // The original rebuilt the solver arrays afterwards via indices[i],
        // which pairs correctly only when `indices` is sorted ascending;
        // collecting here keeps the triples paired unconditionally.
        const finalScreenCoords = [];
        const finalReliabilities = [];
        const finalStabilities = [];
        const solverWorldCoords = [];
        const solverScreenCoords = [];
        const solverStabilities = [];
        for (let i = 0; i < stabilities.length; i++) {
            if (stabilities[i] <= 0)
                continue;
            finalScreenCoords.push({
                x: lastCoords[i].x,
                y: lastCoords[i].y,
                id: i // unique index from tracker
            });
            finalStabilities.push(stabilities[i]);
            const resultIdx = trackedIdx.get(i);
            if (resultIdx !== undefined) {
                finalReliabilities.push(reliabilities[resultIdx]);
                solverWorldCoords.push(worldCoords[resultIdx]);
                solverScreenCoords.push(lastCoords[i]);
                solverStabilities.push(stabilities[i]);
            }
            else {
                finalReliabilities.push(0); // hibernating points have 0 reliability
            }
        }
        // Warmup: during the first 15 frames be more permissive about the
        // minimum number of solver points (4 instead of 5).
        const isWarmup = state.trackCount < 15;
        const minPoints = isWarmup ? 4 : 5;
        if (solverWorldCoords.length < minPoints) {
            return {
                modelViewTransform: null,
                screenCoords: finalScreenCoords,
                reliabilities: finalReliabilities,
                stabilities: finalStabilities
            };
        }
        state.trackCount++;
        const modelViewTransform = await this._workerTrackUpdate(lastModelViewTransform, {
            worldCoords: solverWorldCoords,
            screenCoords: solverScreenCoords,
            stabilities: solverStabilities,
            deformedMesh
        });
        return {
            modelViewTransform,
            screenCoords: finalScreenCoords,
            reliabilities: finalReliabilities,
            stabilities: finalStabilities,
            deformedMesh,
            octaveIndex // passed up for the orchestrator
        };
    }
    /**
     * Start the continuous per-frame loop: match untracked targets, update
     * tracked ones, filter poses, and emit onUpdate events. Idempotent while
     * already running; a loop id invalidates stale ("ghost") loops.
     */
    processVideo(input) {
        if (this.processingVideo)
            return;
        this.processingVideo = true;
        const currentLoopId = ++loopIdCounter; // ghost-loop prevention
        this.trackingStates = [];
        for (let i = 0; i < (this.markerDimensions?.length || 0); i++) {
            this.trackingStates.push({
                showing: false,
                isTracking: false,
                currentModelViewTransform: null,
                trackCount: 0,
                trackMiss: 0,
            });
        }
        const startProcessing = async () => {
            while (true) {
                if (!this.processingVideo || currentLoopId !== loopIdCounter)
                    break;
                const inputData = this.inputLoader.loadInput(input);
                const nTracking = this.trackingStates.reduce((acc, s) => acc + (s.isTracking ? 1 : 0), 0);
                if (nTracking < this.maxTrack) {
                    const matchingIndexes = [];
                    for (let i = 0; i < this.trackingStates.length; i++) {
                        const trackingState = this.trackingStates[i];
                        if (trackingState.isTracking === true)
                            continue;
                        if (this.interestedTargetIndex !== -1 && this.interestedTargetIndex !== i)
                            continue;
                        matchingIndexes.push(i);
                    }
                    const { targetIndex: matchedTargetIndex, modelViewTransform, featurePoints } = await this._detectAndMatch(inputData, matchingIndexes);
                    if (matchedTargetIndex !== -1) {
                        this.trackingStates[matchedTargetIndex].isTracking = true;
                        this.trackingStates[matchedTargetIndex].currentModelViewTransform = modelViewTransform;
                    }
                    // Surface detected feature points for general visualization.
                    this.onUpdate && this.onUpdate({ type: "featurePoints", featurePoints });
                }
                for (let i = 0; i < this.trackingStates.length; i++) {
                    const trackingState = this.trackingStates[i];
                    if (trackingState.isTracking) {
                        const result = await this._trackAndUpdate(inputData, trackingState.currentModelViewTransform, i);
                        if (result === null || result.modelViewTransform === null) {
                            trackingState.isTracking = false;
                            // Keep the last frame's points so they can still be shown.
                            trackingState.screenCoords = result?.screenCoords || [];
                            trackingState.reliabilities = result?.reliabilities || [];
                            trackingState.stabilities = result?.stabilities || [];
                        }
                        else {
                            trackingState.currentModelViewTransform = result.modelViewTransform;
                            trackingState.screenCoords = result.screenCoords;
                            trackingState.reliabilities = result.reliabilities;
                            trackingState.stabilities = result.stabilities;
                            trackingState.deformedMesh = result.deformedMesh;
                        }
                    }
                    const wasShowing = trackingState.showing;
                    trackingState.showing = this.featureManager.shouldShow(i, trackingState.isTracking);
                    if (wasShowing && !trackingState.showing) {
                        trackingState.trackingMatrix = null;
                        this.featureManager.notifyUpdate({ type: "reset", targetIndex: i });
                    }
                    // Notify when visible, when points exist, or when visibility just changed.
                    if (trackingState.showing || (trackingState.screenCoords && trackingState.screenCoords.length > 0) || (wasShowing && !trackingState.showing)) {
                        const worldMatrix = trackingState.showing ? this._glModelViewMatrix(trackingState.currentModelViewTransform, i) : null;
                        let finalMatrix = null;
                        if (worldMatrix) {
                            // Confidence score from mean point stability drives the filters.
                            const stabilities = trackingState.stabilities || [];
                            const avgStability = stabilities.length > 0
                                ? stabilities.reduce((a, b) => a + b, 0) / stabilities.length
                                : 0;
                            const filteredMatrix = this.featureManager.applyWorldMatrixFilters(i, worldMatrix, { stability: avgStability });
                            trackingState.trackingMatrix = filteredMatrix;
                            finalMatrix = [...filteredMatrix];
                            // Swapped dimensions imply a 90-degree rotated input.
                            const isInputRotated = input.width === this.inputHeight && input.height === this.inputWidth;
                            if (isInputRotated) {
                                const rotationFeature = this.featureManager.getFeature("auto-rotation");
                                if (rotationFeature) {
                                    finalMatrix = rotationFeature.rotate(finalMatrix);
                                }
                            }
                        }
                        this.onUpdate && this.onUpdate({
                            type: "updateMatrix",
                            targetIndex: i,
                            worldMatrix: finalMatrix,
                            modelViewTransform: trackingState.currentModelViewTransform,
                            screenCoords: trackingState.screenCoords,
                            reliabilities: trackingState.reliabilities,
                            stabilities: trackingState.stabilities,
                            deformedMesh: trackingState.deformedMesh
                        });
                    }
                }
                this.onUpdate && this.onUpdate({ type: "processDone" });
                if (typeof requestAnimationFrame !== "undefined") {
                    await new Promise(requestAnimationFrame);
                }
                else {
                    await new Promise(resolve => setTimeout(resolve, 16));
                }
            }
        };
        // The loop promise was previously left floating: any throw inside it
        // became an unhandled rejection and silently killed processing. Stop
        // the loop flag and report instead.
        startProcessing().catch((err) => {
            this.processingVideo = false;
            console.error("[Controller] processVideo loop failed:", err);
        });
    }
    stopProcessVideo() {
        this.processingVideo = false;
    }
    /** One-shot detection pass over a single input frame. */
    async detect(input) {
        const inputData = this.inputLoader.loadInput(input);
        const { featurePoints } = this.fullDetector.detect(inputData);
        return { featurePoints, debugExtra: {} };
    }
    /** One-shot match of pre-detected feature points against one target. */
    async match(featurePoints, targetIndex) {
        const { targetIndex: matchedTargetIndex, modelViewTransform, screenCoords, worldCoords, debugExtra } = await this._workerMatch(featurePoints, [
            targetIndex,
        ]);
        return { targetIndex: matchedTargetIndex, modelViewTransform, screenCoords, worldCoords, debugExtra };
    }
    /** One-shot tracking pass (always on the main thread). */
    async track(input, modelViewTransform, targetIndex) {
        const inputData = this.inputLoader.loadInput(input);
        return this.tracker.track(inputData, modelViewTransform, targetIndex);
    }
    /** Refine a pose from tracked features; needs at least 4 correspondences. */
    async trackUpdate(modelViewTransform, trackFeatures) {
        if (trackFeatures.worldCoords.length < 4)
            return null;
        return this._workerTrackUpdate(modelViewTransform, trackFeatures);
    }
    /**
     * Match on the worker (with timeout fallback to {targetIndex: -1}) or on
     * the main thread when no worker exists.
     */
    _workerMatch(featurePoints, targetIndexes, inputData = null, expectedScale) {
        return new Promise((resolve) => {
            if (!this.worker) {
                // No feature points but input data: detect first, then match.
                let fpPromise;
                if (!featurePoints && inputData) {
                    fpPromise = Promise.resolve(this.fullDetector.detect(inputData).featurePoints);
                }
                else {
                    fpPromise = Promise.resolve(featurePoints);
                }
                fpPromise.then(fp => {
                    this._matchOnMainThread(fp, targetIndexes, expectedScale).then(resolve);
                }).catch(() => resolve({ targetIndex: -1 }));
                return;
            }
            const timeout = setTimeout(() => {
                this.workerMatchDone = null;
                resolve({ targetIndex: -1 });
            }, WORKER_TIMEOUT_MS);
            this.workerMatchDone = (data) => {
                clearTimeout(timeout);
                this.workerMatchDone = null;
                resolve({
                    targetIndex: data.targetIndex,
                    modelViewTransform: data.modelViewTransform,
                    screenCoords: data.screenCoords,
                    worldCoords: data.worldCoords,
                    featurePoints: data.featurePoints,
                    debugExtra: data.debugExtra,
                });
            };
            if (inputData) {
                this.worker.postMessage({ type: "match", inputData, targetIndexes, expectedScale });
            }
            else {
                this.worker.postMessage({ type: "match", featurePoints: featurePoints, targetIndexes, expectedScale });
            }
        });
    }
    /**
     * Track on the worker (timeout resolves to an empty result) or fall back
     * to the main-thread tracker.
     */
    _workerTrack(inputData, lastModelViewTransform, targetIndex) {
        return new Promise((resolve) => {
            if (!this.worker) {
                resolve(this.tracker.track(inputData, lastModelViewTransform, targetIndex));
                return;
            }
            const timeout = setTimeout(() => {
                this.workerFullTrackDone = null;
                resolve({ worldCoords: [], screenCoords: [], reliabilities: [] });
            }, WORKER_TIMEOUT_MS);
            this.workerFullTrackDone = (data) => {
                clearTimeout(timeout);
                this.workerFullTrackDone = null;
                resolve(data);
            };
            this.worker.postMessage({
                type: "track",
                inputData,
                lastModelViewTransform,
                targetIndex
            });
        });
    }
    /**
     * Main-thread matching fallback. Matcher/Estimator are imported lazily so
     * the worker-only path never pays for them.
     */
    async _matchOnMainThread(featurePoints, targetIndexes, expectedScale) {
        if (!this.mainThreadMatcher) {
            const { Matcher } = await import("../core/matching/matcher.js");
            const { Estimator } = await import("../core/estimation/estimator.js");
            this.mainThreadMatcher = new Matcher(this.inputWidth, this.inputHeight, this.debugMode);
            this.mainThreadEstimator = new Estimator(this.projectionTransform);
        }
        let matchedTargetIndex = -1;
        let matchedModelViewTransform = null;
        let matchedScreenCoords = null;
        let matchedWorldCoords = null;
        let matchedDebugExtra = null;
        for (let i = 0; i < targetIndexes.length; i++) {
            const matchingIndex = targetIndexes[i];
            const { keyframeIndex, screenCoords, worldCoords, debugExtra } = this.mainThreadMatcher.matchDetection(this.matchingDataList[matchingIndex], featurePoints, expectedScale);
            matchedDebugExtra = debugExtra;
            if (keyframeIndex !== -1) {
                const modelViewTransform = this.mainThreadEstimator.estimate({ screenCoords, worldCoords });
                if (modelViewTransform) {
                    matchedTargetIndex = matchingIndex;
                    matchedModelViewTransform = modelViewTransform;
                    matchedScreenCoords = screenCoords;
                    matchedWorldCoords = worldCoords;
                }
                // First keyframe hit wins, even if pose estimation failed.
                break;
            }
        }
        return {
            targetIndex: matchedTargetIndex,
            modelViewTransform: matchedModelViewTransform,
            screenCoords: matchedScreenCoords,
            worldCoords: matchedWorldCoords,
            debugExtra: matchedDebugExtra,
        };
    }
    /**
     * Pose refinement on the worker (timeout resolves null) or on the main
     * thread when no worker exists.
     */
    _workerTrackUpdate(modelViewTransform, trackingFeatures) {
        return new Promise((resolve) => {
            if (!this.worker) {
                this._trackUpdateOnMainThread(modelViewTransform, trackingFeatures).then(resolve).catch(() => resolve(null));
                return;
            }
            const timeout = setTimeout(() => {
                this.workerTrackDone = null;
                resolve(null);
            }, WORKER_TIMEOUT_MS);
            this.workerTrackDone = (data) => {
                clearTimeout(timeout);
                this.workerTrackDone = null;
                resolve(data.modelViewTransform);
            };
            const { worldCoords, screenCoords, stabilities } = trackingFeatures;
            this.worker.postMessage({
                type: "trackUpdate",
                modelViewTransform,
                worldCoords,
                screenCoords,
                stabilities
            });
        });
    }
    /** Main-thread pose refinement fallback (lazy Estimator import). */
    async _trackUpdateOnMainThread(modelViewTransform, trackingFeatures) {
        if (!this.mainThreadEstimator) {
            const { Estimator } = await import("../core/estimation/estimator.js");
            this.mainThreadEstimator = new Estimator(this.projectionTransform);
        }
        const { worldCoords, screenCoords, stabilities } = trackingFeatures;
        return this.mainThreadEstimator.refineEstimate({
            initialModelViewTransform: modelViewTransform,
            worldCoords,
            screenCoords,
            stabilities
        });
    }
    /**
     * Map a 3x4 CV pose (Y-down, Z-forward) to a column-major 4x4 OpenGL
     * model-view matrix (Y-up, Z-backward) by negating the 2nd and 3rd rows.
     */
    _glModelViewMatrix(modelViewTransform, targetIndex) {
        return [
            modelViewTransform[0][0], -modelViewTransform[1][0], -modelViewTransform[2][0], 0,
            modelViewTransform[0][1], -modelViewTransform[1][1], -modelViewTransform[2][1], 0,
            modelViewTransform[0][2], -modelViewTransform[1][2], -modelViewTransform[2][2], 0,
            modelViewTransform[0][3], -modelViewTransform[1][3], -modelViewTransform[2][3], 1,
        ];
    }
    /**
     * Build a column-major 4x4 GL projection matrix from pinhole intrinsics.
     * @param {{projectionTransform: number[][], width: number, height: number, near: number, far: number}} p
     */
    _glProjectionMatrix({ projectionTransform, width, height, near, far }) {
        const proj = [
            [(2 * projectionTransform[0][0]) / width, 0, -((2 * projectionTransform[0][2]) / width - 1), 0],
            [0, (2 * projectionTransform[1][1]) / height, -((2 * projectionTransform[1][2]) / height - 1), 0],
            [0, 0, -(far + near) / (far - near), (-2 * far * near) / (far - near)],
            [0, 0, -1, 0],
        ];
        // Flatten row-major `proj` into column-major order for GL.
        const projMatrix = [];
        for (let i = 0; i < 4; i++) {
            for (let j = 0; j < 4; j++) {
                projMatrix.push(proj[j][i]);
            }
        }
        return projMatrix;
    }
}
export { Controller };
1
+ import{Tracker as t}from"../core/tracker/tracker.js";import{InputLoader as e}from"../core/input-loader.js";import{FeatureManager as r}from"../core/features/feature-manager.js";import{OneEuroFilterFeature as i}from"../core/features/one-euro-filter-feature.js";import{TemporalFilterFeature as s}from"../core/features/temporal-filter-feature.js";import{AutoRotationFeature as o}from"../core/features/auto-rotation-feature.js";import{DetectorLite as a}from"../core/detector/detector-lite.js";import*as n from"../core/protocol.js";import{AR_CONFIG as d}from"../core/constants.js";let h;h=await(async()=>{if("undefined"==typeof Worker)return null;try{return(await import("./controller.worker.js?worker&inline")).default}catch(t){return null}})();const l=d.ONE_EURO_FILTER_CUTOFF,c=d.ONE_EURO_FILTER_BETA,u=d.WARMUP_TOLERANCE,m=d.MISS_TOLERANCE,p=1e3;let f=0;class g{inputWidth;inputHeight;maxTrack;inputLoader;markerDimensions=null;onUpdate;debugMode;processingVideo=!1;interestedTargetIndex=-1;trackingStates=[];worker;projectionTransform;projectionMatrix;tracker=null;matchingDataList;workerMatchDone=null;workerTrackDone=null;workerFullTrackDone=null;mainThreadMatcher;mainThreadEstimator;featureManager;fullDetector=null;constructor({inputWidth:t,inputHeight:n,onUpdate:h=null,debugMode:p=!1,maxTrack:f=1,warmupTolerance:g=null,missTolerance:w=null,filterMinCF:k=null,filterBeta:T=null,worker:M=null}){this.inputWidth=t,this.inputHeight=n,this.maxTrack=f,this.featureManager=new r,this.featureManager.addFeature(new i(null===k?l:k,null===T?c:T)),this.featureManager.addFeature(new s(null===g?u:g,null===w?m:w)),this.featureManager.addFeature(new o),this.inputLoader=new e(this.inputWidth,this.inputHeight),this.onUpdate=h,this.debugMode=p,this.worker=M,this.worker&&this._setupWorkerListener(),this.fullDetector=new 
// ---------------------------------------------------------------------------
// Controller (minified bundle): drives AR image-target detection, matching and
// tracking over video frames, delegating heavy work to a Web Worker when one
// is available and falling back to main-thread matching/estimation otherwise.
// Minified module-scope bindings (declared earlier in this file, outside this
// view): `d` = constants module, `n` = .taar decoder module, `t` = Tracker
// class, `h` = Worker constructor, `p` = worker-reply timeout (ms),
// `f` = processVideo generation counter. TODO(review): confirm each binding
// against the unminified source.
//
// NOTE(review): the statement below is the verbatim tail of a setup method
// whose beginning lies on an earlier line of this file. It finishes some
// `new a(...)` construction, initialises featureManager twice (first with an
// empty projectionTransform, then again once the real pinhole intrinsics are
// computed from DEFAULT_FOVY — presumably intentional ordering; verify), and
// derives the GL projection matrix via _glProjectionMatrix.
a(this.inputWidth,this.inputHeight,{useLSH:d.USE_LSH,maxFeaturesPerBucket:d.MAX_FEATURES_PER_BUCKET}),this.featureManager.init({inputWidth:this.inputWidth,inputHeight:this.inputHeight,projectionTransform:[],debugMode:this.debugMode});const x=d.DEFAULT_NEAR,C=d.DEFAULT_FAR,b=d.DEFAULT_FOVY*Math.PI/180,D=this.inputHeight/2/Math.tan(b/2);this.projectionTransform=[[D,0,this.inputWidth/2],[0,D,this.inputHeight/2],[0,0,1]],this.featureManager.init({inputWidth:this.inputWidth,inputHeight:this.inputHeight,projectionTransform:this.projectionTransform,debugMode:this.debugMode}),this.projectionMatrix=this._glProjectionMatrix({projectionTransform:this.projectionTransform,width:this.inputWidth,height:this.inputHeight,near:x,far:C})}
// Routes worker replies to the pending one-shot callbacks installed by
// _workerMatch ("matchDone"), _workerTrack ("trackDone") and
// _workerTrackUpdate ("trackUpdateDone"). No-op when no worker exists.
_setupWorkerListener(){this.worker&&(this.worker.onmessage=t=>{"matchDone"===t.data.type&&null!==this.workerMatchDone&&this.workerMatchDone(t.data),"trackDone"===t.data.type&&null!==this.workerFullTrackDone&&this.workerFullTrackDone(t.data),"trackUpdateDone"===t.data.type&&null!==this.workerTrackDone&&this.workerTrackDone(t.data)})}
// Lazily constructs the worker (only when the `h` worker factory was bundled
// AND the environment exposes Worker) and wires its message listener.
_ensureWorker(){this.worker||h&&"undefined"!=typeof Worker&&(this.worker=new h,this._setupWorkerListener())}
// Fetches one URL or an array of URLs into ArrayBuffers (in parallel) and
// forwards them to addImageTargetsFromBuffers.
async addImageTargets(t){const e=Array.isArray(t)?t:[t],r=await Promise.all(e.map(async t=>(await fetch(t)).arrayBuffer()));return this.addImageTargetsFromBuffers(r)}
// Decodes each .taar buffer (n.decodeTaar) into per-target matching data,
// tracking data and [width, height] dimensions; builds the Tracker (`new t`
// is the module-scope Tracker class, not the loop variable), and — when a
// worker exists — posts the full "setup" payload to it.
// Returns {dimensions, matchingDataList, trackingDataList}.
addImageTargetsFromBuffers(e){const r=[],i=[],s=[];for(const t of e){const e=n.decodeTaar(t).dataList||[];for(const t of e)i.push(t.matchingData),r.push(t.trackingData),s.push([t.targetImage.width,t.targetImage.height])}return this.tracker=new t(s,r,this.projectionTransform,this.inputWidth,this.inputHeight,this.debugMode),this._ensureWorker(),this.worker&&this.worker.postMessage({type:"setup",inputWidth:this.inputWidth,inputHeight:this.inputHeight,projectionTransform:this.projectionTransform,debugMode:this.debugMode,matchingDataList:i,trackingDataList:r,markerDimensions:s}),this.markerDimensions=s,this.matchingDataList=i,{dimensions:s,matchingDataList:i,trackingDataList:r}}
// Single-buffer convenience wrapper around addImageTargetsFromBuffers.
addImageTargetsFromBuffer(t){return this.addImageTargetsFromBuffers([t])}
// Stops the processing loop and tears the worker down (the worker is told to
// dispose first, then the reference is dropped).
dispose(){this.stopProcessVideo(),this.worker&&(this.worker.postMessage({type:"dispose"}),this.worker=null)}
// Warm-up pass: runs one input through the detector (if present) and tracker
// so later real frames do not pay first-run initialisation cost.
dummyRun(t){const e=this.inputLoader.loadInput(t);this.fullDetector?.detect(e),this.tracker.dummyRun(e)}
// Accessor for the GL projection matrix computed during setup.
getProjectionMatrix(){return this.projectionMatrix}
// Returns the given 16-element (column-major) matrix rotated 90° about Z:
// each column's (x, y) pair becomes (-y, x); z and w components unchanged.
getRotatedZ90Matrix(t){return[-t[1],t[0],t[2],t[3],-t[5],t[4],t[6],t[7],-t[9],t[8],t[10],t[11],-t[13],t[12],t[14],t[15]]}
// Converts a 3x4 modelView transform into a GL world matrix (the second
// parameter is forwarded but unused by _glModelViewMatrix).
getWorldMatrix(t,e){return this._glModelViewMatrix(t,e)}
// Detection+matching step: derives an expected scale from the first actively
// tracked target's transform (Euclidean norm of the first column — presumably
// the rotation/scale column; TODO confirm) and delegates to _workerMatch for
// the candidate target indexes in `e`.
async _detectAndMatch(t,e){let r;for(const t of this.trackingStates)if(t.isTracking&&t.currentModelViewTransform){const e=t.currentModelViewTransform;r=Math.sqrt(e[0][0]**2+e[1][0]**2+e[2][0]**2);break}const{targetIndex:i,modelViewTransform:s,screenCoords:o,worldCoords:a,featurePoints:n}=await this._workerMatch(null,e,t,r);return{targetIndex:i,modelViewTransform:s,screenCoords:o,worldCoords:a,featurePoints:n}}
// Per-frame tracking update for target `r`.
// 1. _workerTrack returns matched world/screen coords plus the matched point
//    `indices` and the pyramid `octaveIndex`; empty result => tracking lost.
// 2. Per-point stability bookkeeping (lazily sized from the prebuilt feature
//    count): matched points gain +0.4 (clamped to 1) and record their latest
//    screen position; unmatched points decay by 0.08 (floored at 0), keeping
//    their last known screen coords while stability > 0.
// 3. Points with stability > 0 are reported; unmatched ones get reliability 0.
// 4. Pose is refined via _workerTrackUpdate only when enough matched points
//    survive: 4 during warm-up (trackCount < 15), 5 afterwards.
// NOTE(review): the screenCoords/stabilities `.map` callbacks index `a[e]`
// with `e` being the position in `f`; this aligns only if `indices` is sorted
// ascending (f is filled in ascending point order) — verify against the
// worker's track output ordering.
async _trackAndUpdate(t,e,r){const{worldCoords:i,screenCoords:s,reliabilities:o,indices:a=[],octaveIndex:n=0,deformedMesh:d}=await this._workerTrack(t,e,r);if(!i||0===i.length)return{modelViewTransform:null,screenCoords:[],reliabilities:[],stabilities:[],deformedMesh:null};const h=this.trackingStates[r];if(h.pointStabilities||(h.pointStabilities=[]),h.lastScreenCoords||(h.lastScreenCoords=[]),!h.pointStabilities[n]){const t=this.tracker.prebuiltData[r][n].px.length;h.pointStabilities[n]=new Float32Array(t).fill(0),h.lastScreenCoords[n]=new Array(t).fill(null)}const l=h.pointStabilities[n],c=h.lastScreenCoords[n];for(let t=0;t<l.length;t++)if(a.includes(t)){const e=a.indexOf(t);l[t]=Math.min(1,l[t]+.4),c[t]=s[e]}else l[t]=Math.max(0,l[t]-.08);const u=[],m=[],p=[],f=[];for(let t=0;t<l.length;t++)if(l[t]>0){const e=a.includes(t);if(u.push({x:c[t].x,y:c[t].y,id:t}),p.push(l[t]),e){const e=a.indexOf(t);m.push(o[e]),f.push(i[e])}else m.push(0)}const g=h.trackCount<15;return f.length<(g?4:5)?{modelViewTransform:null,screenCoords:u,reliabilities:m,stabilities:p}:(h.trackCount++,{modelViewTransform:await this._workerTrackUpdate(e,{worldCoords:f,screenCoords:f.map((t,e)=>{const r=a[e];return c[r]}),stabilities:f.map((t,e)=>{const r=a[e];return l[r]}),deformedMesh:d}),screenCoords:u,reliabilities:m,stabilities:p,deformedMesh:d,octaveIndex:n})}
// Main processing loop. Bumps the module-level generation counter `f` so a
// newer processVideo call (or stopProcessVideo) cleanly cancels this loop.
// Each iteration: (a) if fewer than maxTrack targets are tracking, detect and
// match among the non-tracking candidates (optionally restricted to
// interestedTargetIndex); (b) for each tracking target, run _trackAndUpdate,
// apply visibility gating (featureManager.shouldShow) and world-matrix
// filters (stability-weighted), rotate the matrix when the input appears to
// be a 90°-rotated frame (width/height swapped vs. configured input), and
// emit onUpdate events ("featurePoints", "updateMatrix", "processDone");
// (c) yield via requestAnimationFrame, or a 16 ms setTimeout where rAF is
// unavailable (e.g. non-browser environments).
processVideo(t){if(this.processingVideo)return;this.processingVideo=!0;const e=++f;this.trackingStates=[];for(let t=0;t<(this.markerDimensions?.length||0);t++)this.trackingStates.push({showing:!1,isTracking:!1,currentModelViewTransform:null,trackCount:0,trackMiss:0});(async()=>{for(;this.processingVideo&&e===f;){const e=this.inputLoader.loadInput(t);if(this.trackingStates.reduce((t,e)=>t+(e.isTracking?1:0),0)<this.maxTrack){const t=[];for(let e=0;e<this.trackingStates.length;e++)!0!==this.trackingStates[e].isTracking&&(-1!==this.interestedTargetIndex&&this.interestedTargetIndex!==e||t.push(e));const{targetIndex:r,modelViewTransform:i,featurePoints:s}=await this._detectAndMatch(e,t);-1!==r&&(this.trackingStates[r].isTracking=!0,this.trackingStates[r].currentModelViewTransform=i),this.onUpdate&&this.onUpdate({type:"featurePoints",featurePoints:s})}for(let r=0;r<this.trackingStates.length;r++){const i=this.trackingStates[r];if(i.isTracking){const t=await this._trackAndUpdate(e,i.currentModelViewTransform,r);null===t||null===t.modelViewTransform?(i.isTracking=!1,i.screenCoords=t?.screenCoords||[],i.reliabilities=t?.reliabilities||[],i.stabilities=t?.stabilities||[]):(i.currentModelViewTransform=t.modelViewTransform,i.screenCoords=t.screenCoords,i.reliabilities=t.reliabilities,i.stabilities=t.stabilities,i.deformedMesh=t.deformedMesh)}const s=i.showing;if(i.showing=this.featureManager.shouldShow(r,i.isTracking),s&&!i.showing&&(i.trackingMatrix=null,this.featureManager.notifyUpdate({type:"reset",targetIndex:r})),i.showing||i.screenCoords&&i.screenCoords.length>0||s&&!i.showing){const e=i.showing?this._glModelViewMatrix(i.currentModelViewTransform,r):null;let s=null;if(e){const o=i.stabilities||[],a=o.length>0?o.reduce((t,e)=>t+e,0)/o.length:0,n=this.featureManager.applyWorldMatrixFilters(r,e,{stability:a});if(i.trackingMatrix=n,s=[...n],t.width===this.inputHeight&&t.height===this.inputWidth){const t=this.featureManager.getFeature("auto-rotation");t&&(s=t.rotate(s))}}this.onUpdate&&this.onUpdate({type:"updateMatrix",targetIndex:r,worldMatrix:s,modelViewTransform:i.currentModelViewTransform,screenCoords:i.screenCoords,reliabilities:i.reliabilities,stabilities:i.stabilities,deformedMesh:i.deformedMesh})}}this.onUpdate&&this.onUpdate({type:"processDone"}),"undefined"!=typeof requestAnimationFrame?await new Promise(requestAnimationFrame):await new Promise(t=>setTimeout(t,16))}})()}
// Flags the processVideo loop to exit at its next iteration check.
stopProcessVideo(){this.processingVideo=!1}
// One-shot public detection on a single input; debugExtra is always empty here.
async detect(t){const e=this.inputLoader.loadInput(t),{featurePoints:r}=this.fullDetector.detect(e);return{featurePoints:r,debugExtra:{}}}
// One-shot public match of pre-computed feature points `t` against a single
// target index `e`.
async match(t,e){const{targetIndex:r,modelViewTransform:i,screenCoords:s,worldCoords:o,debugExtra:a}=await this._workerMatch(t,[e]);return{targetIndex:r,modelViewTransform:i,screenCoords:s,worldCoords:o,debugExtra:a}}
// One-shot public track of an input against target `r` from last transform `e`.
// Always runs on the main-thread tracker (no worker round-trip).
async track(t,e,r){const i=this.inputLoader.loadInput(t);return this.tracker.track(i,e,r)}
// Public pose refinement; requires at least 4 world/screen correspondences
// (the minimum for a pose estimate), otherwise returns null.
async trackUpdate(t,e){return e.worldCoords.length<4?null:this._workerTrackUpdate(t,e)}
// Matching dispatcher. Overloaded inputs: either pre-computed featurePoints
// `t`, or raw inputData `r` (detected worker-side / via fullDetector on the
// fallback path). Without a worker, falls back to _matchOnMainThread; any
// fallback error resolves as {targetIndex:-1} (never rejects). With a worker,
// installs a one-shot workerMatchDone callback guarded by a timeout of `p` ms
// that also resolves {targetIndex:-1}.
_workerMatch(t,e,r=null,i){return new Promise(s=>{if(!this.worker){let o;return o=!t&&r?Promise.resolve(this.fullDetector.detect(r).featurePoints):Promise.resolve(t),void o.then(t=>{this._matchOnMainThread(t,e,i).then(s)}).catch(()=>s({targetIndex:-1}))}const o=setTimeout(()=>{this.workerMatchDone=null,s({targetIndex:-1})},p);this.workerMatchDone=t=>{clearTimeout(o),this.workerMatchDone=null,s({targetIndex:t.targetIndex,modelViewTransform:t.modelViewTransform,screenCoords:t.screenCoords,worldCoords:t.worldCoords,featurePoints:t.featurePoints,debugExtra:t.debugExtra})},r?this.worker.postMessage({type:"match",inputData:r,targetIndexes:e,expectedScale:i}):this.worker.postMessage({type:"match",featurePoints:t,targetIndexes:e,expectedScale:i})})}
// Tracking dispatcher: main-thread tracker when no worker, otherwise a
// one-shot workerFullTrackDone callback with a `p` ms timeout that resolves
// an empty result (treated upstream as tracking lost).
_workerTrack(t,e,r){return new Promise(i=>{if(!this.worker)return void i(this.tracker.track(t,e,r));const s=setTimeout(()=>{this.workerFullTrackDone=null,i({worldCoords:[],screenCoords:[],reliabilities:[]})},p);this.workerFullTrackDone=t=>{clearTimeout(s),this.workerFullTrackDone=null,i(t)},this.worker.postMessage({type:"track",inputData:t,lastModelViewTransform:e,targetIndex:r})})}
// Main-thread matching fallback. Lazily dynamic-imports Matcher/Estimator on
// first use (keeps them out of the worker-only fast path), then tries each
// candidate target index in order and returns the first one whose matched
// correspondences yield a pose estimate. debugExtra reflects the last
// attempted target.
async _matchOnMainThread(t,e,r){if(!this.mainThreadMatcher){const{Matcher:t}=await import("../core/matching/matcher.js"),{Estimator:e}=await import("../core/estimation/estimator.js");this.mainThreadMatcher=new t(this.inputWidth,this.inputHeight,this.debugMode),this.mainThreadEstimator=new e(this.projectionTransform)}let i=-1,s=null,o=null,a=null,n=null;for(let d=0;d<e.length;d++){const h=e[d],{keyframeIndex:l,screenCoords:c,worldCoords:u,debugExtra:m}=this.mainThreadMatcher.matchDetection(this.matchingDataList[h],t,r);if(n=m,-1!==l){const t=this.mainThreadEstimator.estimate({screenCoords:c,worldCoords:u});t&&(i=h,s=t,o=c,a=u);break}}return{targetIndex:i,modelViewTransform:s,screenCoords:o,worldCoords:a,debugExtra:n}}
// Pose-refinement dispatcher: main-thread fallback (errors resolve null),
// otherwise a one-shot workerTrackDone callback with a `p` ms timeout.
// NOTE(review): deformedMesh from `e` is intentionally not forwarded to the
// worker here — only worldCoords/screenCoords/stabilities are posted; confirm
// this matches the worker's trackUpdate contract.
_workerTrackUpdate(t,e){return new Promise(r=>{if(!this.worker)return void this._trackUpdateOnMainThread(t,e).then(r).catch(()=>r(null));const i=setTimeout(()=>{this.workerTrackDone=null,r(null)},p);this.workerTrackDone=t=>{clearTimeout(i),this.workerTrackDone=null,r(t.modelViewTransform)};const{worldCoords:s,screenCoords:o,stabilities:a}=e;this.worker.postMessage({type:"trackUpdate",modelViewTransform:t,worldCoords:s,screenCoords:o,stabilities:a})})}
// Main-thread pose refinement fallback; lazily dynamic-imports Estimator on
// first use, then delegates to Estimator.refineEstimate.
async _trackUpdateOnMainThread(t,e){if(!this.mainThreadEstimator){const{Estimator:t}=await import("../core/estimation/estimator.js");this.mainThreadEstimator=new t(this.projectionTransform)}const{worldCoords:r,screenCoords:i,stabilities:s}=e;return this.mainThreadEstimator.refineEstimate({initialModelViewTransform:t,worldCoords:r,screenCoords:i,stabilities:s})}
// Converts a 3x4 row-major modelView transform into a 16-element column-major
// GL matrix, negating rows 1 and 2 (Y/Z axis flip between the vision and GL
// coordinate conventions). The second parameter is unused.
_glModelViewMatrix(t,e){return[t[0][0],-t[1][0],-t[2][0],0,t[0][1],-t[1][1],-t[2][1],0,t[0][2],-t[1][2],-t[2][2],0,t[0][3],-t[1][3],-t[2][3],1]}
// Builds a GL projection matrix from the 3x3 pinhole intrinsics `t`
// (focal lengths t[0][0]/t[1][1], principal point t[0][2]/t[1][2]) and the
// near/far clip planes, then flattens it column-major (the final double loop
// pushes o[e][t], i.e. transposes the row-major construction).
_glProjectionMatrix({projectionTransform:t,width:e,height:r,near:i,far:s}){const o=[[2*t[0][0]/e,0,-(2*t[0][2]/e-1),0],[0,2*t[1][1]/r,-(2*t[1][2]/r-1),0],[0,0,-(s+i)/(s-i),-2*s*i/(s-i)],[0,0,-1,0]],a=[];for(let t=0;t<4;t++)for(let e=0;e<4;e++)a.push(o[e][t]);return a}}export{g as Controller};