@srsergio/taptapp-ar 1.0.12 β 1.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +57 -0
- package/dist/compiler/controller.d.ts +35 -2
- package/dist/compiler/controller.js +98 -24
- package/dist/compiler/simple-ar.d.ts +60 -0
- package/dist/compiler/simple-ar.js +173 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2 -0
- package/package.json +15 -1
- package/src/compiler/controller.js +120 -26
- package/src/compiler/simple-ar.js +203 -0
- package/src/index.ts +2 -0
package/README.md
CHANGED
|
@@ -134,7 +134,11 @@ const controller = new Controller({
|
|
|
134
134
|
}
|
|
135
135
|
});
|
|
136
136
|
|
|
137
|
+
// Single target
|
|
137
138
|
await controller.addImageTargets('./targets.mind');
|
|
139
|
+
|
|
140
|
+
// OR multiple targets from different .mind files
|
|
141
|
+
await controller.addImageTargets(['./target1.mind', './target2.mind', './target3.mind']);
|
|
138
142
|
controller.processVideo(videoElement); // Starts the internal RAF loop
|
|
139
143
|
```
|
|
140
144
|
|
|
@@ -156,6 +160,59 @@ if (targetIndex !== -1) {
|
|
|
156
160
|
}
|
|
157
161
|
```
|
|
158
162
|
|
|
163
|
+
### 4. Vanilla JS (No Framework) π¦
|
|
164
|
+
The **simplest way** to use ARβno Three.js, no A-Frame. Just overlay an image on the tracked target.
|
|
165
|
+
|
|
166
|
+
```javascript
|
|
167
|
+
import { SimpleAR } from '@srsergio/taptapp-ar';
|
|
168
|
+
|
|
169
|
+
const ar = new SimpleAR({
|
|
170
|
+
container: document.getElementById('ar-container'),
|
|
171
|
+
targetSrc: './my-target.mind', // Single URL or array: ['./a.mind', './b.mind']
|
|
172
|
+
overlay: document.getElementById('my-overlay'),
|
|
173
|
+
onFound: ({ targetIndex }) => console.log(`Target ${targetIndex} detected! π―`),
|
|
174
|
+
onLost: ({ targetIndex }) => console.log(`Target ${targetIndex} lost π`)
|
|
175
|
+
});
|
|
176
|
+
|
|
177
|
+
await ar.start();
|
|
178
|
+
|
|
179
|
+
// When done:
|
|
180
|
+
ar.stop();
|
|
181
|
+
```
|
|
182
|
+
|
|
183
|
+
#### π Minimal HTML
|
|
184
|
+
```html
|
|
185
|
+
<div id="ar-container" style="width: 100vw; height: 100vh;">
|
|
186
|
+
<img id="my-overlay" src="./overlay.png"
|
|
187
|
+
style="opacity: 0; z-index: 1; width: 200px; transition: opacity 0.3s;" />
|
|
188
|
+
</div>
|
|
189
|
+
|
|
190
|
+
<script type="module">
|
|
191
|
+
import { SimpleAR } from '@srsergio/taptapp-ar';
|
|
192
|
+
|
|
193
|
+
const ar = new SimpleAR({
|
|
194
|
+
container: document.getElementById('ar-container'),
|
|
195
|
+
targetSrc: './targets.mind',
|
|
196
|
+
overlay: document.getElementById('my-overlay'),
|
|
197
|
+
});
|
|
198
|
+
|
|
199
|
+
ar.start();
|
|
200
|
+
</script>
|
|
201
|
+
```
|
|
202
|
+
|
|
203
|
+
#### βοΈ SimpleAR Options
|
|
204
|
+
| Option | Required | Description |
|
|
205
|
+
| :--- | :--- | :--- |
|
|
206
|
+
| `container` | β
| DOM element where video + overlay render |
|
|
207
|
+
| `targetSrc` | β
| URL to your `.mind` file |
|
|
208
|
+
| `overlay` | β
| DOM element to position on the target |
|
|
209
|
+
| `onFound` | β | Callback when target is detected |
|
|
210
|
+
| `onLost` | β | Callback when target is lost |
|
|
211
|
+
| `onUpdate` | β | Called each frame with `{ targetIndex, worldMatrix }` |
|
|
212
|
+
| `cameraConfig` | β | Camera constraints (default: `{ facingMode: 'environment', width: 1280, height: 720 }`) |
|
|
213
|
+
|
|
214
|
+
---
|
|
215
|
+
|
|
159
216
|
#### π οΈ Life-cycle Management
|
|
160
217
|
Properly management is crucial to avoid memory leaks:
|
|
161
218
|
|
|
@@ -31,13 +31,36 @@ export class Controller {
|
|
|
31
31
|
projectionMatrix: number[];
|
|
32
32
|
_setupWorkerListener(): void;
|
|
33
33
|
_ensureWorker(): void;
|
|
34
|
-
|
|
35
|
-
|
|
34
|
+
/**
|
|
35
|
+
* Load image targets from one or multiple .mind files
|
|
36
|
+
* @param {string|string[]} fileURLs - Single URL or array of URLs to .mind files
|
|
37
|
+
* @returns {Promise<{dimensions, matchingDataList, trackingDataList}>}
|
|
38
|
+
*/
|
|
39
|
+
addImageTargets(fileURLs: string | string[]): Promise<{
|
|
40
|
+
dimensions: any;
|
|
41
|
+
matchingDataList: any;
|
|
42
|
+
trackingDataList: any;
|
|
43
|
+
}>;
|
|
44
|
+
/**
|
|
45
|
+
* Load image targets from multiple ArrayBuffers
|
|
46
|
+
* @param {ArrayBuffer[]} buffers - Array of .mind file buffers
|
|
47
|
+
*/
|
|
48
|
+
addImageTargetsFromBuffers(buffers: ArrayBuffer[]): {
|
|
36
49
|
dimensions: any[][];
|
|
37
50
|
matchingDataList: any[];
|
|
38
51
|
trackingDataList: any[];
|
|
39
52
|
};
|
|
40
53
|
tracker: Tracker | undefined;
|
|
54
|
+
matchingDataList: any[] | undefined;
|
|
55
|
+
/**
|
|
56
|
+
* Load image targets from a single ArrayBuffer (backward compatible)
|
|
57
|
+
* @param {ArrayBuffer} buffer - Single .mind file buffer
|
|
58
|
+
*/
|
|
59
|
+
addImageTargetsFromBuffer(buffer: ArrayBuffer): {
|
|
60
|
+
dimensions: any[][];
|
|
61
|
+
matchingDataList: any[];
|
|
62
|
+
trackingDataList: any[];
|
|
63
|
+
};
|
|
41
64
|
dispose(): void;
|
|
42
65
|
dummyRun(input: any): void;
|
|
43
66
|
getProjectionMatrix(): number[];
|
|
@@ -78,8 +101,18 @@ export class Controller {
|
|
|
78
101
|
trackUpdate(modelViewTransform: any, trackFeatures: any): Promise<any>;
|
|
79
102
|
_workerMatch(featurePoints: any, targetIndexes: any): Promise<any>;
|
|
80
103
|
workerMatchDone: ((data: any) => void) | undefined;
|
|
104
|
+
_matchOnMainThread(featurePoints: any, targetIndexes: any): Promise<{
|
|
105
|
+
targetIndex: number;
|
|
106
|
+
modelViewTransform: number[][] | null;
|
|
107
|
+
debugExtra: {
|
|
108
|
+
frames: never[];
|
|
109
|
+
} | null;
|
|
110
|
+
}>;
|
|
111
|
+
mainThreadMatcher: import("./matching/matcher.js").Matcher | undefined;
|
|
112
|
+
mainThreadEstimator: import("./estimation/estimator.js").Estimator | undefined;
|
|
81
113
|
_workerTrackUpdate(modelViewTransform: any, trackingFeatures: any): Promise<any>;
|
|
82
114
|
workerTrackDone: ((data: any) => void) | undefined;
|
|
115
|
+
_trackUpdateOnMainThread(modelViewTransform: any, trackingFeatures: any): Promise<never[][] | null>;
|
|
83
116
|
_glModelViewMatrix(modelViewTransform: any, targetIndex: any): any[];
|
|
84
117
|
_glProjectionMatrix({ projectionTransform, width, height, near, far }: {
|
|
85
118
|
projectionTransform: any;
|
|
@@ -75,27 +75,39 @@ class Controller {
|
|
|
75
75
|
this._setupWorkerListener();
|
|
76
76
|
}
|
|
77
77
|
}
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
78
|
+
/**
|
|
79
|
+
* Load image targets from one or multiple .mind files
|
|
80
|
+
* @param {string|string[]} fileURLs - Single URL or array of URLs to .mind files
|
|
81
|
+
* @returns {Promise<{dimensions, matchingDataList, trackingDataList}>}
|
|
82
|
+
*/
|
|
83
|
+
async addImageTargets(fileURLs) {
|
|
84
|
+
const urls = Array.isArray(fileURLs) ? fileURLs : [fileURLs];
|
|
85
|
+
// Fetch all .mind files in parallel
|
|
86
|
+
const buffers = await Promise.all(urls.map(async (url) => {
|
|
87
|
+
const response = await fetch(url);
|
|
88
|
+
return response.arrayBuffer();
|
|
89
|
+
}));
|
|
90
|
+
// Combine all buffers into a single target list
|
|
91
|
+
return this.addImageTargetsFromBuffers(buffers);
|
|
85
92
|
}
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
const
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
93
|
+
/**
|
|
94
|
+
* Load image targets from multiple ArrayBuffers
|
|
95
|
+
* @param {ArrayBuffer[]} buffers - Array of .mind file buffers
|
|
96
|
+
*/
|
|
97
|
+
addImageTargetsFromBuffers(buffers) {
|
|
98
|
+
const allTrackingData = [];
|
|
99
|
+
const allMatchingData = [];
|
|
100
|
+
const allDimensions = [];
|
|
101
|
+
for (const buffer of buffers) {
|
|
102
|
+
const compiler = new Compiler();
|
|
103
|
+
const dataList = compiler.importData(buffer);
|
|
104
|
+
for (const item of dataList) {
|
|
105
|
+
allMatchingData.push(item.matchingData);
|
|
106
|
+
allTrackingData.push(item.trackingData);
|
|
107
|
+
allDimensions.push([item.targetImage.width, item.targetImage.height]);
|
|
108
|
+
}
|
|
97
109
|
}
|
|
98
|
-
this.tracker = new Tracker(
|
|
110
|
+
this.tracker = new Tracker(allDimensions, allTrackingData, this.projectionTransform, this.inputWidth, this.inputHeight, this.debugMode);
|
|
99
111
|
this._ensureWorker();
|
|
100
112
|
if (this.worker) {
|
|
101
113
|
this.worker.postMessage({
|
|
@@ -104,11 +116,19 @@ class Controller {
|
|
|
104
116
|
inputHeight: this.inputHeight,
|
|
105
117
|
projectionTransform: this.projectionTransform,
|
|
106
118
|
debugMode: this.debugMode,
|
|
107
|
-
matchingDataList,
|
|
119
|
+
matchingDataList: allMatchingData,
|
|
108
120
|
});
|
|
109
121
|
}
|
|
110
|
-
this.markerDimensions =
|
|
111
|
-
|
|
122
|
+
this.markerDimensions = allDimensions;
|
|
123
|
+
this.matchingDataList = allMatchingData; // Store for main-thread fallback
|
|
124
|
+
return { dimensions: allDimensions, matchingDataList: allMatchingData, trackingDataList: allTrackingData };
|
|
125
|
+
}
|
|
126
|
+
/**
|
|
127
|
+
* Load image targets from a single ArrayBuffer (backward compatible)
|
|
128
|
+
* @param {ArrayBuffer} buffer - Single .mind file buffer
|
|
129
|
+
*/
|
|
130
|
+
addImageTargetsFromBuffer(buffer) {
|
|
131
|
+
return this.addImageTargetsFromBuffers([buffer]);
|
|
112
132
|
}
|
|
113
133
|
dispose() {
|
|
114
134
|
this.stopProcessVideo();
|
|
@@ -305,6 +325,11 @@ class Controller {
|
|
|
305
325
|
}
|
|
306
326
|
_workerMatch(featurePoints, targetIndexes) {
|
|
307
327
|
return new Promise((resolve) => {
|
|
328
|
+
// If no worker available, process on main thread
|
|
329
|
+
if (!this.worker) {
|
|
330
|
+
this._matchOnMainThread(featurePoints, targetIndexes).then(resolve);
|
|
331
|
+
return;
|
|
332
|
+
}
|
|
308
333
|
this.workerMatchDone = (data) => {
|
|
309
334
|
resolve({
|
|
310
335
|
targetIndex: data.targetIndex,
|
|
@@ -312,16 +337,51 @@ class Controller {
|
|
|
312
337
|
debugExtra: data.debugExtra,
|
|
313
338
|
});
|
|
314
339
|
};
|
|
315
|
-
this.worker
|
|
340
|
+
this.worker.postMessage({ type: "match", featurePoints: featurePoints, targetIndexes });
|
|
316
341
|
});
|
|
317
342
|
}
|
|
343
|
+
async _matchOnMainThread(featurePoints, targetIndexes) {
|
|
344
|
+
// Lazy initialize Matcher and Estimator for main thread
|
|
345
|
+
if (!this.mainThreadMatcher) {
|
|
346
|
+
const { Matcher } = await import("./matching/matcher.js");
|
|
347
|
+
const { Estimator } = await import("./estimation/estimator.js");
|
|
348
|
+
this.mainThreadMatcher = new Matcher(this.inputWidth, this.inputHeight, this.debugMode);
|
|
349
|
+
this.mainThreadEstimator = new Estimator(this.projectionTransform);
|
|
350
|
+
}
|
|
351
|
+
let matchedTargetIndex = -1;
|
|
352
|
+
let matchedModelViewTransform = null;
|
|
353
|
+
let matchedDebugExtra = null;
|
|
354
|
+
for (let i = 0; i < targetIndexes.length; i++) {
|
|
355
|
+
const matchingIndex = targetIndexes[i];
|
|
356
|
+
const { keyframeIndex, screenCoords, worldCoords, debugExtra } = this.mainThreadMatcher.matchDetection(this.matchingDataList[matchingIndex], featurePoints);
|
|
357
|
+
matchedDebugExtra = debugExtra;
|
|
358
|
+
if (keyframeIndex !== -1) {
|
|
359
|
+
const modelViewTransform = this.mainThreadEstimator.estimate({ screenCoords, worldCoords });
|
|
360
|
+
if (modelViewTransform) {
|
|
361
|
+
matchedTargetIndex = matchingIndex;
|
|
362
|
+
matchedModelViewTransform = modelViewTransform;
|
|
363
|
+
}
|
|
364
|
+
break;
|
|
365
|
+
}
|
|
366
|
+
}
|
|
367
|
+
return {
|
|
368
|
+
targetIndex: matchedTargetIndex,
|
|
369
|
+
modelViewTransform: matchedModelViewTransform,
|
|
370
|
+
debugExtra: matchedDebugExtra,
|
|
371
|
+
};
|
|
372
|
+
}
|
|
318
373
|
_workerTrackUpdate(modelViewTransform, trackingFeatures) {
|
|
319
374
|
return new Promise((resolve) => {
|
|
375
|
+
// If no worker available, process on main thread
|
|
376
|
+
if (!this.worker) {
|
|
377
|
+
this._trackUpdateOnMainThread(modelViewTransform, trackingFeatures).then(resolve);
|
|
378
|
+
return;
|
|
379
|
+
}
|
|
320
380
|
this.workerTrackDone = (data) => {
|
|
321
381
|
resolve(data.modelViewTransform);
|
|
322
382
|
};
|
|
323
383
|
const { worldCoords, screenCoords } = trackingFeatures;
|
|
324
|
-
this.worker
|
|
384
|
+
this.worker.postMessage({
|
|
325
385
|
type: "trackUpdate",
|
|
326
386
|
modelViewTransform,
|
|
327
387
|
worldCoords,
|
|
@@ -329,6 +389,20 @@ class Controller {
|
|
|
329
389
|
});
|
|
330
390
|
});
|
|
331
391
|
}
|
|
392
|
+
async _trackUpdateOnMainThread(modelViewTransform, trackingFeatures) {
|
|
393
|
+
// Lazy initialize Estimator for main thread
|
|
394
|
+
if (!this.mainThreadEstimator) {
|
|
395
|
+
const { Estimator } = await import("./estimation/estimator.js");
|
|
396
|
+
this.mainThreadEstimator = new Estimator(this.projectionTransform);
|
|
397
|
+
}
|
|
398
|
+
const { worldCoords, screenCoords } = trackingFeatures;
|
|
399
|
+
const finalModelViewTransform = this.mainThreadEstimator.refineEstimate({
|
|
400
|
+
initialModelViewTransform: modelViewTransform,
|
|
401
|
+
worldCoords,
|
|
402
|
+
screenCoords,
|
|
403
|
+
});
|
|
404
|
+
return finalModelViewTransform;
|
|
405
|
+
}
|
|
332
406
|
_glModelViewMatrix(modelViewTransform, targetIndex) {
|
|
333
407
|
const height = this.markerDimensions[targetIndex][1];
|
|
334
408
|
const openGLWorldMatrix = [
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* π¦ SimpleAR - Dead-simple vanilla AR for image overlays
|
|
3
|
+
*
|
|
4
|
+
* No Three.js. No A-Frame. Just HTML, CSS, and JavaScript.
|
|
5
|
+
*
|
|
6
|
+
* @example
|
|
7
|
+
* const ar = new SimpleAR({
|
|
8
|
+
* container: document.getElementById('ar-container'),
|
|
9
|
+
* targetSrc: './my-target.mind',
|
|
10
|
+
* overlay: document.getElementById('my-overlay'),
|
|
11
|
+
* onFound: () => console.log('Target found!'),
|
|
12
|
+
* onLost: () => console.log('Target lost!')
|
|
13
|
+
* });
|
|
14
|
+
*
|
|
15
|
+
* await ar.start();
|
|
16
|
+
*/
|
|
17
|
+
export class SimpleAR {
|
|
18
|
+
constructor({ container, targetSrc, overlay, onFound, onLost, onUpdate, cameraConfig, }: {
|
|
19
|
+
container: any;
|
|
20
|
+
targetSrc: any;
|
|
21
|
+
overlay: any;
|
|
22
|
+
onFound?: null | undefined;
|
|
23
|
+
onLost?: null | undefined;
|
|
24
|
+
onUpdate?: null | undefined;
|
|
25
|
+
cameraConfig?: {
|
|
26
|
+
facingMode: string;
|
|
27
|
+
width: number;
|
|
28
|
+
height: number;
|
|
29
|
+
} | undefined;
|
|
30
|
+
});
|
|
31
|
+
container: any;
|
|
32
|
+
targetSrc: any;
|
|
33
|
+
overlay: any;
|
|
34
|
+
onFound: any;
|
|
35
|
+
onLost: any;
|
|
36
|
+
onUpdateCallback: any;
|
|
37
|
+
cameraConfig: {
|
|
38
|
+
facingMode: string;
|
|
39
|
+
width: number;
|
|
40
|
+
height: number;
|
|
41
|
+
};
|
|
42
|
+
video: HTMLVideoElement | null;
|
|
43
|
+
controller: Controller | null;
|
|
44
|
+
isTracking: boolean;
|
|
45
|
+
lastMatrix: any;
|
|
46
|
+
/**
|
|
47
|
+
* Initialize and start AR tracking
|
|
48
|
+
*/
|
|
49
|
+
start(): Promise<this>;
|
|
50
|
+
/**
|
|
51
|
+
* Stop AR tracking and release resources
|
|
52
|
+
*/
|
|
53
|
+
stop(): void;
|
|
54
|
+
_createVideo(): void;
|
|
55
|
+
_startCamera(): Promise<void>;
|
|
56
|
+
_initController(): void;
|
|
57
|
+
_handleUpdate(data: any): void;
|
|
58
|
+
_positionOverlay(worldMatrix: any): void;
|
|
59
|
+
}
|
|
60
|
+
import { Controller } from "./controller.js";
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import { Controller } from "./controller.js";
|
|
2
|
+
/**
|
|
3
|
+
* π¦ SimpleAR - Dead-simple vanilla AR for image overlays
|
|
4
|
+
*
|
|
5
|
+
* No Three.js. No A-Frame. Just HTML, CSS, and JavaScript.
|
|
6
|
+
*
|
|
7
|
+
* @example
|
|
8
|
+
* const ar = new SimpleAR({
|
|
9
|
+
* container: document.getElementById('ar-container'),
|
|
10
|
+
* targetSrc: './my-target.mind',
|
|
11
|
+
* overlay: document.getElementById('my-overlay'),
|
|
12
|
+
* onFound: () => console.log('Target found!'),
|
|
13
|
+
* onLost: () => console.log('Target lost!')
|
|
14
|
+
* });
|
|
15
|
+
*
|
|
16
|
+
* await ar.start();
|
|
17
|
+
*/
|
|
18
|
+
class SimpleAR {
|
|
19
|
+
constructor({ container, targetSrc, overlay, onFound = null, onLost = null, onUpdate = null, cameraConfig = { facingMode: 'environment', width: 1280, height: 720 }, }) {
|
|
20
|
+
this.container = container;
|
|
21
|
+
this.targetSrc = targetSrc;
|
|
22
|
+
this.overlay = overlay;
|
|
23
|
+
this.onFound = onFound;
|
|
24
|
+
this.onLost = onLost;
|
|
25
|
+
this.onUpdateCallback = onUpdate;
|
|
26
|
+
this.cameraConfig = cameraConfig;
|
|
27
|
+
this.video = null;
|
|
28
|
+
this.controller = null;
|
|
29
|
+
this.isTracking = false;
|
|
30
|
+
this.lastMatrix = null;
|
|
31
|
+
}
|
|
32
|
+
/**
|
|
33
|
+
* Initialize and start AR tracking
|
|
34
|
+
*/
|
|
35
|
+
async start() {
|
|
36
|
+
// 1. Create video element
|
|
37
|
+
this._createVideo();
|
|
38
|
+
// 2. Start camera
|
|
39
|
+
await this._startCamera();
|
|
40
|
+
// 3. Initialize controller
|
|
41
|
+
this._initController();
|
|
42
|
+
// 4. Load targets (supports single URL or array of URLs)
|
|
43
|
+
const targets = Array.isArray(this.targetSrc) ? this.targetSrc : [this.targetSrc];
|
|
44
|
+
await this.controller.addImageTargets(targets);
|
|
45
|
+
this.controller.processVideo(this.video);
|
|
46
|
+
return this;
|
|
47
|
+
}
|
|
48
|
+
/**
|
|
49
|
+
* Stop AR tracking and release resources
|
|
50
|
+
*/
|
|
51
|
+
stop() {
|
|
52
|
+
if (this.controller) {
|
|
53
|
+
this.controller.dispose();
|
|
54
|
+
this.controller = null;
|
|
55
|
+
}
|
|
56
|
+
if (this.video && this.video.srcObject) {
|
|
57
|
+
this.video.srcObject.getTracks().forEach(track => track.stop());
|
|
58
|
+
this.video.remove();
|
|
59
|
+
this.video = null;
|
|
60
|
+
}
|
|
61
|
+
this.isTracking = false;
|
|
62
|
+
}
|
|
63
|
+
_createVideo() {
|
|
64
|
+
this.video = document.createElement('video');
|
|
65
|
+
this.video.setAttribute('autoplay', '');
|
|
66
|
+
this.video.setAttribute('playsinline', '');
|
|
67
|
+
this.video.setAttribute('muted', '');
|
|
68
|
+
this.video.style.cssText = `
|
|
69
|
+
position: absolute;
|
|
70
|
+
top: 0;
|
|
71
|
+
left: 0;
|
|
72
|
+
width: 100%;
|
|
73
|
+
height: 100%;
|
|
74
|
+
object-fit: cover;
|
|
75
|
+
z-index: 0;
|
|
76
|
+
`;
|
|
77
|
+
this.container.style.position = 'relative';
|
|
78
|
+
this.container.style.overflow = 'hidden';
|
|
79
|
+
this.container.insertBefore(this.video, this.container.firstChild);
|
|
80
|
+
}
|
|
81
|
+
async _startCamera() {
|
|
82
|
+
const stream = await navigator.mediaDevices.getUserMedia({
|
|
83
|
+
video: this.cameraConfig
|
|
84
|
+
});
|
|
85
|
+
this.video.srcObject = stream;
|
|
86
|
+
await this.video.play();
|
|
87
|
+
// Wait for video dimensions to be available
|
|
88
|
+
await new Promise(resolve => {
|
|
89
|
+
if (this.video.videoWidth > 0)
|
|
90
|
+
return resolve();
|
|
91
|
+
this.video.onloadedmetadata = resolve;
|
|
92
|
+
});
|
|
93
|
+
}
|
|
94
|
+
_initController() {
|
|
95
|
+
this.controller = new Controller({
|
|
96
|
+
inputWidth: this.video.videoWidth,
|
|
97
|
+
inputHeight: this.video.videoHeight,
|
|
98
|
+
onUpdate: (data) => this._handleUpdate(data)
|
|
99
|
+
});
|
|
100
|
+
}
|
|
101
|
+
_handleUpdate(data) {
|
|
102
|
+
if (data.type !== 'updateMatrix')
|
|
103
|
+
return;
|
|
104
|
+
const { targetIndex, worldMatrix } = data;
|
|
105
|
+
if (worldMatrix) {
|
|
106
|
+
// Target found
|
|
107
|
+
if (!this.isTracking) {
|
|
108
|
+
this.isTracking = true;
|
|
109
|
+
this.overlay && (this.overlay.style.opacity = '1');
|
|
110
|
+
this.onFound && this.onFound({ targetIndex });
|
|
111
|
+
}
|
|
112
|
+
this.lastMatrix = worldMatrix;
|
|
113
|
+
this._positionOverlay(worldMatrix);
|
|
114
|
+
this.onUpdateCallback && this.onUpdateCallback({ targetIndex, worldMatrix });
|
|
115
|
+
}
|
|
116
|
+
else {
|
|
117
|
+
// Target lost
|
|
118
|
+
if (this.isTracking) {
|
|
119
|
+
this.isTracking = false;
|
|
120
|
+
this.overlay && (this.overlay.style.opacity = '0');
|
|
121
|
+
this.onLost && this.onLost({ targetIndex });
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
_positionOverlay(worldMatrix) {
|
|
126
|
+
if (!this.overlay)
|
|
127
|
+
return;
|
|
128
|
+
const containerRect = this.container.getBoundingClientRect();
|
|
129
|
+
const videoW = this.video.videoWidth;
|
|
130
|
+
const videoH = this.video.videoHeight;
|
|
131
|
+
// Calculate display area considering object-fit: cover
|
|
132
|
+
const containerAspect = containerRect.width / containerRect.height;
|
|
133
|
+
const videoAspect = videoW / videoH;
|
|
134
|
+
let displayW, displayH, offsetX, offsetY;
|
|
135
|
+
if (containerAspect > videoAspect) {
|
|
136
|
+
// Container is wider - video fills width, crops height
|
|
137
|
+
displayW = containerRect.width;
|
|
138
|
+
displayH = containerRect.width / videoAspect;
|
|
139
|
+
offsetX = 0;
|
|
140
|
+
offsetY = (containerRect.height - displayH) / 2;
|
|
141
|
+
}
|
|
142
|
+
else {
|
|
143
|
+
// Container is taller - video fills height, crops width
|
|
144
|
+
displayH = containerRect.height;
|
|
145
|
+
displayW = containerRect.height * videoAspect;
|
|
146
|
+
offsetX = (containerRect.width - displayW) / 2;
|
|
147
|
+
offsetY = 0;
|
|
148
|
+
}
|
|
149
|
+
const scaleX = displayW / videoW;
|
|
150
|
+
const scaleY = displayH / videoH;
|
|
151
|
+
// Extract position and rotation from world matrix
|
|
152
|
+
// Matrix is column-major: [m0,m1,m2,m3, m4,m5,m6,m7, m8,m9,m10,m11, m12,m13,m14,m15]
|
|
153
|
+
const tx = worldMatrix[12];
|
|
154
|
+
const ty = worldMatrix[13];
|
|
155
|
+
const scale = Math.sqrt(worldMatrix[0] ** 2 + worldMatrix[1] ** 2);
|
|
156
|
+
const rotation = Math.atan2(worldMatrix[1], worldMatrix[0]);
|
|
157
|
+
// Convert from normalized coords to screen coords
|
|
158
|
+
const screenX = offsetX + (videoW / 2 + tx) * scaleX;
|
|
159
|
+
const screenY = offsetY + (videoH / 2 - ty) * scaleY;
|
|
160
|
+
// Apply transform
|
|
161
|
+
this.overlay.style.position = 'absolute';
|
|
162
|
+
this.overlay.style.transformOrigin = 'center center';
|
|
163
|
+
this.overlay.style.left = '0';
|
|
164
|
+
this.overlay.style.top = '0';
|
|
165
|
+
this.overlay.style.transform = `
|
|
166
|
+
translate(${screenX}px, ${screenY}px)
|
|
167
|
+
translate(-50%, -50%)
|
|
168
|
+
scale(${scale * scaleX * 0.01})
|
|
169
|
+
rotate(${-rotation}rad)
|
|
170
|
+
`;
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
export { SimpleAR };
|
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@srsergio/taptapp-ar",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.14",
|
|
4
4
|
"description": "AR Compiler for Node.js and Browser",
|
|
5
5
|
"repository": {
|
|
6
6
|
"type": "git",
|
|
@@ -37,6 +37,20 @@
|
|
|
37
37
|
"react-dom": ">=18.0.0",
|
|
38
38
|
"three": ">=0.160.0"
|
|
39
39
|
},
|
|
40
|
+
"peerDependenciesMeta": {
|
|
41
|
+
"aframe": {
|
|
42
|
+
"optional": true
|
|
43
|
+
},
|
|
44
|
+
"react": {
|
|
45
|
+
"optional": true
|
|
46
|
+
},
|
|
47
|
+
"react-dom": {
|
|
48
|
+
"optional": true
|
|
49
|
+
},
|
|
50
|
+
"three": {
|
|
51
|
+
"optional": true
|
|
52
|
+
}
|
|
53
|
+
},
|
|
40
54
|
"dependencies": {
|
|
41
55
|
"@msgpack/msgpack": "^3.0.0-beta2",
|
|
42
56
|
"ml-matrix": "^6.10.4",
|
|
@@ -91,32 +91,49 @@ class Controller {
|
|
|
91
91
|
}
|
|
92
92
|
}
|
|
93
93
|
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
94
|
+
/**
|
|
95
|
+
* Load image targets from one or multiple .mind files
|
|
96
|
+
* @param {string|string[]} fileURLs - Single URL or array of URLs to .mind files
|
|
97
|
+
* @returns {Promise<{dimensions, matchingDataList, trackingDataList}>}
|
|
98
|
+
*/
|
|
99
|
+
async addImageTargets(fileURLs) {
|
|
100
|
+
const urls = Array.isArray(fileURLs) ? fileURLs : [fileURLs];
|
|
101
|
+
|
|
102
|
+
// Fetch all .mind files in parallel
|
|
103
|
+
const buffers = await Promise.all(
|
|
104
|
+
urls.map(async (url) => {
|
|
105
|
+
const response = await fetch(url);
|
|
106
|
+
return response.arrayBuffer();
|
|
107
|
+
})
|
|
108
|
+
);
|
|
109
|
+
|
|
110
|
+
// Combine all buffers into a single target list
|
|
111
|
+
return this.addImageTargetsFromBuffers(buffers);
|
|
101
112
|
}
|
|
102
113
|
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
const
|
|
109
|
-
const
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
114
|
+
/**
|
|
115
|
+
* Load image targets from multiple ArrayBuffers
|
|
116
|
+
* @param {ArrayBuffer[]} buffers - Array of .mind file buffers
|
|
117
|
+
*/
|
|
118
|
+
addImageTargetsFromBuffers(buffers) {
|
|
119
|
+
const allTrackingData = [];
|
|
120
|
+
const allMatchingData = [];
|
|
121
|
+
const allDimensions = [];
|
|
122
|
+
|
|
123
|
+
for (const buffer of buffers) {
|
|
124
|
+
const compiler = new Compiler();
|
|
125
|
+
const dataList = compiler.importData(buffer);
|
|
126
|
+
|
|
127
|
+
for (const item of dataList) {
|
|
128
|
+
allMatchingData.push(item.matchingData);
|
|
129
|
+
allTrackingData.push(item.trackingData);
|
|
130
|
+
allDimensions.push([item.targetImage.width, item.targetImage.height]);
|
|
131
|
+
}
|
|
115
132
|
}
|
|
116
133
|
|
|
117
134
|
this.tracker = new Tracker(
|
|
118
|
-
|
|
119
|
-
|
|
135
|
+
allDimensions,
|
|
136
|
+
allTrackingData,
|
|
120
137
|
this.projectionTransform,
|
|
121
138
|
this.inputWidth,
|
|
122
139
|
this.inputHeight,
|
|
@@ -131,12 +148,21 @@ class Controller {
|
|
|
131
148
|
inputHeight: this.inputHeight,
|
|
132
149
|
projectionTransform: this.projectionTransform,
|
|
133
150
|
debugMode: this.debugMode,
|
|
134
|
-
matchingDataList,
|
|
151
|
+
matchingDataList: allMatchingData,
|
|
135
152
|
});
|
|
136
153
|
}
|
|
137
154
|
|
|
138
|
-
this.markerDimensions =
|
|
139
|
-
|
|
155
|
+
this.markerDimensions = allDimensions;
|
|
156
|
+
this.matchingDataList = allMatchingData; // Store for main-thread fallback
|
|
157
|
+
return { dimensions: allDimensions, matchingDataList: allMatchingData, trackingDataList: allTrackingData };
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
/**
|
|
161
|
+
* Load image targets from a single ArrayBuffer (backward compatible)
|
|
162
|
+
* @param {ArrayBuffer} buffer - Single .mind file buffer
|
|
163
|
+
*/
|
|
164
|
+
addImageTargetsFromBuffer(buffer) {
|
|
165
|
+
return this.addImageTargetsFromBuffers([buffer]);
|
|
140
166
|
}
|
|
141
167
|
|
|
142
168
|
dispose() {
|
|
@@ -370,6 +396,12 @@ class Controller {
|
|
|
370
396
|
|
|
371
397
|
_workerMatch(featurePoints, targetIndexes) {
|
|
372
398
|
return new Promise((resolve) => {
|
|
399
|
+
// If no worker available, process on main thread
|
|
400
|
+
if (!this.worker) {
|
|
401
|
+
this._matchOnMainThread(featurePoints, targetIndexes).then(resolve);
|
|
402
|
+
return;
|
|
403
|
+
}
|
|
404
|
+
|
|
373
405
|
this.workerMatchDone = (data) => {
|
|
374
406
|
resolve({
|
|
375
407
|
targetIndex: data.targetIndex,
|
|
@@ -377,17 +409,63 @@ class Controller {
|
|
|
377
409
|
debugExtra: data.debugExtra,
|
|
378
410
|
});
|
|
379
411
|
};
|
|
380
|
-
this.worker
|
|
412
|
+
this.worker.postMessage({ type: "match", featurePoints: featurePoints, targetIndexes });
|
|
381
413
|
});
|
|
382
414
|
}
|
|
383
415
|
|
|
416
|
+
async _matchOnMainThread(featurePoints, targetIndexes) {
|
|
417
|
+
// Lazy initialize Matcher and Estimator for main thread
|
|
418
|
+
if (!this.mainThreadMatcher) {
|
|
419
|
+
const { Matcher } = await import("./matching/matcher.js");
|
|
420
|
+
const { Estimator } = await import("./estimation/estimator.js");
|
|
421
|
+
this.mainThreadMatcher = new Matcher(this.inputWidth, this.inputHeight, this.debugMode);
|
|
422
|
+
this.mainThreadEstimator = new Estimator(this.projectionTransform);
|
|
423
|
+
}
|
|
424
|
+
|
|
425
|
+
let matchedTargetIndex = -1;
|
|
426
|
+
let matchedModelViewTransform = null;
|
|
427
|
+
let matchedDebugExtra = null;
|
|
428
|
+
|
|
429
|
+
for (let i = 0; i < targetIndexes.length; i++) {
|
|
430
|
+
const matchingIndex = targetIndexes[i];
|
|
431
|
+
|
|
432
|
+
const { keyframeIndex, screenCoords, worldCoords, debugExtra } = this.mainThreadMatcher.matchDetection(
|
|
433
|
+
this.matchingDataList[matchingIndex],
|
|
434
|
+
featurePoints,
|
|
435
|
+
);
|
|
436
|
+
matchedDebugExtra = debugExtra;
|
|
437
|
+
|
|
438
|
+
if (keyframeIndex !== -1) {
|
|
439
|
+
const modelViewTransform = this.mainThreadEstimator.estimate({ screenCoords, worldCoords });
|
|
440
|
+
|
|
441
|
+
if (modelViewTransform) {
|
|
442
|
+
matchedTargetIndex = matchingIndex;
|
|
443
|
+
matchedModelViewTransform = modelViewTransform;
|
|
444
|
+
}
|
|
445
|
+
break;
|
|
446
|
+
}
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
return {
|
|
450
|
+
targetIndex: matchedTargetIndex,
|
|
451
|
+
modelViewTransform: matchedModelViewTransform,
|
|
452
|
+
debugExtra: matchedDebugExtra,
|
|
453
|
+
};
|
|
454
|
+
}
|
|
455
|
+
|
|
384
456
|
_workerTrackUpdate(modelViewTransform, trackingFeatures) {
|
|
385
457
|
return new Promise((resolve) => {
|
|
458
|
+
// If no worker available, process on main thread
|
|
459
|
+
if (!this.worker) {
|
|
460
|
+
this._trackUpdateOnMainThread(modelViewTransform, trackingFeatures).then(resolve);
|
|
461
|
+
return;
|
|
462
|
+
}
|
|
463
|
+
|
|
386
464
|
this.workerTrackDone = (data) => {
|
|
387
465
|
resolve(data.modelViewTransform);
|
|
388
466
|
};
|
|
389
467
|
const { worldCoords, screenCoords } = trackingFeatures;
|
|
390
|
-
this.worker
|
|
468
|
+
this.worker.postMessage({
|
|
391
469
|
type: "trackUpdate",
|
|
392
470
|
modelViewTransform,
|
|
393
471
|
worldCoords,
|
|
@@ -396,6 +474,22 @@ class Controller {
|
|
|
396
474
|
});
|
|
397
475
|
}
|
|
398
476
|
|
|
477
|
+
async _trackUpdateOnMainThread(modelViewTransform, trackingFeatures) {
|
|
478
|
+
// Lazy initialize Estimator for main thread
|
|
479
|
+
if (!this.mainThreadEstimator) {
|
|
480
|
+
const { Estimator } = await import("./estimation/estimator.js");
|
|
481
|
+
this.mainThreadEstimator = new Estimator(this.projectionTransform);
|
|
482
|
+
}
|
|
483
|
+
|
|
484
|
+
const { worldCoords, screenCoords } = trackingFeatures;
|
|
485
|
+
const finalModelViewTransform = this.mainThreadEstimator.refineEstimate({
|
|
486
|
+
initialModelViewTransform: modelViewTransform,
|
|
487
|
+
worldCoords,
|
|
488
|
+
screenCoords,
|
|
489
|
+
});
|
|
490
|
+
return finalModelViewTransform;
|
|
491
|
+
}
|
|
492
|
+
|
|
399
493
|
_glModelViewMatrix(modelViewTransform, targetIndex) {
|
|
400
494
|
const height = this.markerDimensions[targetIndex][1];
|
|
401
495
|
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
import { Controller } from "./controller.js";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* π¦ SimpleAR - Dead-simple vanilla AR for image overlays
|
|
5
|
+
*
|
|
6
|
+
* No Three.js. No A-Frame. Just HTML, CSS, and JavaScript.
|
|
7
|
+
*
|
|
8
|
+
* @example
|
|
9
|
+
* const ar = new SimpleAR({
|
|
10
|
+
* container: document.getElementById('ar-container'),
|
|
11
|
+
* targetSrc: './my-target.mind',
|
|
12
|
+
* overlay: document.getElementById('my-overlay'),
|
|
13
|
+
* onFound: () => console.log('Target found!'),
|
|
14
|
+
* onLost: () => console.log('Target lost!')
|
|
15
|
+
* });
|
|
16
|
+
*
|
|
17
|
+
* await ar.start();
|
|
18
|
+
*/
|
|
19
|
+
class SimpleAR {
  /**
   * @param {Object} options
   * @param {HTMLElement} options.container - Element the camera <video> is injected into.
   * @param {string|string[]} options.targetSrc - One .mind URL or an array of .mind URLs.
   * @param {HTMLElement|null} options.overlay - Element faded in/out and positioned over the tracked target.
   * @param {Function|null} [options.onFound] - Called with {targetIndex} when a target is first detected.
   * @param {Function|null} [options.onLost] - Called with {targetIndex} when tracking is lost.
   * @param {Function|null} [options.onUpdate] - Called with {targetIndex, worldMatrix} on every tracked frame.
   * @param {Object} [options.cameraConfig] - getUserMedia video constraints.
   */
  constructor({
    container,
    targetSrc,
    overlay,
    onFound = null,
    onLost = null,
    onUpdate = null,
    cameraConfig = { facingMode: 'environment', width: 1280, height: 720 },
  }) {
    this.container = container;
    this.targetSrc = targetSrc;
    this.overlay = overlay;
    this.onFound = onFound;
    this.onLost = onLost;
    this.onUpdateCallback = onUpdate;
    this.cameraConfig = cameraConfig;

    this.video = null;       // camera <video>, created in _createVideo()
    this.controller = null;  // tracking Controller, created in _initController()
    this.isTracking = false; // true while a target is currently visible
    this.lastMatrix = null;  // most recent worldMatrix (column-major, 16 floats)
  }

  /**
   * Initialize the camera, load targets, and start AR tracking.
   * @returns {Promise<SimpleAR>} this, for chaining.
   */
  async start() {
    // 1. Create video element
    this._createVideo();

    // 2. Start camera
    await this._startCamera();

    // 3. Initialize controller
    this._initController();

    // 4. Load targets (supports single URL or array of URLs)
    const targets = Array.isArray(this.targetSrc) ? this.targetSrc : [this.targetSrc];
    await this.controller.addImageTargets(targets);
    this.controller.processVideo(this.video);

    return this;
  }

  /**
   * Stop AR tracking and release camera/controller resources.
   * Safe to call multiple times.
   */
  stop() {
    if (this.controller) {
      this.controller.dispose();
      this.controller = null;
    }
    if (this.video && this.video.srcObject) {
      this.video.srcObject.getTracks().forEach(track => track.stop());
      this.video.remove();
      this.video = null;
    }
    // Fix: if we stop mid-track, hide the overlay so it doesn't linger on screen,
    // and drop the stale matrix so a restart begins from a clean state.
    if (this.isTracking && this.overlay) {
      this.overlay.style.opacity = '0';
    }
    this.isTracking = false;
    this.lastMatrix = null;
  }

  // Build the full-bleed camera <video> and insert it under any existing children.
  _createVideo() {
    this.video = document.createElement('video');
    this.video.setAttribute('autoplay', '');
    this.video.setAttribute('playsinline', '');
    this.video.setAttribute('muted', '');
    // Fix: the `muted` content attribute only sets the *default* muted state.
    // Mobile autoplay policies (iOS Safari, Chrome) check the live IDL
    // properties, so set them too or play() may be rejected.
    this.video.muted = true;
    this.video.playsInline = true;
    this.video.style.cssText = `
      position: absolute;
      top: 0;
      left: 0;
      width: 100%;
      height: 100%;
      object-fit: cover;
      z-index: 0;
    `;
    this.container.style.position = 'relative';
    this.container.style.overflow = 'hidden';
    this.container.insertBefore(this.video, this.container.firstChild);
  }

  // Request the camera stream and wait until frame dimensions are known.
  async _startCamera() {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: this.cameraConfig
    });
    this.video.srcObject = stream;
    await this.video.play();

    // Wait for video dimensions to be available; the controller needs
    // videoWidth/videoHeight before it can be constructed.
    await new Promise(resolve => {
      if (this.video.videoWidth > 0) return resolve();
      this.video.onloadedmetadata = resolve;
    });
  }

  // Create the tracking controller wired to this instance's update handler.
  _initController() {
    this.controller = new Controller({
      inputWidth: this.video.videoWidth,
      inputHeight: this.video.videoHeight,
      onUpdate: (data) => this._handleUpdate(data)
    });
  }

  // Route controller matrix updates into found/lost transitions and overlay placement.
  _handleUpdate(data) {
    if (data.type !== 'updateMatrix') return;

    const { targetIndex, worldMatrix } = data;

    if (worldMatrix) {
      // Target found (edge-triggered: fire callbacks only on the transition)
      if (!this.isTracking) {
        this.isTracking = true;
        this.overlay && (this.overlay.style.opacity = '1');
        this.onFound && this.onFound({ targetIndex });
      }

      this.lastMatrix = worldMatrix;
      this._positionOverlay(worldMatrix);
      this.onUpdateCallback && this.onUpdateCallback({ targetIndex, worldMatrix });

    } else {
      // Target lost (edge-triggered)
      if (this.isTracking) {
        this.isTracking = false;
        this.overlay && (this.overlay.style.opacity = '0');
        this.onLost && this.onLost({ targetIndex });
      }
    }
  }

  // Map the tracked world matrix onto a 2D CSS transform for the overlay element.
  _positionOverlay(worldMatrix) {
    if (!this.overlay) return;

    const containerRect = this.container.getBoundingClientRect();
    const videoW = this.video.videoWidth;
    const videoH = this.video.videoHeight;

    // Calculate display area considering object-fit: cover
    const containerAspect = containerRect.width / containerRect.height;
    const videoAspect = videoW / videoH;

    let displayW, displayH, offsetX, offsetY;

    if (containerAspect > videoAspect) {
      // Container is wider - video fills width, crops height
      displayW = containerRect.width;
      displayH = containerRect.width / videoAspect;
      offsetX = 0;
      offsetY = (containerRect.height - displayH) / 2;
    } else {
      // Container is taller - video fills height, crops width
      displayH = containerRect.height;
      displayW = containerRect.height * videoAspect;
      offsetX = (containerRect.width - displayW) / 2;
      offsetY = 0;
    }

    const scaleX = displayW / videoW;
    const scaleY = displayH / videoH;

    // Extract position and rotation from world matrix
    // Matrix is column-major: [m0,m1,m2,m3, m4,m5,m6,m7, m8,m9,m10,m11, m12,m13,m14,m15]
    const tx = worldMatrix[12];
    const ty = worldMatrix[13];
    // In-plane scale/rotation from the first column (assumes mostly planar pose)
    const scale = Math.sqrt(worldMatrix[0] ** 2 + worldMatrix[1] ** 2);
    const rotation = Math.atan2(worldMatrix[1], worldMatrix[0]);

    // Convert from normalized coords to screen coords (Y axis flips: camera up vs CSS down)
    const screenX = offsetX + (videoW / 2 + tx) * scaleX;
    const screenY = offsetY + (videoH / 2 - ty) * scaleY;

    // Apply transform
    this.overlay.style.position = 'absolute';
    this.overlay.style.transformOrigin = 'center center';
    this.overlay.style.left = '0';
    this.overlay.style.top = '0';
    this.overlay.style.transform = `
      translate(${screenX}px, ${screenY}px)
      translate(-50%, -50%)
      scale(${scale * scaleX * 0.01})
      rotate(${-rotation}rad)
    `;
  }
}
|
|
202
|
+
|
|
203
|
+
export { SimpleAR };
|
package/src/index.ts
CHANGED