@srsergio/taptapp-ar 1.0.21 → 1.0.23
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/dist/compiler/simple-ar.js +39 -20
- package/package.json +1 -1
- package/src/compiler/simple-ar.js +42 -22
package/dist/compiler/simple-ar.js
CHANGED

@@ -131,51 +131,70 @@ class SimpleAR {
       return;
     const [markerW, markerH] = this.markerDimensions[targetIndex];
     const containerRect = this.container.getBoundingClientRect();
+    // 1. Raw Video Dimensions (Sensor Frame)
     const videoW = this.video.videoWidth;
     const videoH = this.video.videoHeight;
-    //
+    // 2. Detect if screen orientation is different from video buffer
+    const isPortrait = containerRect.height > containerRect.width;
+    const isVideoLandscape = videoW > videoH;
+    const needsRotation = isPortrait && isVideoLandscape;
+    // Effective dimensions of the display buffer
+    const effectiveBufferW = needsRotation ? videoH : videoW;
+    const effectiveBufferH = needsRotation ? videoW : videoH;
     const containerAspect = containerRect.width / containerRect.height;
-    const
+    const bufferAspect = effectiveBufferW / effectiveBufferH;
     let displayW, displayH, offsetX, offsetY;
-    if (containerAspect >
+    if (containerAspect > bufferAspect) {
       displayW = containerRect.width;
-      displayH = containerRect.width /
+      displayH = containerRect.width / bufferAspect;
       offsetX = 0;
       offsetY = (containerRect.height - displayH) / 2;
     }
     else {
       displayH = containerRect.height;
-      displayW = containerRect.height *
+      displayW = containerRect.height * bufferAspect;
       offsetX = (containerRect.width - displayW) / 2;
       offsetY = 0;
     }
-    const scaleX = displayW /
-    const scaleY = displayH /
-    //
+    const scaleX = displayW / effectiveBufferW;
+    const scaleY = displayH / effectiveBufferH;
+    // 3. Focal Length (MUST match Controller.js projection)
+    // Controller.js uses inputHeight / 2 as the vertical reference.
+    const f = videoH / 2 / Math.tan((45.0 * Math.PI / 180) / 2);
+    // 4. Project marker center into camera space
     const tx = mVT[0][0] * (markerW / 2) + mVT[0][1] * (markerH / 2) + mVT[0][3];
     const ty = mVT[1][0] * (markerW / 2) + mVT[1][1] * (markerH / 2) + mVT[1][3];
     const tz = mVT[2][0] * (markerW / 2) + mVT[2][1] * (markerH / 2) + mVT[2][3];
-    //
-
-
-
-
-
-
-
+    // 5. Map Camera coordinates to Screen coordinates
+    let screenX, screenY;
+    if (needsRotation) {
+      // Mapping Sensor coordinates to Rotated Screen coordinates
+      // Sensor +X -> Screen +Y
+      // Sensor +Y -> Screen -X (relative to logical center)
+      screenX = offsetX + (effectiveBufferW / 2 + (ty * f / tz)) * scaleX;
+      screenY = offsetY + (effectiveBufferH / 2 - (tx * f / tz)) * scaleY;
+    }
+    else {
+      screenX = offsetX + (effectiveBufferW / 2 + (tx * f / tz)) * scaleX;
+      screenY = offsetY + (effectiveBufferH / 2 + (ty * f / tz)) * scaleY;
+    }
+    // 6. Rotation: sync with CSS transform
+    //atan2 gives angle of world X-axis in camera space.
+    let rotation = Math.atan2(mVT[1][0], mVT[0][0]);
+    if (needsRotation) {
+      rotation += Math.PI / 2; // Compensate for the 90deg rotation of the video element
+    }
+    // 7. Scale calculation
     const matrixScale = Math.sqrt(mVT[0][0] ** 2 + mVT[1][0] ** 2);
-    // Perspective scale: 1 world pixel = (f/tz) screen pixels
     const perspectiveScale = (f / tz) * scaleX;
-    // Detect overlay intrinsic size
     const intrinsicWidth = (this.overlay instanceof HTMLVideoElement)
       ? this.overlay.videoWidth
       : (this.overlay instanceof HTMLImageElement ? this.overlay.naturalWidth : 0);
-    // Final scale = (Target Width in Pixels on screen) / (Overlay Intrinsic Width) * scaleMultiplier
     const baseScale = intrinsicWidth > 0
       ? (matrixScale * markerW * perspectiveScale) / intrinsicWidth
       : 1.0;
     const finalScale = baseScale * this.scaleMultiplier;
-    //
+    // Apply transform
     this.overlay.style.width = 'auto';
     this.overlay.style.height = 'auto';
     this.overlay.style.position = 'absolute';
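For readers following the new placement math: the added block cover-fits the camera buffer into the container and then pinhole-projects the marker center into CSS pixels. Below is a minimal standalone sketch of the non-rotated path (steps 1-5); the helper name projectToScreen and its parameter list are invented for illustration, while the 45-degree FOV and the formulas themselves are taken from the diff.

// Minimal sketch of steps 1-5 for the non-rotated case. The function name
// and signature are hypothetical; only the formulas mirror the diff above.
function projectToScreen(tx, ty, tz, videoW, videoH, containerW, containerH) {
  // Cover-fit the video buffer into the container (the overflowing axis is cropped).
  const containerAspect = containerW / containerH;
  const bufferAspect = videoW / videoH;
  let displayW, displayH, offsetX, offsetY;
  if (containerAspect > bufferAspect) {
    displayW = containerW;
    displayH = containerW / bufferAspect;
    offsetX = 0;
    offsetY = (containerH - displayH) / 2; // negative when cropped vertically
  } else {
    displayH = containerH;
    displayW = containerH * bufferAspect;
    offsetX = (containerW - displayW) / 2; // negative when cropped horizontally
    offsetY = 0;
  }
  const scaleX = displayW / videoW;
  const scaleY = displayH / videoH;

  // Pinhole projection: focal length in buffer pixels from a 45-degree vertical FOV.
  const f = videoH / 2 / Math.tan((45.0 * Math.PI / 180) / 2);

  // Image point in buffer pixels, then mapped into CSS pixels of the container.
  const screenX = offsetX + (videoW / 2 + (tx * f / tz)) * scaleX;
  const screenY = offsetY + (videoH / 2 + (ty * f / tz)) * scaleY;
  return { screenX, screenY };
}

// Example: a point 0.1 units right of the optical axis, 1 unit in front of the
// camera, on a 1280x720 buffer shown in an 800x600 container.
// projectToScreen(0.1, 0, 1, 1280, 720, 800, 600) -> { screenX: ~472, screenY: 300 }

Note that the cover fit keeps scaleX and scaleY equal, which is why the diff can later use scaleX alone when computing the perspective scale.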
package/package.json
CHANGED

package/src/compiler/simple-ar.js
CHANGED
@@ -155,63 +155,83 @@ class SimpleAR {
 
     const [markerW, markerH] = this.markerDimensions[targetIndex];
     const containerRect = this.container.getBoundingClientRect();
+
+    // 1. Raw Video Dimensions (Sensor Frame)
     const videoW = this.video.videoWidth;
     const videoH = this.video.videoHeight;
 
-    //
+    // 2. Detect if screen orientation is different from video buffer
+    const isPortrait = containerRect.height > containerRect.width;
+    const isVideoLandscape = videoW > videoH;
+    const needsRotation = isPortrait && isVideoLandscape;
+
+    // Effective dimensions of the display buffer
+    const effectiveBufferW = needsRotation ? videoH : videoW;
+    const effectiveBufferH = needsRotation ? videoW : videoH;
+
     const containerAspect = containerRect.width / containerRect.height;
-    const
+    const bufferAspect = effectiveBufferW / effectiveBufferH;
 
     let displayW, displayH, offsetX, offsetY;
-
-    if (containerAspect > videoAspect) {
+    if (containerAspect > bufferAspect) {
       displayW = containerRect.width;
-      displayH = containerRect.width /
+      displayH = containerRect.width / bufferAspect;
       offsetX = 0;
       offsetY = (containerRect.height - displayH) / 2;
     } else {
       displayH = containerRect.height;
-      displayW = containerRect.height *
+      displayW = containerRect.height * bufferAspect;
       offsetX = (containerRect.width - displayW) / 2;
       offsetY = 0;
     }
 
-    const scaleX = displayW /
-    const scaleY = displayH /
+    const scaleX = displayW / effectiveBufferW;
+    const scaleY = displayH / effectiveBufferH;
+
+    // 3. Focal Length (MUST match Controller.js projection)
+    // Controller.js uses inputHeight / 2 as the vertical reference.
+    const f = videoH / 2 / Math.tan((45.0 * Math.PI / 180) / 2);
 
-    // Project
+    // 4. Project marker center into camera space
     const tx = mVT[0][0] * (markerW / 2) + mVT[0][1] * (markerH / 2) + mVT[0][3];
     const ty = mVT[1][0] * (markerW / 2) + mVT[1][1] * (markerH / 2) + mVT[1][3];
     const tz = mVT[2][0] * (markerW / 2) + mVT[2][1] * (markerH / 2) + mVT[2][3];
 
-    //
-
+    // 5. Map Camera coordinates to Screen coordinates
+    let screenX, screenY;
+    if (needsRotation) {
+      // Mapping Sensor coordinates to Rotated Screen coordinates
+      // Sensor +X -> Screen +Y
+      // Sensor +Y -> Screen -X (relative to logical center)
+      screenX = offsetX + (effectiveBufferW / 2 + (ty * f / tz)) * scaleX;
+      screenY = offsetY + (effectiveBufferH / 2 - (tx * f / tz)) * scaleY;
+    } else {
+      screenX = offsetX + (effectiveBufferW / 2 + (tx * f / tz)) * scaleX;
+      screenY = offsetY + (effectiveBufferH / 2 + (ty * f / tz)) * scaleY;
+    }
 
-    //
-    //
-
-
+    // 6. Rotation: sync with CSS transform
+    //atan2 gives angle of world X-axis in camera space.
+    let rotation = Math.atan2(mVT[1][0], mVT[0][0]);
+    if (needsRotation) {
+      rotation += Math.PI / 2; // Compensate for the 90deg rotation of the video element
+    }
 
-    //
-    const rotation = Math.atan2(mVT[1][0], mVT[0][0]);
+    // 7. Scale calculation
     const matrixScale = Math.sqrt(mVT[0][0] ** 2 + mVT[1][0] ** 2);
-
-    // Perspective scale: 1 world pixel = (f/tz) screen pixels
     const perspectiveScale = (f / tz) * scaleX;
 
-    // Detect overlay intrinsic size
     const intrinsicWidth = (this.overlay instanceof HTMLVideoElement)
       ? this.overlay.videoWidth
       : (this.overlay instanceof HTMLImageElement ? this.overlay.naturalWidth : 0);
 
-    // Final scale = (Target Width in Pixels on screen) / (Overlay Intrinsic Width) * scaleMultiplier
     const baseScale = intrinsicWidth > 0
       ? (matrixScale * markerW * perspectiveScale) / intrinsicWidth
       : 1.0;
 
     const finalScale = baseScale * this.scaleMultiplier;
 
-    //
+    // Apply transform
     this.overlay.style.width = 'auto';
     this.overlay.style.height = 'auto';
     this.overlay.style.position = 'absolute';
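The needsRotation branch and the scale chain (steps 5-7) are the same in both files; read as one standalone helper they look like the sketch below. placeOverlayPortrait and its signature are invented for the example; the axis swap, the Math.PI / 2 offset, and the scale computation mirror the added lines, with mVT being the marker pose matrix used throughout the diff.

// Illustrative sketch of the portrait branch (steps 5-7). Hypothetical helper,
// not an exported API of @srsergio/taptapp-ar.
function placeOverlayPortrait(mVT, tx, ty, tz, f, effW, effH,
                              scaleX, scaleY, offsetX, offsetY,
                              markerW, overlayIntrinsicW, scaleMultiplier) {
  // The landscape sensor frame is displayed rotated 90deg on a portrait screen:
  // sensor +X maps to screen +Y, sensor +Y maps to screen -X, measured from
  // the center of the effective (swapped) buffer.
  const screenX = offsetX + (effW / 2 + (ty * f / tz)) * scaleX;
  const screenY = offsetY + (effH / 2 - (tx * f / tz)) * scaleY;

  // In-plane angle of the marker's X axis in camera space, plus the 90deg
  // the video element itself is rotated by.
  const rotation = Math.atan2(mVT[1][0], mVT[0][0]) + Math.PI / 2;

  // Overlay scale: marker width scaled by the pose matrix, projected to buffer
  // pixels (f / tz), converted to CSS pixels (scaleX), then normalized by the
  // overlay's intrinsic width.
  const matrixScale = Math.sqrt(mVT[0][0] ** 2 + mVT[1][0] ** 2);
  const perspectiveScale = (f / tz) * scaleX;
  const baseScale = overlayIntrinsicW > 0
    ? (matrixScale * markerW * perspectiveScale) / overlayIntrinsicW
    : 1.0;
  const finalScale = baseScale * scaleMultiplier;

  return { screenX, screenY, rotation, finalScale };
}

Swapping tx and ty (and negating the mapped X axis) keeps the overlay anchored to the marker even though the CSS pixel grid no longer lines up with the sensor's pixel grid, and the extra Math.PI / 2 keeps the overlay's CSS rotation in sync with the rotated video element.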