@srsergio/taptapp-ar 1.0.95 → 1.0.97

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,170 @@
1
+ /**
2
+ * TapTapp AR - Easy Tracking Configuration
3
+ *
4
+ * Simple API for configuring image target tracking with minimal setup.
5
+ * Based on the reliable configuration from reliability-test.html.
6
+ *
7
+ * @example
8
+ * ```typescript
9
+ * import { createTracker } from 'taptapp-ar';
10
+ *
11
+ * const tracker = await createTracker({
12
+ * targetSrc: './my-target.png',
13
+ * container: document.getElementById('ar-container')!,
14
+ * overlay: document.getElementById('overlay')!,
15
+ * callbacks: {
16
+ * onFound: () => console.log('Target found!'),
17
+ * onLost: () => console.log('Target lost'),
18
+ * onUpdate: (data) => console.log('Update:', data)
19
+ * }
20
+ * });
21
+ *
22
+ * // Start tracking from camera
23
+ * tracker.startCamera();
24
+ *
25
+ * // Or track from a video/canvas element
26
+ * tracker.startVideo(videoElement);
27
+ *
28
+ * // Stop tracking
29
+ * tracker.stop();
30
+ * ```
31
+ */
32
+ import { BioInspiredController } from './bio-inspired-controller.js';
33
/**
 * Tracking update data passed to onUpdate callback.
 */
export interface TrackingUpdate {
    /** Whether the target is currently being tracked (true exactly when worldMatrix is non-null) */
    isTracking: boolean;
    /** 4x4 world transformation matrix (column-major, for WebGL/Three.js); null while the target is lost */
    worldMatrix: number[] | null;
    /** 3x4 model-view transform matrix */
    modelViewTransform: number[][] | null;
    /** Screen coordinates of tracked feature points */
    screenCoords: Array<{
        x: number;
        y: number;
        id: number;
    }>;
    /** Reliability scores (0-1) for each tracked point */
    reliabilities: number[];
    /** Stability scores (0-1) for each tracked point */
    stabilities: number[];
    /** Average reliability across all points (0 when no points are tracked) */
    avgReliability: number;
    /** Average stability across all points (0 when no points are tracked) */
    avgStability: number;
    /** Reference to the controller for advanced usage */
    controller: BioInspiredController;
    /** Index of the tracked target (for multi-target tracking) */
    targetIndex: number;
    /** Target dimensions [width, height] */
    targetDimensions: [number, number];
}
64
/**
 * Tracking event callbacks.
 *
 * All callbacks are optional and are invoked from the controller's
 * frame-update handler while the tracker is active.
 */
export interface TrackingCallbacks {
    /**
     * Called once when the target transitions from lost to detected
     * @param data Initial tracking data
     */
    onFound?: (data: TrackingUpdate) => void;
    /**
     * Called once when the target transitions from tracked to lost
     * @param data Last known tracking data
     */
    onLost?: (data: TrackingUpdate) => void;
    /**
     * Called on every frame update while tracking
     * @param data Current tracking data
     */
    onUpdate?: (data: TrackingUpdate) => void;
    /**
     * Called during target compilation (only when the target image is
     * compiled on the fly, i.e. not a pre-compiled .taar / ArrayBuffer source)
     * @param progress Progress percentage (0-100)
     */
    onCompileProgress?: (progress: number) => void;
}
89
/**
 * Configuration options for the tracker.
 */
export interface TrackerConfig {
    /**
     * Source of the target image to track.
     * Can be a URL string, HTMLImageElement, ImageData, or ArrayBuffer (pre-compiled .taar).
     * A string URL whose path ends in `.taar` (query string ignored) is fetched
     * as pre-compiled data; any other string is treated as an image URL and
     * compiled on the fly.
     */
    targetSrc: string | HTMLImageElement | ImageData | ArrayBuffer;
    /**
     * Container element for the video/canvas display
     */
    container: HTMLElement;
    /**
     * Optional overlay element to position over the tracked target
     * (shown/hidden and transformed via matrix3d by the tracker)
     */
    overlay?: HTMLElement;
    /**
     * Tracking event callbacks
     */
    callbacks?: TrackingCallbacks;
    /**
     * Camera configuration (MediaStreamConstraints['video'])
     * @default { facingMode: 'environment', width: { ideal: 1280 }, height: { ideal: 960 } }
     */
    cameraConfig?: MediaStreamConstraints['video'];
    /**
     * Viewport width for processing
     * @default 1280
     */
    viewportWidth?: number;
    /**
     * Viewport height for processing
     * @default 960
     */
    viewportHeight?: number;
    /**
     * Enable debug mode for additional logging
     * @default false
     */
    debugMode?: boolean;
    /**
     * Enable bio-inspired perception optimizations
     * @default true
     */
    bioInspiredEnabled?: boolean;
    /**
     * Scale multiplier applied to the overlay transform
     * @default 1.0
     */
    scale?: number;
}
141
/**
 * Tracker instance returned by createTracker.
 */
export interface Tracker {
    /**
     * Start tracking from the device camera.
     * Falls back to the default camera if the configured one cannot be opened;
     * rejects if no camera is available at all.
     */
    startCamera(): Promise<void>;
    /** Start tracking from a video or canvas element */
    startVideo(source: HTMLVideoElement | HTMLCanvasElement): void;
    /** Stop tracking and release resources (camera stream, canvas, overlay) */
    stop(): void;
    /** Whether the tracker is currently active */
    readonly isActive: boolean;
    /** Whether a target is currently being tracked */
    readonly isTracking: boolean;
    /** The underlying BioInspiredController instance */
    readonly controller: BioInspiredController;
    /** Target dimensions [width, height] */
    readonly targetDimensions: [number, number];
    /** Get the projection matrix for 3D rendering */
    getProjectionMatrix(): number[];
}
162
/**
 * Create and configure an AR tracker with minimal setup.
 *
 * @param config Tracker configuration (target source, container, callbacks, …)
 * @returns A ready-to-start {@link Tracker} instance
 */
export declare function createTracker(config: TrackerConfig): Promise<Tracker>;
/**
 * Convenience function to create a tracker with camera autostart.
 *
 * @param config Tracker configuration
 * @returns The started {@link Tracker}
 */
export declare function startTracking(config: TrackerConfig): Promise<Tracker>;
export default createTracker;
@@ -0,0 +1,381 @@
1
+ /**
2
+ * TapTapp AR - Easy Tracking Configuration
3
+ *
4
+ * Simple API for configuring image target tracking with minimal setup.
5
+ * Based on the reliable configuration from reliability-test.html.
6
+ *
7
+ * @example
8
+ * ```typescript
9
+ * import { createTracker } from 'taptapp-ar';
10
+ *
11
+ * const tracker = await createTracker({
12
+ * targetSrc: './my-target.png',
13
+ * container: document.getElementById('ar-container')!,
14
+ * overlay: document.getElementById('overlay')!,
15
+ * callbacks: {
16
+ * onFound: () => console.log('Target found!'),
17
+ * onLost: () => console.log('Target lost'),
18
+ * onUpdate: (data) => console.log('Update:', data)
19
+ * }
20
+ * });
21
+ *
22
+ * // Start tracking from camera
23
+ * tracker.startCamera();
24
+ *
25
+ * // Or track from a video/canvas element
26
+ * tracker.startVideo(videoElement);
27
+ *
28
+ * // Stop tracking
29
+ * tracker.stop();
30
+ * ```
31
+ */
32
+ import { BioInspiredController } from './bio-inspired-controller.js';
33
+ import { OfflineCompiler } from '../compiler/offline-compiler.js';
34
+ import { projectToScreen } from '../core/utils/projection.js';
35
+ // ============================================================================
36
+ // Implementation
37
+ // ============================================================================
38
/**
 * Load an image element from a URL.
 *
 * The element is created with `crossOrigin = 'anonymous'` so its pixels can
 * later be read back from a canvas without tainting it.
 *
 * @param {string} url Image URL to load.
 * @returns {Promise<HTMLImageElement>} Resolves with the loaded element,
 *   rejects with an Error if the load fails.
 */
async function loadImage(url) {
    const image = new Image();
    image.crossOrigin = 'anonymous';
    return new Promise((resolve, reject) => {
        image.onload = () => {
            resolve(image);
        };
        image.onerror = () => {
            reject(new Error(`Failed to load image: ${url}`));
        };
        image.src = url;
    });
}
50
/**
 * Normalize a target source into raw pixel data.
 *
 * Accepts a URL string, an HTMLImageElement (loaded or still loading), or an
 * ImageData object, and resolves to `{ imageData, width, height }` suitable
 * for the offline compiler.
 *
 * @param {string | HTMLImageElement | ImageData} source Target image source.
 * @returns {Promise<{imageData: ImageData, width: number, height: number}>}
 * @throws {Error} If the image fails to load/decode or a 2D context is unavailable.
 */
async function getImageData(source) {
    let img;
    if (typeof source === 'string') {
        img = await loadImage(source);
    }
    else if (source instanceof HTMLImageElement) {
        img = source;
        if (!img.complete) {
            // Wait for the in-flight load without clobbering any onload/onerror
            // handlers the caller may have installed (use addEventListener with
            // { once: true } instead of assigning img.onload/img.onerror).
            await new Promise((resolve, reject) => {
                img.addEventListener('load', resolve, { once: true });
                img.addEventListener('error', () => reject(new Error('Failed to load provided image element')), { once: true });
            });
        }
    }
    else {
        // Already ImageData
        return { imageData: source, width: source.width, height: source.height };
    }
    // Use the intrinsic (decoded) size: `img.width` can reflect CSS/attribute
    // sizing, and a broken image reports an intrinsic width of 0.
    const width = img.naturalWidth || img.width;
    const height = img.naturalHeight || img.height;
    if (!width || !height) {
        throw new Error('Image has no decodable pixel data (broken or empty image)');
    }
    const canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    const ctx = canvas.getContext('2d');
    if (!ctx) {
        throw new Error('Could not acquire a 2D canvas context');
    }
    ctx.drawImage(img, 0, 0);
    const imageData = ctx.getImageData(0, 0, width, height);
    return { imageData, width, height };
}
79
/**
 * Compute a CSS matrix3d-compatible homography mapping the rectangle
 * (0,0)-(w,h) onto four screen-space corner points (from reliability-test.html).
 *
 * @param {number} w Target width in pixels.
 * @param {number} h Target height in pixels.
 * @param {{sx:number, sy:number}} p1 Upper-left corner in screen space.
 * @param {{sx:number, sy:number}} p2 Upper-right corner in screen space.
 * @param {{sx:number, sy:number}} p3 Lower-left corner in screen space.
 * @param {{sx:number, sy:number}} p4 Lower-right corner in screen space.
 * @returns {number[]} 16-element column-major matrix for `matrix3d(...)`.
 */
function solveHomography(w, h, p1, p2, p3, p4) {
    const { sx: x1, sy: y1 } = p1;
    const { sx: x2, sy: y2 } = p2;
    const { sx: x3, sy: y3 } = p3;
    const { sx: x4, sy: y4 } = p4;
    // Standard 4-point homography derivation: solve for the projective
    // coefficients g and hPersp, then the affine part a..f.
    const diffX1 = x2 - x4;
    const diffX2 = x3 - x4;
    const sumX = x1 - x2 + x4 - x3;
    const diffY1 = y2 - y4;
    const diffY2 = y3 - y4;
    const sumY = y1 - y2 + y4 - y3;
    const det = diffX1 * diffY2 - diffX2 * diffY1;
    const g = (sumX * diffY2 - diffX2 * sumY) / det;
    const hPersp = (diffX1 * sumY - sumX * diffY1) / det;
    const a = x2 - x1 + g * x2;
    const b = x3 - x1 + hPersp * x3;
    const d = y2 - y1 + g * y2;
    const e = y3 - y1 + hPersp * y3;
    // Embed the 3x3 homography into a 4x4 column-major matrix, normalizing
    // the first two columns by the target dimensions so the transform maps
    // CSS pixels of a w×h element.
    return [
        a / w, d / w, 0, g / w,
        b / h, e / h, 0, hPersp / h,
        0, 0, 1, 0,
        x1, y1, 0, 1
    ];
}
105
/**
 * Create and configure an AR tracker with minimal setup.
 *
 * Resolves the target source (raw ArrayBuffer, pre-compiled .taar URL, or an
 * image that is compiled on the fly), builds a BioInspiredController wired to
 * the user's callbacks, and returns a Tracker facade over it.
 *
 * @param {TrackerConfig} config Tracker configuration.
 * @returns {Promise<Tracker>} A ready-to-start tracker instance.
 * @throws {Error} If a .taar fetch fails or camera access is denied (from startCamera).
 */
export async function createTracker(config) {
    const { targetSrc, container, overlay, callbacks = {}, cameraConfig = {
        facingMode: 'environment',
        width: { ideal: 1280 },
        height: { ideal: 960 }
    }, viewportWidth = 1280, viewportHeight = 960, debugMode = false, bioInspiredEnabled = true, scale = 1.0 } = config;
    // State shared by the closures below
    let isActive = false;          // tracker started and processing frames
    let wasTracking = false;       // tracking state from the previous update (edge detection)
    let mediaStream = null;        // camera stream, only set by startCamera
    let videoElement = null;       // hidden <video> fed by the camera stream
    let targetDimensions = [0, 0]; // [width, height] of the loaded target
    // Create video canvas for camera input; it fills the container behind any content
    const videoCanvas = document.createElement('canvas');
    videoCanvas.width = viewportWidth;
    videoCanvas.height = viewportHeight;
    videoCanvas.style.width = '100%';
    videoCanvas.style.height = '100%';
    videoCanvas.style.objectFit = 'cover';
    videoCanvas.style.position = 'absolute';
    videoCanvas.style.top = '0';
    videoCanvas.style.left = '0';
    videoCanvas.style.zIndex = '0';
    const videoCtx = videoCanvas.getContext('2d');
    // Setup overlay styles if provided; hidden until the target is tracked
    if (overlay) {
        overlay.style.position = 'absolute';
        overlay.style.transformOrigin = '0 0';
        overlay.style.display = 'none';
        overlay.style.pointerEvents = 'none';
    }
    // Compile target or load pre-compiled data
    let compiledBuffer;
    if (targetSrc instanceof ArrayBuffer) {
        compiledBuffer = targetSrc;
    }
    else if (typeof targetSrc === 'string' && targetSrc.toLowerCase().split('?')[0].endsWith('.taar')) {
        // Pre-compiled .taar file URL (query string stripped before the extension check)
        if (debugMode)
            console.log(`[TapTapp AR] Fetching pre-compiled target: ${targetSrc}`);
        const response = await fetch(targetSrc);
        if (!response.ok)
            throw new Error(`Failed to fetch .taar file: ${response.statusText}`);
        compiledBuffer = await response.arrayBuffer();
    }
    else {
        // Source is an image or ImageData that needs compilation
        if (debugMode)
            console.log('[TapTapp AR] Compiling image target...');
        const { imageData, width, height } = await getImageData(targetSrc);
        targetDimensions = [width, height];
        const compiler = new OfflineCompiler();
        await compiler.compileImageTargets([{ width, height, data: imageData.data }], (progress) => callbacks.onCompileProgress?.(progress));
        const exported = compiler.exportData();
        // NOTE(review): assumes exportData() returns a typed-array view; the
        // slice copies exactly the bytes covered by the view — confirm upstream.
        compiledBuffer = exported.buffer.slice(exported.byteOffset, exported.byteOffset + exported.byteLength);
    }
    // Create controller with bio-inspired perception.
    // handleControllerUpdate is referenced before its declaration below; that
    // is safe because function declarations hoist within this scope.
    const controller = new BioInspiredController({
        inputWidth: viewportWidth,
        inputHeight: viewportHeight,
        debugMode,
        bioInspired: {
            enabled: bioInspiredEnabled,
            aggressiveSkipping: false // Keep stable for real-world conditions
        },
        onUpdate: (data) => handleControllerUpdate(data)
    });
    // Load compiled targets; prefer the dimensions reported by the controller
    // (authoritative for pre-compiled buffers, where no image was decoded here)
    const loadResult = await controller.addImageTargetsFromBuffer(compiledBuffer);
    if (loadResult.dimensions && loadResult.dimensions[0]) {
        targetDimensions = loadResult.dimensions[0];
    }
    /**
     * Handle controller updates and dispatch to user callbacks.
     * Only 'updateMatrix' events are processed; found/lost transitions are
     * derived by comparing against wasTracking.
     */
    function handleControllerUpdate(data) {
        if (data.type === 'processDone')
            return;
        if (data.type !== 'updateMatrix')
            return;
        const { targetIndex, worldMatrix, modelViewTransform, screenCoords = [], reliabilities = [], stabilities = [] } = data;
        // A null worldMatrix means the target is not visible this frame
        const isTracking = worldMatrix !== null;
        // Calculate averages (0 when no points are tracked)
        const avgReliability = reliabilities.length > 0
            ? reliabilities.reduce((a, b) => a + b, 0) / reliabilities.length
            : 0;
        const avgStability = stabilities.length > 0
            ? stabilities.reduce((a, b) => a + b, 0) / stabilities.length
            : 0;
        const updateData = {
            isTracking,
            worldMatrix,
            modelViewTransform,
            screenCoords,
            reliabilities,
            stabilities,
            avgReliability,
            avgStability,
            controller,
            targetIndex,
            targetDimensions
        };
        // Dispatch state change callbacks on the lost->found / found->lost edges
        if (isTracking && !wasTracking) {
            callbacks.onFound?.(updateData);
        }
        else if (!isTracking && wasTracking) {
            callbacks.onLost?.(updateData);
        }
        // Always call onUpdate when tracking (including the final lost frame)
        if (isTracking || wasTracking) {
            callbacks.onUpdate?.(updateData);
        }
        // Update overlay position if provided; hide it as soon as tracking drops
        if (overlay && modelViewTransform && worldMatrix) {
            positionOverlay(modelViewTransform);
        }
        else if (overlay && !isTracking) {
            overlay.style.display = 'none';
        }
        wasTracking = isTracking;
    }
    /**
     * Position the overlay element using a homography transform: project the
     * four target corners to screen space, solve the matrix3d that maps the
     * overlay rectangle onto them, and apply it.
     */
    function positionOverlay(modelViewTransform) {
        if (!overlay)
            return;
        const [markerW, markerH] = targetDimensions;
        const proj = controller.projectionTransform;
        const containerRect = container.getBoundingClientRect();
        // Get corners in screen space (UL, UR, LL, LR of the target image)
        const pUL = projectToScreen(0, 0, 0, modelViewTransform, proj, viewportWidth, viewportHeight, containerRect, false);
        const pUR = projectToScreen(markerW, 0, 0, modelViewTransform, proj, viewportWidth, viewportHeight, containerRect, false);
        const pLL = projectToScreen(0, markerH, 0, modelViewTransform, proj, viewportWidth, viewportHeight, containerRect, false);
        const pLR = projectToScreen(markerW, markerH, 0, modelViewTransform, proj, viewportWidth, viewportHeight, containerRect, false);
        const matrix = solveHomography(markerW, markerH, pUL, pUR, pLL, pLR);
        // Size the overlay to the target so the homography maps it 1:1
        overlay.style.width = `${markerW}px`;
        overlay.style.height = `${markerH}px`;
        // Apply custom scale if provided
        let matrixString = matrix.join(',');
        if (scale !== 1.0) {
            overlay.style.transform = `matrix3d(${matrixString}) scale(${scale})`;
        }
        else {
            overlay.style.transform = `matrix3d(${matrixString})`;
        }
        overlay.style.display = 'block';
    }
    /**
     * Draw a video or canvas frame to the processing canvas, scaled to the
     * viewport size the controller was configured with.
     * NOTE(review): both branches are currently identical; the split only
     * exists to distinguish the source types.
     */
    function drawVideoToCanvas(source) {
        if (source instanceof HTMLVideoElement) {
            videoCtx.drawImage(source, 0, 0, viewportWidth, viewportHeight);
        }
        else {
            videoCtx.drawImage(source, 0, 0, viewportWidth, viewportHeight);
        }
    }
    // ========================================================================
    // Public API
    // ========================================================================
    const tracker = {
        async startCamera() {
            if (isActive)
                return;
            try {
                // Try environment mode first (mobile back camera)
                try {
                    mediaStream = await navigator.mediaDevices.getUserMedia({
                        video: cameraConfig,
                        audio: false
                    });
                }
                catch (e) {
                    console.warn('[TapTapp AR] Failed to open environment camera, falling back to default:', e);
                    // Fallback to any camera
                    mediaStream = await navigator.mediaDevices.getUserMedia({
                        video: true,
                        audio: false
                    });
                }
                // Hidden video element: frames are copied to videoCanvas for display/processing
                videoElement = document.createElement('video');
                videoElement.srcObject = mediaStream;
                videoElement.playsInline = true;
                videoElement.muted = true;
                await videoElement.play();
                // Add video canvas to container (at the beginning to be behind)
                container.style.position = 'relative';
                if (container.firstChild) {
                    container.insertBefore(videoCanvas, container.firstChild);
                }
                else {
                    container.appendChild(videoCanvas);
                }
                isActive = true;
                // Start frame-copy loop; exits when stop() clears isActive/videoElement
                const processLoop = () => {
                    if (!isActive || !videoElement)
                        return;
                    drawVideoToCanvas(videoElement);
                    requestAnimationFrame(processLoop);
                };
                processLoop();
                controller.processVideo(videoCanvas);
            }
            catch (error) {
                console.error('[TapTapp AR] Camera access failed:', error);
                throw error;
            }
        },
        startVideo(source) {
            if (isActive)
                return;
            container.style.position = 'relative';
            container.appendChild(videoCanvas);
            isActive = true;
            // Start frame-copy loop from the caller-supplied video/canvas source
            const processLoop = () => {
                if (!isActive)
                    return;
                drawVideoToCanvas(source);
                requestAnimationFrame(processLoop);
            };
            processLoop();
            controller.processVideo(videoCanvas);
        },
        stop() {
            // Tear down in order: stop processing, release camera, detach DOM nodes
            isActive = false;
            controller.stopProcessVideo();
            if (mediaStream) {
                mediaStream.getTracks().forEach(track => track.stop());
                mediaStream = null;
            }
            if (videoElement) {
                videoElement.srcObject = null;
                videoElement = null;
            }
            if (videoCanvas.parentNode) {
                videoCanvas.parentNode.removeChild(videoCanvas);
            }
            if (overlay) {
                overlay.style.display = 'none';
            }
        },
        get isActive() {
            return isActive;
        },
        get isTracking() {
            return wasTracking;
        },
        get controller() {
            return controller;
        },
        get targetDimensions() {
            return targetDimensions;
        },
        getProjectionMatrix() {
            return controller.getProjectionMatrix();
        }
    };
    return tracker;
}
372
/**
 * Convenience helper: build a tracker and immediately open the camera.
 *
 * @param {TrackerConfig} config Tracker configuration.
 * @returns {Promise<Tracker>} The tracker, already running from the camera.
 * @throws Propagates createTracker/startCamera failures (e.g. camera denied).
 */
export async function startTracking(config) {
    const instance = await createTracker(config);
    await instance.startCamera();
    return instance;
}
// Default export for easy importing
export default createTracker;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@srsergio/taptapp-ar",
3
- "version": "1.0.95",
3
+ "version": "1.0.97",
4
4
  "description": "Ultra-fast Augmented Reality (AR) SDK for Node.js and Browser. Image tracking with 100% pure JavaScript, zero-dependencies, and high-performance compilation.",
5
5
  "keywords": [
6
6
  "augmented reality",
package/src/index.ts CHANGED
@@ -3,5 +3,5 @@ export * from "./react/TaptappAR.js";
3
3
  export * from "./react/use-ar.js";
4
4
  export * from "./compiler/offline-compiler.js";
5
5
  export { Controller } from "./runtime/controller.js";
6
- export { SimpleAR } from "./runtime/simple-ar.js";
6
+ export { createTracker, startTracking } from "./runtime/track.js";
7
7
  export * as protocol from "./core/protocol.js";
@@ -15,7 +15,7 @@ export const TaptappAR: React.FC<TaptappARProps> = ({
15
15
  showScanningOverlay = true,
16
16
  showErrorOverlay = true
17
17
  }) => {
18
- const { containerRef, overlayRef, status, toggleVideo, trackedPoints } = useAR(config);
18
+ const { containerRef, overlayRef, status, toggleVideo, trackedPoints, error } = useAR(config);
19
19
 
20
20
  // Simple heuristic to determine if it's a video or image
21
21
  // based on the presence of videoSrc and common extensions
@@ -46,13 +46,24 @@ export const TaptappAR: React.FC<TaptappARProps> = ({
46
46
  </div>
47
47
  )}
48
48
 
49
+ {/* Compiling Overlay (for JIT image processing) */}
50
+ {status === "compiling" && (
51
+ <div className="taptapp-ar-overlay taptapp-ar-compiling" style={{ background: 'rgba(0,0,0,0.9)' }}>
52
+ <div className="scanning-content">
53
+ <div className="loading-spinner"></div>
54
+ <p className="scanning-text" style={{ marginTop: '20px' }}>Preparando motor AR...</p>
55
+ <p style={{ fontSize: '0.8rem', opacity: 0.6 }}>Compilando imagen de referencia</p>
56
+ </div>
57
+ </div>
58
+ )}
59
+
49
60
  {/* Error Overlay */}
50
61
  {showErrorOverlay && status === "error" && (
51
62
  <div className="taptapp-ar-overlay taptapp-ar-error">
52
63
  <div className="error-content">
53
64
  <span className="error-icon">⚠️</span>
54
65
  <p className="error-title">No se pudo iniciar AR</p>
55
- <p className="error-text">Verifica los permisos de cámara</p>
66
+ <p className="error-text">{error || "Verifica los permisos de cámara"}</p>
56
67
  <button className="retry-btn" onClick={() => window.location.reload()}>
57
68
  Reintentar
58
69
  </button>
@@ -180,6 +191,17 @@ export const TaptappAR: React.FC<TaptappARProps> = ({
180
191
  font-weight: 500;
181
192
  letter-spacing: 0.5px;
182
193
  }
194
+ .loading-spinner {
195
+ width: 40px;
196
+ height: 40px;
197
+ border: 3px solid rgba(255,255,255,0.1);
198
+ border-radius: 50%;
199
+ border-top-color: #00e5ff;
200
+ animation: spin 1s ease-in-out infinite;
201
+ }
202
+ @keyframes spin {
203
+ to { transform: rotate(360deg); }
204
+ }
183
205
  .error-icon { font-size: 3rem; margin-bottom: 10px; }
184
206
  .error-title { font-size: 1.2rem; font-weight: bold; margin: 0; }
185
207
  .error-text { opacity: 0.8; margin: 5px 0 20px; }
@@ -195,12 +217,15 @@ export const TaptappAR: React.FC<TaptappARProps> = ({
195
217
  }
196
218
  .retry-btn:active { transform: scale(0.95); }
197
219
  .taptapp-ar-overlay-element {
198
- display: block;
199
- width: 100%;
220
+ display: none; /* Controlled by tracker */
221
+ position: absolute;
222
+ top: 0;
223
+ left: 0;
224
+ width: auto;
200
225
  height: auto;
201
- opacity: 0;
202
226
  pointer-events: none;
203
- transition: opacity 0.3s ease;
227
+ z-index: 10;
228
+ /* Will be positioned via matrix3d by track.ts */
204
229
  }
205
230
  .taptapp-ar-points-overlay {
206
231
  position: absolute;
@@ -1,7 +1,7 @@
1
1
  export interface ARConfig {
2
2
  cardId: string;
3
3
  targetImageSrc: string;
4
- targetTaarSrc: string;
4
+ targetTaarSrc?: string;
5
5
  videoSrc: string;
6
6
  videoWidth: number;
7
7
  videoHeight: number;