@srsergio/taptapp-ar 1.0.0 → 1.0.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +99 -25
- package/dist/index.d.ts +0 -2
- package/dist/index.js +0 -2
- package/package.json +9 -1
- package/src/index.ts +0 -2
- package/src/react/AREditor.tsx +0 -394
- package/src/react/ProgressDialog.tsx +0 -185
package/README.md
CHANGED
````diff
@@ -1,62 +1,136 @@
 # @srsergio/taptapp-ar
 
-AR
+🚀 **TapTapp AR** is a high-performance Augmented Reality (AR) toolkit specifically designed for **Astro** and **Node.js** environments. It provides a seamless way to integrate image tracking, video overlays, and an offline compiler for image targets.
 
-
+Built on top of **MindAR**, **A-Frame**, and **TensorFlow.js**, this package is optimized for both frontend visualization in Astro and backend/serverless image target compilation.
 
-
-
-
-
+---
+
+## 🌟 Key Features
+
+- 🚀 **Astro Native**: Optimized components for Astro's Islands architecture.
+- 🖼️ **Offline Compiler**: A powerful server-side compiler that generates `.mind` target files without a browser.
+- ⚡ **Optimized Performance**: Pre-warmed TensorFlow backends and adaptive memory management for serverless environments (Vercel, AWS Lambda).
+- 📱 **Mobile First**: Designed for smooth performance on iOS (Safari) and Android.
+
+---
 
-## Installation
+## 🛠 Installation
 
 ```bash
 npm install @srsergio/taptapp-ar
 ```
 
-
+### 📦 Peer Dependencies
+
+Make sure you have the following packages installed in your host project:
+
+```bash
+npm install three aframe astro
+```
+
+Note: If you are using the `OfflineCompiler` in a Node.js environment, ensure you have the necessary TensorFlow.js backends installed.
+
+---
 
-##
+## 🚀 Astro Integration Guide
 
-
+The easiest way to display AR content is using the `ARVideoTrigger` component.
+
+### Usage
 
 ```astro
 ---
 import ARVideoTrigger from '@srsergio/taptapp-ar/astro/ARVideoTrigger.astro';
 
 const config = {
-
-
-
+  cardId: 'unique-id',
+  targetImageSrc: 'https://cdn.example.com/target.jpg',
+  targetMindSrc: 'https://cdn.example.com/targets.mind',
+  videoSrc: 'https://cdn.example.com/overlay.mp4',
   videoWidth: 1280,
   videoHeight: 720,
-  scale: 1,
+  scale: 1.2,
 };
 ---
 
 <ARVideoTrigger config={config} />
 ```
 
-###
+### `ARVideoTrigger` Props (Config)
 
-
-
+| Prop | Type | Description |
+| :--- | :--- | :--- |
+| `cardId` | `string` | Unique identifier for tracking/session. |
+| `targetImageSrc` | `string` | URL of the image being tracked. |
+| `targetMindSrc` | `string` | URL of the compiled `.mind` target file. |
+| `videoSrc` | `string` | URL of the video to overlay on the target. |
+| `videoWidth` | `number` | Original width of the video. |
+| `videoHeight` | `number` | Original height of the video. |
+| `scale` | `number` | Scaling factor for the video overlay (Default: `1`). |
 
-
-
-
-
+---
+
+## 🖼 Offline Compiler Guide
+
+The `OfflineCompiler` allows you to compile image targets on the backend. This is the heart of the TapTapp asset pipeline.
 
-###
+### Why use the Offline Compiler?
+Standard MindAR tools require a browser canvas to compile images. This compiler uses **TensorFlow.js** backends (CPU/WebGL/Node) to perform the computation as a background task.
+
+### Basic Usage
 
 ```typescript
 import { OfflineCompiler } from '@srsergio/taptapp-ar';
 
 const compiler = new OfflineCompiler();
-
+
+async function compile(imageBuffer: Buffer) {
+  // targetImages is an array of images to compile into the same .mind file
+  const result = await compiler.compileTrack({
+    targetImages: [imageBuffer],
+    progressCallback: (progress) => console.log(`Compiling: ${progress}%`),
+    basePercent: 0
+  });
+
+  // result is the compiled target data
+  return result;
+}
+```
+
+### ⚡ Serverless Optimization
+The compiler is optimized for environments like Vercel Functions:
+- **Early Initialization**: TensorFlow is pre-warmed on module import.
+- **Memory Management**: Aggressive garbage collection (`tf.dispose()`) and tensor cleanup.
+- **Batch Processing**: Automatically splits work to avoid memory spikes.
+
+---
+
+## ❓ Troubleshooting
+
+| Issue | Solution |
+| :--- | :--- |
+| **Camera not starting** | Ensure your site is served via `HTTPS`. Browsers block camera access on insecure origins. |
+| **Video not playing** | iOS Safari requires `muted` and `playsinline` attributes for autoplaying videos. Our components handle this by default. |
+| **CORS errors** | Ensure that `targetImageSrc`, `targetMindSrc`, and `videoSrc` have CORS headers enabled (`Access-Control-Allow-Origin: *`). |
+| **Memory Outage on Serverless** | Reduce the resolution of your target images. High-res images increase memory pressure during compilation. |
+
+---
+
+## 🏗 Development
+
+```bash
+# Install dependencies
+npm install
+
+# Build the package
+npm run build
 ```
 
-
+The package uses **TypeScript** and exports both ESM and CJS compatible builds located in the `dist` folder.
+
+---
+
+## 📄 License
 
-MIT
+MIT © [srsergiolazaro](https://github.com/srsergiolazaro)
````
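The Peer Dependencies note and the Serverless Optimization bullets in the new README describe the TensorFlow.js setup and memory behaviour without showing the surrounding Node.js code. The sketch below illustrates what that wiring could look like; it is an assumption-level example, not part of the package's documented API: the backend packages (`@tensorflow/tfjs`, `@tensorflow/tfjs-backend-cpu`), the `compileTargets` helper name, and the tensor-count logging are illustrative, while the `compileTrack` options mirror the README's own Basic Usage example.

```typescript
// Sketch only: explicit backend registration and a rough memory check around
// compileTrack. The package may already pre-warm a backend on import, in which
// case the tf.setBackend/tf.ready calls are redundant.
import * as tf from "@tensorflow/tfjs";
import "@tensorflow/tfjs-backend-cpu"; // assumed pure-JS backend; swap for another if needed
import { OfflineCompiler } from "@srsergio/taptapp-ar";

export async function compileTargets(imageBuffers: Buffer[]): Promise<unknown> {
  // Ensure a TensorFlow.js backend is selected and initialized before compiling.
  await tf.setBackend("cpu");
  await tf.ready();

  const compiler = new OfflineCompiler();
  const tensorsBefore = tf.memory().numTensors; // snapshot for a simple leak check

  // Options taken from the README's Basic Usage example.
  const result = await compiler.compileTrack({
    targetImages: imageBuffers,
    progressCallback: (progress: number) => console.log(`Compiling: ${progress}%`),
    basePercent: 0,
  });

  // Useful when tuning memory limits on Vercel/AWS Lambda: any growth here
  // points at tensors that were not disposed during compilation.
  console.log(`Tensors still allocated: ${tf.memory().numTensors - tensorsBefore}`);
  return result;
}
```

Which backend is appropriate (`cpu`, `webgl`, or `@tensorflow/tfjs-node`) depends on the deployment target; the README only states that the necessary TensorFlow.js backends must be installed.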
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
package/package.json
CHANGED
```diff
@@ -1,7 +1,15 @@
 {
   "name": "@srsergio/taptapp-ar",
-  "version": "1.0.0",
+  "version": "1.0.2",
   "description": "AR Visualizer and Compiler for Astro and React",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/srsergiolazaro/taptapp-ar.git"
+  },
+  "bugs": {
+    "url": "https://github.com/srsergiolazaro/taptapp-ar/issues"
+  },
+  "homepage": "https://github.com/srsergiolazaro/taptapp-ar#readme",
   "type": "module",
   "main": "./dist/index.js",
   "module": "./dist/index.js",
```
package/src/index.ts
CHANGED
package/src/react/AREditor.tsx
DELETED
@@ -1,394 +0,0 @@
```tsx
import { useState, useRef, useCallback } from "react";
import { customAlphabet } from "nanoid";
import { Image, Video, Upload, Camera, LoaderCircle } from "lucide-react";

const ALLOWED_MIME_TYPES = ["image/jpeg", "image/png", "image/webp"];
const ALLOWED_VIDEO_TYPES = ["video/mp4", "video/webm"];

interface FileUploadState {
  file: File | null;
  preview: string;
}

interface AREditorProps {
  adminId: string;
}

const useFileUpload = (allowedTypes: string[]) => {
  const [fileState, setFileState] = useState<FileUploadState>({ file: null, preview: "" });
  const [dimensions, setDimensions] = useState<{ width?: number; height?: number }>({});
  const fileInputRef = useRef<HTMLInputElement>(null);

  const handleFileChange = useCallback(
    (file: File | null) => {
      if (fileState.preview) {
        URL.revokeObjectURL(fileState.preview);
      }

      if (!file) {
        setFileState({ file: null, preview: "" });
        return;
      }

      // Para archivos .mind, validar la extensión en lugar del tipo MIME
      if (allowedTypes.includes(".mind")) {
        if (!file.name.toLowerCase().endsWith(".mind")) {
          throw new Error("El archivo debe tener extensión .mind");
        }
      } else if (!allowedTypes.includes(file.type)) {
        throw new Error("Tipo de archivo no permitido");
      }

      if (file.type.includes("video")) {
        const video = document.createElement("video");
        video.src = URL.createObjectURL(file);
      }

      console.log("Archivo cargado:", {
        nombre: file.name,
        tamaño: (file.size / 1024).toFixed(2) + " KB",
        tipo: file.type || "application/octet-stream",
      });

      const preview = URL.createObjectURL(file);
      if (file.type.includes("video")) {
        const video = document.createElement("video");
        video.src = URL.createObjectURL(file);
        video.addEventListener("loadedmetadata", () => {
          const width = video.videoWidth;
          const height = video.videoHeight;
          setDimensions({ width, height });
          console.log("Ancho y alto del video:", width, height);
        });
      }

      setFileState({ file, preview });
    },
    [allowedTypes, fileState.preview],
  );

  const reset = useCallback(() => {
    if (fileState.preview) {
      URL.revokeObjectURL(fileState.preview);
    }
    setFileState({ file: null, preview: "" });
    if (fileInputRef.current) {
      fileInputRef.current.value = "";
    }
  }, [fileState.preview]);

  return { fileState, handleFileChange, reset, fileInputRef, dimensions };
};

const useUploadFile = () => {
  const uploadFile = async (file: File, type: "image" | "video" | "mind") => {
    const customNanoid = customAlphabet("1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ", 21);
    const id = customNanoid();
    const formData = new FormData();
    formData.append("file", file);

    const endpoint =
      type === "video"
        ? `https://r2-worker.sergiolazaromondargo.workers.dev/video/${id}`
        : type === "mind"
          ? `https://r2-worker.sergiolazaromondargo.workers.dev/mind/${id}`
          : `https://r2-worker.sergiolazaromondargo.workers.dev/${id}`;

    const response = await fetch(endpoint, {
      method: "PUT",
      body: formData,
    });

    if (!response.ok) {
      throw new Error(`Error al subir ${type}: ${response.status} ${response.statusText}`);
    }

    return await response.json();
  };

  return { uploadFile };
};

export const AREditor: React.FC<AREditorProps> = ({ adminId }) => {
  const {
    fileState: imageState,
    handleFileChange: handleImageChange,
    reset: resetImage,
    fileInputRef: imageInputRef,
  } = useFileUpload(ALLOWED_MIME_TYPES);

  const {
    fileState: mindState,
    handleFileChange: handleMindChange,
    reset: resetMind,
    fileInputRef: mindInputRef,
  } = useFileUpload([".mind"]);

  const {
    fileState: videoState,
    handleFileChange: handleVideoChange,
    reset: resetVideo,
    fileInputRef: videoInputRef,
    dimensions: videoDimensions,
  } = useFileUpload(ALLOWED_VIDEO_TYPES);

  const [videoScale, setVideoScale] = useState<number>(1);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string>("");

  const { uploadFile } = useUploadFile();

  const handleSave = async () => {
    try {
      setLoading(true);
      setError("");

      if (!imageState.file || !mindState.file || !videoState.file) {
        throw new Error("Se requieren una imagen, un archivo .mind y un video");
      }

      const [imageResult, mindResult, videoResult] = await Promise.all([
        uploadFile(imageState.file, "image"),
        uploadFile(mindState.file, "mind"),
        uploadFile(videoState.file, "video"),
      ]);

      const data = {
        adminId,
        data: [
          {
            id: `photos-${Date.now()}`,
            type: "photos",
            images: [{ image: imageResult.url, fileId: imageResult.fileId }],
          },
          {
            id: `videoNative-${Date.now()}`,
            type: "videoNative",
            url: videoResult.url,
            fileId: videoResult.fileId,
            scale: videoScale,
            width: videoDimensions.width,
            height: videoDimensions.height,
          },
          {
            id: `ar-${Date.now()}`,
            type: "ar",
            url: mindResult.url,
            fileId: mindResult.fileId,
          },
        ],
        type: "ar",
      };

      const response = await fetch("/api/updateadmin.json", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(data),
      });

      if (!response.ok) {
        throw new Error(`Error actualizando datos AR: ${response.status}`);
      }

      alert("¡Guardado exitosamente!");
      resetImage();
      resetMind();
      resetVideo();
    } catch (error: any) {
      setError(error.message);
    } finally {
      setLoading(false);
    }
  };

  const FileUploadSection = ({
    type,
    icon: Icon,
    fileState,
    inputRef,
    onFileChange,
    allowedTypes,
    label,
  }: {
    type: string;
    icon: typeof Image;
    fileState: FileUploadState;
    inputRef: React.RefObject<HTMLInputElement>;
    onFileChange: (file: File | null) => void;
    allowedTypes: string[];
    label: string;
  }) => (
    <div className="group relative overflow-hidden rounded-xl shadow-lg bg-white/80 backdrop-blur-sm transition-all duration-300 hover:shadow-xl hover:scale-[1.02] border border-gray-100">
      <input
        ref={inputRef}
        type="file"
        accept={allowedTypes.join(",")}
        onChange={(e) => {
          try {
            const file = e.target.files?.[0] || null;
            onFileChange(file);
          } catch (error: any) {
            setError(error.message);
          }
        }}
        className="hidden"
      />

      {!fileState.file ? (
        <label
          htmlFor={inputRef.current?.id}
          onClick={() => inputRef.current?.click()}
          className="flex cursor-pointer flex-col items-center justify-center p-10 bg-gradient-to-br from-gray-50 to-white transition-colors group-hover:from-blue-50 group-hover:to-purple-50"
        >
          <div className="transform transition-transform duration-300 group-hover:scale-110">
            <Icon className="h-16 w-16 text-gray-400 group-hover:text-blue-500" />
          </div>
          <span className="mt-4 text-lg font-medium bg-gradient-to-r from-gray-600 to-gray-800 bg-clip-text text-transparent group-hover:from-blue-600 group-hover:to-purple-600">
            {label}
          </span>
          <span className="mt-2 text-sm text-gray-400 group-hover:text-gray-500">
            {allowedTypes.join(", ")}
          </span>
        </label>
      ) : (
        <div className="p-6 space-y-4">
          <div className="relative aspect-video w-full overflow-hidden rounded-lg ring-1 ring-gray-100">
            {type === "video" ? (
              <video src={fileState.preview} controls className="h-full w-full object-cover">
                Tu navegador no soporta la reproducción de videos.
              </video>
            ) : type === "image" ? (
              <img src={fileState.preview} alt="Preview" className="h-full w-full object-cover" />
            ) : (
              <div className="flex h-full flex-col items-center justify-center space-y-3 bg-gradient-to-br from-blue-50 to-purple-50 p-4">
                <div className="flex items-center justify-center rounded-full bg-gradient-to-r from-blue-400 to-purple-400 p-3">
                  <Upload className="h-6 w-6 text-white" />
                </div>
                <div className="text-center space-y-2">
                  <span className="block text-lg font-medium text-gray-600">
                    {fileState.file.name}
                  </span>
                  <span className="block text-sm text-gray-500">
                    Tamaño: {(fileState.file.size / 1024).toFixed(2)} KB
                  </span>
                  <span className="mt-1 block text-sm font-medium text-green-600">
                    ✓ Archivo AR cargado correctamente
                  </span>
                </div>
              </div>
            )}
          </div>
          <div className="flex items-center justify-between">
            <div className="flex items-center space-x-3">
              <Icon className="h-5 w-5 text-blue-500" />
              <span className="text-sm font-medium text-gray-600">{fileState.file.name}</span>
            </div>
            <button
              onClick={() => onFileChange(null)}
              className="rounded-full bg-gradient-to-r from-blue-500 to-purple-500 px-4 py-2 text-sm font-medium text-white shadow-md transition-all hover:from-blue-600 hover:to-purple-600 hover:shadow-lg active:scale-95"
            >
              Cambiar
            </button>
          </div>
        </div>
      )}
    </div>
  );

  return (
    <div className="min-h-screen w-full bg-gradient-to-br from-blue-50 via-white to-purple-50 p-4 md:p-8">
      <div className="mx-auto max-w-3xl rounded-3xl bg-white/90 backdrop-blur-md p-6 md:p-10 shadow-2xl ring-1 ring-black/10">
        <div className="flex flex-col items-center justify-center space-y-8">
          <div className="rounded-2xl bg-gradient-to-br from-blue-500 to-purple-500 p-6 shadow-xl shadow-blue-300/30 hover:scale-105 transition-transform">
            <Camera className="h-12 w-12 text-white" />
          </div>
          <h1 className="bg-gradient-to-r from-blue-600 to-purple-600 bg-clip-text text-5xl font-bold text-transparent text-center">
            Editor de Experiencia AR
          </h1>
          <p className="text-2xl text-gray-600 text-center font-light">
            Crea una experiencia de realidad aumentada única
          </p>
        </div>

        <div className="mt-12 space-y-8">
          <FileUploadSection
            type="image"
            icon={Image}
            fileState={imageState}
            inputRef={imageInputRef}
            onFileChange={handleImageChange}
            allowedTypes={ALLOWED_MIME_TYPES}
            label="Haz clic para seleccionar una imagen"
          />

          <FileUploadSection
            type="mind"
            icon={Upload}
            fileState={mindState}
            inputRef={mindInputRef}
            onFileChange={handleMindChange}
            allowedTypes={[".mind"]}
            label="Haz clic para seleccionar archivo .mind"
          />

          <FileUploadSection
            type="video"
            icon={Video}
            fileState={videoState}
            inputRef={videoInputRef}
            onFileChange={handleVideoChange}
            allowedTypes={ALLOWED_VIDEO_TYPES}
            label="Haz clic para seleccionar un video"
          />

          <div className="space-y-4 rounded-2xl border border-gray-200/50 bg-white/90 backdrop-blur-md p-8 shadow-lg ring-1 ring-black/10">
            <label className="flex items-center justify-between text-2xl font-semibold text-gray-800">
              <span>Escala del Video</span>
              <span className="bg-gradient-to-r from-blue-600 to-purple-600 bg-clip-text text-transparent font-bold">
                {videoScale}x
              </span>
            </label>
            <div className="relative py-8">
              <div className="absolute h-3 w-full rounded-full bg-gradient-to-r from-blue-400 to-purple-400 opacity-20"></div>
              <div
                className="absolute h-3 rounded-full bg-gradient-to-r from-blue-400 to-purple-400 shadow-lg"
                style={{ width: `${(videoScale / 2) * 100}%` }}
              ></div>
              <input
                type="range"
                min="0.1"
                max="2"
                step="0.1"
                value={videoScale}
                onChange={(e) => setVideoScale(Number(e.target.value))}
                className="relative h-3 w-full cursor-pointer appearance-none rounded-lg bg-transparent focus:outline-none focus:ring-2 focus:ring-blue-400 focus:ring-offset-4"
                style={{ WebkitAppearance: "none" }}
              />
            </div>
          </div>
        </div>

        {error && (
          <div className="mt-6 rounded-xl bg-red-50 p-4 text-red-700 shadow-sm ring-1 ring-red-100">
            {error}
          </div>
        )}

        <button
          onClick={handleSave}
          disabled={loading}
          className="mt-10 w-full rounded-xl bg-gradient-to-r from-blue-600 to-purple-600 py-5 text-xl font-semibold text-white shadow-xl transition-all hover:from-blue-700 hover:to-purple-700 disabled:from-gray-400 disabled:to-gray-400 disabled:shadow-none focus:outline-none focus:ring-2 focus:ring-blue-400 focus:ring-offset-4"
        >
          {loading ? (
            <div className="flex items-center justify-center space-x-3">
              <LoaderCircle className="h-7 w-7 animate-spin" />
              <span>Guardando...</span>
            </div>
          ) : (
            "Guardar"
          )}
        </button>
      </div>
    </div>
  );
};
```
package/src/react/ProgressDialog.tsx
DELETED

@@ -1,185 +0,0 @@
```tsx
import React, { useEffect } from "react";
import {
  CheckCircle2,
  Loader2,
  Upload,
  Image as ImageIcon,
  Video as VideoIcon,
} from "lucide-react";

interface ProgressStage {
  label: string;
  status: "pending" | "processing" | "completed" | "error";
  progress?: number;
  icon: React.ReactNode;
}

interface ProgressDialogProps {
  open: boolean;
  imageStatus: "pending" | "processing" | "completed" | "error";
  videoStatus: "pending" | "processing" | "completed" | "error";
  arProcessingStatus: "pending" | "processing" | "completed" | "error";
  arUploadStatus: "pending" | "processing" | "completed" | "error";
  imageProgress?: number;
  videoProgress?: number;
  arProcessingProgress?: number;
  arUploadProgress?: number;
}

export function ProgressDialog({
  open,
  imageStatus,
  videoStatus,
  arProcessingStatus,
  arUploadStatus,
  imageProgress = 0,
  videoProgress = 0,
  arProcessingProgress = 0,
  arUploadProgress = 0,
}: ProgressDialogProps) {
  // Configurar las etapas del progreso
  const stages: ProgressStage[] = [
    {
      label: "Subiendo imagen",
      status: imageStatus,
      progress: imageProgress,
      icon: <ImageIcon className="h-5 w-5" />,
    },
    {
      label: "Subiendo video",
      status: videoStatus,
      progress: videoProgress,
      icon: <VideoIcon className="h-5 w-5" />,
    },
    {
      label: "Procesando imagen para AR",
      status: arProcessingStatus,
      progress: arProcessingProgress,
      icon: <Loader2 className="h-5 w-5" />,
    },
    {
      label: "Subiendo experiencia AR",
      status: arUploadStatus,
      progress: arUploadProgress,
      icon: <Upload className="h-5 w-5" />,
    },
  ];

  // Calcular el progreso total (promedio de todos los procesos)
  const completedSteps = stages.filter((stage) => stage.status === "completed").length;
  const totalProgress =
    (imageProgress + videoProgress + arProcessingProgress + arUploadProgress) / 4;
  const overallProgress = Math.min(Math.max(totalProgress, completedSteps * 25), 100);

  // Bloquear el scroll cuando el modal está abierto
  useEffect(() => {
    if (open) {
      document.body.style.overflow = "hidden";
    } else {
      document.body.style.overflow = "auto";
    }

    return () => {
      document.body.style.overflow = "auto";
    };
  }, [open]);

  if (!open) return null;

  return (
    <div className="fixed inset-0 z-50 flex items-center justify-center overflow-y-auto bg-black bg-opacity-50 p-4">
      <div className="relative max-h-full w-full max-w-md overflow-hidden rounded-lg bg-white p-6 shadow-xl">
        {/* Encabezado */}
        <div className="mb-4 flex items-center justify-between">
          <h2 className="text-center font-semibold text-xl">Guardando experiencia AR</h2>
        </div>

        {/* Barra de progreso general */}
        <div className="mt-2">
          <div className="relative pt-1">
            <div className="mb-6">
              <div className="h-2 w-full rounded-full bg-gray-200">
                <div
                  className="h-2 rounded-full bg-blue-600 transition-all duration-500"
                  style={{ width: `${overallProgress}%` }}
                ></div>
              </div>
              <div className="mt-2 text-center text-gray-600 text-sm">
                Progreso total: {Math.round(overallProgress)}%
              </div>
            </div>
          </div>
        </div>

        {/* Etapas individuales */}
        <div className="mt-2 space-y-5">
          {stages.map((stage, index) => (
            <div key={index} className="relative">
              <div className="flex items-center">
                <div
                  className={`mr-3 flex h-10 w-10 flex-shrink-0 items-center justify-center rounded-full ${stage.status === "completed"
                    ? "bg-green-100"
                    : stage.status === "processing"
                      ? "bg-blue-100"
                      : stage.status === "error"
                        ? "bg-red-100"
                        : "bg-gray-100"
                    }`}
                >
                  {stage.status === "completed" ? (
                    <CheckCircle2 className="h-6 w-6 text-green-600" />
                  ) : stage.status === "processing" ? (
                    <div className="text-blue-600">{stage.icon}</div>
                  ) : stage.status === "error" ? (
                    <div className="text-red-600">{stage.icon}</div>
                  ) : (
                    <div className="text-gray-400">{stage.icon}</div>
                  )}
                </div>

                <div className="min-w-0 flex-1">
                  <div className="font-medium text-sm">{stage.label}</div>

                  {stage.status === "processing" && (
                    <div className="mt-1">
                      <div className="h-1.5 w-full rounded-full bg-gray-200">
                        <div
                          className="h-1.5 rounded-full bg-blue-600 transition-all duration-300"
                          style={{ width: `${stage.progress || 0}%` }}
                        ></div>
                      </div>
                      <div className="mt-1 text-gray-500 text-xs">
                        {Math.round(stage.progress || 0)}%
                      </div>
                    </div>
                  )}

                  {stage.status === "completed" && (
                    <div className="mt-1 text-green-600 text-xs">Completado</div>
                  )}

                  {stage.status === "error" && (
                    <div className="mt-1 text-red-600 text-xs">Error</div>
                  )}

                  {stage.status === "pending" && (
                    <div className="mt-1 text-gray-500 text-xs">Pendiente</div>
                  )}
                </div>
              </div>
            </div>
          ))}
        </div>

        {/* Mensaje de finalización */}
        {completedSteps === stages.length && (
          <div className="mt-4 rounded-lg bg-green-50 p-4 text-center">
            <CheckCircle2 className="mx-auto mb-2 h-8 w-8 text-green-500" />
            <p className="font-medium text-green-800">¡Experiencia AR guardada con éxito!</p>
            <p className="text-green-700 text-sm">Tu contenido está listo para ser visualizado.</p>
          </div>
        )}
      </div>
    </div>
  );
}
```