@twick/browser-render 0.15.6 → 0.15.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -15
- package/dist/index.d.mts +225 -0
- package/dist/index.d.ts +225 -0
- package/dist/index.js +585 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +546 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +15 -8
- package/public/mp4-wasm.wasm +0 -0
- package/AUDIO_IMPLEMENTATION.md +0 -217
- package/package.json.bak +0 -53
- package/src/audio/audio-processor.ts +0 -239
- package/src/audio/audio-video-muxer.ts +0 -79
- package/src/browser-renderer.ts +0 -495
- package/src/hooks/use-browser-renderer.ts +0 -218
- package/src/index.ts +0 -20
- package/src/mp4-wasm.d.ts +0 -4
- package/tsconfig.json +0 -23
- package/tsup.config.ts +0 -19
package/src/browser-renderer.ts
DELETED
@@ -1,495 +0,0 @@
-import { Renderer, Vector2 } from "@twick/core";
-import type { Project, RendererSettings } from "@twick/core";
-import defaultProject from "@twick/visualizer/dist/project.js";
-import { BrowserAudioProcessor, getAssetPlacement, type AssetInfo } from './audio/audio-processor';
-
-/**
- * Browser-native video exporter using WebCodecs API
- * This exporter downloads the video directly in the browser without any server interaction
- */
-class BrowserWasmExporter {
-  public static readonly id = '@twick/core/wasm';
-  public static readonly displayName = 'Browser Video (Wasm)';
-
-  private encoder: any;
-  private videoBlob: Blob | null = null;
-  private onProgressCallback?: (progress: number) => void;
-  private currentFrame: number = 0;
-  private fps: number = 30;
-
-  public static async create(settings: RendererSettings) {
-    return new BrowserWasmExporter(settings);
-  }
-
-  public constructor(
-    private readonly settings: RendererSettings,
-  ) {
-    this.fps = settings.fps || 30;
-  }
-
-  public async start(): Promise<void> {
-    try {
-      // Import mp4-wasm
-      const loadMp4Module = (await import('mp4-wasm')).default;
-
-      // Try multiple locations to fetch the WASM file
-      const possiblePaths = [
-        // Vite dev server virtual path
-        '/@mp4-wasm',
-        // Common bundled asset paths (Vite uses hashed names)
-        '/assets/mp4-wasm.wasm',
-        '/assets/mp4-YBRi_559.wasm', // Known Vite hash
-        '/mp4-wasm.wasm',
-        // Node modules path (for dev)
-        '/node_modules/mp4-wasm/dist/mp4-wasm.wasm',
-      ];
-
-      let buffer: ArrayBuffer | null = null;
-      let successPath = '';
-
-      for (const path of possiblePaths) {
-        try {
-          const resp = await fetch(path);
-          if (resp.ok) {
-            const contentType = resp.headers.get('content-type');
-            // Make sure we got a WASM file, not HTML
-            if (contentType && contentType.includes('html')) {
-              continue;
-            }
-            buffer = await resp.arrayBuffer();
-            successPath = path;
-            break;
-          }
-        } catch (e) {
-          continue;
-        }
-      }
-
-      if (!buffer) {
-        throw new Error(
-          'Could not load WASM file from any location. ' +
-          'Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it.'
-        );
-      }
-
-      const mp4 = await loadMp4Module({ wasmBinary: buffer });
-
-      this.encoder = mp4.createWebCodecsEncoder({
-        width: this.settings.size.x,
-        height: this.settings.size.y,
-        fps: this.fps,
-      });
-    } catch (error) {
-      console.error('WASM loading error:', error);
-      throw error;
-    }
-  }
-
-  public async handleFrame(canvas: HTMLCanvasElement, frameNumber?: number): Promise<void> {
-    const frameIndex = frameNumber !== undefined ? frameNumber : this.currentFrame;
-    const timestampMicroseconds = Math.round((frameIndex / this.fps) * 1_000_000);
-
-    const frame = new VideoFrame(canvas, {
-      timestamp: timestampMicroseconds,
-      duration: Math.round((1 / this.fps) * 1_000_000)
-    });
-
-    await this.encoder.addFrame(frame);
-    frame.close();
-
-    if (frameNumber === undefined) {
-      this.currentFrame++;
-    }
-  }
-
-  public async stop(): Promise<void> {
-    const buf = await this.encoder.end();
-    this.videoBlob = new Blob([buf], { type: 'video/mp4' });
-  }
-
-  public async generateAudio(
-    assets: AssetInfo[][],
-    startFrame: number,
-    endFrame: number,
-  ): Promise<ArrayBuffer | null> {
-    try {
-      console.log('🔊 Starting audio processing...', {
-        frames: assets.length,
-        startFrame,
-        endFrame
-      });
-
-      const processor = new BrowserAudioProcessor();
-      const assetPlacements = getAssetPlacement(assets);
-
-      console.log(`📊 Found ${assetPlacements.length} audio assets to process`);
-
-      if (assetPlacements.length === 0) {
-        console.log('⚠️ No audio assets found');
-        return null;
-      }
-
-      const processedBuffers: AudioBuffer[] = [];
-      for (const asset of assetPlacements) {
-        if (asset.volume > 0 && asset.playbackRate > 0) {
-          console.log(`🎵 Processing audio: ${asset.key}`);
-          const buffer = await processor.processAudioAsset(
-            asset,
-            this.settings.fps || 30,
-            endFrame - startFrame
-          );
-          processedBuffers.push(buffer);
-        }
-      }
-
-      if (processedBuffers.length === 0) {
-        console.log('⚠️ No audio buffers to mix');
-        return null;
-      }
-
-      console.log(`🎛️ Mixing ${processedBuffers.length} audio track(s)...`);
-      const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
-      const wavData = processor.audioBufferToWav(mixedBuffer);
-
-      await processor.close();
-      console.log(`✅ Audio processed: ${(wavData.byteLength / 1024 / 1024).toFixed(2)} MB`);
-
-      return wavData;
-    } catch (error) {
-      console.error('❌ Audio processing failed:', error);
-      return null;
-    }
-  }
-
-  public async mergeMedia(): Promise<void> {
-    // In browser, we don't need to merge separately
-    // The video is already created with audio in the encoder
-  }
-
-  public async downloadVideos(assets: any[][]): Promise<void> {
-    // Browser doesn't need to download source videos
-    // They're already accessible via URLs
-  }
-
-  public getVideoBlob(): Blob | null {
-    return this.videoBlob;
-  }
-
-  public setProgressCallback(callback: (progress: number) => void): void {
-    this.onProgressCallback = callback;
-  }
-}
-
-/**
- * Browser rendering configuration
- */
-export interface BrowserRenderConfig {
-  /**
-   * Custom Project object
-   * If not provided, defaults to @twick/visualizer project
-   *
-   * Note: Must be an imported Project object, not a string path.
-   * String paths only work in Node.js environments (server renderer).
-   *
-   * Example:
-   * ```typescript
-   * import myProject from './my-custom-project';
-   *
-   * await renderTwickVideoInBrowser({
-   *   projectFile: myProject,
-   *   variables: { input: {...} }
-   * });
-   * ```
-   */
-  projectFile?: Project;
-  /** Input variables containing project configuration */
-  variables: {
-    input: any;
-    playerId?: string;
-    [key: string]: any;
-  };
-  /** Render settings */
-  settings?: {
-    width?: number;
-    height?: number;
-    fps?: number;
-    quality?: 'low' | 'medium' | 'high';
-    range?: [number, number]; // [start, end] in seconds
-    includeAudio?: boolean; // Enable audio processing
-    downloadAudioSeparately?: boolean; // Download audio.wav separately
-    onAudioReady?: (audioBlob: Blob) => void; // Callback when audio is ready
-    onProgress?: (progress: number) => void;
-    onComplete?: (videoBlob: Blob) => void;
-    onError?: (error: Error) => void;
-  };
-}
-
-/**
- * Renders a Twick video directly in the browser without requiring a server.
- * Uses WebCodecs API for encoding video frames into MP4 format.
- *
- * This function uses the same signature as the server renderer for consistency.
- *
- * @param config - Configuration object containing variables and settings
- * @param config.projectFile - Optional project file path or Project object (defaults to visualizer project)
- * @param config.variables - Variables containing input configuration (tracks, elements, etc.)
- * @param config.settings - Optional render settings (width, height, fps, etc.)
- * @returns Promise resolving to a Blob containing the rendered video
- *
- * @example
- * ```js
- * import { renderTwickVideoInBrowser } from '@twick/browser-render';
- *
- * // Using default visualizer project
- * const videoBlob = await renderTwickVideoInBrowser({
- *   variables: {
- *     input: {
- *       properties: { width: 1920, height: 1080, fps: 30 },
- *       tracks: [
- *         // Your tracks configuration
- *       ]
- *     }
- *   },
- *   settings: {
- *     width: 1920,
- *     height: 1080,
- *     fps: 30,
- *     quality: 'high',
- *     onProgress: (progress) => console.log(`Rendering: ${progress * 100}%`),
- *   }
- * });
- *
- * // Using custom project
- * import myProject from './my-custom-project';
- * const videoBlob = await renderTwickVideoInBrowser({
- *   projectFile: myProject, // Must be an imported Project object
- *   variables: { input: {...} },
- *   settings: {...}
- * });
- *
- * // Download the video
- * const url = URL.createObjectURL(videoBlob);
- * const a = document.createElement('a');
- * a.href = url;
- * a.download = 'video.mp4';
- * a.click();
- * URL.revokeObjectURL(url);
- * ```
- */
-export const renderTwickVideoInBrowser = async (
-  config: BrowserRenderConfig
-): Promise<Blob> => {
-  // Save original methods to restore later
-  const originalVideoPlay = HTMLVideoElement.prototype.play;
-  const originalAudioPlay = HTMLAudioElement.prototype.play;
-  const originalCreateElement = document.createElement.bind(document);
-
-  // Override play methods to force muting
-  HTMLVideoElement.prototype.play = function() {
-    this.muted = true;
-    this.volume = 0;
-    return originalVideoPlay.call(this);
-  };
-
-  HTMLAudioElement.prototype.play = function() {
-    this.muted = true;
-    this.volume = 0;
-    return originalAudioPlay.call(this);
-  };
-
-  // Override createElement to mute video/audio on creation
-  document.createElement = function(tagName: string, options?: any) {
-    const element = originalCreateElement(tagName, options);
-    if (tagName.toLowerCase() === 'video' || tagName.toLowerCase() === 'audio') {
-      (element as any).muted = true;
-      (element as any).volume = 0;
-    }
-    return element;
-  } as any;
-
-  try {
-    const { projectFile, variables, settings = {} } = config;
-
-    // Validate input
-    if (!variables || !variables.input) {
-      throw new Error('Invalid configuration. "variables.input" is required.');
-    }
-
-    // Get dimensions from input properties or use settings
-    const width = settings.width || variables.input.properties?.width || 1920;
-    const height = settings.height || variables.input.properties?.height || 1080;
-    const fps = settings.fps || variables.input.properties?.fps || 30;
-
-    // Load the project
-    let project: Project;
-
-    if (!projectFile) {
-      // Use default visualizer project
-      project = defaultProject;
-    } else {
-      // Use provided project object
-      project = projectFile as Project;
-    }
-
-    // Set variables on the project (same as server renderer)
-    // The renderer expects variables to be assigned directly to the project
-    project.variables = variables as any;
-
-    // Create renderer settings
-    const renderSettings: RendererSettings = {
-      name: 'browser-render',
-      exporter: {
-        name: '@twick/core/wasm',
-      },
-      size: new Vector2(width, height),
-      resolutionScale: 1,
-      colorSpace: 'srgb',
-      fps: fps,
-      range: settings.range || [0, Infinity],
-      background: variables.input.backgroundColor || '#000000',
-      ...(settings.quality && {
-        quality: settings.quality,
-      }),
-    };
-
-    // Create the renderer
-    const renderer = new Renderer(project);
-
-    // Create and initialize the browser exporter
-    const exporter = await BrowserWasmExporter.create(renderSettings);
-    await exporter.start();
-
-    if (settings.onProgress) {
-      exporter.setProgressCallback(settings.onProgress);
-    }
-
-    // Configure renderer
-    await renderer['reloadScenes'](renderSettings);
-    (renderer as any).stage.configure(renderSettings);
-    (renderer as any).playback.fps = renderSettings.fps;
-
-    // Set playback state to Rendering (critical for video elements)
-    // PlaybackState: Playing = 0, Rendering = 1, Paused = 2, Presenting = 3
-    (renderer as any).playback.state = 1;
-
-    // Calculate total frames from scenes
-    const totalFrames = await renderer.getNumberOfFrames(renderSettings);
-
-    // Initialize playback
-    await (renderer as any).playback.recalculate();
-    await (renderer as any).playback.reset();
-    await (renderer as any).playback.seek(0);
-
-    // Track media assets for audio processing
-    const mediaAssets: AssetInfo[][] = [];
-
-    // Render frames
-    for (let frame = 0; frame < totalFrames; frame++) {
-      if (frame > 0) {
-        await (renderer as any).playback.progress();
-      }
-
-      await (renderer as any).stage.render(
-        (renderer as any).playback.currentScene,
-        (renderer as any).playback.previousScene,
-      );
-
-      // Collect media assets from current scene for audio
-      const currentAssets = (renderer as any).playback.currentScene.getMediaAssets?.() || [];
-      mediaAssets.push(currentAssets);
-
-      const canvas = (renderer as any).stage.finalBuffer;
-      await exporter.handleFrame(canvas, frame);
-
-      if (settings.onProgress) {
-        settings.onProgress(frame / totalFrames);
-      }
-    }
-
-    await exporter.stop();
-
-    // Generate audio if requested
-    let audioData: ArrayBuffer | null = null;
-    if (settings.includeAudio && mediaAssets.length > 0) {
-      console.log('🎵 Generating audio track...');
-      audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
-    }
-
-    let finalBlob = exporter.getVideoBlob();
-    if (!finalBlob) {
-      throw new Error('Failed to create video blob');
-    }
-
-    // Handle audio if it was generated
-    if (audioData && settings.includeAudio) {
-      console.log('✅ Audio extracted and processed successfully');
-      console.log('📊 Audio data size:', (audioData.byteLength / 1024 / 1024).toFixed(2), 'MB');
-
-      // Option 1: Download audio separately (most reliable)
-      if ((settings as any).downloadAudioSeparately) {
-        const audioBlob = new Blob([audioData], { type: 'audio/wav' });
-        const audioUrl = URL.createObjectURL(audioBlob);
-        const a = document.createElement('a');
-        a.href = audioUrl;
-        a.download = 'audio.wav';
-        a.click();
-        URL.revokeObjectURL(audioUrl);
-        console.log('✅ Audio downloaded separately as audio.wav');
-      }
-
-      // Option 2: Return audio via callback
-      if ((settings as any).onAudioReady) {
-        const audioBlob = new Blob([audioData], { type: 'audio/wav' });
-        (settings as any).onAudioReady(audioBlob);
-      }
-
-      console.log('💡 Note: Client-side audio muxing is complex.');
-      console.log('💡 For full audio support, use server-side rendering: @twick/render-server');
-      console.log('💡 Or mux manually with: ffmpeg -i video.mp4 -i audio.wav -c:v copy -c:a aac output.mp4');
-    }
-
-    if (settings.onComplete) {
-      settings.onComplete(finalBlob);
-    }
-
-    return finalBlob;
-  } catch (error) {
-    if (config.settings?.onError) {
-      config.settings.onError(error as Error);
-    }
-    throw error;
-  } finally {
-    // Restore original methods
-    HTMLVideoElement.prototype.play = originalVideoPlay;
-    HTMLAudioElement.prototype.play = originalAudioPlay;
-    document.createElement = originalCreateElement as any;
-  }
-};
-
-/**
- * Helper function to download a video blob as a file
- *
- * @param videoBlob - The video blob to download
- * @param filename - The desired filename (default: 'video.mp4')
- *
- * @example
- * ```js
- * const blob = await renderTwickVideoInBrowser(projectData);
- * downloadVideoBlob(blob, 'my-video.mp4');
- * ```
- */
-export const downloadVideoBlob = (videoBlob: Blob, filename: string = 'video.mp4'): void => {
-  const url = URL.createObjectURL(videoBlob);
-  const a = document.createElement('a');
-  a.href = url;
-  a.download = filename;
-  a.style.display = 'none';
-  document.body.appendChild(a);
-  a.click();
-  document.body.removeChild(a);
-
-  // Clean up the object URL after a delay
-  setTimeout(() => URL.revokeObjectURL(url), 1000);
-};
-
-export default renderTwickVideoInBrowser;
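For reference, a minimal end-to-end usage of the module deleted above, condensed from its own docstrings. This is an illustrative sketch only: it assumes the pre-0.15.7 source exports (`renderTwickVideoInBrowser`, `downloadVideoBlob`); the `exportClip` wrapper and the empty `tracks` array are placeholders, not part of the package.

```ts
import { renderTwickVideoInBrowser, downloadVideoBlob } from '@twick/browser-render';

// Sketch based on the docstrings in the deleted file (pre-0.15.7 API).
async function exportClip() {
  const blob = await renderTwickVideoInBrowser({
    variables: {
      input: {
        properties: { width: 1920, height: 1080, fps: 30 },
        tracks: [], // tracks configuration goes here
      },
    },
    settings: {
      fps: 30,
      quality: 'high',
      onProgress: (p) => console.log(`Rendering: ${(p * 100).toFixed(0)}%`),
    },
  });

  // Saves the MP4 via a temporary <a download> element, as the helper above does.
  downloadVideoBlob(blob, 'my-video.mp4');
}
```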
package/src/hooks/use-browser-renderer.ts
DELETED
@@ -1,218 +0,0 @@
-import { useState, useCallback } from 'react';
-import { renderTwickVideoInBrowser, downloadVideoBlob } from '../browser-renderer';
-import type { BrowserRenderConfig } from '../browser-renderer';
-
-export interface UseBrowserRendererOptions {
-  /**
-   * Custom Project object
-   * If not provided, defaults to @twick/visualizer project
-   *
-   * Note: Must be an imported Project object, not a string path.
-   * String paths only work in Node.js (server renderer).
-   *
-   * Example:
-   * ```typescript
-   * import myProject from './my-custom-project';
-   * useBrowserRenderer({ projectFile: myProject })
-   * ```
-   */
-  projectFile?: any;
-  /** Video width in pixels */
-  width?: number;
-  /** Video height in pixels */
-  height?: number;
-  /** Frames per second */
-  fps?: number;
-  /** Render quality */
-  quality?: 'low' | 'medium' | 'high';
-  /** Time range to render [start, end] in seconds */
-  range?: [number, number];
-  /** Include audio in rendered video (experimental) */
-  includeAudio?: boolean;
-  /** Download audio separately as WAV file */
-  downloadAudioSeparately?: boolean;
-  /** Callback when audio is ready */
-  onAudioReady?: (audioBlob: Blob) => void;
-  /** Automatically download the video when rendering completes */
-  autoDownload?: boolean;
-  /** Default filename for downloads */
-  downloadFilename?: string;
-}
-
-export interface UseBrowserRendererReturn {
-  /** Start rendering the video */
-  render: (variables: BrowserRenderConfig['variables']) => Promise<Blob | null>;
-  /** Current rendering progress (0-1) */
-  progress: number;
-  /** Whether rendering is in progress */
-  isRendering: boolean;
-  /** Error if rendering failed */
-  error: Error | null;
-  /** The rendered video blob (available after rendering completes) */
-  videoBlob: Blob | null;
-  /** Download the rendered video */
-  download: (filename?: string) => void;
-  /** Reset the renderer state */
-  reset: () => void;
-}
-
-/**
- * React hook for rendering Twick videos in the browser
- *
- * Uses the same pattern as the server renderer for consistency.
- *
- * @param options - Rendering options
- * @returns Renderer state and control functions
- *
- * @example
- * ```tsx
- * import { useBrowserRenderer } from '@twick/browser-render';
- *
- * // Using default visualizer project
- * function MyComponent() {
- *   const { render, progress, isRendering, videoBlob, download } = useBrowserRenderer({
- *     width: 1920,
- *     height: 1080,
- *     fps: 30,
- *     autoDownload: true,
- *   });
- *
- *   const handleRender = async () => {
- *     await render({
- *       input: {
- *         properties: { width: 1920, height: 1080, fps: 30 },
- *         tracks: [
- *           // Your tracks configuration
- *         ]
- *       }
- *     });
- *   };
- *
- *   return (
- *     <div>
- *       <button onClick={handleRender} disabled={isRendering}>
- *         {isRendering ? `Rendering... ${(progress * 100).toFixed(0)}%` : 'Render Video'}
- *       </button>
- *       {videoBlob && !autoDownload && (
- *         <button onClick={() => download('my-video.mp4')}>Download</button>
- *       )}
- *     </div>
- *   );
- * }
- *
- * // Using custom project (must import it first)
- * import myProject from './my-project';
- *
- * function CustomProjectComponent() {
- *   const { render } = useBrowserRenderer({
- *     projectFile: myProject, // Pass the imported project object
- *     width: 1920,
- *     height: 1080,
- *   });
- *
- *   // ... rest of component
- * }
- * ```
- */
-export const useBrowserRenderer = (options: UseBrowserRendererOptions = {}): UseBrowserRendererReturn => {
-  const [progress, setProgress] = useState(0);
-  const [isRendering, setIsRendering] = useState(false);
-  const [error, setError] = useState<Error | null>(null);
-  const [videoBlob, setVideoBlob] = useState<Blob | null>(null);
-
-  const reset = useCallback(() => {
-    setProgress(0);
-    setIsRendering(false);
-    setError(null);
-    setVideoBlob(null);
-  }, []);
-
-  const download = useCallback((filename?: string) => {
-    if (!videoBlob) {
-      const downloadError = new Error('No video available to download. Please render the video first.');
-      setError(downloadError);
-      console.error(downloadError.message);
-      return;
-    }
-
-    try {
-      downloadVideoBlob(videoBlob, filename || options.downloadFilename || 'video.mp4');
-    } catch (err) {
-      const downloadError = err instanceof Error ? err : new Error('Failed to download video');
-      setError(downloadError);
-      console.error('Download error:', downloadError);
-    }
-  }, [videoBlob, options.downloadFilename]);
-
-  const render = useCallback(async (variables: BrowserRenderConfig['variables']): Promise<Blob | null> => {
-    reset();
-    setIsRendering(true);
-
-    try {
-      const { projectFile, width, height, fps, quality, range, includeAudio, downloadAudioSeparately, onAudioReady, autoDownload, downloadFilename, ...restOptions } = options;
-
-      const blob = await renderTwickVideoInBrowser({
-        projectFile,
-        variables,
-        settings: {
-          width,
-          height,
-          includeAudio,
-          downloadAudioSeparately,
-          onAudioReady,
-          fps,
-          quality,
-          range,
-          ...restOptions,
-          onProgress: (p) => {
-            setProgress(p);
-          },
-          onComplete: (blob) => {
-            setVideoBlob(blob);
-            if (autoDownload) {
-              try {
-                downloadVideoBlob(blob, downloadFilename || 'video.mp4');
-              } catch (downloadErr) {
-                const error = downloadErr instanceof Error
-                  ? downloadErr
-                  : new Error('Failed to auto-download video');
-                setError(error);
-                console.error('Auto-download error:', error);
-              }
-            }
-          },
-          onError: (err) => {
-            setError(err);
-          },
-        },
-      });
-
-      if (!blob) {
-        throw new Error('Rendering failed: No video blob was generated');
-      }
-
-      setVideoBlob(blob);
-      setProgress(1);
-      return blob;
-    } catch (err) {
-      const error = err instanceof Error ? err : new Error(String(err));
-      setError(error);
-      console.error('Render error:', error);
-      return null;
-    } finally {
-      setIsRendering(false);
-    }
-  }, [options, reset]);
-
-  return {
-    render,
-    progress,
-    isRendering,
-    error,
-    videoBlob,
-    download,
-    reset,
-  };
-};
-
-export default useBrowserRenderer;
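Likewise, a condensed sketch of the React hook deleted above, based on its own docstring. It assumes the pre-0.15.7 `useBrowserRenderer` export; the `RenderButton` component and its `input` prop are invented for the example.

```tsx
import { useBrowserRenderer } from '@twick/browser-render';

// Condensed sketch of the documented hook usage (pre-0.15.7 API).
export function RenderButton({ input }: { input: any }) {
  const { render, progress, isRendering } = useBrowserRenderer({
    width: 1920,
    height: 1080,
    fps: 30,
    autoDownload: true, // saves the MP4 automatically when rendering completes
  });

  return (
    <button disabled={isRendering} onClick={() => render({ input })}>
      {isRendering ? `Rendering... ${(progress * 100).toFixed(0)}%` : 'Render video'}
    </button>
  );
}
```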