@twick/browser-render 0.15.6 → 0.15.8

This diff shows the changes between publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
package/dist/index.mjs ADDED
@@ -0,0 +1,708 @@
+ // src/browser-renderer.ts
+ import { Renderer, Vector2 } from "@twick/core";
+ import defaultProject from "@twick/visualizer/dist/project.js";
+
+ // src/audio/video-audio-extractor.ts
+ var VideoElementAudioExtractor = class {
+   audioContext;
+   video;
+   destination = null;
+   mediaRecorder = null;
+   audioChunks = [];
+   constructor(videoSrc, sampleRate = 48e3) {
+     this.audioContext = new AudioContext({ sampleRate });
+     this.video = document.createElement("video");
+     this.video.crossOrigin = "anonymous";
+     this.video.src = videoSrc;
+     this.video.muted = true;
+   }
+   async initialize() {
+     return new Promise((resolve, reject) => {
+       this.video.addEventListener("loadedmetadata", () => resolve(), { once: true });
+       this.video.addEventListener("error", (e) => {
+         reject(new Error(`Failed to load video for audio extraction: ${e}`));
+       }, { once: true });
+     });
+   }
+   /**
+    * Extract audio by playing the video and capturing audio output
+    */
+   async extractAudio(startTime, duration, playbackRate = 1) {
+     const source = this.audioContext.createMediaElementSource(this.video);
+     this.destination = this.audioContext.createMediaStreamDestination();
+     source.connect(this.destination);
+     this.audioChunks = [];
+     this.mediaRecorder = new MediaRecorder(this.destination.stream, {
+       mimeType: "audio/webm"
+     });
+     this.mediaRecorder.ondataavailable = (event) => {
+       if (event.data.size > 0) {
+         this.audioChunks.push(event.data);
+       }
+     };
+     this.video.currentTime = startTime;
+     this.video.playbackRate = playbackRate;
+     await new Promise((resolve) => {
+       this.video.addEventListener("seeked", () => resolve(), { once: true });
+     });
+     return new Promise((resolve, reject) => {
+       const recordingTimeout = setTimeout(() => {
+         reject(new Error("Audio extraction timeout"));
+       }, (duration / playbackRate + 5) * 1e3);
+       this.mediaRecorder.start();
+       this.video.play();
+       setTimeout(async () => {
+         clearTimeout(recordingTimeout);
+         this.video.pause();
+         this.mediaRecorder.stop();
+         await new Promise((res) => {
+           this.mediaRecorder.addEventListener("stop", () => res(), { once: true });
+         });
+         try {
+           const audioBlob = new Blob(this.audioChunks, { type: "audio/webm" });
+           const arrayBuffer = await audioBlob.arrayBuffer();
+           const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
+           resolve(audioBuffer);
+         } catch (err) {
+           reject(new Error(`Failed to decode recorded audio: ${err}`));
+         }
+       }, duration / playbackRate * 1e3);
+     });
+   }
+   async close() {
+     if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+       this.mediaRecorder.stop();
+     }
+     this.video.pause();
+     this.video.src = "";
+     if (this.audioContext.state !== "closed") {
+       await this.audioContext.close();
+     }
+   }
+ };
+ async function extractAudioFromVideo(videoSrc, startTime, duration, playbackRate = 1, sampleRate = 48e3) {
+   const extractor = new VideoElementAudioExtractor(videoSrc, sampleRate);
+   try {
+     await extractor.initialize();
+     const audioBuffer = await extractor.extractAudio(startTime, duration, playbackRate);
+     return audioBuffer;
+   } finally {
+     await extractor.close();
+   }
+ }
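The extractor above records in real time through a MediaStreamAudioDestinationNode and a MediaRecorder, so capturing N seconds of audio costs roughly N / playbackRate seconds of wall-clock time. A minimal usage sketch of the helper (it is internal to this bundle, not exported; the URL is illustrative and must be same-origin or CORS-enabled):

// Decode the first 5 seconds of a clip's audio track in real time.
const audioBuffer = await extractAudioFromVideo(
  "https://example.com/clip.mp4", // assumed CORS-enabled source
  0,     // startTime, in seconds
  5,     // duration, in seconds
  1,     // playbackRate; capture time scales as duration / playbackRate
  48000  // AudioContext sample rate
);
console.log(audioBuffer.duration, audioBuffer.numberOfChannels);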
+
+ // src/audio/audio-processor.ts
+ function getAssetPlacement(frames) {
+   const assets = [];
+   const assetTimeMap = /* @__PURE__ */ new Map();
+   for (let frame = 0; frame < frames.length; frame++) {
+     for (const asset of frames[frame]) {
+       if (!assetTimeMap.has(asset.key)) {
+         assetTimeMap.set(asset.key, {
+           start: asset.currentTime,
+           end: asset.currentTime
+         });
+         assets.push({
+           key: asset.key,
+           src: asset.src,
+           type: asset.type,
+           startInVideo: frame,
+           endInVideo: frame,
+           duration: 0,
+           durationInSeconds: 0,
+           playbackRate: asset.playbackRate,
+           volume: asset.volume,
+           trimLeftInSeconds: asset.currentTime
+         });
+       } else {
+         const timeInfo = assetTimeMap.get(asset.key);
+         if (timeInfo) {
+           timeInfo.end = asset.currentTime;
+         }
+         const existingAsset = assets.find((a) => a.key === asset.key);
+         if (existingAsset) {
+           existingAsset.endInVideo = frame;
+         }
+       }
+     }
+   }
+   assets.forEach((asset) => {
+     const timeInfo = assetTimeMap.get(asset.key);
+     if (timeInfo) {
+       asset.durationInSeconds = (timeInfo.end - timeInfo.start) / asset.playbackRate;
+     }
+     asset.duration = asset.endInVideo - asset.startInVideo + 1;
+   });
+   return assets;
+ }
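getAssetPlacement collapses the per-frame asset samples gathered during rendering into one placement record per asset key: start and end frames come from where the key appears, and durations from the asset's own currentTime clock divided by its playback rate. A hand-traced sketch of its input and output (the asset shape is inferred from the fields read above; values are illustrative):

// Two consecutive rendered frames, each listing the media assets active in it.
const frames = [
  [{ key: "a1", src: "music.mp3", type: "audio", currentTime: 2, playbackRate: 1, volume: 1 }],
  [{ key: "a1", src: "music.mp3", type: "audio", currentTime: 2.5, playbackRate: 1, volume: 1 }]
];
const placements = getAssetPlacement(frames);
// placements[0] → { key: "a1", startInVideo: 0, endInVideo: 1, duration: 2,
//                   durationInSeconds: 0.5, trimLeftInSeconds: 2, ... }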
+ var BrowserAudioProcessor = class {
+   constructor(sampleRate = 48e3) {
+     this.sampleRate = sampleRate;
+     this.audioContext = new AudioContext({ sampleRate });
+   }
+   audioContext;
+   /**
+    * Fetch and decode audio from a media source
+    * Falls back to video element extraction if decodeAudioData fails
+    */
+   async fetchAndDecodeAudio(src) {
+     try {
+       const response = await fetch(src);
+       const arrayBuffer = await response.arrayBuffer();
+       return await this.audioContext.decodeAudioData(arrayBuffer);
+     } catch (err) {
+       try {
+         return await extractAudioFromVideo(
+           src,
+           0,
+           999999,
+           1,
+           this.sampleRate
+         );
+       } catch (fallbackErr) {
+         throw new Error(`Failed to extract audio: ${err}. Fallback also failed: ${fallbackErr}`);
+       }
+     }
+   }
+   /**
+    * Process audio asset with playback rate, volume, and timing
+    */
+   async processAudioAsset(asset, fps, totalFrames) {
+     const audioBuffer = await this.fetchAndDecodeAudio(asset.src);
+     const duration = totalFrames / fps;
+     const outputLength = Math.ceil(duration * this.sampleRate);
+     const outputBuffer = this.audioContext.createBuffer(
+       2,
+       // stereo
+       outputLength,
+       this.sampleRate
+     );
+     const startTime = asset.startInVideo / fps;
+     const trimLeft = asset.trimLeftInSeconds / asset.playbackRate;
+     const trimRight = trimLeft + asset.durationInSeconds;
+     for (let channel = 0; channel < 2; channel++) {
+       const inputData = audioBuffer.getChannelData(Math.min(channel, audioBuffer.numberOfChannels - 1));
+       const outputData = outputBuffer.getChannelData(channel);
+       const startSample = Math.floor(startTime * this.sampleRate);
+       const trimLeftSample = Math.floor(trimLeft * this.sampleRate);
+       const trimRightSample = Math.floor(trimRight * this.sampleRate);
+       for (let i = 0; i < outputData.length; i++) {
+         const outputTime = i / this.sampleRate;
+         const assetTime = outputTime - startTime;
+         if (assetTime < 0 || assetTime >= asset.durationInSeconds) {
+           outputData[i] = 0;
+         } else {
+           const inputSample = Math.floor(trimLeftSample + assetTime * asset.playbackRate * this.sampleRate);
+           if (inputSample >= 0 && inputSample < inputData.length) {
+             outputData[i] = inputData[inputSample] * asset.volume;
+           } else {
+             outputData[i] = 0;
+           }
+         }
+       }
+     }
+     return outputBuffer;
+   }
+   /**
+    * Mix multiple audio buffers
+    */
+   mixAudioBuffers(buffers) {
+     if (buffers.length === 0) {
+       return this.audioContext.createBuffer(2, 1, this.sampleRate);
+     }
+     const maxLength = Math.max(...buffers.map((b) => b.length));
+     const mixedBuffer = this.audioContext.createBuffer(2, maxLength, this.sampleRate);
+     for (let channel = 0; channel < 2; channel++) {
+       const mixedData = mixedBuffer.getChannelData(channel);
+       buffers.forEach((buffer) => {
+         const channelData = buffer.getChannelData(Math.min(channel, buffer.numberOfChannels - 1));
+         for (let i = 0; i < channelData.length; i++) {
+           mixedData[i] = (mixedData[i] || 0) + channelData[i] / buffers.length;
+         }
+       });
+     }
+     return mixedBuffer;
+   }
+   /**
+    * Convert AudioBuffer to WAV format
+    */
+   audioBufferToWav(buffer) {
+     const numberOfChannels = buffer.numberOfChannels;
+     const sampleRate = buffer.sampleRate;
+     const format = 1;
+     const bitDepth = 16;
+     const bytesPerSample = bitDepth / 8;
+     const blockAlign = numberOfChannels * bytesPerSample;
+     const data = new Float32Array(buffer.length * numberOfChannels);
+     for (let channel = 0; channel < numberOfChannels; channel++) {
+       const channelData = buffer.getChannelData(channel);
+       for (let i = 0; i < buffer.length; i++) {
+         data[i * numberOfChannels + channel] = channelData[i];
+       }
+     }
+     const dataLength = data.length * bytesPerSample;
+     const headerLength = 44;
+     const wav = new ArrayBuffer(headerLength + dataLength);
+     const view = new DataView(wav);
+     const writeString = (offset2, string) => {
+       for (let i = 0; i < string.length; i++) {
+         view.setUint8(offset2 + i, string.charCodeAt(i));
+       }
+     };
+     writeString(0, "RIFF");
+     view.setUint32(4, 36 + dataLength, true);
+     writeString(8, "WAVE");
+     writeString(12, "fmt ");
+     view.setUint32(16, 16, true);
+     view.setUint16(20, format, true);
+     view.setUint16(22, numberOfChannels, true);
+     view.setUint32(24, sampleRate, true);
+     view.setUint32(28, sampleRate * blockAlign, true);
+     view.setUint16(32, blockAlign, true);
+     view.setUint16(34, bitDepth, true);
+     writeString(36, "data");
+     view.setUint32(40, dataLength, true);
+     const volume = 0.8;
+     let offset = 44;
+     for (let i = 0; i < data.length; i++) {
+       const sample = Math.max(-1, Math.min(1, data[i]));
+       view.setInt16(offset, sample < 0 ? sample * 32768 : sample * 32767, true);
+       offset += 2;
+     }
+     return wav;
+   }
+   async close() {
+     await this.audioContext.close();
+   }
+ };
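Taken together, the processor turns placements into a single PCM track: each asset is sampled into a stereo buffer spanning the whole output timeline (nearest-sample lookup, trims and volume applied), the buffers are averaged, and the mix is serialized as 16-bit WAV. A pipeline sketch reusing `placements` from the previous sketch (the class is internal to this bundle):

const processor = new BrowserAudioProcessor(48000);
const fps = 30;
const totalFrames = 150; // a 5-second output timeline
// One timeline-length stereo buffer per placement.
const assetBuffer = await processor.processAudioAsset(placements[0], fps, totalFrames);
// Average the per-asset buffers, then serialize with a 44-byte RIFF header.
const mixed = processor.mixAudioBuffers([assetBuffer]);
const wavBytes = processor.audioBufferToWav(mixed); // ArrayBuffer of 16-bit PCM WAV
await processor.close();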
+
+ // src/audio/audio-video-muxer.ts
+ function getFFmpegBaseURL() {
+   if (typeof window !== "undefined") {
+     return `${window.location.origin}/ffmpeg`;
+   }
+   return "/ffmpeg";
+ }
+ async function muxAudioVideo(options) {
+   try {
+     const { FFmpeg } = await import("@ffmpeg/ffmpeg");
+     const { fetchFile } = await import("@ffmpeg/util");
+     const ffmpeg = new FFmpeg();
+     const base = getFFmpegBaseURL();
+     const coreURL = `${base}/ffmpeg-core.js`;
+     const wasmURL = `${base}/ffmpeg-core.wasm`;
+     await ffmpeg.load({
+       coreURL,
+       wasmURL
+     });
+     await ffmpeg.writeFile(
+       "video.mp4",
+       await fetchFile(options.videoBlob)
+     );
+     await ffmpeg.writeFile(
+       "audio.wav",
+       new Uint8Array(options.audioBuffer)
+     );
+     await ffmpeg.exec([
+       "-i",
+       "video.mp4",
+       "-i",
+       "audio.wav",
+       "-c:v",
+       "copy",
+       "-c:a",
+       "aac",
+       "-b:a",
+       "192k",
+       "-shortest",
+       "output.mp4"
+     ]);
+     const data = await ffmpeg.readFile("output.mp4");
+     const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
+     return new Blob([uint8], { type: "video/mp4" });
+   } catch {
+     return options.videoBlob;
+   }
+ }
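muxAudioVideo expects the host app to serve @ffmpeg/core's ffmpeg-core.js and ffmpeg-core.wasm under <origin>/ffmpeg/, and it deliberately swallows every failure, handing back the video-only blob instead. A call sketch (wavBytes as above; videoBlob stands for an encoded MP4 blob such as the exporter below produces):

// Remux: the video stream is copied as-is, the WAV is transcoded to 192 kbps AAC,
// and -shortest trims the output to the shorter input.
const muxed = await muxAudioVideo({
  videoBlob,
  audioBuffer: wavBytes
});
// If /ffmpeg/ffmpeg-core.{js,wasm} is not served, muxed === videoBlob (silent video).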
+
+ // src/browser-renderer.ts
+ var BrowserWasmExporter = class _BrowserWasmExporter {
+   constructor(settings) {
+     this.settings = settings;
+     this.fps = settings.fps || 30;
+   }
+   static id = "@twick/core/wasm";
+   static displayName = "Browser Video (Wasm)";
+   encoder;
+   videoBlob = null;
+   onProgressCallback;
+   currentFrame = 0;
+   fps = 30;
+   static async create(settings) {
+     return new _BrowserWasmExporter(settings);
+   }
+   async start() {
+     try {
+       const loadMp4Module = (await import("mp4-wasm")).default;
+       const possiblePaths = [
+         // Vite dev server virtual path
+         "/@mp4-wasm",
+         // Common bundled asset paths (Vite uses hashed names)
+         "/assets/mp4-wasm.wasm",
+         "/assets/mp4-YBRi_559.wasm",
+         // Known Vite hash
+         "/mp4-wasm.wasm",
+         // Node modules path (for dev)
+         "/node_modules/mp4-wasm/dist/mp4-wasm.wasm"
+       ];
+       let buffer = null;
+       let successPath = "";
+       for (const path of possiblePaths) {
+         try {
+           const resp = await fetch(path);
+           if (resp.ok) {
+             const contentType = resp.headers.get("content-type");
+             if (contentType && contentType.includes("html")) {
+               continue;
+             }
+             buffer = await resp.arrayBuffer();
+             successPath = path;
+             break;
+           }
+         } catch (e) {
+           continue;
+         }
+       }
+       if (!buffer) {
+         throw new Error(
+           "Could not load WASM file from any location. Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it."
+         );
+       }
+       const mp4 = await loadMp4Module({ wasmBinary: buffer });
+       this.encoder = mp4.createWebCodecsEncoder({
+         width: this.settings.size.x,
+         height: this.settings.size.y,
+         fps: this.fps
+       });
+     } catch (error) {
+       throw error;
+     }
+   }
+   async handleFrame(canvas, frameNumber) {
+     const frameIndex = frameNumber !== void 0 ? frameNumber : this.currentFrame;
+     const timestampMicroseconds = Math.round(frameIndex / this.fps * 1e6);
+     const frame = new VideoFrame(canvas, {
+       timestamp: timestampMicroseconds,
+       duration: Math.round(1 / this.fps * 1e6)
+     });
+     await this.encoder.addFrame(frame);
+     frame.close();
+     if (frameNumber === void 0) {
+       this.currentFrame++;
+     }
+   }
+   async stop() {
+     const buf = await this.encoder.end();
+     this.videoBlob = new Blob([buf], { type: "video/mp4" });
+   }
+   async generateAudio(assets, startFrame, endFrame) {
+     try {
+       const processor = new BrowserAudioProcessor();
+       const assetPlacements = getAssetPlacement(assets);
+       if (assetPlacements.length === 0) {
+         return null;
+       }
+       const processedBuffers = [];
+       for (const asset of assetPlacements) {
+         if (asset.volume > 0 && asset.playbackRate > 0) {
+           try {
+             const buffer = await processor.processAudioAsset(
+               asset,
+               this.settings.fps || 30,
+               endFrame - startFrame
+             );
+             processedBuffers.push(buffer);
+           } catch {
+           }
+         }
+       }
+       if (processedBuffers.length === 0) {
+         return null;
+       }
+       const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
+       const wavData = processor.audioBufferToWav(mixedBuffer);
+       await processor.close();
+       return wavData;
+     } catch {
+       return null;
+     }
+   }
+   async mergeMedia() {
+   }
+   async downloadVideos(assets) {
+   }
+   getVideoBlob() {
+     return this.videoBlob;
+   }
+   setProgressCallback(callback) {
+     this.onProgressCallback = callback;
+   }
+ };
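The exporter's lifecycle is start → one handleFrame per rendered frame → stop, after which the encoded MP4 is available as a blob. A minimal sketch (renderTwickVideoInBrowser below drives this class internally; `canvas` stands for any canvas holding a rendered frame, Vector2 comes from @twick/core, and mp4-wasm.wasm must be reachable at one of the probed paths, e.g. copied into the app's public directory):

const exporter = await BrowserWasmExporter.create({
  size: new Vector2(1280, 720), // encoder dimensions
  fps: 30
});
await exporter.start();                // fetch the wasm, build the WebCodecs encoder
await exporter.handleFrame(canvas, 0); // wrap the canvas in a timestamped VideoFrame
await exporter.stop();                 // finalize the encoded stream
const mp4Blob = exporter.getVideoBlob();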
+ var renderTwickVideoInBrowser = async (config) => {
+   const originalVideoPlay = HTMLVideoElement.prototype.play;
+   const originalAudioPlay = HTMLAudioElement.prototype.play;
+   const originalCreateElement = document.createElement.bind(document);
+   HTMLVideoElement.prototype.play = function() {
+     this.muted = true;
+     this.volume = 0;
+     return originalVideoPlay.call(this);
+   };
+   HTMLAudioElement.prototype.play = function() {
+     this.muted = true;
+     this.volume = 0;
+     return originalAudioPlay.call(this);
+   };
+   document.createElement = function(tagName, options) {
+     const element = originalCreateElement(tagName, options);
+     if (tagName.toLowerCase() === "video" || tagName.toLowerCase() === "audio") {
+       element.muted = true;
+       element.volume = 0;
+     }
+     return element;
+   };
+   try {
+     const { projectFile, variables, settings = {} } = config;
+     if (!variables || !variables.input) {
+       throw new Error('Invalid configuration. "variables.input" is required.');
+     }
+     const width = settings.width || variables.input.properties?.width || 1920;
+     const height = settings.height || variables.input.properties?.height || 1080;
+     const fps = settings.fps || variables.input.properties?.fps || 30;
+     const project = !projectFile ? defaultProject : projectFile;
+     project.variables = variables;
+     const renderSettings = {
+       name: "browser-render",
+       exporter: {
+         name: "@twick/core/wasm"
+       },
+       size: new Vector2(width, height),
+       resolutionScale: 1,
+       colorSpace: "srgb",
+       fps,
+       range: settings.range || [0, Infinity],
+       background: variables.input.backgroundColor || "#000000",
+       ...settings.quality && {
+         quality: settings.quality
+       }
+     };
+     const renderer = new Renderer(project);
+     const exporter = await BrowserWasmExporter.create(renderSettings);
+     await exporter.start();
+     if (settings.onProgress) {
+       exporter.setProgressCallback(settings.onProgress);
+     }
+     await renderer["reloadScenes"](renderSettings);
+     renderer.stage.configure(renderSettings);
+     renderer.playback.fps = renderSettings.fps;
+     renderer.playback.state = 1;
+     const totalFrames = await renderer.getNumberOfFrames(renderSettings);
+     if (totalFrames === 0 || !isFinite(totalFrames)) {
+       throw new Error(
+         "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
+       );
+     }
+     const videoElements = [];
+     if (variables.input.tracks) {
+       variables.input.tracks.forEach((track) => {
+         if (track.elements) {
+           track.elements.forEach((el) => {
+             if (el.type === "video") videoElements.push(el);
+           });
+         }
+       });
+     }
+     if (videoElements.length > 0) {
+       for (const videoEl of videoElements) {
+         const src = videoEl.props?.src;
+         if (!src || src === "undefined") continue;
+         const preloadVideo = document.createElement("video");
+         preloadVideo.crossOrigin = "anonymous";
+         preloadVideo.preload = "metadata";
+         preloadVideo.src = src;
+         await new Promise((resolve, reject) => {
+           const timeout = setTimeout(
+             () => reject(new Error(`Timeout loading video metadata: ${src.substring(0, 80)}`)),
+             3e4
+           );
+           preloadVideo.addEventListener("loadedmetadata", () => {
+             clearTimeout(timeout);
+             resolve();
+           }, { once: true });
+           preloadVideo.addEventListener("error", () => {
+             clearTimeout(timeout);
+             const err = preloadVideo.error;
+             reject(new Error(`Failed to load video: ${err?.message || "Unknown error"}`));
+           }, { once: true });
+         });
+       }
+     }
+     await renderer.playback.recalculate();
+     await renderer.playback.reset();
+     await renderer.playback.seek(0);
+     const mediaAssets = [];
+     for (let frame = 0; frame < totalFrames; frame++) {
+       if (frame > 0) {
+         await renderer.playback.progress();
+       }
+       await renderer.stage.render(
+         renderer.playback.currentScene,
+         renderer.playback.previousScene
+       );
+       const currentAssets = renderer.playback.currentScene.getMediaAssets?.() || [];
+       mediaAssets.push(currentAssets);
+       const canvas = renderer.stage.finalBuffer;
+       await exporter.handleFrame(canvas, frame);
+       if (settings.onProgress) settings.onProgress(frame / totalFrames);
+     }
+     await exporter.stop();
+     let audioData = null;
+     if (settings.includeAudio && mediaAssets.length > 0) {
+       audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
+     }
+     let finalBlob = exporter.getVideoBlob();
+     if (!finalBlob) {
+       throw new Error("Failed to create video blob");
+     }
+     if (audioData && settings.includeAudio) {
+       try {
+         finalBlob = await muxAudioVideo({
+           videoBlob: finalBlob,
+           audioBuffer: audioData
+         });
+       } catch {
+         const audioBlob = new Blob([audioData], { type: "audio/wav" });
+         const audioUrl = URL.createObjectURL(audioBlob);
+         const a = document.createElement("a");
+         a.href = audioUrl;
+         a.download = "audio.wav";
+         a.click();
+         URL.revokeObjectURL(audioUrl);
+       }
+     }
+     if (settings.onComplete) {
+       settings.onComplete(finalBlob);
+     }
+     return finalBlob;
+   } catch (error) {
+     if (config.settings?.onError) {
+       config.settings.onError(error);
+     }
+     throw error;
+   } finally {
+     HTMLVideoElement.prototype.play = originalVideoPlay;
+     HTMLAudioElement.prototype.play = originalAudioPlay;
+     document.createElement = originalCreateElement;
+   }
+ };
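renderTwickVideoInBrowser (also the package's default export) ties the pipeline together: it mutes all media playback for the duration of the render, encodes every frame through the exporter, optionally mixes and muxes audio, and restores the patched globals in the finally block. A usage sketch (the track/element shape follows the preload logic above; the URL and dimensions are illustrative):

import renderTwickVideoInBrowser, { downloadVideoBlob } from "@twick/browser-render";

const blob = await renderTwickVideoInBrowser({
  variables: {
    input: {
      properties: { width: 1280, height: 720, fps: 30 },
      tracks: [
        { elements: [{ type: "video", props: { src: "https://example.com/clip.mp4" } }] }
      ]
    }
  },
  settings: {
    includeAudio: true, // mix asset audio and mux it in via ffmpeg
    onProgress: (p) => console.log(`rendered ${Math.round(p * 100)}%`)
  }
});
downloadVideoBlob(blob, "render.mp4"); // hidden-anchor download helper, defined below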
+ var downloadVideoBlob = (videoBlob, filename = "video.mp4") => {
+   const url = URL.createObjectURL(videoBlob);
+   const a = document.createElement("a");
+   a.href = url;
+   a.download = filename;
+   a.style.display = "none";
+   document.body.appendChild(a);
+   a.click();
+   document.body.removeChild(a);
+   setTimeout(() => URL.revokeObjectURL(url), 1e3);
+ };
+
+ // src/hooks/use-browser-renderer.ts
+ import { useState, useCallback } from "react";
+ var useBrowserRenderer = (options = {}) => {
+   const [progress, setProgress] = useState(0);
+   const [isRendering, setIsRendering] = useState(false);
+   const [error, setError] = useState(null);
+   const [videoBlob, setVideoBlob] = useState(null);
+   const reset = useCallback(() => {
+     setProgress(0);
+     setIsRendering(false);
+     setError(null);
+     setVideoBlob(null);
+   }, []);
+   const download = useCallback((filename) => {
+     if (!videoBlob) {
+       setError(new Error("No video available to download. Please render the video first."));
+       return;
+     }
+     try {
+       downloadVideoBlob(videoBlob, filename || options.downloadFilename || "video.mp4");
+     } catch (err) {
+       setError(err instanceof Error ? err : new Error("Failed to download video"));
+     }
+   }, [videoBlob, options.downloadFilename]);
+   const render = useCallback(async (variables) => {
+     reset();
+     setIsRendering(true);
+     try {
+       const { projectFile, width, height, fps, quality, range, includeAudio, downloadAudioSeparately, onAudioReady, autoDownload, downloadFilename, ...restOptions } = options;
+       const blob = await renderTwickVideoInBrowser({
+         projectFile,
+         variables,
+         settings: {
+           width,
+           height,
+           includeAudio,
+           downloadAudioSeparately,
+           onAudioReady,
+           fps,
+           quality,
+           range,
+           ...restOptions,
+           onProgress: (p) => {
+             setProgress(p);
+           },
+           onComplete: (blob2) => {
+             setVideoBlob(blob2);
+             if (autoDownload) {
+               try {
+                 downloadVideoBlob(blob2, downloadFilename || "video.mp4");
+               } catch (downloadErr) {
+                 setError(downloadErr instanceof Error ? downloadErr : new Error("Failed to auto-download video"));
+               }
+             }
+           },
+           onError: (err) => {
+             setError(err);
+           }
+         }
+       });
+       if (!blob) {
+         throw new Error("Rendering failed: No video blob was generated");
+       }
+       setVideoBlob(blob);
+       setProgress(1);
+       return blob;
+     } catch (err) {
+       setError(err instanceof Error ? err : new Error(String(err)));
+       return null;
+     } finally {
+       setIsRendering(false);
+     }
+   }, [options, reset]);
+   return {
+     render,
+     progress,
+     isRendering,
+     error,
+     videoBlob,
+     download,
+     reset
+   };
+ };
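For React apps, useBrowserRenderer wraps the same flow in hook state. A component sketch (the `input` object follows the shape used in the previous sketch):

import { useBrowserRenderer } from "@twick/browser-render";

function ExportButton({ input }) {
  const { render, progress, isRendering, error, download } = useBrowserRenderer({
    fps: 30,
    includeAudio: true,
    downloadFilename: "export.mp4"
  });
  return (
    <>
      {/* render() takes the project variables; the hook tracks progress and errors */}
      <button disabled={isRendering} onClick={() => render({ input })}>
        {isRendering ? `Rendering ${Math.round(progress * 100)}%` : "Export"}
      </button>
      <button onClick={() => download()}>Download</button>
      {error && <p role="alert">{error.message}</p>}
    </>
  );
}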
+ export {
+   renderTwickVideoInBrowser as default,
+   downloadVideoBlob,
+   renderTwickVideoInBrowser,
+   useBrowserRenderer
+ };
+ //# sourceMappingURL=index.mjs.map