@twick/browser-render 0.15.7 → 0.15.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -46,19 +46,21 @@ a.click();
 import { useBrowserRenderer } from '@twick/browser-render';
 
 function VideoRenderer() {
-  const { render, progress, isRendering, videoBlob, download } = useBrowserRenderer({
-    width: 1920,
-    height: 1080,
-    fps: 30,
-    autoDownload: false
-  });
+  const { render, progress, isRendering, videoBlob, download, error, reset } =
+    useBrowserRenderer({
+      width: 720,
+      height: 1280,
+      fps: 30,
+      includeAudio: true, // enable audio rendering + FFmpeg mux
+      autoDownload: true, // auto-download final MP4
+    });
 
   const handleRender = async () => {
     await render({
       input: {
-        properties: { width: 1920, height: 1080, fps: 30 },
-        tracks: [/* ... */]
-      }
+        properties: { width: 720, height: 1280, fps: 30 },
+        tracks: [/* ... */],
+      },
     });
   };
 
@@ -69,6 +71,12 @@ function VideoRenderer() {
       </button>
       {isRendering && <progress value={progress} max={1} />}
       {videoBlob && <button onClick={download}>Download</button>}
+      {error && (
+        <div>
+          <p>{error.message}</p>
+          <button onClick={reset}>Clear error</button>
+        </div>
+      )}
     </div>
   );
 }
@@ -92,6 +100,9 @@ function VideoRenderer() {
   fps?: number; // Frames per second (default: 30)
   quality?: 'low' | 'medium' | 'high';
   range?: [number, number]; // [start, end] in seconds
+  includeAudio?: boolean; // Enable audio rendering and muxing (default: false)
+  downloadAudioSeparately?: boolean; // Also download audio.wav when muxing fails
+  onAudioReady?: (audioBlob: Blob) => void; // Callback when raw audio is ready
   onProgress?: (progress: number) => void;
   onComplete?: (videoBlob: Blob) => void;
   onError?: (error: Error) => void;
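
The three audio settings added above work together: `includeAudio` turns on audio rendering and the FFmpeg mux step, `onAudioReady` receives the raw mixed audio as a WAV blob, and `downloadAudioSeparately` saves `audio.wav` when muxing is unavailable. A minimal sketch of how they might be combined (component name, handlers, and logging are illustrative, not part of the package):

```tsx
import { useBrowserRenderer } from '@twick/browser-render';

function AudioAwareRenderer() {
  const { render, progress, isRendering, error } = useBrowserRenderer({
    width: 720,
    height: 1280,
    fps: 30,
    includeAudio: true,            // render audio and attempt in-browser muxing
    downloadAudioSeparately: true, // keep audio.wav if the mux step fails
    onAudioReady: (audioBlob) => {
      // raw mixed audio before muxing; the size check is purely illustrative
      console.log('audio ready:', audioBlob.size, 'bytes');
    },
    onError: (err) => console.error('render failed:', err),
  });

  // call render({ input: { properties: {...}, tracks: [...] } }) as shown earlier
  return isRendering ? <progress value={progress} max={1} /> : null;
}
```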
@@ -130,9 +141,38 @@ export default defineConfig({
 });
 ```
 
+### FFmpeg audio/video muxing (optional)
+
+When `settings.includeAudio` is enabled, `@twick/browser-render` will render audio in the browser and (by default) try to mux it into the final MP4 using `@ffmpeg/ffmpeg`.
+To match the setup used in `@twick/examples`, you should:
+
+1. Install the FFmpeg packages:
+
+```bash
+npm install @ffmpeg/ffmpeg @ffmpeg/util @ffmpeg/core
+```
+
+2. Expose the FFmpeg core files from your app’s `public` folder so they are available at:
+
+```text
+/public/ffmpeg/ffmpeg-core.js
+/public/ffmpeg/ffmpeg-core.wasm
+```
+
+For example, you can copy them from `node_modules/@ffmpeg/core/dist`:
+
+```bash
+mkdir -p public/ffmpeg
+cp node_modules/@ffmpeg/core/dist/ffmpeg-core.js public/ffmpeg/
+cp node_modules/@ffmpeg/core/dist/ffmpeg-core.wasm public/ffmpeg/
+```
+
+In the browser, the muxer loads these files from `${window.location.origin}/ffmpeg`, so the URLs must match that structure.
+If FFmpeg cannot be loaded, the renderer will fall back to returning a video-only file and (optionally) downloading `audio.wav` separately when `downloadAudioSeparately` is true.
+
 ## Limitations
 
-- **Audio**: Audio processing is not yet implemented. Only video encoding is supported.
+- **Audio**: Audio rendering and FFmpeg-based muxing run entirely in the browser and are still considered experimental. If FFmpeg assets are not available, only video will be muxed and audio may be downloaded as a separate file.
 - **Browser Support**: Requires WebCodecs API (Chrome 94+, Edge 94+)
 
 ## License
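
When the fallback path kicks in (a video-only MP4 plus a separately downloaded `audio.wav`), the two files can still be combined offline with a stock FFmpeg command. This mirrors the arguments the in-browser muxer passes to `ffmpeg.exec`; the output filename is arbitrary:

```bash
# Offline equivalent of the in-browser mux: copy the video stream, encode audio as AAC
ffmpeg -i video.mp4 -i audio.wav -c:v copy -c:a aac -b:a 192k -shortest output.mp4
```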
package/dist/index.js CHANGED
@@ -41,6 +41,95 @@ module.exports = __toCommonJS(index_exports);
 var import_core = require("@twick/core");
 var import_project = __toESM(require("@twick/visualizer/dist/project.js"));
 
+// src/audio/video-audio-extractor.ts
+var VideoElementAudioExtractor = class {
+  audioContext;
+  video;
+  destination = null;
+  mediaRecorder = null;
+  audioChunks = [];
+  constructor(videoSrc, sampleRate = 48e3) {
+    this.audioContext = new AudioContext({ sampleRate });
+    this.video = document.createElement("video");
+    this.video.crossOrigin = "anonymous";
+    this.video.src = videoSrc;
+    this.video.muted = true;
+  }
+  async initialize() {
+    return new Promise((resolve, reject) => {
+      this.video.addEventListener("loadedmetadata", () => resolve(), { once: true });
+      this.video.addEventListener("error", (e) => {
+        reject(new Error(`Failed to load video for audio extraction: ${e}`));
+      }, { once: true });
+    });
+  }
+  /**
+   * Extract audio by playing the video and capturing audio output
+   */
+  async extractAudio(startTime, duration, playbackRate = 1) {
+    const source = this.audioContext.createMediaElementSource(this.video);
+    this.destination = this.audioContext.createMediaStreamDestination();
+    source.connect(this.destination);
+    this.audioChunks = [];
+    this.mediaRecorder = new MediaRecorder(this.destination.stream, {
+      mimeType: "audio/webm"
+    });
+    this.mediaRecorder.ondataavailable = (event) => {
+      if (event.data.size > 0) {
+        this.audioChunks.push(event.data);
+      }
+    };
+    this.video.currentTime = startTime;
+    this.video.playbackRate = playbackRate;
+    await new Promise((resolve) => {
+      this.video.addEventListener("seeked", () => resolve(), { once: true });
+    });
+    return new Promise((resolve, reject) => {
+      const recordingTimeout = setTimeout(() => {
+        reject(new Error("Audio extraction timeout"));
+      }, (duration / playbackRate + 5) * 1e3);
+      this.mediaRecorder.start();
+      this.video.play();
+      setTimeout(async () => {
+        clearTimeout(recordingTimeout);
+        this.video.pause();
+        this.mediaRecorder.stop();
+        await new Promise((res) => {
+          this.mediaRecorder.addEventListener("stop", () => res(), { once: true });
+        });
+        try {
+          const audioBlob = new Blob(this.audioChunks, { type: "audio/webm" });
+          const arrayBuffer = await audioBlob.arrayBuffer();
+          const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
+          resolve(audioBuffer);
+        } catch (err) {
+          reject(new Error(`Failed to decode recorded audio: ${err}`));
+        }
+      }, duration / playbackRate * 1e3);
+    });
+  }
+  async close() {
+    if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+      this.mediaRecorder.stop();
+    }
+    this.video.pause();
+    this.video.src = "";
+    if (this.audioContext.state !== "closed") {
+      await this.audioContext.close();
+    }
+  }
+};
+async function extractAudioFromVideo(videoSrc, startTime, duration, playbackRate = 1, sampleRate = 48e3) {
+  const extractor = new VideoElementAudioExtractor(videoSrc, sampleRate);
+  try {
+    await extractor.initialize();
+    const audioBuffer = await extractor.extractAudio(startTime, duration, playbackRate);
+    return audioBuffer;
+  } finally {
+    await extractor.close();
+  }
+}
+
 // src/audio/audio-processor.ts
 function getAssetPlacement(frames) {
   const assets = [];
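
The `extractAudioFromVideo` helper added above captures audio by actually playing the source through a `MediaStreamAudioDestinationNode` and a `MediaRecorder`, so extraction takes roughly `duration / playbackRate` seconds of wall-clock time. A hypothetical direct call (the helper appears to be module-internal, and the clip URL is illustrative):

```js
// Illustrative only: plays ~10 s of the clip in real time and resolves with an AudioBuffer
const buffer = await extractAudioFromVideo("/media/clip.mp4", 0, 10, 1, 48000);
console.log(buffer.duration, buffer.numberOfChannels, buffer.sampleRate);
```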
@@ -93,11 +182,26 @@ var BrowserAudioProcessor = class {
   audioContext;
   /**
    * Fetch and decode audio from a media source
+   * Falls back to video element extraction if decodeAudioData fails
    */
   async fetchAndDecodeAudio(src) {
-    const response = await fetch(src);
-    const arrayBuffer = await response.arrayBuffer();
-    return await this.audioContext.decodeAudioData(arrayBuffer);
+    try {
+      const response = await fetch(src);
+      const arrayBuffer = await response.arrayBuffer();
+      return await this.audioContext.decodeAudioData(arrayBuffer);
+    } catch (err) {
+      try {
+        return await extractAudioFromVideo(
+          src,
+          0,
+          999999,
+          1,
+          this.sampleRate
+        );
+      } catch (fallbackErr) {
+        throw new Error(`Failed to extract audio: ${err}. Fallback also failed: ${fallbackErr}`);
+      }
+    }
   }
   /**
    * Process audio asset with playback rate, volume, and timing
@@ -211,6 +315,55 @@ var BrowserAudioProcessor = class {
   }
 };
 
+// src/audio/audio-video-muxer.ts
+function getFFmpegBaseURL() {
+  if (typeof window !== "undefined") {
+    return `${window.location.origin}/ffmpeg`;
+  }
+  return "/ffmpeg";
+}
+async function muxAudioVideo(options) {
+  try {
+    const { FFmpeg } = await import("@ffmpeg/ffmpeg");
+    const { fetchFile } = await import("@ffmpeg/util");
+    const ffmpeg = new FFmpeg();
+    const base = getFFmpegBaseURL();
+    const coreURL = `${base}/ffmpeg-core.js`;
+    const wasmURL = `${base}/ffmpeg-core.wasm`;
+    await ffmpeg.load({
+      coreURL,
+      wasmURL
+    });
+    await ffmpeg.writeFile(
+      "video.mp4",
+      await fetchFile(options.videoBlob)
+    );
+    await ffmpeg.writeFile(
+      "audio.wav",
+      new Uint8Array(options.audioBuffer)
+    );
+    await ffmpeg.exec([
+      "-i",
+      "video.mp4",
+      "-i",
+      "audio.wav",
+      "-c:v",
+      "copy",
+      "-c:a",
+      "aac",
+      "-b:a",
+      "192k",
+      "-shortest",
+      "output.mp4"
+    ]);
+    const data = await ffmpeg.readFile("output.mp4");
+    const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
+    return new Blob([uint8], { type: "video/mp4" });
+  } catch {
+    return options.videoBlob;
+  }
+}
+
 // src/browser-renderer.ts
 var BrowserWasmExporter = class _BrowserWasmExporter {
   constructor(settings) {
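
`muxAudioVideo` above imports `@ffmpeg/ffmpeg` dynamically and silently returns the unmuxed video blob on any failure, so missing core assets never surface as an error. A hedged sketch of a pre-flight check an app could run before enabling `includeAudio` (the function name is illustrative; the URL follows the `${origin}/ffmpeg` convention the muxer expects):

```ts
// Sketch: confirm the FFmpeg core files are actually being served before rendering with audio
async function ffmpegAssetsAvailable(): Promise<boolean> {
  try {
    const res = await fetch(`${window.location.origin}/ffmpeg/ffmpeg-core.wasm`, { method: "HEAD" });
    return res.ok;
  } catch {
    return false;
  }
}
```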
@@ -271,7 +424,6 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
         fps: this.fps
       });
     } catch (error) {
-      console.error("WASM loading error:", error);
       throw error;
     }
   }
@@ -294,42 +446,33 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   }
   async generateAudio(assets, startFrame, endFrame) {
     try {
-      console.log("\u{1F50A} Starting audio processing...", {
-        frames: assets.length,
-        startFrame,
-        endFrame
-      });
       const processor = new BrowserAudioProcessor();
       const assetPlacements = getAssetPlacement(assets);
-      console.log(`\u{1F4CA} Found ${assetPlacements.length} audio assets to process`);
       if (assetPlacements.length === 0) {
-        console.log("\u26A0\uFE0F No audio assets found");
         return null;
       }
       const processedBuffers = [];
       for (const asset of assetPlacements) {
         if (asset.volume > 0 && asset.playbackRate > 0) {
-          console.log(`\u{1F3B5} Processing audio: ${asset.key}`);
-          const buffer = await processor.processAudioAsset(
-            asset,
-            this.settings.fps || 30,
-            endFrame - startFrame
-          );
-          processedBuffers.push(buffer);
+          try {
+            const buffer = await processor.processAudioAsset(
+              asset,
+              this.settings.fps || 30,
+              endFrame - startFrame
+            );
+            processedBuffers.push(buffer);
+          } catch {
+          }
         }
       }
       if (processedBuffers.length === 0) {
-        console.log("\u26A0\uFE0F No audio buffers to mix");
         return null;
       }
-      console.log(`\u{1F39B}\uFE0F Mixing ${processedBuffers.length} audio track(s)...`);
       const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
       const wavData = processor.audioBufferToWav(mixedBuffer);
       await processor.close();
-      console.log(`\u2705 Audio processed: ${(wavData.byteLength / 1024 / 1024).toFixed(2)} MB`);
       return wavData;
-    } catch (error) {
-      console.error("\u274C Audio processing failed:", error);
+    } catch {
       return null;
     }
   }
@@ -374,12 +517,7 @@ var renderTwickVideoInBrowser = async (config) => {
   const width = settings.width || variables.input.properties?.width || 1920;
   const height = settings.height || variables.input.properties?.height || 1080;
   const fps = settings.fps || variables.input.properties?.fps || 30;
-  let project;
-  if (!projectFile) {
-    project = import_project.default;
-  } else {
-    project = projectFile;
-  }
+  const project = !projectFile ? import_project.default : projectFile;
   project.variables = variables;
   const renderSettings = {
     name: "browser-render",
@@ -407,6 +545,46 @@ var renderTwickVideoInBrowser = async (config) => {
   renderer.playback.fps = renderSettings.fps;
   renderer.playback.state = 1;
   const totalFrames = await renderer.getNumberOfFrames(renderSettings);
+  if (totalFrames === 0 || !isFinite(totalFrames)) {
+    throw new Error(
+      "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
+    );
+  }
+  const videoElements = [];
+  if (variables.input.tracks) {
+    variables.input.tracks.forEach((track) => {
+      if (track.elements) {
+        track.elements.forEach((el) => {
+          if (el.type === "video") videoElements.push(el);
+        });
+      }
+    });
+  }
+  if (videoElements.length > 0) {
+    for (const videoEl of videoElements) {
+      const src = videoEl.props?.src;
+      if (!src || src === "undefined") continue;
+      const preloadVideo = document.createElement("video");
+      preloadVideo.crossOrigin = "anonymous";
+      preloadVideo.preload = "metadata";
+      preloadVideo.src = src;
+      await new Promise((resolve, reject) => {
+        const timeout = setTimeout(
+          () => reject(new Error(`Timeout loading video metadata: ${src.substring(0, 80)}`)),
+          3e4
+        );
+        preloadVideo.addEventListener("loadedmetadata", () => {
+          clearTimeout(timeout);
+          resolve();
+        }, { once: true });
+        preloadVideo.addEventListener("error", () => {
+          clearTimeout(timeout);
+          const err = preloadVideo.error;
+          reject(new Error(`Failed to load video: ${err?.message || "Unknown error"}`));
+        }, { once: true });
+      });
+    }
+  }
   await renderer.playback.recalculate();
   await renderer.playback.reset();
   await renderer.playback.seek(0);
@@ -423,14 +601,11 @@ var renderTwickVideoInBrowser = async (config) => {
     mediaAssets.push(currentAssets);
     const canvas = renderer.stage.finalBuffer;
     await exporter.handleFrame(canvas, frame);
-    if (settings.onProgress) {
-      settings.onProgress(frame / totalFrames);
-    }
+    if (settings.onProgress) settings.onProgress(frame / totalFrames);
   }
   await exporter.stop();
   let audioData = null;
   if (settings.includeAudio && mediaAssets.length > 0) {
-    console.log("\u{1F3B5} Generating audio track...");
     audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
   }
   let finalBlob = exporter.getVideoBlob();
@@ -438,9 +613,12 @@ var renderTwickVideoInBrowser = async (config) => {
     throw new Error("Failed to create video blob");
   }
   if (audioData && settings.includeAudio) {
-    console.log("\u2705 Audio extracted and processed successfully");
-    console.log("\u{1F4CA} Audio data size:", (audioData.byteLength / 1024 / 1024).toFixed(2), "MB");
-    if (settings.downloadAudioSeparately) {
+    try {
+      finalBlob = await muxAudioVideo({
+        videoBlob: finalBlob,
+        audioBuffer: audioData
+      });
+    } catch {
       const audioBlob = new Blob([audioData], { type: "audio/wav" });
       const audioUrl = URL.createObjectURL(audioBlob);
       const a = document.createElement("a");
@@ -448,15 +626,7 @@ var renderTwickVideoInBrowser = async (config) => {
       a.download = "audio.wav";
       a.click();
       URL.revokeObjectURL(audioUrl);
-      console.log("\u2705 Audio downloaded separately as audio.wav");
-    }
-    if (settings.onAudioReady) {
-      const audioBlob = new Blob([audioData], { type: "audio/wav" });
-      settings.onAudioReady(audioBlob);
     }
-    console.log("\u{1F4A1} Note: Client-side audio muxing is complex.");
-    console.log("\u{1F4A1} For full audio support, use server-side rendering: @twick/render-server");
-    console.log("\u{1F4A1} Or mux manually with: ffmpeg -i video.mp4 -i audio.wav -c:v copy -c:a aac output.mp4");
   }
   if (settings.onComplete) {
     settings.onComplete(finalBlob);
@@ -500,17 +670,13 @@ var useBrowserRenderer = (options = {}) => {
   }, []);
   const download = (0, import_react.useCallback)((filename) => {
     if (!videoBlob) {
-      const downloadError = new Error("No video available to download. Please render the video first.");
-      setError(downloadError);
-      console.error(downloadError.message);
+      setError(new Error("No video available to download. Please render the video first."));
       return;
     }
     try {
       downloadVideoBlob(videoBlob, filename || options.downloadFilename || "video.mp4");
     } catch (err) {
-      const downloadError = err instanceof Error ? err : new Error("Failed to download video");
-      setError(downloadError);
-      console.error("Download error:", downloadError);
+      setError(err instanceof Error ? err : new Error("Failed to download video"));
     }
   }, [videoBlob, options.downloadFilename]);
   const render = (0, import_react.useCallback)(async (variables) => {
@@ -540,9 +706,7 @@ var useBrowserRenderer = (options = {}) => {
         try {
           downloadVideoBlob(blob2, downloadFilename || "video.mp4");
         } catch (downloadErr) {
-          const error2 = downloadErr instanceof Error ? downloadErr : new Error("Failed to auto-download video");
-          setError(error2);
-          console.error("Auto-download error:", error2);
+          setError(downloadErr instanceof Error ? downloadErr : new Error("Failed to auto-download video"));
         }
       }
     },
@@ -558,9 +722,7 @@ var useBrowserRenderer = (options = {}) => {
       setProgress(1);
       return blob;
     } catch (err) {
-      const error2 = err instanceof Error ? err : new Error(String(err));
-      setError(error2);
-      console.error("Render error:", error2);
+      setError(err instanceof Error ? err : new Error(String(err)));
       return null;
     } finally {
       setIsRendering(false);