@twick/browser-render 0.15.7 → 0.15.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -40,6 +40,169 @@ module.exports = __toCommonJS(index_exports);
  // src/browser-renderer.ts
  var import_core = require("@twick/core");
  var import_project = __toESM(require("@twick/visualizer/dist/project.js"));
+ var import_media_utils = require("@twick/media-utils");
+
+ // src/audio/video-audio-extractor.ts
+ var VideoElementAudioExtractor = class {
+   audioContext;
+   video;
+   destination = null;
+   mediaRecorder = null;
+   audioChunks = [];
+   constructor(videoSrc, sampleRate = 48e3) {
+     this.audioContext = new AudioContext({ sampleRate });
+     this.video = document.createElement("video");
+     this.video.crossOrigin = "anonymous";
+     this.video.src = videoSrc;
+     this.video.muted = true;
+   }
+   async initialize() {
+     return new Promise((resolve, reject) => {
+       this.video.addEventListener("loadedmetadata", () => resolve(), { once: true });
+       this.video.addEventListener("error", (e) => {
+         reject(new Error(`Failed to load video for audio extraction: ${e}`));
+       }, { once: true });
+     });
+   }
+   /**
+    * Extract audio by playing the video and capturing audio output
+    */
+   async extractAudio(startTime, duration, playbackRate = 1) {
+     try {
+       const source = this.audioContext.createMediaElementSource(this.video);
+       this.destination = this.audioContext.createMediaStreamDestination();
+       source.connect(this.destination);
+     } catch (err) {
+       throw new Error("Video has no audio track");
+     }
+     this.audioChunks = [];
+     let mimeType = "audio/webm";
+     if (!MediaRecorder.isTypeSupported(mimeType)) {
+       mimeType = "";
+     }
+     try {
+       this.mediaRecorder = new MediaRecorder(this.destination.stream, {
+         mimeType: mimeType || void 0
+       });
+     } catch (err) {
+       throw new Error(`Failed to create MediaRecorder: ${err}. Video may have no audio track.`);
+     }
+     this.mediaRecorder.ondataavailable = (event) => {
+       if (event.data && event.data.size > 0) {
+         this.audioChunks.push(event.data);
+       }
+     };
+     this.video.currentTime = startTime;
+     this.video.playbackRate = playbackRate;
+     await new Promise((resolve, reject) => {
+       const seekTimeout = setTimeout(() => {
+         reject(new Error("Video seek timeout"));
+       }, 5e3);
+       this.video.addEventListener("seeked", () => {
+         clearTimeout(seekTimeout);
+         resolve();
+       }, { once: true });
+       this.video.addEventListener("error", () => {
+         clearTimeout(seekTimeout);
+         reject(new Error("Video seek error"));
+       }, { once: true });
+     });
+     return new Promise((resolve, reject) => {
+       const recordingTimeout = setTimeout(() => {
+         this.video.pause();
+         if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+           this.mediaRecorder.stop();
+         }
+         reject(new Error("Audio extraction timeout - video may have no audio track"));
+       }, (duration / playbackRate + 10) * 1e3);
+       let hasData = false;
+       const dataCheckInterval = setInterval(() => {
+         if (this.audioChunks.length > 0 && this.audioChunks.some((chunk) => chunk.size > 0)) {
+           hasData = true;
+         }
+       }, 1e3);
+       this.mediaRecorder.onerror = (event) => {
+         clearInterval(dataCheckInterval);
+         clearTimeout(recordingTimeout);
+         this.video.pause();
+         reject(new Error(`MediaRecorder error: ${event}. Video may have no audio track.`));
+       };
+       try {
+         this.mediaRecorder.start(100);
+         this.video.play().catch((playErr) => {
+           clearInterval(dataCheckInterval);
+           clearTimeout(recordingTimeout);
+           reject(new Error(`Failed to play video: ${playErr}`));
+         });
+       } catch (startErr) {
+         clearInterval(dataCheckInterval);
+         clearTimeout(recordingTimeout);
+         reject(new Error(`Failed to start recording: ${startErr}`));
+       }
+       setTimeout(async () => {
+         clearInterval(dataCheckInterval);
+         clearTimeout(recordingTimeout);
+         this.video.pause();
+         if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+           this.mediaRecorder.stop();
+         }
+         const stopTimeout = setTimeout(() => {
+           if (this.audioChunks.length === 0 || !hasData) {
+             reject(new Error("No audio data captured - video has no audio track"));
+           }
+         }, 2e3);
+         await new Promise((res) => {
+           if (this.mediaRecorder) {
+             this.mediaRecorder.addEventListener("stop", () => {
+               clearTimeout(stopTimeout);
+               res();
+             }, { once: true });
+           } else {
+             clearTimeout(stopTimeout);
+             res();
+           }
+         });
+         try {
+           if (this.audioChunks.length === 0 || !this.audioChunks.some((chunk) => chunk.size > 0)) {
+             throw new Error("No audio data captured - video has no audio track");
+           }
+           const audioBlob = new Blob(this.audioChunks, { type: "audio/webm" });
+           if (audioBlob.size === 0) {
+             throw new Error("Audio blob is empty - video has no audio track");
+           }
+           const arrayBuffer = await audioBlob.arrayBuffer();
+           const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
+           if (audioBuffer.length === 0 || audioBuffer.duration === 0) {
+             throw new Error("Audio buffer is empty - video has no audio track");
+           }
+           resolve(audioBuffer);
+         } catch (err) {
+           reject(new Error(`Failed to decode recorded audio: ${err}`));
+         }
+       }, duration / playbackRate * 1e3);
+     });
+   }
+   async close() {
+     if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+       this.mediaRecorder.stop();
+     }
+     this.video.pause();
+     this.video.src = "";
+     if (this.audioContext.state !== "closed") {
+       await this.audioContext.close();
+     }
+   }
+ };
+ async function extractAudioFromVideo(videoSrc, startTime, duration, playbackRate = 1, sampleRate = 48e3) {
+   const extractor = new VideoElementAudioExtractor(videoSrc, sampleRate);
+   try {
+     await extractor.initialize();
+     const audioBuffer = await extractor.extractAudio(startTime, duration, playbackRate);
+     return audioBuffer;
+   } finally {
+     await extractor.close();
+   }
+ }

  // src/audio/audio-processor.ts
  function getAssetPlacement(frames) {
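
Note: the new extractor routes a hidden video element through the Web Audio graph (MediaElementAudioSourceNode into a MediaStreamAudioDestinationNode), records the resulting stream with MediaRecorder, and decodes the captured webm chunks back into an AudioBuffer. Capture runs in real time, so extracting N seconds of audio costs roughly N / playbackRate wall-clock seconds. A minimal usage sketch, assuming extractAudioFromVideo is reachable from your bundle (it is internal to dist/index.js here, not a documented export) and that the source URL is readable with crossOrigin = "anonymous"; the URL is a placeholder:

    // Sketch only: extractAudioFromVideo is module-internal in this version.
    const buffer: AudioBuffer = await extractAudioFromVideo(
      "https://example.com/clip.mp4", // videoSrc: must be CORS-readable
      0,     // startTime (seconds); the element is seeked here first
      5,     // duration (seconds); recording takes ~5s of wall-clock time
      1,     // playbackRate
      48000  // sampleRate for the underlying AudioContext
    );
    console.log(buffer.duration, buffer.numberOfChannels);
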
@@ -93,11 +256,26 @@ var BrowserAudioProcessor = class {
    audioContext;
    /**
     * Fetch and decode audio from a media source
+    * Falls back to video element extraction if decodeAudioData fails
     */
    async fetchAndDecodeAudio(src) {
-     const response = await fetch(src);
-     const arrayBuffer = await response.arrayBuffer();
-     return await this.audioContext.decodeAudioData(arrayBuffer);
+     try {
+       const response = await fetch(src);
+       const arrayBuffer = await response.arrayBuffer();
+       return await this.audioContext.decodeAudioData(arrayBuffer);
+     } catch (err) {
+       try {
+         return await extractAudioFromVideo(
+           src,
+           0,
+           999999,
+           1,
+           this.sampleRate
+         );
+       } catch (fallbackErr) {
+         throw new Error(`Failed to extract audio: ${err}. Fallback also failed: ${fallbackErr}`);
+       }
+     }
    }
    /**
     * Process audio asset with playback rate, volume, and timing
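
Note: decodeAudioData only handles containers the browser can demux directly, so it can reject for some video sources; the method now falls back to the real-time extractor, with 999999 apparently serving as a "whole file" sentinel duration. A minimal sketch of the same pattern with illustrative names (decodeOrExtract is not a package export):

    async function decodeOrExtract(ctx: AudioContext, src: string): Promise<AudioBuffer> {
      try {
        const res = await fetch(src);
        return await ctx.decodeAudioData(await res.arrayBuffer());
      } catch {
        // decodeAudioData rejects for containers the browser cannot demux;
        // fall back to playing the element and recording its audio output.
        return extractAudioFromVideo(src, 0, 999999, 1, ctx.sampleRate);
      }
    }
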
@@ -211,6 +389,99 @@ var BrowserAudioProcessor = class {
    }
  };

+ // src/audio/audio-video-muxer.ts
+ function getFFmpegBaseURL() {
+   if (typeof window !== "undefined") {
+     return `${window.location.origin}/ffmpeg`;
+   }
+   return "/ffmpeg";
+ }
+ async function muxAudioVideo(options) {
+   const muxStartTime = Date.now();
+   try {
+     console.log("Starting FFmpeg muxing...");
+     console.log(` Video blob size: ${options.videoBlob.size} bytes (${(options.videoBlob.size / 1024 / 1024).toFixed(2)} MB)`);
+     console.log(` Audio buffer size: ${options.audioBuffer.byteLength} bytes (${(options.audioBuffer.byteLength / 1024 / 1024).toFixed(2)} MB)`);
+     const { FFmpeg } = await import("@ffmpeg/ffmpeg");
+     const { fetchFile } = await import("@ffmpeg/util");
+     const ffmpeg = new FFmpeg();
+     const base = getFFmpegBaseURL();
+     const coreURL = `${base}/ffmpeg-core.js`;
+     const wasmURL = `${base}/ffmpeg-core.wasm`;
+     console.log(`Loading FFmpeg from ${base}`);
+     const loadStartTime = Date.now();
+     await ffmpeg.load({
+       coreURL,
+       wasmURL
+     });
+     const loadDuration = Date.now() - loadStartTime;
+     console.log(`FFmpeg loaded successfully in ${loadDuration}ms`);
+     console.log("Writing video and audio files...");
+     const writeStartTime = Date.now();
+     await ffmpeg.writeFile(
+       "video.mp4",
+       await fetchFile(options.videoBlob)
+     );
+     console.log(` Video file written: ${options.videoBlob.size} bytes`);
+     await ffmpeg.writeFile(
+       "audio.wav",
+       new Uint8Array(options.audioBuffer)
+     );
+     const writeDuration = Date.now() - writeStartTime;
+     console.log(` Audio file written: ${options.audioBuffer.byteLength} bytes`);
+     console.log(`Files written successfully in ${writeDuration}ms`);
+     console.log("Executing FFmpeg muxing command...");
+     const execStartTime = Date.now();
+     const ffmpegLogs = [];
+     ffmpeg.on("log", ({ message }) => {
+       ffmpegLogs.push(message);
+       console.log(` [FFmpeg] ${message}`);
+     });
+     await ffmpeg.exec([
+       "-i",
+       "video.mp4",
+       "-i",
+       "audio.wav",
+       "-c:v",
+       "copy",
+       "-c:a",
+       "aac",
+       "-b:a",
+       "192k",
+       "-shortest",
+       "output.mp4"
+     ]);
+     const execDuration = Date.now() - execStartTime;
+     console.log(`FFmpeg muxing completed in ${execDuration}ms`);
+     const readStartTime = Date.now();
+     const data = await ffmpeg.readFile("output.mp4");
+     const readDuration = Date.now() - readStartTime;
+     console.log(`Output file read successfully in ${readDuration}ms`);
+     const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
+     const result = new Blob([uint8], { type: "video/mp4" });
+     const totalDuration = Date.now() - muxStartTime;
+     console.log(`Muxing successful: ${result.size} bytes (${(result.size / 1024 / 1024).toFixed(2)} MB) in ${totalDuration}ms`);
+     console.log(` Breakdown: load=${loadDuration}ms, write=${writeDuration}ms, exec=${execDuration}ms, read=${readDuration}ms`);
+     return result;
+   } catch (error) {
+     const totalDuration = Date.now() - muxStartTime;
+     const errorMsg = error instanceof Error ? error.message : String(error);
+     const errorStack = error instanceof Error ? error.stack : void 0;
+     console.error("FFmpeg muxing failed:", errorMsg);
+     if (errorStack) {
+       console.error("Error stack:", errorStack);
+     }
+     console.error("Error details:", {
+       errorType: error instanceof Error ? error.constructor.name : typeof error,
+       errorMessage: errorMsg,
+       duration: `${totalDuration}ms`,
+       videoBlobSize: options.videoBlob.size,
+       audioBufferSize: options.audioBuffer.byteLength
+     });
+     throw error;
+   }
+ }
+
  // src/browser-renderer.ts
  var BrowserWasmExporter = class _BrowserWasmExporter {
    constructor(settings) {
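
Note: the muxer loads ffmpeg.wasm at runtime via dynamic import and expects the core files to be served by the host app at `<origin>/ffmpeg/ffmpeg-core.js` and `<origin>/ffmpeg/ffmpeg-core.wasm`. The exec arguments are the in-browser equivalent of `ffmpeg -i video.mp4 -i audio.wav -c:v copy -c:a aac -b:a 192k -shortest output.mp4` (copy the video stream, encode the WAV to 192 kbps AAC, stop at the shorter input). A hedged usage sketch; muxAudioVideo is internal to this bundle, and the declared inputs are placeholders:

    declare const videoBlob: Blob;      // MP4 produced by the frame exporter
    declare const wavData: ArrayBuffer; // WAV produced by the audio processor
    const muxed: Blob = await muxAudioVideo({
      videoBlob,
      audioBuffer: wavData
    });
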
@@ -271,7 +542,6 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
        fps: this.fps
      });
    } catch (error) {
-     console.error("WASM loading error:", error);
      throw error;
    }
  }
@@ -294,42 +564,99 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
  }
  async generateAudio(assets, startFrame, endFrame) {
    try {
-     console.log("\u{1F50A} Starting audio processing...", {
-       frames: assets.length,
-       startFrame,
-       endFrame
-     });
+     console.log(`Generating audio from ${assets.length} frames`);
      const processor = new BrowserAudioProcessor();
      const assetPlacements = getAssetPlacement(assets);
-     console.log(`\u{1F4CA} Found ${assetPlacements.length} audio assets to process`);
      if (assetPlacements.length === 0) {
-       console.log("\u26A0\uFE0F No audio assets found");
+       console.log("No asset placements found");
        return null;
      }
+     console.log(`Processing ${assetPlacements.length} asset placements`);
      const processedBuffers = [];
-     for (const asset of assetPlacements) {
+     for (let i = 0; i < assetPlacements.length; i++) {
+       const asset = assetPlacements[i];
+       console.log(`[${i + 1}/${assetPlacements.length}] Processing asset: ${asset.src} (type: ${asset.type}, volume: ${asset.volume}, playbackRate: ${asset.playbackRate})`);
        if (asset.volume > 0 && asset.playbackRate > 0) {
-         console.log(`\u{1F3B5} Processing audio: ${asset.key}`);
-         const buffer = await processor.processAudioAsset(
-           asset,
-           this.settings.fps || 30,
-           endFrame - startFrame
-         );
-         processedBuffers.push(buffer);
+         const startTime = Date.now();
+         try {
+           if (asset.type === "video") {
+             console.log(` \u2192 Checking if asset has audio: ${asset.src.substring(0, 50)}...`);
+             try {
+               const assetHasAudio = await (0, import_media_utils.hasAudio)(asset.src);
+               if (!assetHasAudio) {
+                 console.log(` \u23ED Skipping asset (no audio detected): ${asset.src.substring(0, 50)}...`);
+                 continue;
+               }
+               console.log(` \u2713 Asset has audio, proceeding: ${asset.src.substring(0, 50)}...`);
+             } catch (audioCheckError) {
+               const errorMsg = audioCheckError instanceof Error ? audioCheckError.message : String(audioCheckError);
+               const errorStack = audioCheckError instanceof Error ? audioCheckError.stack : void 0;
+               console.warn(` \u26A0 Audio check failed, proceeding anyway: ${asset.src.substring(0, 50)}...`);
+               console.warn(` Error: ${errorMsg}`);
+               if (errorStack) {
+                 console.warn(` Stack: ${errorStack}`);
+               }
+             }
+           }
+           console.log(` \u2192 Starting processAudioAsset for: ${asset.src}`);
+           const processPromise = processor.processAudioAsset(
+             asset,
+             this.settings.fps || 30,
+             endFrame - startFrame
+           );
+           const timeoutPromise = new Promise((_, reject) => {
+             setTimeout(() => {
+               reject(new Error(`Timeout processing audio asset after 20s - video may have no audio track`));
+             }, 2e4);
+           });
+           const buffer = await Promise.race([processPromise, timeoutPromise]);
+           const duration = Date.now() - startTime;
+           console.log(` \u2713 Successfully processed audio asset in ${duration}ms: ${asset.src.substring(0, 50)}...`);
+           processedBuffers.push(buffer);
+         } catch (error) {
+           const errorMsg = error instanceof Error ? error.message : String(error);
+           const errorStack = error instanceof Error ? error.stack : void 0;
+           const duration = Date.now() - startTime;
+           console.warn(` \u2717 Failed to process audio asset after ${duration}ms: ${asset.src.substring(0, 50)}...`);
+           console.warn(` Error: ${errorMsg}`);
+           if (errorStack) {
+             console.warn(` Stack: ${errorStack}`);
+           }
+           console.warn(` Asset details: type=${asset.type}, volume=${asset.volume}, playbackRate=${asset.playbackRate}, startFrame=${asset.startInVideo}, endFrame=${asset.endInVideo}`);
+         }
+       } else {
+         console.log(` \u23ED Skipping asset: volume=${asset.volume}, playbackRate=${asset.playbackRate}`);
        }
      }
      if (processedBuffers.length === 0) {
-       console.log("\u26A0\uFE0F No audio buffers to mix");
+       console.warn("No audio buffers were successfully processed");
+       console.warn(` Total assets attempted: ${assetPlacements.length}`);
+       console.warn(` Assets with volume>0 and playbackRate>0: ${assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length}`);
        return null;
      }
-     console.log(`\u{1F39B}\uFE0F Mixing ${processedBuffers.length} audio track(s)...`);
+     console.log(`Mixing ${processedBuffers.length} audio buffers`);
+     const mixStartTime = Date.now();
      const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
+     const mixDuration = Date.now() - mixStartTime;
+     console.log(`Audio mixing completed in ${mixDuration}ms`);
+     const wavStartTime = Date.now();
      const wavData = processor.audioBufferToWav(mixedBuffer);
+     const wavDuration = Date.now() - wavStartTime;
+     console.log(`WAV conversion completed in ${wavDuration}ms`);
+     console.log(`Audio generation complete: ${wavData.byteLength} bytes (${(wavData.byteLength / 1024 / 1024).toFixed(2)} MB)`);
      await processor.close();
-     console.log(`\u2705 Audio processed: ${(wavData.byteLength / 1024 / 1024).toFixed(2)} MB`);
      return wavData;
    } catch (error) {
-     console.error("\u274C Audio processing failed:", error);
+     const errorMsg = error instanceof Error ? error.message : String(error);
+     const errorStack = error instanceof Error ? error.stack : void 0;
+     console.error("Audio generation error:", errorMsg);
+     if (errorStack) {
+       console.error("Error stack:", errorStack);
+     }
+     console.error("Error details:", {
+       errorType: error instanceof Error ? error.constructor.name : typeof error,
+       errorMessage: errorMsg
+     });
      return null;
    }
  }
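
Note: each asset is now processed under a 20-second watchdog built with Promise.race, so one stalled decode no longer hangs the whole export; failures are logged and the asset is skipped rather than aborting the render. The pattern in isolation, as an illustrative helper (not a package export); note that Promise.race does not cancel the losing promise, so a timed-out job keeps running in the background, a trade-off the diff accepts:

    async function withTimeout<T>(work: Promise<T>, ms: number, label: string): Promise<T> {
      const timeout = new Promise<never>((_, reject) =>
        setTimeout(() => reject(new Error(`Timeout after ${ms}ms: ${label}`)), ms)
      );
      // Whichever settles first wins; the loser is not cancelled.
      return Promise.race([work, timeout]);
    }
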
@@ -374,12 +701,7 @@ var renderTwickVideoInBrowser = async (config) => {
  const width = settings.width || variables.input.properties?.width || 1920;
  const height = settings.height || variables.input.properties?.height || 1080;
  const fps = settings.fps || variables.input.properties?.fps || 30;
- let project;
- if (!projectFile) {
-   project = import_project.default;
- } else {
-   project = projectFile;
- }
+ const project = !projectFile ? import_project.default : projectFile;
  project.variables = variables;
  const renderSettings = {
    name: "browser-render",
@@ -407,6 +729,65 @@ var renderTwickVideoInBrowser = async (config) => {
  renderer.playback.fps = renderSettings.fps;
  renderer.playback.state = 1;
  const totalFrames = await renderer.getNumberOfFrames(renderSettings);
+ if (totalFrames === 0 || !isFinite(totalFrames)) {
+   throw new Error(
+     "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
+   );
+ }
+ const videoElements = [];
+ if (variables.input.tracks) {
+   variables.input.tracks.forEach((track) => {
+     if (track.elements) {
+       track.elements.forEach((el) => {
+         if (el.type === "video") videoElements.push(el);
+       });
+     }
+   });
+ }
+ let hasAnyAudio = false;
+ console.log(`Found ${videoElements.length} video element(s) to check for audio`);
+ if (videoElements.length > 0) {
+   for (const videoEl of videoElements) {
+     const src = videoEl.props?.src;
+     if (!src || src === "undefined") continue;
+     const preloadVideo = document.createElement("video");
+     preloadVideo.crossOrigin = "anonymous";
+     preloadVideo.preload = "metadata";
+     preloadVideo.src = src;
+     await new Promise((resolve, reject) => {
+       const timeout = setTimeout(
+         () => reject(new Error(`Timeout loading video metadata: ${src.substring(0, 80)}`)),
+         3e4
+       );
+       preloadVideo.addEventListener("loadedmetadata", () => {
+         clearTimeout(timeout);
+         resolve();
+       }, { once: true });
+       preloadVideo.addEventListener("error", () => {
+         clearTimeout(timeout);
+         const err = preloadVideo.error;
+         reject(new Error(`Failed to load video: ${err?.message || "Unknown error"}`));
+       }, { once: true });
+     });
+     if (settings.includeAudio) {
+       try {
+         console.log(`Checking if video has audio: ${src.substring(0, 50)}...`);
+         const videoHasAudio = await (0, import_media_utils.hasAudio)(src);
+         console.log(`Audio check result for ${src.substring(0, 50)}...: ${videoHasAudio ? "HAS AUDIO" : "NO AUDIO"}`);
+         if (videoHasAudio) {
+           hasAnyAudio = true;
+           console.log(`\u2713 Video has audio: ${src.substring(0, 50)}...`);
+         } else {
+           console.log(`\u2717 Video has no audio: ${src.substring(0, 50)}...`);
+         }
+       } catch (error) {
+         console.warn(`Failed to check audio for ${src.substring(0, 50)}...:`, error);
+         hasAnyAudio = true;
+         console.log(`\u26A0 Assuming video might have audio due to check error`);
+       }
+     }
+   }
+ }
  await renderer.playback.recalculate();
  await renderer.playback.reset();
  await renderer.playback.seek(0);
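
Note: before any frame is rendered, every video element's metadata is preloaded (with a 30 s timeout per source) and, when includeAudio is set, probed with hasAudio from @twick/media-utils; if the probe itself errors, the renderer assumes audio might be present rather than silently dropping it. The preload step in isolation, as an illustrative sketch assuming a CORS-readable src:

    function preloadMetadata(src: string, timeoutMs = 30000): Promise<HTMLVideoElement> {
      return new Promise((resolve, reject) => {
        const video = document.createElement("video");
        video.crossOrigin = "anonymous";
        video.preload = "metadata"; // fetch headers/metadata only, not frames
        video.src = src;
        const timer = setTimeout(
          () => reject(new Error(`Timeout loading metadata: ${src}`)),
          timeoutMs
        );
        video.addEventListener("loadedmetadata", () => {
          clearTimeout(timer);
          resolve(video);
        }, { once: true });
        video.addEventListener("error", () => {
          clearTimeout(timer);
          reject(new Error(video.error?.message || "Unknown error"));
        }, { once: true });
      });
    }
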
@@ -423,40 +804,107 @@ var renderTwickVideoInBrowser = async (config) => {
    mediaAssets.push(currentAssets);
    const canvas = renderer.stage.finalBuffer;
    await exporter.handleFrame(canvas, frame);
-   if (settings.onProgress) {
-     settings.onProgress(frame / totalFrames);
-   }
+   if (settings.onProgress) settings.onProgress(frame / totalFrames);
  }
  await exporter.stop();
  let audioData = null;
- if (settings.includeAudio && mediaAssets.length > 0) {
-   console.log("\u{1F3B5} Generating audio track...");
-   audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
+ console.log(`Audio detection summary: hasAnyAudio=${hasAnyAudio}, includeAudio=${settings.includeAudio}, mediaAssets=${mediaAssets.length}`);
+ if (settings.includeAudio && mediaAssets.length > 0 && hasAnyAudio) {
+   console.log("Starting audio processing (audio detected in videos)");
+   if (settings.onProgress) {
+     settings.onProgress(0.98);
+   }
+   try {
+     console.log("Calling generateAudio...");
+     audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
+     console.log("generateAudio completed");
+     if (audioData) {
+       console.log(`\u2713 Audio generation successful: ${audioData.byteLength} bytes`);
+     } else {
+       console.log("\u2717 No audio data generated");
+     }
+     if (settings.onProgress) {
+       settings.onProgress(0.99);
+     }
+   } catch (audioError) {
+     const errorMsg = audioError instanceof Error ? audioError.message : String(audioError);
+     const errorStack = audioError instanceof Error ? audioError.stack : void 0;
+     console.error("\u2717 Audio generation failed, continuing without audio");
+     console.error(` Error: ${errorMsg}`);
+     if (errorStack) {
+       console.error(` Stack: ${errorStack}`);
+     }
+     console.error(" Context:", {
+       hasAnyAudio,
+       includeAudio: settings.includeAudio,
+       mediaAssetsCount: mediaAssets.length,
+       totalFrames
+     });
+     audioData = null;
+   }
+ } else if (settings.includeAudio && mediaAssets.length > 0 && !hasAnyAudio) {
+   console.log("\u23ED Skipping audio processing: no audio detected in videos");
+ } else {
+   console.log(`\u23ED Skipping audio processing: includeAudio=${settings.includeAudio}, mediaAssets=${mediaAssets.length}, hasAnyAudio=${hasAnyAudio}`);
  }
  let finalBlob = exporter.getVideoBlob();
  if (!finalBlob) {
    throw new Error("Failed to create video blob");
  }
+ if (finalBlob.size === 0) {
+   throw new Error("Video blob is empty. Rendering may have failed.");
+ }
  if (audioData && settings.includeAudio) {
-   console.log("\u2705 Audio extracted and processed successfully");
-   console.log("\u{1F4CA} Audio data size:", (audioData.byteLength / 1024 / 1024).toFixed(2), "MB");
-   if (settings.downloadAudioSeparately) {
-     const audioBlob = new Blob([audioData], { type: "audio/wav" });
-     const audioUrl = URL.createObjectURL(audioBlob);
-     const a = document.createElement("a");
-     a.href = audioUrl;
-     a.download = "audio.wav";
-     a.click();
-     URL.revokeObjectURL(audioUrl);
-     console.log("\u2705 Audio downloaded separately as audio.wav");
-   }
-   if (settings.onAudioReady) {
-     const audioBlob = new Blob([audioData], { type: "audio/wav" });
-     settings.onAudioReady(audioBlob);
+   console.log(`Attempting to mux audio (${audioData.byteLength} bytes) with video (${finalBlob.size} bytes)`);
+   try {
+     const muxedBlob = await muxAudioVideo({
+       videoBlob: finalBlob,
+       audioBuffer: audioData
+     });
+     if (!muxedBlob || muxedBlob.size === 0) {
+       throw new Error("Muxed video blob is empty");
+     }
+     if (muxedBlob.size === finalBlob.size) {
+       console.warn("Muxed blob size unchanged - muxing may have failed silently");
+     } else {
+       console.log(`Muxing successful: ${finalBlob.size} bytes -> ${muxedBlob.size} bytes`);
+     }
+     finalBlob = muxedBlob;
+   } catch (muxError) {
+     const errorMsg = muxError instanceof Error ? muxError.message : String(muxError);
+     const errorStack = muxError instanceof Error ? muxError.stack : void 0;
+     console.error("Audio muxing failed");
+     console.error(` Error: ${errorMsg}`);
+     if (errorStack) {
+       console.error(` Stack: ${errorStack}`);
+     }
+     console.error(" Context:", {
+       videoBlobSize: finalBlob.size,
+       audioDataSize: audioData?.byteLength || 0
+     });
+     if (settings.downloadAudioSeparately && audioData) {
+       const audioBlob = new Blob([audioData], { type: "audio/wav" });
+       const audioUrl = URL.createObjectURL(audioBlob);
+       const a = document.createElement("a");
+       a.href = audioUrl;
+       a.download = "audio.wav";
+       a.click();
+       URL.revokeObjectURL(audioUrl);
+     }
+     console.warn("Continuing with video without audio due to muxing failure");
+     finalBlob = exporter.getVideoBlob();
+     if (!finalBlob || finalBlob.size === 0) {
+       throw new Error("Video blob is invalid after muxing failure");
+     }
    }
-   console.log("\u{1F4A1} Note: Client-side audio muxing is complex.");
-   console.log("\u{1F4A1} For full audio support, use server-side rendering: @twick/render-server");
-   console.log("\u{1F4A1} Or mux manually with: ffmpeg -i video.mp4 -i audio.wav -c:v copy -c:a aac output.mp4");
+ } else if (settings.includeAudio && !audioData) {
+   console.warn("Audio processing was enabled but no audio data was generated");
+ }
+ if (!finalBlob || finalBlob.size === 0) {
+   throw new Error("Final video blob is empty or invalid");
+ }
+ if (settings.onProgress) {
+   settings.onProgress(1);
  }
  if (settings.onComplete) {
    settings.onComplete(finalBlob);
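
Note: taken together, the new pipeline is preflight hasAudio probe, per-asset extraction with timeouts, WAV mix, then ffmpeg.wasm mux, falling back to the silent video (optionally downloading the WAV separately) if muxing fails. A hedged end-to-end sketch using only the option names visible in this diff; the exact config shape should be confirmed against the package's documentation, and `variables` is a placeholder:

    declare const variables: any; // project variables with input.tracks/properties
    await renderTwickVideoInBrowser({
      variables,
      settings: {
        width: 1920,
        height: 1080,
        fps: 30,
        includeAudio: true,
        downloadAudioSeparately: false,
        onProgress: (p: number) => console.log(`render ${(p * 100).toFixed(0)}%`),
        onComplete: (blob: Blob) => console.log(`done: ${blob.size} bytes`)
      }
    });
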
@@ -500,17 +948,13 @@ var useBrowserRenderer = (options = {}) => {
  }, []);
  const download = (0, import_react.useCallback)((filename) => {
    if (!videoBlob) {
-     const downloadError = new Error("No video available to download. Please render the video first.");
-     setError(downloadError);
-     console.error(downloadError.message);
+     setError(new Error("No video available to download. Please render the video first."));
      return;
    }
    try {
      downloadVideoBlob(videoBlob, filename || options.downloadFilename || "video.mp4");
    } catch (err) {
-     const downloadError = err instanceof Error ? err : new Error("Failed to download video");
-     setError(downloadError);
-     console.error("Download error:", downloadError);
+     setError(err instanceof Error ? err : new Error("Failed to download video"));
    }
  }, [videoBlob, options.downloadFilename]);
  const render = (0, import_react.useCallback)(async (variables) => {
@@ -540,9 +984,7 @@ var useBrowserRenderer = (options = {}) => {
        try {
          downloadVideoBlob(blob2, downloadFilename || "video.mp4");
        } catch (downloadErr) {
-         const error2 = downloadErr instanceof Error ? downloadErr : new Error("Failed to auto-download video");
-         setError(error2);
-         console.error("Auto-download error:", error2);
+         setError(downloadErr instanceof Error ? downloadErr : new Error("Failed to auto-download video"));
        }
      }
    },
@@ -558,9 +1000,7 @@ var useBrowserRenderer = (options = {}) => {
      setProgress(1);
      return blob;
    } catch (err) {
-     const error2 = err instanceof Error ? err : new Error(String(err));
-     setError(error2);
-     console.error("Render error:", error2);
+     setError(err instanceof Error ? err : new Error(String(err)));
      return null;
    } finally {
      setIsRendering(false);
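
Note: these hook changes route all failures through the hook's error state instead of also logging to the console, so consumers are expected to surface errors themselves. An illustrative React usage sketch; only render, download, and the error state are visible in this diff, so confirm the hook's return shape against the package's type definitions:

    const { render, download, error } = useBrowserRenderer({ downloadFilename: "clip.mp4" });
    const handleExport = async (variables: unknown) => {
      const blob = await render(variables); // resolves to null on failure; inspect `error`
      if (blob) download();                 // falls back to options.downloadFilename
    };
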