@twick/browser-render 0.15.9 → 0.15.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -32,6 +32,7 @@ var index_exports = {};
 __export(index_exports, {
   default: () => renderTwickVideoInBrowser,
   downloadVideoBlob: () => downloadVideoBlob,
+  normalizeVideoBlob: () => normalizeVideoBlob,
   renderTwickVideoInBrowser: () => renderTwickVideoInBrowser,
   useBrowserRenderer: () => useBrowserRenderer
 });
@@ -435,19 +436,36 @@ async function muxAudioVideo(options) {
   const ffmpegLogs = [];
   ffmpeg.on("log", ({ message }) => {
     ffmpegLogs.push(message);
-    console.log(` [FFmpeg] ${message}`);
   });
   await ffmpeg.exec([
+    // Inputs
     "-i",
     "video.mp4",
     "-i",
     "audio.wav",
+    // Explicit stream mapping
+    "-map",
+    "0:v:0",
+    "-map",
+    "1:a:0",
+    // Re-encode video to a very standard H.264 stream.
+    // Copying the WebCodecs/mp4-wasm bitstream can sometimes
+    // lead to timing issues where only the first second renders.
     "-c:v",
-    "copy",
+    "libx264",
+    "-preset",
+    "veryfast",
+    "-crf",
+    "20",
+    // AAC audio
     "-c:a",
     "aac",
     "-b:a",
    "192k",
+    // Make MP4 more web-friendly
+    "-movflags",
+    "+faststart",
+    // Stop at the shortest of the two streams
     "-shortest",
     "output.mp4"
   ]);
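
Note: the argument list above is equivalent to running the following FFmpeg command (flags copied verbatim from this hunk). Re-encoding with libx264 instead of "-c:v copy" is slower but, per the inline comment, avoids the truncated-output timing issue:

  ffmpeg -i video.mp4 -i audio.wav \
    -map 0:v:0 -map 1:a:0 \
    -c:v libx264 -preset veryfast -crf 20 \
    -c:a aac -b:a 192k \
    -movflags +faststart -shortest output.mp4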
@@ -483,6 +501,12 @@ async function muxAudioVideo(options) {
 }
 
 // src/browser-renderer.ts
+function isWindows() {
+  if (typeof navigator === "undefined") return false;
+  const ua = navigator.userAgent;
+  const plat = navigator.platform ?? "";
+  return /Win(dows|32|64|CE)/i.test(ua) || /Win/i.test(plat);
+}
 var BrowserWasmExporter = class _BrowserWasmExporter {
   constructor(settings) {
     this.settings = settings;
@@ -495,52 +519,111 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   onProgressCallback;
   currentFrame = 0;
   fps = 30;
+  /** On Windows, copy each frame to this canvas before creating VideoFrame to avoid invalid encoder output. */
+  copyCanvas = null;
+  /** On Windows use native VideoEncoder + Mediabunny with prefer-software so frames are actually encoded. */
+  useNativeEncoder = false;
+  nativeVideoEncoder = null;
+  nativeOutput = null;
+  nativePacketSource = null;
+  nativeAddPromise = Promise.resolve();
+  nativeFirstChunk = true;
   static async create(settings) {
     return new _BrowserWasmExporter(settings);
   }
   async start() {
+    const w = this.settings.size.x;
+    const h = this.settings.size.y;
+    const fps = this.fps;
+    if (isWindows()) {
+      try {
+        const { Output, BufferTarget, Mp4OutputFormat, EncodedVideoPacketSource, EncodedPacket } = await import("mediabunny");
+        const output = new Output({
+          format: new Mp4OutputFormat(),
+          target: new BufferTarget()
+        });
+        const packetSource = new EncodedVideoPacketSource("avc");
+        output.addVideoTrack(packetSource);
+        await output.start();
+        this.nativeOutput = output;
+        this.nativePacketSource = packetSource;
+        this.nativeAddPromise = Promise.resolve();
+        this.nativeFirstChunk = true;
+        const videoEncoder = new VideoEncoder({
+          output: (chunk, meta) => {
+            const packet = EncodedPacket.fromEncodedChunk(chunk);
+            const isFirst = this.nativeFirstChunk;
+            const metaArg = isFirst ? meta : void 0;
+            this.nativeFirstChunk = false;
+            this.nativeAddPromise = this.nativeAddPromise.then(
+              () => this.nativePacketSource.add(packet, metaArg)
+            );
+          },
+          error: (e) => console.error("[BrowserRender] VideoEncoder error:", e)
+        });
+        const bitrate = Math.max(5e5, w * h * fps * 0.1 | 0);
+        const config = {
+          codec: "avc1.42001f",
+          width: w,
+          height: h,
+          bitrate,
+          framerate: fps,
+          hardwareAcceleration: "prefer-software"
+        };
+        const support = await VideoEncoder.isConfigSupported(config);
+        if (!support.supported) {
+          delete config.hardwareAcceleration;
+        }
+        videoEncoder.configure(config);
+        this.nativeVideoEncoder = videoEncoder;
+        this.useNativeEncoder = true;
+        this.copyCanvas = document.createElement("canvas");
+        this.copyCanvas.width = w;
+        this.copyCanvas.height = h;
+        return;
+      } catch {
+        this.useNativeEncoder = false;
+        this.nativeVideoEncoder = null;
+        this.nativeOutput = null;
+        this.nativePacketSource = null;
+      }
+    }
     try {
       const loadMp4Module = (await import("mp4-wasm")).default;
       const possiblePaths = [
-        // Vite dev server virtual path
         "/@mp4-wasm",
-        // Common bundled asset paths (Vite uses hashed names)
         "/assets/mp4-wasm.wasm",
         "/assets/mp4-YBRi_559.wasm",
-        // Known Vite hash
         "/mp4-wasm.wasm",
-        // Node modules path (for dev)
         "/node_modules/mp4-wasm/dist/mp4-wasm.wasm"
       ];
       let buffer = null;
-      let successPath = "";
       for (const path of possiblePaths) {
         try {
           const resp = await fetch(path);
           if (resp.ok) {
             const contentType = resp.headers.get("content-type");
-            if (contentType && contentType.includes("html")) {
-              continue;
-            }
+            if (contentType && contentType.includes("html")) continue;
             buffer = await resp.arrayBuffer();
-            successPath = path;
             break;
           }
-        } catch (e) {
+        } catch {
           continue;
         }
       }
       if (!buffer) {
+        console.error("[BrowserRender] Exporter start: no WASM buffer from any path");
         throw new Error(
           "Could not load WASM file from any location. Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it."
         );
       }
       const mp4 = await loadMp4Module({ wasmBinary: buffer });
-      this.encoder = mp4.createWebCodecsEncoder({
-        width: this.settings.size.x,
-        height: this.settings.size.y,
-        fps: this.fps
-      });
+      this.encoder = mp4.createWebCodecsEncoder({ width: w, height: h, fps });
+      if (isWindows()) {
+        this.copyCanvas = document.createElement("canvas");
+        this.copyCanvas.width = w;
+        this.copyCanvas.height = h;
+      }
     } catch (error) {
       throw error;
     }
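
Note: in the new Windows path, the codec string "avc1.42001f" requests H.264 Baseline profile at level 3.1, and hardwareAcceleration: "prefer-software" steers WebCodecs away from the hardware encoders that this release's error messages associate with empty output; the isConfigSupported check drops that hint only if the software configuration is rejected. The resulting fallback chain is: Mediabunny + native VideoEncoder first, mp4-wasm otherwise.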
@@ -548,57 +631,92 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   async handleFrame(canvas, frameNumber) {
     const frameIndex = frameNumber !== void 0 ? frameNumber : this.currentFrame;
     const timestampMicroseconds = Math.round(frameIndex / this.fps * 1e6);
-    const frame = new VideoFrame(canvas, {
-      timestamp: timestampMicroseconds,
-      duration: Math.round(1 / this.fps * 1e6)
-    });
-    await this.encoder.addFrame(frame);
-    frame.close();
+    const durationMicroseconds = Math.round(1 / this.fps * 1e6);
+    let sourceCanvas = canvas;
+    if (this.copyCanvas) {
+      if (this.copyCanvas.width !== canvas.width || this.copyCanvas.height !== canvas.height) {
+        this.copyCanvas.width = canvas.width;
+        this.copyCanvas.height = canvas.height;
+      }
+      const ctx = this.copyCanvas.getContext("2d");
+      if (ctx) {
+        ctx.imageSmoothingEnabled = false;
+        ctx.drawImage(canvas, 0, 0);
+        sourceCanvas = this.copyCanvas;
+      }
+    }
+    if (this.useNativeEncoder && this.nativeVideoEncoder) {
+      const bitmap = await createImageBitmap(sourceCanvas);
+      const frame = new VideoFrame(bitmap, {
+        timestamp: timestampMicroseconds,
+        duration: durationMicroseconds
+      });
+      this.nativeVideoEncoder.encode(frame, { keyFrame: frameIndex === 0 });
+      frame.close();
+      bitmap.close();
+    } else {
+      let frame;
+      if (isWindows() && typeof createImageBitmap === "function") {
+        const bitmap = await createImageBitmap(sourceCanvas);
+        frame = new VideoFrame(bitmap, {
+          timestamp: timestampMicroseconds,
+          duration: durationMicroseconds
+        });
+        await this.encoder.addFrame(frame);
+        frame.close();
+        bitmap.close();
+      } else {
+        frame = new VideoFrame(sourceCanvas, {
+          timestamp: timestampMicroseconds,
+          duration: durationMicroseconds
+        });
+        await this.encoder.addFrame(frame);
+        frame.close();
+      }
+    }
     if (frameNumber === void 0) {
       this.currentFrame++;
     }
   }
   async stop() {
+    if (this.useNativeEncoder && this.nativeVideoEncoder && this.nativeOutput && this.nativePacketSource) {
+      await this.nativeVideoEncoder.flush();
+      this.nativeVideoEncoder.close();
+      this.nativeVideoEncoder = null;
+      await this.nativeAddPromise;
+      this.nativePacketSource.close();
+      await this.nativeOutput.finalize();
+      const buf2 = this.nativeOutput.target.buffer;
+      this.nativeOutput = null;
+      this.nativePacketSource = null;
+      this.videoBlob = new Blob([buf2], { type: "video/mp4" });
+      return;
+    }
     const buf = await this.encoder.end();
-    this.videoBlob = new Blob([buf], { type: "video/mp4" });
+    const copy = buf instanceof ArrayBuffer ? buf.slice(0) : new Uint8Array(buf).slice().buffer;
+    this.videoBlob = new Blob([copy], { type: "video/mp4" });
   }
-  async generateAudio(assets, startFrame, endFrame) {
+  async generateAudio(assets, startFrame, endFrame, onProgress) {
     try {
-      console.log(`Generating audio from ${assets.length} frames`);
       const processor = new BrowserAudioProcessor();
       const assetPlacements = getAssetPlacement(assets);
       if (assetPlacements.length === 0) {
-        console.log("No asset placements found");
         return null;
       }
-      console.log(`Processing ${assetPlacements.length} asset placements`);
+      const processableCount = assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length;
+      let processedCount = 0;
      const processedBuffers = [];
      for (let i = 0; i < assetPlacements.length; i++) {
        const asset = assetPlacements[i];
-        console.log(`[${i + 1}/${assetPlacements.length}] Processing asset: ${asset.src} (type: ${asset.type}, volume: ${asset.volume}, playbackRate: ${asset.playbackRate})`);
        if (asset.volume > 0 && asset.playbackRate > 0) {
-          const startTime = Date.now();
          try {
            if (asset.type === "video") {
-              console.log(` \u2192 Checking if asset has audio: ${asset.src.substring(0, 50)}...`);
              try {
                const assetHasAudio = await (0, import_media_utils.hasAudio)(asset.src);
-                if (!assetHasAudio) {
-                  console.log(` \u23ED Skipping asset (no audio detected): ${asset.src.substring(0, 50)}...`);
-                  continue;
-                }
-                console.log(` \u2713 Asset has audio, proceeding: ${asset.src.substring(0, 50)}...`);
-              } catch (audioCheckError) {
-                const errorMsg = audioCheckError instanceof Error ? audioCheckError.message : String(audioCheckError);
-                const errorStack = audioCheckError instanceof Error ? audioCheckError.stack : void 0;
-                console.warn(` \u26A0 Audio check failed, proceeding anyway: ${asset.src.substring(0, 50)}...`);
-                console.warn(` Error: ${errorMsg}`);
-                if (errorStack) {
-                  console.warn(` Stack: ${errorStack}`);
-                }
+                if (!assetHasAudio) continue;
+              } catch {
              }
            }
-            console.log(` \u2192 Starting processAudioAsset for: ${asset.src}`);
            const processPromise = processor.processAudioAsset(
              asset,
              this.settings.fps || 30,
@@ -610,53 +728,30 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
            }, 2e4);
          });
          const buffer = await Promise.race([processPromise, timeoutPromise]);
-          const duration = Date.now() - startTime;
-          console.log(` \u2713 Successfully processed audio asset in ${duration}ms: ${asset.src.substring(0, 50)}...`);
          processedBuffers.push(buffer);
-        } catch (error) {
-          const errorMsg = error instanceof Error ? error.message : String(error);
-          const errorStack = error instanceof Error ? error.stack : void 0;
-          const duration = Date.now() - startTime;
-          console.warn(` \u2717 Failed to process audio asset after ${duration}ms: ${asset.src.substring(0, 50)}...`);
-          console.warn(` Error: ${errorMsg}`);
-          if (errorStack) {
-            console.warn(` Stack: ${errorStack}`);
+          processedCount++;
+          if (onProgress && processableCount > 0) {
+            onProgress(processedCount / processableCount);
          }
-          console.warn(` Asset details: type=${asset.type}, volume=${asset.volume}, playbackRate=${asset.playbackRate}, startFrame=${asset.startInVideo}, endFrame=${asset.endInVideo}`);
+        } catch {
        }
-      } else {
-        console.log(` \u23ED Skipping asset: volume=${asset.volume}, playbackRate=${asset.playbackRate}`);
      }
    }
    if (processedBuffers.length === 0) {
-      console.warn("No audio buffers were successfully processed");
-      console.warn(` Total assets attempted: ${assetPlacements.length}`);
-      console.warn(` Assets with volume>0 and playbackRate>0: ${assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length}`);
      return null;
    }
-    console.log(`Mixing ${processedBuffers.length} audio buffers`);
-    const mixStartTime = Date.now();
+    if (onProgress) onProgress(0.85);
    const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
-    const mixDuration = Date.now() - mixStartTime;
-    console.log(`Audio mixing completed in ${mixDuration}ms`);
-    const wavStartTime = Date.now();
+    if (onProgress) onProgress(0.92);
    const wavData = processor.audioBufferToWav(mixedBuffer);
-    const wavDuration = Date.now() - wavStartTime;
-    console.log(`WAV conversion completed in ${wavDuration}ms`);
-    console.log(`Audio generation complete: ${wavData.byteLength} bytes (${(wavData.byteLength / 1024 / 1024).toFixed(2)} MB)`);
+    if (onProgress) onProgress(1);
    await processor.close();
    return wavData;
  } catch (error) {
    const errorMsg = error instanceof Error ? error.message : String(error);
    const errorStack = error instanceof Error ? error.stack : void 0;
-    console.error("Audio generation error:", errorMsg);
-    if (errorStack) {
-      console.error("Error stack:", errorStack);
-    }
-    console.error("Error details:", {
-      errorType: error instanceof Error ? error.constructor.name : typeof error,
-      errorMessage: errorMsg
-    });
+    console.error("[BrowserRender] Audio generation error:", errorMsg);
+    if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
    return null;
  }
 }
@@ -696,6 +791,7 @@ var renderTwickVideoInBrowser = async (config) => {
   try {
     const { projectFile, variables, settings = {} } = config;
     if (!variables || !variables.input) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: missing variables.input");
       throw new Error('Invalid configuration. "variables.input" is required.');
     }
     const width = settings.width || variables.input.properties?.width || 1920;
@@ -730,22 +826,27 @@ var renderTwickVideoInBrowser = async (config) => {
     renderer.playback.state = 1;
     const totalFrames = await renderer.getNumberOfFrames(renderSettings);
     if (totalFrames === 0 || !isFinite(totalFrames)) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: invalid totalFrames", totalFrames);
       throw new Error(
         "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
       );
     }
     const videoElements = [];
+    const audioElements = [];
     if (variables.input.tracks) {
       variables.input.tracks.forEach((track) => {
         if (track.elements) {
           track.elements.forEach((el) => {
             if (el.type === "video") videoElements.push(el);
+            if (el.type === "audio") audioElements.push(el);
           });
         }
       });
     }
     let hasAnyAudio = false;
-    console.log(`Found ${videoElements.length} video element(s) to check for audio`);
+    if (settings.includeAudio && audioElements.length > 0) {
+      hasAnyAudio = true;
+    }
     if (videoElements.length > 0) {
       for (const videoEl of videoElements) {
         const src = videoEl.props?.src;
771
872
  });
772
873
  if (settings.includeAudio) {
773
874
  try {
774
- console.log(`Checking if video has audio: ${src.substring(0, 50)}...`);
775
875
  const videoHasAudio = await (0, import_media_utils.hasAudio)(src);
776
- console.log(`Audio check result for ${src.substring(0, 50)}...: ${videoHasAudio ? "HAS AUDIO" : "NO AUDIO"}`);
777
- if (videoHasAudio) {
778
- hasAnyAudio = true;
779
- console.log(`\u2713 Video has audio: ${src.substring(0, 50)}...`);
780
- } else {
781
- console.log(`\u2717 Video has no audio: ${src.substring(0, 50)}...`);
782
- }
783
- } catch (error) {
784
- console.warn(`Failed to check audio for ${src.substring(0, 50)}...:`, error);
876
+ if (videoHasAudio) hasAnyAudio = true;
877
+ } catch {
785
878
  hasAnyAudio = true;
786
- console.log(`\u26A0 Assuming video might have audio due to check error`);
787
879
  }
788
880
  }
789
881
  }
@@ -804,58 +896,66 @@ var renderTwickVideoInBrowser = async (config) => {
       mediaAssets.push(currentAssets);
       const canvas = renderer.stage.finalBuffer;
       await exporter.handleFrame(canvas, frame);
-      if (settings.onProgress) settings.onProgress(frame / totalFrames);
+      if (settings.onProgress) settings.onProgress(frame / totalFrames * 0.9);
     }
     await exporter.stop();
+    if (audioElements.length > 0 && settings.includeAudio) {
+      for (let frame = 0; frame < mediaAssets.length; frame++) {
+        const timeInSec = frame / fps;
+        for (const el of audioElements) {
+          const s = typeof el.s === "number" ? el.s : 0;
+          const e = typeof el.e === "number" ? el.e : Number.MAX_VALUE;
+          if (timeInSec >= s && timeInSec < e && el.props?.src) {
+            const playbackRate = el.props.playbackRate ?? 1;
+            const volume = el.props.volume ?? 1;
+            const trimStart = el.props.time ?? 0;
+            const currentTime = (timeInSec - s) * playbackRate + trimStart;
+            mediaAssets[frame].push({
+              key: el.id,
+              src: el.props.src,
+              type: "audio",
+              currentTime,
+              playbackRate,
+              volume
+            });
+          }
+        }
+      }
+    }
     let audioData = null;
-    console.log(`Audio detection summary: hasAnyAudio=${hasAnyAudio}, includeAudio=${settings.includeAudio}, mediaAssets=${mediaAssets.length}`);
     if (settings.includeAudio && mediaAssets.length > 0 && hasAnyAudio) {
-      console.log("Starting audio processing (audio detected in videos)");
-      if (settings.onProgress) {
-        settings.onProgress(0.98);
-      }
+      const reportAudioProgress = (p) => {
+        if (settings.onProgress) settings.onProgress(0.9 + p * 0.07);
+      };
       try {
-        console.log("Calling generateAudio...");
-        audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
-        console.log("generateAudio completed");
-        if (audioData) {
-          console.log(`\u2713 Audio generation successful: ${audioData.byteLength} bytes`);
-        } else {
-          console.log("\u2717 No audio data generated");
-        }
-        if (settings.onProgress) {
-          settings.onProgress(0.99);
-        }
+        audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames, reportAudioProgress);
+        if (settings.onProgress) settings.onProgress(0.97);
       } catch (audioError) {
         const errorMsg = audioError instanceof Error ? audioError.message : String(audioError);
         const errorStack = audioError instanceof Error ? audioError.stack : void 0;
-        console.error("\u2717 Audio generation failed, continuing without audio");
-        console.error(` Error: ${errorMsg}`);
-        if (errorStack) {
-          console.error(` Stack: ${errorStack}`);
-        }
-        console.error(" Context:", {
-          hasAnyAudio,
-          includeAudio: settings.includeAudio,
-          mediaAssetsCount: mediaAssets.length,
-          totalFrames
-        });
+        console.error("[BrowserRender] Audio generation failed:", errorMsg);
+        if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
         audioData = null;
       }
-    } else if (settings.includeAudio && mediaAssets.length > 0 && !hasAnyAudio) {
-      console.log("\u23ED Skipping audio processing: no audio detected in videos");
-    } else {
-      console.log(`\u23ED Skipping audio processing: includeAudio=${settings.includeAudio}, mediaAssets=${mediaAssets.length}, hasAnyAudio=${hasAnyAudio}`);
     }
     let finalBlob = exporter.getVideoBlob();
     if (!finalBlob) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: getVideoBlob returned null");
       throw new Error("Failed to create video blob");
     }
     if (finalBlob.size === 0) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: video blob size is 0");
       throw new Error("Video blob is empty. Rendering may have failed.");
     }
+    const MIN_VIDEO_BLOB_BYTES = 1024;
+    if (finalBlob.size < MIN_VIDEO_BLOB_BYTES) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: video blob too small", finalBlob.size, "bytes (expected >=", MIN_VIDEO_BLOB_BYTES, ")");
+      throw new Error(
+        `Video blob is too small (${finalBlob.size} bytes). No video frames were encoded. This often happens on Windows when the encoder does not accept the frame format from canvas. Try using a different browser or updating graphics drivers.`
+      );
+    }
     if (audioData && settings.includeAudio) {
-      console.log(`Attempting to mux audio (${audioData.byteLength} bytes) with video (${finalBlob.size} bytes)`);
+      if (settings.onProgress) settings.onProgress(0.98);
       try {
         const muxedBlob = await muxAudioVideo({
           videoBlob: finalBlob,
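
Note: with this hunk the onProgress scale is split into phases instead of jumping straight to 0.98: frame encoding reports frame / totalFrames * 0.9, audio processing maps into 0.9-0.97 via 0.9 + p * 0.07, and muxing begins at 0.98. A minimal consumer sketch, assuming only the boundaries shown in this diff (the phase labels are illustrative):

  const settings = {
    includeAudio: true,
    onProgress: (p) => {
      // 0.9 and 0.98 are the phase boundaries introduced above.
      const phase = p < 0.9 ? "encoding frames" : p < 0.98 ? "processing audio" : "muxing/finalizing";
      console.log(`${Math.round(p * 100)}% (${phase})`);
    }
  };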
@@ -864,24 +964,12 @@ var renderTwickVideoInBrowser = async (config) => {
         if (!muxedBlob || muxedBlob.size === 0) {
           throw new Error("Muxed video blob is empty");
         }
-        if (muxedBlob.size === finalBlob.size) {
-          console.warn("Muxed blob size unchanged - muxing may have failed silently");
-        } else {
-          console.log(`Muxing successful: ${finalBlob.size} bytes -> ${muxedBlob.size} bytes`);
-        }
         finalBlob = muxedBlob;
       } catch (muxError) {
         const errorMsg = muxError instanceof Error ? muxError.message : String(muxError);
         const errorStack = muxError instanceof Error ? muxError.stack : void 0;
-        console.error("Audio muxing failed");
-        console.error(` Error: ${errorMsg}`);
-        if (errorStack) {
-          console.error(` Stack: ${errorStack}`);
-        }
-        console.error(" Context:", {
-          videoBlobSize: finalBlob.size,
-          audioDataSize: audioData?.byteLength || 0
-        });
+        console.error("[BrowserRender] Audio muxing failed:", errorMsg);
+        if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
         if (settings.downloadAudioSeparately && audioData) {
           const audioBlob = new Blob([audioData], { type: "audio/wav" });
           const audioUrl = URL.createObjectURL(audioBlob);
@@ -891,16 +979,14 @@ var renderTwickVideoInBrowser = async (config) => {
           a.click();
           URL.revokeObjectURL(audioUrl);
         }
-        console.warn("Continuing with video without audio due to muxing failure");
         finalBlob = exporter.getVideoBlob();
         if (!finalBlob || finalBlob.size === 0) {
           throw new Error("Video blob is invalid after muxing failure");
         }
       }
-    } else if (settings.includeAudio && !audioData) {
-      console.warn("Audio processing was enabled but no audio data was generated");
     }
     if (!finalBlob || finalBlob.size === 0) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: final blob invalid", finalBlob?.size);
       throw new Error("Final video blob is empty or invalid");
     }
     if (settings.onProgress) {
@@ -930,9 +1016,112 @@ var downloadVideoBlob = (videoBlob, filename = "video.mp4") => {
   document.body.appendChild(a);
   a.click();
   document.body.removeChild(a);
-  setTimeout(() => URL.revokeObjectURL(url), 1e3);
+  const revokeMs = isWindows() ? 5e3 : 1e3;
+  setTimeout(() => URL.revokeObjectURL(url), revokeMs);
 };
 
+// src/audio/video-normalizer.ts
+function getFFmpegBaseURL2() {
+  if (typeof window !== "undefined") {
+    return `${window.location.origin}/ffmpeg`;
+  }
+  return "/ffmpeg";
+}
+async function normalizeVideoBlob(input, options = {}) {
+  const startTime = Date.now();
+  const targetWidth = options.width ?? 720;
+  const targetFps = options.fps ?? 30;
+  try {
+    console.log("[VideoNormalizer] Starting normalization...");
+    console.log(`[VideoNormalizer] Input size: ${input.size} bytes (${(input.size / 1024 / 1024).toFixed(2)} MB)`);
+    const { FFmpeg } = await import("@ffmpeg/ffmpeg");
+    const { fetchFile } = await import("@ffmpeg/util");
+    const ffmpeg = new FFmpeg();
+    const base = getFFmpegBaseURL2();
+    const coreURL = `${base}/ffmpeg-core.js`;
+    const wasmURL = `${base}/ffmpeg-core.wasm`;
+    console.log(`[VideoNormalizer] Loading FFmpeg from ${base}`);
+    const loadStart = Date.now();
+    await ffmpeg.load({ coreURL, wasmURL });
+    const loadMs = Date.now() - loadStart;
+    console.log(`[VideoNormalizer] FFmpeg loaded in ${loadMs}ms`);
+    console.log("[VideoNormalizer] Writing input file...");
+    const writeStart = Date.now();
+    await ffmpeg.writeFile("in.mp4", await fetchFile(input));
+    const writeMs = Date.now() - writeStart;
+    console.log(`[VideoNormalizer] Input file written in ${writeMs}ms`);
+    console.log("[VideoNormalizer] Executing normalization command...");
+    const execStart = Date.now();
+    ffmpeg.on("log", ({ message }) => {
+      console.log(`[VideoNormalizer:FFmpeg] ${message}`);
+    });
+    await ffmpeg.exec([
+      "-i",
+      "in.mp4",
+      // Normalize geometry & frame rate
+      "-vf",
+      `scale=${targetWidth}:-2,fps=${targetFps},format=yuv420p`,
+      // Standard H.264 video
+      "-c:v",
+      "libx264",
+      "-preset",
+      "veryfast",
+      "-crf",
+      "20",
+      "-pix_fmt",
+      "yuv420p",
+      "-profile:v",
+      "main",
+      "-r",
+      String(targetFps),
+      // AAC audio, stereo, 48kHz
+      "-c:a",
+      "aac",
+      "-b:a",
+      "128k",
+      "-ar",
+      "48000",
+      "-ac",
+      "2",
+      // Web-friendly MP4
+      "-movflags",
+      "+faststart",
+      "out.mp4"
+    ]);
+    const execMs = Date.now() - execStart;
+    console.log(`[VideoNormalizer] Normalization completed in ${execMs}ms`);
+    const readStart = Date.now();
+    const data = await ffmpeg.readFile("out.mp4");
+    const readMs = Date.now() - readStart;
+    console.log(`[VideoNormalizer] Output file read in ${readMs}ms`);
+    const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
+    const blob = new Blob([uint8], { type: "video/mp4" });
+    const totalMs = Date.now() - startTime;
+    console.log(`[VideoNormalizer] Normalization successful: ${blob.size} bytes (${(blob.size / 1024 / 1024).toFixed(2)} MB) in ${totalMs}ms`);
+    return {
+      blob,
+      size: blob.size,
+      debug: {
+        loadMs,
+        writeMs,
+        execMs,
+        readMs,
+        totalMs
+      }
+    };
+  } catch (error) {
+    const totalMs = Date.now() - startTime;
+    const msg = error instanceof Error ? error.message : String(error);
+    const stack = error instanceof Error ? error.stack : void 0;
+    console.error("[VideoNormalizer] Normalization failed:", msg);
+    if (stack) {
+      console.error("[VideoNormalizer] Stack:", stack);
+    }
+    console.error("[VideoNormalizer] Duration:", `${totalMs}ms`);
+    throw error;
+  }
+}
+
 // src/hooks/use-browser-renderer.ts
 var import_react = require("react");
 var useBrowserRenderer = (options = {}) => {
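
Note: a minimal usage sketch for the newly exported normalizeVideoBlob, based on the implementation above. It assumes the host app serves ffmpeg-core.js and ffmpeg-core.wasm under /ffmpeg on its own origin; width: 720 and fps: 30 shown here are the defaults:

  import { normalizeVideoBlob } from "@twick/browser-render";

  async function normalizeForPlayback(input) {
    // Resolves to { blob, size, debug } or throws; scale=<width>:-2 keeps the aspect ratio.
    const { blob, size, debug } = await normalizeVideoBlob(input, { width: 720, fps: 30 });
    console.log(`normalized to ${size} bytes in ${debug.totalMs}ms`);
    return blob; // H.264/AAC MP4 with +faststart
  }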
@@ -979,6 +1168,7 @@ var useBrowserRenderer = (options = {}) => {
           setProgress(p);
         },
         onComplete: (blob2) => {
+          console.log("[BrowserRender] useBrowserRenderer: onComplete received blob", blob2 ? `size=${blob2.size} type=${blob2.type}` : "null");
           setVideoBlob(blob2);
           if (autoDownload) {
             try {
@@ -989,6 +1179,7 @@ var useBrowserRenderer = (options = {}) => {
           }
         },
         onError: (err) => {
+          console.error("[BrowserRender] useBrowserRenderer: onError", err?.message);
           setError(err);
         }
       }
@@ -1000,6 +1191,8 @@ var useBrowserRenderer = (options = {}) => {
       setProgress(1);
       return blob;
     } catch (err) {
+      const errorMsg = err instanceof Error ? err.message : String(err);
+      console.error("[BrowserRender] useBrowserRenderer: render failed", errorMsg);
       setError(err instanceof Error ? err : new Error(String(err)));
       return null;
     } finally {
@@ -1019,6 +1212,7 @@ var useBrowserRenderer = (options = {}) => {
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   downloadVideoBlob,
+  normalizeVideoBlob,
   renderTwickVideoInBrowser,
   useBrowserRenderer
 });
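
Note: a hedged end-to-end sketch of the options this release touches. The shapes of projectFile and variables.input are not shown in this diff and are placeholders, and the assumption that the returned promise resolves with the final Blob follows the hook code above:

  import renderTwickVideoInBrowser, { downloadVideoBlob } from "@twick/browser-render";

  const blob = await renderTwickVideoInBrowser({
    projectFile,          // placeholder: the Twick project module
    variables: { input }, // required; a missing input now logs and throws early
    settings: {
      includeAudio: true,             // audio track elements now set hasAnyAudio directly
      downloadAudioSeparately: false, // on mux failure, optionally save the WAV separately
      onProgress: (p) => console.log(`${Math.round(p * 100)}%`)
    }
  });
  if (blob) downloadVideoBlob(blob, "video.mp4");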