@twick/browser-render 0.15.9 → 0.15.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -396,19 +396,36 @@ async function muxAudioVideo(options) {
   const ffmpegLogs = [];
   ffmpeg.on("log", ({ message }) => {
     ffmpegLogs.push(message);
-    console.log(` [FFmpeg] ${message}`);
   });
   await ffmpeg.exec([
+    // Inputs
     "-i",
     "video.mp4",
     "-i",
     "audio.wav",
+    // Explicit stream mapping
+    "-map",
+    "0:v:0",
+    "-map",
+    "1:a:0",
+    // Re-encode video to a very standard H.264 stream.
+    // Copying the WebCodecs/mp4-wasm bitstream can sometimes
+    // lead to timing issues where only the first second renders.
     "-c:v",
-    "copy",
+    "libx264",
+    "-preset",
+    "veryfast",
+    "-crf",
+    "20",
+    // AAC audio
     "-c:a",
     "aac",
     "-b:a",
     "192k",
+    // Make MP4 more web-friendly
+    "-movflags",
+    "+faststart",
+    // Stop at the shortest of the two streams
     "-shortest",
     "output.mp4"
   ]);
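For reference, the new argument list is the in-browser equivalent of this ffmpeg invocation:

    ffmpeg -i video.mp4 -i audio.wav -map 0:v:0 -map 1:a:0 \
      -c:v libx264 -preset veryfast -crf 20 \
      -c:a aac -b:a 192k -movflags +faststart -shortest output.mp4

The previous version stream-copied the video track (-c:v copy); per the new inline comments, re-encoding with libx264 works around timing issues in the WebCodecs/mp4-wasm bitstream where only the first second of the copied stream would render.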
@@ -444,6 +461,12 @@ async function muxAudioVideo(options) {
 }
 
 // src/browser-renderer.ts
+function isWindows() {
+  if (typeof navigator === "undefined") return false;
+  const ua = navigator.userAgent;
+  const plat = navigator.platform ?? "";
+  return /Win(dows|32|64|CE)/i.test(ua) || /Win/i.test(plat);
+}
 var BrowserWasmExporter = class _BrowserWasmExporter {
   constructor(settings) {
     this.settings = settings;
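The new isWindows() helper gates all of the Windows-specific fallbacks introduced below. It consults both navigator.userAgent and navigator.platform (the latter is deprecated in modern browsers, hence the ?? "" guard), and returns false outside a browser context where navigator is undefined.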
@@ -456,52 +479,111 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   onProgressCallback;
   currentFrame = 0;
   fps = 30;
+  /** On Windows, copy each frame to this canvas before creating VideoFrame to avoid invalid encoder output. */
+  copyCanvas = null;
+  /** On Windows use native VideoEncoder + Mediabunny with prefer-software so frames are actually encoded. */
+  useNativeEncoder = false;
+  nativeVideoEncoder = null;
+  nativeOutput = null;
+  nativePacketSource = null;
+  nativeAddPromise = Promise.resolve();
+  nativeFirstChunk = true;
   static async create(settings) {
     return new _BrowserWasmExporter(settings);
   }
   async start() {
+    const w = this.settings.size.x;
+    const h = this.settings.size.y;
+    const fps = this.fps;
+    if (isWindows()) {
+      try {
+        const { Output, BufferTarget, Mp4OutputFormat, EncodedVideoPacketSource, EncodedPacket } = await import("mediabunny");
+        const output = new Output({
+          format: new Mp4OutputFormat(),
+          target: new BufferTarget()
+        });
+        const packetSource = new EncodedVideoPacketSource("avc");
+        output.addVideoTrack(packetSource);
+        await output.start();
+        this.nativeOutput = output;
+        this.nativePacketSource = packetSource;
+        this.nativeAddPromise = Promise.resolve();
+        this.nativeFirstChunk = true;
+        const videoEncoder = new VideoEncoder({
+          output: (chunk, meta) => {
+            const packet = EncodedPacket.fromEncodedChunk(chunk);
+            const isFirst = this.nativeFirstChunk;
+            const metaArg = isFirst ? meta : void 0;
+            this.nativeFirstChunk = false;
+            this.nativeAddPromise = this.nativeAddPromise.then(
+              () => this.nativePacketSource.add(packet, metaArg)
+            );
+          },
+          error: (e) => console.error("[BrowserRender] VideoEncoder error:", e)
+        });
+        const bitrate = Math.max(5e5, w * h * fps * 0.1 | 0);
+        const config = {
+          codec: "avc1.42001f",
+          width: w,
+          height: h,
+          bitrate,
+          framerate: fps,
+          hardwareAcceleration: "prefer-software"
+        };
+        const support = await VideoEncoder.isConfigSupported(config);
+        if (!support.supported) {
+          delete config.hardwareAcceleration;
+        }
+        videoEncoder.configure(config);
+        this.nativeVideoEncoder = videoEncoder;
+        this.useNativeEncoder = true;
+        this.copyCanvas = document.createElement("canvas");
+        this.copyCanvas.width = w;
+        this.copyCanvas.height = h;
+        return;
+      } catch {
+        this.useNativeEncoder = false;
+        this.nativeVideoEncoder = null;
+        this.nativeOutput = null;
+        this.nativePacketSource = null;
+      }
+    }
     try {
       const loadMp4Module = (await import("mp4-wasm")).default;
       const possiblePaths = [
-        // Vite dev server virtual path
         "/@mp4-wasm",
-        // Common bundled asset paths (Vite uses hashed names)
         "/assets/mp4-wasm.wasm",
         "/assets/mp4-YBRi_559.wasm",
-        // Known Vite hash
         "/mp4-wasm.wasm",
-        // Node modules path (for dev)
         "/node_modules/mp4-wasm/dist/mp4-wasm.wasm"
       ];
       let buffer = null;
-      let successPath = "";
       for (const path of possiblePaths) {
         try {
           const resp = await fetch(path);
           if (resp.ok) {
             const contentType = resp.headers.get("content-type");
-            if (contentType && contentType.includes("html")) {
-              continue;
-            }
+            if (contentType && contentType.includes("html")) continue;
             buffer = await resp.arrayBuffer();
-            successPath = path;
             break;
           }
-        } catch (e) {
+        } catch {
           continue;
         }
       }
       if (!buffer) {
+        console.error("[BrowserRender] Exporter start: no WASM buffer from any path");
         throw new Error(
           "Could not load WASM file from any location. Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it."
         );
       }
       const mp4 = await loadMp4Module({ wasmBinary: buffer });
-      this.encoder = mp4.createWebCodecsEncoder({
-        width: this.settings.size.x,
-        height: this.settings.size.y,
-        fps: this.fps
-      });
+      this.encoder = mp4.createWebCodecsEncoder({ width: w, height: h, fps });
+      if (isWindows()) {
+        this.copyCanvas = document.createElement("canvas");
+        this.copyCanvas.width = w;
+        this.copyCanvas.height = h;
+      }
     } catch (error) {
       throw error;
     }
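Two details of the native-encoder path added above are worth calling out. VideoEncoder delivers chunks through a synchronous output callback while Mediabunny's EncodedVideoPacketSource.add() is asynchronous, so the writes are serialized by chaining each add() onto nativeAddPromise, and the decoder-configuration metadata is forwarded only with the first chunk. The bitrate heuristic w * h * fps * 0.1 | 0 budgets roughly 0.1 bits per pixel per frame with a 500 kbps floor; at 1920×1080 and 30 fps that is 1920 × 1080 × 30 × 0.1 ≈ 6.2 Mbps. The codec string "avc1.42001f" requests H.264 Baseline profile at level 3.1, and the "prefer-software" hint is dropped from the config only if VideoEncoder.isConfigSupported() rejects it.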
@@ -509,57 +591,92 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   async handleFrame(canvas, frameNumber) {
     const frameIndex = frameNumber !== void 0 ? frameNumber : this.currentFrame;
     const timestampMicroseconds = Math.round(frameIndex / this.fps * 1e6);
-    const frame = new VideoFrame(canvas, {
-      timestamp: timestampMicroseconds,
-      duration: Math.round(1 / this.fps * 1e6)
-    });
-    await this.encoder.addFrame(frame);
-    frame.close();
+    const durationMicroseconds = Math.round(1 / this.fps * 1e6);
+    let sourceCanvas = canvas;
+    if (this.copyCanvas) {
+      if (this.copyCanvas.width !== canvas.width || this.copyCanvas.height !== canvas.height) {
+        this.copyCanvas.width = canvas.width;
+        this.copyCanvas.height = canvas.height;
+      }
+      const ctx = this.copyCanvas.getContext("2d");
+      if (ctx) {
+        ctx.imageSmoothingEnabled = false;
+        ctx.drawImage(canvas, 0, 0);
+        sourceCanvas = this.copyCanvas;
+      }
+    }
+    if (this.useNativeEncoder && this.nativeVideoEncoder) {
+      const bitmap = await createImageBitmap(sourceCanvas);
+      const frame = new VideoFrame(bitmap, {
+        timestamp: timestampMicroseconds,
+        duration: durationMicroseconds
+      });
+      this.nativeVideoEncoder.encode(frame, { keyFrame: frameIndex === 0 });
+      frame.close();
+      bitmap.close();
+    } else {
+      let frame;
+      if (isWindows() && typeof createImageBitmap === "function") {
+        const bitmap = await createImageBitmap(sourceCanvas);
+        frame = new VideoFrame(bitmap, {
+          timestamp: timestampMicroseconds,
+          duration: durationMicroseconds
+        });
+        await this.encoder.addFrame(frame);
+        frame.close();
+        bitmap.close();
+      } else {
+        frame = new VideoFrame(sourceCanvas, {
+          timestamp: timestampMicroseconds,
+          duration: durationMicroseconds
+        });
+        await this.encoder.addFrame(frame);
+        frame.close();
+      }
+    }
     if (frameNumber === void 0) {
       this.currentFrame++;
     }
   }
   async stop() {
+    if (this.useNativeEncoder && this.nativeVideoEncoder && this.nativeOutput && this.nativePacketSource) {
+      await this.nativeVideoEncoder.flush();
+      this.nativeVideoEncoder.close();
+      this.nativeVideoEncoder = null;
+      await this.nativeAddPromise;
+      this.nativePacketSource.close();
+      await this.nativeOutput.finalize();
+      const buf2 = this.nativeOutput.target.buffer;
+      this.nativeOutput = null;
+      this.nativePacketSource = null;
+      this.videoBlob = new Blob([buf2], { type: "video/mp4" });
+      return;
+    }
     const buf = await this.encoder.end();
-    this.videoBlob = new Blob([buf], { type: "video/mp4" });
+    const copy = buf instanceof ArrayBuffer ? buf.slice(0) : new Uint8Array(buf).slice().buffer;
+    this.videoBlob = new Blob([copy], { type: "video/mp4" });
   }
-  async generateAudio(assets, startFrame, endFrame) {
+  async generateAudio(assets, startFrame, endFrame, onProgress) {
     try {
-      console.log(`Generating audio from ${assets.length} frames`);
       const processor = new BrowserAudioProcessor();
       const assetPlacements = getAssetPlacement(assets);
       if (assetPlacements.length === 0) {
-        console.log("No asset placements found");
         return null;
       }
-      console.log(`Processing ${assetPlacements.length} asset placements`);
+      const processableCount = assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length;
+      let processedCount = 0;
       const processedBuffers = [];
       for (let i = 0; i < assetPlacements.length; i++) {
         const asset = assetPlacements[i];
-        console.log(`[${i + 1}/${assetPlacements.length}] Processing asset: ${asset.src} (type: ${asset.type}, volume: ${asset.volume}, playbackRate: ${asset.playbackRate})`);
         if (asset.volume > 0 && asset.playbackRate > 0) {
-          const startTime = Date.now();
           try {
             if (asset.type === "video") {
-              console.log(` \u2192 Checking if asset has audio: ${asset.src.substring(0, 50)}...`);
               try {
                 const assetHasAudio = await hasAudio(asset.src);
-                if (!assetHasAudio) {
-                  console.log(` \u23ED Skipping asset (no audio detected): ${asset.src.substring(0, 50)}...`);
-                  continue;
-                }
-                console.log(` \u2713 Asset has audio, proceeding: ${asset.src.substring(0, 50)}...`);
-              } catch (audioCheckError) {
-                const errorMsg = audioCheckError instanceof Error ? audioCheckError.message : String(audioCheckError);
-                const errorStack = audioCheckError instanceof Error ? audioCheckError.stack : void 0;
-                console.warn(` \u26A0 Audio check failed, proceeding anyway: ${asset.src.substring(0, 50)}...`);
-                console.warn(` Error: ${errorMsg}`);
-                if (errorStack) {
-                  console.warn(` Stack: ${errorStack}`);
-                }
+                if (!assetHasAudio) continue;
+              } catch {
               }
             }
-            console.log(` \u2192 Starting processAudioAsset for: ${asset.src}`);
             const processPromise = processor.processAudioAsset(
               asset,
               this.settings.fps || 30,
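In the reworked handleFrame, Windows frames are first drawn into copyCanvas, a plain 2D canvas, before the VideoFrame is constructed; per the field comment, routing the (typically GPU-backed) stage buffer through a raster copy avoids invalid encoder output, and createImageBitmap serves the same purpose as an intermediate snapshot on the non-native Windows path. In stop(), the non-native branch now copies the buffer returned by encoder.end() before wrapping it in a Blob, presumably because mp4-wasm can return a view into its WASM heap that is not safe to retain after the call.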
@@ -571,53 +688,30 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
               }, 2e4);
             });
             const buffer = await Promise.race([processPromise, timeoutPromise]);
-            const duration = Date.now() - startTime;
-            console.log(` \u2713 Successfully processed audio asset in ${duration}ms: ${asset.src.substring(0, 50)}...`);
             processedBuffers.push(buffer);
-          } catch (error) {
-            const errorMsg = error instanceof Error ? error.message : String(error);
-            const errorStack = error instanceof Error ? error.stack : void 0;
-            const duration = Date.now() - startTime;
-            console.warn(` \u2717 Failed to process audio asset after ${duration}ms: ${asset.src.substring(0, 50)}...`);
-            console.warn(` Error: ${errorMsg}`);
-            if (errorStack) {
-              console.warn(` Stack: ${errorStack}`);
+            processedCount++;
+            if (onProgress && processableCount > 0) {
+              onProgress(processedCount / processableCount);
             }
-            console.warn(` Asset details: type=${asset.type}, volume=${asset.volume}, playbackRate=${asset.playbackRate}, startFrame=${asset.startInVideo}, endFrame=${asset.endInVideo}`);
+          } catch {
           }
-        } else {
-          console.log(` \u23ED Skipping asset: volume=${asset.volume}, playbackRate=${asset.playbackRate}`);
         }
       }
       if (processedBuffers.length === 0) {
-        console.warn("No audio buffers were successfully processed");
-        console.warn(` Total assets attempted: ${assetPlacements.length}`);
-        console.warn(` Assets with volume>0 and playbackRate>0: ${assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length}`);
         return null;
       }
-      console.log(`Mixing ${processedBuffers.length} audio buffers`);
-      const mixStartTime = Date.now();
+      if (onProgress) onProgress(0.85);
       const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
-      const mixDuration = Date.now() - mixStartTime;
-      console.log(`Audio mixing completed in ${mixDuration}ms`);
-      const wavStartTime = Date.now();
+      if (onProgress) onProgress(0.92);
       const wavData = processor.audioBufferToWav(mixedBuffer);
-      const wavDuration = Date.now() - wavStartTime;
-      console.log(`WAV conversion completed in ${wavDuration}ms`);
-      console.log(`Audio generation complete: ${wavData.byteLength} bytes (${(wavData.byteLength / 1024 / 1024).toFixed(2)} MB)`);
+      if (onProgress) onProgress(1);
       await processor.close();
       return wavData;
     } catch (error) {
       const errorMsg = error instanceof Error ? error.message : String(error);
       const errorStack = error instanceof Error ? error.stack : void 0;
-      console.error("Audio generation error:", errorMsg);
-      if (errorStack) {
-        console.error("Error stack:", errorStack);
-      }
-      console.error("Error details:", {
-        errorType: error instanceof Error ? error.constructor.name : typeof error,
-        errorMessage: errorMsg
-      });
+      console.error("[BrowserRender] Audio generation error:", errorMsg);
+      if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
       return null;
     }
   }
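generateAudio now takes an onProgress callback: the per-asset loop reports processedCount / processableCount (counting only assets with volume > 0 and playbackRate > 0), followed by fixed milestones of 0.85 after the loop, 0.92 after mixing, and 1.0 after WAV conversion. Note that when every asset succeeds, the loop itself reaches 1.0 before the 0.85 milestone, so the reported progress briefly steps backwards; callers needing monotonic progress may want to clamp.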
@@ -657,6 +751,7 @@ var renderTwickVideoInBrowser = async (config) => {
   try {
     const { projectFile, variables, settings = {} } = config;
     if (!variables || !variables.input) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: missing variables.input");
       throw new Error('Invalid configuration. "variables.input" is required.');
     }
     const width = settings.width || variables.input.properties?.width || 1920;
@@ -691,22 +786,27 @@ var renderTwickVideoInBrowser = async (config) => {
     renderer.playback.state = 1;
     const totalFrames = await renderer.getNumberOfFrames(renderSettings);
     if (totalFrames === 0 || !isFinite(totalFrames)) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: invalid totalFrames", totalFrames);
       throw new Error(
         "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
       );
     }
     const videoElements = [];
+    const audioElements = [];
     if (variables.input.tracks) {
       variables.input.tracks.forEach((track) => {
         if (track.elements) {
           track.elements.forEach((el) => {
             if (el.type === "video") videoElements.push(el);
+            if (el.type === "audio") audioElements.push(el);
           });
         }
       });
     }
     let hasAnyAudio = false;
-    console.log(`Found ${videoElements.length} video element(s) to check for audio`);
+    if (settings.includeAudio && audioElements.length > 0) {
+      hasAnyAudio = true;
+    }
     if (videoElements.length > 0) {
       for (const videoEl of videoElements) {
         const src = videoEl.props?.src;
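Dedicated audio elements are now collected alongside video elements, and when settings.includeAudio is set, their presence alone marks hasAnyAudio. The per-source hasAudio() probe below therefore only matters for video elements, whose files may or may not contain an audio track.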
@@ -732,19 +832,10 @@ var renderTwickVideoInBrowser = async (config) => {
         });
         if (settings.includeAudio) {
           try {
-            console.log(`Checking if video has audio: ${src.substring(0, 50)}...`);
             const videoHasAudio = await hasAudio(src);
-            console.log(`Audio check result for ${src.substring(0, 50)}...: ${videoHasAudio ? "HAS AUDIO" : "NO AUDIO"}`);
-            if (videoHasAudio) {
-              hasAnyAudio = true;
-              console.log(`\u2713 Video has audio: ${src.substring(0, 50)}...`);
-            } else {
-              console.log(`\u2717 Video has no audio: ${src.substring(0, 50)}...`);
-            }
-          } catch (error) {
-            console.warn(`Failed to check audio for ${src.substring(0, 50)}...:`, error);
+            if (videoHasAudio) hasAnyAudio = true;
+          } catch {
             hasAnyAudio = true;
-            console.log(`\u26A0 Assuming video might have audio due to check error`);
           }
         }
       }
@@ -765,58 +856,66 @@ var renderTwickVideoInBrowser = async (config) => {
       mediaAssets.push(currentAssets);
       const canvas = renderer.stage.finalBuffer;
       await exporter.handleFrame(canvas, frame);
-      if (settings.onProgress) settings.onProgress(frame / totalFrames);
+      if (settings.onProgress) settings.onProgress(frame / totalFrames * 0.9);
     }
     await exporter.stop();
+    if (audioElements.length > 0 && settings.includeAudio) {
+      for (let frame = 0; frame < mediaAssets.length; frame++) {
+        const timeInSec = frame / fps;
+        for (const el of audioElements) {
+          const s = typeof el.s === "number" ? el.s : 0;
+          const e = typeof el.e === "number" ? el.e : Number.MAX_VALUE;
+          if (timeInSec >= s && timeInSec < e && el.props?.src) {
+            const playbackRate = el.props.playbackRate ?? 1;
+            const volume = el.props.volume ?? 1;
+            const trimStart = el.props.time ?? 0;
+            const currentTime = (timeInSec - s) * playbackRate + trimStart;
+            mediaAssets[frame].push({
+              key: el.id,
+              src: el.props.src,
+              type: "audio",
+              currentTime,
+              playbackRate,
+              volume
+            });
+          }
+        }
+      }
+    }
     let audioData = null;
-    console.log(`Audio detection summary: hasAnyAudio=${hasAnyAudio}, includeAudio=${settings.includeAudio}, mediaAssets=${mediaAssets.length}`);
     if (settings.includeAudio && mediaAssets.length > 0 && hasAnyAudio) {
-      console.log("Starting audio processing (audio detected in videos)");
-      if (settings.onProgress) {
-        settings.onProgress(0.98);
-      }
+      const reportAudioProgress = (p) => {
+        if (settings.onProgress) settings.onProgress(0.9 + p * 0.07);
+      };
       try {
-        console.log("Calling generateAudio...");
-        audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
-        console.log("generateAudio completed");
-        if (audioData) {
-          console.log(`\u2713 Audio generation successful: ${audioData.byteLength} bytes`);
-        } else {
-          console.log("\u2717 No audio data generated");
-        }
-        if (settings.onProgress) {
-          settings.onProgress(0.99);
-        }
+        audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames, reportAudioProgress);
+        if (settings.onProgress) settings.onProgress(0.97);
       } catch (audioError) {
         const errorMsg = audioError instanceof Error ? audioError.message : String(audioError);
         const errorStack = audioError instanceof Error ? audioError.stack : void 0;
-        console.error("\u2717 Audio generation failed, continuing without audio");
-        console.error(` Error: ${errorMsg}`);
-        if (errorStack) {
-          console.error(` Stack: ${errorStack}`);
-        }
-        console.error(" Context:", {
-          hasAnyAudio,
-          includeAudio: settings.includeAudio,
-          mediaAssetsCount: mediaAssets.length,
-          totalFrames
-        });
+        console.error("[BrowserRender] Audio generation failed:", errorMsg);
+        if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
         audioData = null;
       }
-    } else if (settings.includeAudio && mediaAssets.length > 0 && !hasAnyAudio) {
-      console.log("\u23ED Skipping audio processing: no audio detected in videos");
-    } else {
-      console.log(`\u23ED Skipping audio processing: includeAudio=${settings.includeAudio}, mediaAssets=${mediaAssets.length}, hasAnyAudio=${hasAnyAudio}`);
     }
     let finalBlob = exporter.getVideoBlob();
     if (!finalBlob) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: getVideoBlob returned null");
       throw new Error("Failed to create video blob");
     }
     if (finalBlob.size === 0) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: video blob size is 0");
       throw new Error("Video blob is empty. Rendering may have failed.");
     }
+    const MIN_VIDEO_BLOB_BYTES = 1024;
+    if (finalBlob.size < MIN_VIDEO_BLOB_BYTES) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: video blob too small", finalBlob.size, "bytes (expected >=", MIN_VIDEO_BLOB_BYTES, ")");
+      throw new Error(
+        `Video blob is too small (${finalBlob.size} bytes). No video frames were encoded. This often happens on Windows when the encoder does not accept the frame format from canvas. Try using a different browser or updating graphics drivers.`
+      );
+    }
     if (audioData && settings.includeAudio) {
-      console.log(`Attempting to mux audio (${audioData.byteLength} bytes) with video (${finalBlob.size} bytes)`);
+      if (settings.onProgress) settings.onProgress(0.98);
       try {
         const muxedBlob = await muxAudioVideo({
           videoBlob: finalBlob,
@@ -825,24 +924,12 @@ var renderTwickVideoInBrowser = async (config) => {
         if (!muxedBlob || muxedBlob.size === 0) {
           throw new Error("Muxed video blob is empty");
         }
-        if (muxedBlob.size === finalBlob.size) {
-          console.warn("Muxed blob size unchanged - muxing may have failed silently");
-        } else {
-          console.log(`Muxing successful: ${finalBlob.size} bytes -> ${muxedBlob.size} bytes`);
-        }
         finalBlob = muxedBlob;
       } catch (muxError) {
         const errorMsg = muxError instanceof Error ? muxError.message : String(muxError);
         const errorStack = muxError instanceof Error ? muxError.stack : void 0;
-        console.error("Audio muxing failed");
-        console.error(` Error: ${errorMsg}`);
-        if (errorStack) {
-          console.error(` Stack: ${errorStack}`);
-        }
-        console.error(" Context:", {
-          videoBlobSize: finalBlob.size,
-          audioDataSize: audioData?.byteLength || 0
-        });
+        console.error("[BrowserRender] Audio muxing failed:", errorMsg);
+        if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
         if (settings.downloadAudioSeparately && audioData) {
           const audioBlob = new Blob([audioData], { type: "audio/wav" });
           const audioUrl = URL.createObjectURL(audioBlob);
@@ -852,16 +939,14 @@ var renderTwickVideoInBrowser = async (config) => {
           a.click();
           URL.revokeObjectURL(audioUrl);
         }
-        console.warn("Continuing with video without audio due to muxing failure");
         finalBlob = exporter.getVideoBlob();
         if (!finalBlob || finalBlob.size === 0) {
           throw new Error("Video blob is invalid after muxing failure");
         }
       }
-    } else if (settings.includeAudio && !audioData) {
-      console.warn("Audio processing was enabled but no audio data was generated");
     }
     if (!finalBlob || finalBlob.size === 0) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: final blob invalid", finalBlob?.size);
       throw new Error("Final video blob is empty or invalid");
     }
     if (settings.onProgress) {
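Taken together, the progress budget for a render is now: frame encoding fills 0–0.9, audio generation 0.9–0.97 (via reportAudioProgress), 0.98 is reported just before muxing, and 1.0 on completion. The synthesized audio assets map output time to source time with currentTime = (timeInSec - s) * playbackRate + trimStart; for example, an element starting at s = 2 s with playbackRate 2 and trim offset 5 s yields currentTime = (3 - 2) × 2 + 5 = 7 s at the 3-second mark. The new MIN_VIDEO_BLOB_BYTES guard (1024 bytes) turns the "no frames were encoded" failure mode described in its error message into an explicit error instead of a silently unplayable file.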
@@ -891,9 +976,112 @@ var downloadVideoBlob = (videoBlob, filename = "video.mp4") => {
   document.body.appendChild(a);
   a.click();
   document.body.removeChild(a);
-  setTimeout(() => URL.revokeObjectURL(url), 1e3);
+  const revokeMs = isWindows() ? 5e3 : 1e3;
+  setTimeout(() => URL.revokeObjectURL(url), revokeMs);
 };
 
+// src/audio/video-normalizer.ts
+function getFFmpegBaseURL2() {
+  if (typeof window !== "undefined") {
+    return `${window.location.origin}/ffmpeg`;
+  }
+  return "/ffmpeg";
+}
+async function normalizeVideoBlob(input, options = {}) {
+  const startTime = Date.now();
+  const targetWidth = options.width ?? 720;
+  const targetFps = options.fps ?? 30;
+  try {
+    console.log("[VideoNormalizer] Starting normalization...");
+    console.log(`[VideoNormalizer] Input size: ${input.size} bytes (${(input.size / 1024 / 1024).toFixed(2)} MB)`);
+    const { FFmpeg } = await import("@ffmpeg/ffmpeg");
+    const { fetchFile } = await import("@ffmpeg/util");
+    const ffmpeg = new FFmpeg();
+    const base = getFFmpegBaseURL2();
+    const coreURL = `${base}/ffmpeg-core.js`;
+    const wasmURL = `${base}/ffmpeg-core.wasm`;
+    console.log(`[VideoNormalizer] Loading FFmpeg from ${base}`);
+    const loadStart = Date.now();
+    await ffmpeg.load({ coreURL, wasmURL });
+    const loadMs = Date.now() - loadStart;
+    console.log(`[VideoNormalizer] FFmpeg loaded in ${loadMs}ms`);
+    console.log("[VideoNormalizer] Writing input file...");
+    const writeStart = Date.now();
+    await ffmpeg.writeFile("in.mp4", await fetchFile(input));
+    const writeMs = Date.now() - writeStart;
+    console.log(`[VideoNormalizer] Input file written in ${writeMs}ms`);
+    console.log("[VideoNormalizer] Executing normalization command...");
+    const execStart = Date.now();
+    ffmpeg.on("log", ({ message }) => {
+      console.log(`[VideoNormalizer:FFmpeg] ${message}`);
+    });
+    await ffmpeg.exec([
+      "-i",
+      "in.mp4",
+      // Normalize geometry & frame rate
+      "-vf",
+      `scale=${targetWidth}:-2,fps=${targetFps},format=yuv420p`,
+      // Standard H.264 video
+      "-c:v",
+      "libx264",
+      "-preset",
+      "veryfast",
+      "-crf",
+      "20",
+      "-pix_fmt",
+      "yuv420p",
+      "-profile:v",
+      "main",
+      "-r",
+      String(targetFps),
+      // AAC audio, stereo, 48kHz
+      "-c:a",
+      "aac",
+      "-b:a",
+      "128k",
+      "-ar",
+      "48000",
+      "-ac",
+      "2",
+      // Web-friendly MP4
+      "-movflags",
+      "+faststart",
+      "out.mp4"
+    ]);
+    const execMs = Date.now() - execStart;
+    console.log(`[VideoNormalizer] Normalization completed in ${execMs}ms`);
+    const readStart = Date.now();
+    const data = await ffmpeg.readFile("out.mp4");
+    const readMs = Date.now() - readStart;
+    console.log(`[VideoNormalizer] Output file read in ${readMs}ms`);
+    const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
+    const blob = new Blob([uint8], { type: "video/mp4" });
+    const totalMs = Date.now() - startTime;
+    console.log(`[VideoNormalizer] Normalization successful: ${blob.size} bytes (${(blob.size / 1024 / 1024).toFixed(2)} MB) in ${totalMs}ms`);
+    return {
+      blob,
+      size: blob.size,
+      debug: {
+        loadMs,
+        writeMs,
+        execMs,
+        readMs,
+        totalMs
+      }
+    };
+  } catch (error) {
+    const totalMs = Date.now() - startTime;
+    const msg = error instanceof Error ? error.message : String(error);
+    const stack = error instanceof Error ? error.stack : void 0;
+    console.error("[VideoNormalizer] Normalization failed:", msg);
+    if (stack) {
+      console.error("[VideoNormalizer] Stack:", stack);
+    }
+    console.error("[VideoNormalizer] Duration:", `${totalMs}ms`);
+    throw error;
+  }
+}
+
 // src/hooks/use-browser-renderer.ts
 import { useState, useCallback } from "react";
 var useBrowserRenderer = (options = {}) => {
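normalizeVideoBlob is a new public export (see the export hunk at the end of this diff). A minimal usage sketch, assuming the app serves ffmpeg-core.js and ffmpeg-core.wasm under /ffmpeg as getFFmpegBaseURL2 expects; exportForWeb is a hypothetical wrapper, not part of the package:

    import { normalizeVideoBlob, downloadVideoBlob } from "@twick/browser-render";

    // Hypothetical helper: re-encode a rendered blob into a 720p/30fps web-friendly MP4.
    async function exportForWeb(renderedBlob) {
      const { blob, size, debug } = await normalizeVideoBlob(renderedBlob, {
        width: 720, // becomes -vf scale=720:-2, so aspect ratio is preserved
        fps: 30
      });
      console.log(`Normalized to ${size} bytes in ${debug.totalMs}ms`);
      downloadVideoBlob(blob, "normalized.mp4");
    }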
@@ -940,6 +1128,7 @@ var useBrowserRenderer = (options = {}) => {
         setProgress(p);
       },
       onComplete: (blob2) => {
+        console.log("[BrowserRender] useBrowserRenderer: onComplete received blob", blob2 ? `size=${blob2.size} type=${blob2.type}` : "null");
         setVideoBlob(blob2);
         if (autoDownload) {
           try {
@@ -950,6 +1139,7 @@ var useBrowserRenderer = (options = {}) => {
         }
       },
       onError: (err) => {
+        console.error("[BrowserRender] useBrowserRenderer: onError", err?.message);
         setError(err);
       }
     }
@@ -961,6 +1151,8 @@ var useBrowserRenderer = (options = {}) => {
       setProgress(1);
       return blob;
     } catch (err) {
+      const errorMsg = err instanceof Error ? err.message : String(err);
+      console.error("[BrowserRender] useBrowserRenderer: render failed", errorMsg);
       setError(err instanceof Error ? err : new Error(String(err)));
       return null;
     } finally {
@@ -980,6 +1172,7 @@ var useBrowserRenderer = (options = {}) => {
 export {
   renderTwickVideoInBrowser as default,
   downloadVideoBlob,
+  normalizeVideoBlob,
   renderTwickVideoInBrowser,
   useBrowserRenderer
 };