@twick/browser-render 0.15.8 → 0.15.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +41 -0
- package/dist/index.d.mts +45 -1
- package/dist/index.d.ts +45 -1
- package/dist/index.js +530 -58
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +529 -58
- package/dist/index.mjs.map +1 -1
- package/dist/vite-plugin-ffmpeg.d.mts +18 -0
- package/dist/vite-plugin-ffmpeg.d.ts +18 -0
- package/dist/vite-plugin-ffmpeg.js +156 -0
- package/dist/vite-plugin-ffmpeg.js.map +1 -0
- package/dist/vite-plugin-ffmpeg.mjs +120 -0
- package/dist/vite-plugin-ffmpeg.mjs.map +1 -0
- package/package.json +14 -5
- package/scripts/copy-public-assets.js +124 -0
- package/scripts/copy-wasm.js +19 -0
package/dist/index.js
CHANGED
|
@@ -32,6 +32,7 @@ var index_exports = {};
|
|
|
32
32
|
__export(index_exports, {
|
|
33
33
|
default: () => renderTwickVideoInBrowser,
|
|
34
34
|
downloadVideoBlob: () => downloadVideoBlob,
|
|
35
|
+
normalizeVideoBlob: () => normalizeVideoBlob,
|
|
35
36
|
renderTwickVideoInBrowser: () => renderTwickVideoInBrowser,
|
|
36
37
|
useBrowserRenderer: () => useBrowserRenderer
|
|
37
38
|
});
|
|
@@ -40,6 +41,7 @@ module.exports = __toCommonJS(index_exports);
|
|
|
40
41
|
// src/browser-renderer.ts
|
|
41
42
|
var import_core = require("@twick/core");
|
|
42
43
|
var import_project = __toESM(require("@twick/visualizer/dist/project.js"));
|
|
44
|
+
var import_media_utils = require("@twick/media-utils");
|
|
43
45
|
|
|
44
46
|
// src/audio/video-audio-extractor.ts
|
|
45
47
|
var VideoElementAudioExtractor = class {
|
|
@@ -67,40 +69,113 @@ var VideoElementAudioExtractor = class {
|
|
|
67
69
|
* Extract audio by playing the video and capturing audio output
|
|
68
70
|
*/
|
|
69
71
|
async extractAudio(startTime, duration, playbackRate = 1) {
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
72
|
+
try {
|
|
73
|
+
const source = this.audioContext.createMediaElementSource(this.video);
|
|
74
|
+
this.destination = this.audioContext.createMediaStreamDestination();
|
|
75
|
+
source.connect(this.destination);
|
|
76
|
+
} catch (err) {
|
|
77
|
+
throw new Error("Video has no audio track");
|
|
78
|
+
}
|
|
73
79
|
this.audioChunks = [];
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
80
|
+
let mimeType = "audio/webm";
|
|
81
|
+
if (!MediaRecorder.isTypeSupported(mimeType)) {
|
|
82
|
+
mimeType = "";
|
|
83
|
+
}
|
|
84
|
+
try {
|
|
85
|
+
this.mediaRecorder = new MediaRecorder(this.destination.stream, {
|
|
86
|
+
mimeType: mimeType || void 0
|
|
87
|
+
});
|
|
88
|
+
} catch (err) {
|
|
89
|
+
throw new Error(`Failed to create MediaRecorder: ${err}. Video may have no audio track.`);
|
|
90
|
+
}
|
|
77
91
|
this.mediaRecorder.ondataavailable = (event) => {
|
|
78
|
-
if (event.data.size > 0) {
|
|
92
|
+
if (event.data && event.data.size > 0) {
|
|
79
93
|
this.audioChunks.push(event.data);
|
|
80
94
|
}
|
|
81
95
|
};
|
|
82
96
|
this.video.currentTime = startTime;
|
|
83
97
|
this.video.playbackRate = playbackRate;
|
|
84
|
-
await new Promise((resolve) => {
|
|
85
|
-
|
|
98
|
+
await new Promise((resolve, reject) => {
|
|
99
|
+
const seekTimeout = setTimeout(() => {
|
|
100
|
+
reject(new Error("Video seek timeout"));
|
|
101
|
+
}, 5e3);
|
|
102
|
+
this.video.addEventListener("seeked", () => {
|
|
103
|
+
clearTimeout(seekTimeout);
|
|
104
|
+
resolve();
|
|
105
|
+
}, { once: true });
|
|
106
|
+
this.video.addEventListener("error", () => {
|
|
107
|
+
clearTimeout(seekTimeout);
|
|
108
|
+
reject(new Error("Video seek error"));
|
|
109
|
+
}, { once: true });
|
|
86
110
|
});
|
|
87
111
|
return new Promise((resolve, reject) => {
|
|
88
112
|
const recordingTimeout = setTimeout(() => {
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
113
|
+
this.video.pause();
|
|
114
|
+
if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
|
|
115
|
+
this.mediaRecorder.stop();
|
|
116
|
+
}
|
|
117
|
+
reject(new Error("Audio extraction timeout - video may have no audio track"));
|
|
118
|
+
}, (duration / playbackRate + 10) * 1e3);
|
|
119
|
+
let hasData = false;
|
|
120
|
+
const dataCheckInterval = setInterval(() => {
|
|
121
|
+
if (this.audioChunks.length > 0 && this.audioChunks.some((chunk) => chunk.size > 0)) {
|
|
122
|
+
hasData = true;
|
|
123
|
+
}
|
|
124
|
+
}, 1e3);
|
|
125
|
+
this.mediaRecorder.onerror = (event) => {
|
|
126
|
+
clearInterval(dataCheckInterval);
|
|
127
|
+
clearTimeout(recordingTimeout);
|
|
128
|
+
this.video.pause();
|
|
129
|
+
reject(new Error(`MediaRecorder error: ${event}. Video may have no audio track.`));
|
|
130
|
+
};
|
|
131
|
+
try {
|
|
132
|
+
this.mediaRecorder.start(100);
|
|
133
|
+
this.video.play().catch((playErr) => {
|
|
134
|
+
clearInterval(dataCheckInterval);
|
|
135
|
+
clearTimeout(recordingTimeout);
|
|
136
|
+
reject(new Error(`Failed to play video: ${playErr}`));
|
|
137
|
+
});
|
|
138
|
+
} catch (startErr) {
|
|
139
|
+
clearInterval(dataCheckInterval);
|
|
140
|
+
clearTimeout(recordingTimeout);
|
|
141
|
+
reject(new Error(`Failed to start recording: ${startErr}`));
|
|
142
|
+
}
|
|
93
143
|
setTimeout(async () => {
|
|
144
|
+
clearInterval(dataCheckInterval);
|
|
94
145
|
clearTimeout(recordingTimeout);
|
|
95
146
|
this.video.pause();
|
|
96
|
-
this.mediaRecorder.
|
|
147
|
+
if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
|
|
148
|
+
this.mediaRecorder.stop();
|
|
149
|
+
}
|
|
150
|
+
const stopTimeout = setTimeout(() => {
|
|
151
|
+
if (this.audioChunks.length === 0 || !hasData) {
|
|
152
|
+
reject(new Error("No audio data captured - video has no audio track"));
|
|
153
|
+
}
|
|
154
|
+
}, 2e3);
|
|
97
155
|
await new Promise((res) => {
|
|
98
|
-
this.mediaRecorder
|
|
156
|
+
if (this.mediaRecorder) {
|
|
157
|
+
this.mediaRecorder.addEventListener("stop", () => {
|
|
158
|
+
clearTimeout(stopTimeout);
|
|
159
|
+
res();
|
|
160
|
+
}, { once: true });
|
|
161
|
+
} else {
|
|
162
|
+
clearTimeout(stopTimeout);
|
|
163
|
+
res();
|
|
164
|
+
}
|
|
99
165
|
});
|
|
100
166
|
try {
|
|
167
|
+
if (this.audioChunks.length === 0 || !this.audioChunks.some((chunk) => chunk.size > 0)) {
|
|
168
|
+
throw new Error("No audio data captured - video has no audio track");
|
|
169
|
+
}
|
|
101
170
|
const audioBlob = new Blob(this.audioChunks, { type: "audio/webm" });
|
|
171
|
+
if (audioBlob.size === 0) {
|
|
172
|
+
throw new Error("Audio blob is empty - video has no audio track");
|
|
173
|
+
}
|
|
102
174
|
const arrayBuffer = await audioBlob.arrayBuffer();
|
|
103
175
|
const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
|
|
176
|
+
if (audioBuffer.length === 0 || audioBuffer.duration === 0) {
|
|
177
|
+
throw new Error("Audio buffer is empty - video has no audio track");
|
|
178
|
+
}
|
|
104
179
|
resolve(audioBuffer);
|
|
105
180
|
} catch (err) {
|
|
106
181
|
reject(new Error(`Failed to decode recorded audio: ${err}`));
|
|
@@ -323,48 +398,115 @@ function getFFmpegBaseURL() {
|
|
|
323
398
|
return "/ffmpeg";
|
|
324
399
|
}
|
|
325
400
|
async function muxAudioVideo(options) {
|
|
401
|
+
const muxStartTime = Date.now();
|
|
326
402
|
try {
|
|
403
|
+
console.log("Starting FFmpeg muxing...");
|
|
404
|
+
console.log(` Video blob size: ${options.videoBlob.size} bytes (${(options.videoBlob.size / 1024 / 1024).toFixed(2)} MB)`);
|
|
405
|
+
console.log(` Audio buffer size: ${options.audioBuffer.byteLength} bytes (${(options.audioBuffer.byteLength / 1024 / 1024).toFixed(2)} MB)`);
|
|
327
406
|
const { FFmpeg } = await import("@ffmpeg/ffmpeg");
|
|
328
407
|
const { fetchFile } = await import("@ffmpeg/util");
|
|
329
408
|
const ffmpeg = new FFmpeg();
|
|
330
409
|
const base = getFFmpegBaseURL();
|
|
331
410
|
const coreURL = `${base}/ffmpeg-core.js`;
|
|
332
411
|
const wasmURL = `${base}/ffmpeg-core.wasm`;
|
|
412
|
+
console.log(`Loading FFmpeg from ${base}`);
|
|
413
|
+
const loadStartTime = Date.now();
|
|
333
414
|
await ffmpeg.load({
|
|
334
415
|
coreURL,
|
|
335
416
|
wasmURL
|
|
336
417
|
});
|
|
418
|
+
const loadDuration = Date.now() - loadStartTime;
|
|
419
|
+
console.log(`FFmpeg loaded successfully in ${loadDuration}ms`);
|
|
420
|
+
console.log("Writing video and audio files...");
|
|
421
|
+
const writeStartTime = Date.now();
|
|
337
422
|
await ffmpeg.writeFile(
|
|
338
423
|
"video.mp4",
|
|
339
424
|
await fetchFile(options.videoBlob)
|
|
340
425
|
);
|
|
426
|
+
console.log(` Video file written: ${options.videoBlob.size} bytes`);
|
|
341
427
|
await ffmpeg.writeFile(
|
|
342
428
|
"audio.wav",
|
|
343
429
|
new Uint8Array(options.audioBuffer)
|
|
344
430
|
);
|
|
431
|
+
const writeDuration = Date.now() - writeStartTime;
|
|
432
|
+
console.log(` Audio file written: ${options.audioBuffer.byteLength} bytes`);
|
|
433
|
+
console.log(`Files written successfully in ${writeDuration}ms`);
|
|
434
|
+
console.log("Executing FFmpeg muxing command...");
|
|
435
|
+
const execStartTime = Date.now();
|
|
436
|
+
const ffmpegLogs = [];
|
|
437
|
+
ffmpeg.on("log", ({ message }) => {
|
|
438
|
+
ffmpegLogs.push(message);
|
|
439
|
+
});
|
|
345
440
|
await ffmpeg.exec([
|
|
441
|
+
// Inputs
|
|
346
442
|
"-i",
|
|
347
443
|
"video.mp4",
|
|
348
444
|
"-i",
|
|
349
445
|
"audio.wav",
|
|
446
|
+
// Explicit stream mapping
|
|
447
|
+
"-map",
|
|
448
|
+
"0:v:0",
|
|
449
|
+
"-map",
|
|
450
|
+
"1:a:0",
|
|
451
|
+
// Re-encode video to a very standard H.264 stream.
|
|
452
|
+
// Copying the WebCodecs/mp4-wasm bitstream can sometimes
|
|
453
|
+
// lead to timing issues where only the first second renders.
|
|
350
454
|
"-c:v",
|
|
351
|
-
"
|
|
455
|
+
"libx264",
|
|
456
|
+
"-preset",
|
|
457
|
+
"veryfast",
|
|
458
|
+
"-crf",
|
|
459
|
+
"20",
|
|
460
|
+
// AAC audio
|
|
352
461
|
"-c:a",
|
|
353
462
|
"aac",
|
|
354
463
|
"-b:a",
|
|
355
464
|
"192k",
|
|
465
|
+
// Make MP4 more web‑friendly
|
|
466
|
+
"-movflags",
|
|
467
|
+
"+faststart",
|
|
468
|
+
// Stop at the shortest of the two streams
|
|
356
469
|
"-shortest",
|
|
357
470
|
"output.mp4"
|
|
358
471
|
]);
|
|
472
|
+
const execDuration = Date.now() - execStartTime;
|
|
473
|
+
console.log(`FFmpeg muxing completed in ${execDuration}ms`);
|
|
474
|
+
const readStartTime = Date.now();
|
|
359
475
|
const data = await ffmpeg.readFile("output.mp4");
|
|
476
|
+
const readDuration = Date.now() - readStartTime;
|
|
477
|
+
console.log(`Output file read successfully in ${readDuration}ms`);
|
|
360
478
|
const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
479
|
+
const result = new Blob([uint8], { type: "video/mp4" });
|
|
480
|
+
const totalDuration = Date.now() - muxStartTime;
|
|
481
|
+
console.log(`Muxing successful: ${result.size} bytes (${(result.size / 1024 / 1024).toFixed(2)} MB) in ${totalDuration}ms`);
|
|
482
|
+
console.log(` Breakdown: load=${loadDuration}ms, write=${writeDuration}ms, exec=${execDuration}ms, read=${readDuration}ms`);
|
|
483
|
+
return result;
|
|
484
|
+
} catch (error) {
|
|
485
|
+
const totalDuration = Date.now() - muxStartTime;
|
|
486
|
+
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
487
|
+
const errorStack = error instanceof Error ? error.stack : void 0;
|
|
488
|
+
console.error("FFmpeg muxing failed:", errorMsg);
|
|
489
|
+
if (errorStack) {
|
|
490
|
+
console.error("Error stack:", errorStack);
|
|
491
|
+
}
|
|
492
|
+
console.error("Error details:", {
|
|
493
|
+
errorType: error instanceof Error ? error.constructor.name : typeof error,
|
|
494
|
+
errorMessage: errorMsg,
|
|
495
|
+
duration: `${totalDuration}ms`,
|
|
496
|
+
videoBlobSize: options.videoBlob.size,
|
|
497
|
+
audioBufferSize: options.audioBuffer.byteLength
|
|
498
|
+
});
|
|
499
|
+
throw error;
|
|
364
500
|
}
|
|
365
501
|
}
|
|
366
502
|
|
|
367
503
|
// src/browser-renderer.ts
|
|
504
|
+
function isWindows() {
|
|
505
|
+
if (typeof navigator === "undefined") return false;
|
|
506
|
+
const ua = navigator.userAgent;
|
|
507
|
+
const plat = navigator.platform ?? "";
|
|
508
|
+
return /Win(dows|32|64|CE)/i.test(ua) || /Win/i.test(plat);
|
|
509
|
+
}
|
|
368
510
|
var BrowserWasmExporter = class _BrowserWasmExporter {
|
|
369
511
|
constructor(settings) {
|
|
370
512
|
this.settings = settings;
|
|
@@ -377,52 +519,111 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
|
|
|
377
519
|
onProgressCallback;
|
|
378
520
|
currentFrame = 0;
|
|
379
521
|
fps = 30;
|
|
522
|
+
/** On Windows, copy each frame to this canvas before creating VideoFrame to avoid invalid encoder output. */
|
|
523
|
+
copyCanvas = null;
|
|
524
|
+
/** On Windows use native VideoEncoder + Mediabunny with prefer-software so frames are actually encoded. */
|
|
525
|
+
useNativeEncoder = false;
|
|
526
|
+
nativeVideoEncoder = null;
|
|
527
|
+
nativeOutput = null;
|
|
528
|
+
nativePacketSource = null;
|
|
529
|
+
nativeAddPromise = Promise.resolve();
|
|
530
|
+
nativeFirstChunk = true;
|
|
380
531
|
static async create(settings) {
|
|
381
532
|
return new _BrowserWasmExporter(settings);
|
|
382
533
|
}
|
|
383
534
|
async start() {
|
|
535
|
+
const w = this.settings.size.x;
|
|
536
|
+
const h = this.settings.size.y;
|
|
537
|
+
const fps = this.fps;
|
|
538
|
+
if (isWindows()) {
|
|
539
|
+
try {
|
|
540
|
+
const { Output, BufferTarget, Mp4OutputFormat, EncodedVideoPacketSource, EncodedPacket } = await import("mediabunny");
|
|
541
|
+
const output = new Output({
|
|
542
|
+
format: new Mp4OutputFormat(),
|
|
543
|
+
target: new BufferTarget()
|
|
544
|
+
});
|
|
545
|
+
const packetSource = new EncodedVideoPacketSource("avc");
|
|
546
|
+
output.addVideoTrack(packetSource);
|
|
547
|
+
await output.start();
|
|
548
|
+
this.nativeOutput = output;
|
|
549
|
+
this.nativePacketSource = packetSource;
|
|
550
|
+
this.nativeAddPromise = Promise.resolve();
|
|
551
|
+
this.nativeFirstChunk = true;
|
|
552
|
+
const videoEncoder = new VideoEncoder({
|
|
553
|
+
output: (chunk, meta) => {
|
|
554
|
+
const packet = EncodedPacket.fromEncodedChunk(chunk);
|
|
555
|
+
const isFirst = this.nativeFirstChunk;
|
|
556
|
+
const metaArg = isFirst ? meta : void 0;
|
|
557
|
+
this.nativeFirstChunk = false;
|
|
558
|
+
this.nativeAddPromise = this.nativeAddPromise.then(
|
|
559
|
+
() => this.nativePacketSource.add(packet, metaArg)
|
|
560
|
+
);
|
|
561
|
+
},
|
|
562
|
+
error: (e) => console.error("[BrowserRender] VideoEncoder error:", e)
|
|
563
|
+
});
|
|
564
|
+
const bitrate = Math.max(5e5, w * h * fps * 0.1 | 0);
|
|
565
|
+
const config = {
|
|
566
|
+
codec: "avc1.42001f",
|
|
567
|
+
width: w,
|
|
568
|
+
height: h,
|
|
569
|
+
bitrate,
|
|
570
|
+
framerate: fps,
|
|
571
|
+
hardwareAcceleration: "prefer-software"
|
|
572
|
+
};
|
|
573
|
+
const support = await VideoEncoder.isConfigSupported(config);
|
|
574
|
+
if (!support.supported) {
|
|
575
|
+
delete config.hardwareAcceleration;
|
|
576
|
+
}
|
|
577
|
+
videoEncoder.configure(config);
|
|
578
|
+
this.nativeVideoEncoder = videoEncoder;
|
|
579
|
+
this.useNativeEncoder = true;
|
|
580
|
+
this.copyCanvas = document.createElement("canvas");
|
|
581
|
+
this.copyCanvas.width = w;
|
|
582
|
+
this.copyCanvas.height = h;
|
|
583
|
+
return;
|
|
584
|
+
} catch {
|
|
585
|
+
this.useNativeEncoder = false;
|
|
586
|
+
this.nativeVideoEncoder = null;
|
|
587
|
+
this.nativeOutput = null;
|
|
588
|
+
this.nativePacketSource = null;
|
|
589
|
+
}
|
|
590
|
+
}
|
|
384
591
|
try {
|
|
385
592
|
const loadMp4Module = (await import("mp4-wasm")).default;
|
|
386
593
|
const possiblePaths = [
|
|
387
|
-
// Vite dev server virtual path
|
|
388
594
|
"/@mp4-wasm",
|
|
389
|
-
// Common bundled asset paths (Vite uses hashed names)
|
|
390
595
|
"/assets/mp4-wasm.wasm",
|
|
391
596
|
"/assets/mp4-YBRi_559.wasm",
|
|
392
|
-
// Known Vite hash
|
|
393
597
|
"/mp4-wasm.wasm",
|
|
394
|
-
// Node modules path (for dev)
|
|
395
598
|
"/node_modules/mp4-wasm/dist/mp4-wasm.wasm"
|
|
396
599
|
];
|
|
397
600
|
let buffer = null;
|
|
398
|
-
let successPath = "";
|
|
399
601
|
for (const path of possiblePaths) {
|
|
400
602
|
try {
|
|
401
603
|
const resp = await fetch(path);
|
|
402
604
|
if (resp.ok) {
|
|
403
605
|
const contentType = resp.headers.get("content-type");
|
|
404
|
-
if (contentType && contentType.includes("html"))
|
|
405
|
-
continue;
|
|
406
|
-
}
|
|
606
|
+
if (contentType && contentType.includes("html")) continue;
|
|
407
607
|
buffer = await resp.arrayBuffer();
|
|
408
|
-
successPath = path;
|
|
409
608
|
break;
|
|
410
609
|
}
|
|
411
|
-
} catch
|
|
610
|
+
} catch {
|
|
412
611
|
continue;
|
|
413
612
|
}
|
|
414
613
|
}
|
|
415
614
|
if (!buffer) {
|
|
615
|
+
console.error("[BrowserRender] Exporter start: no WASM buffer from any path");
|
|
416
616
|
throw new Error(
|
|
417
617
|
"Could not load WASM file from any location. Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it."
|
|
418
618
|
);
|
|
419
619
|
}
|
|
420
620
|
const mp4 = await loadMp4Module({ wasmBinary: buffer });
|
|
421
|
-
this.encoder = mp4.createWebCodecsEncoder({
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
621
|
+
this.encoder = mp4.createWebCodecsEncoder({ width: w, height: h, fps });
|
|
622
|
+
if (isWindows()) {
|
|
623
|
+
this.copyCanvas = document.createElement("canvas");
|
|
624
|
+
this.copyCanvas.width = w;
|
|
625
|
+
this.copyCanvas.height = h;
|
|
626
|
+
}
|
|
426
627
|
} catch (error) {
|
|
427
628
|
throw error;
|
|
428
629
|
}
|
|
@@ -430,37 +631,108 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
|
|
|
430
631
|
async handleFrame(canvas, frameNumber) {
|
|
431
632
|
const frameIndex = frameNumber !== void 0 ? frameNumber : this.currentFrame;
|
|
432
633
|
const timestampMicroseconds = Math.round(frameIndex / this.fps * 1e6);
|
|
433
|
-
const
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
634
|
+
const durationMicroseconds = Math.round(1 / this.fps * 1e6);
|
|
635
|
+
let sourceCanvas = canvas;
|
|
636
|
+
if (this.copyCanvas) {
|
|
637
|
+
if (this.copyCanvas.width !== canvas.width || this.copyCanvas.height !== canvas.height) {
|
|
638
|
+
this.copyCanvas.width = canvas.width;
|
|
639
|
+
this.copyCanvas.height = canvas.height;
|
|
640
|
+
}
|
|
641
|
+
const ctx = this.copyCanvas.getContext("2d");
|
|
642
|
+
if (ctx) {
|
|
643
|
+
ctx.imageSmoothingEnabled = false;
|
|
644
|
+
ctx.drawImage(canvas, 0, 0);
|
|
645
|
+
sourceCanvas = this.copyCanvas;
|
|
646
|
+
}
|
|
647
|
+
}
|
|
648
|
+
if (this.useNativeEncoder && this.nativeVideoEncoder) {
|
|
649
|
+
const bitmap = await createImageBitmap(sourceCanvas);
|
|
650
|
+
const frame = new VideoFrame(bitmap, {
|
|
651
|
+
timestamp: timestampMicroseconds,
|
|
652
|
+
duration: durationMicroseconds
|
|
653
|
+
});
|
|
654
|
+
this.nativeVideoEncoder.encode(frame, { keyFrame: frameIndex === 0 });
|
|
655
|
+
frame.close();
|
|
656
|
+
bitmap.close();
|
|
657
|
+
} else {
|
|
658
|
+
let frame;
|
|
659
|
+
if (isWindows() && typeof createImageBitmap === "function") {
|
|
660
|
+
const bitmap = await createImageBitmap(sourceCanvas);
|
|
661
|
+
frame = new VideoFrame(bitmap, {
|
|
662
|
+
timestamp: timestampMicroseconds,
|
|
663
|
+
duration: durationMicroseconds
|
|
664
|
+
});
|
|
665
|
+
await this.encoder.addFrame(frame);
|
|
666
|
+
frame.close();
|
|
667
|
+
bitmap.close();
|
|
668
|
+
} else {
|
|
669
|
+
frame = new VideoFrame(sourceCanvas, {
|
|
670
|
+
timestamp: timestampMicroseconds,
|
|
671
|
+
duration: durationMicroseconds
|
|
672
|
+
});
|
|
673
|
+
await this.encoder.addFrame(frame);
|
|
674
|
+
frame.close();
|
|
675
|
+
}
|
|
676
|
+
}
|
|
439
677
|
if (frameNumber === void 0) {
|
|
440
678
|
this.currentFrame++;
|
|
441
679
|
}
|
|
442
680
|
}
|
|
443
681
|
async stop() {
|
|
682
|
+
if (this.useNativeEncoder && this.nativeVideoEncoder && this.nativeOutput && this.nativePacketSource) {
|
|
683
|
+
await this.nativeVideoEncoder.flush();
|
|
684
|
+
this.nativeVideoEncoder.close();
|
|
685
|
+
this.nativeVideoEncoder = null;
|
|
686
|
+
await this.nativeAddPromise;
|
|
687
|
+
this.nativePacketSource.close();
|
|
688
|
+
await this.nativeOutput.finalize();
|
|
689
|
+
const buf2 = this.nativeOutput.target.buffer;
|
|
690
|
+
this.nativeOutput = null;
|
|
691
|
+
this.nativePacketSource = null;
|
|
692
|
+
this.videoBlob = new Blob([buf2], { type: "video/mp4" });
|
|
693
|
+
return;
|
|
694
|
+
}
|
|
444
695
|
const buf = await this.encoder.end();
|
|
445
|
-
|
|
696
|
+
const copy = buf instanceof ArrayBuffer ? buf.slice(0) : new Uint8Array(buf).slice().buffer;
|
|
697
|
+
this.videoBlob = new Blob([copy], { type: "video/mp4" });
|
|
446
698
|
}
|
|
447
|
-
async generateAudio(assets, startFrame, endFrame) {
|
|
699
|
+
async generateAudio(assets, startFrame, endFrame, onProgress) {
|
|
448
700
|
try {
|
|
449
701
|
const processor = new BrowserAudioProcessor();
|
|
450
702
|
const assetPlacements = getAssetPlacement(assets);
|
|
451
703
|
if (assetPlacements.length === 0) {
|
|
452
704
|
return null;
|
|
453
705
|
}
|
|
706
|
+
const processableCount = assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length;
|
|
707
|
+
let processedCount = 0;
|
|
454
708
|
const processedBuffers = [];
|
|
455
|
-
for (
|
|
709
|
+
for (let i = 0; i < assetPlacements.length; i++) {
|
|
710
|
+
const asset = assetPlacements[i];
|
|
456
711
|
if (asset.volume > 0 && asset.playbackRate > 0) {
|
|
457
712
|
try {
|
|
458
|
-
|
|
713
|
+
if (asset.type === "video") {
|
|
714
|
+
try {
|
|
715
|
+
const assetHasAudio = await (0, import_media_utils.hasAudio)(asset.src);
|
|
716
|
+
if (!assetHasAudio) continue;
|
|
717
|
+
} catch {
|
|
718
|
+
}
|
|
719
|
+
}
|
|
720
|
+
const processPromise = processor.processAudioAsset(
|
|
459
721
|
asset,
|
|
460
722
|
this.settings.fps || 30,
|
|
461
723
|
endFrame - startFrame
|
|
462
724
|
);
|
|
725
|
+
const timeoutPromise = new Promise((_, reject) => {
|
|
726
|
+
setTimeout(() => {
|
|
727
|
+
reject(new Error(`Timeout processing audio asset after 20s - video may have no audio track`));
|
|
728
|
+
}, 2e4);
|
|
729
|
+
});
|
|
730
|
+
const buffer = await Promise.race([processPromise, timeoutPromise]);
|
|
463
731
|
processedBuffers.push(buffer);
|
|
732
|
+
processedCount++;
|
|
733
|
+
if (onProgress && processableCount > 0) {
|
|
734
|
+
onProgress(processedCount / processableCount);
|
|
735
|
+
}
|
|
464
736
|
} catch {
|
|
465
737
|
}
|
|
466
738
|
}
|
|
@@ -468,11 +740,18 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
|
|
|
468
740
|
if (processedBuffers.length === 0) {
|
|
469
741
|
return null;
|
|
470
742
|
}
|
|
743
|
+
if (onProgress) onProgress(0.85);
|
|
471
744
|
const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
|
|
745
|
+
if (onProgress) onProgress(0.92);
|
|
472
746
|
const wavData = processor.audioBufferToWav(mixedBuffer);
|
|
747
|
+
if (onProgress) onProgress(1);
|
|
473
748
|
await processor.close();
|
|
474
749
|
return wavData;
|
|
475
|
-
} catch {
|
|
750
|
+
} catch (error) {
|
|
751
|
+
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
752
|
+
const errorStack = error instanceof Error ? error.stack : void 0;
|
|
753
|
+
console.error("[BrowserRender] Audio generation error:", errorMsg);
|
|
754
|
+
if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
|
|
476
755
|
return null;
|
|
477
756
|
}
|
|
478
757
|
}
|
|
@@ -512,6 +791,7 @@ var renderTwickVideoInBrowser = async (config) => {
|
|
|
512
791
|
try {
|
|
513
792
|
const { projectFile, variables, settings = {} } = config;
|
|
514
793
|
if (!variables || !variables.input) {
|
|
794
|
+
console.error("[BrowserRender] renderTwickVideoInBrowser: missing variables.input");
|
|
515
795
|
throw new Error('Invalid configuration. "variables.input" is required.');
|
|
516
796
|
}
|
|
517
797
|
const width = settings.width || variables.input.properties?.width || 1920;
|
|
@@ -546,20 +826,27 @@ var renderTwickVideoInBrowser = async (config) => {
|
|
|
546
826
|
renderer.playback.state = 1;
|
|
547
827
|
const totalFrames = await renderer.getNumberOfFrames(renderSettings);
|
|
548
828
|
if (totalFrames === 0 || !isFinite(totalFrames)) {
|
|
829
|
+
console.error("[BrowserRender] renderTwickVideoInBrowser: invalid totalFrames", totalFrames);
|
|
549
830
|
throw new Error(
|
|
550
831
|
"Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
|
|
551
832
|
);
|
|
552
833
|
}
|
|
553
834
|
const videoElements = [];
|
|
835
|
+
const audioElements = [];
|
|
554
836
|
if (variables.input.tracks) {
|
|
555
837
|
variables.input.tracks.forEach((track) => {
|
|
556
838
|
if (track.elements) {
|
|
557
839
|
track.elements.forEach((el) => {
|
|
558
840
|
if (el.type === "video") videoElements.push(el);
|
|
841
|
+
if (el.type === "audio") audioElements.push(el);
|
|
559
842
|
});
|
|
560
843
|
}
|
|
561
844
|
});
|
|
562
845
|
}
|
|
846
|
+
let hasAnyAudio = false;
|
|
847
|
+
if (settings.includeAudio && audioElements.length > 0) {
|
|
848
|
+
hasAnyAudio = true;
|
|
849
|
+
}
|
|
563
850
|
if (videoElements.length > 0) {
|
|
564
851
|
for (const videoEl of videoElements) {
|
|
565
852
|
const src = videoEl.props?.src;
|
|
@@ -583,6 +870,14 @@ var renderTwickVideoInBrowser = async (config) => {
|
|
|
583
870
|
reject(new Error(`Failed to load video: ${err?.message || "Unknown error"}`));
|
|
584
871
|
}, { once: true });
|
|
585
872
|
});
|
|
873
|
+
if (settings.includeAudio) {
|
|
874
|
+
try {
|
|
875
|
+
const videoHasAudio = await (0, import_media_utils.hasAudio)(src);
|
|
876
|
+
if (videoHasAudio) hasAnyAudio = true;
|
|
877
|
+
} catch {
|
|
878
|
+
hasAnyAudio = true;
|
|
879
|
+
}
|
|
880
|
+
}
|
|
586
881
|
}
|
|
587
882
|
}
|
|
588
883
|
await renderer.playback.recalculate();
|
|
@@ -601,33 +896,102 @@ var renderTwickVideoInBrowser = async (config) => {
|
|
|
601
896
|
mediaAssets.push(currentAssets);
|
|
602
897
|
const canvas = renderer.stage.finalBuffer;
|
|
603
898
|
await exporter.handleFrame(canvas, frame);
|
|
604
|
-
if (settings.onProgress) settings.onProgress(frame / totalFrames);
|
|
899
|
+
if (settings.onProgress) settings.onProgress(frame / totalFrames * 0.9);
|
|
605
900
|
}
|
|
606
901
|
await exporter.stop();
|
|
902
|
+
if (audioElements.length > 0 && settings.includeAudio) {
|
|
903
|
+
for (let frame = 0; frame < mediaAssets.length; frame++) {
|
|
904
|
+
const timeInSec = frame / fps;
|
|
905
|
+
for (const el of audioElements) {
|
|
906
|
+
const s = typeof el.s === "number" ? el.s : 0;
|
|
907
|
+
const e = typeof el.e === "number" ? el.e : Number.MAX_VALUE;
|
|
908
|
+
if (timeInSec >= s && timeInSec < e && el.props?.src) {
|
|
909
|
+
const playbackRate = el.props.playbackRate ?? 1;
|
|
910
|
+
const volume = el.props.volume ?? 1;
|
|
911
|
+
const trimStart = el.props.time ?? 0;
|
|
912
|
+
const currentTime = (timeInSec - s) * playbackRate + trimStart;
|
|
913
|
+
mediaAssets[frame].push({
|
|
914
|
+
key: el.id,
|
|
915
|
+
src: el.props.src,
|
|
916
|
+
type: "audio",
|
|
917
|
+
currentTime,
|
|
918
|
+
playbackRate,
|
|
919
|
+
volume
|
|
920
|
+
});
|
|
921
|
+
}
|
|
922
|
+
}
|
|
923
|
+
}
|
|
924
|
+
}
|
|
607
925
|
let audioData = null;
|
|
608
|
-
if (settings.includeAudio && mediaAssets.length > 0) {
|
|
609
|
-
|
|
926
|
+
if (settings.includeAudio && mediaAssets.length > 0 && hasAnyAudio) {
|
|
927
|
+
const reportAudioProgress = (p) => {
|
|
928
|
+
if (settings.onProgress) settings.onProgress(0.9 + p * 0.07);
|
|
929
|
+
};
|
|
930
|
+
try {
|
|
931
|
+
audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames, reportAudioProgress);
|
|
932
|
+
if (settings.onProgress) settings.onProgress(0.97);
|
|
933
|
+
} catch (audioError) {
|
|
934
|
+
const errorMsg = audioError instanceof Error ? audioError.message : String(audioError);
|
|
935
|
+
const errorStack = audioError instanceof Error ? audioError.stack : void 0;
|
|
936
|
+
console.error("[BrowserRender] Audio generation failed:", errorMsg);
|
|
937
|
+
if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
|
|
938
|
+
audioData = null;
|
|
939
|
+
}
|
|
610
940
|
}
|
|
611
941
|
let finalBlob = exporter.getVideoBlob();
|
|
612
942
|
if (!finalBlob) {
|
|
943
|
+
console.error("[BrowserRender] renderTwickVideoInBrowser: getVideoBlob returned null");
|
|
613
944
|
throw new Error("Failed to create video blob");
|
|
614
945
|
}
|
|
946
|
+
if (finalBlob.size === 0) {
|
|
947
|
+
console.error("[BrowserRender] renderTwickVideoInBrowser: video blob size is 0");
|
|
948
|
+
throw new Error("Video blob is empty. Rendering may have failed.");
|
|
949
|
+
}
|
|
950
|
+
const MIN_VIDEO_BLOB_BYTES = 1024;
|
|
951
|
+
if (finalBlob.size < MIN_VIDEO_BLOB_BYTES) {
|
|
952
|
+
console.error("[BrowserRender] renderTwickVideoInBrowser: video blob too small", finalBlob.size, "bytes (expected >=", MIN_VIDEO_BLOB_BYTES, ")");
|
|
953
|
+
throw new Error(
|
|
954
|
+
`Video blob is too small (${finalBlob.size} bytes). No video frames were encoded. This often happens on Windows when the encoder does not accept the frame format from canvas. Try using a different browser or updating graphics drivers.`
|
|
955
|
+
);
|
|
956
|
+
}
|
|
615
957
|
if (audioData && settings.includeAudio) {
|
|
958
|
+
if (settings.onProgress) settings.onProgress(0.98);
|
|
616
959
|
try {
|
|
617
|
-
|
|
960
|
+
const muxedBlob = await muxAudioVideo({
|
|
618
961
|
videoBlob: finalBlob,
|
|
619
962
|
audioBuffer: audioData
|
|
620
963
|
});
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
964
|
+
if (!muxedBlob || muxedBlob.size === 0) {
|
|
965
|
+
throw new Error("Muxed video blob is empty");
|
|
966
|
+
}
|
|
967
|
+
finalBlob = muxedBlob;
|
|
968
|
+
} catch (muxError) {
|
|
969
|
+
const errorMsg = muxError instanceof Error ? muxError.message : String(muxError);
|
|
970
|
+
const errorStack = muxError instanceof Error ? muxError.stack : void 0;
|
|
971
|
+
console.error("[BrowserRender] Audio muxing failed:", errorMsg);
|
|
972
|
+
if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
|
|
973
|
+
if (settings.downloadAudioSeparately && audioData) {
|
|
974
|
+
const audioBlob = new Blob([audioData], { type: "audio/wav" });
|
|
975
|
+
const audioUrl = URL.createObjectURL(audioBlob);
|
|
976
|
+
const a = document.createElement("a");
|
|
977
|
+
a.href = audioUrl;
|
|
978
|
+
a.download = "audio.wav";
|
|
979
|
+
a.click();
|
|
980
|
+
URL.revokeObjectURL(audioUrl);
|
|
981
|
+
}
|
|
982
|
+
finalBlob = exporter.getVideoBlob();
|
|
983
|
+
if (!finalBlob || finalBlob.size === 0) {
|
|
984
|
+
throw new Error("Video blob is invalid after muxing failure");
|
|
985
|
+
}
|
|
629
986
|
}
|
|
630
987
|
}
|
|
988
|
+
if (!finalBlob || finalBlob.size === 0) {
|
|
989
|
+
console.error("[BrowserRender] renderTwickVideoInBrowser: final blob invalid", finalBlob?.size);
|
|
990
|
+
throw new Error("Final video blob is empty or invalid");
|
|
991
|
+
}
|
|
992
|
+
if (settings.onProgress) {
|
|
993
|
+
settings.onProgress(1);
|
|
994
|
+
}
|
|
631
995
|
if (settings.onComplete) {
|
|
632
996
|
settings.onComplete(finalBlob);
|
|
633
997
|
}
|
|
@@ -652,9 +1016,112 @@ var downloadVideoBlob = (videoBlob, filename = "video.mp4") => {
|
|
|
652
1016
|
document.body.appendChild(a);
|
|
653
1017
|
a.click();
|
|
654
1018
|
document.body.removeChild(a);
|
|
655
|
-
|
|
1019
|
+
const revokeMs = isWindows() ? 5e3 : 1e3;
|
|
1020
|
+
setTimeout(() => URL.revokeObjectURL(url), revokeMs);
|
|
656
1021
|
};
|
|
657
1022
|
|
|
1023
|
+
// src/audio/video-normalizer.ts
|
|
1024
|
+
// Resolve the base URL that serves the ffmpeg core assets
// (ffmpeg-core.js / ffmpeg-core.wasm). In a browser we anchor to the
// current origin; in non-browser contexts (SSR, tests) we fall back to
// a root-relative path.
function getFFmpegBaseURL2() {
  const hasWindow = typeof window !== "undefined";
  return hasWindow ? `${window.location.origin}/ffmpeg` : "/ffmpeg";
}
|
|
1030
|
+
/**
 * Re-encode a video Blob into a normalized, web-friendly MP4 using an
 * in-browser ffmpeg.wasm instance: H.264 (yuv420p, main profile) video at
 * a fixed width/fps plus stereo 48kHz AAC audio, with `+faststart` so the
 * moov atom is front-loaded for streaming playback.
 *
 * @param {Blob} input - Source video blob to normalize.
 * @param {{ width?: number, fps?: number }} [options] - Target width
 *   (default 720; height derived to keep aspect, rounded to even) and
 *   target frame rate (default 30).
 * @returns {Promise<{ blob: Blob, size: number, debug: { loadMs: number, writeMs: number, execMs: number, readMs: number, totalMs: number } }>}
 *   The normalized MP4 blob plus per-phase timing info.
 * @throws Re-throws any ffmpeg load/exec/read failure after logging it.
 */
async function normalizeVideoBlob(input, options = {}) {
  const startTime = Date.now();
  const targetWidth = options.width ?? 720;
  const targetFps = options.fps ?? 30;
  let ffmpeg;
  try {
    console.log("[VideoNormalizer] Starting normalization...");
    console.log(`[VideoNormalizer] Input size: ${input.size} bytes (${(input.size / 1024 / 1024).toFixed(2)} MB)`);
    // Lazy-load the ffmpeg.wasm packages so they are only fetched when
    // normalization is actually requested.
    const { FFmpeg } = await import("@ffmpeg/ffmpeg");
    const { fetchFile } = await import("@ffmpeg/util");
    ffmpeg = new FFmpeg();
    // Mirror ffmpeg's own log output before load/exec so failures are
    // diagnosable from the console.
    ffmpeg.on("log", ({ message }) => {
      console.log(`[VideoNormalizer:FFmpeg] ${message}`);
    });
    const base = getFFmpegBaseURL2();
    const coreURL = `${base}/ffmpeg-core.js`;
    const wasmURL = `${base}/ffmpeg-core.wasm`;
    console.log(`[VideoNormalizer] Loading FFmpeg from ${base}`);
    const loadStart = Date.now();
    await ffmpeg.load({ coreURL, wasmURL });
    const loadMs = Date.now() - loadStart;
    console.log(`[VideoNormalizer] FFmpeg loaded in ${loadMs}ms`);
    console.log("[VideoNormalizer] Writing input file...");
    const writeStart = Date.now();
    await ffmpeg.writeFile("in.mp4", await fetchFile(input));
    const writeMs = Date.now() - writeStart;
    console.log(`[VideoNormalizer] Input file written in ${writeMs}ms`);
    console.log("[VideoNormalizer] Executing normalization command...");
    const execStart = Date.now();
    await ffmpeg.exec([
      "-i",
      "in.mp4",
      // Normalize geometry & frame rate; `-2` keeps the derived height even,
      // which yuv420p/H.264 requires.
      "-vf",
      `scale=${targetWidth}:-2,fps=${targetFps},format=yuv420p`,
      // Standard H.264 video
      "-c:v",
      "libx264",
      "-preset",
      "veryfast",
      "-crf",
      "20",
      "-pix_fmt",
      "yuv420p",
      "-profile:v",
      "main",
      "-r",
      String(targetFps),
      // AAC audio, stereo, 48kHz
      "-c:a",
      "aac",
      "-b:a",
      "128k",
      "-ar",
      "48000",
      "-ac",
      "2",
      // Web-friendly MP4: moov atom at the front for progressive playback
      "-movflags",
      "+faststart",
      "out.mp4"
    ]);
    const execMs = Date.now() - execStart;
    console.log(`[VideoNormalizer] Normalization completed in ${execMs}ms`);
    const readStart = Date.now();
    const data = await ffmpeg.readFile("out.mp4");
    const readMs = Date.now() - readStart;
    console.log(`[VideoNormalizer] Output file read in ${readMs}ms`);
    // readFile returns a string for text reads; normalize to bytes either way.
    const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
    const blob = new Blob([uint8], { type: "video/mp4" });
    const totalMs = Date.now() - startTime;
    console.log(`[VideoNormalizer] Normalization successful: ${blob.size} bytes (${(blob.size / 1024 / 1024).toFixed(2)} MB) in ${totalMs}ms`);
    return {
      blob,
      size: blob.size,
      debug: {
        loadMs,
        writeMs,
        execMs,
        readMs,
        totalMs
      }
    };
  } catch (error) {
    const totalMs = Date.now() - startTime;
    const msg = error instanceof Error ? error.message : String(error);
    const stack = error instanceof Error ? error.stack : void 0;
    console.error("[VideoNormalizer] Normalization failed:", msg);
    if (stack) {
      console.error("[VideoNormalizer] Stack:", stack);
    }
    console.error("[VideoNormalizer] Duration:", `${totalMs}ms`);
    throw error;
  } finally {
    // Tear down the wasm worker. Without this every call leaks a whole
    // FFmpeg instance — including "in.mp4"/"out.mp4" still sitting in its
    // in-memory FS — which quickly exhausts memory on repeated use.
    try {
      ffmpeg?.terminate();
    } catch {
      // Best-effort cleanup; terminate may throw if load never completed.
    }
  }
}
|
|
1124
|
+
|
|
658
1125
|
// src/hooks/use-browser-renderer.ts
|
|
659
1126
|
var import_react = require("react");
|
|
660
1127
|
var useBrowserRenderer = (options = {}) => {
|
|
@@ -701,6 +1168,7 @@ var useBrowserRenderer = (options = {}) => {
|
|
|
701
1168
|
setProgress(p);
|
|
702
1169
|
},
|
|
703
1170
|
onComplete: (blob2) => {
|
|
1171
|
+
console.log("[BrowserRender] useBrowserRenderer: onComplete received blob", blob2 ? `size=${blob2.size} type=${blob2.type}` : "null");
|
|
704
1172
|
setVideoBlob(blob2);
|
|
705
1173
|
if (autoDownload) {
|
|
706
1174
|
try {
|
|
@@ -711,6 +1179,7 @@ var useBrowserRenderer = (options = {}) => {
|
|
|
711
1179
|
}
|
|
712
1180
|
},
|
|
713
1181
|
onError: (err) => {
|
|
1182
|
+
console.error("[BrowserRender] useBrowserRenderer: onError", err?.message);
|
|
714
1183
|
setError(err);
|
|
715
1184
|
}
|
|
716
1185
|
}
|
|
@@ -722,6 +1191,8 @@ var useBrowserRenderer = (options = {}) => {
|
|
|
722
1191
|
setProgress(1);
|
|
723
1192
|
return blob;
|
|
724
1193
|
} catch (err) {
|
|
1194
|
+
const errorMsg = err instanceof Error ? err.message : String(err);
|
|
1195
|
+
console.error("[BrowserRender] useBrowserRenderer: render failed", errorMsg);
|
|
725
1196
|
setError(err instanceof Error ? err : new Error(String(err)));
|
|
726
1197
|
return null;
|
|
727
1198
|
} finally {
|
|
@@ -741,6 +1212,7 @@ var useBrowserRenderer = (options = {}) => {
|
|
|
741
1212
|
// Annotate the CommonJS export names for ESM import in node:
|
|
742
1213
|
0 && (module.exports = {
|
|
743
1214
|
downloadVideoBlob,
|
|
1215
|
+
normalizeVideoBlob,
|
|
744
1216
|
renderTwickVideoInBrowser,
|
|
745
1217
|
useBrowserRenderer
|
|
746
1218
|
});
|