@twick/browser-render 0.15.8 → 0.15.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +41 -0
- package/dist/index.d.mts +45 -1
- package/dist/index.d.ts +45 -1
- package/dist/index.js +530 -58
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +529 -58
- package/dist/index.mjs.map +1 -1
- package/dist/vite-plugin-ffmpeg.d.mts +18 -0
- package/dist/vite-plugin-ffmpeg.d.ts +18 -0
- package/dist/vite-plugin-ffmpeg.js +156 -0
- package/dist/vite-plugin-ffmpeg.js.map +1 -0
- package/dist/vite-plugin-ffmpeg.mjs +120 -0
- package/dist/vite-plugin-ffmpeg.mjs.map +1 -0
- package/package.json +14 -5
- package/scripts/copy-public-assets.js +124 -0
- package/scripts/copy-wasm.js +19 -0
package/dist/index.mjs
CHANGED
```diff
@@ -1,6 +1,7 @@
 // src/browser-renderer.ts
 import { Renderer, Vector2 } from "@twick/core";
 import defaultProject from "@twick/visualizer/dist/project.js";
+import { hasAudio } from "@twick/media-utils";
 
 // src/audio/video-audio-extractor.ts
 var VideoElementAudioExtractor = class {
@@ -28,40 +29,113 @@ var VideoElementAudioExtractor = class {
    * Extract audio by playing the video and capturing audio output
    */
   async extractAudio(startTime, duration, playbackRate = 1) {
-
-
-
+    try {
+      const source = this.audioContext.createMediaElementSource(this.video);
+      this.destination = this.audioContext.createMediaStreamDestination();
+      source.connect(this.destination);
+    } catch (err) {
+      throw new Error("Video has no audio track");
+    }
     this.audioChunks = [];
-
-
-
+    let mimeType = "audio/webm";
+    if (!MediaRecorder.isTypeSupported(mimeType)) {
+      mimeType = "";
+    }
+    try {
+      this.mediaRecorder = new MediaRecorder(this.destination.stream, {
+        mimeType: mimeType || void 0
+      });
+    } catch (err) {
+      throw new Error(`Failed to create MediaRecorder: ${err}. Video may have no audio track.`);
+    }
     this.mediaRecorder.ondataavailable = (event) => {
-      if (event.data.size > 0) {
+      if (event.data && event.data.size > 0) {
        this.audioChunks.push(event.data);
       }
     };
     this.video.currentTime = startTime;
     this.video.playbackRate = playbackRate;
-    await new Promise((resolve) => {
-
+    await new Promise((resolve, reject) => {
+      const seekTimeout = setTimeout(() => {
+        reject(new Error("Video seek timeout"));
+      }, 5e3);
+      this.video.addEventListener("seeked", () => {
+        clearTimeout(seekTimeout);
+        resolve();
+      }, { once: true });
+      this.video.addEventListener("error", () => {
+        clearTimeout(seekTimeout);
+        reject(new Error("Video seek error"));
+      }, { once: true });
     });
     return new Promise((resolve, reject) => {
       const recordingTimeout = setTimeout(() => {
-
-
-
-
+        this.video.pause();
+        if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+          this.mediaRecorder.stop();
+        }
+        reject(new Error("Audio extraction timeout - video may have no audio track"));
+      }, (duration / playbackRate + 10) * 1e3);
+      let hasData = false;
+      const dataCheckInterval = setInterval(() => {
+        if (this.audioChunks.length > 0 && this.audioChunks.some((chunk) => chunk.size > 0)) {
+          hasData = true;
+        }
+      }, 1e3);
+      this.mediaRecorder.onerror = (event) => {
+        clearInterval(dataCheckInterval);
+        clearTimeout(recordingTimeout);
+        this.video.pause();
+        reject(new Error(`MediaRecorder error: ${event}. Video may have no audio track.`));
+      };
+      try {
+        this.mediaRecorder.start(100);
+        this.video.play().catch((playErr) => {
+          clearInterval(dataCheckInterval);
+          clearTimeout(recordingTimeout);
+          reject(new Error(`Failed to play video: ${playErr}`));
+        });
+      } catch (startErr) {
+        clearInterval(dataCheckInterval);
+        clearTimeout(recordingTimeout);
+        reject(new Error(`Failed to start recording: ${startErr}`));
+      }
       setTimeout(async () => {
+        clearInterval(dataCheckInterval);
         clearTimeout(recordingTimeout);
         this.video.pause();
-        this.mediaRecorder.
+        if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
+          this.mediaRecorder.stop();
+        }
+        const stopTimeout = setTimeout(() => {
+          if (this.audioChunks.length === 0 || !hasData) {
+            reject(new Error("No audio data captured - video has no audio track"));
+          }
+        }, 2e3);
         await new Promise((res) => {
-          this.mediaRecorder
+          if (this.mediaRecorder) {
+            this.mediaRecorder.addEventListener("stop", () => {
+              clearTimeout(stopTimeout);
+              res();
+            }, { once: true });
+          } else {
+            clearTimeout(stopTimeout);
+            res();
+          }
         });
         try {
+          if (this.audioChunks.length === 0 || !this.audioChunks.some((chunk) => chunk.size > 0)) {
+            throw new Error("No audio data captured - video has no audio track");
+          }
           const audioBlob = new Blob(this.audioChunks, { type: "audio/webm" });
+          if (audioBlob.size === 0) {
+            throw new Error("Audio blob is empty - video has no audio track");
+          }
           const arrayBuffer = await audioBlob.arrayBuffer();
           const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
+          if (audioBuffer.length === 0 || audioBuffer.duration === 0) {
+            throw new Error("Audio buffer is empty - video has no audio track");
+          }
           resolve(audioBuffer);
         } catch (err) {
           reject(new Error(`Failed to decode recorded audio: ${err}`));
```
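The recording timeout in this hunk scales with the requested clip: `(duration / playbackRate + 10) * 1e3` milliseconds, i.e. the wall-clock playback time plus a ten-second safety margin. A quick check of that arithmetic (the values here are illustrative, not from the package):

```ts
// Illustrative only: the wall-clock budget used for the MediaRecorder timeout above.
const duration = 30;    // seconds of media to capture (example value)
const playbackRate = 2; // captured at 2x speed (example value)
const timeoutMs = (duration / playbackRate + 10) * 1e3;
console.log(timeoutMs); // 25000 — 15s of real-time playback plus the 10s margin
```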
```diff
@@ -284,48 +358,115 @@ function getFFmpegBaseURL() {
   return "/ffmpeg";
 }
 async function muxAudioVideo(options) {
+  const muxStartTime = Date.now();
   try {
+    console.log("Starting FFmpeg muxing...");
+    console.log(` Video blob size: ${options.videoBlob.size} bytes (${(options.videoBlob.size / 1024 / 1024).toFixed(2)} MB)`);
+    console.log(` Audio buffer size: ${options.audioBuffer.byteLength} bytes (${(options.audioBuffer.byteLength / 1024 / 1024).toFixed(2)} MB)`);
     const { FFmpeg } = await import("@ffmpeg/ffmpeg");
     const { fetchFile } = await import("@ffmpeg/util");
     const ffmpeg = new FFmpeg();
     const base = getFFmpegBaseURL();
     const coreURL = `${base}/ffmpeg-core.js`;
     const wasmURL = `${base}/ffmpeg-core.wasm`;
+    console.log(`Loading FFmpeg from ${base}`);
+    const loadStartTime = Date.now();
     await ffmpeg.load({
       coreURL,
       wasmURL
     });
+    const loadDuration = Date.now() - loadStartTime;
+    console.log(`FFmpeg loaded successfully in ${loadDuration}ms`);
+    console.log("Writing video and audio files...");
+    const writeStartTime = Date.now();
     await ffmpeg.writeFile(
       "video.mp4",
       await fetchFile(options.videoBlob)
     );
+    console.log(` Video file written: ${options.videoBlob.size} bytes`);
     await ffmpeg.writeFile(
       "audio.wav",
       new Uint8Array(options.audioBuffer)
     );
+    const writeDuration = Date.now() - writeStartTime;
+    console.log(` Audio file written: ${options.audioBuffer.byteLength} bytes`);
+    console.log(`Files written successfully in ${writeDuration}ms`);
+    console.log("Executing FFmpeg muxing command...");
+    const execStartTime = Date.now();
+    const ffmpegLogs = [];
+    ffmpeg.on("log", ({ message }) => {
+      ffmpegLogs.push(message);
+    });
     await ffmpeg.exec([
+      // Inputs
       "-i",
       "video.mp4",
       "-i",
       "audio.wav",
+      // Explicit stream mapping
+      "-map",
+      "0:v:0",
+      "-map",
+      "1:a:0",
+      // Re-encode video to a very standard H.264 stream.
+      // Copying the WebCodecs/mp4-wasm bitstream can sometimes
+      // lead to timing issues where only the first second renders.
       "-c:v",
-      "
+      "libx264",
+      "-preset",
+      "veryfast",
+      "-crf",
+      "20",
+      // AAC audio
       "-c:a",
       "aac",
       "-b:a",
       "192k",
+      // Make MP4 more web‑friendly
+      "-movflags",
+      "+faststart",
+      // Stop at the shortest of the two streams
       "-shortest",
       "output.mp4"
     ]);
+    const execDuration = Date.now() - execStartTime;
+    console.log(`FFmpeg muxing completed in ${execDuration}ms`);
+    const readStartTime = Date.now();
     const data = await ffmpeg.readFile("output.mp4");
+    const readDuration = Date.now() - readStartTime;
+    console.log(`Output file read successfully in ${readDuration}ms`);
     const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
-
-
-
+    const result = new Blob([uint8], { type: "video/mp4" });
+    const totalDuration = Date.now() - muxStartTime;
+    console.log(`Muxing successful: ${result.size} bytes (${(result.size / 1024 / 1024).toFixed(2)} MB) in ${totalDuration}ms`);
+    console.log(` Breakdown: load=${loadDuration}ms, write=${writeDuration}ms, exec=${execDuration}ms, read=${readDuration}ms`);
+    return result;
+  } catch (error) {
+    const totalDuration = Date.now() - muxStartTime;
+    const errorMsg = error instanceof Error ? error.message : String(error);
+    const errorStack = error instanceof Error ? error.stack : void 0;
+    console.error("FFmpeg muxing failed:", errorMsg);
+    if (errorStack) {
+      console.error("Error stack:", errorStack);
+    }
+    console.error("Error details:", {
+      errorType: error instanceof Error ? error.constructor.name : typeof error,
+      errorMessage: errorMsg,
+      duration: `${totalDuration}ms`,
+      videoBlobSize: options.videoBlob.size,
+      audioBufferSize: options.audioBuffer.byteLength
+    });
+    throw error;
   }
 }
 
 // src/browser-renderer.ts
+function isWindows() {
+  if (typeof navigator === "undefined") return false;
+  const ua = navigator.userAgent;
+  const plat = navigator.platform ?? "";
+  return /Win(dows|32|64|CE)/i.test(ua) || /Win/i.test(plat);
+}
 var BrowserWasmExporter = class _BrowserWasmExporter {
   constructor(settings) {
     this.settings = settings;
```
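For orientation, the argv passed to `ffmpeg.exec` above is the in-browser equivalent of `ffmpeg -i video.mp4 -i audio.wav -map 0:v:0 -map 1:a:0 -c:v libx264 -preset veryfast -crf 20 -c:a aac -b:a 192k -movflags +faststart -shortest output.mp4`. A condensed sketch of the same mux step, stripped of the timing and logging instrumentation — the `/ffmpeg` base path mirrors this version's `getFFmpegBaseURL()` convention and assumes the app serves the core files there:

```ts
import { FFmpeg } from "@ffmpeg/ffmpeg";
import { fetchFile } from "@ffmpeg/util";

// Minimal sketch of the mux step above, without the timing/log plumbing.
async function muxSketch(videoBlob: Blob, audioWav: ArrayBuffer): Promise<Blob> {
  const ffmpeg = new FFmpeg();
  // Assumes ffmpeg-core.js/.wasm are served under /ffmpeg, as in getFFmpegBaseURL().
  await ffmpeg.load({
    coreURL: "/ffmpeg/ffmpeg-core.js",
    wasmURL: "/ffmpeg/ffmpeg-core.wasm",
  });
  await ffmpeg.writeFile("video.mp4", await fetchFile(videoBlob));
  await ffmpeg.writeFile("audio.wav", new Uint8Array(audioWav));
  await ffmpeg.exec([
    "-i", "video.mp4", "-i", "audio.wav",
    "-map", "0:v:0", "-map", "1:a:0",
    "-c:v", "libx264", "-preset", "veryfast", "-crf", "20",
    "-c:a", "aac", "-b:a", "192k",
    "-movflags", "+faststart", "-shortest",
    "output.mp4",
  ]);
  const data = await ffmpeg.readFile("output.mp4");
  const bytes = typeof data === "string" ? new TextEncoder().encode(data) : data;
  return new Blob([bytes], { type: "video/mp4" });
}
```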
```diff
@@ -338,52 +479,111 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   onProgressCallback;
   currentFrame = 0;
   fps = 30;
+  /** On Windows, copy each frame to this canvas before creating VideoFrame to avoid invalid encoder output. */
+  copyCanvas = null;
+  /** On Windows use native VideoEncoder + Mediabunny with prefer-software so frames are actually encoded. */
+  useNativeEncoder = false;
+  nativeVideoEncoder = null;
+  nativeOutput = null;
+  nativePacketSource = null;
+  nativeAddPromise = Promise.resolve();
+  nativeFirstChunk = true;
   static async create(settings) {
     return new _BrowserWasmExporter(settings);
   }
   async start() {
+    const w = this.settings.size.x;
+    const h = this.settings.size.y;
+    const fps = this.fps;
+    if (isWindows()) {
+      try {
+        const { Output, BufferTarget, Mp4OutputFormat, EncodedVideoPacketSource, EncodedPacket } = await import("mediabunny");
+        const output = new Output({
+          format: new Mp4OutputFormat(),
+          target: new BufferTarget()
+        });
+        const packetSource = new EncodedVideoPacketSource("avc");
+        output.addVideoTrack(packetSource);
+        await output.start();
+        this.nativeOutput = output;
+        this.nativePacketSource = packetSource;
+        this.nativeAddPromise = Promise.resolve();
+        this.nativeFirstChunk = true;
+        const videoEncoder = new VideoEncoder({
+          output: (chunk, meta) => {
+            const packet = EncodedPacket.fromEncodedChunk(chunk);
+            const isFirst = this.nativeFirstChunk;
+            const metaArg = isFirst ? meta : void 0;
+            this.nativeFirstChunk = false;
+            this.nativeAddPromise = this.nativeAddPromise.then(
+              () => this.nativePacketSource.add(packet, metaArg)
+            );
+          },
+          error: (e) => console.error("[BrowserRender] VideoEncoder error:", e)
+        });
+        const bitrate = Math.max(5e5, w * h * fps * 0.1 | 0);
+        const config = {
+          codec: "avc1.42001f",
+          width: w,
+          height: h,
+          bitrate,
+          framerate: fps,
+          hardwareAcceleration: "prefer-software"
+        };
+        const support = await VideoEncoder.isConfigSupported(config);
+        if (!support.supported) {
+          delete config.hardwareAcceleration;
+        }
+        videoEncoder.configure(config);
+        this.nativeVideoEncoder = videoEncoder;
+        this.useNativeEncoder = true;
+        this.copyCanvas = document.createElement("canvas");
+        this.copyCanvas.width = w;
+        this.copyCanvas.height = h;
+        return;
+      } catch {
+        this.useNativeEncoder = false;
+        this.nativeVideoEncoder = null;
+        this.nativeOutput = null;
+        this.nativePacketSource = null;
+      }
+    }
     try {
       const loadMp4Module = (await import("mp4-wasm")).default;
       const possiblePaths = [
-        // Vite dev server virtual path
         "/@mp4-wasm",
-        // Common bundled asset paths (Vite uses hashed names)
         "/assets/mp4-wasm.wasm",
         "/assets/mp4-YBRi_559.wasm",
-        // Known Vite hash
         "/mp4-wasm.wasm",
-        // Node modules path (for dev)
         "/node_modules/mp4-wasm/dist/mp4-wasm.wasm"
       ];
       let buffer = null;
-      let successPath = "";
       for (const path of possiblePaths) {
         try {
           const resp = await fetch(path);
           if (resp.ok) {
             const contentType = resp.headers.get("content-type");
-            if (contentType && contentType.includes("html"))
-              continue;
-            }
+            if (contentType && contentType.includes("html")) continue;
             buffer = await resp.arrayBuffer();
-            successPath = path;
             break;
           }
-        } catch
+        } catch {
           continue;
         }
       }
       if (!buffer) {
+        console.error("[BrowserRender] Exporter start: no WASM buffer from any path");
         throw new Error(
           "Could not load WASM file from any location. Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it."
         );
       }
       const mp4 = await loadMp4Module({ wasmBinary: buffer });
-      this.encoder = mp4.createWebCodecsEncoder({
-
-
-
-
+      this.encoder = mp4.createWebCodecsEncoder({ width: w, height: h, fps });
+      if (isWindows()) {
+        this.copyCanvas = document.createElement("canvas");
+        this.copyCanvas.width = w;
+        this.copyCanvas.height = h;
+      }
     } catch (error) {
       throw error;
     }
```
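The bitrate heuristic in the new Windows path, `Math.max(5e5, w * h * fps * 0.1 | 0)`, budgets roughly 0.1 bits per pixel per frame with a 500 kbps floor. Worked through for a common case (the settings below are illustrative):

```ts
// Illustrative only: the encoder bitrate heuristic from start() above.
const w = 1920, h = 1080, fps = 30; // example render settings
const bitrate = Math.max(5e5, (w * h * fps * 0.1) | 0);
console.log(bitrate); // 6220800 ≈ 6.2 Mbps
// A small 320x180 @ 30fps render would compute 172800 and hit the 5e5 floor instead.
```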
```diff
@@ -391,37 +591,108 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
   async handleFrame(canvas, frameNumber) {
     const frameIndex = frameNumber !== void 0 ? frameNumber : this.currentFrame;
     const timestampMicroseconds = Math.round(frameIndex / this.fps * 1e6);
-    const
-
-
-
-
-
+    const durationMicroseconds = Math.round(1 / this.fps * 1e6);
+    let sourceCanvas = canvas;
+    if (this.copyCanvas) {
+      if (this.copyCanvas.width !== canvas.width || this.copyCanvas.height !== canvas.height) {
+        this.copyCanvas.width = canvas.width;
+        this.copyCanvas.height = canvas.height;
+      }
+      const ctx = this.copyCanvas.getContext("2d");
+      if (ctx) {
+        ctx.imageSmoothingEnabled = false;
+        ctx.drawImage(canvas, 0, 0);
+        sourceCanvas = this.copyCanvas;
+      }
+    }
+    if (this.useNativeEncoder && this.nativeVideoEncoder) {
+      const bitmap = await createImageBitmap(sourceCanvas);
+      const frame = new VideoFrame(bitmap, {
+        timestamp: timestampMicroseconds,
+        duration: durationMicroseconds
+      });
+      this.nativeVideoEncoder.encode(frame, { keyFrame: frameIndex === 0 });
+      frame.close();
+      bitmap.close();
+    } else {
+      let frame;
+      if (isWindows() && typeof createImageBitmap === "function") {
+        const bitmap = await createImageBitmap(sourceCanvas);
+        frame = new VideoFrame(bitmap, {
+          timestamp: timestampMicroseconds,
+          duration: durationMicroseconds
+        });
+        await this.encoder.addFrame(frame);
+        frame.close();
+        bitmap.close();
+      } else {
+        frame = new VideoFrame(sourceCanvas, {
+          timestamp: timestampMicroseconds,
+          duration: durationMicroseconds
+        });
+        await this.encoder.addFrame(frame);
+        frame.close();
+      }
+    }
     if (frameNumber === void 0) {
       this.currentFrame++;
     }
   }
   async stop() {
+    if (this.useNativeEncoder && this.nativeVideoEncoder && this.nativeOutput && this.nativePacketSource) {
+      await this.nativeVideoEncoder.flush();
+      this.nativeVideoEncoder.close();
+      this.nativeVideoEncoder = null;
+      await this.nativeAddPromise;
+      this.nativePacketSource.close();
+      await this.nativeOutput.finalize();
+      const buf2 = this.nativeOutput.target.buffer;
+      this.nativeOutput = null;
+      this.nativePacketSource = null;
+      this.videoBlob = new Blob([buf2], { type: "video/mp4" });
+      return;
+    }
     const buf = await this.encoder.end();
-
+    const copy = buf instanceof ArrayBuffer ? buf.slice(0) : new Uint8Array(buf).slice().buffer;
+    this.videoBlob = new Blob([copy], { type: "video/mp4" });
   }
-  async generateAudio(assets, startFrame, endFrame) {
+  async generateAudio(assets, startFrame, endFrame, onProgress) {
     try {
       const processor = new BrowserAudioProcessor();
       const assetPlacements = getAssetPlacement(assets);
       if (assetPlacements.length === 0) {
         return null;
       }
+      const processableCount = assetPlacements.filter((a) => a.volume > 0 && a.playbackRate > 0).length;
+      let processedCount = 0;
       const processedBuffers = [];
-      for (
+      for (let i = 0; i < assetPlacements.length; i++) {
+        const asset = assetPlacements[i];
         if (asset.volume > 0 && asset.playbackRate > 0) {
           try {
-
+            if (asset.type === "video") {
+              try {
+                const assetHasAudio = await hasAudio(asset.src);
+                if (!assetHasAudio) continue;
+              } catch {
+              }
+            }
+            const processPromise = processor.processAudioAsset(
               asset,
               this.settings.fps || 30,
               endFrame - startFrame
             );
+            const timeoutPromise = new Promise((_, reject) => {
+              setTimeout(() => {
+                reject(new Error(`Timeout processing audio asset after 20s - video may have no audio track`));
+              }, 2e4);
+            });
+            const buffer = await Promise.race([processPromise, timeoutPromise]);
             processedBuffers.push(buffer);
+            processedCount++;
+            if (onProgress && processableCount > 0) {
+              onProgress(processedCount / processableCount);
+            }
           } catch {
           }
         }
@@ -429,11 +700,18 @@ var BrowserWasmExporter = class _BrowserWasmExporter {
       if (processedBuffers.length === 0) {
         return null;
       }
+      if (onProgress) onProgress(0.85);
       const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
+      if (onProgress) onProgress(0.92);
       const wavData = processor.audioBufferToWav(mixedBuffer);
+      if (onProgress) onProgress(1);
       await processor.close();
       return wavData;
-    } catch {
+    } catch (error) {
+      const errorMsg = error instanceof Error ? error.message : String(error);
+      const errorStack = error instanceof Error ? error.stack : void 0;
+      console.error("[BrowserRender] Audio generation error:", errorMsg);
+      if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
       return null;
     }
   }
@@ -473,6 +751,7 @@ var renderTwickVideoInBrowser = async (config) => {
   try {
     const { projectFile, variables, settings = {} } = config;
     if (!variables || !variables.input) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: missing variables.input");
       throw new Error('Invalid configuration. "variables.input" is required.');
     }
     const width = settings.width || variables.input.properties?.width || 1920;
@@ -507,20 +786,27 @@ var renderTwickVideoInBrowser = async (config) => {
     renderer.playback.state = 1;
     const totalFrames = await renderer.getNumberOfFrames(renderSettings);
     if (totalFrames === 0 || !isFinite(totalFrames)) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: invalid totalFrames", totalFrames);
       throw new Error(
         "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
       );
     }
     const videoElements = [];
+    const audioElements = [];
     if (variables.input.tracks) {
       variables.input.tracks.forEach((track) => {
         if (track.elements) {
           track.elements.forEach((el) => {
             if (el.type === "video") videoElements.push(el);
+            if (el.type === "audio") audioElements.push(el);
           });
         }
       });
     }
+    let hasAnyAudio = false;
+    if (settings.includeAudio && audioElements.length > 0) {
+      hasAnyAudio = true;
+    }
     if (videoElements.length > 0) {
       for (const videoEl of videoElements) {
         const src = videoEl.props?.src;
@@ -544,6 +830,14 @@ var renderTwickVideoInBrowser = async (config) => {
             reject(new Error(`Failed to load video: ${err?.message || "Unknown error"}`));
           }, { once: true });
         });
+        if (settings.includeAudio) {
+          try {
+            const videoHasAudio = await hasAudio(src);
+            if (videoHasAudio) hasAnyAudio = true;
+          } catch {
+            hasAnyAudio = true;
+          }
+        }
       }
     }
     await renderer.playback.recalculate();
@@ -562,33 +856,102 @@ var renderTwickVideoInBrowser = async (config) => {
       mediaAssets.push(currentAssets);
       const canvas = renderer.stage.finalBuffer;
       await exporter.handleFrame(canvas, frame);
-      if (settings.onProgress) settings.onProgress(frame / totalFrames);
+      if (settings.onProgress) settings.onProgress(frame / totalFrames * 0.9);
     }
     await exporter.stop();
+    if (audioElements.length > 0 && settings.includeAudio) {
+      for (let frame = 0; frame < mediaAssets.length; frame++) {
+        const timeInSec = frame / fps;
+        for (const el of audioElements) {
+          const s = typeof el.s === "number" ? el.s : 0;
+          const e = typeof el.e === "number" ? el.e : Number.MAX_VALUE;
+          if (timeInSec >= s && timeInSec < e && el.props?.src) {
+            const playbackRate = el.props.playbackRate ?? 1;
+            const volume = el.props.volume ?? 1;
+            const trimStart = el.props.time ?? 0;
+            const currentTime = (timeInSec - s) * playbackRate + trimStart;
+            mediaAssets[frame].push({
+              key: el.id,
+              src: el.props.src,
+              type: "audio",
+              currentTime,
+              playbackRate,
+              volume
+            });
+          }
+        }
+      }
+    }
     let audioData = null;
-    if (settings.includeAudio && mediaAssets.length > 0) {
-
+    if (settings.includeAudio && mediaAssets.length > 0 && hasAnyAudio) {
+      const reportAudioProgress = (p) => {
+        if (settings.onProgress) settings.onProgress(0.9 + p * 0.07);
+      };
+      try {
+        audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames, reportAudioProgress);
+        if (settings.onProgress) settings.onProgress(0.97);
+      } catch (audioError) {
+        const errorMsg = audioError instanceof Error ? audioError.message : String(audioError);
+        const errorStack = audioError instanceof Error ? audioError.stack : void 0;
+        console.error("[BrowserRender] Audio generation failed:", errorMsg);
+        if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
+        audioData = null;
+      }
     }
     let finalBlob = exporter.getVideoBlob();
     if (!finalBlob) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: getVideoBlob returned null");
       throw new Error("Failed to create video blob");
     }
+    if (finalBlob.size === 0) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: video blob size is 0");
+      throw new Error("Video blob is empty. Rendering may have failed.");
+    }
+    const MIN_VIDEO_BLOB_BYTES = 1024;
+    if (finalBlob.size < MIN_VIDEO_BLOB_BYTES) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: video blob too small", finalBlob.size, "bytes (expected >=", MIN_VIDEO_BLOB_BYTES, ")");
+      throw new Error(
+        `Video blob is too small (${finalBlob.size} bytes). No video frames were encoded. This often happens on Windows when the encoder does not accept the frame format from canvas. Try using a different browser or updating graphics drivers.`
+      );
+    }
     if (audioData && settings.includeAudio) {
+      if (settings.onProgress) settings.onProgress(0.98);
       try {
-
+        const muxedBlob = await muxAudioVideo({
           videoBlob: finalBlob,
           audioBuffer: audioData
         });
-
-
-
-
-
-
-
-
+        if (!muxedBlob || muxedBlob.size === 0) {
+          throw new Error("Muxed video blob is empty");
+        }
+        finalBlob = muxedBlob;
+      } catch (muxError) {
+        const errorMsg = muxError instanceof Error ? muxError.message : String(muxError);
+        const errorStack = muxError instanceof Error ? muxError.stack : void 0;
+        console.error("[BrowserRender] Audio muxing failed:", errorMsg);
+        if (errorStack) console.error("[BrowserRender] Stack:", errorStack);
+        if (settings.downloadAudioSeparately && audioData) {
+          const audioBlob = new Blob([audioData], { type: "audio/wav" });
+          const audioUrl = URL.createObjectURL(audioBlob);
+          const a = document.createElement("a");
+          a.href = audioUrl;
+          a.download = "audio.wav";
+          a.click();
+          URL.revokeObjectURL(audioUrl);
+        }
+        finalBlob = exporter.getVideoBlob();
+        if (!finalBlob || finalBlob.size === 0) {
+          throw new Error("Video blob is invalid after muxing failure");
+        }
       }
     }
+    if (!finalBlob || finalBlob.size === 0) {
+      console.error("[BrowserRender] renderTwickVideoInBrowser: final blob invalid", finalBlob?.size);
+      throw new Error("Final video blob is empty or invalid");
+    }
+    if (settings.onProgress) {
+      settings.onProgress(1);
+    }
     if (settings.onComplete) {
       settings.onComplete(finalBlob);
     }
```
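Taken together, the render path above now reports progress in fixed phases: frame encoding maps onto 0–0.9, per-asset audio generation onto 0.9–0.97, 0.98 is emitted just before muxing, and 1.0 once the final blob passes validation. A descriptive summary of that budget (the names below are editorial, not the package's):

```ts
// Descriptive summary only — phase boundaries as implemented in the hunk above.
const PROGRESS_PHASES = {
  frames: [0, 0.9],   // settings.onProgress(frame / totalFrames * 0.9)
  audio: [0.9, 0.97], // reportAudioProgress: 0.9 + p * 0.07, then 0.97
  muxing: 0.98,       // emitted just before muxAudioVideo()
  done: 1,            // emitted after the final blob is validated
} as const;
```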
```diff
@@ -613,9 +976,112 @@ var downloadVideoBlob = (videoBlob, filename = "video.mp4") => {
   document.body.appendChild(a);
   a.click();
   document.body.removeChild(a);
-
+  const revokeMs = isWindows() ? 5e3 : 1e3;
+  setTimeout(() => URL.revokeObjectURL(url), revokeMs);
 };
 
+// src/audio/video-normalizer.ts
+function getFFmpegBaseURL2() {
+  if (typeof window !== "undefined") {
+    return `${window.location.origin}/ffmpeg`;
+  }
+  return "/ffmpeg";
+}
+async function normalizeVideoBlob(input, options = {}) {
+  const startTime = Date.now();
+  const targetWidth = options.width ?? 720;
+  const targetFps = options.fps ?? 30;
+  try {
+    console.log("[VideoNormalizer] Starting normalization...");
+    console.log(`[VideoNormalizer] Input size: ${input.size} bytes (${(input.size / 1024 / 1024).toFixed(2)} MB)`);
+    const { FFmpeg } = await import("@ffmpeg/ffmpeg");
+    const { fetchFile } = await import("@ffmpeg/util");
+    const ffmpeg = new FFmpeg();
+    const base = getFFmpegBaseURL2();
+    const coreURL = `${base}/ffmpeg-core.js`;
+    const wasmURL = `${base}/ffmpeg-core.wasm`;
+    console.log(`[VideoNormalizer] Loading FFmpeg from ${base}`);
+    const loadStart = Date.now();
+    await ffmpeg.load({ coreURL, wasmURL });
+    const loadMs = Date.now() - loadStart;
+    console.log(`[VideoNormalizer] FFmpeg loaded in ${loadMs}ms`);
+    console.log("[VideoNormalizer] Writing input file...");
+    const writeStart = Date.now();
+    await ffmpeg.writeFile("in.mp4", await fetchFile(input));
+    const writeMs = Date.now() - writeStart;
+    console.log(`[VideoNormalizer] Input file written in ${writeMs}ms`);
+    console.log("[VideoNormalizer] Executing normalization command...");
+    const execStart = Date.now();
+    ffmpeg.on("log", ({ message }) => {
+      console.log(`[VideoNormalizer:FFmpeg] ${message}`);
+    });
+    await ffmpeg.exec([
+      "-i",
+      "in.mp4",
+      // Normalize geometry & frame rate
+      "-vf",
+      `scale=${targetWidth}:-2,fps=${targetFps},format=yuv420p`,
+      // Standard H.264 video
+      "-c:v",
+      "libx264",
+      "-preset",
+      "veryfast",
+      "-crf",
+      "20",
+      "-pix_fmt",
+      "yuv420p",
+      "-profile:v",
+      "main",
+      "-r",
+      String(targetFps),
+      // AAC audio, stereo, 48kHz
+      "-c:a",
+      "aac",
+      "-b:a",
+      "128k",
+      "-ar",
+      "48000",
+      "-ac",
+      "2",
+      // Web-friendly MP4
+      "-movflags",
+      "+faststart",
+      "out.mp4"
+    ]);
+    const execMs = Date.now() - execStart;
+    console.log(`[VideoNormalizer] Normalization completed in ${execMs}ms`);
+    const readStart = Date.now();
+    const data = await ffmpeg.readFile("out.mp4");
+    const readMs = Date.now() - readStart;
+    console.log(`[VideoNormalizer] Output file read in ${readMs}ms`);
+    const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
+    const blob = new Blob([uint8], { type: "video/mp4" });
+    const totalMs = Date.now() - startTime;
+    console.log(`[VideoNormalizer] Normalization successful: ${blob.size} bytes (${(blob.size / 1024 / 1024).toFixed(2)} MB) in ${totalMs}ms`);
+    return {
+      blob,
+      size: blob.size,
+      debug: {
+        loadMs,
+        writeMs,
+        execMs,
+        readMs,
+        totalMs
+      }
+    };
+  } catch (error) {
+    const totalMs = Date.now() - startTime;
+    const msg = error instanceof Error ? error.message : String(error);
+    const stack = error instanceof Error ? error.stack : void 0;
+    console.error("[VideoNormalizer] Normalization failed:", msg);
+    if (stack) {
+      console.error("[VideoNormalizer] Stack:", stack);
+    }
+    console.error("[VideoNormalizer] Duration:", `${totalMs}ms`);
+    throw error;
+  }
+}
+
 // src/hooks/use-browser-renderer.ts
 import { useState, useCallback } from "react";
 var useBrowserRenderer = (options = {}) => {
```
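`normalizeVideoBlob` is added to the public exports (see the export hunk below) and resolves to `{ blob, size, debug }`, with defaults of 720 pixels wide and 30 fps. A minimal usage sketch based on the signature above; the wrapper function and input blob are illustrative:

```ts
import { normalizeVideoBlob } from "@twick/browser-render";

// Minimal sketch: re-encode a rendered blob to 720p/30fps H.264 + AAC MP4.
// `renderedBlob` is assumed to come from a prior render step.
async function normalize(renderedBlob: Blob): Promise<Blob> {
  const { blob, size, debug } = await normalizeVideoBlob(renderedBlob, {
    width: 720, // matches the built-in default
    fps: 30,
  });
  console.log(`normalized to ${size} bytes in ${debug.totalMs}ms`);
  return blob;
}
```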
```diff
@@ -662,6 +1128,7 @@ var useBrowserRenderer = (options = {}) => {
       setProgress(p);
     },
     onComplete: (blob2) => {
+      console.log("[BrowserRender] useBrowserRenderer: onComplete received blob", blob2 ? `size=${blob2.size} type=${blob2.type}` : "null");
       setVideoBlob(blob2);
       if (autoDownload) {
         try {
@@ -672,6 +1139,7 @@ var useBrowserRenderer = (options = {}) => {
       }
     },
     onError: (err) => {
+      console.error("[BrowserRender] useBrowserRenderer: onError", err?.message);
       setError(err);
     }
   }
@@ -683,6 +1151,8 @@ var useBrowserRenderer = (options = {}) => {
     setProgress(1);
     return blob;
   } catch (err) {
+    const errorMsg = err instanceof Error ? err.message : String(err);
+    console.error("[BrowserRender] useBrowserRenderer: render failed", errorMsg);
     setError(err instanceof Error ? err : new Error(String(err)));
     return null;
   } finally {
@@ -702,6 +1172,7 @@ var useBrowserRenderer = (options = {}) => {
 export {
   renderTwickVideoInBrowser as default,
   downloadVideoBlob,
+  normalizeVideoBlob,
   renderTwickVideoInBrowser,
   useBrowserRenderer
 };
```
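Pulling the changed pieces together, a hedged end-to-end sketch of the exported API as it appears in this version. The `variables.input` shape (tracks of typed elements) is inferred from the validation code above, and `projectFile` is omitted because this diff does not show whether it is required; treat field names beyond those read in the hunks above as assumptions:

```ts
import renderTwickVideoInBrowser, { downloadVideoBlob } from "@twick/browser-render";

// Hedged sketch: drive a render with audio enabled and save the result.
// Only the fields that the validation code above actually reads are shown.
async function renderAndSave(project: {
  properties?: { width?: number; height?: number };
  tracks?: unknown[];
}) {
  await renderTwickVideoInBrowser({
    variables: { input: project }, // required; throws if missing
    settings: {
      includeAudio: true,            // enables hasAudio() probing and muxing
      downloadAudioSeparately: true, // fallback .wav download if muxing fails
      onProgress: (p: number) => console.log(`render ${(p * 100).toFixed(0)}%`),
      onComplete: (blob: Blob) => downloadVideoBlob(blob, "video.mp4"),
      onError: (err: Error) => console.error(err),
    },
  });
}
```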