@twick/browser-render 0.15.6 → 0.15.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,747 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29

// src/index.ts
// Public surface of the package; values are defined later in this bundle.
var index_exports = {};
__export(index_exports, {
  default: () => renderTwickVideoInBrowser,
  downloadVideoBlob: () => downloadVideoBlob,
  renderTwickVideoInBrowser: () => renderTwickVideoInBrowser,
  useBrowserRenderer: () => useBrowserRenderer
});
module.exports = __toCommonJS(index_exports);

// src/browser-renderer.ts
var import_core = require("@twick/core");
var import_project = __toESM(require("@twick/visualizer/dist/project.js"));
+
44
+ // src/audio/video-audio-extractor.ts
45
// Extracts the audio track of a media source by playing it through a muted
// <video> element and recording the Web Audio output in real time.
var VideoElementAudioExtractor = class {
  audioContext;
  video;
  destination = null;
  mediaRecorder = null;
  audioChunks = [];
  /**
   * @param {string} videoSrc - URL of the video to extract audio from.
   * @param {number} [sampleRate=48000] - Sample rate for the AudioContext.
   */
  constructor(videoSrc, sampleRate = 48e3) {
    this.audioContext = new AudioContext({ sampleRate });
    this.video = document.createElement("video");
    this.video.crossOrigin = "anonymous";
    // Loading starts as soon as src is assigned, so metadata may already be
    // available by the time initialize() runs — see the fast path there.
    this.video.src = videoSrc;
    // Mute playback; audio still flows through the Web Audio graph.
    this.video.muted = true;
  }
  /**
   * Waits until the video's metadata is available.
   *
   * Fix: resolve immediately when readyState is already HAVE_METADATA (1)
   * or beyond — otherwise the "loadedmetadata" listener is attached after
   * the event fired and this promise never settles.
   *
   * @returns {Promise<void>}
   */
  async initialize() {
    if (this.video.readyState >= 1) {
      return;
    }
    return new Promise((resolve, reject) => {
      this.video.addEventListener("loadedmetadata", () => resolve(), { once: true });
      this.video.addEventListener("error", (e) => {
        reject(new Error(`Failed to load video for audio extraction: ${e}`));
      }, { once: true });
    });
  }
  /**
   * Extract audio by playing the video and capturing audio output
   * @param {number} startTime - Seek position (seconds) to start capture at.
   * @param {number} duration - Source duration (seconds) to capture.
   * @param {number} [playbackRate=1] - Playback speed during capture.
   * @returns {Promise<AudioBuffer>} Decoded audio for the requested span.
   */
  async extractAudio(startTime, duration, playbackRate = 1) {
    const source = this.audioContext.createMediaElementSource(this.video);
    this.destination = this.audioContext.createMediaStreamDestination();
    source.connect(this.destination);
    this.audioChunks = [];
    this.mediaRecorder = new MediaRecorder(this.destination.stream, {
      mimeType: "audio/webm"
    });
    this.mediaRecorder.ondataavailable = (event) => {
      if (event.data.size > 0) {
        this.audioChunks.push(event.data);
      }
    };
    this.video.currentTime = startTime;
    this.video.playbackRate = playbackRate;
    await new Promise((resolve) => {
      this.video.addEventListener("seeked", () => resolve(), { once: true });
    });
    return new Promise((resolve, reject) => {
      // Safety net: allow 5 extra wall-clock seconds before declaring failure.
      const recordingTimeout = setTimeout(() => {
        reject(new Error("Audio extraction timeout"));
      }, (duration / playbackRate + 5) * 1e3);
      this.mediaRecorder.start();
      this.video.play();
      // Stop once the capture window (scaled by playback rate) has elapsed.
      setTimeout(async () => {
        clearTimeout(recordingTimeout);
        this.video.pause();
        this.mediaRecorder.stop();
        await new Promise((res) => {
          this.mediaRecorder.addEventListener("stop", () => res(), { once: true });
        });
        try {
          const audioBlob = new Blob(this.audioChunks, { type: "audio/webm" });
          const arrayBuffer = await audioBlob.arrayBuffer();
          const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
          resolve(audioBuffer);
        } catch (err) {
          reject(new Error(`Failed to decode recorded audio: ${err}`));
        }
      }, duration / playbackRate * 1e3);
    });
  }
  /** Stops any in-flight recording and releases the media element and context. */
  async close() {
    if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
      this.mediaRecorder.stop();
    }
    this.video.pause();
    this.video.src = "";
    if (this.audioContext.state !== "closed") {
      await this.audioContext.close();
    }
  }
};
/**
 * Extracts a span of audio from `videoSrc` using a one-shot
 * VideoElementAudioExtractor, guaranteeing cleanup of the underlying
 * media element and AudioContext even when extraction fails.
 *
 * @param {string} videoSrc - URL of the source media.
 * @param {number} startTime - Seek position (seconds) to start from.
 * @param {number} duration - Source duration (seconds) to capture.
 * @param {number} [playbackRate=1] - Playback speed during capture.
 * @param {number} [sampleRate=48000] - Output sample rate.
 * @returns {Promise<AudioBuffer>}
 */
async function extractAudioFromVideo(videoSrc, startTime, duration, playbackRate = 1, sampleRate = 48e3) {
  const extractor = new VideoElementAudioExtractor(videoSrc, sampleRate);
  try {
    await extractor.initialize();
    return await extractor.extractAudio(startTime, duration, playbackRate);
  } finally {
    await extractor.close();
  }
}
+
133
+ // src/audio/audio-processor.ts
134
/**
 * Collapses per-frame media-asset observations into one placement record
 * per asset key, tracking the first/last frame each asset appears on and
 * how much source time elapsed between those observations.
 *
 * Fix: replaces the O(frames × assets) `assets.find(...)` inside the frame
 * loop with a Map lookup, making the pass linear in the number of
 * observations.
 *
 * @param {Array<Array<object>>} frames - For each output frame, the media
 *   assets active on that frame (key, src, type, currentTime,
 *   playbackRate, volume).
 * @returns {Array<object>} One placement per unique asset key, in order of
 *   first appearance.
 */
function getAssetPlacement(frames) {
  const assets = [];
  // key -> placement record, for O(1) updates while scanning frames.
  const assetByKey = /* @__PURE__ */ new Map();
  // key -> first/last observed source time, used to derive duration.
  const assetTimeMap = /* @__PURE__ */ new Map();
  for (let frame = 0; frame < frames.length; frame++) {
    for (const asset of frames[frame]) {
      const placement = assetByKey.get(asset.key);
      if (!placement) {
        assetTimeMap.set(asset.key, {
          start: asset.currentTime,
          end: asset.currentTime
        });
        const record = {
          key: asset.key,
          src: asset.src,
          type: asset.type,
          startInVideo: frame,
          endInVideo: frame,
          duration: 0,
          durationInSeconds: 0,
          playbackRate: asset.playbackRate,
          volume: asset.volume,
          // Source time at first appearance = amount trimmed off the start.
          trimLeftInSeconds: asset.currentTime
        };
        assetByKey.set(asset.key, record);
        assets.push(record);
      } else {
        const timeInfo = assetTimeMap.get(asset.key);
        if (timeInfo) {
          timeInfo.end = asset.currentTime;
        }
        placement.endInVideo = frame;
      }
    }
  }
  assets.forEach((asset) => {
    const timeInfo = assetTimeMap.get(asset.key);
    if (timeInfo) {
      // Elapsed source time, normalized back to real seconds.
      asset.durationInSeconds = (timeInfo.end - timeInfo.start) / asset.playbackRate;
    }
    asset.duration = asset.endInVideo - asset.startInVideo + 1;
  });
  return assets;
}
// Decodes, time-places, mixes, and WAV-encodes audio for rendered videos
// using the Web Audio API.
var BrowserAudioProcessor = class {
  /**
   * @param {number} [sampleRate=48000] - Working sample rate for all buffers.
   */
  constructor(sampleRate = 48e3) {
    this.sampleRate = sampleRate;
    this.audioContext = new AudioContext({ sampleRate });
  }
  audioContext;
  /**
   * Fetch and decode audio from a media source
   * Falls back to video element extraction if decodeAudioData fails
   * @param {string} src - URL of the media to decode.
   * @returns {Promise<AudioBuffer>}
   */
  async fetchAndDecodeAudio(src) {
    try {
      const response = await fetch(src);
      // fetch() does not reject on HTTP errors; surface them so the
      // fallback path is taken with a meaningful cause.
      if (!response.ok) {
        throw new Error(`HTTP ${response.status} while fetching audio source`);
      }
      const arrayBuffer = await response.arrayBuffer();
      return await this.audioContext.decodeAudioData(arrayBuffer);
    } catch (err) {
      try {
        // Real-time extraction via a muted <video> element.
        // NOTE(review): 999999 s is a "play to the end" sentinel — confirm
        // the extractor's duration-based timer behaves for shorter media.
        return await extractAudioFromVideo(
          src,
          0,
          999999,
          1,
          this.sampleRate
        );
      } catch (fallbackErr) {
        throw new Error(`Failed to extract audio: ${err}. Fallback also failed: ${fallbackErr}`);
      }
    }
  }
  /**
   * Process audio asset with playback rate, volume, and timing
   * Renders the asset into a stereo buffer spanning the whole output video,
   * silent outside the asset's window.
   * @param {object} asset - Placement record from getAssetPlacement().
   * @param {number} fps - Output video frame rate.
   * @param {number} totalFrames - Total frames in the output video.
   * @returns {Promise<AudioBuffer>}
   */
  async processAudioAsset(asset, fps, totalFrames) {
    const audioBuffer = await this.fetchAndDecodeAudio(asset.src);
    const duration = totalFrames / fps;
    const outputLength = Math.ceil(duration * this.sampleRate);
    const outputBuffer = this.audioContext.createBuffer(
      2,
      // stereo
      outputLength,
      this.sampleRate
    );
    const startTime = asset.startInVideo / fps;
    const trimLeft = asset.trimLeftInSeconds / asset.playbackRate;
    for (let channel = 0; channel < 2; channel++) {
      // Mono sources reuse channel 0 for both output channels.
      const inputData = audioBuffer.getChannelData(Math.min(channel, audioBuffer.numberOfChannels - 1));
      const outputData = outputBuffer.getChannelData(channel);
      const trimLeftSample = Math.floor(trimLeft * this.sampleRate);
      for (let i = 0; i < outputData.length; i++) {
        const outputTime = i / this.sampleRate;
        const assetTime = outputTime - startTime;
        if (assetTime < 0 || assetTime >= asset.durationInSeconds) {
          // Outside the asset's window: silence.
          outputData[i] = 0;
        } else {
          // Nearest-sample lookup, honoring the playback rate.
          const inputSample = Math.floor(trimLeftSample + assetTime * asset.playbackRate * this.sampleRate);
          if (inputSample >= 0 && inputSample < inputData.length) {
            outputData[i] = inputData[inputSample] * asset.volume;
          } else {
            outputData[i] = 0;
          }
        }
      }
    }
    return outputBuffer;
  }
  /**
   * Mix multiple audio buffers
   * Averages the inputs (each scaled by 1/N) into a stereo buffer as long
   * as the longest input.
   * @param {AudioBuffer[]} buffers
   * @returns {AudioBuffer}
   */
  mixAudioBuffers(buffers) {
    if (buffers.length === 0) {
      // Degenerate one-sample silent buffer for the empty case.
      return this.audioContext.createBuffer(2, 1, this.sampleRate);
    }
    const maxLength = Math.max(...buffers.map((b) => b.length));
    const mixedBuffer = this.audioContext.createBuffer(2, maxLength, this.sampleRate);
    for (let channel = 0; channel < 2; channel++) {
      const mixedData = mixedBuffer.getChannelData(channel);
      buffers.forEach((buffer) => {
        const channelData = buffer.getChannelData(Math.min(channel, buffer.numberOfChannels - 1));
        for (let i = 0; i < channelData.length; i++) {
          mixedData[i] = (mixedData[i] || 0) + channelData[i] / buffers.length;
        }
      });
    }
    return mixedBuffer;
  }
  /**
   * Convert AudioBuffer to WAV format
   * Produces a 16-bit PCM RIFF/WAVE file with interleaved channels.
   * @param {AudioBuffer} buffer
   * @returns {ArrayBuffer} Complete WAV file bytes (44-byte header + data).
   */
  audioBufferToWav(buffer) {
    const numberOfChannels = buffer.numberOfChannels;
    const sampleRate = buffer.sampleRate;
    const format = 1;
    const bitDepth = 16;
    const bytesPerSample = bitDepth / 8;
    const blockAlign = numberOfChannels * bytesPerSample;
    // Interleave: data[frame * channels + channel].
    const data = new Float32Array(buffer.length * numberOfChannels);
    for (let channel = 0; channel < numberOfChannels; channel++) {
      const channelData = buffer.getChannelData(channel);
      for (let i = 0; i < buffer.length; i++) {
        data[i * numberOfChannels + channel] = channelData[i];
      }
    }
    const dataLength = data.length * bytesPerSample;
    const headerLength = 44;
    const wav = new ArrayBuffer(headerLength + dataLength);
    const view = new DataView(wav);
    const writeString = (offset2, string) => {
      for (let i = 0; i < string.length; i++) {
        view.setUint8(offset2 + i, string.charCodeAt(i));
      }
    };
    // Standard 44-byte PCM WAV header.
    writeString(0, "RIFF");
    view.setUint32(4, 36 + dataLength, true);
    writeString(8, "WAVE");
    writeString(12, "fmt ");
    view.setUint32(16, 16, true);
    view.setUint16(20, format, true);
    view.setUint16(22, numberOfChannels, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * blockAlign, true);
    view.setUint16(32, blockAlign, true);
    view.setUint16(34, bitDepth, true);
    writeString(36, "data");
    view.setUint32(40, dataLength, true);
    // Clamp to [-1, 1] and scale to signed 16-bit.
    let offset = 44;
    for (let i = 0; i < data.length; i++) {
      const sample = Math.max(-1, Math.min(1, data[i]));
      view.setInt16(offset, sample < 0 ? sample * 32768 : sample * 32767, true);
      offset += 2;
    }
    return wav;
  }
  /** Releases the underlying AudioContext. */
  async close() {
    await this.audioContext.close();
  }
};
+
318
+ // src/audio/audio-video-muxer.ts
319
/**
 * Resolves the directory that serves the ffmpeg core assets:
 * the page origin's /ffmpeg path in a browser, a bare relative
 * path everywhere else.
 * @returns {string}
 */
function getFFmpegBaseURL() {
  const inBrowser = typeof window !== "undefined";
  return inBrowser ? `${window.location.origin}/ffmpeg` : "/ffmpeg";
}
/**
 * Muxes a silent MP4 and a WAV audio track into one MP4 using ffmpeg.wasm.
 *
 * Best-effort by design: if ffmpeg cannot be loaded or muxing fails, the
 * original video-only blob is returned so rendering still succeeds.
 * Fix: the failure is now logged instead of being swallowed silently.
 *
 * @param {{ videoBlob: Blob, audioBuffer: ArrayBuffer }} options
 * @returns {Promise<Blob>} Muxed MP4, or `options.videoBlob` on failure.
 */
async function muxAudioVideo(options) {
  try {
    const { FFmpeg } = await import("@ffmpeg/ffmpeg");
    const { fetchFile } = await import("@ffmpeg/util");
    const ffmpeg = new FFmpeg();
    const base = getFFmpegBaseURL();
    const coreURL = `${base}/ffmpeg-core.js`;
    const wasmURL = `${base}/ffmpeg-core.wasm`;
    await ffmpeg.load({
      coreURL,
      wasmURL
    });
    await ffmpeg.writeFile(
      "video.mp4",
      await fetchFile(options.videoBlob)
    );
    await ffmpeg.writeFile(
      "audio.wav",
      new Uint8Array(options.audioBuffer)
    );
    // Copy the video stream untouched, encode audio to 192k AAC, and stop
    // at the shorter of the two inputs.
    await ffmpeg.exec([
      "-i",
      "video.mp4",
      "-i",
      "audio.wav",
      "-c:v",
      "copy",
      "-c:a",
      "aac",
      "-b:a",
      "192k",
      "-shortest",
      "output.mp4"
    ]);
    const data = await ffmpeg.readFile("output.mp4");
    const uint8 = typeof data === "string" ? new TextEncoder().encode(data) : new Uint8Array(data);
    return new Blob([uint8], { type: "video/mp4" });
  } catch (err) {
    // Deliberate fallback, but never fail silently: record why muxing failed.
    console.warn("muxAudioVideo: falling back to video without audio:", err);
    return options.videoBlob;
  }
}
+
367
+ // src/browser-renderer.ts
368
// Exporter that encodes rendered canvas frames into an MP4 in the browser
// via mp4-wasm's WebCodecs-backed encoder.
var BrowserWasmExporter = class _BrowserWasmExporter {
  /**
   * @param {object} settings - Render settings; reads `size` {x, y} and `fps`.
   */
  constructor(settings) {
    this.settings = settings;
    this.fps = settings.fps || 30;
  }
  static id = "@twick/core/wasm";
  static displayName = "Browser Video (Wasm)";
  encoder;
  videoBlob = null;
  onProgressCallback;
  currentFrame = 0;
  fps = 30;
  /** Async factory matching the exporter-plugin contract. */
  static async create(settings) {
    return new _BrowserWasmExporter(settings);
  }
  /**
   * Loads the mp4-wasm binary (probing several likely URLs) and creates an
   * encoder sized from the render settings.
   * Fix: removed a useless `try { ... } catch (error) { throw error; }`
   * wrapper and the never-read `successPath` local.
   * @throws {Error} When the wasm binary cannot be fetched from any path.
   */
  async start() {
    const loadMp4Module = (await import("mp4-wasm")).default;
    const possiblePaths = [
      // Vite dev server virtual path
      "/@mp4-wasm",
      // Common bundled asset paths (Vite uses hashed names)
      "/assets/mp4-wasm.wasm",
      "/assets/mp4-YBRi_559.wasm",
      // Known Vite hash
      "/mp4-wasm.wasm",
      // Node modules path (for dev)
      "/node_modules/mp4-wasm/dist/mp4-wasm.wasm"
    ];
    let buffer = null;
    for (const path of possiblePaths) {
      try {
        const resp = await fetch(path);
        if (resp.ok) {
          const contentType = resp.headers.get("content-type");
          // Dev servers often answer unknown paths with an HTML fallback page;
          // that is not the wasm binary, keep probing.
          if (contentType && contentType.includes("html")) {
            continue;
          }
          buffer = await resp.arrayBuffer();
          break;
        }
      } catch (e) {
        continue;
      }
    }
    if (!buffer) {
      throw new Error(
        "Could not load WASM file from any location. Please copy mp4-wasm.wasm to your public directory or configure Vite to serve it."
      );
    }
    const mp4 = await loadMp4Module({ wasmBinary: buffer });
    this.encoder = mp4.createWebCodecsEncoder({
      width: this.settings.size.x,
      height: this.settings.size.y,
      fps: this.fps
    });
  }
  /**
   * Encodes one canvas frame. When `frameNumber` is omitted, an internal
   * counter supplies (and then advances) the frame index.
   */
  async handleFrame(canvas, frameNumber) {
    const frameIndex = frameNumber !== void 0 ? frameNumber : this.currentFrame;
    const timestampMicroseconds = Math.round(frameIndex / this.fps * 1e6);
    const frame = new VideoFrame(canvas, {
      timestamp: timestampMicroseconds,
      duration: Math.round(1 / this.fps * 1e6)
    });
    await this.encoder.addFrame(frame);
    // Release the VideoFrame promptly; they hold GPU/memory resources.
    frame.close();
    if (frameNumber === void 0) {
      this.currentFrame++;
    }
  }
  /** Finalizes the encoder and stores the result as an MP4 blob. */
  async stop() {
    const buf = await this.encoder.end();
    this.videoBlob = new Blob([buf], { type: "video/mp4" });
  }
  /**
   * Builds a mixed WAV track from the media assets observed per frame.
   * Best-effort: returns null when nothing is audible or on any failure.
   * @returns {Promise<ArrayBuffer|null>}
   */
  async generateAudio(assets, startFrame, endFrame) {
    try {
      const processor = new BrowserAudioProcessor();
      const assetPlacements = getAssetPlacement(assets);
      if (assetPlacements.length === 0) {
        return null;
      }
      const processedBuffers = [];
      for (const asset of assetPlacements) {
        // Skip muted or non-advancing assets.
        if (asset.volume > 0 && asset.playbackRate > 0) {
          try {
            const buffer = await processor.processAudioAsset(
              asset,
              this.settings.fps || 30,
              endFrame - startFrame
            );
            processedBuffers.push(buffer);
          } catch {
            // Best-effort: one failing asset should not abort the render.
          }
        }
      }
      if (processedBuffers.length === 0) {
        return null;
      }
      const mixedBuffer = processor.mixAudioBuffers(processedBuffers);
      const wavData = processor.audioBufferToWav(mixedBuffer);
      await processor.close();
      return wavData;
    } catch {
      return null;
    }
  }
  /** Intentionally a no-op for the browser exporter. */
  async mergeMedia() {
  }
  /** Intentionally a no-op for the browser exporter. */
  async downloadVideos(assets) {
  }
  /** @returns {Blob|null} The encoded MP4, available after stop(). */
  getVideoBlob() {
    return this.videoBlob;
  }
  /** Registers a progress callback (stored; invoked by the render driver). */
  setProgressCallback(callback) {
    this.onProgressCallback = callback;
  }
};
/**
 * Renders a Twick project to an MP4 Blob entirely in the browser.
 * Temporarily patches media playback globals so the page stays silent
 * while frames are rendered; patches are always restored in `finally`.
 *
 * @param {object} config - { projectFile?, variables, settings? }.
 *   `variables.input` is required.
 * @returns {Promise<Blob>} The rendered (and, when possible, audio-muxed) MP4.
 * @throws {Error} On invalid config, zero-duration projects, video preload
 *   failure, or a missing encoded blob.
 */
var renderTwickVideoInBrowser = async (config) => {
  // Keep originals so the monkey-patches below can be reverted.
  const originalVideoPlay = HTMLVideoElement.prototype.play;
  const originalAudioPlay = HTMLAudioElement.prototype.play;
  const originalCreateElement = document.createElement.bind(document);
  // Force any media element that plays during rendering to be muted.
  HTMLVideoElement.prototype.play = function() {
    this.muted = true;
    this.volume = 0;
    return originalVideoPlay.call(this);
  };
  HTMLAudioElement.prototype.play = function() {
    this.muted = true;
    this.volume = 0;
    return originalAudioPlay.call(this);
  };
  // Also mute media elements at creation time.
  document.createElement = function(tagName, options) {
    const element = originalCreateElement(tagName, options);
    if (tagName.toLowerCase() === "video" || tagName.toLowerCase() === "audio") {
      element.muted = true;
      element.volume = 0;
    }
    return element;
  };
  try {
    const { projectFile, variables, settings = {} } = config;
    if (!variables || !variables.input) {
      throw new Error('Invalid configuration. "variables.input" is required.');
    }
    // Explicit settings win over project properties; fall back to 1080p30.
    const width = settings.width || variables.input.properties?.width || 1920;
    const height = settings.height || variables.input.properties?.height || 1080;
    const fps = settings.fps || variables.input.properties?.fps || 30;
    const project = !projectFile ? import_project.default : projectFile;
    project.variables = variables;
    const renderSettings = {
      name: "browser-render",
      exporter: {
        name: "@twick/core/wasm"
      },
      size: new import_core.Vector2(width, height),
      resolutionScale: 1,
      colorSpace: "srgb",
      fps,
      range: settings.range || [0, Infinity],
      background: variables.input.backgroundColor || "#000000",
      ...settings.quality && {
        quality: settings.quality
      }
    };
    const renderer = new import_core.Renderer(project);
    const exporter = await BrowserWasmExporter.create(renderSettings);
    await exporter.start();
    if (settings.onProgress) {
      exporter.setProgressCallback(settings.onProgress);
    }
    // NOTE(review): bracket access suggests reloadScenes is an internal
    // Renderer API — confirm against @twick/core before changing.
    await renderer["reloadScenes"](renderSettings);
    renderer.stage.configure(renderSettings);
    renderer.playback.fps = renderSettings.fps;
    renderer.playback.state = 1;
    const totalFrames = await renderer.getNumberOfFrames(renderSettings);
    if (totalFrames === 0 || !isFinite(totalFrames)) {
      throw new Error(
        "Cannot render: Video has zero duration. Please ensure your project has valid content with non-zero duration. Check that all video elements have valid sources and are properly loaded."
      );
    }
    // Collect every video element declared in the input tracks...
    const videoElements = [];
    if (variables.input.tracks) {
      variables.input.tracks.forEach((track) => {
        if (track.elements) {
          track.elements.forEach((el) => {
            if (el.type === "video") videoElements.push(el);
          });
        }
      });
    }
    // ...and preload their metadata (30 s timeout each) before rendering.
    if (videoElements.length > 0) {
      for (const videoEl of videoElements) {
        const src = videoEl.props?.src;
        // Skip absent sources and the literal string "undefined".
        if (!src || src === "undefined") continue;
        const preloadVideo = document.createElement("video");
        preloadVideo.crossOrigin = "anonymous";
        preloadVideo.preload = "metadata";
        preloadVideo.src = src;
        await new Promise((resolve, reject) => {
          const timeout = setTimeout(
            () => reject(new Error(`Timeout loading video metadata: ${src.substring(0, 80)}`)),
            3e4
          );
          preloadVideo.addEventListener("loadedmetadata", () => {
            clearTimeout(timeout);
            resolve();
          }, { once: true });
          preloadVideo.addEventListener("error", () => {
            clearTimeout(timeout);
            const err = preloadVideo.error;
            reject(new Error(`Failed to load video: ${err?.message || "Unknown error"}`));
          }, { once: true });
        });
      }
    }
    await renderer.playback.recalculate();
    await renderer.playback.reset();
    await renderer.playback.seek(0);
    // Render frame by frame, recording the media assets active on each
    // frame for later audio generation.
    const mediaAssets = [];
    for (let frame = 0; frame < totalFrames; frame++) {
      if (frame > 0) {
        await renderer.playback.progress();
      }
      await renderer.stage.render(
        renderer.playback.currentScene,
        renderer.playback.previousScene
      );
      const currentAssets = renderer.playback.currentScene.getMediaAssets?.() || [];
      mediaAssets.push(currentAssets);
      const canvas = renderer.stage.finalBuffer;
      await exporter.handleFrame(canvas, frame);
      if (settings.onProgress) settings.onProgress(frame / totalFrames);
    }
    await exporter.stop();
    let audioData = null;
    if (settings.includeAudio && mediaAssets.length > 0) {
      audioData = await exporter.generateAudio(mediaAssets, 0, totalFrames);
    }
    let finalBlob = exporter.getVideoBlob();
    if (!finalBlob) {
      throw new Error("Failed to create video blob");
    }
    if (audioData && settings.includeAudio) {
      try {
        finalBlob = await muxAudioVideo({
          videoBlob: finalBlob,
          audioBuffer: audioData
        });
      } catch {
        // Muxing failed: offer the audio as a separate WAV download so the
        // rendered result is not lost.
        const audioBlob = new Blob([audioData], { type: "audio/wav" });
        const audioUrl = URL.createObjectURL(audioBlob);
        const a = document.createElement("a");
        a.href = audioUrl;
        a.download = "audio.wav";
        a.click();
        URL.revokeObjectURL(audioUrl);
      }
    }
    if (settings.onComplete) {
      settings.onComplete(finalBlob);
    }
    return finalBlob;
  } catch (error) {
    // Report via callback when provided, then rethrow for the caller.
    if (config.settings?.onError) {
      config.settings.onError(error);
    }
    throw error;
  } finally {
    // Always restore the patched globals.
    HTMLVideoElement.prototype.play = originalVideoPlay;
    HTMLAudioElement.prototype.play = originalAudioPlay;
    document.createElement = originalCreateElement;
  }
};
/**
 * Triggers a browser download of `videoBlob` through a temporary object
 * URL attached to a hidden, programmatically clicked anchor element.
 *
 * @param {Blob} videoBlob - The video data to save.
 * @param {string} [filename="video.mp4"] - Suggested file name.
 */
var downloadVideoBlob = (videoBlob, filename = "video.mp4") => {
  const objectUrl = URL.createObjectURL(videoBlob);
  const anchor = document.createElement("a");
  anchor.href = objectUrl;
  anchor.download = filename;
  anchor.style.display = "none";
  document.body.appendChild(anchor);
  anchor.click();
  document.body.removeChild(anchor);
  // Delay revocation so the download starts before the URL is invalidated.
  setTimeout(() => URL.revokeObjectURL(objectUrl), 1e3);
};
+
658
+ // src/hooks/use-browser-renderer.ts
659
var import_react = require("react");
/**
 * React hook wrapping renderTwickVideoInBrowser with progress, error, and
 * result state, plus download/reset helpers.
 *
 * @param {object} [options={}] - Render options (width, height, fps,
 *   quality, range, includeAudio, ...) plus hook extras: autoDownload,
 *   downloadFilename, projectFile.
 * @returns {{ render, progress, isRendering, error, videoBlob, download, reset }}
 */
var useBrowserRenderer = (options = {}) => {
  const [progress, setProgress] = (0, import_react.useState)(0);
  const [isRendering, setIsRendering] = (0, import_react.useState)(false);
  const [error, setError] = (0, import_react.useState)(null);
  const [videoBlob, setVideoBlob] = (0, import_react.useState)(null);
  // Clears all render state back to idle.
  const reset = (0, import_react.useCallback)(() => {
    setProgress(0);
    setIsRendering(false);
    setError(null);
    setVideoBlob(null);
  }, []);
  // Saves the last rendered blob; reports (rather than throws) when there
  // is nothing to download yet.
  const download = (0, import_react.useCallback)((filename) => {
    if (!videoBlob) {
      setError(new Error("No video available to download. Please render the video first."));
      return;
    }
    try {
      downloadVideoBlob(videoBlob, filename || options.downloadFilename || "video.mp4");
    } catch (err) {
      setError(err instanceof Error ? err : new Error("Failed to download video"));
    }
  }, [videoBlob, options.downloadFilename]);
  // Kicks off a render; resolves with the blob or null on failure (the
  // failure is surfaced through the `error` state).
  const render = (0, import_react.useCallback)(async (variables) => {
    reset();
    setIsRendering(true);
    try {
      // Split hook-only options from the render settings passthrough.
      const { projectFile, width, height, fps, quality, range, includeAudio, downloadAudioSeparately, onAudioReady, autoDownload, downloadFilename, ...restOptions } = options;
      const blob = await renderTwickVideoInBrowser({
        projectFile,
        variables,
        settings: {
          width,
          height,
          includeAudio,
          downloadAudioSeparately,
          onAudioReady,
          fps,
          quality,
          range,
          ...restOptions,
          onProgress: (p) => {
            setProgress(p);
          },
          onComplete: (blob2) => {
            setVideoBlob(blob2);
            if (autoDownload) {
              try {
                downloadVideoBlob(blob2, downloadFilename || "video.mp4");
              } catch (downloadErr) {
                setError(downloadErr instanceof Error ? downloadErr : new Error("Failed to auto-download video"));
              }
            }
          },
          onError: (err) => {
            setError(err);
          }
        }
      });
      if (!blob) {
        throw new Error("Rendering failed: No video blob was generated");
      }
      setVideoBlob(blob);
      setProgress(1);
      return blob;
    } catch (err) {
      setError(err instanceof Error ? err : new Error(String(err)));
      return null;
    } finally {
      setIsRendering(false);
    }
  }, [options, reset]);
  return {
    render,
    progress,
    isRendering,
    error,
    videoBlob,
    download,
    reset
  };
};
// Annotate the CommonJS export names for ESM import in node:
// (dead `0 && ...` expression kept so tooling can statically detect
// the named exports of this CommonJS bundle)
0 && (module.exports = {
  downloadVideoBlob,
  renderTwickVideoInBrowser,
  useBrowserRenderer
});
//# sourceMappingURL=index.js.map