@twick/browser-render 0.15.6 → 0.15.8

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry. Between 0.15.6 and 0.15.8, the two files shown below — a Web Audio API audio processor and an FFmpeg.wasm muxer — are deleted in their entirety.
@@ -1,239 +0,0 @@
- /**
-  * Browser-based audio processing using Web Audio API
-  * Mirrors the server's FFmpeg audio generation logic
-  */
-
- export interface MediaAsset {
-   key: string;
-   src: string;
-   type: 'video' | 'audio';
-   startInVideo: number;
-   endInVideo: number;
-   duration: number;
-   playbackRate: number;
-   volume: number;
-   trimLeftInSeconds: number;
-   durationInSeconds: number;
- }
-
- export interface AssetInfo {
-   key: string;
-   src: string;
-   type: 'video' | 'audio';
-   currentTime: number;
-   playbackRate: number;
-   volume: number;
- }
-
- /**
-  * Get asset placement from frames (similar to server's getAssetPlacement)
-  */
- export function getAssetPlacement(frames: AssetInfo[][]): MediaAsset[] {
-   const assets: MediaAsset[] = [];
-   const assetTimeMap = new Map<string, { start: number; end: number }>();
-
-   for (let frame = 0; frame < frames.length; frame++) {
-     for (const asset of frames[frame]) {
-       if (!assetTimeMap.has(asset.key)) {
-         assetTimeMap.set(asset.key, {
-           start: asset.currentTime,
-           end: asset.currentTime,
-         });
-         assets.push({
-           key: asset.key,
-           src: asset.src,
-           type: asset.type,
-           startInVideo: frame,
-           endInVideo: frame,
-           duration: 0,
-           durationInSeconds: 0,
-           playbackRate: asset.playbackRate,
-           volume: asset.volume,
-           trimLeftInSeconds: asset.currentTime,
-         });
-       } else {
-         const timeInfo = assetTimeMap.get(asset.key);
-         if (timeInfo) {
-           timeInfo.end = asset.currentTime;
-         }
-         const existingAsset = assets.find(a => a.key === asset.key);
-         if (existingAsset) {
-           existingAsset.endInVideo = frame;
-         }
-       }
-     }
-   }
-
-   // Calculate durations
-   assets.forEach(asset => {
-     const timeInfo = assetTimeMap.get(asset.key);
-     if (timeInfo) {
-       asset.durationInSeconds = (timeInfo.end - timeInfo.start) / asset.playbackRate;
-     }
-     asset.duration = asset.endInVideo - asset.startInVideo + 1;
-   });
-
-   return assets;
- }
-
- /**
-  * Audio processor using Web Audio API
-  */
- export class BrowserAudioProcessor {
-   private audioContext: AudioContext;
-
-   constructor(private sampleRate: number = 48000) {
-     this.audioContext = new AudioContext({ sampleRate });
-   }
-
-   /**
-    * Fetch and decode audio from a media source
-    */
-   async fetchAndDecodeAudio(src: string): Promise<AudioBuffer> {
-     const response = await fetch(src);
-     const arrayBuffer = await response.arrayBuffer();
-     return await this.audioContext.decodeAudioData(arrayBuffer);
-   }
-
-   /**
-    * Process audio asset with playback rate, volume, and timing
-    */
-   async processAudioAsset(
-     asset: MediaAsset,
-     fps: number,
-     totalFrames: number
-   ): Promise<AudioBuffer> {
-     const audioBuffer = await this.fetchAndDecodeAudio(asset.src);
-
-     const duration = totalFrames / fps;
-     const outputLength = Math.ceil(duration * this.sampleRate);
-     const outputBuffer = this.audioContext.createBuffer(
-       2, // stereo
-       outputLength,
-       this.sampleRate
-     );
-
-     // Calculate timing
-     const startTime = asset.startInVideo / fps;
-     const trimLeft = asset.trimLeftInSeconds / asset.playbackRate;
-     const trimRight = trimLeft + asset.durationInSeconds;
-
-     // Process each channel
-     for (let channel = 0; channel < 2; channel++) {
-       const inputData = audioBuffer.getChannelData(Math.min(channel, audioBuffer.numberOfChannels - 1));
-       const outputData = outputBuffer.getChannelData(channel);
-
-       // Calculate sample positions
-       const startSample = Math.floor(startTime * this.sampleRate);
-       const trimLeftSample = Math.floor(trimLeft * this.sampleRate);
-       const trimRightSample = Math.floor(trimRight * this.sampleRate);
-
-       // Copy and process samples
-       for (let i = 0; i < outputData.length; i++) {
-         const outputTime = i / this.sampleRate;
-         const assetTime = outputTime - startTime;
-
-         if (assetTime < 0 || assetTime >= asset.durationInSeconds) {
-           outputData[i] = 0; // Silence
-         } else {
-           // Apply playback rate
-           const inputSample = Math.floor((trimLeftSample + assetTime * asset.playbackRate * this.sampleRate));
-           if (inputSample >= 0 && inputSample < inputData.length) {
-             outputData[i] = inputData[inputSample] * asset.volume;
-           } else {
-             outputData[i] = 0;
-           }
-         }
-       }
-     }
-
-     return outputBuffer;
-   }
-
-   /**
-    * Mix multiple audio buffers
-    */
-   mixAudioBuffers(buffers: AudioBuffer[]): AudioBuffer {
-     if (buffers.length === 0) {
-       return this.audioContext.createBuffer(2, 1, this.sampleRate);
-     }
-
-     const maxLength = Math.max(...buffers.map(b => b.length));
-     const mixedBuffer = this.audioContext.createBuffer(2, maxLength, this.sampleRate);
-
-     for (let channel = 0; channel < 2; channel++) {
-       const mixedData = mixedBuffer.getChannelData(channel);
-
-       buffers.forEach(buffer => {
-         const channelData = buffer.getChannelData(Math.min(channel, buffer.numberOfChannels - 1));
-         for (let i = 0; i < channelData.length; i++) {
-           mixedData[i] = (mixedData[i] || 0) + channelData[i] / buffers.length;
-         }
-       });
-     }
-
-     return mixedBuffer;
-   }
-
-   /**
-    * Convert AudioBuffer to WAV format
-    */
-   audioBufferToWav(buffer: AudioBuffer): ArrayBuffer {
-     const numberOfChannels = buffer.numberOfChannels;
-     const sampleRate = buffer.sampleRate;
-     const format = 1; // PCM
-     const bitDepth = 16;
-
-     const bytesPerSample = bitDepth / 8;
-     const blockAlign = numberOfChannels * bytesPerSample;
-
-     const data = new Float32Array(buffer.length * numberOfChannels);
-     for (let channel = 0; channel < numberOfChannels; channel++) {
-       const channelData = buffer.getChannelData(channel);
-       for (let i = 0; i < buffer.length; i++) {
-         data[i * numberOfChannels + channel] = channelData[i];
-       }
-     }
-
-     const dataLength = data.length * bytesPerSample;
-     const headerLength = 44;
-     const wav = new ArrayBuffer(headerLength + dataLength);
-     const view = new DataView(wav);
-
-     // Write WAV header
-     const writeString = (offset: number, string: string) => {
-       for (let i = 0; i < string.length; i++) {
-         view.setUint8(offset + i, string.charCodeAt(i));
-       }
-     };
-
-     writeString(0, 'RIFF');
-     view.setUint32(4, 36 + dataLength, true);
-     writeString(8, 'WAVE');
-     writeString(12, 'fmt ');
-     view.setUint32(16, 16, true); // fmt chunk size
-     view.setUint16(20, format, true);
-     view.setUint16(22, numberOfChannels, true);
-     view.setUint32(24, sampleRate, true);
-     view.setUint32(28, sampleRate * blockAlign, true);
-     view.setUint16(32, blockAlign, true);
-     view.setUint16(34, bitDepth, true);
-     writeString(36, 'data');
-     view.setUint32(40, dataLength, true);
-
-     // Write audio data
-     const volume = 0.8;
-     let offset = 44;
-     for (let i = 0; i < data.length; i++) {
-       const sample = Math.max(-1, Math.min(1, data[i]));
-       view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
-       offset += 2;
-     }
-
-     return wav;
-   }
-
-   async close() {
-     await this.audioContext.close();
-   }
- }
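For orientation, here is a minimal sketch of how the removed module's exports were presumably wired together: collect per-frame asset info into timed placements, render each asset onto a timeline-length buffer, mix, and encode to WAV for muxing. The import path and the `renderAudioTrack` wrapper are assumptions for illustration; only the called functions and their signatures come from the code above.

```ts
import {
  getAssetPlacement,
  BrowserAudioProcessor,
  type AssetInfo,
} from '@twick/browser-render'; // assumed export path; the diff does not show the package entry point

// Hypothetical helper: turn per-frame AssetInfo captures into a single WAV track.
async function renderAudioTrack(frames: AssetInfo[][], fps: number): Promise<ArrayBuffer> {
  const assets = getAssetPlacement(frames);            // group per-frame entries into timed assets
  const processor = new BrowserAudioProcessor(48000);  // 48 kHz output, matching the constructor default

  // Render each asset onto a buffer spanning the full timeline (frames.length frames).
  const buffers = await Promise.all(
    assets.map(asset => processor.processAudioAsset(asset, fps, frames.length))
  );

  const mixed = processor.mixAudioBuffers(buffers);    // sum tracks into one stereo buffer
  const wav = processor.audioBufferToWav(mixed);       // 16-bit PCM WAV for the muxing step

  await processor.close();
  return wav;
}
```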
@@ -1,79 +0,0 @@
- /**
-  * Browser-based audio/video muxing using FFmpeg.wasm (main thread)
-  * Loads core files from local public/ffmpeg directory
-  */
-
- export interface MuxerOptions {
-   videoBlob: Blob;
-   audioBuffer: ArrayBuffer;
-   fps: number;
-   width: number;
-   height: number;
- }
-
- /**
-  * Mux audio and video using FFmpeg.wasm in main thread
-  * Core files loaded from /ffmpeg/ (no CDN, no CORS issues)
-  */
- export async function muxAudioVideo(options: MuxerOptions): Promise<Blob> {
-   try {
-     console.log('🎬 Starting FFmpeg.wasm muxing (main thread)...');
-
-     // Import from installed packages (bundled by Vite)
-     const { FFmpeg } = await import('@ffmpeg/ffmpeg');
-     const { fetchFile, toBlobURL } = await import('@ffmpeg/util');
-
-     const ffmpeg = new FFmpeg();
-
-     ffmpeg.on('log', ({ message }) => {
-       console.log('[FFmpeg]', message);
-     });
-
-     ffmpeg.on('progress', ({ progress }) => {
-       console.log(`[FFmpeg] Progress: ${(progress * 100).toFixed(1)}%`);
-     });
-
-     console.log('[FFmpeg] Loading core from /ffmpeg/...');
-
-     // Load from LOCAL files in public/ffmpeg (no CDN!)
-     // Note: FFmpeg 0.12.x has worker embedded in core.js, no separate workerURL needed
-     await ffmpeg.load({
-       coreURL: await toBlobURL('/ffmpeg/ffmpeg-core.js', 'text/javascript'),
-       wasmURL: await toBlobURL('/ffmpeg/ffmpeg-core.wasm', 'application/wasm'),
-     });
-
-     console.log('✅ FFmpeg.wasm loaded');
-
-     // Write input files
-     console.log('[FFmpeg] Writing input files...');
-     await ffmpeg.writeFile('video.mp4', await fetchFile(options.videoBlob));
-     await ffmpeg.writeFile('audio.wav', new Uint8Array(options.audioBuffer));
-
-     console.log('[FFmpeg] Muxing audio and video...');
-
-     // Mux video and audio
-     await ffmpeg.exec([
-       '-i', 'video.mp4',
-       '-i', 'audio.wav',
-       '-c:v', 'copy',
-       '-c:a', 'aac',
-       '-b:a', '192k',
-       '-shortest',
-       'output.mp4'
-     ]);
-
-     console.log('[FFmpeg] Reading output file...');
-
-     // Read output
-     const data = await ffmpeg.readFile('output.mp4');
-     const muxedBlob = new Blob([data], { type: 'video/mp4' });
-
-     console.log(`✅ Muxed video with audio: ${(muxedBlob.size / 1024 / 1024).toFixed(2)} MB`);
-
-     return muxedBlob;
-   } catch (error) {
-     console.error('❌ FFmpeg.wasm muxing failed:', error);
-     console.warn('⚠️ Returning video-only');
-     return options.videoBlob;
-   }
- }
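And a matching sketch for the removed muxer: the WAV produced by the audio module is combined with a recorded video blob into an MP4. The `finalizeExport` wrapper, the frame rate, and the dimensions are placeholder assumptions; per the code above, `muxAudioVideo` itself falls back to the video-only blob if FFmpeg.wasm fails to load or the mux step throws.

```ts
import { muxAudioVideo } from '@twick/browser-render'; // assumed export path

// Hypothetical caller: mux the recorded video with the generated WAV track.
async function finalizeExport(videoBlob: Blob, wav: ArrayBuffer): Promise<Blob> {
  return muxAudioVideo({
    videoBlob,
    audioBuffer: wav,
    fps: 30,       // assumed output frame rate
    width: 1920,   // assumed output dimensions (not used by the mux command itself)
    height: 1080,
  });
}
```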