vueseq 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +148 -53
- package/bin/cli.js +190 -70
- package/package.json +8 -3
- package/src/index.js +3 -2
- package/src/renderer/capture.js +185 -0
- package/src/renderer/encode-optimized.js +317 -0
- package/src/renderer/encode-parallel.js +320 -0
- package/src/renderer/encode.js +285 -63
- package/src/renderer/ffmpeg-encode.js +70 -0
- package/src/renderer/gpu.js +423 -0
- package/src/renderer/render.js +145 -75
- package/src/runtime/gsap-bridge.js +11 -1
package/src/renderer/encode-parallel.js
ADDED
@@ -0,0 +1,320 @@
+/**
+ * Parallel Frame Capture Worker (Distributed Capture in Batches)
+ *
+ * Architecture: Scatter-Gather
+ * 1. Workers (xN): Capture DOM -> JPEG DataURL (Fast & Light)
+ * 2. Node.js: Aggregates frames buffers
+ * 3. Main Page: Receives JPEGs -> Decodes (Parallel) -> Encodes to MP4
+ */
+
+import { chromium } from 'playwright'
+import { createVideoServer } from '../bundler/vite.js'
+import { readFile, writeFile } from 'fs/promises'
+import { getTimelineDuration } from './render.js'
+import { join } from 'path'
+import { cpus } from 'os'
+import { getOptimalChromiumConfig } from './gpu.js'
+import { getHtml2CanvasScript } from './capture.js'
+
+const DEFAULT_WORKERS = Math.max(1, cpus().length)
+
+// In-memory buffer limit (frames)
+// Increased to 300 to absorb bursts (10s at 30fps)
+const MAX_BUFFERED_FRAMES = 300
+
+/**
+ * Inject required libraries into a page
+ */
+async function injectLibraries(page, { width, height }) {
+  // Inject Mediabunny
+  const mediabunnyPath = join(
+    process.cwd(),
+    'node_modules',
+    'mediabunny',
+    'dist',
+    'bundles',
+    'mediabunny.cjs',
+  )
+  const mediabunnyCode = await readFile(mediabunnyPath, 'utf-8')
+  await page.addScriptTag({ content: mediabunnyCode })
+
+  // Inject html2canvas
+  const html2canvasScript = await getHtml2CanvasScript()
+  await page.addScriptTag({ content: html2canvasScript })
+
+  // Setup capture infrastructure
+  await page.evaluate(
+    ({ width, height }) => {
+      const captureCanvas = document.createElement('canvas')
+      captureCanvas.width = width
+      captureCanvas.height = height
+      window.__VUESEQ_CAPTURE_CANVAS__ = captureCanvas
+
+      window.__VUESEQ_CAPTURE_OPTIONS__ = {
+        canvas: captureCanvas,
+        width,
+        height,
+        scale: 1,
+        useCORS: true,
+        allowTaint: true,
+        backgroundColor: null,
+        logging: false,
+        imageTimeout: 0,
+        removeContainer: true,
+        foreignObjectRendering: false,
+      }
+    },
+    { width, height },
+  )
+}
+
+/**
+ * Capture frame and return JPEG Data URL
+ */
+async function captureFrameJPEG(page, timestamp) {
+  return await page.evaluate(
+    async ({ timestamp }) => {
+      // Seek
+      window.__VUESEQ_SEEK__(timestamp)
+      await new Promise((resolve) => requestAnimationFrame(resolve))
+
+      // Capture
+      await html2canvas(document.body, window.__VUESEQ_CAPTURE_OPTIONS__)
+
+      const canvas = window.__VUESEQ_CAPTURE_CANVAS__
+      // JPEG 0.95 is visually indistinguishable for video source but much faster/smaller than PNG
+      return canvas.toDataURL('image/jpeg', 0.95)
+    },
+    { timestamp },
+  )
+}
+
+/**
+ * Render to MP4 using parallel frame capture
+ */
+export async function renderToMp4Parallel(options) {
+  const {
+    input,
+    output = './output.mp4',
+    fps = 30,
+    duration: providedDuration,
+    width = 1920,
+    height = 1080,
+    workers: providedWorkers,
+    monitorMemory = false,
+    onProgress,
+  } = options
+
+  // Determine Logic
+  let numWorkers = providedWorkers ? parseInt(providedWorkers, 10) : DEFAULT_WORKERS
+  if (!providedWorkers) {
+    console.log(` Auto-detected ${numWorkers} CPU cores (Use --workers N to override)`)
+  }
+  console.log(` Initializing ${numWorkers} capture workers...`)
+
+  // 1. Setup
+  let duration = providedDuration
+  if (!duration || duration <= 0) {
+    duration = await getTimelineDuration({ input, width, height })
+    if (!duration) throw new Error('Could not auto-detect duration.')
+  }
+  const totalFrames = Math.ceil(duration * fps)
+
+  const { url, cleanup: cleanupServer } = await createVideoServer({
+    input,
+    width,
+    height,
+  })
+
+  const gpuConfig = await getOptimalChromiumConfig()
+  const launchOptions = {
+    headless: gpuConfig.headless,
+    args: gpuConfig.args,
+  }
+  if (gpuConfig.channel) {
+    launchOptions.channel = gpuConfig.channel
+  }
+  // Disable queueing on Node side
+  const browser = await chromium.launch(launchOptions)
+
+  try {
+    const context = await browser.newContext({
+      viewport: { width, height },
+      deviceScaleFactor: 1,
+    })
+
+    // 2. Initialize Pages
+    const pages = []
+
+    // Create Encoder Page (Main)
+    const encoderPage = await context.newPage()
+    await encoderPage.goto(url, { waitUntil: 'networkidle' })
+    await injectLibraries(encoderPage, { width, height })
+    pages.push(encoderPage)
+
+    // Create Worker Pages
+    for (let i = 0; i < numWorkers; i++) {
+      const page = await context.newPage()
+      await page.goto(url, { waitUntil: 'networkidle' })
+      await page.waitForFunction(() => window.__VUESEQ_READY__ === true)
+      await injectLibraries(page, { width, height })
+      pages.push(page)
+    }
+
+    // Initialize Encoder on Main Page
+    await encoderPage.evaluate(async ({ width, height, fps }) => {
+      const { Output, Mp4OutputFormat, BufferTarget, QUALITY_HIGH, CanvasSource } = window.Mediabunny
+      window.__VUESEQ_OUTPUT__ = new Output({
+        format: new Mp4OutputFormat(),
+        target: new BufferTarget(),
+      })
+      const canvas = document.createElement('canvas')
+      canvas.width = width
+      canvas.height = height
+      window.__VUESEQ_ENCODE_CANVAS__ = canvas
+      window.__VUESEQ_CTX__ = canvas.getContext('2d', { alpha: false })
+
+      window.__VUESEQ_VIDEO_SOURCE__ = new CanvasSource(canvas, {
+        codec: 'avc',
+        bitrate: QUALITY_HIGH,
+      })
+      window.__VUESEQ_OUTPUT__.addVideoTrack(window.__VUESEQ_VIDEO_SOURCE__)
+      await window.__VUESEQ_OUTPUT__.start()
+
+      // Helper for pipelined loading
+      window.loadAndEncode = async (frames) => {
+        // 1. Parallel Load (Decode JPEGs)
+        const images = await Promise.all(frames.map(frame => {
+          return new Promise((resolve) => {
+            const img = new Image()
+            img.onload = () => resolve({ img, timestamp: frame.timestamp })
+            img.src = frame.dataUrl
+          })
+        }))

+        // 2. Sequential Encode
+        const ctx = window.__VUESEQ_CTX__
+        const duration = 1 / fps
+        for (const { img, timestamp } of images) {
+          ctx.drawImage(img, 0, 0)
+          await window.__VUESEQ_VIDEO_SOURCE__.add(timestamp, duration)
+        }
+      }
+    }, { width, height, fps })
+
+
+    // 3. Orchestrate
+    const frameBuffer = new Map()
+    let nextEncodeFrame = 0
+    let encodedCount = 0
+
+    // Batch size for transfer to encoder (reduces IPC overhead)
+    const ENCODE_BATCH_SIZE = 5
+
+    const frameAssignments = Array.from({ length: numWorkers }, () => [])
+    for (let i = 0; i < totalFrames; i++) {
+      frameAssignments[i % numWorkers].push(i)
+    }
+
+    // Function to process the encode queue in Batches
+    const processEncodeQueue = async () => {
+      const batch = []
+
+      // Collect available sequential frames
+      let lookahead = nextEncodeFrame
+      while (frameBuffer.has(lookahead) && batch.length < ENCODE_BATCH_SIZE) {
+        batch.push({
+          dataUrl: frameBuffer.get(lookahead),
+          timestamp: lookahead / fps
+        })
+        frameBuffer.delete(lookahead)
+        lookahead++
+      }
+
+      if (batch.length > 0) {
+        // Send batch to encoder
+        await encoderPage.evaluate(async ({ frames, fps }) => {
+          await window.loadAndEncode(frames)
+        }, { frames: batch, fps })
+
+        nextEncodeFrame = lookahead
+        encodedCount += batch.length
+
+        if (onProgress) {
+          onProgress({
+            frame: encodedCount,
+            total: totalFrames,
+            percent: Math.round((encodedCount / totalFrames) * 100)
+          })
+        }
+      }
+    }
+
+    // Worker Loop
+    const runWorker = async (workerIndex) => {
+      const page = pages[workerIndex + 1]
+      const frames = frameAssignments[workerIndex]
+
+      for (const frameIndex of frames) {
+        // Flow Control
+        if (frameBuffer.size > MAX_BUFFERED_FRAMES) {
+          // Log only once per wait cycle to avoid spam
+          // console.log(`[Worker ${workerIndex}] Buffer full. Waiting...`)
+          while (frameBuffer.size > MAX_BUFFERED_FRAMES - ENCODE_BATCH_SIZE) {
+            // Wait until space clears up roughly one batch
+            await new Promise(r => setTimeout(r, 50))
+            // Try to drain queue while waiting (if main thread is free here)
+            await processEncodeQueue()
+          }
+        }
+
+        const timestamp = frameIndex / fps
+        const dataUrl = await captureFrameJPEG(page, timestamp)
+
+        frameBuffer.set(frameIndex, dataUrl)
+
+        // Try to encode available frames
+        await processEncodeQueue()
+      }
+    }
+
+    console.log(` Starting parallel capture...`)
+
+    // Run all workers
+    await Promise.all(Array.from({ length: numWorkers }, (_, i) => runWorker(i)))
+
+    // Ensure all remaining frames are encoded
+    while (encodedCount < totalFrames) {
+      await processEncodeQueue()
+      if (encodedCount < totalFrames) await new Promise(r => setTimeout(r, 100))
+    }
+
+    console.log(` Encoding complete. Finalizing MP4...`)
+
+    // Finalize
+    console.log(` Transferring video data (Base64)...`)
+    const base64Data = await encoderPage.evaluate(async () => {
+      window.__VUESEQ_VIDEO_SOURCE__.close()
+      await window.__VUESEQ_OUTPUT__.finalize()
+      const buffer = window.__VUESEQ_OUTPUT__.target.buffer
+
+      // Fast conversion to Base64 via Blob (avoids massive Array serialization overhead)
+      const blob = new Blob([buffer], { type: 'video/mp4' })
+      return new Promise((resolve) => {
+        const reader = new FileReader()
+        reader.onloadend = () => resolve(reader.result)
+        reader.readAsDataURL(blob)
+      })
+    })
+
+    const buffer = Buffer.from(base64Data.split(',')[1], 'base64')
+    await writeFile(output, buffer)
+    return output
+
+  } finally {
+    await browser.close()
+    await cleanupServer()
+  }
+}
+
+export { renderToMp4Parallel as renderParallel }
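For context on how the new module is meant to be called: a minimal usage sketch (not part of the diff), based on the options renderToMp4Parallel destructures and the progress payload it emits above. The import specifier is an assumption, since this diff does not show whether the package entry point re-exports the function.

// Hypothetical usage sketch; import path assumed, not shown in this diff.
import { renderToMp4Parallel } from 'vueseq/src/renderer/encode-parallel.js'

await renderToMp4Parallel({
  input: '/absolute/path/to/Video.vue', // the Video.vue component to render
  output: './output.mp4',
  fps: 30,                              // duration is auto-detected when omitted
  width: 1920,
  height: 1080,
  workers: 4,                           // defaults to the CPU core count
  onProgress: ({ frame, total, percent }) =>
    console.log(`encoded ${frame}/${total} (${percent}%)`),
})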
package/src/renderer/encode.js
CHANGED
@@ -1,92 +1,314 @@
/**
- *
- *
- * Encodes
- *
+ * Video Encoder using WebCodecs API + Mediabunny
+ *
+ * Encodes video directly in the browser using WebCodecs API.
+ * No FFmpeg required - all encoding happens via hardware-accelerated WebCodecs.
 */

-import {
+import { chromium } from 'playwright'
+import { createVideoServer } from '../bundler/vite.js'
+import { readFile, writeFile } from 'fs/promises'
+import { getTimelineDuration } from './render.js'
import { join } from 'path'
+import { getOptimalChromiumConfig, checkGPUAcceleration } from './gpu.js'
+
+// GPU configuration is now handled by the gpu.js module
+// which auto-detects the best backend for the current system

/**
- *
- * @param {
- * @param {string} options.framesDir - Directory containing frame-XXXXX.png files
- * @param {string} options.output - Output video file path
- * @param {number} [options.fps=30] - Frames per second
- * @returns {Promise<string>} - Path to the output video
+ * Inject Mediabunny library into the page
+ * @param {import('playwright').Page} page
 */
-
-
-
-
-
-
-
-
-
-
-
-
-
-const ffmpeg = spawn('ffmpeg', args, {
-stdio: ['ignore', 'pipe', 'pipe']
-})
+async function injectMediabunny(page) {
+  const libPath = join(
+    process.cwd(),
+    'node_modules',
+    'mediabunny',
+    'dist',
+    'bundles',
+    'mediabunny.cjs',
+  )
+  const libCode = await readFile(libPath, 'utf-8')
+  await page.addScriptTag({ content: libCode })
+}

-
-
-
-
+/**
+ * Initialize Mediabunny encoder in the browser context
+ * @param {import('playwright').Page} page
+ * @param {Object} config
+ */
+async function initializeEncoder(page, { width, height, fps }) {
+  await page.evaluate(
+    async ({ width, height, fps }) => {
+      const {
+        Output,
+        Mp4OutputFormat,
+        BufferTarget,
+        QUALITY_HIGH,
+        CanvasSource,
+      } = window.Mediabunny

-
-
-
-
-
-}
-})
+      // Create output with MP4 format
+      window.__VUESEQ_OUTPUT__ = new Output({
+        format: new Mp4OutputFormat(),
+        target: new BufferTarget(),
+      })

-
-
-
-
-
-
-
-
-
-
-
-
+      // Store canvas for reuse
+      const canvas = document.createElement('canvas')
+      canvas.width = width
+      canvas.height = height
+      window.__VUESEQ_CANVAS__ = canvas
+
+      // Create CanvasSource with encoding config
+      window.__VUESEQ_VIDEO_SOURCE__ = new CanvasSource(canvas, {
+        codec: 'avc',
+        bitrate: QUALITY_HIGH,
+      })
+
+      window.__VUESEQ_OUTPUT__.addVideoTrack(window.__VUESEQ_VIDEO_SOURCE__)
+      window.__VUESEQ_FPS__ = fps
+
+      await window.__VUESEQ_OUTPUT__.start()
+    },
+    { width, height, fps },
+  )
+}
+
+/**
+ * Encode a single frame using Mediabunny
+ * @param {import('playwright').Page} page
+ * @param {Buffer} imageBuffer - PNG image buffer from Playwright
+ * @param {number} frameIndex - Current frame index
+ */
+async function encodeFrame(page, imageBuffer, frameIndex) {
+  // Convert buffer to base64 for transmission to browser
+  const base64Data = imageBuffer.toString('base64')
+  const dataUrl = `data:image/png;base64,${base64Data}`
+
+  await page.evaluate(
+    async ({ dataUrl, frameIndex, fps }) => {
+      // Load image
+      const img = new Image()
+      await new Promise((resolve, reject) => {
+        img.onload = resolve
+        img.onerror = reject
+        img.src = dataUrl
+      })
+
+      // Draw to canvas
+      const canvas = window.__VUESEQ_CANVAS__
+      const ctx = canvas.getContext('2d')
+      ctx.drawImage(img, 0, 0)
+
+      // Add frame to video source
+      // Timestamp in seconds
+      const timestamp = frameIndex / fps
+      const frameDuration = 1 / fps
+      await window.__VUESEQ_VIDEO_SOURCE__.add(timestamp, frameDuration)
+    },
+    {
+      dataUrl,
+      frameIndex,
+      fps: await page.evaluate(() => window.__VUESEQ_FPS__),
+    },
+  )
+}
+
+/**
+ * Finalize encoding and retrieve the MP4 buffer
+ * @param {import('playwright').Page} page
+ * @returns {Promise<number[]>} - Array of bytes
+ */
+async function finalizeEncoding(page) {
+  console.log(' Transferring video data (Base64)...')
+  return await page.evaluate(async () => {
+    window.__VUESEQ_VIDEO_SOURCE__.close()
+    await window.__VUESEQ_OUTPUT__.finalize()
+    const buffer = window.__VUESEQ_OUTPUT__.target.buffer
+
+    // Cleanup
+    delete window.__VUESEQ_OUTPUT__
+    delete window.__VUESEQ_CANVAS__
+    delete window.__VUESEQ_VIDEO_SOURCE__
+    delete window.__VUESEQ_FPS__
+
+    // Fast conversion via Blob
+    const blob = new Blob([buffer], { type: 'video/mp4' })
+    return new Promise((resolve) => {
+      const reader = new FileReader()
+      reader.onloadend = () => resolve(reader.result)
+      reader.readAsDataURL(blob)
    })
+  })
}

/**
- * Render a Vue component to MP4 video
+ * Render a Vue component to MP4 video using WebCodecs API via Mediabunny
 * @param {Object} options
 * @param {string} options.input - Absolute path to the Video.vue component
 * @param {string} [options.output='./output.mp4'] - Output video file path
 * @param {number} [options.fps=30] - Frames per second
- * @param {number} options.duration - Duration in seconds
+ * @param {number} options.duration - Duration in seconds (auto-detected if not provided)
 * @param {number} [options.width=1920] - Video width in pixels
 * @param {number} [options.height=1080] - Video height in pixels
 * @param {function} [options.onProgress] - Progress callback
 * @returns {Promise<string>} - Path to the output video
 */
export async function renderToMp4(options) {
-
-
+  const {
+    input,
+    output = './output.mp4',
+    fps = 30,
+    duration: providedDuration,
+    width = 1920,
+    height = 1080,
+    onProgress,
+  } = options
+
+  // Auto-detect duration if not provided
+  let duration = providedDuration
+  if (!duration || duration <= 0) {
+    duration = await getTimelineDuration({ input, width, height })
+    if (!duration || duration <= 0) {
+      throw new Error(
+        'Could not auto-detect duration. Specify duration manually.',
+      )
+    }
+  }
+
+  const totalFrames = Math.ceil(duration * fps)
+
+  // Start Vite server
+  const { url, cleanup: cleanupServer } = await createVideoServer({
+    input,
+    width,
+    height,
+  })

-
+  // Launch headless browser with optimal GPU config
+  // The gpu.js module auto-detects the best backend (Vulkan, Metal, D3D11, etc.)
+  // and uses the new headless mode for GPU passthrough
+  const gpuConfig = await getOptimalChromiumConfig()
+  const launchOptions = {
+    headless: gpuConfig.headless,
+    args: gpuConfig.args,
+  }
+  if (gpuConfig.channel) {
+    launchOptions.channel = gpuConfig.channel
+  }
+  const browser = await chromium.launch(launchOptions)

-
-
-
-
-
+  const context = await browser.newContext({
+    viewport: { width, height },
+    deviceScaleFactor: 1,
+  })
+
+  const page = await context.newPage()
+
+  try {
+    // Load the page
+    await page.goto(url, { waitUntil: 'networkidle' })
+
+    // Wait for VueSeq bridge to be ready
+    await page.waitForFunction(() => window.__VUESEQ_READY__ === true, {
+      timeout: 30000,
+    })
+
+    // Give Vue a moment to mount and GSAP to set up timelines
+    await page.waitForTimeout(100)
+
+    // Inject Mediabunny library
+    await injectMediabunny(page)
+
+    // Initialize encoder
+    await initializeEncoder(page, { width, height, fps })
+
+    // Render and encode each frame
+    for (let frame = 0; frame < totalFrames; frame++) {
+      const timeInSeconds = frame / fps
+
+      // Seek GSAP to exact time
+      await page.evaluate(async (t) => {
+        window.__VUESEQ_SEEK__(t)
+        await new Promise((resolve) => requestAnimationFrame(resolve))
+      }, timeInSeconds)
+
+      // Take screenshot
+      const screenshotBuffer = await page.screenshot({ type: 'png' })
+
+      // Encode the frame
+      await encodeFrame(page, screenshotBuffer, frame)
+
+      // Progress callback
+      if (onProgress) {
+        onProgress({
+          frame,
+          total: totalFrames,
+          timeInSeconds,
+          percent: Math.round(((frame + 1) / totalFrames) * 100),
        })
-
-} finally {
-await cleanup()
+      }
    }
+
+    // Finalize and get MP4 data (Base64)
+    const base64Data = await finalizeEncoding(page)
+
+    // Write the MP4 file
+    const buffer = Buffer.from(base64Data.split(',')[1], 'base64')
+    await writeFile(output, buffer)
+
+    return output
+  } finally {
+    await browser.close()
+    await cleanupServer()
+  }
}
+
+/**
+ * Check GPU hardware acceleration status
+ * Re-exported from gpu.js module
+ */
+export { checkGPUAcceleration } from './gpu.js'
+
+/**
+ * Check if WebCodecs API is supported in the current environment
+ * @returns {Promise<boolean>}
+ */
+export async function isWebCodecsSupported() {
+  try {
+    const gpuConfig = await getOptimalChromiumConfig()
+    const launchOptions = {
+      headless: gpuConfig.headless,
+      args: gpuConfig.args,
+    }
+    if (gpuConfig.channel) {
+      launchOptions.channel = gpuConfig.channel
+    }
+    const browser = await chromium.launch(launchOptions)
+    const context = await browser.newContext()
+    const page = await context.newPage()
+
+    const supported = await page.evaluate(() => {
+      return (
+        typeof VideoEncoder !== 'undefined' &&
+        typeof VideoFrame !== 'undefined' &&
+        typeof VideoEncoder.isConfigSupported === 'function'
+      )
+    })
+
+    await browser.close()
+    return supported
+  } catch {
+    return false
+  }
+}
+
+// Optimized encoder using in-browser capture (no PNG overhead)
+export { renderToMp4Optimized, benchmarkRenderMethods } from './encode-optimized.js'
+
+// Legacy FFmpeg-based encoding (kept for compatibility if needed)
+// TODO: Remove in future version
+export { encodeVideo } from './ffmpeg-encode.js'
+
+
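For reference, a minimal usage sketch of the reworked encoder API (not part of the diff), based only on the signatures shown above: renderToMp4, its onProgress payload, and the new isWebCodecsSupported export. The import specifier is an assumption; the package entry point may expose these under different paths.

// Hypothetical usage sketch; import path assumed, not shown in this diff.
import { renderToMp4, isWebCodecsSupported } from 'vueseq/src/renderer/encode.js'

if (!(await isWebCodecsSupported())) {
  throw new Error('WebCodecs is not available in the bundled Chromium')
}

await renderToMp4({
  input: '/absolute/path/to/Video.vue', // absolute path to the Video.vue component
  output: './output.mp4',
  fps: 30,                              // duration is auto-detected when omitted
  width: 1920,
  height: 1080,
  onProgress: ({ frame, total, percent }) =>
    console.log(`frame ${frame + 1}/${total} (${percent}%)`),
})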