@helios-project/player 0.48.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +148 -0
- package/dist/bridge.d.ts +2 -0
- package/dist/bridge.js +169 -0
- package/dist/controllers.d.ts +91 -0
- package/dist/controllers.js +224 -0
- package/dist/features/audio-utils.d.ts +10 -0
- package/dist/features/audio-utils.js +66 -0
- package/dist/features/dom-capture.d.ts +1 -0
- package/dist/features/dom-capture.js +253 -0
- package/dist/features/exporter.d.ts +18 -0
- package/dist/features/exporter.js +228 -0
- package/dist/features/srt-parser.d.ts +7 -0
- package/dist/features/srt-parser.js +75 -0
- package/dist/features/text-tracks.d.ts +40 -0
- package/dist/features/text-tracks.js +99 -0
- package/dist/helios-player.bundle.mjs +7775 -0
- package/dist/helios-player.global.js +633 -0
- package/dist/index.d.ts +166 -0
- package/dist/index.js +1679 -0
- package/package.json +57 -0
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
/**
 * Collects every <audio> element in `doc` and fetches its media data.
 *
 * Returns a Promise resolving to one descriptor per tag:
 *   { buffer, mimeType, volume, muted, loop, startTime }
 * Tags without a `src`, and tags whose fetch fails, yield a silent
 * placeholder `{ buffer: new ArrayBuffer(0), mimeType: null }` so the
 * caller can always index results positionally against the tags.
 *
 * @param doc Document (or document-like object) to scan for <audio> tags.
 * @returns Promise<Array<object>> audio asset descriptors.
 */
export async function getAudioAssets(doc) {
    const audioTags = Array.from(doc.querySelectorAll('audio'));
    return Promise.all(audioTags.map(async (tag) => {
        if (!tag.src)
            return { buffer: new ArrayBuffer(0), mimeType: null };
        try {
            const res = await fetch(tag.src);
            // BUGFIX: without this check an HTTP error response (404/500 page)
            // was returned as if it were audio data; decodeAudioData would then
            // fail later with a confusing error. Treat it as a fetch failure.
            if (!res.ok)
                throw new Error(`HTTP ${res.status} ${res.statusText}`);
            const volumeAttr = tag.getAttribute('volume');
            return {
                buffer: await res.arrayBuffer(),
                mimeType: res.headers.get('content-type'),
                // Support non-standard 'volume' attribute for declarative usage, fallback to DOM property
                volume: volumeAttr !== null ? parseFloat(volumeAttr) : tag.volume,
                muted: tag.muted,
                loop: tag.loop,
                // data-start-time: seconds offset on the timeline; defaults to 0 (also for NaN)
                startTime: parseFloat(tag.getAttribute('data-start-time') || '0') || 0
            };
        }
        catch (e) {
            console.warn("Failed to fetch audio asset:", tag.src, e);
            return { buffer: new ArrayBuffer(0), mimeType: null };
        }
    }));
}
|
|
25
|
+
/**
 * Mixes a list of audio asset descriptors (see getAudioAssets) into a single
 * stereo AudioBuffer using an OfflineAudioContext.
 *
 * @param assets     Descriptors with { buffer, volume, muted, loop, startTime }.
 * @param duration   Output length in seconds; <= 0 yields a 1-sample silent buffer.
 * @param sampleRate Output sample rate in Hz.
 * @param rangeStart Export window start in seconds; clips beginning before it
 *                   are trimmed so playback lines up with the window.
 * @returns Promise<AudioBuffer> rendered stereo mix.
 * @throws Error when OfflineAudioContext is unavailable.
 */
export async function mixAudio(assets, duration, sampleRate, rangeStart = 0) {
    if (typeof OfflineAudioContext === 'undefined') {
        throw new Error("OfflineAudioContext not supported in this environment");
    }
    if (duration <= 0) {
        // Degenerate request: render a single silent sample instead of erroring.
        const silentCtx = new OfflineAudioContext(2, 1, sampleRate);
        return silentCtx.startRendering();
    }
    const totalSamples = Math.ceil(duration * sampleRate);
    const mixCtx = new OfflineAudioContext(2, totalSamples, sampleRate);
    for (const clip of assets) {
        if (clip.buffer.byteLength === 0)
            continue;
        try {
            // slice(0) hands decodeAudioData a copy so the caller's buffer is not detached.
            const decoded = await mixCtx.decodeAudioData(clip.buffer.slice(0));
            const node = mixCtx.createBufferSource();
            node.buffer = decoded;
            node.loop = !!clip.loop;
            const gain = mixCtx.createGain();
            gain.gain.value = clip.muted ? 0 : (typeof clip.volume === 'number' ? clip.volume : 1);
            node.connect(gain);
            gain.connect(mixCtx.destination);
            // Position the clip relative to the export window.
            const clipStart = clip.startTime || 0;
            let when = clipStart - rangeStart;
            let skip = 0;
            if (when < 0) {
                // Clip began before the window: start immediately, skipping its head.
                skip = -when;
                when = 0;
            }
            node.start(when, skip);
        }
        catch (e) {
            console.warn("Failed to decode audio asset:", e);
        }
    }
    return mixCtx.startRendering();
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Rasterizes a live DOM element into an ImageBitmap by serializing it
 * (with assets inlined) into an SVG foreignObject and drawing that.
 */
export declare function captureDomToBitmap(element: HTMLElement): Promise<ImageBitmap>;
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
/**
 * Rasterizes a DOM element into an ImageBitmap.
 *
 * Strategy: clone the element, inline its mutable/external assets (images,
 * canvases, videos, stylesheets), serialize the clone into an SVG
 * <foreignObject>, load that SVG as an image, and convert it to a bitmap.
 *
 * @param element Element to capture; its ownerDocument's styles are included.
 * @returns Promise<ImageBitmap> snapshot of the element.
 * @throws Error when the generated SVG image fails to load.
 */
export async function captureDomToBitmap(element) {
    const doc = element.ownerDocument || document;
    // 1. Clone & inline assets (canvases/videos may replace the root clone itself).
    let clone = element.cloneNode(true);
    await inlineImages(clone);
    clone = inlineCanvases(element, clone);
    clone = inlineVideos(element, clone);
    // 2. Serialize the prepared clone to XHTML.
    const serializer = new XMLSerializer();
    const html = serializer.serializeToString(clone);
    // 3. Collect <style> tags (covers CSS-in-JS) with url() assets inlined,
    //    cloning each tag so attributes like id are preserved.
    const styleElements = Array.from(doc.querySelectorAll('style'));
    const inlineStylesPromises = styleElements.map(async (style) => {
        const css = style.textContent || '';
        const processed = await processCss(css, doc.baseURI);
        const styleClone = style.cloneNode(true);
        styleClone.textContent = processed;
        return styleClone.outerHTML;
    });
    const inlineStyles = (await Promise.all(inlineStylesPromises)).join('\n');
    // Fetch and inline external stylesheets as well.
    const externalStyles = await getExternalStyles(doc);
    const styles = externalStyles + '\n' + inlineStyles;
    // 4. Dimensions: scroll size captures full content; fall back to offset, then defaults.
    const width = element.scrollWidth || element.offsetWidth || 1920;
    const height = element.scrollHeight || element.offsetHeight || 1080;
    // 5. Wrap content in a block-level div inside a foreignObject.
    const svg = `
    <svg xmlns="http://www.w3.org/2000/svg" width="${width}" height="${height}">
      <foreignObject width="100%" height="100%">
        <div xmlns="http://www.w3.org/1999/xhtml" style="width: 100%; height: 100%;">
          ${styles}
          ${html}
        </div>
      </foreignObject>
    </svg>
  `;
    // 6. Load the SVG via an object URL and rasterize it.
    const blob = new Blob([svg], { type: 'image/svg+xml;charset=utf-8' });
    const url = URL.createObjectURL(blob);
    try {
        const img = new Image();
        await new Promise((resolve, reject) => {
            img.onload = () => resolve();
            img.onerror = () => reject(new Error('Failed to load SVG image for DOM capture'));
            img.src = url;
        });
        return await createImageBitmap(img);
    }
    finally {
        // BUGFIX: previously the object URL was only revoked on the success path,
        // leaking a blob URL whenever image load or bitmap creation failed.
        URL.revokeObjectURL(url);
    }
}
|
|
63
|
+
/**
 * Downloads every <link rel="stylesheet"> of `doc`, inlines url() assets via
 * processCss, and returns the results joined as <style> tag markup.
 * Stylesheets that fail to fetch contribute an empty string (with a warning).
 *
 * @param doc Document to scan for stylesheet links.
 * @returns Promise<string> concatenated <style> blocks.
 */
async function getExternalStyles(doc) {
    const linkTags = Array.from(doc.querySelectorAll('link[rel="stylesheet"]'));
    const inlined = await Promise.all(linkTags.map(async (link) => {
        // Links without an href contribute nothing.
        if (!link.href)
            return '';
        try {
            const response = await fetch(link.href);
            if (!response.ok)
                throw new Error(`HTTP ${response.status} ${response.statusText}`);
            const processed = await processCss(await response.text(), link.href);
            return `<style>/* ${link.href} */\n${processed}</style>`;
        }
        catch (e) {
            console.warn('Helios: Failed to inline stylesheet:', link.href, e);
            return '';
        }
    }));
    return inlined.join('\n');
}
|
|
84
|
+
/**
 * Rewrites every url(...) reference in a CSS string into a data: URI so the
 * stylesheet becomes self-contained. References that are already data: URIs,
 * or that fail to fetch, are left untouched (failures are logged).
 *
 * @param css     Raw CSS text.
 * @param baseUrl Base used to resolve relative url() references.
 * @returns Promise<string> CSS with inlined assets.
 */
async function processCss(css, baseUrl) {
    const urlRegex = /url\((?:['"]?)(.*?)(?:['"]?)\)/g;
    const found = Array.from(css.matchAll(urlRegex));
    // Resolve all fetches in parallel; each yields {original, replacement} or null.
    const resolved = await Promise.all(found.map(async (m) => {
        const token = m[0];
        const ref = m[1];
        if (ref.startsWith('data:'))
            return null;
        try {
            const absolute = new URL(ref, baseUrl).href;
            const dataUri = await fetchAsDataUri(absolute);
            return { original: token, replacement: `url("${dataUri}")` };
        }
        catch (e) {
            console.warn(`Helios: Failed to inline CSS asset: ${ref}`, e);
            return null;
        }
    }));
    // Apply substitutions; split/join replaces every occurrence of the token.
    let result = css;
    for (const entry of resolved) {
        if (entry === null)
            continue;
        result = result.split(entry.original).join(entry.replacement);
    }
    return result;
}
|
|
112
|
+
/**
 * Mutates `root` in place so its <img> sources and inline-style
 * background-image urls become data: URIs. Failed fetches are logged
 * and leave the original reference intact.
 *
 * @param root Element subtree to process (typically a detached clone).
 */
async function inlineImages(root) {
    const pending = [];
    // A. <img> tags: swap src for a data: URI unless already inlined.
    for (const img of Array.from(root.querySelectorAll('img'))) {
        if (!img.src || img.src.startsWith('data:'))
            continue;
        pending.push(fetchAsDataUri(img.src)
            .then((dataUri) => {
            img.src = dataUri;
        })
            .catch((e) => console.warn('Helios: Failed to inline image:', img.src, e)));
    }
    // B. Inline-style background images only (stylesheet rules are handled by processCss).
    for (const element of Array.from(root.querySelectorAll('[style*="background-image"]'))) {
        const bg = element.style.backgroundImage;
        if (!bg || !bg.includes('url('))
            continue;
        const match = bg.match(/url\(['"]?(.*?)['"]?\)/);
        if (!match || !match[1] || match[1].startsWith('data:'))
            continue;
        pending.push(fetchAsDataUri(match[1])
            .then((dataUri) => {
            // Replace only the matched URL so other layers (gradients, etc.) survive.
            element.style.backgroundImage = element.style.backgroundImage.replace(match[0], `url("${dataUri}")`);
        })
            .catch((e) => console.warn('Helios: Failed to inline background:', match[1], e)));
    }
    await Promise.all(pending);
}
|
|
144
|
+
/**
 * Fetches a resource and returns it encoded as a base64 data: URI.
 *
 * @param url Absolute or relative URL to fetch.
 * @returns Promise<string> data: URI (FileReader result).
 * @throws Error for non-2xx HTTP responses; rejects on network/parse failure.
 */
async function fetchAsDataUri(url) {
    const response = await fetch(url);
    if (!response.ok) {
        throw new Error(`HTTP ${response.status} ${response.statusText}`);
    }
    const payload = await response.blob();
    // FileReader handles the base64 encoding for us.
    return new Promise((resolve, reject) => {
        const encoder = new FileReader();
        encoder.onloadend = () => resolve(encoder.result);
        encoder.onerror = reject;
        encoder.readAsDataURL(payload);
    });
}
|
|
156
|
+
/**
 * Snapshots a live canvas into an <img> carrying over inline style, class,
 * id, and width/height attributes. Throws if toDataURL fails (e.g. a
 * tainted canvas); callers decide how to recover.
 */
function canvasSnapshotToImg(canvas) {
    const dataUri = canvas.toDataURL();
    const img = document.createElement('img');
    img.src = dataUri;
    img.style.cssText = canvas.style.cssText;
    img.className = canvas.className;
    if (canvas.id)
        img.id = canvas.id;
    if (canvas.hasAttribute('width'))
        img.setAttribute('width', canvas.getAttribute('width'));
    if (canvas.hasAttribute('height'))
        img.setAttribute('height', canvas.getAttribute('height'));
    return img;
}
/**
 * Replaces canvases in `clone` with <img> snapshots taken from the matching
 * live canvases in `original` (clones of canvases are blank, so pixel data
 * must come from the live tree). Handles the root element itself being a
 * canvas. Failures are logged and leave the affected canvas as-is.
 *
 * REFACTOR: the root-canvas and nested-canvas branches previously duplicated
 * the canvas-to-img conversion verbatim; it now lives in canvasSnapshotToImg.
 *
 * @param original Live element the clone was taken from.
 * @param clone    Detached clone to mutate.
 * @returns The (possibly replaced) clone.
 */
function inlineCanvases(original, clone) {
    // Root element is itself a canvas: return a replacement <img> node.
    if (original instanceof HTMLCanvasElement && clone instanceof HTMLCanvasElement) {
        try {
            return canvasSnapshotToImg(original);
        }
        catch (e) {
            console.warn('Helios: Failed to inline root canvas:', e);
            return clone;
        }
    }
    // Nested canvases: pair original/clone lists positionally.
    const originalCanvases = Array.from(original.querySelectorAll('canvas'));
    const clonedCanvases = Array.from(clone.querySelectorAll('canvas'));
    for (let i = 0; i < Math.min(originalCanvases.length, clonedCanvases.length); i++) {
        const target = clonedCanvases[i];
        try {
            const img = canvasSnapshotToImg(originalCanvases[i]);
            target.parentNode?.replaceChild(img, target);
        }
        catch (e) {
            console.warn('Helios: Failed to inline nested canvas:', e);
        }
    }
    return clone;
}
|
|
203
|
+
/**
 * Replaces <video> elements in `clone` with <img> snapshots of the current
 * frame of the matching live videos in `original`. Videos without decoded
 * frame data (readyState < 2) are skipped. Handles the root element itself
 * being a video.
 *
 * @param original Live element the clone was taken from.
 * @param clone    Detached clone to mutate.
 * @returns The (possibly replaced) clone.
 */
function inlineVideos(original, clone) {
    // Root element is itself a video.
    if (original instanceof HTMLVideoElement && clone instanceof HTMLVideoElement) {
        if (original.readyState >= 2) {
            const snapshot = videoToImage(original);
            if (snapshot)
                return snapshot;
        }
        return clone;
    }
    // Nested videos: pair original/clone lists positionally.
    const sourceVideos = Array.from(original.querySelectorAll('video'));
    const cloneVideos = Array.from(clone.querySelectorAll('video'));
    const count = Math.min(sourceVideos.length, cloneVideos.length);
    for (let idx = 0; idx < count; idx++) {
        const video = sourceVideos[idx];
        if (video.readyState < 2)
            continue; // no decoded frame available yet
        const snapshot = videoToImage(video);
        if (snapshot) {
            const target = cloneVideos[idx];
            target.parentNode?.replaceChild(snapshot, target);
        }
    }
    return clone;
}
|
|
227
|
+
/**
 * Draws the current frame of a <video> onto a scratch canvas and returns it
 * as an <img>, carrying over inline style, class, id, and width/height
 * attributes. Returns null when a 2d context is unavailable or drawing /
 * encoding fails (e.g. cross-origin tainting), logging a warning.
 *
 * @param video Video element with at least one decoded frame.
 * @returns HTMLImageElement snapshot, or null on failure.
 */
function videoToImage(video) {
    try {
        const scratch = document.createElement('canvas');
        // Fall back to the spec default canvas size when metadata is absent.
        scratch.width = video.videoWidth || 300;
        scratch.height = video.videoHeight || 150;
        const ctx = scratch.getContext('2d');
        if (!ctx)
            return null;
        ctx.drawImage(video, 0, 0, scratch.width, scratch.height);
        const img = document.createElement('img');
        img.src = scratch.toDataURL();
        img.style.cssText = video.style.cssText;
        img.className = video.className;
        if (video.id)
            img.id = video.id;
        if (video.hasAttribute('width'))
            img.setAttribute('width', video.getAttribute('width'));
        if (video.hasAttribute('height'))
            img.setAttribute('height', video.getAttribute('height'));
        return img;
    }
    catch (e) {
        console.warn('Helios: Failed to inline video:', e);
        return null;
    }
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { HeliosController } from "../controllers";
|
|
2
|
+
import { SubtitleCue } from "./srt-parser";
|
|
3
|
+
/**
 * Renders a Helios composition to a downloadable video file entirely in the
 * browser: frames are captured via the controller, optionally overlaid with
 * caption text, and muxed (with mixed audio) into MP4 or WebM.
 */
export declare class ClientSideExporter {
    private controller;
    private iframe;
    /** @param iframe Retained for API compatibility; see implementation. */
    constructor(controller: HeliosController, iframe: HTMLIFrameElement);
    /**
     * Runs the export and triggers a browser download on success.
     * @param options.onProgress Called with a 0..1 completion fraction per frame.
     * @param options.signal Aborting cancels the export without throwing to the caller.
     * @param options.mode 'auto' probes for a canvas and falls back to DOM capture.
     * @param options.canvasSelector Selector used to locate the canvas (default 'canvas').
     * @param options.format Container/codec choice: 'mp4' (avc/aac) or 'webm' (vp9/opus).
     * @param options.includeCaptions Burn captions into frames (default true).
     */
    export(options: {
        onProgress: (progress: number) => void;
        signal: AbortSignal;
        mode?: 'auto' | 'canvas' | 'dom';
        canvasSelector?: string;
        format?: 'mp4' | 'webm';
        includeCaptions?: boolean;
    }): Promise<void>;
    private drawCaptions;
    private download;
    /** Serializes cues to SubRip text and downloads it as `filename`. */
    saveCaptionsAsSRT(cues: SubtitleCue[], filename: string): void;
}
|
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
import { stringifySRT } from "./srt-parser";
|
|
2
|
+
import { Output, BufferTarget, Mp4OutputFormat, WebMOutputFormat, VideoSampleSource, AudioSampleSource, VideoSample, AudioSample } from "mediabunny";
|
|
3
|
+
import { mixAudio } from "./audio-utils";
|
|
4
|
+
/**
 * Client-side video exporter: pulls frames from a HeliosController, optionally
 * burns captions into each frame, mixes <audio> assets, and muxes everything
 * into an MP4 or WebM file via mediabunny, then triggers a browser download.
 */
export class ClientSideExporter {
    controller;
    iframe;
    constructor(controller, iframe // Kept for compatibility
    ) {
        this.controller = controller;
        this.iframe = iframe;
    }
    /**
     * Renders the composition (or its playbackRange sub-window) to a video file.
     * Progress is reported per encoded frame; aborting via `signal` exits
     * cleanly without rethrowing. Any other failure is logged and rethrown.
     */
    async export(options) {
        const { onProgress, signal, mode = 'auto', canvasSelector = 'canvas', format = 'mp4', includeCaptions = true } = options;
        console.log(`Client-side rendering started! Format: ${format}`);
        // Stop playback so frame-by-frame capture is deterministic.
        // NOTE(review): playback is not resumed afterwards — presumably intentional; confirm.
        this.controller.pause();
        try {
            const state = this.controller.getState();
            let startFrame = 0;
            let totalFrames = state.duration * state.fps;
            // A playbackRange of [start, end] (frames) restricts the export window.
            if (state.playbackRange && state.playbackRange.length === 2) {
                startFrame = state.playbackRange[0];
                const endFrame = state.playbackRange[1];
                totalFrames = endFrame - startFrame;
            }
            // 1. Determine effective mode: 'auto' probes for a canvas frame first.
            let effectiveMode = mode;
            if (effectiveMode === 'auto') {
                const result = await this.controller.captureFrame(startFrame, {
                    selector: canvasSelector,
                    mode: 'canvas'
                });
                if (result && result.frame) {
                    effectiveMode = 'canvas';
                    result.frame.close(); // probe frame is discarded
                }
                else {
                    effectiveMode = 'dom';
                    console.log("Canvas not found for auto export, falling back to DOM mode.");
                }
            }
            // 2. Capture the first frame to learn output dimensions.
            const firstResult = await this.controller.captureFrame(startFrame, {
                selector: canvasSelector,
                mode: effectiveMode
            });
            if (!firstResult || !firstResult.frame) {
                throw new Error(`Failed to capture first frame in mode: ${effectiveMode}`);
            }
            const { frame: firstFrame, captions: firstCaptions } = firstResult;
            const width = firstFrame.displayWidth;
            const height = firstFrame.displayHeight;
            // 3. Set up the mediabunny output (in-memory buffer target).
            const target = new BufferTarget();
            const outputFormat = format === 'webm' ? new WebMOutputFormat() : new Mp4OutputFormat();
            const output = new Output({
                format: outputFormat,
                target
            });
            // 4. Video track: vp9 for WebM, avc (H.264) for MP4.
            const videoConfig = {
                codec: format === 'webm' ? 'vp9' : 'avc',
                bitrate: 5_000_000
            };
            const videoSource = new VideoSampleSource(videoConfig);
            output.addVideoTrack(videoSource);
            // 5. Audio track is optional; failure to set it up only disables audio.
            let audioSource = null;
            let audioTracks = [];
            try {
                audioTracks = await this.controller.getAudioTracks();
                if (audioTracks && audioTracks.length > 0) {
                    const audioConfig = format === 'webm'
                        ? { codec: 'opus' }
                        : { codec: 'aac' };
                    audioSource = new AudioSampleSource(audioConfig);
                    output.addAudioTrack(audioSource);
                }
            }
            catch (e) {
                console.warn("Failed to setup audio:", e);
            }
            await output.start();
            // 6. Encode loop. First frame is always a keyframe; drawCaptions
            //    returns a new frame, so the raw frame is closed when captions
            //    were burned in, and the encoded frame is closed after add().
            let frameToEncode = firstFrame;
            if (includeCaptions && firstCaptions && firstCaptions.length > 0) {
                frameToEncode = await this.drawCaptions(firstFrame, firstCaptions);
                firstFrame.close();
            }
            await videoSource.add(new VideoSample(frameToEncode), { keyFrame: true });
            frameToEncode.close();
            onProgress(1 / totalFrames);
            for (let i = 1; i < totalFrames; i++) {
                if (signal.aborted) {
                    throw new Error("Export aborted");
                }
                const frameIndex = startFrame + i;
                const result = await this.controller.captureFrame(frameIndex, {
                    selector: canvasSelector,
                    mode: effectiveMode
                });
                if (!result || !result.frame) {
                    throw new Error(`Frame ${frameIndex} missing during export.`);
                }
                const { frame: videoFrame, captions } = result;
                // Force a keyframe every 2 seconds of output.
                const keyFrame = i % (state.fps * 2) === 0;
                let finalFrame = videoFrame;
                if (includeCaptions && captions && captions.length > 0) {
                    finalFrame = await this.drawCaptions(videoFrame, captions);
                    videoFrame.close();
                }
                await videoSource.add(new VideoSample(finalFrame), { keyFrame });
                finalFrame.close();
                onProgress((i + 1) / totalFrames);
            }
            // 7. Mix all audio assets for the export window into one stereo
            //    48 kHz buffer and add it as a single planar-float sample.
            if (audioSource && audioTracks.length > 0) {
                const durationInSeconds = totalFrames / state.fps;
                const rangeStartInSeconds = startFrame / state.fps;
                const audioBuffer = await mixAudio(audioTracks, durationInSeconds, 48000, rangeStartInSeconds);
                const c0 = audioBuffer.getChannelData(0);
                const c1 = audioBuffer.getChannelData(1);
                // f32-planar layout: channel 0 samples, then channel 1 samples.
                const planarData = new Float32Array(c0.length + c1.length);
                planarData.set(c0, 0);
                planarData.set(c1, c0.length);
                const sample = new AudioSample({
                    format: 'f32-planar',
                    sampleRate: 48000,
                    numberOfChannels: 2,
                    timestamp: 0,
                    data: planarData
                });
                await audioSource.add(sample);
            }
            await output.finalize();
            if (target.buffer) {
                this.download(target.buffer, format);
                console.log("Client-side rendering and download finished!");
            }
            else {
                throw new Error("Export failed: Output buffer is empty");
            }
        }
        catch (e) {
            // User-initiated abort is not an error condition.
            if (e.message === "Export aborted") {
                console.log("Export aborted by user.");
                return;
            }
            console.error("Client-side rendering failed:", e);
            throw e;
        }
    }
    /**
     * Returns a NEW VideoFrame with the given cues rendered as subtitle boxes
     * (white text on translucent black) stacked upward from the bottom margin.
     * The input frame is NOT closed here; the caller owns both frames.
     */
    async drawCaptions(frame, captions) {
        const width = frame.displayWidth;
        const height = frame.displayHeight;
        let canvas;
        let ctx;
        // Prefer OffscreenCanvas; fall back to a DOM canvas where unavailable.
        if (typeof OffscreenCanvas !== 'undefined') {
            canvas = new OffscreenCanvas(width, height);
            ctx = canvas.getContext('2d');
        }
        else {
            canvas = document.createElement('canvas');
            canvas.width = width;
            canvas.height = height;
            ctx = canvas.getContext('2d');
        }
        if (!ctx)
            throw new Error("Failed to create canvas context for captions");
        ctx.drawImage(frame, 0, 0);
        ctx.save();
        // Font scales with frame height (5%), clamped to a 16px minimum.
        const fontSize = Math.max(16, Math.round(height * 0.05));
        const padding = fontSize * 0.5;
        const lineHeight = fontSize * 1.2;
        const bottomMargin = height * 0.05;
        ctx.font = `${fontSize}px sans-serif`;
        ctx.textAlign = 'center';
        ctx.textBaseline = 'top';
        // Cues are drawn bottom-up, so iterate in reverse to keep the first
        // cue closest to the bottom of the frame.
        let currentBottomY = height - bottomMargin;
        const reversedCaptions = [...captions].reverse();
        reversedCaptions.forEach(cue => {
            const lines = cue.text.split('\n');
            const cueHeight = lines.length * lineHeight + (padding * 2);
            // Background width follows the widest line of the cue.
            let maxLineWidth = 0;
            lines.forEach((line) => {
                const m = ctx.measureText(line);
                if (m.width > maxLineWidth)
                    maxLineWidth = m.width;
            });
            const bgWidth = maxLineWidth + (fontSize * 1.0);
            const bgTopY = currentBottomY - cueHeight;
            // Disable the text shadow while painting the background box.
            ctx.shadowColor = 'transparent';
            ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';
            ctx.fillRect((width / 2) - (bgWidth / 2), bgTopY, bgWidth, cueHeight);
            // Re-enable a subtle drop shadow for the text itself.
            ctx.shadowColor = 'black';
            ctx.shadowBlur = 2;
            ctx.shadowOffsetY = 1;
            ctx.fillStyle = 'white';
            lines.forEach((line, i) => {
                const y = bgTopY + padding + (i * lineHeight);
                ctx.fillText(line, width / 2, y);
            });
            // Stack the next cue above this one with a 4px gap.
            currentBottomY -= (cueHeight + 4);
        });
        ctx.restore();
        return new VideoFrame(canvas, { timestamp: frame.timestamp });
    }
    /** Wraps the encoded buffer in a Blob and triggers a browser download. */
    download(buffer, format) {
        const type = format === 'webm' ? "video/webm" : "video/mp4";
        const blob = new Blob([buffer], { type });
        const url = URL.createObjectURL(blob);
        const a = document.createElement("a");
        a.href = url;
        a.download = `video.${format}`;
        a.click();
        URL.revokeObjectURL(url);
    }
    /** Serializes cues to SubRip text and downloads it as `filename`. */
    saveCaptionsAsSRT(cues, filename) {
        const srtContent = stringifySRT(cues);
        const blob = new Blob([srtContent], { type: "text/srt" });
        const url = URL.createObjectURL(blob);
        const a = document.createElement("a");
        a.href = url;
        a.download = filename;
        a.click();
        URL.revokeObjectURL(url);
    }
}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
 * Parses SubRip (SRT) text into cue objects { startTime, endTime, text }
 * with times in seconds. Tolerates missing index lines, CRLF endings, and
 * either comma or dot millisecond separators. Malformed blocks are skipped.
 *
 * @param srt Raw SRT document text (may be empty/null).
 * @returns Array of cue objects in document order.
 */
export function parseSRT(srt) {
    if (!srt)
        return [];
    // "HH:MM:SS,mmm" (or ".mmm") -> seconds; NaN when not three colon fields.
    const toSeconds = (stamp) => {
        const pieces = stamp.replace(",", ".").split(":");
        if (pieces.length !== 3)
            return NaN;
        return parseFloat(pieces[0]) * 3600 + parseFloat(pieces[1]) * 60 + parseFloat(pieces[2]);
    };
    const timePattern = /(\d{1,2}:\d{2}:\d{2}[,.]\d{1,3})\s*-->\s*(\d{1,2}:\d{2}:\d{2}[,.]\d{1,3})/;
    const cues = [];
    // Normalize line endings, then treat blank-line-separated runs as blocks.
    for (const block of srt.trim().replace(/\r\n/g, "\n").split(/\n\n+/)) {
        const lines = block.split("\n");
        if (lines.length < 2)
            continue;
        // The timestamp line is normally line 2 (after the index), but the
        // index may be absent, so accept it on either of the first two lines.
        const timeLineIndex = lines.slice(0, 2).findIndex((line) => line.includes("-->"));
        if (timeLineIndex === -1)
            continue;
        const match = lines[timeLineIndex].match(timePattern);
        if (!match)
            continue;
        const startTime = toSeconds(match[1]);
        const endTime = toSeconds(match[2]);
        if (isNaN(startTime) || isNaN(endTime))
            continue;
        // Everything after the timestamp line is the cue text.
        cues.push({ startTime, endTime, text: lines.slice(timeLineIndex + 1).join("\n").trim() });
    }
    return cues;
}
|
|
42
|
+
/**
 * Converts an SRT timestamp "HH:MM:SS,mmm" (comma or dot before the
 * milliseconds) into seconds. Returns NaN when the string does not have
 * exactly three colon-separated fields.
 *
 * @param timeString Timestamp text.
 * @returns Seconds as a float, or NaN.
 */
function parseTime(timeString) {
    // Normalize the millisecond separator so parseFloat understands it.
    const fields = timeString.replace(",", ".").split(":");
    if (fields.length !== 3)
        return NaN;
    const [h, m, s] = fields.map((f) => parseFloat(f));
    return h * 3600 + m * 60 + s;
}
|
|
53
|
+
/**
 * Serializes cue objects { startTime, endTime, text } (seconds) into SubRip
 * text: 1-based index, "start --> end" line, text, blank separator.
 * Returns "" for a missing or empty cue list.
 *
 * @param cues Cues in output order.
 * @returns SRT document text.
 */
export function stringifySRT(cues) {
    if (!cues || cues.length === 0)
        return "";
    let out = "";
    cues.forEach((cue, index) => {
        const start = formatTime(cue.startTime);
        const end = formatTime(cue.endTime);
        out += `${index + 1}\n${start} --> ${end}\n${cue.text}\n\n`;
    });
    return out;
}
|
|
64
|
+
/**
 * Formats a duration in seconds as an SRT timestamp "HH:MM:SS,mmm",
 * rounding to the nearest millisecond.
 *
 * @param seconds Non-negative duration in seconds.
 * @returns Zero-padded SRT timestamp string.
 */
function formatTime(seconds) {
    // Work in integer milliseconds to avoid float drift in the field math.
    const totalMs = Math.round(seconds * 1000);
    const pad = (value, width) => String(value).padStart(width, "0");
    const hh = pad(Math.floor(totalMs / 3600000), 2);
    const mm = pad(Math.floor((totalMs % 3600000) / 60000), 2);
    const ss = pad(Math.floor((totalMs % 60000) / 1000), 2);
    const mmm = pad(totalMs % 1000, 3);
    return `${hh}:${mm}:${ss},${mmm}`;
}
|