@helios-project/player 0.48.3 → 0.76.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +73 -2
- package/dist/bridge.js +105 -4
- package/dist/controllers.d.ts +48 -5
- package/dist/controllers.js +175 -9
- package/dist/features/audio-context-manager.d.ts +20 -0
- package/dist/features/audio-context-manager.js +81 -0
- package/dist/features/audio-fader.d.ts +13 -0
- package/dist/features/audio-fader.js +118 -0
- package/dist/features/audio-metering.d.ts +28 -0
- package/dist/features/audio-metering.js +160 -0
- package/dist/features/audio-tracks.d.ts +42 -0
- package/dist/features/audio-tracks.js +104 -0
- package/dist/features/audio-utils.d.ts +8 -1
- package/dist/features/audio-utils.js +118 -23
- package/dist/features/caption-parser.d.ts +9 -0
- package/dist/features/caption-parser.js +140 -0
- package/dist/features/dom-capture.d.ts +4 -1
- package/dist/features/dom-capture.js +178 -67
- package/dist/features/exporter.d.ts +14 -5
- package/dist/features/exporter.js +103 -21
- package/dist/features/media-session.d.ts +12 -0
- package/dist/features/media-session.js +107 -0
- package/dist/features/text-tracks.d.ts +31 -3
- package/dist/features/text-tracks.js +140 -2
- package/dist/features/video-tracks.d.ts +43 -0
- package/dist/features/video-tracks.js +107 -0
- package/dist/helios-player.bundle.mjs +4700 -2968
- package/dist/helios-player.global.js +448 -212
- package/dist/index.d.ts +177 -4
- package/dist/index.js +1598 -151
- package/package.json +3 -3
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import { SharedAudioContextManager } from './audio-context-manager';
|
|
2
|
+
/**
 * Stereo level meter for all <audio>/<video> elements in a document.
 *
 * Routes each media element's shared source node through a 2-channel splitter
 * into per-channel analysers, and watches the DOM for media elements being
 * added or removed. Metering taps the shared source directly (pre-fader), so
 * activity is visible even while the element is muted.
 */
export class AudioMeter {
    ctx;
    splitter;
    analyserLeft;
    analyserRight;
    dataArrayLeft;
    dataArrayRight;
    // Media element -> shared source node, for every element we are tracking.
    sources = new Map();
    // When false, sources are tracked but NOT routed into the splitter.
    isEnabled = false;
    manager;
    observer = null;
    constructor() {
        this.manager = SharedAudioContextManager.getInstance();
        this.ctx = this.manager.context;
        this.splitter = this.ctx.createChannelSplitter(2);
        this.analyserLeft = this.ctx.createAnalyser();
        this.analyserRight = this.ctx.createAnalyser();
        // Use a small buffer for responsiveness
        this.analyserLeft.fftSize = 256;
        this.analyserRight.fftSize = 256;
        this.dataArrayLeft = new Float32Array(this.analyserLeft.fftSize);
        this.dataArrayRight = new Float32Array(this.analyserRight.fftSize);
        this.splitter.connect(this.analyserLeft, 0);
        this.splitter.connect(this.analyserRight, 1);
    }
    /**
     * Starts tracking media elements in `doc`: connects the <audio>/<video>
     * elements already present and observes the DOM for ones added or removed
     * later.
     */
    connect(doc) {
        if (this.ctx.state === 'suspended') {
            this.ctx.resume().catch(e => console.warn("AudioMeter: Failed to resume context", e));
        }
        // Fix: calling connect() more than once previously leaked the old
        // MutationObserver; tear it down before installing a new one.
        if (this.observer) {
            this.observer.disconnect();
            this.observer = null;
        }
        const scanElement = (el) => {
            if (el.tagName !== 'AUDIO' && el.tagName !== 'VIDEO')
                return;
            const mediaEl = el;
            this.connectElement(mediaEl);
        };
        // Initial scan
        const mediaElements = Array.from(doc.querySelectorAll('audio, video'));
        mediaElements.forEach(el => this.connectElement(el));
        // Observer for dynamic elements
        this.observer = new MutationObserver((mutations) => {
            mutations.forEach((mutation) => {
                mutation.addedNodes.forEach((node) => {
                    if (node.nodeType === Node.ELEMENT_NODE) {
                        const el = node;
                        // Check the element itself
                        scanElement(el);
                        // Check children if a container was added
                        const children = el.querySelectorAll('audio, video');
                        children.forEach(child => scanElement(child));
                    }
                });
                mutation.removedNodes.forEach((node) => {
                    if (node.nodeType === Node.ELEMENT_NODE) {
                        const el = node;
                        if (el.tagName === 'AUDIO' || el.tagName === 'VIDEO') {
                            this.disconnectElement(el);
                        }
                        // Also check children if a container was removed
                        const descendants = el.querySelectorAll('audio, video');
                        descendants.forEach(d => {
                            this.disconnectElement(d);
                        });
                    }
                });
            });
        });
        const target = doc.body || doc;
        this.observer.observe(target, { childList: true, subtree: true });
    }
    /** Registers a media element; routes it into the splitter only while enabled. */
    connectElement(el) {
        if (this.sources.has(el))
            return;
        try {
            const sharedSource = this.manager.getSharedSource(el);
            // Metering path (Pre-fader to visualize activity even if muted)
            // Only connect if enabled
            if (this.isEnabled) {
                sharedSource.connect(this.splitter);
            }
            this.sources.set(el, sharedSource);
        }
        catch (e) {
            console.warn('AudioMeter: Failed to connect element', e);
        }
    }
    /** Untracks a media element, detaching it from the splitter if it was routed. */
    disconnectElement(el) {
        const source = this.sources.get(el);
        if (!source)
            return;
        // Fix: disconnect(splitter) throws InvalidAccessError when the source was
        // never routed there (i.e. the meter was disabled at connect time), so only
        // disconnect while enabled and tolerate the benign "not connected" error.
        if (this.isEnabled) {
            try {
                source.disconnect(this.splitter);
            }
            catch (e) {
                // Source was not connected to the splitter; safe to ignore.
            }
        }
        this.sources.delete(el);
    }
    /** Routes all tracked sources into the splitter and starts metering. */
    enable() {
        if (this.isEnabled)
            return;
        this.isEnabled = true;
        if (this.ctx.state === 'suspended') {
            this.ctx.resume().catch(e => console.warn("AudioMeter: Failed to resume context", e));
        }
        this.sources.forEach(source => {
            source.connect(this.splitter);
        });
    }
    /** Stops metering and detaches all tracked sources from the splitter. */
    disable() {
        if (!this.isEnabled)
            return;
        this.isEnabled = false;
        this.sources.forEach(source => {
            try {
                source.disconnect(this.splitter);
            }
            catch (e) {
                // Already disconnected; ignore.
            }
        });
    }
    /**
     * Returns the current per-channel RMS and peak levels.
     * All zeros while the meter is disabled.
     */
    getLevels() {
        if (!this.isEnabled) {
            return { left: 0, right: 0, peakLeft: 0, peakRight: 0 };
        }
        this.analyserLeft.getFloatTimeDomainData(this.dataArrayLeft);
        this.analyserRight.getFloatTimeDomainData(this.dataArrayRight);
        return {
            left: this.calculateRMS(this.dataArrayLeft),
            right: this.calculateRMS(this.dataArrayRight),
            peakLeft: this.calculatePeak(this.dataArrayLeft),
            peakRight: this.calculatePeak(this.dataArrayRight)
        };
    }
    /** Root-mean-square of a time-domain sample buffer. */
    calculateRMS(data) {
        let sum = 0;
        for (let i = 0; i < data.length; i++) {
            sum += data[i] * data[i];
        }
        return Math.sqrt(sum / data.length);
    }
    /** Largest absolute sample value in the buffer. */
    calculatePeak(data) {
        let max = 0;
        for (let i = 0; i < data.length; i++) {
            const val = Math.abs(data[i]);
            if (val > max)
                max = val;
        }
        return max;
    }
    /** Stops observing the DOM and tears down the local audio graph. */
    dispose() {
        if (this.observer) {
            this.observer.disconnect();
            this.observer = null;
        }
        this.sources.forEach(source => {
            // Fix: only routed sources can be disconnected from the splitter.
            if (this.isEnabled) {
                try {
                    source.disconnect(this.splitter);
                }
                catch (e) {
                    // Not connected; ignore.
                }
            }
        });
        this.sources.clear();
        try {
            this.splitter.disconnect();
            this.analyserLeft.disconnect();
            this.analyserRight.disconnect();
        }
        catch (e) {
            console.warn("AudioMeter: Error during dispose", e);
        }
    }
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
 * Callback interface implemented by the object that owns the track list;
 * notified when a consumer toggles a track's `enabled` flag.
 */
export interface AudioTrackHost {
    handleAudioTrackEnabledChange(track: HeliosAudioTrack): void;
}
/**
 * Descriptor for a single audio track (id/kind/label/language/enabled),
 * mirroring the DOM AudioTrack shape. Toggling `enabled` notifies the host.
 */
export declare class HeliosAudioTrack {
    private _id;
    private _kind;
    private _label;
    private _language;
    private _enabled;
    private _host;
    constructor(id: string, kind: string, label: string, language: string, enabled: boolean, host: AudioTrackHost);
    get id(): string;
    get kind(): string;
    get label(): string;
    get language(): string;
    get enabled(): boolean;
    /** Notifies the host via handleAudioTrackEnabledChange when the value actually changes. */
    set enabled(value: boolean);
    /**
     * Internal method to update state without triggering host callback.
     * Used when syncing from external state.
     */
    _setEnabledInternal(value: boolean): void;
}
/**
 * Array-like, iterable track list with EventTarget semantics, mirroring the
 * DOM AudioTrackList API ('addtrack' / 'removetrack' / 'change' events plus
 * the corresponding on* handler properties).
 */
export declare class HeliosAudioTrackList extends EventTarget implements Iterable<HeliosAudioTrack> {
    private tracks;
    private _onaddtrack;
    private _onremovetrack;
    private _onchange;
    /** Number of tracks currently in the list. */
    get length(): number;
    /** Numeric index access (list[0], list[1], ...). */
    [index: number]: HeliosAudioTrack;
    [Symbol.iterator](): Iterator<HeliosAudioTrack>;
    /** Returns the track with the given id, or null when absent. */
    getTrackById(id: string): HeliosAudioTrack | null;
    /** Appends a track and fires 'addtrack'. */
    addTrack(track: HeliosAudioTrack): void;
    /** Removes a track if present and fires 'removetrack'. */
    removeTrack(track: HeliosAudioTrack): void;
    /** Fires a 'change' event (used by the host when track state flips). */
    dispatchChangeEvent(): void;
    get onaddtrack(): ((event: any) => void) | null;
    set onaddtrack(handler: ((event: any) => void) | null);
    get onremovetrack(): ((event: any) => void) | null;
    set onremovetrack(handler: ((event: any) => void) | null);
    get onchange(): ((event: any) => void) | null;
    set onchange(handler: ((event: any) => void) | null);
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
/**
 * Descriptor for a single audio track (id/kind/label/language/enabled) that
 * mirrors the DOM AudioTrack shape. Flipping `enabled` notifies the owning
 * host so it can react; `_setEnabledInternal` updates the flag silently.
 */
export class HeliosAudioTrack {
    _id;
    _kind;
    _label;
    _language;
    _enabled;
    _host;
    /**
     * @param {string} id - Stable track identifier.
     * @param {string} kind - Track kind (e.g. "main").
     * @param {string} label - Human-readable label.
     * @param {string} language - Language tag.
     * @param {boolean} enabled - Initial enabled state.
     * @param {AudioTrackHost} host - Notified on enabled-state transitions.
     */
    constructor(id, kind, label, language, enabled, host) {
        this._id = id;
        this._kind = kind;
        this._label = label;
        this._language = language;
        this._enabled = enabled;
        this._host = host;
    }
    get id() {
        return this._id;
    }
    get kind() {
        return this._kind;
    }
    get label() {
        return this._label;
    }
    get language() {
        return this._language;
    }
    get enabled() {
        return this._enabled;
    }
    set enabled(value) {
        // No-op when unchanged so the host only hears about real transitions.
        if (this._enabled === value)
            return;
        this._enabled = value;
        this._host.handleAudioTrackEnabledChange(this);
    }
    /**
     * Internal method to update state without triggering host callback.
     * Used when syncing from external state.
     */
    _setEnabledInternal(value) {
        this._enabled = value;
    }
}
|
|
35
|
+
/**
 * Array-like, iterable list of audio tracks that also acts as an EventTarget,
 * mirroring the DOM AudioTrackList API: numeric indexing (list[0], ...),
 * 'addtrack' / 'removetrack' / 'change' events, and matching on* properties.
 */
export class HeliosAudioTrackList extends EventTarget {
    tracks = [];
    _onaddtrack = null;
    _onremovetrack = null;
    _onchange = null;
    /** Number of tracks currently in the list. */
    get length() {
        return this.tracks.length;
    }
    /** Iterates tracks in insertion order. */
    [Symbol.iterator]() {
        return this.tracks.values();
    }
    /** Returns the track with the given id, or null when absent. */
    getTrackById(id) {
        for (const candidate of this.tracks) {
            if (candidate.id === id) {
                return candidate;
            }
        }
        return null;
    }
    /** Appends a track, exposes it via numeric indexing, and fires 'addtrack'. */
    addTrack(track) {
        this.tracks.push(track);
        // Mirror the backing array as indexed own properties for array-like access.
        this[this.tracks.length - 1] = track;
        const addEvent = new Event('addtrack');
        addEvent.track = track;
        this.dispatchEvent(addEvent);
    }
    /** Removes a track if present, re-packs numeric indices, and fires 'removetrack'. */
    removeTrack(track) {
        const position = this.tracks.indexOf(track);
        if (position === -1) {
            return;
        }
        this.tracks.splice(position, 1);
        // Shift the indexed own properties down over the removed slot.
        for (let i = position; i < this.tracks.length; i++) {
            this[i] = this.tracks[i];
        }
        delete this[this.tracks.length];
        const removeEvent = new Event('removetrack');
        removeEvent.track = track;
        this.dispatchEvent(removeEvent);
    }
    /** Fires a 'change' event (used by the host when track state flips). */
    dispatchChangeEvent() {
        this.dispatchEvent(new Event('change'));
    }
    // Standard event handler properties: setting one swaps the registered listener.
    get onaddtrack() {
        return this._onaddtrack;
    }
    set onaddtrack(handler) {
        if (this._onaddtrack) {
            this.removeEventListener('addtrack', this._onaddtrack);
        }
        this._onaddtrack = handler;
        if (handler) {
            this.addEventListener('addtrack', handler);
        }
    }
    get onremovetrack() {
        return this._onremovetrack;
    }
    set onremovetrack(handler) {
        if (this._onremovetrack) {
            this.removeEventListener('removetrack', this._onremovetrack);
        }
        this._onremovetrack = handler;
        if (handler) {
            this.addEventListener('removetrack', handler);
        }
    }
    get onchange() {
        return this._onchange;
    }
    set onchange(handler) {
        if (this._onchange) {
            this.removeEventListener('change', this._onchange);
        }
        this._onchange = handler;
        if (handler) {
            this.addEventListener('change', handler);
        }
    }
}
|
|
@@ -1,10 +1,17 @@
|
|
|
1
|
+
import type { AudioTrackMetadata } from "@helios-project/core";
|
|
1
2
|
/**
 * An audio source fetched into memory plus the playback parameters the
 * offline mixer applies to it (gain, mute, loop, timeline offset, fades).
 */
export interface AudioAsset {
    id: string;
    /** Raw encoded audio bytes (empty when the fetch failed or src was missing). */
    buffer: ArrayBuffer;
    /** Content-Type reported by the server, or null when unknown. */
    mimeType: string | null;
    volume?: number;
    muted?: boolean;
    loop?: boolean;
    /** Offset (seconds) on the composition timeline where playback starts. */
    startTime?: number;
    /** Linear fade-in length in seconds, applied from the clip start. */
    fadeInDuration?: number;
    /** Linear fade-out length in seconds, ending at the clip end. */
    fadeOutDuration?: number;
}
/**
 * Collects audio assets from <audio> tags in `doc` and from explicit metadata
 * tracks. Per-track `audioTrackState` overrides volume/mute discovered in the
 * DOM; metadata entries win when IDs collide.
 */
export declare function getAudioAssets(doc: Document, metadataTracks?: AudioTrackMetadata[], audioTrackState?: Record<string, {
    volume: number;
    muted: boolean;
}>): Promise<AudioAsset[]>;
/** Renders all assets into a single AudioBuffer via an OfflineAudioContext. */
export declare function mixAudio(assets: AudioAsset[], duration: number, sampleRate: number, rangeStart?: number): Promise<AudioBuffer>;
|
|
@@ -1,26 +1,71 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
1
|
+
/**
 * Fetches a single audio source into memory and packages it as an AudioAsset.
 * Returns an empty-buffer placeholder when `src` is missing or the fetch fails,
 * so one bad asset never aborts the whole collection.
 */
async function fetchAudioAsset(id, src, options) {
    if (!src)
        return { id, buffer: new ArrayBuffer(0), mimeType: null };
    try {
        const res = await fetch(src);
        // fetch() resolves even on HTTP errors; treat non-2xx as a failed asset
        // so we never hand an HTML error page to the audio decoder.
        if (!res.ok) {
            throw new Error(`HTTP ${res.status}`);
        }
        return {
            id,
            buffer: await res.arrayBuffer(),
            mimeType: res.headers.get('content-type'),
            volume: options.volume ?? 1,
            muted: options.muted ?? false,
            loop: options.loop ?? false,
            startTime: options.startTime ?? 0,
            fadeInDuration: options.fadeInDuration ?? 0,
            fadeOutDuration: options.fadeOutDuration ?? 0
        };
    }
    catch (e) {
        console.warn("Failed to fetch audio asset:", src, e);
        return { id, buffer: new ArrayBuffer(0), mimeType: null };
    }
}
/**
 * Collects audio assets from <audio> tags in `doc` and from explicit metadata
 * tracks, fetching them in parallel. Per-track `audioTrackState` overrides
 * volume/mute discovered in the DOM; metadata entries win on ID collisions.
 */
export async function getAudioAssets(doc, metadataTracks = [], audioTrackState = {}) {
    const domAssetsPromises = Array.from(doc.querySelectorAll('audio')).map((tag, index) => {
        // ID Extraction Priority:
        // 1. data-helios-track-id (Used by DomDriver for control)
        // 2. id attribute (Standard DOM)
        // 3. Fallback: generated "track-${index}" (Stable fallback for listing)
        const id = tag.getAttribute('data-helios-track-id') || tag.id || `track-${index}`;
        const volumeAttr = tag.getAttribute('volume');
        const state = audioTrackState[id];
        let volume = 1;
        let muted = false;
        if (state) {
            // Explicit controller state wins over DOM attributes.
            volume = state.volume;
            muted = state.muted;
        }
        else {
            // Guard against a malformed volume attribute producing NaN gain.
            const parsedVolume = volumeAttr !== null ? parseFloat(volumeAttr) : NaN;
            volume = Number.isFinite(parsedVolume) ? parsedVolume : tag.volume;
            muted = tag.muted;
        }
        return fetchAudioAsset(id, tag.src, {
            volume,
            muted,
            loop: tag.loop,
            startTime: parseFloat(tag.getAttribute('data-start-time') || '0') || 0,
            fadeInDuration: parseFloat(tag.getAttribute('data-helios-fade-in') || '0') || 0,
            fadeOutDuration: parseFloat(tag.getAttribute('data-helios-fade-out') || '0') || 0
        });
    });
    const metadataAssetsPromises = metadataTracks.map(track => {
        const state = audioTrackState[track.id];
        return fetchAudioAsset(track.id, track.src, {
            volume: state?.volume ?? 1,
            muted: state?.muted ?? false,
            startTime: track.startTime,
            fadeInDuration: track.fadeInDuration,
            fadeOutDuration: track.fadeOutDuration,
            loop: false // Metadata doesn't strictly support loop yet, defaults to false
        });
    });
    const domAssets = await Promise.all(domAssetsPromises);
    const metadataAssets = await Promise.all(metadataAssetsPromises);
    // Merge: Prioritize metadata if IDs collide (Explicit state overrides DOM discovery)
    const assetsMap = new Map();
    domAssets.forEach(a => assetsMap.set(a.id, a));
    metadataAssets.forEach(a => assetsMap.set(a.id, a));
    return Array.from(assetsMap.values());
}
|
|
25
70
|
export async function mixAudio(assets, duration, sampleRate, rangeStart = 0) {
|
|
26
71
|
if (typeof OfflineAudioContext === 'undefined') {
|
|
@@ -42,8 +87,8 @@ export async function mixAudio(assets, duration, sampleRate, rangeStart = 0) {
|
|
|
42
87
|
source.buffer = audioBuffer;
|
|
43
88
|
source.loop = !!asset.loop;
|
|
44
89
|
const gainNode = ctx.createGain();
|
|
45
|
-
const
|
|
46
|
-
gainNode.gain.value =
|
|
90
|
+
const targetVolume = asset.muted ? 0 : (typeof asset.volume === 'number' ? asset.volume : 1);
|
|
91
|
+
gainNode.gain.value = targetVolume;
|
|
47
92
|
source.connect(gainNode);
|
|
48
93
|
gainNode.connect(ctx.destination);
|
|
49
94
|
const assetStart = asset.startTime || 0;
|
|
@@ -55,6 +100,56 @@ export async function mixAudio(assets, duration, sampleRate, rangeStart = 0) {
|
|
|
55
100
|
startOffset = -playbackStart;
|
|
56
101
|
playbackStart = 0;
|
|
57
102
|
}
|
|
103
|
+
// Apply Fades
|
|
104
|
+
if ((asset.fadeInDuration && asset.fadeInDuration > 0) || (asset.fadeOutDuration && asset.fadeOutDuration > 0)) {
|
|
105
|
+
let fadeInEndTime = playbackStart;
|
|
106
|
+
// Fade In
|
|
107
|
+
if (asset.fadeInDuration && asset.fadeInDuration > 0) {
|
|
108
|
+
const durationRemaining = asset.fadeInDuration - startOffset;
|
|
109
|
+
if (durationRemaining > 0) {
|
|
110
|
+
const initialVol = targetVolume * (startOffset / asset.fadeInDuration);
|
|
111
|
+
gainNode.gain.setValueAtTime(initialVol, playbackStart);
|
|
112
|
+
fadeInEndTime = playbackStart + durationRemaining;
|
|
113
|
+
gainNode.gain.linearRampToValueAtTime(targetVolume, fadeInEndTime);
|
|
114
|
+
}
|
|
115
|
+
else {
|
|
116
|
+
gainNode.gain.setValueAtTime(targetVolume, playbackStart);
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
else {
|
|
120
|
+
gainNode.gain.setValueAtTime(targetVolume, playbackStart);
|
|
121
|
+
}
|
|
122
|
+
// Fade Out
|
|
123
|
+
if (asset.fadeOutDuration && asset.fadeOutDuration > 0) {
|
|
124
|
+
const clipDuration = audioBuffer.duration;
|
|
125
|
+
const assetEnd = assetStart + clipDuration;
|
|
126
|
+
const playbackEnd = assetEnd - rangeStart;
|
|
127
|
+
const fadeOutStart = playbackEnd - asset.fadeOutDuration;
|
|
128
|
+
if (fadeOutStart >= 0) {
|
|
129
|
+
if (fadeOutStart >= fadeInEndTime) {
|
|
130
|
+
gainNode.gain.setValueAtTime(targetVolume, fadeOutStart);
|
|
131
|
+
}
|
|
132
|
+
else {
|
|
133
|
+
// Overlap: Anchor at current calculated volume or targetVolume?
|
|
134
|
+
// Simpler to anchor at targetVolume if we assume users don't overlap fades aggressively
|
|
135
|
+
gainNode.gain.setValueAtTime(targetVolume, fadeOutStart);
|
|
136
|
+
}
|
|
137
|
+
gainNode.gain.linearRampToValueAtTime(0, playbackEnd);
|
|
138
|
+
}
|
|
139
|
+
else {
|
|
140
|
+
const timeIntoFade = -fadeOutStart;
|
|
141
|
+
if (timeIntoFade < asset.fadeOutDuration) {
|
|
142
|
+
const progress = timeIntoFade / asset.fadeOutDuration;
|
|
143
|
+
const startVol = targetVolume * (1 - progress);
|
|
144
|
+
gainNode.gain.setValueAtTime(startVol, 0);
|
|
145
|
+
gainNode.gain.linearRampToValueAtTime(0, playbackEnd);
|
|
146
|
+
}
|
|
147
|
+
else {
|
|
148
|
+
gainNode.gain.setValueAtTime(0, 0);
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
}
|
|
58
153
|
// source.start(when, offset)
|
|
59
154
|
source.start(playbackStart, startOffset);
|
|
60
155
|
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/** One parsed caption cue; times are in seconds. */
export interface SubtitleCue {
    /** Optional cue identifier (the line preceding the timestamp, if present). */
    id?: string;
    startTime: number;
    endTime: number;
    text: string;
}
/** Auto-detects the format: content starting with "WEBVTT" is parsed as WebVTT, anything else as SRT. */
export declare function parseCaptions(content: string): SubtitleCue[];
/** Parses SubRip (SRT) text; tolerates dot or comma millisecond separators. */
export declare function parseSRT(srt: string): SubtitleCue[];
/** Serializes cues back to SRT text (cues without ids get 1-based indices). */
export declare function stringifySRT(cues: SubtitleCue[]): string;
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
/**
 * Parses caption content, auto-detecting the format.
 * Content starting with the "WEBVTT" signature is parsed as WebVTT; anything
 * else is treated as SubRip (SRT).
 * @param {string} content - Raw caption file contents.
 * @returns {SubtitleCue[]} Parsed cues (empty array when nothing parses).
 */
export function parseCaptions(content) {
    const trimmed = content.trim();
    if (trimmed.startsWith("WEBVTT")) {
        return parseWebVTT(content);
    }
    return parseSRT(content);
}
/** Parses WebVTT text into cues, skipping header/NOTE/STYLE/REGION blocks. */
function parseWebVTT(content) {
    const cues = [];
    // Normalize line endings. Fix: also handle bare CR — the WebVTT spec treats
    // CR, LF, and CRLF as line terminators; previously \r leaked into cue text.
    const normalized = content.replace(/\r\n?/g, "\n");
    // Split into blocks by double newlines
    const blocks = normalized.split(/\n\n+/);
    for (const block of blocks) {
        const lines = block.split("\n").map(l => l.trim()).filter(l => l);
        // Skip header block or metadata blocks
        // Note: A robust parser would need to handle "WEBVTT" followed by text on the same line,
        // or metadata headers. For now we skip blocks starting with keywords.
        if (lines.length === 0 ||
            lines[0] === "WEBVTT" ||
            lines[0].startsWith("WEBVTT ") ||
            lines[0].startsWith("NOTE") ||
            lines[0].startsWith("STYLE") ||
            lines[0].startsWith("REGION")) {
            continue;
        }
        // The timestamp line is either the first line or (when a cue id precedes
        // it) the second.
        let timeLineIndex = -1;
        for (let i = 0; i < Math.min(lines.length, 2); i++) {
            if (lines[i].includes("-->")) {
                timeLineIndex = i;
                break;
            }
        }
        if (timeLineIndex === -1)
            continue;
        const timeLine = lines[timeLineIndex];
        const textLines = lines.slice(timeLineIndex + 1);
        let id;
        if (timeLineIndex > 0) {
            id = lines[timeLineIndex - 1];
        }
        // WebVTT timestamp regex:
        // (HH:)MM:SS.mmm or (HH:)MM:SS,mmm
        const timeMatch = timeLine.match(/((?:\d{2}:)?\d{2}:\d{2}[.,]\d{3})\s*-->\s*((?:\d{2}:)?\d{2}:\d{2}[.,]\d{3})/);
        if (timeMatch) {
            const startTime = parseTime(timeMatch[1]);
            const endTime = parseTime(timeMatch[2]);
            const text = textLines.join("\n").trim();
            if (!Number.isNaN(startTime) && !Number.isNaN(endTime)) {
                cues.push({ id, startTime, endTime, text });
            }
        }
    }
    return cues;
}
/**
 * Parses SubRip (SRT) text into cues.
 * Tolerates a missing index line and dot or comma millisecond separators.
 */
export function parseSRT(srt) {
    if (!srt)
        return [];
    const cues = [];
    // Normalize line endings (fix: also bare CR) and split by double newlines
    // to separate blocks.
    const blocks = srt.trim().replace(/\r\n?/g, "\n").split(/\n\n+/);
    for (const block of blocks) {
        const lines = block.split("\n");
        if (lines.length < 2)
            continue;
        // Usually:
        // Line 1: Index
        // Line 2: Timestamp range
        // Line 3+: Text
        // But sometimes index is missing or we need to be robust.
        // We look for the timestamp line.
        let timeLineIndex = -1;
        for (let i = 0; i < Math.min(lines.length, 2); i++) {
            if (lines[i].includes("-->")) {
                timeLineIndex = i;
                break;
            }
        }
        if (timeLineIndex === -1)
            continue;
        const timeLine = lines[timeLineIndex];
        const textLines = lines.slice(timeLineIndex + 1);
        let id;
        if (timeLineIndex > 0) {
            id = lines[timeLineIndex - 1];
        }
        // Format: 00:00:01,000 --> 00:00:04,000
        // We allow dot or comma for milliseconds
        const timeMatch = timeLine.match(/(\d{1,2}:\d{2}:\d{2}[,.]\d{1,3})\s*-->\s*(\d{1,2}:\d{2}:\d{2}[,.]\d{1,3})/);
        if (timeMatch) {
            const startTime = parseTime(timeMatch[1]);
            const endTime = parseTime(timeMatch[2]);
            const text = textLines.join("\n").trim();
            if (!Number.isNaN(startTime) && !Number.isNaN(endTime)) {
                cues.push({ id, startTime, endTime, text });
            }
        }
    }
    return cues;
}
/**
 * Converts "HH:MM:SS.mmm" / "MM:SS,mmm" style timestamps to seconds.
 * Returns NaN when the shape is not 2 or 3 colon-separated parts.
 */
function parseTime(timeString) {
    // Normalize comma to dot for parseFloat
    const parts = timeString.replace(",", ".").split(":");
    if (parts.length === 3) {
        const hours = parseFloat(parts[0]);
        const minutes = parseFloat(parts[1]);
        const seconds = parseFloat(parts[2]);
        return hours * 3600 + minutes * 60 + seconds;
    }
    else if (parts.length === 2) {
        const minutes = parseFloat(parts[0]);
        const seconds = parseFloat(parts[1]);
        return minutes * 60 + seconds;
    }
    return NaN;
}
|
|
117
|
+
/**
 * Serializes cues to SubRip (SRT) text.
 * Cues without an explicit id are numbered from 1 in array order; each block
 * ends with a blank line. Returns "" for a null/empty cue list.
 */
export function stringifySRT(cues) {
    if (!cues || cues.length === 0)
        return "";
    const blocks = [];
    cues.forEach((cue, index) => {
        const label = cue.id || String(index + 1);
        const window = `${formatTime(cue.startTime)} --> ${formatTime(cue.endTime)}`;
        blocks.push(`${label}\n${window}\n${cue.text}\n\n`);
    });
    return blocks.join("");
}
/** Formats seconds as an SRT timestamp "HH:MM:SS,mmm" (comma separator). */
function formatTime(seconds) {
    const totalMs = Math.round(seconds * 1000);
    const pad = (value, width) => String(value).padStart(width, "0");
    const hh = pad(Math.floor(totalMs / 3600000), 2);
    const mm = pad(Math.floor((totalMs % 3600000) / 60000), 2);
    const ss = pad(Math.floor((totalMs % 60000) / 1000), 2);
    const mmm = pad(totalMs % 1000, 3);
    return `${hh}:${mm}:${ss},${mmm}`;
}
|