@mediafox/core 1.2.9 → 1.2.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/compositor-worker.js +1 -227
- package/package.json +5 -4
- package/src/compositor/audio-manager.ts +411 -0
- package/src/compositor/compositor-worker.ts +158 -0
- package/src/compositor/compositor.ts +931 -0
- package/src/compositor/index.ts +19 -0
- package/src/compositor/source-pool.ts +450 -0
- package/src/compositor/types.ts +103 -0
- package/src/compositor/worker-client.ts +139 -0
- package/src/compositor/worker-types.ts +67 -0
- package/src/core/player-core.ts +273 -0
- package/src/core/state-facade.ts +98 -0
- package/src/core/track-switcher.ts +127 -0
- package/src/events/emitter.ts +137 -0
- package/src/events/types.ts +24 -0
- package/src/index.ts +124 -0
- package/src/mediafox.ts +642 -0
- package/src/playback/audio.ts +361 -0
- package/src/playback/controller.ts +446 -0
- package/src/playback/renderer.ts +1176 -0
- package/src/playback/renderers/canvas2d.ts +128 -0
- package/src/playback/renderers/factory.ts +172 -0
- package/src/playback/renderers/index.ts +5 -0
- package/src/playback/renderers/types.ts +57 -0
- package/src/playback/renderers/webgl.ts +373 -0
- package/src/playback/renderers/webgpu.ts +395 -0
- package/src/playlist/manager.ts +268 -0
- package/src/plugins/context.ts +93 -0
- package/src/plugins/index.ts +15 -0
- package/src/plugins/manager.ts +482 -0
- package/src/plugins/types.ts +243 -0
- package/src/sources/manager.ts +285 -0
- package/src/sources/source.ts +84 -0
- package/src/sources/types.ts +17 -0
- package/src/state/store.ts +389 -0
- package/src/state/types.ts +18 -0
- package/src/tracks/manager.ts +421 -0
- package/src/tracks/types.ts +30 -0
- package/src/types/jassub.d.ts +1 -0
- package/src/types.ts +235 -0
- package/src/utils/async-lock.ts +26 -0
- package/src/utils/dispose.ts +28 -0
- package/src/utils/equal.ts +33 -0
- package/src/utils/errors.ts +74 -0
- package/src/utils/logger.ts +50 -0
- package/src/utils/time.ts +157 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@mediafox/core",
|
|
3
|
-
"version": "1.2.
|
|
3
|
+
"version": "1.2.10",
|
|
4
4
|
"description": "Framework-agnostic media player library powered by MediaBunny",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -12,16 +12,17 @@
|
|
|
12
12
|
"import": "./dist/index.js"
|
|
13
13
|
},
|
|
14
14
|
"./compositor-worker": {
|
|
15
|
-
"import": "./
|
|
15
|
+
"import": "./src/compositor/compositor-worker.ts"
|
|
16
16
|
}
|
|
17
17
|
},
|
|
18
18
|
"files": [
|
|
19
|
-
"dist"
|
|
19
|
+
"dist",
|
|
20
|
+
"src"
|
|
20
21
|
],
|
|
21
22
|
"scripts": {
|
|
22
23
|
"dev": "bun run --watch src/index.ts",
|
|
23
24
|
"build": "bun run build:js && bun run build:types",
|
|
24
|
-
"build:js": "bun build src/index.ts --outdir=dist --target=browser --format=esm --minify --external mediabunny && bun build src/compositor/compositor-worker.ts --outdir=dist --target=browser --format=esm --minify",
|
|
25
|
+
"build:js": "bun build src/index.ts --outdir=dist --target=browser --format=esm --minify --external mediabunny && bun build src/compositor/compositor-worker.ts --outdir=dist --target=browser --format=esm --minify --external mediabunny",
|
|
25
26
|
"build:types": "tsc --emitDeclarationOnly",
|
|
26
27
|
"test": "bun test",
|
|
27
28
|
"lint": "biome lint ./src",
|
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
import type { AudioBufferSink, WrappedAudioBuffer } from 'mediabunny';
|
|
2
|
+
import type { AudioLayer, CompositorSource } from './types';
|
|
3
|
+
|
|
4
|
+
// Per-source playback state: the mediabunny sink/iterator that yields decoded
// audio buffers, the Web Audio nodes they feed, and the timing bookkeeping
// used to schedule buffers on the AudioContext clock and detect seeks.
interface ActiveAudioSource {
  sourceId: string;
  bufferSink: AudioBufferSink;
  // Live buffer iterator; null while this source is not actively playing.
  iterator: AsyncGenerator<WrappedAudioBuffer, void, unknown> | null;
  // Per-source chain: buffer nodes -> gainNode -> panNode -> master gain.
  gainNode: GainNode;
  panNode: StereoPannerNode;
  // Buffer nodes scheduled but not yet finished; stopped en masse on pause/seek.
  queuedNodes: Set<AudioBufferSourceNode>;
  volume: number;
  pan: number;
  muted: boolean;
  startSourceTime: number; // Where in the source we started playing (fixed at playback start)
  currentSourceTime: number; // Current expected position (updated each frame for drift detection)
  iteratorStartTime: number; // AudioContext time when iterator was started
  // Source-timeline timestamp of the most recently scheduled buffer.
  lastScheduledTime: number;
}
|
|
19
|
+
|
|
20
|
+
/** Construction options for CompositorAudioManager. */
export interface CompositorAudioManagerOptions {
  /**
   * Existing AudioContext to reuse. When omitted the manager creates its own
   * (falling back to the prefixed `webkitAudioContext` where necessary).
   */
  audioContext?: AudioContext;
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Audio manager for the compositor that handles multiple concurrent audio sources.
|
|
26
|
+
* Each audio layer can have independent volume, pan, and mute controls.
|
|
27
|
+
*/
|
|
28
|
+
export class CompositorAudioManager {
|
|
29
|
+
private audioContext: AudioContext;
|
|
30
|
+
private masterGain: GainNode;
|
|
31
|
+
private activeSources = new Map<string, ActiveAudioSource>();
|
|
32
|
+
private activeSourceIdsScratch = new Set<string>();
|
|
33
|
+
private playing = false;
|
|
34
|
+
private disposed = false;
|
|
35
|
+
private playbackId = 0;
|
|
36
|
+
private startContextTime = 0;
|
|
37
|
+
private startMediaTime = 0;
|
|
38
|
+
private pauseTime = 0;
|
|
39
|
+
private masterVolume = 1;
|
|
40
|
+
private masterMuted = false;
|
|
41
|
+
|
|
42
|
+
constructor(options: CompositorAudioManagerOptions = {}) {
|
|
43
|
+
if (options.audioContext) {
|
|
44
|
+
this.audioContext = options.audioContext;
|
|
45
|
+
} else {
|
|
46
|
+
const windowGlobal = globalThis as typeof globalThis & {
|
|
47
|
+
webkitAudioContext?: typeof AudioContext;
|
|
48
|
+
};
|
|
49
|
+
const AudioContextClass = windowGlobal.AudioContext || windowGlobal.webkitAudioContext;
|
|
50
|
+
if (!AudioContextClass) {
|
|
51
|
+
throw new Error('AudioContext is not supported in this environment');
|
|
52
|
+
}
|
|
53
|
+
this.audioContext = new AudioContextClass();
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
this.masterGain = this.audioContext.createGain();
|
|
57
|
+
this.masterGain.connect(this.audioContext.destination);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Registers an audio source that can be used in audio layers.
|
|
62
|
+
* Must be called before using the source in processAudioLayers.
|
|
63
|
+
*/
|
|
64
|
+
registerSource(source: CompositorSource, bufferSink: AudioBufferSink): void {
|
|
65
|
+
if (this.disposed) return;
|
|
66
|
+
|
|
67
|
+
// Create audio nodes for this source
|
|
68
|
+
const gainNode = this.audioContext.createGain();
|
|
69
|
+
const panNode = this.audioContext.createStereoPanner();
|
|
70
|
+
|
|
71
|
+
gainNode.connect(panNode);
|
|
72
|
+
panNode.connect(this.masterGain);
|
|
73
|
+
|
|
74
|
+
this.activeSources.set(source.id, {
|
|
75
|
+
sourceId: source.id,
|
|
76
|
+
bufferSink,
|
|
77
|
+
iterator: null,
|
|
78
|
+
gainNode,
|
|
79
|
+
panNode,
|
|
80
|
+
queuedNodes: new Set(),
|
|
81
|
+
volume: 1,
|
|
82
|
+
pan: 0,
|
|
83
|
+
muted: false,
|
|
84
|
+
startSourceTime: 0,
|
|
85
|
+
currentSourceTime: 0,
|
|
86
|
+
iteratorStartTime: 0,
|
|
87
|
+
lastScheduledTime: 0,
|
|
88
|
+
});
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* Unregisters an audio source.
|
|
93
|
+
*/
|
|
94
|
+
unregisterSource(sourceId: string): void {
|
|
95
|
+
const source = this.activeSources.get(sourceId);
|
|
96
|
+
if (!source) return;
|
|
97
|
+
|
|
98
|
+
this.stopSourceAudio(source);
|
|
99
|
+
source.gainNode.disconnect();
|
|
100
|
+
source.panNode.disconnect();
|
|
101
|
+
this.activeSources.delete(sourceId);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* Checks if a source is registered.
|
|
106
|
+
*/
|
|
107
|
+
hasSource(sourceId: string): boolean {
|
|
108
|
+
return this.activeSources.has(sourceId);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* Processes audio layers for the current frame.
|
|
113
|
+
* Updates which sources are playing and their parameters.
|
|
114
|
+
*/
|
|
115
|
+
processAudioLayers(layers: AudioLayer[], mediaTime: number): void {
|
|
116
|
+
if (this.disposed || !this.playing) return;
|
|
117
|
+
|
|
118
|
+
// Track which sources are active in this frame
|
|
119
|
+
const activeSourceIds = this.activeSourceIdsScratch;
|
|
120
|
+
activeSourceIds.clear();
|
|
121
|
+
|
|
122
|
+
for (const layer of layers) {
|
|
123
|
+
const sourceId = layer.source.id;
|
|
124
|
+
activeSourceIds.add(sourceId);
|
|
125
|
+
|
|
126
|
+
const source = this.activeSources.get(sourceId);
|
|
127
|
+
if (!source) continue;
|
|
128
|
+
|
|
129
|
+
// Update source parameters
|
|
130
|
+
const volume = layer.volume ?? 1;
|
|
131
|
+
const pan = layer.pan ?? 0;
|
|
132
|
+
const muted = layer.muted ?? false;
|
|
133
|
+
const sourceTime = layer.sourceTime ?? mediaTime;
|
|
134
|
+
|
|
135
|
+
// Update gain and pan if changed
|
|
136
|
+
if (source.volume !== volume || source.muted !== muted) {
|
|
137
|
+
source.volume = volume;
|
|
138
|
+
source.muted = muted;
|
|
139
|
+
source.gainNode.gain.value = muted ? 0 : volume * volume;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
if (source.pan !== pan) {
|
|
143
|
+
source.pan = pan;
|
|
144
|
+
source.panNode.pan.value = Math.max(-1, Math.min(1, pan));
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
// Check if we need to restart from a different source time (seek detected)
|
|
148
|
+
const timeDrift = Math.abs(sourceTime - source.currentSourceTime);
|
|
149
|
+
if (timeDrift > 0.5 && source.iterator !== null) {
|
|
150
|
+
// Source time changed significantly, restart iterator
|
|
151
|
+
this.restartSourceIterator(source, sourceTime);
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
// Update current position for drift detection (not used in scheduling)
|
|
155
|
+
source.currentSourceTime = sourceTime;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
// Stop sources that are no longer in the layers
|
|
159
|
+
for (const [sourceId, source] of this.activeSources) {
|
|
160
|
+
if (!activeSourceIds.has(sourceId) && source.iterator !== null) {
|
|
161
|
+
this.stopSourceAudio(source);
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
/**
|
|
167
|
+
* Starts audio playback from the specified time.
|
|
168
|
+
*/
|
|
169
|
+
async play(fromTime: number = this.pauseTime): Promise<void> {
|
|
170
|
+
if (this.playing || this.disposed) return;
|
|
171
|
+
|
|
172
|
+
// Resume audio context if suspended
|
|
173
|
+
if (this.audioContext.state === 'suspended') {
|
|
174
|
+
await this.audioContext.resume();
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
this.playbackId++;
|
|
178
|
+
this.playing = true;
|
|
179
|
+
this.startContextTime = this.audioContext.currentTime;
|
|
180
|
+
this.startMediaTime = fromTime;
|
|
181
|
+
this.pauseTime = fromTime;
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
/**
|
|
185
|
+
* Starts scheduling audio for a specific layer.
|
|
186
|
+
* Should be called after processAudioLayers when a new source becomes active.
|
|
187
|
+
*/
|
|
188
|
+
startSourcePlayback(sourceId: string, sourceTime: number): void {
|
|
189
|
+
const source = this.activeSources.get(sourceId);
|
|
190
|
+
if (!source || source.iterator !== null) return;
|
|
191
|
+
|
|
192
|
+
this.restartSourceIterator(source, sourceTime);
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
private restartSourceIterator(source: ActiveAudioSource, sourceTime: number): void {
|
|
196
|
+
// Stop existing audio
|
|
197
|
+
this.stopSourceAudio(source);
|
|
198
|
+
|
|
199
|
+
// Store the source time where we're starting from (used for scheduling)
|
|
200
|
+
source.startSourceTime = sourceTime;
|
|
201
|
+
source.currentSourceTime = sourceTime;
|
|
202
|
+
source.iteratorStartTime = this.audioContext.currentTime;
|
|
203
|
+
|
|
204
|
+
// Start new iterator from the source time
|
|
205
|
+
source.iterator = source.bufferSink.buffers(sourceTime);
|
|
206
|
+
source.lastScheduledTime = sourceTime;
|
|
207
|
+
|
|
208
|
+
// Schedule audio buffers
|
|
209
|
+
this.scheduleSourceBuffers(source, this.playbackId);
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
private async scheduleSourceBuffers(source: ActiveAudioSource, playbackId: number): Promise<void> {
|
|
213
|
+
const iterator = source.iterator;
|
|
214
|
+
if (!iterator) return;
|
|
215
|
+
|
|
216
|
+
try {
|
|
217
|
+
for await (const { buffer, timestamp } of iterator) {
|
|
218
|
+
if (playbackId !== this.playbackId || this.disposed || !this.playing) {
|
|
219
|
+
break;
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
const node = this.audioContext.createBufferSource();
|
|
223
|
+
node.buffer = buffer;
|
|
224
|
+
node.connect(source.gainNode);
|
|
225
|
+
|
|
226
|
+
// Calculate when to play this buffer
|
|
227
|
+
// timestamp is the buffer's position in the source
|
|
228
|
+
// startSourceTime is where we started playing from in the source
|
|
229
|
+
// The offset from the start is: timestamp - startSourceTime
|
|
230
|
+
// Schedule at: iteratorStartTime + offset
|
|
231
|
+
const offsetFromStart = timestamp - source.startSourceTime;
|
|
232
|
+
const scheduledContextTime = source.iteratorStartTime + offsetFromStart;
|
|
233
|
+
|
|
234
|
+
if (scheduledContextTime >= this.audioContext.currentTime) {
|
|
235
|
+
node.start(scheduledContextTime);
|
|
236
|
+
} else {
|
|
237
|
+
const elapsed = this.audioContext.currentTime - scheduledContextTime;
|
|
238
|
+
if (elapsed < buffer.duration) {
|
|
239
|
+
node.start(this.audioContext.currentTime, elapsed);
|
|
240
|
+
} else {
|
|
241
|
+
continue;
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
source.queuedNodes.add(node);
|
|
246
|
+
node.onended = () => {
|
|
247
|
+
source.queuedNodes.delete(node);
|
|
248
|
+
};
|
|
249
|
+
|
|
250
|
+
source.lastScheduledTime = timestamp;
|
|
251
|
+
|
|
252
|
+
// Throttle if we're too far ahead (more than 1 second of audio buffered)
|
|
253
|
+
const elapsedSinceStart = this.audioContext.currentTime - source.iteratorStartTime;
|
|
254
|
+
const bufferedAhead = (timestamp - source.startSourceTime) - elapsedSinceStart;
|
|
255
|
+
if (bufferedAhead > 1) {
|
|
256
|
+
await this.waitForCatchup(source, timestamp);
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
} catch {
|
|
260
|
+
// Iterator was closed or disposed
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
private async waitForCatchup(source: ActiveAudioSource, targetSourceTime: number): Promise<void> {
|
|
265
|
+
return new Promise((resolve) => {
|
|
266
|
+
const checkInterval = setInterval(() => {
|
|
267
|
+
if (!this.playing || this.disposed) {
|
|
268
|
+
clearInterval(checkInterval);
|
|
269
|
+
resolve();
|
|
270
|
+
return;
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
// Calculate how far ahead we've buffered
|
|
274
|
+
const elapsedSinceStart = this.audioContext.currentTime - source.iteratorStartTime;
|
|
275
|
+
const bufferedAhead = (targetSourceTime - source.startSourceTime) - elapsedSinceStart;
|
|
276
|
+
if (bufferedAhead < 1) {
|
|
277
|
+
clearInterval(checkInterval);
|
|
278
|
+
resolve();
|
|
279
|
+
}
|
|
280
|
+
}, 100);
|
|
281
|
+
});
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
/**
|
|
285
|
+
* Pauses audio playback.
|
|
286
|
+
*/
|
|
287
|
+
pause(): void {
|
|
288
|
+
if (!this.playing) return;
|
|
289
|
+
|
|
290
|
+
this.pauseTime = this.getCurrentTime();
|
|
291
|
+
this.playing = false;
|
|
292
|
+
|
|
293
|
+
// Stop all source audio
|
|
294
|
+
for (const source of this.activeSources.values()) {
|
|
295
|
+
this.stopSourceAudio(source);
|
|
296
|
+
}
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
/**
|
|
300
|
+
* Stops audio playback and resets to beginning.
|
|
301
|
+
*/
|
|
302
|
+
stop(): void {
|
|
303
|
+
this.pause();
|
|
304
|
+
this.pauseTime = 0;
|
|
305
|
+
this.startContextTime = 0;
|
|
306
|
+
this.startMediaTime = 0;
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
/**
|
|
310
|
+
* Seeks to a specific time.
|
|
311
|
+
*/
|
|
312
|
+
async seek(timestamp: number): Promise<void> {
|
|
313
|
+
const wasPlaying = this.playing;
|
|
314
|
+
|
|
315
|
+
this.pause();
|
|
316
|
+
this.pauseTime = timestamp;
|
|
317
|
+
this.startMediaTime = timestamp;
|
|
318
|
+
|
|
319
|
+
if (wasPlaying) {
|
|
320
|
+
await this.play(timestamp);
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
private stopSourceAudio(source: ActiveAudioSource): void {
|
|
325
|
+
// Stop all queued nodes
|
|
326
|
+
for (const node of source.queuedNodes) {
|
|
327
|
+
try {
|
|
328
|
+
node.stop();
|
|
329
|
+
} catch {
|
|
330
|
+
// Node might have already ended
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
source.queuedNodes.clear();
|
|
334
|
+
|
|
335
|
+
// Stop iterator
|
|
336
|
+
if (source.iterator) {
|
|
337
|
+
void source.iterator.return();
|
|
338
|
+
source.iterator = null;
|
|
339
|
+
}
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
/**
|
|
343
|
+
* Gets the current playback time.
|
|
344
|
+
*/
|
|
345
|
+
getCurrentTime(): number {
|
|
346
|
+
if (this.playing) {
|
|
347
|
+
return this.startMediaTime + (this.audioContext.currentTime - this.startContextTime);
|
|
348
|
+
}
|
|
349
|
+
return this.pauseTime;
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
/**
|
|
353
|
+
* Sets the master volume (0-1).
|
|
354
|
+
*/
|
|
355
|
+
setMasterVolume(volume: number): void {
|
|
356
|
+
this.masterVolume = Math.max(0, Math.min(1, volume));
|
|
357
|
+
this.updateMasterGain();
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
/**
|
|
361
|
+
* Sets the master mute state.
|
|
362
|
+
*/
|
|
363
|
+
setMasterMuted(muted: boolean): void {
|
|
364
|
+
this.masterMuted = muted;
|
|
365
|
+
this.updateMasterGain();
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
private updateMasterGain(): void {
|
|
369
|
+
const actualVolume = this.masterMuted ? 0 : this.masterVolume;
|
|
370
|
+
this.masterGain.gain.value = actualVolume * actualVolume;
|
|
371
|
+
}
|
|
372
|
+
|
|
373
|
+
/**
|
|
374
|
+
* Gets the audio context.
|
|
375
|
+
*/
|
|
376
|
+
getAudioContext(): AudioContext {
|
|
377
|
+
return this.audioContext;
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
/**
|
|
381
|
+
* Checks if currently playing.
|
|
382
|
+
*/
|
|
383
|
+
isPlaying(): boolean {
|
|
384
|
+
return this.playing;
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
/**
|
|
388
|
+
* Disposes the audio manager and releases resources.
|
|
389
|
+
*/
|
|
390
|
+
dispose(): void {
|
|
391
|
+
if (this.disposed) return;
|
|
392
|
+
this.disposed = true;
|
|
393
|
+
|
|
394
|
+
this.playbackId++;
|
|
395
|
+
this.stop();
|
|
396
|
+
|
|
397
|
+
// Clean up all sources
|
|
398
|
+
for (const source of this.activeSources.values()) {
|
|
399
|
+
this.stopSourceAudio(source);
|
|
400
|
+
source.gainNode.disconnect();
|
|
401
|
+
source.panNode.disconnect();
|
|
402
|
+
}
|
|
403
|
+
this.activeSources.clear();
|
|
404
|
+
|
|
405
|
+
this.masterGain.disconnect();
|
|
406
|
+
|
|
407
|
+
if (this.audioContext.state !== 'closed') {
|
|
408
|
+
void this.audioContext.close();
|
|
409
|
+
}
|
|
410
|
+
}
|
|
411
|
+
}
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
import { Compositor } from './compositor';
|
|
2
|
+
import type { CompositorLayer, CompositionFrame, CompositorSource } from './types';
|
|
3
|
+
import type {
|
|
4
|
+
CompositorWorkerExportPayload,
|
|
5
|
+
CompositorWorkerFrame,
|
|
6
|
+
CompositorWorkerInitPayload,
|
|
7
|
+
CompositorWorkerLoadPayload,
|
|
8
|
+
CompositorWorkerRenderPayload,
|
|
9
|
+
CompositorWorkerResizePayload,
|
|
10
|
+
CompositorWorkerSourceInfo,
|
|
11
|
+
CompositorWorkerUnloadPayload,
|
|
12
|
+
CompositorWorkerRequest,
|
|
13
|
+
CompositorWorkerResponse,
|
|
14
|
+
} from './worker-types';
|
|
15
|
+
|
|
16
|
+
// Minimal structural view of the dedicated-worker global scope, typed locally
// so this module does not depend on the full WebWorker lib definitions.
type WorkerScope = {
  postMessage: (message: unknown, transfer?: Transferable[]) => void;
  onmessage: ((event: MessageEvent<CompositorWorkerRequest>) => void) | null;
};

const workerScope = self as unknown as WorkerScope;

// Module-level singletons: one Compositor (rendering into the transferred
// OffscreenCanvas) per worker, created by 'init' and torn down by 'dispose'.
let compositor: Compositor | null = null;
let canvas: OffscreenCanvas | null = null;
|
|
25
|
+
|
|
26
|
+
const buildSourceInfo = (source: CompositorSource): CompositorWorkerSourceInfo => ({
|
|
27
|
+
id: source.id,
|
|
28
|
+
type: source.type,
|
|
29
|
+
duration: source.duration,
|
|
30
|
+
width: source.width,
|
|
31
|
+
height: source.height,
|
|
32
|
+
hasAudio: source.type === 'audio' ? true : source.hasAudio ? source.hasAudio() : false,
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
const mapFrame = (frame: CompositorWorkerFrame): CompositionFrame => {
|
|
36
|
+
if (!compositor) {
|
|
37
|
+
throw new Error('Compositor not initialized');
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
const layers = new Array<CompositorLayer>(frame.layers.length);
|
|
41
|
+
for (let i = 0; i < frame.layers.length; i++) {
|
|
42
|
+
const layer = frame.layers[i];
|
|
43
|
+
const source = compositor.getSource(layer.sourceId);
|
|
44
|
+
if (!source) {
|
|
45
|
+
throw new Error(`Unknown source: ${layer.sourceId}`);
|
|
46
|
+
}
|
|
47
|
+
layers[i] = {
|
|
48
|
+
source,
|
|
49
|
+
sourceTime: layer.sourceTime,
|
|
50
|
+
transform: layer.transform,
|
|
51
|
+
visible: layer.visible,
|
|
52
|
+
zIndex: layer.zIndex,
|
|
53
|
+
};
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
return { time: frame.time, layers };
|
|
57
|
+
};
|
|
58
|
+
|
|
59
|
+
const postResponse = (response: CompositorWorkerResponse, transfer?: Transferable[]): void => {
|
|
60
|
+
workerScope.postMessage(response, transfer ?? []);
|
|
61
|
+
};
|
|
62
|
+
|
|
63
|
+
// RPC dispatcher: every request carries a correlation `id` that is echoed
// back in the response so the client can match replies to pending calls.
// Any thrown error is converted into an { ok: false } response so the
// client's pending promise always settles.
workerScope.onmessage = async (event: MessageEvent<CompositorWorkerRequest>) => {
  const { id, kind, payload } = event.data;

  try {
    switch (kind) {
      case 'init': {
        // Create the compositor against the transferred OffscreenCanvas.
        // Audio is kept off inside the worker; only rendering happens here.
        const init = payload as CompositorWorkerInitPayload;
        canvas = init.canvas;
        compositor = new Compositor({
          canvas,
          width: init.width,
          height: init.height,
          backgroundColor: init.backgroundColor,
          enableAudio: false,
        });
        postResponse({ id, ok: true });
        return;
      }
      case 'loadSource': {
        const { source, options } = payload as CompositorWorkerLoadPayload;
        if (!compositor) throw new Error('Compositor not initialized');
        const loaded = await compositor.loadSource(source, options);
        // Live sources are not structured-cloneable; reply with a descriptor.
        postResponse({ id, ok: true, result: buildSourceInfo(loaded) });
        return;
      }
      case 'loadImage': {
        const { source } = payload as CompositorWorkerLoadPayload;
        if (!compositor) throw new Error('Compositor not initialized');
        const loaded = await compositor.loadImage(source as string | Blob | File);
        postResponse({ id, ok: true, result: buildSourceInfo(loaded) });
        return;
      }
      case 'loadAudio': {
        const { source, options } = payload as CompositorWorkerLoadPayload;
        if (!compositor) throw new Error('Compositor not initialized');
        const loaded = await compositor.loadAudio(source, options);
        postResponse({ id, ok: true, result: buildSourceInfo(loaded) });
        return;
      }
      case 'unloadSource': {
        if (!compositor) throw new Error('Compositor not initialized');
        // The payload's `id` is the source id — renamed to avoid shadowing
        // the request correlation id.
        const { id: sourceId } = payload as CompositorWorkerUnloadPayload;
        const result = compositor.unloadSource(sourceId);
        postResponse({ id, ok: true, result });
        return;
      }
      case 'render': {
        if (!compositor) throw new Error('Compositor not initialized');
        const { frame } = payload as CompositorWorkerRenderPayload;
        // Resolve layer sourceIds back into live source objects first.
        const mappedFrame = mapFrame(frame);
        const result = await compositor.render(mappedFrame);
        postResponse({ id, ok: true, result });
        return;
      }
      case 'clear': {
        if (!compositor) throw new Error('Compositor not initialized');
        compositor.clear();
        postResponse({ id, ok: true, result: true });
        return;
      }
      case 'resize': {
        if (!compositor) throw new Error('Compositor not initialized');
        const { width, height } = payload as CompositorWorkerResizePayload;
        compositor.resize(width, height);
        postResponse({ id, ok: true, result: true });
        return;
      }
      case 'exportFrame': {
        if (!compositor || !canvas) throw new Error('Compositor not initialized');
        const { frame, options } = payload as CompositorWorkerExportPayload;
        const mappedFrame = mapFrame(frame);
        // Render the frame into the canvas, then snapshot it as an encoded Blob.
        await compositor.render(mappedFrame);
        const type = `image/${options?.format ?? 'png'}`;
        const blob = await canvas.convertToBlob({
          type,
          quality: options?.quality,
        });
        postResponse({ id, ok: true, result: blob });
        return;
      }
      case 'dispose': {
        // Tear down the module-level singletons; a later 'init' can rebuild.
        compositor?.dispose();
        compositor = null;
        canvas = null;
        postResponse({ id, ok: true, result: true });
        return;
      }
      default: {
        throw new Error(`Unknown worker command: ${kind}`);
      }
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Worker error';
    postResponse({ id, ok: false, error: message });
  }
};
|