@mediafox/core 1.2.8 → 1.2.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/compositor/compositor.d.ts.map +1 -1
- package/dist/compositor-worker.js +1 -1
- package/dist/index.js +1 -1
- package/package.json +4 -3
- package/src/compositor/audio-manager.ts +411 -0
- package/src/compositor/compositor-worker.ts +158 -0
- package/src/compositor/compositor.ts +931 -0
- package/src/compositor/index.ts +19 -0
- package/src/compositor/source-pool.ts +450 -0
- package/src/compositor/types.ts +103 -0
- package/src/compositor/worker-client.ts +139 -0
- package/src/compositor/worker-types.ts +67 -0
- package/src/core/player-core.ts +273 -0
- package/src/core/state-facade.ts +98 -0
- package/src/core/track-switcher.ts +127 -0
- package/src/events/emitter.ts +137 -0
- package/src/events/types.ts +24 -0
- package/src/index.ts +124 -0
- package/src/mediafox.ts +642 -0
- package/src/playback/audio.ts +361 -0
- package/src/playback/controller.ts +446 -0
- package/src/playback/renderer.ts +1176 -0
- package/src/playback/renderers/canvas2d.ts +128 -0
- package/src/playback/renderers/factory.ts +172 -0
- package/src/playback/renderers/index.ts +5 -0
- package/src/playback/renderers/types.ts +57 -0
- package/src/playback/renderers/webgl.ts +373 -0
- package/src/playback/renderers/webgpu.ts +395 -0
- package/src/playlist/manager.ts +268 -0
- package/src/plugins/context.ts +93 -0
- package/src/plugins/index.ts +15 -0
- package/src/plugins/manager.ts +482 -0
- package/src/plugins/types.ts +243 -0
- package/src/sources/manager.ts +285 -0
- package/src/sources/source.ts +84 -0
- package/src/sources/types.ts +17 -0
- package/src/state/store.ts +389 -0
- package/src/state/types.ts +18 -0
- package/src/tracks/manager.ts +421 -0
- package/src/tracks/types.ts +30 -0
- package/src/types/jassub.d.ts +1 -0
- package/src/types.ts +235 -0
- package/src/utils/async-lock.ts +26 -0
- package/src/utils/dispose.ts +28 -0
- package/src/utils/equal.ts +33 -0
- package/src/utils/errors.ts +74 -0
- package/src/utils/logger.ts +50 -0
- package/src/utils/time.ts +157 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// Public entry point for the compositor module: re-exports the runtime
// classes (Compositor, SourcePool) and the full compositor type surface.
export { Compositor } from './compositor';
export { SourcePool } from './source-pool';
export type {
  AudioLayer,
  CompositionFrame,
  CompositionProvider,
  CompositorEventListener,
  CompositorEventMap,
  CompositorLayer,
  CompositorOptions,
  CompositorRendererType,
  CompositorSource,
  CompositorSourceOptions,
  CompositorWorkerOptions,
  FrameExportOptions,
  LayerTransform,
  PreviewOptions,
  SourceType,
} from './types';
|
|
@@ -0,0 +1,450 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ALL_FORMATS,
|
|
3
|
+
AudioBufferSink,
|
|
4
|
+
BlobSource,
|
|
5
|
+
BufferSource,
|
|
6
|
+
CanvasSink,
|
|
7
|
+
FilePathSource,
|
|
8
|
+
Input,
|
|
9
|
+
type InputAudioTrack,
|
|
10
|
+
type InputVideoTrack,
|
|
11
|
+
ReadableStreamSource,
|
|
12
|
+
type Source,
|
|
13
|
+
UrlSource,
|
|
14
|
+
type WrappedCanvas,
|
|
15
|
+
} from 'mediabunny';
|
|
16
|
+
import type { MediaSource } from '../types';
|
|
17
|
+
import type { CompositorSource, CompositorSourceOptions, SourceType } from './types';
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* LRU cache for video frames.
|
|
21
|
+
* Uses Map's insertion order + move-to-end on access for O(1) LRU eviction.
|
|
22
|
+
*/
|
|
23
|
+
class LRUFrameCache {
|
|
24
|
+
private cache = new Map<number, WrappedCanvas>();
|
|
25
|
+
private maxSize: number;
|
|
26
|
+
|
|
27
|
+
constructor(maxSize: number) {
|
|
28
|
+
this.maxSize = maxSize;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
get(key: number): WrappedCanvas | undefined {
|
|
32
|
+
const value = this.cache.get(key);
|
|
33
|
+
if (value !== undefined) {
|
|
34
|
+
// Move to end (most recently used)
|
|
35
|
+
this.cache.delete(key);
|
|
36
|
+
this.cache.set(key, value);
|
|
37
|
+
}
|
|
38
|
+
return value;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
set(key: number, value: WrappedCanvas): void {
|
|
42
|
+
// If key exists, delete first to update insertion order
|
|
43
|
+
if (this.cache.has(key)) {
|
|
44
|
+
this.cache.delete(key);
|
|
45
|
+
} else if (this.cache.size >= this.maxSize) {
|
|
46
|
+
// Evict least recently used (first item)
|
|
47
|
+
const firstKey = this.cache.keys().next().value;
|
|
48
|
+
if (firstKey !== undefined) {
|
|
49
|
+
this.cache.delete(firstKey);
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
this.cache.set(key, value);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
clear(): void {
|
|
56
|
+
this.cache.clear();
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
get size(): number {
|
|
60
|
+
return this.cache.size;
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
 * Backing state for a VideoSource: the demuxer input, the selected video
 * track, the canvas sink used to rasterize frames, an LRU frame cache, and
 * (when present and decodable) the first audio track with its sink.
 */
interface VideoSourceData {
  input: Input<Source>;
  videoTrack: InputVideoTrack;
  canvasSink: CanvasSink;
  frameCache: LRUFrameCache;
  frameIntervalMs: number; // Frame duration in milliseconds for cache key quantization
  audioTrack: InputAudioTrack | null; // null when no audio track was found
  audioBufferSink: AudioBufferSink | null; // null when there is no decodable audio track
}

/**
 * Backing state for an AudioOnlySource: the demuxer input plus the selected
 * audio track and the sink used to pull decoded samples from it.
 */
interface AudioSourceData {
  input: Input<Source>;
  audioTrack: InputAudioTrack;
  audioBufferSink: AudioBufferSink;
}

/**
 * Backing state for an ImageSource: a decoded still image.
 * ImageBitmap in worker contexts, HTMLImageElement in window contexts.
 */
interface ImageSourceData {
  image: HTMLImageElement | ImageBitmap;
}
|
|
83
|
+
|
|
84
|
+
class VideoSource implements CompositorSource {
|
|
85
|
+
readonly id: string;
|
|
86
|
+
readonly type: SourceType = 'video';
|
|
87
|
+
readonly duration: number;
|
|
88
|
+
readonly width: number;
|
|
89
|
+
readonly height: number;
|
|
90
|
+
private data: VideoSourceData;
|
|
91
|
+
private disposed = false;
|
|
92
|
+
|
|
93
|
+
constructor(id: string, data: VideoSourceData, duration: number, width: number, height: number) {
|
|
94
|
+
this.id = id;
|
|
95
|
+
this.data = data;
|
|
96
|
+
this.duration = duration;
|
|
97
|
+
this.width = width;
|
|
98
|
+
this.height = height;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
async getFrameAt(time: number): Promise<CanvasImageSource | null> {
|
|
102
|
+
if (this.disposed) return null;
|
|
103
|
+
|
|
104
|
+
// Quantize to frame boundaries to maximize cache hits
|
|
105
|
+
// e.g., at 30fps (33.33ms/frame), times 0.001s and 0.030s map to same frame 0
|
|
106
|
+
const frameIntervalMs = this.data.frameIntervalMs;
|
|
107
|
+
const cacheKey = Math.floor((time * 1000) / frameIntervalMs) * frameIntervalMs;
|
|
108
|
+
|
|
109
|
+
// Check LRU cache
|
|
110
|
+
const cached = this.data.frameCache.get(cacheKey);
|
|
111
|
+
if (cached) {
|
|
112
|
+
return cached.canvas;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
try {
|
|
116
|
+
const frame = await this.data.canvasSink.getCanvas(time);
|
|
117
|
+
if (!frame) return null;
|
|
118
|
+
|
|
119
|
+
// LRU cache handles eviction automatically
|
|
120
|
+
this.data.frameCache.set(cacheKey, frame);
|
|
121
|
+
|
|
122
|
+
return frame.canvas;
|
|
123
|
+
} catch {
|
|
124
|
+
return null;
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
/**
|
|
129
|
+
* Returns the AudioBufferSink for this video source, or null if the video has no audio.
|
|
130
|
+
*/
|
|
131
|
+
getAudioBufferSink(): AudioBufferSink | null {
|
|
132
|
+
return this.data.audioBufferSink;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Returns true if this video source has an audio track.
|
|
137
|
+
*/
|
|
138
|
+
hasAudio(): boolean {
|
|
139
|
+
return this.data.audioBufferSink !== null;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
clearCache(): void {
|
|
143
|
+
this.data.frameCache.clear();
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
dispose(): void {
|
|
147
|
+
if (this.disposed) return;
|
|
148
|
+
this.disposed = true;
|
|
149
|
+
this.data.frameCache.clear();
|
|
150
|
+
this.data.input.dispose();
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
class ImageSource implements CompositorSource {
|
|
155
|
+
readonly id: string;
|
|
156
|
+
readonly type: SourceType = 'image';
|
|
157
|
+
readonly duration = Infinity; // Images have infinite duration
|
|
158
|
+
readonly width: number;
|
|
159
|
+
readonly height: number;
|
|
160
|
+
private data: ImageSourceData;
|
|
161
|
+
private disposed = false;
|
|
162
|
+
|
|
163
|
+
constructor(id: string, data: ImageSourceData) {
|
|
164
|
+
this.id = id;
|
|
165
|
+
this.data = data;
|
|
166
|
+
this.width = data.image.width;
|
|
167
|
+
this.height = data.image.height;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
async getFrameAt(_time: number): Promise<CanvasImageSource | null> {
|
|
171
|
+
if (this.disposed) return null;
|
|
172
|
+
return this.data.image;
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
dispose(): void {
|
|
176
|
+
if (this.disposed) return;
|
|
177
|
+
this.disposed = true;
|
|
178
|
+
if ('close' in this.data.image) {
|
|
179
|
+
(this.data.image as ImageBitmap).close();
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
class AudioOnlySource implements CompositorSource {
|
|
185
|
+
readonly id: string;
|
|
186
|
+
readonly type: SourceType = 'audio';
|
|
187
|
+
readonly duration: number;
|
|
188
|
+
readonly width = 0;
|
|
189
|
+
readonly height = 0;
|
|
190
|
+
private data: AudioSourceData;
|
|
191
|
+
private disposed = false;
|
|
192
|
+
|
|
193
|
+
constructor(id: string, data: AudioSourceData, duration: number) {
|
|
194
|
+
this.id = id;
|
|
195
|
+
this.data = data;
|
|
196
|
+
this.duration = duration;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
async getFrameAt(_time: number): Promise<CanvasImageSource | null> {
|
|
200
|
+
return null; // Audio sources don't have frames
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
/**
|
|
204
|
+
* Returns the AudioBufferSink for this audio source.
|
|
205
|
+
*/
|
|
206
|
+
getAudioBufferSink(): AudioBufferSink {
|
|
207
|
+
return this.data.audioBufferSink;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
dispose(): void {
|
|
211
|
+
if (this.disposed) return;
|
|
212
|
+
this.disposed = true;
|
|
213
|
+
this.data.input.dispose();
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
export class SourcePool {
|
|
218
|
+
private sources = new Map<string, CompositorSource>();
|
|
219
|
+
private audioContext: AudioContext | null = null;
|
|
220
|
+
private nextId = 0;
|
|
221
|
+
|
|
222
|
+
constructor(audioContext?: AudioContext) {
|
|
223
|
+
this.audioContext = audioContext ?? null;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
private generateId(): string {
|
|
227
|
+
return `source_${this.nextId++}`;
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
async loadVideo(source: MediaSource, options: CompositorSourceOptions = {}): Promise<CompositorSource> {
|
|
231
|
+
const id = options.id ?? this.generateId();
|
|
232
|
+
|
|
233
|
+
// Create input from source
|
|
234
|
+
const input = this.createInput(source);
|
|
235
|
+
|
|
236
|
+
// Get video tracks
|
|
237
|
+
const videoTracks = await input.getVideoTracks();
|
|
238
|
+
if (videoTracks.length === 0) {
|
|
239
|
+
input.dispose();
|
|
240
|
+
throw new Error('Source has no video track');
|
|
241
|
+
}
|
|
242
|
+
const videoTrack = videoTracks[0];
|
|
243
|
+
|
|
244
|
+
// Check if we can decode
|
|
245
|
+
const canDecode = await videoTrack.canDecode();
|
|
246
|
+
if (!canDecode) {
|
|
247
|
+
input.dispose();
|
|
248
|
+
throw new Error(`Cannot decode video track with codec: ${videoTrack.codec}`);
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
// Create canvas sink for frame extraction with larger pool for smoother playback
|
|
252
|
+
const canvasSink = new CanvasSink(videoTrack, {
|
|
253
|
+
poolSize: 4,
|
|
254
|
+
});
|
|
255
|
+
|
|
256
|
+
// Get duration
|
|
257
|
+
const duration = await videoTrack.computeDuration();
|
|
258
|
+
|
|
259
|
+
// Calculate frame interval from framerate for cache key quantization
|
|
260
|
+
// Fallback to 30fps (33.33ms) if framerate unavailable
|
|
261
|
+
let fps = 30;
|
|
262
|
+
try {
|
|
263
|
+
const stats = await videoTrack.computePacketStats(100);
|
|
264
|
+
if (stats.averagePacketRate > 0) {
|
|
265
|
+
fps = stats.averagePacketRate;
|
|
266
|
+
}
|
|
267
|
+
} catch {
|
|
268
|
+
// Ignore errors in stats computation
|
|
269
|
+
}
|
|
270
|
+
const frameIntervalMs = 1000 / fps;
|
|
271
|
+
|
|
272
|
+
// Adaptive cache size based on resolution
|
|
273
|
+
// Higher resolution = fewer cached frames to limit memory usage
|
|
274
|
+
const pixelCount = videoTrack.displayWidth * videoTrack.displayHeight;
|
|
275
|
+
const cacheSize = pixelCount > 2073600 ? 15 : pixelCount > 921600 ? 30 : 60; // 1080p: 15, 720p: 30, smaller: 60
|
|
276
|
+
|
|
277
|
+
// Try to get audio track if available
|
|
278
|
+
let audioTrack: InputAudioTrack | null = null;
|
|
279
|
+
let audioBufferSink: AudioBufferSink | null = null;
|
|
280
|
+
try {
|
|
281
|
+
const audioTracks = await input.getAudioTracks();
|
|
282
|
+
if (audioTracks.length > 0) {
|
|
283
|
+
audioTrack = audioTracks[0];
|
|
284
|
+
const canDecodeAudio = await audioTrack.canDecode();
|
|
285
|
+
if (canDecodeAudio) {
|
|
286
|
+
audioBufferSink = new AudioBufferSink(audioTrack);
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
} catch {
|
|
290
|
+
// No audio track or can't decode - continue without audio
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
const videoSource = new VideoSource(
|
|
294
|
+
id,
|
|
295
|
+
{
|
|
296
|
+
input,
|
|
297
|
+
videoTrack,
|
|
298
|
+
canvasSink,
|
|
299
|
+
frameCache: new LRUFrameCache(cacheSize),
|
|
300
|
+
frameIntervalMs,
|
|
301
|
+
audioTrack,
|
|
302
|
+
audioBufferSink,
|
|
303
|
+
},
|
|
304
|
+
duration,
|
|
305
|
+
videoTrack.displayWidth,
|
|
306
|
+
videoTrack.displayHeight
|
|
307
|
+
);
|
|
308
|
+
|
|
309
|
+
this.sources.set(id, videoSource);
|
|
310
|
+
return videoSource;
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
async loadImage(source: string | Blob | File): Promise<CompositorSource> {
|
|
314
|
+
const id = this.generateId();
|
|
315
|
+
|
|
316
|
+
let image: HTMLImageElement | ImageBitmap;
|
|
317
|
+
|
|
318
|
+
if (typeof source !== 'string') {
|
|
319
|
+
// Blob or File
|
|
320
|
+
image = await createImageBitmap(source);
|
|
321
|
+
} else {
|
|
322
|
+
if (typeof Image === 'undefined') {
|
|
323
|
+
// Worker context: fetch + createImageBitmap
|
|
324
|
+
const response = await fetch(source);
|
|
325
|
+
if (!response.ok) {
|
|
326
|
+
throw new Error(`Failed to load image: ${source}`);
|
|
327
|
+
}
|
|
328
|
+
const blob = await response.blob();
|
|
329
|
+
image = await createImageBitmap(blob);
|
|
330
|
+
} else {
|
|
331
|
+
// URL string in window context
|
|
332
|
+
image = await new Promise<HTMLImageElement>((resolve, reject) => {
|
|
333
|
+
const img = new Image();
|
|
334
|
+
img.onload = () => resolve(img);
|
|
335
|
+
img.onerror = () => reject(new Error(`Failed to load image: ${source}`));
|
|
336
|
+
img.crossOrigin = 'anonymous';
|
|
337
|
+
img.src = source;
|
|
338
|
+
});
|
|
339
|
+
}
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
const imageSource = new ImageSource(id, { image });
|
|
343
|
+
this.sources.set(id, imageSource);
|
|
344
|
+
return imageSource;
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
async loadAudio(source: MediaSource, options: CompositorSourceOptions = {}): Promise<CompositorSource> {
|
|
348
|
+
const id = options.id ?? this.generateId();
|
|
349
|
+
|
|
350
|
+
// Create input from source
|
|
351
|
+
const input = this.createInput(source);
|
|
352
|
+
|
|
353
|
+
// Get audio tracks
|
|
354
|
+
const audioTracks = await input.getAudioTracks();
|
|
355
|
+
if (audioTracks.length === 0) {
|
|
356
|
+
input.dispose();
|
|
357
|
+
throw new Error('Source has no audio track');
|
|
358
|
+
}
|
|
359
|
+
const audioTrack = audioTracks[0];
|
|
360
|
+
|
|
361
|
+
// Check if we can decode
|
|
362
|
+
const canDecode = await audioTrack.canDecode();
|
|
363
|
+
if (!canDecode) {
|
|
364
|
+
input.dispose();
|
|
365
|
+
throw new Error(`Cannot decode audio track with codec: ${audioTrack.codec}`);
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
// Get duration
|
|
369
|
+
const duration = await audioTrack.computeDuration();
|
|
370
|
+
|
|
371
|
+
// Create audio buffer sink for playback
|
|
372
|
+
const audioBufferSink = new AudioBufferSink(audioTrack);
|
|
373
|
+
|
|
374
|
+
const audioSource = new AudioOnlySource(
|
|
375
|
+
id,
|
|
376
|
+
{
|
|
377
|
+
input,
|
|
378
|
+
audioTrack,
|
|
379
|
+
audioBufferSink,
|
|
380
|
+
},
|
|
381
|
+
duration
|
|
382
|
+
);
|
|
383
|
+
|
|
384
|
+
this.sources.set(id, audioSource);
|
|
385
|
+
return audioSource;
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
private createInput(source: MediaSource): Input<Source> {
|
|
389
|
+
let sourceObj: Source;
|
|
390
|
+
|
|
391
|
+
if (source instanceof File || source instanceof Blob) {
|
|
392
|
+
sourceObj = new BlobSource(source);
|
|
393
|
+
} else if (source instanceof ArrayBuffer || source instanceof Uint8Array) {
|
|
394
|
+
sourceObj = new BufferSource(source);
|
|
395
|
+
} else if (typeof source === 'string' || source instanceof URL) {
|
|
396
|
+
const url = source instanceof URL ? source.href : source;
|
|
397
|
+
if (typeof window === 'undefined' && !url.startsWith('http')) {
|
|
398
|
+
sourceObj = new FilePathSource(url);
|
|
399
|
+
} else {
|
|
400
|
+
sourceObj = new UrlSource(url);
|
|
401
|
+
}
|
|
402
|
+
} else if (typeof ReadableStream !== 'undefined' && source instanceof ReadableStream) {
|
|
403
|
+
sourceObj = new ReadableStreamSource(source as ReadableStream<Uint8Array>);
|
|
404
|
+
} else {
|
|
405
|
+
throw new Error('Unsupported source type');
|
|
406
|
+
}
|
|
407
|
+
|
|
408
|
+
return new Input({
|
|
409
|
+
source: sourceObj,
|
|
410
|
+
formats: ALL_FORMATS,
|
|
411
|
+
});
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
getSource(id: string): CompositorSource | undefined {
|
|
415
|
+
return this.sources.get(id);
|
|
416
|
+
}
|
|
417
|
+
|
|
418
|
+
hasSource(id: string): boolean {
|
|
419
|
+
return this.sources.has(id);
|
|
420
|
+
}
|
|
421
|
+
|
|
422
|
+
unloadSource(id: string): boolean {
|
|
423
|
+
const source = this.sources.get(id);
|
|
424
|
+
if (source) {
|
|
425
|
+
source.dispose();
|
|
426
|
+
this.sources.delete(id);
|
|
427
|
+
return true;
|
|
428
|
+
}
|
|
429
|
+
return false;
|
|
430
|
+
}
|
|
431
|
+
|
|
432
|
+
getAllSources(): CompositorSource[] {
|
|
433
|
+
return Array.from(this.sources.values());
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
clear(): void {
|
|
437
|
+
for (const source of this.sources.values()) {
|
|
438
|
+
source.dispose();
|
|
439
|
+
}
|
|
440
|
+
this.sources.clear();
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
dispose(): void {
|
|
444
|
+
this.clear();
|
|
445
|
+
if (this.audioContext && this.audioContext.state !== 'closed') {
|
|
446
|
+
void this.audioContext.close();
|
|
447
|
+
}
|
|
448
|
+
this.audioContext = null;
|
|
449
|
+
}
|
|
450
|
+
}
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import type { RendererType, Rotation } from '../types';
|
|
2
|
+
|
|
3
|
+
/** Renderer backend identifier; aliases the player-wide RendererType. */
export type CompositorRendererType = RendererType;

/** Construction options for the compositor. */
export interface CompositorOptions {
  canvas: HTMLCanvasElement | OffscreenCanvas;
  width?: number;
  height?: number;
  renderer?: CompositorRendererType;
  backgroundColor?: string;
  enableAudio?: boolean;
  worker?: boolean | CompositorWorkerOptions; // true or options to run compositing in a worker
}

/** Placement of a layer on the output canvas; all fields optional. */
export interface LayerTransform {
  x?: number;
  y?: number;
  width?: number;
  height?: number;
  rotation?: Rotation;
  scaleX?: number;
  scaleY?: number;
  opacity?: number;
  anchorX?: number;
  anchorY?: number;
}

/** One visual layer of a composition frame. */
export interface CompositorLayer {
  source: CompositorSource;
  sourceTime?: number; // time within the source to sample, in seconds
  transform?: LayerTransform;
  visible?: boolean;
  zIndex?: number; // stacking order; higher draws on top
}

/** One audible layer of a composition frame. */
export interface AudioLayer {
  source: CompositorSource;
  sourceTime?: number; // time within the source to sample, in seconds
  volume?: number;
  pan?: number;
  muted?: boolean;
}

/** The full set of layers to render/play at a given timeline time. */
export interface CompositionFrame {
  time: number;
  layers: CompositorLayer[];
  audio?: AudioLayer[];
}

/** Callback that maps a timeline time to the composition to show. */
export type CompositionProvider = (time: number) => CompositionFrame;

/** Options for driving a timed preview of a composition. */
export interface PreviewOptions {
  getComposition: CompositionProvider;
  duration: number; // total preview length, in seconds
  fps?: number;
  loop?: boolean;
}

/** Options controlling the compositing worker, when enabled. */
export interface CompositorWorkerOptions {
  enabled?: boolean;
  url?: string; // custom worker script URL
  type?: 'classic' | 'module';
}

/** Options for exporting a single rendered frame. */
export interface FrameExportOptions {
  format?: 'png' | 'jpeg' | 'webp';
  quality?: number; // 0..1, for lossy formats — TODO confirm range against exporter
}

/** Kind of media a compositor source wraps. */
export type SourceType = 'video' | 'image' | 'audio';

/** A loaded piece of media that the compositor can sample frames/audio from. */
export interface CompositorSource {
  id: string;
  type: SourceType;
  duration: number; // seconds; Infinity for still images
  width?: number;
  height?: number;
  getFrameAt(time: number): Promise<CanvasImageSource | null>;
  getAudioBufferSink?(): import('mediabunny').AudioBufferSink | null; // present on sources with audio
  hasAudio?(): boolean;
  dispose(): void;
}

/** Options accepted when loading a source into the pool. */
export interface CompositorSourceOptions {
  id?: string; // caller-chosen id; generated when omitted
  startTime?: number;
  endTime?: number;
}

/** Event name → payload map for compositor event emission. */
export type CompositorEventMap = {
  play: undefined;
  pause: undefined;
  seeking: { time: number };
  seeked: { time: number };
  timeupdate: { currentTime: number };
  ended: undefined;
  error: Error;
  sourceloaded: { id: string; source: CompositorSource };
  sourceunloaded: { id: string };
  compositionchange: undefined;
};

/** Listener signature for a given compositor event. */
export type CompositorEventListener<K extends keyof CompositorEventMap> = (event: CompositorEventMap[K]) => void;
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import type { MediaSource } from '../types';
|
|
2
|
+
import type { CompositorWorkerOptions, CompositorSourceOptions, FrameExportOptions } from './types';
|
|
3
|
+
import type {
|
|
4
|
+
CompositorWorkerExportPayload,
|
|
5
|
+
CompositorWorkerFrame,
|
|
6
|
+
CompositorWorkerInitPayload,
|
|
7
|
+
CompositorWorkerLoadPayload,
|
|
8
|
+
CompositorWorkerRenderPayload,
|
|
9
|
+
CompositorWorkerResizePayload,
|
|
10
|
+
CompositorWorkerResponse,
|
|
11
|
+
CompositorWorkerSourceInfo,
|
|
12
|
+
CompositorWorkerUnloadPayload,
|
|
13
|
+
} from './worker-types';
|
|
14
|
+
|
|
15
|
+
/** Constructor options for CompositorWorkerClient. */
interface CompositorWorkerClientOptions {
  canvas: HTMLCanvasElement; // control is transferred to the worker as an OffscreenCanvas
  width: number;
  height: number;
  backgroundColor: string;
  worker: CompositorWorkerOptions | boolean; // boolean toggle or detailed worker options
}

/** Resolve/reject pair for an in-flight request awaiting a worker response. */
type PendingRequest = {
  resolve: (value: unknown) => void;
  reject: (error: Error) => void;
};
|
|
27
|
+
|
|
28
|
+
export class CompositorWorkerClient {
|
|
29
|
+
private worker: Worker;
|
|
30
|
+
private nextId = 1;
|
|
31
|
+
private pending = new Map<number, PendingRequest>();
|
|
32
|
+
private ready: Promise<void>;
|
|
33
|
+
|
|
34
|
+
constructor(options: CompositorWorkerClientOptions) {
|
|
35
|
+
const workerOptions = typeof options.worker === 'boolean' ? {} : options.worker ?? {};
|
|
36
|
+
const workerType = workerOptions.type ?? 'module';
|
|
37
|
+
const workerUrl =
|
|
38
|
+
workerOptions.url ?? new URL('./compositor-worker.js', import.meta.url);
|
|
39
|
+
|
|
40
|
+
this.worker = new Worker(workerUrl, { type: workerType });
|
|
41
|
+
this.worker.onmessage = (event: MessageEvent<CompositorWorkerResponse>) => {
|
|
42
|
+
const { id, ok, result, error } = event.data;
|
|
43
|
+
const pending = this.pending.get(id);
|
|
44
|
+
if (!pending) return;
|
|
45
|
+
this.pending.delete(id);
|
|
46
|
+
if (ok) {
|
|
47
|
+
pending.resolve(result);
|
|
48
|
+
} else {
|
|
49
|
+
pending.reject(new Error(error ?? 'Worker error'));
|
|
50
|
+
}
|
|
51
|
+
};
|
|
52
|
+
this.worker.onerror = (event) => {
|
|
53
|
+
const error = event.error instanceof Error ? event.error : new Error('Worker error');
|
|
54
|
+
for (const pending of this.pending.values()) {
|
|
55
|
+
pending.reject(error);
|
|
56
|
+
}
|
|
57
|
+
this.pending.clear();
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
const offscreen = options.canvas.transferControlToOffscreen();
|
|
61
|
+
const initPayload: CompositorWorkerInitPayload = {
|
|
62
|
+
canvas: offscreen,
|
|
63
|
+
width: options.width,
|
|
64
|
+
height: options.height,
|
|
65
|
+
backgroundColor: options.backgroundColor,
|
|
66
|
+
};
|
|
67
|
+
this.ready = this.call<void>('init', initPayload, [offscreen]);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
private postMessage(kind: string, payload?: unknown, transfer?: Transferable[]): number {
|
|
71
|
+
const id = this.nextId++;
|
|
72
|
+
this.worker.postMessage({ id, kind, payload }, transfer ?? []);
|
|
73
|
+
return id;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
private call<T>(kind: string, payload?: unknown, transfer?: Transferable[]): Promise<T> {
|
|
77
|
+
const id = this.postMessage(kind, payload, transfer);
|
|
78
|
+
return new Promise<T>((resolve, reject) => {
|
|
79
|
+
this.pending.set(id, { resolve: resolve as PendingRequest['resolve'], reject });
|
|
80
|
+
});
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
async loadSource(source: MediaSource, options?: CompositorSourceOptions): Promise<CompositorWorkerSourceInfo> {
|
|
84
|
+
await this.ready;
|
|
85
|
+
const payload: CompositorWorkerLoadPayload = { source, options };
|
|
86
|
+
return this.call<CompositorWorkerSourceInfo>('loadSource', payload);
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
async loadImage(source: string | Blob | File): Promise<CompositorWorkerSourceInfo> {
|
|
90
|
+
await this.ready;
|
|
91
|
+
const payload: CompositorWorkerLoadPayload = { source };
|
|
92
|
+
return this.call<CompositorWorkerSourceInfo>('loadImage', payload);
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
async loadAudio(source: MediaSource, options?: CompositorSourceOptions): Promise<CompositorWorkerSourceInfo> {
|
|
96
|
+
await this.ready;
|
|
97
|
+
const payload: CompositorWorkerLoadPayload = { source, options };
|
|
98
|
+
return this.call<CompositorWorkerSourceInfo>('loadAudio', payload);
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
async unloadSource(id: string): Promise<boolean> {
|
|
102
|
+
await this.ready;
|
|
103
|
+
const payload: CompositorWorkerUnloadPayload = { id };
|
|
104
|
+
return this.call<boolean>('unloadSource', payload);
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
async render(frame: CompositorWorkerFrame): Promise<boolean> {
|
|
108
|
+
await this.ready;
|
|
109
|
+
const payload: CompositorWorkerRenderPayload = { frame };
|
|
110
|
+
return this.call<boolean>('render', payload);
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
async clear(): Promise<boolean> {
|
|
114
|
+
await this.ready;
|
|
115
|
+
return this.call<boolean>('clear');
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
async resize(width: number, height: number): Promise<boolean> {
|
|
119
|
+
await this.ready;
|
|
120
|
+
const payload: CompositorWorkerResizePayload = { width, height };
|
|
121
|
+
return this.call<boolean>('resize', payload);
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
async exportFrame(frame: CompositorWorkerFrame, options?: FrameExportOptions): Promise<Blob | null> {
|
|
125
|
+
await this.ready;
|
|
126
|
+
const payload: CompositorWorkerExportPayload = { frame, options };
|
|
127
|
+
return this.call<Blob | null>('exportFrame', payload);
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
dispose(): void {
|
|
131
|
+
try {
|
|
132
|
+
this.worker.postMessage({ id: this.nextId++, kind: 'dispose' });
|
|
133
|
+
} catch {
|
|
134
|
+
// ignore
|
|
135
|
+
}
|
|
136
|
+
this.worker.terminate();
|
|
137
|
+
this.pending.clear();
|
|
138
|
+
}
|
|
139
|
+
}
|