@meframe/core 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Meframe.d.ts +16 -7
- package/dist/Meframe.d.ts.map +1 -1
- package/dist/Meframe.js +75 -90
- package/dist/Meframe.js.map +1 -1
- package/dist/cache/CacheManager.d.ts +28 -11
- package/dist/cache/CacheManager.d.ts.map +1 -1
- package/dist/cache/CacheManager.js +93 -30
- package/dist/cache/CacheManager.js.map +1 -1
- package/dist/cache/L2Cache.d.ts +31 -2
- package/dist/cache/L2Cache.d.ts.map +1 -1
- package/dist/cache/L2Cache.js +245 -44
- package/dist/cache/L2Cache.js.map +1 -1
- package/dist/cache/l1/VideoL1Cache.d.ts +3 -3
- package/dist/cache/l1/VideoL1Cache.d.ts.map +1 -1
- package/dist/cache/l1/VideoL1Cache.js +13 -8
- package/dist/cache/l1/VideoL1Cache.js.map +1 -1
- package/dist/config/defaults.d.ts.map +1 -1
- package/dist/config/defaults.js +2 -1
- package/dist/config/defaults.js.map +1 -1
- package/dist/config/types.d.ts +3 -0
- package/dist/config/types.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.d.ts +7 -8
- package/dist/controllers/PlaybackController.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.js +56 -76
- package/dist/controllers/PlaybackController.js.map +1 -1
- package/dist/controllers/PreRenderService.d.ts +21 -4
- package/dist/controllers/PreRenderService.d.ts.map +1 -1
- package/dist/controllers/PreRenderService.js +67 -5
- package/dist/controllers/PreRenderService.js.map +1 -1
- package/dist/controllers/types.d.ts +2 -3
- package/dist/controllers/types.d.ts.map +1 -1
- package/dist/event/events.d.ts +1 -4
- package/dist/event/events.d.ts.map +1 -1
- package/dist/event/events.js.map +1 -1
- package/dist/model/CompositionModel.d.ts +2 -1
- package/dist/model/CompositionModel.d.ts.map +1 -1
- package/dist/model/CompositionModel.js +3 -1
- package/dist/model/CompositionModel.js.map +1 -1
- package/dist/model/patch.d.ts +6 -2
- package/dist/model/patch.d.ts.map +1 -1
- package/dist/model/patch.js +76 -2
- package/dist/model/patch.js.map +1 -1
- package/dist/model/types.d.ts +1 -0
- package/dist/model/types.d.ts.map +1 -1
- package/dist/node_modules/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js +1858 -0
- package/dist/node_modules/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js.map +1 -0
- package/dist/orchestrator/ClipSessionManager.d.ts +1 -2
- package/dist/orchestrator/ClipSessionManager.d.ts.map +1 -1
- package/dist/orchestrator/ClipSessionManager.js +1 -0
- package/dist/orchestrator/ClipSessionManager.js.map +1 -1
- package/dist/orchestrator/CompositionPlanner.d.ts +8 -7
- package/dist/orchestrator/CompositionPlanner.d.ts.map +1 -1
- package/dist/orchestrator/CompositionPlanner.js +33 -56
- package/dist/orchestrator/CompositionPlanner.js.map +1 -1
- package/dist/orchestrator/Orchestrator.d.ts +9 -2
- package/dist/orchestrator/Orchestrator.d.ts.map +1 -1
- package/dist/orchestrator/Orchestrator.js +100 -50
- package/dist/orchestrator/Orchestrator.js.map +1 -1
- package/dist/orchestrator/VideoClipSession.d.ts +14 -9
- package/dist/orchestrator/VideoClipSession.d.ts.map +1 -1
- package/dist/orchestrator/VideoClipSession.js +108 -85
- package/dist/orchestrator/VideoClipSession.js.map +1 -1
- package/dist/orchestrator/types.d.ts +1 -0
- package/dist/orchestrator/types.d.ts.map +1 -1
- package/dist/stages/compose/GlobalAudioSession.d.ts +34 -1
- package/dist/stages/compose/GlobalAudioSession.d.ts.map +1 -1
- package/dist/stages/compose/GlobalAudioSession.js +149 -5
- package/dist/stages/compose/GlobalAudioSession.js.map +1 -1
- package/dist/stages/compose/VideoComposer.d.ts +1 -0
- package/dist/stages/compose/VideoComposer.d.ts.map +1 -1
- package/dist/stages/demux/MP4Demuxer.d.ts.map +1 -1
- package/dist/stages/encode/AudioChunkEncoder.d.ts +2 -1
- package/dist/stages/encode/AudioChunkEncoder.d.ts.map +1 -1
- package/dist/stages/encode/AudioChunkEncoder.js +41 -0
- package/dist/stages/encode/AudioChunkEncoder.js.map +1 -0
- package/dist/stages/encode/BaseEncoder.d.ts +7 -3
- package/dist/stages/encode/BaseEncoder.d.ts.map +1 -1
- package/dist/stages/encode/BaseEncoder.js +173 -0
- package/dist/stages/encode/BaseEncoder.js.map +1 -0
- package/dist/stages/encode/ClipEncoderManager.d.ts +64 -0
- package/dist/stages/encode/ClipEncoderManager.d.ts.map +1 -0
- package/dist/stages/encode/index.d.ts +1 -1
- package/dist/stages/encode/index.d.ts.map +1 -1
- package/dist/stages/load/ResourceLoader.d.ts +22 -1
- package/dist/stages/load/ResourceLoader.d.ts.map +1 -1
- package/dist/stages/load/ResourceLoader.js +80 -29
- package/dist/stages/load/ResourceLoader.js.map +1 -1
- package/dist/stages/load/TaskManager.d.ts +1 -1
- package/dist/stages/load/TaskManager.d.ts.map +1 -1
- package/dist/stages/load/TaskManager.js +3 -2
- package/dist/stages/load/TaskManager.js.map +1 -1
- package/dist/stages/load/types.d.ts +4 -2
- package/dist/stages/load/types.d.ts.map +1 -1
- package/dist/stages/mux/MP4Muxer.d.ts +19 -38
- package/dist/stages/mux/MP4Muxer.d.ts.map +1 -1
- package/dist/stages/mux/MP4Muxer.js +60 -0
- package/dist/stages/mux/MP4Muxer.js.map +1 -0
- package/dist/stages/mux/MuxManager.d.ts +27 -0
- package/dist/stages/mux/MuxManager.d.ts.map +1 -0
- package/dist/stages/mux/MuxManager.js +148 -0
- package/dist/stages/mux/MuxManager.js.map +1 -0
- package/dist/stages/mux/index.d.ts +1 -0
- package/dist/stages/mux/index.d.ts.map +1 -1
- package/dist/stages/mux/types.d.ts +1 -0
- package/dist/stages/mux/types.d.ts.map +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/worker/WorkerPool.d.ts +2 -0
- package/dist/worker/WorkerPool.d.ts.map +1 -1
- package/dist/worker/WorkerPool.js +6 -5
- package/dist/worker/WorkerPool.js.map +1 -1
- package/dist/worker/types.d.ts +1 -4
- package/dist/worker/types.d.ts.map +1 -1
- package/dist/worker/types.js +0 -3
- package/dist/worker/types.js.map +1 -1
- package/dist/worker/worker-event-whitelist.d.ts.map +1 -1
- package/dist/workers/MP4Demuxer.js +7049 -6
- package/dist/workers/MP4Demuxer.js.map +1 -1
- package/dist/workers/WorkerChannel.js +0 -3
- package/dist/workers/WorkerChannel.js.map +1 -1
- package/dist/workers/stages/compose/video-compose.worker.js +126 -83
- package/dist/workers/stages/compose/video-compose.worker.js.map +1 -1
- package/dist/workers/stages/decode/decode.worker.js +25 -16
- package/dist/workers/stages/decode/decode.worker.js.map +1 -1
- package/dist/workers/stages/demux/audio-demux.worker.js +4 -4
- package/dist/workers/stages/demux/audio-demux.worker.js.map +1 -1
- package/dist/workers/stages/demux/video-demux.worker.js +9 -7
- package/dist/workers/stages/demux/video-demux.worker.js.map +1 -1
- package/dist/workers/stages/encode/encode.worker.js +191 -195
- package/dist/workers/stages/encode/encode.worker.js.map +1 -1
- package/package.json +2 -1
- package/dist/controllers/PreviewHandle.d.ts +0 -25
- package/dist/controllers/PreviewHandle.d.ts.map +0 -1
- package/dist/controllers/PreviewHandle.js +0 -45
- package/dist/controllers/PreviewHandle.js.map +0 -1
- package/dist/model/dirty-range.js +0 -220
- package/dist/model/dirty-range.js.map +0 -1
- package/dist/stages/encode/EncoderPool.d.ts +0 -28
- package/dist/stages/encode/EncoderPool.d.ts.map +0 -1
- package/dist/workers/mp4box.all.js +0 -7049
- package/dist/workers/mp4box.all.js.map +0 -1
- package/dist/workers/stages/mux/mux.worker.js +0 -501
- package/dist/workers/stages/mux/mux.worker.js.map +0 -1

package/dist/stages/compose/GlobalAudioSession.js +149 -5

@@ -1,5 +1,6 @@
 import { OfflineAudioMixer } from "./OfflineAudioMixer.js";
 import { MeframeEvent } from "../../event/events.js";
+import { AudioChunkEncoder } from "../encode/AudioChunkEncoder.js";
 class GlobalAudioSession {
   mixWindowUs = 3e6;
   mixer;
@@ -10,8 +11,8 @@ class GlobalAudioSession {
     this.mixer = new OfflineAudioMixer(deps.cacheManager, deps.getModel);
   }
   onAudioData(message) {
-    const { clipId, audioData, clipStartUs, clipDurationUs } = message;
-    this.deps.cacheManager.putClipAudioData(clipId, audioData, clipStartUs, clipDurationUs);
+    const { sessionId, audioData, clipStartUs, clipDurationUs } = message;
+    this.deps.cacheManager.putClipAudioData(sessionId, audioData, clipStartUs, clipDurationUs);
   }
   async ensureMixedPCM(startUs) {
     const model = this.deps.getModel();
@@ -33,6 +34,21 @@ class GlobalAudioSession {
       return null;
     }
   }
+  async prepareAudioForPlayback(startUs) {
+    const buffer = await this.ensureMixedPCM(startUs);
+    if (!buffer) {
+      return null;
+    }
+    const windowStartUs = this.alignToWindow(startUs);
+    const offsetUs = startUs - windowStartUs;
+    const offsetSeconds = offsetUs / 1e6;
+    const durationSeconds = buffer.duration - offsetSeconds;
+    return {
+      buffer,
+      offsetSeconds,
+      durationSeconds
+    };
+  }
   async activateAllAudioClips() {
     const model = this.deps.getModel();
     if (!model) {
@@ -46,7 +62,8 @@
           this.activeClips.add(clip.id);
           await this.deps.resourceLoader.fetch(clip.resourceId, {
             priority: "high",
-            clipId: clip.id
+            sessionId: clip.id,
+            trackId: track.id
           });
           this.deps.eventBus.emit(MeframeEvent.ClipActivated, { clipId: clip.id });
         }
@@ -54,7 +71,7 @@
     }
   }
   handleAudioStream(stream, metadata) {
-    const clipId = metadata.clipId || "unknown";
+    const sessionId = metadata.sessionId || "unknown";
     const clipStartUs = metadata.clipStartUs ?? 0;
     const clipDurationUs = metadata.clipDurationUs ?? 0;
     const reader = stream.getReader();
@@ -66,7 +83,7 @@
           return;
         }
         this.onAudioData({
-          clipId,
+          sessionId,
           audioData: value,
           clipStartUs,
           clipDurationUs
@@ -83,6 +100,133 @@
     this.deps.cacheManager.resetAudioCache();
     this.activeClips.clear();
   }
+  /**
+   * Create export encoded audio stream with metadata callback
+   * Mixes all audio clips and encodes to EncodedAudioChunk for export
+   * Extracts metadata from first chunk and invokes callback
+   */
+  async createExportEncodedStream(config, onFirstMetadata) {
+    const audioDataStream = await this.createExportAudioStream();
+    if (!audioDataStream) {
+      return null;
+    }
+    const encoder = new AudioChunkEncoder(config);
+    await encoder.initialize();
+    const encodingTransform = encoder.createStream();
+    const encodedStream = audioDataStream.pipeThrough(encodingTransform);
+    let firstMetadataExtracted = false;
+    return encodedStream.pipeThrough(
+      new TransformStream({
+        transform(encoderChunk, controller) {
+          if (!firstMetadataExtracted && onFirstMetadata) {
+            onFirstMetadata(encoderChunk.metadata);
+            firstMetadataExtracted = true;
+          }
+          controller.enqueue(encoderChunk.chunk);
+        }
+      })
+    );
+  }
+  /**
+   * Create export audio stream
+   * Mixes all audio clips for the entire composition and outputs AudioData stream
+   */
+  async createExportAudioStream() {
+    const model = this.deps.getModel();
+    if (!model) {
+      return null;
+    }
+    const totalDurationUs = model.durationUs;
+    await this.activateAllAudioClips();
+    await this.waitForAudioClipsReady();
+    return new ReadableStream({
+      start: async (controller) => {
+        const windowSize = 3e6;
+        let currentUs = 0;
+        while (currentUs < totalDurationUs) {
+          const windowEndUs = Math.min(currentUs + windowSize, totalDurationUs);
+          const mixedBuffer = await this.mixer.mix(currentUs, windowEndUs);
+          const audioData = this.audioBufferToAudioData(mixedBuffer, currentUs);
+          if (audioData) {
+            controller.enqueue(audioData);
+          }
+          currentUs = windowEndUs;
+        }
+        controller.close();
+      }
+    });
+  }
+  /**
+   * Wait for audio clips to be decoded and cached
+   */
+  async waitForAudioClipsReady() {
+    const model = this.deps.getModel();
+    if (!model) return;
+    const audioClips = model.tracks.filter((track) => track.kind === "audio").flatMap((track) => track.clips);
+    const waitPromises = audioClips.map(
+      (clip) => this.waitForClipPCM(clip.id, 1e3)
+      // Wait up to 1s per clip
+    );
+    await Promise.allSettled(waitPromises);
+  }
+  /**
+   * Wait for clip PCM data to be available
+   */
+  waitForClipPCM(clipId, timeoutMs) {
+    return new Promise((resolve) => {
+      const checkInterval = 50;
+      let elapsed = 0;
+      const check = () => {
+        if (this.deps.cacheManager.hasClipPCM(clipId)) {
+          resolve(true);
+          return;
+        }
+        elapsed += checkInterval;
+        if (elapsed >= timeoutMs) {
+          resolve(false);
+          return;
+        }
+        setTimeout(check, checkInterval);
+      };
+      check();
+    });
+  }
+  /**
+   * Convert AudioBuffer to AudioData for encoding
+   */
+  audioBufferToAudioData(buffer, timestampUs) {
+    const sampleRate = buffer.sampleRate;
+    const numberOfChannels = buffer.numberOfChannels;
+    const numberOfFrames = buffer.length;
+    const planes = [];
+    for (let channel = 0; channel < numberOfChannels; channel++) {
+      planes.push(buffer.getChannelData(channel));
+    }
+    return new AudioData({
+      format: "f32",
+      // interleaved format
+      sampleRate,
+      numberOfFrames,
+      numberOfChannels,
+      timestamp: timestampUs,
+      data: this.interleavePlanarData(planes)
+    });
+  }
+  /**
+   * Interleave planar audio data for AudioData constructor
+   */
+  interleavePlanarData(planes) {
+    const numberOfChannels = planes.length;
+    const numberOfFrames = planes[0]?.length ?? 0;
+    const totalSamples = numberOfChannels * numberOfFrames;
+    const interleaved = new Float32Array(totalSamples);
+    for (let frame = 0; frame < numberOfFrames; frame++) {
+      for (let channel = 0; channel < numberOfChannels; channel++) {
+        interleaved[frame * numberOfChannels + channel] = planes[channel][frame];
+      }
+    }
+    return interleaved.buffer;
+  }
   async setupAudioPipeline(clip) {
     const { id: clipId, resourceId, startUs, durationUs } = clip;
     const audioDemuxWorker = await this.deps.workers.get("audioDemux", clipId, { lazy: true });
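Taken together, the added methods give GlobalAudioSession an export path: the timeline is mixed in 3-second windows, each window is converted to AudioData, and the result is piped through an AudioChunkEncoder into EncodedAudioChunks, with the first chunk's metadata surfaced through a callback. A minimal sketch of how a caller might consume this, based only on the method shapes visible in this diff; how a GlobalAudioSession instance is obtained is not shown here, so `session` is declared structurally and everything else is an assumption:

```ts
// Sketch only; method shapes come from the diff above, the rest is assumed.
declare const session: {
  createExportEncodedStream(
    config?: Partial<AudioEncoderConfig>,
    onFirstMetadata?: (metadata: EncodedAudioChunkMetadata) => void
  ): Promise<ReadableStream<EncodedAudioChunk> | null>;
};

async function collectExportAudio(): Promise<{
  chunks: EncodedAudioChunk[];
  decoderConfig?: AudioDecoderConfig;
}> {
  let decoderConfig: AudioDecoderConfig | undefined;

  const stream = await session.createExportEncodedStream(
    { bitrate: 192_000 }, // merged over AudioChunkEncoder.DEFAULT_CONFIG (AAC-LC, 48 kHz, stereo)
    (metadata) => {
      decoderConfig = metadata.decoderConfig; // first-chunk metadata, typically handed to the muxer
    }
  );
  if (!stream) return { chunks: [] };

  const chunks: EncodedAudioChunk[] = [];
  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value); // EncodedAudioChunk, in presentation order
  }
  return { chunks, decoderConfig };
}
```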

package/dist/stages/compose/GlobalAudioSession.js.map +1 -1
@@ -1 +1 @@
-
{"version":3,"file":"GlobalAudioSession.js","sources":["../../../src/stages/compose/GlobalAudioSession.ts"],"sourcesContent":["import type { TimeUs } from '../../model/types';\nimport { OfflineAudioMixer } from './OfflineAudioMixer';\nimport type { CompositionModel, Clip } from '../../model';\nimport type { WorkerPool } from '../../worker/WorkerPool';\nimport type { ResourceLoader } from '../load/ResourceLoader';\nimport type { EventBus } from '../../event/EventBus';\nimport type { EventPayloadMap } from '../../event/events';\nimport { MeframeEvent } from '../../event/events';\nimport type { CacheManager } from '../../cache/CacheManager';\n\ninterface AudioDataMessage {\n clipId: string;\n audioData: AudioData;\n clipStartUs: TimeUs;\n clipDurationUs: TimeUs;\n}\n\ninterface AudioSessionDeps {\n cacheManager: CacheManager;\n workers: WorkerPool;\n resourceLoader: ResourceLoader;\n eventBus: EventBus<EventPayloadMap>;\n getModel: () => CompositionModel | null;\n buildWorkerConfigs: () => any;\n}\n\nexport class GlobalAudioSession {\n private mixWindowUs = 3_000_000;\n private mixer: OfflineAudioMixer;\n private activeClips = new Set<string>();\n private deps: AudioSessionDeps;\n\n constructor(deps: AudioSessionDeps) {\n this.deps = deps;\n this.mixer = new OfflineAudioMixer(deps.cacheManager, deps.getModel);\n }\n\n onAudioData(message: AudioDataMessage): void {\n const { clipId, audioData, clipStartUs, clipDurationUs } = message;\n this.deps.cacheManager.putClipAudioData(clipId, audioData, clipStartUs, clipDurationUs);\n }\n\n async ensureMixedPCM(startUs: TimeUs): Promise<AudioBuffer | null> {\n const model = this.deps.getModel();\n if (!model) {\n return null;\n }\n\n const windowStartUs = this.alignToWindow(startUs);\n const windowEndUs = windowStartUs + this.mixWindowUs;\n\n const cached = this.deps.cacheManager.getMixedAudio(windowStartUs, windowEndUs);\n if (cached) {\n return cached;\n }\n\n try {\n const mixedBuffer = await this.mixer.mix(windowStartUs, windowEndUs);\n this.deps.cacheManager.putMixedAudio(windowStartUs, windowEndUs, mixedBuffer);\n return mixedBuffer;\n } catch (error) {\n console.error('[GlobalAudioSession] Mix failed:', error);\n return null;\n }\n }\n\n async activateAllAudioClips(): Promise<void> {\n const model = this.deps.getModel();\n if (!model) {\n return;\n }\n\n const audioTracks = model.tracks.filter((track) => track.kind === 'audio');\n\n for (const track of audioTracks) {\n for (const clip of track.clips) {\n if (!this.activeClips.has(clip.id)) {\n await this.setupAudioPipeline(clip);\n this.activeClips.add(clip.id);\n\n await this.deps.resourceLoader.fetch(clip.resourceId, {\n priority: 'high',\n clipId: clip.id,\n });\n\n this.deps.eventBus.emit(MeframeEvent.ClipActivated, { clipId: clip.id });\n }\n }\n }\n }\n\n handleAudioStream(stream: ReadableStream<AudioData>, metadata: Record<string, any>): void {\n const clipId = metadata.clipId || 'unknown';\n const clipStartUs = metadata.clipStartUs ?? 0;\n const clipDurationUs = metadata.clipDurationUs ?? 
0;\n\n const reader = stream.getReader();\n const pump = async (): Promise<void> => {\n try {\n const { done, value } = await reader.read();\n if (done) {\n reader.releaseLock();\n return;\n }\n\n this.onAudioData({\n clipId,\n audioData: value,\n clipStartUs,\n clipDurationUs,\n });\n\n await pump();\n } catch (error) {\n console.error('[GlobalAudioSession] Audio stream error:', error);\n reader.releaseLock();\n }\n };\n\n pump();\n }\n\n reset(): void {\n this.deps.cacheManager.resetAudioCache();\n this.activeClips.clear();\n }\n\n private async setupAudioPipeline(clip: Clip): Promise<void> {\n const { id: clipId, resourceId, startUs, durationUs } = clip;\n const audioDemuxWorker = await this.deps.workers.get('audioDemux', clipId, { lazy: true });\n const decodeWorker = await this.deps.workers.get('decode');\n\n const demuxToDecodeChannel = new MessageChannel();\n await audioDemuxWorker.send(\n 'connect',\n { direction: 'downstream', port: demuxToDecodeChannel.port1, streamType: 'audio', clipId },\n { transfer: [demuxToDecodeChannel.port1] }\n );\n await decodeWorker.send(\n 'connect',\n {\n direction: 'upstream',\n port: demuxToDecodeChannel.port2,\n streamType: 'audio',\n clipId,\n clipStartUs: startUs || 0,\n clipDurationUs: durationUs || 0,\n },\n { transfer: [demuxToDecodeChannel.port2] }\n );\n\n const demuxConfig = this.deps.buildWorkerConfigs().audioDemux;\n await audioDemuxWorker.send('configure', {\n initial: true,\n resourceId,\n clipId,\n config: demuxConfig,\n });\n }\n\n private alignToWindow(timeUs: TimeUs): TimeUs {\n return Math.floor(timeUs / this.mixWindowUs) * this.mixWindowUs;\n }\n}\n"],"names":[],"mappings":";;AA0BO,MAAM,mBAAmB;AAAA,EACtB,cAAc;AAAA,EACd;AAAA,EACA,kCAAkB,IAAA;AAAA,EAClB;AAAA,EAER,YAAY,MAAwB;AAClC,SAAK,OAAO;AACZ,SAAK,QAAQ,IAAI,kBAAkB,KAAK,cAAc,KAAK,QAAQ;AAAA,EACrE;AAAA,EAEA,YAAY,SAAiC;AAC3C,UAAM,EAAE,QAAQ,WAAW,aAAa,mBAAmB;AAC3D,SAAK,KAAK,aAAa,iBAAiB,QAAQ,WAAW,aAAa,cAAc;AAAA,EACxF;AAAA,EAEA,MAAM,eAAe,SAA8C;AACjE,UAAM,QAAQ,KAAK,KAAK,SAAA;AACxB,QAAI,CAAC,OAAO;AACV,aAAO;AAAA,IACT;AAEA,UAAM,gBAAgB,KAAK,cAAc,OAAO;AAChD,UAAM,cAAc,gBAAgB,KAAK;AAEzC,UAAM,SAAS,KAAK,KAAK,aAAa,cAAc,eAAe,WAAW;AAC9E,QAAI,QAAQ;AACV,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,cAAc,MAAM,KAAK,MAAM,IAAI,eAAe,WAAW;AACnE,WAAK,KAAK,aAAa,cAAc,eAAe,aAAa,WAAW;AAC5E,aAAO;AAAA,IACT,SAAS,OAAO;AACd,cAAQ,MAAM,oCAAoC,KAAK;AACvD,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,wBAAuC;AAC3C,UAAM,QAAQ,KAAK,KAAK,SAAA;AACxB,QAAI,CAAC,OAAO;AACV;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,OAAO,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO;AAEzE,eAAW,SAAS,aAAa;AAC/B,iBAAW,QAAQ,MAAM,OAAO;AAC9B,YAAI,CAAC,KAAK,YAAY,IAAI,KAAK,EAAE,GAAG;AAClC,gBAAM,KAAK,mBAAmB,IAAI;AAClC,eAAK,YAAY,IAAI,KAAK,EAAE;AAE5B,gBAAM,KAAK,KAAK,eAAe,MAAM,KAAK,YAAY;AAAA,YACpD,UAAU;AAAA,YACV,QAAQ,KAAK;AAAA,UAAA,CACd;AAED,eAAK,KAAK,SAAS,KAAK,aAAa,eAAe,EAAE,QAAQ,KAAK,IAAI;AAAA,QACzE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAkB,QAAmC,UAAqC;AACxF,UAAM,SAAS,SAAS,UAAU;AAClC,UAAM,cAAc,SAAS,eAAe;AAC5C,UAAM,iBAAiB,SAAS,kBAAkB;AAElD,UAAM,SAAS,OAAO,UAAA;AACtB,UAAM,OAAO,YAA2B;AACtC,UAAI;AACF,cAAM,EAAE,MAAM,MAAA,IAAU,MAAM,OAAO,KAAA;AACrC,YAAI,MAAM;AACR,iBAAO,YAAA;AACP;AAAA,QACF;AAEA,aAAK,YAAY;AAAA,UACf;AAAA,UACA,WAAW;AAAA,UACX;AAAA,UACA;AAAA,QAAA,CACD;AAED,cAAM,KAAA;AAAA,MACR,SAAS,OAAO;AACd,gBAAQ,MAAM,4CAA4C,KAAK;AAC/D,eAAO,YAAA;AAAA,MACT;AAAA,IACF;AAEA,SAAA;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,SAAK,KAAK,aAAa,gBAAA;AACvB,SAAK,YAAY,MAAA;AAAA,EACnB;AAAA,EAEA,MAAc,mBAAmB,MAA2B;AAC1D,UAAM,EAAE,IAAI,QAAQ,YAAY,SAAS,eAAe;AACxD,UAAM,mBAAmB,MAAM,KAAK,KAAK,QAAQ,IAAI,cAAc,QAAQ,EAAE,MAAM,KAAA,CAAM;AACzF,UAAM,eAAe,MAA
M,KAAK,KAAK,QAAQ,IAAI,QAAQ;AAEzD,UAAM,uBAAuB,IAAI,eAAA;AACjC,UAAM,iBAAiB;AAAA,MACrB;AAAA,MACA,EAAE,WAAW,cAAc,MAAM,qBAAqB,OAAO,YAAY,SAAS,OAAA;AAAA,MAClF,EAAE,UAAU,CAAC,qBAAqB,KAAK,EAAA;AAAA,IAAE;AAE3C,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,QACE,WAAW;AAAA,QACX,MAAM,qBAAqB;AAAA,QAC3B,YAAY;AAAA,QACZ;AAAA,QACA,aAAa,WAAW;AAAA,QACxB,gBAAgB,cAAc;AAAA,MAAA;AAAA,MAEhC,EAAE,UAAU,CAAC,qBAAqB,KAAK,EAAA;AAAA,IAAE;AAG3C,UAAM,cAAc,KAAK,KAAK,mBAAA,EAAqB;AACnD,UAAM,iBAAiB,KAAK,aAAa;AAAA,MACvC,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAAA,EACH;AAAA,EAEQ,cAAc,QAAwB;AAC5C,WAAO,KAAK,MAAM,SAAS,KAAK,WAAW,IAAI,KAAK;AAAA,EACtD;AACF;"}
+
{"version":3,"file":"GlobalAudioSession.js","sources":["../../../src/stages/compose/GlobalAudioSession.ts"],"sourcesContent":["import type { TimeUs } from '../../model/types';\nimport { OfflineAudioMixer } from './OfflineAudioMixer';\nimport type { CompositionModel, Clip } from '../../model';\nimport type { WorkerPool } from '../../worker/WorkerPool';\nimport type { ResourceLoader } from '../load/ResourceLoader';\nimport type { EventBus } from '../../event/EventBus';\nimport type { EventPayloadMap } from '../../event/events';\nimport { MeframeEvent } from '../../event/events';\nimport type { CacheManager } from '../../cache/CacheManager';\nimport { AudioChunkEncoder } from '../encode/AudioChunkEncoder';\n\ninterface AudioDataMessage {\n sessionId: string;\n audioData: AudioData;\n clipStartUs: TimeUs;\n clipDurationUs: TimeUs;\n}\n\nexport interface AudioPlaybackInfo {\n buffer: AudioBuffer;\n offsetSeconds: number;\n durationSeconds: number;\n}\n\ninterface AudioSessionDeps {\n cacheManager: CacheManager;\n workers: WorkerPool;\n resourceLoader: ResourceLoader;\n eventBus: EventBus<EventPayloadMap>;\n getModel: () => CompositionModel | null;\n buildWorkerConfigs: () => any;\n}\n\nexport class GlobalAudioSession {\n private mixWindowUs = 3_000_000;\n private mixer: OfflineAudioMixer;\n private activeClips = new Set<string>();\n private deps: AudioSessionDeps;\n\n constructor(deps: AudioSessionDeps) {\n this.deps = deps;\n this.mixer = new OfflineAudioMixer(deps.cacheManager, deps.getModel);\n }\n\n onAudioData(message: AudioDataMessage): void {\n const { sessionId, audioData, clipStartUs, clipDurationUs } = message;\n this.deps.cacheManager.putClipAudioData(sessionId, audioData, clipStartUs, clipDurationUs);\n }\n\n async ensureMixedPCM(startUs: TimeUs): Promise<AudioBuffer | null> {\n const model = this.deps.getModel();\n if (!model) {\n return null;\n }\n\n const windowStartUs = this.alignToWindow(startUs);\n const windowEndUs = windowStartUs + this.mixWindowUs;\n\n const cached = this.deps.cacheManager.getMixedAudio(windowStartUs, windowEndUs);\n if (cached) {\n return cached;\n }\n\n try {\n const mixedBuffer = await this.mixer.mix(windowStartUs, windowEndUs);\n this.deps.cacheManager.putMixedAudio(windowStartUs, windowEndUs, mixedBuffer);\n return mixedBuffer;\n } catch (error) {\n console.error('[GlobalAudioSession] Mix failed:', error);\n return null;\n }\n }\n\n async prepareAudioForPlayback(startUs: TimeUs): Promise<AudioPlaybackInfo | null> {\n const buffer = await this.ensureMixedPCM(startUs);\n if (!buffer) {\n return null;\n }\n\n const windowStartUs = this.alignToWindow(startUs);\n const offsetUs = startUs - windowStartUs;\n const offsetSeconds = offsetUs / 1_000_000;\n const durationSeconds = buffer.duration - offsetSeconds;\n\n return {\n buffer,\n offsetSeconds,\n durationSeconds,\n };\n }\n\n async activateAllAudioClips(): Promise<void> {\n const model = this.deps.getModel();\n if (!model) {\n return;\n }\n\n const audioTracks = model.tracks.filter((track) => track.kind === 'audio');\n\n for (const track of audioTracks) {\n for (const clip of track.clips) {\n if (!this.activeClips.has(clip.id)) {\n await this.setupAudioPipeline(clip);\n this.activeClips.add(clip.id);\n\n await this.deps.resourceLoader.fetch(clip.resourceId, {\n priority: 'high',\n sessionId: clip.id,\n trackId: track.id,\n });\n\n this.deps.eventBus.emit(MeframeEvent.ClipActivated, { clipId: clip.id });\n }\n }\n }\n }\n\n handleAudioStream(stream: ReadableStream<AudioData>, metadata: Record<string, 
any>): void {\n const sessionId = metadata.sessionId || 'unknown';\n const clipStartUs = metadata.clipStartUs ?? 0;\n const clipDurationUs = metadata.clipDurationUs ?? 0;\n\n const reader = stream.getReader();\n const pump = async (): Promise<void> => {\n try {\n const { done, value } = await reader.read();\n if (done) {\n reader.releaseLock();\n return;\n }\n\n this.onAudioData({\n sessionId,\n audioData: value,\n clipStartUs,\n clipDurationUs,\n });\n\n await pump();\n } catch (error) {\n console.error('[GlobalAudioSession] Audio stream error:', error);\n reader.releaseLock();\n }\n };\n\n pump();\n }\n\n reset(): void {\n this.deps.cacheManager.resetAudioCache();\n this.activeClips.clear();\n }\n\n /**\n * Create export encoded audio stream with metadata callback\n * Mixes all audio clips and encodes to EncodedAudioChunk for export\n * Extracts metadata from first chunk and invokes callback\n */\n async createExportEncodedStream(\n config?: Partial<AudioEncoderConfig>,\n onFirstMetadata?: (metadata: EncodedAudioChunkMetadata) => void\n ): Promise<ReadableStream<EncodedAudioChunk> | null> {\n const audioDataStream = await this.createExportAudioStream();\n if (!audioDataStream) {\n return null;\n }\n\n const encoder = new AudioChunkEncoder(config);\n await encoder.initialize();\n\n const encodingTransform = encoder.createStream();\n const encodedStream = audioDataStream.pipeThrough(encodingTransform);\n\n let firstMetadataExtracted = false;\n\n // Extract chunk from EncoderChunk and capture first metadata\n return encodedStream.pipeThrough(\n new TransformStream({\n transform(encoderChunk, controller) {\n // Extract metadata from first chunk\n if (!firstMetadataExtracted && onFirstMetadata) {\n onFirstMetadata(encoderChunk.metadata as EncodedAudioChunkMetadata);\n firstMetadataExtracted = true;\n }\n controller.enqueue(encoderChunk.chunk as EncodedAudioChunk);\n },\n })\n );\n }\n\n /**\n * Create export audio stream\n * Mixes all audio clips for the entire composition and outputs AudioData stream\n */\n async createExportAudioStream(): Promise<ReadableStream<AudioData> | null> {\n const model = this.deps.getModel();\n if (!model) {\n return null;\n }\n\n const totalDurationUs = model.durationUs;\n\n // Ensure all audio clips are active and cached\n await this.activateAllAudioClips();\n\n // Wait for PCM data to be available\n await this.waitForAudioClipsReady();\n\n return new ReadableStream<AudioData>({\n start: async (controller) => {\n const windowSize = 3_000_000; // 3 seconds per chunk\n let currentUs = 0;\n\n while (currentUs < totalDurationUs) {\n const windowEndUs = Math.min(currentUs + windowSize, totalDurationUs);\n\n // Mix this window\n const mixedBuffer = await this.mixer.mix(currentUs, windowEndUs);\n\n // Convert AudioBuffer to AudioData\n const audioData = this.audioBufferToAudioData(mixedBuffer, currentUs);\n if (audioData) {\n controller.enqueue(audioData);\n }\n\n currentUs = windowEndUs;\n }\n\n controller.close();\n },\n });\n }\n\n /**\n * Wait for audio clips to be decoded and cached\n */\n private async waitForAudioClipsReady(): Promise<void> {\n const model = this.deps.getModel();\n if (!model) return;\n\n const audioClips = model.tracks\n .filter((track) => track.kind === 'audio')\n .flatMap((track) => track.clips);\n\n // Wait for each clip to have PCM data\n const waitPromises = audioClips.map(\n (clip) => this.waitForClipPCM(clip.id, 1000) // Wait up to 1s per clip\n );\n\n await Promise.allSettled(waitPromises);\n }\n\n /**\n * Wait for clip PCM data to be 
available\n */\n private waitForClipPCM(clipId: string, timeoutMs: number): Promise<boolean> {\n return new Promise((resolve) => {\n const checkInterval = 50;\n let elapsed = 0;\n\n const check = () => {\n if (this.deps.cacheManager.hasClipPCM(clipId)) {\n resolve(true);\n return;\n }\n\n elapsed += checkInterval;\n if (elapsed >= timeoutMs) {\n resolve(false);\n return;\n }\n\n setTimeout(check, checkInterval);\n };\n\n check();\n });\n }\n\n /**\n * Convert AudioBuffer to AudioData for encoding\n */\n private audioBufferToAudioData(buffer: AudioBuffer, timestampUs: TimeUs): AudioData | null {\n const sampleRate = buffer.sampleRate;\n const numberOfChannels = buffer.numberOfChannels;\n const numberOfFrames = buffer.length;\n\n // Interleave channels into planar format\n const planes: Float32Array[] = [];\n for (let channel = 0; channel < numberOfChannels; channel++) {\n planes.push(buffer.getChannelData(channel));\n }\n\n return new AudioData({\n format: 'f32', // interleaved format\n sampleRate,\n numberOfFrames,\n numberOfChannels,\n timestamp: timestampUs,\n data: this.interleavePlanarData(planes),\n });\n }\n\n /**\n * Interleave planar audio data for AudioData constructor\n */\n private interleavePlanarData(planes: Float32Array[]): ArrayBuffer {\n const numberOfChannels = planes.length;\n const numberOfFrames = planes[0]?.length ?? 0;\n const totalSamples = numberOfChannels * numberOfFrames;\n\n const interleaved = new Float32Array(totalSamples);\n\n for (let frame = 0; frame < numberOfFrames; frame++) {\n for (let channel = 0; channel < numberOfChannels; channel++) {\n interleaved[frame * numberOfChannels + channel] = planes[channel]![frame]!;\n }\n }\n\n return interleaved.buffer;\n }\n\n private async setupAudioPipeline(clip: Clip): Promise<void> {\n const { id: clipId, resourceId, startUs, durationUs } = clip;\n const audioDemuxWorker = await this.deps.workers.get('audioDemux', clipId, { lazy: true });\n const decodeWorker = await this.deps.workers.get('decode');\n\n const demuxToDecodeChannel = new MessageChannel();\n await audioDemuxWorker.send(\n 'connect',\n { direction: 'downstream', port: demuxToDecodeChannel.port1, streamType: 'audio', clipId },\n { transfer: [demuxToDecodeChannel.port1] }\n );\n await decodeWorker.send(\n 'connect',\n {\n direction: 'upstream',\n port: demuxToDecodeChannel.port2,\n streamType: 'audio',\n clipId,\n clipStartUs: startUs || 0,\n clipDurationUs: durationUs || 0,\n },\n { transfer: [demuxToDecodeChannel.port2] }\n );\n\n const demuxConfig = this.deps.buildWorkerConfigs().audioDemux;\n await audioDemuxWorker.send('configure', {\n initial: true,\n resourceId,\n clipId,\n config: demuxConfig,\n });\n }\n\n private alignToWindow(timeUs: TimeUs): TimeUs {\n return Math.floor(timeUs / this.mixWindowUs) * this.mixWindowUs;\n 
}\n}\n"],"names":[],"mappings":";;;AAiCO,MAAM,mBAAmB;AAAA,EACtB,cAAc;AAAA,EACd;AAAA,EACA,kCAAkB,IAAA;AAAA,EAClB;AAAA,EAER,YAAY,MAAwB;AAClC,SAAK,OAAO;AACZ,SAAK,QAAQ,IAAI,kBAAkB,KAAK,cAAc,KAAK,QAAQ;AAAA,EACrE;AAAA,EAEA,YAAY,SAAiC;AAC3C,UAAM,EAAE,WAAW,WAAW,aAAa,mBAAmB;AAC9D,SAAK,KAAK,aAAa,iBAAiB,WAAW,WAAW,aAAa,cAAc;AAAA,EAC3F;AAAA,EAEA,MAAM,eAAe,SAA8C;AACjE,UAAM,QAAQ,KAAK,KAAK,SAAA;AACxB,QAAI,CAAC,OAAO;AACV,aAAO;AAAA,IACT;AAEA,UAAM,gBAAgB,KAAK,cAAc,OAAO;AAChD,UAAM,cAAc,gBAAgB,KAAK;AAEzC,UAAM,SAAS,KAAK,KAAK,aAAa,cAAc,eAAe,WAAW;AAC9E,QAAI,QAAQ;AACV,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,cAAc,MAAM,KAAK,MAAM,IAAI,eAAe,WAAW;AACnE,WAAK,KAAK,aAAa,cAAc,eAAe,aAAa,WAAW;AAC5E,aAAO;AAAA,IACT,SAAS,OAAO;AACd,cAAQ,MAAM,oCAAoC,KAAK;AACvD,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,wBAAwB,SAAoD;AAChF,UAAM,SAAS,MAAM,KAAK,eAAe,OAAO;AAChD,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AAEA,UAAM,gBAAgB,KAAK,cAAc,OAAO;AAChD,UAAM,WAAW,UAAU;AAC3B,UAAM,gBAAgB,WAAW;AACjC,UAAM,kBAAkB,OAAO,WAAW;AAE1C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,MAAM,wBAAuC;AAC3C,UAAM,QAAQ,KAAK,KAAK,SAAA;AACxB,QAAI,CAAC,OAAO;AACV;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,OAAO,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO;AAEzE,eAAW,SAAS,aAAa;AAC/B,iBAAW,QAAQ,MAAM,OAAO;AAC9B,YAAI,CAAC,KAAK,YAAY,IAAI,KAAK,EAAE,GAAG;AAClC,gBAAM,KAAK,mBAAmB,IAAI;AAClC,eAAK,YAAY,IAAI,KAAK,EAAE;AAE5B,gBAAM,KAAK,KAAK,eAAe,MAAM,KAAK,YAAY;AAAA,YACpD,UAAU;AAAA,YACV,WAAW,KAAK;AAAA,YAChB,SAAS,MAAM;AAAA,UAAA,CAChB;AAED,eAAK,KAAK,SAAS,KAAK,aAAa,eAAe,EAAE,QAAQ,KAAK,IAAI;AAAA,QACzE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAkB,QAAmC,UAAqC;AACxF,UAAM,YAAY,SAAS,aAAa;AACxC,UAAM,cAAc,SAAS,eAAe;AAC5C,UAAM,iBAAiB,SAAS,kBAAkB;AAElD,UAAM,SAAS,OAAO,UAAA;AACtB,UAAM,OAAO,YAA2B;AACtC,UAAI;AACF,cAAM,EAAE,MAAM,MAAA,IAAU,MAAM,OAAO,KAAA;AACrC,YAAI,MAAM;AACR,iBAAO,YAAA;AACP;AAAA,QACF;AAEA,aAAK,YAAY;AAAA,UACf;AAAA,UACA,WAAW;AAAA,UACX;AAAA,UACA;AAAA,QAAA,CACD;AAED,cAAM,KAAA;AAAA,MACR,SAAS,OAAO;AACd,gBAAQ,MAAM,4CAA4C,KAAK;AAC/D,eAAO,YAAA;AAAA,MACT;AAAA,IACF;AAEA,SAAA;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,SAAK,KAAK,aAAa,gBAAA;AACvB,SAAK,YAAY,MAAA;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,0BACJ,QACA,iBACmD;AACnD,UAAM,kBAAkB,MAAM,KAAK,wBAAA;AACnC,QAAI,CAAC,iBAAiB;AACpB,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,IAAI,kBAAkB,MAAM;AAC5C,UAAM,QAAQ,WAAA;AAEd,UAAM,oBAAoB,QAAQ,aAAA;AAClC,UAAM,gBAAgB,gBAAgB,YAAY,iBAAiB;AAEnE,QAAI,yBAAyB;AAG7B,WAAO,cAAc;AAAA,MACnB,IAAI,gBAAgB;AAAA,QAClB,UAAU,cAAc,YAAY;AAElC,cAAI,CAAC,0BAA0B,iBAAiB;AAC9C,4BAAgB,aAAa,QAAqC;AAClE,qCAAyB;AAAA,UAC3B;AACA,qBAAW,QAAQ,aAAa,KAA0B;AAAA,QAC5D;AAAA,MAAA,CACD;AAAA,IAAA;AAAA,EAEL;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,0BAAqE;AACzE,UAAM,QAAQ,KAAK,KAAK,SAAA;AACxB,QAAI,CAAC,OAAO;AACV,aAAO;AAAA,IACT;AAEA,UAAM,kBAAkB,MAAM;AAG9B,UAAM,KAAK,sBAAA;AAGX,UAAM,KAAK,uBAAA;AAEX,WAAO,IAAI,eAA0B;AAAA,MACnC,OAAO,OAAO,eAAe;AAC3B,cAAM,aAAa;AACnB,YAAI,YAAY;AAEhB,eAAO,YAAY,iBAAiB;AAClC,gBAAM,cAAc,KAAK,IAAI,YAAY,YAAY,eAAe;AAGpE,gBAAM,cAAc,MAAM,KAAK,MAAM,IAAI,WAAW,WAAW;AAG/D,gBAAM,YAAY,KAAK,uBAAuB,aAAa,SAAS;AACpE,cAAI,WAAW;AACb,uBAAW,QAAQ,SAAS;AAAA,UAC9B;AAEA,sBAAY;AAAA,QACd;AAEA,mBAAW,MAAA;AAAA,MACb;AAAA,IAAA,CACD;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,yBAAwC;AACpD,UAAM,QAAQ,KAAK,KAAK,SAAA;AACxB,QAAI,CAAC,MAAO;AAEZ,UAAM,aAAa,MAAM,OACtB,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO,EACxC,QAAQ,CAAC,UAAU,MAAM,KAAK;AAGjC,UAAM,eAAe,WAAW;AAAA,MAC9B,CAAC,SAAS,KAAK,eAAe,KAAK,IAAI,GAAI;AAAA;AAAA,IAAA;AAG7C,UAAM,QAAQ,WAAW,YAAY;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,QAAgB,WAAqC;AAC1E,WAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,YAAM,gBAAgB;AACtB,UAAI,UAAU;AAEd,YAAM,QAAQ,MAAM;AAClB,YAAI,KAAK,KAAK,aAAa,WAAW,MAAM,GAAG;AAC7C,kBAAQ,IAAI;A
ACZ;AAAA,QACF;AAEA,mBAAW;AACX,YAAI,WAAW,WAAW;AACxB,kBAAQ,KAAK;AACb;AAAA,QACF;AAEA,mBAAW,OAAO,aAAa;AAAA,MACjC;AAEA,YAAA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,QAAqB,aAAuC;AACzF,UAAM,aAAa,OAAO;AAC1B,UAAM,mBAAmB,OAAO;AAChC,UAAM,iBAAiB,OAAO;AAG9B,UAAM,SAAyB,CAAA;AAC/B,aAAS,UAAU,GAAG,UAAU,kBAAkB,WAAW;AAC3D,aAAO,KAAK,OAAO,eAAe,OAAO,CAAC;AAAA,IAC5C;AAEA,WAAO,IAAI,UAAU;AAAA,MACnB,QAAQ;AAAA;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,MAAM,KAAK,qBAAqB,MAAM;AAAA,IAAA,CACvC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,QAAqC;AAChE,UAAM,mBAAmB,OAAO;AAChC,UAAM,iBAAiB,OAAO,CAAC,GAAG,UAAU;AAC5C,UAAM,eAAe,mBAAmB;AAExC,UAAM,cAAc,IAAI,aAAa,YAAY;AAEjD,aAAS,QAAQ,GAAG,QAAQ,gBAAgB,SAAS;AACnD,eAAS,UAAU,GAAG,UAAU,kBAAkB,WAAW;AAC3D,oBAAY,QAAQ,mBAAmB,OAAO,IAAI,OAAO,OAAO,EAAG,KAAK;AAAA,MAC1E;AAAA,IACF;AAEA,WAAO,YAAY;AAAA,EACrB;AAAA,EAEA,MAAc,mBAAmB,MAA2B;AAC1D,UAAM,EAAE,IAAI,QAAQ,YAAY,SAAS,eAAe;AACxD,UAAM,mBAAmB,MAAM,KAAK,KAAK,QAAQ,IAAI,cAAc,QAAQ,EAAE,MAAM,KAAA,CAAM;AACzF,UAAM,eAAe,MAAM,KAAK,KAAK,QAAQ,IAAI,QAAQ;AAEzD,UAAM,uBAAuB,IAAI,eAAA;AACjC,UAAM,iBAAiB;AAAA,MACrB;AAAA,MACA,EAAE,WAAW,cAAc,MAAM,qBAAqB,OAAO,YAAY,SAAS,OAAA;AAAA,MAClF,EAAE,UAAU,CAAC,qBAAqB,KAAK,EAAA;AAAA,IAAE;AAE3C,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,QACE,WAAW;AAAA,QACX,MAAM,qBAAqB;AAAA,QAC3B,YAAY;AAAA,QACZ;AAAA,QACA,aAAa,WAAW;AAAA,QACxB,gBAAgB,cAAc;AAAA,MAAA;AAAA,MAEhC,EAAE,UAAU,CAAC,qBAAqB,KAAK,EAAA;AAAA,IAAE;AAG3C,UAAM,cAAc,KAAK,KAAK,mBAAA,EAAqB;AACnD,UAAM,iBAAiB,KAAK,aAAa;AAAA,MACvC,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAAA,EACH;AAAA,EAEQ,cAAc,QAAwB;AAC5C,WAAO,KAAK,MAAM,SAAS,KAAK,WAAW,IAAI,KAAK;AAAA,EACtD;AACF;"}

package/dist/stages/compose/VideoComposer.d.ts +1 -0

@@ -4,6 +4,7 @@ import { ClipInstructionSet } from './instructions';
 interface ComposeStreams {
     composeStream: WritableStream<ComposeRequest>;
     cacheStream: ReadableStream<VideoFrame>;
+    encodeStream: ReadableStream<VideoFrame>;
 }
 /**
  * VideoComposer - Main visual composition orchestrator

package/dist/stages/compose/VideoComposer.d.ts.map +1 -1
@@ -1 +1 @@
-
{"version":3,"file":"VideoComposer.d.ts","sourceRoot":"","sources":["../../../src/stages/compose/VideoComposer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,kBAAkB,EAClB,cAAc,EACd,aAAa,EACb,gBAAgB,EAGjB,MAAM,SAAS,CAAC;AAKjB,OAAO,EAAE,kBAAkB,EAAE,MAAM,gBAAgB,CAAC;AAEpD,UAAU,cAAc;IACtB,aAAa,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;IAC9C,WAAW,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;
+
{"version":3,"file":"VideoComposer.d.ts","sourceRoot":"","sources":["../../../src/stages/compose/VideoComposer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,kBAAkB,EAClB,cAAc,EACd,aAAa,EACb,gBAAgB,EAGjB,MAAM,SAAS,CAAC;AAKjB,OAAO,EAAE,kBAAkB,EAAE,MAAM,gBAAgB,CAAC;AAEpD,UAAU,cAAc;IACtB,aAAa,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;IAC9C,WAAW,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;IACxC,YAAY,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;CAC1C;AAED;;GAEG;AACH,qBAAa,aAAa;IACxB,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,kBAAkB,CAAC,CAAC;IAC9C,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC;IAEjC,OAAO,CAAC,GAAG,CAAoC;IAC/C,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,mBAAmB,CAAsB;IACjD,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,eAAe,CAAyB;gBAEpC,MAAM,EAAE,kBAAkB;IAyBtC,OAAO,CAAC,aAAa;IAuBrB,aAAa,CAAC,YAAY,CAAC,EAAE,kBAAkB,GAAG,cAAc;IAqC1D,YAAY,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;IAkE7D,iBAAiB,CACrB,WAAW,EAAE,cAAc,EAC3B,SAAS,EAAE,cAAc,EACzB,UAAU,EAAE,gBAAgB,GAC3B,OAAO,CAAC,aAAa,CAAC;IAWzB,OAAO,CAAC,WAAW;YAaL,iBAAiB;IAW/B,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,kBAAkB,CAAC,GAAG,IAAI;IAmBvD,OAAO,IAAI,IAAI;CAGhB"}

package/dist/stages/demux/MP4Demuxer.d.ts.map +1 -1
@@ -1 +1 @@
-
{"version":3,"file":"MP4Demuxer.d.ts","sourceRoot":"","sources":["../../../src/stages/demux/MP4Demuxer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAEtD;;;GAGG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,UAAU,CAAM;IACxB,MAAM,yBAAgC;IACtC,OAAO,UAAS;IAChB,OAAO,CAAC,eAAe,CAAC,CAAsD;IAC9E,OAAO,CAAC,eAAe,CAAC,CAAsD;IAC9E,OAAO,CAAC,kBAAkB,CAAS;IACnC,OAAO,CAAC,eAAe,CAAC,CAAa;IACrC,OAAO,CAAC,UAAU,CAAK;IACvB,OAAO,CAAC,oBAAoB,CAAuB;IACnD,OAAO,CAAC,oBAAoB,CAAuB;gBAEvC,MAAM,GAAE,WAAW,GAAG;QAAE,OAAO,CAAC,EAAE,MAAM,IAAI,CAAA;KAAO;IAW/D,YAAY,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI;IAIvC,OAAO,CAAC,aAAa;IA2BrB,OAAO,CAAC,aAAa;
+
{"version":3,"file":"MP4Demuxer.d.ts","sourceRoot":"","sources":["../../../src/stages/demux/MP4Demuxer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAEtD;;;GAGG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,UAAU,CAAM;IACxB,MAAM,yBAAgC;IACtC,OAAO,UAAS;IAChB,OAAO,CAAC,eAAe,CAAC,CAAsD;IAC9E,OAAO,CAAC,eAAe,CAAC,CAAsD;IAC9E,OAAO,CAAC,kBAAkB,CAAS;IACnC,OAAO,CAAC,eAAe,CAAC,CAAa;IACrC,OAAO,CAAC,UAAU,CAAK;IACvB,OAAO,CAAC,oBAAoB,CAAuB;IACnD,OAAO,CAAC,oBAAoB,CAAuB;gBAEvC,MAAM,GAAE,WAAW,GAAG;QAAE,OAAO,CAAC,EAAE,MAAM,IAAI,CAAA;KAAO;IAW/D,YAAY,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI;IAIvC,OAAO,CAAC,aAAa;IA2BrB,OAAO,CAAC,aAAa;IA8BrB,OAAO,CAAC,cAAc;IAmDtB,OAAO,CAAC,mBAAmB;IA0B3B,OAAO,CAAC,mBAAmB;IAgB3B;;OAEG;IACH,iBAAiB,IAAI,eAAe,CAAC,UAAU,EAAE,iBAAiB,CAAC;IAgCnE;;OAEG;IACH,iBAAiB,IAAI,eAAe,CAAC,UAAU,EAAE,iBAAiB,CAAC,GAAG,IAAI;IA6B1E,YAAY,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI;IAOrC;;OAEG;IACH,IAAI,cAAc,IAAI,SAAS,GAAG,SAAS,CAE1C;IAED;;OAEG;IACH,IAAI,cAAc,IAAI,SAAS,GAAG,SAAS,CAE1C;IAED,OAAO,IAAI,IAAI;CAQhB"}

package/dist/stages/encode/AudioChunkEncoder.d.ts +2 -1

@@ -8,9 +8,10 @@ import { AudioEncoderConfig } from './types';
 export declare class AudioChunkEncoder extends BaseEncoder<AudioEncoder, AudioEncoderConfig, AudioData, EncodedAudioChunk, EncodedAudioChunkMetadata> {
     private static readonly DEFAULT_HIGH_WATER_MARK;
     private static readonly DEFAULT_ENCODE_QUEUE_THRESHOLD;
+    static readonly DEFAULT_CONFIG: AudioEncoderConfig;
     protected readonly highWaterMark: number;
     protected readonly encodeQueueThreshold: number;
-    constructor(config
+    constructor(config?: Partial<AudioEncoderConfig>);
     protected isConfigSupported(config: AudioEncoderConfig): Promise<{
         supported: boolean;
     }>;

package/dist/stages/encode/AudioChunkEncoder.d.ts.map +1 -1
@@ -1 +1 @@
-
{"version":3,"file":"AudioChunkEncoder.d.ts","sourceRoot":"","sources":["../../../src/stages/encode/AudioChunkEncoder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAElD;;;GAGG;AACH,qBAAa,iBAAkB,SAAQ,WAAW,CAChD,YAAY,EACZ,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,yBAAyB,CAC1B;IACC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,uBAAuB,CAAK;IACpD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,8BAA8B,CAAM;IAE5D,SAAS,CAAC,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;IACzC,SAAS,CAAC,QAAQ,CAAC,oBAAoB,EAAE,MAAM,CAAC;gBAEpC,MAAM,EAAE,kBAAkB;
+
{"version":3,"file":"AudioChunkEncoder.d.ts","sourceRoot":"","sources":["../../../src/stages/encode/AudioChunkEncoder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAElD;;;GAGG;AACH,qBAAa,iBAAkB,SAAQ,WAAW,CAChD,YAAY,EACZ,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,yBAAyB,CAC1B;IACC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,uBAAuB,CAAK;IACpD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,8BAA8B,CAAM;IAE5D,MAAM,CAAC,QAAQ,CAAC,cAAc,EAAE,kBAAkB,CAKhD;IAEF,SAAS,CAAC,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;IACzC,SAAS,CAAC,QAAQ,CAAC,oBAAoB,EAAE,MAAM,CAAC;gBAEpC,MAAM,CAAC,EAAE,OAAO,CAAC,kBAAkB,CAAC;cAYhC,iBAAiB,CAAC,MAAM,EAAE,kBAAkB,GAAG,OAAO,CAAC;QAAE,SAAS,EAAE,OAAO,CAAA;KAAE,CAAC;IAK9F,SAAS,CAAC,aAAa,CAAC,IAAI,EAAE,gBAAgB,GAAG,YAAY;IAI7D,SAAS,CAAC,cAAc,IAAI,MAAM;IAIlC,MAAM,CAAC,IAAI,EAAE,SAAS,GAAG,IAAI;CAQ9B"}

package/dist/stages/encode/AudioChunkEncoder.js +41 -0

@@ -0,0 +1,41 @@
+import { BaseEncoder } from "./BaseEncoder.js";
+class AudioChunkEncoder extends BaseEncoder {
+  static DEFAULT_HIGH_WATER_MARK = 4;
+  static DEFAULT_ENCODE_QUEUE_THRESHOLD = 16;
+  static DEFAULT_CONFIG = {
+    codec: "mp4a.40.2",
+    // AAC-LC
+    sampleRate: 48e3,
+    numberOfChannels: 2,
+    bitrate: 128e3
+  };
+  highWaterMark;
+  encodeQueueThreshold;
+  constructor(config) {
+    const fullConfig = { ...AudioChunkEncoder.DEFAULT_CONFIG, ...config };
+    super(fullConfig);
+    this.highWaterMark = fullConfig.backpressure?.highWaterMark ?? AudioChunkEncoder.DEFAULT_HIGH_WATER_MARK;
+    this.encodeQueueThreshold = fullConfig.backpressure?.encodeQueueThreshold ?? AudioChunkEncoder.DEFAULT_ENCODE_QUEUE_THRESHOLD;
+  }
+  async isConfigSupported(config) {
+    const result = await AudioEncoder.isConfigSupported(config);
+    return { supported: result.supported ?? false };
+  }
+  createEncoder(init) {
+    return new AudioEncoder(init);
+  }
+  getEncoderType() {
+    return "Audio";
+  }
+  encode(data) {
+    if (this.encoder?.state !== "configured") {
+      throw new Error("Audio encoder not configured");
+    }
+    this.encoder.encode(data);
+    data.close();
+  }
+}
+export {
+  AudioChunkEncoder
+};
+//# sourceMappingURL=AudioChunkEncoder.js.map
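AudioChunkEncoder is a thin wrapper over the WebCodecs AudioEncoder: the constructor merges a partial config over DEFAULT_CONFIG, and createStream() (inherited from BaseEncoder, shown further below) exposes the encoder as a TransformStream that emits { chunk, metadata } pairs. A minimal usage sketch, written against structural types so it stands alone; the concrete class is the AudioChunkEncoder defined in the file above, and how it is imported in an application depends on the package's exports, which this diff does not show:

```ts
// Structural stand-ins for the shapes added in this diff.
interface EncoderChunkLike {
  chunk: EncodedAudioChunk;
  metadata: EncodedAudioChunkMetadata;
}

interface AudioChunkEncoderLike {
  initialize(): Promise<void>;
  createStream(): TransformStream<AudioData, EncoderChunkLike>;
  close(): Promise<void>;
}

// Pipe decoded PCM (AudioData) through the encoder and hand each encoded chunk to a sink.
async function encodePcm(
  encoder: AudioChunkEncoderLike,
  pcm: ReadableStream<AudioData>,
  onChunk: (chunk: EncodedAudioChunk, metadata: EncodedAudioChunkMetadata) => void
): Promise<void> {
  await encoder.initialize(); // checks isConfigSupported() and configures the AudioEncoder
  const reader = pcm.pipeThrough(encoder.createStream()).getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    onChunk(value.chunk, value.metadata); // metadata carries decoderConfig on the first output
  }
  await encoder.close(); // flushes pending data, then closes the underlying encoder
}
```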

package/dist/stages/encode/AudioChunkEncoder.js.map +1 -0
@@ -0,0 +1 @@
+
{"version":3,"file":"AudioChunkEncoder.js","sources":["../../../src/stages/encode/AudioChunkEncoder.ts"],"sourcesContent":["import { BaseEncoder } from './BaseEncoder';\nimport type { AudioEncoderConfig } from './types';\n\n/**\n * AudioChunkEncoder - Encodes AudioData to EncodedAudioChunk\n * Stream-based encoder with backpressure handling\n */\nexport class AudioChunkEncoder extends BaseEncoder<\n AudioEncoder,\n AudioEncoderConfig,\n AudioData,\n EncodedAudioChunk,\n EncodedAudioChunkMetadata\n> {\n private static readonly DEFAULT_HIGH_WATER_MARK = 4;\n private static readonly DEFAULT_ENCODE_QUEUE_THRESHOLD = 16;\n\n static readonly DEFAULT_CONFIG: AudioEncoderConfig = {\n codec: 'mp4a.40.2', // AAC-LC\n sampleRate: 48000,\n numberOfChannels: 2,\n bitrate: 128000,\n };\n\n protected readonly highWaterMark: number;\n protected readonly encodeQueueThreshold: number;\n\n constructor(config?: Partial<AudioEncoderConfig>) {\n const fullConfig = { ...AudioChunkEncoder.DEFAULT_CONFIG, ...config };\n super(fullConfig);\n\n // Initialize backpressure settings from config or use defaults\n this.highWaterMark =\n fullConfig.backpressure?.highWaterMark ?? AudioChunkEncoder.DEFAULT_HIGH_WATER_MARK;\n this.encodeQueueThreshold =\n fullConfig.backpressure?.encodeQueueThreshold ??\n AudioChunkEncoder.DEFAULT_ENCODE_QUEUE_THRESHOLD;\n }\n\n protected async isConfigSupported(config: AudioEncoderConfig): Promise<{ supported: boolean }> {\n const result = await AudioEncoder.isConfigSupported(config);\n return { supported: result.supported ?? false };\n }\n\n protected createEncoder(init: AudioEncoderInit): AudioEncoder {\n return new AudioEncoder(init);\n }\n\n protected getEncoderType(): string {\n return 'Audio';\n }\n\n encode(data: AudioData): void {\n if (this.encoder?.state !== 'configured') {\n throw new Error('Audio encoder not configured');\n }\n\n this.encoder.encode(data);\n data.close();\n }\n}\n"],"names":[],"mappings":";AAOO,MAAM,0BAA0B,YAMrC;AAAA,EACA,OAAwB,0BAA0B;AAAA,EAClD,OAAwB,iCAAiC;AAAA,EAEzD,OAAgB,iBAAqC;AAAA,IACnD,OAAO;AAAA;AAAA,IACP,YAAY;AAAA,IACZ,kBAAkB;AAAA,IAClB,SAAS;AAAA,EAAA;AAAA,EAGQ;AAAA,EACA;AAAA,EAEnB,YAAY,QAAsC;AAChD,UAAM,aAAa,EAAE,GAAG,kBAAkB,gBAAgB,GAAG,OAAA;AAC7D,UAAM,UAAU;AAGhB,SAAK,gBACH,WAAW,cAAc,iBAAiB,kBAAkB;AAC9D,SAAK,uBACH,WAAW,cAAc,wBACzB,kBAAkB;AAAA,EACtB;AAAA,EAEA,MAAgB,kBAAkB,QAA6D;AAC7F,UAAM,SAAS,MAAM,aAAa,kBAAkB,MAAM;AAC1D,WAAO,EAAE,WAAW,OAAO,aAAa,MAAA;AAAA,EAC1C;AAAA,EAEU,cAAc,MAAsC;AAC5D,WAAO,IAAI,aAAa,IAAI;AAAA,EAC9B;AAAA,EAEU,iBAAyB;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,MAAuB;AAC5B,QAAI,KAAK,SAAS,UAAU,cAAc;AACxC,YAAM,IAAI,MAAM,8BAA8B;AAAA,IAChD;AAEA,SAAK,QAAQ,OAAO,IAAI;AACxB,SAAK,MAAA;AAAA,EACP;AACF;"}

package/dist/stages/encode/BaseEncoder.d.ts +7 -3

@@ -2,10 +2,14 @@
  * Base encoder class for both video and audio encoding
  * Handles common WebCodecs encoder operations
  */
+export interface EncoderChunk {
+    chunk: EncodedVideoChunk;
+    metadata: EncodedVideoChunkMetadata;
+}
 export declare abstract class BaseEncoder<TEncoder extends VideoEncoder | AudioEncoder, TConfig extends VideoEncoderConfig | AudioEncoderConfig, TInput extends VideoFrame | AudioData, TChunk extends EncodedVideoChunk | EncodedAudioChunk, TMetadata extends EncodedVideoChunkMetadata | EncodedAudioChunkMetadata> {
     protected encoder?: TEncoder;
     protected config: TConfig;
-    protected controller: TransformStreamDefaultController<
+    protected controller: TransformStreamDefaultController<EncoderChunk> | null;
     constructor(config: TConfig);
     getConfig(): TConfig;
     protected get currentConfig(): TConfig;
@@ -19,7 +23,7 @@ export declare abstract class BaseEncoder<TEncoder extends VideoEncoder | AudioE
     close(): Promise<void>;
     get isReady(): boolean;
     get queueSize(): number;
-    protected handleOutput(chunk: TChunk,
+    protected handleOutput(chunk: TChunk, metadata: TMetadata): void;
     protected handleError(error: DOMException): void;
     protected abstract isConfigSupported(config: TConfig): Promise<{
         supported: boolean;
@@ -33,7 +37,7 @@ export declare abstract class BaseEncoder<TEncoder extends VideoEncoder | AudioE
      * Create transform stream for encoding
      * Implements common stream logic with backpressure handling
      */
-    createStream(): TransformStream<TInput,
+    createStream(): TransformStream<TInput, EncoderChunk>;
     abstract encode(input: TInput): void;
 }
 interface EncoderInit {

package/dist/stages/encode/BaseEncoder.d.ts.map +1 -1
@@ -1 +1 @@
-
{"version":3,"file":"BaseEncoder.d.ts","sourceRoot":"","sources":["../../../src/stages/encode/BaseEncoder.ts"],"names":[],"mappings":"AAEA;;;GAGG;
+
{"version":3,"file":"BaseEncoder.d.ts","sourceRoot":"","sources":["../../../src/stages/encode/BaseEncoder.ts"],"names":[],"mappings":"AAEA;;;GAGG;AAEH,MAAM,WAAW,YAAY;IAC3B,KAAK,EAAE,iBAAiB,CAAC;IACzB,QAAQ,EAAE,yBAAyB,CAAC;CACrC;AAED,8BAAsB,WAAW,CAC/B,QAAQ,SAAS,YAAY,GAAG,YAAY,EAC5C,OAAO,SAAS,kBAAkB,GAAG,kBAAkB,EACvD,MAAM,SAAS,UAAU,GAAG,SAAS,EACrC,MAAM,SAAS,iBAAiB,GAAG,iBAAiB,EACpD,SAAS,SAAS,yBAAyB,GAAG,yBAAyB;IAEvE,SAAS,CAAC,OAAO,CAAC,EAAE,QAAQ,CAAC;IAC7B,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC;IAC1B,SAAS,CAAC,UAAU,EAAE,gCAAgC,CAAC,YAAY,CAAC,GAAG,IAAI,CAAQ;gBAEvE,MAAM,EAAE,OAAO;IAI3B,SAAS,IAAI,OAAO;IAIpB,SAAS,KAAK,aAAa,IAAI,OAAO,CAErC;IAED,SAAS,CAAC,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO;IAW/D,SAAS,CAAC,gBAAgB,CAAC,IAAI,EAAE,OAAO,GAAG,OAAO;IAiBlD,SAAS,CAAC,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,GAAG,OAAO;IAIjD,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAkB3B,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IA8BpD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAQtB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAStB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAa5B,IAAI,OAAO,IAAI,OAAO,CAErB;IAED,IAAI,SAAS,IAAI,MAAM,CAEtB;IAED,SAAS,CAAC,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,GAAG,IAAI;IAchE,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,GAAG,IAAI;IAMhD,SAAS,CAAC,QAAQ,CAAC,iBAAiB,CAAC,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC;QAAE,SAAS,EAAE,OAAO,CAAA;KAAE,CAAC;IACtF,SAAS,CAAC,QAAQ,CAAC,aAAa,CAAC,IAAI,EAAE,WAAW,GAAG,QAAQ;IAC7D,SAAS,CAAC,QAAQ,CAAC,cAAc,IAAI,MAAM;IAG3C,SAAS,CAAC,OAAO,IAAI,IAAI;IAKzB,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;IAClD,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,oBAAoB,EAAE,MAAM,CAAC;IAEzD;;;OAGG;IACH,YAAY,IAAI,eAAe,CAAC,MAAM,EAAE,YAAY,CAAC;IAkDrD,QAAQ,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CACrC;AAED,UAAU,WAAW;IACnB,MAAM,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IAC5C,KAAK,EAAE,CAAC,KAAK,EAAE,YAAY,KAAK,IAAI,CAAC;CACtC"}

package/dist/stages/encode/BaseEncoder.js +173 -0

@@ -0,0 +1,173 @@
+class BaseEncoder {
+  encoder;
+  config;
+  controller = null;
+  constructor(config) {
+    this.config = config;
+  }
+  getConfig() {
+    return { ...this.config };
+  }
+  get currentConfig() {
+    return this.config;
+  }
+  shouldReconfigure(partial) {
+    const next = { ...this.config, ...partial };
+    const keys = Object.keys(partial ?? {});
+    for (const key of keys) {
+      if (partial[key] !== void 0 && next[key] !== this.config[key]) {
+        return true;
+      }
+    }
+    return false;
+  }
+  hasConfigChanged(next) {
+    const currentEntries = Object.entries(this.config);
+    for (const [key, value] of currentEntries) {
+      if (next[key] !== value) {
+        return true;
+      }
+    }
+    for (const key of Object.keys(next)) {
+      if (this.config[key] !== next[key]) {
+        return true;
+      }
+    }
+    return false;
+  }
+  configsEqual(a, b) {
+    return JSON.stringify(a) === JSON.stringify(b);
+  }
+  async initialize() {
+    if (this.encoder?.state === "configured") {
+      return;
+    }
+    const isSupported = await this.isConfigSupported(this.config);
+    if (!isSupported.supported) {
+      throw new Error(`Codec not supported: ${this.config.codec}`);
+    }
+    this.encoder = this.createEncoder({
+      output: this.handleOutput.bind(this),
+      error: this.handleError.bind(this)
+    });
+    this.encoder.configure(this.config);
+  }
+  async reconfigure(config) {
+    if (!config || Object.keys(config).length === 0) {
+      return;
+    }
+    const nextConfig = { ...this.config, ...config };
+    if (this.configsEqual(this.config, nextConfig)) {
+      return;
+    }
+    if (!this.encoder) {
+      this.config = nextConfig;
+      await this.initialize();
+      return;
+    }
+    if (this.encoder.state === "configured") {
+      await this.encoder.flush();
+    }
+    const isSupported = await this.isConfigSupported(nextConfig);
+    if (!isSupported.supported) {
+      throw new Error(`New configuration not supported: ${nextConfig.codec}`);
+    }
+    this.config = nextConfig;
+    this.encoder.configure(this.config);
+  }
+  async flush() {
+    if (!this.encoder) {
+      return;
+    }
+    await this.encoder.flush();
+  }
+  async reset() {
+    if (!this.encoder) {
+      return;
+    }
+    this.encoder.reset();
+    this.onReset();
+  }
+  async close() {
+    if (!this.encoder) {
+      return;
+    }
+    if (this.encoder.state === "configured") {
+      await this.encoder.flush();
+    }
+    this.encoder.close();
+    this.encoder = void 0;
+  }
+  get isReady() {
+    return this.encoder?.state === "configured";
+  }
+  get queueSize() {
+    return this.encoder?.encodeQueueSize ?? 0;
+  }
+  handleOutput(chunk, metadata) {
+    if (this.controller) {
+      try {
+        this.controller.enqueue({ chunk, metadata });
+      } catch (error) {
+        if (!(error instanceof TypeError && error.message.includes("closed"))) {
+          throw error;
+        }
+      }
+    }
+  }
+  handleError(error) {
+    console.error(`${this.getEncoderType()} encoder error:`, error);
+    this.controller?.error(error);
+  }
+  // Hook for subclasses to handle reset
+  onReset() {
+  }
+  /**
+   * Create transform stream for encoding
+   * Implements common stream logic with backpressure handling
+   */
+  createStream() {
+    return new TransformStream(
+      {
+        start: async (controller) => {
+          this.controller = controller;
+          if (!this.isReady) {
+            await this.initialize();
+          }
+        },
+        transform: async (input) => {
+          if (!this.encoder || this.encoder.state !== "configured") {
+            throw new Error("Encoder not configured");
+          }
+          if (this.encoder.encodeQueueSize >= this.encodeQueueThreshold) {
+            await new Promise((resolve) => {
+              const check = () => {
+                if (!this.encoder || this.encoder.encodeQueueSize < this.encodeQueueThreshold - 1) {
+                  resolve();
+                } else {
+                  setTimeout(check, 10);
+                }
+              };
+              check();
+            });
+          }
+          const frame = input.frame || input;
+          this.encode(frame);
+        },
+        flush: async () => {
+          await this.flush();
+        }
+      },
+      // Queuing strategy with backpressure configuration
+      {
+        highWaterMark: this.highWaterMark,
+        size: () => 1
+        // Count-based
+      }
+    );
+  }
+}
+export {
+  BaseEncoder
+};
+//# sourceMappingURL=BaseEncoder.js.map
|
|
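The `BaseEncoder` shown above leaves a small abstract surface for subclasses: `isConfigSupported`, `createEncoder`, `getEncoderType`, `encode`, plus the `highWaterMark` and `encodeQueueThreshold` constants that `createStream()` uses for backpressure (the full typed source is embedded in the map hunk below). The following is a minimal, hypothetical sketch of how a video subclass could satisfy that contract against the WebCodecs `VideoEncoder` API. It is illustrative only: the class name and threshold values are assumptions, and it is not the package's actual `VideoChunkEncoder`, whose implementation does not appear in this diff.

```ts
import { BaseEncoder } from './BaseEncoder';

// Hypothetical subclass for illustration; names and threshold values are assumptions.
class SketchVideoEncoder extends BaseEncoder<
  VideoEncoder,
  VideoEncoderConfig,
  VideoFrame,
  EncodedVideoChunk,
  EncodedVideoChunkMetadata
> {
  // Consumed by BaseEncoder.createStream() for stream and encoder-queue backpressure.
  protected readonly highWaterMark = 4;
  protected readonly encodeQueueThreshold = 8;

  protected async isConfigSupported(config: VideoEncoderConfig): Promise<{ supported: boolean }> {
    // WebCodecs static support check; normalize the optional `supported` flag.
    const result = await VideoEncoder.isConfigSupported(config);
    return { supported: result.supported ?? false };
  }

  protected createEncoder(init: { output: (chunk: any, metadata: any) => void; error: (error: DOMException) => void }): VideoEncoder {
    return new VideoEncoder(init);
  }

  protected getEncoderType(): string {
    return 'Video';
  }

  encode(frame: VideoFrame): void {
    // createStream() has already waited for the encode queue to drain before calling this.
    this.encoder?.encode(frame);
    frame.close(); // encode() copies the frame, so the original can be released
  }
}

// Illustrative wiring: pipe decoded frames through the encoder's TransformStream.
declare const frames: ReadableStream<VideoFrame>;
declare const sink: WritableStream<unknown>;
const encoder = new SketchVideoEncoder({ codec: 'avc1.42001f', width: 1280, height: 720 });
void frames.pipeThrough(encoder.createStream()).pipeTo(sink);
```

Under these assumptions, the base class's transform step polls `encodeQueueSize` every 10 ms and only hands a new frame to `encode()` once the queue drops below the threshold, so a subclass never has to manage backpressure itself.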
@@ -0,0 +1 @@
{"version":3,"file":"BaseEncoder.js","sources":["../../../src/stages/encode/BaseEncoder.ts"],"sourcesContent":["// Base encoder implementation\n\n/**\n * Base encoder class for both video and audio encoding\n * Handles common WebCodecs encoder operations\n */\n\nexport interface EncoderChunk {\n chunk: EncodedVideoChunk;\n metadata: EncodedVideoChunkMetadata;\n}\n\nexport abstract class BaseEncoder<\n TEncoder extends VideoEncoder | AudioEncoder,\n TConfig extends VideoEncoderConfig | AudioEncoderConfig,\n TInput extends VideoFrame | AudioData,\n TChunk extends EncodedVideoChunk | EncodedAudioChunk,\n TMetadata extends EncodedVideoChunkMetadata | EncodedAudioChunkMetadata,\n> {\n protected encoder?: TEncoder;\n protected config: TConfig;\n protected controller: TransformStreamDefaultController<EncoderChunk> | null = null;\n\n constructor(config: TConfig) {\n this.config = config;\n }\n\n getConfig(): TConfig {\n return { ...this.config };\n }\n\n protected get currentConfig(): TConfig {\n return this.config;\n }\n\n protected shouldReconfigure(partial: Partial<TConfig>): boolean {\n const next = { ...this.config, ...partial } as TConfig;\n const keys = Object.keys(partial ?? {}) as Array<keyof TConfig>;\n for (const key of keys) {\n if (partial[key] !== undefined && next[key] !== this.config[key]) {\n return true;\n }\n }\n return false;\n }\n\n protected hasConfigChanged(next: TConfig): boolean {\n const currentEntries = Object.entries(this.config) as Array<[keyof TConfig, any]>;\n for (const [key, value] of currentEntries) {\n if (next[key] !== value) {\n return true;\n }\n }\n\n for (const key of Object.keys(next) as Array<keyof TConfig>) {\n if (this.config[key] !== next[key]) {\n return true;\n }\n }\n\n return false;\n }\n\n protected configsEqual(a: TConfig, b: TConfig): boolean {\n return JSON.stringify(a) === JSON.stringify(b);\n }\n\n async initialize(): Promise<void> {\n if (this.encoder?.state === 'configured') {\n return;\n }\n\n const isSupported = await this.isConfigSupported(this.config);\n if (!isSupported.supported) {\n throw new Error(`Codec not supported: ${this.config.codec}`);\n }\n\n this.encoder = this.createEncoder({\n output: this.handleOutput.bind(this),\n error: this.handleError.bind(this),\n });\n\n (this.encoder as any).configure(this.config);\n }\n\n async reconfigure(config: Partial<TConfig>): Promise<void> {\n if (!config || Object.keys(config).length === 0) {\n return;\n }\n\n const nextConfig = { ...this.config, ...config } as TConfig;\n\n if (this.configsEqual(this.config, nextConfig)) {\n return;\n }\n\n if (!this.encoder) {\n this.config = nextConfig;\n await this.initialize();\n return;\n }\n\n if (this.encoder.state === 'configured') {\n await this.encoder.flush();\n }\n\n const isSupported = await this.isConfigSupported(nextConfig);\n if (!isSupported.supported) {\n throw new Error(`New configuration not supported: ${nextConfig.codec}`);\n }\n\n this.config = nextConfig;\n (this.encoder as any).configure(this.config);\n }\n\n async flush(): Promise<void> {\n if (!this.encoder) {\n return;\n }\n\n await this.encoder.flush();\n }\n\n async reset(): Promise<void> {\n if (!this.encoder) {\n return;\n }\n\n this.encoder.reset();\n this.onReset();\n }\n\n async close(): Promise<void> {\n if (!this.encoder) {\n return;\n }\n\n if (this.encoder.state === 'configured') {\n await this.encoder.flush();\n }\n\n this.encoder.close();\n this.encoder = undefined;\n }\n\n get isReady(): boolean {\n return this.encoder?.state === 'configured';\n }\n\n get 
queueSize(): number {\n return this.encoder?.encodeQueueSize ?? 0;\n }\n\n protected handleOutput(chunk: TChunk, metadata: TMetadata): void {\n // Only enqueue if controller exists and stream is not closed\n if (this.controller) {\n try {\n this.controller.enqueue({ chunk, metadata });\n } catch (error) {\n // Stream may be closed during flush, ignore enqueue errors\n if (!(error instanceof TypeError && error.message.includes('closed'))) {\n throw error;\n }\n }\n }\n }\n\n protected handleError(error: DOMException): void {\n console.error(`${this.getEncoderType()} encoder error:`, error);\n this.controller?.error(error);\n }\n\n // Abstract methods to be implemented by subclasses\n protected abstract isConfigSupported(config: TConfig): Promise<{ supported: boolean }>;\n protected abstract createEncoder(init: EncoderInit): TEncoder;\n protected abstract getEncoderType(): string;\n\n // Hook for subclasses to handle reset\n protected onReset(): void {\n // Override in subclasses if needed\n }\n\n // Abstract properties for backpressure configuration\n protected abstract readonly highWaterMark: number;\n protected abstract readonly encodeQueueThreshold: number;\n\n /**\n * Create transform stream for encoding\n * Implements common stream logic with backpressure handling\n */\n createStream(): TransformStream<TInput, EncoderChunk> {\n return new TransformStream<TInput, EncoderChunk>(\n {\n start: async (controller) => {\n this.controller = controller;\n\n // Initialize encoder if not already initialized\n if (!this.isReady) {\n await this.initialize();\n }\n },\n\n transform: async (input) => {\n if (!this.encoder || this.encoder.state !== 'configured') {\n throw new Error('Encoder not configured');\n }\n\n // Check encoder queue pressure\n if (this.encoder.encodeQueueSize >= this.encodeQueueThreshold) {\n // Wait for queue to drain\n await new Promise<void>((resolve) => {\n const check = () => {\n if (!this.encoder || this.encoder.encodeQueueSize < this.encodeQueueThreshold - 1) {\n resolve();\n } else {\n setTimeout(check, 10);\n }\n };\n check();\n });\n }\n\n // Encode the input\n const frame = (input as any).frame || input;\n this.encode(frame);\n },\n\n flush: async () => {\n await this.flush();\n },\n },\n // Queuing strategy with backpressure configuration\n {\n highWaterMark: this.highWaterMark,\n size: () => 1, // Count-based\n }\n );\n }\n\n // Abstract method for encoding\n abstract encode(input: TInput): void;\n}\n\ninterface EncoderInit {\n output: (chunk: any, metadata: any) => void;\n error: (error: DOMException) => 
void;\n}\n"],"names":[],"mappings":"AAYO,MAAe,YAMpB;AAAA,EACU;AAAA,EACA;AAAA,EACA,aAAoE;AAAA,EAE9E,YAAY,QAAiB;AAC3B,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,YAAqB;AACnB,WAAO,EAAE,GAAG,KAAK,OAAA;AAAA,EACnB;AAAA,EAEA,IAAc,gBAAyB;AACrC,WAAO,KAAK;AAAA,EACd;AAAA,EAEU,kBAAkB,SAAoC;AAC9D,UAAM,OAAO,EAAE,GAAG,KAAK,QAAQ,GAAG,QAAA;AAClC,UAAM,OAAO,OAAO,KAAK,WAAW,CAAA,CAAE;AACtC,eAAW,OAAO,MAAM;AACtB,UAAI,QAAQ,GAAG,MAAM,UAAa,KAAK,GAAG,MAAM,KAAK,OAAO,GAAG,GAAG;AAChE,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEU,iBAAiB,MAAwB;AACjD,UAAM,iBAAiB,OAAO,QAAQ,KAAK,MAAM;AACjD,eAAW,CAAC,KAAK,KAAK,KAAK,gBAAgB;AACzC,UAAI,KAAK,GAAG,MAAM,OAAO;AACvB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,eAAW,OAAO,OAAO,KAAK,IAAI,GAA2B;AAC3D,UAAI,KAAK,OAAO,GAAG,MAAM,KAAK,GAAG,GAAG;AAClC,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEU,aAAa,GAAY,GAAqB;AACtD,WAAO,KAAK,UAAU,CAAC,MAAM,KAAK,UAAU,CAAC;AAAA,EAC/C;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,KAAK,SAAS,UAAU,cAAc;AACxC;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,KAAK,kBAAkB,KAAK,MAAM;AAC5D,QAAI,CAAC,YAAY,WAAW;AAC1B,YAAM,IAAI,MAAM,wBAAwB,KAAK,OAAO,KAAK,EAAE;AAAA,IAC7D;AAEA,SAAK,UAAU,KAAK,cAAc;AAAA,MAChC,QAAQ,KAAK,aAAa,KAAK,IAAI;AAAA,MACnC,OAAO,KAAK,YAAY,KAAK,IAAI;AAAA,IAAA,CAClC;AAEA,SAAK,QAAgB,UAAU,KAAK,MAAM;AAAA,EAC7C;AAAA,EAEA,MAAM,YAAY,QAAyC;AACzD,QAAI,CAAC,UAAU,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAC/C;AAAA,IACF;AAEA,UAAM,aAAa,EAAE,GAAG,KAAK,QAAQ,GAAG,OAAA;AAExC,QAAI,KAAK,aAAa,KAAK,QAAQ,UAAU,GAAG;AAC9C;AAAA,IACF;AAEA,QAAI,CAAC,KAAK,SAAS;AACjB,WAAK,SAAS;AACd,YAAM,KAAK,WAAA;AACX;AAAA,IACF;AAEA,QAAI,KAAK,QAAQ,UAAU,cAAc;AACvC,YAAM,KAAK,QAAQ,MAAA;AAAA,IACrB;AAEA,UAAM,cAAc,MAAM,KAAK,kBAAkB,UAAU;AAC3D,QAAI,CAAC,YAAY,WAAW;AAC1B,YAAM,IAAI,MAAM,oCAAoC,WAAW,KAAK,EAAE;AAAA,IACxE;AAEA,SAAK,SAAS;AACb,SAAK,QAAgB,UAAU,KAAK,MAAM;AAAA,EAC7C;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,SAAS;AACjB;AAAA,IACF;AAEA,UAAM,KAAK,QAAQ,MAAA;AAAA,EACrB;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,SAAS;AACjB;AAAA,IACF;AAEA,SAAK,QAAQ,MAAA;AACb,SAAK,QAAA;AAAA,EACP;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,SAAS;AACjB;AAAA,IACF;AAEA,QAAI,KAAK,QAAQ,UAAU,cAAc;AACvC,YAAM,KAAK,QAAQ,MAAA;AAAA,IACrB;AAEA,SAAK,QAAQ,MAAA;AACb,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,IAAI,UAAmB;AACrB,WAAO,KAAK,SAAS,UAAU;AAAA,EACjC;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,SAAS,mBAAmB;AAAA,EAC1C;AAAA,EAEU,aAAa,OAAe,UAA2B;AAE/D,QAAI,KAAK,YAAY;AACnB,UAAI;AACF,aAAK,WAAW,QAAQ,EAAE,OAAO,UAAU;AAAA,MAC7C,SAAS,OAAO;AAEd,YAAI,EAAE,iBAAiB,aAAa,MAAM,QAAQ,SAAS,QAAQ,IAAI;AACrE,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEU,YAAY,OAA2B;AAC/C,YAAQ,MAAM,GAAG,KAAK,gBAAgB,mBAAmB,KAAK;AAC9D,SAAK,YAAY,MAAM,KAAK;AAAA,EAC9B;AAAA;AAAA,EAQU,UAAgB;AAAA,EAE1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,eAAsD;AACpD,WAAO,IAAI;AAAA,MACT;AAAA,QACE,OAAO,OAAO,eAAe;AAC3B,eAAK,aAAa;AAGlB,cAAI,CAAC,KAAK,SAAS;AACjB,kBAAM,KAAK,WAAA;AAAA,UACb;AAAA,QACF;AAAA,QAEA,WAAW,OAAO,UAAU;AAC1B,cAAI,CAAC,KAAK,WAAW,KAAK,QAAQ,UAAU,cAAc;AACxD,kBAAM,IAAI,MAAM,wBAAwB;AAAA,UAC1C;AAGA,cAAI,KAAK,QAAQ,mBAAmB,KAAK,sBAAsB;AAE7D,kBAAM,IAAI,QAAc,CAAC,YAAY;AACnC,oBAAM,QAAQ,MAAM;AAClB,oBAAI,CAAC,KAAK,WAAW,KAAK,QAAQ,kBAAkB,KAAK,uBAAuB,GAAG;AACjF,0BAAA;AAAA,gBACF,OAAO;AACL,6BAAW,OAAO,EAAE;AAAA,gBACtB;AAAA,cACF;AACA,oBAAA;AAAA,YACF,CAAC;AAAA,UACH;AAGA,gBAAM,QAAS,MAAc,SAAS;AACtC,eAAK,OAAO,KAAK;AAAA,QACnB;AAAA,QAEA,OAAO,YAAY;AACjB,gBAAM,KAAK,MAAA;AAAA,QACb;AAAA,MAAA;AAAA;AAAA,MAGF;AAAA,QACE,eAAe,KAAK;AAAA,QACpB,MAAM,MAAM;AAAA;AAAA,MAAA;AAAA,IACd;AAAA,EAEJ;AAIF;"}
@@ -0,0 +1,64 @@
+import { VideoChunkEncoder } from './VideoChunkEncoder';
+import { AudioChunkEncoder } from './AudioChunkEncoder';
+import { VideoEncoderConfig, AudioEncoderConfig } from './types';
+
+/**
+ * ClipEncoderManager - Per-Clip Encoder Instance Manager
+ *
+ * Responsibilities:
+ * - Maintain separate encoder instances per clip (avoid frameCount pollution)
+ * - Limit concurrent encoder instances (maxEncoders = 6)
+ * - FIFO eviction strategy
+ *
+ * Note: This is NOT a traditional object pool, as it does not support
+ * cross-clip instance reuse. Each sessionId gets its own dedicated encoder
+ * that is destroyed (not recycled) when released.
+ */
+export declare class ClipEncoderManager {
+    private videoEncoders;
+    private audioEncoders;
+    private videoCreationOrder;
+    private audioCreationOrder;
+    private readonly maxEncoders;
+    constructor(maxEncoders?: number);
+    /**
+     * Acquire a video encoder for the given sessionId
+     * Creates new encoder if not exists; returns existing one if already created
+     */
+    acquireVideo(sessionId: string, config: VideoEncoderConfig): Promise<VideoChunkEncoder>;
+    /**
+     * Acquire an audio encoder for the given sessionId
+     */
+    acquireAudio(sessionId: string, config: AudioEncoderConfig): Promise<AudioChunkEncoder>;
+    /**
+     * Release video encoder for the given sessionId
+     */
+    releaseVideo(sessionId: string): Promise<void>;
+    /**
+     * Release audio encoder for the given sessionId
+     */
+    releaseAudio(sessionId: string): Promise<void>;
+    /**
+     * Release both video and audio encoders for the given sessionId
+     */
+    releaseClip(sessionId: string): Promise<void>;
+    /**
+     * Close all encoders and clear state
+     */
+    closeAll(): Promise<void>;
+    /**
+     * Check if encoders exist for the given sessionId
+     */
+    has(sessionId: string): boolean;
+    /**
+     * Get statistics about current encoder state
+     */
+    getStats(): {
+        videoEncoders: number;
+        audioEncoders: number;
+        maxEncoders: number;
+        videoCreationOrder: string[];
+        audioCreationOrder: string[];
+    };
+}
+//# sourceMappingURL=ClipEncoderManager.d.ts.map
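Based only on the declaration file above, here is a hedged usage sketch of the per-clip manager. The session id, the config literal, and the call to `createStream()` on the returned encoder are assumptions for illustration; the package's own `VideoEncoderConfig` from `./types` and the `VideoChunkEncoder` implementation are not part of this diff.

```ts
import { ClipEncoderManager } from './ClipEncoderManager';

// A manager capped at 6 clips; per its docs, the oldest encoder is FIFO-evicted beyond that.
const manager = new ClipEncoderManager(6);

async function encodeClip(
  sessionId: string,
  frames: ReadableStream<VideoFrame>,
  sink: WritableStream<unknown>
) {
  // One dedicated encoder per sessionId; re-acquiring the same id returns the existing instance.
  // The config shape below is assumed (WebCodecs-style); the package's type is not shown here.
  const videoEncoder = await manager.acquireVideo(sessionId, {
    codec: 'avc1.42001f',
    width: 1280,
    height: 720,
    bitrate: 2_000_000,
    framerate: 30,
  });

  try {
    // Assumes the returned VideoChunkEncoder exposes BaseEncoder.createStream().
    await frames.pipeThrough(videoEncoder.createStream()).pipeTo(sink);
  } finally {
    // Dedicated instances are destroyed, not recycled, on release.
    await manager.releaseClip(sessionId);
  }
}

// Inspection and teardown:
//   manager.getStats(); // { videoEncoders, audioEncoders, maxEncoders, videoCreationOrder, audioCreationOrder }
//   await manager.closeAll();
```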