@meframe/core 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +17 -4
- package/dist/Meframe.d.ts.map +1 -1
- package/dist/Meframe.js +0 -3
- package/dist/Meframe.js.map +1 -1
- package/dist/assets/audio-compose.worker-nGVvHD5Q.js +1537 -0
- package/dist/assets/audio-compose.worker-nGVvHD5Q.js.map +1 -0
- package/dist/assets/audio-demux.worker-xwWBtbAe.js +8299 -0
- package/dist/assets/audio-demux.worker-xwWBtbAe.js.map +1 -0
- package/dist/assets/decode.worker-DpWHsc7R.js +1291 -0
- package/dist/assets/decode.worker-DpWHsc7R.js.map +1 -0
- package/dist/assets/encode.worker-nfOb3kw6.js +1026 -0
- package/dist/assets/encode.worker-nfOb3kw6.js.map +1 -0
- package/dist/assets/mux.worker-uEMQY066.js +8019 -0
- package/dist/assets/mux.worker-uEMQY066.js.map +1 -0
- package/dist/assets/video-compose.worker-DPzsC21d.js +1683 -0
- package/dist/assets/video-compose.worker-DPzsC21d.js.map +1 -0
- package/dist/assets/video-demux.worker-D019I7GQ.js +7957 -0
- package/dist/assets/video-demux.worker-D019I7GQ.js.map +1 -0
- package/dist/cache/CacheManager.d.ts.map +1 -1
- package/dist/cache/CacheManager.js +8 -1
- package/dist/cache/CacheManager.js.map +1 -1
- package/dist/config/defaults.d.ts.map +1 -1
- package/dist/config/defaults.js +0 -8
- package/dist/config/defaults.js.map +1 -1
- package/dist/config/types.d.ts +0 -4
- package/dist/config/types.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.d.ts +4 -2
- package/dist/controllers/PlaybackController.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.js +7 -13
- package/dist/controllers/PlaybackController.js.map +1 -1
- package/dist/controllers/PreRenderService.d.ts +3 -2
- package/dist/controllers/PreRenderService.d.ts.map +1 -1
- package/dist/controllers/PreRenderService.js.map +1 -1
- package/dist/controllers/PreviewHandle.d.ts +2 -0
- package/dist/controllers/PreviewHandle.d.ts.map +1 -1
- package/dist/controllers/PreviewHandle.js +6 -0
- package/dist/controllers/PreviewHandle.js.map +1 -1
- package/dist/controllers/index.d.ts +1 -1
- package/dist/controllers/index.d.ts.map +1 -1
- package/dist/controllers/types.d.ts +2 -12
- package/dist/controllers/types.d.ts.map +1 -1
- package/dist/event/events.d.ts +5 -59
- package/dist/event/events.d.ts.map +1 -1
- package/dist/event/events.js +1 -6
- package/dist/event/events.js.map +1 -1
- package/dist/model/CompositionModel.js +1 -2
- package/dist/model/CompositionModel.js.map +1 -1
- package/dist/orchestrator/CompositionPlanner.d.ts.map +1 -1
- package/dist/orchestrator/CompositionPlanner.js +1 -0
- package/dist/orchestrator/CompositionPlanner.js.map +1 -1
- package/dist/orchestrator/Orchestrator.d.ts.map +1 -1
- package/dist/orchestrator/Orchestrator.js +1 -12
- package/dist/orchestrator/Orchestrator.js.map +1 -1
- package/dist/orchestrator/VideoClipSession.d.ts.map +1 -1
- package/dist/orchestrator/VideoClipSession.js +4 -5
- package/dist/orchestrator/VideoClipSession.js.map +1 -1
- package/dist/orchestrator/types.d.ts +0 -1
- package/dist/orchestrator/types.d.ts.map +1 -1
- package/dist/stages/compose/GlobalAudioSession.d.ts.map +1 -1
- package/dist/stages/compose/GlobalAudioSession.js +3 -2
- package/dist/stages/compose/GlobalAudioSession.js.map +1 -1
- package/dist/stages/compose/VideoComposer.d.ts.map +1 -1
- package/dist/stages/compose/VideoComposer.js +2 -2
- package/dist/stages/compose/VideoComposer.js.map +1 -1
- package/dist/stages/compose/audio-compose.worker.d.ts.map +1 -1
- package/dist/stages/compose/audio-compose.worker.js +0 -1
- package/dist/stages/compose/audio-compose.worker.js.map +1 -1
- package/dist/stages/compose/audio-compose.worker2.js +5 -0
- package/dist/stages/compose/audio-compose.worker2.js.map +1 -0
- package/dist/stages/compose/types.d.ts +1 -0
- package/dist/stages/compose/types.d.ts.map +1 -1
- package/dist/stages/compose/video-compose.worker.d.ts.map +1 -1
- package/dist/stages/compose/video-compose.worker.js +18 -8
- package/dist/stages/compose/video-compose.worker.js.map +1 -1
- package/dist/stages/compose/video-compose.worker2.js +5 -0
- package/dist/stages/compose/video-compose.worker2.js.map +1 -0
- package/dist/stages/decode/AudioChunkDecoder.d.ts.map +1 -1
- package/dist/stages/decode/AudioChunkDecoder.js +0 -1
- package/dist/stages/decode/AudioChunkDecoder.js.map +1 -1
- package/dist/stages/decode/VideoChunkDecoder.d.ts +0 -1
- package/dist/stages/decode/VideoChunkDecoder.d.ts.map +1 -1
- package/dist/stages/decode/VideoChunkDecoder.js +1 -11
- package/dist/stages/decode/VideoChunkDecoder.js.map +1 -1
- package/dist/stages/decode/decode.worker.d.ts.map +1 -1
- package/dist/stages/decode/decode.worker.js +3 -16
- package/dist/stages/decode/decode.worker.js.map +1 -1
- package/dist/stages/decode/decode.worker2.js +5 -0
- package/dist/stages/decode/decode.worker2.js.map +1 -0
- package/dist/stages/demux/MP4Demuxer.d.ts +2 -0
- package/dist/stages/demux/MP4Demuxer.d.ts.map +1 -1
- package/dist/stages/demux/MP4Demuxer.js +13 -2
- package/dist/stages/demux/MP4Demuxer.js.map +1 -1
- package/dist/stages/demux/audio-demux.worker2.js +5 -0
- package/dist/stages/demux/audio-demux.worker2.js.map +1 -0
- package/dist/stages/demux/video-demux.worker.d.ts +6 -3
- package/dist/stages/demux/video-demux.worker.d.ts.map +1 -1
- package/dist/stages/demux/video-demux.worker.js +5 -27
- package/dist/stages/demux/video-demux.worker.js.map +1 -1
- package/dist/stages/demux/video-demux.worker2.js +5 -0
- package/dist/stages/demux/video-demux.worker2.js.map +1 -0
- package/dist/stages/encode/encode.worker.d.ts.map +1 -1
- package/dist/stages/encode/encode.worker.js +0 -1
- package/dist/stages/encode/encode.worker.js.map +1 -1
- package/dist/stages/encode/encode.worker2.js +5 -0
- package/dist/stages/encode/encode.worker2.js.map +1 -0
- package/dist/stages/load/EventHandlers.d.ts +2 -11
- package/dist/stages/load/EventHandlers.d.ts.map +1 -1
- package/dist/stages/load/EventHandlers.js +1 -24
- package/dist/stages/load/EventHandlers.js.map +1 -1
- package/dist/stages/load/ResourceLoader.d.ts.map +1 -1
- package/dist/stages/load/ResourceLoader.js +11 -13
- package/dist/stages/load/ResourceLoader.js.map +1 -1
- package/dist/stages/load/TaskManager.d.ts +1 -1
- package/dist/stages/load/TaskManager.d.ts.map +1 -1
- package/dist/stages/load/TaskManager.js +3 -2
- package/dist/stages/load/TaskManager.js.map +1 -1
- package/dist/stages/load/types.d.ts +2 -0
- package/dist/stages/load/types.d.ts.map +1 -1
- package/dist/stages/mux/mux.worker2.js +5 -0
- package/dist/stages/mux/mux.worker2.js.map +1 -0
- package/dist/vite-plugin.d.ts +17 -0
- package/dist/vite-plugin.d.ts.map +1 -0
- package/dist/vite-plugin.js +88 -0
- package/dist/vite-plugin.js.map +1 -0
- package/dist/worker/WorkerPool.d.ts +0 -4
- package/dist/worker/WorkerPool.d.ts.map +1 -1
- package/dist/worker/WorkerPool.js +4 -17
- package/dist/worker/WorkerPool.js.map +1 -1
- package/dist/worker/worker-registry.d.ts +12 -0
- package/dist/worker/worker-registry.d.ts.map +1 -0
- package/dist/worker/worker-registry.js +20 -0
- package/dist/worker/worker-registry.js.map +1 -0
- package/package.json +7 -1
|
@@ -0,0 +1,1537 @@
|
|
|
1
|
+
// Wire-level message type identifiers shared by every Meframe worker
// (demux, decode, compose, encode, mux) and the host thread.
var WorkerMessageType = /* @__PURE__ */ ((types) => {
  const entries = [
    ["Ready", "ready"],
    ["Error", "error"],
    ["Dispose", "dispose"],
    ["Configure", "configure"],
    ["LoadResource", "load_resource"],
    ["ResourceLoaded", "resource_loaded"],
    ["ResourceProgress", "resource_progress"],
    ["ConfigureDemux", "configure_demux"],
    ["AppendBuffer", "append_buffer"],
    ["DemuxSamples", "demux_samples"],
    ["FlushDemux", "flush_demux"],
    ["ConfigureDecode", "configure_decode"],
    ["DecodeChunk", "decode_chunk"],
    ["DecodedFrame", "decoded_frame"],
    ["SeekGop", "seek_gop"],
    ["SetComposition", "set_composition"],
    ["ApplyPatch", "apply_patch"],
    ["RenderFrame", "render_frame"],
    ["ComposeFrameReady", "compose_frame_ready"],
    ["ConfigureEncode", "configure_encode"],
    ["EncodeFrame", "encode_frame"],
    ["EncodeAudio", "encode_audio"],
    ["EncodedChunk", "encoded_chunk"],
    ["FlushEncode", "flush_encode"],
    ["ConfigureMux", "configure_mux"],
    ["AddChunk", "add_chunk"],
    ["FinishMux", "finish_mux"],
    ["MuxComplete", "mux_complete"],
    ["PerformanceStats", "performance_stats"],
    ["RenderWindow", "renderWindow"],
    ["AudioTrackAdd", "audio_track:add"],
    ["AudioTrackRemove", "audio_track:remove"],
    ["AudioTrackUpdate", "audio_track:update"]
  ];
  for (const [key, value] of entries) {
    types[key] = value;
  }
  return types;
})(WorkerMessageType || {});
|
|
37
|
+
// Lifecycle states of a worker channel, from construction through disposal.
var WorkerState = /* @__PURE__ */ ((states) => {
  for (const [key, value] of [
    ["Idle", "idle"],
    ["Initializing", "initializing"],
    ["Ready", "ready"],
    ["Processing", "processing"],
    ["Error", "error"],
    ["Disposed", "disposed"]
  ]) {
    states[key] = value;
  }
  return states;
})(WorkerState || {});
|
|
46
|
+
// Baseline retry policy used by withRetry() and WorkerChannel.send() when
// the caller does not override individual fields.
const defaultRetryConfig = {
  maxRetries: 3, // total attempts made by withRetry (its loop runs 1..maxRetries)
  initialDelay: 100, // ms before the first retry
  maxDelay: 5e3, // ceiling for the backoff delay, in ms
  backoffFactor: 2, // exponential growth factor applied per attempt
  retryableErrors: ["TIMEOUT", "NETWORK_ERROR", "WORKER_BUSY"] // error codes/names treated as transient
};
|
|
53
|
+
/**
 * Compute the backoff delay (in ms) before retry number `attempt`.
 * `attempt` is 1-based: attempt 1 waits initialDelay, attempt 2 waits
 * initialDelay * backoffFactor, and so on, capped at maxDelay.
 */
function calculateRetryDelay(attempt, config) {
  const { initialDelay = 100, maxDelay = 5e3, backoffFactor = 2 } = config;
  const uncapped = initialDelay * backoffFactor ** (attempt - 1);
  return Math.min(uncapped, maxDelay);
}
|
|
58
|
+
/**
 * Decide whether an error is worth retrying.
 * Retryable when its `code` (or `name`) appears in the configured
 * retryableErrors list, or when its message mentions a timeout.
 * Falsy errors are never retryable.
 */
function isRetryableError(error, config) {
  // Destructure first (as callers rely on config being object-like);
  // the default only kicks in when retryableErrors is undefined.
  const { retryableErrors = defaultRetryConfig.retryableErrors } = config;
  if (!error) return false;
  const identifier = error.code || error.name;
  if (identifier && retryableErrors.includes(identifier)) return true;
  const text = error.message || "";
  return text.includes("timeout") || text.includes("Timeout");
}
|
|
71
|
+
/**
 * Execute fn(), retrying transient failures with exponential backoff.
 * Non-retryable errors are rethrown immediately; the final failed
 * attempt rethrows its error without sleeping.
 */
async function withRetry(fn, config) {
  const { maxRetries } = config;
  let lastError;
  let attempt = 0;
  while (attempt < maxRetries) {
    attempt += 1;
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      if (!isRetryableError(error, config)) {
        throw error;
      }
      if (attempt === maxRetries) {
        throw error;
      }
      await sleep(calculateRetryDelay(attempt, config));
    }
  }
  // Only reachable when maxRetries < 1 (the loop never ran).
  throw lastError || new Error("Retry failed");
}
|
|
91
|
+
/** Promise-based delay helper: resolves (with undefined) after ms milliseconds. */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
94
|
+
/**
 * True when obj is a Transferable that postMessage can move (not copy):
 * ArrayBuffer, MessagePort, or one of the web-platform stream/canvas
 * types. Constructors that may be absent in some runtimes are
 * feature-detected before the instanceof check.
 */
function isTransferable(obj) {
  if (obj instanceof ArrayBuffer || obj instanceof MessagePort) {
    return true;
  }
  const optionalCtors = [
    typeof ImageBitmap !== "undefined" ? ImageBitmap : void 0,
    typeof OffscreenCanvas !== "undefined" ? OffscreenCanvas : void 0,
    typeof ReadableStream !== "undefined" ? ReadableStream : void 0,
    typeof WritableStream !== "undefined" ? WritableStream : void 0,
    typeof TransformStream !== "undefined" ? TransformStream : void 0
  ];
  for (const ctor of optionalCtors) {
    if (ctor && obj instanceof ctor) {
      return true;
    }
  }
  return false;
}
|
|
97
|
+
/**
 * Recursively walk `payload` and collect every transferable object into
 * `transferables` (mutated in place). VideoFrame and AudioData instances
 * are collected as well; encoded chunks are deliberately skipped so they
 * travel by structured clone instead of transfer.
 */
function findTransferables(obj, transferables) {
  if (!obj || typeof obj !== "object") {
    return;
  }
  if (isTransferable(obj)) {
    transferables.push(obj);
    return;
  }
  // Fix: feature-detect VideoFrame like every other WebCodecs type here.
  // The unguarded `obj instanceof VideoFrame` threw a ReferenceError for
  // any plain object in runtimes without WebCodecs (e.g. Node, older
  // browsers), breaking extractTransferables entirely there.
  if (typeof VideoFrame !== "undefined" && obj instanceof VideoFrame) {
    transferables.push(obj);
    return;
  }
  if (typeof AudioData !== "undefined" && obj instanceof AudioData) {
    transferables.push(obj);
    return;
  }
  // Encoded chunks are structured-cloneable but not transferable: stop
  // descending so they are copied, not (incorrectly) listed for transfer.
  if (typeof EncodedVideoChunk !== "undefined" && obj instanceof EncodedVideoChunk || typeof EncodedAudioChunk !== "undefined" && obj instanceof EncodedAudioChunk) {
    return;
  }
  if (Array.isArray(obj)) {
    for (const item of obj) {
      findTransferables(item, transferables);
    }
  } else {
    for (const key in obj) {
      if (Object.prototype.hasOwnProperty.call(obj, key)) {
        findTransferables(obj[key], transferables);
      }
    }
  }
}
|
|
128
|
+
/**
 * Convenience wrapper around findTransferables: returns the list of
 * transferable objects discovered anywhere inside `payload`.
 */
function extractTransferables(payload) {
  const found = [];
  findTransferables(payload, found);
  return found;
}
|
|
133
|
+
/**
 * Bidirectional RPC channel over a MessagePort-like endpoint.
 *
 * Outgoing messages sent with send()/sendOnce() are correlated with their
 * responses by a generated id, guarded by a per-request timeout and an
 * optional retry policy (exponential backoff via withRetry). Incoming
 * requests are routed to handlers registered with on()/registerHandler().
 * sendStream()/receiveStream() move ReadableStreams across the port,
 * transferring the stream object itself when it is transferable and
 * falling back to chunk-by-chunk messages otherwise.
 */
class WorkerChannel {
  name; // channel label; used as the prefix of generated message ids
  port; // the wrapped MessagePort-like endpoint
  // Outstanding outgoing requests keyed by message id.
  pendingRequests = /* @__PURE__ */ new Map();
  // Incoming-request handlers keyed by message type.
  messageHandlers = {};
  state = WorkerState.Idle;
  defaultTimeout; // response timeout (ms) when options.timeout is omitted
  defaultMaxRetries; // retry budget when options.maxRetries is omitted
  /**
   * Wrap `port` and start listening. The port's onmessage is replaced.
   * config: { name, timeout? (default 30000ms), maxRetries? (default 3) }.
   */
  constructor(port, config) {
    this.name = config.name;
    this.port = port;
    this.defaultTimeout = config.timeout ?? 3e4;
    this.defaultMaxRetries = config.maxRetries ?? 3;
    this.setupMessageHandler();
    this.state = WorkerState.Ready;
  }
  /**
   * Send a message and wait for response with retry support.
   * Retry policy = defaultRetryConfig, overridden by options.maxRetries
   * and then options.retryConfig.
   */
  async send(type, payload, options) {
    const maxRetries = options?.maxRetries ?? this.defaultMaxRetries;
    const retryConfig = {
      ...defaultRetryConfig,
      maxRetries,
      ...options?.retryConfig
    };
    return withRetry(() => this.sendOnce(type, payload, options), retryConfig);
  }
  /**
   * Send a message once (without retry).
   * Resolves with the responder's result, or rejects with an Error whose
   * code is "TIMEOUT" if no response arrives within the timeout.
   */
  async sendOnce(type, payload, options) {
    const id = this.generateMessageId();
    const timeout = options?.timeout ?? this.defaultTimeout;
    const message = {
      type,
      id,
      payload,
      timestamp: Date.now()
    };
    return new Promise((resolve, reject) => {
      const request = {
        id,
        type,
        timestamp: Date.now(),
        timeout,
        resolve,
        reject
      };
      this.pendingRequests.set(id, request);
      // On timeout, drop the pending entry so a late response (handled in
      // handleResponse) is silently ignored rather than double-settling.
      const timeoutId = setTimeout(() => {
        const pending = this.pendingRequests.get(id);
        if (pending) {
          this.pendingRequests.delete(id);
          const error = new Error(`Request timeout: ${id} ${type} (${timeout}ms)`);
          error.code = "TIMEOUT";
          pending.reject(error);
        }
      }, timeout);
      request.timeoutId = timeoutId;
      if (options?.transfer) {
        this.port.postMessage(message, options.transfer);
      } else {
        this.port.postMessage(message);
      }
    });
  }
  /**
   * Send a message without waiting for response (fire-and-forget).
   * `transfer`, when given, is passed straight to postMessage.
   */
  post(type, payload, transfer) {
    const message = {
      type,
      id: this.generateMessageId(),
      payload,
      timestamp: Date.now()
    };
    if (transfer) {
      this.port.postMessage(message, transfer);
    } else {
      this.port.postMessage(message);
    }
  }
  /**
   * Register a message handler (one handler per type; replaces any
   * previously registered handler for the same type).
   */
  on(type, handler) {
    this.messageHandlers[type] = handler;
  }
  /**
   * Unregister a message handler
   */
  off(type) {
    delete this.messageHandlers[type];
  }
  /**
   * Dispose the channel: reject all in-flight requests, clear their
   * timeouts, and detach from the port.
   */
  dispose() {
    this.state = WorkerState.Disposed;
    for (const [, request] of this.pendingRequests) {
      if (request.timeoutId) {
        clearTimeout(request.timeoutId);
      }
      request.reject(new Error("Channel disposed"));
    }
    this.pendingRequests.clear();
    this.port.onmessage = null;
  }
  /**
   * Setup message handler for incoming messages.
   * Responses (id + success, no type) settle pending requests; requests
   * (id + type) are dispatched to registered handlers; anything else is
   * dropped.
   */
  setupMessageHandler() {
    this.port.onmessage = async (event) => {
      const data = event.data;
      if (this.isResponse(data)) {
        this.handleResponse(data);
        return;
      }
      if (this.isRequest(data)) {
        await this.handleRequest(data);
        return;
      }
    };
  }
  /**
   * Handle incoming request: run the registered handler and send back a
   * success/error response envelope. Unknown types answer NO_HANDLER.
   */
  async handleRequest(message) {
    const handler = this.messageHandlers[message.type];
    if (!handler) {
      this.sendResponse(message.id, false, null, {
        code: "NO_HANDLER",
        message: `No handler registered for message type: ${message.type}`
      });
      return;
    }
    // NOTE(review): state is a single field flipped per request; with
    // overlapping requests the first one to settle resets it to Ready —
    // confirm this coarse state tracking is intentional.
    this.state = WorkerState.Processing;
    Promise.resolve().then(() => handler(message.payload, message.transfer)).then((result) => {
      this.sendResponse(message.id, true, result);
      this.state = WorkerState.Ready;
    }).catch((error) => {
      const workerError = {
        code: "HANDLER_ERROR",
        message: error instanceof Error ? error.message : String(error),
        stack: error instanceof Error ? error.stack : void 0
      };
      this.sendResponse(message.id, false, null, workerError);
      this.state = WorkerState.Ready;
    });
  }
  /**
   * Handle incoming response: settle the matching pending request.
   * Responses for unknown ids (e.g. already timed out) are ignored.
   */
  handleResponse(response) {
    const request = this.pendingRequests.get(response.id);
    if (!request) {
      return;
    }
    this.pendingRequests.delete(response.id);
    if (request.timeoutId) {
      clearTimeout(request.timeoutId);
    }
    if (response.success) {
      request.resolve(response.result);
    } else {
      // Rehydrate the serialized worker error (code/stack/etc.) onto a
      // real Error instance before rejecting.
      const error = new Error(response.error?.message || "Unknown error");
      if (response.error) {
        Object.assign(error, response.error);
      }
      request.reject(error);
    }
  }
  /**
   * Send a response message. Only a top-level transferable result is
   * transferred; nested transferables inside `result` are cloned.
   */
  sendResponse(id, success, result, error) {
    let transfer = [];
    if (isTransferable(result)) {
      transfer.push(result);
    }
    const response = {
      id,
      success,
      result,
      error,
      timestamp: Date.now()
    };
    this.port.postMessage(response, transfer);
  }
  /**
   * Check if message is a response (heuristic: has id + success, no type).
   */
  isResponse(data) {
    return data && typeof data === "object" && "id" in data && "success" in data && !("type" in data);
  }
  /**
   * Check if message is a request (heuristic: has both id and type).
   */
  isRequest(data) {
    return data && typeof data === "object" && "id" in data && "type" in data;
  }
  /**
   * Generate unique message ID: "<name>-<epoch ms>-<random base36>".
   */
  generateMessageId() {
    return `${this.name}-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`;
  }
  /**
   * Send a notification message without waiting for response
   * Alias for post() method for compatibility
   */
  notify(type, payload, transfer) {
    this.post(type, payload, transfer);
  }
  /**
   * Register a message handler
   * Alias for on() method for compatibility
   */
  registerHandler(type, handler) {
    this.on(type, handler);
  }
  /**
   * Send a ReadableStream to another worker
   * Automatically handles transferable streams vs chunk-by-chunk fallback
   */
  async sendStream(stream, metadata) {
    const streamId = metadata?.streamId || this.generateMessageId();
    if (isTransferable(stream)) {
      // Note: posted as a raw envelope (no request id), so no response is
      // expected; the receiving side pairs it in receiveStream().
      this.port.postMessage(
        {
          type: "stream_transfer",
          ...metadata,
          stream,
          streamId
        },
        [stream]
        // Transfer ownership
      );
    } else {
      await this.streamChunks(stream, streamId, metadata);
    }
  }
  /**
   * Stream chunks from a ReadableStream (fallback when transfer is not supported)
   * Emits stream_start, then stream_chunk per value, then stream_end;
   * on failure emits stream_error and rethrows.
   */
  async streamChunks(stream, streamId, metadata) {
    const reader = stream.getReader();
    this.post("stream_start", {
      streamId,
      ...metadata,
      mode: "chunk_transfer"
    });
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          this.post("stream_end", {
            streamId,
            ...metadata
          });
          break;
        }
        // Collect anything movable in this chunk so large payloads are
        // transferred rather than copied.
        const transfer = [];
        if (value instanceof ArrayBuffer) {
          transfer.push(value);
        } else if (value instanceof Uint8Array) {
          transfer.push(value.buffer);
        } else if (typeof AudioData !== "undefined" && value instanceof AudioData) {
          transfer.push(value);
        } else if (typeof VideoFrame !== "undefined" && value instanceof VideoFrame) {
          transfer.push(value);
        } else if (typeof value === "object" && value !== null) {
          const extracted = extractTransferables(value);
          transfer.push(...extracted);
        }
        this.post(
          "stream_chunk",
          {
            streamId,
            chunk: value,
            ...metadata
          },
          transfer
        );
      }
    } catch (error) {
      this.post("stream_error", {
        streamId,
        error: error instanceof Error ? error.message : String(error),
        ...metadata
      });
      throw error;
    } finally {
      reader.releaseLock();
    }
  }
  /**
   * Receive a stream from another worker
   * Handles both transferable streams and chunk-by-chunk reconstruction.
   * NOTE(review): this permanently wraps port.onmessage (chaining the
   * previous handler) each time it is called — repeated calls stack
   * wrappers; confirm it is only invoked once per channel.
   */
  async receiveStream(onStream) {
    const chunkedStreams = /* @__PURE__ */ new Map();
    const prev = this.port.onmessage;
    const handler = (event) => {
      const raw = event.data;
      const envelopeType = raw?.type;
      // Accept both wrapped ({type, payload}) and raw envelopes.
      const hasPayload = raw && typeof raw === "object" && "payload" in raw;
      const payload = hasPayload ? raw.payload : raw;
      if (envelopeType === "stream_transfer" && payload?.stream) {
        onStream(payload.stream, payload);
        return;
      }
      if (envelopeType === "stream_start" && payload?.streamId) {
        // Rebuild a local ReadableStream fed by subsequent chunk messages.
        const stream = new ReadableStream({
          start(controller) {
            chunkedStreams.set(payload.streamId, { controller, metadata: payload });
          }
        });
        onStream(stream, payload);
        return;
      }
      if (envelopeType === "stream_chunk" && payload?.streamId && chunkedStreams.has(payload.streamId)) {
        const s = chunkedStreams.get(payload.streamId);
        if (s) s.controller.enqueue(payload.chunk);
        return;
      }
      if (envelopeType === "stream_end" && payload?.streamId && chunkedStreams.has(payload.streamId)) {
        const s = chunkedStreams.get(payload.streamId);
        if (s) {
          s.controller.close();
          chunkedStreams.delete(payload.streamId);
        }
        return;
      }
      if (envelopeType === "stream_error" && payload?.streamId && chunkedStreams.has(payload.streamId)) {
        const s = chunkedStreams.get(payload.streamId);
        if (s) {
          s.controller.error(new Error(String(payload.error || "stream error")));
          chunkedStreams.delete(payload.streamId);
        }
        return;
      }
      // Fall through to the handler installed by setupMessageHandler().
      if (typeof prev === "function") prev.call(this.port, event);
    };
    this.port.onmessage = handler;
  }
}
|
|
481
|
+
class AudioMixer {
|
|
482
|
+
config;
|
|
483
|
+
tracksMap = /* @__PURE__ */ new Map();
|
|
484
|
+
constructor(config) {
|
|
485
|
+
this.config = config;
|
|
486
|
+
}
|
|
487
|
+
getConfig() {
|
|
488
|
+
return { ...this.config };
|
|
489
|
+
}
|
|
490
|
+
updateConfig(update) {
|
|
491
|
+
this.config = { ...this.config, ...update };
|
|
492
|
+
}
|
|
493
|
+
get tracks() {
|
|
494
|
+
return Array.from(this.tracksMap.values());
|
|
495
|
+
}
|
|
496
|
+
createMixStream(ducker) {
|
|
497
|
+
return new TransformStream(
|
|
498
|
+
{
|
|
499
|
+
transform: async (request, controller) => {
|
|
500
|
+
try {
|
|
501
|
+
const frameCount = this.getFrameCount(request.durationUs);
|
|
502
|
+
if (ducker && request.duckingConfig?.enabled && frameCount > 0) {
|
|
503
|
+
const envelope = await ducker.generateDuckingEnvelope(request.tracks, frameCount);
|
|
504
|
+
for (const track of request.tracks) {
|
|
505
|
+
if (request.duckingConfig.targetTracks.includes(track.trackId)) {
|
|
506
|
+
track.duckingEnvelope = ducker.applyEnvelopeToVolume(1, envelope);
|
|
507
|
+
}
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
const result = await this.mixTracks(request, frameCount);
|
|
511
|
+
controller.enqueue(result);
|
|
512
|
+
} catch (error) {
|
|
513
|
+
controller.error(error);
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
},
|
|
517
|
+
{
|
|
518
|
+
highWaterMark: 2,
|
|
519
|
+
size: () => 1
|
|
520
|
+
}
|
|
521
|
+
);
|
|
522
|
+
}
|
|
523
|
+
async mixTracks(request, precomputedFrameCount) {
|
|
524
|
+
const tracks = request.tracks ?? [];
|
|
525
|
+
const frameCount = precomputedFrameCount ?? this.getFrameCount(request.durationUs);
|
|
526
|
+
const requestedChannelCount = this.config.numberOfChannels ?? 0;
|
|
527
|
+
const inferredChannelCount = tracks.reduce((max, track) => {
|
|
528
|
+
const trackChannels = track?.numberOfChannels ?? track?.audioData?.numberOfChannels ?? this.config.numberOfChannels ?? 0;
|
|
529
|
+
return trackChannels > max ? trackChannels : max;
|
|
530
|
+
}, 0);
|
|
531
|
+
const channelCount = requestedChannelCount > 0 ? requestedChannelCount : Math.max(inferredChannelCount, 1);
|
|
532
|
+
const outputChannels = Array.from({ length: channelCount }, () => {
|
|
533
|
+
return new Float32Array(frameCount);
|
|
534
|
+
});
|
|
535
|
+
for (const track of tracks) {
|
|
536
|
+
if (!track) {
|
|
537
|
+
continue;
|
|
538
|
+
}
|
|
539
|
+
const resolvedAudioData = track.audioData;
|
|
540
|
+
if (!resolvedAudioData) {
|
|
541
|
+
continue;
|
|
542
|
+
}
|
|
543
|
+
this.mixTrackIntoOutput(
|
|
544
|
+
outputChannels,
|
|
545
|
+
{
|
|
546
|
+
...track,
|
|
547
|
+
audioData: resolvedAudioData,
|
|
548
|
+
numberOfChannels: track.numberOfChannels ?? resolvedAudioData.numberOfChannels ?? this.config.numberOfChannels,
|
|
549
|
+
sampleRate: track.sampleRate ?? resolvedAudioData.sampleRate ?? this.config.sampleRate
|
|
550
|
+
},
|
|
551
|
+
request.timeUs,
|
|
552
|
+
frameCount
|
|
553
|
+
);
|
|
554
|
+
}
|
|
555
|
+
const { peakLevel, rmsLevel } = this.limitAndMeasure(outputChannels);
|
|
556
|
+
const audioData = this.createAudioData(outputChannels, request.timeUs);
|
|
557
|
+
return {
|
|
558
|
+
audioData,
|
|
559
|
+
timeUs: request.timeUs,
|
|
560
|
+
durationUs: request.durationUs,
|
|
561
|
+
peakLevel,
|
|
562
|
+
rmsLevel
|
|
563
|
+
};
|
|
564
|
+
}
|
|
565
|
+
addTrack(track) {
|
|
566
|
+
this.tracksMap.set(track.id, track);
|
|
567
|
+
}
|
|
568
|
+
removeTrack(trackId) {
|
|
569
|
+
this.tracksMap.delete(trackId);
|
|
570
|
+
}
|
|
571
|
+
updateTrack(trackId, patch) {
|
|
572
|
+
const track = this.tracksMap.get(trackId);
|
|
573
|
+
if (!track) {
|
|
574
|
+
return;
|
|
575
|
+
}
|
|
576
|
+
const { config, ...rest } = patch;
|
|
577
|
+
if (config) {
|
|
578
|
+
Object.assign(track.config, config);
|
|
579
|
+
}
|
|
580
|
+
Object.assign(track, rest);
|
|
581
|
+
}
|
|
582
|
+
mixTrackIntoOutput(outputChannels, track, mixStartUs, totalFrameCount) {
|
|
583
|
+
if (totalFrameCount === 0) {
|
|
584
|
+
track.audioData.close();
|
|
585
|
+
return;
|
|
586
|
+
}
|
|
587
|
+
if (track.sampleRate !== this.config.sampleRate) {
|
|
588
|
+
track.audioData.close();
|
|
589
|
+
throw new Error("AudioMixer: sample rate mismatch");
|
|
590
|
+
}
|
|
591
|
+
const trackChannelCount = track.audioData.numberOfChannels ?? track.numberOfChannels ?? 0;
|
|
592
|
+
if (trackChannelCount === 0) {
|
|
593
|
+
track.audioData.close();
|
|
594
|
+
return;
|
|
595
|
+
}
|
|
596
|
+
const trackChannels = this.extractChannels(track.audioData);
|
|
597
|
+
if (trackChannels.length === 0) {
|
|
598
|
+
track.audioData.close();
|
|
599
|
+
return;
|
|
600
|
+
}
|
|
601
|
+
const trackFrameCount = track.audioData.numberOfFrames;
|
|
602
|
+
if (trackFrameCount === 0) {
|
|
603
|
+
track.audioData.close();
|
|
604
|
+
return;
|
|
605
|
+
}
|
|
606
|
+
const timestampUs = track.audioData.timestamp ?? mixStartUs;
|
|
607
|
+
const deltaUs = timestampUs - mixStartUs;
|
|
608
|
+
let outputOffsetFrames = Math.round(deltaUs / 1e6 * this.config.sampleRate);
|
|
609
|
+
let sourceOffsetFrames = 0;
|
|
610
|
+
if (outputOffsetFrames < 0) {
|
|
611
|
+
sourceOffsetFrames = Math.min(trackFrameCount, -outputOffsetFrames);
|
|
612
|
+
outputOffsetFrames = 0;
|
|
613
|
+
}
|
|
614
|
+
if (outputOffsetFrames >= totalFrameCount) {
|
|
615
|
+
track.audioData.close();
|
|
616
|
+
return;
|
|
617
|
+
}
|
|
618
|
+
const availableFrames = Math.min(
|
|
619
|
+
trackFrameCount - sourceOffsetFrames,
|
|
620
|
+
totalFrameCount - outputOffsetFrames
|
|
621
|
+
);
|
|
622
|
+
if (availableFrames <= 0) {
|
|
623
|
+
track.audioData.close();
|
|
624
|
+
return;
|
|
625
|
+
}
|
|
626
|
+
const gains = this.buildGainEnvelope(
|
|
627
|
+
track,
|
|
628
|
+
availableFrames,
|
|
629
|
+
outputOffsetFrames,
|
|
630
|
+
sourceOffsetFrames,
|
|
631
|
+
trackFrameCount
|
|
632
|
+
);
|
|
633
|
+
const destinationChannelCount = outputChannels.length;
|
|
634
|
+
const sourceChannelCount = trackChannels.length;
|
|
635
|
+
for (let channelIndex = 0; channelIndex < destinationChannelCount; channelIndex++) {
|
|
636
|
+
const destination = outputChannels[channelIndex];
|
|
637
|
+
const source = trackChannels[channelIndex] ?? trackChannels[sourceChannelCount - 1];
|
|
638
|
+
if (!destination || !source) continue;
|
|
639
|
+
for (let frameIndex = 0; frameIndex < availableFrames; frameIndex++) {
|
|
640
|
+
const sample = source[sourceOffsetFrames + frameIndex] ?? 0;
|
|
641
|
+
const gain = gains[frameIndex] ?? 0;
|
|
642
|
+
destination[outputOffsetFrames + frameIndex] = (destination[outputOffsetFrames + frameIndex] ?? 0) + sample * gain;
|
|
643
|
+
}
|
|
644
|
+
}
|
|
645
|
+
track.audioData.close();
|
|
646
|
+
}
|
|
647
|
+
// Builds a per-frame gain envelope for one track segment by combining the
// track's static volume, fade-in/fade-out curves, and any precomputed
// ducking envelope attached to the track.
//
// length             - number of frames to produce
// outputOffsetFrames - segment position inside the mix window; used to index
//                      track.duckingEnvelope
// sourceOffsetFrames / trackFrameCount - position of the segment inside the
//                      source AudioData; combined with computeTrackStartFrame
//                      to locate the fades on the clip's own timeline
buildGainEnvelope(track, length, outputOffsetFrames, sourceOffsetFrames, trackFrameCount) {
  const gains = new Float32Array(length);
  // Missing/non-numeric volume defaults to unity gain.
  const baseVolume = typeof track.config.volume === "number" ? track.config.volume : 1;
  gains.fill(baseVolume);
  const fadeInSamples = this.getFadeSampleCount(track.config.fadeIn);
  const fadeOutSamples = this.getFadeSampleCount(track.config.fadeOut);
  // Fall back to the segment's own frame count when no clip duration is set,
  // so the fade-out still anchors to the end of available audio.
  const clipDurationSamples = this.getClipSampleCount(track.config.durationUs) || trackFrameCount;
  const trackStartFrame = this.computeTrackStartFrame(track);
  for (let i = 0; i < length; i++) {
    const envelopeIndex = outputOffsetFrames + i;
    // Frame index relative to the start of the clip (not the mix window).
    const absoluteFrame = trackStartFrame + sourceOffsetFrames + i;
    let gain = baseVolume;
    if (fadeInSamples > 0 && absoluteFrame < fadeInSamples) {
      const progress = Math.min(1, absoluteFrame / fadeInSamples);
      gain *= this.getCurveValue(progress, track.config.fadeIn?.curve);
    }
    if (fadeOutSamples > 0 && clipDurationSamples > 0) {
      // Fade-out occupies the last fadeOutSamples frames of the clip.
      const fadeStart = Math.max(0, clipDurationSamples - fadeOutSamples);
      if (absoluteFrame >= fadeStart) {
        const progress = Math.min(1, (absoluteFrame - fadeStart) / fadeOutSamples);
        const remaining = Math.max(0, 1 - progress);
        gain *= this.getCurveValue(remaining, track.config.fadeOut?.curve);
      }
    }
    // Ducking envelope is indexed by mix-window position; out-of-range
    // indices leave the gain untouched.
    if (track.duckingEnvelope && envelopeIndex < track.duckingEnvelope.length && envelopeIndex >= 0) {
      gain *= track.duckingEnvelope[envelopeIndex] ?? 1;
    }
    gains[i] = gain;
  }
  return gains;
}
|
|
678
|
+
// De-interleaves a WebCodecs AudioData into one Float32Array per output
// channel. The output channel count is the mixer's configured channel count
// (falling back to the source's); when the source has fewer channels than
// the output, the last source channel is duplicated (mono -> stereo upmix).
// s16 samples are scaled to float by 1/32768. Returns [] when the frame is
// empty or has no channels.
extractChannels(audioData) {
  const configuredChannels = this.config.numberOfChannels ?? 0;
  const channelCount = audioData.numberOfChannels ?? configuredChannels;
  const frameCount = audioData.numberOfFrames;
  const format = audioData.format ?? "f32";
  if (!channelCount || !frameCount) {
    return [];
  }
  // Scale signed 16-bit PCM into the [-1, 1) float range.
  const toFloat = (value) => value / 32768;
  // Allocates the destination planes, sized to the configured channel count
  // when one is set, otherwise to the source's channel count.
  const zeroChannels = () => Array.from(
    { length: configuredChannels || channelCount },
    () => new Float32Array(frameCount)
  );
  if (format === "f32") {
    // Interleaved float: copy once, then fan out per frame.
    const interleaved = new Float32Array(frameCount * channelCount);
    audioData.copyTo(interleaved, { format: "f32", planeIndex: 0 });
    const channels2 = zeroChannels();
    for (let frame = 0; frame < frameCount; frame++) {
      const offset = frame * channelCount;
      for (let channel = 0; channel < channels2.length; channel++) {
        const channelArray = channels2[channel];
        if (!channelArray) continue;
        // Duplicate the last source channel when upmixing.
        const sourceChannel = channel < channelCount ? channel : channelCount - 1;
        channelArray[frame] = interleaved[offset + sourceChannel] ?? 0;
      }
    }
    return channels2;
  }
  if (format === "s16") {
    // Interleaved 16-bit PCM: same fan-out, with float conversion.
    const interleaved = new Int16Array(frameCount * channelCount);
    audioData.copyTo(interleaved, { format: "s16", planeIndex: 0 });
    const channels2 = zeroChannels();
    for (let frame = 0; frame < frameCount; frame++) {
      const offset = frame * channelCount;
      for (let channel = 0; channel < channels2.length; channel++) {
        const channelArray = channels2[channel];
        if (!channelArray) continue;
        const sourceChannel = channel < channelCount ? channel : channelCount - 1;
        channelArray[frame] = toFloat(interleaved[offset + sourceChannel] ?? 0);
      }
    }
    return channels2;
  }
  if (format === "f32-planar") {
    // Planar float: each plane can be copied straight into its destination.
    const channels2 = zeroChannels();
    for (let channel = 0; channel < channels2.length; channel++) {
      const channelArray = channels2[channel];
      if (!channelArray) continue;
      const sourceChannel = channel < channelCount ? channel : channelCount - 1;
      audioData.copyTo(channelArray, { planeIndex: sourceChannel, format: "f32-planar" });
    }
    return channels2;
  }
  if (format === "s16-planar") {
    // Planar 16-bit PCM: stage each plane through a scratch buffer, then
    // convert sample-by-sample.
    const tmp = new Int16Array(frameCount);
    const channels2 = zeroChannels();
    for (let channel = 0; channel < channels2.length; channel++) {
      const channelArray = channels2[channel];
      if (!channelArray) continue;
      const sourceChannel = channel < channelCount ? channel : channelCount - 1;
      audioData.copyTo(tmp, { planeIndex: sourceChannel, format: "s16-planar" });
      for (let i = 0; i < frameCount; i++) {
        channelArray[i] = toFloat(tmp[i] ?? 0);
      }
    }
    return channels2;
  }
  // Fallback for unhandled formats (e.g. u8/s32): copy each plane in the
  // AudioData's native format. NOTE(review): this assumes the native plane
  // layout is float-compatible with the Float32Array destination — confirm
  // against the formats actually produced upstream.
  const channels = zeroChannels();
  for (let channel = 0; channel < channels.length; channel++) {
    const channelArray = channels[channel];
    if (!channelArray) continue;
    const sourceChannel = channel < channelCount ? channel : channelCount - 1;
    audioData.copyTo(channelArray, { planeIndex: sourceChannel });
  }
  return channels;
}
|
|
754
|
+
limitAndMeasure(channels) {
|
|
755
|
+
let peak = 0;
|
|
756
|
+
let sumSquares = 0;
|
|
757
|
+
let samples = 0;
|
|
758
|
+
for (const channel of channels) {
|
|
759
|
+
for (let i = 0; i < channel.length; i++) {
|
|
760
|
+
let sample = channel[i] ?? 0;
|
|
761
|
+
if (sample > 1) {
|
|
762
|
+
sample = 1;
|
|
763
|
+
} else if (sample < -1) {
|
|
764
|
+
sample = -1;
|
|
765
|
+
}
|
|
766
|
+
channel[i] = sample;
|
|
767
|
+
const absSample = Math.abs(sample);
|
|
768
|
+
if (absSample > peak) {
|
|
769
|
+
peak = absSample;
|
|
770
|
+
}
|
|
771
|
+
sumSquares += sample * sample;
|
|
772
|
+
samples++;
|
|
773
|
+
}
|
|
774
|
+
}
|
|
775
|
+
const rmsLevel = samples > 0 ? Math.sqrt(sumSquares / samples) : 0;
|
|
776
|
+
return {
|
|
777
|
+
peakLevel: peak,
|
|
778
|
+
rmsLevel
|
|
779
|
+
};
|
|
780
|
+
}
|
|
781
|
+
// Packs planar float channels into a single interleaved f32 AudioData at the
// mixer's configured sample rate. Channel count prefers the actual plane
// count, then the configured count, then 1. When a plane is missing for a
// given output channel, the last available plane is reused; missing samples
// become 0. Zero-frame input yields an empty (0-frame) AudioData.
createAudioData(channels, timestampUs) {
  const configuredChannels = this.config.numberOfChannels ?? 0;
  const inferredChannels = channels.length;
  const numberOfChannels = (inferredChannels > 0 ? inferredChannels : configuredChannels) || 1;
  const numberOfFrames = channels[0]?.length ?? 0;
  if (numberOfFrames === 0) {
    // Emit a valid empty frame so downstream consumers keep a consistent
    // timestamp sequence.
    return new AudioData({
      format: "f32",
      sampleRate: this.config.sampleRate,
      numberOfFrames: 0,
      numberOfChannels,
      timestamp: timestampUs,
      data: new Float32Array(0)
    });
  }
  // Interleave: frame-major layout [f0c0, f0c1, ..., f1c0, f1c1, ...].
  const interleaved = new Float32Array(numberOfFrames * numberOfChannels);
  for (let frame = 0; frame < numberOfFrames; frame++) {
    for (let channel = 0; channel < numberOfChannels; channel++) {
      const sourceChannel = channels[channel] ?? channels[channels.length - 1];
      interleaved[frame * numberOfChannels + channel] = sourceChannel?.[frame] ?? 0;
    }
  }
  return new AudioData({
    format: "f32",
    sampleRate: this.config.sampleRate,
    numberOfFrames,
    numberOfChannels,
    timestamp: timestampUs,
    data: interleaved
  });
}
|
|
812
|
+
getFrameCount(durationUs) {
|
|
813
|
+
if (durationUs <= 0) {
|
|
814
|
+
return 0;
|
|
815
|
+
}
|
|
816
|
+
return Math.ceil(durationUs / 1e6 * this.config.sampleRate);
|
|
817
|
+
}
|
|
818
|
+
getFadeSampleCount(fade) {
|
|
819
|
+
if (!fade || fade.durationUs <= 0) {
|
|
820
|
+
return 0;
|
|
821
|
+
}
|
|
822
|
+
return Math.round(fade.durationUs / 1e6 * this.config.sampleRate);
|
|
823
|
+
}
|
|
824
|
+
getClipSampleCount(durationUs) {
|
|
825
|
+
if (!durationUs || durationUs <= 0) {
|
|
826
|
+
return 0;
|
|
827
|
+
}
|
|
828
|
+
return Math.round(durationUs / 1e6 * this.config.sampleRate);
|
|
829
|
+
}
|
|
830
|
+
computeTrackStartFrame(track) {
|
|
831
|
+
const audioTimestamp = track.audioData.timestamp ?? track.config.startTimeUs;
|
|
832
|
+
const relativeUs = audioTimestamp - track.config.startTimeUs;
|
|
833
|
+
const relativeFrames = Math.round(relativeUs / 1e6 * this.config.sampleRate);
|
|
834
|
+
return relativeFrames > 0 ? relativeFrames : 0;
|
|
835
|
+
}
|
|
836
|
+
getCurveValue(progress, curve = "linear") {
|
|
837
|
+
const clamped = Math.min(Math.max(progress, 0), 1);
|
|
838
|
+
switch (curve) {
|
|
839
|
+
case "exponential":
|
|
840
|
+
return clamped * clamped;
|
|
841
|
+
case "logarithmic":
|
|
842
|
+
return Math.log10(clamped * 9 + 1);
|
|
843
|
+
case "cosine":
|
|
844
|
+
return (1 - Math.cos(clamped * Math.PI)) / 2;
|
|
845
|
+
default:
|
|
846
|
+
return clamped;
|
|
847
|
+
}
|
|
848
|
+
}
|
|
849
|
+
}
|
|
850
|
+
/**
 * Side-chain style audio ducker: detects voice activity on trigger tracks
 * and produces a per-frame gain envelope (0..1) to attenuate target tracks
 * (e.g. background music) while voice is present.
 *
 * Fixes vs previous revision:
 * - `envelope[i] || 1` treated a legitimate gain of exactly 0 (full duck,
 *   duckingLevel === 1) as "missing" and reset it to 1, silently undoing the
 *   duck when envelopes were combined or applied. Uses `??` now so only
 *   null/undefined fall back.
 * - calculateDynamicRange returned NaN for empty or all-zero envelopes
 *   (log10(0) = -Infinity); it now returns 0 for those degenerate inputs.
 */
class AudioDucker {
  // Ducking configuration (enabled, triggerTracks, duckingLevel,
  // attackTimeMs, releaseTimeMs, optional lookAheadMs); null until configured.
  config = null;
  sampleRate;
  constructor(sampleRate) {
    this.sampleRate = sampleRate;
  }
  configure(config) {
    this.config = config;
  }
  /**
   * Analyze trigger tracks (voice) and generate ducking envelope.
   * Returns gain values (0-1) to apply to target tracks (BGM).
   * When ducking is disabled or no trigger track is present, the envelope
   * is all-ones (no attenuation).
   */
  async generateDuckingEnvelope(tracks, frameCount) {
    if (!this.config?.enabled) {
      return new Float32Array(frameCount).fill(1);
    }
    const envelope = new Float32Array(frameCount);
    envelope.fill(1);
    const triggerTracks = tracks.filter((t) => this.config.triggerTracks.includes(t.trackId));
    if (triggerTracks.length === 0) {
      return envelope;
    }
    for (const track of triggerTracks) {
      const voiceActivity = await this.detectVoiceActivity(track.audioData);
      this.applyDuckingToEnvelope(envelope, voiceActivity);
    }
    return envelope;
  }
  /**
   * Voice Activity Detection (VAD).
   * Simple energy-based detection with smoothing: downmix to mono, compute
   * windowed RMS energy (20ms window, 50% hop), threshold it, then smooth.
   * More sophisticated implementations could use zero-crossing rate,
   * spectral centroid, or ML models.
   */
  async detectVoiceActivity(audioData) {
    const frameCount = audioData.numberOfFrames;
    const activity = new Float32Array(frameCount);
    const monoData = new Float32Array(frameCount);
    const channelData = new Float32Array(frameCount);
    // Average all channels into monoData.
    for (let ch = 0; ch < audioData.numberOfChannels; ch++) {
      audioData.copyTo(channelData, {
        planeIndex: ch,
        format: "f32-planar"
      });
      for (let i = 0; i < frameCount; i++) {
        monoData[i] += (channelData[i] || 0) / audioData.numberOfChannels;
      }
    }
    const windowSize = Math.floor(this.sampleRate * 0.02); // 20ms analysis window
    const hopSize = Math.floor(windowSize / 2);
    for (let i = 0; i < frameCount; i += hopSize) {
      const end = Math.min(i + windowSize, frameCount);
      let energy = 0;
      for (let j = i; j < end; j++) {
        const sample = monoData[j] ?? 0;
        energy += sample * sample;
      }
      energy = Math.sqrt(energy / (end - i));
      // Fixed RMS threshold; windows above it count as voice.
      const threshold = 0.01;
      const isVoice = energy > threshold;
      for (let j = i; j < end; j++) {
        activity[j] = isVoice ? 1 : 0;
      }
    }
    return this.smoothActivityDetection(activity);
  }
  /**
   * Smooth voice activity detection to avoid choppy ducking.
   * Uses a simple moving average filter (+/- 50ms window).
   */
  smoothActivityDetection(activity) {
    const smoothed = new Float32Array(activity.length);
    const smoothWindow = Math.floor(this.sampleRate * 0.05);
    for (let i = 0; i < activity.length; i++) {
      let sum = 0;
      let count = 0;
      for (let j = Math.max(0, i - smoothWindow); j <= Math.min(activity.length - 1, i + smoothWindow); j++) {
        sum += activity[j] ?? 0;
        count++;
      }
      smoothed[i] = sum / count;
    }
    return smoothed;
  }
  /**
   * Apply ducking based on voice activity.
   * Implements attack/release envelope shaping with optional look-ahead.
   * The envelope is combined with min(), so multiple trigger tracks each
   * deepen (never relax) the attenuation.
   */
  applyDuckingToEnvelope(envelope, voiceActivity) {
    if (!this.config) return;
    // Target gain while ducked: duckingLevel of 1 means fully silent (gain 0).
    const duckingLevel = 1 - this.config.duckingLevel;
    const attackSamples = Math.floor(this.config.attackTimeMs / 1e3 * this.sampleRate);
    const releaseSamples = Math.floor(this.config.releaseTimeMs / 1e3 * this.sampleRate);
    const lookAheadSamples = this.config.lookAheadMs ? Math.floor(this.config.lookAheadMs / 1e3 * this.sampleRate) : 0;
    let currentGain = 1;
    let releaseCounter = 0;
    for (let i = 0; i < envelope.length; i++) {
      const lookAheadIndex = Math.min(i + lookAheadSamples, voiceActivity.length - 1);
      const activity = voiceActivity[lookAheadIndex];
      if (activity !== void 0 && activity > 0.5) {
        // Voice present: ramp down over the attack time (or snap when
        // attackSamples is 0 — the division yields -Infinity and max() clamps).
        if (currentGain > duckingLevel) {
          currentGain = Math.max(duckingLevel, currentGain - (1 - duckingLevel) / attackSamples);
        } else {
          currentGain = duckingLevel;
        }
        releaseCounter = 0;
      } else if (currentGain < 1) {
        // Hold for 10% of the release time before ramping back up.
        releaseCounter++;
        if (releaseCounter > releaseSamples * 0.1) {
          currentGain = Math.min(1, currentGain + (1 - duckingLevel) / releaseSamples);
        }
      }
      // ?? (not ||): a stored gain of 0 is a valid full duck and must win.
      envelope[i] = Math.min(envelope[i] ?? 1, currentGain);
    }
  }
  /**
   * Apply ducking envelope to a base volume.
   * Returns per-frame volume = baseVolume * envelope gain.
   */
  applyEnvelopeToVolume(baseVolume, envelope) {
    const result = new Float32Array(envelope.length);
    for (let i = 0; i < envelope.length; i++) {
      // ?? (not ||): an envelope gain of 0 must produce silence, not full volume.
      result[i] = baseVolume * (envelope[i] ?? 1);
    }
    return result;
  }
  /**
   * Calculate dynamic range to avoid over-compression.
   * Returns the difference between peak and RMS levels in dB; 0 for empty or
   * fully silent envelopes (where dB is undefined).
   */
  calculateDynamicRange(envelope) {
    let peak = 0;
    let sumSquares = 0;
    for (const value of envelope) {
      peak = Math.max(peak, value);
      sumSquares += value * value;
    }
    if (envelope.length === 0 || peak === 0) {
      // log10(0) would yield -Infinity and a NaN difference.
      return 0;
    }
    const rms = Math.sqrt(sumSquares / envelope.length);
    const peakDb = 20 * Math.log10(peak);
    const rmsDb = 20 * Math.log10(rms);
    return peakDb - rmsDb;
  }
}
|
|
1007
|
+
class AudioComposeWorker {
|
|
1008
|
+
// RPC channel to the spawning (main) thread.
channel;
// AudioMixer instance; created by the initial "configure" message.
mixer = null;
// AudioDucker instance; created alongside the mixer.
ducker = null;
// Mix pipeline produced by mixer.createMixStream(); writable takes mix
// requests, readable yields mixed results.
mixStream = null;
// Connections to other workers
decoderPort = null;
encoderPort = null;
// Track buffer map
trackBuffers = /* @__PURE__ */ new Map();
// Pending resolvers for producers blocked on full track queues.
trackQueueWaiters = /* @__PURE__ */ new Map();
// Set once the mix output has been teed to the encoder + preview streams.
encoderStreamAttached = false;
mixWindowUs = 4e4;
// 40ms window
// Backpressure limit: max decoded segments buffered per track.
maxQueuedSegments = 8;
// Re-entrancy guard for scheduleMix().
mixing = false;
|
|
1023
|
+
// Builds the worker-side RPC channel over this dedicated worker's global
// scope and registers all message handlers.
constructor() {
  this.channel = new WorkerChannel(self, {
    name: "AudioComposeWorker",
    timeout: 3e4 // 30s RPC timeout
  });
  this.setupHandlers();
}
|
|
1030
|
+
setupHandlers() {
|
|
1031
|
+
this.channel.registerHandler("configure", this.handleConfigure.bind(this));
|
|
1032
|
+
this.channel.registerHandler("connect", this.handleConnect.bind(this));
|
|
1033
|
+
this.channel.registerHandler("add_track", this.handleAddTrack.bind(this));
|
|
1034
|
+
this.channel.registerHandler("remove_track", this.handleRemoveTrack.bind(this));
|
|
1035
|
+
this.channel.registerHandler("update_track", this.handleUpdateTrack.bind(this));
|
|
1036
|
+
this.channel.registerHandler("configure_ducking", this.handleConfigureDucking.bind(this));
|
|
1037
|
+
this.channel.registerHandler("get_stats", this.handleGetStats.bind(this));
|
|
1038
|
+
this.channel.registerHandler(WorkerMessageType.Dispose, this.handleDispose.bind(this));
|
|
1039
|
+
}
|
|
1040
|
+
/** Unified connect handler mapping for stream pipeline */
|
|
1041
|
+
// Wires this worker into the pipeline. "upstream" hands us the decoder's
// MessagePort (audio frame streams plus track lifecycle events);
// "downstream" stores the encoder's port, which is consumed lazily by
// attachEncodeStream(). Unknown directions are acknowledged without effect.
async handleConnect(payload) {
  if (payload.direction === "upstream") {
    this.decoderPort = payload.port;
    const decoderChannel = new WorkerChannel(this.decoderPort, {
      name: "AudioCompose-Decoder",
      timeout: 3e4
    });
    // The decoder drives track lifecycle through the same handlers the
    // main thread uses.
    decoderChannel.registerHandler("audio_track:add", this.handleAddTrack.bind(this));
    decoderChannel.registerHandler("audio_track:remove", this.handleRemoveTrack.bind(this));
    decoderChannel.registerHandler("audio_track:update", this.handleUpdateTrack.bind(this));
    decoderChannel.receiveStream(this.handleReceiveStream.bind(this));
    return { success: true };
  }
  if (payload.direction === "downstream") {
    this.encoderPort = payload.port;
    return { success: true };
  }
  return { success: true };
}
|
|
1060
|
+
// Entry point for each decoded audio stream arriving from the decoder.
// Widens the mixer config to accommodate the incoming stream (sample rate is
// replaced on mismatch; channel count only ever grows), lazily attaches the
// encoder output on first stream, then buffers the stream's frames under its
// track id and kicks the mix loop.
async handleReceiveStream(stream, metadata) {
  // Ignore non-audio streams and anything arriving before configure().
  if (metadata?.streamType !== "audio" || !this.mixStream || !this.mixer) {
    return;
  }
  const update = {};
  const currentConfig = this.mixer.getConfig();
  if (typeof metadata?.sampleRate === "number" && metadata.sampleRate > 0) {
    if (!currentConfig.sampleRate || currentConfig.sampleRate !== metadata.sampleRate) {
      update.sampleRate = metadata.sampleRate;
    }
  }
  if (typeof metadata?.numberOfChannels === "number" && metadata.numberOfChannels > 0) {
    // Channel count is monotonic: only widen, never shrink.
    if (!currentConfig.numberOfChannels || metadata.numberOfChannels > currentConfig.numberOfChannels) {
      update.numberOfChannels = metadata.numberOfChannels;
    }
  }
  if (Object.keys(update).length > 0) {
    this.mixer.updateConfig(update);
  }
  // Re-read the config so the outgoing metadata reflects any update above.
  const mixerConfig = this.mixer.getConfig();
  const streamMetadata = {
    ...metadata,
    streamType: "audio",
    sampleRate: mixerConfig.sampleRate,
    numberOfChannels: mixerConfig.numberOfChannels
  };
  // No-op after the first stream (guarded by encoderStreamAttached).
  await this.attachEncodeStream(streamMetadata);
  const trackId = metadata?.trackId ?? metadata?.clipId;
  if (!trackId) {
    console.warn("[AudioComposeWorker] Missing track identifier in audio stream metadata");
    await stream.cancel();
    return;
  }
  await this.bufferTrackStream(trackId, stream, streamMetadata);
  this.scheduleMix();
}
|
|
1096
|
+
/**
|
|
1097
|
+
* Configure audio composer
|
|
1098
|
+
* @param payload.config - Audio composition configuration
|
|
1099
|
+
* @param payload.initial - If true, initialize worker state; otherwise just update config
|
|
1100
|
+
*/
|
|
1101
|
+
/**
 * Configure audio composer.
 * @param payload.config - Audio composition configuration
 * @param payload.initial - If true, initialize worker state; otherwise just update config
 * Throws { code, message } objects (the channel's error convention):
 * NOT_INITIALIZED when called without initial=true before setup, or
 * CONFIG_ERROR wrapping any construction failure.
 */
async handleConfigure(payload) {
  const { config, initial = false } = payload;
  try {
    if (initial) {
      this.channel.state = WorkerState.Ready;
      this.mixer = new AudioMixer(config);
      this.ducker = new AudioDucker(config.sampleRate);
      this.mixStream = this.mixer.createMixStream(this.ducker);
      // Fire-and-forget notification; the RPC reply below is the real ack.
      this.channel.notify("configured", {
        sampleRate: config.sampleRate,
        numberOfChannels: config.numberOfChannels
      });
    } else {
      if (!this.mixer || !this.ducker) {
        throw {
          code: "NOT_INITIALIZED",
          message: "Audio composer not initialized. Call configure with initial=true first"
        };
      }
      // NOTE(review): the non-initial path only validates state; `config`
      // is not applied here — confirm whether updates are routed elsewhere.
    }
    return { success: true };
  } catch (error) {
    throw {
      code: error.code || "CONFIG_ERROR",
      message: error.message
    };
  }
}
|
|
1129
|
+
/**
|
|
1130
|
+
* Connect to decoder worker to receive audio streams
|
|
1131
|
+
*/
|
|
1132
|
+
/**
|
|
1133
|
+
* Add an audio track
|
|
1134
|
+
*/
|
|
1135
|
+
handleAddTrack(payload) {
|
|
1136
|
+
if (!this.mixer) {
|
|
1137
|
+
throw {
|
|
1138
|
+
code: "NOT_CONFIGURED",
|
|
1139
|
+
message: "Mixer not configured"
|
|
1140
|
+
};
|
|
1141
|
+
}
|
|
1142
|
+
const config = this.cloneTrackConfig(payload.config);
|
|
1143
|
+
const track = {
|
|
1144
|
+
id: payload.trackId,
|
|
1145
|
+
clipId: payload.clipId,
|
|
1146
|
+
type: payload.type ?? "other",
|
|
1147
|
+
config
|
|
1148
|
+
};
|
|
1149
|
+
this.mixer.addTrack(track);
|
|
1150
|
+
this.trackBuffers.set(payload.trackId, {
|
|
1151
|
+
clipId: payload.clipId,
|
|
1152
|
+
queue: [],
|
|
1153
|
+
ended: false,
|
|
1154
|
+
config,
|
|
1155
|
+
type: payload.type ?? "other"
|
|
1156
|
+
});
|
|
1157
|
+
this.channel.notify("track_added", {
|
|
1158
|
+
trackId: track.id,
|
|
1159
|
+
trackType: track.type
|
|
1160
|
+
});
|
|
1161
|
+
return { success: true };
|
|
1162
|
+
}
|
|
1163
|
+
/**
|
|
1164
|
+
* Remove an audio track
|
|
1165
|
+
*/
|
|
1166
|
+
handleRemoveTrack(payload) {
|
|
1167
|
+
if (!this.mixer) {
|
|
1168
|
+
throw {
|
|
1169
|
+
code: "NOT_CONFIGURED",
|
|
1170
|
+
message: "Mixer not configured"
|
|
1171
|
+
};
|
|
1172
|
+
}
|
|
1173
|
+
this.mixer.removeTrack(payload.trackId);
|
|
1174
|
+
this.disposeTrackBuffer(payload.trackId);
|
|
1175
|
+
this.channel.notify("track_removed", {
|
|
1176
|
+
trackId: payload.trackId
|
|
1177
|
+
});
|
|
1178
|
+
return { success: true };
|
|
1179
|
+
}
|
|
1180
|
+
/**
|
|
1181
|
+
* Update track configuration
|
|
1182
|
+
*/
|
|
1183
|
+
handleUpdateTrack(payload) {
|
|
1184
|
+
if (!this.mixer) {
|
|
1185
|
+
throw {
|
|
1186
|
+
code: "NOT_CONFIGURED",
|
|
1187
|
+
message: "Mixer not configured"
|
|
1188
|
+
};
|
|
1189
|
+
}
|
|
1190
|
+
this.mixer.updateTrack(payload.trackId, payload.config);
|
|
1191
|
+
const buffer = this.trackBuffers.get(payload.trackId);
|
|
1192
|
+
if (buffer) {
|
|
1193
|
+
if (payload.config.type) {
|
|
1194
|
+
buffer.type = payload.config.type;
|
|
1195
|
+
}
|
|
1196
|
+
this.applyTrackConfigPatch(buffer.config, payload.config);
|
|
1197
|
+
}
|
|
1198
|
+
this.channel.notify("track_updated", {
|
|
1199
|
+
trackId: payload.trackId
|
|
1200
|
+
});
|
|
1201
|
+
return { success: true };
|
|
1202
|
+
}
|
|
1203
|
+
/**
|
|
1204
|
+
* Configure audio ducking
|
|
1205
|
+
*/
|
|
1206
|
+
handleConfigureDucking(config) {
|
|
1207
|
+
if (!this.ducker) {
|
|
1208
|
+
throw {
|
|
1209
|
+
code: "NOT_CONFIGURED",
|
|
1210
|
+
message: "Ducker not configured"
|
|
1211
|
+
};
|
|
1212
|
+
}
|
|
1213
|
+
this.ducker.configure(config);
|
|
1214
|
+
this.channel.notify("ducking_configured", {
|
|
1215
|
+
enabled: config.enabled
|
|
1216
|
+
});
|
|
1217
|
+
return { success: true };
|
|
1218
|
+
}
|
|
1219
|
+
/**
|
|
1220
|
+
* Get mixer statistics
|
|
1221
|
+
*/
|
|
1222
|
+
async handleGetStats() {
|
|
1223
|
+
if (!this.mixer) {
|
|
1224
|
+
return { state: this.channel.state };
|
|
1225
|
+
}
|
|
1226
|
+
return {
|
|
1227
|
+
tracks: this.mixer.tracks,
|
|
1228
|
+
ducking: this.ducker ? {
|
|
1229
|
+
configured: this.ducker !== null
|
|
1230
|
+
} : null,
|
|
1231
|
+
state: this.channel.state
|
|
1232
|
+
};
|
|
1233
|
+
}
|
|
1234
|
+
/**
|
|
1235
|
+
* Dispose worker and cleanup resources
|
|
1236
|
+
*/
|
|
1237
|
+
async handleDispose() {
|
|
1238
|
+
this.mixer = null;
|
|
1239
|
+
this.ducker = null;
|
|
1240
|
+
this.mixStream = null;
|
|
1241
|
+
this.decoderPort?.close();
|
|
1242
|
+
this.decoderPort = null;
|
|
1243
|
+
this.encoderPort?.close();
|
|
1244
|
+
this.encoderPort = null;
|
|
1245
|
+
this.channel.state = WorkerState.Disposed;
|
|
1246
|
+
return { success: true };
|
|
1247
|
+
}
|
|
1248
|
+
// Tees the mixer's output into two AudioData streams — one sent to the
// encoder worker, one to the main thread for preview — exactly once
// (guarded by encoderStreamAttached). No-op until both the mix stream and
// the encoder port exist.
async attachEncodeStream(metadata) {
  if (!this.mixStream || !this.encoderPort || this.encoderStreamAttached || !this.mixer) {
    return;
  }
  const encoderChannel = new WorkerChannel(this.encoderPort, {
    name: "AudioCompose-Encoder",
    timeout: 3e4
  });
  // Advertise the mixer's (possibly updated) format in the stream metadata.
  const mixerConfig = this.mixer?.getConfig();
  const streamMetadata = {
    ...metadata,
    streamType: "audio",
    sampleRate: mixerConfig?.sampleRate ?? metadata?.sampleRate,
    numberOfChannels: mixerConfig?.numberOfChannels ?? metadata?.numberOfChannels
  };
  const [encoderResultStream, previewResultStream] = this.mixStream.readable.tee();
  // Unwraps mix results ({ audioData, ... }) into a stream of raw AudioData.
  const createAudioDataStream = (source) => {
    return new ReadableStream({
      start: (controller) => {
        const reader = source.getReader();
        const pump = async () => {
          const { done, value } = await reader.read();
          if (done) {
            reader.releaseLock();
            controller.close();
            return;
          }
          try {
            controller.enqueue(value.audioData);
          } catch (error) {
            controller.error(error);
            reader.releaseLock();
            return;
          }
          await pump();
        };
        pump().catch((error) => {
          reader.releaseLock();
          controller.error(error);
        });
      }
    });
  };
  const encoderStream = createAudioDataStream(encoderResultStream);
  const previewStream = createAudioDataStream(previewResultStream);
  await encoderChannel.sendStream(encoderStream, streamMetadata);
  // NOTE(review): the preview sendStream is intentionally not awaited —
  // preview delivery must not block encoding; confirm errors surface elsewhere.
  this.channel.sendStream(previewStream, streamMetadata);
  this.encoderStreamAttached = true;
  // Flush anything buffered while the encoder was not yet attached.
  await this.scheduleMix();
}
|
|
1298
|
+
// Pumps one track's decoded AudioData stream into that track's segment
// queue, applying backpressure: when the queue reaches maxQueuedSegments,
// reading pauses until consumeSegment() frees a slot (via waitForQueueSpace).
// Cancels the stream if the track is unknown. Returns once the pump is
// started; the pump itself runs until the stream ends or errors.
async bufferTrackStream(trackId, stream, metadata) {
  const buffer = this.trackBuffers.get(trackId);
  if (!buffer) {
    // Track was removed (or never added) — drop the stream.
    await stream.cancel();
    return;
  }
  const reader = stream.getReader();
  const process = async () => {
    while (true) {
      if (buffer.queue.length >= this.maxQueuedSegments) {
        await this.waitForQueueSpace(trackId);
      }
      const { done, value } = await reader.read();
      if (done) {
        buffer.ended = true;
        reader.releaseLock();
        return;
      }
      buffer.queue.push({
        audioData: value,
        timestampUs: value.timestamp ?? 0,
        // Fall back to deriving the duration from the frame count when the
        // AudioData carries none (48kHz assumed when metadata lacks a rate).
        durationUs: value.duration ?? Math.round(value.numberOfFrames / (metadata.sampleRate ?? 48e3) * 1e6)
      });
      this.scheduleMix();
    }
  };
  // Fire-and-forget pump; errors mark the track ended and are logged.
  process().catch((error) => {
    buffer.ended = true;
    reader.releaseLock();
    console.error("[AudioComposeWorker] Track stream error:", error);
  });
}
|
|
1330
|
+
// Drives one mix iteration: picks the next time window, assembles a mix
// request from buffered segments, and writes it into the mix stream.
// `this.mixing` is a re-entrancy guard (concurrent calls return early);
// after each pass, another pass is queued on the microtask queue while any
// track still has buffered audio.
async scheduleMix() {
  if (this.mixing || !this.mixStream || !this.encoderStreamAttached || !this.mixer) {
    return;
  }
  const window = this.computeNextWindow();
  if (!window) {
    // Nothing buffered anywhere yet.
    return;
  }
  this.mixing = true;
  try {
    const request = this.buildMixRequest(window.timeUs, window.durationUs);
    if (!request) {
      // No consumable segments; the finally block clears the guard.
      return;
    }
    const writer = this.mixStream.writable.getWriter();
    try {
      await writer.write(request);
    } finally {
      // Always release so the next iteration can acquire the writer.
      writer.releaseLock();
    }
  } catch (error) {
    console.error("[AudioComposeWorker] Failed to enqueue mix request:", error);
  } finally {
    this.mixing = false;
    if (this.hasBufferedAudio()) {
      queueMicrotask(() => {
        void this.scheduleMix();
      });
    }
  }
}
|
|
1361
|
+
computeNextWindow() {
|
|
1362
|
+
let earliest = null;
|
|
1363
|
+
for (const buffer of this.trackBuffers.values()) {
|
|
1364
|
+
if (buffer.queue.length === 0) {
|
|
1365
|
+
continue;
|
|
1366
|
+
}
|
|
1367
|
+
const firstFrame = buffer.queue[0];
|
|
1368
|
+
if (!firstFrame) {
|
|
1369
|
+
continue;
|
|
1370
|
+
}
|
|
1371
|
+
const ts = firstFrame.timestampUs;
|
|
1372
|
+
if (earliest === null || ts < earliest) {
|
|
1373
|
+
earliest = ts;
|
|
1374
|
+
}
|
|
1375
|
+
}
|
|
1376
|
+
if (earliest === null) {
|
|
1377
|
+
return null;
|
|
1378
|
+
}
|
|
1379
|
+
return {
|
|
1380
|
+
timeUs: earliest,
|
|
1381
|
+
durationUs: this.mixWindowUs
|
|
1382
|
+
};
|
|
1383
|
+
}
|
|
1384
|
+
// Assembles a mix request for the given window by consuming at most one
// buffered segment per track. The request duration is clamped down to the
// shortest consumed segment (never above mixWindowUs). Returns null when no
// track contributed a segment or the mixer is gone.
buildMixRequest(timeUs, _durationUs) {
  if (!this.mixer) {
    return null;
  }
  const tracks = [];
  let resolvedDurationUs = this.mixWindowUs;
  for (const [trackId, buffer] of this.trackBuffers.entries()) {
    const segment = this.consumeSegment(trackId, buffer);
    if (!segment) {
      continue;
    }
    tracks.push({
      trackId,
      clipId: buffer.clipId,
      audioData: segment.audioData,
      config: segment.config,
      type: buffer.type,
      sampleRate: this.mixer.config.sampleRate,
      numberOfChannels: this.mixer.config.numberOfChannels
    });
    // Shrink the window to the shortest segment so no track is over-read.
    resolvedDurationUs = Math.min(resolvedDurationUs, segment.durationUs ?? this.mixWindowUs);
  }
  if (tracks.length === 0) {
    return null;
  }
  return {
    tracks,
    timeUs,
    durationUs: resolvedDurationUs
  };
}
|
|
1415
|
+
consumeSegment(trackId, buffer) {
|
|
1416
|
+
if (buffer.queue.length === 0) {
|
|
1417
|
+
return null;
|
|
1418
|
+
}
|
|
1419
|
+
const head = buffer.queue[0];
|
|
1420
|
+
if (!head) {
|
|
1421
|
+
return null;
|
|
1422
|
+
}
|
|
1423
|
+
const queueItem = buffer.queue.shift();
|
|
1424
|
+
if (!queueItem) {
|
|
1425
|
+
return null;
|
|
1426
|
+
}
|
|
1427
|
+
this.resolveQueueWaiter(trackId);
|
|
1428
|
+
return {
|
|
1429
|
+
audioData: queueItem.audioData,
|
|
1430
|
+
config: buffer.config,
|
|
1431
|
+
sampleRate: this.mixer.config.sampleRate,
|
|
1432
|
+
numberOfChannels: this.mixer.config.numberOfChannels,
|
|
1433
|
+
durationUs: queueItem.durationUs
|
|
1434
|
+
};
|
|
1435
|
+
}
|
|
1436
|
+
hasBufferedAudio() {
|
|
1437
|
+
for (const buffer of this.trackBuffers.values()) {
|
|
1438
|
+
if (buffer.queue.length > 0) {
|
|
1439
|
+
return true;
|
|
1440
|
+
}
|
|
1441
|
+
}
|
|
1442
|
+
return false;
|
|
1443
|
+
}
|
|
1444
|
+
cloneTrackConfig(config) {
|
|
1445
|
+
return {
|
|
1446
|
+
startTimeUs: config.startTimeUs,
|
|
1447
|
+
durationUs: config.durationUs,
|
|
1448
|
+
volume: config.volume,
|
|
1449
|
+
fadeIn: config.fadeIn ? { ...config.fadeIn } : void 0,
|
|
1450
|
+
fadeOut: config.fadeOut ? { ...config.fadeOut } : void 0,
|
|
1451
|
+
effects: config.effects ? config.effects.map((effect) => ({
|
|
1452
|
+
type: effect.type,
|
|
1453
|
+
params: effect.params ?? {}
|
|
1454
|
+
})) : void 0,
|
|
1455
|
+
duckingTag: config.duckingTag
|
|
1456
|
+
};
|
|
1457
|
+
}
|
|
1458
|
+
applyTrackConfigPatch(target, patch) {
|
|
1459
|
+
if (patch.fadeIn) {
|
|
1460
|
+
target.fadeIn = { ...patch.fadeIn };
|
|
1461
|
+
} else if (patch.fadeIn === null) {
|
|
1462
|
+
target.fadeIn = void 0;
|
|
1463
|
+
}
|
|
1464
|
+
if (patch.fadeOut) {
|
|
1465
|
+
target.fadeOut = { ...patch.fadeOut };
|
|
1466
|
+
} else if (patch.fadeOut === null) {
|
|
1467
|
+
target.fadeOut = void 0;
|
|
1468
|
+
}
|
|
1469
|
+
if (patch.effects) {
|
|
1470
|
+
target.effects = patch.effects.map((effect) => ({
|
|
1471
|
+
type: effect.type,
|
|
1472
|
+
params: effect.params ?? {}
|
|
1473
|
+
}));
|
|
1474
|
+
}
|
|
1475
|
+
if (typeof patch.volume === "number") {
|
|
1476
|
+
target.volume = patch.volume;
|
|
1477
|
+
}
|
|
1478
|
+
if (patch.startTimeUs !== void 0) {
|
|
1479
|
+
target.startTimeUs = patch.startTimeUs;
|
|
1480
|
+
}
|
|
1481
|
+
if (patch.durationUs !== void 0) {
|
|
1482
|
+
target.durationUs = patch.durationUs;
|
|
1483
|
+
}
|
|
1484
|
+
if (patch.duckingTag !== void 0) {
|
|
1485
|
+
target.duckingTag = patch.duckingTag;
|
|
1486
|
+
}
|
|
1487
|
+
}
|
|
1488
|
+
async waitForQueueSpace(trackId) {
|
|
1489
|
+
const existing = this.trackQueueWaiters.get(trackId);
|
|
1490
|
+
if (existing && existing.length === 0) {
|
|
1491
|
+
this.trackQueueWaiters.delete(trackId);
|
|
1492
|
+
}
|
|
1493
|
+
await new Promise((resolve) => {
|
|
1494
|
+
const waiters = this.trackQueueWaiters.get(trackId);
|
|
1495
|
+
if (waiters) {
|
|
1496
|
+
waiters.push(resolve);
|
|
1497
|
+
} else {
|
|
1498
|
+
this.trackQueueWaiters.set(trackId, [resolve]);
|
|
1499
|
+
}
|
|
1500
|
+
});
|
|
1501
|
+
}
|
|
1502
|
+
resolveQueueWaiter(trackId) {
|
|
1503
|
+
const waiters = this.trackQueueWaiters.get(trackId);
|
|
1504
|
+
if (!waiters || waiters.length === 0) {
|
|
1505
|
+
return;
|
|
1506
|
+
}
|
|
1507
|
+
const resolve = waiters.shift();
|
|
1508
|
+
resolve?.();
|
|
1509
|
+
if (!waiters.length) {
|
|
1510
|
+
this.trackQueueWaiters.delete(trackId);
|
|
1511
|
+
}
|
|
1512
|
+
}
|
|
1513
|
+
disposeTrackBuffer(trackId) {
|
|
1514
|
+
const buffer = this.trackBuffers.get(trackId);
|
|
1515
|
+
if (!buffer) {
|
|
1516
|
+
return;
|
|
1517
|
+
}
|
|
1518
|
+
for (const item of buffer.queue) {
|
|
1519
|
+
try {
|
|
1520
|
+
item.audioData.close();
|
|
1521
|
+
} catch (error) {
|
|
1522
|
+
console.warn("[AudioComposeWorker] Failed to close AudioData on disposal", error);
|
|
1523
|
+
}
|
|
1524
|
+
}
|
|
1525
|
+
this.trackBuffers.delete(trackId);
|
|
1526
|
+
const waiters = this.trackQueueWaiters.get(trackId);
|
|
1527
|
+
if (waiters) {
|
|
1528
|
+
waiters.forEach((resolve) => resolve());
|
|
1529
|
+
this.trackQueueWaiters.delete(trackId);
|
|
1530
|
+
}
|
|
1531
|
+
}
|
|
1532
|
+
}
|
|
1533
|
+
// Module bootstrap: single worker instance that drives all audio composition
// in this worker thread.
const worker = new AudioComposeWorker();
// Best-effort cleanup when the worker context is torn down.
// NOTE(review): dedicated workers do not reliably fire "beforeunload" —
// confirm handleDispose() is also reachable via an explicit dispose message.
self.addEventListener("beforeunload", () => {
  // Bracket access reaches the (TypeScript-private) dispose handler.
  worker["handleDispose"]();
});
//# sourceMappingURL=audio-compose.worker-nGVvHD5Q.js.map
|