ibi-ai-talk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -0
- package/dist/demo.html +5 -0
- package/dist/index.common.js +2653 -0
- package/dist/index.common.js.map +1 -0
- package/dist/index.umd.js +2665 -0
- package/dist/index.umd.js.map +1 -0
- package/dist/index.umd.min.js +2 -0
- package/dist/index.umd.min.js.map +1 -0
- package/index.js +8 -0
- package/package.json +18 -0
- package/src/index.vue +57 -0
- package/src/utils/blocking-queue.js +98 -0
- package/src/utils/controller.js +34 -0
- package/src/utils/manager.js +39 -0
- package/src/utils/opus-codec.js +169 -0
- package/src/utils/ota-connector.js +115 -0
- package/src/utils/player.js +262 -0
- package/src/utils/recorder.js +420 -0
- package/src/utils/stream-context.js +184 -0
- package/src/utils/tools.js +517 -0
- package/src/utils/websocket.js +565 -0
|
@@ -0,0 +1,420 @@
|
|
|
1
|
+
// 音频录制模块
|
|
2
|
+
import { initOpusEncoder } from "./opus-codec.js";
|
|
3
|
+
import { getAudioPlayer } from "./player.js";
|
|
4
|
+
|
|
5
|
+
// Audio recorder: captures microphone PCM, packs it into 960-sample frames,
// encodes each frame to Opus and streams the encoded frames over a WebSocket.
// Prefers an AudioWorklet for capture and falls back to ScriptProcessorNode.
export class AudioRecorder {
  constructor() {
    this.isRecording = false;
    this.audioContext = null; // shared AudioContext obtained from the player module
    this.analyser = null; // AnalyserNode exposed for visualizations
    this.audioProcessor = null; // AudioWorkletNode or ScriptProcessorNode
    this.audioProcessorType = null; // "worklet" | "processor"
    this.audioSource = null; // MediaStreamAudioSourceNode wrapping the mic stream
    this.mediaStream = null; // raw getUserMedia stream, kept so stop() can release the mic
    this.opusEncoder = null;
    this.pcmDataBuffer = new Int16Array(); // leftover PCM waiting for a full 960-sample frame
    this.audioBuffers = []; // encoded Opus frames (ArrayBuffers) accumulated this session
    this.totalAudioSize = 0; // total encoded bytes this session
    this.visualizationRequest = null; // requestAnimationFrame handle, if a visualizer runs
    this.recordingTimer = null; // setInterval handle driving onRecordingStart ticks
    this.websocket = null;

    // Optional callbacks wired up by the embedding application.
    this.onRecordingStart = null; // invoked every 100ms with the elapsed seconds
    this.onRecordingStop = null; // invoked once after a successful stop()
    this.onVisualizerUpdate = null;
  }

  // Inject the WebSocket instance used to stream Opus frames and control messages.
  setWebSocket(ws) {
    this.websocket = ws;
  }

  // Fetch the shared AudioContext owned by the audio player singleton.
  getAudioContext() {
    const audioPlayer = getAudioPlayer();
    return audioPlayer.getAudioContext();
  }

  // Lazily create the Opus encoder; returns it (or a falsy value on failure).
  initEncoder() {
    if (!this.opusEncoder) {
      this.opusEncoder = initOpusEncoder();
    }
    return this.opusEncoder;
  }

  // Source text for the AudioWorkletProcessor, loaded via a Blob URL.
  // It converts Float32 input samples to Int16 and posts 960-sample frames
  // back to the main thread.
  getAudioProcessorCode() {
    return `
      class AudioRecorderProcessor extends AudioWorkletProcessor {
        constructor() {
          super();
          this.buffers = [];
          this.frameSize = 960;
          this.buffer = new Int16Array(this.frameSize);
          this.bufferIndex = 0;
          this.isRecording = false;

          this.port.onmessage = (event) => {
            if (event.data.command === 'start') {
              this.isRecording = true;
              this.port.postMessage({ type: 'status', status: 'started' });
            } else if (event.data.command === 'stop') {
              this.isRecording = false;

              if (this.bufferIndex > 0) {
                const finalBuffer = this.buffer.slice(0, this.bufferIndex);
                this.port.postMessage({
                  type: 'buffer',
                  buffer: finalBuffer
                });
                this.bufferIndex = 0;
              }

              this.port.postMessage({ type: 'status', status: 'stopped' });
            }
          };
        }

        process(inputs, outputs, parameters) {
          if (!this.isRecording) return true;

          const input = inputs[0][0];
          if (!input) return true;

          for (let i = 0; i < input.length; i++) {
            if (this.bufferIndex >= this.frameSize) {
              this.port.postMessage({
                type: 'buffer',
                buffer: this.buffer.slice(0)
              });
              this.bufferIndex = 0;
            }

            this.buffer[this.bufferIndex++] = Math.max(-32768, Math.min(32767, Math.floor(input[i] * 32767)));
          }

          return true;
        }
      }

      registerProcessor('audio-recorder-processor', AudioRecorderProcessor);
    `;
  }

  // Create the capture node. Tries AudioWorklet first; on any failure falls
  // back to ScriptProcessorNode. Returns { node, type } or null.
  async createAudioProcessor() {
    this.audioContext = this.getAudioContext();

    try {
      if (this.audioContext.audioWorklet) {
        const blob = new Blob([this.getAudioProcessorCode()], {
          type: "application/javascript",
        });
        const url = URL.createObjectURL(blob);
        await this.audioContext.audioWorklet.addModule(url);
        URL.revokeObjectURL(url); // module is loaded; the blob URL is no longer needed

        const audioProcessor = new AudioWorkletNode(
          this.audioContext,
          "audio-recorder-processor"
        );

        audioProcessor.port.onmessage = (event) => {
          if (event.data.type === "buffer") {
            this.processPCMBuffer(event.data.buffer);
          }
        };

        console.log("使用AudioWorklet处理音频", "success");

        // Route through a muted gain node so the graph keeps pulling samples
        // without echoing the microphone to the speakers.
        const silent = this.audioContext.createGain();
        silent.gain.value = 0;
        audioProcessor.connect(silent);
        silent.connect(this.audioContext.destination);
        return { node: audioProcessor, type: "worklet" };
      } else {
        console.log(
          "AudioWorklet不可用,使用ScriptProcessorNode作为回退方案",
          "warning"
        );
        return this.createScriptProcessor();
      }
    } catch (error) {
      console.log(
        `创建音频处理器失败: ${error.message},尝试回退方案`,
        "error"
      );
      return this.createScriptProcessor();
    }
  }

  // Legacy fallback using the deprecated ScriptProcessorNode API.
  // Returns { node, type: "processor" } or null when even this fails.
  createScriptProcessor() {
    try {
      const frameSize = 4096;
      const scriptProcessor = this.audioContext.createScriptProcessor(
        frameSize,
        1,
        1
      );

      scriptProcessor.onaudioprocess = (event) => {
        if (!this.isRecording) return;

        const input = event.inputBuffer.getChannelData(0);
        const buffer = new Int16Array(input.length);

        // Float32 [-1, 1] -> clamped Int16, same conversion as the worklet.
        for (let i = 0; i < input.length; i++) {
          buffer[i] = Math.max(
            -32768,
            Math.min(32767, Math.floor(input[i] * 32767))
          );
        }

        this.processPCMBuffer(buffer);
      };

      // Same muted-gain trick as the worklet path: keep the node pulled
      // without audible feedback.
      const silent = this.audioContext.createGain();
      silent.gain.value = 0;
      scriptProcessor.connect(silent);
      silent.connect(this.audioContext.destination);

      console.log("使用ScriptProcessorNode作为回退方案成功", "warning");
      return { node: scriptProcessor, type: "processor" };
    } catch (fallbackError) {
      console.log(`回退方案也失败: ${fallbackError.message}`, "error");
      return null;
    }
  }

  // Append incoming Int16 PCM to the frame buffer and encode/send every
  // complete 960-sample frame; the remainder stays buffered for the next call.
  processPCMBuffer(buffer) {
    if (!this.isRecording) return;

    const newBuffer = new Int16Array(this.pcmDataBuffer.length + buffer.length);
    newBuffer.set(this.pcmDataBuffer);
    newBuffer.set(buffer, this.pcmDataBuffer.length);
    this.pcmDataBuffer = newBuffer;

    const samplesPerFrame = 960; // 60ms at 16kHz, the Opus frame size used here

    while (this.pcmDataBuffer.length >= samplesPerFrame) {
      const frameData = this.pcmDataBuffer.slice(0, samplesPerFrame);
      this.pcmDataBuffer = this.pcmDataBuffer.slice(samplesPerFrame);

      this.encodeAndSendOpus(frameData);
    }
  }

  // Encode one PCM frame to Opus, record it and stream it over the WebSocket.
  // Called with no argument it flushes the residual pcmDataBuffer, zero-padding
  // a short tail up to one full 960-sample frame.
  encodeAndSendOpus(pcmData = null) {
    if (!this.opusEncoder) {
      console.log("Opus编码器未初始化", "error");
      return;
    }

    try {
      if (pcmData) {
        const opusData = this.opusEncoder.encode(pcmData);
        if (opusData && opusData.length > 0) {
          this.audioBuffers.push(opusData.buffer);
          this.totalAudioSize += opusData.length;

          if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
            try {
              this.websocket.send(opusData.buffer);
            } catch (error) {
              console.log(`WebSocket发送错误: ${error.message}`, "error");
            }
          }
        } else {
          // BUG FIX: this branch called a bare `log(...)` which is undefined in
          // this module (ReferenceError, silently swallowed by the catch below).
          console.log("Opus编码失败,无有效数据返回", "error");
        }
      } else {
        // Flush mode: encode whatever is left in the frame buffer.
        if (this.pcmDataBuffer.length > 0) {
          const samplesPerFrame = 960;
          if (this.pcmDataBuffer.length < samplesPerFrame) {
            const paddedBuffer = new Int16Array(samplesPerFrame);
            paddedBuffer.set(this.pcmDataBuffer);
            this.encodeAndSendOpus(paddedBuffer);
          } else {
            this.encodeAndSendOpus(
              this.pcmDataBuffer.slice(0, samplesPerFrame)
            );
          }
          this.pcmDataBuffer = new Int16Array(0);
        }
      }
    } catch (error) {
      console.log(`Opus编码错误: ${error.message}`, "error");
    }
  }

  // Start recording: acquires the microphone, wires up the capture graph,
  // notifies the server and starts the progress timer.
  // Returns true on success, false on any failure.
  async start() {
    try {
      if (!this.initEncoder()) {
        console.log("无法启动录音: Opus编码器初始化失败", "error");
        return false;
      }

      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          sampleRate: 16000,
          channelCount: 1,
          latency: { ideal: 0.02, max: 0.05 },
          // Chrome-specific extension constraints (non-standard, may change)
          googNoiseSuppression: true, // enable Chrome noise suppression
          googNoiseSuppression2: 3, // suppression level 1-3 (version dependent)
          googAutoGainControl: true, // automatic gain control
          googHighpassFilter: true, // high-pass filter for low-frequency noise
        },
      });
      // BUG FIX: keep the stream so stop() can release the microphone;
      // previously it went out of scope and the mic was never turned off.
      this.mediaStream = stream;

      this.audioContext = this.getAudioContext();

      if (this.audioContext.state === "suspended") {
        await this.audioContext.resume();
      }

      const processorResult = await this.createAudioProcessor();
      if (!processorResult) {
        console.log("无法创建音频处理器", "error");
        this.releaseMediaStream();
        return false;
      }

      this.audioProcessor = processorResult.node;
      this.audioProcessorType = processorResult.type;

      this.audioSource = this.audioContext.createMediaStreamSource(stream);
      this.analyser = this.audioContext.createAnalyser();
      this.analyser.fftSize = 2048;

      this.audioSource.connect(this.analyser);
      this.audioSource.connect(this.audioProcessor);

      // Reset per-session state.
      this.pcmDataBuffer = new Int16Array();
      this.audioBuffers = [];
      this.totalAudioSize = 0;
      this.isRecording = true;

      if (this.audioProcessorType === "worklet" && this.audioProcessor.port) {
        this.audioProcessor.port.postMessage({ command: "start" });
      }

      // Tell the server that listening has started.
      if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
        const listenMessage = {
          type: "listen",
          mode: localStorage.getItem("listenMode") || "wakeup",
          state: "start",
        };

        console.log(
          `发送录音开始消息: ${JSON.stringify(listenMessage)}`,
          "info"
        );
        this.websocket.send(JSON.stringify(listenMessage));
      } else {
        console.log("WebSocket未连接,无法发送开始消息", "error");
        // BUG FIX: previously this path returned false while leaving
        // isRecording === true and the microphone live; clean up instead.
        this.isRecording = false;
        this.releaseMediaStream();
        return false;
      }

      // Drive the elapsed-time callback at 10Hz.
      let recordingSeconds = 0;
      this.recordingTimer = setInterval(() => {
        recordingSeconds += 0.1;
        if (this.onRecordingStart) {
          this.onRecordingStart(recordingSeconds);
        }
      }, 100);

      console.log("开始PCM直接录音", "success");
      return true;
    } catch (error) {
      console.log(`直接录音启动错误: ${error.message}`, "error");
      this.isRecording = false;
      this.releaseMediaStream();
      return false;
    }
  }

  // Stop every track of the captured stream and drop the reference,
  // turning the browser's microphone indicator off.
  releaseMediaStream() {
    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach((track) => track.stop());
      this.mediaStream = null;
    }
  }

  // Stop recording: tears down the capture graph, flushes remaining PCM,
  // signals end-of-stream to the server and fires onRecordingStop.
  // Returns true on success, false if not recording or on error.
  stop() {
    if (!this.isRecording) return false;

    try {
      this.isRecording = false;

      if (this.audioProcessor) {
        if (this.audioProcessorType === "worklet" && this.audioProcessor.port) {
          this.audioProcessor.port.postMessage({ command: "stop" });
        }

        this.audioProcessor.disconnect();
        this.audioProcessor = null;
      }

      if (this.audioSource) {
        this.audioSource.disconnect();
        this.audioSource = null;
      }

      // BUG FIX: actually release the microphone (tracks were never stopped).
      this.releaseMediaStream();

      if (this.visualizationRequest) {
        cancelAnimationFrame(this.visualizationRequest);
        this.visualizationRequest = null;
      }

      if (this.recordingTimer) {
        clearInterval(this.recordingTimer);
        this.recordingTimer = null;
      }

      // Encode and send any residual PCM samples.
      this.encodeAndSendOpus();

      // Send the end-of-stream marker followed by the stop control message.
      if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
        const emptyOpusFrame = new Uint8Array(0);
        this.websocket.send(emptyOpusFrame);

        const stopMessage = {
          type: "listen",
          mode: localStorage.getItem("listenMode") || "wakeup",
          state: "stop",
        };

        this.websocket.send(JSON.stringify(stopMessage));
        console.log("已发送录音停止信号", "info");
      }

      if (this.onRecordingStop) {
        this.onRecordingStop();
      }

      console.log("停止PCM直接录音", "success");
      return true;
    } catch (error) {
      console.log(`直接录音停止错误: ${error.message}`, "error");
      return false;
    }
  }

  // Expose the AnalyserNode (null until start() has run) for visualizers.
  getAnalyser() {
    return this.analyser;
  }
}
|
411
|
+
|
|
412
|
+
// Module-level singleton: a single shared AudioRecorder for the whole app.
let audioRecorderInstance = null;

// Return the shared AudioRecorder, creating it on first access.
export function getAudioRecorder() {
  if (audioRecorderInstance === null) {
    audioRecorderInstance = new AudioRecorder();
  }
  return audioRecorderInstance;
}
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
import BlockingQueue from "./blocking-queue.js";
|
|
2
|
+
|
|
3
|
+
// Streaming audio playback context: receives raw Opus frames, decodes them to
// Float32 PCM via the injected decoder, and schedules gapless playback chunks
// on the Web Audio destination. Producer/consumer handoff is done through two
// BlockingQueue instances (project-local; assumed to block in dequeue() until
// data is available — TODO confirm against blocking-queue.js).
export class StreamingContext {
  constructor(
    opusDecoder,
    audioContext,
    sampleRate,
    channels,
    minAudioDuration
  ) {
    this.opusDecoder = opusDecoder;
    this.audioContext = audioContext;

    // Audio parameters
    this.sampleRate = sampleRate;
    this.channels = channels;
    this.minAudioDuration = minAudioDuration; // initial-buffering duration, in seconds

    // Queues and playback state
    this.queue = []; // decoded PCM samples currently being played
    this.activeQueue = new BlockingQueue(); // decoded PCM samples staged for playback
    this.pendingAudioBufferQueue = []; // encoded frames currently being decoded
    this.audioBufferQueue = new BlockingQueue(); // encoded frames awaiting decode
    this.playing = false; // whether playback is in progress
    this.endOfStream = false; // whether the end-of-stream signal was received
    this.source = null; // current AudioBufferSourceNode
    this.totalSamples = 0; // running total of decoded samples
    this.lastPlayTime = 0; // AudioContext time of the last scheduled chunk
  }

  // Enqueue incoming encoded audio. NOTE(review): `item` is spread into
  // enqueue(), so it is expected to be an iterable of frames — confirm the
  // caller's shape against websocket.js.
  pushAudioBuffer(item) {
    this.audioBufferQueue.enqueue(...item);
  }

  // Move all queued encoded frames into pendingAudioBufferQueue.
  // Single consumer: while audioBufferQueue keeps being refilled this
  // swap-and-replace is safe without extra locking.
  async getPendingAudioBufferQueue() {
    // Atomic swap + reset: take everything, start a fresh queue.
    [this.pendingAudioBufferQueue, this.audioBufferQueue] = [
      await this.audioBufferQueue.dequeue(),
      new BlockingQueue(),
    ];
  }

  // Pull decoded PCM samples from activeQueue into the playback queue.
  // Requests at least enough samples to reach `minSamples` (minimum 1 so the
  // dequeue still blocks for fresh data when the queue is already full enough).
  // Single consumer, same swap-and-replace pattern as above.
  async getQueue(minSamples) {
    let TepArray = [];
    const num =
      minSamples - this.queue.length > 0 ? minSamples - this.queue.length : 1;
    // Atomic swap + reset
    [TepArray, this.activeQueue] = [
      await this.activeQueue.dequeue(num),
      new BlockingQueue(),
    ];
    this.queue.push(...TepArray);
  }

  // Convert Int16 PCM samples to Float32 in [-1, 1].
  convertInt16ToFloat32(int16Data) {
    const float32Data = new Float32Array(int16Data.length);
    for (let i = 0; i < int16Data.length; i++) {
      // Map [-32768, 32767] to [-1, 1]; divide by 32768.0 uniformly to avoid
      // asymmetric distortion.
      float32Data[i] = int16Data[i] / 32768.0;
    }
    return float32Data;
  }

  // Decoder loop: runs forever, draining pendingAudioBufferQueue, decoding each
  // Opus frame to Int16, converting to Float32 and feeding activeQueue.
  // Blocks in getPendingAudioBufferQueue() between rounds. Never returns once
  // started (infinite loop by design).
  async decodeOpusFrames() {
    if (!this.opusDecoder) {
      console.log("Opus解码器未初始化,无法解码", "error");
      return;
    } else {
      console.log("Opus解码器启动", "info");
    }

    while (true) {
      let decodedSamples = [];
      for (const frame of this.pendingAudioBufferQueue) {
        try {
          // Decode one Opus frame; a failed frame is logged and skipped.
          const frameData = this.opusDecoder.decode(frame);
          if (frameData && frameData.length > 0) {
            // Convert to Float32
            const floatData = this.convertInt16ToFloat32(frameData);
            // Explicit loop instead of spread to avoid call-stack limits on
            // large sample arrays.
            for (let i = 0; i < floatData.length; i++) {
              decodedSamples.push(floatData[i]);
            }
          }
        } catch (error) {
          console.log("Opus解码失败: " + error.message, "error");
        }
      }

      if (decodedSamples.length > 0) {
        // Explicit loop instead of spread (same stack-size concern as above).
        for (let i = 0; i < decodedSamples.length; i++) {
          this.activeQueue.enqueue(decodedSamples[i]);
        }
        this.totalSamples += decodedSamples.length;
      } else {
        console.log("没有成功解码的样本", "warning");
      }
      // Block until the next batch of encoded frames arrives.
      await this.getPendingAudioBufferQueue();
    }
  }

  // Playback loop: waits for an initial buffer, then repeatedly schedules
  // ~120ms chunks back-to-back on the AudioContext timeline for gapless
  // output. Never returns once started (infinite loop by design).
  async startPlaying() {
    let scheduledEndTime = this.audioContext.currentTime; // end time of the last scheduled chunk

    while (true) {
      // Initial buffering: wait for enough samples before starting.
      // NOTE(review): the *2 factor looks like headroom beyond
      // minAudioDuration — confirm intent.
      const minSamples = this.sampleRate * this.minAudioDuration * 2;
      if (!this.playing && this.queue.length < minSamples) {
        await this.getQueue(minSamples);
      }
      this.playing = true;

      // Drain the queue one small chunk at a time.
      while (this.playing && this.queue.length > 0) {
        // Play 120ms of audio per chunk (two Opus packets' worth).
        const playDuration = 0.12;
        const targetSamples = Math.floor(this.sampleRate * playDuration);
        const actualSamples = Math.min(this.queue.length, targetSamples);

        if (actualSamples === 0) break;

        const currentSamples = this.queue.splice(0, actualSamples);
        const audioBuffer = this.audioContext.createBuffer(
          this.channels,
          currentSamples.length,
          this.sampleRate
        );
        audioBuffer.copyToChannel(new Float32Array(currentSamples), 0);

        // Create the source node for this chunk.
        this.source = this.audioContext.createBufferSource();
        this.source.buffer = audioBuffer;

        // Schedule precisely: start where the previous chunk ends, or now if
        // we have fallen behind.
        const currentTime = this.audioContext.currentTime;
        const startTime = Math.max(scheduledEndTime, currentTime);

        // Connect directly to the output.
        this.source.connect(this.audioContext.destination);

        this.source.start(startTime);

        // Advance the schedule for the next chunk.
        const duration = audioBuffer.duration;
        scheduledEndTime = startTime + duration;
        this.lastPlayTime = startTime;

        // If the queue ran low, go refill before scheduling more.
        if (this.queue.length < targetSamples) {
          break;
        }
      }

      // Wait for more decoded samples.
      await this.getQueue(minSamples);
    }
  }
}
|
|
168
|
+
|
|
169
|
+
// Factory helper: build a StreamingContext wired to the given decoder,
// AudioContext and playback parameters.
export function createStreamingContext(
  opusDecoder,
  audioContext,
  sampleRate,
  channels,
  minAudioDuration
) {
  const context = new StreamingContext(
    opusDecoder,
    audioContext,
    sampleRate,
    channels,
    minAudioDuration
  );
  return context;
}
|