@fluxerjs/voice 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -0
- package/dist/index.d.mts +140 -0
- package/dist/index.d.ts +140 -0
- package/dist/index.js +925 -0
- package/dist/index.mjs +892 -0
- package/package.json +44 -0
package/dist/index.mjs
ADDED
@@ -0,0 +1,892 @@
// src/VoiceManager.ts
import { EventEmitter as EventEmitter3 } from "events";
import { Events } from "@fluxerjs/core";
import { GatewayOpcodes } from "@fluxerjs/types";

// src/VoiceConnection.ts
import { EventEmitter } from "events";
import * as nacl from "tweetnacl";
import * as dgram from "dgram";
var VOICE_WS_OPCODES = { Identify: 0, SelectProtocol: 1, Ready: 2, Heartbeat: 3, SessionDescription: 4, Speaking: 5 };
var VOICE_VERSION = 4;
var CHANNELS = 2;
var OPUS_FRAME_TICKS = 960 * (CHANNELS === 2 ? 2 : 1);
var AUDIO_FRAME_INTERVAL_MS = 20;
async function logFullResponse(url) {
  try {
    const fetchUrl = url.replace(/^wss:\/\//i, "https://").replace(/^ws:\/\//i, "http://");
    const res = await fetch(fetchUrl, { method: "GET" });
    const body = await res.text();
    const headers = {};
    res.headers.forEach((v, k) => {
      headers[k] = v;
    });
    console.error("[voice] Full response from", url, {
      status: res.status,
      statusText: res.statusText,
      headers,
      body: body.slice(0, 2e3) + (body.length > 2e3 ? "..." : "")
    });
  } catch (e) {
    console.error("[voice] Could not fetch URL for logging:", e);
  }
}
var VoiceConnection = class extends EventEmitter {
  client;
  channel;
  guildId;
  _sessionId = null;
  _token = null;
  _endpoint = null;
  _userId;
  voiceWs = null;
  udpSocket = null;
  ssrc = 0;
  secretKey = null;
  heartbeatInterval = null;
  sequence = 0;
  timestamp = 0;
  _playing = false;
  _destroyed = false;
  currentStream = null;
  remoteUdpAddress = "";
  remoteUdpPort = 0;
  audioPacketQueue = [];
  pacingInterval = null;
  constructor(client, channel, userId) {
    super();
    this.client = client;
    this.channel = channel;
    this.guildId = channel.guildId;
    this._userId = userId;
  }
  get sessionId() {
    return this._sessionId;
  }
  get playing() {
    return this._playing;
  }
  /** Called when we have both server update and state update. */
  async connect(server, state) {
    this._token = server.token;
    const raw = (server.endpoint ?? "").trim();
    this._sessionId = state.session_id;
    if (!raw || !this._token || !this._sessionId) {
      this.emit("error", new Error("Missing voice server or session data"));
      return;
    }
    let wsUrl;
    if (raw.includes("?")) {
      wsUrl = /^wss?:\/\//i.test(raw) ? raw : raw.replace(/^https?:\/\//i, "wss://");
      if (!/^wss?:\/\//i.test(wsUrl)) wsUrl = `wss://${wsUrl}`;
    } else {
      const normalized = raw.replace(/^(wss|ws|https?):\/\//i, "").replace(/^\/+/, "") || raw;
      wsUrl = `wss://${normalized}?v=${VOICE_VERSION}`;
    }
    const hostPart = raw.replace(/^(wss|ws|https?):\/\//i, "").replace(/^\/+/, "").split("/")[0] ?? "";
    this._endpoint = hostPart.split("?")[0] || hostPart;
    const WS = await this.getWebSocketConstructor();
    this.voiceWs = new WS(wsUrl);
    return new Promise((resolve, reject) => {
      const resolveReady = () => {
        cleanup();
        resolve();
        this.emit("ready");
      };
      const onOpen = () => {
        this.voiceWs.off("error", onError);
        this.sendVoiceOp(VOICE_WS_OPCODES.Identify, {
          server_id: this.guildId,
          user_id: this._userId,
          session_id: this._sessionId,
          token: this._token
        });
      };
      const onError = (err) => {
        if (err instanceof Error && /Unexpected server response/i.test(err.message)) {
          logFullResponse(wsUrl).catch(() => {
          });
        }
        cleanup();
        reject(err instanceof Error ? err : new Error(String(err)));
      };
      const onMessage = (data) => {
        const buf = Buffer.isBuffer(data) ? data : Buffer.from(data);
        const payload = JSON.parse(buf.toString());
        const op = payload.op;
        const d = payload.d;
        if (op === VOICE_WS_OPCODES.Ready) {
          this.ssrc = d.ssrc;
          const port = d.port;
          const address = d.address ?? this._endpoint.split(":")[0];
          this.remoteUdpAddress = address;
          this.remoteUdpPort = port;
          this.setupUDP(address, port, () => {
          });
        } else if (op === VOICE_WS_OPCODES.SessionDescription) {
          this.secretKey = new Uint8Array(d.secret_key);
          if (this.heartbeatInterval) {
            clearInterval(this.heartbeatInterval);
            this.heartbeatInterval = null;
          }
          this.heartbeatInterval = setInterval(() => {
            this.sendVoiceOp(VOICE_WS_OPCODES.Heartbeat, Date.now());
          }, d.heartbeat_interval ?? 5e3);
          resolveReady();
        } else if (op === VOICE_WS_OPCODES.Heartbeat) {
        }
      };
      const cleanup = () => {
        if (this.voiceWs) {
          this.voiceWs.removeAllListeners();
        }
      };
      const ws = this.voiceWs;
      ws.on("open", onOpen);
      ws.on("error", onError);
      ws.on("message", (data) => onMessage(data));
      ws.once("close", () => {
        cleanup();
        if (!this._destroyed) reject(new Error("Voice WebSocket closed"));
      });
    });
  }
  async getWebSocketConstructor() {
    try {
      const ws = await import("ws");
      return ws.default;
    } catch {
      throw new Error('Install "ws" for voice support: pnpm add ws');
    }
  }
  sendVoiceOp(op, d) {
    if (!this.voiceWs || this.voiceWs.readyState !== 1) return;
    this.voiceWs.send(JSON.stringify({ op, d }));
  }
  setupUDP(remoteAddress, remotePort, onReady) {
    const socket = dgram.createSocket("udp4");
    this.udpSocket = socket;
    const discovery = Buffer.alloc(70);
    discovery.writeUInt32BE(1, 0);
    discovery.writeUInt16BE(70, 4);
    discovery.writeUInt32BE(this.ssrc, 6);
    socket.send(discovery, 0, discovery.length, remotePort, remoteAddress, () => {
      socket.once("message", (msg) => {
        const len = msg.readUInt16BE(4);
        let ourIp = "";
        let i = 10;
        while (i < Math.min(70, len + 8) && msg[i] !== 0) {
          ourIp += String.fromCharCode(msg[i]);
          i++;
        }
        const ourPort = msg.readUInt16BE(68);
        this.sendVoiceOp(VOICE_WS_OPCODES.SelectProtocol, {
          protocol: "udp",
          data: {
            address: ourIp,
            port: ourPort,
            mode: "xsalsa20_poly1305"
          }
        });
        onReady();
      });
    });
  }
  /**
   * Play a stream of raw Opus packets
   * Uses the same queue and 20ms pacing as play(). Use this for local files (MP3 → PCM → Opus) or other Opus sources.
   */
  playOpus(stream) {
    this.stop();
    this._playing = true;
    this.currentStream = stream;
    this.audioPacketQueue = [];
    this.sendVoiceOp(VOICE_WS_OPCODES.Speaking, { speaking: 1, delay: 0 });
    const stopPacing = () => {
      if (this.pacingInterval) {
        clearInterval(this.pacingInterval);
        this.pacingInterval = null;
      }
    };
    this.pacingInterval = setInterval(() => {
      const packet = this.audioPacketQueue.shift();
      if (packet && this.secretKey && this.udpSocket) this.sendAudioFrame(packet);
      if (this.audioPacketQueue.length === 0 && !this._playing) stopPacing();
    }, AUDIO_FRAME_INTERVAL_MS);
    stream.on("data", (chunk) => {
      if (!this._playing) return;
      if (Buffer.isBuffer(chunk) && chunk.length > 0) this.audioPacketQueue.push(chunk);
    });
    stream.on("error", (err) => {
      this._playing = false;
      this.currentStream = null;
      stopPacing();
      this.emit("error", err);
    });
    stream.on("end", () => {
      this._playing = false;
      this.currentStream = null;
      if (this.audioPacketQueue.length === 0) stopPacing();
    });
  }
  /**
   * Play a direct WebM/Opus URL or stream. Fetches the URL (if string), demuxes with prism-media WebmDemuxer,
   * and sends Opus packets to the voice connection. No FFmpeg or encoding; input must be WebM with Opus.
   */
  async play(urlOrStream) {
    this.stop();
    const { opus: prismOpus } = await import("prism-media");
    const { Readable } = await import("stream");
    let inputStream;
    if (typeof urlOrStream === "string") {
      try {
        const response = await fetch(urlOrStream);
        if (!response.ok) throw new Error(`HTTP ${response.status}`);
        if (!response.body) throw new Error("No response body");
        inputStream = Readable.fromWeb(response.body);
      } catch (e) {
        const err = e instanceof Error ? e : new Error(String(e));
        this.emit("error", err);
        return;
      }
    } else {
      inputStream = urlOrStream;
    }
    const demuxer = new prismOpus.WebmDemuxer();
    inputStream.pipe(demuxer);
    this._playing = true;
    this.currentStream = demuxer;
    this.audioPacketQueue = [];
    this.sendVoiceOp(VOICE_WS_OPCODES.Speaking, { speaking: 1, delay: 0 });
    const stopPacing = () => {
      if (this.pacingInterval) {
        clearInterval(this.pacingInterval);
        this.pacingInterval = null;
      }
    };
    this.pacingInterval = setInterval(() => {
      const packet = this.audioPacketQueue.shift();
      if (packet && this.secretKey && this.udpSocket) this.sendAudioFrame(packet);
      if (this.audioPacketQueue.length === 0 && !this._playing) stopPacing();
    }, AUDIO_FRAME_INTERVAL_MS);
    demuxer.on("data", (chunk) => {
      if (!this._playing) return;
      if (Buffer.isBuffer(chunk) && chunk.length > 0) this.audioPacketQueue.push(chunk);
    });
    demuxer.on("error", (err) => {
      this._playing = false;
      this.currentStream = null;
      stopPacing();
      this.emit("error", err);
    });
    demuxer.on("end", () => {
      this._playing = false;
      this.currentStream = null;
      if (this.audioPacketQueue.length === 0) stopPacing();
    });
  }
  sendAudioFrame(opusPayload) {
    if (!this.udpSocket || !this.secretKey) return;
    const rtpHeader = Buffer.alloc(12);
    rtpHeader[0] = 128;
    rtpHeader[1] = 120;
    rtpHeader.writeUInt16BE(this.sequence++, 2);
    rtpHeader.writeUInt32BE(this.timestamp, 4);
    rtpHeader.writeUInt32BE(this.ssrc, 8);
    this.timestamp += OPUS_FRAME_TICKS;
    const nonce = Buffer.alloc(24);
    rtpHeader.copy(nonce, 0, 0, 12);
    const encrypted = nacl.secretbox(opusPayload, new Uint8Array(nonce), this.secretKey);
    const packet = Buffer.concat([rtpHeader, Buffer.from(encrypted)]);
    if (this.remoteUdpPort && this.remoteUdpAddress && this.udpSocket) {
      this.udpSocket.send(packet, 0, packet.length, this.remoteUdpPort, this.remoteUdpAddress);
    }
  }
  stop() {
    this._playing = false;
    this.audioPacketQueue = [];
    if (this.pacingInterval) {
      clearInterval(this.pacingInterval);
      this.pacingInterval = null;
    }
    if (this.currentStream) {
      if (typeof this.currentStream.destroy === "function") this.currentStream.destroy();
      this.currentStream = null;
    }
  }
  disconnect() {
    this._destroyed = true;
    this.stop();
    if (this.heartbeatInterval) {
      clearInterval(this.heartbeatInterval);
      this.heartbeatInterval = null;
    }
    if (this.voiceWs) {
      this.voiceWs.close();
      this.voiceWs = null;
    }
    if (this.udpSocket) {
      this.udpSocket.close();
      this.udpSocket = null;
    }
    this.emit("disconnect");
  }
  destroy() {
    this.disconnect();
    this.removeAllListeners();
  }
};

// src/LiveKitRtcConnection.ts
import { EventEmitter as EventEmitter2 } from "events";
import {
  Room,
  RoomEvent,
  AudioSource,
  AudioFrame,
  LocalAudioTrack,
  TrackPublishOptions,
  TrackSource
} from "@livekit/rtc-node";

// src/livekit.ts
function isLiveKitEndpoint(endpoint, token) {
  if (!endpoint || typeof endpoint !== "string") return false;
  const s = endpoint.trim();
  if (s.includes("access_token=") || s.includes("/rtc") && s.includes("?"))
    return true;
  if (token && !s.includes("?")) return true;
  return false;
}
function buildLiveKitUrlForRtcSdk(endpoint) {
  const base = endpoint.replace(/^(wss|ws|https?):\/\//i, "").replace(/^\/+/, "").split("/")[0] ?? endpoint;
  const scheme = /^wss?:\/\//i.test(endpoint) ? endpoint.startsWith("wss") ? "wss" : "ws" : "wss";
  return `${scheme}://${base.replace(/\/+$/, "")}`;
}

// src/opusUtils.ts
function parseOpusPacketBoundaries(buffer) {
  if (buffer.length < 1) return null;
  const toc = buffer[0];
  const c = toc & 3;
  const tocSingle = toc & 252 | 0;
  if (c === 0) {
    return { frames: [buffer.slice()], consumed: buffer.length };
  }
  if (c === 1) {
    if (buffer.length < 2) return null;
    const L1 = buffer[1] + 1;
    if (buffer.length < 2 + L1) return null;
    const L2 = buffer.length - 2 - L1;
    const frame0 = new Uint8Array(1 + L1);
    frame0[0] = tocSingle;
    frame0.set(buffer.subarray(2, 2 + L1), 1);
    const frame1 = new Uint8Array(1 + L2);
    frame1[0] = tocSingle;
    frame1.set(buffer.subarray(2 + L1), 1);
    return { frames: [frame0, frame1], consumed: buffer.length };
  }
  if (c === 2) {
    if (buffer.length < 3) return null;
    const frameLen = Math.floor((buffer.length - 2) / 2);
    if (frameLen < 1) return null;
    const frame0 = new Uint8Array(1 + frameLen);
    frame0[0] = tocSingle;
    frame0.set(buffer.subarray(2, 2 + frameLen), 1);
    const frame1 = new Uint8Array(1 + frameLen);
    frame1[0] = tocSingle;
    frame1.set(buffer.subarray(2 + frameLen, 2 + 2 * frameLen), 1);
    return { frames: [frame0, frame1], consumed: 2 + 2 * frameLen };
  }
  if (c === 3) {
    if (buffer.length < 2) return null;
    const N = buffer[1];
    if (N < 1 || N > 255) return null;
    const numLengthBytes = N - 1;
    if (buffer.length < 2 + numLengthBytes) return null;
    const lengths = [];
    for (let i = 0; i < numLengthBytes; i++) {
      lengths.push(buffer[2 + i] + 1);
    }
    const headerLen = 2 + numLengthBytes;
    let offset = headerLen;
    const sumKnown = lengths.reduce((a, b) => a + b, 0);
    const lastLen = buffer.length - headerLen - sumKnown;
    if (lastLen < 0) return null;
    lengths.push(lastLen);
    const frames = [];
    for (let i = 0; i < lengths.length; i++) {
      const L = lengths[i];
      if (offset + L > buffer.length) return null;
      const frame = new Uint8Array(1 + L);
      frame[0] = tocSingle;
      frame.set(buffer.subarray(offset, offset + L), 1);
      frames.push(frame);
      offset += L;
    }
    return { frames, consumed: offset };
  }
  return null;
}
function concatUint8Arrays(a, b) {
  const out = new Uint8Array(a.length + b.length);
  out.set(a);
  out.set(b, a.length);
  return out;
}

// src/LiveKitRtcConnection.ts
var SAMPLE_RATE = 48e3;
var CHANNELS2 = 1;
var FRAME_SAMPLES = 480;
var VOICE_DEBUG = process.env.VOICE_DEBUG === "1" || process.env.VOICE_DEBUG === "true";
var LiveKitRtcConnection = class extends EventEmitter2 {
  client;
  channel;
  guildId;
  _playing = false;
  _destroyed = false;
  room = null;
  audioSource = null;
  audioTrack = null;
  currentStream = null;
  lastServerEndpoint = null;
  lastServerToken = null;
  _disconnectEmitted = false;
  constructor(client, channel, _userId) {
    super();
    this.client = client;
    this.channel = channel;
    this.guildId = channel.guildId;
  }
  get playing() {
    return this._playing;
  }
  debug(msg, data) {
    console.error("[voice LiveKitRtc]", msg, data ?? "");
  }
  audioDebug(msg, data) {
    if (VOICE_DEBUG) {
      console.error("[voice LiveKitRtc audio]", msg, data ?? "");
    }
  }
  emitDisconnect(source) {
    if (this._disconnectEmitted) return;
    this._disconnectEmitted = true;
    this.debug("emitting disconnect", { source });
    this.emit("disconnect");
  }
  /** Returns true if the LiveKit room is connected and not destroyed. */
  isConnected() {
    return !this._destroyed && this.room != null && this.room.isConnected;
  }
  /** Returns true if we're already connected to the given server (skip migration). */
  isSameServer(endpoint, token) {
    const ep = (endpoint ?? "").trim();
    return ep === (this.lastServerEndpoint ?? "") && token === (this.lastServerToken ?? "");
  }
  playOpus(_stream) {
    this.emit("error", new Error("LiveKit: playOpus not supported; use play(url) with a WebM/Opus URL"));
  }
  async connect(server, _state) {
    const raw = (server.endpoint ?? "").trim();
    const token = server.token;
    if (!raw || !token) {
      this.emit("error", new Error("Missing voice server endpoint or token"));
      return;
    }
    const url = buildLiveKitUrlForRtcSdk(raw);
    this._disconnectEmitted = false;
    try {
      const room = new Room();
      this.room = room;
      room.on(RoomEvent.Disconnected, () => {
        this.debug("Room disconnected");
        this.lastServerEndpoint = null;
        this.lastServerToken = null;
        setImmediate(() => this.emit("serverLeave"));
        this.emitDisconnect("room_disconnected");
      });
      room.on(RoomEvent.Reconnecting, () => {
        this.debug("Room reconnecting");
      });
      room.on(RoomEvent.Reconnected, () => {
        this.debug("Room reconnected");
      });
      await room.connect(url, token, { autoSubscribe: false, dynacast: false });
      this.lastServerEndpoint = raw;
      this.lastServerToken = token;
      this.debug("connected to room");
      this.emit("ready");
    } catch (e) {
      this.room = null;
      const err = e instanceof Error ? e : new Error(String(e));
      this.emit("error", err);
      throw err;
    }
  }
  async play(urlOrStream) {
    this.stop();
    if (!this.room || !this.room.isConnected) {
      this.emit("error", new Error("LiveKit: not connected"));
      return;
    }
    const { opus: prismOpus } = await import("prism-media");
    const { Readable } = await import("stream");
    const { OpusDecoder } = await import("opus-decoder");
    let inputStream;
    if (typeof urlOrStream === "string") {
      try {
        const response = await fetch(urlOrStream);
        if (!response.ok) throw new Error(`HTTP ${response.status}`);
        if (!response.body) throw new Error("No response body");
        inputStream = Readable.fromWeb(response.body);
      } catch (e) {
        this.emit("error", e instanceof Error ? e : new Error(String(e)));
        return;
      }
    } else {
      inputStream = urlOrStream;
    }
    const source = new AudioSource(SAMPLE_RATE, CHANNELS2);
    this.audioSource = source;
    const track = LocalAudioTrack.createAudioTrack("audio", source);
    this.audioTrack = track;
    const options = new TrackPublishOptions();
    options.source = TrackSource.SOURCE_MICROPHONE;
    await this.room.localParticipant.publishTrack(track, options);
    const demuxer = new prismOpus.WebmDemuxer();
    inputStream.pipe(demuxer);
    this.currentStream = demuxer;
    const decoder = new OpusDecoder({ sampleRate: SAMPLE_RATE, channels: CHANNELS2 });
    await decoder.ready;
    this._playing = true;
    function floatToInt16(float32) {
      const int16 = new Int16Array(float32.length);
      for (let i = 0; i < float32.length; i++) {
        let s = float32[i];
        if (!Number.isFinite(s)) {
          int16[i] = 0;
          continue;
        }
        s = Math.max(-1, Math.min(1, s));
        const scale = s < 0 ? 32768 : 32767;
        const dither = (Math.random() + Math.random() - 1) * 0.5;
        const scaled = Math.round(s * scale + dither);
        int16[i] = Math.max(-32768, Math.min(32767, scaled));
      }
      return int16;
    }
    let sampleBuffer = new Int16Array(0);
    let opusBuffer = new Uint8Array(0);
    let streamEnded = false;
    let framesCaptured = 0;
    const processOneOpusFrame = async (frame) => {
      if (frame.length < 2) return;
      try {
        const result = decoder.decodeFrame(frame);
        if (!result?.channelData?.[0]?.length) return;
        const int16 = floatToInt16(result.channelData[0]);
        const newBuffer = new Int16Array(sampleBuffer.length + int16.length);
        newBuffer.set(sampleBuffer);
        newBuffer.set(int16, sampleBuffer.length);
        sampleBuffer = newBuffer;
        while (sampleBuffer.length >= FRAME_SAMPLES && this._playing && source) {
          const outSamples = sampleBuffer.subarray(0, FRAME_SAMPLES);
          sampleBuffer = sampleBuffer.subarray(FRAME_SAMPLES).slice();
          const audioFrame = new AudioFrame(outSamples, SAMPLE_RATE, CHANNELS2, FRAME_SAMPLES);
          if (source.queuedDuration > 500) {
            await source.waitForPlayout();
          }
          await source.captureFrame(audioFrame);
          framesCaptured++;
        }
      } catch (err) {
        if (VOICE_DEBUG) this.audioDebug("decode error", { error: String(err) });
      }
    };
    let firstChunk = true;
    let processing = false;
    const opusFrameQueue = [];
    const drainOpusQueue = async () => {
      if (processing || opusFrameQueue.length === 0) return;
      processing = true;
      while (opusFrameQueue.length > 0 && this._playing && source) {
        const frame = opusFrameQueue.shift();
        await processOneOpusFrame(frame);
      }
      processing = false;
    };
    demuxer.on("data", (chunk) => {
      if (!this._playing) return;
      if (firstChunk) {
        this.audioDebug("first audio chunk received", { size: chunk.length });
        firstChunk = false;
      }
      opusBuffer = concatUint8Arrays(opusBuffer, new Uint8Array(chunk));
      while (opusBuffer.length > 0) {
        const parsed = parseOpusPacketBoundaries(opusBuffer);
        if (!parsed) break;
        opusBuffer = opusBuffer.slice(parsed.consumed);
        for (const frame of parsed.frames) {
          opusFrameQueue.push(frame);
        }
      }
      drainOpusQueue().catch((e) => this.audioDebug("drainOpusQueue error", { error: String(e) }));
    });
    demuxer.on("error", (err) => {
      this.audioDebug("demuxer error", { error: err.message });
      this._playing = false;
      this.currentStream = null;
      this.emit("error", err);
    });
    demuxer.on("end", async () => {
      streamEnded = true;
      this.audioDebug("stream ended", { framesCaptured });
      while (processing || opusFrameQueue.length > 0) {
        await drainOpusQueue();
        await new Promise((r) => setImmediate(r));
      }
      while (sampleBuffer.length >= FRAME_SAMPLES && this._playing && source) {
        const outSamples = sampleBuffer.subarray(0, FRAME_SAMPLES);
        sampleBuffer = sampleBuffer.subarray(FRAME_SAMPLES).slice();
        const audioFrame = new AudioFrame(outSamples, SAMPLE_RATE, CHANNELS2, FRAME_SAMPLES);
        await source.captureFrame(audioFrame);
        framesCaptured++;
      }
      if (sampleBuffer.length > 0 && this._playing && source) {
        const padded = new Int16Array(FRAME_SAMPLES);
        padded.set(sampleBuffer);
        const audioFrame = new AudioFrame(padded, SAMPLE_RATE, CHANNELS2, FRAME_SAMPLES);
        await source.captureFrame(audioFrame);
        framesCaptured++;
      }
      this.audioDebug("playback complete", { framesCaptured });
      this._playing = false;
      this.currentStream = null;
      if (this.audioTrack) {
        await this.audioTrack.close();
        this.audioTrack = null;
      }
      if (this.audioSource) {
        await this.audioSource.close();
        this.audioSource = null;
      }
    });
  }
  stop() {
    this._playing = false;
    if (this.currentStream?.destroy) this.currentStream.destroy();
    this.currentStream = null;
    if (this.audioTrack) {
      this.audioTrack.close().catch(() => {
      });
      this.audioTrack = null;
    }
    if (this.audioSource) {
      this.audioSource.close().catch(() => {
      });
      this.audioSource = null;
    }
  }
  disconnect() {
    this._destroyed = true;
    this.stop();
    if (this.room) {
      this.room.disconnect().catch(() => {
      });
      this.room = null;
    }
    this.lastServerEndpoint = null;
    this.lastServerToken = null;
    this.emit("disconnect");
  }
  destroy() {
    this.disconnect();
    this.removeAllListeners();
  }
};

// src/VoiceManager.ts
import { Collection } from "@fluxerjs/collection";
var VoiceManager = class extends EventEmitter3 {
  client;
  connections = new Collection();
  /** guild_id -> user_id -> channel_id */
  voiceStates = /* @__PURE__ */ new Map();
  pending = /* @__PURE__ */ new Map();
  shardId;
  constructor(client, options = {}) {
    super();
    this.client = client;
    this.shardId = options.shardId ?? 0;
    this.client.on(Events.VoiceStateUpdate, (data) => this.handleVoiceStateUpdate(data));
    this.client.on(Events.VoiceServerUpdate, (data) => this.handleVoiceServerUpdate(data));
    this.client.on(Events.VoiceStatesSync, (data) => this.handleVoiceStatesSync(data));
  }
  handleVoiceStatesSync(data) {
    let guildMap = this.voiceStates.get(data.guildId);
    if (!guildMap) {
      guildMap = /* @__PURE__ */ new Map();
      this.voiceStates.set(data.guildId, guildMap);
    }
    for (const vs of data.voiceStates) {
      guildMap.set(vs.user_id, vs.channel_id);
    }
  }
  /** Get the voice channel ID the user is in, or null. */
  getVoiceChannelId(guildId, userId) {
    const guildMap = this.voiceStates.get(guildId);
    if (!guildMap) return null;
    return guildMap.get(userId) ?? null;
  }
  handleVoiceStateUpdate(data) {
    const guildId = data.guild_id ?? "";
    if (!guildId) return;
    let guildMap = this.voiceStates.get(guildId);
    if (!guildMap) {
      guildMap = /* @__PURE__ */ new Map();
      this.voiceStates.set(guildId, guildMap);
    }
    guildMap.set(data.user_id, data.channel_id);
    const pending = this.pending.get(guildId);
    if (pending && data.user_id === this.client.user?.id) {
      pending.state = data;
      this.tryCompletePending(guildId);
    }
  }
  handleVoiceServerUpdate(data) {
    const guildId = data.guild_id;
    const pending = this.pending.get(guildId);
    if (pending) {
      pending.server = data;
      this.tryCompletePending(guildId);
      return;
    }
    const conn = this.connections.get(guildId);
    if (!conn) return;
    if (!data.endpoint || !data.token) {
      this.client.emit?.("debug", `[VoiceManager] Voice server endpoint null for guild ${guildId}; disconnecting until new allocation`);
      conn.destroy();
      this.connections.delete(guildId);
      return;
    }
    if (!isLiveKitEndpoint(data.endpoint, data.token)) return;
    if (conn instanceof LiveKitRtcConnection && conn.isSameServer(data.endpoint, data.token)) {
      return;
    }
    const channel = conn.channel;
    this.client.emit?.("debug", `[VoiceManager] Voice server migration for guild ${guildId}; reconnecting`);
    conn.destroy();
    this.connections.delete(guildId);
    const ConnClass = LiveKitRtcConnection;
    const newConn = new ConnClass(this.client, channel, this.client.user.id);
    this.registerConnection(guildId, newConn);
    const state = {
      guild_id: guildId,
      channel_id: channel.id,
      user_id: this.client.user.id,
      session_id: ""
    };
    newConn.connect(data, state).catch((e) => {
      this.connections.delete(guildId);
      newConn.emit("error", e instanceof Error ? e : new Error(String(e)));
    });
  }
  registerConnection(guildId, conn) {
    this.connections.set(guildId, conn);
    conn.once("disconnect", () => this.connections.delete(guildId));
  }
  tryCompletePending(guildId) {
    const pending = this.pending.get(guildId);
    if (!pending?.server || !pending.state) return;
    this.pending.delete(guildId);
    const ConnClass = isLiveKitEndpoint(pending.server.endpoint, pending.server.token) ? LiveKitRtcConnection : VoiceConnection;
    const conn = new ConnClass(this.client, pending.channel, this.client.user.id);
    this.registerConnection(guildId, conn);
    conn.connect(pending.server, pending.state).then(
      () => pending.resolve(conn),
      (e) => pending.reject(e)
    );
  }
  /** Join a voice channel. Resolves when the connection is ready. */
  async join(channel) {
    const existing = this.connections.get(channel.guildId);
    if (existing) {
      const isReusable = existing.channel.id === channel.id && (existing instanceof LiveKitRtcConnection ? existing.isConnected() : true);
      if (isReusable) return existing;
      existing.destroy();
      this.connections.delete(channel.guildId);
    }
    return new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        if (this.pending.has(channel.guildId)) {
          this.pending.delete(channel.guildId);
          reject(new Error("Voice connection timeout"));
        }
      }, 15e3);
      this.pending.set(channel.guildId, {
        channel,
        resolve: (c) => {
          clearTimeout(timeout);
          resolve(c);
        },
        reject: (e) => {
          clearTimeout(timeout);
          reject(e);
        }
      });
      this.client.sendToGateway(this.shardId, {
        op: GatewayOpcodes.VoiceStateUpdate,
        d: {
          guild_id: channel.guildId,
          channel_id: channel.id,
          self_mute: false,
          self_deaf: false
        }
      });
    });
  }
  /** Leave a guild's voice channel. */
  leave(guildId) {
    const conn = this.connections.get(guildId);
    if (conn) {
      conn.destroy();
      this.connections.delete(guildId);
    }
    this.client.sendToGateway(this.shardId, {
      op: GatewayOpcodes.VoiceStateUpdate,
      d: {
        guild_id: guildId,
        channel_id: null,
        self_mute: false,
        self_deaf: false
      }
    });
  }
  getConnection(guildId) {
    return this.connections.get(guildId);
  }
};

// src/index.ts
async function joinVoiceChannel(client, channel, options) {
  const manager = getVoiceManager(client, options);
  return manager.join(channel);
}
var voiceManagers = /* @__PURE__ */ new WeakMap();
function getVoiceManager(client, options) {
  let manager = voiceManagers.get(client);
  if (!manager) {
    manager = new VoiceManager(client, options);
    voiceManagers.set(client, manager);
  }
  return manager;
}
export {
  LiveKitRtcConnection,
  VoiceConnection,
  VoiceManager,
  getVoiceManager,
  joinVoiceChannel
};
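For orientation (not part of the published file): given the exports above, a consumer flow would look roughly like the sketch below. It is a minimal sketch, assuming a @fluxerjs/core client that emits the VoiceStateUpdate/VoiceServerUpdate/VoiceStatesSync events and exposes sendToGateway, and a voice channel object carrying id and guildId, since that is what VoiceManager relies on; how those objects are obtained is outside this package and the playWebmUrl/leaveVoice helper names are illustrative only.

// Hypothetical usage sketch for the exported API (assumptions noted above).
import { joinVoiceChannel, getVoiceManager } from "@fluxerjs/voice";

async function playWebmUrl(client, channel, url) {
  // join() resolves once the underlying VoiceConnection or LiveKitRtcConnection emits "ready".
  const connection = await joinVoiceChannel(client, channel);
  connection.on("error", (err) => console.error("voice error:", err));
  // play() accepts a WebM/Opus URL or stream; it wires up demuxing and starts sending audio.
  await connection.play(url);
}

function leaveVoice(client, guildId) {
  // leave() destroys any existing connection and sends a null channel_id voice state to the gateway.
  getVoiceManager(client).leave(guildId);
}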