@fluxerjs/voice 1.0.7 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -191,6 +191,7 @@ declare class LiveKitRtcConnection extends EventEmitter {
  * Stop video playback and unpublish the video track from the LiveKit room.
  * Safe to call even when no video is playing.
  */
+ private _videoCleaning;
  stopVideo(): void;
  /** Stop playback and clear both audio and video tracks. */
  stop(): void;
package/dist/index.d.ts CHANGED
@@ -191,6 +191,7 @@ declare class LiveKitRtcConnection extends EventEmitter {
  * Stop video playback and unpublish the video track from the LiveKit room.
  * Safe to call even when no video is playing.
  */
+ private _videoCleaning;
  stopVideo(): void;
  /** Stop playback and clear both audio and video tracks. */
  stop(): void;
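The new _videoCleaning member declared above is a private re-entrancy flag; the public surface of the class is unchanged. A minimal usage sketch in TypeScript, assuming an already-connected LiveKitRtcConnection instance named connection (hypothetical name, not part of the package):

declare const connection: LiveKitRtcConnection;
connection.stopVideo(); // documented as safe to call even when no video is playing
connection.stop();      // stops playback and clears both audio and video tracks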
package/dist/index.js CHANGED
@@ -51,7 +51,14 @@ var thumbnail = MINIMAL_PNG_BASE64;
  var import_events = require("events");
  var nacl = __toESM(require("tweetnacl"));
  var dgram = __toESM(require("dgram"));
- var VOICE_WS_OPCODES = { Identify: 0, SelectProtocol: 1, Ready: 2, Heartbeat: 3, SessionDescription: 4, Speaking: 5 };
+ var VOICE_WS_OPCODES = {
+ Identify: 0,
+ SelectProtocol: 1,
+ Ready: 2,
+ Heartbeat: 3,
+ SessionDescription: 4,
+ Speaking: 5
+ };
  var VOICE_VERSION = 4;
  var CHANNELS = 2;
  var OPUS_FRAME_TICKS = 960 * (CHANNELS === 2 ? 2 : 1);
@@ -218,6 +225,10 @@ var VoiceConnection = class extends import_events.EventEmitter {
  discovery.writeUInt32BE(this.ssrc, 6);
  socket.send(discovery, 0, discovery.length, remotePort, remoteAddress, () => {
  socket.once("message", (msg) => {
+ if (msg.length < 70) {
+ this.emit("error", new Error("UDP discovery response too short"));
+ return;
+ }
  const len = msg.readUInt16BE(4);
  let ourIp = "";
  let i = 10;
@@ -381,6 +392,10 @@ var VoiceConnection = class extends import_events.EventEmitter {
  }
  /** Disconnect and remove all listeners. */
  destroy() {
+ if (this.currentStream) {
+ if (typeof this.currentStream.destroy === "function") this.currentStream.destroy();
+ this.currentStream = null;
+ }
  this.disconnect();
  this.removeAllListeners();
  }
@@ -395,8 +410,7 @@ var import_rtc_node = require("@livekit/rtc-node");
  function isLiveKitEndpoint(endpoint, token) {
  if (!endpoint || typeof endpoint !== "string") return false;
  const s = endpoint.trim();
- if (s.includes("access_token=") || s.includes("/rtc") && s.includes("?"))
- return true;
+ if (s.includes("access_token=") || s.includes("/rtc") && s.includes("?")) return true;
  if (token && !s.includes("?")) return true;
  return false;
  }
@@ -408,7 +422,7 @@ function buildLiveKitUrlForRtcSdk(endpoint) {

  // src/opusUtils.ts
  function parseOpusPacketBoundaries(buffer) {
- if (buffer.length < 1) return null;
+ if (buffer.length < 2) return null;
  const toc = buffer[0];
  const c = toc & 3;
  const tocSingle = toc & 252 | 0;
@@ -488,7 +502,8 @@ function getNaluByteLength(nalu) {
  }
  function toUint8Array(nalu) {
  if (nalu instanceof Uint8Array) return nalu;
- if (ArrayBuffer.isView(nalu)) return new Uint8Array(nalu.buffer, nalu.byteOffset, nalu.byteLength);
+ if (ArrayBuffer.isView(nalu))
+ return new Uint8Array(nalu.buffer, nalu.byteOffset, nalu.byteLength);
  if (nalu instanceof ArrayBuffer) return new Uint8Array(nalu);
  if (Array.isArray(nalu)) return new Uint8Array(nalu);
  return new Uint8Array(0);
@@ -618,7 +633,10 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  return ep === (this.lastServerEndpoint ?? "") && token === (this.lastServerToken ?? "");
  }
  playOpus(_stream) {
- this.emit("error", new Error("LiveKit: playOpus not supported; use play(url) with a WebM/Opus URL"));
+ this.emit(
+ "error",
+ new Error("LiveKit: playOpus not supported; use play(url) with a WebM/Opus URL")
+ );
  }
  /**
  * Connect to the LiveKit room using voice server and state from the gateway.
@@ -714,7 +732,10 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  return;
  }
  } else if (urlOrBuffer instanceof Uint8Array) {
- arrayBuffer = urlOrBuffer.buffer.slice(urlOrBuffer.byteOffset, urlOrBuffer.byteOffset + urlOrBuffer.byteLength);
+ arrayBuffer = urlOrBuffer.buffer.slice(
+ urlOrBuffer.byteOffset,
+ urlOrBuffer.byteOffset + urlOrBuffer.byteLength
+ );
  } else {
  arrayBuffer = urlOrBuffer;
  }
@@ -726,13 +747,19 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  this.emit("error", e);
  };
  file.onReady = (info) => {
- const tracks = info.tracks ?? [];
+ if (!info.tracks?.length) {
+ this.emit("error", new Error("No tracks found in MP4 file"));
+ return;
+ }
+ const tracks = info.tracks;
  const videoTrack = tracks.find((t) => t.type === "video");
  if (!videoTrack) {
  this.emit("error", new Error("No video track in MP4"));
  return;
  }
- const audioTrackInfo = tracks.find((t) => t.type === "audio" && t.codec.startsWith("mp4a"));
+ const audioTrackInfo = tracks.find(
+ (t) => t.type === "audio" && t.codec.startsWith("mp4a")
+ );
  const width = videoTrack.video?.width ?? 640;
  const height = videoTrack.video?.height ?? 480;
  const totalSamples = videoTrack.nb_samples ?? Number.POSITIVE_INFINITY;
@@ -772,7 +799,11 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  const { codedWidth, codedHeight } = frame;
  if (codedWidth <= 0 || codedHeight <= 0) {
  frame.close();
- if (VOICE_DEBUG) this.audioDebug("video frame skipped (invalid dimensions)", { codedWidth, codedHeight });
+ if (VOICE_DEBUG)
+ this.audioDebug("video frame skipped (invalid dimensions)", {
+ codedWidth,
+ codedHeight
+ });
  return;
  }
  try {
@@ -786,13 +817,22 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  frame.close();
  const expectedI420Size = Math.ceil(codedWidth * codedHeight * 3 / 2);
  if (buffer.byteLength < expectedI420Size) {
- if (VOICE_DEBUG) this.audioDebug("video frame skipped (buffer too small)", { codedWidth, codedHeight });
+ if (VOICE_DEBUG)
+ this.audioDebug("video frame skipped (buffer too small)", {
+ codedWidth,
+ codedHeight
+ });
  return;
  }
  while (frameQueue.length >= MAX_QUEUED_FRAMES) {
  frameQueue.shift();
  }
- frameQueue.push({ buffer, width: codedWidth, height: codedHeight, timestampMs: frameTimeMs });
+ frameQueue.push({
+ buffer,
+ width: codedWidth,
+ height: codedHeight,
+ timestampMs: frameTimeMs
+ });
  } catch (err) {
  if (VOICE_DEBUG) this.audioDebug("video frame error", { error: String(err) });
  }
@@ -827,7 +867,7 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  }
  try {
  decoder.close();
- } catch (_) {
+ } catch {
  }
  if (audioFfmpegProc && !audioFfmpegProc.killed) {
  audioFfmpegProc.kill("SIGKILL");
@@ -906,7 +946,10 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  decoder.decode(chunk);
  }
  } catch (decodeErr) {
- this.emit("error", decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr)));
+ this.emit(
+ "error",
+ decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr))
+ );
  doCleanup();
  return;
  }
@@ -942,7 +985,10 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  decoder.decode(chunk);
  }
  } catch (decodeErr) {
- this.emit("error", decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr)));
+ this.emit(
+ "error",
+ decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr))
+ );
  doCleanup();
  return;
  }
@@ -981,11 +1027,20 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  if (frameQueue.length > 0 && frameQueue[0].timestampMs <= elapsed) {
  const f = frameQueue.shift();
  try {
- const livekitFrame = new import_rtc_node.VideoFrame(f.buffer, f.width, f.height, import_rtc_node.VideoBufferType.I420);
+ const livekitFrame = new import_rtc_node.VideoFrame(
+ f.buffer,
+ f.width,
+ f.height,
+ import_rtc_node.VideoBufferType.I420
+ );
  source.captureFrame(livekitFrame);
  } catch (captureErr) {
- if (VOICE_DEBUG) this.audioDebug("captureFrame error", { error: String(captureErr) });
- this.emit("error", captureErr instanceof Error ? captureErr : new Error(String(captureErr)));
+ if (VOICE_DEBUG)
+ this.audioDebug("captureFrame error", { error: String(captureErr) });
+ this.emit(
+ "error",
+ captureErr instanceof Error ? captureErr : new Error(String(captureErr))
+ );
  }
  }
  }, FRAME_INTERVAL_MS);
@@ -998,20 +1053,24 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  const { OpusDecoder } = await import("opus-decoder");
  const runAudioFfmpeg = async () => {
  if (!this._playingVideo || cleanupCalled || !audioSource) return;
- const audioProc = (0, import_node_child_process.spawn)("ffmpeg", [
- "-loglevel",
- "warning",
- "-re",
- "-i",
- videoUrl,
- "-vn",
- "-c:a",
- "libopus",
- "-f",
- "webm",
- ...loop ? ["-stream_loop", "-1"] : [],
- "pipe:1"
- ], { stdio: ["ignore", "pipe", "pipe"] });
+ const audioProc = (0, import_node_child_process.spawn)(
+ "ffmpeg",
+ [
+ "-loglevel",
+ "warning",
+ "-re",
+ "-i",
+ videoUrl,
+ "-vn",
+ "-c:a",
+ "libopus",
+ "-f",
+ "webm",
+ ...loop ? ["-stream_loop", "-1"] : [],
+ "pipe:1"
+ ],
+ { stdio: ["ignore", "pipe", "pipe"] }
+ );
  audioFfmpegProc = audioProc;
  const demuxer = new prismOpus.WebmDemuxer();
  if (audioProc.stdout) audioProc.stdout.pipe(demuxer);
@@ -1034,11 +1093,16 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  while (sampleBuffer.length >= FRAME_SAMPLES && this._playingVideo && audioSource) {
  const outSamples = sampleBuffer.subarray(0, FRAME_SAMPLES);
  sampleBuffer = sampleBuffer.subarray(FRAME_SAMPLES).slice();
- const audioFrame = new import_rtc_node.AudioFrame(outSamples, SAMPLE_RATE, CHANNELS2, FRAME_SAMPLES);
+ const audioFrame = new import_rtc_node.AudioFrame(
+ outSamples,
+ SAMPLE_RATE,
+ CHANNELS2,
+ FRAME_SAMPLES
+ );
  if (audioSource.queuedDuration > 500) await audioSource.waitForPlayout();
  await audioSource.captureFrame(audioFrame);
  }
- } catch (_) {
+ } catch {
  }
  };
  const drainQueue = async () => {
@@ -1069,7 +1133,9 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  }
  });
  };
- runAudioFfmpeg().catch((e) => this.audioDebug("audio ffmpeg error", { error: String(e) }));
+ runAudioFfmpeg().catch(
+ (e) => this.audioDebug("audio ffmpeg error", { error: String(e) })
+ );
  }
  this.emit("requestVoiceStateSync", {
  self_stream: sourceOption === "screenshare",
@@ -1097,17 +1163,21 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  const { execFile } = await import("child_process");
  const { promisify } = await import("util");
  const exec = promisify(execFile);
- const { stdout } = await exec("ffprobe", [
- "-v",
- "error",
- "-select_streams",
- "v:0",
- "-show_entries",
- "stream=width,height",
- "-of",
- "json",
- url
- ], { encoding: "utf8", timeout: 1e4 });
+ const { stdout } = await exec(
+ "ffprobe",
+ [
+ "-v",
+ "error",
+ "-select_streams",
+ "v:0",
+ "-show_entries",
+ "stream=width,height",
+ "-of",
+ "json",
+ url
+ ],
+ { encoding: "utf8", timeout: 1e4 }
+ );
  const parsed = JSON.parse(stdout);
  const stream = parsed?.streams?.[0];
  if (stream?.width && stream?.height) {
@@ -1115,7 +1185,12 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  height = stream.height;
  }
  } catch (probeErr) {
- this.emit("error", new Error(`ffprobe failed: ${probeErr instanceof Error ? probeErr.message : String(probeErr)}`));
+ this.emit(
+ "error",
+ new Error(
+ `ffprobe failed: ${probeErr instanceof Error ? probeErr.message : String(probeErr)}`
+ )
+ );
  return;
  }
  if (options?.width && options?.height) {
@@ -1142,19 +1217,23 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  return;
  }
  let audioFfmpegProc = null;
- let audioSource = new import_rtc_node.AudioSource(SAMPLE_RATE, CHANNELS2);
- let audioTrack = import_rtc_node.LocalAudioTrack.createAudioTrack("audio", audioSource);
+ const audioSource = new import_rtc_node.AudioSource(SAMPLE_RATE, CHANNELS2);
+ const audioTrack = import_rtc_node.LocalAudioTrack.createAudioTrack(
+ "audio",
+ audioSource
+ );
  this.audioSource = audioSource;
  this.audioTrack = audioTrack;
  try {
- await participant.publishTrack(audioTrack, new import_rtc_node.TrackPublishOptions({ source: import_rtc_node.TrackSource.SOURCE_MICROPHONE }));
+ await participant.publishTrack(
+ audioTrack,
+ new import_rtc_node.TrackPublishOptions({ source: import_rtc_node.TrackSource.SOURCE_MICROPHONE })
+ );
  } catch {
  audioTrack.close().catch(() => {
  });
  this.audioTrack = null;
  this.audioSource = null;
- audioSource = null;
- audioTrack = null;
  }
  this._playingVideo = true;
  this.emit("requestVoiceStateSync", {
@@ -1327,20 +1406,24 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  runFFmpeg();
  const runAudioFfmpeg = async () => {
  if (!this._playingVideo || cleanupCalled || !audioSource) return;
- const audioProc = (0, import_node_child_process.spawn)("ffmpeg", [
- "-loglevel",
- "warning",
- "-re",
- "-i",
- url,
- "-vn",
- "-c:a",
- "libopus",
- "-f",
- "webm",
- ...loop ? ["-stream_loop", "-1"] : [],
- "pipe:1"
- ], { stdio: ["ignore", "pipe", "pipe"] });
+ const audioProc = (0, import_node_child_process.spawn)(
+ "ffmpeg",
+ [
+ "-loglevel",
+ "warning",
+ "-re",
+ "-i",
+ url,
+ "-vn",
+ "-c:a",
+ "libopus",
+ "-f",
+ "webm",
+ ...loop ? ["-stream_loop", "-1"] : [],
+ "pipe:1"
+ ],
+ { stdio: ["ignore", "pipe", "pipe"] }
+ );
  audioFfmpegProc = audioProc;
  const { opus: prismOpus } = await import("prism-media");
  const { OpusDecoder } = await import("opus-decoder");
@@ -1369,7 +1452,7 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  if (audioSource.queuedDuration > 500) await audioSource.waitForPlayout();
  await audioSource.captureFrame(audioFrame);
  }
- } catch (_) {
+ } catch {
  }
  };
  const drainQueue = async () => {
@@ -1447,7 +1530,7 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  this._playing = true;
  let sampleBuffer = new Int16Array(0);
  let opusBuffer = new Uint8Array(0);
- let streamEnded = false;
+ let _streamEnded = false;
  let framesCaptured = 0;
  const processOneOpusFrame = async (frame) => {
  if (frame.length < 2) return;
@@ -1509,7 +1592,7 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  this.emit("error", err);
  });
  demuxer.on("end", async () => {
- streamEnded = true;
+ _streamEnded = true;
  this.audioDebug("stream ended", { framesCaptured });
  while (processing || opusFrameQueue.length > 0) {
  await drainOpusQueue();
@@ -1546,9 +1629,17 @@ var LiveKitRtcConnection = class extends import_events2.EventEmitter {
  * Stop video playback and unpublish the video track from the LiveKit room.
  * Safe to call even when no video is playing.
  */
+ _videoCleaning = false;
  stopVideo() {
+ if (this._videoCleaning) return;
  if (this._videoCleanup) {
- this._videoCleanup();
+ this._videoCleaning = true;
+ try {
+ this._videoCleanup();
+ } finally {
+ this._videoCleaning = false;
+ }
+ this._videoCleanup = null;
  return;
  }
  this._playingVideo = false;
@@ -1618,9 +1709,18 @@ var VoiceManager = class extends import_events3.EventEmitter {
  super();
  this.client = client;
  this.shardId = options.shardId ?? 0;
- this.client.on(import_core.Events.VoiceStateUpdate, (data) => this.handleVoiceStateUpdate(data));
- this.client.on(import_core.Events.VoiceServerUpdate, (data) => this.handleVoiceServerUpdate(data));
- this.client.on(import_core.Events.VoiceStatesSync, (data) => this.handleVoiceStatesSync(data));
+ this.client.on(
+ import_core.Events.VoiceStateUpdate,
+ (data) => this.handleVoiceStateUpdate(data)
+ );
+ this.client.on(
+ import_core.Events.VoiceServerUpdate,
+ (data) => this.handleVoiceServerUpdate(data)
+ );
+ this.client.on(
+ import_core.Events.VoiceStatesSync,
+ (data) => this.handleVoiceStatesSync(data)
+ );
  }
  handleVoiceStatesSync(data) {
  let guildMap = this.voiceStates.get(data.guildId);
@@ -1645,7 +1745,10 @@ var VoiceManager = class extends import_events3.EventEmitter {
  handleVoiceStateUpdate(data) {
  const guildId = data.guild_id ?? "";
  if (!guildId) return;
- this.client.emit?.("debug", `[VoiceManager] VoiceStateUpdate guild=${guildId} user=${data.user_id} channel=${data.channel_id ?? "null"} (bot=${this.client.user?.id})`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] VoiceStateUpdate guild=${guildId} user=${data.user_id} channel=${data.channel_id ?? "null"} (bot=${this.client.user?.id})`
+ );
  let guildMap = this.voiceStates.get(guildId);
  if (!guildMap) {
  guildMap = /* @__PURE__ */ new Map();
@@ -1658,7 +1761,10 @@ var VoiceManager = class extends import_events3.EventEmitter {
  this.storeConnectionId(guildId, data.connection_id);
  }
  if (pending && isBot) {
- this.client.emit?.("debug", `[VoiceManager] VoiceStateUpdate for bot - completing pending guild ${guildId}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] VoiceStateUpdate for bot - completing pending guild ${guildId}`
+ );
  pending.state = data;
  this.tryCompletePending(guildId);
  }
@@ -1668,15 +1774,29 @@ var VoiceManager = class extends import_events3.EventEmitter {
  const pending = this.pending.get(guildId);
  if (pending) {
  const hasToken = !!(data.token && data.token.length > 0);
- this.client.emit?.("debug", `[VoiceManager] VoiceServerUpdate guild=${guildId} endpoint=${data.endpoint ?? "null"} token=${hasToken ? "yes" : "NO"}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] VoiceServerUpdate guild=${guildId} endpoint=${data.endpoint ?? "null"} token=${hasToken ? "yes" : "NO"}`
+ );
  pending.server = data;
  this.tryCompletePending(guildId);
  return;
  }
+ const userId = this.client.user?.id;
+ if (!userId) {
+ this.client.emit?.(
+ "debug",
+ "[VoiceManager] Client user not available. Ensure the client is logged in."
+ );
+ return;
+ }
  const conn = this.connections.get(guildId);
  if (!conn) return;
  if (!data.endpoint || !data.token) {
- this.client.emit?.("debug", `[VoiceManager] Voice server endpoint null for guild ${guildId}; disconnecting until new allocation`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Voice server endpoint null for guild ${guildId}; disconnecting until new allocation`
+ );
  conn.destroy();
  this.connections.delete(guildId);
  return;
@@ -1686,17 +1806,20 @@ var VoiceManager = class extends import_events3.EventEmitter {
  return;
  }
  const channel = conn.channel;
- this.client.emit?.("debug", `[VoiceManager] Voice server migration for guild ${guildId}; reconnecting`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Voice server migration for guild ${guildId}; reconnecting`
+ );
  conn.destroy();
  this.connections.delete(guildId);
  this.storeConnectionId(guildId, data.connection_id);
  const ConnClass = LiveKitRtcConnection;
- const newConn = new ConnClass(this.client, channel, this.client.user.id);
+ const newConn = new ConnClass(this.client, channel, userId);
  this.registerConnection(guildId, newConn);
  const state = {
  guild_id: guildId,
  channel_id: channel.id,
- user_id: this.client.user.id,
+ user_id: userId,
  session_id: ""
  };
  newConn.connect(data, state).catch((e) => {
@@ -1741,18 +1864,32 @@ var VoiceManager = class extends import_events3.EventEmitter {
  const hasState = !!pending.state;
  if (!useLiveKit && !hasState) return;
  if (useLiveKit && !hasState) {
- this.client.emit?.("debug", `[VoiceManager] Proceeding with VoiceServerUpdate only (LiveKit does not require VoiceStateUpdate)`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Proceeding with VoiceServerUpdate only (LiveKit does not require VoiceStateUpdate)`
+ );
+ }
+ const userId = this.client.user?.id;
+ if (!userId) {
+ this.client.emit?.(
+ "debug",
+ "[VoiceManager] Client user not available. Ensure the client is logged in."
+ );
+ return;
  }
  const state = pending.state ?? {
  guild_id: guildId,
  channel_id: pending.channel.id,
- user_id: this.client.user.id,
+ user_id: userId,
  session_id: ""
  };
- this.storeConnectionId(guildId, pending.server.connection_id ?? state.connection_id);
+ this.storeConnectionId(
+ guildId,
+ pending.server.connection_id ?? state.connection_id
+ );
  this.pending.delete(guildId);
  const ConnClass = useLiveKit ? LiveKitRtcConnection : VoiceConnection;
- const conn = new ConnClass(this.client, pending.channel, this.client.user.id);
+ const conn = new ConnClass(this.client, pending.channel, userId);
  this.registerConnection(guildId, conn);
  conn.connect(pending.server, state).then(
  () => pending.resolve(conn),
@@ -1773,7 +1910,10 @@ var VoiceManager = class extends import_events3.EventEmitter {
  this.connections.delete(channel.guildId);
  }
  return new Promise((resolve, reject) => {
- this.client.emit?.("debug", `[VoiceManager] Requesting voice join guild=${channel.guildId} channel=${channel.id}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Requesting voice join guild=${channel.guildId} channel=${channel.id}`
+ );
  const timeout = setTimeout(() => {
  if (this.pending.has(channel.guildId)) {
  this.pending.delete(channel.guildId);
@@ -1847,7 +1987,10 @@ var VoiceManager = class extends import_events3.EventEmitter {
  if (!conn) return;
  const connectionId = this.connectionIds.get(guildId);
  if (!connectionId) {
- this.client.emit?.("debug", `[VoiceManager] Skipping voice state sync: no connection_id for guild ${guildId}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Skipping voice state sync: no connection_id for guild ${guildId}`
+ );
  return;
  }
  this.client.sendToGateway(this.shardId, {
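The stopVideo() change above adds a re-entrancy guard around the stored cleanup callback and clears _videoCleanup afterwards, so the cleanup runs at most once even if it indirectly triggers another stopVideo() call. A simplified TypeScript sketch of that pattern, reduced from the compiled output above (VideoStopper is a hypothetical stand-in for the connection class, not part of the package):

class VideoStopper {
  private _videoCleaning = false;
  private _videoCleanup: (() => void) | null = null;

  stopVideo(): void {
    if (this._videoCleaning) return; // re-entrant call while cleanup is running: ignore
    if (this._videoCleanup) {
      this._videoCleaning = true;
      try {
        this._videoCleanup(); // may emit events whose handlers call stopVideo() again
      } finally {
        this._videoCleaning = false; // always release the guard, even if cleanup throws
      }
      this._videoCleanup = null; // the stored cleanup callback runs at most once
      return;
    }
  }
}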
package/dist/index.mjs CHANGED
@@ -11,7 +11,14 @@ var thumbnail = MINIMAL_PNG_BASE64;
  import { EventEmitter } from "events";
  import * as nacl from "tweetnacl";
  import * as dgram from "dgram";
- var VOICE_WS_OPCODES = { Identify: 0, SelectProtocol: 1, Ready: 2, Heartbeat: 3, SessionDescription: 4, Speaking: 5 };
+ var VOICE_WS_OPCODES = {
+ Identify: 0,
+ SelectProtocol: 1,
+ Ready: 2,
+ Heartbeat: 3,
+ SessionDescription: 4,
+ Speaking: 5
+ };
  var VOICE_VERSION = 4;
  var CHANNELS = 2;
  var OPUS_FRAME_TICKS = 960 * (CHANNELS === 2 ? 2 : 1);
@@ -178,6 +185,10 @@ var VoiceConnection = class extends EventEmitter {
  discovery.writeUInt32BE(this.ssrc, 6);
  socket.send(discovery, 0, discovery.length, remotePort, remoteAddress, () => {
  socket.once("message", (msg) => {
+ if (msg.length < 70) {
+ this.emit("error", new Error("UDP discovery response too short"));
+ return;
+ }
  const len = msg.readUInt16BE(4);
  let ourIp = "";
  let i = 10;
@@ -341,6 +352,10 @@ var VoiceConnection = class extends EventEmitter {
  }
  /** Disconnect and remove all listeners. */
  destroy() {
+ if (this.currentStream) {
+ if (typeof this.currentStream.destroy === "function") this.currentStream.destroy();
+ this.currentStream = null;
+ }
  this.disconnect();
  this.removeAllListeners();
  }
@@ -367,8 +382,7 @@ import {
  function isLiveKitEndpoint(endpoint, token) {
  if (!endpoint || typeof endpoint !== "string") return false;
  const s = endpoint.trim();
- if (s.includes("access_token=") || s.includes("/rtc") && s.includes("?"))
- return true;
+ if (s.includes("access_token=") || s.includes("/rtc") && s.includes("?")) return true;
  if (token && !s.includes("?")) return true;
  return false;
  }
@@ -380,7 +394,7 @@ function buildLiveKitUrlForRtcSdk(endpoint) {

  // src/opusUtils.ts
  function parseOpusPacketBoundaries(buffer) {
- if (buffer.length < 1) return null;
+ if (buffer.length < 2) return null;
  const toc = buffer[0];
  const c = toc & 3;
  const tocSingle = toc & 252 | 0;
@@ -460,7 +474,8 @@ function getNaluByteLength(nalu) {
  }
  function toUint8Array(nalu) {
  if (nalu instanceof Uint8Array) return nalu;
- if (ArrayBuffer.isView(nalu)) return new Uint8Array(nalu.buffer, nalu.byteOffset, nalu.byteLength);
+ if (ArrayBuffer.isView(nalu))
+ return new Uint8Array(nalu.buffer, nalu.byteOffset, nalu.byteLength);
  if (nalu instanceof ArrayBuffer) return new Uint8Array(nalu);
  if (Array.isArray(nalu)) return new Uint8Array(nalu);
  return new Uint8Array(0);
@@ -590,7 +605,10 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  return ep === (this.lastServerEndpoint ?? "") && token === (this.lastServerToken ?? "");
  }
  playOpus(_stream) {
- this.emit("error", new Error("LiveKit: playOpus not supported; use play(url) with a WebM/Opus URL"));
+ this.emit(
+ "error",
+ new Error("LiveKit: playOpus not supported; use play(url) with a WebM/Opus URL")
+ );
  }
  /**
  * Connect to the LiveKit room using voice server and state from the gateway.
@@ -686,7 +704,10 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  return;
  }
  } else if (urlOrBuffer instanceof Uint8Array) {
- arrayBuffer = urlOrBuffer.buffer.slice(urlOrBuffer.byteOffset, urlOrBuffer.byteOffset + urlOrBuffer.byteLength);
+ arrayBuffer = urlOrBuffer.buffer.slice(
+ urlOrBuffer.byteOffset,
+ urlOrBuffer.byteOffset + urlOrBuffer.byteLength
+ );
  } else {
  arrayBuffer = urlOrBuffer;
  }
@@ -698,13 +719,19 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  this.emit("error", e);
  };
  file.onReady = (info) => {
- const tracks = info.tracks ?? [];
+ if (!info.tracks?.length) {
+ this.emit("error", new Error("No tracks found in MP4 file"));
+ return;
+ }
+ const tracks = info.tracks;
  const videoTrack = tracks.find((t) => t.type === "video");
  if (!videoTrack) {
  this.emit("error", new Error("No video track in MP4"));
  return;
  }
- const audioTrackInfo = tracks.find((t) => t.type === "audio" && t.codec.startsWith("mp4a"));
+ const audioTrackInfo = tracks.find(
+ (t) => t.type === "audio" && t.codec.startsWith("mp4a")
+ );
  const width = videoTrack.video?.width ?? 640;
  const height = videoTrack.video?.height ?? 480;
  const totalSamples = videoTrack.nb_samples ?? Number.POSITIVE_INFINITY;
@@ -744,7 +771,11 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  const { codedWidth, codedHeight } = frame;
  if (codedWidth <= 0 || codedHeight <= 0) {
  frame.close();
- if (VOICE_DEBUG) this.audioDebug("video frame skipped (invalid dimensions)", { codedWidth, codedHeight });
+ if (VOICE_DEBUG)
+ this.audioDebug("video frame skipped (invalid dimensions)", {
+ codedWidth,
+ codedHeight
+ });
  return;
  }
  try {
@@ -758,13 +789,22 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  frame.close();
  const expectedI420Size = Math.ceil(codedWidth * codedHeight * 3 / 2);
  if (buffer.byteLength < expectedI420Size) {
- if (VOICE_DEBUG) this.audioDebug("video frame skipped (buffer too small)", { codedWidth, codedHeight });
+ if (VOICE_DEBUG)
+ this.audioDebug("video frame skipped (buffer too small)", {
+ codedWidth,
+ codedHeight
+ });
  return;
  }
  while (frameQueue.length >= MAX_QUEUED_FRAMES) {
  frameQueue.shift();
  }
- frameQueue.push({ buffer, width: codedWidth, height: codedHeight, timestampMs: frameTimeMs });
+ frameQueue.push({
+ buffer,
+ width: codedWidth,
+ height: codedHeight,
+ timestampMs: frameTimeMs
+ });
  } catch (err) {
  if (VOICE_DEBUG) this.audioDebug("video frame error", { error: String(err) });
  }
@@ -799,7 +839,7 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  }
  try {
  decoder.close();
- } catch (_) {
+ } catch {
  }
  if (audioFfmpegProc && !audioFfmpegProc.killed) {
  audioFfmpegProc.kill("SIGKILL");
@@ -878,7 +918,10 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  decoder.decode(chunk);
  }
  } catch (decodeErr) {
- this.emit("error", decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr)));
+ this.emit(
+ "error",
+ decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr))
+ );
  doCleanup();
  return;
  }
@@ -914,7 +957,10 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  decoder.decode(chunk);
  }
  } catch (decodeErr) {
- this.emit("error", decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr)));
+ this.emit(
+ "error",
+ decodeErr instanceof Error ? decodeErr : new Error(String(decodeErr))
+ );
  doCleanup();
  return;
  }
@@ -953,11 +999,20 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  if (frameQueue.length > 0 && frameQueue[0].timestampMs <= elapsed) {
  const f = frameQueue.shift();
  try {
- const livekitFrame = new VideoFrame(f.buffer, f.width, f.height, VideoBufferType.I420);
+ const livekitFrame = new VideoFrame(
+ f.buffer,
+ f.width,
+ f.height,
+ VideoBufferType.I420
+ );
  source.captureFrame(livekitFrame);
  } catch (captureErr) {
- if (VOICE_DEBUG) this.audioDebug("captureFrame error", { error: String(captureErr) });
- this.emit("error", captureErr instanceof Error ? captureErr : new Error(String(captureErr)));
+ if (VOICE_DEBUG)
+ this.audioDebug("captureFrame error", { error: String(captureErr) });
+ this.emit(
+ "error",
+ captureErr instanceof Error ? captureErr : new Error(String(captureErr))
+ );
  }
  }
  }, FRAME_INTERVAL_MS);
@@ -970,20 +1025,24 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  const { OpusDecoder } = await import("opus-decoder");
  const runAudioFfmpeg = async () => {
  if (!this._playingVideo || cleanupCalled || !audioSource) return;
- const audioProc = spawn("ffmpeg", [
- "-loglevel",
- "warning",
- "-re",
- "-i",
- videoUrl,
- "-vn",
- "-c:a",
- "libopus",
- "-f",
- "webm",
- ...loop ? ["-stream_loop", "-1"] : [],
- "pipe:1"
- ], { stdio: ["ignore", "pipe", "pipe"] });
+ const audioProc = spawn(
+ "ffmpeg",
+ [
+ "-loglevel",
+ "warning",
+ "-re",
+ "-i",
+ videoUrl,
+ "-vn",
+ "-c:a",
+ "libopus",
+ "-f",
+ "webm",
+ ...loop ? ["-stream_loop", "-1"] : [],
+ "pipe:1"
+ ],
+ { stdio: ["ignore", "pipe", "pipe"] }
+ );
  audioFfmpegProc = audioProc;
  const demuxer = new prismOpus.WebmDemuxer();
  if (audioProc.stdout) audioProc.stdout.pipe(demuxer);
@@ -1006,11 +1065,16 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  while (sampleBuffer.length >= FRAME_SAMPLES && this._playingVideo && audioSource) {
  const outSamples = sampleBuffer.subarray(0, FRAME_SAMPLES);
  sampleBuffer = sampleBuffer.subarray(FRAME_SAMPLES).slice();
- const audioFrame = new AudioFrame(outSamples, SAMPLE_RATE, CHANNELS2, FRAME_SAMPLES);
+ const audioFrame = new AudioFrame(
+ outSamples,
+ SAMPLE_RATE,
+ CHANNELS2,
+ FRAME_SAMPLES
+ );
  if (audioSource.queuedDuration > 500) await audioSource.waitForPlayout();
  await audioSource.captureFrame(audioFrame);
  }
- } catch (_) {
+ } catch {
  }
  };
  const drainQueue = async () => {
@@ -1041,7 +1105,9 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  }
  });
  };
- runAudioFfmpeg().catch((e) => this.audioDebug("audio ffmpeg error", { error: String(e) }));
+ runAudioFfmpeg().catch(
+ (e) => this.audioDebug("audio ffmpeg error", { error: String(e) })
+ );
  }
  this.emit("requestVoiceStateSync", {
  self_stream: sourceOption === "screenshare",
@@ -1069,17 +1135,21 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  const { execFile } = await import("child_process");
  const { promisify } = await import("util");
  const exec = promisify(execFile);
- const { stdout } = await exec("ffprobe", [
- "-v",
- "error",
- "-select_streams",
- "v:0",
- "-show_entries",
- "stream=width,height",
- "-of",
- "json",
- url
- ], { encoding: "utf8", timeout: 1e4 });
+ const { stdout } = await exec(
+ "ffprobe",
+ [
+ "-v",
+ "error",
+ "-select_streams",
+ "v:0",
+ "-show_entries",
+ "stream=width,height",
+ "-of",
+ "json",
+ url
+ ],
+ { encoding: "utf8", timeout: 1e4 }
+ );
  const parsed = JSON.parse(stdout);
  const stream = parsed?.streams?.[0];
  if (stream?.width && stream?.height) {
@@ -1087,7 +1157,12 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  height = stream.height;
  }
  } catch (probeErr) {
- this.emit("error", new Error(`ffprobe failed: ${probeErr instanceof Error ? probeErr.message : String(probeErr)}`));
+ this.emit(
+ "error",
+ new Error(
+ `ffprobe failed: ${probeErr instanceof Error ? probeErr.message : String(probeErr)}`
+ )
+ );
  return;
  }
  if (options?.width && options?.height) {
@@ -1114,19 +1189,23 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  return;
  }
  let audioFfmpegProc = null;
- let audioSource = new AudioSource(SAMPLE_RATE, CHANNELS2);
- let audioTrack = LocalAudioTrack.createAudioTrack("audio", audioSource);
+ const audioSource = new AudioSource(SAMPLE_RATE, CHANNELS2);
+ const audioTrack = LocalAudioTrack.createAudioTrack(
+ "audio",
+ audioSource
+ );
  this.audioSource = audioSource;
  this.audioTrack = audioTrack;
  try {
- await participant.publishTrack(audioTrack, new TrackPublishOptions({ source: TrackSource.SOURCE_MICROPHONE }));
+ await participant.publishTrack(
+ audioTrack,
+ new TrackPublishOptions({ source: TrackSource.SOURCE_MICROPHONE })
+ );
  } catch {
  audioTrack.close().catch(() => {
  });
  this.audioTrack = null;
  this.audioSource = null;
- audioSource = null;
- audioTrack = null;
  }
  this._playingVideo = true;
  this.emit("requestVoiceStateSync", {
@@ -1299,20 +1378,24 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  runFFmpeg();
  const runAudioFfmpeg = async () => {
  if (!this._playingVideo || cleanupCalled || !audioSource) return;
- const audioProc = spawn("ffmpeg", [
- "-loglevel",
- "warning",
- "-re",
- "-i",
- url,
- "-vn",
- "-c:a",
- "libopus",
- "-f",
- "webm",
- ...loop ? ["-stream_loop", "-1"] : [],
- "pipe:1"
- ], { stdio: ["ignore", "pipe", "pipe"] });
+ const audioProc = spawn(
+ "ffmpeg",
+ [
+ "-loglevel",
+ "warning",
+ "-re",
+ "-i",
+ url,
+ "-vn",
+ "-c:a",
+ "libopus",
+ "-f",
+ "webm",
+ ...loop ? ["-stream_loop", "-1"] : [],
+ "pipe:1"
+ ],
+ { stdio: ["ignore", "pipe", "pipe"] }
+ );
  audioFfmpegProc = audioProc;
  const { opus: prismOpus } = await import("prism-media");
  const { OpusDecoder } = await import("opus-decoder");
@@ -1341,7 +1424,7 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  if (audioSource.queuedDuration > 500) await audioSource.waitForPlayout();
  await audioSource.captureFrame(audioFrame);
  }
- } catch (_) {
+ } catch {
  }
  };
  const drainQueue = async () => {
@@ -1419,7 +1502,7 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  this._playing = true;
  let sampleBuffer = new Int16Array(0);
  let opusBuffer = new Uint8Array(0);
- let streamEnded = false;
+ let _streamEnded = false;
  let framesCaptured = 0;
  const processOneOpusFrame = async (frame) => {
  if (frame.length < 2) return;
@@ -1481,7 +1564,7 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  this.emit("error", err);
  });
  demuxer.on("end", async () => {
- streamEnded = true;
+ _streamEnded = true;
  this.audioDebug("stream ended", { framesCaptured });
  while (processing || opusFrameQueue.length > 0) {
  await drainOpusQueue();
@@ -1518,9 +1601,17 @@ var LiveKitRtcConnection = class extends EventEmitter2 {
  * Stop video playback and unpublish the video track from the LiveKit room.
  * Safe to call even when no video is playing.
  */
+ _videoCleaning = false;
  stopVideo() {
+ if (this._videoCleaning) return;
  if (this._videoCleanup) {
- this._videoCleanup();
+ this._videoCleaning = true;
+ try {
+ this._videoCleanup();
+ } finally {
+ this._videoCleaning = false;
+ }
+ this._videoCleanup = null;
  return;
  }
  this._playingVideo = false;
@@ -1590,9 +1681,18 @@ var VoiceManager = class extends EventEmitter3 {
  super();
  this.client = client;
  this.shardId = options.shardId ?? 0;
- this.client.on(Events.VoiceStateUpdate, (data) => this.handleVoiceStateUpdate(data));
- this.client.on(Events.VoiceServerUpdate, (data) => this.handleVoiceServerUpdate(data));
- this.client.on(Events.VoiceStatesSync, (data) => this.handleVoiceStatesSync(data));
+ this.client.on(
+ Events.VoiceStateUpdate,
+ (data) => this.handleVoiceStateUpdate(data)
+ );
+ this.client.on(
+ Events.VoiceServerUpdate,
+ (data) => this.handleVoiceServerUpdate(data)
+ );
+ this.client.on(
+ Events.VoiceStatesSync,
+ (data) => this.handleVoiceStatesSync(data)
+ );
  }
  handleVoiceStatesSync(data) {
  let guildMap = this.voiceStates.get(data.guildId);
@@ -1617,7 +1717,10 @@ var VoiceManager = class extends EventEmitter3 {
  handleVoiceStateUpdate(data) {
  const guildId = data.guild_id ?? "";
  if (!guildId) return;
- this.client.emit?.("debug", `[VoiceManager] VoiceStateUpdate guild=${guildId} user=${data.user_id} channel=${data.channel_id ?? "null"} (bot=${this.client.user?.id})`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] VoiceStateUpdate guild=${guildId} user=${data.user_id} channel=${data.channel_id ?? "null"} (bot=${this.client.user?.id})`
+ );
  let guildMap = this.voiceStates.get(guildId);
  if (!guildMap) {
  guildMap = /* @__PURE__ */ new Map();
@@ -1630,7 +1733,10 @@ var VoiceManager = class extends EventEmitter3 {
  this.storeConnectionId(guildId, data.connection_id);
  }
  if (pending && isBot) {
- this.client.emit?.("debug", `[VoiceManager] VoiceStateUpdate for bot - completing pending guild ${guildId}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] VoiceStateUpdate for bot - completing pending guild ${guildId}`
+ );
  pending.state = data;
  this.tryCompletePending(guildId);
  }
@@ -1640,15 +1746,29 @@ var VoiceManager = class extends EventEmitter3 {
  const pending = this.pending.get(guildId);
  if (pending) {
  const hasToken = !!(data.token && data.token.length > 0);
- this.client.emit?.("debug", `[VoiceManager] VoiceServerUpdate guild=${guildId} endpoint=${data.endpoint ?? "null"} token=${hasToken ? "yes" : "NO"}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] VoiceServerUpdate guild=${guildId} endpoint=${data.endpoint ?? "null"} token=${hasToken ? "yes" : "NO"}`
+ );
  pending.server = data;
  this.tryCompletePending(guildId);
  return;
  }
+ const userId = this.client.user?.id;
+ if (!userId) {
+ this.client.emit?.(
+ "debug",
+ "[VoiceManager] Client user not available. Ensure the client is logged in."
+ );
+ return;
+ }
  const conn = this.connections.get(guildId);
  if (!conn) return;
  if (!data.endpoint || !data.token) {
- this.client.emit?.("debug", `[VoiceManager] Voice server endpoint null for guild ${guildId}; disconnecting until new allocation`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Voice server endpoint null for guild ${guildId}; disconnecting until new allocation`
+ );
  conn.destroy();
  this.connections.delete(guildId);
  return;
@@ -1658,17 +1778,20 @@ var VoiceManager = class extends EventEmitter3 {
  return;
  }
  const channel = conn.channel;
- this.client.emit?.("debug", `[VoiceManager] Voice server migration for guild ${guildId}; reconnecting`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Voice server migration for guild ${guildId}; reconnecting`
+ );
  conn.destroy();
  this.connections.delete(guildId);
  this.storeConnectionId(guildId, data.connection_id);
  const ConnClass = LiveKitRtcConnection;
- const newConn = new ConnClass(this.client, channel, this.client.user.id);
+ const newConn = new ConnClass(this.client, channel, userId);
  this.registerConnection(guildId, newConn);
  const state = {
  guild_id: guildId,
  channel_id: channel.id,
- user_id: this.client.user.id,
+ user_id: userId,
  session_id: ""
  };
  newConn.connect(data, state).catch((e) => {
@@ -1713,18 +1836,32 @@ var VoiceManager = class extends EventEmitter3 {
  const hasState = !!pending.state;
  if (!useLiveKit && !hasState) return;
  if (useLiveKit && !hasState) {
- this.client.emit?.("debug", `[VoiceManager] Proceeding with VoiceServerUpdate only (LiveKit does not require VoiceStateUpdate)`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Proceeding with VoiceServerUpdate only (LiveKit does not require VoiceStateUpdate)`
+ );
+ }
+ const userId = this.client.user?.id;
+ if (!userId) {
+ this.client.emit?.(
+ "debug",
+ "[VoiceManager] Client user not available. Ensure the client is logged in."
+ );
+ return;
  }
  const state = pending.state ?? {
  guild_id: guildId,
  channel_id: pending.channel.id,
- user_id: this.client.user.id,
+ user_id: userId,
  session_id: ""
  };
- this.storeConnectionId(guildId, pending.server.connection_id ?? state.connection_id);
+ this.storeConnectionId(
+ guildId,
+ pending.server.connection_id ?? state.connection_id
+ );
  this.pending.delete(guildId);
  const ConnClass = useLiveKit ? LiveKitRtcConnection : VoiceConnection;
- const conn = new ConnClass(this.client, pending.channel, this.client.user.id);
+ const conn = new ConnClass(this.client, pending.channel, userId);
  this.registerConnection(guildId, conn);
  conn.connect(pending.server, state).then(
  () => pending.resolve(conn),
@@ -1745,7 +1882,10 @@ var VoiceManager = class extends EventEmitter3 {
  this.connections.delete(channel.guildId);
  }
  return new Promise((resolve, reject) => {
- this.client.emit?.("debug", `[VoiceManager] Requesting voice join guild=${channel.guildId} channel=${channel.id}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Requesting voice join guild=${channel.guildId} channel=${channel.id}`
+ );
  const timeout = setTimeout(() => {
  if (this.pending.has(channel.guildId)) {
  this.pending.delete(channel.guildId);
@@ -1819,7 +1959,10 @@ var VoiceManager = class extends EventEmitter3 {
  if (!conn) return;
  const connectionId = this.connectionIds.get(guildId);
  if (!connectionId) {
- this.client.emit?.("debug", `[VoiceManager] Skipping voice state sync: no connection_id for guild ${guildId}`);
+ this.client.emit?.(
+ "debug",
+ `[VoiceManager] Skipping voice state sync: no connection_id for guild ${guildId}`
+ );
  return;
  }
  this.client.sendToGateway(this.shardId, {
package/package.json CHANGED
@@ -3,8 +3,22 @@
  "publishConfig": {
  "access": "public"
  },
- "version": "1.0.7",
+ "version": "1.0.9",
  "description": "Voice support for Fluxer bots",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/fluxerjs/core.git",
+ "directory": "packages/voice"
+ },
+ "bugs": "https://github.com/fluxerjs/core/issues",
+ "homepage": "https://fluxerjs.blstmo.com",
+ "keywords": [
+ "fluxer",
+ "voice",
+ "audio",
+ "livekit"
+ ],
+ "license": "AGPL-3.0",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
@@ -26,9 +40,9 @@
  "prism-media": "^1.3.5",
  "tweetnacl": "^1.0.3",
  "ws": "^8.18.0",
- "@fluxerjs/core": "1.0.7",
- "@fluxerjs/collection": "1.0.7",
- "@fluxerjs/types": "1.0.7"
+ "@fluxerjs/collection": "1.0.9",
+ "@fluxerjs/core": "1.0.9",
+ "@fluxerjs/types": "1.0.9"
  },
  "devDependencies": {
  "@types/node": "^20.0.0",
@@ -44,6 +58,10 @@
  },
  "scripts": {
  "build": "tsup src/index.ts --format cjs,esm --dts",
- "clean": "rm -rf dist"
+ "clean": "rm -rf dist",
+ "lint": "eslint src --max-warnings 0 --config ../../eslint.config.js",
+ "lint:fix": "eslint src --fix --config ../../eslint.config.js",
+ "test": "vitest run --passWithNoTests",
+ "test:coverage": "vitest run --coverage --passWithNoTests"
  }
  }