livekit-client 2.1.4 → 2.2.0

Files changed (47)
  1. package/dist/livekit-client.esm.mjs +152 -64
  2. package/dist/livekit-client.esm.mjs.map +1 -1
  3. package/dist/livekit-client.umd.js +1 -1
  4. package/dist/livekit-client.umd.js.map +1 -1
  5. package/dist/src/index.d.ts +2 -2
  6. package/dist/src/index.d.ts.map +1 -1
  7. package/dist/src/room/RTCEngine.d.ts +2 -2
  8. package/dist/src/room/RTCEngine.d.ts.map +1 -1
  9. package/dist/src/room/Room.d.ts +4 -1
  10. package/dist/src/room/Room.d.ts.map +1 -1
  11. package/dist/src/room/events.d.ts +12 -1
  12. package/dist/src/room/events.d.ts.map +1 -1
  13. package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
  14. package/dist/src/room/participant/Participant.d.ts +6 -3
  15. package/dist/src/room/participant/Participant.d.ts.map +1 -1
  16. package/dist/src/room/participant/RemoteParticipant.d.ts +3 -3
  17. package/dist/src/room/participant/RemoteParticipant.d.ts.map +1 -1
  18. package/dist/src/room/participant/publishUtils.d.ts.map +1 -1
  19. package/dist/src/room/track/LocalTrack.d.ts +1 -1
  20. package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
  21. package/dist/src/room/track/LocalVideoTrack.d.ts +2 -1
  22. package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
  23. package/dist/src/room/track/create.d.ts.map +1 -1
  24. package/dist/src/room/track/options.d.ts +9 -0
  25. package/dist/src/room/track/options.d.ts.map +1 -1
  26. package/dist/ts4.2/src/index.d.ts +2 -2
  27. package/dist/ts4.2/src/room/RTCEngine.d.ts +2 -2
  28. package/dist/ts4.2/src/room/Room.d.ts +4 -1
  29. package/dist/ts4.2/src/room/events.d.ts +12 -1
  30. package/dist/ts4.2/src/room/participant/Participant.d.ts +7 -3
  31. package/dist/ts4.2/src/room/participant/RemoteParticipant.d.ts +3 -3
  32. package/dist/ts4.2/src/room/track/LocalTrack.d.ts +1 -1
  33. package/dist/ts4.2/src/room/track/LocalVideoTrack.d.ts +2 -1
  34. package/dist/ts4.2/src/room/track/options.d.ts +9 -0
  35. package/package.json +1 -1
  36. package/src/index.ts +2 -1
  37. package/src/room/RTCEngine.ts +23 -6
  38. package/src/room/Room.ts +39 -10
  39. package/src/room/events.ts +14 -1
  40. package/src/room/participant/LocalParticipant.ts +36 -25
  41. package/src/room/participant/Participant.ts +14 -1
  42. package/src/room/participant/RemoteParticipant.ts +17 -4
  43. package/src/room/participant/publishUtils.ts +4 -0
  44. package/src/room/track/LocalTrack.ts +13 -9
  45. package/src/room/track/LocalVideoTrack.ts +9 -2
  46. package/src/room/track/create.ts +37 -27
  47. package/src/room/track/options.ts +15 -0
@@ -1233,10 +1233,12 @@ class BinaryReader {
  return [fieldNo, wireType];
  }
  /**
- * Skip one element on the wire and return the skipped data.
- * Supports WireType.StartGroup since v2.0.0-alpha.23.
+ * Skip one element and return the skipped data.
+ *
+ * When skipping StartGroup, provide the tags field number to check for
+ * matching field number in the EndGroup tag.
  */
- skip(wireType) {
+ skip(wireType, fieldNo) {
  let start = this.pos;
  switch (wireType) {
  case WireType.Varint:
@@ -1258,10 +1260,15 @@ class BinaryReader {
  this.pos += len;
  break;
  case WireType.StartGroup:
- // TODO check for matching field numbers in StartGroup / EndGroup tags
- let t;
- while ((t = this.tag()[1]) !== WireType.EndGroup) {
- this.skip(t);
+ for (;;) {
+ const [fn, wt] = this.tag();
+ if (wt === WireType.EndGroup) {
+ if (fieldNo !== undefined && fn !== fieldNo) {
+ throw new Error("invalid end group tag");
+ }
+ break;
+ }
+ this.skip(wt, fn);
  }
  break;
  default:
@@ -1651,7 +1658,7 @@ function setExtension(message, extension, value, options) {
  const reader = readOpt.readerFactory(writer.finish());
  while (reader.pos < reader.len) {
  const [no, wireType] = reader.tag();
- const data = reader.skip(wireType);
+ const data = reader.skip(wireType, no);
  message.getType().runtime.bin.onUnknownField(message, no, wireType, data);
  }
  }
@@ -2459,12 +2466,12 @@ function makeBinaryFormat() {
  let fieldNo, wireType;
  while (reader.pos < end) {
  [fieldNo, wireType] = reader.tag();
- if (wireType == WireType.EndGroup) {
+ if (delimitedMessageEncoding === true && wireType == WireType.EndGroup) {
  break;
  }
  const field = type.fields.find(fieldNo);
  if (!field) {
- const data = reader.skip(wireType);
+ const data = reader.skip(wireType, fieldNo);
  if (options.readUnknownFields) {
  this.onUnknownField(message, fieldNo, wireType, data);
  }
@@ -2825,7 +2832,7 @@ function makeUtilCommon() {
  const localName = member.localName,
  t = target,
  s = source;
- if (s[localName] === undefined) {
+ if (s[localName] == null) {
  // TODO if source is a Message instance, we should use isFieldSet() here to support future field presence
  continue;
  }
@@ -10040,6 +10047,12 @@ var RoomEvent;
  * args: (payload: Uint8Array, participant: [[Participant]], kind: [[DataPacket_Kind]], topic?: string)
  */
  RoomEvent["DataReceived"] = "dataReceived";
+ /**
+ * SIP DTMF tones received from another participant.
+ *
+ * args: (participant: [[Participant]], dtmf: [[DataPacket_Kind]])
+ */
+ RoomEvent["SipDTMFReceived"] = "sipDTMFReceived";
  /**
  * Transcription received from a participant's track.
  * @beta
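
For orientation, a minimal listener for the new room-level event; this sketch is not part of the diff, and it assumes the handler receives the SipDTMF payload first and the sending participant second, matching the emit call in the handleSipDtmf hunk further down:

import { Room, RoomEvent } from 'livekit-client';

const room = new Room();
room.on(RoomEvent.SipDTMFReceived, (dtmf, participant) => {
  // dtmf carries the received tone; participant may be undefined if the sender is not known
  console.log('SIP DTMF from', participant?.identity, dtmf);
});
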
@@ -10226,6 +10239,12 @@ var ParticipantEvent;
  * args: (payload: Uint8Array, kind: [[DataPacket_Kind]])
  */
  ParticipantEvent["DataReceived"] = "dataReceived";
+ /**
+ * SIP DTMF tones received from this participant as sender.
+ *
+ * args: (dtmf: [[DataPacket_Kind]])
+ */
+ ParticipantEvent["SipDTMFReceived"] = "sipDTMFReceived";
  /**
  * Transcription received from this participant as data source.
  * @beta
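
The same tones also surface on the sending participant; a short illustrative sketch (not part of the diff), assuming only the SipDTMF payload is passed to the participant-level handler, as in the emit call in handleSipDtmf below:

import { ParticipantEvent } from 'livekit-client';

for (const participant of room.remoteParticipants.values()) {
  participant.on(ParticipantEvent.SipDTMFReceived, (dtmf) => {
    console.log(`DTMF from ${participant.identity}`, dtmf);
  });
}
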
@@ -10300,7 +10319,6 @@ var EngineEvent;
  EngineEvent["MediaTrackAdded"] = "mediaTrackAdded";
  EngineEvent["ActiveSpeakersUpdate"] = "activeSpeakersUpdate";
  EngineEvent["DataPacketReceived"] = "dataPacketReceived";
- EngineEvent["TranscriptionReceived"] = "transcriptionReceived";
  EngineEvent["RTPVideoMapUpdate"] = "rtpVideoMapUpdate";
  EngineEvent["DCBufferStatusChanged"] = "dcBufferStatusChanged";
  EngineEvent["ParticipantUpdate"] = "participantUpdate";
@@ -10505,7 +10523,7 @@ function getOSVersion(ua) {
  return ua.includes('mac os') ? getMatch(/\(.+?(\d+_\d+(:?_\d+)?)/, ua, 1).replace(/_/g, '.') : undefined;
  }

- var version$1 = "2.1.4";
+ var version$1 = "2.2.0";

  const version = version$1;
  const protocolVersion = 13;
@@ -12120,30 +12138,32 @@ class LocalTrack extends Track {
  var _this2 = this;
  let showProcessedStreamLocally = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
  return function* () {
- var _a, _b;
+ var _a;
  const unlock = yield _this2.processorLock.lock();
  try {
  _this2.log.debug('setting up processor', _this2.logContext);
- _this2.processorElement = (_a = _this2.processorElement) !== null && _a !== void 0 ? _a : document.createElement(_this2.kind);
+ const processorElement = document.createElement(_this2.kind);
  const processorOptions = {
  kind: _this2.kind,
  track: _this2._mediaStreamTrack,
- element: _this2.processorElement,
+ element: processorElement,
  audioContext: _this2.audioContext
  };
  yield processor.init(processorOptions);
+ _this2.log.debug('processor initialized', _this2.logContext);
  if (_this2.processor) {
  yield _this2.stopProcessor();
  }
  if (_this2.kind === 'unknown') {
  throw TypeError('cannot set processor on track of unknown kind');
  }
- attachToElement(_this2._mediaStreamTrack, _this2.processorElement);
- _this2.processorElement.muted = true;
- _this2.processorElement.play().catch(error => _this2.log.error('failed to play processor element', Object.assign(Object.assign({}, _this2.logContext), {
+ attachToElement(_this2._mediaStreamTrack, processorElement);
+ processorElement.muted = true;
+ processorElement.play().catch(error => _this2.log.error('failed to play processor element', Object.assign(Object.assign({}, _this2.logContext), {
  error
  })));
  _this2.processor = processor;
+ _this2.processorElement = processorElement;
  if (_this2.processor.processedTrack) {
  for (const el of _this2.attachedElements) {
  if (el !== _this2.processorElement && showProcessedStreamLocally) {
@@ -12151,7 +12171,7 @@
  attachToElement(_this2.processor.processedTrack, el);
  }
  }
- yield (_b = _this2.sender) === null || _b === void 0 ? void 0 : _b.replaceTrack(_this2.processor.processedTrack);
+ yield (_a = _this2.sender) === null || _a === void 0 ? void 0 : _a.replaceTrack(_this2.processor.processedTrack);
  }
  _this2.emit(TrackEvent.TrackProcessorUpdate, _this2.processor);
  } finally {
@@ -12171,20 +12191,26 @@
  * @returns
  */
  stopProcessor() {
- return __awaiter(this, void 0, void 0, function* () {
- var _a, _b;
- if (!this.processor) return;
- this.log.debug('stopping processor', this.logContext);
- (_a = this.processor.processedTrack) === null || _a === void 0 ? void 0 : _a.stop();
- yield this.processor.destroy();
- this.processor = undefined;
- (_b = this.processorElement) === null || _b === void 0 ? void 0 : _b.remove();
- this.processorElement = undefined;
- // apply original track constraints in case the processor changed them
- yield this._mediaStreamTrack.applyConstraints(this._constraints);
- // force re-setting of the mediaStreamTrack on the sender
- yield this.setMediaStreamTrack(this._mediaStreamTrack, true);
- this.emit(TrackEvent.TrackProcessorUpdate);
+ return __awaiter(this, arguments, void 0, function () {
+ var _this3 = this;
+ let keepElement = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
+ return function* () {
+ var _a, _b;
+ if (!_this3.processor) return;
+ _this3.log.debug('stopping processor', _this3.logContext);
+ (_a = _this3.processor.processedTrack) === null || _a === void 0 ? void 0 : _a.stop();
+ yield _this3.processor.destroy();
+ _this3.processor = undefined;
+ if (!keepElement) {
+ (_b = _this3.processorElement) === null || _b === void 0 ? void 0 : _b.remove();
+ _this3.processorElement = undefined;
+ }
+ // apply original track constraints in case the processor changed them
+ yield _this3._mediaStreamTrack.applyConstraints(_this3._constraints);
+ // force re-setting of the mediaStreamTrack on the sender
+ yield _this3.setMediaStreamTrack(_this3._mediaStreamTrack, true);
+ _this3.emit(TrackEvent.TrackProcessorUpdate);
+ }();
  });
  }
  }
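
As a usage sketch for the processor lifecycle reworked in the two hunks above (illustrative only; blurProcessor stands in for any object implementing the TrackProcessor interface that setProcessor expects):

import { createLocalVideoTrack } from 'livekit-client';

const videoTrack = await createLocalVideoTrack();
// install the processor; a fresh helper element is now created on every call
await videoTrack.setProcessor(blurProcessor);
// remove it again later; with this change the helper element is kept by default,
// pass false to also remove the element
await videoTrack.stopProcessor();
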
@@ -15060,7 +15086,11 @@ function computeVideoEncodings(isScreenShare, width, height, options) {
  // before M113.
  // Announced here: https://groups.google.com/g/discuss-webrtc/c/-QQ3pxrl-fw?pli=1
  const browser = getBrowser();
- if (isSafari() || (browser === null || browser === void 0 ? void 0 : browser.name) === 'Chrome' && compareVersions(browser === null || browser === void 0 ? void 0 : browser.version, '113') < 0) {
+ if (isSafari() ||
+ // Even tho RN runs M114, it does not produce SVC layers when a single encoding
+ // is provided. So we'll use the legacy SVC specification for now.
+ // TODO: when we upstream libwebrtc, this will need additional verification
+ isReactNative() || (browser === null || browser === void 0 ? void 0 : browser.name) === 'Chrome' && compareVersions(browser === null || browser === void 0 ? void 0 : browser.version, '113') < 0) {
  const bitratesRatio = sm.suffix == 'h' ? 2 : 3;
  for (let i = 0; i < sm.spatial; i += 1) {
  // in legacy SVC, scaleResolutionDownBy cannot be set
@@ -15311,6 +15341,9 @@ function getDefaultDegradationPreference(track) {

  const refreshSubscribedCodecAfterNewCodec = 5000;
  class LocalVideoTrack extends LocalTrack {
+ get sender() {
+ return this._sender;
+ }
  set sender(sender) {
  this._sender = sender;
  if (this.degradationPreference) {
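
The new getter exposes the underlying RTCRtpSender once the track is published; a brief hedged sketch using standard WebRTC APIs (not part of the diff):

const sender = videoTrack.sender; // RTCRtpSender | undefined
if (sender) {
  const params = sender.getParameters();
  console.log('active encodings', params.encodings);
}
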
@@ -15980,7 +16013,7 @@ class RTCEngine extends eventsExports.EventEmitter {
  }();
  });
  this.handleDataMessage = message => __awaiter(this, void 0, void 0, function* () {
- var _c, _d, _e;
+ var _c, _d;
  // make sure to respect incoming data message order by processing message events one after the other
  const unlock = yield this.dataProcessLock.lock();
  try {
@@ -16000,10 +16033,12 @@ class RTCEngine extends eventsExports.EventEmitter {
  if (((_c = dp.value) === null || _c === void 0 ? void 0 : _c.case) === 'speaker') {
  // dispatch speaker updates
  this.emit(EngineEvent.ActiveSpeakersUpdate, dp.value.value.speakers);
- } else if (((_d = dp.value) === null || _d === void 0 ? void 0 : _d.case) === 'user') {
- this.emit(EngineEvent.DataPacketReceived, dp.value.value, dp.kind);
- } else if (((_e = dp.value) === null || _e === void 0 ? void 0 : _e.case) === 'transcription') {
- this.emit(EngineEvent.TranscriptionReceived, dp.value.value);
+ } else {
+ if (((_d = dp.value) === null || _d === void 0 ? void 0 : _d.case) === 'user') {
+ // compatibility
+ applyUserDataCompat(dp, dp.value.value);
+ }
+ this.emit(EngineEvent.DataPacketReceived, dp);
  }
  } finally {
  unlock();
@@ -17014,6 +17049,14 @@ class SignalReconnectError extends Error {}
  function supportOptionalDatachannel(protocol) {
  return protocol !== undefined && protocol > 13;
  }
+ function applyUserDataCompat(newObj, oldObj) {
+ const participantIdentity = newObj.participantIdentity ? newObj.participantIdentity : oldObj.participantIdentity;
+ newObj.participantIdentity = participantIdentity;
+ oldObj.participantIdentity = participantIdentity;
+ const destinationIdentities = newObj.destinationIdentities.length !== 0 ? newObj.destinationIdentities : oldObj.destinationIdentities;
+ newObj.destinationIdentities = destinationIdentities;
+ oldObj.destinationIdentities = destinationIdentities;
+ }

  class RegionUrlProvider {
  constructor(url, token) {
@@ -17918,11 +17961,15 @@ class Participant extends eventsExports.EventEmitter {
  return this.trackPublications.size > 0 && Array.from(this.trackPublications.values()).every(tr => tr.isEncrypted);
  }
  get isAgent() {
- var _a, _b;
- return (_b = (_a = this.permissions) === null || _a === void 0 ? void 0 : _a.agent) !== null && _b !== void 0 ? _b : false;
+ var _a;
+ return ((_a = this.permissions) === null || _a === void 0 ? void 0 : _a.agent) || this.kind === ParticipantInfo_Kind.AGENT;
+ }
+ get kind() {
+ return this._kind;
  }
  /** @internal */
  constructor(sid, identity, name, metadata, loggerOptions) {
+ let kind = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : ParticipantInfo_Kind.STANDARD;
  var _a;
  super();
  /** audio level between 0-1.0, 1 being loudest, 0 being softest */
@@ -17941,6 +17988,7 @@ class Participant extends eventsExports.EventEmitter {
  this.audioTrackPublications = new Map();
  this.videoTrackPublications = new Map();
  this.trackPublications = new Map();
+ this._kind = kind;
  }
  getTrackPublications() {
  return Array.from(this.trackPublications.values());
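
With the participant kind now stored on construction and re-exported as ParticipantKind (see the updated export list at the end of this diff), agent detection no longer relies on permissions alone; a hedged sketch, not part of the diff:

import { ParticipantKind, RoomEvent } from 'livekit-client';

room.on(RoomEvent.ParticipantConnected, (participant) => {
  // isAgent is now true when either the agent permission is granted
  // or the participant kind is AGENT
  if (participant.kind === ParticipantKind.AGENT) {
    console.log('agent joined:', participant.identity);
  }
});
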
@@ -18495,8 +18543,8 @@ class LocalParticipant extends Participant {
  createTracks(options) {
  return __awaiter(this, void 0, void 0, function* () {
  var _a, _b;
- const opts = mergeDefaultOptions(options, (_a = this.roomOptions) === null || _a === void 0 ? void 0 : _a.audioCaptureDefaults, (_b = this.roomOptions) === null || _b === void 0 ? void 0 : _b.videoCaptureDefaults);
- const constraints = constraintsForOptions(opts);
+ const mergedOptions = mergeDefaultOptions(options, (_a = this.roomOptions) === null || _a === void 0 ? void 0 : _a.audioCaptureDefaults, (_b = this.roomOptions) === null || _b === void 0 ? void 0 : _b.videoCaptureDefaults);
+ const constraints = constraintsForOptions(mergedOptions);
  let stream;
  try {
  stream = yield navigator.mediaDevices.getUserMedia(constraints);
@@ -18518,9 +18566,12 @@
  if (constraints.video) {
  this.cameraError = undefined;
  }
- return stream.getTracks().map(mediaStreamTrack => {
+ return Promise.all(stream.getTracks().map(mediaStreamTrack => __awaiter(this, void 0, void 0, function* () {
  const isAudio = mediaStreamTrack.kind === 'audio';
- isAudio ? options.audio : options.video;
+ let trackOptions = isAudio ? mergedOptions.audio : mergedOptions.video;
+ if (typeof trackOptions === 'boolean' || !trackOptions) {
+ trackOptions = {};
+ }
  let trackConstraints;
  const conOrBool = isAudio ? constraints.audio : constraints.video;
  if (typeof conOrBool !== 'boolean') {
@@ -18534,10 +18585,18 @@
  track.source = Track.Source.Camera;
  } else if (track.kind === Track.Kind.Audio) {
  track.source = Track.Source.Microphone;
+ track.setAudioContext(this.audioContext);
  }
  track.mediaStream = stream;
+ if (trackOptions.processor) {
+ if (track instanceof LocalAudioTrack) {
+ yield track.setProcessor(trackOptions.processor);
+ } else {
+ yield track.setProcessor(trackOptions.processor);
+ }
+ }
  return track;
- });
+ })));
  });
  }
  /**
@@ -19427,8 +19486,8 @@ class RemoteTrackPublication extends TrackPublication {

  class RemoteParticipant extends Participant {
  /** @internal */
- static fromParticipantInfo(signalClient, pi) {
- return new RemoteParticipant(signalClient, pi.sid, pi.identity, pi.name, pi.metadata);
+ static fromParticipantInfo(signalClient, pi, loggerOptions) {
+ return new RemoteParticipant(signalClient, pi.sid, pi.identity, pi.name, pi.metadata, loggerOptions, pi.kind);
  }
  get logContext() {
  return Object.assign(Object.assign({}, super.logContext), {
@@ -19438,7 +19497,8 @@ class RemoteParticipant extends Participant {
  }
  /** @internal */
  constructor(signalClient, sid, identity, name, metadata, loggerOptions) {
- super(sid, identity || '', name, metadata, loggerOptions);
+ let kind = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : ParticipantInfo_Kind.STANDARD;
+ super(sid, identity || '', name, metadata, loggerOptions, kind);
  this.signalClient = signalClient;
  this.trackPublications = new Map();
  this.audioTrackPublications = new Map();
@@ -20238,17 +20298,31 @@ class Room extends eventsExports.EventEmitter {
  }
  pub.setSubscriptionError(update.err);
  };
- this.handleDataPacket = (userPacket, kind) => {
+ this.handleDataPacket = packet => {
  // find the participant
- const participant = this.remoteParticipants.get(userPacket.participantIdentity);
+ const participant = this.remoteParticipants.get(packet.participantIdentity);
+ if (packet.value.case === 'user') {
+ this.handleUserPacket(participant, packet.value.value, packet.kind);
+ } else if (packet.value.case === 'transcription') {
+ this.handleTranscription(participant, packet.value.value);
+ } else if (packet.value.case === 'sipDtmf') {
+ this.handleSipDtmf(participant, packet.value.value);
+ }
+ };
+ this.handleUserPacket = (participant, userPacket, kind) => {
  this.emit(RoomEvent.DataReceived, userPacket.payload, participant, kind, userPacket.topic);
  // also emit on the participant
  participant === null || participant === void 0 ? void 0 : participant.emit(ParticipantEvent.DataReceived, userPacket.payload, kind);
  };
+ this.handleSipDtmf = (participant, dtmf) => {
+ this.emit(RoomEvent.SipDTMFReceived, dtmf, participant);
+ // also emit on the participant
+ participant === null || participant === void 0 ? void 0 : participant.emit(ParticipantEvent.SipDTMFReceived, dtmf);
+ };
  this.bufferedSegments = new Map();
- this.handleTranscription = transcription => {
+ this.handleTranscription = (remoteParticipant, transcription) => {
  // find the participant
- const participant = transcription.participantIdentity === this.localParticipant.identity ? this.localParticipant : this.remoteParticipants.get(transcription.participantIdentity);
+ const participant = transcription.participantIdentity === this.localParticipant.identity ? this.localParticipant : remoteParticipant;
  const publication = participant === null || participant === void 0 ? void 0 : participant.trackPublications.get(transcription.trackId);
  const segments = extractTranscriptionSegments(transcription);
  publication === null || publication === void 0 ? void 0 : publication.emit(TrackEvent.TranscriptionReceived, segments);
@@ -20480,7 +20554,7 @@
  this.onTrackAdded(mediaTrack, stream, receiver);
  }).on(EngineEvent.Disconnected, reason => {
  this.handleDisconnect(this.options.stopLocalTrackOnUnpublish, reason);
- }).on(EngineEvent.ActiveSpeakersUpdate, this.handleActiveSpeakersUpdate).on(EngineEvent.DataPacketReceived, this.handleDataPacket).on(EngineEvent.TranscriptionReceived, this.handleTranscription).on(EngineEvent.Resuming, () => {
+ }).on(EngineEvent.ActiveSpeakersUpdate, this.handleActiveSpeakersUpdate).on(EngineEvent.DataPacketReceived, this.handleDataPacket).on(EngineEvent.Resuming, () => {
  this.clearConnectionReconcile();
  this.isResuming = true;
  this.log.info('Resuming signal connection', this.logContext);
@@ -20777,11 +20851,12 @@
  if (_this3.options.webAudioMix) {
  // @ts-expect-error setSinkId is not yet in the typescript type of AudioContext
  (_c = _this3.audioContext) === null || _c === void 0 ? void 0 : _c.setSinkId(deviceId);
- } else {
- yield Promise.all(Array.from(_this3.remoteParticipants.values()).map(p => p.setAudioOutput({
- deviceId
- })));
  }
+ // also set audio output on all audio elements, even if webAudioMix is enabled in order to workaround echo cancellation not working on chrome with non-default output devices
+ // see https://issues.chromium.org/issues/40252911#comment7
+ yield Promise.all(Array.from(_this3.remoteParticipants.values()).map(p => p.setAudioOutput({
+ deviceId
+ })));
  } catch (e) {
  _this3.options.audioOutput.deviceId = prevDeviceId;
  throw e;
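
This hunk sits inside Room's audio-output switching: selecting an output device works as before, but attached audio elements are now updated even when webAudioMix is enabled. A hedged reminder of the call (not part of the diff):

// deviceId would typically come from navigator.mediaDevices.enumerateDevices()
await room.switchActiveDevice('audiooutput', deviceId);
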
@@ -20959,7 +21034,10 @@
  var _a;
  let participant;
  if (info) {
- participant = RemoteParticipant.fromParticipantInfo(this.engine.client, info);
+ participant = RemoteParticipant.fromParticipantInfo(this.engine.client, info, {
+ loggerContextCb: () => this.logContext,
+ loggerName: this.options.loggerName
+ });
  } else {
  participant = new RemoteParticipant(this.engine.client, '', identity, undefined, undefined, {
  loggerContextCb: () => this.logContext,
@@ -21412,9 +21490,12 @@ function createLocalTracks(options) {
  mediaPromise.catch(() => DeviceManager.userMediaPromiseMap.delete('videoinput'));
  }
  const stream = yield mediaPromise;
- return stream.getTracks().map(mediaStreamTrack => {
+ return Promise.all(stream.getTracks().map(mediaStreamTrack => __awaiter(this, void 0, void 0, function* () {
  const isAudio = mediaStreamTrack.kind === 'audio';
- isAudio ? options.audio : options.video;
+ let trackOptions = isAudio ? options.audio : options.video;
+ if (typeof trackOptions === 'boolean' || !trackOptions) {
+ trackOptions = {};
+ }
  let trackConstraints;
  const conOrBool = isAudio ? constraints.audio : constraints.video;
  if (typeof conOrBool !== 'boolean') {
@@ -21436,8 +21517,15 @@
  track.source = Track.Source.Microphone;
  }
  track.mediaStream = stream;
+ if (trackOptions.processor) {
+ if (track instanceof LocalAudioTrack) {
+ yield track.setProcessor(trackOptions.processor);
+ } else if (track instanceof LocalVideoTrack) {
+ yield track.setProcessor(trackOptions.processor);
+ }
+ }
  return track;
- });
+ })));
  });
  }
  /**
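
Judging from the trackOptions.processor check above and the additions to src/room/track/options.ts in the file list, the capture options can now carry a processor directly, so createLocalTracks attaches it while the tracks are created. An illustrative sketch (noiseFilter and blurProcessor are placeholders for TrackProcessor implementations):

import { createLocalTracks } from 'livekit-client';

const tracks = await createLocalTracks({
  audio: { processor: noiseFilter },
  video: { processor: blurProcessor },
});
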
@@ -21903,5 +21991,5 @@ function isFacingModeValue(item) {
  return item === undefined || allowedValues.includes(item);
  }

- export { AudioPresets, BaseKeyProvider, CheckStatus, Checker, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, CryptorEvent, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EncryptionEvent, EngineEvent, ExternalE2EEKeyProvider, KeyHandlerEvent, KeyProviderEvent, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, LoggerNames, MediaDeviceFailure, Mutex, NegotiationError, Participant, ParticipantEvent, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, ScreenSharePresets, SubscriptionError, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createE2EEKey, createKeyMaterialFromBuffer, createKeyMaterialFromString, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, deriveKeys, detachTrack, facingModeFromDeviceLabel, facingModeFromLocalTrack, getBrowser, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, getLogger, importKey, isBackupCodec, isBrowserSupported, isE2EESupported, isInsertableStreamSupported, isScriptTransformSupported, isVideoFrame, needsRbspUnescaping, parseRbsp, protocolVersion, ratchet, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, supportsVP9, version, videoCodecs, writeRbsp };
+ export { AudioPresets, BaseKeyProvider, CheckStatus, Checker, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, CryptorEvent, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EncryptionEvent, EngineEvent, ExternalE2EEKeyProvider, KeyHandlerEvent, KeyProviderEvent, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, LoggerNames, MediaDeviceFailure, Mutex, NegotiationError, Participant, ParticipantEvent, ParticipantInfo_Kind as ParticipantKind, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, ScreenSharePresets, SubscriptionError, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createE2EEKey, createKeyMaterialFromBuffer, createKeyMaterialFromString, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, deriveKeys, detachTrack, facingModeFromDeviceLabel, facingModeFromLocalTrack, getBrowser, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, getLogger, importKey, isBackupCodec, isBrowserSupported, isE2EESupported, isInsertableStreamSupported, isScriptTransformSupported, isVideoFrame, needsRbspUnescaping, parseRbsp, protocolVersion, ratchet, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, supportsVP9, version, videoCodecs, writeRbsp };
  //# sourceMappingURL=livekit-client.esm.mjs.map