livekit-client 2.15.5 → 2.15.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/livekit-client.e2ee.worker.js +1 -1
  2. package/dist/livekit-client.e2ee.worker.js.map +1 -1
  3. package/dist/livekit-client.e2ee.worker.mjs +54 -50
  4. package/dist/livekit-client.e2ee.worker.mjs.map +1 -1
  5. package/dist/livekit-client.esm.mjs +81 -40
  6. package/dist/livekit-client.esm.mjs.map +1 -1
  7. package/dist/livekit-client.umd.js +1 -1
  8. package/dist/livekit-client.umd.js.map +1 -1
  9. package/dist/src/e2ee/E2eeManager.d.ts.map +1 -1
  10. package/dist/src/e2ee/worker/FrameCryptor.d.ts +0 -1
  11. package/dist/src/e2ee/worker/FrameCryptor.d.ts.map +1 -1
  12. package/dist/src/e2ee/worker/sifPayload.d.ts +22 -0
  13. package/dist/src/e2ee/worker/sifPayload.d.ts.map +1 -0
  14. package/dist/src/room/PCTransport.d.ts.map +1 -1
  15. package/dist/src/room/Room.d.ts.map +1 -1
  16. package/dist/src/room/participant/LocalParticipant.d.ts +1 -3
  17. package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
  18. package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
  19. package/dist/src/room/track/RemoteVideoTrack.d.ts +1 -0
  20. package/dist/src/room/track/RemoteVideoTrack.d.ts.map +1 -1
  21. package/dist/src/room/track/Track.d.ts +4 -1
  22. package/dist/src/room/track/Track.d.ts.map +1 -1
  23. package/dist/src/room/utils.d.ts +8 -0
  24. package/dist/src/room/utils.d.ts.map +1 -1
  25. package/dist/ts4.2/src/e2ee/worker/FrameCryptor.d.ts +0 -1
  26. package/dist/ts4.2/src/e2ee/worker/sifPayload.d.ts +22 -0
  27. package/dist/ts4.2/src/room/participant/LocalParticipant.d.ts +1 -3
  28. package/dist/ts4.2/src/room/track/RemoteVideoTrack.d.ts +1 -0
  29. package/dist/ts4.2/src/room/track/Track.d.ts +4 -1
  30. package/dist/ts4.2/src/room/utils.d.ts +8 -0
  31. package/package.json +10 -9
  32. package/src/e2ee/E2eeManager.ts +18 -1
  33. package/src/e2ee/worker/FrameCryptor.ts +8 -18
  34. package/src/e2ee/worker/e2ee.worker.ts +6 -1
  35. package/src/e2ee/worker/sifPayload.ts +75 -0
  36. package/src/room/PCTransport.ts +14 -5
  37. package/src/room/Room.ts +12 -3
  38. package/src/room/participant/LocalParticipant.ts +9 -23
  39. package/src/room/track/LocalTrack.ts +5 -2
  40. package/src/room/track/RemoteVideoTrack.ts +12 -2
  41. package/src/room/track/Track.ts +10 -1
  42. package/src/room/utils.ts +12 -3
  43. package/dist/src/e2ee/worker/SifGuard.d.ts +0 -11
  44. package/dist/src/e2ee/worker/SifGuard.d.ts.map +0 -1
  45. package/dist/ts4.2/src/e2ee/worker/SifGuard.d.ts +0 -11
  46. package/src/e2ee/worker/SifGuard.ts +0 -47
@@ -11388,7 +11388,7 @@ function getOSVersion(ua) {
  return ua.includes('mac os') ? getMatch(/\(.+?(\d+_\d+(:?_\d+)?)/, ua, 1).replace(/_/g, '.') : undefined;
  }

- var version$1 = "2.15.5";
+ var version$1 = "2.15.7";

  const version = version$1;
  const protocolVersion = 16;
@@ -11425,17 +11425,24 @@ var VideoQuality;
  VideoQuality[VideoQuality["HIGH"] = 2] = "HIGH";
  })(VideoQuality || (VideoQuality = {}));
  class Track extends eventsExports.EventEmitter {
+ /**
+ * indicates current state of stream, it'll indicate `paused` if the track
+ * has been paused by congestion controller
+ */
+ get streamState() {
+ return this._streamState;
+ }
+ /** @internal */
+ setStreamState(value) {
+ this._streamState = value;
+ }
  constructor(mediaTrack, kind) {
  let loggerOptions = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  var _a;
  super();
  this.attachedElements = [];
  this.isMuted = false;
- /**
- * indicates current state of stream, it'll indicate `paused` if the track
- * has been paused by congestion controller
- */
- this.streamState = Track.StreamState.Active;
+ this._streamState = Track.StreamState.Active;
  this.isInBackground = false;
  this._currentBitrate = 0;
  this.log = livekitLogger;
@@ -11928,8 +11935,9 @@ function supportsAV1() {
  if (!('getCapabilities' in RTCRtpSender)) {
  return false;
  }
- if (isSafari()) {
+ if (isSafari() || isFireFox()) {
  // Safari 17 on iPhone14 reports AV1 capability, but does not actually support it
+ // Firefox does support AV1, but SVC publishing is not supported
  return false;
  }
  const capabilities = RTCRtpSender.getCapabilities('video');
@@ -12031,9 +12039,9 @@ function isE2EESimulcastSupported() {
  if (browser) {
  if (browser.name !== 'Safari' && browser.os !== 'iOS') {
  return true;
- } else if (browser.os === 'iOS' && browser.osVersion && compareVersions(supportedSafariVersion, browser.osVersion) >= 0) {
+ } else if (browser.os === 'iOS' && browser.osVersion && compareVersions(browser.osVersion, supportedSafariVersion) >= 0) {
  return true;
- } else if (browser.name === 'Safari' && compareVersions(supportedSafariVersion, browser.version) >= 0) {
+ } else if (browser.name === 'Safari' && compareVersions(browser.version, supportedSafariVersion) >= 0) {
  return true;
  } else {
  return false;
@@ -12081,6 +12089,14 @@ function getDevicePixelRatio() {
  }
  return 1;
  }
+ /**
+ * @param v1 - The first version string to compare.
+ * @param v2 - The second version string to compare.
+ * @returns A number indicating the order of the versions:
+ * - 1 if v1 is greater than v2
+ * - -1 if v1 is less than v2
+ * - 0 if v1 and v2 are equal
+ */
  function compareVersions(v1, v2) {
  const parts1 = v1.split('.');
  const parts2 = v2.split('.');
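The argument-order fix in isE2EESimulcastSupported above depends on the ordering documented in the new compareVersions JSDoc. A minimal sketch of that ordering, for illustration only (the bundled implementation continues past the lines shown here):

```ts
// Illustrative sketch, not the bundled code: 1 if v1 > v2, -1 if v1 < v2, 0 if equal.
function compareVersionsSketch(v1: string, v2: string): number {
  const parts1 = v1.split('.').map(Number);
  const parts2 = v2.split('.').map(Number);
  for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) {
    const diff = (parts1[i] ?? 0) - (parts2[i] ?? 0);
    if (diff !== 0) return diff > 0 ? 1 : -1;
  }
  return 0;
}

// With the corrected argument order, the check reads "is the browser version at least
// the supported Safari version (17.2)?":
compareVersionsSketch('17.2', '17.2'); // 0  -> >= 0, supported
compareVersionsSketch('16.4', '17.2'); // -1 -> <  0, not supported
compareVersionsSketch('17.4', '17.2'); // 1  -> >= 0, supported
```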
@@ -12875,6 +12891,21 @@ class E2EEManager extends eventsExports.EventEmitter {
  room.localParticipant.on(ParticipantEvent.LocalSenderCreated, (sender, track) => __awaiter(this, void 0, void 0, function* () {
  this.setupE2EESender(track, sender);
  }));
+ room.localParticipant.on(ParticipantEvent.LocalTrackPublished, publication => {
+ // Safari doesn't support retrieving payload information on RTCEncodedVideoFrame, so we need to update the codec manually once we have the trackInfo from the server
+ if (!isVideoTrack(publication.track) || !isSafariBased()) {
+ return;
+ }
+ const msg = {
+ kind: 'updateCodec',
+ data: {
+ trackId: publication.track.mediaStreamID,
+ codec: mimeTypeToVideoCodecString(publication.trackInfo.codecs[0].mimeType),
+ participantIdentity: this.room.localParticipant.identity
+ }
+ };
+ this.worker.postMessage(msg);
+ });
  keyProvider.on(KeyProviderEvent.SetKey, keyInfo => this.postKey(keyInfo)).on(KeyProviderEvent.RatchetRequest, (participantId, keyIndex) => this.postRatchetRequest(participantId, keyIndex));
  }
  postRatchetRequest(participantIdentity, keyIndex) {
@@ -14939,10 +14970,11 @@ class PCTransport extends eventsExports.EventEmitter {
  } else if (sd.type === 'answer') {
  const sdpParsed = libExports.parse((_a = sd.sdp) !== null && _a !== void 0 ? _a : '');
  sdpParsed.media.forEach(media => {
+ const mid = getMidString(media.mid);
  if (media.type === 'audio') {
  // mung sdp for opus bitrate settings
  this.trackBitrates.some(trackbr => {
- if (!trackbr.transceiver || media.mid != trackbr.transceiver.mid) {
+ if (!trackbr.transceiver || mid != trackbr.transceiver.mid) {
  return false;
  }
  let codecPayload = 0;
@@ -15293,6 +15325,8 @@ class PCTransport extends eventsExports.EventEmitter {
  }
  }
  function ensureAudioNackAndStereo(media, stereoMids, nackMids) {
+ // sdp-transform types don't include number however the parser outputs mids as numbers in some cases
+ const mid = getMidString(media.mid);
  // found opus codec to add nack fb
  let opusPayload = 0;
  media.rtp.some(rtp => {
@@ -15307,13 +15341,13 @@ function ensureAudioNackAndStereo(media, stereoMids, nackMids) {
  if (!media.rtcpFb) {
  media.rtcpFb = [];
  }
- if (nackMids.includes(media.mid) && !media.rtcpFb.some(fb => fb.payload === opusPayload && fb.type === 'nack')) {
+ if (nackMids.includes(mid) && !media.rtcpFb.some(fb => fb.payload === opusPayload && fb.type === 'nack')) {
  media.rtcpFb.push({
  payload: opusPayload,
  type: 'nack'
  });
  }
- if (stereoMids.includes(media.mid)) {
+ if (stereoMids.includes(mid)) {
  media.fmtp.some(fmtp => {
  if (fmtp.payload === opusPayload) {
  if (!fmtp.config.includes('stereo=1')) {
@@ -15334,6 +15368,7 @@ function extractStereoAndNackAudioFromOffer(offer) {
  let opusPayload = 0;
  sdpParsed.media.forEach(media => {
  var _a;
+ const mid = getMidString(media.mid);
  if (media.type === 'audio') {
  media.rtp.some(rtp => {
  if (rtp.codec === 'opus') {
@@ -15343,12 +15378,12 @@ function extractStereoAndNackAudioFromOffer(offer) {
  return false;
  });
  if ((_a = media.rtcpFb) === null || _a === void 0 ? void 0 : _a.some(fb => fb.payload === opusPayload && fb.type === 'nack')) {
- nackMids.push(media.mid);
+ nackMids.push(mid);
  }
  media.fmtp.some(fmtp => {
  if (fmtp.payload === opusPayload) {
  if (fmtp.config.includes('sprop-stereo=1')) {
- stereoMids.push(media.mid);
+ stereoMids.push(mid);
  }
  return true;
  }
@@ -15374,6 +15409,9 @@ function ensureIPAddrMatchVersion(media) {
  }
  }
  }
+ function getMidString(mid) {
+ return typeof mid === 'number' ? mid.toFixed(0) : mid;
+ }

  const defaultVideoCodec = 'vp8';
  const publishDefaults = {
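Context for the getMidString additions above: sdp-transform can surface a media section's mid as a number, while the mids collected from the offer and from transceivers are strings, so strict-equality checks such as Array.prototype.includes silently fail to match. A small illustration of the mismatch and the normalization, using assumed values:

```ts
// includes() compares with SameValueZero, so a numeric mid never matches a string entry.
function hasNack(nackMids: string[], mid: string | number): boolean {
  // normalize the way getMidString does before comparing
  return nackMids.includes(typeof mid === 'number' ? mid.toFixed(0) : mid);
}

const nackMids = ['0', '1'];
console.log(nackMids.includes(0 as unknown as string)); // false -> the pre-fix comparison
console.log(hasNack(nackMids, 0)); // true -> after normalizing to a string
```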
@@ -16203,9 +16241,9 @@ class LocalTrack extends Track {
  facingMode
  } : true;
  } else {
- streamConstraints.audio = deviceId ? {
+ streamConstraints.audio = deviceId ? Object.assign({
  deviceId
- } : true;
+ }, otherConstraints) : true;
  }
  // these steps are duplicated from setMediaStreamTrack because we must stop
  // the previous tracks before new tracks can be acquired
@@ -16220,7 +16258,10 @@ class LocalTrack extends Track {
  // create new track and attach
  const mediaStream = yield navigator.mediaDevices.getUserMedia(streamConstraints);
  const newTrack = mediaStream.getTracks()[0];
- yield newTrack.applyConstraints(otherConstraints);
+ if (this.kind === Track.Kind.Video) {
+ // we already captured the audio track with the constraints, so we only need to apply the video constraints
+ yield newTrack.applyConstraints(otherConstraints);
+ }
  newTrack.addEventListener('ended', this.handleEnded);
  this.log.debug('re-acquired MediaStreamTrack', this.logContext);
  yield this.setMediaStreamTrack(newTrack);
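The two LocalTrack hunks above merge the extra audio constraints into the getUserMedia request itself, while video tracks still apply them after capture via applyConstraints. A rough sketch of the resulting constraint shape, with placeholder values standing in for the track's stored deviceId and constraints:

```ts
// Placeholder values; in LocalTrack's restart path these come from the track's stored constraints.
const deviceId = 'default';
const otherConstraints: MediaTrackConstraints = { echoCancellation: true, channelCount: 2 };

// Audio: the extra constraints are part of the capture request up front.
const streamConstraints: MediaStreamConstraints = {
  audio: deviceId ? { deviceId, ...otherConstraints } : true,
};

async function reacquireAudioTrack(): Promise<MediaStreamTrack> {
  const mediaStream = await navigator.mediaDevices.getUserMedia(streamConstraints);
  // Video tracks would instead call newTrack.applyConstraints(otherConstraints) after capture.
  return mediaStream.getTracks()[0];
}
```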
@@ -20155,6 +20196,15 @@ class RemoteVideoTrack extends RemoteTrack {
  get isAdaptiveStream() {
  return this.adaptiveStreamSettings !== undefined;
  }
+ setStreamState(value) {
+ super.setStreamState(value);
+ console.log('setStreamState', value);
+ if (value === Track.StreamState.Active) {
+ // update visibility for adaptive stream tracks when stream state received from server is active
+ // this is needed to ensure the track is stopped when there's no element attached to it at all
+ this.updateVisibility();
+ }
+ }
  /**
  * Note: When using adaptiveStream, you need to use remoteVideoTrack.attach() to add the track to a HTMLVideoElement, otherwise your video tracks might never start
  */
@@ -20304,14 +20354,14 @@ class RemoteVideoTrack extends RemoteTrack {
  this.updateVisibility();
  });
  }
- updateVisibility() {
+ updateVisibility(forceEmit) {
  var _a, _b;
  const lastVisibilityChange = this.elementInfos.reduce((prev, info) => Math.max(prev, info.visibilityChangedAt || 0), 0);
  const backgroundPause = ((_b = (_a = this.adaptiveStreamSettings) === null || _a === void 0 ? void 0 : _a.pauseVideoInBackground) !== null && _b !== void 0 ? _b : true // default to true
  ) ? this.isInBackground : false;
  const isPiPMode = this.elementInfos.some(info => info.pictureInPicture);
  const isVisible = this.elementInfos.some(info => info.visible) && !backgroundPause || isPiPMode;
- if (this.lastVisible === isVisible) {
+ if (this.lastVisible === isVisible && !forceEmit) {
  return;
  }
  if (!isVisible && Date.now() - lastVisibilityChange < REACTION_DELAY) {
@@ -21526,15 +21576,6 @@ class LocalParticipant extends Participant {
  return this.setTrackEnabled(Track.Source.ScreenShare, enabled, options, publishOptions);
  }
  /** @internal */
- setPermissions(permissions) {
- const prevPermissions = this.permissions;
- const changed = super.setPermissions(permissions);
- if (changed && prevPermissions) {
- this.emit(ParticipantEvent.ParticipantPermissionsChanged, prevPermissions);
- }
- return changed;
- }
- /** @internal */
  setE2EEEnabled(enabled) {
  return __awaiter(this, void 0, void 0, function* () {
  this.encryptionType = enabled ? Encryption_Type.GCM : Encryption_Type.NONE;
@@ -21845,25 +21886,22 @@ class LocalParticipant extends Participant {
  _this2.log.warn('track has already been published, skipping', Object.assign(Object.assign({}, _this2.logContext), getLogContextFromTrack(existingPublication)));
  return existingPublication;
  }
+ const opts = Object.assign(Object.assign({}, _this2.roomOptions.publishDefaults), options);
  const isStereoInput = 'channelCount' in track.mediaStreamTrack.getSettings() &&
  // @ts-ignore `channelCount` on getSettings() is currently only available for Safari, but is generally the best way to determine a stereo track https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/channelCount
  track.mediaStreamTrack.getSettings().channelCount === 2 || track.mediaStreamTrack.getConstraints().channelCount === 2;
- const isStereo = (_b = options === null || options === void 0 ? void 0 : options.forceStereo) !== null && _b !== void 0 ? _b : isStereoInput;
+ const isStereo = (_b = opts.forceStereo) !== null && _b !== void 0 ? _b : isStereoInput;
  // disable dtx for stereo track if not enabled explicitly
  if (isStereo) {
- if (!options) {
- options = {};
- }
- if (options.dtx === undefined) {
+ if (opts.dtx === undefined) {
  _this2.log.info("Opus DTX will be disabled for stereo tracks by default. Enable them explicitly to make it work.", Object.assign(Object.assign({}, _this2.logContext), getLogContextFromTrack(track)));
  }
- if (options.red === undefined) {
+ if (opts.red === undefined) {
  _this2.log.info("Opus RED will be disabled for stereo tracks by default. Enable them explicitly to make it work.");
  }
- (_c = options.dtx) !== null && _c !== void 0 ? _c : options.dtx = false;
- (_d = options.red) !== null && _d !== void 0 ? _d : options.red = false;
+ (_c = opts.dtx) !== null && _c !== void 0 ? _c : opts.dtx = false;
+ (_d = opts.red) !== null && _d !== void 0 ? _d : opts.red = false;
  }
- const opts = Object.assign(Object.assign({}, _this2.roomOptions.publishDefaults), options);
  if (!isE2EESimulcastSupported() && _this2.roomOptions.e2ee) {
  _this2.log.info("End-to-end encryption is set up, simulcast publishing will be disabled on Safari versions and iOS browsers running iOS < v17.2", Object.assign({}, _this2.logContext));
  opts.simulcast = false;
@@ -23938,8 +23976,8 @@ class Room extends eventsExports.EventEmitter {
  return;
  }
  const newStreamState = Track.streamStateFromProto(streamState.state);
+ pub.track.setStreamState(newStreamState);
  if (newStreamState !== pub.track.streamState) {
- pub.track.streamState = newStreamState;
  participant.emit(ParticipantEvent.TrackStreamStateChanged, pub, pub.track.streamState);
  this.emitWhenConnected(RoomEvent.TrackStreamStateChanged, pub, pub.track.streamState, participant);
  }
@@ -24470,7 +24508,7 @@ class Room extends eventsExports.EventEmitter {
  */
  simulateScenario(scenario, arg) {
  return __awaiter(this, void 0, void 0, function* () {
- let postAction = () => {};
+ let postAction = () => __awaiter(this, void 0, void 0, function* () {});
  let req;
  switch (scenario) {
  case 'signal-reconnect':
@@ -24765,7 +24803,10 @@ class Room extends eventsExports.EventEmitter {
  adaptiveStreamSettings = {};
  }
  }
- participant.addSubscribedMediaTrack(mediaTrack, trackId, stream, receiver, adaptiveStreamSettings);
+ const publication = participant.addSubscribedMediaTrack(mediaTrack, trackId, stream, receiver, adaptiveStreamSettings);
+ if ((publication === null || publication === void 0 ? void 0 : publication.isEncrypted) && !this.e2eeManager) {
+ this.emit(RoomEvent.EncryptionError, new Error("Encrypted ".concat(publication.source, " track received from participant ").concat(participant.sid, ", but room does not have encryption enabled!")));
+ }
  }
  handleDisconnect() {
  let shouldStopTracks = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;