livekit-client 1.9.0 → 1.9.1

package/README.md CHANGED
@@ -45,13 +45,12 @@ await room.connect(...);
 
 ```typescript
 import {
-  connect,
-  Room,
-  RoomEvent,
+  Participant,
   RemoteParticipant,
-  RemoteTrackPublication,
   RemoteTrack,
-  Participant,
+  RemoteTrackPublication,
+  Room,
+  RoomEvent,
 } from 'livekit-client';
 
 // creates a new room with options
@@ -296,7 +295,9 @@ You can have a look at the `"browerslist"` section of `package.json` for more de
 If you are targeting legacy browsers, but still want adaptiveStream functionality you'll likely need to use polyfills for [ResizeObserver](https://www.npmjs.com/package/resize-observer-polyfill) and [IntersectionObserver](https://www.npmjs.com/package/intersection-observer).
 
 <!--BEGIN_REPO_NAV-->
+
 <br/><table>
+
 <thead><tr><th colspan="2">LiveKit Ecosystem</th></tr></thead>
 <tbody>
 <tr><td>Client SDKs</td><td><a href="https://github.com/livekit/components-js">Components</a> · <b>JavaScript</b> · <a href="https://github.com/livekit/client-sdk-rust">Rust</a> · <a href="https://github.com/livekit/client-sdk-swift">iOS/macOS</a> · <a href="https://github.com/livekit/client-sdk-android">Android</a> · <a href="https://github.com/livekit/client-sdk-flutter">Flutter</a> · <a href="https://github.com/livekit/client-sdk-unity-web">Unity (web)</a> · <a href="https://github.com/livekit/client-sdk-react-native">React Native (beta)</a></td></tr><tr></tr>
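
package/dist/livekit-client.esm.mjs CHANGED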
@@ -14940,7 +14940,7 @@ var uaParser = {exports: {}};
 var uaParserExports = uaParser.exports;
 var UAParser = /*@__PURE__*/getDefaultExportFromCjs(uaParserExports);
 
-var version$1 = "1.9.0";
+var version$1 = "1.9.1";
 
 const version = version$1;
 const protocolVersion = 9;
@@ -15037,6 +15037,7 @@ function getNewAudioContext() {
 }
 
 const separator = '|';
+const ddExtensionURI = 'https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension';
 function unpackStreamId(packed) {
   const parts = packed.split(separator);
   if (parts.length > 1) {
@@ -15064,7 +15065,6 @@ function supportsDynacast() {
 function supportsAV1() {
   const capabilities = RTCRtpReceiver.getCapabilities('video');
   let hasAV1 = false;
-  let hasDDExt = false;
   if (capabilities) {
     for (const codec of capabilities.codecs) {
       if (codec.mimeType === 'video/AV1') {
@@ -15072,14 +15072,24 @@ function supportsAV1() {
         break;
       }
     }
-    for (const ext of capabilities.headerExtensions) {
-      if (ext.uri === 'https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension') {
-        hasDDExt = true;
+  }
+  return hasAV1;
+}
+function supportsVP9() {
+  const capabilities = RTCRtpReceiver.getCapabilities('video');
+  let hasVP9 = false;
+  if (capabilities) {
+    for (const codec of capabilities.codecs) {
+      if (codec.mimeType === 'video/VP9') {
+        hasVP9 = true;
         break;
       }
     }
   }
-  return hasAV1 && hasDDExt;
+  return hasVP9;
+}
+function isSVCCodec(codec) {
+  return codec === 'av1' || codec === 'vp9';
 }
 function supportsSetSinkId(elm) {
   if (!document) {
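1.9.1 adds VP9 to the set of SVC-capable codecs alongside AV1, and the dependency-descriptor header-extension check moves out of `supportsAV1` into SDP munging (see the `ensureVideoDDExtensionForSVC` hunk further down). A minimal sketch of the kind of capability probe these helpers perform; `probeVideoCodec` is a hypothetical local helper, not part of the livekit-client API:

```typescript
// Hypothetical helper mirroring the bundled supportsAV1/supportsVP9 checks:
// ask the browser whether it can receive a given video codec at all.
function probeVideoCodec(mimeType: string): boolean {
  const capabilities = RTCRtpReceiver.getCapabilities('video');
  return !!capabilities?.codecs.some((c) => c.mimeType === mimeType);
}

const codec = probeVideoCodec('video/AV1') ? 'av1'
  : probeVideoCodec('video/VP9') ? 'vp9'
  : 'vp8';

// matches the new isSVCCodec helper: only AV1 and VP9 are treated as SVC codecs
const usesSvc = codec === 'av1' || codec === 'vp9';
console.log(`selected codec: ${codec}, SVC: ${usesSvc}`);
```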
@@ -16763,6 +16773,7 @@ class PCTransport extends EventEmitter$1 {
       if (media.type === 'audio') {
         ensureAudioNackAndStereo(media, [], []);
       } else if (media.type === 'video') {
+        ensureVideoDDExtensionForSVC(media);
         // mung sdp for codec bitrate setting that can't apply by sendEncoding
         this.trackBitrates.some(trackbr => {
           if (!media.msid || !media.msid.includes(trackbr.sid)) {
@@ -16780,6 +16791,9 @@ class PCTransport extends EventEmitter$1 {
           if (codecPayload > 0) {
             if (!media.fmtp.some(fmtp => {
               if (fmtp.payload === codecPayload) {
+                if (!fmtp.config.includes('x-google-start-bitrate')) {
+                  fmtp.config += ";x-google-start-bitrate=".concat(trackbr.maxbr * 0.7);
+                }
                 if (!fmtp.config.includes('x-google-max-bitrate')) {
                   fmtp.config += ";x-google-max-bitrate=".concat(trackbr.maxbr);
                 }
@@ -16789,7 +16803,7 @@
             })) {
               media.fmtp.push({
                 payload: codecPayload,
-                config: "x-google-max-bitrate=".concat(trackbr.maxbr)
+                config: "x-google-start-bitrate=".concat(trackbr.maxbr * 0.7, ";x-google-max-bitrate=").concat(trackbr.maxbr)
               });
             }
           }
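These two hunks change the SDP fmtp munging so a start bitrate (70% of the requested maximum) is written alongside the maximum. A rough worked example of the resulting `fmtp` config string, assuming `trackbr.maxbr` is expressed in kbps and a hypothetical pre-existing config for the payload:

```typescript
// Illustrative only: what the munging above appends for a track with maxbr = 3000.
const maxbr = 3000;
let config = 'profile-id=0'; // hypothetical pre-existing fmtp config for this payload

if (!config.includes('x-google-start-bitrate')) {
  config += `;x-google-start-bitrate=${maxbr * 0.7}`;
}
if (!config.includes('x-google-max-bitrate')) {
  config += `;x-google-max-bitrate=${maxbr}`;
}

console.log(config);
// profile-id=0;x-google-start-bitrate=2100;x-google-max-bitrate=3000
```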
@@ -16887,6 +16901,29 @@ function ensureAudioNackAndStereo(media, stereoMids, nackMids) {
     }
   }
 }
+function ensureVideoDDExtensionForSVC(media) {
+  var _a, _b, _c, _d;
+  const codec = (_b = (_a = media.rtp.at(0)) === null || _a === void 0 ? void 0 : _a.codec) === null || _b === void 0 ? void 0 : _b.toLowerCase();
+  if (!isSVCCodec(codec)) {
+    return;
+  }
+  let maxID = 0;
+  const ddFound = (_c = media.ext) === null || _c === void 0 ? void 0 : _c.some(ext => {
+    if (ext.uri === ddExtensionURI) {
+      return true;
+    }
+    if (ext.value > maxID) {
+      maxID = ext.value;
+    }
+    return false;
+  });
+  if (!ddFound) {
+    (_d = media.ext) === null || _d === void 0 ? void 0 : _d.push({
+      value: maxID + 1,
+      uri: ddExtensionURI
+    });
+  }
+}
 function extractStereoAndNackAudioFromOffer(offer) {
   var _a;
   const stereoMids = [];
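The new `ensureVideoDDExtensionForSVC` helper is easier to follow without the compiled optional-chaining expansion. A de-minified sketch, with hypothetical types standing in for the parsed SDP media section:

```typescript
// Rough restatement of ensureVideoDDExtensionForSVC above (not the bundled source).
interface SdpExt { value: number; uri: string }
interface VideoMedia { rtp: { codec?: string }[]; ext?: SdpExt[] }

const DD_EXT_URI =
  'https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension';

function addDependencyDescriptorIfMissing(media: VideoMedia): void {
  const codec = media.rtp[0]?.codec?.toLowerCase();
  if (codec !== 'av1' && codec !== 'vp9') return; // only SVC codecs need the extension

  let maxId = 0;
  const found = media.ext?.some((ext) => {
    if (ext.uri === DD_EXT_URI) return true;
    if (ext.value > maxId) maxId = ext.value; // track the highest extmap id in use
    return false;
  });

  if (!found) {
    // register the dependency-descriptor extension under the next free extmap id
    media.ext?.push({ value: maxId + 1, uri: DD_EXT_URI });
  }
}
```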
@@ -16996,6 +17033,9 @@ const backupCodecs = ['vp8', 'h264'];
 function isBackupCodec(codec) {
   return !!backupCodecs.find(backup => backup === codec);
 }
+function isCodecEqual(c1, c2) {
+  return (c1 === null || c1 === void 0 ? void 0 : c1.toLowerCase().replace(/audio\/|video\//y, '')) === (c2 === null || c2 === void 0 ? void 0 : c2.toLowerCase().replace(/audio\/|video\//y, ''));
+}
 var AudioPresets;
 (function (AudioPresets) {
   AudioPresets.telephone = {
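`isCodecEqual` lets a bare codec name ('vp9') be compared against a mime type ('video/VP9'). Roughly equivalent behaviour, assuming only the leading prefix matters:

```typescript
// Small check mirroring the new isCodecEqual helper: codec names are compared
// case-insensitively after stripping a leading "audio/" or "video/" mime prefix.
function codecEqual(c1?: string, c2?: string): boolean {
  const normalize = (c?: string) => c?.toLowerCase().replace(/^(audio|video)\//, '');
  return normalize(c1) === normalize(c2);
}

console.log(codecEqual('video/VP9', 'vp9'));  // true
console.log(codecEqual('video/H264', 'vp8')); // false
```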
@@ -21064,7 +21104,7 @@ function computeVideoEncodings(isScreenShare, width, height, options) {
     livekitLogger.debug('using video encoding', videoEncoding);
   }
   const original = new VideoPreset(width, height, videoEncoding.maxBitrate, videoEncoding.maxFramerate);
-  if (scalabilityMode && videoCodec === 'av1') {
+  if (scalabilityMode && isSVCCodec(videoCodec)) {
     livekitLogger.debug("using svc with scalabilityMode ".concat(scalabilityMode));
     const encodings = [];
     // svc use first encoding as the original, so we sort encoding from high to low
@@ -21164,8 +21204,13 @@ function determineAppropriateEncoding(isScreenShare, width, height, codec) {
   if (codec) {
     switch (codec) {
       case 'av1':
+        encoding = _objectSpread2({}, encoding);
         encoding.maxBitrate = encoding.maxBitrate * 0.7;
         break;
+      case 'vp9':
+        encoding = _objectSpread2({}, encoding);
+        encoding.maxBitrate = encoding.maxBitrate * 0.85;
+        break;
     }
   }
   return encoding;
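With this change the preset encoding is copied before its bitrate is scaled down per codec: 70% of the preset bitrate for AV1 and 85% for VP9, since both compress better than VP8/H.264. A small worked example, using a hypothetical 1.7 Mbps preset:

```typescript
// Worked example of the per-codec bitrate adjustment above.
const baseEncoding = { maxBitrate: 1_700_000, maxFramerate: 30 }; // hypothetical preset

function adjustForCodec(codec: string, encoding: typeof baseEncoding) {
  switch (codec) {
    case 'av1':
      return { ...encoding, maxBitrate: encoding.maxBitrate * 0.7 };  // 1,190,000
    case 'vp9':
      return { ...encoding, maxBitrate: encoding.maxBitrate * 0.85 }; // 1,445,000
    default:
      return encoding;
  }
}

console.log(adjustForCodec('vp9', baseEncoding).maxBitrate); // 1445000
```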
@@ -21205,13 +21250,15 @@ function encodingsFromPresets(width, height, presets) {
     }
     const size = Math.min(width, height);
     const rid = videoRids[idx];
-    encodings.push({
+    const encoding = {
       rid,
       scaleResolutionDownBy: Math.max(1, size / Math.min(preset.width, preset.height)),
-      maxBitrate: preset.encoding.maxBitrate,
-      /* @ts-ignore */
-      maxFramerate: preset.encoding.maxFramerate
-    });
+      maxBitrate: preset.encoding.maxBitrate
+    };
+    if (preset.encoding.maxFramerate) {
+      encoding.maxFramerate = preset.encoding.maxFramerate;
+    }
+    encodings.push(encoding);
   });
   return encodings;
 }
@@ -21784,10 +21831,13 @@ class LocalParticipant extends Participant {
       // we frequently get no data on layer 0 when enabled
       opts.simulcast = false;
     }
-    // require full AV1 SVC support prior to using it
+    // require full AV1/VP9 SVC support prior to using it
     if (opts.videoCodec === 'av1' && !supportsAV1()) {
      opts.videoCodec = undefined;
    }
+    if (opts.videoCodec === 'vp9' && !supportsVP9()) {
+      opts.videoCodec = undefined;
+    }
     // handle track actions
     track.on(TrackEvent.Muted, this.onTrackMuted);
     track.on(TrackEvent.Unmuted, this.onTrackUnmuted);
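Because VP9 is now gated the same way as AV1, an application can request it and rely on the SDK to drop the codec when receive support is missing. A hedged usage sketch; the `videoCodec`, `scalabilityMode`, and `backupCodec` publish options are assumed to be available as in recent livekit-client releases:

```typescript
import { createLocalVideoTrack, Room } from 'livekit-client';

async function publishVp9(room: Room) {
  const track = await createLocalVideoTrack();
  await room.localParticipant.publishTrack(track, {
    videoCodec: 'vp9',
    scalabilityMode: 'L3T3',       // default applied for SVC codecs when left unset
    backupCodec: { codec: 'vp8' }, // used when vp9 cannot be negotiated
  });
}
```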
@@ -21825,7 +21875,7 @@
     req.height = dims.height;
     // for svc codecs, disable simulcast and use vp8 for backup codec
     if (track instanceof LocalVideoTrack) {
-      if ((opts === null || opts === void 0 ? void 0 : opts.videoCodec) === 'av1') {
+      if (isSVCCodec(opts.videoCodec)) {
         // set scalabilityMode to 'L3T3' by default
         opts.scalabilityMode = (_c = opts.scalabilityMode) !== null && _c !== void 0 ? _c : 'L3T3';
       }
@@ -21856,6 +21906,28 @@
       throw new UnexpectedConnectionState('cannot publish track when not connected');
     }
     const ti = await this.engine.addTrack(req);
+    let primaryCodecSupported = false;
+    let backupCodecSupported = false;
+    ti.codecs.forEach(c => {
+      if (isCodecEqual(c.mimeType, opts.videoCodec)) {
+        primaryCodecSupported = true;
+      } else if (opts.backupCodec && isCodecEqual(c.mimeType, opts.backupCodec.codec)) {
+        backupCodecSupported = true;
+      }
+    });
+    if (req.simulcastCodecs.length > 0) {
+      if (!primaryCodecSupported && !backupCodecSupported) {
+        throw Error('cannot publish track, codec not supported by server');
+      }
+      if (!primaryCodecSupported && opts.backupCodec) {
+        const backupCodec = opts.backupCodec;
+        opts = _objectSpread2({}, opts);
+        livekitLogger.debug("primary codec ".concat(opts.videoCodec, " not supported, fallback to ").concat(backupCodec.codec));
+        opts.videoCodec = backupCodec.codec;
+        opts.videoEncoding = backupCodec.encoding;
+        encodings = simEncodings;
+      }
+    }
     const publication = new LocalTrackPublication(track.kind, ti, track);
     // save options for when it needs to be republished again
     publication.options = opts;
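This hunk makes publishing aware of which codecs the server actually accepted: if the primary codec is absent from the returned track info but the backup codec is present, the publication silently switches to the backup. A readable restatement of that decision (not the bundled source):

```typescript
// Given the codec mime types the server reports for the track, decide which
// codec to actually publish with, preferring the requested primary codec.
function pickCodec(serverMimeTypes: string[], primary: string, backup?: string): string {
  const matches = (mime: string, codec: string) =>
    mime.toLowerCase().replace(/^(audio|video)\//, '') === codec.toLowerCase();

  if (serverMimeTypes.some((m) => matches(m, primary))) return primary;
  if (backup && serverMimeTypes.some((m) => matches(m, backup))) return backup;
  throw Error('cannot publish track, codec not supported by server');
}

console.log(pickCodec(['video/VP8', 'video/H264'], 'vp9', 'vp8')); // "vp8"
```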
@@ -21869,7 +21941,7 @@
     });
     // store RTPSender
     track.sender = await this.engine.createSender(track, opts, encodings);
-    if (track.codec === 'av1' && encodings && ((_d = encodings[0]) === null || _d === void 0 ? void 0 : _d.maxBitrate)) {
+    if (track.codec && isSVCCodec(track.codec) && encodings && ((_d = encodings[0]) === null || _d === void 0 ? void 0 : _d.maxBitrate)) {
       this.engine.publisher.setTrackCodecBitrate(req.cid, track.codec, encodings[0].maxBitrate / 1000);
     }
     this.engine.negotiate();
@@ -22404,6 +22476,7 @@ class Room extends eventsExports.EventEmitter {
       livekitLogger.debug("signal reconnected to server", {
         region: joinResponse.serverRegion
       });
+      this.cachedParticipantSids = [];
       this.applyJoinResponse(joinResponse);
       try {
         // unpublish & republish tracks
@@ -22647,6 +22720,7 @@
     };
     this.setMaxListeners(100);
     this.participants = new Map();
+    this.cachedParticipantSids = [];
     this.identityToSid = new Map();
     this.options = _objectSpread2(_objectSpread2({}, roomOptionDefaults), options);
     this.options.audioCaptureDefaults = _objectSpread2(_objectSpread2({}, audioDefaults), options === null || options === void 0 ? void 0 : options.audioCaptureDefaults);
@@ -22705,10 +22779,15 @@
       if (this.setAndEmitConnectionState(ConnectionState.Reconnecting)) {
         this.emit(RoomEvent.Reconnecting);
       }
+      this.cachedParticipantSids = Array.from(this.participants.keys());
     }).on(EngineEvent.Resumed, () => {
       this.setAndEmitConnectionState(ConnectionState.Connected);
       this.emit(RoomEvent.Reconnected);
       this.updateSubscriptions();
+      // once reconnected, figure out if any participants connected during reconnect and emit events for it
+      const diffParticipants = Array.from(this.participants.values()).filter(p => !this.cachedParticipantSids.includes(p.sid));
+      diffParticipants.forEach(p => this.emit(RoomEvent.ParticipantConnected, p));
+      this.cachedParticipantSids = [];
     }).on(EngineEvent.SignalResumed, () => {
       if (this.state === ConnectionState.Reconnecting) {
         this.sendSyncState();
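The new `cachedParticipantSids` bookkeeping means participants who joined while the connection was being resumed are announced once the room is back. Nothing new is required on the application side; a minimal sketch of the events involved:

```typescript
import { Room, RoomEvent } from 'livekit-client';

const room = new Room();

room
  .on(RoomEvent.Reconnecting, () => console.log('connection interrupted, resuming...'))
  .on(RoomEvent.Reconnected, () => console.log('connection resumed'))
  .on(RoomEvent.ParticipantConnected, (p) => {
    // with 1.9.1 this also fires for participants who joined during the reconnect
    console.log(`participant joined: ${p.identity}`);
  });
```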
@@ -23784,5 +23863,5 @@ class ConnectionCheck extends EventEmitter$1 {
   }
 }
 
-export { AudioPresets, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EngineEvent, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, MediaDeviceFailure, NegotiationError, Participant, ParticipantEvent, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, RoomState, ScreenSharePresets, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, detachTrack, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, isBackupCodec, isBrowserSupported, protocolVersion, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, version };
+export { AudioPresets, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EngineEvent, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, MediaDeviceFailure, NegotiationError, Participant, ParticipantEvent, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, RoomState, ScreenSharePresets, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, detachTrack, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, isBackupCodec, isBrowserSupported, isCodecEqual, protocolVersion, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, version };
 //# sourceMappingURL=livekit-client.esm.mjs.map