@stream-io/video-client 0.3.28 → 0.3.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/CHANGELOG.md +14 -0
  2. package/README.md +6 -4
  3. package/dist/index.browser.es.js +382 -118
  4. package/dist/index.browser.es.js.map +1 -1
  5. package/dist/index.cjs.js +382 -116
  6. package/dist/index.cjs.js.map +1 -1
  7. package/dist/index.es.js +382 -118
  8. package/dist/index.es.js.map +1 -1
  9. package/dist/src/Call.d.ts +14 -10
  10. package/dist/src/devices/CameraManager.d.ts +0 -1
  11. package/dist/src/devices/InputMediaDeviceManager.d.ts +18 -15
  12. package/dist/src/devices/InputMediaDeviceManagerState.d.ts +22 -6
  13. package/dist/src/devices/MicrophoneManager.d.ts +0 -1
  14. package/dist/src/devices/ScreenShareManager.d.ts +39 -0
  15. package/dist/src/devices/ScreenShareState.d.ts +36 -0
  16. package/dist/src/devices/__tests__/ScreenShareManager.test.d.ts +1 -0
  17. package/dist/src/devices/__tests__/mocks.d.ts +3 -7
  18. package/dist/src/devices/index.d.ts +2 -0
  19. package/dist/src/helpers/DynascaleManager.d.ts +3 -2
  20. package/dist/src/helpers/__tests__/hq-audio-sdp.d.ts +1 -0
  21. package/dist/src/helpers/sdp-munging.d.ts +8 -0
  22. package/dist/src/rtc/Publisher.d.ts +7 -4
  23. package/dist/src/rtc/helpers/tracks.d.ts +2 -1
  24. package/dist/src/rtc/videoLayers.d.ts +2 -1
  25. package/dist/src/types.d.ts +20 -0
  26. package/dist/version.d.ts +1 -1
  27. package/package.json +1 -1
  28. package/src/Call.ts +56 -12
  29. package/src/devices/CameraManager.ts +3 -4
  30. package/src/devices/InputMediaDeviceManager.ts +60 -45
  31. package/src/devices/InputMediaDeviceManagerState.ts +34 -14
  32. package/src/devices/MicrophoneManager.ts +3 -4
  33. package/src/devices/ScreenShareManager.ts +85 -0
  34. package/src/devices/ScreenShareState.ts +63 -0
  35. package/src/devices/__tests__/InputMediaDeviceManager.test.ts +16 -1
  36. package/src/devices/__tests__/ScreenShareManager.test.ts +119 -0
  37. package/src/devices/__tests__/mocks.ts +38 -1
  38. package/src/devices/devices.ts +10 -1
  39. package/src/devices/index.ts +2 -0
  40. package/src/helpers/DynascaleManager.ts +18 -3
  41. package/src/helpers/__tests__/DynascaleManager.test.ts +36 -1
  42. package/src/helpers/__tests__/hq-audio-sdp.ts +332 -0
  43. package/src/helpers/__tests__/sdp-munging.test.ts +13 -1
  44. package/src/helpers/sdp-munging.ts +49 -0
  45. package/src/rtc/Publisher.ts +87 -48
  46. package/src/rtc/Subscriber.ts +4 -1
  47. package/src/rtc/helpers/tracks.ts +16 -6
  48. package/src/rtc/videoLayers.ts +4 -2
  49. package/src/store/CallState.ts +3 -2
  50. package/src/store/__tests__/CallState.test.ts +1 -1
  51. package/src/types.ts +27 -0
@@ -4,7 +4,7 @@ import { ServiceType, stackIntercept } from '@protobuf-ts/runtime-rpc';
4
4
  import axios, { AxiosHeaders } from 'axios';
5
5
  export { AxiosError } from 'axios';
6
6
  import { TwirpFetchTransport } from '@protobuf-ts/twirp-transport';
7
- import { ReplaySubject, combineLatest, BehaviorSubject, map as map$2, takeWhile, distinctUntilChanged as distinctUntilChanged$1, shareReplay as shareReplay$1, distinctUntilKeyChanged, Observable, debounceTime, concatMap, from, merge, filter, pairwise, tap, debounce, timer } from 'rxjs';
7
+ import { ReplaySubject, combineLatest, BehaviorSubject, map as map$2, takeWhile, distinctUntilChanged as distinctUntilChanged$1, shareReplay as shareReplay$1, distinctUntilKeyChanged, Observable, debounceTime, concatMap, from, merge, filter, pairwise, of, tap, debounce, timer } from 'rxjs';
8
8
  import * as SDP from 'sdp-transform';
9
9
  import { UAParser } from 'ua-parser-js';
10
10
  import WebSocket from 'isomorphic-ws';
@@ -6196,7 +6196,8 @@ const withSimulcastConstraints = (settings, optimalVideoLayers) => {
6196
6196
  const ridMapping = ['q', 'h', 'f'];
6197
6197
  return layers.map((layer, index) => (Object.assign(Object.assign({}, layer), { rid: ridMapping[index] })));
6198
6198
  };
6199
- const findOptimalScreenSharingLayers = (videoTrack) => {
6199
+ const findOptimalScreenSharingLayers = (videoTrack, preferences) => {
6200
+ var _a, _b;
6200
6201
  const settings = videoTrack.getSettings();
6201
6202
  return [
6202
6203
  {
@@ -6204,9 +6205,9 @@ const findOptimalScreenSharingLayers = (videoTrack) => {
6204
6205
  rid: 'q',
6205
6206
  width: settings.width || 0,
6206
6207
  height: settings.height || 0,
6207
- maxBitrate: 3000000,
6208
6208
  scaleResolutionDownBy: 1,
6209
- maxFramerate: 30,
6209
+ maxBitrate: (_a = preferences === null || preferences === void 0 ? void 0 : preferences.maxBitrate) !== null && _a !== void 0 ? _a : 3000000,
6210
+ maxFramerate: (_b = preferences === null || preferences === void 0 ? void 0 : preferences.maxFramerate) !== null && _b !== void 0 ? _b : 30,
6210
6211
  },
6211
6212
  ];
6212
6213
  };
@@ -6215,12 +6216,17 @@ const trackTypeToParticipantStreamKey = (trackType) => {
6215
6216
  switch (trackType) {
6216
6217
  case TrackType.SCREEN_SHARE:
6217
6218
  return 'screenShareStream';
6219
+ case TrackType.SCREEN_SHARE_AUDIO:
6220
+ return 'screenShareAudioStream';
6218
6221
  case TrackType.VIDEO:
6219
6222
  return 'videoStream';
6220
6223
  case TrackType.AUDIO:
6221
6224
  return 'audioStream';
6225
+ case TrackType.UNSPECIFIED:
6226
+ throw new Error('Track type is unspecified');
6222
6227
  default:
6223
- throw new Error(`Unknown track type: ${trackType}`);
6228
+ const exhaustiveTrackTypeCheck = trackType;
6229
+ throw new Error(`Unknown track type: ${exhaustiveTrackTypeCheck}`);
6224
6230
  }
6225
6231
  };
6226
6232
  const trackTypeToDeviceIdKey = (trackType) => {
@@ -6230,9 +6236,12 @@ const trackTypeToDeviceIdKey = (trackType) => {
6230
6236
  case TrackType.VIDEO:
6231
6237
  return 'videoDeviceId';
6232
6238
  case TrackType.SCREEN_SHARE:
6239
+ case TrackType.SCREEN_SHARE_AUDIO:
6240
+ case TrackType.UNSPECIFIED:
6233
6241
  return undefined;
6234
6242
  default:
6235
- throw new Error(`Unknown track type: ${trackType}`);
6243
+ const exhaustiveTrackTypeCheck = trackType;
6244
+ throw new Error(`Unknown track type: ${exhaustiveTrackTypeCheck}`);
6236
6245
  }
6237
6246
  };
6238
6247
  const muteTypeToTrackType = (muteType) => {
@@ -6243,8 +6252,11 @@ const muteTypeToTrackType = (muteType) => {
6243
6252
  return TrackType.VIDEO;
6244
6253
  case 'screenshare':
6245
6254
  return TrackType.SCREEN_SHARE;
6255
+ case 'screenshare_audio':
6256
+ return TrackType.SCREEN_SHARE_AUDIO;
6246
6257
  default:
6247
- throw new Error(`Unknown mute type: ${muteType}`);
6258
+ const exhaustiveMuteTypeCheck = muteType;
6259
+ throw new Error(`Unknown mute type: ${exhaustiveMuteTypeCheck}`);
6248
6260
  }
6249
6261
  };
6250
6262
 
@@ -6360,6 +6372,41 @@ const toggleDtx = (sdp, enable) => {
6360
6372
  }
6361
6373
  return sdp;
6362
6374
  };
6375
+ /**
6376
+ * Enables high-quality audio through SDP munging for the given trackMid.
6377
+ *
6378
+ * @param sdp the SDP to munge.
6379
+ * @param trackMid the trackMid.
6380
+ * @param maxBitrate the max bitrate to set.
6381
+ */
6382
+ const enableHighQualityAudio = (sdp, trackMid, maxBitrate = 510000) => {
6383
+ maxBitrate = Math.max(Math.min(maxBitrate, 510000), 96000);
6384
+ const parsedSdp = SDP.parse(sdp);
6385
+ const audioMedia = parsedSdp.media.find((m) => m.type === 'audio' && String(m.mid) === trackMid);
6386
+ if (!audioMedia)
6387
+ return sdp;
6388
+ const opusRtp = audioMedia.rtp.find((r) => r.codec === 'opus');
6389
+ if (!opusRtp)
6390
+ return sdp;
6391
+ const opusFmtp = audioMedia.fmtp.find((f) => f.payload === opusRtp.payload);
6392
+ if (!opusFmtp)
6393
+ return sdp;
6394
+ // enable stereo, if not already enabled
6395
+ if (opusFmtp.config.match(/stereo=(\d)/)) {
6396
+ opusFmtp.config = opusFmtp.config.replace(/stereo=(\d)/, 'stereo=1');
6397
+ }
6398
+ else {
6399
+ opusFmtp.config = `${opusFmtp.config};stereo=1`;
6400
+ }
6401
+ // set maxaveragebitrate, to the given value
6402
+ if (opusFmtp.config.match(/maxaveragebitrate=(\d*)/)) {
6403
+ opusFmtp.config = opusFmtp.config.replace(/maxaveragebitrate=(\d*)/, `maxaveragebitrate=${maxBitrate}`);
6404
+ }
6405
+ else {
6406
+ opusFmtp.config = `${opusFmtp.config};maxaveragebitrate=${maxBitrate}`;
6407
+ }
6408
+ return SDP.write(parsedSdp);
6409
+ };
6363
6410
 
6364
6411
  const logger$3 = getLogger(['Publisher']);
6365
6412
  /**
@@ -6386,6 +6433,7 @@ class Publisher {
6386
6433
  [TrackType.SCREEN_SHARE_AUDIO]: undefined,
6387
6434
  [TrackType.UNSPECIFIED]: undefined,
6388
6435
  };
6436
+ this.publishOptionsPerTrackType = new Map();
6389
6437
  /**
6390
6438
  * An array maintaining the order how transceivers were added to the peer connection.
6391
6439
  * This is needed because some browsers (Firefox) don't reliably report
@@ -6398,7 +6446,7 @@ class Publisher {
6398
6446
  [TrackType.AUDIO]: 'audio',
6399
6447
  [TrackType.VIDEO]: 'video',
6400
6448
  [TrackType.SCREEN_SHARE]: 'video',
6401
- [TrackType.SCREEN_SHARE_AUDIO]: undefined,
6449
+ [TrackType.SCREEN_SHARE_AUDIO]: 'audio',
6402
6450
  [TrackType.UNSPECIFIED]: undefined,
6403
6451
  };
6404
6452
  this.trackLayersCache = {
@@ -6443,10 +6491,11 @@ class Publisher {
6443
6491
  *
6444
6492
  * Consecutive calls to this method will replace the stream.
6445
6493
  * The previous stream will be stopped.
6446
- * @param mediaStream
6447
- * @param track
6448
- * @param trackType
6449
- * @param opts
6494
+ *
6495
+ * @param mediaStream the media stream to publish.
6496
+ * @param track the track to publish.
6497
+ * @param trackType the track type to publish.
6498
+ * @param opts the optional publish options to use.
6450
6499
  */
6451
6500
  this.publishStream = (mediaStream, track, trackType, opts = {}) => __awaiter(this, void 0, void 0, function* () {
6452
6501
  var _a;
@@ -6476,7 +6525,9 @@ class Publisher {
6476
6525
  const targetResolution = settings === null || settings === void 0 ? void 0 : settings.video.target_resolution;
6477
6526
  const videoEncodings = trackType === TrackType.VIDEO
6478
6527
  ? findOptimalVideoLayers(track, targetResolution)
6479
- : undefined;
6528
+ : trackType === TrackType.SCREEN_SHARE
6529
+ ? findOptimalScreenSharingLayers(track, opts.screenShareSettings)
6530
+ : undefined;
6480
6531
  let preferredCodec = opts.preferredCodec;
6481
6532
  if (!preferredCodec && trackType === TrackType.VIDEO) {
6482
6533
  const isRNAndroid = isReactNative() && ((_a = getOSInfo()) === null || _a === void 0 ? void 0 : _a.name.toLowerCase()) === 'android';
@@ -6502,6 +6553,7 @@ class Publisher {
6502
6553
  logger$3('debug', `Added ${TrackType[trackType]} transceiver`);
6503
6554
  this.transceiverInitOrder.push(trackType);
6504
6555
  this.transceiverRegistry[trackType] = transceiver;
6556
+ this.publishOptionsPerTrackType.set(trackType, opts);
6505
6557
  if ('setCodecPreferences' in transceiver && codecPreferences) {
6506
6558
  logger$3('info', `Setting ${TrackType[trackType]} codec preferences`, codecPreferences);
6507
6559
  transceiver.setCodecPreferences(codecPreferences);
@@ -6711,10 +6763,19 @@ class Publisher {
6711
6763
  * @param options the optional offer options to use.
6712
6764
  */
6713
6765
  this.negotiate = (options) => __awaiter(this, void 0, void 0, function* () {
6714
- var _d;
6766
+ var _d, _e;
6715
6767
  this.isIceRestarting = (_d = options === null || options === void 0 ? void 0 : options.iceRestart) !== null && _d !== void 0 ? _d : false;
6716
6768
  const offer = yield this.pc.createOffer(options);
6717
- offer.sdp = this.mungeCodecs(offer.sdp);
6769
+ let sdp = this.mungeCodecs(offer.sdp);
6770
+ if (sdp && this.isPublishing(TrackType.SCREEN_SHARE_AUDIO)) {
6771
+ const transceiver = this.transceiverRegistry[TrackType.SCREEN_SHARE_AUDIO];
6772
+ if (transceiver && transceiver.sender.track) {
6773
+ const mid = (_e = transceiver.mid) !== null && _e !== void 0 ? _e : this.extractMid(sdp, transceiver.sender.track, TrackType.SCREEN_SHARE_AUDIO);
6774
+ sdp = enableHighQualityAudio(sdp, mid);
6775
+ }
6776
+ }
6777
+ // set the munged SDP back to the offer
6778
+ offer.sdp = sdp;
6718
6779
  const trackInfos = this.getCurrentTrackInfos(offer.sdp);
6719
6780
  if (trackInfos.length === 0) {
6720
6781
  throw new Error(`Can't initiate negotiation without announcing any tracks`);
@@ -6753,50 +6814,50 @@ class Publisher {
6753
6814
  }
6754
6815
  return sdp;
6755
6816
  };
6817
+ this.extractMid = (sdp, track, trackType) => {
6818
+ if (!sdp) {
6819
+ logger$3('warn', 'No SDP found. Returning empty mid');
6820
+ return '';
6821
+ }
6822
+ logger$3('debug', `No 'mid' found for track. Trying to find it from the Offer SDP`);
6823
+ const parsedSdp = SDP.parse(sdp);
6824
+ const media = parsedSdp.media.find((m) => {
6825
+ var _a, _b;
6826
+ return (m.type === track.kind &&
6827
+ // if `msid` is not present, we assume that the track is the first one
6828
+ ((_b = (_a = m.msid) === null || _a === void 0 ? void 0 : _a.includes(track.id)) !== null && _b !== void 0 ? _b : true));
6829
+ });
6830
+ if (typeof (media === null || media === void 0 ? void 0 : media.mid) === 'undefined') {
6831
+ logger$3('debug', `No mid found in SDP for track type ${track.kind} and id ${track.id}. Attempting to find a heuristic mid`);
6832
+ const heuristicMid = this.transceiverInitOrder.indexOf(trackType);
6833
+ if (heuristicMid !== -1) {
6834
+ return String(heuristicMid);
6835
+ }
6836
+ logger$3('debug', 'No heuristic mid found. Returning empty mid');
6837
+ return '';
6838
+ }
6839
+ return String(media.mid);
6840
+ };
6756
6841
  this.getCurrentTrackInfos = (sdp) => {
6757
6842
  var _a;
6758
6843
  sdp = sdp || ((_a = this.pc.localDescription) === null || _a === void 0 ? void 0 : _a.sdp);
6759
- const extractMid = (defaultMid, track, trackType) => {
6760
- if (defaultMid)
6761
- return defaultMid;
6762
- if (!sdp) {
6763
- logger$3('warn', 'No SDP found. Returning empty mid');
6764
- return '';
6765
- }
6766
- logger$3('debug', `No 'mid' found for track. Trying to find it from the Offer SDP`);
6767
- const parsedSdp = SDP.parse(sdp);
6768
- const media = parsedSdp.media.find((m) => {
6769
- var _a, _b;
6770
- return (m.type === track.kind &&
6771
- // if `msid` is not present, we assume that the track is the first one
6772
- ((_b = (_a = m.msid) === null || _a === void 0 ? void 0 : _a.includes(track.id)) !== null && _b !== void 0 ? _b : true));
6773
- });
6774
- if (typeof (media === null || media === void 0 ? void 0 : media.mid) === 'undefined') {
6775
- logger$3('debug', `No mid found in SDP for track type ${track.kind} and id ${track.id}. Attempting to find a heuristic mid`);
6776
- const heuristicMid = this.transceiverInitOrder.indexOf(trackType);
6777
- if (heuristicMid !== -1) {
6778
- return String(heuristicMid);
6779
- }
6780
- logger$3('debug', 'No heuristic mid found. Returning empty mid');
6781
- return '';
6782
- }
6783
- return String(media.mid);
6784
- };
6785
6844
  const { settings } = this.state;
6786
6845
  const targetResolution = settings === null || settings === void 0 ? void 0 : settings.video.target_resolution;
6787
6846
  return this.pc
6788
6847
  .getTransceivers()
6789
6848
  .filter((t) => t.direction === 'sendonly' && t.sender.track)
6790
6849
  .map((transceiver) => {
6850
+ var _a;
6791
6851
  const trackType = Number(Object.keys(this.transceiverRegistry).find((key) => this.transceiverRegistry[key] === transceiver));
6792
6852
  const track = transceiver.sender.track;
6793
6853
  let optimalLayers;
6794
6854
  if (track.readyState === 'live') {
6855
+ const publishOpts = this.publishOptionsPerTrackType.get(trackType);
6795
6856
  optimalLayers =
6796
6857
  trackType === TrackType.VIDEO
6797
6858
  ? findOptimalVideoLayers(track, targetResolution)
6798
6859
  : trackType === TrackType.SCREEN_SHARE
6799
- ? findOptimalScreenSharingLayers(track)
6860
+ ? findOptimalScreenSharingLayers(track, publishOpts === null || publishOpts === void 0 ? void 0 : publishOpts.screenShareSettings)
6800
6861
  : [];
6801
6862
  this.trackLayersCache[trackType] = optimalLayers;
6802
6863
  }
@@ -6815,15 +6876,21 @@ class Publisher {
6815
6876
  height: optimalLayer.height,
6816
6877
  },
6817
6878
  }));
6879
+ const isAudioTrack = [
6880
+ TrackType.AUDIO,
6881
+ TrackType.SCREEN_SHARE_AUDIO,
6882
+ ].includes(trackType);
6883
+ const trackSettings = track.getSettings();
6884
+ // @ts-expect-error - `channelCount` is not defined on `MediaTrackSettings`
6885
+ const isStereo = isAudioTrack && trackSettings.channelCount === 2;
6818
6886
  return {
6819
6887
  trackId: track.id,
6820
6888
  layers: layers,
6821
6889
  trackType,
6822
- mid: extractMid(transceiver.mid, track, trackType),
6823
- // FIXME OL: adjust these values
6824
- stereo: false,
6825
- dtx: TrackType.AUDIO === trackType && this.isDtxEnabled,
6826
- red: TrackType.AUDIO === trackType && this.isRedEnabled,
6890
+ mid: (_a = transceiver.mid) !== null && _a !== void 0 ? _a : this.extractMid(sdp, track, trackType),
6891
+ stereo: isStereo,
6892
+ dtx: isAudioTrack && this.isDtxEnabled,
6893
+ red: isAudioTrack && this.isRedEnabled,
6827
6894
  };
6828
6895
  });
6829
6896
  };
@@ -7041,6 +7108,7 @@ class Subscriber {
7041
7108
  TRACK_TYPE_AUDIO: 'audioStream',
7042
7109
  TRACK_TYPE_VIDEO: 'videoStream',
7043
7110
  TRACK_TYPE_SCREEN_SHARE: 'screenShareStream',
7111
+ TRACK_TYPE_SCREEN_SHARE_AUDIO: 'screenShareAudioStream',
7044
7112
  }[trackType];
7045
7113
  if (!streamKindProp) {
7046
7114
  logger$2('error', `Unknown track type: ${trackType}`);
@@ -7129,7 +7197,8 @@ class Subscriber {
7129
7197
  this.onIceCandidateError = (e) => {
7130
7198
  const errorMessage = e instanceof RTCPeerConnectionIceErrorEvent &&
7131
7199
  `${e.errorCode}: ${e.errorText}`;
7132
- logger$2('error', `ICE Candidate error`, errorMessage);
7200
+ const logLevel = this.pc.iceConnectionState === 'connected' ? 'debug' : 'error';
7201
+ logger$2(logLevel, `ICE Candidate error`, errorMessage);
7133
7202
  };
7134
7203
  this.sfuClient = sfuClient;
7135
7204
  this.dispatcher = dispatcher;
@@ -8334,8 +8403,9 @@ class CallState {
8334
8403
  };
8335
8404
  this.logger = getLogger(['CallState']);
8336
8405
  this.participants$ = this.participantsSubject.asObservable().pipe(
8337
- // TODO: replace with Array.toSorted once available
8338
- map$1((ps) => [...ps].sort(this.sortParticipantsBy)), shareReplay({ bufferSize: 1, refCount: true }));
8406
+ // maintain stable-sort by mutating the participants stored
8407
+ // in the original subject
8408
+ map$1((ps) => ps.sort(this.sortParticipantsBy)), shareReplay({ bufferSize: 1, refCount: true }));
8339
8409
  this.localParticipant$ = this.participants$.pipe(map$1((participants) => participants.find(isStreamVideoLocalParticipant)), shareReplay({ bufferSize: 1, refCount: true }));
8340
8410
  this.remoteParticipants$ = this.participants$.pipe(map$1((participants) => participants.filter((p) => !p.isLocalParticipant)), shareReplay({ bufferSize: 1, refCount: true }));
8341
8411
  this.pinnedParticipants$ = this.participants$.pipe(map$1((participants) => participants.filter((p) => !!p.pin)), shareReplay({ bufferSize: 1, refCount: true }));
@@ -9641,17 +9711,22 @@ class DynascaleManager {
9641
9711
  *
9642
9712
  * @param audioElement the audio element to bind to.
9643
9713
  * @param sessionId the session id.
9714
+ * @param trackType the kind of audio.
9644
9715
  * @returns a cleanup function that will unbind the audio element.
9645
9716
  */
9646
- this.bindAudioElement = (audioElement, sessionId) => {
9717
+ this.bindAudioElement = (audioElement, sessionId, trackType) => {
9647
9718
  const participant = this.call.state.findParticipantBySessionId(sessionId);
9648
9719
  if (!participant || participant.isLocalParticipant)
9649
9720
  return;
9650
9721
  const participant$ = this.call.state.participants$.pipe(map$2((participants) => participants.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged$1(), shareReplay$1({ bufferSize: 1, refCount: true }));
9651
9722
  const updateMediaStreamSubscription = participant$
9652
- .pipe(distinctUntilKeyChanged('audioStream'))
9723
+ .pipe(distinctUntilKeyChanged(trackType === 'screenShareAudioTrack'
9724
+ ? 'screenShareAudioStream'
9725
+ : 'audioStream'))
9653
9726
  .subscribe((p) => {
9654
- const source = p.audioStream;
9727
+ const source = trackType === 'screenShareAudioTrack'
9728
+ ? p.screenShareAudioStream
9729
+ : p.audioStream;
9655
9730
  if (audioElement.srcObject === source)
9656
9731
  return;
9657
9732
  setTimeout(() => {
@@ -9966,7 +10041,16 @@ const getVideoStream = (trackConstraints) => __awaiter(void 0, void 0, void 0, f
9966
10041
  */
9967
10042
  const getScreenShareStream = (options) => __awaiter(void 0, void 0, void 0, function* () {
9968
10043
  try {
9969
- return yield navigator.mediaDevices.getDisplayMedia(Object.assign({ video: true, audio: false }, options));
10044
+ return yield navigator.mediaDevices.getDisplayMedia(Object.assign({ video: true, audio: {
10045
+ channelCount: {
10046
+ ideal: 2,
10047
+ },
10048
+ echoCancellation: false,
10049
+ autoGainControl: false,
10050
+ noiseSuppression: false,
10051
+ },
10052
+ // @ts-expect-error - not present in types yet
10053
+ systemAudio: 'include' }, options));
9970
10054
  }
9971
10055
  catch (e) {
9972
10056
  getLogger(['devices'])('error', 'Failed to get screen share stream', e);
@@ -10095,7 +10179,7 @@ class InputMediaDeviceManager {
10095
10179
  return this.getDevices();
10096
10180
  }
10097
10181
  /**
10098
- * Starts camera/microphone
10182
+ * Starts stream.
10099
10183
  */
10100
10184
  enable() {
10101
10185
  return __awaiter(this, void 0, void 0, function* () {
@@ -10114,9 +10198,7 @@ class InputMediaDeviceManager {
10114
10198
  });
10115
10199
  }
10116
10200
  /**
10117
- * Stops camera/microphone
10118
- *
10119
- * @returns
10201
+ * Stops the stream.
10120
10202
  */
10121
10203
  disable() {
10122
10204
  return __awaiter(this, void 0, void 0, function* () {
@@ -10137,7 +10219,7 @@ class InputMediaDeviceManager {
10137
10219
  });
10138
10220
  }
10139
10221
  /**
10140
- * If status was previously enabled, it will reenable the device.
10222
+ * If status was previously enabled, it will re-enable the device.
10141
10223
  */
10142
10224
  resume() {
10143
10225
  return __awaiter(this, void 0, void 0, function* () {
@@ -10148,9 +10230,8 @@ class InputMediaDeviceManager {
10148
10230
  });
10149
10231
  }
10150
10232
  /**
10151
- * If current device statis is disabled, it will enable the device, else it will disable it.
10152
- *
10153
- * @returns
10233
+ * If the current device status is disabled, it will enable the device,
10234
+ * else it will disable it.
10154
10235
  */
10155
10236
  toggle() {
10156
10237
  return __awaiter(this, void 0, void 0, function* () {
@@ -10162,6 +10243,16 @@ class InputMediaDeviceManager {
10162
10243
  }
10163
10244
  });
10164
10245
  }
10246
+ /**
10247
+ * Will set the default constraints for the device.
10248
+ *
10249
+ * @param constraints the constraints to set.
10250
+ */
10251
+ setDefaultConstraints(constraints) {
10252
+ return __awaiter(this, void 0, void 0, function* () {
10253
+ this.state.setDefaultConstraints(constraints);
10254
+ });
10255
+ }
10165
10256
  /**
10166
10257
  * Select device
10167
10258
  *
@@ -10189,8 +10280,11 @@ class InputMediaDeviceManager {
10189
10280
  }
10190
10281
  });
10191
10282
  }
10283
+ getTracks() {
10284
+ var _a, _b;
10285
+ return (_b = (_a = this.state.mediaStream) === null || _a === void 0 ? void 0 : _a.getTracks()) !== null && _b !== void 0 ? _b : [];
10286
+ }
10192
10287
  muteStream(stopTracks = true) {
10193
- var _a;
10194
10288
  return __awaiter(this, void 0, void 0, function* () {
10195
10289
  if (!this.state.mediaStream) {
10196
10290
  return;
@@ -10200,57 +10294,63 @@ class InputMediaDeviceManager {
10200
10294
  yield this.stopPublishStream(stopTracks);
10201
10295
  }
10202
10296
  this.muteLocalStream(stopTracks);
10203
- if (((_a = this.getTrack()) === null || _a === void 0 ? void 0 : _a.readyState) === 'ended') {
10204
- // @ts-expect-error release() is present in react-native-webrtc and must be called to dispose the stream
10205
- if (typeof this.state.mediaStream.release === 'function') {
10206
- // @ts-expect-error
10207
- this.state.mediaStream.release();
10297
+ this.getTracks().forEach((track) => {
10298
+ if (track.readyState === 'ended') {
10299
+ // @ts-expect-error release() is present in react-native-webrtc
10300
+ // and must be called to dispose the stream
10301
+ if (typeof this.state.mediaStream.release === 'function') {
10302
+ // @ts-expect-error
10303
+ this.state.mediaStream.release();
10304
+ }
10305
+ this.state.setMediaStream(undefined);
10208
10306
  }
10209
- this.state.setMediaStream(undefined);
10210
- }
10307
+ });
10211
10308
  });
10212
10309
  }
10213
- muteTrack() {
10214
- const track = this.getTrack();
10215
- if (!track || !track.enabled) {
10216
- return;
10217
- }
10218
- track.enabled = false;
10310
+ muteTracks() {
10311
+ this.getTracks().forEach((track) => {
10312
+ if (track.enabled)
10313
+ track.enabled = false;
10314
+ });
10219
10315
  }
10220
- unmuteTrack() {
10221
- const track = this.getTrack();
10222
- if (!track || track.enabled) {
10223
- return;
10224
- }
10225
- track.enabled = true;
10316
+ unmuteTracks() {
10317
+ this.getTracks().forEach((track) => {
10318
+ if (!track.enabled)
10319
+ track.enabled = true;
10320
+ });
10226
10321
  }
10227
- stopTrack() {
10228
- const track = this.getTrack();
10229
- if (!track || track.readyState === 'ended') {
10230
- return;
10231
- }
10232
- track.stop();
10322
+ stopTracks() {
10323
+ this.getTracks().forEach((track) => {
10324
+ if (track.readyState === 'live')
10325
+ track.stop();
10326
+ });
10233
10327
  }
10234
10328
  muteLocalStream(stopTracks) {
10235
10329
  if (!this.state.mediaStream) {
10236
10330
  return;
10237
10331
  }
10238
- stopTracks ? this.stopTrack() : this.muteTrack();
10332
+ if (stopTracks) {
10333
+ this.stopTracks();
10334
+ }
10335
+ else {
10336
+ this.muteTracks();
10337
+ }
10239
10338
  }
10240
10339
  unmuteStream() {
10241
- var _a;
10242
10340
  return __awaiter(this, void 0, void 0, function* () {
10243
10341
  this.logger('debug', 'Starting stream');
10244
10342
  let stream;
10245
- if (this.state.mediaStream && ((_a = this.getTrack()) === null || _a === void 0 ? void 0 : _a.readyState) === 'live') {
10343
+ if (this.state.mediaStream &&
10344
+ this.getTracks().every((t) => t.readyState === 'live')) {
10246
10345
  stream = this.state.mediaStream;
10247
- this.unmuteTrack();
10346
+ this.unmuteTracks();
10248
10347
  }
10249
10348
  else {
10250
10349
  if (this.state.mediaStream) {
10251
- this.stopTrack();
10350
+ this.stopTracks();
10252
10351
  }
10253
- const constraints = { deviceId: this.state.selectedDevice };
10352
+ const defaultConstraints = this.state.defaultConstraints;
10353
+ const constraints = Object.assign(Object.assign({}, defaultConstraints), { deviceId: this.state.selectedDevice });
10254
10354
  stream = yield this.getStream(constraints);
10255
10355
  }
10256
10356
  if (this.call.state.callingState === CallingState.JOINED) {
@@ -10269,6 +10369,26 @@ class InputMediaDeviceManagerState {
10269
10369
  this.statusSubject = new BehaviorSubject(undefined);
10270
10370
  this.mediaStreamSubject = new BehaviorSubject(undefined);
10271
10371
  this.selectedDeviceSubject = new BehaviorSubject(undefined);
10372
+ this.defaultConstraintsSubject = new BehaviorSubject(undefined);
10373
+ /**
10374
+ * An Observable that emits the current media stream, or `undefined` if the device is currently disabled.
10375
+ *
10376
+ */
10377
+ this.mediaStream$ = this.mediaStreamSubject.asObservable();
10378
+ /**
10379
+ * An Observable that emits the currently selected device
10380
+ */
10381
+ this.selectedDevice$ = this.selectedDeviceSubject
10382
+ .asObservable()
10383
+ .pipe(distinctUntilChanged$1());
10384
+ /**
10385
+ * An Observable that emits the device status
10386
+ */
10387
+ this.status$ = this.statusSubject.asObservable().pipe(distinctUntilChanged$1());
10388
+ /**
10389
+ * The default constraints for the device.
10390
+ */
10391
+ this.defaultConstraints$ = this.defaultConstraintsSubject.asObservable();
10272
10392
  /**
10273
10393
  * Gets the current value of an observable, or undefined if the observable has
10274
10394
  * not emitted a value yet.
@@ -10288,13 +10408,6 @@ class InputMediaDeviceManagerState {
10288
10408
  * @return the updated value.
10289
10409
  */
10290
10410
  this.setCurrentValue = setCurrentValue;
10291
- this.mediaStream$ = this.mediaStreamSubject.asObservable();
10292
- this.selectedDevice$ = this.selectedDeviceSubject
10293
- .asObservable()
10294
- .pipe(distinctUntilChanged$1());
10295
- this.status$ = this.statusSubject
10296
- .asObservable()
10297
- .pipe(distinctUntilChanged$1());
10298
10411
  }
10299
10412
  /**
10300
10413
  * The device status
@@ -10338,6 +10451,21 @@ class InputMediaDeviceManagerState {
10338
10451
  setDevice(deviceId) {
10339
10452
  this.setCurrentValue(this.selectedDeviceSubject, deviceId);
10340
10453
  }
10454
+ /**
10455
+ * Gets the default constraints for the device.
10456
+ */
10457
+ get defaultConstraints() {
10458
+ return this.getCurrentValue(this.defaultConstraints$);
10459
+ }
10460
+ /**
10461
+ * Sets the default constraints for the device.
10462
+ *
10463
+ * @internal
10464
+ * @param constraints the constraints to set.
10465
+ */
10466
+ setDefaultConstraints(constraints) {
10467
+ this.setCurrentValue(this.defaultConstraintsSubject, constraints);
10468
+ }
10341
10469
  }
10342
10470
 
10343
10471
  class CameraManagerState extends InputMediaDeviceManagerState {
@@ -10462,10 +10590,6 @@ class CameraManager extends InputMediaDeviceManager {
10462
10590
  stopPublishStream(stopTracks) {
10463
10591
  return this.call.stopPublish(TrackType.VIDEO, stopTracks);
10464
10592
  }
10465
- getTrack() {
10466
- var _a;
10467
- return (_a = this.state.mediaStream) === null || _a === void 0 ? void 0 : _a.getVideoTracks()[0];
10468
- }
10469
10593
  }
10470
10594
 
10471
10595
  class MicrophoneManagerState extends InputMediaDeviceManagerState {
@@ -10590,10 +10714,6 @@ class MicrophoneManager extends InputMediaDeviceManager {
10590
10714
  stopPublishStream(stopTracks) {
10591
10715
  return this.call.stopPublish(TrackType.AUDIO, stopTracks);
10592
10716
  }
10593
- getTrack() {
10594
- var _a;
10595
- return (_a = this.state.mediaStream) === null || _a === void 0 ? void 0 : _a.getAudioTracks()[0];
10596
- }
10597
10717
  startSpeakingWhileMutedDetection(deviceId) {
10598
10718
  return __awaiter(this, void 0, void 0, function* () {
10599
10719
  if (isReactNative()) {
@@ -10625,6 +10745,128 @@ class MicrophoneManager extends InputMediaDeviceManager {
10625
10745
  }
10626
10746
  }
10627
10747
 
10748
+ class ScreenShareState extends InputMediaDeviceManagerState {
10749
+ constructor() {
10750
+ super(...arguments);
10751
+ this.audioEnabledSubject = new BehaviorSubject(true);
10752
+ this.settingsSubject = new BehaviorSubject(undefined);
10753
+ /**
10754
+ * An Observable that emits the current screen share audio status.
10755
+ */
10756
+ this.audioEnabled$ = this.audioEnabledSubject
10757
+ .asObservable()
10758
+ .pipe(distinctUntilChanged());
10759
+ /**
10760
+ * An Observable that emits the current screen share settings.
10761
+ */
10762
+ this.settings$ = this.settingsSubject.asObservable();
10763
+ /**
10764
+ * @internal
10765
+ */
10766
+ this.getDeviceIdFromStream = (stream) => {
10767
+ const [track] = stream.getTracks();
10768
+ return track === null || track === void 0 ? void 0 : track.getSettings().deviceId;
10769
+ };
10770
+ }
10771
+ /**
10772
+ * The current screen share audio status.
10773
+ */
10774
+ get audioEnabled() {
10775
+ return this.getCurrentValue(this.audioEnabled$);
10776
+ }
10777
+ /**
10778
+ * Set the current screen share audio status.
10779
+ */
10780
+ setAudioEnabled(isEnabled) {
10781
+ this.setCurrentValue(this.audioEnabledSubject, isEnabled);
10782
+ }
10783
+ /**
10784
+ * The current screen share settings.
10785
+ */
10786
+ get settings() {
10787
+ return this.getCurrentValue(this.settings$);
10788
+ }
10789
+ /**
10790
+ * Set the current screen share settings.
10791
+ *
10792
+ * @param settings the screen share settings to set.
10793
+ */
10794
+ setSettings(settings) {
10795
+ this.setCurrentValue(this.settingsSubject, settings);
10796
+ }
10797
+ }
10798
+
10799
+ class ScreenShareManager extends InputMediaDeviceManager {
10800
+ constructor(call) {
10801
+ super(call, new ScreenShareState(), TrackType.SCREEN_SHARE);
10802
+ }
10803
+ /**
10804
+ * Will enable screen share audio options on supported platforms.
10805
+ *
10806
+ * Note: for ongoing screen share, audio won't be enabled until you
10807
+ * re-publish the screen share stream.
10808
+ */
10809
+ enableScreenShareAudio() {
10810
+ this.state.setAudioEnabled(true);
10811
+ }
10812
+ /**
10813
+ * Will disable screen share audio options on supported platforms.
10814
+ */
10815
+ disableScreenShareAudio() {
10816
+ var _a;
10817
+ return __awaiter(this, void 0, void 0, function* () {
10818
+ this.state.setAudioEnabled(false);
10819
+ if ((_a = this.call.publisher) === null || _a === void 0 ? void 0 : _a.isPublishing(TrackType.SCREEN_SHARE_AUDIO)) {
10820
+ yield this.call.stopPublish(TrackType.SCREEN_SHARE_AUDIO, true);
10821
+ }
10822
+ });
10823
+ }
10824
+ /**
10825
+ * Returns the current screen share settings.
10826
+ */
10827
+ getSettings() {
10828
+ return this.state.settings;
10829
+ }
10830
+ /**
10831
+ * Sets the current screen share settings.
10832
+ *
10833
+ * @param settings the settings to set.
10834
+ */
10835
+ setSettings(settings) {
10836
+ this.state.setSettings(settings);
10837
+ }
10838
+ getDevices() {
10839
+ return of([]); // there are no devices to be listed for Screen Share
10840
+ }
10841
+ getStream(constraints) {
10842
+ if (!this.state.audioEnabled) {
10843
+ constraints.audio = false;
10844
+ }
10845
+ return getScreenShareStream(constraints);
10846
+ }
10847
+ publishStream(stream) {
10848
+ return this.call.publishScreenShareStream(stream, {
10849
+ screenShareSettings: this.state.settings,
10850
+ });
10851
+ }
10852
+ stopPublishStream(stopTracks) {
10853
+ return __awaiter(this, void 0, void 0, function* () {
10854
+ yield this.call.stopPublish(TrackType.SCREEN_SHARE, stopTracks);
10855
+ yield this.call.stopPublish(TrackType.SCREEN_SHARE_AUDIO, stopTracks);
10856
+ });
10857
+ }
10858
+ /**
10859
+ * Overrides the default `select` method to throw an error.
10860
+ *
10861
+ * @param deviceId ignored.
10862
+ */
10863
+ select(deviceId) {
10864
+ return __awaiter(this, void 0, void 0, function* () {
10865
+ throw new Error('This method is not supported in for Screen Share');
10866
+ });
10867
+ }
10868
+ }
10869
+
10628
10870
  class SpeakerState {
10629
10871
  constructor() {
10630
10872
  this.selectedDeviceSubject = new BehaviorSubject('');
@@ -10998,7 +11240,17 @@ class Call {
10998
11240
  // as the underlying peer connection will take care of it as part
10999
11241
  // of the ice-restart process
11000
11242
  if (localParticipant && !migrate) {
11001
- const { audioStream, videoStream, screenShareStream: screenShare, } = localParticipant;
11243
+ const { audioStream, videoStream, screenShareStream, screenShareAudioStream, } = localParticipant;
11244
+ let screenShare;
11245
+ if (screenShareStream || screenShareAudioStream) {
11246
+ screenShare = new MediaStream();
11247
+ screenShareStream === null || screenShareStream === void 0 ? void 0 : screenShareStream.getVideoTracks().forEach((track) => {
11248
+ screenShare === null || screenShare === void 0 ? void 0 : screenShare.addTrack(track);
11249
+ });
11250
+ screenShareAudioStream === null || screenShareAudioStream === void 0 ? void 0 : screenShareAudioStream.getAudioTracks().forEach((track) => {
11251
+ screenShare === null || screenShare === void 0 ? void 0 : screenShare.addTrack(track);
11252
+ });
11253
+ }
11002
11254
  // restore previous publishing state
11003
11255
  if (audioStream)
11004
11256
  yield this.publishAudioStream(audioStream);
@@ -11252,7 +11504,6 @@ class Call {
11252
11504
  * Consecutive calls to this method will replace the audio stream that is currently being published.
11253
11505
  * The previous audio stream will be stopped.
11254
11506
  *
11255
- *
11256
11507
  * @param audioStream the audio stream to publish.
11257
11508
  */
11258
11509
  this.publishAudioStream = (audioStream) => __awaiter(this, void 0, void 0, function* () {
@@ -11276,10 +11527,10 @@ class Call {
11276
11527
  * Consecutive calls to this method will replace the previous screen-share stream.
11277
11528
  * The previous screen-share stream will be stopped.
11278
11529
  *
11279
- *
11280
11530
  * @param screenShareStream the screen-share stream to publish.
11531
+ * @param opts the options to use when publishing the stream.
11281
11532
  */
11282
- this.publishScreenShareStream = (screenShareStream) => __awaiter(this, void 0, void 0, function* () {
11533
+ this.publishScreenShareStream = (screenShareStream, opts = {}) => __awaiter(this, void 0, void 0, function* () {
11283
11534
  // we should wait until we get a JoinResponse from the SFU,
11284
11535
  // otherwise we risk breaking the ICETrickle flow.
11285
11536
  yield this.assertCallJoined();
@@ -11292,7 +11543,11 @@ class Call {
11292
11543
  this.logger('error', `There is no video track in the screen share stream to publish`);
11293
11544
  return;
11294
11545
  }
11295
- yield this.publisher.publishStream(screenShareStream, screenShareTrack, TrackType.SCREEN_SHARE);
11546
+ yield this.publisher.publishStream(screenShareStream, screenShareTrack, TrackType.SCREEN_SHARE, opts);
11547
+ const [screenShareAudioTrack] = screenShareStream.getAudioTracks();
11548
+ if (screenShareAudioTrack) {
11549
+ yield this.publisher.publishStream(screenShareStream, screenShareAudioTrack, TrackType.SCREEN_SHARE_AUDIO, opts);
11550
+ }
11296
11551
  });
11297
11552
  /**
11298
11553
  * Stops publishing the given track type to the call, if it is currently being published.
@@ -11377,6 +11632,13 @@ class Call {
11377
11632
  dimension: p.screenShareDimension,
11378
11633
  });
11379
11634
  }
11635
+ if (p.publishedTracks.includes(TrackType.SCREEN_SHARE_AUDIO)) {
11636
+ subscriptions.push({
11637
+ userId: p.userId,
11638
+ sessionId: p.sessionId,
11639
+ trackType: TrackType.SCREEN_SHARE_AUDIO,
11640
+ });
11641
+ }
11380
11642
  }
11381
11643
  // schedule update
11382
11644
  this.trackSubscriptionsSubject.next({ type, data: subscriptions });
@@ -11847,9 +12109,10 @@ class Call {
11847
12109
  *
11848
12110
  * @param audioElement the audio element to bind to.
11849
12111
  * @param sessionId the session id.
12112
+ * @param trackType the kind of audio.
11850
12113
  */
11851
- this.bindAudioElement = (audioElement, sessionId) => {
11852
- const unbind = this.dynascaleManager.bindAudioElement(audioElement, sessionId);
12114
+ this.bindAudioElement = (audioElement, sessionId, trackType = 'audioTrack') => {
12115
+ const unbind = this.dynascaleManager.bindAudioElement(audioElement, sessionId, trackType);
11853
12116
  if (!unbind)
11854
12117
  return;
11855
12118
  this.leaveCallHooks.add(unbind);
@@ -11911,6 +12174,7 @@ class Call {
11911
12174
  this.camera = new CameraManager(this);
11912
12175
  this.microphone = new MicrophoneManager(this);
11913
12176
  this.speaker = new SpeakerManager();
12177
+ this.screenShare = new ScreenShareManager(this);
11914
12178
  }
11915
12179
  registerEffects() {
11916
12180
  this.leaveCallHooks.add(
@@ -13218,7 +13482,7 @@ class WSConnectionFallback {
13218
13482
  }
13219
13483
  }
13220
13484
 
13221
- const version = '0.3.28';
13485
+ const version = '0.3.30';
13222
13486
 
13223
13487
  const logger = getLogger(['location']);
13224
13488
  const HINT_URL = `https://hint.stream-io-video.com/`;
@@ -14198,5 +14462,5 @@ class StreamVideoServerClient extends StreamVideoClient {
14198
14462
  }
14199
14463
  }
14200
14464
 
14201
- export { AudioSettingsDefaultDeviceEnum, AudioSettingsRequestDefaultDeviceEnum, browsers as Browsers, Call, CallState, CallType, CallTypes, CallingState, CameraManager, CameraManagerState, CreateDeviceRequestPushProviderEnum, DebounceType, DynascaleManager, ErrorFromResponse, InputMediaDeviceManager, InputMediaDeviceManagerState, LayoutSettingsNameEnum, LayoutSettingsRequestNameEnum, MicrophoneManager, MicrophoneManagerState, OwnCapability, RecordSettingsRequestModeEnum, RecordSettingsRequestQualityEnum, rxUtils as RxUtils, events as SfuEvents, models as SfuModels, SpeakerManager, SpeakerState, StreamSfuClient, StreamVideoClient, StreamVideoReadOnlyStateStore, StreamVideoServerClient, StreamVideoWriteableStateStore, TranscriptionSettingsModeEnum, TranscriptionSettingsRequestModeEnum, VideoSettingsCameraFacingEnum, VideoSettingsRequestCameraFacingEnum, ViewportTracker, VisibilityState, checkIfAudioOutputChangeSupported, combineComparators, conditional, createSoundDetector, defaultSortPreset, descending, disposeOfMediaStream, dominantSpeaker, getAudioDevices, getAudioOutputDevices, getAudioStream, getClientDetails, getDeviceInfo, getLogger, getOSInfo, getScreenShareStream, getSdkInfo, getVideoDevices, getVideoStream, isStreamVideoLocalParticipant, livestreamOrAudioRoomSortPreset, logLevels, logToConsole, name, noopComparator, paginatedLayoutSortPreset, pinned, publishingAudio, publishingVideo, reactionType, role, screenSharing, setDeviceInfo, setLogLevel, setLogger, setOSInfo, setSdkInfo, speakerLayoutSortPreset, speaking, watchForAddedDefaultAudioDevice, watchForAddedDefaultAudioOutputDevice, watchForAddedDefaultVideoDevice, watchForDisconnectedAudioDevice, watchForDisconnectedAudioOutputDevice, watchForDisconnectedVideoDevice };
14465
+ export { AudioSettingsDefaultDeviceEnum, AudioSettingsRequestDefaultDeviceEnum, browsers as Browsers, Call, CallState, CallType, CallTypes, CallingState, CameraManager, CameraManagerState, CreateDeviceRequestPushProviderEnum, DebounceType, DynascaleManager, ErrorFromResponse, InputMediaDeviceManager, InputMediaDeviceManagerState, LayoutSettingsNameEnum, LayoutSettingsRequestNameEnum, MicrophoneManager, MicrophoneManagerState, OwnCapability, RecordSettingsRequestModeEnum, RecordSettingsRequestQualityEnum, rxUtils as RxUtils, ScreenShareManager, ScreenShareState, events as SfuEvents, models as SfuModels, SpeakerManager, SpeakerState, StreamSfuClient, StreamVideoClient, StreamVideoReadOnlyStateStore, StreamVideoServerClient, StreamVideoWriteableStateStore, TranscriptionSettingsModeEnum, TranscriptionSettingsRequestModeEnum, VideoSettingsCameraFacingEnum, VideoSettingsRequestCameraFacingEnum, ViewportTracker, VisibilityState, checkIfAudioOutputChangeSupported, combineComparators, conditional, createSoundDetector, defaultSortPreset, descending, disposeOfMediaStream, dominantSpeaker, getAudioDevices, getAudioOutputDevices, getAudioStream, getClientDetails, getDeviceInfo, getLogger, getOSInfo, getScreenShareStream, getSdkInfo, getVideoDevices, getVideoStream, isStreamVideoLocalParticipant, livestreamOrAudioRoomSortPreset, logLevels, logToConsole, name, noopComparator, paginatedLayoutSortPreset, pinned, publishingAudio, publishingVideo, reactionType, role, screenSharing, setDeviceInfo, setLogLevel, setLogger, setOSInfo, setSdkInfo, speakerLayoutSortPreset, speaking, watchForAddedDefaultAudioDevice, watchForAddedDefaultAudioOutputDevice, watchForAddedDefaultVideoDevice, watchForDisconnectedAudioDevice, watchForDisconnectedAudioOutputDevice, watchForDisconnectedVideoDevice };
14202
14466
  //# sourceMappingURL=index.browser.es.js.map