@stream-io/video-client 1.23.2 → 1.23.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.es.js CHANGED
@@ -4,7 +4,7 @@ import { ServiceType, stackIntercept, RpcError } from '@protobuf-ts/runtime-rpc'
4
4
  import axios from 'axios';
5
5
  export { AxiosError } from 'axios';
6
6
  import { TwirpFetchTransport, TwirpErrorCode } from '@protobuf-ts/twirp-transport';
7
- import { ReplaySubject, combineLatest, BehaviorSubject, map, shareReplay, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, merge, from, fromEvent, debounceTime, pairwise, of } from 'rxjs';
7
+ import { ReplaySubject, combineLatest, BehaviorSubject, shareReplay, map, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, merge, from, fromEvent, debounceTime, pairwise, of } from 'rxjs';
8
8
  import { UAParser } from 'ua-parser-js';
9
9
  import { parse } from 'sdp-transform';
10
10
  import https from 'https';
@@ -5032,6 +5032,9 @@ class CallState {
5032
5032
  return nextQueue.slice(-maxVisibleCaptions);
5033
5033
  });
5034
5034
  };
5035
+ this.rawParticipants$ = this.participantsSubject
5036
+ .asObservable()
5037
+ .pipe(shareReplay({ bufferSize: 1, refCount: true }));
5035
5038
  this.participants$ = this.participantsSubject.asObservable().pipe(
5036
5039
  // maintain stable-sort by mutating the participants stored
5037
5040
  // in the original subject
@@ -5205,6 +5208,12 @@ class CallState {
5205
5208
  get participants() {
5206
5209
  return this.getCurrentValue(this.participants$);
5207
5210
  }
5211
+ /**
5212
+ * The stable list of participants in the current call, unsorted.
5213
+ */
5214
+ get rawParticipants() {
5215
+ return this.getCurrentValue(this.rawParticipants$);
5216
+ }
5208
5217
  /**
5209
5218
  * The local participant in the current call.
5210
5219
  */
@@ -5677,7 +5686,7 @@ const aggregate = (stats) => {
5677
5686
  return report;
5678
5687
  };
5679
5688
 
5680
- const version = "1.23.2";
5689
+ const version = "1.23.4";
5681
5690
  const [major, minor, patch] = version.split('.');
5682
5691
  let sdkInfo = {
5683
5692
  type: SdkType.PLAIN_JAVASCRIPT,
@@ -6750,17 +6759,19 @@ const withSimulcastConstraints = (settings, optimalVideoLayers, useSingleLayer)
6750
6759
  let layers;
6751
6760
  const size = Math.max(settings.width || 0, settings.height || 0);
6752
6761
  if (size <= 320) {
6753
- // provide only one layer 320x240 (q), the one with the highest quality
6762
+ // provide only one layer 320x240 (f), the one with the highest quality
6754
6763
  layers = optimalVideoLayers.filter((layer) => layer.rid === 'f');
6755
6764
  }
6756
6765
  else if (size <= 640) {
6757
- // provide two layers, 160x120 (q) and 640x480 (h)
6758
- layers = optimalVideoLayers.filter((layer) => layer.rid !== 'h');
6766
+ // provide two layers, 320x240 (h) and 640x480 (f)
6767
+ layers = optimalVideoLayers.filter((layer) => layer.rid !== 'q');
6759
6768
  }
6760
6769
  else {
6761
6770
  // provide three layers for sizes > 640x480
6762
6771
  layers = optimalVideoLayers;
6763
6772
  }
6773
+ // we might have removed some layers, so we need to reassign the rid
6774
+ // to match the expected order of [q, h, f] for simulcast
6764
6775
  const ridMapping = ['q', 'h', 'f'];
6765
6776
  return layers.map((layer, index, arr) => ({
6766
6777
  ...layer,
@@ -7561,6 +7572,8 @@ class StreamSfuClient {
7561
7572
  const eventsToTrace = {
7562
7573
  callEnded: true,
7563
7574
  changePublishQuality: true,
7575
+ changePublishOptions: true,
7576
+ connectionQualityChanged: true,
7564
7577
  error: true,
7565
7578
  goAway: true,
7566
7579
  };
@@ -8434,6 +8447,20 @@ class DynascaleManager {
8434
8447
  true,
8435
8448
  };
8436
8449
  }), shareReplay(1));
8450
+ /**
8451
+ * Disposes the allocated resources and closes the audio context if it was created.
8452
+ */
8453
+ this.dispose = async () => {
8454
+ if (this.pendingSubscriptionsUpdate) {
8455
+ clearTimeout(this.pendingSubscriptionsUpdate);
8456
+ }
8457
+ const context = this.getOrCreateAudioContext();
8458
+ if (context && context.state !== 'closed') {
8459
+ document.removeEventListener('click', this.resumeAudioContext);
8460
+ await context.close();
8461
+ this.audioContext = undefined;
8462
+ }
8463
+ };
8437
8464
  this.setVideoTrackSubscriptionOverrides = (override, sessionIds) => {
8438
8465
  if (!sessionIds) {
8439
8466
  return setCurrentValue(this.videoTrackSubscriptionOverridesSubject, override ? { [globalOverrideKey]: override } : {});
@@ -8549,7 +8576,7 @@ class DynascaleManager {
8549
8576
  });
8550
8577
  this.applyTrackSubscriptions(debounceType);
8551
8578
  };
8552
- const participant$ = this.callState.participants$.pipe(map((participants) => participants.find((participant) => participant.sessionId === sessionId)), takeWhile((participant) => !!participant), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8579
+ const participant$ = this.callState.participants$.pipe(map((ps) => ps.find((p) => p.sessionId === sessionId)), takeWhile((participant) => !!participant), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8553
8580
  /**
8554
8581
  * Since the video elements are now being removed from the DOM (React SDK) upon
8555
8582
  visibility change, this subscription is not in use and stays here only for the
@@ -8677,7 +8704,24 @@ class DynascaleManager {
8677
8704
  const participant = this.callState.findParticipantBySessionId(sessionId);
8678
8705
  if (!participant || participant.isLocalParticipant)
8679
8706
  return;
8680
- const participant$ = this.callState.participants$.pipe(map((participants) => participants.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8707
+ const participant$ = this.callState.participants$.pipe(map((ps) => ps.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8708
+ const updateSinkId = (deviceId, audioContext) => {
8709
+ if (!deviceId)
8710
+ return;
8711
+ if ('setSinkId' in audioElement) {
8712
+ audioElement.setSinkId(deviceId).catch((e) => {
8713
+ this.logger('warn', `Can't set AudioElement sinkId`, e);
8714
+ });
8715
+ }
8716
+ if (audioContext && 'setSinkId' in audioContext) {
8717
+ // @ts-expect-error setSinkId is not available in all browsers
8718
+ audioContext.setSinkId(deviceId).catch((e) => {
8719
+ this.logger('warn', `Can't set AudioContext sinkId`, e);
8720
+ });
8721
+ }
8722
+ };
8723
+ let sourceNode = undefined;
8724
+ let gainNode = undefined;
8681
8725
  const updateMediaStreamSubscription = participant$
8682
8726
  .pipe(distinctUntilKeyChanged(trackType === 'screenShareAudioTrack'
8683
8727
  ? 'screenShareAudioStream'
@@ -8690,40 +8734,82 @@ class DynascaleManager {
8690
8734
  return;
8691
8735
  setTimeout(() => {
8692
8736
  audioElement.srcObject = source ?? null;
8693
- if (audioElement.srcObject) {
8737
+ if (!source)
8738
+ return;
8739
+ // Safari has a special quirk that prevents playing audio until the user
8740
+ // interacts with the page or focuses on the tab where the call happens.
8741
+ // This is a workaround for the issue where:
8742
+ // - A and B are in a call
8743
+ // - A switches to another tab
8744
+ // - B mutes their microphone and unmutes it
8745
+ // - A does not hear B's unmuted audio until they focus the tab
8746
+ const audioContext = this.getOrCreateAudioContext();
8747
+ if (audioContext) {
8748
+ // we will play audio through the audio context in Safari
8749
+ audioElement.muted = true;
8750
+ sourceNode?.disconnect();
8751
+ sourceNode = audioContext.createMediaStreamSource(source);
8752
+ gainNode ?? (gainNode = audioContext.createGain());
8753
+ gainNode.gain.value = p.audioVolume ?? this.speaker.state.volume;
8754
+ sourceNode.connect(gainNode).connect(audioContext.destination);
8755
+ this.resumeAudioContext();
8756
+ }
8757
+ else {
8758
+ // we will play audio directly through the audio element in other browsers
8759
+ audioElement.muted = false;
8694
8760
  audioElement.play().catch((e) => {
8695
- this.logger('warn', `Failed to play stream`, e);
8761
+ this.logger('warn', `Failed to play audio stream`, e);
8696
8762
  });
8697
- // audio output device shall be set after the audio element is played
8698
- // otherwise, the browser will not pick it up, and will always
8699
- // play audio through the system's default device
8700
- const { selectedDevice } = this.speaker.state;
8701
- if (selectedDevice && 'setSinkId' in audioElement) {
8702
- audioElement.setSinkId(selectedDevice);
8703
- }
8704
8763
  }
8764
+ const { selectedDevice } = this.speaker.state;
8765
+ if (selectedDevice)
8766
+ updateSinkId(selectedDevice, audioContext);
8705
8767
  });
8706
8768
  });
8707
8769
  const sinkIdSubscription = !('setSinkId' in audioElement)
8708
8770
  ? null
8709
8771
  : this.speaker.state.selectedDevice$.subscribe((deviceId) => {
8710
- if (deviceId) {
8711
- audioElement.setSinkId(deviceId);
8712
- }
8772
+ const audioContext = this.getOrCreateAudioContext();
8773
+ updateSinkId(deviceId, audioContext);
8713
8774
  });
8714
8775
  const volumeSubscription = combineLatest([
8715
8776
  this.speaker.state.volume$,
8716
8777
  participant$.pipe(distinctUntilKeyChanged('audioVolume')),
8717
8778
  ]).subscribe(([volume, p]) => {
8718
- audioElement.volume = p.audioVolume ?? volume;
8779
+ const participantVolume = p.audioVolume ?? volume;
8780
+ audioElement.volume = participantVolume;
8781
+ if (gainNode)
8782
+ gainNode.gain.value = participantVolume;
8719
8783
  });
8720
8784
  audioElement.autoplay = true;
8721
8785
  return () => {
8722
8786
  sinkIdSubscription?.unsubscribe();
8723
8787
  volumeSubscription.unsubscribe();
8724
8788
  updateMediaStreamSubscription.unsubscribe();
8789
+ audioElement.srcObject = null;
8790
+ sourceNode?.disconnect();
8791
+ gainNode?.disconnect();
8725
8792
  };
8726
8793
  };
8794
+ this.getOrCreateAudioContext = () => {
8795
+ if (this.audioContext || !isSafari())
8796
+ return this.audioContext;
8797
+ const context = new AudioContext();
8798
+ if (context.state === 'suspended') {
8799
+ document.addEventListener('click', this.resumeAudioContext);
8800
+ }
8801
+ return (this.audioContext = context);
8802
+ };
8803
+ this.resumeAudioContext = () => {
8804
+ if (this.audioContext?.state === 'suspended') {
8805
+ this.audioContext
8806
+ .resume()
8807
+ .catch((err) => this.logger('warn', `Can't resume audio context`, err))
8808
+ .then(() => {
8809
+ document.removeEventListener('click', this.resumeAudioContext);
8810
+ });
8811
+ }
8812
+ };
8727
8813
  this.callState = callState;
8728
8814
  this.speaker = speaker;
8729
8815
  }
@@ -9163,6 +9249,16 @@ const getStream = async (constraints, tracer) => {
9163
9249
  // every successful getUserMedia call.
9164
9250
  navigator.mediaDevices.dispatchEvent(new Event('devicechange'));
9165
9251
  }
9252
+ if (constraints.video) {
9253
+ const [videoTrack] = stream.getVideoTracks();
9254
+ if (videoTrack) {
9255
+ const { width, height } = videoTrack.getSettings();
9256
+ const target = constraints.video;
9257
+ if (width !== target.width || height !== target.height) {
9258
+ tracer?.trace(`${tag}Warn`, `Requested resolution ${target.width}x${target.height} but got ${width}x${height}`);
9259
+ }
9260
+ }
9261
+ }
9166
9262
  return stream;
9167
9263
  }
9168
9264
  catch (error) {
@@ -9506,10 +9602,22 @@ class InputMediaDeviceManager {
9506
9602
  }
9507
9603
  }
9508
9604
  async applySettingsToStream() {
9509
- await withCancellation(this.statusChangeConcurrencyTag, async () => {
9605
+ await withCancellation(this.statusChangeConcurrencyTag, async (signal) => {
9510
9606
  if (this.enabled) {
9511
- await this.muteStream();
9512
- await this.unmuteStream();
9607
+ try {
9608
+ await this.muteStream();
9609
+ this.state.setStatus('disabled');
9610
+ if (signal.aborted) {
9611
+ return;
9612
+ }
9613
+ await this.unmuteStream();
9614
+ this.state.setStatus('enabled');
9615
+ }
9616
+ finally {
9617
+ if (!signal.aborted) {
9618
+ this.state.setPendingStatus(this.state.status);
9619
+ }
9620
+ }
9513
9621
  }
9514
9622
  });
9515
9623
  }
@@ -9575,130 +9683,122 @@ class InputMediaDeviceManager {
9575
9683
  this.logger('debug', 'Starting stream');
9576
9684
  let stream;
9577
9685
  let rootStream;
9578
- try {
9579
- if (this.state.mediaStream &&
9580
- this.getTracks().every((t) => t.readyState === 'live')) {
9581
- stream = this.state.mediaStream;
9582
- this.enableTracks();
9583
- }
9584
- else {
9585
- const defaultConstraints = this.state.defaultConstraints;
9586
- const constraints = {
9587
- ...defaultConstraints,
9588
- deviceId: this.state.selectedDevice
9589
- ? { exact: this.state.selectedDevice }
9590
- : undefined,
9591
- };
9592
- /**
9593
- * Chains two media streams together.
9594
- *
9595
- * In our case, filters MediaStreams are derived from their parent MediaStream.
9596
- * However, once a child filter's track is stopped,
9597
- * the tracks of the parent MediaStream aren't automatically stopped.
9598
- * This leads to a situation where the camera indicator light is still on
9599
- * even though the user stopped publishing video.
9600
- *
9601
- * This function works around this issue by stopping the parent MediaStream's tracks
9602
- * as well once the child filter's tracks are stopped.
9603
- *
9604
- * It works by patching the stop() method of the child filter's tracks to also stop
9605
- * the parent MediaStream's tracks of the same type. Here we assume that
9606
- * the parent MediaStream has only one track of each type.
9607
- *
9608
- * @param parentStream the parent MediaStream. Omit for the root stream.
9609
- */
9610
- const chainWith = (parentStream) => async (filterStream) => {
9611
- if (!parentStream)
9612
- return filterStream;
9613
- // TODO OL: take care of track.enabled property as well
9614
- const parent = await parentStream;
9615
- filterStream.getTracks().forEach((track) => {
9616
- const originalStop = track.stop;
9617
- track.stop = function stop() {
9618
- originalStop.call(track);
9619
- parent.getTracks().forEach((parentTrack) => {
9620
- if (parentTrack.kind === track.kind) {
9621
- parentTrack.stop();
9622
- }
9623
- });
9624
- };
9625
- });
9626
- parent.getTracks().forEach((parentTrack) => {
9627
- // When the parent stream abruptly ends, we propagate the event
9628
- // to the filter stream.
9629
- // This usually happens when the camera/microphone permissions
9630
- // are revoked or when the device is disconnected.
9631
- const handleParentTrackEnded = () => {
9632
- filterStream.getTracks().forEach((track) => {
9633
- if (parentTrack.kind !== track.kind)
9634
- return;
9635
- track.stop();
9636
- track.dispatchEvent(new Event('ended')); // propagate the event
9637
- });
9638
- };
9639
- parentTrack.addEventListener('ended', handleParentTrackEnded);
9640
- this.subscriptions.push(() => {
9641
- parentTrack.removeEventListener('ended', handleParentTrackEnded);
9642
- });
9643
- });
9686
+ if (this.state.mediaStream &&
9687
+ this.getTracks().every((t) => t.readyState === 'live')) {
9688
+ stream = this.state.mediaStream;
9689
+ this.enableTracks();
9690
+ }
9691
+ else {
9692
+ const defaultConstraints = this.state.defaultConstraints;
9693
+ const constraints = {
9694
+ ...defaultConstraints,
9695
+ deviceId: this.state.selectedDevice
9696
+ ? { exact: this.state.selectedDevice }
9697
+ : undefined,
9698
+ };
9699
+ /**
9700
+ * Chains two media streams together.
9701
+ *
9702
+ * In our case, filters MediaStreams are derived from their parent MediaStream.
9703
+ * However, once a child filter's track is stopped,
9704
+ * the tracks of the parent MediaStream aren't automatically stopped.
9705
+ * This leads to a situation where the camera indicator light is still on
9706
+ * even though the user stopped publishing video.
9707
+ *
9708
+ * This function works around this issue by stopping the parent MediaStream's tracks
9709
+ * as well once the child filter's tracks are stopped.
9710
+ *
9711
+ * It works by patching the stop() method of the child filter's tracks to also stop
9712
+ * the parent MediaStream's tracks of the same type. Here we assume that
9713
+ * the parent MediaStream has only one track of each type.
9714
+ *
9715
+ * @param parentStream the parent MediaStream. Omit for the root stream.
9716
+ */
9717
+ const chainWith = (parentStream) => async (filterStream) => {
9718
+ if (!parentStream)
9644
9719
  return filterStream;
9645
- };
9646
- // the rootStream represents the stream coming from the actual device
9647
- // e.g. camera or microphone stream
9648
- rootStream = this.getStream(constraints);
9649
- // we publish the last MediaStream of the chain
9650
- stream = await this.filters.reduce((parent, entry) => parent
9651
- .then((inputStream) => {
9652
- const { stop, output } = entry.start(inputStream);
9653
- entry.stop = stop;
9654
- return output;
9655
- })
9656
- .then(chainWith(parent), (error) => {
9657
- this.logger('warn', 'Filter failed to start and will be ignored', error);
9658
- return parent;
9659
- }), rootStream);
9660
- }
9661
- if (this.call.state.callingState === CallingState.JOINED) {
9662
- await this.publishStream(stream);
9663
- }
9664
- if (this.state.mediaStream !== stream) {
9665
- this.state.setMediaStream(stream, await rootStream);
9666
- const handleTrackEnded = async () => {
9667
- await this.statusChangeSettled();
9668
- if (this.enabled) {
9669
- this.isTrackStoppedDueToTrackEnd = true;
9670
- setTimeout(() => {
9671
- this.isTrackStoppedDueToTrackEnd = false;
9672
- }, 2000);
9673
- await this.disable();
9674
- }
9675
- };
9676
- const createTrackMuteHandler = (muted) => () => {
9677
- if (!isMobile() || this.trackType !== TrackType.VIDEO)
9678
- return;
9679
- this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
9680
- this.logger('warn', 'Error while notifying track mute state', err);
9681
- });
9682
- };
9683
- stream.getTracks().forEach((track) => {
9684
- const muteHandler = createTrackMuteHandler(true);
9685
- const unmuteHandler = createTrackMuteHandler(false);
9686
- track.addEventListener('mute', muteHandler);
9687
- track.addEventListener('unmute', unmuteHandler);
9688
- track.addEventListener('ended', handleTrackEnded);
9720
+ // TODO OL: take care of track.enabled property as well
9721
+ const parent = await parentStream;
9722
+ filterStream.getTracks().forEach((track) => {
9723
+ const originalStop = track.stop;
9724
+ track.stop = function stop() {
9725
+ originalStop.call(track);
9726
+ parent.getTracks().forEach((parentTrack) => {
9727
+ if (parentTrack.kind === track.kind) {
9728
+ parentTrack.stop();
9729
+ }
9730
+ });
9731
+ };
9732
+ });
9733
+ parent.getTracks().forEach((parentTrack) => {
9734
+ // When the parent stream abruptly ends, we propagate the event
9735
+ // to the filter stream.
9736
+ // This usually happens when the camera/microphone permissions
9737
+ // are revoked or when the device is disconnected.
9738
+ const handleParentTrackEnded = () => {
9739
+ filterStream.getTracks().forEach((track) => {
9740
+ if (parentTrack.kind !== track.kind)
9741
+ return;
9742
+ track.stop();
9743
+ track.dispatchEvent(new Event('ended')); // propagate the event
9744
+ });
9745
+ };
9746
+ parentTrack.addEventListener('ended', handleParentTrackEnded);
9689
9747
  this.subscriptions.push(() => {
9690
- track.removeEventListener('mute', muteHandler);
9691
- track.removeEventListener('unmute', unmuteHandler);
9692
- track.removeEventListener('ended', handleTrackEnded);
9748
+ parentTrack.removeEventListener('ended', handleParentTrackEnded);
9693
9749
  });
9694
9750
  });
9695
- }
9751
+ return filterStream;
9752
+ };
9753
+ // the rootStream represents the stream coming from the actual device
9754
+ // e.g. camera or microphone stream
9755
+ rootStream = this.getStream(constraints);
9756
+ // we publish the last MediaStream of the chain
9757
+ stream = await this.filters.reduce((parent, entry) => parent
9758
+ .then((inputStream) => {
9759
+ const { stop, output } = entry.start(inputStream);
9760
+ entry.stop = stop;
9761
+ return output;
9762
+ })
9763
+ .then(chainWith(parent), (error) => {
9764
+ this.logger('warn', 'Filter failed to start and will be ignored', error);
9765
+ return parent;
9766
+ }), rootStream);
9696
9767
  }
9697
- catch (err) {
9698
- if (rootStream) {
9699
- disposeOfMediaStream(await rootStream);
9700
- }
9701
- throw err;
9768
+ if (this.call.state.callingState === CallingState.JOINED) {
9769
+ await this.publishStream(stream);
9770
+ }
9771
+ if (this.state.mediaStream !== stream) {
9772
+ this.state.setMediaStream(stream, await rootStream);
9773
+ const handleTrackEnded = async () => {
9774
+ await this.statusChangeSettled();
9775
+ if (this.enabled) {
9776
+ this.isTrackStoppedDueToTrackEnd = true;
9777
+ setTimeout(() => {
9778
+ this.isTrackStoppedDueToTrackEnd = false;
9779
+ }, 2000);
9780
+ await this.disable();
9781
+ }
9782
+ };
9783
+ const createTrackMuteHandler = (muted) => () => {
9784
+ if (!isMobile() || this.trackType !== TrackType.VIDEO)
9785
+ return;
9786
+ this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
9787
+ this.logger('warn', 'Error while notifying track mute state', err);
9788
+ });
9789
+ };
9790
+ stream.getTracks().forEach((track) => {
9791
+ const muteHandler = createTrackMuteHandler(true);
9792
+ const unmuteHandler = createTrackMuteHandler(false);
9793
+ track.addEventListener('mute', muteHandler);
9794
+ track.addEventListener('unmute', unmuteHandler);
9795
+ track.addEventListener('ended', handleTrackEnded);
9796
+ this.subscriptions.push(() => {
9797
+ track.removeEventListener('mute', muteHandler);
9798
+ track.removeEventListener('unmute', unmuteHandler);
9799
+ track.removeEventListener('ended', handleTrackEnded);
9800
+ });
9801
+ });
9702
9802
  }
9703
9803
  }
9704
9804
  get mediaDeviceKind() {
@@ -10028,7 +10128,14 @@ class CameraManager extends InputMediaDeviceManager {
10028
10128
  // Wait for any in progress camera operation
10029
10129
  await this.statusChangeSettled();
10030
10130
  const { target_resolution, camera_facing, camera_default_on } = settings;
10031
- await this.selectTargetResolution(target_resolution);
10131
+ // normalize target resolution to landscape format.
10132
+ // on mobile devices, the device itself adjusts the resolution to portrait or landscape
10133
+ // depending on the orientation of the device. using portrait resolution
10134
+ // will result in falling back to the default resolution (640x480).
10135
+ let { width, height } = target_resolution;
10136
+ if (width < height)
10137
+ [width, height] = [height, width];
10138
+ await this.selectTargetResolution({ width, height });
10032
10139
  // Set camera direction if it's not yet set
10033
10140
  if (!this.state.direction && !this.state.selectedDevice) {
10034
10141
  this.state.setDirection(camera_facing === 'front' ? 'front' : 'back');
@@ -10425,6 +10532,7 @@ class MicrophoneManager extends InputMediaDeviceManager {
10425
10532
  await this.disableNoiseCancellation().catch((err) => {
10426
10533
  this.logger('warn', 'Failed to disable noise cancellation', err);
10427
10534
  });
10535
+ throw e;
10428
10536
  }
10429
10537
  }
10430
10538
  /**
@@ -11061,6 +11169,7 @@ class Call {
11061
11169
  await this.sfuClient?.leaveAndClose(message ?? reason ?? 'user is leaving the call');
11062
11170
  this.sfuClient = undefined;
11063
11171
  this.dynascaleManager.setSfuClient(undefined);
11172
+ await this.dynascaleManager.dispose();
11064
11173
  this.state.setCallingState(CallingState.LEFT);
11065
11174
  this.state.setParticipants([]);
11066
11175
  this.state.dispose();
@@ -13365,7 +13474,9 @@ class TokenManager {
13365
13474
  this.token = token;
13366
13475
  }
13367
13476
  catch (e) {
13368
- return reject(new Error(`Call to tokenProvider failed with message: ${e}`));
13477
+ return reject(new Error(`Call to tokenProvider failed with message: ${e}`, {
13478
+ cause: e,
13479
+ }));
13369
13480
  }
13370
13481
  resolve(this.token);
13371
13482
  }
@@ -13782,7 +13893,7 @@ class StreamClient {
13782
13893
  this.getUserAgent = () => {
13783
13894
  if (!this.cachedUserAgent) {
13784
13895
  const { clientAppIdentifier = {} } = this.options;
13785
- const { sdkName = 'js', sdkVersion = "1.23.2", ...extras } = clientAppIdentifier;
13896
+ const { sdkName = 'js', sdkVersion = "1.23.4", ...extras } = clientAppIdentifier;
13786
13897
  this.cachedUserAgent = [
13787
13898
  `stream-video-${sdkName}-v${sdkVersion}`,
13788
13899
  ...Object.entries(extras).map(([key, value]) => `${key}=${value}`),