@stream-io/video-client 1.23.2 → 1.23.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,23 @@
2
2
 
3
3
  This file was generated using [@jscutlery/semver](https://github.com/jscutlery/semver).
4
4
 
5
+ ## [1.23.4](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.23.3...@stream-io/video-client-1.23.4) (2025-06-03)
6
+
7
+ ### Bug Fixes
8
+
9
+ - attach original token provider error as cause to loadToken rejection ([#1812](https://github.com/GetStream/stream-video-js/issues/1812)) ([15f817c](https://github.com/GetStream/stream-video-js/commit/15f817c2548a8edba8ca1004e133277d67cbeb4f))
10
+ - improved video quality on low capture resolution ([#1814](https://github.com/GetStream/stream-video-js/issues/1814)) ([ebcfdf7](https://github.com/GetStream/stream-video-js/commit/ebcfdf7f7e8146fcaf18a8bee81086f5a23f5df3))
11
+
12
+ ## [1.23.3](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.23.2...@stream-io/video-client-1.23.3) (2025-06-02)
13
+
14
+ - remove TODO ([9cfea4b](https://github.com/GetStream/stream-video-js/commit/9cfea4b54284cdd680a6d666436dedc5fd8956c3))
15
+
16
+ ### Bug Fixes
17
+
18
+ - inconsistent device state if applySettingsToStream fails ([#1808](https://github.com/GetStream/stream-video-js/issues/1808)) ([73d66c2](https://github.com/GetStream/stream-video-js/commit/73d66c2eaa7eca52b9d41b39f8f9fd0a0ce240ef))
19
+ - test ([e0b93aa](https://github.com/GetStream/stream-video-js/commit/e0b93aaa13f22f0db30b61e6230aff40ba8fd92a))
20
+ - use AudioContext for Safari ([#1810](https://github.com/GetStream/stream-video-js/issues/1810)) ([63542f4](https://github.com/GetStream/stream-video-js/commit/63542f419efa475c7acf50f053621ace74a1eff4))
21
+
5
22
  ## [1.23.2](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.23.1...@stream-io/video-client-1.23.2) (2025-05-22)
6
23
 
7
24
  ### Bug Fixes
@@ -4,7 +4,7 @@ import { ServiceType, stackIntercept, RpcError } from '@protobuf-ts/runtime-rpc'
4
4
  import axios from 'axios';
5
5
  export { AxiosError } from 'axios';
6
6
  import { TwirpFetchTransport, TwirpErrorCode } from '@protobuf-ts/twirp-transport';
7
- import { ReplaySubject, combineLatest, BehaviorSubject, map, shareReplay, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, merge, from, fromEvent, debounceTime, pairwise, of } from 'rxjs';
7
+ import { ReplaySubject, combineLatest, BehaviorSubject, shareReplay, map, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, merge, from, fromEvent, debounceTime, pairwise, of } from 'rxjs';
8
8
  import { UAParser } from 'ua-parser-js';
9
9
  import { parse } from 'sdp-transform';
10
10
 
@@ -5031,6 +5031,9 @@ class CallState {
5031
5031
  return nextQueue.slice(-maxVisibleCaptions);
5032
5032
  });
5033
5033
  };
5034
+ this.rawParticipants$ = this.participantsSubject
5035
+ .asObservable()
5036
+ .pipe(shareReplay({ bufferSize: 1, refCount: true }));
5034
5037
  this.participants$ = this.participantsSubject.asObservable().pipe(
5035
5038
  // maintain stable-sort by mutating the participants stored
5036
5039
  // in the original subject
@@ -5204,6 +5207,12 @@ class CallState {
5204
5207
  get participants() {
5205
5208
  return this.getCurrentValue(this.participants$);
5206
5209
  }
5210
+ /**
5211
+ * The stable list of participants in the current call, unsorted.
5212
+ */
5213
+ get rawParticipants() {
5214
+ return this.getCurrentValue(this.rawParticipants$);
5215
+ }
5207
5216
  /**
5208
5217
  * The local participant in the current call.
5209
5218
  */
@@ -5676,7 +5685,7 @@ const aggregate = (stats) => {
5676
5685
  return report;
5677
5686
  };
5678
5687
 
5679
- const version = "1.23.2";
5688
+ const version = "1.23.4";
5680
5689
  const [major, minor, patch] = version.split('.');
5681
5690
  let sdkInfo = {
5682
5691
  type: SdkType.PLAIN_JAVASCRIPT,
@@ -6749,17 +6758,19 @@ const withSimulcastConstraints = (settings, optimalVideoLayers, useSingleLayer)
6749
6758
  let layers;
6750
6759
  const size = Math.max(settings.width || 0, settings.height || 0);
6751
6760
  if (size <= 320) {
6752
- // provide only one layer 320x240 (q), the one with the highest quality
6761
+ // provide only one layer 320x240 (f), the one with the highest quality
6753
6762
  layers = optimalVideoLayers.filter((layer) => layer.rid === 'f');
6754
6763
  }
6755
6764
  else if (size <= 640) {
6756
- // provide two layers, 160x120 (q) and 640x480 (h)
6757
- layers = optimalVideoLayers.filter((layer) => layer.rid !== 'h');
6765
+ // provide two layers, 320x240 (h) and 640x480 (f)
6766
+ layers = optimalVideoLayers.filter((layer) => layer.rid !== 'q');
6758
6767
  }
6759
6768
  else {
6760
6769
  // provide three layers for sizes > 640x480
6761
6770
  layers = optimalVideoLayers;
6762
6771
  }
6772
+ // we might have removed some layers, so we need to reassign the rid
6773
+ // to match the expected order of [q, h, f] for simulcast
6763
6774
  const ridMapping = ['q', 'h', 'f'];
6764
6775
  return layers.map((layer, index, arr) => ({
6765
6776
  ...layer,
@@ -7560,6 +7571,8 @@ class StreamSfuClient {
7560
7571
  const eventsToTrace = {
7561
7572
  callEnded: true,
7562
7573
  changePublishQuality: true,
7574
+ changePublishOptions: true,
7575
+ connectionQualityChanged: true,
7563
7576
  error: true,
7564
7577
  goAway: true,
7565
7578
  };
@@ -8433,6 +8446,20 @@ class DynascaleManager {
8433
8446
  true,
8434
8447
  };
8435
8448
  }), shareReplay(1));
8449
+ /**
8450
+ * Disposes the allocated resources and closes the audio context if it was created.
8451
+ */
8452
+ this.dispose = async () => {
8453
+ if (this.pendingSubscriptionsUpdate) {
8454
+ clearTimeout(this.pendingSubscriptionsUpdate);
8455
+ }
8456
+ const context = this.getOrCreateAudioContext();
8457
+ if (context && context.state !== 'closed') {
8458
+ document.removeEventListener('click', this.resumeAudioContext);
8459
+ await context.close();
8460
+ this.audioContext = undefined;
8461
+ }
8462
+ };
8436
8463
  this.setVideoTrackSubscriptionOverrides = (override, sessionIds) => {
8437
8464
  if (!sessionIds) {
8438
8465
  return setCurrentValue(this.videoTrackSubscriptionOverridesSubject, override ? { [globalOverrideKey]: override } : {});
@@ -8548,7 +8575,7 @@ class DynascaleManager {
8548
8575
  });
8549
8576
  this.applyTrackSubscriptions(debounceType);
8550
8577
  };
8551
- const participant$ = this.callState.participants$.pipe(map((participants) => participants.find((participant) => participant.sessionId === sessionId)), takeWhile((participant) => !!participant), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8578
+ const participant$ = this.callState.participants$.pipe(map((ps) => ps.find((p) => p.sessionId === sessionId)), takeWhile((participant) => !!participant), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8552
8579
  /**
8553
8580
  * Since the video elements are now being removed from the DOM (React SDK) upon
8554
8581
  visibility change, this subscription is not in use and stays here only for the
@@ -8676,7 +8703,24 @@ class DynascaleManager {
8676
8703
  const participant = this.callState.findParticipantBySessionId(sessionId);
8677
8704
  if (!participant || participant.isLocalParticipant)
8678
8705
  return;
8679
- const participant$ = this.callState.participants$.pipe(map((participants) => participants.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8706
+ const participant$ = this.callState.participants$.pipe(map((ps) => ps.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
8707
+ const updateSinkId = (deviceId, audioContext) => {
8708
+ if (!deviceId)
8709
+ return;
8710
+ if ('setSinkId' in audioElement) {
8711
+ audioElement.setSinkId(deviceId).catch((e) => {
8712
+ this.logger('warn', `Can't set AudioElement sinkId`, e);
8713
+ });
8714
+ }
8715
+ if (audioContext && 'setSinkId' in audioContext) {
8716
+ // @ts-expect-error setSinkId is not available in all browsers
8717
+ audioContext.setSinkId(deviceId).catch((e) => {
8718
+ this.logger('warn', `Can't set AudioContext sinkId`, e);
8719
+ });
8720
+ }
8721
+ };
8722
+ let sourceNode = undefined;
8723
+ let gainNode = undefined;
8680
8724
  const updateMediaStreamSubscription = participant$
8681
8725
  .pipe(distinctUntilKeyChanged(trackType === 'screenShareAudioTrack'
8682
8726
  ? 'screenShareAudioStream'
@@ -8689,40 +8733,82 @@ class DynascaleManager {
8689
8733
  return;
8690
8734
  setTimeout(() => {
8691
8735
  audioElement.srcObject = source ?? null;
8692
- if (audioElement.srcObject) {
8736
+ if (!source)
8737
+ return;
8738
+ // Safari has a special quirk that prevents playing audio until the user
8739
+ // interacts with the page or focuses on the tab where the call happens.
8740
+ // This is a workaround for the issue where:
8741
+ // - A and B are in a call
8742
+ // - A switches to another tab
8743
+ // - B mutes their microphone and unmutes it
8744
+ // - A does not hear B's unmuted audio until they focus the tab
8745
+ const audioContext = this.getOrCreateAudioContext();
8746
+ if (audioContext) {
8747
+ // we will play audio through the audio context in Safari
8748
+ audioElement.muted = true;
8749
+ sourceNode?.disconnect();
8750
+ sourceNode = audioContext.createMediaStreamSource(source);
8751
+ gainNode ?? (gainNode = audioContext.createGain());
8752
+ gainNode.gain.value = p.audioVolume ?? this.speaker.state.volume;
8753
+ sourceNode.connect(gainNode).connect(audioContext.destination);
8754
+ this.resumeAudioContext();
8755
+ }
8756
+ else {
8757
+ // we will play audio directly through the audio element in other browsers
8758
+ audioElement.muted = false;
8693
8759
  audioElement.play().catch((e) => {
8694
- this.logger('warn', `Failed to play stream`, e);
8760
+ this.logger('warn', `Failed to play audio stream`, e);
8695
8761
  });
8696
- // audio output device shall be set after the audio element is played
8697
- // otherwise, the browser will not pick it up, and will always
8698
- // play audio through the system's default device
8699
- const { selectedDevice } = this.speaker.state;
8700
- if (selectedDevice && 'setSinkId' in audioElement) {
8701
- audioElement.setSinkId(selectedDevice);
8702
- }
8703
8762
  }
8763
+ const { selectedDevice } = this.speaker.state;
8764
+ if (selectedDevice)
8765
+ updateSinkId(selectedDevice, audioContext);
8704
8766
  });
8705
8767
  });
8706
8768
  const sinkIdSubscription = !('setSinkId' in audioElement)
8707
8769
  ? null
8708
8770
  : this.speaker.state.selectedDevice$.subscribe((deviceId) => {
8709
- if (deviceId) {
8710
- audioElement.setSinkId(deviceId);
8711
- }
8771
+ const audioContext = this.getOrCreateAudioContext();
8772
+ updateSinkId(deviceId, audioContext);
8712
8773
  });
8713
8774
  const volumeSubscription = combineLatest([
8714
8775
  this.speaker.state.volume$,
8715
8776
  participant$.pipe(distinctUntilKeyChanged('audioVolume')),
8716
8777
  ]).subscribe(([volume, p]) => {
8717
- audioElement.volume = p.audioVolume ?? volume;
8778
+ const participantVolume = p.audioVolume ?? volume;
8779
+ audioElement.volume = participantVolume;
8780
+ if (gainNode)
8781
+ gainNode.gain.value = participantVolume;
8718
8782
  });
8719
8783
  audioElement.autoplay = true;
8720
8784
  return () => {
8721
8785
  sinkIdSubscription?.unsubscribe();
8722
8786
  volumeSubscription.unsubscribe();
8723
8787
  updateMediaStreamSubscription.unsubscribe();
8788
+ audioElement.srcObject = null;
8789
+ sourceNode?.disconnect();
8790
+ gainNode?.disconnect();
8724
8791
  };
8725
8792
  };
8793
+ this.getOrCreateAudioContext = () => {
8794
+ if (this.audioContext || !isSafari())
8795
+ return this.audioContext;
8796
+ const context = new AudioContext();
8797
+ if (context.state === 'suspended') {
8798
+ document.addEventListener('click', this.resumeAudioContext);
8799
+ }
8800
+ return (this.audioContext = context);
8801
+ };
8802
+ this.resumeAudioContext = () => {
8803
+ if (this.audioContext?.state === 'suspended') {
8804
+ this.audioContext
8805
+ .resume()
8806
+ .catch((err) => this.logger('warn', `Can't resume audio context`, err))
8807
+ .then(() => {
8808
+ document.removeEventListener('click', this.resumeAudioContext);
8809
+ });
8810
+ }
8811
+ };
8726
8812
  this.callState = callState;
8727
8813
  this.speaker = speaker;
8728
8814
  }
@@ -9162,6 +9248,16 @@ const getStream = async (constraints, tracer) => {
9162
9248
  // every successful getUserMedia call.
9163
9249
  navigator.mediaDevices.dispatchEvent(new Event('devicechange'));
9164
9250
  }
9251
+ if (constraints.video) {
9252
+ const [videoTrack] = stream.getVideoTracks();
9253
+ if (videoTrack) {
9254
+ const { width, height } = videoTrack.getSettings();
9255
+ const target = constraints.video;
9256
+ if (width !== target.width || height !== target.height) {
9257
+ tracer?.trace(`${tag}Warn`, `Requested resolution ${target.width}x${target.height} but got ${width}x${height}`);
9258
+ }
9259
+ }
9260
+ }
9165
9261
  return stream;
9166
9262
  }
9167
9263
  catch (error) {
@@ -9505,10 +9601,22 @@ class InputMediaDeviceManager {
9505
9601
  }
9506
9602
  }
9507
9603
  async applySettingsToStream() {
9508
- await withCancellation(this.statusChangeConcurrencyTag, async () => {
9604
+ await withCancellation(this.statusChangeConcurrencyTag, async (signal) => {
9509
9605
  if (this.enabled) {
9510
- await this.muteStream();
9511
- await this.unmuteStream();
9606
+ try {
9607
+ await this.muteStream();
9608
+ this.state.setStatus('disabled');
9609
+ if (signal.aborted) {
9610
+ return;
9611
+ }
9612
+ await this.unmuteStream();
9613
+ this.state.setStatus('enabled');
9614
+ }
9615
+ finally {
9616
+ if (!signal.aborted) {
9617
+ this.state.setPendingStatus(this.state.status);
9618
+ }
9619
+ }
9512
9620
  }
9513
9621
  });
9514
9622
  }
@@ -9574,130 +9682,122 @@ class InputMediaDeviceManager {
9574
9682
  this.logger('debug', 'Starting stream');
9575
9683
  let stream;
9576
9684
  let rootStream;
9577
- try {
9578
- if (this.state.mediaStream &&
9579
- this.getTracks().every((t) => t.readyState === 'live')) {
9580
- stream = this.state.mediaStream;
9581
- this.enableTracks();
9582
- }
9583
- else {
9584
- const defaultConstraints = this.state.defaultConstraints;
9585
- const constraints = {
9586
- ...defaultConstraints,
9587
- deviceId: this.state.selectedDevice
9588
- ? { exact: this.state.selectedDevice }
9589
- : undefined,
9590
- };
9591
- /**
9592
- * Chains two media streams together.
9593
- *
9594
- * In our case, filters MediaStreams are derived from their parent MediaStream.
9595
- * However, once a child filter's track is stopped,
9596
- * the tracks of the parent MediaStream aren't automatically stopped.
9597
- * This leads to a situation where the camera indicator light is still on
9598
- * even though the user stopped publishing video.
9599
- *
9600
- * This function works around this issue by stopping the parent MediaStream's tracks
9601
- * as well once the child filter's tracks are stopped.
9602
- *
9603
- * It works by patching the stop() method of the child filter's tracks to also stop
9604
- * the parent MediaStream's tracks of the same type. Here we assume that
9605
- * the parent MediaStream has only one track of each type.
9606
- *
9607
- * @param parentStream the parent MediaStream. Omit for the root stream.
9608
- */
9609
- const chainWith = (parentStream) => async (filterStream) => {
9610
- if (!parentStream)
9611
- return filterStream;
9612
- // TODO OL: take care of track.enabled property as well
9613
- const parent = await parentStream;
9614
- filterStream.getTracks().forEach((track) => {
9615
- const originalStop = track.stop;
9616
- track.stop = function stop() {
9617
- originalStop.call(track);
9618
- parent.getTracks().forEach((parentTrack) => {
9619
- if (parentTrack.kind === track.kind) {
9620
- parentTrack.stop();
9621
- }
9622
- });
9623
- };
9624
- });
9625
- parent.getTracks().forEach((parentTrack) => {
9626
- // When the parent stream abruptly ends, we propagate the event
9627
- // to the filter stream.
9628
- // This usually happens when the camera/microphone permissions
9629
- // are revoked or when the device is disconnected.
9630
- const handleParentTrackEnded = () => {
9631
- filterStream.getTracks().forEach((track) => {
9632
- if (parentTrack.kind !== track.kind)
9633
- return;
9634
- track.stop();
9635
- track.dispatchEvent(new Event('ended')); // propagate the event
9636
- });
9637
- };
9638
- parentTrack.addEventListener('ended', handleParentTrackEnded);
9639
- this.subscriptions.push(() => {
9640
- parentTrack.removeEventListener('ended', handleParentTrackEnded);
9641
- });
9642
- });
9685
+ if (this.state.mediaStream &&
9686
+ this.getTracks().every((t) => t.readyState === 'live')) {
9687
+ stream = this.state.mediaStream;
9688
+ this.enableTracks();
9689
+ }
9690
+ else {
9691
+ const defaultConstraints = this.state.defaultConstraints;
9692
+ const constraints = {
9693
+ ...defaultConstraints,
9694
+ deviceId: this.state.selectedDevice
9695
+ ? { exact: this.state.selectedDevice }
9696
+ : undefined,
9697
+ };
9698
+ /**
9699
+ * Chains two media streams together.
9700
+ *
9701
+ * In our case, filters MediaStreams are derived from their parent MediaStream.
9702
+ * However, once a child filter's track is stopped,
9703
+ * the tracks of the parent MediaStream aren't automatically stopped.
9704
+ * This leads to a situation where the camera indicator light is still on
9705
+ * even though the user stopped publishing video.
9706
+ *
9707
+ * This function works around this issue by stopping the parent MediaStream's tracks
9708
+ * as well once the child filter's tracks are stopped.
9709
+ *
9710
+ * It works by patching the stop() method of the child filter's tracks to also stop
9711
+ * the parent MediaStream's tracks of the same type. Here we assume that
9712
+ * the parent MediaStream has only one track of each type.
9713
+ *
9714
+ * @param parentStream the parent MediaStream. Omit for the root stream.
9715
+ */
9716
+ const chainWith = (parentStream) => async (filterStream) => {
9717
+ if (!parentStream)
9643
9718
  return filterStream;
9644
- };
9645
- // the rootStream represents the stream coming from the actual device
9646
- // e.g. camera or microphone stream
9647
- rootStream = this.getStream(constraints);
9648
- // we publish the last MediaStream of the chain
9649
- stream = await this.filters.reduce((parent, entry) => parent
9650
- .then((inputStream) => {
9651
- const { stop, output } = entry.start(inputStream);
9652
- entry.stop = stop;
9653
- return output;
9654
- })
9655
- .then(chainWith(parent), (error) => {
9656
- this.logger('warn', 'Filter failed to start and will be ignored', error);
9657
- return parent;
9658
- }), rootStream);
9659
- }
9660
- if (this.call.state.callingState === CallingState.JOINED) {
9661
- await this.publishStream(stream);
9662
- }
9663
- if (this.state.mediaStream !== stream) {
9664
- this.state.setMediaStream(stream, await rootStream);
9665
- const handleTrackEnded = async () => {
9666
- await this.statusChangeSettled();
9667
- if (this.enabled) {
9668
- this.isTrackStoppedDueToTrackEnd = true;
9669
- setTimeout(() => {
9670
- this.isTrackStoppedDueToTrackEnd = false;
9671
- }, 2000);
9672
- await this.disable();
9673
- }
9674
- };
9675
- const createTrackMuteHandler = (muted) => () => {
9676
- if (!isMobile() || this.trackType !== TrackType.VIDEO)
9677
- return;
9678
- this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
9679
- this.logger('warn', 'Error while notifying track mute state', err);
9680
- });
9681
- };
9682
- stream.getTracks().forEach((track) => {
9683
- const muteHandler = createTrackMuteHandler(true);
9684
- const unmuteHandler = createTrackMuteHandler(false);
9685
- track.addEventListener('mute', muteHandler);
9686
- track.addEventListener('unmute', unmuteHandler);
9687
- track.addEventListener('ended', handleTrackEnded);
9719
+ // TODO OL: take care of track.enabled property as well
9720
+ const parent = await parentStream;
9721
+ filterStream.getTracks().forEach((track) => {
9722
+ const originalStop = track.stop;
9723
+ track.stop = function stop() {
9724
+ originalStop.call(track);
9725
+ parent.getTracks().forEach((parentTrack) => {
9726
+ if (parentTrack.kind === track.kind) {
9727
+ parentTrack.stop();
9728
+ }
9729
+ });
9730
+ };
9731
+ });
9732
+ parent.getTracks().forEach((parentTrack) => {
9733
+ // When the parent stream abruptly ends, we propagate the event
9734
+ // to the filter stream.
9735
+ // This usually happens when the camera/microphone permissions
9736
+ // are revoked or when the device is disconnected.
9737
+ const handleParentTrackEnded = () => {
9738
+ filterStream.getTracks().forEach((track) => {
9739
+ if (parentTrack.kind !== track.kind)
9740
+ return;
9741
+ track.stop();
9742
+ track.dispatchEvent(new Event('ended')); // propagate the event
9743
+ });
9744
+ };
9745
+ parentTrack.addEventListener('ended', handleParentTrackEnded);
9688
9746
  this.subscriptions.push(() => {
9689
- track.removeEventListener('mute', muteHandler);
9690
- track.removeEventListener('unmute', unmuteHandler);
9691
- track.removeEventListener('ended', handleTrackEnded);
9747
+ parentTrack.removeEventListener('ended', handleParentTrackEnded);
9692
9748
  });
9693
9749
  });
9694
- }
9750
+ return filterStream;
9751
+ };
9752
+ // the rootStream represents the stream coming from the actual device
9753
+ // e.g. camera or microphone stream
9754
+ rootStream = this.getStream(constraints);
9755
+ // we publish the last MediaStream of the chain
9756
+ stream = await this.filters.reduce((parent, entry) => parent
9757
+ .then((inputStream) => {
9758
+ const { stop, output } = entry.start(inputStream);
9759
+ entry.stop = stop;
9760
+ return output;
9761
+ })
9762
+ .then(chainWith(parent), (error) => {
9763
+ this.logger('warn', 'Filter failed to start and will be ignored', error);
9764
+ return parent;
9765
+ }), rootStream);
9695
9766
  }
9696
- catch (err) {
9697
- if (rootStream) {
9698
- disposeOfMediaStream(await rootStream);
9699
- }
9700
- throw err;
9767
+ if (this.call.state.callingState === CallingState.JOINED) {
9768
+ await this.publishStream(stream);
9769
+ }
9770
+ if (this.state.mediaStream !== stream) {
9771
+ this.state.setMediaStream(stream, await rootStream);
9772
+ const handleTrackEnded = async () => {
9773
+ await this.statusChangeSettled();
9774
+ if (this.enabled) {
9775
+ this.isTrackStoppedDueToTrackEnd = true;
9776
+ setTimeout(() => {
9777
+ this.isTrackStoppedDueToTrackEnd = false;
9778
+ }, 2000);
9779
+ await this.disable();
9780
+ }
9781
+ };
9782
+ const createTrackMuteHandler = (muted) => () => {
9783
+ if (!isMobile() || this.trackType !== TrackType.VIDEO)
9784
+ return;
9785
+ this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
9786
+ this.logger('warn', 'Error while notifying track mute state', err);
9787
+ });
9788
+ };
9789
+ stream.getTracks().forEach((track) => {
9790
+ const muteHandler = createTrackMuteHandler(true);
9791
+ const unmuteHandler = createTrackMuteHandler(false);
9792
+ track.addEventListener('mute', muteHandler);
9793
+ track.addEventListener('unmute', unmuteHandler);
9794
+ track.addEventListener('ended', handleTrackEnded);
9795
+ this.subscriptions.push(() => {
9796
+ track.removeEventListener('mute', muteHandler);
9797
+ track.removeEventListener('unmute', unmuteHandler);
9798
+ track.removeEventListener('ended', handleTrackEnded);
9799
+ });
9800
+ });
9701
9801
  }
9702
9802
  }
9703
9803
  get mediaDeviceKind() {
@@ -10027,7 +10127,14 @@ class CameraManager extends InputMediaDeviceManager {
10027
10127
  // Wait for any in progress camera operation
10028
10128
  await this.statusChangeSettled();
10029
10129
  const { target_resolution, camera_facing, camera_default_on } = settings;
10030
- await this.selectTargetResolution(target_resolution);
10130
+ // normalize target resolution to landscape format.
10131
+ // on mobile devices, the device itself adjusts the resolution to portrait or landscape
10132
+ // depending on the orientation of the device. using portrait resolution
10133
+ // will result in falling back to the default resolution (640x480).
10134
+ let { width, height } = target_resolution;
10135
+ if (width < height)
10136
+ [width, height] = [height, width];
10137
+ await this.selectTargetResolution({ width, height });
10031
10138
  // Set camera direction if it's not yet set
10032
10139
  if (!this.state.direction && !this.state.selectedDevice) {
10033
10140
  this.state.setDirection(camera_facing === 'front' ? 'front' : 'back');
@@ -10424,6 +10531,7 @@ class MicrophoneManager extends InputMediaDeviceManager {
10424
10531
  await this.disableNoiseCancellation().catch((err) => {
10425
10532
  this.logger('warn', 'Failed to disable noise cancellation', err);
10426
10533
  });
10534
+ throw e;
10427
10535
  }
10428
10536
  }
10429
10537
  /**
@@ -11060,6 +11168,7 @@ class Call {
11060
11168
  await this.sfuClient?.leaveAndClose(message ?? reason ?? 'user is leaving the call');
11061
11169
  this.sfuClient = undefined;
11062
11170
  this.dynascaleManager.setSfuClient(undefined);
11171
+ await this.dynascaleManager.dispose();
11063
11172
  this.state.setCallingState(CallingState.LEFT);
11064
11173
  this.state.setParticipants([]);
11065
11174
  this.state.dispose();
@@ -13366,7 +13475,9 @@ class TokenManager {
13366
13475
  this.token = token;
13367
13476
  }
13368
13477
  catch (e) {
13369
- return reject(new Error(`Call to tokenProvider failed with message: ${e}`));
13478
+ return reject(new Error(`Call to tokenProvider failed with message: ${e}`, {
13479
+ cause: e,
13480
+ }));
13370
13481
  }
13371
13482
  resolve(this.token);
13372
13483
  }
@@ -13783,7 +13894,7 @@ class StreamClient {
13783
13894
  this.getUserAgent = () => {
13784
13895
  if (!this.cachedUserAgent) {
13785
13896
  const { clientAppIdentifier = {} } = this.options;
13786
- const { sdkName = 'js', sdkVersion = "1.23.2", ...extras } = clientAppIdentifier;
13897
+ const { sdkName = 'js', sdkVersion = "1.23.4", ...extras } = clientAppIdentifier;
13787
13898
  this.cachedUserAgent = [
13788
13899
  `stream-video-${sdkName}-v${sdkVersion}`,
13789
13900
  ...Object.entries(extras).map(([key, value]) => `${key}=${value}`),