@stream-io/video-client 1.23.1 → 1.23.3

This diff shows the published contents of these package versions as they appear in their public registry, and is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,22 @@
  
  This file was generated using [@jscutlery/semver](https://github.com/jscutlery/semver).
  
+ ## [1.23.3](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.23.2...@stream-io/video-client-1.23.3) (2025-06-02)
+ 
+ - remove TODO ([9cfea4b](https://github.com/GetStream/stream-video-js/commit/9cfea4b54284cdd680a6d666436dedc5fd8956c3))
+ 
+ ### Bug Fixes
+ 
+ - inconsistent device state if applySettingsToStream fails ([#1808](https://github.com/GetStream/stream-video-js/issues/1808)) ([73d66c2](https://github.com/GetStream/stream-video-js/commit/73d66c2eaa7eca52b9d41b39f8f9fd0a0ce240ef))
+ - test ([e0b93aa](https://github.com/GetStream/stream-video-js/commit/e0b93aaa13f22f0db30b61e6230aff40ba8fd92a))
+ - use AudioContext for Safari ([#1810](https://github.com/GetStream/stream-video-js/issues/1810)) ([63542f4](https://github.com/GetStream/stream-video-js/commit/63542f419efa475c7acf50f053621ace74a1eff4))
+ 
+ ## [1.23.2](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.23.1...@stream-io/video-client-1.23.2) (2025-05-22)
+ 
+ ### Bug Fixes
+ 
+ - rpc error tracing ([#1801](https://github.com/GetStream/stream-video-js/issues/1801)) ([a9e86d5](https://github.com/GetStream/stream-video-js/commit/a9e86d5e51e72b15d044e012f5fcc5a44907c325))
+ 
  ## [1.23.1](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.23.0...@stream-io/video-client-1.23.1) (2025-05-21)
  
  ### Bug Fixes
@@ -4,7 +4,7 @@ import { ServiceType, stackIntercept, RpcError } from '@protobuf-ts/runtime-rpc'
  import axios from 'axios';
  export { AxiosError } from 'axios';
  import { TwirpFetchTransport, TwirpErrorCode } from '@protobuf-ts/twirp-transport';
- import { ReplaySubject, combineLatest, BehaviorSubject, map, shareReplay, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, merge, from, fromEvent, debounceTime, pairwise, of } from 'rxjs';
+ import { ReplaySubject, combineLatest, BehaviorSubject, shareReplay, map, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, merge, from, fromEvent, debounceTime, pairwise, of } from 'rxjs';
  import { UAParser } from 'ua-parser-js';
  import { parse } from 'sdp-transform';
  
@@ -3411,22 +3411,18 @@ const withHeaders = (headers) => {
  const withRequestLogger = (logger, level) => {
      return {
          interceptUnary: (next, method, input, options) => {
-             let invocation;
-             try {
-                 invocation = next(method, input, options);
-             }
-             finally {
-                 logger(level, `Invoked SFU RPC method ${method.name}`, {
-                     request: invocation?.request,
-                     headers: invocation?.requestHeaders,
-                     response: invocation?.response,
-                 });
-             }
+             const invocation = next(method, input, options);
+             logger(level, `Invoked SFU RPC method ${method.name}`, {
+                 request: invocation.request,
+                 headers: invocation.requestHeaders,
+                 response: invocation.response,
+             });
              return invocation;
          },
      };
  };
  const withRequestTracer = (trace) => {
+     const traceError = (name, input, err) => trace(`${name}OnFailure`, [err, input]);
      const exclusions = {
          SendStats: true,
      };
@@ -3435,14 +3431,14 @@ const withRequestTracer = (trace) => {
              if (exclusions[method.name]) {
                  return next(method, input, options);
              }
-             try {
-                 trace(method.name, input);
-                 return next(method, input, options);
-             }
-             catch (err) {
-                 trace(`${method.name}OnFailure`, [input, err]);
-                 throw err;
-             }
+             trace(method.name, input);
+             const unaryCall = next(method, input, options);
+             unaryCall.then((invocation) => {
+                 const err = invocation.response?.error;
+                 if (err)
+                     traceError(method.name, input, err);
+             }, (err) => traceError(method.name, input, err));
+             return unaryCall;
          },
      };
  };
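Note on the reworked tracer above: failures are no longer caught synchronously around `next()`; they are observed on the returned unary call's promise instead, and an error embedded in an otherwise successful SFU response (`invocation.response?.error`) is now traced as well. The failure payload order also changed from `[input, err]` to `[err, input]`. A minimal sketch of a trace sink compatible with the new shape (the sink itself is illustrative, not part of the published API):

```ts
// Illustrative trace sink; `TraceRecord` and `collected` are assumed names.
type TraceRecord = [tag: string, data: unknown];

const collected: TraceRecord[] = [];

const trace = (tag: string, data: unknown) => {
  // Success path: (method.name, input).
  // Failure path since 1.23.2: (`${method.name}OnFailure`, [err, input]),
  // i.e. the error now comes first, where 1.23.1 traced [input, err].
  collected.push([tag, data]);
};
```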
@@ -5035,6 +5031,9 @@ class CallState {
                  return nextQueue.slice(-maxVisibleCaptions);
              });
          };
+         this.rawParticipants$ = this.participantsSubject
+             .asObservable()
+             .pipe(shareReplay({ bufferSize: 1, refCount: true }));
          this.participants$ = this.participantsSubject.asObservable().pipe(
          // maintain stable-sort by mutating the participants stored
          // in the original subject
@@ -5208,6 +5207,12 @@ class CallState {
      get participants() {
          return this.getCurrentValue(this.participants$);
      }
+     /**
+      * The stable list of participants in the current call, unsorted.
+      */
+     get rawParticipants() {
+         return this.getCurrentValue(this.rawParticipants$);
+     }
      /**
       * The local participant in the current call.
       */
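The sorted `participants$` keeps a stable order by mutating the array held in the backing subject; the new `rawParticipants$`/`rawParticipants` expose that same list before sorting. A quick sketch of consuming both (assumes an already-joined `Call` instance):

```ts
import type { Call } from '@stream-io/video-client';

declare const call: Call; // assumed: an active, joined call

// Sorted view; order is maintained by the SDK's stable sort.
const sub = call.state.participants$.subscribe((list) => {
  console.log('sorted:', list.map((p) => p.sessionId));
});

// New in 1.23.3: the same participants, unsorted.
console.log('unsorted:', call.state.rawParticipants.map((p) => p.sessionId));

sub.unsubscribe();
```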
@@ -5680,7 +5685,7 @@ const aggregate = (stats) => {
      return report;
  };
  
- const version = "1.23.1";
+ const version = "1.23.3";
  const [major, minor, patch] = version.split('.');
  let sdkInfo = {
      type: SdkType.PLAIN_JAVASCRIPT,
@@ -8437,6 +8442,20 @@ class DynascaleManager {
                  true,
              };
          }), shareReplay(1));
+         /**
+          * Disposes the allocated resources and closes the audio context if it was created.
+          */
+         this.dispose = async () => {
+             if (this.pendingSubscriptionsUpdate) {
+                 clearTimeout(this.pendingSubscriptionsUpdate);
+             }
+             const context = this.getOrCreateAudioContext();
+             if (context && context.state !== 'closed') {
+                 document.removeEventListener('click', this.resumeAudioContext);
+                 await context.close();
+                 this.audioContext = undefined;
+             }
+         };
          this.setVideoTrackSubscriptionOverrides = (override, sessionIds) => {
              if (!sessionIds) {
                  return setCurrentValue(this.videoTrackSubscriptionOverridesSubject, override ? { [globalOverrideKey]: override } : {});
@@ -8552,7 +8571,7 @@ class DynascaleManager {
              });
              this.applyTrackSubscriptions(debounceType);
          };
-         const participant$ = this.callState.participants$.pipe(map((participants) => participants.find((participant) => participant.sessionId === sessionId)), takeWhile((participant) => !!participant), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
+         const participant$ = this.callState.participants$.pipe(map((ps) => ps.find((p) => p.sessionId === sessionId)), takeWhile((participant) => !!participant), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
          /**
           * Since the video elements are now being removed from the DOM (React SDK) upon
           * visibility change, this subscription is not in use and stays here only for the
@@ -8680,7 +8699,24 @@ class DynascaleManager {
          const participant = this.callState.findParticipantBySessionId(sessionId);
          if (!participant || participant.isLocalParticipant)
              return;
-         const participant$ = this.callState.participants$.pipe(map((participants) => participants.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
+         const participant$ = this.callState.participants$.pipe(map((ps) => ps.find((p) => p.sessionId === sessionId)), takeWhile((p) => !!p), distinctUntilChanged(), shareReplay({ bufferSize: 1, refCount: true }));
+         const updateSinkId = (deviceId, audioContext) => {
+             if (!deviceId)
+                 return;
+             if ('setSinkId' in audioElement) {
+                 audioElement.setSinkId(deviceId).catch((e) => {
+                     this.logger('warn', `Can't set AudioElement sinkId`, e);
+                 });
+             }
+             if (audioContext && 'setSinkId' in audioContext) {
+                 // @ts-expect-error setSinkId is not available in all browsers
+                 audioContext.setSinkId(deviceId).catch((e) => {
+                     this.logger('warn', `Can't set AudioContext sinkId`, e);
+                 });
+             }
+         };
+         let sourceNode = undefined;
+         let gainNode = undefined;
          const updateMediaStreamSubscription = participant$
              .pipe(distinctUntilKeyChanged(trackType === 'screenShareAudioTrack'
                  ? 'screenShareAudioStream'
@@ -8693,40 +8729,82 @@ class DynascaleManager {
                      return;
              setTimeout(() => {
                  audioElement.srcObject = source ?? null;
-                 if (audioElement.srcObject) {
+                 if (!source)
+                     return;
+                 // Safari has a special quirk that prevents playing audio until the user
+                 // interacts with the page or focuses on the tab where the call happens.
+                 // This is a workaround for the issue where:
+                 // - A and B are in a call
+                 // - A switches to another tab
+                 // - B mutes their microphone and unmutes it
+                 // - A does not hear B's unmuted audio until they focus the tab
+                 const audioContext = this.getOrCreateAudioContext();
+                 if (audioContext) {
+                     // we will play audio through the audio context in Safari
+                     audioElement.muted = true;
+                     sourceNode?.disconnect();
+                     sourceNode = audioContext.createMediaStreamSource(source);
+                     gainNode ?? (gainNode = audioContext.createGain());
+                     gainNode.gain.value = p.audioVolume ?? this.speaker.state.volume;
+                     sourceNode.connect(gainNode).connect(audioContext.destination);
+                     this.resumeAudioContext();
+                 }
+                 else {
+                     // we will play audio directly through the audio element in other browsers
+                     audioElement.muted = false;
                      audioElement.play().catch((e) => {
-                         this.logger('warn', `Failed to play stream`, e);
+                         this.logger('warn', `Failed to play audio stream`, e);
                      });
-                     // audio output device shall be set after the audio element is played
-                     // otherwise, the browser will not pick it up, and will always
-                     // play audio through the system's default device
-                     const { selectedDevice } = this.speaker.state;
-                     if (selectedDevice && 'setSinkId' in audioElement) {
-                         audioElement.setSinkId(selectedDevice);
-                     }
                  }
+                 const { selectedDevice } = this.speaker.state;
+                 if (selectedDevice)
+                     updateSinkId(selectedDevice, audioContext);
              });
          });
          const sinkIdSubscription = !('setSinkId' in audioElement)
              ? null
              : this.speaker.state.selectedDevice$.subscribe((deviceId) => {
-                 if (deviceId) {
-                     audioElement.setSinkId(deviceId);
-                 }
+                 const audioContext = this.getOrCreateAudioContext();
+                 updateSinkId(deviceId, audioContext);
              });
          const volumeSubscription = combineLatest([
              this.speaker.state.volume$,
              participant$.pipe(distinctUntilKeyChanged('audioVolume')),
          ]).subscribe(([volume, p]) => {
-             audioElement.volume = p.audioVolume ?? volume;
+             const participantVolume = p.audioVolume ?? volume;
+             audioElement.volume = participantVolume;
+             if (gainNode)
+                 gainNode.gain.value = participantVolume;
          });
          audioElement.autoplay = true;
          return () => {
              sinkIdSubscription?.unsubscribe();
              volumeSubscription.unsubscribe();
              updateMediaStreamSubscription.unsubscribe();
+             audioElement.srcObject = null;
+             sourceNode?.disconnect();
+             gainNode?.disconnect();
          };
      };
+     this.getOrCreateAudioContext = () => {
+         if (this.audioContext || !isSafari())
+             return this.audioContext;
+         const context = new AudioContext();
+         if (context.state === 'suspended') {
+             document.addEventListener('click', this.resumeAudioContext);
+         }
+         return (this.audioContext = context);
+     };
+     this.resumeAudioContext = () => {
+         if (this.audioContext?.state === 'suspended') {
+             this.audioContext
+                 .resume()
+                 .catch((err) => this.logger('warn', `Can't resume audio context`, err))
+                 .then(() => {
+                     document.removeEventListener('click', this.resumeAudioContext);
+                 });
+         }
+     };
      this.callState = callState;
      this.speaker = speaker;
  }
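The Safari workaround above boils down to one WebAudio routing pattern: keep the `<audio>` element attached but muted, pipe the remote `MediaStream` through `MediaStreamAudioSourceNode → GainNode → destination`, and resume the context on the first user click. A standalone sketch of that technique (function name and parameters are illustrative, not the SDK's API):

```ts
// Standalone sketch of the Safari audio routing; not the SDK's exact code.
function playViaAudioContext(remoteStream: MediaStream, volume = 1) {
  const audioElement = document.createElement('audio');
  audioElement.autoplay = true;
  audioElement.srcObject = remoteStream; // keeps the track flowing
  audioElement.muted = true; // audible output comes from the context instead

  const context = new AudioContext();
  const source = context.createMediaStreamSource(remoteStream);
  const gain = context.createGain();
  gain.gain.value = volume;
  source.connect(gain).connect(context.destination);

  if (context.state === 'suspended') {
    // Safari keeps a fresh context suspended until a user gesture
    const resume = () => {
      context.resume().then(() => document.removeEventListener('click', resume));
    };
    document.addEventListener('click', resume);
  }

  // dispose: mirrors the cleanup performed in the subscription teardown above
  return () => {
    audioElement.srcObject = null;
    source.disconnect();
    gain.disconnect();
    return context.close();
  };
}
```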
@@ -9509,10 +9587,22 @@ class InputMediaDeviceManager {
          }
      }
      async applySettingsToStream() {
-         await withCancellation(this.statusChangeConcurrencyTag, async () => {
+         await withCancellation(this.statusChangeConcurrencyTag, async (signal) => {
              if (this.enabled) {
-                 await this.muteStream();
-                 await this.unmuteStream();
+                 try {
+                     await this.muteStream();
+                     this.state.setStatus('disabled');
+                     if (signal.aborted) {
+                         return;
+                     }
+                     await this.unmuteStream();
+                     this.state.setStatus('enabled');
+                 }
+                 finally {
+                     if (!signal.aborted) {
+                         this.state.setPendingStatus(this.state.status);
+                     }
+                 }
              }
          });
      }
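This is the fix for the "inconsistent device state if applySettingsToStream fails" changelog entry: the status is now committed after each completed step, and the pending status is only reconciled when this run has not been superseded. The `withCancellation(tag, task)` helper is internal; the sketch below shows the cancellation-tag pattern its signature implies (at most one in-flight task per tag, with the previous task's `AbortSignal` firing when a new one starts), as an illustration rather than the SDK's implementation:

```ts
// Illustrative cancellation-tag helper, inferred from the call site above.
const controllers = new Map<string, AbortController>();

async function withCancellation<T>(
  tag: string,
  task: (signal: AbortSignal) => Promise<T>,
): Promise<T> {
  controllers.get(tag)?.abort(); // supersede the previous run for this tag
  const controller = new AbortController();
  controllers.set(tag, controller);
  try {
    return await task(controller.signal);
  } finally {
    if (controllers.get(tag) === controller) controllers.delete(tag);
  }
}
```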
@@ -9578,130 +9668,122 @@ class InputMediaDeviceManager {
          this.logger('debug', 'Starting stream');
          let stream;
          let rootStream;
-         try {
-             if (this.state.mediaStream &&
-                 this.getTracks().every((t) => t.readyState === 'live')) {
-                 stream = this.state.mediaStream;
-                 this.enableTracks();
-             }
-             else {
-                 const defaultConstraints = this.state.defaultConstraints;
-                 const constraints = {
-                     ...defaultConstraints,
-                     deviceId: this.state.selectedDevice
-                         ? { exact: this.state.selectedDevice }
-                         : undefined,
-                 };
-                 /**
-                  * Chains two media streams together.
-                  *
-                  * In our case, filters MediaStreams are derived from their parent MediaStream.
-                  * However, once a child filter's track is stopped,
-                  * the tracks of the parent MediaStream aren't automatically stopped.
-                  * This leads to a situation where the camera indicator light is still on
-                  * even though the user stopped publishing video.
-                  *
-                  * This function works around this issue by stopping the parent MediaStream's tracks
-                  * as well once the child filter's tracks are stopped.
-                  *
-                  * It works by patching the stop() method of the child filter's tracks to also stop
-                  * the parent MediaStream's tracks of the same type. Here we assume that
-                  * the parent MediaStream has only one track of each type.
-                  *
-                  * @param parentStream the parent MediaStream. Omit for the root stream.
-                  */
-                 const chainWith = (parentStream) => async (filterStream) => {
-                     if (!parentStream)
-                         return filterStream;
-                     // TODO OL: take care of track.enabled property as well
-                     const parent = await parentStream;
-                     filterStream.getTracks().forEach((track) => {
-                         const originalStop = track.stop;
-                         track.stop = function stop() {
-                             originalStop.call(track);
-                             parent.getTracks().forEach((parentTrack) => {
-                                 if (parentTrack.kind === track.kind) {
-                                     parentTrack.stop();
-                                 }
-                             });
-                         };
-                     });
-                     parent.getTracks().forEach((parentTrack) => {
-                         // When the parent stream abruptly ends, we propagate the event
-                         // to the filter stream.
-                         // This usually happens when the camera/microphone permissions
-                         // are revoked or when the device is disconnected.
-                         const handleParentTrackEnded = () => {
-                             filterStream.getTracks().forEach((track) => {
-                                 if (parentTrack.kind !== track.kind)
-                                     return;
-                                 track.stop();
-                                 track.dispatchEvent(new Event('ended')); // propagate the event
-                             });
-                         };
-                         parentTrack.addEventListener('ended', handleParentTrackEnded);
-                         this.subscriptions.push(() => {
-                             parentTrack.removeEventListener('ended', handleParentTrackEnded);
-                         });
-                     });
+         if (this.state.mediaStream &&
+             this.getTracks().every((t) => t.readyState === 'live')) {
+             stream = this.state.mediaStream;
+             this.enableTracks();
+         }
+         else {
+             const defaultConstraints = this.state.defaultConstraints;
+             const constraints = {
+                 ...defaultConstraints,
+                 deviceId: this.state.selectedDevice
+                     ? { exact: this.state.selectedDevice }
+                     : undefined,
+             };
+             /**
+              * Chains two media streams together.
+              *
+              * In our case, filters MediaStreams are derived from their parent MediaStream.
+              * However, once a child filter's track is stopped,
+              * the tracks of the parent MediaStream aren't automatically stopped.
+              * This leads to a situation where the camera indicator light is still on
+              * even though the user stopped publishing video.
+              *
+              * This function works around this issue by stopping the parent MediaStream's tracks
+              * as well once the child filter's tracks are stopped.
+              *
+              * It works by patching the stop() method of the child filter's tracks to also stop
+              * the parent MediaStream's tracks of the same type. Here we assume that
+              * the parent MediaStream has only one track of each type.
+              *
+              * @param parentStream the parent MediaStream. Omit for the root stream.
+              */
+             const chainWith = (parentStream) => async (filterStream) => {
+                 if (!parentStream)
                      return filterStream;
-                 };
-                 // the rootStream represents the stream coming from the actual device
-                 // e.g. camera or microphone stream
-                 rootStream = this.getStream(constraints);
-                 // we publish the last MediaStream of the chain
-                 stream = await this.filters.reduce((parent, entry) => parent
-                     .then((inputStream) => {
-                         const { stop, output } = entry.start(inputStream);
-                         entry.stop = stop;
-                         return output;
-                     })
-                     .then(chainWith(parent), (error) => {
-                         this.logger('warn', 'Filter failed to start and will be ignored', error);
-                         return parent;
-                     }), rootStream);
-             }
-             if (this.call.state.callingState === CallingState.JOINED) {
-                 await this.publishStream(stream);
-             }
-             if (this.state.mediaStream !== stream) {
-                 this.state.setMediaStream(stream, await rootStream);
-                 const handleTrackEnded = async () => {
-                     await this.statusChangeSettled();
-                     if (this.enabled) {
-                         this.isTrackStoppedDueToTrackEnd = true;
-                         setTimeout(() => {
-                             this.isTrackStoppedDueToTrackEnd = false;
-                         }, 2000);
-                         await this.disable();
-                     }
-                 };
-                 const createTrackMuteHandler = (muted) => () => {
-                     if (!isMobile() || this.trackType !== TrackType.VIDEO)
-                         return;
-                     this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
-                         this.logger('warn', 'Error while notifying track mute state', err);
-                     });
-                 };
-                 stream.getTracks().forEach((track) => {
-                     const muteHandler = createTrackMuteHandler(true);
-                     const unmuteHandler = createTrackMuteHandler(false);
-                     track.addEventListener('mute', muteHandler);
-                     track.addEventListener('unmute', unmuteHandler);
-                     track.addEventListener('ended', handleTrackEnded);
+                 // TODO OL: take care of track.enabled property as well
+                 const parent = await parentStream;
+                 filterStream.getTracks().forEach((track) => {
+                     const originalStop = track.stop;
+                     track.stop = function stop() {
+                         originalStop.call(track);
+                         parent.getTracks().forEach((parentTrack) => {
+                             if (parentTrack.kind === track.kind) {
+                                 parentTrack.stop();
+                             }
+                         });
+                     };
+                 });
+                 parent.getTracks().forEach((parentTrack) => {
+                     // When the parent stream abruptly ends, we propagate the event
+                     // to the filter stream.
+                     // This usually happens when the camera/microphone permissions
+                     // are revoked or when the device is disconnected.
+                     const handleParentTrackEnded = () => {
+                         filterStream.getTracks().forEach((track) => {
+                             if (parentTrack.kind !== track.kind)
+                                 return;
+                             track.stop();
+                             track.dispatchEvent(new Event('ended')); // propagate the event
+                         });
+                     };
+                     parentTrack.addEventListener('ended', handleParentTrackEnded);
                      this.subscriptions.push(() => {
-                         track.removeEventListener('mute', muteHandler);
-                         track.removeEventListener('unmute', unmuteHandler);
-                         track.removeEventListener('ended', handleTrackEnded);
+                         parentTrack.removeEventListener('ended', handleParentTrackEnded);
                      });
                  });
-             }
+                 return filterStream;
+             };
+             // the rootStream represents the stream coming from the actual device
+             // e.g. camera or microphone stream
+             rootStream = this.getStream(constraints);
+             // we publish the last MediaStream of the chain
+             stream = await this.filters.reduce((parent, entry) => parent
+                 .then((inputStream) => {
+                     const { stop, output } = entry.start(inputStream);
+                     entry.stop = stop;
+                     return output;
+                 })
+                 .then(chainWith(parent), (error) => {
+                     this.logger('warn', 'Filter failed to start and will be ignored', error);
+                     return parent;
+                 }), rootStream);
          }
-         catch (err) {
-             if (rootStream) {
-                 disposeOfMediaStream(await rootStream);
-             }
-             throw err;
+         if (this.call.state.callingState === CallingState.JOINED) {
+             await this.publishStream(stream);
+         }
+         if (this.state.mediaStream !== stream) {
+             this.state.setMediaStream(stream, await rootStream);
+             const handleTrackEnded = async () => {
+                 await this.statusChangeSettled();
+                 if (this.enabled) {
+                     this.isTrackStoppedDueToTrackEnd = true;
+                     setTimeout(() => {
+                         this.isTrackStoppedDueToTrackEnd = false;
+                     }, 2000);
+                     await this.disable();
+                 }
+             };
+             const createTrackMuteHandler = (muted) => () => {
+                 if (!isMobile() || this.trackType !== TrackType.VIDEO)
+                     return;
+                 this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
+                     this.logger('warn', 'Error while notifying track mute state', err);
+                 });
+             };
+             stream.getTracks().forEach((track) => {
+                 const muteHandler = createTrackMuteHandler(true);
+                 const unmuteHandler = createTrackMuteHandler(false);
+                 track.addEventListener('mute', muteHandler);
+                 track.addEventListener('unmute', unmuteHandler);
+                 track.addEventListener('ended', handleTrackEnded);
+                 this.subscriptions.push(() => {
+                     track.removeEventListener('mute', muteHandler);
+                     track.removeEventListener('unmute', unmuteHandler);
+                     track.removeEventListener('ended', handleTrackEnded);
+                 });
+             });
          }
      }
      get mediaDeviceKind() {
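For context on the filter chain above: each entry's `start(inputStream)` must return `{ output, stop }`, a filter that throws is logged and skipped, and `chainWith` patches the child tracks' `stop()` so that stopping a filtered track also releases the device. A sketch of a filter entry in that shape (the `FilterEntry` type and direct access to the `filters` array are illustrative; consult the SDK's public filter-registration API for real usage):

```ts
// Illustrative filter entry matching the { start: (input) => { output, stop } }
// shape consumed by the reduce() chain above.
interface FilterEntry {
  start: (input: MediaStream) => { output: MediaStream; stop: () => void };
  stop?: () => void;
}

const passThrough: FilterEntry = {
  start: (input) => {
    // derive a child stream; chainWith() patches its tracks' stop() so the
    // parent (device) tracks are stopped together with the child's
    const output = new MediaStream(input.getTracks());
    return {
      output,
      stop: () => output.getTracks().forEach((t) => t.stop()),
    };
  },
};
```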
@@ -10428,6 +10510,7 @@ class MicrophoneManager extends InputMediaDeviceManager {
              await this.disableNoiseCancellation().catch((err) => {
                  this.logger('warn', 'Failed to disable noise cancellation', err);
              });
+             throw e;
          }
      }
      /**
@@ -11064,6 +11147,9 @@ class Call {
          await this.sfuClient?.leaveAndClose(message ?? reason ?? 'user is leaving the call');
          this.sfuClient = undefined;
          this.dynascaleManager.setSfuClient(undefined);
+         await this.dynascaleManager.dispose();
          this.state.setCallingState(CallingState.LEFT);
          this.state.setParticipants([]);
          this.state.dispose();
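With this change, `call.leave()` also disposes the `DynascaleManager`, which clears any pending subscription update, closes the Safari `AudioContext`, and detaches its document-level click listener, so no extra teardown is needed on the consumer side:

```ts
import type { Call } from '@stream-io/video-client';

declare const call: Call; // assumed: an active call

// 1.23.3: leaving now also releases the audio resources shown above.
await call.leave();
```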
@@ -13787,7 +13871,7 @@ class StreamClient {
          this.getUserAgent = () => {
              if (!this.cachedUserAgent) {
                  const { clientAppIdentifier = {} } = this.options;
-                 const { sdkName = 'js', sdkVersion = "1.23.1", ...extras } = clientAppIdentifier;
+                 const { sdkName = 'js', sdkVersion = "1.23.3", ...extras } = clientAppIdentifier;
                  this.cachedUserAgent = [
                      `stream-video-${sdkName}-v${sdkVersion}`,
                      ...Object.entries(extras).map(([key, value]) => `${key}=${value}`),