@stream-io/video-client 1.23.2 → 1.23.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs.js CHANGED
@@ -5033,6 +5033,9 @@ class CallState {
5033
5033
  return nextQueue.slice(-maxVisibleCaptions);
5034
5034
  });
5035
5035
  };
5036
+ this.rawParticipants$ = this.participantsSubject
5037
+ .asObservable()
5038
+ .pipe(rxjs.shareReplay({ bufferSize: 1, refCount: true }));
5036
5039
  this.participants$ = this.participantsSubject.asObservable().pipe(
5037
5040
  // maintain stable-sort by mutating the participants stored
5038
5041
  // in the original subject
@@ -5206,6 +5209,12 @@ class CallState {
5206
5209
  get participants() {
5207
5210
  return this.getCurrentValue(this.participants$);
5208
5211
  }
5212
+ /**
5213
+ * The stable list of participants in the current call, unsorted.
5214
+ */
5215
+ get rawParticipants() {
5216
+ return this.getCurrentValue(this.rawParticipants$);
5217
+ }
5209
5218
  /**
5210
5219
  * The local participant in the current call.
5211
5220
  */
@@ -5678,7 +5687,7 @@ const aggregate = (stats) => {
5678
5687
  return report;
5679
5688
  };
5680
5689
 
5681
- const version = "1.23.2";
5690
+ const version = "1.23.3";
5682
5691
  const [major, minor, patch] = version.split('.');
5683
5692
  let sdkInfo = {
5684
5693
  type: SdkType.PLAIN_JAVASCRIPT,
@@ -8435,6 +8444,20 @@ class DynascaleManager {
8435
8444
  true,
8436
8445
  };
8437
8446
  }), rxjs.shareReplay(1));
8447
+ /**
8448
+ * Disposes the allocated resources and closes the audio context if it was created.
8449
+ */
8450
+ this.dispose = async () => {
8451
+ if (this.pendingSubscriptionsUpdate) {
8452
+ clearTimeout(this.pendingSubscriptionsUpdate);
8453
+ }
8454
+ const context = this.getOrCreateAudioContext();
8455
+ if (context && context.state !== 'closed') {
8456
+ document.removeEventListener('click', this.resumeAudioContext);
8457
+ await context.close();
8458
+ this.audioContext = undefined;
8459
+ }
8460
+ };
8438
8461
  this.setVideoTrackSubscriptionOverrides = (override, sessionIds) => {
8439
8462
  if (!sessionIds) {
8440
8463
  return setCurrentValue(this.videoTrackSubscriptionOverridesSubject, override ? { [globalOverrideKey]: override } : {});
@@ -8550,7 +8573,7 @@ class DynascaleManager {
8550
8573
  });
8551
8574
  this.applyTrackSubscriptions(debounceType);
8552
8575
  };
8553
- const participant$ = this.callState.participants$.pipe(rxjs.map((participants) => participants.find((participant) => participant.sessionId === sessionId)), rxjs.takeWhile((participant) => !!participant), rxjs.distinctUntilChanged(), rxjs.shareReplay({ bufferSize: 1, refCount: true }));
8576
+ const participant$ = this.callState.participants$.pipe(rxjs.map((ps) => ps.find((p) => p.sessionId === sessionId)), rxjs.takeWhile((participant) => !!participant), rxjs.distinctUntilChanged(), rxjs.shareReplay({ bufferSize: 1, refCount: true }));
8554
8577
  /**
8555
8578
  * Since the video elements are now being removed from the DOM (React SDK) upon
8556
8579
  * visibility change, this subscription is not in use and stays here only for the
@@ -8678,7 +8701,24 @@ class DynascaleManager {
8678
8701
  const participant = this.callState.findParticipantBySessionId(sessionId);
8679
8702
  if (!participant || participant.isLocalParticipant)
8680
8703
  return;
8681
- const participant$ = this.callState.participants$.pipe(rxjs.map((participants) => participants.find((p) => p.sessionId === sessionId)), rxjs.takeWhile((p) => !!p), rxjs.distinctUntilChanged(), rxjs.shareReplay({ bufferSize: 1, refCount: true }));
8704
+ const participant$ = this.callState.participants$.pipe(rxjs.map((ps) => ps.find((p) => p.sessionId === sessionId)), rxjs.takeWhile((p) => !!p), rxjs.distinctUntilChanged(), rxjs.shareReplay({ bufferSize: 1, refCount: true }));
8705
+ const updateSinkId = (deviceId, audioContext) => {
8706
+ if (!deviceId)
8707
+ return;
8708
+ if ('setSinkId' in audioElement) {
8709
+ audioElement.setSinkId(deviceId).catch((e) => {
8710
+ this.logger('warn', `Can't to set AudioElement sinkId`, e);
8711
+ });
8712
+ }
8713
+ if (audioContext && 'setSinkId' in audioContext) {
8714
+ // @ts-expect-error setSinkId is not available in all browsers
8715
+ audioContext.setSinkId(deviceId).catch((e) => {
8716
+ this.logger('warn', `Can't to set AudioContext sinkId`, e);
8717
+ });
8718
+ }
8719
+ };
8720
+ let sourceNode = undefined;
8721
+ let gainNode = undefined;
8682
8722
  const updateMediaStreamSubscription = participant$
8683
8723
  .pipe(rxjs.distinctUntilKeyChanged(trackType === 'screenShareAudioTrack'
8684
8724
  ? 'screenShareAudioStream'
@@ -8691,40 +8731,82 @@ class DynascaleManager {
8691
8731
  return;
8692
8732
  setTimeout(() => {
8693
8733
  audioElement.srcObject = source ?? null;
8694
- if (audioElement.srcObject) {
8734
+ if (!source)
8735
+ return;
8736
+ // Safari has a special quirk that prevents playing audio until the user
8737
+ // interacts with the page or focuses on the tab where the call happens.
8738
+ // This is a workaround for the issue where:
8739
+ // - A and B are in a call
8740
+ // - A switches to another tab
8741
+ // - B mutes their microphone and unmutes it
8742
+ // - A does not hear B's unmuted audio until they focus the tab
8743
+ const audioContext = this.getOrCreateAudioContext();
8744
+ if (audioContext) {
8745
+ // we will play audio through the audio context in Safari
8746
+ audioElement.muted = true;
8747
+ sourceNode?.disconnect();
8748
+ sourceNode = audioContext.createMediaStreamSource(source);
8749
+ gainNode ?? (gainNode = audioContext.createGain());
8750
+ gainNode.gain.value = p.audioVolume ?? this.speaker.state.volume;
8751
+ sourceNode.connect(gainNode).connect(audioContext.destination);
8752
+ this.resumeAudioContext();
8753
+ }
8754
+ else {
8755
+ // we will play audio directly through the audio element in other browsers
8756
+ audioElement.muted = false;
8695
8757
  audioElement.play().catch((e) => {
8696
- this.logger('warn', `Failed to play stream`, e);
8758
+ this.logger('warn', `Failed to play audio stream`, e);
8697
8759
  });
8698
- // audio output device shall be set after the audio element is played
8699
- // otherwise, the browser will not pick it up, and will always
8700
- // play audio through the system's default device
8701
- const { selectedDevice } = this.speaker.state;
8702
- if (selectedDevice && 'setSinkId' in audioElement) {
8703
- audioElement.setSinkId(selectedDevice);
8704
- }
8705
8760
  }
8761
+ const { selectedDevice } = this.speaker.state;
8762
+ if (selectedDevice)
8763
+ updateSinkId(selectedDevice, audioContext);
8706
8764
  });
8707
8765
  });
8708
8766
  const sinkIdSubscription = !('setSinkId' in audioElement)
8709
8767
  ? null
8710
8768
  : this.speaker.state.selectedDevice$.subscribe((deviceId) => {
8711
- if (deviceId) {
8712
- audioElement.setSinkId(deviceId);
8713
- }
8769
+ const audioContext = this.getOrCreateAudioContext();
8770
+ updateSinkId(deviceId, audioContext);
8714
8771
  });
8715
8772
  const volumeSubscription = rxjs.combineLatest([
8716
8773
  this.speaker.state.volume$,
8717
8774
  participant$.pipe(rxjs.distinctUntilKeyChanged('audioVolume')),
8718
8775
  ]).subscribe(([volume, p]) => {
8719
- audioElement.volume = p.audioVolume ?? volume;
8776
+ const participantVolume = p.audioVolume ?? volume;
8777
+ audioElement.volume = participantVolume;
8778
+ if (gainNode)
8779
+ gainNode.gain.value = participantVolume;
8720
8780
  });
8721
8781
  audioElement.autoplay = true;
8722
8782
  return () => {
8723
8783
  sinkIdSubscription?.unsubscribe();
8724
8784
  volumeSubscription.unsubscribe();
8725
8785
  updateMediaStreamSubscription.unsubscribe();
8786
+ audioElement.srcObject = null;
8787
+ sourceNode?.disconnect();
8788
+ gainNode?.disconnect();
8726
8789
  };
8727
8790
  };
8791
+ this.getOrCreateAudioContext = () => {
8792
+ if (this.audioContext || !isSafari())
8793
+ return this.audioContext;
8794
+ const context = new AudioContext();
8795
+ if (context.state === 'suspended') {
8796
+ document.addEventListener('click', this.resumeAudioContext);
8797
+ }
8798
+ return (this.audioContext = context);
8799
+ };
8800
+ this.resumeAudioContext = () => {
8801
+ if (this.audioContext?.state === 'suspended') {
8802
+ this.audioContext
8803
+ .resume()
8804
+ .catch((err) => this.logger('warn', `Can't resume audio context`, err))
8805
+ .then(() => {
8806
+ document.removeEventListener('click', this.resumeAudioContext);
8807
+ });
8808
+ }
8809
+ };
8728
8810
  this.callState = callState;
8729
8811
  this.speaker = speaker;
8730
8812
  }
@@ -9507,10 +9589,22 @@ class InputMediaDeviceManager {
9507
9589
  }
9508
9590
  }
9509
9591
  async applySettingsToStream() {
9510
- await withCancellation(this.statusChangeConcurrencyTag, async () => {
9592
+ await withCancellation(this.statusChangeConcurrencyTag, async (signal) => {
9511
9593
  if (this.enabled) {
9512
- await this.muteStream();
9513
- await this.unmuteStream();
9594
+ try {
9595
+ await this.muteStream();
9596
+ this.state.setStatus('disabled');
9597
+ if (signal.aborted) {
9598
+ return;
9599
+ }
9600
+ await this.unmuteStream();
9601
+ this.state.setStatus('enabled');
9602
+ }
9603
+ finally {
9604
+ if (!signal.aborted) {
9605
+ this.state.setPendingStatus(this.state.status);
9606
+ }
9607
+ }
9514
9608
  }
9515
9609
  });
9516
9610
  }
@@ -9576,130 +9670,122 @@ class InputMediaDeviceManager {
9576
9670
  this.logger('debug', 'Starting stream');
9577
9671
  let stream;
9578
9672
  let rootStream;
9579
- try {
9580
- if (this.state.mediaStream &&
9581
- this.getTracks().every((t) => t.readyState === 'live')) {
9582
- stream = this.state.mediaStream;
9583
- this.enableTracks();
9584
- }
9585
- else {
9586
- const defaultConstraints = this.state.defaultConstraints;
9587
- const constraints = {
9588
- ...defaultConstraints,
9589
- deviceId: this.state.selectedDevice
9590
- ? { exact: this.state.selectedDevice }
9591
- : undefined,
9592
- };
9593
- /**
9594
- * Chains two media streams together.
9595
- *
9596
- * In our case, filters MediaStreams are derived from their parent MediaStream.
9597
- * However, once a child filter's track is stopped,
9598
- * the tracks of the parent MediaStream aren't automatically stopped.
9599
- * This leads to a situation where the camera indicator light is still on
9600
- * even though the user stopped publishing video.
9601
- *
9602
- * This function works around this issue by stopping the parent MediaStream's tracks
9603
- * as well once the child filter's tracks are stopped.
9604
- *
9605
- * It works by patching the stop() method of the child filter's tracks to also stop
9606
- * the parent MediaStream's tracks of the same type. Here we assume that
9607
- * the parent MediaStream has only one track of each type.
9608
- *
9609
- * @param parentStream the parent MediaStream. Omit for the root stream.
9610
- */
9611
- const chainWith = (parentStream) => async (filterStream) => {
9612
- if (!parentStream)
9613
- return filterStream;
9614
- // TODO OL: take care of track.enabled property as well
9615
- const parent = await parentStream;
9616
- filterStream.getTracks().forEach((track) => {
9617
- const originalStop = track.stop;
9618
- track.stop = function stop() {
9619
- originalStop.call(track);
9620
- parent.getTracks().forEach((parentTrack) => {
9621
- if (parentTrack.kind === track.kind) {
9622
- parentTrack.stop();
9623
- }
9624
- });
9625
- };
9626
- });
9627
- parent.getTracks().forEach((parentTrack) => {
9628
- // When the parent stream abruptly ends, we propagate the event
9629
- // to the filter stream.
9630
- // This usually happens when the camera/microphone permissions
9631
- // are revoked or when the device is disconnected.
9632
- const handleParentTrackEnded = () => {
9633
- filterStream.getTracks().forEach((track) => {
9634
- if (parentTrack.kind !== track.kind)
9635
- return;
9636
- track.stop();
9637
- track.dispatchEvent(new Event('ended')); // propagate the event
9638
- });
9639
- };
9640
- parentTrack.addEventListener('ended', handleParentTrackEnded);
9641
- this.subscriptions.push(() => {
9642
- parentTrack.removeEventListener('ended', handleParentTrackEnded);
9643
- });
9644
- });
9673
+ if (this.state.mediaStream &&
9674
+ this.getTracks().every((t) => t.readyState === 'live')) {
9675
+ stream = this.state.mediaStream;
9676
+ this.enableTracks();
9677
+ }
9678
+ else {
9679
+ const defaultConstraints = this.state.defaultConstraints;
9680
+ const constraints = {
9681
+ ...defaultConstraints,
9682
+ deviceId: this.state.selectedDevice
9683
+ ? { exact: this.state.selectedDevice }
9684
+ : undefined,
9685
+ };
9686
+ /**
9687
+ * Chains two media streams together.
9688
+ *
9689
+ * In our case, filters MediaStreams are derived from their parent MediaStream.
9690
+ * However, once a child filter's track is stopped,
9691
+ * the tracks of the parent MediaStream aren't automatically stopped.
9692
+ * This leads to a situation where the camera indicator light is still on
9693
+ * even though the user stopped publishing video.
9694
+ *
9695
+ * This function works around this issue by stopping the parent MediaStream's tracks
9696
+ * as well once the child filter's tracks are stopped.
9697
+ *
9698
+ * It works by patching the stop() method of the child filter's tracks to also stop
9699
+ * the parent MediaStream's tracks of the same type. Here we assume that
9700
+ * the parent MediaStream has only one track of each type.
9701
+ *
9702
+ * @param parentStream the parent MediaStream. Omit for the root stream.
9703
+ */
9704
+ const chainWith = (parentStream) => async (filterStream) => {
9705
+ if (!parentStream)
9645
9706
  return filterStream;
9646
- };
9647
- // the rootStream represents the stream coming from the actual device
9648
- // e.g. camera or microphone stream
9649
- rootStream = this.getStream(constraints);
9650
- // we publish the last MediaStream of the chain
9651
- stream = await this.filters.reduce((parent, entry) => parent
9652
- .then((inputStream) => {
9653
- const { stop, output } = entry.start(inputStream);
9654
- entry.stop = stop;
9655
- return output;
9656
- })
9657
- .then(chainWith(parent), (error) => {
9658
- this.logger('warn', 'Filter failed to start and will be ignored', error);
9659
- return parent;
9660
- }), rootStream);
9661
- }
9662
- if (this.call.state.callingState === exports.CallingState.JOINED) {
9663
- await this.publishStream(stream);
9664
- }
9665
- if (this.state.mediaStream !== stream) {
9666
- this.state.setMediaStream(stream, await rootStream);
9667
- const handleTrackEnded = async () => {
9668
- await this.statusChangeSettled();
9669
- if (this.enabled) {
9670
- this.isTrackStoppedDueToTrackEnd = true;
9671
- setTimeout(() => {
9672
- this.isTrackStoppedDueToTrackEnd = false;
9673
- }, 2000);
9674
- await this.disable();
9675
- }
9676
- };
9677
- const createTrackMuteHandler = (muted) => () => {
9678
- if (!isMobile() || this.trackType !== TrackType.VIDEO)
9679
- return;
9680
- this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
9681
- this.logger('warn', 'Error while notifying track mute state', err);
9682
- });
9683
- };
9684
- stream.getTracks().forEach((track) => {
9685
- const muteHandler = createTrackMuteHandler(true);
9686
- const unmuteHandler = createTrackMuteHandler(false);
9687
- track.addEventListener('mute', muteHandler);
9688
- track.addEventListener('unmute', unmuteHandler);
9689
- track.addEventListener('ended', handleTrackEnded);
9707
+ // TODO OL: take care of track.enabled property as well
9708
+ const parent = await parentStream;
9709
+ filterStream.getTracks().forEach((track) => {
9710
+ const originalStop = track.stop;
9711
+ track.stop = function stop() {
9712
+ originalStop.call(track);
9713
+ parent.getTracks().forEach((parentTrack) => {
9714
+ if (parentTrack.kind === track.kind) {
9715
+ parentTrack.stop();
9716
+ }
9717
+ });
9718
+ };
9719
+ });
9720
+ parent.getTracks().forEach((parentTrack) => {
9721
+ // When the parent stream abruptly ends, we propagate the event
9722
+ // to the filter stream.
9723
+ // This usually happens when the camera/microphone permissions
9724
+ // are revoked or when the device is disconnected.
9725
+ const handleParentTrackEnded = () => {
9726
+ filterStream.getTracks().forEach((track) => {
9727
+ if (parentTrack.kind !== track.kind)
9728
+ return;
9729
+ track.stop();
9730
+ track.dispatchEvent(new Event('ended')); // propagate the event
9731
+ });
9732
+ };
9733
+ parentTrack.addEventListener('ended', handleParentTrackEnded);
9690
9734
  this.subscriptions.push(() => {
9691
- track.removeEventListener('mute', muteHandler);
9692
- track.removeEventListener('unmute', unmuteHandler);
9693
- track.removeEventListener('ended', handleTrackEnded);
9735
+ parentTrack.removeEventListener('ended', handleParentTrackEnded);
9694
9736
  });
9695
9737
  });
9696
- }
9738
+ return filterStream;
9739
+ };
9740
+ // the rootStream represents the stream coming from the actual device
9741
+ // e.g. camera or microphone stream
9742
+ rootStream = this.getStream(constraints);
9743
+ // we publish the last MediaStream of the chain
9744
+ stream = await this.filters.reduce((parent, entry) => parent
9745
+ .then((inputStream) => {
9746
+ const { stop, output } = entry.start(inputStream);
9747
+ entry.stop = stop;
9748
+ return output;
9749
+ })
9750
+ .then(chainWith(parent), (error) => {
9751
+ this.logger('warn', 'Filter failed to start and will be ignored', error);
9752
+ return parent;
9753
+ }), rootStream);
9697
9754
  }
9698
- catch (err) {
9699
- if (rootStream) {
9700
- disposeOfMediaStream(await rootStream);
9701
- }
9702
- throw err;
9755
+ if (this.call.state.callingState === exports.CallingState.JOINED) {
9756
+ await this.publishStream(stream);
9757
+ }
9758
+ if (this.state.mediaStream !== stream) {
9759
+ this.state.setMediaStream(stream, await rootStream);
9760
+ const handleTrackEnded = async () => {
9761
+ await this.statusChangeSettled();
9762
+ if (this.enabled) {
9763
+ this.isTrackStoppedDueToTrackEnd = true;
9764
+ setTimeout(() => {
9765
+ this.isTrackStoppedDueToTrackEnd = false;
9766
+ }, 2000);
9767
+ await this.disable();
9768
+ }
9769
+ };
9770
+ const createTrackMuteHandler = (muted) => () => {
9771
+ if (!isMobile() || this.trackType !== TrackType.VIDEO)
9772
+ return;
9773
+ this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
9774
+ this.logger('warn', 'Error while notifying track mute state', err);
9775
+ });
9776
+ };
9777
+ stream.getTracks().forEach((track) => {
9778
+ const muteHandler = createTrackMuteHandler(true);
9779
+ const unmuteHandler = createTrackMuteHandler(false);
9780
+ track.addEventListener('mute', muteHandler);
9781
+ track.addEventListener('unmute', unmuteHandler);
9782
+ track.addEventListener('ended', handleTrackEnded);
9783
+ this.subscriptions.push(() => {
9784
+ track.removeEventListener('mute', muteHandler);
9785
+ track.removeEventListener('unmute', unmuteHandler);
9786
+ track.removeEventListener('ended', handleTrackEnded);
9787
+ });
9788
+ });
9703
9789
  }
9704
9790
  }
9705
9791
  get mediaDeviceKind() {
@@ -10426,6 +10512,7 @@ class MicrophoneManager extends InputMediaDeviceManager {
10426
10512
  await this.disableNoiseCancellation().catch((err) => {
10427
10513
  this.logger('warn', 'Failed to disable noise cancellation', err);
10428
10514
  });
10515
+ throw e;
10429
10516
  }
10430
10517
  }
10431
10518
  /**
@@ -11062,6 +11149,7 @@ class Call {
11062
11149
  await this.sfuClient?.leaveAndClose(message ?? reason ?? 'user is leaving the call');
11063
11150
  this.sfuClient = undefined;
11064
11151
  this.dynascaleManager.setSfuClient(undefined);
11152
+ await this.dynascaleManager.dispose();
11065
11153
  this.state.setCallingState(exports.CallingState.LEFT);
11066
11154
  this.state.setParticipants([]);
11067
11155
  this.state.dispose();
@@ -13783,7 +13871,7 @@ class StreamClient {
13783
13871
  this.getUserAgent = () => {
13784
13872
  if (!this.cachedUserAgent) {
13785
13873
  const { clientAppIdentifier = {} } = this.options;
13786
- const { sdkName = 'js', sdkVersion = "1.23.2", ...extras } = clientAppIdentifier;
13874
+ const { sdkName = 'js', sdkVersion = "1.23.3", ...extras } = clientAppIdentifier;
13787
13875
  this.cachedUserAgent = [
13788
13876
  `stream-video-${sdkName}-v${sdkVersion}`,
13789
13877
  ...Object.entries(extras).map(([key, value]) => `${key}=${value}`),