@stream-io/video-client 0.7.10 → 0.7.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,20 @@
 
  This file was generated using [@jscutlery/semver](https://github.com/jscutlery/semver).
 
+ ### [0.7.12](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-0.7.11...@stream-io/video-client-0.7.12) (2024-05-03)
+
+
+ ### Features
+
+ * support target_resolution backend setting for screensharing ([#1336](https://github.com/GetStream/stream-video-js/issues/1336)) ([1e9f796](https://github.com/GetStream/stream-video-js/commit/1e9f7963009ac7fc27ee24abc00eb68749cc19d8))
+
+ ### [0.7.11](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-0.7.10...@stream-io/video-client-0.7.11) (2024-05-03)
+
+
+ ### Bug Fixes
+
+ * **devices:** API to disable speaking while muted notifications ([#1335](https://github.com/GetStream/stream-video-js/issues/1335)) ([cdff0e0](https://github.com/GetStream/stream-video-js/commit/cdff0e036bf4afca763e4f7a1563c23e806be190)), closes [#1329](https://github.com/GetStream/stream-video-js/issues/1329)
+
 ### [0.7.10](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-0.7.9...@stream-io/video-client-0.7.10) (2024-04-30)
 
 
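Both entries surface in the code hunks below. The 0.7.11 fix adds opt-out/opt-in methods for the speaking-while-muted notification on the microphone manager. A minimal usage sketch (assuming a `client` and a joined `call`, with `call.microphone` being the `MicrophoneManager` instance these hunks modify):

```js
// Sketch only: client/token setup elided.
const call = client.call('default', 'my-call-id');
await call.join();

// New in 0.7.11: turn the "speaking while muted" notification off and on.
await call.microphone.disableSpeakingWhileMutedNotification();
await call.microphone.enableSpeakingWhileMutedNotification();
```

The 0.7.12 feature requires no client code: when the backend reports a `screensharing.target_resolution` setting, it now drives both the default capture constraints and the default screen-share bitrate, as the `ScreenShareManager` and `Publisher` hunks below show.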
@@ -6420,7 +6420,7 @@ const withSimulcastConstraints = (settings, optimalVideoLayers) => {
         rid: ridMapping[index], // reassign rid
     }));
 };
- const findOptimalScreenSharingLayers = (videoTrack, preferences) => {
+ const findOptimalScreenSharingLayers = (videoTrack, preferences, defaultMaxBitrate = 3000000) => {
     const settings = videoTrack.getSettings();
     return [
         {
@@ -6429,7 +6429,7 @@ const findOptimalScreenSharingLayers = (videoTrack, preferences) => {
             width: settings.width || 0,
             height: settings.height || 0,
             scaleResolutionDownBy: 1,
-             maxBitrate: preferences?.maxBitrate ?? 3000000,
+             maxBitrate: preferences?.maxBitrate ?? defaultMaxBitrate,
             maxFramerate: preferences?.maxFramerate ?? 30,
         },
     ];
@@ -7937,10 +7937,11 @@ class Publisher {
 if (!transceiver) {
     const { settings } = this.state;
     const targetResolution = settings?.video.target_resolution;
+     const screenShareBitrate = settings?.screensharing.target_resolution?.bitrate;
     const videoEncodings = trackType === TrackType.VIDEO
         ? findOptimalVideoLayers(track, targetResolution)
         : trackType === TrackType.SCREEN_SHARE
-             ? findOptimalScreenSharingLayers(track, opts.screenShareSettings)
+             ? findOptimalScreenSharingLayers(track, opts.screenShareSettings, screenShareBitrate)
             : undefined;
     let preferredCodec = opts.preferredCodec;
     if (!preferredCodec && trackType === TrackType.VIDEO) {
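Together with the `findOptimalScreenSharingLayers` change above, this gives the screen-share bitrate a three-step fallback: an explicit per-publish preference wins, then the backend's `target_resolution.bitrate`, then the hard-coded 3 Mbps. A sketch of the effective resolution order (names mirror the identifiers in the hunks; not verbatim SDK code):

```js
// Effective maxBitrate for a screen-share layer after this change:
const maxBitrate =
  opts.screenShareSettings?.maxBitrate ??                // caller preference
  settings?.screensharing.target_resolution?.bitrate ??  // backend setting (new in 0.7.12)
  3000000;                                               // built-in default, 3 Mbps
```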
@@ -9513,7 +9514,7 @@ const toRtcConfiguration = (config) => {
  *
  * @param report the report to flatten.
  */
- const flatten$1 = (report) => {
+ const flatten = (report) => {
     const stats = [];
     report.forEach((s) => {
         stats.push(s);
@@ -9667,7 +9668,7 @@ const createStatsReporter = ({ subscriber, publisher, state, pollingIntervalInMs
 const transform = (report, opts) => {
     const { trackKind, kind } = opts;
     const direction = kind === 'subscriber' ? 'inbound-rtp' : 'outbound-rtp';
-     const stats = flatten$1(report);
+     const stats = flatten(report);
     const streams = stats
         .filter((stat) => stat.type === direction &&
             stat.kind === trackKind)
@@ -9763,8 +9764,8 @@ class SfuStatsReporter {
 this.logger = getLogger(['SfuStatsReporter']);
 this.run = async () => {
     const [subscriberStats, publisherStats] = await Promise.all([
-         this.subscriber.getStats().then(flatten$1).then(JSON.stringify),
-         this.publisher.getStats().then(flatten$1).then(JSON.stringify),
+         this.subscriber.getStats().then(flatten).then(JSON.stringify),
+         this.publisher.getStats().then(flatten).then(JSON.stringify),
     ]);
     await this.sfuClient.sendStats({
         sdk: this.sdkName,
@@ -10737,7 +10738,7 @@ class InputMediaDeviceManager {
             // @ts-expect-error called to dispose the stream in RN
             this.state.mediaStream.release();
         }
-         this.state.setMediaStream(undefined);
+         this.state.setMediaStream(undefined, undefined);
     }
 }
 muteTracks() {
@@ -10772,6 +10773,7 @@ class InputMediaDeviceManager {
 async unmuteStream() {
     this.logger('debug', 'Starting stream');
     let stream;
+     let rootStream;
     if (this.state.mediaStream &&
         this.getTracks().every((t) => t.readyState === 'live')) {
         stream = this.state.mediaStream;
@@ -10837,14 +10839,17 @@ class InputMediaDeviceManager {
         });
         return filterStream;
     };
+     // the rootStream represents the stream coming from the actual device
+     // e.g. camera or microphone stream
+     rootStream = this.getStream(constraints);
     // we publish the last MediaStream of the chain
-     stream = await this.filters.reduce((parent, filter) => parent.then(filter).then(chainWith(parent)), this.getStream(constraints));
+     stream = await this.filters.reduce((parent, filter) => parent.then(filter).then(chainWith(parent)), rootStream);
 }
 if (this.call.state.callingState === CallingState.JOINED) {
     await this.publishStream(stream);
 }
 if (this.state.mediaStream !== stream) {
-     this.state.setMediaStream(stream);
+     this.state.setMediaStream(stream, await rootStream);
     this.getTracks().forEach((track) => {
         track.addEventListener('ended', async () => {
             if (this.enablePromise) {
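The `rootStream` bookkeeping matters because registered filters typically return a brand-new `MediaStream` whose tracks are synthesized (canvas captures, Web Audio graph outputs) and therefore report no `deviceId`. A sketch of the problem this solves, using a hypothetical `applyBlurFilter` for illustration:

```js
// Hypothetical filter: anything that re-captures media behaves this way.
const rootStream = await navigator.mediaDevices.getUserMedia({ video: true });
const filtered = await applyBlurFilter(rootStream); // returns a new MediaStream

rootStream.getVideoTracks()[0]?.getSettings().deviceId; // real camera deviceId
filtered.getVideoTracks()[0]?.getSettings().deviceId;   // typically undefined

// Hence setMediaStream(stream, rootStream) above: publish the filtered
// stream, but read device information from the unfiltered root stream.
```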
@@ -11038,13 +11043,17 @@ class InputMediaDeviceManagerState {
     this.setCurrentValue(this.statusSubject, status);
 }
 /**
+ * Updates the `mediaStream` state variable.
+ *
  * @internal
  * @param stream the stream to set.
+ * @param rootStream the root stream, applicable when filters are used
+ * as this is the stream that holds the actual deviceId information.
  */
- setMediaStream(stream) {
+ setMediaStream(stream, rootStream) {
     this.setCurrentValue(this.mediaStreamSubject, stream);
-     if (stream) {
-         this.setDevice(this.getDeviceIdFromStream(stream));
+     if (rootStream) {
+         this.setDevice(this.getDeviceIdFromStream(rootStream));
     }
 }
 /**
@@ -11099,8 +11108,8 @@ class CameraManagerState extends InputMediaDeviceManagerState {
 /**
  * @internal
  */
- setMediaStream(stream) {
-     super.setMediaStream(stream);
+ setMediaStream(stream, rootStream) {
+     super.setMediaStream(stream, rootStream);
     if (stream) {
         // RN getSettings() doesn't return facingMode, so we don't verify camera direction
         const direction = isReactNative()
@@ -11112,7 +11121,8 @@
     }
 }
 getDeviceIdFromStream(stream) {
-     return stream.getVideoTracks()[0]?.getSettings().deviceId;
+     const [track] = stream.getVideoTracks();
+     return track?.getSettings().deviceId;
 }
 }
 
@@ -11205,8 +11215,8 @@ class CameraManager extends InputMediaDeviceManager {
 }
 
 class MicrophoneManagerState extends InputMediaDeviceManagerState {
- constructor() {
-     super('disable-tracks',
+ constructor(disableMode) {
+     super(disableMode,
     // `microphone` is not in the W3C standard yet,
     // but it's supported by Chrome and Safari.
     'microphone');
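The new `disableMode` parameter controls what muting does to the underlying tracks. In this SDK's terms, `'stop-tracks'` stops the tracks (releasing the device, so the browser's recording indicator turns off), while `'disable-tracks'` only flips `track.enabled` and keeps the device open; the `MicrophoneManager` hunk further down defaults web to `'stop-tracks'` and React Native to `'disable-tracks'`. A rough sketch of the two behaviors, assuming those semantics:

```js
// Rough sketch of the two disable modes (not SDK code).
function muteWith(disableMode, stream) {
  for (const track of stream.getTracks()) {
    if (disableMode === 'stop-tracks') {
      track.stop(); // device released; must be re-acquired on unmute
    } else {
      track.enabled = false; // device stays open; audio renders as silence
    }
  }
}
```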
@@ -11230,7 +11240,8 @@ class MicrophoneManagerState extends InputMediaDeviceManagerState {
     this.setCurrentValue(this.speakingWhileMutedSubject, isSpeaking);
 }
 getDeviceIdFromStream(stream) {
-     return stream.getAudioTracks()[0]?.getSettings().deviceId;
+     const [track] = stream.getAudioTracks();
+     return track?.getSettings().deviceId;
 }
 }
 
@@ -11287,18 +11298,6 @@ const createSoundDetector = (audioStream, onSoundDetectedStateChanged, options =
     };
 };
 
- /**
-  * Flatten the stats report into an array of stats objects.
-  *
-  * @param report the report to flatten.
-  */
- const flatten = (report) => {
-     const stats = [];
-     report.forEach((s) => {
-         stats.push(s);
-     });
-     return stats;
- };
 const AUDIO_LEVEL_THRESHOLD = 0.2;
 class RNSpeechDetector {
 constructor() {
@@ -11381,20 +11380,24 @@ class RNSpeechDetector {
 }
 
 class MicrophoneManager extends InputMediaDeviceManager {
- constructor(call) {
-     super(call, new MicrophoneManagerState(), TrackType.AUDIO);
-     combineLatest([
+ constructor(call, disableMode = isReactNative()
+     ? 'disable-tracks'
+     : 'stop-tracks') {
+     super(call, new MicrophoneManagerState(disableMode), TrackType.AUDIO);
+     this.speakingWhileMutedNotificationEnabled = true;
+     this.subscriptions.push(createSubscription(combineLatest([
         this.call.state.callingState$,
         this.call.state.ownCapabilities$,
         this.state.selectedDevice$,
         this.state.status$,
-     ]).subscribe(async ([callingState, ownCapabilities, deviceId, status]) => {
-         if (callingState !== CallingState.JOINED) {
-             if (callingState === CallingState.LEFT) {
-                 await this.stopSpeakingWhileMutedDetection();
-             }
-             return;
+     ]), async ([callingState, ownCapabilities, deviceId, status]) => {
+         if (callingState === CallingState.LEFT) {
+             await this.stopSpeakingWhileMutedDetection();
         }
+         if (callingState !== CallingState.JOINED)
+             return;
+         if (!this.speakingWhileMutedNotificationEnabled)
+             return;
         if (ownCapabilities.includes(OwnCapability.SEND_AUDIO)) {
             if (status === 'disabled') {
                 await this.startSpeakingWhileMutedDetection(deviceId);
@@ -11406,7 +11409,7 @@ class MicrophoneManager extends InputMediaDeviceManager {
         else {
             await this.stopSpeakingWhileMutedDetection();
         }
-     });
+     }));
     this.subscriptions.push(createSubscription(this.call.state.callingState$, (callingState) => {
         // do nothing when noise filtering isn't turned on
         if (!this.noiseCancellationRegistration || !this.noiseCancellation)
@@ -11501,6 +11504,22 @@ class MicrophoneManager extends InputMediaDeviceManager {
     });
     await this.call.notifyNoiseCancellationStopped();
 }
+ /**
+ * Enables speaking while muted notification.
+ */
+ async enableSpeakingWhileMutedNotification() {
+     this.speakingWhileMutedNotificationEnabled = true;
+     if (this.state.status === 'disabled') {
+         await this.startSpeakingWhileMutedDetection(this.state.selectedDevice);
+     }
+ }
+ /**
+ * Disables speaking while muted notification.
+ */
+ async disableSpeakingWhileMutedNotification() {
+     this.speakingWhileMutedNotificationEnabled = false;
+     await this.stopSpeakingWhileMutedDetection();
+ }
 getDevices() {
     return getAudioDevices();
 }
@@ -11538,9 +11557,8 @@ class MicrophoneManager extends InputMediaDeviceManager {
     }
 }
 async stopSpeakingWhileMutedDetection() {
-     if (!this.soundDetectorCleanup) {
+     if (!this.soundDetectorCleanup)
         return;
-     }
     this.state.setSpeakingWhileMuted(false);
     try {
         await this.soundDetectorCleanup();
@@ -11605,6 +11623,17 @@ class ScreenShareState extends InputMediaDeviceManagerState {
 class ScreenShareManager extends InputMediaDeviceManager {
 constructor(call) {
     super(call, new ScreenShareState(), TrackType.SCREEN_SHARE);
+     this.subscriptions.push(createSubscription(call.state.settings$, (settings) => {
+         const maybeTargetResolution = settings?.screensharing.target_resolution;
+         if (maybeTargetResolution) {
+             this.setDefaultConstraints({
+                 video: {
+                     width: maybeTargetResolution.width,
+                     height: maybeTargetResolution.height,
+                 },
+             });
+         }
+     }));
 }
 /**
  * Will enable screen share audio options on supported platforms.
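With this subscription, a backend-configured `screensharing.target_resolution` becomes the default capture constraint, so a plain `call.screenShare.enable()` requests the configured size. The shape of the setting being consumed (illustrative values, assuming a 1920×1080 / 3 Mbps dashboard configuration):

```js
// Illustrative settings payload; actual values come from the call settings.
const settings = {
  screensharing: {
    target_resolution: { width: 1920, height: 1080, bitrate: 3000000 },
  },
};
// -> setDefaultConstraints({ video: { width: 1920, height: 1080 } }) here,
// -> and the Publisher hunk above uses target_resolution.bitrate as the
//    default maxBitrate for the screen-share layer.
```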
@@ -14865,7 +14894,7 @@ class StreamClient {
     });
 };
 this.getUserAgent = () => {
-     const version = "0.7.10" ;
+     const version = "0.7.12" ;
     return (this.userAgent ||
         `stream-video-javascript-client-${this.node ? 'node' : 'browser'}-${version}`);
 };