@stream-io/video-client 1.32.0 → 1.33.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/CHANGELOG.md +10 -0
  2. package/dist/index.browser.es.js +307 -74
  3. package/dist/index.browser.es.js.map +1 -1
  4. package/dist/index.cjs.js +308 -75
  5. package/dist/index.cjs.js.map +1 -1
  6. package/dist/index.es.js +307 -74
  7. package/dist/index.es.js.map +1 -1
  8. package/dist/src/Call.d.ts +3 -2
  9. package/dist/src/devices/AudioDeviceManager.d.ts +25 -0
  10. package/dist/src/devices/AudioDeviceManagerState.d.ts +24 -0
  11. package/dist/src/devices/CameraManager.d.ts +2 -2
  12. package/dist/src/devices/CameraManagerState.d.ts +3 -4
  13. package/dist/src/devices/{InputMediaDeviceManager.d.ts → DeviceManager.d.ts} +6 -6
  14. package/dist/src/devices/{InputMediaDeviceManagerState.d.ts → DeviceManagerState.d.ts} +4 -4
  15. package/dist/src/devices/MicrophoneManager.d.ts +5 -3
  16. package/dist/src/devices/MicrophoneManagerState.d.ts +6 -10
  17. package/dist/src/devices/ScreenShareManager.d.ts +4 -2
  18. package/dist/src/devices/ScreenShareState.d.ts +6 -2
  19. package/dist/src/devices/SpeakerState.d.ts +4 -4
  20. package/dist/src/devices/index.d.ts +2 -2
  21. package/dist/src/gen/coordinator/index.d.ts +169 -2
  22. package/dist/src/gen/video/sfu/models/models.d.ts +43 -0
  23. package/dist/src/rtc/BasePeerConnection.d.ts +2 -12
  24. package/dist/src/rtc/Publisher.d.ts +9 -6
  25. package/dist/src/rtc/Subscriber.d.ts +2 -1
  26. package/dist/src/rtc/TransceiverCache.d.ts +10 -11
  27. package/dist/src/rtc/index.d.ts +1 -1
  28. package/dist/src/rtc/{videoLayers.d.ts → layers.d.ts} +7 -1
  29. package/dist/src/rtc/types.d.ts +31 -0
  30. package/package.json +3 -2
  31. package/src/Call.ts +13 -6
  32. package/src/__tests__/Call.publishing.test.ts +14 -3
  33. package/src/__tests__/StreamVideoClient.api.test.ts +1 -1
  34. package/src/devices/AudioDeviceManager.ts +61 -0
  35. package/src/devices/AudioDeviceManagerState.ts +44 -0
  36. package/src/devices/CameraManager.ts +4 -4
  37. package/src/devices/CameraManagerState.ts +9 -8
  38. package/src/devices/{InputMediaDeviceManager.ts → DeviceManager.ts} +11 -8
  39. package/src/devices/{InputMediaDeviceManagerState.ts → DeviceManagerState.ts} +7 -4
  40. package/src/devices/MicrophoneManager.ts +26 -6
  41. package/src/devices/MicrophoneManagerState.ts +18 -19
  42. package/src/devices/ScreenShareManager.ts +23 -4
  43. package/src/devices/ScreenShareState.ts +11 -3
  44. package/src/devices/SpeakerState.ts +6 -14
  45. package/src/devices/__tests__/CameraManager.test.ts +1 -0
  46. package/src/devices/__tests__/{InputMediaDeviceManager.test.ts → DeviceManager.test.ts} +4 -4
  47. package/src/devices/__tests__/{InputMediaDeviceManagerFilters.test.ts → DeviceManagerFilters.test.ts} +4 -4
  48. package/src/devices/__tests__/{InputMediaDeviceManagerState.test.ts → DeviceManagerState.test.ts} +2 -2
  49. package/src/devices/__tests__/MicrophoneManager.test.ts +41 -1
  50. package/src/devices/__tests__/NoiseCancellationStub.ts +3 -1
  51. package/src/devices/__tests__/ScreenShareManager.test.ts +5 -1
  52. package/src/devices/index.ts +2 -2
  53. package/src/events/__tests__/internal.test.ts +25 -11
  54. package/src/gen/coordinator/index.ts +169 -2
  55. package/src/gen/video/sfu/models/models.ts +65 -0
  56. package/src/rtc/BasePeerConnection.ts +1 -16
  57. package/src/rtc/Publisher.ts +74 -31
  58. package/src/rtc/Subscriber.ts +2 -4
  59. package/src/rtc/TransceiverCache.ts +23 -27
  60. package/src/rtc/__tests__/Publisher.test.ts +61 -29
  61. package/src/rtc/__tests__/{videoLayers.test.ts → layers.test.ts} +76 -1
  62. package/src/rtc/index.ts +2 -1
  63. package/src/rtc/{videoLayers.ts → layers.ts} +28 -7
  64. package/src/rtc/types.ts +44 -0
  65. package/src/sorting/presets.ts +2 -2
  66. package/src/store/CallState.ts +36 -10
  67. package/src/store/__tests__/CallState.test.ts +20 -2
@@ -1,20 +1,10 @@
1
1
  import type { CallEventListener, Logger } from '../coordinator/connection/types';
2
2
  import { CallState } from '../store';
3
- import { PeerType, TrackType, WebsocketReconnectStrategy } from '../gen/video/sfu/models/models';
3
+ import { PeerType, TrackType } from '../gen/video/sfu/models/models';
4
4
  import { StreamSfuClient } from '../StreamSfuClient';
5
5
  import { AllSfuEvents, Dispatcher } from './Dispatcher';
6
6
  import { StatsTracer, Tracer } from '../stats';
7
- export type OnReconnectionNeeded = (kind: WebsocketReconnectStrategy, reason: string) => void;
8
- export type BasePeerConnectionOpts = {
9
- sfuClient: StreamSfuClient;
10
- state: CallState;
11
- connectionConfig?: RTCConfiguration;
12
- dispatcher: Dispatcher;
13
- onReconnectionNeeded?: OnReconnectionNeeded;
14
- tag: string;
15
- enableTracing: boolean;
16
- iceRestartDelay?: number;
17
- };
7
+ import { BasePeerConnectionOpts } from './types';
18
8
  /**
19
9
  * A base class for the `Publisher` and `Subscriber` classes.
20
10
  * @internal
@@ -1,8 +1,6 @@
1
- import { BasePeerConnection, BasePeerConnectionOpts } from './BasePeerConnection';
2
- import { PublishOption, TrackInfo, TrackType } from '../gen/video/sfu/models/models';
3
- export type PublisherConstructorOpts = BasePeerConnectionOpts & {
4
- publishOptions: PublishOption[];
5
- };
1
+ import { BasePeerConnection } from './BasePeerConnection';
2
+ import { PublisherConstructorOpts, TrackPublishOptions } from './types';
3
+ import { TrackInfo, TrackType } from '../gen/video/sfu/models/models';
6
4
  /**
7
5
  * The `Publisher` is responsible for publishing/unpublishing media streams to/from the SFU
8
6
  *
@@ -28,8 +26,9 @@ export declare class Publisher extends BasePeerConnection {
28
26
  *
29
27
  * @param track the track to publish.
30
28
  * @param trackType the track type to publish.
29
+ * @param options the publish options to use.
31
30
  */
32
- publish: (track: MediaStreamTrack, trackType: TrackType) => Promise<void>;
31
+ publish: (track: MediaStreamTrack, trackType: TrackType, options?: TrackPublishOptions) => Promise<void>;
33
32
  /**
34
33
  * Adds a new transceiver carrying the given track to the peer connection.
35
34
  */
@@ -38,6 +37,10 @@ export declare class Publisher extends BasePeerConnection {
38
37
  * Updates the transceiver with the given track and track type.
39
38
  */
40
39
  private updateTransceiver;
40
+ /**
41
+ * Updates the publish options for the given track type.
42
+ */
43
+ private updateAudioPublishOptions;
41
44
  /**
42
45
  * Synchronizes the current Publisher state with the provided publish options.
43
46
  */
@@ -1,4 +1,5 @@
1
- import { BasePeerConnection, BasePeerConnectionOpts } from './BasePeerConnection';
1
+ import { BasePeerConnection } from './BasePeerConnection';
2
+ import { BasePeerConnectionOpts } from './types';
2
3
  /**
3
4
  * A wrapper around the `RTCPeerConnection` that handles the incoming
4
5
  * media streams from the SFU.
@@ -1,9 +1,6 @@
1
1
  import { PublishOption } from '../gen/video/sfu/models/models';
2
- import { OptimalVideoLayer } from './videoLayers';
3
- type TransceiverId = {
4
- publishOption: PublishOption;
5
- transceiver: RTCRtpTransceiver;
6
- };
2
+ import type { OptimalVideoLayer } from './layers';
3
+ import type { PublishBundle } from './types';
7
4
  export declare class TransceiverCache {
8
5
  private readonly cache;
9
6
  private readonly layers;
@@ -16,11 +13,15 @@ export declare class TransceiverCache {
16
13
  /**
17
14
  * Adds a transceiver to the cache.
18
15
  */
19
- add: (publishOption: PublishOption, transceiver: RTCRtpTransceiver) => void;
16
+ add: (bundle: PublishBundle) => void;
20
17
  /**
21
18
  * Gets the transceiver for the given publish option.
22
19
  */
23
- get: (publishOption: PublishOption) => RTCRtpTransceiver | undefined;
20
+ get: (publishOption: PublishOption) => PublishBundle | undefined;
21
+ /**
22
+ * Updates the cached bundle with the given patch.
23
+ */
24
+ update: (publishOption: PublishOption, patch: Partial<PublishBundle>) => void;
24
25
  /**
25
26
  * Checks if the cache has the given publish option.
26
27
  */
@@ -28,11 +29,11 @@ export declare class TransceiverCache {
28
29
  /**
29
30
  * Finds the first transceiver that satisfies the given predicate.
30
31
  */
31
- find: (predicate: (item: TransceiverId) => boolean) => TransceiverId | undefined;
32
+ find: (predicate: (bundle: PublishBundle) => boolean) => PublishBundle | undefined;
32
33
  /**
33
34
  * Provides all the items in the cache.
34
35
  */
35
- items: () => TransceiverId[];
36
+ items: () => PublishBundle[];
36
37
  /**
37
38
  * Init index of the transceiver in the cache.
38
39
  */
@@ -45,7 +46,5 @@ export declare class TransceiverCache {
45
46
  * Sets the video layers for the given track.
46
47
  */
47
48
  setLayers: (publishOption: PublishOption, layers?: OptimalVideoLayer[]) => void;
48
- private findTransceiver;
49
49
  private findLayer;
50
50
  }
51
- export {};
@@ -4,7 +4,7 @@ export * from './IceTrickleBuffer';
4
4
  export * from './Publisher';
5
5
  export * from './Subscriber';
6
6
  export * from './signal';
7
- export * from './videoLayers';
8
7
  export * from './helpers/sdp';
9
8
  export * from './helpers/tracks';
10
9
  export * from './helpers/rtcConfiguration';
10
+ export * from './types';
@@ -1,9 +1,15 @@
1
1
  import { PublishOption, VideoDimension, VideoLayer, VideoQuality } from '../gen/video/sfu/models/models';
2
+ import { TrackPublishOptions } from './types';
2
3
  export type OptimalVideoLayer = RTCRtpEncodingParameters & {
3
4
  width: number;
4
5
  height: number;
5
6
  scalabilityMode?: string;
6
7
  };
8
+ /**
9
+ * Prepares the audio layer for the given track.
10
+ * Based on the provided audio bitrate profile, we apply the appropriate bitrate.
11
+ */
12
+ export declare const computeAudioLayers: (publishOption: PublishOption, options: TrackPublishOptions) => RTCRtpEncodingParameters[] | undefined;
7
13
  /**
8
14
  * In SVC, we need to send only one video encoding (layer).
9
15
  * this layer will have the additional spatial and temporal layers
@@ -11,7 +17,7 @@ export type OptimalVideoLayer = RTCRtpEncodingParameters & {
11
17
  *
12
18
  * @param layers the layers to process.
13
19
  */
14
- export declare const toSvcEncodings: (layers: OptimalVideoLayer[] | undefined) => RTCRtpEncodingParameters[] | undefined;
20
+ export declare const toSvcEncodings: (layers: RTCRtpEncodingParameters[] | undefined) => RTCRtpEncodingParameters[] | undefined;
15
21
  /**
16
22
  * Converts the rid to a video quality.
17
23
  */
@@ -0,0 +1,31 @@
1
+ import { AudioBitrateProfile, PublishOption, WebsocketReconnectStrategy } from '../gen/video/sfu/models/models';
2
+ import { StreamSfuClient } from '../StreamSfuClient';
3
+ import { CallState } from '../store';
4
+ import { Dispatcher } from './Dispatcher';
5
+ import type { OptimalVideoLayer } from './layers';
6
+ export type OnReconnectionNeeded = (kind: WebsocketReconnectStrategy, reason: string) => void;
7
+ export type BasePeerConnectionOpts = {
8
+ sfuClient: StreamSfuClient;
9
+ state: CallState;
10
+ connectionConfig?: RTCConfiguration;
11
+ dispatcher: Dispatcher;
12
+ onReconnectionNeeded?: OnReconnectionNeeded;
13
+ tag: string;
14
+ enableTracing: boolean;
15
+ iceRestartDelay?: number;
16
+ };
17
+ export type PublisherConstructorOpts = BasePeerConnectionOpts & {
18
+ publishOptions: PublishOption[];
19
+ };
20
+ export type TrackPublishOptions = {
21
+ audioBitrateProfile?: AudioBitrateProfile;
22
+ };
23
+ export type PublishBundle = {
24
+ publishOption: PublishOption;
25
+ transceiver: RTCRtpTransceiver;
26
+ options: TrackPublishOptions;
27
+ };
28
+ export type TrackLayersCache = {
29
+ publishOption: PublishOption;
30
+ layers: OptimalVideoLayer[];
31
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@stream-io/video-client",
3
- "version": "1.32.0",
3
+ "version": "1.33.0",
4
4
  "main": "dist/index.cjs.js",
5
5
  "module": "dist/index.es.js",
6
6
  "browser": "dist/index.browser.es.js",
@@ -40,8 +40,9 @@
40
40
  "@openapitools/openapi-generator-cli": "^2.13.4",
41
41
  "@rollup/plugin-replace": "^6.0.2",
42
42
  "@rollup/plugin-typescript": "^12.1.2",
43
- "@stream-io/audio-filters-web": "^0.4.3",
43
+ "@stream-io/audio-filters-web": "^0.5.0",
44
44
  "@stream-io/node-sdk": "^0.4.24",
45
+ "@total-typescript/shoehorn": "^0.1.2",
45
46
  "@types/sdp-transform": "^2.4.9",
46
47
  "@types/ua-parser-js": "^0.7.39",
47
48
  "@vitest/coverage-v8": "^3.1.3",
package/src/Call.ts CHANGED
@@ -8,6 +8,7 @@ import {
8
8
  Publisher,
9
9
  Subscriber,
10
10
  toRtcConfiguration,
11
+ TrackPublishOptions,
11
12
  trackTypeToParticipantStreamKey,
12
13
  } from './rtc';
13
14
  import {
@@ -1770,9 +1771,14 @@ export class Call {
1770
1771
  *
1771
1772
  * @param mediaStream the media stream to publish.
1772
1773
  * @param trackType the type of the track to announce.
1774
+ * @param options the publish options.
1773
1775
  */
1774
- publish = async (mediaStream: MediaStream, trackType: TrackType) => {
1775
- if (!this.sfuClient) throw new Error(`Call not joined yet.`);
1776
+ publish = async (
1777
+ mediaStream: MediaStream,
1778
+ trackType: TrackType,
1779
+ options?: TrackPublishOptions,
1780
+ ) => {
1781
+ if (!this.sfuClient) throw new Error(`Call is not joined yet`);
1776
1782
  // joining is in progress, and we should wait until the client is ready
1777
1783
  await this.sfuClient.joinTask;
1778
1784
 
@@ -1797,15 +1803,16 @@ export class Call {
1797
1803
  }
1798
1804
 
1799
1805
  pushToIfMissing(this.trackPublishOrder, trackType);
1800
- await this.publisher.publish(track, trackType);
1806
+ await this.publisher.publish(track, trackType, options);
1801
1807
 
1802
1808
  const trackTypes = [trackType];
1803
1809
  if (trackType === TrackType.SCREEN_SHARE) {
1804
1810
  const [audioTrack] = mediaStream.getAudioTracks();
1805
1811
  if (audioTrack) {
1806
- pushToIfMissing(this.trackPublishOrder, TrackType.SCREEN_SHARE_AUDIO);
1807
- await this.publisher.publish(audioTrack, TrackType.SCREEN_SHARE_AUDIO);
1808
- trackTypes.push(TrackType.SCREEN_SHARE_AUDIO);
1812
+ const screenShareAudio = TrackType.SCREEN_SHARE_AUDIO;
1813
+ pushToIfMissing(this.trackPublishOrder, screenShareAudio);
1814
+ await this.publisher.publish(audioTrack, screenShareAudio, options);
1815
+ trackTypes.push(screenShareAudio);
1809
1816
  }
1810
1817
  }
1811
1818
 
@@ -35,7 +35,7 @@ describe('Publishing and Unpublishing tracks', () => {
35
35
  describe('Validations', () => {
36
36
  it('publishing is not allowed only when call is not joined', async () => {
37
37
  const ms = new MediaStream();
38
- const err = 'Call not joined yet.';
38
+ const err = 'Call is not joined yet';
39
39
  await expect(call.publish(ms, TrackType.VIDEO)).rejects.toThrowError(err);
40
40
  await expect(call.publish(ms, TrackType.AUDIO)).rejects.toThrowError(err);
41
41
  await expect(
@@ -140,7 +140,11 @@ describe('Publishing and Unpublishing tracks', () => {
140
140
  vi.spyOn(mediaStream, 'getVideoTracks').mockReturnValue([track]);
141
141
 
142
142
  await call.publish(mediaStream, TrackType.VIDEO);
143
- expect(publisher.publish).toHaveBeenCalledWith(track, TrackType.VIDEO);
143
+ expect(publisher.publish).toHaveBeenCalledWith(
144
+ track,
145
+ TrackType.VIDEO,
146
+ undefined,
147
+ );
144
148
  expect(call['trackPublishOrder']).toEqual([TrackType.VIDEO]);
145
149
 
146
150
  expect(sfuClient.updateMuteStates).toHaveBeenCalledWith([
@@ -159,7 +163,11 @@ describe('Publishing and Unpublishing tracks', () => {
159
163
  vi.spyOn(mediaStream, 'getAudioTracks').mockReturnValue([track]);
160
164
 
161
165
  await call.publish(mediaStream, TrackType.AUDIO);
162
- expect(publisher.publish).toHaveBeenCalledWith(track, TrackType.AUDIO);
166
+ expect(publisher.publish).toHaveBeenCalledWith(
167
+ track,
168
+ TrackType.AUDIO,
169
+ undefined,
170
+ );
163
171
  expect(call['trackPublishOrder']).toEqual([TrackType.AUDIO]);
164
172
 
165
173
  expect(sfuClient.updateMuteStates).toHaveBeenCalledWith([
@@ -181,6 +189,7 @@ describe('Publishing and Unpublishing tracks', () => {
181
189
  expect(publisher.publish).toHaveBeenCalledWith(
182
190
  track,
183
191
  TrackType.SCREEN_SHARE,
192
+ undefined,
184
193
  );
185
194
  expect(call['trackPublishOrder']).toEqual([TrackType.SCREEN_SHARE]);
186
195
 
@@ -205,10 +214,12 @@ describe('Publishing and Unpublishing tracks', () => {
205
214
  expect(publisher.publish).toHaveBeenCalledWith(
206
215
  videoTrack,
207
216
  TrackType.SCREEN_SHARE,
217
+ undefined,
208
218
  );
209
219
  expect(publisher.publish).toHaveBeenCalledWith(
210
220
  audioTrack,
211
221
  TrackType.SCREEN_SHARE_AUDIO,
222
+ undefined,
212
223
  );
213
224
  expect(call['trackPublishOrder']).toEqual([
214
225
  TrackType.SCREEN_SHARE,
@@ -25,7 +25,7 @@ const tokenProvider = (userId: string) => {
25
25
  };
26
26
  };
27
27
 
28
- describe.skip('StreamVideoClient - coordinator API', () => {
28
+ describe('StreamVideoClient - coordinator API', () => {
29
29
  let client: StreamVideoClient;
30
30
  const user = {
31
31
  id: 'sara',
@@ -0,0 +1,61 @@
1
+ import { DeviceManager } from './DeviceManager';
2
+ import { AudioDeviceManagerState } from './AudioDeviceManagerState';
3
+ import { AudioBitrateProfile } from '../gen/video/sfu/models/models';
4
+ import { TrackPublishOptions } from '../rtc';
5
+
6
+ /**
7
+ * Base class for High Fidelity enabled Device Managers.
8
+ */
9
+ export abstract class AudioDeviceManager<
10
+ S extends AudioDeviceManagerState<C>,
11
+ C = MediaTrackConstraints,
12
+ > extends DeviceManager<S, C> {
13
+ /**
14
+ * Sets the audio bitrate profile and stereo mode.
15
+ */
16
+ async setAudioBitrateProfile(profile: AudioBitrateProfile) {
17
+ if (!this.call.state.settings?.audio.hifi_audio_enabled) {
18
+ throw new Error('High Fidelity audio is not enabled for this call');
19
+ }
20
+ this.doSetAudioBitrateProfile(profile);
21
+ this.state.setAudioBitrateProfile(profile);
22
+ if (this.enabled) {
23
+ await this.applySettingsToStream();
24
+ }
25
+ }
26
+
27
+ /**
28
+ * Overrides the default `publishStream` method to inject the audio bitrate profile.
29
+ */
30
+ protected override publishStream(
31
+ stream: MediaStream,
32
+ options?: TrackPublishOptions,
33
+ ): Promise<void> {
34
+ return super.publishStream(stream, {
35
+ audioBitrateProfile: this.state.audioBitrateProfile,
36
+ ...options,
37
+ });
38
+ }
39
+
40
+ /**
41
+ * Applies Device Manager's specific audio profile settings.
42
+ */
43
+ protected abstract doSetAudioBitrateProfile(
44
+ profile: AudioBitrateProfile,
45
+ ): void;
46
+ }
47
+
48
+ /**
49
+ * Prepares a new MediaTrackConstraints set based on the provided arguments.
50
+ */
51
+ export const createAudioConstraints = (
52
+ profile: AudioBitrateProfile,
53
+ ): MediaTrackConstraints => {
54
+ const stereo = profile === AudioBitrateProfile.MUSIC_HIGH_QUALITY;
55
+ return {
56
+ echoCancellation: !stereo,
57
+ noiseSuppression: !stereo,
58
+ autoGainControl: !stereo,
59
+ channelCount: { ideal: stereo ? 2 : 1 },
60
+ };
61
+ };
@@ -0,0 +1,44 @@
1
+ import { BehaviorSubject, distinctUntilChanged, Observable } from 'rxjs';
2
+ import { AudioBitrateProfile } from '../gen/video/sfu/models/models';
3
+ import { DeviceManagerState, TrackDisableMode } from './DeviceManagerState';
4
+ import { RxUtils } from './../store';
5
+ import { BrowserPermission } from './BrowserPermission';
6
+
7
+ /**
8
+ * Base state class for High Fidelity enabled device managers.
9
+ */
10
+ export abstract class AudioDeviceManagerState<C> extends DeviceManagerState<C> {
11
+ private readonly audioBitrateProfileSubject: BehaviorSubject<AudioBitrateProfile>;
12
+
13
+ /** An Observable that emits the current audio bitrate profile. */
14
+ audioBitrateProfile$: Observable<AudioBitrateProfile>;
15
+
16
+ /**
17
+ * Constructs a new AudioDeviceManagerState instance.
18
+ */
19
+ protected constructor(
20
+ disableMode: TrackDisableMode,
21
+ permission: BrowserPermission | undefined,
22
+ profile: AudioBitrateProfile,
23
+ ) {
24
+ super(disableMode, permission);
25
+ this.audioBitrateProfileSubject = new BehaviorSubject(profile);
26
+ this.audioBitrateProfile$ = this.audioBitrateProfileSubject
27
+ .asObservable()
28
+ .pipe(distinctUntilChanged());
29
+ }
30
+
31
+ /**
32
+ * Returns the current audio bitrate profile.
33
+ */
34
+ get audioBitrateProfile() {
35
+ return RxUtils.getCurrentValue(this.audioBitrateProfile$);
36
+ }
37
+
38
+ /**
39
+ * Sets the audio bitrate profile and stereo mode.
40
+ */
41
+ setAudioBitrateProfile(profile: AudioBitrateProfile) {
42
+ RxUtils.setCurrentValue(this.audioBitrateProfileSubject, profile);
43
+ }
44
+ }
@@ -1,14 +1,14 @@
1
1
  import { Observable } from 'rxjs';
2
2
  import { Call } from '../Call';
3
3
  import { CameraDirection, CameraManagerState } from './CameraManagerState';
4
- import { InputMediaDeviceManager } from './InputMediaDeviceManager';
4
+ import { DeviceManager } from './DeviceManager';
5
5
  import { getVideoDevices, getVideoStream } from './devices';
6
6
  import { OwnCapability, VideoSettingsResponse } from '../gen/coordinator';
7
7
  import { TrackType } from '../gen/video/sfu/models/models';
8
8
  import { isMobile } from '../helpers/compatibility';
9
9
  import { isReactNative } from '../helpers/platforms';
10
10
 
11
- export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
11
+ export class CameraManager extends DeviceManager<CameraManagerState> {
12
12
  private targetResolution = {
13
13
  width: 1280,
14
14
  height: 720,
@@ -152,11 +152,11 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
152
152
  }
153
153
  }
154
154
 
155
- protected getDevices(): Observable<MediaDeviceInfo[]> {
155
+ protected override getDevices(): Observable<MediaDeviceInfo[]> {
156
156
  return getVideoDevices(this.call.tracer);
157
157
  }
158
158
 
159
- protected getStream(
159
+ protected override getStream(
160
160
  constraints: MediaTrackConstraints,
161
161
  ): Promise<MediaStream> {
162
162
  constraints.width = this.targetResolution.width;
@@ -1,12 +1,12 @@
1
- import { BehaviorSubject, distinctUntilChanged, Observable } from 'rxjs';
2
- import { InputMediaDeviceManagerState } from './InputMediaDeviceManagerState';
1
+ import { BehaviorSubject, distinctUntilChanged } from 'rxjs';
2
+ import { DeviceManagerState } from './DeviceManagerState';
3
3
  import { isReactNative } from '../helpers/platforms';
4
4
  import { getVideoBrowserPermission } from './devices';
5
5
  import { RxUtils } from '../store';
6
6
 
7
7
  export type CameraDirection = 'front' | 'back' | undefined;
8
8
 
9
- export class CameraManagerState extends InputMediaDeviceManagerState {
9
+ export class CameraManagerState extends DeviceManagerState {
10
10
  private directionSubject = new BehaviorSubject<CameraDirection>(undefined);
11
11
 
12
12
  /**
@@ -14,13 +14,12 @@ export class CameraManagerState extends InputMediaDeviceManagerState {
14
14
  * front - means the camera facing the user
15
15
  * back - means the camera facing the environment
16
16
  */
17
- direction$: Observable<CameraDirection>;
17
+ direction$ = this.directionSubject
18
+ .asObservable()
19
+ .pipe(distinctUntilChanged());
18
20
 
19
21
  constructor() {
20
22
  super('stop-tracks', getVideoBrowserPermission());
21
- this.direction$ = this.directionSubject
22
- .asObservable()
23
- .pipe(distinctUntilChanged());
24
23
  }
25
24
 
26
25
  /**
@@ -58,7 +57,9 @@ export class CameraManagerState extends InputMediaDeviceManagerState {
58
57
  }
59
58
  }
60
59
 
61
- protected getDeviceIdFromStream(stream: MediaStream): string | undefined {
60
+ protected override getDeviceIdFromStream(
61
+ stream: MediaStream,
62
+ ): string | undefined {
62
63
  const [track] = stream.getVideoTracks();
63
64
  return track?.getSettings().deviceId;
64
65
  }
@@ -1,8 +1,9 @@
1
1
  import { combineLatest, Observable, pairwise } from 'rxjs';
2
2
  import { Call } from '../Call';
3
+ import { TrackPublishOptions } from '../rtc';
3
4
  import { CallingState } from '../store';
4
5
  import { createSubscription } from '../store/rxUtils';
5
- import { InputMediaDeviceManagerState } from './InputMediaDeviceManagerState';
6
+ import { DeviceManagerState } from './DeviceManagerState';
6
7
  import { isMobile } from '../helpers/compatibility';
7
8
  import { isReactNative } from '../helpers/platforms';
8
9
  import { Logger } from '../coordinator/connection/types';
@@ -20,8 +21,8 @@ import {
20
21
  MediaStreamFilterRegistrationResult,
21
22
  } from './filters';
22
23
 
23
- export abstract class InputMediaDeviceManager<
24
- T extends InputMediaDeviceManagerState<C>,
24
+ export abstract class DeviceManager<
25
+ S extends DeviceManagerState<C>,
25
26
  C = MediaTrackConstraints,
26
27
  > {
27
28
  /**
@@ -30,7 +31,7 @@ export abstract class InputMediaDeviceManager<
30
31
  stopOnLeave = true;
31
32
  logger: Logger;
32
33
 
33
- state: T;
34
+ state: S;
34
35
 
35
36
  protected readonly call: Call;
36
37
  protected readonly trackType: TrackType;
@@ -43,7 +44,7 @@ export abstract class InputMediaDeviceManager<
43
44
  'filterRegistrationConcurrencyTag',
44
45
  );
45
46
 
46
- protected constructor(call: Call, state: T, trackType: TrackType) {
47
+ protected constructor(call: Call, state: S, trackType: TrackType) {
47
48
  this.call = call;
48
49
  this.state = state;
49
50
  this.trackType = trackType;
@@ -109,7 +110,6 @@ export abstract class InputMediaDeviceManager<
109
110
 
110
111
  /**
111
112
  * Stops or pauses the stream based on state.disableMode
112
- * @param {boolean} [forceStop=false] when true, stops the tracks regardless of the state.disableMode
113
113
  */
114
114
  async disable(options: { forceStop?: boolean }): Promise<void>;
115
115
  async disable(forceStop?: boolean): Promise<void>;
@@ -282,8 +282,11 @@ export abstract class InputMediaDeviceManager<
282
282
 
283
283
  protected abstract getStream(constraints: C): Promise<MediaStream>;
284
284
 
285
- protected publishStream(stream: MediaStream): Promise<void> {
286
- return this.call.publish(stream, this.trackType);
285
+ protected publishStream(
286
+ stream: MediaStream,
287
+ options?: TrackPublishOptions,
288
+ ): Promise<void> {
289
+ return this.call.publish(stream, this.trackType, options);
287
290
  }
288
291
 
289
292
  protected stopPublishStream(): Promise<void> {
@@ -11,7 +11,7 @@ import { BrowserPermission, BrowserPermissionState } from './BrowserPermission';
11
11
  export type InputDeviceStatus = 'enabled' | 'disabled' | undefined;
12
12
  export type TrackDisableMode = 'stop-tracks' | 'disable-tracks';
13
13
 
14
- export abstract class InputMediaDeviceManagerState<C = MediaTrackConstraints> {
14
+ export abstract class DeviceManagerState<C = MediaTrackConstraints> {
15
15
  protected statusSubject = new BehaviorSubject<InputDeviceStatus>(undefined);
16
16
  protected optimisticStatusSubject = new BehaviorSubject<InputDeviceStatus>(
17
17
  undefined,
@@ -79,17 +79,20 @@ export abstract class InputMediaDeviceManagerState<C = MediaTrackConstraints> {
79
79
  */
80
80
  isPromptingPermission$: Observable<boolean>;
81
81
 
82
+ readonly disableMode: TrackDisableMode;
83
+
82
84
  /**
83
- * Constructs new InputMediaDeviceManagerState instance.
85
+ * Constructs a new InputMediaDeviceManagerState instance.
84
86
  *
85
87
  * @param disableMode the disable mode to use.
86
88
  * @param permission the BrowserPermission to use for querying.
87
89
  * `undefined` means no permission is required.
88
90
  */
89
91
  constructor(
90
- public readonly disableMode: TrackDisableMode = 'stop-tracks',
91
- permission?: BrowserPermission,
92
+ disableMode: TrackDisableMode,
93
+ permission: BrowserPermission | undefined,
92
94
  ) {
95
+ this.disableMode = disableMode;
93
96
  this.hasBrowserPermission$ = permission
94
97
  ? permission.asObservable().pipe(shareReplay(1))
95
98
  : of(true);
@@ -1,11 +1,14 @@
1
1
  import { combineLatest, Observable } from 'rxjs';
2
2
  import type { INoiseCancellation } from '@stream-io/audio-filters-web';
3
3
  import { Call } from '../Call';
4
- import { InputMediaDeviceManager } from './InputMediaDeviceManager';
4
+ import {
5
+ AudioDeviceManager,
6
+ createAudioConstraints,
7
+ } from './AudioDeviceManager';
5
8
  import { MicrophoneManagerState } from './MicrophoneManagerState';
6
- import { TrackDisableMode } from './InputMediaDeviceManagerState';
9
+ import { TrackDisableMode } from './DeviceManagerState';
7
10
  import { getAudioDevices, getAudioStream } from './devices';
8
- import { TrackType } from '../gen/video/sfu/models/models';
11
+ import { AudioBitrateProfile, TrackType } from '../gen/video/sfu/models/models';
9
12
  import { createSoundDetector } from '../helpers/sound-detector';
10
13
  import { isReactNative } from '../helpers/platforms';
11
14
  import {
@@ -21,7 +24,7 @@ import {
21
24
  import { RNSpeechDetector } from '../helpers/RNSpeechDetector';
22
25
  import { withoutConcurrency } from '../helpers/concurrency';
23
26
 
24
- export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManagerState> {
27
+ export class MicrophoneManager extends AudioDeviceManager<MicrophoneManagerState> {
25
28
  private speakingWhileMutedNotificationEnabled = true;
26
29
  private soundDetectorConcurrencyTag = Symbol('soundDetectorConcurrencyTag');
27
30
  private soundDetectorCleanup?: Function;
@@ -245,16 +248,33 @@ export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManager
245
248
  }
246
249
  }
247
250
 
248
- protected getDevices(): Observable<MediaDeviceInfo[]> {
251
+ protected override getDevices(): Observable<MediaDeviceInfo[]> {
249
252
  return getAudioDevices(this.call.tracer);
250
253
  }
251
254
 
252
- protected getStream(
255
+ protected override getStream(
253
256
  constraints: MediaTrackConstraints,
254
257
  ): Promise<MediaStream> {
255
258
  return getAudioStream(constraints, this.call.tracer);
256
259
  }
257
260
 
261
+ protected override doSetAudioBitrateProfile(profile: AudioBitrateProfile) {
262
+ this.setDefaultConstraints({
263
+ ...this.state.defaultConstraints,
264
+ ...createAudioConstraints(profile),
265
+ });
266
+
267
+ if (this.noiseCancellation) {
268
+ const disableAudioProcessing =
269
+ profile === AudioBitrateProfile.MUSIC_HIGH_QUALITY;
270
+ if (disableAudioProcessing) {
271
+ this.noiseCancellation.disable(); // disable for high quality music mode
272
+ } else {
273
+ this.noiseCancellation.enable(); // restore it for other modes if available
274
+ }
275
+ }
276
+ }
277
+
258
278
  private async startSpeakingWhileMutedDetection(deviceId?: string) {
259
279
  await withoutConcurrency(this.soundDetectorConcurrencyTag, async () => {
260
280
  await this.stopSpeakingWhileMutedDetection();