@stream-io/video-client 1.41.0 → 1.41.2
This diff shows the changes between publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +11 -0
- package/dist/index.browser.es.js +94 -23
- package/dist/index.browser.es.js.map +1 -1
- package/dist/index.cjs.js +94 -23
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.es.js +94 -23
- package/dist/index.es.js.map +1 -1
- package/dist/src/devices/SpeakerManager.d.ts +3 -0
- package/dist/src/helpers/DynascaleManager.d.ts +13 -1
- package/dist/src/helpers/types.d.ts +16 -0
- package/dist/src/types.d.ts +2 -2
- package/package.json +2 -2
- package/src/Call.ts +6 -4
- package/src/devices/SpeakerManager.ts +51 -15
- package/src/helpers/DynascaleManager.ts +57 -9
- package/src/helpers/__tests__/DynascaleManager.test.ts +8 -1
- package/src/helpers/types.ts +26 -0
- package/src/rtc/__tests__/mocks/webrtc.mocks.ts +1 -0
- package/src/types.ts +2 -2
package/CHANGELOG.md
CHANGED
```diff
@@ -2,6 +2,17 @@
 
 This file was generated using [@jscutlery/semver](https://github.com/jscutlery/semver).
 
+## [1.41.2](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.41.1...@stream-io/video-client-1.41.2) (2026-01-28)
+
+- deduplicate RN compatibility assertions ([#2101](https://github.com/GetStream/stream-video-js/issues/2101)) ([5b9e6bc](https://github.com/GetStream/stream-video-js/commit/5b9e6bc227c55b067eea6345315bca015c8a7ee4))
+
+## [1.41.1](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.41.0...@stream-io/video-client-1.41.1) (2026-01-26)
+
+### Bug Fixes
+
+- **safari:** Handle interrupted AudioContext and AudioSession states ([#2098](https://github.com/GetStream/stream-video-js/issues/2098)) ([975901f](https://github.com/GetStream/stream-video-js/commit/975901f399b46479928ec1e9f32da7e47bba9ad3))
+- use multiple settings to determine default audio device RN-338 ([#2096](https://github.com/GetStream/stream-video-js/issues/2096)) ([19cf136](https://github.com/GetStream/stream-video-js/commit/19cf13651112b647903587a84a70a555fc68fc9c)), closes [2BSettingsPriority.swift#L19](https://github.com/GetStream/2BSettingsPriority.swift/issues/L19)
+
 ## [1.41.0](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-client-1.40.3...@stream-io/video-client-1.41.0) (2026-01-20)
 
 ### Features
```
package/dist/index.browser.es.js
CHANGED
```diff
@@ -6188,7 +6188,7 @@ const getSdkVersion = (sdk) => {
     return sdk ? `${sdk.major}.${sdk.minor}.${sdk.patch}` : '0.0.0-development';
 };
 
-const version = "1.41.0";
+const version = "1.41.2";
 const [major, minor, patch] = version.split('.');
 let sdkInfo = {
     type: SdkType.PLAIN_JAVASCRIPT,
@@ -9372,12 +9372,13 @@ class DynascaleManager {
     /**
      * Creates a new DynascaleManager instance.
      */
-    constructor(callState, speaker) {
+    constructor(callState, speaker, tracer) {
         /**
          * The viewport tracker instance.
          */
         this.viewportTracker = new ViewportTracker();
         this.logger = videoLoggerSystem.getLogger('DynascaleManager');
+        this.useWebAudio = isSafari();
         this.pendingSubscriptionsUpdate = null;
         this.videoTrackSubscriptionOverridesSubject = new BehaviorSubject({});
         this.videoTrackSubscriptionOverrides$ = this.videoTrackSubscriptionOverridesSubject.asObservable();
@@ -9417,6 +9418,10 @@ class DynascaleManager {
             }
         };
         this.setVideoTrackSubscriptionOverrides = (override, sessionIds) => {
+            this.tracer.trace('setVideoTrackSubscriptionOverrides', [
+                override,
+                sessionIds,
+            ]);
             if (!sessionIds) {
                 return setCurrentValue(this.videoTrackSubscriptionOverridesSubject, override ? { [globalOverrideKey]: override } : {});
             }
```
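Version 1.41.2 threads a `tracer` through `DynascaleManager` so that subscription overrides and audio-context state changes show up in call traces. Within this diff the new parameter is only ever used through a `trace(tag, data)` call, so any object with that shape would satisfy it. A minimal sketch of such a shape; `TraceLike` and `consoleTracer` are illustrative names, not part of the package:

```ts
// Hypothetical names; only the `trace(tag, data)` call shape is taken from the diff above.
interface TraceLike {
  trace(tag: string, data?: unknown): void;
}

// A throwaway tracer that just logs. The SDK itself wires in `this.tracer` from `Call`.
const consoleTracer: TraceLike = {
  trace: (tag, data) => console.debug(`[trace] ${tag}`, data),
};
```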
```diff
@@ -9498,6 +9503,18 @@ class DynascaleManager {
         this.setViewport = (element) => {
             return this.viewportTracker.setViewport(element);
         };
+        /**
+         * Sets whether to use WebAudio API for audio playback.
+         * Must be set before joining the call.
+         *
+         * @internal
+         *
+         * @param useWebAudio whether to use WebAudio API.
+         */
+        this.setUseWebAudio = (useWebAudio) => {
+            this.tracer.trace('setUseWebAudio', useWebAudio);
+            this.useWebAudio = useWebAudio;
+        };
         /**
          * Binds a DOM <video> element to the given session id.
          * This method will make sure that the video element will play
```
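`useWebAudio` defaults to `isSafari()` in the constructor and, as the next hunks show, gates whether an `AudioContext` is created at all. A minimal sketch of that gating pattern under the same assumption; `AudioRouter` is an illustrative class, not the SDK's `DynascaleManager`:

```ts
// Illustrative only: mirrors the "create the AudioContext lazily, and only when
// Web Audio playback is enabled" pattern from the diff, not the SDK's actual class.
class AudioRouter {
  private audioContext?: AudioContext;

  constructor(private useWebAudio: boolean) {}

  // Returns undefined on the non-Web-Audio path (plain <audio> element playback).
  getOrCreateAudioContext(): AudioContext | undefined {
    if (!this.useWebAudio) return undefined;
    return (this.audioContext ??= new AudioContext());
  }
}
```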
```diff
@@ -9713,6 +9730,7 @@ class DynascaleManager {
                 // we will play audio directly through the audio element in other browsers
                 audioElement.muted = false;
                 audioElement.play().catch((e) => {
+                    this.tracer.trace('audioPlaybackError', e.message);
                     this.logger.warn(`Failed to play audio stream`, e);
                 });
             }
@@ -9747,32 +9765,57 @@ class DynascaleManager {
             };
         };
         this.getOrCreateAudioContext = () => {
-            if (this.audioContext)
+            if (!this.useWebAudio)
+                return;
+            if (this.audioContext)
                 return this.audioContext;
             const context = new AudioContext();
+            this.tracer.trace('audioContext.create', context.state);
             if (context.state === 'suspended') {
                 document.addEventListener('click', this.resumeAudioContext);
             }
-
+            context.addEventListener('statechange', () => {
+                this.tracer.trace('audioContext.state', context.state);
+                if (context.state === 'interrupted') {
+                    this.resumeAudioContext();
+                }
+            });
             const audioSession = navigator.audioSession;
             if (audioSession) {
                 // https://github.com/w3c/audio-session/blob/main/explainer.md
                 audioSession.type = 'play-and-record';
+                let isSessionInterrupted = false;
+                audioSession.addEventListener('statechange', () => {
+                    this.tracer.trace('audioSession.state', audioSession.state);
+                    if (audioSession.state === 'interrupted') {
+                        isSessionInterrupted = true;
+                    }
+                    else if (isSessionInterrupted) {
+                        this.resumeAudioContext();
+                        isSessionInterrupted = false;
+                    }
+                });
             }
             return (this.audioContext = context);
         };
         this.resumeAudioContext = () => {
-            if (this.audioContext
-
-
-
-
+            if (!this.audioContext)
+                return;
+            const { state } = this.audioContext;
+            if (state === 'suspended' || state === 'interrupted') {
+                const tag = 'audioContext.resume';
+                this.audioContext.resume().then(() => {
+                    this.tracer.trace(tag, this.audioContext?.state);
                     document.removeEventListener('click', this.resumeAudioContext);
+                }, (err) => {
+                    this.tracer.trace(`${tag}Error`, this.audioContext?.state);
+                    this.logger.warn(`Can't resume audio context`, err);
                 });
             }
         };
         this.callState = callState;
         this.speaker = speaker;
+        this.tracer = tracer;
     }
     setSfuClient(sfuClient) {
         this.sfuClient = sfuClient;
```
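The Safari fix above resumes the `AudioContext` in two situations: when the context itself flips to the non-standard `interrupted` state, and when `navigator.audioSession` reports that an interruption (for example an incoming phone call) has ended. A standalone sketch of the context-side half of that pattern, using only the standard Web Audio API; `attachInterruptionRecovery` is an illustrative name, and the `as string` casts are there because `'interrupted'` is a Safari-specific state that the TypeScript lib typings may not include:

```ts
// Illustrative helper (not SDK API): resume an AudioContext after Safari reports
// it as 'interrupted', or after a user gesture while it is 'suspended'.
function attachInterruptionRecovery(context: AudioContext) {
  const resume = () => {
    const state = context.state as string;
    if (state === 'suspended' || state === 'interrupted') {
      context
        .resume()
        .then(() => document.removeEventListener('click', resume))
        .catch((err) => console.warn(`Can't resume audio context`, err));
    }
  };

  // A suspended context can usually only be resumed from a user gesture.
  if (context.state === 'suspended') {
    document.addEventListener('click', resume);
  }
  // Safari flips the context to 'interrupted' e.g. during an incoming call.
  context.addEventListener('statechange', () => {
    if ((context.state as string) === 'interrupted') resume();
  });
}
```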
```diff
@@ -11992,6 +12035,37 @@ class SpeakerManager {
         this.state = new SpeakerState(call.tracer);
         this.setup();
     }
+    apply(settings) {
+        if (!isReactNative()) {
+            return;
+        }
+        /// Determines if the speaker should be enabled based on a priority hierarchy of
+        /// settings.
+        ///
+        /// The priority order is as follows:
+        /// 1. If video camera is set to be on by default, speaker is enabled
+        /// 2. If audio speaker is set to be on by default, speaker is enabled
+        /// 3. If the default audio device is set to speaker, speaker is enabled
+        ///
+        /// This ensures that the speaker state aligns with the most important user
+        /// preference or system requirement.
+        const speakerOnWithSettingsPriority = settings.video.camera_default_on ||
+            settings.audio.speaker_default_on ||
+            settings.audio.default_device ===
+                AudioSettingsRequestDefaultDeviceEnum.SPEAKER;
+        const defaultDevice = speakerOnWithSettingsPriority
+            ? AudioSettingsRequestDefaultDeviceEnum.SPEAKER
+            : AudioSettingsRequestDefaultDeviceEnum.EARPIECE;
+        if (this.defaultDevice !== defaultDevice) {
+            this.call.logger.debug('SpeakerManager: setting default device', {
+                defaultDevice,
+            });
+            this.defaultDevice = defaultDevice;
+            globalThis.streamRNVideoSDK?.callManager.setup({
+                defaultDevice,
+            });
+        }
+    }
     setup() {
         if (this.areSubscriptionsSetUp) {
             return;
```
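On React Native, the new `apply` method derives the initial audio route from three call settings in priority order: camera default on, speaker default on, then the configured default device. A minimal sketch of that decision as a pure function; the settings shape below is trimmed to just the fields the diff reads, and the string literals `'speaker' | 'earpiece'` stand in for `AudioSettingsRequestDefaultDeviceEnum`:

```ts
// Trimmed settings shape: only the fields used by the priority check above.
type CallSettingsSubset = {
  video: { camera_default_on: boolean };
  audio: { speaker_default_on: boolean; default_device: 'speaker' | 'earpiece' };
};

// Speaker wins if any of the three settings asks for it, otherwise earpiece.
const resolveDefaultAudioDevice = (s: CallSettingsSubset): 'speaker' | 'earpiece' =>
  s.video.camera_default_on ||
  s.audio.speaker_default_on ||
  s.audio.default_device === 'speaker'
    ? 'speaker'
    : 'earpiece';

// Example: an audio-only call that still defaults to the loudspeaker.
const device = resolveDefaultAudioDevice({
  video: { camera_default_on: false },
  audio: { speaker_default_on: true, default_device: 'earpiece' },
}); // -> 'speaker'
```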
```diff
@@ -12018,9 +12092,7 @@ class SpeakerManager {
      * @returns an Observable that will be updated if a device is connected or disconnected
      */
     listDevices() {
-        if (isReactNative()) {
-            throw new Error('This feature is not supported in React Native. Please visit https://getstream.io/video/docs/reactnative/core/camera-and-microphone/#speaker-management for more details');
-        }
+        assertUnsupportedInReactNative();
         return getAudioOutputDevices(this.call.tracer);
     }
     /**
@@ -12031,9 +12103,7 @@ class SpeakerManager {
      * @param deviceId empty string means the system default
      */
     select(deviceId) {
-        if (isReactNative()) {
-            throw new Error('This feature is not supported in React Native. Please visit https://getstream.io/video/docs/reactnative/core/camera-and-microphone/#speaker-management for more details');
-        }
+        assertUnsupportedInReactNative();
         this.state.setDevice(deviceId);
     }
     /**
@@ -12043,9 +12113,7 @@ class SpeakerManager {
      * Note: This method is not supported in React Native
      */
     setVolume(volume) {
-        if (isReactNative()) {
-            throw new Error('This feature is not supported in React Native. Please visit https://getstream.io/video/docs/reactnative/core/camera-and-microphone/#speaker-management for more details');
-        }
+        assertUnsupportedInReactNative();
         if (volume && (volume < 0 || volume > 1)) {
             throw new Error('Volume must be between 0 and 1');
         }
@@ -12072,6 +12140,11 @@ class SpeakerManager {
         });
     }
 }
+const assertUnsupportedInReactNative = () => {
+    if (isReactNative()) {
+        throw new Error('Unsupported in React Native. See: https://getstream.io/video/docs/react-native/guides/camera-and-microphone/#speaker-management');
+    }
+};
 
 /**
  * An object representation of a `Call`.
```
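For web callers nothing changes in the three guarded methods above; only the repeated React Native check was centralized into the module-level `assertUnsupportedInReactNative` helper, which is a no-op on web and throws on RN. A hedged usage sketch on web, assuming an existing `call: Call` instance; the device-picking policy is purely illustrative:

```ts
// Web-only usage sketch; on React Native these calls throw via the shared assertion.
const subscription = call.speaker.listDevices().subscribe((devices) => {
  // Illustrative policy: prefer the first explicitly listed output device.
  const device = devices.find((d) => d.deviceId !== 'default');
  if (device) call.speaker.select(device.deviceId);
});
call.speaker.setVolume(0.5); // must be between 0 and 1
// Later: subscription.unsubscribe();
```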
```diff
@@ -13933,9 +14006,7 @@ class Call {
          * @internal
          */
         this.applyDeviceConfig = async (settings, publish) => {
-
-                default_device: settings.audio.default_device,
-            });
+            this.speaker.apply(settings);
             await this.camera.apply(settings.video, publish).catch((err) => {
                 this.logger.warn('Camera init failed', err);
             });
@@ -14105,7 +14176,7 @@ class Call {
         this.microphone = new MicrophoneManager(this);
         this.speaker = new SpeakerManager(this);
         this.screenShare = new ScreenShareManager(this);
-        this.dynascaleManager = new DynascaleManager(this.state, this.speaker);
+        this.dynascaleManager = new DynascaleManager(this.state, this.speaker, this.tracer);
     }
     /**
      * A flag indicating whether the call is "ringing" type of call.
```
```diff
@@ -15249,7 +15320,7 @@ class StreamClient {
         this.getUserAgent = () => {
             if (!this.cachedUserAgent) {
                 const { clientAppIdentifier = {} } = this.options;
-                const { sdkName = 'js', sdkVersion = "1.41.0", ...extras } = clientAppIdentifier;
+                const { sdkName = 'js', sdkVersion = "1.41.2", ...extras } = clientAppIdentifier;
                 this.cachedUserAgent = [
                     `stream-video-${sdkName}-v${sdkVersion}`,
                     ...Object.entries(extras).map(([key, value]) => `${key}=${value}`),
```
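For reference, the user agent assembled above interpolates `sdkName` and `sdkVersion` and appends any extra `clientAppIdentifier` entries as `key=value` pairs. A small illustration of the visible string-building; the `app: 'my-app'` entry is an invented example, and the final join/concatenation of the parts is not shown in this diff excerpt:

```ts
// Reproduces only the array construction shown in the diff.
const clientAppIdentifier = { sdkName: 'js', sdkVersion: '1.41.2', app: 'my-app' };
const { sdkName, sdkVersion, ...extras } = clientAppIdentifier;
const userAgentParts = [
  `stream-video-${sdkName}-v${sdkVersion}`, // "stream-video-js-v1.41.2"
  ...Object.entries(extras).map(([key, value]) => `${key}=${value}`), // "app=my-app"
];
console.log(userAgentParts);
```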
|