@stream-io/video-client 0.3.11 → 0.3.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.browser.es.js +393 -370
- package/dist/index.browser.es.js.map +1 -1
- package/dist/index.cjs.js +392 -369
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.es.js +393 -370
- package/dist/index.es.js.map +1 -1
- package/dist/src/Call.d.ts +6 -1
- package/dist/src/devices/SpeakerManager.d.ts +28 -0
- package/dist/src/devices/SpeakerState.d.ts +64 -0
- package/dist/src/devices/__tests__/SpeakerManager.test.d.ts +1 -0
- package/dist/src/rtc/codecs.d.ts +1 -1
- package/dist/src/types.d.ts +2 -0
- package/dist/version.d.ts +1 -1
- package/package.json +1 -1
- package/src/Call.ts +13 -8
- package/src/devices/SpeakerManager.ts +50 -0
- package/src/devices/SpeakerState.ts +90 -0
- package/src/devices/__tests__/SpeakerManager.test.ts +66 -0
- package/src/helpers/DynascaleManager.ts +25 -7
- package/src/helpers/__tests__/DynascaleManager.test.ts +30 -2
- package/src/rtc/codecs.ts +1 -35
- package/src/types.ts +2 -0
package/dist/index.es.js
CHANGED
|
@@ -4,7 +4,7 @@ import { ServiceType, stackIntercept } from '@protobuf-ts/runtime-rpc';
|
|
|
4
4
|
import axios, { AxiosHeaders } from 'axios';
|
|
5
5
|
export { AxiosError } from 'axios';
|
|
6
6
|
import { TwirpFetchTransport } from '@protobuf-ts/twirp-transport';
|
|
7
|
-
import { ReplaySubject, BehaviorSubject, map as map$2, takeWhile, distinctUntilChanged as distinctUntilChanged$1, distinctUntilKeyChanged, Observable, debounceTime, concatMap, from, shareReplay, merge,
|
|
7
|
+
import { ReplaySubject, BehaviorSubject, map as map$2, takeWhile, distinctUntilChanged as distinctUntilChanged$1, distinctUntilKeyChanged, combineLatest, Observable, debounceTime, concatMap, from, shareReplay, merge, filter, pairwise, tap, debounce, timer } from 'rxjs';
|
|
8
8
|
import * as SDP from 'sdp-transform';
|
|
9
9
|
import { UAParser } from 'ua-parser-js';
|
|
10
10
|
import WebSocket from 'isomorphic-ws';
|
|
@@ -5839,204 +5839,6 @@ const createSignalClient = (options) => {
|
|
|
5839
5839
|
return new SignalServerClient(transport);
|
|
5840
5840
|
};
|
|
5841
5841
|
|
|
5842
|
-
/**
|
|
5843
|
-
* Checks whether we are using React Native
|
|
5844
|
-
*/
|
|
5845
|
-
const isReactNative = () => {
|
|
5846
|
-
var _a;
|
|
5847
|
-
if (typeof navigator === 'undefined')
|
|
5848
|
-
return false;
|
|
5849
|
-
return ((_a = navigator.product) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === 'reactnative';
|
|
5850
|
-
};
|
|
5851
|
-
|
|
5852
|
-
const getRtpMap = (line) => {
|
|
5853
|
-
// Example: a=rtpmap:110 opus/48000/2
|
|
5854
|
-
const rtpRegex = /^a=rtpmap:(\d*) ([\w\-.]*)(?:\s*\/(\d*)(?:\s*\/(\S*))?)?/;
|
|
5855
|
-
// The first captured group is the payload type number, the second captured group is the encoding name, the third captured group is the clock rate, and the fourth captured group is any additional parameters.
|
|
5856
|
-
const rtpMatch = rtpRegex.exec(line);
|
|
5857
|
-
if (rtpMatch) {
|
|
5858
|
-
return {
|
|
5859
|
-
original: rtpMatch[0],
|
|
5860
|
-
payload: rtpMatch[1],
|
|
5861
|
-
codec: rtpMatch[2],
|
|
5862
|
-
};
|
|
5863
|
-
}
|
|
5864
|
-
};
|
|
5865
|
-
const getFmtp = (line) => {
|
|
5866
|
-
// Example: a=fmtp:111 minptime=10; useinbandfec=1
|
|
5867
|
-
const fmtpRegex = /^a=fmtp:(\d*) (.*)/;
|
|
5868
|
-
const fmtpMatch = fmtpRegex.exec(line);
|
|
5869
|
-
// The first captured group is the payload type number, the second captured group is any additional parameters.
|
|
5870
|
-
if (fmtpMatch) {
|
|
5871
|
-
return {
|
|
5872
|
-
original: fmtpMatch[0],
|
|
5873
|
-
payload: fmtpMatch[1],
|
|
5874
|
-
config: fmtpMatch[2],
|
|
5875
|
-
};
|
|
5876
|
-
}
|
|
5877
|
-
};
|
|
5878
|
-
/**
|
|
5879
|
-
* gets the media section for the specified media type.
|
|
5880
|
-
* The media section contains the media type, port, codec, and payload type.
|
|
5881
|
-
* Example: m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5882
|
-
*/
|
|
5883
|
-
const getMedia = (line, mediaType) => {
|
|
5884
|
-
const regex = new RegExp(`(m=${mediaType} \\d+ [\\w/]+) ([\\d\\s]+)`);
|
|
5885
|
-
const match = regex.exec(line);
|
|
5886
|
-
if (match) {
|
|
5887
|
-
return {
|
|
5888
|
-
original: match[0],
|
|
5889
|
-
mediaWithPorts: match[1],
|
|
5890
|
-
codecOrder: match[2],
|
|
5891
|
-
};
|
|
5892
|
-
}
|
|
5893
|
-
};
|
|
5894
|
-
const getMediaSection = (sdp, mediaType) => {
|
|
5895
|
-
let media;
|
|
5896
|
-
const rtpMap = [];
|
|
5897
|
-
const fmtp = [];
|
|
5898
|
-
let isTheRequiredMediaSection = false;
|
|
5899
|
-
sdp.split(/(\r\n|\r|\n)/).forEach((line) => {
|
|
5900
|
-
const isValidLine = /^([a-z])=(.*)/.test(line);
|
|
5901
|
-
if (!isValidLine)
|
|
5902
|
-
return;
|
|
5903
|
-
/*
|
|
5904
|
-
NOTE: according to https://www.rfc-editor.org/rfc/rfc8866.pdf
|
|
5905
|
-
Each media description starts with an "m=" line and continues to the next media description or the end of the whole session description, whichever comes first
|
|
5906
|
-
*/
|
|
5907
|
-
const type = line[0];
|
|
5908
|
-
if (type === 'm') {
|
|
5909
|
-
const _media = getMedia(line, mediaType);
|
|
5910
|
-
isTheRequiredMediaSection = !!_media;
|
|
5911
|
-
if (_media) {
|
|
5912
|
-
media = _media;
|
|
5913
|
-
}
|
|
5914
|
-
}
|
|
5915
|
-
else if (isTheRequiredMediaSection && type === 'a') {
|
|
5916
|
-
const rtpMapLine = getRtpMap(line);
|
|
5917
|
-
const fmtpLine = getFmtp(line);
|
|
5918
|
-
if (rtpMapLine) {
|
|
5919
|
-
rtpMap.push(rtpMapLine);
|
|
5920
|
-
}
|
|
5921
|
-
else if (fmtpLine) {
|
|
5922
|
-
fmtp.push(fmtpLine);
|
|
5923
|
-
}
|
|
5924
|
-
}
|
|
5925
|
-
});
|
|
5926
|
-
if (media) {
|
|
5927
|
-
return {
|
|
5928
|
-
media,
|
|
5929
|
-
rtpMap,
|
|
5930
|
-
fmtp,
|
|
5931
|
-
};
|
|
5932
|
-
}
|
|
5933
|
-
};
|
|
5934
|
-
/**
|
|
5935
|
-
* Returns a string of codec IDs with the preferred codec ID in front of the other codec IDs.
|
|
5936
|
-
* It is used to ensure that a preferred codec is used when decoding a media stream.
|
|
5937
|
-
* Example: Suppose we want to prefer VP8 which has id 96
|
|
5938
|
-
* 1. If codec order is 100 101 96 97 35 36 102 125 127
|
|
5939
|
-
* 2. The function returns 96 100 101 97 35 36 102 125 127
|
|
5940
|
-
*/
|
|
5941
|
-
const moveCodecToFront = (codecOrder, preferredCodecId) => {
|
|
5942
|
-
const codecIds = codecOrder.split(' ');
|
|
5943
|
-
const index = codecIds.indexOf(preferredCodecId);
|
|
5944
|
-
if (index > -1) {
|
|
5945
|
-
codecIds.splice(index, 1);
|
|
5946
|
-
codecIds.unshift(preferredCodecId);
|
|
5947
|
-
}
|
|
5948
|
-
return codecIds.join(' ');
|
|
5949
|
-
};
|
|
5950
|
-
/**
|
|
5951
|
-
* Returns a string of codec IDs with the given codec ID removed
|
|
5952
|
-
* It is used to ensure that a codec is disabled when processing a media stream.
|
|
5953
|
-
* Example: Suppose we want to prefer RED which has id 63
|
|
5954
|
-
* 1. If codec order is 111 63 103 104 9 102 0 8 106 105 13 110 112 113 126
|
|
5955
|
-
* 2. The function returns 111 103 104 9 102 0 8 106 105 13 110 112 113 126
|
|
5956
|
-
*/
|
|
5957
|
-
const removeCodecFromOrder = (codecOrder, codecIdToRemove) => {
|
|
5958
|
-
const codecIds = codecOrder.split(' ');
|
|
5959
|
-
return codecIds.filter((codecID) => codecID !== codecIdToRemove).join(' ');
|
|
5960
|
-
};
|
|
5961
|
-
/**
|
|
5962
|
-
* Returns an SDP with the preferred codec in front of the other codecs.
|
|
5963
|
-
* Example: Suppose we want to prefer VP8
|
|
5964
|
-
* 1. find video media specification m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5965
|
-
* 2. look for specified codec (VP8) a=rtpmap:96 VP8/90000
|
|
5966
|
-
* 3. extract 96 as an identifier of VP8
|
|
5967
|
-
* 4. move 96 to the front
|
|
5968
|
-
* 5. now media looks like this: m=video 9 UDP/TLS/RTP/SAVPF 96 100 101 97 35 36 102 125 127
|
|
5969
|
-
*/
|
|
5970
|
-
const setPreferredCodec = (sdp, mediaType, preferredCodec) => {
|
|
5971
|
-
const section = getMediaSection(sdp, mediaType);
|
|
5972
|
-
if (!section)
|
|
5973
|
-
return sdp;
|
|
5974
|
-
const rtpMap = section.rtpMap.find((r) => r.codec.toLowerCase() === preferredCodec.toLowerCase());
|
|
5975
|
-
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
5976
|
-
if (!codecId)
|
|
5977
|
-
return sdp;
|
|
5978
|
-
const newCodecOrder = moveCodecToFront(section.media.codecOrder, codecId);
|
|
5979
|
-
return sdp.replace(section.media.original, `${section.media.mediaWithPorts} ${newCodecOrder}`);
|
|
5980
|
-
};
|
|
5981
|
-
/**
|
|
5982
|
-
* Returns an SDP with the specified codec removed.
|
|
5983
|
-
* Example: Suppose we want to remove RED
|
|
5984
|
-
* 1. find audio media specification m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5985
|
-
* 2. look for specified codec (RED) a=rtpmap:127 red/90000
|
|
5986
|
-
* 3. extract 127 as an identifier of RED
|
|
5987
|
-
* 4. remove 127 from the codec order
|
|
5988
|
-
* 5. remove a=rtpmap:127 red/90000
|
|
5989
|
-
* 6. remove a=fmtp:127 ...
|
|
5990
|
-
*/
|
|
5991
|
-
const removeCodec = (sdp, mediaType, codecToRemove) => {
|
|
5992
|
-
const section = getMediaSection(sdp, mediaType);
|
|
5993
|
-
const mediaSection = section === null || section === void 0 ? void 0 : section.media;
|
|
5994
|
-
if (!mediaSection) {
|
|
5995
|
-
return sdp;
|
|
5996
|
-
}
|
|
5997
|
-
const rtpMap = section === null || section === void 0 ? void 0 : section.rtpMap.find((r) => r.codec.toLowerCase() === codecToRemove.toLowerCase());
|
|
5998
|
-
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
5999
|
-
if (!codecId) {
|
|
6000
|
-
return sdp;
|
|
6001
|
-
}
|
|
6002
|
-
const newCodecOrder = removeCodecFromOrder(mediaSection.codecOrder, codecId);
|
|
6003
|
-
const fmtp = section === null || section === void 0 ? void 0 : section.fmtp.find((f) => f.payload === codecId);
|
|
6004
|
-
return sdp
|
|
6005
|
-
.replace(mediaSection.original, `${mediaSection.mediaWithPorts} ${newCodecOrder}`)
|
|
6006
|
-
.replace(new RegExp(`${rtpMap.original}[\r\n]+`), '') // remove the corresponding rtpmap line
|
|
6007
|
-
.replace((fmtp === null || fmtp === void 0 ? void 0 : fmtp.original) ? new RegExp(`${fmtp === null || fmtp === void 0 ? void 0 : fmtp.original}[\r\n]+`) : '', ''); // remove the corresponding fmtp line
|
|
6008
|
-
};
|
|
6009
|
-
/**
|
|
6010
|
-
* Gets the fmtp line corresponding to opus
|
|
6011
|
-
*/
|
|
6012
|
-
const getOpusFmtp = (sdp) => {
|
|
6013
|
-
const section = getMediaSection(sdp, 'audio');
|
|
6014
|
-
const rtpMap = section === null || section === void 0 ? void 0 : section.rtpMap.find((r) => r.codec.toLowerCase() === 'opus');
|
|
6015
|
-
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
6016
|
-
if (codecId) {
|
|
6017
|
-
return section === null || section === void 0 ? void 0 : section.fmtp.find((f) => f.payload === codecId);
|
|
6018
|
-
}
|
|
6019
|
-
};
|
|
6020
|
-
/**
|
|
6021
|
-
* Returns an SDP with DTX enabled or disabled.
|
|
6022
|
-
*/
|
|
6023
|
-
const toggleDtx = (sdp, enable) => {
|
|
6024
|
-
const opusFmtp = getOpusFmtp(sdp);
|
|
6025
|
-
if (opusFmtp) {
|
|
6026
|
-
const matchDtx = /usedtx=(\d)/.exec(opusFmtp.config);
|
|
6027
|
-
const requiredDtxConfig = `usedtx=${enable ? '1' : '0'}`;
|
|
6028
|
-
if (matchDtx) {
|
|
6029
|
-
const newFmtp = opusFmtp.original.replace(/usedtx=(\d)/, requiredDtxConfig);
|
|
6030
|
-
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6031
|
-
}
|
|
6032
|
-
else {
|
|
6033
|
-
const newFmtp = `${opusFmtp.original};${requiredDtxConfig}`;
|
|
6034
|
-
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6035
|
-
}
|
|
6036
|
-
}
|
|
6037
|
-
return sdp;
|
|
6038
|
-
};
|
|
6039
|
-
|
|
6040
5842
|
// log levels, sorted by verbosity
|
|
6041
5843
|
const logLevels = Object.freeze({
|
|
6042
5844
|
trace: 0,
|
|
@@ -6128,39 +5930,13 @@ const getPreferredCodecs = (kind, preferredCodec, codecToRemove) => {
|
|
|
6128
5930
|
logger === null || logger === void 0 ? void 0 : logger('info', `Preffered codecs: `, result);
|
|
6129
5931
|
return result;
|
|
6130
5932
|
};
|
|
6131
|
-
const getGenericSdp = (direction
|
|
5933
|
+
const getGenericSdp = (direction) => __awaiter(void 0, void 0, void 0, function* () {
|
|
6132
5934
|
var _a;
|
|
6133
5935
|
const tempPc = new RTCPeerConnection();
|
|
6134
5936
|
tempPc.addTransceiver('video', { direction });
|
|
6135
|
-
// if ('setCodecPreferences' in videoTransceiver) {
|
|
6136
|
-
// const videoCodecPreferences = getPreferredCodecs(
|
|
6137
|
-
// 'audio',
|
|
6138
|
-
// preferredVideoCodec ?? 'vp8',
|
|
6139
|
-
// );
|
|
6140
|
-
// videoTransceiver.setCodecPreferences([...(videoCodecPreferences ?? [])]);
|
|
6141
|
-
// }
|
|
6142
5937
|
tempPc.addTransceiver('audio', { direction });
|
|
6143
|
-
const preferredAudioCodec = isRedEnabled ? 'red' : 'opus';
|
|
6144
|
-
const audioCodecToRemove = !isRedEnabled ? 'red' : undefined;
|
|
6145
|
-
// if ('setCodecPreferences' in audioTransceiver) {
|
|
6146
|
-
// const audioCodecPreferences = getPreferredCodecs(
|
|
6147
|
-
// 'audio',
|
|
6148
|
-
// preferredAudioCodec,
|
|
6149
|
-
// // audioCodecToRemove,
|
|
6150
|
-
// );
|
|
6151
|
-
// audioTransceiver.setCodecPreferences([...(audioCodecPreferences || [])]);
|
|
6152
|
-
// }
|
|
6153
5938
|
const offer = yield tempPc.createOffer();
|
|
6154
5939
|
let sdp = (_a = offer.sdp) !== null && _a !== void 0 ? _a : '';
|
|
6155
|
-
if (isReactNative()) {
|
|
6156
|
-
if (preferredVideoCodec) {
|
|
6157
|
-
sdp = setPreferredCodec(sdp, 'video', preferredVideoCodec);
|
|
6158
|
-
}
|
|
6159
|
-
sdp = setPreferredCodec(sdp, 'audio', preferredAudioCodec);
|
|
6160
|
-
if (audioCodecToRemove) {
|
|
6161
|
-
sdp = removeCodec(sdp, 'audio', audioCodecToRemove);
|
|
6162
|
-
}
|
|
6163
|
-
}
|
|
6164
5940
|
tempPc.getTransceivers().forEach((t) => {
|
|
6165
5941
|
t.stop();
|
|
6166
5942
|
});
|
|
@@ -6269,6 +6045,16 @@ function getIceCandidate(candidate) {
|
|
|
6269
6045
|
}
|
|
6270
6046
|
}
|
|
6271
6047
|
|
|
6048
|
+
/**
|
|
6049
|
+
* Checks whether we are using React Native
|
|
6050
|
+
*/
|
|
6051
|
+
const isReactNative = () => {
|
|
6052
|
+
var _a;
|
|
6053
|
+
if (typeof navigator === 'undefined')
|
|
6054
|
+
return false;
|
|
6055
|
+
return ((_a = navigator.product) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === 'reactnative';
|
|
6056
|
+
};
|
|
6057
|
+
|
|
6272
6058
|
let sdkInfo;
|
|
6273
6059
|
let osInfo;
|
|
6274
6060
|
let deviceInfo;
|
|
@@ -6465,6 +6251,119 @@ const muteTypeToTrackType = (muteType) => {
|
|
|
6465
6251
|
}
|
|
6466
6252
|
};
|
|
6467
6253
|
|
|
6254
|
+
const getRtpMap = (line) => {
|
|
6255
|
+
// Example: a=rtpmap:110 opus/48000/2
|
|
6256
|
+
const rtpRegex = /^a=rtpmap:(\d*) ([\w\-.]*)(?:\s*\/(\d*)(?:\s*\/(\S*))?)?/;
|
|
6257
|
+
// The first captured group is the payload type number, the second captured group is the encoding name, the third captured group is the clock rate, and the fourth captured group is any additional parameters.
|
|
6258
|
+
const rtpMatch = rtpRegex.exec(line);
|
|
6259
|
+
if (rtpMatch) {
|
|
6260
|
+
return {
|
|
6261
|
+
original: rtpMatch[0],
|
|
6262
|
+
payload: rtpMatch[1],
|
|
6263
|
+
codec: rtpMatch[2],
|
|
6264
|
+
};
|
|
6265
|
+
}
|
|
6266
|
+
};
|
|
6267
|
+
const getFmtp = (line) => {
|
|
6268
|
+
// Example: a=fmtp:111 minptime=10; useinbandfec=1
|
|
6269
|
+
const fmtpRegex = /^a=fmtp:(\d*) (.*)/;
|
|
6270
|
+
const fmtpMatch = fmtpRegex.exec(line);
|
|
6271
|
+
// The first captured group is the payload type number, the second captured group is any additional parameters.
|
|
6272
|
+
if (fmtpMatch) {
|
|
6273
|
+
return {
|
|
6274
|
+
original: fmtpMatch[0],
|
|
6275
|
+
payload: fmtpMatch[1],
|
|
6276
|
+
config: fmtpMatch[2],
|
|
6277
|
+
};
|
|
6278
|
+
}
|
|
6279
|
+
};
|
|
6280
|
+
/**
|
|
6281
|
+
* gets the media section for the specified media type.
|
|
6282
|
+
* The media section contains the media type, port, codec, and payload type.
|
|
6283
|
+
* Example: m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
6284
|
+
*/
|
|
6285
|
+
const getMedia = (line, mediaType) => {
|
|
6286
|
+
const regex = new RegExp(`(m=${mediaType} \\d+ [\\w/]+) ([\\d\\s]+)`);
|
|
6287
|
+
const match = regex.exec(line);
|
|
6288
|
+
if (match) {
|
|
6289
|
+
return {
|
|
6290
|
+
original: match[0],
|
|
6291
|
+
mediaWithPorts: match[1],
|
|
6292
|
+
codecOrder: match[2],
|
|
6293
|
+
};
|
|
6294
|
+
}
|
|
6295
|
+
};
|
|
6296
|
+
const getMediaSection = (sdp, mediaType) => {
|
|
6297
|
+
let media;
|
|
6298
|
+
const rtpMap = [];
|
|
6299
|
+
const fmtp = [];
|
|
6300
|
+
let isTheRequiredMediaSection = false;
|
|
6301
|
+
sdp.split(/(\r\n|\r|\n)/).forEach((line) => {
|
|
6302
|
+
const isValidLine = /^([a-z])=(.*)/.test(line);
|
|
6303
|
+
if (!isValidLine)
|
|
6304
|
+
return;
|
|
6305
|
+
/*
|
|
6306
|
+
NOTE: according to https://www.rfc-editor.org/rfc/rfc8866.pdf
|
|
6307
|
+
Each media description starts with an "m=" line and continues to the next media description or the end of the whole session description, whichever comes first
|
|
6308
|
+
*/
|
|
6309
|
+
const type = line[0];
|
|
6310
|
+
if (type === 'm') {
|
|
6311
|
+
const _media = getMedia(line, mediaType);
|
|
6312
|
+
isTheRequiredMediaSection = !!_media;
|
|
6313
|
+
if (_media) {
|
|
6314
|
+
media = _media;
|
|
6315
|
+
}
|
|
6316
|
+
}
|
|
6317
|
+
else if (isTheRequiredMediaSection && type === 'a') {
|
|
6318
|
+
const rtpMapLine = getRtpMap(line);
|
|
6319
|
+
const fmtpLine = getFmtp(line);
|
|
6320
|
+
if (rtpMapLine) {
|
|
6321
|
+
rtpMap.push(rtpMapLine);
|
|
6322
|
+
}
|
|
6323
|
+
else if (fmtpLine) {
|
|
6324
|
+
fmtp.push(fmtpLine);
|
|
6325
|
+
}
|
|
6326
|
+
}
|
|
6327
|
+
});
|
|
6328
|
+
if (media) {
|
|
6329
|
+
return {
|
|
6330
|
+
media,
|
|
6331
|
+
rtpMap,
|
|
6332
|
+
fmtp,
|
|
6333
|
+
};
|
|
6334
|
+
}
|
|
6335
|
+
};
|
|
6336
|
+
/**
|
|
6337
|
+
* Gets the fmtp line corresponding to opus
|
|
6338
|
+
*/
|
|
6339
|
+
const getOpusFmtp = (sdp) => {
|
|
6340
|
+
const section = getMediaSection(sdp, 'audio');
|
|
6341
|
+
const rtpMap = section === null || section === void 0 ? void 0 : section.rtpMap.find((r) => r.codec.toLowerCase() === 'opus');
|
|
6342
|
+
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
6343
|
+
if (codecId) {
|
|
6344
|
+
return section === null || section === void 0 ? void 0 : section.fmtp.find((f) => f.payload === codecId);
|
|
6345
|
+
}
|
|
6346
|
+
};
|
|
6347
|
+
/**
|
|
6348
|
+
* Returns an SDP with DTX enabled or disabled.
|
|
6349
|
+
*/
|
|
6350
|
+
const toggleDtx = (sdp, enable) => {
|
|
6351
|
+
const opusFmtp = getOpusFmtp(sdp);
|
|
6352
|
+
if (opusFmtp) {
|
|
6353
|
+
const matchDtx = /usedtx=(\d)/.exec(opusFmtp.config);
|
|
6354
|
+
const requiredDtxConfig = `usedtx=${enable ? '1' : '0'}`;
|
|
6355
|
+
if (matchDtx) {
|
|
6356
|
+
const newFmtp = opusFmtp.original.replace(/usedtx=(\d)/, requiredDtxConfig);
|
|
6357
|
+
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6358
|
+
}
|
|
6359
|
+
else {
|
|
6360
|
+
const newFmtp = `${opusFmtp.original};${requiredDtxConfig}`;
|
|
6361
|
+
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6362
|
+
}
|
|
6363
|
+
}
|
|
6364
|
+
return sdp;
|
|
6365
|
+
};
|
|
6366
|
+
|
|
6468
6367
|
const logger$3 = getLogger(['Publisher']);
|
|
6469
6368
|
/**
|
|
6470
6369
|
* The `Publisher` is responsible for publishing/unpublishing media streams to/from the SFU
|
|
@@ -9614,15 +9513,26 @@ class DynascaleManager {
|
|
|
9614
9513
|
}
|
|
9615
9514
|
});
|
|
9616
9515
|
});
|
|
9617
|
-
const sinkIdSubscription =
|
|
9618
|
-
|
|
9516
|
+
const sinkIdSubscription = combineLatest([
|
|
9517
|
+
this.call.state.localParticipant$,
|
|
9518
|
+
this.call.speaker.state.selectedDevice$,
|
|
9519
|
+
]).subscribe(([p, selectedDevice]) => {
|
|
9520
|
+
var _a;
|
|
9521
|
+
const deviceId = ((_a = getSdkInfo()) === null || _a === void 0 ? void 0 : _a.type) === SdkType.REACT
|
|
9522
|
+
? p === null || p === void 0 ? void 0 : p.audioOutputDeviceId
|
|
9523
|
+
: selectedDevice;
|
|
9524
|
+
if ('setSinkId' in audioElement) {
|
|
9619
9525
|
// @ts-expect-error setSinkId is not yet in the lib
|
|
9620
|
-
audioElement.setSinkId(
|
|
9526
|
+
audioElement.setSinkId(deviceId);
|
|
9621
9527
|
}
|
|
9622
9528
|
});
|
|
9529
|
+
const volumeSubscription = this.call.speaker.state.volume$.subscribe((volume) => {
|
|
9530
|
+
audioElement.volume = volume;
|
|
9531
|
+
});
|
|
9623
9532
|
audioElement.autoplay = true;
|
|
9624
9533
|
return () => {
|
|
9625
9534
|
sinkIdSubscription.unsubscribe();
|
|
9535
|
+
volumeSubscription.unsubscribe();
|
|
9626
9536
|
updateMediaStreamSubscription.unsubscribe();
|
|
9627
9537
|
};
|
|
9628
9538
|
};
|
|
@@ -9750,24 +9660,145 @@ class CallTypesRegistry {
|
|
|
9750
9660
|
}, {});
|
|
9751
9661
|
}
|
|
9752
9662
|
}
|
|
9753
|
-
/**
|
|
9754
|
-
* The default call types registry.
|
|
9755
|
-
* You can use this instance to dynamically register and unregister call types.
|
|
9756
|
-
*/
|
|
9757
|
-
const CallTypes = new CallTypesRegistry([
|
|
9758
|
-
new CallType('default', {
|
|
9759
|
-
sortParticipantsBy: defaultSortPreset,
|
|
9760
|
-
}),
|
|
9761
|
-
new CallType('development', {
|
|
9762
|
-
sortParticipantsBy: defaultSortPreset,
|
|
9763
|
-
}),
|
|
9764
|
-
new CallType('livestream', {
|
|
9765
|
-
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9766
|
-
}),
|
|
9767
|
-
new CallType('audio_room', {
|
|
9768
|
-
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9769
|
-
}),
|
|
9770
|
-
]);
|
|
9663
|
+
/**
|
|
9664
|
+
* The default call types registry.
|
|
9665
|
+
* You can use this instance to dynamically register and unregister call types.
|
|
9666
|
+
*/
|
|
9667
|
+
const CallTypes = new CallTypesRegistry([
|
|
9668
|
+
new CallType('default', {
|
|
9669
|
+
sortParticipantsBy: defaultSortPreset,
|
|
9670
|
+
}),
|
|
9671
|
+
new CallType('development', {
|
|
9672
|
+
sortParticipantsBy: defaultSortPreset,
|
|
9673
|
+
}),
|
|
9674
|
+
new CallType('livestream', {
|
|
9675
|
+
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9676
|
+
}),
|
|
9677
|
+
new CallType('audio_room', {
|
|
9678
|
+
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9679
|
+
}),
|
|
9680
|
+
]);
|
|
9681
|
+
|
|
9682
|
+
class InputMediaDeviceManagerState {
|
|
9683
|
+
constructor(disableMode = 'stop-tracks') {
|
|
9684
|
+
this.disableMode = disableMode;
|
|
9685
|
+
this.statusSubject = new BehaviorSubject(undefined);
|
|
9686
|
+
this.mediaStreamSubject = new BehaviorSubject(undefined);
|
|
9687
|
+
this.selectedDeviceSubject = new BehaviorSubject(undefined);
|
|
9688
|
+
/**
|
|
9689
|
+
* Gets the current value of an observable, or undefined if the observable has
|
|
9690
|
+
* not emitted a value yet.
|
|
9691
|
+
*
|
|
9692
|
+
* @param observable$ the observable to get the value from.
|
|
9693
|
+
*/
|
|
9694
|
+
this.getCurrentValue = getCurrentValue;
|
|
9695
|
+
/**
|
|
9696
|
+
* Updates the value of the provided Subject.
|
|
9697
|
+
* An `update` can either be a new value or a function which takes
|
|
9698
|
+
* the current value and returns a new value.
|
|
9699
|
+
*
|
|
9700
|
+
* @internal
|
|
9701
|
+
*
|
|
9702
|
+
* @param subject the subject to update.
|
|
9703
|
+
* @param update the update to apply to the subject.
|
|
9704
|
+
* @return the updated value.
|
|
9705
|
+
*/
|
|
9706
|
+
this.setCurrentValue = setCurrentValue;
|
|
9707
|
+
this.mediaStream$ = this.mediaStreamSubject.asObservable();
|
|
9708
|
+
this.selectedDevice$ = this.selectedDeviceSubject
|
|
9709
|
+
.asObservable()
|
|
9710
|
+
.pipe(distinctUntilChanged$1());
|
|
9711
|
+
this.status$ = this.statusSubject
|
|
9712
|
+
.asObservable()
|
|
9713
|
+
.pipe(distinctUntilChanged$1());
|
|
9714
|
+
}
|
|
9715
|
+
/**
|
|
9716
|
+
* The device status
|
|
9717
|
+
*/
|
|
9718
|
+
get status() {
|
|
9719
|
+
return this.getCurrentValue(this.status$);
|
|
9720
|
+
}
|
|
9721
|
+
/**
|
|
9722
|
+
* The currently selected device
|
|
9723
|
+
*/
|
|
9724
|
+
get selectedDevice() {
|
|
9725
|
+
return this.getCurrentValue(this.selectedDevice$);
|
|
9726
|
+
}
|
|
9727
|
+
/**
|
|
9728
|
+
* The current media stream, or `undefined` if the device is currently disabled.
|
|
9729
|
+
*/
|
|
9730
|
+
get mediaStream() {
|
|
9731
|
+
return this.getCurrentValue(this.mediaStream$);
|
|
9732
|
+
}
|
|
9733
|
+
/**
|
|
9734
|
+
* @internal
|
|
9735
|
+
* @param status
|
|
9736
|
+
*/
|
|
9737
|
+
setStatus(status) {
|
|
9738
|
+
this.setCurrentValue(this.statusSubject, status);
|
|
9739
|
+
}
|
|
9740
|
+
/**
|
|
9741
|
+
* @internal
|
|
9742
|
+
* @param stream
|
|
9743
|
+
*/
|
|
9744
|
+
setMediaStream(stream) {
|
|
9745
|
+
this.setCurrentValue(this.mediaStreamSubject, stream);
|
|
9746
|
+
if (stream) {
|
|
9747
|
+
this.setDevice(this.getDeviceIdFromStream(stream));
|
|
9748
|
+
}
|
|
9749
|
+
}
|
|
9750
|
+
/**
|
|
9751
|
+
* @internal
|
|
9752
|
+
* @param stream
|
|
9753
|
+
*/
|
|
9754
|
+
setDevice(deviceId) {
|
|
9755
|
+
this.setCurrentValue(this.selectedDeviceSubject, deviceId);
|
|
9756
|
+
}
|
|
9757
|
+
}
|
|
9758
|
+
|
|
9759
|
+
class CameraManagerState extends InputMediaDeviceManagerState {
|
|
9760
|
+
constructor() {
|
|
9761
|
+
super('stop-tracks');
|
|
9762
|
+
this.directionSubject = new BehaviorSubject(undefined);
|
|
9763
|
+
this.direction$ = this.directionSubject
|
|
9764
|
+
.asObservable()
|
|
9765
|
+
.pipe(distinctUntilChanged$1());
|
|
9766
|
+
}
|
|
9767
|
+
/**
|
|
9768
|
+
* The preferred camera direction
|
|
9769
|
+
* front - means the camera facing the user
|
|
9770
|
+
* back - means the camera facing the environment
|
|
9771
|
+
*/
|
|
9772
|
+
get direction() {
|
|
9773
|
+
return this.getCurrentValue(this.direction$);
|
|
9774
|
+
}
|
|
9775
|
+
/**
|
|
9776
|
+
* @internal
|
|
9777
|
+
*/
|
|
9778
|
+
setDirection(direction) {
|
|
9779
|
+
this.setCurrentValue(this.directionSubject, direction);
|
|
9780
|
+
}
|
|
9781
|
+
/**
|
|
9782
|
+
* @internal
|
|
9783
|
+
*/
|
|
9784
|
+
setMediaStream(stream) {
|
|
9785
|
+
var _a;
|
|
9786
|
+
super.setMediaStream(stream);
|
|
9787
|
+
if (stream) {
|
|
9788
|
+
// RN getSettings() doesn't return facingMode, so we don't verify camera direction
|
|
9789
|
+
const direction = isReactNative()
|
|
9790
|
+
? this.direction
|
|
9791
|
+
: ((_a = stream.getVideoTracks()[0]) === null || _a === void 0 ? void 0 : _a.getSettings().facingMode) === 'environment'
|
|
9792
|
+
? 'back'
|
|
9793
|
+
: 'front';
|
|
9794
|
+
this.setDirection(direction);
|
|
9795
|
+
}
|
|
9796
|
+
}
|
|
9797
|
+
getDeviceIdFromStream(stream) {
|
|
9798
|
+
var _a;
|
|
9799
|
+
return (_a = stream.getVideoTracks()[0]) === null || _a === void 0 ? void 0 : _a.getSettings().deviceId;
|
|
9800
|
+
}
|
|
9801
|
+
}
|
|
9771
9802
|
|
|
9772
9803
|
const getDevices = (constraints) => {
|
|
9773
9804
|
return new Observable((subscriber) => {
|
|
@@ -10164,127 +10195,6 @@ class InputMediaDeviceManager {
|
|
|
10164
10195
|
}
|
|
10165
10196
|
}
|
|
10166
10197
|
|
|
10167
|
-
class InputMediaDeviceManagerState {
|
|
10168
|
-
constructor(disableMode = 'stop-tracks') {
|
|
10169
|
-
this.disableMode = disableMode;
|
|
10170
|
-
this.statusSubject = new BehaviorSubject(undefined);
|
|
10171
|
-
this.mediaStreamSubject = new BehaviorSubject(undefined);
|
|
10172
|
-
this.selectedDeviceSubject = new BehaviorSubject(undefined);
|
|
10173
|
-
/**
|
|
10174
|
-
* Gets the current value of an observable, or undefined if the observable has
|
|
10175
|
-
* not emitted a value yet.
|
|
10176
|
-
*
|
|
10177
|
-
* @param observable$ the observable to get the value from.
|
|
10178
|
-
*/
|
|
10179
|
-
this.getCurrentValue = getCurrentValue;
|
|
10180
|
-
/**
|
|
10181
|
-
* Updates the value of the provided Subject.
|
|
10182
|
-
* An `update` can either be a new value or a function which takes
|
|
10183
|
-
* the current value and returns a new value.
|
|
10184
|
-
*
|
|
10185
|
-
* @internal
|
|
10186
|
-
*
|
|
10187
|
-
* @param subject the subject to update.
|
|
10188
|
-
* @param update the update to apply to the subject.
|
|
10189
|
-
* @return the updated value.
|
|
10190
|
-
*/
|
|
10191
|
-
this.setCurrentValue = setCurrentValue;
|
|
10192
|
-
this.mediaStream$ = this.mediaStreamSubject.asObservable();
|
|
10193
|
-
this.selectedDevice$ = this.selectedDeviceSubject
|
|
10194
|
-
.asObservable()
|
|
10195
|
-
.pipe(distinctUntilChanged$1());
|
|
10196
|
-
this.status$ = this.statusSubject
|
|
10197
|
-
.asObservable()
|
|
10198
|
-
.pipe(distinctUntilChanged$1());
|
|
10199
|
-
}
|
|
10200
|
-
/**
|
|
10201
|
-
* The device status
|
|
10202
|
-
*/
|
|
10203
|
-
get status() {
|
|
10204
|
-
return this.getCurrentValue(this.status$);
|
|
10205
|
-
}
|
|
10206
|
-
/**
|
|
10207
|
-
* The currently selected device
|
|
10208
|
-
*/
|
|
10209
|
-
get selectedDevice() {
|
|
10210
|
-
return this.getCurrentValue(this.selectedDevice$);
|
|
10211
|
-
}
|
|
10212
|
-
/**
|
|
10213
|
-
* The current media stream, or `undefined` if the device is currently disabled.
|
|
10214
|
-
*/
|
|
10215
|
-
get mediaStream() {
|
|
10216
|
-
return this.getCurrentValue(this.mediaStream$);
|
|
10217
|
-
}
|
|
10218
|
-
/**
|
|
10219
|
-
* @internal
|
|
10220
|
-
* @param status
|
|
10221
|
-
*/
|
|
10222
|
-
setStatus(status) {
|
|
10223
|
-
this.setCurrentValue(this.statusSubject, status);
|
|
10224
|
-
}
|
|
10225
|
-
/**
|
|
10226
|
-
* @internal
|
|
10227
|
-
* @param stream
|
|
10228
|
-
*/
|
|
10229
|
-
setMediaStream(stream) {
|
|
10230
|
-
this.setCurrentValue(this.mediaStreamSubject, stream);
|
|
10231
|
-
if (stream) {
|
|
10232
|
-
this.setDevice(this.getDeviceIdFromStream(stream));
|
|
10233
|
-
}
|
|
10234
|
-
}
|
|
10235
|
-
/**
|
|
10236
|
-
* @internal
|
|
10237
|
-
* @param stream
|
|
10238
|
-
*/
|
|
10239
|
-
setDevice(deviceId) {
|
|
10240
|
-
this.setCurrentValue(this.selectedDeviceSubject, deviceId);
|
|
10241
|
-
}
|
|
10242
|
-
}
|
|
10243
|
-
|
|
10244
|
-
class CameraManagerState extends InputMediaDeviceManagerState {
    constructor() {
        // Camera tracks are fully stopped (not just disabled) when muted.
        super('stop-tracks');
        this.directionSubject = new BehaviorSubject(undefined);
        this.direction$ = this.directionSubject.asObservable().pipe(distinctUntilChanged$1());
    }
    /**
     * The preferred camera direction:
     * 'front' — the camera facing the user,
     * 'back'  — the camera facing the environment.
     */
    get direction() {
        return this.getCurrentValue(this.direction$);
    }
    /**
     * @internal
     */
    setDirection(direction) {
        this.setCurrentValue(this.directionSubject, direction);
    }
    /**
     * @internal
     */
    setMediaStream(stream) {
        super.setMediaStream(stream);
        if (!stream) {
            return;
        }
        let detectedDirection;
        if (isReactNative()) {
            // RN's getSettings() doesn't report facingMode, so keep the stored value.
            detectedDirection = this.direction;
        } else {
            const facingMode = stream.getVideoTracks()[0]?.getSettings().facingMode;
            detectedDirection = facingMode === 'environment' ? 'back' : 'front';
        }
        this.setDirection(detectedDirection);
    }
    getDeviceIdFromStream(stream) {
        return stream.getVideoTracks()[0]?.getSettings().deviceId;
    }
}
|
|
10287
|
-
|
|
10288
10198
|
class CameraManager extends InputMediaDeviceManager {
|
|
10289
10199
|
constructor(call) {
|
|
10290
10200
|
super(call, new CameraManagerState());
|
|
@@ -10408,6 +10318,116 @@ class MicrophoneManager extends InputMediaDeviceManager {
|
|
|
10408
10318
|
}
|
|
10409
10319
|
}
|
|
10410
10320
|
|
|
10321
|
+
class SpeakerState {
    constructor() {
        // '' denotes the system-default output device; volume defaults to max.
        this.selectedDeviceSubject = new BehaviorSubject('');
        this.volumeSubject = new BehaviorSubject(1);
        /**
         * [Tells if the browser supports audio output change on 'audio' elements](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/setSinkId).
         */
        this.isDeviceSelectionSupported = checkIfAudioOutputChangeSupported();
        // Shared state helpers: read the latest emitted value of an observable,
        // and apply a value (or updater function) to a Subject.
        this.getCurrentValue = getCurrentValue;
        this.setCurrentValue = setCurrentValue;
        this.selectedDevice$ = this.selectedDeviceSubject.asObservable().pipe(distinctUntilChanged$1());
        this.volume$ = this.volumeSubject.asObservable().pipe(distinctUntilChanged$1());
    }
    /**
     * The currently selected output device id ('' means system default).
     *
     * Note: this feature is not supported in React Native
     */
    get selectedDevice() {
        return this.getCurrentValue(this.selectedDevice$);
    }
    /**
     * The currently selected volume.
     *
     * Note: this feature is not supported in React Native
     */
    get volume() {
        return this.getCurrentValue(this.volume$);
    }
    /**
     * @internal
     * @param deviceId
     */
    setDevice(deviceId) {
        this.setCurrentValue(this.selectedDeviceSubject, deviceId);
    }
    /**
     * @internal
     * @param volume
     */
    setVolume(volume) {
        this.setCurrentValue(this.volumeSubject, volume);
    }
}
|
|
10386
|
+
|
|
10387
|
+
class SpeakerManager {
    constructor() {
        this.state = new SpeakerState();
    }
    /**
     * Lists the available audio output devices
     *
     * Note: It prompts the user for a permission to use devices (if not already granted)
     *
     * @returns an Observable that will be updated if a device is connected or disconnected
     */
    listDevices() {
        return getAudioOutputDevices();
    }
    /**
     * Select device
     *
     * Note: this method is not supported in React Native
     *
     * @param deviceId empty string means the system default
     */
    select(deviceId) {
        if (isReactNative()) {
            throw new Error('This feature is not supported in React Native');
        }
        this.state.setDevice(deviceId);
    }
    /**
     * Set the volume of the audio elements
     * @param volume a number between 0 and 1
     *
     * Note: this method is not supported in React Native
     * @throws Error in React Native, or when `volume` is NaN or outside [0, 1].
     */
    setVolume(volume) {
        if (isReactNative()) {
            throw new Error('This feature is not supported in React Native');
        }
        // Fix: the previous `volume && (...)` truthiness guard skipped
        // validation for NaN (falsy), silently storing an unusable volume.
        // Nullish values still bypass validation for backward compatibility;
        // 0 and 1 remain valid as before.
        if (volume != null && (Number.isNaN(volume) || volume < 0 || volume > 1)) {
            throw new Error('Volume must be between 0 and 1');
        }
        this.state.setVolume(volume);
    }
}
|
|
10430
|
+
|
|
10411
10431
|
/**
|
|
10412
10432
|
* An object representation of a `Call`.
|
|
10413
10433
|
*/
|
|
@@ -10791,7 +10811,7 @@ class Call {
|
|
|
10791
10811
|
// prepare a generic SDP and send it to the SFU.
|
|
10792
10812
|
// this is a throw-away SDP that the SFU will use to determine
|
|
10793
10813
|
// the capabilities of the client (codec support, etc.)
|
|
10794
|
-
.then(() => getGenericSdp('recvonly'
|
|
10814
|
+
.then(() => getGenericSdp('recvonly'))
|
|
10795
10815
|
.then((sdp) => {
|
|
10796
10816
|
var _a;
|
|
10797
10817
|
const subscriptions = getCurrentValue(this.trackSubscriptionsSubject);
|
|
@@ -11082,6 +11102,8 @@ class Call {
|
|
|
11082
11102
|
*
|
|
11083
11103
|
*
|
|
11084
11104
|
* @param deviceId the selected device, `undefined` means the user wants to use the system's default audio output
|
|
11105
|
+
*
|
|
11106
|
+
* @deprecated use `call.speaker` instead
|
|
11085
11107
|
*/
|
|
11086
11108
|
this.setAudioOutputDevice = (deviceId) => {
|
|
11087
11109
|
if (!this.sfuClient)
|
|
@@ -11556,6 +11578,7 @@ class Call {
|
|
|
11556
11578
|
this.leaveCallHooks.add(createSubscription(this.trackSubscriptionsSubject.pipe(debounce((v) => timer(v.type)), map$2((v) => v.data)), (subscriptions) => { var _a; return (_a = this.sfuClient) === null || _a === void 0 ? void 0 : _a.updateSubscriptions(subscriptions); }));
|
|
11557
11579
|
this.camera = new CameraManager(this);
|
|
11558
11580
|
this.microphone = new MicrophoneManager(this);
|
|
11581
|
+
this.speaker = new SpeakerManager();
|
|
11559
11582
|
}
|
|
11560
11583
|
registerEffects() {
|
|
11561
11584
|
this.leaveCallHooks.add(
|
|
@@ -12846,7 +12869,7 @@ class WSConnectionFallback {
|
|
|
12846
12869
|
}
|
|
12847
12870
|
}
|
|
12848
12871
|
|
|
12849
|
-
const version = '0.3.
|
|
12872
|
+
// Published library version; must stay in sync with package.json.
const version = '0.3.13';

// Logger scoped with the 'location' tag.
const logger = getLogger(['location']);
// NOTE(review): presumably an endpoint used for location/latency hints when
// selecting an edge server — confirm against the callers of HINT_URL.
const HINT_URL = `https://hint.stream-io-video.com/`;
|