@stream-io/video-client 0.3.11 → 0.3.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.browser.es.js +393 -370
- package/dist/index.browser.es.js.map +1 -1
- package/dist/index.cjs.js +392 -369
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.es.js +393 -370
- package/dist/index.es.js.map +1 -1
- package/dist/src/Call.d.ts +6 -1
- package/dist/src/devices/SpeakerManager.d.ts +28 -0
- package/dist/src/devices/SpeakerState.d.ts +64 -0
- package/dist/src/devices/__tests__/SpeakerManager.test.d.ts +1 -0
- package/dist/src/rtc/codecs.d.ts +1 -1
- package/dist/src/types.d.ts +2 -0
- package/dist/version.d.ts +1 -1
- package/package.json +1 -1
- package/src/Call.ts +13 -8
- package/src/devices/SpeakerManager.ts +50 -0
- package/src/devices/SpeakerState.ts +90 -0
- package/src/devices/__tests__/SpeakerManager.test.ts +66 -0
- package/src/helpers/DynascaleManager.ts +25 -7
- package/src/helpers/__tests__/DynascaleManager.test.ts +30 -2
- package/src/rtc/codecs.ts +1 -35
- package/src/types.ts +2 -0
package/dist/index.browser.es.js
CHANGED
|
@@ -4,7 +4,7 @@ import { ServiceType, stackIntercept } from '@protobuf-ts/runtime-rpc';
|
|
|
4
4
|
import axios, { AxiosHeaders } from 'axios';
|
|
5
5
|
export { AxiosError } from 'axios';
|
|
6
6
|
import { TwirpFetchTransport } from '@protobuf-ts/twirp-transport';
|
|
7
|
-
import { ReplaySubject, BehaviorSubject, map as map$2, takeWhile, distinctUntilChanged as distinctUntilChanged$1, distinctUntilKeyChanged, Observable, debounceTime, concatMap, from, shareReplay, merge,
|
|
7
|
+
import { ReplaySubject, BehaviorSubject, map as map$2, takeWhile, distinctUntilChanged as distinctUntilChanged$1, distinctUntilKeyChanged, combineLatest, Observable, debounceTime, concatMap, from, shareReplay, merge, filter, pairwise, tap, debounce, timer } from 'rxjs';
|
|
8
8
|
import * as SDP from 'sdp-transform';
|
|
9
9
|
import { UAParser } from 'ua-parser-js';
|
|
10
10
|
import WebSocket from 'isomorphic-ws';
|
|
@@ -5836,204 +5836,6 @@ const createSignalClient = (options) => {
|
|
|
5836
5836
|
return new SignalServerClient(transport);
|
|
5837
5837
|
};
|
|
5838
5838
|
|
|
5839
|
-
/**
|
|
5840
|
-
* Checks whether we are using React Native
|
|
5841
|
-
*/
|
|
5842
|
-
const isReactNative = () => {
|
|
5843
|
-
var _a;
|
|
5844
|
-
if (typeof navigator === 'undefined')
|
|
5845
|
-
return false;
|
|
5846
|
-
return ((_a = navigator.product) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === 'reactnative';
|
|
5847
|
-
};
|
|
5848
|
-
|
|
5849
|
-
const getRtpMap = (line) => {
|
|
5850
|
-
// Example: a=rtpmap:110 opus/48000/2
|
|
5851
|
-
const rtpRegex = /^a=rtpmap:(\d*) ([\w\-.]*)(?:\s*\/(\d*)(?:\s*\/(\S*))?)?/;
|
|
5852
|
-
// The first captured group is the payload type number, the second captured group is the encoding name, the third captured group is the clock rate, and the fourth captured group is any additional parameters.
|
|
5853
|
-
const rtpMatch = rtpRegex.exec(line);
|
|
5854
|
-
if (rtpMatch) {
|
|
5855
|
-
return {
|
|
5856
|
-
original: rtpMatch[0],
|
|
5857
|
-
payload: rtpMatch[1],
|
|
5858
|
-
codec: rtpMatch[2],
|
|
5859
|
-
};
|
|
5860
|
-
}
|
|
5861
|
-
};
|
|
5862
|
-
const getFmtp = (line) => {
|
|
5863
|
-
// Example: a=fmtp:111 minptime=10; useinbandfec=1
|
|
5864
|
-
const fmtpRegex = /^a=fmtp:(\d*) (.*)/;
|
|
5865
|
-
const fmtpMatch = fmtpRegex.exec(line);
|
|
5866
|
-
// The first captured group is the payload type number, the second captured group is any additional parameters.
|
|
5867
|
-
if (fmtpMatch) {
|
|
5868
|
-
return {
|
|
5869
|
-
original: fmtpMatch[0],
|
|
5870
|
-
payload: fmtpMatch[1],
|
|
5871
|
-
config: fmtpMatch[2],
|
|
5872
|
-
};
|
|
5873
|
-
}
|
|
5874
|
-
};
|
|
5875
|
-
/**
|
|
5876
|
-
* gets the media section for the specified media type.
|
|
5877
|
-
* The media section contains the media type, port, codec, and payload type.
|
|
5878
|
-
* Example: m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5879
|
-
*/
|
|
5880
|
-
const getMedia = (line, mediaType) => {
|
|
5881
|
-
const regex = new RegExp(`(m=${mediaType} \\d+ [\\w/]+) ([\\d\\s]+)`);
|
|
5882
|
-
const match = regex.exec(line);
|
|
5883
|
-
if (match) {
|
|
5884
|
-
return {
|
|
5885
|
-
original: match[0],
|
|
5886
|
-
mediaWithPorts: match[1],
|
|
5887
|
-
codecOrder: match[2],
|
|
5888
|
-
};
|
|
5889
|
-
}
|
|
5890
|
-
};
|
|
5891
|
-
const getMediaSection = (sdp, mediaType) => {
|
|
5892
|
-
let media;
|
|
5893
|
-
const rtpMap = [];
|
|
5894
|
-
const fmtp = [];
|
|
5895
|
-
let isTheRequiredMediaSection = false;
|
|
5896
|
-
sdp.split(/(\r\n|\r|\n)/).forEach((line) => {
|
|
5897
|
-
const isValidLine = /^([a-z])=(.*)/.test(line);
|
|
5898
|
-
if (!isValidLine)
|
|
5899
|
-
return;
|
|
5900
|
-
/*
|
|
5901
|
-
NOTE: according to https://www.rfc-editor.org/rfc/rfc8866.pdf
|
|
5902
|
-
Each media description starts with an "m=" line and continues to the next media description or the end of the whole session description, whichever comes first
|
|
5903
|
-
*/
|
|
5904
|
-
const type = line[0];
|
|
5905
|
-
if (type === 'm') {
|
|
5906
|
-
const _media = getMedia(line, mediaType);
|
|
5907
|
-
isTheRequiredMediaSection = !!_media;
|
|
5908
|
-
if (_media) {
|
|
5909
|
-
media = _media;
|
|
5910
|
-
}
|
|
5911
|
-
}
|
|
5912
|
-
else if (isTheRequiredMediaSection && type === 'a') {
|
|
5913
|
-
const rtpMapLine = getRtpMap(line);
|
|
5914
|
-
const fmtpLine = getFmtp(line);
|
|
5915
|
-
if (rtpMapLine) {
|
|
5916
|
-
rtpMap.push(rtpMapLine);
|
|
5917
|
-
}
|
|
5918
|
-
else if (fmtpLine) {
|
|
5919
|
-
fmtp.push(fmtpLine);
|
|
5920
|
-
}
|
|
5921
|
-
}
|
|
5922
|
-
});
|
|
5923
|
-
if (media) {
|
|
5924
|
-
return {
|
|
5925
|
-
media,
|
|
5926
|
-
rtpMap,
|
|
5927
|
-
fmtp,
|
|
5928
|
-
};
|
|
5929
|
-
}
|
|
5930
|
-
};
|
|
5931
|
-
/**
|
|
5932
|
-
* Returns a string of codec IDs with the preferred codec ID in front of the other codec IDs.
|
|
5933
|
-
* It is used to ensure that a preferred codec is used when decoding a media stream.
|
|
5934
|
-
* Example: Suppose we want to prefer VP8 which has id 96
|
|
5935
|
-
* 1. If codec order is 100 101 96 97 35 36 102 125 127
|
|
5936
|
-
* 2. The function returns 96 100 101 97 35 36 102 125 127
|
|
5937
|
-
*/
|
|
5938
|
-
const moveCodecToFront = (codecOrder, preferredCodecId) => {
|
|
5939
|
-
const codecIds = codecOrder.split(' ');
|
|
5940
|
-
const index = codecIds.indexOf(preferredCodecId);
|
|
5941
|
-
if (index > -1) {
|
|
5942
|
-
codecIds.splice(index, 1);
|
|
5943
|
-
codecIds.unshift(preferredCodecId);
|
|
5944
|
-
}
|
|
5945
|
-
return codecIds.join(' ');
|
|
5946
|
-
};
|
|
5947
|
-
/**
|
|
5948
|
-
* Returns a string of codec IDs with the given codec ID removed
|
|
5949
|
-
* It is used to ensure that a codec is disabled when processing a media stream.
|
|
5950
|
-
* Example: Suppose we want to prefer RED which has id 63
|
|
5951
|
-
* 1. If codec order is 111 63 103 104 9 102 0 8 106 105 13 110 112 113 126
|
|
5952
|
-
* 2. The function returns 111 103 104 9 102 0 8 106 105 13 110 112 113 126
|
|
5953
|
-
*/
|
|
5954
|
-
const removeCodecFromOrder = (codecOrder, codecIdToRemove) => {
|
|
5955
|
-
const codecIds = codecOrder.split(' ');
|
|
5956
|
-
return codecIds.filter((codecID) => codecID !== codecIdToRemove).join(' ');
|
|
5957
|
-
};
|
|
5958
|
-
/**
|
|
5959
|
-
* Returns an SDP with the preferred codec in front of the other codecs.
|
|
5960
|
-
* Example: Suppose we want to prefer VP8
|
|
5961
|
-
* 1. find video media specification m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5962
|
-
* 2. look for specified codec (VP8) a=rtpmap:96 VP8/90000
|
|
5963
|
-
* 3. extract 96 as an identifier of VP8
|
|
5964
|
-
* 4. move 96 to the front
|
|
5965
|
-
* 5. now media looks like this: m=video 9 UDP/TLS/RTP/SAVPF 96 100 101 97 35 36 102 125 127
|
|
5966
|
-
*/
|
|
5967
|
-
const setPreferredCodec = (sdp, mediaType, preferredCodec) => {
|
|
5968
|
-
const section = getMediaSection(sdp, mediaType);
|
|
5969
|
-
if (!section)
|
|
5970
|
-
return sdp;
|
|
5971
|
-
const rtpMap = section.rtpMap.find((r) => r.codec.toLowerCase() === preferredCodec.toLowerCase());
|
|
5972
|
-
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
5973
|
-
if (!codecId)
|
|
5974
|
-
return sdp;
|
|
5975
|
-
const newCodecOrder = moveCodecToFront(section.media.codecOrder, codecId);
|
|
5976
|
-
return sdp.replace(section.media.original, `${section.media.mediaWithPorts} ${newCodecOrder}`);
|
|
5977
|
-
};
|
|
5978
|
-
/**
|
|
5979
|
-
* Returns an SDP with the specified codec removed.
|
|
5980
|
-
* Example: Suppose we want to remove RED
|
|
5981
|
-
* 1. find audio media specification m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5982
|
-
* 2. look for specified codec (RED) a=rtpmap:127 red/90000
|
|
5983
|
-
* 3. extract 127 as an identifier of RED
|
|
5984
|
-
* 4. remove 127 from the codec order
|
|
5985
|
-
* 5. remove a=rtpmap:127 red/90000
|
|
5986
|
-
* 6. remove a=fmtp:127 ...
|
|
5987
|
-
*/
|
|
5988
|
-
const removeCodec = (sdp, mediaType, codecToRemove) => {
|
|
5989
|
-
const section = getMediaSection(sdp, mediaType);
|
|
5990
|
-
const mediaSection = section === null || section === void 0 ? void 0 : section.media;
|
|
5991
|
-
if (!mediaSection) {
|
|
5992
|
-
return sdp;
|
|
5993
|
-
}
|
|
5994
|
-
const rtpMap = section === null || section === void 0 ? void 0 : section.rtpMap.find((r) => r.codec.toLowerCase() === codecToRemove.toLowerCase());
|
|
5995
|
-
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
5996
|
-
if (!codecId) {
|
|
5997
|
-
return sdp;
|
|
5998
|
-
}
|
|
5999
|
-
const newCodecOrder = removeCodecFromOrder(mediaSection.codecOrder, codecId);
|
|
6000
|
-
const fmtp = section === null || section === void 0 ? void 0 : section.fmtp.find((f) => f.payload === codecId);
|
|
6001
|
-
return sdp
|
|
6002
|
-
.replace(mediaSection.original, `${mediaSection.mediaWithPorts} ${newCodecOrder}`)
|
|
6003
|
-
.replace(new RegExp(`${rtpMap.original}[\r\n]+`), '') // remove the corresponding rtpmap line
|
|
6004
|
-
.replace((fmtp === null || fmtp === void 0 ? void 0 : fmtp.original) ? new RegExp(`${fmtp === null || fmtp === void 0 ? void 0 : fmtp.original}[\r\n]+`) : '', ''); // remove the corresponding fmtp line
|
|
6005
|
-
};
|
|
6006
|
-
/**
|
|
6007
|
-
* Gets the fmtp line corresponding to opus
|
|
6008
|
-
*/
|
|
6009
|
-
const getOpusFmtp = (sdp) => {
|
|
6010
|
-
const section = getMediaSection(sdp, 'audio');
|
|
6011
|
-
const rtpMap = section === null || section === void 0 ? void 0 : section.rtpMap.find((r) => r.codec.toLowerCase() === 'opus');
|
|
6012
|
-
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
6013
|
-
if (codecId) {
|
|
6014
|
-
return section === null || section === void 0 ? void 0 : section.fmtp.find((f) => f.payload === codecId);
|
|
6015
|
-
}
|
|
6016
|
-
};
|
|
6017
|
-
/**
|
|
6018
|
-
* Returns an SDP with DTX enabled or disabled.
|
|
6019
|
-
*/
|
|
6020
|
-
const toggleDtx = (sdp, enable) => {
|
|
6021
|
-
const opusFmtp = getOpusFmtp(sdp);
|
|
6022
|
-
if (opusFmtp) {
|
|
6023
|
-
const matchDtx = /usedtx=(\d)/.exec(opusFmtp.config);
|
|
6024
|
-
const requiredDtxConfig = `usedtx=${enable ? '1' : '0'}`;
|
|
6025
|
-
if (matchDtx) {
|
|
6026
|
-
const newFmtp = opusFmtp.original.replace(/usedtx=(\d)/, requiredDtxConfig);
|
|
6027
|
-
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6028
|
-
}
|
|
6029
|
-
else {
|
|
6030
|
-
const newFmtp = `${opusFmtp.original};${requiredDtxConfig}`;
|
|
6031
|
-
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6032
|
-
}
|
|
6033
|
-
}
|
|
6034
|
-
return sdp;
|
|
6035
|
-
};
|
|
6036
|
-
|
|
6037
5839
|
// log levels, sorted by verbosity
|
|
6038
5840
|
const logLevels = Object.freeze({
|
|
6039
5841
|
trace: 0,
|
|
@@ -6125,39 +5927,13 @@ const getPreferredCodecs = (kind, preferredCodec, codecToRemove) => {
|
|
|
6125
5927
|
logger === null || logger === void 0 ? void 0 : logger('info', `Preffered codecs: `, result);
|
|
6126
5928
|
return result;
|
|
6127
5929
|
};
|
|
6128
|
-
const getGenericSdp = (direction
|
|
5930
|
+
const getGenericSdp = (direction) => __awaiter(void 0, void 0, void 0, function* () {
|
|
6129
5931
|
var _a;
|
|
6130
5932
|
const tempPc = new RTCPeerConnection();
|
|
6131
5933
|
tempPc.addTransceiver('video', { direction });
|
|
6132
|
-
// if ('setCodecPreferences' in videoTransceiver) {
|
|
6133
|
-
// const videoCodecPreferences = getPreferredCodecs(
|
|
6134
|
-
// 'audio',
|
|
6135
|
-
// preferredVideoCodec ?? 'vp8',
|
|
6136
|
-
// );
|
|
6137
|
-
// videoTransceiver.setCodecPreferences([...(videoCodecPreferences ?? [])]);
|
|
6138
|
-
// }
|
|
6139
5934
|
tempPc.addTransceiver('audio', { direction });
|
|
6140
|
-
const preferredAudioCodec = isRedEnabled ? 'red' : 'opus';
|
|
6141
|
-
const audioCodecToRemove = !isRedEnabled ? 'red' : undefined;
|
|
6142
|
-
// if ('setCodecPreferences' in audioTransceiver) {
|
|
6143
|
-
// const audioCodecPreferences = getPreferredCodecs(
|
|
6144
|
-
// 'audio',
|
|
6145
|
-
// preferredAudioCodec,
|
|
6146
|
-
// // audioCodecToRemove,
|
|
6147
|
-
// );
|
|
6148
|
-
// audioTransceiver.setCodecPreferences([...(audioCodecPreferences || [])]);
|
|
6149
|
-
// }
|
|
6150
5935
|
const offer = yield tempPc.createOffer();
|
|
6151
5936
|
let sdp = (_a = offer.sdp) !== null && _a !== void 0 ? _a : '';
|
|
6152
|
-
if (isReactNative()) {
|
|
6153
|
-
if (preferredVideoCodec) {
|
|
6154
|
-
sdp = setPreferredCodec(sdp, 'video', preferredVideoCodec);
|
|
6155
|
-
}
|
|
6156
|
-
sdp = setPreferredCodec(sdp, 'audio', preferredAudioCodec);
|
|
6157
|
-
if (audioCodecToRemove) {
|
|
6158
|
-
sdp = removeCodec(sdp, 'audio', audioCodecToRemove);
|
|
6159
|
-
}
|
|
6160
|
-
}
|
|
6161
5937
|
tempPc.getTransceivers().forEach((t) => {
|
|
6162
5938
|
t.stop();
|
|
6163
5939
|
});
|
|
@@ -6266,6 +6042,16 @@ function getIceCandidate(candidate) {
|
|
|
6266
6042
|
}
|
|
6267
6043
|
}
|
|
6268
6044
|
|
|
6045
|
+
/**
|
|
6046
|
+
* Checks whether we are using React Native
|
|
6047
|
+
*/
|
|
6048
|
+
const isReactNative = () => {
|
|
6049
|
+
var _a;
|
|
6050
|
+
if (typeof navigator === 'undefined')
|
|
6051
|
+
return false;
|
|
6052
|
+
return ((_a = navigator.product) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === 'reactnative';
|
|
6053
|
+
};
|
|
6054
|
+
|
|
6269
6055
|
let sdkInfo;
|
|
6270
6056
|
let osInfo;
|
|
6271
6057
|
let deviceInfo;
|
|
@@ -6462,6 +6248,119 @@ const muteTypeToTrackType = (muteType) => {
|
|
|
6462
6248
|
}
|
|
6463
6249
|
};
|
|
6464
6250
|
|
|
6251
|
+
const getRtpMap = (line) => {
|
|
6252
|
+
// Example: a=rtpmap:110 opus/48000/2
|
|
6253
|
+
const rtpRegex = /^a=rtpmap:(\d*) ([\w\-.]*)(?:\s*\/(\d*)(?:\s*\/(\S*))?)?/;
|
|
6254
|
+
// The first captured group is the payload type number, the second captured group is the encoding name, the third captured group is the clock rate, and the fourth captured group is any additional parameters.
|
|
6255
|
+
const rtpMatch = rtpRegex.exec(line);
|
|
6256
|
+
if (rtpMatch) {
|
|
6257
|
+
return {
|
|
6258
|
+
original: rtpMatch[0],
|
|
6259
|
+
payload: rtpMatch[1],
|
|
6260
|
+
codec: rtpMatch[2],
|
|
6261
|
+
};
|
|
6262
|
+
}
|
|
6263
|
+
};
|
|
6264
|
+
const getFmtp = (line) => {
|
|
6265
|
+
// Example: a=fmtp:111 minptime=10; useinbandfec=1
|
|
6266
|
+
const fmtpRegex = /^a=fmtp:(\d*) (.*)/;
|
|
6267
|
+
const fmtpMatch = fmtpRegex.exec(line);
|
|
6268
|
+
// The first captured group is the payload type number, the second captured group is any additional parameters.
|
|
6269
|
+
if (fmtpMatch) {
|
|
6270
|
+
return {
|
|
6271
|
+
original: fmtpMatch[0],
|
|
6272
|
+
payload: fmtpMatch[1],
|
|
6273
|
+
config: fmtpMatch[2],
|
|
6274
|
+
};
|
|
6275
|
+
}
|
|
6276
|
+
};
|
|
6277
|
+
/**
|
|
6278
|
+
* gets the media section for the specified media type.
|
|
6279
|
+
* The media section contains the media type, port, codec, and payload type.
|
|
6280
|
+
* Example: m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
6281
|
+
*/
|
|
6282
|
+
const getMedia = (line, mediaType) => {
|
|
6283
|
+
const regex = new RegExp(`(m=${mediaType} \\d+ [\\w/]+) ([\\d\\s]+)`);
|
|
6284
|
+
const match = regex.exec(line);
|
|
6285
|
+
if (match) {
|
|
6286
|
+
return {
|
|
6287
|
+
original: match[0],
|
|
6288
|
+
mediaWithPorts: match[1],
|
|
6289
|
+
codecOrder: match[2],
|
|
6290
|
+
};
|
|
6291
|
+
}
|
|
6292
|
+
};
|
|
6293
|
+
const getMediaSection = (sdp, mediaType) => {
|
|
6294
|
+
let media;
|
|
6295
|
+
const rtpMap = [];
|
|
6296
|
+
const fmtp = [];
|
|
6297
|
+
let isTheRequiredMediaSection = false;
|
|
6298
|
+
sdp.split(/(\r\n|\r|\n)/).forEach((line) => {
|
|
6299
|
+
const isValidLine = /^([a-z])=(.*)/.test(line);
|
|
6300
|
+
if (!isValidLine)
|
|
6301
|
+
return;
|
|
6302
|
+
/*
|
|
6303
|
+
NOTE: according to https://www.rfc-editor.org/rfc/rfc8866.pdf
|
|
6304
|
+
Each media description starts with an "m=" line and continues to the next media description or the end of the whole session description, whichever comes first
|
|
6305
|
+
*/
|
|
6306
|
+
const type = line[0];
|
|
6307
|
+
if (type === 'm') {
|
|
6308
|
+
const _media = getMedia(line, mediaType);
|
|
6309
|
+
isTheRequiredMediaSection = !!_media;
|
|
6310
|
+
if (_media) {
|
|
6311
|
+
media = _media;
|
|
6312
|
+
}
|
|
6313
|
+
}
|
|
6314
|
+
else if (isTheRequiredMediaSection && type === 'a') {
|
|
6315
|
+
const rtpMapLine = getRtpMap(line);
|
|
6316
|
+
const fmtpLine = getFmtp(line);
|
|
6317
|
+
if (rtpMapLine) {
|
|
6318
|
+
rtpMap.push(rtpMapLine);
|
|
6319
|
+
}
|
|
6320
|
+
else if (fmtpLine) {
|
|
6321
|
+
fmtp.push(fmtpLine);
|
|
6322
|
+
}
|
|
6323
|
+
}
|
|
6324
|
+
});
|
|
6325
|
+
if (media) {
|
|
6326
|
+
return {
|
|
6327
|
+
media,
|
|
6328
|
+
rtpMap,
|
|
6329
|
+
fmtp,
|
|
6330
|
+
};
|
|
6331
|
+
}
|
|
6332
|
+
};
|
|
6333
|
+
/**
|
|
6334
|
+
* Gets the fmtp line corresponding to opus
|
|
6335
|
+
*/
|
|
6336
|
+
const getOpusFmtp = (sdp) => {
|
|
6337
|
+
const section = getMediaSection(sdp, 'audio');
|
|
6338
|
+
const rtpMap = section === null || section === void 0 ? void 0 : section.rtpMap.find((r) => r.codec.toLowerCase() === 'opus');
|
|
6339
|
+
const codecId = rtpMap === null || rtpMap === void 0 ? void 0 : rtpMap.payload;
|
|
6340
|
+
if (codecId) {
|
|
6341
|
+
return section === null || section === void 0 ? void 0 : section.fmtp.find((f) => f.payload === codecId);
|
|
6342
|
+
}
|
|
6343
|
+
};
|
|
6344
|
+
/**
|
|
6345
|
+
* Returns an SDP with DTX enabled or disabled.
|
|
6346
|
+
*/
|
|
6347
|
+
const toggleDtx = (sdp, enable) => {
|
|
6348
|
+
const opusFmtp = getOpusFmtp(sdp);
|
|
6349
|
+
if (opusFmtp) {
|
|
6350
|
+
const matchDtx = /usedtx=(\d)/.exec(opusFmtp.config);
|
|
6351
|
+
const requiredDtxConfig = `usedtx=${enable ? '1' : '0'}`;
|
|
6352
|
+
if (matchDtx) {
|
|
6353
|
+
const newFmtp = opusFmtp.original.replace(/usedtx=(\d)/, requiredDtxConfig);
|
|
6354
|
+
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6355
|
+
}
|
|
6356
|
+
else {
|
|
6357
|
+
const newFmtp = `${opusFmtp.original};${requiredDtxConfig}`;
|
|
6358
|
+
return sdp.replace(opusFmtp.original, newFmtp);
|
|
6359
|
+
}
|
|
6360
|
+
}
|
|
6361
|
+
return sdp;
|
|
6362
|
+
};
|
|
6363
|
+
|
|
6465
6364
|
const logger$3 = getLogger(['Publisher']);
|
|
6466
6365
|
/**
|
|
6467
6366
|
* The `Publisher` is responsible for publishing/unpublishing media streams to/from the SFU
|
|
@@ -9611,15 +9510,26 @@ class DynascaleManager {
|
|
|
9611
9510
|
}
|
|
9612
9511
|
});
|
|
9613
9512
|
});
|
|
9614
|
-
const sinkIdSubscription =
|
|
9615
|
-
|
|
9513
|
+
const sinkIdSubscription = combineLatest([
|
|
9514
|
+
this.call.state.localParticipant$,
|
|
9515
|
+
this.call.speaker.state.selectedDevice$,
|
|
9516
|
+
]).subscribe(([p, selectedDevice]) => {
|
|
9517
|
+
var _a;
|
|
9518
|
+
const deviceId = ((_a = getSdkInfo()) === null || _a === void 0 ? void 0 : _a.type) === SdkType.REACT
|
|
9519
|
+
? p === null || p === void 0 ? void 0 : p.audioOutputDeviceId
|
|
9520
|
+
: selectedDevice;
|
|
9521
|
+
if ('setSinkId' in audioElement) {
|
|
9616
9522
|
// @ts-expect-error setSinkId is not yet in the lib
|
|
9617
|
-
audioElement.setSinkId(
|
|
9523
|
+
audioElement.setSinkId(deviceId);
|
|
9618
9524
|
}
|
|
9619
9525
|
});
|
|
9526
|
+
const volumeSubscription = this.call.speaker.state.volume$.subscribe((volume) => {
|
|
9527
|
+
audioElement.volume = volume;
|
|
9528
|
+
});
|
|
9620
9529
|
audioElement.autoplay = true;
|
|
9621
9530
|
return () => {
|
|
9622
9531
|
sinkIdSubscription.unsubscribe();
|
|
9532
|
+
volumeSubscription.unsubscribe();
|
|
9623
9533
|
updateMediaStreamSubscription.unsubscribe();
|
|
9624
9534
|
};
|
|
9625
9535
|
};
|
|
@@ -9747,24 +9657,145 @@ class CallTypesRegistry {
|
|
|
9747
9657
|
}, {});
|
|
9748
9658
|
}
|
|
9749
9659
|
}
|
|
9750
|
-
/**
|
|
9751
|
-
* The default call types registry.
|
|
9752
|
-
* You can use this instance to dynamically register and unregister call types.
|
|
9753
|
-
*/
|
|
9754
|
-
const CallTypes = new CallTypesRegistry([
|
|
9755
|
-
new CallType('default', {
|
|
9756
|
-
sortParticipantsBy: defaultSortPreset,
|
|
9757
|
-
}),
|
|
9758
|
-
new CallType('development', {
|
|
9759
|
-
sortParticipantsBy: defaultSortPreset,
|
|
9760
|
-
}),
|
|
9761
|
-
new CallType('livestream', {
|
|
9762
|
-
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9763
|
-
}),
|
|
9764
|
-
new CallType('audio_room', {
|
|
9765
|
-
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9766
|
-
}),
|
|
9767
|
-
]);
|
|
9660
|
+
/**
|
|
9661
|
+
* The default call types registry.
|
|
9662
|
+
* You can use this instance to dynamically register and unregister call types.
|
|
9663
|
+
*/
|
|
9664
|
+
const CallTypes = new CallTypesRegistry([
|
|
9665
|
+
new CallType('default', {
|
|
9666
|
+
sortParticipantsBy: defaultSortPreset,
|
|
9667
|
+
}),
|
|
9668
|
+
new CallType('development', {
|
|
9669
|
+
sortParticipantsBy: defaultSortPreset,
|
|
9670
|
+
}),
|
|
9671
|
+
new CallType('livestream', {
|
|
9672
|
+
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9673
|
+
}),
|
|
9674
|
+
new CallType('audio_room', {
|
|
9675
|
+
sortParticipantsBy: livestreamOrAudioRoomSortPreset,
|
|
9676
|
+
}),
|
|
9677
|
+
]);
|
|
9678
|
+
|
|
9679
|
+
class InputMediaDeviceManagerState {
|
|
9680
|
+
constructor(disableMode = 'stop-tracks') {
|
|
9681
|
+
this.disableMode = disableMode;
|
|
9682
|
+
this.statusSubject = new BehaviorSubject(undefined);
|
|
9683
|
+
this.mediaStreamSubject = new BehaviorSubject(undefined);
|
|
9684
|
+
this.selectedDeviceSubject = new BehaviorSubject(undefined);
|
|
9685
|
+
/**
|
|
9686
|
+
* Gets the current value of an observable, or undefined if the observable has
|
|
9687
|
+
* not emitted a value yet.
|
|
9688
|
+
*
|
|
9689
|
+
* @param observable$ the observable to get the value from.
|
|
9690
|
+
*/
|
|
9691
|
+
this.getCurrentValue = getCurrentValue;
|
|
9692
|
+
/**
|
|
9693
|
+
* Updates the value of the provided Subject.
|
|
9694
|
+
* An `update` can either be a new value or a function which takes
|
|
9695
|
+
* the current value and returns a new value.
|
|
9696
|
+
*
|
|
9697
|
+
* @internal
|
|
9698
|
+
*
|
|
9699
|
+
* @param subject the subject to update.
|
|
9700
|
+
* @param update the update to apply to the subject.
|
|
9701
|
+
* @return the updated value.
|
|
9702
|
+
*/
|
|
9703
|
+
this.setCurrentValue = setCurrentValue;
|
|
9704
|
+
this.mediaStream$ = this.mediaStreamSubject.asObservable();
|
|
9705
|
+
this.selectedDevice$ = this.selectedDeviceSubject
|
|
9706
|
+
.asObservable()
|
|
9707
|
+
.pipe(distinctUntilChanged$1());
|
|
9708
|
+
this.status$ = this.statusSubject
|
|
9709
|
+
.asObservable()
|
|
9710
|
+
.pipe(distinctUntilChanged$1());
|
|
9711
|
+
}
|
|
9712
|
+
/**
|
|
9713
|
+
* The device status
|
|
9714
|
+
*/
|
|
9715
|
+
get status() {
|
|
9716
|
+
return this.getCurrentValue(this.status$);
|
|
9717
|
+
}
|
|
9718
|
+
/**
|
|
9719
|
+
* The currently selected device
|
|
9720
|
+
*/
|
|
9721
|
+
get selectedDevice() {
|
|
9722
|
+
return this.getCurrentValue(this.selectedDevice$);
|
|
9723
|
+
}
|
|
9724
|
+
/**
|
|
9725
|
+
* The current media stream, or `undefined` if the device is currently disabled.
|
|
9726
|
+
*/
|
|
9727
|
+
get mediaStream() {
|
|
9728
|
+
return this.getCurrentValue(this.mediaStream$);
|
|
9729
|
+
}
|
|
9730
|
+
/**
|
|
9731
|
+
* @internal
|
|
9732
|
+
* @param status
|
|
9733
|
+
*/
|
|
9734
|
+
setStatus(status) {
|
|
9735
|
+
this.setCurrentValue(this.statusSubject, status);
|
|
9736
|
+
}
|
|
9737
|
+
/**
|
|
9738
|
+
* @internal
|
|
9739
|
+
* @param stream
|
|
9740
|
+
*/
|
|
9741
|
+
setMediaStream(stream) {
|
|
9742
|
+
this.setCurrentValue(this.mediaStreamSubject, stream);
|
|
9743
|
+
if (stream) {
|
|
9744
|
+
this.setDevice(this.getDeviceIdFromStream(stream));
|
|
9745
|
+
}
|
|
9746
|
+
}
|
|
9747
|
+
/**
|
|
9748
|
+
* @internal
|
|
9749
|
+
* @param stream
|
|
9750
|
+
*/
|
|
9751
|
+
setDevice(deviceId) {
|
|
9752
|
+
this.setCurrentValue(this.selectedDeviceSubject, deviceId);
|
|
9753
|
+
}
|
|
9754
|
+
}
|
|
9755
|
+
|
|
9756
|
+
class CameraManagerState extends InputMediaDeviceManagerState {
|
|
9757
|
+
constructor() {
|
|
9758
|
+
super('stop-tracks');
|
|
9759
|
+
this.directionSubject = new BehaviorSubject(undefined);
|
|
9760
|
+
this.direction$ = this.directionSubject
|
|
9761
|
+
.asObservable()
|
|
9762
|
+
.pipe(distinctUntilChanged$1());
|
|
9763
|
+
}
|
|
9764
|
+
/**
|
|
9765
|
+
* The preferred camera direction
|
|
9766
|
+
* front - means the camera facing the user
|
|
9767
|
+
* back - means the camera facing the environment
|
|
9768
|
+
*/
|
|
9769
|
+
get direction() {
|
|
9770
|
+
return this.getCurrentValue(this.direction$);
|
|
9771
|
+
}
|
|
9772
|
+
/**
|
|
9773
|
+
* @internal
|
|
9774
|
+
*/
|
|
9775
|
+
setDirection(direction) {
|
|
9776
|
+
this.setCurrentValue(this.directionSubject, direction);
|
|
9777
|
+
}
|
|
9778
|
+
/**
|
|
9779
|
+
* @internal
|
|
9780
|
+
*/
|
|
9781
|
+
setMediaStream(stream) {
|
|
9782
|
+
var _a;
|
|
9783
|
+
super.setMediaStream(stream);
|
|
9784
|
+
if (stream) {
|
|
9785
|
+
// RN getSettings() doesn't return facingMode, so we don't verify camera direction
|
|
9786
|
+
const direction = isReactNative()
|
|
9787
|
+
? this.direction
|
|
9788
|
+
: ((_a = stream.getVideoTracks()[0]) === null || _a === void 0 ? void 0 : _a.getSettings().facingMode) === 'environment'
|
|
9789
|
+
? 'back'
|
|
9790
|
+
: 'front';
|
|
9791
|
+
this.setDirection(direction);
|
|
9792
|
+
}
|
|
9793
|
+
}
|
|
9794
|
+
getDeviceIdFromStream(stream) {
|
|
9795
|
+
var _a;
|
|
9796
|
+
return (_a = stream.getVideoTracks()[0]) === null || _a === void 0 ? void 0 : _a.getSettings().deviceId;
|
|
9797
|
+
}
|
|
9798
|
+
}
|
|
9768
9799
|
|
|
9769
9800
|
const getDevices = (constraints) => {
|
|
9770
9801
|
return new Observable((subscriber) => {
|
|
@@ -10161,127 +10192,6 @@ class InputMediaDeviceManager {
|
|
|
10161
10192
|
}
|
|
10162
10193
|
}
|
|
10163
10194
|
|
|
10164
|
-
class InputMediaDeviceManagerState {
|
|
10165
|
-
constructor(disableMode = 'stop-tracks') {
|
|
10166
|
-
this.disableMode = disableMode;
|
|
10167
|
-
this.statusSubject = new BehaviorSubject(undefined);
|
|
10168
|
-
this.mediaStreamSubject = new BehaviorSubject(undefined);
|
|
10169
|
-
this.selectedDeviceSubject = new BehaviorSubject(undefined);
|
|
10170
|
-
/**
|
|
10171
|
-
* Gets the current value of an observable, or undefined if the observable has
|
|
10172
|
-
* not emitted a value yet.
|
|
10173
|
-
*
|
|
10174
|
-
* @param observable$ the observable to get the value from.
|
|
10175
|
-
*/
|
|
10176
|
-
this.getCurrentValue = getCurrentValue;
|
|
10177
|
-
/**
|
|
10178
|
-
* Updates the value of the provided Subject.
|
|
10179
|
-
* An `update` can either be a new value or a function which takes
|
|
10180
|
-
* the current value and returns a new value.
|
|
10181
|
-
*
|
|
10182
|
-
* @internal
|
|
10183
|
-
*
|
|
10184
|
-
* @param subject the subject to update.
|
|
10185
|
-
* @param update the update to apply to the subject.
|
|
10186
|
-
* @return the updated value.
|
|
10187
|
-
*/
|
|
10188
|
-
this.setCurrentValue = setCurrentValue;
|
|
10189
|
-
this.mediaStream$ = this.mediaStreamSubject.asObservable();
|
|
10190
|
-
this.selectedDevice$ = this.selectedDeviceSubject
|
|
10191
|
-
.asObservable()
|
|
10192
|
-
.pipe(distinctUntilChanged$1());
|
|
10193
|
-
this.status$ = this.statusSubject
|
|
10194
|
-
.asObservable()
|
|
10195
|
-
.pipe(distinctUntilChanged$1());
|
|
10196
|
-
}
|
|
10197
|
-
/**
|
|
10198
|
-
* The device status
|
|
10199
|
-
*/
|
|
10200
|
-
get status() {
|
|
10201
|
-
return this.getCurrentValue(this.status$);
|
|
10202
|
-
}
|
|
10203
|
-
/**
|
|
10204
|
-
* The currently selected device
|
|
10205
|
-
*/
|
|
10206
|
-
get selectedDevice() {
|
|
10207
|
-
return this.getCurrentValue(this.selectedDevice$);
|
|
10208
|
-
}
|
|
10209
|
-
/**
|
|
10210
|
-
* The current media stream, or `undefined` if the device is currently disabled.
|
|
10211
|
-
*/
|
|
10212
|
-
get mediaStream() {
|
|
10213
|
-
return this.getCurrentValue(this.mediaStream$);
|
|
10214
|
-
}
|
|
10215
|
-
/**
|
|
10216
|
-
* @internal
|
|
10217
|
-
* @param status
|
|
10218
|
-
*/
|
|
10219
|
-
setStatus(status) {
|
|
10220
|
-
this.setCurrentValue(this.statusSubject, status);
|
|
10221
|
-
}
|
|
10222
|
-
/**
|
|
10223
|
-
* @internal
|
|
10224
|
-
* @param stream
|
|
10225
|
-
*/
|
|
10226
|
-
setMediaStream(stream) {
|
|
10227
|
-
this.setCurrentValue(this.mediaStreamSubject, stream);
|
|
10228
|
-
if (stream) {
|
|
10229
|
-
this.setDevice(this.getDeviceIdFromStream(stream));
|
|
10230
|
-
}
|
|
10231
|
-
}
|
|
10232
|
-
/**
|
|
10233
|
-
* @internal
|
|
10234
|
-
* @param stream
|
|
10235
|
-
*/
|
|
10236
|
-
setDevice(deviceId) {
|
|
10237
|
-
this.setCurrentValue(this.selectedDeviceSubject, deviceId);
|
|
10238
|
-
}
|
|
10239
|
-
}
|
|
10240
|
-
|
|
10241
|
-
class CameraManagerState extends InputMediaDeviceManagerState {
    constructor() {
        // 'stop-tracks' tells the base state how the camera should be disabled.
        super('stop-tracks');
        this.directionSubject = new BehaviorSubject(undefined);
        this.direction$ = this.directionSubject.asObservable().pipe(distinctUntilChanged$1());
    }
    /**
     * The preferred camera direction
     * front - means the camera facing the user
     * back - means the camera facing the environment
     */
    get direction() {
        return this.getCurrentValue(this.direction$);
    }
    /**
     * Updates the preferred camera direction.
     *
     * @internal
     */
    setDirection(direction) {
        this.setCurrentValue(this.directionSubject, direction);
    }
    /**
     * Stores the media stream and derives the camera direction from its
     * first video track's settings.
     *
     * @internal
     */
    setMediaStream(stream) {
        super.setMediaStream(stream);
        if (!stream) {
            return;
        }
        let direction;
        if (isReactNative()) {
            // RN getSettings() doesn't return facingMode, so we don't verify camera direction
            direction = this.direction;
        } else {
            const track = stream.getVideoTracks()[0];
            const facingMode = track == null ? undefined : track.getSettings().facingMode;
            direction = facingMode === 'environment' ? 'back' : 'front';
        }
        this.setDirection(direction);
    }
    // Reads the device id from the stream's first video track, if any.
    getDeviceIdFromStream(stream) {
        const track = stream.getVideoTracks()[0];
        return track == null ? undefined : track.getSettings().deviceId;
    }
}
|
|
10284
|
-
|
|
10285
10195
|
class CameraManager extends InputMediaDeviceManager {
|
|
10286
10196
|
constructor(call) {
|
|
10287
10197
|
super(call, new CameraManagerState());
|
|
@@ -10405,6 +10315,116 @@ class MicrophoneManager extends InputMediaDeviceManager {
|
|
|
10405
10315
|
}
|
|
10406
10316
|
}
|
|
10407
10317
|
|
|
10318
|
+
class SpeakerState {
    /**
     * Reactive state holder for the audio output (speaker) selection
     * and playback volume.
     */
    constructor() {
        // '' represents the system default audio output device.
        this.selectedDeviceSubject = new BehaviorSubject('');
        this.selectedDevice$ = this.selectedDeviceSubject.asObservable().pipe(distinctUntilChanged$1());
        this.volumeSubject = new BehaviorSubject(1);
        this.volume$ = this.volumeSubject.asObservable().pipe(distinctUntilChanged$1());
        /**
         * [Tells if the browser supports audio output change on 'audio' elements](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/setSinkId).
         */
        this.isDeviceSelectionSupported = checkIfAudioOutputChangeSupported();
        /**
         * Gets the current value of an observable, or undefined if the observable has
         * not emitted a value yet.
         *
         * @param observable$ the observable to get the value from.
         */
        this.getCurrentValue = getCurrentValue;
        /**
         * Updates the value of the provided Subject.
         * An `update` can either be a new value or a function which takes
         * the current value and returns a new value.
         *
         * @internal
         *
         * @param subject the subject to update.
         * @param update the update to apply to the subject.
         * @return the updated value.
         */
        this.setCurrentValue = setCurrentValue;
    }
    /**
     * The currently selected device
     *
     * Note: this feature is not supported in React Native
     */
    get selectedDevice() {
        return this.getCurrentValue(this.selectedDevice$);
    }
    /**
     * The currently selected volume
     *
     * Note: this feature is not supported in React Native
     */
    get volume() {
        return this.getCurrentValue(this.volume$);
    }
    /**
     * @internal
     * @param deviceId the id of the device to mark as selected.
     */
    setDevice(deviceId) {
        this.setCurrentValue(this.selectedDeviceSubject, deviceId);
    }
    /**
     * @internal
     * @param volume the new volume value to store.
     */
    setVolume(volume) {
        this.setCurrentValue(this.volumeSubject, volume);
    }
}
|
|
10383
|
+
|
|
10384
|
+
class SpeakerManager {
    constructor() {
        this.state = new SpeakerState();
    }
    /**
     * Lists the available audio output devices
     *
     * Note: It prompts the user for a permission to use devices (if not already granted)
     *
     * @returns an Observable that will be updated if a device is connected or disconnected
     */
    listDevices() {
        return getAudioOutputDevices();
    }
    /**
     * Select device
     *
     * Note: this method is not supported in React Native
     *
     * @param deviceId empty string means the system default
     */
    select(deviceId) {
        if (isReactNative()) {
            throw new Error('This feature is not supported in React Native');
        }
        this.state.setDevice(deviceId);
    }
    /**
     * Set the volume of the audio elements
     * @param volume a number between 0 and 1
     *
     * Note: this method is not supported in React Native
     */
    setVolume(volume) {
        if (isReactNative()) {
            throw new Error('This feature is not supported in React Native');
        }
        // NOTE(review): the truthiness guard skips validation for falsy volumes
        // (0, undefined); 0 is within range anyway — presumably intentional upstream.
        const outOfRange = volume < 0 || volume > 1;
        if (volume && outOfRange) {
            throw new Error('Volume must be between 0 and 1');
        }
        this.state.setVolume(volume);
    }
}
|
|
10427
|
+
|
|
10408
10428
|
/**
|
|
10409
10429
|
* An object representation of a `Call`.
|
|
10410
10430
|
*/
|
|
@@ -10788,7 +10808,7 @@ class Call {
|
|
|
10788
10808
|
// prepare a generic SDP and send it to the SFU.
|
|
10789
10809
|
// this is a throw-away SDP that the SFU will use to determine
|
|
10790
10810
|
// the capabilities of the client (codec support, etc.)
|
|
10791
|
-
.then(() => getGenericSdp('recvonly'
|
|
10811
|
+
.then(() => getGenericSdp('recvonly'))
|
|
10792
10812
|
.then((sdp) => {
|
|
10793
10813
|
var _a;
|
|
10794
10814
|
const subscriptions = getCurrentValue(this.trackSubscriptionsSubject);
|
|
@@ -11079,6 +11099,8 @@ class Call {
|
|
|
11079
11099
|
*
|
|
11080
11100
|
*
|
|
11081
11101
|
* @param deviceId the selected device, `undefined` means the user wants to use the system's default audio output
|
|
11102
|
+
*
|
|
11103
|
+
* @deprecated use `call.speaker` instead
|
|
11082
11104
|
*/
|
|
11083
11105
|
this.setAudioOutputDevice = (deviceId) => {
|
|
11084
11106
|
if (!this.sfuClient)
|
|
@@ -11553,6 +11575,7 @@ class Call {
|
|
|
11553
11575
|
this.leaveCallHooks.add(createSubscription(this.trackSubscriptionsSubject.pipe(debounce((v) => timer(v.type)), map$2((v) => v.data)), (subscriptions) => { var _a; return (_a = this.sfuClient) === null || _a === void 0 ? void 0 : _a.updateSubscriptions(subscriptions); }));
|
|
11554
11576
|
this.camera = new CameraManager(this);
|
|
11555
11577
|
this.microphone = new MicrophoneManager(this);
|
|
11578
|
+
this.speaker = new SpeakerManager();
|
|
11556
11579
|
}
|
|
11557
11580
|
registerEffects() {
|
|
11558
11581
|
this.leaveCallHooks.add(
|
|
@@ -12842,7 +12865,7 @@ class WSConnectionFallback {
|
|
|
12842
12865
|
}
|
|
12843
12866
|
}
|
|
12844
12867
|
|
|
12845
|
-
const version = '0.3.
|
|
12868
|
+
const version = '0.3.13';
|
|
12846
12869
|
|
|
12847
12870
|
const logger = getLogger(['location']);
|
|
12848
12871
|
const HINT_URL = `https://hint.stream-io-video.com/`;
|