@stream-io/video-client 1.14.0 → 1.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.browser.es.js +1532 -1784
- package/dist/index.browser.es.js.map +1 -1
- package/dist/index.cjs.js +1512 -1783
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.es.js +1532 -1784
- package/dist/index.es.js.map +1 -1
- package/dist/src/Call.d.ts +43 -28
- package/dist/src/StreamSfuClient.d.ts +4 -5
- package/dist/src/devices/CameraManager.d.ts +5 -8
- package/dist/src/devices/InputMediaDeviceManager.d.ts +5 -5
- package/dist/src/devices/MicrophoneManager.d.ts +7 -2
- package/dist/src/devices/ScreenShareManager.d.ts +1 -2
- package/dist/src/gen/video/sfu/event/events.d.ts +38 -19
- package/dist/src/gen/video/sfu/models/models.d.ts +76 -9
- package/dist/src/helpers/array.d.ts +7 -0
- package/dist/src/permissions/PermissionsContext.d.ts +6 -0
- package/dist/src/rtc/BasePeerConnection.d.ts +90 -0
- package/dist/src/rtc/Dispatcher.d.ts +0 -1
- package/dist/src/rtc/IceTrickleBuffer.d.ts +3 -2
- package/dist/src/rtc/Publisher.d.ts +32 -86
- package/dist/src/rtc/Subscriber.d.ts +4 -56
- package/dist/src/rtc/TransceiverCache.d.ts +55 -0
- package/dist/src/rtc/codecs.d.ts +1 -15
- package/dist/src/rtc/helpers/sdp.d.ts +8 -0
- package/dist/src/rtc/helpers/tracks.d.ts +1 -0
- package/dist/src/rtc/index.d.ts +3 -0
- package/dist/src/rtc/videoLayers.d.ts +11 -25
- package/dist/src/stats/{stateStoreStatsReporter.d.ts → CallStateStatsReporter.d.ts} +5 -1
- package/dist/src/stats/SfuStatsReporter.d.ts +4 -2
- package/dist/src/stats/index.d.ts +1 -1
- package/dist/src/stats/types.d.ts +8 -0
- package/dist/src/types.d.ts +12 -22
- package/package.json +1 -1
- package/src/Call.ts +254 -268
- package/src/StreamSfuClient.ts +9 -14
- package/src/StreamVideoClient.ts +1 -1
- package/src/__tests__/Call.publishing.test.ts +306 -0
- package/src/devices/CameraManager.ts +33 -16
- package/src/devices/InputMediaDeviceManager.ts +36 -27
- package/src/devices/MicrophoneManager.ts +29 -8
- package/src/devices/ScreenShareManager.ts +6 -8
- package/src/devices/__tests__/CameraManager.test.ts +111 -14
- package/src/devices/__tests__/InputMediaDeviceManager.test.ts +4 -4
- package/src/devices/__tests__/MicrophoneManager.test.ts +59 -21
- package/src/devices/__tests__/ScreenShareManager.test.ts +5 -5
- package/src/devices/__tests__/mocks.ts +1 -0
- package/src/events/__tests__/internal.test.ts +132 -0
- package/src/events/__tests__/mutes.test.ts +0 -3
- package/src/events/__tests__/speaker.test.ts +92 -0
- package/src/events/participant.ts +3 -4
- package/src/gen/video/sfu/event/events.ts +91 -30
- package/src/gen/video/sfu/models/models.ts +105 -13
- package/src/helpers/array.ts +14 -0
- package/src/permissions/PermissionsContext.ts +22 -0
- package/src/permissions/__tests__/PermissionsContext.test.ts +40 -0
- package/src/rpc/__tests__/createClient.test.ts +38 -0
- package/src/rpc/createClient.ts +11 -5
- package/src/rtc/BasePeerConnection.ts +240 -0
- package/src/rtc/Dispatcher.ts +0 -9
- package/src/rtc/IceTrickleBuffer.ts +24 -4
- package/src/rtc/Publisher.ts +210 -528
- package/src/rtc/Subscriber.ts +26 -200
- package/src/rtc/TransceiverCache.ts +120 -0
- package/src/rtc/__tests__/Publisher.test.ts +407 -210
- package/src/rtc/__tests__/Subscriber.test.ts +88 -36
- package/src/rtc/__tests__/mocks/webrtc.mocks.ts +22 -2
- package/src/rtc/__tests__/videoLayers.test.ts +161 -54
- package/src/rtc/codecs.ts +1 -131
- package/src/rtc/helpers/__tests__/rtcConfiguration.test.ts +34 -0
- package/src/rtc/helpers/__tests__/sdp.test.ts +59 -0
- package/src/rtc/helpers/sdp.ts +30 -0
- package/src/rtc/helpers/tracks.ts +3 -0
- package/src/rtc/index.ts +4 -0
- package/src/rtc/videoLayers.ts +68 -76
- package/src/stats/{stateStoreStatsReporter.ts → CallStateStatsReporter.ts} +58 -27
- package/src/stats/SfuStatsReporter.ts +31 -3
- package/src/stats/index.ts +1 -1
- package/src/stats/types.ts +12 -0
- package/src/types.ts +12 -22
- package/dist/src/helpers/sdp-munging.d.ts +0 -24
- package/dist/src/rtc/bitrateLookup.d.ts +0 -2
- package/dist/src/rtc/helpers/iceCandidate.d.ts +0 -2
- package/src/helpers/__tests__/hq-audio-sdp.ts +0 -332
- package/src/helpers/__tests__/sdp-munging.test.ts +0 -283
- package/src/helpers/sdp-munging.ts +0 -265
- package/src/rtc/__tests__/bitrateLookup.test.ts +0 -12
- package/src/rtc/__tests__/codecs.test.ts +0 -145
- package/src/rtc/bitrateLookup.ts +0 -61
- package/src/rtc/helpers/iceCandidate.ts +0 -16
- /package/dist/src/{compatibility.d.ts → helpers/compatibility.d.ts} +0 -0
- /package/src/{compatibility.ts → helpers/compatibility.ts} +0 -0
package/dist/index.es.js
CHANGED
|
@@ -4,9 +4,9 @@ import { ServiceType, stackIntercept, RpcError } from '@protobuf-ts/runtime-rpc'
|
|
|
4
4
|
import axios from 'axios';
|
|
5
5
|
export { AxiosError } from 'axios';
|
|
6
6
|
import { TwirpFetchTransport, TwirpErrorCode } from '@protobuf-ts/twirp-transport';
|
|
7
|
-
import { UAParser } from 'ua-parser-js';
|
|
8
7
|
import { ReplaySubject, combineLatest, BehaviorSubject, map, shareReplay, distinctUntilChanged, takeWhile, distinctUntilKeyChanged, fromEventPattern, startWith, concatMap, from, fromEvent, debounceTime, merge, pairwise, of } from 'rxjs';
|
|
9
|
-
import
|
|
8
|
+
import { parse } from 'sdp-transform';
|
|
9
|
+
import { UAParser } from 'ua-parser-js';
|
|
10
10
|
import https from 'https';
|
|
11
11
|
|
|
12
12
|
/* tslint:disable */
|
|
@@ -1225,23 +1225,33 @@ class VideoLayer$Type extends MessageType {
|
|
|
1225
1225
|
*/
|
|
1226
1226
|
const VideoLayer = new VideoLayer$Type();
|
|
1227
1227
|
// @generated message type with reflection information, may provide speed optimized methods
|
|
1228
|
-
class
|
|
1228
|
+
class SubscribeOption$Type extends MessageType {
|
|
1229
1229
|
constructor() {
|
|
1230
|
-
super('stream.video.sfu.models.
|
|
1230
|
+
super('stream.video.sfu.models.SubscribeOption', [
|
|
1231
1231
|
{
|
|
1232
1232
|
no: 1,
|
|
1233
|
+
name: 'track_type',
|
|
1234
|
+
kind: 'enum',
|
|
1235
|
+
T: () => [
|
|
1236
|
+
'stream.video.sfu.models.TrackType',
|
|
1237
|
+
TrackType,
|
|
1238
|
+
'TRACK_TYPE_',
|
|
1239
|
+
],
|
|
1240
|
+
},
|
|
1241
|
+
{
|
|
1242
|
+
no: 2,
|
|
1233
1243
|
name: 'codecs',
|
|
1234
1244
|
kind: 'message',
|
|
1235
1245
|
repeat: 1 /*RepeatType.PACKED*/,
|
|
1236
|
-
T: () =>
|
|
1246
|
+
T: () => Codec,
|
|
1237
1247
|
},
|
|
1238
1248
|
]);
|
|
1239
1249
|
}
|
|
1240
1250
|
}
|
|
1241
1251
|
/**
|
|
1242
|
-
* @generated MessageType for protobuf message stream.video.sfu.models.
|
|
1252
|
+
* @generated MessageType for protobuf message stream.video.sfu.models.SubscribeOption
|
|
1243
1253
|
*/
|
|
1244
|
-
const
|
|
1254
|
+
const SubscribeOption = new SubscribeOption$Type();
|
|
1245
1255
|
// @generated message type with reflection information, may provide speed optimized methods
|
|
1246
1256
|
class PublishOption$Type extends MessageType {
|
|
1247
1257
|
constructor() {
|
|
@@ -1271,6 +1281,13 @@ class PublishOption$Type extends MessageType {
|
|
|
1271
1281
|
kind: 'scalar',
|
|
1272
1282
|
T: 5 /*ScalarType.INT32*/,
|
|
1273
1283
|
},
|
|
1284
|
+
{
|
|
1285
|
+
no: 7,
|
|
1286
|
+
name: 'video_dimension',
|
|
1287
|
+
kind: 'message',
|
|
1288
|
+
T: () => VideoDimension,
|
|
1289
|
+
},
|
|
1290
|
+
{ no: 8, name: 'id', kind: 'scalar', T: 5 /*ScalarType.INT32*/ },
|
|
1274
1291
|
]);
|
|
1275
1292
|
}
|
|
1276
1293
|
}
|
|
@@ -1283,7 +1300,7 @@ class Codec$Type extends MessageType {
|
|
|
1283
1300
|
constructor() {
|
|
1284
1301
|
super('stream.video.sfu.models.Codec', [
|
|
1285
1302
|
{
|
|
1286
|
-
no:
|
|
1303
|
+
no: 16,
|
|
1287
1304
|
name: 'payload_type',
|
|
1288
1305
|
kind: 'scalar',
|
|
1289
1306
|
T: 13 /*ScalarType.UINT32*/,
|
|
@@ -1296,7 +1313,7 @@ class Codec$Type extends MessageType {
|
|
|
1296
1313
|
T: 13 /*ScalarType.UINT32*/,
|
|
1297
1314
|
},
|
|
1298
1315
|
{
|
|
1299
|
-
no:
|
|
1316
|
+
no: 15,
|
|
1300
1317
|
name: 'encoding_parameters',
|
|
1301
1318
|
kind: 'scalar',
|
|
1302
1319
|
T: 9 /*ScalarType.STRING*/,
|
|
@@ -1360,6 +1377,13 @@ class TrackInfo$Type extends MessageType {
|
|
|
1360
1377
|
{ no: 8, name: 'stereo', kind: 'scalar', T: 8 /*ScalarType.BOOL*/ },
|
|
1361
1378
|
{ no: 9, name: 'red', kind: 'scalar', T: 8 /*ScalarType.BOOL*/ },
|
|
1362
1379
|
{ no: 10, name: 'muted', kind: 'scalar', T: 8 /*ScalarType.BOOL*/ },
|
|
1380
|
+
{ no: 11, name: 'codec', kind: 'message', T: () => Codec },
|
|
1381
|
+
{
|
|
1382
|
+
no: 12,
|
|
1383
|
+
name: 'publish_option_id',
|
|
1384
|
+
kind: 'scalar',
|
|
1385
|
+
T: 5 /*ScalarType.INT32*/,
|
|
1386
|
+
},
|
|
1363
1387
|
]);
|
|
1364
1388
|
}
|
|
1365
1389
|
}
|
|
@@ -1633,10 +1657,10 @@ var models = /*#__PURE__*/Object.freeze({
|
|
|
1633
1657
|
get PeerType () { return PeerType; },
|
|
1634
1658
|
Pin: Pin,
|
|
1635
1659
|
PublishOption: PublishOption,
|
|
1636
|
-
PublishOptions: PublishOptions,
|
|
1637
1660
|
Sdk: Sdk,
|
|
1638
1661
|
get SdkType () { return SdkType; },
|
|
1639
1662
|
StreamQuality: StreamQuality,
|
|
1663
|
+
SubscribeOption: SubscribeOption,
|
|
1640
1664
|
TrackInfo: TrackInfo,
|
|
1641
1665
|
get TrackType () { return TrackType; },
|
|
1642
1666
|
get TrackUnpublishReason () { return TrackUnpublishReason; },
|
|
@@ -2266,13 +2290,6 @@ class SfuEvent$Type extends MessageType {
|
|
|
2266
2290
|
oneof: 'eventPayload',
|
|
2267
2291
|
T: () => ParticipantMigrationComplete,
|
|
2268
2292
|
},
|
|
2269
|
-
{
|
|
2270
|
-
no: 26,
|
|
2271
|
-
name: 'codec_negotiation_complete',
|
|
2272
|
-
kind: 'message',
|
|
2273
|
-
oneof: 'eventPayload',
|
|
2274
|
-
T: () => CodecNegotiationComplete,
|
|
2275
|
-
},
|
|
2276
2293
|
{
|
|
2277
2294
|
no: 27,
|
|
2278
2295
|
name: 'change_publish_options',
|
|
@@ -2293,10 +2310,12 @@ class ChangePublishOptions$Type extends MessageType {
|
|
|
2293
2310
|
super('stream.video.sfu.event.ChangePublishOptions', [
|
|
2294
2311
|
{
|
|
2295
2312
|
no: 1,
|
|
2296
|
-
name: '
|
|
2313
|
+
name: 'publish_options',
|
|
2297
2314
|
kind: 'message',
|
|
2315
|
+
repeat: 1 /*RepeatType.PACKED*/,
|
|
2298
2316
|
T: () => PublishOption,
|
|
2299
2317
|
},
|
|
2318
|
+
{ no: 2, name: 'reason', kind: 'scalar', T: 9 /*ScalarType.STRING*/ },
|
|
2300
2319
|
]);
|
|
2301
2320
|
}
|
|
2302
2321
|
}
|
|
@@ -2305,15 +2324,15 @@ class ChangePublishOptions$Type extends MessageType {
|
|
|
2305
2324
|
*/
|
|
2306
2325
|
const ChangePublishOptions = new ChangePublishOptions$Type();
|
|
2307
2326
|
// @generated message type with reflection information, may provide speed optimized methods
|
|
2308
|
-
class
|
|
2327
|
+
class ChangePublishOptionsComplete$Type extends MessageType {
|
|
2309
2328
|
constructor() {
|
|
2310
|
-
super('stream.video.sfu.event.
|
|
2329
|
+
super('stream.video.sfu.event.ChangePublishOptionsComplete', []);
|
|
2311
2330
|
}
|
|
2312
2331
|
}
|
|
2313
2332
|
/**
|
|
2314
|
-
* @generated MessageType for protobuf message stream.video.sfu.event.
|
|
2333
|
+
* @generated MessageType for protobuf message stream.video.sfu.event.ChangePublishOptionsComplete
|
|
2315
2334
|
*/
|
|
2316
|
-
const
|
|
2335
|
+
const ChangePublishOptionsComplete = new ChangePublishOptionsComplete$Type();
|
|
2317
2336
|
// @generated message type with reflection information, may provide speed optimized methods
|
|
2318
2337
|
class ParticipantMigrationComplete$Type extends MessageType {
|
|
2319
2338
|
constructor() {
|
|
@@ -2571,6 +2590,20 @@ class JoinRequest$Type extends MessageType {
|
|
|
2571
2590
|
kind: 'message',
|
|
2572
2591
|
T: () => ReconnectDetails,
|
|
2573
2592
|
},
|
|
2593
|
+
{
|
|
2594
|
+
no: 9,
|
|
2595
|
+
name: 'preferred_publish_options',
|
|
2596
|
+
kind: 'message',
|
|
2597
|
+
repeat: 1 /*RepeatType.PACKED*/,
|
|
2598
|
+
T: () => PublishOption,
|
|
2599
|
+
},
|
|
2600
|
+
{
|
|
2601
|
+
no: 10,
|
|
2602
|
+
name: 'preferred_subscribe_options',
|
|
2603
|
+
kind: 'message',
|
|
2604
|
+
repeat: 1 /*RepeatType.PACKED*/,
|
|
2605
|
+
T: () => SubscribeOption,
|
|
2606
|
+
},
|
|
2574
2607
|
]);
|
|
2575
2608
|
}
|
|
2576
2609
|
}
|
|
@@ -2678,7 +2711,8 @@ class JoinResponse$Type extends MessageType {
|
|
|
2678
2711
|
no: 4,
|
|
2679
2712
|
name: 'publish_options',
|
|
2680
2713
|
kind: 'message',
|
|
2681
|
-
|
|
2714
|
+
repeat: 1 /*RepeatType.PACKED*/,
|
|
2715
|
+
T: () => PublishOption,
|
|
2682
2716
|
},
|
|
2683
2717
|
]);
|
|
2684
2718
|
}
|
|
@@ -2843,6 +2877,22 @@ class AudioSender$Type extends MessageType {
|
|
|
2843
2877
|
constructor() {
|
|
2844
2878
|
super('stream.video.sfu.event.AudioSender', [
|
|
2845
2879
|
{ no: 2, name: 'codec', kind: 'message', T: () => Codec },
|
|
2880
|
+
{
|
|
2881
|
+
no: 3,
|
|
2882
|
+
name: 'track_type',
|
|
2883
|
+
kind: 'enum',
|
|
2884
|
+
T: () => [
|
|
2885
|
+
'stream.video.sfu.models.TrackType',
|
|
2886
|
+
TrackType,
|
|
2887
|
+
'TRACK_TYPE_',
|
|
2888
|
+
],
|
|
2889
|
+
},
|
|
2890
|
+
{
|
|
2891
|
+
no: 4,
|
|
2892
|
+
name: 'publish_option_id',
|
|
2893
|
+
kind: 'scalar',
|
|
2894
|
+
T: 5 /*ScalarType.INT32*/,
|
|
2895
|
+
},
|
|
2846
2896
|
]);
|
|
2847
2897
|
}
|
|
2848
2898
|
}
|
|
@@ -2895,6 +2945,22 @@ class VideoSender$Type extends MessageType {
|
|
|
2895
2945
|
repeat: 1 /*RepeatType.PACKED*/,
|
|
2896
2946
|
T: () => VideoLayerSetting,
|
|
2897
2947
|
},
|
|
2948
|
+
{
|
|
2949
|
+
no: 4,
|
|
2950
|
+
name: 'track_type',
|
|
2951
|
+
kind: 'enum',
|
|
2952
|
+
T: () => [
|
|
2953
|
+
'stream.video.sfu.models.TrackType',
|
|
2954
|
+
TrackType,
|
|
2955
|
+
'TRACK_TYPE_',
|
|
2956
|
+
],
|
|
2957
|
+
},
|
|
2958
|
+
{
|
|
2959
|
+
no: 5,
|
|
2960
|
+
name: 'publish_option_id',
|
|
2961
|
+
kind: 'scalar',
|
|
2962
|
+
T: 5 /*ScalarType.INT32*/,
|
|
2963
|
+
},
|
|
2898
2964
|
]);
|
|
2899
2965
|
}
|
|
2900
2966
|
}
|
|
@@ -2991,8 +3057,8 @@ var events = /*#__PURE__*/Object.freeze({
|
|
|
2991
3057
|
CallEnded: CallEnded,
|
|
2992
3058
|
CallGrantsUpdated: CallGrantsUpdated,
|
|
2993
3059
|
ChangePublishOptions: ChangePublishOptions,
|
|
3060
|
+
ChangePublishOptionsComplete: ChangePublishOptionsComplete,
|
|
2994
3061
|
ChangePublishQuality: ChangePublishQuality,
|
|
2995
|
-
CodecNegotiationComplete: CodecNegotiationComplete,
|
|
2996
3062
|
ConnectionQualityChanged: ConnectionQualityChanged,
|
|
2997
3063
|
ConnectionQualityInfo: ConnectionQualityInfo,
|
|
2998
3064
|
DominantSpeakerChanged: DominantSpeakerChanged,
|
|
@@ -3139,11 +3205,18 @@ const withHeaders = (headers) => {
|
|
|
3139
3205
|
const withRequestLogger = (logger, level) => {
|
|
3140
3206
|
return {
|
|
3141
3207
|
interceptUnary: (next, method, input, options) => {
|
|
3142
|
-
|
|
3143
|
-
|
|
3144
|
-
options
|
|
3145
|
-
}
|
|
3146
|
-
|
|
3208
|
+
let invocation;
|
|
3209
|
+
try {
|
|
3210
|
+
invocation = next(method, input, options);
|
|
3211
|
+
}
|
|
3212
|
+
finally {
|
|
3213
|
+
logger(level, `Invoked SFU RPC method ${method.name}`, {
|
|
3214
|
+
request: invocation?.request,
|
|
3215
|
+
headers: invocation?.requestHeaders,
|
|
3216
|
+
response: invocation?.response,
|
|
3217
|
+
});
|
|
3218
|
+
}
|
|
3219
|
+
return invocation;
|
|
3147
3220
|
},
|
|
3148
3221
|
};
|
|
3149
3222
|
};
|
|
@@ -3360,665 +3433,139 @@ const retryable = async (rpc, signal) => {
|
|
|
3360
3433
|
return result;
|
|
3361
3434
|
};
|
|
3362
3435
|
|
|
3363
|
-
|
|
3364
|
-
|
|
3365
|
-
|
|
3366
|
-
|
|
3367
|
-
|
|
3368
|
-
|
|
3369
|
-
|
|
3370
|
-
|
|
3371
|
-
|
|
3372
|
-
|
|
3373
|
-
|
|
3374
|
-
|
|
3375
|
-
const
|
|
3376
|
-
|
|
3377
|
-
|
|
3378
|
-
|
|
3379
|
-
|
|
3380
|
-
|
|
3381
|
-
const setOSInfo = (info) => {
|
|
3382
|
-
osInfo = info;
|
|
3383
|
-
};
|
|
3384
|
-
const getOSInfo = () => {
|
|
3385
|
-
return osInfo;
|
|
3386
|
-
};
|
|
3387
|
-
const setDeviceInfo = (info) => {
|
|
3388
|
-
deviceInfo = info;
|
|
3436
|
+
/**
|
|
3437
|
+
* Returns a generic SDP for the given direction.
|
|
3438
|
+
* We use this SDP to send it as part of our JoinRequest so that the SFU
|
|
3439
|
+
* can use it to determine the client's codec capabilities.
|
|
3440
|
+
*
|
|
3441
|
+
* @param direction the direction of the transceiver.
|
|
3442
|
+
*/
|
|
3443
|
+
const getGenericSdp = async (direction) => {
|
|
3444
|
+
const tempPc = new RTCPeerConnection();
|
|
3445
|
+
tempPc.addTransceiver('video', { direction });
|
|
3446
|
+
tempPc.addTransceiver('audio', { direction });
|
|
3447
|
+
const offer = await tempPc.createOffer();
|
|
3448
|
+
const sdp = offer.sdp ?? '';
|
|
3449
|
+
tempPc.getTransceivers().forEach((t) => {
|
|
3450
|
+
t.stop?.();
|
|
3451
|
+
});
|
|
3452
|
+
tempPc.close();
|
|
3453
|
+
return sdp;
|
|
3389
3454
|
};
|
|
3390
|
-
|
|
3391
|
-
|
|
3455
|
+
/**
|
|
3456
|
+
* Returns whether the codec is an SVC codec.
|
|
3457
|
+
*
|
|
3458
|
+
* @param codecOrMimeType the codec to check.
|
|
3459
|
+
*/
|
|
3460
|
+
const isSvcCodec = (codecOrMimeType) => {
|
|
3461
|
+
if (!codecOrMimeType)
|
|
3462
|
+
return false;
|
|
3463
|
+
codecOrMimeType = codecOrMimeType.toLowerCase();
|
|
3464
|
+
return (codecOrMimeType === 'vp9' ||
|
|
3465
|
+
codecOrMimeType === 'av1' ||
|
|
3466
|
+
codecOrMimeType === 'video/vp9' ||
|
|
3467
|
+
codecOrMimeType === 'video/av1');
|
|
3392
3468
|
};
|
|
3393
|
-
|
|
3394
|
-
|
|
3469
|
+
|
|
3470
|
+
const sfuEventKinds = {
|
|
3471
|
+
subscriberOffer: undefined,
|
|
3472
|
+
publisherAnswer: undefined,
|
|
3473
|
+
connectionQualityChanged: undefined,
|
|
3474
|
+
audioLevelChanged: undefined,
|
|
3475
|
+
iceTrickle: undefined,
|
|
3476
|
+
changePublishQuality: undefined,
|
|
3477
|
+
participantJoined: undefined,
|
|
3478
|
+
participantLeft: undefined,
|
|
3479
|
+
dominantSpeakerChanged: undefined,
|
|
3480
|
+
joinResponse: undefined,
|
|
3481
|
+
healthCheckResponse: undefined,
|
|
3482
|
+
trackPublished: undefined,
|
|
3483
|
+
trackUnpublished: undefined,
|
|
3484
|
+
error: undefined,
|
|
3485
|
+
callGrantsUpdated: undefined,
|
|
3486
|
+
goAway: undefined,
|
|
3487
|
+
iceRestart: undefined,
|
|
3488
|
+
pinsUpdated: undefined,
|
|
3489
|
+
callEnded: undefined,
|
|
3490
|
+
participantUpdated: undefined,
|
|
3491
|
+
participantMigrationComplete: undefined,
|
|
3492
|
+
changePublishOptions: undefined,
|
|
3395
3493
|
};
|
|
3396
|
-
const
|
|
3397
|
-
|
|
3494
|
+
const isSfuEvent = (eventName) => {
|
|
3495
|
+
return Object.prototype.hasOwnProperty.call(sfuEventKinds, eventName);
|
|
3398
3496
|
};
|
|
3399
|
-
|
|
3400
|
-
|
|
3401
|
-
|
|
3402
|
-
|
|
3403
|
-
|
|
3404
|
-
|
|
3405
|
-
|
|
3406
|
-
|
|
3407
|
-
|
|
3408
|
-
|
|
3409
|
-
|
|
3410
|
-
|
|
3411
|
-
|
|
3412
|
-
|
|
3413
|
-
|
|
3497
|
+
class Dispatcher {
|
|
3498
|
+
constructor() {
|
|
3499
|
+
this.logger = getLogger(['Dispatcher']);
|
|
3500
|
+
this.subscribers = {};
|
|
3501
|
+
this.dispatch = (message, logTag = '0') => {
|
|
3502
|
+
const eventKind = message.eventPayload.oneofKind;
|
|
3503
|
+
if (!eventKind)
|
|
3504
|
+
return;
|
|
3505
|
+
const payload = message.eventPayload[eventKind];
|
|
3506
|
+
this.logger('debug', `Dispatching ${eventKind}, tag=${logTag}`, payload);
|
|
3507
|
+
const listeners = this.subscribers[eventKind];
|
|
3508
|
+
if (!listeners)
|
|
3509
|
+
return;
|
|
3510
|
+
for (const fn of listeners) {
|
|
3511
|
+
try {
|
|
3512
|
+
fn(payload);
|
|
3513
|
+
}
|
|
3514
|
+
catch (e) {
|
|
3515
|
+
this.logger('warn', 'Listener failed with error', e);
|
|
3516
|
+
}
|
|
3517
|
+
}
|
|
3518
|
+
};
|
|
3519
|
+
this.on = (eventName, fn) => {
|
|
3520
|
+
var _a;
|
|
3521
|
+
((_a = this.subscribers)[eventName] ?? (_a[eventName] = [])).push(fn);
|
|
3522
|
+
return () => {
|
|
3523
|
+
this.off(eventName, fn);
|
|
3524
|
+
};
|
|
3525
|
+
};
|
|
3526
|
+
this.off = (eventName, fn) => {
|
|
3527
|
+
this.subscribers[eventName] = (this.subscribers[eventName] || []).filter((f) => f !== fn);
|
|
3414
3528
|
};
|
|
3415
3529
|
}
|
|
3416
|
-
|
|
3417
|
-
|
|
3418
|
-
|
|
3419
|
-
|
|
3420
|
-
|
|
3421
|
-
|
|
3422
|
-
|
|
3423
|
-
|
|
3424
|
-
|
|
3425
|
-
|
|
3530
|
+
}
|
|
3531
|
+
|
|
3532
|
+
/**
|
|
3533
|
+
* A buffer for ICE Candidates. Used for ICE Trickle:
|
|
3534
|
+
* - https://bloggeek.me/webrtcglossary/trickle-ice/
|
|
3535
|
+
*/
|
|
3536
|
+
class IceTrickleBuffer {
|
|
3537
|
+
constructor() {
|
|
3538
|
+
this.subscriberCandidates = new ReplaySubject();
|
|
3539
|
+
this.publisherCandidates = new ReplaySubject();
|
|
3540
|
+
this.push = (iceTrickle) => {
|
|
3541
|
+
const iceCandidate = toIceCandidate(iceTrickle);
|
|
3542
|
+
if (!iceCandidate)
|
|
3543
|
+
return;
|
|
3544
|
+
if (iceTrickle.peerType === PeerType.SUBSCRIBER) {
|
|
3545
|
+
this.subscriberCandidates.next(iceCandidate);
|
|
3546
|
+
}
|
|
3547
|
+
else if (iceTrickle.peerType === PeerType.PUBLISHER_UNSPECIFIED) {
|
|
3548
|
+
this.publisherCandidates.next(iceCandidate);
|
|
3549
|
+
}
|
|
3550
|
+
else {
|
|
3551
|
+
const logger = getLogger(['sfu-client']);
|
|
3552
|
+
logger('warn', `ICETrickle, Unknown peer type`, iceTrickle);
|
|
3553
|
+
}
|
|
3554
|
+
};
|
|
3555
|
+
this.dispose = () => {
|
|
3556
|
+
this.subscriberCandidates.complete();
|
|
3557
|
+
this.publisherCandidates.complete();
|
|
3426
3558
|
};
|
|
3427
3559
|
}
|
|
3428
|
-
}
|
|
3429
|
-
const
|
|
3430
|
-
|
|
3431
|
-
|
|
3432
|
-
return;
|
|
3560
|
+
}
|
|
3561
|
+
const toIceCandidate = (iceTrickle) => {
|
|
3562
|
+
try {
|
|
3563
|
+
return JSON.parse(iceTrickle.iceCandidate);
|
|
3433
3564
|
}
|
|
3434
|
-
|
|
3435
|
-
|
|
3436
|
-
|
|
3437
|
-
|
|
3438
|
-
thermalState: deviceState?.oneofKind === 'android'
|
|
3439
|
-
? deviceState.android.thermalState
|
|
3440
|
-
: AndroidThermalState.UNSPECIFIED,
|
|
3441
|
-
isPowerSaverMode: powerMode,
|
|
3442
|
-
},
|
|
3443
|
-
};
|
|
3444
|
-
}
|
|
3445
|
-
if (osInfo.name.toLowerCase() === 'ios') {
|
|
3446
|
-
deviceState = {
|
|
3447
|
-
oneofKind: 'apple',
|
|
3448
|
-
apple: {
|
|
3449
|
-
thermalState: deviceState?.oneofKind === 'apple'
|
|
3450
|
-
? deviceState.apple.thermalState
|
|
3451
|
-
: AppleThermalState.UNSPECIFIED,
|
|
3452
|
-
isLowPowerModeEnabled: powerMode,
|
|
3453
|
-
},
|
|
3454
|
-
};
|
|
3455
|
-
}
|
|
3456
|
-
};
|
|
3457
|
-
const getDeviceState = () => {
|
|
3458
|
-
return deviceState;
|
|
3459
|
-
};
|
|
3460
|
-
const getClientDetails = () => {
|
|
3461
|
-
if (isReactNative()) {
|
|
3462
|
-
// Since RN doesn't support web, sharing browser info is not required
|
|
3463
|
-
return {
|
|
3464
|
-
sdk: getSdkInfo(),
|
|
3465
|
-
os: getOSInfo(),
|
|
3466
|
-
device: getDeviceInfo(),
|
|
3467
|
-
};
|
|
3468
|
-
}
|
|
3469
|
-
const userAgent = new UAParser(navigator.userAgent);
|
|
3470
|
-
const { browser, os, device, cpu } = userAgent.getResult();
|
|
3471
|
-
return {
|
|
3472
|
-
sdk: getSdkInfo(),
|
|
3473
|
-
browser: {
|
|
3474
|
-
name: browser.name || navigator.userAgent,
|
|
3475
|
-
version: browser.version || '',
|
|
3476
|
-
},
|
|
3477
|
-
os: {
|
|
3478
|
-
name: os.name || '',
|
|
3479
|
-
version: os.version || '',
|
|
3480
|
-
architecture: cpu.architecture || '',
|
|
3481
|
-
},
|
|
3482
|
-
device: {
|
|
3483
|
-
name: [device.vendor, device.model, device.type]
|
|
3484
|
-
.filter(Boolean)
|
|
3485
|
-
.join(' '),
|
|
3486
|
-
version: '',
|
|
3487
|
-
},
|
|
3488
|
-
};
|
|
3489
|
-
};
|
|
3490
|
-
|
|
3491
|
-
/**
|
|
3492
|
-
* Checks whether the current browser is Safari.
|
|
3493
|
-
*/
|
|
3494
|
-
const isSafari = () => {
|
|
3495
|
-
if (typeof navigator === 'undefined')
|
|
3496
|
-
return false;
|
|
3497
|
-
return /^((?!chrome|android).)*safari/i.test(navigator.userAgent || '');
|
|
3498
|
-
};
|
|
3499
|
-
/**
|
|
3500
|
-
* Checks whether the current browser is Firefox.
|
|
3501
|
-
*/
|
|
3502
|
-
const isFirefox = () => {
|
|
3503
|
-
if (typeof navigator === 'undefined')
|
|
3504
|
-
return false;
|
|
3505
|
-
return navigator.userAgent?.includes('Firefox');
|
|
3506
|
-
};
|
|
3507
|
-
/**
|
|
3508
|
-
* Checks whether the current browser is Google Chrome.
|
|
3509
|
-
*/
|
|
3510
|
-
const isChrome = () => {
|
|
3511
|
-
if (typeof navigator === 'undefined')
|
|
3512
|
-
return false;
|
|
3513
|
-
return navigator.userAgent?.includes('Chrome');
|
|
3514
|
-
};
|
|
3515
|
-
|
|
3516
|
-
var browsers = /*#__PURE__*/Object.freeze({
|
|
3517
|
-
__proto__: null,
|
|
3518
|
-
isChrome: isChrome,
|
|
3519
|
-
isFirefox: isFirefox,
|
|
3520
|
-
isSafari: isSafari
|
|
3521
|
-
});
|
|
3522
|
-
|
|
3523
|
-
/**
|
|
3524
|
-
* Returns back a list of sorted codecs, with the preferred codec first.
|
|
3525
|
-
*
|
|
3526
|
-
* @param kind the kind of codec to get.
|
|
3527
|
-
* @param preferredCodec the codec to prioritize (vp8, h264, vp9, av1...).
|
|
3528
|
-
* @param codecToRemove the codec to exclude from the list.
|
|
3529
|
-
* @param codecPreferencesSource the source of the codec preferences.
|
|
3530
|
-
*/
|
|
3531
|
-
const getPreferredCodecs = (kind, preferredCodec, codecToRemove, codecPreferencesSource) => {
|
|
3532
|
-
const source = codecPreferencesSource === 'receiver' ? RTCRtpReceiver : RTCRtpSender;
|
|
3533
|
-
if (!('getCapabilities' in source))
|
|
3534
|
-
return;
|
|
3535
|
-
const capabilities = source.getCapabilities(kind);
|
|
3536
|
-
if (!capabilities)
|
|
3537
|
-
return;
|
|
3538
|
-
const preferred = [];
|
|
3539
|
-
const partiallyPreferred = [];
|
|
3540
|
-
const unpreferred = [];
|
|
3541
|
-
const preferredCodecMimeType = `${kind}/${preferredCodec.toLowerCase()}`;
|
|
3542
|
-
const codecToRemoveMimeType = codecToRemove && `${kind}/${codecToRemove.toLowerCase()}`;
|
|
3543
|
-
for (const codec of capabilities.codecs) {
|
|
3544
|
-
const codecMimeType = codec.mimeType.toLowerCase();
|
|
3545
|
-
const shouldRemoveCodec = codecMimeType === codecToRemoveMimeType;
|
|
3546
|
-
if (shouldRemoveCodec)
|
|
3547
|
-
continue; // skip this codec
|
|
3548
|
-
const isPreferredCodec = codecMimeType === preferredCodecMimeType;
|
|
3549
|
-
if (!isPreferredCodec) {
|
|
3550
|
-
unpreferred.push(codec);
|
|
3551
|
-
continue;
|
|
3552
|
-
}
|
|
3553
|
-
// h264 is a special case, we want to prioritize the baseline codec with
|
|
3554
|
-
// profile-level-id is 42e01f and packetization-mode=0 for maximum
|
|
3555
|
-
// cross-browser compatibility.
|
|
3556
|
-
// this branch covers the other cases, such as vp8.
|
|
3557
|
-
if (codecMimeType !== 'video/h264') {
|
|
3558
|
-
preferred.push(codec);
|
|
3559
|
-
continue;
|
|
3560
|
-
}
|
|
3561
|
-
const sdpFmtpLine = codec.sdpFmtpLine;
|
|
3562
|
-
if (!sdpFmtpLine || !sdpFmtpLine.includes('profile-level-id=42')) {
|
|
3563
|
-
// this is not the baseline h264 codec, prioritize it lower
|
|
3564
|
-
partiallyPreferred.push(codec);
|
|
3565
|
-
continue;
|
|
3566
|
-
}
|
|
3567
|
-
if (sdpFmtpLine.includes('packetization-mode=1')) {
|
|
3568
|
-
preferred.unshift(codec);
|
|
3569
|
-
}
|
|
3570
|
-
else {
|
|
3571
|
-
preferred.push(codec);
|
|
3572
|
-
}
|
|
3573
|
-
}
|
|
3574
|
-
// return a sorted list of codecs, with the preferred codecs first
|
|
3575
|
-
return [...preferred, ...partiallyPreferred, ...unpreferred];
|
|
3576
|
-
};
|
|
3577
|
-
/**
|
|
3578
|
-
* Returns a generic SDP for the given direction.
|
|
3579
|
-
* We use this SDP to send it as part of our JoinRequest so that the SFU
|
|
3580
|
-
* can use it to determine client's codec capabilities.
|
|
3581
|
-
*
|
|
3582
|
-
* @param direction the direction of the transceiver.
|
|
3583
|
-
*/
|
|
3584
|
-
const getGenericSdp = async (direction) => {
|
|
3585
|
-
const tempPc = new RTCPeerConnection();
|
|
3586
|
-
tempPc.addTransceiver('video', { direction });
|
|
3587
|
-
tempPc.addTransceiver('audio', { direction });
|
|
3588
|
-
const offer = await tempPc.createOffer();
|
|
3589
|
-
const sdp = offer.sdp ?? '';
|
|
3590
|
-
tempPc.getTransceivers().forEach((t) => {
|
|
3591
|
-
t.stop?.();
|
|
3592
|
-
});
|
|
3593
|
-
tempPc.close();
|
|
3594
|
-
return sdp;
|
|
3595
|
-
};
|
|
3596
|
-
/**
|
|
3597
|
-
* Returns the optimal video codec for the device.
|
|
3598
|
-
*/
|
|
3599
|
-
const getOptimalVideoCodec = (preferredCodec) => {
|
|
3600
|
-
if (isReactNative()) {
|
|
3601
|
-
const os = getOSInfo()?.name.toLowerCase();
|
|
3602
|
-
if (os === 'android')
|
|
3603
|
-
return preferredOr(preferredCodec, 'vp8');
|
|
3604
|
-
if (os === 'ios' || os === 'ipados') {
|
|
3605
|
-
return supportsH264Baseline() ? 'h264' : 'vp8';
|
|
3606
|
-
}
|
|
3607
|
-
return preferredOr(preferredCodec, 'h264');
|
|
3608
|
-
}
|
|
3609
|
-
if (isSafari())
|
|
3610
|
-
return 'h264';
|
|
3611
|
-
if (isFirefox())
|
|
3612
|
-
return 'vp8';
|
|
3613
|
-
return preferredOr(preferredCodec, 'vp8');
|
|
3614
|
-
};
|
|
3615
|
-
/**
|
|
3616
|
-
* Determines if the platform supports the preferred codec.
|
|
3617
|
-
* If not, it returns the fallback codec.
|
|
3618
|
-
*/
|
|
3619
|
-
const preferredOr = (codec, fallback) => {
|
|
3620
|
-
if (!codec)
|
|
3621
|
-
return fallback;
|
|
3622
|
-
if (!('getCapabilities' in RTCRtpSender))
|
|
3623
|
-
return fallback;
|
|
3624
|
-
const capabilities = RTCRtpSender.getCapabilities('video');
|
|
3625
|
-
if (!capabilities)
|
|
3626
|
-
return fallback;
|
|
3627
|
-
// Safari and Firefox do not have a good support encoding to SVC codecs,
|
|
3628
|
-
// so we disable it for them.
|
|
3629
|
-
if (isSvcCodec(codec) && (isSafari() || isFirefox()))
|
|
3630
|
-
return fallback;
|
|
3631
|
-
const { codecs } = capabilities;
|
|
3632
|
-
const codecMimeType = `video/${codec}`.toLowerCase();
|
|
3633
|
-
return codecs.some((c) => c.mimeType.toLowerCase() === codecMimeType)
|
|
3634
|
-
? codec
|
|
3635
|
-
: fallback;
|
|
3636
|
-
};
|
|
3637
|
-
/**
|
|
3638
|
-
* Returns whether the platform supports the H264 baseline codec.
|
|
3639
|
-
*/
|
|
3640
|
-
const supportsH264Baseline = () => {
|
|
3641
|
-
if (!('getCapabilities' in RTCRtpSender))
|
|
3642
|
-
return false;
|
|
3643
|
-
const capabilities = RTCRtpSender.getCapabilities('video');
|
|
3644
|
-
if (!capabilities)
|
|
3645
|
-
return false;
|
|
3646
|
-
return capabilities.codecs.some((c) => c.mimeType.toLowerCase() === 'video/h264' &&
|
|
3647
|
-
c.sdpFmtpLine?.includes('profile-level-id=42e01f'));
|
|
3648
|
-
};
|
|
3649
|
-
/**
|
|
3650
|
-
* Returns whether the codec is an SVC codec.
|
|
3651
|
-
*
|
|
3652
|
-
* @param codecOrMimeType the codec to check.
|
|
3653
|
-
*/
|
|
3654
|
-
const isSvcCodec = (codecOrMimeType) => {
|
|
3655
|
-
if (!codecOrMimeType)
|
|
3656
|
-
return false;
|
|
3657
|
-
codecOrMimeType = codecOrMimeType.toLowerCase();
|
|
3658
|
-
return (codecOrMimeType === 'vp9' ||
|
|
3659
|
-
codecOrMimeType === 'av1' ||
|
|
3660
|
-
codecOrMimeType === 'video/vp9' ||
|
|
3661
|
-
codecOrMimeType === 'video/av1');
|
|
3662
|
-
};
|
|
3663
|
-
|
|
3664
|
-
/**
 * The set of event kinds the SFU can emit. Only the keys matter —
 * membership is checked via an own-property lookup.
 */
const sfuEventKinds = {
  subscriberOffer: undefined,
  publisherAnswer: undefined,
  connectionQualityChanged: undefined,
  audioLevelChanged: undefined,
  iceTrickle: undefined,
  changePublishQuality: undefined,
  participantJoined: undefined,
  participantLeft: undefined,
  dominantSpeakerChanged: undefined,
  joinResponse: undefined,
  healthCheckResponse: undefined,
  trackPublished: undefined,
  trackUnpublished: undefined,
  error: undefined,
  callGrantsUpdated: undefined,
  goAway: undefined,
  iceRestart: undefined,
  pinsUpdated: undefined,
  callEnded: undefined,
  participantUpdated: undefined,
  participantMigrationComplete: undefined,
  codecNegotiationComplete: undefined,
  changePublishOptions: undefined,
};
/**
 * Checks whether the given event name is a known SFU event.
 * Uses `hasOwnProperty.call` so prototype members never match.
 */
const isSfuEvent = (eventName) =>
  Object.prototype.hasOwnProperty.call(sfuEventKinds, eventName);
|
|
3692
|
-
/**
 * Dispatches SFU events to registered listeners.
 * A failing listener is logged and isolated — it never prevents
 * the remaining listeners from running.
 */
class Dispatcher {
  constructor() {
    this.logger = getLogger(['Dispatcher']);
    this.subscribers = {};
    /**
     * Dispatches the given SFU message to all listeners
     * registered for its event kind.
     */
    this.dispatch = (message, logTag = '0') => {
      const eventKind = message.eventPayload.oneofKind;
      if (!eventKind) return;
      const payload = message.eventPayload[eventKind];
      this.logger('debug', `Dispatching ${eventKind}, tag=${logTag}`, payload);
      const listeners = this.subscribers[eventKind];
      if (!listeners) return;
      for (const listener of listeners) {
        try {
          listener(payload);
        } catch (e) {
          this.logger('warn', 'Listener failed with error', e);
        }
      }
    };
    /**
     * Registers a listener for the given event.
     * @returns a function that removes the listener again.
     */
    this.on = (eventName, fn) => {
      const listeners = this.subscribers[eventName] ?? (this.subscribers[eventName] = []);
      listeners.push(fn);
      return () => {
        this.off(eventName, fn);
      };
    };
    /**
     * Removes a previously registered listener.
     */
    this.off = (eventName, fn) => {
      const remaining = (this.subscribers[eventName] || []).filter(
        (listener) => listener !== fn,
      );
      this.subscribers[eventName] = remaining;
    };
    /**
     * Removes all listeners for the given event,
     * or every listener when no event is given.
     */
    this.offAll = (eventName) => {
      if (eventName) {
        this.subscribers[eventName] = [];
      } else {
        this.subscribers = {};
      }
    };
  }
}
|
|
3734
|
-
|
|
3735
|
-
/**
 * A buffer for ICE Candidates. Used for ICE Trickle:
 * - https://bloggeek.me/webrtcglossary/trickle-ice/
 */
class IceTrickleBuffer {
  constructor() {
    this.subscriberCandidates = new ReplaySubject();
    this.publisherCandidates = new ReplaySubject();
    /**
     * Routes a trickled candidate to the subscriber or publisher
     * stream depending on its peer type; unknown peer types are
     * logged and dropped.
     */
    this.push = (iceTrickle) => {
      switch (iceTrickle.peerType) {
        case PeerType.SUBSCRIBER:
          this.subscriberCandidates.next(iceTrickle);
          break;
        case PeerType.PUBLISHER_UNSPECIFIED:
          this.publisherCandidates.next(iceTrickle);
          break;
        default: {
          const logger = getLogger(['sfu-client']);
          logger('warn', `ICETrickle, Unknown peer type`, iceTrickle);
        }
      }
    };
  }
}
|
|
3757
|
-
|
|
3758
|
-
/**
 * Serializes an RTCIceCandidate to a JSON string, patching in the
 * `usernameFragment` when the platform omits it.
 */
function getIceCandidate(candidate) {
  if (candidate.usernameFragment) {
    return JSON.stringify(candidate.toJSON());
  }
  // react-native-webrtc doesn't include usernameFragment in the candidate
  const segments = candidate.candidate.split(' ');
  const usernameFragment = segments[segments.findIndex((s) => s === 'ufrag') + 1];
  return JSON.stringify({ ...candidate, usernameFragment });
}
|
|
3770
|
-
|
|
3771
|
-
/**
 * Maximum bitrate (bps) per codec and frame height.
 * `default` is used when no height entry can be resolved.
 */
const bitrateLookupTable = {
  h264: { 2160: 5000000, 1440: 3000000, 1080: 2000000, 720: 1250000, 540: 750000, 360: 400000, default: 1250000 },
  vp8: { 2160: 5000000, 1440: 2750000, 1080: 2000000, 720: 1250000, 540: 600000, 360: 350000, default: 1250000 },
  vp9: { 2160: 3000000, 1440: 2000000, 1080: 1500000, 720: 1250000, 540: 500000, 360: 275000, default: 1250000 },
  av1: { 2160: 2000000, 1440: 1550000, 1080: 1000000, 720: 600000, 540: 350000, 360: 200000, default: 600000 },
};
/**
 * Looks up the optimal bitrate for the given codec and frame height.
 * Falls back to the nearest known height, then to the codec default.
 *
 * @throws when the codec is unknown.
 */
const getOptimalBitrate = (codec, frameHeight) => {
  const codecLookup = bitrateLookupTable[codec];
  if (!codecLookup) throw new Error(`Unknown codec: ${codec}`);
  const exact = codecLookup[frameHeight];
  if (exact) return exact;
  // pick the height closest to the requested one (`default` is non-numeric
  // and filtered out — it never wins the distance comparison anyway)
  const heights = Object.keys(codecLookup).map(Number).filter(Number.isFinite);
  const nearest = heights.reduce((best, h) =>
    Math.abs(h - frameHeight) < Math.abs(best - frameHeight) ? h : best);
  return codecLookup[nearest] ?? codecLookup.default;
};
|
|
3821
|
-
|
|
3822
|
-
// Fallback bitrate (bps) when nothing better can be derived.
const DEFAULT_BITRATE = 1250000;
// The target resolution assumed when the caller does not provide one.
const defaultTargetResolution = {
  bitrate: DEFAULT_BITRATE,
  width: 1280,
  height: 720,
};
// Fallback max bitrate (bps) for each simulcast layer (rid).
const defaultBitratePerRid = {
  q: 300000,
  h: 750000,
  f: DEFAULT_BITRATE,
};
|
|
3833
|
-
/**
 * In SVC, we need to send only one video encoding (layer).
 * this layer will have the additional spatial and temporal layers
 * defined via the scalabilityMode property.
 *
 * @param layers the layers to process.
 */
const toSvcEncodings = (layers) => {
  if (!layers) return undefined;
  // we take the `f` layer, and we rename it to `q`.
  const result = [];
  for (const layer of layers) {
    if (layer.rid === 'f') result.push({ ...layer, rid: 'q' });
  }
  return result;
};
|
|
3844
|
-
/**
 * Converts the rid to a video quality.
 */
const ridToVideoQuality = (rid) => {
  switch (rid) {
    case 'q':
      return VideoQuality.LOW_UNSPECIFIED;
    case 'h':
      return VideoQuality.MID;
    default:
      return VideoQuality.HIGH; // default to HIGH
  }
};
|
|
3854
|
-
/**
 * Determines the most optimal video layers for simulcasting
 * for the given track.
 *
 * @param videoTrack the video track to find optimal layers for.
 * @param targetResolution the expected target resolution.
 * @param codecInUse the codec in use.
 * @param publishOptions the publish options for the track.
 */
const findOptimalVideoLayers = (videoTrack, targetResolution = defaultTargetResolution, codecInUse, publishOptions) => {
  const settings = videoTrack.getSettings();
  const { width = 0, height = 0 } = settings;
  const {
    scalabilityMode,
    bitrateDownscaleFactor = 2,
    maxSimulcastLayers = 3,
  } = publishOptions || {};
  const maxBitrate = getComputedMaxBitrate(targetResolution, width, height, codecInUse, publishOptions);
  const svcCodec = isSvcCodec(codecInUse);
  // SVC always uses three (virtual) layers; simulcast is capped at three
  const totalLayers = svcCodec ? 3 : Math.min(3, maxSimulcastLayers);
  const optimalVideoLayers = [];
  let downscale = 1;
  let bitrateScale = 1;
  for (const rid of ['f', 'h', 'q'].slice(0, totalLayers)) {
    const layer = {
      active: true,
      rid,
      width: Math.round(width / downscale),
      height: Math.round(height / downscale),
      maxBitrate: Math.round(maxBitrate / bitrateScale) || defaultBitratePerRid[rid],
      maxFramerate: 30,
    };
    if (svcCodec) {
      // for SVC codecs, we need to set the scalability mode, and the
      // codec will handle the rest (layers, temporal layers, etc.)
      layer.scalabilityMode = scalabilityMode || 'L3T2_KEY';
    } else {
      // for non-SVC codecs, we need to downscale proportionally (simulcast)
      layer.scaleResolutionDownBy = downscale;
    }
    downscale *= 2;
    bitrateScale *= bitrateDownscaleFactor;
    optimalVideoLayers.push(layer);
  }
  // Reversing the order [f, h, q] to [q, h, f] as Chrome uses encoding index
  // when deciding which layer to disable when CPU or bandwidth is constrained.
  // Encodings should be ordered in increasing spatial resolution order.
  optimalVideoLayers.reverse();
  // for simplicity, we start with all layers enabled, then this function
  // will clear/reassign the layers that are not needed
  return withSimulcastConstraints(settings, optimalVideoLayers);
};
|
|
3902
|
-
/**
 * Computes the maximum bitrate for a given resolution.
 * If the current resolution is lower than the target resolution,
 * we want to proportionally reduce the target bitrate.
 * If the current resolution is higher than the target resolution,
 * we want to use the target bitrate.
 *
 * @param targetResolution the target resolution.
 * @param currentWidth the current width of the track.
 * @param currentHeight the current height of the track.
 * @param codecInUse the codec in use.
 * @param publishOptions the publish options.
 */
const getComputedMaxBitrate = (targetResolution, currentWidth, currentHeight, codecInUse, publishOptions) => {
  const { width: targetWidth, height: targetHeight, bitrate: targetBitrate } = targetResolution;
  const { preferredBitrate } = publishOptions || {};
  // the frame height follows the shorter side of the track
  const frameHeight = currentWidth > currentHeight ? currentHeight : currentWidth;
  const bitrate = preferredBitrate ||
    (codecInUse ? getOptimalBitrate(codecInUse, frameHeight) : targetBitrate);
  const isSmallerThanTarget = currentWidth < targetWidth || currentHeight < targetHeight;
  if (!isSmallerThanTarget) return bitrate;
  // proportionally scale the bitrate down by the pixel-count ratio
  const reductionFactor = (currentWidth * currentHeight) / (targetWidth * targetHeight);
  return Math.round(bitrate * reductionFactor);
};
|
|
3931
|
-
/**
 * Browsers have different simulcast constraints for different video resolutions.
 *
 * This function modifies the provided list of video layers according to the
 * current implementation of simulcast constraints in the Chromium based browsers.
 *
 * https://chromium.googlesource.com/external/webrtc/+/refs/heads/main/media/engine/simulcast.cc#90
 */
const withSimulcastConstraints = (settings, optimalVideoLayers) => {
  const size = Math.max(settings.width || 0, settings.height || 0);
  const selectLayers = () => {
    if (size <= 320) {
      // provide only one layer 320x240 (q), the one with the highest quality
      return optimalVideoLayers.filter((layer) => layer.rid === 'f');
    }
    if (size <= 640) {
      // provide two layers, 160x120 (q) and 640x480 (h)
      return optimalVideoLayers.filter((layer) => layer.rid !== 'h');
    }
    // provide three layers for sizes > 640x480
    return optimalVideoLayers;
  };
  const ridMapping = ['q', 'h', 'f'];
  // reassign rids so the surviving layers are named in increasing order
  return selectLayers().map((layer, index) => ({ ...layer, rid: ridMapping[index] }));
};
|
|
3960
|
-
/**
 * Computes the single video layer used for screen sharing.
 *
 * @param videoTrack the screen-share video track.
 * @param publishOptions optional publish options (screenShareSettings).
 * @param defaultMaxBitrate fallback max bitrate (bps) when no preference is set.
 */
const findOptimalScreenSharingLayers = (videoTrack, publishOptions, defaultMaxBitrate = 3000000) => {
  const { screenShareSettings: preferences } = publishOptions || {};
  const settings = videoTrack.getSettings();
  const layer = {
    active: true,
    rid: 'q', // single track, start from 'q'
    width: settings.width || 0,
    height: settings.height || 0,
    scaleResolutionDownBy: 1,
    maxBitrate: preferences?.maxBitrate ?? defaultMaxBitrate,
    maxFramerate: preferences?.maxFramerate ?? 30,
  };
  return [layer];
};
|
|
3975
|
-
|
|
3976
|
-
/**
 * Exhaustiveness guard for switch statements: logs a warning
 * for a value that should have been unreachable.
 */
const ensureExhausted = (x, message) => {
  const log = getLogger(['helpers']);
  log('warn', message, x);
};
|
|
3979
|
-
|
|
3980
|
-
/**
 * Maps a TrackType to the corresponding stream property name
 * on the participant state object.
 *
 * @throws when the track type is unspecified.
 */
const trackTypeToParticipantStreamKey = (trackType) => {
  if (trackType === TrackType.UNSPECIFIED) {
    throw new Error('Track type is unspecified');
  }
  switch (trackType) {
    case TrackType.AUDIO:
      return 'audioStream';
    case TrackType.VIDEO:
      return 'videoStream';
    case TrackType.SCREEN_SHARE:
      return 'screenShareStream';
    case TrackType.SCREEN_SHARE_AUDIO:
      return 'screenShareAudioStream';
    default:
      ensureExhausted(trackType, 'Unknown track type');
  }
};
|
|
3996
|
-
/**
 * Maps a mute type string to the corresponding TrackType.
 */
const muteTypeToTrackType = (muteType) => {
  switch (muteType) {
    case 'screenshare_audio':
      return TrackType.SCREEN_SHARE_AUDIO;
    case 'screenshare':
      return TrackType.SCREEN_SHARE;
    case 'video':
      return TrackType.VIDEO;
    case 'audio':
      return TrackType.AUDIO;
    default:
      ensureExhausted(muteType, 'Unknown mute type');
  }
};
|
|
4010
|
-
const toTrackType = (trackType) => {
|
|
4011
|
-
switch (trackType) {
|
|
4012
|
-
case 'TRACK_TYPE_AUDIO':
|
|
4013
|
-
return TrackType.AUDIO;
|
|
4014
|
-
case 'TRACK_TYPE_VIDEO':
|
|
4015
|
-
return TrackType.VIDEO;
|
|
4016
|
-
case 'TRACK_TYPE_SCREEN_SHARE':
|
|
4017
|
-
return TrackType.SCREEN_SHARE;
|
|
4018
|
-
case 'TRACK_TYPE_SCREEN_SHARE_AUDIO':
|
|
4019
|
-
return TrackType.SCREEN_SHARE_AUDIO;
|
|
4020
|
-
default:
|
|
4021
|
-
return undefined;
|
|
3565
|
+
catch (e) {
|
|
3566
|
+
const logger = getLogger(['sfu-client']);
|
|
3567
|
+
logger('error', `Failed to parse ICE Trickle`, e, iceTrickle);
|
|
3568
|
+
return undefined;
|
|
4022
3569
|
}
|
|
4023
3570
|
};
|
|
4024
3571
|
|
|
@@ -5586,198 +5133,446 @@ class CallState {
|
|
|
5586
5133
|
}
|
|
5587
5134
|
}
|
|
5588
5135
|
|
|
5589
|
-
/**
 * Parses an SDP `a=rtpmap` attribute line.
 * Example: a=rtpmap:110 opus/48000/2
 *
 * @returns the original line, the payload type and the codec name,
 * or `undefined` when the line does not match.
 */
const getRtpMap = (line) => {
  // group 1: payload type, group 2: encoding name,
  // group 3: clock rate, group 4: extra encoding parameters
  const rtpRegex = /^a=rtpmap:(\d*) ([\w\-.]*)(?:\s*\/(\d*)(?:\s*\/(\S*))?)?/;
  const match = rtpRegex.exec(line);
  if (!match) return undefined;
  const [original, payload, codec] = match;
  return { original, payload, codec };
};
|
|
5602
|
-
/**
 * Parses an SDP `a=fmtp` attribute line.
 * Example: a=fmtp:111 minptime=10; useinbandfec=1
 *
 * @returns the original line, the payload type and the format config,
 * or `undefined` when the line does not match.
 */
const getFmtp = (line) => {
  // group 1: payload type, group 2: format-specific parameters
  const fmtpRegex = /^a=fmtp:(\d*) (.*)/;
  const match = fmtpRegex.exec(line);
  if (!match) return undefined;
  const [original, payload, config] = match;
  return { original, payload, config };
};
|
|
5615
5136
|
/**
|
|
5616
|
-
*
|
|
5617
|
-
*
|
|
5618
|
-
* Example: m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127
|
|
5137
|
+
* A base class for the `Publisher` and `Subscriber` classes.
|
|
5138
|
+
* @internal
|
|
5619
5139
|
*/
|
|
5620
|
-
|
|
5621
|
-
|
|
5622
|
-
|
|
5623
|
-
|
|
5624
|
-
|
|
5625
|
-
|
|
5626
|
-
|
|
5627
|
-
|
|
5140
|
+
class BasePeerConnection {
|
|
5141
|
+
/**
|
|
5142
|
+
* Constructs a new `BasePeerConnection` instance.
|
|
5143
|
+
*/
|
|
5144
|
+
constructor(peerType, { sfuClient, connectionConfig, state, dispatcher, onUnrecoverableError, logTag, }) {
|
|
5145
|
+
this.isIceRestarting = false;
|
|
5146
|
+
this.subscriptions = [];
|
|
5147
|
+
/**
|
|
5148
|
+
* Disposes the `RTCPeerConnection` instance.
|
|
5149
|
+
*/
|
|
5150
|
+
this.dispose = () => {
|
|
5151
|
+
this.detachEventHandlers();
|
|
5152
|
+
this.pc.close();
|
|
5628
5153
|
};
|
|
5629
|
-
|
|
5630
|
-
|
|
5631
|
-
|
|
5632
|
-
|
|
5633
|
-
|
|
5634
|
-
|
|
5635
|
-
|
|
5636
|
-
|
|
5637
|
-
|
|
5638
|
-
|
|
5639
|
-
|
|
5640
|
-
|
|
5641
|
-
|
|
5642
|
-
|
|
5643
|
-
|
|
5644
|
-
|
|
5645
|
-
|
|
5646
|
-
|
|
5647
|
-
|
|
5648
|
-
|
|
5649
|
-
|
|
5154
|
+
/**
|
|
5155
|
+
* Handles events synchronously.
|
|
5156
|
+
* Consecutive events are queued and executed one after the other.
|
|
5157
|
+
*/
|
|
5158
|
+
this.on = (event, fn) => {
|
|
5159
|
+
this.subscriptions.push(this.dispatcher.on(event, (e) => {
|
|
5160
|
+
withoutConcurrency(`pc.${event}`, async () => fn(e)).catch((err) => {
|
|
5161
|
+
this.logger('warn', `Error handling ${event}`, err);
|
|
5162
|
+
});
|
|
5163
|
+
}));
|
|
5164
|
+
};
|
|
5165
|
+
/**
|
|
5166
|
+
* Appends the trickled ICE candidates to the `RTCPeerConnection`.
|
|
5167
|
+
*/
|
|
5168
|
+
this.addTrickledIceCandidates = () => {
|
|
5169
|
+
const { iceTrickleBuffer } = this.sfuClient;
|
|
5170
|
+
const observable = this.peerType === PeerType.SUBSCRIBER
|
|
5171
|
+
? iceTrickleBuffer.subscriberCandidates
|
|
5172
|
+
: iceTrickleBuffer.publisherCandidates;
|
|
5173
|
+
this.unsubscribeIceTrickle?.();
|
|
5174
|
+
this.unsubscribeIceTrickle = createSafeAsyncSubscription(observable, async (candidate) => {
|
|
5175
|
+
return this.pc.addIceCandidate(candidate).catch((e) => {
|
|
5176
|
+
this.logger('warn', `ICE candidate error`, e, candidate);
|
|
5177
|
+
});
|
|
5178
|
+
});
|
|
5179
|
+
};
|
|
5180
|
+
/**
|
|
5181
|
+
* Sets the SFU client to use.
|
|
5182
|
+
*
|
|
5183
|
+
* @param sfuClient the SFU client to use.
|
|
5184
|
+
*/
|
|
5185
|
+
this.setSfuClient = (sfuClient) => {
|
|
5186
|
+
this.sfuClient = sfuClient;
|
|
5187
|
+
};
|
|
5188
|
+
/**
|
|
5189
|
+
* Returns the result of the `RTCPeerConnection.getStats()` method
|
|
5190
|
+
* @param selector an optional `MediaStreamTrack` to get the stats for.
|
|
5191
|
+
*/
|
|
5192
|
+
this.getStats = (selector) => {
|
|
5193
|
+
return this.pc.getStats(selector);
|
|
5194
|
+
};
|
|
5195
|
+
/**
|
|
5196
|
+
* Handles the ICECandidate event and
|
|
5197
|
+
* Initiates an ICE Trickle process with the SFU.
|
|
5198
|
+
*/
|
|
5199
|
+
this.onIceCandidate = (e) => {
|
|
5200
|
+
const { candidate } = e;
|
|
5201
|
+
if (!candidate) {
|
|
5202
|
+
this.logger('debug', 'null ice candidate');
|
|
5203
|
+
return;
|
|
5650
5204
|
}
|
|
5651
|
-
|
|
5652
|
-
|
|
5653
|
-
|
|
5654
|
-
|
|
5655
|
-
|
|
5656
|
-
|
|
5205
|
+
const iceCandidate = this.toJSON(candidate);
|
|
5206
|
+
this.sfuClient
|
|
5207
|
+
.iceTrickle({ peerType: this.peerType, iceCandidate })
|
|
5208
|
+
.catch((err) => this.logger('warn', `ICETrickle failed`, err));
|
|
5209
|
+
};
|
|
5210
|
+
/**
|
|
5211
|
+
* Converts the ICE candidate to a JSON string.
|
|
5212
|
+
*/
|
|
5213
|
+
this.toJSON = (candidate) => {
|
|
5214
|
+
if (!candidate.usernameFragment) {
|
|
5215
|
+
// react-native-webrtc doesn't include usernameFragment in the candidate
|
|
5216
|
+
const segments = candidate.candidate.split(' ');
|
|
5217
|
+
const ufragIndex = segments.findIndex((s) => s === 'ufrag') + 1;
|
|
5218
|
+
const usernameFragment = segments[ufragIndex];
|
|
5219
|
+
return JSON.stringify({ ...candidate, usernameFragment });
|
|
5657
5220
|
}
|
|
5658
|
-
|
|
5659
|
-
|
|
5221
|
+
return JSON.stringify(candidate.toJSON());
|
|
5222
|
+
};
|
|
5223
|
+
/**
|
|
5224
|
+
* Handles the ICE connection state change event.
|
|
5225
|
+
*/
|
|
5226
|
+
this.onIceConnectionStateChange = () => {
|
|
5227
|
+
const state = this.pc.iceConnectionState;
|
|
5228
|
+
this.logger('debug', `ICE connection state changed`, state);
|
|
5229
|
+
if (this.state.callingState === CallingState.RECONNECTING)
|
|
5230
|
+
return;
|
|
5231
|
+
// do nothing when ICE is restarting
|
|
5232
|
+
if (this.isIceRestarting)
|
|
5233
|
+
return;
|
|
5234
|
+
if (state === 'failed' || state === 'disconnected') {
|
|
5235
|
+
this.logger('debug', `Attempting to restart ICE`);
|
|
5236
|
+
this.restartIce().catch((e) => {
|
|
5237
|
+
this.logger('error', `ICE restart failed`, e);
|
|
5238
|
+
this.onUnrecoverableError?.();
|
|
5239
|
+
});
|
|
5240
|
+
}
|
|
5241
|
+
};
|
|
5242
|
+
/**
|
|
5243
|
+
* Handles the ICE candidate error event.
|
|
5244
|
+
*/
|
|
5245
|
+
this.onIceCandidateError = (e) => {
|
|
5246
|
+
const errorMessage = e instanceof RTCPeerConnectionIceErrorEvent &&
|
|
5247
|
+
`${e.errorCode}: ${e.errorText}`;
|
|
5248
|
+
const iceState = this.pc.iceConnectionState;
|
|
5249
|
+
const logLevel = iceState === 'connected' || iceState === 'checking' ? 'debug' : 'warn';
|
|
5250
|
+
this.logger(logLevel, `ICE Candidate error`, errorMessage);
|
|
5251
|
+
};
|
|
5252
|
+
/**
|
|
5253
|
+
* Handles the ICE gathering state change event.
|
|
5254
|
+
*/
|
|
5255
|
+
this.onIceGatherChange = () => {
|
|
5256
|
+
this.logger('debug', `ICE Gathering State`, this.pc.iceGatheringState);
|
|
5257
|
+
};
|
|
5258
|
+
/**
|
|
5259
|
+
* Handles the signaling state change event.
|
|
5260
|
+
*/
|
|
5261
|
+
this.onSignalingChange = () => {
|
|
5262
|
+
this.logger('debug', `Signaling state changed`, this.pc.signalingState);
|
|
5263
|
+
};
|
|
5264
|
+
this.peerType = peerType;
|
|
5265
|
+
this.sfuClient = sfuClient;
|
|
5266
|
+
this.state = state;
|
|
5267
|
+
this.dispatcher = dispatcher;
|
|
5268
|
+
this.onUnrecoverableError = onUnrecoverableError;
|
|
5269
|
+
this.logger = getLogger([
|
|
5270
|
+
peerType === PeerType.SUBSCRIBER ? 'Subscriber' : 'Publisher',
|
|
5271
|
+
logTag,
|
|
5272
|
+
]);
|
|
5273
|
+
this.pc = new RTCPeerConnection(connectionConfig);
|
|
5274
|
+
this.pc.addEventListener('icecandidate', this.onIceCandidate);
|
|
5275
|
+
this.pc.addEventListener('icecandidateerror', this.onIceCandidateError);
|
|
5276
|
+
this.pc.addEventListener('iceconnectionstatechange', this.onIceConnectionStateChange);
|
|
5277
|
+
this.pc.addEventListener('icegatheringstatechange', this.onIceGatherChange);
|
|
5278
|
+
this.pc.addEventListener('signalingstatechange', this.onSignalingChange);
|
|
5279
|
+
}
|
|
5280
|
+
/**
|
|
5281
|
+
* Detaches the event handlers from the `RTCPeerConnection`.
|
|
5282
|
+
*/
|
|
5283
|
+
detachEventHandlers() {
|
|
5284
|
+
this.pc.removeEventListener('icecandidate', this.onIceCandidate);
|
|
5285
|
+
this.pc.removeEventListener('icecandidateerror', this.onIceCandidateError);
|
|
5286
|
+
this.pc.removeEventListener('signalingstatechange', this.onSignalingChange);
|
|
5287
|
+
this.pc.removeEventListener('iceconnectionstatechange', this.onIceConnectionStateChange);
|
|
5288
|
+
this.pc.removeEventListener('icegatheringstatechange', this.onIceGatherChange);
|
|
5289
|
+
this.unsubscribeIceTrickle?.();
|
|
5290
|
+
this.subscriptions.forEach((unsubscribe) => unsubscribe());
|
|
5291
|
+
}
|
|
5292
|
+
}
|
|
5293
|
+
|
|
5294
|
+
class TransceiverCache {
|
|
5295
|
+
constructor() {
|
|
5296
|
+
this.cache = [];
|
|
5297
|
+
this.layers = [];
|
|
5298
|
+
/**
|
|
5299
|
+
* An array maintaining the order how transceivers were added to the peer connection.
|
|
5300
|
+
* This is needed because some browsers (Firefox) don't reliably report
|
|
5301
|
+
* trackId and `mid` parameters.
|
|
5302
|
+
*/
|
|
5303
|
+
this.transceiverOrder = [];
|
|
5304
|
+
/**
|
|
5305
|
+
* Adds a transceiver to the cache.
|
|
5306
|
+
*/
|
|
5307
|
+
this.add = (publishOption, transceiver) => {
|
|
5308
|
+
this.cache.push({ publishOption, transceiver });
|
|
5309
|
+
this.transceiverOrder.push(transceiver);
|
|
5310
|
+
};
|
|
5311
|
+
/**
|
|
5312
|
+
* Gets the transceiver for the given publish option.
|
|
5313
|
+
*/
|
|
5314
|
+
this.get = (publishOption) => {
|
|
5315
|
+
return this.findTransceiver(publishOption)?.transceiver;
|
|
5316
|
+
};
|
|
5317
|
+
/**
|
|
5318
|
+
* Gets the last transceiver for the given track type and publish option id.
|
|
5319
|
+
*/
|
|
5320
|
+
this.getWith = (trackType, id) => {
|
|
5321
|
+
return this.findTransceiver({ trackType, id })?.transceiver;
|
|
5322
|
+
};
|
|
5323
|
+
/**
|
|
5324
|
+
* Checks if the cache has the given publish option.
|
|
5325
|
+
*/
|
|
5326
|
+
this.has = (publishOption) => {
|
|
5327
|
+
return !!this.get(publishOption);
|
|
5328
|
+
};
|
|
5329
|
+
/**
|
|
5330
|
+
* Finds the first transceiver that satisfies the given predicate.
|
|
5331
|
+
*/
|
|
5332
|
+
this.find = (predicate) => {
|
|
5333
|
+
return this.cache.find(predicate);
|
|
5334
|
+
};
|
|
5335
|
+
/**
|
|
5336
|
+
* Provides all the items in the cache.
|
|
5337
|
+
*/
|
|
5338
|
+
this.items = () => {
|
|
5339
|
+
return this.cache;
|
|
5340
|
+
};
|
|
5341
|
+
/**
|
|
5342
|
+
* Init index of the transceiver in the cache.
|
|
5343
|
+
*/
|
|
5344
|
+
this.indexOf = (transceiver) => {
|
|
5345
|
+
return this.transceiverOrder.indexOf(transceiver);
|
|
5346
|
+
};
|
|
5347
|
+
/**
|
|
5348
|
+
* Gets cached video layers for the given track.
|
|
5349
|
+
*/
|
|
5350
|
+
this.getLayers = (publishOption) => {
|
|
5351
|
+
const entry = this.layers.find((item) => item.publishOption.id === publishOption.id &&
|
|
5352
|
+
item.publishOption.trackType === publishOption.trackType);
|
|
5353
|
+
return entry?.layers;
|
|
5354
|
+
};
|
|
5355
|
+
/**
|
|
5356
|
+
* Sets the video layers for the given track.
|
|
5357
|
+
*/
|
|
5358
|
+
this.setLayers = (publishOption, layers = []) => {
|
|
5359
|
+
const entry = this.findLayer(publishOption);
|
|
5360
|
+
if (entry) {
|
|
5361
|
+
entry.layers = layers;
|
|
5362
|
+
}
|
|
5363
|
+
else {
|
|
5364
|
+
this.layers.push({ publishOption, layers });
|
|
5660
5365
|
}
|
|
5661
|
-
}
|
|
5662
|
-
});
|
|
5663
|
-
if (media) {
|
|
5664
|
-
return {
|
|
5665
|
-
media,
|
|
5666
|
-
rtpMap,
|
|
5667
|
-
fmtp,
|
|
5668
5366
|
};
|
|
5367
|
+
this.findTransceiver = (publishOption) => {
|
|
5368
|
+
return this.cache.find((item) => item.publishOption.id === publishOption.id &&
|
|
5369
|
+
item.publishOption.trackType === publishOption.trackType);
|
|
5370
|
+
};
|
|
5371
|
+
this.findLayer = (publishOption) => {
|
|
5372
|
+
return this.layers.find((item) => item.publishOption.id === publishOption.id &&
|
|
5373
|
+
item.publishOption.trackType === publishOption.trackType);
|
|
5374
|
+
};
|
|
5375
|
+
}
|
|
5376
|
+
}
|
|
5377
|
+
|
|
5378
|
+
// Exhaustiveness guard: warns about a value that no switch case handled.
const ensureExhausted = (x, message) => {
  getLogger(['helpers'])('warn', message, x);
};
|
|
5381
|
+
|
|
5382
|
+
/**
 * Resolves the participant-state stream property name for a TrackType.
 *
 * @throws when the track type is unspecified.
 */
const trackTypeToParticipantStreamKey = (trackType) => {
  switch (trackType) {
    case TrackType.SCREEN_SHARE:
      return 'screenShareStream';
    case TrackType.SCREEN_SHARE_AUDIO:
      return 'screenShareAudioStream';
    case TrackType.VIDEO:
      return 'videoStream';
    case TrackType.AUDIO:
      return 'audioStream';
    case TrackType.UNSPECIFIED: {
      throw new Error('Track type is unspecified');
    }
    default: {
      ensureExhausted(trackType, 'Unknown track type');
      return undefined;
    }
  }
};
|
|
5398
|
+
/**
 * Resolves the TrackType for a mute type string;
 * unknown values are warned about and yield `undefined`.
 */
const muteTypeToTrackType = (muteType) => {
  switch (muteType) {
    case 'audio':
      return TrackType.AUDIO;
    case 'video':
      return TrackType.VIDEO;
    case 'screenshare':
      return TrackType.SCREEN_SHARE;
    case 'screenshare_audio':
      return TrackType.SCREEN_SHARE_AUDIO;
    default: {
      ensureExhausted(muteType, 'Unknown mute type');
      return undefined;
    }
  }
};
|
|
5412
|
+
/**
 * Converts a wire-format track type string to a TrackType value,
 * or `undefined` for unrecognized values.
 */
const toTrackType = (trackType) => {
  switch (trackType) {
    case 'TRACK_TYPE_AUDIO':
      return TrackType.AUDIO;
    case 'TRACK_TYPE_VIDEO':
      return TrackType.VIDEO;
    case 'TRACK_TYPE_SCREEN_SHARE':
      return TrackType.SCREEN_SHARE;
    case 'TRACK_TYPE_SCREEN_SHARE_AUDIO':
      return TrackType.SCREEN_SHARE_AUDIO;
    default:
      return undefined;
  }
};
|
|
5426
|
+
/** Returns whether the given track type is an audio track type. */
const isAudioTrackType = (trackType) =>
  [TrackType.AUDIO, TrackType.SCREEN_SHARE_AUDIO].includes(trackType);
|
|
5427
|
+
|
|
5428
|
+
// Fallback max bitrate (bps) for each simulcast layer (rid).
const defaultBitratePerRid = {
  q: 300000,
  h: 750000,
  f: 1250000,
};
|
|
5433
|
+
/**
 * In SVC, we need to send only one video encoding (layer).
 * this layer will have the additional spatial and temporal layers
 * defined via the scalabilityMode property.
 *
 * @param layers the layers to process.
 */
const toSvcEncodings = (layers) => {
  if (!layers) return;
  // we take the highest quality layer, and we assign it to the `q` encoder.
  let highestLayer;
  for (const rid of ['f', 'h', 'q']) {
    highestLayer = layers.find((l) => l.rid === rid);
    if (highestLayer) break;
  }
  return [{ ...highestLayer, rid: 'q' }];
};
|
|
5450
|
+
/**
 * Converts the rid to a video quality.
 */
const ridToVideoQuality = (rid) => {
  if (rid === 'q') return VideoQuality.LOW_UNSPECIFIED;
  if (rid === 'h') return VideoQuality.MID;
  return VideoQuality.HIGH; // default to HIGH
};
|
|
5671
5460
|
/**
|
|
5672
|
-
*
|
|
5461
|
+
* Converts the given video layers to SFU video layers.
|
|
5673
5462
|
*/
|
|
5674
|
-
const
|
|
5675
|
-
|
|
5676
|
-
|
|
5677
|
-
|
|
5678
|
-
|
|
5679
|
-
|
|
5463
|
+
/**
 * Converts the given video layers to SFU video layers.
 */
const toVideoLayers = (layers = []) =>
  layers.map((layer) => {
    const rid = layer.rid || '';
    return {
      rid,
      bitrate: layer.maxBitrate || 0,
      fps: layer.maxFramerate || 0,
      quality: ridToVideoQuality(rid),
      videoDimension: { width: layer.width, height: layer.height },
    };
  });
|
|
5472
|
+
/**
 * Converts the spatial and temporal layers to a scalability mode string,
 * e.g. (3, 2) -> "L3T2_KEY", (1, 3) -> "L1T3".
 */
const toScalabilityMode = (spatialLayers, temporalLayers) => {
  const keySuffix = spatialLayers > 1 ? '_KEY' : '';
  return `L${spatialLayers}T${temporalLayers}${keySuffix}`;
};
|
|
5476
|
+
/**
 * Determines the most optimal video layers for the given track.
 *
 * @param videoTrack the video track to find optimal layers for.
 * @param publishOption the publish options for the track.
 */
const computeVideoLayers = (videoTrack, publishOption) => {
  // audio tracks have no video layers
  if (isAudioTrackType(publishOption.trackType)) return;
  const settings = videoTrack.getSettings();
  const { width = 0, height = 0 } = settings;
  const {
    bitrate,
    codec,
    fps,
    maxSpatialLayers = 3,
    maxTemporalLayers = 3,
    videoDimension = { width: 1280, height: 720 },
  } = publishOption;
  const maxBitrate = getComputedMaxBitrate(videoDimension, width, height, bitrate);
  const svcCodec = isSvcCodec(codec?.name);
  const optimalVideoLayers = [];
  let downscale = 1;
  let bitrateScale = 1;
  for (const rid of ['f', 'h', 'q'].slice(0, maxSpatialLayers)) {
    const layer = {
      active: true,
      rid,
      width: Math.round(width / downscale),
      height: Math.round(height / downscale),
      maxBitrate: maxBitrate / bitrateScale || defaultBitratePerRid[rid],
      maxFramerate: fps,
    };
    if (svcCodec) {
      // for SVC codecs, we need to set the scalability mode, and the
      // codec will handle the rest (layers, temporal layers, etc.)
      layer.scalabilityMode = toScalabilityMode(maxSpatialLayers, maxTemporalLayers);
    } else {
      // for non-SVC codecs, we need to downscale proportionally (simulcast)
      layer.scaleResolutionDownBy = downscale;
    }
    downscale *= 2;
    bitrateScale *= 2;
    // Reversing the order [f, h, q] to [q, h, f] as Chrome uses encoding index
    // when deciding which layer to disable when CPU or bandwidth is constrained.
    // Encodings should be ordered in increasing spatial resolution order.
    optimalVideoLayers.unshift(layer);
  }
  // for simplicity, we start with all layers enabled, then this function
  // will clear/reassign the layers that are not needed
  return withSimulcastConstraints(settings, optimalVideoLayers);
};
|
|
5682
5522
|
/**
|
|
5683
|
-
*
|
|
5523
|
+
* Computes the maximum bitrate for a given resolution.
|
|
5524
|
+
* If the current resolution is lower than the target resolution,
|
|
5525
|
+
* we want to proportionally reduce the target bitrate.
|
|
5526
|
+
* If the current resolution is higher than the target resolution,
|
|
5527
|
+
* we want to use the target bitrate.
|
|
5528
|
+
*
|
|
5529
|
+
* @param targetResolution the target resolution.
|
|
5530
|
+
* @param currentWidth the current width of the track.
|
|
5531
|
+
* @param currentHeight the current height of the track.
|
|
5532
|
+
* @param bitrate the target bitrate.
|
|
5684
5533
|
*/
|
|
5685
|
-
const
|
|
5686
|
-
|
|
5687
|
-
|
|
5688
|
-
|
|
5689
|
-
|
|
5690
|
-
|
|
5691
|
-
|
|
5692
|
-
|
|
5693
|
-
|
|
5694
|
-
|
|
5534
|
+
const getComputedMaxBitrate = (targetResolution, currentWidth, currentHeight, bitrate) => {
|
|
5535
|
+
// if the current resolution is lower than the target resolution,
|
|
5536
|
+
// we want to proportionally reduce the target bitrate
|
|
5537
|
+
const { width: targetWidth, height: targetHeight } = targetResolution;
|
|
5538
|
+
if (currentWidth < targetWidth || currentHeight < targetHeight) {
|
|
5539
|
+
const currentPixels = currentWidth * currentHeight;
|
|
5540
|
+
const targetPixels = targetWidth * targetHeight;
|
|
5541
|
+
const reductionFactor = currentPixels / targetPixels;
|
|
5542
|
+
return Math.round(bitrate * reductionFactor);
|
|
5543
|
+
}
|
|
5544
|
+
return bitrate;
|
|
5695
5545
|
};
|
|
5696
5546
|
/**
|
|
5697
|
-
*
|
|
5698
|
-
*/
|
|
5699
|
-
const preserveCodec = (sdp, mid, codec) => {
|
|
5700
|
-
const [kind, codecName] = codec.mimeType.toLowerCase().split('/');
|
|
5701
|
-
const toSet = (fmtpLine) => new Set(fmtpLine.split(';').map((f) => f.trim().toLowerCase()));
|
|
5702
|
-
const equal = (a, b) => {
|
|
5703
|
-
if (a.size !== b.size)
|
|
5704
|
-
return false;
|
|
5705
|
-
for (const item of a)
|
|
5706
|
-
if (!b.has(item))
|
|
5707
|
-
return false;
|
|
5708
|
-
return true;
|
|
5709
|
-
};
|
|
5710
|
-
const codecFmtp = toSet(codec.sdpFmtpLine || '');
|
|
5711
|
-
const parsedSdp = SDP.parse(sdp);
|
|
5712
|
-
for (const media of parsedSdp.media) {
|
|
5713
|
-
if (media.type !== kind || String(media.mid) !== mid)
|
|
5714
|
-
continue;
|
|
5715
|
-
// find the payload id of the desired codec
|
|
5716
|
-
const payloads = new Set();
|
|
5717
|
-
for (const rtp of media.rtp) {
|
|
5718
|
-
if (rtp.codec.toLowerCase() !== codecName)
|
|
5719
|
-
continue;
|
|
5720
|
-
const match =
|
|
5721
|
-
// vp8 doesn't have any fmtp, we preserve it without any additional checks
|
|
5722
|
-
codecName === 'vp8'
|
|
5723
|
-
? true
|
|
5724
|
-
: media.fmtp.some((f) => f.payload === rtp.payload && equal(toSet(f.config), codecFmtp));
|
|
5725
|
-
if (match) {
|
|
5726
|
-
payloads.add(rtp.payload);
|
|
5727
|
-
}
|
|
5728
|
-
}
|
|
5729
|
-
// find the corresponding rtx codec by matching apt=<preserved-codec-payload>
|
|
5730
|
-
for (const fmtp of media.fmtp) {
|
|
5731
|
-
const match = fmtp.config.match(/(apt)=(\d+)/);
|
|
5732
|
-
if (!match)
|
|
5733
|
-
continue;
|
|
5734
|
-
const [, , preservedCodecPayload] = match;
|
|
5735
|
-
if (payloads.has(Number(preservedCodecPayload))) {
|
|
5736
|
-
payloads.add(fmtp.payload);
|
|
5737
|
-
}
|
|
5738
|
-
}
|
|
5739
|
-
media.rtp = media.rtp.filter((r) => payloads.has(r.payload));
|
|
5740
|
-
media.fmtp = media.fmtp.filter((f) => payloads.has(f.payload));
|
|
5741
|
-
media.rtcpFb = media.rtcpFb?.filter((f) => payloads.has(f.payload));
|
|
5742
|
-
media.payloads = Array.from(payloads).join(' ');
|
|
5743
|
-
}
|
|
5744
|
-
return SDP.write(parsedSdp);
|
|
5745
|
-
};
|
|
5746
|
-
/**
|
|
5747
|
-
* Enables high-quality audio through SDP munging for the given trackMid.
|
|
5547
|
+
* Browsers have different simulcast constraints for different video resolutions.
|
|
5748
5548
|
*
|
|
5749
|
-
*
|
|
5750
|
-
*
|
|
5751
|
-
*
|
|
5752
|
-
|
|
5753
|
-
|
|
5754
|
-
|
|
5755
|
-
|
|
5756
|
-
const
|
|
5757
|
-
if (
|
|
5758
|
-
|
|
5759
|
-
|
|
5760
|
-
if (!opusRtp)
|
|
5761
|
-
return sdp;
|
|
5762
|
-
const opusFmtp = audioMedia.fmtp.find((f) => f.payload === opusRtp.payload);
|
|
5763
|
-
if (!opusFmtp)
|
|
5764
|
-
return sdp;
|
|
5765
|
-
// enable stereo, if not already enabled
|
|
5766
|
-
if (opusFmtp.config.match(/stereo=(\d)/)) {
|
|
5767
|
-
opusFmtp.config = opusFmtp.config.replace(/stereo=(\d)/, 'stereo=1');
|
|
5768
|
-
}
|
|
5769
|
-
else {
|
|
5770
|
-
opusFmtp.config = `${opusFmtp.config};stereo=1`;
|
|
5549
|
+
* This function modifies the provided list of video layers according to the
|
|
5550
|
+
* current implementation of simulcast constraints in the Chromium based browsers.
|
|
5551
|
+
*
|
|
5552
|
+
* https://chromium.googlesource.com/external/webrtc/+/refs/heads/main/media/engine/simulcast.cc#90
|
|
5553
|
+
*/
|
|
5554
|
+
const withSimulcastConstraints = (settings, optimalVideoLayers) => {
|
|
5555
|
+
let layers;
|
|
5556
|
+
const size = Math.max(settings.width || 0, settings.height || 0);
|
|
5557
|
+
if (size <= 320) {
|
|
5558
|
+
// provide only one layer 320x240 (q), the one with the highest quality
|
|
5559
|
+
layers = optimalVideoLayers.filter((layer) => layer.rid === 'f');
|
|
5771
5560
|
}
|
|
5772
|
-
|
|
5773
|
-
|
|
5774
|
-
|
|
5561
|
+
else if (size <= 640) {
|
|
5562
|
+
// provide two layers, 160x120 (q) and 640x480 (h)
|
|
5563
|
+
layers = optimalVideoLayers.filter((layer) => layer.rid !== 'h');
|
|
5775
5564
|
}
|
|
5776
5565
|
else {
|
|
5777
|
-
|
|
5566
|
+
// provide three layers for sizes > 640x480
|
|
5567
|
+
layers = optimalVideoLayers;
|
|
5778
5568
|
}
|
|
5779
|
-
|
|
5569
|
+
const ridMapping = ['q', 'h', 'f'];
|
|
5570
|
+
return layers.map((layer, index) => ({
|
|
5571
|
+
...layer,
|
|
5572
|
+
rid: ridMapping[index], // reassign rid
|
|
5573
|
+
}));
|
|
5780
5574
|
};
|
|
5575
|
+
|
|
5781
5576
|
/**
|
|
5782
5577
|
* Extracts the mid from the transceiver or the SDP.
|
|
5783
5578
|
*
|
|
@@ -5789,9 +5584,9 @@ const extractMid = (transceiver, transceiverInitIndex, sdp) => {
|
|
|
5789
5584
|
if (transceiver.mid)
|
|
5790
5585
|
return transceiver.mid;
|
|
5791
5586
|
if (!sdp)
|
|
5792
|
-
return
|
|
5587
|
+
return String(transceiverInitIndex);
|
|
5793
5588
|
const track = transceiver.sender.track;
|
|
5794
|
-
const parsedSdp =
|
|
5589
|
+
const parsedSdp = parse(sdp);
|
|
5795
5590
|
const media = parsedSdp.media.find((m) => {
|
|
5796
5591
|
return (m.type === track.kind &&
|
|
5797
5592
|
// if `msid` is not present, we assume that the track is the first one
|
|
@@ -5799,7 +5594,7 @@ const extractMid = (transceiver, transceiverInitIndex, sdp) => {
|
|
|
5799
5594
|
});
|
|
5800
5595
|
if (typeof media?.mid !== 'undefined')
|
|
5801
5596
|
return String(media.mid);
|
|
5802
|
-
if (transceiverInitIndex
|
|
5597
|
+
if (transceiverInitIndex < 0)
|
|
5803
5598
|
return '';
|
|
5804
5599
|
return String(transceiverInitIndex);
|
|
5805
5600
|
};
|
|
@@ -5809,164 +5604,87 @@ const extractMid = (transceiver, transceiverInitIndex, sdp) => {
|
|
|
5809
5604
|
*
|
|
5810
5605
|
* @internal
|
|
5811
5606
|
*/
|
|
5812
|
-
class Publisher {
|
|
5607
|
+
class Publisher extends BasePeerConnection {
|
|
5813
5608
|
/**
|
|
5814
5609
|
* Constructs a new `Publisher` instance.
|
|
5815
5610
|
*/
|
|
5816
|
-
constructor({
|
|
5817
|
-
|
|
5818
|
-
this.
|
|
5819
|
-
this.publishOptsForTrack = new Map();
|
|
5820
|
-
/**
|
|
5821
|
-
* An array maintaining the order how transceivers were added to the peer connection.
|
|
5822
|
-
* This is needed because some browsers (Firefox) don't reliably report
|
|
5823
|
-
* trackId and `mid` parameters.
|
|
5824
|
-
*
|
|
5825
|
-
* @internal
|
|
5826
|
-
*/
|
|
5827
|
-
this.transceiverInitOrder = [];
|
|
5828
|
-
this.isIceRestarting = false;
|
|
5829
|
-
this.createPeerConnection = (connectionConfig) => {
|
|
5830
|
-
const pc = new RTCPeerConnection(connectionConfig);
|
|
5831
|
-
pc.addEventListener('icecandidate', this.onIceCandidate);
|
|
5832
|
-
pc.addEventListener('negotiationneeded', this.onNegotiationNeeded);
|
|
5833
|
-
pc.addEventListener('icecandidateerror', this.onIceCandidateError);
|
|
5834
|
-
pc.addEventListener('iceconnectionstatechange', this.onIceConnectionStateChange);
|
|
5835
|
-
pc.addEventListener('icegatheringstatechange', this.onIceGatheringStateChange);
|
|
5836
|
-
pc.addEventListener('signalingstatechange', this.onSignalingStateChange);
|
|
5837
|
-
return pc;
|
|
5838
|
-
};
|
|
5839
|
-
/**
|
|
5840
|
-
* Closes the publisher PeerConnection and cleans up the resources.
|
|
5841
|
-
*/
|
|
5842
|
-
this.close = ({ stopTracks }) => {
|
|
5843
|
-
if (stopTracks) {
|
|
5844
|
-
this.stopPublishing();
|
|
5845
|
-
this.transceiverCache.clear();
|
|
5846
|
-
this.trackLayersCache.clear();
|
|
5847
|
-
}
|
|
5848
|
-
this.detachEventHandlers();
|
|
5849
|
-
this.pc.close();
|
|
5850
|
-
};
|
|
5851
|
-
/**
|
|
5852
|
-
* Detaches the event handlers from the `RTCPeerConnection`.
|
|
5853
|
-
* This is useful when we want to replace the `RTCPeerConnection`
|
|
5854
|
-
* instance with a new one (in case of migration).
|
|
5855
|
-
*/
|
|
5856
|
-
this.detachEventHandlers = () => {
|
|
5857
|
-
this.unsubscribeOnIceRestart();
|
|
5858
|
-
this.unsubscribeChangePublishQuality();
|
|
5859
|
-
this.pc.removeEventListener('icecandidate', this.onIceCandidate);
|
|
5860
|
-
this.pc.removeEventListener('negotiationneeded', this.onNegotiationNeeded);
|
|
5861
|
-
this.pc.removeEventListener('icecandidateerror', this.onIceCandidateError);
|
|
5862
|
-
this.pc.removeEventListener('iceconnectionstatechange', this.onIceConnectionStateChange);
|
|
5863
|
-
this.pc.removeEventListener('icegatheringstatechange', this.onIceGatheringStateChange);
|
|
5864
|
-
this.pc.removeEventListener('signalingstatechange', this.onSignalingStateChange);
|
|
5865
|
-
};
|
|
5611
|
+
constructor({ publishOptions, ...baseOptions }) {
|
|
5612
|
+
super(PeerType.PUBLISHER_UNSPECIFIED, baseOptions);
|
|
5613
|
+
this.transceiverCache = new TransceiverCache();
|
|
5866
5614
|
/**
|
|
5867
5615
|
* Starts publishing the given track of the given media stream.
|
|
5868
5616
|
*
|
|
5869
5617
|
* Consecutive calls to this method will replace the stream.
|
|
5870
5618
|
* The previous stream will be stopped.
|
|
5871
5619
|
*
|
|
5872
|
-
* @param mediaStream the media stream to publish.
|
|
5873
5620
|
* @param track the track to publish.
|
|
5874
5621
|
* @param trackType the track type to publish.
|
|
5875
|
-
* @param opts the optional publish options to use.
|
|
5876
5622
|
*/
|
|
5877
|
-
this.
|
|
5878
|
-
if (
|
|
5879
|
-
throw new Error(`
|
|
5880
|
-
}
|
|
5881
|
-
// enable the track if it is disabled
|
|
5882
|
-
if (!track.enabled)
|
|
5883
|
-
track.enabled = true;
|
|
5884
|
-
const transceiver = this.transceiverCache.get(trackType);
|
|
5885
|
-
if (!transceiver || !transceiver.sender.track) {
|
|
5886
|
-
// listen for 'ended' event on the track as it might be ended abruptly
|
|
5887
|
-
// by an external factors such as permission revokes, a disconnected device, etc.
|
|
5888
|
-
// keep in mind that `track.stop()` doesn't trigger this event.
|
|
5889
|
-
const handleTrackEnded = () => {
|
|
5890
|
-
this.logger('info', `Track ${TrackType[trackType]} has ended abruptly`);
|
|
5891
|
-
track.removeEventListener('ended', handleTrackEnded);
|
|
5892
|
-
this.notifyTrackMuteStateChanged(mediaStream, trackType, true).catch((err) => this.logger('warn', `Couldn't notify track mute state`, err));
|
|
5893
|
-
};
|
|
5894
|
-
track.addEventListener('ended', handleTrackEnded);
|
|
5895
|
-
this.addTransceiver(trackType, track, opts, mediaStream);
|
|
5623
|
+
this.publish = async (track, trackType) => {
|
|
5624
|
+
if (!this.publishOptions.some((o) => o.trackType === trackType)) {
|
|
5625
|
+
throw new Error(`No publish options found for ${TrackType[trackType]}`);
|
|
5896
5626
|
}
|
|
5897
|
-
|
|
5898
|
-
|
|
5627
|
+
for (const publishOption of this.publishOptions) {
|
|
5628
|
+
if (publishOption.trackType !== trackType)
|
|
5629
|
+
continue;
|
|
5630
|
+
// create a clone of the track as otherwise the same trackId will
|
|
5631
|
+
// appear in the SDP in multiple transceivers
|
|
5632
|
+
const trackToPublish = track.clone();
|
|
5633
|
+
const transceiver = this.transceiverCache.get(publishOption);
|
|
5634
|
+
if (!transceiver) {
|
|
5635
|
+
this.addTransceiver(trackToPublish, publishOption);
|
|
5636
|
+
}
|
|
5637
|
+
else {
|
|
5638
|
+
await transceiver.sender.replaceTrack(trackToPublish);
|
|
5639
|
+
}
|
|
5899
5640
|
}
|
|
5900
|
-
await this.notifyTrackMuteStateChanged(mediaStream, trackType, false);
|
|
5901
5641
|
};
|
|
5902
5642
|
/**
|
|
5903
|
-
* Adds a new transceiver to the peer connection.
|
|
5904
|
-
* This needs to be called when a new track kind is added to the peer connection.
|
|
5905
|
-
* In other cases, use `updateTransceiver` method.
|
|
5643
|
+
* Adds a new transceiver carrying the given track to the peer connection.
|
|
5906
5644
|
*/
|
|
5907
|
-
this.addTransceiver = (
|
|
5908
|
-
const
|
|
5909
|
-
const
|
|
5910
|
-
|
|
5645
|
+
this.addTransceiver = (track, publishOption) => {
|
|
5646
|
+
const videoEncodings = computeVideoLayers(track, publishOption);
|
|
5647
|
+
const sendEncodings = isSvcCodec(publishOption.codec?.name)
|
|
5648
|
+
? toSvcEncodings(videoEncodings)
|
|
5649
|
+
: videoEncodings;
|
|
5911
5650
|
const transceiver = this.pc.addTransceiver(track, {
|
|
5912
5651
|
direction: 'sendonly',
|
|
5913
|
-
|
|
5914
|
-
? [mediaStream]
|
|
5915
|
-
: undefined,
|
|
5916
|
-
sendEncodings: isSvcCodec(codecInUse)
|
|
5917
|
-
? toSvcEncodings(videoEncodings)
|
|
5918
|
-
: videoEncodings,
|
|
5652
|
+
sendEncodings,
|
|
5919
5653
|
});
|
|
5654
|
+
const trackType = publishOption.trackType;
|
|
5920
5655
|
this.logger('debug', `Added ${TrackType[trackType]} transceiver`);
|
|
5921
|
-
this.
|
|
5922
|
-
this.transceiverCache.set(trackType, transceiver);
|
|
5923
|
-
this.publishOptsForTrack.set(trackType, opts);
|
|
5924
|
-
// handle codec preferences
|
|
5925
|
-
if (!('setCodecPreferences' in transceiver))
|
|
5926
|
-
return;
|
|
5927
|
-
const codecPreferences = this.getCodecPreferences(trackType, trackType === TrackType.VIDEO ? codecInUse : undefined, 'receiver');
|
|
5928
|
-
if (!codecPreferences)
|
|
5929
|
-
return;
|
|
5930
|
-
try {
|
|
5931
|
-
this.logger('info', `Setting ${TrackType[trackType]} codec preferences`, codecPreferences);
|
|
5932
|
-
transceiver.setCodecPreferences(codecPreferences);
|
|
5933
|
-
}
|
|
5934
|
-
catch (err) {
|
|
5935
|
-
this.logger('warn', `Couldn't set codec preferences`, err);
|
|
5936
|
-
}
|
|
5937
|
-
};
|
|
5938
|
-
/**
|
|
5939
|
-
* Updates the given transceiver with the new track.
|
|
5940
|
-
* Stops the previous track and replaces it with the new one.
|
|
5941
|
-
*/
|
|
5942
|
-
this.updateTransceiver = async (transceiver, track) => {
|
|
5943
|
-
const previousTrack = transceiver.sender.track;
|
|
5944
|
-
// don't stop the track if we are re-publishing the same track
|
|
5945
|
-
if (previousTrack && previousTrack !== track) {
|
|
5946
|
-
previousTrack.stop();
|
|
5947
|
-
}
|
|
5948
|
-
await transceiver.sender.replaceTrack(track);
|
|
5656
|
+
this.transceiverCache.add(publishOption, transceiver);
|
|
5949
5657
|
};
|
|
5950
5658
|
/**
|
|
5951
|
-
*
|
|
5952
|
-
* Underlying track will be stopped and removed from the publisher.
|
|
5953
|
-
* @param trackType the track type to unpublish.
|
|
5954
|
-
* @param stopTrack specifies whether track should be stopped or just disabled
|
|
5659
|
+
* Synchronizes the current Publisher state with the provided publish options.
|
|
5955
5660
|
*/
|
|
5956
|
-
this.
|
|
5957
|
-
|
|
5958
|
-
|
|
5959
|
-
|
|
5960
|
-
(
|
|
5961
|
-
|
|
5962
|
-
|
|
5963
|
-
|
|
5964
|
-
|
|
5965
|
-
|
|
5966
|
-
|
|
5967
|
-
|
|
5968
|
-
|
|
5969
|
-
|
|
5661
|
+
this.syncPublishOptions = async () => {
|
|
5662
|
+
// enable publishing with new options -> [av1, vp9]
|
|
5663
|
+
for (const publishOption of this.publishOptions) {
|
|
5664
|
+
const { trackType } = publishOption;
|
|
5665
|
+
if (!this.isPublishing(trackType))
|
|
5666
|
+
continue;
|
|
5667
|
+
if (this.transceiverCache.has(publishOption))
|
|
5668
|
+
continue;
|
|
5669
|
+
const item = this.transceiverCache.find((i) => !!i.transceiver.sender.track &&
|
|
5670
|
+
i.publishOption.trackType === trackType);
|
|
5671
|
+
if (!item || !item.transceiver)
|
|
5672
|
+
continue;
|
|
5673
|
+
// take the track from the existing transceiver for the same track type,
|
|
5674
|
+
// clone it and publish it with the new publish options
|
|
5675
|
+
const track = item.transceiver.sender.track.clone();
|
|
5676
|
+
this.addTransceiver(track, publishOption);
|
|
5677
|
+
}
|
|
5678
|
+
// stop publishing with options not required anymore -> [vp9]
|
|
5679
|
+
for (const item of this.transceiverCache.items()) {
|
|
5680
|
+
const { publishOption, transceiver } = item;
|
|
5681
|
+
const hasPublishOption = this.publishOptions.some((option) => option.id === publishOption.id &&
|
|
5682
|
+
option.trackType === publishOption.trackType);
|
|
5683
|
+
if (hasPublishOption)
|
|
5684
|
+
continue;
|
|
5685
|
+
// it is safe to stop the track here, it is a clone
|
|
5686
|
+
transceiver.sender.track?.stop();
|
|
5687
|
+
await transceiver.sender.replaceTrack(null);
|
|
5970
5688
|
}
|
|
5971
5689
|
};
|
|
5972
5690
|
/**
|
|
@@ -5975,57 +5693,52 @@ class Publisher {
|
|
|
5975
5693
|
* @param trackType the track type to check.
|
|
5976
5694
|
*/
|
|
5977
5695
|
this.isPublishing = (trackType) => {
|
|
5978
|
-
const
|
|
5979
|
-
|
|
5980
|
-
|
|
5981
|
-
|
|
5982
|
-
|
|
5983
|
-
|
|
5984
|
-
|
|
5985
|
-
|
|
5986
|
-
const audioOrVideoOrScreenShareStream = trackTypeToParticipantStreamKey(trackType);
|
|
5987
|
-
if (!audioOrVideoOrScreenShareStream)
|
|
5988
|
-
return;
|
|
5989
|
-
if (isMuted) {
|
|
5990
|
-
this.state.updateParticipant(this.sfuClient.sessionId, (p) => ({
|
|
5991
|
-
publishedTracks: p.publishedTracks.filter((t) => t !== trackType),
|
|
5992
|
-
[audioOrVideoOrScreenShareStream]: undefined,
|
|
5993
|
-
}));
|
|
5994
|
-
}
|
|
5995
|
-
else {
|
|
5996
|
-
this.state.updateParticipant(this.sfuClient.sessionId, (p) => {
|
|
5997
|
-
return {
|
|
5998
|
-
publishedTracks: p.publishedTracks.includes(trackType)
|
|
5999
|
-
? p.publishedTracks
|
|
6000
|
-
: [...p.publishedTracks, trackType],
|
|
6001
|
-
[audioOrVideoOrScreenShareStream]: mediaStream,
|
|
6002
|
-
};
|
|
6003
|
-
});
|
|
5696
|
+
for (const item of this.transceiverCache.items()) {
|
|
5697
|
+
if (item.publishOption.trackType !== trackType)
|
|
5698
|
+
continue;
|
|
5699
|
+
const track = item.transceiver.sender.track;
|
|
5700
|
+
if (!track)
|
|
5701
|
+
continue;
|
|
5702
|
+
if (track.readyState === 'live' && track.enabled)
|
|
5703
|
+
return true;
|
|
6004
5704
|
}
|
|
5705
|
+
return false;
|
|
6005
5706
|
};
|
|
6006
5707
|
/**
|
|
6007
|
-
*
|
|
5708
|
+
* Maps the given track ID to the corresponding track type.
|
|
6008
5709
|
*/
|
|
6009
|
-
this.
|
|
6010
|
-
|
|
6011
|
-
|
|
6012
|
-
|
|
6013
|
-
|
|
6014
|
-
this.pc.removeTrack(s);
|
|
5710
|
+
this.getTrackType = (trackId) => {
|
|
5711
|
+
for (const transceiverId of this.transceiverCache.items()) {
|
|
5712
|
+
const { publishOption, transceiver } = transceiverId;
|
|
5713
|
+
if (transceiver.sender.track?.id === trackId) {
|
|
5714
|
+
return publishOption.trackType;
|
|
6015
5715
|
}
|
|
6016
|
-
}
|
|
5716
|
+
}
|
|
5717
|
+
return undefined;
|
|
6017
5718
|
};
|
|
6018
|
-
|
|
6019
|
-
|
|
6020
|
-
|
|
6021
|
-
|
|
6022
|
-
|
|
6023
|
-
|
|
5719
|
+
/**
|
|
5720
|
+
* Stops the cloned track that is being published to the SFU.
|
|
5721
|
+
*/
|
|
5722
|
+
this.stopTracks = (...trackTypes) => {
|
|
5723
|
+
for (const item of this.transceiverCache.items()) {
|
|
5724
|
+
const { publishOption, transceiver } = item;
|
|
5725
|
+
if (!trackTypes.includes(publishOption.trackType))
|
|
5726
|
+
continue;
|
|
5727
|
+
transceiver.sender.track?.stop();
|
|
6024
5728
|
}
|
|
6025
|
-
|
|
5729
|
+
};
|
|
5730
|
+
this.changePublishQuality = async (videoSender) => {
|
|
5731
|
+
const { trackType, layers, publishOptionId } = videoSender;
|
|
5732
|
+
const enabledLayers = layers.filter((l) => l.active);
|
|
5733
|
+
const tag = 'Update publish quality:';
|
|
5734
|
+
this.logger('info', `${tag} requested layers by SFU:`, enabledLayers);
|
|
5735
|
+
const sender = this.transceiverCache.getWith(trackType, publishOptionId)?.sender;
|
|
5736
|
+
if (!sender) {
|
|
5737
|
+
return this.logger('warn', `${tag} no video sender found.`);
|
|
5738
|
+
}
|
|
5739
|
+
const params = sender.getParameters();
|
|
6026
5740
|
if (params.encodings.length === 0) {
|
|
6027
|
-
this.logger('warn',
|
|
6028
|
-
return;
|
|
5741
|
+
return this.logger('warn', `${tag} there are no encodings set.`);
|
|
6029
5742
|
}
|
|
6030
5743
|
const [codecInUse] = params.codecs;
|
|
6031
5744
|
const usesSvcCodec = codecInUse && isSvcCodec(codecInUse.mimeType);
|
|
@@ -6067,54 +5780,12 @@ class Publisher {
|
|
|
6067
5780
|
changed = true;
|
|
6068
5781
|
}
|
|
6069
5782
|
}
|
|
6070
|
-
const
|
|
5783
|
+
const activeEncoders = params.encodings.filter((e) => e.active);
|
|
6071
5784
|
if (!changed) {
|
|
6072
|
-
this.logger('info',
|
|
6073
|
-
return;
|
|
6074
|
-
}
|
|
6075
|
-
await videoSender.setParameters(params);
|
|
6076
|
-
this.logger('info', `Update publish quality, enabled rids:`, activeLayers);
|
|
6077
|
-
};
|
|
6078
|
-
/**
|
|
6079
|
-
* Returns the result of the `RTCPeerConnection.getStats()` method
|
|
6080
|
-
* @param selector
|
|
6081
|
-
* @returns
|
|
6082
|
-
*/
|
|
6083
|
-
this.getStats = (selector) => {
|
|
6084
|
-
return this.pc.getStats(selector);
|
|
6085
|
-
};
|
|
6086
|
-
this.getCodecPreferences = (trackType, preferredCodec, codecPreferencesSource) => {
|
|
6087
|
-
if (trackType === TrackType.VIDEO) {
|
|
6088
|
-
return getPreferredCodecs('video', preferredCodec || 'vp8', undefined, codecPreferencesSource);
|
|
6089
|
-
}
|
|
6090
|
-
if (trackType === TrackType.AUDIO) {
|
|
6091
|
-
const defaultAudioCodec = this.isRedEnabled ? 'red' : 'opus';
|
|
6092
|
-
const codecToRemove = !this.isRedEnabled ? 'red' : undefined;
|
|
6093
|
-
return getPreferredCodecs('audio', preferredCodec ?? defaultAudioCodec, codecToRemove, codecPreferencesSource);
|
|
6094
|
-
}
|
|
6095
|
-
};
|
|
6096
|
-
this.onIceCandidate = (e) => {
|
|
6097
|
-
const { candidate } = e;
|
|
6098
|
-
if (!candidate) {
|
|
6099
|
-
this.logger('debug', 'null ice candidate');
|
|
6100
|
-
return;
|
|
5785
|
+
return this.logger('info', `${tag} no change:`, activeEncoders);
|
|
6101
5786
|
}
|
|
6102
|
-
|
|
6103
|
-
|
|
6104
|
-
iceCandidate: getIceCandidate(candidate),
|
|
6105
|
-
peerType: PeerType.PUBLISHER_UNSPECIFIED,
|
|
6106
|
-
})
|
|
6107
|
-
.catch((err) => {
|
|
6108
|
-
this.logger('warn', `ICETrickle failed`, err);
|
|
6109
|
-
});
|
|
6110
|
-
};
|
|
6111
|
-
/**
|
|
6112
|
-
* Sets the SFU client to use.
|
|
6113
|
-
*
|
|
6114
|
-
* @param sfuClient the SFU client to use.
|
|
6115
|
-
*/
|
|
6116
|
-
this.setSfuClient = (sfuClient) => {
|
|
6117
|
-
this.sfuClient = sfuClient;
|
|
5787
|
+
await sender.setParameters(params);
|
|
5788
|
+
this.logger('info', `${tag} enabled rids:`, activeEncoders);
|
|
6118
5789
|
};
|
|
6119
5790
|
/**
|
|
6120
5791
|
* Restarts the ICE connection and renegotiates with the SFU.
|
|
@@ -6129,7 +5800,7 @@ class Publisher {
|
|
|
6129
5800
|
await this.negotiate({ iceRestart: true });
|
|
6130
5801
|
};
|
|
6131
5802
|
this.onNegotiationNeeded = () => {
|
|
6132
|
-
this.negotiate().catch((err) => {
|
|
5803
|
+
withoutConcurrency('publisher.negotiate', () => this.negotiate()).catch((err) => {
|
|
6133
5804
|
this.logger('error', `Negotiation failed.`, err);
|
|
6134
5805
|
this.onUnrecoverableError?.();
|
|
6135
5806
|
});
|
|
@@ -6141,18 +5812,6 @@ class Publisher {
|
|
|
6141
5812
|
*/
|
|
6142
5813
|
this.negotiate = async (options) => {
|
|
6143
5814
|
const offer = await this.pc.createOffer(options);
|
|
6144
|
-
if (offer.sdp) {
|
|
6145
|
-
offer.sdp = toggleDtx(offer.sdp, this.isDtxEnabled);
|
|
6146
|
-
if (this.isPublishing(TrackType.SCREEN_SHARE_AUDIO)) {
|
|
6147
|
-
offer.sdp = this.enableHighQualityAudio(offer.sdp);
|
|
6148
|
-
}
|
|
6149
|
-
if (this.isPublishing(TrackType.VIDEO)) {
|
|
6150
|
-
// Hotfix for platforms that don't respect the ordered codec list
|
|
6151
|
-
// (Firefox, Android, Linux, etc...).
|
|
6152
|
-
// We remove all the codecs from the SDP except the one we want to use.
|
|
6153
|
-
offer.sdp = this.removeUnpreferredCodecs(offer.sdp, TrackType.VIDEO);
|
|
6154
|
-
}
|
|
6155
|
-
}
|
|
6156
5815
|
const trackInfos = this.getAnnouncedTracks(offer.sdp);
|
|
6157
5816
|
if (trackInfos.length === 0) {
|
|
6158
5817
|
throw new Error(`Can't negotiate without announcing any tracks`);
|
|
@@ -6171,238 +5830,121 @@ class Publisher {
|
|
|
6171
5830
|
finally {
|
|
6172
5831
|
this.isIceRestarting = false;
|
|
6173
5832
|
}
|
|
6174
|
-
this.
|
|
6175
|
-
try {
|
|
6176
|
-
const iceCandidate = JSON.parse(candidate.iceCandidate);
|
|
6177
|
-
await this.pc.addIceCandidate(iceCandidate);
|
|
6178
|
-
}
|
|
6179
|
-
catch (e) {
|
|
6180
|
-
this.logger('warn', `ICE candidate error`, e, candidate);
|
|
6181
|
-
}
|
|
6182
|
-
});
|
|
6183
|
-
};
|
|
6184
|
-
this.enableHighQualityAudio = (sdp) => {
|
|
6185
|
-
const transceiver = this.transceiverCache.get(TrackType.SCREEN_SHARE_AUDIO);
|
|
6186
|
-
if (!transceiver)
|
|
6187
|
-
return sdp;
|
|
6188
|
-
const transceiverInitIndex = this.transceiverInitOrder.indexOf(TrackType.SCREEN_SHARE_AUDIO);
|
|
6189
|
-
const mid = extractMid(transceiver, transceiverInitIndex, sdp);
|
|
6190
|
-
return enableHighQualityAudio(sdp, mid);
|
|
5833
|
+
this.addTrickledIceCandidates();
|
|
6191
5834
|
};
|
|
6192
5835
|
/**
|
|
6193
5836
|
* Returns a list of tracks that are currently being published.
|
|
6194
|
-
*
|
|
6195
|
-
* @internal
|
|
6196
|
-
* @param sdp an optional SDP to extract the `mid` from.
|
|
6197
5837
|
*/
|
|
6198
|
-
this.
|
|
6199
|
-
|
|
6200
|
-
|
|
6201
|
-
.getTransceivers()
|
|
6202
|
-
.filter((t) => t.direction === 'sendonly' && t.sender.track)
|
|
6203
|
-
.map((transceiver) => {
|
|
6204
|
-
let trackType;
|
|
6205
|
-
this.transceiverCache.forEach((value, key) => {
|
|
6206
|
-
if (value === transceiver)
|
|
6207
|
-
trackType = key;
|
|
6208
|
-
});
|
|
5838
|
+
this.getPublishedTracks = () => {
|
|
5839
|
+
const tracks = [];
|
|
5840
|
+
for (const { transceiver } of this.transceiverCache.items()) {
|
|
6209
5841
|
const track = transceiver.sender.track;
|
|
6210
|
-
|
|
6211
|
-
|
|
6212
|
-
if (isTrackLive) {
|
|
6213
|
-
optimalLayers = this.computeLayers(trackType, track) || [];
|
|
6214
|
-
this.trackLayersCache.set(trackType, optimalLayers);
|
|
6215
|
-
}
|
|
6216
|
-
else {
|
|
6217
|
-
// we report the last known optimal layers for ended tracks
|
|
6218
|
-
optimalLayers = this.trackLayersCache.get(trackType) || [];
|
|
6219
|
-
this.logger('debug', `Track ${TrackType[trackType]} is ended. Announcing last known optimal layers`, optimalLayers);
|
|
6220
|
-
}
|
|
6221
|
-
const layers = optimalLayers.map((optimalLayer) => ({
|
|
6222
|
-
rid: optimalLayer.rid || '',
|
|
6223
|
-
bitrate: optimalLayer.maxBitrate || 0,
|
|
6224
|
-
fps: optimalLayer.maxFramerate || 0,
|
|
6225
|
-
quality: ridToVideoQuality(optimalLayer.rid || ''),
|
|
6226
|
-
videoDimension: {
|
|
6227
|
-
width: optimalLayer.width,
|
|
6228
|
-
height: optimalLayer.height,
|
|
6229
|
-
},
|
|
6230
|
-
}));
|
|
6231
|
-
const isAudioTrack = [
|
|
6232
|
-
TrackType.AUDIO,
|
|
6233
|
-
TrackType.SCREEN_SHARE_AUDIO,
|
|
6234
|
-
].includes(trackType);
|
|
6235
|
-
const trackSettings = track.getSettings();
|
|
6236
|
-
const isStereo = isAudioTrack && trackSettings.channelCount === 2;
|
|
6237
|
-
const transceiverInitIndex = this.transceiverInitOrder.indexOf(trackType);
|
|
6238
|
-
return {
|
|
6239
|
-
trackId: track.id,
|
|
6240
|
-
layers: layers,
|
|
6241
|
-
trackType,
|
|
6242
|
-
mid: extractMid(transceiver, transceiverInitIndex, sdp),
|
|
6243
|
-
stereo: isStereo,
|
|
6244
|
-
dtx: isAudioTrack && this.isDtxEnabled,
|
|
6245
|
-
red: isAudioTrack && this.isRedEnabled,
|
|
6246
|
-
muted: !isTrackLive,
|
|
6247
|
-
};
|
|
6248
|
-
});
|
|
6249
|
-
};
|
|
6250
|
-
this.computeLayers = (trackType, track, opts) => {
|
|
6251
|
-
const { settings } = this.state;
|
|
6252
|
-
const targetResolution = settings?.video
|
|
6253
|
-
.target_resolution;
|
|
6254
|
-
const screenShareBitrate = settings?.screensharing.target_resolution?.bitrate;
|
|
6255
|
-
const publishOpts = opts || this.publishOptsForTrack.get(trackType);
|
|
6256
|
-
const codecInUse = opts?.forceCodec || getOptimalVideoCodec(opts?.preferredCodec);
|
|
6257
|
-
return trackType === TrackType.VIDEO
|
|
6258
|
-
? findOptimalVideoLayers(track, targetResolution, codecInUse, publishOpts)
|
|
6259
|
-
: trackType === TrackType.SCREEN_SHARE
|
|
6260
|
-
? findOptimalScreenSharingLayers(track, publishOpts, screenShareBitrate)
|
|
6261
|
-
: undefined;
|
|
6262
|
-
};
|
|
6263
|
-
this.onIceCandidateError = (e) => {
|
|
6264
|
-
const errorMessage = e instanceof RTCPeerConnectionIceErrorEvent &&
|
|
6265
|
-
`${e.errorCode}: ${e.errorText}`;
|
|
6266
|
-
const iceState = this.pc.iceConnectionState;
|
|
6267
|
-
const logLevel = iceState === 'connected' || iceState === 'checking' ? 'debug' : 'warn';
|
|
6268
|
-
this.logger(logLevel, `ICE Candidate error`, errorMessage);
|
|
6269
|
-
};
|
|
6270
|
-
this.onIceConnectionStateChange = () => {
|
|
6271
|
-
const state = this.pc.iceConnectionState;
|
|
6272
|
-
this.logger('debug', `ICE Connection state changed to`, state);
|
|
6273
|
-
if (this.state.callingState === CallingState.RECONNECTING)
|
|
6274
|
-
return;
|
|
6275
|
-
if (state === 'failed' || state === 'disconnected') {
|
|
6276
|
-
this.logger('debug', `Attempting to restart ICE`);
|
|
6277
|
-
this.restartIce().catch((e) => {
|
|
6278
|
-
this.logger('error', `ICE restart error`, e);
|
|
6279
|
-
this.onUnrecoverableError?.();
|
|
6280
|
-
});
|
|
5842
|
+
if (track && track.readyState === 'live')
|
|
5843
|
+
tracks.push(track);
|
|
6281
5844
|
}
|
|
6282
|
-
|
|
6283
|
-
this.onIceGatheringStateChange = () => {
|
|
6284
|
-
this.logger('debug', `ICE Gathering State`, this.pc.iceGatheringState);
|
|
6285
|
-
};
|
|
6286
|
-
this.onSignalingStateChange = () => {
|
|
6287
|
-
this.logger('debug', `Signaling state changed`, this.pc.signalingState);
|
|
6288
|
-
};
|
|
6289
|
-
this.logger = getLogger(['Publisher', logTag]);
|
|
6290
|
-
this.pc = this.createPeerConnection(connectionConfig);
|
|
6291
|
-
this.sfuClient = sfuClient;
|
|
6292
|
-
this.state = state;
|
|
6293
|
-
this.isDtxEnabled = isDtxEnabled;
|
|
6294
|
-
this.isRedEnabled = isRedEnabled;
|
|
6295
|
-
this.onUnrecoverableError = onUnrecoverableError;
|
|
6296
|
-
this.unsubscribeOnIceRestart = dispatcher.on('iceRestart', (iceRestart) => {
|
|
6297
|
-
if (iceRestart.peerType !== PeerType.PUBLISHER_UNSPECIFIED)
|
|
6298
|
-
return;
|
|
6299
|
-
this.restartIce().catch((err) => {
|
|
6300
|
-
this.logger('warn', `ICERestart failed`, err);
|
|
6301
|
-
this.onUnrecoverableError?.();
|
|
6302
|
-
});
|
|
6303
|
-
});
|
|
6304
|
-
this.unsubscribeChangePublishQuality = dispatcher.on('changePublishQuality', ({ videoSenders }) => {
|
|
6305
|
-
withoutConcurrency('publisher.changePublishQuality', async () => {
|
|
6306
|
-
for (const videoSender of videoSenders) {
|
|
6307
|
-
const { layers } = videoSender;
|
|
6308
|
-
const enabledLayers = layers.filter((l) => l.active);
|
|
6309
|
-
await this.changePublishQuality(enabledLayers);
|
|
6310
|
-
}
|
|
6311
|
-
}).catch((err) => {
|
|
6312
|
-
this.logger('warn', 'Failed to change publish quality', err);
|
|
6313
|
-
});
|
|
6314
|
-
});
|
|
6315
|
-
}
|
|
6316
|
-
removeUnpreferredCodecs(sdp, trackType) {
|
|
6317
|
-
const opts = this.publishOptsForTrack.get(trackType);
|
|
6318
|
-
const forceSingleCodec = !!opts?.forceSingleCodec || isReactNative() || isFirefox();
|
|
6319
|
-
if (!opts || !forceSingleCodec)
|
|
6320
|
-
return sdp;
|
|
6321
|
-
const codec = opts.forceCodec || getOptimalVideoCodec(opts.preferredCodec);
|
|
6322
|
-
const orderedCodecs = this.getCodecPreferences(trackType, codec, 'sender');
|
|
6323
|
-
if (!orderedCodecs || orderedCodecs.length === 0)
|
|
6324
|
-
return sdp;
|
|
6325
|
-
const transceiver = this.transceiverCache.get(trackType);
|
|
6326
|
-
if (!transceiver)
|
|
6327
|
-
return sdp;
|
|
6328
|
-
const index = this.transceiverInitOrder.indexOf(trackType);
|
|
6329
|
-
const mid = extractMid(transceiver, index, sdp);
|
|
6330
|
-
const [codecToPreserve] = orderedCodecs;
|
|
6331
|
-
return preserveCodec(sdp, mid, codecToPreserve);
|
|
6332
|
-
}
|
|
6333
|
-
}
|
|
6334
|
-
|
|
6335
|
-
/**
|
|
6336
|
-
* A wrapper around the `RTCPeerConnection` that handles the incoming
|
|
6337
|
-
* media streams from the SFU.
|
|
6338
|
-
*
|
|
6339
|
-
* @internal
|
|
6340
|
-
*/
|
|
6341
|
-
class Subscriber {
|
|
6342
|
-
/**
|
|
6343
|
-
* Constructs a new `Subscriber` instance.
|
|
6344
|
-
*
|
|
6345
|
-
* @param sfuClient the SFU client to use.
|
|
6346
|
-
* @param dispatcher the dispatcher to use.
|
|
6347
|
-
* @param state the state of the call.
|
|
6348
|
-
* @param connectionConfig the connection configuration to use.
|
|
6349
|
-
* @param iceRestartDelay the delay in milliseconds to wait before restarting ICE when connection goes to `disconnected` state.
|
|
6350
|
-
* @param onUnrecoverableError a callback to call when an unrecoverable error occurs.
|
|
6351
|
-
* @param logTag a tag to use for logging.
|
|
6352
|
-
*/
|
|
6353
|
-
constructor({ sfuClient, dispatcher, state, connectionConfig, onUnrecoverableError, logTag, }) {
|
|
6354
|
-
this.isIceRestarting = false;
|
|
6355
|
-
/**
|
|
6356
|
-
* Creates a new `RTCPeerConnection` instance with the given configuration.
|
|
6357
|
-
*
|
|
6358
|
-
* @param connectionConfig the connection configuration to use.
|
|
6359
|
-
*/
|
|
6360
|
-
this.createPeerConnection = (connectionConfig) => {
|
|
6361
|
-
const pc = new RTCPeerConnection(connectionConfig);
|
|
6362
|
-
pc.addEventListener('icecandidate', this.onIceCandidate);
|
|
6363
|
-
pc.addEventListener('track', this.handleOnTrack);
|
|
6364
|
-
pc.addEventListener('icecandidateerror', this.onIceCandidateError);
|
|
6365
|
-
pc.addEventListener('iceconnectionstatechange', this.onIceConnectionStateChange);
|
|
6366
|
-
pc.addEventListener('icegatheringstatechange', this.onIceGatheringStateChange);
|
|
6367
|
-
return pc;
|
|
6368
|
-
};
|
|
6369
|
-
/**
|
|
6370
|
-
* Closes the `RTCPeerConnection` and unsubscribes from the dispatcher.
|
|
6371
|
-
*/
|
|
6372
|
-
this.close = () => {
|
|
6373
|
-
this.detachEventHandlers();
|
|
6374
|
-
this.pc.close();
|
|
5845
|
+
return tracks;
|
|
6375
5846
|
};
|
|
6376
5847
|
/**
|
|
6377
|
-
*
|
|
6378
|
-
*
|
|
6379
|
-
* instance with a new one (in case of migration).
|
|
5848
|
+
* Returns a list of tracks that are currently being published.
|
|
5849
|
+
* @param sdp an optional SDP to extract the `mid` from.
|
|
6380
5850
|
*/
|
|
6381
|
-
this.
|
|
6382
|
-
|
|
6383
|
-
this.
|
|
6384
|
-
|
|
6385
|
-
|
|
6386
|
-
|
|
6387
|
-
|
|
6388
|
-
|
|
5851
|
+
this.getAnnouncedTracks = (sdp) => {
|
|
5852
|
+
const trackInfos = [];
|
|
5853
|
+
for (const bundle of this.transceiverCache.items()) {
|
|
5854
|
+
const { transceiver, publishOption } = bundle;
|
|
5855
|
+
const track = transceiver.sender.track;
|
|
5856
|
+
if (!track)
|
|
5857
|
+
continue;
|
|
5858
|
+
trackInfos.push(this.toTrackInfo(transceiver, publishOption, sdp));
|
|
5859
|
+
}
|
|
5860
|
+
return trackInfos;
|
|
6389
5861
|
};
|
|
6390
5862
|
/**
|
|
6391
|
-
* Returns
|
|
6392
|
-
*
|
|
6393
|
-
*
|
|
6394
|
-
*/
|
|
6395
|
-
this.
|
|
6396
|
-
|
|
5863
|
+
* Returns a list of tracks that are currently being published.
|
|
5864
|
+
* This method shall be used for the reconnection flow.
|
|
5865
|
+
* There we shouldn't announce the tracks that have been stopped due to a codec switch.
|
|
5866
|
+
*/
|
|
5867
|
+
this.getAnnouncedTracksForReconnect = () => {
|
|
5868
|
+
const sdp = this.pc.localDescription?.sdp;
|
|
5869
|
+
const trackInfos = [];
|
|
5870
|
+
for (const publishOption of this.publishOptions) {
|
|
5871
|
+
const transceiver = this.transceiverCache.get(publishOption);
|
|
5872
|
+
if (!transceiver || !transceiver.sender.track)
|
|
5873
|
+
continue;
|
|
5874
|
+
trackInfos.push(this.toTrackInfo(transceiver, publishOption, sdp));
|
|
5875
|
+
}
|
|
5876
|
+
return trackInfos;
|
|
6397
5877
|
};
|
|
6398
5878
|
/**
|
|
6399
|
-
*
|
|
6400
|
-
*
|
|
6401
|
-
* @param sfuClient the SFU client to use.
|
|
5879
|
+
* Converts the given transceiver to a `TrackInfo` object.
|
|
6402
5880
|
*/
|
|
6403
|
-
this.
|
|
6404
|
-
|
|
5881
|
+
this.toTrackInfo = (transceiver, publishOption, sdp) => {
|
|
5882
|
+
const track = transceiver.sender.track;
|
|
5883
|
+
const isTrackLive = track.readyState === 'live';
|
|
5884
|
+
const layers = isTrackLive
|
|
5885
|
+
? computeVideoLayers(track, publishOption)
|
|
5886
|
+
: this.transceiverCache.getLayers(publishOption);
|
|
5887
|
+
this.transceiverCache.setLayers(publishOption, layers);
|
|
5888
|
+
const isAudioTrack = isAudioTrackType(publishOption.trackType);
|
|
5889
|
+
const isStereo = isAudioTrack && track.getSettings().channelCount === 2;
|
|
5890
|
+
const transceiverIndex = this.transceiverCache.indexOf(transceiver);
|
|
5891
|
+
const audioSettings = this.state.settings?.audio;
|
|
5892
|
+
return {
|
|
5893
|
+
trackId: track.id,
|
|
5894
|
+
layers: toVideoLayers(layers),
|
|
5895
|
+
trackType: publishOption.trackType,
|
|
5896
|
+
mid: extractMid(transceiver, transceiverIndex, sdp),
|
|
5897
|
+
stereo: isStereo,
|
|
5898
|
+
dtx: isAudioTrack && !!audioSettings?.opus_dtx_enabled,
|
|
5899
|
+
red: isAudioTrack && !!audioSettings?.redundant_coding_enabled,
|
|
5900
|
+
muted: !isTrackLive,
|
|
5901
|
+
codec: publishOption.codec,
|
|
5902
|
+
publishOptionId: publishOption.id,
|
|
5903
|
+
};
|
|
6405
5904
|
};
|
|
5905
|
+
this.publishOptions = publishOptions;
|
|
5906
|
+
this.pc.addEventListener('negotiationneeded', this.onNegotiationNeeded);
|
|
5907
|
+
this.on('iceRestart', (iceRestart) => {
|
|
5908
|
+
if (iceRestart.peerType !== PeerType.PUBLISHER_UNSPECIFIED)
|
|
5909
|
+
return;
|
|
5910
|
+
this.restartIce().catch((err) => {
|
|
5911
|
+
this.logger('warn', `ICERestart failed`, err);
|
|
5912
|
+
this.onUnrecoverableError?.();
|
|
5913
|
+
});
|
|
5914
|
+
});
|
|
5915
|
+
this.on('changePublishQuality', async (event) => {
|
|
5916
|
+
for (const videoSender of event.videoSenders) {
|
|
5917
|
+
await this.changePublishQuality(videoSender);
|
|
5918
|
+
}
|
|
5919
|
+
});
|
|
5920
|
+
this.on('changePublishOptions', (event) => {
|
|
5921
|
+
this.publishOptions = event.publishOptions;
|
|
5922
|
+
return this.syncPublishOptions();
|
|
5923
|
+
});
|
|
5924
|
+
}
|
|
5925
|
+
/**
|
|
5926
|
+
* Detaches the event handlers from the `RTCPeerConnection`.
|
|
5927
|
+
* This is useful when we want to replace the `RTCPeerConnection`
|
|
5928
|
+
* instance with a new one (in case of migration).
|
|
5929
|
+
*/
|
|
5930
|
+
detachEventHandlers() {
|
|
5931
|
+
super.detachEventHandlers();
|
|
5932
|
+
this.pc.removeEventListener('negotiationneeded', this.onNegotiationNeeded);
|
|
5933
|
+
}
|
|
5934
|
+
}
|
|
5935
|
+
|
|
5936
|
+
/**
|
|
5937
|
+
* A wrapper around the `RTCPeerConnection` that handles the incoming
|
|
5938
|
+
* media streams from the SFU.
|
|
5939
|
+
*
|
|
5940
|
+
* @internal
|
|
5941
|
+
*/
|
|
5942
|
+
class Subscriber extends BasePeerConnection {
|
|
5943
|
+
/**
|
|
5944
|
+
* Constructs a new `Subscriber` instance.
|
|
5945
|
+
*/
|
|
5946
|
+
constructor(opts) {
|
|
5947
|
+
super(PeerType.SUBSCRIBER, opts);
|
|
6406
5948
|
/**
|
|
6407
5949
|
* Restarts the ICE connection and renegotiates with the SFU.
|
|
6408
5950
|
*/
|
|
@@ -6465,7 +6007,15 @@ class Subscriber {
|
|
|
6465
6007
|
this.logger('error', `Unknown track type: ${rawTrackType}`);
|
|
6466
6008
|
return;
|
|
6467
6009
|
}
|
|
6010
|
+
// get the previous stream to dispose it later
|
|
6011
|
+
// usually this happens during migration, when the stream is replaced
|
|
6012
|
+
// with a new one but the old one is still in the state
|
|
6468
6013
|
const previousStream = participantToUpdate[streamKindProp];
|
|
6014
|
+
// replace the previous stream with the new one, prevents flickering
|
|
6015
|
+
this.state.updateParticipant(participantToUpdate.sessionId, {
|
|
6016
|
+
[streamKindProp]: primaryStream,
|
|
6017
|
+
});
|
|
6018
|
+
// now, dispose the previous stream if it exists
|
|
6469
6019
|
if (previousStream) {
|
|
6470
6020
|
this.logger('info', `[onTrack]: Cleaning up previous remote ${e.track.kind} tracks for userId: ${participantToUpdate.userId}`);
|
|
6471
6021
|
previousStream.getTracks().forEach((t) => {
|
|
@@ -6473,24 +6023,6 @@ class Subscriber {
|
|
|
6473
6023
|
previousStream.removeTrack(t);
|
|
6474
6024
|
});
|
|
6475
6025
|
}
|
|
6476
|
-
this.state.updateParticipant(participantToUpdate.sessionId, {
|
|
6477
|
-
[streamKindProp]: primaryStream,
|
|
6478
|
-
});
|
|
6479
|
-
};
|
|
6480
|
-
this.onIceCandidate = (e) => {
|
|
6481
|
-
const { candidate } = e;
|
|
6482
|
-
if (!candidate) {
|
|
6483
|
-
this.logger('debug', 'null ice candidate');
|
|
6484
|
-
return;
|
|
6485
|
-
}
|
|
6486
|
-
this.sfuClient
|
|
6487
|
-
.iceTrickle({
|
|
6488
|
-
iceCandidate: getIceCandidate(candidate),
|
|
6489
|
-
peerType: PeerType.SUBSCRIBER,
|
|
6490
|
-
})
|
|
6491
|
-
.catch((err) => {
|
|
6492
|
-
this.logger('warn', `ICETrickle failed`, err);
|
|
6493
|
-
});
|
|
6494
6026
|
};
|
|
6495
6027
|
this.negotiate = async (subscriberOffer) => {
|
|
6496
6028
|
this.logger('info', `Received subscriberOffer`, subscriberOffer);
|
|
@@ -6498,15 +6030,7 @@ class Subscriber {
|
|
|
6498
6030
|
type: 'offer',
|
|
6499
6031
|
sdp: subscriberOffer.sdp,
|
|
6500
6032
|
});
|
|
6501
|
-
this.
|
|
6502
|
-
try {
|
|
6503
|
-
const iceCandidate = JSON.parse(candidate.iceCandidate);
|
|
6504
|
-
await this.pc.addIceCandidate(iceCandidate);
|
|
6505
|
-
}
|
|
6506
|
-
catch (e) {
|
|
6507
|
-
this.logger('warn', `ICE candidate error`, [e, candidate]);
|
|
6508
|
-
}
|
|
6509
|
-
});
|
|
6033
|
+
this.addTrickledIceCandidates();
|
|
6510
6034
|
const answer = await this.pc.createAnswer();
|
|
6511
6035
|
await this.pc.setLocalDescription(answer);
|
|
6512
6036
|
await this.sfuClient.sendAnswer({
|
|
@@ -6515,56 +6039,21 @@ class Subscriber {
|
|
|
6515
6039
|
});
|
|
6516
6040
|
this.isIceRestarting = false;
|
|
6517
6041
|
};
|
|
6518
|
-
this.
|
|
6519
|
-
|
|
6520
|
-
this.
|
|
6521
|
-
if (this.state.callingState === CallingState.RECONNECTING)
|
|
6522
|
-
return;
|
|
6523
|
-
// do nothing when ICE is restarting
|
|
6524
|
-
if (this.isIceRestarting)
|
|
6525
|
-
return;
|
|
6526
|
-
if (state === 'failed' || state === 'disconnected') {
|
|
6527
|
-
this.logger('debug', `Attempting to restart ICE`);
|
|
6528
|
-
this.restartIce().catch((e) => {
|
|
6529
|
-
this.logger('error', `ICE restart failed`, e);
|
|
6530
|
-
this.onUnrecoverableError?.();
|
|
6531
|
-
});
|
|
6532
|
-
}
|
|
6533
|
-
};
|
|
6534
|
-
this.onIceGatheringStateChange = () => {
|
|
6535
|
-
this.logger('debug', `ICE gathering state changed`, this.pc.iceGatheringState);
|
|
6536
|
-
};
|
|
6537
|
-
this.onIceCandidateError = (e) => {
|
|
6538
|
-
const errorMessage = e instanceof RTCPeerConnectionIceErrorEvent &&
|
|
6539
|
-
`${e.errorCode}: ${e.errorText}`;
|
|
6540
|
-
const iceState = this.pc.iceConnectionState;
|
|
6541
|
-
const logLevel = iceState === 'connected' || iceState === 'checking' ? 'debug' : 'warn';
|
|
6542
|
-
this.logger(logLevel, `ICE Candidate error`, errorMessage);
|
|
6543
|
-
};
|
|
6544
|
-
this.logger = getLogger(['Subscriber', logTag]);
|
|
6545
|
-
this.sfuClient = sfuClient;
|
|
6546
|
-
this.state = state;
|
|
6547
|
-
this.onUnrecoverableError = onUnrecoverableError;
|
|
6548
|
-
this.pc = this.createPeerConnection(connectionConfig);
|
|
6549
|
-
const subscriberOfferConcurrencyTag = Symbol('subscriberOffer');
|
|
6550
|
-
this.unregisterOnSubscriberOffer = dispatcher.on('subscriberOffer', (subscriberOffer) => {
|
|
6551
|
-
withoutConcurrency(subscriberOfferConcurrencyTag, () => {
|
|
6552
|
-
return this.negotiate(subscriberOffer);
|
|
6553
|
-
}).catch((err) => {
|
|
6042
|
+
this.pc.addEventListener('track', this.handleOnTrack);
|
|
6043
|
+
this.on('subscriberOffer', async (subscriberOffer) => {
|
|
6044
|
+
return this.negotiate(subscriberOffer).catch((err) => {
|
|
6554
6045
|
this.logger('error', `Negotiation failed.`, err);
|
|
6555
6046
|
});
|
|
6556
6047
|
});
|
|
6557
|
-
|
|
6558
|
-
|
|
6559
|
-
|
|
6560
|
-
|
|
6561
|
-
|
|
6562
|
-
|
|
6563
|
-
|
|
6564
|
-
|
|
6565
|
-
|
|
6566
|
-
});
|
|
6567
|
-
});
|
|
6048
|
+
}
|
|
6049
|
+
/**
|
|
6050
|
+
* Detaches the event handlers from the `RTCPeerConnection`.
|
|
6051
|
+
* This is useful when we want to replace the `RTCPeerConnection`
|
|
6052
|
+
* instance with a new one (in case of migration).
|
|
6053
|
+
*/
|
|
6054
|
+
detachEventHandlers() {
|
|
6055
|
+
super.detachEventHandlers();
|
|
6056
|
+
this.pc.removeEventListener('track', this.handleOnTrack);
|
|
6568
6057
|
}
|
|
6569
6058
|
}
|
|
6570
6059
|
|
|
@@ -6596,6 +6085,16 @@ const createWebSocketSignalChannel = (opts) => {
|
|
|
6596
6085
|
return ws;
|
|
6597
6086
|
};
|
|
6598
6087
|
|
|
6088
|
+
const toRtcConfiguration = (config) => {
|
|
6089
|
+
return {
|
|
6090
|
+
iceServers: config.map((ice) => ({
|
|
6091
|
+
urls: ice.urls,
|
|
6092
|
+
username: ice.username,
|
|
6093
|
+
credential: ice.password,
|
|
6094
|
+
})),
|
|
6095
|
+
};
|
|
6096
|
+
};
|
|
6097
|
+
|
|
6599
6098
|
/**
|
|
6600
6099
|
* Saving a long-lived reference to a promise that can reject can be unsafe,
|
|
6601
6100
|
* since rejecting the promise causes an unhandled rejection error (even if the
|
|
@@ -6880,6 +6379,7 @@ class StreamSfuClient {
|
|
|
6880
6379
|
clearTimeout(this.migrateAwayTimeout);
|
|
6881
6380
|
this.abortController.abort();
|
|
6882
6381
|
this.migrationTask?.resolve();
|
|
6382
|
+
this.iceTrickleBuffer.dispose();
|
|
6883
6383
|
};
|
|
6884
6384
|
this.leaveAndClose = async (reason) => {
|
|
6885
6385
|
await this.joinTask;
|
|
@@ -6912,13 +6412,9 @@ class StreamSfuClient {
|
|
|
6912
6412
|
await this.joinTask;
|
|
6913
6413
|
return retryable(() => this.rpc.iceRestart({ ...data, sessionId: this.sessionId }), this.abortController.signal);
|
|
6914
6414
|
};
|
|
6915
|
-
this.
|
|
6916
|
-
await this.joinTask;
|
|
6917
|
-
return this.updateMuteStates({ muteStates: [{ trackType, muted }] });
|
|
6918
|
-
};
|
|
6919
|
-
this.updateMuteStates = async (data) => {
|
|
6415
|
+
this.updateMuteStates = async (muteStates) => {
|
|
6920
6416
|
await this.joinTask;
|
|
6921
|
-
return retryable(() => this.rpc.updateMuteStates({
|
|
6417
|
+
return retryable(() => this.rpc.updateMuteStates({ muteStates, sessionId: this.sessionId }), this.abortController.signal);
|
|
6922
6418
|
};
|
|
6923
6419
|
this.sendStats = async (stats) => {
|
|
6924
6420
|
await this.joinTask;
|
|
@@ -7098,16 +6594,6 @@ StreamSfuClient.ERROR_CONNECTION_UNHEALTHY = 4001;
|
|
|
7098
6594
|
*/
|
|
7099
6595
|
StreamSfuClient.DISPOSE_OLD_SOCKET = 4002;
|
|
7100
6596
|
|
|
7101
|
-
const toRtcConfiguration = (config) => {
|
|
7102
|
-
return {
|
|
7103
|
-
iceServers: config.map((ice) => ({
|
|
7104
|
-
urls: ice.urls,
|
|
7105
|
-
username: ice.username,
|
|
7106
|
-
credential: ice.password,
|
|
7107
|
-
})),
|
|
7108
|
-
};
|
|
7109
|
-
};
|
|
7110
|
-
|
|
7111
6597
|
/**
|
|
7112
6598
|
* Event handler that watched the delivery of `call.accepted`.
|
|
7113
6599
|
* Once the event is received, the call is joined.
|
|
@@ -7326,6 +6812,21 @@ const handleRemoteSoftMute = (call) => {
|
|
|
7326
6812
|
});
|
|
7327
6813
|
};
|
|
7328
6814
|
|
|
6815
|
+
/**
|
|
6816
|
+
* Adds unique values to an array.
|
|
6817
|
+
*
|
|
6818
|
+
* @param arr the array to add to.
|
|
6819
|
+
* @param values the values to add.
|
|
6820
|
+
*/
|
|
6821
|
+
const pushToIfMissing = (arr, ...values) => {
|
|
6822
|
+
for (const v of values) {
|
|
6823
|
+
if (!arr.includes(v)) {
|
|
6824
|
+
arr.push(v);
|
|
6825
|
+
}
|
|
6826
|
+
}
|
|
6827
|
+
return arr;
|
|
6828
|
+
};
|
|
6829
|
+
|
|
7329
6830
|
/**
|
|
7330
6831
|
* An event responder which handles the `participantJoined` event.
|
|
7331
6832
|
*/
|
|
@@ -7391,7 +6892,7 @@ const watchTrackPublished = (state) => {
|
|
|
7391
6892
|
}
|
|
7392
6893
|
else {
|
|
7393
6894
|
state.updateParticipant(sessionId, (p) => ({
|
|
7394
|
-
publishedTracks: [...p.publishedTracks, type
|
|
6895
|
+
publishedTracks: pushToIfMissing([...p.publishedTracks], type),
|
|
7395
6896
|
}));
|
|
7396
6897
|
}
|
|
7397
6898
|
};
|
|
@@ -7416,7 +6917,6 @@ const watchTrackUnpublished = (state) => {
|
|
|
7416
6917
|
}
|
|
7417
6918
|
};
|
|
7418
6919
|
};
|
|
7419
|
-
const unique = (v, i, arr) => arr.indexOf(v) === i;
|
|
7420
6920
|
/**
|
|
7421
6921
|
* Reconciles orphaned tracks (if any) for the given participant.
|
|
7422
6922
|
*
|
|
@@ -7566,6 +7066,38 @@ const getSdkVersion = (sdk) => {
|
|
|
7566
7066
|
return sdk ? `${sdk.major}.${sdk.minor}.${sdk.patch}` : '0.0.0-development';
|
|
7567
7067
|
};
|
|
7568
7068
|
|
|
7069
|
+
/**
|
|
7070
|
+
* Checks whether the current browser is Safari.
|
|
7071
|
+
*/
|
|
7072
|
+
const isSafari = () => {
|
|
7073
|
+
if (typeof navigator === 'undefined')
|
|
7074
|
+
return false;
|
|
7075
|
+
return /^((?!chrome|android).)*safari/i.test(navigator.userAgent || '');
|
|
7076
|
+
};
|
|
7077
|
+
/**
|
|
7078
|
+
* Checks whether the current browser is Firefox.
|
|
7079
|
+
*/
|
|
7080
|
+
const isFirefox = () => {
|
|
7081
|
+
if (typeof navigator === 'undefined')
|
|
7082
|
+
return false;
|
|
7083
|
+
return navigator.userAgent?.includes('Firefox');
|
|
7084
|
+
};
|
|
7085
|
+
/**
|
|
7086
|
+
* Checks whether the current browser is Google Chrome.
|
|
7087
|
+
*/
|
|
7088
|
+
const isChrome = () => {
|
|
7089
|
+
if (typeof navigator === 'undefined')
|
|
7090
|
+
return false;
|
|
7091
|
+
return navigator.userAgent?.includes('Chrome');
|
|
7092
|
+
};
|
|
7093
|
+
|
|
7094
|
+
var browsers = /*#__PURE__*/Object.freeze({
|
|
7095
|
+
__proto__: null,
|
|
7096
|
+
isChrome: isChrome,
|
|
7097
|
+
isFirefox: isFirefox,
|
|
7098
|
+
isSafari: isSafari
|
|
7099
|
+
});
|
|
7100
|
+
|
|
7569
7101
|
/**
|
|
7570
7102
|
* Creates a new StatsReporter instance that collects metrics about the ongoing call and reports them to the state store
|
|
7571
7103
|
*/
|
|
@@ -7582,12 +7114,12 @@ const createStatsReporter = ({ subscriber, publisher, state, datacenter, polling
|
|
|
7582
7114
|
return undefined;
|
|
7583
7115
|
}
|
|
7584
7116
|
};
|
|
7585
|
-
const getStatsForStream = async (kind,
|
|
7117
|
+
const getStatsForStream = async (kind, tracks) => {
|
|
7586
7118
|
const pc = kind === 'subscriber' ? subscriber : publisher;
|
|
7587
7119
|
if (!pc)
|
|
7588
7120
|
return [];
|
|
7589
7121
|
const statsForStream = [];
|
|
7590
|
-
for (
|
|
7122
|
+
for (const track of tracks) {
|
|
7591
7123
|
const report = await pc.getStats(track);
|
|
7592
7124
|
const stats = transform(report, {
|
|
7593
7125
|
// @ts-ignore
|
|
@@ -7612,26 +7144,24 @@ const createStatsReporter = ({ subscriber, publisher, state, datacenter, polling
|
|
|
7612
7144
|
*/
|
|
7613
7145
|
const run = async () => {
|
|
7614
7146
|
const participantStats = {};
|
|
7615
|
-
|
|
7616
|
-
|
|
7617
|
-
for (
|
|
7147
|
+
if (sessionIdsToTrack.size > 0) {
|
|
7148
|
+
const sessionIds = new Set(sessionIdsToTrack);
|
|
7149
|
+
for (const participant of state.participants) {
|
|
7618
7150
|
if (!sessionIds.has(participant.sessionId))
|
|
7619
7151
|
continue;
|
|
7620
|
-
const
|
|
7621
|
-
|
|
7622
|
-
: 'subscriber';
|
|
7152
|
+
const { audioStream, isLocalParticipant, sessionId, userId, videoStream, } = participant;
|
|
7153
|
+
const kind = isLocalParticipant ? 'publisher' : 'subscriber';
|
|
7623
7154
|
try {
|
|
7624
|
-
const
|
|
7625
|
-
|
|
7626
|
-
|
|
7627
|
-
|
|
7628
|
-
|
|
7629
|
-
|
|
7630
|
-
|
|
7631
|
-
});
|
|
7155
|
+
const tracks = isLocalParticipant
|
|
7156
|
+
? publisher?.getPublishedTracks() || []
|
|
7157
|
+
: [
|
|
7158
|
+
...(videoStream?.getVideoTracks() || []),
|
|
7159
|
+
...(audioStream?.getAudioTracks() || []),
|
|
7160
|
+
];
|
|
7161
|
+
participantStats[sessionId] = await getStatsForStream(kind, tracks);
|
|
7632
7162
|
}
|
|
7633
7163
|
catch (e) {
|
|
7634
|
-
logger('
|
|
7164
|
+
logger('warn', `Failed to collect ${kind} stats for ${userId}`, e);
|
|
7635
7165
|
}
|
|
7636
7166
|
}
|
|
7637
7167
|
}
|
|
@@ -7641,6 +7171,7 @@ const createStatsReporter = ({ subscriber, publisher, state, datacenter, polling
|
|
|
7641
7171
|
.then((report) => transform(report, {
|
|
7642
7172
|
kind: 'subscriber',
|
|
7643
7173
|
trackKind: 'video',
|
|
7174
|
+
publisher,
|
|
7644
7175
|
}))
|
|
7645
7176
|
.then(aggregate),
|
|
7646
7177
|
publisher
|
|
@@ -7649,6 +7180,7 @@ const createStatsReporter = ({ subscriber, publisher, state, datacenter, polling
|
|
|
7649
7180
|
.then((report) => transform(report, {
|
|
7650
7181
|
kind: 'publisher',
|
|
7651
7182
|
trackKind: 'video',
|
|
7183
|
+
publisher,
|
|
7652
7184
|
}))
|
|
7653
7185
|
.then(aggregate)
|
|
7654
7186
|
: getEmptyStats(),
|
|
@@ -7697,7 +7229,7 @@ const createStatsReporter = ({ subscriber, publisher, state, datacenter, polling
|
|
|
7697
7229
|
* @param opts the transform options.
|
|
7698
7230
|
*/
|
|
7699
7231
|
const transform = (report, opts) => {
|
|
7700
|
-
const { trackKind, kind } = opts;
|
|
7232
|
+
const { trackKind, kind, publisher } = opts;
|
|
7701
7233
|
const direction = kind === 'subscriber' ? 'inbound-rtp' : 'outbound-rtp';
|
|
7702
7234
|
const stats = flatten(report);
|
|
7703
7235
|
const streams = stats
|
|
@@ -7713,6 +7245,16 @@ const transform = (report, opts) => {
|
|
|
7713
7245
|
s.id === transport.selectedCandidatePairId);
|
|
7714
7246
|
roundTripTime = candidatePair?.currentRoundTripTime;
|
|
7715
7247
|
}
|
|
7248
|
+
let trackType;
|
|
7249
|
+
if (kind === 'publisher' && publisher) {
|
|
7250
|
+
const firefox = isFirefox();
|
|
7251
|
+
const mediaSource = stats.find((s) => s.type === 'media-source' &&
|
|
7252
|
+
// Firefox doesn't have mediaSourceId, so we need to guess the media source
|
|
7253
|
+
(firefox ? true : s.id === rtcStreamStats.mediaSourceId));
|
|
7254
|
+
if (mediaSource) {
|
|
7255
|
+
trackType = publisher.getTrackType(mediaSource.trackIdentifier);
|
|
7256
|
+
}
|
|
7257
|
+
}
|
|
7716
7258
|
return {
|
|
7717
7259
|
bytesSent: rtcStreamStats.bytesSent,
|
|
7718
7260
|
bytesReceived: rtcStreamStats.bytesReceived,
|
|
@@ -7723,10 +7265,12 @@ const transform = (report, opts) => {
|
|
|
7723
7265
|
framesPerSecond: rtcStreamStats.framesPerSecond,
|
|
7724
7266
|
jitter: rtcStreamStats.jitter,
|
|
7725
7267
|
kind: rtcStreamStats.kind,
|
|
7268
|
+
mediaSourceId: rtcStreamStats.mediaSourceId,
|
|
7726
7269
|
// @ts-ignore: available in Chrome only, TS doesn't recognize this
|
|
7727
7270
|
qualityLimitationReason: rtcStreamStats.qualityLimitationReason,
|
|
7728
7271
|
rid: rtcStreamStats.rid,
|
|
7729
7272
|
ssrc: rtcStreamStats.ssrc,
|
|
7273
|
+
trackType,
|
|
7730
7274
|
};
|
|
7731
7275
|
});
|
|
7732
7276
|
return {
|
|
@@ -7747,6 +7291,7 @@ const getEmptyStats = (stats) => {
|
|
|
7747
7291
|
highestFrameHeight: 0,
|
|
7748
7292
|
highestFramesPerSecond: 0,
|
|
7749
7293
|
codec: '',
|
|
7294
|
+
codecPerTrackType: {},
|
|
7750
7295
|
timestamp: Date.now(),
|
|
7751
7296
|
};
|
|
7752
7297
|
};
|
|
@@ -7782,18 +7327,152 @@ const aggregate = (stats) => {
|
|
|
7782
7327
|
report.averageRoundTripTimeInMs = Math.round((report.averageRoundTripTimeInMs / streams.length) * 1000);
|
|
7783
7328
|
// we take the first codec we find, as it should be the same for all streams
|
|
7784
7329
|
report.codec = streams[0].codec || '';
|
|
7330
|
+
report.codecPerTrackType = streams.reduce((acc, stream) => {
|
|
7331
|
+
if (stream.trackType) {
|
|
7332
|
+
acc[stream.trackType] = stream.codec || '';
|
|
7333
|
+
}
|
|
7334
|
+
return acc;
|
|
7335
|
+
}, {});
|
|
7336
|
+
}
|
|
7337
|
+
const qualityLimitationReason = [
|
|
7338
|
+
qualityLimitationReasons.has('cpu') && 'cpu',
|
|
7339
|
+
qualityLimitationReasons.has('bandwidth') && 'bandwidth',
|
|
7340
|
+
qualityLimitationReasons.has('other') && 'other',
|
|
7341
|
+
]
|
|
7342
|
+
.filter(Boolean)
|
|
7343
|
+
.join(', ');
|
|
7344
|
+
if (qualityLimitationReason) {
|
|
7345
|
+
report.qualityLimitationReasons = qualityLimitationReason;
|
|
7346
|
+
}
|
|
7347
|
+
return report;
|
|
7348
|
+
};
|
|
7349
|
+
|
|
7350
|
+
const version = "1.15.0";
|
|
7351
|
+
const [major, minor, patch] = version.split('.');
|
|
7352
|
+
let sdkInfo = {
|
|
7353
|
+
type: SdkType.PLAIN_JAVASCRIPT,
|
|
7354
|
+
major,
|
|
7355
|
+
minor,
|
|
7356
|
+
patch,
|
|
7357
|
+
};
|
|
7358
|
+
let osInfo;
|
|
7359
|
+
let deviceInfo;
|
|
7360
|
+
let webRtcInfo;
|
|
7361
|
+
let deviceState = { oneofKind: undefined };
|
|
7362
|
+
const setSdkInfo = (info) => {
|
|
7363
|
+
sdkInfo = info;
|
|
7364
|
+
};
|
|
7365
|
+
const getSdkInfo = () => {
|
|
7366
|
+
return sdkInfo;
|
|
7367
|
+
};
|
|
7368
|
+
const setOSInfo = (info) => {
|
|
7369
|
+
osInfo = info;
|
|
7370
|
+
};
|
|
7371
|
+
const getOSInfo = () => {
|
|
7372
|
+
return osInfo;
|
|
7373
|
+
};
|
|
7374
|
+
const setDeviceInfo = (info) => {
|
|
7375
|
+
deviceInfo = info;
|
|
7376
|
+
};
|
|
7377
|
+
const getDeviceInfo = () => {
|
|
7378
|
+
return deviceInfo;
|
|
7379
|
+
};
|
|
7380
|
+
const getWebRTCInfo = () => {
|
|
7381
|
+
return webRtcInfo;
|
|
7382
|
+
};
|
|
7383
|
+
const setWebRTCInfo = (info) => {
|
|
7384
|
+
webRtcInfo = info;
|
|
7385
|
+
};
|
|
7386
|
+
const setThermalState = (state) => {
|
|
7387
|
+
if (!osInfo) {
|
|
7388
|
+
deviceState = { oneofKind: undefined };
|
|
7389
|
+
return;
|
|
7390
|
+
}
|
|
7391
|
+
if (osInfo.name === 'android') {
|
|
7392
|
+
const thermalState = AndroidThermalState[state] ||
|
|
7393
|
+
AndroidThermalState.UNSPECIFIED;
|
|
7394
|
+
deviceState = {
|
|
7395
|
+
oneofKind: 'android',
|
|
7396
|
+
android: {
|
|
7397
|
+
thermalState,
|
|
7398
|
+
isPowerSaverMode: deviceState?.oneofKind === 'android' &&
|
|
7399
|
+
deviceState.android.isPowerSaverMode,
|
|
7400
|
+
},
|
|
7401
|
+
};
|
|
7402
|
+
}
|
|
7403
|
+
if (osInfo.name.toLowerCase() === 'ios') {
|
|
7404
|
+
const thermalState = AppleThermalState[state] ||
|
|
7405
|
+
AppleThermalState.UNSPECIFIED;
|
|
7406
|
+
deviceState = {
|
|
7407
|
+
oneofKind: 'apple',
|
|
7408
|
+
apple: {
|
|
7409
|
+
thermalState,
|
|
7410
|
+
isLowPowerModeEnabled: deviceState?.oneofKind === 'apple' &&
|
|
7411
|
+
deviceState.apple.isLowPowerModeEnabled,
|
|
7412
|
+
},
|
|
7413
|
+
};
|
|
7414
|
+
}
|
|
7415
|
+
};
|
|
7416
|
+
const setPowerState = (powerMode) => {
|
|
7417
|
+
if (!osInfo) {
|
|
7418
|
+
deviceState = { oneofKind: undefined };
|
|
7419
|
+
return;
|
|
7420
|
+
}
|
|
7421
|
+
if (osInfo.name === 'android') {
|
|
7422
|
+
deviceState = {
|
|
7423
|
+
oneofKind: 'android',
|
|
7424
|
+
android: {
|
|
7425
|
+
thermalState: deviceState?.oneofKind === 'android'
|
|
7426
|
+
? deviceState.android.thermalState
|
|
7427
|
+
: AndroidThermalState.UNSPECIFIED,
|
|
7428
|
+
isPowerSaverMode: powerMode,
|
|
7429
|
+
},
|
|
7430
|
+
};
|
|
7431
|
+
}
|
|
7432
|
+
if (osInfo.name.toLowerCase() === 'ios') {
|
|
7433
|
+
deviceState = {
|
|
7434
|
+
oneofKind: 'apple',
|
|
7435
|
+
apple: {
|
|
7436
|
+
thermalState: deviceState?.oneofKind === 'apple'
|
|
7437
|
+
? deviceState.apple.thermalState
|
|
7438
|
+
: AppleThermalState.UNSPECIFIED,
|
|
7439
|
+
isLowPowerModeEnabled: powerMode,
|
|
7440
|
+
},
|
|
7441
|
+
};
|
|
7785
7442
|
}
|
|
7786
|
-
|
|
7787
|
-
|
|
7788
|
-
|
|
7789
|
-
|
|
7790
|
-
|
|
7791
|
-
|
|
7792
|
-
|
|
7793
|
-
|
|
7794
|
-
|
|
7443
|
+
};
|
|
7444
|
+
const getDeviceState = () => {
|
|
7445
|
+
return deviceState;
|
|
7446
|
+
};
|
|
7447
|
+
const getClientDetails = () => {
|
|
7448
|
+
if (isReactNative()) {
|
|
7449
|
+
// Since RN doesn't support web, sharing browser info is not required
|
|
7450
|
+
return {
|
|
7451
|
+
sdk: getSdkInfo(),
|
|
7452
|
+
os: getOSInfo(),
|
|
7453
|
+
device: getDeviceInfo(),
|
|
7454
|
+
};
|
|
7795
7455
|
}
|
|
7796
|
-
|
|
7456
|
+
const userAgent = new UAParser(navigator.userAgent);
|
|
7457
|
+
const { browser, os, device, cpu } = userAgent.getResult();
|
|
7458
|
+
return {
|
|
7459
|
+
sdk: getSdkInfo(),
|
|
7460
|
+
browser: {
|
|
7461
|
+
name: browser.name || navigator.userAgent,
|
|
7462
|
+
version: browser.version || '',
|
|
7463
|
+
},
|
|
7464
|
+
os: {
|
|
7465
|
+
name: os.name || '',
|
|
7466
|
+
version: os.version || '',
|
|
7467
|
+
architecture: cpu.architecture || '',
|
|
7468
|
+
},
|
|
7469
|
+
device: {
|
|
7470
|
+
name: [device.vendor, device.model, device.type]
|
|
7471
|
+
.filter(Boolean)
|
|
7472
|
+
.join(' '),
|
|
7473
|
+
version: '',
|
|
7474
|
+
},
|
|
7475
|
+
};
|
|
7797
7476
|
};
|
|
7798
7477
|
|
|
7799
7478
|
class SfuStatsReporter {
|
|
@@ -7829,8 +7508,28 @@ class SfuStatsReporter {
|
|
|
7829
7508
|
});
|
|
7830
7509
|
});
|
|
7831
7510
|
};
|
|
7832
|
-
this.
|
|
7833
|
-
|
|
7511
|
+
this.sendConnectionTime = (connectionTimeSeconds) => {
|
|
7512
|
+
this.sendTelemetryData({
|
|
7513
|
+
data: {
|
|
7514
|
+
oneofKind: 'connectionTimeSeconds',
|
|
7515
|
+
connectionTimeSeconds,
|
|
7516
|
+
},
|
|
7517
|
+
});
|
|
7518
|
+
};
|
|
7519
|
+
this.sendReconnectionTime = (strategy, timeSeconds) => {
|
|
7520
|
+
this.sendTelemetryData({
|
|
7521
|
+
data: {
|
|
7522
|
+
oneofKind: 'reconnection',
|
|
7523
|
+
reconnection: { strategy, timeSeconds },
|
|
7524
|
+
},
|
|
7525
|
+
});
|
|
7526
|
+
};
|
|
7527
|
+
this.sendTelemetryData = (telemetryData) => {
|
|
7528
|
+
// intentionally not awaiting the promise here
|
|
7529
|
+
// to avoid impeding with the ongoing actions.
|
|
7530
|
+
this.run(telemetryData).catch((err) => {
|
|
7531
|
+
this.logger('warn', 'Failed to send telemetry data', err);
|
|
7532
|
+
});
|
|
7834
7533
|
};
|
|
7835
7534
|
this.run = async (telemetryData) => {
|
|
7836
7535
|
const [subscriberStats, publisherStats] = await Promise.all([
|
|
@@ -8398,6 +8097,25 @@ class PermissionsContext {
|
|
|
8398
8097
|
this.hasPermission = (permission) => {
|
|
8399
8098
|
return this.permissions.includes(permission);
|
|
8400
8099
|
};
|
|
8100
|
+
/**
|
|
8101
|
+
* Helper method that checks whether the current user has the permission
|
|
8102
|
+
* to publish the given track type.
|
|
8103
|
+
*/
|
|
8104
|
+
this.canPublish = (trackType) => {
|
|
8105
|
+
switch (trackType) {
|
|
8106
|
+
case TrackType.AUDIO:
|
|
8107
|
+
return this.hasPermission(OwnCapability.SEND_AUDIO);
|
|
8108
|
+
case TrackType.VIDEO:
|
|
8109
|
+
return this.hasPermission(OwnCapability.SEND_VIDEO);
|
|
8110
|
+
case TrackType.SCREEN_SHARE:
|
|
8111
|
+
case TrackType.SCREEN_SHARE_AUDIO:
|
|
8112
|
+
return this.hasPermission(OwnCapability.SCREENSHARE);
|
|
8113
|
+
case TrackType.UNSPECIFIED:
|
|
8114
|
+
return false;
|
|
8115
|
+
default:
|
|
8116
|
+
ensureExhausted(trackType, 'Unknown track type');
|
|
8117
|
+
}
|
|
8118
|
+
};
|
|
8401
8119
|
/**
|
|
8402
8120
|
* Checks if the current user can request a specific permission
|
|
8403
8121
|
* within the call.
|
|
@@ -9036,36 +8754,42 @@ class InputMediaDeviceManager {
|
|
|
9036
8754
|
}
|
|
9037
8755
|
});
|
|
9038
8756
|
}
|
|
8757
|
+
publishStream(stream) {
|
|
8758
|
+
return this.call.publish(stream, this.trackType);
|
|
8759
|
+
}
|
|
8760
|
+
stopPublishStream() {
|
|
8761
|
+
return this.call.stopPublish(this.trackType);
|
|
8762
|
+
}
|
|
9039
8763
|
getTracks() {
|
|
9040
8764
|
return this.state.mediaStream?.getTracks() ?? [];
|
|
9041
8765
|
}
|
|
9042
8766
|
async muteStream(stopTracks = true) {
|
|
9043
|
-
|
|
8767
|
+
const mediaStream = this.state.mediaStream;
|
|
8768
|
+
if (!mediaStream)
|
|
9044
8769
|
return;
|
|
9045
8770
|
this.logger('debug', `${stopTracks ? 'Stopping' : 'Disabling'} stream`);
|
|
9046
8771
|
if (this.call.state.callingState === CallingState.JOINED) {
|
|
9047
|
-
await this.stopPublishStream(
|
|
8772
|
+
await this.stopPublishStream();
|
|
9048
8773
|
}
|
|
9049
8774
|
this.muteLocalStream(stopTracks);
|
|
9050
8775
|
const allEnded = this.getTracks().every((t) => t.readyState === 'ended');
|
|
9051
8776
|
if (allEnded) {
|
|
9052
|
-
|
|
9053
|
-
|
|
9054
|
-
typeof this.state.mediaStream.release === 'function') {
|
|
8777
|
+
// @ts-expect-error release() is present in react-native-webrtc
|
|
8778
|
+
if (typeof mediaStream.release === 'function') {
|
|
9055
8779
|
// @ts-expect-error called to dispose the stream in RN
|
|
9056
|
-
|
|
8780
|
+
mediaStream.release();
|
|
9057
8781
|
}
|
|
9058
8782
|
this.state.setMediaStream(undefined, undefined);
|
|
9059
8783
|
this.filters.forEach((entry) => entry.stop?.());
|
|
9060
8784
|
}
|
|
9061
8785
|
}
|
|
9062
|
-
|
|
8786
|
+
disableTracks() {
|
|
9063
8787
|
this.getTracks().forEach((track) => {
|
|
9064
8788
|
if (track.enabled)
|
|
9065
8789
|
track.enabled = false;
|
|
9066
8790
|
});
|
|
9067
8791
|
}
|
|
9068
|
-
|
|
8792
|
+
enableTracks() {
|
|
9069
8793
|
this.getTracks().forEach((track) => {
|
|
9070
8794
|
if (!track.enabled)
|
|
9071
8795
|
track.enabled = true;
|
|
@@ -9085,7 +8809,7 @@ class InputMediaDeviceManager {
|
|
|
9085
8809
|
this.stopTracks();
|
|
9086
8810
|
}
|
|
9087
8811
|
else {
|
|
9088
|
-
this.
|
|
8812
|
+
this.disableTracks();
|
|
9089
8813
|
}
|
|
9090
8814
|
}
|
|
9091
8815
|
async unmuteStream() {
|
|
@@ -9095,7 +8819,7 @@ class InputMediaDeviceManager {
|
|
|
9095
8819
|
if (this.state.mediaStream &&
|
|
9096
8820
|
this.getTracks().every((t) => t.readyState === 'live')) {
|
|
9097
8821
|
stream = this.state.mediaStream;
|
|
9098
|
-
this.
|
|
8822
|
+
this.enableTracks();
|
|
9099
8823
|
}
|
|
9100
8824
|
else {
|
|
9101
8825
|
const defaultConstraints = this.state.defaultConstraints;
|
|
@@ -9189,9 +8913,22 @@ class InputMediaDeviceManager {
|
|
|
9189
8913
|
await this.disable();
|
|
9190
8914
|
}
|
|
9191
8915
|
};
|
|
9192
|
-
|
|
8916
|
+
const createTrackMuteHandler = (muted) => () => {
|
|
8917
|
+
this.call.notifyTrackMuteState(muted, this.trackType).catch((err) => {
|
|
8918
|
+
this.logger('warn', 'Error while notifying track mute state', err);
|
|
8919
|
+
});
|
|
8920
|
+
};
|
|
8921
|
+
stream.getTracks().forEach((track) => {
|
|
8922
|
+
const muteHandler = createTrackMuteHandler(true);
|
|
8923
|
+
const unmuteHandler = createTrackMuteHandler(false);
|
|
8924
|
+
track.addEventListener('mute', muteHandler);
|
|
8925
|
+
track.addEventListener('unmute', unmuteHandler);
|
|
9193
8926
|
track.addEventListener('ended', handleTrackEnded);
|
|
9194
|
-
this.subscriptions.push(() =>
|
|
8927
|
+
this.subscriptions.push(() => {
|
|
8928
|
+
track.removeEventListener('mute', muteHandler);
|
|
8929
|
+
track.removeEventListener('unmute', unmuteHandler);
|
|
8930
|
+
track.removeEventListener('ended', handleTrackEnded);
|
|
8931
|
+
});
|
|
9195
8932
|
});
|
|
9196
8933
|
}
|
|
9197
8934
|
}
|
|
@@ -9215,8 +8952,8 @@ class InputMediaDeviceManager {
|
|
|
9215
8952
|
await this.statusChangeSettled();
|
|
9216
8953
|
let isDeviceDisconnected = false;
|
|
9217
8954
|
let isDeviceReplaced = false;
|
|
9218
|
-
const currentDevice = this.
|
|
9219
|
-
const prevDevice = this.
|
|
8955
|
+
const currentDevice = this.findDevice(currentDevices, deviceId);
|
|
8956
|
+
const prevDevice = this.findDevice(prevDevices, deviceId);
|
|
9220
8957
|
if (!currentDevice && prevDevice) {
|
|
9221
8958
|
isDeviceDisconnected = true;
|
|
9222
8959
|
}
|
|
@@ -9246,8 +8983,9 @@ class InputMediaDeviceManager {
|
|
|
9246
8983
|
}
|
|
9247
8984
|
}));
|
|
9248
8985
|
}
|
|
9249
|
-
|
|
9250
|
-
|
|
8986
|
+
findDevice(devices, deviceId) {
|
|
8987
|
+
const kind = this.mediaDeviceKind;
|
|
8988
|
+
return devices.find((d) => d.deviceId === deviceId && d.kind === kind);
|
|
9251
8989
|
}
|
|
9252
8990
|
}
|
|
9253
8991
|
|
|
@@ -9511,14 +9249,35 @@ class CameraManager extends InputMediaDeviceManager {
|
|
|
9511
9249
|
}
|
|
9512
9250
|
}
|
|
9513
9251
|
/**
|
|
9514
|
-
*
|
|
9252
|
+
* Applies the video settings to the camera.
|
|
9515
9253
|
*
|
|
9516
|
-
* @
|
|
9517
|
-
* @
|
|
9518
|
-
* @param codec the codec to use for encoding the video.
|
|
9254
|
+
* @param settings the video settings to apply.
|
|
9255
|
+
* @param publish whether to publish the stream after applying the settings.
|
|
9519
9256
|
*/
|
|
9520
|
-
|
|
9521
|
-
this.call.
|
|
9257
|
+
async apply(settings, publish) {
|
|
9258
|
+
const hasPublishedVideo = !!this.call.state.localParticipant?.videoStream;
|
|
9259
|
+
const hasPermission = this.call.permissionsContext.hasPermission(OwnCapability.SEND_AUDIO);
|
|
9260
|
+
if (hasPublishedVideo || !hasPermission)
|
|
9261
|
+
return;
|
|
9262
|
+
// Wait for any in progress camera operation
|
|
9263
|
+
await this.statusChangeSettled();
|
|
9264
|
+
const { target_resolution, camera_facing, camera_default_on } = settings;
|
|
9265
|
+
await this.selectTargetResolution(target_resolution);
|
|
9266
|
+
// Set camera direction if it's not yet set
|
|
9267
|
+
if (!this.state.direction && !this.state.selectedDevice) {
|
|
9268
|
+
this.state.setDirection(camera_facing === 'front' ? 'front' : 'back');
|
|
9269
|
+
}
|
|
9270
|
+
if (!publish)
|
|
9271
|
+
return;
|
|
9272
|
+
const { mediaStream } = this.state;
|
|
9273
|
+
if (this.enabled && mediaStream) {
|
|
9274
|
+
// The camera is already enabled (e.g. lobby screen). Publish the stream
|
|
9275
|
+
await this.publishStream(mediaStream);
|
|
9276
|
+
}
|
|
9277
|
+
else if (this.state.status === undefined && camera_default_on) {
|
|
9278
|
+
// Start camera if backend config specifies, and there is no local setting
|
|
9279
|
+
await this.enable();
|
|
9280
|
+
}
|
|
9522
9281
|
}
|
|
9523
9282
|
getDevices() {
|
|
9524
9283
|
return getVideoDevices();
|
|
@@ -9536,12 +9295,6 @@ class CameraManager extends InputMediaDeviceManager {
|
|
|
9536
9295
|
}
|
|
9537
9296
|
return getVideoStream(constraints);
|
|
9538
9297
|
}
|
|
9539
|
-
publishStream(stream) {
|
|
9540
|
-
return this.call.publishVideoStream(stream);
|
|
9541
|
-
}
|
|
9542
|
-
stopPublishStream(stopTracks) {
|
|
9543
|
-
return this.call.stopPublish(TrackType.VIDEO, stopTracks);
|
|
9544
|
-
}
|
|
9545
9298
|
}
|
|
9546
9299
|
|
|
9547
9300
|
class MicrophoneManagerState extends InputMediaDeviceManagerState {
|
|
@@ -9869,18 +9622,37 @@ class MicrophoneManager extends InputMediaDeviceManager {
|
|
|
9869
9622
|
this.speakingWhileMutedNotificationEnabled = false;
|
|
9870
9623
|
await this.stopSpeakingWhileMutedDetection();
|
|
9871
9624
|
}
|
|
9625
|
+
/**
|
|
9626
|
+
* Applies the audio settings to the microphone.
|
|
9627
|
+
* @param settings the audio settings to apply.
|
|
9628
|
+
* @param publish whether to publish the stream after applying the settings.
|
|
9629
|
+
*/
|
|
9630
|
+
async apply(settings, publish) {
|
|
9631
|
+
if (!publish)
|
|
9632
|
+
return;
|
|
9633
|
+
const hasPublishedAudio = !!this.call.state.localParticipant?.audioStream;
|
|
9634
|
+
const hasPermission = this.call.permissionsContext.hasPermission(OwnCapability.SEND_AUDIO);
|
|
9635
|
+
if (hasPublishedAudio || !hasPermission)
|
|
9636
|
+
return;
|
|
9637
|
+
// Wait for any in progress mic operation
|
|
9638
|
+
await this.statusChangeSettled();
|
|
9639
|
+
// Publish media stream that was set before we joined
|
|
9640
|
+
const { mediaStream } = this.state;
|
|
9641
|
+
if (this.enabled && mediaStream) {
|
|
9642
|
+
// The mic is already enabled (e.g. lobby screen). Publish the stream
|
|
9643
|
+
await this.publishStream(mediaStream);
|
|
9644
|
+
}
|
|
9645
|
+
else if (this.state.status === undefined && settings.mic_default_on) {
|
|
9646
|
+
// Start mic if backend config specifies, and there is no local setting
|
|
9647
|
+
await this.enable();
|
|
9648
|
+
}
|
|
9649
|
+
}
|
|
9872
9650
|
getDevices() {
|
|
9873
9651
|
return getAudioDevices();
|
|
9874
9652
|
}
|
|
9875
9653
|
getStream(constraints) {
|
|
9876
9654
|
return getAudioStream(constraints);
|
|
9877
9655
|
}
|
|
9878
|
-
publishStream(stream) {
|
|
9879
|
-
return this.call.publishAudioStream(stream);
|
|
9880
|
-
}
|
|
9881
|
-
stopPublishStream(stopTracks) {
|
|
9882
|
-
return this.call.stopPublish(TrackType.AUDIO, stopTracks);
|
|
9883
|
-
}
|
|
9884
9656
|
async startSpeakingWhileMutedDetection(deviceId) {
|
|
9885
9657
|
await withoutConcurrency(this.soundDetectorConcurrencyTag, async () => {
|
|
9886
9658
|
await this.stopSpeakingWhileMutedDetection();
|
|
@@ -10000,7 +9772,7 @@ class ScreenShareManager extends InputMediaDeviceManager {
|
|
|
10000
9772
|
async disableScreenShareAudio() {
|
|
10001
9773
|
this.state.setAudioEnabled(false);
|
|
10002
9774
|
if (this.call.publisher?.isPublishing(TrackType.SCREEN_SHARE_AUDIO)) {
|
|
10003
|
-
await this.call.stopPublish(TrackType.SCREEN_SHARE_AUDIO
|
|
9775
|
+
await this.call.stopPublish(TrackType.SCREEN_SHARE_AUDIO);
|
|
10004
9776
|
}
|
|
10005
9777
|
}
|
|
10006
9778
|
/**
|
|
@@ -10026,12 +9798,8 @@ class ScreenShareManager extends InputMediaDeviceManager {
|
|
|
10026
9798
|
}
|
|
10027
9799
|
return getScreenShareStream(constraints);
|
|
10028
9800
|
}
|
|
10029
|
-
|
|
10030
|
-
return this.call.
|
|
10031
|
-
}
|
|
10032
|
-
async stopPublishStream(stopTracks) {
|
|
10033
|
-
await this.call.stopPublish(TrackType.SCREEN_SHARE, stopTracks);
|
|
10034
|
-
await this.call.stopPublish(TrackType.SCREEN_SHARE_AUDIO, stopTracks);
|
|
9801
|
+
async stopPublishStream() {
|
|
9802
|
+
return this.call.stopPublish(TrackType.SCREEN_SHARE, TrackType.SCREEN_SHARE_AUDIO);
|
|
10035
9803
|
}
|
|
10036
9804
|
/**
|
|
10037
9805
|
* Overrides the default `select` method to throw an error.
|
|
@@ -10241,6 +10009,112 @@ class Call {
|
|
|
10241
10009
|
*/
|
|
10242
10010
|
this.leaveCallHooks = new Set();
|
|
10243
10011
|
this.streamClientEventHandlers = new Map();
|
|
10012
|
+
this.setup = async () => {
|
|
10013
|
+
await withoutConcurrency(this.joinLeaveConcurrencyTag, async () => {
|
|
10014
|
+
if (this.initialized)
|
|
10015
|
+
return;
|
|
10016
|
+
this.leaveCallHooks.add(this.on('all', (event) => {
|
|
10017
|
+
// update state with the latest event data
|
|
10018
|
+
this.state.updateFromEvent(event);
|
|
10019
|
+
}));
|
|
10020
|
+
this.leaveCallHooks.add(this.on('changePublishOptions', (event) => {
|
|
10021
|
+
this.currentPublishOptions = event.publishOptions;
|
|
10022
|
+
}));
|
|
10023
|
+
this.leaveCallHooks.add(registerEventHandlers(this, this.dispatcher));
|
|
10024
|
+
this.registerEffects();
|
|
10025
|
+
this.registerReconnectHandlers();
|
|
10026
|
+
if (this.state.callingState === CallingState.LEFT) {
|
|
10027
|
+
this.state.setCallingState(CallingState.IDLE);
|
|
10028
|
+
}
|
|
10029
|
+
this.initialized = true;
|
|
10030
|
+
});
|
|
10031
|
+
};
|
|
10032
|
+
this.registerEffects = () => {
|
|
10033
|
+
this.leaveCallHooks.add(
|
|
10034
|
+
// handles updating the permissions context when the settings change.
|
|
10035
|
+
createSubscription(this.state.settings$, (settings) => {
|
|
10036
|
+
if (!settings)
|
|
10037
|
+
return;
|
|
10038
|
+
this.permissionsContext.setCallSettings(settings);
|
|
10039
|
+
}));
|
|
10040
|
+
this.leaveCallHooks.add(
|
|
10041
|
+
// handle the case when the user permissions are modified.
|
|
10042
|
+
createSafeAsyncSubscription(this.state.ownCapabilities$, this.handleOwnCapabilitiesUpdated));
|
|
10043
|
+
this.leaveCallHooks.add(
|
|
10044
|
+
// handles the case when the user is blocked by the call owner.
|
|
10045
|
+
createSubscription(this.state.blockedUserIds$, async (blockedUserIds) => {
|
|
10046
|
+
if (!blockedUserIds || blockedUserIds.length === 0)
|
|
10047
|
+
return;
|
|
10048
|
+
const currentUserId = this.currentUserId;
|
|
10049
|
+
if (currentUserId && blockedUserIds.includes(currentUserId)) {
|
|
10050
|
+
this.logger('info', 'Leaving call because of being blocked');
|
|
10051
|
+
await this.leave({ reason: 'user blocked' }).catch((err) => {
|
|
10052
|
+
this.logger('error', 'Error leaving call after being blocked', err);
|
|
10053
|
+
});
|
|
10054
|
+
}
|
|
10055
|
+
}));
|
|
10056
|
+
this.leaveCallHooks.add(
|
|
10057
|
+
// cancel auto-drop when call is
|
|
10058
|
+
createSubscription(this.state.session$, (session) => {
|
|
10059
|
+
if (!this.ringing)
|
|
10060
|
+
return;
|
|
10061
|
+
const receiverId = this.clientStore.connectedUser?.id;
|
|
10062
|
+
if (!receiverId)
|
|
10063
|
+
return;
|
|
10064
|
+
const isAcceptedByMe = Boolean(session?.accepted_by[receiverId]);
|
|
10065
|
+
const isRejectedByMe = Boolean(session?.rejected_by[receiverId]);
|
|
10066
|
+
if (isAcceptedByMe || isRejectedByMe) {
|
|
10067
|
+
this.cancelAutoDrop();
|
|
10068
|
+
}
|
|
10069
|
+
}));
|
|
10070
|
+
this.leaveCallHooks.add(
|
|
10071
|
+
// "ringing" mode effects and event handlers
|
|
10072
|
+
createSubscription(this.ringingSubject, (isRinging) => {
|
|
10073
|
+
if (!isRinging)
|
|
10074
|
+
return;
|
|
10075
|
+
const callSession = this.state.session;
|
|
10076
|
+
const receiver_id = this.clientStore.connectedUser?.id;
|
|
10077
|
+
const ended_at = callSession?.ended_at;
|
|
10078
|
+
const created_by_id = this.state.createdBy?.id;
|
|
10079
|
+
const rejected_by = callSession?.rejected_by;
|
|
10080
|
+
const accepted_by = callSession?.accepted_by;
|
|
10081
|
+
let leaveCallIdle = false;
|
|
10082
|
+
if (ended_at) {
|
|
10083
|
+
// call was ended before it was accepted or rejected so we should leave it to idle
|
|
10084
|
+
leaveCallIdle = true;
|
|
10085
|
+
}
|
|
10086
|
+
else if (created_by_id && rejected_by) {
|
|
10087
|
+
if (rejected_by[created_by_id]) {
|
|
10088
|
+
// call was cancelled by the caller
|
|
10089
|
+
leaveCallIdle = true;
|
|
10090
|
+
}
|
|
10091
|
+
}
|
|
10092
|
+
else if (receiver_id && rejected_by) {
|
|
10093
|
+
if (rejected_by[receiver_id]) {
|
|
10094
|
+
// call was rejected by the receiver in some other device
|
|
10095
|
+
leaveCallIdle = true;
|
|
10096
|
+
}
|
|
10097
|
+
}
|
|
10098
|
+
else if (receiver_id && accepted_by) {
|
|
10099
|
+
if (accepted_by[receiver_id]) {
|
|
10100
|
+
// call was accepted by the receiver in some other device
|
|
10101
|
+
leaveCallIdle = true;
|
|
10102
|
+
}
|
|
10103
|
+
}
|
|
10104
|
+
if (leaveCallIdle) {
|
|
10105
|
+
if (this.state.callingState !== CallingState.IDLE) {
|
|
10106
|
+
this.state.setCallingState(CallingState.IDLE);
|
|
10107
|
+
}
|
|
10108
|
+
}
|
|
10109
|
+
else {
|
|
10110
|
+
if (this.state.callingState === CallingState.IDLE) {
|
|
10111
|
+
this.state.setCallingState(CallingState.RINGING);
|
|
10112
|
+
}
|
|
10113
|
+
this.scheduleAutoDrop();
|
|
10114
|
+
this.leaveCallHooks.add(registerRingingCallEventHandlers(this));
|
|
10115
|
+
}
|
|
10116
|
+
}));
|
|
10117
|
+
};
|
|
10244
10118
|
this.handleOwnCapabilitiesUpdated = async (ownCapabilities) => {
|
|
10245
10119
|
// update the permission context.
|
|
10246
10120
|
this.permissionsContext.setPermissions(ownCapabilities);
|
|
@@ -10353,9 +10227,9 @@ class Call {
|
|
|
10353
10227
|
this.statsReporter = undefined;
|
|
10354
10228
|
this.sfuStatsReporter?.stop();
|
|
10355
10229
|
this.sfuStatsReporter = undefined;
|
|
10356
|
-
this.subscriber?.
|
|
10230
|
+
this.subscriber?.dispose();
|
|
10357
10231
|
this.subscriber = undefined;
|
|
10358
|
-
this.publisher?.
|
|
10232
|
+
this.publisher?.dispose();
|
|
10359
10233
|
this.publisher = undefined;
|
|
10360
10234
|
await this.sfuClient?.leaveAndClose(reason);
|
|
10361
10235
|
this.sfuClient = undefined;
|
|
@@ -10393,7 +10267,8 @@ class Call {
|
|
|
10393
10267
|
// call.ring event excludes the call creator in the members list
|
|
10394
10268
|
// as the creator does not get the ring event
|
|
10395
10269
|
// so update the member list accordingly
|
|
10396
|
-
const
|
|
10270
|
+
const { created_by, settings } = event.call;
|
|
10271
|
+
const creator = this.state.members.find((m) => m.user.id === created_by.id);
|
|
10397
10272
|
if (!creator) {
|
|
10398
10273
|
this.state.setMembers(event.members);
|
|
10399
10274
|
}
|
|
@@ -10408,7 +10283,7 @@ class Call {
|
|
|
10408
10283
|
// const calls = useCalls().filter((c) => c.ringing);
|
|
10409
10284
|
const calls = this.clientStore.calls.filter((c) => c.cid !== this.cid);
|
|
10410
10285
|
this.clientStore.setCalls([this, ...calls]);
|
|
10411
|
-
await this.applyDeviceConfig(false);
|
|
10286
|
+
await this.applyDeviceConfig(settings, false);
|
|
10412
10287
|
};
|
|
10413
10288
|
/**
|
|
10414
10289
|
* Loads the information about the call.
|
|
@@ -10431,7 +10306,7 @@ class Call {
|
|
|
10431
10306
|
this.watching = true;
|
|
10432
10307
|
this.clientStore.registerCall(this);
|
|
10433
10308
|
}
|
|
10434
|
-
await this.applyDeviceConfig(false);
|
|
10309
|
+
await this.applyDeviceConfig(response.call.settings, false);
|
|
10435
10310
|
return response;
|
|
10436
10311
|
};
|
|
10437
10312
|
/**
|
|
@@ -10453,7 +10328,7 @@ class Call {
|
|
|
10453
10328
|
this.watching = true;
|
|
10454
10329
|
this.clientStore.registerCall(this);
|
|
10455
10330
|
}
|
|
10456
|
-
await this.applyDeviceConfig(false);
|
|
10331
|
+
await this.applyDeviceConfig(response.call.settings, false);
|
|
10457
10332
|
return response;
|
|
10458
10333
|
};
|
|
10459
10334
|
/**
|
|
@@ -10555,19 +10430,32 @@ class Call {
|
|
|
10555
10430
|
// we don't need to send JoinRequest if we are re-using an existing healthy SFU client
|
|
10556
10431
|
if (previousSfuClient !== sfuClient) {
|
|
10557
10432
|
// prepare a generic SDP and send it to the SFU.
|
|
10558
|
-
//
|
|
10433
|
+
// these are throw-away SDPs that the SFU will use to determine
|
|
10559
10434
|
// the capabilities of the client (codec support, etc.)
|
|
10560
|
-
const
|
|
10561
|
-
|
|
10435
|
+
const [subscriberSdp, publisherSdp] = await Promise.all([
|
|
10436
|
+
getGenericSdp('recvonly'),
|
|
10437
|
+
getGenericSdp('sendonly'),
|
|
10438
|
+
]);
|
|
10439
|
+
const isReconnecting = this.reconnectStrategy !== WebsocketReconnectStrategy.UNSPECIFIED;
|
|
10440
|
+
const reconnectDetails = isReconnecting
|
|
10562
10441
|
? this.getReconnectDetails(data?.migrating_from, previousSessionId)
|
|
10563
10442
|
: undefined;
|
|
10564
|
-
const
|
|
10565
|
-
|
|
10566
|
-
|
|
10443
|
+
const preferredPublishOptions = !isReconnecting
|
|
10444
|
+
? this.getPreferredPublishOptions()
|
|
10445
|
+
: this.currentPublishOptions || [];
|
|
10446
|
+
const preferredSubscribeOptions = !isReconnecting
|
|
10447
|
+
? this.getPreferredSubscribeOptions()
|
|
10448
|
+
: [];
|
|
10449
|
+
const { callState, fastReconnectDeadlineSeconds, publishOptions } = await sfuClient.join({
|
|
10450
|
+
subscriberSdp,
|
|
10451
|
+
publisherSdp,
|
|
10567
10452
|
clientDetails,
|
|
10568
10453
|
fastReconnect: performingFastReconnect,
|
|
10569
10454
|
reconnectDetails,
|
|
10455
|
+
preferredPublishOptions,
|
|
10456
|
+
preferredSubscribeOptions,
|
|
10570
10457
|
});
|
|
10458
|
+
this.currentPublishOptions = publishOptions;
|
|
10571
10459
|
this.fastReconnectDeadlineSeconds = fastReconnectDeadlineSeconds;
|
|
10572
10460
|
if (callState) {
|
|
10573
10461
|
this.state.updateFromSfuCallState(callState, sfuClient.sessionId, reconnectDetails);
|
|
@@ -10592,17 +10480,13 @@ class Call {
|
|
|
10592
10480
|
connectionConfig,
|
|
10593
10481
|
clientDetails,
|
|
10594
10482
|
statsOptions,
|
|
10483
|
+
publishOptions: this.currentPublishOptions || [],
|
|
10595
10484
|
closePreviousInstances: !performingMigration,
|
|
10596
10485
|
});
|
|
10597
10486
|
}
|
|
10598
10487
|
// make sure we only track connection timing if we are not calling this method as part of a reconnection flow
|
|
10599
10488
|
if (!performingRejoin && !performingFastReconnect && !performingMigration) {
|
|
10600
|
-
this.sfuStatsReporter?.
|
|
10601
|
-
data: {
|
|
10602
|
-
oneofKind: 'connectionTimeSeconds',
|
|
10603
|
-
connectionTimeSeconds: (Date.now() - connectStartTime) / 1000,
|
|
10604
|
-
},
|
|
10605
|
-
});
|
|
10489
|
+
this.sfuStatsReporter?.sendConnectionTime((Date.now() - connectStartTime) / 1000);
|
|
10606
10490
|
}
|
|
10607
10491
|
if (performingRejoin) {
|
|
10608
10492
|
const strategy = WebsocketReconnectStrategy[this.reconnectStrategy];
|
|
@@ -10613,8 +10497,8 @@ class Call {
|
|
|
10613
10497
|
}
|
|
10614
10498
|
// device settings should be applied only once, we don't have to
|
|
10615
10499
|
// re-apply them on later reconnections or server-side data fetches
|
|
10616
|
-
if (!this.deviceSettingsAppliedOnce) {
|
|
10617
|
-
await this.applyDeviceConfig(true);
|
|
10500
|
+
if (!this.deviceSettingsAppliedOnce && this.state.settings) {
|
|
10501
|
+
await this.applyDeviceConfig(this.state.settings, true);
|
|
10618
10502
|
this.deviceSettingsAppliedOnce = true;
|
|
10619
10503
|
}
|
|
10620
10504
|
// We shouldn't persist the `ring` and `notify` state after joining the call
|
|
@@ -10623,6 +10507,8 @@ class Call {
|
|
|
10623
10507
|
// we will spam the other participants with push notifications and `call.ring` events.
|
|
10624
10508
|
delete this.joinCallData?.ring;
|
|
10625
10509
|
delete this.joinCallData?.notify;
|
|
10510
|
+
// reset the reconnect strategy to unspecified after a successful reconnection
|
|
10511
|
+
this.reconnectStrategy = WebsocketReconnectStrategy.UNSPECIFIED;
|
|
10626
10512
|
this.logger('info', `Joined call ${this.cid}`);
|
|
10627
10513
|
};
|
|
10628
10514
|
/**
|
|
@@ -10632,7 +10518,7 @@ class Call {
|
|
|
10632
10518
|
this.getReconnectDetails = (migratingFromSfuId, previousSessionId) => {
|
|
10633
10519
|
const strategy = this.reconnectStrategy;
|
|
10634
10520
|
const performingRejoin = strategy === WebsocketReconnectStrategy.REJOIN;
|
|
10635
|
-
const announcedTracks = this.publisher?.
|
|
10521
|
+
const announcedTracks = this.publisher?.getAnnouncedTracksForReconnect() || [];
|
|
10636
10522
|
return {
|
|
10637
10523
|
strategy,
|
|
10638
10524
|
announcedTracks,
|
|
@@ -10642,6 +10528,54 @@ class Call {
|
|
|
10642
10528
|
previousSessionId: performingRejoin ? previousSessionId || '' : '',
|
|
10643
10529
|
};
|
|
10644
10530
|
};
|
|
10531
|
+
/**
|
|
10532
|
+
* Prepares the preferred codec for the call.
|
|
10533
|
+
* This is an experimental client feature and subject to change.
|
|
10534
|
+
* @internal
|
|
10535
|
+
*/
|
|
10536
|
+
this.getPreferredPublishOptions = () => {
|
|
10537
|
+
const { preferredCodec, fmtpLine, preferredBitrate, maxSimulcastLayers } = this.clientPublishOptions || {};
|
|
10538
|
+
if (!preferredCodec && !preferredBitrate && !maxSimulcastLayers)
|
|
10539
|
+
return [];
|
|
10540
|
+
const codec = preferredCodec
|
|
10541
|
+
? Codec.create({ name: preferredCodec.split('/').pop(), fmtp: fmtpLine })
|
|
10542
|
+
: undefined;
|
|
10543
|
+
const preferredPublishOptions = [
|
|
10544
|
+
PublishOption.create({
|
|
10545
|
+
trackType: TrackType.VIDEO,
|
|
10546
|
+
codec,
|
|
10547
|
+
bitrate: preferredBitrate,
|
|
10548
|
+
maxSpatialLayers: maxSimulcastLayers,
|
|
10549
|
+
}),
|
|
10550
|
+
];
|
|
10551
|
+
const screenShareSettings = this.screenShare.getSettings();
|
|
10552
|
+
if (screenShareSettings) {
|
|
10553
|
+
preferredPublishOptions.push(PublishOption.create({
|
|
10554
|
+
trackType: TrackType.SCREEN_SHARE,
|
|
10555
|
+
fps: screenShareSettings.maxFramerate,
|
|
10556
|
+
bitrate: screenShareSettings.maxBitrate,
|
|
10557
|
+
}));
|
|
10558
|
+
}
|
|
10559
|
+
return preferredPublishOptions;
|
|
10560
|
+
};
|
|
10561
|
+
/**
|
|
10562
|
+
* Prepares the preferred options for subscribing to tracks.
|
|
10563
|
+
* This is an experimental client feature and subject to change.
|
|
10564
|
+
* @internal
|
|
10565
|
+
*/
|
|
10566
|
+
this.getPreferredSubscribeOptions = () => {
|
|
10567
|
+
const { subscriberCodec, subscriberFmtpLine } = this.clientPublishOptions || {};
|
|
10568
|
+
if (!subscriberCodec || !subscriberFmtpLine)
|
|
10569
|
+
return [];
|
|
10570
|
+
return [
|
|
10571
|
+
SubscribeOption.create({
|
|
10572
|
+
trackType: TrackType.VIDEO,
|
|
10573
|
+
codecs: [
|
|
10574
|
+
{ name: subscriberCodec.split('/').pop(), fmtp: subscriberFmtpLine },
|
|
10575
|
+
],
|
|
10576
|
+
}),
|
|
10577
|
+
];
|
|
10578
|
+
};
|
|
10645
10579
|
/**
|
|
10646
10580
|
* Performs an ICE restart on both the Publisher and Subscriber Peer Connections.
|
|
10647
10581
|
* Uses the provided SFU client to restore the ICE connection.
|
|
@@ -10672,9 +10606,9 @@ class Call {
|
|
|
10672
10606
|
* @internal
|
|
10673
10607
|
*/
|
|
10674
10608
|
this.initPublisherAndSubscriber = (opts) => {
|
|
10675
|
-
const { sfuClient, connectionConfig, clientDetails, statsOptions, closePreviousInstances, } = opts;
|
|
10609
|
+
const { sfuClient, connectionConfig, clientDetails, statsOptions, publishOptions, closePreviousInstances, } = opts;
|
|
10676
10610
|
if (closePreviousInstances && this.subscriber) {
|
|
10677
|
-
this.subscriber.
|
|
10611
|
+
this.subscriber.dispose();
|
|
10678
10612
|
}
|
|
10679
10613
|
this.subscriber = new Subscriber({
|
|
10680
10614
|
sfuClient,
|
|
@@ -10693,18 +10627,14 @@ class Call {
|
|
|
10693
10627
|
const isAnonymous = this.streamClient.user?.type === 'anonymous';
|
|
10694
10628
|
if (!isAnonymous) {
|
|
10695
10629
|
if (closePreviousInstances && this.publisher) {
|
|
10696
|
-
this.publisher.
|
|
10630
|
+
this.publisher.dispose();
|
|
10697
10631
|
}
|
|
10698
|
-
const audioSettings = this.state.settings?.audio;
|
|
10699
|
-
const isDtxEnabled = !!audioSettings?.opus_dtx_enabled;
|
|
10700
|
-
const isRedEnabled = !!audioSettings?.redundant_coding_enabled;
|
|
10701
10632
|
this.publisher = new Publisher({
|
|
10702
10633
|
sfuClient,
|
|
10703
10634
|
dispatcher: this.dispatcher,
|
|
10704
10635
|
state: this.state,
|
|
10705
10636
|
connectionConfig,
|
|
10706
|
-
|
|
10707
|
-
isRedEnabled,
|
|
10637
|
+
publishOptions,
|
|
10708
10638
|
logTag: String(this.sfuClientTag),
|
|
10709
10639
|
onUnrecoverableError: () => {
|
|
10710
10640
|
this.reconnect(WebsocketReconnectStrategy.REJOIN).catch((err) => {
|
|
@@ -10851,47 +10781,31 @@ class Call {
|
|
|
10851
10781
|
* @internal
|
|
10852
10782
|
*/
|
|
10853
10783
|
this.reconnectFast = async () => {
|
|
10854
|
-
|
|
10784
|
+
const reconnectStartTime = Date.now();
|
|
10855
10785
|
this.reconnectStrategy = WebsocketReconnectStrategy.FAST;
|
|
10856
10786
|
this.state.setCallingState(CallingState.RECONNECTING);
|
|
10857
10787
|
await this.join(this.joinCallData);
|
|
10858
|
-
this.sfuStatsReporter?.
|
|
10859
|
-
data: {
|
|
10860
|
-
oneofKind: 'reconnection',
|
|
10861
|
-
reconnection: {
|
|
10862
|
-
timeSeconds: (Date.now() - reconnectStartTime) / 1000,
|
|
10863
|
-
strategy: WebsocketReconnectStrategy.FAST,
|
|
10864
|
-
},
|
|
10865
|
-
},
|
|
10866
|
-
});
|
|
10788
|
+
this.sfuStatsReporter?.sendReconnectionTime(WebsocketReconnectStrategy.FAST, (Date.now() - reconnectStartTime) / 1000);
|
|
10867
10789
|
};
|
|
10868
10790
|
/**
|
|
10869
10791
|
* Initiates the reconnection flow with the "rejoin" strategy.
|
|
10870
10792
|
* @internal
|
|
10871
10793
|
*/
|
|
10872
10794
|
this.reconnectRejoin = async () => {
|
|
10873
|
-
|
|
10795
|
+
const reconnectStartTime = Date.now();
|
|
10874
10796
|
this.reconnectStrategy = WebsocketReconnectStrategy.REJOIN;
|
|
10875
10797
|
this.state.setCallingState(CallingState.RECONNECTING);
|
|
10876
10798
|
await this.join(this.joinCallData);
|
|
10877
10799
|
await this.restorePublishedTracks();
|
|
10878
10800
|
this.restoreSubscribedTracks();
|
|
10879
|
-
this.sfuStatsReporter?.
|
|
10880
|
-
data: {
|
|
10881
|
-
oneofKind: 'reconnection',
|
|
10882
|
-
reconnection: {
|
|
10883
|
-
timeSeconds: (Date.now() - reconnectStartTime) / 1000,
|
|
10884
|
-
strategy: WebsocketReconnectStrategy.REJOIN,
|
|
10885
|
-
},
|
|
10886
|
-
},
|
|
10887
|
-
});
|
|
10801
|
+
this.sfuStatsReporter?.sendReconnectionTime(WebsocketReconnectStrategy.REJOIN, (Date.now() - reconnectStartTime) / 1000);
|
|
10888
10802
|
};
|
|
10889
10803
|
/**
|
|
10890
10804
|
* Initiates the reconnection flow with the "migrate" strategy.
|
|
10891
10805
|
* @internal
|
|
10892
10806
|
*/
|
|
10893
10807
|
this.reconnectMigrate = async () => {
|
|
10894
|
-
|
|
10808
|
+
const reconnectStartTime = Date.now();
|
|
10895
10809
|
const currentSfuClient = this.sfuClient;
|
|
10896
10810
|
if (!currentSfuClient) {
|
|
10897
10811
|
throw new Error('Cannot migrate without an active SFU client');
|
|
@@ -10925,20 +10839,12 @@ class Call {
|
|
|
10925
10839
|
this.state.setCallingState(CallingState.JOINED);
|
|
10926
10840
|
}
|
|
10927
10841
|
finally {
|
|
10928
|
-
currentSubscriber?.
|
|
10929
|
-
currentPublisher?.
|
|
10842
|
+
currentSubscriber?.dispose();
|
|
10843
|
+
currentPublisher?.dispose();
|
|
10930
10844
|
// and close the previous SFU client, without specifying close code
|
|
10931
10845
|
currentSfuClient.close();
|
|
10932
10846
|
}
|
|
10933
|
-
this.sfuStatsReporter?.
|
|
10934
|
-
data: {
|
|
10935
|
-
oneofKind: 'reconnection',
|
|
10936
|
-
reconnection: {
|
|
10937
|
-
timeSeconds: (Date.now() - reconnectStartTime) / 1000,
|
|
10938
|
-
strategy: WebsocketReconnectStrategy.MIGRATE,
|
|
10939
|
-
},
|
|
10940
|
-
},
|
|
10941
|
-
});
|
|
10847
|
+
this.sfuStatsReporter?.sendReconnectionTime(WebsocketReconnectStrategy.MIGRATE, (Date.now() - reconnectStartTime) / 1000);
|
|
10942
10848
|
};
|
|
10943
10849
|
/**
|
|
10944
10850
|
* Registers the various event handlers for reconnection.
|
|
@@ -11015,23 +10921,16 @@ class Call {
|
|
|
11015
10921
|
// the tracks need to be restored in their original order of publishing
|
|
11016
10922
|
// otherwise, we might get `m-lines order mismatch` errors
|
|
11017
10923
|
for (const trackType of this.trackPublishOrder) {
|
|
10924
|
+
let mediaStream;
|
|
11018
10925
|
switch (trackType) {
|
|
11019
10926
|
case TrackType.AUDIO:
|
|
11020
|
-
|
|
11021
|
-
if (audioStream) {
|
|
11022
|
-
await this.publishAudioStream(audioStream);
|
|
11023
|
-
}
|
|
10927
|
+
mediaStream = this.microphone.state.mediaStream;
|
|
11024
10928
|
break;
|
|
11025
10929
|
case TrackType.VIDEO:
|
|
11026
|
-
|
|
11027
|
-
if (videoStream)
|
|
11028
|
-
await this.publishVideoStream(videoStream);
|
|
10930
|
+
mediaStream = this.camera.state.mediaStream;
|
|
11029
10931
|
break;
|
|
11030
10932
|
case TrackType.SCREEN_SHARE:
|
|
11031
|
-
|
|
11032
|
-
if (screenShareStream) {
|
|
11033
|
-
await this.publishScreenShareStream(screenShareStream);
|
|
11034
|
-
}
|
|
10933
|
+
mediaStream = this.screenShare.state.mediaStream;
|
|
11035
10934
|
break;
|
|
11036
10935
|
// screen share audio can't exist without a screen share, so we handle it there
|
|
11037
10936
|
case TrackType.SCREEN_SHARE_AUDIO:
|
|
@@ -11041,6 +10940,8 @@ class Call {
|
|
|
11041
10940
|
ensureExhausted(trackType, 'Unknown track type');
|
|
11042
10941
|
break;
|
|
11043
10942
|
}
|
|
10943
|
+
if (mediaStream)
|
|
10944
|
+
await this.publish(mediaStream, trackType);
|
|
11044
10945
|
}
|
|
11045
10946
|
};
|
|
11046
10947
|
/**
|
|
@@ -11055,105 +10956,111 @@ class Call {
|
|
|
11055
10956
|
};
|
|
11056
10957
|
/**
|
|
11057
10958
|
* Starts publishing the given video stream to the call.
|
|
11058
|
-
*
|
|
11059
|
-
*
|
|
11060
|
-
* Consecutive calls to this method will replace the previously published stream.
|
|
11061
|
-
* The previous video stream will be stopped.
|
|
11062
|
-
*
|
|
11063
|
-
* @param videoStream the video stream to publish.
|
|
10959
|
+
* @deprecated use `call.publish()`.
|
|
11064
10960
|
*/
|
|
11065
10961
|
this.publishVideoStream = async (videoStream) => {
|
|
11066
|
-
|
|
11067
|
-
throw new Error(`Call not joined yet.`);
|
|
11068
|
-
// joining is in progress, and we should wait until the client is ready
|
|
11069
|
-
await this.sfuClient.joinTask;
|
|
11070
|
-
if (!this.permissionsContext.hasPermission(OwnCapability.SEND_VIDEO)) {
|
|
11071
|
-
throw new Error('No permission to publish video');
|
|
11072
|
-
}
|
|
11073
|
-
if (!this.publisher)
|
|
11074
|
-
throw new Error('Publisher is not initialized');
|
|
11075
|
-
const [videoTrack] = videoStream.getVideoTracks();
|
|
11076
|
-
if (!videoTrack)
|
|
11077
|
-
throw new Error('There is no video track in the stream');
|
|
11078
|
-
if (!this.trackPublishOrder.includes(TrackType.VIDEO)) {
|
|
11079
|
-
this.trackPublishOrder.push(TrackType.VIDEO);
|
|
11080
|
-
}
|
|
11081
|
-
await this.publisher.publishStream(videoStream, videoTrack, TrackType.VIDEO, this.publishOptions);
|
|
10962
|
+
await this.publish(videoStream, TrackType.VIDEO);
|
|
11082
10963
|
};
|
|
11083
10964
|
/**
|
|
11084
10965
|
* Starts publishing the given audio stream to the call.
|
|
11085
|
-
*
|
|
11086
|
-
*
|
|
11087
|
-
* Consecutive calls to this method will replace the audio stream that is currently being published.
|
|
11088
|
-
* The previous audio stream will be stopped.
|
|
11089
|
-
*
|
|
11090
|
-
* @param audioStream the audio stream to publish.
|
|
10966
|
+
* @deprecated use `call.publish()`
|
|
11091
10967
|
*/
|
|
11092
10968
|
this.publishAudioStream = async (audioStream) => {
|
|
11093
|
-
|
|
11094
|
-
throw new Error(`Call not joined yet.`);
|
|
11095
|
-
// joining is in progress, and we should wait until the client is ready
|
|
11096
|
-
await this.sfuClient.joinTask;
|
|
11097
|
-
if (!this.permissionsContext.hasPermission(OwnCapability.SEND_AUDIO)) {
|
|
11098
|
-
throw new Error('No permission to publish audio');
|
|
11099
|
-
}
|
|
11100
|
-
if (!this.publisher)
|
|
11101
|
-
throw new Error('Publisher is not initialized');
|
|
11102
|
-
const [audioTrack] = audioStream.getAudioTracks();
|
|
11103
|
-
if (!audioTrack)
|
|
11104
|
-
throw new Error('There is no audio track in the stream');
|
|
11105
|
-
if (!this.trackPublishOrder.includes(TrackType.AUDIO)) {
|
|
11106
|
-
this.trackPublishOrder.push(TrackType.AUDIO);
|
|
11107
|
-
}
|
|
11108
|
-
await this.publisher.publishStream(audioStream, audioTrack, TrackType.AUDIO);
|
|
10969
|
+
await this.publish(audioStream, TrackType.AUDIO);
|
|
11109
10970
|
};
|
|
11110
10971
|
/**
|
|
11111
10972
|
* Starts publishing the given screen-share stream to the call.
|
|
11112
|
-
*
|
|
11113
|
-
* Consecutive calls to this method will replace the previous screen-share stream.
|
|
11114
|
-
* The previous screen-share stream will be stopped.
|
|
11115
|
-
*
|
|
11116
|
-
* @param screenShareStream the screen-share stream to publish.
|
|
10973
|
+
* @deprecated use `call.publish()`
|
|
11117
10974
|
*/
|
|
11118
10975
|
this.publishScreenShareStream = async (screenShareStream) => {
|
|
10976
|
+
await this.publish(screenShareStream, TrackType.SCREEN_SHARE);
|
|
10977
|
+
};
|
|
10978
|
+
/**
|
|
10979
|
+
* Publishes the given media stream.
|
|
10980
|
+
*
|
|
10981
|
+
* @param mediaStream the media stream to publish.
|
|
10982
|
+
* @param trackType the type of the track to announce.
|
|
10983
|
+
*/
|
|
10984
|
+
this.publish = async (mediaStream, trackType) => {
|
|
11119
10985
|
if (!this.sfuClient)
|
|
11120
10986
|
throw new Error(`Call not joined yet.`);
|
|
11121
10987
|
// joining is in progress, and we should wait until the client is ready
|
|
11122
10988
|
await this.sfuClient.joinTask;
|
|
11123
|
-
if (!this.permissionsContext.
|
|
11124
|
-
throw new Error(
|
|
10989
|
+
if (!this.permissionsContext.canPublish(trackType)) {
|
|
10990
|
+
throw new Error(`No permission to publish ${TrackType[trackType]}`);
|
|
11125
10991
|
}
|
|
11126
10992
|
if (!this.publisher)
|
|
11127
10993
|
throw new Error('Publisher is not initialized');
|
|
11128
|
-
const [
|
|
11129
|
-
|
|
11130
|
-
|
|
10994
|
+
const [track] = isAudioTrackType(trackType)
|
|
10995
|
+
? mediaStream.getAudioTracks()
|
|
10996
|
+
: mediaStream.getVideoTracks();
|
|
10997
|
+
if (!track) {
|
|
10998
|
+
throw new Error(`There is no ${TrackType[trackType]} track in the stream`);
|
|
11131
10999
|
}
|
|
11132
|
-
if (
|
|
11133
|
-
|
|
11134
|
-
}
|
|
11135
|
-
|
|
11136
|
-
|
|
11137
|
-
|
|
11138
|
-
|
|
11139
|
-
|
|
11140
|
-
|
|
11141
|
-
|
|
11142
|
-
this.
|
|
11000
|
+
if (track.readyState === 'ended') {
|
|
11001
|
+
throw new Error(`Can't publish ended tracks.`);
|
|
11002
|
+
}
|
|
11003
|
+
pushToIfMissing(this.trackPublishOrder, trackType);
|
|
11004
|
+
await this.publisher.publish(track, trackType);
|
|
11005
|
+
const trackTypes = [trackType];
|
|
11006
|
+
if (trackType === TrackType.SCREEN_SHARE) {
|
|
11007
|
+
const [audioTrack] = mediaStream.getAudioTracks();
|
|
11008
|
+
if (audioTrack) {
|
|
11009
|
+
pushToIfMissing(this.trackPublishOrder, TrackType.SCREEN_SHARE_AUDIO);
|
|
11010
|
+
await this.publisher.publish(audioTrack, TrackType.SCREEN_SHARE_AUDIO);
|
|
11011
|
+
trackTypes.push(TrackType.SCREEN_SHARE_AUDIO);
|
|
11143
11012
|
}
|
|
11144
|
-
await this.publisher.publishStream(screenShareStream, screenShareAudioTrack, TrackType.SCREEN_SHARE_AUDIO, opts);
|
|
11145
11013
|
}
|
|
11014
|
+
await this.updateLocalStreamState(mediaStream, ...trackTypes);
|
|
11146
11015
|
};
|
|
11147
11016
|
/**
|
|
11148
11017
|
* Stops publishing the given track type to the call, if it is currently being published.
|
|
11149
|
-
* Underlying track will be stopped and removed from the publisher.
|
|
11150
11018
|
*
|
|
11151
|
-
* @param
|
|
11152
|
-
|
|
11019
|
+
* @param trackTypes the track types to stop publishing.
|
|
11020
|
+
*/
|
|
11021
|
+
this.stopPublish = async (...trackTypes) => {
|
|
11022
|
+
if (!this.sfuClient || !this.publisher)
|
|
11023
|
+
return;
|
|
11024
|
+
this.publisher.stopTracks(...trackTypes);
|
|
11025
|
+
await this.updateLocalStreamState(undefined, ...trackTypes);
|
|
11026
|
+
};
|
|
11027
|
+
/**
|
|
11028
|
+
* Updates the call state with the new stream.
|
|
11029
|
+
*
|
|
11030
|
+
* @param mediaStream the new stream to update the call state with.
|
|
11031
|
+
* If undefined, the stream will be removed from the call state.
|
|
11032
|
+
* @param trackTypes the track types to update the call state with.
|
|
11033
|
+
*/
|
|
11034
|
+
this.updateLocalStreamState = async (mediaStream, ...trackTypes) => {
|
|
11035
|
+
if (!this.sfuClient || !this.sfuClient.sessionId)
|
|
11036
|
+
return;
|
|
11037
|
+
await this.notifyTrackMuteState(!mediaStream, ...trackTypes);
|
|
11038
|
+
const { sessionId } = this.sfuClient;
|
|
11039
|
+
for (const trackType of trackTypes) {
|
|
11040
|
+
const streamStateProp = trackTypeToParticipantStreamKey(trackType);
|
|
11041
|
+
if (!streamStateProp)
|
|
11042
|
+
continue;
|
|
11043
|
+
this.state.updateParticipant(sessionId, (p) => ({
|
|
11044
|
+
publishedTracks: mediaStream
|
|
11045
|
+
? pushToIfMissing([...p.publishedTracks], trackType)
|
|
11046
|
+
: p.publishedTracks.filter((t) => t !== trackType),
|
|
11047
|
+
[streamStateProp]: mediaStream,
|
|
11048
|
+
}));
|
|
11049
|
+
}
|
|
11050
|
+
};
|
|
11051
|
+
/**
|
|
11052
|
+
* Updates the preferred publishing options
|
|
11053
|
+
*
|
|
11054
|
+
* @internal
|
|
11055
|
+
* @param options the options to use.
|
|
11153
11056
|
*/
|
|
11154
|
-
this.
|
|
11155
|
-
this.logger('
|
|
11156
|
-
|
|
11057
|
+
this.updatePublishOptions = (options) => {
|
|
11058
|
+
this.logger('warn', '[call.updatePublishOptions]: You are manually overriding the publish options for this call. ' +
|
|
11059
|
+
'This is not recommended, and it can cause call stability/compatibility issues. Use with caution.');
|
|
11060
|
+
if (this.state.callingState === CallingState.JOINED) {
|
|
11061
|
+
this.logger('warn', 'Updating publish options after joining the call does not have an effect');
|
|
11062
|
+
}
|
|
11063
|
+
this.clientPublishOptions = { ...this.clientPublishOptions, ...options };
|
|
11157
11064
|
};
|
|
11158
11065
|
/**
|
|
11159
11066
|
* Notifies the SFU that a noise cancellation process has started.
|
|
@@ -11175,6 +11082,15 @@ class Call {
|
|
|
11175
11082
|
this.logger('warn', 'Failed to notify stop of noise cancellation', err);
|
|
11176
11083
|
});
|
|
11177
11084
|
};
|
|
11085
|
+
/**
|
|
11086
|
+
* Notifies the SFU about the mute state of the given track types.
|
|
11087
|
+
* @internal
|
|
11088
|
+
*/
|
|
11089
|
+
this.notifyTrackMuteState = async (muted, ...trackTypes) => {
|
|
11090
|
+
if (!this.sfuClient)
|
|
11091
|
+
return;
|
|
11092
|
+
await this.sfuClient.updateMuteStates(trackTypes.map((trackType) => ({ trackType, muted })));
|
|
11093
|
+
};
|
|
11178
11094
|
/**
|
|
11179
11095
|
* Will enhance the reported stats with additional participant-specific information (`callStatsReport$` state [store variable](./StreamVideoClient.md/#readonlystatestore)).
|
|
11180
11096
|
* This is usually helpful when detailed stats for a specific participant are needed.
|
|
@@ -11638,70 +11554,14 @@ class Call {
|
|
|
11638
11554
|
*
|
|
11639
11555
|
* @internal
|
|
11640
11556
|
*/
|
|
11641
|
-
this.applyDeviceConfig = async (
|
|
11642
|
-
await this.
|
|
11557
|
+
this.applyDeviceConfig = async (settings, publish) => {
|
|
11558
|
+
await this.camera.apply(settings.video, publish).catch((err) => {
|
|
11643
11559
|
this.logger('warn', 'Camera init failed', err);
|
|
11644
11560
|
});
|
|
11645
|
-
await this.
|
|
11561
|
+
await this.microphone.apply(settings.audio, publish).catch((err) => {
|
|
11646
11562
|
this.logger('warn', 'Mic init failed', err);
|
|
11647
11563
|
});
|
|
11648
11564
|
};
|
|
11649
|
-
this.initCamera = async (options) => {
|
|
11650
|
-
// Wait for any in progress camera operation
|
|
11651
|
-
await this.camera.statusChangeSettled();
|
|
11652
|
-
if (this.state.localParticipant?.videoStream ||
|
|
11653
|
-
!this.permissionsContext.hasPermission('send-video')) {
|
|
11654
|
-
return;
|
|
11655
|
-
}
|
|
11656
|
-
// Set camera direction if it's not yet set
|
|
11657
|
-
if (!this.camera.state.direction && !this.camera.state.selectedDevice) {
|
|
11658
|
-
let defaultDirection = 'front';
|
|
11659
|
-
const backendSetting = this.state.settings?.video.camera_facing;
|
|
11660
|
-
if (backendSetting) {
|
|
11661
|
-
defaultDirection = backendSetting === 'front' ? 'front' : 'back';
|
|
11662
|
-
}
|
|
11663
|
-
this.camera.state.setDirection(defaultDirection);
|
|
11664
|
-
}
|
|
11665
|
-
// Set target resolution
|
|
11666
|
-
const targetResolution = this.state.settings?.video.target_resolution;
|
|
11667
|
-
if (targetResolution) {
|
|
11668
|
-
await this.camera.selectTargetResolution(targetResolution);
|
|
11669
|
-
}
|
|
11670
|
-
if (options.setStatus) {
|
|
11671
|
-
// Publish already that was set before we joined
|
|
11672
|
-
if (this.camera.enabled &&
|
|
11673
|
-
this.camera.state.mediaStream &&
|
|
11674
|
-
!this.publisher?.isPublishing(TrackType.VIDEO)) {
|
|
11675
|
-
await this.publishVideoStream(this.camera.state.mediaStream);
|
|
11676
|
-
}
|
|
11677
|
-
// Start camera if backend config specifies, and there is no local setting
|
|
11678
|
-
if (this.camera.state.status === undefined &&
|
|
11679
|
-
this.state.settings?.video.camera_default_on) {
|
|
11680
|
-
await this.camera.enable();
|
|
11681
|
-
}
|
|
11682
|
-
}
|
|
11683
|
-
};
|
|
11684
|
-
this.initMic = async (options) => {
|
|
11685
|
-
// Wait for any in progress mic operation
|
|
11686
|
-
await this.microphone.statusChangeSettled();
|
|
11687
|
-
if (this.state.localParticipant?.audioStream ||
|
|
11688
|
-
!this.permissionsContext.hasPermission('send-audio')) {
|
|
11689
|
-
return;
|
|
11690
|
-
}
|
|
11691
|
-
if (options.setStatus) {
|
|
11692
|
-
// Publish media stream that was set before we joined
|
|
11693
|
-
if (this.microphone.enabled &&
|
|
11694
|
-
this.microphone.state.mediaStream &&
|
|
11695
|
-
!this.publisher?.isPublishing(TrackType.AUDIO)) {
|
|
11696
|
-
await this.publishAudioStream(this.microphone.state.mediaStream);
|
|
11697
|
-
}
|
|
11698
|
-
// Start mic if backend config specifies, and there is no local setting
|
|
11699
|
-
if (this.microphone.state.status === undefined &&
|
|
11700
|
-
this.state.settings?.audio.mic_default_on) {
|
|
11701
|
-
await this.microphone.enable();
|
|
11702
|
-
}
|
|
11703
|
-
}
|
|
11704
|
-
};
|
|
11705
11565
|
/**
|
|
11706
11566
|
* Will begin tracking the given element for visibility changes within the
|
|
11707
11567
|
* configured viewport element (`call.setViewport`).
|
|
@@ -11850,109 +11710,6 @@ class Call {
|
|
|
11850
11710
|
this.screenShare = new ScreenShareManager(this);
|
|
11851
11711
|
this.dynascaleManager = new DynascaleManager(this.state, this.speaker);
|
|
11852
11712
|
}
|
|
11853
|
-
async setup() {
|
|
11854
|
-
await withoutConcurrency(this.joinLeaveConcurrencyTag, async () => {
|
|
11855
|
-
if (this.initialized)
|
|
11856
|
-
return;
|
|
11857
|
-
this.leaveCallHooks.add(this.on('all', (event) => {
|
|
11858
|
-
// update state with the latest event data
|
|
11859
|
-
this.state.updateFromEvent(event);
|
|
11860
|
-
}));
|
|
11861
|
-
this.leaveCallHooks.add(registerEventHandlers(this, this.dispatcher));
|
|
11862
|
-
this.registerEffects();
|
|
11863
|
-
this.registerReconnectHandlers();
|
|
11864
|
-
if (this.state.callingState === CallingState.LEFT) {
|
|
11865
|
-
this.state.setCallingState(CallingState.IDLE);
|
|
11866
|
-
}
|
|
11867
|
-
this.initialized = true;
|
|
11868
|
-
});
|
|
11869
|
-
}
|
|
11870
|
-
registerEffects() {
|
|
11871
|
-
this.leaveCallHooks.add(
|
|
11872
|
-
// handles updating the permissions context when the settings change.
|
|
11873
|
-
createSubscription(this.state.settings$, (settings) => {
|
|
11874
|
-
if (!settings)
|
|
11875
|
-
return;
|
|
11876
|
-
this.permissionsContext.setCallSettings(settings);
|
|
11877
|
-
}));
|
|
11878
|
-
this.leaveCallHooks.add(
|
|
11879
|
-
// handle the case when the user permissions are modified.
|
|
11880
|
-
createSafeAsyncSubscription(this.state.ownCapabilities$, this.handleOwnCapabilitiesUpdated));
|
|
11881
|
-
this.leaveCallHooks.add(
|
|
11882
|
-
// handles the case when the user is blocked by the call owner.
|
|
11883
|
-
createSubscription(this.state.blockedUserIds$, async (blockedUserIds) => {
|
|
11884
|
-
if (!blockedUserIds || blockedUserIds.length === 0)
|
|
11885
|
-
return;
|
|
11886
|
-
const currentUserId = this.currentUserId;
|
|
11887
|
-
if (currentUserId && blockedUserIds.includes(currentUserId)) {
|
|
11888
|
-
this.logger('info', 'Leaving call because of being blocked');
|
|
11889
|
-
await this.leave({ reason: 'user blocked' }).catch((err) => {
|
|
11890
|
-
this.logger('error', 'Error leaving call after being blocked', err);
|
|
11891
|
-
});
|
|
11892
|
-
}
|
|
11893
|
-
}));
|
|
11894
|
-
this.leaveCallHooks.add(
|
|
11895
|
-
// cancel auto-drop when call is
|
|
11896
|
-
createSubscription(this.state.session$, (session) => {
|
|
11897
|
-
if (!this.ringing)
|
|
11898
|
-
return;
|
|
11899
|
-
const receiverId = this.clientStore.connectedUser?.id;
|
|
11900
|
-
if (!receiverId)
|
|
11901
|
-
return;
|
|
11902
|
-
const isAcceptedByMe = Boolean(session?.accepted_by[receiverId]);
|
|
11903
|
-
const isRejectedByMe = Boolean(session?.rejected_by[receiverId]);
|
|
11904
|
-
if (isAcceptedByMe || isRejectedByMe) {
|
|
11905
|
-
this.cancelAutoDrop();
|
|
11906
|
-
}
|
|
11907
|
-
}));
|
|
11908
|
-
this.leaveCallHooks.add(
|
|
11909
|
-
// "ringing" mode effects and event handlers
|
|
11910
|
-
createSubscription(this.ringingSubject, (isRinging) => {
|
|
11911
|
-
if (!isRinging)
|
|
11912
|
-
return;
|
|
11913
|
-
const callSession = this.state.session;
|
|
11914
|
-
const receiver_id = this.clientStore.connectedUser?.id;
|
|
11915
|
-
const ended_at = callSession?.ended_at;
|
|
11916
|
-
const created_by_id = this.state.createdBy?.id;
|
|
11917
|
-
const rejected_by = callSession?.rejected_by;
|
|
11918
|
-
const accepted_by = callSession?.accepted_by;
|
|
11919
|
-
let leaveCallIdle = false;
|
|
11920
|
-
if (ended_at) {
|
|
11921
|
-
// call was ended before it was accepted or rejected so we should leave it to idle
|
|
11922
|
-
leaveCallIdle = true;
|
|
11923
|
-
}
|
|
11924
|
-
else if (created_by_id && rejected_by) {
|
|
11925
|
-
if (rejected_by[created_by_id]) {
|
|
11926
|
-
// call was cancelled by the caller
|
|
11927
|
-
leaveCallIdle = true;
|
|
11928
|
-
}
|
|
11929
|
-
}
|
|
11930
|
-
else if (receiver_id && rejected_by) {
|
|
11931
|
-
if (rejected_by[receiver_id]) {
|
|
11932
|
-
// call was rejected by the receiver in some other device
|
|
11933
|
-
leaveCallIdle = true;
|
|
11934
|
-
}
|
|
11935
|
-
}
|
|
11936
|
-
else if (receiver_id && accepted_by) {
|
|
11937
|
-
if (accepted_by[receiver_id]) {
|
|
11938
|
-
// call was accepted by the receiver in some other device
|
|
11939
|
-
leaveCallIdle = true;
|
|
11940
|
-
}
|
|
11941
|
-
}
|
|
11942
|
-
if (leaveCallIdle) {
|
|
11943
|
-
if (this.state.callingState !== CallingState.IDLE) {
|
|
11944
|
-
this.state.setCallingState(CallingState.IDLE);
|
|
11945
|
-
}
|
|
11946
|
-
}
|
|
11947
|
-
else {
|
|
11948
|
-
if (this.state.callingState === CallingState.IDLE) {
|
|
11949
|
-
this.state.setCallingState(CallingState.RINGING);
|
|
11950
|
-
}
|
|
11951
|
-
this.scheduleAutoDrop();
|
|
11952
|
-
this.leaveCallHooks.add(registerRingingCallEventHandlers(this));
|
|
11953
|
-
}
|
|
11954
|
-
}));
|
|
11955
|
-
}
|
|
11956
11713
|
/**
|
|
11957
11714
|
* A flag indicating whether the call is "ringing" type of call.
|
|
11958
11715
|
*/
|
|
@@ -11971,15 +11728,6 @@ class Call {
|
|
|
11971
11728
|
get isCreatedByMe() {
|
|
11972
11729
|
return this.state.createdBy?.id === this.currentUserId;
|
|
11973
11730
|
}
|
|
11974
|
-
/**
|
|
11975
|
-
* Updates the preferred publishing options
|
|
11976
|
-
*
|
|
11977
|
-
* @internal
|
|
11978
|
-
* @param options the options to use.
|
|
11979
|
-
*/
|
|
11980
|
-
updatePublishOptions(options) {
|
|
11981
|
-
this.publishOptions = { ...this.publishOptions, ...options };
|
|
11982
|
-
}
|
|
11983
11731
|
}
|
|
11984
11732
|
|
|
11985
11733
|
/**
|
|
@@ -13087,7 +12835,7 @@ class StreamClient {
|
|
|
13087
12835
|
return await this.wsConnection.connect(this.defaultWSTimeout);
|
|
13088
12836
|
};
|
|
13089
12837
|
this.getUserAgent = () => {
|
|
13090
|
-
const version = "1.
|
|
12838
|
+
const version = "1.15.0";
|
|
13091
12839
|
return (this.userAgent ||
|
|
13092
12840
|
`stream-video-javascript-client-${this.node ? 'node' : 'browser'}-${version}`);
|
|
13093
12841
|
};
|
|
@@ -13387,7 +13135,7 @@ class StreamVideoClient {
|
|
|
13387
13135
|
clientStore: this.writeableStateStore,
|
|
13388
13136
|
});
|
|
13389
13137
|
call.state.updateFromCallResponse(c.call);
|
|
13390
|
-
await call.applyDeviceConfig(false);
|
|
13138
|
+
await call.applyDeviceConfig(c.call.settings, false);
|
|
13391
13139
|
if (data.watch) {
|
|
13392
13140
|
this.writeableStateStore.registerCall(call);
|
|
13393
13141
|
}
|