@opentok/client 2.29.2-alpha.1 → 2.29.2-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/js/opentok.js +916 -876
- package/dist/js/opentok.js.map +1 -1
- package/dist/js/opentok.min.js +5 -5
- package/dist/js/opentok.min.js.map +1 -1
- package/package.json +1 -1
package/dist/js/opentok.js
CHANGED
@@ -1,11 +1,11 @@
 /**
-* @license OpenTok.js 2.29.2
+* @license OpenTok.js 2.29.2 a5982cc
 *
 * Copyright (c) 2010-2025 TokBox, Inc.
 * Subject to the applicable Software Development Kit (SDK) License Agreement:
 * https://www.vonage.com/legal/communications-apis/terms-of-use/
 *
-* Date: Fri, 21 Feb 2025
+* Date: Fri, 21 Feb 2025 21:03:53 GMT
 */

 (function webpackUniversalModuleDefinition(root, factory) {
@@ -8291,7 +8291,7 @@ const logging = (0, _log.default)('StaticConfig');
 */

 /** @type builtInConfig */
-const builtInConfig = (0, _cloneDeep.default)({"version":"v2.29.2","buildHash":"
+const builtInConfig = (0, _cloneDeep.default)({"version":"v2.29.2","buildHash":"a5982cc","minimumVersion":{"firefox":52,"chrome":49},"debug":false,"websiteURL":"http://www.tokbox.com","configURL":"https://config.opentok.com","ipWhitelistConfigURL":"","cdnURL":"","loggingURL":"https://hlg.tokbox.com/prod","apiURL":"https://anvil.opentok.com"});
 const whitelistAllowedRuntimeProperties = (0, _pick.default)(['apiURL', 'assetURL', 'cdnURL', 'sessionInfoOverrides', 'loggingURL']);
 const liveConfigMap = {
 apiUrl: 'apiURL',
@@ -18449,153 +18449,131 @@ function PublisherFactory(_ref) {
 remoteConnectionId
 });
 };
-const
-
-
-
-
+const handleDisconnect = (peerConnection, sourceStreamId) => {
+setTimeout(() => {
+if (lastIceConnectionStates[sourceStreamId] === 'disconnected' && _session._.isSocketConnected()) {
+const _getPeerConnectionMet5 = getPeerConnectionMeta(peerConnection),
+remoteConnectionId = _getPeerConnectionMet5.remoteConnectionId;
+logRepublish('Attempt', {
+remoteConnectionId
+});
+peerConnection.iceRestart();
+}
+}, 2000);
+};
+const handleConnect = /*#__PURE__*/function () {
+var _ref10 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee4() {
+var _yield$getMantisPeerC, _yield$getP2pPeerConn, isMantisConnected, isP2PConnected;
+return _regenerator.default.wrap(function _callee4$(_context4) {
+while (1) switch (_context4.prev = _context4.next) {
 case 0:
-_this$session$session = _this.session.sessionInfo, isAdaptiveEnabled = _this$session$session.isAdaptiveEnabled, p2pEnabled = _this$session$session.p2pEnabled;
-sourceStreamId = peerConnection.getSourceStreamId();
-lastIceConnectionStates[sourceStreamId] = newState;
-if (newState === 'disconnected') {
-setTimeout(() => {
-const isSocketReconnecting = _this.session._.isSocketReconnecting;
-const socket = _this.session._.getSocket();
-const isSocketConnected = socket.is('connected') && !isSocketReconnecting();
-if (lastIceConnectionStates[sourceStreamId] === 'disconnected' && isSocketConnected) {
-const _getPeerConnectionMet5 = getPeerConnectionMeta(peerConnection),
-remoteConnectionId = _getPeerConnectionMet5.remoteConnectionId;
-logRepublish('Attempt', {
-remoteConnectionId
-});
-peerConnection.iceRestart();
-}
-}, 2000);
-}
-if (!(newState === 'connected')) {
-_context6.next = 26;
-break;
-}
 clearTimeout(_streamDestroyTimeout);
-if (!isAdaptiveEnabled) {
-
+if (!_this.session.sessionInfo.isAdaptiveEnabled) {
+_context4.next = 21;
 break;
 }
-
+_context4.next = 4;
 return getMantisPeerConnection();
-case
-
-if (!(
-
+case 4:
+_context4.t0 = _yield$getMantisPeerC = _context4.sent;
+if (!(_context4.t0 == null)) {
+_context4.next = 9;
 break;
 }
-
-
+_context4.t1 = void 0;
+_context4.next = 10;
 break;
-case
-
-case
-isMantisConnected =
-
+case 9:
+_context4.t1 = _yield$getMantisPeerC.iceConnectionStateIsConnected();
+case 10:
+isMantisConnected = _context4.t1;
+_context4.next = 13;
 return getP2pPeerConnection();
-case
-
-if (!(
-
+case 13:
+_context4.t2 = _yield$getP2pPeerConn = _context4.sent;
+if (!(_context4.t2 == null)) {
+_context4.next = 18;
 break;
 }
-
-
+_context4.t3 = void 0;
+_context4.next = 19;
 break;
-case
-
-case
-isP2PConnected =
+case 18:
+_context4.t3 = _yield$getP2pPeerConn.iceConnectionStateIsConnected();
+case 19:
+isP2PConnected = _context4.t3;
 if (isMantisConnected && isP2PConnected) {
 _stopSendingRtpToMantis();
 }
-case
-
-
-
-
-
-
-
-
-
+case 21:
+case "end":
+return _context4.stop();
+}
+}, _callee4);
+}));
+return function handleConnect() {
+return _ref10.apply(this, arguments);
+};
+}();
+const shouldDestroyPublisher = sourceStreamId => {
+const p2pEnabled = this.session.sessionInfo.p2pEnabled;
+// We destroy the publisher if:
+// Socket is connected. Otherwise we will try to reconnect once socket reconnects.
+return _session._.isSocketConnected()
+// And this is the active leg. Inactive leg fail independently.
+&& sourceStreamId === activeSourceStreamId
+// And it is not P2P. In P2P the other peer can be reconnecting.
+// If we don't detroy the Publisher, this will be destroyed once Rumors says so.
+&& !p2pEnabled;
+};
+const handleFail = sourceStreamId => {
+if (!shouldDestroyPublisher(sourceStreamId)) {
+return;
+}
+const isAdaptiveEnabled = this.session.sessionInfo.isAdaptiveEnabled;
+if (isAdaptiveEnabled && sourceStreamId === 'P2P') {
+// In adaptive if P2P PC has failed and the socket is connected we will transition to Mantis
+this._.startRelayedToRoutedTransition();
+} else {
+// Instead of destroying the publisher straight away, we will destroy it after 5 secs
+// in order to avoid a race condition where we just got the socket connected at the
+// same moment PC transition to failed
+_streamDestroyTimeout = setTimeout(() => {
+this.session._.streamDestroy(this.streamId, sourceStreamId);
+}, STREAM_DESTROY_DELAY);
+}
+};
+const onIceConnectionStateChange = /*#__PURE__*/function () {
+var _ref11 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee5(newState, peerConnection) {
+var sourceStreamId;
+return _regenerator.default.wrap(function _callee5$(_context5) {
+while (1) switch (_context5.prev = _context5.next) {
+case 0:
+sourceStreamId = peerConnection.getSourceStreamId();
+lastIceConnectionStates[sourceStreamId] = newState;
+if (!(newState === 'connected')) {
+_context5.next = 7;
 break;
 }
-
-
-
-
-
-
-
-
-
-
-var pendingPeerConnections;
-return _regenerator.default.wrap(function _callee4$(_context4) {
-while (1) switch (_context4.prev = _context4.next) {
-case 0:
-_context4.next = 2;
-return arePeerConnectionsAlive();
-case 2:
-pendingPeerConnections = _context4.sent;
-if (!pendingPeerConnections) {
-_this.session._.streamDestroy(_this.streamId, sourceStreamId);
-}
-case 4:
-case "end":
-return _context4.stop();
-}
-}, _callee4);
-})), STREAM_DESTROY_DELAY);
-};
-if (p2pEnabled) {
-// In P2P destroy the Publisher if there are no subscribers to it.
-_getPeerConnectionMet6 = getPeerConnectionMeta(peerConnection), remoteSubscriberId = _getPeerConnectionMet6.remoteSubscriberId;
-_this._removeSubscriber(remoteSubscriberId);
-arePeerConnectionsAlive = /*#__PURE__*/function () {
-var _ref12 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee5() {
-var peerConnections;
-return _regenerator.default.wrap(function _callee5$(_context5) {
-while (1) switch (_context5.prev = _context5.next) {
-case 0:
-_context5.next = 2;
-return getAllPeerConnections();
-case 2:
-peerConnections = _context5.sent;
-return _context5.abrupt("return", peerConnections.length !== 0);
-case 4:
-case "end":
-return _context5.stop();
-}
-}, _callee5);
-}));
-return function arePeerConnectionsAlive() {
-return _ref12.apply(this, arguments);
-};
-}();
-destroyStream(arePeerConnectionsAlive);
-} else if (isAdaptiveEnabled && sourceStreamId === 'P2P') {
-// In adaptive if P2P PC has failed and the socket is connected we will transition to Mantis
-_this._.startRelayedToRoutedTransition();
-} else {
-// If Mantis PC fails, then nothing else to do.
-destroyStream();
+_context5.next = 5;
+return handleConnect();
+case 5:
+_context5.next = 8;
+break;
+case 7:
+if (newState === 'disconnected') {
+handleDisconnect(peerConnection, sourceStreamId);
+} else if (newState === 'failed') {
+handleFail(sourceStreamId);
 }
-case
+case 8:
 case "end":
-return
+return _context5.stop();
 }
-},
+}, _callee5);
 }));
 return function onIceConnectionStateChange(_x4, _x5) {
-return
+return _ref11.apply(this, arguments);
 };
 }();
 const onPeerConnected = peerConnection => {
@@ -18642,12 +18620,12 @@ function PublisherFactory(_ref) {
 * @param {string} configuration.peerConnectionId
 * @returns {Promise<Error, PublisherPeerConnection>}
 */
-const createPeerConnection =
-let peerConnectionId =
-send =
-log =
-logQoS =
-sourceStreamId =
+const createPeerConnection = _ref12 => {
+let peerConnectionId = _ref12.peerConnectionId,
+send = _ref12.send,
+log = _ref12.log,
+logQoS = _ref12.logQoS,
+sourceStreamId = _ref12.sourceStreamId;
 if (getPeerConnectionById(peerConnectionId)) {
 return Promise.reject(new Error('PeerConnection already exists'));
 }
@@ -18664,9 +18642,9 @@ function PublisherFactory(_ref) {
 capableSimulcastScreenshare: properties.capableSimulcastScreenshare,
 scalableVideo: properties.scalableVideo
 });
-peerConnectionsAsync[peerConnectionId] = Promise.all([this.session._.getIceConfig(), this.session._.getVideoCodecsCompatible(webRTCStream)]).then(
-let iceConfig =
-videoCodecsCompatible =
+peerConnectionsAsync[peerConnectionId] = Promise.all([this.session._.getIceConfig(), this.session._.getVideoCodecsCompatible(webRTCStream)]).then(_ref13 => {
+let iceConfig = _ref13[0],
+videoCodecsCompatible = _ref13[1];
 let pcStream = webRTCStream;
 if (!videoCodecsCompatible) {
 pcStream = webRTCStream.clone();
@@ -18718,9 +18696,9 @@ function PublisherFactory(_ref) {
 });
 peerConnection.on({
 disconnected: () => onPeerDisconnected(peerConnection),
-error:
-let reason =
-prefix =
+error: _ref14 => {
+let reason = _ref14.reason,
+prefix = _ref14.prefix;
 return onPeerConnectionFailure(peerConnection, {
 reason,
 prefix
@@ -18901,12 +18879,12 @@ function PublisherFactory(_ref) {
 }))))).then(pcsAndStats => {
 // @todo this publishStartTime is going to be so wrong in P2P
 const startTimestamp = publishStartTime ? publishStartTime.getTime() : Date.now();
-const results = pcsAndStats.map(
-let pc =
-stats =
-const
-remoteConnectionId =
-remoteSubscriberId =
+const results = pcsAndStats.map(_ref15 => {
+let pc = _ref15.pc,
+stats = _ref15.stats;
+const _getPeerConnectionMet6 = getPeerConnectionMeta(pc),
+remoteConnectionId = _getPeerConnectionMet6.remoteConnectionId,
+remoteSubscriberId = _getPeerConnectionMet6.remoteSubscriberId;
 return (0, _assign.default)(remoteConnectionId.match(/^symphony\./) ? {} : {
 subscriberId: remoteSubscriberId,
 connectionId: remoteConnectionId
@@ -18938,18 +18916,18 @@ function PublisherFactory(_ref) {
 this.session._.streamCreate(streamOptions, completionHandler);
 };
 const _stopSendingRtpToMantis = /*#__PURE__*/function () {
-var
+var _ref16 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee6() {
 var peerConnection;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee6$(_context6) {
+while (1) switch (_context6.prev = _context6.next) {
 case 0:
 _restartSendingRtpToMantisCalled = false;
-
+_context6.next = 3;
 return getMantisPeerConnection();
 case 3:
-peerConnection =
+peerConnection = _context6.sent;
 if (!peerConnection) {
-
+_context6.next = 18;
 break;
 }
 _this.trigger('sourceStreamIdChanged', 'P2P');
@@ -18960,55 +18938,55 @@ function PublisherFactory(_ref) {

 // We add this delay before stopping media to prevent MANTIS to consider this stream
 // as inactive after a reconnection and then destroy it.
-
+_context6.next = 9;
 return (0, _promiseDelay.default)(KEEP_SENDING_MEDIA_AFTER_TRANSITIONED);
 case 9:
 if (!_restartSendingRtpToMantisCalled) {
-
+_context6.next = 12;
 break;
 }
 logging.debug('Cancelling stop sending RTP to MANTIS.');
-return
+return _context6.abrupt("return");
 case 12:
 // Audio is muted and video is set to inactive
 amrAudioTrackProcessor.muteAudioInPeerConnection(webRTCStream, peerConnection);
-
+_context6.next = 15;
 return peerConnection.changeMediaDirectionToInactive();
 case 15:
 if (!(OTHelpers.env.isFirefox && OTHelpers.env.version < 96)) {
-
+_context6.next = 18;
 break;
 }
-
+_context6.next = 18;
 return _keepSendingRtcpToMantis();
 case 18:
 case "end":
-return
+return _context6.stop();
 }
-},
+}, _callee6);
 }));
 return function _stopSendingRtpToMantis() {
-return
+return _ref16.apply(this, arguments);
 };
 }();
 const _restartSendingRtpToMantis = /*#__PURE__*/function () {
-var
+var _ref17 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee7() {
 var peerConnection, _yield$getP2pPeerConn2;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee7$(_context7) {
+while (1) switch (_context7.prev = _context7.next) {
 case 0:
 _restartSendingRtpToMantisCalled = true;
-
+_context7.next = 3;
 return getMantisPeerConnection();
 case 3:
-peerConnection =
+peerConnection = _context7.sent;
 if (!peerConnection) {
-
+_context7.next = 22;
 break;
 }
 // Audio is unmuted and video is set to recvonly
 amrAudioTrackProcessor.unmuteAudioInPeerConnection(webRTCStream, peerConnection);
-
+_context7.next = 8;
 return peerConnection.changeMediaDirectionToRecvOnly();
 case 8:
 if (_keepSendingRtcpToMantisTimeout) {
@@ -19016,81 +18994,81 @@ function PublisherFactory(_ref) {
 }
 _this.trigger('sourceStreamIdChanged', 'MANTIS');
 if (!properties.publisherAudioFallbackEnabled) {
-
+_context7.next = 22;
 break;
 }
-
-
+_context7.t0 = peerConnection;
+_context7.next = 14;
 return getP2pPeerConnection();
 case 14:
-
-if (!(
-
+_context7.t1 = _yield$getP2pPeerConn2 = _context7.sent;
+if (!(_context7.t1 == null)) {
+_context7.next = 19;
 break;
 }
-
-
+_context7.t2 = void 0;
+_context7.next = 20;
 break;
 case 19:
-
+_context7.t2 = _yield$getP2pPeerConn2.getAudioFallbackState();
 case 20:
-
-
+_context7.t3 = _context7.t2;
+_context7.t0.enableCongestionLevelEstimation.call(_context7.t0, _context7.t3);
 case 22:
 case "end":
-return
+return _context7.stop();
 }
-},
+}, _callee7);
 }));
 return function _restartSendingRtpToMantis() {
-return
+return _ref17.apply(this, arguments);
 };
 }();
 const _keepSendingRtcpToMantis = /*#__PURE__*/function () {
-var
+var _ref18 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee9() {
 var peerConnection;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee9$(_context9) {
+while (1) switch (_context9.prev = _context9.next) {
 case 0:
-
+_context9.next = 2;
 return getMantisPeerConnection();
 case 2:
-peerConnection =
+peerConnection = _context9.sent;
 if (peerConnection) {
-_keepSendingRtcpToMantisTimeout = setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
-return _regenerator.default.wrap(function
-while (1) switch (
+_keepSendingRtcpToMantisTimeout = setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee8() {
+return _regenerator.default.wrap(function _callee8$(_context8) {
+while (1) switch (_context8.prev = _context8.next) {
 case 0:
 if (!(activeSourceStreamId === 'P2P')) {
-
+_context8.next = 9;
 break;
 }
-
+_context8.next = 3;
 return peerConnection.changeMediaDirectionToRecvOnly();
 case 3:
-
+_context8.next = 5;
 return (0, _promiseDelay.default)(KEEP_SENDING_MEDIA_TO_KEEP_ALIVE);
 case 5:
-
+_context8.next = 7;
 return peerConnection.changeMediaDirectionToInactive();
 case 7:
-
+_context8.next = 9;
 return _keepSendingRtcpToMantis();
 case 9:
 case "end":
-return
+return _context8.stop();
 }
-},
+}, _callee8);
 })), KEEP_SENDING_RTCP_DELAY);
 }
 case 4:
 case "end":
-return
+return _context9.stop();
 }
-},
+}, _callee9);
 }));
 return function _keepSendingRtcpToMantis() {
-return
+return _ref18.apply(this, arguments);
 };
 }();
 const _transitionRoutedToRelayed = () => {
@@ -19126,55 +19104,55 @@ function PublisherFactory(_ref) {
 });
 };
 const _transitionRelayedToRouted = /*#__PURE__*/function () {
-var
-return _regenerator.default.wrap(function
-while (1) switch (
+var _ref20 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee10() {
+return _regenerator.default.wrap(function _callee10$(_context10) {
+while (1) switch (_context10.prev = _context10.next) {
 case 0:
 if (!(activeSourceStreamId !== 'P2P')) {
-
+_context10.next = 2;
 break;
 }
-return
+return _context10.abrupt("return");
 case 2:
 logRelayedToRoutedTransition('Attempt');
 if (_this.session) {
-
+_context10.next = 6;
 break;
 }
 logRelayedToRoutedTransition('Failure', {
 reason: 'Not connected to the session.'
 });
-return
+return _context10.abrupt("return");
 case 6:
 if (_this.streamId) {
-
+_context10.next = 9;
 break;
 }
 logRelayedToRoutedTransition('Failure', {
 reason: 'No streamId available'
 });
-return
+return _context10.abrupt("return");
 case 9:
-
+_context10.next = 11;
 return _restartSendingRtpToMantis();
 case 11:
 _this.session._.streamDestroy(_this.streamId, 'P2P');
-
-
+_context10.t0 = _this;
+_context10.next = 15;
 return getP2pPeerConnection();
 case 15:
-
-
+_context10.t1 = _context10.sent;
+_context10.t0._removePeerConnection.call(_context10.t0, _context10.t1);
 logRelayedToRoutedTransition('Success');
 _this.trigger('streamDestroyForP2PComplete');
 case 19:
 case "end":
-return
+return _context10.stop();
 }
-},
+}, _callee10);
 }));
 return function _transitionRelayedToRouted() {
-return
+return _ref20.apply(this, arguments);
 };
 }();
 this.publish = targetElement => {
@@ -19257,11 +19235,11 @@ function PublisherFactory(_ref) {
 this.dispatchEvent(event);
 });
 getUserMedia().catch(userMediaError).then( /*#__PURE__*/function () {
-var
+var _ref21 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee11(stream) {
 var _audioDevices, _videoDevices;
 var hasVideoFilter, hasAudioVideoDevices;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee11$(_context11) {
+while (1) switch (_context11.prev = _context11.next) {
 case 0:
 // this comes from deviceHelpers.shouldAskForDevices in a round-about way
 audioDevices = processedOptions.audioDevices;
@@ -19272,7 +19250,7 @@ function PublisherFactory(_ref) {
 removeDeviceChangeListener = addAudioInputDevicesChangeListener(_this);
 }
 hasAudioVideoDevices = ((_audioDevices = audioDevices) == null ? void 0 : _audioDevices.length) > 0 || ((_videoDevices = videoDevices) == null ? void 0 : _videoDevices.length) > 0;
-
+_context11.next = 7;
 return (0, _permissionListener.default)(hasAudioVideoDevices).then(listener => {
 listener.on('accessDenied', device => {
 _this.accessAllowed = false;
@@ -19285,7 +19263,7 @@ function PublisherFactory(_ref) {
 // else the wrong device will be returned/nonsensical
 currentDeviceId = (0, _getDeviceIdFromStream.default)(stream, videoDevices);
 }
-
+_context11.next = 10;
 return onStreamAvailable(stream);
 case 10:
 if (!properties.publishVideo) {
@@ -19300,7 +19278,7 @@ function PublisherFactory(_ref) {
 setCurrentTrackDeviceId(currentDeviceId);
 }
 }
-return
+return _context11.abrupt("return", bindVideo().catch(error => {
 if (error instanceof _cancel.CancellationError) {
 // If we get a CancellationError, it means something newer tried
 // to bindVideo before the old one succeeded, perhaps they called
@@ -19321,118 +19299,118 @@ function PublisherFactory(_ref) {
 }));
 case 13:
 case "end":
-return
+return _context11.stop();
 }
-},
+}, _callee11);
 }));
 return function (_x6) {
-return
+return _ref21.apply(this, arguments);
 };
 }());
 });
 return this;
 };
-this._getVideoSenders = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
-return _regenerator.default.wrap(function
-while (1) switch (
+this._getVideoSenders = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee12() {
+return _regenerator.default.wrap(function _callee12$(_context12) {
+while (1) switch (_context12.prev = _context12.next) {
 case 0:
-return
-let kind =
+return _context12.abrupt("return", getAllPeerConnections().then(peerConnections => peerConnections[0].getSenders().filter(_ref23 => {
+let kind = _ref23.track.kind;
 return kind === 'video';
 })));
 case 1:
 case "end":
-return
+return _context12.stop();
 }
-},
+}, _callee12);
 }));
 this._setScalableValues = /*#__PURE__*/function () {
-var
+var _ref24 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee13(scalableParam, scalableValues) {
 var senders, sender, sendParameters;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee13$(_context13) {
+while (1) switch (_context13.prev = _context13.next) {
 case 0:
-
+_context13.next = 2;
 return _this._getVideoSenders();
 case 2:
-senders =
+senders = _context13.sent;
 sender = senders[0];
 sendParameters = sender.getParameters();
 sendParameters.encodings.forEach((encoding, index) => {
 encoding[scalableParam] = scalableValues[index]; // eslint-disable-line no-param-reassign
 });
-
+_context13.next = 8;
 return sender.setParameters(sendParameters);
 case 8:
 case "end":
-return
+return _context13.stop();
 }
-},
+}, _callee13);
 }));
 return function (_x7, _x8) {
-return
+return _ref24.apply(this, arguments);
 };
 }();
 this._setScalabilityMode = /*#__PURE__*/function () {
-var
-return _regenerator.default.wrap(function
-while (1) switch (
+var _ref25 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee14(scalabilityMode) {
+return _regenerator.default.wrap(function _callee14$(_context14) {
+while (1) switch (_context14.prev = _context14.next) {
 case 0:
-return
+return _context14.abrupt("return", setScalabilityMode(scalabilityMode, _this));
 case 1:
 case "end":
-return
+return _context14.stop();
 }
-},
+}, _callee14);
 }));
 return function (_x9) {
-return
+return _ref25.apply(this, arguments);
 };
 }();
 this._setScalableFramerates = /*#__PURE__*/function () {
-var
+var _ref26 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee15(frameRates) {
 var framerateValues;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee15$(_context15) {
+while (1) switch (_context15.prev = _context15.next) {
 case 0:
 framerateValues = normalizeScalableValues(frameRates);
 if (!(framerateValues && areValidFramerates(framerateValues))) {
-
+_context15.next = 4;
 break;
 }
-
+_context15.next = 4;
 return _this._setScalableValues('maxFramerate', framerateValues);
 case 4:
 case "end":
-return
+return _context15.stop();
 }
-},
+}, _callee15);
 }));
 return function (_x10) {
-return
+return _ref26.apply(this, arguments);
 };
 }();
 this._setScalableVideoLayers = /*#__PURE__*/function () {
-var
+var _ref27 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee16(videoLayers) {
 var videoLayerValues;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee16$(_context16) {
+while (1) switch (_context16.prev = _context16.next) {
 case 0:
 videoLayerValues = normalizeScalableValues(videoLayers);
 if (!(videoLayerValues && areValidResolutionScales(videoLayerValues))) {
-
+_context16.next = 4;
 break;
 }
-
+_context16.next = 4;
 return _this._setScalableValues('scaleResolutionDownBy', videoLayerValues);
 case 4:
 case "end":
-return
+return _context16.stop();
 }
-},
+}, _callee16);
 }));
 return function (_x11) {
-return
+return _ref27.apply(this, arguments);
 };
 }();
 const areValidFramerates = framerates => {
@@ -19573,20 +19551,20 @@ function PublisherFactory(_ref) {
 const updateVideo = () => {
 const shouldSendVideo = haveWorkingTracks('video') && properties.publishVideo;
 if (_env.default.name === 'Chrome' && _env.default.version >= 69) {
-(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee17() {
 var executionSentinel, peerConnections;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee17$(_context17) {
+while (1) switch (_context17.prev = _context17.next) {
 case 0:
 if (updateVideoSenderParametersSentinel) {
 updateVideoSenderParametersSentinel.cancel();
 }
 updateVideoSenderParametersSentinel = new _cancel.default();
 executionSentinel = updateVideoSenderParametersSentinel;
-
+_context17.next = 5;
 return getAllPeerConnections();
 case 5:
-peerConnections =
+peerConnections = _context17.sent;
 if (!executionSentinel.isCanceled()) {
 // only proceed if we weren't canceled during the async operation above
 peerConnections.forEach(peerConnection => {
@@ -19595,9 +19573,9 @@ function PublisherFactory(_ref) {
 }
 case 7:
 case "end":
-return
+return _context17.stop();
 }
-},
+}, _callee17);
 }))();
 }
 if (isCustomVideoTrack && mediaProcessor) {
@@ -19624,25 +19602,25 @@ function PublisherFactory(_ref) {
 refreshAudioVideoUI();
 };
 const destroyMediaProcessor = /*#__PURE__*/function () {
-var
+var _ref29 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee18() {
 var videoTrack;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee18$(_context18) {
+while (1) switch (_context18.prev = _context18.next) {
 case 0:
 // We need to cache this since calling the method below will
 // clear this value.
 videoTrack = mediaProcessor.getOriginalVideoTrack(); // Note: this needs to be called before the `stop` method below. Reversing
 // the order may cause race conditions with the MP worker.
-
-
+_context18.prev = 1;
+_context18.next = 4;
 return mediaProcessor.destroy();
 case 4:
-
+_context18.next = 9;
 break;
 case 6:
-
-
-logging.warn(`Error cleaning up mediaProcessor: ${
+_context18.prev = 6;
+_context18.t0 = _context18["catch"](1);
+logging.warn(`Error cleaning up mediaProcessor: ${_context18.t0}`);
 case 9:
 // Since no filtering is being applied, we perform some cleanup. We
 // stop the original video track here since it's not being used
@@ -19650,12 +19628,12 @@ function PublisherFactory(_ref) {
 videoTrack.stop();
 case 10:
 case "end":
-return
+return _context18.stop();
 }
-},
+}, _callee18, null, [[1, 6]]);
 }));
 return function destroyMediaProcessor() {
-return
+return _ref29.apply(this, arguments);
 };
 }();
 const hasTrackFromDevice = deviceId =>
@@ -19683,25 +19661,25 @@ function PublisherFactory(_ref) {
 let currentVideoFilter;
 let currentAudioFilter;
 this._toggleVideo = (0, _blockCallsUntilComplete.default)( /*#__PURE__*/function () {
-var
+var _ref30 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee19(shouldHaveVideo, videoDimensions) {
 var _vidDevices$find;
 var vidDevices, oldTrack, oldTrackDeviceId, newTrack, videoFilter, originalVideoTrack, _originalVideoTrack;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee19$(_context19) {
+while (1) switch (_context19.prev = _context19.next) {
 case 0:
 if (!(isScreenSharing || isCustomVideoTrack || isTrackManuallyStopped)) {
-
+_context19.next = 2;
 break;
 }
-return
+return _context19.abrupt("return");
 case 2:
-
+_context19.next = 4;
 return getVideoDevices();
 case 4:
-vidDevices =
+vidDevices = _context19.sent;
 oldTrack = getCurrentTrack();
 if (oldTrack) {
-
+_context19.next = 8;
 break;
 }
 throw otError(Errors.NOT_SUPPORTED, new Error('Publisher._toggleVideo cannot toggleVideo when you have no video source.'));
@@ -19709,17 +19687,17 @@ function PublisherFactory(_ref) {
 // oldTrackDeviceId is undefined when it comes from a canvasTracks, i.e.: it is currently muted
 oldTrackDeviceId = (_vidDevices$find = vidDevices.find(device => device.label === oldTrack.label)) == null ? void 0 : _vidDevices$find.deviceId;
 if (isNewTrackNeeded(shouldHaveVideo, oldTrackDeviceId)) {
-
+_context19.next = 11;
 break;
 }
-return
+return _context19.abrupt("return");
 case 11:
 if (!(oldTrack.readyState === 'ended')) {
-
+_context19.next = 14;
 break;
 }
 isTrackManuallyStopped = true;
-return
+return _context19.abrupt("return");
 case 14:
 if (shouldHaveVideo && OTHelpers.env.isAndroid && OTHelpers.env.isChrome) {
 // On Chrome on Android you need to stop the previous video track OPENTOK-37206
@@ -19728,17 +19706,17 @@ function PublisherFactory(_ref) {
 }
 }
 if (shouldHaveVideo) {
-
+_context19.next = 31;
 break;
 }
-
+_context19.prev = 16;
 newTrack = (0, _createCanvasVideoTrack.default)(videoDimensions);
-
+_context19.next = 23;
 break;
 case 20:
-
-
-return
+_context19.prev = 20;
+_context19.t0 = _context19["catch"](16);
+return _context19.abrupt("return");
 case 23:
 if (oldTrackDeviceId) {
 // store the current deviceId to reacquire the video later
@@ -19747,13 +19725,13 @@ function PublisherFactory(_ref) {
 setCurrentTrackDeviceId(newTrack.label);
 videoFilter = mediaProcessor.getVideoFilter();
 if (!videoFilter) {
-
+_context19.next = 30;
 break;
 }
 // Save the current video filter because we want to make sure it
 // gets enabled when the user publishes video again
 currentVideoFilter = videoFilter;
-
+_context19.next = 30;
 return destroyMediaProcessor();
 case 30:
 if (_videoMediaProcessorConnector) {
@@ -19762,102 +19740,102 @@ function PublisherFactory(_ref) {
 }
 case 31:
 if (!(currentDeviceId && vidDevices.findIndex(device => device.deviceId === currentDeviceId) === -1)) {
-
+_context19.next = 33;
 break;
 }
 throw otError(Errors.NO_DEVICES_FOUND, new Error('Previous device no longer available - deviceId not found'));
 case 33:
 privateEvents.emit('streamDestroy');
 if (!shouldHaveVideo) {
-
+_context19.next = 64;
 break;
 }
 if (!hasTrackFromDevice(currentDeviceId)) {
-
+_context19.next = 37;
 break;
 }
-return
+return _context19.abrupt("return");
 case 37:
-
-
+_context19.prev = 37;
+_context19.next = 40;
 return getTrackFromDeviceId(currentDeviceId);
 case 40:
-newTrack =
-
+newTrack = _context19.sent;
+_context19.next = 47;
 break;
 case 43:
-
-
-logging.error(`Error getting new track for current device(${currentDeviceId}): ${
-throw
+_context19.prev = 43;
+_context19.t1 = _context19["catch"](37);
+logging.error(`Error getting new track for current device(${currentDeviceId}): ${_context19.t1}`);
+throw _context19.t1;
 case 47:
 if (newTrack) {
-
+_context19.next = 50;
 break;
 }
 logging.error('Failed to enable video. It was not possible to get a new track from the camera');
-return
+return _context19.abrupt("return");
 case 50:
 if (!currentVideoFilter) {
-
+_context19.next = 59;
 break;
 }
 originalVideoTrack = mediaProcessor.getOriginalVideoTrack();
-
+_context19.next = 54;
 return mediaProcessor.setVideoFilter(currentVideoFilter);
 case 54:
-
+_context19.next = 56;
 return mediaProcessor.setMediaStream(webRTCStream);
 case 56:
-
+_context19.next = 58;
 return mediaProcessor.setVideoTrack(newTrack);
 case 58:
-newTrack =
+newTrack = _context19.sent;
 case 59:
 if (!_videoMediaProcessorConnector) {
-
+_context19.next = 64;
 break;
 }
 originalVideoTrack = _videoMediaProcessorConnector.originalTrack;
-
+_context19.next = 63;
 return _videoMediaProcessorConnector.setTrack(newTrack);
 case 63:
-newTrack =
+newTrack = _context19.sent;
 case 64:
-
-
+_context19.prev = 64;
+_context19.next = 67;
 return replaceTrackAndUpdate(oldTrack, newTrack);
 case 67:
 // We stop the original track as a final step because whatever effects
 // were applied to it should remain in effect until the new track is
 // set
 (_originalVideoTrack = originalVideoTrack) == null ? void 0 : _originalVideoTrack.stop();
-
+_context19.next = 73;
 break;
 case 70:
-
-
-throw
+_context19.prev = 70;
+_context19.t2 = _context19["catch"](64);
+throw _context19.t2;
 case 73:
 case "end":
-return
+return _context19.stop();
 }
-},
+}, _callee19, null, [[16, 20], [37, 43], [64, 70]]);
 }));
 return function (_x12, _x13) {
-return
+return _ref30.apply(this, arguments);
 };
 }());
 const resetAudioFallbackStateOnPeerConnection = (0, _cancellation.callWithCancellation)( /*#__PURE__*/function () {
-var
+var _ref31 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee20(isActive, cancellation) {
 var peerConnections;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee20$(_context20) {
+while (1) switch (_context20.prev = _context20.next) {
 case 0:
-
+_context20.next = 2;
 return getAllPeerConnections();
 case 2:
-peerConnections =
+peerConnections = _context20.sent;
 if (!cancellation.isCanceled()) {
 peerConnections.forEach(peerConnection => {
 if (isActive) {
@@ -19869,12 +19847,12 @@ function PublisherFactory(_ref) {
 }
 case 4:
 case "end":
-return
+return _context20.stop();
 }
-},
+}, _callee20);
 }));
 return function (_x14, _x15) {
-return
+return _ref31.apply(this, arguments);
 };
 }());
 const resetAudioFallbackState = () => {
@@ -19884,10 +19862,10 @@ function PublisherFactory(_ref) {
 (_chromeMixin2 = chromeMixin) == null ? void 0 : _chromeMixin2.videoDisabledIndicator.setWarning(false);
 };
 const onAudioFallbackActiveVideo = /*#__PURE__*/function () {
-var
+var _ref32 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee21(previousState) {
 var _chromeMixin3, _chromeMixin4;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee21$(_context21) {
+while (1) switch (_context21.prev = _context21.next) {
 case 0:
 (_chromeMixin3 = chromeMixin) == null ? void 0 : _chromeMixin3.videoDisabledIndicator.disableVideo(false);
 (_chromeMixin4 = chromeMixin) == null ? void 0 : _chromeMixin4.videoDisabledIndicator.setWarning(false);
@@ -19900,12 +19878,12 @@ function PublisherFactory(_ref) {
 }
 case 3:
 case "end":
-return
+return _context21.stop();
 }
-},
+}, _callee21);
 }));
 return function onAudioFallbackActiveVideo(_x16) {
-return
+return _ref32.apply(this, arguments);
 };
 }();
 const onAudioFallbackActiveVideoWithWarning = () => {
@@ -19916,10 +19894,10 @@ function PublisherFactory(_ref) {
 this.trigger('videoDisableWarning');
 };
 const onAudioFallbackSuspendedVideo = /*#__PURE__*/function () {
-var
+var _ref33 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee22() {
 var _chromeMixin6;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee22$(_context22) {
+while (1) switch (_context22.prev = _context22.next) {
 case 0:
 if (properties.publishVideo) {
 (_chromeMixin6 = chromeMixin) == null ? void 0 : _chromeMixin6.videoDisabledIndicator.disableVideo(true);
@@ -19929,12 +19907,12 @@ function PublisherFactory(_ref) {
 });
 case 2:
 case "end":
-return
+return _context22.stop();
 }
-},
+}, _callee22);
 }));
 return function onAudioFallbackSuspendedVideo() {
-return
+return _ref33.apply(this, arguments);
 };
 }();
 let audioFallbackCoordinator;
@@ -19947,9 +19925,9 @@ function PublisherFactory(_ref) {
 [_audioFallbackVideoStates.default.ACTIVE_VIDEO_WITH_WARNING]: onAudioFallbackActiveVideoWithWarning,
 [_audioFallbackVideoStates.default.SUSPENDED_VIDEO]: onAudioFallbackSuspendedVideo
 };
-audioFallbackCoordinator.on('stateChange',
-let previousState =
-audioFallbackVideoState =
+audioFallbackCoordinator.on('stateChange', _ref34 => {
+let previousState = _ref34.previousState,
+audioFallbackVideoState = _ref34.state;
 try {
 audioFallbackStateHandlers[audioFallbackVideoState](previousState);
 } catch (err) {
@@ -20038,10 +20016,10 @@ function PublisherFactory(_ref) {
 return this;
 };
 this._publishVideo = /*#__PURE__*/function () {
-var
+var _ref35 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee23(value) {
 var videoDimensions;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee23$(_context23) {
+while (1) switch (_context23.prev = _context23.next) {
 case 0:
 // Save videoDimensions to not alter video size between mute states.
 videoDimensions = getVideoDimensions();
@@ -20055,16 +20033,16 @@ function PublisherFactory(_ref) {
 resetAudioFallbackState();
 }
 }
-
+_context23.next = 5;
 return Promise.all([_this._toggleVideo(properties.publishVideo, videoDimensions), updateVideo()]);
 case 5:
 case "end":
-return
+return _context23.stop();
 }
-},
+}, _callee23);
 }));
 return function (_x17) {
-return
+return _ref35.apply(this, arguments);
 };
 }();

@@ -20333,42 +20311,42 @@ function PublisherFactory(_ref) {
 {
 let videoIndex = 0;
 const cycleVideo = /*#__PURE__*/function () {
-var
+var _ref36 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee24() {
 var oldTrack, vidDevices, hasOtherVideoDevices, newVideoDevice, deviceId;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee24$(_context24) {
+while (1) switch (_context24.prev = _context24.next) {
 case 0:
 if (!(OTHelpers.env.isLegacyEdge || !windowMock.RTCRtpSender || typeof windowMock.RTCRtpSender.prototype.replaceTrack !== 'function')) {
-
+_context24.next = 2;
 break;
 }
 throw otError(Errors.UNSUPPORTED_BROWSER, new Error('Publisher#cycleVideo is not supported in your browser.'), ExceptionCodes.UNABLE_TO_PUBLISH);
 case 2:
 if (!(isCustomVideoTrack || isScreenSharing)) {
-
+_context24.next = 4;
 break;
 }
 throw otError(Errors.NOT_SUPPORTED, new Error('Publisher#cycleVideo: The publisher is not using a camera video source'));
 case 4:
 oldTrack = getCurrentTrack();
 if (oldTrack) {
-
+_context24.next = 7;
 break;
 }
 throw otError(Errors.NOT_SUPPORTED, new Error('Publisher#cycleVideo cannot cycleVideo when you have no video source.'));
 case 7:
 videoIndex += 1;
-
+_context24.next = 10;
 return getVideoDevices();
 case 10:
-vidDevices =
+vidDevices = _context24.sent;
 // different devices return the cameras in different orders
 hasOtherVideoDevices = vidDevices.filter(device => device.deviceId !== currentDeviceId).length > 0;
 if (hasOtherVideoDevices) {
-
+_context24.next = 14;
 break;
 }
-return
+return _context24.abrupt("return", currentDeviceId);
 case 14:
 while (vidDevices[videoIndex % vidDevices.length].deviceId === currentDeviceId) {
 videoIndex += 1;
@@ -20376,18 +20354,18 @@ function PublisherFactory(_ref) {
 privateEvents.emit('streamDestroy');
 newVideoDevice = vidDevices[videoIndex % vidDevices.length];
 deviceId = newVideoDevice.deviceId;
-
+_context24.next = 20;
 return attemptToSetVideoTrack(deviceId);
 case 20:
-return
+return _context24.abrupt("return", currentDeviceId);
 case 21:
 case "end":
-return
+return _context24.stop();
 }
-},
+}, _callee24);
 }));
 return function cycleVideo() {
-return
+return _ref36.apply(this, arguments);
 };
 }();

@@ -20432,62 +20410,62 @@ function PublisherFactory(_ref) {
 *
 * @see <a href="#setVideoSource">Publisher.setVideoSource()</a>
 */
-this.cycleVideo = (0, _blockCallsUntilComplete.default)( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+this.cycleVideo = (0, _blockCallsUntilComplete.default)( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee25() {
 var deviceId;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee25$(_context25) {
+while (1) switch (_context25.prev = _context25.next) {
 case 0:
-
-
+_context25.prev = 0;
+_context25.next = 3;
 return cycleVideo();
 case 3:
-deviceId =
-
+deviceId = _context25.sent;
+_context25.next = 10;
 break;
 case 6:
-
-
-logging.error(`Publisher#cycleVideo: could not cycle video: ${
-throw
+_context25.prev = 6;
|
|
20427
|
+
_context25.t0 = _context25["catch"](0);
|
|
20428
|
+
logging.error(`Publisher#cycleVideo: could not cycle video: ${_context25.t0}`);
|
|
20429
|
+
throw _context25.t0;
|
|
20452
20430
|
case 10:
|
|
20453
|
-
return
|
|
20431
|
+
return _context25.abrupt("return", {
|
|
20454
20432
|
deviceId
|
|
20455
20433
|
});
|
|
20456
20434
|
case 11:
|
|
20457
20435
|
case "end":
|
|
20458
|
-
return
|
|
20436
|
+
return _context25.stop();
|
|
20459
20437
|
}
|
|
20460
|
-
},
|
|
20438
|
+
}, _callee25, null, [[0, 6]]);
|
|
20461
20439
|
})));
|
|
20462
20440
|
}
|
|
20463
20441
|
const replaceTrackAndUpdate = /*#__PURE__*/function () {
|
|
20464
|
-
var
|
|
20442
|
+
var _ref38 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee27(oldTrack, newTrack) {
|
|
20465
20443
|
var pcs, isNewTrackFiltered, video;
|
|
20466
|
-
return _regenerator.default.wrap(function
|
|
20467
|
-
while (1) switch (
|
|
20444
|
+
return _regenerator.default.wrap(function _callee27$(_context27) {
|
|
20445
|
+
while (1) switch (_context27.prev = _context27.next) {
|
|
20468
20446
|
case 0:
|
|
20469
|
-
|
|
20447
|
+
_context27.next = 2;
|
|
20470
20448
|
return getAllPeerConnections();
|
|
20471
20449
|
case 2:
|
|
20472
|
-
pcs =
|
|
20473
|
-
|
|
20450
|
+
pcs = _context27.sent;
|
|
20451
|
+
_context27.next = 5;
|
|
20474
20452
|
return Promise.all(pcs.map( /*#__PURE__*/function () {
|
|
20475
|
-
var
|
|
20476
|
-
return _regenerator.default.wrap(function
|
|
20477
|
-
while (1) switch (
|
|
20453
|
+
var _ref39 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee26(pc) {
|
|
20454
|
+
return _regenerator.default.wrap(function _callee26$(_context26) {
|
|
20455
|
+
while (1) switch (_context26.prev = _context26.next) {
|
|
20478
20456
|
case 0:
|
|
20479
|
-
|
|
20457
|
+
_context26.next = 2;
|
|
20480
20458
|
return pc.findAndReplaceTrack(oldTrack, newTrack);
|
|
20481
20459
|
case 2:
|
|
20482
20460
|
pc.setP2PMaxBitrate();
|
|
20483
20461
|
case 3:
|
|
20484
20462
|
case "end":
|
|
20485
|
-
return
|
|
20463
|
+
return _context26.stop();
|
|
20486
20464
|
}
|
|
20487
|
-
},
|
|
20465
|
+
}, _callee26);
|
|
20488
20466
|
}));
|
|
20489
20467
|
return function (_x20) {
|
|
20490
|
-
return
|
|
20468
|
+
return _ref39.apply(this, arguments);
|
|
20491
20469
|
};
|
|
20492
20470
|
}()));
|
|
20493
20471
|
case 5:
|
|
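The hunk above only renames the regenerator helpers inside Publisher#cycleVideo (_callee24, _callee25, _context24, _context25, _ref36). A minimal usage sketch of the documented method, assuming an existing publisher; the wrapper function name is illustrative:

// Minimal sketch: cycle the publisher's camera to the next video input device.
// cycleVideo() resolves with the deviceId that ended up selected; it rejects in
// browsers without RTCRtpSender.replaceTrack or when no camera source is in use.
async function switchCamera(publisher) {
  try {
    const { deviceId } = await publisher.cycleVideo();
    console.log('Now publishing from camera:', deviceId);
  } catch (err) {
    console.error('Could not cycle video:', err.name, err.message);
  }
}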
@@ -20523,20 +20501,20 @@ function PublisherFactory(_ref) {
 updateVideo();
 case 13:
 case "end":
-return
+return _context27.stop();
 }
-},
+}, _callee27);
 }));
 return function replaceTrackAndUpdate(_x18, _x19) {
-return
+return _ref38.apply(this, arguments);
 };
 }();
 const getTrackFromDeviceId = /*#__PURE__*/function () {
-var
+var _ref40 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee28(deviceId) {
 var _newVideoStream;
 var oldTrackDeviceId, newOptions, _processedOptions2, getUserMediaHelper, newVideoStream, _newVideoStream$getVi, track;
-return _regenerator.default.wrap(function
-while (1) switch (
+return _regenerator.default.wrap(function _callee28$(_context28) {
+while (1) switch (_context28.prev = _context28.next) {
 case 0:
 oldTrackDeviceId = getCurrentTrackDeviceId();
 setCurrentTrackDeviceId(deviceId);
@@ -20549,32 +20527,32 @@ function PublisherFactory(_ref) {
|
|
|
20549
20527
|
accessDialogClosed: onAccessDialogClosed
|
|
20550
20528
|
});
|
|
20551
20529
|
_processedOptions2 = processedOptions, getUserMediaHelper = _processedOptions2.getUserMedia;
|
|
20552
|
-
|
|
20553
|
-
|
|
20530
|
+
_context28.prev = 8;
|
|
20531
|
+
_context28.next = 11;
|
|
20554
20532
|
return getUserMediaHelper();
|
|
20555
20533
|
case 11:
|
|
20556
|
-
newVideoStream =
|
|
20557
|
-
|
|
20534
|
+
newVideoStream = _context28.sent;
|
|
20535
|
+
_context28.next = 18;
|
|
20558
20536
|
break;
|
|
20559
20537
|
case 14:
|
|
20560
|
-
|
|
20561
|
-
|
|
20562
|
-
logging.error(
|
|
20563
|
-
throw
|
|
20538
|
+
_context28.prev = 14;
|
|
20539
|
+
_context28.t0 = _context28["catch"](8);
|
|
20540
|
+
logging.error(_context28.t0);
|
|
20541
|
+
throw _context28.t0;
|
|
20564
20542
|
case 18:
|
|
20565
20543
|
_newVideoStream$getVi = (_newVideoStream = newVideoStream) == null ? void 0 : _newVideoStream.getVideoTracks(), track = _newVideoStream$getVi[0];
|
|
20566
20544
|
if (!track) {
|
|
20567
20545
|
setCurrentTrackDeviceId(oldTrackDeviceId);
|
|
20568
20546
|
}
|
|
20569
|
-
return
|
|
20547
|
+
return _context28.abrupt("return", track);
|
|
20570
20548
|
case 21:
|
|
20571
20549
|
case "end":
|
|
20572
|
-
return
|
|
20550
|
+
return _context28.stop();
|
|
20573
20551
|
}
|
|
20574
|
-
},
|
|
20552
|
+
}, _callee28, null, [[8, 14]]);
|
|
20575
20553
|
}));
|
|
20576
20554
|
return function getTrackFromDeviceId(_x21) {
|
|
20577
|
-
return
|
|
20555
|
+
return _ref40.apply(this, arguments);
|
|
20578
20556
|
};
|
|
20579
20557
|
}();
|
|
20580
20558
|
const getCurrentTrack = () => {
|
|
@@ -20591,102 +20569,102 @@ function PublisherFactory(_ref) {
|
|
|
20591
20569
|
_currentTrackDeviceId = deviceId;
|
|
20592
20570
|
};
|
|
20593
20571
|
const getVideoDevices = /*#__PURE__*/function () {
|
|
20594
|
-
var
|
|
20572
|
+
var _ref41 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee29() {
|
|
20595
20573
|
var devices, vidDevices;
|
|
20596
|
-
return _regenerator.default.wrap(function
|
|
20597
|
-
while (1) switch (
|
|
20574
|
+
return _regenerator.default.wrap(function _callee29$(_context29) {
|
|
20575
|
+
while (1) switch (_context29.prev = _context29.next) {
|
|
20598
20576
|
case 0:
|
|
20599
|
-
|
|
20577
|
+
_context29.next = 2;
|
|
20600
20578
|
return deviceHelpers.shouldAskForDevices();
|
|
20601
20579
|
case 2:
|
|
20602
|
-
devices =
|
|
20580
|
+
devices = _context29.sent;
|
|
20603
20581
|
vidDevices = devices.videoDevices;
|
|
20604
20582
|
if (!(!devices.video || !vidDevices || !vidDevices.length)) {
|
|
20605
|
-
|
|
20583
|
+
_context29.next = 6;
|
|
20606
20584
|
break;
|
|
20607
20585
|
}
|
|
20608
20586
|
throw otError(Errors.NO_DEVICES_FOUND, new Error('No video devices available'), ExceptionCodes.UNABLE_TO_PUBLISH);
|
|
20609
20587
|
case 6:
|
|
20610
|
-
return
|
|
20588
|
+
return _context29.abrupt("return", vidDevices);
|
|
20611
20589
|
case 7:
|
|
20612
20590
|
case "end":
|
|
20613
|
-
return
|
|
20591
|
+
return _context29.stop();
|
|
20614
20592
|
}
|
|
20615
|
-
},
|
|
20593
|
+
}, _callee29);
|
|
20616
20594
|
}));
|
|
20617
20595
|
return function getVideoDevices() {
|
|
20618
|
-
return
|
|
20596
|
+
return _ref41.apply(this, arguments);
|
|
20619
20597
|
};
|
|
20620
20598
|
}();
|
|
20621
20599
|
const replaceAudioTrackInPeerConnections = /*#__PURE__*/function () {
|
|
20622
|
-
var
|
|
20600
|
+
var _ref42 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee30(oldTrack, newTrack) {
|
|
20623
20601
|
var isAdaptiveEnabled, activePeerConnection, inactivePeerConnection, pcs;
|
|
20624
|
-
return _regenerator.default.wrap(function
|
|
20625
|
-
while (1) switch (
|
|
20602
|
+
return _regenerator.default.wrap(function _callee30$(_context30) {
|
|
20603
|
+
while (1) switch (_context30.prev = _context30.next) {
|
|
20626
20604
|
case 0:
|
|
20627
20605
|
if (_this.session) {
|
|
20628
|
-
|
|
20606
|
+
_context30.next = 2;
|
|
20629
20607
|
break;
|
|
20630
20608
|
}
|
|
20631
|
-
return
|
|
20609
|
+
return _context30.abrupt("return");
|
|
20632
20610
|
case 2:
|
|
20633
20611
|
isAdaptiveEnabled = _this.session.sessionInfo.isAdaptiveEnabled; // If we are in an AMR session and a P2P PC, we need to change the track in the P2P PC
|
|
20634
20612
|
// and check the Mantis PC, if we need to add the new track to the muted Mantis audio tracks and replace it.
|
|
20635
20613
|
// activeSourceStreamId may be undefined if we haven't finished publishing yet
|
|
20636
20614
|
if (!(activeSourceStreamId && isAdaptiveEnabled)) {
|
|
20637
|
-
|
|
20615
|
+
_context30.next = 17;
|
|
20638
20616
|
break;
|
|
20639
20617
|
}
|
|
20640
|
-
|
|
20618
|
+
_context30.next = 6;
|
|
20641
20619
|
return getPeerConnectionBySourceStreamId(activeSourceStreamId);
|
|
20642
20620
|
case 6:
|
|
20643
|
-
activePeerConnection =
|
|
20644
|
-
|
|
20621
|
+
activePeerConnection = _context30.sent;
|
|
20622
|
+
_context30.next = 9;
|
|
20645
20623
|
return activePeerConnection == null ? void 0 : activePeerConnection.findAndReplaceTrack(oldTrack, newTrack);
|
|
20646
20624
|
case 9:
|
|
20647
20625
|
if (!(activeSourceStreamId === 'P2P')) {
|
|
20648
|
-
|
|
20626
|
+
_context30.next = 15;
|
|
20649
20627
|
break;
|
|
20650
20628
|
}
|
|
20651
|
-
|
|
20629
|
+
_context30.next = 12;
|
|
20652
20630
|
return getMantisPeerConnection();
|
|
20653
20631
|
case 12:
|
|
20654
|
-
inactivePeerConnection =
|
|
20655
|
-
|
|
20632
|
+
inactivePeerConnection = _context30.sent;
|
|
20633
|
+
_context30.next = 15;
|
|
20656
20634
|
return amrAudioTrackProcessor.replaceTrackInMutedAudioTracks(inactivePeerConnection, oldTrack, newTrack);
|
|
20657
20635
|
case 15:
|
|
20658
|
-
|
|
20636
|
+
_context30.next = 22;
|
|
20659
20637
|
break;
|
|
20660
20638
|
case 17:
|
|
20661
|
-
|
|
20639
|
+
_context30.next = 19;
|
|
20662
20640
|
return getAllPeerConnections();
|
|
20663
20641
|
case 19:
|
|
20664
|
-
pcs =
|
|
20665
|
-
|
|
20642
|
+
pcs = _context30.sent;
|
|
20643
|
+
_context30.next = 22;
|
|
20666
20644
|
return Promise.all(pcs.map(pc => pc.findAndReplaceTrack(oldTrack, newTrack)));
|
|
20667
20645
|
case 22:
|
|
20668
20646
|
case "end":
|
|
20669
|
-
return
|
|
20647
|
+
return _context30.stop();
|
|
20670
20648
|
}
|
|
20671
|
-
},
|
|
20649
|
+
}, _callee30);
|
|
20672
20650
|
}));
|
|
20673
20651
|
return function replaceAudioTrackInPeerConnections(_x22, _x23) {
|
|
20674
|
-
return
|
|
20652
|
+
return _ref42.apply(this, arguments);
|
|
20675
20653
|
};
|
|
20676
20654
|
}();
|
|
20677
20655
|
const replaceAudioTrack = /*#__PURE__*/function () {
|
|
20678
|
-
var
|
|
20656
|
+
var _ref43 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee31(oldTrack, newTrack) {
|
|
20679
20657
|
var video, isFilteredTrack;
|
|
20680
|
-
return _regenerator.default.wrap(function
|
|
20681
|
-
while (1) switch (
|
|
20658
|
+
return _regenerator.default.wrap(function _callee31$(_context31) {
|
|
20659
|
+
while (1) switch (_context31.prev = _context31.next) {
|
|
20682
20660
|
case 0:
|
|
20683
20661
|
if (!(oldTrack === newTrack)) {
|
|
20684
|
-
|
|
20662
|
+
_context31.next = 2;
|
|
20685
20663
|
break;
|
|
20686
20664
|
}
|
|
20687
|
-
return
|
|
20665
|
+
return _context31.abrupt("return");
|
|
20688
20666
|
case 2:
|
|
20689
|
-
|
|
20667
|
+
_context31.next = 4;
|
|
20690
20668
|
return replaceAudioTrackInPeerConnections(oldTrack, newTrack);
|
|
20691
20669
|
case 4:
|
|
20692
20670
|
if (newTrack) {
|
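The internal getVideoDevices helper in the hunk above rejects with NO_DEVICES_FOUND when no camera is available. As a hedged sketch, an application can enumerate inputs with the public OT.getDevices callback API before publishing; the filtering logic and log messages here are illustrative:

// Minimal sketch, assuming the opentok.js bundle is loaded as the global OT.
// OT.getDevices(callback) reports the audio and video inputs the browser exposes.
OT.getDevices(function (error, devices) {
  if (error) {
    console.error('Could not list devices:', error);
    return;
  }
  var cameras = devices.filter(function (device) {
    return device.kind === 'videoInput';
  });
  if (cameras.length === 0) {
    console.warn('No video devices available'); // mirrors the NO_DEVICES_FOUND path above
  }
});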
|
@@ -20728,23 +20706,23 @@ function PublisherFactory(_ref) {
|
|
|
20728
20706
|
refreshAudioVideoUI();
|
|
20729
20707
|
case 14:
|
|
20730
20708
|
case "end":
|
|
20731
|
-
return
|
|
20709
|
+
return _context31.stop();
|
|
20732
20710
|
}
|
|
20733
|
-
},
|
|
20711
|
+
}, _callee31);
|
|
20734
20712
|
}));
|
|
20735
20713
|
return function replaceAudioTrack(_x24, _x25) {
|
|
20736
|
-
return
|
|
20714
|
+
return _ref43.apply(this, arguments);
|
|
20737
20715
|
};
|
|
20738
20716
|
}();
|
|
20739
20717
|
const resetAudioSource = /*#__PURE__*/function () {
|
|
20740
|
-
var
|
|
20718
|
+
var _ref44 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee32(audioTrack) {
|
|
20741
20719
|
var audioDeviceId, newAudioTrack;
|
|
20742
|
-
return _regenerator.default.wrap(function
|
|
20743
|
-
while (1) switch (
|
|
20720
|
+
return _regenerator.default.wrap(function _callee32$(_context32) {
|
|
20721
|
+
while (1) switch (_context32.prev = _context32.next) {
|
|
20744
20722
|
case 0:
|
|
20745
20723
|
audioDeviceId = audioTrack.getSettings().deviceId;
|
|
20746
|
-
|
|
20747
|
-
|
|
20724
|
+
_context32.prev = 1;
|
|
20725
|
+
_context32.next = 4;
|
|
20748
20726
|
return _this.setAudioSource(audioDeviceId);
|
|
20749
20727
|
case 4:
|
|
20750
20728
|
// We need to add the onmute listener to the new audio track.
|
|
@@ -20753,20 +20731,20 @@ function PublisherFactory(_ref) {
|
|
|
20753
20731
|
newAudioTrack.onmute = () => handleBuggedMutedLocalAudioTrack(newAudioTrack);
|
|
20754
20732
|
newAudioTrack.onunmute = () => handleBuggedUnMutedLocalAudioTrack(newAudioTrack);
|
|
20755
20733
|
}
|
|
20756
|
-
|
|
20734
|
+
_context32.next = 11;
|
|
20757
20735
|
break;
|
|
20758
20736
|
case 8:
|
|
20759
|
-
|
|
20760
|
-
|
|
20761
|
-
logging.error(
|
|
20737
|
+
_context32.prev = 8;
|
|
20738
|
+
_context32.t0 = _context32["catch"](1);
|
|
20739
|
+
logging.error(_context32.t0);
|
|
20762
20740
|
case 11:
|
|
20763
20741
|
case "end":
|
|
20764
|
-
return
|
|
20742
|
+
return _context32.stop();
|
|
20765
20743
|
}
|
|
20766
|
-
},
|
|
20744
|
+
}, _callee32, null, [[1, 8]]);
|
|
20767
20745
|
}));
|
|
20768
20746
|
return function resetAudioSource(_x26) {
|
|
20769
|
-
return
|
|
20747
|
+
return _ref44.apply(this, arguments);
|
|
20770
20748
|
};
|
|
20771
20749
|
}();
|
|
20772
20750
|
|
|
@@ -20780,15 +20758,15 @@ function PublisherFactory(_ref) {
|
|
|
20780
20758
|
}
|
|
20781
20759
|
// trigger the handler onVisibilityChange
|
|
20782
20760
|
const visibilityHandler = /*#__PURE__*/function () {
|
|
20783
|
-
var
|
|
20784
|
-
return _regenerator.default.wrap(function
|
|
20785
|
-
while (1) switch (
|
|
20761
|
+
var _ref45 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee33() {
|
|
20762
|
+
return _regenerator.default.wrap(function _callee33$(_context33) {
|
|
20763
|
+
while (1) switch (_context33.prev = _context33.next) {
|
|
20786
20764
|
case 0:
|
|
20787
20765
|
if (document.hidden) {
|
|
20788
|
-
|
|
20766
|
+
_context33.next = 5;
|
|
20789
20767
|
break;
|
|
20790
20768
|
}
|
|
20791
|
-
|
|
20769
|
+
_context33.next = 3;
|
|
20792
20770
|
return resetAudioSource(audioTrack);
|
|
20793
20771
|
case 3:
|
|
20794
20772
|
if (shouldRePublishVideo) {
|
|
@@ -20797,12 +20775,12 @@ function PublisherFactory(_ref) {
|
|
|
20797
20775
|
document.removeEventListener('visibilitychange', visibilityHandler);
|
|
20798
20776
|
case 5:
|
|
20799
20777
|
case "end":
|
|
20800
|
-
return
|
|
20778
|
+
return _context33.stop();
|
|
20801
20779
|
}
|
|
20802
|
-
},
|
|
20780
|
+
}, _callee33);
|
|
20803
20781
|
}));
|
|
20804
20782
|
return function visibilityHandler() {
|
|
20805
|
-
return
|
|
20783
|
+
return _ref45.apply(this, arguments);
|
|
20806
20784
|
};
|
|
20807
20785
|
}();
|
|
20808
20786
|
document.addEventListener('visibilitychange', visibilityHandler);
|
|
@@ -20882,80 +20860,80 @@ function PublisherFactory(_ref) {
|
|
|
20882
20860
|
return cancelPreviousSetAudioSourceSentinel;
|
|
20883
20861
|
};
|
|
20884
20862
|
const setAudioSource = /*#__PURE__*/function () {
|
|
20885
|
-
var
|
|
20863
|
+
var _ref46 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee35(audioSource) {
|
|
20886
20864
|
var currentCancelSentinel, setStreamIfNotCancelled, prevAudioSource, newTrack, newOptions, prevLabel, prevDeviceId, _processedOptions3, getUserMediaHelper, prevOptions, previousDevice, stream;
|
|
20887
|
-
return _regenerator.default.wrap(function
|
|
20888
|
-
while (1) switch (
|
|
20865
|
+
return _regenerator.default.wrap(function _callee35$(_context35) {
|
|
20866
|
+
while (1) switch (_context35.prev = _context35.next) {
|
|
20889
20867
|
case 0:
|
|
20890
20868
|
if (isSetAudioSourceSupported) {
|
|
20891
|
-
|
|
20869
|
+
_context35.next = 2;
|
|
20892
20870
|
break;
|
|
20893
20871
|
}
|
|
20894
20872
|
throw setAudioSourceNotSupportedError();
|
|
20895
20873
|
case 2:
|
|
20896
20874
|
currentCancelSentinel = getSetAudioSourceCancellationSentinel();
|
|
20897
20875
|
setStreamIfNotCancelled = /*#__PURE__*/function () {
|
|
20898
|
-
var
|
|
20899
|
-
return _regenerator.default.wrap(function
|
|
20900
|
-
while (1) switch (
|
|
20876
|
+
var _ref47 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee34(stream) {
|
|
20877
|
+
return _regenerator.default.wrap(function _callee34$(_context34) {
|
|
20878
|
+
while (1) switch (_context34.prev = _context34.next) {
|
|
20901
20879
|
case 0:
|
|
20902
20880
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20903
|
-
|
|
20881
|
+
_context34.next = 3;
|
|
20904
20882
|
break;
|
|
20905
20883
|
}
|
|
20906
20884
|
stream.getTracks(track => track.stop());
|
|
20907
20885
|
throw setAudioSourceCancellationError();
|
|
20908
20886
|
case 3:
|
|
20909
|
-
return
|
|
20887
|
+
return _context34.abrupt("return", setAudioSource(stream.getAudioTracks()[0]));
|
|
20910
20888
|
case 4:
|
|
20911
20889
|
case "end":
|
|
20912
|
-
return
|
|
20890
|
+
return _context34.stop();
|
|
20913
20891
|
}
|
|
20914
|
-
},
|
|
20892
|
+
}, _callee34);
|
|
20915
20893
|
}));
|
|
20916
20894
|
return function setStreamIfNotCancelled(_x28) {
|
|
20917
|
-
return
|
|
20895
|
+
return _ref47.apply(this, arguments);
|
|
20918
20896
|
};
|
|
20919
20897
|
}();
|
|
20920
20898
|
prevAudioSource = _this._getAudioSource();
|
|
20921
20899
|
if (prevAudioSource) {
|
|
20922
|
-
|
|
20900
|
+
_context35.next = 7;
|
|
20923
20901
|
break;
|
|
20924
20902
|
}
|
|
20925
20903
|
throw otError(Errors.NOT_SUPPORTED, new Error('Publisher#setAudioSource cannot add an audio source when you started without one.'));
|
|
20926
20904
|
case 7:
|
|
20927
20905
|
if (!(audioSource instanceof MediaStreamTrack)) {
|
|
20928
|
-
|
|
20906
|
+
_context35.next = 26;
|
|
20929
20907
|
break;
|
|
20930
20908
|
}
|
|
20931
20909
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20932
|
-
|
|
20910
|
+
_context35.next = 10;
|
|
20933
20911
|
break;
|
|
20934
20912
|
}
|
|
20935
20913
|
throw setAudioSourceCancellationError();
|
|
20936
20914
|
case 10:
|
|
20937
20915
|
if (!_audioMediaProcessorConnector) {
|
|
20938
|
-
|
|
20916
|
+
_context35.next = 23;
|
|
20939
20917
|
break;
|
|
20940
20918
|
}
|
|
20941
|
-
|
|
20942
|
-
|
|
20919
|
+
_context35.prev = 11;
|
|
20920
|
+
_context35.next = 14;
|
|
20943
20921
|
return _audioMediaProcessorConnector.setTrack(audioSource);
|
|
20944
20922
|
case 14:
|
|
20945
|
-
newTrack =
|
|
20946
|
-
|
|
20923
|
+
newTrack = _context35.sent;
|
|
20924
|
+
_context35.next = 17;
|
|
20947
20925
|
return replaceAudioTrack(prevAudioSource, newTrack);
|
|
20948
20926
|
case 17:
|
|
20949
|
-
return
|
|
20927
|
+
return _context35.abrupt("return", _context35.sent);
|
|
20950
20928
|
case 20:
|
|
20951
|
-
|
|
20952
|
-
|
|
20953
|
-
logging.error(`Error setting track on audioMediaProcessorConnector: ${
|
|
20929
|
+
_context35.prev = 20;
|
|
20930
|
+
_context35.t0 = _context35["catch"](11);
|
|
20931
|
+
logging.error(`Error setting track on audioMediaProcessorConnector: ${_context35.t0}`);
|
|
20954
20932
|
case 23:
|
|
20955
|
-
return
|
|
20933
|
+
return _context35.abrupt("return", replaceAudioTrack(prevAudioSource, audioSource));
|
|
20956
20934
|
case 26:
|
|
20957
20935
|
if (!(typeof audioSource === 'string')) {
|
|
20958
|
-
|
|
20936
|
+
_context35.next = 73;
|
|
20959
20937
|
break;
|
|
20960
20938
|
}
|
|
20961
20939
|
// Must be a deviceId, call getUserMedia and get the MediaStreamTrack
|
|
@@ -20973,21 +20951,21 @@ function PublisherFactory(_ref) {
|
|
|
20973
20951
|
prevAudioSource.stop();
|
|
20974
20952
|
}
|
|
20975
20953
|
_processedOptions3 = processedOptions, getUserMediaHelper = _processedOptions3.getUserMedia;
|
|
20976
|
-
|
|
20977
|
-
|
|
20978
|
-
|
|
20954
|
+
_context35.prev = 36;
|
|
20955
|
+
_context35.t1 = setStreamIfNotCancelled;
|
|
20956
|
+
_context35.next = 40;
|
|
20979
20957
|
return getUserMediaHelper();
|
|
20980
20958
|
case 40:
|
|
20981
|
-
|
|
20982
|
-
|
|
20983
|
-
return (0,
|
|
20959
|
+
_context35.t2 = _context35.sent;
|
|
20960
|
+
_context35.next = 43;
|
|
20961
|
+
return (0, _context35.t1)(_context35.t2);
|
|
20984
20962
|
case 43:
|
|
20985
|
-
return
|
|
20963
|
+
return _context35.abrupt("return", _context35.sent);
|
|
20986
20964
|
case 46:
|
|
20987
|
-
|
|
20988
|
-
|
|
20965
|
+
_context35.prev = 46;
|
|
20966
|
+
_context35.t3 = _context35["catch"](36);
|
|
20989
20967
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20990
|
-
|
|
20968
|
+
_context35.next = 50;
|
|
20991
20969
|
break;
|
|
20992
20970
|
}
|
|
20993
20971
|
throw setAudioSourceCancellationError();
|
|
@@ -20996,15 +20974,15 @@ function PublisherFactory(_ref) {
|
|
|
20996
20974
|
prevOptions.videoSource = null;
|
|
20997
20975
|
prevOptions.audioSource = prevDeviceId;
|
|
20998
20976
|
if (!(!prevOptions.audioSource && prevLabel)) {
|
|
20999
|
-
|
|
20977
|
+
_context35.next = 60;
|
|
21000
20978
|
break;
|
|
21001
20979
|
}
|
|
21002
|
-
|
|
20980
|
+
_context35.next = 56;
|
|
21003
20981
|
return getInputMediaDevices();
|
|
21004
20982
|
case 56:
|
|
21005
|
-
previousDevice =
|
|
20983
|
+
previousDevice = _context35.sent.find(x => x.label === prevLabel);
|
|
21006
20984
|
if (!currentCancelSentinel.isCanceled()) {
|
|
21007
|
-
|
|
20985
|
+
_context35.next = 59;
|
|
21008
20986
|
break;
|
|
21009
20987
|
}
|
|
21010
20988
|
throw setAudioSourceCancellationError();
|
|
@@ -21014,39 +20992,39 @@ function PublisherFactory(_ref) {
|
|
|
21014
20992
|
}
|
|
21015
20993
|
case 60:
|
|
21016
20994
|
if (prevOptions.audioSource) {
|
|
21017
|
-
|
|
20995
|
+
_context35.next = 63;
|
|
21018
20996
|
break;
|
|
21019
20997
|
}
|
|
21020
|
-
|
|
21021
|
-
throw otError(Errors.NOT_FOUND,
|
|
20998
|
+
_context35.t3.message += ' (could not determine previous audio device)';
|
|
20999
|
+
throw otError(Errors.NOT_FOUND, _context35.t3);
|
|
21022
21000
|
case 63:
|
|
21023
21001
|
processedOptions = processPubOptions(prevOptions, 'OT.Publisher.setAudioSource', () => currentCancelSentinel.isCanceled() || state && state.isDestroyed());
|
|
21024
|
-
|
|
21002
|
+
_context35.next = 66;
|
|
21025
21003
|
return processedOptions.getUserMedia().catch(error => {
|
|
21026
21004
|
// eslint-disable-next-line no-param-reassign
|
|
21027
21005
|
error.message += ' (could not obtain previous audio device)';
|
|
21028
21006
|
throw error;
|
|
21029
21007
|
});
|
|
21030
21008
|
case 66:
|
|
21031
|
-
stream =
|
|
21032
|
-
|
|
21009
|
+
stream = _context35.sent;
|
|
21010
|
+
_context35.next = 69;
|
|
21033
21011
|
return setStreamIfNotCancelled(stream);
|
|
21034
21012
|
case 69:
|
|
21035
|
-
|
|
21036
|
-
throw
|
|
21013
|
+
_context35.t3.message += ' (reverted to previous audio device)';
|
|
21014
|
+
throw _context35.t3;
|
|
21037
21015
|
case 71:
|
|
21038
|
-
|
|
21016
|
+
_context35.next = 74;
|
|
21039
21017
|
break;
|
|
21040
21018
|
case 73:
|
|
21041
21019
|
throw otError(Errors.INVALID_PARAMETER, new Error('Invalid parameter passed to OT.Publisher.setAudioSource(). Expected string or MediaStreamTrack.'));
|
|
21042
21020
|
case 74:
|
|
21043
21021
|
case "end":
|
|
21044
|
-
return
|
|
21022
|
+
return _context35.stop();
|
|
21045
21023
|
}
|
|
21046
|
-
},
|
|
21024
|
+
}, _callee35, null, [[11, 20], [36, 46]]);
|
|
21047
21025
|
}));
|
|
21048
21026
|
return function setAudioSource(_x27) {
|
|
21049
|
-
return
|
|
21027
|
+
return _ref46.apply(this, arguments);
|
|
21050
21028
|
};
|
|
21051
21029
|
}();
|
|
21052
21030
|
this.setAudioSource = setAudioSource;
|
|
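Publisher#setAudioSource, assigned just above, accepts either a device id string or a MediaStreamTrack and rejects with INVALID_PARAMETER otherwise; on failure it attempts to revert to the previous audio device, as the hunks above show. A minimal usage sketch; the helper name and device-picking logic are illustrative:

// Minimal sketch: switch the published microphone to another audio input.
function useAudioDevice(publisher, deviceId) {
  return publisher.setAudioSource(deviceId)
    .then(function () {
      // getAudioSource() returns the active audio MediaStreamTrack.
      console.log('Audio source changed to', publisher.getAudioSource().label);
    })
    .catch(function (err) {
      console.error('setAudioSource failed:', err.name, err.message);
    });
}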
@@ -21109,21 +21087,21 @@ function PublisherFactory(_ref) {
|
|
|
21109
21087
|
* completes successfully. If there is an error, the promise is rejected.
|
|
21110
21088
|
*/
|
|
21111
21089
|
const setVideoSource = /*#__PURE__*/function () {
|
|
21112
|
-
var _setVideoSource = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
21090
|
+
var _setVideoSource = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee36(videoSourceId) {
|
|
21113
21091
|
var invalidVideoSourceOtError, setVideoSourceOtError, isAudioOnly, deviceList, isValidVideoDeviceId;
|
|
21114
|
-
return _regenerator.default.wrap(function
|
|
21115
|
-
while (1) switch (
|
|
21092
|
+
return _regenerator.default.wrap(function _callee36$(_context36) {
|
|
21093
|
+
while (1) switch (_context36.prev = _context36.next) {
|
|
21116
21094
|
case 0:
|
|
21117
21095
|
invalidVideoSourceOtError = otError(Errors.INVALID_VIDEO_SOURCE, new Error('Invalid video source. Video source must be a valid video input deviceId'), 1041);
|
|
21118
21096
|
setVideoSourceOtError = otError(Errors.SET_VIDEO_SOURCE_FAILURE, new Error('You cannot reset the video source on a publisher that does not currently use a camera source.'), 1040);
|
|
21119
21097
|
if (!(OTHelpers.env.isLegacyEdge || !windowMock.RTCRtpSender || typeof windowMock.RTCRtpSender.prototype.replaceTrack !== 'function')) {
|
|
21120
|
-
|
|
21098
|
+
_context36.next = 4;
|
|
21121
21099
|
break;
|
|
21122
21100
|
}
|
|
21123
21101
|
throw otError(Errors.UNSUPPORTED_BROWSER, new Error('setVideoSource is not supported in your browser.'), ExceptionCodes.UNABLE_TO_PUBLISH);
|
|
21124
21102
|
case 4:
|
|
21125
21103
|
if (!(typeof videoSourceId !== 'string')) {
|
|
21126
|
-
|
|
21104
|
+
_context36.next = 6;
|
|
21127
21105
|
break;
|
|
21128
21106
|
}
|
|
21129
21107
|
throw invalidVideoSourceOtError;
|
|
@@ -21131,29 +21109,29 @@ function PublisherFactory(_ref) {
|
|
|
21131
21109
|
// we can't use hasVideo because that only checks if the video is
|
|
21132
21110
|
isAudioOnly = !webRTCStream || webRTCStream.getVideoTracks().length === 0;
|
|
21133
21111
|
if (!(isCustomVideoTrack || isScreenSharing || isAudioOnly)) {
|
|
21134
|
-
|
|
21112
|
+
_context36.next = 9;
|
|
21135
21113
|
break;
|
|
21136
21114
|
}
|
|
21137
21115
|
throw setVideoSourceOtError;
|
|
21138
21116
|
case 9:
|
|
21139
|
-
|
|
21117
|
+
_context36.next = 11;
|
|
21140
21118
|
return getInputMediaDevices();
|
|
21141
21119
|
case 11:
|
|
21142
|
-
deviceList =
|
|
21120
|
+
deviceList = _context36.sent;
|
|
21143
21121
|
isValidVideoDeviceId = deviceList.find(device => device.kind === 'videoInput' && device.deviceId === videoSourceId);
|
|
21144
21122
|
if (isValidVideoDeviceId) {
|
|
21145
|
-
|
|
21123
|
+
_context36.next = 15;
|
|
21146
21124
|
break;
|
|
21147
21125
|
}
|
|
21148
21126
|
throw invalidVideoSourceOtError;
|
|
21149
21127
|
case 15:
|
|
21150
|
-
|
|
21128
|
+
_context36.next = 17;
|
|
21151
21129
|
return attemptToSetVideoTrack(videoSourceId);
|
|
21152
21130
|
case 17:
|
|
21153
21131
|
case "end":
|
|
21154
|
-
return
|
|
21132
|
+
return _context36.stop();
|
|
21155
21133
|
}
|
|
21156
|
-
},
|
|
21134
|
+
}, _callee36);
|
|
21157
21135
|
}));
|
|
21158
21136
|
function setVideoSource(_x29) {
|
|
21159
21137
|
return _setVideoSource.apply(this, arguments);
|
|
@@ -21162,39 +21140,39 @@ function PublisherFactory(_ref) {
|
|
|
21162
21140
|
}();
|
|
21163
21141
|
this.setVideoSource = setVideoSource;
|
|
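setVideoSource, assigned just above, validates that the argument is the deviceId of a videoInput device and that the publisher currently uses a camera source. A minimal usage sketch; the wrapper name is illustrative:

// Minimal sketch: move the publisher onto a specific camera by deviceId.
// setVideoSource rejects with INVALID_VIDEO_SOURCE for unknown ids and with
// SET_VIDEO_SOURCE_FAILURE when the publisher is not using a camera source.
async function useCamera(publisher, videoSourceId) {
  try {
    await publisher.setVideoSource(videoSourceId);
    console.log('Video source is now', publisher.getVideoSource().deviceId);
  } catch (err) {
    console.error('setVideoSource failed:', err.name, err.message);
  }
}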
21164
21142
|
const attemptToSetVideoTrack = /*#__PURE__*/function () {
|
|
21165
|
-
var
|
|
21143
|
+
var _ref48 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee37(newVideoDeviceId) {
|
|
21166
21144
|
var _originalVideoTrack2;
|
|
21167
21145
|
var oldDeviceID, oldTrack, videoFilterToReapplyOnAndroid, newVideoTrack, originalVideoTrack;
|
|
21168
|
-
return _regenerator.default.wrap(function
|
|
21169
|
-
while (1) switch (
|
|
21146
|
+
return _regenerator.default.wrap(function _callee37$(_context37) {
|
|
21147
|
+
while (1) switch (_context37.prev = _context37.next) {
|
|
21170
21148
|
case 0:
|
|
21171
21149
|
oldDeviceID = currentDeviceId;
|
|
21172
21150
|
currentDeviceId = newVideoDeviceId;
|
|
21173
21151
|
|
|
21174
21152
|
// We shouldn't replace the track unless the video is on
|
|
21175
21153
|
if (properties.publishVideo) {
|
|
21176
|
-
|
|
21154
|
+
_context37.next = 4;
|
|
21177
21155
|
break;
|
|
21178
21156
|
}
|
|
21179
|
-
return
|
|
21157
|
+
return _context37.abrupt("return");
|
|
21180
21158
|
case 4:
|
|
21181
21159
|
if (!hasTrackFromDevice(newVideoDeviceId)) {
|
|
21182
|
-
|
|
21160
|
+
_context37.next = 6;
|
|
21183
21161
|
break;
|
|
21184
21162
|
}
|
|
21185
|
-
return
|
|
21163
|
+
return _context37.abrupt("return");
|
|
21186
21164
|
case 6:
|
|
21187
21165
|
oldTrack = getCurrentTrack();
|
|
21188
21166
|
if (!(properties.publishVideo && OTHelpers.env.isAndroid && (OTHelpers.env.isChrome || OTHelpers.env.isFirefox))) {
|
|
21189
|
-
|
|
21167
|
+
_context37.next = 14;
|
|
21190
21168
|
break;
|
|
21191
21169
|
}
|
|
21192
21170
|
if (!currentVideoFilter) {
|
|
21193
|
-
|
|
21171
|
+
_context37.next = 13;
|
|
21194
21172
|
break;
|
|
21195
21173
|
}
|
|
21196
21174
|
videoFilterToReapplyOnAndroid = currentVideoFilter;
|
|
21197
|
-
|
|
21175
|
+
_context37.next = 12;
|
|
21198
21176
|
return destroyMediaProcessor();
|
|
21199
21177
|
case 12:
|
|
21200
21178
|
currentVideoFilter = null;
|
|
@@ -21203,55 +21181,55 @@ function PublisherFactory(_ref) {
|
|
|
21203
21181
|
oldTrack.stop();
|
|
21204
21182
|
}
|
|
21205
21183
|
case 14:
|
|
21206
|
-
|
|
21207
|
-
|
|
21184
|
+
_context37.prev = 14;
|
|
21185
|
+
_context37.next = 17;
|
|
21208
21186
|
return getTrackFromDeviceId(newVideoDeviceId);
|
|
21209
21187
|
case 17:
|
|
21210
|
-
newVideoTrack =
|
|
21211
|
-
|
|
21188
|
+
newVideoTrack = _context37.sent;
|
|
21189
|
+
_context37.next = 25;
|
|
21212
21190
|
break;
|
|
21213
21191
|
case 20:
|
|
21214
|
-
|
|
21215
|
-
|
|
21192
|
+
_context37.prev = 20;
|
|
21193
|
+
_context37.t0 = _context37["catch"](14);
|
|
21216
21194
|
currentDeviceId = oldDeviceID;
|
|
21217
|
-
logging.error(
|
|
21218
|
-
throw
|
|
21195
|
+
logging.error(_context37.t0);
|
|
21196
|
+
throw _context37.t0;
|
|
21219
21197
|
case 25:
|
|
21220
21198
|
if (newVideoTrack) {
|
|
21221
|
-
|
|
21199
|
+
_context37.next = 28;
|
|
21222
21200
|
break;
|
|
21223
21201
|
}
|
|
21224
21202
|
logging.warn('Unable to aquire video track. Moving to next device.');
|
|
21225
|
-
return
|
|
21203
|
+
return _context37.abrupt("return");
|
|
21226
21204
|
case 28:
|
|
21227
21205
|
if (!currentVideoFilter) {
|
|
21228
|
-
|
|
21206
|
+
_context37.next = 33;
|
|
21229
21207
|
break;
|
|
21230
21208
|
}
|
|
21231
21209
|
originalVideoTrack = mediaProcessor.getOriginalVideoTrack();
|
|
21232
|
-
|
|
21210
|
+
_context37.next = 32;
|
|
21233
21211
|
return mediaProcessor.setVideoTrack(newVideoTrack);
|
|
21234
21212
|
case 32:
|
|
21235
|
-
newVideoTrack =
|
|
21213
|
+
newVideoTrack = _context37.sent;
|
|
21236
21214
|
case 33:
|
|
21237
21215
|
if (!_videoMediaProcessorConnector) {
|
|
21238
|
-
|
|
21216
|
+
_context37.next = 38;
|
|
21239
21217
|
break;
|
|
21240
21218
|
}
|
|
21241
21219
|
originalVideoTrack = _videoMediaProcessorConnector.originalTrack;
|
|
21242
|
-
|
|
21220
|
+
_context37.next = 37;
|
|
21243
21221
|
return _videoMediaProcessorConnector.setTrack(newVideoTrack);
|
|
21244
21222
|
case 37:
|
|
21245
|
-
newVideoTrack =
|
|
21223
|
+
newVideoTrack = _context37.sent;
|
|
21246
21224
|
case 38:
|
|
21247
|
-
|
|
21225
|
+
_context37.next = 40;
|
|
21248
21226
|
return replaceTrackAndUpdate(oldTrack, newVideoTrack);
|
|
21249
21227
|
case 40:
|
|
21250
21228
|
if (!videoFilterToReapplyOnAndroid) {
|
|
21251
|
-
|
|
21229
|
+
_context37.next = 43;
|
|
21252
21230
|
break;
|
|
21253
21231
|
}
|
|
21254
|
-
|
|
21232
|
+
_context37.next = 43;
|
|
21255
21233
|
return _this.applyVideoFilter(videoFilterToReapplyOnAndroid);
|
|
21256
21234
|
case 43:
|
|
21257
21235
|
// We stop the original track as a final step because whatever effects
|
|
@@ -21263,12 +21241,12 @@ function PublisherFactory(_ref) {
|
|
|
21263
21241
|
}
|
|
21264
21242
|
case 45:
|
|
21265
21243
|
case "end":
|
|
21266
|
-
return
|
|
21244
|
+
return _context37.stop();
|
|
21267
21245
|
}
|
|
21268
|
-
},
|
|
21246
|
+
}, _callee37, null, [[14, 20]]);
|
|
21269
21247
|
}));
|
|
21270
21248
|
return function attemptToSetVideoTrack(_x30) {
|
|
21271
|
-
return
|
|
21249
|
+
return _ref48.apply(this, arguments);
|
|
21272
21250
|
};
|
|
21273
21251
|
}();
|
|
21274
21252
|
|
|
@@ -21318,21 +21296,21 @@ function PublisherFactory(_ref) {
|
|
|
21318
21296
|
this._ = {
|
|
21319
21297
|
privateEvents,
|
|
21320
21298
|
setIceConfig(newIceConfig) {
|
|
21321
|
-
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
21299
|
+
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee38() {
|
|
21322
21300
|
var pcs;
|
|
21323
|
-
return _regenerator.default.wrap(function
|
|
21324
|
-
while (1) switch (
|
|
21301
|
+
return _regenerator.default.wrap(function _callee38$(_context38) {
|
|
21302
|
+
while (1) switch (_context38.prev = _context38.next) {
|
|
21325
21303
|
case 0:
|
|
21326
|
-
|
|
21304
|
+
_context38.next = 2;
|
|
21327
21305
|
return getAllPeerConnections();
|
|
21328
21306
|
case 2:
|
|
21329
|
-
pcs =
|
|
21307
|
+
pcs = _context38.sent;
|
|
21330
21308
|
pcs.forEach(pc => pc.setIceConfig(newIceConfig));
|
|
21331
21309
|
case 4:
|
|
21332
21310
|
case "end":
|
|
21333
|
-
return
|
|
21311
|
+
return _context38.stop();
|
|
21334
21312
|
}
|
|
21335
|
-
},
|
|
21313
|
+
}, _callee38);
|
|
21336
21314
|
}))();
|
|
21337
21315
|
},
|
|
21338
21316
|
publishToSession: (session, analyticsReplacement) => {
|
|
@@ -21362,8 +21340,8 @@ function PublisherFactory(_ref) {
 return;
 }
 this.once('initSuccess', resolve);
-this.once('destroyed',
-let reason =
+this.once('destroyed', _ref49 => {
+let reason = _ref49.reason;
 let reasonDescription = '';
 if (reason) {
 reasonDescription = ` Reason: ${reason}`;
@@ -21606,55 +21584,55 @@ function PublisherFactory(_ref) {
|
|
|
21606
21584
|
return webRTCStream;
|
|
21607
21585
|
},
|
|
21608
21586
|
switchTracks() {
|
|
21609
|
-
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
21587
|
+
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee39() {
|
|
21610
21588
|
var stream;
|
|
21611
|
-
return _regenerator.default.wrap(function
|
|
21612
|
-
while (1) switch (
|
|
21589
|
+
return _regenerator.default.wrap(function _callee39$(_context39) {
|
|
21590
|
+
while (1) switch (_context39.prev = _context39.next) {
|
|
21613
21591
|
case 0:
|
|
21614
|
-
|
|
21615
|
-
|
|
21592
|
+
_context39.prev = 0;
|
|
21593
|
+
_context39.next = 3;
|
|
21616
21594
|
return getUserMedia().catch(userMediaError);
|
|
21617
21595
|
case 3:
|
|
21618
|
-
stream =
|
|
21619
|
-
|
|
21596
|
+
stream = _context39.sent;
|
|
21597
|
+
_context39.next = 10;
|
|
21620
21598
|
break;
|
|
21621
21599
|
case 6:
|
|
21622
|
-
|
|
21623
|
-
|
|
21624
|
-
logging.error(`OT.Publisher.switchTracks failed to getUserMedia: ${
|
|
21625
|
-
throw
|
|
21600
|
+
_context39.prev = 6;
|
|
21601
|
+
_context39.t0 = _context39["catch"](0);
|
|
21602
|
+
logging.error(`OT.Publisher.switchTracks failed to getUserMedia: ${_context39.t0}`);
|
|
21603
|
+
throw _context39.t0;
|
|
21626
21604
|
case 10:
|
|
21627
21605
|
setNewStream(stream);
|
|
21628
|
-
|
|
21606
|
+
_context39.prev = 11;
|
|
21629
21607
|
bindVideo();
|
|
21630
|
-
|
|
21608
|
+
_context39.next = 21;
|
|
21631
21609
|
break;
|
|
21632
21610
|
case 15:
|
|
21633
|
-
|
|
21634
|
-
|
|
21635
|
-
if (!(
|
|
21636
|
-
|
|
21611
|
+
_context39.prev = 15;
|
|
21612
|
+
_context39.t1 = _context39["catch"](11);
|
|
21613
|
+
if (!(_context39.t1 instanceof _cancel.CancellationError)) {
|
|
21614
|
+
_context39.next = 19;
|
|
21637
21615
|
break;
|
|
21638
21616
|
}
|
|
21639
|
-
return
|
|
21617
|
+
return _context39.abrupt("return");
|
|
21640
21618
|
case 19:
|
|
21641
|
-
logging.error('Error while binding video',
|
|
21642
|
-
throw
|
|
21619
|
+
logging.error('Error while binding video', _context39.t1);
|
|
21620
|
+
throw _context39.t1;
|
|
21643
21621
|
case 21:
|
|
21644
|
-
|
|
21622
|
+
_context39.prev = 21;
|
|
21645
21623
|
replaceTracks();
|
|
21646
|
-
|
|
21624
|
+
_context39.next = 29;
|
|
21647
21625
|
break;
|
|
21648
21626
|
case 25:
|
|
21649
|
-
|
|
21650
|
-
|
|
21651
|
-
logging.error('Error replacing tracks',
|
|
21652
|
-
throw
|
|
21627
|
+
_context39.prev = 25;
|
|
21628
|
+
_context39.t2 = _context39["catch"](21);
|
|
21629
|
+
logging.error('Error replacing tracks', _context39.t2);
|
|
21630
|
+
throw _context39.t2;
|
|
21653
21631
|
case 29:
|
|
21654
21632
|
case "end":
|
|
21655
|
-
return
|
|
21633
|
+
return _context39.stop();
|
|
21656
21634
|
}
|
|
21657
|
-
},
|
|
21635
|
+
}, _callee39, null, [[0, 6], [11, 15], [21, 25]]);
|
|
21658
21636
|
}))();
|
|
21659
21637
|
},
|
|
21660
21638
|
getDataChannel(label, getOptions, completion) {
|
|
@@ -21676,8 +21654,8 @@ function PublisherFactory(_ref) {
 }
 getAllPeerConnections().then(peerConnections => {
 peerConnections.forEach(peerConnection => {
-const
-remoteConnectionId =
+const _getPeerConnectionMet7 = getPeerConnectionMeta(peerConnection),
+remoteConnectionId = _getPeerConnectionMet7.remoteConnectionId;
 logRepublish('Attempt', {
 remoteConnectionId
 });
@@ -21694,27 +21672,27 @@ function PublisherFactory(_ref) {
|
|
|
21694
21672
|
},
|
|
21695
21673
|
demoOnlyCycleVideo: this.cycleVideo,
|
|
21696
21674
|
testOnlyGetFramesEncoded() {
|
|
21697
|
-
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
21675
|
+
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee40() {
|
|
21698
21676
|
var peerConnections;
|
|
21699
|
-
return _regenerator.default.wrap(function
|
|
21700
|
-
while (1) switch (
|
|
21677
|
+
return _regenerator.default.wrap(function _callee40$(_context40) {
|
|
21678
|
+
while (1) switch (_context40.prev = _context40.next) {
|
|
21701
21679
|
case 0:
|
|
21702
|
-
|
|
21680
|
+
_context40.next = 2;
|
|
21703
21681
|
return getAllPeerConnections();
|
|
21704
21682
|
case 2:
|
|
21705
|
-
peerConnections =
|
|
21683
|
+
peerConnections = _context40.sent;
|
|
21706
21684
|
if (peerConnections.length) {
|
|
21707
|
-
|
|
21685
|
+
_context40.next = 5;
|
|
21708
21686
|
break;
|
|
21709
21687
|
}
|
|
21710
21688
|
throw new Error('No established PeerConnections yet');
|
|
21711
21689
|
case 5:
|
|
21712
|
-
return
|
|
21690
|
+
return _context40.abrupt("return", peerConnections[0]._testOnlyGetFramesEncoded());
|
|
21713
21691
|
case 6:
|
|
21714
21692
|
case "end":
|
|
21715
|
-
return
|
|
21693
|
+
return _context40.stop();
|
|
21716
21694
|
}
|
|
21717
|
-
},
|
|
21695
|
+
}, _callee40);
|
|
21718
21696
|
}))();
|
|
21719
21697
|
},
|
|
21720
21698
|
onStreamAvailable,
|
|
@@ -21743,45 +21721,45 @@ function PublisherFactory(_ref) {
|
|
|
21743
21721
|
}
|
|
21744
21722
|
}.bind(this),
|
|
21745
21723
|
setCongestionLevel: function () {
|
|
21746
|
-
var _setCongestionLevel = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
21724
|
+
var _setCongestionLevel = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee41(level, peerId) {
|
|
21747
21725
|
var pcs;
|
|
21748
|
-
return _regenerator.default.wrap(function
|
|
21749
|
-
while (1) switch (
|
|
21726
|
+
return _regenerator.default.wrap(function _callee41$(_context41) {
|
|
21727
|
+
while (1) switch (_context41.prev = _context41.next) {
|
|
21750
21728
|
case 0:
|
|
21751
21729
|
if (!audioFallbackCoordinator) {
|
|
21752
|
-
|
|
21730
|
+
_context41.next = 15;
|
|
21753
21731
|
break;
|
|
21754
21732
|
}
|
|
21755
21733
|
if (!peerId) {
|
|
21756
|
-
|
|
21734
|
+
_context41.next = 8;
|
|
21757
21735
|
break;
|
|
21758
21736
|
}
|
|
21759
|
-
|
|
21737
|
+
_context41.next = 4;
|
|
21760
21738
|
return getPeerConnectionById(peerId);
|
|
21761
21739
|
case 4:
|
|
21762
|
-
|
|
21763
|
-
|
|
21764
|
-
|
|
21740
|
+
_context41.t1 = _context41.sent;
|
|
21741
|
+
_context41.t0 = [_context41.t1];
|
|
21742
|
+
_context41.next = 11;
|
|
21765
21743
|
break;
|
|
21766
21744
|
case 8:
|
|
21767
|
-
|
|
21745
|
+
_context41.next = 10;
|
|
21768
21746
|
return getAllPeerConnections();
|
|
21769
21747
|
case 10:
|
|
21770
|
-
|
|
21748
|
+
_context41.t0 = _context41.sent;
|
|
21771
21749
|
case 11:
|
|
21772
|
-
pcs =
|
|
21750
|
+
pcs = _context41.t0;
|
|
21773
21751
|
pcs.forEach(pc => {
|
|
21774
21752
|
pc.setCongestionLevel(level);
|
|
21775
21753
|
});
|
|
21776
|
-
|
|
21754
|
+
_context41.next = 16;
|
|
21777
21755
|
break;
|
|
21778
21756
|
case 15:
|
|
21779
21757
|
logging.warn('Audio Fallback is not enabled');
|
|
21780
21758
|
case 16:
|
|
21781
21759
|
case "end":
|
|
21782
|
-
return
|
|
21760
|
+
return _context41.stop();
|
|
21783
21761
|
}
|
|
21784
|
-
},
|
|
21762
|
+
}, _callee41);
|
|
21785
21763
|
}));
|
|
21786
21764
|
function setCongestionLevel(_x31, _x32) {
|
|
21787
21765
|
return _setCongestionLevel.apply(this, arguments);
|
|
@@ -22120,37 +22098,37 @@ function PublisherFactory(_ref) {
|
|
|
22120
22098
|
* If there is an error, the promise is rejected and no new video filter is set.
|
|
22121
22099
|
*/
|
|
22122
22100
|
this.applyVideoFilter = /*#__PURE__*/function () {
|
|
22123
|
-
var
|
|
22101
|
+
var _ref50 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee42(videoFilter) {
|
|
22124
22102
|
var sanitizedVideoFilter, isSupported, message, _webRTCStream$getVide2, originalVideoTrack, filteredVideoTrack;
|
|
22125
|
-
return _regenerator.default.wrap(function
|
|
22126
|
-
while (1) switch (
|
|
22103
|
+
return _regenerator.default.wrap(function _callee42$(_context42) {
|
|
22104
|
+
while (1) switch (_context42.prev = _context42.next) {
|
|
22127
22105
|
case 0:
|
|
22128
22106
|
sanitizedVideoFilter = (0, _sanitizeVideoFilter.default)(videoFilter);
|
|
22129
22107
|
logAnalyticsEvent('applyVideoFilter', 'Attempt', {
|
|
22130
22108
|
videoFilter: sanitizedVideoFilter
|
|
22131
22109
|
});
|
|
22132
|
-
|
|
22110
|
+
_context42.prev = 2;
|
|
22133
22111
|
if (!_videoMediaProcessorConnector) {
|
|
22134
|
-
|
|
22112
|
+
_context42.next = 5;
|
|
22135
22113
|
break;
|
|
22136
22114
|
}
|
|
22137
22115
|
throw otError(Errors.NOT_SUPPORTED, new Error('Cannot apply video filter when videoMediaProcessorConnector is set.'));
|
|
22138
22116
|
case 5:
|
|
22139
22117
|
isSupported = MediaProcessor.isSupported();
|
|
22140
22118
|
if (isSupported) {
|
|
22141
|
-
|
|
22119
|
+
_context42.next = 8;
|
|
22142
22120
|
break;
|
|
22143
22121
|
}
|
|
22144
22122
|
throw otError(Errors.NOT_SUPPORTED, new Error('Browser does not support video filters (Insertable Streams and Worker APIs are required)'));
|
|
22145
22123
|
case 8:
|
|
22146
22124
|
if (mediaProcessor.isValidVideoFilter(videoFilter)) {
|
|
22147
|
-
|
|
22125
|
+
_context42.next = 10;
|
|
22148
22126
|
break;
|
|
22149
22127
|
}
|
|
22150
22128
|
throw otError(Errors.INVALID_PARAMETER, new Error('Video filter has invalid configuration'));
|
|
22151
22129
|
case 10:
|
|
22152
22130
|
if (webRTCStream) {
|
|
22153
|
-
|
|
22131
|
+
_context42.next = 15;
|
|
22154
22132
|
break;
|
|
22155
22133
|
}
|
|
22156
22134
|
message = 'Ignoring. No mediaStream';
|
|
@@ -22158,53 +22136,53 @@ function PublisherFactory(_ref) {
|
|
|
22158
22136
|
message
|
|
22159
22137
|
});
|
|
22160
22138
|
logging.warn(message);
|
|
22161
|
-
return
|
|
22139
|
+
return _context42.abrupt("return");
|
|
22162
22140
|
case 15:
|
|
22163
22141
|
if (!isScreenSharing) {
|
|
22164
|
-
|
|
22142
|
+
_context42.next = 17;
|
|
22165
22143
|
break;
|
|
22166
22144
|
}
|
|
22167
22145
|
throw otError(Errors.INVALID_PARAMETER, new Error('Video filters can not be applied to screen share'));
|
|
22168
22146
|
case 17:
|
|
22169
22147
|
enableMediaProcessorLogging();
|
|
22170
22148
|
if (properties.publishVideo) {
|
|
22171
|
-
|
|
22149
|
+
_context42.next = 22;
|
|
22172
22150
|
break;
|
|
22173
22151
|
}
|
|
22174
22152
|
currentVideoFilter = videoFilter;
|
|
22175
22153
|
logAnalyticsEvent('applyVideoFilter', 'Success', {
|
|
22176
22154
|
videoFilter: sanitizedVideoFilter
|
|
22177
22155
|
});
|
|
22178
|
-
return
|
|
22156
|
+
return _context42.abrupt("return");
|
|
22179
22157
|
case 22:
|
|
22180
22158
|
if (!mediaProcessor.getVideoFilter()) {
|
|
22181
|
-
|
|
22159
|
+
_context42.next = 37;
|
|
22182
22160
|
break;
|
|
22183
22161
|
}
|
|
22184
22162
|
if (!mediaProcessor.canUpdateVideoFilter(videoFilter.type)) {
|
|
22185
|
-
|
|
22163
|
+
_context42.next = 35;
|
|
22186
22164
|
break;
|
|
22187
22165
|
}
|
|
22188
|
-
|
|
22189
|
-
|
|
22166
|
+
_context42.prev = 24;
|
|
22167
|
+
_context42.next = 27;
|
|
22190
22168
|
return mediaProcessor.updateVideoFilter(videoFilter);
|
|
22191
22169
|
case 27:
|
|
22192
22170
|
currentVideoFilter = videoFilter;
|
|
22193
22171
|
logAnalyticsEvent('applyVideoFilter', 'Success', {
|
|
22194
22172
|
videoFilter: sanitizedVideoFilter
|
|
22195
22173
|
});
|
|
22196
|
-
return
|
|
22174
|
+
return _context42.abrupt("return");
|
|
22197
22175
|
case 32:
|
|
22198
|
-
|
|
22199
|
-
|
|
22200
|
-
logging.warn(`Error updating video filter: ${
|
|
22176
|
+
_context42.prev = 32;
|
|
22177
|
+
_context42.t0 = _context42["catch"](24);
|
|
22178
|
+
logging.warn(`Error updating video filter: ${_context42.t0}`);
|
|
22201
22179
|
case 35:
|
|
22202
|
-
|
|
22180
|
+
_context42.next = 37;
|
|
22203
22181
|
return _this.clearVideoFilter();
|
|
22204
22182
|
case 37:
|
|
22205
22183
|
_webRTCStream$getVide2 = webRTCStream.getVideoTracks(), originalVideoTrack = _webRTCStream$getVide2[0];
|
|
22206
22184
|
if (originalVideoTrack) {
|
|
22207
|
-
|
|
22185
|
+
_context42.next = 43;
|
|
22208
22186
|
break;
|
|
22209
22187
|
}
|
|
22210
22188
|
message = 'Ignoring. No video';
|
|
@@ -22212,32 +22190,32 @@ function PublisherFactory(_ref) {
|
|
|
22212
22190
|
message
|
|
22213
22191
|
});
|
|
22214
22192
|
logging.warn(message);
|
|
22215
|
-
return
|
|
22193
|
+
return _context42.abrupt("return");
|
|
22216
22194
|
case 43:
|
|
22217
|
-
|
|
22195
|
+
_context42.next = 45;
|
|
22218
22196
|
return mediaProcessor.setVideoFilter(videoFilter);
|
|
22219
22197
|
case 45:
|
|
22220
|
-
|
|
22198
|
+
_context42.next = 47;
|
|
22221
22199
|
return mediaProcessor.setMediaStream(webRTCStream);
|
|
22222
22200
|
case 47:
|
|
22223
|
-
filteredVideoTrack =
|
|
22201
|
+
filteredVideoTrack = _context42.sent;
|
|
22224
22202
|
if (!filteredVideoTrack) {
|
|
22225
|
-
|
|
22203
|
+
_context42.next = 51;
|
|
22226
22204
|
break;
|
|
22227
22205
|
}
|
|
22228
|
-
|
|
22206
|
+
_context42.next = 51;
|
|
22229
22207
|
return replaceTrackAndUpdate(originalVideoTrack, filteredVideoTrack);
|
|
22230
22208
|
case 51:
|
|
22231
|
-
|
|
22209
|
+
_context42.next = 58;
|
|
22232
22210
|
break;
|
|
22233
22211
|
case 53:
|
|
22234
|
-
|
|
22235
|
-
|
|
22236
|
-
logging.error(`Error applying video filter: ${
|
|
22212
|
+
_context42.prev = 53;
|
|
22213
|
+
_context42.t1 = _context42["catch"](2);
|
|
22214
|
+
logging.error(`Error applying video filter: ${_context42.t1}`);
|
|
22237
22215
|
logAnalyticsEvent('applyVideoFilter', 'Failure', {
|
|
22238
|
-
message:
|
|
22216
|
+
message: _context42.t1.message
|
|
22239
22217
|
});
|
|
22240
|
-
throw
|
|
22218
|
+
throw _context42.t1;
|
|
22241
22219
|
case 58:
|
|
22242
22220
|
currentVideoFilter = videoFilter;
|
|
22243
22221
|
logAnalyticsEvent('applyVideoFilter', 'Success', {
|
|
@@ -22245,12 +22223,12 @@ function PublisherFactory(_ref) {
|
|
|
22245
22223
|
});
|
|
22246
22224
|
case 60:
|
|
22247
22225
|
case "end":
|
|
22248
|
-
return
|
|
22226
|
+
return _context42.stop();
|
|
22249
22227
|
}
|
|
22250
|
-
},
|
|
22228
|
+
}, _callee42, null, [[2, 53], [24, 32]]);
|
|
22251
22229
|
}));
|
|
22252
22230
|
return function (_x33) {
|
|
22253
|
-
return
|
|
22231
|
+
return _ref50.apply(this, arguments);
|
|
22254
22232
|
};
|
|
22255
22233
|
}();
|
|
22256
22234
|
|
|
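applyVideoFilter, in the hunks above, requires Insertable Streams and Worker support, a camera (not screen-share) source, and a valid filter object. A minimal usage sketch; the filter values shown are the documented background-blur options and are used here only for illustration:

// Minimal sketch: apply and later clear a background-blur video filter.
async function blurBackground(publisher) {
  if (!OT.hasMediaProcessorSupport()) { // feature check exposed by opentok.js
    console.warn('Video filters are not supported in this browser');
    return;
  }
  await publisher.applyVideoFilter({ type: 'backgroundBlur', blurStrength: 'high' });
  // ... later, restore the unfiltered camera track
  await publisher.clearVideoFilter();
}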
@@ -22356,35 +22334,35 @@ function PublisherFactory(_ref) {
|
|
|
22356
22334
|
* If there is an error, the promise is rejected and no new video filter is set.
|
|
22357
22335
|
*/
|
|
22358
22336
|
this.applyAudioFilter = /*#__PURE__*/function () {
|
|
22359
|
-
var
|
|
22337
|
+
var _ref51 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee43(audioFilter) {
|
|
22360
22338
|
var isSupported, message, _getNoiseSuppressionC, assetsDirBaseUrl, vonageNoiseSuppression, noiseSuppressionConnector;
|
|
22361
|
-
return _regenerator.default.wrap(function
|
|
22362
|
-
while (1) switch (
|
|
22339
|
+
return _regenerator.default.wrap(function _callee43$(_context43) {
|
|
22340
|
+
while (1) switch (_context43.prev = _context43.next) {
|
|
22363
22341
|
case 0:
|
|
22364
22342
|
logAnalyticsEvent('applyAudioFilter', 'Attempt', {
|
|
22365
22343
|
audioFilter
|
|
22366
22344
|
});
|
|
22367
22345
|
isSupported = MediaProcessor.isSupported();
|
|
22368
22346
|
if (isSupported) {
|
|
22369
|
-
|
|
22347
|
+
_context43.next = 4;
|
|
22370
22348
|
break;
|
|
22371
22349
|
}
|
|
22372
22350
|
throw otError(Errors.NOT_SUPPORTED, new Error('Browser does not support audio filters (Insertable Streams and Worker APIs are required)'));
|
|
22373
22351
|
case 4:
|
|
22374
22352
|
if ((0, _isValidAudioFilter.default)(audioFilter)) {
|
|
22375
|
-
|
|
22353
|
+
_context43.next = 6;
|
|
22376
22354
|
break;
|
|
22377
22355
|
}
|
|
22378
22356
|
throw otError(Errors.INVALID_PARAMETER, new Error('Audio filter has invalid configuration'));
|
|
22379
22357
|
case 6:
|
|
22380
22358
|
if (!_audioMediaProcessorConnector) {
|
|
22381
|
-
|
|
22359
|
+
_context43.next = 8;
|
|
22382
22360
|
break;
|
|
22383
22361
|
}
|
|
22384
22362
|
throw otError(Errors.NOT_SUPPORTED, new Error('Cannot apply audio filter when audioMediaProcessorConnector is set.'));
|
|
22385
22363
|
case 8:
|
|
22386
22364
|
if (webRTCStream) {
|
|
22387
|
-
|
|
22365
|
+
_context43.next = 13;
|
|
22388
22366
|
break;
|
|
22389
22367
|
}
|
|
22390
22368
|
message = 'Ignoring. No mediaStream';
|
|
@@ -22392,68 +22370,68 @@ function PublisherFactory(_ref) {
 message
 });
 logging.warn(message);
- return
+ return _context43.abrupt("return");
 case 13:
 if (properties.publishAudio) {
-
+ _context43.next = 17;
 break;
 }
 currentAudioFilter = audioFilter;
 logAnalyticsEvent('applyAudioFilter', 'Success', {
 audioFilter
 });
- return
+ return _context43.abrupt("return");
 case 17:
 if (!_this.getAudioFilter()) {
-
+ _context43.next = 20;
 break;
 }
-
+ _context43.next = 20;
 return _this.clearAudioFilter();
 case 20:
-
+ _context43.prev = 20;
 if (!(audioFilter.type === 'advancedNoiseSuppression')) {
-
+ _context43.next = 32;
 break;
 }
 _getNoiseSuppressionC = (0, _getDefaultConfig.default)({
 proxyUrl: (0, _proxyUrl.getProxyUrl)()
 }), assetsDirBaseUrl = _getNoiseSuppressionC.assetsDirBaseUrl;
 vonageNoiseSuppression = createVonageNoiseSuppression();
-
+ _context43.next = 26;
 return vonageNoiseSuppression.init({
 assetsDirBaseUrl
 });
 case 26:
-
+ _context43.next = 28;
 return vonageNoiseSuppression.getConnector();
 case 28:
- noiseSuppressionConnector =
-
+ noiseSuppressionConnector = _context43.sent;
+ _context43.next = 31;
 return _setAudioMediaProcessorConnector(noiseSuppressionConnector);
 case 31:
 currentAudioFilter = audioFilter;
 case 32:
-
+ _context43.next = 39;
 break;
 case 34:
-
-
- logging.error(`Error applying audio filter: ${
+ _context43.prev = 34;
+ _context43.t0 = _context43["catch"](20);
+ logging.error(`Error applying audio filter: ${_context43.t0}`);
 logAnalyticsEvent('applyAudioFilter', 'Failure', {
- message:
+ message: _context43.t0.message
 });
- throw
+ throw _context43.t0;
 case 39:
 logAnalyticsEvent('applyAudioFilter', 'Success');
 case 40:
 case "end":
- return
+ return _context43.stop();
 }
- },
+ }, _callee43, null, [[20, 34]]);
 }));
 return function (_x34) {
- return
+ return _ref51.apply(this, arguments);
 };
 }();
 
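The regenerator states above are hard to scan, so here is a rough async/await rendering of the advancedNoiseSuppression branch (roughly cases 20-39 of _callee43). This is a hand-written sketch for orientation only, not code from the bundle; the helper names come from the hunk context and are injected as parameters so the sketch stands alone.

// Sketch only: approximates the control flow of the transpiled _callee43 states above.
async function applyAdvancedNoiseSuppression({
  audioFilter,                      // assumed to have type === 'advancedNoiseSuppression'
  getDefaultConfig,                 // (0, _getDefaultConfig.default) in the bundle
  getProxyUrl,                      // (0, _proxyUrl.getProxyUrl) in the bundle
  createVonageNoiseSuppression,
  setAudioMediaProcessorConnector,  // _setAudioMediaProcessorConnector in the bundle
  logAnalyticsEvent,
  logging
}) {
  let applied = null;
  try {
    // cases 20-31: initialise the Vonage noise suppression processor and plug in its connector
    const { assetsDirBaseUrl } = getDefaultConfig({ proxyUrl: getProxyUrl() });
    const vonageNoiseSuppression = createVonageNoiseSuppression();
    await vonageNoiseSuppression.init({ assetsDirBaseUrl });
    const noiseSuppressionConnector = await vonageNoiseSuppression.getConnector();
    await setAudioMediaProcessorConnector(noiseSuppressionConnector);
    applied = audioFilter; // becomes currentAudioFilter on success
  } catch (err) {
    // cases 34-38: log, report the failure and rethrow
    logging.error(`Error applying audio filter: ${err}`);
    logAnalyticsEvent('applyAudioFilter', 'Failure', { message: err.message });
    throw err;
  }
  logAnalyticsEvent('applyAudioFilter', 'Success'); // case 39
  return applied;
}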
@@ -22474,22 +22452,22 @@ function PublisherFactory(_ref) {
 * @return {Promise} A promise that resolves when the operation completes successfully.
 * If there is an error, the promise is rejected.
 */
- this.clearAudioFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ this.clearAudioFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee44() {
 var message;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee44$(_context44) {
+ while (1) switch (_context44.prev = _context44.next) {
 case 0:
 logAnalyticsEvent('clearAudioFilter', 'Attempt');
 if (!(!properties.publishAudio && _this.getAudioFilter())) {
-
+ _context44.next = 5;
 break;
 }
 currentAudioFilter = null;
 logAnalyticsEvent('clearAudioFilter', 'Success');
- return
+ return _context44.abrupt("return");
 case 5:
 if (MediaProcessor.isSupported()) {
-
+ _context44.next = 10;
 break;
 }
 message = 'Ignoring. "clearAudioFilter" not supported.';
@@ -22497,10 +22475,10 @@ function PublisherFactory(_ref) {
 message
 });
 logging.warn(message);
- return
+ return _context44.abrupt("return");
 case 10:
 if (_this.getAudioFilter()) {
-
+ _context44.next = 15;
 break;
 }
 message = 'Ignoring. No audio filter applied';
@@ -22508,10 +22486,10 @@ function PublisherFactory(_ref) {
 message
 });
 logging.debug(message);
- return
+ return _context44.abrupt("return");
 case 15:
 if (webRTCStream) {
-
+ _context44.next = 20;
 break;
 }
 message = 'Ignoring. No mediaStream';
@@ -22519,29 +22497,29 @@ function PublisherFactory(_ref) {
 message
 });
 logging.warn(message);
- return
+ return _context44.abrupt("return");
 case 20:
-
-
+ _context44.prev = 20;
+ _context44.next = 23;
 return _setAudioMediaProcessorConnector(null);
 case 23:
 currentAudioFilter = null;
-
+ _context44.next = 30;
 break;
 case 26:
-
-
+ _context44.prev = 26;
+ _context44.t0 = _context44["catch"](20);
 logAnalyticsEvent('clearAudioFilter', 'Failure', {
- error:
+ error: _context44.t0
 });
- return
+ return _context44.abrupt("return");
 case 30:
 logAnalyticsEvent('clearAudioFilter', 'Success');
 case 31:
 case "end":
- return
+ return _context44.stop();
 }
- },
+ }, _callee44, null, [[20, 26]]);
 }));
 
 /**
@@ -22636,16 +22614,16 @@ function PublisherFactory(_ref) {
 * If there is an error, the promise is rejected and no connector is set.
 */
 this.setVideoMediaProcessorConnector = /*#__PURE__*/function () {
- var
+ var _ref53 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee45(mediaProcessorConnector) {
 var _webRTCStream$getVide3, filteredVideoTrack, videoTrack, _webRTCStream$getVide4, originalVideoTrack, message;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee45$(_context45) {
+ while (1) switch (_context45.prev = _context45.next) {
 case 0:
 logAnalyticsEvent('setVideoMediaProcessorConnector', 'Attempt', {
 message: mediaProcessorConnector ? 'setting the connector' : 'clearing the connector'
 });
 if (!_this.getVideoFilter()) {
-
+ _context45.next = 4;
 break;
 }
 logAnalyticsEvent('setVideoMediaProcessorConnector', 'Failure', {
@@ -22654,15 +22632,15 @@ function PublisherFactory(_ref) {
 throw otError(Errors.NOT_SUPPORTED, new Error('setVideoMediaProcessorConnector: Cannot use this method when videoFilter is set.'));
 case 4:
 if (!_videoMediaProcessorConnector) {
-
+ _context45.next = 14;
 break;
 }
 _webRTCStream$getVide3 = webRTCStream.getVideoTracks(), filteredVideoTrack = _webRTCStream$getVide3[0];
-
+ _context45.next = 8;
 return getTrackFromDeviceId(currentDeviceId);
 case 8:
- videoTrack =
-
+ videoTrack = _context45.sent;
+ _context45.next = 11;
 return replaceTrackAndUpdate(filteredVideoTrack, videoTrack);
 case 11:
 _videoMediaProcessorConnector.destroy();
@@ -22670,16 +22648,16 @@ function PublisherFactory(_ref) {
 _videoMediaProcessorConnector = null;
 case 14:
 if (mediaProcessorConnector) {
-
+ _context45.next = 17;
 break;
 }
 logAnalyticsEvent('setVideoMediaProcessorConnector', 'Success', {
 message: 'clearing the connector'
 });
- return
+ return _context45.abrupt("return");
 case 17:
 if (MediaProcessorConnector.isValidConnector(mediaProcessorConnector)) {
-
+ _context45.next = 20;
 break;
 }
 logAnalyticsEvent('setVideoMediaProcessorConnector', 'Failure', {
@@ -22690,7 +22668,7 @@ function PublisherFactory(_ref) {
 _videoMediaProcessorConnector = new MediaProcessorConnector(mediaProcessorConnector);
 _webRTCStream$getVide4 = webRTCStream.getVideoTracks(), originalVideoTrack = _webRTCStream$getVide4[0];
 if (originalVideoTrack) {
-
+ _context45.next = 28;
 break;
 }
 message = 'Connector not set as no video track is present.';
@@ -22699,37 +22677,37 @@ function PublisherFactory(_ref) {
 });
 logging.warn(`setVideoMediaProcessorConnector: ${message}`);
 _videoMediaProcessorConnector = null;
- return
+ return _context45.abrupt("return");
 case 28:
-
-
+ _context45.prev = 28;
+ _context45.next = 31;
 return _videoMediaProcessorConnector.setTrack(originalVideoTrack);
 case 31:
- filteredVideoTrack =
-
+ filteredVideoTrack = _context45.sent;
+ _context45.next = 34;
 return replaceTrackAndUpdate(originalVideoTrack, filteredVideoTrack);
 case 34:
-
+ _context45.next = 42;
 break;
 case 36:
-
-
+ _context45.prev = 36;
+ _context45.t0 = _context45["catch"](28);
 _videoMediaProcessorConnector = null;
- logging.error(`setVideoMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${
+ logging.error(`setVideoMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${_context45.t0}`);
 logAnalyticsEvent('setVideoMediaProcessorConnector', 'Failure', {
- message:
+ message: _context45.t0.message
 });
- throw
+ throw _context45.t0;
 case 42:
 logAnalyticsEvent('setVideoMediaProcessorConnector', 'Success');
 case 43:
 case "end":
- return
+ return _context45.stop();
 }
- },
+ }, _callee45, null, [[28, 36]]);
 }));
 return function (_x35) {
- return
+ return _ref53.apply(this, arguments);
 };
 }();
 
@@ -22792,71 +22770,71 @@ function PublisherFactory(_ref) {
 * If there is an error, the promise is rejected and no connector is set.
 */
 this.setAudioMediaProcessorConnector = /*#__PURE__*/function () {
- var
- return _regenerator.default.wrap(function
- while (1) switch (
+ var _ref54 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee46(mediaProcessorConnector) {
+ return _regenerator.default.wrap(function _callee46$(_context46) {
+ while (1) switch (_context46.prev = _context46.next) {
 case 0:
 logAnalyticsEvent('setAudioMediaProcessorConnector', 'Attempt', {
 message: mediaProcessorConnector ? 'setting the connector' : 'clearing the connector'
 });
-
-
+ _context46.prev = 1;
+ _context46.next = 4;
 return _setAudioMediaProcessorConnector(mediaProcessorConnector);
 case 4:
 logAnalyticsEvent('setAudioMediaProcessorConnector', 'Success', {
 message: mediaProcessorConnector ? undefined : 'clearing the connector'
 });
-
+ _context46.next = 11;
 break;
 case 7:
-
-
+ _context46.prev = 7;
+ _context46.t0 = _context46["catch"](1);
 logAnalyticsEvent('setAudioMediaProcessorConnector', 'Failure', {
- message:
+ message: _context46.t0.message
 });
- throw
+ throw _context46.t0;
 case 11:
 case "end":
- return
+ return _context46.stop();
 }
- },
+ }, _callee46, null, [[1, 7]]);
 }));
 return function (_x36) {
- return
+ return _ref54.apply(this, arguments);
 };
 }();
 const _setAudioMediaProcessorConnector = /*#__PURE__*/function () {
- var
+ var _ref55 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee47(mediaProcessorConnector) {
 var message, _webRTCStream$getAudi, filteredAudioTrack, _webRTCStream$getAudi2, originalAudioTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee47$(_context47) {
+ while (1) switch (_context47.prev = _context47.next) {
 case 0:
 if (!(_this.getAudioFilter() && mediaProcessorConnector)) {
-
+ _context47.next = 3;
 break;
 }
 message = 'Tried to set audio MediaProcessorConnector when audio filter applied.';
 throw otError(Errors.NOT_SUPPORTED, new Error(message));
 case 3:
 if (!_audioMediaProcessorConnector) {
-
+ _context47.next = 9;
 break;
 }
 _webRTCStream$getAudi = webRTCStream.getAudioTracks(), filteredAudioTrack = _webRTCStream$getAudi[0];
-
+ _context47.next = 7;
 return replaceAudioTrack(filteredAudioTrack, _audioMediaProcessorConnector.originalTrack);
 case 7:
 _audioMediaProcessorConnector.destroy();
 _audioMediaProcessorConnector = null;
 case 9:
 if (mediaProcessorConnector) {
-
+ _context47.next = 11;
 break;
 }
- return
+ return _context47.abrupt("return");
 case 11:
 if (MediaProcessorConnector.isValidConnector(mediaProcessorConnector)) {
-
+ _context47.next = 14;
 break;
 }
 message = 'Invalid MediaProcessorConnector';
@@ -22864,7 +22842,7 @@ function PublisherFactory(_ref) {
 case 14:
 _webRTCStream$getAudi2 = webRTCStream.getAudioTracks(), originalAudioTrack = _webRTCStream$getAudi2[0];
 if (originalAudioTrack) {
-
+ _context47.next = 20;
 break;
 }
 message = 'Connector not set as no audio track is present.';
@@ -22873,30 +22851,30 @@ function PublisherFactory(_ref) {
 throw new Error(message);
 case 20:
 _audioMediaProcessorConnector = new MediaProcessorConnector(mediaProcessorConnector);
-
-
+ _context47.prev = 21;
+ _context47.next = 24;
 return _audioMediaProcessorConnector.setTrack(originalAudioTrack);
 case 24:
- filteredAudioTrack =
-
+ filteredAudioTrack = _context47.sent;
+ _context47.next = 27;
 return replaceAudioTrack(_this._getAudioSource(), filteredAudioTrack);
 case 27:
-
+ _context47.next = 34;
 break;
 case 29:
-
-
+ _context47.prev = 29;
+ _context47.t0 = _context47["catch"](21);
 _audioMediaProcessorConnector = null;
- logging.error(`setAudioMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${
- throw
+ logging.error(`setAudioMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${_context47.t0}`);
+ throw _context47.t0;
 case 34:
 case "end":
- return
+ return _context47.stop();
 }
- },
+ }, _callee47, null, [[21, 29]]);
 }));
 return function _setAudioMediaProcessorConnector(_x37) {
- return
+ return _ref55.apply(this, arguments);
 };
 }();
 
@@ -22917,22 +22895,22 @@ function PublisherFactory(_ref) {
 * @return {Promise} A promise that resolves when the operation completes successfully.
 * If there is an error, the promise is rejected.
 */
- this.clearVideoFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ this.clearVideoFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee48() {
 var message, oldVideoFilter, _webRTCStream$getVide5, filteredVideoTrack, videoTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee48$(_context48) {
+ while (1) switch (_context48.prev = _context48.next) {
 case 0:
 logAnalyticsEvent('clearVideoFilter', 'Attempt');
 if (!(!properties.publishVideo && !mediaProcessor.getVideoFilter())) {
-
+ _context48.next = 5;
 break;
 }
 currentVideoFilter = null;
 logAnalyticsEvent('clearVideoFilter', 'Success');
- return
+ return _context48.abrupt("return");
 case 5:
 if (!(!mediaProcessor.getVideoFilter() && !currentVideoFilter)) {
-
+ _context48.next = 10;
 break;
 }
 message = 'Ignoring. No video filter applied';
@@ -22940,10 +22918,10 @@ function PublisherFactory(_ref) {
 message
 });
 logging.debug(message);
- return
+ return _context48.abrupt("return");
 case 10:
 if (MediaProcessor.isSupported()) {
-
+ _context48.next = 15;
 break;
 }
 message = 'Ignoring. "clearVideoFilter" not supported.';
@@ -22951,10 +22929,10 @@ function PublisherFactory(_ref) {
 message
 });
 logging.warn(message);
- return
+ return _context48.abrupt("return");
 case 15:
 if (webRTCStream) {
-
+ _context48.next = 20;
 break;
 }
 message = 'Ignoring. No mediaStream';
@@ -22962,43 +22940,43 @@ function PublisherFactory(_ref) {
 message
 });
 logging.warn(message);
- return
+ return _context48.abrupt("return");
 case 20:
 oldVideoFilter = currentVideoFilter;
 currentVideoFilter = null;
 _webRTCStream$getVide5 = webRTCStream.getVideoTracks(), filteredVideoTrack = _webRTCStream$getVide5[0];
-
-
+ _context48.prev = 23;
+ _context48.next = 26;
 return getTrackFromDeviceId(currentDeviceId);
 case 26:
- videoTrack =
-
+ videoTrack = _context48.sent;
+ _context48.next = 33;
 break;
 case 29:
-
-
- logging.error(
- return
+ _context48.prev = 29;
+ _context48.t0 = _context48["catch"](23);
+ logging.error(_context48.t0);
+ return _context48.abrupt("return");
 case 33:
 if (videoTrack) {
-
+ _context48.next = 36;
 break;
 }
 logging.warn('Failed to clear filter because there is no video track.');
- return
+ return _context48.abrupt("return");
 case 36:
-
-
+ _context48.prev = 36;
+ _context48.next = 39;
 return replaceTrackAndUpdate(filteredVideoTrack, videoTrack);
 case 39:
-
+ _context48.next = 41;
 return destroyMediaProcessor();
 case 41:
-
+ _context48.next = 46;
 break;
 case 43:
-
-
+ _context48.prev = 43;
+ _context48.t1 = _context48["catch"](36);
 // Restore the previous filter since this call has failed. This way, this function can be
 // called again if needed.
 currentVideoFilter = oldVideoFilter;
@@ -23006,9 +22984,9 @@ function PublisherFactory(_ref) {
 logAnalyticsEvent('clearVideoFilter', 'Success');
 case 47:
 case "end":
- return
+ return _context48.stop();
 }
- },
+ }, _callee48, null, [[23, 29], [36, 43]]);
 }));
 };
 
@@ -37833,52 +37811,100 @@ function SubscriberFactory(_ref2) {
 disconnected: _eventNames.default.SUBSCRIBER_DISCONNECTED,
 failed: _eventNames.default.SUBSCRIBER_DESTROYED
 };
+ const isConnectionStateChanged = (currentState, lastState) => currentState && currentState !== lastState;
+ const destroyAdaptiveRelayedPeerConnection = () => {
+ logging.warn('OT.Subscriber: relayed PeerConnection has failed, rolling back to routed PeerConnection');
+ socket.subscriberDestroy(_stream.id, this.widgetId, 'P2P');
+ logAnalyticsEvent('AMRMantisRollback', 'Event');
+ };
+ const handleDisconnect = sourceStreamId => {
+ if (sourceStreamId !== 'P2P') {
+ setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee13() {
+ return _regenerator.default.wrap(function _callee13$(_context13) {
+ while (1) switch (_context13.prev = _context13.next) {
+ case 0:
+ if (!(_lastConnectionStatesMap[sourceStreamId] === 'disconnected' && _session._.isSocketConnected())) {
+ _context13.next = 3;
+ break;
+ }
+ _context13.next = 3;
+ return _this._.iceRestart('peer connection disconnected');
+ case 3:
+ case "end":
+ return _context13.stop();
+ }
+ }, _callee13);
+ })), 2000);
+ }
+ };
+ const handleFailMantis = () => {
+ if (_session._.isSocketConnected()) {
+ setTimeout(() => {
+ if (_lastConnectionStatesMap.MANTIS === 'destroyed') {
+ this._destroy({
+ reason: 'peerConnection is disconnected',
+ noStateTransition: true
+ });
+ }
+ // Allow 2s to make sure that a WebSocket reconnection hasn't triggered an iceRestart at
+ // the same time the PC has failed.
+ }, 2000);
+ }
+ };
+ const handleFailAdaptive = sourceStreamId => {
+ if (sourceStreamId === 'P2P') {
+ // If P2P leg fails in an adaptive session, then we will proceed to destroy it so the
+ // transition to Mantis is triggered.
+ destroyAdaptiveRelayedPeerConnection();
+ } else if (_activeSourceStreamId === 'MANTIS') {
+ handleFailMantis();
+ }
+ };
+ const handleFail = sourceStreamId => {
+ const p2pEnabled = _session.sessionInfo.p2pEnabled;
+ if (_isAdaptiveEnabled) {
+ handleFailAdaptive(sourceStreamId);
+ } else if (!p2pEnabled) {
+ handleFailMantis();
+ }
+ };
+ const updateWidgetView = currentConnectionState => {
+ if (_widgetView) {
+ _widgetView.loading(currentConnectionState !== _eventNames.default.SUBSCRIBER_CONNECTED);
+ }
+ };
+ const notifyStateChange = (state, currentConnectionState, sourceStreamId) => {
+ logging.debug(`OT.Subscriber.connectionStateChanged to ${state}`);
+ if (state === 'failed' && !_session._.isSocketConnected()) {
+ return;
+ }
+
+ // Allow 2s for a posible iceRestart in Mantis PC happening by a socket reconnection.
+ // Please see `handleFailMantis`
+ const delayEventMs = state === 'failed' && sourceStreamId === 'MANTIS' ? 2000 : 0;
+ setTimeout(() => {
+ if (state !== 'failed' || _lastConnectionStatesMap[sourceStreamId] === 'destroyed') {
+ this.dispatchEvent(new Events.ConnectionStateChangedEvent(currentConnectionState, this, (0, _getMediaModeBySourceStreamId.default)(sourceStreamId)));
+ }
+ }, delayEventMs);
+ };
 const onIceConnectionStateChange = (state, peerConnection) => {
 const currentConnectionState = connectionStateMap[state];
 const sourceStreamId = peerConnection.getSourceStreamId();
 const lastConnectionState = _lastConnectionStatesMap[sourceStreamId];
-
-
-
-
-
-
-
-
-
-
-
-
-
- return;
- }
- if (state === 'disconnected' && sourceStreamId !== 'P2P') {
- // This block of code initiates an iceRestart when a peer connection is disconnected
- // and the socket is still connected.
- setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee13() {
- var isSocketReconnecting;
- return _regenerator.default.wrap(function _callee13$(_context13) {
- while (1) switch (_context13.prev = _context13.next) {
- case 0:
- isSocketReconnecting = _session._.isSocketReconnecting;
- if (!(_lastConnectionStatesMap[sourceStreamId] === 'disconnected' && socket.is('connected') && !isSocketReconnecting())) {
- _context13.next = 4;
- break;
- }
- _context13.next = 4;
- return _this._.iceRestart('peer connection disconnected');
- case 4:
- case "end":
- return _context13.stop();
- }
- }, _callee13);
- })), 2000);
- }
- if (_widgetView && sourceStreamId === _activeSourceStreamId) {
- _widgetView.loading(currentConnectionState !== _eventNames.default.SUBSCRIBER_CONNECTED);
- }
- logging.debug(`OT.Subscriber.connectionStateChanged to ${state}`);
- this.dispatchEvent(new Events.ConnectionStateChangedEvent(currentConnectionState, this, (0, _getMediaModeBySourceStreamId.default)(sourceStreamId)));
+ if (!isConnectionStateChanged(currentConnectionState, lastConnectionState)) return;
+ _lastConnectionStatesMap[sourceStreamId] = currentConnectionState;
+ if (state === 'disconnected') {
+ handleDisconnect(sourceStreamId);
+ } else if (state === 'failed') {
+ handleFail(sourceStreamId);
+ // Do nothing else in P2P, since we don't want to destroy the Subscriber or change its
+ // video state to loading.
+ if (sourceStreamId === 'P2P') return;
+ }
+ if (sourceStreamId === _activeSourceStreamId) {
+ updateWidgetView(currentConnectionState);
+ notifyStateChange(state, currentConnectionState, sourceStreamId);
 }
 };
 const onIceRestartSuccess = () => {
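The disconnect path added in this hunk is easier to follow outside the regenerator wrapper. A hand-written sketch of the same logic (not the shipped code; the subscriber internals are passed in as parameters with illustrative names):

// Sketch only: mirrors the handleDisconnect/_callee13 helper above. After a 2 s grace
// period, ICE is restarted only if the leg is still 'disconnected' and the signalling
// socket is usable (the isSocketConnected() predicate added to the session in this diff).
function makeHandleDisconnect({ lastConnectionStatesMap, isSocketConnected, iceRestart }) {
  return function handleDisconnect(sourceStreamId) {
    if (sourceStreamId === 'P2P') {
      return; // relayed (P2P) legs are not restarted from here
    }
    setTimeout(async () => {
      if (lastConnectionStatesMap[sourceStreamId] === 'disconnected' && isSocketConnected()) {
        await iceRestart('peer connection disconnected');
      }
    }, 2000);
  };
}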
@@ -39502,30 +39528,41 @@ function SubscriberFactory(_ref2) {
 },
 iceRestart(reason, forcedRestart) {
 return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee26() {
- var peerConnection;
+ var sourceStreamId, peerConnection;
 return _regenerator.default.wrap(function _callee26$(_context26) {
 while (1) switch (_context26.prev = _context26.next) {
 case 0:
 if (forcedRestart === void 0) {
 forcedRestart = false;
 }
-
-
-
+ if (_session._.isSocketConnected()) {
+ _context26.next = 4;
+ break;
+ }
+ logging.debug('Subscriber: Skipping ice restart, websocket is not connected');
+ return _context26.abrupt("return");
+ case 4:
+ sourceStreamId = _session.sessionInfo.p2pEnabled ? 'P2P' : 'MANTIS';
+ _context26.next = 7;
+ return getPeerConnectionBySourceStreamId(sourceStreamId);
+ case 7:
 peerConnection = _context26.sent;
- if (
-
-
- logResubscribe('Attempt', {
- reason
- });
- logging.debug('Subscriber: iceRestart attempt');
- peerConnection.iceRestart();
- if (forcedRestart) {
- peerConnection.generateOffer();
- }
+ if (peerConnection) {
+ _context26.next = 11;
+ break;
 }
-
+ logging.debug('Subscriber: Skipping ice restart, we have no peer connection');
+ return _context26.abrupt("return");
+ case 11:
+ logResubscribe('Attempt', {
+ reason
+ });
+ logging.debug('Subscriber: iceRestart attempt');
+ peerConnection.iceRestart();
+ if (forcedRestart) {
+ peerConnection.generateOffer();
+ }
+ case 15:
 case "end":
 return _context26.stop();
 }
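Read as plain async/await, the reworked subscriber iceRestart above now takes two early exits before touching the peer connection. A sketch only, with the dependencies injected; this is not the code that ships in the bundle:

// Sketch only: approximates the rewritten _callee26 above.
async function subscriberIceRestart({
  reason,
  forcedRestart = false,
  session,                             // session._.isSocketConnected() is the helper added in this diff
  getPeerConnectionBySourceStreamId,
  logResubscribe,
  logging
}) {
  if (!session._.isSocketConnected()) {
    logging.debug('Subscriber: Skipping ice restart, websocket is not connected');
    return;
  }
  // Pick the active leg: relayed in P2P sessions, routed (MANTIS) otherwise.
  const sourceStreamId = session.sessionInfo.p2pEnabled ? 'P2P' : 'MANTIS';
  const peerConnection = await getPeerConnectionBySourceStreamId(sourceStreamId);
  if (!peerConnection) {
    logging.debug('Subscriber: Skipping ice restart, we have no peer connection');
    return;
  }
  logResubscribe('Attempt', { reason });
  logging.debug('Subscriber: iceRestart attempt');
  peerConnection.iceRestart();
  if (forcedRestart) {
    peerConnection.generateOffer();
  }
}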
@@ -44484,6 +44521,9 @@ function SessionFactory(deps) {
 isSocketReconnecting() {
 return _isSocketReconnecting;
 },
+ isSocketConnected() {
+ return _socket.is('connected') && !_isSocketReconnecting;
+ },
 getSocket() {
 return _socket;
 },
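The new isSocketConnected() helper folds together the two checks that subscriber code previously combined by hand (socket state plus the reconnection flag, visible in the removed subscriber lines earlier in this diff). A minimal stand-alone sketch of the same predicate, with both inputs injected and named for illustration only:

// Sketch only: equivalent shape of the isSocketConnected() added above.
// Connected means the Raptor socket reports 'connected' and no reconnect is in flight.
function makeIsSocketConnected(socket, isSocketReconnecting) {
  return () => socket.is('connected') && !isSocketReconnecting();
}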
@@ -44500,7 +44540,7 @@ function SessionFactory(deps) {
 }).forEach(publisher => {
 publisher._.iceRestart();
 });
- if (!this.sessionInfo.p2pEnabled) {
+ if (!this.session.sessionInfo.p2pEnabled) {
 sessionObjects.subscribers.where({
 session: this
 }).forEach(subscriber => {