@opentok/client 2.27.8 → 2.27.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/js/opentok.js +813 -742
- package/dist/js/opentok.js.map +1 -1
- package/dist/js/opentok.min.js +6 -6
- package/dist/js/opentok.min.js.map +1 -1
- package/package.json +1 -1
package/dist/js/opentok.js
CHANGED
@@ -1,11 +1,11 @@
  /**
- * @license OpenTok.js 2.27.
+ * @license OpenTok.js 2.27.9 a948194
  *
- * Copyright (c) 2010-
+ * Copyright (c) 2010-2025 TokBox, Inc.
  * Subject to the applicable Software Development Kit (SDK) License Agreement:
  * https://www.vonage.com/legal/communications-apis/terms-of-use/
  *
- * Date:
+ * Date: Mon, 13 Jan 2025 09:24:52 GMT
  */

  (function webpackUniversalModuleDefinition(root, factory) {
@@ -7945,7 +7945,7 @@ const logging = (0, _log.default)('StaticConfig');
  */

  /** @type builtInConfig */
- const builtInConfig = (0, _cloneDeep.default)({"version":"v2.27.
+ const builtInConfig = (0, _cloneDeep.default)({"version":"v2.27.9","buildHash":"a948194","minimumVersion":{"firefox":52,"chrome":49},"debug":false,"websiteURL":"http://www.tokbox.com","configURL":"https://config.opentok.com","ipWhitelistConfigURL":"","cdnURL":"","loggingURL":"https://hlg.tokbox.com/prod","apiURL":"https://anvil.opentok.com"});
  const whitelistAllowedRuntimeProperties = (0, _pick.default)(['apiURL', 'assetURL', 'cdnURL', 'sessionInfoOverrides', 'loggingURL']);
  const liveConfigMap = {
  apiUrl: 'apiURL',
@@ -16281,61 +16281,69 @@ function WidgetViewFactory(_temp) {
  return _waitForVideoResolution;
  }();
  _proto._tryToLoadVideoBuffer = /*#__PURE__*/function () {
- var _tryToLoadVideoBuffer2 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee4(webRTCStream, throwIfBufferFails) {
- var retries, loaded, error;
+ var _tryToLoadVideoBuffer2 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee4(webRTCStream, cancellation, throwIfBufferFails) {
+ var removeListener, retries, loaded, error;
  return _regenerator.default.wrap(function _callee4$(_context4) {
  while (1) switch (_context4.prev = _context4.next) {
  case 0:
+ // Since this function is async, it can be stacked while bind is canceled.
+ throwIfBindCancelled(cancellation);
+
  // Temporary logging to monitor the blink: OPENTOK-46931
  // All the logging is done at Subscriber level, we just emit actions
  // to be potentially logged
  this._emitAMREvent('AMRLoadVideoBuffer', 'Attempt');

- //
+ // Add listener in case buffer is canceled. With the function returned we can also remove the
+ // listener later on if buffer is not canceled.
+ removeListener = cancellation.onCancel(() => this._emitAMREvent('AMRLoadVideoBuffer', 'Cancel')); // For transitions, before binding we use the bufferVideoElement to make sure video from
  // webRTCStream is loaded and playing. We want to retry to load the video buffer as many
  // times we can inside the VIDEO_LOADING_TIMEOUT.
  retries = Math.ceil(VIDEO_LOADING_TIMEOUT / VIDEO_BUFFER_TIMEOUT);
  loaded = false;
- case
+ case 5:
  if (!(!loaded && retries)) {
- _context4.next =
+ _context4.next = 13;
  break;
  }
- _context4.next =
+ _context4.next = 8;
  return this._loadVideoBuffer(webRTCStream, VIDEO_BUFFER_TIMEOUT);
- case
+ case 8:
  loaded = _context4.sent;
+ throwIfBindCancelled(cancellation);
  retries--;
- _context4.next =
+ _context4.next = 5;
  break;
- case
+ case 13:
+ // Buffer hasn't been canceled, lets clear the listener.
+ removeListener();
  if (loaded) {
- _context4.next =
+ _context4.next = 20;
  break;
  }
  this._emitAMREvent('AMRLoadVideoBuffer', 'Failure');
  if (!throwIfBufferFails) {
- _context4.next =
+ _context4.next = 19;
  break;
  }
  error = (0, _amrVideoBufferError.createAmrVideoBufferError)();
  throw error;
- case
+ case 19:
  return _context4.abrupt("return");
- case
+ case 20:
  this._emitAMREvent('AMRLoadVideoBuffer', 'Success');
  // We now want to log binding duration to monitor the blink: OPENTOK-46931
  // We propagate the event from videoFacade level to Subscriber
  this._videoFacadeEvents.on('amrLogEvent', (action, variation, options) => {
  this._emitAMREvent(action, variation, options);
  });
- case
+ case 22:
  case "end":
  return _context4.stop();
  }
  }, _callee4, this);
  }));
- function _tryToLoadVideoBuffer(_x7, _x8) {
+ function _tryToLoadVideoBuffer(_x7, _x8, _x9) {
  return _tryToLoadVideoBuffer2.apply(this, arguments);
  }
  return _tryToLoadVideoBuffer;

@@ -16392,7 +16400,7 @@ function WidgetViewFactory(_temp) {
  break;
  }
  _context5.next = 13;
- return this._tryToLoadVideoBuffer(webRTCStream, throwIfBufferFails);
+ return this._tryToLoadVideoBuffer(webRTCStream, cancellation, throwIfBufferFails);
  case 13:
  _context5.next = 15;
  return this._bindToStream(webRTCStream, cancellation);

@@ -16417,7 +16425,7 @@ function WidgetViewFactory(_temp) {
  }
  }, _callee5, this);
  }));
- function bindVideo(
+ function bindVideo(_x10, _x11, _x12) {
  return _bindVideo.apply(this, arguments);
  }
  return bindVideo;
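The three hunks above thread a cancellation token through _tryToLoadVideoBuffer: the retry loop now calls throwIfBindCancelled(cancellation) before starting and after each await, registers an onCancel listener that emits an 'AMRLoadVideoBuffer'/'Cancel' action, and removes that listener once the loop finishes. The sketch below is a minimal, self-contained illustration of that token pattern; it is not OpenTok's implementation, and the names (createCancellation, throwIfCancelled, loadWithRetries) are illustrative assumptions.

    // Minimal sketch of the cancellation-token pattern visible in this hunk (illustrative only).
    class CancellationError extends Error {}

    function createCancellation() {
      const listeners = new Set();
      let canceled = false;
      return {
        isCanceled: () => canceled,
        cancel() {
          canceled = true;
          listeners.forEach(fn => fn());
        },
        // Register a cancel listener; returns a function that removes it again,
        // mirroring the `removeListener = cancellation.onCancel(...)` call above.
        onCancel(fn) {
          listeners.add(fn);
          return () => listeners.delete(fn);
        },
      };
    }

    function throwIfCancelled(cancellation) {
      if (cancellation.isCanceled()) {
        throw new CancellationError('bind was cancelled');
      }
    }

    // Usage: guard an async retry loop, re-checking after every await.
    async function loadWithRetries(load, cancellation, retries) {
      throwIfCancelled(cancellation);
      const removeListener = cancellation.onCancel(() => console.log('load cancelled'));
      let loaded = false;
      while (!loaded && retries--) {
        loaded = await load();
        throwIfCancelled(cancellation);
      }
      removeListener();
      return loaded;
    }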
@@ -17743,12 +17751,12 @@ function PublisherFactory(_ref) {
  });
  };
  const onIceConnectionStateChange = /*#__PURE__*/function () {
- var _ref10 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
- var isAdaptiveEnabled, sourceStreamId, _yield$getMantisPeerC, _yield$getP2pPeerConn, isMantisConnected, isP2PConnected, isSocketReconnecting, socket, isSocketConnected;
- return _regenerator.default.wrap(function
- while (1) switch (
+ var _ref10 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee6(newState, peerConnection) {
+ var _this$session$session, isAdaptiveEnabled, p2pEnabled, sourceStreamId, _yield$getMantisPeerC, _yield$getP2pPeerConn, isMantisConnected, isP2PConnected, isSocketReconnecting, socket, isSocketConnected, destroyStream, _getPeerConnectionMet6, remoteSubscriberId, arePeerConnectionsAlive;
+ return _regenerator.default.wrap(function _callee6$(_context6) {
+ while (1) switch (_context6.prev = _context6.next) {
  case 0:
-
+ _this$session$session = _this.session.sessionInfo, isAdaptiveEnabled = _this$session$session.isAdaptiveEnabled, p2pEnabled = _this$session$session.p2pEnabled;
  sourceStreamId = peerConnection.getSourceStreamId();
  lastIceConnectionStates[sourceStreamId] = newState;
  if (newState === 'disconnected') {

@@ -17767,78 +17775,125 @@ function PublisherFactory(_ref) {
  }, 2000);
  }
  if (!(newState === 'connected')) {
-
+ _context6.next = 26;
  break;
  }
  clearTimeout(_streamDestroyTimeout);
  if (!isAdaptiveEnabled) {
-
+ _context6.next = 26;
  break;
  }
-
+ _context6.next = 9;
  return getMantisPeerConnection();
  case 9:
-
- if (!(
-
+ _context6.t0 = _yield$getMantisPeerC = _context6.sent;
+ if (!(_context6.t0 == null)) {
+ _context6.next = 14;
  break;
  }
-
-
+ _context6.t1 = void 0;
+ _context6.next = 15;
  break;
  case 14:
-
+ _context6.t1 = _yield$getMantisPeerC.iceConnectionStateIsConnected();
  case 15:
- isMantisConnected =
-
+ isMantisConnected = _context6.t1;
+ _context6.next = 18;
  return getP2pPeerConnection();
  case 18:
-
- if (!(
-
+ _context6.t2 = _yield$getP2pPeerConn = _context6.sent;
+ if (!(_context6.t2 == null)) {
+ _context6.next = 23;
  break;
  }
-
-
+ _context6.t3 = void 0;
+ _context6.next = 24;
  break;
  case 23:
-
+ _context6.t3 = _yield$getP2pPeerConn.iceConnectionStateIsConnected();
  case 24:
- isP2PConnected =
+ isP2PConnected = _context6.t3;
  if (isMantisConnected && isP2PConnected) {
  _stopSendingRtpToMantis();
  }
  case 26:
  if (!(newState === 'failed')) {
-
+ _context6.next = 34;
  break;
  }
  isSocketReconnecting = _this.session._.isSocketReconnecting;
  socket = _this.session._.getSocket();
  isSocketConnected = socket.is('connected') && !isSocketReconnecting();
- if (isSocketConnected) {
-
+ if (!(!isSocketConnected || sourceStreamId !== activeSourceStreamId)) {
+ _context6.next = 32;
  break;
  }
- return
+ return _context6.abrupt("return");
  case 32:
- // If PC has failed and the socket is connected we will either transition to Mantis
- // if adaptive and P2P leg or destroy the publisher in all other cases
  // Instead of destroying the publisher straight away, we will destroy it after 5 secs
  // in order to avoid a race condition where we just got the socket connected at the
  // same moment PC transition to failed
-
+ destroyStream = function destroyStream(arePeerConnectionsAlive) {
+ if (arePeerConnectionsAlive === void 0) {
+ arePeerConnectionsAlive = () => Promise.resolve(false);
+ }
+ _streamDestroyTimeout = setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee4() {
+ var pendingPeerConnections;
+ return _regenerator.default.wrap(function _callee4$(_context4) {
+ while (1) switch (_context4.prev = _context4.next) {
+ case 0:
+ _context4.next = 2;
+ return arePeerConnectionsAlive();
+ case 2:
+ pendingPeerConnections = _context4.sent;
+ if (!pendingPeerConnections) {
+ _this.session._.streamDestroy(_this.streamId, sourceStreamId);
+ }
+ case 4:
+ case "end":
+ return _context4.stop();
+ }
+ }, _callee4);
+ })), STREAM_DESTROY_DELAY);
+ };
+ if (p2pEnabled) {
+ // In P2P destroy the Publisher if there are no subscribers to it.
+ _getPeerConnectionMet6 = getPeerConnectionMeta(peerConnection), remoteSubscriberId = _getPeerConnectionMet6.remoteSubscriberId;
+ _this._removeSubscriber(remoteSubscriberId);
+ arePeerConnectionsAlive = /*#__PURE__*/function () {
+ var _ref12 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee5() {
+ var peerConnections;
+ return _regenerator.default.wrap(function _callee5$(_context5) {
+ while (1) switch (_context5.prev = _context5.next) {
+ case 0:
+ _context5.next = 2;
+ return getAllPeerConnections();
+ case 2:
+ peerConnections = _context5.sent;
+ return _context5.abrupt("return", peerConnections.length !== 0);
+ case 4:
+ case "end":
+ return _context5.stop();
+ }
+ }, _callee5);
+ }));
+ return function arePeerConnectionsAlive() {
+ return _ref12.apply(this, arguments);
+ };
+ }();
+ destroyStream(arePeerConnectionsAlive);
+ } else if (isAdaptiveEnabled && sourceStreamId === 'P2P') {
+ // In adaptive if P2P PC has failed and the socket is connected we will transition to Mantis
  _this._.startRelayedToRoutedTransition();
  } else {
-
-
- }, STREAM_DESTROY_DELAY);
+ // If Mantis PC fails, then nothing else to do.
+ destroyStream();
  }
- case
+ case 34:
  case "end":
- return
+ return _context6.stop();
  }
- },
+ }, _callee6);
  }));
  return function onIceConnectionStateChange(_x4, _x5) {
  return _ref10.apply(this, arguments);
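The substantive change in the onIceConnectionStateChange hunks above is what happens when a peer connection reaches the 'failed' state: the delayed stream destroy is now wrapped in a destroyStream helper, and in P2P sessions the failed subscriber is removed and the destroy is skipped while peer connections are still alive. A rough, self-contained sketch of that branching follows; it is not OpenTok's code, all collaborators are injected as parameters, and the 5000 ms value simply mirrors the "destroy it after 5 secs" comment in the diff.

    // Sketch of the conditional, delayed destroy added in this release (illustrative only).
    const STREAM_DESTROY_DELAY = 5000; // mirrors the "destroy it after 5 secs" comment above

    function onPeerConnectionFailed(deps) {
      const { p2pEnabled, isAdaptiveEnabled, sourceStreamId, streamId, session,
              getAllPeerConnections, removeSubscriber, remoteSubscriberId,
              startRelayedToRoutedTransition } = deps;

      // Wait a bit before destroying the stream, then only destroy it if nothing is left alive.
      const destroyStream = (arePeerConnectionsAlive = async () => false) =>
        setTimeout(async () => {
          if (!(await arePeerConnectionsAlive())) {
            session.streamDestroy(streamId, sourceStreamId);
          }
        }, STREAM_DESTROY_DELAY);

      if (p2pEnabled) {
        // P2P: drop the failed subscriber, and destroy the stream only if no
        // peer connections remain.
        removeSubscriber(remoteSubscriberId);
        destroyStream(async () => (await getAllPeerConnections()).length !== 0);
      } else if (isAdaptiveEnabled && sourceStreamId === 'P2P') {
        // Adaptive media routing: the relayed (P2P) leg failed, so transition to routed (Mantis).
        startRelayedToRoutedTransition();
      } else {
        // Routed (Mantis) leg failed: schedule the destroy unconditionally.
        destroyStream();
      }
    }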
@@ -17888,12 +17943,12 @@ function PublisherFactory(_ref) {
  * @param {string} configuration.peerConnectionId
  * @returns {Promise<Error, PublisherPeerConnection>}
  */
- const createPeerConnection =
- let peerConnectionId =
- send =
- log =
- logQoS =
- sourceStreamId =
+ const createPeerConnection = _ref13 => {
+ let peerConnectionId = _ref13.peerConnectionId,
+ send = _ref13.send,
+ log = _ref13.log,
+ logQoS = _ref13.logQoS,
+ sourceStreamId = _ref13.sourceStreamId;
  if (getPeerConnectionById(peerConnectionId)) {
  return Promise.reject(new Error('PeerConnection already exists'));
  }

@@ -17910,9 +17965,9 @@ function PublisherFactory(_ref) {
  capableSimulcastScreenshare: properties.capableSimulcastScreenshare,
  scalableVideo: properties.scalableVideo
  });
- peerConnectionsAsync[peerConnectionId] = Promise.all([this.session._.getIceConfig(), this.session._.getVideoCodecsCompatible(webRTCStream)]).then(
- let iceConfig =
- videoCodecsCompatible =
+ peerConnectionsAsync[peerConnectionId] = Promise.all([this.session._.getIceConfig(), this.session._.getVideoCodecsCompatible(webRTCStream)]).then(_ref14 => {
+ let iceConfig = _ref14[0],
+ videoCodecsCompatible = _ref14[1];
  let pcStream = webRTCStream;
  if (!videoCodecsCompatible) {
  pcStream = webRTCStream.clone();

@@ -17964,9 +18019,9 @@ function PublisherFactory(_ref) {
  });
  peerConnection.on({
  disconnected: () => onPeerDisconnected(peerConnection),
- error:
- let reason =
- prefix =
+ error: _ref15 => {
+ let reason = _ref15.reason,
+ prefix = _ref15.prefix;
  return onPeerConnectionFailure(peerConnection, {
  reason,
  prefix

@@ -18147,12 +18202,12 @@ function PublisherFactory(_ref) {
  }))))).then(pcsAndStats => {
  // @todo this publishStartTime is going to be so wrong in P2P
  const startTimestamp = publishStartTime ? publishStartTime.getTime() : Date.now();
- const results = pcsAndStats.map(
- let pc =
- stats =
- const
- remoteConnectionId =
- remoteSubscriberId =
+ const results = pcsAndStats.map(_ref16 => {
+ let pc = _ref16.pc,
+ stats = _ref16.stats;
+ const _getPeerConnectionMet7 = getPeerConnectionMeta(pc),
+ remoteConnectionId = _getPeerConnectionMet7.remoteConnectionId,
+ remoteSubscriberId = _getPeerConnectionMet7.remoteSubscriberId;
  return (0, _assign.default)(remoteConnectionId.match(/^symphony\./) ? {} : {
  subscriberId: remoteSubscriberId,
  connectionId: remoteConnectionId

@@ -18184,18 +18239,18 @@ function PublisherFactory(_ref) {
  this.session._.streamCreate(streamOptions, completionHandler);
  };
  const _stopSendingRtpToMantis = /*#__PURE__*/function () {
- var
+ var _ref17 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee7() {
  var peerConnection;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee7$(_context7) {
+ while (1) switch (_context7.prev = _context7.next) {
  case 0:
  _restartSendingRtpToMantisCalled = false;
-
+ _context7.next = 3;
  return getMantisPeerConnection();
  case 3:
- peerConnection =
+ peerConnection = _context7.sent;
  if (!peerConnection) {
-
+ _context7.next = 18;
  break;
  }
  _this.trigger('sourceStreamIdChanged', 'P2P');
@@ -18206,55 +18261,55 @@ function PublisherFactory(_ref) {

  // We add this delay before stopping media to prevent MANTIS to consider this stream
  // as inactive after a reconnection and then destroy it.
-
+ _context7.next = 9;
  return (0, _promiseDelay.default)(KEEP_SENDING_MEDIA_AFTER_TRANSITIONED);
  case 9:
  if (!_restartSendingRtpToMantisCalled) {
-
+ _context7.next = 12;
  break;
  }
  logging.debug('Cancelling stop sending RTP to MANTIS.');
- return
+ return _context7.abrupt("return");
  case 12:
  // Audio is muted and video is set to inactive
  amrAudioTrackProcessor.muteAudioInPeerConnection(webRTCStream, peerConnection);
-
+ _context7.next = 15;
  return peerConnection.changeMediaDirectionToInactive();
  case 15:
  if (!(OTHelpers.env.isFirefox && OTHelpers.env.version < 96)) {
-
+ _context7.next = 18;
  break;
  }
-
+ _context7.next = 18;
  return _keepSendingRtcpToMantis();
  case 18:
  case "end":
- return
+ return _context7.stop();
  }
- },
+ }, _callee7);
  }));
  return function _stopSendingRtpToMantis() {
- return
+ return _ref17.apply(this, arguments);
  };
  }();
  const _restartSendingRtpToMantis = /*#__PURE__*/function () {
- var
+ var _ref18 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee8() {
  var peerConnection, _yield$getP2pPeerConn2;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee8$(_context8) {
+ while (1) switch (_context8.prev = _context8.next) {
  case 0:
  _restartSendingRtpToMantisCalled = true;
-
+ _context8.next = 3;
  return getMantisPeerConnection();
  case 3:
- peerConnection =
+ peerConnection = _context8.sent;
  if (!peerConnection) {
-
+ _context8.next = 22;
  break;
  }
  // Audio is unmuted and video is set to recvonly
  amrAudioTrackProcessor.unmuteAudioInPeerConnection(webRTCStream, peerConnection);
-
+ _context8.next = 8;
  return peerConnection.changeMediaDirectionToRecvOnly();
  case 8:
  if (_keepSendingRtcpToMantisTimeout) {

@@ -18262,81 +18317,81 @@ function PublisherFactory(_ref) {
  }
  _this.trigger('sourceStreamIdChanged', 'MANTIS');
  if (!properties.publisherAudioFallbackEnabled) {
-
+ _context8.next = 22;
  break;
  }
-
-
+ _context8.t0 = peerConnection;
+ _context8.next = 14;
  return getP2pPeerConnection();
  case 14:
-
- if (!(
-
+ _context8.t1 = _yield$getP2pPeerConn2 = _context8.sent;
+ if (!(_context8.t1 == null)) {
+ _context8.next = 19;
  break;
  }
-
-
+ _context8.t2 = void 0;
+ _context8.next = 20;
  break;
  case 19:
-
+ _context8.t2 = _yield$getP2pPeerConn2.getAudioFallbackState();
  case 20:
-
-
+ _context8.t3 = _context8.t2;
+ _context8.t0.enableCongestionLevelEstimation.call(_context8.t0, _context8.t3);
  case 22:
  case "end":
- return
+ return _context8.stop();
  }
- },
+ }, _callee8);
  }));
  return function _restartSendingRtpToMantis() {
- return
+ return _ref18.apply(this, arguments);
  };
  }();
  const _keepSendingRtcpToMantis = /*#__PURE__*/function () {
- var
+ var _ref19 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee10() {
  var peerConnection;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee10$(_context10) {
+ while (1) switch (_context10.prev = _context10.next) {
  case 0:
-
+ _context10.next = 2;
  return getMantisPeerConnection();
  case 2:
- peerConnection =
+ peerConnection = _context10.sent;
  if (peerConnection) {
- _keepSendingRtcpToMantisTimeout = setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
- return _regenerator.default.wrap(function
- while (1) switch (
+ _keepSendingRtcpToMantisTimeout = setTimeout( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee9() {
+ return _regenerator.default.wrap(function _callee9$(_context9) {
+ while (1) switch (_context9.prev = _context9.next) {
  case 0:
  if (!(activeSourceStreamId === 'P2P')) {
-
+ _context9.next = 9;
  break;
  }
-
+ _context9.next = 3;
  return peerConnection.changeMediaDirectionToRecvOnly();
  case 3:
-
+ _context9.next = 5;
  return (0, _promiseDelay.default)(KEEP_SENDING_MEDIA_TO_KEEP_ALIVE);
  case 5:
-
+ _context9.next = 7;
  return peerConnection.changeMediaDirectionToInactive();
  case 7:
-
+ _context9.next = 9;
  return _keepSendingRtcpToMantis();
  case 9:
  case "end":
- return
+ return _context9.stop();
  }
- },
+ }, _callee9);
  })), KEEP_SENDING_RTCP_DELAY);
  }
  case 4:
  case "end":
- return
+ return _context10.stop();
  }
- },
+ }, _callee10);
  }));
  return function _keepSendingRtcpToMantis() {
- return
+ return _ref19.apply(this, arguments);
  };
  }();
  const _transitionRoutedToRelayed = () => {
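The two hunks above only renumber the generator helpers of _stopSendingRtpToMantis and _keepSendingRtcpToMantis, but the state machines are hard to read in transpiled form. One possible async/await reading of _stopSendingRtpToMantis is sketched below; this is an interpretation of the transpiled output, not code from OpenTok's sources, and the dependencies are passed in explicitly for the example.

    // A readable interpretation of the `_stopSendingRtpToMantis` state machine shown above (sketch only).
    async function stopSendingRtpToMantis(deps) {
      const { publisher, webRTCStream, getMantisPeerConnection, amrAudioTrackProcessor,
              promiseDelay, keepSendingRtcpToMantis, env, logging, state,
              KEEP_SENDING_MEDIA_AFTER_TRANSITIONED } = deps;

      state.restartSendingRtpToMantisCalled = false;
      const peerConnection = await getMantisPeerConnection();
      if (!peerConnection) return;

      publisher.trigger('sourceStreamIdChanged', 'P2P');

      // Delay before stopping media so Mantis does not flag the stream as inactive
      // right after a reconnection and destroy it.
      await promiseDelay(KEEP_SENDING_MEDIA_AFTER_TRANSITIONED);
      if (state.restartSendingRtpToMantisCalled) {
        logging.debug('Cancelling stop sending RTP to MANTIS.');
        return;
      }

      // Audio is muted and video is set to inactive.
      amrAudioTrackProcessor.muteAudioInPeerConnection(webRTCStream, peerConnection);
      await peerConnection.changeMediaDirectionToInactive();

      // Firefox < 96 needs periodic RTCP to keep the Mantis leg alive.
      if (env.isFirefox && env.version < 96) {
        await keepSendingRtcpToMantis();
      }
    }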
@@ -18372,55 +18427,55 @@ function PublisherFactory(_ref) {
  });
  };
  const _transitionRelayedToRouted = /*#__PURE__*/function () {
- var
- return _regenerator.default.wrap(function
- while (1) switch (
+ var _ref21 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee11() {
+ return _regenerator.default.wrap(function _callee11$(_context11) {
+ while (1) switch (_context11.prev = _context11.next) {
  case 0:
  if (!(activeSourceStreamId !== 'P2P')) {
-
+ _context11.next = 2;
  break;
  }
- return
+ return _context11.abrupt("return");
  case 2:
  logRelayedToRoutedTransition('Attempt');
  if (_this.session) {
-
+ _context11.next = 6;
  break;
  }
  logRelayedToRoutedTransition('Failure', {
  reason: 'Not connected to the session.'
  });
- return
+ return _context11.abrupt("return");
  case 6:
  if (_this.streamId) {
-
+ _context11.next = 9;
  break;
  }
  logRelayedToRoutedTransition('Failure', {
  reason: 'No streamId available'
  });
- return
+ return _context11.abrupt("return");
  case 9:
-
+ _context11.next = 11;
  return _restartSendingRtpToMantis();
  case 11:
  _this.session._.streamDestroy(_this.streamId, 'P2P');
-
-
+ _context11.t0 = _this;
+ _context11.next = 15;
  return getP2pPeerConnection();
  case 15:
-
-
+ _context11.t1 = _context11.sent;
+ _context11.t0._removePeerConnection.call(_context11.t0, _context11.t1);
  logRelayedToRoutedTransition('Success');
  _this.trigger('streamDestroyForP2PComplete');
  case 19:
  case "end":
- return
+ return _context11.stop();
  }
- },
+ }, _callee11);
  }));
  return function _transitionRelayedToRouted() {
- return
+ return _ref21.apply(this, arguments);
  };
  }();
  this.publish = targetElement => {
@@ -18500,10 +18555,10 @@ function PublisherFactory(_ref) {
  this.dispatchEvent(event);
  });
  getUserMedia().catch(userMediaError).then( /*#__PURE__*/function () {
- var
+ var _ref22 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee12(stream) {
  var hasVideoFilter;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee12$(_context12) {
+ while (1) switch (_context12.prev = _context12.next) {
  case 0:
  // this comes from deviceHelpers.shouldAskForDevices in a round-about way
  audioDevices = processedOptions.audioDevices;

@@ -18515,7 +18570,7 @@ function PublisherFactory(_ref) {
  // else the wrong device will be returned/nonsensical
  currentDeviceId = (0, _getDeviceIdFromStream.default)(stream, videoDevices);
  }
-
+ _context12.next = 7;
  return onStreamAvailable(stream);
  case 7:
  if (!properties.publishVideo) {

@@ -18530,7 +18585,7 @@ function PublisherFactory(_ref) {
  setCurrentTrackDeviceId(currentDeviceId);
  }
  }
- return
+ return _context12.abrupt("return", bindVideo().catch(error => {
  if (error instanceof _cancel.CancellationError) {
  // If we get a CancellationError, it means something newer tried
  // to bindVideo before the old one succeeded, perhaps they called

@@ -18551,118 +18606,118 @@ function PublisherFactory(_ref) {
  }));
  case 10:
  case "end":
- return
+ return _context12.stop();
  }
- },
+ }, _callee12);
  }));
  return function (_x6) {
- return
+ return _ref22.apply(this, arguments);
  };
  }());
  });
  return this;
  };
- this._getVideoSenders = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
- return _regenerator.default.wrap(function
- while (1) switch (
+ this._getVideoSenders = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee13() {
+ return _regenerator.default.wrap(function _callee13$(_context13) {
+ while (1) switch (_context13.prev = _context13.next) {
  case 0:
- return
- let kind =
+ return _context13.abrupt("return", getAllPeerConnections().then(peerConnections => peerConnections[0].getSenders().filter(_ref24 => {
+ let kind = _ref24.track.kind;
  return kind === 'video';
  })));
  case 1:
  case "end":
- return
+ return _context13.stop();
  }
- },
+ }, _callee13);
  }));
  this._setScalableValues = /*#__PURE__*/function () {
- var
+ var _ref25 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee14(scalableParam, scalableValues) {
  var senders, sender, sendParameters;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee14$(_context14) {
+ while (1) switch (_context14.prev = _context14.next) {
  case 0:
-
+ _context14.next = 2;
  return _this._getVideoSenders();
  case 2:
- senders =
+ senders = _context14.sent;
  sender = senders[0];
  sendParameters = sender.getParameters();
  sendParameters.encodings.forEach((encoding, index) => {
  encoding[scalableParam] = scalableValues[index]; // eslint-disable-line no-param-reassign
  });
-
+ _context14.next = 8;
  return sender.setParameters(sendParameters);
  case 8:
  case "end":
- return
+ return _context14.stop();
  }
- },
+ }, _callee14);
  }));
  return function (_x7, _x8) {
- return
+ return _ref25.apply(this, arguments);
  };
  }();
  this._setScalabilityMode = /*#__PURE__*/function () {
- var
- return _regenerator.default.wrap(function
- while (1) switch (
+ var _ref26 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee15(scalabilityMode) {
+ return _regenerator.default.wrap(function _callee15$(_context15) {
+ while (1) switch (_context15.prev = _context15.next) {
  case 0:
- return
+ return _context15.abrupt("return", setScalabilityMode(scalabilityMode, _this));
  case 1:
  case "end":
- return
+ return _context15.stop();
  }
- },
+ }, _callee15);
  }));
  return function (_x9) {
- return
+ return _ref26.apply(this, arguments);
  };
  }();
  this._setScalableFramerates = /*#__PURE__*/function () {
- var
+ var _ref27 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee16(frameRates) {
  var framerateValues;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee16$(_context16) {
+ while (1) switch (_context16.prev = _context16.next) {
  case 0:
  framerateValues = normalizeScalableValues(frameRates);
  if (!(framerateValues && areValidFramerates(framerateValues))) {
-
+ _context16.next = 4;
  break;
  }
-
+ _context16.next = 4;
  return _this._setScalableValues('maxFramerate', framerateValues);
  case 4:
  case "end":
- return
+ return _context16.stop();
  }
- },
+ }, _callee16);
  }));
  return function (_x10) {
- return
+ return _ref27.apply(this, arguments);
  };
  }();
  this._setScalableVideoLayers = /*#__PURE__*/function () {
- var
+ var _ref28 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee17(videoLayers) {
  var videoLayerValues;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee17$(_context17) {
+ while (1) switch (_context17.prev = _context17.next) {
  case 0:
  videoLayerValues = normalizeScalableValues(videoLayers);
  if (!(videoLayerValues && areValidResolutionScales(videoLayerValues))) {
-
+ _context17.next = 4;
  break;
  }
-
+ _context17.next = 4;
  return _this._setScalableValues('scaleResolutionDownBy', videoLayerValues);
  case 4:
  case "end":
- return
+ return _context17.stop();
  }
- },
+ }, _callee17);
  }));
  return function (_x11) {
- return
+ return _ref28.apply(this, arguments);
  };
  }();
  const areValidFramerates = framerates => {
@@ -18803,20 +18858,20 @@ function PublisherFactory(_ref) {
  const updateVideo = () => {
  const shouldSendVideo = haveWorkingTracks('video') && properties.publishVideo;
  if (_env.default.name === 'Chrome' && _env.default.version >= 69) {
- (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee18() {
  var executionSentinel, peerConnections;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee18$(_context18) {
+ while (1) switch (_context18.prev = _context18.next) {
  case 0:
  if (updateVideoSenderParametersSentinel) {
  updateVideoSenderParametersSentinel.cancel();
  }
  updateVideoSenderParametersSentinel = new _cancel.default();
  executionSentinel = updateVideoSenderParametersSentinel;
-
+ _context18.next = 5;
  return getAllPeerConnections();
  case 5:
- peerConnections =
+ peerConnections = _context18.sent;
  if (!executionSentinel.isCanceled()) {
  // only proceed if we weren't canceled during the async operation above
  peerConnections.forEach(peerConnection => {

@@ -18825,9 +18880,9 @@ function PublisherFactory(_ref) {
  }
  case 7:
  case "end":
- return
+ return _context18.stop();
  }
- },
+ }, _callee18);
  }))();
  }
  if (isCustomVideoTrack && mediaProcessor) {

@@ -18854,25 +18909,25 @@ function PublisherFactory(_ref) {
  refreshAudioVideoUI();
  };
  const destroyMediaProcessor = /*#__PURE__*/function () {
- var
+ var _ref30 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee19() {
  var videoTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee19$(_context19) {
+ while (1) switch (_context19.prev = _context19.next) {
  case 0:
  // We need to cache this since calling the method below will
  // clear this value.
  videoTrack = mediaProcessor.getOriginalVideoTrack(); // Note: this needs to be called before the `stop` method below. Reversing
  // the order may cause race conditions with the MP worker.
-
-
+ _context19.prev = 1;
+ _context19.next = 4;
  return mediaProcessor.destroy();
  case 4:
-
+ _context19.next = 9;
  break;
  case 6:
-
-
- logging.warn(`Error cleaning up mediaProcessor: ${
+ _context19.prev = 6;
+ _context19.t0 = _context19["catch"](1);
+ logging.warn(`Error cleaning up mediaProcessor: ${_context19.t0}`);
  case 9:
  // Since no filtering is being applied, we perform some cleanup. We
  // stop the original video track here since it's not being used

@@ -18880,12 +18935,12 @@ function PublisherFactory(_ref) {
  videoTrack.stop();
  case 10:
  case "end":
- return
+ return _context19.stop();
  }
- },
+ }, _callee19, null, [[1, 6]]);
  }));
  return function destroyMediaProcessor() {
- return
+ return _ref30.apply(this, arguments);
  };
  }();
  const hasTrackFromDevice = deviceId =>
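The only change to destroyMediaProcessor above is renumbered generator bookkeeping, but the try-location table [[1, 6]] it ends with is easier to follow as plain async/await. A hedged sketch of that reading follows; parameter names are illustrative, and only the warning text and call order are taken from the diff.

    // Approximate async/await form of the `_callee19` state machine above; sketch only.
    async function destroyMediaProcessor(mediaProcessor, logging) {
      // Cache the original track first: destroying the processor clears this value, and
      // reversing the order may cause race conditions with the media-processor worker.
      const videoTrack = mediaProcessor.getOriginalVideoTrack();
      try {
        await mediaProcessor.destroy();
      } catch (err) {
        logging.warn(`Error cleaning up mediaProcessor: ${err}`);
      }
      // No filtering is applied any more, so stop the original video track since it's
      // not being used.
      videoTrack.stop();
    }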
@@ -18913,25 +18968,25 @@ function PublisherFactory(_ref) {
  let currentVideoFilter;
  let currentAudioFilter;
  this._toggleVideo = (0, _blockCallsUntilComplete.default)( /*#__PURE__*/function () {
- var
+ var _ref31 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee20(shouldHaveVideo, videoDimensions) {
  var _vidDevices$find;
  var vidDevices, oldTrack, oldTrackDeviceId, newTrack, videoFilter, originalVideoTrack, _originalVideoTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee20$(_context20) {
+ while (1) switch (_context20.prev = _context20.next) {
  case 0:
  if (!(isScreenSharing || isCustomVideoTrack || isTrackManuallyStopped)) {
-
+ _context20.next = 2;
  break;
  }
- return
+ return _context20.abrupt("return");
  case 2:
-
+ _context20.next = 4;
  return getVideoDevices();
  case 4:
- vidDevices =
+ vidDevices = _context20.sent;
  oldTrack = getCurrentTrack();
  if (oldTrack) {
-
+ _context20.next = 8;
  break;
  }
  throw otError(Errors.NOT_SUPPORTED, new Error('Publisher._toggleVideo cannot toggleVideo when you have no video source.'));

@@ -18939,17 +18994,17 @@ function PublisherFactory(_ref) {
  // oldTrackDeviceId is undefined when it comes from a canvasTracks, i.e.: it is currently muted
  oldTrackDeviceId = (_vidDevices$find = vidDevices.find(device => device.label === oldTrack.label)) == null ? void 0 : _vidDevices$find.deviceId;
  if (isNewTrackNeeded(shouldHaveVideo, oldTrackDeviceId)) {
-
+ _context20.next = 11;
  break;
  }
- return
+ return _context20.abrupt("return");
  case 11:
  if (!(oldTrack.readyState === 'ended')) {
-
+ _context20.next = 14;
  break;
  }
  isTrackManuallyStopped = true;
- return
+ return _context20.abrupt("return");
  case 14:
  if (shouldHaveVideo && OTHelpers.env.isAndroid && OTHelpers.env.isChrome) {
  // On Chrome on Android you need to stop the previous video track OPENTOK-37206

@@ -18958,17 +19013,17 @@ function PublisherFactory(_ref) {
  }
  }
  if (shouldHaveVideo) {
-
+ _context20.next = 31;
  break;
  }
-
+ _context20.prev = 16;
  newTrack = (0, _createCanvasVideoTrack.default)(videoDimensions);
-
+ _context20.next = 23;
  break;
  case 20:
-
-
- return
+ _context20.prev = 20;
+ _context20.t0 = _context20["catch"](16);
+ return _context20.abrupt("return");
  case 23:
  if (oldTrackDeviceId) {
  // store the current deviceId to reacquire the video later

@@ -18977,13 +19032,13 @@ function PublisherFactory(_ref) {
  setCurrentTrackDeviceId(newTrack.label);
  videoFilter = mediaProcessor.getVideoFilter();
  if (!videoFilter) {
-
+ _context20.next = 30;
  break;
  }
  // Save the current video filter because we want to make sure it
  // gets enabled when the user publishes video again
  currentVideoFilter = videoFilter;
-
+ _context20.next = 30;
  return destroyMediaProcessor();
  case 30:
  if (_videoMediaProcessorConnector) {

@@ -18992,102 +19047,102 @@ function PublisherFactory(_ref) {
  }
  case 31:
  if (!(currentDeviceId && vidDevices.findIndex(device => device.deviceId === currentDeviceId) === -1)) {
-
+ _context20.next = 33;
  break;
  }
  throw otError(Errors.NO_DEVICES_FOUND, new Error('Previous device no longer available - deviceId not found'));
  case 33:
  privateEvents.emit('streamDestroy');
  if (!shouldHaveVideo) {
-
+ _context20.next = 64;
  break;
  }
  if (!hasTrackFromDevice(currentDeviceId)) {
-
+ _context20.next = 37;
  break;
  }
- return
+ return _context20.abrupt("return");
  case 37:
-
-
+ _context20.prev = 37;
+ _context20.next = 40;
  return getTrackFromDeviceId(currentDeviceId);
  case 40:
- newTrack =
-
+ newTrack = _context20.sent;
+ _context20.next = 47;
  break;
  case 43:
-
-
- logging.error(`Error getting new track for current device(${currentDeviceId}): ${
- throw
+ _context20.prev = 43;
+ _context20.t1 = _context20["catch"](37);
+ logging.error(`Error getting new track for current device(${currentDeviceId}): ${_context20.t1}`);
+ throw _context20.t1;
  case 47:
  if (newTrack) {
-
+ _context20.next = 50;
  break;
  }
  logging.error('Failed to enable video. It was not possible to get a new track from the camera');
- return
+ return _context20.abrupt("return");
  case 50:
  if (!currentVideoFilter) {
-
+ _context20.next = 59;
  break;
  }
  originalVideoTrack = mediaProcessor.getOriginalVideoTrack();
-
+ _context20.next = 54;
  return mediaProcessor.setVideoFilter(currentVideoFilter);
  case 54:
-
+ _context20.next = 56;
  return mediaProcessor.setMediaStream(webRTCStream);
  case 56:
-
+ _context20.next = 58;
  return mediaProcessor.setVideoTrack(newTrack);
  case 58:
- newTrack =
+ newTrack = _context20.sent;
  case 59:
  if (!_videoMediaProcessorConnector) {
-
+ _context20.next = 64;
  break;
  }
  originalVideoTrack = _videoMediaProcessorConnector.originalTrack;
-
+ _context20.next = 63;
  return _videoMediaProcessorConnector.setTrack(newTrack);
  case 63:
- newTrack =
+ newTrack = _context20.sent;
  case 64:
-
-
+ _context20.prev = 64;
+ _context20.next = 67;
  return replaceTrackAndUpdate(oldTrack, newTrack);
  case 67:
  // We stop the original track as a final step because whatever effects
  // were applied to it should remain in effect until the new track is
  // set
  (_originalVideoTrack = originalVideoTrack) == null ? void 0 : _originalVideoTrack.stop();
-
+ _context20.next = 73;
  break;
  case 70:
-
-
- throw
+ _context20.prev = 70;
+ _context20.t2 = _context20["catch"](64);
+ throw _context20.t2;
  case 73:
  case "end":
- return
+ return _context20.stop();
  }
- },
+ }, _callee20, null, [[16, 20], [37, 43], [64, 70]]);
  }));
  return function (_x12, _x13) {
- return
+ return _ref31.apply(this, arguments);
  };
  }());
  const resetAudioFallbackStateOnPeerConnection = (0, _cancellation.callWithCancellation)( /*#__PURE__*/function () {
- var
+ var _ref32 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee21(isActive, cancellation) {
  var peerConnections;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee21$(_context21) {
+ while (1) switch (_context21.prev = _context21.next) {
  case 0:
-
+ _context21.next = 2;
  return getAllPeerConnections();
  case 2:
- peerConnections =
+ peerConnections = _context21.sent;
  if (!cancellation.isCanceled()) {
  peerConnections.forEach(peerConnection => {
  if (isActive) {
@@ -19099,12 +19154,12 @@ function PublisherFactory(_ref) {
  }
  case 4:
  case "end":
- return
+ return _context21.stop();
  }
- },
+ }, _callee21);
  }));
  return function (_x14, _x15) {
- return
+ return _ref32.apply(this, arguments);
  };
  }());
  const resetAudioFallbackState = () => {

@@ -19114,10 +19169,10 @@ function PublisherFactory(_ref) {
  (_chromeMixin2 = chromeMixin) == null ? void 0 : _chromeMixin2.videoDisabledIndicator.setWarning(false);
  };
  const onAudioFallbackActiveVideo = /*#__PURE__*/function () {
- var
+ var _ref33 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee22(previousState) {
  var _chromeMixin3, _chromeMixin4;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee22$(_context22) {
+ while (1) switch (_context22.prev = _context22.next) {
  case 0:
  (_chromeMixin3 = chromeMixin) == null ? void 0 : _chromeMixin3.videoDisabledIndicator.disableVideo(false);
  (_chromeMixin4 = chromeMixin) == null ? void 0 : _chromeMixin4.videoDisabledIndicator.setWarning(false);

@@ -19130,12 +19185,12 @@ function PublisherFactory(_ref) {
  }
  case 3:
  case "end":
- return
+ return _context22.stop();
  }
- },
+ }, _callee22);
  }));
  return function onAudioFallbackActiveVideo(_x16) {
- return
+ return _ref33.apply(this, arguments);
  };
  }();
  const onAudioFallbackActiveVideoWithWarning = () => {

@@ -19146,10 +19201,10 @@ function PublisherFactory(_ref) {
  this.trigger('videoDisableWarning');
  };
  const onAudioFallbackSuspendedVideo = /*#__PURE__*/function () {
- var
+ var _ref34 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee23() {
  var _chromeMixin6;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee23$(_context23) {
+ while (1) switch (_context23.prev = _context23.next) {
  case 0:
  if (properties.publishVideo) {
  (_chromeMixin6 = chromeMixin) == null ? void 0 : _chromeMixin6.videoDisabledIndicator.disableVideo(true);

@@ -19159,12 +19214,12 @@ function PublisherFactory(_ref) {
  });
  case 2:
  case "end":
- return
+ return _context23.stop();
  }
- },
+ }, _callee23);
  }));
  return function onAudioFallbackSuspendedVideo() {
- return
+ return _ref34.apply(this, arguments);
  };
  }();
  let audioFallbackCoordinator;

@@ -19177,9 +19232,9 @@ function PublisherFactory(_ref) {
  [_audioFallbackVideoStates.default.ACTIVE_VIDEO_WITH_WARNING]: onAudioFallbackActiveVideoWithWarning,
  [_audioFallbackVideoStates.default.SUSPENDED_VIDEO]: onAudioFallbackSuspendedVideo
  };
- audioFallbackCoordinator.on('stateChange',
- let previousState =
- audioFallbackVideoState =
+ audioFallbackCoordinator.on('stateChange', _ref35 => {
+ let previousState = _ref35.previousState,
+ audioFallbackVideoState = _ref35.state;
  try {
  audioFallbackStateHandlers[audioFallbackVideoState](previousState);
  } catch (err) {
@@ -19530,42 +19585,42 @@ function PublisherFactory(_ref) {
  {
  let videoIndex = 0;
  const cycleVideo = /*#__PURE__*/function () {
- var
+ var _ref36 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee24() {
  var oldTrack, vidDevices, hasOtherVideoDevices, newVideoDevice, deviceId;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee24$(_context24) {
+ while (1) switch (_context24.prev = _context24.next) {
  case 0:
  if (!(OTHelpers.env.isLegacyEdge || !windowMock.RTCRtpSender || typeof windowMock.RTCRtpSender.prototype.replaceTrack !== 'function')) {
-
+ _context24.next = 2;
  break;
  }
  throw otError(Errors.UNSUPPORTED_BROWSER, new Error('Publisher#cycleVideo is not supported in your browser.'), ExceptionCodes.UNABLE_TO_PUBLISH);
  case 2:
  if (!(isCustomVideoTrack || isScreenSharing)) {
-
+ _context24.next = 4;
  break;
  }
  throw otError(Errors.NOT_SUPPORTED, new Error('Publisher#cycleVideo: The publisher is not using a camera video source'));
  case 4:
  oldTrack = getCurrentTrack();
  if (oldTrack) {
-
+ _context24.next = 7;
  break;
  }
  throw otError(Errors.NOT_SUPPORTED, new Error('Publisher#cycleVideo cannot cycleVideo when you have no video source.'));
  case 7:
  videoIndex += 1;
-
+ _context24.next = 10;
  return getVideoDevices();
  case 10:
- vidDevices =
+ vidDevices = _context24.sent;
  // different devices return the cameras in different orders
  hasOtherVideoDevices = vidDevices.filter(device => device.deviceId !== currentDeviceId).length > 0;
  if (hasOtherVideoDevices) {
-
+ _context24.next = 14;
  break;
  }
- return
+ return _context24.abrupt("return", currentDeviceId);
  case 14:
  while (vidDevices[videoIndex % vidDevices.length].deviceId === currentDeviceId) {
  videoIndex += 1;

@@ -19573,18 +19628,18 @@ function PublisherFactory(_ref) {
  privateEvents.emit('streamDestroy');
  newVideoDevice = vidDevices[videoIndex % vidDevices.length];
  deviceId = newVideoDevice.deviceId;
-
+ _context24.next = 20;
  return attemptToSetVideoTrack(deviceId);
  case 20:
- return
+ return _context24.abrupt("return", currentDeviceId);
  case 21:
  case "end":
- return
+ return _context24.stop();
  }
- },
+ }, _callee24);
  }));
  return function cycleVideo() {
- return
+ return _ref36.apply(this, arguments);
  };
  }();

@@ -19629,62 +19684,62 @@ function PublisherFactory(_ref) {
  *
  * @see <a href="#setVideoSource">Publisher.setVideoSource()</a>
  */
- this.cycleVideo = (0, _blockCallsUntilComplete.default)( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ this.cycleVideo = (0, _blockCallsUntilComplete.default)( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee25() {
  var deviceId;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee25$(_context25) {
+ while (1) switch (_context25.prev = _context25.next) {
  case 0:
-
-
+ _context25.prev = 0;
+ _context25.next = 3;
  return cycleVideo();
|
case 3:
|
|
19641
|
-
deviceId =
|
|
19642
|
-
|
|
19696
|
+
deviceId = _context25.sent;
|
|
19697
|
+
_context25.next = 10;
|
|
19643
19698
|
break;
|
|
19644
19699
|
case 6:
|
|
19645
|
-
|
|
19646
|
-
|
|
19647
|
-
logging.error(`Publisher#cycleVideo: could not cycle video: ${
|
|
19648
|
-
throw
|
|
19700
|
+
_context25.prev = 6;
|
|
19701
|
+
_context25.t0 = _context25["catch"](0);
|
|
19702
|
+
logging.error(`Publisher#cycleVideo: could not cycle video: ${_context25.t0}`);
|
|
19703
|
+
throw _context25.t0;
|
|
19649
19704
|
case 10:
|
|
19650
|
-
return
|
|
19705
|
+
return _context25.abrupt("return", {
|
|
19651
19706
|
deviceId
|
|
19652
19707
|
});
|
|
19653
19708
|
case 11:
|
|
19654
19709
|
case "end":
|
|
19655
|
-
return
|
|
19710
|
+
return _context25.stop();
|
|
19656
19711
|
}
|
|
19657
|
-
},
|
|
19712
|
+
}, _callee25, null, [[0, 6]]);
|
|
19658
19713
|
})));
|
|
19659
19714
|
}
|
|
19660
19715
|
const replaceTrackAndUpdate = /*#__PURE__*/function () {
|
|
19661
|
-
var
|
|
19716
|
+
var _ref38 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee27(oldTrack, newTrack) {
|
|
19662
19717
|
var pcs, isNewTrackFiltered, video;
|
|
19663
|
-
return _regenerator.default.wrap(function
|
|
19664
|
-
while (1) switch (
|
|
19718
|
+
return _regenerator.default.wrap(function _callee27$(_context27) {
|
|
19719
|
+
while (1) switch (_context27.prev = _context27.next) {
|
|
19665
19720
|
case 0:
|
|
19666
|
-
|
|
19721
|
+
_context27.next = 2;
|
|
19667
19722
|
return getAllPeerConnections();
|
|
19668
19723
|
case 2:
|
|
19669
|
-
pcs =
|
|
19670
|
-
|
|
19724
|
+
pcs = _context27.sent;
|
|
19725
|
+
_context27.next = 5;
|
|
19671
19726
|
return Promise.all(pcs.map( /*#__PURE__*/function () {
|
|
19672
|
-
var
|
|
19673
|
-
return _regenerator.default.wrap(function
|
|
19674
|
-
while (1) switch (
|
|
19727
|
+
var _ref39 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee26(pc) {
|
|
19728
|
+
return _regenerator.default.wrap(function _callee26$(_context26) {
|
|
19729
|
+
while (1) switch (_context26.prev = _context26.next) {
|
|
19675
19730
|
case 0:
|
|
19676
|
-
|
|
19731
|
+
_context26.next = 2;
|
|
19677
19732
|
return pc.findAndReplaceTrack(oldTrack, newTrack);
|
|
19678
19733
|
case 2:
|
|
19679
19734
|
pc.setP2PMaxBitrate();
|
|
19680
19735
|
case 3:
|
|
19681
19736
|
case "end":
|
|
19682
|
-
return
|
|
19737
|
+
return _context26.stop();
|
|
19683
19738
|
}
|
|
19684
|
-
},
|
|
19739
|
+
}, _callee26);
|
|
19685
19740
|
}));
|
|
19686
19741
|
return function (_x19) {
|
|
19687
|
-
return
|
|
19742
|
+
return _ref39.apply(this, arguments);
|
|
19688
19743
|
};
|
|
19689
19744
|
}()));
|
|
19690
19745
|
case 5:
|
|
@@ -19720,20 +19775,20 @@ function PublisherFactory(_ref) {
|
|
|
19720
19775
|
updateVideo();
|
|
19721
19776
|
case 13:
|
|
19722
19777
|
case "end":
|
|
19723
|
-
return
|
|
19778
|
+
return _context27.stop();
|
|
19724
19779
|
}
|
|
19725
|
-
},
|
|
19780
|
+
}, _callee27);
|
|
19726
19781
|
}));
|
|
19727
19782
|
return function replaceTrackAndUpdate(_x17, _x18) {
|
|
19728
|
-
return
|
|
19783
|
+
return _ref38.apply(this, arguments);
|
|
19729
19784
|
};
|
|
19730
19785
|
}();
|
|
19731
19786
|
const getTrackFromDeviceId = /*#__PURE__*/function () {
|
|
19732
|
-
var
|
|
19787
|
+
var _ref40 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee28(deviceId) {
|
|
19733
19788
|
var _newVideoStream;
|
|
19734
19789
|
var oldTrackDeviceId, newOptions, _processedOptions2, getUserMediaHelper, newVideoStream, _newVideoStream$getVi, track;
|
|
19735
|
-
return _regenerator.default.wrap(function
|
|
19736
|
-
while (1) switch (
|
|
19790
|
+
return _regenerator.default.wrap(function _callee28$(_context28) {
|
|
19791
|
+
while (1) switch (_context28.prev = _context28.next) {
|
|
19737
19792
|
case 0:
|
|
19738
19793
|
oldTrackDeviceId = getCurrentTrackDeviceId();
|
|
19739
19794
|
setCurrentTrackDeviceId(deviceId);
|
|
@@ -19746,32 +19801,32 @@ function PublisherFactory(_ref) {
|
|
|
19746
19801
|
accessDialogClosed: onAccessDialogClosed
|
|
19747
19802
|
});
|
|
19748
19803
|
_processedOptions2 = processedOptions, getUserMediaHelper = _processedOptions2.getUserMedia;
|
|
19749
|
-
|
|
19750
|
-
|
|
19804
|
+
_context28.prev = 8;
|
|
19805
|
+
_context28.next = 11;
|
|
19751
19806
|
return getUserMediaHelper();
|
|
19752
19807
|
case 11:
|
|
19753
|
-
newVideoStream =
|
|
19754
|
-
|
|
19808
|
+
newVideoStream = _context28.sent;
|
|
19809
|
+
_context28.next = 18;
|
|
19755
19810
|
break;
|
|
19756
19811
|
case 14:
|
|
19757
|
-
|
|
19758
|
-
|
|
19759
|
-
logging.error(
|
|
19760
|
-
throw
|
|
19812
|
+
_context28.prev = 14;
|
|
19813
|
+
_context28.t0 = _context28["catch"](8);
|
|
19814
|
+
logging.error(_context28.t0);
|
|
19815
|
+
throw _context28.t0;
|
|
19761
19816
|
case 18:
|
|
19762
19817
|
_newVideoStream$getVi = (_newVideoStream = newVideoStream) == null ? void 0 : _newVideoStream.getVideoTracks(), track = _newVideoStream$getVi[0];
|
|
19763
19818
|
if (!track) {
|
|
19764
19819
|
setCurrentTrackDeviceId(oldTrackDeviceId);
|
|
19765
19820
|
}
|
|
19766
|
-
return
|
|
19821
|
+
return _context28.abrupt("return", track);
|
|
19767
19822
|
case 21:
|
|
19768
19823
|
case "end":
|
|
19769
|
-
return
|
|
19824
|
+
return _context28.stop();
|
|
19770
19825
|
}
|
|
19771
|
-
},
|
|
19826
|
+
}, _callee28, null, [[8, 14]]);
|
|
19772
19827
|
}));
|
|
19773
19828
|
return function getTrackFromDeviceId(_x20) {
|
|
19774
|
-
return
|
|
19829
|
+
return _ref40.apply(this, arguments);
|
|
19775
19830
|
};
|
|
19776
19831
|
}();
|
|
19777
19832
|
const getCurrentTrack = () => {
|
|
@@ -19788,102 +19843,102 @@ function PublisherFactory(_ref) {
|
|
|
19788
19843
|
_currentTrackDeviceId = deviceId;
|
|
19789
19844
|
};
|
|
19790
19845
|
const getVideoDevices = /*#__PURE__*/function () {
|
|
19791
|
-
var
|
|
19846
|
+
var _ref41 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee29() {
|
|
19792
19847
|
var devices, vidDevices;
|
|
19793
|
-
return _regenerator.default.wrap(function
|
|
19794
|
-
while (1) switch (
|
|
19848
|
+
return _regenerator.default.wrap(function _callee29$(_context29) {
|
|
19849
|
+
while (1) switch (_context29.prev = _context29.next) {
|
|
19795
19850
|
case 0:
|
|
19796
|
-
|
|
19851
|
+
_context29.next = 2;
|
|
19797
19852
|
return deviceHelpers.shouldAskForDevices();
|
|
19798
19853
|
case 2:
|
|
19799
|
-
devices =
|
|
19854
|
+
devices = _context29.sent;
|
|
19800
19855
|
vidDevices = devices.videoDevices;
|
|
19801
19856
|
if (!(!devices.video || !vidDevices || !vidDevices.length)) {
|
|
19802
|
-
|
|
19857
|
+
_context29.next = 6;
|
|
19803
19858
|
break;
|
|
19804
19859
|
}
|
|
19805
19860
|
throw otError(Errors.NO_DEVICES_FOUND, new Error('No video devices available'), ExceptionCodes.UNABLE_TO_PUBLISH);
|
|
19806
19861
|
case 6:
|
|
19807
|
-
return
|
|
19862
|
+
return _context29.abrupt("return", vidDevices);
|
|
19808
19863
|
case 7:
|
|
19809
19864
|
case "end":
|
|
19810
|
-
return
|
|
19865
|
+
return _context29.stop();
|
|
19811
19866
|
}
|
|
19812
|
-
},
|
|
19867
|
+
}, _callee29);
|
|
19813
19868
|
}));
|
|
19814
19869
|
return function getVideoDevices() {
|
|
19815
|
-
return
|
|
19870
|
+
return _ref41.apply(this, arguments);
|
|
19816
19871
|
};
|
|
19817
19872
|
}();
|
|
19818
19873
|
const replaceAudioTrackInPeerConnections = /*#__PURE__*/function () {
|
|
19819
|
-
var
|
|
19874
|
+
var _ref42 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee30(oldTrack, newTrack) {
|
|
19820
19875
|
var isAdaptiveEnabled, activePeerConnection, inactivePeerConnection, pcs;
|
|
19821
|
-
return _regenerator.default.wrap(function
|
|
19822
|
-
while (1) switch (
|
|
19876
|
+
return _regenerator.default.wrap(function _callee30$(_context30) {
|
|
19877
|
+
while (1) switch (_context30.prev = _context30.next) {
|
|
19823
19878
|
case 0:
|
|
19824
19879
|
if (_this.session) {
|
|
19825
|
-
|
|
19880
|
+
_context30.next = 2;
|
|
19826
19881
|
break;
|
|
19827
19882
|
}
|
|
19828
|
-
return
|
|
19883
|
+
return _context30.abrupt("return");
|
|
19829
19884
|
case 2:
|
|
19830
19885
|
isAdaptiveEnabled = _this.session.sessionInfo.isAdaptiveEnabled; // If we are in an AMR session and a P2P PC, we need to change the track in the P2P PC
|
|
19831
19886
|
// and check the Mantis PC, if we need to add the new track to the muted Mantis audio tracks and replace it.
|
|
19832
19887
|
// activeSourceStreamId may be undefined if we haven't finished publishing yet
|
|
19833
19888
|
if (!(activeSourceStreamId && isAdaptiveEnabled)) {
|
|
19834
|
-
|
|
19889
|
+
_context30.next = 17;
|
|
19835
19890
|
break;
|
|
19836
19891
|
}
|
|
19837
|
-
|
|
19892
|
+
_context30.next = 6;
|
|
19838
19893
|
return getPeerConnectionBySourceStreamId(activeSourceStreamId);
|
|
19839
19894
|
case 6:
|
|
19840
|
-
activePeerConnection =
|
|
19841
|
-
|
|
19895
|
+
activePeerConnection = _context30.sent;
|
|
19896
|
+
_context30.next = 9;
|
|
19842
19897
|
return activePeerConnection == null ? void 0 : activePeerConnection.findAndReplaceTrack(oldTrack, newTrack);
|
|
19843
19898
|
case 9:
|
|
19844
19899
|
if (!(activeSourceStreamId === 'P2P')) {
|
|
19845
|
-
|
|
19900
|
+
_context30.next = 15;
|
|
19846
19901
|
break;
|
|
19847
19902
|
}
|
|
19848
|
-
|
|
19903
|
+
_context30.next = 12;
|
|
19849
19904
|
return getMantisPeerConnection();
|
|
19850
19905
|
case 12:
|
|
19851
|
-
inactivePeerConnection =
|
|
19852
|
-
|
|
19906
|
+
inactivePeerConnection = _context30.sent;
|
|
19907
|
+
_context30.next = 15;
|
|
19853
19908
|
return amrAudioTrackProcessor.replaceTrackInMutedAudioTracks(inactivePeerConnection, oldTrack, newTrack);
|
|
19854
19909
|
case 15:
|
|
19855
|
-
|
|
19910
|
+
_context30.next = 22;
|
|
19856
19911
|
break;
|
|
19857
19912
|
case 17:
|
|
19858
|
-
|
|
19913
|
+
_context30.next = 19;
|
|
19859
19914
|
return getAllPeerConnections();
|
|
19860
19915
|
case 19:
|
|
19861
|
-
pcs =
|
|
19862
|
-
|
|
19916
|
+
pcs = _context30.sent;
|
|
19917
|
+
_context30.next = 22;
|
|
19863
19918
|
return Promise.all(pcs.map(pc => pc.findAndReplaceTrack(oldTrack, newTrack)));
|
|
19864
19919
|
case 22:
|
|
19865
19920
|
case "end":
|
|
19866
|
-
return
|
|
19921
|
+
return _context30.stop();
|
|
19867
19922
|
}
|
|
19868
|
-
},
|
|
19923
|
+
}, _callee30);
|
|
19869
19924
|
}));
|
|
19870
19925
|
return function replaceAudioTrackInPeerConnections(_x21, _x22) {
|
|
19871
|
-
return
|
|
19926
|
+
return _ref42.apply(this, arguments);
|
|
19872
19927
|
};
|
|
19873
19928
|
}();
|
|
19874
19929
|
const replaceAudioTrack = /*#__PURE__*/function () {
|
|
19875
|
-
var
|
|
19930
|
+
var _ref43 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee31(oldTrack, newTrack) {
|
|
19876
19931
|
var video, isFilteredTrack;
|
|
19877
|
-
return _regenerator.default.wrap(function
|
|
19878
|
-
while (1) switch (
|
|
19932
|
+
return _regenerator.default.wrap(function _callee31$(_context31) {
|
|
19933
|
+
while (1) switch (_context31.prev = _context31.next) {
|
|
19879
19934
|
case 0:
|
|
19880
19935
|
if (!(oldTrack === newTrack)) {
|
|
19881
|
-
|
|
19936
|
+
_context31.next = 2;
|
|
19882
19937
|
break;
|
|
19883
19938
|
}
|
|
19884
|
-
return
|
|
19939
|
+
return _context31.abrupt("return");
|
|
19885
19940
|
case 2:
|
|
19886
|
-
|
|
19941
|
+
_context31.next = 4;
|
|
19887
19942
|
return replaceAudioTrackInPeerConnections(oldTrack, newTrack);
|
|
19888
19943
|
case 4:
|
|
19889
19944
|
if (newTrack) {
|
|
@@ -19925,23 +19980,23 @@ function PublisherFactory(_ref) {
  refreshAudioVideoUI();
  case 14:
  case "end":
- return
+ return _context31.stop();
  }
- },
+ }, _callee31);
  }));
  return function replaceAudioTrack(_x23, _x24) {
- return
+ return _ref43.apply(this, arguments);
  };
  }();
  const resetAudioSource = /*#__PURE__*/function () {
- var
+ var _ref44 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee32(audioTrack) {
  var audioDeviceId, newAudioTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee32$(_context32) {
+ while (1) switch (_context32.prev = _context32.next) {
  case 0:
  audioDeviceId = audioTrack.getSettings().deviceId;
-
-
+ _context32.prev = 1;
+ _context32.next = 4;
  return _this.setAudioSource(audioDeviceId);
  case 4:
  // We need to add the onmute listener to the new audio track.
@@ -19950,20 +20005,20 @@ function PublisherFactory(_ref) {
  newAudioTrack.onmute = () => handleBuggedMutedLocalAudioTrack(newAudioTrack);
  newAudioTrack.onunmute = () => handleBuggedUnMutedLocalAudioTrack(newAudioTrack);
  }
-
+ _context32.next = 11;
  break;
  case 8:
-
-
- logging.error(
+ _context32.prev = 8;
+ _context32.t0 = _context32["catch"](1);
+ logging.error(_context32.t0);
  case 11:
  case "end":
- return
+ return _context32.stop();
  }
- },
+ }, _callee32, null, [[1, 8]]);
  }));
  return function resetAudioSource(_x25) {
- return
+ return _ref44.apply(this, arguments);
  };
  }();

@@ -19977,15 +20032,15 @@ function PublisherFactory(_ref) {
  }
  // trigger the handler onVisibilityChange
  const visibilityHandler = /*#__PURE__*/function () {
- var
- return _regenerator.default.wrap(function
- while (1) switch (
+ var _ref45 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee33() {
+ return _regenerator.default.wrap(function _callee33$(_context33) {
+ while (1) switch (_context33.prev = _context33.next) {
  case 0:
  if (document.hidden) {
-
+ _context33.next = 5;
  break;
  }
-
+ _context33.next = 3;
  return resetAudioSource(audioTrack);
  case 3:
  if (shouldRePublishVideo) {
@@ -19994,12 +20049,12 @@ function PublisherFactory(_ref) {
  document.removeEventListener('visibilitychange', visibilityHandler);
  case 5:
  case "end":
- return
+ return _context33.stop();
  }
- },
+ }, _callee33);
  }));
  return function visibilityHandler() {
- return
+ return _ref45.apply(this, arguments);
  };
  }();
  document.addEventListener('visibilitychange', visibilityHandler);
@@ -20079,80 +20134,80 @@ function PublisherFactory(_ref) {
|
|
|
20079
20134
|
return cancelPreviousSetAudioSourceSentinel;
|
|
20080
20135
|
};
|
|
20081
20136
|
const setAudioSource = /*#__PURE__*/function () {
|
|
20082
|
-
var
|
|
20137
|
+
var _ref46 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee35(audioSource) {
|
|
20083
20138
|
var currentCancelSentinel, setStreamIfNotCancelled, prevAudioSource, newTrack, newOptions, prevLabel, prevDeviceId, _processedOptions3, getUserMediaHelper, prevOptions, previousDevice, stream;
|
|
20084
|
-
return _regenerator.default.wrap(function
|
|
20085
|
-
while (1) switch (
|
|
20139
|
+
return _regenerator.default.wrap(function _callee35$(_context35) {
|
|
20140
|
+
while (1) switch (_context35.prev = _context35.next) {
|
|
20086
20141
|
case 0:
|
|
20087
20142
|
if (isSetAudioSourceSupported) {
|
|
20088
|
-
|
|
20143
|
+
_context35.next = 2;
|
|
20089
20144
|
break;
|
|
20090
20145
|
}
|
|
20091
20146
|
throw setAudioSourceNotSupportedError();
|
|
20092
20147
|
case 2:
|
|
20093
20148
|
currentCancelSentinel = getSetAudioSourceCancellationSentinel();
|
|
20094
20149
|
setStreamIfNotCancelled = /*#__PURE__*/function () {
|
|
20095
|
-
var
|
|
20096
|
-
return _regenerator.default.wrap(function
|
|
20097
|
-
while (1) switch (
|
|
20150
|
+
var _ref47 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee34(stream) {
|
|
20151
|
+
return _regenerator.default.wrap(function _callee34$(_context34) {
|
|
20152
|
+
while (1) switch (_context34.prev = _context34.next) {
|
|
20098
20153
|
case 0:
|
|
20099
20154
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20100
|
-
|
|
20155
|
+
_context34.next = 3;
|
|
20101
20156
|
break;
|
|
20102
20157
|
}
|
|
20103
20158
|
stream.getTracks(track => track.stop());
|
|
20104
20159
|
throw setAudioSourceCancellationError();
|
|
20105
20160
|
case 3:
|
|
20106
|
-
return
|
|
20161
|
+
return _context34.abrupt("return", setAudioSource(stream.getAudioTracks()[0]));
|
|
20107
20162
|
case 4:
|
|
20108
20163
|
case "end":
|
|
20109
|
-
return
|
|
20164
|
+
return _context34.stop();
|
|
20110
20165
|
}
|
|
20111
|
-
},
|
|
20166
|
+
}, _callee34);
|
|
20112
20167
|
}));
|
|
20113
20168
|
return function setStreamIfNotCancelled(_x27) {
|
|
20114
|
-
return
|
|
20169
|
+
return _ref47.apply(this, arguments);
|
|
20115
20170
|
};
|
|
20116
20171
|
}();
|
|
20117
20172
|
prevAudioSource = _this.getAudioSource();
|
|
20118
20173
|
if (prevAudioSource) {
|
|
20119
|
-
|
|
20174
|
+
_context35.next = 7;
|
|
20120
20175
|
break;
|
|
20121
20176
|
}
|
|
20122
20177
|
throw otError(Errors.NOT_SUPPORTED, new Error('Publisher#setAudioSource cannot add an audio source when you started without one.'));
|
|
20123
20178
|
case 7:
|
|
20124
20179
|
if (!(audioSource instanceof MediaStreamTrack)) {
|
|
20125
|
-
|
|
20180
|
+
_context35.next = 26;
|
|
20126
20181
|
break;
|
|
20127
20182
|
}
|
|
20128
20183
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20129
|
-
|
|
20184
|
+
_context35.next = 10;
|
|
20130
20185
|
break;
|
|
20131
20186
|
}
|
|
20132
20187
|
throw setAudioSourceCancellationError();
|
|
20133
20188
|
case 10:
|
|
20134
20189
|
if (!_audioMediaProcessorConnector) {
|
|
20135
|
-
|
|
20190
|
+
_context35.next = 23;
|
|
20136
20191
|
break;
|
|
20137
20192
|
}
|
|
20138
|
-
|
|
20139
|
-
|
|
20193
|
+
_context35.prev = 11;
|
|
20194
|
+
_context35.next = 14;
|
|
20140
20195
|
return _audioMediaProcessorConnector.setTrack(audioSource);
|
|
20141
20196
|
case 14:
|
|
20142
|
-
newTrack =
|
|
20143
|
-
|
|
20197
|
+
newTrack = _context35.sent;
|
|
20198
|
+
_context35.next = 17;
|
|
20144
20199
|
return replaceAudioTrack(prevAudioSource, newTrack);
|
|
20145
20200
|
case 17:
|
|
20146
|
-
return
|
|
20201
|
+
return _context35.abrupt("return", _context35.sent);
|
|
20147
20202
|
case 20:
|
|
20148
|
-
|
|
20149
|
-
|
|
20150
|
-
logging.error(`Error setting track on audioMediaProcessorConnector: ${
|
|
20203
|
+
_context35.prev = 20;
|
|
20204
|
+
_context35.t0 = _context35["catch"](11);
|
|
20205
|
+
logging.error(`Error setting track on audioMediaProcessorConnector: ${_context35.t0}`);
|
|
20151
20206
|
case 23:
|
|
20152
|
-
return
|
|
20207
|
+
return _context35.abrupt("return", replaceAudioTrack(prevAudioSource, audioSource));
|
|
20153
20208
|
case 26:
|
|
20154
20209
|
if (!(typeof audioSource === 'string')) {
|
|
20155
|
-
|
|
20210
|
+
_context35.next = 73;
|
|
20156
20211
|
break;
|
|
20157
20212
|
}
|
|
20158
20213
|
// Must be a deviceId, call getUserMedia and get the MediaStreamTrack
|
|
@@ -20170,21 +20225,21 @@ function PublisherFactory(_ref) {
|
|
|
20170
20225
|
prevAudioSource.stop();
|
|
20171
20226
|
}
|
|
20172
20227
|
_processedOptions3 = processedOptions, getUserMediaHelper = _processedOptions3.getUserMedia;
|
|
20173
|
-
|
|
20174
|
-
|
|
20175
|
-
|
|
20228
|
+
_context35.prev = 36;
|
|
20229
|
+
_context35.t1 = setStreamIfNotCancelled;
|
|
20230
|
+
_context35.next = 40;
|
|
20176
20231
|
return getUserMediaHelper();
|
|
20177
20232
|
case 40:
|
|
20178
|
-
|
|
20179
|
-
|
|
20180
|
-
return (0,
|
|
20233
|
+
_context35.t2 = _context35.sent;
|
|
20234
|
+
_context35.next = 43;
|
|
20235
|
+
return (0, _context35.t1)(_context35.t2);
|
|
20181
20236
|
case 43:
|
|
20182
|
-
return
|
|
20237
|
+
return _context35.abrupt("return", _context35.sent);
|
|
20183
20238
|
case 46:
|
|
20184
|
-
|
|
20185
|
-
|
|
20239
|
+
_context35.prev = 46;
|
|
20240
|
+
_context35.t3 = _context35["catch"](36);
|
|
20186
20241
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20187
|
-
|
|
20242
|
+
_context35.next = 50;
|
|
20188
20243
|
break;
|
|
20189
20244
|
}
|
|
20190
20245
|
throw setAudioSourceCancellationError();
|
|
@@ -20193,15 +20248,15 @@ function PublisherFactory(_ref) {
|
|
|
20193
20248
|
prevOptions.videoSource = null;
|
|
20194
20249
|
prevOptions.audioSource = prevDeviceId;
|
|
20195
20250
|
if (!(!prevOptions.audioSource && prevLabel)) {
|
|
20196
|
-
|
|
20251
|
+
_context35.next = 60;
|
|
20197
20252
|
break;
|
|
20198
20253
|
}
|
|
20199
|
-
|
|
20254
|
+
_context35.next = 56;
|
|
20200
20255
|
return getInputMediaDevices();
|
|
20201
20256
|
case 56:
|
|
20202
|
-
previousDevice =
|
|
20257
|
+
previousDevice = _context35.sent.find(x => x.label === prevLabel);
|
|
20203
20258
|
if (!currentCancelSentinel.isCanceled()) {
|
|
20204
|
-
|
|
20259
|
+
_context35.next = 59;
|
|
20205
20260
|
break;
|
|
20206
20261
|
}
|
|
20207
20262
|
throw setAudioSourceCancellationError();
|
|
@@ -20211,39 +20266,39 @@ function PublisherFactory(_ref) {
|
|
|
20211
20266
|
}
|
|
20212
20267
|
case 60:
|
|
20213
20268
|
if (prevOptions.audioSource) {
|
|
20214
|
-
|
|
20269
|
+
_context35.next = 63;
|
|
20215
20270
|
break;
|
|
20216
20271
|
}
|
|
20217
|
-
|
|
20218
|
-
throw otError(Errors.NOT_FOUND,
|
|
20272
|
+
_context35.t3.message += ' (could not determine previous audio device)';
|
|
20273
|
+
throw otError(Errors.NOT_FOUND, _context35.t3);
|
|
20219
20274
|
case 63:
|
|
20220
20275
|
processedOptions = processPubOptions(prevOptions, 'OT.Publisher.setAudioSource', () => currentCancelSentinel.isCanceled() || state && state.isDestroyed());
|
|
20221
|
-
|
|
20276
|
+
_context35.next = 66;
|
|
20222
20277
|
return processedOptions.getUserMedia().catch(error => {
|
|
20223
20278
|
// eslint-disable-next-line no-param-reassign
|
|
20224
20279
|
error.message += ' (could not obtain previous audio device)';
|
|
20225
20280
|
throw error;
|
|
20226
20281
|
});
|
|
20227
20282
|
case 66:
|
|
20228
|
-
stream =
|
|
20229
|
-
|
|
20283
|
+
stream = _context35.sent;
|
|
20284
|
+
_context35.next = 69;
|
|
20230
20285
|
return setStreamIfNotCancelled(stream);
|
|
20231
20286
|
case 69:
|
|
20232
|
-
|
|
20233
|
-
throw
|
|
20287
|
+
_context35.t3.message += ' (reverted to previous audio device)';
|
|
20288
|
+
throw _context35.t3;
|
|
20234
20289
|
case 71:
|
|
20235
|
-
|
|
20290
|
+
_context35.next = 74;
|
|
20236
20291
|
break;
|
|
20237
20292
|
case 73:
|
|
20238
20293
|
throw otError(Errors.INVALID_PARAMETER, new Error('Invalid parameter passed to OT.Publisher.setAudioSource(). Expected string or MediaStreamTrack.'));
|
|
20239
20294
|
case 74:
|
|
20240
20295
|
case "end":
|
|
20241
|
-
return
|
|
20296
|
+
return _context35.stop();
|
|
20242
20297
|
}
|
|
20243
|
-
},
|
|
20298
|
+
}, _callee35, null, [[11, 20], [36, 46]]);
|
|
20244
20299
|
}));
|
|
20245
20300
|
return function setAudioSource(_x26) {
|
|
20246
|
-
return
|
|
20301
|
+
return _ref46.apply(this, arguments);
|
|
20247
20302
|
};
|
|
20248
20303
|
}();
|
|
20249
20304
|
this.setAudioSource = setAudioSource;
|
|
@@ -20297,21 +20352,21 @@ function PublisherFactory(_ref) {
|
|
|
20297
20352
|
* completes successfully. If there is an error, the promise is rejected.
|
|
20298
20353
|
*/
|
|
20299
20354
|
const setVideoSource = /*#__PURE__*/function () {
|
|
20300
|
-
var _setVideoSource = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
20355
|
+
var _setVideoSource = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee36(videoSourceId) {
|
|
20301
20356
|
var invalidVideoSourceOtError, setVideoSourceOtError, isAudioOnly, deviceList, isValidVideoDeviceId;
|
|
20302
|
-
return _regenerator.default.wrap(function
|
|
20303
|
-
while (1) switch (
|
|
20357
|
+
return _regenerator.default.wrap(function _callee36$(_context36) {
|
|
20358
|
+
while (1) switch (_context36.prev = _context36.next) {
|
|
20304
20359
|
case 0:
|
|
20305
20360
|
invalidVideoSourceOtError = otError(Errors.INVALID_VIDEO_SOURCE, new Error('Invalid video source. Video source must be a valid video input deviceId'), 1041);
|
|
20306
20361
|
setVideoSourceOtError = otError(Errors.SET_VIDEO_SOURCE_FAILURE, new Error('You cannot reset the video source on a publisher that does not currently use a camera source.'), 1040);
|
|
20307
20362
|
if (!(OTHelpers.env.isLegacyEdge || !windowMock.RTCRtpSender || typeof windowMock.RTCRtpSender.prototype.replaceTrack !== 'function')) {
|
|
20308
|
-
|
|
20363
|
+
_context36.next = 4;
|
|
20309
20364
|
break;
|
|
20310
20365
|
}
|
|
20311
20366
|
throw otError(Errors.UNSUPPORTED_BROWSER, new Error('setVideoSource is not supported in your browser.'), ExceptionCodes.UNABLE_TO_PUBLISH);
|
|
20312
20367
|
case 4:
|
|
20313
20368
|
if (!(typeof videoSourceId !== 'string')) {
|
|
20314
|
-
|
|
20369
|
+
_context36.next = 6;
|
|
20315
20370
|
break;
|
|
20316
20371
|
}
|
|
20317
20372
|
throw invalidVideoSourceOtError;
|
|
@@ -20319,29 +20374,29 @@ function PublisherFactory(_ref) {
|
|
|
20319
20374
|
// we can't use hasVideo because that only checks if the video is
|
|
20320
20375
|
isAudioOnly = !webRTCStream || webRTCStream.getVideoTracks().length === 0;
|
|
20321
20376
|
if (!(isCustomVideoTrack || isScreenSharing || isAudioOnly)) {
|
|
20322
|
-
|
|
20377
|
+
_context36.next = 9;
|
|
20323
20378
|
break;
|
|
20324
20379
|
}
|
|
20325
20380
|
throw setVideoSourceOtError;
|
|
20326
20381
|
case 9:
|
|
20327
|
-
|
|
20382
|
+
_context36.next = 11;
|
|
20328
20383
|
return getInputMediaDevices();
|
|
20329
20384
|
case 11:
|
|
20330
|
-
deviceList =
|
|
20385
|
+
deviceList = _context36.sent;
|
|
20331
20386
|
isValidVideoDeviceId = deviceList.find(device => device.kind === 'videoInput' && device.deviceId === videoSourceId);
|
|
20332
20387
|
if (isValidVideoDeviceId) {
|
|
20333
|
-
|
|
20388
|
+
_context36.next = 15;
|
|
20334
20389
|
break;
|
|
20335
20390
|
}
|
|
20336
20391
|
throw invalidVideoSourceOtError;
|
|
20337
20392
|
case 15:
|
|
20338
|
-
|
|
20393
|
+
_context36.next = 17;
|
|
20339
20394
|
return attemptToSetVideoTrack(videoSourceId);
|
|
20340
20395
|
case 17:
|
|
20341
20396
|
case "end":
|
|
20342
|
-
return
|
|
20397
|
+
return _context36.stop();
|
|
20343
20398
|
}
|
|
20344
|
-
},
|
|
20399
|
+
}, _callee36);
|
|
20345
20400
|
}));
|
|
20346
20401
|
function setVideoSource(_x28) {
|
|
20347
20402
|
return _setVideoSource.apply(this, arguments);
|
|
@@ -20350,39 +20405,39 @@ function PublisherFactory(_ref) {
|
|
|
20350
20405
|
}();
|
|
20351
20406
|
this.setVideoSource = setVideoSource;
|
|
20352
20407
|
const attemptToSetVideoTrack = /*#__PURE__*/function () {
|
|
20353
|
-
var
|
|
20408
|
+
var _ref48 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee37(newVideoDeviceId) {
|
|
20354
20409
|
var _originalVideoTrack2;
|
|
20355
20410
|
var oldDeviceID, oldTrack, videoFilterToReapplyOnAndroid, newVideoTrack, originalVideoTrack;
|
|
20356
|
-
return _regenerator.default.wrap(function
|
|
20357
|
-
while (1) switch (
|
|
20411
|
+
return _regenerator.default.wrap(function _callee37$(_context37) {
|
|
20412
|
+
while (1) switch (_context37.prev = _context37.next) {
|
|
20358
20413
|
case 0:
|
|
20359
20414
|
oldDeviceID = currentDeviceId;
|
|
20360
20415
|
currentDeviceId = newVideoDeviceId;
|
|
20361
20416
|
|
|
20362
20417
|
// We shouldn't replace the track unless the video is on
|
|
20363
20418
|
if (properties.publishVideo) {
|
|
20364
|
-
|
|
20419
|
+
_context37.next = 4;
|
|
20365
20420
|
break;
|
|
20366
20421
|
}
|
|
20367
|
-
return
|
|
20422
|
+
return _context37.abrupt("return");
|
|
20368
20423
|
case 4:
|
|
20369
20424
|
if (!hasTrackFromDevice(newVideoDeviceId)) {
|
|
20370
|
-
|
|
20425
|
+
_context37.next = 6;
|
|
20371
20426
|
break;
|
|
20372
20427
|
}
|
|
20373
|
-
return
|
|
20428
|
+
return _context37.abrupt("return");
|
|
20374
20429
|
case 6:
|
|
20375
20430
|
oldTrack = getCurrentTrack();
|
|
20376
20431
|
if (!(properties.publishVideo && OTHelpers.env.isAndroid && (OTHelpers.env.isChrome || OTHelpers.env.isFirefox))) {
|
|
20377
|
-
|
|
20432
|
+
_context37.next = 14;
|
|
20378
20433
|
break;
|
|
20379
20434
|
}
|
|
20380
20435
|
if (!currentVideoFilter) {
|
|
20381
|
-
|
|
20436
|
+
_context37.next = 13;
|
|
20382
20437
|
break;
|
|
20383
20438
|
}
|
|
20384
20439
|
videoFilterToReapplyOnAndroid = currentVideoFilter;
|
|
20385
|
-
|
|
20440
|
+
_context37.next = 12;
|
|
20386
20441
|
return destroyMediaProcessor();
|
|
20387
20442
|
case 12:
|
|
20388
20443
|
currentVideoFilter = null;
|
|
@@ -20391,55 +20446,55 @@ function PublisherFactory(_ref) {
|
|
|
20391
20446
|
oldTrack.stop();
|
|
20392
20447
|
}
|
|
20393
20448
|
case 14:
|
|
20394
|
-
|
|
20395
|
-
|
|
20449
|
+
_context37.prev = 14;
|
|
20450
|
+
_context37.next = 17;
|
|
20396
20451
|
return getTrackFromDeviceId(newVideoDeviceId);
|
|
20397
20452
|
case 17:
|
|
20398
|
-
newVideoTrack =
|
|
20399
|
-
|
|
20453
|
+
newVideoTrack = _context37.sent;
|
|
20454
|
+
_context37.next = 25;
|
|
20400
20455
|
break;
|
|
20401
20456
|
case 20:
|
|
20402
|
-
|
|
20403
|
-
|
|
20457
|
+
_context37.prev = 20;
|
|
20458
|
+
_context37.t0 = _context37["catch"](14);
|
|
20404
20459
|
currentDeviceId = oldDeviceID;
|
|
20405
|
-
logging.error(
|
|
20406
|
-
throw
|
|
20460
|
+
logging.error(_context37.t0);
|
|
20461
|
+
throw _context37.t0;
|
|
20407
20462
|
case 25:
|
|
20408
20463
|
if (newVideoTrack) {
|
|
20409
|
-
|
|
20464
|
+
_context37.next = 28;
|
|
20410
20465
|
break;
|
|
20411
20466
|
}
|
|
20412
20467
|
logging.warn('Unable to aquire video track. Moving to next device.');
|
|
20413
|
-
return
|
|
20468
|
+
return _context37.abrupt("return");
|
|
20414
20469
|
case 28:
|
|
20415
20470
|
if (!currentVideoFilter) {
|
|
20416
|
-
|
|
20471
|
+
_context37.next = 33;
|
|
20417
20472
|
break;
|
|
20418
20473
|
}
|
|
20419
20474
|
originalVideoTrack = mediaProcessor.getOriginalVideoTrack();
|
|
20420
|
-
|
|
20475
|
+
_context37.next = 32;
|
|
20421
20476
|
return mediaProcessor.setVideoTrack(newVideoTrack);
|
|
20422
20477
|
case 32:
|
|
20423
|
-
newVideoTrack =
|
|
20478
|
+
newVideoTrack = _context37.sent;
|
|
20424
20479
|
case 33:
|
|
20425
20480
|
if (!_videoMediaProcessorConnector) {
|
|
20426
|
-
|
|
20481
|
+
_context37.next = 38;
|
|
20427
20482
|
break;
|
|
20428
20483
|
}
|
|
20429
20484
|
originalVideoTrack = _videoMediaProcessorConnector.originalTrack;
|
|
20430
|
-
|
|
20485
|
+
_context37.next = 37;
|
|
20431
20486
|
return _videoMediaProcessorConnector.setTrack(newVideoTrack);
|
|
20432
20487
|
case 37:
|
|
20433
|
-
newVideoTrack =
|
|
20488
|
+
newVideoTrack = _context37.sent;
|
|
20434
20489
|
case 38:
|
|
20435
|
-
|
|
20490
|
+
_context37.next = 40;
|
|
20436
20491
|
return replaceTrackAndUpdate(oldTrack, newVideoTrack);
|
|
20437
20492
|
case 40:
|
|
20438
20493
|
if (!videoFilterToReapplyOnAndroid) {
|
|
20439
|
-
|
|
20494
|
+
_context37.next = 43;
|
|
20440
20495
|
break;
|
|
20441
20496
|
}
|
|
20442
|
-
|
|
20497
|
+
_context37.next = 43;
|
|
20443
20498
|
return _this.applyVideoFilter(videoFilterToReapplyOnAndroid);
|
|
20444
20499
|
case 43:
|
|
20445
20500
|
// We stop the original track as a final step because whatever effects
|
|
@@ -20451,12 +20506,12 @@ function PublisherFactory(_ref) {
  }
  case 45:
  case "end":
- return
+ return _context37.stop();
  }
- },
+ }, _callee37, null, [[14, 20]]);
  }));
  return function attemptToSetVideoTrack(_x29) {
- return
+ return _ref48.apply(this, arguments);
  };
  }();

@@ -20506,21 +20561,21 @@ function PublisherFactory(_ref) {
  this._ = {
  privateEvents,
  setIceConfig(newIceConfig) {
- return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee38() {
  var pcs;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee38$(_context38) {
+ while (1) switch (_context38.prev = _context38.next) {
  case 0:
-
+ _context38.next = 2;
  return getAllPeerConnections();
  case 2:
- pcs =
+ pcs = _context38.sent;
  pcs.forEach(pc => pc.setIceConfig(newIceConfig));
  case 4:
  case "end":
- return
+ return _context38.stop();
  }
- },
+ }, _callee38);
  }))();
  },
  publishToSession: (session, analyticsReplacement) => {
@@ -20550,8 +20605,8 @@ function PublisherFactory(_ref) {
  return;
  }
  this.once('initSuccess', resolve);
- this.once('destroyed',
- let reason =
+ this.once('destroyed', _ref49 => {
+ let reason = _ref49.reason;
  let reasonDescription = '';
  if (reason) {
  reasonDescription = ` Reason: ${reason}`;
@@ -20758,6 +20813,7 @@ function PublisherFactory(_ref) {
  const err = new Error(createErrorFromReason());
  this.trigger('publishComplete', otError(reason === 'mediaStopped' ? Errors.MEDIA_ENDED : Errors.CANCEL, err));
  }
+ this.session = null;
  logAnalyticsEvent('unpublish', 'Success');
  this._.streamDestroyed(reason);
  return this;
@@ -20781,6 +20837,7 @@ function PublisherFactory(_ref) {
  if (!event.isDefaultPrevented()) {
  this.destroy();
  }
+ this.streamId = null;
  },
  archivingStatus(status) {
  if (chromeMixin) {
@@ -20792,55 +20849,55 @@ function PublisherFactory(_ref) {
|
|
|
20792
20849
|
return webRTCStream;
|
|
20793
20850
|
},
|
|
20794
20851
|
switchTracks() {
|
|
20795
|
-
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
20852
|
+
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee39() {
|
|
20796
20853
|
var stream;
|
|
20797
|
-
return _regenerator.default.wrap(function
|
|
20798
|
-
while (1) switch (
|
|
20854
|
+
return _regenerator.default.wrap(function _callee39$(_context39) {
|
|
20855
|
+
while (1) switch (_context39.prev = _context39.next) {
|
|
20799
20856
|
case 0:
|
|
20800
|
-
|
|
20801
|
-
|
|
20857
|
+
_context39.prev = 0;
|
|
20858
|
+
_context39.next = 3;
|
|
20802
20859
|
return getUserMedia().catch(userMediaError);
|
|
20803
20860
|
case 3:
|
|
20804
|
-
stream =
|
|
20805
|
-
|
|
20861
|
+
stream = _context39.sent;
|
|
20862
|
+
_context39.next = 10;
|
|
20806
20863
|
break;
|
|
20807
20864
|
case 6:
|
|
20808
|
-
|
|
20809
|
-
|
|
20810
|
-
logging.error(`OT.Publisher.switchTracks failed to getUserMedia: ${
|
|
20811
|
-
throw
|
|
20865
|
+
_context39.prev = 6;
|
|
20866
|
+
_context39.t0 = _context39["catch"](0);
|
|
20867
|
+
logging.error(`OT.Publisher.switchTracks failed to getUserMedia: ${_context39.t0}`);
|
|
20868
|
+
throw _context39.t0;
|
|
20812
20869
|
case 10:
|
|
20813
20870
|
setNewStream(stream);
|
|
20814
|
-
|
|
20871
|
+
_context39.prev = 11;
|
|
20815
20872
|
bindVideo();
|
|
20816
|
-
|
|
20873
|
+
_context39.next = 21;
|
|
20817
20874
|
break;
|
|
20818
20875
|
case 15:
|
|
20819
|
-
|
|
20820
|
-
|
|
20821
|
-
if (!(
|
|
20822
|
-
|
|
20876
|
+
_context39.prev = 15;
|
|
20877
|
+
_context39.t1 = _context39["catch"](11);
|
|
20878
|
+
if (!(_context39.t1 instanceof _cancel.CancellationError)) {
|
|
20879
|
+
_context39.next = 19;
|
|
20823
20880
|
break;
|
|
20824
20881
|
}
|
|
20825
|
-
return
|
|
20882
|
+
return _context39.abrupt("return");
|
|
20826
20883
|
case 19:
|
|
20827
|
-
logging.error('Error while binding video',
|
|
20828
|
-
throw
|
|
20884
|
+
logging.error('Error while binding video', _context39.t1);
|
|
20885
|
+
throw _context39.t1;
|
|
20829
20886
|
case 21:
|
|
20830
|
-
|
|
20887
|
+
_context39.prev = 21;
|
|
20831
20888
|
replaceTracks();
|
|
20832
|
-
|
|
20889
|
+
_context39.next = 29;
|
|
20833
20890
|
break;
|
|
20834
20891
|
case 25:
|
|
20835
|
-
|
|
20836
|
-
|
|
20837
|
-
logging.error('Error replacing tracks',
|
|
20838
|
-
throw
|
|
20892
|
+
_context39.prev = 25;
|
|
20893
|
+
_context39.t2 = _context39["catch"](21);
|
|
20894
|
+
logging.error('Error replacing tracks', _context39.t2);
|
|
20895
|
+
throw _context39.t2;
|
|
20839
20896
|
case 29:
|
|
20840
20897
|
case "end":
|
|
20841
|
-
return
|
|
20898
|
+
return _context39.stop();
|
|
20842
20899
|
}
|
|
20843
|
-
},
|
|
20900
|
+
}, _callee39, null, [[0, 6], [11, 15], [21, 25]]);
|
|
20844
20901
|
}))();
|
|
20845
20902
|
},
|
|
20846
20903
|
getDataChannel(label, getOptions, completion) {
|
|
@@ -20862,8 +20919,8 @@ function PublisherFactory(_ref) {
  }
  getAllPeerConnections().then(peerConnections => {
  peerConnections.forEach(peerConnection => {
- const
- remoteConnectionId =
+ const _getPeerConnectionMet8 = getPeerConnectionMeta(peerConnection),
+ remoteConnectionId = _getPeerConnectionMet8.remoteConnectionId;
  logRepublish('Attempt', {
  remoteConnectionId
  });
@@ -20880,27 +20937,27 @@ function PublisherFactory(_ref) {
|
|
|
20880
20937
|
},
|
|
20881
20938
|
demoOnlyCycleVideo: this.cycleVideo,
|
|
20882
20939
|
testOnlyGetFramesEncoded() {
|
|
20883
|
-
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
20940
|
+
return (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee40() {
|
|
20884
20941
|
var peerConnections;
|
|
20885
|
-
return _regenerator.default.wrap(function
|
|
20886
|
-
while (1) switch (
|
|
20942
|
+
return _regenerator.default.wrap(function _callee40$(_context40) {
|
|
20943
|
+
while (1) switch (_context40.prev = _context40.next) {
|
|
20887
20944
|
case 0:
|
|
20888
|
-
|
|
20945
|
+
_context40.next = 2;
|
|
20889
20946
|
return getAllPeerConnections();
|
|
20890
20947
|
case 2:
|
|
20891
|
-
peerConnections =
|
|
20948
|
+
peerConnections = _context40.sent;
|
|
20892
20949
|
if (peerConnections.length) {
|
|
20893
|
-
|
|
20950
|
+
_context40.next = 5;
|
|
20894
20951
|
break;
|
|
20895
20952
|
}
|
|
20896
20953
|
throw new Error('No established PeerConnections yet');
|
|
20897
20954
|
case 5:
|
|
20898
|
-
return
|
|
20955
|
+
return _context40.abrupt("return", peerConnections[0]._testOnlyGetFramesEncoded());
|
|
20899
20956
|
case 6:
|
|
20900
20957
|
case "end":
|
|
20901
|
-
return
|
|
20958
|
+
return _context40.stop();
|
|
20902
20959
|
}
|
|
20903
|
-
},
|
|
20960
|
+
}, _callee40);
|
|
20904
20961
|
}))();
|
|
20905
20962
|
},
|
|
20906
20963
|
onStreamAvailable,
|
|
@@ -20929,45 +20986,45 @@ function PublisherFactory(_ref) {
|
|
|
20929
20986
|
}
|
|
20930
20987
|
}.bind(this),
|
|
20931
20988
|
setCongestionLevel: function () {
|
|
20932
|
-
var _setCongestionLevel = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
|
|
20989
|
+
var _setCongestionLevel = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee41(level, peerId) {
|
|
20933
20990
|
var pcs;
|
|
20934
|
-
return _regenerator.default.wrap(function
|
|
20935
|
-
while (1) switch (
|
|
20991
|
+
return _regenerator.default.wrap(function _callee41$(_context41) {
|
|
20992
|
+
while (1) switch (_context41.prev = _context41.next) {
|
|
20936
20993
|
case 0:
|
|
20937
20994
|
if (!audioFallbackCoordinator) {
|
|
20938
|
-
|
|
20995
|
+
_context41.next = 15;
|
|
20939
20996
|
break;
|
|
20940
20997
|
}
|
|
20941
20998
|
if (!peerId) {
|
|
20942
|
-
|
|
20999
|
+
_context41.next = 8;
|
|
20943
21000
|
break;
|
|
20944
21001
|
}
|
|
20945
|
-
|
|
21002
|
+
_context41.next = 4;
|
|
20946
21003
|
return getPeerConnectionById(peerId);
|
|
20947
21004
|
case 4:
|
|
20948
|
-
|
|
20949
|
-
|
|
20950
|
-
|
|
21005
|
+
_context41.t1 = _context41.sent;
|
|
21006
|
+
_context41.t0 = [_context41.t1];
|
|
21007
|
+
_context41.next = 11;
|
|
20951
21008
|
break;
|
|
20952
21009
|
case 8:
|
|
20953
|
-
|
|
21010
|
+
_context41.next = 10;
|
|
20954
21011
|
return getAllPeerConnections();
|
|
20955
21012
|
case 10:
|
|
20956
|
-
|
|
21013
|
+
_context41.t0 = _context41.sent;
|
|
20957
21014
|
case 11:
|
|
20958
|
-
pcs =
|
|
21015
|
+
pcs = _context41.t0;
|
|
20959
21016
|
pcs.forEach(pc => {
|
|
20960
21017
|
pc.setCongestionLevel(level);
|
|
20961
21018
|
});
|
|
20962
|
-
|
|
21019
|
+
_context41.next = 16;
|
|
20963
21020
|
break;
|
|
20964
21021
|
case 15:
|
|
20965
21022
|
logging.warn('Audio Fallback is not enabled');
|
|
20966
21023
|
case 16:
|
|
20967
21024
|
case "end":
|
|
20968
|
-
return
|
|
21025
|
+
return _context41.stop();
|
|
20969
21026
|
}
|
|
20970
|
-
},
|
|
21027
|
+
}, _callee41);
|
|
20971
21028
|
}));
|
|
20972
21029
|
function setCongestionLevel(_x30, _x31) {
|
|
20973
21030
|
return _setCongestionLevel.apply(this, arguments);
|
|
@@ -21306,37 +21363,37 @@ function PublisherFactory(_ref) {
|
|
|
21306
21363
|
* If there is an error, the promise is rejected and no new video filter is set.
|
|
21307
21364
|
*/
|
|
21308
21365
|
this.applyVideoFilter = /*#__PURE__*/function () {
|
|
21309
|
-
var
|
|
21366
|
+
var _ref50 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee42(videoFilter) {
|
|
21310
21367
|
var sanitizedVideoFilter, isSupported, message, _webRTCStream$getVide2, originalVideoTrack, filteredVideoTrack;
|
|
21311
|
-
return _regenerator.default.wrap(function
|
|
21312
|
-
while (1) switch (
|
|
21368
|
+
return _regenerator.default.wrap(function _callee42$(_context42) {
|
|
21369
|
+
while (1) switch (_context42.prev = _context42.next) {
|
|
21313
21370
|
case 0:
|
|
21314
21371
|
sanitizedVideoFilter = (0, _sanitizeVideoFilter.default)(videoFilter);
|
|
21315
21372
|
logAnalyticsEvent('applyVideoFilter', 'Attempt', {
|
|
21316
21373
|
videoFilter: sanitizedVideoFilter
|
|
21317
21374
|
});
|
|
21318
|
-
|
|
21375
|
+
_context42.prev = 2;
|
|
21319
21376
|
if (!_videoMediaProcessorConnector) {
|
|
21320
|
-
|
|
21377
|
+
_context42.next = 5;
|
|
21321
21378
|
break;
|
|
21322
21379
|
}
|
|
21323
21380
|
throw otError(Errors.NOT_SUPPORTED, new Error('Cannot apply video filter when videoMediaProcessorConnector is set.'));
|
|
21324
21381
|
case 5:
|
|
21325
21382
|
isSupported = MediaProcessor.isSupported();
|
|
21326
21383
|
if (isSupported) {
|
|
21327
|
-
|
|
21384
|
+
_context42.next = 8;
|
|
21328
21385
|
break;
|
|
21329
21386
|
}
|
|
21330
21387
|
throw otError(Errors.NOT_SUPPORTED, new Error('Browser does not support video filters (Insertable Streams and Worker APIs are required)'));
|
|
21331
21388
|
case 8:
|
|
21332
21389
|
if (mediaProcessor.isValidVideoFilter(videoFilter)) {
|
|
21333
|
-
|
|
21390
|
+
_context42.next = 10;
|
|
21334
21391
|
break;
|
|
21335
21392
|
}
|
|
21336
21393
|
throw otError(Errors.INVALID_PARAMETER, new Error('Video filter has invalid configuration'));
|
|
21337
21394
|
case 10:
|
|
21338
21395
|
if (webRTCStream) {
|
|
21339
|
-
|
|
21396
|
+
_context42.next = 15;
|
|
21340
21397
|
break;
|
|
21341
21398
|
}
|
|
21342
21399
|
message = 'Ignoring. No mediaStream';
|
|
@@ -21344,53 +21401,53 @@ function PublisherFactory(_ref) {
|
|
|
21344
21401
|
message
|
|
21345
21402
|
});
|
|
21346
21403
|
logging.warn(message);
|
|
21347
|
-
return
|
|
21404
|
+
return _context42.abrupt("return");
|
|
21348
21405
|
case 15:
|
|
21349
21406
|
if (!isScreenSharing) {
|
|
21350
|
-
|
|
21407
|
+
_context42.next = 17;
|
|
21351
21408
|
break;
|
|
21352
21409
|
}
|
|
21353
21410
|
throw otError(Errors.INVALID_PARAMETER, new Error('Video filters can not be applied to screen share'));
|
|
21354
21411
|
case 17:
|
|
21355
21412
|
enableMediaProcessorLogging();
|
|
21356
21413
|
if (properties.publishVideo) {
|
|
21357
|
-
|
|
21414
|
+
_context42.next = 22;
|
|
21358
21415
|
break;
|
|
21359
21416
|
}
|
|
21360
21417
|
currentVideoFilter = videoFilter;
|
|
21361
21418
|
logAnalyticsEvent('applyVideoFilter', 'Success', {
|
|
21362
21419
|
videoFilter: sanitizedVideoFilter
|
|
21363
21420
|
});
|
|
21364
|
-
return
|
|
21421
|
+
return _context42.abrupt("return");
|
|
21365
21422
|
case 22:
|
|
21366
21423
|
if (!mediaProcessor.getVideoFilter()) {
|
|
21367
|
-
|
|
21424
|
+
_context42.next = 37;
|
|
21368
21425
|
break;
|
|
21369
21426
|
}
|
|
21370
21427
|
if (!mediaProcessor.canUpdateVideoFilter(videoFilter.type)) {
|
|
21371
|
-
|
|
21428
|
+
_context42.next = 35;
|
|
21372
21429
|
break;
|
|
21373
21430
|
}
|
|
21374
|
-
|
|
21375
|
-
|
|
21431
|
+
_context42.prev = 24;
|
|
21432
|
+
_context42.next = 27;
|
|
21376
21433
|
return mediaProcessor.updateVideoFilter(videoFilter);
|
|
21377
21434
|
case 27:
|
|
21378
21435
|
currentVideoFilter = videoFilter;
|
|
21379
21436
|
logAnalyticsEvent('applyVideoFilter', 'Success', {
|
|
21380
21437
|
videoFilter: sanitizedVideoFilter
|
|
21381
21438
|
});
|
|
21382
|
-
return
|
|
21439
|
+
return _context42.abrupt("return");
|
|
21383
21440
|
case 32:
|
|
21384
|
-
|
|
21385
|
-
|
|
21386
|
-
logging.warn(`Error updating video filter: ${
|
|
21441
|
+
_context42.prev = 32;
|
|
21442
|
+
_context42.t0 = _context42["catch"](24);
|
|
21443
|
+
logging.warn(`Error updating video filter: ${_context42.t0}`);
|
|
21387
21444
|
case 35:
|
|
21388
|
-
|
|
21445
|
+
_context42.next = 37;
|
|
21389
21446
|
return _this.clearVideoFilter();
|
|
21390
21447
|
case 37:
|
|
21391
21448
|
_webRTCStream$getVide2 = webRTCStream.getVideoTracks(), originalVideoTrack = _webRTCStream$getVide2[0];
|
|
21392
21449
|
if (originalVideoTrack) {
|
|
21393
|
-
|
|
21450
|
+
_context42.next = 43;
|
|
21394
21451
|
break;
|
|
21395
21452
|
}
|
|
21396
21453
|
message = 'Ignoring. No video';
|
|
@@ -21398,32 +21455,32 @@ function PublisherFactory(_ref) {
|
|
|
21398
21455
|
message
|
|
21399
21456
|
});
|
|
21400
21457
|
logging.warn(message);
|
|
21401
|
-
return
|
|
21458
|
+
return _context42.abrupt("return");
|
|
21402
21459
|
case 43:
|
|
21403
|
-
|
|
21460
|
+
_context42.next = 45;
|
|
21404
21461
|
return mediaProcessor.setVideoFilter(videoFilter);
|
|
21405
21462
|
case 45:
|
|
21406
|
-
|
|
21463
|
+
_context42.next = 47;
|
|
21407
21464
|
return mediaProcessor.setMediaStream(webRTCStream);
|
|
21408
21465
|
case 47:
|
|
21409
|
-
filteredVideoTrack =
|
|
21466
|
+
filteredVideoTrack = _context42.sent;
|
|
21410
21467
|
if (!filteredVideoTrack) {
|
|
21411
|
-
|
|
21468
|
+
_context42.next = 51;
|
|
21412
21469
|
break;
|
|
21413
21470
|
}
|
|
21414
|
-
|
|
21471
|
+
_context42.next = 51;
|
|
21415
21472
|
return replaceTrackAndUpdate(originalVideoTrack, filteredVideoTrack);
|
|
21416
21473
|
case 51:
|
|
21417
|
-
|
|
21474
|
+
_context42.next = 58;
|
|
21418
21475
|
break;
|
|
21419
21476
|
case 53:
|
|
21420
|
-
|
|
21421
|
-
|
|
21422
|
-
logging.error(`Error applying video filter: ${
|
|
21477
|
+
_context42.prev = 53;
|
|
21478
|
+
_context42.t1 = _context42["catch"](2);
|
|
21479
|
+
logging.error(`Error applying video filter: ${_context42.t1}`);
|
|
21423
21480
|
logAnalyticsEvent('applyVideoFilter', 'Failure', {
|
|
21424
|
-
message:
|
|
21481
|
+
message: _context42.t1.message
|
|
21425
21482
|
});
|
|
21426
|
-
throw
|
|
21483
|
+
throw _context42.t1;
|
|
21427
21484
|
case 58:
|
|
21428
21485
|
currentVideoFilter = videoFilter;
|
|
21429
21486
|
logAnalyticsEvent('applyVideoFilter', 'Success', {
|
|
@@ -21431,12 +21488,12 @@ function PublisherFactory(_ref) {
  });
  case 60:
  case "end":
- return
+ return _context42.stop();
  }
- },
+ }, _callee42, null, [[2, 53], [24, 32]]);
  }));
  return function (_x32) {
- return
+ return _ref50.apply(this, arguments);
  };
  }();

@@ -21542,35 +21599,35 @@ function PublisherFactory(_ref) {
|
|
|
21542
21599
|
* If there is an error, the promise is rejected and no new video filter is set.
|
|
21543
21600
|
*/
|
|
21544
21601
|
this.applyAudioFilter = /*#__PURE__*/function () {
|
|
21545
|
-
var
|
|
21602
|
+
var _ref51 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee43(audioFilter) {
|
|
21546
21603
|
var isSupported, message, _NoiseSuppressionConf, assetsDirBaseUrl, vonageNoiseSuppression, noiseSuppressionConnector;
|
|
21547
|
-
return _regenerator.default.wrap(function
|
|
21548
|
-
while (1) switch (
|
|
21604
|
+
return _regenerator.default.wrap(function _callee43$(_context43) {
|
|
21605
|
+
while (1) switch (_context43.prev = _context43.next) {
|
|
21549
21606
|
case 0:
|
|
21550
21607
|
logAnalyticsEvent('applyAudioFilter', 'Attempt', {
|
|
21551
21608
|
audioFilter
|
|
21552
21609
|
});
|
|
21553
21610
|
isSupported = MediaProcessor.isSupported();
|
|
21554
21611
|
if (isSupported) {
|
|
21555
|
-
|
|
21612
|
+
_context43.next = 4;
|
|
21556
21613
|
break;
|
|
21557
21614
|
}
|
|
21558
21615
|
throw otError(Errors.NOT_SUPPORTED, new Error('Browser does not support audio filters (Insertable Streams and Worker APIs are required)'));
|
|
21559
21616
|
case 4:
|
|
21560
21617
|
if ((0, _isValidAudioFilter.default)(audioFilter)) {
|
|
21561
|
-
|
|
21618
|
+
_context43.next = 6;
|
|
21562
21619
|
break;
|
|
21563
21620
|
}
|
|
21564
21621
|
throw otError(Errors.INVALID_PARAMETER, new Error('Audio filter has invalid configuration'));
|
|
21565
21622
|
case 6:
|
|
21566
21623
|
if (!_audioMediaProcessorConnector) {
|
|
21567
|
-
|
|
21624
|
+
_context43.next = 8;
|
|
21568
21625
|
break;
|
|
21569
21626
|
}
|
|
21570
21627
|
throw otError(Errors.NOT_SUPPORTED, new Error('Cannot apply audio filter when audioMediaProcessorConnector is set.'));
|
|
21571
21628
|
case 8:
|
|
21572
21629
|
if (webRTCStream) {
|
|
21573
|
-
|
|
21630
|
+
_context43.next = 13;
|
|
21574
21631
|
break;
|
|
21575
21632
|
}
|
|
21576
21633
|
message = 'Ignoring. No mediaStream';
|
|
@@ -21578,28 +21635,28 @@ function PublisherFactory(_ref) {
|
|
|
21578
21635
|
message
|
|
21579
21636
|
});
|
|
21580
21637
|
logging.warn(message);
|
|
21581
|
-
return
|
|
21638
|
+
return _context43.abrupt("return");
|
|
21582
21639
|
case 13:
|
|
21583
21640
|
if (properties.publishAudio) {
|
|
21584
|
-
|
|
21641
|
+
_context43.next = 17;
|
|
21585
21642
|
break;
|
|
21586
21643
|
}
|
|
21587
21644
|
currentAudioFilter = audioFilter;
|
|
21588
21645
|
logAnalyticsEvent('applyAudioFilter', 'Success', {
|
|
21589
21646
|
audioFilter
|
|
21590
21647
|
});
|
|
21591
|
-
return
|
|
21648
|
+
return _context43.abrupt("return");
|
|
21592
21649
|
case 17:
|
|
21593
21650
|
if (!_this.getAudioFilter()) {
|
|
21594
|
-
|
|
21651
|
+
_context43.next = 20;
|
|
21595
21652
|
break;
|
|
21596
21653
|
}
|
|
21597
|
-
|
|
21654
|
+
_context43.next = 20;
|
|
21598
21655
|
return _this.clearAudioFilter();
|
|
21599
21656
|
case 20:
|
|
21600
|
-
|
|
21657
|
+
_context43.prev = 20;
|
|
21601
21658
|
if (!(audioFilter.type === 'advancedNoiseSuppression')) {
|
|
21602
|
-
|
|
21659
|
+
_context43.next = 32;
|
|
21603
21660
|
break;
|
|
21604
21661
|
}
|
|
21605
21662
|
_NoiseSuppressionConf = _defaultConfig.default.getConfig({
|
|
@@ -21607,40 +21664,40 @@ function PublisherFactory(_ref) {
|
|
|
21607
21664
|
proxyUrl: (0, _proxyUrl.getProxyUrl)()
|
|
21608
21665
|
}), assetsDirBaseUrl = _NoiseSuppressionConf.assetsDirBaseUrl;
|
|
21609
21666
|
vonageNoiseSuppression = createVonageNoiseSuppression();
|
|
21610
|
-
|
|
21667
|
+
_context43.next = 26;
|
|
21611
21668
|
return vonageNoiseSuppression.init({
|
|
21612
21669
|
assetsDirBaseUrl
|
|
21613
21670
|
});
|
|
21614
21671
|
case 26:
|
|
21615
|
-
|
|
21672
|
+
_context43.next = 28;
|
|
21616
21673
|
return vonageNoiseSuppression.getConnector();
|
|
21617
21674
|
case 28:
|
|
21618
|
-
noiseSuppressionConnector =
|
|
21619
|
-
|
|
21675
|
+
noiseSuppressionConnector = _context43.sent;
|
|
21676
|
+
_context43.next = 31;
|
|
21620
21677
|
return _setAudioMediaProcessorConnector(noiseSuppressionConnector);
|
|
21621
21678
|
case 31:
|
|
21622
21679
|
currentAudioFilter = audioFilter;
|
|
21623
21680
|
case 32:
|
|
21624
|
-
|
|
21681
|
+
_context43.next = 39;
|
|
21625
21682
|
break;
|
|
21626
21683
|
case 34:
|
|
21627
|
-
|
|
21628
|
-
|
|
21629
|
-
logging.error(`Error applying audio filter: ${
|
|
21684
|
+
_context43.prev = 34;
|
|
21685
|
+
_context43.t0 = _context43["catch"](20);
|
|
21686
|
+
logging.error(`Error applying audio filter: ${_context43.t0}`);
|
|
21630
21687
|
logAnalyticsEvent('applyAudioFilter', 'Failure', {
|
|
21631
|
-
message:
|
|
21688
|
+
message: _context43.t0.message
|
|
21632
21689
|
});
|
|
21633
|
-
throw
|
|
21690
|
+
throw _context43.t0;
|
|
21634
21691
|
case 39:
|
|
21635
21692
|
logAnalyticsEvent('applyAudioFilter', 'Success');
|
|
21636
21693
|
case 40:
|
|
21637
21694
|
case "end":
|
|
21638
|
-
return
|
|
21695
|
+
return _context43.stop();
|
|
21639
21696
|
}
|
|
21640
|
-
},
|
|
21697
|
+
}, _callee43, null, [[20, 34]]);
|
|
21641
21698
|
}));
|
|
21642
21699
|
return function (_x33) {
|
|
21643
|
-
return
|
|
21700
|
+
return _ref51.apply(this, arguments);
|
|
21644
21701
|
};
|
|
21645
21702
|
}();
|
|
21646
21703
|
|
|
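The hunks above renumber the transpiled state machine behind publisher.applyAudioFilter(). For orientation, a minimal application-side sketch of the call this generator implements (not part of the diff; the element id and error handling are illustrative assumptions):

  // Sketch: enabling the 'advancedNoiseSuppression' filter branched on at case 20 above.
  const publisher = OT.initPublisher('publisher-element');
  async function enableNoiseSuppression() {
    try {
      await publisher.applyAudioFilter({ type: 'advancedNoiseSuppression' });
    } catch (err) {
      // The generator rejects with NOT_SUPPORTED or INVALID_PARAMETER errors in its early cases.
      console.warn('applyAudioFilter failed:', err.message);
    }
  }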
@@ -21661,22 +21718,22 @@ function PublisherFactory(_ref) {
   * @return {Promise} A promise that resolves when the operation completes successfully.
   * If there is an error, the promise is rejected.
   */
- this.clearAudioFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ this.clearAudioFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee44() {
  var message;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee44$(_context44) {
+ while (1) switch (_context44.prev = _context44.next) {
  case 0:
  logAnalyticsEvent('clearAudioFilter', 'Attempt');
  if (!(!properties.publishAudio && _this.getAudioFilter())) {
-
+ _context44.next = 5;
  break;
  }
  currentAudioFilter = null;
  logAnalyticsEvent('clearAudioFilter', 'Success');
- return
+ return _context44.abrupt("return");
  case 5:
  if (MediaProcessor.isSupported()) {
-
+ _context44.next = 10;
  break;
  }
  message = 'Ignoring. "clearAudioFilter" not supported.';
@@ -21684,10 +21741,10 @@ function PublisherFactory(_ref) {
  message
  });
  logging.warn(message);
- return
+ return _context44.abrupt("return");
  case 10:
  if (_this.getAudioFilter()) {
-
+ _context44.next = 15;
  break;
  }
  message = 'Ignoring. No audio filter applied';
@@ -21695,10 +21752,10 @@ function PublisherFactory(_ref) {
  message
  });
  logging.debug(message);
- return
+ return _context44.abrupt("return");
  case 15:
  if (webRTCStream) {
-
+ _context44.next = 20;
  break;
  }
  message = 'Ignoring. No mediaStream';
@@ -21706,29 +21763,29 @@ function PublisherFactory(_ref) {
  message
  });
  logging.warn(message);
- return
+ return _context44.abrupt("return");
  case 20:
-
-
+ _context44.prev = 20;
+ _context44.next = 23;
  return _setAudioMediaProcessorConnector(null);
  case 23:
  currentAudioFilter = null;
-
+ _context44.next = 30;
  break;
  case 26:
-
-
+ _context44.prev = 26;
+ _context44.t0 = _context44["catch"](20);
  logAnalyticsEvent('clearAudioFilter', 'Failure', {
- error:
+ error: _context44.t0
  });
- return
+ return _context44.abrupt("return");
  case 30:
  logAnalyticsEvent('clearAudioFilter', 'Success');
  case 31:
  case "end":
- return
+ return _context44.stop();
  }
- },
+ }, _callee44, null, [[20, 26]]);
  }));

  /**
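clearAudioFilter() mirrors the method above; a companion sketch (application code, assumed, not part of the diff):

  // Sketch: removing a previously applied audio filter.
  async function disableAudioFilter(publisher) {
    // Resolves even when there is nothing to clear; the generator above logs
    // 'Ignoring. No audio filter applied' or 'Ignoring. No mediaStream' and returns.
    await publisher.clearAudioFilter();
  }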
@@ -21823,16 +21880,16 @@ function PublisherFactory(_ref) {
   * If there is an error, the promise is rejected and no connector is set.
   */
  this.setVideoMediaProcessorConnector = /*#__PURE__*/function () {
- var
+ var _ref53 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee45(mediaProcessorConnector) {
  var _webRTCStream$getVide3, filteredVideoTrack, videoTrack, _webRTCStream$getVide4, originalVideoTrack, message;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee45$(_context45) {
+ while (1) switch (_context45.prev = _context45.next) {
  case 0:
  logAnalyticsEvent('setVideoMediaProcessorConnector', 'Attempt', {
  message: mediaProcessorConnector ? 'setting the connector' : 'clearing the connector'
  });
  if (!_this.getVideoFilter()) {
-
+ _context45.next = 4;
  break;
  }
  logAnalyticsEvent('setVideoMediaProcessorConnector', 'Failure', {
@@ -21841,15 +21898,15 @@ function PublisherFactory(_ref) {
  throw otError(Errors.NOT_SUPPORTED, new Error('setVideoMediaProcessorConnector: Cannot use this method when videoFilter is set.'));
  case 4:
  if (!_videoMediaProcessorConnector) {
-
+ _context45.next = 14;
  break;
  }
  _webRTCStream$getVide3 = webRTCStream.getVideoTracks(), filteredVideoTrack = _webRTCStream$getVide3[0];
-
+ _context45.next = 8;
  return getTrackFromDeviceId(currentDeviceId);
  case 8:
- videoTrack =
-
+ videoTrack = _context45.sent;
+ _context45.next = 11;
  return replaceTrackAndUpdate(filteredVideoTrack, videoTrack);
  case 11:
  _videoMediaProcessorConnector.destroy();
@@ -21857,16 +21914,16 @@ function PublisherFactory(_ref) {
  _videoMediaProcessorConnector = null;
  case 14:
  if (mediaProcessorConnector) {
-
+ _context45.next = 17;
  break;
  }
  logAnalyticsEvent('setVideoMediaProcessorConnector', 'Success', {
  message: 'clearing the connector'
  });
- return
+ return _context45.abrupt("return");
  case 17:
  if (MediaProcessorConnector.isValidConnector(mediaProcessorConnector)) {
-
+ _context45.next = 20;
  break;
  }
  logAnalyticsEvent('setVideoMediaProcessorConnector', 'Failure', {
@@ -21877,7 +21934,7 @@ function PublisherFactory(_ref) {
  _videoMediaProcessorConnector = new MediaProcessorConnector(mediaProcessorConnector);
  _webRTCStream$getVide4 = webRTCStream.getVideoTracks(), originalVideoTrack = _webRTCStream$getVide4[0];
  if (originalVideoTrack) {
-
+ _context45.next = 28;
  break;
  }
  message = 'Connector not set as no video track is present.';
@@ -21886,37 +21943,37 @@ function PublisherFactory(_ref) {
  });
  logging.warn(`setVideoMediaProcessorConnector: ${message}`);
  _videoMediaProcessorConnector = null;
- return
+ return _context45.abrupt("return");
  case 28:
-
-
+ _context45.prev = 28;
+ _context45.next = 31;
  return _videoMediaProcessorConnector.setTrack(originalVideoTrack);
  case 31:
- filteredVideoTrack =
-
+ filteredVideoTrack = _context45.sent;
+ _context45.next = 34;
  return replaceTrackAndUpdate(originalVideoTrack, filteredVideoTrack);
  case 34:
-
+ _context45.next = 42;
  break;
  case 36:
-
-
+ _context45.prev = 36;
+ _context45.t0 = _context45["catch"](28);
  _videoMediaProcessorConnector = null;
- logging.error(`setVideoMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${
+ logging.error(`setVideoMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${_context45.t0}`);
  logAnalyticsEvent('setVideoMediaProcessorConnector', 'Failure', {
- message:
+ message: _context45.t0.message
  });
- throw
+ throw _context45.t0;
  case 42:
  logAnalyticsEvent('setVideoMediaProcessorConnector', 'Success');
  case 43:
  case "end":
- return
+ return _context45.stop();
  }
- },
+ }, _callee45, null, [[28, 36]]);
  }));
  return function (_x34) {
- return
+ return _ref53.apply(this, arguments);
  };
  }();

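setVideoMediaProcessorConnector() swaps the published camera track for the track produced by a media-processor connector, or restores the original track when called with null. A hedged usage sketch (application code, assumed; the connector object is built externally and must satisfy MediaProcessorConnector.isValidConnector(), as checked at case 17 above):

  // Sketch only: `connector` is an assumed, externally built media-processor connector.
  async function attachVideoProcessor(publisher, connector) {
    // Rejects while a videoFilter is applied (case 0 above) or if the connector is invalid.
    await publisher.setVideoMediaProcessorConnector(connector);
  }

  async function detachVideoProcessor(publisher) {
    // Passing null clears the connector; the code above restores the camera track
    // via getTrackFromDeviceId(currentDeviceId) and replaceTrackAndUpdate(...).
    await publisher.setVideoMediaProcessorConnector(null);
  }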
@@ -21979,71 +22036,71 @@ function PublisherFactory(_ref) {
   * If there is an error, the promise is rejected and no connector is set.
   */
  this.setAudioMediaProcessorConnector = /*#__PURE__*/function () {
- var
- return _regenerator.default.wrap(function
- while (1) switch (
+ var _ref54 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee46(mediaProcessorConnector) {
+ return _regenerator.default.wrap(function _callee46$(_context46) {
+ while (1) switch (_context46.prev = _context46.next) {
  case 0:
  logAnalyticsEvent('setAudioMediaProcessorConnector', 'Attempt', {
  message: mediaProcessorConnector ? 'setting the connector' : 'clearing the connector'
  });
-
-
+ _context46.prev = 1;
+ _context46.next = 4;
  return _setAudioMediaProcessorConnector(mediaProcessorConnector);
  case 4:
  logAnalyticsEvent('setAudioMediaProcessorConnector', 'Success', {
  message: mediaProcessorConnector ? undefined : 'clearing the connector'
  });
-
+ _context46.next = 11;
  break;
  case 7:
-
-
+ _context46.prev = 7;
+ _context46.t0 = _context46["catch"](1);
  logAnalyticsEvent('setAudioMediaProcessorConnector', 'Failure', {
- message:
+ message: _context46.t0.message
  });
- throw
+ throw _context46.t0;
  case 11:
  case "end":
- return
+ return _context46.stop();
  }
- },
+ }, _callee46, null, [[1, 7]]);
  }));
  return function (_x35) {
- return
+ return _ref54.apply(this, arguments);
  };
  }();
  const _setAudioMediaProcessorConnector = /*#__PURE__*/function () {
- var
+ var _ref55 = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee47(mediaProcessorConnector) {
  var message, _webRTCStream$getAudi, filteredAudioTrack, _webRTCStream$getAudi2, originalAudioTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee47$(_context47) {
+ while (1) switch (_context47.prev = _context47.next) {
  case 0:
  if (!(_this.getAudioFilter() && mediaProcessorConnector)) {
-
+ _context47.next = 3;
  break;
  }
  message = 'Tried to set audio MediaProcessorConnector when audio filter applied.';
  throw otError(Errors.NOT_SUPPORTED, new Error(message));
  case 3:
  if (!_audioMediaProcessorConnector) {
-
+ _context47.next = 9;
  break;
  }
  _webRTCStream$getAudi = webRTCStream.getAudioTracks(), filteredAudioTrack = _webRTCStream$getAudi[0];
-
+ _context47.next = 7;
  return replaceAudioTrack(filteredAudioTrack, _audioMediaProcessorConnector.originalTrack);
  case 7:
  _audioMediaProcessorConnector.destroy();
  _audioMediaProcessorConnector = null;
  case 9:
  if (mediaProcessorConnector) {
-
+ _context47.next = 11;
  break;
  }
- return
+ return _context47.abrupt("return");
  case 11:
  if (MediaProcessorConnector.isValidConnector(mediaProcessorConnector)) {
-
+ _context47.next = 14;
  break;
  }
  message = 'Invalid MediaProcessorConnector';
@@ -22051,7 +22108,7 @@ function PublisherFactory(_ref) {
  case 14:
  _webRTCStream$getAudi2 = webRTCStream.getAudioTracks(), originalAudioTrack = _webRTCStream$getAudi2[0];
  if (originalAudioTrack) {
-
+ _context47.next = 20;
  break;
  }
  message = 'Connector not set as no audio track is present.';
@@ -22060,30 +22117,30 @@ function PublisherFactory(_ref) {
  throw new Error(message);
  case 20:
  _audioMediaProcessorConnector = new MediaProcessorConnector(mediaProcessorConnector);
-
-
+ _context47.prev = 21;
+ _context47.next = 24;
  return _audioMediaProcessorConnector.setTrack(originalAudioTrack);
  case 24:
- filteredAudioTrack =
-
+ filteredAudioTrack = _context47.sent;
+ _context47.next = 27;
  return replaceAudioTrack(_this.getAudioSource(), filteredAudioTrack);
  case 27:
-
+ _context47.next = 34;
  break;
  case 29:
-
-
+ _context47.prev = 29;
+ _context47.t0 = _context47["catch"](21);
  _audioMediaProcessorConnector = null;
- logging.error(`setAudioMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${
- throw
+ logging.error(`setAudioMediaProcessorConnector: Error getting track from MediaProcessorConnector: ${_context47.t0}`);
+ throw _context47.t0;
  case 34:
  case "end":
- return
+ return _context47.stop();
  }
- },
+ }, _callee47, null, [[21, 29]]);
  }));
  return function _setAudioMediaProcessorConnector(_x36) {
- return
+ return _ref55.apply(this, arguments);
  };
  }();

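The audio counterpart above refuses to install a connector while an audio filter is active (the NOT_SUPPORTED guard at case 0 of _callee47). A hedged ordering sketch (application code, assumed, not part of the diff):

  // Sketch only: clear any active audio filter before installing a custom audio connector.
  async function attachAudioProcessor(publisher, connector) {
    if (publisher.getAudioFilter()) {
      await publisher.clearAudioFilter();
    }
    await publisher.setAudioMediaProcessorConnector(connector);
  }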
@@ -22104,22 +22161,22 @@ function PublisherFactory(_ref) {
   * @return {Promise} A promise that resolves when the operation completes successfully.
   * If there is an error, the promise is rejected.
   */
- this.clearVideoFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function
+ this.clearVideoFilter = /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee48() {
  var message, oldVideoFilter, _webRTCStream$getVide5, filteredVideoTrack, videoTrack;
- return _regenerator.default.wrap(function
- while (1) switch (
+ return _regenerator.default.wrap(function _callee48$(_context48) {
+ while (1) switch (_context48.prev = _context48.next) {
  case 0:
  logAnalyticsEvent('clearVideoFilter', 'Attempt');
  if (!(!properties.publishVideo && !mediaProcessor.getVideoFilter())) {
-
+ _context48.next = 5;
  break;
  }
  currentVideoFilter = null;
  logAnalyticsEvent('clearVideoFilter', 'Success');
- return
+ return _context48.abrupt("return");
  case 5:
  if (!(!mediaProcessor.getVideoFilter() && !currentVideoFilter)) {
-
+ _context48.next = 10;
  break;
  }
  message = 'Ignoring. No video filter applied';
@@ -22127,10 +22184,10 @@ function PublisherFactory(_ref) {
  message
  });
  logging.debug(message);
- return
+ return _context48.abrupt("return");
  case 10:
  if (MediaProcessor.isSupported()) {
-
+ _context48.next = 15;
  break;
  }
  message = 'Ignoring. "clearVideoFilter" not supported.';
@@ -22138,10 +22195,10 @@ function PublisherFactory(_ref) {
  message
  });
  logging.warn(message);
- return
+ return _context48.abrupt("return");
  case 15:
  if (webRTCStream) {
-
+ _context48.next = 20;
  break;
  }
  message = 'Ignoring. No mediaStream';
@@ -22149,43 +22206,43 @@ function PublisherFactory(_ref) {
  message
  });
  logging.warn(message);
- return
+ return _context48.abrupt("return");
  case 20:
  oldVideoFilter = currentVideoFilter;
  currentVideoFilter = null;
  _webRTCStream$getVide5 = webRTCStream.getVideoTracks(), filteredVideoTrack = _webRTCStream$getVide5[0];
-
-
+ _context48.prev = 23;
+ _context48.next = 26;
  return getTrackFromDeviceId(currentDeviceId);
  case 26:
- videoTrack =
-
+ videoTrack = _context48.sent;
+ _context48.next = 33;
  break;
  case 29:
-
-
- logging.error(
- return
+ _context48.prev = 29;
+ _context48.t0 = _context48["catch"](23);
+ logging.error(_context48.t0);
+ return _context48.abrupt("return");
  case 33:
  if (videoTrack) {
-
+ _context48.next = 36;
  break;
  }
  logging.warn('Failed to clear filter because there is no video track.');
- return
+ return _context48.abrupt("return");
  case 36:
-
-
+ _context48.prev = 36;
+ _context48.next = 39;
  return replaceTrackAndUpdate(filteredVideoTrack, videoTrack);
  case 39:
-
+ _context48.next = 41;
  return destroyMediaProcessor();
  case 41:
-
+ _context48.next = 46;
  break;
  case 43:
-
-
+ _context48.prev = 43;
+ _context48.t1 = _context48["catch"](36);
  // Restore the previous filter since this call has failed. This way, this function can be
  // called again if needed.
  currentVideoFilter = oldVideoFilter;
@@ -22193,9 +22250,9 @@ function PublisherFactory(_ref) {
  logAnalyticsEvent('clearVideoFilter', 'Success');
  case 47:
  case "end":
- return
+ return _context48.stop();
  }
- },
+ }, _callee48, null, [[23, 29], [36, 43]]);
  }));
  };

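clearVideoFilter() follows the same shape: it restores the device track fetched with getTrackFromDeviceId(currentDeviceId), tears down the media processor, and on failure restores the previous filter so the call can be retried. A short sketch (application code, assumed):

  // Sketch only: removing a previously applied video filter.
  async function removeVideoFilter(publisher) {
    if (publisher.getVideoFilter()) {
      await publisher.clearVideoFilter();
    }
  }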
@@ -48748,6 +48805,9 @@ function NativeVideoElementWrapperFactory(deps) {
  // it won't play.
  watcherIntervalTimeout = 1000;
  shouldPlay = _env.default.isiOS;
+ if (this._audioOnlyVideoElementWatcher) {
+ clearInterval(this._audioOnlyVideoElementWatcher);
+ }
  this._audioOnlyVideoElementWatcher = setInterval(() => {
  if (this._domAudioOnlyVideoElement && this._domAudioOnlyVideoElement.paused) {
  this._domAudioOnlyVideoElement.srcObject = this._domAudioOnlyVideoElement.srcObject;
@@ -48756,12 +48816,12 @@ function NativeVideoElementWrapperFactory(deps) {
  }
  }
  }, 100);
- _context4.next =
+ _context4.next = 6;
  return (0, _promiseDelay.default)(watcherIntervalTimeout);
- case
+ case 6:
  clearInterval(this._audioOnlyVideoElementWatcher);
  this._audioOnlyVideoElementWatcher = null;
- case
+ case 8:
  case "end":
  return _context4.stop();
  }
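The first of the two hunks above adds a guard so a still-running audio-only watcher interval is cleared before a new one is started, preventing stacked intervals when the element is rebound. Reduced to its essentials (a generic sketch, not the library's code):

  // Generic pattern: clear any existing interval before starting a replacement.
  function restartWatcher(owner, tick, periodMs = 100) {
    if (owner._watcher) {
      clearInterval(owner._watcher);
    }
    owner._watcher = setInterval(tick, periodMs);
    return owner._watcher;
  }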
@@ -59423,7 +59483,8 @@ function SubscriberFactory(_ref2) {
  checking: _eventNames.default.SUBSCRIBER_DISCONNECTED,
  connected: _eventNames.default.SUBSCRIBER_CONNECTED,
  completed: _eventNames.default.SUBSCRIBER_CONNECTED,
- disconnected: _eventNames.default.SUBSCRIBER_DISCONNECTED
+ disconnected: _eventNames.default.SUBSCRIBER_DISCONNECTED,
+ failed: _eventNames.default.SUBSCRIBER_DESTROYED
  };
  const onIceConnectionStateChange = (state, peerConnection) => {
  const currentConnectionState = connectionStateMap[state];
@@ -59434,6 +59495,16 @@ function SubscriberFactory(_ref2) {
  const isConnectionStateChanged = currentConnectionState && currentConnectionState !== lastConnectionState;
  if (isConnectionStateChanged) {
  _lastConnectionStatesMap[sourceStreamId] = currentConnectionState;
+ if (state === 'failed' && _isAdaptiveEnabled && sourceStreamId === 'P2P') {
+ logging.warn('OT.Subscriber: relayed PeerConnection has failed, rolling back to routed' + 'PeerConnection');
+
+ // When AMR, if P2P leg fails, we destroy it, so Rumor can make the transition to Routed
+ socket.subscriberDestroy(_stream.id, this.widgetId, sourceStreamId);
+ logAnalyticsEvent('AMRMantisRollback', 'Event');
+
+ // We want this recovery as silent as possible, so we avoid loading and eventing.
+ return;
+ }
  if (state === 'disconnected' && sourceStreamId !== 'P2P') {
  // This block of code initiates an iceRestart when a peer connection is disconnected
  // and the socket is still connected.
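The subscriber hunks map the ICE 'failed' state to SUBSCRIBER_DESTROYED and, when adaptive media routing is enabled, tear down a failed P2P leg so media can fall back to the routed path; the inline comment stresses that this recovery is intentionally silent. For orientation, the usual subscriber-side listeners look roughly like this (application code, assumed; the session and stream variables come from the surrounding app):

  // Sketch only: typical subscriber lifecycle listeners. The P2P-to-routed rollback
  // above is deliberately silent, so it is not expected to surface here.
  const subscriber = session.subscribe(stream, 'subscriber-element');
  subscriber.on('disconnected', () => {
    // Temporary connectivity loss; the SDK may attempt recovery (e.g. an ICE restart).
  });
  subscriber.on('destroyed', (event) => {
    console.debug('subscriber destroyed:', event.reason);
  });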