livekit-client 1.12.2 → 1.12.3
- package/dist/livekit-client.e2ee.worker.js.map +1 -1
- package/dist/livekit-client.e2ee.worker.mjs.map +1 -1
- package/dist/livekit-client.esm.mjs +687 -683
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/src/room/participant/publishUtils.d.ts +1 -1
- package/dist/src/room/participant/publishUtils.d.ts.map +1 -1
- package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
- package/dist/src/room/track/options.d.ts +1 -0
- package/dist/src/room/track/options.d.ts.map +1 -1
- package/dist/ts4.2/src/room/participant/publishUtils.d.ts +1 -1
- package/dist/ts4.2/src/room/track/options.d.ts +1 -0
- package/package.json +1 -1
- package/src/room/RTCEngine.ts +4 -4
- package/src/room/participant/publishUtils.ts +4 -2
- package/src/room/track/LocalVideoTrack.ts +10 -8
- package/src/room/track/options.ts +17 -16
package/dist/livekit-client.esm.mjs

```diff
@@ -11823,7 +11823,7 @@ function getMatch(exp, ua) {
   return match && match.length >= id && match[id] || '';
 }
 
-var version$1 = "1.12.2";
+var version$1 = "1.12.3";
 
 const version = version$1;
 const protocolVersion = 9;
```
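The only change in this hunk is the version constant itself. A quick way to confirm which build is actually bundled at runtime, assuming the package re-exports `version` and `protocolVersion` as the bundle bindings above suggest (that re-export is not shown in this diff):

```ts
import { version, protocolVersion } from 'livekit-client';

// Logs e.g. "livekit-client 1.12.3 (protocol 9)" once the dependency is updated.
console.log(`livekit-client ${version} (protocol ${protocolVersion})`);
```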
```diff
@@ -11880,11 +11880,11 @@ var AudioPresets;
  * Sane presets for video resolution/encoding
  */
 const VideoPresets = {
-  h90: new VideoPreset(160, 90,
-  h180: new VideoPreset(320, 180,
-  h216: new VideoPreset(384, 216, 180000,
-  h360: new VideoPreset(640, 360,
-  h540: new VideoPreset(960, 540,
+  h90: new VideoPreset(160, 90, 90000, 20),
+  h180: new VideoPreset(320, 180, 160000, 20),
+  h216: new VideoPreset(384, 216, 180000, 20),
+  h360: new VideoPreset(640, 360, 450000, 20),
+  h540: new VideoPreset(960, 540, 800000, 25),
   h720: new VideoPreset(1280, 720, 1700000, 30),
   h1080: new VideoPreset(1920, 1080, 3000000, 30),
   h1440: new VideoPreset(2560, 1440, 5000000, 30),
@@ -11894,22 +11894,23 @@ const VideoPresets = {
  * Four by three presets
  */
 const VideoPresets43 = {
-  h120: new VideoPreset(160, 120,
-  h180: new VideoPreset(240, 180,
-  h240: new VideoPreset(320, 240,
+  h120: new VideoPreset(160, 120, 70000, 20),
+  h180: new VideoPreset(240, 180, 125000, 20),
+  h240: new VideoPreset(320, 240, 140000, 20),
   h360: new VideoPreset(480, 360, 225000, 20),
-  h480: new VideoPreset(640, 480,
-  h540: new VideoPreset(720, 540,
-  h720: new VideoPreset(960, 720,
-  h1080: new VideoPreset(1440, 1080,
-  h1440: new VideoPreset(1920, 1440,
+  h480: new VideoPreset(640, 480, 500000, 20),
+  h540: new VideoPreset(720, 540, 600000, 25),
+  h720: new VideoPreset(960, 720, 1300000, 30),
+  h1080: new VideoPreset(1440, 1080, 2300000, 30),
+  h1440: new VideoPreset(1920, 1440, 3800000, 30)
 };
 const ScreenSharePresets = {
   h360fps3: new VideoPreset(640, 360, 200000, 3, 'medium'),
   h720fps5: new VideoPreset(1280, 720, 400000, 5, 'medium'),
-  h720fps15: new VideoPreset(1280, 720,
-
-
+  h720fps15: new VideoPreset(1280, 720, 1500000, 15, 'medium'),
+  h720fps30: new VideoPreset(1280, 720, 2000000, 30, 'medium'),
+  h1080fps15: new VideoPreset(1920, 1080, 2500000, 15, 'medium'),
+  h1080fps30: new VideoPreset(1920, 1080, 4000000, 30, 'medium')
 };
 
 /**
```
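The retuned presets above can also be selected explicitly when creating a room. This is a minimal sketch assuming the standard `Room` constructor options (`videoCaptureDefaults`, `publishDefaults`) and the `VideoPreset` `resolution`/`encoding` accessors, none of which are part of this diff:

```ts
import { Room, VideoPresets } from 'livekit-client';

// Pin capture and publish settings to one of the presets shown above
// instead of letting the SDK pick one from the preset tables.
const room = new Room({
  videoCaptureDefaults: {
    resolution: VideoPresets.h720.resolution, // 1280x720
  },
  publishDefaults: {
    videoEncoding: VideoPresets.h720.encoding, // maxBitrate 1_700_000, maxFramerate 30
    simulcast: true,
  },
});
```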
```diff
@@ -16029,13 +16030,13 @@ class RTCEngine extends eventsExports.EventEmitter {
     this.handleDataError = event => {
       const channel = event.currentTarget;
       const channelKind = channel.maxRetransmits === 0 ? 'lossy' : 'reliable';
-      if (event instanceof ErrorEvent) {
+      if (event instanceof ErrorEvent && event.error) {
         const {
           error
         } = event.error;
         livekitLogger.error("DataChannel error on ".concat(channelKind, ": ").concat(event.message), error);
       } else {
-        livekitLogger.error("Unknown DataChannel
+        livekitLogger.error("Unknown DataChannel error on ".concat(channelKind), event);
       }
     };
     this.handleBufferedAmountLow = event => {
```
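The added `event.error` check guards the nested destructure on the next line. A standalone sketch of the same pattern, with illustrative names that are not part of the diff:

```ts
// Some browsers deliver data channel 'error' events whose `error` field is
// missing, so destructuring it unconditionally would throw inside the handler.
function handleDataChannelError(event: Event) {
  if (event instanceof ErrorEvent && event.error) {
    // assumes an RTCErrorEvent-style payload carrying a nested `error`
    const { error } = event.error;
    console.error(`DataChannel error: ${event.message}`, error);
  } else {
    console.error('Unknown DataChannel error', event);
  }
}
```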
```diff
@@ -16423,13 +16424,13 @@ class RTCEngine extends eventsExports.EventEmitter {
     });
     this.client.onLocalTrackPublished = res => {
       livekitLogger.debug('received trackPublishedResponse', res);
-      const {
-        resolve
-      } = this.pendingTrackResolvers[res.cid];
-      if (!resolve) {
+      if (!this.pendingTrackResolvers[res.cid]) {
         livekitLogger.error("missing track resolver for ".concat(res.cid));
         return;
       }
+      const {
+        resolve
+      } = this.pendingTrackResolvers[res.cid];
       delete this.pendingTrackResolvers[res.cid];
       resolve(res.track);
     };
```
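A reduced sketch of the reordered lookup above, using hypothetical local names, showing why the guard has to run before the destructure:

```ts
// Destructuring `undefined` throws a TypeError, so the map entry is checked
// before `resolve` is pulled out of it and the entry is consumed.
type PendingResolver = { resolve: (info: unknown) => void };
const pendingTrackResolvers: { [cid: string]: PendingResolver } = {};

function onLocalTrackPublished(cid: string, trackInfo: unknown) {
  if (!pendingTrackResolvers[cid]) {
    console.error(`missing track resolver for ${cid}`);
    return;
  }
  const { resolve } = pendingTrackResolvers[cid];
  delete pendingTrackResolvers[cid];
  resolve(trackInfo);
}
```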
```diff
@@ -17312,251 +17313,551 @@ class LocalAudioTrack extends LocalTrack {
```

- The publish-utility helpers are emitted here, between `LocalAudioTrack` and `LocalVideoTrack`: `mediaTrackToLocalTrack`, the `presets169`/`presets43`/`presetsScreenShare` tables, `defaultSimulcastPresets169`/`defaultSimulcastPresets43`, `computeDefaultScreenShareSimulcastPresets`, `videoRids`, `computeVideoEncodings`, `computeTrackBackupEncodings`, `determineAppropriateEncoding`, `presetsForResolution`, `defaultSimulcastLayers`, `encodingsFromPresets`, `sortPresets`, and the `ScalabilityMode` class.
- `determineAppropriateEncoding` keeps the vp8-based presets and the av1 (maxBitrate × 0.7) and vp9 (× 0.85) adjustments; its comments now say that users should override the presets for their use case and note that SVC codec bitrates are inclusive of all scalability layers while non-SVC bitrates do not include other simulcast layers.
- `LocalVideoTrack` follows, with its constructor parameters documented (`mediaTrack`; `constraints`, used when restarting or reacquiring tracks; `userProvidedTrack`, signalling whether the SDK should release and reacquire the underlying MediaStreamTrack) and a `setDeviceId` that returns `true` as soon as both the stored constraint and the active device id already match. The remaining members (`monitorSender`, `isSimulcast`, `startMonitor`, `stop`, `mute`, `unmute`, `getSenderStats`, `setPublishingQuality`, `restartTrack`, `addSimulcastTrack`, `setSimulcastTrackSender`, `setPublishingCodecs`) follow in full at their shifted line numbers.
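The `ScalabilityMode` helper summarized above parses SVC mode strings into layer counts. A standalone sketch of the same parsing, reimplemented here for illustration and mirroring the regular expression visible in the bundle:

```ts
// Parses modes such as "L3T3", "L2T3h", or "L3T3_KEY" into spatial/temporal
// layer counts plus an optional suffix.
class ScalabilityMode {
  spatial: number;
  temporal: number;
  suffix?: 'h' | '_KEY' | '_KEY_SHIFT';

  constructor(mode: string) {
    const results = mode.match(/^L(\d)T(\d)(h|_KEY|_KEY_SHIFT){0,1}$/);
    if (!results) {
      throw new Error('invalid scalability mode');
    }
    this.spatial = parseInt(results[1], 10);
    this.temporal = parseInt(results[2], 10);
    if (results[3]) {
      this.suffix = results[3] as 'h' | '_KEY' | '_KEY_SHIFT';
    }
  }

  toString(): string {
    return `L${this.spatial}T${this.temporal}${this.suffix ?? ''}`;
  }
}

// new ScalabilityMode('L3T3_KEY') -> spatial 3, temporal 3, suffix '_KEY'
```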
```diff
@@ -17716,14 +18017,15 @@ function videoLayersFromEncodings(width, height, encodings, svc) {
   if (svc) {
     // svc layers
     /* @ts-ignore */
-    const sm = new ScalabilityMode(encodings[0].scalabilityMode);
+    const encodingSM = encodings[0].scalabilityMode;
+    const sm = new ScalabilityMode(encodingSM);
     const layers = [];
     for (let i = 0; i < sm.spatial; i += 1) {
       layers.push(new VideoLayer({
         quality: VideoQuality.HIGH - i,
-        width: width / Math.pow(2, i),
-        height: height / Math.pow(2, i),
-        bitrate: encodings[0].maxBitrate ? encodings[0].maxBitrate / Math.pow(3, i) : 0,
+        width: Math.ceil(width / Math.pow(2, i)),
+        height: Math.ceil(height / Math.pow(2, i)),
+        bitrate: encodings[0].maxBitrate ? Math.ceil(encodings[0].maxBitrate / Math.pow(3, i)) : 0,
         ssrc: 0
       }));
     }
```
```diff
@@ -17735,8 +18037,8 @@ function videoLayersFromEncodings(width, height, encodings, svc) {
     let quality = videoQualityForRid((_b = encoding.rid) !== null && _b !== void 0 ? _b : '');
     return new VideoLayer({
       quality,
-      width: width / scale,
-      height: height / scale,
+      width: Math.ceil(width / scale),
+      height: Math.ceil(height / scale),
       bitrate: (_c = encoding.maxBitrate) !== null && _c !== void 0 ? _c : 0,
       ssrc: 0
     });
```
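To make the rounding change above concrete, here is a small worked sketch; the input values and per-rid scale factors are illustrative, not taken from the diff:

```ts
// VideoLayer width/height are integer fields in the signalling payload, so the
// scaled-down simulcast dimensions are rounded up instead of staying fractional.
const width = 1278; // an input that does not divide evenly
const height = 720;
const scales = [4, 2, 1]; // hypothetical scaleResolutionDownBy per rid (q, h, f)

const layers = scales.map(scale => ({
  width: Math.ceil(width / scale),   // 320, 639, 1278
  height: Math.ceil(height / scale), // 180, 360, 720
}));
console.log(layers); // the quarter layer would previously have been 319.5 wide
```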
```diff
@@ -19016,458 +19318,160 @@ class RemoteParticipant extends Participant {
```

- The copy of the publish-utility helpers that previously followed `RemoteParticipant` (including a second `ScalabilityMode` class and a `computeVideoEncodings` that referenced it as `ScalabilityMode$1`) is removed from this position; the helpers now appear only once, ahead of `LocalVideoTrack`.
- The `RemoteParticipant` members are re-emitted at their shifted positions with the same bodies: the guard against subscribing to an ended `MediaStreamTrack`, remote audio/video track creation and volume restoration, `hasMetadata`, `getTrackPublication`, `updateInfo`, `unpublishTrack`, `setAudioContext`, `setAudioOutput`, and `emit`.
- The hunk ends at the opening of `class LocalParticipant extends Participant {`.