livekit-client 2.9.8 → 2.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/livekit-client.e2ee.worker.js.map +1 -1
- package/dist/livekit-client.e2ee.worker.mjs +3 -3
- package/dist/livekit-client.e2ee.worker.mjs.map +1 -1
- package/dist/livekit-client.esm.mjs +117 -47
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/src/api/SignalClient.d.ts.map +1 -1
- package/dist/src/api/utils.d.ts +3 -0
- package/dist/src/api/utils.d.ts.map +1 -0
- package/dist/src/e2ee/KeyProvider.d.ts.map +1 -1
- package/dist/src/room/Room.d.ts.map +1 -1
- package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
- package/dist/src/room/participant/publishUtils.d.ts.map +1 -1
- package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
- package/dist/src/room/track/RemoteTrackPublication.d.ts.map +1 -1
- package/dist/src/room/track/options.d.ts +3 -2
- package/dist/src/room/track/options.d.ts.map +1 -1
- package/dist/src/room/track/utils.d.ts.map +1 -1
- package/dist/ts4.2/src/api/utils.d.ts +3 -0
- package/dist/ts4.2/src/room/track/options.d.ts +3 -2
- package/package.json +13 -13
- package/src/api/SignalClient.ts +8 -17
- package/src/api/utils.test.ts +100 -0
- package/src/api/utils.ts +26 -0
- package/src/room/Room.ts +6 -1
- package/src/room/participant/LocalParticipant.ts +2 -1
- package/src/room/track/LocalTrack.ts +2 -2
- package/src/room/track/LocalVideoTrack.ts +15 -14
- package/src/room/track/options.ts +5 -1
- package/src/room/track/utils.ts +10 -4
All hunks below are from package/dist/livekit-client.esm.mjs (+117 −47).

```diff
@@ -15,9 +15,9 @@ function _mergeNamespaces(n, m) {
 
 var e = Object.defineProperty;
 var h = (i, s, t) => s in i ? e(i, s, {
-  enumerable:
-  configurable:
-  writable:
+  enumerable: true,
+  configurable: true,
+  writable: true,
   value: t
 }) : i[s] = t;
 var o = (i, s, t) => h(i, typeof s != "symbol" ? s + "" : s, t);
```
```diff
@@ -64,10 +64,10 @@ function assert(condition, msg) {
   }
 }
 const FLOAT32_MAX = 3.4028234663852886e38,
-  FLOAT32_MIN = -
+  FLOAT32_MIN = -34028234663852886e22,
   UINT32_MAX = 0xffffffff,
   INT32_MAX = 0x7fffffff,
-  INT32_MIN = -
+  INT32_MIN = -2147483648;
 /**
  * Assert a valid signed protobuf 32-bit integer.
  */
```
```diff
@@ -3772,10 +3772,13 @@ const EventMetric = /* @__PURE__ */proto3.makeMessageType("livekit.EventMetric",
 }]);
 const BackupCodecPolicy$1 = /* @__PURE__ */proto3.makeEnum("livekit.BackupCodecPolicy", [{
   no: 0,
-  name: "
+  name: "PREFER_REGRESSION"
 }, {
   no: 1,
   name: "SIMULCAST"
+}, {
+  no: 2,
+  name: "REGRESSION"
 }]);
 const TrackType = /* @__PURE__ */proto3.makeEnum("livekit.TrackType", [{
   no: 0,
```
```diff
@@ -4169,6 +4172,12 @@ const ParticipantInfo = /* @__PURE__ */proto3.makeMessageType("livekit.Participa
   name: "disconnect_reason",
   kind: "enum",
   T: proto3.getEnumType(DisconnectReason)
+}, {
+  no: 18,
+  name: "kind_details",
+  kind: "enum",
+  T: proto3.getEnumType(ParticipantInfo_KindDetail),
+  repeated: true
 }]);
 const ParticipantInfo_State = /* @__PURE__ */proto3.makeEnum("livekit.ParticipantInfo.State", [{
   no: 0,
```
```diff
@@ -4199,6 +4208,13 @@ const ParticipantInfo_Kind = /* @__PURE__ */proto3.makeEnum("livekit.Participant
   no: 4,
   name: "AGENT"
 }]);
+const ParticipantInfo_KindDetail = /* @__PURE__ */proto3.makeEnum("livekit.ParticipantInfo.KindDetail", [{
+  no: 0,
+  name: "CLOUD_AGENT"
+}, {
+  no: 1,
+  name: "FORWARDED"
+}]);
 const Encryption_Type = /* @__PURE__ */proto3.makeEnum("livekit.Encryption.Type", [{
   no: 0,
   name: "NONE"
```
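
The new `ParticipantInfo.KindDetail` values can be read from the repeated `kind_details` field. A minimal sketch, assuming `ParticipantInfo` and `ParticipantInfo_KindDetail` are exported from `@livekit/protocol` and that protobuf-es maps the field to a `kindDetails` property:

```ts
import { ParticipantInfo, ParticipantInfo_KindDetail } from '@livekit/protocol';

// Returns true when the participant is flagged as a cloud agent
// (property name `kindDetails` assumed from protobuf-es field-name mapping).
function isCloudAgent(info: ParticipantInfo): boolean {
  return info.kindDetails.includes(ParticipantInfo_KindDetail.CLOUD_AGENT);
}
```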
```diff
@@ -11169,7 +11185,7 @@ function getOSVersion(ua) {
   return ua.includes('mac os') ? getMatch(/\(.+?(\d+_\d+(:?_\d+)?)/, ua, 1).replace(/_/g, '.') : undefined;
 }
 
-var version$1 = "2.9.8";
+var version$1 = "2.10.0";
 
 const version = version$1;
 const protocolVersion = 15;
```
```diff
@@ -11567,7 +11583,7 @@ function detachTrack(track, element) {
   }
 }
 Track.streamStateFromProto = streamStateFromProto;
-})(Track);
+})(Track || (Track = {}));
 
 class VideoPreset {
   constructor(widthOrOptions, height, maxBitrate, maxFramerate, priority) {
```
```diff
@@ -11609,8 +11625,12 @@ function isBackupCodec(codec) {
 }
 var BackupCodecPolicy;
 (function (BackupCodecPolicy) {
-
+  // codec regression is preferred, the sfu will try to regress codec if possible but not guaranteed
+  BackupCodecPolicy[BackupCodecPolicy["PREFER_REGRESSION"] = 0] = "PREFER_REGRESSION";
+  // multi-codec simulcast, publish both primary and backup codec at the same time
   BackupCodecPolicy[BackupCodecPolicy["SIMULCAST"] = 1] = "SIMULCAST";
+  // always use backup codec only
+  BackupCodecPolicy[BackupCodecPolicy["REGRESSION"] = 2] = "REGRESSION";
 })(BackupCodecPolicy || (BackupCodecPolicy = {}));
 var AudioPresets;
 (function (AudioPresets) {
```
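
`BackupCodecPolicy` gains `PREFER_REGRESSION` (now value 0) and `REGRESSION` (value 2) alongside `SIMULCAST`. A hedged usage sketch, assuming the enum is re-exported from the package root and that `backupCodecPolicy` is the corresponding `TrackPublishOptions` field added in `src/room/track/options.ts` (listed above but not shown in full here):

```ts
import { BackupCodecPolicy, Room, createLocalVideoTrack } from 'livekit-client';

const room = new Room();
// ... room.connect(...) elsewhere ...
const videoTrack = await createLocalVideoTrack();

await room.localParticipant.publishTrack(videoTrack, {
  videoCodec: 'av1',
  backupCodec: { codec: 'vp8' },
  // assumption: the option name mirrors the enum; always fall back to the backup codec
  backupCodecPolicy: BackupCodecPolicy.REGRESSION,
});
```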
```diff
@@ -12041,12 +12061,6 @@ function unwrapConstraint(constraint) {
   }
   throw Error('could not unwrap constraint');
 }
-function toWebsocketUrl(url) {
-  if (url.startsWith('http')) {
-    return url.replace(/^(http)/, 'ws');
-  }
-  return url;
-}
 function toHttpUrl(url) {
   if (url.startsWith('ws')) {
     return url.replace(/^(ws)/, 'http');
```
```diff
@@ -12291,12 +12305,17 @@ function getNewAudioContext() {
   });
   // If the audio context is suspended, we need to resume it when the user clicks on the page
   if (audioContext.state === 'suspended' && typeof window !== 'undefined' && ((_a = window.document) === null || _a === void 0 ? void 0 : _a.body)) {
-    const handleResume = () => {
-
-
-    (
-
-
+    const handleResume = () => __awaiter(this, void 0, void 0, function* () {
+      var _a;
+      try {
+        if (audioContext.state === 'suspended') {
+          yield audioContext.resume();
+        }
+      } catch (e) {
+        console.warn('Error trying to auto-resume audio context', e);
+      }
+      (_a = window.document.body) === null || _a === void 0 ? void 0 : _a.removeEventListener('click', handleResume);
+    });
     window.document.body.addEventListener('click', handleResume);
   }
   return audioContext;
```
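
The resume handler is now async, only resumes while the context is still suspended, and detaches itself after the first click. The equivalent pattern in application code (a sketch of the same logic, not a livekit-client API):

```ts
// Resume a suspended AudioContext on the first user gesture, then clean up the listener.
const audioContext = new AudioContext();

const handleResume = async () => {
  try {
    if (audioContext.state === 'suspended') {
      await audioContext.resume();
    }
  } catch (e) {
    console.warn('Error trying to auto-resume audio context', e);
  }
  window.document.body?.removeEventListener('click', handleResume);
};

if (audioContext.state === 'suspended') {
  window.document.body.addEventListener('click', handleResume);
}
```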
```diff
@@ -12931,6 +12950,28 @@ class AsyncQueue {
   }
 }
 
+function createRtcUrl(url, searchParams) {
+  const urlObj = new URL(url);
+  searchParams.forEach((value, key) => {
+    urlObj.searchParams.set(key, value);
+  });
+  return appendUrlPath(urlObj, 'rtc');
+}
+function createValidateUrl(rtcWsUrl) {
+  const urlObj = new URL(toHttpUrl(rtcWsUrl));
+  return appendUrlPath(urlObj, 'validate');
+}
+function ensureTrailingSlash(url) {
+  return url.endsWith('/') ? url : "".concat(url, "/");
+}
+function appendUrlPath(urlObj, path) {
+  const result = "".concat(urlObj.protocol, "//").concat(urlObj.host).concat(ensureTrailingSlash(urlObj.pathname)).concat(path);
+  if (urlObj.searchParams.size > 0) {
+    return "".concat(result, "?").concat(urlObj.searchParams.toString());
+  }
+  return result;
+}
+
 const passThroughQueueSignals = ['syncState', 'trickle', 'offer', 'answer', 'simulate', 'leave'];
 function canPassThroughQueue(req) {
   const canPass = passThroughQueueSignals.indexOf(req.case) >= 0;
```
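
These helpers replace the ad-hoc pathname manipulation in `SignalClient.connect` (see the hunks below) and, notably, preserve any sub-path on the server URL. A behavior sketch, assuming the (non-exported) helpers from `src/api/utils.ts` above are in scope:

```ts
// Connection params are appended, the /rtc path is added after any existing sub-path,
// and the validate URL is derived from the rtc URL with an http(s) scheme.
const params = new URLSearchParams({ access_token: 'jwt', auto_subscribe: '1' });

const rtcUrl = createRtcUrl('wss://livekit.example.com/custom/prefix', params);
// "wss://livekit.example.com/custom/prefix/rtc?access_token=jwt&auto_subscribe=1"

const validateUrl = createValidateUrl(rtcUrl);
// "https://livekit.example.com/custom/prefix/rtc/validate?access_token=jwt&auto_subscribe=1"
```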
```diff
@@ -13028,15 +13069,10 @@ class SignalClient {
   }
   connect(url, token, opts, abortSignal) {
     this.connectOptions = opts;
-    const urlObj = new URL(toWebsocketUrl(url));
-    // strip trailing slash
-    urlObj.pathname = urlObj.pathname.replace(/\/$/, '');
-    urlObj.pathname += '/rtc';
     const clientInfo = getClientInfo();
     const params = createConnectionParams(token, clientInfo, opts);
-
-
-    }
+    const rtcUrl = createRtcUrl(url, params);
+    const validateUrl = createValidateUrl(rtcUrl);
     return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
       const unlock = yield this.connectionLock.lock();
       try {
```
```diff
@@ -13053,7 +13089,7 @@ class SignalClient {
           abortHandler();
         }
         abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener('abort', abortHandler);
-        const redactedUrl = new URL(
+        const redactedUrl = new URL(rtcUrl);
         if (redactedUrl.searchParams.has('access_token')) {
           redactedUrl.searchParams.set('access_token', '<redacted>');
         }
```
```diff
@@ -13064,7 +13100,7 @@ class SignalClient {
         if (this.ws) {
           yield this.close(false);
         }
-        this.ws = new WebSocket(
+        this.ws = new WebSocket(rtcUrl);
         this.ws.binaryType = 'arraybuffer';
         this.ws.onopen = () => {
           clearTimeout(wsTimeout);
```
```diff
@@ -13074,15 +13110,12 @@ class SignalClient {
           this.state = SignalConnectionState.DISCONNECTED;
           clearTimeout(wsTimeout);
           try {
-            const
-            validateURL.protocol = "http".concat(validateURL.protocol.substring(2));
-            validateURL.pathname += '/validate';
-            const resp = yield fetch(validateURL);
+            const resp = yield fetch(validateUrl);
             if (resp.status.toFixed(0).startsWith('4')) {
               const msg = yield resp.text();
               reject(new ConnectionError(msg, ConnectionErrorReason.NotAllowed, resp.status));
             } else {
-              reject(new ConnectionError(
+              reject(new ConnectionError("Encountered unknown websocket error during connection: ".concat(ev.toString()), ConnectionErrorReason.InternalError, resp.status));
             }
           } catch (e) {
             reject(new ConnectionError(e instanceof Error ? e.message : 'server was not reachable', ConnectionErrorReason.ServerUnreachable));
```
```diff
@@ -15639,9 +15672,10 @@ class LocalTrack extends Track {
       constraints = this._constraints;
     }
     const {
-      deviceId
+      deviceId,
+      facingMode
     } = constraints,
-      otherConstraints = __rest(constraints, ["deviceId"]);
+      otherConstraints = __rest(constraints, ["deviceId", "facingMode"]);
     this.log.debug('restarting track with constraints', Object.assign(Object.assign({}, this.logContext), {
       constraints
     }));
```
```diff
@@ -15650,8 +15684,9 @@ class LocalTrack extends Track {
       video: false
     };
     if (this.kind === Track.Kind.Video) {
-      streamConstraints.video = deviceId ? {
-        deviceId
+      streamConstraints.video = deviceId || facingMode ? {
+        deviceId,
+        facingMode
       } : true;
     } else {
       streamConstraints.audio = deviceId ? {
```
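
With `facingMode` now carried through restarts, switching between front and rear cameras no longer drops the constraint. A minimal sketch, assuming a camera track created via the public API (`restartTrack` accepts the same capture options):

```ts
import { createLocalVideoTrack } from 'livekit-client';

// Start on the front camera...
const track = await createLocalVideoTrack({ facingMode: 'user' });

// ...later, restart on the rear camera; facingMode is now forwarded to getUserMedia.
await track.restartTrack({ facingMode: 'environment' });
```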
```diff
@@ -16945,13 +16980,44 @@ function setPublishingLayersForSender(sender, senderEncodings, qualities, sender
     return;
   }
   let hasChanged = false;
-
-
-  low resolution frame and recover very quickly, but noticable
-  2. livekit sfu: additional pli request cause video frozen for a few frames, also noticable */
-  const closableSpatial = false;
+  const browser = getBrowser();
+  const closableSpatial = (browser === null || browser === void 0 ? void 0 : browser.name) === 'Chrome' && compareVersions(browser === null || browser === void 0 ? void 0 : browser.version, '133') > 0;
   /* @ts-ignore */
-  if (closableSpatial && encodings[0].scalabilityMode)
+  if (closableSpatial && encodings[0].scalabilityMode) {
+    // svc dynacast encodings
+    const encoding = encodings[0];
+    /* @ts-ignore */
+    const mode = new ScalabilityMode(encoding.scalabilityMode);
+    let maxQuality = VideoQuality$1.OFF;
+    qualities.forEach(q => {
+      if (q.enabled && (maxQuality === VideoQuality$1.OFF || q.quality > maxQuality)) {
+        maxQuality = q.quality;
+      }
+    });
+    if (maxQuality === VideoQuality$1.OFF) {
+      if (encoding.active) {
+        encoding.active = false;
+        hasChanged = true;
+      }
+    } else if (!encoding.active || mode.spatial !== maxQuality + 1) {
+      hasChanged = true;
+      encoding.active = true;
+      /* @ts-ignore */
+      const originalMode = new ScalabilityMode(senderEncodings[0].scalabilityMode);
+      mode.spatial = maxQuality + 1;
+      mode.suffix = originalMode.suffix;
+      if (mode.spatial === 1) {
+        // no suffix for L1Tx
+        mode.suffix = undefined;
+      }
+      /* @ts-ignore */
+      encoding.scalabilityMode = mode.toString();
+      encoding.scaleResolutionDownBy = Math.pow(2, 2 - maxQuality);
+      if (senderEncodings[0].maxBitrate) {
+        encoding.maxBitrate = senderEncodings[0].maxBitrate / (encoding.scaleResolutionDownBy * encoding.scaleResolutionDownBy);
+      }
+    }
+  } else {
     // simulcast dynacast encodings
     encodings.forEach((encoding, idx) => {
       var _a;
```
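
For SVC publishers the single encoding is now rewritten instead of being left untouched: the highest enabled quality picks the spatial layer count, and resolution and bitrate are scaled accordingly. The arithmetic, isolated as a sketch (local enum mirroring the proto values LOW=0, MEDIUM=1, HIGH=2):

```ts
// Maps the highest enabled VideoQuality to the SVC parameters applied above.
enum VideoQuality { LOW = 0, MEDIUM = 1, HIGH = 2 }

function svcLayerParams(maxQuality: VideoQuality, fullBitrate: number) {
  const spatialLayers = maxQuality + 1;                // e.g. HIGH -> L3
  const scaleResolutionDownBy = 2 ** (2 - maxQuality); // 4, 2, 1
  const maxBitrate = fullBitrate / (scaleResolutionDownBy * scaleResolutionDownBy);
  return { spatialLayers, scaleResolutionDownBy, maxBitrate };
}

console.log(svcLayerParams(VideoQuality.LOW, 1_700_000));
// { spatialLayers: 1, scaleResolutionDownBy: 4, maxBitrate: 106250 }
```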
```diff
@@ -19240,7 +19306,7 @@ class TrackPublication extends eventsExports.EventEmitter {
     PermissionStatus["Allowed"] = "allowed";
     PermissionStatus["NotAllowed"] = "not_allowed";
   })(TrackPublication.PermissionStatus || (TrackPublication.PermissionStatus = {}));
-})(TrackPublication);
+})(TrackPublication || (TrackPublication = {}));
 
 class LocalTrackPublication extends TrackPublication {
   get isUpstreamPaused() {
```
```diff
@@ -23216,6 +23282,10 @@ class Room extends eventsExports.EventEmitter {
       this.log.warn('skipping incoming track after Room disconnected', this.logContext);
       return;
     }
+    if (mediaTrack.readyState === 'ended') {
+      this.log.info('skipping incoming track as it already ended', this.logContext);
+      return;
+    }
     const parts = unpackStreamId(stream.id);
     const participantSid = parts[0];
     let streamId = parts[1];
```
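
The new guard skips remote MediaStreamTracks whose `readyState` is already `'ended'`. The same check in application terms (sketch, not a livekit-client API):

```ts
// Ignore tracks that ended before they could be wired up; they will never produce media.
function attachIfLive(mediaTrack: MediaStreamTrack, element: HTMLMediaElement) {
  if (mediaTrack.readyState === 'ended') {
    console.info('skipping incoming track as it already ended');
    return;
  }
  element.srcObject = new MediaStream([mediaTrack]);
}
```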
```diff
@@ -23723,7 +23793,7 @@ class Room extends eventsExports.EventEmitter {
       args[_key3 - 1] = arguments[_key3];
     }
     // active speaker updates are too spammy
-    if (event !== RoomEvent.ActiveSpeakersChanged) {
+    if (event !== RoomEvent.ActiveSpeakersChanged && event !== RoomEvent.TranscriptionReceived) {
       // only extract logContext from arguments in order to avoid logging the whole object tree
       const minimizedArgs = mapArgs(args).filter(arg => arg !== undefined);
       this.log.debug("room event ".concat(event), Object.assign(Object.assign({}, this.logContext), {
```