livekit-client 1.11.3 → 1.12.0
- package/README.md +1 -3
- package/dist/livekit-client.e2ee.worker.js +2 -0
- package/dist/livekit-client.e2ee.worker.js.map +1 -0
- package/dist/livekit-client.e2ee.worker.mjs +1525 -0
- package/dist/livekit-client.e2ee.worker.mjs.map +1 -0
- package/dist/livekit-client.esm.mjs +1462 -660
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/src/api/SignalClient.d.ts +4 -1
- package/dist/src/api/SignalClient.d.ts.map +1 -1
- package/dist/src/connectionHelper/checks/turn.d.ts.map +1 -1
- package/dist/src/connectionHelper/checks/websocket.d.ts.map +1 -1
- package/dist/src/e2ee/E2eeManager.d.ts +45 -0
- package/dist/src/e2ee/E2eeManager.d.ts.map +1 -0
- package/dist/src/e2ee/KeyProvider.d.ts +42 -0
- package/dist/src/e2ee/KeyProvider.d.ts.map +1 -0
- package/dist/src/e2ee/constants.d.ts +14 -0
- package/dist/src/e2ee/constants.d.ts.map +1 -0
- package/dist/src/e2ee/errors.d.ts +11 -0
- package/dist/src/e2ee/errors.d.ts.map +1 -0
- package/dist/src/e2ee/index.d.ts +4 -0
- package/dist/src/e2ee/index.d.ts.map +1 -0
- package/dist/src/e2ee/types.d.ts +129 -0
- package/dist/src/e2ee/types.d.ts.map +1 -0
- package/dist/src/e2ee/utils.d.ts +24 -0
- package/dist/src/e2ee/utils.d.ts.map +1 -0
- package/dist/src/e2ee/worker/FrameCryptor.d.ts +175 -0
- package/dist/src/e2ee/worker/FrameCryptor.d.ts.map +1 -0
- package/dist/src/e2ee/worker/ParticipantKeyHandler.d.ts +46 -0
- package/dist/src/e2ee/worker/ParticipantKeyHandler.d.ts.map +1 -0
- package/dist/src/e2ee/worker/e2ee.worker.d.ts +2 -0
- package/dist/src/e2ee/worker/e2ee.worker.d.ts.map +1 -0
- package/dist/src/index.d.ts +2 -0
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/logger.d.ts +4 -1
- package/dist/src/logger.d.ts.map +1 -1
- package/dist/src/options.d.ts +5 -0
- package/dist/src/options.d.ts.map +1 -1
- package/dist/src/proto/livekit_models.d.ts +2 -2
- package/dist/src/proto/livekit_models.d.ts.map +1 -1
- package/dist/src/room/PCTransport.d.ts +3 -1
- package/dist/src/room/PCTransport.d.ts.map +1 -1
- package/dist/src/room/RTCEngine.d.ts +17 -3
- package/dist/src/room/RTCEngine.d.ts.map +1 -1
- package/dist/src/room/Room.d.ts +10 -1
- package/dist/src/room/Room.d.ts.map +1 -1
- package/dist/src/room/events.d.ts +14 -2
- package/dist/src/room/events.d.ts.map +1 -1
- package/dist/src/room/participant/LocalParticipant.d.ts +9 -2
- package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
- package/dist/src/room/participant/Participant.d.ts +1 -0
- package/dist/src/room/participant/Participant.d.ts.map +1 -1
- package/dist/src/room/track/LocalAudioTrack.d.ts.map +1 -1
- package/dist/src/room/track/LocalTrack.d.ts +3 -2
- package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
- package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
- package/dist/src/room/track/RemoteVideoTrack.d.ts.map +1 -1
- package/dist/src/room/track/TrackPublication.d.ts +3 -0
- package/dist/src/room/track/TrackPublication.d.ts.map +1 -1
- package/dist/src/room/track/facingMode.d.ts +41 -0
- package/dist/src/room/track/facingMode.d.ts.map +1 -0
- package/dist/src/room/track/options.d.ts +2 -2
- package/dist/src/room/track/options.d.ts.map +1 -1
- package/dist/src/room/track/utils.d.ts +5 -35
- package/dist/src/room/track/utils.d.ts.map +1 -1
- package/dist/src/room/utils.d.ts +2 -0
- package/dist/src/room/utils.d.ts.map +1 -1
- package/dist/src/test/MockMediaStreamTrack.d.ts.map +1 -1
- package/dist/ts4.2/src/api/SignalClient.d.ts +4 -1
- package/dist/ts4.2/src/e2ee/E2eeManager.d.ts +45 -0
- package/dist/ts4.2/src/e2ee/KeyProvider.d.ts +42 -0
- package/dist/ts4.2/src/e2ee/constants.d.ts +14 -0
- package/dist/ts4.2/src/e2ee/errors.d.ts +11 -0
- package/dist/ts4.2/src/e2ee/index.d.ts +4 -0
- package/dist/ts4.2/src/e2ee/types.d.ts +129 -0
- package/dist/ts4.2/src/e2ee/utils.d.ts +24 -0
- package/dist/ts4.2/src/e2ee/worker/FrameCryptor.d.ts +175 -0
- package/dist/ts4.2/src/e2ee/worker/ParticipantKeyHandler.d.ts +46 -0
- package/dist/ts4.2/src/e2ee/worker/e2ee.worker.d.ts +2 -0
- package/dist/ts4.2/src/index.d.ts +2 -0
- package/dist/ts4.2/src/logger.d.ts +4 -1
- package/dist/ts4.2/src/options.d.ts +5 -0
- package/dist/ts4.2/src/proto/livekit_models.d.ts +2 -2
- package/dist/ts4.2/src/room/PCTransport.d.ts +3 -1
- package/dist/ts4.2/src/room/RTCEngine.d.ts +17 -3
- package/dist/ts4.2/src/room/Room.d.ts +10 -1
- package/dist/ts4.2/src/room/events.d.ts +14 -2
- package/dist/ts4.2/src/room/participant/LocalParticipant.d.ts +9 -2
- package/dist/ts4.2/src/room/participant/Participant.d.ts +1 -0
- package/dist/ts4.2/src/room/track/LocalTrack.d.ts +3 -2
- package/dist/ts4.2/src/room/track/TrackPublication.d.ts +3 -0
- package/dist/ts4.2/src/room/track/facingMode.d.ts +41 -0
- package/dist/ts4.2/src/room/track/options.d.ts +6 -6
- package/dist/ts4.2/src/room/track/utils.d.ts +5 -35
- package/dist/ts4.2/src/room/utils.d.ts +2 -0
- package/package.json +17 -7
- package/src/api/SignalClient.ts +28 -9
- package/src/connectionHelper/checks/turn.ts +1 -0
- package/src/connectionHelper/checks/websocket.ts +1 -0
- package/src/e2ee/E2eeManager.ts +374 -0
- package/src/e2ee/KeyProvider.ts +77 -0
- package/src/e2ee/constants.ts +40 -0
- package/src/e2ee/errors.ts +16 -0
- package/src/e2ee/index.ts +3 -0
- package/src/e2ee/types.ts +160 -0
- package/src/e2ee/utils.ts +127 -0
- package/src/e2ee/worker/FrameCryptor.test.ts +21 -0
- package/src/e2ee/worker/FrameCryptor.ts +614 -0
- package/src/e2ee/worker/ParticipantKeyHandler.ts +129 -0
- package/src/e2ee/worker/e2ee.worker.ts +217 -0
- package/src/e2ee/worker/tsconfig.json +6 -0
- package/src/index.ts +2 -0
- package/src/logger.ts +10 -2
- package/src/options.ts +6 -0
- package/src/proto/livekit_models.ts +12 -12
- package/src/room/PCTransport.ts +39 -9
- package/src/room/RTCEngine.ts +127 -34
- package/src/room/Room.ts +83 -30
- package/src/room/defaults.ts +1 -1
- package/src/room/events.ts +14 -0
- package/src/room/participant/LocalParticipant.ts +82 -10
- package/src/room/participant/Participant.ts +4 -0
- package/src/room/track/LocalAudioTrack.ts +11 -4
- package/src/room/track/LocalTrack.ts +50 -43
- package/src/room/track/LocalVideoTrack.ts +5 -3
- package/src/room/track/RemoteVideoTrack.ts +2 -2
- package/src/room/track/TrackPublication.ts +9 -1
- package/src/room/track/facingMode.test.ts +30 -0
- package/src/room/track/facingMode.ts +103 -0
- package/src/room/track/options.ts +3 -2
- package/src/room/track/utils.test.ts +1 -30
- package/src/room/track/utils.ts +16 -91
- package/src/room/utils.ts +5 -0
- package/src/room/worker.d.ts +4 -0
- package/src/test/MockMediaStreamTrack.ts +1 -0
@@ -285,6 +285,7 @@ var loglevel = {exports: {}};
 });
 })(loglevel);
 var loglevelExports = loglevel.exports;
+var log$1 = /*@__PURE__*/getDefaultExportFromCjs(loglevelExports);

 var LogLevel;
 (function (LogLevel) {
@@ -297,8 +298,13 @@ var LogLevel;
 })(LogLevel || (LogLevel = {}));
 const livekitLogger = loglevelExports.getLogger('livekit');
 livekitLogger.setDefaultLevel(LogLevel.info);
-function setLogLevel(level) {
-
+function setLogLevel(level, loggerName) {
+  if (loggerName) {
+    loglevelExports.getLogger(loggerName).setLevel(level);
+  }
+  for (const logger of Object.values(loglevelExports.getLoggers())) {
+    logger.setLevel(level);
+  }
 }
 /**
  * use this to hook into the logging function to allow sending internal livekit logs to third party services
@@ -320,6 +326,8 @@ function setLogExtension(extension) {
 livekitLogger.setLevel(livekitLogger.getLevel()); // Be sure to call setLevel method in order to apply plugin
 }

+loglevelExports.getLogger('lk-e2ee');
+
 var long = Long;

 /**
@@ -4295,7 +4303,7 @@ function disconnectReasonToJSON(object) {
 }
 var ReconnectReason;
 (function (ReconnectReason) {
-  ReconnectReason[ReconnectReason["
+  ReconnectReason[ReconnectReason["RR_UNKNOWN"] = 0] = "RR_UNKNOWN";
   ReconnectReason[ReconnectReason["RR_SIGNAL_DISCONNECTED"] = 1] = "RR_SIGNAL_DISCONNECTED";
   ReconnectReason[ReconnectReason["RR_PUBLISHER_FAILED"] = 2] = "RR_PUBLISHER_FAILED";
   ReconnectReason[ReconnectReason["RR_SUBSCRIBER_FAILED"] = 3] = "RR_SUBSCRIBER_FAILED";
@@ -4304,7 +4312,7 @@ var ReconnectReason;
 })(ReconnectReason || (ReconnectReason = {}));
 var SubscriptionError;
 (function (SubscriptionError) {
-  SubscriptionError[SubscriptionError["
+  SubscriptionError[SubscriptionError["SE_UNKNOWN"] = 0] = "SE_UNKNOWN";
   SubscriptionError[SubscriptionError["SE_CODEC_UNSUPPORTED"] = 1] = "SE_CODEC_UNSUPPORTED";
   SubscriptionError[SubscriptionError["SE_TRACK_NOTFOUND"] = 2] = "SE_TRACK_NOTFOUND";
   SubscriptionError[SubscriptionError["UNRECOGNIZED"] = -1] = "UNRECOGNIZED";
@@ -4312,8 +4320,8 @@ var SubscriptionError;
 function subscriptionErrorFromJSON(object) {
   switch (object) {
     case 0:
-    case "
-      return SubscriptionError.
+    case "SE_UNKNOWN":
+      return SubscriptionError.SE_UNKNOWN;
     case 1:
     case "SE_CODEC_UNSUPPORTED":
       return SubscriptionError.SE_CODEC_UNSUPPORTED;
@@ -4328,8 +4336,8 @@ function subscriptionErrorFromJSON(object) {
 }
 function subscriptionErrorToJSON(object) {
   switch (object) {
-    case SubscriptionError.
-      return "
+    case SubscriptionError.SE_UNKNOWN:
+      return "SE_UNKNOWN";
     case SubscriptionError.SE_CODEC_UNSUPPORTED:
       return "SE_CODEC_UNSUPPORTED";
     case SubscriptionError.SE_TRACK_NOTFOUND:
@@ -14187,77 +14195,94 @@ function getMatch(exp, ua) {
 return match && match.length >= id && match[id] || '';
 }

-var version$1 = "1.
+var version$1 = "1.12.0";

 const version = version$1;
 const protocolVersion = 9;

-
-
-
-
-
-
-
-
-
-let requestPermissions = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
-var _a;
-return __awaiter(this, void 0, void 0, function* () {
-if (((_a = DeviceManager.userMediaPromiseMap) === null || _a === void 0 ? void 0 : _a.size) > 0) {
-livekitLogger.debug('awaiting getUserMedia promise');
-try {
-if (kind) {
-yield DeviceManager.userMediaPromiseMap.get(kind);
-} else {
-yield Promise.all(DeviceManager.userMediaPromiseMap.values());
-}
-} catch (e) {
-livekitLogger.warn('error waiting for media permissons');
-}
-}
-let devices = yield navigator.mediaDevices.enumerateDevices();
-if (requestPermissions && kind && (
-// for safari we need to skip this check, as otherwise it will re-acquire user media and fail on iOS https://bugs.webkit.org/show_bug.cgi?id=179363
-!DeviceManager.userMediaPromiseMap.get(kind) || !isSafari())) {
-const isDummyDeviceOrEmpty = devices.length === 0 || devices.some(device => {
-const noLabel = device.label === '';
-const isRelevant = kind ? device.kind === kind : true;
-return noLabel && isRelevant;
-});
-if (isDummyDeviceOrEmpty) {
-const permissionsToAcquire = {
-video: kind !== 'audioinput' && kind !== 'audiooutput',
-audio: kind !== 'videoinput'
-};
-const stream = yield navigator.mediaDevices.getUserMedia(permissionsToAcquire);
-devices = yield navigator.mediaDevices.enumerateDevices();
-stream.getTracks().forEach(track => {
-track.stop();
-});
-}
-}
-if (kind) {
-devices = devices.filter(device => device.kind === kind);
-}
-return devices;
-});
+class VideoPreset {
+constructor(width, height, maxBitrate, maxFramerate, priority) {
+this.width = width;
+this.height = height;
+this.encoding = {
+maxBitrate,
+maxFramerate,
+priority
+};
 }
-
-return
-
-
-
-
-
-const devices = yield this.getDevices(kind);
-const device = devices.find(d => d.groupId === groupId && d.deviceId !== defaultId);
-return device === null || device === void 0 ? void 0 : device.deviceId;
-});
+get resolution() {
+return {
+width: this.width,
+height: this.height,
+frameRate: this.encoding.maxFramerate,
+aspectRatio: this.width / this.height
+};
 }
 }
-
-
+const backupCodecs = ['vp8', 'h264'];
+const videoCodecs = ['vp8', 'h264', 'vp9', 'av1'];
+function isBackupCodec(codec) {
+return !!backupCodecs.find(backup => backup === codec);
+}
+function isCodecEqual(c1, c2) {
+return (c1 === null || c1 === void 0 ? void 0 : c1.toLowerCase().replace(/audio\/|video\//y, '')) === (c2 === null || c2 === void 0 ? void 0 : c2.toLowerCase().replace(/audio\/|video\//y, ''));
+}
+var AudioPresets;
+(function (AudioPresets) {
+AudioPresets.telephone = {
+maxBitrate: 12000
+};
+AudioPresets.speech = {
+maxBitrate: 20000
+};
+AudioPresets.music = {
+maxBitrate: 32000
+};
+AudioPresets.musicStereo = {
+maxBitrate: 48000
+};
+AudioPresets.musicHighQuality = {
+maxBitrate: 64000
+};
+AudioPresets.musicHighQualityStereo = {
+maxBitrate: 96000
+};
+})(AudioPresets || (AudioPresets = {}));
+/**
+ * Sane presets for video resolution/encoding
+ */
+const VideoPresets = {
+h90: new VideoPreset(160, 90, 60000, 15),
+h180: new VideoPreset(320, 180, 120000, 15),
+h216: new VideoPreset(384, 216, 180000, 15),
+h360: new VideoPreset(640, 360, 300000, 20),
+h540: new VideoPreset(960, 540, 600000, 25),
+h720: new VideoPreset(1280, 720, 1700000, 30),
+h1080: new VideoPreset(1920, 1080, 3000000, 30),
+h1440: new VideoPreset(2560, 1440, 5000000, 30),
+h2160: new VideoPreset(3840, 2160, 8000000, 30)
+};
+/**
+ * Four by three presets
+ */
+const VideoPresets43 = {
+h120: new VideoPreset(160, 120, 80000, 15),
+h180: new VideoPreset(240, 180, 100000, 15),
+h240: new VideoPreset(320, 240, 150000, 15),
+h360: new VideoPreset(480, 360, 225000, 20),
+h480: new VideoPreset(640, 480, 300000, 20),
+h540: new VideoPreset(720, 540, 450000, 25),
+h720: new VideoPreset(960, 720, 1500000, 30),
+h1080: new VideoPreset(1440, 1080, 2500000, 30),
+h1440: new VideoPreset(1920, 1440, 3500000, 30)
+};
+const ScreenSharePresets = {
+h360fps3: new VideoPreset(640, 360, 200000, 3, 'medium'),
+h720fps5: new VideoPreset(1280, 720, 400000, 5, 'medium'),
+h720fps15: new VideoPreset(1280, 720, 1000000, 15, 'medium'),
+h1080fps15: new VideoPreset(1920, 1080, 1500000, 15, 'medium'),
+h1080fps30: new VideoPreset(1920, 1080, 3000000, 30, 'medium')
+};

 /**
  * Events are the primary way LiveKit notifies your application of changes.
@@ -14501,6 +14526,8 @@ var RoomEvent;
  * args: (isRecording: boolean)
  */
 RoomEvent["RecordingStatusChanged"] = "recordingStatusChanged";
+RoomEvent["ParticipantEncryptionStatusChanged"] = "participantEncryptionStatusChanged";
+RoomEvent["EncryptionError"] = "encryptionError";
 /**
  * Emits whenever the current buffer status of a data channel changes
  * args: (isLow: boolean, kind: [[DataPacket_Kind]])
@@ -14652,6 +14679,8 @@ var ParticipantEvent;
  * args: (prevPermissions: [[ParticipantPermission]])
  */
 ParticipantEvent["ParticipantPermissionsChanged"] = "participantPermissionsChanged";
+/** @internal */
+ParticipantEvent["PCTrackAdded"] = "pcTrackAdded";
 })(ParticipantEvent || (ParticipantEvent = {}));
 /** @internal */
 var EngineEvent;
@@ -14669,7 +14698,15 @@ var EngineEvent;
 EngineEvent["MediaTrackAdded"] = "mediaTrackAdded";
 EngineEvent["ActiveSpeakersUpdate"] = "activeSpeakersUpdate";
 EngineEvent["DataPacketReceived"] = "dataPacketReceived";
+EngineEvent["RTPVideoMapUpdate"] = "rtpVideoMapUpdate";
 EngineEvent["DCBufferStatusChanged"] = "dcBufferStatusChanged";
+EngineEvent["ParticipantUpdate"] = "participantUpdate";
+EngineEvent["RoomUpdate"] = "roomUpdate";
+EngineEvent["SpeakersChanged"] = "speakersChanged";
+EngineEvent["StreamStateChanged"] = "streamStateChanged";
+EngineEvent["ConnectionQualityUpdate"] = "connectionQualityUpdate";
+EngineEvent["SubscriptionError"] = "subscriptionError";
+EngineEvent["SubscriptionPermissionUpdate"] = "subscriptionPermissionUpdate";
 })(EngineEvent || (EngineEvent = {}));
 var TrackEvent;
 (function (TrackEvent) {
@@ -15063,387 +15100,16 @@ function detachTrack(track, element) {
 Track.streamStateFromProto = streamStateFromProto;
 })(Track || (Track = {}));

-
-
-
-
-
-
-
- * @param userProvidedTrack Signals to the SDK whether or not the mediaTrack should be managed (i.e. released and reacquired) internally by the SDK
- */
-constructor(mediaTrack, kind, constraints) {
-let userProvidedTrack = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
-super(mediaTrack, kind);
-this.isSettingUpProcessor = false;
-this._isUpstreamPaused = false;
-this.handleEnded = () => {
-if (this.isInBackground) {
-this.reacquireTrack = true;
-}
-this._mediaStreamTrack.removeEventListener('mute', this.pauseUpstream);
-this._mediaStreamTrack.removeEventListener('unmute', this.resumeUpstream);
-this.emit(TrackEvent.Ended, this);
-};
-/**
- * pauses publishing to the server without disabling the local MediaStreamTrack
- * this is used to display a user's own video locally while pausing publishing to
- * the server.
- * this API is unsupported on Safari < 12 due to a bug
- **/
-this.pauseUpstream = () => __awaiter(this, void 0, void 0, function* () {
-const unlock = yield this.pauseUpstreamLock.lock();
-try {
-if (this._isUpstreamPaused === true) {
-return;
-}
-if (!this.sender) {
-livekitLogger.warn('unable to pause upstream for an unpublished track');
-return;
-}
-this._isUpstreamPaused = true;
-this.emit(TrackEvent.UpstreamPaused, this);
-const browser = getBrowser();
-if ((browser === null || browser === void 0 ? void 0 : browser.name) === 'Safari' && compareVersions(browser.version, '12.0') < 0) {
-// https://bugs.webkit.org/show_bug.cgi?id=184911
-throw new DeviceUnsupportedError('pauseUpstream is not supported on Safari < 12.');
-}
-yield this.sender.replaceTrack(null);
-} finally {
-unlock();
-}
-});
-this.resumeUpstream = () => __awaiter(this, void 0, void 0, function* () {
-const unlock = yield this.pauseUpstreamLock.lock();
-try {
-if (this._isUpstreamPaused === false) {
-return;
-}
-if (!this.sender) {
-livekitLogger.warn('unable to resume upstream for an unpublished track');
-return;
-}
-this._isUpstreamPaused = false;
-this.emit(TrackEvent.UpstreamResumed, this);
-// this operation is noop if mediastreamtrack is already being sent
-yield this.sender.replaceTrack(this._mediaStreamTrack);
-} finally {
-unlock();
-}
-});
-this.reacquireTrack = false;
-this.providedByUser = userProvidedTrack;
-this.muteLock = new Mutex();
-this.pauseUpstreamLock = new Mutex();
-// added to satisfy TS compiler, constraints are synced with MediaStreamTrack
-this.constraints = mediaTrack.getConstraints();
-this.setMediaStreamTrack(mediaTrack);
-if (constraints) {
-this.constraints = constraints;
-}
-}
-get id() {
-return this._mediaStreamTrack.id;
-}
-get dimensions() {
-if (this.kind !== Track.Kind.Video) {
-return undefined;
-}
-const {
-width,
-height
-} = this._mediaStreamTrack.getSettings();
-if (width && height) {
-return {
-width,
-height
-};
-}
-return undefined;
-}
-get isUpstreamPaused() {
-return this._isUpstreamPaused;
+function mergeDefaultOptions(options, audioDefaults, videoDefaults) {
+const opts = Object.assign({}, options);
+if (opts.audio === true) opts.audio = {};
+if (opts.video === true) opts.video = {};
+// use defaults
+if (opts.audio) {
+mergeObjectWithoutOverwriting(opts.audio, audioDefaults);
 }
-
-
-}
-get mediaStreamTrack() {
-var _a, _b;
-return (_b = (_a = this.processor) === null || _a === void 0 ? void 0 : _a.processedTrack) !== null && _b !== void 0 ? _b : this._mediaStreamTrack;
-}
-setMediaStreamTrack(newTrack) {
-return __awaiter(this, void 0, void 0, function* () {
-if (newTrack === this._mediaStreamTrack) {
-return;
-}
-if (this._mediaStreamTrack) {
-// detach
-this.attachedElements.forEach(el => {
-detachTrack(this._mediaStreamTrack, el);
-});
-this._mediaStreamTrack.removeEventListener('ended', this.handleEnded);
-this._mediaStreamTrack.removeEventListener('mute', this.pauseUpstream);
-this._mediaStreamTrack.removeEventListener('unmute', this.resumeUpstream);
-if (!this.providedByUser) {
-this._mediaStreamTrack.stop();
-}
-}
-this.mediaStream = new MediaStream([newTrack]);
-if (newTrack) {
-newTrack.addEventListener('ended', this.handleEnded);
-// when underlying track emits mute, it indicates that the device is unable
-// to produce media. In this case we'll need to signal with remote that
-// the track is "muted"
-// note this is different from LocalTrack.mute because we do not want to
-// touch MediaStreamTrack.enabled
-newTrack.addEventListener('mute', this.pauseUpstream);
-newTrack.addEventListener('unmute', this.resumeUpstream);
-this.constraints = newTrack.getConstraints();
-}
-if (this.sender) {
-yield this.sender.replaceTrack(newTrack);
-}
-this._mediaStreamTrack = newTrack;
-if (newTrack) {
-// sync muted state with the enabled state of the newly provided track
-this._mediaStreamTrack.enabled = !this.isMuted;
-// when a valid track is replace, we'd want to start producing
-yield this.resumeUpstream();
-this.attachedElements.forEach(el => {
-attachToElement(newTrack, el);
-});
-}
-});
-}
-waitForDimensions() {
-let timeout = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : defaultDimensionsTimeout;
-return __awaiter(this, void 0, void 0, function* () {
-if (this.kind === Track.Kind.Audio) {
-throw new Error('cannot get dimensions for audio tracks');
-}
-const started = Date.now();
-while (Date.now() - started < timeout) {
-const dims = this.dimensions;
-if (dims) {
-return dims;
-}
-yield sleep(50);
-}
-throw new TrackInvalidError('unable to get track dimensions after timeout');
-});
-}
-/**
- * @returns DeviceID of the device that is currently being used for this track
- */
-getDeviceId() {
-return __awaiter(this, void 0, void 0, function* () {
-// screen share doesn't have a usable device id
-if (this.source === Track.Source.ScreenShare) {
-return;
-}
-const {
-deviceId,
-groupId
-} = this._mediaStreamTrack.getSettings();
-const kind = this.kind === Track.Kind.Audio ? 'audioinput' : 'videoinput';
-return DeviceManager.getInstance().normalizeDeviceId(kind, deviceId, groupId);
-});
-}
-mute() {
-return __awaiter(this, void 0, void 0, function* () {
-this.setTrackMuted(true);
-return this;
-});
-}
-unmute() {
-return __awaiter(this, void 0, void 0, function* () {
-this.setTrackMuted(false);
-return this;
-});
-}
-replaceTrack(track) {
-let userProvidedTrack = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
-return __awaiter(this, void 0, void 0, function* () {
-if (!this.sender) {
-throw new TrackInvalidError('unable to replace an unpublished track');
-}
-livekitLogger.debug('replace MediaStreamTrack');
-yield this.setMediaStreamTrack(track);
-// this must be synced *after* setting mediaStreamTrack above, since it relies
-// on the previous state in order to cleanup
-this.providedByUser = userProvidedTrack;
-if (this.processor) {
-yield this.stopProcessor();
-}
-return this;
-});
-}
-restart(constraints) {
-return __awaiter(this, void 0, void 0, function* () {
-if (!constraints) {
-constraints = this.constraints;
-}
-livekitLogger.debug('restarting track with constraints', constraints);
-const streamConstraints = {
-audio: false,
-video: false
-};
-if (this.kind === Track.Kind.Video) {
-streamConstraints.video = constraints;
-} else {
-streamConstraints.audio = constraints;
-}
-// these steps are duplicated from setMediaStreamTrack because we must stop
-// the previous tracks before new tracks can be acquired
-this.attachedElements.forEach(el => {
-detachTrack(this.mediaStreamTrack, el);
-});
-this._mediaStreamTrack.removeEventListener('ended', this.handleEnded);
-// on Safari, the old audio track must be stopped before attempting to acquire
-// the new track, otherwise the new track will stop with
-// 'A MediaStreamTrack ended due to a capture failure`
-this._mediaStreamTrack.stop();
-// create new track and attach
-const mediaStream = yield navigator.mediaDevices.getUserMedia(streamConstraints);
-const newTrack = mediaStream.getTracks()[0];
-newTrack.addEventListener('ended', this.handleEnded);
-livekitLogger.debug('re-acquired MediaStreamTrack');
-yield this.setMediaStreamTrack(newTrack);
-this.constraints = constraints;
-if (this.processor) {
-const processor = this.processor;
-yield this.setProcessor(processor);
-} else {
-this.attachedElements.forEach(el => {
-attachToElement(this._mediaStreamTrack, el);
-});
-}
-this.emit(TrackEvent.Restarted, this);
-return this;
-});
-}
-setTrackMuted(muted) {
-livekitLogger.debug("setting ".concat(this.kind, " track ").concat(muted ? 'muted' : 'unmuted'));
-if (this.isMuted === muted && this._mediaStreamTrack.enabled !== muted) {
-return;
-}
-this.isMuted = muted;
-this._mediaStreamTrack.enabled = !muted;
-this.emit(muted ? TrackEvent.Muted : TrackEvent.Unmuted, this);
-}
-get needsReAcquisition() {
-return this._mediaStreamTrack.readyState !== 'live' || this._mediaStreamTrack.muted || !this._mediaStreamTrack.enabled || this.reacquireTrack;
-}
-handleAppVisibilityChanged() {
-const _super = Object.create(null, {
-handleAppVisibilityChanged: {
-get: () => super.handleAppVisibilityChanged
-}
-});
-return __awaiter(this, void 0, void 0, function* () {
-yield _super.handleAppVisibilityChanged.call(this);
-if (!isMobile()) return;
-livekitLogger.debug("visibility changed, is in Background: ".concat(this.isInBackground));
-if (!this.isInBackground && this.needsReAcquisition && !this.isUserProvided && !this.isMuted) {
-livekitLogger.debug("track needs to be reaquired, restarting ".concat(this.source));
-yield this.restart();
-this.reacquireTrack = false;
-}
-});
-}
-stop() {
-var _a;
-super.stop();
-this._mediaStreamTrack.removeEventListener('ended', this.handleEnded);
-this._mediaStreamTrack.removeEventListener('mute', this.pauseUpstream);
-this._mediaStreamTrack.removeEventListener('unmute', this.resumeUpstream);
-(_a = this.processor) === null || _a === void 0 ? void 0 : _a.destroy();
-this.processor = undefined;
-}
-/**
- * Sets a processor on this track.
- * See https://github.com/livekit/track-processors-js for example usage
- *
- * @experimental
- *
- * @param processor
- * @param showProcessedStreamLocally
- * @returns
- */
-setProcessor(processor) {
-let showProcessedStreamLocally = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
-var _a, _b;
-return __awaiter(this, void 0, void 0, function* () {
-if (this.isSettingUpProcessor) {
-livekitLogger.warn('already trying to set up a processor');
-return;
-}
-livekitLogger.debug('setting up processor');
-this.isSettingUpProcessor = true;
-if (this.processor) {
-yield this.stopProcessor();
-}
-if (this.kind === 'unknown') {
-throw TypeError('cannot set processor on track of unknown kind');
-}
-this.processorElement = (_a = this.processorElement) !== null && _a !== void 0 ? _a : document.createElement(this.kind);
-this.processorElement.muted = true;
-attachToElement(this._mediaStreamTrack, this.processorElement);
-this.processorElement.play().catch(e => livekitLogger.error(e));
-const processorOptions = {
-kind: this.kind,
-track: this._mediaStreamTrack,
-element: this.processorElement
-};
-yield processor.init(processorOptions);
-this.processor = processor;
-if (this.processor.processedTrack) {
-for (const el of this.attachedElements) {
-if (el !== this.processorElement && showProcessedStreamLocally) {
-detachTrack(this._mediaStreamTrack, el);
-attachToElement(this.processor.processedTrack, el);
-}
-}
-yield (_b = this.sender) === null || _b === void 0 ? void 0 : _b.replaceTrack(this.processor.processedTrack);
-}
-this.isSettingUpProcessor = false;
-});
-}
-getProcessor() {
-return this.processor;
-}
-/**
- * Stops the track processor
- * See https://github.com/livekit/track-processors-js for example usage
- *
- * @experimental
- * @returns
- */
-stopProcessor() {
-var _a, _b;
-return __awaiter(this, void 0, void 0, function* () {
-if (!this.processor) return;
-livekitLogger.debug('stopping processor');
-(_a = this.processor.processedTrack) === null || _a === void 0 ? void 0 : _a.stop();
-yield this.processor.destroy();
-this.processor = undefined;
-(_b = this.processorElement) === null || _b === void 0 ? void 0 : _b.remove();
-this.processorElement = undefined;
-yield this.restart();
-});
-}
-}
-
-function mergeDefaultOptions(options, audioDefaults, videoDefaults) {
-const opts = Object.assign({}, options);
-if (opts.audio === true) opts.audio = {};
-if (opts.video === true) opts.video = {};
-// use defaults
-if (opts.audio) {
-mergeObjectWithoutOverwriting(opts.audio, audioDefaults);
-}
-if (opts.video) {
-mergeObjectWithoutOverwriting(opts.video, videoDefaults);
+if (opts.video) {
+mergeObjectWithoutOverwriting(opts.video, videoDefaults);
 }
 return opts;
 }
@@ -15526,6 +15192,18 @@ function getNewAudioContext() {
 });
 }
 }
+/**
+ * @internal
+ */
+function sourceToKind(source) {
+  if (source === Track.Source.Microphone) {
+    return 'audioinput';
+  } else if (source === Track.Source.Camera) {
+    return 'videoinput';
+  } else {
+    return undefined;
+  }
+}

 const separator = '|';
 const ddExtensionURI = 'https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension';
@@ -15878,6 +15556,9 @@ class Mutex {
 return willUnlock;
 }
 }
+function isVideoCodec(maybeCodec) {
+  return videoCodecs.includes(maybeCodec);
+}
 function unwrapConstraint(constraint) {
   if (typeof constraint === 'string') {
     return constraint;
@@ -15957,6 +15638,20 @@ class SignalClient {
 let useJSON = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
 /** signal rtt in milliseconds */
 this.rtt = 0;
+/** @internal */
+this.resetCallbacks = () => {
+  this.onAnswer = undefined;
+  this.onLeave = undefined;
+  this.onLocalTrackPublished = undefined;
+  this.onLocalTrackUnpublished = undefined;
+  this.onNegotiateRequested = undefined;
+  this.onOffer = undefined;
+  this.onRemoteMuteChanged = undefined;
+  this.onSubscribedQualityUpdate = undefined;
+  this.onTokenRefresh = undefined;
+  this.onTrickle = undefined;
+  this.onClose = undefined;
+};
 this.isConnected = false;
 this.isReconnecting = false;
 this.useJSON = useJSON;
@@ -16102,15 +15797,17 @@ class SignalClient {
 try {
   this.isConnected = false;
   if (this.ws) {
-    this.ws.onclose = null;
     this.ws.onmessage = null;
     this.ws.onopen = null;
+    this.ws.onclose = null;
     // calling `ws.close()` only starts the closing handshake (CLOSING state), prefer to wait until state is actually CLOSED
     const closePromise = new Promise(resolve => {
       if (this.ws) {
-        this.ws.onclose =
+        this.ws.onclose = () => {
+          resolve();
+        };
       } else {
-        resolve(
+        resolve();
       }
     });
     if (this.ws.readyState < this.ws.CLOSING) {
@@ -16119,9 +15816,9 @@ class SignalClient {
     yield Promise.race([closePromise, sleep(250)]);
   }
   this.ws = undefined;
-  this.clearPingInterval();
 }
 } finally {
+  this.clearPingInterval();
   unlock();
 }
 });
@@ -16164,7 +15861,7 @@ class SignalClient {
 sendAddTrack(req) {
   return this.sendRequest({
     $case: 'addTrack',
-    addTrack:
+    addTrack: req
   });
 }
 sendUpdateLocalMetadata(metadata, name) {
@@ -16374,10 +16071,11 @@ class SignalClient {
 handleOnClose(reason) {
   return __awaiter(this, void 0, void 0, function* () {
     if (!this.isConnected) return;
+    const onCloseCallback = this.onClose;
     yield this.close();
     livekitLogger.debug("websocket connection closed: ".concat(reason));
-    if (
-
+    if (onCloseCallback) {
+      onCloseCallback(reason);
     }
   });
 }
@@ -16483,18 +16181,944 @@ function createConnectionParams(token, info, opts) {
|
|
16483
16181
|
if (opts.publishOnly !== undefined) {
|
16484
16182
|
params.set('publish', opts.publishOnly);
|
16485
16183
|
}
|
16486
|
-
if (opts.adaptiveStream) {
|
16487
|
-
params.set('adaptive_stream', '1');
|
16184
|
+
if (opts.adaptiveStream) {
|
16185
|
+
params.set('adaptive_stream', '1');
|
16186
|
+
}
|
16187
|
+
if (opts.reconnectReason) {
|
16188
|
+
params.set('reconnect_reason', opts.reconnectReason.toString());
|
16189
|
+
}
|
16190
|
+
// @ts-ignore
|
16191
|
+
if ((_a = navigator.connection) === null || _a === void 0 ? void 0 : _a.type) {
|
16192
|
+
// @ts-ignore
|
16193
|
+
params.set('network', navigator.connection.type);
|
16194
|
+
}
|
16195
|
+
return "?".concat(params.toString());
|
16196
|
+
}
|
16197
|
+
|
16198
|
+
const ENCRYPTION_ALGORITHM = 'AES-GCM';
|
16199
|
+
// flag set to indicate that e2ee has been setup for sender/receiver;
|
16200
|
+
const E2EE_FLAG = 'lk_e2ee';
|
16201
|
+
const SALT = 'LKFrameEncryptionKey';
|
16202
|
+
const KEY_PROVIDER_DEFAULTS = {
|
16203
|
+
sharedKey: false,
|
16204
|
+
ratchetSalt: SALT,
|
16205
|
+
ratchetWindowSize: 8
|
16206
|
+
};
|
16207
|
+
|
16208
|
+
function isE2EESupported() {
|
16209
|
+
return isInsertableStreamSupported() || isScriptTransformSupported();
|
16210
|
+
}
|
16211
|
+
function isScriptTransformSupported() {
|
16212
|
+
// @ts-ignore
|
16213
|
+
return typeof window.RTCRtpScriptTransform !== 'undefined';
|
16214
|
+
}
|
16215
|
+
function isInsertableStreamSupported() {
|
16216
|
+
return typeof window.RTCRtpSender !== 'undefined' &&
|
16217
|
+
// @ts-ignore
|
16218
|
+
typeof window.RTCRtpSender.prototype.createEncodedStreams !== 'undefined';
|
16219
|
+
}
|
16220
|
+
function isVideoFrame(frame) {
|
16221
|
+
return 'type' in frame;
|
16222
|
+
}
|
16223
|
+
function importKey(keyBytes) {
|
16224
|
+
let algorithm = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {
|
16225
|
+
name: ENCRYPTION_ALGORITHM
|
16226
|
+
};
|
16227
|
+
let usage = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 'encrypt';
|
16228
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16229
|
+
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/importKey
|
16230
|
+
return crypto.subtle.importKey('raw', keyBytes, algorithm, false, usage === 'derive' ? ['deriveBits', 'deriveKey'] : ['encrypt', 'decrypt']);
|
16231
|
+
});
|
16232
|
+
}
|
16233
|
+
function createKeyMaterialFromString(password) {
|
16234
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16235
|
+
let enc = new TextEncoder();
|
16236
|
+
const keyMaterial = yield crypto.subtle.importKey('raw', enc.encode(password), {
|
16237
|
+
name: 'PBKDF2'
|
16238
|
+
}, false, ['deriveBits', 'deriveKey']);
|
16239
|
+
return keyMaterial;
|
16240
|
+
});
|
16241
|
+
}
|
16242
|
+
function getAlgoOptions(algorithmName, salt) {
|
16243
|
+
const textEncoder = new TextEncoder();
|
16244
|
+
const encodedSalt = textEncoder.encode(salt);
|
16245
|
+
switch (algorithmName) {
|
16246
|
+
case 'HKDF':
|
16247
|
+
return {
|
16248
|
+
name: 'HKDF',
|
16249
|
+
salt: encodedSalt,
|
16250
|
+
hash: 'SHA-256',
|
16251
|
+
info: new ArrayBuffer(128)
|
16252
|
+
};
|
16253
|
+
case 'PBKDF2':
|
16254
|
+
{
|
16255
|
+
return {
|
16256
|
+
name: 'PBKDF2',
|
16257
|
+
salt: encodedSalt,
|
16258
|
+
hash: 'SHA-256',
|
16259
|
+
iterations: 100000
|
16260
|
+
};
|
16261
|
+
}
|
16262
|
+
default:
|
16263
|
+
throw new Error("algorithm ".concat(algorithmName, " is currently unsupported"));
|
16264
|
+
}
|
16265
|
+
}
|
16266
|
+
/**
|
16267
|
+
* Derives a set of keys from the master key.
|
16268
|
+
* See https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.1
|
16269
|
+
*/
|
16270
|
+
function deriveKeys(material, salt) {
|
16271
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16272
|
+
const algorithmOptions = getAlgoOptions(material.algorithm.name, salt);
|
16273
|
+
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/deriveKey#HKDF
|
16274
|
+
// https://developer.mozilla.org/en-US/docs/Web/API/HkdfParams
|
16275
|
+
const encryptionKey = yield crypto.subtle.deriveKey(algorithmOptions, material, {
|
16276
|
+
name: ENCRYPTION_ALGORITHM,
|
16277
|
+
length: 128
|
16278
|
+
}, false, ['encrypt', 'decrypt']);
|
16279
|
+
return {
|
16280
|
+
material,
|
16281
|
+
encryptionKey
|
16282
|
+
};
|
16283
|
+
});
|
16284
|
+
}
|
16285
|
+
function createE2EEKey() {
|
16286
|
+
return window.crypto.getRandomValues(new Uint8Array(32));
|
16287
|
+
}
|
16288
|
+
function mimeTypeToVideoCodecString(mimeType) {
|
16289
|
+
const codec = mimeType.split('/')[1].toLowerCase();
|
16290
|
+
if (!videoCodecs.includes(codec)) {
|
16291
|
+
throw Error("Video codec not supported: ".concat(codec));
|
16292
|
+
}
|
16293
|
+
return codec;
|
16294
|
+
}
|
16295
|
+
/**
|
16296
|
+
* Ratchets a key. See
|
16297
|
+
* https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.5.1
|
16298
|
+
*/
|
16299
|
+
function ratchet(material, salt) {
|
16300
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16301
|
+
const algorithmOptions = getAlgoOptions(material.algorithm.name, salt);
|
16302
|
+
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/deriveBits
|
16303
|
+
return crypto.subtle.deriveBits(algorithmOptions, material, 256);
|
16304
|
+
});
|
16305
|
+
}
|
16306
|
+
|
16307
|
+
/**
|
16308
|
+
* @experimental
|
16309
|
+
*/
|
16310
|
+
class BaseKeyProvider extends EventEmitter {
|
16311
|
+
constructor() {
|
16312
|
+
let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
|
16313
|
+
super();
|
16314
|
+
/**
|
16315
|
+
* callback being invoked after a ratchet request has been performed on the local participant
|
16316
|
+
* that surfaces the new key material.
|
16317
|
+
* @param material
|
16318
|
+
* @param keyIndex
|
16319
|
+
*/
|
16320
|
+
this.onKeyRatcheted = (material, keyIndex) => {
|
16321
|
+
console.debug('key ratcheted event received', material, keyIndex);
|
16322
|
+
};
|
16323
|
+
this.keyInfoMap = new Map();
|
16324
|
+
this.options = Object.assign(Object.assign({}, KEY_PROVIDER_DEFAULTS), options);
|
16325
|
+
this.on('keyRatcheted', this.onKeyRatcheted);
|
16326
|
+
}
|
16327
|
+
/**
|
16328
|
+
* callback to invoke once a key has been set for a participant
|
16329
|
+
* @param key
|
16330
|
+
* @param participantId
|
16331
|
+
* @param keyIndex
|
16332
|
+
*/
|
16333
|
+
onSetEncryptionKey(key, participantId, keyIndex) {
|
16334
|
+
const keyInfo = {
|
16335
|
+
key,
|
16336
|
+
participantId,
|
16337
|
+
keyIndex
|
16338
|
+
};
|
16339
|
+
this.keyInfoMap.set("".concat(participantId !== null && participantId !== void 0 ? participantId : 'shared', "-").concat(keyIndex !== null && keyIndex !== void 0 ? keyIndex : 0), keyInfo);
|
16340
|
+
this.emit('setKey', keyInfo);
|
16341
|
+
}
|
16342
|
+
getKeys() {
|
16343
|
+
return Array.from(this.keyInfoMap.values());
|
16344
|
+
}
|
16345
|
+
getOptions() {
|
16346
|
+
return this.options;
|
16347
|
+
}
|
16348
|
+
ratchetKey(participantId, keyIndex) {
|
16349
|
+
this.emit('ratchetRequest', participantId, keyIndex);
|
16350
|
+
}
|
16351
|
+
}
|
16352
|
+
/**
|
16353
|
+
* A basic KeyProvider implementation intended for a single shared
|
16354
|
+
* passphrase between all participants
|
16355
|
+
* @experimental
|
16356
|
+
*/
|
16357
|
+
class ExternalE2EEKeyProvider extends BaseKeyProvider {
|
16358
|
+
constructor() {
|
16359
|
+
let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
|
16360
|
+
const opts = Object.assign(Object.assign({}, options), {
|
16361
|
+
sharedKey: true
|
16362
|
+
});
|
16363
|
+
super(opts);
|
16364
|
+
}
|
16365
|
+
/**
|
16366
|
+
* Accepts a passphrase that's used to create the crypto keys
|
16367
|
+
* @param key
|
16368
|
+
*/
|
16369
|
+
setKey(key) {
|
16370
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16371
|
+
const derivedKey = yield createKeyMaterialFromString(key);
|
16372
|
+
this.onSetEncryptionKey(derivedKey);
|
16373
|
+
});
|
16374
|
+
}
|
16375
|
+
}
|
16376
|
+
|
16377
|
+
const EncryptionEvent = {
|
16378
|
+
ParticipantEncryptionStatusChanged: 'participantEncryptionStatusChanged',
|
16379
|
+
Error: 'encryptionError'
|
16380
|
+
};
|
16381
|
+
const CryptorEvent = {
|
16382
|
+
Error: 'cryptorError'
|
16383
|
+
};
|
16384
|
+
|
16385
|
+
const defaultId = 'default';
|
16386
|
+
class DeviceManager {
|
16387
|
+
static getInstance() {
|
16388
|
+
if (this.instance === undefined) {
|
16389
|
+
this.instance = new DeviceManager();
|
16390
|
+
}
|
16391
|
+
return this.instance;
|
16392
|
+
}
|
16393
|
+
getDevices(kind) {
|
16394
|
+
let requestPermissions = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
|
16395
|
+
var _a;
|
16396
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16397
|
+
if (((_a = DeviceManager.userMediaPromiseMap) === null || _a === void 0 ? void 0 : _a.size) > 0) {
|
16398
|
+
livekitLogger.debug('awaiting getUserMedia promise');
|
16399
|
+
try {
|
16400
|
+
if (kind) {
|
16401
|
+
yield DeviceManager.userMediaPromiseMap.get(kind);
|
16402
|
+
} else {
|
16403
|
+
yield Promise.all(DeviceManager.userMediaPromiseMap.values());
|
16404
|
+
}
|
16405
|
+
} catch (e) {
|
16406
|
+
livekitLogger.warn('error waiting for media permissons');
|
16407
|
+
}
|
16408
|
+
}
|
16409
|
+
let devices = yield navigator.mediaDevices.enumerateDevices();
|
16410
|
+
if (requestPermissions && kind && (
|
16411
|
+
// for safari we need to skip this check, as otherwise it will re-acquire user media and fail on iOS https://bugs.webkit.org/show_bug.cgi?id=179363
|
16412
|
+
!DeviceManager.userMediaPromiseMap.get(kind) || !isSafari())) {
|
16413
|
+
const isDummyDeviceOrEmpty = devices.length === 0 || devices.some(device => {
|
16414
|
+
const noLabel = device.label === '';
|
16415
|
+
const isRelevant = kind ? device.kind === kind : true;
|
16416
|
+
return noLabel && isRelevant;
|
16417
|
+
});
|
16418
|
+
if (isDummyDeviceOrEmpty) {
|
16419
|
+
const permissionsToAcquire = {
|
16420
|
+
video: kind !== 'audioinput' && kind !== 'audiooutput',
|
16421
|
+
audio: kind !== 'videoinput'
|
16422
|
+
};
|
16423
|
+
const stream = yield navigator.mediaDevices.getUserMedia(permissionsToAcquire);
|
16424
|
+
devices = yield navigator.mediaDevices.enumerateDevices();
|
16425
|
+
stream.getTracks().forEach(track => {
|
16426
|
+
track.stop();
|
16427
|
+
});
|
16428
|
+
}
|
16429
|
+
}
|
16430
|
+
if (kind) {
|
16431
|
+
devices = devices.filter(device => device.kind === kind);
|
16432
|
+
}
|
16433
|
+
return devices;
|
16434
|
+
});
|
16435
|
+
}
|
16436
|
+
normalizeDeviceId(kind, deviceId, groupId) {
|
16437
|
+
return __awaiter(this, void 0, void 0, function* () {
|
16438
|
+
if (deviceId !== defaultId) {
|
16439
|
+
return deviceId;
|
16440
|
+
}
|
16441
|
+
// resolve actual device id if it's 'default': Chrome returns it when no
|
16442
|
+
// device has been chosen
|
16443
|
+
const devices = yield this.getDevices(kind);
|
16444
|
+
const device = devices.find(d => d.groupId === groupId && d.deviceId !== defaultId);
|
16445
|
+
return device === null || device === void 0 ? void 0 : device.deviceId;
|
16446
|
+
});
|
16447
|
+
}
|
16448
|
+
}
|
16449
|
+
DeviceManager.mediaDeviceKinds = ['audioinput', 'audiooutput', 'videoinput'];
|
16450
|
+
DeviceManager.userMediaPromiseMap = new Map();
|
16451
|
+
|
16452
|
+
const defaultDimensionsTimeout = 1000;
|
16453
|
+
class LocalTrack extends Track {
|
16454
|
+
get constraints() {
|
16455
|
+
return this._constraints;
|
16456
|
+
}
|
16457
|
+
/**
|
16458
|
+
*
|
16459
|
+
* @param mediaTrack
|
16460
|
+
* @param kind
|
16461
|
+
* @param constraints MediaTrackConstraints that are being used when restarting or reacquiring tracks
|
16462
|
+
* @param userProvidedTrack Signals to the SDK whether or not the mediaTrack should be managed (i.e. released and reacquired) internally by the SDK
|
16463
|
+
*/
|
16464
|
+
constructor(mediaTrack, kind, constraints) {
|
16465
|
+
let userProvidedTrack = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
|
16466
|
+
super(mediaTrack, kind);
|
16467
|
+
this._isUpstreamPaused = false;
|
16468
|
+
this.handleEnded = () => {
|
16469
|
+
if (this.isInBackground) {
|
16470
|
+
this.reacquireTrack = true;
|
16471
|
+
}
|
16472
|
+
this._mediaStreamTrack.removeEventListener('mute', this.pauseUpstream);
|
16473
|
+
this._mediaStreamTrack.removeEventListener('unmute', this.resumeUpstream);
|
16474
|
+
this.emit(TrackEvent.Ended, this);
|
16475
|
+
};
|
16476
|
+
/**
|
16477
|
+
* pauses publishing to the server without disabling the local MediaStreamTrack
|
16478
|
+
* this is used to display a user's own video locally while pausing publishing to
|
16479
|
+
* the server.
|
16480
|
+
* this API is unsupported on Safari < 12 due to a bug
|
16481
|
+
**/
|
16482
|
+
this.pauseUpstream = () => __awaiter(this, void 0, void 0, function* () {
|
16483
|
+
const unlock = yield this.pauseUpstreamLock.lock();
|
16484
|
+
try {
|
16485
|
+
if (this._isUpstreamPaused === true) {
|
16486
|
+
return;
|
16487
|
+
}
|
16488
|
+
if (!this.sender) {
|
16489
|
+
livekitLogger.warn('unable to pause upstream for an unpublished track');
|
16490
|
+
return;
|
16491
|
+
}
|
16492
|
+
this._isUpstreamPaused = true;
|
16493
|
+
this.emit(TrackEvent.UpstreamPaused, this);
|
16494
|
+
const browser = getBrowser();
|
16495
|
+
if ((browser === null || browser === void 0 ? void 0 : browser.name) === 'Safari' && compareVersions(browser.version, '12.0') < 0) {
|
16496
|
+
// https://bugs.webkit.org/show_bug.cgi?id=184911
|
16497
|
+
throw new DeviceUnsupportedError('pauseUpstream is not supported on Safari < 12.');
|
16498
|
+
}
|
16499
|
+
yield this.sender.replaceTrack(null);
|
16500
|
+
} finally {
|
16501
|
+
unlock();
|
16502
|
+
}
|
16503
|
+
});
|
16504
|
+
this.resumeUpstream = () => __awaiter(this, void 0, void 0, function* () {
|
16505
|
+
const unlock = yield this.pauseUpstreamLock.lock();
|
16506
|
+
try {
|
16507
|
+
if (this._isUpstreamPaused === false) {
|
16508
|
+
return;
|
16509
|
+
}
|
16510
|
+
if (!this.sender) {
|
16511
|
+
livekitLogger.warn('unable to resume upstream for an unpublished track');
|
16512
|
+
return;
|
16513
|
+
}
|
16514
|
+
this._isUpstreamPaused = false;
|
16515
|
+
this.emit(TrackEvent.UpstreamResumed, this);
|
16516
|
+
// this operation is noop if mediastreamtrack is already being sent
|
16517
|
+
yield this.sender.replaceTrack(this._mediaStreamTrack);
|
16518
|
+
} finally {
|
16519
|
+
unlock();
|
16520
|
+
}
|
16521
|
+
});
|
16522
|
+
this.reacquireTrack = false;
|
16523
|
+
this.providedByUser = userProvidedTrack;
|
16524
|
+
this.muteLock = new Mutex();
|
16525
|
+
this.pauseUpstreamLock = new Mutex();
|
16526
|
+
this.processorLock = new Mutex();
|
16527
|
+
this.setMediaStreamTrack(mediaTrack, true);
|
16528
|
+
// added to satisfy TS compiler, constraints are synced with MediaStreamTrack
|
16529
|
+
this._constraints = mediaTrack.getConstraints();
|
16530
|
+
if (constraints) {
|
16531
|
+
this._constraints = constraints;
|
16532
|
+
}
|
16533
|
+
}
|
16534
|
+
get id() {
|
16535
|
+
return this._mediaStreamTrack.id;
|
16536
|
+
}
|
16537
|
+
get dimensions() {
|
16538
|
+
if (this.kind !== Track.Kind.Video) {
|
16539
|
+
return undefined;
|
16540
|
+
}
|
16541
|
+
const {
|
16542
|
+
width,
|
16543
|
+
height
|
16544
|
+
} = this._mediaStreamTrack.getSettings();
|
16545
|
+
if (width && height) {
|
16546
|
+
return {
|
16547
|
+
width,
|
16548
|
+
height
|
16549
|
+
};
|
16550
|
+
}
|
16551
|
+
return undefined;
|
16552
|
+
}
|
16553
|
+
get isUpstreamPaused() {
|
16554
|
+
return this._isUpstreamPaused;
|
16555
|
+
}
|
16556
|
+
get isUserProvided() {
|
16557
|
+
return this.providedByUser;
|
16558
|
+
}
|
16559
|
+
get mediaStreamTrack() {
|
16560
|
+
var _a, _b;
|
16561
|
+
return (_b = (_a = this.processor) === null || _a === void 0 ? void 0 : _a.processedTrack) !== null && _b !== void 0 ? _b : this._mediaStreamTrack;
|
16562
|
+
}
|
16563
|
+
+ setMediaStreamTrack(newTrack, force) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (newTrack === this._mediaStreamTrack && !force) {
+ return;
+ }
+ if (this._mediaStreamTrack) {
+ // detach
+ this.attachedElements.forEach(el => {
+ detachTrack(this._mediaStreamTrack, el);
+ });
+ this._mediaStreamTrack.removeEventListener('ended', this.handleEnded);
+ this._mediaStreamTrack.removeEventListener('mute', this.pauseUpstream);
+ this._mediaStreamTrack.removeEventListener('unmute', this.resumeUpstream);
+ if (!this.providedByUser && this._mediaStreamTrack !== newTrack) {
+ this._mediaStreamTrack.stop();
+ }
+ }
+ this.mediaStream = new MediaStream([newTrack]);
+ if (newTrack) {
+ newTrack.addEventListener('ended', this.handleEnded);
+ // when underlying track emits mute, it indicates that the device is unable
+ // to produce media. In this case we'll need to signal with remote that
+ // the track is "muted"
+ // note this is different from LocalTrack.mute because we do not want to
+ // touch MediaStreamTrack.enabled
+ newTrack.addEventListener('mute', this.pauseUpstream);
+ newTrack.addEventListener('unmute', this.resumeUpstream);
+ this._constraints = newTrack.getConstraints();
+ }
+ if (this.sender) {
+ yield this.sender.replaceTrack(newTrack);
+ }
+ this._mediaStreamTrack = newTrack;
+ if (newTrack) {
+ // sync muted state with the enabled state of the newly provided track
+ this._mediaStreamTrack.enabled = !this.isMuted;
+ // when a valid track is replace, we'd want to start producing
+ yield this.resumeUpstream();
+ this.attachedElements.forEach(el => {
+ attachToElement(newTrack, el);
+ });
+ }
+ });
+ }
+ waitForDimensions() {
+ let timeout = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : defaultDimensionsTimeout;
+ return __awaiter(this, void 0, void 0, function* () {
+ if (this.kind === Track.Kind.Audio) {
+ throw new Error('cannot get dimensions for audio tracks');
+ }
+ const started = Date.now();
+ while (Date.now() - started < timeout) {
+ const dims = this.dimensions;
+ if (dims) {
+ return dims;
+ }
+ yield sleep(50);
+ }
+ throw new TrackInvalidError('unable to get track dimensions after timeout');
+ });
+ }
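`waitForDimensions` polls `MediaStreamTrack.getSettings()` until a width and height are reported or the timeout elapses, which is useful before computing simulcast layers. A short sketch, assuming the method is callable on a freshly created local video track and that the timeout argument is in milliseconds:

```ts
import { createLocalVideoTrack } from 'livekit-client';

// Sketch: wait until the browser reports real capture dimensions.
const videoTrack = await createLocalVideoTrack();
try {
  const { width, height } = await videoTrack.waitForDimensions(2000);
  console.log(`capturing at ${width}x${height}`);
} catch {
  console.warn('no dimensions reported within 2s');
}
```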
+ /**
+ * @returns DeviceID of the device that is currently being used for this track
+ */
+ getDeviceId() {
+ return __awaiter(this, void 0, void 0, function* () {
+ // screen share doesn't have a usable device id
+ if (this.source === Track.Source.ScreenShare) {
+ return;
+ }
+ const {
+ deviceId,
+ groupId
+ } = this._mediaStreamTrack.getSettings();
+ const kind = this.kind === Track.Kind.Audio ? 'audioinput' : 'videoinput';
+ return DeviceManager.getInstance().normalizeDeviceId(kind, deviceId, groupId);
+ });
+ }
+ mute() {
+ return __awaiter(this, void 0, void 0, function* () {
+ this.setTrackMuted(true);
+ return this;
+ });
+ }
+ unmute() {
+ return __awaiter(this, void 0, void 0, function* () {
+ this.setTrackMuted(false);
+ return this;
+ });
+ }
+ replaceTrack(track) {
+ let userProvidedTrack = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!this.sender) {
+ throw new TrackInvalidError('unable to replace an unpublished track');
+ }
+ livekitLogger.debug('replace MediaStreamTrack');
+ yield this.setMediaStreamTrack(track);
+ // this must be synced *after* setting mediaStreamTrack above, since it relies
+ // on the previous state in order to cleanup
+ this.providedByUser = userProvidedTrack;
+ if (this.processor) {
+ yield this.stopProcessor();
+ }
+ return this;
+ });
+ }
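`replaceTrack` swaps the MediaStreamTrack backing an already-published local track (it throws if the track has no sender yet) and treats the replacement as user-provided unless told otherwise. A sketch that feeds a different capture into a published audio track; the device id is a placeholder:

```ts
import { LocalAudioTrack } from 'livekit-client';

// Sketch: replace the capture behind a published LocalAudioTrack.
async function swapMicCapture(localAudioTrack: LocalAudioTrack, deviceId: string) {
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: { deviceId: { exact: deviceId } },
  });
  const [newMediaTrack] = stream.getAudioTracks();
  // second argument marks the track as user provided, so the SDK won't stop it on its own
  await localAudioTrack.replaceTrack(newMediaTrack, true);
}
```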
+ restart(constraints) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!constraints) {
+ constraints = this._constraints;
+ }
+ livekitLogger.debug('restarting track with constraints', constraints);
+ const streamConstraints = {
+ audio: false,
+ video: false
+ };
+ if (this.kind === Track.Kind.Video) {
+ streamConstraints.video = constraints;
+ } else {
+ streamConstraints.audio = constraints;
+ }
+ // these steps are duplicated from setMediaStreamTrack because we must stop
+ // the previous tracks before new tracks can be acquired
+ this.attachedElements.forEach(el => {
+ detachTrack(this.mediaStreamTrack, el);
+ });
+ this._mediaStreamTrack.removeEventListener('ended', this.handleEnded);
+ // on Safari, the old audio track must be stopped before attempting to acquire
+ // the new track, otherwise the new track will stop with
+ // 'A MediaStreamTrack ended due to a capture failure`
+ this._mediaStreamTrack.stop();
+ // create new track and attach
+ const mediaStream = yield navigator.mediaDevices.getUserMedia(streamConstraints);
+ const newTrack = mediaStream.getTracks()[0];
+ newTrack.addEventListener('ended', this.handleEnded);
+ livekitLogger.debug('re-acquired MediaStreamTrack');
+ yield this.setMediaStreamTrack(newTrack);
+ this._constraints = constraints;
+ if (this.processor) {
+ const processor = this.processor;
+ yield this.setProcessor(processor);
+ } else {
+ this.attachedElements.forEach(el => {
+ attachToElement(this._mediaStreamTrack, el);
+ });
+ }
+ this.emit(TrackEvent.Restarted, this);
+ return this;
+ });
+ }
+ setTrackMuted(muted) {
+ livekitLogger.debug("setting ".concat(this.kind, " track ").concat(muted ? 'muted' : 'unmuted'));
+ if (this.isMuted === muted && this._mediaStreamTrack.enabled !== muted) {
+ return;
+ }
+ this.isMuted = muted;
+ this._mediaStreamTrack.enabled = !muted;
+ this.emit(muted ? TrackEvent.Muted : TrackEvent.Unmuted, this);
+ }
+ get needsReAcquisition() {
+ return this._mediaStreamTrack.readyState !== 'live' || this._mediaStreamTrack.muted || !this._mediaStreamTrack.enabled || this.reacquireTrack;
+ }
+ handleAppVisibilityChanged() {
+ const _super = Object.create(null, {
+ handleAppVisibilityChanged: {
+ get: () => super.handleAppVisibilityChanged
+ }
+ });
+ return __awaiter(this, void 0, void 0, function* () {
+ yield _super.handleAppVisibilityChanged.call(this);
+ if (!isMobile()) return;
+ livekitLogger.debug("visibility changed, is in Background: ".concat(this.isInBackground));
+ if (!this.isInBackground && this.needsReAcquisition && !this.isUserProvided && !this.isMuted) {
+ livekitLogger.debug("track needs to be reaquired, restarting ".concat(this.source));
+ yield this.restart();
+ this.reacquireTrack = false;
+ }
+ });
+ }
+ stop() {
+ var _a;
+ super.stop();
+ this._mediaStreamTrack.removeEventListener('ended', this.handleEnded);
+ this._mediaStreamTrack.removeEventListener('mute', this.pauseUpstream);
+ this._mediaStreamTrack.removeEventListener('unmute', this.resumeUpstream);
+ (_a = this.processor) === null || _a === void 0 ? void 0 : _a.destroy();
+ this.processor = undefined;
+ }
+ /**
+ * Sets a processor on this track.
+ * See https://github.com/livekit/track-processors-js for example usage
+ *
+ * @experimental
+ *
+ * @param processor
+ * @param showProcessedStreamLocally
+ * @returns
+ */
+ setProcessor(processor) {
+ let showProcessedStreamLocally = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ const unlock = yield this.processorLock.lock();
+ try {
+ livekitLogger.debug('setting up processor');
+ if (this.processor) {
+ yield this.stopProcessor();
+ }
+ if (this.kind === 'unknown') {
+ throw TypeError('cannot set processor on track of unknown kind');
+ }
+ this.processorElement = (_a = this.processorElement) !== null && _a !== void 0 ? _a : document.createElement(this.kind);
+ this.processorElement.muted = true;
+ attachToElement(this._mediaStreamTrack, this.processorElement);
+ this.processorElement.play().catch(error => livekitLogger.error('failed to play processor element', {
+ error
+ }));
+ const processorOptions = {
+ kind: this.kind,
+ track: this._mediaStreamTrack,
+ element: this.processorElement
+ };
+ yield processor.init(processorOptions);
+ this.processor = processor;
+ if (this.processor.processedTrack) {
+ for (const el of this.attachedElements) {
+ if (el !== this.processorElement && showProcessedStreamLocally) {
+ detachTrack(this._mediaStreamTrack, el);
+ attachToElement(this.processor.processedTrack, el);
+ }
+ }
+ yield (_b = this.sender) === null || _b === void 0 ? void 0 : _b.replaceTrack(this.processor.processedTrack);
+ }
+ } finally {
+ unlock();
+ }
+ });
+ }
+ getProcessor() {
+ return this.processor;
+ }
+ /**
+ * Stops the track processor
+ * See https://github.com/livekit/track-processors-js for example usage
+ *
+ * @experimental
+ * @returns
+ */
+ stopProcessor() {
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!this.processor) return;
+ livekitLogger.debug('stopping processor');
+ (_a = this.processor.processedTrack) === null || _a === void 0 ? void 0 : _a.stop();
+ yield this.processor.destroy();
+ this.processor = undefined;
+ (_b = this.processorElement) === null || _b === void 0 ? void 0 : _b.remove();
+ this.processorElement = undefined;
+ yield this.restart();
+ });
+ }
+ }
+
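`setProcessor` attaches a track processor to a hidden media element, initializes it, and points the RTP sender at `processor.processedTrack`; `stopProcessor` tears it down and restarts the raw capture. A sketch using the companion track-processors package referenced in the comments above (the `BackgroundBlur` helper and its argument are assumptions about that package, not part of this bundle):

```ts
import { createLocalVideoTrack } from 'livekit-client';
import { BackgroundBlur } from '@livekit/track-processors'; // assumed companion package

// Sketch: run the camera through a background-blur processor before publishing.
const camera = await createLocalVideoTrack();
await camera.setProcessor(BackgroundBlur(10)); // sender now carries the processed track
// ...later, drop the effect and go back to the raw capture (restarts the track internally)
await camera.stopProcessor();
```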
+ /**
+ * @experimental
+ */
+ class E2EEManager extends EventEmitter {
+ get isEnabled() {
+ return this.encryptionEnabled;
+ }
+ constructor(options) {
+ super();
+ this.onWorkerMessage = ev => {
+ var _a, _b;
+ const {
+ kind,
+ data
+ } = ev.data;
+ switch (kind) {
+ case 'error':
+ console.error('error in worker', {
+ data
+ });
+ this.emit(EncryptionEvent.Error, data.error);
+ break;
+ case 'enable':
+ if (this.encryptionEnabled !== data.enabled && !data.participantId) {
+ this.emit(EncryptionEvent.ParticipantEncryptionStatusChanged, data.enabled, (_a = this.room) === null || _a === void 0 ? void 0 : _a.localParticipant);
+ this.encryptionEnabled = data.enabled;
+ } else if (data.participantId) {
+ const participant = (_b = this.room) === null || _b === void 0 ? void 0 : _b.getParticipantByIdentity(data.participantId);
+ this.emit(EncryptionEvent.ParticipantEncryptionStatusChanged, data.enabled, participant);
+ }
+ if (this.encryptionEnabled) {
+ this.keyProvider.getKeys().forEach(keyInfo => {
+ this.postKey(keyInfo);
+ });
+ }
+ break;
+ case 'ratchetKey':
+ this.keyProvider.emit('keyRatcheted', data.material, data.keyIndex);
+ break;
+ }
+ };
+ this.onWorkerError = ev => {
+ livekitLogger.error('e2ee worker encountered an error:', {
+ error: ev.error
+ });
+ this.emit(EncryptionEvent.Error, ev.error);
+ };
+ this.keyProvider = options.keyProvider;
+ this.worker = options.worker;
+ this.encryptionEnabled = false;
+ }
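`E2EEManager` is assembled from a key provider and a dedicated worker; applications normally don't construct it directly but pass both through the new `e2ee` room option added in this release. A sketch of that wiring; the worker entry point, `ExternalE2EEKeyProvider`, `setKey`, and `setE2EEEnabled` are the documented surface for this feature and should be treated as assumptions as far as this bundle diff is concerned:

```ts
import { ExternalE2EEKeyProvider, Room } from 'livekit-client';

// Sketch: opt a Room into end-to-end encryption with a shared passphrase.
const keyProvider = new ExternalE2EEKeyProvider();
const room = new Room({
  e2ee: {
    keyProvider,
    // the worker ships with this release as dist/livekit-client.e2ee.worker.mjs
    worker: new Worker(new URL('livekit-client/e2ee-worker', import.meta.url)),
  },
});
await keyProvider.setKey('shared-passphrase'); // placeholder secret
await room.setE2EEEnabled(true);
```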
+ /**
+ * @internal
+ */
+ setup(room) {
+ if (!isE2EESupported()) {
+ throw new DeviceUnsupportedError('tried to setup end-to-end encryption on an unsupported browser');
+ }
+ livekitLogger.info('setting up e2ee');
+ if (room !== this.room) {
+ this.room = room;
+ this.setupEventListeners(room, this.keyProvider);
+ // this.worker = new Worker('');
+ const msg = {
+ kind: 'init',
+ data: {
+ keyProviderOptions: this.keyProvider.getOptions()
+ }
+ };
+ if (this.worker) {
+ livekitLogger.info("initializing worker", {
+ worker: this.worker
+ });
+ this.worker.onmessage = this.onWorkerMessage;
+ this.worker.onerror = this.onWorkerError;
+ this.worker.postMessage(msg);
+ }
+ }
+ }
+ /**
+ * @internal
+ */
+ setParticipantCryptorEnabled(enabled, participantId) {
+ return __awaiter(this, void 0, void 0, function* () {
+ livekitLogger.info("set e2ee to ".concat(enabled));
+ if (this.worker) {
+ const enableMsg = {
+ kind: 'enable',
+ data: {
+ enabled,
+ participantId
+ }
+ };
+ this.worker.postMessage(enableMsg);
+ } else {
+ throw new ReferenceError('failed to enable e2ee, worker is not ready');
+ }
+ });
+ }
+ setupEngine(engine) {
+ engine.on(EngineEvent.RTPVideoMapUpdate, rtpMap => {
+ this.postRTPMap(rtpMap);
+ });
+ }
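`setParticipantCryptorEnabled` is marked internal; applications toggle encryption through the room-level wrapper, which also flips the local participant's `encryptionType` to `Encryption_Type.GCM` and republishes its tracks (see `setE2EEEnabled` further down this diff). A sketch, assuming a Room that was created with the `e2ee` option:

```ts
import { Room } from 'livekit-client';

// Sketch: toggle end-to-end encryption after connecting.
async function setEncryption(room: Room, enabled: boolean) {
  // rejects if the room was created without an e2ee key provider/worker
  await room.setE2EEEnabled(enabled);
}
```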
+ setupEventListeners(room, keyProvider) {
+ room.on(RoomEvent.TrackPublished, (pub, participant) => this.setParticipantCryptorEnabled(pub.trackInfo.encryption !== Encryption_Type.NONE, participant.identity));
+ room.on(RoomEvent.ConnectionStateChanged, state => {
+ if (state === ConnectionState.Connected) {
+ room.participants.forEach(participant => {
+ participant.tracks.forEach(pub => {
+ this.setParticipantCryptorEnabled(pub.trackInfo.encryption !== Encryption_Type.NONE, participant.identity);
+ });
+ });
+ }
+ });
+ room.on(RoomEvent.TrackUnsubscribed, (track, _, participant) => {
+ var _a;
+ const msg = {
+ kind: 'removeTransform',
+ data: {
+ participantId: participant.identity,
+ trackId: track.mediaStreamID
+ }
+ };
+ (_a = this.worker) === null || _a === void 0 ? void 0 : _a.postMessage(msg);
+ });
+ room.on(RoomEvent.TrackSubscribed, (track, pub, participant) => {
+ this.setupE2EEReceiver(track, participant.identity, pub.trackInfo);
+ });
+ room.localParticipant.on(ParticipantEvent.LocalTrackPublished, publication => __awaiter(this, void 0, void 0, function* () {
+ this.setupE2EESender(publication.track, publication.track.sender, room.localParticipant.identity);
+ }));
+ keyProvider.on('setKey', keyInfo => this.postKey(keyInfo)).on('ratchetRequest', (participantId, keyIndex) => this.postRatchetRequest(participantId, keyIndex));
+ }
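The manager emits `EncryptionEvent.ParticipantEncryptionStatusChanged` whenever the worker reports an enable or disable for a participant; applications are expected to observe this on the room (assuming it is re-emitted as `RoomEvent.ParticipantEncryptionStatusChanged`, which is not shown in this hunk). A sketch of listening for it:

```ts
import { Participant, Room, RoomEvent } from 'livekit-client';

declare const room: Room; // assumed: the E2EE-enabled room from the earlier sketch

// Sketch: reflect per-participant encryption status in the UI.
room.on(
  RoomEvent.ParticipantEncryptionStatusChanged,
  (enabled: boolean, participant?: Participant) => {
    console.log(`${participant?.identity ?? 'local'}: encryption ${enabled ? 'on' : 'off'}`);
  },
);
```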
+ postRatchetRequest(participantId, keyIndex) {
+ if (!this.worker) {
+ throw Error('could not ratchet key, worker is missing');
+ }
+ const msg = {
+ kind: 'ratchetRequest',
+ data: {
+ participantId,
+ keyIndex
+ }
+ };
+ this.worker.postMessage(msg);
+ }
+ postKey(_ref) {
+ let {
+ key,
+ participantId,
+ keyIndex
+ } = _ref;
+ if (!this.worker) {
+ throw Error('could not set key, worker is missing');
+ }
+ const msg = {
+ kind: 'setKey',
+ data: {
+ participantId,
+ key,
+ keyIndex
+ }
+ };
+ this.worker.postMessage(msg);
+ }
+ postRTPMap(map) {
+ if (!this.worker) {
+ throw Error('could not post rtp map, worker is missing');
+ }
+ const msg = {
+ kind: 'setRTPMap',
+ data: {
+ map
+ }
+ };
+ this.worker.postMessage(msg);
+ }
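`postKey` forwards key material from the key provider to the worker, and the `'setKey'` listener registered in `setupEventListeners` makes key rotation a matter of calling the provider again. A sketch with the external key provider; its `setKey` signature is an assumption based on the documented API rather than this bundle:

```ts
import { ExternalE2EEKeyProvider } from 'livekit-client';

// Sketch: rotate the shared E2EE passphrase at runtime.
async function rotateSharedKey(keyProvider: ExternalE2EEKeyProvider, nextPassphrase: string) {
  // emits 'setKey', which E2EEManager.postKey relays to the worker as a setKey message
  await keyProvider.setKey(nextPassphrase);
}
```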
+ setupE2EEReceiver(track, remoteId, trackInfo) {
+ if (!track.receiver) {
+ return;
+ }
+ if (!(trackInfo === null || trackInfo === void 0 ? void 0 : trackInfo.mimeType) || trackInfo.mimeType === '') {
+ throw new TypeError('MimeType missing from trackInfo, cannot set up E2EE cryptor');
+ }
+ this.handleReceiver(track.receiver, track.mediaStreamID, remoteId, track.kind === 'video' ? mimeTypeToVideoCodecString(trackInfo.mimeType) : undefined);
+ }
+ setupE2EESender(track, sender, localId) {
+ if (!(track instanceof LocalTrack) || !sender) {
+ if (!sender) livekitLogger.warn('early return because sender is not ready');
+ return;
+ }
+ this.handleSender(sender, track.mediaStreamID, localId, undefined);
}
-
-
+ /**
+ * Handles the given {@code RTCRtpReceiver} by creating a {@code TransformStream} which will inject
+ * a frame decoder.
+ *
+ */
+ handleReceiver(receiver, trackId, participantId, codec) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!this.worker) {
+ return;
+ }
+ if (isScriptTransformSupported()) {
+ const options = {
+ kind: 'decode',
+ participantId,
+ trackId,
+ codec
+ };
+ // @ts-ignore
+ receiver.transform = new RTCRtpScriptTransform(this.worker, options);
+ } else {
+ if (E2EE_FLAG in receiver && codec) {
+ // only update codec
+ const msg = {
+ kind: 'updateCodec',
+ data: {
+ trackId,
+ codec,
+ participantId
+ }
+ };
+ this.worker.postMessage(msg);
+ return;
+ }
+ // @ts-ignore
+ let writable = receiver.writableStream;
+ // @ts-ignore
+ let readable = receiver.readableStream;
+ if (!writable || !readable) {
+ // @ts-ignore
+ const receiverStreams = receiver.createEncodedStreams();
+ // @ts-ignore
+ receiver.writableStream = receiverStreams.writable;
+ writable = receiverStreams.writable;
+ // @ts-ignore
+ receiver.readableStream = receiverStreams.readable;
+ readable = receiverStreams.readable;
+ }
+ const msg = {
+ kind: 'decode',
+ data: {
+ readableStream: readable,
+ writableStream: writable,
+ trackId: trackId,
+ codec,
+ participantId
+ }
+ };
+ this.worker.postMessage(msg, [readable, writable]);
+ }
+ // @ts-ignore
+ receiver[E2EE_FLAG] = true;
+ });
}
-
-
+ /**
+ * Handles the given {@code RTCRtpSender} by creating a {@code TransformStream} which will inject
+ * a frame encoder.
+ *
+ */
+ handleSender(sender, trackId, participantId, codec) {
+ if (E2EE_FLAG in sender || !this.worker) {
+ return;
+ }
+ if (isScriptTransformSupported()) {
+ livekitLogger.warn('initialize script transform');
+ const options = {
+ kind: 'encode',
+ participantId,
+ trackId,
+ codec
+ };
+ // @ts-ignore
+ sender.transform = new RTCRtpScriptTransform(this.worker, options);
+ } else {
+ livekitLogger.warn('initialize encoded streams');
+ // @ts-ignore
+ const senderStreams = sender.createEncodedStreams();
+ const msg = {
+ kind: 'encode',
+ data: {
+ readableStream: senderStreams.readable,
+ writableStream: senderStreams.writable,
+ codec,
+ trackId,
+ participantId
+ }
+ };
+ this.worker.postMessage(msg, [senderStreams.readable, senderStreams.writable]);
+ }
// @ts-ignore
-
+ sender[E2EE_FLAG] = true;
}
- return "?".concat(params.toString());
}

var parser$1 = {};
@@ -17221,10 +17845,15 @@ eliminate this issue.
const startBitrateForSVC = 0.7;
const PCEvents = {
NegotiationStarted: 'negotiationStarted',
- NegotiationComplete: 'negotiationComplete'
+ NegotiationComplete: 'negotiationComplete',
+ RTPVideoPayloadTypes: 'rtpVideoPayloadTypes'
};
/** @internal */
class PCTransport extends EventEmitter {
+ get pc() {
+ if (this._pc) return this._pc;
+ throw new UnexpectedConnectionState('Expected peer connection to be available');
+ }
constructor(config) {
let mediaConstraints = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
super();
@@ -17247,12 +17876,12 @@ class PCTransport extends EventEmitter {
}
}
}, 100);
- this.
+ this._pc = isChromiumBased() ?
// @ts-expect-error chrome allows additional media constraints to be passed into the RTCPeerConnection constructor
new RTCPeerConnection(config, mediaConstraints) : new RTCPeerConnection(config);
}
get isICEConnected() {
- return this.pc.iceConnectionState === 'connected' || this.pc.iceConnectionState === 'completed';
+ return this._pc !== null && (this.pc.iceConnectionState === 'connected' || this.pc.iceConnectionState === 'completed');
}
addIceCandidate(candidate) {
return __awaiter(this, void 0, void 0, function* () {
@@ -17329,6 +17958,14 @@ class PCTransport extends EventEmitter {
this.createAndSendOffer();
} else if (sd.type === 'answer') {
this.emit(PCEvents.NegotiationComplete);
+ if (sd.sdp) {
+ const sdpParsed = parse(sd.sdp);
+ sdpParsed.media.forEach(media => {
+ if (media.type === 'video') {
+ this.emit(PCEvents.RTPVideoPayloadTypes, media.rtp);
+ }
+ });
+ }
}
});
}
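The new answer handler parses the remote SDP and emits the video `rtp` entries so the engine can build a payload-type-to-codec map for the E2EE worker. The `parse` call comes from the SDP parser bundled into this file (it appears to be the sdp-transform package; treat that import as an assumption). A standalone sketch of the same extraction:

```ts
import { parse } from 'sdp-transform'; // assumed: the parser bundled into this file

// Sketch: collect payload type -> codec pairs from the video sections of an SDP answer.
function videoPayloadTypes(sdp: string): Map<number, string> {
  const rtpMap = new Map<number, string>();
  for (const media of parse(sdp).media) {
    if (media.type !== 'video') continue;
    for (const rtp of media.rtp) {
      rtpMap.set(rtp.payload, rtp.codec.toLowerCase());
    }
  }
  return rtpMap;
}
```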
@@ -17342,7 +17979,7 @@ class PCTransport extends EventEmitter {
livekitLogger.debug('restarting ICE');
this.restartingIce = true;
}
- if (this.
+ if (this._pc && this._pc.signalingState === 'have-local-offer') {
// we're waiting for the peer to accept our offer, so we'll just wait
// the only exception to this is when ICE restart is needed
const currentSD = this.pc.remoteDescription;
@@ -17354,7 +17991,7 @@ class PCTransport extends EventEmitter {
this.renegotiate = true;
return;
}
- } else if (this.
+ } else if (!this._pc || this._pc.signalingState === 'closed') {
livekitLogger.warn('could not createOffer with closed peer connection');
return;
}
@@ -17428,9 +18065,22 @@ class PCTransport extends EventEmitter {
this.trackBitrates.push(info);
}
close() {
- this.
-
-
+ if (!this._pc) {
+ return;
+ }
+ this._pc.close();
+ this._pc.onconnectionstatechange = null;
+ this._pc.oniceconnectionstatechange = null;
+ this._pc.onicegatheringstatechange = null;
+ this._pc.ondatachannel = null;
+ this._pc.onnegotiationneeded = null;
+ this._pc.onsignalingstatechange = null;
+ this._pc.onicecandidate = null;
+ this._pc.ondatachannel = null;
+ this._pc.ontrack = null;
+ this._pc.onconnectionstatechange = null;
+ this._pc.oniceconnectionstatechange = null;
+ this._pc = null;
}
setMungedSDP(sd, munged, remote) {
return __awaiter(this, void 0, void 0, function* () {
@@ -17620,89 +18270,6 @@ function getCloudConfigUrl(serverUrl) {
|
|
17620
18270
|
return "".concat(serverUrl.protocol.replace('ws', 'http'), "//").concat(serverUrl.host, "/settings");
|
17621
18271
|
}
|
17622
18272
|
|
17623
|
-
class VideoPreset {
|
17624
|
-
constructor(width, height, maxBitrate, maxFramerate, priority) {
|
17625
|
-
this.width = width;
|
17626
|
-
this.height = height;
|
17627
|
-
this.encoding = {
|
17628
|
-
maxBitrate,
|
17629
|
-
maxFramerate,
|
17630
|
-
priority
|
17631
|
-
};
|
17632
|
-
}
|
17633
|
-
get resolution() {
|
17634
|
-
return {
|
17635
|
-
width: this.width,
|
17636
|
-
height: this.height,
|
17637
|
-
frameRate: this.encoding.maxFramerate,
|
17638
|
-
aspectRatio: this.width / this.height
|
17639
|
-
};
|
17640
|
-
}
|
17641
|
-
}
|
17642
|
-
const backupCodecs = ['vp8', 'h264'];
|
17643
|
-
function isBackupCodec(codec) {
|
17644
|
-
return !!backupCodecs.find(backup => backup === codec);
|
17645
|
-
}
|
17646
|
-
function isCodecEqual(c1, c2) {
|
17647
|
-
return (c1 === null || c1 === void 0 ? void 0 : c1.toLowerCase().replace(/audio\/|video\//y, '')) === (c2 === null || c2 === void 0 ? void 0 : c2.toLowerCase().replace(/audio\/|video\//y, ''));
|
17648
|
-
}
|
17649
|
-
var AudioPresets;
|
17650
|
-
(function (AudioPresets) {
|
17651
|
-
AudioPresets.telephone = {
|
17652
|
-
maxBitrate: 12000
|
17653
|
-
};
|
17654
|
-
AudioPresets.speech = {
|
17655
|
-
maxBitrate: 20000
|
17656
|
-
};
|
17657
|
-
AudioPresets.music = {
|
17658
|
-
maxBitrate: 32000
|
17659
|
-
};
|
17660
|
-
AudioPresets.musicStereo = {
|
17661
|
-
maxBitrate: 48000
|
17662
|
-
};
|
17663
|
-
AudioPresets.musicHighQuality = {
|
17664
|
-
maxBitrate: 64000
|
17665
|
-
};
|
17666
|
-
AudioPresets.musicHighQualityStereo = {
|
17667
|
-
maxBitrate: 96000
|
17668
|
-
};
|
17669
|
-
})(AudioPresets || (AudioPresets = {}));
|
17670
|
-
/**
|
17671
|
-
* Sane presets for video resolution/encoding
|
17672
|
-
*/
|
17673
|
-
const VideoPresets = {
|
17674
|
-
h90: new VideoPreset(160, 90, 60000, 15),
|
17675
|
-
h180: new VideoPreset(320, 180, 120000, 15),
|
17676
|
-
h216: new VideoPreset(384, 216, 180000, 15),
|
17677
|
-
h360: new VideoPreset(640, 360, 300000, 20),
|
17678
|
-
h540: new VideoPreset(960, 540, 600000, 25),
|
17679
|
-
h720: new VideoPreset(1280, 720, 1700000, 30),
|
17680
|
-
h1080: new VideoPreset(1920, 1080, 3000000, 30),
|
17681
|
-
h1440: new VideoPreset(2560, 1440, 5000000, 30),
|
17682
|
-
h2160: new VideoPreset(3840, 2160, 8000000, 30)
|
17683
|
-
};
|
17684
|
-
/**
|
17685
|
-
* Four by three presets
|
17686
|
-
*/
|
17687
|
-
const VideoPresets43 = {
|
17688
|
-
h120: new VideoPreset(160, 120, 80000, 15),
|
17689
|
-
h180: new VideoPreset(240, 180, 100000, 15),
|
17690
|
-
h240: new VideoPreset(320, 240, 150000, 15),
|
17691
|
-
h360: new VideoPreset(480, 360, 225000, 20),
|
17692
|
-
h480: new VideoPreset(640, 480, 300000, 20),
|
17693
|
-
h540: new VideoPreset(720, 540, 450000, 25),
|
17694
|
-
h720: new VideoPreset(960, 720, 1500000, 30),
|
17695
|
-
h1080: new VideoPreset(1440, 1080, 2500000, 30),
|
17696
|
-
h1440: new VideoPreset(1920, 1440, 3500000, 30)
|
17697
|
-
};
|
17698
|
-
const ScreenSharePresets = {
|
17699
|
-
h360fps3: new VideoPreset(640, 360, 200000, 3, 'medium'),
|
17700
|
-
h720fps5: new VideoPreset(1280, 720, 400000, 5, 'medium'),
|
17701
|
-
h720fps15: new VideoPreset(1280, 720, 1000000, 15, 'medium'),
|
17702
|
-
h1080fps15: new VideoPreset(1920, 1080, 1500000, 15, 'medium'),
|
17703
|
-
h1080fps30: new VideoPreset(1920, 1080, 3000000, 30, 'medium')
|
17704
|
-
};
|
17705
|
-
|
17706
18273
|
const publishDefaults = {
|
17707
18274
|
/**
|
17708
18275
|
* @deprecated
|
@@ -17716,10 +18283,7 @@ const publishDefaults = {
screenShareEncoding: ScreenSharePresets.h1080fps15.encoding,
stopMicTrackOnMute: false,
videoCodec: 'vp8',
- backupCodec:
- codec: 'vp8',
- encoding: VideoPresets.h540.encoding
- }
+ backupCodec: false
};
const audioDefaults = {
autoGainControl: true,
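The default `backupCodec` changes from a VP8/h540 preset object to `false`, so codec fallback is now opt-in. A sketch of restoring the previous behaviour explicitly when publishing with an SVC codec; the object form mirrors the removed default (whether a bare `true` is also accepted is not visible in this diff):

```ts
import { Room, VideoPresets } from 'livekit-client';

// Sketch: opt back into a VP8 backup codec now that the default is `false`.
const room = new Room({
  publishDefaults: {
    videoCodec: 'av1',
    backupCodec: { codec: 'vp8', encoding: VideoPresets.h540.encoding },
  },
});
```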
@@ -17924,6 +18488,13 @@ class RTCEngine extends EventEmitter {
|
|
17924
18488
|
this.closingLock = new Mutex();
|
17925
18489
|
this.dataProcessLock = new Mutex();
|
17926
18490
|
this.dcBufferStatus = new Map([[DataPacket_Kind.LOSSY, true], [DataPacket_Kind.RELIABLE, true]]);
|
18491
|
+
this.client.onParticipantUpdate = updates => this.emit(EngineEvent.ParticipantUpdate, updates);
|
18492
|
+
this.client.onConnectionQuality = update => this.emit(EngineEvent.ConnectionQualityUpdate, update);
|
18493
|
+
this.client.onRoomUpdate = update => this.emit(EngineEvent.RoomUpdate, update);
|
18494
|
+
this.client.onSubscriptionError = resp => this.emit(EngineEvent.SubscriptionError, resp);
|
18495
|
+
this.client.onSubscriptionPermissionUpdate = update => this.emit(EngineEvent.SubscriptionPermissionUpdate, update);
|
18496
|
+
this.client.onSpeakersChanged = update => this.emit(EngineEvent.SpeakersChanged, update);
|
18497
|
+
this.client.onStreamStateUpdate = update => this.emit(EngineEvent.StreamStateChanged, update);
|
17927
18498
|
}
|
17928
18499
|
join(url, token, opts, abortSignal) {
|
17929
18500
|
return __awaiter(this, void 0, void 0, function* () {
|
@@ -17943,6 +18514,7 @@ class RTCEngine extends EventEmitter {
|
|
17943
18514
|
if (!this.subscriberPrimary) {
|
17944
18515
|
this.negotiate();
|
17945
18516
|
}
|
18517
|
+
this.setupSignalClientCallbacks();
|
17946
18518
|
this.clientConfiguration = joinResponse.clientConfiguration;
|
17947
18519
|
return joinResponse;
|
17948
18520
|
} catch (e) {
|
@@ -17971,33 +18543,65 @@ class RTCEngine extends EventEmitter {
|
|
17971
18543
|
this.removeAllListeners();
|
17972
18544
|
this.deregisterOnLineListener();
|
17973
18545
|
this.clearPendingReconnect();
|
17974
|
-
|
17975
|
-
|
17976
|
-
var _a, _b;
|
17977
|
-
try {
|
17978
|
-
// TODO: react-native-webrtc doesn't have removeTrack yet.
|
17979
|
-
if ((_a = this.publisher) === null || _a === void 0 ? void 0 : _a.pc.removeTrack) {
|
17980
|
-
(_b = this.publisher) === null || _b === void 0 ? void 0 : _b.pc.removeTrack(sender);
|
17981
|
-
}
|
17982
|
-
} catch (e) {
|
17983
|
-
livekitLogger.warn('could not removeTrack', {
|
17984
|
-
error: e
|
17985
|
-
});
|
17986
|
-
}
|
17987
|
-
});
|
17988
|
-
this.publisher.close();
|
17989
|
-
this.publisher = undefined;
|
17990
|
-
}
|
17991
|
-
if (this.subscriber) {
|
17992
|
-
this.subscriber.close();
|
17993
|
-
this.subscriber = undefined;
|
17994
|
-
}
|
17995
|
-
yield this.client.close();
|
18546
|
+
yield this.cleanupPeerConnections();
|
18547
|
+
yield this.cleanupClient();
|
17996
18548
|
} finally {
|
17997
18549
|
unlock();
|
17998
18550
|
}
|
17999
18551
|
});
|
18000
18552
|
}
|
18553
|
+
cleanupPeerConnections() {
|
18554
|
+
return __awaiter(this, void 0, void 0, function* () {
|
18555
|
+
if (this.publisher && this.publisher.pc.signalingState !== 'closed') {
|
18556
|
+
this.publisher.pc.getSenders().forEach(sender => {
|
18557
|
+
var _a, _b;
|
18558
|
+
try {
|
18559
|
+
// TODO: react-native-webrtc doesn't have removeTrack yet.
|
18560
|
+
if ((_a = this.publisher) === null || _a === void 0 ? void 0 : _a.pc.removeTrack) {
|
18561
|
+
(_b = this.publisher) === null || _b === void 0 ? void 0 : _b.pc.removeTrack(sender);
|
18562
|
+
}
|
18563
|
+
} catch (e) {
|
18564
|
+
livekitLogger.warn('could not removeTrack', {
|
18565
|
+
error: e
|
18566
|
+
});
|
18567
|
+
}
|
18568
|
+
});
|
18569
|
+
}
|
18570
|
+
if (this.publisher) {
|
18571
|
+
this.publisher.close();
|
18572
|
+
this.publisher = undefined;
|
18573
|
+
}
|
18574
|
+
if (this.subscriber) {
|
18575
|
+
this.subscriber.close();
|
18576
|
+
this.subscriber = undefined;
|
18577
|
+
}
|
18578
|
+
this.primaryPC = undefined;
|
18579
|
+
const dcCleanup = dc => {
|
18580
|
+
if (!dc) return;
|
18581
|
+
dc.close();
|
18582
|
+
dc.onbufferedamountlow = null;
|
18583
|
+
dc.onclose = null;
|
18584
|
+
dc.onclosing = null;
|
18585
|
+
dc.onerror = null;
|
18586
|
+
dc.onmessage = null;
|
18587
|
+
dc.onopen = null;
|
18588
|
+
};
|
18589
|
+
dcCleanup(this.lossyDC);
|
18590
|
+
dcCleanup(this.lossyDCSub);
|
18591
|
+
dcCleanup(this.reliableDC);
|
18592
|
+
dcCleanup(this.reliableDCSub);
|
18593
|
+
this.lossyDC = undefined;
|
18594
|
+
this.lossyDCSub = undefined;
|
18595
|
+
this.reliableDC = undefined;
|
18596
|
+
this.reliableDCSub = undefined;
|
18597
|
+
});
|
18598
|
+
}
|
18599
|
+
cleanupClient() {
|
18600
|
+
return __awaiter(this, void 0, void 0, function* () {
|
18601
|
+
yield this.client.close();
|
18602
|
+
this.client.resetCallbacks();
|
18603
|
+
});
|
18604
|
+
}
|
18001
18605
|
addTrack(req) {
|
18002
18606
|
if (this.pendingTrackResolvers[req.cid]) {
|
18003
18607
|
throw new TrackInvalidError('a track with the same ID has already been published');
|
@@ -18064,13 +18668,19 @@ class RTCEngine extends EventEmitter {
|
|
18064
18668
|
});
|
18065
18669
|
}
|
18066
18670
|
configure(joinResponse) {
|
18067
|
-
var _a;
|
18671
|
+
var _a, _b;
|
18068
18672
|
// already configured
|
18069
18673
|
if (this.publisher || this.subscriber) {
|
18070
18674
|
return;
|
18071
18675
|
}
|
18072
18676
|
this.participantSid = (_a = joinResponse.participant) === null || _a === void 0 ? void 0 : _a.sid;
|
18073
18677
|
const rtcConfig = this.makeRTCConfiguration(joinResponse);
|
18678
|
+
if ((_b = this.signalOpts) === null || _b === void 0 ? void 0 : _b.e2eeEnabled) {
|
18679
|
+
livekitLogger.debug('E2EE - setting up transports with insertable streams');
|
18680
|
+
// this makes sure that no data is sent before the transforms are ready
|
18681
|
+
// @ts-ignore
|
18682
|
+
rtcConfig.encodedInsertableStreams = true;
|
18683
|
+
}
|
18074
18684
|
const googConstraints = {
|
18075
18685
|
optional: [{
|
18076
18686
|
googDscp: true
|
@@ -18128,6 +18738,8 @@ class RTCEngine extends EventEmitter {
|
|
18128
18738
|
this.emit(EngineEvent.MediaTrackAdded, ev.track, ev.streams[0], ev.receiver);
|
18129
18739
|
};
|
18130
18740
|
this.createDataChannels();
|
18741
|
+
}
|
18742
|
+
setupSignalClientCallbacks() {
|
18131
18743
|
// configure signaling client
|
18132
18744
|
this.client.onAnswer = sd => __awaiter(this, void 0, void 0, function* () {
|
18133
18745
|
if (!this.publisher) {
|
@@ -18135,7 +18747,7 @@ class RTCEngine extends EventEmitter {
|
|
18135
18747
|
}
|
18136
18748
|
livekitLogger.debug('received server answer', {
|
18137
18749
|
RTCSdpType: sd.type,
|
18138
|
-
signalingState: this.publisher.pc.signalingState
|
18750
|
+
signalingState: this.publisher.pc.signalingState.toString()
|
18139
18751
|
});
|
18140
18752
|
yield this.publisher.setRemoteDescription(sd);
|
18141
18753
|
});
|
@@ -18161,7 +18773,7 @@ class RTCEngine extends EventEmitter {
|
|
18161
18773
|
}
|
18162
18774
|
livekitLogger.debug('received server offer', {
|
18163
18775
|
RTCSdpType: sd.type,
|
18164
|
-
signalingState: this.subscriber.pc.signalingState
|
18776
|
+
signalingState: this.subscriber.pc.signalingState.toString()
|
18165
18777
|
});
|
18166
18778
|
yield this.subscriber.setRemoteDescription(sd);
|
18167
18779
|
// answer the offer
|
@@ -18302,11 +18914,13 @@ class RTCEngine extends EventEmitter {
|
|
18302
18914
|
createSender(track, opts, encodings) {
|
18303
18915
|
return __awaiter(this, void 0, void 0, function* () {
|
18304
18916
|
if (supportsTransceiver()) {
|
18305
|
-
|
18917
|
+
const sender = yield this.createTransceiverRTCRtpSender(track, opts, encodings);
|
18918
|
+
return sender;
|
18306
18919
|
}
|
18307
18920
|
if (supportsAddTrack()) {
|
18308
|
-
livekitLogger.
|
18309
|
-
|
18921
|
+
livekitLogger.warn('using add-track fallback');
|
18922
|
+
const sender = yield this.createRTCRtpSender(track.mediaStreamTrack);
|
18923
|
+
return sender;
|
18310
18924
|
}
|
18311
18925
|
throw new UnexpectedConnectionState('Required webRTC APIs not supported on this device');
|
18312
18926
|
});
|
@@ -18314,7 +18928,6 @@ class RTCEngine extends EventEmitter {
|
|
18314
18928
|
createSimulcastSender(track, simulcastTrack, opts, encodings) {
|
18315
18929
|
return __awaiter(this, void 0, void 0, function* () {
|
18316
18930
|
// store RTCRtpSender
|
18317
|
-
// @ts-ignore
|
18318
18931
|
if (supportsTransceiver()) {
|
18319
18932
|
return this.createSimulcastTransceiverSender(track, simulcastTrack, opts, encodings);
|
18320
18933
|
}
|
@@ -18330,8 +18943,13 @@ class RTCEngine extends EventEmitter {
|
|
18330
18943
|
if (!this.publisher) {
|
18331
18944
|
throw new UnexpectedConnectionState('publisher is closed');
|
18332
18945
|
}
|
18946
|
+
const streams = [];
|
18947
|
+
if (track.mediaStream) {
|
18948
|
+
streams.push(track.mediaStream);
|
18949
|
+
}
|
18333
18950
|
const transceiverInit = {
|
18334
|
-
direction: 'sendonly'
|
18951
|
+
direction: 'sendonly',
|
18952
|
+
streams
|
18335
18953
|
};
|
18336
18954
|
if (encodings) {
|
18337
18955
|
transceiverInit.sendEncodings = encodings;
|
@@ -18413,7 +19031,7 @@ class RTCEngine extends EventEmitter {
|
|
18413
19031
|
this.fullReconnectOnNext = true;
|
18414
19032
|
}
|
18415
19033
|
if (recoverable) {
|
18416
|
-
this.handleDisconnect('reconnect', ReconnectReason.
|
19034
|
+
this.handleDisconnect('reconnect', ReconnectReason.RR_UNKNOWN);
|
18417
19035
|
} else {
|
18418
19036
|
livekitLogger.info("could not recover connection after ".concat(this.reconnectAttempts, " attempts, ").concat(Date.now() - this.reconnectStart, "ms. giving up"));
|
18419
19037
|
this.emit(EngineEvent.Disconnected);
|
@@ -18436,7 +19054,7 @@ class RTCEngine extends EventEmitter {
|
|
18436
19054
|
return null;
|
18437
19055
|
}
|
18438
19056
|
restartConnection(regionUrl) {
|
18439
|
-
var _a, _b, _c
|
19057
|
+
var _a, _b, _c;
|
18440
19058
|
return __awaiter(this, void 0, void 0, function* () {
|
18441
19059
|
try {
|
18442
19060
|
if (!this.url || !this.token) {
|
@@ -18448,12 +19066,8 @@ class RTCEngine extends EventEmitter {
|
|
18448
19066
|
if (this.client.isConnected) {
|
18449
19067
|
yield this.client.sendLeave();
|
18450
19068
|
}
|
18451
|
-
yield this.
|
18452
|
-
this.
|
18453
|
-
(_a = this.publisher) === null || _a === void 0 ? void 0 : _a.close();
|
18454
|
-
this.publisher = undefined;
|
18455
|
-
(_b = this.subscriber) === null || _b === void 0 ? void 0 : _b.close();
|
18456
|
-
this.subscriber = undefined;
|
19069
|
+
yield this.cleanupPeerConnections();
|
19070
|
+
yield this.cleanupClient();
|
18457
19071
|
let joinResponse;
|
18458
19072
|
try {
|
18459
19073
|
if (!this.signalOpts) {
|
@@ -18475,17 +19089,17 @@ class RTCEngine extends EventEmitter {
|
|
18475
19089
|
this.client.setReconnected();
|
18476
19090
|
this.emit(EngineEvent.SignalRestarted, joinResponse);
|
18477
19091
|
yield this.waitForPCReconnected();
|
18478
|
-
(
|
19092
|
+
(_a = this.regionUrlProvider) === null || _a === void 0 ? void 0 : _a.resetAttempts();
|
18479
19093
|
// reconnect success
|
18480
19094
|
this.emit(EngineEvent.Restarted);
|
18481
19095
|
} catch (error) {
|
18482
|
-
const nextRegionUrl = yield (
|
19096
|
+
const nextRegionUrl = yield (_b = this.regionUrlProvider) === null || _b === void 0 ? void 0 : _b.getNextBestRegionUrl();
|
18483
19097
|
if (nextRegionUrl) {
|
18484
19098
|
yield this.restartConnection(nextRegionUrl);
|
18485
19099
|
return;
|
18486
19100
|
} else {
|
18487
19101
|
// no more regions to try (or we're not on cloud)
|
18488
|
-
(
|
19102
|
+
(_c = this.regionUrlProvider) === null || _c === void 0 ? void 0 : _c.resetAttempts();
|
18489
19103
|
throw error;
|
18490
19104
|
}
|
18491
19105
|
}
|
@@ -18505,6 +19119,7 @@ class RTCEngine extends EventEmitter {
|
|
18505
19119
|
livekitLogger.info("resuming signal connection, attempt ".concat(this.reconnectAttempts));
|
18506
19120
|
this.emit(EngineEvent.Resuming);
|
18507
19121
|
try {
|
19122
|
+
this.setupSignalClientCallbacks();
|
18508
19123
|
const res = yield this.client.reconnect(this.url, this.token, this.participantSid, reason);
|
18509
19124
|
if (res) {
|
18510
19125
|
const rtcConfig = this.makeRTCConfiguration(res);
|
@@ -18692,6 +19307,9 @@ class RTCEngine extends EventEmitter {
|
|
18692
19307
|
resolve();
|
18693
19308
|
return;
|
18694
19309
|
};
|
19310
|
+
if (this.isClosed) {
|
19311
|
+
reject('cannot negotiate on closed engine');
|
19312
|
+
}
|
18695
19313
|
this.on(EngineEvent.Closing, handleClosed);
|
18696
19314
|
const negotiationTimeout = setTimeout(() => {
|
18697
19315
|
reject('negotiation timed out');
|
@@ -18708,13 +19326,23 @@ class RTCEngine extends EventEmitter {
|
|
18708
19326
|
resolve();
|
18709
19327
|
});
|
18710
19328
|
});
|
19329
|
+
this.publisher.once(PCEvents.RTPVideoPayloadTypes, rtpTypes => {
|
19330
|
+
const rtpMap = new Map();
|
19331
|
+
rtpTypes.forEach(rtp => {
|
19332
|
+
const codec = rtp.codec.toLowerCase();
|
19333
|
+
if (isVideoCodec(codec)) {
|
19334
|
+
rtpMap.set(rtp.payload, codec);
|
19335
|
+
}
|
19336
|
+
});
|
19337
|
+
this.emit(EngineEvent.RTPVideoMapUpdate, rtpMap);
|
19338
|
+
});
|
18711
19339
|
this.publisher.negotiate(e => {
|
18712
19340
|
cleanup();
|
18713
19341
|
reject(e);
|
18714
19342
|
if (e instanceof NegotiationError) {
|
18715
19343
|
this.fullReconnectOnNext = true;
|
18716
19344
|
}
|
18717
|
-
this.handleDisconnect('negotiation', ReconnectReason.
|
19345
|
+
this.handleDisconnect('negotiation', ReconnectReason.RR_UNKNOWN);
|
18718
19346
|
});
|
18719
19347
|
});
|
18720
19348
|
}
|
@@ -18851,14 +19479,14 @@ class LocalAudioTrack extends LocalTrack {
|
|
18851
19479
|
}
|
18852
19480
|
setDeviceId(deviceId) {
|
18853
19481
|
return __awaiter(this, void 0, void 0, function* () {
|
18854
|
-
if (this.
|
19482
|
+
if (this._constraints.deviceId === deviceId) {
|
18855
19483
|
return true;
|
18856
19484
|
}
|
18857
|
-
this.
|
19485
|
+
this._constraints.deviceId = deviceId;
|
18858
19486
|
if (!this.isMuted) {
|
18859
19487
|
yield this.restartTrack();
|
18860
19488
|
}
|
18861
|
-
return unwrapConstraint(deviceId) === this.mediaStreamTrack.getSettings().deviceId;
|
19489
|
+
return this.isMuted || unwrapConstraint(deviceId) === this.mediaStreamTrack.getSettings().deviceId;
|
18862
19490
|
});
|
18863
19491
|
}
|
18864
19492
|
mute() {
|
@@ -18892,7 +19520,8 @@ class LocalAudioTrack extends LocalTrack {
|
|
18892
19520
|
return __awaiter(this, void 0, void 0, function* () {
|
18893
19521
|
const unlock = yield this.muteLock.lock();
|
18894
19522
|
try {
|
18895
|
-
|
19523
|
+
const deviceHasChanged = this._constraints.deviceId && this._mediaStreamTrack.getSettings().deviceId !== unwrapConstraint(this._constraints.deviceId);
|
19524
|
+
if (this.source === Track.Source.Microphone && (this.stopOnMute || this._mediaStreamTrack.readyState === 'ended' || deviceHasChanged) && !this.isUserProvided) {
|
18896
19525
|
livekitLogger.debug('reacquiring mic track');
|
18897
19526
|
yield this.restartTrack();
|
18898
19527
|
}
|
@@ -19447,16 +20076,16 @@ class LocalVideoTrack extends LocalTrack {
|
|
19447
20076
|
}
|
19448
20077
|
setDeviceId(deviceId) {
|
19449
20078
|
return __awaiter(this, void 0, void 0, function* () {
|
19450
|
-
if (this.
|
20079
|
+
if (this._constraints.deviceId === deviceId && this._mediaStreamTrack.getSettings().deviceId === unwrapConstraint(deviceId)) {
|
19451
20080
|
return true;
|
19452
20081
|
}
|
19453
|
-
this.
|
20082
|
+
this._constraints.deviceId = deviceId;
|
19454
20083
|
// when video is muted, underlying media stream track is stopped and
|
19455
20084
|
// will be restarted later
|
19456
20085
|
if (!this.isMuted) {
|
19457
20086
|
yield this.restartTrack();
|
19458
20087
|
}
|
19459
|
-
return unwrapConstraint(deviceId) === this._mediaStreamTrack.getSettings().deviceId;
|
20088
|
+
return this.isMuted || unwrapConstraint(deviceId) === this._mediaStreamTrack.getSettings().deviceId;
|
19460
20089
|
});
|
19461
20090
|
}
|
19462
20091
|
restartTrack(options) {
|
@@ -20042,6 +20671,7 @@ class RemoteVideoTrack extends RemoteTrack {
|
|
20042
20671
|
}
|
20043
20672
|
this.elementInfos = this.elementInfos.filter(info => info !== elementInfo);
|
20044
20673
|
this.updateVisibility();
|
20674
|
+
this.debouncedHandleResize();
|
20045
20675
|
}
|
20046
20676
|
detach(element) {
|
20047
20677
|
let detachedElements = [];
|
@@ -20094,9 +20724,8 @@ class RemoteVideoTrack extends RemoteTrack {
|
|
20094
20724
|
stopObservingElement(element) {
|
20095
20725
|
const stopElementInfos = this.elementInfos.filter(info => info.element === element);
|
20096
20726
|
for (const info of stopElementInfos) {
|
20097
|
-
|
20727
|
+
this.stopObservingElementInfo(info);
|
20098
20728
|
}
|
20099
|
-
this.elementInfos = this.elementInfos.filter(info => info.element !== element);
|
20100
20729
|
}
|
20101
20730
|
handleAppVisibilityChanged() {
|
20102
20731
|
const _super = Object.create(null, {
|
@@ -20259,6 +20888,7 @@ class TrackPublication extends EventEmitter {
|
|
20259
20888
|
constructor(kind, id, name) {
|
20260
20889
|
super();
|
20261
20890
|
this.metadataMuted = false;
|
20891
|
+
this.encryption = Encryption_Type.NONE;
|
20262
20892
|
this.handleMuted = () => {
|
20263
20893
|
this.emit(TrackEvent.Muted);
|
20264
20894
|
};
|
@@ -20292,6 +20922,9 @@ class TrackPublication extends EventEmitter {
|
|
20292
20922
|
get isSubscribed() {
|
20293
20923
|
return this.track !== undefined;
|
20294
20924
|
}
|
20925
|
+
get isEncrypted() {
|
20926
|
+
return this.encryption !== Encryption_Type.NONE;
|
20927
|
+
}
|
20295
20928
|
/**
|
20296
20929
|
* an [AudioTrack] if this publication holds an audio track
|
20297
20930
|
*/
|
@@ -20321,8 +20954,9 @@ class TrackPublication extends EventEmitter {
|
|
20321
20954
|
};
|
20322
20955
|
this.simulcasted = info.simulcast;
|
20323
20956
|
}
|
20957
|
+
this.encryption = info.encryption;
|
20324
20958
|
this.trackInfo = info;
|
20325
|
-
livekitLogger.
|
20959
|
+
livekitLogger.debug('update publication info', {
|
20326
20960
|
info
|
20327
20961
|
});
|
20328
20962
|
}
|
@@ -20435,6 +21069,9 @@ function qualityFromProto(q) {
|
|
20435
21069
|
}
|
20436
21070
|
}
|
20437
21071
|
class Participant extends EventEmitter {
|
21072
|
+
get isEncrypted() {
|
21073
|
+
return this.tracks.size > 0 && Array.from(this.tracks.values()).every(tr => tr.isEncrypted);
|
21074
|
+
}
|
20438
21075
|
/** @internal */
|
20439
21076
|
constructor(sid, identity, name, metadata) {
|
20440
21077
|
super();
|
@@ -21133,6 +21770,7 @@ class LocalParticipant extends Participant {
|
|
21133
21770
|
this.pendingPublishPromises = new Map();
|
21134
21771
|
this.participantTrackPermissions = [];
|
21135
21772
|
this.allParticipantsAllowedToSubscribe = true;
|
21773
|
+
this.encryptionType = Encryption_Type.NONE;
|
21136
21774
|
this.handleReconnecting = () => {
|
21137
21775
|
if (!this.reconnectFuture) {
|
21138
21776
|
this.reconnectFuture = new Future();
|
@@ -21288,6 +21926,7 @@ class LocalParticipant extends Participant {
|
|
21288
21926
|
this.engine = engine;
|
21289
21927
|
this.roomOptions = options;
|
21290
21928
|
this.setupEngine(engine);
|
21929
|
+
this.activeDeviceMap = new Map();
|
21291
21930
|
}
|
21292
21931
|
get lastCameraError() {
|
21293
21932
|
return this.cameraError;
|
@@ -21372,6 +22011,22 @@ class LocalParticipant extends Participant {
|
|
21372
22011
|
setScreenShareEnabled(enabled, options, publishOptions) {
|
21373
22012
|
return this.setTrackEnabled(Track.Source.ScreenShare, enabled, options, publishOptions);
|
21374
22013
|
}
|
22014
|
+
/** @internal */
|
22015
|
+
setPermissions(permissions) {
|
22016
|
+
const prevPermissions = this.permissions;
|
22017
|
+
const changed = super.setPermissions(permissions);
|
22018
|
+
if (changed && prevPermissions) {
|
22019
|
+
this.emit(ParticipantEvent.ParticipantPermissionsChanged, prevPermissions);
|
22020
|
+
}
|
22021
|
+
return changed;
|
22022
|
+
}
|
22023
|
+
/** @internal */
|
22024
|
+
setE2EEEnabled(enabled) {
|
22025
|
+
return __awaiter(this, void 0, void 0, function* () {
|
22026
|
+
this.encryptionType = enabled ? Encryption_Type.GCM : Encryption_Type.NONE;
|
22027
|
+
yield this.republishAllTracks(undefined, false);
|
22028
|
+
});
|
22029
|
+
}
|
21375
22030
|
setTrackEnabled(source, enabled, options, publishOptions) {
|
21376
22031
|
var _a, _b;
|
21377
22032
|
return __awaiter(this, void 0, void 0, function* () {
|
@@ -21596,14 +22251,35 @@ class LocalParticipant extends Participant {
|
|
21596
22251
|
if (track instanceof LocalTrack && this.pendingPublishPromises.has(track)) {
|
21597
22252
|
yield this.pendingPublishPromises.get(track);
|
21598
22253
|
}
|
22254
|
+
let defaultConstraints;
|
22255
|
+
if (track instanceof MediaStreamTrack) {
|
22256
|
+
defaultConstraints = track.getConstraints();
|
22257
|
+
} else {
|
22258
|
+
// we want to access constraints directly as `track.mediaStreamTrack`
|
22259
|
+
// might be pointing to a non-device track (e.g. processed track) already
|
22260
|
+
defaultConstraints = track.constraints;
|
22261
|
+
let deviceKind = undefined;
|
22262
|
+
switch (track.source) {
|
22263
|
+
case Track.Source.Microphone:
|
22264
|
+
deviceKind = 'audioinput';
|
22265
|
+
break;
|
22266
|
+
case Track.Source.Camera:
|
22267
|
+
deviceKind = 'videoinput';
|
22268
|
+
}
|
22269
|
+
if (deviceKind && this.activeDeviceMap.has(deviceKind)) {
|
22270
|
+
defaultConstraints = Object.assign(Object.assign({}, defaultConstraints), {
|
22271
|
+
deviceId: this.activeDeviceMap.get(deviceKind)
|
22272
|
+
});
|
22273
|
+
}
|
22274
|
+
}
|
21599
22275
|
// convert raw media track into audio or video track
|
21600
22276
|
if (track instanceof MediaStreamTrack) {
|
21601
22277
|
switch (track.kind) {
|
21602
22278
|
case 'audio':
|
21603
|
-
track = new LocalAudioTrack(track,
|
22279
|
+
track = new LocalAudioTrack(track, defaultConstraints, true);
|
21604
22280
|
break;
|
21605
22281
|
case 'video':
|
21606
|
-
track = new LocalVideoTrack(track,
|
22282
|
+
track = new LocalVideoTrack(track, defaultConstraints, true);
|
21607
22283
|
break;
|
21608
22284
|
default:
|
21609
22285
|
throw new TrackInvalidError("unsupported MediaStreamTrack kind ".concat(track.kind));
|
@@ -21642,6 +22318,11 @@ class LocalParticipant extends Participant {
|
|
21642
22318
|
(_d = options.red) !== null && _d !== void 0 ? _d : options.red = false;
|
21643
22319
|
}
|
21644
22320
|
const opts = Object.assign(Object.assign({}, this.roomOptions.publishDefaults), options);
|
22321
|
+
// disable simulcast if e2ee is set on safari
|
22322
|
+
if (isSafari() && this.roomOptions.e2ee) {
|
22323
|
+
livekitLogger.info("End-to-end encryption is set up, simulcast publishing will be disabled on Safari");
|
22324
|
+
opts.simulcast = false;
|
22325
|
+
}
|
21645
22326
|
if (opts.source) {
|
21646
22327
|
track.source = opts.source;
|
21647
22328
|
}
|
@@ -21658,7 +22339,7 @@ class LocalParticipant extends Participant {
|
|
21658
22339
|
});
|
21659
22340
|
}
|
21660
22341
|
publish(track, opts, options, isStereo) {
|
21661
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o
|
22342
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o;
|
21662
22343
|
return __awaiter(this, void 0, void 0, function* () {
|
21663
22344
|
const existingTrackOfSource = Array.from(this.tracks.values()).find(publishedTrack => track instanceof LocalTrack && publishedTrack.source === track.source);
|
21664
22345
|
if (existingTrackOfSource && track.source !== Track.Source.Unknown) {
|
@@ -21705,8 +22386,9 @@ class LocalParticipant extends Participant {
|
|
21705
22386
|
muted: track.isMuted,
|
21706
22387
|
source: Track.sourceToProto(track.source),
|
21707
22388
|
disableDtx: !((_a = opts.dtx) !== null && _a !== void 0 ? _a : true),
|
21708
|
-
|
21709
|
-
|
22389
|
+
encryption: this.encryptionType,
|
22390
|
+
stereo: isStereo
|
22391
|
+
// disableRed: !(opts.red ?? true),
|
21710
22392
|
});
|
21711
22393
|
// compute encodings and layers for video
|
21712
22394
|
let encodings;
|
@@ -21721,7 +22403,7 @@ class LocalParticipant extends Participant {
|
|
21721
22403
|
} catch (e) {
|
21722
22404
|
// use defaults, it's quite painful for congestion control without simulcast
|
21723
22405
|
// so using default dims according to publish settings
|
21724
|
-
const defaultRes = (
|
22406
|
+
const defaultRes = (_c = (_b = this.roomOptions.videoCaptureDefaults) === null || _b === void 0 ? void 0 : _b.resolution) !== null && _c !== void 0 ? _c : VideoPresets.h720.resolution;
|
21725
22407
|
dims = {
|
21726
22408
|
width: defaultRes.width,
|
21727
22409
|
height: defaultRes.height
|
@@ -21736,7 +22418,7 @@ class LocalParticipant extends Participant {
|
|
21736
22418
|
if (track instanceof LocalVideoTrack) {
|
21737
22419
|
if (isSVCCodec(opts.videoCodec)) {
|
21738
22420
|
// set scalabilityMode to 'L3T3_KEY' by default
|
21739
|
-
opts.scalabilityMode = (
|
22421
|
+
opts.scalabilityMode = (_d = opts.scalabilityMode) !== null && _d !== void 0 ? _d : 'L3T3_KEY';
|
21740
22422
|
}
|
21741
22423
|
// set up backup
|
21742
22424
|
if (opts.videoCodec && opts.backupCodec && opts.videoCodec !== opts.backupCodec.codec) {
|
@@ -21758,7 +22440,7 @@ class LocalParticipant extends Participant {
|
|
21758
22440
|
req.simulcastCodecs = [{
|
21759
22441
|
codec: opts.videoCodec,
|
21760
22442
|
cid: track.mediaStreamTrack.id,
|
21761
|
-
enableSimulcastLayers: (
|
22443
|
+
enableSimulcastLayers: (_e = opts.simulcast) !== null && _e !== void 0 ? _e : false
|
21762
22444
|
}];
|
21763
22445
|
}
|
21764
22446
|
}
|
@@ -21766,9 +22448,9 @@ class LocalParticipant extends Participant {
|
|
21766
22448
|
req.layers = videoLayersFromEncodings(req.width, req.height, encodings, isSVCCodec(opts.videoCodec));
|
21767
22449
|
} else if (track.kind === Track.Kind.Audio) {
|
21768
22450
|
encodings = [{
|
21769
|
-
maxBitrate: (
|
21770
|
-
priority: (
|
21771
|
-
networkPriority: (
|
22451
|
+
maxBitrate: (_g = (_f = opts.audioPreset) === null || _f === void 0 ? void 0 : _f.maxBitrate) !== null && _g !== void 0 ? _g : opts.audioBitrate,
|
22452
|
+
priority: (_j = (_h = opts.audioPreset) === null || _h === void 0 ? void 0 : _h.priority) !== null && _j !== void 0 ? _j : 'high',
|
22453
|
+
networkPriority: (_l = (_k = opts.audioPreset) === null || _k === void 0 ? void 0 : _k.priority) !== null && _l !== void 0 ? _l : 'high'
|
21772
22454
|
}];
|
21773
22455
|
}
|
21774
22456
|
if (!this.engine || this.engine.isClosed) {
|
@@ -21830,10 +22512,10 @@ class LocalParticipant extends Participant {
 this.engine.publisher.setTrackCodecBitrate({
 transceiver: trackTransceiver,
 codec: 'opus',
- maxbr: ((
+ maxbr: ((_m = encodings[0]) === null || _m === void 0 ? void 0 : _m.maxBitrate) ? encodings[0].maxBitrate / 1000 : 0
 });
 }
- } else if (track.codec && isSVCCodec(track.codec) && ((
+ } else if (track.codec && isSVCCodec(track.codec) && ((_o = encodings[0]) === null || _o === void 0 ? void 0 : _o.maxBitrate)) {
 this.engine.publisher.setTrackCodecBitrate({
 cid: req.cid,
 codec: track.codec,
@@ -21841,7 +22523,7 @@ class LocalParticipant extends Participant {
 });
 }
 }
- this.engine.negotiate();
+ yield this.engine.negotiate();
 if (track instanceof LocalVideoTrack) {
 track.startMonitor(this.engine.client);
 } else if (track instanceof LocalAudioTrack) {
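Negotiation is now awaited inside publishTrack (here and in the simulcast path in the next hunk), so the returned promise settles only after the renegotiation step has been run. A small sketch; screen share is just a convenient example of a manually published track:

    import { Room, createLocalScreenTracks } from 'livekit-client';

    async function shareScreen(room: Room) {
      // prompts the user for a screen; audio capture is skipped in this sketch
      const tracks = await createLocalScreenTracks({ audio: false });
      for (const track of tracks) {
        // resolves after the track has been added and negotiation has been awaited
        await room.localParticipant.publishTrack(track, { simulcast: true });
      }
    }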
@@ -21903,7 +22585,7 @@ class LocalParticipant extends Participant {
 }
 const ti = yield this.engine.addTrack(req);
 yield this.engine.createSimulcastSender(track, simulcastTrack, opts, encodings);
- this.engine.negotiate();
+ yield this.engine.negotiate();
 livekitLogger.debug("published ".concat(videoCodec, " for track ").concat(track.sid), {
 encodings,
 trackInfo: ti
@@ -21999,6 +22681,7 @@ class LocalParticipant extends Participant {
 });
 }
 republishAllTracks(options) {
+ let restartTracks = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
 return __awaiter(this, void 0, void 0, function* () {
 const localPubs = [];
 this.tracks.forEach(pub => {
@@ -22012,6 +22695,14 @@ class LocalParticipant extends Participant {
 yield Promise.all(localPubs.map(pub => __awaiter(this, void 0, void 0, function* () {
 const track = pub.track;
 yield this.unpublishTrack(track, false);
+ if (restartTracks && !track.isMuted && (track instanceof LocalAudioTrack || track instanceof LocalVideoTrack) && !track.isUserProvided) {
+ // generally we need to restart the track before publishing, often a full reconnect
+ // is necessary because computer had gone to sleep.
+ livekitLogger.debug('restarting existing track', {
+ track: pub.trackSid
+ });
+ yield track.restartTrack();
+ }
 yield this.publishTrack(track, pub.options);
 })));
 });
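republishAllTracks gains a second restartTracks argument (defaulting to true) that restarts unmuted, SDK-managed camera and microphone tracks before they are published again. Calling it directly is rarely needed, but a sketch of both forms looks like this:

    import { Room } from 'livekit-client';

    async function republish(room: Room) {
      // default: restart local audio/video tracks before republishing them
      await room.localParticipant.republishAllTracks();

      // opt out of the restart, keeping the existing MediaStreamTracks as they are
      await room.localParticipant.republishAllTracks(undefined, false);
    }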
@@ -22187,6 +22878,8 @@ class Room extends EventEmitter {
 * a [[RoomEvent.ActiveSpeakersChanged]] event is fired
 */
 this.activeSpeakers = [];
+ /** reflects the sender encryption status of the local participant */
+ this.isE2EEEnabled = false;
 this.audioEnabled = true;
 this.connect = (url, token, opts) => __awaiter(this, void 0, void 0, function* () {
 // In case a disconnect called happened right before the connect call, make sure the disconnect is completed first by awaiting its lock
@@ -22247,7 +22940,8 @@ class Room extends EventEmitter {
 autoSubscribe: connectOptions.autoSubscribe,
 publishOnly: connectOptions.publishOnly,
 adaptiveStream: typeof roomOptions.adaptiveStream === 'object' ? true : roomOptions.adaptiveStream,
- maxRetries: connectOptions.maxRetries
+ maxRetries: connectOptions.maxRetries,
+ e2eeEnabled: !!this.e2eeManager
 }, abortController.signal);
 let serverInfo = joinResponse.serverInfo;
 if (!serverInfo) {
@@ -22637,6 +23331,7 @@ class Room extends EventEmitter {
 this.emit(RoomEvent.TrackUnmuted, pub, this.localParticipant);
 };
 this.onLocalTrackPublished = pub => __awaiter(this, void 0, void 0, function* () {
+ var _j;
 this.emit(RoomEvent.LocalTrackPublished, pub, this.localParticipant);
 if (pub.track instanceof LocalAudioTrack) {
 const trackIsSilent = yield pub.track.checkForSilence();
@@ -22644,6 +23339,12 @@ class Room extends EventEmitter {
 this.emit(RoomEvent.LocalAudioSilenceDetected, pub);
 }
 }
+ const deviceId = yield (_j = pub.track) === null || _j === void 0 ? void 0 : _j.getDeviceId();
+ const deviceKind = sourceToKind(pub.source);
+ if (deviceKind && deviceId && deviceId !== this.localParticipant.activeDeviceMap.get(deviceKind)) {
+ this.localParticipant.activeDeviceMap.set(deviceKind, deviceId);
+ this.emit(RoomEvent.ActiveDeviceChanged, deviceKind, deviceId);
+ }
 });
 this.onLocalTrackUnpublished = pub => {
 this.emit(RoomEvent.LocalTrackUnpublished, pub, this.localParticipant);
@@ -22666,17 +23367,45 @@ class Room extends EventEmitter {
 this.options.publishDefaults = Object.assign(Object.assign({}, publishDefaults), options === null || options === void 0 ? void 0 : options.publishDefaults);
 this.maybeCreateEngine();
 this.disconnectLock = new Mutex();
- this.
+ this.localParticipant = new LocalParticipant('', '', this.engine, this.options);
 if (this.options.videoCaptureDefaults.deviceId) {
- this.activeDeviceMap.set('videoinput', unwrapConstraint(this.options.videoCaptureDefaults.deviceId));
+ this.localParticipant.activeDeviceMap.set('videoinput', unwrapConstraint(this.options.videoCaptureDefaults.deviceId));
 }
 if (this.options.audioCaptureDefaults.deviceId) {
- this.activeDeviceMap.set('audioinput', unwrapConstraint(this.options.audioCaptureDefaults.deviceId));
+ this.localParticipant.activeDeviceMap.set('audioinput', unwrapConstraint(this.options.audioCaptureDefaults.deviceId));
 }
 if ((_a = this.options.audioOutput) === null || _a === void 0 ? void 0 : _a.deviceId) {
 this.switchActiveDevice('audiooutput', unwrapConstraint(this.options.audioOutput.deviceId));
 }
-
+ if (this.options.e2ee) {
+ this.setupE2EE();
+ }
+ }
+ /**
+ * @experimental
+ */
+ setE2EEEnabled(enabled) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (this.e2eeManager) {
+ yield Promise.all([this.localParticipant.setE2EEEnabled(enabled), this.e2eeManager.setParticipantCryptorEnabled(enabled)]);
+ } else {
+ throw Error('e2ee not configured, please set e2ee settings within the room options');
+ }
+ });
+ }
+ setupE2EE() {
+ var _a;
+ if (this.options.e2ee) {
+ this.e2eeManager = new E2EEManager(this.options.e2ee);
+ this.e2eeManager.on(EncryptionEvent.ParticipantEncryptionStatusChanged, (enabled, participant) => {
+ if (participant instanceof LocalParticipant) {
+ this.isE2EEEnabled = enabled;
+ }
+ this.emit(RoomEvent.ParticipantEncryptionStatusChanged, enabled, participant);
+ });
+ this.e2eeManager.on(EncryptionEvent.Error, error => this.emit(RoomEvent.EncryptionError, error));
+ (_a = this.e2eeManager) === null || _a === void 0 ? void 0 : _a.setup(this);
+ }
 }
 /**
 * if the current room has a participant with `recorder: true` in its JWT grant
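This is the core of the new end-to-end encryption wiring: RoomOptions.e2ee feeds an E2EEManager, Room.setE2EEEnabled() toggles both the local participant and the cryptors, and status/errors surface as RoomEvent.ParticipantEncryptionStatusChanged and RoomEvent.EncryptionError. A minimal sketch, assuming the e2ee option takes a key provider plus the new worker bundle; the worker entry point below is an assumption about how the worker is exposed, so adjust it to your bundler setup:

    import { ExternalE2EEKeyProvider, Room, RoomEvent, isE2EESupported } from 'livekit-client';

    async function connectWithE2EE(wsUrl: string, token: string, passphrase: string) {
      if (!isE2EESupported()) {
        throw new Error('this browser supports neither insertable streams nor script transforms');
      }
      const keyProvider = new ExternalE2EEKeyProvider();
      const room = new Room({
        e2ee: {
          keyProvider,
          // assumed worker entry point; resolved by the bundler
          worker: new Worker(new URL('livekit-client/e2ee-worker', import.meta.url)),
        },
      });
      room
        .on(RoomEvent.ParticipantEncryptionStatusChanged, (enabled, participant) => {
          console.debug('encryption status', participant?.identity, enabled);
        })
        .on(RoomEvent.EncryptionError, (error) => console.error('e2ee error', error));

      await keyProvider.setKey(passphrase); // provide the shared key before enabling encryption
      await room.connect(wsUrl, token);
      await room.setE2EEEnabled(true); // throws if no e2ee options were configured
      return room;
    }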
@@ -22713,14 +23442,7 @@ class Room extends EventEmitter {
 return;
 }
 this.engine = new RTCEngine(this.options);
- this.engine.
- this.engine.client.onRoomUpdate = this.handleRoomUpdate;
- this.engine.client.onSpeakersChanged = this.handleSpeakersChanged;
- this.engine.client.onStreamStateUpdate = this.handleStreamStateUpdate;
- this.engine.client.onSubscriptionPermissionUpdate = this.handleSubscriptionPermissionUpdate;
- this.engine.client.onConnectionQuality = this.handleConnectionQualityUpdate;
- this.engine.client.onSubscriptionError = this.handleSubscriptionError;
- this.engine.on(EngineEvent.MediaTrackAdded, (mediaTrack, stream, receiver) => {
+ this.engine.on(EngineEvent.ParticipantUpdate, this.handleParticipantUpdates).on(EngineEvent.RoomUpdate, this.handleRoomUpdate).on(EngineEvent.SpeakersChanged, this.handleSpeakersChanged).on(EngineEvent.StreamStateChanged, this.handleStreamStateUpdate).on(EngineEvent.ConnectionQualityUpdate, this.handleConnectionQualityUpdate).on(EngineEvent.SubscriptionError, this.handleSubscriptionError).on(EngineEvent.SubscriptionPermissionUpdate, this.handleSubscriptionPermissionUpdate).on(EngineEvent.MediaTrackAdded, (mediaTrack, stream, receiver) => {
 this.onTrackAdded(mediaTrack, stream, receiver);
 }).on(EngineEvent.Disconnected, reason => {
 this.handleDisconnect(this.options.stopLocalTrackOnUnpublish, reason);
@@ -22749,6 +23471,9 @@ class Room extends EventEmitter {
 if (this.localParticipant) {
 this.localParticipant.setupEngine(this.engine);
 }
+ if (this.e2eeManager) {
+ this.e2eeManager.setupEngine(this.engine);
+ }
 }
 /**
 * getLocalDevices abstracts navigator.mediaDevices.enumerateDevices.
@@ -22803,10 +23528,8 @@ class Room extends EventEmitter {
 let req;
 switch (scenario) {
 case 'signal-reconnect':
-
-
- this.engine.client.onClose('simulate disconnect');
- }
+ // @ts-expect-error function is private
+ yield this.engine.client.handleOnClose('simulate disconnect');
 break;
 case 'speaker':
 req = SimulateScenario.fromPartial({
@@ -22842,17 +23565,13 @@ class Room extends EventEmitter {
 break;
 case 'resume-reconnect':
 this.engine.failNext();
-
-
- this.engine.client.onClose('simulate resume-reconnect');
- }
+ // @ts-expect-error function is private
+ yield this.engine.client.handleOnClose('simulate resume-disconnect');
 break;
 case 'full-reconnect':
 this.engine.fullReconnectOnNext = true;
-
-
- this.engine.client.onClose('simulate full-reconnect');
- }
+ // @ts-expect-error function is private
+ yield this.engine.client.handleOnClose('simulate full-reconnect');
 break;
 case 'force-tcp':
 case 'force-tls':
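The simulate branches now reach into the signal client's private handleOnClose instead of assigning onClose callbacks; from application code the public method is unchanged. A sketch of driving the reconnect scenarios while observing the room's connection events, intended only for testing:

    import { Room, RoomEvent } from 'livekit-client';

    async function exerciseReconnects(room: Room) {
      room
        .on(RoomEvent.Reconnecting, () => console.log('reconnecting'))
        .on(RoomEvent.Reconnected, () => console.log('reconnected'));

      await room.simulateScenario('signal-reconnect'); // drop only the signal connection
      await room.simulateScenario('resume-reconnect'); // force a resume-style reconnect
      await room.simulateScenario('full-reconnect');   // force a full reconnect
    }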
@@ -22949,7 +23668,7 @@ class Room extends EventEmitter {
 return (_b = (_a = this.options.audioOutput) === null || _a === void 0 ? void 0 : _a.deviceId) !== null && _b !== void 0 ? _b : '';
 }
 getActiveDevice(kind) {
- return this.activeDeviceMap.get(kind);
+ return this.localParticipant.activeDeviceMap.get(kind);
 }
 /**
 * Switches all active devices used in this room to the given device.
@@ -23022,7 +23741,7 @@ class Room extends EventEmitter {
 }
 }
 if (deviceHasChanged && success) {
- this.activeDeviceMap.set(kind, deviceId);
+ this.localParticipant.activeDeviceMap.set(kind, deviceId);
 this.emit(RoomEvent.ActiveDeviceChanged, kind, deviceId);
 }
 return success;
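Active-device state now lives on localParticipant.activeDeviceMap, with Room.getActiveDevice() and the ActiveDeviceChanged event layered on top; per the earlier hunk, the event also fires when a newly published track implies a device change. Sketch:

    import { Room, RoomEvent } from 'livekit-client';

    async function switchMicrophone(room: Room) {
      room.on(RoomEvent.ActiveDeviceChanged, (kind, deviceId) => {
        console.log(`active ${kind} changed to ${deviceId}`);
      });

      // enumerate microphones and switch every audio track in the room to the first one
      const mics = await Room.getLocalDevices('audioinput');
      if (mics.length > 0) {
        await room.switchActiveDevice('audioinput', mics[0].deviceId);
        console.log('active mic:', room.getActiveDevice('audioinput'));
      }
    }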
@@ -23379,7 +24098,7 @@ class Room extends EventEmitter {
 sid: Math.floor(Math.random() * 10000).toString(),
 type: TrackType.AUDIO,
 name: 'video-dummy'
- }), new LocalVideoTrack(publishOptions.useRealTracks ? (yield navigator.mediaDevices.getUserMedia({
+ }), new LocalVideoTrack(publishOptions.useRealTracks ? (yield window.navigator.mediaDevices.getUserMedia({
 video: true
 })).getVideoTracks()[0] : createDummyVideoStreamTrack((_a = 160 * participantOptions.aspectRatios[0]) !== null && _a !== void 0 ? _a : 1, 160, true, true)));
 // @ts-ignore
@@ -23792,7 +24511,8 @@ class TURNCheck extends Checker {
 const signalClient = new SignalClient();
 const joinRes = yield signalClient.join(this.url, this.token, {
 autoSubscribe: true,
- maxRetries: 0
+ maxRetries: 0,
+ e2eeEnabled: false
 });
 let hasTLS = false;
 let hasTURN = false;
@@ -23863,7 +24583,8 @@ class WebSocketCheck extends Checker {
 let signalClient = new SignalClient();
 const joinRes = yield signalClient.join(this.url, this.token, {
 autoSubscribe: true,
- maxRetries: 0
+ maxRetries: 0,
+ e2eeEnabled: false
 });
 this.appendMessage("Connected to server, version ".concat(joinRes.serverVersion, "."));
 yield signalClient.close();
@@ -23943,5 +24664,86 @@ class ConnectionCheck extends EventEmitter {
 }
 }
 
-
+ /**
+ * Try to analyze the local track to determine the facing mode of a track.
+ *
+ * @remarks
+ * There is no property supported by all browsers to detect whether a video track originated from a user- or environment-facing camera device.
+ * For this reason, we use the `facingMode` property when available, but will fall back on a string-based analysis of the device label to determine the facing mode.
+ * If both methods fail, the default facing mode will be used.
+ *
+ * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode | MDN docs on facingMode}
+ * @experimental
+ */
+ function facingModeFromLocalTrack(localTrack) {
+ let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+ var _a;
+ const track = localTrack instanceof LocalTrack ? localTrack.mediaStreamTrack : localTrack;
+ const trackSettings = track.getSettings();
+ let result = {
+ facingMode: (_a = options.defaultFacingMode) !== null && _a !== void 0 ? _a : 'user',
+ confidence: 'low'
+ };
+ // 1. Try to get facingMode from track settings.
+ if ('facingMode' in trackSettings) {
+ const rawFacingMode = trackSettings.facingMode;
+ log$1.debug('rawFacingMode', {
+ rawFacingMode
+ });
+ if (rawFacingMode && typeof rawFacingMode === 'string' && isFacingModeValue(rawFacingMode)) {
+ result = {
+ facingMode: rawFacingMode,
+ confidence: 'high'
+ };
+ }
+ }
+ // 2. If we don't have a high confidence we try to get the facing mode from the device label.
+ if (['low', 'medium'].includes(result.confidence)) {
+ log$1.debug("Try to get facing mode from device label: (".concat(track.label, ")"));
+ const labelAnalysisResult = facingModeFromDeviceLabel(track.label);
+ if (labelAnalysisResult !== undefined) {
+ result = labelAnalysisResult;
+ }
+ }
+ return result;
+ }
+ const knownDeviceLabels = new Map([['obs virtual camera', {
+ facingMode: 'environment',
+ confidence: 'medium'
+ }]]);
+ const knownDeviceLabelSections = new Map([['iphone', {
+ facingMode: 'environment',
+ confidence: 'medium'
+ }], ['ipad', {
+ facingMode: 'environment',
+ confidence: 'medium'
+ }]]);
+ /**
+ * Attempt to analyze the device label to determine the facing mode.
+ *
+ * @experimental
+ */
+ function facingModeFromDeviceLabel(deviceLabel) {
+ var _a;
+ const label = deviceLabel.trim().toLowerCase();
+ // Empty string is a valid device label but we can't infer anything from it.
+ if (label === '') {
+ return undefined;
+ }
+ // Can we match against widely known device labels.
+ if (knownDeviceLabels.has(label)) {
+ return knownDeviceLabels.get(label);
+ }
+ // Can we match against sections of the device label.
+ return (_a = Array.from(knownDeviceLabelSections.entries()).find(_ref => {
+ let [section] = _ref;
+ return label.includes(section);
+ })) === null || _a === void 0 ? void 0 : _a[1];
+ }
+ function isFacingModeValue(item) {
+ const allowedValues = ['user', 'environment', 'left', 'right'];
+ return item === undefined || allowedValues.includes(item);
+ }
+
+ export { AudioPresets, BaseKeyProvider, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, CryptorEvent, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EncryptionEvent, EngineEvent, ExternalE2EEKeyProvider, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, MediaDeviceFailure, NegotiationError, Participant, ParticipantEvent, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, RoomState, ScreenSharePresets, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createE2EEKey, createKeyMaterialFromString, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, deriveKeys, detachTrack, facingModeFromDeviceLabel, facingModeFromLocalTrack, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, importKey, isBackupCodec, isBrowserSupported, isCodecEqual, isE2EESupported, isInsertableStreamSupported, isScriptTransformSupported, isVideoFrame, mimeTypeToVideoCodecString, protocolVersion, ratchet, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, supportsVP9, version, videoCodecs };
 //# sourceMappingURL=livekit-client.esm.mjs.map