livekit-client 2.5.0 → 2.5.2

Files changed (61)
  1. package/README.md +4 -0
  2. package/dist/livekit-client.e2ee.worker.js +1 -1
  3. package/dist/livekit-client.e2ee.worker.js.map +1 -1
  4. package/dist/livekit-client.e2ee.worker.mjs +4 -2
  5. package/dist/livekit-client.e2ee.worker.mjs.map +1 -1
  6. package/dist/livekit-client.esm.mjs +517 -269
  7. package/dist/livekit-client.esm.mjs.map +1 -1
  8. package/dist/livekit-client.umd.js +1 -1
  9. package/dist/livekit-client.umd.js.map +1 -1
  10. package/dist/src/e2ee/worker/FrameCryptor.d.ts.map +1 -1
  11. package/dist/src/room/PCTransport.d.ts.map +1 -1
  12. package/dist/src/room/PCTransportManager.d.ts +1 -0
  13. package/dist/src/room/PCTransportManager.d.ts.map +1 -1
  14. package/dist/src/room/Room.d.ts +8 -3
  15. package/dist/src/room/Room.d.ts.map +1 -1
  16. package/dist/src/room/events.d.ts +10 -2
  17. package/dist/src/room/events.d.ts.map +1 -1
  18. package/dist/src/room/participant/LocalParticipant.d.ts +4 -1
  19. package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
  20. package/dist/src/room/participant/Participant.d.ts +1 -0
  21. package/dist/src/room/participant/Participant.d.ts.map +1 -1
  22. package/dist/src/room/timers.d.ts +4 -4
  23. package/dist/src/room/timers.d.ts.map +1 -1
  24. package/dist/src/room/track/LocalTrack.d.ts +1 -1
  25. package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
  26. package/dist/src/room/track/create.d.ts +7 -0
  27. package/dist/src/room/track/create.d.ts.map +1 -1
  28. package/dist/src/room/track/options.d.ts +1 -1
  29. package/dist/src/room/types.d.ts +2 -0
  30. package/dist/src/room/types.d.ts.map +1 -1
  31. package/dist/src/room/utils.d.ts +1 -1
  32. package/dist/src/room/utils.d.ts.map +1 -1
  33. package/dist/ts4.2/src/room/PCTransportManager.d.ts +1 -0
  34. package/dist/ts4.2/src/room/Room.d.ts +8 -3
  35. package/dist/ts4.2/src/room/events.d.ts +10 -2
  36. package/dist/ts4.2/src/room/participant/LocalParticipant.d.ts +4 -1
  37. package/dist/ts4.2/src/room/participant/Participant.d.ts +1 -0
  38. package/dist/ts4.2/src/room/timers.d.ts +4 -4
  39. package/dist/ts4.2/src/room/track/LocalTrack.d.ts +1 -1
  40. package/dist/ts4.2/src/room/track/create.d.ts +7 -0
  41. package/dist/ts4.2/src/room/track/options.d.ts +1 -1
  42. package/dist/ts4.2/src/room/types.d.ts +2 -0
  43. package/dist/ts4.2/src/room/utils.d.ts +1 -1
  44. package/package.json +9 -9
  45. package/src/connectionHelper/checks/Checker.ts +1 -1
  46. package/src/e2ee/worker/FrameCryptor.ts +3 -1
  47. package/src/room/PCTransport.ts +3 -1
  48. package/src/room/PCTransportManager.ts +12 -4
  49. package/src/room/RTCEngine.ts +1 -1
  50. package/src/room/Room.ts +69 -7
  51. package/src/room/events.ts +10 -0
  52. package/src/room/participant/LocalParticipant.ts +126 -84
  53. package/src/room/participant/Participant.ts +1 -0
  54. package/src/room/timers.ts +15 -6
  55. package/src/room/track/LocalTrack.ts +4 -2
  56. package/src/room/track/LocalVideoTrack.test.ts +60 -0
  57. package/src/room/track/LocalVideoTrack.ts +1 -1
  58. package/src/room/track/create.ts +27 -8
  59. package/src/room/track/options.ts +1 -1
  60. package/src/room/types.ts +2 -0
  61. package/src/room/utils.ts +10 -0
@@ -3397,6 +3397,122 @@ target => {
  }
  });
 
+ // @generated by protoc-gen-es v1.10.0 with parameter "target=js+dts"
+ // @generated from file livekit_metrics.proto (package livekit, syntax proto3)
+ /* eslint-disable */
+ // @ts-nocheck
+
+
+ /**
+ * @generated from message livekit.MetricsBatch
+ */
+ const MetricsBatch = /*@__PURE__*/proto3.makeMessageType("livekit.MetricsBatch", () => [{
+ no: 1,
+ name: "str_data",
+ kind: "scalar",
+ T: 9 /* ScalarType.STRING */,
+ repeated: true
+ }, {
+ no: 2,
+ name: "time_series",
+ kind: "message",
+ T: TimeSeriesMetric,
+ repeated: true
+ }, {
+ no: 3,
+ name: "events",
+ kind: "message",
+ T: EventMetric,
+ repeated: true
+ }]);
+
+ /**
+ * @generated from message livekit.TimeSeriesMetric
+ */
+ const TimeSeriesMetric = /*@__PURE__*/proto3.makeMessageType("livekit.TimeSeriesMetric", () => [{
+ no: 1,
+ name: "label",
+ kind: "scalar",
+ T: 13 /* ScalarType.UINT32 */
+ }, {
+ no: 2,
+ name: "participant_identity",
+ kind: "scalar",
+ T: 13 /* ScalarType.UINT32 */
+ }, {
+ no: 3,
+ name: "track_sid",
+ kind: "scalar",
+ T: 13 /* ScalarType.UINT32 */
+ }, {
+ no: 4,
+ name: "start_timestamp",
+ kind: "scalar",
+ T: 3 /* ScalarType.INT64 */
+ }, {
+ no: 5,
+ name: "end_timestamp",
+ kind: "scalar",
+ T: 3 /* ScalarType.INT64 */
+ }, {
+ no: 6,
+ name: "samples",
+ kind: "message",
+ T: MetricSample,
+ repeated: true
+ }]);
+
+ /**
+ * @generated from message livekit.MetricSample
+ */
+ const MetricSample = /*@__PURE__*/proto3.makeMessageType("livekit.MetricSample", () => [{
+ no: 1,
+ name: "timestamp",
+ kind: "scalar",
+ T: 3 /* ScalarType.INT64 */
+ }, {
+ no: 2,
+ name: "value",
+ kind: "scalar",
+ T: 2 /* ScalarType.FLOAT */
+ }]);
+
+ /**
+ * @generated from message livekit.EventMetric
+ */
+ const EventMetric = /*@__PURE__*/proto3.makeMessageType("livekit.EventMetric", () => [{
+ no: 1,
+ name: "label",
+ kind: "scalar",
+ T: 13 /* ScalarType.UINT32 */
+ }, {
+ no: 2,
+ name: "participant_identity",
+ kind: "scalar",
+ T: 13 /* ScalarType.UINT32 */
+ }, {
+ no: 3,
+ name: "track_sid",
+ kind: "scalar",
+ T: 13 /* ScalarType.UINT32 */
+ }, {
+ no: 4,
+ name: "start_timestamp",
+ kind: "scalar",
+ T: 3 /* ScalarType.INT64 */
+ }, {
+ no: 5,
+ name: "end_timestamp",
+ kind: "scalar",
+ T: 3 /* ScalarType.INT64 */,
+ opt: true
+ }, {
+ no: 6,
+ name: "metadata",
+ kind: "scalar",
+ T: 9 /* ScalarType.STRING */
+ }]);
+
  // Copyright 2023 LiveKit, Inc.
  //
  // Licensed under the Apache License, Version 2.0 (the "License");
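The hunk above vendors the generated livekit_metrics.proto messages (MetricsBatch, TimeSeriesMetric, MetricSample, EventMetric) into the bundle. They are internal to livekit-client, but the sketch below shows how such protobuf-es generated types behave, assuming the standard @bufbuild/protobuf v1 runtime; the standalone MetricSample definition and the sample values are illustrative only.

```ts
import { proto3 } from '@bufbuild/protobuf';

// Illustrative stand-in for the generated MetricSample type shown in the hunk above.
const MetricSample = proto3.makeMessageType('livekit.MetricSample', () => [
  { no: 1, name: 'timestamp', kind: 'scalar', T: 3 /* ScalarType.INT64 */ },
  { no: 2, name: 'value', kind: 'scalar', T: 2 /* ScalarType.FLOAT */ },
]);

// Generated types are plain classes: construct from an init object
// (64-bit integer fields are bigint) and serialize with toBinary().
const sample = new MetricSample({ timestamp: BigInt(Date.now()), value: 0.5 });
const bytes = sample.toBinary();
console.log(MetricSample.fromBinary(bytes).value); // 0.5
```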
@@ -3719,6 +3835,11 @@ const ParticipantPermission = /*@__PURE__*/proto3.makeMessageType("livekit.Parti
  name: "agent",
  kind: "scalar",
  T: 8 /* ScalarType.BOOL */
+ }, {
+ no: 12,
+ name: "can_subscribe_metrics",
+ kind: "scalar",
+ T: 8 /* ScalarType.BOOL */
  }]);
 
  /**
@@ -4058,6 +4179,12 @@ const DataPacket = /*@__PURE__*/proto3.makeMessageType("livekit.DataPacket", ()
  kind: "message",
  T: Transcription,
  oneof: "value"
+ }, {
+ no: 8,
+ name: "metrics",
+ kind: "message",
+ T: MetricsBatch,
+ oneof: "value"
  }]);
 
  /**
@@ -4964,6 +5091,17 @@ const JoinResponse = /*@__PURE__*/proto3.makeMessageType("livekit.JoinResponse",
  name: "sif_trailer",
  kind: "scalar",
  T: 12 /* ScalarType.BYTES */
+ }, {
+ no: 14,
+ name: "enabled_publish_codecs",
+ kind: "message",
+ T: Codec,
+ repeated: true
+ }, {
+ no: 15,
+ name: "fast_publish",
+ kind: "scalar",
+ T: 8 /* ScalarType.BOOL */
  }]);
 
  /**
@@ -6098,7 +6236,7 @@ LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
  OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
  PERFORMANCE OF THIS SOFTWARE.
  ***************************************************************************** */
- /* global Reflect, Promise, SuppressedError, Symbol */
+ /* global Reflect, Promise, SuppressedError, Symbol, Iterator */
 
 
  function __awaiter(thisArg, _arguments, P, generator) {
@@ -10182,6 +10320,10 @@ var RoomEvent;
  * args: (kind: MediaDeviceKind, deviceId: string)
  */
  RoomEvent["ActiveDeviceChanged"] = "activeDeviceChanged";
+ /**
+ * fired when the first remote participant has subscribed to the localParticipant's track
+ */
+ RoomEvent["LocalTrackSubscribed"] = "localTrackSubscribed";
  })(RoomEvent || (RoomEvent = {}));
  var ParticipantEvent;
  (function (ParticipantEvent) {
@@ -10345,6 +10487,10 @@ var ParticipantEvent;
  * When a participant's attributes changed, this event will be emitted with the changed attributes
  */
  ParticipantEvent["AttributesChanged"] = "attributesChanged";
+ /**
+ * fired on local participant only, when the first remote participant has subscribed to the track specified in the payload
+ */
+ ParticipantEvent["LocalTrackSubscribed"] = "localTrackSubscribed";
  })(ParticipantEvent || (ParticipantEvent = {}));
  /** @internal */
  var EngineEvent;
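Both enum hunks above add the new LocalTrackSubscribed event. A minimal listener sketch in TypeScript; the URL and token handling are placeholders, not part of the package:

```ts
import { ParticipantEvent, Room } from 'livekit-client';

async function connectAndWatch(url: string, token: string) {
  const room = new Room();
  await room.connect(url, token);

  // Fired on the local participant when the first remote participant
  // subscribes to one of its published tracks.
  room.localParticipant.on(ParticipantEvent.LocalTrackSubscribed, (pub) => {
    console.log('first subscriber attached to', pub.trackSid);
  });
}
```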
@@ -10568,7 +10714,7 @@ function getOSVersion(ua) {
  return ua.includes('mac os') ? getMatch(/\(.+?(\d+_\d+(:?_\d+)?)/, ua, 1).replace(/_/g, '.') : undefined;
  }
 
- var version$1 = "2.5.0";
+ var version$1 = "2.5.2";
 
  const version = version$1;
  const protocolVersion = 15;
@@ -10579,12 +10725,12 @@ const protocolVersion = 15;
  * that the timer fires on time.
  */
  class CriticalTimers {}
- // eslint-disable-next-line @typescript-eslint/no-implied-eval
  CriticalTimers.setTimeout = function () {
  return setTimeout(...arguments);
  };
+ CriticalTimers.setInterval =
  // eslint-disable-next-line @typescript-eslint/no-implied-eval
- CriticalTimers.setInterval = function () {
+ function () {
  return setInterval(...arguments);
  };
  CriticalTimers.clearTimeout = function () {
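CriticalTimers (touched above only to move an eslint directive) is the seam through which the SDK schedules timers that must fire on time. A sketch of overriding it with a worker-backed implementation; the worker-timers package and the presence of a clearInterval static are assumptions, not confirmed by this diff:

```ts
import { CriticalTimers } from 'livekit-client';
// Assumed drop-in timer implementation backed by a Web Worker, immune to
// background-tab throttling; any functions with the same shape would do.
import * as workerTimers from 'worker-timers';

CriticalTimers.setTimeout = ((fn: () => void, ms: number) =>
  workerTimers.setTimeout(fn, ms)) as typeof setTimeout;
CriticalTimers.setInterval = ((fn: () => void, ms: number) =>
  workerTimers.setInterval(fn, ms)) as typeof setInterval;
CriticalTimers.clearTimeout = ((id: number) => workerTimers.clearTimeout(id)) as typeof clearTimeout;
CriticalTimers.clearInterval = ((id: number) => workerTimers.clearInterval(id)) as typeof clearInterval;
```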
@@ -11696,7 +11842,7 @@ function toHttpUrl(url) {
  }
  return url;
  }
- function extractTranscriptionSegments(transcription) {
+ function extractTranscriptionSegments(transcription, firstReceivedTimesMap) {
  return transcription.segments.map(_ref => {
  let {
  id,
@@ -11706,13 +11852,23 @@ function extractTranscriptionSegments(transcription) {
  endTime,
  final
  } = _ref;
+ var _a;
+ const firstReceivedTime = (_a = firstReceivedTimesMap.get(id)) !== null && _a !== void 0 ? _a : Date.now();
+ const lastReceivedTime = Date.now();
+ if (final) {
+ firstReceivedTimesMap.delete(id);
+ } else {
+ firstReceivedTimesMap.set(id, firstReceivedTime);
+ }
  return {
  id,
  text,
  startTime: Number.parseInt(startTime.toString()),
  endTime: Number.parseInt(endTime.toString()),
  final,
- language
+ language,
+ firstReceivedTime,
+ lastReceivedTime
  };
  });
  }
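extractTranscriptionSegments now stamps each segment with client-side firstReceivedTime/lastReceivedTime values, tracked per segment id until the segment is final. A consumer sketch (handler signature taken from the Room emit further below):

```ts
import { Room, RoomEvent } from 'livekit-client';

function watchTranscriptions(room: Room) {
  room.on(RoomEvent.TranscriptionReceived, (segments, participant) => {
    for (const segment of segments) {
      // firstReceivedTime/lastReceivedTime are local wall-clock timestamps added
      // in this release; handy for aging out stale interim segments.
      const ageMs = Date.now() - segment.lastReceivedTime;
      console.log(
        `${participant?.identity ?? 'unknown'}: "${segment.text}"`,
        segment.final ? '(final)' : `(interim, ${ageMs}ms old)`,
      );
    }
  });
}
```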
@@ -11983,17 +12139,21 @@ class LocalTrack extends Track {
  * @returns DeviceID of the device that is currently being used for this track
  */
  getDeviceId() {
- return __awaiter(this, void 0, void 0, function* () {
- // screen share doesn't have a usable device id
- if (this.source === Track.Source.ScreenShare) {
- return;
- }
- const {
- deviceId,
- groupId
- } = this._mediaStreamTrack.getSettings();
- const kind = this.kind === Track.Kind.Audio ? 'audioinput' : 'videoinput';
- return DeviceManager.getInstance().normalizeDeviceId(kind, deviceId, groupId);
+ return __awaiter(this, arguments, void 0, function () {
+ var _this2 = this;
+ let normalize = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
+ return function* () {
+ // screen share doesn't have a usable device id
+ if (_this2.source === Track.Source.ScreenShare) {
+ return;
+ }
+ const {
+ deviceId,
+ groupId
+ } = _this2._mediaStreamTrack.getSettings();
+ const kind = _this2.kind === Track.Kind.Audio ? 'audioinput' : 'videoinput';
+ return normalize ? DeviceManager.getInstance().normalizeDeviceId(kind, deviceId, groupId) : deviceId;
+ }();
  });
  }
  mute() {
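getDeviceId() gains an optional normalize flag: the default still resolves Chrome's "default" device through DeviceManager, while getDeviceId(false) returns the raw id from MediaStreamTrack.getSettings(). A short sketch:

```ts
import { createLocalAudioTrack } from 'livekit-client';

async function logDeviceIds() {
  const track = await createLocalAudioTrack();
  const normalized = await track.getDeviceId();   // resolved to the underlying device id
  const raw = await track.getDeviceId(false);     // id exactly as reported by getSettings()
  console.log({ normalized, raw });
}
```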
@@ -12197,45 +12357,45 @@ class LocalTrack extends Track {
  */
  setProcessor(processor_1) {
  return __awaiter(this, arguments, void 0, function (processor) {
- var _this2 = this;
+ var _this3 = this;
  let showProcessedStreamLocally = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
  return function* () {
  var _a;
- const unlock = yield _this2.processorLock.lock();
+ const unlock = yield _this3.processorLock.lock();
  try {
- _this2.log.debug('setting up processor', _this2.logContext);
- const processorElement = document.createElement(_this2.kind);
+ _this3.log.debug('setting up processor', _this3.logContext);
+ const processorElement = document.createElement(_this3.kind);
  const processorOptions = {
- kind: _this2.kind,
- track: _this2._mediaStreamTrack,
+ kind: _this3.kind,
+ track: _this3._mediaStreamTrack,
  element: processorElement,
- audioContext: _this2.audioContext
+ audioContext: _this3.audioContext
  };
  yield processor.init(processorOptions);
- _this2.log.debug('processor initialized', _this2.logContext);
- if (_this2.processor) {
- yield _this2.stopProcessor();
+ _this3.log.debug('processor initialized', _this3.logContext);
+ if (_this3.processor) {
+ yield _this3.stopProcessor();
  }
- if (_this2.kind === 'unknown') {
+ if (_this3.kind === 'unknown') {
  throw TypeError('cannot set processor on track of unknown kind');
  }
- attachToElement(_this2._mediaStreamTrack, processorElement);
+ attachToElement(_this3._mediaStreamTrack, processorElement);
  processorElement.muted = true;
- processorElement.play().catch(error => _this2.log.error('failed to play processor element', Object.assign(Object.assign({}, _this2.logContext), {
+ processorElement.play().catch(error => _this3.log.error('failed to play processor element', Object.assign(Object.assign({}, _this3.logContext), {
  error
  })));
- _this2.processor = processor;
- _this2.processorElement = processorElement;
- if (_this2.processor.processedTrack) {
- for (const el of _this2.attachedElements) {
- if (el !== _this2.processorElement && showProcessedStreamLocally) {
- detachTrack(_this2._mediaStreamTrack, el);
- attachToElement(_this2.processor.processedTrack, el);
+ _this3.processor = processor;
+ _this3.processorElement = processorElement;
+ if (_this3.processor.processedTrack) {
+ for (const el of _this3.attachedElements) {
+ if (el !== _this3.processorElement && showProcessedStreamLocally) {
+ detachTrack(_this3._mediaStreamTrack, el);
+ attachToElement(_this3.processor.processedTrack, el);
  }
  }
- yield (_a = _this2.sender) === null || _a === void 0 ? void 0 : _a.replaceTrack(_this2.processor.processedTrack);
+ yield (_a = _this3.sender) === null || _a === void 0 ? void 0 : _a.replaceTrack(_this3.processor.processedTrack);
  }
- _this2.emit(TrackEvent.TrackProcessorUpdate, _this2.processor);
+ _this3.emit(TrackEvent.TrackProcessorUpdate, _this3.processor);
  } finally {
  unlock();
  }
@@ -12254,24 +12414,24 @@ class LocalTrack extends Track {
  */
  stopProcessor() {
  return __awaiter(this, arguments, void 0, function () {
- var _this3 = this;
+ var _this4 = this;
  let keepElement = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
  return function* () {
  var _a, _b;
- if (!_this3.processor) return;
- _this3.log.debug('stopping processor', _this3.logContext);
- (_a = _this3.processor.processedTrack) === null || _a === void 0 ? void 0 : _a.stop();
- yield _this3.processor.destroy();
- _this3.processor = undefined;
+ if (!_this4.processor) return;
+ _this4.log.debug('stopping processor', _this4.logContext);
+ (_a = _this4.processor.processedTrack) === null || _a === void 0 ? void 0 : _a.stop();
+ yield _this4.processor.destroy();
+ _this4.processor = undefined;
  if (!keepElement) {
- (_b = _this3.processorElement) === null || _b === void 0 ? void 0 : _b.remove();
- _this3.processorElement = undefined;
+ (_b = _this4.processorElement) === null || _b === void 0 ? void 0 : _b.remove();
+ _this4.processorElement = undefined;
  }
  // apply original track constraints in case the processor changed them
- yield _this3._mediaStreamTrack.applyConstraints(_this3._constraints);
+ yield _this4._mediaStreamTrack.applyConstraints(_this4._constraints);
  // force re-setting of the mediaStreamTrack on the sender
- yield _this3.setMediaStreamTrack(_this3._mediaStreamTrack, true);
- _this3.emit(TrackEvent.TrackProcessorUpdate);
+ yield _this4.setMediaStreamTrack(_this4._mediaStreamTrack, true);
+ _this4.emit(TrackEvent.TrackProcessorUpdate);
  }();
  });
  }
@@ -14013,6 +14173,7 @@ the first few seconds. So we use a 70% of target bitrate here as the start bitrate to
  eliminate this issue.
  */
  const startBitrateForSVC = 0.7;
+ const debounceInterval = 20;
  const PCEvents = {
  NegotiationStarted: 'negotiationStarted',
  NegotiationComplete: 'negotiationComplete',
@@ -14049,7 +14210,7 @@ class PCTransport extends eventsExports.EventEmitter {
  throw e;
  }
  }
- }), 100);
+ }), debounceInterval);
  this.close = () => {
  if (!this._pc) {
  return;
@@ -14644,6 +14805,7 @@ class PCTransportManager {
  };
  this.state = PCTransportState.NEW;
  this.connectionLock = new Mutex();
+ this.remoteOfferLock = new Mutex();
  }
  get logContext() {
  var _a, _b;
@@ -14716,10 +14878,15 @@ class PCTransportManager {
  sdp: sd.sdp,
  signalingState: this.subscriber.getSignallingState().toString()
  }));
- yield this.subscriber.setRemoteDescription(sd);
- // answer the offer
- const answer = yield this.subscriber.createAndSetAnswer();
- return answer;
+ const unlock = yield this.remoteOfferLock.lock();
+ try {
+ yield this.subscriber.setRemoteDescription(sd);
+ // answer the offer
+ const answer = yield this.subscriber.createAndSetAnswer();
+ return answer;
+ } finally {
+ unlock();
+ }
  });
  }
  updateConfiguration(config, iceRestart) {
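Remote offers on the subscriber connection are now serialized behind a Mutex so that overlapping offers cannot interleave their setRemoteDescription/createAndSetAnswer steps. The same Mutex class is exported; a sketch of the acquire/try/finally pattern used above:

```ts
import { Mutex } from 'livekit-client';

const lock = new Mutex();

// Mirrors the pattern in PCTransportManager: acquire, await the critical
// section, always release in finally.
async function withLock<T>(work: () => Promise<T>): Promise<T> {
  const unlock = await lock.lock();
  try {
    return await work();
  } finally {
    unlock();
  }
}
```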
@@ -16017,7 +16184,7 @@ function videoLayersFromEncodings(width, height, encodings, svc) {
  const bitratesRatio = sm.suffix == 'h' ? 2 : 3;
  for (let i = 0; i < sm.spatial; i += 1) {
  layers.push(new VideoLayer({
- quality: VideoQuality.HIGH - i,
+ quality: Math.min(VideoQuality.HIGH, sm.spatial - 1) - i,
  width: Math.ceil(width / Math.pow(resRatio, i)),
  height: Math.ceil(height / Math.pow(resRatio, i)),
  bitrate: encodings[0].maxBitrate ? Math.ceil(encodings[0].maxBitrate / Math.pow(bitratesRatio, i)) : 0,
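The quality assigned to each spatial layer is now clamped by the actual layer count, so streams with one or two spatial layers are labeled from LOW upwards instead of always starting at HIGH. An illustration of the new mapping (the helper function is illustrative, not part of the package):

```ts
import { VideoQuality } from 'livekit-client';

function layerQualities(spatialLayers: number): number[] {
  const qualities: number[] = [];
  for (let i = 0; i < spatialLayers; i += 1) {
    // same expression as in the hunk above
    qualities.push(Math.min(VideoQuality.HIGH, spatialLayers - 1) - i);
  }
  return qualities;
}

console.log(layerQualities(3)); // [2, 1, 0] => HIGH, MEDIUM, LOW (unchanged)
console.log(layerQualities(2)); // [1, 0]    => MEDIUM, LOW (previously HIGH, MEDIUM)
console.log(layerQualities(1)); // [0]       => LOW (previously HIGH)
```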
@@ -16276,7 +16443,7 @@ class RTCEngine extends eventsExports.EventEmitter {
  yield this.configure(joinResponse);
  }
  // create offer
- if (!this.subscriberPrimary) {
+ if (!this.subscriberPrimary || joinResponse.fastPublish) {
  this.negotiate();
  }
  this.clientConfiguration = joinResponse.clientConfiguration;
@@ -18052,6 +18219,141 @@ class LocalTrackPublication extends TrackPublication {
  }
  }
 
+ /** @internal */
+ function extractProcessorsFromOptions(options) {
+ let audioProcessor;
+ let videoProcessor;
+ if (typeof options.audio === 'object' && options.audio.processor) {
+ audioProcessor = options.audio.processor;
+ }
+ if (typeof options.video === 'object' && options.video.processor) {
+ videoProcessor = options.video.processor;
+ }
+ return {
+ audioProcessor,
+ videoProcessor
+ };
+ }
+ /**
+ * Creates a local video and audio track at the same time. When acquiring both
+ * audio and video tracks together, it'll display a single permission prompt to
+ * the user instead of two separate ones.
+ * @param options
+ */
+ function createLocalTracks(options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ var _a, _b;
+ // set default options to true
+ options !== null && options !== void 0 ? options : options = {};
+ (_a = options.audio) !== null && _a !== void 0 ? _a : options.audio = true;
+ (_b = options.video) !== null && _b !== void 0 ? _b : options.video = true;
+ const {
+ audioProcessor,
+ videoProcessor
+ } = extractProcessorsFromOptions(options);
+ const opts = mergeDefaultOptions(options, audioDefaults, videoDefaults);
+ const constraints = constraintsForOptions(opts);
+ // Keep a reference to the promise on DeviceManager and await it in getLocalDevices()
+ // works around iOS Safari Bug https://bugs.webkit.org/show_bug.cgi?id=179363
+ const mediaPromise = navigator.mediaDevices.getUserMedia(constraints);
+ if (options.audio) {
+ DeviceManager.userMediaPromiseMap.set('audioinput', mediaPromise);
+ mediaPromise.catch(() => DeviceManager.userMediaPromiseMap.delete('audioinput'));
+ }
+ if (options.video) {
+ DeviceManager.userMediaPromiseMap.set('videoinput', mediaPromise);
+ mediaPromise.catch(() => DeviceManager.userMediaPromiseMap.delete('videoinput'));
+ }
+ const stream = yield mediaPromise;
+ return Promise.all(stream.getTracks().map(mediaStreamTrack => __awaiter(this, void 0, void 0, function* () {
+ const isAudio = mediaStreamTrack.kind === 'audio';
+ isAudio ? opts.audio : opts.video;
+ let trackConstraints;
+ const conOrBool = isAudio ? constraints.audio : constraints.video;
+ if (typeof conOrBool !== 'boolean') {
+ trackConstraints = conOrBool;
+ }
+ // update the constraints with the device id the user gave permissions to in the permission prompt
+ // otherwise each track restart (e.g. mute - unmute) will try to initialize the device again -> causing additional permission prompts
+ if (trackConstraints) {
+ trackConstraints.deviceId = mediaStreamTrack.getSettings().deviceId;
+ } else {
+ trackConstraints = {
+ deviceId: mediaStreamTrack.getSettings().deviceId
+ };
+ }
+ const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints);
+ if (track.kind === Track.Kind.Video) {
+ track.source = Track.Source.Camera;
+ } else if (track.kind === Track.Kind.Audio) {
+ track.source = Track.Source.Microphone;
+ }
+ track.mediaStream = stream;
+ if (track instanceof LocalAudioTrack && audioProcessor) {
+ yield track.setProcessor(audioProcessor);
+ } else if (track instanceof LocalVideoTrack && videoProcessor) {
+ yield track.setProcessor(videoProcessor);
+ }
+ return track;
+ })));
+ });
+ }
+ /**
+ * Creates a [[LocalVideoTrack]] with getUserMedia()
+ * @param options
+ */
+ function createLocalVideoTrack(options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const tracks = yield createLocalTracks({
+ audio: false,
+ video: options
+ });
+ return tracks[0];
+ });
+ }
+ function createLocalAudioTrack(options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const tracks = yield createLocalTracks({
+ audio: options,
+ video: false
+ });
+ return tracks[0];
+ });
+ }
+ /**
+ * Creates a screen capture tracks with getDisplayMedia().
+ * A LocalVideoTrack is always created and returned.
+ * If { audio: true }, and the browser supports audio capture, a LocalAudioTrack is also created.
+ */
+ function createLocalScreenTracks(options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (options === undefined) {
+ options = {};
+ }
+ if (options.resolution === undefined && !isSafari17()) {
+ options.resolution = ScreenSharePresets.h1080fps30.resolution;
+ }
+ if (navigator.mediaDevices.getDisplayMedia === undefined) {
+ throw new DeviceUnsupportedError('getDisplayMedia not supported');
+ }
+ const constraints = screenCaptureToDisplayMediaStreamOptions(options);
+ const stream = yield navigator.mediaDevices.getDisplayMedia(constraints);
+ const tracks = stream.getVideoTracks();
+ if (tracks.length === 0) {
+ throw new TrackInvalidError('no video track found');
+ }
+ const screenVideo = new LocalVideoTrack(tracks[0], undefined, false);
+ screenVideo.source = Track.Source.ScreenShare;
+ const localTracks = [screenVideo];
+ if (stream.getAudioTracks().length > 0) {
+ const screenAudio = new LocalAudioTrack(stream.getAudioTracks()[0], undefined, false);
+ screenAudio.source = Track.Source.ScreenShareAudio;
+ localTracks.push(screenAudio);
+ }
+ return localTracks;
+ });
+ }
+
  var ConnectionQuality;
  (function (ConnectionQuality) {
  ConnectionQuality["Excellent"] = "excellent";
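The create* helpers above (moved up in the bundle; see the matching removal near the end of this file) now pull processors out of the capture options via extractProcessorsFromOptions and apply them to the created tracks. A usage sketch; BackgroundBlur from @livekit/track-processors is an assumption, any TrackProcessor works:

```ts
import { createLocalTracks } from 'livekit-client';
import { BackgroundBlur } from '@livekit/track-processors'; // assumed processor implementation

async function getBlurredTracks() {
  // Requesting audio and video together shows a single permission prompt;
  // the video processor is applied to the LocalVideoTrack before it is returned.
  return createLocalTracks({
    audio: true,
    video: {
      resolution: { width: 1280, height: 720, frameRate: 30 },
      processor: BackgroundBlur(10),
    },
  });
}
```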
@@ -18315,6 +18617,7 @@ class LocalParticipant extends Participant {
  this.participantTrackPermissions = [];
  this.allParticipantsAllowedToSubscribe = true;
  this.encryptionType = Encryption_Type.NONE;
+ this.enabledPublishVideoCodecs = [];
  this.handleReconnecting = () => {
  if (!this.reconnectFuture) {
  this.reconnectFuture = new Future();
@@ -18474,7 +18777,14 @@ class LocalParticipant extends Participant {
  }
  if (!track.isMuted) {
  this.log.debug('track ended, attempting to use a different device', Object.assign(Object.assign({}, this.logContext), getLogContextFromTrack(track)));
- yield track.restartTrack();
+ if (track instanceof LocalAudioTrack) {
+ // fall back to default device if available
+ yield track.restartTrack({
+ deviceId: 'default'
+ });
+ } else {
+ yield track.restartTrack();
+ }
  }
  } catch (e) {
  this.log.warn("could not restart track, muting instead", Object.assign(Object.assign({}, this.logContext), getLogContextFromTrack(track)));
@@ -18763,6 +19073,11 @@ class LocalParticipant extends Participant {
  createTracks(options) {
  return __awaiter(this, void 0, void 0, function* () {
  var _a, _b;
+ options !== null && options !== void 0 ? options : options = {};
+ const {
+ audioProcessor,
+ videoProcessor
+ } = extractProcessorsFromOptions(options);
  const mergedOptions = mergeDefaultOptions(options, (_a = this.roomOptions) === null || _a === void 0 ? void 0 : _a.audioCaptureDefaults, (_b = this.roomOptions) === null || _b === void 0 ? void 0 : _b.videoCaptureDefaults);
  const constraints = constraintsForOptions(mergedOptions);
  let stream;
@@ -18788,10 +19103,7 @@ class LocalParticipant extends Participant {
  }
  return Promise.all(stream.getTracks().map(mediaStreamTrack => __awaiter(this, void 0, void 0, function* () {
  const isAudio = mediaStreamTrack.kind === 'audio';
- let trackOptions = isAudio ? mergedOptions.audio : mergedOptions.video;
- if (typeof trackOptions === 'boolean' || !trackOptions) {
- trackOptions = {};
- }
+ isAudio ? mergedOptions.audio : mergedOptions.video;
  let trackConstraints;
  const conOrBool = isAudio ? constraints.audio : constraints.video;
  if (typeof conOrBool !== 'boolean') {
@@ -18808,12 +19120,10 @@ class LocalParticipant extends Participant {
  track.setAudioContext(this.audioContext);
  }
  track.mediaStream = stream;
- if (trackOptions.processor) {
- if (track instanceof LocalAudioTrack) {
- yield track.setProcessor(trackOptions.processor);
- } else {
- yield track.setProcessor(trackOptions.processor);
- }
+ if (track instanceof LocalAudioTrack && audioProcessor) {
+ yield track.setProcessor(audioProcessor);
+ } else if (track instanceof LocalVideoTrack && videoProcessor) {
+ yield track.setProcessor(videoProcessor);
  }
  return track;
  })));
@@ -18979,7 +19289,7 @@ class LocalParticipant extends Participant {
  }
  publish(track, opts, isStereo) {
  return __awaiter(this, void 0, void 0, function* () {
- var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
  const existingTrackOfSource = Array.from(this.trackPublications.values()).find(publishedTrack => track instanceof LocalTrack && publishedTrack.source === track.source);
  if (existingTrackOfSource && track.source !== Track.Source.Unknown) {
  this.log.info("publishing a second track with the same source: ".concat(track.source), Object.assign(Object.assign({}, this.logContext), getLogContextFromTrack(track)));
@@ -19002,6 +19312,12 @@ class LocalParticipant extends Participant {
  if (opts.videoCodec === undefined) {
  opts.videoCodec = defaultVideoCodec;
  }
+ if (this.enabledPublishVideoCodecs.length > 0) {
+ // fallback to a supported codec if it is not supported
+ if (!this.enabledPublishVideoCodecs.some(c => opts.videoCodec === mimeTypeToVideoCodecString(c.mime))) {
+ opts.videoCodec = mimeTypeToVideoCodecString(this.enabledPublishVideoCodecs[0].mime);
+ }
+ }
  const videoCodec = opts.videoCodec;
  // handle track actions
  track.on(TrackEvent.Muted, this.onTrackMuted);
@@ -19104,25 +19420,75 @@ class LocalParticipant extends Participant {
  if (!this.engine || this.engine.isClosed) {
  throw new UnexpectedConnectionState('cannot publish track when not connected');
  }
- const ti = yield this.engine.addTrack(req);
- // server might not support the codec the client has requested, in that case, fallback
- // to a supported codec
- let primaryCodecMime;
- ti.codecs.forEach(codec => {
- if (primaryCodecMime === undefined) {
- primaryCodecMime = codec.mimeType;
+ const negotiate = () => __awaiter(this, void 0, void 0, function* () {
+ var _a, _b, _c;
+ if (!this.engine.pcManager) {
+ throw new UnexpectedConnectionState('pcManager is not ready');
  }
+ track.sender = yield this.engine.createSender(track, opts, encodings);
+ if (track instanceof LocalVideoTrack) {
+ (_a = opts.degradationPreference) !== null && _a !== void 0 ? _a : opts.degradationPreference = getDefaultDegradationPreference(track);
+ track.setDegradationPreference(opts.degradationPreference);
+ }
+ if (encodings) {
+ if (isFireFox() && track.kind === Track.Kind.Audio) {
+ /* Refer to RFC https://datatracker.ietf.org/doc/html/rfc7587#section-6.1,
+ livekit-server uses maxaveragebitrate=510000 in the answer sdp to permit client to
+ publish high quality audio track. But firefox always uses this value as the actual
+ bitrates, causing the audio bitrates to rise to 510Kbps in any stereo case unexpectedly.
+ So the client need to modify maxaverragebitrates in answer sdp to user provided value to
+ fix the issue.
+ */
+ let trackTransceiver = undefined;
+ for (const transceiver of this.engine.pcManager.publisher.getTransceivers()) {
+ if (transceiver.sender === track.sender) {
+ trackTransceiver = transceiver;
+ break;
+ }
+ }
+ if (trackTransceiver) {
+ this.engine.pcManager.publisher.setTrackCodecBitrate({
+ transceiver: trackTransceiver,
+ codec: 'opus',
+ maxbr: ((_b = encodings[0]) === null || _b === void 0 ? void 0 : _b.maxBitrate) ? encodings[0].maxBitrate / 1000 : 0
+ });
+ }
+ } else if (track.codec && isSVCCodec(track.codec) && ((_c = encodings[0]) === null || _c === void 0 ? void 0 : _c.maxBitrate)) {
+ this.engine.pcManager.publisher.setTrackCodecBitrate({
+ cid: req.cid,
+ codec: track.codec,
+ maxbr: encodings[0].maxBitrate / 1000
+ });
+ }
+ }
+ yield this.engine.negotiate();
  });
- if (primaryCodecMime && track.kind === Track.Kind.Video) {
- const updatedCodec = mimeTypeToVideoCodecString(primaryCodecMime);
- if (updatedCodec !== videoCodec) {
- this.log.debug('falling back to server selected codec', Object.assign(Object.assign(Object.assign({}, this.logContext), getLogContextFromTrack(track)), {
- codec: updatedCodec
- }));
- opts.videoCodec = updatedCodec;
- // recompute encodings since bitrates/etc could have changed
- encodings = computeVideoEncodings(track.source === Track.Source.ScreenShare, req.width, req.height, opts);
+ let ti;
+ if (this.enabledPublishVideoCodecs.length > 0) {
+ const rets = yield Promise.all([this.engine.addTrack(req), negotiate()]);
+ ti = rets[0];
+ } else {
+ ti = yield this.engine.addTrack(req);
+ // server might not support the codec the client has requested, in that case, fallback
+ // to a supported codec
+ let primaryCodecMime;
+ ti.codecs.forEach(codec => {
+ if (primaryCodecMime === undefined) {
+ primaryCodecMime = codec.mimeType;
+ }
+ });
+ if (primaryCodecMime && track.kind === Track.Kind.Video) {
+ const updatedCodec = mimeTypeToVideoCodecString(primaryCodecMime);
+ if (updatedCodec !== videoCodec) {
+ this.log.debug('falling back to server selected codec', Object.assign(Object.assign(Object.assign({}, this.logContext), getLogContextFromTrack(track)), {
+ codec: updatedCodec
+ }));
+ opts.videoCodec = updatedCodec;
+ // recompute encodings since bitrates/etc could have changed
+ encodings = computeVideoEncodings(track.source === Track.Source.ScreenShare, req.width, req.height, opts);
+ }
  }
+ yield negotiate();
  }
  const publication = new LocalTrackPublication(track.kind, ti, track, {
  loggerName: this.roomOptions.loggerName,
@@ -19131,50 +19497,10 @@ class LocalParticipant extends Participant {
  // save options for when it needs to be republished again
  publication.options = opts;
  track.sid = ti.sid;
- if (!this.engine.pcManager) {
- throw new UnexpectedConnectionState('pcManager is not ready');
- }
  this.log.debug("publishing ".concat(track.kind, " with encodings"), Object.assign(Object.assign({}, this.logContext), {
  encodings,
  trackInfo: ti
  }));
- track.sender = yield this.engine.createSender(track, opts, encodings);
- if (track instanceof LocalVideoTrack) {
- (_l = opts.degradationPreference) !== null && _l !== void 0 ? _l : opts.degradationPreference = getDefaultDegradationPreference(track);
- track.setDegradationPreference(opts.degradationPreference);
- }
- if (encodings) {
- if (isFireFox() && track.kind === Track.Kind.Audio) {
- /* Refer to RFC https://datatracker.ietf.org/doc/html/rfc7587#section-6.1,
- livekit-server uses maxaveragebitrate=510000 in the answer sdp to permit client to
- publish high quality audio track. But firefox always uses this value as the actual
- bitrates, causing the audio bitrates to rise to 510Kbps in any stereo case unexpectedly.
- So the client need to modify maxaverragebitrates in answer sdp to user provided value to
- fix the issue.
- */
- let trackTransceiver = undefined;
- for (const transceiver of this.engine.pcManager.publisher.getTransceivers()) {
- if (transceiver.sender === track.sender) {
- trackTransceiver = transceiver;
- break;
- }
- }
- if (trackTransceiver) {
- this.engine.pcManager.publisher.setTrackCodecBitrate({
- transceiver: trackTransceiver,
- codec: 'opus',
- maxbr: ((_m = encodings[0]) === null || _m === void 0 ? void 0 : _m.maxBitrate) ? encodings[0].maxBitrate / 1000 : 0
- });
- }
- } else if (track.codec && isSVCCodec(track.codec) && ((_o = encodings[0]) === null || _o === void 0 ? void 0 : _o.maxBitrate)) {
- this.engine.pcManager.publisher.setTrackCodecBitrate({
- cid: req.cid,
- codec: track.codec,
- maxbr: encodings[0].maxBitrate / 1000
- });
- }
- }
- yield this.engine.negotiate();
  if (track instanceof LocalVideoTrack) {
  track.startMonitor(this.engine.client);
  } else if (track instanceof LocalAudioTrack) {
@@ -19240,9 +19566,12 @@ class LocalParticipant extends Participant {
  if (!this.engine || this.engine.isClosed) {
  throw new UnexpectedConnectionState('cannot publish track when not connected');
  }
- const ti = yield this.engine.addTrack(req);
- yield this.engine.createSimulcastSender(track, simulcastTrack, opts, encodings);
- yield this.engine.negotiate();
+ const negotiate = () => __awaiter(this, void 0, void 0, function* () {
+ yield this.engine.createSimulcastSender(track, simulcastTrack, opts, encodings);
+ yield this.engine.negotiate();
+ });
+ const rets = yield Promise.all([this.engine.addTrack(req), negotiate()]);
+ const ti = rets[0];
  this.log.debug("published ".concat(videoCodec, " for track ").concat(track.sid), Object.assign(Object.assign({}, this.logContext), {
  encodings,
  trackInfo: ti
@@ -19426,6 +19755,10 @@ class LocalParticipant extends Participant {
  }
  }
  /** @internal */
+ setEnabledPublishCodecs(codecs) {
+ this.enabledPublishVideoCodecs = codecs.filter(c => c.mime.split('/')[0].toLowerCase() === 'video');
+ }
+ /** @internal */
  updateInfo(info) {
  if (info.sid !== this.sid) {
  // drop updates that specify a wrong sid.
@@ -20147,6 +20480,7 @@ class Room extends eventsExports.EventEmitter {
  const pi = joinResponse.participant;
  this.localParticipant.sid = pi.sid;
  this.localParticipant.identity = pi.identity;
+ this.localParticipant.setEnabledPublishCodecs(joinResponse.enabledPublishCodecs);
  if (this.options.e2ee && this.e2eeManager) {
  try {
  this.e2eeManager.setSifTrailer(joinResponse.sifTrailer);
@@ -20559,7 +20893,7 @@ class Room extends eventsExports.EventEmitter {
  // find the participant
  const participant = transcription.transcribedParticipantIdentity === this.localParticipant.identity ? this.localParticipant : this.getParticipantByIdentity(transcription.transcribedParticipantIdentity);
  const publication = participant === null || participant === void 0 ? void 0 : participant.trackPublications.get(transcription.trackId);
- const segments = extractTranscriptionSegments(transcription);
+ const segments = extractTranscriptionSegments(transcription, this.transcriptionReceivedTimes);
  publication === null || publication === void 0 ? void 0 : publication.emit(TrackEvent.TranscriptionReceived, segments);
  participant === null || participant === void 0 ? void 0 : participant.emit(ParticipantEvent.TranscriptionReceived, segments, publication);
  this.emit(RoomEvent.TranscriptionReceived, segments, participant, publication);
@@ -20594,6 +20928,16 @@ class Room extends eventsExports.EventEmitter {
  }
  };
  this.handleDeviceChange = () => __awaiter(this, void 0, void 0, function* () {
+ const availableDevices = yield DeviceManager.getInstance().getDevices();
+ // inputs are automatically handled via TrackEvent.Ended causing a TrackEvent.Restarted. Here we only need to worry about audiooutputs changing
+ const kinds = ['audiooutput'];
+ for (let kind of kinds) {
+ // switch to first available device if previously active device is not available any more
+ const devicesOfKind = availableDevices.filter(d => d.kind === kind);
+ if (devicesOfKind.length > 0 && !devicesOfKind.find(deviceInfo => deviceInfo.deviceId === this.getActiveDevice(kind))) {
+ yield this.switchActiveDevice(kind, devicesOfKind[0].deviceId);
+ }
+ }
  this.emit(RoomEvent.MediaDevicesChanged);
  });
  this.handleRoomUpdate = room => {
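handleDeviceChange now auto-switches to the first available audio output when the previously active one disappears, in addition to emitting MediaDevicesChanged. Observing both effects:

```ts
import { Room, RoomEvent } from 'livekit-client';

function watchDevices(room: Room) {
  room.on(RoomEvent.MediaDevicesChanged, () => {
    console.log('device list changed');
  });
  // Also fired when the room auto-switches audio output after the active
  // device is unplugged.
  room.on(RoomEvent.ActiveDeviceChanged, (kind, deviceId) => {
    console.log(`active ${kind} is now ${deviceId}`);
  });
}
```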
@@ -20638,9 +20982,10 @@ class Room extends eventsExports.EventEmitter {
  (_a = processor === null || processor === void 0 ? void 0 : processor.onPublish) === null || _a === void 0 ? void 0 : _a.call(processor, this);
  };
  this.onLocalTrackPublished = pub => __awaiter(this, void 0, void 0, function* () {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f;
  (_a = pub.track) === null || _a === void 0 ? void 0 : _a.on(TrackEvent.TrackProcessorUpdate, this.onTrackProcessorUpdate);
- (_d = (_c = (_b = pub.track) === null || _b === void 0 ? void 0 : _b.getProcessor()) === null || _c === void 0 ? void 0 : _c.onPublish) === null || _d === void 0 ? void 0 : _d.call(_c, this);
+ (_b = pub.track) === null || _b === void 0 ? void 0 : _b.on(TrackEvent.Restarted, this.onLocalTrackRestarted);
+ (_e = (_d = (_c = pub.track) === null || _c === void 0 ? void 0 : _c.getProcessor()) === null || _d === void 0 ? void 0 : _d.onPublish) === null || _e === void 0 ? void 0 : _e.call(_d, this);
  this.emit(RoomEvent.LocalTrackPublished, pub, this.localParticipant);
  if (pub.track instanceof LocalAudioTrack) {
  const trackIsSilent = yield pub.track.checkForSilence();
@@ -20656,10 +21001,20 @@ class Room extends eventsExports.EventEmitter {
  this.emit(RoomEvent.LocalAudioSilenceDetected, pub);
  }
  }
- const deviceId = yield (_e = pub.track) === null || _e === void 0 ? void 0 : _e.getDeviceId();
+ const deviceId = yield (_f = pub.track) === null || _f === void 0 ? void 0 : _f.getDeviceId();
  const deviceKind = sourceToKind(pub.source);
  if (deviceKind && deviceId && deviceId !== this.localParticipant.activeDeviceMap.get(deviceKind)) {
  this.localParticipant.activeDeviceMap.set(deviceKind, deviceId);
  }
  });
  this.onLocalTrackUnpublished = pub => {
- var _a;
+ var _a, _b;
  (_a = pub.track) === null || _a === void 0 ? void 0 : _a.off(TrackEvent.TrackProcessorUpdate, this.onTrackProcessorUpdate);
+ (_b = pub.track) === null || _b === void 0 ? void 0 : _b.off(TrackEvent.Restarted, this.onLocalTrackRestarted);
  this.emit(RoomEvent.LocalTrackUnpublished, pub, this.localParticipant);
  };
+ this.onLocalTrackRestarted = track => __awaiter(this, void 0, void 0, function* () {
+ const deviceId = yield track.getDeviceId(false);
+ const deviceKind = sourceToKind(track.source);
+ if (deviceKind && deviceId && deviceId !== this.localParticipant.activeDeviceMap.get(deviceKind)) {
+ this.log.debug("local track restarted, setting ".concat(deviceKind, " ").concat(deviceId, " active"), this.logContext);
+ this.localParticipant.activeDeviceMap.set(deviceKind, deviceId);
+ this.emit(RoomEvent.ActiveDeviceChanged, deviceKind, deviceId);
+ }
+ });
  this.onLocalConnectionQualityChanged = quality => {
  this.emit(RoomEvent.ConnectionQualityChanged, quality, this.localParticipant);
  };
@@ -20674,6 +21029,7 @@ class Room extends eventsExports.EventEmitter {
  this.sidToIdentity = new Map();
  this.options = Object.assign(Object.assign({}, roomOptionDefaults), options);
  this.log = getLogger((_a = this.options.loggerName) !== null && _a !== void 0 ? _a : LoggerNames.Room);
+ this.transcriptionReceivedTimes = new Map();
  this.options.audioCaptureDefaults = Object.assign(Object.assign({}, audioDefaults), options === null || options === void 0 ? void 0 : options.audioCaptureDefaults);
  this.options.videoCaptureDefaults = Object.assign(Object.assign({}, videoDefaults), options === null || options === void 0 ? void 0 : options.videoCaptureDefaults);
  this.options.publishDefaults = Object.assign(Object.assign({}, publishDefaults), options === null || options === void 0 ? void 0 : options.publishDefaults);
@@ -20819,6 +21175,19 @@ class Room extends eventsExports.EventEmitter {
  }
  }).on(EngineEvent.DCBufferStatusChanged, (status, kind) => {
  this.emit(RoomEvent.DCBufferStatusChanged, status, kind);
+ }).on(EngineEvent.LocalTrackSubscribed, subscribedSid => {
+ const trackPublication = this.localParticipant.getTrackPublications().find(_ref2 => {
+ let {
+ trackSid
+ } = _ref2;
+ return trackSid === subscribedSid;
+ });
+ if (!trackPublication) {
+ this.log.warn('could not find local track subscription for subscribed event', this.logContext);
+ return;
+ }
+ this.localParticipant.emit(ParticipantEvent.LocalTrackSubscribed, trackPublication);
+ this.emitWhenConnected(RoomEvent.LocalTrackSubscribed, trackPublication, this.localParticipant);
  });
  if (this.localParticipant) {
  this.localParticipant.setupEngine(this.engine);
@@ -20829,9 +21198,8 @@ class Room extends eventsExports.EventEmitter {
  }
  /**
  * getLocalDevices abstracts navigator.mediaDevices.enumerateDevices.
- * In particular, it handles Chrome's unique behavior of creating `default`
- * devices. When encountered, it'll be removed from the list of devices.
- * The actual default device will be placed at top.
+ * In particular, it requests device permissions by default if needed
+ * and makes sure the returned device does not consist of dummy devices
  * @param kind
  * @returns a list of available local devices
  */
@@ -21044,15 +21412,15 @@ class Room extends eventsExports.EventEmitter {
  var _this3 = this;
  let exact = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
  return function* () {
- var _a, _b, _c;
- var _d;
+ var _a, _b, _c, _d, _e, _f;
+ var _g;
  let deviceHasChanged = false;
  let success = true;
  const deviceConstraint = exact ? {
  exact: deviceId
  } : deviceId;
  if (kind === 'audioinput') {
- const prevDeviceId = _this3.options.audioCaptureDefaults.deviceId;
+ const prevDeviceId = (_a = _this3.getActiveDevice(kind)) !== null && _a !== void 0 ? _a : _this3.options.audioCaptureDefaults.deviceId;
  _this3.options.audioCaptureDefaults.deviceId = deviceConstraint;
  deviceHasChanged = prevDeviceId !== deviceConstraint;
  const tracks = Array.from(_this3.localParticipant.audioTrackPublications.values()).filter(track => track.source === Track.Source.Microphone);
21085
21453
  }
21086
21454
  if (_this3.options.webAudioMix) {
21087
21455
  // setting `default` for web audio output doesn't work, so we need to normalize the id before
21088
- deviceId = (_a = yield DeviceManager.getInstance().normalizeDeviceId('audiooutput', deviceId)) !== null && _a !== void 0 ? _a : '';
21456
+ deviceId = (_c = yield DeviceManager.getInstance().normalizeDeviceId('audiooutput', deviceId)) !== null && _c !== void 0 ? _c : '';
21089
21457
  }
21090
- (_b = (_d = _this3.options).audioOutput) !== null && _b !== void 0 ? _b : _d.audioOutput = {};
21091
- const prevDeviceId = _this3.options.audioOutput.deviceId;
21458
+ (_d = (_g = _this3.options).audioOutput) !== null && _d !== void 0 ? _d : _g.audioOutput = {};
21459
+ const prevDeviceId = (_e = _this3.getActiveDevice(kind)) !== null && _e !== void 0 ? _e : _this3.options.audioOutput.deviceId;
21092
21460
  _this3.options.audioOutput.deviceId = deviceId;
21093
21461
  deviceHasChanged = prevDeviceId !== deviceConstraint;
21094
21462
  try {
21095
21463
  if (_this3.options.webAudioMix) {
21096
21464
  // @ts-expect-error setSinkId is not yet in the typescript type of AudioContext
21097
- (_c = _this3.audioContext) === null || _c === void 0 ? void 0 : _c.setSinkId(deviceId);
21465
+ (_f = _this3.audioContext) === null || _f === void 0 ? void 0 : _f.setSinkId(deviceId);
21098
21466
  }
21099
21467
  // also set audio output on all audio elements, even if webAudioMix is enabled in order to workaround echo cancellation not working on chrome with non-default output devices
21100
21468
  // see https://issues.chromium.org/issues/40252911#comment7
@@ -21188,6 +21556,7 @@ class Room extends eventsExports.EventEmitter {
21188
21556
  this.clearConnectionReconcile();
21189
21557
  this.isResuming = false;
21190
21558
  this.bufferedEvents = [];
21559
+ this.transcriptionReceivedTimes.clear();
21191
21560
  if (this.state === ConnectionState.Disconnected) {
21192
21561
  return;
21193
21562
  }
@@ -21431,8 +21800,8 @@ class Room extends eventsExports.EventEmitter {
21431
21800
  return true;
21432
21801
  }
21433
21802
  emitBufferedEvents() {
21434
- this.bufferedEvents.forEach(_ref2 => {
21435
- let [ev, args] = _ref2;
21803
+ this.bufferedEvents.forEach(_ref3 => {
21804
+ let [ev, args] = _ref3;
21436
21805
  this.emit(ev, ...args);
21437
21806
  });
21438
21807
  this.bufferedEvents = [];
@@ -21654,7 +22023,7 @@ class Checker extends eventsExports.EventEmitter {
  if (this.room.state === ConnectionState.Connected) {
  return this.room;
  }
- yield this.room.connect(this.url, this.token);
+ yield this.room.connect(this.url, this.token, this.connectOptions);
  return this.room;
  });
  }
@@ -21709,127 +22078,6 @@ class Checker extends eventsExports.EventEmitter {
  }
  }
 
- /**
- * Creates a local video and audio track at the same time. When acquiring both
- * audio and video tracks together, it'll display a single permission prompt to
- * the user instead of two separate ones.
- * @param options
- */
- function createLocalTracks(options) {
- return __awaiter(this, void 0, void 0, function* () {
- var _a, _b;
- // set default options to true
- options !== null && options !== void 0 ? options : options = {};
- (_a = options.audio) !== null && _a !== void 0 ? _a : options.audio = true;
- (_b = options.video) !== null && _b !== void 0 ? _b : options.video = true;
- const opts = mergeDefaultOptions(options, audioDefaults, videoDefaults);
- const constraints = constraintsForOptions(opts);
- // Keep a reference to the promise on DeviceManager and await it in getLocalDevices()
- // works around iOS Safari Bug https://bugs.webkit.org/show_bug.cgi?id=179363
- const mediaPromise = navigator.mediaDevices.getUserMedia(constraints);
- if (options.audio) {
- DeviceManager.userMediaPromiseMap.set('audioinput', mediaPromise);
- mediaPromise.catch(() => DeviceManager.userMediaPromiseMap.delete('audioinput'));
- }
- if (options.video) {
- DeviceManager.userMediaPromiseMap.set('videoinput', mediaPromise);
- mediaPromise.catch(() => DeviceManager.userMediaPromiseMap.delete('videoinput'));
- }
- const stream = yield mediaPromise;
- return Promise.all(stream.getTracks().map(mediaStreamTrack => __awaiter(this, void 0, void 0, function* () {
- const isAudio = mediaStreamTrack.kind === 'audio';
- let trackOptions = isAudio ? options.audio : options.video;
- if (typeof trackOptions === 'boolean' || !trackOptions) {
- trackOptions = {};
- }
- let trackConstraints;
- const conOrBool = isAudio ? constraints.audio : constraints.video;
- if (typeof conOrBool !== 'boolean') {
- trackConstraints = conOrBool;
- }
- // update the constraints with the device id the user gave permissions to in the permission prompt
- // otherwise each track restart (e.g. mute - unmute) will try to initialize the device again -> causing additional permission prompts
- if (trackConstraints) {
- trackConstraints.deviceId = mediaStreamTrack.getSettings().deviceId;
- } else {
- trackConstraints = {
- deviceId: mediaStreamTrack.getSettings().deviceId
- };
- }
- const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints);
- if (track.kind === Track.Kind.Video) {
- track.source = Track.Source.Camera;
- } else if (track.kind === Track.Kind.Audio) {
- track.source = Track.Source.Microphone;
- }
- track.mediaStream = stream;
- if (trackOptions.processor) {
- if (track instanceof LocalAudioTrack) {
- yield track.setProcessor(trackOptions.processor);
- } else if (track instanceof LocalVideoTrack) {
- yield track.setProcessor(trackOptions.processor);
- }
- }
- return track;
- })));
- });
- }
- /**
- * Creates a [[LocalVideoTrack]] with getUserMedia()
- * @param options
- */
- function createLocalVideoTrack(options) {
- return __awaiter(this, void 0, void 0, function* () {
- const tracks = yield createLocalTracks({
- audio: false,
- video: options
- });
- return tracks[0];
- });
- }
- function createLocalAudioTrack(options) {
- return __awaiter(this, void 0, void 0, function* () {
- const tracks = yield createLocalTracks({
- audio: options,
- video: false
- });
- return tracks[0];
- });
- }
- /**
- * Creates a screen capture tracks with getDisplayMedia().
- * A LocalVideoTrack is always created and returned.
- * If { audio: true }, and the browser supports audio capture, a LocalAudioTrack is also created.
- */
- function createLocalScreenTracks(options) {
- return __awaiter(this, void 0, void 0, function* () {
- if (options === undefined) {
- options = {};
- }
- if (options.resolution === undefined && !isSafari17()) {
- options.resolution = ScreenSharePresets.h1080fps30.resolution;
- }
- if (navigator.mediaDevices.getDisplayMedia === undefined) {
- throw new DeviceUnsupportedError('getDisplayMedia not supported');
- }
- const constraints = screenCaptureToDisplayMediaStreamOptions(options);
- const stream = yield navigator.mediaDevices.getDisplayMedia(constraints);
- const tracks = stream.getVideoTracks();
- if (tracks.length === 0) {
- throw new TrackInvalidError('no video track found');
- }
- const screenVideo = new LocalVideoTrack(tracks[0], undefined, false);
- screenVideo.source = Track.Source.ScreenShare;
- const localTracks = [screenVideo];
- if (stream.getAudioTracks().length > 0) {
- const screenAudio = new LocalAudioTrack(stream.getAudioTracks()[0], undefined, false);
- screenAudio.source = Track.Source.ScreenShareAudio;
- localTracks.push(screenAudio);
- }
- return localTracks;
- });
- }
-
  class PublishAudioCheck extends Checker {
  get description() {
  return 'Can publish audio';
@@ -22238,5 +22486,5 @@ function isFacingModeValue(item) {
  return item === undefined || allowedValues.includes(item);
  }
 
- export { AudioPresets, BaseKeyProvider, CheckStatus, Checker, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, CryptorError, CryptorErrorReason, CryptorEvent, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EncryptionEvent, EngineEvent, ExternalE2EEKeyProvider, KeyHandlerEvent, KeyProviderEvent, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, LoggerNames, MediaDeviceFailure, Mutex, NegotiationError, Participant, ParticipantEvent, ParticipantInfo_Kind as ParticipantKind, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, ScreenSharePresets, SignalRequestError, SubscriptionError, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createE2EEKey, createKeyMaterialFromBuffer, createKeyMaterialFromString, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, deriveKeys, detachTrack, facingModeFromDeviceLabel, facingModeFromLocalTrack, getBrowser, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, getLogger, importKey, isBackupCodec, isBrowserSupported, isE2EESupported, isInsertableStreamSupported, isScriptTransformSupported, isVideoFrame, needsRbspUnescaping, parseRbsp, protocolVersion, ratchet, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, supportsVP9, version, videoCodecs, writeRbsp };
+ export { AudioPresets, BaseKeyProvider, CheckStatus, Checker, ConnectionCheck, ConnectionError, ConnectionQuality, ConnectionState, CriticalTimers, CryptorError, CryptorErrorReason, CryptorEvent, DataPacket_Kind, DefaultReconnectPolicy, DeviceUnsupportedError, DisconnectReason, EncryptionEvent, EngineEvent, ExternalE2EEKeyProvider, KeyHandlerEvent, KeyProviderEvent, LivekitError, LocalAudioTrack, LocalParticipant, LocalTrack, LocalTrackPublication, LocalVideoTrack, LogLevel, LoggerNames, MediaDeviceFailure, Mutex, NegotiationError, Participant, ParticipantEvent, ParticipantInfo_Kind as ParticipantKind, PublishDataError, RemoteAudioTrack, RemoteParticipant, RemoteTrack, RemoteTrackPublication, RemoteVideoTrack, Room, RoomEvent, ScreenSharePresets, SignalRequestError, SubscriptionError, Track, TrackEvent, TrackInvalidError, TrackPublication, UnexpectedConnectionState, UnsupportedServer, VideoPreset, VideoPresets, VideoPresets43, VideoQuality, attachToElement, createAudioAnalyser, createE2EEKey, createKeyMaterialFromBuffer, createKeyMaterialFromString, createLocalAudioTrack, createLocalScreenTracks, createLocalTracks, createLocalVideoTrack, deriveKeys, detachTrack, extractProcessorsFromOptions, facingModeFromDeviceLabel, facingModeFromLocalTrack, getBrowser, getEmptyAudioStreamTrack, getEmptyVideoStreamTrack, getLogger, importKey, isBackupCodec, isBrowserSupported, isE2EESupported, isInsertableStreamSupported, isScriptTransformSupported, isVideoFrame, needsRbspUnescaping, parseRbsp, protocolVersion, ratchet, setLogExtension, setLogLevel, supportsAV1, supportsAdaptiveStream, supportsDynacast, supportsVP9, version, videoCodecs, writeRbsp };
  //# sourceMappingURL=livekit-client.esm.mjs.map