livekit-client 1.11.1 → 1.11.3

Sign up to get free protection for your applications and to get access to all the features.
Files changed (47) hide show
  1. package/dist/livekit-client.esm.mjs +4157 -4015
  2. package/dist/livekit-client.esm.mjs.map +1 -1
  3. package/dist/livekit-client.umd.js +1 -1
  4. package/dist/livekit-client.umd.js.map +1 -1
  5. package/dist/src/room/PCTransport.d.ts +4 -3
  6. package/dist/src/room/PCTransport.d.ts.map +1 -1
  7. package/dist/src/room/Room.d.ts +5 -4
  8. package/dist/src/room/Room.d.ts.map +1 -1
  9. package/dist/src/room/events.d.ts +6 -1
  10. package/dist/src/room/events.d.ts.map +1 -1
  11. package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
  12. package/dist/src/room/participant/publishUtils.d.ts.map +1 -1
  13. package/dist/src/room/track/LocalAudioTrack.d.ts +1 -1
  14. package/dist/src/room/track/LocalAudioTrack.d.ts.map +1 -1
  15. package/dist/src/room/track/LocalVideoTrack.d.ts +1 -1
  16. package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
  17. package/dist/src/room/track/RemoteVideoTrack.d.ts +3 -1
  18. package/dist/src/room/track/RemoteVideoTrack.d.ts.map +1 -1
  19. package/dist/src/room/track/options.d.ts +1 -1
  20. package/dist/src/room/track/utils.d.ts +39 -0
  21. package/dist/src/room/track/utils.d.ts.map +1 -1
  22. package/dist/src/room/utils.d.ts +1 -0
  23. package/dist/src/room/utils.d.ts.map +1 -1
  24. package/dist/ts4.2/src/room/PCTransport.d.ts +4 -3
  25. package/dist/ts4.2/src/room/Room.d.ts +5 -4
  26. package/dist/ts4.2/src/room/events.d.ts +6 -1
  27. package/dist/ts4.2/src/room/track/LocalAudioTrack.d.ts +1 -1
  28. package/dist/ts4.2/src/room/track/LocalVideoTrack.d.ts +1 -1
  29. package/dist/ts4.2/src/room/track/RemoteVideoTrack.d.ts +3 -1
  30. package/dist/ts4.2/src/room/track/options.d.ts +1 -1
  31. package/dist/ts4.2/src/room/track/utils.d.ts +39 -0
  32. package/dist/ts4.2/src/room/utils.d.ts +1 -0
  33. package/package.json +1 -1
  34. package/src/room/PCTransport.ts +116 -48
  35. package/src/room/Room.test.ts +29 -0
  36. package/src/room/Room.ts +55 -10
  37. package/src/room/events.ts +6 -0
  38. package/src/room/participant/LocalParticipant.ts +32 -8
  39. package/src/room/participant/publishUtils.ts +4 -2
  40. package/src/room/track/LocalAudioTrack.ts +4 -3
  41. package/src/room/track/LocalTrack.ts +2 -2
  42. package/src/room/track/LocalVideoTrack.ts +8 -4
  43. package/src/room/track/RemoteVideoTrack.ts +3 -8
  44. package/src/room/track/options.ts +1 -1
  45. package/src/room/track/utils.test.ts +30 -1
  46. package/src/room/track/utils.ts +102 -0
  47. package/src/room/utils.ts +23 -0
@@ -8,11 +8,20 @@ import { ddExtensionURI, isChromiumBased, isSVCCodec } from './utils';
8
8
 
9
9
  /** @internal */
10
10
  interface TrackBitrateInfo {
11
- sid: string;
11
+ cid?: string;
12
+ transceiver?: RTCRtpTransceiver;
12
13
  codec: string;
13
14
  maxbr: number;
14
15
  }
15
16
 
17
+ /* The svc codec (av1/vp9) would use a very low bitrate at the beginning and
18
+ increase slowly by the bandwidth estimator until it reaches the target bitrate. The
19
+ process commonly takes more than 10 seconds, so subscribers will get blurry video during
20
+ the first few seconds. So we use 70% of the target bitrate here as the start bitrate to
21
+ eliminate this issue.
22
+ */
23
+ const startBitrateForSVC = 0.7;
24
+
16
25
  export const PCEvents = {
17
26
  NegotiationStarted: 'negotiationStarted',
18
27
  NegotiationComplete: 'negotiationComplete',
@@ -56,12 +65,65 @@ export default class PCTransport extends EventEmitter {
56
65
  }
57
66
 
58
67
  async setRemoteDescription(sd: RTCSessionDescriptionInit): Promise<void> {
68
+ let mungedSDP: string | undefined = undefined;
59
69
  if (sd.type === 'offer') {
60
70
  let { stereoMids, nackMids } = extractStereoAndNackAudioFromOffer(sd);
61
71
  this.remoteStereoMids = stereoMids;
62
72
  this.remoteNackMids = nackMids;
73
+ } else if (sd.type === 'answer') {
74
+ const sdpParsed = parse(sd.sdp ?? '');
75
+ sdpParsed.media.forEach((media) => {
76
+ if (media.type === 'audio') {
77
+ // mung sdp for opus bitrate settings
78
+ this.trackBitrates.some((trackbr): boolean => {
79
+ if (!trackbr.transceiver || media.mid != trackbr.transceiver.mid) {
80
+ return false;
81
+ }
82
+
83
+ let codecPayload = 0;
84
+ media.rtp.some((rtp): boolean => {
85
+ if (rtp.codec.toUpperCase() === trackbr.codec.toUpperCase()) {
86
+ codecPayload = rtp.payload;
87
+ return true;
88
+ }
89
+ return false;
90
+ });
91
+
92
+ if (codecPayload === 0) {
93
+ return true;
94
+ }
95
+
96
+ let fmtpFound = false;
97
+ for (const fmtp of media.fmtp) {
98
+ if (fmtp.payload === codecPayload) {
99
+ fmtp.config = fmtp.config
100
+ .split(';')
101
+ .filter((attr) => !attr.includes('maxaveragebitrate'))
102
+ .join(';');
103
+ if (trackbr.maxbr > 0) {
104
+ fmtp.config += `;maxaveragebitrate=${trackbr.maxbr * 1000}`;
105
+ }
106
+ fmtpFound = true;
107
+ break;
108
+ }
109
+ }
110
+
111
+ if (!fmtpFound) {
112
+ if (trackbr.maxbr > 0) {
113
+ media.fmtp.push({
114
+ payload: codecPayload,
115
+ config: `maxaveragebitrate=${trackbr.maxbr * 1000}`,
116
+ });
117
+ }
118
+ }
119
+
120
+ return true;
121
+ });
122
+ }
123
+ });
124
+ mungedSDP = write(sdpParsed);
63
125
  }
64
- await this.pc.setRemoteDescription(sd);
126
+ await this.setMungedSDP(sd, mungedSDP, true);
65
127
 
66
128
  this.pendingCandidates.forEach((candidate) => {
67
129
  this.pc.addIceCandidate(candidate);
@@ -130,7 +192,7 @@ export default class PCTransport extends EventEmitter {
130
192
  ensureVideoDDExtensionForSVC(media);
131
193
  // mung sdp for codec bitrate setting that can't apply by sendEncoding
132
194
  this.trackBitrates.some((trackbr): boolean => {
133
- if (!media.msid || !media.msid.includes(trackbr.sid)) {
195
+ if (!media.msid || !trackbr.cid || !media.msid.includes(trackbr.cid)) {
134
196
  return false;
135
197
  }
136
198
 
@@ -143,39 +205,39 @@ export default class PCTransport extends EventEmitter {
143
205
  return false;
144
206
  });
145
207
 
146
- // add x-google-max-bitrate to fmtp line if not exist
147
- if (codecPayload > 0) {
148
- if (
149
- !media.fmtp.some((fmtp): boolean => {
150
- if (fmtp.payload === codecPayload) {
151
- if (!fmtp.config.includes('x-google-start-bitrate')) {
152
- fmtp.config += `;x-google-start-bitrate=${trackbr.maxbr * 0.7}`;
153
- }
154
- if (!fmtp.config.includes('x-google-max-bitrate')) {
155
- fmtp.config += `;x-google-max-bitrate=${trackbr.maxbr}`;
156
- }
157
- return true;
158
- }
159
- return false;
160
- })
161
- ) {
162
- media.fmtp.push({
163
- payload: codecPayload,
164
- config: `x-google-start-bitrate=${trackbr.maxbr * 0.7};x-google-max-bitrate=${
165
- trackbr.maxbr
166
- }`,
167
- });
208
+ if (codecPayload === 0) {
209
+ return true;
210
+ }
211
+
212
+ let fmtpFound = false;
213
+ for (const fmtp of media.fmtp) {
214
+ if (fmtp.payload === codecPayload) {
215
+ if (!fmtp.config.includes('x-google-start-bitrate')) {
216
+ fmtp.config += `;x-google-start-bitrate=${trackbr.maxbr * startBitrateForSVC}`;
217
+ }
218
+ if (!fmtp.config.includes('x-google-max-bitrate')) {
219
+ fmtp.config += `;x-google-max-bitrate=${trackbr.maxbr}`;
220
+ }
221
+ fmtpFound = true;
222
+ break;
168
223
  }
169
224
  }
170
225
 
226
+ if (!fmtpFound) {
227
+ media.fmtp.push({
228
+ payload: codecPayload,
229
+ config: `x-google-start-bitrate=${
230
+ trackbr.maxbr * startBitrateForSVC
231
+ };x-google-max-bitrate=${trackbr.maxbr}`,
232
+ });
233
+ }
234
+
171
235
  return true;
172
236
  });
173
237
  }
174
238
  });
175
239
 
176
- this.trackBitrates = [];
177
-
178
- await this.setMungedLocalDescription(offer, write(sdpParsed));
240
+ await this.setMungedSDP(offer, write(sdpParsed));
179
241
  this.onOffer(offer);
180
242
  }
181
243
 
@@ -187,16 +249,12 @@ export default class PCTransport extends EventEmitter {
187
249
  ensureAudioNackAndStereo(media, this.remoteStereoMids, this.remoteNackMids);
188
250
  }
189
251
  });
190
- await this.setMungedLocalDescription(answer, write(sdpParsed));
252
+ await this.setMungedSDP(answer, write(sdpParsed));
191
253
  return answer;
192
254
  }
193
255
 
194
- setTrackCodecBitrate(sid: string, codec: string, maxbr: number) {
195
- this.trackBitrates.push({
196
- sid,
197
- codec,
198
- maxbr,
199
- });
256
+ setTrackCodecBitrate(info: TrackBitrateInfo) {
257
+ this.trackBitrates.push(info);
200
258
  }
201
259
 
202
260
  close() {
@@ -205,22 +263,32 @@ export default class PCTransport extends EventEmitter {
205
263
  this.pc.close();
206
264
  }
207
265
 
208
- private async setMungedLocalDescription(sd: RTCSessionDescriptionInit, munged: string) {
209
- const originalSdp = sd.sdp;
210
- sd.sdp = munged;
211
- try {
212
- log.debug('setting munged local description');
213
- await this.pc.setLocalDescription(sd);
214
- return;
215
- } catch (e) {
216
- log.warn(`not able to set ${sd.type}, falling back to unmodified sdp`, {
217
- error: e,
218
- });
219
- sd.sdp = originalSdp;
266
+ private async setMungedSDP(sd: RTCSessionDescriptionInit, munged?: string, remote?: boolean) {
267
+ if (munged) {
268
+ const originalSdp = sd.sdp;
269
+ sd.sdp = munged;
270
+ try {
271
+ log.debug(`setting munged ${remote ? 'remote' : 'local'} description`);
272
+ if (remote) {
273
+ await this.pc.setRemoteDescription(sd);
274
+ } else {
275
+ await this.pc.setLocalDescription(sd);
276
+ }
277
+ return;
278
+ } catch (e) {
279
+ log.warn(`not able to set ${sd.type}, falling back to unmodified sdp`, {
280
+ error: e,
281
+ });
282
+ sd.sdp = originalSdp;
283
+ }
220
284
  }
221
285
 
222
286
  try {
223
- await this.pc.setLocalDescription(sd);
287
+ if (remote) {
288
+ await this.pc.setRemoteDescription(sd);
289
+ } else {
290
+ await this.pc.setLocalDescription(sd);
291
+ }
224
292
  } catch (e) {
225
293
  // this error cannot always be caught.
226
294
  // If the local description has a setCodecPreferences error, this error will be uncaught
@@ -0,0 +1,29 @@
1
+ import Room from './Room';
2
+ import { RoomEvent } from './events';
3
+
4
+ describe('Active device switch', () => {
5
+ it('updates devices correctly', async () => {
6
+ const room = new Room();
7
+ await room.switchActiveDevice('audioinput', 'test');
8
+ expect(room.getActiveDevice('audioinput')).toBe('test');
9
+ });
10
+ it('updates devices with exact constraint', async () => {
11
+ const room = new Room();
12
+ await room.switchActiveDevice('audioinput', 'test', true);
13
+ expect(room.getActiveDevice('audioinput')).toBe('test');
14
+ });
15
+ it('emits changed event', async () => {
16
+ const room = new Room();
17
+ let kind: MediaDeviceKind | undefined;
18
+ let deviceId: string | undefined;
19
+ const deviceChangeHandler = (_kind: MediaDeviceKind, _deviceId: string) => {
20
+ kind = _kind;
21
+ deviceId = _deviceId;
22
+ };
23
+ room.on(RoomEvent.ActiveDeviceChanged, deviceChangeHandler);
24
+ await room.switchActiveDevice('audioinput', 'test', true);
25
+
26
+ expect(deviceId).toBe('test');
27
+ expect(kind).toBe('audioinput');
28
+ });
29
+ });
package/src/room/Room.ts CHANGED
@@ -68,6 +68,7 @@ import {
68
68
  isWeb,
69
69
  supportsSetSinkId,
70
70
  unpackStreamId,
71
+ unwrapConstraint,
71
72
  } from './utils';
72
73
 
73
74
  export enum ConnectionState {
@@ -134,6 +135,8 @@ class Room extends EventEmitter<RoomEventCallbacks> {
134
135
 
135
136
  private connectionReconcileInterval?: ReturnType<typeof setInterval>;
136
137
 
138
+ private activeDeviceMap: Map<MediaDeviceKind, string>;
139
+
137
140
  /**
138
141
  * Creates a new Room, the primary construct for a LiveKit session.
139
142
  * @param options
@@ -161,6 +164,22 @@ class Room extends EventEmitter<RoomEventCallbacks> {
161
164
  this.maybeCreateEngine();
162
165
 
163
166
  this.disconnectLock = new Mutex();
167
+ this.activeDeviceMap = new Map();
168
+ if (this.options.videoCaptureDefaults.deviceId) {
169
+ this.activeDeviceMap.set(
170
+ 'videoinput',
171
+ unwrapConstraint(this.options.videoCaptureDefaults.deviceId),
172
+ );
173
+ }
174
+ if (this.options.audioCaptureDefaults.deviceId) {
175
+ this.activeDeviceMap.set(
176
+ 'audioinput',
177
+ unwrapConstraint(this.options.audioCaptureDefaults.deviceId),
178
+ );
179
+ }
180
+ if (this.options.audioOutput?.deviceId) {
181
+ this.switchActiveDevice('audiooutput', unwrapConstraint(this.options.audioOutput.deviceId));
182
+ }
164
183
 
165
184
  this.localParticipant = new LocalParticipant('', '', this.engine, this.options);
166
185
  }
@@ -717,15 +736,17 @@ class Room extends EventEmitter<RoomEventCallbacks> {
717
736
 
718
737
  /**
719
738
  * Returns the active audio output device used in this room.
720
- *
721
- * Note: to get the active `audioinput` or `videoinput` use [[LocalTrack.getDeviceId()]]
722
- *
723
739
  * @return the previously successfully set audio output device ID or an empty string if the default device is used.
740
+ * @deprecated use `getActiveDevice('audiooutput')` instead
724
741
  */
725
742
  getActiveAudioOutputDevice(): string {
726
743
  return this.options.audioOutput?.deviceId ?? '';
727
744
  }
728
745
 
746
+ getActiveDevice(kind: MediaDeviceKind): string | undefined {
747
+ return this.activeDeviceMap.get(kind);
748
+ }
749
+
729
750
  /**
730
751
  * Switches all active devices used in this room to the given device.
731
752
  *
@@ -737,15 +758,20 @@ class Room extends EventEmitter<RoomEventCallbacks> {
737
758
  * @param deviceId
738
759
  */
739
760
  async switchActiveDevice(kind: MediaDeviceKind, deviceId: string, exact: boolean = false) {
761
+ let deviceHasChanged = false;
762
+ let success = true;
740
763
  const deviceConstraint = exact ? { exact: deviceId } : deviceId;
741
764
  if (kind === 'audioinput') {
742
765
  const prevDeviceId = this.options.audioCaptureDefaults!.deviceId;
743
766
  this.options.audioCaptureDefaults!.deviceId = deviceConstraint;
767
+ deviceHasChanged = prevDeviceId !== deviceConstraint;
744
768
  const tracks = Array.from(this.localParticipant.audioTracks.values()).filter(
745
769
  (track) => track.source === Track.Source.Microphone,
746
770
  );
747
771
  try {
748
- await Promise.all(tracks.map((t) => t.audioTrack?.setDeviceId(deviceConstraint)));
772
+ success = (
773
+ await Promise.all(tracks.map((t) => t.audioTrack?.setDeviceId(deviceConstraint)))
774
+ ).every((val) => val === true);
749
775
  } catch (e) {
750
776
  this.options.audioCaptureDefaults!.deviceId = prevDeviceId;
751
777
  throw e;
@@ -753,32 +779,50 @@ class Room extends EventEmitter<RoomEventCallbacks> {
753
779
  } else if (kind === 'videoinput') {
754
780
  const prevDeviceId = this.options.videoCaptureDefaults!.deviceId;
755
781
  this.options.videoCaptureDefaults!.deviceId = deviceConstraint;
782
+ deviceHasChanged = prevDeviceId !== deviceConstraint;
756
783
  const tracks = Array.from(this.localParticipant.videoTracks.values()).filter(
757
784
  (track) => track.source === Track.Source.Camera,
758
785
  );
759
786
  try {
760
- await Promise.all(tracks.map((t) => t.videoTrack?.setDeviceId(deviceConstraint)));
787
+ success = (
788
+ await Promise.all(tracks.map((t) => t.videoTrack?.setDeviceId(deviceConstraint)))
789
+ ).every((val) => val === true);
761
790
  } catch (e) {
762
791
  this.options.videoCaptureDefaults!.deviceId = prevDeviceId;
763
792
  throw e;
764
793
  }
765
794
  } else if (kind === 'audiooutput') {
766
- // TODO add support for webaudio mix once the API becomes available https://github.com/WebAudio/web-audio-api/pull/2498
767
- if (!supportsSetSinkId()) {
795
+ if (
796
+ (!supportsSetSinkId() && !this.options.expWebAudioMix) ||
797
+ (this.audioContext && !('setSinkId' in this.audioContext))
798
+ ) {
768
799
  throw new Error('cannot switch audio output, setSinkId not supported');
769
800
  }
770
801
  this.options.audioOutput ??= {};
771
802
  const prevDeviceId = this.options.audioOutput.deviceId;
772
803
  this.options.audioOutput.deviceId = deviceId;
804
+ deviceHasChanged = prevDeviceId !== deviceConstraint;
805
+
773
806
  try {
774
- await Promise.all(
775
- Array.from(this.participants.values()).map((p) => p.setAudioOutput({ deviceId })),
776
- );
807
+ if (this.options.expWebAudioMix) {
808
+ // @ts-expect-error setSinkId is not yet in the typescript type of AudioContext
809
+ this.audioContext?.setSinkId(deviceId);
810
+ } else {
811
+ await Promise.all(
812
+ Array.from(this.participants.values()).map((p) => p.setAudioOutput({ deviceId })),
813
+ );
814
+ }
777
815
  } catch (e) {
778
816
  this.options.audioOutput.deviceId = prevDeviceId;
779
817
  throw e;
780
818
  }
781
819
  }
820
+ if (deviceHasChanged && success) {
821
+ this.activeDeviceMap.set(kind, deviceId);
822
+ this.emit(RoomEvent.ActiveDeviceChanged, kind, deviceId);
823
+ }
824
+
825
+ return success;
782
826
  }
783
827
 
784
828
  private setupLocalParticipantEvents() {
@@ -1717,4 +1761,5 @@ export type RoomEventCallbacks = {
1717
1761
  signalConnected: () => void;
1718
1762
  recordingStatusChanged: (recording: boolean) => void;
1719
1763
  dcBufferStatusChanged: (isLow: boolean, kind: DataPacket_Kind) => void;
1764
+ activeDeviceChanged: (kind: MediaDeviceKind, deviceId: string) => void;
1720
1765
  };
@@ -278,6 +278,12 @@ export enum RoomEvent {
278
278
  * args: (isLow: boolean, kind: [[DataPacket_Kind]])
279
279
  */
280
280
  DCBufferStatusChanged = 'dcBufferStatusChanged',
281
+
282
+ /**
283
+ * Triggered by a call to room.switchActiveDevice
284
+ * args: (kind: MediaDeviceKind, deviceId: string)
285
+ */
286
+ ActiveDeviceChanged = 'activeDeviceChanged',
281
287
  }
282
288
 
283
289
  export enum ParticipantEvent {
@@ -494,12 +494,12 @@ export default class LocalParticipant extends Participant {
494
494
  return existingPublication;
495
495
  }
496
496
 
497
- const isStereo =
498
- options?.forceStereo ||
497
+ const isStereoInput =
499
498
  ('channelCount' in track.mediaStreamTrack.getSettings() &&
500
499
  // @ts-ignore `channelCount` on getSettings() is currently only available for Safari, but is generally the best way to determine a stereo track https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/channelCount
501
500
  track.mediaStreamTrack.getSettings().channelCount === 2) ||
502
501
  track.mediaStreamTrack.getConstraints().channelCount === 2;
502
+ const isStereo = options?.forceStereo ?? isStereoInput;
503
503
 
504
504
  // disable dtx for stereo track if not enabled explicitly
505
505
  if (isStereo) {
@@ -730,12 +730,36 @@ export default class LocalParticipant extends Participant {
730
730
  // store RTPSender
731
731
  track.sender = await this.engine.createSender(track, opts, encodings);
732
732
 
733
- if (track.codec && isSVCCodec(track.codec) && encodings && encodings[0]?.maxBitrate) {
734
- this.engine.publisher.setTrackCodecBitrate(
735
- req.cid,
736
- track.codec,
737
- encodings[0].maxBitrate / 1000,
738
- );
733
+ if (encodings) {
734
+ if (isFireFox() && track.kind === Track.Kind.Audio) {
735
+ /* Refer to RFC https://datatracker.ietf.org/doc/html/rfc7587#section-6.1,
736
+ livekit-server uses maxaveragebitrate=510000 in the answer sdp to permit the client to
737
+ publish a high quality audio track. But Firefox always uses this value as the actual
738
+ bitrate, causing the audio bitrate to rise to 510Kbps in any stereo case unexpectedly.
739
+ So the client needs to modify maxaveragebitrate in the answer sdp to the user-provided value to
740
+ fix the issue.
741
+ */
742
+ let trackTransceiver: RTCRtpTransceiver | undefined = undefined;
743
+ for (const transceiver of this.engine.publisher.pc.getTransceivers()) {
744
+ if (transceiver.sender === track.sender) {
745
+ trackTransceiver = transceiver;
746
+ break;
747
+ }
748
+ }
749
+ if (trackTransceiver) {
750
+ this.engine.publisher.setTrackCodecBitrate({
751
+ transceiver: trackTransceiver,
752
+ codec: 'opus',
753
+ maxbr: encodings[0]?.maxBitrate ? encodings[0].maxBitrate / 1000 : 0,
754
+ });
755
+ }
756
+ } else if (track.codec && isSVCCodec(track.codec) && encodings[0]?.maxBitrate) {
757
+ this.engine.publisher.setTrackCodecBitrate({
758
+ cid: req.cid,
759
+ codec: track.codec,
760
+ maxbr: encodings[0].maxBitrate / 1000,
761
+ });
762
+ }
739
763
  }
740
764
 
741
765
  this.engine.negotiate();
@@ -10,7 +10,7 @@ import type {
10
10
  VideoCodec,
11
11
  VideoEncoding,
12
12
  } from '../track/options';
13
- import { getReactNativeOs, isReactNative, isSVCCodec } from '../utils';
13
+ import { getReactNativeOs, isFireFox, isReactNative, isSVCCodec } from '../utils';
14
14
 
15
15
  /** @internal */
16
16
  export function mediaTrackToLocalTrack(
@@ -118,6 +118,7 @@ export function computeVideoEncodings(
118
118
  height,
119
119
  videoEncoding.maxBitrate,
120
120
  videoEncoding.maxFramerate,
121
+ videoEncoding.priority,
121
122
  );
122
123
 
123
124
  if (scalabilityMode && isSVCCodec(videoCodec)) {
@@ -311,7 +312,8 @@ function encodingsFromPresets(
311
312
  if (preset.encoding.maxFramerate) {
312
313
  encoding.maxFramerate = preset.encoding.maxFramerate;
313
314
  }
314
- if (preset.encoding.priority) {
315
+ const canSetPriority = isFireFox() || idx === 0;
316
+ if (preset.encoding.priority && canSetPriority) {
315
317
  encoding.priority = preset.encoding.priority;
316
318
  encoding.networkPriority = preset.encoding.priority;
317
319
  }
@@ -2,7 +2,7 @@ import log from '../../logger';
2
2
  import { TrackEvent } from '../events';
3
3
  import { computeBitrate, monitorFrequency } from '../stats';
4
4
  import type { AudioSenderStats } from '../stats';
5
- import { isWeb } from '../utils';
5
+ import { isWeb, unwrapConstraint } from '../utils';
6
6
  import LocalTrack from './LocalTrack';
7
7
  import { Track } from './Track';
8
8
  import type { AudioCaptureOptions } from './options';
@@ -29,14 +29,15 @@ export default class LocalAudioTrack extends LocalTrack {
29
29
  this.checkForSilence();
30
30
  }
31
31
 
32
- async setDeviceId(deviceId: ConstrainDOMString) {
32
+ async setDeviceId(deviceId: ConstrainDOMString): Promise<boolean> {
33
33
  if (this.constraints.deviceId === deviceId) {
34
- return;
34
+ return true;
35
35
  }
36
36
  this.constraints.deviceId = deviceId;
37
37
  if (!this.isMuted) {
38
38
  await this.restartTrack();
39
39
  }
40
+ return unwrapConstraint(deviceId) === this.mediaStreamTrack.getSettings().deviceId;
40
41
  }
41
42
 
42
43
  async mute(): Promise<LocalAudioTrack> {
@@ -182,7 +182,7 @@ export default abstract class LocalTrack extends Track {
182
182
  }
183
183
 
184
184
  log.debug('replace MediaStreamTrack');
185
- this.setMediaStreamTrack(track);
185
+ await this.setMediaStreamTrack(track);
186
186
  // this must be synced *after* setting mediaStreamTrack above, since it relies
187
187
  // on the previous state in order to cleanup
188
188
  this.providedByUser = userProvidedTrack;
@@ -227,7 +227,7 @@ export default abstract class LocalTrack extends Track {
227
227
  newTrack.addEventListener('ended', this.handleEnded);
228
228
  log.debug('re-acquired MediaStreamTrack');
229
229
 
230
- this.setMediaStreamTrack(newTrack);
230
+ await this.setMediaStreamTrack(newTrack);
231
231
  this.constraints = constraints;
232
232
  if (this.processor) {
233
233
  const processor = this.processor;
@@ -5,7 +5,7 @@ import type { SubscribedCodec, SubscribedQuality } from '../../proto/livekit_rtc
5
5
  import { ScalabilityMode } from '../participant/publishUtils';
6
6
  import { computeBitrate, monitorFrequency } from '../stats';
7
7
  import type { VideoSenderStats } from '../stats';
8
- import { Mutex, isFireFox, isMobile, isWeb } from '../utils';
8
+ import { Mutex, isFireFox, isMobile, isWeb, unwrapConstraint } from '../utils';
9
9
  import LocalTrack from './LocalTrack';
10
10
  import { Track } from './Track';
11
11
  import type { VideoCaptureOptions, VideoCodec } from './options';
@@ -182,9 +182,12 @@ export default class LocalVideoTrack extends LocalTrack {
182
182
  this.setPublishingLayers(qualities);
183
183
  }
184
184
 
185
- async setDeviceId(deviceId: ConstrainDOMString) {
186
- if (this.constraints.deviceId === deviceId) {
187
- return;
185
+ async setDeviceId(deviceId: ConstrainDOMString): Promise<boolean> {
186
+ if (
187
+ this.constraints.deviceId === deviceId &&
188
+ this._mediaStreamTrack.getSettings().deviceId === unwrapConstraint(deviceId)
189
+ ) {
190
+ return true;
188
191
  }
189
192
  this.constraints.deviceId = deviceId;
190
193
  // when video is muted, underlying media stream track is stopped and
@@ -192,6 +195,7 @@ export default class LocalVideoTrack extends LocalTrack {
192
195
  if (!this.isMuted) {
193
196
  await this.restartTrack();
194
197
  }
198
+ return unwrapConstraint(deviceId) === this._mediaStreamTrack.getSettings().deviceId;
195
199
  }
196
200
 
197
201
  async restartTrack(options?: VideoCaptureOptions) {
@@ -23,8 +23,6 @@ export default class RemoteVideoTrack extends RemoteTrack {
23
23
 
24
24
  private lastDimensions?: Track.Dimensions;
25
25
 
26
- private isObserved: boolean = false;
27
-
28
26
  constructor(
29
27
  mediaTrack: MediaStreamTrack,
30
28
  sid: string,
@@ -39,12 +37,10 @@ export default class RemoteVideoTrack extends RemoteTrack {
39
37
  return this.adaptiveStreamSettings !== undefined;
40
38
  }
41
39
 
40
+ /**
41
+ * Note: When using adaptiveStream, you need to use remoteVideoTrack.attach() to add the track to a HTMLVideoElement, otherwise your video tracks might never start
42
+ */
42
43
  get mediaStreamTrack() {
43
- if (this.isAdaptiveStream && !this.isObserved) {
44
- log.warn(
45
- 'When using adaptiveStream, you need to use remoteVideoTrack.attach() to add the track to a HTMLVideoElement, otherwise your video tracks might never start',
46
- );
47
- }
48
44
  return this._mediaStreamTrack;
49
45
  }
50
46
 
@@ -106,7 +102,6 @@ export default class RemoteVideoTrack extends RemoteTrack {
106
102
  // the tab comes into focus for the first time.
107
103
  this.debouncedHandleResize();
108
104
  this.updateVisibility();
109
- this.isObserved = true;
110
105
  } else {
111
106
  log.warn('visibility resize observer not triggered');
112
107
  }
@@ -45,7 +45,7 @@ export interface TrackPublishDefaults {
45
45
  red?: boolean;
46
46
 
47
47
  /**
48
- * stereo audio track. defaults determined by capture channel count.
48
+ * publish track in stereo mode (or set to false to disable). defaults determined by capture channel count.
49
49
  */
50
50
  forceStereo?: boolean;
51
51
 
@@ -1,5 +1,5 @@
1
1
  import { AudioCaptureOptions, VideoCaptureOptions, VideoPresets } from './options';
2
- import { constraintsForOptions, mergeDefaultOptions } from './utils';
2
+ import { constraintsForOptions, facingModeFromDeviceLabel, mergeDefaultOptions } from './utils';
3
3
 
4
4
  describe('mergeDefaultOptions', () => {
5
5
  const audioDefaults: AudioCaptureOptions = {
@@ -108,3 +108,32 @@ describe('constraintsForOptions', () => {
108
108
  expect(videoOpts.aspectRatio).toEqual(VideoPresets.h720.resolution.aspectRatio);
109
109
  });
110
110
  });
111
+
112
+ describe('Test facingMode detection', () => {
113
+ test('OBS virtual camera should be detected.', () => {
114
+ const result = facingModeFromDeviceLabel('OBS Virtual Camera');
115
+ expect(result?.facingMode).toEqual('environment');
116
+ expect(result?.confidence).toEqual('medium');
117
+ });
118
+
119
+ test.each([
120
+ ['Peter’s iPhone Camera', { facingMode: 'environment', confidence: 'medium' }],
121
+ ['iPhone de Théo Camera', { facingMode: 'environment', confidence: 'medium' }],
122
+ ])(
123
+ 'Device labels that contain "iphone" should return facingMode "environment".',
124
+ (label, expected) => {
125
+ const result = facingModeFromDeviceLabel(label);
126
+ expect(result?.facingMode).toEqual(expected.facingMode);
127
+ expect(result?.confidence).toEqual(expected.confidence);
128
+ },
129
+ );
130
+
131
+ test.each([
132
+ ['Peter’s iPad Camera', { facingMode: 'environment', confidence: 'medium' }],
133
+ ['iPad de Théo Camera', { facingMode: 'environment', confidence: 'medium' }],
134
+ ])('Device label that contain "ipad" should detect.', (label, expected) => {
135
+ const result = facingModeFromDeviceLabel(label);
136
+ expect(result?.facingMode).toEqual(expected.facingMode);
137
+ expect(result?.confidence).toEqual(expected.confidence);
138
+ });
139
+ });