livekit-client 2.1.5 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/README.md +2 -6
  2. package/dist/livekit-client.esm.mjs +175 -77
  3. package/dist/livekit-client.esm.mjs.map +1 -1
  4. package/dist/livekit-client.umd.js +1 -1
  5. package/dist/livekit-client.umd.js.map +1 -1
  6. package/dist/src/index.d.ts +2 -2
  7. package/dist/src/index.d.ts.map +1 -1
  8. package/dist/src/room/DeviceManager.d.ts.map +1 -1
  9. package/dist/src/room/RTCEngine.d.ts +2 -2
  10. package/dist/src/room/RTCEngine.d.ts.map +1 -1
  11. package/dist/src/room/Room.d.ts +7 -2
  12. package/dist/src/room/Room.d.ts.map +1 -1
  13. package/dist/src/room/events.d.ts +18 -1
  14. package/dist/src/room/events.d.ts.map +1 -1
  15. package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
  16. package/dist/src/room/participant/Participant.d.ts +6 -3
  17. package/dist/src/room/participant/Participant.d.ts.map +1 -1
  18. package/dist/src/room/participant/RemoteParticipant.d.ts +3 -3
  19. package/dist/src/room/participant/RemoteParticipant.d.ts.map +1 -1
  20. package/dist/src/room/participant/publishUtils.d.ts.map +1 -1
  21. package/dist/src/room/track/LocalTrack.d.ts +1 -1
  22. package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
  23. package/dist/src/room/track/LocalVideoTrack.d.ts +1 -1
  24. package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
  25. package/dist/src/room/track/create.d.ts.map +1 -1
  26. package/dist/src/room/track/options.d.ts +9 -0
  27. package/dist/src/room/track/options.d.ts.map +1 -1
  28. package/dist/ts4.2/src/index.d.ts +2 -2
  29. package/dist/ts4.2/src/room/RTCEngine.d.ts +2 -2
  30. package/dist/ts4.2/src/room/Room.d.ts +7 -2
  31. package/dist/ts4.2/src/room/events.d.ts +18 -1
  32. package/dist/ts4.2/src/room/participant/Participant.d.ts +7 -3
  33. package/dist/ts4.2/src/room/participant/RemoteParticipant.d.ts +3 -3
  34. package/dist/ts4.2/src/room/track/LocalTrack.d.ts +1 -1
  35. package/dist/ts4.2/src/room/track/LocalVideoTrack.d.ts +1 -1
  36. package/dist/ts4.2/src/room/track/options.d.ts +9 -0
  37. package/package.json +9 -9
  38. package/src/index.ts +2 -1
  39. package/src/room/DeviceManager.test.ts +105 -0
  40. package/src/room/DeviceManager.ts +11 -6
  41. package/src/room/RTCEngine.ts +23 -6
  42. package/src/room/Room.ts +48 -11
  43. package/src/room/defaults.ts +1 -1
  44. package/src/room/events.ts +21 -1
  45. package/src/room/participant/LocalParticipant.ts +36 -25
  46. package/src/room/participant/Participant.ts +14 -1
  47. package/src/room/participant/RemoteParticipant.ts +17 -4
  48. package/src/room/participant/publishUtils.ts +4 -0
  49. package/src/room/track/LocalTrack.ts +14 -10
  50. package/src/room/track/LocalVideoTrack.ts +4 -1
  51. package/src/room/track/create.ts +37 -27
  52. package/src/room/track/options.ts +15 -0
package/src/room/participant/LocalParticipant.ts
@@ -30,6 +30,7 @@ import type {
   VideoCaptureOptions,
 } from '../track/options';
 import { ScreenSharePresets, VideoPresets, isBackupCodec } from '../track/options';
+import type { TrackProcessor } from '../track/processor/types';
 import {
   constraintsForOptions,
   getLogContextFromTrack,
@@ -394,13 +395,13 @@ export default class LocalParticipant extends Participant {
    * @returns
    */
   async createTracks(options?: CreateLocalTracksOptions): Promise<LocalTrack[]> {
-    const opts = mergeDefaultOptions(
+    const mergedOptions = mergeDefaultOptions(
       options,
       this.roomOptions?.audioCaptureDefaults,
       this.roomOptions?.videoCaptureDefaults,
     );
 
-    const constraints = constraintsForOptions(opts);
+    const constraints = constraintsForOptions(mergedOptions);
     let stream: MediaStream | undefined;
     try {
       stream = await navigator.mediaDevices.getUserMedia(constraints);
@@ -425,29 +426,39 @@ export default class LocalParticipant extends Participant {
       this.cameraError = undefined;
     }
 
-    return stream.getTracks().map((mediaStreamTrack) => {
-      const isAudio = mediaStreamTrack.kind === 'audio';
-      let trackOptions = isAudio ? options!.audio : options!.video;
-      if (typeof trackOptions === 'boolean' || !trackOptions) {
-        trackOptions = {};
-      }
-      let trackConstraints: MediaTrackConstraints | undefined;
-      const conOrBool = isAudio ? constraints.audio : constraints.video;
-      if (typeof conOrBool !== 'boolean') {
-        trackConstraints = conOrBool;
-      }
-      const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints, {
-        loggerName: this.roomOptions.loggerName,
-        loggerContextCb: () => this.logContext,
-      });
-      if (track.kind === Track.Kind.Video) {
-        track.source = Track.Source.Camera;
-      } else if (track.kind === Track.Kind.Audio) {
-        track.source = Track.Source.Microphone;
-      }
-      track.mediaStream = stream;
-      return track;
-    });
+    return Promise.all(
+      stream.getTracks().map(async (mediaStreamTrack) => {
+        const isAudio = mediaStreamTrack.kind === 'audio';
+        let trackOptions = isAudio ? mergedOptions!.audio : mergedOptions!.video;
+        if (typeof trackOptions === 'boolean' || !trackOptions) {
+          trackOptions = {};
+        }
+        let trackConstraints: MediaTrackConstraints | undefined;
+        const conOrBool = isAudio ? constraints.audio : constraints.video;
+        if (typeof conOrBool !== 'boolean') {
+          trackConstraints = conOrBool;
+        }
+        const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints, {
+          loggerName: this.roomOptions.loggerName,
+          loggerContextCb: () => this.logContext,
+        });
+        if (track.kind === Track.Kind.Video) {
+          track.source = Track.Source.Camera;
+        } else if (track.kind === Track.Kind.Audio) {
+          track.source = Track.Source.Microphone;
+          track.setAudioContext(this.audioContext);
+        }
+        track.mediaStream = stream;
+        if (trackOptions.processor) {
+          if (track instanceof LocalAudioTrack) {
+            await track.setProcessor(trackOptions.processor as TrackProcessor<Track.Kind.Audio>);
+          } else {
+            await track.setProcessor(trackOptions.processor as TrackProcessor<Track.Kind.Video>);
+          }
+        }
+        return track;
+      }),
+    );
   }
 
   /**
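
With this change, a processor supplied in the capture options is applied to each newly created track before createTracks resolves, and local audio tracks now receive the participant's AudioContext. A rough usage sketch; BackgroundBlur comes from the separate @livekit/track-processors package and is only one possible TrackProcessor implementation, and url/token are placeholders:

```ts
import { Room } from 'livekit-client';
import { BackgroundBlur } from '@livekit/track-processors';

const room = new Room();
await room.connect(url, token); // url and token supplied by your app

// the merged capture options carry the processor, so the returned video track
// already has background blur applied when it comes back
const tracks = await room.localParticipant.createTracks({
  audio: true,
  video: { processor: BackgroundBlur(10) },
});
await Promise.all(tracks.map((track) => room.localParticipant.publishTrack(track)));
```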
package/src/room/participant/Participant.ts
@@ -1,8 +1,10 @@
 import {
   DataPacket_Kind,
   ParticipantInfo,
+  ParticipantInfo_Kind as ParticipantKind,
   ParticipantPermission,
   ConnectionQuality as ProtoQuality,
+  type SipDTMF,
   SubscriptionError,
 } from '@livekit/protocol';
 import { EventEmitter } from 'events';
@@ -45,6 +47,8 @@ function qualityFromProto(q: ProtoQuality): ConnectionQuality {
   }
 }
 
+export { ParticipantKind };
+
 export default class Participant extends (EventEmitter as new () => TypedEmitter<ParticipantEventCallbacks>) {
   protected participantInfo?: ParticipantInfo;
 
@@ -77,6 +81,8 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
 
   permissions?: ParticipantPermission;
 
+  protected _kind: ParticipantKind;
+
   private _connectionQuality: ConnectionQuality = ConnectionQuality.Unknown;
 
   protected audioContext?: AudioContext;
@@ -99,7 +105,11 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
   }
 
   get isAgent() {
-    return this.permissions?.agent ?? false;
+    return this.permissions?.agent || this.kind === ParticipantKind.AGENT;
+  }
+
+  get kind() {
+    return this._kind;
   }
 
   /** @internal */
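
The new kind getter surfaces the protocol-level ParticipantInfo_Kind (re-exported as ParticipantKind), and isAgent now also reports true when that kind is AGENT. A short sketch, assuming ParticipantKind is re-exported from the package root as the src/index.ts change suggests:

```ts
import { ParticipantKind, RoomEvent } from 'livekit-client';

room.on(RoomEvent.ParticipantConnected, (participant) => {
  // kind distinguishes standard users from agents, SIP callers, ingress/egress participants
  if (participant.kind === ParticipantKind.AGENT || participant.isAgent) {
    console.log(`agent joined: ${participant.identity}`);
  }
});
```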
@@ -109,6 +119,7 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
     name?: string,
     metadata?: string,
     loggerOptions?: LoggerOptions,
+    kind: ParticipantKind = ParticipantKind.STANDARD,
   ) {
     super();
 
@@ -123,6 +134,7 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
     this.audioTrackPublications = new Map();
     this.videoTrackPublications = new Map();
     this.trackPublications = new Map();
+    this._kind = kind;
   }
 
   getTrackPublications(): TrackPublication[] {
@@ -329,6 +341,7 @@ export type ParticipantEventCallbacks = {
   participantMetadataChanged: (prevMetadata: string | undefined, participant?: any) => void;
   participantNameChanged: (name: string) => void;
   dataReceived: (payload: Uint8Array, kind: DataPacket_Kind) => void;
+  sipDTMFReceived: (dtmf: SipDTMF) => void;
  transcriptionReceived: (
    transcription: TranscriptionSegment[],
    publication?: TrackPublication,
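
sipDTMFReceived exposes DTMF tones forwarded from SIP participants. A minimal listener sketch using the callback name from the map above (the matching room-level event added in events.ts is not shown in this hunk); the payload is the SipDTMF message from @livekit/protocol:

```ts
participant.on('sipDTMFReceived', (dtmf) => {
  // SipDTMF carries the pressed digit (and a numeric code) per @livekit/protocol
  console.log(`DTMF from ${participant.identity}: ${dtmf.digit}`);
});
```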
package/src/room/participant/RemoteParticipant.ts
@@ -16,7 +16,7 @@ import type { AudioOutputOptions } from '../track/options';
 import type { AdaptiveStreamSettings } from '../track/types';
 import { getLogContextFromTrack } from '../track/utils';
 import type { LoggerOptions } from '../types';
-import Participant from './Participant';
+import Participant, { ParticipantKind } from './Participant';
 import type { ParticipantEventCallbacks } from './Participant';
 
 export default class RemoteParticipant extends Participant {
@@ -33,8 +33,20 @@ export default class RemoteParticipant extends Participant {
   private audioOutput?: AudioOutputOptions;
 
   /** @internal */
-  static fromParticipantInfo(signalClient: SignalClient, pi: ParticipantInfo): RemoteParticipant {
-    return new RemoteParticipant(signalClient, pi.sid, pi.identity, pi.name, pi.metadata);
+  static fromParticipantInfo(
+    signalClient: SignalClient,
+    pi: ParticipantInfo,
+    loggerOptions: LoggerOptions,
+  ): RemoteParticipant {
+    return new RemoteParticipant(
+      signalClient,
+      pi.sid,
+      pi.identity,
+      pi.name,
+      pi.metadata,
+      loggerOptions,
+      pi.kind,
+    );
   }
 
   protected get logContext() {
@@ -53,8 +65,9 @@ export default class RemoteParticipant extends Participant {
     name?: string,
     metadata?: string,
     loggerOptions?: LoggerOptions,
+    kind: ParticipantKind = ParticipantKind.STANDARD,
   ) {
-    super(sid, identity || '', name, metadata, loggerOptions);
+    super(sid, identity || '', name, metadata, loggerOptions, kind);
     this.signalClient = signalClient;
     this.trackPublications = new Map();
     this.audioTrackPublications = new Map();
package/src/room/participant/publishUtils.ts
@@ -149,6 +149,10 @@ export function computeVideoEncodings(
   const browser = getBrowser();
   if (
     isSafari() ||
+    // Even tho RN runs M114, it does not produce SVC layers when a single encoding
+    // is provided. So we'll use the legacy SVC specification for now.
+    // TODO: when we upstream libwebrtc, this will need additional verification
+    isReactNative() ||
     (browser?.name === 'Chrome' && compareVersions(browser?.version, '113') < 0)
   ) {
     const bitratesRatio = sm.suffix == 'h' ? 2 : 3;
package/src/room/track/LocalTrack.ts
@@ -433,7 +433,7 @@ export default abstract class LocalTrack<
       this.emit(TrackEvent.UpstreamResumed, this);
 
       // this operation is noop if mediastreamtrack is already being sent
-      await this.sender.replaceTrack(this._mediaStreamTrack);
+      await this.sender.replaceTrack(this.mediaStreamTrack);
     } finally {
       unlock();
     }
@@ -468,16 +468,17 @@ export default abstract class LocalTrack<
     try {
       this.log.debug('setting up processor', this.logContext);
 
-      this.processorElement =
-        this.processorElement ?? (document.createElement(this.kind) as HTMLMediaElement);
+      const processorElement = document.createElement(this.kind) as HTMLMediaElement;
 
       const processorOptions = {
         kind: this.kind,
         track: this._mediaStreamTrack,
-        element: this.processorElement,
+        element: processorElement,
         audioContext: this.audioContext,
       };
       await processor.init(processorOptions);
+      this.log.debug('processor initialized', this.logContext);
+
       if (this.processor) {
         await this.stopProcessor();
       }
@@ -485,16 +486,17 @@ export default abstract class LocalTrack<
         throw TypeError('cannot set processor on track of unknown kind');
       }
 
-      attachToElement(this._mediaStreamTrack, this.processorElement);
-      this.processorElement.muted = true;
+      attachToElement(this._mediaStreamTrack, processorElement);
+      processorElement.muted = true;
 
-      this.processorElement
+      processorElement
        .play()
        .catch((error) =>
          this.log.error('failed to play processor element', { ...this.logContext, error }),
        );
 
       this.processor = processor;
+      this.processorElement = processorElement;
       if (this.processor.processedTrack) {
         for (const el of this.attachedElements) {
           if (el !== this.processorElement && showProcessedStreamLocally) {
@@ -521,15 +523,17 @@ export default abstract class LocalTrack<
    * @experimental
    * @returns
    */
-  async stopProcessor() {
+  async stopProcessor(keepElement = true) {
     if (!this.processor) return;
 
     this.log.debug('stopping processor', this.logContext);
     this.processor.processedTrack?.stop();
     await this.processor.destroy();
     this.processor = undefined;
-    this.processorElement?.remove();
-    this.processorElement = undefined;
+    if (!keepElement) {
+      this.processorElement?.remove();
+      this.processorElement = undefined;
+    }
     // apply original track constraints in case the processor changed them
     await this._mediaStreamTrack.applyConstraints(this._constraints);
     // force re-setting of the mediaStreamTrack on the sender
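
Because setProcessor calls stopProcessor internally when replacing an existing processor, the new keepElement default avoids tearing down and recreating the helper media element on every swap; pass false to dispose of it for good. A brief sketch, with videoTrack and newProcessor standing in for your own objects:

```ts
// swapping processors reuses the existing processor element (keepElement defaults to true)
await videoTrack.setProcessor(newProcessor);

// when processing is no longer needed, drop the element as well
await videoTrack.stopProcessor(false);
```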
package/src/room/track/LocalVideoTrack.ts
@@ -276,7 +276,10 @@ export default class LocalVideoTrack extends LocalTrack<Track.Kind.Video> {
     }
   }
 
-  async setProcessor(processor: TrackProcessor<Track.Kind>, showProcessedStreamLocally = true) {
+  async setProcessor(
+    processor: TrackProcessor<Track.Kind.Video>,
+    showProcessedStreamLocally = true,
+  ) {
     await super.setProcessor(processor, showProcessedStreamLocally);
 
     if (this.processor?.processedTrack) {
package/src/room/track/create.ts
@@ -14,6 +14,7 @@ import type {
   VideoCaptureOptions,
 } from './options';
 import { ScreenSharePresets } from './options';
+import type { TrackProcessor } from './processor/types';
 import {
   constraintsForOptions,
   mergeDefaultOptions,
@@ -51,35 +52,44 @@ export async function createLocalTracks(
   }
 
   const stream = await mediaPromise;
-  return stream.getTracks().map((mediaStreamTrack) => {
-    const isAudio = mediaStreamTrack.kind === 'audio';
-    let trackOptions = isAudio ? options!.audio : options!.video;
-    if (typeof trackOptions === 'boolean' || !trackOptions) {
-      trackOptions = {};
-    }
-    let trackConstraints: MediaTrackConstraints | undefined;
-    const conOrBool = isAudio ? constraints.audio : constraints.video;
-    if (typeof conOrBool !== 'boolean') {
-      trackConstraints = conOrBool;
-    }
+  return Promise.all(
+    stream.getTracks().map(async (mediaStreamTrack) => {
+      const isAudio = mediaStreamTrack.kind === 'audio';
+      let trackOptions = isAudio ? options!.audio : options!.video;
+      if (typeof trackOptions === 'boolean' || !trackOptions) {
+        trackOptions = {};
+      }
+      let trackConstraints: MediaTrackConstraints | undefined;
+      const conOrBool = isAudio ? constraints.audio : constraints.video;
+      if (typeof conOrBool !== 'boolean') {
+        trackConstraints = conOrBool;
+      }
 
-    // update the constraints with the device id the user gave permissions to in the permission prompt
-    // otherwise each track restart (e.g. mute - unmute) will try to initialize the device again -> causing additional permission prompts
-    if (trackConstraints) {
-      trackConstraints.deviceId = mediaStreamTrack.getSettings().deviceId;
-    } else {
-      trackConstraints = { deviceId: mediaStreamTrack.getSettings().deviceId };
-    }
+      // update the constraints with the device id the user gave permissions to in the permission prompt
+      // otherwise each track restart (e.g. mute - unmute) will try to initialize the device again -> causing additional permission prompts
+      if (trackConstraints) {
+        trackConstraints.deviceId = mediaStreamTrack.getSettings().deviceId;
+      } else {
+        trackConstraints = { deviceId: mediaStreamTrack.getSettings().deviceId };
+      }
 
-    const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints);
-    if (track.kind === Track.Kind.Video) {
-      track.source = Track.Source.Camera;
-    } else if (track.kind === Track.Kind.Audio) {
-      track.source = Track.Source.Microphone;
-    }
-    track.mediaStream = stream;
-    return track;
-  });
+      const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints);
+      if (track.kind === Track.Kind.Video) {
+        track.source = Track.Source.Camera;
+      } else if (track.kind === Track.Kind.Audio) {
+        track.source = Track.Source.Microphone;
+      }
+      track.mediaStream = stream;
+      if (trackOptions.processor) {
+        if (track instanceof LocalAudioTrack) {
+          await track.setProcessor(trackOptions.processor as TrackProcessor<Track.Kind.Audio>);
+        } else if (track instanceof LocalVideoTrack) {
+          await track.setProcessor(trackOptions.processor as TrackProcessor<Track.Kind.Video>);
+        }
+      }
+      return track;
+    }),
+  );
 }
 
 /**
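
The standalone createLocalTracks helper gains the same behavior: the map callback is now async so a processor given in the audio or video capture options can be awaited per track before the function resolves. A sketch with placeholder processors:

```ts
import { createLocalTracks } from 'livekit-client';

// myAudioProcessor / myVideoProcessor are placeholders for any TrackProcessor
// implementations, e.g. noise suppression or background blur
const tracks = await createLocalTracks({
  audio: { processor: myAudioProcessor },
  video: { processor: myVideoProcessor },
});
```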
package/src/room/track/options.ts
@@ -1,4 +1,9 @@
 import type { Track } from './Track';
+import type {
+  AudioProcessorOptions,
+  TrackProcessor,
+  VideoProcessorOptions,
+} from './processor/types';
 
 export interface TrackPublishDefaults {
   /**
@@ -152,6 +157,11 @@ export interface VideoCaptureOptions {
   facingMode?: 'user' | 'environment' | 'left' | 'right';
 
   resolution?: VideoResolution;
+
+  /**
+   * initialize the track with a given processor
+   */
+  processor?: TrackProcessor<Track.Kind.Video, VideoProcessorOptions>;
 }
 
 export interface ScreenShareCaptureOptions {
@@ -245,6 +255,11 @@ export interface AudioCaptureOptions {
    * sample size or range of sample sizes which are acceptable and/or required.
    */
   sampleSize?: ConstrainULong;
+
+  /**
+   * initialize the track with a given processor
+   */
+  processor?: TrackProcessor<Track.Kind.Audio, AudioProcessorOptions>;
 }
 
 export interface AudioOutputOptions {
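
Both capture-option interfaces now accept a processor, typed against the audio or video processor options from processor/types.ts (that file is not part of this diff). For orientation, a hypothetical pass-through audio processor is sketched below; the member names (name, init, restart, destroy, processedTrack) and the root re-exports of the types are assumptions based on how LocalTrack.setProcessor uses the processor above, so check processor/types.ts for the exact contract:

```ts
import type { AudioProcessorOptions, Track, TrackProcessor } from 'livekit-client';

// Hypothetical processor that routes the microphone through a GainNode.
class GainProcessor implements TrackProcessor<Track.Kind.Audio, AudioProcessorOptions> {
  name = 'gain-processor';

  processedTrack?: MediaStreamTrack;

  async init(opts: AudioProcessorOptions) {
    // opts carries the raw MediaStreamTrack and an AudioContext, mirroring the
    // processorOptions object built in LocalTrack.setProcessor
    const context = opts.audioContext ?? new AudioContext();
    const source = context.createMediaStreamSource(new MediaStream([opts.track]));
    const gain = context.createGain();
    gain.gain.value = 0.8;
    const destination = context.createMediaStreamDestination();
    source.connect(gain).connect(destination);
    this.processedTrack = destination.stream.getAudioTracks()[0];
  }

  async restart(opts: AudioProcessorOptions) {
    await this.destroy();
    await this.init(opts);
  }

  async destroy() {
    this.processedTrack?.stop();
  }
}

// usage with the new option:
// await createLocalTracks({ audio: { processor: new GainProcessor() } });
```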