livekit-client 0.15.1 → 0.16.0

This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (120)
  1. package/dist/api/SignalClient.d.ts +11 -3
  2. package/dist/api/SignalClient.js +92 -28
  3. package/dist/api/SignalClient.js.map +1 -1
  4. package/dist/index.d.ts +4 -2
  5. package/dist/index.js +5 -3
  6. package/dist/index.js.map +1 -1
  7. package/dist/options.d.ts +5 -0
  8. package/dist/proto/livekit_models.d.ts +48 -0
  9. package/dist/proto/livekit_models.js +367 -5
  10. package/dist/proto/livekit_models.js.map +1 -1
  11. package/dist/proto/livekit_rtc.d.ts +50 -11
  12. package/dist/proto/livekit_rtc.js +300 -22
  13. package/dist/proto/livekit_rtc.js.map +1 -1
  14. package/dist/room/PCTransport.js +4 -0
  15. package/dist/room/PCTransport.js.map +1 -1
  16. package/dist/room/RTCEngine.d.ts +10 -2
  17. package/dist/room/RTCEngine.js +182 -42
  18. package/dist/room/RTCEngine.js.map +1 -1
  19. package/dist/room/Room.d.ts +15 -0
  20. package/dist/room/Room.js +165 -20
  21. package/dist/room/Room.js.map +1 -1
  22. package/dist/room/events.d.ts +42 -20
  23. package/dist/room/events.js +41 -19
  24. package/dist/room/events.js.map +1 -1
  25. package/dist/room/participant/LocalParticipant.d.ts +25 -4
  26. package/dist/room/participant/LocalParticipant.js +50 -23
  27. package/dist/room/participant/LocalParticipant.js.map +1 -1
  28. package/dist/room/participant/Participant.d.ts +3 -1
  29. package/dist/room/participant/Participant.js +1 -0
  30. package/dist/room/participant/Participant.js.map +1 -1
  31. package/dist/room/participant/ParticipantTrackPermission.d.ts +19 -0
  32. package/dist/room/participant/ParticipantTrackPermission.js +16 -0
  33. package/dist/room/participant/ParticipantTrackPermission.js.map +1 -0
  34. package/dist/room/participant/RemoteParticipant.d.ts +2 -2
  35. package/dist/room/participant/RemoteParticipant.js +11 -16
  36. package/dist/room/participant/RemoteParticipant.js.map +1 -1
  37. package/dist/room/participant/publishUtils.js +1 -1
  38. package/dist/room/participant/publishUtils.js.map +1 -1
  39. package/dist/room/participant/publishUtils.test.js +9 -0
  40. package/dist/room/participant/publishUtils.test.js.map +1 -1
  41. package/dist/room/track/LocalTrack.d.ts +0 -3
  42. package/dist/room/track/LocalTrack.js +1 -6
  43. package/dist/room/track/LocalTrack.js.map +1 -1
  44. package/dist/room/track/LocalTrackPublication.d.ts +5 -1
  45. package/dist/room/track/LocalTrackPublication.js +15 -5
  46. package/dist/room/track/LocalTrackPublication.js.map +1 -1
  47. package/dist/room/track/LocalVideoTrack.d.ts +1 -1
  48. package/dist/room/track/LocalVideoTrack.js +7 -6
  49. package/dist/room/track/LocalVideoTrack.js.map +1 -1
  50. package/dist/room/track/RemoteAudioTrack.d.ts +5 -14
  51. package/dist/room/track/RemoteAudioTrack.js +7 -32
  52. package/dist/room/track/RemoteAudioTrack.js.map +1 -1
  53. package/dist/room/track/RemoteTrack.d.ts +14 -0
  54. package/dist/room/track/RemoteTrack.js +47 -0
  55. package/dist/room/track/RemoteTrack.js.map +1 -0
  56. package/dist/room/track/RemoteTrackPublication.d.ts +10 -2
  57. package/dist/room/track/RemoteTrackPublication.js +51 -12
  58. package/dist/room/track/RemoteTrackPublication.js.map +1 -1
  59. package/dist/room/track/RemoteVideoTrack.d.ts +3 -9
  60. package/dist/room/track/RemoteVideoTrack.js +8 -29
  61. package/dist/room/track/RemoteVideoTrack.js.map +1 -1
  62. package/dist/room/track/Track.d.ts +3 -0
  63. package/dist/room/track/Track.js +14 -5
  64. package/dist/room/track/Track.js.map +1 -1
  65. package/dist/room/track/TrackPublication.d.ts +14 -1
  66. package/dist/room/track/TrackPublication.js +24 -7
  67. package/dist/room/track/TrackPublication.js.map +1 -1
  68. package/dist/room/track/create.js +5 -0
  69. package/dist/room/track/create.js.map +1 -1
  70. package/dist/room/utils.d.ts +2 -0
  71. package/dist/room/utils.js +32 -1
  72. package/dist/room/utils.js.map +1 -1
  73. package/dist/version.d.ts +2 -2
  74. package/dist/version.js +2 -2
  75. package/package.json +5 -3
  76. package/src/api/SignalClient.ts +444 -0
  77. package/src/connect.ts +100 -0
  78. package/src/index.ts +47 -0
  79. package/src/logger.ts +22 -0
  80. package/src/options.ts +152 -0
  81. package/src/proto/livekit_models.ts +1863 -0
  82. package/src/proto/livekit_rtc.ts +3415 -0
  83. package/src/room/DeviceManager.ts +57 -0
  84. package/src/room/PCTransport.ts +86 -0
  85. package/src/room/RTCEngine.ts +582 -0
  86. package/src/room/Room.ts +840 -0
  87. package/src/room/errors.ts +65 -0
  88. package/src/room/events.ts +398 -0
  89. package/src/room/participant/LocalParticipant.ts +685 -0
  90. package/src/room/participant/Participant.ts +214 -0
  91. package/src/room/participant/ParticipantTrackPermission.ts +32 -0
  92. package/src/room/participant/RemoteParticipant.ts +241 -0
  93. package/src/room/participant/publishUtils.test.ts +105 -0
  94. package/src/room/participant/publishUtils.ts +180 -0
  95. package/src/room/stats.ts +130 -0
  96. package/src/room/track/LocalAudioTrack.ts +112 -0
  97. package/src/room/track/LocalTrack.ts +124 -0
  98. package/src/room/track/LocalTrackPublication.ts +66 -0
  99. package/src/room/track/LocalVideoTrack.test.ts +70 -0
  100. package/src/room/track/LocalVideoTrack.ts +416 -0
  101. package/src/room/track/RemoteAudioTrack.ts +58 -0
  102. package/src/room/track/RemoteTrack.ts +59 -0
  103. package/src/room/track/RemoteTrackPublication.ts +198 -0
  104. package/src/room/track/RemoteVideoTrack.ts +213 -0
  105. package/src/room/track/Track.ts +307 -0
  106. package/src/room/track/TrackPublication.ts +120 -0
  107. package/src/room/track/create.ts +120 -0
  108. package/src/room/track/defaults.ts +23 -0
  109. package/src/room/track/options.ts +229 -0
  110. package/src/room/track/types.ts +8 -0
  111. package/src/room/track/utils.test.ts +93 -0
  112. package/src/room/track/utils.ts +76 -0
  113. package/src/room/utils.ts +74 -0
  114. package/src/version.ts +2 -0
  115. package/.github/workflows/publish.yaml +0 -55
  116. package/.github/workflows/test.yaml +0 -36
  117. package/example/index.html +0 -248
  118. package/example/sample.ts +0 -621
  119. package/example/styles.css +0 -144
  120. package/example/webpack.config.js +0 -33
package/src/room/participant/publishUtils.ts
@@ -0,0 +1,180 @@
+ import log from '../../logger';
+ import { TrackInvalidError } from '../errors';
+ import LocalAudioTrack from '../track/LocalAudioTrack';
+ import LocalVideoTrack from '../track/LocalVideoTrack';
+ import {
+   ScreenSharePresets, TrackPublishOptions,
+   VideoEncoding, VideoPreset, VideoPresets,
+   VideoPresets43,
+ } from '../track/options';
+
+ /** @internal */
+ export function mediaTrackToLocalTrack(
+   mediaStreamTrack: MediaStreamTrack,
+   constraints?: MediaTrackConstraints,
+ ): LocalVideoTrack | LocalAudioTrack {
+   switch (mediaStreamTrack.kind) {
+     case 'audio':
+       return new LocalAudioTrack(mediaStreamTrack, constraints);
+     case 'video':
+       return new LocalVideoTrack(mediaStreamTrack, constraints);
+     default:
+       throw new TrackInvalidError(
+         `unsupported track type: ${mediaStreamTrack.kind}`,
+       );
+   }
+ }
+
+ /* @internal */
+ export const presets169 = [
+   VideoPresets.qvga,
+   VideoPresets.vga,
+   VideoPresets.qhd,
+   VideoPresets.hd,
+   VideoPresets.fhd,
+ ];
+
+ /* @internal */
+ export const presets43 = [
+   VideoPresets43.qvga,
+   VideoPresets43.vga,
+   VideoPresets43.qhd,
+   VideoPresets43.hd,
+   VideoPresets43.fhd,
+ ];
+
+ /* @internal */
+ export const presetsScreenShare = [
+   ScreenSharePresets.vga,
+   ScreenSharePresets.hd_8,
+   ScreenSharePresets.hd_15,
+   ScreenSharePresets.fhd_15,
+   ScreenSharePresets.fhd_30,
+ ];
+
+ const videoRids = ['q', 'h', 'f'];
+
+ /* @internal */
+ export function computeVideoEncodings(
+   isScreenShare: boolean,
+   width?: number,
+   height?: number,
+   options?: TrackPublishOptions,
+ ): RTCRtpEncodingParameters[] {
+   let videoEncoding: VideoEncoding | undefined = options?.videoEncoding;
+   if (isScreenShare) {
+     videoEncoding = options?.screenShareEncoding;
+   }
+   const useSimulcast = !isScreenShare && options?.simulcast;
+
+   if ((!videoEncoding && !useSimulcast) || !width || !height) {
+     // when we aren't simulcasting, will need to return a single encoding without
+     // capping bandwidth. we always require a encoding for dynacast
+     return [{}];
+   }
+
+   if (!videoEncoding) {
+     // find the right encoding based on width/height
+     videoEncoding = determineAppropriateEncoding(isScreenShare, width, height);
+     log.debug('using video encoding', videoEncoding);
+   }
+
+   if (!useSimulcast) {
+     return [videoEncoding];
+   }
+
+   const presets = presetsForResolution(isScreenShare, width, height);
+   let midPreset: VideoPreset | undefined;
+   const lowPreset = presets[0];
+   if (presets.length > 1) {
+     [,midPreset] = presets;
+   }
+   const original = new VideoPreset(
+     width, height, videoEncoding.maxBitrate, videoEncoding.maxFramerate,
+   );
+
+   // NOTE:
+   //   1. Ordering of these encodings is important. Chrome seems
+   //      to use the index into encodings to decide which layer
+   //      to disable when CPU constrained.
+   //      So encodings should be ordered in increasing spatial
+   //      resolution order.
+   //   2. ion-sfu translates rids into layers. So, all encodings
+   //      should have the base layer `q` and then more added
+   //      based on other conditions.
+   const size = Math.max(width, height);
+   if (size >= 960 && midPreset) {
+     return encodingsFromPresets(width, height, [
+       lowPreset, midPreset, original,
+     ]);
+   }
+   if (size >= 500) {
+     return encodingsFromPresets(width, height, [
+       lowPreset, original,
+     ]);
+   }
+   return encodingsFromPresets(width, height, [
+     original,
+   ]);
+ }
+
+ /* @internal */
+ export function determineAppropriateEncoding(
+   isScreenShare: boolean,
+   width: number,
+   height: number,
+ ): VideoEncoding {
+   const presets = presetsForResolution(isScreenShare, width, height);
+   let { encoding } = presets[0];
+
+   // handle portrait by swapping dimensions
+   const size = Math.max(width, height);
+
+   for (let i = 0; i < presets.length; i += 1) {
+     const preset = presets[i];
+     encoding = preset.encoding;
+     if (preset.width >= size) {
+       break;
+     }
+   }
+
+   return encoding;
+ }
+
+ /* @internal */
+ export function presetsForResolution(
+   isScreenShare: boolean, width: number, height: number,
+ ): VideoPreset[] {
+   if (isScreenShare) {
+     return presetsScreenShare;
+   }
+   const aspect = width > height ? width / height : height / width;
+   if (Math.abs(aspect - 16.0 / 9) < Math.abs(aspect - 4.0 / 3)) {
+     return presets169;
+   }
+   return presets43;
+ }
+
+ // presets should be ordered by low, medium, high
+ function encodingsFromPresets(
+   width: number,
+   height: number,
+   presets: VideoPreset[],
+ ): RTCRtpEncodingParameters[] {
+   const encodings: RTCRtpEncodingParameters[] = [];
+   presets.forEach((preset, idx) => {
+     if (idx >= videoRids.length) {
+       return;
+     }
+     const size = Math.min(width, height);
+     const rid = videoRids[idx];
+     encodings.push({
+       rid,
+       scaleResolutionDownBy: size / Math.min(preset.width, preset.height),
+       maxBitrate: preset.encoding.maxBitrate,
+       /* @ts-ignore */
+       maxFramerate: preset.encoding.maxFramerate,
+     });
+   });
+   return encodings;
+ }
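
A minimal usage sketch of computeVideoEncodings above (illustrative only: these helpers are marked @internal, and the import path and option values below are assumptions, not part of the diff):

import { computeVideoEncodings } from './room/participant/publishUtils';

// 1280x720 camera track with simulcast enabled: size (1280) >= 960, so three
// encodings are returned, ordered low to high with rids 'q', 'h', 'f'.
const cameraEncodings = computeVideoEncodings(false, 1280, 720, { simulcast: true });

// Screen share with no explicit screenShareEncoding and no simulcast: a single
// empty encoding is returned, so dynacast always has one encoding to work with.
const screenEncodings = computeVideoEncodings(true, 1920, 1080);
// => [{}]
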
package/src/room/stats.ts
@@ -0,0 +1,130 @@
+ export const monitorFrequency = 2000;
+
+ // key stats for senders and receivers
+ interface SenderStats {
+   /** number of packets sent */
+   packetsSent?: number;
+
+   /** number of bytes sent */
+   bytesSent?: number;
+
+   /** jitter as perceived by remote */
+   jitter?: number;
+
+   /** packets reported lost by remote */
+   packetsLost?: number;
+
+   /** RTT reported by remote */
+   roundTripTime?: number;
+
+   /** ID of the outbound stream */
+   streamId?: string;
+
+   timestamp: number;
+ }
+
+ export interface AudioSenderStats extends SenderStats {
+   type: 'audio';
+ }
+
+ export interface VideoSenderStats extends SenderStats {
+   type: 'video';
+
+   firCount: number;
+
+   pliCount: number;
+
+   nackCount: number;
+
+   rid: string;
+
+   frameWidth: number;
+
+   frameHeight: number;
+
+   framesSent: number;
+
+   // bandwidth, cpu, other, none
+   qualityLimitationReason: string;
+
+   qualityLimitationResolutionChanges: number;
+
+   retransmittedPacketsSent: number;
+ }
+
+ interface ReceiverStats {
+   jitterBufferDelay?: number;
+
+   /** packets reported lost by remote */
+   packetsLost?: number;
+
+   /** number of packets sent */
+   packetsReceived?: number;
+
+   bytesReceived?: number;
+
+   streamId?: string;
+
+   jitter?: number;
+
+   timestamp: number;
+ }
+
+ export interface AudioReceiverStats extends ReceiverStats {
+   type: 'audio';
+
+   concealedSamples?: number;
+
+   concealmentEvents?: number;
+
+   silentConcealedSamples?: number;
+
+   silentConcealmentEvents?: number;
+
+   totalAudioEnergy?: number;
+
+   totalSamplesDuration?: number;
+ }
+
+ export interface VideoReceiverStats extends ReceiverStats {
+   type: 'video';
+
+   framesDecoded: number;
+
+   framesDropped: number;
+
+   framesReceived: number;
+
+   frameWidth?: number;
+
+   frameHeight?: number;
+
+   firCount?: number;
+
+   pliCount?: number;
+
+   nackCount?: number;
+ }
+
+ export function computeBitrate<T extends ReceiverStats | SenderStats>(
+   currentStats: T,
+   prevStats?: T,
+ ): number {
+   if (!prevStats) {
+     return 0;
+   }
+   let bytesNow: number | undefined;
+   let bytesPrev: number | undefined;
+   if ('bytesReceived' in currentStats) {
+     bytesNow = (currentStats as ReceiverStats).bytesReceived;
+     bytesPrev = (prevStats as ReceiverStats).bytesReceived;
+   } else if ('bytesSent' in currentStats) {
+     bytesNow = (currentStats as SenderStats).bytesSent;
+     bytesPrev = (prevStats as SenderStats).bytesSent;
+   }
+   if (bytesNow === undefined || bytesPrev === undefined
+       || currentStats.timestamp === undefined || prevStats.timestamp === undefined) {
+     return 0;
+   }
+   return ((bytesNow - bytesPrev) * 8 * 1000) / (currentStats.timestamp - prevStats.timestamp);
+ }
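
A worked example of the computeBitrate formula above: the byte delta is converted to bits and scaled from the millisecond timestamp delta to a per-second rate. The sample values and import path are illustrative.

import { AudioSenderStats, computeBitrate } from './room/stats';

// Two hypothetical samples taken one monitorFrequency (2000 ms) apart.
const prev: AudioSenderStats = { type: 'audio', bytesSent: 125_000, timestamp: 1_000 };
const curr: AudioSenderStats = { type: 'audio', bytesSent: 375_000, timestamp: 3_000 };

// ((375_000 - 125_000) * 8 * 1000) / (3_000 - 1_000) = 1_000_000 bits per second
const bps = computeBitrate(curr, prev);
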
package/src/room/track/LocalAudioTrack.ts
@@ -0,0 +1,112 @@
+ import log from '../../logger';
+ import { AudioSenderStats, computeBitrate, monitorFrequency } from '../stats';
+ import LocalTrack from './LocalTrack';
+ import { AudioCaptureOptions } from './options';
+ import { Track } from './Track';
+ import { constraintsForOptions } from './utils';
+
+ export default class LocalAudioTrack extends LocalTrack {
+   sender?: RTCRtpSender;
+
+   /** @internal */
+   stopOnMute: boolean = false;
+
+   private prevStats?: AudioSenderStats;
+
+   constructor(
+     mediaTrack: MediaStreamTrack,
+     constraints?: MediaTrackConstraints,
+   ) {
+     super(mediaTrack, Track.Kind.Audio, constraints);
+   }
+
+   async setDeviceId(deviceId: string) {
+     if (this.constraints.deviceId === deviceId) {
+       return;
+     }
+     this.constraints.deviceId = deviceId;
+     if (!this.isMuted) {
+       await this.restartTrack();
+     }
+   }
+
+   async mute(): Promise<LocalAudioTrack> {
+     // disabled special handling as it will cause BT headsets to switch communication modes
+     if (this.source === Track.Source.Microphone && this.stopOnMute) {
+       log.debug('stopping mic track');
+       // also stop the track, so that microphone indicator is turned off
+       this.mediaStreamTrack.stop();
+     }
+     await super.mute();
+     return this;
+   }
+
+   async unmute(): Promise<LocalAudioTrack> {
+     if (this.source === Track.Source.Microphone && this.stopOnMute) {
+       log.debug('reacquiring mic track');
+       await this.restartTrack();
+     }
+     await super.unmute();
+     return this;
+   }
+
+   async restartTrack(options?: AudioCaptureOptions) {
+     let constraints: MediaTrackConstraints | undefined;
+     if (options) {
+       const streamConstraints = constraintsForOptions({ audio: options });
+       if (typeof streamConstraints.audio !== 'boolean') {
+         constraints = streamConstraints.audio;
+       }
+     }
+     await this.restart(constraints);
+   }
+
+   /* @internal */
+   startMonitor() {
+     setTimeout(() => {
+       this.monitorSender();
+     }, monitorFrequency);
+   }
+
+   private monitorSender = async () => {
+     if (!this.sender) {
+       this._currentBitrate = 0;
+       return;
+     }
+     const stats = await this.getSenderStats();
+
+     if (stats && this.prevStats) {
+       this._currentBitrate = computeBitrate(stats, this.prevStats);
+     }
+
+     this.prevStats = stats;
+     setTimeout(() => {
+       this.monitorSender();
+     }, monitorFrequency);
+   };
+
+   async getSenderStats(): Promise<AudioSenderStats | undefined> {
+     if (!this.sender) {
+       return undefined;
+     }
+
+     const stats = await this.sender.getStats();
+     let audioStats: AudioSenderStats | undefined;
+     stats.forEach((v) => {
+       if (v.type === 'outbound-rtp') {
+         audioStats = {
+           type: 'audio',
+           streamId: v.id,
+           packetsSent: v.packetsSent,
+           packetsLost: v.packetsLost,
+           bytesSent: v.bytesSent,
+           timestamp: v.timestamp,
+           roundTripTime: v.roundTripTime,
+           jitter: v.jitter,
+         };
+       }
+     });
+
+     return audioStats;
+   }
+ }
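
A short usage sketch for the audio-track methods above, assuming the track is obtained via the createLocalAudioTrack helper shown in the file list (create.ts / index.ts); the device id is a placeholder:

import { createLocalAudioTrack } from 'livekit-client';

async function switchMicrophone() {
  const track = await createLocalAudioTrack();

  // Updates the deviceId constraint; while the track is unmuted this
  // re-acquires the microphone via restartTrack() -> restart().
  await track.setDeviceId('<audioinput-device-id>');

  // Sender stats become available once the track is published (sender is set).
  const stats = await track.getSenderStats();
  console.log('packets sent', stats?.packetsSent);
}
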
package/src/room/track/LocalTrack.ts
@@ -0,0 +1,124 @@
+ import log from '../../logger';
+ import DeviceManager from '../DeviceManager';
+ import { TrackInvalidError } from '../errors';
+ import { TrackEvent } from '../events';
+ import { attachToElement, detachTrack, Track } from './Track';
+
+ export default class LocalTrack extends Track {
+   /** @internal */
+   sender?: RTCRtpSender;
+
+   protected constraints: MediaTrackConstraints;
+
+   protected constructor(
+     mediaTrack: MediaStreamTrack, kind: Track.Kind, constraints?: MediaTrackConstraints,
+   ) {
+     super(mediaTrack, kind);
+     this.mediaStreamTrack.addEventListener('ended', this.handleEnded);
+     this.constraints = constraints ?? mediaTrack.getConstraints();
+   }
+
+   get id(): string {
+     return this.mediaStreamTrack.id;
+   }
+
+   get dimensions(): Track.Dimensions | undefined {
+     if (this.kind !== Track.Kind.Video) {
+       return undefined;
+     }
+
+     const { width, height } = this.mediaStreamTrack.getSettings();
+     if (width && height) {
+       return {
+         width,
+         height,
+       };
+     }
+     return undefined;
+   }
+
+   /**
+    * @returns DeviceID of the device that is currently being used for this track
+    */
+   async getDeviceId(): Promise<string | undefined> {
+     // screen share doesn't have a usable device id
+     if (this.source === Track.Source.ScreenShare) {
+       return;
+     }
+     const { deviceId, groupId } = this.mediaStreamTrack.getSettings();
+     const kind = this.kind === Track.Kind.Audio ? 'audioinput' : 'videoinput';
+
+     return DeviceManager.getInstance().normalizeDeviceId(kind, deviceId, groupId);
+   }
+
+   async mute(): Promise<LocalTrack> {
+     this.setTrackMuted(true);
+     return this;
+   }
+
+   async unmute(): Promise<LocalTrack> {
+     this.setTrackMuted(false);
+     return this;
+   }
+
+   protected async restart(constraints?: MediaTrackConstraints): Promise<LocalTrack> {
+     if (!this.sender) {
+       throw new TrackInvalidError('unable to restart an unpublished track');
+     }
+     if (!constraints) {
+       constraints = this.constraints;
+     }
+     log.debug('restarting track with constraints', constraints);
+
+     const streamConstraints: MediaStreamConstraints = {
+       audio: false,
+       video: false,
+     };
+
+     if (this.kind === Track.Kind.Video) {
+       streamConstraints.video = constraints;
+     } else {
+       streamConstraints.audio = constraints;
+     }
+
+     // detach
+     this.attachedElements.forEach((el) => {
+       detachTrack(this.mediaStreamTrack, el);
+     });
+     this.mediaStreamTrack.removeEventListener('ended', this.handleEnded);
+     // on Safari, the old audio track must be stopped before attempting to acquire
+     // the new track, otherwise the new track will stop with
+     // 'A MediaStreamTrack ended due to a capture failure`
+     this.mediaStreamTrack.stop();
+
+     // create new track and attach
+     const mediaStream = await navigator.mediaDevices.getUserMedia(streamConstraints);
+     const newTrack = mediaStream.getTracks()[0];
+     newTrack.addEventListener('ended', this.handleEnded);
+     log.debug('re-acquired MediaStreamTrack');
+
+     await this.sender.replaceTrack(newTrack);
+     this.mediaStreamTrack = newTrack;
+
+     this.attachedElements.forEach((el) => {
+       attachToElement(newTrack, el);
+     });
+
+     this.constraints = constraints;
+     return this;
+   }
+
+   protected setTrackMuted(muted: boolean) {
+     if (this.isMuted === muted) {
+       return;
+     }
+
+     this.isMuted = muted;
+     this.mediaStreamTrack.enabled = !muted;
+     this.emit(muted ? TrackEvent.Muted : TrackEvent.Unmuted, this);
+   }
+
+   private handleEnded = () => {
+     this.emit(TrackEvent.Ended, this);
+   };
+ }
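
For reference, setTrackMuted above only flips MediaStreamTrack.enabled and emits track events; a minimal listener sketch (import paths are illustrative, and track stands for any existing LocalTrack instance):

import LocalTrack from './room/track/LocalTrack';
import { TrackEvent } from './room/events';

declare const track: LocalTrack;

track.on(TrackEvent.Muted, () => console.log('track muted'));
track.on(TrackEvent.Unmuted, () => console.log('track unmuted'));

await track.mute();    // sets mediaStreamTrack.enabled = false and emits TrackEvent.Muted
await track.unmute();  // re-enables the MediaStreamTrack and emits TrackEvent.Unmuted
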
package/src/room/track/LocalTrackPublication.ts
@@ -0,0 +1,66 @@
+ import { TrackInfo } from '../../proto/livekit_models';
+ import { TrackEvent } from '../events';
+ import LocalAudioTrack from './LocalAudioTrack';
+ import LocalTrack from './LocalTrack';
+ import LocalVideoTrack from './LocalVideoTrack';
+ import { TrackPublishOptions } from './options';
+ import { Track } from './Track';
+ import { TrackPublication } from './TrackPublication';
+
+ export default class LocalTrackPublication extends TrackPublication {
+   track?: LocalTrack;
+
+   options?: TrackPublishOptions;
+
+   constructor(kind: Track.Kind, ti: TrackInfo, track?: LocalTrack) {
+     super(kind, ti.sid, ti.name);
+
+     this.updateInfo(ti);
+     this.setTrack(track);
+   }
+
+   setTrack(track?: Track) {
+     if (this.track) {
+       this.track.off(TrackEvent.Ended, this.handleTrackEnded);
+     }
+
+     super.setTrack(track);
+
+     if (track) {
+       track.on(TrackEvent.Ended, this.handleTrackEnded);
+     }
+   }
+
+   get isMuted(): boolean {
+     if (this.track) {
+       return this.track.isMuted;
+     }
+     return super.isMuted;
+   }
+
+   get audioTrack(): LocalAudioTrack | undefined {
+     return super.audioTrack as LocalAudioTrack | undefined;
+   }
+
+   get videoTrack(): LocalVideoTrack | undefined {
+     return super.videoTrack as LocalVideoTrack | undefined;
+   }
+
+   /**
+    * Mute the track associated with this publication
+    */
+   async mute() {
+     return this.track?.mute();
+   }
+
+   /**
+    * Unmute track associated with this publication
+    */
+   async unmute() {
+     return this.track?.unmute();
+   }
+
+   handleTrackEnded = (track: LocalTrack) => {
+     this.emit(TrackEvent.Ended, track);
+   };
+ }
package/src/room/track/LocalVideoTrack.test.ts
@@ -0,0 +1,70 @@
+ import { VideoQuality } from '../../proto/livekit_models';
+ import { videoLayersFromEncodings } from './LocalVideoTrack';
+
+ describe('videoLayersFromEncodings', () => {
+   it('returns single layer for no encoding', () => {
+     const layers = videoLayersFromEncodings(640, 360);
+     expect(layers).toHaveLength(1);
+     expect(layers[0].quality).toBe(VideoQuality.HIGH);
+     expect(layers[0].width).toBe(640);
+     expect(layers[0].height).toBe(360);
+   });
+
+   it('returns single layer for explicit encoding', () => {
+     const layers = videoLayersFromEncodings(640, 360, [{
+       maxBitrate: 200_000,
+     }]);
+     expect(layers).toHaveLength(1);
+     expect(layers[0].quality).toBe(VideoQuality.HIGH);
+     expect(layers[0].bitrate).toBe(200_000);
+   });
+
+   it('returns three layers for simulcast', () => {
+     const layers = videoLayersFromEncodings(1280, 720, [
+       {
+         scaleResolutionDownBy: 4,
+         rid: 'q',
+         maxBitrate: 125_000,
+       },
+       {
+         scaleResolutionDownBy: 2,
+         rid: 'h',
+         maxBitrate: 500_000,
+       },
+       {
+         rid: 'f',
+         maxBitrate: 1_200_000,
+       },
+     ]);
+
+     expect(layers).toHaveLength(3);
+     expect(layers[0].quality).toBe(VideoQuality.LOW);
+     expect(layers[0].width).toBe(320);
+     expect(layers[2].quality).toBe(VideoQuality.HIGH);
+     expect(layers[2].height).toBe(720);
+   });
+
+   it('handles portrait', () => {
+     const layers = videoLayersFromEncodings(720, 1280, [
+       {
+         scaleResolutionDownBy: 4,
+         rid: 'q',
+         maxBitrate: 125_000,
+       },
+       {
+         scaleResolutionDownBy: 2,
+         rid: 'h',
+         maxBitrate: 500_000,
+       },
+       {
+         rid: 'f',
+         maxBitrate: 1_200_000,
+       },
+     ]);
+     expect(layers).toHaveLength(3);
+     expect(layers[0].quality).toBe(VideoQuality.LOW);
+     expect(layers[0].height).toBe(320);
+     expect(layers[2].quality).toBe(VideoQuality.HIGH);
+     expect(layers[2].width).toBe(720);
+   });
+ });