livekit-client 0.18.4-RC6 → 0.18.4
This diff compares the contents of package versions that have been publicly released to the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/README.md +2 -5
- package/dist/api/RequestQueue.d.ts +13 -12
- package/dist/api/RequestQueue.d.ts.map +1 -0
- package/dist/api/SignalClient.d.ts +67 -66
- package/dist/api/SignalClient.d.ts.map +1 -0
- package/dist/connect.d.ts +24 -23
- package/dist/connect.d.ts.map +1 -0
- package/dist/index.d.ts +27 -26
- package/dist/index.d.ts.map +1 -0
- package/dist/livekit-client.esm.mjs +546 -486
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/logger.d.ts +26 -25
- package/dist/logger.d.ts.map +1 -0
- package/dist/options.d.ts +128 -127
- package/dist/options.d.ts.map +1 -0
- package/dist/proto/google/protobuf/timestamp.d.ts +133 -132
- package/dist/proto/google/protobuf/timestamp.d.ts.map +1 -0
- package/dist/proto/livekit_models.d.ts +876 -868
- package/dist/proto/livekit_models.d.ts.map +1 -0
- package/dist/proto/livekit_rtc.d.ts +3904 -3859
- package/dist/proto/livekit_rtc.d.ts.map +1 -0
- package/dist/room/DeviceManager.d.ts +8 -7
- package/dist/room/DeviceManager.d.ts.map +1 -0
- package/dist/room/PCTransport.d.ts +16 -15
- package/dist/room/PCTransport.d.ts.map +1 -0
- package/dist/room/RTCEngine.d.ts +67 -66
- package/dist/room/RTCEngine.d.ts.map +1 -0
- package/dist/room/Room.d.ts +166 -165
- package/dist/room/Room.d.ts.map +1 -0
- package/dist/room/errors.d.ts +29 -28
- package/dist/room/errors.d.ts.map +1 -0
- package/dist/room/events.d.ts +391 -390
- package/dist/room/events.d.ts.map +1 -0
- package/dist/room/participant/LocalParticipant.d.ts +126 -125
- package/dist/room/participant/LocalParticipant.d.ts.map +1 -0
- package/dist/room/participant/Participant.d.ts +94 -93
- package/dist/room/participant/Participant.d.ts.map +1 -0
- package/dist/room/participant/ParticipantTrackPermission.d.ts +26 -19
- package/dist/room/participant/ParticipantTrackPermission.d.ts.map +1 -0
- package/dist/room/participant/RemoteParticipant.d.ts +40 -39
- package/dist/room/participant/RemoteParticipant.d.ts.map +1 -0
- package/dist/room/participant/publishUtils.d.ts +18 -17
- package/dist/room/participant/publishUtils.d.ts.map +1 -0
- package/dist/room/stats.d.ts +66 -65
- package/dist/room/stats.d.ts.map +1 -0
- package/dist/room/track/LocalAudioTrack.d.ts +20 -19
- package/dist/room/track/LocalAudioTrack.d.ts.map +1 -0
- package/dist/room/track/LocalTrack.d.ts +28 -27
- package/dist/room/track/LocalTrack.d.ts.map +1 -0
- package/dist/room/track/LocalTrackPublication.d.ts +38 -37
- package/dist/room/track/LocalTrackPublication.d.ts.map +1 -0
- package/dist/room/track/LocalVideoTrack.d.ts +31 -30
- package/dist/room/track/LocalVideoTrack.d.ts.map +1 -0
- package/dist/room/track/RemoteAudioTrack.d.ts +20 -19
- package/dist/room/track/RemoteAudioTrack.d.ts.map +1 -0
- package/dist/room/track/RemoteTrack.d.ts +16 -15
- package/dist/room/track/RemoteTrack.d.ts.map +1 -0
- package/dist/room/track/RemoteTrackPublication.d.ts +51 -50
- package/dist/room/track/RemoteTrackPublication.d.ts.map +1 -0
- package/dist/room/track/RemoteVideoTrack.d.ts +28 -27
- package/dist/room/track/RemoteVideoTrack.d.ts.map +1 -0
- package/dist/room/track/Track.d.ts +101 -100
- package/dist/room/track/Track.d.ts.map +1 -0
- package/dist/room/track/TrackPublication.d.ts +50 -49
- package/dist/room/track/TrackPublication.d.ts.map +1 -0
- package/dist/room/track/create.d.ts +24 -23
- package/dist/room/track/create.d.ts.map +1 -0
- package/dist/room/track/defaults.d.ts +5 -4
- package/dist/room/track/defaults.d.ts.map +1 -0
- package/dist/room/track/options.d.ts +223 -222
- package/dist/room/track/options.d.ts.map +1 -0
- package/dist/room/track/types.d.ts +19 -18
- package/dist/room/track/types.d.ts.map +1 -0
- package/dist/room/track/utils.d.ts +14 -13
- package/dist/room/track/utils.d.ts.map +1 -0
- package/dist/room/utils.d.ts +17 -15
- package/dist/room/utils.d.ts.map +1 -0
- package/dist/test/mocks.d.ts +12 -11
- package/dist/test/mocks.d.ts.map +1 -0
- package/dist/version.d.ts +3 -2
- package/dist/version.d.ts.map +1 -0
- package/package.json +4 -5
- package/src/api/RequestQueue.ts +53 -0
- package/src/api/SignalClient.ts +497 -0
- package/src/connect.ts +98 -0
- package/src/index.ts +49 -0
- package/src/logger.ts +56 -0
- package/src/options.ts +156 -0
- package/src/proto/google/protobuf/timestamp.ts +216 -0
- package/src/proto/livekit_models.ts +2456 -0
- package/src/proto/livekit_rtc.ts +2859 -0
- package/src/room/DeviceManager.ts +80 -0
- package/src/room/PCTransport.ts +88 -0
- package/src/room/RTCEngine.ts +695 -0
- package/src/room/Room.ts +970 -0
- package/src/room/errors.ts +65 -0
- package/src/room/events.ts +438 -0
- package/src/room/participant/LocalParticipant.ts +755 -0
- package/src/room/participant/Participant.ts +287 -0
- package/src/room/participant/ParticipantTrackPermission.ts +42 -0
- package/src/room/participant/RemoteParticipant.ts +263 -0
- package/src/room/participant/publishUtils.test.ts +144 -0
- package/src/room/participant/publishUtils.ts +229 -0
- package/src/room/stats.ts +134 -0
- package/src/room/track/LocalAudioTrack.ts +134 -0
- package/src/room/track/LocalTrack.ts +229 -0
- package/src/room/track/LocalTrackPublication.ts +87 -0
- package/src/room/track/LocalVideoTrack.test.ts +72 -0
- package/src/room/track/LocalVideoTrack.ts +295 -0
- package/src/room/track/RemoteAudioTrack.ts +86 -0
- package/src/room/track/RemoteTrack.ts +62 -0
- package/src/room/track/RemoteTrackPublication.ts +207 -0
- package/src/room/track/RemoteVideoTrack.ts +240 -0
- package/src/room/track/Track.ts +358 -0
- package/src/room/track/TrackPublication.ts +120 -0
- package/src/room/track/create.ts +122 -0
- package/src/room/track/defaults.ts +27 -0
- package/src/room/track/options.ts +281 -0
- package/src/room/track/types.ts +20 -0
- package/src/room/track/utils.test.ts +110 -0
- package/src/room/track/utils.ts +113 -0
- package/src/room/utils.ts +115 -0
- package/src/test/mocks.ts +17 -0
- package/src/version.ts +2 -0
- package/CHANGELOG.md +0 -5
```diff
--- /dev/null
+++ b/package/src/room/participant/publishUtils.test.ts
@@ -0,0 +1,144 @@
+import { ScreenSharePresets, VideoPreset, VideoPresets, VideoPresets43 } from '../track/options';
+import {
+  computeDefaultScreenShareSimulcastPresets,
+  computeVideoEncodings,
+  determineAppropriateEncoding,
+  presets169,
+  presets43,
+  presetsForResolution,
+  presetsScreenShare,
+  sortPresets,
+} from './publishUtils';
+
+describe('presetsForResolution', () => {
+  it('handles screenshare', () => {
+    expect(presetsForResolution(true, 600, 300)).toEqual(presetsScreenShare);
+  });
+
+  it('handles landscape', () => {
+    expect(presetsForResolution(false, 600, 300)).toEqual(presets169);
+    expect(presetsForResolution(false, 500, 500)).toEqual(presets43);
+  });
+
+  it('handles portrait', () => {
+    expect(presetsForResolution(false, 300, 600)).toEqual(presets169);
+    expect(presetsForResolution(false, 500, 500)).toEqual(presets43);
+  });
+});
+
+describe('determineAppropriateEncoding', () => {
+  it('uses higher encoding', () => {
+    expect(determineAppropriateEncoding(false, 600, 300)).toEqual(VideoPresets.vga.encoding);
+  });
+
+  it('handles portrait', () => {
+    expect(determineAppropriateEncoding(false, 300, 600)).toEqual(VideoPresets.vga.encoding);
+  });
+});
+
+describe('computeVideoEncodings', () => {
+  it('handles non-simulcast', () => {
+    const encodings = computeVideoEncodings(false, 640, 480, {
+      simulcast: false,
+    });
+    expect(encodings).toEqual([{}]);
+  });
+
+  it('respects client defined bitrate', () => {
+    const encodings = computeVideoEncodings(false, 640, 480, {
+      simulcast: false,
+      videoEncoding: {
+        maxBitrate: 1024,
+      },
+    });
+    expect(encodings).toHaveLength(1);
+    expect(encodings![0].maxBitrate).toBe(1024);
+  });
+
+  it('returns three encodings for high-res simulcast', () => {
+    const encodings = computeVideoEncodings(false, 960, 540, {
+      simulcast: true,
+    });
+    expect(encodings).toHaveLength(3);
+
+    // ensure they are what we expect
+    expect(encodings![0].rid).toBe('q');
+    expect(encodings![0].maxBitrate).toBe(VideoPresets.h180.encoding.maxBitrate);
+    expect(encodings![0].scaleResolutionDownBy).toBe(3);
+    expect(encodings![1].rid).toBe('h');
+    expect(encodings![1].scaleResolutionDownBy).toBe(1.5);
+    expect(encodings![2].rid).toBe('f');
+  });
+
+  it('handles portrait simulcast', () => {
+    const encodings = computeVideoEncodings(false, 540, 960, {
+      simulcast: true,
+    });
+    expect(encodings).toHaveLength(3);
+    expect(encodings![0].scaleResolutionDownBy).toBe(3);
+    expect(encodings![1].scaleResolutionDownBy).toBe(1.5);
+    expect(encodings![2].maxBitrate).toBe(VideoPresets.h540.encoding.maxBitrate);
+  });
+
+  it('returns two encodings for lower-res simulcast', () => {
+    const encodings = computeVideoEncodings(false, 640, 360, {
+      simulcast: true,
+    });
+    expect(encodings).toHaveLength(2);
+
+    // ensure they are what we expect
+    expect(encodings![0].rid).toBe('q');
+    expect(encodings![0].maxBitrate).toBe(VideoPresets.h180.encoding.maxBitrate);
+    expect(encodings![1].rid).toBe('h');
+    expect(encodings![1].maxBitrate).toBe(VideoPresets.h360.encoding.maxBitrate);
+  });
+
+  it('respects provided min resolution', () => {
+    const encodings = computeVideoEncodings(false, 100, 120, {
+      simulcast: true,
+    });
+    expect(encodings).toHaveLength(1);
+    expect(encodings![0].rid).toBe('q');
+    expect(encodings![0].maxBitrate).toBe(VideoPresets43.h120.encoding.maxBitrate);
+    expect(encodings![0].scaleResolutionDownBy).toBe(1);
+  });
+});
+
+describe('customSimulcastLayers', () => {
+  it('sorts presets from lowest to highest', () => {
+    const sortedPresets = sortPresets([
+      VideoPresets.h1440,
+      VideoPresets.h360,
+      VideoPresets.h1080,
+      VideoPresets.h90,
+    ]) as Array<VideoPreset>;
+    expect(sortPresets).not.toBeUndefined();
+    expect(sortedPresets[0]).toBe(VideoPresets.h90);
+    expect(sortedPresets[1]).toBe(VideoPresets.h360);
+    expect(sortedPresets[2]).toBe(VideoPresets.h1080);
+    expect(sortedPresets[3]).toBe(VideoPresets.h1440);
+  });
+  it('sorts presets from lowest to highest, even when dimensions are the same', () => {
+    const sortedPresets = sortPresets([
+      new VideoPreset(1920, 1080, 3_000_000, 20),
+      new VideoPreset(1920, 1080, 2_000_000, 15),
+      new VideoPreset(1920, 1080, 3_000_000, 15),
+    ]) as Array<VideoPreset>;
+    expect(sortPresets).not.toBeUndefined();
+    expect(sortedPresets[0].encoding.maxBitrate).toBe(2_000_000);
+    expect(sortedPresets[1].encoding.maxFramerate).toBe(15);
+    expect(sortedPresets[2].encoding.maxFramerate).toBe(20);
+  });
+});
+
+describe('screenShareSimulcastDefaults', () => {
+  it('computes appropriate bitrate from original preset', () => {
+    const defaultSimulcastLayers = computeDefaultScreenShareSimulcastPresets(
+      ScreenSharePresets.h720fps15,
+    );
+    expect(defaultSimulcastLayers[0].width).toBe(640);
+    expect(defaultSimulcastLayers[0].height).toBe(360);
+    expect(defaultSimulcastLayers[0].encoding.maxFramerate).toBe(3);
+    expect(defaultSimulcastLayers[0].encoding.maxBitrate).toBe(150_000);
+  });
+});
```
```diff
--- /dev/null
+++ b/package/src/room/participant/publishUtils.ts
@@ -0,0 +1,229 @@
+import log from '../../logger';
+import { TrackInvalidError } from '../errors';
+import LocalAudioTrack from '../track/LocalAudioTrack';
+import LocalVideoTrack from '../track/LocalVideoTrack';
+import {
+  ScreenSharePresets,
+  TrackPublishOptions,
+  VideoEncoding,
+  VideoPreset,
+  VideoPresets,
+  VideoPresets43,
+} from '../track/options';
+
+/** @internal */
+export function mediaTrackToLocalTrack(
+  mediaStreamTrack: MediaStreamTrack,
+  constraints?: MediaTrackConstraints,
+): LocalVideoTrack | LocalAudioTrack {
+  switch (mediaStreamTrack.kind) {
+    case 'audio':
+      return new LocalAudioTrack(mediaStreamTrack, constraints);
+    case 'video':
+      return new LocalVideoTrack(mediaStreamTrack, constraints);
+    default:
+      throw new TrackInvalidError(`unsupported track type: ${mediaStreamTrack.kind}`);
+  }
+}
+
+/* @internal */
+export const presets169 = Object.values(VideoPresets);
+
+/* @internal */
+export const presets43 = Object.values(VideoPresets43);
+
+/* @internal */
+export const presetsScreenShare = Object.values(ScreenSharePresets);
+
+/* @internal */
+export const defaultSimulcastPresets169 = [VideoPresets.h180, VideoPresets.h360];
+
+/* @internal */
+export const defaultSimulcastPresets43 = [VideoPresets43.h180, VideoPresets43.h360];
+
+/* @internal */
+export const computeDefaultScreenShareSimulcastPresets = (fromPreset: VideoPreset) => {
+  const layers = [{ scaleResolutionDownBy: 2, fps: 3 }];
+  return layers.map(
+    (t) =>
+      new VideoPreset(
+        Math.floor(fromPreset.width / t.scaleResolutionDownBy),
+        Math.floor(fromPreset.height / t.scaleResolutionDownBy),
+        Math.max(
+          150_000,
+          Math.floor(
+            fromPreset.encoding.maxBitrate /
+              (t.scaleResolutionDownBy ** 2 * ((fromPreset.encoding.maxFramerate ?? 30) / t.fps)),
+          ),
+        ),
+        t.fps,
+      ),
+  );
+};
+
+const videoRids = ['q', 'h', 'f'];
+
+/* @internal */
+export function computeVideoEncodings(
+  isScreenShare: boolean,
+  width?: number,
+  height?: number,
+  options?: TrackPublishOptions,
+): RTCRtpEncodingParameters[] {
+  let videoEncoding: VideoEncoding | undefined = options?.videoEncoding;
+  if (isScreenShare) {
+    videoEncoding = options?.screenShareEncoding;
+  }
+  const useSimulcast = options?.simulcast;
+
+  if ((!videoEncoding && !useSimulcast) || !width || !height) {
+    // when we aren't simulcasting, will need to return a single encoding without
+    // capping bandwidth. we always require a encoding for dynacast
+    return [{}];
+  }
+
+  if (!videoEncoding) {
+    // find the right encoding based on width/height
+    videoEncoding = determineAppropriateEncoding(isScreenShare, width, height);
+    log.debug('using video encoding', videoEncoding);
+  }
+
+  if (!useSimulcast) {
+    return [videoEncoding];
+  }
+  const original = new VideoPreset(
+    width,
+    height,
+    videoEncoding.maxBitrate,
+    videoEncoding.maxFramerate,
+  );
+  let presets: Array<VideoPreset> = [];
+  if (isScreenShare) {
+    presets =
+      sortPresets(options?.screenShareSimulcastLayers) ??
+      defaultSimulcastLayers(isScreenShare, original);
+  } else {
+    presets =
+      sortPresets(options?.videoSimulcastLayers) ?? defaultSimulcastLayers(isScreenShare, original);
+  }
+  let midPreset: VideoPreset | undefined;
+  const lowPreset = presets[0];
+  if (presets.length > 1) {
+    [, midPreset] = presets;
+  }
+
+  // NOTE:
+  //   1. Ordering of these encodings is important. Chrome seems
+  //      to use the index into encodings to decide which layer
+  //      to disable when CPU constrained.
+  //      So encodings should be ordered in increasing spatial
+  //      resolution order.
+  //   2. ion-sfu translates rids into layers. So, all encodings
+  //      should have the base layer `q` and then more added
+  //      based on other conditions.
+  const size = Math.max(width, height);
+  if (size >= 960 && midPreset) {
+    return encodingsFromPresets(width, height, [lowPreset, midPreset, original]);
+  }
+  if (size >= 480) {
+    return encodingsFromPresets(width, height, [lowPreset, original]);
+  }
+  return encodingsFromPresets(width, height, [original]);
+}
+
+/* @internal */
+export function determineAppropriateEncoding(
+  isScreenShare: boolean,
+  width: number,
+  height: number,
+): VideoEncoding {
+  const presets = presetsForResolution(isScreenShare, width, height);
+  let { encoding } = presets[0];
+
+  // handle portrait by swapping dimensions
+  const size = Math.max(width, height);
+
+  for (let i = 0; i < presets.length; i += 1) {
+    const preset = presets[i];
+    encoding = preset.encoding;
+    if (preset.width >= size) {
+      break;
+    }
+  }
+
+  return encoding;
+}
+
+/* @internal */
+export function presetsForResolution(
+  isScreenShare: boolean,
+  width: number,
+  height: number,
+): VideoPreset[] {
+  if (isScreenShare) {
+    return presetsScreenShare;
+  }
+  const aspect = width > height ? width / height : height / width;
+  if (Math.abs(aspect - 16.0 / 9) < Math.abs(aspect - 4.0 / 3)) {
+    return presets169;
+  }
+  return presets43;
+}
+
+/* @internal */
+export function defaultSimulcastLayers(
+  isScreenShare: boolean,
+  original: VideoPreset,
+): VideoPreset[] {
+  if (isScreenShare) {
+    return computeDefaultScreenShareSimulcastPresets(original);
+  }
+  const { width, height } = original;
+  const aspect = width > height ? width / height : height / width;
+  if (Math.abs(aspect - 16.0 / 9) < Math.abs(aspect - 4.0 / 3)) {
+    return defaultSimulcastPresets169;
+  }
+  return defaultSimulcastPresets43;
+}
+
+// presets should be ordered by low, medium, high
+function encodingsFromPresets(
+  width: number,
+  height: number,
+  presets: VideoPreset[],
+): RTCRtpEncodingParameters[] {
+  const encodings: RTCRtpEncodingParameters[] = [];
+  presets.forEach((preset, idx) => {
+    if (idx >= videoRids.length) {
+      return;
+    }
+    const size = Math.min(width, height);
+    const rid = videoRids[idx];
+    encodings.push({
+      rid,
+      scaleResolutionDownBy: size / Math.min(preset.width, preset.height),
+      maxBitrate: preset.encoding.maxBitrate,
+      /* @ts-ignore */
+      maxFramerate: preset.encoding.maxFramerate,
+    });
+  });
+  return encodings;
+}
+
+/** @internal */
+export function sortPresets(presets: Array<VideoPreset> | undefined) {
+  if (!presets) return;
+  return presets.sort((a, b) => {
+    const { encoding: aEnc } = a;
+    const { encoding: bEnc } = b;
+
+    if (aEnc.maxBitrate > bEnc.maxBitrate) {
+      return 1;
+    }
+    if (aEnc.maxBitrate < bEnc.maxBitrate) return -1;
+    if (aEnc.maxBitrate === bEnc.maxBitrate && aEnc.maxFramerate && bEnc.maxFramerate) {
+      return aEnc.maxFramerate > bEnc.maxFramerate ? 1 : -1;
+    }
+    return 0;
+  });
+}
```
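For orientation, here is a minimal usage sketch of the simulcast helpers above. It is not part of the published diff: the 1280×720 dimensions are illustrative, and since these helpers are marked `@internal`, the relative import assumes code living alongside `publishUtils.ts` in the source tree.

```ts
import { computeVideoEncodings } from './publishUtils';

// Illustrative 720p camera publish with simulcast enabled.
// max(1280, 720) >= 960, so three layers come back, ordered low → high,
// using the rids 'q', 'h' and 'f' expected by the SFU.
const encodings = computeVideoEncodings(false /* isScreenShare */, 1280, 720, {
  simulcast: true,
});
// encodings[0] ≈ { rid: 'q', scaleResolutionDownBy: 4, maxBitrate: <h180 preset> }
// encodings[1] ≈ { rid: 'h', scaleResolutionDownBy: 2, maxBitrate: <h360 preset> }
// encodings[2] ≈ { rid: 'f', scaleResolutionDownBy: 1, maxBitrate: <720p encoding> }
```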
```diff
--- /dev/null
+++ b/package/src/room/stats.ts
@@ -0,0 +1,134 @@
+export const monitorFrequency = 2000;
+
+// key stats for senders and receivers
+interface SenderStats {
+  /** number of packets sent */
+  packetsSent?: number;
+
+  /** number of bytes sent */
+  bytesSent?: number;
+
+  /** jitter as perceived by remote */
+  jitter?: number;
+
+  /** packets reported lost by remote */
+  packetsLost?: number;
+
+  /** RTT reported by remote */
+  roundTripTime?: number;
+
+  /** ID of the outbound stream */
+  streamId?: string;
+
+  timestamp: number;
+}
+
+export interface AudioSenderStats extends SenderStats {
+  type: 'audio';
+}
+
+export interface VideoSenderStats extends SenderStats {
+  type: 'video';
+
+  firCount: number;
+
+  pliCount: number;
+
+  nackCount: number;
+
+  rid: string;
+
+  frameWidth: number;
+
+  frameHeight: number;
+
+  framesSent: number;
+
+  // bandwidth, cpu, other, none
+  qualityLimitationReason: string;
+
+  qualityLimitationResolutionChanges: number;
+
+  retransmittedPacketsSent: number;
+}
+
+interface ReceiverStats {
+  jitterBufferDelay?: number;
+
+  /** packets reported lost by remote */
+  packetsLost?: number;
+
+  /** number of packets sent */
+  packetsReceived?: number;
+
+  bytesReceived?: number;
+
+  streamId?: string;
+
+  jitter?: number;
+
+  timestamp: number;
+}
+
+export interface AudioReceiverStats extends ReceiverStats {
+  type: 'audio';
+
+  concealedSamples?: number;
+
+  concealmentEvents?: number;
+
+  silentConcealedSamples?: number;
+
+  silentConcealmentEvents?: number;
+
+  totalAudioEnergy?: number;
+
+  totalSamplesDuration?: number;
+}
+
+export interface VideoReceiverStats extends ReceiverStats {
+  type: 'video';
+
+  framesDecoded: number;
+
+  framesDropped: number;
+
+  framesReceived: number;
+
+  frameWidth?: number;
+
+  frameHeight?: number;
+
+  firCount?: number;
+
+  pliCount?: number;
+
+  nackCount?: number;
+}
+
+export function computeBitrate<T extends ReceiverStats | SenderStats>(
+  currentStats: T,
+  prevStats?: T,
+): number {
+  if (!prevStats) {
+    return 0;
+  }
+  let bytesNow: number | undefined;
+  let bytesPrev: number | undefined;
+  if ('bytesReceived' in currentStats) {
+    bytesNow = (currentStats as ReceiverStats).bytesReceived;
+    bytesPrev = (prevStats as ReceiverStats).bytesReceived;
+  } else if ('bytesSent' in currentStats) {
+    bytesNow = (currentStats as SenderStats).bytesSent;
+    bytesPrev = (prevStats as SenderStats).bytesSent;
+  }
+  if (
+    bytesNow === undefined ||
+    bytesPrev === undefined ||
+    currentStats.timestamp === undefined ||
+    prevStats.timestamp === undefined
+  ) {
+    return 0;
+  }
+  return ((bytesNow - bytesPrev) * 8 * 1000) / (currentStats.timestamp - prevStats.timestamp);
+}
```
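A small worked example of `computeBitrate`, with invented values: the byte delta between two snapshots is converted to bits per second using the stats timestamps, which are in milliseconds.

```ts
import { AudioSenderStats, computeBitrate } from './stats';

// Two hypothetical sender snapshots taken monitorFrequency (2000 ms) apart.
const prev: AudioSenderStats = { type: 'audio', bytesSent: 10_000, timestamp: 1_000 };
const curr: AudioSenderStats = { type: 'audio', bytesSent: 22_500, timestamp: 3_000 };

// (22_500 - 10_000) bytes * 8 * 1000 / (3_000 - 1_000) ms = 50_000 bits per second
const bitrate = computeBitrate(curr, prev); // 50_000
```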
```diff
--- /dev/null
+++ b/package/src/room/track/LocalAudioTrack.ts
@@ -0,0 +1,134 @@
+import log from '../../logger';
+import { TrackEvent } from '../events';
+import { AudioSenderStats, computeBitrate, monitorFrequency } from '../stats';
+import LocalTrack from './LocalTrack';
+import { AudioCaptureOptions } from './options';
+import { Track } from './Track';
+import { constraintsForOptions, detectSilence } from './utils';
+
+export default class LocalAudioTrack extends LocalTrack {
+  sender?: RTCRtpSender;
+
+  /** @internal */
+  stopOnMute: boolean = false;
+
+  private prevStats?: AudioSenderStats;
+
+  constructor(mediaTrack: MediaStreamTrack, constraints?: MediaTrackConstraints) {
+    super(mediaTrack, Track.Kind.Audio, constraints);
+    this.checkForSilence();
+  }
+
+  async setDeviceId(deviceId: string) {
+    if (this.constraints.deviceId === deviceId) {
+      return;
+    }
+    this.constraints.deviceId = deviceId;
+    if (!this.isMuted) {
+      await this.restartTrack();
+    }
+  }
+
+  async mute(): Promise<LocalAudioTrack> {
+    // disabled special handling as it will cause BT headsets to switch communication modes
+    if (this.source === Track.Source.Microphone && this.stopOnMute) {
+      log.debug('stopping mic track');
+      // also stop the track, so that microphone indicator is turned off
+      this.mediaStreamTrack.stop();
+    }
+    await super.mute();
+    return this;
+  }
+
+  async unmute(): Promise<LocalAudioTrack> {
+    if (this.source === Track.Source.Microphone && this.stopOnMute) {
+      log.debug('reacquiring mic track');
+      await this.restartTrack();
+    }
+    await super.unmute();
+    return this;
+  }
+
+  async restartTrack(options?: AudioCaptureOptions) {
+    let constraints: MediaTrackConstraints | undefined;
+    if (options) {
+      const streamConstraints = constraintsForOptions({ audio: options });
+      if (typeof streamConstraints.audio !== 'boolean') {
+        constraints = streamConstraints.audio;
+      }
+    }
+    await this.restart(constraints);
+  }
+
+  protected async restart(constraints?: MediaTrackConstraints): Promise<LocalTrack> {
+    const track = await super.restart(constraints);
+    this.checkForSilence();
+    return track;
+  }
+
+  /* @internal */
+  startMonitor() {
+    setTimeout(() => {
+      this.monitorSender();
+    }, monitorFrequency);
+  }
+
+  private monitorSender = async () => {
+    if (!this.sender) {
+      this._currentBitrate = 0;
+      return;
+    }
+
+    let stats: AudioSenderStats | undefined;
+    try {
+      stats = await this.getSenderStats();
+    } catch (e) {
+      log.error('could not get audio sender stats', e);
+      return;
+    }
+
+    if (stats && this.prevStats) {
+      this._currentBitrate = computeBitrate(stats, this.prevStats);
+    }
+
+    this.prevStats = stats;
+    setTimeout(() => {
+      this.monitorSender();
+    }, monitorFrequency);
+  };
+
+  async getSenderStats(): Promise<AudioSenderStats | undefined> {
+    if (!this.sender) {
+      return undefined;
+    }
+
+    const stats = await this.sender.getStats();
+    let audioStats: AudioSenderStats | undefined;
+    stats.forEach((v) => {
+      if (v.type === 'outbound-rtp') {
+        audioStats = {
+          type: 'audio',
+          streamId: v.id,
+          packetsSent: v.packetsSent,
+          packetsLost: v.packetsLost,
+          bytesSent: v.bytesSent,
+          timestamp: v.timestamp,
+          roundTripTime: v.roundTripTime,
+          jitter: v.jitter,
+        };
+      }
+    });
+
+    return audioStats;
+  }
+
+  async checkForSilence() {
+    const trackIsSilent = await detectSilence(this);
+    if (trackIsSilent) {
+      if (!this.isMuted) {
+        log.warn('silence detected on local audio track');
+      }
+      this.emit(TrackEvent.AudioSilenceDetected);
+    }
+  }
+}
```
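And a rough sketch of constructing `LocalAudioTrack` directly. This is hypothetical and not part of the diff: the SDK normally creates these tracks through its `createLocalTracks`/publish path, the capture call is a plain getUserMedia placeholder, and the device id below is invented.

```ts
import LocalAudioTrack from './LocalAudioTrack';

async function captureMicrophone(): Promise<LocalAudioTrack> {
  // Placeholder capture; in the SDK this normally happens inside createLocalTracks().
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const [mediaTrack] = stream.getAudioTracks();

  // The constructor runs checkForSilence(), which emits
  // TrackEvent.AudioSilenceDetected when the capture appears silent.
  const track = new LocalAudioTrack(mediaTrack, { echoCancellation: true });

  // Switching devices updates the stored constraints and restarts capture,
  // unless the track is currently muted.
  await track.setDeviceId('hypothetical-device-id');

  return track;
}
```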