livekit-client 0.17.3 → 0.17.6-rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1 -1
- package/README.md +26 -20
- package/dist/api/SignalClient.d.ts +1 -0
- package/dist/connect.d.ts +2 -0
- package/dist/index.d.ts +2 -2
- package/dist/livekit-client.esm.js +17344 -0
- package/dist/livekit-client.esm.js.map +1 -0
- package/dist/livekit-client.umd.js +17388 -0
- package/dist/livekit-client.umd.js.map +1 -0
- package/dist/logger.d.ts +22 -11
- package/dist/options.d.ts +4 -2
- package/dist/proto/google/protobuf/timestamp.d.ts +12 -2
- package/dist/proto/livekit_models.d.ts +524 -17
- package/dist/proto/livekit_rtc.d.ts +3449 -31
- package/dist/room/DeviceManager.d.ts +1 -1
- package/dist/room/RTCEngine.d.ts +1 -1
- package/dist/room/Room.d.ts +2 -2
- package/dist/room/events.d.ts +1 -1
- package/dist/room/participant/LocalParticipant.d.ts +9 -5
- package/dist/room/participant/RemoteParticipant.d.ts +9 -0
- package/dist/room/track/RemoteAudioTrack.d.ts +11 -0
- package/dist/room/track/options.d.ts +1 -1
- package/dist/test/mocks.d.ts +11 -0
- package/dist/version.d.ts +1 -1
- package/package.json +41 -16
- package/.eslintrc.js +0 -17
- package/.gitmodules +0 -3
- package/dist/api/RequestQueue.js +0 -61
- package/dist/api/RequestQueue.js.map +0 -1
- package/dist/api/SignalClient.js +0 -428
- package/dist/api/SignalClient.js.map +0 -1
- package/dist/connect.js +0 -130
- package/dist/connect.js.map +0 -1
- package/dist/index.js +0 -71
- package/dist/index.js.map +0 -1
- package/dist/logger.js +0 -24
- package/dist/logger.js.map +0 -1
- package/dist/options.js +0 -3
- package/dist/options.js.map +0 -1
- package/dist/proto/google/protobuf/timestamp.js +0 -93
- package/dist/proto/google/protobuf/timestamp.js.map +0 -1
- package/dist/proto/livekit_models.js +0 -2688
- package/dist/proto/livekit_models.js.map +0 -1
- package/dist/proto/livekit_rtc.js +0 -2995
- package/dist/proto/livekit_rtc.js.map +0 -1
- package/dist/room/DeviceManager.js +0 -62
- package/dist/room/DeviceManager.js.map +0 -1
- package/dist/room/PCTransport.js +0 -91
- package/dist/room/PCTransport.js.map +0 -1
- package/dist/room/RTCEngine.js +0 -562
- package/dist/room/RTCEngine.js.map +0 -1
- package/dist/room/Room.js +0 -759
- package/dist/room/Room.js.map +0 -1
- package/dist/room/errors.js +0 -68
- package/dist/room/errors.js.map +0 -1
- package/dist/room/events.js +0 -385
- package/dist/room/events.js.map +0 -1
- package/dist/room/participant/LocalParticipant.js +0 -647
- package/dist/room/participant/LocalParticipant.js.map +0 -1
- package/dist/room/participant/Participant.js +0 -189
- package/dist/room/participant/Participant.js.map +0 -1
- package/dist/room/participant/ParticipantTrackPermission.js +0 -16
- package/dist/room/participant/ParticipantTrackPermission.js.map +0 -1
- package/dist/room/participant/RemoteParticipant.js +0 -194
- package/dist/room/participant/RemoteParticipant.js.map +0 -1
- package/dist/room/participant/publishUtils.js +0 -189
- package/dist/room/participant/publishUtils.js.map +0 -1
- package/dist/room/participant/publishUtils.test.d.ts +0 -1
- package/dist/room/participant/publishUtils.test.js +0 -118
- package/dist/room/participant/publishUtils.test.js.map +0 -1
- package/dist/room/stats.js +0 -26
- package/dist/room/stats.js.map +0 -1
- package/dist/room/track/LocalAudioTrack.js +0 -153
- package/dist/room/track/LocalAudioTrack.js.map +0 -1
- package/dist/room/track/LocalTrack.js +0 -158
- package/dist/room/track/LocalTrack.js.map +0 -1
- package/dist/room/track/LocalTrackPublication.js +0 -64
- package/dist/room/track/LocalTrackPublication.js.map +0 -1
- package/dist/room/track/LocalVideoTrack.js +0 -297
- package/dist/room/track/LocalVideoTrack.js.map +0 -1
- package/dist/room/track/LocalVideoTrack.test.d.ts +0 -1
- package/dist/room/track/LocalVideoTrack.test.js +0 -68
- package/dist/room/track/LocalVideoTrack.test.js.map +0 -1
- package/dist/room/track/RemoteAudioTrack.js +0 -64
- package/dist/room/track/RemoteAudioTrack.js.map +0 -1
- package/dist/room/track/RemoteTrack.js +0 -49
- package/dist/room/track/RemoteTrack.js.map +0 -1
- package/dist/room/track/RemoteTrackPublication.js +0 -178
- package/dist/room/track/RemoteTrackPublication.js.map +0 -1
- package/dist/room/track/RemoteVideoTrack.js +0 -201
- package/dist/room/track/RemoteVideoTrack.js.map +0 -1
- package/dist/room/track/Track.js +0 -276
- package/dist/room/track/Track.js.map +0 -1
- package/dist/room/track/TrackPublication.js +0 -92
- package/dist/room/track/TrackPublication.js.map +0 -1
- package/dist/room/track/create.js +0 -131
- package/dist/room/track/create.js.map +0 -1
- package/dist/room/track/defaults.js +0 -21
- package/dist/room/track/defaults.js.map +0 -1
- package/dist/room/track/options.js +0 -100
- package/dist/room/track/options.js.map +0 -1
- package/dist/room/track/types.js +0 -3
- package/dist/room/track/types.js.map +0 -1
- package/dist/room/track/utils.js +0 -113
- package/dist/room/track/utils.js.map +0 -1
- package/dist/room/track/utils.test.d.ts +0 -1
- package/dist/room/track/utils.test.js +0 -85
- package/dist/room/track/utils.test.js.map +0 -1
- package/dist/room/utils.js +0 -79
- package/dist/room/utils.js.map +0 -1
- package/dist/version.js +0 -6
- package/dist/version.js.map +0 -1
- package/jest.config.js +0 -6
- package/src/api/RequestQueue.ts +0 -53
- package/src/api/SignalClient.ts +0 -499
- package/src/connect.ts +0 -100
- package/src/index.ts +0 -47
- package/src/logger.ts +0 -22
- package/src/options.ts +0 -149
- package/src/proto/google/protobuf/timestamp.ts +0 -222
- package/src/proto/livekit_models.ts +0 -3019
- package/src/proto/livekit_rtc.ts +0 -3677
- package/src/room/DeviceManager.ts +0 -57
- package/src/room/PCTransport.ts +0 -86
- package/src/room/RTCEngine.ts +0 -652
- package/src/room/Room.ts +0 -943
- package/src/room/errors.ts +0 -65
- package/src/room/events.ts +0 -424
- package/src/room/participant/LocalParticipant.ts +0 -734
- package/src/room/participant/Participant.ts +0 -269
- package/src/room/participant/ParticipantTrackPermission.ts +0 -32
- package/src/room/participant/RemoteParticipant.ts +0 -243
- package/src/room/participant/publishUtils.test.ts +0 -145
- package/src/room/participant/publishUtils.ts +0 -225
- package/src/room/stats.ts +0 -130
- package/src/room/track/LocalAudioTrack.ts +0 -137
- package/src/room/track/LocalTrack.ts +0 -161
- package/src/room/track/LocalTrackPublication.ts +0 -66
- package/src/room/track/LocalVideoTrack.test.ts +0 -70
- package/src/room/track/LocalVideoTrack.ts +0 -293
- package/src/room/track/RemoteAudioTrack.ts +0 -58
- package/src/room/track/RemoteTrack.ts +0 -62
- package/src/room/track/RemoteTrackPublication.ts +0 -198
- package/src/room/track/RemoteVideoTrack.ts +0 -235
- package/src/room/track/Track.ts +0 -337
- package/src/room/track/TrackPublication.ts +0 -120
- package/src/room/track/create.ts +0 -121
- package/src/room/track/defaults.ts +0 -23
- package/src/room/track/options.ts +0 -281
- package/src/room/track/types.ts +0 -20
- package/src/room/track/utils.test.ts +0 -93
- package/src/room/track/utils.ts +0 -115
- package/src/room/utils.ts +0 -70
- package/src/version.ts +0 -2
- package/tsconfig.eslint.json +0 -11
@@ -1,225 +0,0 @@
|
|
1
|
-
import log from '../../logger';
|
2
|
-
import { TrackInvalidError } from '../errors';
|
3
|
-
import LocalAudioTrack from '../track/LocalAudioTrack';
|
4
|
-
import LocalVideoTrack from '../track/LocalVideoTrack';
|
5
|
-
import {
|
6
|
-
ScreenSharePresets, TrackPublishOptions,
|
7
|
-
VideoEncoding, VideoPreset, VideoPresets,
|
8
|
-
VideoPresets43,
|
9
|
-
} from '../track/options';
|
10
|
-
|
11
|
-
/** @internal */
|
12
|
-
export function mediaTrackToLocalTrack(
|
13
|
-
mediaStreamTrack: MediaStreamTrack,
|
14
|
-
constraints?: MediaTrackConstraints,
|
15
|
-
): LocalVideoTrack | LocalAudioTrack {
|
16
|
-
switch (mediaStreamTrack.kind) {
|
17
|
-
case 'audio':
|
18
|
-
return new LocalAudioTrack(mediaStreamTrack, constraints);
|
19
|
-
case 'video':
|
20
|
-
return new LocalVideoTrack(mediaStreamTrack, constraints);
|
21
|
-
default:
|
22
|
-
throw new TrackInvalidError(
|
23
|
-
`unsupported track type: ${mediaStreamTrack.kind}`,
|
24
|
-
);
|
25
|
-
}
|
26
|
-
}
|
27
|
-
|
28
|
-
/* @internal */
|
29
|
-
export const presets169 = Object.values(VideoPresets);
|
30
|
-
|
31
|
-
/* @internal */
|
32
|
-
export const presets43 = Object.values(VideoPresets43);
|
33
|
-
|
34
|
-
/* @internal */
|
35
|
-
export const presetsScreenShare = Object.values(ScreenSharePresets);
|
36
|
-
|
37
|
-
/* @internal */
|
38
|
-
export const defaultSimulcastPresets169 = [
|
39
|
-
VideoPresets.h180,
|
40
|
-
VideoPresets.h360,
|
41
|
-
];
|
42
|
-
|
43
|
-
/* @internal */
|
44
|
-
export const defaultSimulcastPresets43 = [
|
45
|
-
VideoPresets43.h180,
|
46
|
-
VideoPresets43.h360,
|
47
|
-
];
|
48
|
-
|
49
|
-
/* @internal */
|
50
|
-
export const computeDefaultScreenShareSimulcastPresets = (fromPreset: VideoPreset) => {
|
51
|
-
const layers = [{ scaleResolutionDownBy: 2, fps: 3 }];
|
52
|
-
return layers.map((t) => new VideoPreset(
|
53
|
-
Math.floor(fromPreset.width / t.scaleResolutionDownBy),
|
54
|
-
Math.floor(fromPreset.height / t.scaleResolutionDownBy),
|
55
|
-
Math.max(150_000, Math.floor(fromPreset.encoding.maxBitrate
|
56
|
-
/ (t.scaleResolutionDownBy ** 2 * ((fromPreset.encoding.maxFramerate ?? 30) / t.fps)))),
|
57
|
-
t.fps,
|
58
|
-
));
|
59
|
-
};
|
60
|
-
|
61
|
-
const videoRids = ['q', 'h', 'f'];
|
62
|
-
|
63
|
-
/* @internal */
|
64
|
-
export function computeVideoEncodings(
|
65
|
-
isScreenShare: boolean,
|
66
|
-
width?: number,
|
67
|
-
height?: number,
|
68
|
-
options?: TrackPublishOptions,
|
69
|
-
): RTCRtpEncodingParameters[] {
|
70
|
-
let videoEncoding: VideoEncoding | undefined = options?.videoEncoding;
|
71
|
-
if (isScreenShare) {
|
72
|
-
videoEncoding = options?.screenShareEncoding;
|
73
|
-
}
|
74
|
-
const useSimulcast = options?.simulcast;
|
75
|
-
|
76
|
-
if ((!videoEncoding && !useSimulcast) || !width || !height) {
|
77
|
-
// when we aren't simulcasting, will need to return a single encoding without
|
78
|
-
// capping bandwidth. we always require a encoding for dynacast
|
79
|
-
return [{}];
|
80
|
-
}
|
81
|
-
|
82
|
-
if (!videoEncoding) {
|
83
|
-
// find the right encoding based on width/height
|
84
|
-
videoEncoding = determineAppropriateEncoding(isScreenShare, width, height);
|
85
|
-
log.debug('using video encoding', videoEncoding);
|
86
|
-
}
|
87
|
-
|
88
|
-
if (!useSimulcast) {
|
89
|
-
return [videoEncoding];
|
90
|
-
}
|
91
|
-
const original = new VideoPreset(
|
92
|
-
width, height, videoEncoding.maxBitrate, videoEncoding.maxFramerate,
|
93
|
-
);
|
94
|
-
let presets: Array<VideoPreset> = [];
|
95
|
-
if (isScreenShare) {
|
96
|
-
presets = sortPresets(options?.screenShareSimulcastLayers)
|
97
|
-
?? defaultSimulcastLayers(isScreenShare, original);
|
98
|
-
} else {
|
99
|
-
presets = sortPresets(options?.videoSimulcastLayers)
|
100
|
-
?? defaultSimulcastLayers(isScreenShare, original);
|
101
|
-
}
|
102
|
-
let midPreset: VideoPreset | undefined;
|
103
|
-
const lowPreset = presets[0];
|
104
|
-
if (presets.length > 1) {
|
105
|
-
[, midPreset] = presets;
|
106
|
-
}
|
107
|
-
|
108
|
-
// NOTE:
|
109
|
-
// 1. Ordering of these encodings is important. Chrome seems
|
110
|
-
// to use the index into encodings to decide which layer
|
111
|
-
// to disable when CPU constrained.
|
112
|
-
// So encodings should be ordered in increasing spatial
|
113
|
-
// resolution order.
|
114
|
-
// 2. ion-sfu translates rids into layers. So, all encodings
|
115
|
-
// should have the base layer `q` and then more added
|
116
|
-
// based on other conditions.
|
117
|
-
const size = Math.max(width, height);
|
118
|
-
if (size >= 960 && midPreset) {
|
119
|
-
return encodingsFromPresets(width, height, [
|
120
|
-
lowPreset, midPreset, original,
|
121
|
-
]);
|
122
|
-
}
|
123
|
-
if (size >= 480) {
|
124
|
-
return encodingsFromPresets(width, height, [
|
125
|
-
lowPreset, original,
|
126
|
-
]);
|
127
|
-
}
|
128
|
-
return encodingsFromPresets(width, height, [
|
129
|
-
original,
|
130
|
-
]);
|
131
|
-
}
|
132
|
-
|
133
|
-
/* @internal */
|
134
|
-
export function determineAppropriateEncoding(
|
135
|
-
isScreenShare: boolean,
|
136
|
-
width: number,
|
137
|
-
height: number,
|
138
|
-
): VideoEncoding {
|
139
|
-
const presets = presetsForResolution(isScreenShare, width, height);
|
140
|
-
let { encoding } = presets[0];
|
141
|
-
|
142
|
-
// handle portrait by swapping dimensions
|
143
|
-
const size = Math.max(width, height);
|
144
|
-
|
145
|
-
for (let i = 0; i < presets.length; i += 1) {
|
146
|
-
const preset = presets[i];
|
147
|
-
encoding = preset.encoding;
|
148
|
-
if (preset.width >= size) {
|
149
|
-
break;
|
150
|
-
}
|
151
|
-
}
|
152
|
-
|
153
|
-
return encoding;
|
154
|
-
}
|
155
|
-
|
156
|
-
/* @internal */
|
157
|
-
export function presetsForResolution(
|
158
|
-
isScreenShare: boolean, width: number, height: number,
|
159
|
-
): VideoPreset[] {
|
160
|
-
if (isScreenShare) {
|
161
|
-
return presetsScreenShare;
|
162
|
-
}
|
163
|
-
const aspect = width > height ? width / height : height / width;
|
164
|
-
if (Math.abs(aspect - 16.0 / 9) < Math.abs(aspect - 4.0 / 3)) {
|
165
|
-
return presets169;
|
166
|
-
}
|
167
|
-
return presets43;
|
168
|
-
}
|
169
|
-
|
170
|
-
/* @internal */
|
171
|
-
export function defaultSimulcastLayers(
|
172
|
-
isScreenShare: boolean, original: VideoPreset,
|
173
|
-
): VideoPreset[] {
|
174
|
-
if (isScreenShare) {
|
175
|
-
return computeDefaultScreenShareSimulcastPresets(original);
|
176
|
-
}
|
177
|
-
const { width, height } = original;
|
178
|
-
const aspect = width > height ? width / height : height / width;
|
179
|
-
if (Math.abs(aspect - 16.0 / 9) < Math.abs(aspect - 4.0 / 3)) {
|
180
|
-
return defaultSimulcastPresets169;
|
181
|
-
}
|
182
|
-
return defaultSimulcastPresets43;
|
183
|
-
}
|
184
|
-
|
185
|
-
// presets should be ordered by low, medium, high
|
186
|
-
function encodingsFromPresets(
|
187
|
-
width: number,
|
188
|
-
height: number,
|
189
|
-
presets: VideoPreset[],
|
190
|
-
): RTCRtpEncodingParameters[] {
|
191
|
-
const encodings: RTCRtpEncodingParameters[] = [];
|
192
|
-
presets.forEach((preset, idx) => {
|
193
|
-
if (idx >= videoRids.length) {
|
194
|
-
return;
|
195
|
-
}
|
196
|
-
const size = Math.min(width, height);
|
197
|
-
const rid = videoRids[idx];
|
198
|
-
encodings.push({
|
199
|
-
rid,
|
200
|
-
scaleResolutionDownBy: size / Math.min(preset.width, preset.height),
|
201
|
-
maxBitrate: preset.encoding.maxBitrate,
|
202
|
-
/* @ts-ignore */
|
203
|
-
maxFramerate: preset.encoding.maxFramerate,
|
204
|
-
});
|
205
|
-
});
|
206
|
-
return encodings;
|
207
|
-
}
|
208
|
-
|
209
|
-
/** @internal */
|
210
|
-
export function sortPresets(presets: Array<VideoPreset> | undefined) {
|
211
|
-
if (!presets) return;
|
212
|
-
return presets.sort((a, b) => {
|
213
|
-
const { encoding: aEnc } = a;
|
214
|
-
const { encoding: bEnc } = b;
|
215
|
-
|
216
|
-
if (aEnc.maxBitrate > bEnc.maxBitrate) {
|
217
|
-
return 1;
|
218
|
-
}
|
219
|
-
if (aEnc.maxBitrate < bEnc.maxBitrate) return -1;
|
220
|
-
if (aEnc.maxBitrate === bEnc.maxBitrate && aEnc.maxFramerate && bEnc.maxFramerate) {
|
221
|
-
return aEnc.maxFramerate > bEnc.maxFramerate ? 1 : -1;
|
222
|
-
}
|
223
|
-
return 0;
|
224
|
-
});
|
225
|
-
}
|
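The simulcast helpers above are internal to the SDK, but their interplay is easier to see with a concrete call. A minimal sketch, assuming the module above is importable and that `TrackPublishOptions` (from `options.ts`, not shown here) accepts a partial object with only `simulcast` set; the exact bitrates come from the presets in `options.ts`:

```ts
import { computeVideoEncodings } from './publishUtils';
import { TrackPublishOptions } from './options';

// 720p camera capture with simulcast enabled.
const opts: TrackPublishOptions = { simulcast: true };
const encodings = computeVideoEncodings(false /* isScreenShare */, 1280, 720, opts);

// Since max(1280, 720) >= 960 and a mid preset exists, three layers come back,
// ordered low → high so upper layers can be dropped under constraint:
// [ { rid: 'q', scaleResolutionDownBy: ..., maxBitrate: ... },
//   { rid: 'h', ... },
//   { rid: 'f', scaleResolutionDownBy: 1, maxBitrate: <original encoding> } ]
console.log(encodings);
```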
package/src/room/stats.ts
DELETED
@@ -1,130 +0,0 @@
|
|
1
|
-
export const monitorFrequency = 2000;
|
2
|
-
|
3
|
-
// key stats for senders and receivers
|
4
|
-
interface SenderStats {
|
5
|
-
/** number of packets sent */
|
6
|
-
packetsSent?: number;
|
7
|
-
|
8
|
-
/** number of bytes sent */
|
9
|
-
bytesSent?: number;
|
10
|
-
|
11
|
-
/** jitter as perceived by remote */
|
12
|
-
jitter?: number;
|
13
|
-
|
14
|
-
/** packets reported lost by remote */
|
15
|
-
packetsLost?: number;
|
16
|
-
|
17
|
-
/** RTT reported by remote */
|
18
|
-
roundTripTime?: number;
|
19
|
-
|
20
|
-
/** ID of the outbound stream */
|
21
|
-
streamId?: string;
|
22
|
-
|
23
|
-
timestamp: number;
|
24
|
-
}
|
25
|
-
|
26
|
-
export interface AudioSenderStats extends SenderStats {
|
27
|
-
type: 'audio';
|
28
|
-
}
|
29
|
-
|
30
|
-
export interface VideoSenderStats extends SenderStats {
|
31
|
-
type: 'video';
|
32
|
-
|
33
|
-
firCount: number;
|
34
|
-
|
35
|
-
pliCount: number;
|
36
|
-
|
37
|
-
nackCount: number;
|
38
|
-
|
39
|
-
rid: string;
|
40
|
-
|
41
|
-
frameWidth: number;
|
42
|
-
|
43
|
-
frameHeight: number;
|
44
|
-
|
45
|
-
framesSent: number;
|
46
|
-
|
47
|
-
// bandwidth, cpu, other, none
|
48
|
-
qualityLimitationReason: string;
|
49
|
-
|
50
|
-
qualityLimitationResolutionChanges: number;
|
51
|
-
|
52
|
-
retransmittedPacketsSent: number;
|
53
|
-
}
|
54
|
-
|
55
|
-
interface ReceiverStats {
|
56
|
-
jitterBufferDelay?: number;
|
57
|
-
|
58
|
-
/** packets reported lost by remote */
|
59
|
-
packetsLost?: number;
|
60
|
-
|
61
|
-
/** number of packets sent */
|
62
|
-
packetsReceived?: number;
|
63
|
-
|
64
|
-
bytesReceived?: number;
|
65
|
-
|
66
|
-
streamId?: string;
|
67
|
-
|
68
|
-
jitter?: number;
|
69
|
-
|
70
|
-
timestamp: number;
|
71
|
-
}
|
72
|
-
|
73
|
-
export interface AudioReceiverStats extends ReceiverStats {
|
74
|
-
type: 'audio';
|
75
|
-
|
76
|
-
concealedSamples?: number;
|
77
|
-
|
78
|
-
concealmentEvents?: number;
|
79
|
-
|
80
|
-
silentConcealedSamples?: number;
|
81
|
-
|
82
|
-
silentConcealmentEvents?: number;
|
83
|
-
|
84
|
-
totalAudioEnergy?: number;
|
85
|
-
|
86
|
-
totalSamplesDuration?: number;
|
87
|
-
}
|
88
|
-
|
89
|
-
export interface VideoReceiverStats extends ReceiverStats {
|
90
|
-
type: 'video';
|
91
|
-
|
92
|
-
framesDecoded: number;
|
93
|
-
|
94
|
-
framesDropped: number;
|
95
|
-
|
96
|
-
framesReceived: number;
|
97
|
-
|
98
|
-
frameWidth?: number;
|
99
|
-
|
100
|
-
frameHeight?: number;
|
101
|
-
|
102
|
-
firCount?: number;
|
103
|
-
|
104
|
-
pliCount?: number;
|
105
|
-
|
106
|
-
nackCount?: number;
|
107
|
-
}
|
108
|
-
|
109
|
-
export function computeBitrate<T extends ReceiverStats | SenderStats>(
|
110
|
-
currentStats: T,
|
111
|
-
prevStats?: T,
|
112
|
-
): number {
|
113
|
-
if (!prevStats) {
|
114
|
-
return 0;
|
115
|
-
}
|
116
|
-
let bytesNow: number | undefined;
|
117
|
-
let bytesPrev: number | undefined;
|
118
|
-
if ('bytesReceived' in currentStats) {
|
119
|
-
bytesNow = (currentStats as ReceiverStats).bytesReceived;
|
120
|
-
bytesPrev = (prevStats as ReceiverStats).bytesReceived;
|
121
|
-
} else if ('bytesSent' in currentStats) {
|
122
|
-
bytesNow = (currentStats as SenderStats).bytesSent;
|
123
|
-
bytesPrev = (prevStats as SenderStats).bytesSent;
|
124
|
-
}
|
125
|
-
if (bytesNow === undefined || bytesPrev === undefined
|
126
|
-
|| currentStats.timestamp === undefined || prevStats.timestamp === undefined) {
|
127
|
-
return 0;
|
128
|
-
}
|
129
|
-
return ((bytesNow - bytesPrev) * 8 * 1000) / (currentStats.timestamp - prevStats.timestamp);
|
130
|
-
}
|
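`computeBitrate` turns the byte and timestamp deltas of two consecutive stats samples into bits per second. A small worked sketch with made-up numbers (real samples come from the sender/receiver stats helpers on the track classes, such as `getSenderStats` in `LocalAudioTrack` below):

```ts
import { AudioSenderStats, computeBitrate } from './stats';

// Two samples taken roughly monitorFrequency (2000 ms) apart.
const prev: AudioSenderStats = { type: 'audio', bytesSent: 100_000, timestamp: 1_000 };
const curr: AudioSenderStats = { type: 'audio', bytesSent: 125_000, timestamp: 3_000 };

// (125_000 - 100_000) bytes * 8 bits * 1000 / (3_000 - 1_000) ms = 100_000 bps
const bitrate = computeBitrate(curr, prev);
```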
@@ -1,137 +0,0 @@
|
|
1
|
-
import log from '../../logger';
|
2
|
-
import { TrackEvent } from '../events';
|
3
|
-
import { AudioSenderStats, computeBitrate, monitorFrequency } from '../stats';
|
4
|
-
import LocalTrack from './LocalTrack';
|
5
|
-
import { AudioCaptureOptions } from './options';
|
6
|
-
import { Track } from './Track';
|
7
|
-
import { constraintsForOptions, detectSilence } from './utils';
|
8
|
-
|
9
|
-
export default class LocalAudioTrack extends LocalTrack {
|
10
|
-
sender?: RTCRtpSender;
|
11
|
-
|
12
|
-
/** @internal */
|
13
|
-
stopOnMute: boolean = false;
|
14
|
-
|
15
|
-
private prevStats?: AudioSenderStats;
|
16
|
-
|
17
|
-
constructor(
|
18
|
-
mediaTrack: MediaStreamTrack,
|
19
|
-
constraints?: MediaTrackConstraints,
|
20
|
-
) {
|
21
|
-
super(mediaTrack, Track.Kind.Audio, constraints);
|
22
|
-
this.checkForSilence();
|
23
|
-
}
|
24
|
-
|
25
|
-
async setDeviceId(deviceId: string) {
|
26
|
-
if (this.constraints.deviceId === deviceId) {
|
27
|
-
return;
|
28
|
-
}
|
29
|
-
this.constraints.deviceId = deviceId;
|
30
|
-
if (!this.isMuted) {
|
31
|
-
await this.restartTrack();
|
32
|
-
}
|
33
|
-
}
|
34
|
-
|
35
|
-
async mute(): Promise<LocalAudioTrack> {
|
36
|
-
// disabled special handling as it will cause BT headsets to switch communication modes
|
37
|
-
if (this.source === Track.Source.Microphone && this.stopOnMute) {
|
38
|
-
log.debug('stopping mic track');
|
39
|
-
// also stop the track, so that microphone indicator is turned off
|
40
|
-
this.mediaStreamTrack.stop();
|
41
|
-
}
|
42
|
-
await super.mute();
|
43
|
-
return this;
|
44
|
-
}
|
45
|
-
|
46
|
-
async unmute(): Promise<LocalAudioTrack> {
|
47
|
-
if (this.source === Track.Source.Microphone && this.stopOnMute) {
|
48
|
-
log.debug('reacquiring mic track');
|
49
|
-
await this.restartTrack();
|
50
|
-
}
|
51
|
-
await super.unmute();
|
52
|
-
return this;
|
53
|
-
}
|
54
|
-
|
55
|
-
async restartTrack(options?: AudioCaptureOptions) {
|
56
|
-
let constraints: MediaTrackConstraints | undefined;
|
57
|
-
if (options) {
|
58
|
-
const streamConstraints = constraintsForOptions({ audio: options });
|
59
|
-
if (typeof streamConstraints.audio !== 'boolean') {
|
60
|
-
constraints = streamConstraints.audio;
|
61
|
-
}
|
62
|
-
}
|
63
|
-
await this.restart(constraints);
|
64
|
-
}
|
65
|
-
|
66
|
-
protected async restart(constraints?: MediaTrackConstraints): Promise<LocalTrack> {
|
67
|
-
const track = await super.restart(constraints);
|
68
|
-
this.checkForSilence();
|
69
|
-
return track;
|
70
|
-
}
|
71
|
-
|
72
|
-
/* @internal */
|
73
|
-
startMonitor() {
|
74
|
-
setTimeout(() => {
|
75
|
-
this.monitorSender();
|
76
|
-
}, monitorFrequency);
|
77
|
-
}
|
78
|
-
|
79
|
-
private monitorSender = async () => {
|
80
|
-
if (!this.sender) {
|
81
|
-
this._currentBitrate = 0;
|
82
|
-
return;
|
83
|
-
}
|
84
|
-
|
85
|
-
let stats: AudioSenderStats | undefined;
|
86
|
-
try {
|
87
|
-
stats = await this.getSenderStats();
|
88
|
-
} catch (e) {
|
89
|
-
log.error('could not get audio sender stats', e);
|
90
|
-
return;
|
91
|
-
}
|
92
|
-
|
93
|
-
if (stats && this.prevStats) {
|
94
|
-
this._currentBitrate = computeBitrate(stats, this.prevStats);
|
95
|
-
}
|
96
|
-
|
97
|
-
this.prevStats = stats;
|
98
|
-
setTimeout(() => {
|
99
|
-
this.monitorSender();
|
100
|
-
}, monitorFrequency);
|
101
|
-
};
|
102
|
-
|
103
|
-
async getSenderStats(): Promise<AudioSenderStats | undefined> {
|
104
|
-
if (!this.sender) {
|
105
|
-
return undefined;
|
106
|
-
}
|
107
|
-
|
108
|
-
const stats = await this.sender.getStats();
|
109
|
-
let audioStats: AudioSenderStats | undefined;
|
110
|
-
stats.forEach((v) => {
|
111
|
-
if (v.type === 'outbound-rtp') {
|
112
|
-
audioStats = {
|
113
|
-
type: 'audio',
|
114
|
-
streamId: v.id,
|
115
|
-
packetsSent: v.packetsSent,
|
116
|
-
packetsLost: v.packetsLost,
|
117
|
-
bytesSent: v.bytesSent,
|
118
|
-
timestamp: v.timestamp,
|
119
|
-
roundTripTime: v.roundTripTime,
|
120
|
-
jitter: v.jitter,
|
121
|
-
};
|
122
|
-
}
|
123
|
-
});
|
124
|
-
|
125
|
-
return audioStats;
|
126
|
-
}
|
127
|
-
|
128
|
-
async checkForSilence() {
|
129
|
-
const trackIsSilent = await detectSilence(this);
|
130
|
-
if (trackIsSilent) {
|
131
|
-
if (!this.isMuted) {
|
132
|
-
log.warn('silence detected on local audio track');
|
133
|
-
}
|
134
|
-
this.emit(TrackEvent.AudioSilenceDetected);
|
135
|
-
}
|
136
|
-
}
|
137
|
-
}
|
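A sketch of how an application might use the public pieces of this class: reacting to the silence check and switching the capture device. It assumes `createLocalAudioTrack` and `TrackEvent` are re-exported from the package root (the entry points live in `create.ts` and `events.ts`, outside this file), and the device id is a placeholder:

```ts
import { createLocalAudioTrack, TrackEvent } from 'livekit-client';

const micTrack = await createLocalAudioTrack();

// checkForSilence() emits this after capture/restart when detectSilence() finds no signal.
micTrack.on(TrackEvent.AudioSilenceDetected, () => {
  console.warn('microphone appears to be silent, consider switching devices');
});

// Changing the deviceId re-acquires the underlying MediaStreamTrack unless the track is muted.
await micTrack.setDeviceId('replace-with-a-real-audioinput-deviceId');
```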
@@ -1,161 +0,0 @@
|
|
1
|
-
import log from '../../logger';
|
2
|
-
import DeviceManager from '../DeviceManager';
|
3
|
-
import { TrackInvalidError } from '../errors';
|
4
|
-
import { TrackEvent } from '../events';
|
5
|
-
import { isMobile } from '../utils';
|
6
|
-
import { attachToElement, detachTrack, Track } from './Track';
|
7
|
-
|
8
|
-
export default class LocalTrack extends Track {
|
9
|
-
/** @internal */
|
10
|
-
sender?: RTCRtpSender;
|
11
|
-
|
12
|
-
protected constraints: MediaTrackConstraints;
|
13
|
-
|
14
|
-
protected wasMuted: boolean;
|
15
|
-
|
16
|
-
protected reacquireTrack: boolean;
|
17
|
-
|
18
|
-
protected constructor(
|
19
|
-
mediaTrack: MediaStreamTrack, kind: Track.Kind, constraints?: MediaTrackConstraints,
|
20
|
-
) {
|
21
|
-
super(mediaTrack, kind);
|
22
|
-
this.mediaStreamTrack.addEventListener('ended', this.handleEnded);
|
23
|
-
this.constraints = constraints ?? mediaTrack.getConstraints();
|
24
|
-
this.reacquireTrack = false;
|
25
|
-
this.wasMuted = false;
|
26
|
-
}
|
27
|
-
|
28
|
-
get id(): string {
|
29
|
-
return this.mediaStreamTrack.id;
|
30
|
-
}
|
31
|
-
|
32
|
-
get dimensions(): Track.Dimensions | undefined {
|
33
|
-
if (this.kind !== Track.Kind.Video) {
|
34
|
-
return undefined;
|
35
|
-
}
|
36
|
-
|
37
|
-
const { width, height } = this.mediaStreamTrack.getSettings();
|
38
|
-
if (width && height) {
|
39
|
-
return {
|
40
|
-
width,
|
41
|
-
height,
|
42
|
-
};
|
43
|
-
}
|
44
|
-
return undefined;
|
45
|
-
}
|
46
|
-
|
47
|
-
/**
|
48
|
-
* @returns DeviceID of the device that is currently being used for this track
|
49
|
-
*/
|
50
|
-
async getDeviceId(): Promise<string | undefined> {
|
51
|
-
// screen share doesn't have a usable device id
|
52
|
-
if (this.source === Track.Source.ScreenShare) {
|
53
|
-
return;
|
54
|
-
}
|
55
|
-
const { deviceId, groupId } = this.mediaStreamTrack.getSettings();
|
56
|
-
const kind = this.kind === Track.Kind.Audio ? 'audioinput' : 'videoinput';
|
57
|
-
|
58
|
-
return DeviceManager.getInstance().normalizeDeviceId(kind, deviceId, groupId);
|
59
|
-
}
|
60
|
-
|
61
|
-
async mute(): Promise<LocalTrack> {
|
62
|
-
this.setTrackMuted(true);
|
63
|
-
return this;
|
64
|
-
}
|
65
|
-
|
66
|
-
async unmute(): Promise<LocalTrack> {
|
67
|
-
this.setTrackMuted(false);
|
68
|
-
return this;
|
69
|
-
}
|
70
|
-
|
71
|
-
protected async restart(constraints?: MediaTrackConstraints): Promise<LocalTrack> {
|
72
|
-
if (!this.sender) {
|
73
|
-
throw new TrackInvalidError('unable to restart an unpublished track');
|
74
|
-
}
|
75
|
-
if (!constraints) {
|
76
|
-
constraints = this.constraints;
|
77
|
-
}
|
78
|
-
log.debug('restarting track with constraints', constraints);
|
79
|
-
|
80
|
-
const streamConstraints: MediaStreamConstraints = {
|
81
|
-
audio: false,
|
82
|
-
video: false,
|
83
|
-
};
|
84
|
-
|
85
|
-
if (this.kind === Track.Kind.Video) {
|
86
|
-
streamConstraints.video = constraints;
|
87
|
-
} else {
|
88
|
-
streamConstraints.audio = constraints;
|
89
|
-
}
|
90
|
-
|
91
|
-
// detach
|
92
|
-
this.attachedElements.forEach((el) => {
|
93
|
-
detachTrack(this.mediaStreamTrack, el);
|
94
|
-
});
|
95
|
-
this.mediaStreamTrack.removeEventListener('ended', this.handleEnded);
|
96
|
-
// on Safari, the old audio track must be stopped before attempting to acquire
|
97
|
-
// the new track, otherwise the new track will stop with
|
98
|
-
// 'A MediaStreamTrack ended due to a capture failure`
|
99
|
-
this.mediaStreamTrack.stop();
|
100
|
-
|
101
|
-
// create new track and attach
|
102
|
-
const mediaStream = await navigator.mediaDevices.getUserMedia(streamConstraints);
|
103
|
-
const newTrack = mediaStream.getTracks()[0];
|
104
|
-
newTrack.addEventListener('ended', this.handleEnded);
|
105
|
-
log.debug('re-acquired MediaStreamTrack');
|
106
|
-
|
107
|
-
await this.sender.replaceTrack(newTrack);
|
108
|
-
this.mediaStreamTrack = newTrack;
|
109
|
-
|
110
|
-
this.attachedElements.forEach((el) => {
|
111
|
-
attachToElement(newTrack, el);
|
112
|
-
});
|
113
|
-
|
114
|
-
this.mediaStream = mediaStream;
|
115
|
-
this.constraints = constraints;
|
116
|
-
return this;
|
117
|
-
}
|
118
|
-
|
119
|
-
protected setTrackMuted(muted: boolean) {
|
120
|
-
if (this.isMuted === muted) {
|
121
|
-
return;
|
122
|
-
}
|
123
|
-
|
124
|
-
this.isMuted = muted;
|
125
|
-
this.mediaStreamTrack.enabled = !muted;
|
126
|
-
this.emit(muted ? TrackEvent.Muted : TrackEvent.Unmuted, this);
|
127
|
-
}
|
128
|
-
|
129
|
-
protected get needsReAcquisition(): boolean {
|
130
|
-
return this.mediaStreamTrack.readyState !== 'live'
|
131
|
-
|| this.mediaStreamTrack.muted
|
132
|
-
|| !this.mediaStreamTrack.enabled
|
133
|
-
|| this.reacquireTrack;
|
134
|
-
}
|
135
|
-
|
136
|
-
protected async handleAppVisibilityChanged() {
|
137
|
-
await super.handleAppVisibilityChanged();
|
138
|
-
if (!isMobile()) return;
|
139
|
-
log.debug('visibility changed, is in Background: ', this.isInBackground);
|
140
|
-
|
141
|
-
if (!this.isInBackground && this.needsReAcquisition) {
|
142
|
-
log.debug('track needs to be reaquired, restarting', this.source);
|
143
|
-
await this.restart();
|
144
|
-
this.reacquireTrack = false;
|
145
|
-
// Restore muted state if had to be restarted
|
146
|
-
this.setTrackMuted(this.wasMuted);
|
147
|
-
}
|
148
|
-
|
149
|
-
// store muted state each time app goes to background
|
150
|
-
if (this.isInBackground) {
|
151
|
-
this.wasMuted = this.isMuted;
|
152
|
-
}
|
153
|
-
}
|
154
|
-
|
155
|
-
private handleEnded = () => {
|
156
|
-
if (this.isInBackground) {
|
157
|
-
this.reacquireTrack = true;
|
158
|
-
}
|
159
|
-
this.emit(TrackEvent.Ended, this);
|
160
|
-
};
|
161
|
-
}
|
@@ -1,66 +0,0 @@
|
|
1
|
-
import { TrackInfo } from '../../proto/livekit_models';
|
2
|
-
import { TrackEvent } from '../events';
|
3
|
-
import LocalAudioTrack from './LocalAudioTrack';
|
4
|
-
import LocalTrack from './LocalTrack';
|
5
|
-
import LocalVideoTrack from './LocalVideoTrack';
|
6
|
-
import { TrackPublishOptions } from './options';
|
7
|
-
import { Track } from './Track';
|
8
|
-
import { TrackPublication } from './TrackPublication';
|
9
|
-
|
10
|
-
export default class LocalTrackPublication extends TrackPublication {
|
11
|
-
track?: LocalTrack;
|
12
|
-
|
13
|
-
options?: TrackPublishOptions;
|
14
|
-
|
15
|
-
constructor(kind: Track.Kind, ti: TrackInfo, track?: LocalTrack) {
|
16
|
-
super(kind, ti.sid, ti.name);
|
17
|
-
|
18
|
-
this.updateInfo(ti);
|
19
|
-
this.setTrack(track);
|
20
|
-
}
|
21
|
-
|
22
|
-
setTrack(track?: Track) {
|
23
|
-
if (this.track) {
|
24
|
-
this.track.off(TrackEvent.Ended, this.handleTrackEnded);
|
25
|
-
}
|
26
|
-
|
27
|
-
super.setTrack(track);
|
28
|
-
|
29
|
-
if (track) {
|
30
|
-
track.on(TrackEvent.Ended, this.handleTrackEnded);
|
31
|
-
}
|
32
|
-
}
|
33
|
-
|
34
|
-
get isMuted(): boolean {
|
35
|
-
if (this.track) {
|
36
|
-
return this.track.isMuted;
|
37
|
-
}
|
38
|
-
return super.isMuted;
|
39
|
-
}
|
40
|
-
|
41
|
-
get audioTrack(): LocalAudioTrack | undefined {
|
42
|
-
return super.audioTrack as LocalAudioTrack | undefined;
|
43
|
-
}
|
44
|
-
|
45
|
-
get videoTrack(): LocalVideoTrack | undefined {
|
46
|
-
return super.videoTrack as LocalVideoTrack | undefined;
|
47
|
-
}
|
48
|
-
|
49
|
-
/**
|
50
|
-
* Mute the track associated with this publication
|
51
|
-
*/
|
52
|
-
async mute() {
|
53
|
-
return this.track?.mute();
|
54
|
-
}
|
55
|
-
|
56
|
-
/**
|
57
|
-
* Unmute track associated with this publication
|
58
|
-
*/
|
59
|
-
async unmute() {
|
60
|
-
return this.track?.unmute();
|
61
|
-
}
|
62
|
-
|
63
|
-
handleTrackEnded = (track: LocalTrack) => {
|
64
|
-
this.emit(TrackEvent.Ended, track);
|
65
|
-
};
|
66
|
-
}
|
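For completeness, a sketch of the publication-level mute flow. It assumes a connected `Room` and that `localParticipant.publishTrack` resolves to a `LocalTrackPublication` (as in this version's `LocalParticipant`, not shown here); muting through the publication delegates to the underlying track, so `isMuted` reflects the track state:

```ts
import { Room, createLocalAudioTrack } from 'livekit-client';

// Publish a mic track, then toggle mute through the returned publication.
async function publishAndToggleMic(room: Room) {
  const micTrack = await createLocalAudioTrack();
  const publication = await room.localParticipant.publishTrack(micTrack);

  await publication.mute();          // forwards to micTrack.mute()
  console.log(publication.isMuted);  // true, read from the attached track
  await publication.unmute();        // forwards to micTrack.unmute()
}
```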