livekit-client 0.15.3 → 0.15.4
This diff compares the publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- package/dist/api/SignalClient.d.ts +3 -1
- package/dist/api/SignalClient.js +59 -25
- package/dist/api/SignalClient.js.map +1 -1
- package/dist/options.d.ts +5 -0
- package/dist/proto/livekit_models.d.ts +30 -0
- package/dist/proto/livekit_models.js +219 -1
- package/dist/proto/livekit_models.js.map +1 -1
- package/dist/room/RTCEngine.d.ts +2 -0
- package/dist/room/RTCEngine.js +45 -2
- package/dist/room/RTCEngine.js.map +1 -1
- package/dist/room/Room.js +4 -0
- package/dist/room/Room.js.map +1 -1
- package/dist/room/participant/LocalParticipant.js +2 -1
- package/dist/room/participant/LocalParticipant.js.map +1 -1
- package/dist/room/participant/publishUtils.js +1 -1
- package/dist/room/participant/publishUtils.js.map +1 -1
- package/dist/room/participant/publishUtils.test.js +9 -0
- package/dist/room/participant/publishUtils.test.js.map +1 -1
- package/dist/room/track/RemoteTrackPublication.d.ts +1 -0
- package/dist/room/track/RemoteTrackPublication.js +15 -7
- package/dist/room/track/RemoteTrackPublication.js.map +1 -1
- package/dist/room/track/create.js +5 -0
- package/dist/room/track/create.js.map +1 -1
- package/dist/room/utils.d.ts +2 -0
- package/dist/room/utils.js +32 -1
- package/dist/room/utils.js.map +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -2
- package/src/api/SignalClient.ts +434 -0
- package/src/connect.ts +100 -0
- package/src/index.ts +47 -0
- package/src/logger.ts +22 -0
- package/src/options.ts +152 -0
- package/src/proto/livekit_models.ts +1863 -0
- package/src/proto/livekit_rtc.ts +3401 -0
- package/src/room/DeviceManager.ts +57 -0
- package/src/room/PCTransport.ts +86 -0
- package/src/room/RTCEngine.ts +484 -0
- package/src/room/Room.ts +785 -0
- package/src/room/errors.ts +65 -0
- package/src/room/events.ts +396 -0
- package/src/room/participant/LocalParticipant.ts +685 -0
- package/src/room/participant/Participant.ts +214 -0
- package/src/room/participant/ParticipantTrackPermission.ts +32 -0
- package/src/room/participant/RemoteParticipant.ts +238 -0
- package/src/room/participant/publishUtils.test.ts +105 -0
- package/src/room/participant/publishUtils.ts +180 -0
- package/src/room/stats.ts +130 -0
- package/src/room/track/LocalAudioTrack.ts +112 -0
- package/src/room/track/LocalTrack.ts +124 -0
- package/src/room/track/LocalTrackPublication.ts +63 -0
- package/src/room/track/LocalVideoTrack.test.ts +70 -0
- package/src/room/track/LocalVideoTrack.ts +416 -0
- package/src/room/track/RemoteAudioTrack.ts +58 -0
- package/src/room/track/RemoteTrack.ts +59 -0
- package/src/room/track/RemoteTrackPublication.ts +192 -0
- package/src/room/track/RemoteVideoTrack.ts +213 -0
- package/src/room/track/Track.ts +301 -0
- package/src/room/track/TrackPublication.ts +120 -0
- package/src/room/track/create.ts +120 -0
- package/src/room/track/defaults.ts +23 -0
- package/src/room/track/options.ts +229 -0
- package/src/room/track/types.ts +8 -0
- package/src/room/track/utils.test.ts +93 -0
- package/src/room/track/utils.ts +76 -0
- package/src/room/utils.ts +74 -0
- package/src/version.ts +2 -0
- package/.github/workflows/publish.yaml +0 -55
- package/.github/workflows/test.yaml +0 -36
- package/example/index.html +0 -247
- package/example/sample.ts +0 -632
- package/example/styles.css +0 -144
- package/example/webpack.config.js +0 -33
package/src/room/track/LocalVideoTrack.ts

```diff
@@ -0,0 +1,416 @@
+import { SignalClient } from '../../api/SignalClient';
+import log from '../../logger';
+import { VideoLayer, VideoQuality } from '../../proto/livekit_models';
+import { SubscribedQuality } from '../../proto/livekit_rtc';
+import { computeBitrate, monitorFrequency, VideoSenderStats } from '../stats';
+import { isFireFox } from '../utils';
+import LocalTrack from './LocalTrack';
+import { VideoCaptureOptions } from './options';
+import { Track } from './Track';
+import { constraintsForOptions } from './utils';
+
+// delay before attempting to upgrade
+const QUALITY_UPGRADE_DELAY = 60 * 1000;
+
+// avoid downgrading too quickly
+const QUALITY_DOWNGRADE_DELAY = 5 * 1000;
+
+const ridOrder = ['q', 'h', 'f'];
+
+export default class LocalVideoTrack extends LocalTrack {
+  /* internal */
+  signalClient?: SignalClient;
+
+  private prevStats?: Map<string, VideoSenderStats>;
+
+  // last time it had a change in quality
+  private lastQualityChange?: number;
+
+  // last time we made an explicit change
+  private lastExplicitQualityChange?: number;
+
+  private encodings?: RTCRtpEncodingParameters[];
+
+  // layers that are being subscribed to, and that we should publish
+  private activeQualities?: SubscribedQuality[];
+
+  constructor(
+    mediaTrack: MediaStreamTrack,
+    constraints?: MediaTrackConstraints,
+  ) {
+    super(mediaTrack, Track.Kind.Video, constraints);
+  }
+
+  get isSimulcast(): boolean {
+    if (this.sender && this.sender.getParameters().encodings.length > 1) {
+      return true;
+    }
+    return false;
+  }
+
+  /* @internal */
+  startMonitor(signalClient: SignalClient, disableLayerPause: boolean) {
+    this.signalClient = signalClient;
+    // save original encodings
+    const params = this.sender?.getParameters();
+    if (params) {
+      this.encodings = params.encodings;
+    }
+
+    setTimeout(() => {
+      this.monitorSender(disableLayerPause);
+    }, monitorFrequency);
+  }
+
+  stop() {
+    this.sender = undefined;
+    this.mediaStreamTrack.getConstraints();
+    super.stop();
+  }
+
+  async mute(): Promise<LocalVideoTrack> {
+    if (this.source === Track.Source.Camera) {
+      log.debug('stopping camera track');
+      // also stop the track, so that camera indicator is turned off
+      this.mediaStreamTrack.stop();
+    }
+    await super.mute();
+    return this;
+  }
+
+  async unmute(): Promise<LocalVideoTrack> {
+    if (this.source === Track.Source.Camera) {
+      log.debug('reacquiring camera track');
+      await this.restartTrack();
+    }
+    await super.unmute();
+    return this;
+  }
+
+  async getSenderStats(): Promise<VideoSenderStats[]> {
+    if (!this.sender) {
+      return [];
+    }
+
+    const items: VideoSenderStats[] = [];
+
+    const stats = await this.sender.getStats();
+    stats.forEach((v) => {
+      if (v.type === 'outbound-rtp') {
+        const vs: VideoSenderStats = {
+          type: 'video',
+          streamId: v.id,
+          frameHeight: v.frameHeight,
+          frameWidth: v.frameWidth,
+          firCount: v.firCount,
+          pliCount: v.pliCount,
+          nackCount: v.nackCount,
+          packetsSent: v.packetsSent,
+          bytesSent: v.bytesSent,
+          framesSent: v.framesSent,
+          timestamp: v.timestamp,
+          rid: v.rid ?? '',
+          retransmittedPacketsSent: v.retransmittedPacketsSent,
+          qualityLimitationReason: v.qualityLimitationReason,
+          qualityLimitationResolutionChanges:
+            v.qualityLimitationResolutionChanges,
+        };
+
+        // locate the appropriate remote-inbound-rtp item
+        const r = stats.get(v.remoteId);
+        if (r) {
+          vs.jitter = r.jitter;
+          vs.packetsLost = r.packetsLost;
+          vs.roundTripTime = r.roundTripTime;
+        }
+
+        items.push(vs);
+      }
+    });
+
+    return items;
+  }
+
+  setPublishingQuality(maxQuality: VideoQuality) {
+    const qualities: SubscribedQuality[] = [];
+    for (let q = VideoQuality.LOW; q <= VideoQuality.HIGH; q += 1) {
+      qualities.push({
+        quality: q,
+        enabled: q <= maxQuality,
+      });
+    }
+    log.debug('setting publishing quality. max quality', maxQuality);
+    this.setPublishingLayers(qualities);
+  }
+
+  async setDeviceId(deviceId: string) {
+    if (this.constraints.deviceId === deviceId) {
+      return;
+    }
+    this.constraints.deviceId = deviceId;
+    // when video is muted, underlying media stream track is stopped and
+    // will be restarted later
+    if (!this.isMuted) {
+      await this.restartTrack();
+    }
+  }
+
+  async restartTrack(options?: VideoCaptureOptions) {
+    let constraints: MediaTrackConstraints | undefined;
+    if (options) {
+      const streamConstraints = constraintsForOptions({ video: options });
+      if (typeof streamConstraints.video !== 'boolean') {
+        constraints = streamConstraints.video;
+      }
+    }
+    await this.restart(constraints);
+  }
+
+  /**
+   * @internal
+   * Sets layers that should be publishing
+   */
+  async setPublishingLayers(qualities: SubscribedQuality[]) {
+    log.debug('setting publishing layers', qualities);
+    if (!this.sender || !this.encodings) {
+      return;
+    }
+    const params = this.sender.getParameters();
+    const { encodings } = params;
+    if (!encodings) {
+      return;
+    }
+
+    if (encodings.length !== this.encodings.length) {
+      log.warn('cannot set publishing layers, encodings mismatch');
+      return;
+    }
+
+    this.activeQualities = qualities;
+    let hasChanged = false;
+    encodings.forEach((encoding, idx) => {
+      let rid = encoding.rid ?? '';
+      if (rid === '') {
+        rid = 'q';
+      }
+      const quality = videoQualityForRid(rid);
+      const subscribedQuality = qualities.find((q) => q.quality === quality);
+      if (!subscribedQuality) {
+        return;
+      }
+      if (encoding.active !== subscribedQuality.enabled) {
+        hasChanged = true;
+        encoding.active = subscribedQuality.enabled;
+        log.debug(`setting layer ${subscribedQuality.quality} to ${encoding.active ? 'enabled' : 'disabled'}`);
+
+        // FireFox does not support setting encoding.active to false, so we
+        // have a workaround of lowering its bitrate and resolution to the min.
+        if (isFireFox()) {
+          if (subscribedQuality.enabled) {
+            encoding.scaleResolutionDownBy = this.encodings![idx].scaleResolutionDownBy;
+            encoding.maxBitrate = this.encodings![idx].maxBitrate;
+            /* @ts-ignore */
+            encoding.maxFrameRate = this.encodings![idx].maxFrameRate;
+          } else {
+            encoding.scaleResolutionDownBy = 4;
+            encoding.maxBitrate = 10;
+            /* @ts-ignore */
+            encoding.maxFrameRate = 2;
+          }
+        }
+      }
+    });
+
+    if (hasChanged) {
+      params.encodings = encodings;
+      await this.sender.setParameters(params);
+    }
+  }
+
+  private monitorSender = async (disableLayerPause: boolean) => {
+    if (!this.sender) {
+      this._currentBitrate = 0;
+      return;
+    }
+    const stats = await this.getSenderStats();
+    const statsMap = new Map<string, VideoSenderStats>(stats.map((s) => [s.rid, s]));
+
+    if (!disableLayerPause && this.prevStats && this.isSimulcast) {
+      this.checkAndUpdateSimulcast(statsMap);
+    }
+
+    if (this.prevStats) {
+      let totalBitrate = 0;
+      statsMap.forEach((s, key) => {
+        const prev = this.prevStats?.get(key);
+        totalBitrate += computeBitrate(s, prev);
+      });
+      this._currentBitrate = totalBitrate;
+    }
+
+    this.prevStats = statsMap;
+    setTimeout(() => {
+      this.monitorSender(disableLayerPause);
+    }, monitorFrequency);
+  };
+
+  private checkAndUpdateSimulcast(statsMap: Map<string, VideoSenderStats>) {
+    if (!this.sender || this.isMuted || !this.encodings) {
+      return;
+    }
+
+    let bestEncoding: RTCRtpEncodingParameters | undefined;
+    const { encodings } = this.sender.getParameters();
+    encodings.forEach((encoding) => {
+      // skip inactive encodings
+      if (!encoding.active) return;
+
+      if (bestEncoding === undefined) {
+        bestEncoding = encoding;
+      } else if (
+        bestEncoding.rid
+        && encoding.rid
+        && ridOrder.indexOf(bestEncoding.rid) < ridOrder.indexOf(encoding.rid)
+      ) {
+        bestEncoding = encoding;
+      } else if (
+        bestEncoding.maxBitrate !== undefined
+        && encoding.maxBitrate !== undefined
+        && bestEncoding.maxBitrate < encoding.maxBitrate
+      ) {
+        bestEncoding = encoding;
+      }
+    });
+
+    if (!bestEncoding) {
+      return;
+    }
+    const rid: string = bestEncoding.rid ?? '';
+    const sendStats = statsMap.get(rid);
+    const lastStats = this.prevStats?.get(rid);
+    if (!sendStats || !lastStats) {
+      return;
+    }
+    const currentQuality = videoQualityForRid(rid);
+
+    // adaptive simulcast algorithm notes (davidzhao)
+    // Chrome (and other browsers) will automatically pause the highest layer
+    // when it runs into bandwidth limitations. When that happens, it would not
+    // be able to send any new frames between the two stats checks.
+    //
+    // We need to set that layer to inactive intentionally, because chrome tends
+    // to flicker, meaning it will attempt to send that layer again shortly
+    // afterwards, flip-flopping every few seconds. We want to avoid that.
+    //
+    // Note: even after bandwidth recovers, the flip-flopping behavior continues
+    // this is possibly due to SFU-side PLI generation and imperfect bandwidth estimation
+    if (sendStats.qualityLimitationResolutionChanges
+        - lastStats.qualityLimitationResolutionChanges > 0) {
+      this.lastQualityChange = new Date().getTime();
+    }
+
+    // log.debug('frameSent', sendStats.framesSent, 'lastSent', lastStats.framesSent,
+    //   'elapsed', sendStats.timestamp - lastStats.timestamp);
+    if (sendStats.framesSent - lastStats.framesSent > 0) {
+      // frames have been sending ok, consider upgrading quality
+      if (currentQuality === VideoQuality.HIGH || !this.lastQualityChange) return;
+
+      const nextQuality = currentQuality + 1;
+      if ((new Date()).getTime() - this.lastQualityChange < QUALITY_UPGRADE_DELAY) {
+        return;
+      }
+
+      if (this.activeQualities
+        && this.activeQualities.some((q) => q.quality === nextQuality && !q.enabled)
+      ) {
+        // quality has been disabled by the server, so we should skip
+        return;
+      }
+
+      // we are already at the highest layer
+      let bestQuality = VideoQuality.LOW;
+      encodings.forEach((encoding) => {
+        const quality = videoQualityForRid(encoding.rid ?? '');
+        if (quality > bestQuality) {
+          bestQuality = quality;
+        }
+      });
+      if (nextQuality > bestQuality) {
+        return;
+      }
+
+      log.debug('upgrading video quality to', nextQuality);
+      this.setPublishingQuality(nextQuality);
+      return;
+    }
+
+    // if best layer has not sent anything, do not downgrade till the
+    // best layer starts sending something. It is possible that the
+    // browser has not started some layer(s) due to cpu/bandwidth
+    // constraints
+    if (sendStats.framesSent === 0) return;
+
+    // if we've upgraded or downgraded recently, give it a bit of time before
+    // downgrading again
+    if (this.lastExplicitQualityChange
+      && ((new Date()).getTime() - this.lastExplicitQualityChange) < QUALITY_DOWNGRADE_DELAY) {
+      return;
+    }
+
+    if (currentQuality === VideoQuality.UNRECOGNIZED) {
+      return;
+    }
+
+    if (currentQuality === VideoQuality.LOW) {
+      // already the lowest quality, nothing we can do
+      return;
+    }
+
+    log.debug('downgrading video quality to', currentQuality - 1);
+    this.setPublishingQuality(currentQuality - 1);
+  }
+}
+
+export function videoQualityForRid(rid: string): VideoQuality {
+  switch (rid) {
+    case 'f':
+      return VideoQuality.HIGH;
+    case 'h':
+      return VideoQuality.MEDIUM;
+    case 'q':
+      return VideoQuality.LOW;
+    default:
+      return VideoQuality.UNRECOGNIZED;
+  }
+}
+
+export function videoLayersFromEncodings(
+  width: number,
+  height: number,
+  encodings?: RTCRtpEncodingParameters[],
+): VideoLayer[] {
+  // default to a single layer, HQ
+  if (!encodings) {
+    return [{
+      quality: VideoQuality.HIGH,
+      width,
+      height,
+      bitrate: 0,
+      ssrc: 0,
+    }];
+  }
+  return encodings.map((encoding) => {
+    const scale = encoding.scaleResolutionDownBy ?? 1;
+    let quality = videoQualityForRid(encoding.rid ?? '');
+    if (quality === VideoQuality.UNRECOGNIZED && encodings.length === 1) {
+      quality = VideoQuality.HIGH;
+    }
+    return {
+      quality,
+      width: width / scale,
+      height: height / scale,
+      bitrate: encoding.maxBitrate ?? 0,
+      ssrc: 0,
+    };
+  });
+}
```
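The new LocalVideoTrack module drives adaptive simulcast: it polls sender stats every `monitorFrequency` milliseconds, pauses or resumes individual layers via `setPublishingLayers`, and exports two helpers, `videoQualityForRid` and `videoLayersFromEncodings`, that map simulcast rids (`q`/`h`/`f`) to `VideoQuality` levels and derive `VideoLayer` metadata from encoder parameters. A minimal sketch of those two helpers in isolation, assuming direct import from the source tree; the encodings below are illustrative values, not the library's defaults:

```ts
import {
  videoQualityForRid,
  videoLayersFromEncodings,
} from './LocalVideoTrack'; // assumed relative import from src/room/track
import { VideoQuality } from '../../proto/livekit_models';

// three hypothetical simulcast encodings for a 1280x720 capture
const encodings: RTCRtpEncodingParameters[] = [
  { rid: 'q', maxBitrate: 150_000, scaleResolutionDownBy: 4 },
  { rid: 'h', maxBitrate: 500_000, scaleResolutionDownBy: 2 },
  { rid: 'f', maxBitrate: 1_700_000, scaleResolutionDownBy: 1 },
];

console.log(videoQualityForRid('f') === VideoQuality.HIGH); // true
console.log(videoQualityForRid('x') === VideoQuality.UNRECOGNIZED); // true

// yields layers of 320x180, 640x360 and 1280x720 with the bitrates above
const layers = videoLayersFromEncodings(1280, 720, encodings);
layers.forEach((l) => console.log(l.quality, l.width, l.height, l.bitrate));
```

The LOW < MEDIUM < HIGH ordering of `VideoQuality` is also what `checkAndUpdateSimulcast` relies on when it steps `currentQuality` up or down by one.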
package/src/room/track/RemoteAudioTrack.ts

```diff
@@ -0,0 +1,58 @@
+import { AudioReceiverStats, computeBitrate, monitorFrequency } from '../stats';
+import RemoteTrack from './RemoteTrack';
+import { Track } from './Track';
+
+export default class RemoteAudioTrack extends RemoteTrack {
+  private prevStats?: AudioReceiverStats;
+
+  constructor(
+    mediaTrack: MediaStreamTrack,
+    sid: string,
+    receiver?: RTCRtpReceiver,
+  ) {
+    super(mediaTrack, sid, Track.Kind.Audio, receiver);
+  }
+
+  protected monitorReceiver = async () => {
+    if (!this.receiver) {
+      this._currentBitrate = 0;
+      return;
+    }
+    const stats = await this.getReceiverStats();
+
+    if (stats && this.prevStats && this.receiver) {
+      this._currentBitrate = computeBitrate(stats, this.prevStats);
+    }
+
+    this.prevStats = stats;
+    setTimeout(() => {
+      this.monitorReceiver();
+    }, monitorFrequency);
+  };
+
+  protected async getReceiverStats(): Promise<AudioReceiverStats | undefined> {
+    if (!this.receiver) {
+      return;
+    }
+
+    const stats = await this.receiver.getStats();
+    let receiverStats: AudioReceiverStats | undefined;
+    stats.forEach((v) => {
+      if (v.type === 'inbound-rtp') {
+        receiverStats = {
+          type: 'audio',
+          timestamp: v.timestamp,
+          jitter: v.jitter,
+          bytesReceived: v.bytesReceived,
+          concealedSamples: v.concealedSamples,
+          concealmentEvents: v.concealmentEvents,
+          silentConcealedSamples: v.silentConcealedSamples,
+          silentConcealmentEvents: v.silentConcealmentEvents,
+          totalAudioEnergy: v.totalAudioEnergy,
+          totalSamplesDuration: v.totalSamplesDuration,
+        };
+      }
+    });
+    return receiverStats;
+  }
+}
```
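RemoteAudioTrack's monitor loop reads the receiver's `inbound-rtp` report every `monitorFrequency` milliseconds and hands consecutive snapshots to `computeBitrate` from `../stats`, which is not part of this hunk. As a rough sketch only, assuming that helper derives bitrate from the change in `bytesReceived` over the timestamp delta (the actual formula lives in `src/room/stats.ts`):

```ts
// Illustration of a delta-based bitrate estimate between two receiver
// stats snapshots; not the library's computeBitrate implementation.
interface BitrateSnapshot {
  bytesReceived?: number;
  timestamp: number; // DOMHighResTimeStamp, in milliseconds
}

function approximateBitrate(curr: BitrateSnapshot, prev?: BitrateSnapshot): number {
  if (!prev || curr.bytesReceived === undefined || prev.bytesReceived === undefined) {
    return 0;
  }
  const deltaSeconds = (curr.timestamp - prev.timestamp) / 1000;
  if (deltaSeconds <= 0) {
    return 0;
  }
  // bits per second over the sampling interval
  return ((curr.bytesReceived - prev.bytesReceived) * 8) / deltaSeconds;
}
```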
package/src/room/track/RemoteTrack.ts

```diff
@@ -0,0 +1,59 @@
+import { TrackEvent } from '../events';
+import { monitorFrequency } from '../stats';
+import { Track } from './Track';
+
+export default abstract class RemoteTrack extends Track {
+  /** @internal */
+  receiver?: RTCRtpReceiver;
+
+  constructor(
+    mediaTrack: MediaStreamTrack,
+    sid: string,
+    kind: Track.Kind,
+    receiver?: RTCRtpReceiver,
+  ) {
+    super(mediaTrack, kind);
+    this.sid = sid;
+    this.receiver = receiver;
+  }
+
+  /** @internal */
+  setMuted(muted: boolean) {
+    if (this.isMuted !== muted) {
+      this.isMuted = muted;
+      this.emit(muted ? TrackEvent.Muted : TrackEvent.Unmuted, this);
+    }
+  }
+
+  /** @internal */
+  setMediaStream(stream: MediaStream) {
+    // this is needed to determine when the track is finished
+    // we send each track down in its own MediaStream, so we can assume the
+    // current track is the only one that can be removed.
+    stream.onremovetrack = () => {
+      this.receiver = undefined;
+      this._currentBitrate = 0;
+      this.emit(TrackEvent.Ended, this);
+    };
+  }
+
+  start() {
+    this.startMonitor();
+    // use `enabled` of track to enable re-use of transceiver
+    super.enable();
+  }
+
+  stop() {
+    // use `enabled` of track to enable re-use of transceiver
+    super.disable();
+  }
+
+  /* @internal */
+  startMonitor() {
+    setTimeout(() => {
+      this.monitorReceiver();
+    }, monitorFrequency);
+  }
+
+  protected abstract monitorReceiver(): void;
+}
```
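RemoteTrack centralizes the lifecycle shared by remote audio and video tracks: it emits `TrackEvent.Muted`/`Unmuted` on mute changes and `TrackEvent.Ended` once the track's dedicated MediaStream removes it, while subclasses supply `monitorReceiver`. A minimal sketch of consuming those events from application code, assuming `Track` extends an event emitter (the `emit` calls above imply this) and that the `MediaStreamTrack`, `sid`, and `RTCRtpReceiver` come from an `RTCPeerConnection`'s `ontrack` handler; import paths are assumed relative to `src/room/track`:

```ts
import { TrackEvent } from '../events';
import RemoteAudioTrack from './RemoteAudioTrack';

// Hypothetical wiring: attach listeners to a freshly received remote audio track.
function attachRemoteAudio(
  mediaStreamTrack: MediaStreamTrack,
  sid: string,
  receiver?: RTCRtpReceiver,
): RemoteAudioTrack {
  const track = new RemoteAudioTrack(mediaStreamTrack, sid, receiver);

  track.on(TrackEvent.Muted, () => console.log('remote audio muted'));
  track.on(TrackEvent.Unmuted, () => console.log('remote audio unmuted'));
  track.on(TrackEvent.Ended, () => console.log('remote audio ended'));

  // start() re-enables the underlying track and kicks off the receiver monitor
  track.start();
  return track;
}
```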