livekit-client 1.15.0 → 1.15.2
- package/dist/livekit-client.esm.mjs +5488 -5235
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/src/room/PCTransport.d.ts +9 -4
- package/dist/src/room/PCTransport.d.ts.map +1 -1
- package/dist/src/room/PCTransportManager.d.ts +51 -0
- package/dist/src/room/PCTransportManager.d.ts.map +1 -0
- package/dist/src/room/RTCEngine.d.ts +8 -5
- package/dist/src/room/RTCEngine.d.ts.map +1 -1
- package/dist/src/room/Room.d.ts +9 -0
- package/dist/src/room/Room.d.ts.map +1 -1
- package/dist/src/room/events.d.ts +10 -0
- package/dist/src/room/events.d.ts.map +1 -1
- package/dist/src/room/participant/LocalParticipant.d.ts +0 -5
- package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
- package/dist/src/room/track/Track.d.ts +6 -2
- package/dist/src/room/track/Track.d.ts.map +1 -1
- package/dist/src/room/track/utils.d.ts +3 -0
- package/dist/src/room/track/utils.d.ts.map +1 -1
- package/dist/ts4.2/src/room/PCTransport.d.ts +9 -4
- package/dist/ts4.2/src/room/PCTransportManager.d.ts +51 -0
- package/dist/ts4.2/src/room/RTCEngine.d.ts +8 -5
- package/dist/ts4.2/src/room/Room.d.ts +9 -0
- package/dist/ts4.2/src/room/events.d.ts +10 -0
- package/dist/ts4.2/src/room/participant/LocalParticipant.d.ts +0 -5
- package/dist/ts4.2/src/room/track/Track.d.ts +6 -2
- package/dist/ts4.2/src/room/track/utils.d.ts +3 -0
- package/package.json +1 -1
- package/src/connectionHelper/checks/webrtc.ts +2 -2
- package/src/room/PCTransport.ts +62 -29
- package/src/room/PCTransportManager.ts +336 -0
- package/src/room/RTCEngine.ts +178 -246
- package/src/room/Room.ts +49 -46
- package/src/room/events.ts +11 -0
- package/src/room/participant/LocalParticipant.ts +15 -52
- package/src/room/track/Track.ts +30 -9
- package/src/room/track/utils.ts +19 -0
package/src/room/Room.ts
CHANGED
@@ -2,7 +2,6 @@ import { protoInt64 } from '@bufbuild/protobuf';
 import { EventEmitter } from 'events';
 import type TypedEmitter from 'typed-emitter';
 import 'webrtc-adapter';
-import { toProtoSessionDescription } from '../api/SignalClient';
 import { EncryptionEvent } from '../e2ee';
 import { E2EEManager } from '../e2ee/E2eeManager';
 import log from '../logger';
@@ -35,8 +34,6 @@ import {
   StreamStateUpdate,
   SubscriptionPermissionUpdate,
   SubscriptionResponse,
-  SyncState,
-  UpdateSubscription,
 } from '../proto/livekit_rtc_pb';
 import { getBrowser } from '../utils/browserParser';
 import DeviceManager from './DeviceManager';
@@ -152,6 +149,8 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
 
   private regionUrl?: string;
 
+  private isVideoPlaybackBlocked: boolean = false;
+
   /**
    * Creates a new Room, the primary construct for a LiveKit session.
    * @param options
@@ -865,6 +864,22 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
     }
   };
 
+  startVideo = async () => {
+    for (const p of this.participants.values()) {
+      p.videoTracks.forEach((tr) => {
+        tr.track?.attachedElements.forEach((el) => {
+          el.play().catch((e) => {
+            if (e.name === 'NotAllowedError') {
+              log.warn(
+                'Resuming video playback failed, make sure you call `startVideo` directly in a user gesture handler',
+              );
+            }
+          });
+        });
+      });
+    }
+  };
+
   /**
    * Returns true if audio playback is enabled
    */
@@ -872,6 +887,13 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
     return this.audioEnabled;
   }
 
+  /**
+   * Returns true if video playback is enabled
+   */
+  get canPlaybackVideo(): boolean {
+    return !this.isVideoPlaybackBlocked;
+  }
+
   /**
    * Returns the active audio output device used in this room.
    * @return the previously successfully set audio output device ID or an empty string if the default device is used.
@@ -1384,6 +1406,20 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
     this.emit(RoomEvent.AudioPlaybackStatusChanged, false);
   };
 
+  private handleVideoPlaybackStarted = () => {
+    if (this.isVideoPlaybackBlocked) {
+      this.isVideoPlaybackBlocked = false;
+      this.emit(RoomEvent.VideoPlaybackStatusChanged, true);
+    }
+  };
+
+  private handleVideoPlaybackFailed = () => {
+    if (!this.isVideoPlaybackBlocked) {
+      this.isVideoPlaybackBlocked = true;
+      this.emit(RoomEvent.VideoPlaybackStatusChanged, false);
+    }
+  };
+
   private handleDeviceChange = async () => {
     this.emit(RoomEvent.MediaDevicesChanged);
   };
@@ -1487,6 +1523,9 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
       if (track.kind === Track.Kind.Audio) {
        track.on(TrackEvent.AudioPlaybackStarted, this.handleAudioPlaybackStarted);
        track.on(TrackEvent.AudioPlaybackFailed, this.handleAudioPlaybackFailed);
+      } else if (track.kind === Track.Kind.Video) {
+        track.on(TrackEvent.VideoPlaybackFailed, this.handleVideoPlaybackFailed);
+        track.on(TrackEvent.VideoPlaybackStarted, this.handleVideoPlaybackStarted);
       }
       this.emit(RoomEvent.TrackSubscribed, track, publication, participant);
     },
@@ -1551,49 +1590,12 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
   }
 
   private sendSyncState() {
-    const
-
-
-
-
-
-
-    /* 1. autosubscribe on, so subscribed tracks = all tracks - unsub tracks,
-          in this case, we send unsub tracks, so server add all tracks to this
-          subscribe pc and unsub special tracks from it.
-       2. autosubscribe off, we send subscribed tracks.
-    */
-    const autoSubscribe = this.connOptions?.autoSubscribe ?? true;
-    const trackSids = new Array<string>();
-    this.participants.forEach((participant) => {
-      participant.tracks.forEach((track) => {
-        if (track.isDesired !== autoSubscribe) {
-          trackSids.push(track.trackSid);
-        }
-      });
-    });
-
-    this.engine.client.sendSyncState(
-      new SyncState({
-        answer: toProtoSessionDescription({
-          sdp: previousAnswer.sdp,
-          type: previousAnswer.type,
-        }),
-        offer: previousOffer
-          ? toProtoSessionDescription({
-              sdp: previousOffer.sdp,
-              type: previousOffer.type,
-            })
-          : undefined,
-        subscription: new UpdateSubscription({
-          trackSids,
-          subscribe: !autoSubscribe,
-          participantTracks: [],
-        }),
-        publishTracks: this.localParticipant.publishedTracksInfo(),
-        dataChannels: this.localParticipant.dataChannelsInfo(),
-      }),
-    );
+    const remoteTracks = Array.from(this.participants.values()).reduce((acc, participant) => {
+      acc.push(...(participant.getTracks() as RemoteTrackPublication[])); // FIXME would be nice to have this return RemoteTrackPublications directly instead of the type cast
+      return acc;
+    }, [] as RemoteTrackPublication[]);
+    const localTracks = this.localParticipant.getTracks() as LocalTrackPublication[]; // FIXME would be nice to have this return LocalTrackPublications directly instead of the type cast
+    this.engine.sendSyncState(remoteTracks, localTracks);
   }
 
   /**
@@ -1929,6 +1931,7 @@ export type RoomEventCallbacks = {
     participant: RemoteParticipant,
   ) => void;
   audioPlaybackChanged: (playing: boolean) => void;
+  videoPlaybackChanged: (playing: boolean) => void;
   signalConnected: () => void;
   recordingStatusChanged: (recording: boolean) => void;
   participantEncryptionStatusChanged: (encrypted: boolean, participant?: Participant) => void;
package/src/room/events.ts
CHANGED
@@ -245,6 +245,13 @@ export enum RoomEvent {
    */
   AudioPlaybackStatusChanged = 'audioPlaybackChanged',
 
+  /**
+   * LiveKit will attempt to autoplay all video tracks when you attach them to
+   * a video element. However, if that fails, we'll notify you via VideoPlaybackStatusChanged.
+   * Calling `room.startVideo()` in a user gesture event handler will resume the video playback.
+   */
+  VideoPlaybackStatusChanged = 'videoPlaybackChanged',
+
   /**
    * When we have encountered an error while attempting to create a track.
    * The errors take place in getUserMedia().
@@ -510,6 +517,10 @@ export enum TrackEvent {
   /** @internal */
   VideoDimensionsChanged = 'videoDimensionsChanged',
   /** @internal */
+  VideoPlaybackStarted = 'videoPlaybackStarted',
+  /** @internal */
+  VideoPlaybackFailed = 'videoPlaybackFailed',
+  /** @internal */
   ElementAttached = 'elementAttached',
   /** @internal */
   ElementDetached = 'elementDetached',
package/src/room/participant/LocalParticipant.ts
CHANGED
@@ -10,13 +10,11 @@ import {
 } from '../../proto/livekit_models_pb';
 import {
   AddTrackRequest,
-  DataChannelInfo,
-  SignalTarget,
   SimulcastCodec,
   SubscribedQualityUpdate,
-  TrackPublishedResponse,
   TrackUnpublishedResponse,
 } from '../../proto/livekit_rtc_pb';
+import { PCTransportState } from '../PCTransportManager';
 import type RTCEngine from '../RTCEngine';
 import { defaultVideoCodec } from '../defaults';
 import { DeviceUnsupportedError, TrackInvalidError, UnexpectedConnectionState } from '../errors';
@@ -695,7 +693,12 @@ export default class LocalParticipant extends Participant {
     if (opts.backupCodec === true) {
       opts.backupCodec = { codec: defaultVideoCodec };
     }
-    if (
+    if (
+      opts.backupCodec &&
+      videoCodec !== opts.backupCodec.codec &&
+      // TODO remove this once e2ee is supported for backup codecs
+      req.encryption === Encryption_Type.NONE
+    ) {
       // multi-codec simulcast requires dynacast
       if (!this.roomOptions.dynacast) {
         this.roomOptions.dynacast = true;
@@ -766,8 +769,8 @@ export default class LocalParticipant extends Participant {
     publication.options = opts;
     track.sid = ti.sid;
 
-    if (!this.engine.
-      throw new UnexpectedConnectionState('
+    if (!this.engine.pcManager) {
+      throw new UnexpectedConnectionState('pcManager is not ready');
     }
     log.debug(`publishing ${track.kind} with encodings`, { encodings, trackInfo: ti });
 
@@ -783,21 +786,21 @@ export default class LocalParticipant extends Participant {
         fix the issue.
       */
       let trackTransceiver: RTCRtpTransceiver | undefined = undefined;
-      for (const transceiver of this.engine.publisher.getTransceivers()) {
+      for (const transceiver of this.engine.pcManager.publisher.getTransceivers()) {
        if (transceiver.sender === track.sender) {
          trackTransceiver = transceiver;
          break;
        }
      }
      if (trackTransceiver) {
-        this.engine.publisher.setTrackCodecBitrate({
+        this.engine.pcManager.publisher.setTrackCodecBitrate({
          transceiver: trackTransceiver,
          codec: 'opus',
          maxbr: encodings[0]?.maxBitrate ? encodings[0].maxBitrate / 1000 : 0,
        });
      }
    } else if (track.codec && isSVCCodec(track.codec) && encodings[0]?.maxBitrate) {
-      this.engine.publisher.setTrackCodecBitrate({
+      this.engine.pcManager.publisher.setTrackCodecBitrate({
        cid: req.cid,
        codec: track.codec,
        maxbr: encodings[0].maxBitrate / 1000,
@@ -929,12 +932,12 @@ export default class LocalParticipant extends Participant {
     const trackSender = track.sender;
     track.sender = undefined;
     if (
-      this.engine.
-      this.engine.
+      this.engine.pcManager &&
+      this.engine.pcManager.currentState < PCTransportState.FAILED &&
       trackSender
     ) {
       try {
-        for (const transceiver of this.engine.publisher.getTransceivers()) {
+        for (const transceiver of this.engine.pcManager.publisher.getTransceivers()) {
          // if sender is not currently sending (after replaceTrack(null))
          // removeTrack would have no effect.
          // to ensure we end up successfully removing the track, manually set
@@ -1310,44 +1313,4 @@ export default class LocalParticipant extends Participant {
     });
     return publication;
   }
-
-  /** @internal */
-  publishedTracksInfo(): TrackPublishedResponse[] {
-    const infos: TrackPublishedResponse[] = [];
-    this.tracks.forEach((track: LocalTrackPublication) => {
-      if (track.track !== undefined) {
-        infos.push(
-          new TrackPublishedResponse({
-            cid: track.track.mediaStreamID,
-            track: track.trackInfo,
-          }),
-        );
-      }
-    });
-    return infos;
-  }
-
-  /** @internal */
-  dataChannelsInfo(): DataChannelInfo[] {
-    const infos: DataChannelInfo[] = [];
-    const getInfo = (dc: RTCDataChannel | undefined, target: SignalTarget) => {
-      if (dc?.id !== undefined && dc.id !== null) {
-        infos.push(
-          new DataChannelInfo({
-            label: dc.label,
-            id: dc.id,
-            target,
-          }),
-        );
-      }
-    };
-    getInfo(this.engine.dataChannelForKind(DataPacket_Kind.LOSSY), SignalTarget.PUBLISHER);
-    getInfo(this.engine.dataChannelForKind(DataPacket_Kind.RELIABLE), SignalTarget.PUBLISHER);
-    getInfo(this.engine.dataChannelForKind(DataPacket_Kind.LOSSY, true), SignalTarget.SUBSCRIBER);
-    getInfo(
-      this.engine.dataChannelForKind(DataPacket_Kind.RELIABLE, true),
-      SignalTarget.SUBSCRIBER,
-    );
-    return infos;
-  }
 }
package/src/room/track/Track.ts
CHANGED
@@ -1,7 +1,7 @@
 import { EventEmitter } from 'events';
+import { debounce } from 'ts-debounce';
 import type TypedEventEmitter from 'typed-emitter';
 import type { SignalClient } from '../../api/SignalClient';
-import log from '../../logger';
 import { TrackSource, TrackType } from '../../proto/livekit_models_pb';
 import { StreamState as ProtoStreamState } from '../../proto/livekit_rtc_pb';
 import { TrackEvent } from '../events';
@@ -113,6 +113,9 @@ export abstract class Track extends (EventEmitter as new () => TypedEventEmitter
 
     if (!this.attachedElements.includes(element)) {
       this.attachedElements.push(element);
+      // listen to suspend events in order to detect auto playback issues
+      element.addEventListener('suspend', this.handleElementSuspended);
+      element.addEventListener('playing', this.handleElementPlay);
     }
 
     // even if we believe it's already attached to the element, it's possible
@@ -130,11 +133,6 @@ export abstract class Track extends (EventEmitter as new () => TypedEventEmitter
           this.emit(TrackEvent.AudioPlaybackStarted);
         })
         .catch((e) => {
-          if (e.name === 'NotAllowedError') {
-            this.emit(TrackEvent.AudioPlaybackFailed, e);
-          } else {
-            log.warn('could not playback audio', e);
-          }
           // If audio playback isn't allowed make sure we still play back the video
           if (
             element &&
@@ -172,6 +170,8 @@ export abstract class Track extends (EventEmitter as new () => TypedEventEmitter
     if (idx >= 0) {
       this.attachedElements.splice(idx, 1);
       this.recycleElement(element);
+      element.removeEventListener('suspend', this.handleElementSuspended);
+      element.removeEventListener('playing', this.handleElementPlay);
       this.emit(TrackEvent.ElementDetached, element);
     }
     return element;
@@ -182,6 +182,8 @@ export abstract class Track extends (EventEmitter as new () => TypedEventEmitter
       detachTrack(this.mediaStreamTrack, elm);
       detached.push(elm);
       this.recycleElement(elm);
+      elm.removeEventListener('suspend', this.handleElementSuspended);
+      elm.removeEventListener('playing', this.handleElementPlay);
       this.emit(TrackEvent.ElementDetached, elm);
     });
 
@@ -268,9 +270,26 @@ export abstract class Track extends (EventEmitter as new () => TypedEventEmitter
       document.removeEventListener('visibilitychange', this.appVisibilityChangedListener);
     }
   }
+
+  private handleElementSuspended = () => {
+    this.debouncedPlaybackStateChange(false);
+  };
+
+  private handleElementPlay = () => {
+    this.debouncedPlaybackStateChange(true);
+  };
+
+  private debouncedPlaybackStateChange = debounce((allowed: boolean) => {
+    // we debounce this as Safari triggers both `playing` and `suspend` shortly after one another
+    // in order not to raise the wrong event, we debounce the call to make sure we only emit the correct status
+    if (this.kind === Track.Kind.Audio) {
+      this.emit(allowed ? TrackEvent.AudioPlaybackStarted : TrackEvent.AudioPlaybackFailed);
+    } else if (this.kind === Track.Kind.Video) {
+      this.emit(allowed ? TrackEvent.VideoPlaybackStarted : TrackEvent.VideoPlaybackFailed);
+    }
+  }, 300);
 }
 
-/** @internal */
 export function attachToElement(track: MediaStreamTrack, element: HTMLMediaElement) {
   let mediaStream: MediaStream;
   if (element.srcObject instanceof MediaStream) {
@@ -321,7 +340,7 @@ export function attachToElement(track: MediaStreamTrack, element: HTMLMediaElement) {
       // when the window is backgrounded before the first frame is drawn
       // manually calling play here seems to fix that
       element.play().catch(() => {
-
+        /** do nothing, we watch the `suspended` event do deal with these failures */
       });
     }, 0);
   }
@@ -446,10 +465,12 @@ export type TrackEventCallbacks = {
   updateSettings: () => void;
   updateSubscription: () => void;
   audioPlaybackStarted: () => void;
-  audioPlaybackFailed: (error
+  audioPlaybackFailed: (error?: Error) => void;
   audioSilenceDetected: () => void;
   visibilityChanged: (visible: boolean, track?: any) => void;
   videoDimensionsChanged: (dimensions: Track.Dimensions, track?: any) => void;
+  videoPlaybackStarted: () => void;
+  videoPlaybackFailed: (error?: Error) => void;
   elementAttached: (element: HTMLMediaElement) => void;
   elementDetached: (element: HTMLMediaElement) => void;
   upstreamPaused: (track: any) => void;
package/src/room/track/utils.ts
CHANGED
@@ -1,6 +1,8 @@
+import { TrackPublishedResponse } from '../../proto/livekit_rtc_pb';
 import { cloneDeep } from '../../utils/cloneDeep';
 import { isSafari, sleep } from '../utils';
 import { Track } from './Track';
+import type { TrackPublication } from './TrackPublication';
 import {
   type AudioCaptureOptions,
   type CreateLocalTracksOptions,
@@ -190,3 +192,20 @@ export function mimeTypeToVideoCodecString(mimeType: string) {
   }
   return codec;
 }
+
+export function getTrackPublicationInfo<T extends TrackPublication>(
+  tracks: T[],
+): TrackPublishedResponse[] {
+  const infos: TrackPublishedResponse[] = [];
+  tracks.forEach((track: TrackPublication) => {
+    if (track.track !== undefined) {
+      infos.push(
+        new TrackPublishedResponse({
+          cid: track.track.mediaStreamID,
+          track: track.trackInfo,
+        }),
+      );
+    }
+  });
+  return infos;
+}
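
A hedged usage sketch of the new `getTrackPublicationInfo` helper, mirroring the publications `Room.sendSyncState` now hands to the engine. The wrapper function and the relative import path are illustrative; the helper itself is an internal utility.

```ts
import type { LocalTrackPublication, Room } from 'livekit-client';
// Internal helper from src/room/track/utils.ts; the path below is illustrative.
import { getTrackPublicationInfo } from './room/track/utils';

function collectPublishedTrackInfo(room: Room) {
  // Same cast Room.sendSyncState uses before calling engine.sendSyncState.
  const localTracks = room.localParticipant.getTracks() as LocalTrackPublication[];
  return getTrackPublicationInfo(localTracks); // TrackPublishedResponse[]
}
```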