livekit-client 2.1.4 → 2.2.0
- package/dist/livekit-client.esm.mjs +152 -64
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/src/index.d.ts +2 -2
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/room/RTCEngine.d.ts +2 -2
- package/dist/src/room/RTCEngine.d.ts.map +1 -1
- package/dist/src/room/Room.d.ts +4 -1
- package/dist/src/room/Room.d.ts.map +1 -1
- package/dist/src/room/events.d.ts +12 -1
- package/dist/src/room/events.d.ts.map +1 -1
- package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
- package/dist/src/room/participant/Participant.d.ts +6 -3
- package/dist/src/room/participant/Participant.d.ts.map +1 -1
- package/dist/src/room/participant/RemoteParticipant.d.ts +3 -3
- package/dist/src/room/participant/RemoteParticipant.d.ts.map +1 -1
- package/dist/src/room/participant/publishUtils.d.ts.map +1 -1
- package/dist/src/room/track/LocalTrack.d.ts +1 -1
- package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
- package/dist/src/room/track/LocalVideoTrack.d.ts +2 -1
- package/dist/src/room/track/LocalVideoTrack.d.ts.map +1 -1
- package/dist/src/room/track/create.d.ts.map +1 -1
- package/dist/src/room/track/options.d.ts +9 -0
- package/dist/src/room/track/options.d.ts.map +1 -1
- package/dist/ts4.2/src/index.d.ts +2 -2
- package/dist/ts4.2/src/room/RTCEngine.d.ts +2 -2
- package/dist/ts4.2/src/room/Room.d.ts +4 -1
- package/dist/ts4.2/src/room/events.d.ts +12 -1
- package/dist/ts4.2/src/room/participant/Participant.d.ts +7 -3
- package/dist/ts4.2/src/room/participant/RemoteParticipant.d.ts +3 -3
- package/dist/ts4.2/src/room/track/LocalTrack.d.ts +1 -1
- package/dist/ts4.2/src/room/track/LocalVideoTrack.d.ts +2 -1
- package/dist/ts4.2/src/room/track/options.d.ts +9 -0
- package/package.json +1 -1
- package/src/index.ts +2 -1
- package/src/room/RTCEngine.ts +23 -6
- package/src/room/Room.ts +39 -10
- package/src/room/events.ts +14 -1
- package/src/room/participant/LocalParticipant.ts +36 -25
- package/src/room/participant/Participant.ts +14 -1
- package/src/room/participant/RemoteParticipant.ts +17 -4
- package/src/room/participant/publishUtils.ts +4 -0
- package/src/room/track/LocalTrack.ts +13 -9
- package/src/room/track/LocalVideoTrack.ts +9 -2
- package/src/room/track/create.ts +37 -27
- package/src/room/track/options.ts +15 -0
@@ -5,7 +5,7 @@ import { Track } from '../track/Track';
 import type { AudioOutputOptions } from '../track/options';
 import type { AdaptiveStreamSettings } from '../track/types';
 import type { LoggerOptions } from '../types';
-import Participant from './Participant';
+import Participant, { ParticipantKind } from './Participant';
 import type { ParticipantEventCallbacks } from './Participant';
 export default class RemoteParticipant extends Participant {
     audioTrackPublications: Map<string, RemoteTrackPublication>;
@@ -15,13 +15,13 @@ export default class RemoteParticipant extends Participant {
     private volumeMap;
     private audioOutput?;
     /** @internal */
-    static fromParticipantInfo(signalClient: SignalClient, pi: ParticipantInfo): RemoteParticipant;
+    static fromParticipantInfo(signalClient: SignalClient, pi: ParticipantInfo, loggerOptions: LoggerOptions): RemoteParticipant;
     protected get logContext(): {
         rpID: string;
         remoteParticipant: string;
     };
     /** @internal */
-    constructor(signalClient: SignalClient, sid: string, identity?: string, name?: string, metadata?: string, loggerOptions?: LoggerOptions);
+    constructor(signalClient: SignalClient, sid: string, identity?: string, name?: string, metadata?: string, loggerOptions?: LoggerOptions, kind?: ParticipantKind);
     protected addTrackPublication(publication: RemoteTrackPublication): void;
     getTrackPublication(source: Track.Source): RemoteTrackPublication | undefined;
     getTrackPublicationByName(name: string): RemoteTrackPublication | undefined;
@@ -91,7 +91,7 @@ export default abstract class LocalTrack<TrackKind extends Track.Kind = Track.Ki
     * @experimental
     * @returns
     */
-    stopProcessor(): Promise<void>;
+    stopProcessor(keepElement?: boolean): Promise<void>;
     protected abstract monitorSender(): void;
 }
 //# sourceMappingURL=LocalTrack.d.ts.map
@@ -21,6 +21,7 @@ export default class LocalVideoTrack extends LocalTrack<Track.Kind.Video> {
     private subscribedCodecs?;
     private senderLock;
     private degradationPreference;
+    get sender(): RTCRtpSender | undefined;
     set sender(sender: RTCRtpSender | undefined);
     /**
      *
@@ -41,7 +42,7 @@ export default class LocalVideoTrack extends LocalTrack<Track.Kind.Video> {
     setPublishingQuality(maxQuality: VideoQuality): void;
     setDeviceId(deviceId: ConstrainDOMString): Promise<boolean>;
     restartTrack(options?: VideoCaptureOptions): Promise<void>;
-    setProcessor(processor: TrackProcessor<Track.Kind>, showProcessedStreamLocally?: boolean): Promise<void>;
+    setProcessor(processor: TrackProcessor<Track.Kind.Video>, showProcessedStreamLocally?: boolean): Promise<void>;
     setDegradationPreference(preference: RTCDegradationPreference): Promise<void>;
     addSimulcastTrack(codec: VideoCodec, encodings?: RTCRtpEncodingParameters[]): SimulcastTrackInfo | undefined;
     setSimulcastTrackSender(codec: VideoCodec, sender: RTCRtpSender): void;
@@ -1,4 +1,5 @@
 import type { Track } from './Track';
+import type { AudioProcessorOptions, TrackProcessor, VideoProcessorOptions } from './processor/types';
 export interface TrackPublishDefaults {
     /**
      * encoding parameters for camera track
@@ -133,6 +134,10 @@ export interface VideoCaptureOptions {
      */
     facingMode?: 'user' | 'environment' | 'left' | 'right';
     resolution?: VideoResolution;
+    /**
+     * initialize the track with a given processor
+     */
+    processor?: TrackProcessor<Track.Kind.Video, VideoProcessorOptions>;
 }
 export interface ScreenShareCaptureOptions {
     /**
@@ -210,6 +215,10 @@ export interface AudioCaptureOptions {
      * sample size or range of sample sizes which are acceptable and/or required.
      */
     sampleSize?: ConstrainULong;
+    /**
+     * initialize the track with a given processor
+     */
+    processor?: TrackProcessor<Track.Kind.Audio, AudioProcessorOptions>;
 }
 export interface AudioOutputOptions {
     /**
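Usage sketch (not part of the diff): the new processor field on VideoCaptureOptions lets a track be initialized with a processor in one step. This assumes the separate @livekit/track-processors package for the BackgroundBlur processor; any TrackProcessor for video tracks would work the same way.

import { createLocalVideoTrack } from 'livekit-client';
import { BackgroundBlur } from '@livekit/track-processors';

async function createBlurredCameraTrack() {
  // the processor is applied before the track is returned
  return createLocalVideoTrack({
    facingMode: 'user',
    processor: BackgroundBlur(10),
  });
}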
package/package.json
CHANGED
package/src/index.ts
CHANGED
@@ -3,7 +3,7 @@ import { LogLevel, LoggerNames, getLogger, setLogExtension, setLogLevel } from '
 import DefaultReconnectPolicy from './room/DefaultReconnectPolicy';
 import Room, { ConnectionState } from './room/Room';
 import LocalParticipant from './room/participant/LocalParticipant';
-import Participant, { ConnectionQuality } from './room/participant/Participant';
+import Participant, { ConnectionQuality, ParticipantKind } from './room/participant/Participant';
 import type { ParticipantTrackPermission } from './room/participant/ParticipantTrackPermission';
 import RemoteParticipant from './room/participant/RemoteParticipant';
 import CriticalTimers from './room/timers';
@@ -63,6 +63,7 @@ export {
   Participant,
   RemoteAudioTrack,
   RemoteParticipant,
+  ParticipantKind,
   RemoteTrack,
   RemoteTrackPublication,
   RemoteVideoTrack,
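Usage sketch (not part of the diff): ParticipantKind is now re-exported from the package root, so application code can branch on the participant type without importing from @livekit/protocol. Assumes an already-connected Room instance named room.

import { ParticipantKind, RoomEvent } from 'livekit-client';

room.on(RoomEvent.ParticipantConnected, (participant) => {
  if (participant.kind === ParticipantKind.AGENT) {
    console.log(`agent joined: ${participant.identity}`);
  }
});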
package/src/room/RTCEngine.ts
CHANGED
@@ -26,7 +26,7 @@ import {
   TrackUnpublishedResponse,
   Transcription,
   UpdateSubscription,
-  UserPacket,
+  type UserPacket,
 } from '@livekit/protocol';
 import { EventEmitter } from 'events';
 import type { MediaAttributes } from 'sdp-transform';
@@ -648,10 +648,12 @@ export default class RTCEngine extends (EventEmitter as new () => TypedEventEmit
       if (dp.value?.case === 'speaker') {
         // dispatch speaker updates
         this.emit(EngineEvent.ActiveSpeakersUpdate, dp.value.value.speakers);
-      } else
-
-
-
+      } else {
+        if (dp.value?.case === 'user') {
+          // compatibility
+          applyUserDataCompat(dp, dp.value.value);
+        }
+        this.emit(EngineEvent.DataPacketReceived, dp);
       }
     } finally {
       unlock();
@@ -1392,7 +1394,7 @@ export type EngineEventCallbacks = {
     receiver?: RTCRtpReceiver,
   ) => void;
   activeSpeakersUpdate: (speakers: Array<SpeakerInfo>) => void;
-  dataPacketReceived: (
+  dataPacketReceived: (packet: DataPacket) => void;
   transcriptionReceived: (transcription: Transcription) => void;
   transportsCreated: (publisher: PCTransport, subscriber: PCTransport) => void;
   /** @internal */
@@ -1415,3 +1417,18 @@ export type EngineEventCallbacks = {
 function supportOptionalDatachannel(protocol: number | undefined): boolean {
   return protocol !== undefined && protocol > 13;
 }
+
+function applyUserDataCompat(newObj: DataPacket, oldObj: UserPacket) {
+  const participantIdentity = newObj.participantIdentity
+    ? newObj.participantIdentity
+    : oldObj.participantIdentity;
+  newObj.participantIdentity = participantIdentity;
+  oldObj.participantIdentity = participantIdentity;
+
+  const destinationIdentities =
+    newObj.destinationIdentities.length !== 0
+      ? newObj.destinationIdentities
+      : oldObj.destinationIdentities;
+  newObj.destinationIdentities = destinationIdentities;
+  oldObj.destinationIdentities = destinationIdentities;
+}
package/src/room/Room.ts
CHANGED
@@ -1,5 +1,6 @@
 import {
   ConnectionQualityUpdate,
+  type DataPacket,
   DataPacket_Kind,
   DisconnectReason,
   JoinResponse,
@@ -11,6 +12,7 @@ import {
   Room as RoomModel,
   ServerInfo,
   SimulateScenario,
+  SipDTMF,
   SpeakerInfo,
   StreamStateUpdate,
   SubscriptionError,
@@ -334,7 +336,6 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
       })
       .on(EngineEvent.ActiveSpeakersUpdate, this.handleActiveSpeakersUpdate)
       .on(EngineEvent.DataPacketReceived, this.handleDataPacket)
-      .on(EngineEvent.TranscriptionReceived, this.handleTranscription)
       .on(EngineEvent.Resuming, () => {
         this.clearConnectionReconcile();
         this.isResuming = true;
@@ -1085,11 +1086,12 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
       if (this.options.webAudioMix) {
         // @ts-expect-error setSinkId is not yet in the typescript type of AudioContext
         this.audioContext?.setSinkId(deviceId);
-      } else {
-        await Promise.all(
-          Array.from(this.remoteParticipants.values()).map((p) => p.setAudioOutput({ deviceId })),
-        );
       }
+      // also set audio output on all audio elements, even if webAudioMix is enabled in order to workaround echo cancellation not working on chrome with non-default output devices
+      // see https://issues.chromium.org/issues/40252911#comment7
+      await Promise.all(
+        Array.from(this.remoteParticipants.values()).map((p) => p.setAudioOutput({ deviceId })),
+      );
     } catch (e) {
       this.options.audioOutput.deviceId = prevDeviceId;
       throw e;
@@ -1472,24 +1474,47 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
     pub.setSubscriptionError(update.err);
   };

-  private handleDataPacket = (
+  private handleDataPacket = (packet: DataPacket) => {
     // find the participant
-    const participant = this.remoteParticipants.get(
+    const participant = this.remoteParticipants.get(packet.participantIdentity);
+    if (packet.value.case === 'user') {
+      this.handleUserPacket(participant, packet.value.value, packet.kind);
+    } else if (packet.value.case === 'transcription') {
+      this.handleTranscription(participant, packet.value.value);
+    } else if (packet.value.case === 'sipDtmf') {
+      this.handleSipDtmf(participant, packet.value.value);
+    }
+  };

+  private handleUserPacket = (
+    participant: RemoteParticipant | undefined,
+    userPacket: UserPacket,
+    kind: DataPacket_Kind,
+  ) => {
     this.emit(RoomEvent.DataReceived, userPacket.payload, participant, kind, userPacket.topic);

     // also emit on the participant
     participant?.emit(ParticipantEvent.DataReceived, userPacket.payload, kind);
   };

+  private handleSipDtmf = (participant: RemoteParticipant | undefined, dtmf: SipDTMF) => {
+    this.emit(RoomEvent.SipDTMFReceived, dtmf, participant);
+
+    // also emit on the participant
+    participant?.emit(ParticipantEvent.SipDTMFReceived, dtmf);
+  };
+
   bufferedSegments: Map<string, TranscriptionSegmentModel> = new Map();

-  private handleTranscription = (
+  private handleTranscription = (
+    remoteParticipant: RemoteParticipant | undefined,
+    transcription: TranscriptionModel,
+  ) => {
     // find the participant
     const participant =
       transcription.participantIdentity === this.localParticipant.identity
         ? this.localParticipant
-        :
+        : remoteParticipant;
     const publication = participant?.trackPublications.get(transcription.trackId);

     const segments = extractTranscriptionSegments(transcription);
@@ -1596,7 +1621,10 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
   private createParticipant(identity: string, info?: ParticipantInfo): RemoteParticipant {
     let participant: RemoteParticipant;
     if (info) {
-      participant = RemoteParticipant.fromParticipantInfo(this.engine.client, info
+      participant = RemoteParticipant.fromParticipantInfo(this.engine.client, info, {
+        loggerContextCb: () => this.logContext,
+        loggerName: this.options.loggerName,
+      });
     } else {
       participant = new RemoteParticipant(this.engine.client, '', identity, undefined, undefined, {
         loggerContextCb: () => this.logContext,
@@ -2099,6 +2127,7 @@ export type RoomEventCallbacks = {
     kind?: DataPacket_Kind,
     topic?: string,
   ) => void;
+  sipDTMFReceived: (dtmf: SipDTMF, participant?: RemoteParticipant) => void;
   transcriptionReceived: (
     transcription: TranscriptionSegment[],
     participant?: Participant,
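Usage sketch (not part of the diff): SIP DTMF tones are now surfaced as a dedicated room event instead of arriving only as raw data packets. Assumes an already-connected Room instance named room; the dtmf payload is the SipDTMF message from @livekit/protocol, carrying digit and code fields.

import { RoomEvent } from 'livekit-client';

room.on(RoomEvent.SipDTMFReceived, (dtmf, participant) => {
  console.log(`received DTMF ${dtmf.digit} (code ${dtmf.code}) from ${participant?.identity ?? 'unknown'}`);
});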
package/src/room/events.ts
CHANGED
@@ -197,6 +197,13 @@ export enum RoomEvent {
    */
   DataReceived = 'dataReceived',

+  /**
+   * SIP DTMF tones received from another participant.
+   *
+   * args: (participant: [[Participant]], dtmf: [[DataPacket_Kind]])
+   */
+  SipDTMFReceived = 'sipDTMFReceived',
+
   /**
    * Transcription received from a participant's track.
    * @beta
@@ -408,6 +415,13 @@ export enum ParticipantEvent {
    */
   DataReceived = 'dataReceived',

+  /**
+   * SIP DTMF tones received from this participant as sender.
+   *
+   * args: (dtmf: [[DataPacket_Kind]])
+   */
+  SipDTMFReceived = 'sipDTMFReceived',
+
   /**
    * Transcription received from this participant as data source.
    * @beta
@@ -491,7 +505,6 @@ export enum EngineEvent {
   MediaTrackAdded = 'mediaTrackAdded',
   ActiveSpeakersUpdate = 'activeSpeakersUpdate',
   DataPacketReceived = 'dataPacketReceived',
-  TranscriptionReceived = 'transcriptionReceived',
   RTPVideoMapUpdate = 'rtpVideoMapUpdate',
   DCBufferStatusChanged = 'dcBufferStatusChanged',
   ParticipantUpdate = 'participantUpdate',
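Usage sketch (not part of the diff): the same tones are also re-emitted on the sending participant, mirroring how DataReceived works at the participant level. Assumes a RemoteParticipant instance named participant.

import { ParticipantEvent } from 'livekit-client';

participant.on(ParticipantEvent.SipDTMFReceived, (dtmf) => {
  console.log(`participant sent DTMF digit ${dtmf.digit}`);
});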
package/src/room/participant/LocalParticipant.ts
CHANGED
@@ -30,6 +30,7 @@ import type {
   VideoCaptureOptions,
 } from '../track/options';
 import { ScreenSharePresets, VideoPresets, isBackupCodec } from '../track/options';
+import type { TrackProcessor } from '../track/processor/types';
 import {
   constraintsForOptions,
   getLogContextFromTrack,
@@ -394,13 +395,13 @@ export default class LocalParticipant extends Participant {
    * @returns
    */
   async createTracks(options?: CreateLocalTracksOptions): Promise<LocalTrack[]> {
-    const
+    const mergedOptions = mergeDefaultOptions(
       options,
       this.roomOptions?.audioCaptureDefaults,
       this.roomOptions?.videoCaptureDefaults,
     );

-    const constraints = constraintsForOptions(
+    const constraints = constraintsForOptions(mergedOptions);
     let stream: MediaStream | undefined;
     try {
       stream = await navigator.mediaDevices.getUserMedia(constraints);
@@ -425,29 +426,39 @@ export default class LocalParticipant extends Participant {
       this.cameraError = undefined;
     }

-    return
-
-
-
-    trackOptions
-
-
-
-
-
-
-
-
-
-
-
-    track.
-
-    track.
-
-
-
-
+    return Promise.all(
+      stream.getTracks().map(async (mediaStreamTrack) => {
+        const isAudio = mediaStreamTrack.kind === 'audio';
+        let trackOptions = isAudio ? mergedOptions!.audio : mergedOptions!.video;
+        if (typeof trackOptions === 'boolean' || !trackOptions) {
+          trackOptions = {};
+        }
+        let trackConstraints: MediaTrackConstraints | undefined;
+        const conOrBool = isAudio ? constraints.audio : constraints.video;
+        if (typeof conOrBool !== 'boolean') {
+          trackConstraints = conOrBool;
+        }
+        const track = mediaTrackToLocalTrack(mediaStreamTrack, trackConstraints, {
+          loggerName: this.roomOptions.loggerName,
+          loggerContextCb: () => this.logContext,
+        });
+        if (track.kind === Track.Kind.Video) {
+          track.source = Track.Source.Camera;
+        } else if (track.kind === Track.Kind.Audio) {
+          track.source = Track.Source.Microphone;
+          track.setAudioContext(this.audioContext);
+        }
+        track.mediaStream = stream;
+        if (trackOptions.processor) {
+          if (track instanceof LocalAudioTrack) {
+            await track.setProcessor(trackOptions.processor as TrackProcessor<Track.Kind.Audio>);
+          } else {
+            await track.setProcessor(trackOptions.processor as TrackProcessor<Track.Kind.Video>);
+          }
+        }
+        return track;
+      }),
+    );
   }

   /**
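Usage sketch (not part of the diff): createTracks() now applies a processor passed in the per-kind capture options before the tracks are returned. noiseProcessor and blurProcessor are hypothetical TrackProcessor instances (for example from @livekit/track-processors or a custom implementation); room is assumed to be an already-connected Room.

const tracks = await room.localParticipant.createTracks({
  audio: { processor: noiseProcessor },
  video: { processor: blurProcessor },
});
// publish the pre-processed tracks as usual
await Promise.all(tracks.map((track) => room.localParticipant.publishTrack(track)));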
package/src/room/participant/Participant.ts
CHANGED
@@ -1,8 +1,10 @@
 import {
   DataPacket_Kind,
   ParticipantInfo,
+  ParticipantInfo_Kind as ParticipantKind,
   ParticipantPermission,
   ConnectionQuality as ProtoQuality,
+  type SipDTMF,
   SubscriptionError,
 } from '@livekit/protocol';
 import { EventEmitter } from 'events';
@@ -45,6 +47,8 @@ function qualityFromProto(q: ProtoQuality): ConnectionQuality {
   }
 }

+export { ParticipantKind };
+
 export default class Participant extends (EventEmitter as new () => TypedEmitter<ParticipantEventCallbacks>) {
   protected participantInfo?: ParticipantInfo;

@@ -77,6 +81,8 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter

   permissions?: ParticipantPermission;

+  protected _kind: ParticipantKind;
+
   private _connectionQuality: ConnectionQuality = ConnectionQuality.Unknown;

   protected audioContext?: AudioContext;
@@ -99,7 +105,11 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
   }

   get isAgent() {
-    return this.permissions?.agent
+    return this.permissions?.agent || this.kind === ParticipantKind.AGENT;
+  }
+
+  get kind() {
+    return this._kind;
   }

   /** @internal */
@@ -109,6 +119,7 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
     name?: string,
     metadata?: string,
     loggerOptions?: LoggerOptions,
+    kind: ParticipantKind = ParticipantKind.STANDARD,
   ) {
     super();

@@ -123,6 +134,7 @@ export default class Participant extends (EventEmitter as new () => TypedEmitter
     this.audioTrackPublications = new Map();
     this.videoTrackPublications = new Map();
     this.trackPublications = new Map();
+    this._kind = kind;
   }

   getTrackPublications(): TrackPublication[] {
@@ -329,6 +341,7 @@ export type ParticipantEventCallbacks = {
   participantMetadataChanged: (prevMetadata: string | undefined, participant?: any) => void;
   participantNameChanged: (name: string) => void;
   dataReceived: (payload: Uint8Array, kind: DataPacket_Kind) => void;
+  sipDTMFReceived: (dtmf: SipDTMF) => void;
   transcriptionReceived: (
     transcription: TranscriptionSegment[],
     publication?: TrackPublication,
package/src/room/participant/RemoteParticipant.ts
CHANGED
@@ -16,7 +16,7 @@ import type { AudioOutputOptions } from '../track/options';
 import type { AdaptiveStreamSettings } from '../track/types';
 import { getLogContextFromTrack } from '../track/utils';
 import type { LoggerOptions } from '../types';
-import Participant from './Participant';
+import Participant, { ParticipantKind } from './Participant';
 import type { ParticipantEventCallbacks } from './Participant';

 export default class RemoteParticipant extends Participant {
@@ -33,8 +33,20 @@ export default class RemoteParticipant extends Participant {
   private audioOutput?: AudioOutputOptions;

   /** @internal */
-  static fromParticipantInfo(
-
+  static fromParticipantInfo(
+    signalClient: SignalClient,
+    pi: ParticipantInfo,
+    loggerOptions: LoggerOptions,
+  ): RemoteParticipant {
+    return new RemoteParticipant(
+      signalClient,
+      pi.sid,
+      pi.identity,
+      pi.name,
+      pi.metadata,
+      loggerOptions,
+      pi.kind,
+    );
   }

   protected get logContext() {
@@ -53,8 +65,9 @@ export default class RemoteParticipant extends Participant {
     name?: string,
     metadata?: string,
     loggerOptions?: LoggerOptions,
+    kind: ParticipantKind = ParticipantKind.STANDARD,
   ) {
-    super(sid, identity || '', name, metadata, loggerOptions);
+    super(sid, identity || '', name, metadata, loggerOptions, kind);
     this.signalClient = signalClient;
     this.trackPublications = new Map();
     this.audioTrackPublications = new Map();
package/src/room/participant/publishUtils.ts
CHANGED
@@ -149,6 +149,10 @@ export function computeVideoEncodings(
   const browser = getBrowser();
   if (
     isSafari() ||
+    // Even tho RN runs M114, it does not produce SVC layers when a single encoding
+    // is provided. So we'll use the legacy SVC specification for now.
+    // TODO: when we upstream libwebrtc, this will need additional verification
+    isReactNative() ||
     (browser?.name === 'Chrome' && compareVersions(browser?.version, '113') < 0)
   ) {
     const bitratesRatio = sm.suffix == 'h' ? 2 : 3;
package/src/room/track/LocalTrack.ts
CHANGED
@@ -468,16 +468,17 @@ export default abstract class LocalTrack<
     try {
       this.log.debug('setting up processor', this.logContext);

-      this.
-      this.processorElement ?? (document.createElement(this.kind) as HTMLMediaElement);
+      const processorElement = document.createElement(this.kind) as HTMLMediaElement;

       const processorOptions = {
         kind: this.kind,
         track: this._mediaStreamTrack,
-        element:
+        element: processorElement,
         audioContext: this.audioContext,
       };
       await processor.init(processorOptions);
+      this.log.debug('processor initialized', this.logContext);
+
       if (this.processor) {
         await this.stopProcessor();
       }
@@ -485,16 +486,17 @@ export default abstract class LocalTrack<
         throw TypeError('cannot set processor on track of unknown kind');
       }

-      attachToElement(this._mediaStreamTrack,
-
+      attachToElement(this._mediaStreamTrack, processorElement);
+      processorElement.muted = true;

-
+      processorElement
         .play()
         .catch((error) =>
           this.log.error('failed to play processor element', { ...this.logContext, error }),
         );

       this.processor = processor;
+      this.processorElement = processorElement;
       if (this.processor.processedTrack) {
         for (const el of this.attachedElements) {
           if (el !== this.processorElement && showProcessedStreamLocally) {
@@ -521,15 +523,17 @@ export default abstract class LocalTrack<
    * @experimental
    * @returns
    */
-  async stopProcessor() {
+  async stopProcessor(keepElement = true) {
     if (!this.processor) return;

     this.log.debug('stopping processor', this.logContext);
     this.processor.processedTrack?.stop();
     await this.processor.destroy();
     this.processor = undefined;
-
-
+    if (!keepElement) {
+      this.processorElement?.remove();
+      this.processorElement = undefined;
+    }
     // apply original track constraints in case the processor changed them
     await this._mediaStreamTrack.applyConstraints(this._constraints);
     // force re-setting of the mediaStreamTrack on the sender
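Usage sketch (not part of the diff): stopProcessor() now takes a keepElement flag, defaulting to true. Passing false also removes the hidden media element the processor pipeline was attached to. Assumes a LocalVideoTrack named videoTrack with an active processor; otherProcessor is a hypothetical replacement TrackProcessor.

// swap processors; the helper element is kept by default
await videoTrack.stopProcessor();
await videoTrack.setProcessor(otherProcessor);

// full teardown, including the helper element
await videoTrack.stopProcessor(false);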
package/src/room/track/LocalVideoTrack.ts
CHANGED
@@ -55,7 +55,11 @@ export default class LocalVideoTrack extends LocalTrack<Track.Kind.Video> {

   private degradationPreference: RTCDegradationPreference = 'balanced';

-
+  get sender(): RTCRtpSender | undefined {
+    return this._sender;
+  }
+
+  set sender(sender: RTCRtpSender | undefined) {
     this._sender = sender;
     if (this.degradationPreference) {
       this.setDegradationPreference(this.degradationPreference);
@@ -272,7 +276,10 @@ export default class LocalVideoTrack extends LocalTrack<Track.Kind.Video> {
     }
   }

-  async setProcessor(
+  async setProcessor(
+    processor: TrackProcessor<Track.Kind.Video>,
+    showProcessedStreamLocally = true,
+  ) {
     await super.setProcessor(processor, showProcessedStreamLocally);

     if (this.processor?.processedTrack) {
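Usage sketch (not part of the diff): LocalVideoTrack now exposes a public sender getter alongside the existing setter, so the underlying RTCRtpSender can be inspected after the track is published. Assumes a published LocalVideoTrack named videoTrack.

const parameters = videoTrack.sender?.getParameters();
console.log(parameters?.encodings);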
|