@livekit/react-native 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +202 -0
- package/README.md +141 -0
- package/android/build.gradle +133 -0
- package/android/gradle/wrapper/gradle-wrapper.jar +0 -0
- package/android/gradle/wrapper/gradle-wrapper.properties +5 -0
- package/android/gradle.properties +3 -0
- package/android/gradlew +185 -0
- package/android/gradlew.bat +89 -0
- package/android/local.properties +8 -0
- package/android/src/main/AndroidManifest.xml +13 -0
- package/android/src/main/java/com/livekit/reactnative/LivekitReactNativeModule.kt +50 -0
- package/android/src/main/java/com/livekit/reactnative/LivekitReactNativePackage.kt +17 -0
- package/android/src/main/java/com/livekit/reactnative/audio/AudioDeviceKind.java +40 -0
- package/android/src/main/java/com/livekit/reactnative/audio/AudioSwitchManager.java +140 -0
- package/ios/LivekitReactNative-Bridging-Header.h +2 -0
- package/ios/LivekitReactNative.h +11 -0
- package/ios/LivekitReactNative.m +111 -0
- package/ios/LivekitReactNative.xcodeproj/project.pbxproj +274 -0
- package/ios/LivekitReactNative.xcodeproj/project.xcworkspace/contents.xcworkspacedata +4 -0
- package/ios/LivekitReactNative.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist +8 -0
- package/ios/LivekitReactNative.xcodeproj/project.xcworkspace/xcuserdata/davidliu.xcuserdatad/UserInterfaceState.xcuserstate +0 -0
- package/ios/LivekitReactNative.xcodeproj/xcuserdata/davidliu.xcuserdatad/xcschemes/xcschememanagement.plist +14 -0
- package/lib/commonjs/audio/AudioSession.js +80 -0
- package/lib/commonjs/audio/AudioSession.js.map +1 -0
- package/lib/commonjs/components/VideoView.js +165 -0
- package/lib/commonjs/components/VideoView.js.map +1 -0
- package/lib/commonjs/components/ViewPortDetector.js +109 -0
- package/lib/commonjs/components/ViewPortDetector.js.map +1 -0
- package/lib/commonjs/index.js +103 -0
- package/lib/commonjs/index.js.map +1 -0
- package/lib/commonjs/useParticipant.js +100 -0
- package/lib/commonjs/useParticipant.js.map +1 -0
- package/lib/commonjs/useRoom.js +137 -0
- package/lib/commonjs/useRoom.js.map +1 -0
- package/lib/module/audio/AudioSession.js +70 -0
- package/lib/module/audio/AudioSession.js.map +1 -0
- package/lib/module/components/VideoView.js +144 -0
- package/lib/module/components/VideoView.js.map +1 -0
- package/lib/module/components/ViewPortDetector.js +97 -0
- package/lib/module/components/ViewPortDetector.js.map +1 -0
- package/lib/module/index.js +45 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/useParticipant.js +91 -0
- package/lib/module/useParticipant.js.map +1 -0
- package/lib/module/useRoom.js +126 -0
- package/lib/module/useRoom.js.map +1 -0
- package/lib/typescript/audio/AudioSession.d.ts +88 -0
- package/lib/typescript/components/VideoView.d.ts +10 -0
- package/lib/typescript/components/ViewPortDetector.d.ts +26 -0
- package/lib/typescript/index.d.ts +12 -0
- package/lib/typescript/useParticipant.d.ts +13 -0
- package/lib/typescript/useRoom.d.ts +20 -0
- package/livekit-react-native.podspec +22 -0
- package/package.json +157 -0
- package/src/audio/AudioSession.ts +132 -0
- package/src/components/VideoView.tsx +143 -0
- package/src/components/ViewPortDetector.tsx +93 -0
- package/src/index.tsx +37 -0
- package/src/useParticipant.ts +144 -0
- package/src/useRoom.ts +163 -0
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
import React, { Component } from 'react';
|
|
4
|
+
import { View, ViewStyle } from 'react-native';
|
|
5
|
+
|
|
6
|
+
export type Props = {
|
|
7
|
+
disabled?: boolean;
|
|
8
|
+
style?: ViewStyle;
|
|
9
|
+
onChange?: (isVisible: boolean) => void;
|
|
10
|
+
delay?: number;
|
|
11
|
+
};
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Detects when this is in the viewport and visible.
|
|
15
|
+
*
|
|
16
|
+
* Will not fire visibility changes for zero width/height components.
|
|
17
|
+
*/
|
|
18
|
+
export default class ViewPortDetector extends Component<Props> {
|
|
19
|
+
private lastValue: boolean | null = null;
|
|
20
|
+
private interval: any | null = null;
|
|
21
|
+
private view: View | null = null;
|
|
22
|
+
|
|
23
|
+
constructor(props: Props) {
|
|
24
|
+
super(props);
|
|
25
|
+
this.state = { rectTop: 0, rectBottom: 0 };
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
componentDidMount() {
|
|
29
|
+
if (!this.props.disabled) {
|
|
30
|
+
this.startWatching();
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
componentWillUnmount() {
|
|
35
|
+
this.stopWatching();
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
UNSAFE_componentWillReceiveProps(nextProps: Props) {
|
|
39
|
+
if (nextProps.disabled) {
|
|
40
|
+
this.stopWatching();
|
|
41
|
+
} else {
|
|
42
|
+
this.lastValue = null;
|
|
43
|
+
this.startWatching();
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
private startWatching() {
|
|
48
|
+
if (this.interval) {
|
|
49
|
+
return;
|
|
50
|
+
}
|
|
51
|
+
this.interval = setInterval(() => {
|
|
52
|
+
if (!this.view) {
|
|
53
|
+
return;
|
|
54
|
+
}
|
|
55
|
+
this.view.measure((_x, _y, width, height, _pageX, _pageY) => {
|
|
56
|
+
this.checkInViewPort(width, height);
|
|
57
|
+
});
|
|
58
|
+
}, this.props.delay || 100);
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
private stopWatching() {
|
|
62
|
+
this.interval = clearInterval(this.interval);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
private checkInViewPort(width?: number, height?: number) {
|
|
66
|
+
let isVisible: boolean;
|
|
67
|
+
// Not visible if any of these are missing.
|
|
68
|
+
if (!width || !height) {
|
|
69
|
+
isVisible = false;
|
|
70
|
+
} else {
|
|
71
|
+
isVisible = true;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
if (this.lastValue !== isVisible) {
|
|
75
|
+
this.lastValue = isVisible;
|
|
76
|
+
this.props.onChange?.(isVisible);
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
render() {
|
|
81
|
+
return (
|
|
82
|
+
<View
|
|
83
|
+
collapsable={false}
|
|
84
|
+
ref={(component) => {
|
|
85
|
+
this.view = component;
|
|
86
|
+
}}
|
|
87
|
+
{...this.props}
|
|
88
|
+
>
|
|
89
|
+
{this.props.children}
|
|
90
|
+
</View>
|
|
91
|
+
);
|
|
92
|
+
}
|
|
93
|
+
}
|
package/src/index.tsx
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { registerGlobals as webrtcRegisterGlobals } from 'react-native-webrtc';
|
|
2
|
+
import { setupURLPolyfill } from 'react-native-url-polyfill';
|
|
3
|
+
import AudioSession from './audio/AudioSession';
|
|
4
|
+
import type { AudioConfiguration } from './audio/AudioSession';
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Registers the required globals needed for LiveKit to work.
|
|
8
|
+
*
|
|
9
|
+
* Must be called before using LiveKit.
|
|
10
|
+
*/
|
|
11
|
+
export function registerGlobals() {
|
|
12
|
+
webrtcRegisterGlobals();
|
|
13
|
+
setupURLPolyfill();
|
|
14
|
+
fixWebrtcAdapter();
|
|
15
|
+
shimPromiseAllSettled();
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
function fixWebrtcAdapter() {
|
|
19
|
+
// @ts-ignore
|
|
20
|
+
if (window?.navigator !== undefined) {
|
|
21
|
+
// @ts-ignore
|
|
22
|
+
const { navigator } = window;
|
|
23
|
+
if (navigator.userAgent === undefined) {
|
|
24
|
+
navigator.userAgent = navigator.product ?? 'Unknown';
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
function shimPromiseAllSettled() {
|
|
30
|
+
var allSettled = require('promise.allsettled');
|
|
31
|
+
allSettled.shim();
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export * from './components/VideoView';
|
|
35
|
+
export * from './useParticipant';
|
|
36
|
+
export * from './useRoom';
|
|
37
|
+
export { AudioSession, AudioConfiguration };
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ConnectionQuality,
|
|
3
|
+
LocalParticipant,
|
|
4
|
+
Participant,
|
|
5
|
+
ParticipantEvent,
|
|
6
|
+
Track,
|
|
7
|
+
TrackPublication,
|
|
8
|
+
} from 'livekit-client';
|
|
9
|
+
import { useEffect, useState } from 'react';
|
|
10
|
+
|
|
11
|
+
/**
 * Snapshot of a participant's state as tracked by {@link useParticipant}.
 */
export interface ParticipantState {
  isSpeaking: boolean;
  connectionQuality: ConnectionQuality;
  isLocal: boolean;
  metadata?: string;
  // Every publication on the participant, subscribed or not.
  publications: TrackPublication[];
  // Only publications that are subscribed and have a live track.
  subscribedTracks: TrackPublication[];
  cameraPublication?: TrackPublication;
  microphonePublication?: TrackPublication;
  screenSharePublication?: TrackPublication;
}

/**
 * React hook that subscribes to a participant's events and exposes its
 * state (speaking, connection quality, publications, metadata) as React state.
 *
 * Listeners are registered on mount / whenever `participant` changes, and
 * removed in the effect cleanup.
 */
export function useParticipant(participant: Participant): ParticipantState {
  const [isAudioMuted, setAudioMuted] = useState(false);
  // Video-muted value is tracked but not currently exposed in the result.
  const [, setVideoMuted] = useState(false);
  const [connectionQuality, setConnectionQuality] = useState<ConnectionQuality>(
    participant.connectionQuality
  );
  const [isSpeaking, setSpeaking] = useState(false);
  const [metadata, setMetadata] = useState<string>();
  const [publications, setPublications] = useState<TrackPublication[]>([]);
  const [subscribedTracks, setSubscribedTracks] = useState<TrackPublication[]>(
    []
  );

  const [cameraPublication, setCameraPublication] = useState(
    participant.getTrack(Track.Source.Camera)
  );
  const [microphonePublication, setMicrophonePublication] = useState(
    participant.getTrack(Track.Source.Microphone)
  );
  const [screenSharePublication, setScreenSharePublication] = useState(
    participant.getTrack(Track.Source.ScreenShare)
  );
  useEffect(() => {
    // Recomputes every publication-derived piece of state from scratch.
    const onPublicationsChanged = () => {
      setPublications(Array.from(participant.tracks.values()));
      setCameraPublication(participant.getTrack(Track.Source.Camera));
      setMicrophonePublication(participant.getTrack(Track.Source.Microphone));
      setScreenSharePublication(participant.getTrack(Track.Source.ScreenShare));
      setSubscribedTracks(
        Array.from(participant.tracks.values()).filter((pub) => {
          return pub.isSubscribed && pub.track !== undefined;
        })
      );
    };
    const onMuted = (pub: TrackPublication) => {
      if (pub.kind === Track.Kind.Audio) {
        setAudioMuted(true);
      } else if (pub.kind === Track.Kind.Video) {
        setVideoMuted(true);
      }
    };
    const onUnmuted = (pub: TrackPublication) => {
      if (pub.kind === Track.Kind.Audio) {
        setAudioMuted(false);
      } else if (pub.kind === Track.Kind.Video) {
        setVideoMuted(false);
      }
    };
    const onMetadataChanged = () => {
      // NOTE(review): only updates when metadata is truthy, so clearing
      // metadata on the server won't reset local state — presumably intended.
      if (participant.metadata) {
        setMetadata(participant.metadata);
      }
    };
    const onIsSpeakingChanged = () => {
      setSpeaking(participant.isSpeaking);
    };
    const onConnectionQualityUpdate = () => {
      setConnectionQuality(participant.connectionQuality);
    };

    // register listeners
    participant
      .on(ParticipantEvent.TrackMuted, onMuted)
      .on(ParticipantEvent.TrackUnmuted, onUnmuted)
      .on(ParticipantEvent.ParticipantMetadataChanged, onMetadataChanged)
      .on(ParticipantEvent.IsSpeakingChanged, onIsSpeakingChanged)
      .on(ParticipantEvent.TrackPublished, onPublicationsChanged)
      .on(ParticipantEvent.TrackUnpublished, onPublicationsChanged)
      .on(ParticipantEvent.TrackSubscribed, onPublicationsChanged)
      .on(ParticipantEvent.TrackUnsubscribed, onPublicationsChanged)
      .on(ParticipantEvent.LocalTrackPublished, onPublicationsChanged)
      .on(ParticipantEvent.LocalTrackUnpublished, onPublicationsChanged)
      .on(ParticipantEvent.ConnectionQualityChanged, onConnectionQualityUpdate);

    // set initial state
    onMetadataChanged();
    onIsSpeakingChanged();
    onPublicationsChanged();

    return () => {
      // cleanup: remove exactly the listeners registered above
      participant
        .off(ParticipantEvent.TrackMuted, onMuted)
        .off(ParticipantEvent.TrackUnmuted, onUnmuted)
        .off(ParticipantEvent.ParticipantMetadataChanged, onMetadataChanged)
        .off(ParticipantEvent.IsSpeakingChanged, onIsSpeakingChanged)
        .off(ParticipantEvent.TrackPublished, onPublicationsChanged)
        .off(ParticipantEvent.TrackUnpublished, onPublicationsChanged)
        .off(ParticipantEvent.TrackSubscribed, onPublicationsChanged)
        .off(ParticipantEvent.TrackUnsubscribed, onPublicationsChanged)
        .off(ParticipantEvent.LocalTrackPublished, onPublicationsChanged)
        .off(ParticipantEvent.LocalTrackUnpublished, onPublicationsChanged)
        .off(
          ParticipantEvent.ConnectionQualityChanged,
          onConnectionQualityUpdate
        );
    };
  }, [participant]);

  // Reconcile audio-muted state during render from the current publications.
  // NOTE(review): this calls setAudioMuted in the render phase (guarded by
  // the inequality check, so it can't loop) — a pre-hooks-era pattern.
  let muted: boolean | undefined;
  participant.audioTracks.forEach((pub) => {
    muted = pub.isMuted;
  });
  if (muted === undefined) {
    // No audio publications at all counts as muted.
    muted = true;
  }
  if (isAudioMuted !== muted) {
    setAudioMuted(muted);
  }

  return {
    isLocal: participant instanceof LocalParticipant,
    isSpeaking,
    connectionQuality,
    publications,
    subscribedTracks,
    cameraPublication,
    microphonePublication,
    screenSharePublication,
    metadata,
  };
}
|
package/src/useRoom.ts
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import {
|
|
2
|
+
AudioTrack,
|
|
3
|
+
ConnectionState,
|
|
4
|
+
LocalParticipant,
|
|
5
|
+
Participant,
|
|
6
|
+
RemoteTrack,
|
|
7
|
+
Room,
|
|
8
|
+
RoomEvent,
|
|
9
|
+
Track,
|
|
10
|
+
} from 'livekit-client';
|
|
11
|
+
import { useEffect, useState } from 'react';
|
|
12
|
+
|
|
13
|
+
export interface RoomState {
|
|
14
|
+
room?: Room;
|
|
15
|
+
/* all participants in the room, including the local participant. */
|
|
16
|
+
participants: Participant[];
|
|
17
|
+
/* all subscribed audio tracks in the room, not including local participant. */
|
|
18
|
+
audioTracks: AudioTrack[];
|
|
19
|
+
error?: Error;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export interface RoomOptions {
|
|
23
|
+
sortParticipants?: (participants: Participant[]) => void;
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export function useRoom(room: Room, options?: RoomOptions): RoomState {
|
|
27
|
+
const [error] = useState<Error>();
|
|
28
|
+
const [participants, setParticipants] = useState<Participant[]>([]);
|
|
29
|
+
const [audioTracks, setAudioTracks] = useState<AudioTrack[]>([]);
|
|
30
|
+
|
|
31
|
+
const sortFunc = options?.sortParticipants ?? sortParticipants;
|
|
32
|
+
|
|
33
|
+
useEffect(() => {
|
|
34
|
+
const onParticipantsChanged = () => {
|
|
35
|
+
const remotes = Array.from(room.participants.values());
|
|
36
|
+
const newParticipants: Participant[] = [room.localParticipant];
|
|
37
|
+
newParticipants.push(...remotes);
|
|
38
|
+
sortFunc(newParticipants, room.localParticipant);
|
|
39
|
+
setParticipants(newParticipants);
|
|
40
|
+
};
|
|
41
|
+
const onSubscribedTrackChanged = (track?: RemoteTrack) => {
|
|
42
|
+
// ordering may have changed, re-sort
|
|
43
|
+
onParticipantsChanged();
|
|
44
|
+
if (track && track.kind !== Track.Kind.Audio) {
|
|
45
|
+
return;
|
|
46
|
+
}
|
|
47
|
+
const tracks: AudioTrack[] = [];
|
|
48
|
+
room.participants.forEach((p) => {
|
|
49
|
+
p.audioTracks.forEach((pub) => {
|
|
50
|
+
if (pub.audioTrack) {
|
|
51
|
+
tracks.push(pub.audioTrack);
|
|
52
|
+
}
|
|
53
|
+
});
|
|
54
|
+
});
|
|
55
|
+
setAudioTracks(tracks);
|
|
56
|
+
};
|
|
57
|
+
|
|
58
|
+
const onConnectionStateChanged = (state: ConnectionState) => {
|
|
59
|
+
if (state === ConnectionState.Connected) {
|
|
60
|
+
onParticipantsChanged();
|
|
61
|
+
}
|
|
62
|
+
};
|
|
63
|
+
|
|
64
|
+
room.once(RoomEvent.Disconnected, () => {
|
|
65
|
+
room
|
|
66
|
+
.off(RoomEvent.ParticipantConnected, onParticipantsChanged)
|
|
67
|
+
.off(RoomEvent.ParticipantDisconnected, onParticipantsChanged)
|
|
68
|
+
.off(RoomEvent.ActiveSpeakersChanged, onParticipantsChanged)
|
|
69
|
+
.off(RoomEvent.TrackSubscribed, onSubscribedTrackChanged)
|
|
70
|
+
.off(RoomEvent.TrackUnsubscribed, onSubscribedTrackChanged)
|
|
71
|
+
.off(RoomEvent.LocalTrackPublished, onParticipantsChanged)
|
|
72
|
+
.off(RoomEvent.LocalTrackUnpublished, onParticipantsChanged)
|
|
73
|
+
.off(RoomEvent.AudioPlaybackStatusChanged, onParticipantsChanged)
|
|
74
|
+
.off(RoomEvent.ConnectionStateChanged, onConnectionStateChanged);
|
|
75
|
+
});
|
|
76
|
+
room
|
|
77
|
+
.on(RoomEvent.ConnectionStateChanged, onConnectionStateChanged)
|
|
78
|
+
.on(RoomEvent.Reconnected, onParticipantsChanged)
|
|
79
|
+
.on(RoomEvent.ParticipantConnected, onParticipantsChanged)
|
|
80
|
+
.on(RoomEvent.ParticipantDisconnected, onParticipantsChanged)
|
|
81
|
+
.on(RoomEvent.ActiveSpeakersChanged, onParticipantsChanged)
|
|
82
|
+
.on(RoomEvent.TrackSubscribed, onSubscribedTrackChanged)
|
|
83
|
+
.on(RoomEvent.TrackUnsubscribed, onSubscribedTrackChanged)
|
|
84
|
+
.on(RoomEvent.LocalTrackPublished, onParticipantsChanged)
|
|
85
|
+
.on(RoomEvent.LocalTrackUnpublished, onParticipantsChanged)
|
|
86
|
+
// trigger a state change by re-sorting participants
|
|
87
|
+
.on(RoomEvent.AudioPlaybackStatusChanged, onParticipantsChanged);
|
|
88
|
+
|
|
89
|
+
onSubscribedTrackChanged();
|
|
90
|
+
|
|
91
|
+
return () => {
|
|
92
|
+
room.disconnect();
|
|
93
|
+
};
|
|
94
|
+
}, [room, sortFunc]);
|
|
95
|
+
|
|
96
|
+
return {
|
|
97
|
+
error,
|
|
98
|
+
participants,
|
|
99
|
+
audioTracks,
|
|
100
|
+
};
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
/**
|
|
104
|
+
* Default sort for participants, it'll order participants by:
|
|
105
|
+
* 1. dominant speaker (speaker with the loudest audio level)
|
|
106
|
+
* 2. local participant
|
|
107
|
+
* 3. other speakers that are recently active
|
|
108
|
+
* 4. participants with video on
|
|
109
|
+
* 5. by joinedAt
|
|
110
|
+
*/
|
|
111
|
+
export function sortParticipants(
|
|
112
|
+
participants: Participant[],
|
|
113
|
+
localParticipant?: LocalParticipant
|
|
114
|
+
) {
|
|
115
|
+
participants.sort((a, b) => {
|
|
116
|
+
// loudest speaker first
|
|
117
|
+
if (a.isSpeaking && b.isSpeaking) {
|
|
118
|
+
return b.audioLevel - a.audioLevel;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// speaker goes first
|
|
122
|
+
if (a.isSpeaking !== b.isSpeaking) {
|
|
123
|
+
if (a.isSpeaking) {
|
|
124
|
+
return -1;
|
|
125
|
+
} else {
|
|
126
|
+
return 1;
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
// last active speaker first
|
|
131
|
+
if (a.lastSpokeAt !== b.lastSpokeAt) {
|
|
132
|
+
const aLast = a.lastSpokeAt?.getTime() ?? 0;
|
|
133
|
+
const bLast = b.lastSpokeAt?.getTime() ?? 0;
|
|
134
|
+
return bLast - aLast;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
// video on
|
|
138
|
+
const aVideo = a.videoTracks.size > 0;
|
|
139
|
+
const bVideo = b.videoTracks.size > 0;
|
|
140
|
+
if (aVideo !== bVideo) {
|
|
141
|
+
if (aVideo) {
|
|
142
|
+
return -1;
|
|
143
|
+
} else {
|
|
144
|
+
return 1;
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
// joinedAt
|
|
149
|
+
return (a.joinedAt?.getTime() ?? 0) - (b.joinedAt?.getTime() ?? 0);
|
|
150
|
+
});
|
|
151
|
+
|
|
152
|
+
if (localParticipant) {
|
|
153
|
+
const localIdx = participants.indexOf(localParticipant);
|
|
154
|
+
if (localIdx >= 0) {
|
|
155
|
+
participants.splice(localIdx, 1);
|
|
156
|
+
if (participants.length > 0) {
|
|
157
|
+
participants.splice(1, 0, localParticipant);
|
|
158
|
+
} else {
|
|
159
|
+
participants.push(localParticipant);
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
}
|