@livekit/react-native 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +202 -0
- package/README.md +141 -0
- package/android/build.gradle +133 -0
- package/android/gradle/wrapper/gradle-wrapper.jar +0 -0
- package/android/gradle/wrapper/gradle-wrapper.properties +5 -0
- package/android/gradle.properties +3 -0
- package/android/gradlew +185 -0
- package/android/gradlew.bat +89 -0
- package/android/local.properties +8 -0
- package/android/src/main/AndroidManifest.xml +13 -0
- package/android/src/main/java/com/livekit/reactnative/LivekitReactNativeModule.kt +50 -0
- package/android/src/main/java/com/livekit/reactnative/LivekitReactNativePackage.kt +17 -0
- package/android/src/main/java/com/livekit/reactnative/audio/AudioDeviceKind.java +40 -0
- package/android/src/main/java/com/livekit/reactnative/audio/AudioSwitchManager.java +140 -0
- package/ios/LivekitReactNative-Bridging-Header.h +2 -0
- package/ios/LivekitReactNative.h +11 -0
- package/ios/LivekitReactNative.m +111 -0
- package/ios/LivekitReactNative.xcodeproj/project.pbxproj +274 -0
- package/ios/LivekitReactNative.xcodeproj/project.xcworkspace/contents.xcworkspacedata +4 -0
- package/ios/LivekitReactNative.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist +8 -0
- package/ios/LivekitReactNative.xcodeproj/project.xcworkspace/xcuserdata/davidliu.xcuserdatad/UserInterfaceState.xcuserstate +0 -0
- package/ios/LivekitReactNative.xcodeproj/xcuserdata/davidliu.xcuserdatad/xcschemes/xcschememanagement.plist +14 -0
- package/lib/commonjs/audio/AudioSession.js +80 -0
- package/lib/commonjs/audio/AudioSession.js.map +1 -0
- package/lib/commonjs/components/VideoView.js +165 -0
- package/lib/commonjs/components/VideoView.js.map +1 -0
- package/lib/commonjs/components/ViewPortDetector.js +109 -0
- package/lib/commonjs/components/ViewPortDetector.js.map +1 -0
- package/lib/commonjs/index.js +103 -0
- package/lib/commonjs/index.js.map +1 -0
- package/lib/commonjs/useParticipant.js +100 -0
- package/lib/commonjs/useParticipant.js.map +1 -0
- package/lib/commonjs/useRoom.js +137 -0
- package/lib/commonjs/useRoom.js.map +1 -0
- package/lib/module/audio/AudioSession.js +70 -0
- package/lib/module/audio/AudioSession.js.map +1 -0
- package/lib/module/components/VideoView.js +144 -0
- package/lib/module/components/VideoView.js.map +1 -0
- package/lib/module/components/ViewPortDetector.js +97 -0
- package/lib/module/components/ViewPortDetector.js.map +1 -0
- package/lib/module/index.js +45 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/useParticipant.js +91 -0
- package/lib/module/useParticipant.js.map +1 -0
- package/lib/module/useRoom.js +126 -0
- package/lib/module/useRoom.js.map +1 -0
- package/lib/typescript/audio/AudioSession.d.ts +88 -0
- package/lib/typescript/components/VideoView.d.ts +10 -0
- package/lib/typescript/components/ViewPortDetector.d.ts +26 -0
- package/lib/typescript/index.d.ts +12 -0
- package/lib/typescript/useParticipant.d.ts +13 -0
- package/lib/typescript/useRoom.d.ts +20 -0
- package/livekit-react-native.podspec +22 -0
- package/package.json +157 -0
- package/src/audio/AudioSession.ts +132 -0
- package/src/components/VideoView.tsx +143 -0
- package/src/components/ViewPortDetector.tsx +93 -0
- package/src/index.tsx +37 -0
- package/src/useParticipant.ts +144 -0
- package/src/useRoom.ts +163 -0
|
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem      https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem

@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto execute

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar


@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
## This file must *NOT* be checked into Version Control Systems,
|
|
2
|
+
# as it contains information specific to your local configuration.
|
|
3
|
+
#
|
|
4
|
+
# Location of the SDK. This is only used by Gradle.
|
|
5
|
+
# For customization when using a Version Control System, please read the
|
|
6
|
+
# header note.
|
|
7
|
+
#Fri Apr 22 01:03:04 JST 2022
|
|
8
|
+
sdk.dir=/Users/davidliu/Library/Android/sdk
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
|
2
|
+
package="com.livekit.reactnative">
|
|
3
|
+
|
|
4
|
+
<uses-permission android:name="android.permission.INTERNET" />
|
|
5
|
+
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
|
|
6
|
+
<uses-permission android:name="android.permission.CAMERA" />
|
|
7
|
+
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
|
|
8
|
+
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
|
9
|
+
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
|
|
10
|
+
<uses-permission android:name="android.permission.WAKE_LOCK" />
|
|
11
|
+
<uses-permission android:name="android.permission.BLUETOOTH" />
|
|
12
|
+
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN" />
|
|
13
|
+
</manifest>
|
|
package com.livekit.reactnative

import com.facebook.react.bridge.*
import com.livekit.reactnative.audio.AudioDeviceKind
import com.livekit.reactnative.audio.AudioSwitchManager

/**
 * React Native bridge module exposing LiveKit's Android audio controls to JS:
 * audio configuration, session start/stop, and output-device enumeration
 * and selection (backed by [AudioSwitchManager]).
 */
class LivekitReactNativeModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {

    val audioManager = AudioSwitchManager(reactContext.applicationContext)

    override fun getName(): String = "LivekitReactNative"

    /**
     * Applies the Android portion of the JS audio configuration.
     * Only `android.preferredOutputList` is honored here; entries that are
     * not strings or name unknown device kinds are silently dropped.
     */
    @ReactMethod
    fun configureAudio(config: ReadableMap) {
        val androidConfig = config.getMap("android") ?: return

        androidConfig.getArray("preferredOutputList")?.let { outputs ->
            audioManager.preferredDeviceList = outputs.toArrayList()
                .mapNotNull { entry -> AudioDeviceKind.fromTypeName(entry as? String)?.audioDeviceClass }
        }
    }

    /** Starts audio routing/management. */
    @ReactMethod
    fun startAudioSession() {
        audioManager.start()
    }

    /** Stops audio routing/management and releases resources. */
    @ReactMethod
    fun stopAudioSession() {
        audioManager.stop()
    }

    /** Resolves with the type names of the currently available output devices. */
    @ReactMethod
    fun getAudioOutputs(promise: Promise) {
        val names = audioManager.availableAudioDevices()
            .mapNotNull { device -> AudioDeviceKind.fromAudioDevice(device)?.typeName }
        promise.resolve(Arguments.makeNativeArray(names))
    }

    /** Selects the output device matching [deviceId]; unknown ids are ignored. */
    @ReactMethod
    fun selectAudioOutput(deviceId: String, promise: Promise) {
        audioManager.selectAudioOutput(AudioDeviceKind.fromTypeName(deviceId))
        promise.resolve(null)
    }
}
package com.livekit.reactnative

import com.facebook.react.ReactPackage
import com.facebook.react.bridge.NativeModule
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager

/**
 * ReactPackage entry point for the LiveKit SDK: registers the single
 * [LivekitReactNativeModule]. This package contributes no view managers.
 */
class LivekitReactNativePackage : ReactPackage {

    override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> =
        listOf(LivekitReactNativeModule(reactContext))

    override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> =
        emptyList()
}
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
package com.livekit.reactnative.audio;
|
|
2
|
+
|
|
3
|
+
import androidx.annotation.Nullable;
|
|
4
|
+
|
|
5
|
+
import com.twilio.audioswitch.AudioDevice;
|
|
6
|
+
|
|
7
|
+
public enum AudioDeviceKind {
|
|
8
|
+
BLUETOOTH("bluetooth", AudioDevice.BluetoothHeadset.class),
|
|
9
|
+
WIRED_HEADSET("headset", AudioDevice.WiredHeadset.class),
|
|
10
|
+
SPEAKER("speaker", AudioDevice.Speakerphone.class),
|
|
11
|
+
EARPIECE("earpiece", AudioDevice.Earpiece.class);
|
|
12
|
+
|
|
13
|
+
public final String typeName;
|
|
14
|
+
public final Class<? extends AudioDevice> audioDeviceClass;
|
|
15
|
+
|
|
16
|
+
AudioDeviceKind(String typeName, Class<? extends AudioDevice> audioDeviceClass) {
|
|
17
|
+
this.typeName = typeName;
|
|
18
|
+
this.audioDeviceClass = audioDeviceClass;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
@Nullable
|
|
22
|
+
public static AudioDeviceKind fromAudioDevice(AudioDevice audioDevice) {
|
|
23
|
+
for (AudioDeviceKind kind : values()) {
|
|
24
|
+
if (kind.audioDeviceClass.equals(audioDevice.getClass())) {
|
|
25
|
+
return kind;
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
return null;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
@Nullable
|
|
32
|
+
public static AudioDeviceKind fromTypeName(String typeName) {
|
|
33
|
+
for (AudioDeviceKind kind : values()) {
|
|
34
|
+
if (kind.typeName.equals(typeName)) {
|
|
35
|
+
return kind;
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
return null;
|
|
39
|
+
}
|
|
40
|
+
}
|
|
package com.livekit.reactnative.audio;

import android.content.Context;
import android.media.AudioManager;
import android.os.Handler;
import android.os.Looper;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.twilio.audioswitch.AudioDevice;
import com.twilio.audioswitch.AudioSwitch;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import kotlin.Unit;
import kotlin.jvm.functions.Function2;

/**
 * Lifecycle wrapper around Twilio's AudioSwitch.
 *
 * Owns a lazily-created {@link AudioSwitch} instance and marshals every
 * interaction with it onto the main thread, because AudioSwitch itself is
 * not thread-safe. Public listener/preference fields must be set before
 * {@link #start()} is first called to take effect.
 */
public class AudioSwitchManager {
    @NonNull
    private final Context context;
    @NonNull
    private final AudioManager audioManager;

    /** Passed through to AudioSwitch's constructor on the next start(). */
    public boolean loggingEnabled;

    /** Invoked by AudioSwitch when the available/selected devices change. Defaults to a no-op. */
    @NonNull
    public Function2<
            ? super List<? extends AudioDevice>,
            ? super AudioDevice,
            Unit> audioDeviceChangeListener = (devices, currentDevice) -> null;

    /** Forwarded to AudioSwitch for audio-focus changes. Defaults to a no-op. */
    @NonNull
    public AudioManager.OnAudioFocusChangeListener audioFocusChangeListener = (i -> {});

    /** Device classes in descending priority; consumed by AudioSwitch at start(). */
    @NonNull
    public List<Class<? extends AudioDevice>> preferredDeviceList;

    // AudioSwitch is not threadsafe, so all calls should be done on the main thread.
    private final Handler handler = new Handler(Looper.getMainLooper());

    // Non-null only between start() and stop().
    @Nullable
    private AudioSwitch audioSwitch;

    /**
     * @param context application context used to construct AudioSwitch and
     *                to obtain the system {@link AudioManager}.
     */
    public AudioSwitchManager(@NonNull Context context) {
        this.context = context;
        this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);

        // Default priority: bluetooth > wired headset > speakerphone > earpiece.
        preferredDeviceList = new ArrayList<>();
        preferredDeviceList.add(AudioDevice.BluetoothHeadset.class);
        preferredDeviceList.add(AudioDevice.WiredHeadset.class);
        preferredDeviceList.add(AudioDevice.Speakerphone.class);
        preferredDeviceList.add(AudioDevice.Earpiece.class);
    }

    /**
     * Creates, starts, and activates AudioSwitch on the main thread.
     * No-op if a session is already running (audioSwitch non-null).
     */
    public void start() {
        if (audioSwitch == null) {
            // Cancel any pending stop() so it cannot tear down the instance we create below.
            handler.removeCallbacksAndMessages(null);
            handler.postAtFrontOfQueue(() -> {
                audioSwitch = new AudioSwitch(
                        context,
                        loggingEnabled,
                        audioFocusChangeListener,
                        preferredDeviceList
                );
                audioSwitch.start(audioDeviceChangeListener);
                audioSwitch.activate();
            });
        }
    }

    /** Stops and releases AudioSwitch on the main thread; safe to call when not started. */
    public void stop() {
        // Cancel any pending start() before tearing down.
        handler.removeCallbacksAndMessages(null);
        handler.postAtFrontOfQueue(() -> {
            if (audioSwitch != null) {
                audioSwitch.stop();
            }
            audioSwitch = null;
        });
    }

    /** Mutes/unmutes the microphone via the platform AudioManager. */
    public void setMicrophoneMute(boolean mute) {
        audioManager.setMicrophoneMute(mute);
    }

    /** Returns the currently selected device, or null if not started. */
    @Nullable
    public AudioDevice selectedAudioDevice() {
        AudioSwitch audioSwitchTemp = audioSwitch;
        if (audioSwitchTemp != null) {
            return audioSwitchTemp.getSelectedAudioDevice();
        } else {
            return null;
        }
    }

    /** Returns the available devices, or an empty list if not started. */
    @NonNull
    public List<AudioDevice> availableAudioDevices() {
        AudioSwitch audioSwitchTemp = audioSwitch;
        if (audioSwitchTemp != null) {
            return audioSwitchTemp.getAvailableAudioDevices();
        } else {
            return Collections.emptyList();
        }
    }

    /**
     * Selects the first available device of class {@code audioDeviceClass}
     * on the main thread. No-op if not started or no such device exists.
     */
    public void selectAudioOutput(@NonNull Class<? extends AudioDevice> audioDeviceClass) {
        handler.post(() -> {
            if (audioSwitch != null) {
                AudioDevice audioDevice = null;
                for (AudioDevice device : availableAudioDevices()) {
                    if (device.getClass().equals(audioDeviceClass)) {
                        audioDevice = device;
                        break;
                    }
                }

                if (audioDevice != null) {
                    audioSwitch.selectDevice(audioDevice);
                }
            }
        });
    }

    /** Toggles the platform speakerphone directly, bypassing AudioSwitch. */
    public void enableSpeakerphone(boolean enable) {
        // Fixed: was an if/else calling setSpeakerphoneOn(true/false); the flag
        // can simply be passed through.
        audioManager.setSpeakerphoneOn(enable);
    }

    /** Convenience overload mapping an {@link AudioDeviceKind}; null kinds are ignored. */
    public void selectAudioOutput(@Nullable AudioDeviceKind kind) {
        if (kind != null) {
            selectAudioOutput(kind.audioDeviceClass);
        }
    }
}
|
#import <React/RCTBridgeModule.h>
#import "LivekitReactNative.h"
#import <WebRTC/RTCAudioSession.h>
#import <WebRTC/RTCAudioSessionConfiguration.h>
#import <AVFAudio/AVFAudio.h>
#import <AVKit/AVKit.h>

@implementation LivekitReactNative
RCT_EXPORT_MODULE();

/// Installs the default WebRTC audio-session configuration
/// (play-and-record, video-chat mode, speaker/Bluetooth/AirPlay enabled).
- (instancetype)init {
    // Fixed: replaced the assignment-inside-condition `if (self = [super init])`
    // with an else-branch returning nil by the standard init pattern; behavior
    // is identical (nil from super still returns nil) but idiomatic and
    // analyzer-friendly.
    self = [super init];
    if (self) {
        RTCAudioSessionConfiguration *config = [[RTCAudioSessionConfiguration alloc] init];
        [config setCategory:AVAudioSessionCategoryPlayAndRecord];
        [config setCategoryOptions:
             AVAudioSessionCategoryOptionAllowAirPlay |
             AVAudioSessionCategoryOptionAllowBluetooth |
             AVAudioSessionCategoryOptionAllowBluetoothA2DP |
             AVAudioSessionCategoryOptionDefaultToSpeaker];
        [config setMode:AVAudioSessionModeVideoChat];
        [RTCAudioSessionConfiguration setWebRTCConfiguration:config];
    }
    return self;
}

+ (BOOL)requiresMainQueueSetup {
    return NO;
}

/// Configure default audio config for WebRTC.
/// Reads `config[@"ios"][@"defaultOutput"]` (defaults to "speaker");
/// "earpiece" selects voice-chat mode without the speaker override,
/// everything else selects video-chat mode with default-to-speaker.
RCT_EXPORT_METHOD(configureAudio:(NSDictionary *)config) {
    NSDictionary *iOSConfig = [config objectForKey:@"ios"];
    if (iOSConfig == nil) {
        return;
    }

    NSString *defaultOutput = [iOSConfig objectForKey:@"defaultOutput"];
    if (defaultOutput == nil) {
        defaultOutput = @"speaker";
    }

    RTCAudioSessionConfiguration *rtcConfig = [[RTCAudioSessionConfiguration alloc] init];
    [rtcConfig setCategory:AVAudioSessionCategoryPlayAndRecord];

    if ([defaultOutput isEqualToString:@"earpiece"]) {
        [rtcConfig setCategoryOptions:
             AVAudioSessionCategoryOptionAllowAirPlay |
             AVAudioSessionCategoryOptionAllowBluetooth |
             AVAudioSessionCategoryOptionAllowBluetoothA2DP];
        [rtcConfig setMode:AVAudioSessionModeVoiceChat];
    } else {
        [rtcConfig setCategoryOptions:
             AVAudioSessionCategoryOptionAllowAirPlay |
             AVAudioSessionCategoryOptionAllowBluetooth |
             AVAudioSessionCategoryOptionAllowBluetoothA2DP |
             AVAudioSessionCategoryOptionDefaultToSpeaker];
        [rtcConfig setMode:AVAudioSessionModeVideoChat];
    }
    [RTCAudioSessionConfiguration setWebRTCConfiguration:rtcConfig];
}

/// Intentional no-op on iOS; the audio session is managed by WebRTC.
RCT_EXPORT_METHOD(startAudioSession) {
}

/// Intentional no-op on iOS; the audio session is managed by WebRTC.
RCT_EXPORT_METHOD(stopAudioSession) {
}

/// Programmatically presents the system audio-route picker by sending a
/// touch event to the button AVRoutePickerView embeds in its subviews.
/// NOTE(review): relies on AVRoutePickerView's private view hierarchy;
/// silently does nothing if no UIButton subview is found.
RCT_EXPORT_METHOD(showAudioRoutePicker) {
    if (@available(iOS 11.0, *)) {
        AVRoutePickerView *routePickerView = [[AVRoutePickerView alloc] init];
        // Fixed: fast enumeration instead of an indexed objectAtIndex: loop.
        for (UIView *subview in routePickerView.subviews) {
            if ([subview isKindOfClass:[UIButton class]]) {
                [(UIButton *)subview sendActionsForControlEvents:UIControlEventTouchUpInside];
                break;
            }
        }
    }
}

/// iOS exposes exactly two logical outputs: the default route and forced speaker.
RCT_EXPORT_METHOD(getAudioOutputsWithResolver:(RCTPromiseResolveBlock)resolve
                                 withRejecter:(RCTPromiseRejectBlock)reject) {
    resolve(@[@"default", @"force_speaker"]);
}

/// Overrides the shared AVAudioSession's output port for the given id.
/// Unknown device ids leave the route untouched and resolve successfully.
RCT_EXPORT_METHOD(selectAudioOutput:(NSString *)deviceId
                       withResolver:(RCTPromiseResolveBlock)resolve
                       withRejecter:(RCTPromiseRejectBlock)reject) {
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;

    if ([deviceId isEqualToString:@"default"]) {
        [session overrideOutputAudioPort:AVAudioSessionPortOverrideNone error:&error];
    } else if ([deviceId isEqualToString:@"force_speaker"]) {
        [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error];
    }

    if (error != nil) {
        reject(@"selectAudioOutput error", error.localizedDescription, error);
    } else {
        resolve(nil);
    }
}
@end