@sbhjt-gr/react-native-webrtc 124.0.2 → 124.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.clang-format +11 -11
- package/.eslintignore +6 -6
- package/.nvmrc +1 -1
- package/ISSUE_TEMPLATE.md +40 -40
- package/LICENSE +22 -22
- package/README.md +103 -103
- package/android/build.gradle +37 -37
- package/android/consumer-rules.pro +3 -3
- package/android/src/main/AndroidManifest.xml +11 -11
- package/android/src/main/java/com/oney/WebRTCModule/AbstractVideoCaptureController.java +113 -113
- package/android/src/main/java/com/oney/WebRTCModule/CameraCaptureController.java +338 -338
- package/android/src/main/java/com/oney/WebRTCModule/CameraEventsHandler.java +49 -49
- package/android/src/main/java/com/oney/WebRTCModule/DataChannelWrapper.java +99 -99
- package/android/src/main/java/com/oney/WebRTCModule/DataPacketCryptorManager.java +62 -62
- package/android/src/main/java/com/oney/WebRTCModule/DisplayUtils.java +16 -16
- package/android/src/main/java/com/oney/WebRTCModule/EglUtils.java +66 -66
- package/android/src/main/java/com/oney/WebRTCModule/GetUserMediaImpl.java +539 -539
- package/android/src/main/java/com/oney/WebRTCModule/LibraryLoader.java +21 -21
- package/android/src/main/java/com/oney/WebRTCModule/MediaProjectionNotification.java +70 -70
- package/android/src/main/java/com/oney/WebRTCModule/MediaProjectionService.java +82 -82
- package/android/src/main/java/com/oney/WebRTCModule/PeerConnectionObserver.java +588 -588
- package/android/src/main/java/com/oney/WebRTCModule/RTCCryptoManager.java +493 -493
- package/android/src/main/java/com/oney/WebRTCModule/RTCVideoViewManager.java +98 -98
- package/android/src/main/java/com/oney/WebRTCModule/ReactBridgeUtil.java +35 -35
- package/android/src/main/java/com/oney/WebRTCModule/ScreenCaptureController.java +94 -94
- package/android/src/main/java/com/oney/WebRTCModule/SerializeUtils.java +342 -342
- package/android/src/main/java/com/oney/WebRTCModule/StringUtils.java +100 -100
- package/android/src/main/java/com/oney/WebRTCModule/ThreadUtils.java +41 -41
- package/android/src/main/java/com/oney/WebRTCModule/TrackCapturerEventsEmitter.java +34 -34
- package/android/src/main/java/com/oney/WebRTCModule/VideoTrackAdapter.java +137 -137
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java +1649 -1643
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCModuleOptions.java +33 -33
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCModulePackage.java +21 -21
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCView.java +583 -583
- package/android/src/main/java/com/oney/WebRTCModule/palabra/PalabraClient.java +529 -464
- package/android/src/main/java/com/oney/WebRTCModule/palabra/PalabraConfig.java +17 -17
- package/android/src/main/java/com/oney/WebRTCModule/palabra/PalabraListener.java +7 -7
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/ProcessorProvider.java +38 -38
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/VideoEffectProcessor.java +59 -59
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/VideoFrameProcessor.java +19 -19
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/VideoFrameProcessorFactoryInterface.java +12 -12
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/H264AndSoftwareVideoDecoderFactory.java +73 -73
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/H264AndSoftwareVideoEncoderFactory.java +73 -73
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/SoftwareVideoDecoderFactoryProxy.java +36 -36
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/SoftwareVideoEncoderFactoryProxy.java +36 -36
- package/android/src/main/java/org/webrtc/Camera1Helper.java +54 -54
- package/android/src/main/java/org/webrtc/Camera2Helper.java +52 -52
- package/android/src/main/res/values/strings.xml +5 -5
- package/android/src/main/res/values/styles.xml +8 -8
- package/ios/RCTWebRTC/CaptureController.h +18 -18
- package/ios/RCTWebRTC/CaptureController.m +28 -28
- package/ios/RCTWebRTC/CapturerEventsDelegate.h +12 -12
- package/ios/RCTWebRTC/DataChannelWrapper.h +27 -27
- package/ios/RCTWebRTC/DataChannelWrapper.m +42 -42
- package/ios/RCTWebRTC/I420Converter.h +22 -22
- package/ios/RCTWebRTC/I420Converter.m +164 -164
- package/ios/RCTWebRTC/PIPController.h +24 -24
- package/ios/RCTWebRTC/PIPController.m +234 -234
- package/ios/RCTWebRTC/PalabraAudioSink.h +13 -13
- package/ios/RCTWebRTC/PalabraAudioSink.m +18 -18
- package/ios/RCTWebRTC/PalabraClient.h +42 -36
- package/ios/RCTWebRTC/PalabraClient.m +680 -584
- package/ios/RCTWebRTC/RCTConvert+WebRTC.h +16 -16
- package/ios/RCTWebRTC/RCTConvert+WebRTC.m +206 -206
- package/ios/RCTWebRTC/RTCMediaStreamTrack+React.h +10 -10
- package/ios/RCTWebRTC/RTCMediaStreamTrack+React.m +16 -16
- package/ios/RCTWebRTC/RTCVideoViewManager.h +29 -29
- package/ios/RCTWebRTC/RTCVideoViewManager.m +411 -411
- package/ios/RCTWebRTC/SampleBufferVideoCallView.h +12 -12
- package/ios/RCTWebRTC/SampleBufferVideoCallView.m +178 -178
- package/ios/RCTWebRTC/ScreenCaptureController.h +20 -20
- package/ios/RCTWebRTC/ScreenCaptureController.m +82 -82
- package/ios/RCTWebRTC/ScreenCapturePickerViewManager.h +7 -7
- package/ios/RCTWebRTC/ScreenCapturePickerViewManager.m +59 -59
- package/ios/RCTWebRTC/ScreenCapturer.h +19 -19
- package/ios/RCTWebRTC/ScreenCapturer.m +263 -263
- package/ios/RCTWebRTC/SerializeUtils.h +28 -28
- package/ios/RCTWebRTC/SerializeUtils.m +314 -314
- package/ios/RCTWebRTC/SocketConnection.h +13 -13
- package/ios/RCTWebRTC/SocketConnection.m +137 -137
- package/ios/RCTWebRTC/TrackCapturerEventsEmitter.h +14 -14
- package/ios/RCTWebRTC/TrackCapturerEventsEmitter.m +36 -36
- package/ios/RCTWebRTC/VideoCaptureController.h +21 -21
- package/ios/RCTWebRTC/VideoCaptureController.m +328 -328
- package/ios/RCTWebRTC/WLVAudioDevice.h +12 -12
- package/ios/RCTWebRTC/WLVAudioDevice.m +137 -137
- package/ios/RCTWebRTC/WebRTCModule+Palabra.h +4 -4
- package/ios/RCTWebRTC/WebRTCModule+Palabra.m +92 -83
- package/ios/RCTWebRTC/WebRTCModule+Permissions.m +75 -75
- package/ios/RCTWebRTC/WebRTCModule+RTCAudioSession.m +20 -20
- package/ios/RCTWebRTC/WebRTCModule+RTCDataChannel.h +14 -14
- package/ios/RCTWebRTC/WebRTCModule+RTCDataChannel.m +165 -165
- package/ios/RCTWebRTC/WebRTCModule+RTCFrameCryptor.m +611 -611
- package/ios/RCTWebRTC/WebRTCModule+RTCMediaStream.h +13 -13
- package/ios/RCTWebRTC/WebRTCModule+RTCMediaStream.m +728 -728
- package/ios/RCTWebRTC/WebRTCModule+RTCPeerConnection.h +24 -24
- package/ios/RCTWebRTC/WebRTCModule+RTCPeerConnection.m +1004 -1004
- package/ios/RCTWebRTC/WebRTCModule+Transceivers.m +267 -267
- package/ios/RCTWebRTC/WebRTCModule+VideoTrackAdapter.h +12 -12
- package/ios/RCTWebRTC/WebRTCModule+VideoTrackAdapter.m +166 -166
- package/ios/RCTWebRTC/WebRTCModule.h +58 -58
- package/ios/RCTWebRTC/WebRTCModule.m +169 -169
- package/ios/RCTWebRTC/WebRTCModuleOptions.h +24 -24
- package/ios/RCTWebRTC/WebRTCModuleOptions.m +31 -31
- package/ios/RCTWebRTC/videoEffects/ProcessorProvider.h +9 -9
- package/ios/RCTWebRTC/videoEffects/ProcessorProvider.m +23 -23
- package/ios/RCTWebRTC/videoEffects/VideoEffectProcessor.h +13 -13
- package/ios/RCTWebRTC/videoEffects/VideoEffectProcessor.m +23 -23
- package/ios/RCTWebRTC/videoEffects/VideoFrameProcessor.h +8 -8
- package/ios/RCTWebRTC.xcodeproj/project.pbxproj +404 -404
- package/ios/RCTWebRTC.xcworkspace/contents.xcworkspacedata +10 -10
- package/lib/commonjs/Constraints.js.map +1 -1
- package/lib/commonjs/EventEmitter.js.map +1 -1
- package/lib/commonjs/Logger.js.map +1 -1
- package/lib/commonjs/MediaDevices.js +17 -17
- package/lib/commonjs/MediaDevices.js.map +1 -1
- package/lib/commonjs/MediaStream.js +19 -19
- package/lib/commonjs/MediaStream.js.map +1 -1
- package/lib/commonjs/MediaStreamError.js.map +1 -1
- package/lib/commonjs/MediaStreamErrorEvent.js.map +1 -1
- package/lib/commonjs/MediaStreamTrack.js +28 -28
- package/lib/commonjs/MediaStreamTrack.js.map +1 -1
- package/lib/commonjs/MediaStreamTrackEvent.js +6 -6
- package/lib/commonjs/MediaStreamTrackEvent.js.map +1 -1
- package/lib/commonjs/MessageEvent.js +7 -7
- package/lib/commonjs/MessageEvent.js.map +1 -1
- package/lib/commonjs/Permissions.js +28 -28
- package/lib/commonjs/Permissions.js.map +1 -1
- package/lib/commonjs/RTCAudioSession.js +4 -4
- package/lib/commonjs/RTCAudioSession.js.map +1 -1
- package/lib/commonjs/RTCDataChannel.js +2 -2
- package/lib/commonjs/RTCDataChannel.js.map +1 -1
- package/lib/commonjs/RTCDataChannelEvent.js +6 -6
- package/lib/commonjs/RTCDataChannelEvent.js.map +1 -1
- package/lib/commonjs/RTCDataPacketCryptor.js.map +1 -1
- package/lib/commonjs/RTCDataPacketCryptorFactory.js.map +1 -1
- package/lib/commonjs/RTCErrorEvent.js +3 -3
- package/lib/commonjs/RTCErrorEvent.js.map +1 -1
- package/lib/commonjs/RTCFrameCryptor.js +8 -8
- package/lib/commonjs/RTCFrameCryptor.js.map +1 -1
- package/lib/commonjs/RTCFrameCryptorFactory.js.map +1 -1
- package/lib/commonjs/RTCIceCandidate.js.map +1 -1
- package/lib/commonjs/RTCIceCandidateEvent.js +7 -7
- package/lib/commonjs/RTCIceCandidateEvent.js.map +1 -1
- package/lib/commonjs/RTCKeyProvider.js.map +1 -1
- package/lib/commonjs/RTCPIPView.js +2 -2
- package/lib/commonjs/RTCPIPView.js.map +1 -1
- package/lib/commonjs/RTCPIPView.web.js.map +1 -1
- package/lib/commonjs/RTCPeerConnection.js +146 -41
- package/lib/commonjs/RTCPeerConnection.js.map +1 -1
- package/lib/commonjs/RTCRtcpParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpCapabilities.js +2 -2
- package/lib/commonjs/RTCRtpCapabilities.js.map +1 -1
- package/lib/commonjs/RTCRtpCodecCapability.js.map +1 -1
- package/lib/commonjs/RTCRtpCodecParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpEncodingParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpHeaderExtension.js.map +1 -1
- package/lib/commonjs/RTCRtpParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpReceiveParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpReceiver.js +7 -7
- package/lib/commonjs/RTCRtpReceiver.js.map +1 -1
- package/lib/commonjs/RTCRtpSendParameters.js +3 -3
- package/lib/commonjs/RTCRtpSendParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpSender.js +7 -7
- package/lib/commonjs/RTCRtpSender.js.map +1 -1
- package/lib/commonjs/RTCRtpTransceiver.js.map +1 -1
- package/lib/commonjs/RTCSessionDescription.js.map +1 -1
- package/lib/commonjs/RTCTrackEvent.js +6 -6
- package/lib/commonjs/RTCTrackEvent.js.map +1 -1
- package/lib/commonjs/RTCUtil.js +28 -28
- package/lib/commonjs/RTCUtil.js.map +1 -1
- package/lib/commonjs/RTCView.js +5 -5
- package/lib/commonjs/RTCView.js.map +1 -1
- package/lib/commonjs/RTCView.web.js.map +1 -1
- package/lib/commonjs/ScreenCapturePickerView.js.map +1 -1
- package/lib/commonjs/ScreenCapturePickerView.web.js.map +1 -1
- package/lib/commonjs/getDisplayMedia.js.map +1 -1
- package/lib/commonjs/getUserMedia.js +23 -0
- package/lib/commonjs/getUserMedia.js.map +1 -1
- package/lib/commonjs/index.js.map +1 -1
- package/lib/commonjs/index.web.js.map +1 -1
- package/lib/commonjs/webStream.js.map +1 -1
- package/lib/module/Constraints.js.map +1 -1
- package/lib/module/EventEmitter.js.map +1 -1
- package/lib/module/Logger.js.map +1 -1
- package/lib/module/MediaDevices.js +17 -17
- package/lib/module/MediaDevices.js.map +1 -1
- package/lib/module/MediaStream.js +19 -19
- package/lib/module/MediaStream.js.map +1 -1
- package/lib/module/MediaStreamError.js.map +1 -1
- package/lib/module/MediaStreamErrorEvent.js.map +1 -1
- package/lib/module/MediaStreamTrack.js +28 -28
- package/lib/module/MediaStreamTrack.js.map +1 -1
- package/lib/module/MediaStreamTrackEvent.js +6 -6
- package/lib/module/MediaStreamTrackEvent.js.map +1 -1
- package/lib/module/MessageEvent.js +7 -7
- package/lib/module/MessageEvent.js.map +1 -1
- package/lib/module/Permissions.js +28 -28
- package/lib/module/Permissions.js.map +1 -1
- package/lib/module/RTCAudioSession.js +4 -4
- package/lib/module/RTCAudioSession.js.map +1 -1
- package/lib/module/RTCDataChannel.js +2 -2
- package/lib/module/RTCDataChannel.js.map +1 -1
- package/lib/module/RTCDataChannelEvent.js +6 -6
- package/lib/module/RTCDataChannelEvent.js.map +1 -1
- package/lib/module/RTCDataPacketCryptor.js.map +1 -1
- package/lib/module/RTCDataPacketCryptorFactory.js.map +1 -1
- package/lib/module/RTCErrorEvent.js +3 -3
- package/lib/module/RTCErrorEvent.js.map +1 -1
- package/lib/module/RTCFrameCryptor.js +8 -8
- package/lib/module/RTCFrameCryptor.js.map +1 -1
- package/lib/module/RTCFrameCryptorFactory.js.map +1 -1
- package/lib/module/RTCIceCandidate.js.map +1 -1
- package/lib/module/RTCIceCandidateEvent.js +7 -7
- package/lib/module/RTCIceCandidateEvent.js.map +1 -1
- package/lib/module/RTCKeyProvider.js.map +1 -1
- package/lib/module/RTCPIPView.js +2 -2
- package/lib/module/RTCPIPView.js.map +1 -1
- package/lib/module/RTCPIPView.web.js.map +1 -1
- package/lib/module/RTCPeerConnection.js +146 -41
- package/lib/module/RTCPeerConnection.js.map +1 -1
- package/lib/module/RTCRtcpParameters.js.map +1 -1
- package/lib/module/RTCRtpCapabilities.js +2 -2
- package/lib/module/RTCRtpCapabilities.js.map +1 -1
- package/lib/module/RTCRtpCodecCapability.js.map +1 -1
- package/lib/module/RTCRtpCodecParameters.js.map +1 -1
- package/lib/module/RTCRtpEncodingParameters.js.map +1 -1
- package/lib/module/RTCRtpHeaderExtension.js.map +1 -1
- package/lib/module/RTCRtpParameters.js.map +1 -1
- package/lib/module/RTCRtpReceiveParameters.js.map +1 -1
- package/lib/module/RTCRtpReceiver.js +7 -7
- package/lib/module/RTCRtpReceiver.js.map +1 -1
- package/lib/module/RTCRtpSendParameters.js +3 -3
- package/lib/module/RTCRtpSendParameters.js.map +1 -1
- package/lib/module/RTCRtpSender.js +7 -7
- package/lib/module/RTCRtpSender.js.map +1 -1
- package/lib/module/RTCRtpTransceiver.js.map +1 -1
- package/lib/module/RTCSessionDescription.js.map +1 -1
- package/lib/module/RTCTrackEvent.js +6 -6
- package/lib/module/RTCTrackEvent.js.map +1 -1
- package/lib/module/RTCUtil.js +28 -28
- package/lib/module/RTCUtil.js.map +1 -1
- package/lib/module/RTCView.js +5 -5
- package/lib/module/RTCView.js.map +1 -1
- package/lib/module/RTCView.web.js.map +1 -1
- package/lib/module/ScreenCapturePickerView.js.map +1 -1
- package/lib/module/ScreenCapturePickerView.web.js.map +1 -1
- package/lib/module/getDisplayMedia.js.map +1 -1
- package/lib/module/getUserMedia.js +23 -0
- package/lib/module/getUserMedia.js.map +1 -1
- package/lib/module/index.js.map +1 -1
- package/lib/module/index.web.js.map +1 -1
- package/lib/module/webStream.js.map +1 -1
- package/lib/typescript/Constraints.d.ts +19 -19
- package/lib/typescript/EventEmitter.d.ts +6 -6
- package/lib/typescript/Logger.d.ts +13 -13
- package/lib/typescript/MediaDevices.d.ts +30 -30
- package/lib/typescript/MediaStream.d.ts +48 -48
- package/lib/typescript/MediaStreamError.d.ts +6 -6
- package/lib/typescript/MediaStreamErrorEvent.d.ts +6 -6
- package/lib/typescript/MediaStreamTrack.d.ts +101 -101
- package/lib/typescript/MediaStreamTrackEvent.d.ts +19 -19
- package/lib/typescript/MessageEvent.d.ts +20 -20
- package/lib/typescript/Permissions.d.ts +55 -55
- package/lib/typescript/RTCAudioSession.d.ts +10 -10
- package/lib/typescript/RTCDataChannel.d.ts +43 -43
- package/lib/typescript/RTCDataChannelEvent.d.ts +19 -19
- package/lib/typescript/RTCDataPacketCryptor.d.ts +12 -12
- package/lib/typescript/RTCDataPacketCryptorFactory.d.ts +6 -6
- package/lib/typescript/RTCErrorEvent.d.ts +12 -12
- package/lib/typescript/RTCFrameCryptor.d.ts +47 -47
- package/lib/typescript/RTCFrameCryptorFactory.d.ts +21 -21
- package/lib/typescript/RTCIceCandidate.d.ts +17 -17
- package/lib/typescript/RTCIceCandidateEvent.d.ts +20 -20
- package/lib/typescript/RTCKeyProvider.d.ts +21 -21
- package/lib/typescript/RTCPIPView.d.ts +15 -15
- package/lib/typescript/RTCPIPView.web.d.ts +13 -13
- package/lib/typescript/RTCPeerConnection.d.ts +117 -117
- package/lib/typescript/RTCRtcpParameters.d.ts +10 -10
- package/lib/typescript/RTCRtpCapabilities.d.ts +9 -9
- package/lib/typescript/RTCRtpCodecCapability.d.ts +7 -7
- package/lib/typescript/RTCRtpCodecParameters.d.ts +16 -16
- package/lib/typescript/RTCRtpEncodingParameters.d.ts +23 -23
- package/lib/typescript/RTCRtpHeaderExtension.d.ts +12 -12
- package/lib/typescript/RTCRtpParameters.d.ts +19 -19
- package/lib/typescript/RTCRtpReceiveParameters.d.ts +4 -4
- package/lib/typescript/RTCRtpReceiver.d.ts +21 -21
- package/lib/typescript/RTCRtpSendParameters.d.ts +20 -20
- package/lib/typescript/RTCRtpSender.d.ts +22 -22
- package/lib/typescript/RTCRtpTransceiver.d.ts +31 -31
- package/lib/typescript/RTCSessionDescription.d.ts +12 -12
- package/lib/typescript/RTCTrackEvent.d.ts +29 -29
- package/lib/typescript/RTCUtil.d.ts +37 -37
- package/lib/typescript/RTCView.d.ts +117 -117
- package/lib/typescript/RTCView.web.d.ts +25 -25
- package/lib/typescript/ScreenCapturePickerView.d.ts +2 -2
- package/lib/typescript/ScreenCapturePickerView.web.d.ts +1 -1
- package/lib/typescript/getDisplayMedia.d.ts +2 -2
- package/lib/typescript/getUserMedia.d.ts +7 -7
- package/lib/typescript/index.d.ts +22 -22
- package/lib/typescript/index.web.d.ts +101 -101
- package/lib/typescript/webStream.d.ts +3 -3
- package/livekit-react-native-webrtc.podspec +29 -29
- package/macos/RCTWebRTC.xcodeproj/project.pbxproj +324 -324
- package/macos/RCTWebRTC.xcodeproj/project.xcworkspace/contents.xcworkspacedata +7 -7
- package/macos/RCTWebRTC.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist +8 -8
- package/metro.config.js +7 -7
- package/metro.config.macos.js +14 -14
- package/package.json +66 -66
- package/react-native.config.js +11 -11
- package/src/.eslintrc.cjs +67 -67
- package/src/Constraints.ts +20 -20
- package/src/EventEmitter.ts +65 -65
- package/src/Logger.ts +49 -49
- package/src/MediaDevices.ts +53 -53
- package/src/MediaStream.ts +161 -161
- package/src/MediaStreamError.ts +12 -12
- package/src/MediaStreamErrorEvent.ts +11 -11
- package/src/MediaStreamTrack.ts +282 -282
- package/src/MediaStreamTrackEvent.ts +25 -25
- package/src/MessageEvent.ts +26 -26
- package/src/Permissions.ts +133 -133
- package/src/RTCAudioSession.ts +25 -25
- package/src/RTCDataChannel.ts +190 -190
- package/src/RTCDataChannelEvent.ts +28 -28
- package/src/RTCDataPacketCryptor.ts +90 -90
- package/src/RTCDataPacketCryptorFactory.ts +24 -24
- package/src/RTCErrorEvent.ts +20 -20
- package/src/RTCFrameCryptor.ts +162 -162
- package/src/RTCFrameCryptorFactory.ts +101 -101
- package/src/RTCIceCandidate.ts +29 -29
- package/src/RTCIceCandidateEvent.ts +26 -26
- package/src/RTCKeyProvider.ts +117 -117
- package/src/RTCPIPView.tsx +46 -46
- package/src/RTCPIPView.web.tsx +18 -18
- package/src/RTCPeerConnection.ts +935 -832
- package/src/RTCRtcpParameters.ts +23 -23
- package/src/RTCRtpCapabilities.ts +16 -16
- package/src/RTCRtpCodecCapability.ts +12 -12
- package/src/RTCRtpCodecParameters.ts +44 -44
- package/src/RTCRtpEncodingParameters.ts +90 -90
- package/src/RTCRtpHeaderExtension.ts +27 -27
- package/src/RTCRtpParameters.ts +37 -37
- package/src/RTCRtpReceiveParameters.ts +7 -7
- package/src/RTCRtpReceiver.ts +60 -60
- package/src/RTCRtpSendParameters.ts +63 -63
- package/src/RTCRtpSender.ts +78 -78
- package/src/RTCRtpTransceiver.ts +107 -107
- package/src/RTCSessionDescription.ts +30 -30
- package/src/RTCTrackEvent.ts +42 -42
- package/src/RTCUtil.ts +211 -211
- package/src/RTCView.ts +122 -122
- package/src/RTCView.web.tsx +80 -80
- package/src/ScreenCapturePickerView.ts +4 -4
- package/src/ScreenCapturePickerView.web.tsx +3 -3
- package/src/getDisplayMedia.ts +30 -30
- package/src/getUserMedia.ts +136 -111
- package/src/index.ts +107 -107
- package/src/index.web.ts +191 -191
- package/src/webStream.ts +31 -31
- package/tools/format.sh +6 -6
- package/tools/release.sh +45 -45
- package/tsconfig.json +17 -17
|
@@ -1,539 +1,539 @@
|
|
|
1
|
-
package com.oney.WebRTCModule;
|
|
2
|
-
|
|
3
|
-
import android.app.Activity;
|
|
4
|
-
import android.content.Context;
|
|
5
|
-
import android.content.Intent;
|
|
6
|
-
import android.media.projection.MediaProjectionManager;
|
|
7
|
-
import android.util.DisplayMetrics;
|
|
8
|
-
import android.util.Log;
|
|
9
|
-
|
|
10
|
-
import androidx.core.util.Consumer;
|
|
11
|
-
|
|
12
|
-
import com.facebook.react.bridge.Arguments;
|
|
13
|
-
import com.facebook.react.bridge.BaseActivityEventListener;
|
|
14
|
-
import com.facebook.react.bridge.Callback;
|
|
15
|
-
import com.facebook.react.bridge.Promise;
|
|
16
|
-
import com.facebook.react.bridge.ReactApplicationContext;
|
|
17
|
-
import com.facebook.react.bridge.ReadableArray;
|
|
18
|
-
import com.facebook.react.bridge.ReadableMap;
|
|
19
|
-
import com.facebook.react.bridge.UiThreadUtil;
|
|
20
|
-
import com.facebook.react.bridge.WritableArray;
|
|
21
|
-
import com.facebook.react.bridge.WritableMap;
|
|
22
|
-
import com.oney.WebRTCModule.videoEffects.ProcessorProvider;
|
|
23
|
-
import com.oney.WebRTCModule.videoEffects.VideoEffectProcessor;
|
|
24
|
-
import com.oney.WebRTCModule.videoEffects.VideoFrameProcessor;
|
|
25
|
-
|
|
26
|
-
import org.webrtc.*;
|
|
27
|
-
|
|
28
|
-
import java.util.ArrayList;
|
|
29
|
-
import java.util.HashMap;
|
|
30
|
-
import java.util.List;
|
|
31
|
-
import java.util.Map;
|
|
32
|
-
import java.util.Objects;
|
|
33
|
-
import java.util.UUID;
|
|
34
|
-
import java.util.stream.Collectors;
|
|
35
|
-
|
|
36
|
-
/**
|
|
37
|
-
* The implementation of {@code getUserMedia} extracted into a separate file in
|
|
38
|
-
* order to reduce complexity and to (somewhat) separate concerns.
|
|
39
|
-
*/
|
|
40
|
-
class GetUserMediaImpl {
|
|
41
|
-
/**
|
|
42
|
-
* The {@link Log} tag with which {@code GetUserMediaImpl} is to log.
|
|
43
|
-
*/
|
|
44
|
-
private static final String TAG = WebRTCModule.TAG;
|
|
45
|
-
|
|
46
|
-
private static final int PERMISSION_REQUEST_CODE = (int) (Math.random() * Short.MAX_VALUE);
|
|
47
|
-
|
|
48
|
-
private CameraEnumerator cameraEnumerator;
|
|
49
|
-
private final ReactApplicationContext reactContext;
|
|
50
|
-
|
|
51
|
-
/**
|
|
52
|
-
* The application/library-specific private members of local
|
|
53
|
-
* {@link MediaStreamTrack}s created by {@code GetUserMediaImpl} mapped by
|
|
54
|
-
* track ID.
|
|
55
|
-
*/
|
|
56
|
-
private final Map<String, TrackPrivate> tracks = new HashMap<>();
|
|
57
|
-
|
|
58
|
-
private final WebRTCModule webRTCModule;
|
|
59
|
-
|
|
60
|
-
private Promise displayMediaPromise;
|
|
61
|
-
private Intent mediaProjectionPermissionResultData;
|
|
62
|
-
|
|
63
|
-
GetUserMediaImpl(WebRTCModule webRTCModule, ReactApplicationContext reactContext) {
|
|
64
|
-
this.webRTCModule = webRTCModule;
|
|
65
|
-
this.reactContext = reactContext;
|
|
66
|
-
|
|
67
|
-
reactContext.addActivityEventListener(new BaseActivityEventListener() {
|
|
68
|
-
@Override
|
|
69
|
-
public void onActivityResult(Activity activity, int requestCode, int resultCode, Intent data) {
|
|
70
|
-
super.onActivityResult(activity, requestCode, resultCode, data);
|
|
71
|
-
if (requestCode == PERMISSION_REQUEST_CODE) {
|
|
72
|
-
if (resultCode != Activity.RESULT_OK) {
|
|
73
|
-
displayMediaPromise.reject("DOMException", "NotAllowedError");
|
|
74
|
-
displayMediaPromise = null;
|
|
75
|
-
return;
|
|
76
|
-
}
|
|
77
|
-
|
|
78
|
-
mediaProjectionPermissionResultData = data;
|
|
79
|
-
|
|
80
|
-
ThreadUtils.runOnExecutor(() -> {
|
|
81
|
-
MediaProjectionService.launch(activity);
|
|
82
|
-
createScreenStream();
|
|
83
|
-
});
|
|
84
|
-
}
|
|
85
|
-
}
|
|
86
|
-
});
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
private AudioTrack createAudioTrack(ReadableMap constraints) {
|
|
90
|
-
ReadableMap audioConstraintsMap = constraints.getMap("audio");
|
|
91
|
-
|
|
92
|
-
Log.d(TAG, "getUserMedia(audio): " + audioConstraintsMap);
|
|
93
|
-
|
|
94
|
-
String id = UUID.randomUUID().toString();
|
|
95
|
-
PeerConnectionFactory pcFactory = webRTCModule.mFactory;
|
|
96
|
-
MediaConstraints peerConstraints = webRTCModule.constraintsForOptions(audioConstraintsMap);
|
|
97
|
-
|
|
98
|
-
// Convert given constraints into the internal webrtc media constraints.
|
|
99
|
-
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googAutoGainControl",
|
|
100
|
-
audioConstraintsMap.hasKey("autoGainControl")
|
|
101
|
-
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "autoGainControl")
|
|
102
|
-
: "true"));
|
|
103
|
-
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googNoiseSuppression",
|
|
104
|
-
audioConstraintsMap.hasKey("noiseSuppression")
|
|
105
|
-
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "noiseSuppression")
|
|
106
|
-
: "true"));
|
|
107
|
-
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googEchoCancellation",
|
|
108
|
-
audioConstraintsMap.hasKey("echoCancellation")
|
|
109
|
-
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "echoCancellation")
|
|
110
|
-
: "true"));
|
|
111
|
-
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googHighpassFilter",
|
|
112
|
-
audioConstraintsMap.hasKey("highpassFilter")
|
|
113
|
-
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "highpassFilter")
|
|
114
|
-
: "true"));
|
|
115
|
-
|
|
116
|
-
// PeerConnectionFactory.createAudioSource will throw an error when mandatory constraints contain nulls.
|
|
117
|
-
// so, let's check for nulls
|
|
118
|
-
checkMandatoryConstraints(peerConstraints);
|
|
119
|
-
|
|
120
|
-
AudioSource audioSource = pcFactory.createAudioSource(peerConstraints);
|
|
121
|
-
AudioTrack track = pcFactory.createAudioTrack(id, audioSource);
|
|
122
|
-
|
|
123
|
-
// surfaceTextureHelper is initialized for videoTrack only, so its null here.
|
|
124
|
-
tracks.put(id, new TrackPrivate(track, audioSource, /* videoCapturer */ null, /* surfaceTextureHelper */ null));
|
|
125
|
-
|
|
126
|
-
return track;
|
|
127
|
-
}
|
|
128
|
-
|
|
129
|
-
private void checkMandatoryConstraints(MediaConstraints peerConstraints) {
|
|
130
|
-
ArrayList<MediaConstraints.KeyValuePair> valid = new ArrayList<>(peerConstraints.mandatory.size());
|
|
131
|
-
|
|
132
|
-
for (MediaConstraints.KeyValuePair constraint : peerConstraints.mandatory) {
|
|
133
|
-
if (constraint.getValue() != null) {
|
|
134
|
-
valid.add(constraint);
|
|
135
|
-
} else {
|
|
136
|
-
Log.d(TAG, String.format("constraint %s is null, ignoring it", constraint.getKey()));
|
|
137
|
-
}
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
peerConstraints.mandatory.clear();
|
|
141
|
-
peerConstraints.mandatory.addAll(valid);
|
|
142
|
-
}
|
|
143
|
-
|
|
144
|
-
private CameraEnumerator getCameraEnumerator() {
|
|
145
|
-
if (cameraEnumerator == null) {
|
|
146
|
-
if (Camera2Enumerator.isSupported(reactContext)) {
|
|
147
|
-
Log.d(TAG, "Creating camera enumerator using the Camera2 API");
|
|
148
|
-
cameraEnumerator = new Camera2Enumerator(reactContext);
|
|
149
|
-
} else {
|
|
150
|
-
Log.d(TAG, "Creating camera enumerator using the Camera1 API");
|
|
151
|
-
cameraEnumerator = new Camera1Enumerator(false);
|
|
152
|
-
}
|
|
153
|
-
}
|
|
154
|
-
|
|
155
|
-
return cameraEnumerator;
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
ReadableArray enumerateDevices() {
|
|
159
|
-
WritableArray array = Arguments.createArray();
|
|
160
|
-
String[] devices = getCameraEnumerator().getDeviceNames();
|
|
161
|
-
|
|
162
|
-
for (int i = 0; i < devices.length; ++i) {
|
|
163
|
-
String deviceName = devices[i];
|
|
164
|
-
boolean isFrontFacing;
|
|
165
|
-
try {
|
|
166
|
-
// This can throw an exception when using the Camera 1 API.
|
|
167
|
-
isFrontFacing = getCameraEnumerator().isFrontFacing(deviceName);
|
|
168
|
-
} catch (Exception e) {
|
|
169
|
-
Log.e(TAG, "Failed to check the facing mode of camera");
|
|
170
|
-
continue;
|
|
171
|
-
}
|
|
172
|
-
WritableMap params = Arguments.createMap();
|
|
173
|
-
params.putString("facing", isFrontFacing ? "front" : "environment");
|
|
174
|
-
params.putString("deviceId", "" + i);
|
|
175
|
-
params.putString("groupId", "");
|
|
176
|
-
params.putString("label", deviceName);
|
|
177
|
-
params.putString("kind", "videoinput");
|
|
178
|
-
array.pushMap(params);
|
|
179
|
-
}
|
|
180
|
-
|
|
181
|
-
WritableMap audio = Arguments.createMap();
|
|
182
|
-
audio.putString("deviceId", "audio-1");
|
|
183
|
-
audio.putString("groupId", "");
|
|
184
|
-
audio.putString("label", "Audio");
|
|
185
|
-
audio.putString("kind", "audioinput");
|
|
186
|
-
array.pushMap(audio);
|
|
187
|
-
|
|
188
|
-
return array;
|
|
189
|
-
}
|
|
190
|
-
|
|
191
|
-
MediaStreamTrack getTrack(String id) {
|
|
192
|
-
TrackPrivate private_ = tracks.get(id);
|
|
193
|
-
|
|
194
|
-
return private_ == null ? null : private_.track;
|
|
195
|
-
}
|
|
196
|
-
|
|
197
|
-
/**
|
|
198
|
-
* Implements {@code getUserMedia}. Note that at this point constraints have
|
|
199
|
-
* been normalized and permissions have been granted. The constraints only
|
|
200
|
-
* contain keys for which permissions have already been granted, that is,
|
|
201
|
-
* if audio permission was not granted, there will be no "audio" key in
|
|
202
|
-
* the constraints map.
|
|
203
|
-
*/
|
|
204
|
-
void getUserMedia(final ReadableMap constraints, final Callback successCallback, final Callback errorCallback) {
|
|
205
|
-
AudioTrack audioTrack = null;
|
|
206
|
-
VideoTrack videoTrack = null;
|
|
207
|
-
|
|
208
|
-
if (constraints.hasKey("audio")) {
|
|
209
|
-
audioTrack = createAudioTrack(constraints);
|
|
210
|
-
}
|
|
211
|
-
|
|
212
|
-
if (constraints.hasKey("video")) {
|
|
213
|
-
ReadableMap videoConstraintsMap = constraints.getMap("video");
|
|
214
|
-
|
|
215
|
-
Log.d(TAG, "getUserMedia(video): " + videoConstraintsMap);
|
|
216
|
-
|
|
217
|
-
CameraCaptureController cameraCaptureController = new CameraCaptureController(
|
|
218
|
-
reactContext.getCurrentActivity(), getCameraEnumerator(), videoConstraintsMap);
|
|
219
|
-
|
|
220
|
-
videoTrack = createVideoTrack(cameraCaptureController);
|
|
221
|
-
}
|
|
222
|
-
|
|
223
|
-
if (audioTrack == null && videoTrack == null) {
|
|
224
|
-
// Fail with DOMException with name AbortError as per:
|
|
225
|
-
// https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
|
|
226
|
-
errorCallback.invoke("DOMException", "AbortError");
|
|
227
|
-
return;
|
|
228
|
-
}
|
|
229
|
-
|
|
230
|
-
createStream(new MediaStreamTrack[] {audioTrack, videoTrack}, (streamId, tracksInfo) -> {
|
|
231
|
-
WritableArray tracksInfoWritableArray = Arguments.createArray();
|
|
232
|
-
|
|
233
|
-
for (WritableMap trackInfo : tracksInfo) {
|
|
234
|
-
tracksInfoWritableArray.pushMap(trackInfo);
|
|
235
|
-
}
|
|
236
|
-
|
|
237
|
-
successCallback.invoke(streamId, tracksInfoWritableArray);
|
|
238
|
-
});
|
|
239
|
-
}
|
|
240
|
-
|
|
241
|
-
void mediaStreamTrackSetEnabled(String trackId, final boolean enabled) {
|
|
242
|
-
TrackPrivate track = tracks.get(trackId);
|
|
243
|
-
if (track != null && track.videoCaptureController != null) {
|
|
244
|
-
if (enabled) {
|
|
245
|
-
track.videoCaptureController.startCapture();
|
|
246
|
-
} else {
|
|
247
|
-
track.videoCaptureController.stopCapture();
|
|
248
|
-
}
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
|
|
252
|
-
void disposeTrack(String id) {
|
|
253
|
-
TrackPrivate track = tracks.remove(id);
|
|
254
|
-
if (track != null) {
|
|
255
|
-
track.dispose();
|
|
256
|
-
}
|
|
257
|
-
}
|
|
258
|
-
|
|
259
|
-
void applyConstraints(String trackId, ReadableMap constraints, Promise promise) {
|
|
260
|
-
TrackPrivate track = tracks.get(trackId);
|
|
261
|
-
if (track != null && track.videoCaptureController instanceof AbstractVideoCaptureController) {
|
|
262
|
-
AbstractVideoCaptureController captureController =
|
|
263
|
-
(AbstractVideoCaptureController) track.videoCaptureController;
|
|
264
|
-
captureController.applyConstraints(constraints, new Consumer<Exception>() {
|
|
265
|
-
public void accept(Exception e) {
|
|
266
|
-
if (e != null) {
|
|
267
|
-
promise.reject(e);
|
|
268
|
-
return;
|
|
269
|
-
}
|
|
270
|
-
|
|
271
|
-
promise.resolve(captureController.getSettings());
|
|
272
|
-
}
|
|
273
|
-
});
|
|
274
|
-
} else {
|
|
275
|
-
promise.reject(new Exception("Camera track not found!"));
|
|
276
|
-
}
|
|
277
|
-
}
|
|
278
|
-
|
|
279
|
-
void getDisplayMedia(Promise promise) {
|
|
280
|
-
if (this.displayMediaPromise != null) {
|
|
281
|
-
promise.reject(new RuntimeException("Another operation is pending."));
|
|
282
|
-
return;
|
|
283
|
-
}
|
|
284
|
-
|
|
285
|
-
Activity currentActivity = this.reactContext.getCurrentActivity();
|
|
286
|
-
if (currentActivity == null) {
|
|
287
|
-
promise.reject(new RuntimeException("No current Activity."));
|
|
288
|
-
return;
|
|
289
|
-
}
|
|
290
|
-
|
|
291
|
-
this.displayMediaPromise = promise;
|
|
292
|
-
|
|
293
|
-
MediaProjectionManager mediaProjectionManager =
|
|
294
|
-
(MediaProjectionManager) currentActivity.getApplication().getSystemService(
|
|
295
|
-
Context.MEDIA_PROJECTION_SERVICE);
|
|
296
|
-
|
|
297
|
-
if (mediaProjectionManager != null) {
|
|
298
|
-
UiThreadUtil.runOnUiThread(new Runnable() {
|
|
299
|
-
@Override
|
|
300
|
-
public void run() {
|
|
301
|
-
currentActivity.startActivityForResult(
|
|
302
|
-
mediaProjectionManager.createScreenCaptureIntent(), PERMISSION_REQUEST_CODE);
|
|
303
|
-
}
|
|
304
|
-
});
|
|
305
|
-
|
|
306
|
-
} else {
|
|
307
|
-
promise.reject(new RuntimeException("MediaProjectionManager is null."));
|
|
308
|
-
}
|
|
309
|
-
}
|
|
310
|
-
|
|
311
|
-
private void createScreenStream() {
|
|
312
|
-
VideoTrack track = createScreenTrack();
|
|
313
|
-
|
|
314
|
-
if (track == null) {
|
|
315
|
-
displayMediaPromise.reject(new RuntimeException("ScreenTrack is null."));
|
|
316
|
-
} else {
|
|
317
|
-
createStream(new MediaStreamTrack[] {track}, (streamId, tracksInfo) -> {
|
|
318
|
-
WritableMap data = Arguments.createMap();
|
|
319
|
-
|
|
320
|
-
data.putString("streamId", streamId);
|
|
321
|
-
|
|
322
|
-
if (tracksInfo.size() == 0) {
|
|
323
|
-
displayMediaPromise.reject(new RuntimeException("No ScreenTrackInfo found."));
|
|
324
|
-
} else {
|
|
325
|
-
data.putMap("track", tracksInfo.get(0));
|
|
326
|
-
displayMediaPromise.resolve(data);
|
|
327
|
-
}
|
|
328
|
-
});
|
|
329
|
-
}
|
|
330
|
-
|
|
331
|
-
// Cleanup
|
|
332
|
-
mediaProjectionPermissionResultData = null;
|
|
333
|
-
displayMediaPromise = null;
|
|
334
|
-
}
|
|
335
|
-
|
|
336
|
-
void createStream(MediaStreamTrack[] tracks, BiConsumer<String, ArrayList<WritableMap>> successCallback) {
|
|
337
|
-
String streamId = UUID.randomUUID().toString();
|
|
338
|
-
MediaStream mediaStream = webRTCModule.mFactory.createLocalMediaStream(streamId);
|
|
339
|
-
|
|
340
|
-
ArrayList<WritableMap> tracksInfo = new ArrayList<>();
|
|
341
|
-
|
|
342
|
-
for (MediaStreamTrack track : tracks) {
|
|
343
|
-
if (track == null) {
|
|
344
|
-
continue;
|
|
345
|
-
}
|
|
346
|
-
|
|
347
|
-
if (track instanceof AudioTrack) {
|
|
348
|
-
mediaStream.addTrack((AudioTrack) track);
|
|
349
|
-
} else {
|
|
350
|
-
mediaStream.addTrack((VideoTrack) track);
|
|
351
|
-
}
|
|
352
|
-
|
|
353
|
-
WritableMap trackInfo = Arguments.createMap();
|
|
354
|
-
String trackId = track.id();
|
|
355
|
-
|
|
356
|
-
trackInfo.putBoolean("enabled", track.enabled());
|
|
357
|
-
trackInfo.putString("id", trackId);
|
|
358
|
-
trackInfo.putString("kind", track.kind());
|
|
359
|
-
trackInfo.putString("readyState", "live");
|
|
360
|
-
trackInfo.putBoolean("remote", false);
|
|
361
|
-
|
|
362
|
-
if (track instanceof VideoTrack) {
|
|
363
|
-
TrackPrivate tp = this.tracks.get(trackId);
|
|
364
|
-
AbstractVideoCaptureController vcc = tp.videoCaptureController;
|
|
365
|
-
trackInfo.putMap("settings", vcc.getSettings());
|
|
366
|
-
}
|
|
367
|
-
|
|
368
|
-
if (track instanceof AudioTrack) {
|
|
369
|
-
WritableMap settings = Arguments.createMap();
|
|
370
|
-
settings.putString("deviceId", "audio-1");
|
|
371
|
-
settings.putString("groupId", "");
|
|
372
|
-
trackInfo.putMap("settings", settings);
|
|
373
|
-
}
|
|
374
|
-
|
|
375
|
-
tracksInfo.add(trackInfo);
|
|
376
|
-
}
|
|
377
|
-
|
|
378
|
-
Log.d(TAG, "MediaStream id: " + streamId);
|
|
379
|
-
webRTCModule.localStreams.put(streamId, mediaStream);
|
|
380
|
-
|
|
381
|
-
successCallback.accept(streamId, tracksInfo);
|
|
382
|
-
}
|
|
383
|
-
|
|
384
|
-
private VideoTrack createScreenTrack() {
|
|
385
|
-
DisplayMetrics displayMetrics = DisplayUtils.getDisplayMetrics(reactContext.getCurrentActivity());
|
|
386
|
-
int width = displayMetrics.widthPixels;
|
|
387
|
-
int height = displayMetrics.heightPixels;
|
|
388
|
-
ScreenCaptureController screenCaptureController = new ScreenCaptureController(
|
|
389
|
-
reactContext.getCurrentActivity(), width, height, mediaProjectionPermissionResultData);
|
|
390
|
-
return createVideoTrack(screenCaptureController);
|
|
391
|
-
}
|
|
392
|
-
|
|
393
|
-
VideoTrack createVideoTrack(AbstractVideoCaptureController videoCaptureController) {
|
|
394
|
-
videoCaptureController.initializeVideoCapturer();
|
|
395
|
-
|
|
396
|
-
VideoCapturer videoCapturer = videoCaptureController.videoCapturer;
|
|
397
|
-
if (videoCapturer == null) {
|
|
398
|
-
return null;
|
|
399
|
-
}
|
|
400
|
-
|
|
401
|
-
PeerConnectionFactory pcFactory = webRTCModule.mFactory;
|
|
402
|
-
EglBase.Context eglContext = EglUtils.getRootEglBaseContext();
|
|
403
|
-
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglContext);
|
|
404
|
-
|
|
405
|
-
if (surfaceTextureHelper == null) {
|
|
406
|
-
Log.d(TAG, "Error creating SurfaceTextureHelper");
|
|
407
|
-
return null;
|
|
408
|
-
}
|
|
409
|
-
|
|
410
|
-
String id = UUID.randomUUID().toString();
|
|
411
|
-
|
|
412
|
-
TrackCapturerEventsEmitter eventsEmitter = new TrackCapturerEventsEmitter(webRTCModule, id);
|
|
413
|
-
videoCaptureController.setCapturerEventsListener(eventsEmitter);
|
|
414
|
-
|
|
415
|
-
VideoSource videoSource = pcFactory.createVideoSource(videoCapturer.isScreencast());
|
|
416
|
-
videoCapturer.initialize(surfaceTextureHelper, reactContext, videoSource.getCapturerObserver());
|
|
417
|
-
|
|
418
|
-
VideoTrack track = pcFactory.createVideoTrack(id, videoSource);
|
|
419
|
-
|
|
420
|
-
track.setEnabled(true);
|
|
421
|
-
tracks.put(id, new TrackPrivate(track, videoSource, videoCaptureController, surfaceTextureHelper));
|
|
422
|
-
|
|
423
|
-
videoCaptureController.startCapture();
|
|
424
|
-
|
|
425
|
-
return track;
|
|
426
|
-
}
|
|
427
|
-
|
|
428
|
-
/**
|
|
429
|
-
* Set video effects to the TrackPrivate corresponding to the trackId with the help of VideoEffectProcessor
|
|
430
|
-
* corresponding to the names.
|
|
431
|
-
* @param trackId TrackPrivate id
|
|
432
|
-
* @param names VideoEffectProcessor names
|
|
433
|
-
*/
|
|
434
|
-
void setVideoEffects(String trackId, ReadableArray names) {
|
|
435
|
-
TrackPrivate track = tracks.get(trackId);
|
|
436
|
-
|
|
437
|
-
if (track != null && track.videoCaptureController instanceof CameraCaptureController) {
|
|
438
|
-
VideoSource videoSource = (VideoSource) track.mediaSource;
|
|
439
|
-
SurfaceTextureHelper surfaceTextureHelper = track.surfaceTextureHelper;
|
|
440
|
-
|
|
441
|
-
if (names != null) {
|
|
442
|
-
List<VideoFrameProcessor> processors =
|
|
443
|
-
names.toArrayList()
|
|
444
|
-
.stream()
|
|
445
|
-
.filter(name -> name instanceof String)
|
|
446
|
-
.map(name -> {
|
|
447
|
-
VideoFrameProcessor videoFrameProcessor =
|
|
448
|
-
ProcessorProvider.getProcessor((String) name);
|
|
449
|
-
if (videoFrameProcessor == null) {
|
|
450
|
-
Log.e(TAG, "no videoFrameProcessor associated with this name: " + name);
|
|
451
|
-
}
|
|
452
|
-
return videoFrameProcessor;
|
|
453
|
-
})
|
|
454
|
-
.filter(Objects::nonNull)
|
|
455
|
-
.collect(Collectors.toList());
|
|
456
|
-
|
|
457
|
-
VideoEffectProcessor videoEffectProcessor = new VideoEffectProcessor(processors, surfaceTextureHelper);
|
|
458
|
-
videoSource.setVideoProcessor(videoEffectProcessor);
|
|
459
|
-
|
|
460
|
-
} else {
|
|
461
|
-
videoSource.setVideoProcessor(null);
|
|
462
|
-
}
|
|
463
|
-
}
|
|
464
|
-
}
|
|
465
|
-
|
|
466
|
-
/**
|
|
467
|
-
* Application/library-specific private members of local
|
|
468
|
-
* {@code MediaStreamTrack}s created by {@code GetUserMediaImpl}.
|
|
469
|
-
*/
|
|
470
|
-
private static class TrackPrivate {
|
|
471
|
-
/**
|
|
472
|
-
* The {@code MediaSource} from which {@link #track} was created.
|
|
473
|
-
*/
|
|
474
|
-
public final MediaSource mediaSource;
|
|
475
|
-
|
|
476
|
-
public final MediaStreamTrack track;
|
|
477
|
-
|
|
478
|
-
/**
|
|
479
|
-
* The {@code VideoCapturer} from which {@link #mediaSource} was created
|
|
480
|
-
* if {@link #track} is a {@link VideoTrack}.
|
|
481
|
-
*/
|
|
482
|
-
public final AbstractVideoCaptureController videoCaptureController;
|
|
483
|
-
|
|
484
|
-
private final SurfaceTextureHelper surfaceTextureHelper;
|
|
485
|
-
|
|
486
|
-
/**
|
|
487
|
-
* Whether this object has been disposed or not.
|
|
488
|
-
*/
|
|
489
|
-
private boolean disposed;
|
|
490
|
-
|
|
491
|
-
/**
|
|
492
|
-
* Initializes a new {@code TrackPrivate} instance.
|
|
493
|
-
*
|
|
494
|
-
* @param track
|
|
495
|
-
* @param mediaSource the {@code MediaSource} from which the specified
|
|
496
|
-
* {@code code} was created
|
|
497
|
-
* @param videoCaptureController the {@code AbstractVideoCaptureController} from which the
|
|
498
|
-
* specified {@code mediaSource} was created if the specified
|
|
499
|
-
* {@code track} is a {@link VideoTrack}
|
|
500
|
-
*/
|
|
501
|
-
public TrackPrivate(MediaStreamTrack track, MediaSource mediaSource,
|
|
502
|
-
AbstractVideoCaptureController videoCaptureController, SurfaceTextureHelper surfaceTextureHelper) {
|
|
503
|
-
this.track = track;
|
|
504
|
-
this.mediaSource = mediaSource;
|
|
505
|
-
this.videoCaptureController = videoCaptureController;
|
|
506
|
-
this.surfaceTextureHelper = surfaceTextureHelper;
|
|
507
|
-
this.disposed = false;
|
|
508
|
-
}
|
|
509
|
-
|
|
510
|
-
public void dispose() {
|
|
511
|
-
if (!disposed) {
|
|
512
|
-
if (videoCaptureController != null) {
|
|
513
|
-
if (videoCaptureController.stopCapture()) {
|
|
514
|
-
videoCaptureController.dispose();
|
|
515
|
-
}
|
|
516
|
-
}
|
|
517
|
-
|
|
518
|
-
/*
|
|
519
|
-
* As per webrtc library documentation - The caller still has ownership of {@code
|
|
520
|
-
* surfaceTextureHelper} and is responsible for making sure surfaceTextureHelper.dispose() is
|
|
521
|
-
* called. This also means that the caller can reuse the SurfaceTextureHelper to initialize a new
|
|
522
|
-
* VideoCapturer once the previous VideoCapturer has been disposed. */
|
|
523
|
-
|
|
524
|
-
if (surfaceTextureHelper != null) {
|
|
525
|
-
surfaceTextureHelper.stopListening();
|
|
526
|
-
surfaceTextureHelper.dispose();
|
|
527
|
-
}
|
|
528
|
-
|
|
529
|
-
mediaSource.dispose();
|
|
530
|
-
track.dispose();
|
|
531
|
-
disposed = true;
|
|
532
|
-
}
|
|
533
|
-
}
|
|
534
|
-
}
|
|
535
|
-
|
|
536
|
-
public interface BiConsumer<T, U> {
|
|
537
|
-
void accept(T t, U u);
|
|
538
|
-
}
|
|
539
|
-
}
|
|
1
|
+
package com.oney.WebRTCModule;
|
|
2
|
+
|
|
3
|
+
import android.app.Activity;
|
|
4
|
+
import android.content.Context;
|
|
5
|
+
import android.content.Intent;
|
|
6
|
+
import android.media.projection.MediaProjectionManager;
|
|
7
|
+
import android.util.DisplayMetrics;
|
|
8
|
+
import android.util.Log;
|
|
9
|
+
|
|
10
|
+
import androidx.core.util.Consumer;
|
|
11
|
+
|
|
12
|
+
import com.facebook.react.bridge.Arguments;
|
|
13
|
+
import com.facebook.react.bridge.BaseActivityEventListener;
|
|
14
|
+
import com.facebook.react.bridge.Callback;
|
|
15
|
+
import com.facebook.react.bridge.Promise;
|
|
16
|
+
import com.facebook.react.bridge.ReactApplicationContext;
|
|
17
|
+
import com.facebook.react.bridge.ReadableArray;
|
|
18
|
+
import com.facebook.react.bridge.ReadableMap;
|
|
19
|
+
import com.facebook.react.bridge.UiThreadUtil;
|
|
20
|
+
import com.facebook.react.bridge.WritableArray;
|
|
21
|
+
import com.facebook.react.bridge.WritableMap;
|
|
22
|
+
import com.oney.WebRTCModule.videoEffects.ProcessorProvider;
|
|
23
|
+
import com.oney.WebRTCModule.videoEffects.VideoEffectProcessor;
|
|
24
|
+
import com.oney.WebRTCModule.videoEffects.VideoFrameProcessor;
|
|
25
|
+
|
|
26
|
+
import org.webrtc.*;
|
|
27
|
+
|
|
28
|
+
import java.util.ArrayList;
|
|
29
|
+
import java.util.HashMap;
|
|
30
|
+
import java.util.List;
|
|
31
|
+
import java.util.Map;
|
|
32
|
+
import java.util.Objects;
|
|
33
|
+
import java.util.UUID;
|
|
34
|
+
import java.util.stream.Collectors;
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* The implementation of {@code getUserMedia} extracted into a separate file in
|
|
38
|
+
* order to reduce complexity and to (somewhat) separate concerns.
|
|
39
|
+
*/
|
|
40
|
+
class GetUserMediaImpl {
|
|
41
|
+
/**
|
|
42
|
+
* The {@link Log} tag with which {@code GetUserMediaImpl} is to log.
|
|
43
|
+
*/
|
|
44
|
+
private static final String TAG = WebRTCModule.TAG;
|
|
45
|
+
|
|
46
|
+
private static final int PERMISSION_REQUEST_CODE = (int) (Math.random() * Short.MAX_VALUE);
|
|
47
|
+
|
|
48
|
+
private CameraEnumerator cameraEnumerator;
|
|
49
|
+
private final ReactApplicationContext reactContext;
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* The application/library-specific private members of local
|
|
53
|
+
* {@link MediaStreamTrack}s created by {@code GetUserMediaImpl} mapped by
|
|
54
|
+
* track ID.
|
|
55
|
+
*/
|
|
56
|
+
private final Map<String, TrackPrivate> tracks = new HashMap<>();
|
|
57
|
+
|
|
58
|
+
private final WebRTCModule webRTCModule;
|
|
59
|
+
|
|
60
|
+
private Promise displayMediaPromise;
|
|
61
|
+
private Intent mediaProjectionPermissionResultData;
|
|
62
|
+
|
|
63
|
+
GetUserMediaImpl(WebRTCModule webRTCModule, ReactApplicationContext reactContext) {
|
|
64
|
+
this.webRTCModule = webRTCModule;
|
|
65
|
+
this.reactContext = reactContext;
|
|
66
|
+
|
|
67
|
+
reactContext.addActivityEventListener(new BaseActivityEventListener() {
|
|
68
|
+
@Override
|
|
69
|
+
public void onActivityResult(Activity activity, int requestCode, int resultCode, Intent data) {
|
|
70
|
+
super.onActivityResult(activity, requestCode, resultCode, data);
|
|
71
|
+
if (requestCode == PERMISSION_REQUEST_CODE) {
|
|
72
|
+
if (resultCode != Activity.RESULT_OK) {
|
|
73
|
+
displayMediaPromise.reject("DOMException", "NotAllowedError");
|
|
74
|
+
displayMediaPromise = null;
|
|
75
|
+
return;
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
mediaProjectionPermissionResultData = data;
|
|
79
|
+
|
|
80
|
+
ThreadUtils.runOnExecutor(() -> {
|
|
81
|
+
MediaProjectionService.launch(activity);
|
|
82
|
+
createScreenStream();
|
|
83
|
+
});
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
});
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
private AudioTrack createAudioTrack(ReadableMap constraints) {
|
|
90
|
+
ReadableMap audioConstraintsMap = constraints.getMap("audio");
|
|
91
|
+
|
|
92
|
+
Log.d(TAG, "getUserMedia(audio): " + audioConstraintsMap);
|
|
93
|
+
|
|
94
|
+
String id = UUID.randomUUID().toString();
|
|
95
|
+
PeerConnectionFactory pcFactory = webRTCModule.mFactory;
|
|
96
|
+
MediaConstraints peerConstraints = webRTCModule.constraintsForOptions(audioConstraintsMap);
|
|
97
|
+
|
|
98
|
+
// Convert given constraints into the internal webrtc media constraints.
|
|
99
|
+
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googAutoGainControl",
|
|
100
|
+
audioConstraintsMap.hasKey("autoGainControl")
|
|
101
|
+
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "autoGainControl")
|
|
102
|
+
: "true"));
|
|
103
|
+
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googNoiseSuppression",
|
|
104
|
+
audioConstraintsMap.hasKey("noiseSuppression")
|
|
105
|
+
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "noiseSuppression")
|
|
106
|
+
: "true"));
|
|
107
|
+
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googEchoCancellation",
|
|
108
|
+
audioConstraintsMap.hasKey("echoCancellation")
|
|
109
|
+
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "echoCancellation")
|
|
110
|
+
: "true"));
|
|
111
|
+
peerConstraints.optional.add(new MediaConstraints.KeyValuePair("googHighpassFilter",
|
|
112
|
+
audioConstraintsMap.hasKey("highpassFilter")
|
|
113
|
+
? ReactBridgeUtil.getMapStrValue(audioConstraintsMap, "highpassFilter")
|
|
114
|
+
: "true"));
|
|
115
|
+
|
|
116
|
+
// PeerConnectionFactory.createAudioSource will throw an error when mandatory constraints contain nulls.
|
|
117
|
+
// so, let's check for nulls
|
|
118
|
+
checkMandatoryConstraints(peerConstraints);
|
|
119
|
+
|
|
120
|
+
AudioSource audioSource = pcFactory.createAudioSource(peerConstraints);
|
|
121
|
+
AudioTrack track = pcFactory.createAudioTrack(id, audioSource);
|
|
122
|
+
|
|
123
|
+
// surfaceTextureHelper is initialized for videoTrack only, so its null here.
|
|
124
|
+
tracks.put(id, new TrackPrivate(track, audioSource, /* videoCapturer */ null, /* surfaceTextureHelper */ null));
|
|
125
|
+
|
|
126
|
+
return track;
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
private void checkMandatoryConstraints(MediaConstraints peerConstraints) {
|
|
130
|
+
ArrayList<MediaConstraints.KeyValuePair> valid = new ArrayList<>(peerConstraints.mandatory.size());
|
|
131
|
+
|
|
132
|
+
for (MediaConstraints.KeyValuePair constraint : peerConstraints.mandatory) {
|
|
133
|
+
if (constraint.getValue() != null) {
|
|
134
|
+
valid.add(constraint);
|
|
135
|
+
} else {
|
|
136
|
+
Log.d(TAG, String.format("constraint %s is null, ignoring it", constraint.getKey()));
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
peerConstraints.mandatory.clear();
|
|
141
|
+
peerConstraints.mandatory.addAll(valid);
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
private CameraEnumerator getCameraEnumerator() {
|
|
145
|
+
if (cameraEnumerator == null) {
|
|
146
|
+
if (Camera2Enumerator.isSupported(reactContext)) {
|
|
147
|
+
Log.d(TAG, "Creating camera enumerator using the Camera2 API");
|
|
148
|
+
cameraEnumerator = new Camera2Enumerator(reactContext);
|
|
149
|
+
} else {
|
|
150
|
+
Log.d(TAG, "Creating camera enumerator using the Camera1 API");
|
|
151
|
+
cameraEnumerator = new Camera1Enumerator(false);
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
return cameraEnumerator;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
ReadableArray enumerateDevices() {
|
|
159
|
+
WritableArray array = Arguments.createArray();
|
|
160
|
+
String[] devices = getCameraEnumerator().getDeviceNames();
|
|
161
|
+
|
|
162
|
+
for (int i = 0; i < devices.length; ++i) {
|
|
163
|
+
String deviceName = devices[i];
|
|
164
|
+
boolean isFrontFacing;
|
|
165
|
+
try {
|
|
166
|
+
// This can throw an exception when using the Camera 1 API.
|
|
167
|
+
isFrontFacing = getCameraEnumerator().isFrontFacing(deviceName);
|
|
168
|
+
} catch (Exception e) {
|
|
169
|
+
Log.e(TAG, "Failed to check the facing mode of camera");
|
|
170
|
+
continue;
|
|
171
|
+
}
|
|
172
|
+
WritableMap params = Arguments.createMap();
|
|
173
|
+
params.putString("facing", isFrontFacing ? "front" : "environment");
|
|
174
|
+
params.putString("deviceId", "" + i);
|
|
175
|
+
params.putString("groupId", "");
|
|
176
|
+
params.putString("label", deviceName);
|
|
177
|
+
params.putString("kind", "videoinput");
|
|
178
|
+
array.pushMap(params);
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
WritableMap audio = Arguments.createMap();
|
|
182
|
+
audio.putString("deviceId", "audio-1");
|
|
183
|
+
audio.putString("groupId", "");
|
|
184
|
+
audio.putString("label", "Audio");
|
|
185
|
+
audio.putString("kind", "audioinput");
|
|
186
|
+
array.pushMap(audio);
|
|
187
|
+
|
|
188
|
+
return array;
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
MediaStreamTrack getTrack(String id) {
|
|
192
|
+
TrackPrivate private_ = tracks.get(id);
|
|
193
|
+
|
|
194
|
+
return private_ == null ? null : private_.track;
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
/**
|
|
198
|
+
* Implements {@code getUserMedia}. Note that at this point constraints have
|
|
199
|
+
* been normalized and permissions have been granted. The constraints only
|
|
200
|
+
* contain keys for which permissions have already been granted, that is,
|
|
201
|
+
* if audio permission was not granted, there will be no "audio" key in
|
|
202
|
+
* the constraints map.
|
|
203
|
+
*/
|
|
204
|
+
void getUserMedia(final ReadableMap constraints, final Callback successCallback, final Callback errorCallback) {
|
|
205
|
+
AudioTrack audioTrack = null;
|
|
206
|
+
VideoTrack videoTrack = null;
|
|
207
|
+
|
|
208
|
+
if (constraints.hasKey("audio")) {
|
|
209
|
+
audioTrack = createAudioTrack(constraints);
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
if (constraints.hasKey("video")) {
|
|
213
|
+
ReadableMap videoConstraintsMap = constraints.getMap("video");
|
|
214
|
+
|
|
215
|
+
Log.d(TAG, "getUserMedia(video): " + videoConstraintsMap);
|
|
216
|
+
|
|
217
|
+
CameraCaptureController cameraCaptureController = new CameraCaptureController(
|
|
218
|
+
reactContext.getCurrentActivity(), getCameraEnumerator(), videoConstraintsMap);
|
|
219
|
+
|
|
220
|
+
videoTrack = createVideoTrack(cameraCaptureController);
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
if (audioTrack == null && videoTrack == null) {
|
|
224
|
+
// Fail with DOMException with name AbortError as per:
|
|
225
|
+
// https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
|
|
226
|
+
errorCallback.invoke("DOMException", "AbortError");
|
|
227
|
+
return;
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
createStream(new MediaStreamTrack[] {audioTrack, videoTrack}, (streamId, tracksInfo) -> {
|
|
231
|
+
WritableArray tracksInfoWritableArray = Arguments.createArray();
|
|
232
|
+
|
|
233
|
+
for (WritableMap trackInfo : tracksInfo) {
|
|
234
|
+
tracksInfoWritableArray.pushMap(trackInfo);
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
successCallback.invoke(streamId, tracksInfoWritableArray);
|
|
238
|
+
});
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
void mediaStreamTrackSetEnabled(String trackId, final boolean enabled) {
|
|
242
|
+
TrackPrivate track = tracks.get(trackId);
|
|
243
|
+
if (track != null && track.videoCaptureController != null) {
|
|
244
|
+
if (enabled) {
|
|
245
|
+
track.videoCaptureController.startCapture();
|
|
246
|
+
} else {
|
|
247
|
+
track.videoCaptureController.stopCapture();
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
void disposeTrack(String id) {
|
|
253
|
+
TrackPrivate track = tracks.remove(id);
|
|
254
|
+
if (track != null) {
|
|
255
|
+
track.dispose();
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
void applyConstraints(String trackId, ReadableMap constraints, Promise promise) {
|
|
260
|
+
TrackPrivate track = tracks.get(trackId);
|
|
261
|
+
if (track != null && track.videoCaptureController instanceof AbstractVideoCaptureController) {
|
|
262
|
+
AbstractVideoCaptureController captureController =
|
|
263
|
+
(AbstractVideoCaptureController) track.videoCaptureController;
|
|
264
|
+
captureController.applyConstraints(constraints, new Consumer<Exception>() {
|
|
265
|
+
public void accept(Exception e) {
|
|
266
|
+
if (e != null) {
|
|
267
|
+
promise.reject(e);
|
|
268
|
+
return;
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
promise.resolve(captureController.getSettings());
|
|
272
|
+
}
|
|
273
|
+
});
|
|
274
|
+
} else {
|
|
275
|
+
promise.reject(new Exception("Camera track not found!"));
|
|
276
|
+
}
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
void getDisplayMedia(Promise promise) {
|
|
280
|
+
if (this.displayMediaPromise != null) {
|
|
281
|
+
promise.reject(new RuntimeException("Another operation is pending."));
|
|
282
|
+
return;
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
Activity currentActivity = this.reactContext.getCurrentActivity();
|
|
286
|
+
if (currentActivity == null) {
|
|
287
|
+
promise.reject(new RuntimeException("No current Activity."));
|
|
288
|
+
return;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
this.displayMediaPromise = promise;
|
|
292
|
+
|
|
293
|
+
MediaProjectionManager mediaProjectionManager =
|
|
294
|
+
(MediaProjectionManager) currentActivity.getApplication().getSystemService(
|
|
295
|
+
Context.MEDIA_PROJECTION_SERVICE);
|
|
296
|
+
|
|
297
|
+
if (mediaProjectionManager != null) {
|
|
298
|
+
UiThreadUtil.runOnUiThread(new Runnable() {
|
|
299
|
+
@Override
|
|
300
|
+
public void run() {
|
|
301
|
+
currentActivity.startActivityForResult(
|
|
302
|
+
mediaProjectionManager.createScreenCaptureIntent(), PERMISSION_REQUEST_CODE);
|
|
303
|
+
}
|
|
304
|
+
});
|
|
305
|
+
|
|
306
|
+
} else {
|
|
307
|
+
promise.reject(new RuntimeException("MediaProjectionManager is null."));
|
|
308
|
+
}
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
private void createScreenStream() {
|
|
312
|
+
VideoTrack track = createScreenTrack();
|
|
313
|
+
|
|
314
|
+
if (track == null) {
|
|
315
|
+
displayMediaPromise.reject(new RuntimeException("ScreenTrack is null."));
|
|
316
|
+
} else {
|
|
317
|
+
createStream(new MediaStreamTrack[] {track}, (streamId, tracksInfo) -> {
|
|
318
|
+
WritableMap data = Arguments.createMap();
|
|
319
|
+
|
|
320
|
+
data.putString("streamId", streamId);
|
|
321
|
+
|
|
322
|
+
if (tracksInfo.size() == 0) {
|
|
323
|
+
displayMediaPromise.reject(new RuntimeException("No ScreenTrackInfo found."));
|
|
324
|
+
} else {
|
|
325
|
+
data.putMap("track", tracksInfo.get(0));
|
|
326
|
+
displayMediaPromise.resolve(data);
|
|
327
|
+
}
|
|
328
|
+
});
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
// Cleanup
|
|
332
|
+
mediaProjectionPermissionResultData = null;
|
|
333
|
+
displayMediaPromise = null;
|
|
334
|
+
}
|
|
335
|
+
|
|
336
|
+
void createStream(MediaStreamTrack[] tracks, BiConsumer<String, ArrayList<WritableMap>> successCallback) {
|
|
337
|
+
String streamId = UUID.randomUUID().toString();
|
|
338
|
+
MediaStream mediaStream = webRTCModule.mFactory.createLocalMediaStream(streamId);
|
|
339
|
+
|
|
340
|
+
ArrayList<WritableMap> tracksInfo = new ArrayList<>();
|
|
341
|
+
|
|
342
|
+
for (MediaStreamTrack track : tracks) {
|
|
343
|
+
if (track == null) {
|
|
344
|
+
continue;
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
if (track instanceof AudioTrack) {
|
|
348
|
+
mediaStream.addTrack((AudioTrack) track);
|
|
349
|
+
} else {
|
|
350
|
+
mediaStream.addTrack((VideoTrack) track);
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
WritableMap trackInfo = Arguments.createMap();
|
|
354
|
+
String trackId = track.id();
|
|
355
|
+
|
|
356
|
+
trackInfo.putBoolean("enabled", track.enabled());
|
|
357
|
+
trackInfo.putString("id", trackId);
|
|
358
|
+
trackInfo.putString("kind", track.kind());
|
|
359
|
+
trackInfo.putString("readyState", "live");
|
|
360
|
+
trackInfo.putBoolean("remote", false);
|
|
361
|
+
|
|
362
|
+
if (track instanceof VideoTrack) {
|
|
363
|
+
TrackPrivate tp = this.tracks.get(trackId);
|
|
364
|
+
AbstractVideoCaptureController vcc = tp.videoCaptureController;
|
|
365
|
+
trackInfo.putMap("settings", vcc.getSettings());
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
if (track instanceof AudioTrack) {
|
|
369
|
+
WritableMap settings = Arguments.createMap();
|
|
370
|
+
settings.putString("deviceId", "audio-1");
|
|
371
|
+
settings.putString("groupId", "");
|
|
372
|
+
trackInfo.putMap("settings", settings);
|
|
373
|
+
}
|
|
374
|
+
|
|
375
|
+
tracksInfo.add(trackInfo);
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
Log.d(TAG, "MediaStream id: " + streamId);
|
|
379
|
+
webRTCModule.localStreams.put(streamId, mediaStream);
|
|
380
|
+
|
|
381
|
+
successCallback.accept(streamId, tracksInfo);
|
|
382
|
+
}
|
|
383
|
+
|
|
384
|
+
private VideoTrack createScreenTrack() {
|
|
385
|
+
DisplayMetrics displayMetrics = DisplayUtils.getDisplayMetrics(reactContext.getCurrentActivity());
|
|
386
|
+
int width = displayMetrics.widthPixels;
|
|
387
|
+
int height = displayMetrics.heightPixels;
|
|
388
|
+
ScreenCaptureController screenCaptureController = new ScreenCaptureController(
|
|
389
|
+
reactContext.getCurrentActivity(), width, height, mediaProjectionPermissionResultData);
|
|
390
|
+
return createVideoTrack(screenCaptureController);
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
VideoTrack createVideoTrack(AbstractVideoCaptureController videoCaptureController) {
|
|
394
|
+
videoCaptureController.initializeVideoCapturer();
|
|
395
|
+
|
|
396
|
+
VideoCapturer videoCapturer = videoCaptureController.videoCapturer;
|
|
397
|
+
if (videoCapturer == null) {
|
|
398
|
+
return null;
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
PeerConnectionFactory pcFactory = webRTCModule.mFactory;
|
|
402
|
+
EglBase.Context eglContext = EglUtils.getRootEglBaseContext();
|
|
403
|
+
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglContext);
|
|
404
|
+
|
|
405
|
+
if (surfaceTextureHelper == null) {
|
|
406
|
+
Log.d(TAG, "Error creating SurfaceTextureHelper");
|
|
407
|
+
return null;
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
String id = UUID.randomUUID().toString();
|
|
411
|
+
|
|
412
|
+
TrackCapturerEventsEmitter eventsEmitter = new TrackCapturerEventsEmitter(webRTCModule, id);
|
|
413
|
+
videoCaptureController.setCapturerEventsListener(eventsEmitter);
|
|
414
|
+
|
|
415
|
+
VideoSource videoSource = pcFactory.createVideoSource(videoCapturer.isScreencast());
|
|
416
|
+
videoCapturer.initialize(surfaceTextureHelper, reactContext, videoSource.getCapturerObserver());
|
|
417
|
+
|
|
418
|
+
VideoTrack track = pcFactory.createVideoTrack(id, videoSource);
|
|
419
|
+
|
|
420
|
+
track.setEnabled(true);
|
|
421
|
+
tracks.put(id, new TrackPrivate(track, videoSource, videoCaptureController, surfaceTextureHelper));
|
|
422
|
+
|
|
423
|
+
videoCaptureController.startCapture();
|
|
424
|
+
|
|
425
|
+
return track;
|
|
426
|
+
}
|
|
427
|
+
|
|
428
|
+
/**
|
|
429
|
+
* Set video effects to the TrackPrivate corresponding to the trackId with the help of VideoEffectProcessor
|
|
430
|
+
* corresponding to the names.
|
|
431
|
+
* @param trackId TrackPrivate id
|
|
432
|
+
* @param names VideoEffectProcessor names
|
|
433
|
+
*/
|
|
434
|
+
void setVideoEffects(String trackId, ReadableArray names) {
|
|
435
|
+
TrackPrivate track = tracks.get(trackId);
|
|
436
|
+
|
|
437
|
+
if (track != null && track.videoCaptureController instanceof CameraCaptureController) {
|
|
438
|
+
VideoSource videoSource = (VideoSource) track.mediaSource;
|
|
439
|
+
SurfaceTextureHelper surfaceTextureHelper = track.surfaceTextureHelper;
|
|
440
|
+
|
|
441
|
+
if (names != null) {
|
|
442
|
+
List<VideoFrameProcessor> processors =
|
|
443
|
+
names.toArrayList()
|
|
444
|
+
.stream()
|
|
445
|
+
.filter(name -> name instanceof String)
|
|
446
|
+
.map(name -> {
|
|
447
|
+
VideoFrameProcessor videoFrameProcessor =
|
|
448
|
+
ProcessorProvider.getProcessor((String) name);
|
|
449
|
+
if (videoFrameProcessor == null) {
|
|
450
|
+
Log.e(TAG, "no videoFrameProcessor associated with this name: " + name);
|
|
451
|
+
}
|
|
452
|
+
return videoFrameProcessor;
|
|
453
|
+
})
|
|
454
|
+
.filter(Objects::nonNull)
|
|
455
|
+
.collect(Collectors.toList());
|
|
456
|
+
|
|
457
|
+
VideoEffectProcessor videoEffectProcessor = new VideoEffectProcessor(processors, surfaceTextureHelper);
|
|
458
|
+
videoSource.setVideoProcessor(videoEffectProcessor);
|
|
459
|
+
|
|
460
|
+
} else {
|
|
461
|
+
videoSource.setVideoProcessor(null);
|
|
462
|
+
}
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
/**
|
|
467
|
+
* Application/library-specific private members of local
|
|
468
|
+
* {@code MediaStreamTrack}s created by {@code GetUserMediaImpl}.
|
|
469
|
+
*/
|
|
470
|
+
private static class TrackPrivate {
|
|
471
|
+
/**
|
|
472
|
+
* The {@code MediaSource} from which {@link #track} was created.
|
|
473
|
+
*/
|
|
474
|
+
public final MediaSource mediaSource;
|
|
475
|
+
|
|
476
|
+
public final MediaStreamTrack track;
|
|
477
|
+
|
|
478
|
+
/**
|
|
479
|
+
* The {@code VideoCapturer} from which {@link #mediaSource} was created
|
|
480
|
+
* if {@link #track} is a {@link VideoTrack}.
|
|
481
|
+
*/
|
|
482
|
+
public final AbstractVideoCaptureController videoCaptureController;
|
|
483
|
+
|
|
484
|
+
private final SurfaceTextureHelper surfaceTextureHelper;
|
|
485
|
+
|
|
486
|
+
/**
|
|
487
|
+
* Whether this object has been disposed or not.
|
|
488
|
+
*/
|
|
489
|
+
private boolean disposed;
|
|
490
|
+
|
|
491
|
+
/**
|
|
492
|
+
* Initializes a new {@code TrackPrivate} instance.
|
|
493
|
+
*
|
|
494
|
+
* @param track
|
|
495
|
+
* @param mediaSource the {@code MediaSource} from which the specified
|
|
496
|
+
* {@code code} was created
|
|
497
|
+
* @param videoCaptureController the {@code AbstractVideoCaptureController} from which the
|
|
498
|
+
* specified {@code mediaSource} was created if the specified
|
|
499
|
+
* {@code track} is a {@link VideoTrack}
|
|
500
|
+
*/
|
|
501
|
+
public TrackPrivate(MediaStreamTrack track, MediaSource mediaSource,
|
|
502
|
+
AbstractVideoCaptureController videoCaptureController, SurfaceTextureHelper surfaceTextureHelper) {
|
|
503
|
+
this.track = track;
|
|
504
|
+
this.mediaSource = mediaSource;
|
|
505
|
+
this.videoCaptureController = videoCaptureController;
|
|
506
|
+
this.surfaceTextureHelper = surfaceTextureHelper;
|
|
507
|
+
this.disposed = false;
|
|
508
|
+
}
|
|
509
|
+
|
|
510
|
+
public void dispose() {
|
|
511
|
+
if (!disposed) {
|
|
512
|
+
if (videoCaptureController != null) {
|
|
513
|
+
if (videoCaptureController.stopCapture()) {
|
|
514
|
+
videoCaptureController.dispose();
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
/*
|
|
519
|
+
* As per webrtc library documentation - The caller still has ownership of {@code
|
|
520
|
+
* surfaceTextureHelper} and is responsible for making sure surfaceTextureHelper.dispose() is
|
|
521
|
+
* called. This also means that the caller can reuse the SurfaceTextureHelper to initialize a new
|
|
522
|
+
* VideoCapturer once the previous VideoCapturer has been disposed. */
|
|
523
|
+
|
|
524
|
+
if (surfaceTextureHelper != null) {
|
|
525
|
+
surfaceTextureHelper.stopListening();
|
|
526
|
+
surfaceTextureHelper.dispose();
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
mediaSource.dispose();
|
|
530
|
+
track.dispose();
|
|
531
|
+
disposed = true;
|
|
532
|
+
}
|
|
533
|
+
}
|
|
534
|
+
}
|
|
535
|
+
|
|
536
|
+
/**
 * An operation that accepts two input arguments and returns no result.
 * Mirrors {@code java.util.function.BiConsumer}; presumably declared locally
 * for compatibility with Android API levels below 24 — TODO confirm.
 *
 * @param <T> the type of the first argument
 * @param <U> the type of the second argument
 */
@FunctionalInterface
public interface BiConsumer<T, U> {
    /**
     * Performs this operation on the given arguments.
     *
     * @param t the first argument
     * @param u the second argument
     */
    void accept(T t, U u);
}
|
|
539
|
+
}
|