@sbhjt-gr/react-native-webrtc 124.0.3 → 124.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.clang-format +11 -11
- package/.eslintignore +6 -6
- package/.nvmrc +1 -1
- package/ISSUE_TEMPLATE.md +40 -40
- package/LICENSE +22 -22
- package/README.md +103 -103
- package/android/build.gradle +37 -37
- package/android/consumer-rules.pro +3 -3
- package/android/src/main/AndroidManifest.xml +11 -11
- package/android/src/main/java/com/oney/WebRTCModule/AbstractVideoCaptureController.java +113 -113
- package/android/src/main/java/com/oney/WebRTCModule/CameraCaptureController.java +338 -338
- package/android/src/main/java/com/oney/WebRTCModule/CameraEventsHandler.java +49 -49
- package/android/src/main/java/com/oney/WebRTCModule/DataChannelWrapper.java +99 -99
- package/android/src/main/java/com/oney/WebRTCModule/DataPacketCryptorManager.java +62 -62
- package/android/src/main/java/com/oney/WebRTCModule/DisplayUtils.java +16 -16
- package/android/src/main/java/com/oney/WebRTCModule/EglUtils.java +66 -66
- package/android/src/main/java/com/oney/WebRTCModule/GetUserMediaImpl.java +539 -539
- package/android/src/main/java/com/oney/WebRTCModule/LibraryLoader.java +21 -21
- package/android/src/main/java/com/oney/WebRTCModule/MediaProjectionNotification.java +70 -70
- package/android/src/main/java/com/oney/WebRTCModule/MediaProjectionService.java +82 -82
- package/android/src/main/java/com/oney/WebRTCModule/PeerConnectionObserver.java +588 -588
- package/android/src/main/java/com/oney/WebRTCModule/RTCCryptoManager.java +493 -493
- package/android/src/main/java/com/oney/WebRTCModule/RTCVideoViewManager.java +98 -98
- package/android/src/main/java/com/oney/WebRTCModule/ReactBridgeUtil.java +35 -35
- package/android/src/main/java/com/oney/WebRTCModule/ScreenCaptureController.java +94 -94
- package/android/src/main/java/com/oney/WebRTCModule/SerializeUtils.java +342 -342
- package/android/src/main/java/com/oney/WebRTCModule/StringUtils.java +100 -100
- package/android/src/main/java/com/oney/WebRTCModule/ThreadUtils.java +41 -41
- package/android/src/main/java/com/oney/WebRTCModule/TrackCapturerEventsEmitter.java +34 -34
- package/android/src/main/java/com/oney/WebRTCModule/VideoTrackAdapter.java +137 -137
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java +1649 -1643
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCModuleOptions.java +33 -33
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCModulePackage.java +21 -21
- package/android/src/main/java/com/oney/WebRTCModule/WebRTCView.java +583 -583
- package/android/src/main/java/com/oney/WebRTCModule/palabra/PalabraClient.java +529 -501
- package/android/src/main/java/com/oney/WebRTCModule/palabra/PalabraConfig.java +17 -17
- package/android/src/main/java/com/oney/WebRTCModule/palabra/PalabraListener.java +7 -7
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/ProcessorProvider.java +38 -38
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/VideoEffectProcessor.java +59 -59
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/VideoFrameProcessor.java +19 -19
- package/android/src/main/java/com/oney/WebRTCModule/videoEffects/VideoFrameProcessorFactoryInterface.java +12 -12
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/H264AndSoftwareVideoDecoderFactory.java +73 -73
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/H264AndSoftwareVideoEncoderFactory.java +73 -73
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/SoftwareVideoDecoderFactoryProxy.java +36 -36
- package/android/src/main/java/com/oney/WebRTCModule/webrtcutils/SoftwareVideoEncoderFactoryProxy.java +36 -36
- package/android/src/main/java/org/webrtc/Camera1Helper.java +54 -54
- package/android/src/main/java/org/webrtc/Camera2Helper.java +52 -52
- package/android/src/main/res/values/strings.xml +5 -5
- package/android/src/main/res/values/styles.xml +8 -8
- package/ios/RCTWebRTC/CaptureController.h +18 -18
- package/ios/RCTWebRTC/CaptureController.m +28 -28
- package/ios/RCTWebRTC/CapturerEventsDelegate.h +12 -12
- package/ios/RCTWebRTC/DataChannelWrapper.h +27 -27
- package/ios/RCTWebRTC/DataChannelWrapper.m +42 -42
- package/ios/RCTWebRTC/I420Converter.h +22 -22
- package/ios/RCTWebRTC/I420Converter.m +164 -164
- package/ios/RCTWebRTC/PIPController.h +24 -24
- package/ios/RCTWebRTC/PIPController.m +234 -234
- package/ios/RCTWebRTC/PalabraAudioSink.h +13 -13
- package/ios/RCTWebRTC/PalabraAudioSink.m +18 -18
- package/ios/RCTWebRTC/PalabraClient.h +42 -36
- package/ios/RCTWebRTC/PalabraClient.m +680 -644
- package/ios/RCTWebRTC/RCTConvert+WebRTC.h +16 -16
- package/ios/RCTWebRTC/RCTConvert+WebRTC.m +206 -206
- package/ios/RCTWebRTC/RTCMediaStreamTrack+React.h +10 -10
- package/ios/RCTWebRTC/RTCMediaStreamTrack+React.m +16 -16
- package/ios/RCTWebRTC/RTCVideoViewManager.h +29 -29
- package/ios/RCTWebRTC/RTCVideoViewManager.m +411 -411
- package/ios/RCTWebRTC/SampleBufferVideoCallView.h +12 -12
- package/ios/RCTWebRTC/SampleBufferVideoCallView.m +178 -178
- package/ios/RCTWebRTC/ScreenCaptureController.h +20 -20
- package/ios/RCTWebRTC/ScreenCaptureController.m +82 -82
- package/ios/RCTWebRTC/ScreenCapturePickerViewManager.h +7 -7
- package/ios/RCTWebRTC/ScreenCapturePickerViewManager.m +59 -59
- package/ios/RCTWebRTC/ScreenCapturer.h +19 -19
- package/ios/RCTWebRTC/ScreenCapturer.m +263 -263
- package/ios/RCTWebRTC/SerializeUtils.h +28 -28
- package/ios/RCTWebRTC/SerializeUtils.m +314 -314
- package/ios/RCTWebRTC/SocketConnection.h +13 -13
- package/ios/RCTWebRTC/SocketConnection.m +137 -137
- package/ios/RCTWebRTC/TrackCapturerEventsEmitter.h +14 -14
- package/ios/RCTWebRTC/TrackCapturerEventsEmitter.m +36 -36
- package/ios/RCTWebRTC/VideoCaptureController.h +21 -21
- package/ios/RCTWebRTC/VideoCaptureController.m +328 -328
- package/ios/RCTWebRTC/WLVAudioDevice.h +12 -12
- package/ios/RCTWebRTC/WLVAudioDevice.m +137 -137
- package/ios/RCTWebRTC/WebRTCModule+Palabra.h +4 -4
- package/ios/RCTWebRTC/WebRTCModule+Palabra.m +92 -83
- package/ios/RCTWebRTC/WebRTCModule+Permissions.m +75 -75
- package/ios/RCTWebRTC/WebRTCModule+RTCAudioSession.m +20 -20
- package/ios/RCTWebRTC/WebRTCModule+RTCDataChannel.h +14 -14
- package/ios/RCTWebRTC/WebRTCModule+RTCDataChannel.m +165 -165
- package/ios/RCTWebRTC/WebRTCModule+RTCFrameCryptor.m +611 -611
- package/ios/RCTWebRTC/WebRTCModule+RTCMediaStream.h +13 -13
- package/ios/RCTWebRTC/WebRTCModule+RTCMediaStream.m +728 -728
- package/ios/RCTWebRTC/WebRTCModule+RTCPeerConnection.h +24 -24
- package/ios/RCTWebRTC/WebRTCModule+RTCPeerConnection.m +1004 -1004
- package/ios/RCTWebRTC/WebRTCModule+Transceivers.m +267 -267
- package/ios/RCTWebRTC/WebRTCModule+VideoTrackAdapter.h +12 -12
- package/ios/RCTWebRTC/WebRTCModule+VideoTrackAdapter.m +166 -166
- package/ios/RCTWebRTC/WebRTCModule.h +58 -58
- package/ios/RCTWebRTC/WebRTCModule.m +169 -169
- package/ios/RCTWebRTC/WebRTCModuleOptions.h +24 -24
- package/ios/RCTWebRTC/WebRTCModuleOptions.m +31 -31
- package/ios/RCTWebRTC/videoEffects/ProcessorProvider.h +9 -9
- package/ios/RCTWebRTC/videoEffects/ProcessorProvider.m +23 -23
- package/ios/RCTWebRTC/videoEffects/VideoEffectProcessor.h +13 -13
- package/ios/RCTWebRTC/videoEffects/VideoEffectProcessor.m +23 -23
- package/ios/RCTWebRTC/videoEffects/VideoFrameProcessor.h +8 -8
- package/ios/RCTWebRTC.xcodeproj/project.pbxproj +404 -404
- package/ios/RCTWebRTC.xcworkspace/contents.xcworkspacedata +10 -10
- package/lib/commonjs/Constraints.js.map +1 -1
- package/lib/commonjs/EventEmitter.js.map +1 -1
- package/lib/commonjs/Logger.js.map +1 -1
- package/lib/commonjs/MediaDevices.js +17 -17
- package/lib/commonjs/MediaDevices.js.map +1 -1
- package/lib/commonjs/MediaStream.js +19 -19
- package/lib/commonjs/MediaStream.js.map +1 -1
- package/lib/commonjs/MediaStreamError.js.map +1 -1
- package/lib/commonjs/MediaStreamErrorEvent.js.map +1 -1
- package/lib/commonjs/MediaStreamTrack.js +28 -28
- package/lib/commonjs/MediaStreamTrack.js.map +1 -1
- package/lib/commonjs/MediaStreamTrackEvent.js +6 -6
- package/lib/commonjs/MediaStreamTrackEvent.js.map +1 -1
- package/lib/commonjs/MessageEvent.js +7 -7
- package/lib/commonjs/MessageEvent.js.map +1 -1
- package/lib/commonjs/Permissions.js +28 -28
- package/lib/commonjs/Permissions.js.map +1 -1
- package/lib/commonjs/RTCAudioSession.js +4 -4
- package/lib/commonjs/RTCAudioSession.js.map +1 -1
- package/lib/commonjs/RTCDataChannel.js +2 -2
- package/lib/commonjs/RTCDataChannel.js.map +1 -1
- package/lib/commonjs/RTCDataChannelEvent.js +6 -6
- package/lib/commonjs/RTCDataChannelEvent.js.map +1 -1
- package/lib/commonjs/RTCDataPacketCryptor.js.map +1 -1
- package/lib/commonjs/RTCDataPacketCryptorFactory.js.map +1 -1
- package/lib/commonjs/RTCErrorEvent.js +3 -3
- package/lib/commonjs/RTCErrorEvent.js.map +1 -1
- package/lib/commonjs/RTCFrameCryptor.js +8 -8
- package/lib/commonjs/RTCFrameCryptor.js.map +1 -1
- package/lib/commonjs/RTCFrameCryptorFactory.js.map +1 -1
- package/lib/commonjs/RTCIceCandidate.js.map +1 -1
- package/lib/commonjs/RTCIceCandidateEvent.js +7 -7
- package/lib/commonjs/RTCIceCandidateEvent.js.map +1 -1
- package/lib/commonjs/RTCKeyProvider.js.map +1 -1
- package/lib/commonjs/RTCPIPView.js +2 -2
- package/lib/commonjs/RTCPIPView.js.map +1 -1
- package/lib/commonjs/RTCPIPView.web.js.map +1 -1
- package/lib/commonjs/RTCPeerConnection.js +36 -36
- package/lib/commonjs/RTCPeerConnection.js.map +1 -1
- package/lib/commonjs/RTCRtcpParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpCapabilities.js +2 -2
- package/lib/commonjs/RTCRtpCapabilities.js.map +1 -1
- package/lib/commonjs/RTCRtpCodecCapability.js.map +1 -1
- package/lib/commonjs/RTCRtpCodecParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpEncodingParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpHeaderExtension.js.map +1 -1
- package/lib/commonjs/RTCRtpParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpReceiveParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpReceiver.js +7 -7
- package/lib/commonjs/RTCRtpReceiver.js.map +1 -1
- package/lib/commonjs/RTCRtpSendParameters.js +3 -3
- package/lib/commonjs/RTCRtpSendParameters.js.map +1 -1
- package/lib/commonjs/RTCRtpSender.js +7 -7
- package/lib/commonjs/RTCRtpSender.js.map +1 -1
- package/lib/commonjs/RTCRtpTransceiver.js.map +1 -1
- package/lib/commonjs/RTCSessionDescription.js.map +1 -1
- package/lib/commonjs/RTCTrackEvent.js +6 -6
- package/lib/commonjs/RTCTrackEvent.js.map +1 -1
- package/lib/commonjs/RTCUtil.js +28 -28
- package/lib/commonjs/RTCUtil.js.map +1 -1
- package/lib/commonjs/RTCView.js +5 -5
- package/lib/commonjs/RTCView.js.map +1 -1
- package/lib/commonjs/RTCView.web.js.map +1 -1
- package/lib/commonjs/ScreenCapturePickerView.js.map +1 -1
- package/lib/commonjs/ScreenCapturePickerView.web.js.map +1 -1
- package/lib/commonjs/getDisplayMedia.js.map +1 -1
- package/lib/commonjs/getUserMedia.js.map +1 -1
- package/lib/commonjs/index.js.map +1 -1
- package/lib/commonjs/index.web.js.map +1 -1
- package/lib/commonjs/webStream.js.map +1 -1
- package/lib/module/Constraints.js.map +1 -1
- package/lib/module/EventEmitter.js.map +1 -1
- package/lib/module/Logger.js.map +1 -1
- package/lib/module/MediaDevices.js +17 -17
- package/lib/module/MediaDevices.js.map +1 -1
- package/lib/module/MediaStream.js +19 -19
- package/lib/module/MediaStream.js.map +1 -1
- package/lib/module/MediaStreamError.js.map +1 -1
- package/lib/module/MediaStreamErrorEvent.js.map +1 -1
- package/lib/module/MediaStreamTrack.js +28 -28
- package/lib/module/MediaStreamTrack.js.map +1 -1
- package/lib/module/MediaStreamTrackEvent.js +6 -6
- package/lib/module/MediaStreamTrackEvent.js.map +1 -1
- package/lib/module/MessageEvent.js +7 -7
- package/lib/module/MessageEvent.js.map +1 -1
- package/lib/module/Permissions.js +28 -28
- package/lib/module/Permissions.js.map +1 -1
- package/lib/module/RTCAudioSession.js +4 -4
- package/lib/module/RTCAudioSession.js.map +1 -1
- package/lib/module/RTCDataChannel.js +2 -2
- package/lib/module/RTCDataChannel.js.map +1 -1
- package/lib/module/RTCDataChannelEvent.js +6 -6
- package/lib/module/RTCDataChannelEvent.js.map +1 -1
- package/lib/module/RTCDataPacketCryptor.js.map +1 -1
- package/lib/module/RTCDataPacketCryptorFactory.js.map +1 -1
- package/lib/module/RTCErrorEvent.js +3 -3
- package/lib/module/RTCErrorEvent.js.map +1 -1
- package/lib/module/RTCFrameCryptor.js +8 -8
- package/lib/module/RTCFrameCryptor.js.map +1 -1
- package/lib/module/RTCFrameCryptorFactory.js.map +1 -1
- package/lib/module/RTCIceCandidate.js.map +1 -1
- package/lib/module/RTCIceCandidateEvent.js +7 -7
- package/lib/module/RTCIceCandidateEvent.js.map +1 -1
- package/lib/module/RTCKeyProvider.js.map +1 -1
- package/lib/module/RTCPIPView.js +2 -2
- package/lib/module/RTCPIPView.js.map +1 -1
- package/lib/module/RTCPIPView.web.js.map +1 -1
- package/lib/module/RTCPeerConnection.js +36 -36
- package/lib/module/RTCPeerConnection.js.map +1 -1
- package/lib/module/RTCRtcpParameters.js.map +1 -1
- package/lib/module/RTCRtpCapabilities.js +2 -2
- package/lib/module/RTCRtpCapabilities.js.map +1 -1
- package/lib/module/RTCRtpCodecCapability.js.map +1 -1
- package/lib/module/RTCRtpCodecParameters.js.map +1 -1
- package/lib/module/RTCRtpEncodingParameters.js.map +1 -1
- package/lib/module/RTCRtpHeaderExtension.js.map +1 -1
- package/lib/module/RTCRtpParameters.js.map +1 -1
- package/lib/module/RTCRtpReceiveParameters.js.map +1 -1
- package/lib/module/RTCRtpReceiver.js +7 -7
- package/lib/module/RTCRtpReceiver.js.map +1 -1
- package/lib/module/RTCRtpSendParameters.js +3 -3
- package/lib/module/RTCRtpSendParameters.js.map +1 -1
- package/lib/module/RTCRtpSender.js +7 -7
- package/lib/module/RTCRtpSender.js.map +1 -1
- package/lib/module/RTCRtpTransceiver.js.map +1 -1
- package/lib/module/RTCSessionDescription.js.map +1 -1
- package/lib/module/RTCTrackEvent.js +6 -6
- package/lib/module/RTCTrackEvent.js.map +1 -1
- package/lib/module/RTCUtil.js +28 -28
- package/lib/module/RTCUtil.js.map +1 -1
- package/lib/module/RTCView.js +5 -5
- package/lib/module/RTCView.js.map +1 -1
- package/lib/module/RTCView.web.js.map +1 -1
- package/lib/module/ScreenCapturePickerView.js.map +1 -1
- package/lib/module/ScreenCapturePickerView.web.js.map +1 -1
- package/lib/module/getDisplayMedia.js.map +1 -1
- package/lib/module/getUserMedia.js.map +1 -1
- package/lib/module/index.js.map +1 -1
- package/lib/module/index.web.js.map +1 -1
- package/lib/module/webStream.js.map +1 -1
- package/lib/typescript/Constraints.d.ts +19 -19
- package/lib/typescript/EventEmitter.d.ts +6 -6
- package/lib/typescript/Logger.d.ts +13 -13
- package/lib/typescript/MediaDevices.d.ts +30 -30
- package/lib/typescript/MediaStream.d.ts +48 -48
- package/lib/typescript/MediaStreamError.d.ts +6 -6
- package/lib/typescript/MediaStreamErrorEvent.d.ts +6 -6
- package/lib/typescript/MediaStreamTrack.d.ts +101 -101
- package/lib/typescript/MediaStreamTrackEvent.d.ts +19 -19
- package/lib/typescript/MessageEvent.d.ts +20 -20
- package/lib/typescript/Permissions.d.ts +55 -55
- package/lib/typescript/RTCAudioSession.d.ts +10 -10
- package/lib/typescript/RTCDataChannel.d.ts +43 -43
- package/lib/typescript/RTCDataChannelEvent.d.ts +19 -19
- package/lib/typescript/RTCDataPacketCryptor.d.ts +12 -12
- package/lib/typescript/RTCDataPacketCryptorFactory.d.ts +6 -6
- package/lib/typescript/RTCErrorEvent.d.ts +12 -12
- package/lib/typescript/RTCFrameCryptor.d.ts +47 -47
- package/lib/typescript/RTCFrameCryptorFactory.d.ts +21 -21
- package/lib/typescript/RTCIceCandidate.d.ts +17 -17
- package/lib/typescript/RTCIceCandidateEvent.d.ts +20 -20
- package/lib/typescript/RTCKeyProvider.d.ts +21 -21
- package/lib/typescript/RTCPIPView.d.ts +15 -15
- package/lib/typescript/RTCPIPView.web.d.ts +13 -13
- package/lib/typescript/RTCPeerConnection.d.ts +117 -117
- package/lib/typescript/RTCRtcpParameters.d.ts +10 -10
- package/lib/typescript/RTCRtpCapabilities.d.ts +9 -9
- package/lib/typescript/RTCRtpCodecCapability.d.ts +7 -7
- package/lib/typescript/RTCRtpCodecParameters.d.ts +16 -16
- package/lib/typescript/RTCRtpEncodingParameters.d.ts +23 -23
- package/lib/typescript/RTCRtpHeaderExtension.d.ts +12 -12
- package/lib/typescript/RTCRtpParameters.d.ts +19 -19
- package/lib/typescript/RTCRtpReceiveParameters.d.ts +4 -4
- package/lib/typescript/RTCRtpReceiver.d.ts +21 -21
- package/lib/typescript/RTCRtpSendParameters.d.ts +20 -20
- package/lib/typescript/RTCRtpSender.d.ts +22 -22
- package/lib/typescript/RTCRtpTransceiver.d.ts +31 -31
- package/lib/typescript/RTCSessionDescription.d.ts +12 -12
- package/lib/typescript/RTCTrackEvent.d.ts +29 -29
- package/lib/typescript/RTCUtil.d.ts +37 -37
- package/lib/typescript/RTCView.d.ts +117 -117
- package/lib/typescript/RTCView.web.d.ts +25 -25
- package/lib/typescript/ScreenCapturePickerView.d.ts +2 -2
- package/lib/typescript/ScreenCapturePickerView.web.d.ts +1 -1
- package/lib/typescript/getDisplayMedia.d.ts +2 -2
- package/lib/typescript/getUserMedia.d.ts +7 -7
- package/lib/typescript/index.d.ts +22 -22
- package/lib/typescript/index.web.d.ts +101 -101
- package/lib/typescript/webStream.d.ts +3 -3
- package/livekit-react-native-webrtc.podspec +29 -29
- package/macos/RCTWebRTC.xcodeproj/project.pbxproj +324 -324
- package/macos/RCTWebRTC.xcodeproj/project.xcworkspace/contents.xcworkspacedata +7 -7
- package/macos/RCTWebRTC.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist +8 -8
- package/metro.config.js +7 -7
- package/metro.config.macos.js +14 -14
- package/package.json +66 -66
- package/react-native.config.js +11 -11
- package/src/.eslintrc.cjs +67 -67
- package/src/Constraints.ts +20 -20
- package/src/EventEmitter.ts +65 -65
- package/src/Logger.ts +49 -49
- package/src/MediaDevices.ts +53 -53
- package/src/MediaStream.ts +161 -161
- package/src/MediaStreamError.ts +12 -12
- package/src/MediaStreamErrorEvent.ts +11 -11
- package/src/MediaStreamTrack.ts +282 -282
- package/src/MediaStreamTrackEvent.ts +25 -25
- package/src/MessageEvent.ts +26 -26
- package/src/Permissions.ts +133 -133
- package/src/RTCAudioSession.ts +25 -25
- package/src/RTCDataChannel.ts +190 -190
- package/src/RTCDataChannelEvent.ts +28 -28
- package/src/RTCDataPacketCryptor.ts +90 -90
- package/src/RTCDataPacketCryptorFactory.ts +24 -24
- package/src/RTCErrorEvent.ts +20 -20
- package/src/RTCFrameCryptor.ts +162 -162
- package/src/RTCFrameCryptorFactory.ts +101 -101
- package/src/RTCIceCandidate.ts +29 -29
- package/src/RTCIceCandidateEvent.ts +26 -26
- package/src/RTCKeyProvider.ts +117 -117
- package/src/RTCPIPView.tsx +46 -46
- package/src/RTCPIPView.web.tsx +18 -18
- package/src/RTCPeerConnection.ts +935 -935
- package/src/RTCRtcpParameters.ts +23 -23
- package/src/RTCRtpCapabilities.ts +16 -16
- package/src/RTCRtpCodecCapability.ts +12 -12
- package/src/RTCRtpCodecParameters.ts +44 -44
- package/src/RTCRtpEncodingParameters.ts +90 -90
- package/src/RTCRtpHeaderExtension.ts +27 -27
- package/src/RTCRtpParameters.ts +37 -37
- package/src/RTCRtpReceiveParameters.ts +7 -7
- package/src/RTCRtpReceiver.ts +60 -60
- package/src/RTCRtpSendParameters.ts +63 -63
- package/src/RTCRtpSender.ts +78 -78
- package/src/RTCRtpTransceiver.ts +107 -107
- package/src/RTCSessionDescription.ts +30 -30
- package/src/RTCTrackEvent.ts +42 -42
- package/src/RTCUtil.ts +211 -211
- package/src/RTCView.ts +122 -122
- package/src/RTCView.web.tsx +80 -80
- package/src/ScreenCapturePickerView.ts +4 -4
- package/src/ScreenCapturePickerView.web.tsx +3 -3
- package/src/getDisplayMedia.ts +30 -30
- package/src/getUserMedia.ts +136 -136
- package/src/index.ts +107 -107
- package/src/index.web.ts +191 -191
- package/src/webStream.ts +31 -31
- package/tools/format.sh +6 -6
- package/tools/release.sh +45 -45
- package/tsconfig.json +17 -17
|
@@ -1,728 +1,728 @@
|
|
|
1
|
-
#import <objc/runtime.h>
|
|
2
|
-
|
|
3
|
-
#import <AVFoundation/AVFoundation.h>
|
|
4
|
-
#import <math.h>
|
|
5
|
-
#import <limits.h>
|
|
6
|
-
#import <stdint.h>
|
|
7
|
-
|
|
8
|
-
#import <WebRTC/RTCCameraVideoCapturer.h>
|
|
9
|
-
#import <WebRTC/RTCMediaConstraints.h>
|
|
10
|
-
#import <WebRTC/RTCMediaStreamTrack.h>
|
|
11
|
-
#import <WebRTC/RTCVideoTrack.h>
|
|
12
|
-
|
|
13
|
-
#import "RTCMediaStreamTrack+React.h"
|
|
14
|
-
#import "WebRTCModule+RTCMediaStream.h"
|
|
15
|
-
#import "WebRTCModule+RTCPeerConnection.h"
|
|
16
|
-
#import "WebRTCModuleOptions.h"
|
|
17
|
-
#import "WLVAudioDevice.h"
|
|
18
|
-
|
|
19
|
-
#import "ProcessorProvider.h"
|
|
20
|
-
#import "ScreenCaptureController.h"
|
|
21
|
-
#import "ScreenCapturer.h"
|
|
22
|
-
#import "TrackCapturerEventsEmitter.h"
|
|
23
|
-
#import "VideoCaptureController.h"
|
|
24
|
-
|
|
25
|
-
@interface AudioSamplesRenderer : NSObject<RTCAudioRenderer>
|
|
26
|
-
|
|
27
|
-
- (instancetype)initWithModule:(WebRTCModule *)module trackId:(NSString *)trackId;
|
|
28
|
-
|
|
29
|
-
@end
|
|
30
|
-
|
|
31
|
-
@implementation AudioSamplesRenderer {
|
|
32
|
-
__weak WebRTCModule *_module;
|
|
33
|
-
NSString *_trackId;
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
- (instancetype)initWithModule:(WebRTCModule *)module trackId:(NSString *)trackId {
|
|
37
|
-
self = [super init];
|
|
38
|
-
if (self) {
|
|
39
|
-
_module = module;
|
|
40
|
-
_trackId = [trackId copy];
|
|
41
|
-
}
|
|
42
|
-
return self;
|
|
43
|
-
}
|
|
44
|
-
|
|
45
|
-
- (void)render:(AVAudioPCMBuffer *)pcmBuffer {
|
|
46
|
-
WebRTCModule *module = _module;
|
|
47
|
-
if (!module) {
|
|
48
|
-
return;
|
|
49
|
-
}
|
|
50
|
-
|
|
51
|
-
AVAudioFrameCount frameLength = pcmBuffer.frameLength;
|
|
52
|
-
if (frameLength == 0) {
|
|
53
|
-
return;
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
UInt32 channelCount = pcmBuffer.format.channelCount;
|
|
57
|
-
if (channelCount == 0) {
|
|
58
|
-
return;
|
|
59
|
-
}
|
|
60
|
-
|
|
61
|
-
AVAudioCommonFormat format = pcmBuffer.format.commonFormat;
|
|
62
|
-
NSUInteger totalSamples = (NSUInteger)frameLength * (NSUInteger)channelCount;
|
|
63
|
-
NSMutableArray<NSNumber *> *samples = [NSMutableArray arrayWithCapacity:totalSamples];
|
|
64
|
-
int bitsPerSample = (int)pcmBuffer.format.streamDescription->mBitsPerChannel;
|
|
65
|
-
|
|
66
|
-
if (format == AVAudioPCMFormatInt16) {
|
|
67
|
-
int16_t **channelData = pcmBuffer.int16ChannelData;
|
|
68
|
-
if (!channelData) {
|
|
69
|
-
return;
|
|
70
|
-
}
|
|
71
|
-
for (AVAudioFrameCount frame = 0; frame < frameLength; frame++) {
|
|
72
|
-
for (UInt32 channel = 0; channel < channelCount; channel++) {
|
|
73
|
-
[samples addObject:@(channelData[channel][frame])];
|
|
74
|
-
}
|
|
75
|
-
}
|
|
76
|
-
} else if (format == AVAudioPCMFormatFloat32) {
|
|
77
|
-
float **channelData = pcmBuffer.floatChannelData;
|
|
78
|
-
if (!channelData) {
|
|
79
|
-
return;
|
|
80
|
-
}
|
|
81
|
-
bitsPerSample = 16;
|
|
82
|
-
for (AVAudioFrameCount frame = 0; frame < frameLength; frame++) {
|
|
83
|
-
for (UInt32 channel = 0; channel < channelCount; channel++) {
|
|
84
|
-
float value = channelData[channel][frame];
|
|
85
|
-
float clipped = fminf(fmaxf(value, -1.0f), 1.0f);
|
|
86
|
-
int16_t sample = (int16_t)lrintf(clipped * (float)INT16_MAX);
|
|
87
|
-
[samples addObject:@(sample)];
|
|
88
|
-
}
|
|
89
|
-
}
|
|
90
|
-
} else if (format == AVAudioPCMFormatInt32) {
|
|
91
|
-
int32_t **channelData = pcmBuffer.int32ChannelData;
|
|
92
|
-
if (!channelData) {
|
|
93
|
-
return;
|
|
94
|
-
}
|
|
95
|
-
bitsPerSample = 32;
|
|
96
|
-
for (AVAudioFrameCount frame = 0; frame < frameLength; frame++) {
|
|
97
|
-
for (UInt32 channel = 0; channel < channelCount; channel++) {
|
|
98
|
-
int32_t value = channelData[channel][frame];
|
|
99
|
-
[samples addObject:@(value)];
|
|
100
|
-
}
|
|
101
|
-
}
|
|
102
|
-
} else {
|
|
103
|
-
return;
|
|
104
|
-
}
|
|
105
|
-
|
|
106
|
-
NSTimeInterval timestampMs = [[NSDate date] timeIntervalSince1970] * 1000.0;
|
|
107
|
-
|
|
108
|
-
[module sendEventWithName:kEventAudioSamples
|
|
109
|
-
body:@{
|
|
110
|
-
@"trackId" : _trackId,
|
|
111
|
-
@"samples" : samples,
|
|
112
|
-
@"sampleRate" : @(pcmBuffer.format.sampleRate),
|
|
113
|
-
@"channels" : @(channelCount),
|
|
114
|
-
@"bitsPerSample" : @(bitsPerSample),
|
|
115
|
-
@"framesPerBuffer" : @(frameLength),
|
|
116
|
-
@"timestamp" : @(timestampMs)
|
|
117
|
-
}];
|
|
118
|
-
}
|
|
119
|
-
|
|
120
|
-
@end
|
|
121
|
-
|
|
122
|
-
@implementation WebRTCModule (RTCMediaStream)
|
|
123
|
-
|
|
124
|
-
- (NSString *)audioSinkKeyForPcId:(NSNumber *)pcId trackId:(NSString *)trackId {
|
|
125
|
-
return [NSString stringWithFormat:@"%@:%@", pcId, trackId];
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
- (VideoEffectProcessor *)videoEffectProcessor {
|
|
129
|
-
return objc_getAssociatedObject(self, _cmd);
|
|
130
|
-
}
|
|
131
|
-
|
|
132
|
-
- (void)setVideoEffectProcessor:(VideoEffectProcessor *)videoEffectProcessor {
|
|
133
|
-
objc_setAssociatedObject(
|
|
134
|
-
self, @selector(videoEffectProcessor), videoEffectProcessor, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
|
|
135
|
-
}
|
|
136
|
-
|
|
137
|
-
#pragma mark - getUserMedia
|
|
138
|
-
|
|
139
|
-
- (NSString *)convertBoolToString:(id)value {
|
|
140
|
-
return value ? @"true" : @"false";
|
|
141
|
-
}
|
|
142
|
-
|
|
143
|
-
/**
|
|
144
|
-
* Initializes a new {@link RTCAudioTrack} which satisfies the given constraints.
|
|
145
|
-
*
|
|
146
|
-
* @param constraints The {@code MediaStreamConstraints} which the new
|
|
147
|
-
* {@code RTCAudioTrack} instance is to satisfy.
|
|
148
|
-
*/
|
|
149
|
-
- (RTCAudioTrack *)createAudioTrack:(NSDictionary *)constraints {
|
|
150
|
-
NSString *trackId = [[NSUUID UUID] UUIDString];
|
|
151
|
-
NSDictionary *audioConstraints = constraints[@"audio"];
|
|
152
|
-
NSMutableDictionary *optionalConstraints = [NSMutableDictionary dictionary];
|
|
153
|
-
optionalConstraints[@"googAutoGainControl"] = audioConstraints[@"autoGainControl"] != nil
|
|
154
|
-
? [self convertBoolToString:audioConstraints[@"autoGainControl"]]
|
|
155
|
-
: @"true";
|
|
156
|
-
optionalConstraints[@"googNoiseSuppression"] =
|
|
157
|
-
audioConstraints[@"noiseSuppression"] != nil ? [self convertBoolToString:audioConstraints[@"noiseSuppression"]]
|
|
158
|
-
: @"true";
|
|
159
|
-
optionalConstraints[@"googEchoCancellation"] =
|
|
160
|
-
audioConstraints[@"echoCancellation"] != nil ? [self convertBoolToString:audioConstraints[@"echoCancellation"]]
|
|
161
|
-
: @"true";
|
|
162
|
-
optionalConstraints[@"googHighpassFilter"] = audioConstraints[@"highpassFilter"] != nil
|
|
163
|
-
? [self convertBoolToString:audioConstraints[@"highpassFilter"]]
|
|
164
|
-
: @"true";
|
|
165
|
-
|
|
166
|
-
RTCMediaConstraints *mediaConstraints =
|
|
167
|
-
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:optionalConstraints];
|
|
168
|
-
|
|
169
|
-
RTCAudioSource *audioSource = [self.peerConnectionFactory audioSourceWithConstraints:mediaConstraints];
|
|
170
|
-
RTCAudioTrack *audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource trackId:trackId];
|
|
171
|
-
return audioTrack;
|
|
172
|
-
}
|
|
173
|
-
/**
|
|
174
|
-
* Initializes a new {@link RTCVideoTrack} with the given capture controller
|
|
175
|
-
*/
|
|
176
|
-
- (RTCVideoTrack *)createVideoTrackWithCaptureController:
|
|
177
|
-
(CaptureController * (^)(RTCVideoSource *))captureControllerCreator {
|
|
178
|
-
#if TARGET_OS_TV
|
|
179
|
-
return nil;
|
|
180
|
-
#else
|
|
181
|
-
|
|
182
|
-
RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];
|
|
183
|
-
|
|
184
|
-
NSString *trackUUID = [[NSUUID UUID] UUIDString];
|
|
185
|
-
RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
|
|
186
|
-
|
|
187
|
-
CaptureController *captureController = captureControllerCreator(videoSource);
|
|
188
|
-
videoTrack.captureController = captureController;
|
|
189
|
-
[captureController startCapture];
|
|
190
|
-
|
|
191
|
-
return videoTrack;
|
|
192
|
-
#endif
|
|
193
|
-
}
|
|
194
|
-
/**
|
|
195
|
-
* Initializes a new {@link RTCMediaTrack} with the given tracks.
|
|
196
|
-
*
|
|
197
|
-
* @return An array with the mediaStreamId in index 0, and track infos in index 1.
|
|
198
|
-
*/
|
|
199
|
-
- (NSArray *)createMediaStream:(NSArray<RTCMediaStreamTrack *> *)tracks {
|
|
200
|
-
#if TARGET_OS_TV
|
|
201
|
-
return nil;
|
|
202
|
-
#else
|
|
203
|
-
NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
|
|
204
|
-
RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
|
|
205
|
-
NSMutableArray<NSDictionary *> *trackInfos = [NSMutableArray array];
|
|
206
|
-
|
|
207
|
-
for (RTCMediaStreamTrack *track in tracks) {
|
|
208
|
-
if ([track.kind isEqualToString:@"audio"]) {
|
|
209
|
-
[mediaStream addAudioTrack:(RTCAudioTrack *)track];
|
|
210
|
-
} else if ([track.kind isEqualToString:@"video"]) {
|
|
211
|
-
[mediaStream addVideoTrack:(RTCVideoTrack *)track];
|
|
212
|
-
}
|
|
213
|
-
|
|
214
|
-
NSString *trackId = track.trackId;
|
|
215
|
-
|
|
216
|
-
self.localTracks[trackId] = track;
|
|
217
|
-
|
|
218
|
-
NSDictionary *settings = @{};
|
|
219
|
-
if ([track.kind isEqualToString:@"video"]) {
|
|
220
|
-
RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
|
|
221
|
-
if ([videoTrack.captureController isKindOfClass:[CaptureController class]]) {
|
|
222
|
-
settings = [videoTrack.captureController getSettings];
|
|
223
|
-
}
|
|
224
|
-
} else if ([track.kind isEqualToString:@"audio"]) {
|
|
225
|
-
settings = @{
|
|
226
|
-
@"deviceId" : @"audio",
|
|
227
|
-
@"groupId" : @"",
|
|
228
|
-
};
|
|
229
|
-
}
|
|
230
|
-
|
|
231
|
-
[trackInfos addObject:@{
|
|
232
|
-
@"enabled" : @(track.isEnabled),
|
|
233
|
-
@"id" : trackId,
|
|
234
|
-
@"kind" : track.kind,
|
|
235
|
-
@"readyState" : @"live",
|
|
236
|
-
@"remote" : @(NO),
|
|
237
|
-
@"settings" : settings
|
|
238
|
-
}];
|
|
239
|
-
}
|
|
240
|
-
|
|
241
|
-
self.localStreams[mediaStreamId] = mediaStream;
|
|
242
|
-
return @[ mediaStreamId, trackInfos ];
|
|
243
|
-
#endif
|
|
244
|
-
}
|
|
245
|
-
|
|
246
|
-
/**
|
|
247
|
-
* Initializes a new {@link RTCVideoTrack} which satisfies the given constraints.
|
|
248
|
-
*/
|
|
249
|
-
- (RTCVideoTrack *)createVideoTrack:(NSDictionary *)constraints {
|
|
250
|
-
#if TARGET_OS_TV
|
|
251
|
-
return nil;
|
|
252
|
-
#else
|
|
253
|
-
RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];
|
|
254
|
-
|
|
255
|
-
NSString *trackUUID = [[NSUUID UUID] UUIDString];
|
|
256
|
-
RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
|
|
257
|
-
|
|
258
|
-
#if !TARGET_IPHONE_SIMULATOR
|
|
259
|
-
RTCCameraVideoCapturer *videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
|
|
260
|
-
VideoCaptureController *videoCaptureController =
|
|
261
|
-
[[VideoCaptureController alloc] initWithCapturer:videoCapturer andConstraints:constraints[@"video"]];
|
|
262
|
-
videoCaptureController.enableMultitaskingCameraAccess =
|
|
263
|
-
[WebRTCModuleOptions sharedInstance].enableMultitaskingCameraAccess;
|
|
264
|
-
videoTrack.captureController = videoCaptureController;
|
|
265
|
-
[videoCaptureController startCapture];
|
|
266
|
-
#endif
|
|
267
|
-
|
|
268
|
-
return videoTrack;
|
|
269
|
-
#endif
|
|
270
|
-
}
|
|
271
|
-
|
|
272
|
-
/**
 * Initializes a new {@link RTCVideoTrack} fed by in-app screen capture.
 *
 * @return The new track, or nil on platforms without screen capture support
 * (simulator, macOS, tvOS).
 */
- (RTCVideoTrack *)createScreenCaptureVideoTrack {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_OSX || TARGET_OS_TV
    return nil;
#endif

    // A screencast-flavored source: the encoder treats the content as a
    // screen share rather than camera video.
    RTCVideoSource *videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES];

    NSString *trackUUID = [[NSUUID UUID] UUIDString];
    RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];

    ScreenCapturer *screenCapturer = [[ScreenCapturer alloc] initWithDelegate:videoSource];
    ScreenCaptureController *screenCaptureController =
        [[ScreenCaptureController alloc] initWithCapturer:screenCapturer];

    // Forward capturer lifecycle events for this track id to the JS side.
    TrackCapturerEventsEmitter *emitter = [[TrackCapturerEventsEmitter alloc] initWith:trackUUID webRTCModule:self];
    screenCaptureController.eventsDelegate = emitter;
    videoTrack.captureController = screenCaptureController;
    [screenCaptureController startCapture];

    return videoTrack;
}
|
|
293
|
-
|
|
294
|
-
// Creates an audio track on the virtual (sample-injectable) factory and wraps
// it in a new local MediaStream. Resolves with the stream id and the track
// info array produced by createMediaStream:.
RCT_EXPORT_METHOD(createVirtualAudioTrack : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else
    // virtualFactory is declared elsewhere in the module; presumably it
    // lazily creates the factory/audio-device pair — see pushVirtualAudioSamples.
    RTCPeerConnectionFactory *factory = [self virtualFactory];
    if (!factory) {
        reject(@"virtual_factory_error", @"Factory unavailable", nil);
        return;
    }
    NSString *trackId = [[NSUUID UUID] UUIDString];
    RTCAudioSource *source = [factory audioSourceWithConstraints:nil];
    RTCAudioTrack *track = [factory audioTrackWithSource:source trackId:trackId];
    NSArray *components = [self createMediaStream:@[ track ]];
    resolve(@{ @"streamId" : components[0], @"tracks" : components[1] });
#endif
}
|
|
311
|
-
|
|
312
|
-
// Feeds boxed 16-bit PCM samples from JS into the virtual audio device.
// Silently drops the call if the samples array is empty or the device cannot
// be initialized.
RCT_EXPORT_METHOD(pushVirtualAudioSamples
                  : (nonnull NSArray<NSNumber *> *)samples
                  sampleRate
                  : (nonnull NSNumber *)sampleRate
                  channels
                  : (nonnull NSNumber *)channels) {
#if TARGET_OS_TV
    return;
#else
    if (samples.count == 0) {
        return;
    }
    // Creating the virtual factory initializes virtualAudioDevice as a side
    // effect; retry the check afterwards.
    if (!self.virtualAudioDevice) {
        [self virtualFactory];
    }
    if (!self.virtualAudioDevice) {
        return;
    }
    // Repack the boxed samples into a contiguous int16 buffer.
    NSUInteger length = samples.count * sizeof(int16_t);
    NSMutableData *data = [NSMutableData dataWithLength:length];
    int16_t *buffer = data.mutableBytes;
    NSUInteger idx = 0;
    for (NSNumber *value in samples) {
        buffer[idx++] = (int16_t)value.shortValue;
    }
    [self.virtualAudioDevice pushAudioData:data sampleRate:sampleRate.doubleValue channels:channels.integerValue];
#endif
}
|
|
340
|
-
|
|
341
|
-
// Implements MediaDevices.getDisplayMedia(): starts a screen-capture video
// track, registers it and its enclosing stream, and resolves with
// {streamId, track}.
RCT_EXPORT_METHOD(getDisplayMedia : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else

    RTCVideoTrack *videoTrack = [self createScreenCaptureVideoTrack];

    // nil when the platform has no screen capture (simulator/macOS); report
    // it as a DOMException AbortError like getUserMedia does.
    if (videoTrack == nil) {
        reject(@"DOMException", @"AbortError", nil);
        return;
    }

    NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
    RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
    [mediaStream addVideoTrack:videoTrack];

    NSString *trackId = videoTrack.trackId;
    self.localTracks[trackId] = videoTrack;

    NSDictionary *trackInfo = @{
        @"enabled" : @(videoTrack.isEnabled),
        @"id" : videoTrack.trackId,
        @"kind" : videoTrack.kind,
        @"readyState" : @"live",
        @"remote" : @(NO)
    };

    self.localStreams[mediaStreamId] = mediaStream;
    resolve(@{@"streamId" : mediaStreamId, @"track" : trackInfo});
#endif
}
|
|
373
|
-
|
|
374
|
-
/**
 * Implements {@code getUserMedia}. Note that at this point constraints have
 * been normalized and permissions have been granted. The constraints only
 * contain keys for which permissions have already been granted, that is,
 * if audio permission was not granted, there will be no "audio" key in
 * the constraints dictionary.
 */
RCT_EXPORT_METHOD(getUserMedia
                  : (NSDictionary *)constraints successCallback
                  : (RCTResponseSenderBlock)successCallback errorCallback
                  : (RCTResponseSenderBlock)errorCallback) {
#if TARGET_OS_TV
    errorCallback(@[ @"PlatformNotSupported", @"getUserMedia is not supported on tvOS." ]);
    return;
#else
    RTCAudioTrack *audioTrack = nil;
    RTCVideoTrack *videoTrack = nil;

    if (constraints[@"audio"]) {
        audioTrack = [self createAudioTrack:constraints];
    }
    if (constraints[@"video"]) {
        videoTrack = [self createVideoTrack:constraints];
    }

    if (audioTrack == nil && videoTrack == nil) {
        // Fail with DOMException with name AbortError as per:
        // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
        errorCallback(@[ @"DOMException", @"AbortError" ]);
        return;
    }

    NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
    RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
    NSMutableArray *tracks = [NSMutableArray array];
    NSMutableArray *tmp = [NSMutableArray array];
    if (audioTrack)
        [tmp addObject:audioTrack];
    if (videoTrack)
        [tmp addObject:videoTrack];

    for (RTCMediaStreamTrack *track in tmp) {
        if ([track.kind isEqualToString:@"audio"]) {
            [mediaStream addAudioTrack:(RTCAudioTrack *)track];
        } else if ([track.kind isEqualToString:@"video"]) {
            [mediaStream addVideoTrack:(RTCVideoTrack *)track];
        }

        NSString *trackId = track.trackId;

        // Register the track so other module methods can look it up by id.
        self.localTracks[trackId] = track;

        // Video tracks report their capture controller's settings; audio
        // tracks get a fixed placeholder deviceId/groupId.
        NSDictionary *settings = @{};
        if ([track.kind isEqualToString:@"video"]) {
            RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
            if ([videoTrack.captureController isKindOfClass:[CaptureController class]]) {
                settings = [videoTrack.captureController getSettings];
            }
        } else if ([track.kind isEqualToString:@"audio"]) {
            settings = @{
                @"deviceId" : @"audio",
                @"groupId" : @"",
            };
        }

        [tracks addObject:@{
            @"enabled" : @(track.isEnabled),
            @"id" : trackId,
            @"kind" : track.kind,
            @"readyState" : @"live",
            @"remote" : @(NO),
            @"settings" : settings
        }];
    }

    self.localStreams[mediaStreamId] = mediaStream;
    successCallback(@[ mediaStreamId, tracks ]);
#endif
}
|
|
453
|
-
|
|
454
|
-
#pragma mark - Other stream related APIs
|
|
455
|
-
|
|
456
|
-
// Implements MediaDevices.enumerateDevices(): lists cameras and built-in
// microphones via AVCaptureDeviceDiscoverySession and returns an array of
// MediaDeviceInfo-like dictionaries. Returns an empty array on tvOS.
RCT_EXPORT_METHOD(enumerateDevices : (RCTResponseSenderBlock)callback) {
#if TARGET_OS_TV
    callback(@[]);
#else
    NSMutableArray *devices = [NSMutableArray array];
    NSMutableArray *deviceTypes = [NSMutableArray array];
    [deviceTypes addObjectsFromArray:@[
        AVCaptureDeviceTypeBuiltInWideAngleCamera,
        AVCaptureDeviceTypeBuiltInUltraWideCamera,
        AVCaptureDeviceTypeBuiltInTelephotoCamera,
        AVCaptureDeviceTypeBuiltInDualCamera,
        AVCaptureDeviceTypeBuiltInDualWideCamera,
        AVCaptureDeviceTypeBuiltInTripleCamera
    ]];
    // External cameras are only discoverable via the newer constant.
    if (@available(macos 14.0, ios 17.0, tvos 17.0, *)) {
        [deviceTypes addObject:AVCaptureDeviceTypeExternal];
    }
    AVCaptureDeviceDiscoverySession *videoDevicesSession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionUnspecified];
    for (AVCaptureDevice *device in videoDevicesSession.devices) {
        // Map AVFoundation positions onto getUserMedia facing-mode values.
        NSString *position = @"unknown";
        if (device.position == AVCaptureDevicePositionBack) {
            position = @"environment";
        } else if (device.position == AVCaptureDevicePositionFront) {
            position = @"front";
        }
        NSString *label = @"Unknown video device";
        if (device.localizedName != nil) {
            label = device.localizedName;
        }

        [devices addObject:@{
            @"facing" : position,
            @"deviceId" : device.uniqueID,
            @"groupId" : @"",
            @"label" : label,
            @"kind" : @"videoinput",
        }];
    }
    AVCaptureDeviceDiscoverySession *audioDevicesSession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInMicrophone ]
                                                               mediaType:AVMediaTypeAudio
                                                                position:AVCaptureDevicePositionUnspecified];
    for (AVCaptureDevice *device in audioDevicesSession.devices) {
        NSString *label = @"Unknown audio device";
        if (device.localizedName != nil) {
            label = device.localizedName;
        }
        [devices addObject:@{
            @"deviceId" : device.uniqueID,
            @"groupId" : @"",
            @"label" : label,
            @"kind" : @"audioinput",
        }];
    }
    callback(@[ devices ]);
#endif
}
|
|
516
|
-
|
|
517
|
-
// Creates an empty local MediaStream with the given id and registers it in
// the localStreams map.
RCT_EXPORT_METHOD(mediaStreamCreate : (nonnull NSString *)streamID) {
    self.localStreams[streamID] = [self.peerConnectionFactory mediaStreamWithStreamId:streamID];
}
|
|
521
|
-
|
|
522
|
-
// Adds an existing (local or remote) track to a local MediaStream.
// Unknown stream or track ids are ignored.
RCT_EXPORT_METHOD(mediaStreamAddTrack
                  : (nonnull NSString *)streamID
                  : (nonnull NSNumber *)pcId
                  : (nonnull NSString *)trackID) {
    RTCMediaStream *stream = self.localStreams[streamID];
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (stream == nil || track == nil) {
        return;
    }

    if ([track.kind isEqualToString:@"audio"]) {
        [stream addAudioTrack:(RTCAudioTrack *)track];
    } else if ([track.kind isEqualToString:@"video"]) {
        [stream addVideoTrack:(RTCVideoTrack *)track];
    }
}
|
|
542
|
-
|
|
543
|
-
// Removes a track from a local MediaStream. Unknown stream or track ids are
// ignored.
RCT_EXPORT_METHOD(mediaStreamRemoveTrack
                  : (nonnull NSString *)streamID
                  : (nonnull NSNumber *)pcId
                  : (nonnull NSString *)trackID) {
    RTCMediaStream *stream = self.localStreams[streamID];
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (stream == nil || track == nil) {
        return;
    }

    if ([track.kind isEqualToString:@"audio"]) {
        [stream removeAudioTrack:(RTCAudioTrack *)track];
    } else if ([track.kind isEqualToString:@"video"]) {
        [stream removeVideoTrack:(RTCVideoTrack *)track];
    }
}
|
|
563
|
-
|
|
564
|
-
// Unregisters a local MediaStream; its tracks are released separately via
// mediaStreamTrackRelease.
RCT_EXPORT_METHOD(mediaStreamRelease : (nonnull NSString *)streamID) {
    if (self.localStreams[streamID] != nil) {
        [self.localStreams removeObjectForKey:streamID];
    }
}
|
|
570
|
-
|
|
571
|
-
// Releases a local track: disables it, stops its capturer (if any) and
// removes it from the registry.
RCT_EXPORT_METHOD(mediaStreamTrackRelease : (nonnull NSString *)trackID) {
#if TARGET_OS_TV
    return;
#else
    RTCMediaStreamTrack *track = self.localTracks[trackID];
    if (track == nil) {
        return;
    }

    track.isEnabled = NO;
    [track.captureController stopCapture];
    [self.localTracks removeObjectForKey:trackID];
#endif
}
|
|
584
|
-
|
|
585
|
-
// Enables/disables a track. For local tracks the underlying capturer is also
// started/stopped so capture resources are released while the track is muted.
RCT_EXPORT_METHOD(mediaStreamTrackSetEnabled : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (BOOL)enabled) {
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (track == nil) {
        return;
    }

    track.isEnabled = enabled;
#if !TARGET_OS_TV
    if (track.captureController) { // It could be a remote track!
        if (enabled) {
            [track.captureController startCapture];
        } else {
            [track.captureController stopCapture];
        }
    }
#endif
}
|
|
602
|
-
|
|
603
|
-
// Applies new constraints to a local video track's capture controller and
// resolves with the resulting settings.
//
// Fix: previously, a video track whose captureController was missing or not a
// CaptureController fell through without calling resolve or reject, leaving
// the JS promise pending forever. That path now rejects explicitly.
RCT_EXPORT_METHOD(mediaStreamTrackApplyConstraints
                  : (nonnull NSString *)trackID
                  : (NSDictionary *)constraints
                  : (RCTPromiseResolveBlock)resolve
                  : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else
    RTCMediaStreamTrack *track = self.localTracks[trackID];
    if (track) {
        if ([track.kind isEqualToString:@"video"]) {
            RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
            if ([videoTrack.captureController isKindOfClass:[CaptureController class]]) {
                CaptureController *vcc = (CaptureController *)videoTrack.captureController;
                NSError *error = nil;
                [vcc applyConstraints:constraints error:&error];
                if (error) {
                    reject(@"E_INVALID", error.localizedDescription, error);
                } else {
                    resolve([vcc getSettings]);
                }
            } else {
                // Settle the promise instead of leaving it pending.
                reject(@"E_INVALID", @"Track has no capture controller", nil);
            }
        } else {
            RCTLogWarn(@"mediaStreamTrackApplyConstraints() track is not video");
            reject(@"E_INVALID", @"Can't apply constraints on audio tracks", nil);
        }
    } else {
        RCTLogWarn(@"mediaStreamTrackApplyConstraints() track is null");
        reject(@"E_INVALID", @"Could not get track", nil);
    }
#endif
}
|
|
636
|
-
|
|
637
|
-
// Sets the playback volume on an audio track's source. No-op for unknown ids
// or non-audio tracks.
RCT_EXPORT_METHOD(mediaStreamTrackSetVolume : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (double)volume) {
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (![track.kind isEqualToString:@"audio"]) {
        // Covers both nil tracks (messaging nil yields NO) and video tracks.
        return;
    }
    ((RTCAudioTrack *)track).source.volume = volume;
}
|
|
644
|
-
|
|
645
|
-
// Enables/disables audible playback of an audio track by toggling its source
// volume between 1.0 and 0.0. No-op for unknown ids or non-audio tracks.
RCT_EXPORT_METHOD(mediaStreamTrackSetPlaybackEnabled : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (BOOL)enabled) {
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (![track.kind isEqualToString:@"audio"]) {
        // Covers both nil tracks (messaging nil yields NO) and video tracks.
        return;
    }
    ((RTCAudioTrack *)track).source.volume = enabled ? 1.0 : 0.0;
}
|
|
652
|
-
|
|
653
|
-
// Attaches/detaches an AudioSamplesRenderer on an audio track so raw samples
// are streamed to JS as events. Renderers are keyed by "<pcId>:<trackId>" in
// the audioRenderers map.
RCT_EXPORT_METHOD(mediaStreamTrackEnableAudioSink : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (BOOL)enabled) {
    NSString *key = [self audioSinkKeyForPcId:pcId trackId:trackID];

    if (enabled) {
        // Already attached: nothing to do.
        if (self.audioRenderers[key]) {
            return;
        }

        RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
        if (track == nil || ![track.kind isEqualToString:@"audio"]) {
            return;
        }

        RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
        AudioSamplesRenderer *renderer = [[AudioSamplesRenderer alloc] initWithModule:self trackId:trackID];
        self.audioRenderers[key] = renderer;
        [audioTrack addRenderer:renderer];
    } else {
        id<RTCAudioRenderer> renderer = self.audioRenderers[key];
        if (!renderer) {
            return;
        }

        // The track may already be gone; only detach from a live audio track,
        // but always drop the renderer from the map.
        RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
        if ([track isKindOfClass:[RTCAudioTrack class]]) {
            RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
            [audioTrack removeRenderer:renderer];
        }

        [self.audioRenderers removeObjectForKey:key];
    }
}
|
|
685
|
-
|
|
686
|
-
// Installs the named video-frame processors on a local video track by
// interposing a VideoEffectProcessor between the capturer and the source.
//
// Fix: the original cast the track to RTCVideoTrack and its captureController
// to VideoCaptureController unconditionally; an audio track, or a video track
// whose controller is not a VideoCaptureController (e.g. screen capture),
// would be messaged through the wrong type. Both cases are now guarded.
RCT_EXPORT_METHOD(mediaStreamTrackSetVideoEffects
                  : (nonnull NSString *)trackID names
                  : (nonnull NSArray<NSString *> *)names) {
    RTCMediaStreamTrack *track = self.localTracks[trackID];
    if (track == nil || ![track.kind isEqualToString:@"video"]) {
        return;
    }

    RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
    RTCVideoSource *videoSource = videoTrack.source;

    // Resolve registered processors by name; unknown names are skipped.
    NSMutableArray *processors = [[NSMutableArray alloc] init];
    for (NSString *name in names) {
        NSObject<VideoFrameProcessorDelegate> *processor = [ProcessorProvider getProcessor:name];
        if (processor != nil) {
            [processors addObject:processor];
        }
    }

    // Keep the processor alive for the lifetime of the module (associated
    // object property declared in this category).
    self.videoEffectProcessor = [[VideoEffectProcessor alloc] initWithProcessors:processors videoSource:videoSource];

    if (![videoTrack.captureController isKindOfClass:[VideoCaptureController class]]) {
        return;
    }
    VideoCaptureController *vcc = (VideoCaptureController *)videoTrack.captureController;
    // Route capturer frames through the effect processor instead of directly
    // into the video source.
    vcc.capturer.delegate = self.videoEffectProcessor;
}
|
|
712
|
-
|
|
713
|
-
#pragma mark - Helpers
|
|
714
|
-
|
|
715
|
-
// Resolves a track id: pcId == -1 means a local track; any other pcId looks
// the track up among that peer connection's remote tracks. Returns nil when
// the peer connection or track is unknown.
- (RTCMediaStreamTrack *)trackForId:(nonnull NSString *)trackId pcId:(nonnull NSNumber *)pcId {
    if (pcId.intValue == -1) {
        return self.localTracks[trackId];
    }

    // Messaging nil returns nil, so an unknown pcId safely yields nil here.
    RTCPeerConnection *peerConnection = self.peerConnections[pcId];
    return peerConnection.remoteTracks[trackId];
}
|
|
727
|
-
|
|
728
|
-
@end
|
|
1
|
+
#import <objc/runtime.h>
|
|
2
|
+
|
|
3
|
+
#import <AVFoundation/AVFoundation.h>
|
|
4
|
+
#import <math.h>
|
|
5
|
+
#import <limits.h>
|
|
6
|
+
#import <stdint.h>
|
|
7
|
+
|
|
8
|
+
#import <WebRTC/RTCCameraVideoCapturer.h>
|
|
9
|
+
#import <WebRTC/RTCMediaConstraints.h>
|
|
10
|
+
#import <WebRTC/RTCMediaStreamTrack.h>
|
|
11
|
+
#import <WebRTC/RTCVideoTrack.h>
|
|
12
|
+
|
|
13
|
+
#import "RTCMediaStreamTrack+React.h"
|
|
14
|
+
#import "WebRTCModule+RTCMediaStream.h"
|
|
15
|
+
#import "WebRTCModule+RTCPeerConnection.h"
|
|
16
|
+
#import "WebRTCModuleOptions.h"
|
|
17
|
+
#import "WLVAudioDevice.h"
|
|
18
|
+
|
|
19
|
+
#import "ProcessorProvider.h"
|
|
20
|
+
#import "ScreenCaptureController.h"
|
|
21
|
+
#import "ScreenCapturer.h"
|
|
22
|
+
#import "TrackCapturerEventsEmitter.h"
|
|
23
|
+
#import "VideoCaptureController.h"
|
|
24
|
+
|
|
25
|
+
// Bridges libwebrtc audio frames to JS: converts each rendered
// AVAudioPCMBuffer into an NSArray of interleaved integer samples and emits
// it as a kEventAudioSamples event on the owning WebRTCModule.
@interface AudioSamplesRenderer : NSObject<RTCAudioRenderer>

- (instancetype)initWithModule:(WebRTCModule *)module trackId:(NSString *)trackId;

@end

@implementation AudioSamplesRenderer {
    // Weak: the module owns this renderer via its audioRenderers map; a
    // strong reference here would create a retain cycle.
    __weak WebRTCModule *_module;
    NSString *_trackId;
}

- (instancetype)initWithModule:(WebRTCModule *)module trackId:(NSString *)trackId {
    self = [super init];
    if (self) {
        _module = module;
        _trackId = [trackId copy];
    }
    return self;
}

// RTCAudioRenderer callback: invoked for each audio buffer of the track.
// Buffers in unsupported formats, or arriving after the module is gone,
// are silently dropped.
- (void)render:(AVAudioPCMBuffer *)pcmBuffer {
    WebRTCModule *module = _module;
    if (!module) {
        return;
    }

    AVAudioFrameCount frameLength = pcmBuffer.frameLength;
    if (frameLength == 0) {
        return;
    }

    UInt32 channelCount = pcmBuffer.format.channelCount;
    if (channelCount == 0) {
        return;
    }

    AVAudioCommonFormat format = pcmBuffer.format.commonFormat;
    NSUInteger totalSamples = (NSUInteger)frameLength * (NSUInteger)channelCount;
    NSMutableArray<NSNumber *> *samples = [NSMutableArray arrayWithCapacity:totalSamples];
    // Default to the format's own bit depth; overridden below where the
    // emitted samples differ from the buffer's native width.
    int bitsPerSample = (int)pcmBuffer.format.streamDescription->mBitsPerChannel;

    if (format == AVAudioPCMFormatInt16) {
        int16_t **channelData = pcmBuffer.int16ChannelData;
        if (!channelData) {
            return;
        }
        // Interleave frame-major, channel-minor.
        for (AVAudioFrameCount frame = 0; frame < frameLength; frame++) {
            for (UInt32 channel = 0; channel < channelCount; channel++) {
                [samples addObject:@(channelData[channel][frame])];
            }
        }
    } else if (format == AVAudioPCMFormatFloat32) {
        float **channelData = pcmBuffer.floatChannelData;
        if (!channelData) {
            return;
        }
        // Float samples are clipped to [-1, 1] and rescaled to signed 16-bit.
        bitsPerSample = 16;
        for (AVAudioFrameCount frame = 0; frame < frameLength; frame++) {
            for (UInt32 channel = 0; channel < channelCount; channel++) {
                float value = channelData[channel][frame];
                float clipped = fminf(fmaxf(value, -1.0f), 1.0f);
                int16_t sample = (int16_t)lrintf(clipped * (float)INT16_MAX);
                [samples addObject:@(sample)];
            }
        }
    } else if (format == AVAudioPCMFormatInt32) {
        int32_t **channelData = pcmBuffer.int32ChannelData;
        if (!channelData) {
            return;
        }
        bitsPerSample = 32;
        for (AVAudioFrameCount frame = 0; frame < frameLength; frame++) {
            for (UInt32 channel = 0; channel < channelCount; channel++) {
                int32_t value = channelData[channel][frame];
                [samples addObject:@(value)];
            }
        }
    } else {
        // Unsupported common format: drop the buffer.
        return;
    }

    NSTimeInterval timestampMs = [[NSDate date] timeIntervalSince1970] * 1000.0;

    [module sendEventWithName:kEventAudioSamples
                         body:@{
                             @"trackId" : _trackId,
                             @"samples" : samples,
                             @"sampleRate" : @(pcmBuffer.format.sampleRate),
                             @"channels" : @(channelCount),
                             @"bitsPerSample" : @(bitsPerSample),
                             @"framesPerBuffer" : @(frameLength),
                             @"timestamp" : @(timestampMs)
                         }];
}

@end
|
|
121
|
+
|
|
122
|
+
@implementation WebRTCModule (RTCMediaStream)
|
|
123
|
+
|
|
124
|
+
// Composite dictionary key ("<pcId>:<trackId>") used by the audioRenderers map.
- (NSString *)audioSinkKeyForPcId:(NSNumber *)pcId trackId:(NSString *)trackId {
    return [@[ pcId.stringValue, trackId ] componentsJoinedByString:@":"];
}
|
|
127
|
+
|
|
128
|
+
// Associated-object backing for the videoEffectProcessor property: categories
// cannot declare ivars, so the processor is attached to the module instance
// via the Objective-C runtime.
- (VideoEffectProcessor *)videoEffectProcessor {
    // _cmd == @selector(videoEffectProcessor), reused as the association key.
    return objc_getAssociatedObject(self, _cmd);
}

- (void)setVideoEffectProcessor:(VideoEffectProcessor *)videoEffectProcessor {
    objc_setAssociatedObject(
        self, @selector(videoEffectProcessor), videoEffectProcessor, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
|
|
136
|
+
|
|
137
|
+
#pragma mark - getUserMedia
|
|
138
|
+
|
|
139
|
+
/**
 * Converts a boxed boolean constraint value (typically an NSNumber from the
 * JS constraints dictionary) into the "true"/"false" string form expected by
 * the legacy goog* optional constraints.
 *
 * Fix: the previous implementation tested pointer truthiness, so @NO — a
 * non-nil object — incorrectly converted to "true". Use the boxed boolean
 * value instead; messaging nil yields NO, so nil still maps to "false".
 */
- (NSString *)convertBoolToString:(id)value {
    return [value boolValue] ? @"true" : @"false";
}
|
|
142
|
+
|
|
143
|
+
/**
 * Initializes a new {@link RTCAudioTrack} which satisfies the given constraints.
 *
 * @param constraints The {@code MediaStreamConstraints} which the new
 * {@code RTCAudioTrack} instance is to satisfy.
 */
- (RTCAudioTrack *)createAudioTrack:(NSDictionary *)constraints {
    NSString *trackId = [[NSUUID UUID] UUIDString];
    NSDictionary *audioConstraints = constraints[@"audio"];
    // Map W3C constraint names onto libwebrtc's legacy goog* optional
    // constraints; each processing option defaults to "true" when absent.
    NSMutableDictionary *optionalConstraints = [NSMutableDictionary dictionary];
    optionalConstraints[@"googAutoGainControl"] = audioConstraints[@"autoGainControl"] != nil
        ? [self convertBoolToString:audioConstraints[@"autoGainControl"]]
        : @"true";
    optionalConstraints[@"googNoiseSuppression"] =
        audioConstraints[@"noiseSuppression"] != nil ? [self convertBoolToString:audioConstraints[@"noiseSuppression"]]
                                                     : @"true";
    optionalConstraints[@"googEchoCancellation"] =
        audioConstraints[@"echoCancellation"] != nil ? [self convertBoolToString:audioConstraints[@"echoCancellation"]]
                                                     : @"true";
    optionalConstraints[@"googHighpassFilter"] = audioConstraints[@"highpassFilter"] != nil
        ? [self convertBoolToString:audioConstraints[@"highpassFilter"]]
        : @"true";

    RTCMediaConstraints *mediaConstraints =
        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:optionalConstraints];

    RTCAudioSource *audioSource = [self.peerConnectionFactory audioSourceWithConstraints:mediaConstraints];
    RTCAudioTrack *audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource trackId:trackId];
    return audioTrack;
}
|
|
173
|
+
/**
 * Initializes a new {@link RTCVideoTrack} with the given capture controller.
 *
 * @param captureControllerCreator Block that receives the freshly created
 * video source and returns the controller that will drive capture for it.
 * @return The new track with capture already started, or nil on tvOS.
 */
- (RTCVideoTrack *)createVideoTrackWithCaptureController:
    (CaptureController * (^)(RTCVideoSource *))captureControllerCreator {
#if TARGET_OS_TV
    return nil;
#else

    RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];

    NSString *trackUUID = [[NSUUID UUID] UUIDString];
    RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];

    CaptureController *captureController = captureControllerCreator(videoSource);
    videoTrack.captureController = captureController;
    [captureController startCapture];

    return videoTrack;
#endif
}
|
|
194
|
+
/**
 * Initializes a new {@link RTCMediaStream} with the given tracks.
 *
 * @return An array with the mediaStreamId in index 0, and track infos in index 1.
 */
- (NSArray *)createMediaStream:(NSArray<RTCMediaStreamTrack *> *)tracks {
#if TARGET_OS_TV
    return nil;
#else
    NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
    RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
    NSMutableArray<NSDictionary *> *trackInfos = [NSMutableArray array];

    for (RTCMediaStreamTrack *track in tracks) {
        if ([track.kind isEqualToString:@"audio"]) {
            [mediaStream addAudioTrack:(RTCAudioTrack *)track];
        } else if ([track.kind isEqualToString:@"video"]) {
            [mediaStream addVideoTrack:(RTCVideoTrack *)track];
        }

        NSString *trackId = track.trackId;

        // Register the track so other module methods can resolve it by id.
        self.localTracks[trackId] = track;

        // Video tracks report their capture controller's settings; audio
        // tracks get a fixed placeholder deviceId/groupId.
        NSDictionary *settings = @{};
        if ([track.kind isEqualToString:@"video"]) {
            RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
            if ([videoTrack.captureController isKindOfClass:[CaptureController class]]) {
                settings = [videoTrack.captureController getSettings];
            }
        } else if ([track.kind isEqualToString:@"audio"]) {
            settings = @{
                @"deviceId" : @"audio",
                @"groupId" : @"",
            };
        }

        [trackInfos addObject:@{
            @"enabled" : @(track.isEnabled),
            @"id" : trackId,
            @"kind" : track.kind,
            @"readyState" : @"live",
            @"remote" : @(NO),
            @"settings" : settings
        }];
    }

    self.localStreams[mediaStreamId] = mediaStream;
    return @[ mediaStreamId, trackInfos ];
#endif
}
|
|
245
|
+
|
|
246
|
+
/**
|
|
247
|
+
* Initializes a new {@link RTCVideoTrack} which satisfies the given constraints.
|
|
248
|
+
*/
|
|
249
|
+
- (RTCVideoTrack *)createVideoTrack:(NSDictionary *)constraints {
|
|
250
|
+
#if TARGET_OS_TV
|
|
251
|
+
return nil;
|
|
252
|
+
#else
|
|
253
|
+
RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];
|
|
254
|
+
|
|
255
|
+
NSString *trackUUID = [[NSUUID UUID] UUIDString];
|
|
256
|
+
RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
|
|
257
|
+
|
|
258
|
+
#if !TARGET_IPHONE_SIMULATOR
|
|
259
|
+
RTCCameraVideoCapturer *videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
|
|
260
|
+
VideoCaptureController *videoCaptureController =
|
|
261
|
+
[[VideoCaptureController alloc] initWithCapturer:videoCapturer andConstraints:constraints[@"video"]];
|
|
262
|
+
videoCaptureController.enableMultitaskingCameraAccess =
|
|
263
|
+
[WebRTCModuleOptions sharedInstance].enableMultitaskingCameraAccess;
|
|
264
|
+
videoTrack.captureController = videoCaptureController;
|
|
265
|
+
[videoCaptureController startCapture];
|
|
266
|
+
#endif
|
|
267
|
+
|
|
268
|
+
return videoTrack;
|
|
269
|
+
#endif
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
- (RTCVideoTrack *)createScreenCaptureVideoTrack {
|
|
273
|
+
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_OSX || TARGET_OS_TV
|
|
274
|
+
return nil;
|
|
275
|
+
#endif
|
|
276
|
+
|
|
277
|
+
RTCVideoSource *videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES];
|
|
278
|
+
|
|
279
|
+
NSString *trackUUID = [[NSUUID UUID] UUIDString];
|
|
280
|
+
RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
|
|
281
|
+
|
|
282
|
+
ScreenCapturer *screenCapturer = [[ScreenCapturer alloc] initWithDelegate:videoSource];
|
|
283
|
+
ScreenCaptureController *screenCaptureController =
|
|
284
|
+
[[ScreenCaptureController alloc] initWithCapturer:screenCapturer];
|
|
285
|
+
|
|
286
|
+
TrackCapturerEventsEmitter *emitter = [[TrackCapturerEventsEmitter alloc] initWith:trackUUID webRTCModule:self];
|
|
287
|
+
screenCaptureController.eventsDelegate = emitter;
|
|
288
|
+
videoTrack.captureController = screenCaptureController;
|
|
289
|
+
[screenCaptureController startCapture];
|
|
290
|
+
|
|
291
|
+
return videoTrack;
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
/**
 * Creates an audio track on the virtual (sample-injection) factory and wraps
 * it in a new local media stream. Resolves with {streamId, tracks}; rejects
 * on tvOS or when the virtual factory cannot be obtained.
 */
RCT_EXPORT_METHOD(createVirtualAudioTrack : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else
    RTCPeerConnectionFactory *factory = [self virtualFactory];
    if (factory == nil) {
        reject(@"virtual_factory_error", @"Factory unavailable", nil);
        return;
    }

    NSString *trackId = [[NSUUID UUID] UUIDString];
    RTCAudioSource *audioSource = [factory audioSourceWithConstraints:nil];
    RTCAudioTrack *audioTrack = [factory audioTrackWithSource:audioSource trackId:trackId];

    // createMediaStream: returns @[streamId, trackInfos].
    NSArray *streamInfo = [self createMediaStream:@[ audioTrack ]];
    resolve(@{ @"streamId" : streamInfo[0], @"tracks" : streamInfo[1] });
#endif
}
|
|
311
|
+
|
|
312
|
+
/**
 * Pushes PCM16 samples (boxed as NSNumbers) into the virtual audio device.
 * Lazily creates the virtual factory (which instantiates the device) on first
 * use; silently no-ops on tvOS, on empty input, or when no device exists.
 */
RCT_EXPORT_METHOD(pushVirtualAudioSamples
                  : (nonnull NSArray<NSNumber *> *)samples
                  sampleRate
                  : (nonnull NSNumber *)sampleRate
                  channels
                  : (nonnull NSNumber *)channels) {
#if TARGET_OS_TV
    return;
#else
    if (samples.count == 0) {
        return;
    }

    // Creating the factory also sets up the virtual audio device.
    if (self.virtualAudioDevice == nil) {
        [self virtualFactory];
    }
    if (self.virtualAudioDevice == nil) {
        return;
    }

    // Unbox the NSNumbers into a contiguous little int16 buffer.
    NSMutableData *pcm = [NSMutableData dataWithLength:samples.count * sizeof(int16_t)];
    int16_t *out = pcm.mutableBytes;
    NSUInteger i = 0;
    for (NSNumber *sample in samples) {
        out[i++] = (int16_t)sample.shortValue;
    }

    [self.virtualAudioDevice pushAudioData:pcm sampleRate:sampleRate.doubleValue channels:channels.integerValue];
#endif
}
|
|
340
|
+
|
|
341
|
+
/**
 * Implements getDisplayMedia: creates a screen-capture video track, wraps it
 * in a new local stream and resolves with {streamId, track}. Rejects with
 * DOMException/AbortError when the track cannot be created.
 */
RCT_EXPORT_METHOD(getDisplayMedia : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else

    RTCVideoTrack *screenTrack = [self createScreenCaptureVideoTrack];
    if (screenTrack == nil) {
        reject(@"DOMException", @"AbortError", nil);
        return;
    }

    NSString *streamId = [[NSUUID UUID] UUIDString];
    RTCMediaStream *stream = [self.peerConnectionFactory mediaStreamWithStreamId:streamId];
    [stream addVideoTrack:screenTrack];

    // Register the track so other module methods can look it up by id.
    self.localTracks[screenTrack.trackId] = screenTrack;

    NSDictionary *trackInfo = @{
        @"enabled" : @(screenTrack.isEnabled),
        @"id" : screenTrack.trackId,
        @"kind" : screenTrack.kind,
        @"readyState" : @"live",
        @"remote" : @(NO)
    };

    self.localStreams[streamId] = stream;
    resolve(@{@"streamId" : streamId, @"track" : trackInfo});
#endif
}
|
|
373
|
+
|
|
374
|
+
/**
|
|
375
|
+
* Implements {@code getUserMedia}. Note that at this point constraints have
|
|
376
|
+
* been normalized and permissions have been granted. The constraints only
|
|
377
|
+
* contain keys for which permissions have already been granted, that is,
|
|
378
|
+
* if audio permission was not granted, there will be no "audio" key in
|
|
379
|
+
* the constraints dictionary.
|
|
380
|
+
*/
|
|
381
|
+
RCT_EXPORT_METHOD(getUserMedia
                  : (NSDictionary *)constraints successCallback
                  : (RCTResponseSenderBlock)successCallback errorCallback
                  : (RCTResponseSenderBlock)errorCallback) {
#if TARGET_OS_TV
    errorCallback(@[ @"PlatformNotSupported", @"getUserMedia is not supported on tvOS." ]);
    return;
#else
    RTCAudioTrack *audioTrack = nil;
    RTCVideoTrack *videoTrack = nil;

    // Constraints have been normalized by the caller and only contain keys
    // for which permission was already granted (see the comment above).
    if (constraints[@"audio"]) {
        audioTrack = [self createAudioTrack:constraints];
    }
    if (constraints[@"video"]) {
        videoTrack = [self createVideoTrack:constraints];
    }

    if (audioTrack == nil && videoTrack == nil) {
        // Fail with DOMException with name AbortError as per:
        // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
        errorCallback(@[ @"DOMException", @"AbortError" ]);
        return;
    }

    NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
    RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
    // Serialized track info dictionaries handed back to JS.
    NSMutableArray *tracks = [NSMutableArray array];
    // The (0-2) tracks that were actually created, in audio-then-video order.
    NSMutableArray *tmp = [NSMutableArray array];
    if (audioTrack)
        [tmp addObject:audioTrack];
    if (videoTrack)
        [tmp addObject:videoTrack];

    for (RTCMediaStreamTrack *track in tmp) {
        if ([track.kind isEqualToString:@"audio"]) {
            [mediaStream addAudioTrack:(RTCAudioTrack *)track];
        } else if ([track.kind isEqualToString:@"video"]) {
            [mediaStream addVideoTrack:(RTCVideoTrack *)track];
        }

        NSString *trackId = track.trackId;

        // Register the track so other module methods can look it up by id.
        self.localTracks[trackId] = track;

        // Best-effort MediaTrackSettings for the JS side. Video settings come
        // from the capture controller; audio gets a fixed placeholder.
        NSDictionary *settings = @{};
        if ([track.kind isEqualToString:@"video"]) {
            RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
            if ([videoTrack.captureController isKindOfClass:[CaptureController class]]) {
                settings = [videoTrack.captureController getSettings];
            }
        } else if ([track.kind isEqualToString:@"audio"]) {
            settings = @{
                @"deviceId" : @"audio",
                @"groupId" : @"",
            };
        }

        [tracks addObject:@{
            @"enabled" : @(track.isEnabled),
            @"id" : trackId,
            @"kind" : track.kind,
            @"readyState" : @"live",
            @"remote" : @(NO),
            @"settings" : settings
        }];
    }

    self.localStreams[mediaStreamId] = mediaStream;
    successCallback(@[ mediaStreamId, tracks ]);
#endif
}
|
|
453
|
+
|
|
454
|
+
#pragma mark - Other stream related APIs
|
|
455
|
+
|
|
456
|
+
// Implements enumerateDevices: lists all camera and built-in microphone
// devices via AVCaptureDeviceDiscoverySession and invokes the callback with
// an array of MediaDeviceInfo-shaped dictionaries. On tvOS the list is empty.
RCT_EXPORT_METHOD(enumerateDevices : (RCTResponseSenderBlock)callback) {
#if TARGET_OS_TV
    callback(@[]);
#else
    NSMutableArray *devices = [NSMutableArray array];
    // Camera device types to discover; external cameras are only available
    // from macOS 14 / iOS 17 / tvOS 17.
    NSMutableArray *deviceTypes = [NSMutableArray array];
    [deviceTypes addObjectsFromArray:@[
        AVCaptureDeviceTypeBuiltInWideAngleCamera,
        AVCaptureDeviceTypeBuiltInUltraWideCamera,
        AVCaptureDeviceTypeBuiltInTelephotoCamera,
        AVCaptureDeviceTypeBuiltInDualCamera,
        AVCaptureDeviceTypeBuiltInDualWideCamera,
        AVCaptureDeviceTypeBuiltInTripleCamera
    ]];
    if (@available(macos 14.0, ios 17.0, tvos 17.0, *)) {
        [deviceTypes addObject:AVCaptureDeviceTypeExternal];
    }
    AVCaptureDeviceDiscoverySession *videoDevicesSession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionUnspecified];
    for (AVCaptureDevice *device in videoDevicesSession.devices) {
        // Map AVFoundation positions onto getUserMedia "facing" values.
        NSString *position = @"unknown";
        if (device.position == AVCaptureDevicePositionBack) {
            position = @"environment";
        } else if (device.position == AVCaptureDevicePositionFront) {
            position = @"front";
        }
        NSString *label = @"Unknown video device";
        if (device.localizedName != nil) {
            label = device.localizedName;
        }

        [devices addObject:@{
            @"facing" : position,
            @"deviceId" : device.uniqueID,
            @"groupId" : @"",
            @"label" : label,
            @"kind" : @"videoinput",
        }];
    }
    // Only the built-in microphone is enumerated for audio input.
    AVCaptureDeviceDiscoverySession *audioDevicesSession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInMicrophone ]
                                                               mediaType:AVMediaTypeAudio
                                                                position:AVCaptureDevicePositionUnspecified];
    for (AVCaptureDevice *device in audioDevicesSession.devices) {
        NSString *label = @"Unknown audio device";
        if (device.localizedName != nil) {
            label = device.localizedName;
        }
        [devices addObject:@{
            @"deviceId" : device.uniqueID,
            @"groupId" : @"",
            @"label" : label,
            @"kind" : @"audioinput",
        }];
    }
    callback(@[ devices ]);
#endif
}
|
|
516
|
+
|
|
517
|
+
/// Creates an empty local media stream with the given id and registers it.
RCT_EXPORT_METHOD(mediaStreamCreate : (nonnull NSString *)streamID) {
    self.localStreams[streamID] = [self.peerConnectionFactory mediaStreamWithStreamId:streamID];
}
|
|
521
|
+
|
|
522
|
+
/// Adds a track (local when pcId == -1, otherwise remote from that peer
/// connection) to a previously registered local stream. No-op when either
/// the stream or the track cannot be found.
RCT_EXPORT_METHOD(mediaStreamAddTrack
                  : (nonnull NSString *)streamID
                  : (nonnull NSNumber *)pcId
                  : (nonnull NSString *)trackID) {
    RTCMediaStream *stream = self.localStreams[streamID];
    if (stream == nil) {
        return;
    }

    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (track == nil) {
        return;
    }

    if ([track.kind isEqualToString:@"audio"]) {
        [stream addAudioTrack:(RTCAudioTrack *)track];
    } else if ([track.kind isEqualToString:@"video"]) {
        [stream addVideoTrack:(RTCVideoTrack *)track];
    }
}
|
|
542
|
+
|
|
543
|
+
/// Removes a track (local when pcId == -1, otherwise remote from that peer
/// connection) from a registered local stream. No-op when either the stream
/// or the track cannot be found.
RCT_EXPORT_METHOD(mediaStreamRemoveTrack
                  : (nonnull NSString *)streamID
                  : (nonnull NSNumber *)pcId
                  : (nonnull NSString *)trackID) {
    RTCMediaStream *stream = self.localStreams[streamID];
    if (stream == nil) {
        return;
    }

    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (track == nil) {
        return;
    }

    if ([track.kind isEqualToString:@"audio"]) {
        [stream removeAudioTrack:(RTCAudioTrack *)track];
    } else if ([track.kind isEqualToString:@"video"]) {
        [stream removeVideoTrack:(RTCVideoTrack *)track];
    }
}
|
|
563
|
+
|
|
564
|
+
/// Unregisters a local stream. The tracks it contains are released
/// separately via mediaStreamTrackRelease.
RCT_EXPORT_METHOD(mediaStreamRelease : (nonnull NSString *)streamID) {
    if (self.localStreams[streamID] != nil) {
        [self.localStreams removeObjectForKey:streamID];
    }
}
|
|
570
|
+
|
|
571
|
+
/// Stops and unregisters a local track: disables it, stops its capturer
/// (messaging a nil captureController is a harmless no-op) and removes the
/// registry entry. No-op on tvOS or for unknown ids.
RCT_EXPORT_METHOD(mediaStreamTrackRelease : (nonnull NSString *)trackID) {
#if TARGET_OS_TV
    return;
#else

    RTCMediaStreamTrack *localTrack = self.localTracks[trackID];
    if (localTrack == nil) {
        return;
    }

    localTrack.isEnabled = NO;
    [localTrack.captureController stopCapture];
    [self.localTracks removeObjectForKey:trackID];
#endif
}
|
|
584
|
+
|
|
585
|
+
/// Enables or disables a track. For local tracks this also starts/stops the
/// underlying capturer so the hardware state follows the muted state.
RCT_EXPORT_METHOD(mediaStreamTrackSetEnabled : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (BOOL)enabled) {
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (track == nil) {
        return;
    }

    track.isEnabled = enabled;
#if !TARGET_OS_TV
    // Remote tracks have no capture controller.
    if (track.captureController) {
        if (enabled) {
            [track.captureController startCapture];
        } else {
            [track.captureController stopCapture];
        }
    }
#endif
}
|
|
602
|
+
|
|
603
|
+
/**
 * Applies new constraints to a local video track's capture controller.
 * Resolves with the resulting settings; rejects with E_INVALID when the
 * track is missing, is not a video track, or has no usable capture
 * controller — so the JS promise always settles.
 */
RCT_EXPORT_METHOD(mediaStreamTrackApplyConstraints
                  : (nonnull NSString *)trackID
                  : (NSDictionary *)constraints
                  : (RCTPromiseResolveBlock)resolve
                  : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else
    RTCMediaStreamTrack *track = self.localTracks[trackID];
    if (track == nil) {
        RCTLogWarn(@"mediaStreamTrackApplyConstraints() track is null");
        reject(@"E_INVALID", @"Could not get track", nil);
        return;
    }

    if (![track.kind isEqualToString:@"video"]) {
        RCTLogWarn(@"mediaStreamTrackApplyConstraints() track is not video");
        reject(@"E_INVALID", @"Can't apply constraints on audio tracks", nil);
        return;
    }

    RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
    if (![videoTrack.captureController isKindOfClass:[CaptureController class]]) {
        // Bug fix: this case previously fell through without calling either
        // resolve or reject, leaving the JS promise pending forever.
        reject(@"E_INVALID", @"Track has no capture controller", nil);
        return;
    }

    CaptureController *controller = (CaptureController *)videoTrack.captureController;
    NSError *error = nil;
    [controller applyConstraints:constraints error:&error];
    if (error) {
        reject(@"E_INVALID", error.localizedDescription, error);
    } else {
        resolve([controller getSettings]);
    }
#endif
}
|
|
636
|
+
|
|
637
|
+
/// Sets the playback volume on an audio track's source. Silently ignores
/// unknown ids and non-audio tracks.
RCT_EXPORT_METHOD(mediaStreamTrackSetVolume : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (double)volume) {
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (track == nil || ![track.kind isEqualToString:@"audio"]) {
        return;
    }

    ((RTCAudioTrack *)track).source.volume = volume;
}
|
|
644
|
+
|
|
645
|
+
/// Mutes/unmutes playback of an audio track by toggling its source volume
/// between 0.0 and 1.0. Silently ignores unknown ids and non-audio tracks.
RCT_EXPORT_METHOD(mediaStreamTrackSetPlaybackEnabled : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (BOOL)enabled) {
    RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
    if (track == nil || ![track.kind isEqualToString:@"audio"]) {
        return;
    }

    ((RTCAudioTrack *)track).source.volume = enabled ? 1.0 : 0.0;
}
|
|
652
|
+
|
|
653
|
+
// Attaches or detaches an AudioSamplesRenderer on an audio track so raw audio
// samples can be delivered to JS. Renderers are kept in self.audioRenderers,
// keyed per (pcId, trackId) pair.
RCT_EXPORT_METHOD(mediaStreamTrackEnableAudioSink : (nonnull NSNumber *)pcId : (nonnull NSString *)trackID : (BOOL)enabled) {
    NSString *key = [self audioSinkKeyForPcId:pcId trackId:trackID];

    if (enabled) {
        // Already attached for this (pcId, trackId): nothing to do.
        if (self.audioRenderers[key]) {
            return;
        }

        RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
        if (track == nil || ![track.kind isEqualToString:@"audio"]) {
            return;
        }

        RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
        AudioSamplesRenderer *renderer = [[AudioSamplesRenderer alloc] initWithModule:self trackId:trackID];
        self.audioRenderers[key] = renderer;
        [audioTrack addRenderer:renderer];
    } else {
        id<RTCAudioRenderer> renderer = self.audioRenderers[key];
        if (!renderer) {
            return;
        }

        // The track may already have been released; only detach the renderer
        // when it is still resolvable as an audio track. The registry entry is
        // removed either way.
        RTCMediaStreamTrack *track = [self trackForId:trackID pcId:pcId];
        if ([track isKindOfClass:[RTCAudioTrack class]]) {
            RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
            [audioTrack removeRenderer:renderer];
        }

        [self.audioRenderers removeObjectForKey:key];
    }
}
|
|
685
|
+
|
|
686
|
+
/**
 * Installs a chain of registered video-frame processors between a local
 * video track's capturer and its source. Unknown effect names are skipped;
 * an empty list installs an empty (pass-through) chain.
 */
RCT_EXPORT_METHOD(mediaStreamTrackSetVideoEffects
                  : (nonnull NSString *)trackID names
                  : (nonnull NSArray<NSString *> *)names) {
    RTCMediaStreamTrack *track = self.localTracks[trackID];
    if (track == nil) {
        return;
    }

    // Bug fix: the track was previously cast to RTCVideoTrack unchecked, so
    // an audio track id would send video-only messages to an RTCAudioTrack.
    if (![track.kind isEqualToString:@"video"]) {
        RCTLogWarn(@"mediaStreamTrackSetVideoEffects() track is not video");
        return;
    }

    RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
    RTCVideoSource *videoSource = videoTrack.source;

    // Resolve effect names to registered processors, skipping unknown names.
    NSMutableArray *processors = [[NSMutableArray alloc] init];
    for (NSString *name in names) {
        NSObject<VideoFrameProcessorDelegate> *processor = [ProcessorProvider getProcessor:name];
        if (processor != nil) {
            [processors addObject:processor];
        }
    }

    self.videoEffectProcessor = [[VideoEffectProcessor alloc] initWithProcessors:processors videoSource:videoSource];

    // Re-route capturer frames through the effect processor. Bug fix: the
    // controller was previously cast to VideoCaptureController unchecked;
    // a controller that does not expose `capturer` would have crashed with
    // an unrecognized-selector exception.
    id controller = videoTrack.captureController;
    if ([controller respondsToSelector:@selector(capturer)]) {
        RTCVideoCapturer *capturer = [controller capturer];
        capturer.delegate = self.videoEffectProcessor;
    }
}
|
|
712
|
+
|
|
713
|
+
#pragma mark - Helpers
|
|
714
|
+
|
|
715
|
+
/// Resolves a track id to a track. A pcId of -1 selects the local-track
/// registry; any other value selects the remote tracks of that peer
/// connection. Returns nil when the peer connection or track is unknown.
- (RTCMediaStreamTrack *)trackForId:(nonnull NSString *)trackId pcId:(nonnull NSNumber *)pcId {
    if (pcId.intValue == -1) {
        return self.localTracks[trackId];
    }

    RTCPeerConnection *peerConnection = self.peerConnections[pcId];
    // Messaging nil returns nil, matching the original explicit nil check.
    return peerConnection.remoteTracks[trackId];
}
|
|
727
|
+
|
|
728
|
+
@end
|