@rick427/react-native-liveness 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/LICENSE +20 -0
  2. package/LivenessCamera.podspec +26 -0
  3. package/README.md +167 -0
  4. package/android/build.gradle +80 -0
  5. package/android/src/main/AndroidManifest.xml +2 -0
  6. package/android/src/main/java/com/livenesscamera/LivenessCameraPackage.kt +28 -0
  7. package/android/src/main/java/com/livenesscamera/LivenessCameraPlugin.kt +63 -0
  8. package/ios/LivenessCameraPlugin-Bridging-Header.h +3 -0
  9. package/ios/LivenessCameraPlugin.m +8 -0
  10. package/ios/LivenessCameraPlugin.swift +69 -0
  11. package/lib/module/LivenessCamera.js +283 -0
  12. package/lib/module/LivenessCamera.js.map +1 -0
  13. package/lib/module/LivenessDetector.js +23 -0
  14. package/lib/module/LivenessDetector.js.map +1 -0
  15. package/lib/module/index.js +5 -0
  16. package/lib/module/index.js.map +1 -0
  17. package/lib/module/livenessScoring.js +58 -0
  18. package/lib/module/livenessScoring.js.map +1 -0
  19. package/lib/module/package.json +1 -0
  20. package/lib/module/types.js +4 -0
  21. package/lib/module/types.js.map +1 -0
  22. package/lib/module/useLivenessCamera.js +167 -0
  23. package/lib/module/useLivenessCamera.js.map +1 -0
  24. package/lib/typescript/package.json +1 -0
  25. package/lib/typescript/src/LivenessCamera.d.ts +3 -0
  26. package/lib/typescript/src/LivenessCamera.d.ts.map +1 -0
  27. package/lib/typescript/src/LivenessDetector.d.ts +8 -0
  28. package/lib/typescript/src/LivenessDetector.d.ts.map +1 -0
  29. package/lib/typescript/src/index.d.ts +4 -0
  30. package/lib/typescript/src/index.d.ts.map +1 -0
  31. package/lib/typescript/src/livenessScoring.d.ts +11 -0
  32. package/lib/typescript/src/livenessScoring.d.ts.map +1 -0
  33. package/lib/typescript/src/types.d.ts +61 -0
  34. package/lib/typescript/src/types.d.ts.map +1 -0
  35. package/lib/typescript/src/useLivenessCamera.d.ts +21 -0
  36. package/lib/typescript/src/useLivenessCamera.d.ts.map +1 -0
  37. package/package.json +120 -0
  38. package/src/LivenessCamera.tsx +284 -0
  39. package/src/LivenessDetector.ts +34 -0
  40. package/src/index.ts +9 -0
  41. package/src/livenessScoring.ts +81 -0
  42. package/src/types.ts +88 -0
  43. package/src/useLivenessCamera.ts +206 -0
package/LICENSE ADDED
@@ -0,0 +1,20 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Richard
4
+ Permission is hereby granted, free of charge, to any person obtaining a copy
5
+ of this software and associated documentation files (the "Software"), to deal
6
+ in the Software without restriction, including without limitation the rights
7
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8
+ copies of the Software, and to permit persons to whom the Software is
9
+ furnished to do so, subject to the following conditions:
10
+
11
+ The above copyright notice and this permission notice shall be included in all
12
+ copies or substantial portions of the Software.
13
+
14
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20
+ SOFTWARE.
@@ -0,0 +1,26 @@
# CocoaPods spec for the iOS half of @rick427/react-native-liveness.
require "json"

# Mirror the npm package metadata so the pod version/summary never drift
# from what is published to npm.
package = JSON.parse(File.read(File.join(__dir__, "package.json")))

Pod::Spec.new do |s|
  s.name = "LivenessCamera"
  s.version = package["version"]
  s.summary = package["description"]
  s.homepage = package["homepage"]
  s.license = package["license"]
  s.authors = package["author"]

  s.platforms = { :ios => "13.0" }
  s.source = { :git => "https://github.com/rick427/react-native-liveness.git", :tag => "#{s.version}" }

  s.source_files = "ios/**/*.{h,m,mm,swift}"
  s.swift_version = "5.7"

  # Vision Camera frame processor plugin
  s.dependency "VisionCamera"

  # ML Kit Face Detection
  s.dependency "GoogleMLKit/FaceDetection", "~> 7.0"

  # React Native helper that wires up React/codegen pod dependencies.
  install_modules_dependencies(s)
end
package/README.md ADDED
@@ -0,0 +1,167 @@
1
+ # @rick427/react-native-liveness
2
+
3
+ A React Native library for real-time **liveness detection** using the device's front camera. Powered by [Vision Camera v4](https://github.com/mrousavy/react-native-vision-camera) and **ML Kit Face Detection** — no server required, fully on-device.
4
+
5
+ The library scores each camera frame against a set of liveness signals (face size, head pose, eye openness), confirms liveness after a sustained high-score window, then automatically counts down **3 → 2 → 1**, plays a shutter sound, and captures the photo.
6
+
7
+ ---
8
+
9
+ ## Features
10
+
11
+ - Real-time passive liveness detection (no gestures required)
12
+ - On-device ML — works fully offline (ML Kit)
13
+ - Face guide oval with live feedback hints
14
+ - Animated countdown (React Native built-in `Animated`)
15
+ - Auto photo capture via Vision Camera's `takePhoto()`
16
+ - Optional shutter sound
17
+ - Fully typed TypeScript API
18
+
19
+ ---
20
+
21
+ ## Prerequisites
22
+
23
+ Install and link these peer dependencies in your project before using `@rick427/react-native-liveness`:
24
+
25
+ | Package | Version |
26
+ |---|---|
27
+ | `react-native-vision-camera` | `>= 4.0.0` |
28
+ | `react-native-worklets-core` | `>= 1.0.0` |
29
+ | `react-native-svg` | `>= 13.0.0` |
30
+
31
+ ```sh
32
+ npm install react-native-vision-camera react-native-worklets-core react-native-svg
33
+ ```
34
+
35
+ ---
36
+
37
+ ## Installation
38
+
39
+ ```sh
40
+ npm install @rick427/react-native-liveness
41
+ # or
42
+ yarn add @rick427/react-native-liveness
43
+ ```
44
+
45
+ Then install the peer dependencies if you haven't already:
46
+
47
+ ```sh
48
+ npm install react-native-vision-camera react-native-worklets-core react-native-svg
49
+ cd ios && pod install
50
+ ```
51
+
52
+ ### iOS
53
+
54
+ The ML Kit pod is already declared in the podspec — just run pod install:
55
+
56
+ ```sh
57
+ cd ios && pod install
58
+ ```
59
+
60
+ Add `NSCameraUsageDescription` to your `Info.plist`:
61
+
62
+ ```xml
63
+ <key>NSCameraUsageDescription</key>
64
+ <string>Camera access is required for liveness verification.</string>
65
+ ```
66
+
67
+ ### Android
68
+
69
+ The ML Kit dependency is included in `build.gradle` automatically. No extra steps needed.
70
+
71
+ ---
72
+
73
+ ## Usage
74
+
75
+ ### Drop-in component
76
+
77
+ ```tsx
78
+ import { LivenessCamera } from '@rick427/react-native-liveness';
79
+ import type { CaptureResult } from '@rick427/react-native-liveness';
80
+
81
+ export default function VerificationScreen() {
82
+ const handleCapture = (result: CaptureResult) => {
83
+ console.log('Photo path:', result.photo.path);
84
+ console.log('Liveness score:', result.livenessScore); // 0.0 – 1.0
85
+ };
86
+
87
+ return (
88
+ <LivenessCamera
89
+ style={{ flex: 1 }}
90
+ onCapture={handleCapture}
91
+ onLivenessConfirmed={() => console.log('Live face confirmed!')}
92
+ onError={(err) => console.error(err)}
93
+ />
94
+ );
95
+ }
96
+ ```
97
+
98
+ ### Props
99
+
100
+ | Prop | Type | Default | Description |
101
+ |---|---|---|---|
102
+ | `onCapture` | `(result: CaptureResult) => void` | **required** | Fired after photo is taken. |
103
+ | `onLivenessConfirmed` | `() => void` | — | Fired the moment liveness is confirmed, before the countdown. |
104
+ | `onError` | `(err: Error) => void` | — | Fired on unrecoverable errors. |
105
+ | `countdownFrom` | `number` | `3` | Countdown start value. |
106
+ | `livenessThreshold` | `number` | `0.75` | Score (0–1) required per frame to be considered live. |
107
+ | `confirmFrames` | `number` | `15` | Consecutive high-score frames required (~500 ms at 30 fps). |
108
+ | `soundEnabled` | `boolean` | `true` | Play the native system shutter sound on capture. Respects silent mode. |
109
+ | `style` | `ViewStyle` | — | Style for the root container. |
110
+
111
+ ### CaptureResult
112
+
113
+ ```ts
114
+ type CaptureResult = {
115
+ photo: PhotoFile; // Vision Camera PhotoFile
116
+ livenessScore: number; // rolling average score at time of capture (0–1)
117
+ timestamp: number; // Date.now() at capture
118
+ };
119
+ ```
120
+
121
+ ---
122
+
123
+ ## How liveness scoring works
124
+
125
+ Each camera frame is scored across four signals:
126
+
127
+ | Signal | Weight | Detail |
128
+ |---|---|---|
129
+ | Face detected | 20% | ML Kit found a face in the frame |
130
+ | Face size | 20% | Face width is 20%–65% of the frame (not too far, not too close) |
131
+ | Head pose | 30% | Yaw < ±20° and pitch < ±20° from frontal |
132
+ | Eyes open | 30% | Average of left/right eye open probability from ML Kit |
133
+
134
+ A rolling window of the last 20 frame scores is maintained. Liveness is confirmed once `confirmFrames` consecutive frames all score above `livenessThreshold`.
135
+
136
+ ---
137
+
138
+ ## Architecture
139
+
140
+ ```
141
+ Camera frame (30fps)
142
+ ↓ [worklet thread — Vision Camera frame processor]
143
+ Native plugin (Swift / Kotlin)
144
+ → ML Kit Face Detection
145
+ → { bounds, yawAngle, pitchAngle, leftEyeOpenProbability, … }
146
+ ↓ [runOnJS → JS thread]
147
+ useLivenessCamera hook
148
+ → scoreFrame() per frame
149
+ → rolling 20-frame window
150
+ → 15 consecutive frames > threshold → liveness confirmed
151
+
152
+ Countdown 3 → 2 → 1 (React Native Animated)
153
+
154
+ camera.takePhoto() → onCapture({ photo, livenessScore, timestamp })
155
+ ```
156
+
157
+ ---
158
+
159
+ ## Contributing
160
+
161
+ See [CONTRIBUTING.md](CONTRIBUTING.md) for development workflow and pull request guidelines.
162
+
163
+ ---
164
+
165
+ ## License
166
+
167
+ MIT © [Richard](https://github.com/rick427)
@@ -0,0 +1,80 @@
// Android build configuration for the LivenessCamera library module.
buildscript {
  // Fallback toolchain/SDK versions, used only when the consuming app's
  // rootProject.ext does not define its own (see getExtOrDefault below).
  ext.LivenessCamera = [
    kotlinVersion: "2.0.21",
    minSdkVersion: 24,
    compileSdkVersion: 36,
    targetSdkVersion: 36
  ]

  // Prefer the host app's value for `prop`; fall back to the defaults above.
  ext.getExtOrDefault = { prop ->
    if (rootProject.ext.has(prop)) {
      return rootProject.ext.get(prop)
    }
    return LivenessCamera[prop]
  }

  repositories {
    google()
    mavenCentral()
  }

  dependencies {
    classpath "com.android.tools.build:gradle:8.7.2"
    classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:${getExtOrDefault('kotlinVersion')}"
  }
}

apply plugin: "com.android.library"
apply plugin: "kotlin-android"
apply plugin: "com.facebook.react"

android {
  namespace "com.livenesscamera"
  compileSdkVersion getExtOrDefault("compileSdkVersion")

  defaultConfig {
    minSdkVersion getExtOrDefault("minSdkVersion")
    targetSdkVersion getExtOrDefault("targetSdkVersion")
  }

  buildFeatures {
    buildConfig true
  }

  buildTypes {
    release {
      // Library code ships unminified; the host app shrinks the final APK.
      minifyEnabled false
    }
  }

  lint {
    disable "GradleCompatible"
  }

  compileOptions {
    sourceCompatibility JavaVersion.VERSION_1_8
    targetCompatibility JavaVersion.VERSION_1_8
  }

  kotlinOptions {
    jvmTarget = "1.8"
  }
}

repositories {
  google()
  mavenCentral()
}

dependencies {
  implementation "com.facebook.react:react-android"

  // Vision Camera (Frame / FrameProcessorPlugin base classes).
  // NOTE(review): react-native-vision-camera is usually consumed as a project
  // dependency from node_modules rather than a Maven artifact — verify this
  // coordinate actually resolves in a clean build.
  implementation "com.mrousavy.camera:react-native-vision-camera:+"

  // ML Kit Face Detection
  implementation "com.google.mlkit:face-detection:16.1.7"

  // Google Play Services Tasks (for Tasks.await() synchronous execution)
  implementation "com.google.android.gms:play-services-tasks:18.1.0"
}
@@ -0,0 +1,2 @@
<!-- Library manifest: intentionally empty. NOTE(review): no CAMERA permission
     is declared here — presumably the host app / react-native-vision-camera
     declares it; confirm against the consuming app's merged manifest. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
</manifest>
@@ -0,0 +1,28 @@
package com.livenesscamera

import com.facebook.react.ReactPackage
import com.facebook.react.bridge.NativeModule
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager
import com.mrousavy.camera.frameprocessors.FrameProcessorPluginRegistry

/**
 * ReactPackage whose only job is to register the liveness frame-processor
 * plugin with Vision Camera. It deliberately exposes no native modules and
 * no view managers — all communication happens through the frame processor.
 */
class LivenessCameraPackage : ReactPackage {

  companion object {
    // Runs once, when this class is first loaded by the host app.
    init {
      // Register the frame processor plugin under the name "detectLiveness".
      // JS side calls VisionCameraProxy.initFrameProcessorPlugin('detectLiveness').
      FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectLiveness") { proxy, options ->
        LivenessCameraPlugin(proxy, options)
      }
    }
  }

  // No native modules to contribute.
  override fun createNativeModules(
    reactContext: ReactApplicationContext
  ): MutableList<NativeModule> = mutableListOf()

  // No custom views: the camera view comes from react-native-vision-camera.
  override fun createViewManagers(
    reactContext: ReactApplicationContext
  ): MutableList<ViewManager<*, *>> = mutableListOf()
}
@@ -0,0 +1,63 @@
package com.livenesscamera

import android.media.Image
import com.google.android.gms.tasks.Tasks
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.face.FaceDetection
import com.google.mlkit.vision.face.FaceDetectorOptions
import com.mrousavy.camera.frameprocessors.Frame
import com.mrousavy.camera.frameprocessors.FrameProcessorPlugin
import com.mrousavy.camera.frameprocessors.VisionCameraProxyHolder

/**
 * Vision Camera frame-processor plugin that runs ML Kit face detection on a
 * single camera frame and returns a map of liveness signals for the JS layer:
 * detection flag, face bounds, head Euler angles, and eye/smile probabilities.
 *
 * Registered under the JS name "detectLiveness" by [LivenessCameraPackage].
 */
class LivenessCameraPlugin(
  proxy: VisionCameraProxyHolder,
  options: Map<String, Any>?
) : FrameProcessorPlugin(proxy, options) {

  // FAST mode + ALL classification gives eye/smile probabilities per frame;
  // landmarks and contours are disabled because scoring does not use them.
  // NOTE(review): the detector is never close()d — confirm the plugin's
  // lifetime matches the camera session so this does not leak.
  private val faceDetector = FaceDetection.getClient(
    FaceDetectorOptions.Builder()
      .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
      .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
      .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
      .setContourMode(FaceDetectorOptions.CONTOUR_MODE_NONE)
      .build()
  )

  /**
   * Called by Vision Camera for every frame, on the frame-processor thread.
   *
   * @return a map with "detected" to false when no face is found (or on any
   *   detection error), otherwise the full signal map for the first face.
   */
  override fun callback(frame: Frame, arguments: Map<String, Any>?): Any {
    val mediaImage: Image = frame.image
    // ML Kit needs the frame rotation to upright the image before detection.
    val rotationDegrees = frame.orientation.toDegrees()

    val inputImage = InputImage.fromMediaImage(mediaImage, rotationDegrees)

    return try {
      // Tasks.await blocks the frame-processor thread synchronously.
      // ML Kit face detection is fast (~5-10ms) so this is acceptable.
      val faces = Tasks.await(faceDetector.process(inputImage))

      if (faces.isEmpty()) {
        return mapOf("detected" to false)
      }

      // NOTE(review): only the first detected face is scored; with multiple
      // faces in frame this choice is arbitrary (not necessarily the largest).
      val face = faces.first()
      val box = face.boundingBox

      mapOf(
        "detected" to true,
        "bounds" to mapOf(
          "x" to box.left.toFloat(),
          "y" to box.top.toFloat(),
          "width" to box.width().toFloat(),
          "height" to box.height().toFloat()
        ),
        // Euler Y = yaw (left/right), X = pitch (up/down), Z = roll (tilt).
        "yawAngle" to face.headEulerAngleY,
        "pitchAngle" to face.headEulerAngleX,
        "rollAngle" to face.headEulerAngleZ,
        // -1f is the sentinel for "classification unavailable on this face".
        "leftEyeOpenProbability" to (face.leftEyeOpenProbability ?: -1f),
        "rightEyeOpenProbability" to (face.rightEyeOpenProbability ?: -1f),
        "smilingProbability" to (face.smilingProbability ?: -1f)
      )
    } catch (e: Exception) {
      // Best-effort: a failed frame is reported as "no face" rather than
      // crashing the frame-processor pipeline.
      mapOf("detected" to false)
    }
  }
}
@@ -0,0 +1,3 @@
// Bridging header for the Swift frame-processor plugin: exposes the
// VisionCamera Obj-C plugin API (base class, registry, proxy holder)
// to LivenessCameraPlugin.swift.
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
#import <VisionCamera/VisionCameraProxyHolder.h>
@@ -0,0 +1,8 @@
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
#import <VisionCamera/VisionCameraProxyHolder.h>

// Registers the Swift class "LivenessCameraPlugin" under the JS name "detectLiveness".
// VisionCameraProxy.initFrameProcessorPlugin('detectLiveness') on the JS side
// will resolve to this class.
//
// The plugin is implemented in Swift, so the *_SWIFT_* variant of the export
// macro is required: it forward-declares the class for the Obj-C compiler.
// The plain VISION_EXPORT_FRAME_PROCESSOR macro only compiles for classes
// with a visible Obj-C @interface (no "-Swift.h" header is imported here).
VISION_EXPORT_SWIFT_FRAME_PROCESSOR(LivenessCameraPlugin, detectLiveness)
@@ -0,0 +1,69 @@
import VisionCamera
import MLKitFaceDetection
import MLKitVision
import UIKit
import AVFoundation

/// Vision Camera frame-processor plugin that runs ML Kit face detection on
/// each camera frame and returns a dictionary of liveness signals
/// (detection flag, face bounds, head Euler angles, eye/smile probabilities).
///
/// Registered under the JS name "detectLiveness" (see LivenessCameraPlugin.m).
@objc(LivenessCameraPlugin)
public class LivenessCameraPlugin: FrameProcessorPlugin {

  /// Sentinel matching the Android implementation: reported when ML Kit did
  /// not compute a classification probability for this face.
  private static let unknownProbability: CGFloat = -1

  private var faceDetector: FaceDetector

  public override init(
    proxy: VisionCameraProxyHolder,
    options: [AnyHashable: Any]? = [:]
  ) {
    // Fast mode + classification gives eye/smile probabilities per frame;
    // landmarks/contours are disabled because liveness scoring ignores them.
    let detectorOptions = FaceDetectorOptions()
    detectorOptions.performanceMode = .fast
    detectorOptions.classificationMode = .all
    detectorOptions.landmarkMode = .none
    detectorOptions.contourMode = .none
    detectorOptions.isTrackingEnabled = false
    self.faceDetector = FaceDetector.faceDetector(options: detectorOptions)
    super.init(proxy: proxy, options: options)
  }

  /// Called by Vision Camera for every frame, on the frame-processor thread.
  /// Returns `["detected": false]` when no face is found or detection throws.
  public override func callback(
    _ frame: Frame,
    withArguments arguments: [AnyHashable: Any]?
  ) -> Any {
    // frame.orientation is already a UIImage.Orientation in Vision Camera v4;
    // pass it straight through so ML Kit can upright the buffer itself.
    let image = VisionImage(buffer: frame.buffer)
    image.orientation = frame.orientation

    do {
      let faces = try faceDetector.results(in: image)
      guard let face = faces.first else {
        return ["detected": false]
      }

      // ML Kit only fills a classification probability when the matching
      // `has*Probability` flag is true; otherwise the value is undefined.
      // Guarding here mirrors the Android plugin's `?: -1f` sentinel.
      let leftEye = face.hasLeftEyeOpenProbability
        ? face.leftEyeOpenProbability : Self.unknownProbability
      let rightEye = face.hasRightEyeOpenProbability
        ? face.rightEyeOpenProbability : Self.unknownProbability
      let smiling = face.hasSmilingProbability
        ? face.smilingProbability : Self.unknownProbability

      return [
        "detected": true,
        "bounds": [
          "x": face.frame.origin.x,
          "y": face.frame.origin.y,
          "width": face.frame.size.width,
          "height": face.frame.size.height,
        ],
        // Euler Y = yaw (left/right), X = pitch (up/down), Z = roll (tilt) —
        // same mapping as the Android plugin.
        "yawAngle": face.headEulerAngleY,
        "pitchAngle": face.headEulerAngleX,
        "rollAngle": face.headEulerAngleZ,
        "leftEyeOpenProbability": leftEye,
        "rightEyeOpenProbability": rightEye,
        "smilingProbability": smiling,
      ]
    } catch {
      // Best-effort: a failed frame is reported as "no face" rather than
      // crashing the frame-processor pipeline.
      return ["detected": false]
    }
  }
}