vision-camera-face-detection 2.2.3 → 2.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,24 +1,337 @@
1
1
  # vision-camera-face-detection
2
2
 
3
- Plugin Face Detection for Vision Camera 4
3
+ A high-performance Face Detection plugin for [React Native Vision Camera](https://github.com/mrousavy/react-native-vision-camera) V4. Powered by Google ML Kit Face Detection for real-time face detection with landmarks, contours, classification, and TensorFlow Lite support for face recognition.
4
+
5
+ [![npm version](https://img.shields.io/npm/v/vision-camera-face-detection.svg)](https://www.npmjs.com/package/vision-camera-face-detection)
6
+ [![license](https://img.shields.io/npm/l/vision-camera-face-detection.svg)](https://github.com/edritech93/vision-camera-face-detection/blob/main/LICENSE)
7
+
8
+ ## Features
9
+
10
+ - 🚀 Real-time face detection using Google ML Kit
11
+ - 📍 Face landmarks detection (eyes, ears, nose, cheeks, mouth)
12
+ - 🎯 Face contours detection
13
+ - 😊 Face classification (smiling, eyes open probability)
14
+ - 📐 Face angles (pitch, roll, yaw)
15
+ - 🔄 Face tracking across frames
16
+ - 🧠 TensorFlow Lite integration for face recognition/embedding
17
+ - ⚡ Optimized async processing without blocking camera preview
18
+
19
+ ## Requirements
20
+
21
+ - React Native >= 0.83
22
+ - Node.js >= 20
23
+ - react-native-vision-camera >= 4.6
24
+ - react-native-worklets-core >= 1.5
25
+ - iOS 15.5+
26
+ - Android minSdkVersion 24+
4
27
 
5
28
  ## Installation
6
29
 
7
30
  ```sh
8
31
  npm install vision-camera-face-detection
32
+ # or
33
+ yarn add vision-camera-face-detection
34
+ ```
35
+
36
+ ### iOS
37
+
38
+ Add the following to your `Podfile`:
39
+
40
+ ```ruby
41
+ pod 'GoogleMLKit/FaceDetection', '~> 6.0.0'
9
42
  ```
10
43
 
44
+ Then run:
45
+
46
+ ```sh
47
+ cd ios && pod install
48
+ ```
49
+
50
+ For TensorFlow Lite support, add the TFLite model file (e.g., `mobile_face_net.tflite`) to your iOS project.
51
+
52
+ ### Android
53
+
54
+ No additional setup required. The library automatically links the required ML Kit dependencies.
55
+
56
+ For TensorFlow Lite support, place your model file in the `assets` folder.
57
+
11
58
  ## Usage
12
59
 
60
+ ### Basic Face Detection
61
+
62
+ ```tsx
63
+ import React, { useRef } from 'react';
64
+ import { StyleSheet, View } from 'react-native';
65
+ import {
66
+ useCameraDevice,
67
 + useCameraPermission,
 + type Frame,
68
+ } from 'react-native-vision-camera';
69
+ import {
70
+ Camera,
71
+ type Face,
72
+ type FaceDetectionOptions,
73
+ } from 'vision-camera-face-detection';
74
+
75
+ export default function App() {
76
+ const { hasPermission, requestPermission } = useCameraPermission();
77
+ const device = useCameraDevice('front');
78
+
79
+ const faceDetectionOptions: FaceDetectionOptions = {
80
+ performanceMode: 'fast',
81
+ classificationMode: 'all',
82
+ landmarkMode: 'all',
83
+ contourMode: 'none',
84
+ };
85
+
86
+ const handleFacesDetected = (faces: Face[], frame: Frame) => {
87
+ console.log('Detected faces:', faces.length);
88
+
89
+ if (faces.length > 0) {
90
+ const face = faces[0];
91
+ console.log('Face bounds:', face.bounds);
92
+ console.log('Smiling probability:', face.smilingProbability);
93
+ console.log('Left eye open:', face.leftEyeOpenProbability);
94
+ console.log('Right eye open:', face.rightEyeOpenProbability);
95
+ }
96
+ };
97
+
98
+ if (!hasPermission) {
99
+ requestPermission();
100
+ return null;
101
+ }
102
+
103
+ if (!device) return null;
104
+
105
+ return (
106
+ <View style={styles.container}>
107
+ <Camera
108
+ style={StyleSheet.absoluteFill}
109
+ device={device}
110
+ isActive={true}
111
+ faceDetectionOptions={faceDetectionOptions}
112
+ faceDetectionCallback={handleFacesDetected}
113
+ />
114
+ </View>
115
+ );
116
+ }
117
+
118
+ const styles = StyleSheet.create({
119
+ container: {
120
+ flex: 1,
121
+ },
122
+ });
123
+ ```
124
+
125
+ ### Using the `useFaceDetector` Hook
126
+
127
+ For more control, you can use the `useFaceDetector` hook directly with a custom frame processor:
128
+
129
+ ```tsx
130
+ import { useFrameProcessor } from 'react-native-vision-camera';
131
+ import {
132
+ useFaceDetector,
133
+ type FaceDetectionOptions,
134
+ } from 'vision-camera-face-detection';
135
+
136
+ const faceDetectionOptions: FaceDetectionOptions = {
137
+ performanceMode: 'accurate',
138
+ landmarkMode: 'all',
139
+ contourMode: 'all',
140
+ classificationMode: 'all',
141
+ };
142
+
143
+ const { detectFaces } = useFaceDetector(faceDetectionOptions);
144
+
145
+ const frameProcessor = useFrameProcessor(
146
+ (frame) => {
147
+ 'worklet';
148
+ const faces = detectFaces(frame);
149
+ console.log('Faces:', faces);
150
+ },
151
+ [detectFaces]
152
+ );
153
+ ```
154
+
155
+ ### TensorFlow Lite Integration
156
+
157
+ For face recognition/embedding using TensorFlow Lite:
13
158
 
14
- ```js
15
- import { multiply } from 'vision-camera-face-detection';
159
+ ```tsx
160
+ import {
161
+ initTensor,
162
+ detectFromBase64,
163
+ type DetectBas64Type,
164
+ } from 'vision-camera-face-detection';
16
165
 
17
- // ...
166
+ // Initialize TensorFlow Lite model
167
+ await initTensor('mobile_face_net', 1);
18
168
 
19
- const result = await multiply(3, 7);
169
+ // Detect face from base64 image and get embedding
170
+ const result: DetectBas64Type = await detectFromBase64(base64Image);
171
+ console.log('Face embedding:', result.data);
172
+ console.log('Cropped face base64:', result.base64);
20
173
  ```
21
174
 
175
+ ## API Reference
176
+
177
+ ### `<Camera />` Component
178
+
179
+ A wrapper around Vision Camera that includes face detection.
180
+
181
+ | Prop | Type | Description |
182
+ | ----------------------- | --------------------------------------- | -------------------------------- |
183
+ | `faceDetectionOptions` | `FaceDetectionOptions` | Configuration for face detection |
184
+ | `faceDetectionCallback` | `(faces: Face[], frame: Frame) => void` | Callback when faces are detected |
185
+ | `...props` | `CameraProps` | All Vision Camera props |
186
+
187
+ ### `FaceDetectionOptions`
188
+
189
+ | Option | Type | Default | Description |
190
+ | -------------------- | ---------------------- | --------- | ---------------------------------------- |
191
+ | `performanceMode` | `'fast' \| 'accurate'` | `'fast'` | Favor speed or accuracy |
192
+ | `landmarkMode` | `'none' \| 'all'` | `'none'` | Detect facial landmarks |
193
+ | `contourMode` | `'none' \| 'all'` | `'none'` | Detect face contours |
194
+ | `classificationMode` | `'none' \| 'all'` | `'none'` | Classify faces (smiling, eyes open) |
195
+ | `minFaceSize` | `number` | `0.15` | Minimum face size ratio |
196
+ | `trackingEnabled` | `boolean` | `false` | Enable face tracking across frames |
197
+ | `autoMode` | `boolean` | `false` | Auto scale bounds for screen coordinates |
198
+ | `windowWidth` | `number` | `1.0` | Screen width for auto scaling |
199
+ | `windowHeight` | `number` | `1.0` | Screen height for auto scaling |
200
+ | `cameraFacing` | `'front' \| 'back'` | `'front'` | Current camera position |
201
+ | `enableTensor` | `boolean` | `false` | Enable TensorFlow Lite processing |
202
+
203
+ ### `Face` Interface
204
+
205
+ ```typescript
206
+ interface Face {
207
+ pitchAngle: number; // Head rotation around X-axis
208
+ rollAngle: number; // Head rotation around Z-axis
209
+ yawAngle: number; // Head rotation around Y-axis
210
+ bounds: Bounds; // Face bounding box
211
+ leftEyeOpenProbability: number; // 0.0 to 1.0
212
+ rightEyeOpenProbability: number; // 0.0 to 1.0
213
+ smilingProbability: number; // 0.0 to 1.0
214
+ contours?: Contours; // Face contour points
215
+ landmarks?: Landmarks; // Face landmark points
216
+ data: number[]; // Face embedding (when TensorFlow enabled)
217
+ }
218
+
219
+ interface Bounds {
220
+ x: number;
221
+ y: number;
222
+ width: number;
223
+ height: number;
224
+ }
225
+ ```
226
+
227
+ ### `Landmarks` Interface
228
+
229
+ ```typescript
230
+ interface Landmarks {
231
+ LEFT_CHEEK: Point;
232
+ LEFT_EAR: Point;
233
+ LEFT_EYE: Point;
234
+ MOUTH_BOTTOM: Point;
235
+ MOUTH_LEFT: Point;
236
+ MOUTH_RIGHT: Point;
237
+ NOSE_BASE: Point;
238
+ RIGHT_CHEEK: Point;
239
+ RIGHT_EAR: Point;
240
+ RIGHT_EYE: Point;
241
+ }
242
+ ```
243
+
244
+ ### `Contours` Interface
245
+
246
+ ```typescript
247
+ interface Contours {
248
+ FACE: Point[];
249
+ LEFT_EYEBROW_TOP: Point[];
250
+ LEFT_EYEBROW_BOTTOM: Point[];
251
+ RIGHT_EYEBROW_TOP: Point[];
252
+ RIGHT_EYEBROW_BOTTOM: Point[];
253
+ LEFT_EYE: Point[];
254
+ RIGHT_EYE: Point[];
255
+ UPPER_LIP_TOP: Point[];
256
+ UPPER_LIP_BOTTOM: Point[];
257
+ LOWER_LIP_TOP: Point[];
258
+ LOWER_LIP_BOTTOM: Point[];
259
+ NOSE_BRIDGE: Point[];
260
+ NOSE_BOTTOM: Point[];
261
+ LEFT_CHEEK: Point[];
262
+ RIGHT_CHEEK: Point[];
263
+ }
264
+ ```
265
+
266
+ ### TensorFlow Lite Functions
267
+
268
+ #### `initTensor(modelPath: string, count?: number): Promise<string>`
269
+
270
+ Initialize TensorFlow Lite model for face recognition.
271
+
272
+ - `modelPath`: Name of the TFLite model file (without extension)
273
+ - `count`: Number of threads (optional)
274
+
275
+ #### `detectFromBase64(imageString: string): Promise<DetectBas64Type>`
276
+
277
+ Detect face from base64 image and return face embedding.
278
+
279
+ ```typescript
280
+ type DetectBas64Type = {
281
+ base64: string; // Cropped face image
282
+ data: number[]; // Face embedding array
283
+ message: string; // Status message
284
+ leftEyeOpenProbability: number;
285
+ rightEyeOpenProbability: number;
286
+ smilingProbability: number;
287
+ };
288
+ ```
289
+
290
+ ## Example: Face Comparison
291
+
292
+ ```tsx
293
 + // Calculate squared Euclidean distance between two face embeddings
 + // (no square root needed when only comparing against a threshold)
294
+ function compareFaces(embedding1: number[], embedding2: number[]): number {
295
+ let distance = 0.0;
296
+ for (let i = 0; i < embedding1.length; i++) {
297
+ const diff = embedding1[i] - embedding2[i];
298
+ distance += diff * diff;
299
+ }
300
+ return distance; // Lower = more similar
301
+ }
302
+
303
+ // Usage
304
+ const distance = compareFaces(knownFaceEmbedding, detectedFaceEmbedding);
305
+ const isSamePerson = distance < 1.0; // Threshold may vary
306
+ ```
307
+
308
+ ## Troubleshooting
309
+
310
+ ### iOS Build Issues
311
+
312
+ If you encounter build issues on iOS, ensure you have:
313
+
314
+ 1. Run `pod install` after adding the package
315
+ 2. Added the required permissions in `Info.plist`:
316
+ ```xml
317
+ <key>NSCameraUsageDescription</key>
318
+ <string>Camera access is required for face detection</string>
319
+ ```
320
+
321
+ ### Android Build Issues
322
+
323
+ If you encounter build issues on Android:
324
+
325
+ 1. Ensure `minSdkVersion` is at least 24
326
+ 2. Ensure `compileSdkVersion` and `targetSdkVersion` are at least 36
327
+ 3. Enable `multiDexEnabled` if needed
328
+
329
+ ### Performance Tips
330
+
331
+ - Use `performanceMode: 'fast'` for real-time applications
332
+ - Disable `contourMode` and `landmarkMode` if not needed
333
+ - Use `minFaceSize` to filter small faces
334
+ - Disable `trackingEnabled` when using `contourMode`
22
335
 
23
336
  ## Contributing
24
337
 
@@ -30,4 +343,4 @@ MIT
30
343
 
31
344
  ---
32
345
 
33
- Made with [create-react-native-library](https://github.com/callstack/react-native-builder-bob)
346
+ Made with ❤️ by [Yudi Edri Alviska](https://github.com/edritech93)
@@ -16,9 +16,9 @@ Pod::Spec.new do |s|
16
16
 
17
17
  s.source_files = "ios/**/*.{h,m,mm,swift}"
18
18
 
19
- s.dependency 'GoogleMLKit/FaceDetection', '7.0.0'
19
+ s.dependency 'GoogleMLKit/FaceDetection'
20
20
  s.dependency "VisionCamera"
21
- s.dependency "TensorFlowLiteSwift", "2.11.0"
21
+ s.dependency "TensorFlowLiteSwift", "~> 2.17.0"
22
22
 
23
23
  # Use install_modules_dependencies helper to install the dependencies if React Native version >=0.71.0.
24
24
  # See https://github.com/facebook/react-native/blob/febf6b7f33fdb4904669f99d795eba4c0f95d7bf/scripts/cocoapods/new_architecture.rb#L79.
@@ -85,12 +85,9 @@ dependencies {
85
85
  implementation "com.facebook.react:react-android"
86
86
  implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
87
87
  api project(":react-native-vision-camera")
88
- implementation "androidx.annotation:annotation:1.8.2"
89
- implementation "androidx.camera:camera-core:1.3.4"
88
+ implementation "androidx.annotation:annotation:1.9.1"
89
+ implementation "androidx.camera:camera-core:1.5.2"
90
90
  implementation "com.google.mlkit:face-detection:16.1.7"
91
- implementation 'com.google.ai.edge.litert:litert:1.4.0'
92
- implementation 'com.google.ai.edge.litert:litert-api:1.4.0'
93
- implementation 'com.google.ai.edge.litert:litert-support:1.4.0'
94
- implementation 'com.google.ai.edge.litert:litert-metadata:1.4.0'
91
+ implementation 'com.google.ai.edge.litert:litert:2.1.0'
95
92
  }
96
93
 
@@ -6,12 +6,10 @@ import android.util.Base64
6
6
  import com.google.mlkit.vision.face.Face
7
7
  import com.google.mlkit.vision.face.FaceContour
8
8
  import org.tensorflow.lite.Interpreter
9
- import org.tensorflow.lite.support.common.ops.NormalizeOp
10
- import org.tensorflow.lite.support.image.ImageProcessor
11
- import org.tensorflow.lite.support.image.TensorImage
12
- import org.tensorflow.lite.support.image.ops.ResizeOp
9
+ //import com.google.ai.edge.litert.Interpreter
13
10
  import java.io.ByteArrayOutputStream
14
11
  import java.nio.ByteBuffer
12
+ import java.nio.ByteOrder
15
13
  import kotlin.math.ceil
16
14
 
17
15
  var interpreter: Interpreter? = null
@@ -19,14 +17,64 @@ const val TF_OD_API_INPUT_SIZE = 112
19
17
 
20
18
  class FaceHelper {
21
19
 
22
- private val imageTensorProcessor: ImageProcessor = ImageProcessor.Builder()
23
- .add(ResizeOp(TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE, ResizeOp.ResizeMethod.BILINEAR))
24
- .add(NormalizeOp(127.5f, 127.5f))
25
- .build()
26
-
20
+ /**
21
+ * Converts a bitmap to a ByteBuffer suitable for TensorFlow Lite inference.
22
+ * This replaces the litert-support library's ImageProcessor functionality.
23
+ *
24
+ * The bitmap is resized to TF_OD_API_INPUT_SIZE x TF_OD_API_INPUT_SIZE and
25
+ * normalized using the formula: (pixel - 127.5) / 127.5
26
+ */
27
27
  fun bitmap2ByteBuffer(bitmap: Bitmap?): ByteBuffer {
28
- val imageTensor: TensorImage = imageTensorProcessor.process(TensorImage.fromBitmap(bitmap))
29
- return imageTensor.buffer
28
+ if (bitmap == null) {
29
+ // Return an empty buffer if bitmap is null
30
+ val emptyBuffer = ByteBuffer.allocateDirect(TF_OD_API_INPUT_SIZE * TF_OD_API_INPUT_SIZE * 3 * 4)
31
+ emptyBuffer.order(ByteOrder.nativeOrder())
32
+ return emptyBuffer
33
+ }
34
+
35
+ // Resize bitmap to the required input size using bilinear filtering
36
+ val resizedBitmap = Bitmap.createScaledBitmap(
37
+ bitmap,
38
+ TF_OD_API_INPUT_SIZE,
39
+ TF_OD_API_INPUT_SIZE,
40
+ true
41
+ )
42
+
43
+ // Allocate ByteBuffer for float values (3 channels * 4 bytes per float)
44
+ val byteBuffer = ByteBuffer.allocateDirect(TF_OD_API_INPUT_SIZE * TF_OD_API_INPUT_SIZE * 3 * 4)
45
+ byteBuffer.order(ByteOrder.nativeOrder())
46
+
47
+ // Extract pixel values and normalize
48
+ val intValues = IntArray(TF_OD_API_INPUT_SIZE * TF_OD_API_INPUT_SIZE)
49
+ resizedBitmap.getPixels(
50
+ intValues,
51
+ 0,
52
+ TF_OD_API_INPUT_SIZE,
53
+ 0,
54
+ 0,
55
+ TF_OD_API_INPUT_SIZE,
56
+ TF_OD_API_INPUT_SIZE
57
+ )
58
+
59
+ // Normalize pixel values: (pixel - 127.5) / 127.5
60
+ // This maps [0, 255] to [-1, 1]
61
+ for (pixelValue in intValues) {
62
+ val r = (pixelValue shr 16 and 0xFF)
63
+ val g = (pixelValue shr 8 and 0xFF)
64
+ val b = (pixelValue and 0xFF)
65
+
66
+ byteBuffer.putFloat((r - 127.5f) / 127.5f)
67
+ byteBuffer.putFloat((g - 127.5f) / 127.5f)
68
+ byteBuffer.putFloat((b - 127.5f) / 127.5f)
69
+ }
70
+
71
+ // Clean up resized bitmap if it's different from the original
72
+ if (resizedBitmap != bitmap) {
73
+ resizedBitmap.recycle()
74
+ }
75
+
76
+ byteBuffer.rewind()
77
+ return byteBuffer
30
78
  }
31
79
 
32
80
  fun processBoundingBox(boundingBox: Rect): MutableMap<String, Any> {
@@ -17,7 +17,6 @@ import com.google.mlkit.vision.face.FaceDetector
17
17
  import com.google.mlkit.vision.face.FaceDetectorOptions
18
18
  import com.google.mlkit.vision.face.FaceLandmark
19
19
  import com.mrousavy.camera.core.FrameInvalidError
20
- import com.mrousavy.camera.core.types.Orientation
21
20
  import com.mrousavy.camera.core.types.Position
22
21
  import com.mrousavy.camera.frameprocessors.Frame
23
22
  import com.mrousavy.camera.frameprocessors.FrameProcessorPlugin
@@ -112,12 +112,28 @@ class FaceHelper {
112
112
 
113
113
  static func getImageFaceFromBuffer(from sampleBuffer: CMSampleBuffer, rectImage: CGRect, orientation: UIImage.Orientation) -> CVPixelBuffer? {
114
114
  autoreleasepool {
115
- let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
116
- let ciimage = CIImage(cvPixelBuffer: imageBuffer!)
115
+ guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
116
+ print("Error: Failed to get image buffer from sample buffer")
117
+ return nil
118
+ }
119
+
120
+ let ciimage = CIImage(cvPixelBuffer: imageBuffer)
117
121
  let context = CIContext(options: nil)
118
- let cgImage = context.createCGImage(ciimage, from: ciimage.extent)!
119
122
 
120
- let imageRef: CGImage = cgImage.cropping(to: rectImage)!
123
+ guard let cgImage = context.createCGImage(ciimage, from: ciimage.extent) else {
124
+ print("Error: Failed to create CGImage from CIImage")
125
+ return nil
126
+ }
127
+
128
+ // Ensure rectImage is within bounds of the image
129
+ let imageRect = CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height)
130
+ let clampedRect = rectImage.intersection(imageRect)
131
+
132
+ guard !clampedRect.isEmpty, let imageRef = cgImage.cropping(to: clampedRect) else {
133
+ print("Error: Failed to crop image. Face rect: \(rectImage), Image bounds: \(imageRect)")
134
+ return nil
135
+ }
136
+
121
137
  let imageCrop: UIImage = UIImage(cgImage: imageRef, scale: 0.5, orientation: orientation)
122
138
  return uiImageToPixelBuffer(image: imageCrop, size: inputWidth)
123
139
  }
@@ -24,7 +24,9 @@ class VisionCameraFaceDetectionModule: NSObject {
24
24
  forResource: modelName,
25
25
  ofType: "tflite"
26
26
  ) else {
27
- print("Failed to load the model file with name: \(modelName).")
27
+ let errorMessage = "Failed to load the model file with name: \(modelName).tflite. Make sure the file is added to your app bundle."
28
+ print(errorMessage)
29
+ reject("MODEL_NOT_FOUND", errorMessage, nil)
28
30
  return
29
31
  }
30
32
  do {
@@ -32,10 +34,11 @@ class VisionCameraFaceDetectionModule: NSObject {
32
34
  options.threadCount = count
33
35
  interpreter = try Interpreter(modelPath: modelPath, options: options)
34
36
  try interpreter?.allocateTensors()
37
+ print("TensorFlow Lite interpreter initialized successfully with model: \(modelName)")
35
38
  resolve("initialization tflite success")
36
39
  } catch let error {
37
40
  print("Failed to create the interpreter with error: \(error.localizedDescription)")
38
- reject("Error", "tflite error", error)
41
+ reject("INTERPRETER_ERROR", "tflite error: \(error.localizedDescription)", error)
39
42
  return
40
43
  }
41
44
  }
@@ -291,21 +291,40 @@ public class VisionCameraFaceDetectionPlugin: FrameProcessorPlugin {
291
291
  for face in faces {
292
292
  var map: [String: Any] = [:]
293
293
  if enableTensor {
294
+ // Check if interpreter is initialized
295
+ guard let tfliteInterpreter = interpreter else {
296
+ print("Error: TensorFlow Lite interpreter is not initialized. Call initTensor() first.")
297
+ map["data"] = []
298
+ map["error"] = "Interpreter not initialized"
299
+ result.append(map)
300
+ continue
301
+ }
302
+
294
303
  guard let imageCrop = FaceHelper.getImageFaceFromBuffer(from: frame.buffer, rectImage: face.frame, orientation: image.orientation) else {
295
- return nil
304
+ print("Error: Failed to crop face image from buffer")
305
+ map["data"] = []
306
+ map["error"] = "Failed to crop face image"
307
+ result.append(map)
308
+ continue
296
309
  }
297
310
  guard let rgbData = FaceHelper.rgbDataFromBuffer(imageCrop) else {
298
- return nil
311
+ print("Error: Failed to convert image buffer to RGB data")
312
+ map["data"] = []
313
+ map["error"] = "Failed to convert to RGB"
314
+ result.append(map)
315
+ continue
299
316
  }
300
- try interpreter?.copy(rgbData, toInputAt: 0)
301
- try interpreter?.invoke()
302
- let outputTensor: Tensor? = try interpreter?.output(at: 0)
303
317
 
304
- if ((outputTensor?.data) != nil) {
305
- let result: [Float] = [Float32](unsafeData: outputTensor!.data) ?? []
306
- map["data"] = result
307
- } else {
318
+ do {
319
+ try tfliteInterpreter.copy(rgbData, toInputAt: 0)
320
+ try tfliteInterpreter.invoke()
321
+ let outputTensor: Tensor = try tfliteInterpreter.output(at: 0)
322
+ let embeddingData: [Float] = [Float32](unsafeData: outputTensor.data) ?? []
323
+ map["data"] = embeddingData
324
+ } catch let tensorError {
325
+ print("Error running TensorFlow inference: \(tensorError.localizedDescription)")
308
326
  map["data"] = []
327
+ map["error"] = "TensorFlow inference failed: \(tensorError.localizedDescription)"
309
328
  }
310
329
  } else {
311
330
  map["data"] = []
@@ -1 +1 @@
1
- {"version":3,"names":["_react","require","_reactNativeVisionCamera","createFaceDetectorPlugin","options","plugin","VisionCameraProxy","initFrameProcessorPlugin","Error","detectFaces","frame","call","useFaceDetector","useMemo"],"sourceRoot":"../../src","sources":["FaceDetector.ts"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,wBAAA,GAAAD,OAAA;AAwJA;AACA;AACA;AACA;AACA;AACA;AACA,SAASE,wBAAwBA,CAC/BC,OAA8B,EACV;EACpB,MAAMC,MAAM,GAAGC,0CAAiB,CAACC,wBAAwB,CAAC,aAAa,EAAE;IACvE,GAAGH;EACL,CAAC,CAAC;EAEF,IAAI,CAACC,MAAM,EAAE;IACX,MAAM,IAAIG,KAAK,CAAC,sDAAsD,CAAC;EACzE;EAEA,OAAO;IACLC,WAAW,EAAGC,KAAY,IAAa;MACrC,SAAS;;MACT;MACA,OAAOL,MAAM,CAACM,IAAI,CAACD,KAAK,CAAC;IAC3B;EACF,CAAC;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASE,eAAeA,CAC7BR,OAA8B,EACV;EACpB,OAAO,IAAAS,cAAO,EAAC,MAAMV,wBAAwB,CAACC,OAAO,CAAC,EAAE,CAACA,OAAO,CAAC,CAAC;AACpE","ignoreList":[]}
1
+ {"version":3,"names":["_react","require","_reactNativeVisionCamera","createFaceDetectorPlugin","options","plugin","VisionCameraProxy","initFrameProcessorPlugin","Error","detectFaces","frame","call","useFaceDetector","useMemo"],"sourceRoot":"../../src","sources":["FaceDetector.ts"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,wBAAA,GAAAD,OAAA;AAyJA;AACA;AACA;AACA;AACA;AACA;AACA,SAASE,wBAAwBA,CAC/BC,OAA8B,EACV;EACpB,MAAMC,MAAM,GAAGC,0CAAiB,CAACC,wBAAwB,CAAC,aAAa,EAAE;IACvE,GAAGH;EACL,CAAC,CAAC;EAEF,IAAI,CAACC,MAAM,EAAE;IACX,MAAM,IAAIG,KAAK,CAAC,sDAAsD,CAAC;EACzE;EAEA,OAAO;IACLC,WAAW,EAAGC,KAAY,IAAa;MACrC,SAAS;;MACT;MACA,OAAOL,MAAM,CAACM,IAAI,CAACD,KAAK,CAAC;IAC3B;EACF,CAAC;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASE,eAAeA,CAC7BR,OAA8B,EACV;EACpB,OAAO,IAAAS,cAAO,EAAC,MAAMV,wBAAwB,CAACC,OAAO,CAAC,EAAE,CAACA,OAAO,CAAC,CAAC;AACpE","ignoreList":[]}
@@ -1 +1 @@
1
- {"version":3,"names":["useMemo","VisionCameraProxy","createFaceDetectorPlugin","options","plugin","initFrameProcessorPlugin","Error","detectFaces","frame","call","useFaceDetector"],"sourceRoot":"../../src","sources":["FaceDetector.ts"],"mappings":";;AAAA,SAASA,OAAO,QAAQ,OAAO;AAC/B,SACEC,iBAAiB,QAGZ,4BAA4B;AAoJnC;AACA;AACA;AACA;AACA;AACA;AACA,SAASC,wBAAwBA,CAC/BC,OAA8B,EACV;EACpB,MAAMC,MAAM,GAAGH,iBAAiB,CAACI,wBAAwB,CAAC,aAAa,EAAE;IACvE,GAAGF;EACL,CAAC,CAAC;EAEF,IAAI,CAACC,MAAM,EAAE;IACX,MAAM,IAAIE,KAAK,CAAC,sDAAsD,CAAC;EACzE;EAEA,OAAO;IACLC,WAAW,EAAGC,KAAY,IAAa;MACrC,SAAS;;MACT;MACA,OAAOJ,MAAM,CAACK,IAAI,CAACD,KAAK,CAAC;IAC3B;EACF,CAAC;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASE,eAAeA,CAC7BP,OAA8B,EACV;EACpB,OAAOH,OAAO,CAAC,MAAME,wBAAwB,CAACC,OAAO,CAAC,EAAE,CAACA,OAAO,CAAC,CAAC;AACpE","ignoreList":[]}
1
+ {"version":3,"names":["useMemo","VisionCameraProxy","createFaceDetectorPlugin","options","plugin","initFrameProcessorPlugin","Error","detectFaces","frame","call","useFaceDetector"],"sourceRoot":"../../src","sources":["FaceDetector.ts"],"mappings":";;AAAA,SAASA,OAAO,QAAQ,OAAO;AAC/B,SACEC,iBAAiB,QAGZ,4BAA4B;AAqJnC;AACA;AACA;AACA;AACA;AACA;AACA,SAASC,wBAAwBA,CAC/BC,OAA8B,EACV;EACpB,MAAMC,MAAM,GAAGH,iBAAiB,CAACI,wBAAwB,CAAC,aAAa,EAAE;IACvE,GAAGF;EACL,CAAC,CAAC;EAEF,IAAI,CAACC,MAAM,EAAE;IACX,MAAM,IAAIE,KAAK,CAAC,sDAAsD,CAAC;EACzE;EAEA,OAAO;IACLC,WAAW,EAAGC,KAAY,IAAa;MACrC,SAAS;;MACT;MACA,OAAOJ,MAAM,CAACK,IAAI,CAACD,KAAK,CAAC;IAC3B;EACF,CAAC;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASE,eAAeA,CAC7BP,OAA8B,EACV;EACpB,OAAOH,OAAO,CAAC,MAAME,wBAAwB,CAACC,OAAO,CAAC,EAAE,CAACA,OAAO,CAAC,CAAC;AACpE","ignoreList":[]}
@@ -22,6 +22,7 @@ export interface Face {
22
22
  contours?: Contours;
23
23
  landmarks?: Landmarks;
24
24
  data: number[];
25
+ error?: string;
25
26
  }
26
27
  export interface Bounds {
27
28
  width: number;
@@ -1 +1 @@
1
- {"version":3,"file":"FaceDetector.d.ts","sourceRoot":"","sources":["../../../../src/FaceDetector.ts"],"names":[],"mappings":"AACA,OAAO,EAEL,KAAK,cAAc,EACnB,KAAK,KAAK,EACX,MAAM,4BAA4B,CAAC;AAEpC,KAAK,kBAAkB,GAAG;IACxB;;;;OAIG;IACH,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,EAAE,CAAC;CACvC,CAAC;AAEF,KAAK,KAAK,GAAG;IACX,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX,CAAC;AAEF,MAAM,WAAW,IAAI;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,sBAAsB,EAAE,MAAM,CAAC;IAC/B,uBAAuB,EAAE,MAAM,CAAC;IAChC,kBAAkB,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,SAAS,CAAC,EAAE,SAAS,CAAC;IACtB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,KAAK,EAAE,CAAC;IACd,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,mBAAmB,EAAE,KAAK,EAAE,CAAC;IAC7B,iBAAiB,EAAE,KAAK,EAAE,CAAC;IAC3B,oBAAoB,EAAE,KAAK,EAAE,CAAC;IAC9B,QAAQ,EAAE,KAAK,EAAE,CAAC;IAClB,SAAS,EAAE,KAAK,EAAE,CAAC;IACnB,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,UAAU,EAAE,KAAK,EAAE,CAAC;IACpB,WAAW,EAAE,KAAK,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,KAAK,CAAC;IAClB,QAAQ,EAAE,KAAK,CAAC;IAChB,QAAQ,EAAE,KAAK,CAAC;IAChB,YAAY,EAAE,KAAK,CAAC;IACpB,UAAU,EAAE,KAAK,CAAC;IAClB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,SAAS,EAAE,KAAK,CAAC;CAClB;AAED,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,eAAe,CAAC,EAAE,MAAM,GAAG,UAAU,CAAC;IAEtC;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE9B;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE7B;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAEpC;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;OAMG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAE1B;;;;;;OAMG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IAEnB;;;;OAIG;IACH,WAAW,CAAC,EA
AE,MAAM,CAAC;IAErB;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;;;OAIG;IACH,YAAY,CAAC,EAAE,cAAc,CAAC;IAE9B;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AA4BD;;;;;;GAMG;AACH,wBAAgB,eAAe,CAC7B,OAAO,CAAC,EAAE,oBAAoB,GAC7B,kBAAkB,CAEpB"}
1
+ {"version":3,"file":"FaceDetector.d.ts","sourceRoot":"","sources":["../../../../src/FaceDetector.ts"],"names":[],"mappings":"AACA,OAAO,EAEL,KAAK,cAAc,EACnB,KAAK,KAAK,EACX,MAAM,4BAA4B,CAAC;AAEpC,KAAK,kBAAkB,GAAG;IACxB;;;;OAIG;IACH,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,EAAE,CAAC;CACvC,CAAC;AAEF,KAAK,KAAK,GAAG;IACX,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX,CAAC;AAEF,MAAM,WAAW,IAAI;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,sBAAsB,EAAE,MAAM,CAAC;IAC/B,uBAAuB,EAAE,MAAM,CAAC;IAChC,kBAAkB,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,SAAS,CAAC,EAAE,SAAS,CAAC;IACtB,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,KAAK,EAAE,CAAC;IACd,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,mBAAmB,EAAE,KAAK,EAAE,CAAC;IAC7B,iBAAiB,EAAE,KAAK,EAAE,CAAC;IAC3B,oBAAoB,EAAE,KAAK,EAAE,CAAC;IAC9B,QAAQ,EAAE,KAAK,EAAE,CAAC;IAClB,SAAS,EAAE,KAAK,EAAE,CAAC;IACnB,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,UAAU,EAAE,KAAK,EAAE,CAAC;IACpB,WAAW,EAAE,KAAK,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,KAAK,CAAC;IAClB,QAAQ,EAAE,KAAK,CAAC;IAChB,QAAQ,EAAE,KAAK,CAAC;IAChB,YAAY,EAAE,KAAK,CAAC;IACpB,UAAU,EAAE,KAAK,CAAC;IAClB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,SAAS,EAAE,KAAK,CAAC;CAClB;AAED,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,eAAe,CAAC,EAAE,MAAM,GAAG,UAAU,CAAC;IAEtC;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE9B;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE7B;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAEpC;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;OAMG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAE1B;;;;;;OAMG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;I
AEnB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;;;OAIG;IACH,YAAY,CAAC,EAAE,cAAc,CAAC;IAE9B;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AA4BD;;;;;;GAMG;AACH,wBAAgB,eAAe,CAC7B,OAAO,CAAC,EAAE,oBAAoB,GAC7B,kBAAkB,CAEpB"}
@@ -22,6 +22,7 @@ export interface Face {
22
22
  contours?: Contours;
23
23
  landmarks?: Landmarks;
24
24
  data: number[];
25
+ error?: string;
25
26
  }
26
27
  export interface Bounds {
27
28
  width: number;
@@ -1 +1 @@
1
- {"version":3,"file":"FaceDetector.d.ts","sourceRoot":"","sources":["../../../../src/FaceDetector.ts"],"names":[],"mappings":"AACA,OAAO,EAEL,KAAK,cAAc,EACnB,KAAK,KAAK,EACX,MAAM,4BAA4B,CAAC;AAEpC,KAAK,kBAAkB,GAAG;IACxB;;;;OAIG;IACH,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,EAAE,CAAC;CACvC,CAAC;AAEF,KAAK,KAAK,GAAG;IACX,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX,CAAC;AAEF,MAAM,WAAW,IAAI;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,sBAAsB,EAAE,MAAM,CAAC;IAC/B,uBAAuB,EAAE,MAAM,CAAC;IAChC,kBAAkB,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,SAAS,CAAC,EAAE,SAAS,CAAC;IACtB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,KAAK,EAAE,CAAC;IACd,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,mBAAmB,EAAE,KAAK,EAAE,CAAC;IAC7B,iBAAiB,EAAE,KAAK,EAAE,CAAC;IAC3B,oBAAoB,EAAE,KAAK,EAAE,CAAC;IAC9B,QAAQ,EAAE,KAAK,EAAE,CAAC;IAClB,SAAS,EAAE,KAAK,EAAE,CAAC;IACnB,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,UAAU,EAAE,KAAK,EAAE,CAAC;IACpB,WAAW,EAAE,KAAK,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,KAAK,CAAC;IAClB,QAAQ,EAAE,KAAK,CAAC;IAChB,QAAQ,EAAE,KAAK,CAAC;IAChB,YAAY,EAAE,KAAK,CAAC;IACpB,UAAU,EAAE,KAAK,CAAC;IAClB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,SAAS,EAAE,KAAK,CAAC;CAClB;AAED,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,eAAe,CAAC,EAAE,MAAM,GAAG,UAAU,CAAC;IAEtC;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE9B;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE7B;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAEpC;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;OAMG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAE1B;;;;;;OAMG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IAEnB;;;;OAIG;IACH,WAAW,CAAC,EA
AE,MAAM,CAAC;IAErB;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;;;OAIG;IACH,YAAY,CAAC,EAAE,cAAc,CAAC;IAE9B;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AA4BD;;;;;;GAMG;AACH,wBAAgB,eAAe,CAC7B,OAAO,CAAC,EAAE,oBAAoB,GAC7B,kBAAkB,CAEpB"}
1
+ {"version":3,"file":"FaceDetector.d.ts","sourceRoot":"","sources":["../../../../src/FaceDetector.ts"],"names":[],"mappings":"AACA,OAAO,EAEL,KAAK,cAAc,EACnB,KAAK,KAAK,EACX,MAAM,4BAA4B,CAAC;AAEpC,KAAK,kBAAkB,GAAG;IACxB;;;;OAIG;IACH,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,EAAE,CAAC;CACvC,CAAC;AAEF,KAAK,KAAK,GAAG;IACX,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX,CAAC;AAEF,MAAM,WAAW,IAAI;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,sBAAsB,EAAE,MAAM,CAAC;IAC/B,uBAAuB,EAAE,MAAM,CAAC;IAChC,kBAAkB,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,SAAS,CAAC,EAAE,SAAS,CAAC;IACtB,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;CACX;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,KAAK,EAAE,CAAC;IACd,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,mBAAmB,EAAE,KAAK,EAAE,CAAC;IAC7B,iBAAiB,EAAE,KAAK,EAAE,CAAC;IAC3B,oBAAoB,EAAE,KAAK,EAAE,CAAC;IAC9B,QAAQ,EAAE,KAAK,EAAE,CAAC;IAClB,SAAS,EAAE,KAAK,EAAE,CAAC;IACnB,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,aAAa,EAAE,KAAK,EAAE,CAAC;IACvB,gBAAgB,EAAE,KAAK,EAAE,CAAC;IAC1B,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,WAAW,EAAE,KAAK,EAAE,CAAC;IACrB,UAAU,EAAE,KAAK,EAAE,CAAC;IACpB,WAAW,EAAE,KAAK,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,KAAK,CAAC;IAClB,QAAQ,EAAE,KAAK,CAAC;IAChB,QAAQ,EAAE,KAAK,CAAC;IAChB,YAAY,EAAE,KAAK,CAAC;IACpB,UAAU,EAAE,KAAK,CAAC;IAClB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,WAAW,EAAE,KAAK,CAAC;IACnB,SAAS,EAAE,KAAK,CAAC;IACjB,SAAS,EAAE,KAAK,CAAC;CAClB;AAED,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,eAAe,CAAC,EAAE,MAAM,GAAG,UAAU,CAAC;IAEtC;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE9B;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAE7B;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IAEpC;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;OAMG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAE1B;;;;;;OAMG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;I
AEnB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;;;OAIG;IACH,YAAY,CAAC,EAAE,cAAc,CAAC;IAE9B;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AA4BD;;;;;;GAMG;AACH,wBAAgB,eAAe,CAC7B,OAAO,CAAC,EAAE,oBAAoB,GAC7B,kBAAkB,CAEpB"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "vision-camera-face-detection",
3
- "version": "2.2.3",
3
+ "version": "2.2.5",
4
4
  "description": "Plugin Face Detection for Vision Camera 4",
5
5
  "source": "./src/index.tsx",
6
6
  "main": "./lib/commonjs/index.js",
@@ -77,17 +77,17 @@
77
77
  "eslint-plugin-prettier": "^5.0.1",
78
78
  "jest": "^29.7.0",
79
79
  "prettier": "^3.0.3",
80
- "react": "19.0.0",
81
- "react-native": "0.78.3",
80
+ "react": "19.2.0",
81
+ "react-native": "0.83.1",
82
82
  "react-native-builder-bob": "^0.36.0",
83
- "react-native-vision-camera": "^4.7.1",
83
+ "react-native-vision-camera": "^4.7.3",
84
84
  "react-native-worklets-core": "^1.6.2",
85
85
  "release-it": "^17.10.0",
86
86
  "turbo": "^1.10.7",
87
87
  "typescript": "^5.2.2"
88
88
  },
89
89
  "resolutions": {
90
- "@types/react": "^18.2.44"
90
+ "@types/react": "^19.2.0"
91
91
  },
92
92
  "peerDependencies": {
93
93
  "react": "*",
@@ -30,6 +30,7 @@ export interface Face {
30
30
  contours?: Contours;
31
31
  landmarks?: Landmarks;
32
32
  data: number[];
33
+ error?: string;
33
34
  }
34
35
 
35
36
  export interface Bounds {