vision-camera-face-detection 2.2.0 → 2.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "vision-camera-face-detection",
-  "version": "2.2.0",
+  "version": "2.2.1",
   "description": "Plugin Face Detection for Vision Camera 4",
   "source": "./src/index.tsx",
   "main": "./lib/commonjs/index.js",
@@ -78,10 +78,10 @@
     "jest": "^29.7.0",
     "prettier": "^3.0.3",
     "react": "19.0.0",
-    "react-native": "0.78.0",
+    "react-native": "0.78.3",
     "react-native-builder-bob": "^0.36.0",
-    "react-native-vision-camera": "^4.6.4",
-    "react-native-worklets-core": "^1.5.0",
+    "react-native-vision-camera": "^4.7.0",
+    "react-native-worklets-core": "^1.6.0",
     "release-it": "^17.10.0",
     "turbo": "^1.10.7",
     "typescript": "^5.2.2"
package/src/Camera.tsx CHANGED
@@ -1,8 +1,11 @@
-import React from 'react';
+/* eslint-disable react-hooks/exhaustive-deps */
+
+import React, { useMemo } from 'react';
 import { Platform } from 'react-native';
 import {
   Camera as VisionCamera,
   useFrameProcessor,
+  // useSkiaFrameProcessor,
 } from 'react-native-vision-camera';
 import {
   Worklets,
@@ -15,6 +18,7 @@ import { useFaceDetector } from './FaceDetector';
 import type { DependencyList, ForwardedRef } from 'react';
 import type {
   CameraProps,
+  DrawableFrame,
   Frame,
   FrameInternal,
 } from 'react-native-vision-camera';
@@ -32,6 +36,7 @@ type CallbackType = (faces: Face[], frame: Frame) => void | Promise<void>;
 type ComponentType = {
   faceDetectionOptions?: FaceDetectionOptions;
   faceDetectionCallback: CallbackType;
+  // skiaActions?: (faces: Face[], frame: DrawableFrame) => void | Promise<void>;
 } & CameraProps;
 
 /**
@@ -46,10 +51,9 @@ function useWorklet(
   func: (frame: FrameInternal) => void,
   dependencyList: DependencyList
 ): UseWorkletType {
-  const worklet = React.useMemo(() => {
+  const worklet = useMemo(() => {
     const context = Worklets.defaultContext;
     return context.createRunAsync(func);
-    // eslint-disable-next-line react-hooks/exhaustive-deps
   }, dependencyList);
 
   return worklet;
@@ -67,8 +71,7 @@ function useRunInJS(
   func: CallbackType,
   dependencyList: DependencyList
 ): UseRunInJSType {
-  // eslint-disable-next-line react-hooks/exhaustive-deps
-  return React.useMemo(() => Worklets.createRunOnJS(func), dependencyList);
+  return useMemo(() => Worklets.createRunOnJS(func), dependencyList);
 }
 
 /**
@@ -79,7 +82,12 @@ function useRunInJS(
  */
 export const Camera = React.forwardRef(
   (
-    { faceDetectionOptions, faceDetectionCallback, ...props }: ComponentType,
+    {
+      faceDetectionOptions,
+      faceDetectionCallback,
+      // skiaActions,
+      ...props
+    }: ComponentType,
     ref: ForwardedRef<VisionCamera>
   ) => {
     const { detectFaces } = useFaceDetector(faceDetectionOptions);
@@ -87,6 +95,7 @@ export const Camera = React.forwardRef(
      * Is there an async task already running?
      */
     const isAsyncContextBusy = useSharedValue(false);
+    const faces = useSharedValue<string>('[]');
 
     /**
      * Throws logs/errors back on js thread
@@ -102,7 +111,7 @@
     /**
      * Runs on detection callback on js thread
      */
-    const runInJs = useRunInJS(faceDetectionCallback, [faceDetectionCallback]);
+    const runOnJs = useRunInJS(faceDetectionCallback, [faceDetectionCallback]);
 
     /**
      * Async context that will handle face detection
@@ -111,11 +120,11 @@
       (frame: FrameInternal) => {
         'worklet';
         try {
-          const faces = detectFaces(frame);
+          faces.value = JSON.stringify(detectFaces(frame));
           // increment frame count so we can use frame on
           // js side without frame processor getting stuck
           frame.incrementRefCount();
-          runInJs(faces, frame).finally(() => {
+          runOnJs(JSON.parse(faces.value), frame).finally(() => {
            'worklet';
            // finally decrement frame count so it can be dropped
            frame.decrementRefCount();
@@ -127,7 +136,7 @@
          isAsyncContextBusy.value = false;
        }
      },
-      [detectFaces, runInJs]
+      [detectFaces, runOnJs]
    );
 
     /**
@@ -135,7 +144,7 @@
      *
      * @param {Frame} frame Current frame
      */
-    function runAsync(frame: Frame) {
+    function runAsync(frame: Frame | DrawableFrame) {
       'worklet';
       if (isAsyncContextBusy.value) return;
       // set async context as busy
@@ -148,9 +157,23 @@
     }
 
     /**
-     * Camera frame processor
+     * Skia frame processor
+     */
+    // NOTE - temporary without skia
+    // const skiaFrameProcessor = useSkiaFrameProcessor(
+    //   (frame) => {
+    //     'worklet';
+    //     frame.render();
+    //     skiaActions!(JSON.parse(faces.value), frame);
+    //     runAsync(frame);
+    //   },
+    //   [runOnAsyncContext, skiaActions]
+    // );
+
+    /**
+     * Default frame processor
      */
-    const processorAndroid = useFrameProcessor(
+    const cameraFrameProcessor = useFrameProcessor(
       (frame) => {
         'worklet';
         runAsync(frame);
@@ -158,34 +181,43 @@
       [runOnAsyncContext]
     );
 
+    /**
+     * Camera frame processor
+     */
+    const frameProcessor = (() => {
+      // const { autoMode } = faceDetectionOptions ?? {};
+
+      // if (!autoMode && !!skiaActions) return skiaFrameProcessor;
+
+      return cameraFrameProcessor;
+    })();
+
     //
     // use bellow when vision-camera's
     // context creation issue is solved
     //
-    /**
-     * Runs on detection callback on js thread
-     */
-    // const runOnJs = useRunOnJS(faceDetectionCallback, [faceDetectionCallback]);
-
-    // const processorIOS = useFrameProcessor(
-    //   (frame) => {
-    //     'worklet';
-    //     runOnJs(detectFaces(frame), frame);
-    //     // runAsync(frame, () => {
-    //     //   'worklet';
-    //     //   runOnJs(detectFaces(frame), frame);
-    //     // });
-    //   },
-    //   [runOnJs]
-    // );
+    // /**
+    //  * Runs on detection callback on js thread
+    //  */
+    // const runOnJs = useRunOnJS( faceDetectionCallback, [
+    //   faceDetectionCallback
+    // ] )
+
+    // const cameraFrameProcessor = useFrameProcessor( ( frame ) => {
+    //   'worklet'
+    //   runAsync( frame, () => {
+    //     'worklet'
+    //     runOnJs(
+    //       detectFaces( frame ),
+    //       frame
+    //     )
+    //   } )
+    // }, [ runOnJs ] )
 
     return (
       <VisionCamera
         ref={ref}
-        // frameProcessor={
-        //   Platform.OS === 'android' ? processorAndroid : processorIOS
-        // }
-        frameProcessor={processorAndroid}
+        frameProcessor={frameProcessor}
        pixelFormat={Platform.OS === 'android' ? 'yuv' : 'rgb'}
        {...props}
      />
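
The public surface of Camera.tsx is unchanged by this release: a consumer still passes `faceDetectionOptions` and `faceDetectionCallback` together with the usual VisionCamera props, while the renamed frame-processor wiring stays internal. A minimal usage sketch, assuming the package entry point re-exports `Camera` and the `Face` type and that `useCameraDevice` is available from react-native-vision-camera v4; the component name `FaceCamera` and the `trackingEnabled` value are illustrative only.

import React from 'react';
import { StyleSheet } from 'react-native';
import { useCameraDevice } from 'react-native-vision-camera';
import { Camera, type Face } from 'vision-camera-face-detection';

export function FaceCamera() {
  // Front camera device via vision-camera v4 (assumed available in the consumer app)
  const device = useCameraDevice('front');

  // Called on the JS thread through the worklet bridge set up in Camera.tsx
  const handleFaces = (faces: Face[]) => {
    console.log('detected faces:', faces.length);
  };

  if (device == null) return null;

  return (
    <Camera
      style={StyleSheet.absoluteFill}
      device={device}
      isActive={true}
      faceDetectionOptions={{ trackingEnabled: true }}
      faceDetectionCallback={handleFaces}
    />
  );
}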
@@ -1,5 +1,9 @@
 import { useMemo } from 'react';
-import { VisionCameraProxy, type Frame } from 'react-native-vision-camera';
+import {
+  VisionCameraProxy,
+  type CameraPosition,
+  type Frame,
+} from 'react-native-vision-camera';
 
 type FaceDetectorPlugin = {
   /**
@@ -23,9 +27,9 @@ export interface Face {
   leftEyeOpenProbability: number;
   rightEyeOpenProbability: number;
   smilingProbability: number;
-  contours: Contours;
-  landmarks: Landmarks;
-  data: any;
+  contours?: Contours;
+  landmarks?: Landmarks;
+  data: number[];
 }
 
 export interface Bounds {
@@ -112,13 +116,34 @@ export interface FaceDetectionOptions {
   trackingEnabled?: boolean;
 
   /**
-   * Should auto scale face bounds, contour and landmarks on native side?
+   * Should handle auto scale (face bounds, contour and landmarks) and rotation on native side?
    * This option should be disabled if you want to draw on frame using `Skia Frame Processor`.
    * See [this](https://github.com/luicfrr/react-native-vision-camera-face-detector/issues/30#issuecomment-2058805546) and [this](https://github.com/luicfrr/react-native-vision-camera-face-detector/issues/35) for more details.
   *
   * @default false
   */
-  autoScale?: boolean;
+  autoMode?: boolean;
+
+  /**
+   * Required if you want to use `autoMode`. You must handle your own logic to get screen sizes, with or without statusbar size, etc...
+   *
+   * @default 1.0
+   */
+  windowWidth?: number;
+
+  /**
+   * Required if you want to use `autoMode`. You must handle your own logic to get screen sizes, with or without statusbar size, etc...
+   *
+   * @default 1.0
+   */
+  windowHeight?: number;
+
+  /**
+   * Current active camera
+   *
+   * @default front
+   */
+  cameraFacing?: CameraPosition;
 
   /**
    * for enable/disable tensorflow lite logic, just face detection
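
The new options above replace `autoScale` with `autoMode` and, per the JSDoc, leave window sizing to the caller. A sketch of wiring them up, assuming `Dimensions` from react-native is an acceptable source for the window size and that `FaceDetectionOptions` is exported from the package entry point:

import { Dimensions } from 'react-native';
import type { FaceDetectionOptions } from 'vision-camera-face-detection';

const { width, height } = Dimensions.get('window');

const faceDetectionOptions: FaceDetectionOptions = {
  autoMode: true,        // scale/rotate bounds, contours and landmarks on the native side
  windowWidth: width,    // required when autoMode is enabled
  windowHeight: height,  // required when autoMode is enabled
  cameraFacing: 'front', // should match the active camera (default: 'front')
};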