@opexa/portal-components 0.0.969 → 0.0.970
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -142,7 +142,10 @@ export function useCamera(options = {}) {
       canvas.toBlob((blob) => {
         if (!blob) {
           resolve(null);
-          return
+          return setError({
+            name: 'CameraError',
+            message: 'Failed to snap photo',
+          });
         }
         const url = URL.createObjectURL(blob);
         const file = new File([blob], `${crypto.randomUUID()}.jpeg`, {
@@ -168,6 +171,7 @@ export function useCamera(options = {}) {
   const reopen = useCallback(async () => {
     setSnapping(false);
     setLoading(true);
+    setData(null);
     await close();
     await sleep();
     await open();
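
For context on the first hunk above: the snap path wraps canvas.toBlob in a Promise and turns the resulting blob into a File, and this release reports a CameraError instead of silently returning when no blob is produced. The sketch below is a hypothetical, self-contained illustration of that capture pattern using only standard web APIs (canvas.toBlob, File, URL.createObjectURL, crypto.randomUUID); captureFrame and its error shape are illustrative, not the package's actual implementation.

// Hypothetical sketch, not package code: capture the current video frame as a
// JPEG File, failing with the same error shape the hook now passes to setError.
async function captureFrame(video) {
  const canvas = document.createElement('canvas');
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  canvas.getContext('2d').drawImage(video, 0, 0);

  const blob = await new Promise((resolve) => canvas.toBlob(resolve, 'image/jpeg'));
  if (!blob) {
    throw Object.assign(new Error('Failed to snap photo'), { name: 'CameraError' });
  }

  const file = new File([blob], `${crypto.randomUUID()}.jpeg`, { type: 'image/jpeg' });
  return { file, url: URL.createObjectURL(file) };
}
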
@@ -1,6 +1,6 @@
 import { useFieldContext } from '@ark-ui/react';
 import { FaceDetector, FilesetResolver } from '@mediapipe/tasks-vision';
-import { useRef, useState } from 'react';
+import { useEffect, useRef, useState } from 'react';
 import invariant from 'tiny-invariant';
 import { useInterval } from 'usehooks-ts';
 import { useCamera } from '../../../client/hooks/useCamera.js';
@@ -111,13 +111,14 @@ export function useSelfieImageField(props) {
       if (open) {
         camera.open();
       }
-      else {
-        setFaceFound(false);
-        camera.close();
-      }
     },
   });
   const [faceFound, setFaceFound] = useState(false);
+  // Reset faceFound when camera is reset or reopened
+  // biome-ignore lint/correctness/useExhaustiveDependencies: Reset faceFound on state change
+  useEffect(() => {
+    setFaceFound(false);
+  }, [camera.loading, camera.data]);
   useInterval(async () => {
     if (!camera.videoRef.current || !guideRef.current) {
       return setFaceFound(false);
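
The added effect clears faceFound whenever camera.loading or camera.data changes, so a detection from a previous session cannot linger after the camera is reset or reopened. A standalone illustration of that React pattern follows (assumed hook and variable names, not the component's code):

import { useEffect, useState } from 'react';

// Sketch of the reset-on-change pattern used above: the flag is cleared every
// time one of its source values changes.
function useResettableFlag(deps) {
  const [flag, setFlag] = useState(false);
  useEffect(() => {
    setFlag(false);
  }, deps);
  return [flag, setFlag];
}

// e.g. const [faceFound, setFaceFound] = useResettableFlag([camera.loading, camera.data]);
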
@@ -197,14 +198,13 @@ async function getVideoFaceDetector() {
     return __video_face_detector_promise__;
   __video_face_detector_promise__ = (async () => {
     try {
-      const vision = await
+      const vision = await FilesetResolver.forVisionTasks('https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm');
       const detector = await FaceDetector.createFromOptions(vision, {
-        runningMode: 'VIDEO',
-        minDetectionConfidence: 0.5,
         baseOptions: {
-          delegate: 'GPU',
           modelAssetPath: 'https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite',
+          delegate: 'GPU',
         },
+        runningMode: 'VIDEO',
       });
       __video_face_detector__ = detector;
       return detector;
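
For reference, a FaceDetector created with runningMode: 'VIDEO' (as configured above) is driven frame by frame with detectForVideo. The snippet below is a usage sketch based on the public @mediapipe/tasks-vision API, not code from this package; the requestAnimationFrame loop and variable names are illustrative.

import { FaceDetector, FilesetResolver } from '@mediapipe/tasks-vision';

// Usage sketch (not package code): create the detector once, then feed it
// video frames with a monotonically increasing timestamp.
const vision = await FilesetResolver.forVisionTasks(
  'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm',
);
const detector = await FaceDetector.createFromOptions(vision, {
  baseOptions: {
    modelAssetPath: 'https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite',
    delegate: 'GPU',
  },
  runningMode: 'VIDEO',
});

function detectLoop(video) {
  // detectForVideo returns { detections: [...] } for the given frame/timestamp.
  const result = detector.detectForVideo(video, performance.now());
  const faceFound = result.detections.length > 0;
  // ...update UI with faceFound, then schedule the next frame...
  requestAnimationFrame(() => detectLoop(video));
}
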