@opexa/portal-components 0.0.970 → 0.0.971
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/shared/SelfieImageField/SelfieImageField.client.js +1 -1
- package/dist/components/shared/SelfieImageField/SelfieImageField.js +2 -7
- package/dist/components/shared/SelfieImageField/useSelfieImageField.d.ts +1 -0
- package/dist/components/shared/SelfieImageField/useSelfieImageField.js +44 -42
- package/package.json +1 -1
package/dist/components/shared/SelfieImageField/SelfieImageField.client.js

@@ -95,7 +95,7 @@ function Camera() {
         context.mutation.mutate({ file });
         context.disclosure.setOpen(false);
     }, className: "w-full lg:w-[10rem]", children: [_jsx(CheckCircleIcon, { className: "size-5" }), "Use Photo"] }), _jsxs(Button, { variant: "outline", colorScheme: "gray", onClick: () => {
-        context.camera.
+        context.camera.reopen();
     }, className: "w-full lg:w-[10rem]", children: [_jsx(RefreshCcw01Icon, { className: "size-5" }), " Retake"] })] }))] }) })] }) }));
 }
 /*
package/dist/components/shared/SelfieImageField/SelfieImageField.js

@@ -1,11 +1,6 @@
 'use client';
 import { jsx as _jsx } from "react/jsx-runtime";
-import
-import { Spinner02Icon } from '../../../icons/Spinner02Icon.js';
-const Component = dynamic(() => import('./SelfieImageField.client.js').then((mod) => mod.SelfieImageField__client), {
-    ssr: false,
-    loading: () => (_jsx("div", { className: "flex aspect-[352/180] w-full shrink-0 items-center justify-center rounded-xl border border-border-primary bg-bg-primary", children: _jsx(Spinner02Icon, { className: "size-8 text-text-quinary" }) })),
-});
+import { SelfieImageField__client } from './SelfieImageField.client.js';
 export function SelfieImageField(props) {
-    return _jsx(
+    return _jsx(SelfieImageField__client, { ...props });
 }
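The wrapper module above previously deferred to a `dynamic()` loader (next/dynamic-style) with `ssr: false` and a spinner fallback; as of 0.0.971 it imports and renders `SelfieImageField__client` directly, so the package no longer keeps the component out of server rendering on its own. An application that still wants client-only loading can apply the same wrapper on its side. A minimal sketch, assuming Next.js and that `SelfieImageField` is re-exported from the package root (check the actual export path in your setup):

```ts
import dynamic from 'next/dynamic';

// Hypothetical consumer-side wrapper: restores the ssr:false behaviour that the
// package itself provided before 0.0.971. Adjust the import path and add a
// `loading` fallback to match your app.
const LazySelfieImageField = dynamic(
  () => import('@opexa/portal-components').then((mod) => mod.SelfieImageField),
  { ssr: false },
);

export default LazySelfieImageField;
```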
package/dist/components/shared/SelfieImageField/useSelfieImageField.js

@@ -114,6 +114,19 @@ export function useSelfieImageField(props) {
     },
 });
 const [faceFound, setFaceFound] = useState(false);
+const [detectorReady, setDetectorReady] = useState(false);
+// Preload detector when disclosure opens
+useEffect(() => {
+    if (disclosure.open) {
+        setDetectorReady(false);
+        getFaceDetector()
+            .then(() => setDetectorReady(true))
+            .catch((e) => {
+            console.error('Failed to load face detector:', e);
+            setDetectorReady(false);
+        });
+    }
+}, [disclosure.open]);
 // Reset faceFound when camera is reset or reopened
 // biome-ignore lint/correctness/useExhaustiveDependencies: Reset faceFound on state change
 useEffect(() => {
@@ -123,9 +136,14 @@ export function useSelfieImageField(props) {
     if (!camera.videoRef.current || !guideRef.current) {
         return setFaceFound(false);
     }
-
-
-
+    const found = await validateFaceFromVideo(camera.videoRef.current, guideRef.current);
+    setFaceFound(found);
+}, disclosure.open &&
+    detectorReady &&
+    !camera.data &&
+    !camera.error &&
+    !camera.loading
+    ? 100
     : null);
 return {
     field,
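The async callback above is handed to an interval hook whose delay collapses to null unless the dialog is open, the detector is loaded, and the camera is neither showing a capture, errored, nor still starting; the extra `detectorReady` guard is what this release adds. The hook call itself sits outside this hunk, but a conventional pause-on-null interval hook of the kind implied here looks roughly like the following (the name `useIntervalAsync` is illustrative, not the package's actual hook):

```ts
import { useEffect, useRef } from 'react';

// Illustrative hook: invoke `callback` every `delay` ms, or do nothing at all
// while `delay` is null (the "paused" state used by the face-polling code above).
export function useIntervalAsync(callback: () => void | Promise<void>, delay: number | null) {
  const savedCallback = useRef(callback);

  // Keep the ref pointing at the latest callback without restarting the interval.
  useEffect(() => {
    savedCallback.current = callback;
  }, [callback]);

  useEffect(() => {
    if (delay === null) return;
    const id = setInterval(() => {
      void savedCallback.current();
    }, delay);
    return () => clearInterval(id);
  }, [delay]);
}
```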
@@ -139,13 +157,13 @@ export function useSelfieImageField(props) {
     guideRef,
     maskRef,
     faceFound,
+    detectorReady,
 };
 }
-let
-let __video_face_detector__ = null;
+let __face_detector__ = null;
 let __vision__ = null;
-let
-let
+let __current_running_mode__ = 'IMAGE';
+let __face_detector_promise__ = null;
 let __vision_promise__ = null;
 async function getVision() {
     if (__vision__)
@@ -165,12 +183,12 @@ async function getVision() {
     })();
     return __vision_promise__;
 }
-async function
-if (
-return
-if (
-return
-
+async function getFaceDetector() {
+    if (__face_detector__)
+        return __face_detector__;
+    if (__face_detector_promise__)
+        return __face_detector_promise__;
+    __face_detector_promise__ = (async () => {
         try {
             const vision = await getVision();
             const detector = await FaceDetector.createFromOptions(vision, {
@@ -181,44 +199,28 @@ async function getImageFaceDetector() {
                 modelAssetPath: 'https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite',
             },
         });
-
+        __face_detector__ = detector;
+        __current_running_mode__ = 'IMAGE';
         return detector;
     }
     catch (e) {
+        __face_detector_promise__ = null;
         throw e;
     }
     })();
-    return
+    return __face_detector_promise__;
 }
-async function
-
-
-
-
-
-        const vision = await FilesetResolver.forVisionTasks('https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm');
-        const detector = await FaceDetector.createFromOptions(vision, {
-            baseOptions: {
-                modelAssetPath: 'https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite',
-                delegate: 'GPU',
-            },
-            runningMode: 'VIDEO',
-        });
-        __video_face_detector__ = detector;
-        return detector;
-    }
-    catch (e) {
-        __video_face_detector_promise__ = null;
-        throw e;
-    }
-    })();
-    return __video_face_detector_promise__;
+async function ensureRunningMode(mode) {
+    const detector = await getFaceDetector();
+    if (__current_running_mode__ !== mode) {
+        await detector.setOptions({ runningMode: mode });
+        __current_running_mode__ = mode;
+    }
+    return detector;
 }
 async function validateFaceFromImage(image) {
     try {
-        const detector = await
+        const detector = await ensureRunningMode('IMAGE');
         const result = detector.detect(image);
         return result.detections.length > 0;
     }
@@ -231,7 +233,7 @@ async function validateFaceFromVideo(video, guide) {
     if (video.readyState < 2)
         return false;
     try {
-        const detector = await
+        const detector = await ensureRunningMode('VIDEO');
        const result = detector.detectForVideo(video, performance.now());
        const detection = result.detections.at(0);
        if (!detection)
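Taken together, the change in this file replaces the two separate MediaPipe detectors (one per running mode) with a single lazily created `FaceDetector` whose `runningMode` is switched on demand via `setOptions()`, plus a preload path so the model is already downloaded by the time polling starts. A condensed TypeScript sketch of that pattern, with standalone names and the `FilesetResolver` caching from `getVision()` folded in for brevity:

```ts
import { FaceDetector, FilesetResolver } from '@mediapipe/tasks-vision';

type RunningMode = 'IMAGE' | 'VIDEO';

let detector: FaceDetector | null = null;
let detectorPromise: Promise<FaceDetector> | null = null;
let currentMode: RunningMode = 'IMAGE';

// Cached async initializer: concurrent callers share one in-flight load, and the
// promise is cleared on failure so a later call can retry.
async function getFaceDetector(): Promise<FaceDetector> {
  if (detector) return detector;
  if (detectorPromise) return detectorPromise;
  detectorPromise = (async () => {
    try {
      const vision = await FilesetResolver.forVisionTasks(
        'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm',
      );
      detector = await FaceDetector.createFromOptions(vision, {
        baseOptions: {
          modelAssetPath:
            'https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite',
        },
        runningMode: 'IMAGE',
      });
      currentMode = 'IMAGE';
      return detector;
    } catch (e) {
      detectorPromise = null;
      throw e;
    }
  })();
  return detectorPromise;
}

// One detector serves both still images and video frames; the running mode is
// flipped lazily instead of keeping two detector instances around.
async function ensureRunningMode(mode: RunningMode): Promise<FaceDetector> {
  const d = await getFaceDetector();
  if (currentMode !== mode) {
    await d.setOptions({ runningMode: mode });
    currentMode = mode;
  }
  return d;
}

// Usage: (await ensureRunningMode('IMAGE')).detect(imageElement)
//        (await ensureRunningMode('VIDEO')).detectForVideo(videoElement, performance.now())
```

Sharing one instance avoids loading the BlazeFace model twice, at the cost of an awaited `setOptions()` call whenever image and video detection alternate.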