tktechnico-react-face-detection 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +156 -0
- package/dist/components/CaptureButton.d.ts +15 -0
- package/dist/components/CaptureButton.d.ts.map +1 -0
- package/dist/components/DetectionResultDisplay.d.ts +11 -0
- package/dist/components/DetectionResultDisplay.d.ts.map +1 -0
- package/dist/components/FaceDetector.d.ts +24 -0
- package/dist/components/FaceDetector.d.ts.map +1 -0
- package/dist/components/FaceOverlay.d.ts +11 -0
- package/dist/components/FaceOverlay.d.ts.map +1 -0
- package/dist/components/ImagePreviewWithOverlay.d.ts +13 -0
- package/dist/components/ImagePreviewWithOverlay.d.ts.map +1 -0
- package/dist/components/LoadingState.d.ts +11 -0
- package/dist/components/LoadingState.d.ts.map +1 -0
- package/dist/index.d.ts +34 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +794 -0
- package/dist/index.mjs +751 -0
- package/dist/types.d.ts +40 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/useFaceDetectionCore.d.ts +14 -0
- package/dist/useFaceDetectionCore.d.ts.map +1 -0
- package/package.json +66 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,751 @@
|
|
|
1
|
+
// src/components/FaceDetector.tsx
|
|
2
|
+
import { useState as useState4, useCallback as useCallback4 } from "react";
|
|
3
|
+
|
|
4
|
+
// src/useFaceDetectionCore.ts
|
|
5
|
+
import { useState, useEffect, useCallback, useRef } from "react";
|
|
6
|
+
import * as tf from "@tensorflow/tfjs";
|
|
7
|
+
import * as blazeface from "@tensorflow-models/blazeface";
|
|
8
|
+
// src/useFaceDetectionCore.ts
var DEFAULT_CONFIDENCE_THRESHOLD = 0.7;
var DEFAULT_MAX_IMAGE_SIZE = 640;

/**
 * React hook that owns the BlazeFace model lifecycle and exposes a
 * face-detection routine for loaded <img> elements.
 *
 * @param {{confidenceThreshold?: number, maxImageSize?: number}} [options]
 *   `confidenceThreshold` — detections below this probability are flagged as
 *   low confidence (default 0.7). `maxImageSize` — longest side in pixels the
 *   image is downscaled to before inference (default 640).
 * @returns {{isModelLoading: boolean, modelError: string|null,
 *   detectFace: (img: HTMLImageElement) => Promise<object>,
 *   retryModelLoad: () => void}}
 */
function useFaceDetectionCore(options = {}) {
  const {
    confidenceThreshold = DEFAULT_CONFIDENCE_THRESHOLD,
    maxImageSize = DEFAULT_MAX_IMAGE_SIZE
  } = options;
  const [isModelLoading, setIsModelLoading] = useState(true);
  const [modelError, setModelError] = useState(null);
  const modelRef = useRef(null);
  // Guards async state updates: model loading may resolve after the
  // component using this hook has unmounted.
  const isMountedRef = useRef(true);
  useEffect(() => {
    isMountedRef.current = true;
    return () => {
      isMountedRef.current = false;
    };
  }, []);
  const loadModel = useCallback(async () => {
    try {
      setIsModelLoading(true);
      setModelError(null);
      await tf.ready();
      const model = await blazeface.load();
      modelRef.current = model;
      if (isMountedRef.current) {
        setIsModelLoading(false);
      }
    } catch (error) {
      console.error("Failed to load face detection model:", error);
      if (isMountedRef.current) {
        setModelError("Failed to load face detection model. Please check your connection and try again.");
        setIsModelLoading(false);
      }
    }
  }, []);
  useEffect(() => {
    loadModel();
  }, [loadModel]);
  /**
   * Runs BlazeFace on a loaded <img>. The image is drawn onto an offscreen
   * canvas, downscaled so its longest side is at most `maxImageSize`, and the
   * first detection (if any) is mapped back to natural-image coordinates.
   * Never throws: failures are reported via the returned `errorMessage`.
   */
  const detectFace = useCallback(async (imageElement) => {
    if (!modelRef.current) {
      return {
        success: false,
        face: null,
        confidence: 0,
        isLowConfidence: false,
        errorMessage: "Face detection model not loaded"
      };
    }
    try {
      const canvas = document.createElement("canvas");
      const ctx = canvas.getContext("2d");
      if (!ctx) {
        throw new Error("Could not get canvas context");
      }
      // Downscale while preserving aspect ratio; inference is faster on
      // smaller inputs.
      let width = imageElement.naturalWidth;
      let height = imageElement.naturalHeight;
      if (width > height && width > maxImageSize) {
        height = height / width * maxImageSize;
        width = maxImageSize;
      } else if (height > maxImageSize) {
        width = width / height * maxImageSize;
        height = maxImageSize;
      }
      canvas.width = width;
      canvas.height = height;
      ctx.drawImage(imageElement, 0, 0, width, height);
      // Second argument disables tensor outputs, so coordinates come back as
      // plain arrays.
      const predictions = await modelRef.current.estimateFaces(canvas, false);
      if (predictions.length === 0) {
        return {
          success: false,
          face: null,
          confidence: 0,
          isLowConfidence: false,
          errorMessage: "No face detected. Please take a clear photo of your face."
        };
      }
      const prediction = predictions[0];
      // The probability may arrive as a one-element array; normalize it to a
      // plain number.
      const probability = Array.isArray(prediction.probability) ? prediction.probability[0] : prediction.probability;
      // Map canvas-space coordinates back to the image's natural size so
      // overlays can be drawn against the full-resolution picture.
      const scaleX = imageElement.naturalWidth / width;
      const scaleY = imageElement.naturalHeight / height;
      const topLeft = prediction.topLeft;
      const bottomRight = prediction.bottomRight;
      const face = {
        topLeft: [topLeft[0] * scaleX, topLeft[1] * scaleY],
        bottomRight: [bottomRight[0] * scaleX, bottomRight[1] * scaleY],
        probability,
        landmarks: prediction.landmarks
      };
      const isLowConfidence = probability < confidenceThreshold;
      return {
        success: true,
        face,
        confidence: probability,
        isLowConfidence,
        errorMessage: isLowConfidence ? "Face detected but image quality is low. Please retake." : null
      };
    } catch (error) {
      console.error("Face detection error:", error);
      return {
        success: false,
        face: null,
        confidence: 0,
        isLowConfidence: false,
        errorMessage: "An error occurred during face detection. Please try again."
      };
    }
  }, [confidenceThreshold, maxImageSize]);
  const retryModelLoad = useCallback(() => {
    loadModel();
  }, [loadModel]);
  return {
    isModelLoading,
    modelError,
    detectFace,
    retryModelLoad
  };
}
|
|
115
|
+
|
|
116
|
+
// src/components/ImagePreviewWithOverlay.tsx
|
|
117
|
+
import { useState as useState2, useRef as useRef3, useEffect as useEffect3, useCallback as useCallback2 } from "react";
|
|
118
|
+
|
|
119
|
+
// src/components/FaceOverlay.tsx
|
|
120
|
+
import { useEffect as useEffect2, useRef as useRef2 } from "react";
|
|
121
|
+
import { jsx } from "react/jsx-runtime";
|
|
122
|
+
// src/components/FaceOverlay.tsx

// Computes how an image is letterboxed ("object-fit: contain") inside a
// container: the displayed size plus the centering offsets.
function computeContainLayout(imageElement, containerWidth, containerHeight) {
  const imageAspect = imageElement.naturalWidth / imageElement.naturalHeight;
  const containerAspect = containerWidth / containerHeight;
  if (imageAspect > containerAspect) {
    const displayWidth = containerWidth;
    const displayHeight = containerWidth / imageAspect;
    return { displayWidth, displayHeight, offsetX: 0, offsetY: (containerHeight - displayHeight) / 2 };
  }
  const displayHeight = containerHeight;
  const displayWidth = containerHeight * imageAspect;
  return { displayWidth, displayHeight, offsetX: (containerWidth - displayWidth) / 2, offsetY: 0 };
}

// Draws one L-shaped corner accent. (x, y) is the corner point; dx/dy are
// +1/-1 directions pointing into the box.
function drawCornerAccent(ctx, x, y, dx, dy, length) {
  ctx.beginPath();
  ctx.moveTo(x, y + dy * length);
  ctx.lineTo(x, y);
  ctx.lineTo(x + dx * length, y);
  ctx.stroke();
}

/**
 * Canvas overlay that draws the detected face's bounding box, a confidence
 * label, and corner accents on top of an <img> rendered with
 * object-fit: contain. Renders nothing when no face is provided.
 *
 * Props: `imageElement` (the loaded <img>), `face` (natural-coordinate box
 * with `topLeft`, `bottomRight`, `probability`), `containerWidth`/
 * `containerHeight` (on-screen container size in px), `color` (accent color).
 */
function FaceOverlay({
  imageElement,
  face,
  containerWidth,
  containerHeight,
  color = "#22c55e"
}) {
  const canvasRef = useRef2(null);
  useEffect2(() => {
    const canvas = canvasRef.current;
    if (!canvas || !imageElement || !face) {
      // Clear any stale drawing when there is nothing to show.
      if (canvas) {
        const staleCtx = canvas.getContext("2d");
        if (staleCtx) {
          staleCtx.clearRect(0, 0, canvas.width, canvas.height);
        }
      }
      return;
    }
    const ctx = canvas.getContext("2d");
    if (!ctx) return;
    const { displayWidth, displayHeight, offsetX, offsetY } = computeContainLayout(imageElement, containerWidth, containerHeight);
    canvas.width = containerWidth;
    canvas.height = containerHeight;
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    // Map natural-image coordinates to on-screen canvas coordinates.
    const scaleX = displayWidth / imageElement.naturalWidth;
    const scaleY = displayHeight / imageElement.naturalHeight;
    const x1 = face.topLeft[0] * scaleX + offsetX;
    const y1 = face.topLeft[1] * scaleY + offsetY;
    const x2 = face.bottomRight[0] * scaleX + offsetX;
    const y2 = face.bottomRight[1] * scaleY + offsetY;
    const boxWidth = x2 - x1;
    const boxHeight = y2 - y1;
    // Bounding box.
    ctx.strokeStyle = color;
    ctx.lineWidth = 3;
    ctx.strokeRect(x1, y1, boxWidth, boxHeight);
    // Confidence label above the box, clamped so it stays on-canvas.
    const confidenceText = `Confidence: ${Math.round(face.probability * 100)}%`;
    ctx.font = "bold 14px system-ui, sans-serif";
    const textMetrics = ctx.measureText(confidenceText);
    const textPadding = 6;
    const labelHeight = 24;
    const labelWidth = textMetrics.width + textPadding * 2;
    const labelX = x1;
    const labelY = Math.max(y1 - labelHeight - 4, 4);
    ctx.fillStyle = color;
    ctx.fillRect(labelX, labelY, labelWidth, labelHeight);
    ctx.fillStyle = "#ffffff";
    ctx.textBaseline = "middle";
    ctx.fillText(confidenceText, labelX + textPadding, labelY + labelHeight / 2);
    // Corner accents; length is capped at 20% of the box so small boxes
    // keep proportionate corners.
    const cornerLength = Math.min(20, boxWidth * 0.2, boxHeight * 0.2);
    ctx.lineWidth = 4;
    ctx.strokeStyle = color;
    drawCornerAccent(ctx, x1, y1, 1, 1, cornerLength);
    drawCornerAccent(ctx, x2, y1, -1, 1, cornerLength);
    drawCornerAccent(ctx, x1, y2, 1, -1, cornerLength);
    drawCornerAccent(ctx, x2, y2, -1, -1, cornerLength);
  }, [imageElement, face, containerWidth, containerHeight, color]);
  if (!face) return null;
  return /* @__PURE__ */ jsx(
    "canvas",
    {
      ref: canvasRef,
      style: {
        position: "absolute",
        inset: 0,
        pointerEvents: "none",
        zIndex: 10,
        width: containerWidth,
        height: containerHeight
      }
    }
  );
}
|
|
225
|
+
|
|
226
|
+
// src/components/ImagePreviewWithOverlay.tsx
|
|
227
|
+
import { jsx as jsx2, jsxs } from "react/jsx-runtime";
|
|
228
|
+
// src/components/ImagePreviewWithOverlay.tsx
/**
 * Shows either a dashed "no photo yet" placeholder or the captured photo with
 * an optional FaceOverlay drawn on top. The container size is tracked so the
 * overlay canvas matches the rendered image.
 */
function ImagePreviewWithOverlay({
  imageSrc,
  face,
  onImageLoad,
  showOverlay = true,
  overlayColor = "#22c55e",
  placeholderText = "Capture a photo to detect your face",
  className = ""
}) {
  const wrapperRef = useRef3(null);
  const photoRef = useRef3(null);
  const [bounds, setBounds] = useState2({ width: 0, height: 0 });
  // Re-measure the wrapper so the overlay canvas can be sized to match.
  const measure = useCallback2(() => {
    const node = wrapperRef.current;
    if (node) {
      setBounds({ width: node.offsetWidth, height: node.offsetHeight });
    }
  }, []);
  useEffect3(() => {
    measure();
    window.addEventListener("resize", measure);
    return () => window.removeEventListener("resize", measure);
  }, [measure]);
  // Notify the parent once the <img> finishes loading, then re-measure.
  const onPhotoLoaded = useCallback2(() => {
    const img = photoRef.current;
    if (img) {
      onImageLoad(img);
      measure();
    }
  }, [onImageLoad, measure]);
  const frameStyle = {
    width: "100%",
    aspectRatio: "3/4",
    maxWidth: "24rem",
    margin: "0 auto",
    borderRadius: "0.75rem",
    display: "flex",
    alignItems: "center",
    justifyContent: "center",
    backgroundColor: "#f3f4f6",
    border: imageSrc ? "none" : "2px dashed #d1d5db",
    position: "relative",
    overflow: "hidden"
  };
  if (!imageSrc) {
    // Placeholder state: camera glyph in a round badge plus a hint line.
    const cameraGlyph = /* @__PURE__ */ jsxs(
      "svg",
      {
        style: { width: "2rem", height: "2rem", color: "#9ca3af" },
        fill: "none",
        stroke: "currentColor",
        viewBox: "0 0 24 24",
        children: [
          /* @__PURE__ */ jsx2(
            "path",
            {
              strokeLinecap: "round",
              strokeLinejoin: "round",
              strokeWidth: 1.5,
              d: "M3 9a2 2 0 012-2h.93a2 2 0 001.664-.89l.812-1.22A2 2 0 0110.07 4h3.86a2 2 0 011.664.89l.812 1.22A2 2 0 0018.07 7H19a2 2 0 012 2v9a2 2 0 01-2 2H5a2 2 0 01-2-2V9z"
            }
          ),
          /* @__PURE__ */ jsx2(
            "path",
            {
              strokeLinecap: "round",
              strokeLinejoin: "round",
              strokeWidth: 1.5,
              d: "M15 13a3 3 0 11-6 0 3 3 0 016 0z"
            }
          )
        ]
      }
    );
    const glyphBadge = /* @__PURE__ */ jsx2("div", {
      style: {
        width: "4rem",
        height: "4rem",
        margin: "0 auto 1rem",
        borderRadius: "50%",
        backgroundColor: "#e5e7eb",
        display: "flex",
        alignItems: "center",
        justifyContent: "center"
      },
      children: cameraGlyph
    });
    return /* @__PURE__ */ jsx2("div", {
      ref: wrapperRef,
      style: frameStyle,
      className,
      children: /* @__PURE__ */ jsxs("div", {
        style: { textAlign: "center", padding: "2rem" },
        children: [
          glyphBadge,
          /* @__PURE__ */ jsx2("p", { style: { color: "#6b7280", fontSize: "0.875rem" }, children: placeholderText })
        ]
      })
    });
  }
  return /* @__PURE__ */ jsxs("div", {
    ref: wrapperRef,
    style: { ...frameStyle, border: "none" },
    className,
    children: [
      /* @__PURE__ */ jsx2("img", {
        ref: photoRef,
        src: imageSrc,
        alt: "Captured",
        onLoad: onPhotoLoaded,
        style: { width: "100%", height: "100%", objectFit: "contain" }
      }),
      showOverlay && /* @__PURE__ */ jsx2(FaceOverlay, {
        imageElement: photoRef.current,
        face,
        containerWidth: bounds.width,
        containerHeight: bounds.height,
        color: overlayColor
      })
    ]
  });
}
|
|
339
|
+
|
|
340
|
+
// src/components/DetectionResultDisplay.tsx
|
|
341
|
+
import { jsx as jsx3, jsxs as jsxs2 } from "react/jsx-runtime";
|
|
342
|
+
// src/components/DetectionResultDisplay.tsx
/**
 * Status card for a detection result: success, low-confidence warning, or
 * failure. Returns null while there is no result.
 *
 * Props: `result` (from detectFace), `className`, and the three accent colors
 * `successColor` / `warningColor` / `errorColor` (hex strings; an alpha
 * suffix is appended for the tinted background/border).
 */
function DetectionResultDisplay({
  result,
  className = "",
  successColor = "#22c55e",
  warningColor = "#eab308",
  errorColor = "#ef4444"
}) {
  if (!result) return null;
  const baseStyles = {
    display: "flex",
    alignItems: "center",
    gap: "0.75rem",
    borderRadius: "0.5rem",
    padding: "1rem",
    border: "1px solid"
  };
  const detailStyle = { fontSize: "0.875rem", color: "#6b7280" };
  // Shared card layout; the three outcomes differ only in accent color,
  // icon path, headline, and detail node.
  const renderCard = (color, iconPath, title, detail) => /* @__PURE__ */ jsxs2(
    "div",
    {
      className,
      style: {
        ...baseStyles,
        // Hex-with-alpha: "10" ≈ 6% background tint, "33" ≈ 20% border tint.
        backgroundColor: `${color}10`,
        borderColor: `${color}33`
      },
      children: [
        /* @__PURE__ */ jsx3("svg", { style: { width: "1.5rem", height: "1.5rem", flexShrink: 0 }, fill: color, viewBox: "0 0 24 24", children: /* @__PURE__ */ jsx3("path", { d: iconPath }) }),
        /* @__PURE__ */ jsxs2("div", { style: { display: "flex", flexDirection: "column" }, children: [
          /* @__PURE__ */ jsx3("span", { style: { fontWeight: 600, color: "#111827" }, children: title }),
          detail
        ] })
      ]
    }
  );
  if (result.success && !result.isLowConfidence) {
    return renderCard(
      successColor,
      "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm-2 15l-5-5 1.41-1.41L10 14.17l7.59-7.59L19 8l-9 9z",
      "Face detected successfully \u2705",
      /* @__PURE__ */ jsxs2("span", { style: detailStyle, children: [
        "Confidence: ",
        Math.round(result.confidence * 100),
        "%"
      ] })
    );
  }
  if (result.success && result.isLowConfidence) {
    return renderCard(
      warningColor,
      "M1 21h22L12 2 1 21zm12-3h-2v-2h2v2zm0-4h-2v-4h2v4z",
      "Face detected but image quality is low",
      /* @__PURE__ */ jsxs2("span", { style: detailStyle, children: [
        "Confidence: ",
        Math.round(result.confidence * 100),
        "% \u2014 Please retake for better results."
      ] })
    );
  }
  return renderCard(
    errorColor,
    "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm1 15h-2v-2h2v2zm0-4h-2V7h2v6z",
    "No face detected",
    /* @__PURE__ */ jsx3("span", { style: detailStyle, children: result.errorMessage || "Please take a clear photo of your face." })
  );
}
|
|
425
|
+
|
|
426
|
+
// src/components/LoadingState.tsx
|
|
427
|
+
import { jsx as jsx4, jsxs as jsxs3 } from "react/jsx-runtime";
|
|
428
|
+
// src/components/LoadingState.tsx
/**
 * Model-lifecycle status panel: a spinner while the detection model loads, an
 * error panel with a retry button on failure, and nothing otherwise.
 */
function LoadingState({
  isLoading,
  error,
  onRetry,
  loadingText = "Loading Face Detection",
  errorText = "Failed to Load Model",
  className = ""
}) {
  const layout = {
    display: "flex",
    flexDirection: "column",
    alignItems: "center",
    justifyContent: "center",
    gap: "1rem",
    padding: "2rem"
  };
  if (isLoading) {
    const spinner = /* @__PURE__ */ jsx4("div", {
      style: {
        width: "3rem",
        height: "3rem",
        border: "4px solid #3b82f6",
        borderTopColor: "transparent",
        borderRadius: "50%",
        animation: "spin 1s linear infinite"
      }
    });
    return /* @__PURE__ */ jsxs3("div", { style: layout, className, children: [
      spinner,
      /* @__PURE__ */ jsx4("style", { children: `@keyframes spin { to { transform: rotate(360deg); } }` }),
      /* @__PURE__ */ jsxs3("div", { style: { textAlign: "center" }, children: [
        /* @__PURE__ */ jsx4("p", { style: { fontWeight: 600, color: "#111827" }, children: loadingText }),
        /* @__PURE__ */ jsx4("p", { style: { fontSize: "0.875rem", color: "#6b7280" }, children: "Preparing AI model for offline use..." })
      ] })
    ] });
  }
  if (!error) return null;
  const retryButton = /* @__PURE__ */ jsxs3(
    "button",
    {
      onClick: onRetry,
      style: {
        display: "inline-flex",
        alignItems: "center",
        gap: "0.5rem",
        padding: "0.5rem 1rem",
        backgroundColor: "#3b82f6",
        color: "white",
        borderRadius: "0.5rem",
        border: "none",
        cursor: "pointer",
        fontSize: "0.875rem",
        fontWeight: 500
      },
      children: [
        /* @__PURE__ */ jsx4("svg", { style: { width: "1rem", height: "1rem" }, fill: "none", stroke: "currentColor", viewBox: "0 0 24 24", children: /* @__PURE__ */ jsx4("path", { strokeLinecap: "round", strokeLinejoin: "round", strokeWidth: 2, d: "M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" }) }),
        "Retry"
      ]
    }
  );
  return /* @__PURE__ */ jsxs3("div", { style: layout, className, children: [
    /* @__PURE__ */ jsx4("svg", { style: { width: "3rem", height: "3rem", color: "#ef4444" }, fill: "currentColor", viewBox: "0 0 24 24", children: /* @__PURE__ */ jsx4("path", { d: "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm1 15h-2v-2h2v2zm0-4h-2V7h2v6z" }) }),
    /* @__PURE__ */ jsxs3("div", { style: { textAlign: "center" }, children: [
      /* @__PURE__ */ jsx4("p", { style: { fontWeight: 600, color: "#111827" }, children: errorText }),
      /* @__PURE__ */ jsx4("p", { style: { fontSize: "0.875rem", color: "#6b7280", marginBottom: "1rem" }, children: error }),
      retryButton
    ] })
  ] });
}
|
|
495
|
+
|
|
496
|
+
// src/components/CaptureButton.tsx
|
|
497
|
+
import { useState as useState3, useCallback as useCallback3 } from "react";
|
|
498
|
+
import { Camera, CameraResultType, CameraSource, CameraDirection } from "@capacitor/camera";
|
|
499
|
+
import { Fragment, jsx as jsx5, jsxs as jsxs4 } from "react/jsx-runtime";
|
|
500
|
+
// src/components/CaptureButton.tsx
var SPIN_KEYFRAMES = `@keyframes spin { to { transform: rotate(360deg); } }`;
var REFRESH_ICON_PATH = "M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15";
var CAMERA_BODY_PATH = "M3 9a2 2 0 012-2h.93a2 2 0 001.664-.89l.812-1.22A2 2 0 0110.07 4h3.86a2 2 0 011.664.89l.812 1.22A2 2 0 0018.07 7H19a2 2 0 012 2v9a2 2 0 01-2 2H5a2 2 0 01-2-2V9z";
var CAMERA_LENS_PATH = "M15 13a3 3 0 11-6 0 3 3 0 016 0z";

// Circular-arrows icon at the given CSS size.
function renderRefreshIcon(size) {
  return /* @__PURE__ */ jsx5("svg", { style: { width: size, height: size }, fill: "none", stroke: "currentColor", viewBox: "0 0 24 24", children: /* @__PURE__ */ jsx5("path", { strokeLinecap: "round", strokeLinejoin: "round", strokeWidth: 2, d: REFRESH_ICON_PATH }) });
}

// Camera (body + lens) icon at the given CSS size.
function renderCameraIcon(size) {
  return /* @__PURE__ */ jsxs4("svg", { style: { width: size, height: size }, fill: "none", stroke: "currentColor", viewBox: "0 0 24 24", children: [
    /* @__PURE__ */ jsx5("path", { strokeLinecap: "round", strokeLinejoin: "round", strokeWidth: 2, d: CAMERA_BODY_PATH }),
    /* @__PURE__ */ jsx5("path", { strokeLinecap: "round", strokeLinejoin: "round", strokeWidth: 2, d: CAMERA_LENS_PATH })
  ] });
}

// Rotating ring spinner (relies on the "spin" keyframes rendered below).
function renderSpinner(size, borderColor) {
  return /* @__PURE__ */ jsx5("div", { style: { width: size, height: size, border: `2px solid ${borderColor}`, borderTopColor: "transparent", borderRadius: "50%", animation: "spin 1s linear infinite" } });
}

/**
 * Camera capture control built on Capacitor's Camera plugin: optional
 * front/rear toggle, a capture (or retake) button, and a processing row.
 * Reports captured photos as data URLs via `onImageCaptured`; user-facing
 * camera failures are routed through `onError` (cancellation is ignored).
 */
function CaptureButton({
  onImageCaptured,
  isProcessing = false,
  showRetake = false,
  onRetake,
  defaultFacing = "front",
  allowCameraSwitch = true,
  quality = 90,
  onError,
  className = ""
}) {
  const [isCapturing, setIsCapturing] = useState3(false);
  const [cameraFacing, setCameraFacing] = useState3(defaultFacing);
  const capturePhoto = useCallback3(async () => {
    try {
      setIsCapturing(true);
      const photo = await Camera.getPhoto({
        quality,
        allowEditing: false,
        resultType: CameraResultType.DataUrl,
        source: CameraSource.Camera,
        correctOrientation: true,
        direction: cameraFacing === "front" ? CameraDirection.Front : CameraDirection.Rear
      });
      if (photo.dataUrl) {
        onImageCaptured(photo.dataUrl);
      }
    } catch (error) {
      console.error("Camera error:", error);
      const errorMessage = error instanceof Error ? error.message : String(error);
      const isPermissionIssue = errorMessage.includes("denied") || errorMessage.includes("permission");
      const isUserCancel = errorMessage.includes("cancelled") || errorMessage.includes("canceled");
      if (isPermissionIssue) {
        onError?.("Camera permission denied. Please enable camera access in your device settings.");
      } else if (!isUserCancel) {
        onError?.("Unable to access camera. Please try again.");
      }
    } finally {
      setIsCapturing(false);
    }
  }, [onImageCaptured, cameraFacing, quality, onError]);
  const handleRetake = useCallback3(() => {
    onRetake?.();
    capturePhoto();
  }, [onRetake, capturePhoto]);
  const isDisabled = isCapturing || isProcessing;
  const actionButtonStyle = {
    width: "100%",
    maxWidth: "20rem",
    display: "flex",
    alignItems: "center",
    justifyContent: "center",
    gap: "0.5rem",
    padding: "0.75rem 1.5rem",
    color: "white",
    borderRadius: "0.5rem",
    border: "none",
    cursor: isDisabled ? "not-allowed" : "pointer",
    opacity: isDisabled ? 0.5 : 1,
    fontSize: "1rem",
    fontWeight: 500,
    transition: "background-color 0.2s"
  };
  const facingButtonStyle = (isActive) => ({
    flex: 1,
    display: "flex",
    alignItems: "center",
    justifyContent: "center",
    gap: "0.5rem",
    padding: "0.5rem 1rem",
    borderRadius: "0.5rem",
    border: "1px solid",
    backgroundColor: isActive ? "#3b82f6" : "white",
    color: isActive ? "white" : "#374151",
    borderColor: isActive ? "#3b82f6" : "#d1d5db",
    cursor: "pointer",
    fontSize: "0.875rem",
    fontWeight: 500,
    transition: "all 0.2s"
  });
  const facingSwitch = /* @__PURE__ */ jsxs4("div", { style: { display: "flex", gap: "0.5rem", width: "100%", maxWidth: "20rem" }, children: [
    /* @__PURE__ */ jsxs4("button", {
      onClick: () => setCameraFacing("front"),
      style: facingButtonStyle(cameraFacing === "front"),
      children: [renderRefreshIcon("1rem"), "Selfie"]
    }),
    /* @__PURE__ */ jsxs4("button", {
      onClick: () => setCameraFacing("rear"),
      style: facingButtonStyle(cameraFacing === "rear"),
      children: [renderCameraIcon("1rem"), "Rear"]
    })
  ] });
  const mainButton = showRetake ? /* @__PURE__ */ jsx5("button", {
    onClick: handleRetake,
    disabled: isDisabled,
    style: { ...actionButtonStyle, backgroundColor: "#eab308" },
    children: isCapturing ? renderSpinner("1.25rem", "white") : /* @__PURE__ */ jsxs4(Fragment, { children: [
      renderRefreshIcon("1.25rem"),
      "Retake Photo"
    ] })
  }) : /* @__PURE__ */ jsx5("button", {
    onClick: capturePhoto,
    disabled: isDisabled,
    style: { ...actionButtonStyle, backgroundColor: "#3b82f6" },
    children: isCapturing ? renderSpinner("1.25rem", "white") : /* @__PURE__ */ jsxs4(Fragment, { children: [
      renderCameraIcon("1.25rem"),
      "Capture Photo"
    ] })
  });
  return /* @__PURE__ */ jsxs4("div", { style: { display: "flex", flexDirection: "column", alignItems: "center", gap: "1rem", width: "100%" }, className, children: [
    allowCameraSwitch && facingSwitch,
    mainButton,
    /* @__PURE__ */ jsx5("style", { children: SPIN_KEYFRAMES }),
    isProcessing && /* @__PURE__ */ jsxs4("div", { style: { display: "flex", alignItems: "center", gap: "0.5rem", color: "#6b7280" }, children: [
      renderSpinner("1rem", "#3b82f6"),
      /* @__PURE__ */ jsx5("span", { children: "Analyzing face..." })
    ] })
  ] });
}
|
|
639
|
+
|
|
640
|
+
// src/components/FaceDetector.tsx
|
|
641
|
+
import { Fragment as Fragment2, jsx as jsx6, jsxs as jsxs5 } from "react/jsx-runtime";
|
|
642
|
+
// src/components/FaceDetector.tsx
/**
 * High-level drop-in component: capture a photo with the device camera, run
 * BlazeFace on it, and render the preview, overlay, result card, and capture
 * controls. Shows LoadingState until the model is ready.
 */
function FaceDetector({
  // Detection config
  confidenceThreshold = 0.7,
  maxImageSize = 640,
  showOverlay = true,
  showResult = true,
  onDetectionComplete,
  onError,
  // Camera config
  defaultFacing = "front",
  allowCameraSwitch = true,
  quality = 90,
  // Styling
  className = ""
}) {
  const [capturedImage, setCapturedImage] = useState4(null);
  const [detectionResult, setDetectionResult] = useState4(null);
  const [detectedFace, setDetectedFace] = useState4(null);
  const [isProcessing, setIsProcessing] = useState4(false);
  const { isModelLoading, modelError, detectFace, retryModelLoad } = useFaceDetectionCore({
    confidenceThreshold,
    maxImageSize
  });
  // A fresh capture invalidates any previous detection state.
  const onPhotoTaken = useCallback4((imageDataUrl) => {
    setCapturedImage(imageDataUrl);
    setDetectionResult(null);
    setDetectedFace(null);
  }, []);
  // Runs detection once the captured <img> has finished loading.
  const runDetection = useCallback4(async (img) => {
    setIsProcessing(true);
    try {
      const result = await detectFace(img);
      setDetectionResult(result);
      setDetectedFace(result.success && result.face ? result.face : null);
      onDetectionComplete?.(result);
    } catch (error) {
      console.error("Detection error:", error);
      onError?.("An error occurred during detection");
    } finally {
      setIsProcessing(false);
    }
  }, [detectFace, onDetectionComplete, onError]);
  const resetCapture = useCallback4(() => {
    setCapturedImage(null);
    setDetectionResult(null);
    setDetectedFace(null);
  }, []);
  const forwardCameraError = useCallback4((error) => {
    onError?.(error);
  }, [onError]);
  // Offer a retake whenever detection failed or confidence was too low.
  const needsRetake = detectionResult !== null && (!detectionResult.success || detectionResult.isLowConfidence);
  const rootStyle = {
    display: "flex",
    flexDirection: "column",
    alignItems: "center",
    gap: "1.5rem",
    maxWidth: "32rem",
    margin: "0 auto",
    padding: "1rem 0"
  };
  return /* @__PURE__ */ jsxs5("div", { style: rootStyle, className, children: [
    (isModelLoading || modelError) && /* @__PURE__ */ jsx6(LoadingState, {
      isLoading: isModelLoading,
      error: modelError,
      onRetry: retryModelLoad
    }),
    !isModelLoading && !modelError && /* @__PURE__ */ jsxs5(Fragment2, { children: [
      /* @__PURE__ */ jsx6(ImagePreviewWithOverlay, {
        imageSrc: capturedImage,
        face: detectedFace,
        onImageLoad: runDetection,
        showOverlay
      }),
      showResult && detectionResult && /* @__PURE__ */ jsx6("div", { style: { width: "100%", maxWidth: "24rem" }, children: /* @__PURE__ */ jsx6(DetectionResultDisplay, { result: detectionResult }) }),
      /* @__PURE__ */ jsx6("div", { style: { width: "100%", maxWidth: "24rem" }, children: /* @__PURE__ */ jsx6(CaptureButton, {
        onImageCaptured: onPhotoTaken,
        isProcessing,
        showRetake: needsRetake,
        onRetake: resetCapture,
        defaultFacing,
        allowCameraSwitch,
        quality,
        onError: forwardCameraError
      }) }),
      !capturedImage && /* @__PURE__ */ jsx6("div", { style: { textAlign: "center", fontSize: "0.875rem", color: "#6b7280", padding: "0 1rem" }, children: /* @__PURE__ */ jsx6("p", { children: "Take a clear photo of your face. The app will detect your face and show a confidence score." }) })
    ] })
  ] });
}
|
|
743
|
+
export {
|
|
744
|
+
CaptureButton,
|
|
745
|
+
DetectionResultDisplay,
|
|
746
|
+
FaceDetector,
|
|
747
|
+
FaceOverlay,
|
|
748
|
+
ImagePreviewWithOverlay,
|
|
749
|
+
LoadingState,
|
|
750
|
+
useFaceDetectionCore
|
|
751
|
+
};
|