@facesmash/sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +236 -0
- package/dist/index.cjs +658 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +192 -0
- package/dist/index.d.ts +192 -0
- package/dist/index.js +621 -0
- package/dist/index.js.map +1 -0
- package/dist/react.cjs +1068 -0
- package/dist/react.cjs.map +1 -0
- package/dist/react.d.cts +292 -0
- package/dist/react.d.ts +292 -0
- package/dist/react.js +1024 -0
- package/dist/react.js.map +1 -0
- package/package.json +84 -0
package/dist/react.js
ADDED
|
@@ -0,0 +1,1024 @@
|
|
|
1
|
+
import PocketBase from 'pocketbase';
|
|
2
|
+
import * as faceapi from '@vladmandic/face-api';
|
|
3
|
+
import { createContext, useRef, useState, useCallback, useEffect, useContext } from 'react';
|
|
4
|
+
import { jsx, Fragment, jsxs } from 'react/jsx-runtime';
|
|
5
|
+
|
|
6
|
+
// src/core/client.ts
|
|
7
|
+
// Module-level guard so model weights are downloaded and initialized at most
// once per page, no matter how many clients call loadModels().
var modelsLoaded = false;

/**
 * Downloads and initializes all face-api.js models from `config.modelUrl`.
 *
 * A best-effort TF.js setup runs first: it switches to the WebGL backend and
 * enables two performance flags when this TF.js build registers them; any
 * failure in that step is ignored and loading proceeds on the default backend.
 *
 * @param {object} config - Resolved SDK config (`modelUrl`, `debug`).
 * @param {(progress: number) => void} [onProgress] - Coarse progress callback;
 *   only 10 (setup done) and 100 (all models loaded) are ever reported.
 * @returns {Promise<boolean>} true on success; false when loading failed
 *   (the underlying error is only logged when `config.debug` is set).
 */
async function loadModels(config, onProgress) {
  if (modelsLoaded) {
    onProgress?.(100);
    return true;
  }
  try {
    try {
      // face-api re-exports its bundled TF.js instance; may be undefined.
      const tf2 = faceapi.tf;
      if (tf2) {
        await tf2.setBackend("webgl");
        await tf2.ready();
        // Only set flags that this TF.js build actually registers.
        if (tf2.env().flagRegistry?.CANVAS2D_WILL_READ_FREQUENTLY) {
          tf2.env().set("CANVAS2D_WILL_READ_FREQUENTLY", true);
        }
        if (tf2.env().flagRegistry?.WEBGL_EXP_CONV) {
          tf2.env().set("WEBGL_EXP_CONV", true);
        }
        if (config.debug) {
          console.log(`[FaceSmash] TF.js backend: ${tf2.getBackend()}`);
        }
      }
    } catch {
      // Backend tuning is optional; fall through to model loading.
    }
    onProgress?.(10);
    // Five networks are fetched in parallel: two detectors (SSD + Tiny
    // fallback), 68-point landmarks, the recognition (descriptor) net,
    // and the expression net.
    await Promise.all([
      faceapi.nets.ssdMobilenetv1.loadFromUri(config.modelUrl),
      faceapi.nets.tinyFaceDetector.loadFromUri(config.modelUrl),
      faceapi.nets.faceLandmark68Net.loadFromUri(config.modelUrl),
      faceapi.nets.faceRecognitionNet.loadFromUri(config.modelUrl),
      faceapi.nets.faceExpressionNet.loadFromUri(config.modelUrl)
    ]);
    modelsLoaded = true;
    onProgress?.(100);
    if (config.debug) {
      console.log("[FaceSmash] Models loaded successfully");
    }
    return true;
  } catch (error) {
    if (config.debug) {
      console.error("[FaceSmash] Failed to load models:", error);
    }
    return false;
  }
}
|
|
52
|
+
/** Whether loadModels() has completed successfully at least once. */
function areModelsLoaded() {
  return modelsLoaded;
}
|
|
55
|
+
/**
 * Builds SSD MobileNet detector options with the given minimum confidence.
 */
function getSsdOptions(minConfidence) {
  const options = { minConfidence };
  return new faceapi.SsdMobilenetv1Options(options);
}
|
|
58
|
+
/**
 * Fixed Tiny-face-detector options used as the fallback detector.
 */
function getTinyOptions() {
  const tinyConfig = { inputSize: 224, scoreThreshold: 0.4 };
  return new faceapi.TinyFaceDetectorOptions(tinyConfig);
}
|
|
61
|
+
/**
 * Extracts a single face descriptor from an image.
 *
 * @param {string|object} input - An image URL/data URL (loaded via
 *   faceapi.fetchImage) or a media element face-api can consume directly.
 * @param {object} config - Resolved SDK config (`minDetectionConfidence`, `debug`).
 * @returns {Promise<Float32Array|null>} Descriptor of the single detected
 *   face, or null when no face is found or any step throws (errors are only
 *   logged when `config.debug` is set).
 */
async function extractDescriptor(input, config) {
  try {
    const media = typeof input === "string" ? await faceapi.fetchImage(input) : input;
    // Primary pass: SSD MobileNet detector.
    let detection = await faceapi.detectSingleFace(media, getSsdOptions(config.minDetectionConfidence)).withFaceLandmarks().withFaceDescriptor();
    if (!detection) {
      // Fallback pass: Tiny detector with fixed options.
      detection = await faceapi.detectSingleFace(media, getTinyOptions()).withFaceLandmarks().withFaceDescriptor();
    }
    return detection?.descriptor ?? null;
  } catch (error) {
    if (config.debug) {
      console.error("[FaceSmash] Descriptor extraction failed:", error);
    }
    return null;
  }
}
|
|
76
|
+
/**
 * Runs full single-face analysis on an image (URL or data URL): detection,
 * landmarks, descriptor, head pose, face-size validation, lighting analysis,
 * and a composite quality score.
 *
 * @param {string} imageData - Image URL or data URL (loaded via faceapi.fetchImage).
 * @param {object} config - Resolved SDK config.
 * @returns {Promise<object|null>} Analysis result, or null when no face is
 *   detected or an unexpected error occurs. When the face-size check fails,
 *   a result is still returned with qualityScore/lightingScore forced to 0
 *   and `rejectionReason` set, so callers can surface the reason.
 */
async function analyzeFace(imageData, config) {
  try {
    const img = await faceapi.fetchImage(imageData);
    // SSD detector first, Tiny detector as fallback (mirrors extractDescriptor).
    let detection = await faceapi.detectSingleFace(img, getSsdOptions(config.minDetectionConfidence)).withFaceLandmarks().withFaceDescriptor();
    if (!detection) {
      detection = await faceapi.detectSingleFace(img, getTinyOptions()).withFaceLandmarks().withFaceDescriptor();
    }
    if (!detection) return null;
    const headPose = estimateHeadPose(detection.landmarks, detection.detection.box);
    const imgWidth = img.width || 640;
    const imgHeight = img.height || 480;
    const faceSizeCheck = validateFaceSize(detection.detection.box, imgWidth, imgHeight);
    if (!faceSizeCheck.isValid) {
      // Early exit: unusable face size — return a zero-quality result that
      // still carries the descriptor and the rejection reason.
      return {
        descriptor: detection.descriptor,
        normalizedDescriptor: normalizeDescriptor(detection.descriptor),
        confidence: detection.detection.score,
        qualityScore: 0,
        lightingScore: 0,
        headPose,
        faceSizeCheck,
        eyeAspectRatio: 0,
        rejectionReason: faceSizeCheck.reason
      };
    }
    const { avgEAR } = getEyeAspectRatios(detection.landmarks);
    let lightingAnalysis;
    try {
      lightingAnalysis = analyzeLighting(detection, img);
    } catch {
      // Canvas access can fail (e.g. no 2D context); substitute neutral values.
      lightingAnalysis = {
        score: 0.5,
        brightness: 0.5,
        contrast: 0.5,
        evenness: 0.5,
        conditions: { tooDark: false, tooBright: false, uneven: false, optimal: false }
      };
    }
    // Composite quality: detector score, blended 70/30 with lighting...
    let qualityScore = Math.min(detection.detection.score, 1);
    qualityScore *= 0.7 + lightingAnalysis.score * 0.3;
    // ...then scaled by relative face size, capped at 30% of a nominal frame.
    const faceArea = detection.detection.box.width * detection.detection.box.height;
    // NOTE(review): uses a fixed 640x640 nominal area here even though the
    // actual image dimensions are computed above — confirm intended.
    const imageArea = 640 * 640;
    const sizeRatio = Math.min(faceArea / imageArea, 0.3) / 0.3;
    qualityScore *= 0.8 + sizeRatio * 0.2;
    if (!headPose.isFrontal) {
      // Penalize non-frontal poses, but never by more than half.
      const anglePenalty = Math.max(0.5, 1 - (Math.abs(headPose.yaw) + Math.abs(headPose.pitch)) * 0.3);
      qualityScore *= anglePenalty;
    }
    qualityScore = Math.max(0, Math.min(1, qualityScore));
    return {
      descriptor: detection.descriptor,
      normalizedDescriptor: normalizeDescriptor(detection.descriptor),
      confidence: detection.detection.score,
      qualityScore,
      lightingScore: lightingAnalysis.score,
      headPose,
      faceSizeCheck,
      eyeAspectRatio: avgEAR
    };
  } catch (error) {
    if (config.debug) {
      console.error("[FaceSmash] Face analysis failed:", error);
    }
    return null;
  }
}
|
|
142
|
+
/**
 * Extracts a descriptor from each image and averages them component-wise
 * into one descriptor. A single image short-circuits to a plain extraction.
 * Returns null when no image yields a face.
 */
async function processImages(images, config) {
  if (images.length === 1) {
    return extractDescriptor(images[0], config);
  }
  const collected = [];
  for (const image of images) {
    const descriptor = await extractDescriptor(image, config);
    if (descriptor) collected.push(descriptor);
  }
  if (collected.length === 0) return null;
  const dims = collected[0].length;
  const mean = new Float32Array(dims);
  for (let i = 0; i < dims; i++) {
    let total = 0;
    for (const descriptor of collected) {
      total += descriptor[i];
    }
    mean[i] = total / collected.length;
  }
  return mean;
}
|
|
160
|
+
/** Euclidean distance between two 2-D points ({x, y}). */
function euclidean(a, b) {
  const dx = a.x - b.x;
  const dy = a.y - b.y;
  return Math.sqrt(dx * dx + dy * dy);
}
|
|
163
|
+
/**
 * Eye aspect ratio (EAR) for a 6-point eye contour; larger = more open.
 * Returns 0.3 when fewer than 6 points are supplied, and 0 when the
 * horizontal span is degenerate.
 */
function calculateEAR(eye) {
  if (eye.length < 6) return 0.3;
  const vertical1 = euclidean(eye[1], eye[5]);
  const vertical2 = euclidean(eye[2], eye[4]);
  const horizontal = euclidean(eye[0], eye[3]);
  if (horizontal === 0) return 0;
  return (vertical1 + vertical2) / (2 * horizontal);
}
|
|
170
|
+
/** EAR of each eye plus their mean, from face-api 68-point landmarks. */
function getEyeAspectRatios(landmarks) {
  const leftEAR = calculateEAR(landmarks.getLeftEye());
  const rightEAR = calculateEAR(landmarks.getRightEye());
  const avgEAR = (leftEAR + rightEAR) / 2;
  return { leftEAR, rightEAR, avgEAR };
}
|
|
175
|
+
/**
 * Rough head-pose estimate from 68-point landmarks and the detection box.
 * Yaw/pitch are the nose tip's offset from the box center, normalized by
 * the half-extent; roll is the jaw-line angle in radians.
 */
function estimateHeadPose(landmarks, box) {
  const nosePoints = landmarks.getNose();
  const jawPoints = landmarks.getJawOutline();
  const noseTip = nosePoints[3];
  const centerX = box.x + box.width / 2;
  const centerY = box.y + box.height / 2;
  const yaw = (noseTip.x - centerX) / (box.width / 2);
  const pitch = (noseTip.y - centerY) / (box.height / 2);
  const jawStart = jawPoints[0];
  const jawEnd = jawPoints[jawPoints.length - 1];
  const roll = Math.atan2(jawEnd.y - jawStart.y, jawEnd.x - jawStart.x);
  const isFrontal =
    Math.abs(yaw) < 0.35 && Math.abs(pitch) < 0.4 && Math.abs(roll) < 0.25;
  return { yaw, pitch, roll, isFrontal };
}
|
|
189
|
+
/**
 * Checks that a detected face occupies a usable fraction of the frame.
 * Returns { isValid, ratio }, plus a human-readable `reason` on rejection.
 */
function validateFaceSize(box, frameWidth = 640, frameHeight = 480) {
  const ratio = (box.width * box.height) / (frameWidth * frameHeight);
  if (ratio < 0.02) {
    return { isValid: false, ratio, reason: "Face too far from camera" };
  }
  if (ratio > 0.65) {
    return { isValid: false, ratio, reason: "Face too close to camera" };
  }
  const tooSmall = box.width < 80 || box.height < 80;
  if (tooSmall) {
    return { isValid: false, ratio, reason: "Face too small for reliable recognition" };
  }
  return { isValid: true, ratio };
}
|
|
196
|
+
/**
 * L2-normalizes a descriptor to unit length. The input is returned
 * unchanged (same object) when its norm is zero.
 */
function normalizeDescriptor(descriptor) {
  let sumOfSquares = 0;
  for (let i = 0; i < descriptor.length; i++) {
    sumOfSquares += descriptor[i] ** 2;
  }
  const norm = Math.sqrt(sumOfSquares);
  if (norm === 0) return descriptor;
  const unit = new Float32Array(descriptor.length);
  for (let i = 0; i < descriptor.length; i++) {
    unit[i] = descriptor[i] / norm;
  }
  return unit;
}
|
|
205
|
+
/**
 * Estimates lighting quality around a detected face by drawing the source
 * image to an offscreen canvas and sampling brightness in the face region
 * (expanded by 20px on each side, clamped to the canvas bounds).
 *
 * @param {object} detection - face-api result (reads detection.detection.box).
 * @param {HTMLImageElement|object} imageElement - Drawable image source.
 * @returns {{score:number, brightness:number, contrast:number,
 *   evenness:number, conditions:object}} All values normalized to 0..1.
 * @throws When a 2D canvas context cannot be created (the caller treats this
 *   as "lighting unknown" and substitutes neutral values).
 */
function analyzeLighting(detection, imageElement) {
  const canvas = document.createElement("canvas");
  const ctx = canvas.getContext("2d");
  if (!ctx) throw new Error("Cannot get canvas context");
  canvas.width = imageElement.width || 640;
  canvas.height = imageElement.height || 640;
  if (imageElement instanceof HTMLImageElement) {
    // Images are scaled to fill the canvas; other sources are drawn 1:1.
    ctx.drawImage(imageElement, 0, 0, canvas.width, canvas.height);
  } else {
    ctx.drawImage(imageElement, 0, 0);
  }
  const faceBox = detection.detection.box;
  // NOTE(review): the width/height clamps subtract faceBox.x/y rather than
  // the shifted origin (faceBox.x - 20), so the sampled region can differ
  // slightly from the intended 20px margin near the edges — confirm.
  const faceImageData = ctx.getImageData(
    Math.max(0, faceBox.x - 20),
    Math.max(0, faceBox.y - 20),
    Math.min(canvas.width - faceBox.x, faceBox.width + 40),
    Math.min(canvas.height - faceBox.y, faceBox.height + 40)
  );
  const pixels = faceImageData.data;
  let totalBrightness = 0;
  const brightnessValues = [];
  // RGBA stride of 4; brightness = plain RGB mean (no luma weighting).
  for (let i = 0; i < pixels.length; i += 4) {
    const brightness = (pixels[i] + pixels[i + 1] + pixels[i + 2]) / 3;
    totalBrightness += brightness;
    brightnessValues.push(brightness);
  }
  const avgBrightness = totalBrightness / (pixels.length / 4);
  // Contrast = standard deviation of brightness; evenness decays with it.
  const variance = brightnessValues.reduce((acc, val) => acc + (val - avgBrightness) ** 2, 0) / brightnessValues.length;
  const contrast = Math.sqrt(variance);
  const evenness = Math.max(0, 1 - contrast / 128);
  // Classification thresholds are on the 0..255 brightness scale.
  const tooDark = avgBrightness < 80;
  const tooBright = avgBrightness > 200;
  const uneven = evenness < 0.6;
  const optimal = !tooDark && !tooBright && !uneven;
  let score = 0.5;
  if (optimal) score = 0.9;
  else if (tooDark) score = Math.max(0.2, avgBrightness / 160);
  else if (tooBright) score = Math.max(0.2, (255 - avgBrightness) / 110);
  else if (uneven) score = Math.max(0.3, evenness);
  return {
    score,
    brightness: avgBrightness / 255,
    contrast: Math.min(contrast / 64, 1),
    evenness,
    conditions: { tooDark, tooBright, uneven, optimal }
  };
}
|
|
252
|
+
/** Similarity = 1 minus the euclidean distance between two descriptors. */
function calculateSimilarity(d1, d2) {
  const distance = faceapi.euclideanDistance(d1, d2);
  return 1 - distance;
}
|
|
255
|
+
/** True when the similarity of two descriptors meets `threshold` (default 0.45). */
function facesMatch(d1, d2, threshold = 0.45) {
  const similarity = calculateSimilarity(d1, d2);
  return similarity >= threshold;
}
|
|
258
|
+
/**
 * Matches two descriptors using a threshold adapted to lighting quality and
 * a per-template confidence boost. A dimension mismatch is never a match.
 */
function enhancedMatch(descriptor1, descriptor2, baseThreshold = 0.45, confidenceBoost = 0, lightingScore = 0.5) {
  if (descriptor1.length !== descriptor2.length) {
    return { isMatch: false, similarity: 0, adaptedThreshold: baseThreshold };
  }
  const similarity = calculateSimilarity(descriptor1, descriptor2);
  let adaptedThreshold = baseThreshold;
  if (lightingScore < 0.4) {
    // Poor lighting: relax the threshold slightly (floored at 0.35).
    adaptedThreshold = Math.max(0.35, adaptedThreshold - 0.05);
  } else if (lightingScore > 0.8) {
    // Very good lighting: tighten slightly (capped at 0.6).
    adaptedThreshold = Math.min(0.6, adaptedThreshold + 0.02);
  }
  adaptedThreshold = Math.max(0.35, adaptedThreshold - confidenceBoost * 0.05);
  const isMatch = similarity >= adaptedThreshold;
  return { isMatch, similarity, adaptedThreshold };
}
|
|
276
|
+
/**
 * Matches a descriptor against a user's stored templates.
 *
 * A match is declared when either the best per-template similarity meets
 * `baseThreshold`, or at least 60% of the templates actually compared agree.
 *
 * Fix: the agreement ratio previously divided `matchCount` by
 * `templates.length`, which counted templates skipped for having a missing
 * or empty descriptor — those can never match, so they silently diluted the
 * ratio. The denominator is now the number of valid templates compared.
 *
 * @param {Float32Array} newDescriptor - Descriptor from the current scan.
 * @param {Array<{descriptor: Float32Array, quality: number, weight: number}>} templates
 * @param {number} baseThreshold - Base similarity threshold.
 * @param {number} [lightingScore=0.5] - Lighting quality of the current scan.
 * @returns {{isMatch: boolean, bestSimilarity: number, avgSimilarity: number,
 *   matchCount: number}}
 */
function multiTemplateMatch(newDescriptor, templates, baseThreshold, lightingScore = 0.5) {
  if (templates.length === 0) {
    return { isMatch: false, bestSimilarity: 0, avgSimilarity: 0, matchCount: 0 };
  }
  let bestSimilarity = 0;
  let weightedSum = 0;
  let totalWeight = 0;
  let matchCount = 0;
  let validCount = 0;
  for (const template of templates) {
    // Skip templates with no usable descriptor.
    if (!template.descriptor || template.descriptor.length === 0) continue;
    validCount++;
    const result = enhancedMatch(
      newDescriptor,
      template.descriptor,
      baseThreshold,
      template.weight,
      lightingScore
    );
    if (result.similarity > bestSimilarity) {
      bestSimilarity = result.similarity;
    }
    // Average is weighted by template quality and learning weight.
    const w = template.quality * template.weight;
    weightedSum += result.similarity * w;
    totalWeight += w;
    if (result.isMatch) matchCount++;
  }
  const avgSimilarity = totalWeight > 0 ? weightedSum / totalWeight : 0;
  const isMatch =
    bestSimilarity >= baseThreshold ||
    (validCount > 0 && matchCount / validCount >= 0.6);
  return { isMatch, bestSimilarity, avgSimilarity, matchCount };
}
|
|
305
|
+
/**
 * Computes a learning weight (clamped to 0.1..3) that controls how strongly
 * a new scan influences the stored embedding: boosted by high quality,
 * lighting, and confidence; damped by low values of each.
 */
function calculateLearningWeight(qualityScore, lightingScore, confidence) {
  let weight = 1;
  if (qualityScore > 0.8) {
    weight *= 1.5;
  } else if (qualityScore > 0.6) {
    weight *= 1.2;
  } else if (qualityScore < 0.4) {
    weight *= 0.5;
  }
  if (lightingScore > 0.7) {
    weight *= 1.3;
  } else if (lightingScore < 0.4) {
    weight *= 0.7;
  }
  if (confidence > 0.8) {
    weight *= 1.2;
  } else if (confidence < 0.5) {
    weight *= 0.8;
  }
  const capped = Math.min(weight, 3);
  return Math.max(0.1, capped);
}
|
|
316
|
+
|
|
317
|
+
// src/core/types.ts
|
|
318
|
+
/**
 * Default SDK configuration; any subset can be overridden via resolveConfig().
 */
var DEFAULT_CONFIG = {
  // PocketBase backend the client talks to.
  apiUrl: "https://api.facesmash.app",
  // CDN location of the face-api model weights.
  modelUrl: "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model",
  // Minimum detector confidence passed to the SSD detector (getSsdOptions).
  minDetectionConfidence: 0.3,
  // Base similarity threshold for a match (see enhancedMatch).
  matchThreshold: 0.45,
  // Scans with a composite quality below this are rejected outright.
  minQualityScore: 0.2,
  // Cap on stored face templates per user; the lowest-quality one is evicted.
  maxTemplatesPerUser: 10,
  // Enables console logging throughout the SDK.
  debug: false
};
|
|
327
|
+
/**
 * Merges a partial user config over DEFAULT_CONFIG into a new object.
 * Later keys win; neither input object is mutated.
 */
function resolveConfig(config) {
  return Object.assign({}, DEFAULT_CONFIG, config);
}
|
|
330
|
+
|
|
331
|
+
// src/core/client.ts
|
|
332
|
+
// src/core/client.ts
/**
 * Main SDK client: wraps a PocketBase backend plus face-api.js inference.
 * Emits typed events (models-*, face-*, login-*, register-*) to listeners
 * registered via on(). Call init() before any recognition method.
 */
var FaceSmashClient = class {
  constructor(config) {
    this.listeners = [];
    this._modelsLoaded = false;
    this.config = resolveConfig(config);
    this.pb = new PocketBase(this.config.apiUrl);
    // Allow overlapping requests instead of PocketBase's default auto-cancel.
    this.pb.autoCancellation(false);
  }
  // ─── Event System ───────────────────────────────────────────
  /** Registers an event listener; returns an unsubscribe function. */
  on(listener) {
    this.listeners.push(listener);
    return () => {
      this.listeners = this.listeners.filter((l) => l !== listener);
    };
  }
  /** Dispatches an event to every listener; listener errors are swallowed. */
  emit(event) {
    for (const listener of this.listeners) {
      try {
        listener(event);
      } catch {
      }
    }
  }
  // ─── Model Loading ──────────────────────────────────────────
  /** True once init() has completed successfully. */
  get isReady() {
    return this._modelsLoaded;
  }
  /**
   * Loads the face-api models, emitting models-loading progress events.
   * Idempotent: returns true immediately once loaded.
   * @returns {Promise<boolean>} success flag.
   */
  async init(onProgress) {
    if (this._modelsLoaded) return true;
    this.emit({ type: "models-loading", progress: 0 });
    const success = await loadModels(this.config, (progress) => {
      onProgress?.(progress);
      this.emit({ type: "models-loading", progress });
    });
    if (success) {
      this._modelsLoaded = true;
      this.emit({ type: "models-loaded" });
    } else {
      this.emit({ type: "models-error", error: "Failed to load face recognition models" });
    }
    return success;
  }
  // ─── Face Analysis ──────────────────────────────────────────
  /**
   * Analyzes a single image and emits face-detected / face-lost.
   * @throws When init() has not completed (see ensureReady).
   */
  async analyzeFace(imageData) {
    this.ensureReady();
    const result = await analyzeFace(imageData, this.config);
    if (result) {
      this.emit({ type: "face-detected", analysis: result });
    } else {
      this.emit({ type: "face-lost" });
    }
    return result;
  }
  // ─── Login ──────────────────────────────────────────────────
  /**
   * Attempts to authenticate the face in `images` against every stored
   * profile. Picks the highest-quality non-rejected analysis, compares it
   * to each profile's primary embedding and (best-effort) its stored
   * templates, and returns on the first profile that matches.
   *
   * @param {string[]} images - Captured frames (data URLs).
   * @returns {Promise<{success: boolean, user?: object, similarity?: number,
   *   error?: string}>} Never rejects; failures are returned and emitted.
   */
  async login(images) {
    this.ensureReady();
    this.emit({ type: "login-start" });
    try {
      // Keep only the best-quality analysis across all captured frames.
      let bestAnalysis = null;
      for (const img of images) {
        const analysis = await analyzeFace(img, this.config);
        if (analysis && !analysis.rejectionReason) {
          if (!bestAnalysis || analysis.qualityScore > bestAnalysis.qualityScore) {
            bestAnalysis = analysis;
          }
        }
      }
      if (!bestAnalysis) {
        const error2 = "No face detected in any image";
        this.emit({ type: "login-failed", error: error2 });
        return { success: false, error: error2 };
      }
      if (bestAnalysis.qualityScore < this.config.minQualityScore) {
        const error2 = "Face quality too low. Improve lighting and face the camera directly.";
        this.emit({ type: "login-failed", error: error2 });
        return { success: false, error: error2 };
      }
      const profiles = await this.pb.collection("user_profiles").getFullList();
      if (profiles.length === 0) {
        const error2 = "No registered users found";
        this.emit({ type: "login-failed", error: error2 });
        return { success: false, error: error2 };
      }
      let bestMatch = { user: null, similarity: 0 };
      for (const profile of profiles) {
        if (!profile.face_embedding) continue;
        // Primary comparison against the profile's single stored embedding.
        const storedEmbedding = new Float32Array(profile.face_embedding);
        let matchResult = enhancedMatch(
          bestAnalysis.descriptor,
          storedEmbedding,
          this.config.matchThreshold,
          0,
          bestAnalysis.lightingScore
        );
        // Best-effort refinement against stored templates; errors ignored.
        try {
          // NOTE(review): profile.email is interpolated directly into the
          // PocketBase filter string; a quote in the email would break or
          // inject the filter — prefer pb.filter() parameter binding.
          const templates = await this.pb.collection("face_templates").getList(1, 50, {
            filter: `user_email="${profile.email}"`,
            sort: "-quality_score"
          });
          if (templates.items.length > 0) {
            const templateData = templates.items.filter((t) => t.descriptor && t.descriptor.length > 0).map((t) => ({
              descriptor: new Float32Array(t.descriptor),
              quality: t.quality_score || 0.5,
              weight: 1
            }));
            if (templateData.length > 0) {
              const multiResult = multiTemplateMatch(
                bestAnalysis.descriptor,
                templateData,
                this.config.matchThreshold,
                bestAnalysis.lightingScore
              );
              // Keep whichever comparison produced the higher similarity.
              if (multiResult.bestSimilarity > matchResult.similarity) {
                matchResult = {
                  isMatch: multiResult.isMatch,
                  similarity: multiResult.bestSimilarity,
                  adaptedThreshold: this.config.matchThreshold
                };
              }
            }
          }
        } catch {
        }
        const userProfile = {
          id: profile.id,
          name: profile.name,
          email: profile.email,
          face_embedding: profile.face_embedding,
          created: profile.created,
          updated: profile.updated
        };
        if (matchResult.similarity > bestMatch.similarity) {
          bestMatch = { user: userProfile, similarity: matchResult.similarity };
        }
        if (matchResult.isMatch) {
          // Log the scan and adapt the stored embedding; failures here must
          // not block a successful login.
          try {
            await this.storeLoginScan(userProfile, bestAnalysis);
          } catch {
          }
          this.emit({
            type: "login-success",
            user: userProfile,
            similarity: matchResult.similarity
          });
          return { success: true, user: userProfile, similarity: matchResult.similarity };
        }
      }
      const error = bestMatch.similarity > 0.4 ? "Face partially matched but did not meet security threshold." : "Face not recognized.";
      this.emit({ type: "login-failed", error, bestSimilarity: bestMatch.similarity });
      return { success: false, error, similarity: bestMatch.similarity };
    } catch (err) {
      const error = err instanceof Error ? err.message : "Unknown error during login";
      this.emit({ type: "login-failed", error });
      return { success: false, error };
    }
  }
  // ─── Registration ───────────────────────────────────────────
  /**
   * Registers a new user from the best-quality face among `images`.
   * Rejects when the face is already registered (similarity >= 0.75 against
   * any existing embedding). Creates the profile, an initial template, and
   * a registration scan record.
   *
   * @param {string} name - Display name.
   * @param {string[]} images - Captured frames (data URLs).
   * @param {string} [email] - Optional email; derived from `name` when omitted.
   * @returns {Promise<{success: boolean, user?: object, error?: string}>}
   */
  async register(name, images, email) {
    this.ensureReady();
    this.emit({ type: "register-start" });
    try {
      let bestAnalysis = null;
      // NOTE(review): bestImageIdx is tracked but never used below — confirm
      // whether the best frame was meant to be persisted.
      let bestImageIdx = 0;
      for (let i = 0; i < images.length; i++) {
        const analysis = await analyzeFace(images[i], this.config);
        if (analysis && !analysis.rejectionReason) {
          if (!bestAnalysis || analysis.qualityScore > bestAnalysis.qualityScore) {
            bestAnalysis = analysis;
            bestImageIdx = i;
          }
        }
      }
      if (!bestAnalysis) {
        const error = "No face detected in any image";
        this.emit({ type: "register-failed", error });
        return { success: false, error };
      }
      if (bestAnalysis.qualityScore < this.config.minQualityScore) {
        const error = "Face quality too low for registration.";
        this.emit({ type: "register-failed", error });
        return { success: false, error };
      }
      // Duplicate-face check against every existing profile embedding.
      const existingProfiles = await this.pb.collection("user_profiles").getFullList();
      for (const profile of existingProfiles) {
        if (!profile.face_embedding) continue;
        const stored = new Float32Array(profile.face_embedding);
        if (stored.length !== bestAnalysis.descriptor.length) continue;
        // NOTE(review): dynamic import of a module already statically
        // imported at the top of this file — likely redundant.
        const similarity = 1 - (await import('@vladmandic/face-api')).euclideanDistance(bestAnalysis.descriptor, stored);
        if (similarity >= 0.75) {
          const error = `This face is already registered to ${profile.name || profile.email}`;
          this.emit({ type: "register-failed", error });
          return { success: false, error };
        }
      }
      const embeddingArray = Array.from(bestAnalysis.descriptor);
      const record = await this.pb.collection("user_profiles").create({
        name,
        email: email || `${name.toLowerCase().replace(/\s+/g, ".")}@facesmash.app`,
        face_embedding: embeddingArray
      });
      await this.pb.collection("face_templates").create({
        user_email: record.email,
        descriptor: embeddingArray,
        quality_score: bestAnalysis.qualityScore,
        label: "registration"
      });
      await this.pb.collection("face_scans").create({
        user_email: record.email,
        face_embedding: JSON.stringify(embeddingArray),
        confidence: String(bestAnalysis.confidence),
        scan_type: "registration",
        quality_score: String(bestAnalysis.qualityScore)
      });
      const user = {
        id: record.id,
        name: record.name,
        email: record.email,
        face_embedding: embeddingArray,
        created: record.created,
        updated: record.updated
      };
      this.emit({ type: "register-success", user });
      return { success: true, user };
    } catch (err) {
      const error = err instanceof Error ? err.message : "Unknown error during registration";
      this.emit({ type: "register-failed", error });
      return { success: false, error };
    }
  }
  // ─── Helpers ────────────────────────────────────────────────
  /** Throws unless the module-level model flag says models are loaded. */
  ensureReady() {
    if (!areModelsLoaded()) {
      throw new Error(
        "FaceSmash models not loaded. Call client.init() first."
      );
    }
  }
  /**
   * Records a successful login scan and adaptively updates the user's data:
   * - always writes a sign_in_logs entry and a face_scans entry;
   * - when qualityScore > 0.5, blends the new descriptor into the stored
   *   embedding with a quality-derived learning rate (capped at 0.3);
   * - when qualityScore > 0.6, stores the scan as a new template, evicting
   *   the lowest-quality one if the per-user cap is reached.
   */
  async storeLoginScan(user, analysis) {
    const embeddingArray = Array.from(analysis.descriptor);
    await this.pb.collection("sign_in_logs").create({
      user_email: user.email,
      success: true
    });
    await this.pb.collection("face_scans").create({
      user_email: user.email,
      face_embedding: JSON.stringify(embeddingArray),
      confidence: String(analysis.confidence),
      scan_type: "login",
      quality_score: String(analysis.qualityScore)
    });
    if (analysis.qualityScore > 0.5) {
      const weight = calculateLearningWeight(
        analysis.qualityScore,
        analysis.lightingScore,
        analysis.confidence
      );
      const learningRate = Math.min(weight * 0.1, 0.3);
      // Exponential moving average of the stored embedding.
      const current = new Float32Array(user.face_embedding);
      const updated = new Float32Array(current.length);
      for (let i = 0; i < current.length; i++) {
        updated[i] = current[i] * (1 - learningRate) + analysis.descriptor[i] * learningRate;
      }
      await this.pb.collection("user_profiles").update(user.id, {
        face_embedding: Array.from(updated)
      });
    }
    if (analysis.qualityScore > 0.6) {
      // Ascending quality sort so items[0] is the weakest template.
      const existing = await this.pb.collection("face_templates").getList(1, 50, {
        filter: `user_email="${user.email}"`,
        sort: "quality_score"
      });
      if (existing.items.length >= this.config.maxTemplatesPerUser) {
        await this.pb.collection("face_templates").delete(existing.items[0].id);
      }
      await this.pb.collection("face_templates").create({
        user_email: user.email,
        descriptor: embeddingArray,
        quality_score: analysis.qualityScore,
        label: "auto"
      });
    }
  }
};
|
|
615
|
+
|
|
616
|
+
// src/index.ts
|
|
617
|
+
/**
 * Factory for a FaceSmashClient. Call `client.init()` before using it.
 */
function createFaceSmash(config) {
  const client = new FaceSmashClient(config);
  return client;
}
|
|
620
|
+
// Context value is null outside a provider; useFaceSmash() enforces this.
var FaceSmashContext = createContext(null);

/**
 * React provider that owns a single FaceSmashClient for its subtree and
 * starts model loading on mount. Exposes { client, isReady, isLoading,
 * loadProgress, error, retryInit } through FaceSmashContext.
 *
 * @param {object} props
 * @param {import('react').ReactNode} props.children
 * @param {object} props.config - Client config; read once on first render
 *   (NOTE(review): later config changes are ignored — confirm intended).
 * @param {() => void} [props.onReady] - Fired when models finish loading.
 * @param {(msg: string) => void} [props.onError] - Fired on load failure.
 * @param {(event: object) => void} [props.onEvent] - Subscribed to all client
 *   events while mounted; unsubscribed on cleanup.
 */
function FaceSmashProvider({
  children,
  config,
  onReady,
  onError,
  onEvent
}) {
  // Create the client lazily, exactly once; the ref survives re-renders.
  const clientRef = useRef(null);
  if (!clientRef.current) {
    clientRef.current = new FaceSmashClient(config);
  }
  const client = clientRef.current;
  const [isReady, setIsReady] = useState(false);
  const [isLoading, setIsLoading] = useState(true);
  const [loadProgress, setLoadProgress] = useState(0);
  const [error, setError] = useState(null);
  const initModels = useCallback(async () => {
    setIsLoading(true);
    setError(null);
    setLoadProgress(0);
    const success = await client.init((progress) => {
      setLoadProgress(progress);
    });
    if (success) {
      setIsReady(true);
      setIsLoading(false);
      onReady?.();
    } else {
      const msg = "Failed to load face recognition models";
      setError(msg);
      setIsLoading(false);
      onError?.(msg);
    }
  }, [client, onReady, onError]);
  useEffect(() => {
    initModels();
  }, [initModels]);
  useEffect(() => {
    if (!onEvent) return;
    // client.on returns an unsubscribe function, used as the effect cleanup.
    return client.on(onEvent);
  }, [client, onEvent]);
  const retryInit = useCallback(() => {
    initModels();
  }, [initModels]);
  // NOTE(review): the context value object is rebuilt on every render, so
  // all consumers re-render whenever the provider does — consider useMemo.
  return /* @__PURE__ */ jsx(
    FaceSmashContext.Provider,
    {
      value: { client, isReady, isLoading, loadProgress, error, retryInit },
      children
    }
  );
}
|
|
673
|
+
/**
 * React hook exposing the FaceSmash context (client, readiness, progress,
 * error, retryInit). Throws when used outside a <FaceSmashProvider>.
 */
function useFaceSmash() {
  const context = useContext(FaceSmashContext);
  if (!context) {
    throw new Error("useFaceSmash must be used within a <FaceSmashProvider>");
  }
  return context;
}
|
|
680
|
+
/**
 * Camera-based face-login component.
 *
 * Starts the user-facing camera once models are ready, captures
 * `captureCount` JPEG frames spaced `captureDelay` ms apart, and passes them
 * to `client.login()`. The outcome (success or failure object) is delivered
 * through `onResult`.
 *
 * Props:
 *  - onResult(result): required callback receiving the login result.
 *  - captureCount (default 3): number of frames to capture per scan.
 *  - captureDelay (default 500): ms between captures.
 *  - autoStart (default true): automatically scan 2s after camera is ready.
 *  - className / overlay / loadingContent / errorContent: presentation hooks.
 */
function FaceLogin({
  onResult,
  captureCount = 3,
  captureDelay = 500,
  autoStart = true,
  className,
  overlay,
  loadingContent,
  errorContent
}) {
  const { client, isReady, isLoading, error: initError } = useFaceSmash();
  const videoRef = useRef(null);
  const canvasRef = useRef(null);
  const streamRef = useRef(null);
  const [cameraError, setCameraError] = useState(null);
  const [isScanning, setIsScanning] = useState(false);
  const [status, setStatus] = useState("loading");

  // Request the front camera and attach the stream to the <video> element.
  // Never rejects: failures are surfaced via cameraError/status instead.
  const startCamera = useCallback(async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: { width: 640, height: 480, facingMode: "user" }
      });
      streamRef.current = stream;
      if (videoRef.current) {
        videoRef.current.srcObject = stream;
        await videoRef.current.play();
      }
      setCameraError(null);
    } catch {
      setCameraError("Camera access denied or not available");
      setStatus("error");
    }
  }, []);

  // Stop all tracks and drop the stream reference (idempotent).
  const stopCamera = useCallback(() => {
    streamRef.current?.getTracks().forEach((t) => t.stop());
    streamRef.current = null;
  }, []);

  // Snapshot the current video frame into the hidden canvas and return it
  // as a JPEG data URL; null when the refs are not mounted yet.
  const captureFrame = useCallback(() => {
    const video = videoRef.current;
    const canvas = canvasRef.current;
    if (!video || !canvas) return null;
    canvas.width = video.videoWidth || 640;
    canvas.height = video.videoHeight || 480;
    const ctx = canvas.getContext("2d");
    if (!ctx) return null;
    ctx.drawImage(video, 0, 0);
    return canvas.toDataURL("image/jpeg", 0.9);
  }, []);

  // Capture a burst of frames and attempt login.
  const scan = useCallback(async () => {
    if (!isReady || isScanning) return;
    setIsScanning(true);
    setStatus("scanning");
    const images = [];
    for (let i = 0; i < captureCount; i++) {
      const frame = captureFrame();
      if (frame) images.push(frame);
      if (i < captureCount - 1) {
        await new Promise((r) => setTimeout(r, captureDelay));
      }
    }
    if (images.length === 0) {
      onResult({ success: false, error: "Failed to capture images from camera" });
      setIsScanning(false);
      setStatus("error");
      return;
    }
    try {
      const result = await client.login(images);
      onResult(result);
      setStatus("done");
    } catch (err) {
      // FIX: a rejected client.login() previously left isScanning stuck at
      // true and never notified the caller; report it as a failed result.
      onResult({
        success: false,
        error: err instanceof Error ? err.message : String(err)
      });
      setStatus("error");
    } finally {
      setIsScanning(false);
    }
  }, [isReady, isScanning, captureCount, captureDelay, captureFrame, client, onResult]);

  // Start the camera once models are ready; stop it on unmount.
  useEffect(() => {
    if (isReady) {
      startCamera();
      setStatus("ready");
    }
    return () => stopCamera();
  }, [isReady, startCamera, stopCamera]);

  // Auto-scan shortly after the camera becomes ready.
  useEffect(() => {
    if (autoStart && status === "ready" && !isScanning) {
      const timer = setTimeout(scan, 2e3);
      return () => clearTimeout(timer);
    }
  }, [autoStart, status, isScanning, scan]);

  const retry = useCallback(() => {
    setCameraError(null);
    setStatus("loading");
    // FIX: startCamera() resolves even on failure (it catches internally),
    // so unconditionally setting "ready" here clobbered the "error" status
    // and re-armed auto-scanning on a dead stream. Only advance to "ready"
    // when startCamera did not flag an error.
    startCamera().then(() => setStatus((s) => (s === "error" ? s : "ready")));
  }, [startCamera]);

  const displayError = cameraError || initError;
  if (displayError && errorContent) {
    return /* @__PURE__ */ jsx(Fragment, { children: errorContent(displayError, retry) });
  }
  if (isLoading && loadingContent) {
    return /* @__PURE__ */ jsx(Fragment, { children: loadingContent });
  }
  return /* @__PURE__ */ jsxs("div", { className, style: { position: "relative" }, children: [
    /* @__PURE__ */ jsx(
      "video",
      {
        ref: videoRef,
        autoPlay: true,
        playsInline: true,
        muted: true,
        style: {
          width: "100%",
          height: "100%",
          objectFit: "cover",
          transform: "scaleX(-1)"
        }
      }
    ),
    /* @__PURE__ */ jsx("canvas", { ref: canvasRef, style: { display: "none" } }),
    overlay,
    displayError && /* @__PURE__ */ jsx(
      "div",
      {
        style: {
          position: "absolute",
          inset: 0,
          display: "flex",
          alignItems: "center",
          justifyContent: "center",
          backgroundColor: "rgba(0,0,0,0.8)",
          color: "white",
          padding: "1rem",
          textAlign: "center"
        },
        children: /* @__PURE__ */ jsxs("div", { children: [
          /* @__PURE__ */ jsx("p", { children: displayError }),
          /* @__PURE__ */ jsx("button", { onClick: retry, style: { marginTop: "0.5rem", cursor: "pointer" }, children: "Retry" })
        ] })
      }
    )
  ] });
}
|
|
817
|
+
/**
 * Camera-based face-registration component.
 *
 * Starts the user-facing camera once models are ready, captures
 * `captureCount` JPEG frames spaced `captureDelay` ms apart, and passes them
 * to `client.register(name, images, email)`. The outcome is delivered via
 * `onResult`.
 *
 * Props:
 *  - name: required display name for the new enrollment.
 *  - email: optional email to associate with the enrollment.
 *  - onResult(result): required callback receiving the registration result.
 *  - captureCount (default 3) / captureDelay (default 500 ms).
 *  - autoStart (default true): automatically capture 2s after camera ready.
 *  - className / overlay / loadingContent / errorContent: presentation hooks.
 */
function FaceRegister({
  name,
  email,
  onResult,
  captureCount = 3,
  captureDelay = 500,
  autoStart = true,
  className,
  overlay,
  loadingContent,
  errorContent
}) {
  const { client, isReady, isLoading, error: initError } = useFaceSmash();
  const videoRef = useRef(null);
  const canvasRef = useRef(null);
  const streamRef = useRef(null);
  const [cameraError, setCameraError] = useState(null);
  const [isCapturing, setIsCapturing] = useState(false);
  const [status, setStatus] = useState("loading");

  // Request the front camera and attach the stream to the <video> element.
  // Never rejects: failures are surfaced via cameraError/status instead.
  const startCamera = useCallback(async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: { width: 640, height: 480, facingMode: "user" }
      });
      streamRef.current = stream;
      if (videoRef.current) {
        videoRef.current.srcObject = stream;
        await videoRef.current.play();
      }
      setCameraError(null);
    } catch {
      setCameraError("Camera access denied or not available");
      setStatus("error");
    }
  }, []);

  // Stop all tracks and drop the stream reference (idempotent).
  const stopCamera = useCallback(() => {
    streamRef.current?.getTracks().forEach((t) => t.stop());
    streamRef.current = null;
  }, []);

  // Snapshot the current video frame into the hidden canvas and return it
  // as a JPEG data URL; null when the refs are not mounted yet.
  const captureFrame = useCallback(() => {
    const video = videoRef.current;
    const canvas = canvasRef.current;
    if (!video || !canvas) return null;
    canvas.width = video.videoWidth || 640;
    canvas.height = video.videoHeight || 480;
    const ctx = canvas.getContext("2d");
    if (!ctx) return null;
    ctx.drawImage(video, 0, 0);
    return canvas.toDataURL("image/jpeg", 0.9);
  }, []);

  // Capture a burst of frames and attempt registration.
  const capture = useCallback(async () => {
    if (!isReady || isCapturing) return;
    setIsCapturing(true);
    setStatus("capturing");
    const images = [];
    for (let i = 0; i < captureCount; i++) {
      const frame = captureFrame();
      if (frame) images.push(frame);
      if (i < captureCount - 1) {
        await new Promise((r) => setTimeout(r, captureDelay));
      }
    }
    if (images.length === 0) {
      onResult({ success: false, error: "Failed to capture images" });
      setIsCapturing(false);
      setStatus("error");
      return;
    }
    try {
      const result = await client.register(name, images, email);
      onResult(result);
      setStatus("done");
    } catch (err) {
      // FIX: a rejected client.register() previously left isCapturing stuck
      // at true and never notified the caller; report it as a failed result.
      onResult({
        success: false,
        error: err instanceof Error ? err.message : String(err)
      });
      setStatus("error");
    } finally {
      setIsCapturing(false);
    }
  }, [isReady, isCapturing, captureCount, captureDelay, captureFrame, client, name, email, onResult]);

  // Start the camera once models are ready; stop it on unmount.
  useEffect(() => {
    if (isReady) {
      startCamera();
      setStatus("ready");
    }
    return () => stopCamera();
  }, [isReady, startCamera, stopCamera]);

  // Auto-capture shortly after the camera becomes ready.
  useEffect(() => {
    if (autoStart && status === "ready" && !isCapturing) {
      const timer = setTimeout(capture, 2e3);
      return () => clearTimeout(timer);
    }
  }, [autoStart, status, isCapturing, capture]);

  const retry = useCallback(() => {
    setCameraError(null);
    setStatus("loading");
    // FIX: startCamera() resolves even on failure (it catches internally),
    // so unconditionally setting "ready" here clobbered the "error" status
    // and re-armed auto-capture on a dead stream. Only advance to "ready"
    // when startCamera did not flag an error.
    startCamera().then(() => setStatus((s) => (s === "error" ? s : "ready")));
  }, [startCamera]);

  const displayError = cameraError || initError;
  if (displayError && errorContent) {
    return /* @__PURE__ */ jsx(Fragment, { children: errorContent(displayError, retry) });
  }
  if (isLoading && loadingContent) {
    return /* @__PURE__ */ jsx(Fragment, { children: loadingContent });
  }
  return /* @__PURE__ */ jsxs("div", { className, style: { position: "relative" }, children: [
    /* @__PURE__ */ jsx(
      "video",
      {
        ref: videoRef,
        autoPlay: true,
        playsInline: true,
        muted: true,
        style: {
          width: "100%",
          height: "100%",
          objectFit: "cover",
          transform: "scaleX(-1)"
        }
      }
    ),
    /* @__PURE__ */ jsx("canvas", { ref: canvasRef, style: { display: "none" } }),
    overlay,
    displayError && /* @__PURE__ */ jsx(
      "div",
      {
        style: {
          position: "absolute",
          inset: 0,
          display: "flex",
          alignItems: "center",
          justifyContent: "center",
          backgroundColor: "rgba(0,0,0,0.8)",
          color: "white",
          padding: "1rem",
          textAlign: "center"
        },
        children: /* @__PURE__ */ jsxs("div", { children: [
          /* @__PURE__ */ jsx("p", { children: displayError }),
          /* @__PURE__ */ jsx("button", { onClick: retry, style: { marginTop: "0.5rem", cursor: "pointer" }, children: "Retry" })
        ] })
      }
    )
  ] });
}
|
|
956
|
+
/**
 * Hook exposing imperative face login.
 *
 * Returns { login, isScanning, result, reset, isReady }:
 *  - login(images): runs client.login and stores/returns the result; returns
 *    a failure object immediately when models are not yet loaded.
 *  - isScanning: true while a login call is in flight.
 *  - result: last login result (null after reset / before first call).
 *  - reset(): clears both isScanning and result.
 */
function useFaceLogin() {
  const { client, isReady } = useFaceSmash();
  const [isScanning, setIsScanning] = useState(false);
  const [result, setResult] = useState(null);
  const login = useCallback(
    async (images) => {
      if (!isReady) {
        return { success: false, error: "Models not loaded yet" };
      }
      setIsScanning(true);
      setResult(null);
      try {
        const loginResult = await client.login(images);
        setResult(loginResult);
        return loginResult;
      } finally {
        // FIX: previously a rejected client.login() left isScanning stuck
        // at true; the finally guarantees the flag is always cleared while
        // still propagating the rejection to the caller.
        setIsScanning(false);
      }
    },
    [client, isReady]
  );
  const reset = useCallback(() => {
    setIsScanning(false);
    setResult(null);
  }, []);
  return { login, isScanning, result, reset, isReady };
}
|
|
980
|
+
/**
 * Hook exposing imperative face registration.
 *
 * Returns { register, isRegistering, result, reset, isReady }:
 *  - register(name, images, email): runs client.register and stores/returns
 *    the result; returns a failure object immediately when models are not
 *    yet loaded.
 *  - isRegistering: true while a registration call is in flight.
 *  - result: last registration result (null after reset / before first call).
 *  - reset(): clears both isRegistering and result.
 */
function useFaceRegister() {
  const { client, isReady } = useFaceSmash();
  const [isRegistering, setIsRegistering] = useState(false);
  const [result, setResult] = useState(null);
  const register = useCallback(
    async (name, images, email) => {
      if (!isReady) {
        return { success: false, error: "Models not loaded yet" };
      }
      setIsRegistering(true);
      setResult(null);
      try {
        const regResult = await client.register(name, images, email);
        setResult(regResult);
        return regResult;
      } finally {
        // FIX: previously a rejected client.register() left isRegistering
        // stuck at true; the finally guarantees the flag is always cleared
        // while still propagating the rejection to the caller.
        setIsRegistering(false);
      }
    },
    [client, isReady]
  );
  const reset = useCallback(() => {
    setIsRegistering(false);
    setResult(null);
  }, []);
  return { register, isRegistering, result, reset, isReady };
}
|
|
1004
|
+
/**
 * Hook exposing one-off face analysis.
 *
 * Returns { analyze, analysis, isAnalyzing, isReady }:
 *  - analyze(imageData): runs client.analyzeFace and stores/returns the
 *    result; returns null immediately when models are not yet loaded.
 *  - analysis: last analysis result (null before the first call).
 *  - isAnalyzing: true while an analysis call is in flight.
 */
function useFaceAnalysis() {
  const { client, isReady } = useFaceSmash();
  const [analysis, setAnalysis] = useState(null);
  const [isAnalyzing, setIsAnalyzing] = useState(false);
  const analyze = useCallback(
    async (imageData) => {
      if (!isReady) return null;
      setIsAnalyzing(true);
      try {
        const result = await client.analyzeFace(imageData);
        setAnalysis(result);
        return result;
      } finally {
        // FIX: previously a rejected client.analyzeFace() left isAnalyzing
        // stuck at true; the finally guarantees the flag is always cleared
        // while still propagating the rejection to the caller.
        setIsAnalyzing(false);
      }
    },
    [client, isReady]
  );
  return { analyze, analysis, isAnalyzing, isReady };
}
|
|
1021
|
+
|
|
1022
|
+
export { FaceLogin, FaceRegister, FaceSmashClient, FaceSmashProvider, analyzeFace, areModelsLoaded, calculateLearningWeight, calculateSimilarity, createFaceSmash, enhancedMatch, extractDescriptor, facesMatch, loadModels, multiTemplateMatch, normalizeDescriptor, processImages, useFaceAnalysis, useFaceLogin, useFaceRegister, useFaceSmash };
|
|
1023
|
+
//# sourceMappingURL=react.js.map
|
|
1024
|
+
//# sourceMappingURL=react.js.map
|