face-validator-sdk 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -98
- package/README.md +148 -135
- package/dist/face-validator-sdk.cjs.js +1 -1
- package/dist/face-validator-sdk.cjs.js.map +1 -1
- package/dist/face-validator-sdk.esm.js +1 -1
- package/dist/face-validator-sdk.esm.js.map +1 -1
- package/dist/face-validator-sdk.umd.js +1 -1
- package/dist/face-validator-sdk.umd.js.map +1 -1
- package/package.json +1 -7
package/CHANGELOG.md
CHANGED
@@ -1,109 +1,29 @@
 # Changelog
 
-All notable changes to
+All notable changes to this project will be documented in this file.
 
-
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-
+## [1.1.0] – 2026-02-06
 
-
+### Changed
 
-
+- **Oval proportional to container**: Adjusted `OVAL_RADIUS_Y_FACTOR` (0.38 → 0.34) so the oval does not overflow the video height in modals and containers with varying aspect ratios.
+- **Demo**: Last 3 captures as thumbnails with click-to-zoom, X button to remove (localStorage), overlay to close zoom; thumbnails aligned to top; two distinct X buttons (remove on thumb, close on enlarged image).
 
-
+### Fixed
 
--
-- **Distance validation**: Detects when face is TOO_CLOSE or TOO_FAR from camera
-- **Centering validation**: Ensures face is properly centered in oval guide
-- **Head pose detection**: Validates head is straight (max 28° tilt)
-- **Illumination validation**: Checks for adequate lighting (brightness > 70)
-- **Stability detection**: Requires 1 second of stillness before capture
-- **Multiple face detection**: Rejects frames with more than one face
+- Oval disproportionate in integrations (e.g. datasync-front modal) when the canvas was displayed in containers with different dimensions.
 
-
+## [1.0.0] – 2025-02-02
 
-
-- **Hand obstruction detection**: Prevents face obstruction by hands
-- **Real-time hand proximity analysis**: Validates hand distance from face
+### Added
 
-
-
--
--
--
-
-
-
-- **Multiple builds**: ESM, CJS, UMD for maximum compatibility
-- **TypeScript support**: Full type definitions included
-- **Debug mode**: Visualize landmarks and validation overlays
-- **GPU acceleration**: Powered by MediaPipe with GPU support
-- **Flexible configuration**: 15+ validation thresholds for fine-tuning
-
-#### Demo & Documentation
-
-- **Live interactive demo**: [https://face-validator-sdk.vercel.app](https://face-validator-sdk.vercel.app)
-- **Comprehensive README**: Installation, quick start, configuration guide
-- **Validation checklist**: Detailed explanation of all validation states
-- **Code examples**: TypeScript examples for common use cases
-
-### 📦 Installation
-
-Single command installation - MediaPipe is included:
-
-```bash
-npm install face-validator-sdk
-```
-
-### 🚀 Quick Start
-
-```typescript
-import { FaceValidator, ValidationStatus } from 'face-validator-sdk';
-
-const validator = new FaceValidator({
-  videoElement: document.getElementById('video'),
-  overlayCanvasElement: document.getElementById('overlay'),
-  locale: 'pt-BR',
-
-  onStatusUpdate: (status, message) => {
-    console.log(message);
-  },
-
-  onCaptureSuccess: (blob) => {
-    // Upload captured selfie
-  },
-
-  onError: (errorType, error) => {
-    console.error(error);
-  }
-});
-```
-
-### 🔧 Configuration
-
-15+ validation thresholds available:
-
-- `minDetectionConfidence` (default: 0.5)
-- `minIlluminationThreshold` (default: 70)
-- `minFaceSizeFactor` (default: 0.25)
-- `maxFaceSizeFactor` (default: 0.65)
-- `maxHeadTiltDegrees` (default: 28)
-- `maxHandFaceDistance` (default: 0.15)
-- And more...
-
-### 📚 What's Included
-
-- Real-time video validation with visual feedback
-- Automatic photo capture on successful validation
-- Blob output for direct API upload
-- LocalStorage support for demo captures
-- ESM, CJS, and UMD builds for any environment
-
-### 🎯 Key Achievements
-
-✅ **Production Ready**: Tested and optimized for real-world use
-✅ **Accessible**: Works on desktop, tablet, and mobile browsers
-✅ **Customizable**: Fine-tune validation parameters for your use case
-✅ **Fast**: GPU-accelerated inference with MediaPipe
-✅ **Reliable**: High accuracy face and hand detection
-✅ **Modern**: Built with TypeScript and modern web standards
+- Initial release.
+- Real-time selfie validation with **MediaPipe** (FaceLandmarker 478 landmarks + HandLandmarker 21 pts/hand).
+- Validation steps: single face, distance (too close / too far), oval centering, head pose (roll/yaw/pitch), hand near face, neutral expression, dark glasses, illumination, stability; then capture.
+- Optional `modelPath`; when omitted, uses CDN (jsdelivr).
+- Internationalization: `pt-BR`, `en`, `es` via `locale` and optional `customMessages`.
+- Helpers: `getMessage`, `getValidationMessages`, `getLoadingModelsMessage`.
+- Builds: ESM, CJS, UMD; TypeScript declarations in `dist/types`.
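For the `OVAL_RADIUS_Y_FACTOR` change above, here is a minimal sketch of how the oval guide is sized, based on the overlay-drawing code in the bundled `dist` files; the standalone helper below is illustrative and is not part of the SDK's public API:

```typescript
// Sketch of the oval-guide geometry (assumes a 2D overlay canvas).
// The factors mirror the constants in the bundled dist: the X radius stays
// at 0.20 of the canvas width, while the Y radius dropped from 0.38 to 0.34
// of the canvas height in 1.1.0 so the ellipse stays inside short containers.
const OVAL_RADIUS_X_FACTOR = 0.2;
const OVAL_RADIUS_Y_FACTOR = 0.34; // 1.0.0 shipped 0.38

function drawOvalGuide(canvas: HTMLCanvasElement): void {
  const ctx = canvas.getContext('2d');
  if (!ctx) return;

  const centerX = canvas.width / 2;
  const centerY = canvas.height / 2;
  // Radii are proportional to the canvas, so the oval follows the container
  // instead of overflowing short or wide video elements.
  const radiusX = canvas.width * OVAL_RADIUS_X_FACTOR;
  const radiusY = canvas.height * OVAL_RADIUS_Y_FACTOR;

  ctx.clearRect(0, 0, canvas.width, canvas.height);
  ctx.strokeStyle = 'rgba(255, 255, 255, 0.9)';
  ctx.lineWidth = 3;
  ctx.beginPath();
  ctx.ellipse(centerX, centerY, radiusX, radiusY, 0, 0, 2 * Math.PI);
  ctx.stroke();
}
```

With the default 512×384 overlay, the vertical radius shrinks from roughly 146 px (0.38 × 384) to roughly 131 px (0.34 × 384), which is why the oval no longer overflows shorter modal containers.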
package/README.md
CHANGED
@@ -4,10 +4,12 @@ Real-time selfie validation SDK with face detection, powered by **MediaPipe**. D
 
 🎭 **[Live Demo](https://face-validator-sdk.vercel.app)** | 📦 [NPM Package](#installation) | 📖 [Documentation](#usage) | 🤝 [Contributing](#contributing)
 
+[](https://github.com/rwmsousa/face-validator-sdk/actions/workflows/ci.yml)
+[](https://github.com/rwmsousa/face-validator-sdk/actions/workflows/deploy-vercel.yml)
+
 ## ✨ Features
 
 ### Face Detection (478 landmarks)
-
 - ✅ **Distance validation**: TOO_CLOSE / TOO_FAR
 - ✅ **Centering**: Face must be centered in oval guide
 - ✅ **Head pose**: Detects tilted or turned head
@@ -15,14 +17,12 @@ Real-time selfie validation SDK with face detection, powered by **MediaPipe**. D
 - ✅ **Stability**: Ensures user stays still before capture
 - ✅ **Multiple faces**: Rejects when more than one face detected
 
-### Hand Detection
-
+### Hand Detection (NEW! 🎉)
 - ✅ **Hand near face detection**: Prevents hand covering face (obstructions)
 - ✅ **21 landmarks per hand**: High precision tracking
 - ✅ **Real-time validation**: Instant feedback
 
 ### Additional Features
-
 - 🌐 **i18n**: Portuguese (pt-BR), English (en), Spanish (es)
 - 🎨 **Visual feedback**: Oval guide with color-coded status
 - 🐛 **Debug mode**: Visualize landmarks and bounding boxes
@@ -32,172 +32,123 @@ Real-time selfie validation SDK with face detection, powered by **MediaPipe**. D
 ## 📦 Installation
 
 ```bash
-npm install face-validator-sdk
+npm install face-validator-sdk @mediapipe/tasks-vision
 ```
 
-
-
-## 📊 Validation Checklist
-
-The SDK validates multiple conditions before capturing the selfie. Here's what each status means:
-
-| Status | Description | User Action | Validation Threshold |
-|--------|-------------|-------------|----------------------|
-| **INITIALIZING** | Loading MediaPipe models from CDN | Wait, models loading... | N/A |
-| **NO_FACE_DETECTED** | Camera is active but no face found | Move closer to camera, ensure good lighting | Requires 1 face |
-| **FACE_DETECTED** | Face detected, starting validation | Hold still for validation | Confidence > 50% |
-| **TOO_CLOSE** | Face is too large in frame (too close) | Move camera away | Face height < 65% viewport |
-| **TOO_FAR** | Face is too small in frame (too far) | Move camera closer | Face height > 25% viewport |
-| **OFF_CENTER** | Face not properly centered in oval | Center face in the oval guide | Within center zone |
-| **FACE_OBSTRUCTED** | **Hand, glasses, or low visibility** | Remove hands from face, ensure visibility | Hand distance > 15% |
-| **HEAD_NOT_STRAIGHT** | Head is tilted or turned | Face camera directly, keep head straight | Yaw/Pitch < 28° |
-| **MULTIPLE_FACES** | More than one face detected | Ensure only you are in frame | Exactly 1 face required |
-| **POOR_ILLUMINATION** | Not enough light to see face clearly | Increase lighting (natural/lamp light) | Brightness avg > 70 |
-| **STAY_STILL** | Movement detected, hold still | Stop moving, keep steady position | Movement < 5px, 1s |
-| **CAPTURING** | Validation passed, taking photo... | Keep position, don't move | Auto-capture in progress |
-| **SUCCESS** | ✅ Selfie captured successfully! | Photo saved and ready to upload | Capture completed |
-| **ERROR** | An error occurred during validation | Check camera permissions, try again | Check logs for details |
+**Peer dependency**: `@mediapipe/tasks-vision` (^0.10.15)
 
 ## 🚀 Quick Start
 
-### Basic Usage
-
 ```typescript
 import { FaceValidator, ValidationStatus } from 'face-validator-sdk';
 
-
-const
-const canvasElement = document.getElementById('overlay');
+const video = document.querySelector('video');
+const canvas = document.querySelector('canvas');
 
-// Initialize validator
 const validator = new FaceValidator({
-  videoElement,
-  overlayCanvasElement:
+  videoElement: video,
+  overlayCanvasElement: canvas,
   locale: 'pt-BR', // 'pt-BR' | 'en' | 'es'
-  debugMode:
+  debugMode: false,
 
-  // Called whenever validation status changes
   onStatusUpdate: (status, message) => {
-
-
+    console.log(status, message);
+    // Update UI with validation status
   },
 
-
-
-
-
-    document.getElementById('preview').src = url;
-
-    // Send to backend
-    const formData = new FormData();
-    formData.append('selfie', imageBlob, 'selfie.jpg');
-    fetch('/api/upload-selfie', { method: 'POST', body: formData });
+  onCaptureSuccess: (blob) => {
+    // Upload or preview the captured selfie
+    const url = URL.createObjectURL(blob);
+    document.querySelector('img').src = url;
   },
 
-  // Called if something goes wrong
   onError: (errorType, error) => {
-    console.error(
-    document.getElementById('status').textContent = error.message;
+    console.error(errorType, error);
   }
 });
 
-// Validator starts automatically
-// To stop
+// Validator starts automatically
+// To stop: validator.stop();
 ```
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-<!-- Video element for camera feed (will be mirrored) -->
-<video id="video" width="512" height="384" autoplay playsinline muted></video>
-
-<!-- Canvas for validation feedback (landmarks, oval guide, etc.) -->
-<canvas id="overlay" width="512" height="384" style="border: 1px solid #ccc;"></canvas>
-
-<!-- Status display -->
-<div id="status">Loading...</div>
-
-<!-- Captured selfie preview -->
-<img id="preview" alt="Captured selfie" />
-
-<!-- Load SDK (MediaPipe models are loaded automatically) -->
-<script type="module" src="./app.js"></script>
-</body>
-</html>
-```
+## 📊 Validation Status
+
+| Status | Description |
+|--------|-------------|
+| `INITIALIZING` | Loading MediaPipe models |
+| `NO_FACE_DETECTED` | No face found in frame |
+| `FACE_DETECTED` | Face detected, validating... |
+| `TOO_CLOSE` | Face too close to camera |
+| `TOO_FAR` | Face too far from camera |
+| `OFF_CENTER` | Face not centered in oval |
+| `FACE_OBSTRUCTED` | **Hand covering face or low visibility** |
+| `HEAD_NOT_STRAIGHT` | Head tilted or turned |
+| `MULTIPLE_FACES` | More than one face detected |
+| `POOR_ILLUMINATION` | Insufficient lighting |
+| `STAY_STILL` | Hold still for capture |
+| `CAPTURING` | Taking photo... |
+| `SUCCESS` | Capture successful! |
+| `ERROR` | An error occurred |
 
 ## ⚙️ Configuration Options
 
 ```typescript
 interface FaceValidatorOptions {
-  //
+  // Required
   videoElement: HTMLVideoElement;
   onStatusUpdate: (status: ValidationStatus, message: string) => void;
   onCaptureSuccess: (imageBlob: Blob) => void;
   onError: (errorType: ValidationStatus, error: Error) => void;
 
-  //
-  // Display
+  // Optional
   overlayCanvasElement?: HTMLCanvasElement;
   locale?: 'pt-BR' | 'en' | 'es'; // Default: 'en'
-  debugMode?: boolean; //
-
-  // Validation Thresholds
-  minDetectionConfidence?: number; // Face detection threshold. Default: 0.5 (50%)
-  minIlluminationThreshold?: number; // Minimum brightness (0-255). Default: 70
-  minFaceSizeFactor?: number; // Minimum face size relative to viewport. Default: 0.25 (25%)
-  maxFaceSizeFactor?: number; // Maximum face size relative to viewport. Default: 0.65 (65%)
+  debugMode?: boolean; // Default: false
 
-  //
-
-
-
-
-  //
-
-
-  //
-  maxHandFaceDistance?: number; //
+  // Validation thresholds
+  minDetectionConfidence?: number; // Default: 0.5
+  minIlluminationThreshold?: number; // Default: 70 (0-255)
+  minFaceSizeFactor?: number; // Default: 0.25
+  maxFaceSizeFactor?: number; // Default: 0.65
+  stabilizationTimeThreshold?: number; // Default: 1000ms
+  stabilityMovementThreshold?: number; // Default: 5px
+  minFaceVisibilityScore?: number; // Default: 0.5
+  maxHeadTiltDegrees?: number; // Default: 28°
+  maxHandFaceDistance?: number; // Default: 0.15 (normalized)
 
   // Advanced
-  modelPath?: string; //
-  customMessages?: Partial<Record<ValidationStatus, string>>;
+  modelPath?: string; // MediaPipe WASM path (auto-detected from CDN)
+  customMessages?: Partial<Record<ValidationStatus, string>>;
 }
 ```
 
-
+## 🎭 Live Demo
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+### Online Demo
+Visit: **[https://face-validator-sdk.vercel.app](https://face-validator-sdk.vercel.app)**
+
+### Local Development
+
+```bash
+# Clone the repository
+git clone https://github.com/rwmsousa/face-validator-sdk.git
+cd face-validator-sdk
+
+# Install dependencies
+npm install
+
+# Run local demo (http://localhost:8081)
+npm run dev
+```
+
+### Build Demo for Production
+
+```bash
+# Build SDK + Demo
+npm run build
+npm run build:demo
+
+# Demo files output to: demo/dist/
 ```
 
 ## 🏗️ Architecture
@@ -224,13 +175,26 @@ The SDK uses two MediaPipe models running in parallel:
 │ │ 2. Centering │ │
 │ │ 3. Face geometry │ │
 │ │ 4. Head pose │ │
-│ │ 5. Hand proximity
+│ │ 5. Hand proximity ⭐NEW │ │
 │ │ 6. Illumination │ │
 │ │ 7. Stability │ │
 │ └──────────────────────────────────┘ │
 └─────────────────────────────────────────┘
 ```
 
+## 📚 Why MediaPipe?
+
+Migrated from face-api.js (discontinued 2021) to MediaPipe (Google):
+
+| Feature | face-api.js | MediaPipe |
+|---------|-------------|-----------|
+| Landmarks | 68 points | **478 points** |
+| Hand detection | ❌ None | ✅ **21 pts/hand** |
+| Maintenance | ❌ Discontinued | ✅ Active (Google) |
+| Performance | CPU only | ✅ **GPU accelerated** |
+| Accuracy | ~60-70% | ✅ **~90-95%** |
+| Model size | ~8MB | ~15MB |
+
 ## 🔧 Development
 
 ### Scripts
@@ -253,22 +217,71 @@ face-validator-sdk/
 │ ├── types.ts # TypeScript types
 │ ├── utils.ts # Validation functions
 │ ├── i18n.ts # Internationalization
-│ └── index.ts # Public API
+│ └── index.ts # Public API exports
 ├── demo/
-│ ├── demo.ts # Local
+│ ├── demo.ts # Local dev demo
 │ ├── demo-standalone.ts # Production demo
-│ └── public/
-
-
+│ └── public/
+│     └── index.html # Demo HTML
+├── dist/ # SDK build output
+│ ├── face-validator-sdk.esm.js
+│ ├── face-validator-sdk.cjs.js
+│ ├── face-validator-sdk.umd.js
+│ └── types/ # TypeScript declarations
+├── .github/
+│ └── workflows/
+│     ├── ci.yml # CI/CD pipeline
+│     └── deploy-vercel.yml # Vercel deployment
+└── vercel.json # Vercel configuration
+```
+
+## 🚀 Deployment
+
+### Vercel (Automatic)
+
+1. Connect repository to Vercel
+2. Add secrets to GitHub:
+   - `VERCEL_TOKEN`
+   - `VERCEL_ORG_ID`
+   - `VERCEL_PROJECT_ID`
+3. Push to `main` branch → auto-deploy
+
+### Manual Deployment
+
+```bash
+npm run build:demo
+# Deploy demo/dist/ to any static host
 ```
 
+## 🤝 Contributing
+
+Contributions are welcome! Please:
+
+1. Fork the repository
+2. Create a feature branch: `git checkout -b feature/amazing-feature`
+3. Commit changes: `git commit -m 'feat: add amazing feature'`
+4. Push to branch: `git push origin feature/amazing-feature`
+5. Open a Pull Request
+
+### Commit Convention
+
+We use [Conventional Commits](https://www.conventionalcommits.org/):
+
+- `feat:` New feature
+- `fix:` Bug fix
+- `docs:` Documentation changes
+- `chore:` Maintenance tasks
+- `refactor:` Code refactoring
+- `test:` Add/update tests
+
 ## 📄 License
 
 MIT License - see [LICENSE](LICENSE) file for details.
 
 ## 🙏 Acknowledgments
 
-- [MediaPipe](https://developers.google.com/mediapipe) by Google
+- [MediaPipe](https://developers.google.com/mediapipe) by Google
+- [face-api.js](https://github.com/justadudewhohacks/face-api.js) (original inspiration)
 
 ## 📞 Support
 
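The new README documents the options and status values separately; the sketch below combines them, using only the public API shown above. The `#status` element, `/api/selfie` endpoint, and the overridden message text are placeholders, not part of the package:

```typescript
import { FaceValidator, ValidationStatus } from 'face-validator-sdk';

const validator = new FaceValidator({
  videoElement: document.querySelector('video')!,
  overlayCanvasElement: document.querySelector('canvas')!,
  locale: 'en',

  // customMessages overrides individual status texts; anything not listed
  // keeps the built-in translation for the chosen locale.
  customMessages: {
    [ValidationStatus.POOR_ILLUMINATION]: 'Please move somewhere brighter.',
  },

  onStatusUpdate: (status, message) => {
    // Show the SDK's message and react to specific states if needed.
    const label = document.getElementById('status');
    if (label) label.textContent = message;
    if (status === ValidationStatus.MULTIPLE_FACES) {
      console.warn('More than one face in frame');
    }
  },

  onCaptureSuccess: async (blob) => {
    // Upload the captured selfie; the endpoint is a placeholder.
    const formData = new FormData();
    formData.append('selfie', blob, 'selfie.jpg');
    await fetch('/api/selfie', { method: 'POST', body: formData });
  },

  onError: (errorType, error) => {
    console.error(errorType, error);
  },
});

// Stop the detection loop and release the landmarkers when done.
// validator.stop();
```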
package/dist/face-validator-sdk.cjs.js
CHANGED

@@ -1,2 +1,2 @@
-
(()=>{"use strict";var e={d:(t,n)=>{for(var a in n)e.o(n,a)&&!e.o(t,a)&&Object.defineProperty(t,a,{enumerable:!0,get:n[a]})},o:(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r:e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})}},t={};e.r(t),e.d(t,{FaceValidator:()=>T,ValidationStatus:()=>a,default:()=>S,getLoadingModelsMessage:()=>l,getMessage:()=>r,getValidationMessages:()=>s});const n=require("@mediapipe/tasks-vision");var a;!function(e){e.INITIALIZING="INITIALIZING",e.NO_FACE_DETECTED="NO_FACE_DETECTED",e.FACE_DETECTED="FACE_DETECTED",e.TOO_CLOSE="TOO_CLOSE",e.TOO_FAR="TOO_FAR",e.OFF_CENTER="OFF_CENTER",e.FACE_OBSTRUCTED="FACE_OBSTRUCTED",e.HEAD_NOT_STRAIGHT="HEAD_NOT_STRAIGHT",e.MULTIPLE_FACES="MULTIPLE_FACES",e.POOR_ILLUMINATION="POOR_ILLUMINATION",e.NOT_NEUTRAL_EXPRESSION="NOT_NEUTRAL_EXPRESSION",e.DARK_GLASSES="DARK_GLASSES",e.STAY_STILL="STAY_STILL",e.CAPTURING="CAPTURING",e.SUCCESS="SUCCESS",e.ERROR="ERROR"}(a||(a={}));const i={"pt-BR":{[a.INITIALIZING]:"Inicializando câmera e detector...",[a.NO_FACE_DETECTED]:"Posicione seu rosto no centro do oval.",[a.FACE_DETECTED]:"Analisando...",[a.TOO_CLOSE]:"Afaste-se um pouco",[a.TOO_FAR]:"Aproxime-se da câmera",[a.OFF_CENTER]:"Centralize o rosto no centro do oval",[a.FACE_OBSTRUCTED]:"Mantenha o rosto totalmente visível. Remova as mãos do rosto.",[a.HEAD_NOT_STRAIGHT]:"Olhe diretamente para a câmera e mantenha a cabeça reta.",[a.MULTIPLE_FACES]:"Mantenha apenas uma pessoa no quadro.",[a.POOR_ILLUMINATION]:"Procure um ambiente com boa iluminação e centralize seu rosto no centro do oval.",[a.NOT_NEUTRAL_EXPRESSION]:"Mantenha expressão neutra: boca fechada, sem sorrir e olhos abertos.",[a.DARK_GLASSES]:"Remova os óculos escuros. Óculos de grau são permitidos.",[a.STAY_STILL]:"Fique imóvel para capturar a foto",[a.CAPTURING]:"Capturando...",[a.SUCCESS]:"Captura realizada!",[a.ERROR]:"Ocorreu um erro."},en:{[a.INITIALIZING]:"Initializing camera and detector...",[a.NO_FACE_DETECTED]:"Position your face in the center of the oval.",[a.FACE_DETECTED]:"Analyzing...",[a.TOO_CLOSE]:"Move back a little",[a.TOO_FAR]:"Move closer to the camera",[a.OFF_CENTER]:"Center your face in the center of the oval",[a.FACE_OBSTRUCTED]:"Keep your face fully visible. Remove your hands from your face.",[a.HEAD_NOT_STRAIGHT]:"Look directly at the camera and keep your head straight.",[a.MULTIPLE_FACES]:"Keep only one person in the frame.",[a.POOR_ILLUMINATION]:"Find a well-lit environment and center your face in the oval.",[a.NOT_NEUTRAL_EXPRESSION]:"Keep a neutral expression: mouth closed, no smiling, and eyes open.",[a.DARK_GLASSES]:"Remove sunglasses. Prescription glasses are allowed.",[a.STAY_STILL]:"Stay still to capture the photo",[a.CAPTURING]:"Capturing...",[a.SUCCESS]:"Capture complete!",[a.ERROR]:"An error occurred."},es:{[a.INITIALIZING]:"Inicializando cámara y detector...",[a.NO_FACE_DETECTED]:"Coloque su rostro en el centro del óvalo.",[a.FACE_DETECTED]:"Analizando...",[a.TOO_CLOSE]:"Aléjese un poco",[a.TOO_FAR]:"Acérquese a la cámara",[a.OFF_CENTER]:"Centre el rostro en el centro del óvalo",[a.FACE_OBSTRUCTED]:"Mantenga el rostro totalmente visible. 
Quite las manos del rostro.",[a.HEAD_NOT_STRAIGHT]:"Mire directamente a la cámara y mantenga la cabeza recta.",[a.MULTIPLE_FACES]:"Mantenga solo una persona en el encuadre.",[a.POOR_ILLUMINATION]:"Busque un ambiente con buena iluminación y centre su rostro en el óvalo.",[a.NOT_NEUTRAL_EXPRESSION]:"Mantenga expresión neutra: boca cerrada, sin sonreír y ojos abiertos.",[a.DARK_GLASSES]:"Quite las gafas de sol. Las gafas graduadas están permitidas.",[a.STAY_STILL]:"Permanezca quieto para capturar la foto",[a.CAPTURING]:"Capturando...",[a.SUCCESS]:"¡Captura realizada!",[a.ERROR]:"Ocurrió un error."}},o={"pt-BR":"Status desconhecido.",en:"Unknown status.",es:"Estado desconhecido."};function s(e){return Object.assign({},i[e])}function r(e,t){var n;return null!==(n=i[t][e])&&void 0!==n?n:o[t]}function l(e){return{"pt-BR":"Carregando...",en:"Loading...",es:"Cargando..."}[e]}function c(e){const t=e.data;let n=0;for(let e=0;e<t.length;e+=4)n+=.2126*t[e]+.7152*t[e+1]+.0722*t[e+2];return n/(t.length/4)}const h=[33,133,159,145],d=[263,362,386,374],u=[61,291,0,17,39,269,270,409],m=.38;var E=function(e,t,n,a){return new(n||(n=Promise))(function(i,o){function s(e){try{l(a.next(e))}catch(e){o(e)}}function r(e){try{l(a.throw(e))}catch(e){o(e)}}function l(e){var t;e.done?i(e.value):(t=e.value,t instanceof n?t:new n(function(e){e(t)})).then(s,r)}l((a=a.apply(e,t||[])).next())})};const g={overlayCanvasElement:void 0,videoWidth:512,videoHeight:384,minDetectionConfidence:.4,minIlluminationThreshold:50,minFaceSizeFactor:.15,maxFaceSizeFactor:.75,stabilizationTimeThreshold:1e3,stabilityMovementThreshold:5,minFaceVisibilityScore:.4,maxHeadTiltDegrees:30,maxHandFaceDistance:.15,debugMode:!1,locale:"en",customMessages:{}};class T{constructor(e){this.faceLandmarker=null,this.handLandmarker=null,this.animationFrameId=null,this.lastDetection=null,this.stableSince=null,this.isCapturing=!1,this.options=this.resolveOptions(e),this.setStatus(a.INITIALIZING),this.init()}resolveOptions(e){const t=e.modelPath||"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm";return Object.assign(Object.assign(Object.assign({},g),e),{modelPath:t,locale:e.locale||"en",customMessages:e.customMessages||{}})}init(){return E(this,void 0,void 0,function*(){try{const e=l(this.options.locale);this.setStatus(a.INITIALIZING,void 0,e);const t=yield n.FilesetResolver.forVisionTasks(this.options.modelPath);this.faceLandmarker=yield n.FaceLandmarker.createFromOptions(t,{baseOptions:{modelAssetPath:"https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task",delegate:"GPU"},runningMode:"VIDEO",numFaces:2,minFaceDetectionConfidence:this.options.minDetectionConfidence,minFacePresenceConfidence:this.options.minFaceVisibilityScore,minTrackingConfidence:this.options.minFaceVisibilityScore}),this.handLandmarker=yield n.HandLandmarker.createFromOptions(t,{baseOptions:{modelAssetPath:"https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task",delegate:"GPU"},runningMode:"VIDEO",numHands:2,minHandDetectionConfidence:.5,minHandPresenceConfidence:.5,minTrackingConfidence:.5}),this.startDetectionLoop()}catch(e){const t=e instanceof Error?e:new Error(String(e));this.setStatus(a.ERROR,t)}})}getMessageForStatus(e,t){return t||(this.options.customMessages[e]?this.options.customMessages[e]:r(e,this.options.locale))}setStatus(e,t,n){const 
i=this.getMessageForStatus(e,n);this.options.onStatusUpdate(e,i),e===a.ERROR&&t&&this.options.onError(e,t)}startDetectionLoop(){const e=this.options.videoElement,t=this.options.videoWidth||640,n=this.options.videoHeight||480,i=()=>E(this,void 0,void 0,function*(){var o;if(this.faceLandmarker&&this.handLandmarker&&e.videoWidth){try{const i=performance.now();let s=a.NO_FACE_DETECTED,r=null,l=[];const E=this.faceLandmarker.detectForVideo(e,i),g=this.handLandmarker.detectForVideo(e,i);if(g.landmarks&&g.landmarks.length>0&&(l=g.landmarks.map((e,t)=>{var n,a,i;return{landmarks:e,handedness:(null===(i=null===(a=null===(n=g.handednesses)||void 0===n?void 0:n[t])||void 0===a?void 0:a[0])||void 0===i?void 0:i.categoryName)||"Unknown"}})),E.faceLandmarks&&E.faceLandmarks.length>1){s=a.MULTIPLE_FACES,this.stableSince=null;const e=E.faceLandmarks[0],t=(null===(o=E.faceBlendshapes)||void 0===o?void 0:o[0])?this.estimateBoundingBox(e):null;t&&(r={boundingBox:t,landmarks:e,timestamp:i})}else if(E.faceLandmarks&&1===E.faceLandmarks.length){const o=E.faceLandmarks[0],g=this.estimateBoundingBox(o);r={boundingBox:g,landmarks:o,timestamp:i};const T=function(e,t=.18,n=.7){const a=e.width;return a<t?"TOO_FAR":a>n?"TOO_CLOSE":"OK"}(g,this.options.minFaceSizeFactor,this.options.maxFaceSizeFactor);if("OK"!==T)s="TOO_CLOSE"===T?a.TOO_CLOSE:a.TOO_FAR,this.stableSince=null;else{const E=o[4],T=function(e,t,n,a){const i=(e*n-n/2)/(.2*n),o=(t*a-a/2)/(a*m);return i*i+o*o<=1}(E.x,E.y,t,n);if(function(e,t,n){const a=t/2,i=n/2,o=.2*t,s=n*m,r=e.xMin*t,l=(e.xMin+e.width)*t,c=e.yMin*n,h=(e.yMin+e.height)*n,d=((r+l)/2-a)/o,u=((c+h)/2-i)/s;if(d*d+u*u>1)return!1;const E=[{x:r,y:c},{x:l,y:c},{x:r,y:h},{x:l,y:h}];for(const e of E){const t=(e.x-a)/o,n=(e.y-i)/s;t*t+n*n>1.2&&0}}(g,t,n),T)if(function(e,t){if(e.length<478)return!1;const n=e[4],a=u.map(t=>e[t]),i=a.reduce((e,t)=>e+t.y,0)/a.length,o=Math.min(...a.map(e=>e.y)),s=Math.max(...a.map(e=>e.y))-o,r=t.height;return!(i<n.y-.01||i-n.y<.06*r||s<.02*r)}(o,g))if(function(e,t=25){if(e.length<478)return!1;const n=e[h[0]],a=e[d[0]],i=e[4],o=e[13],s=e[14],r=e[152],l=e[10],c=Math.abs(n.y-a.y),u=Math.abs(n.x-a.x);if(u<.01)return!1;const m=c/u;if(Math.atan(m)*(180/Math.PI)>t)return!1;const E=(n.x+a.x)/2,g=i.x-E,T=Math.abs(n.x-a.x);if(T<.01)return!1;const S=Math.abs(g)/T;if(Math.atan(S)*(180/Math.PI)>t)return!1;if(!function(e){if(e.length<478)return!1;const t=e[234],n=e[454],a=e[4],i=Math.abs(t.x-a.x),o=Math.abs(n.x-a.x);return!((i>.01&&o>.01?Math.max(i,o)/Math.min(i,o):1)>1.4||void 0!==t.z&&void 0!==n.z&&Math.abs(t.z-n.z)>.05)}(e))return!1;const f=(n.y+a.y)/2,O=(o.y+s.y)/2,p=r.y-l.y;if(p<.1)return!1;if(l.y>f+.02)return!1;if(f>i.y+.02)return!1;if(i.y>O+.02)return!1;if(O>=r.y)return!1;const I=(f-l.y)/p,y=(i.y-f)/p,C=(O-i.y)/p,A=(r.y-O)/p;return!(I<.06||I>.38||y<.03||y>.3||C<.02||C>.25||A<.04||A>.38)}(o,this.options.maxHeadTiltDegrees))if(l.length>0&&function(e,t,n=.15){const a=t.xMin+t.width/2,i=t.yMin+t.height/2;for(const t of e.landmarks){const e=t.x-a,o=t.y-i;if(Math.sqrt(e*e+o*o)<n)return!0}return!1}(l[0],g,this.options.maxHandFaceDistance))s=a.FACE_OBSTRUCTED,this.stableSince=null;else if(function(e){if(e.length<478)return!1;const t=e[159],n=e[144],a=e[386],i=e[373],o=Math.abs(t.y-n.y),s=Math.abs(a.y-i.y);if(o<.01||s<.01)return!1;const r=e[13],l=e[14];if(Math.abs(r.y-l.y)>.025)return!1;const c=e[61],h=e[291],d=e[4];return!((c.y+h.y)/2-d.y<.05)}(o))if(function(e,t){if(t.length<478)return!1;try{const n=document.createElement("canvas"),a=n.getContext("2d");if(!a)return!1;const 
i=e.videoWidth,o=e.videoHeight,s=[t[33],t[133],t[159],t[144],t[145]],r=[t[263],t[362],t[386],t[373],t[374]],l=e=>{const t=e.map(e=>e.x*i),n=e.map(e=>e.y*o),a=Math.max(0,Math.min(...t)-5),s=Math.min(i,Math.max(...t)+5),r=Math.max(0,Math.min(...n)-5);return{x:a,y:r,width:s-a,height:Math.min(o,Math.max(...n)+5)-r}},h=t=>(n.width=t.width,n.height=t.height,a.drawImage(e,t.x,t.y,t.width,t.height,0,0,t.width,t.height),c(a.getImageData(0,0,t.width,t.height))),d=l(s),u=l(r);return(h(d)+h(u))/2<35}catch(e){return console.warn("Erro ao detectar óculos escuros:",e),!1}}(e,o))s=a.DARK_GLASSES,this.stableSince=null;else{const o=document.createElement("canvas"),l=g.xMin*e.videoWidth,h=g.yMin*e.videoHeight,d=g.width*e.videoWidth,u=g.height*e.videoHeight;o.width=d,o.height=u;const m=o.getContext("2d",{willReadFrequently:!0});if(m){m.drawImage(e,l,h,d,u,0,0,d,u);c(m.getImageData(0,0,o.width,o.height))<this.options.minIlluminationThreshold?(s=a.POOR_ILLUMINATION,this.stableSince=null):function(e,t,n=5,a=512,i=384){if(!e||!t)return!1;const o=(e.boundingBox.xMin+e.boundingBox.width/2)*a,s=(e.boundingBox.yMin+e.boundingBox.height/2)*i,r=(t.boundingBox.xMin+t.boundingBox.width/2)*a,l=(t.boundingBox.yMin+t.boundingBox.height/2)*i,c=Math.abs(o-r),h=Math.abs(s-l),d=Math.abs(e.boundingBox.width-t.boundingBox.width)*a,u=Math.abs(e.boundingBox.height-t.boundingBox.height)*i;return c<=n&&h<=n&&d<=2*n&&u<=2*n}(r,this.lastDetection,this.options.stabilityMovementThreshold,t,n)?(this.stableSince||(this.stableSince=i),s=i-this.stableSince>=this.options.stabilizationTimeThreshold?a.CAPTURING:a.STAY_STILL):(this.stableSince=null,s=a.STAY_STILL)}else s=a.FACE_DETECTED,this.stableSince=null}else s=a.NOT_NEUTRAL_EXPRESSION,this.stableSince=null;else s=a.HEAD_NOT_STRAIGHT,this.stableSince=null;else s=a.FACE_OBSTRUCTED,this.stableSince=null;else s=a.OFF_CENTER,this.stableSince=null}}else this.lastDetection=null,this.stableSince=null;if(this.lastDetection=r,this.setStatus(s),this.options.overlayCanvasElement&&function(e,t,n,i,o){const s=e.getContext("2d");if(!s)return;const r=e.width,l=e.height,c=r/2,u=l/2;s.clearRect(0,0,r,l);const E=.2*r,g=l*m;if(s.fillStyle="rgba(255, 255, 255, 0.35)",s.fillRect(0,0,r,l),s.save(),s.beginPath(),s.ellipse(c,u,E,g,0,0,2*Math.PI),s.closePath(),s.globalCompositeOperation="destination-out",s.fill(),s.restore(),s.strokeStyle="rgba(255, 255, 255, 0.9)",s.lineWidth=3,s.beginPath(),s.ellipse(c,u,E,g,0,0,2*Math.PI),s.stroke(),s.strokeStyle="rgba(255, 255, 255, 0.45)",s.lineWidth=1,s.beginPath(),s.moveTo(c-6,u),s.lineTo(c+6,u),s.moveTo(c,u-6),s.lineTo(c,u+6),s.stroke(),t&&i){const e=i.landmarks;if(e.length>=478){const t=e[10],i=e[152],o=e[234],c=e[454],u=e.map(e=>e.x),m=e.map(e=>e.y),E=Math.min(...u),g=Math.max(...u),T=Math.min(...m),S=g-E,f=Math.max(...m)-T,O=.08,p=(E-S*O)*r,I=(T-f*O)*l,y=S*(1+2*O)*r,C=f*(1+2*O)*l;let A="red";n===a.STAY_STILL||n===a.CAPTURING?A="lime":n===a.FACE_DETECTED&&(A="yellow"),s.strokeStyle=A,s.lineWidth=3,s.strokeRect(p,I,y,C);const 
_=e[4];e[h[0]],e[d[0]],s.fillStyle="cyan",s.beginPath(),s.arc(_.x*r,_.y*l,5,0,2*Math.PI),s.fill(),s.fillStyle="magenta",s.beginPath(),s.arc(t.x*r,t.y*l,4,0,2*Math.PI),s.fill(),s.fillStyle="lime",s.beginPath(),s.arc(i.x*r,i.y*l,4,0,2*Math.PI),s.fill(),s.fillStyle="yellow",[e[33],e[133],e[159],e[144],e[145]].forEach(e=>{s.beginPath(),s.arc(e.x*r,e.y*l,3,0,2*Math.PI),s.fill()}),s.fillStyle="yellow",[e[263],e[362],e[386],e[373],e[374]].forEach(e=>{s.beginPath(),s.arc(e.x*r,e.y*l,3,0,2*Math.PI),s.fill()}),s.fillStyle="purple",s.beginPath(),s.arc(o.x*r,o.y*l,3,0,2*Math.PI),s.fill(),s.beginPath(),s.arc(c.x*r,c.y*l,3,0,2*Math.PI),s.fill()}}t&&o&&o.length>0&&o.forEach(e=>{s.fillStyle="orange",e.landmarks.forEach(e=>{s.beginPath(),s.arc(e.x*r,e.y*l,3,0,2*Math.PI),s.fill()})})}(this.options.overlayCanvasElement,this.options.debugMode||!1,s,r||void 0,l.length>0?l:void 0),s===a.CAPTURING&&!this.isCapturing)return this.isCapturing=!0,yield this.captureImage(),this.setStatus(a.SUCCESS),void this.stop()}catch(e){const t=e instanceof Error?e:new Error(String(e));this.setStatus(a.ERROR,t)}this.animationFrameId=requestAnimationFrame(i)}else this.animationFrameId=requestAnimationFrame(i)});this.animationFrameId=requestAnimationFrame(i)}estimateBoundingBox(e){const t=e.map(e=>e.x),n=e.map(e=>e.y),a=Math.min(...t),i=Math.max(...t),o=Math.min(...n);return{xMin:a,yMin:o,width:i-a,height:Math.max(...n)-o}}captureImage(){return E(this,void 0,void 0,function*(){const e=this.options.videoElement,t=document.createElement("canvas");t.width=e.videoWidth,t.height=e.videoHeight;const n=t.getContext("2d");n?(n.drawImage(e,0,0,t.width,t.height),t.toBlob(e=>{e?this.options.onCaptureSuccess(e):this.setStatus(a.ERROR,new Error("Failed to generate image blob"))},"image/jpeg",.95)):this.setStatus(a.ERROR,new Error("Failed to get canvas context"))})}stop(){null!==this.animationFrameId&&(cancelAnimationFrame(this.animationFrameId),this.animationFrameId=null),this.faceLandmarker&&this.faceLandmarker.close(),this.handLandmarker&&this.handLandmarker.close()}}const S=T;module.exports=t})();
+
(()=>{"use strict";var e={d:(t,n)=>{for(var a in n)e.o(n,a)&&!e.o(t,a)&&Object.defineProperty(t,a,{enumerable:!0,get:n[a]})},o:(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r:e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})}},t={};e.r(t),e.d(t,{FaceValidator:()=>T,ValidationStatus:()=>a,default:()=>S,getLoadingModelsMessage:()=>l,getMessage:()=>r,getValidationMessages:()=>s});const n=require("@mediapipe/tasks-vision");var a;!function(e){e.INITIALIZING="INITIALIZING",e.NO_FACE_DETECTED="NO_FACE_DETECTED",e.FACE_DETECTED="FACE_DETECTED",e.TOO_CLOSE="TOO_CLOSE",e.TOO_FAR="TOO_FAR",e.OFF_CENTER="OFF_CENTER",e.FACE_OBSTRUCTED="FACE_OBSTRUCTED",e.HEAD_NOT_STRAIGHT="HEAD_NOT_STRAIGHT",e.MULTIPLE_FACES="MULTIPLE_FACES",e.POOR_ILLUMINATION="POOR_ILLUMINATION",e.NOT_NEUTRAL_EXPRESSION="NOT_NEUTRAL_EXPRESSION",e.DARK_GLASSES="DARK_GLASSES",e.STAY_STILL="STAY_STILL",e.CAPTURING="CAPTURING",e.SUCCESS="SUCCESS",e.ERROR="ERROR"}(a||(a={}));const i={"pt-BR":{[a.INITIALIZING]:"Inicializando câmera e detector...",[a.NO_FACE_DETECTED]:"Posicione seu rosto no centro do oval.",[a.FACE_DETECTED]:"Analisando...",[a.TOO_CLOSE]:"Afaste-se um pouco",[a.TOO_FAR]:"Aproxime-se da câmera",[a.OFF_CENTER]:"Centralize o rosto no centro do oval",[a.FACE_OBSTRUCTED]:"Mantenha o rosto totalmente visível. Remova as mãos do rosto.",[a.HEAD_NOT_STRAIGHT]:"Olhe diretamente para a câmera e mantenha a cabeça reta.",[a.MULTIPLE_FACES]:"Mantenha apenas uma pessoa no quadro.",[a.POOR_ILLUMINATION]:"Procure um ambiente com boa iluminação e centralize seu rosto no centro do oval.",[a.NOT_NEUTRAL_EXPRESSION]:"Mantenha expressão neutra: boca fechada, sem sorrir e olhos abertos.",[a.DARK_GLASSES]:"Remova os óculos escuros. Óculos de grau são permitidos.",[a.STAY_STILL]:"Fique imóvel para capturar a foto",[a.CAPTURING]:"Capturando...",[a.SUCCESS]:"Captura realizada!",[a.ERROR]:"Ocorreu um erro."},en:{[a.INITIALIZING]:"Initializing camera and detector...",[a.NO_FACE_DETECTED]:"Position your face in the center of the oval.",[a.FACE_DETECTED]:"Analyzing...",[a.TOO_CLOSE]:"Move back a little",[a.TOO_FAR]:"Move closer to the camera",[a.OFF_CENTER]:"Center your face in the center of the oval",[a.FACE_OBSTRUCTED]:"Keep your face fully visible. Remove your hands from your face.",[a.HEAD_NOT_STRAIGHT]:"Look directly at the camera and keep your head straight.",[a.MULTIPLE_FACES]:"Keep only one person in the frame.",[a.POOR_ILLUMINATION]:"Find a well-lit environment and center your face in the oval.",[a.NOT_NEUTRAL_EXPRESSION]:"Keep a neutral expression: mouth closed, no smiling, and eyes open.",[a.DARK_GLASSES]:"Remove sunglasses. Prescription glasses are allowed.",[a.STAY_STILL]:"Stay still to capture the photo",[a.CAPTURING]:"Capturing...",[a.SUCCESS]:"Capture complete!",[a.ERROR]:"An error occurred."},es:{[a.INITIALIZING]:"Inicializando cámara y detector...",[a.NO_FACE_DETECTED]:"Coloque su rostro en el centro del óvalo.",[a.FACE_DETECTED]:"Analizando...",[a.TOO_CLOSE]:"Aléjese un poco",[a.TOO_FAR]:"Acérquese a la cámara",[a.OFF_CENTER]:"Centre el rostro en el centro del óvalo",[a.FACE_OBSTRUCTED]:"Mantenga el rostro totalmente visible. 
Quite las manos del rostro.",[a.HEAD_NOT_STRAIGHT]:"Mire directamente a la cámara y mantenga la cabeza recta.",[a.MULTIPLE_FACES]:"Mantenga solo una persona en el encuadre.",[a.POOR_ILLUMINATION]:"Busque un ambiente con buena iluminación y centre su rostro en el óvalo.",[a.NOT_NEUTRAL_EXPRESSION]:"Mantenga expresión neutra: boca cerrada, sin sonreír y ojos abiertos.",[a.DARK_GLASSES]:"Quite las gafas de sol. Las gafas graduadas están permitidas.",[a.STAY_STILL]:"Permanezca quieto para capturar la foto",[a.CAPTURING]:"Capturando...",[a.SUCCESS]:"¡Captura realizada!",[a.ERROR]:"Ocurrió un error."}},o={"pt-BR":"Status desconhecido.",en:"Unknown status.",es:"Estado desconhecido."};function s(e){return Object.assign({},i[e])}function r(e,t){var n;return null!==(n=i[t][e])&&void 0!==n?n:o[t]}function l(e){return{"pt-BR":"Carregando...",en:"Loading...",es:"Cargando..."}[e]}function c(e){const t=e.data;let n=0;for(let e=0;e<t.length;e+=4)n+=.2126*t[e]+.7152*t[e+1]+.0722*t[e+2];return n/(t.length/4)}const h=[33,133,159,145],d=[263,362,386,374],u=[61,291,0,17,39,269,270,409],m=.34;var E=function(e,t,n,a){return new(n||(n=Promise))(function(i,o){function s(e){try{l(a.next(e))}catch(e){o(e)}}function r(e){try{l(a.throw(e))}catch(e){o(e)}}function l(e){var t;e.done?i(e.value):(t=e.value,t instanceof n?t:new n(function(e){e(t)})).then(s,r)}l((a=a.apply(e,t||[])).next())})};const g={overlayCanvasElement:void 0,videoWidth:512,videoHeight:384,minDetectionConfidence:.4,minIlluminationThreshold:50,minFaceSizeFactor:.15,maxFaceSizeFactor:.75,stabilizationTimeThreshold:1e3,stabilityMovementThreshold:5,minFaceVisibilityScore:.4,maxHeadTiltDegrees:30,maxHandFaceDistance:.15,debugMode:!1,locale:"en",customMessages:{}};class T{constructor(e){this.faceLandmarker=null,this.handLandmarker=null,this.animationFrameId=null,this.lastDetection=null,this.stableSince=null,this.isCapturing=!1,this.options=this.resolveOptions(e),this.setStatus(a.INITIALIZING),this.init()}resolveOptions(e){const t=e.modelPath||"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm";return Object.assign(Object.assign(Object.assign({},g),e),{modelPath:t,locale:e.locale||"en",customMessages:e.customMessages||{}})}init(){return E(this,void 0,void 0,function*(){try{const e=l(this.options.locale);this.setStatus(a.INITIALIZING,void 0,e);const t=yield n.FilesetResolver.forVisionTasks(this.options.modelPath);this.faceLandmarker=yield n.FaceLandmarker.createFromOptions(t,{baseOptions:{modelAssetPath:"https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task",delegate:"GPU"},runningMode:"VIDEO",numFaces:2,minFaceDetectionConfidence:this.options.minDetectionConfidence,minFacePresenceConfidence:this.options.minFaceVisibilityScore,minTrackingConfidence:this.options.minFaceVisibilityScore}),this.handLandmarker=yield n.HandLandmarker.createFromOptions(t,{baseOptions:{modelAssetPath:"https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task",delegate:"GPU"},runningMode:"VIDEO",numHands:2,minHandDetectionConfidence:.5,minHandPresenceConfidence:.5,minTrackingConfidence:.5}),this.startDetectionLoop()}catch(e){const t=e instanceof Error?e:new Error(String(e));this.setStatus(a.ERROR,t)}})}getMessageForStatus(e,t){return t||(this.options.customMessages[e]?this.options.customMessages[e]:r(e,this.options.locale))}setStatus(e,t,n){const 
i=this.getMessageForStatus(e,n);this.options.onStatusUpdate(e,i),e===a.ERROR&&t&&this.options.onError(e,t)}startDetectionLoop(){const e=this.options.videoElement,t=this.options.videoWidth||640,n=this.options.videoHeight||480,i=()=>E(this,void 0,void 0,function*(){var o;if(this.faceLandmarker&&this.handLandmarker&&e.videoWidth){try{const i=performance.now();let s=a.NO_FACE_DETECTED,r=null,l=[];const E=this.faceLandmarker.detectForVideo(e,i),g=this.handLandmarker.detectForVideo(e,i);if(g.landmarks&&g.landmarks.length>0&&(l=g.landmarks.map((e,t)=>{var n,a,i;return{landmarks:e,handedness:(null===(i=null===(a=null===(n=g.handednesses)||void 0===n?void 0:n[t])||void 0===a?void 0:a[0])||void 0===i?void 0:i.categoryName)||"Unknown"}})),E.faceLandmarks&&E.faceLandmarks.length>1){s=a.MULTIPLE_FACES,this.stableSince=null;const e=E.faceLandmarks[0],t=(null===(o=E.faceBlendshapes)||void 0===o?void 0:o[0])?this.estimateBoundingBox(e):null;t&&(r={boundingBox:t,landmarks:e,timestamp:i})}else if(E.faceLandmarks&&1===E.faceLandmarks.length){const o=E.faceLandmarks[0],g=this.estimateBoundingBox(o);r={boundingBox:g,landmarks:o,timestamp:i};const T=function(e,t=.18,n=.7){const a=e.width;return a<t?"TOO_FAR":a>n?"TOO_CLOSE":"OK"}(g,this.options.minFaceSizeFactor,this.options.maxFaceSizeFactor);if("OK"!==T)s="TOO_CLOSE"===T?a.TOO_CLOSE:a.TOO_FAR,this.stableSince=null;else{const E=o[4],T=function(e,t,n,a){const i=(e*n-n/2)/(.2*n),o=(t*a-a/2)/(a*m);return i*i+o*o<=1}(E.x,E.y,t,n);if(function(e,t,n){const a=t/2,i=n/2,o=.2*t,s=n*m,r=e.xMin*t,l=(e.xMin+e.width)*t,c=e.yMin*n,h=(e.yMin+e.height)*n,d=((r+l)/2-a)/o,u=((c+h)/2-i)/s;if(d*d+u*u>1)return!1;const E=[{x:r,y:c},{x:l,y:c},{x:r,y:h},{x:l,y:h}];for(const e of E){const t=(e.x-a)/o,n=(e.y-i)/s;t*t+n*n>1.2&&0}}(g,t,n),T)if(function(e,t){if(e.length<478)return!1;const n=e[4],a=u.map(t=>e[t]),i=a.reduce((e,t)=>e+t.y,0)/a.length,o=Math.min(...a.map(e=>e.y)),s=Math.max(...a.map(e=>e.y))-o,r=t.height;return!(i<n.y-.01||i-n.y<.06*r||s<.02*r)}(o,g))if(function(e,t=25){if(e.length<478)return!1;const n=e[h[0]],a=e[d[0]],i=e[4],o=e[13],s=e[14],r=e[152],l=e[10],c=Math.abs(n.y-a.y),u=Math.abs(n.x-a.x);if(u<.01)return!1;const m=c/u;if(Math.atan(m)*(180/Math.PI)>t)return!1;const E=(n.x+a.x)/2,g=i.x-E,T=Math.abs(n.x-a.x);if(T<.01)return!1;const S=Math.abs(g)/T;if(Math.atan(S)*(180/Math.PI)>t)return!1;if(!function(e){if(e.length<478)return!1;const t=e[234],n=e[454],a=e[4],i=Math.abs(t.x-a.x),o=Math.abs(n.x-a.x);return!((i>.01&&o>.01?Math.max(i,o)/Math.min(i,o):1)>1.4||void 0!==t.z&&void 0!==n.z&&Math.abs(t.z-n.z)>.05)}(e))return!1;const f=(n.y+a.y)/2,O=(o.y+s.y)/2,p=r.y-l.y;if(p<.1)return!1;if(l.y>f+.02)return!1;if(f>i.y+.02)return!1;if(i.y>O+.02)return!1;if(O>=r.y)return!1;const I=(f-l.y)/p,y=(i.y-f)/p,C=(O-i.y)/p,A=(r.y-O)/p;return!(I<.06||I>.38||y<.03||y>.3||C<.02||C>.25||A<.04||A>.38)}(o,this.options.maxHeadTiltDegrees))if(l.length>0&&function(e,t,n=.15){const a=t.xMin+t.width/2,i=t.yMin+t.height/2;for(const t of e.landmarks){const e=t.x-a,o=t.y-i;if(Math.sqrt(e*e+o*o)<n)return!0}return!1}(l[0],g,this.options.maxHandFaceDistance))s=a.FACE_OBSTRUCTED,this.stableSince=null;else if(function(e){if(e.length<478)return!1;const t=e[159],n=e[144],a=e[386],i=e[373],o=Math.abs(t.y-n.y),s=Math.abs(a.y-i.y);if(o<.01||s<.01)return!1;const r=e[13],l=e[14];if(Math.abs(r.y-l.y)>.025)return!1;const c=e[61],h=e[291],d=e[4];return!((c.y+h.y)/2-d.y<.05)}(o))if(function(e,t){if(t.length<478)return!1;try{const n=document.createElement("canvas"),a=n.getContext("2d");if(!a)return!1;const 
i=e.videoWidth,o=e.videoHeight,s=[t[33],t[133],t[159],t[144],t[145]],r=[t[263],t[362],t[386],t[373],t[374]],l=e=>{const t=e.map(e=>e.x*i),n=e.map(e=>e.y*o),a=Math.max(0,Math.min(...t)-5),s=Math.min(i,Math.max(...t)+5),r=Math.max(0,Math.min(...n)-5);return{x:a,y:r,width:s-a,height:Math.min(o,Math.max(...n)+5)-r}},h=t=>(n.width=t.width,n.height=t.height,a.drawImage(e,t.x,t.y,t.width,t.height,0,0,t.width,t.height),c(a.getImageData(0,0,t.width,t.height))),d=l(s),u=l(r);return(h(d)+h(u))/2<35}catch(e){return console.warn("Erro ao detectar óculos escuros:",e),!1}}(e,o))s=a.DARK_GLASSES,this.stableSince=null;else{const o=document.createElement("canvas"),l=g.xMin*e.videoWidth,h=g.yMin*e.videoHeight,d=g.width*e.videoWidth,u=g.height*e.videoHeight;o.width=d,o.height=u;const m=o.getContext("2d",{willReadFrequently:!0});if(m){m.drawImage(e,l,h,d,u,0,0,d,u);c(m.getImageData(0,0,o.width,o.height))<this.options.minIlluminationThreshold?(s=a.POOR_ILLUMINATION,this.stableSince=null):function(e,t,n=5,a=512,i=384){if(!e||!t)return!1;const o=(e.boundingBox.xMin+e.boundingBox.width/2)*a,s=(e.boundingBox.yMin+e.boundingBox.height/2)*i,r=(t.boundingBox.xMin+t.boundingBox.width/2)*a,l=(t.boundingBox.yMin+t.boundingBox.height/2)*i,c=Math.abs(o-r),h=Math.abs(s-l),d=Math.abs(e.boundingBox.width-t.boundingBox.width)*a,u=Math.abs(e.boundingBox.height-t.boundingBox.height)*i;return c<=n&&h<=n&&d<=2*n&&u<=2*n}(r,this.lastDetection,this.options.stabilityMovementThreshold,t,n)?(this.stableSince||(this.stableSince=i),s=i-this.stableSince>=this.options.stabilizationTimeThreshold?a.CAPTURING:a.STAY_STILL):(this.stableSince=null,s=a.STAY_STILL)}else s=a.FACE_DETECTED,this.stableSince=null}else s=a.NOT_NEUTRAL_EXPRESSION,this.stableSince=null;else s=a.HEAD_NOT_STRAIGHT,this.stableSince=null;else s=a.FACE_OBSTRUCTED,this.stableSince=null;else s=a.OFF_CENTER,this.stableSince=null}}else this.lastDetection=null,this.stableSince=null;if(this.lastDetection=r,this.setStatus(s),this.options.overlayCanvasElement&&function(e,t,n,i,o){const s=e.getContext("2d");if(!s)return;const r=e.width,l=e.height,c=r/2,u=l/2;s.clearRect(0,0,r,l);const E=.2*r,g=l*m;if(s.fillStyle="rgba(255, 255, 255, 0.35)",s.fillRect(0,0,r,l),s.save(),s.beginPath(),s.ellipse(c,u,E,g,0,0,2*Math.PI),s.closePath(),s.globalCompositeOperation="destination-out",s.fill(),s.restore(),s.strokeStyle="rgba(255, 255, 255, 0.9)",s.lineWidth=3,s.beginPath(),s.ellipse(c,u,E,g,0,0,2*Math.PI),s.stroke(),s.strokeStyle="rgba(255, 255, 255, 0.45)",s.lineWidth=1,s.beginPath(),s.moveTo(c-6,u),s.lineTo(c+6,u),s.moveTo(c,u-6),s.lineTo(c,u+6),s.stroke(),t&&i){const e=i.landmarks;if(e.length>=478){const t=e[10],i=e[152],o=e[234],c=e[454],u=e.map(e=>e.x),m=e.map(e=>e.y),E=Math.min(...u),g=Math.max(...u),T=Math.min(...m),S=g-E,f=Math.max(...m)-T,O=.08,p=(E-S*O)*r,I=(T-f*O)*l,y=S*(1+2*O)*r,C=f*(1+2*O)*l;let A="red";n===a.STAY_STILL||n===a.CAPTURING?A="lime":n===a.FACE_DETECTED&&(A="yellow"),s.strokeStyle=A,s.lineWidth=3,s.strokeRect(p,I,y,C);const 
_=e[4];e[h[0]],e[d[0]],s.fillStyle="cyan",s.beginPath(),s.arc(_.x*r,_.y*l,5,0,2*Math.PI),s.fill(),s.fillStyle="magenta",s.beginPath(),s.arc(t.x*r,t.y*l,4,0,2*Math.PI),s.fill(),s.fillStyle="lime",s.beginPath(),s.arc(i.x*r,i.y*l,4,0,2*Math.PI),s.fill(),s.fillStyle="yellow",[e[33],e[133],e[159],e[144],e[145]].forEach(e=>{s.beginPath(),s.arc(e.x*r,e.y*l,3,0,2*Math.PI),s.fill()}),s.fillStyle="yellow",[e[263],e[362],e[386],e[373],e[374]].forEach(e=>{s.beginPath(),s.arc(e.x*r,e.y*l,3,0,2*Math.PI),s.fill()}),s.fillStyle="purple",s.beginPath(),s.arc(o.x*r,o.y*l,3,0,2*Math.PI),s.fill(),s.beginPath(),s.arc(c.x*r,c.y*l,3,0,2*Math.PI),s.fill()}}t&&o&&o.length>0&&o.forEach(e=>{s.fillStyle="orange",e.landmarks.forEach(e=>{s.beginPath(),s.arc(e.x*r,e.y*l,3,0,2*Math.PI),s.fill()})})}(this.options.overlayCanvasElement,this.options.debugMode||!1,s,r||void 0,l.length>0?l:void 0),s===a.CAPTURING&&!this.isCapturing)return this.isCapturing=!0,yield this.captureImage(),this.setStatus(a.SUCCESS),void this.stop()}catch(e){const t=e instanceof Error?e:new Error(String(e));this.setStatus(a.ERROR,t)}this.animationFrameId=requestAnimationFrame(i)}else this.animationFrameId=requestAnimationFrame(i)});this.animationFrameId=requestAnimationFrame(i)}estimateBoundingBox(e){const t=e.map(e=>e.x),n=e.map(e=>e.y),a=Math.min(...t),i=Math.max(...t),o=Math.min(...n);return{xMin:a,yMin:o,width:i-a,height:Math.max(...n)-o}}captureImage(){return E(this,void 0,void 0,function*(){const e=this.options.videoElement,t=document.createElement("canvas");t.width=e.videoWidth,t.height=e.videoHeight;const n=t.getContext("2d");n?(n.drawImage(e,0,0,t.width,t.height),t.toBlob(e=>{e?this.options.onCaptureSuccess(e):this.setStatus(a.ERROR,new Error("Failed to generate image blob"))},"image/jpeg",.95)):this.setStatus(a.ERROR,new Error("Failed to get canvas context"))})}stop(){null!==this.animationFrameId&&(cancelAnimationFrame(this.animationFrameId),this.animationFrameId=null),this.faceLandmarker&&this.faceLandmarker.close(),this.handLandmarker&&this.handLandmarker.close()}}const S=T;module.exports=t})();
 //# sourceMappingURL=face-validator-sdk.cjs.js.map
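The bundle above also exports the i18n helpers listed in the 1.0.0 changelog entry (`getMessage`, `getValidationMessages`, `getLoadingModelsMessage`), which the README does not demonstrate. A minimal sketch of how they can be called; the strings in the comments come from the message tables visible in the bundle:

```typescript
import {
  getLoadingModelsMessage,
  getMessage,
  getValidationMessages,
  ValidationStatus,
} from 'face-validator-sdk';

// Message shown while the MediaPipe models are loading, per locale.
console.log(getLoadingModelsMessage('pt-BR')); // "Carregando..."

// One message for a specific status and locale...
console.log(getMessage(ValidationStatus.TOO_FAR, 'en')); // "Move closer to the camera"

// ...or the whole status → message table for a locale.
const messages = getValidationMessages('es');
console.log(messages[ValidationStatus.DARK_GLASSES]);
// "Quite las gafas de sol. Las gafas graduadas están permitidas."
```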