@daboss2003/liveness-web 1.0.2 → 1.0.3

This diff shows the content changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/engine.js CHANGED
@@ -110,14 +110,14 @@ const config = {
110
110
  // Number of frames averaged to produce the resting baseline per step
111
111
  baselineFrames: 8,
112
112
  // ── Head turns (relative to baseline) ─────────────────────────────────────
113
- yawTurnDelta: 12, // degrees of YAW change needed from rest
113
+ yawTurnDelta: 9, // degrees of YAW change needed from rest
114
114
  yawWrongDirDelta: 16, // block if turned clearly the WRONG way
115
- headTurnHoldMs: 120, // sustain the turned pose for this long
115
+ headTurnHoldMs: 80, // sustain the turned pose for this long
116
116
  // ── Nod (relative to baseline) ────────────────────────────────────────────
117
- nodDownDelta: 8, // chin must DROP by this many degrees from baseline
118
- nodReturnFraction: 0.40, // return to 40% of peak nod depth to complete
119
- nodReturnMaxDelta: 5, // cap: never require returning past 5° from baseline
120
- maxYawDuringNod: 22,
117
+ nodDownDelta: 4, // chin must DROP by this many degrees from baseline
118
+ nodReturnFraction: 0.75, // return to 75% of peak nod depth to complete
119
+ nodReturnMaxDelta: 9, // cap: never require returning past 9° from baseline
120
+ maxYawDuringNod: 32,
121
121
  // ── Blink ──────────────────────────────────────────────────────────────────
122
122
  blinkClosedThreshold: 0.35, // blendshape score = eyes closed
123
123
  blinkOpenThreshold: 0.20, // blendshape score = eyes open
@@ -127,11 +127,11 @@ const config = {
127
127
  maxYawDuringBlink: 25,
128
128
  maxPitchDuringBlink: 25,
129
129
  // ── Mouth ──────────────────────────────────────────────────────────────────
130
- mouthOpenThreshold: 0.28, // jawOpen blendshape
131
- mouthOpenMarThreshold: 0.28,
132
- mouthHoldMs: 120,
133
- maxYawDuringMouth: 25,
134
- maxPitchDuringMouth: 25,
130
+ mouthOpenThreshold: 0.20, // jawOpen blendshape
131
+ mouthOpenMarThreshold: 0.20,
132
+ mouthHoldMs: 50,
133
+ maxYawDuringMouth: 35,
134
+ maxPitchDuringMouth: 35,
135
135
  // ── Face-in-oval ───────────────────────────────────────────────────────────
136
136
  ovalCx: 0.50,
137
137
  ovalCy: 0.42,
package/dist/ui.js CHANGED
@@ -64,8 +64,20 @@ function createStyles() {
64
64
  object-fit: cover;
65
65
  /* Mirror so it feels like a selfie camera */
66
66
  transform: scaleX(-1);
67
+ opacity: 0;
68
+ transition: opacity 0.2s ease;
67
69
  /* Clip the video to the oval using clip-path on the parent */
68
70
  }
71
+ .lv-video.is-playing { opacity: 1; }
72
+ .lv-video::-webkit-media-controls,
73
+ .lv-video::-webkit-media-controls-panel,
74
+ .lv-video::-webkit-media-controls-play-button,
75
+ .lv-video::-webkit-media-controls-start-playback-button,
76
+ .lv-video::-webkit-media-controls-overlay-play-button,
77
+ .lv-video::-webkit-media-controls-enclosure {
78
+ display: none !important;
79
+ -webkit-appearance: none;
80
+ }
69
81
 
70
82
  /* ── Dark overlay with oval cutout ──────────────────────────────────── */
71
83
  .lv-overlay {
@@ -266,6 +278,7 @@ export function startLivenessWithUI(options) {
266
278
  video.className = "lv-video";
267
279
  video.setAttribute("autoplay", "");
268
280
  video.setAttribute("playsinline", "");
281
+ video.setAttribute("webkit-playsinline", "");
269
282
  video.setAttribute("muted", "");
270
283
  videoBg.appendChild(video);
271
284
  root.appendChild(videoBg);
@@ -357,6 +370,9 @@ export function startLivenessWithUI(options) {
357
370
  }
358
371
  function cleanup() {
359
372
  engine.stop();
373
+ video.removeEventListener("playing", onVideoPlaying);
374
+ video.removeEventListener("pause", onVideoPause);
375
+ video.removeEventListener("waiting", onVideoPause);
360
376
  root.remove();
361
377
  }
362
378
  // ── Engine ─────────────────────────────────────────────────────────────────
@@ -364,6 +380,11 @@ export function startLivenessWithUI(options) {
364
380
  const sounds = options.sounds ?? {
365
381
  ...(Object.keys(DEFAULT_SOUND_DATA_URLS).length > 0 ? DEFAULT_SOUND_DATA_URLS : { baseUrl: "audios/" }),
366
382
  };
383
+ const onVideoPlaying = () => video.classList.add("is-playing");
384
+ const onVideoPause = () => video.classList.remove("is-playing");
385
+ video.addEventListener("playing", onVideoPlaying);
386
+ video.addEventListener("pause", onVideoPause);
387
+ video.addEventListener("waiting", onVideoPause);
367
388
  const engine = new LivenessEngine({
368
389
  videoElement: video,
369
390
  canvasElement: canvas,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@daboss2003/liveness-web",
3
- "version": "1.0.2",
3
+ "version": "1.0.3",
4
4
  "description": "Web liveness detection using MediaPipe Face Landmarker (CDN)",
5
5
  "keywords": [
6
6
  "liveness",
package/src/engine.ts CHANGED
@@ -188,15 +188,15 @@ const config = {
188
188
  baselineFrames: 8,
189
189
 
190
190
  // ── Head turns (relative to baseline) ─────────────────────────────────────
191
- yawTurnDelta: 12, // degrees of YAW change needed from rest
191
+ yawTurnDelta: 9, // degrees of YAW change needed from rest
192
192
  yawWrongDirDelta: 16, // block if turned clearly the WRONG way
193
- headTurnHoldMs: 120, // sustain the turned pose for this long
193
+ headTurnHoldMs: 80, // sustain the turned pose for this long
194
194
 
195
195
  // ── Nod (relative to baseline) ────────────────────────────────────────────
196
- nodDownDelta: 8, // chin must DROP by this many degrees from baseline
197
- nodReturnFraction: 0.40, // return to 40% of peak nod depth to complete
198
- nodReturnMaxDelta: 5, // cap: never require returning past 5° from baseline
199
- maxYawDuringNod: 22,
196
+ nodDownDelta: 4, // chin must DROP by this many degrees from baseline
197
+ nodReturnFraction: 0.75, // return to 75% of peak nod depth to complete
198
+ nodReturnMaxDelta: 9, // cap: never require returning past 9° from baseline
199
+ maxYawDuringNod: 32,
200
200
 
201
201
  // ── Blink ──────────────────────────────────────────────────────────────────
202
202
  blinkClosedThreshold: 0.35, // blendshape score = eyes closed
@@ -208,11 +208,11 @@ const config = {
208
208
  maxPitchDuringBlink: 25,
209
209
 
210
210
  // ── Mouth ──────────────────────────────────────────────────────────────────
211
- mouthOpenThreshold: 0.28, // jawOpen blendshape
212
- mouthOpenMarThreshold: 0.28,
213
- mouthHoldMs: 120,
214
- maxYawDuringMouth: 25,
215
- maxPitchDuringMouth: 25,
211
+ mouthOpenThreshold: 0.20, // jawOpen blendshape
212
+ mouthOpenMarThreshold: 0.20,
213
+ mouthHoldMs: 50,
214
+ maxYawDuringMouth: 35,
215
+ maxPitchDuringMouth: 35,
216
216
 
217
217
  // ── Face-in-oval ───────────────────────────────────────────────────────────
218
218
  ovalCx: 0.50,
package/src/ui.ts CHANGED
@@ -79,8 +79,20 @@ function createStyles(): HTMLStyleElement {
79
79
  object-fit: cover;
80
80
  /* Mirror so it feels like a selfie camera */
81
81
  transform: scaleX(-1);
82
+ opacity: 0;
83
+ transition: opacity 0.2s ease;
82
84
  /* Clip the video to the oval using clip-path on the parent */
83
85
  }
86
+ .lv-video.is-playing { opacity: 1; }
87
+ .lv-video::-webkit-media-controls,
88
+ .lv-video::-webkit-media-controls-panel,
89
+ .lv-video::-webkit-media-controls-play-button,
90
+ .lv-video::-webkit-media-controls-start-playback-button,
91
+ .lv-video::-webkit-media-controls-overlay-play-button,
92
+ .lv-video::-webkit-media-controls-enclosure {
93
+ display: none !important;
94
+ -webkit-appearance: none;
95
+ }
84
96
 
85
97
  /* ── Dark overlay with oval cutout ──────────────────────────────────── */
86
98
  .lv-overlay {
@@ -287,6 +299,7 @@ export function startLivenessWithUI(options: StartLivenessOptions): LivenessEngi
287
299
  video.className = "lv-video";
288
300
  video.setAttribute("autoplay", "");
289
301
  video.setAttribute("playsinline", "");
302
+ video.setAttribute("webkit-playsinline", "");
290
303
  video.setAttribute("muted", "");
291
304
  videoBg.appendChild(video);
292
305
  root.appendChild(videoBg);
@@ -391,6 +404,9 @@ export function startLivenessWithUI(options: StartLivenessOptions): LivenessEngi
391
404
 
392
405
  function cleanup(): void {
393
406
  engine.stop();
407
+ video.removeEventListener("playing", onVideoPlaying);
408
+ video.removeEventListener("pause", onVideoPause);
409
+ video.removeEventListener("waiting", onVideoPause);
394
410
  root.remove();
395
411
  }
396
412
 
@@ -400,6 +416,12 @@ export function startLivenessWithUI(options: StartLivenessOptions): LivenessEngi
400
416
  ...(Object.keys(DEFAULT_SOUND_DATA_URLS).length > 0 ? DEFAULT_SOUND_DATA_URLS : { baseUrl: "audios/" }),
401
417
  };
402
418
 
419
+ const onVideoPlaying = () => video.classList.add("is-playing");
420
+ const onVideoPause = () => video.classList.remove("is-playing");
421
+ video.addEventListener("playing", onVideoPlaying);
422
+ video.addEventListener("pause", onVideoPause);
423
+ video.addEventListener("waiting", onVideoPause);
424
+
403
425
  const engine = new LivenessEngine({
404
426
  videoElement: video,
405
427
  canvasElement: canvas,