@sssxyd/face-liveness-detector 0.4.0-alpha.4 → 0.4.0-alpha.5

package/dist/index.esm.js CHANGED
@@ -115,16 +115,16 @@ const DEFAULT_OPTIONS = {
     motion_liveness_motion_consistency_threshold: 0.3,
     motion_liveness_strict_photo_detection: false,
     // Screen Capture Detection Settings
-    screen_capture_confidence_threshold: 0.6,
+    screen_capture_confidence_threshold: 0.7,
     screen_capture_detection_strategy: 'adaptive',
     screen_moire_pattern_threshold: 0.65,
     screen_moire_pattern_enable_dct: true,
     screen_moire_pattern_enable_edge_detection: true,
     screen_color_saturation_threshold: 40,
-    screen_color_rgb_correlation_threshold: 0.85,
+    screen_color_rgb_correlation_threshold: 0.75,
     screen_color_pixel_entropy_threshold: 6.5,
     screen_color_gradient_smoothness_threshold: 0.7,
-    screen_color_confidence_threshold: 0.6,
+    screen_color_confidence_threshold: 0.65,
     screen_rgb_low_freq_start_percent: 0.15,
     screen_rgb_low_freq_end_percent: 0.35,
     screen_rgb_energy_ratio_normalization_factor: 10,
@@ -132,7 +132,7 @@ const DEFAULT_OPTIONS = {
     screen_rgb_energy_score_weight: 0.40,
     screen_rgb_asymmetry_score_weight: 0.40,
     screen_rgb_difference_factor_weight: 0.20,
-    screen_rgb_confidence_threshold: 0.60,
+    screen_rgb_confidence_threshold: 0.65,
 };
 /**
  * Merge user configuration with defaults
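Taken together, these two hunks tighten the screen-capture gates: the overall, color, and RGB confidence thresholds rise (0.6 → 0.7, 0.6 → 0.65, 0.60 → 0.65) while the RGB correlation threshold drops from 0.85 to 0.75. Because these keys live in DEFAULT_OPTIONS and the file carries a "Merge user configuration with defaults" helper, a consumer can pin the previous behavior via overrides. A minimal sketch, assuming FaceDetectionEngine is the public export and accepts a partial options object (neither is shown in this diff):

import { FaceDetectionEngine } from '@sssxyd/face-liveness-detector';

// Hypothetical usage: these keys are merged over DEFAULT_OPTIONS.
// The values shown restore the alpha.4 defaults, should the stricter
// alpha.5 settings reject too many legitimate sessions.
const engine = new FaceDetectionEngine({
    screen_capture_confidence_threshold: 0.6,
    screen_color_rgb_correlation_threshold: 0.85,
    screen_color_confidence_threshold: 0.6,
    screen_rgb_confidence_threshold: 0.60,
});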
@@ -4516,9 +4516,16 @@ class FaceDetectionEngine extends SimpleEventEmitter {
        };
        if (this.videoElement) {
            this.videoElement.addEventListener('canplay', onCanPlay, { once: true });
-            this.videoElement.play().catch(err => {
+            this.videoElement.play().catch((err) => {
                clearTimeout(timeout);
                cleanup();
+                const errorInfo = this.extractErrorInfo(err);
+                this.emitDebug('video-setup', 'Failed to play video', {
+                    error: errorInfo.message,
+                    stack: errorInfo.stack,
+                    name: errorInfo.name,
+                    cause: errorInfo.cause
+                }, 'error');
                reject(err);
            });
        }
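Beyond the cosmetic (err) change, the play() rejection handler now routes the error through this.extractErrorInfo and emits a structured 'video-setup' debug event before rejecting. The helper's body is outside this diff; judging by the fields consumed here, it flattens a thrown value into serializable parts. A sketch of that reading, written as a standalone function with assumed behavior:

// Assumed behavior only; the real extractErrorInfo is not in this diff.
// The returned keys mirror the payload built in the hunk above.
function extractErrorInfo(err) {
    if (err instanceof Error) {
        return { message: err.message, stack: err.stack, name: err.name, cause: err.cause };
    }
    return { message: String(err), stack: undefined, name: typeof err, cause: undefined };
}

This matters in practice because browsers reject play() with a NotAllowedError when autoplay policy blocks playback, and surfacing the error name is the quickest way to tell that apart from a media decoding failure.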
@@ -4702,34 +4709,34 @@ class FaceDetectionEngine extends SimpleEventEmitter {
            this.scheduleNextDetection(this.options.detect_error_retry_delay);
            return;
        }
-        // Current frame image
-        const bgrFrame = drawCanvasToMat(this.cv, frameCanvas, false);
-        if (!bgrFrame) {
-            this.emitDebug('detection', 'Failed to convert canvas to OpenCV Mat', {}, 'warn');
-            this.scheduleNextDetection(this.options.detect_error_retry_delay);
-            return;
-        }
-        // Grayscale version of the current frame
-        const grayFrame = matToGray(this.cv, bgrFrame);
-        if (!grayFrame) {
-            bgrFrame.delete();
-            this.emitDebug('detection', 'Failed to convert frame Mat to grayscale', {}, 'warn');
-            this.scheduleNextDetection(this.options.detect_error_retry_delay);
-            return;
-        }
-        // Extract the face region image and its grayscale version
-        const bgrFace = bgrFrame.roi(new this.cv.Rect(faceBox[0], faceBox[1], faceBox[2], faceBox[3]));
-        const grayFace = matToGray(this.cv, bgrFace);
-        if (!grayFace) {
-            bgrFrame.delete();
-            bgrFace.delete();
-            this.emitDebug('detection', 'Failed to convert face Mat to grayscale', {}, 'warn');
-            this.scheduleNextDetection(this.options.detect_error_retry_delay);
-            return;
-        }
-        // Release Mats that are no longer needed
-        bgrFrame.delete();
+        // All Mat objects that must be deleted
+        let bgrFrame = null;
+        let grayFrame = null;
+        let bgrFace = null;
+        let grayFace = null;
        try {
+            // Current frame image
+            bgrFrame = drawCanvasToMat(this.cv, frameCanvas, false);
+            if (!bgrFrame) {
+                this.emitDebug('detection', 'Failed to convert canvas to OpenCV Mat', {}, 'warn');
+                this.scheduleNextDetection(this.options.detect_error_retry_delay);
+                return;
+            }
+            // Grayscale version of the current frame
+            grayFrame = matToGray(this.cv, bgrFrame);
+            if (!grayFrame) {
+                this.emitDebug('detection', 'Failed to convert frame Mat to grayscale', {}, 'warn');
+                this.scheduleNextDetection(this.options.detect_error_retry_delay);
+                return;
+            }
+            // Extract the face region image and its grayscale version
+            bgrFace = bgrFrame.roi(new this.cv.Rect(faceBox[0], faceBox[1], faceBox[2], faceBox[3]));
+            grayFace = matToGray(this.cv, bgrFace);
+            if (!grayFace) {
+                this.emitDebug('detection', 'Failed to convert face Mat to grayscale', {}, 'warn');
+                this.scheduleNextDetection(this.options.detect_error_retry_delay);
+                return;
+            }
            if (!this.detectionState.screenDetector) {
                this.emit('detector-error', {
                    code: ErrorCode.INTERNAL_ERROR,
@@ -4748,11 +4755,8 @@ class FaceDetectionEngine extends SimpleEventEmitter {
            }
            // Screen capture detection, only the face region is of interest
            const screenResult = this.detectionState.screenDetector.detectAuto(bgrFace, grayFace);
-            bgrFace.delete();
-            grayFace.delete();
            // The screen capture detector is ready, so its verdict can be trusted
            if (screenResult.isScreenCapture) {
-                grayFrame.delete();
                this.emitDetectorInfo({ code: DetectionCode.FACE_NOT_REAL, message: screenResult.getMessage(), screenConfidence: screenResult.confidenceScore });
                this.emitDebug('screen-capture-detection', 'Screen capture detected - possible video replay attack', {
                    confidence: screenResult.confidenceScore,
@@ -4767,13 +4771,18 @@ class FaceDetectionEngine extends SimpleEventEmitter {
            if (this.detectionState.motionDetector.isReady()) {
                // The motion detector is ready, so its verdict can be trusted
                if (!motionResult.isLively) {
-                    grayFrame.delete();
                    this.emitDebug('motion-detection', 'Motion liveness check failed - possible photo attack', {
                        motionScore: motionResult.motionScore,
                        keypointVariance: motionResult.keypointVariance,
+                        opticalFlowMagnitude: motionResult.opticalFlowMagnitude,
+                        eyeMotionScore: motionResult.eyeMotionScore,
+                        mouthMotionScore: motionResult.mouthMotionScore,
                        motionType: motionResult.motionType,
                        minMotionScore: this.options.motion_liveness_min_motion_score,
-                        minKeypointVariance: this.options.motion_liveness_min_keypoint_variance
+                        minKeypointVariance: this.options.motion_liveness_min_keypoint_variance,
+                        minOpticalFlowThreshold: this.options.motion_liveness_min_optical_flow_threshold,
+                        minMotionConsistencyThreshold: this.options.motion_liveness_motion_consistency_threshold,
+                        details: motionResult.details
                    }, 'warn');
                    this.emitDetectorInfo({
                        code: DetectionCode.FACE_NOT_LIVE,
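The failure branch now also logs the optical-flow, eye, and mouth motion scores, the configured thresholds, and a details object. Read off the fields this hunk touches, the motion result carries at least the following shape (a reconstruction for orientation only, not the package's published typings):

/**
 * @typedef {Object} MotionLivenessResult
 * @property {boolean} isLively             overall liveness verdict
 * @property {number} motionScore           aggregate motion score
 * @property {number} keypointVariance      variance of tracked keypoints
 * @property {number} opticalFlowMagnitude  logged as of alpha.5
 * @property {number} eyeMotionScore        logged as of alpha.5
 * @property {number} mouthMotionScore      logged as of alpha.5
 * @property {string} motionType            classified motion category
 * @property {Object} details               contents not shown in this diff
 */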
@@ -4822,7 +4831,6 @@ class FaceDetectionEngine extends SimpleEventEmitter {
                this.scheduleNextDetection(this.options.detect_error_retry_delay);
                return;
            }
-            grayFrame.delete();
            // The current frame passed the routine checks
            this.emitDetectorInfo({ passed: true, code: DetectionCode.FACE_CHECK_PASS, faceRatio: faceRatio, faceFrontal: frontal, imageQuality: qualityResult.score });
            // Handle the logic for the different detection phases
@@ -4868,18 +4876,15 @@ class FaceDetectionEngine extends SimpleEventEmitter {
            this.scheduleNextDetection(this.options.detect_error_retry_delay);
        }
        finally {
-            if (grayFrame) {
+            // Delete all Mat objects in one place, in the finally block
+            if (grayFrame)
                grayFrame.delete();
-            }
-            if (bgrFrame) {
+            if (bgrFrame)
                bgrFrame.delete();
-            }
-            if (bgrFace) {
+            if (bgrFace)
                bgrFace.delete();
-            }
-            if (grayFace) {
+            if (grayFace)
                grayFace.delete();
-            }
        }
    }
    /**
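Taken together, the hunks from 4702 to 4885 replace scattered per-branch delete() calls with a single ownership model: the four Mats are declared null up front, assigned inside try, and released once in finally, which runs on every exit path including the early returns. The old layout deleted some Mats both in a branch and again in the finally block, and OpenCV.js Mats are manual allocations on the WASM heap, so both leaks and double-deletes were possible. A distilled sketch of the adopted pattern, with illustrative names rather than the package's own code:

// Illustrative sketch of the try/finally ownership pattern adopted above.
// cv is an initialized OpenCV.js module; canvas is an HTMLCanvasElement.
function meanBrightness(cv, canvas) {
    let rgba = null;
    let gray = null;
    try {
        rgba = cv.imread(canvas);                  // allocates on the WASM heap
        if (rgba.empty()) return null;             // early return: finally still runs
        gray = new cv.Mat();
        cv.cvtColor(rgba, gray, cv.COLOR_RGBA2GRAY);
        return cv.mean(gray)[0];
    }
    finally {
        // Null checks matter: an early return can leave gray unassigned,
        // and calling delete() twice on the same Mat throws.
        if (gray) gray.delete();
        if (rgba) rgba.delete();
    }
}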
@@ -5139,7 +5144,7 @@ class FaceDetectionEngine extends SimpleEventEmitter {
                return null;
            }
            this.frameCanvasContext.drawImage(this.videoElement, 0, 0, videoWidth_actual, videoHeight_actual);
-            this.emitDebug('capture', 'Frame drawn to canvas');
+            this.emitDebug('capture', 'Frame drawn to canvas as ' + videoHeight_actual + 'x' + videoWidth_actual);
            return this.frameCanvasElement;
        }
        catch (e) {