@sssxyd/face-liveness-detector 0.4.0-alpha.4 → 0.4.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.esm.js CHANGED
@@ -115,16 +115,16 @@ const DEFAULT_OPTIONS = {
      motion_liveness_motion_consistency_threshold: 0.3,
      motion_liveness_strict_photo_detection: false,
      // Screen Capture Detection Settings
-     screen_capture_confidence_threshold: 0.6,
+     screen_capture_confidence_threshold: 0.7,
      screen_capture_detection_strategy: 'adaptive',
      screen_moire_pattern_threshold: 0.65,
      screen_moire_pattern_enable_dct: true,
      screen_moire_pattern_enable_edge_detection: true,
      screen_color_saturation_threshold: 40,
-     screen_color_rgb_correlation_threshold: 0.85,
+     screen_color_rgb_correlation_threshold: 0.75,
      screen_color_pixel_entropy_threshold: 6.5,
      screen_color_gradient_smoothness_threshold: 0.7,
-     screen_color_confidence_threshold: 0.6,
+     screen_color_confidence_threshold: 0.65,
      screen_rgb_low_freq_start_percent: 0.15,
      screen_rgb_low_freq_end_percent: 0.35,
      screen_rgb_energy_ratio_normalization_factor: 10,
@@ -132,7 +132,7 @@ const DEFAULT_OPTIONS = {
      screen_rgb_energy_score_weight: 0.40,
      screen_rgb_asymmetry_score_weight: 0.40,
      screen_rgb_difference_factor_weight: 0.20,
-     screen_rgb_confidence_threshold: 0.60,
+     screen_rgb_confidence_threshold: 0.65,
  };
  /**
   * Merge user configuration with defaults
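
The four threshold bumps above make screen-capture rejection stricter out of the box. A minimal sketch of overriding them per deployment, assuming the package exports FaceDetectionEngine and that its constructor accepts a partial options object which the "Merge user configuration with defaults" helper merges into DEFAULT_OPTIONS; the import path and constructor signature are not confirmed by this diff:

// Hypothetical usage; only the option keys themselves appear in the hunk above.
import { FaceDetectionEngine } from '@sssxyd/face-liveness-detector';

const engine = new FaceDetectionEngine({
    screen_capture_confidence_threshold: 0.75,    // stricter than the new 0.7 default
    screen_color_rgb_correlation_threshold: 0.75,
    screen_color_confidence_threshold: 0.65,
    screen_rgb_confidence_threshold: 0.65
});
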
@@ -4516,9 +4516,16 @@ class FaceDetectionEngine extends SimpleEventEmitter {
      };
      if (this.videoElement) {
          this.videoElement.addEventListener('canplay', onCanPlay, { once: true });
-         this.videoElement.play().catch(err => {
+         this.videoElement.play().catch((err) => {
              clearTimeout(timeout);
              cleanup();
+             const errorInfo = this.extractErrorInfo(err);
+             this.emitDebug('video-setup', 'Failed to play video', {
+                 error: errorInfo.message,
+                 stack: errorInfo.stack,
+                 name: errorInfo.name,
+                 cause: errorInfo.cause
+             }, 'error');
              reject(err);
          });
      }
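
extractErrorInfo is called here but its body is outside this diff. A plausible shape, inferred only from the four fields logged above and offered as an assumption rather than the package's actual helper:

// Hypothetical sketch of extractErrorInfo; the real implementation may differ.
function extractErrorInfo(err) {
    if (err instanceof Error) {
        return { message: err.message, stack: err.stack, name: err.name, cause: err.cause };
    }
    // Defensive fallback for non-Error rejection values.
    return { message: String(err), stack: undefined, name: typeof err, cause: undefined };
}
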
@@ -4582,6 +4589,7 @@ class FaceDetectionEngine extends SimpleEventEmitter {
   * Reset detection state
   */
  resetDetectionState() {
+     this.emitDebug('detection', 'Resetting detection state...');
      this.detectionState.reset();
      this.actualVideoWidth = 0;
      this.actualVideoHeight = 0;
@@ -4702,34 +4710,34 @@ class FaceDetectionEngine extends SimpleEventEmitter {
          this.scheduleNextDetection(this.options.detect_error_retry_delay);
          return;
      }
-     // Current frame image
-     const bgrFrame = drawCanvasToMat(this.cv, frameCanvas, false);
-     if (!bgrFrame) {
-         this.emitDebug('detection', 'Failed to convert canvas to OpenCV Mat', {}, 'warn');
-         this.scheduleNextDetection(this.options.detect_error_retry_delay);
-         return;
-     }
-     // Grayscale version of the current frame
-     const grayFrame = matToGray(this.cv, bgrFrame);
-     if (!grayFrame) {
-         bgrFrame.delete();
-         this.emitDebug('detection', 'Failed to convert frame Mat to grayscale', {}, 'warn');
-         this.scheduleNextDetection(this.options.detect_error_retry_delay);
-         return;
-     }
-     // Extract the face region and its grayscale version
-     const bgrFace = bgrFrame.roi(new this.cv.Rect(faceBox[0], faceBox[1], faceBox[2], faceBox[3]));
-     const grayFace = matToGray(this.cv, bgrFace);
-     if (!grayFace) {
-         bgrFrame.delete();
-         bgrFace.delete();
-         this.emitDebug('detection', 'Failed to convert face Mat to grayscale', {}, 'warn');
-         this.scheduleNextDetection(this.options.detect_error_retry_delay);
-         return;
-     }
-     // Release Mats that are no longer needed
-     bgrFrame.delete();
+     // All Mat objects that must be deleted
+     let bgrFrame = null;
+     let grayFrame = null;
+     let bgrFace = null;
+     let grayFace = null;
      try {
+         // Current frame image
+         bgrFrame = drawCanvasToMat(this.cv, frameCanvas, false);
+         if (!bgrFrame) {
+             this.emitDebug('detection', 'Failed to convert canvas to OpenCV Mat', {}, 'warn');
+             this.scheduleNextDetection(this.options.detect_error_retry_delay);
+             return;
+         }
+         // Grayscale version of the current frame
+         grayFrame = matToGray(this.cv, bgrFrame);
+         if (!grayFrame) {
+             this.emitDebug('detection', 'Failed to convert frame Mat to grayscale', {}, 'warn');
+             this.scheduleNextDetection(this.options.detect_error_retry_delay);
+             return;
+         }
+         // Extract the face region and its grayscale version
+         bgrFace = bgrFrame.roi(new this.cv.Rect(faceBox[0], faceBox[1], faceBox[2], faceBox[3]));
+         grayFace = matToGray(this.cv, bgrFace);
+         if (!grayFace) {
+             this.emitDebug('detection', 'Failed to convert face Mat to grayscale', {}, 'warn');
+             this.scheduleNextDetection(this.options.detect_error_retry_delay);
+             return;
+         }
          if (!this.detectionState.screenDetector) {
              this.emit('detector-error', {
                  code: ErrorCode.INTERNAL_ERROR,
@@ -4748,15 +4756,37 @@ class FaceDetectionEngine extends SimpleEventEmitter {
          }
          // Screen capture detection, only the face region is of interest
          const screenResult = this.detectionState.screenDetector.detectAuto(bgrFace, grayFace);
-         bgrFace.delete();
-         grayFace.delete();
          // The screen capture detector is ready, so its verdict can be trusted
          if (screenResult.isScreenCapture) {
-             grayFrame.delete();
+             // Extract each detection method's confidence from executedMethods
+             const methodConfidences = screenResult.executedMethods.reduce((acc, method) => {
+                 if (method.method.includes('Moiré')) {
+                     acc.moireConfidence = method.confidence;
+                 }
+                 else if (method.method.includes('Color')) {
+                     acc.colorConfidence = method.confidence;
+                 }
+                 else if (method.method.includes('RGB')) {
+                     acc.rgbConfidence = method.confidence;
+                 }
+                 return acc;
+             }, {});
              this.emitDetectorInfo({ code: DetectionCode.FACE_NOT_REAL, message: screenResult.getMessage(), screenConfidence: screenResult.confidenceScore });
              this.emitDebug('screen-capture-detection', 'Screen capture detected - possible video replay attack', {
-                 confidence: screenResult.confidenceScore,
-                 minConfidence: this.options.screen_capture_confidence_threshold
+                 overallConfidence: screenResult.confidenceScore,
+                 minConfidenceThreshold: this.options.screen_capture_confidence_threshold,
+                 moireConfidence: methodConfidences.moireConfidence ?? 'N/A',
+                 colorConfidence: methodConfidences.colorConfidence ?? 'N/A',
+                 rgbConfidence: methodConfidences.rgbConfidence ?? 'N/A',
+                 detectionStrategy: screenResult.strategy,
+                 riskLevel: screenResult.riskLevel,
+                 processingTimeMs: screenResult.processingTimeMs,
+                 executedMethods: screenResult.executedMethods.map((m) => ({
+                     method: m.method,
+                     isScreenCapture: m.isScreenCapture,
+                     confidence: m.confidence
+                 })),
+                 skippedMethods: screenResult.skippedMethods
              }, 'warn');
              this.resetDetectionState();
              this.scheduleNextDetection(this.options.detect_error_retry_delay);
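
The enriched payload above (per-method confidences, strategy, risk level, timing) flows through emitDebug. A hedged sketch of consuming it on the application side, assuming the engine, which extends SimpleEventEmitter, re-emits debug records as a 'debug' event carrying the category, message, data, and level arguments; the event name and record shape are assumptions, not confirmed by this diff:

// Hypothetical listener; the data field names mirror the emitDebug call above.
engine.on('debug', ({ category, message, data, level }) => {
    if (category === 'screen-capture-detection' && level === 'warn') {
        console.warn(message, {
            overall: data.overallConfidence,
            moire: data.moireConfidence,
            color: data.colorConfidence,
            rgb: data.rgbConfidence,
            strategy: data.detectionStrategy
        });
    }
});
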
@@ -4767,13 +4797,18 @@ class FaceDetectionEngine extends SimpleEventEmitter {
          if (this.detectionState.motionDetector.isReady()) {
              // The motion detector is ready, so its verdict can be trusted
              if (!motionResult.isLively) {
-                 grayFrame.delete();
                  this.emitDebug('motion-detection', 'Motion liveness check failed - possible photo attack', {
                      motionScore: motionResult.motionScore,
                      keypointVariance: motionResult.keypointVariance,
+                     opticalFlowMagnitude: motionResult.opticalFlowMagnitude,
+                     eyeMotionScore: motionResult.eyeMotionScore,
+                     mouthMotionScore: motionResult.mouthMotionScore,
                      motionType: motionResult.motionType,
                      minMotionScore: this.options.motion_liveness_min_motion_score,
-                     minKeypointVariance: this.options.motion_liveness_min_keypoint_variance
+                     minKeypointVariance: this.options.motion_liveness_min_keypoint_variance,
+                     minOpticalFlowThreshold: this.options.motion_liveness_min_optical_flow_threshold,
+                     minMotionConsistencyThreshold: this.options.motion_liveness_motion_consistency_threshold,
+                     details: motionResult.details
                  }, 'warn');
                  this.emitDetectorInfo({
                      code: DetectionCode.FACE_NOT_LIVE,
@@ -4822,7 +4857,6 @@ class FaceDetectionEngine extends SimpleEventEmitter {
              this.scheduleNextDetection(this.options.detect_error_retry_delay);
              return;
          }
-         grayFrame.delete();
          // The current frame passed the routine checks
          this.emitDetectorInfo({ passed: true, code: DetectionCode.FACE_CHECK_PASS, faceRatio: faceRatio, faceFrontal: frontal, imageQuality: qualityResult.score });
          // Handle logic for the different detection stages
@@ -4868,18 +4902,15 @@ class FaceDetectionEngine extends SimpleEventEmitter {
          this.scheduleNextDetection(this.options.detect_error_retry_delay);
      }
      finally {
-         if (grayFrame) {
+         // Delete all Mat objects in one place, in the finally block
+         if (grayFrame)
              grayFrame.delete();
-         }
-         if (bgrFrame) {
+         if (bgrFrame)
              bgrFrame.delete();
-         }
-         if (bgrFace) {
+         if (bgrFace)
              bgrFace.delete();
-         }
-         if (grayFace) {
+         if (grayFace)
              grayFace.delete();
-         }
      }
  }
  /**
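
Taken together, the hunks above replace scattered early delete() calls with a single Mat lifecycle: declare the Mats as null, allocate them inside try, and free everything once in finally, so exceptions and early returns release the Mats in one place. A standalone sketch of the same pattern using plain OpenCV.js calls (cv.imread, cv.cvtColor) instead of the package's drawCanvasToMat/matToGray helpers:

// Minimal illustration of the null-init / try / finally Mat lifecycle.
function processFrame(cv, frameCanvas, faceBox) {
    let rgbaFrame = null;
    let grayFrame = null;
    let faceRegion = null;
    try {
        rgbaFrame = cv.imread(frameCanvas);                   // RGBA Mat read from a canvas
        grayFrame = new cv.Mat();
        cv.cvtColor(rgbaFrame, grayFrame, cv.COLOR_RGBA2GRAY);
        faceRegion = rgbaFrame.roi(new cv.Rect(faceBox[0], faceBox[1], faceBox[2], faceBox[3]));
        // ... run detectors on faceRegion / grayFrame; early returns stay leak-free ...
    }
    finally {
        // roi() shares the parent's pixel data but still owns a wrapper that must be freed.
        if (faceRegion) faceRegion.delete();
        if (grayFrame) grayFrame.delete();
        if (rgbaFrame) rgbaFrame.delete();
    }
}
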
@@ -5139,7 +5170,7 @@ class FaceDetectionEngine extends SimpleEventEmitter {
          return null;
      }
      this.frameCanvasContext.drawImage(this.videoElement, 0, 0, videoWidth_actual, videoHeight_actual);
-     this.emitDebug('capture', 'Frame drawn to canvas');
+     this.emitDebug('capture', 'Frame drawn to canvas as ' + videoHeight_actual + 'x' + videoWidth_actual);
      return this.frameCanvasElement;
  }
  catch (e) {