@trustchex/react-native-sdk 1.360.0 → 1.362.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/TrustchexSDK.podspec +3 -1
  2. package/android/src/main/java/com/trustchex/reactnativesdk/TrustchexSDKPackage.kt +0 -13
  3. package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraManager.kt +0 -8
  4. package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraView.kt +59 -39
  5. package/android/src/main/java/com/trustchex/reactnativesdk/opencv/OpenCVModule.kt +94 -13
  6. package/ios/Camera/TrustchexCameraManager.m +0 -2
  7. package/ios/Camera/TrustchexCameraManager.swift +0 -7
  8. package/ios/Camera/TrustchexCameraView.swift +16 -47
  9. package/ios/OpenCV/OpenCVHelper.h +17 -0
  10. package/ios/OpenCV/OpenCVHelper.mm +128 -0
  11. package/ios/OpenCV/OpenCVModule.h +6 -0
  12. package/ios/OpenCV/OpenCVModule.mm +141 -0
  13. package/ios/TrustchexSDK-Bridging-Header.h +8 -0
  14. package/lib/module/Screens/Debug/MRZTestScreen.js +175 -0
  15. package/lib/module/Shared/Components/DebugNavigationPanel.js +4 -0
  16. package/lib/module/Shared/Components/EIDScanner.js +0 -78
  17. package/lib/module/Shared/Components/FaceCamera.js +6 -3
  18. package/lib/module/Shared/Components/IdentityDocumentCamera.js +199 -153
  19. package/lib/module/Shared/Components/QrCodeScannerCamera.js +0 -3
  20. package/lib/module/Shared/Config/camera-enhancement.config.js +11 -12
  21. package/lib/module/Shared/Libs/mrz.utils.js +265 -0
  22. package/lib/module/Trustchex.js +4 -0
  23. package/lib/module/index.js +1 -0
  24. package/lib/module/version.js +1 -1
  25. package/lib/typescript/src/Screens/Debug/MRZTestScreen.d.ts +3 -0
  26. package/lib/typescript/src/Screens/Debug/MRZTestScreen.d.ts.map +1 -0
  27. package/lib/typescript/src/Shared/Components/DebugNavigationPanel.d.ts.map +1 -1
  28. package/lib/typescript/src/Shared/Components/EIDScanner.d.ts.map +1 -1
  29. package/lib/typescript/src/Shared/Components/FaceCamera.d.ts.map +1 -1
  30. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts +3 -1
  31. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts.map +1 -1
  32. package/lib/typescript/src/Shared/Components/QrCodeScannerCamera.d.ts.map +1 -1
  33. package/lib/typescript/src/Shared/Components/TrustchexCamera.d.ts +0 -19
  34. package/lib/typescript/src/Shared/Components/TrustchexCamera.d.ts.map +1 -1
  35. package/lib/typescript/src/Shared/Config/camera-enhancement.config.d.ts +10 -10
  36. package/lib/typescript/src/Shared/Libs/mrz.utils.d.ts +18 -1
  37. package/lib/typescript/src/Shared/Libs/mrz.utils.d.ts.map +1 -1
  38. package/lib/typescript/src/Trustchex.d.ts.map +1 -1
  39. package/lib/typescript/src/index.d.ts +3 -0
  40. package/lib/typescript/src/index.d.ts.map +1 -1
  41. package/lib/typescript/src/version.d.ts +1 -1
  42. package/package.json +2 -1
  43. package/src/Screens/Debug/MRZTestScreen.tsx +209 -0
  44. package/src/Shared/Components/DebugNavigationPanel.tsx +5 -0
  45. package/src/Shared/Components/EIDScanner.tsx +0 -53
  46. package/src/Shared/Components/FaceCamera.tsx +6 -3
  47. package/src/Shared/Components/IdentityDocumentCamera.tsx +246 -149
  48. package/src/Shared/Components/QrCodeScannerCamera.tsx +0 -9
  49. package/src/Shared/Components/TrustchexCamera.tsx +0 -20
  50. package/src/Shared/Config/camera-enhancement.config.ts +6 -6
  51. package/src/Shared/Libs/mrz.utils.ts +289 -1
  52. package/src/Trustchex.tsx +5 -0
  53. package/src/index.tsx +3 -0
  54. package/src/version.ts +1 -1
  55. package/android/src/main/java/com/trustchex/reactnativesdk/mrz/MRZValidationModule.kt +0 -785
  56. package/android/src/main/java/com/trustchex/reactnativesdk/mrz/MRZValidator.kt +0 -419
  57. package/ios/MRZValidation.m +0 -39
  58. package/ios/MRZValidation.swift +0 -802
  59. package/ios/MRZValidator.swift +0 -466
@@ -45,9 +45,8 @@ class TrustchexCameraView: UIView {
45
45
  @objc var enableFaceDetection: Bool = false
46
46
  @objc var enableTextRecognition: Bool = false
47
47
  @objc var enableBarcodeScanning: Bool = false
48
- @objc var enableMrzValidation: Bool = false
49
48
  @objc var includeBase64: Bool = false
50
- @objc var targetFps: NSNumber = 6 {
49
+ @objc var targetFps: NSNumber = 10 {
51
50
  didSet {
52
51
  _targetFps = max(1, min(30, targetFps.int32Value))
53
52
  }
@@ -56,7 +55,7 @@ class TrustchexCameraView: UIView {
56
55
  private var _cameraType: String = "back"
57
56
  private var _torchEnabled = false
58
57
  private var _frameProcessingEnabled = false
59
- private var _targetFps: Int32 = 6
58
+ private var _targetFps: Int32 = 10
60
59
  private var lastFrameTime: TimeInterval = 0
61
60
  private var isProcessing = false
62
61
  private var isRecording = false
@@ -122,25 +121,19 @@ class TrustchexCameraView: UIView {
122
121
  // Front camera: Use Full HD for high-quality liveness detection
123
122
  if session.canSetSessionPreset(.hd1920x1080) {
124
123
  session.sessionPreset = .hd1920x1080
125
- print("[TrustchexCamera] Front camera: Using Full HD 1920x1080")
126
124
  } else if session.canSetSessionPreset(.hd1280x720) {
127
125
  session.sessionPreset = .hd1280x720
128
- print("[TrustchexCamera] Front camera: Fallback to HD 1280x720")
129
126
  } else {
130
127
  session.sessionPreset = .high
131
- print("[TrustchexCamera] Front camera: Using high preset")
132
128
  }
133
129
  } else {
134
130
  // Back camera: Use Full HD for document scanning
135
131
  if session.canSetSessionPreset(.hd1920x1080) {
136
132
  session.sessionPreset = .hd1920x1080
137
- print("[TrustchexCamera] Back camera: Using Full HD 1920x1080")
138
133
  } else if session.canSetSessionPreset(.hd1280x720) {
139
134
  session.sessionPreset = .hd1280x720
140
- print("[TrustchexCamera] Back camera: Fallback to HD 1280x720")
141
135
  } else {
142
136
  session.sessionPreset = .high
143
- print("[TrustchexCamera] Back camera: Using high preset")
144
137
  }
145
138
  }
146
139
  let camera = selectBestCamera(for: cameraPosition)
@@ -203,13 +196,17 @@ class TrustchexCameraView: UIView {
203
196
  if supportedTypes.contains(.qr) { typesToEnable.append(.qr) }
204
197
  if supportedTypes.contains(.code128) { typesToEnable.append(.code128) }
205
198
  if supportedTypes.contains(.code39) { typesToEnable.append(.code39) }
199
+ if supportedTypes.contains(.code39Mod43) { typesToEnable.append(.code39Mod43) }
200
+ if supportedTypes.contains(.code93) { typesToEnable.append(.code93) }
206
201
  if supportedTypes.contains(.ean13) { typesToEnable.append(.ean13) }
207
202
  if supportedTypes.contains(.ean8) { typesToEnable.append(.ean8) }
203
+ if supportedTypes.contains(.upce) { typesToEnable.append(.upce) }
204
+ if supportedTypes.contains(.interleaved2of5) { typesToEnable.append(.interleaved2of5) }
205
+ if supportedTypes.contains(.itf14) { typesToEnable.append(.itf14) }
208
206
  if supportedTypes.contains(.aztec) { typesToEnable.append(.aztec) }
209
207
  if supportedTypes.contains(.dataMatrix) { typesToEnable.append(.dataMatrix) }
210
208
 
211
209
  metadataOutput.metadataObjectTypes = typesToEnable
212
- print("[TrustchexCamera] Native barcode scanner enabled with types: \(typesToEnable)")
213
210
  }
214
211
  self.metadataOutput = metadataOutput
215
212
 
@@ -253,19 +250,16 @@ class TrustchexCameraView: UIView {
253
250
  // For document scanning, prefer wide angle camera for all models
254
251
  // This provides consistent behavior across iPhone 15, 15 Pro, and other devices
255
252
  if let wideAngleCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
256
- print("[TrustchexCamera] Selected Wide Angle camera for document scanning")
257
253
  return wideAngleCamera
258
254
  }
259
255
 
260
256
  // Fallback: Try dual camera
261
257
  if let dualCamera = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) {
262
- print("[TrustchexCamera] Selected Dual Camera for document scanning")
263
258
  return dualCamera
264
259
  }
265
260
 
266
261
  // Fallback: Try triple camera
267
262
  if let tripleCamera = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: .back) {
268
- print("[TrustchexCamera] Selected Triple Camera for document scanning")
269
263
  return tripleCamera
270
264
  }
271
265
 
@@ -350,7 +344,7 @@ class TrustchexCameraView: UIView {
350
344
 
351
345
  camera.unlockForConfiguration()
352
346
  } catch {
353
- print("Failed to configure camera: \(error)")
347
+ // Failed to configure camera
354
348
  }
355
349
  }
356
350
 
@@ -382,7 +376,7 @@ class TrustchexCameraView: UIView {
382
376
  }
383
377
  camera.unlockForConfiguration()
384
378
  } catch {
385
- print("Failed to set torch: \(error)")
379
+ // Failed to set torch
386
380
  }
387
381
  }
388
382
  }
@@ -415,10 +409,6 @@ class TrustchexCameraView: UIView {
415
409
  enableBarcodeScanning = enabled
416
410
  }
417
411
 
418
- func setEnableMrzValidation(_ enabled: Bool) {
419
- enableMrzValidation = enabled
420
- }
421
-
422
412
  func setIncludeBase64(_ enabled: Bool) {
423
413
  includeBase64 = enabled
424
414
  }
@@ -452,7 +442,7 @@ class TrustchexCameraView: UIView {
452
442
 
453
443
  camera.unlockForConfiguration()
454
444
  } catch {
455
- print("Failed to set focus point: \(error)")
445
+ // Failed to set focus point
456
446
  }
457
447
  }
458
448
  }
@@ -468,7 +458,7 @@ class TrustchexCameraView: UIView {
468
458
  camera.setExposureTargetBias(bias, completionHandler: nil)
469
459
  camera.unlockForConfiguration()
470
460
  } catch {
471
- print("Failed to set exposure: \(error)")
461
+ // Failed to set exposure
472
462
  }
473
463
  }
474
464
  }
@@ -544,7 +534,6 @@ class TrustchexCameraView: UIView {
544
534
  // Check both our flag and the actual AVFoundation state
545
535
  guard self.isRecording || movieOutput.isRecording else { return }
546
536
 
547
- print("[TrustchexCameraView] *** cancelRecording: marking as cancelled ***")
548
537
  self.isCancelledRecording = true
549
538
  movieOutput.stopRecording()
550
539
  // Do NOT set isRecording = false here — let the delegate handle it
@@ -558,10 +547,9 @@ class TrustchexCameraView: UIView {
558
547
  let fileURL = URL(fileURLWithPath: filePath)
559
548
  if FileManager.default.fileExists(atPath: filePath) {
560
549
  try FileManager.default.removeItem(at: fileURL)
561
- print("[TrustchexCameraView] Successfully deleted recording at: \(filePath)")
562
550
  }
563
551
  } catch {
564
- print("[TrustchexCameraView] Failed to delete recording: \(error.localizedDescription)")
552
+ // Failed to delete recording
565
553
  }
566
554
  }
567
555
  }
@@ -614,7 +602,6 @@ class TrustchexCameraView: UIView {
614
602
 
615
603
  finishFallbackWorkItem = workItem
616
604
  DispatchQueue.main.asyncAfter(deadline: .now() + 0.5, execute: workItem)
617
- print("[TrustchexCameraView] *** Fallback check scheduled for 0.5s from now ***")
618
605
  }
619
606
 
620
607
  private func switchCamera(to type: String) {
@@ -742,9 +729,6 @@ extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
742
729
 
743
730
  let isBufferLandscape = pixelWidth > pixelHeight
744
731
 
745
- // Log VisionImage metadata to understand coordinate space
746
- NSLog("[VisionImage] Buffer: \(pixelWidth)x\(pixelHeight) | Orientation: \(isBufferLandscape ? "landscape→portrait (.right)" : "portrait (.up)") | Will report as: \(portraitWidth)x\(portraitHeight)")
747
-
748
732
  // Create oriented CIImage for ML Kit processing
749
733
  let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
750
734
  let orientedImage = isBufferLandscape ? ciImage.oriented(.right) : ciImage
@@ -757,6 +741,9 @@ extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
757
741
  }
758
742
  let visionImage = VisionImage(image: UIImage(cgImage: cgImage))
759
743
  visionImage.orientation = .up // Already oriented correctly
744
+
745
+ // Use original image for text recognition
746
+ let textVisionImage = visionImage
760
747
 
761
748
  // Generate JPEG base64 only when JS side explicitly needs the image
762
749
  // NOTE: Do NOT auto-generate for face detection - too expensive, causes frame drops
@@ -818,10 +805,9 @@ extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
818
805
  let textSemaphore = DispatchSemaphore(value: 0)
819
806
  var resultText = ""
820
807
  if enableTextRecognition {
821
- textRecognizer.process(visionImage) { text, error in
808
+ textRecognizer.process(textVisionImage) { text, error in
822
809
  if let text = text {
823
810
  resultText = text.text
824
- NSLog("[Text Recognition] Found \(text.blocks.count) blocks | Buffer: \(pixelWidth)x\(pixelHeight) | Portrait: \(portraitWidth)x\(portraitHeight) | Landscape: \(isBufferLandscape)")
825
811
  for block in text.blocks {
826
812
  var blockMap: [String: Any] = ["text": block.text]
827
813
  let bb = block.frame
@@ -845,14 +831,12 @@ extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
845
831
  blockY = pixelWidth - Int(bb.origin.x) - Int(bb.width)
846
832
  blockWidth = Int(bb.height)
847
833
  blockHeight = Int(bb.width)
848
- NSLog("[Text Block] '\(block.text.prefix(8))...' | ROTATED: landscape(\(Int(bb.origin.x)),\(Int(bb.origin.y)),\(Int(bb.width)),\(Int(bb.height))) → portrait(x:\(blockX) y:\(blockY) w:\(blockWidth) h:\(blockHeight))")
849
834
  } else {
850
835
  // Already portrait, use directly
851
836
  blockX = Int(bb.origin.x)
852
837
  blockY = Int(bb.origin.y)
853
838
  blockWidth = Int(bb.width)
854
839
  blockHeight = Int(bb.height)
855
- NSLog("[Text Block] '\(block.text.prefix(8))...' | DIRECT: x:\(blockX) y:\(blockY) w:\(blockWidth) h:\(blockHeight)")
856
840
  }
857
841
 
858
842
  blockMap["blockFrame"] = [
@@ -887,13 +871,6 @@ extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
887
871
  _ = semaphore.wait(timeout: .now() + 2.0)
888
872
  _ = textSemaphore.wait(timeout: .now() + 2.0)
889
873
 
890
- // MRZ validation (if enabled and text was recognized)
891
- var mrzResultDict: [String: Any]? = nil
892
- if enableMrzValidation && enableTextRecognition && !resultText.isEmpty {
893
- let mrzResult = MRZValidator().validateWithCorrections(resultText)
894
- mrzResultDict = mrzResult.toDictionary()
895
- }
896
-
897
874
  // Only compute brightness if we haven't timed out or crashed
898
875
  // Brightness calculation restricted to scanning frame area (between 36% from top and 36% from bottom, 5% margins on sides)
899
876
  let brightness = computeBrightness(from: pixelBuffer, width: portraitWidth, height: portraitHeight)
@@ -921,9 +898,6 @@ extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
921
898
  if enableBarcodeScanning {
922
899
  frameData["barcodes"] = barcodesArray
923
900
  }
924
- if let mrzDict = mrzResultDict {
925
- frameData["mrzResult"] = mrzDict
926
- }
927
901
 
928
902
  DispatchQueue.main.async { [weak self] in
929
903
  self?.onFrameAvailable?(["frame": frameData])
@@ -1013,7 +987,6 @@ extension TrustchexCameraView: AVCaptureFileOutputRecordingDelegate {
1013
987
 
1014
988
  // If recording was cancelled, clean up the temp file and don't send any callback
1015
989
  if wasCancelled {
1016
- print("[TrustchexCameraView] *** Recording was cancelled, cleaning up temp file ***")
1017
990
  try? FileManager.default.removeItem(at: outputFileURL)
1018
991
  return
1019
992
  }
@@ -1110,7 +1083,6 @@ extension TrustchexCameraView: AVCaptureMetadataOutputObjectsDelegate {
1110
1083
  }
1111
1084
 
1112
1085
  let valuePreview = String(stringValue.prefix(50))
1113
- NSLog("[Native Barcode] ✓ FAST DETECTION: \(formatName) - \(valuePreview)")
1114
1086
 
1115
1087
  var barcodeMap: [String: Any] = [
1116
1088
  "rawValue": stringValue,
@@ -1139,9 +1111,6 @@ extension TrustchexCameraView: AVCaptureMetadataOutputObjectsDelegate {
1139
1111
  let portraitRight = Int((1.0 - bounds.minY) * CGFloat(frameWidth))
1140
1112
  let portraitBottom = Int(bounds.maxX * CGFloat(frameHeight))
1141
1113
 
1142
- NSLog("[Native Barcode] Normalized bounds: (%.3f, %.3f) to (%.3f, %.3f)", bounds.minX, bounds.minY, bounds.maxX, bounds.maxY)
1143
- NSLog("[Native Barcode] Frame: \(frameWidth)x\(frameHeight), Portrait coords: (\(portraitLeft), \(portraitTop)) to (\(portraitRight), \(portraitBottom))")
1144
-
1145
1114
  barcodeMap["boundingBox"] = [
1146
1115
  "left": portraitLeft,
1147
1116
  "top": portraitTop,
@@ -0,0 +1,17 @@
1
+ #import <Foundation/Foundation.h>
2
+ #import <UIKit/UIKit.h>
3
+
4
+ NS_ASSUME_NONNULL_BEGIN
5
+
6
+ @interface OpenCVHelper : NSObject
7
+
8
+ /// Preprocesses an image for better OCR text recognition
9
+ /// Applies bilateral filtering, CLAHE, and sharpening to enhance text clarity
10
+ /// @param image The input UIImage to preprocess
11
+ /// @param applyThresholding Whether to apply adaptive thresholding (for binary output)
12
+ /// @return A preprocessed UIImage optimized for text recognition, or nil if preprocessing fails
13
+ + (UIImage * _Nullable)preprocessImageForOCR:(UIImage *)image applyThresholding:(BOOL)applyThresholding;
14
+
15
+ @end
16
+
17
+ NS_ASSUME_NONNULL_END
@@ -0,0 +1,128 @@
1
+ #import "OpenCVHelper.h"
2
+
3
+ #ifdef __cplusplus
4
+ #undef NO
5
+ #undef YES
6
+ #import <opencv2/opencv.hpp>
7
+ #import <opencv2/imgcodecs/ios.h>
8
+ #define NO __objc_no
9
+ #define YES __objc_yes
10
+ #endif
11
+
12
+ @implementation OpenCVHelper
13
+
14
+ // Helper: Convert UIImage to cv::Mat
15
+ + (cv::Mat)imageToMat:(UIImage *)image {
16
+ if (!image) return cv::Mat();
17
+
18
+ CGImageRef imageRef = image.CGImage;
19
+ CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
20
+ size_t width = CGImageGetWidth(imageRef);
21
+ size_t height = CGImageGetHeight(imageRef);
22
+
23
+ cv::Mat mat(static_cast<int>(height), static_cast<int>(width), CV_8UC4);
24
+
25
+ CGContextRef context = CGBitmapContextCreate(mat.data, width, height, 8, mat.step[0],
26
+ colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
27
+ if (!context) return cv::Mat();
28
+
29
+ CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
30
+ CGContextRelease(context);
31
+
32
+ cv::Mat result;
33
+ cv::cvtColor(mat, result, cv::COLOR_RGBA2RGB);
34
+ return result;
35
+ }
36
+
37
+ // Helper: Convert cv::Mat to UIImage
38
+ + (UIImage *)matToImage:(cv::Mat)mat {
39
+ if (mat.empty()) return nil;
40
+
41
+ cv::Mat rgba;
42
+ if (mat.channels() == 1) {
43
+ cv::cvtColor(mat, rgba, cv::COLOR_GRAY2RGBA);
44
+ } else if (mat.channels() == 3) {
45
+ cv::cvtColor(mat, rgba, cv::COLOR_RGB2RGBA);
46
+ } else {
47
+ rgba = mat.clone();
48
+ }
49
+
50
+ NSData *data = [NSData dataWithBytes:rgba.data length:rgba.elemSize() * rgba.total()];
51
+
52
+ CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
53
+ CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
54
+
55
+ CGImageRef imageRef = CGImageCreate(rgba.cols, rgba.rows, 8, 32, rgba.step[0],
56
+ colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault,
57
+ provider, NULL, false, kCGRenderingIntentDefault);
58
+
59
+ UIImage *image = [UIImage imageWithCGImage:imageRef];
60
+
61
+ CGImageRelease(imageRef);
62
+ CGDataProviderRelease(provider);
63
+ CGColorSpaceRelease(colorSpace);
64
+
65
+ return image;
66
+ }
67
+
68
+ + (UIImage *)preprocessImageForOCR:(UIImage *)image applyThresholding:(BOOL)applyThresholding {
69
+ @try {
70
+ if (!image) return nil;
71
+
72
+ cv::Mat mat = [self imageToMat:image];
73
+ if (mat.empty()) return nil;
74
+
75
+ // Step 1: Convert to grayscale
76
+ cv::Mat gray;
77
+ cv::cvtColor(mat, gray, cv::COLOR_RGB2GRAY);
78
+ mat.release();
79
+
80
+ // Step 2: Suppress background using blackhat morphology
81
+ cv::Mat blackhat;
82
+ cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 5));
83
+ cv::morphologyEx(gray, blackhat, cv::MORPH_BLACKHAT, kernel);
84
+ gray.release();
85
+
86
+ // Step 3: Advanced denoising - removes artifacts while keeping character details
87
+ cv::Mat denoised;
88
+ cv::fastNlMeansDenoising(blackhat, denoised, 6.0, 7, 21);
89
+ blackhat.release();
90
+
91
+ // Step 4: CLAHE for local contrast without over-amplifying noise
92
+ cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(2.0, cv::Size(8, 8));
93
+ cv::Mat enhanced;
94
+ clahe->apply(denoised, enhanced);
95
+ denoised.release();
96
+
97
+ // Step 5: Unsharp masking for clearer edges without halos
98
+ cv::Mat blurred;
99
+ cv::GaussianBlur(enhanced, blurred, cv::Size(0, 0), 1.2);
100
+ cv::Mat sharpened;
101
+ cv::addWeighted(enhanced, 1.8, blurred, -0.8, 0, sharpened);
102
+ blurred.release();
103
+ enhanced.release();
104
+
105
+ // Step 6: Normalize to full 0-255 range
106
+ // Ensures maximum contrast for ML Kit
107
+ cv::Mat result;
108
+ cv::normalize(sharpened, result, 0, 255, cv::NORM_MINMAX);
109
+ sharpened.release();
110
+
111
+ if (applyThresholding) {
112
+ cv::Mat thresholded;
113
+ cv::adaptiveThreshold(result, thresholded, 255, cv::ADAPTIVE_THRESH_GAUSSIAN_C, cv::THRESH_BINARY, 31, 10);
114
+ result.release();
115
+ result = thresholded;
116
+ }
117
+
118
+ UIImage *resultImage = [self matToImage:result];
119
+ result.release();
120
+
121
+ return resultImage;
122
+ } @catch (NSException *exception) {
123
+ NSLog(@"OpenCV preprocessing error: %@", exception.reason);
124
+ return nil;
125
+ }
126
+ }
127
+
128
+ @end
@@ -1,4 +1,10 @@
1
1
  #import <React/RCTBridgeModule.h>
2
+ #import <UIKit/UIKit.h>
2
3
 
3
4
  @interface OpenCVModule : NSObject <RCTBridgeModule>
5
+
6
+ // Synchronous method for preprocessing an image for OCR
7
+ // This is called directly from Swift camera code for better performance
8
+ - (UIImage * _Nullable)preprocessImageForOCRSync:(UIImage * _Nonnull)image applyThresholding:(BOOL)applyThresholding;
9
+
4
10
  @end
@@ -805,6 +805,147 @@ RCT_EXPORT_METHOD(detectCardBounds:(NSString *)base64Image
805
805
  resolve(nil);
806
806
  }
807
807
  }
808
+ // Preprocess image for better OCR text recognition using OpenCV
809
+ RCT_EXPORT_METHOD(preprocessImageForOCR:(NSString *)base64Image
810
+ applyThresholding:(BOOL)applyThresholding
811
+ resolver:(RCTPromiseResolveBlock)resolve
812
+ rejecter:(RCTPromiseRejectBlock)reject) {
813
+ @try {
814
+ UIImage *image = [self base64ToImage:base64Image];
815
+ if (!image) {
816
+ reject(@"DECODE_ERROR", @"Failed to decode image", nil);
817
+ return;
818
+ }
819
+
820
+ cv::Mat mat = [self imageToMat:image];
821
+ if (mat.empty()) {
822
+ reject(@"MAT_ERROR", @"Failed to convert image to Mat", nil);
823
+ return;
824
+ }
825
+
826
+ // 1. Convert to grayscale
827
+ cv::Mat gray;
828
+ cv::cvtColor(mat, gray, cv::COLOR_RGB2GRAY);
829
+ mat.release();
830
+
831
+ // 2. Apply bilateral filter for noise reduction while preserving edges
832
+ // This is better than Gaussian blur for text as it keeps edges sharp
833
+ cv::Mat filtered;
834
+ cv::bilateralFilter(gray, filtered, 9, 75, 75);
835
+ gray.release();
836
+
837
+ // 3. Apply CLAHE (Contrast Limited Adaptive Histogram Equalization)
838
+ // This enhances local contrast, making text stand out better
839
+ cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(2.0, cv::Size(8, 8));
840
+ cv::Mat enhanced;
841
+ clahe->apply(filtered, enhanced);
842
+ filtered.release();
843
+
844
+ // 4. Sharpen the image to enhance text edges
845
+ // Use unsharp masking: original + (original - blurred) * amount
846
+ cv::Mat blurred;
847
+ cv::GaussianBlur(enhanced, blurred, cv::Size(0, 0), 3.0);
848
+ cv::Mat sharpened;
849
+ cv::addWeighted(enhanced, 1.5, blurred, -0.5, 0, sharpened);
850
+ blurred.release();
851
+ enhanced.release();
852
+
853
+ // 5. Optional: Apply adaptive thresholding for binary text extraction
854
+ cv::Mat result;
855
+ if (applyThresholding) {
856
+ cv::Mat thresholded;
857
+ // Use Gaussian adaptive threshold - better for varying illumination
858
+ cv::adaptiveThreshold(sharpened, thresholded, 255,
859
+ cv::ADAPTIVE_THRESH_GAUSSIAN_C,
860
+ cv::THRESH_BINARY, 11, 2);
861
+ sharpened.release();
862
+
863
+ // Apply morphological operations to clean up noise
864
+ cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2, 2));
865
+ cv::morphologyEx(thresholded, result, cv::MORPH_CLOSE, kernel);
866
+ thresholded.release();
867
+ kernel.release();
868
+ } else {
869
+ result = sharpened;
870
+ }
871
+
872
+ UIImage *resultImage = [self matToImage:result];
873
+ result.release();
874
+
875
+ if (!resultImage) {
876
+ reject(@"ENCODE_ERROR", @"Failed to convert result to image", nil);
877
+ return;
878
+ }
879
+
880
+ NSString *resultBase64 = [self imageToBase64:resultImage];
881
+ if (resultBase64) {
882
+ resolve(resultBase64);
883
+ } else {
884
+ reject(@"ENCODE_ERROR", @"Failed to encode result", nil);
885
+ }
886
+ } @catch (NSException *exception) {
887
+ reject(@"OCR_PREPROCESS_ERROR", exception.reason, nil);
888
+ }
889
+ }
890
+
891
+ // Synchronous version for direct Swift calls
892
+ - (UIImage *)preprocessImageForOCRSync:(UIImage *)image applyThresholding:(BOOL)applyThresholding {
893
+ @try {
894
+ if (!image) return nil;
895
+
896
+ cv::Mat mat = [self imageToMat:image];
897
+ if (mat.empty()) return nil;
898
+
899
+ // 1. Convert to grayscale
900
+ cv::Mat gray;
901
+ cv::cvtColor(mat, gray, cv::COLOR_RGB2GRAY);
902
+ mat.release();
903
+
904
+ // 2. Apply bilateral filter for noise reduction while preserving edges
905
+ cv::Mat filtered;
906
+ cv::bilateralFilter(gray, filtered, 9, 75, 75);
907
+ gray.release();
908
+
909
+ // 3. Apply CLAHE (Contrast Limited Adaptive Histogram Equalization)
910
+ cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(2.0, cv::Size(8, 8));
911
+ cv::Mat enhanced;
912
+ clahe->apply(filtered, enhanced);
913
+ filtered.release();
914
+
915
+ // 4. Sharpen the image to enhance text edges
916
+ cv::Mat blurred;
917
+ cv::GaussianBlur(enhanced, blurred, cv::Size(0, 0), 3.0);
918
+ cv::Mat sharpened;
919
+ cv::addWeighted(enhanced, 1.5, blurred, -0.5, 0, sharpened);
920
+ blurred.release();
921
+ enhanced.release();
922
+
923
+ // 5. Optional: Apply adaptive thresholding for binary text extraction
924
+ cv::Mat result;
925
+ if (applyThresholding) {
926
+ cv::Mat thresholded;
927
+ cv::adaptiveThreshold(sharpened, thresholded, 255,
928
+ cv::ADAPTIVE_THRESH_GAUSSIAN_C,
929
+ cv::THRESH_BINARY, 11, 2);
930
+ sharpened.release();
931
+
932
+ cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2, 2));
933
+ cv::morphologyEx(thresholded, result, cv::MORPH_CLOSE, kernel);
934
+ thresholded.release();
935
+ kernel.release();
936
+ } else {
937
+ result = sharpened;
938
+ }
939
+
940
+ UIImage *resultImage = [self matToImage:result];
941
+ result.release();
942
+
943
+ return resultImage;
944
+ } @catch (NSException *exception) {
945
+ NSLog(@"OpenCV preprocessing error: %@", exception.reason);
946
+ return nil;
947
+ }
948
+ }
808
949
 
809
950
  @end
810
951
 
@@ -0,0 +1,8 @@
1
+ //
2
+ // TrustchexSDK-Bridging-Header.h
3
+ // TrustchexSDK
4
+ //
5
+ // Bridging header to expose Objective-C/C++ classes to Swift
6
+ //
7
+
8
+ #import "OpenCVHelper.h"