@trustchex/react-native-sdk 1.374.0 → 1.409.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraView.kt +1 -21
  2. package/android/src/main/java/com/trustchex/reactnativesdk/mlkit/MLKitModule.kt +1 -1
  3. package/android/src/main/java/com/trustchex/reactnativesdk/opencv/OpenCVModule.kt +636 -301
  4. package/ios/Camera/TrustchexCameraView.swift +9 -20
  5. package/ios/MLKit/MLKitModule.swift +1 -1
  6. package/ios/OpenCV/OpenCVHelper.h +0 -7
  7. package/ios/OpenCV/OpenCVHelper.mm +0 -60
  8. package/ios/OpenCV/OpenCVModule.h +0 -4
  9. package/ios/OpenCV/OpenCVModule.mm +440 -358
  10. package/lib/module/Screens/Debug/BarcodeTestScreen.js +308 -0
  11. package/lib/module/Screens/Debug/MRZTestScreen.js +105 -13
  12. package/lib/module/Screens/Dynamic/ContractAcceptanceScreen.js +49 -29
  13. package/lib/module/Screens/Dynamic/IdentityDocumentEIDScanningScreen.js +5 -0
  14. package/lib/module/Screens/Dynamic/IdentityDocumentScanningScreen.js +5 -0
  15. package/lib/module/Screens/Dynamic/LivenessDetectionScreen.js +26 -6
  16. package/lib/module/Screens/Dynamic/VideoCallScreen.js +676 -0
  17. package/lib/module/Screens/Static/OTPVerificationScreen.js +6 -0
  18. package/lib/module/Screens/Static/QrCodeScanningScreen.js +7 -1
  19. package/lib/module/Screens/Static/ResultScreen.js +27 -13
  20. package/lib/module/Screens/Static/VerificationSessionCheckScreen.js +51 -51
  21. package/lib/module/Shared/Animations/video-call.json +1 -0
  22. package/lib/module/Shared/Components/DebugNavigationPanel.js +180 -14
  23. package/lib/module/Shared/Components/DebugOverlay.js +541 -0
  24. package/lib/module/Shared/Components/EIDScanner.js +1 -4
  25. package/lib/module/Shared/Components/IdentityDocumentCamera.constants.js +44 -0
  26. package/lib/module/Shared/Components/IdentityDocumentCamera.flows.js +270 -0
  27. package/lib/module/Shared/Components/IdentityDocumentCamera.js +702 -1703
  28. package/lib/module/Shared/Components/IdentityDocumentCamera.types.js +3 -0
  29. package/lib/module/Shared/Components/IdentityDocumentCamera.utils.js +273 -0
  30. package/lib/module/Shared/Components/NavigationManager.js +15 -3
  31. package/lib/module/Shared/Contexts/AppContext.js +1 -0
  32. package/lib/module/Shared/Libs/SignalingClient.js +128 -0
  33. package/lib/module/Shared/Libs/analytics.utils.js +4 -0
  34. package/lib/module/Shared/Libs/deeplink.utils.js +9 -1
  35. package/lib/module/Shared/Libs/http-client.js +9 -0
  36. package/lib/module/Shared/Libs/promise.utils.js +16 -2
  37. package/lib/module/Shared/Libs/status-bar.utils.js +21 -0
  38. package/lib/module/Shared/Services/DataUploadService.js +294 -0
  39. package/lib/module/Shared/Services/VideoSessionService.js +156 -0
  40. package/lib/module/Shared/Services/WebRTCService.js +510 -0
  41. package/lib/module/Shared/Types/analytics.types.js +2 -0
  42. package/lib/module/Translation/Resources/en.js +20 -0
  43. package/lib/module/Translation/Resources/tr.js +20 -0
  44. package/lib/module/Trustchex.js +10 -0
  45. package/lib/module/version.js +1 -1
  46. package/lib/typescript/src/Screens/Debug/BarcodeTestScreen.d.ts +3 -0
  47. package/lib/typescript/src/Screens/Debug/BarcodeTestScreen.d.ts.map +1 -0
  48. package/lib/typescript/src/Screens/Debug/MRZTestScreen.d.ts.map +1 -1
  49. package/lib/typescript/src/Screens/Dynamic/ContractAcceptanceScreen.d.ts.map +1 -1
  50. package/lib/typescript/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.d.ts.map +1 -1
  51. package/lib/typescript/src/Screens/Dynamic/IdentityDocumentScanningScreen.d.ts.map +1 -1
  52. package/lib/typescript/src/Screens/Dynamic/LivenessDetectionScreen.d.ts.map +1 -1
  53. package/lib/typescript/src/Screens/Dynamic/VideoCallScreen.d.ts +3 -0
  54. package/lib/typescript/src/Screens/Dynamic/VideoCallScreen.d.ts.map +1 -0
  55. package/lib/typescript/src/Screens/Static/OTPVerificationScreen.d.ts.map +1 -1
  56. package/lib/typescript/src/Screens/Static/QrCodeScanningScreen.d.ts.map +1 -1
  57. package/lib/typescript/src/Screens/Static/ResultScreen.d.ts.map +1 -1
  58. package/lib/typescript/src/Screens/Static/VerificationSessionCheckScreen.d.ts.map +1 -1
  59. package/lib/typescript/src/Shared/Components/DebugNavigationPanel.d.ts.map +1 -1
  60. package/lib/typescript/src/Shared/Components/DebugOverlay.d.ts +30 -0
  61. package/lib/typescript/src/Shared/Components/DebugOverlay.d.ts.map +1 -0
  62. package/lib/typescript/src/Shared/Components/EIDScanner.d.ts.map +1 -1
  63. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.constants.d.ts +35 -0
  64. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.constants.d.ts.map +1 -0
  65. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts +3 -56
  66. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts.map +1 -1
  67. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.flows.d.ts +88 -0
  68. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.flows.d.ts.map +1 -0
  69. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.types.d.ts +116 -0
  70. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.types.d.ts.map +1 -0
  71. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.utils.d.ts +93 -0
  72. package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.utils.d.ts.map +1 -0
  73. package/lib/typescript/src/Shared/Components/NavigationManager.d.ts.map +1 -1
  74. package/lib/typescript/src/Shared/Contexts/AppContext.d.ts +1 -0
  75. package/lib/typescript/src/Shared/Contexts/AppContext.d.ts.map +1 -1
  76. package/lib/typescript/src/Shared/Libs/SignalingClient.d.ts +24 -0
  77. package/lib/typescript/src/Shared/Libs/SignalingClient.d.ts.map +1 -0
  78. package/lib/typescript/src/Shared/Libs/analytics.utils.d.ts.map +1 -1
  79. package/lib/typescript/src/Shared/Libs/deeplink.utils.d.ts.map +1 -1
  80. package/lib/typescript/src/Shared/Libs/http-client.d.ts.map +1 -1
  81. package/lib/typescript/src/Shared/Libs/promise.utils.d.ts.map +1 -1
  82. package/lib/typescript/src/Shared/Libs/status-bar.utils.d.ts +9 -0
  83. package/lib/typescript/src/Shared/Libs/status-bar.utils.d.ts.map +1 -0
  84. package/lib/typescript/src/Shared/Services/DataUploadService.d.ts +25 -0
  85. package/lib/typescript/src/Shared/Services/DataUploadService.d.ts.map +1 -0
  86. package/lib/typescript/src/Shared/Services/VideoSessionService.d.ts +33 -0
  87. package/lib/typescript/src/Shared/Services/VideoSessionService.d.ts.map +1 -0
  88. package/lib/typescript/src/Shared/Services/WebRTCService.d.ts +58 -0
  89. package/lib/typescript/src/Shared/Services/WebRTCService.d.ts.map +1 -0
  90. package/lib/typescript/src/Shared/Types/analytics.types.d.ts +2 -0
  91. package/lib/typescript/src/Shared/Types/analytics.types.d.ts.map +1 -1
  92. package/lib/typescript/src/Shared/Types/identificationInfo.d.ts +4 -1
  93. package/lib/typescript/src/Shared/Types/identificationInfo.d.ts.map +1 -1
  94. package/lib/typescript/src/Translation/Resources/en.d.ts +20 -0
  95. package/lib/typescript/src/Translation/Resources/en.d.ts.map +1 -1
  96. package/lib/typescript/src/Translation/Resources/tr.d.ts +20 -0
  97. package/lib/typescript/src/Translation/Resources/tr.d.ts.map +1 -1
  98. package/lib/typescript/src/Trustchex.d.ts.map +1 -1
  99. package/lib/typescript/src/version.d.ts +1 -1
  100. package/package.json +29 -2
  101. package/src/Screens/Debug/BarcodeTestScreen.tsx +317 -0
  102. package/src/Screens/Debug/MRZTestScreen.tsx +107 -13
  103. package/src/Screens/Dynamic/ContractAcceptanceScreen.tsx +59 -33
  104. package/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.tsx +6 -0
  105. package/src/Screens/Dynamic/IdentityDocumentScanningScreen.tsx +6 -0
  106. package/src/Screens/Dynamic/LivenessDetectionScreen.tsx +34 -6
  107. package/src/Screens/Dynamic/VideoCallScreen.tsx +764 -0
  108. package/src/Screens/Static/OTPVerificationScreen.tsx +6 -0
  109. package/src/Screens/Static/QrCodeScanningScreen.tsx +7 -1
  110. package/src/Screens/Static/ResultScreen.tsx +58 -23
  111. package/src/Screens/Static/VerificationSessionCheckScreen.tsx +58 -72
  112. package/src/Shared/Animations/video-call.json +1 -0
  113. package/src/Shared/Components/DebugNavigationPanel.tsx +185 -9
  114. package/src/Shared/Components/DebugOverlay.tsx +656 -0
  115. package/src/Shared/Components/EIDScanner.tsx +1 -5
  116. package/src/Shared/Components/IdentityDocumentCamera.constants.ts +44 -0
  117. package/src/Shared/Components/IdentityDocumentCamera.flows.ts +342 -0
  118. package/src/Shared/Components/IdentityDocumentCamera.tsx +1089 -2465
  119. package/src/Shared/Components/IdentityDocumentCamera.types.ts +136 -0
  120. package/src/Shared/Components/IdentityDocumentCamera.utils.ts +364 -0
  121. package/src/Shared/Components/NavigationManager.tsx +14 -1
  122. package/src/Shared/Contexts/AppContext.ts +2 -0
  123. package/src/Shared/Libs/SignalingClient.ts +189 -0
  124. package/src/Shared/Libs/analytics.utils.ts +4 -0
  125. package/src/Shared/Libs/deeplink.utils.ts +12 -1
  126. package/src/Shared/Libs/http-client.ts +10 -0
  127. package/src/Shared/Libs/promise.utils.ts +16 -2
  128. package/src/Shared/Libs/status-bar.utils.ts +19 -0
  129. package/src/Shared/Services/DataUploadService.ts +395 -0
  130. package/src/Shared/Services/VideoSessionService.ts +190 -0
  131. package/src/Shared/Services/WebRTCService.ts +636 -0
  132. package/src/Shared/Types/analytics.types.ts +2 -0
  133. package/src/Shared/Types/identificationInfo.ts +5 -1
  134. package/src/Translation/Resources/en.ts +25 -0
  135. package/src/Translation/Resources/tr.ts +27 -0
  136. package/src/Trustchex.tsx +12 -2
  137. package/src/version.ts +1 -1
@@ -92,6 +92,7 @@ RCT_EXPORT_METHOD(cropFaceImages:(NSString *)base64Image
92
92
  faceBounds:(NSArray *)faceBounds
93
93
  imageWidth:(NSInteger)imageWidth
94
94
  imageHeight:(NSInteger)imageHeight
95
+ widerRightPadding:(BOOL)widerRightPadding
95
96
  resolver:(RCTPromiseResolveBlock)resolve
96
97
  rejecter:(RCTPromiseRejectBlock)reject) {
97
98
  @try {
@@ -122,13 +123,15 @@ RCT_EXPORT_METHOD(cropFaceImages:(NSString *)base64Image
122
123
  int width = [bounds[@"width"] intValue];
123
124
  int height = [bounds[@"height"] intValue];
124
125
 
125
- // Add 25% padding (matching Android)
126
- int padX = (int)(width * 0.25);
127
- int padY = (int)(height * 0.25);
128
- x = MAX(0, x - padX);
129
- y = MAX(0, y - padY);
130
- width = MIN(mat.cols - x, width + 2 * padX);
131
- height = MIN(mat.rows - y, height + 2 * padY);
126
+ // Padding based on use case: hologram needs wider area on right/top/bottom, regular face is symmetric
127
+ int padLeft = (int)(width * 0.25);
128
+ int padRight = widerRightPadding ? (int)(width * 0.60) : (int)(width * 0.25);
129
+ int padTop = widerRightPadding ? (int)(height * 0.50) : (int)(height * 0.25);
130
+ int padBottom = widerRightPadding ? (int)(height * 0.50) : (int)(height * 0.25);
131
+ x = MAX(0, x - padLeft);
132
+ y = MAX(0, y - padTop);
133
+ width = MIN(mat.cols - x, width + padLeft + padRight);
134
+ height = MIN(mat.rows - y, height + padTop + padBottom);
132
135
 
133
136
  if (x >= 0 && y >= 0 && x + width <= mat.cols && y + height <= mat.rows && width > 0 && height > 0) {
134
137
  cv::Rect roi(x, y, width, height);
@@ -216,6 +219,265 @@ RCT_EXPORT_METHOD(areImagesSimilar:(NSString *)base64Image1
216
219
  cv::threshold(gray, dst, 0, 255, cv::THRESH_BINARY + cv::THRESH_OTSU);
217
220
  }
218
221
 
222
+ // Helper: Find translation offset between image and reference using feature matching
223
+ - (cv::Point2d)findTranslationOffset:(cv::Mat)image reference:(cv::Mat)reference refGray:(cv::Mat)refGray {
224
+ if (image.empty() || reference.empty() || refGray.empty()) {
225
+ return cv::Point2d(0, 0);
226
+ }
227
+
228
+ @try {
229
+ cv::Mat imgGray;
230
+ cv::cvtColor(image, imgGray, cv::COLOR_RGB2GRAY);
231
+
232
+ // Detect ORB keypoints
233
+ cv::Ptr<cv::ORB> orb = cv::ORB::create(300);
234
+ std::vector<cv::KeyPoint> kpRef, kpImg;
235
+ cv::Mat descRef, descImg;
236
+
237
+ orb->detectAndCompute(refGray, cv::noArray(), kpRef, descRef);
238
+ orb->detectAndCompute(imgGray, cv::noArray(), kpImg, descImg);
239
+
240
+ if (descRef.empty() || descImg.empty()) {
241
+ return cv::Point2d(0, 0);
242
+ }
243
+
244
+ // Match features
245
+ cv::Ptr<cv::DescriptorMatcher> matcher = cv::DescriptorMatcher::create(cv::DescriptorMatcher::BRUTEFORCE_HAMMING);
246
+ std::vector<cv::DMatch> matches;
247
+ matcher->match(descImg, descRef, matches);
248
+
249
+ if (matches.empty()) {
250
+ return cv::Point2d(0, 0);
251
+ }
252
+
253
+ // Filter good matches
254
+ double minDistance = matches[0].distance;
255
+ for (const auto& m : matches) {
256
+ if (m.distance < minDistance) minDistance = m.distance;
257
+ }
258
+
259
+ std::vector<cv::DMatch> goodMatches;
260
+ for (const auto& m : matches) {
261
+ if (m.distance < 3 * minDistance && m.distance < 40.0) {
262
+ goodMatches.push_back(m);
263
+ }
264
+ }
265
+
266
+ if (goodMatches.size() < 5) {
267
+ return cv::Point2d(0, 0);
268
+ }
269
+
270
+ // Calculate median translation offset
271
+ std::vector<double> dxList, dyList;
272
+ for (const auto& m : goodMatches) {
273
+ cv::Point2f imgPt = kpImg[m.queryIdx].pt;
274
+ cv::Point2f refPt = kpRef[m.trainIdx].pt;
275
+ dxList.push_back(refPt.x - imgPt.x);
276
+ dyList.push_back(refPt.y - imgPt.y);
277
+ }
278
+
279
+ // Use median to be robust against outliers
280
+ std::sort(dxList.begin(), dxList.end());
281
+ std::sort(dyList.begin(), dyList.end());
282
+ double medianDx = dxList[dxList.size() / 2];
283
+ double medianDy = dyList[dyList.size() / 2];
284
+
285
+ return cv::Point2d(medianDx, medianDy);
286
+ } @catch (NSException *exception) {
287
+ return cv::Point2d(0, 0);
288
+ }
289
+ }
290
+
291
+ // Helper: Find common region and crop all images to it (no warping, just translation)
292
+ - (std::vector<cv::Mat>)findAndCropToCommonRegion:(std::vector<cv::Mat>)images {
293
+ std::vector<cv::Mat> result;
294
+
295
+ if (images.size() < 2) {
296
+ return result;
297
+ }
298
+
299
+ @try {
300
+ cv::Mat reference = images[0];
301
+ cv::Mat refGray;
302
+ cv::cvtColor(reference, refGray, cv::COLOR_RGB2GRAY);
303
+
304
+ // Track translation offsets for each image relative to reference
305
+ std::vector<cv::Point2d> offsets;
306
+ offsets.push_back(cv::Point2d(0, 0)); // Reference has no offset
307
+
308
+ // Find translation offset for each image
309
+ for (size_t i = 1; i < images.size(); i++) {
310
+ cv::Point2d offset = [self findTranslationOffset:images[i] reference:reference refGray:refGray];
311
+ offsets.push_back(offset);
312
+ }
313
+
314
+ // Calculate common region (intersection of all translated frames)
315
+ double commonX = 0.0;
316
+ double commonY = 0.0;
317
+ double commonWidth = reference.cols;
318
+ double commonHeight = reference.rows;
319
+
320
+ for (const auto& offset : offsets) {
321
+ double dx = offset.x;
322
+ double dy = offset.y;
323
+
324
+ commonX = std::max(commonX, -dx);
325
+ commonY = std::max(commonY, -dy);
326
+ commonWidth = std::min(commonWidth, (double)reference.cols - dx);
327
+ commonHeight = std::min(commonHeight, (double)reference.rows - dy);
328
+ }
329
+
330
+ // Ensure valid region
331
+ if (commonWidth <= commonX || commonHeight <= commonY ||
332
+ commonWidth - commonX < 50 || commonHeight - commonY < 50) {
333
+ return result;
334
+ }
335
+
336
+ cv::Rect cropRect(
337
+ (int)commonX,
338
+ (int)commonY,
339
+ (int)(commonWidth - commonX),
340
+ (int)(commonHeight - commonY)
341
+ );
342
+
343
+ // Crop all images to common region
344
+ for (size_t i = 0; i < images.size(); i++) {
345
+ cv::Point2d offset = offsets[i];
346
+ cv::Rect adjustedRect(
347
+ (int)(cropRect.x + offset.x),
348
+ (int)(cropRect.y + offset.y),
349
+ cropRect.width,
350
+ cropRect.height
351
+ );
352
+
353
+ // Ensure rect is within image bounds
354
+ if (adjustedRect.x >= 0 && adjustedRect.y >= 0 &&
355
+ adjustedRect.x + adjustedRect.width <= images[i].cols &&
356
+ adjustedRect.y + adjustedRect.height <= images[i].rows) {
357
+ cv::Mat cropped = images[i](adjustedRect).clone();
358
+ result.push_back(cropped);
359
+ } else {
360
+ // Fallback: use original
361
+ result.push_back(images[i].clone());
362
+ }
363
+ }
364
+
365
+ return result;
366
+ } @catch (NSException *exception) {
367
+ return result;
368
+ }
369
+ }
370
+
371
+ // Helper: Align single image to reference using feature matching
372
+ - (cv::Mat)alignImageToReference:(cv::Mat)image reference:(cv::Mat)reference refGray:(cv::Mat)refGray {
373
+ if (image.empty() || reference.empty() || refGray.empty()) {
374
+ return cv::Mat();
375
+ }
376
+
377
+ @try {
378
+ cv::Mat imgGray;
379
+ cv::cvtColor(image, imgGray, cv::COLOR_RGB2GRAY);
380
+
381
+ // Detect ORB keypoints and descriptors
382
+ cv::Ptr<cv::ORB> orb = cv::ORB::create(500);
383
+ std::vector<cv::KeyPoint> kpRef, kpImg;
384
+ cv::Mat descRef, descImg;
385
+
386
+ orb->detectAndCompute(refGray, cv::noArray(), kpRef, descRef);
387
+ orb->detectAndCompute(imgGray, cv::noArray(), kpImg, descImg);
388
+
389
+ if (descRef.empty() || descImg.empty()) {
390
+ return cv::Mat();
391
+ }
392
+
393
+ // Match features using BFMatcher
394
+ cv::Ptr<cv::DescriptorMatcher> matcher = cv::DescriptorMatcher::create(cv::DescriptorMatcher::BRUTEFORCE_HAMMING);
395
+ std::vector<cv::DMatch> matches;
396
+ matcher->match(descImg, descRef, matches);
397
+
398
+ if (matches.empty()) {
399
+ return cv::Mat();
400
+ }
401
+
402
+ // Filter good matches
403
+ double minDistance = matches[0].distance;
404
+ for (const auto& m : matches) {
405
+ if (m.distance < minDistance) minDistance = m.distance;
406
+ }
407
+
408
+ std::vector<cv::DMatch> goodMatches;
409
+ for (const auto& m : matches) {
410
+ if (m.distance < 3 * minDistance && m.distance < 50.0) {
411
+ goodMatches.push_back(m);
412
+ }
413
+ }
414
+
415
+ if (goodMatches.size() < 10) {
416
+ return cv::Mat();
417
+ }
418
+
419
+ // Extract matched points
420
+ std::vector<cv::Point2f> srcPoints, dstPoints;
421
+ for (const auto& m : goodMatches) {
422
+ srcPoints.push_back(kpImg[m.queryIdx].pt);
423
+ dstPoints.push_back(kpRef[m.trainIdx].pt);
424
+ }
425
+
426
+ // Find homography
427
+ cv::Mat homography = cv::findHomography(srcPoints, dstPoints, cv::RANSAC, 5.0);
428
+
429
+ if (homography.empty()) {
430
+ return cv::Mat();
431
+ }
432
+
433
+ // Warp image to align with reference
434
+ // Use BORDER_REPLICATE to avoid black/white spaces in aligned images
435
+ cv::Mat aligned;
436
+ cv::warpPerspective(image, aligned, homography, reference.size(),
437
+ cv::INTER_LINEAR, cv::BORDER_REPLICATE);
438
+
439
+ return aligned;
440
+ } @catch (NSException *exception) {
441
+ return cv::Mat();
442
+ }
443
+ }
444
+
445
+ // Helper: Align all images to first image as reference
446
+ - (std::vector<cv::Mat>)alignFacesToReference:(std::vector<cv::Mat>&)images {
447
+ std::vector<cv::Mat> alignedImages;
448
+
449
+ if (images.size() < 2) {
450
+ return alignedImages;
451
+ }
452
+
453
+ @try {
454
+ cv::Mat reference = images[0];
455
+
456
+ // Add reference image as-is
457
+ alignedImages.push_back(reference.clone());
458
+
459
+ // Convert reference to grayscale
460
+ cv::Mat refGray;
461
+ cv::cvtColor(reference, refGray, cv::COLOR_RGB2GRAY);
462
+
463
+ // Align remaining images
464
+ for (size_t i = 1; i < images.size(); i++) {
465
+ cv::Mat aligned = [self alignImageToReference:images[i] reference:reference refGray:refGray];
466
+ if (!aligned.empty()) {
467
+ alignedImages.push_back(aligned);
468
+ } else {
469
+ // Fallback to original if alignment fails
470
+ alignedImages.push_back(images[i].clone());
471
+ }
472
+ }
473
+
474
+ return (alignedImages.size() >= 2) ? alignedImages : std::vector<cv::Mat>();
475
+ } @catch (NSException *exception) {
476
+ for (auto& m : alignedImages) { m.release(); }
477
+ return std::vector<cv::Mat>();
478
+ }
479
+ }
480
+
219
481
  RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
220
482
  threshold:(NSInteger)threshold
221
483
  resolver:(RCTPromiseResolveBlock)resolve
@@ -223,10 +485,7 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
223
485
  // Run on background queue to avoid blocking the main/JS thread
224
486
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
225
487
  @try {
226
- NSLog(@"[OpenCV] detectHologram called with %lu images, threshold: %ld", (unsigned long)base64Images.count, (long)threshold);
227
-
228
488
  if (base64Images.count < 2) {
229
- NSLog(@"[OpenCV] Not enough images for hologram detection");
230
489
  resolve(nil);
231
490
  return;
232
491
  }
@@ -245,68 +504,81 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
245
504
  }
246
505
  }
247
506
 
248
- NSLog(@"[OpenCV] Decoded %lu valid images", (unsigned long)mats.size());
249
-
250
507
  if (mats.size() < 2) {
251
- NSLog(@"[OpenCV] Not enough valid decoded images");
252
508
  resolve(nil);
253
509
  return;
254
510
  }
255
511
 
256
- NSLog(@"[OpenCV] Processing hologram detection...");
512
+ // Use original images directly (no alignment needed - HSV detects color changes regardless)
513
+ std::vector<cv::Mat>& imagesToProcess = mats;
257
514
 
258
- // Multi-range HSV filtering for holographic colors (matching Android)
259
- // Range 1: Cyan-green holographic reflections
260
- cv::Scalar lowerBound1(35, 80, 80);
261
- cv::Scalar upperBound1(85, 255, 255);
262
- // Range 2: Blue-violet holographic reflections
263
- cv::Scalar lowerBound2(100, 80, 80);
264
- cv::Scalar upperBound2(160, 255, 255);
515
+ // Calculate adaptive threshold based on image size (1% of total pixels - balanced detection)
516
+ int imagePixels = imagesToProcess[0].rows * imagesToProcess[0].cols;
517
+ int adaptiveThreshold = (int)(imagePixels * 0.01);
518
+
519
+ // HSV filtering for holographic rainbow spectrum (balanced - detect holograms, exclude white glares)
520
+ // Saturation 60+ = colorful holographic shifts, not desaturated white glares
521
+ // Value capped at 220 = exclude very bright white glares
522
+ // Range 1: Cyan-green holographic spectrum
523
+ cv::Scalar lowerBound1(40, 60, 60);
524
+ cv::Scalar upperBound1(75, 255, 220);
525
+ // Range 2: Blue-violet holographic spectrum
526
+ cv::Scalar lowerBound2(110, 60, 60);
527
+ cv::Scalar upperBound2(150, 255, 220);
265
528
 
266
529
  std::vector<cv::Mat> diffs;
267
530
  std::vector<cv::Mat> brightestImages;
268
531
 
269
- const int HOLOGRAM_NON_ZERO_THRESHOLD = 600;
532
+ // Compare consecutive frames AND frames with gaps to catch both fast and slow color shifts
533
+ for (size_t i = 0; i < imagesToProcess.size() - 1; i++) {
534
+ // Gap 1: Consecutive frames (fast changes)
535
+ cv::Mat diff1;
536
+ cv::absdiff(imagesToProcess[i], imagesToProcess[i + 1], diff1);
537
+
538
+ cv::Mat hsv1;
539
+ cv::cvtColor(diff1, hsv1, cv::COLOR_RGB2HSV);
270
540
 
271
- for (size_t i = 0; i < mats.size() - 1; i++) {
272
- cv::Mat diff;
273
- cv::absdiff(mats[i], mats[i + 1], diff);
541
+ cv::Mat mask1a, mask1b, mask1;
542
+ cv::inRange(hsv1, lowerBound1, upperBound1, mask1a);
543
+ cv::inRange(hsv1, lowerBound2, upperBound2, mask1b);
544
+ cv::bitwise_or(mask1a, mask1b, mask1);
274
545
 
275
- cv::Mat hsv;
276
- cv::cvtColor(diff, hsv, cv::COLOR_RGB2HSV);
546
+ int maskNonZero1 = cv::countNonZero(mask1);
277
547
 
278
- // Apply multi-range HSV filtering
279
- cv::Mat mask1, mask2, mask;
280
- cv::inRange(hsv, lowerBound1, upperBound1, mask1);
281
- cv::inRange(hsv, lowerBound2, upperBound2, mask2);
282
- cv::bitwise_or(mask1, mask2, mask);
548
+ if (maskNonZero1 > adaptiveThreshold) {
549
+ diffs.push_back(mask1.clone());
550
+ brightestImages.push_back(imagesToProcess[i].clone());
551
+ brightestImages.push_back(imagesToProcess[i + 1].clone());
552
+ }
553
+
554
+ // Gap 3: Every third frame (slower color shifts) - only for frames that allow it
555
+ if (i + 3 < imagesToProcess.size()) {
556
+ cv::Mat diff3;
557
+ cv::absdiff(imagesToProcess[i], imagesToProcess[i + 3], diff3);
558
+
559
+ cv::Mat hsv3;
560
+ cv::cvtColor(diff3, hsv3, cv::COLOR_RGB2HSV);
283
561
 
284
- int maskNonZero = cv::countNonZero(mask);
285
- NSLog(@"[OpenCV] Image pair %zu→%zu: mask non-zero=%d (threshold=%d)", i, i+1, maskNonZero, HOLOGRAM_NON_ZERO_THRESHOLD);
562
+ cv::Mat mask3a, mask3b, mask3;
563
+ cv::inRange(hsv3, lowerBound1, upperBound1, mask3a);
564
+ cv::inRange(hsv3, lowerBound2, upperBound2, mask3b);
565
+ cv::bitwise_or(mask3a, mask3b, mask3);
286
566
 
287
- if (maskNonZero > HOLOGRAM_NON_ZERO_THRESHOLD) {
288
- diffs.push_back(mask.clone());
289
- brightestImages.push_back(mats[i].clone());
290
- brightestImages.push_back(mats[i + 1].clone());
291
- NSLog(@"[OpenCV] ✓ Mask added to diffs (significant variation)");
567
+ int maskNonZero3 = cv::countNonZero(mask3);
292
568
 
293
- // Early termination: if first pair already has very strong signal, skip rest
294
- if (i == 0 && maskNonZero > HOLOGRAM_NON_ZERO_THRESHOLD * 4) {
295
- NSLog(@"[OpenCV] ⚡ Strong signal in first pair, skipping remaining pairs");
296
- break;
569
+ if (maskNonZero3 > adaptiveThreshold) {
570
+ diffs.push_back(mask3.clone());
571
+ brightestImages.push_back(imagesToProcess[i].clone());
572
+ brightestImages.push_back(imagesToProcess[i + 3].clone());
297
573
  }
298
- } else {
299
- NSLog(@"[OpenCV] ✗ Mask rejected (insufficient variation)");
300
574
  }
301
575
  }
302
576
 
303
- // Release original mats early
304
- for (auto& m : mats) { m.release(); }
577
+ // Release processed mats
578
+ for (auto& m : imagesToProcess) { m.release(); }
305
579
  mats.clear();
306
- NSLog(@"[OpenCV] Found %lu significant differences", (unsigned long)diffs.size());
307
-
580
+
308
581
  if (diffs.empty()) {
309
- NSLog(@"[OpenCV] No significant hologram features detected");
310
582
  for (auto& m : brightestImages) { m.release(); }
311
583
  resolve(nil);
312
584
  return;
@@ -314,7 +586,6 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
314
586
 
315
587
  // Combine all difference masks using bitwise_or (matching Android)
316
588
  cv::Mat hologramMask = diffs[0].clone();
317
- NSLog(@"[OpenCV] Combining difference masks...");
318
589
 
319
590
  for (size_t i = 1; i < diffs.size(); i++) {
320
591
  cv::bitwise_or(hologramMask, diffs[i], hologramMask);
@@ -324,17 +595,14 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
324
595
  diffs.clear();
325
596
 
326
597
  // Apply morphological operations to clean up (matching Android)
327
- NSLog(@"[OpenCV] Applying morphological operations...");
328
598
  cv::Mat kernel = cv::getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(3, 3));
329
599
  cv::morphologyEx(hologramMask, hologramMask, cv::MORPH_CLOSE, kernel);
330
600
  cv::morphologyEx(hologramMask, hologramMask, cv::MORPH_OPEN, kernel);
331
601
 
332
602
  // Check if significant hologram detected
333
603
  int nonZeroCount = cv::countNonZero(hologramMask);
334
- NSLog(@"[OpenCV] Hologram mask non-zero count: %d, threshold: %ld", nonZeroCount, (long)threshold);
335
604
 
336
605
  if (nonZeroCount < threshold) {
337
- NSLog(@"[OpenCV] Hologram mask below threshold");
338
606
  hologramMask.release();
339
607
  for (auto& m : brightestImages) { m.release(); }
340
608
  resolve(nil);
@@ -372,10 +640,7 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
372
640
  for (auto& m : brightestImages) { m.release(); }
373
641
  brightestImages.clear();
374
642
 
375
- NSLog(@"[OpenCV] Selected best hologram image with score: %f", maxScore);
376
-
377
643
  // Convert results to base64
378
- NSLog(@"[OpenCV] Converting results to base64...");
379
644
  @autoreleasepool {
380
645
  UIImage *hologramImage = [self matToImage:bestImage];
381
646
  UIImage *maskImage = [self matToImage:hologramMask];
@@ -388,7 +653,6 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
388
653
  NSString *maskBase64 = [self imageToBase64:maskImage];
389
654
 
390
655
  if (hologramBase64 && maskBase64) {
391
- NSLog(@"[OpenCV] Hologram detection successful");
392
656
  resolve(@{
393
657
  @"hologramImage": hologramBase64,
394
658
  @"hologramMask": maskBase64
@@ -398,7 +662,6 @@ RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
398
662
  }
399
663
  }
400
664
 
401
- NSLog(@"[OpenCV] Failed to convert hologram results to base64");
402
665
  resolve(nil);
403
666
  } @catch (NSException *exception) {
404
667
  NSLog(@"[OpenCV] Exception in detectHologram: %@", exception.reason);
@@ -611,339 +874,158 @@ RCT_EXPORT_METHOD(checkBlurryInRegion:(NSString *)base64Image
611
874
  }
612
875
  }
613
876
 
614
- RCT_EXPORT_METHOD(detectCardBounds:(NSString *)base64Image
615
- textBlocks:(NSArray *)textBlocks
616
- faces:(NSArray *)faces
617
- imageWidth:(NSInteger)imageWidth
618
- imageHeight:(NSInteger)imageHeight
877
+ /**
878
+ * Compare face shape/geometry between two face images.
879
+ * Analyzes aspect ratio, edges, and structural similarity.
880
+ * Device-side validation before backend FaceNet.
881
+ */
882
+ RCT_EXPORT_METHOD(compareFaceShape:(NSString *)base64Image1
883
+ secondImage:(NSString *)base64Image2
619
884
  resolver:(RCTPromiseResolveBlock)resolve
620
885
  rejecter:(RCTPromiseRejectBlock)reject) {
621
886
  @try {
622
- // Collect all element bounds for clustering
623
- NSMutableArray *allElements = [NSMutableArray array];
624
-
625
- // Process text blocks
626
- for (NSDictionary *block in textBlocks) {
627
- NSDictionary *frame = block[@"blockFrame"];
628
- if (!frame) continue;
629
-
630
- int x = [frame[@"x"] intValue];
631
- int y = [frame[@"y"] intValue];
632
- int width = [frame[@"width"] intValue];
633
- int height = [frame[@"height"] intValue];
634
-
635
- [allElements addObject:@{
636
- @"left": @(x),
637
- @"top": @(y),
638
- @"right": @(x + width),
639
- @"bottom": @(y + height)
640
- }];
641
- }
642
-
643
- // Process faces
644
- for (NSDictionary *face in faces) {
645
- NSDictionary *bounds = face[@"bounds"];
646
- if (!bounds) continue;
647
-
648
- int x = [bounds[@"x"] intValue];
649
- int y = [bounds[@"y"] intValue];
650
- int width = [bounds[@"width"] intValue];
651
- int height = [bounds[@"height"] intValue];
652
-
653
- [allElements addObject:@{
654
- @"left": @(x),
655
- @"top": @(y),
656
- @"right": @(x + width),
657
- @"bottom": @(y + height)
658
- }];
659
- }
660
-
661
- if (allElements.count == 0) {
662
- NSLog(@"[OpenCVModule] No elements detected for card bounds");
663
- resolve(nil);
664
- return;
665
- }
666
-
667
- // Calculate the centroid of all elements
668
- int centerX = 0;
669
- int centerY = 0;
670
- for (NSDictionary *rect in allElements) {
671
- centerX += ([rect[@"left"] intValue] + [rect[@"right"] intValue]) / 2;
672
- centerY += ([rect[@"top"] intValue] + [rect[@"bottom"] intValue]) / 2;
673
- }
674
- centerX /= (int)allElements.count;
675
- centerY /= (int)allElements.count;
676
-
677
- // Calculate distances from centroid
678
- NSMutableArray *distances = [NSMutableArray array];
679
- for (NSDictionary *rect in allElements) {
680
- int elemCenterX = ([rect[@"left"] intValue] + [rect[@"right"] intValue]) / 2;
681
- int elemCenterY = ([rect[@"top"] intValue] + [rect[@"bottom"] intValue]) / 2;
682
- int dx = elemCenterX - centerX;
683
- int dy = elemCenterY - centerY;
684
- double distance = sqrt(dx * dx + dy * dy);
685
- [distances addObject:@(distance)];
686
- }
687
-
688
- // Calculate median distance
689
- NSArray *sortedDistances = [distances sortedArrayUsingSelector:@selector(compare:)];
690
- double medianDistance = [sortedDistances[sortedDistances.count / 2] doubleValue];
691
-
692
- // Filter out elements that are more than 2x the median distance from center
693
- double threshold = medianDistance * 2.0;
694
- NSMutableArray *filteredElements = [NSMutableArray array];
695
- for (NSUInteger i = 0; i < allElements.count; i++) {
696
- if ([distances[i] doubleValue] <= threshold) {
697
- [filteredElements addObject:allElements[i]];
698
- }
699
- }
700
-
701
- NSLog(@"[OpenCVModule] Filtered %lu outlier elements (%lu -> %lu)",
702
- (unsigned long)(allElements.count - filteredElements.count),
703
- (unsigned long)allElements.count,
704
- (unsigned long)filteredElements.count);
887
+ UIImage *image1 = [self base64ToImage:base64Image1];
888
+ UIImage *image2 = [self base64ToImage:base64Image2];
705
889
 
706
- if (filteredElements.count == 0) {
707
- NSLog(@"[OpenCVModule] No elements after filtering outliers");
708
- resolve(nil);
890
+ if (!image1 || !image2) {
891
+ reject(@"DECODE_ERROR", @"Failed to decode face images", nil);
709
892
  return;
710
893
  }
711
894
 
712
- // Now calculate bounds from filtered elements
713
- int minX = (int)imageWidth;
714
- int minY = (int)imageHeight;
715
- int maxX = 0;
716
- int maxY = 0;
717
-
718
- for (NSDictionary *rect in filteredElements) {
719
- minX = MIN(minX, [rect[@"left"] intValue]);
720
- minY = MIN(minY, [rect[@"top"] intValue]);
721
- maxX = MAX(maxX, [rect[@"right"] intValue]);
722
- maxY = MAX(maxY, [rect[@"bottom"] intValue]);
723
- }
724
-
725
- int elementCount = (int)filteredElements.count;
726
-
727
- NSLog(@"[OpenCVModule] Detected elements: %d, bounds: (%d, %d) to (%d, %d)", elementCount, minX, minY, maxX, maxY);
728
-
729
- // Calculate raw bounding box from elements
730
- int elementsWidth = maxX - minX;
731
- int elementsHeight = maxY - minY;
732
-
733
- NSLog(@"[OpenCVModule] Elements size: %dx%d, frame: %ldx%ld", elementsWidth, elementsHeight, (long)imageWidth, (long)imageHeight);
895
+ cv::Mat mat1 = [self imageToMat:image1];
896
+ cv::Mat mat2 = [self imageToMat:image2];
734
897
 
735
- // Validate minimum size (elements should occupy at least 5% of frame)
736
- int minArea = (int)(imageWidth * imageHeight * 0.05);
737
- if (elementsWidth * elementsHeight < minArea) {
738
- NSLog(@"[OpenCVModule] Elements too small: %d < %d", elementsWidth * elementsHeight, minArea);
739
- resolve(nil);
898
+ if (mat1.empty() || mat2.empty()) {
899
+ reject(@"DECODE_ERROR", @"Failed to convert images to Mat", nil);
740
900
  return;
741
901
  }
742
902
 
743
- // Add generous padding (15% on all sides) to ensure full card is captured
744
- int paddingX = (int)(elementsWidth * 0.15);
745
- int paddingY = (int)(elementsHeight * 0.15);
903
+ // Normalize sizes for comparison
904
+ cv::Size size(200, 200);
905
+ cv::Mat resized1, resized2;
906
+ cv::resize(mat1, resized1, size);
907
+ cv::resize(mat2, resized2, size);
746
908
 
747
- // Calculate card bounds with padding, clamped to image boundaries
748
- int cardX = MAX(0, minX - paddingX);
749
- int cardY = MAX(0, minY - paddingY);
750
- int cardRight = MIN((int)imageWidth, maxX + paddingX);
751
- int cardBottom = MIN((int)imageHeight, maxY + paddingY);
752
- int cardWidth = cardRight - cardX;
753
- int cardHeight = cardBottom - cardY;
909
+ // Calculate aspect ratios
910
+ double aspectRatio1 = (double)mat1.cols / (double)mat1.rows;
911
+ double aspectRatio2 = (double)mat2.cols / (double)mat2.rows;
912
+ double aspectRatioDiff = fabs(aspectRatio1 - aspectRatio2);
913
+ BOOL aspectRatioMatch = aspectRatioDiff < 0.15; // 15% tolerance
754
914
 
755
- NSLog(@"[OpenCVModule] Card bounds: (%d, %d) %dx%d", cardX, cardY, cardWidth, cardHeight);
915
+ // Edge detection for structural comparison
916
+ cv::Mat gray1, gray2;
917
+ cv::cvtColor(resized1, gray1, cv::COLOR_RGB2GRAY);
918
+ cv::cvtColor(resized2, gray2, cv::COLOR_RGB2GRAY);
756
919
 
757
- // Validate aspect ratio is reasonable for a document (very lenient: 1.0 - 2.5)
758
- double aspectRatio = (double)cardWidth / MAX((double)cardHeight, 1.0);
759
- NSLog(@"[OpenCVModule] Card aspect ratio: %f", aspectRatio);
920
+ cv::Mat edges1, edges2;
921
+ cv::Canny(gray1, edges1, 50, 150);
922
+ cv::Canny(gray2, edges2, 50, 150);
760
923
 
761
- if (aspectRatio < 1.0 || aspectRatio > 2.5) {
762
- NSLog(@"[OpenCVModule] Aspect ratio out of range: %f", aspectRatio);
763
- resolve(nil);
764
- return;
765
- }
766
-
767
- // Create corner points (rectangular bounds)
768
- NSMutableArray *corners = [NSMutableArray array];
769
-
770
- // Top-left
771
- [corners addObject:@{
772
- @"x": @(cardX),
773
- @"y": @(cardY)
774
- }];
775
-
776
- // Top-right
777
- [corners addObject:@{
778
- @"x": @(cardX + cardWidth),
779
- @"y": @(cardY)
780
- }];
781
-
782
- // Bottom-right
783
- [corners addObject:@{
784
- @"x": @(cardX + cardWidth),
785
- @"y": @(cardY + cardHeight)
786
- }];
787
-
788
- // Bottom-left
789
- [corners addObject:@{
790
- @"x": @(cardX),
791
- @"y": @(cardY + cardHeight)
792
- }];
924
+ // Compare edge structures
925
+ cv::Mat diff;
926
+ cv::absdiff(edges1, edges2, diff);
927
+ int diffCount = cv::countNonZero(diff);
928
+ int totalPixels = edges1.rows * edges1.cols;
929
+ int matchPixels = totalPixels - diffCount;
930
+ double shapeScore = (double)matchPixels / (double)totalPixels;
931
+
932
+ // Cleanup
933
+ mat1.release();
934
+ mat2.release();
935
+ resized1.release();
936
+ resized2.release();
937
+ gray1.release();
938
+ gray2.release();
939
+ edges1.release();
940
+ edges2.release();
941
+ diff.release();
793
942
 
794
943
  NSDictionary *result = @{
795
- @"x": @(cardX),
796
- @"y": @(cardY),
797
- @"width": @(cardWidth),
798
- @"height": @(cardHeight),
799
- @"corners": corners,
800
- @"angle": @(0.0) // Rectangular alignment
944
+ @"shapeScore": @(shapeScore),
945
+ @"aspectRatioMatch": @(aspectRatioMatch),
946
+ @"aspectRatioDiff": @(aspectRatioDiff)
801
947
  };
802
-
803
948
  resolve(result);
804
949
  } @catch (NSException *exception) {
805
- resolve(nil);
950
+ reject(@"FACE_SHAPE_ERROR", exception.reason, nil);
806
951
  }
807
952
  }
808
- // Preprocess image for better OCR text recognition using OpenCV
809
- RCT_EXPORT_METHOD(preprocessImageForOCR:(NSString *)base64Image
810
- applyThresholding:(BOOL)applyThresholding
953
+
954
+ /**
955
+ * Compare visual similarity between two face images using SSIM-like approach.
956
+ * Better than simple pixel difference for different lighting/angles.
957
+ * Device-side validation before backend FaceNet.
958
+ */
959
+ RCT_EXPORT_METHOD(compareFaceVisualSimilarity:(NSString *)base64Image1
960
+ secondImage:(NSString *)base64Image2
811
961
  resolver:(RCTPromiseResolveBlock)resolve
812
962
  rejecter:(RCTPromiseRejectBlock)reject) {
813
963
  @try {
814
- UIImage *image = [self base64ToImage:base64Image];
815
- if (!image) {
816
- reject(@"DECODE_ERROR", @"Failed to decode image", nil);
817
- return;
818
- }
964
+ UIImage *image1 = [self base64ToImage:base64Image1];
965
+ UIImage *image2 = [self base64ToImage:base64Image2];
819
966
 
820
- cv::Mat mat = [self imageToMat:image];
821
- if (mat.empty()) {
822
- reject(@"MAT_ERROR", @"Failed to convert image to Mat", nil);
967
+ if (!image1 || !image2) {
968
+ reject(@"DECODE_ERROR", @"Failed to decode face images", nil);
823
969
  return;
824
970
  }
825
971
 
826
- // 1. Convert to grayscale
827
- cv::Mat gray;
828
- cv::cvtColor(mat, gray, cv::COLOR_RGB2GRAY);
829
- mat.release();
830
-
831
- // 2. Apply bilateral filter for noise reduction while preserving edges
832
- // This is better than Gaussian blur for text as it keeps edges sharp
833
- cv::Mat filtered;
834
- cv::bilateralFilter(gray, filtered, 9, 75, 75);
835
- gray.release();
836
-
837
- // 3. Apply CLAHE (Contrast Limited Adaptive Histogram Equalization)
838
- // This enhances local contrast, making text stand out better
839
- cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(2.0, cv::Size(8, 8));
840
- cv::Mat enhanced;
841
- clahe->apply(filtered, enhanced);
842
- filtered.release();
843
-
844
- // 4. Sharpen the image to enhance text edges
845
- // Use unsharp masking: original + (original - blurred) * amount
846
- cv::Mat blurred;
847
- cv::GaussianBlur(enhanced, blurred, cv::Size(0, 0), 3.0);
848
- cv::Mat sharpened;
849
- cv::addWeighted(enhanced, 1.5, blurred, -0.5, 0, sharpened);
850
- blurred.release();
851
- enhanced.release();
852
-
853
- // 5. Optional: Apply adaptive thresholding for binary text extraction
854
- cv::Mat result;
855
- if (applyThresholding) {
856
- cv::Mat thresholded;
857
- // Use Gaussian adaptive threshold - better for varying illumination
858
- cv::adaptiveThreshold(sharpened, thresholded, 255,
859
- cv::ADAPTIVE_THRESH_GAUSSIAN_C,
860
- cv::THRESH_BINARY, 11, 2);
861
- sharpened.release();
862
-
863
- // Apply morphological operations to clean up noise
864
- cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2, 2));
865
- cv::morphologyEx(thresholded, result, cv::MORPH_CLOSE, kernel);
866
- thresholded.release();
867
- kernel.release();
868
- } else {
869
- result = sharpened;
870
- }
871
-
872
- UIImage *resultImage = [self matToImage:result];
873
- result.release();
874
-
875
- if (!resultImage) {
876
- reject(@"ENCODE_ERROR", @"Failed to convert result to image", nil);
972
+ cv::Mat mat1 = [self imageToMat:image1];
973
+ cv::Mat mat2 = [self imageToMat:image2];
974
+
975
+ if (mat1.empty() || mat2.empty()) {
976
+ reject(@"DECODE_ERROR", @"Failed to convert images to Mat", nil);
877
977
  return;
878
978
  }
879
979
 
880
- NSString *resultBase64 = [self imageToBase64:resultImage];
881
- if (resultBase64) {
882
- resolve(resultBase64);
883
- } else {
884
- reject(@"ENCODE_ERROR", @"Failed to encode result", nil);
885
- }
886
- } @catch (NSException *exception) {
887
- reject(@"OCR_PREPROCESS_ERROR", exception.reason, nil);
888
- }
889
- }
980
+ // Normalize sizes
981
+ cv::Size size(200, 200);
982
+ cv::Mat resized1, resized2;
983
+ cv::resize(mat1, resized1, size);
984
+ cv::resize(mat2, resized2, size);
985
+
986
+ // Convert to grayscale and equalize histogram for better comparison
987
+ cv::Mat gray1, gray2;
988
+ cv::cvtColor(resized1, gray1, cv::COLOR_RGB2GRAY);
989
+ cv::cvtColor(resized2, gray2, cv::COLOR_RGB2GRAY);
990
+ cv::equalizeHist(gray1, gray1);
991
+ cv::equalizeHist(gray2, gray2);
992
+
993
+ // Compute mean and standard deviation for both images
994
+ cv::Scalar mean1, stdDev1, mean2, stdDev2;
995
+ cv::meanStdDev(gray1, mean1, stdDev1);
996
+ cv::meanStdDev(gray2, mean2, stdDev2);
997
+
998
+ // Compute correlation coefficient (simplified SSIM approach)
999
+ cv::Mat normalizedGray1, normalizedGray2;
1000
+ gray1.convertTo(normalizedGray1, CV_64F);
1001
+ gray2.convertTo(normalizedGray2, CV_64F);
1002
+
1003
+ // Calculate correlation
1004
+ cv::Mat product;
1005
+ cv::multiply(normalizedGray1, normalizedGray2, product);
1006
+ cv::Scalar sumScalar = cv::sum(product);
1007
+ double correlation = sumScalar[0] / (gray1.rows * gray1.cols);
1008
+
1009
+ // Normalize to 0-1 range (simplified similarity score)
1010
+ double similarity = std::max(0.0, std::min(1.0, correlation / 65536.0));
1011
+
1012
+ // Cleanup
1013
+ mat1.release();
1014
+ mat2.release();
1015
+ resized1.release();
1016
+ resized2.release();
1017
+ gray1.release();
1018
+ gray2.release();
1019
+ normalizedGray1.release();
1020
+ normalizedGray2.release();
1021
+ product.release();
890
1022
 
891
- // Synchronous version for direct Swift calls
892
- - (UIImage *)preprocessImageForOCRSync:(UIImage *)image applyThresholding:(BOOL)applyThresholding {
893
- @try {
894
- if (!image) return nil;
895
-
896
- cv::Mat mat = [self imageToMat:image];
897
- if (mat.empty()) return nil;
898
-
899
- // 1. Convert to grayscale
900
- cv::Mat gray;
901
- cv::cvtColor(mat, gray, cv::COLOR_RGB2GRAY);
902
- mat.release();
903
-
904
- // 2. Apply bilateral filter for noise reduction while preserving edges
905
- cv::Mat filtered;
906
- cv::bilateralFilter(gray, filtered, 9, 75, 75);
907
- gray.release();
908
-
909
- // 3. Apply CLAHE (Contrast Limited Adaptive Histogram Equalization)
910
- cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(2.0, cv::Size(8, 8));
911
- cv::Mat enhanced;
912
- clahe->apply(filtered, enhanced);
913
- filtered.release();
914
-
915
- // 4. Sharpen the image to enhance text edges
916
- cv::Mat blurred;
917
- cv::GaussianBlur(enhanced, blurred, cv::Size(0, 0), 3.0);
918
- cv::Mat sharpened;
919
- cv::addWeighted(enhanced, 1.5, blurred, -0.5, 0, sharpened);
920
- blurred.release();
921
- enhanced.release();
922
-
923
- // 5. Optional: Apply adaptive thresholding for binary text extraction
924
- cv::Mat result;
925
- if (applyThresholding) {
926
- cv::Mat thresholded;
927
- cv::adaptiveThreshold(sharpened, thresholded, 255,
928
- cv::ADAPTIVE_THRESH_GAUSSIAN_C,
929
- cv::THRESH_BINARY, 11, 2);
930
- sharpened.release();
931
-
932
- cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2, 2));
933
- cv::morphologyEx(thresholded, result, cv::MORPH_CLOSE, kernel);
934
- thresholded.release();
935
- kernel.release();
936
- } else {
937
- result = sharpened;
938
- }
939
-
940
- UIImage *resultImage = [self matToImage:result];
941
- result.release();
942
-
943
- return resultImage;
1023
+ NSDictionary *result = @{
1024
+ @"similarity": @(similarity)
1025
+ };
1026
+ resolve(result);
944
1027
  } @catch (NSException *exception) {
945
- NSLog(@"OpenCV preprocessing error: %@", exception.reason);
946
- return nil;
1028
+ reject(@"FACE_SIMILARITY_ERROR", exception.reason, nil);
947
1029
  }
948
1030
  }
949
1031