react-native-rectangle-doc-scanner 0.57.0 → 0.59.0

This diff represents the contents of publicly available package versions as published to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
@@ -278,157 +278,191 @@ const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, m
  const element = react_native_fast_opencv_1.OpenCV.invoke('getStructuringElement', react_native_fast_opencv_1.MorphShapes.MORPH_RECT, morphologyKernel);
  step = 'morphologyEx';
  reportStage(step);
- // MORPH_CLOSE to fill small holes in edges
  react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', mat, mat, react_native_fast_opencv_1.MorphTypes.MORPH_CLOSE, element);
- // MORPH_OPEN to remove small noise
  react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', mat, mat, react_native_fast_opencv_1.MorphTypes.MORPH_OPEN, element);
+ const ADAPTIVE_THRESH_GAUSSIAN_C = 1;
+ const THRESH_BINARY = 0;
+ const THRESH_OTSU = 8;
  // Bilateral filter for edge-preserving smoothing (better quality than Gaussian)
  step = 'bilateralFilter';
  reportStage(step);
+ let processed = mat;
  try {
  const tempMat = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.Mat);
  react_native_fast_opencv_1.OpenCV.invoke('bilateralFilter', mat, tempMat, 9, 75, 75);
- mat = tempMat;
+ processed = tempMat;
  }
  catch (error) {
  if (__DEV__) {
  console.warn('[DocScanner] bilateralFilter unavailable, falling back to GaussianBlur', error);
  }
- step = 'gaussianBlurFallback';
- reportStage(step);
  const blurKernel = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.Size, 5, 5);
  react_native_fast_opencv_1.OpenCV.invoke('GaussianBlur', mat, mat, blurKernel, 0);
+ processed = mat;
  }
- step = 'Canny';
- reportStage(step);
- // Configurable Canny parameters for adaptive edge detection
- react_native_fast_opencv_1.OpenCV.invoke('Canny', mat, mat, CANNY_LOW, CANNY_HIGH);
- step = 'createContours';
+ // Additional blur and close pass to smooth jagged edges
+ step = 'gaussianBlur';
  reportStage(step);
- const contours = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVectorOfVectors);
- react_native_fast_opencv_1.OpenCV.invoke('findContours', mat, contours, react_native_fast_opencv_1.RetrievalModes.RETR_EXTERNAL, react_native_fast_opencv_1.ContourApproximationModes.CHAIN_APPROX_SIMPLE);
- let best = null;
- let maxArea = 0;
+ const gaussianKernel = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.Size, 5, 5);
+ react_native_fast_opencv_1.OpenCV.invoke('GaussianBlur', processed, processed, gaussianKernel, 0);
+ react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', processed, processed, react_native_fast_opencv_1.MorphTypes.MORPH_CLOSE, element);
+ const baseMat = react_native_fast_opencv_1.OpenCV.invoke('clone', processed);
  const frameArea = width * height;
- step = 'toJSValue';
- reportStage(step);
- const contourVector = react_native_fast_opencv_1.OpenCV.toJSValue(contours);
- const contourArray = Array.isArray(contourVector?.array) ? contourVector.array : [];
- for (let i = 0; i < contourArray.length; i += 1) {
- step = `contour_${i}_copy`;
- reportStage(step);
- const contour = react_native_fast_opencv_1.OpenCV.copyObjectFromVector(contours, i);
- // Compute absolute area first
- step = `contour_${i}_area_abs`;
- reportStage(step);
- const { value: area } = react_native_fast_opencv_1.OpenCV.invoke('contourArea', contour, false);
- // Skip extremely small contours, but keep threshold very low to allow distant documents
- if (typeof area !== 'number' || !isFinite(area)) {
- continue;
- }
- if (area < 50) {
- continue;
- }
- step = `contour_${i}_area`; // ratio stage
- reportStage(step);
- const areaRatio = area / frameArea;
- if (__DEV__) {
- console.log('[DocScanner] area', area, 'ratio', areaRatio);
- }
- // Skip if area ratio is too small or too large
- if (areaRatio < 0.0002 || areaRatio > 0.99) {
- continue;
- }
- // Try to use convex hull for better corner detection
- let contourToUse = contour;
- try {
- step = `contour_${i}_convexHull`;
- reportStage(step);
- const hull = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVector);
- react_native_fast_opencv_1.OpenCV.invoke('convexHull', contour, hull, false, true);
- contourToUse = hull;
+ const originalArea = frame.width * frame.height;
+ const minEdgeThreshold = Math.max(14, Math.min(frame.width, frame.height) * MIN_EDGE_RATIO);
+ const epsilonValues = [
+ 0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009,
+ 0.01, 0.012, 0.015, 0.018, 0.02, 0.025, 0.03, 0.035, 0.04, 0.05,
+ 0.06, 0.07, 0.08, 0.09, 0.1, 0.12,
+ ];
+ let bestQuad = null;
+ let bestArea = 0;
+ let convexHullWarned = false;
+ const considerCandidate = (candidate) => {
+ 'worklet';
+ if (!candidate) {
+ return;
  }
- catch (err) {
- // If convexHull fails, use original contour
- if (__DEV__) {
- console.warn('[DocScanner] convexHull failed, using original contour');
- }
+ if (!bestQuad || candidate.area > bestArea) {
+ bestQuad = candidate.quad;
+ bestArea = candidate.area;
  }
- step = `contour_${i}_arcLength`;
+ };
+ const evaluateContours = (inputMat, attemptLabel) => {
+ 'worklet';
+ step = `findContours_${attemptLabel}`;
  reportStage(step);
- const { value: perimeter } = react_native_fast_opencv_1.OpenCV.invoke('arcLength', contourToUse, true);
- const approx = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVector);
- let approxArray = [];
- // Try more epsilon values from 0.1% to 10% for difficult shapes
- const epsilonValues = [
- 0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009,
- 0.01, 0.012, 0.015, 0.018, 0.02, 0.025, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1
- ];
- for (let attempt = 0; attempt < epsilonValues.length; attempt += 1) {
- const epsilon = epsilonValues[attempt] * perimeter;
- step = `contour_${i}_approxPolyDP_attempt_${attempt}`;
+ const contours = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVectorOfVectors);
+ react_native_fast_opencv_1.OpenCV.invoke('findContours', inputMat, contours, react_native_fast_opencv_1.RetrievalModes.RETR_EXTERNAL, react_native_fast_opencv_1.ContourApproximationModes.CHAIN_APPROX_SIMPLE);
+ const contourVector = react_native_fast_opencv_1.OpenCV.toJSValue(contours);
+ const contourArray = Array.isArray(contourVector?.array) ? contourVector.array : [];
+ let bestLocal = null;
+ for (let i = 0; i < contourArray.length; i += 1) {
+ step = `${attemptLabel}_contour_${i}_copy`;
  reportStage(step);
- react_native_fast_opencv_1.OpenCV.invoke('approxPolyDP', contourToUse, approx, epsilon, true);
- step = `contour_${i}_toJS_attempt_${attempt}`;
+ const contour = react_native_fast_opencv_1.OpenCV.copyObjectFromVector(contours, i);
+ step = `${attemptLabel}_contour_${i}_area`;
  reportStage(step);
- const approxValue = react_native_fast_opencv_1.OpenCV.toJSValue(approx);
- const candidate = Array.isArray(approxValue?.array) ? approxValue.array : [];
- if (__DEV__) {
- console.log('[DocScanner] approx length', candidate.length, 'epsilon', epsilon);
+ const { value: area } = react_native_fast_opencv_1.OpenCV.invoke('contourArea', contour, false);
+ if (typeof area !== 'number' || !isFinite(area) || area < 60) {
+ continue;
  }
- if (candidate.length === 4) {
- approxArray = candidate;
- break;
+ const resizedRatio = area / frameArea;
+ if (resizedRatio < 0.00012 || resizedRatio > 0.98) {
+ continue;
  }
- }
- // Only proceed if we found exactly 4 corners
- if (approxArray.length !== 4) {
- continue;
- }
- step = `contour_${i}_convex`;
- reportStage(step);
- // Validate points before processing
- const isValidPoint = (pt) => {
- return typeof pt.x === 'number' && typeof pt.y === 'number' &&
- !isNaN(pt.x) && !isNaN(pt.y) &&
- isFinite(pt.x) && isFinite(pt.y);
- };
- if (!approxArray.every(isValidPoint)) {
- if (__DEV__) {
- console.warn('[DocScanner] invalid points in approxArray', approxArray);
+ let contourToUse = contour;
+ try {
+ const hull = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVector);
+ react_native_fast_opencv_1.OpenCV.invoke('convexHull', contour, hull, false, true);
+ contourToUse = hull;
  }
- continue;
- }
- const points = approxArray.map((pt) => ({
- x: pt.x / ratio,
- y: pt.y / ratio,
- }));
- // Verify the quadrilateral is convex (valid document shape)
- try {
- if (!isConvexQuadrilateral(points)) {
- if (__DEV__) {
- console.log('[DocScanner] not convex, skipping:', points);
+ catch (err) {
+ if (__DEV__ && !convexHullWarned) {
+ console.warn('[DocScanner] convexHull failed, using original contour');
+ convexHullWarned = true;
  }
+ }
+ const { value: perimeter } = react_native_fast_opencv_1.OpenCV.invoke('arcLength', contourToUse, true);
+ if (typeof perimeter !== 'number' || !isFinite(perimeter) || perimeter < 80) {
  continue;
  }
- }
- catch (err) {
- if (__DEV__) {
- console.warn('[DocScanner] convex check error:', err, 'points:', points);
+ const approx = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVector);
+ let approxArray = [];
+ for (let attempt = 0; attempt < epsilonValues.length; attempt += 1) {
+ const epsilon = epsilonValues[attempt] * perimeter;
+ step = `${attemptLabel}_contour_${i}_approx_${attempt}`;
+ reportStage(step);
+ react_native_fast_opencv_1.OpenCV.invoke('approxPolyDP', contourToUse, approx, epsilon, true);
+ const approxValue = react_native_fast_opencv_1.OpenCV.toJSValue(approx);
+ const candidate = Array.isArray(approxValue?.array) ? approxValue.array : [];
+ if (candidate.length === 4) {
+ approxArray = candidate;
+ break;
+ }
+ }
+ if (approxArray.length !== 4) {
+ continue;
+ }
+ const isValidPoint = (pt) => typeof pt.x === 'number' && typeof pt.y === 'number' && isFinite(pt.x) && isFinite(pt.y);
+ if (!approxArray.every(isValidPoint)) {
+ continue;
+ }
+ const normalizedPoints = approxArray.map((pt) => ({
+ x: pt.x / ratio,
+ y: pt.y / ratio,
+ }));
+ if (!isConvexQuadrilateral(normalizedPoints)) {
+ continue;
+ }
+ const sanitized = (0, quad_1.sanitizeQuad)((0, quad_1.orderQuadPoints)(normalizedPoints));
+ if (!(0, quad_1.isValidQuad)(sanitized)) {
+ continue;
+ }
+ const edges = (0, quad_1.quadEdgeLengths)(sanitized);
+ const minEdge = Math.min(...edges);
+ const maxEdge = Math.max(...edges);
+ if (!Number.isFinite(minEdge) || minEdge < minEdgeThreshold) {
+ continue;
+ }
+ const aspectRatio = maxEdge / Math.max(minEdge, 1);
+ if (!Number.isFinite(aspectRatio) || aspectRatio > 8.5) {
+ continue;
+ }
+ const quadAreaValue = (0, quad_1.quadArea)(sanitized);
+ const originalRatio = originalArea > 0 ? quadAreaValue / originalArea : 0;
+ if (originalRatio < 0.00012 || originalRatio > 0.92) {
+ continue;
+ }
+ const candidate = {
+ quad: sanitized,
+ area: quadAreaValue,
+ };
+ if (!bestLocal || candidate.area > bestLocal.area) {
+ bestLocal = candidate;
  }
- continue;
- }
- if (area > maxArea) {
- best = points;
- maxArea = area;
  }
- }
+ return bestLocal;
+ };
+ const runCanny = (label, low, high) => {
+ 'worklet';
+ const working = react_native_fast_opencv_1.OpenCV.invoke('clone', baseMat);
+ step = `${label}_canny`;
+ reportStage(step);
+ react_native_fast_opencv_1.OpenCV.invoke('Canny', working, working, low, high);
+ react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', working, working, react_native_fast_opencv_1.MorphTypes.MORPH_CLOSE, element);
+ considerCandidate(evaluateContours(working, label));
+ };
+ const runAdaptive = (label, blockSize, c) => {
+ 'worklet';
+ const working = react_native_fast_opencv_1.OpenCV.invoke('clone', baseMat);
+ step = `${label}_adaptive`;
+ reportStage(step);
+ react_native_fast_opencv_1.OpenCV.invoke('adaptiveThreshold', working, working, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, blockSize, c);
+ react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', working, working, react_native_fast_opencv_1.MorphTypes.MORPH_CLOSE, element);
+ considerCandidate(evaluateContours(working, label));
+ };
+ const runOtsu = () => {
+ 'worklet';
+ const working = react_native_fast_opencv_1.OpenCV.invoke('clone', baseMat);
+ step = 'otsu_threshold';
+ reportStage(step);
+ react_native_fast_opencv_1.OpenCV.invoke('threshold', working, working, 0, 255, THRESH_BINARY | THRESH_OTSU);
+ react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', working, working, react_native_fast_opencv_1.MorphTypes.MORPH_CLOSE, element);
+ considerCandidate(evaluateContours(working, 'otsu'));
+ };
+ runCanny('canny_primary', CANNY_LOW, CANNY_HIGH);
+ runCanny('canny_soft', Math.max(6, CANNY_LOW * 0.6), Math.max(CANNY_LOW * 1.2, CANNY_HIGH * 0.75));
+ runCanny('canny_hard', Math.max(12, CANNY_LOW * 1.1), CANNY_HIGH * 1.25);
+ runAdaptive('adaptive_19', 19, 7);
+ runAdaptive('adaptive_23', 23, 5);
+ runOtsu();
  step = 'clearBuffers';
  reportStage(step);
  react_native_fast_opencv_1.OpenCV.clearBuffers();
  step = 'updateQuad';
  reportStage(step);
- updateQuad(best);
+ updateQuad(bestQuad);
  }
  catch (error) {
  reportError(step, error);
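
Note on the change above: 0.57.0 ran a single Canny pass and kept the largest raw contour, while 0.59.0 clones a shared preprocessed mat and runs several independent binarization passes (three Canny threshold pairs, two adaptive-threshold block sizes, and an Otsu threshold), scoring each pass and keeping the largest quad that survives the filters. The plain-TypeScript sketch below illustrates only that best-candidate selection pattern; DetectionPass, Candidate, and pickBestQuad are illustrative names rather than part of the package's API, and the real passes run react-native-fast-opencv worklets as shown in the diff.

// Minimal sketch of the best-candidate-across-passes selection, assuming each
// pass already returns a validated quad (or null) the way evaluateContours does.
type Point = { x: number; y: number };
type Candidate = { quad: Point[]; area: number } | null;
type DetectionPass = { label: string; run: () => Candidate };

function pickBestQuad(passes: DetectionPass[]): Candidate {
  let best: Candidate = null;
  for (const pass of passes) {
    const candidate = pass.run(); // e.g. Canny / adaptiveThreshold / Otsu + findContours
    if (candidate && (!best || candidate.area > best.area)) {
      best = candidate; // keep the largest valid quad seen so far
    }
  }
  return best; // null when no pass produced a usable quadrilateral
}

// Usage mirroring the pass order in the diff (stub passes for illustration):
const best = pickBestQuad([
  { label: 'canny_primary', run: () => null },
  { label: 'adaptive_19', run: () => null },
  { label: 'otsu', run: () => null },
]);
console.log(best);
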
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "react-native-rectangle-doc-scanner",
- "version": "0.57.0",
+ "version": "0.59.0",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "repository": {
@@ -344,182 +344,225 @@ export const DocScanner: React.FC<Props> = ({
  const element = OpenCV.invoke('getStructuringElement', MorphShapes.MORPH_RECT, morphologyKernel);
  step = 'morphologyEx';
  reportStage(step);
- // MORPH_CLOSE to fill small holes in edges
  OpenCV.invoke('morphologyEx', mat, mat, MorphTypes.MORPH_CLOSE, element);
- // MORPH_OPEN to remove small noise
  OpenCV.invoke('morphologyEx', mat, mat, MorphTypes.MORPH_OPEN, element);

+ const ADAPTIVE_THRESH_GAUSSIAN_C = 1;
+ const THRESH_BINARY = 0;
+ const THRESH_OTSU = 8;
+
  // Bilateral filter for edge-preserving smoothing (better quality than Gaussian)
  step = 'bilateralFilter';
  reportStage(step);
+ let processed = mat;
  try {
  const tempMat = OpenCV.createObject(ObjectType.Mat);
  OpenCV.invoke('bilateralFilter', mat, tempMat, 9, 75, 75);
- mat = tempMat;
+ processed = tempMat;
  } catch (error) {
  if (__DEV__) {
  console.warn('[DocScanner] bilateralFilter unavailable, falling back to GaussianBlur', error);
  }
- step = 'gaussianBlurFallback';
- reportStage(step);
  const blurKernel = OpenCV.createObject(ObjectType.Size, 5, 5);
  OpenCV.invoke('GaussianBlur', mat, mat, blurKernel, 0);
+ processed = mat;
  }

- step = 'Canny';
- reportStage(step);
- // Configurable Canny parameters for adaptive edge detection
- OpenCV.invoke('Canny', mat, mat, CANNY_LOW, CANNY_HIGH);
-
- step = 'createContours';
+ // Additional blur and close pass to smooth jagged edges
+ step = 'gaussianBlur';
  reportStage(step);
- const contours = OpenCV.createObject(ObjectType.PointVectorOfVectors);
- OpenCV.invoke('findContours', mat, contours, RetrievalModes.RETR_EXTERNAL, ContourApproximationModes.CHAIN_APPROX_SIMPLE);
+ const gaussianKernel = OpenCV.createObject(ObjectType.Size, 5, 5);
+ OpenCV.invoke('GaussianBlur', processed, processed, gaussianKernel, 0);
+ OpenCV.invoke('morphologyEx', processed, processed, MorphTypes.MORPH_CLOSE, element);

- let best: Point[] | null = null;
- let maxArea = 0;
+ const baseMat = OpenCV.invoke('clone', processed);
  const frameArea = width * height;
-
- step = 'toJSValue';
- reportStage(step);
- const contourVector = OpenCV.toJSValue(contours);
- const contourArray = Array.isArray(contourVector?.array) ? contourVector.array : [];
-
- for (let i = 0; i < contourArray.length; i += 1) {
- step = `contour_${i}_copy`;
- reportStage(step);
- const contour = OpenCV.copyObjectFromVector(contours, i);
-
- // Compute absolute area first
- step = `contour_${i}_area_abs`;
- reportStage(step);
- const { value: area } = OpenCV.invoke('contourArea', contour, false);
-
- // Skip extremely small contours, but keep threshold very low to allow distant documents
- if (typeof area !== 'number' || !isFinite(area)) {
- continue;
+ const originalArea = frame.width * frame.height;
+ const minEdgeThreshold = Math.max(14, Math.min(frame.width, frame.height) * MIN_EDGE_RATIO);
+ const epsilonValues = [
+ 0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009,
+ 0.01, 0.012, 0.015, 0.018, 0.02, 0.025, 0.03, 0.035, 0.04, 0.05,
+ 0.06, 0.07, 0.08, 0.09, 0.1, 0.12,
+ ];
+
+ let bestQuad: Point[] | null = null;
+ let bestArea = 0;
+ let convexHullWarned = false;
+
+ const considerCandidate = (candidate: { quad: Point[]; area: number } | null) => {
+ 'worklet';
+ if (!candidate) {
+ return;
  }
-
- if (area < 50) {
- continue;
+ if (!bestQuad || candidate.area > bestArea) {
+ bestQuad = candidate.quad;
+ bestArea = candidate.area;
  }
+ };
+
+ const evaluateContours = (inputMat: unknown, attemptLabel: string): { quad: Point[]; area: number } | null => {
+ 'worklet';

- step = `contour_${i}_area`; // ratio stage
+ step = `findContours_${attemptLabel}`;
  reportStage(step);
- const areaRatio = area / frameArea;
+ const contours = OpenCV.createObject(ObjectType.PointVectorOfVectors);
+ OpenCV.invoke('findContours', inputMat, contours, RetrievalModes.RETR_EXTERNAL, ContourApproximationModes.CHAIN_APPROX_SIMPLE);

- if (__DEV__) {
- console.log('[DocScanner] area', area, 'ratio', areaRatio);
- }
+ const contourVector = OpenCV.toJSValue(contours);
+ const contourArray = Array.isArray(contourVector?.array) ? contourVector.array : [];

- // Skip if area ratio is too small or too large
- if (areaRatio < 0.0002 || areaRatio > 0.99) {
- continue;
- }
+ let bestLocal: { quad: Point[]; area: number } | null = null;
+
+ for (let i = 0; i < contourArray.length; i += 1) {
+ step = `${attemptLabel}_contour_${i}_copy`;
+ reportStage(step);
+ const contour = OpenCV.copyObjectFromVector(contours, i);

- // Try to use convex hull for better corner detection
- let contourToUse = contour;
- try {
- step = `contour_${i}_convexHull`;
+ step = `${attemptLabel}_contour_${i}_area`;
  reportStage(step);
- const hull = OpenCV.createObject(ObjectType.PointVector);
- OpenCV.invoke('convexHull', contour, hull, false, true);
- contourToUse = hull;
- } catch (err) {
- // If convexHull fails, use original contour
- if (__DEV__) {
- console.warn('[DocScanner] convexHull failed, using original contour');
+ const { value: area } = OpenCV.invoke('contourArea', contour, false);
+ if (typeof area !== 'number' || !isFinite(area) || area < 60) {
+ continue;
  }
- }

- step = `contour_${i}_arcLength`;
- reportStage(step);
- const { value: perimeter } = OpenCV.invoke('arcLength', contourToUse, true);
- const approx = OpenCV.createObject(ObjectType.PointVector);
+ const resizedRatio = area / frameArea;
+ if (resizedRatio < 0.00012 || resizedRatio > 0.98) {
+ continue;
+ }

- let approxArray: Array<{ x: number; y: number }> = [];
+ let contourToUse = contour;
+ try {
+ const hull = OpenCV.createObject(ObjectType.PointVector);
+ OpenCV.invoke('convexHull', contour, hull, false, true);
+ contourToUse = hull;
+ } catch (err) {
+ if (__DEV__ && !convexHullWarned) {
+ console.warn('[DocScanner] convexHull failed, using original contour');
+ convexHullWarned = true;
+ }
+ }

- // Try more epsilon values from 0.1% to 10% for difficult shapes
- const epsilonValues = [
- 0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009,
- 0.01, 0.012, 0.015, 0.018, 0.02, 0.025, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1
- ];
+ const { value: perimeter } = OpenCV.invoke('arcLength', contourToUse, true);
+ if (typeof perimeter !== 'number' || !isFinite(perimeter) || perimeter < 80) {
+ continue;
+ }

- for (let attempt = 0; attempt < epsilonValues.length; attempt += 1) {
- const epsilon = epsilonValues[attempt] * perimeter;
- step = `contour_${i}_approxPolyDP_attempt_${attempt}`;
- reportStage(step);
- OpenCV.invoke('approxPolyDP', contourToUse, approx, epsilon, true);
+ const approx = OpenCV.createObject(ObjectType.PointVector);
+ let approxArray: Array<{ x: number; y: number }> = [];

- step = `contour_${i}_toJS_attempt_${attempt}`;
- reportStage(step);
- const approxValue = OpenCV.toJSValue(approx);
- const candidate = Array.isArray(approxValue?.array) ? approxValue.array : [];
+ for (let attempt = 0; attempt < epsilonValues.length; attempt += 1) {
+ const epsilon = epsilonValues[attempt] * perimeter;
+ step = `${attemptLabel}_contour_${i}_approx_${attempt}`;
+ reportStage(step);
+ OpenCV.invoke('approxPolyDP', contourToUse, approx, epsilon, true);

- if (__DEV__) {
- console.log('[DocScanner] approx length', candidate.length, 'epsilon', epsilon);
+ const approxValue = OpenCV.toJSValue(approx);
+ const candidate = Array.isArray(approxValue?.array) ? approxValue.array : [];
+ if (candidate.length === 4) {
+ approxArray = candidate as Array<{ x: number; y: number }>;
+ break;
+ }
  }

- if (candidate.length === 4) {
- approxArray = candidate as Array<{ x: number; y: number }>;
- break;
+ if (approxArray.length !== 4) {
+ continue;
  }
- }

- // Only proceed if we found exactly 4 corners
- if (approxArray.length !== 4) {
- continue;
- }
+ const isValidPoint = (pt: { x: number; y: number }) =>
+ typeof pt.x === 'number' && typeof pt.y === 'number' && isFinite(pt.x) && isFinite(pt.y);

- step = `contour_${i}_convex`;
- reportStage(step);
+ if (!approxArray.every(isValidPoint)) {
+ continue;
+ }

- // Validate points before processing
- const isValidPoint = (pt: { x: number; y: number }) => {
- return typeof pt.x === 'number' && typeof pt.y === 'number' &&
- !isNaN(pt.x) && !isNaN(pt.y) &&
- isFinite(pt.x) && isFinite(pt.y);
- };
+ const normalizedPoints: Point[] = approxArray.map((pt) => ({
+ x: pt.x / ratio,
+ y: pt.y / ratio,
+ }));

- if (!approxArray.every(isValidPoint)) {
- if (__DEV__) {
- console.warn('[DocScanner] invalid points in approxArray', approxArray);
+ if (!isConvexQuadrilateral(normalizedPoints)) {
+ continue;
  }
- continue;
- }

- const points: Point[] = approxArray.map((pt: { x: number; y: number }) => ({
- x: pt.x / ratio,
- y: pt.y / ratio,
- }));
+ const sanitized = sanitizeQuad(orderQuadPoints(normalizedPoints));
+ if (!isValidQuad(sanitized)) {
+ continue;
+ }

- // Verify the quadrilateral is convex (valid document shape)
- try {
- if (!isConvexQuadrilateral(points)) {
- if (__DEV__) {
- console.log('[DocScanner] not convex, skipping:', points);
- }
+ const edges = quadEdgeLengths(sanitized);
+ const minEdge = Math.min(...edges);
+ const maxEdge = Math.max(...edges);
+ if (!Number.isFinite(minEdge) || minEdge < minEdgeThreshold) {
+ continue;
+ }
+ const aspectRatio = maxEdge / Math.max(minEdge, 1);
+ if (!Number.isFinite(aspectRatio) || aspectRatio > 8.5) {
  continue;
  }
- } catch (err) {
- if (__DEV__) {
- console.warn('[DocScanner] convex check error:', err, 'points:', points);
+
+ const quadAreaValue = quadArea(sanitized);
+ const originalRatio = originalArea > 0 ? quadAreaValue / originalArea : 0;
+ if (originalRatio < 0.00012 || originalRatio > 0.92) {
+ continue;
  }
- continue;
- }

- if (area > maxArea) {
- best = points;
- maxArea = area;
+ const candidate = {
+ quad: sanitized,
+ area: quadAreaValue,
+ };
+
+ if (!bestLocal || candidate.area > bestLocal.area) {
+ bestLocal = candidate;
+ }
  }
- }
+
+ return bestLocal;
+ };
+
+ const runCanny = (label: string, low: number, high: number) => {
+ 'worklet';
+ const working = OpenCV.invoke('clone', baseMat);
+ step = `${label}_canny`;
+ reportStage(step);
+ OpenCV.invoke('Canny', working, working, low, high);
+ OpenCV.invoke('morphologyEx', working, working, MorphTypes.MORPH_CLOSE, element);
+ considerCandidate(evaluateContours(working, label));
+ };
+
+ const runAdaptive = (label: string, blockSize: number, c: number) => {
+ 'worklet';
+ const working = OpenCV.invoke('clone', baseMat);
+ step = `${label}_adaptive`;
+ reportStage(step);
+ OpenCV.invoke('adaptiveThreshold', working, working, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, blockSize, c);
+ OpenCV.invoke('morphologyEx', working, working, MorphTypes.MORPH_CLOSE, element);
+ considerCandidate(evaluateContours(working, label));
+ };
+
+ const runOtsu = () => {
+ 'worklet';
+ const working = OpenCV.invoke('clone', baseMat);
+ step = 'otsu_threshold';
+ reportStage(step);
+ OpenCV.invoke('threshold', working, working, 0, 255, THRESH_BINARY | THRESH_OTSU);
+ OpenCV.invoke('morphologyEx', working, working, MorphTypes.MORPH_CLOSE, element);
+ considerCandidate(evaluateContours(working, 'otsu'));
+ };
+
+ runCanny('canny_primary', CANNY_LOW, CANNY_HIGH);
+ runCanny('canny_soft', Math.max(6, CANNY_LOW * 0.6), Math.max(CANNY_LOW * 1.2, CANNY_HIGH * 0.75));
+ runCanny('canny_hard', Math.max(12, CANNY_LOW * 1.1), CANNY_HIGH * 1.25);
+
+ runAdaptive('adaptive_19', 19, 7);
+ runAdaptive('adaptive_23', 23, 5);
+ runOtsu();

  step = 'clearBuffers';
  reportStage(step);
  OpenCV.clearBuffers();
  step = 'updateQuad';
  reportStage(step);
- updateQuad(best);
+ updateQuad(bestQuad);
  } catch (error) {
  reportError(step, error);
  }
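
Note on the new geometry gates: before a quad is accepted, the 0.59.0 source orders and sanitizes the points, then rejects candidates whose shortest edge is below max(14, min(frame.width, frame.height) * MIN_EDGE_RATIO), whose aspect ratio exceeds 8.5, or whose area falls outside 0.012%-92% of the original frame. The sketch below shows one plausible way to compute those quantities in plain TypeScript; it is not the package's quad module (quad.ts is not part of this diff), and the minEdgeRatio default is an assumed stand-in for MIN_EDGE_RATIO, whose real value is defined elsewhere in the source.

// Illustrative reimplementation of the quantities checked above (edge lengths,
// aspect ratio, shoelace area); names and the minEdgeRatio default are assumptions.
type Pt = { x: number; y: number };

const edgeLengths = (quad: Pt[]): number[] =>
  quad.map((p, i) => {
    const q = quad[(i + 1) % quad.length];
    return Math.hypot(q.x - p.x, q.y - p.y);
  });

// Shoelace formula; valid for simple (non-self-intersecting) quadrilaterals.
const polygonArea = (quad: Pt[]): number =>
  Math.abs(
    quad.reduce((sum, p, i) => {
      const q = quad[(i + 1) % quad.length];
      return sum + p.x * q.y - q.x * p.y;
    }, 0),
  ) / 2;

function passesGeometryGates(
  quad: Pt[],
  frameWidth: number,
  frameHeight: number,
  minEdgeRatio = 0.05, // assumed stand-in for MIN_EDGE_RATIO
): boolean {
  const edges = edgeLengths(quad);
  const minEdge = Math.min(...edges);
  const maxEdge = Math.max(...edges);
  const minEdgeThreshold = Math.max(14, Math.min(frameWidth, frameHeight) * minEdgeRatio);
  if (!Number.isFinite(minEdge) || minEdge < minEdgeThreshold) return false;
  if (maxEdge / Math.max(minEdge, 1) > 8.5) return false;
  const areaRatio = polygonArea(quad) / (frameWidth * frameHeight);
  return areaRatio >= 0.00012 && areaRatio <= 0.92;
}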