react-native-rectangle-doc-scanner 0.70.0 → 1.0.0
@@ -35,6 +35,7 @@ import org.opencv.core.MatOfPoint2f
 import org.opencv.core.Point
 import org.opencv.core.Size as MatSize
 import org.opencv.imgproc.Imgproc
+import org.opencv.photo.Photo
 import java.io.File
 import java.nio.ByteBuffer
 import java.text.SimpleDateFormat

@@ -387,11 +388,20 @@ class RNRDocScannerView @JvmOverloads constructor(
         val gray = Mat()
         Imgproc.cvtColor(mat, gray, Imgproc.COLOR_BGR2GRAY)
 
+        // Improve contrast for low-light or glossy surfaces
+        val clahe = Photo.createCLAHE(2.0, MatSize(8.0, 8.0))
+        val enhanced = Mat()
+        clahe.apply(gray, enhanced)
+        clahe.collectGarbage()
+
         val blurred = Mat()
-        Imgproc.GaussianBlur(
+        Imgproc.GaussianBlur(enhanced, blurred, MatSize(5.0, 5.0), 0.0)
 
         val edges = Mat()
-        Imgproc.Canny(blurred, edges,
+        Imgproc.Canny(blurred, edges, 40.0, 140.0)
+
+        val morphKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, MatSize(5.0, 5.0))
+        Imgproc.morphologyEx(edges, edges, Imgproc.MORPH_CLOSE, morphKernel)
 
         val contours = ArrayList<MatOfPoint>()
         val hierarchy = Mat()

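Taken together, the new Android preprocessing path runs CLAHE contrast enhancement before the existing blur/Canny step and then closes small gaps in the edge map before contours are extracted. The sketch below is a minimal standalone rendering of that chain, not the package's actual method: the wrapper name detectDocumentEdges is illustrative, and it obtains CLAHE via Imgproc.createCLAHE (where the stock OpenCV Java bindings expose it), whereas the released code imports org.opencv.photo.Photo for the same purpose.

import org.opencv.core.Mat
import org.opencv.core.Size
import org.opencv.imgproc.Imgproc

// Sketch of the grayscale -> CLAHE -> blur -> Canny -> close chain added in 1.0.0.
// `frame` is assumed to be a BGR Mat decoded from the camera stream.
fun detectDocumentEdges(frame: Mat): Mat {
    val gray = Mat()
    Imgproc.cvtColor(frame, gray, Imgproc.COLOR_BGR2GRAY)

    // Local contrast enhancement; clip limit 2.0 and an 8x8 tile grid match the diff.
    val clahe = Imgproc.createCLAHE(2.0, Size(8.0, 8.0))
    val enhanced = Mat()
    clahe.apply(gray, enhanced)

    // A light blur keeps Canny from reacting to sensor noise.
    val blurred = Mat()
    Imgproc.GaussianBlur(enhanced, blurred, Size(5.0, 5.0), 0.0)

    // Fixed thresholds (40/140), as in the released code.
    val edges = Mat()
    Imgproc.Canny(blurred, edges, 40.0, 140.0)

    // Morphological close bridges small breaks in the document outline.
    val kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, Size(5.0, 5.0))
    Imgproc.morphologyEx(edges, edges, Imgproc.MORPH_CLOSE, kernel)

    gray.release(); enhanced.release(); blurred.release(); kernel.release()
    return edges
}

The edge map produced here is what the contour search and quad filtering in the following hunks consume.
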
@@ -415,7 +425,7 @@ class RNRDocScannerView @JvmOverloads constructor(
             }
 
             val area = abs(Imgproc.contourArea(approxCurve))
-            if (area < frameArea * 0.
+            if (area < frameArea * 0.05 || area > frameArea * 0.98) {
                 contour.release()
                 contour2f.release()
                 continue

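The widened guard now rejects both tiny contours (under 5% of the frame area) and near-full-frame contours (over 98%), so the image border itself can no longer win over the actual document. A self-contained sketch of that candidate filter follows; the 0.05/0.98 area bounds and the approxCurve/contourArea usage come from the diff, while the function name findDocumentQuad and the 0.02 epsilon factor are illustrative assumptions.

import org.opencv.core.Mat
import org.opencv.core.MatOfPoint
import org.opencv.core.MatOfPoint2f
import org.opencv.imgproc.Imgproc
import kotlin.math.abs

// Sketch: keep the largest 4-corner contour whose area sits between 5% and 98%
// of the frame, mirroring the bounds introduced in this release.
fun findDocumentQuad(edges: Mat, frameArea: Double): MatOfPoint2f? {
    val contours = ArrayList<MatOfPoint>()
    val hierarchy = Mat()
    Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE)
    hierarchy.release()

    var best: MatOfPoint2f? = null
    var bestArea = 0.0
    for (contour in contours) {
        val contour2f = MatOfPoint2f(*contour.toArray())
        val approx = MatOfPoint2f()
        // Epsilon at 2% of the perimeter is a common choice; the package's value may differ.
        val epsilon = 0.02 * Imgproc.arcLength(contour2f, true)
        Imgproc.approxPolyDP(contour2f, approx, epsilon, true)

        val area = abs(Imgproc.contourArea(approx))
        if (approx.total() == 4L && area > bestArea && area >= frameArea * 0.05 && area <= frameArea * 0.98) {
            best?.release()
            best = approx
            bestArea = area
        } else {
            approx.release()
        }
        contour.release()
        contour2f.release()
    }
    return best
}
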
@@ -437,8 +447,10 @@ class RNRDocScannerView @JvmOverloads constructor(
         }
 
         gray.release()
+        enhanced.release()
         blurred.release()
        edges.release()
+        morphKernel.release()
         hierarchy.release()
         approxCurve.release()
         mat.release()

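The two new release() calls matter because OpenCV Mat objects own native memory that the JVM garbage collector does not reclaim promptly. A generic pattern that makes this kind of cleanup harder to forget is a small use-style helper; the helper below (withMat) is a hypothetical illustration, not part of react-native-rectangle-doc-scanner.

import org.opencv.core.Mat

// Hypothetical helper: runs a block with a fresh Mat and releases it even if the block throws.
inline fun <T> withMat(block: (Mat) -> T): T {
    val mat = Mat()
    try {
        return block(mat)
    } finally {
        mat.release()
    }
}

// Usage sketch (runCannyAndFindCorners is illustrative): the intermediate buffer
// is released automatically once the block returns.
// val corners = withMat { edges -> runCannyAndFindCorners(frame, edges) }
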
package/dist/utils/overlay.js CHANGED

@@ -73,27 +73,8 @@ const Overlay = ({ quad, color = '#e7a649', frameSize, showGrid = true, gridColo
         sourceQuad = quad;
     }
     else {
-
-
-        const marginY = screenHeight * marginRatio;
-        const maxWidth = screenWidth - marginX * 2;
-        const maxHeight = screenHeight - marginY * 2;
-        const a4Ratio = Math.SQRT2; // ~1.414 height / width
-        let width = maxWidth;
-        let height = width * a4Ratio;
-        if (height > maxHeight) {
-            height = maxHeight;
-            width = height / a4Ratio;
-        }
-        const left = (screenWidth - width) / 2;
-        const top = (screenHeight - height) / 2;
-        transformedQuad = [
-            { x: left, y: top },
-            { x: left + width, y: top },
-            { x: left + width, y: top + height },
-            { x: left, y: top + height },
-        ];
-        sourceFrameSize = null;
+        // No detection yet – skip drawing
+        return { outlinePath: null, gridPaths: [] };
     }
     if (sourceQuad && sourceFrameSize) {
         if (__DEV__) {

@@ -87,6 +87,9 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         if session.canAddOutput(photoOutput) {
             photoOutput.isHighResolutionCaptureEnabled = true
             session.addOutput(photoOutput)
+            if let connection = photoOutput.connection(with: .video), connection.isVideoOrientationSupported {
+                connection.videoOrientation = .portrait
+            }
         }
 
         videoOutput.videoSettings = [

@@ -97,6 +100,9 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
 
         if session.canAddOutput(videoOutput) {
             session.addOutput(videoOutput)
+            if let connection = videoOutput.connection(with: .video), connection.isVideoOrientationSupported {
+                connection.videoOrientation = .portrait
+            }
         }
     }
 }

@@ -104,6 +110,9 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     override func layoutSubviews() {
         super.layoutSubviews()
         previewLayer?.frame = bounds
+        if let connection = previewLayer?.connection, connection.isVideoOrientationSupported {
+            connection.videoOrientation = .portrait
+        }
     }
 
     private func updateTorchMode() {

@@ -174,11 +183,14 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             self.handleDetectedRectangle(observation, frameSize: frameSize)
         }
 
-
-
-
-
-
+        request.maximumObservations = 1
+        request.minimumConfidence = 0.5
+        request.minimumAspectRatio = 0.15
+        request.maximumAspectRatio = 1.75
+        request.minimumSize = 0.08
+        if #available(iOS 13.0, *) {
+            request.quadratureTolerance = 45
+        }
 
         let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: orientation, options: [:])
         do {

package/package.json CHANGED

package/src/utils/overlay.tsx CHANGED

@@ -66,27 +66,8 @@ export const Overlay: React.FC<OverlayProps> = ({
   if (quad && frameSize) {
     sourceQuad = quad;
   } else {
-
-
-    const marginY = screenHeight * marginRatio;
-    const maxWidth = screenWidth - marginX * 2;
-    const maxHeight = screenHeight - marginY * 2;
-    const a4Ratio = Math.SQRT2; // ~1.414 height / width
-    let width = maxWidth;
-    let height = width * a4Ratio;
-    if (height > maxHeight) {
-      height = maxHeight;
-      width = height / a4Ratio;
-    }
-    const left = (screenWidth - width) / 2;
-    const top = (screenHeight - height) / 2;
-    transformedQuad = [
-      { x: left, y: top },
-      { x: left + width, y: top },
-      { x: left + width, y: top + height },
-      { x: left, y: top + height },
-    ];
-    sourceFrameSize = null;
+    // No detection yet – skip drawing
+    return { outlinePath: null, gridPaths: [] };
   }
 
   if (sourceQuad && sourceFrameSize) {