react-native-rectangle-doc-scanner 1.6.0 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocScanner.d.ts +1 -0
- package/dist/DocScanner.js +8 -2
- package/ios/RNRDocScannerView.swift +95 -7
- package/package.json +1 -1
- package/src/DocScanner.tsx +20 -8
package/dist/DocScanner.d.ts
CHANGED

@@ -36,6 +36,7 @@ interface Props {
     gridColor?: string;
     gridLineWidth?: number;
     detectionConfig?: DetectionConfig;
+    useNativeOverlay?: boolean;
 }
 export declare const DocScanner: React.ForwardRefExoticComponent<Props & React.RefAttributes<DocScannerHandle>>;
 export type { DocScannerHandle };
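For context, a minimal consumer-side sketch of the new prop (the screen component and the onCapture logging are illustrative only; the import path assumes the package root export):

import React from 'react';
import { DocScanner } from 'react-native-rectangle-doc-scanner';

// Explicitly opting in to the native overlay. Omitting useNativeOverlay
// falls back to the platform default added in this release (native on iOS,
// JS overlay elsewhere), as the dist/DocScanner.js hunk below shows.
export function ScanScreen() {
  return (
    <DocScanner
      autoCapture
      useNativeOverlay={true}
      onCapture={(result) => console.log('captured document', result)}
    />
  );
}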
package/dist/DocScanner.js
CHANGED

@@ -48,12 +48,18 @@ if (!NativeDocScannerModule) {
 const NativeDocScanner = (0, react_native_1.requireNativeComponent)(VIEW_NAME);
 const DEFAULT_OVERLAY_COLOR = '#e7a649';
 const GRID_COLOR_FALLBACK = 'rgba(231, 166, 73, 0.35)';
-exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAULT_OVERLAY_COLOR, autoCapture = true, minStableFrames = 8, enableTorch = false, quality = 90, useBase64 = false, children, showGrid = true, gridColor, gridLineWidth = 2, }, ref) => {
+exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAULT_OVERLAY_COLOR, autoCapture = true, minStableFrames = 8, enableTorch = false, quality = 90, useBase64 = false, children, showGrid = true, gridColor, gridLineWidth = 2, useNativeOverlay, }, ref) => {
     const viewRef = (0, react_1.useRef)(null);
     const capturingRef = (0, react_1.useRef)(false);
     const [quad, setQuad] = (0, react_1.useState)(null);
     const [stable, setStable] = (0, react_1.useState)(0);
     const [frameSize, setFrameSize] = (0, react_1.useState)(null);
+    const shouldUseNativeOverlay = (0, react_1.useMemo)(() => {
+        if (typeof useNativeOverlay === 'boolean') {
+            return useNativeOverlay;
+        }
+        return react_native_1.Platform.OS === 'ios';
+    }, [useNativeOverlay]);
     const effectiveGridColor = (0, react_1.useMemo)(() => gridColor ?? GRID_COLOR_FALLBACK, [gridColor]);
     const ensureViewHandle = (0, react_1.useCallback)(() => {
         const nodeHandle = (0, react_native_1.findNodeHandle)(viewRef.current);

@@ -156,7 +162,7 @@ exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAUL
     }), [captureNative, resetNativeStability]);
     return (react_1.default.createElement(react_native_1.View, { style: styles.container },
         react_1.default.createElement(NativeDocScanner, { ref: viewRef, style: react_native_1.StyleSheet.absoluteFill, detectionCountBeforeCapture: minStableFrames, autoCapture: autoCapture, enableTorch: enableTorch, quality: quality, useBase64: useBase64, onRectangleDetect: handleRectangleDetect, onPictureTaken: handlePictureTaken }),
-        react_1.default.createElement(overlay_1.Overlay, { quad: quad, color: overlayColor, frameSize: frameSize, showGrid: showGrid, gridColor: effectiveGridColor, gridLineWidth: gridLineWidth }),
+        !shouldUseNativeOverlay && (react_1.default.createElement(overlay_1.Overlay, { quad: quad, color: overlayColor, frameSize: frameSize, showGrid: showGrid, gridColor: effectiveGridColor, gridLineWidth: gridLineWidth })),
         !autoCapture && (react_1.default.createElement(react_native_1.TouchableOpacity, { style: styles.button, onPress: handleManualCapture })),
         children));
 });
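The compiled output above resolves the prop with a memoized expression; behaviorally it is equivalent to this small helper (the function name is hypothetical, introduced here only to spell out the precedence):

import { Platform } from 'react-native';

// An explicit boolean always wins; when the prop is omitted, the native
// overlay is enabled only on iOS, matching the useMemo added above.
function resolveNativeOverlay(useNativeOverlay?: boolean): boolean {
  if (typeof useNativeOverlay === 'boolean') {
    return useNativeOverlay;
  }
  return Platform.OS === 'ios';
}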
package/ios/RNRDocScannerView.swift
CHANGED

@@ -28,13 +28,16 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     private var previewLayer: AVCaptureVideoPreviewLayer?
     private let videoOutput = AVCaptureVideoDataOutput()
     private let photoOutput = AVCapturePhotoOutput()
+    private var smoothedOverlayPoints: [CGPoint]?
+    private let outlineLayer = CAShapeLayer()
+    private let gridLayer = CAShapeLayer()

     private var currentStableCounter: Int = 0
     private var isProcessingFrame = false
     private var isCaptureInFlight = false
     private var lastObservation: VNRectangleObservation?
     private var missedDetectionFrames: Int = 0
-    private let maxMissedDetections =
+    private let maxMissedDetections = 1
     private var lastFrameSize: CGSize = .zero
     private var photoCaptureCompletion: ((Result<RNRDocScannerCaptureResult, Error>) -> Void)?

@@ -51,6 +54,7 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     private func commonInit() {
         backgroundColor = .black
         configurePreviewLayer()
+        configureOverlayLayers()
         configureSession()
     }

@@ -61,6 +65,23 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         previewLayer = layer
     }

+    private func configureOverlayLayers() {
+        outlineLayer.strokeColor = UIColor(red: 0.18, green: 0.6, blue: 0.95, alpha: 1.0).cgColor
+        outlineLayer.fillColor = UIColor(red: 0.18, green: 0.6, blue: 0.95, alpha: 0.2).cgColor
+        outlineLayer.lineWidth = 4
+        outlineLayer.lineJoin = .round
+        outlineLayer.isHidden = true
+        layer.addSublayer(outlineLayer)
+
+        gridLayer.strokeColor = UIColor(red: 0.18, green: 0.6, blue: 0.95, alpha: 0.35).cgColor
+        gridLayer.fillColor = UIColor.clear.cgColor
+        gridLayer.lineWidth = 1.5
+        gridLayer.lineJoin = .round
+        gridLayer.isHidden = true
+        gridLayer.zPosition = outlineLayer.zPosition + 1
+        layer.addSublayer(gridLayer)
+    }
+
     private func configureSession() {
         sessionQueue.async { [weak self] in
             guard let self else { return }

@@ -115,6 +136,8 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         if let connection = previewLayer?.connection, connection.isVideoOrientationSupported {
             connection.videoOrientation = .portrait
         }
+        outlineLayer.frame = bounds
+        gridLayer.frame = bounds
     }

     private func updateTorchMode() {

@@ -181,7 +204,9 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             return
         }

-        let
+        let filtered = observations.filter { $0.confidence >= 0.55 }
+        let candidates = filtered.isEmpty ? observations : filtered
+        let weighted: [VNRectangleObservation] = candidates.sorted { (lhs: VNRectangleObservation, rhs: VNRectangleObservation) -> Bool in
            let lhsScore: CGFloat = CGFloat(lhs.confidence) * lhs.boundingBox.area
            let rhsScore: CGFloat = CGFloat(rhs.confidence) * rhs.boundingBox.area
            return lhsScore > rhsScore

@@ -200,13 +225,13 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A

         let request = VNDetectRectanglesRequest(completionHandler: requestHandler)

-        request.maximumObservations =
-        request.minimumConfidence = 0.
-        request.minimumAspectRatio = 0.
-        request.maximumAspectRatio = 2.
+        request.maximumObservations = 3
+        request.minimumConfidence = 0.55
+        request.minimumAspectRatio = 0.1
+        request.maximumAspectRatio = 2.0
         request.minimumSize = 0.05
         if #available(iOS 13.0, *) {
-            request.quadratureTolerance =
+            request.quadratureTolerance = 20
         }

         let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: orientation, options: [:])

@@ -233,9 +258,12 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         } else {
             lastObservation = nil
             missedDetectionFrames = 0
+            smoothedOverlayPoints = nil
             effectiveObservation = nil
         }

+        updateNativeOverlay(with: effectiveObservation)
+
         let payload: [String: Any?]
         if let observation = effectiveObservation {
             let points = [

@@ -276,6 +304,66 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             CGPoint(x: normalizedPoint.x * frameSize.width, y: (1 - normalizedPoint.y) * frameSize.height)
         }

+    private func updateNativeOverlay(with observation: VNRectangleObservation?) {
+        DispatchQueue.main.async {
+            guard let observation else {
+                self.outlineLayer.path = nil
+                self.gridLayer.path = nil
+                self.outlineLayer.isHidden = true
+                self.gridLayer.isHidden = true
+                return
+            }
+
+            guard let previewLayer = self.previewLayer else {
+                return
+            }
+
+            let points = [
+                self.convertToLayerPoint(observation.topLeft, previewLayer: previewLayer),
+                self.convertToLayerPoint(observation.topRight, previewLayer: previewLayer),
+                self.convertToLayerPoint(observation.bottomRight, previewLayer: previewLayer),
+                self.convertToLayerPoint(observation.bottomLeft, previewLayer: previewLayer),
+            ]
+
+            let outline = UIBezierPath()
+            outline.move(to: points[0])
+            outline.addLine(to: points[1])
+            outline.addLine(to: points[2])
+            outline.addLine(to: points[3])
+            outline.close()
+
+            self.outlineLayer.path = outline.cgPath
+            self.outlineLayer.isHidden = false
+
+            let gridPath = UIBezierPath()
+            let steps: [CGFloat] = [1.0 / 3.0, 2.0 / 3.0]
+
+            for step in steps {
+                let startVertical = self.interpolate(points[0], points[1], t: step)
+                let endVertical = self.interpolate(points[3], points[2], t: step)
+                gridPath.move(to: startVertical)
+                gridPath.addLine(to: endVertical)
+
+                let startHorizontal = self.interpolate(points[0], points[3], t: step)
+                let endHorizontal = self.interpolate(points[1], points[2], t: step)
+                gridPath.move(to: startHorizontal)
+                gridPath.addLine(to: endHorizontal)
+            }
+
+            self.gridLayer.path = gridPath.cgPath
+            self.gridLayer.isHidden = false
+        }
+    }
+
+    private func convertToLayerPoint(_ normalizedPoint: CGPoint, previewLayer: AVCaptureVideoPreviewLayer) -> CGPoint {
+        let devicePoint = CGPoint(x: normalizedPoint.x, y: 1 - normalizedPoint.y)
+        return previewLayer.layerPointConverted(fromCaptureDevicePoint: devicePoint)
+    }
+
+    private func interpolate(_ start: CGPoint, _ end: CGPoint, t: CGFloat) -> CGPoint {
+        CGPoint(x: start.x + (end.x - start.x) * t, y: start.y + (end.y - start.y) * t)
+    }
+
     // MARK: - Capture

     func capture(completion: @escaping (Result<RNRDocScannerCaptureResult, Error>) -> Void) {
package/package.json
CHANGED
package/src/DocScanner.tsx
CHANGED

@@ -8,6 +8,7 @@ import React, {
   useState,
 } from 'react';
 import {
+  Platform,
   findNodeHandle,
   NativeModules,
   requireNativeComponent,

@@ -93,6 +94,7 @@ interface Props {
   gridColor?: string;
   gridLineWidth?: number;
   detectionConfig?: DetectionConfig;
+  useNativeOverlay?: boolean;
 }

 const DEFAULT_OVERLAY_COLOR = '#e7a649';

@@ -110,6 +112,7 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(({
   showGrid = true,
   gridColor,
   gridLineWidth = 2,
+  useNativeOverlay,
 }, ref) => {
   const viewRef = useRef<NativeDocScannerInstance | null>(null);
   const capturingRef = useRef(false);

@@ -117,6 +120,13 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(({
   const [stable, setStable] = useState(0);
   const [frameSize, setFrameSize] = useState<{ width: number; height: number } | null>(null);

+  const shouldUseNativeOverlay = useMemo(() => {
+    if (typeof useNativeOverlay === 'boolean') {
+      return useNativeOverlay;
+    }
+    return Platform.OS === 'ios';
+  }, [useNativeOverlay]);
+
   const effectiveGridColor = useMemo(
     () => gridColor ?? GRID_COLOR_FALLBACK,
     [gridColor],

@@ -261,14 +271,16 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(({
         onRectangleDetect={handleRectangleDetect}
         onPictureTaken={handlePictureTaken}
       />
-      <Overlay
-        quad={quad}
-        color={overlayColor}
-        frameSize={frameSize}
-        showGrid={showGrid}
-        gridColor={effectiveGridColor}
-        gridLineWidth={gridLineWidth}
-      />
+      {!shouldUseNativeOverlay && (
+        <Overlay
+          quad={quad}
+          color={overlayColor}
+          frameSize={frameSize}
+          showGrid={showGrid}
+          gridColor={effectiveGridColor}
+          gridLineWidth={gridLineWidth}
+        />
+      )}
       {!autoCapture && (
         <TouchableOpacity style={styles.button} onPress={handleManualCapture} />
       )}