react-native-rectangle-doc-scanner 1.5.0 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocScanner.d.ts +1 -0
- package/dist/DocScanner.js +8 -2
- package/ios/RNRDocScannerView.swift +113 -24
- package/package.json +1 -1
- package/src/DocScanner.tsx +20 -8
package/dist/DocScanner.d.ts
CHANGED
@@ -36,6 +36,7 @@ interface Props {
     gridColor?: string;
     gridLineWidth?: number;
     detectionConfig?: DetectionConfig;
+    useNativeOverlay?: boolean;
 }
 export declare const DocScanner: React.ForwardRefExoticComponent<Props & React.RefAttributes<DocScannerHandle>>;
 export type { DocScannerHandle };
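For orientation, a minimal usage sketch of the new prop. Only useNativeOverlay and its iOS-only default come from this diff; the import path, the surrounding component, and the onCapture payload shape are illustrative assumptions:

import React from 'react';
// Assumed entry point; the package's public export path is not shown in this diff.
import { DocScanner } from 'react-native-rectangle-doc-scanner';

export function ScannerScreen() {
  return (
    <DocScanner
      autoCapture
      // Opt out of the native iOS overlay and keep the JS-rendered <Overlay />.
      // When omitted, the prop defaults to true on iOS and false elsewhere.
      useNativeOverlay={false}
      onCapture={(result) => console.log('captured', result)}
    />
  );
}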
package/dist/DocScanner.js
CHANGED
@@ -48,12 +48,18 @@ if (!NativeDocScannerModule) {
 const NativeDocScanner = (0, react_native_1.requireNativeComponent)(VIEW_NAME);
 const DEFAULT_OVERLAY_COLOR = '#e7a649';
 const GRID_COLOR_FALLBACK = 'rgba(231, 166, 73, 0.35)';
-exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAULT_OVERLAY_COLOR, autoCapture = true, minStableFrames = 8, enableTorch = false, quality = 90, useBase64 = false, children, showGrid = true, gridColor, gridLineWidth = 2, }, ref) => {
+exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAULT_OVERLAY_COLOR, autoCapture = true, minStableFrames = 8, enableTorch = false, quality = 90, useBase64 = false, children, showGrid = true, gridColor, gridLineWidth = 2, useNativeOverlay, }, ref) => {
     const viewRef = (0, react_1.useRef)(null);
     const capturingRef = (0, react_1.useRef)(false);
     const [quad, setQuad] = (0, react_1.useState)(null);
     const [stable, setStable] = (0, react_1.useState)(0);
     const [frameSize, setFrameSize] = (0, react_1.useState)(null);
+    const shouldUseNativeOverlay = (0, react_1.useMemo)(() => {
+        if (typeof useNativeOverlay === 'boolean') {
+            return useNativeOverlay;
+        }
+        return react_native_1.Platform.OS === 'ios';
+    }, [useNativeOverlay]);
     const effectiveGridColor = (0, react_1.useMemo)(() => gridColor ?? GRID_COLOR_FALLBACK, [gridColor]);
     const ensureViewHandle = (0, react_1.useCallback)(() => {
         const nodeHandle = (0, react_native_1.findNodeHandle)(viewRef.current);

@@ -156,7 +162,7 @@ exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAUL
     }), [captureNative, resetNativeStability]);
     return (react_1.default.createElement(react_native_1.View, { style: styles.container },
         react_1.default.createElement(NativeDocScanner, { ref: viewRef, style: react_native_1.StyleSheet.absoluteFill, detectionCountBeforeCapture: minStableFrames, autoCapture: autoCapture, enableTorch: enableTorch, quality: quality, useBase64: useBase64, onRectangleDetect: handleRectangleDetect, onPictureTaken: handlePictureTaken }),
-        react_1.default.createElement(overlay_1.Overlay, { quad: quad, color: overlayColor, frameSize: frameSize, showGrid: showGrid, gridColor: effectiveGridColor, gridLineWidth: gridLineWidth }),
+        !shouldUseNativeOverlay && (react_1.default.createElement(overlay_1.Overlay, { quad: quad, color: overlayColor, frameSize: frameSize, showGrid: showGrid, gridColor: effectiveGridColor, gridLineWidth: gridLineWidth })),
        !autoCapture && (react_1.default.createElement(react_native_1.TouchableOpacity, { style: styles.button, onPress: handleManualCapture })),
         children));
 });
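The compiled output resolves the overlay mode in a memo: an explicit boolean prop wins, otherwise the native overlay is enabled only on iOS. The same decision, distilled into a standalone TypeScript sketch (the helper name is illustrative, not part of the package):

import { Platform } from 'react-native';

// Mirrors the shouldUseNativeOverlay memo: an explicit prop value wins,
// otherwise default to the native overlay on iOS only.
function resolveUseNativeOverlay(useNativeOverlay?: boolean): boolean {
  if (typeof useNativeOverlay === 'boolean') {
    return useNativeOverlay;
  }
  return Platform.OS === 'ios';
}

// resolveUseNativeOverlay(undefined) → true on iOS, false on Android
// resolveUseNativeOverlay(false)     → false everywhere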
package/ios/RNRDocScannerView.swift
CHANGED

@@ -28,6 +28,8 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     private var previewLayer: AVCaptureVideoPreviewLayer?
     private let videoOutput = AVCaptureVideoDataOutput()
     private let photoOutput = AVCapturePhotoOutput()
+    private let outlineLayer = CAShapeLayer()
+    private let gridLayer = CAShapeLayer()

     private var currentStableCounter: Int = 0
     private var isProcessingFrame = false

@@ -51,6 +53,7 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     private func commonInit() {
         backgroundColor = .black
         configurePreviewLayer()
+        configureOverlayLayers()
         configureSession()
     }

@@ -61,6 +64,23 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         previewLayer = layer
     }

+    private func configureOverlayLayers() {
+        outlineLayer.strokeColor = UIColor(red: 0.18, green: 0.6, blue: 0.95, alpha: 1.0).cgColor
+        outlineLayer.fillColor = UIColor(red: 0.18, green: 0.6, blue: 0.95, alpha: 0.2).cgColor
+        outlineLayer.lineWidth = 4
+        outlineLayer.lineJoin = .round
+        outlineLayer.isHidden = true
+        layer.addSublayer(outlineLayer)
+
+        gridLayer.strokeColor = UIColor(red: 0.18, green: 0.6, blue: 0.95, alpha: 0.35).cgColor
+        gridLayer.fillColor = UIColor.clear.cgColor
+        gridLayer.lineWidth = 1.5
+        gridLayer.lineJoin = .round
+        gridLayer.isHidden = true
+        gridLayer.zPosition = outlineLayer.zPosition + 1
+        layer.addSublayer(gridLayer)
+    }
+
     private func configureSession() {
         sessionQueue.async { [weak self] in
             guard let self else { return }

@@ -115,6 +135,8 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         if let connection = previewLayer?.connection, connection.isVideoOrientationSupported {
             connection.videoOrientation = .portrait
         }
+        outlineLayer.frame = bounds
+        gridLayer.frame = bounds
     }

     private func updateTorchMode() {

@@ -165,36 +187,41 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             isProcessingFrame = false
         }

-        … (previous rectangle detection handling; removed lines are not shown in the source diff)
+        let requestHandler: VNRequestCompletionHandler = { [weak self] request, error in
+            guard let self = self else { return }

+            if let error = error {
+                NSLog("[RNRDocScanner] detection error: \(error)")
+                self.lastObservation = nil
+                self.handleDetectedRectangle(nil, frameSize: frameSize)
+                return
+            }

+            guard let observations = request.results as? [VNRectangleObservation], !observations.isEmpty else {
+                self.lastObservation = nil
+                self.handleDetectedRectangle(nil, frameSize: frameSize)
+                return
+            }

+            let weighted: [VNRectangleObservation] = observations.sorted { (lhs: VNRectangleObservation, rhs: VNRectangleObservation) -> Bool in
+                let lhsScore: CGFloat = CGFloat(lhs.confidence) * lhs.boundingBox.area
+                let rhsScore: CGFloat = CGFloat(rhs.confidence) * rhs.boundingBox.area
+                return lhsScore > rhsScore
+            }

-            }
-            self.lastObservation = best
-            self.missedDetectionFrames = 0
-            self.handleDetectedRectangle(best, frameSize: frameSize)
+            guard let best = weighted.first else {
+                self.lastObservation = nil
+                self.handleDetectedRectangle(nil, frameSize: frameSize)
+                return
             }

+            self.lastObservation = best
+            self.missedDetectionFrames = 0
+            self.handleDetectedRectangle(best, frameSize: frameSize)
+        }
+
+        let request = VNDetectRectanglesRequest(completionHandler: requestHandler)
+
         request.maximumObservations = 2
         request.minimumConfidence = 0.4
         request.minimumAspectRatio = 0.08
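The rewritten completion handler ranks Vision candidates by confidence weighted by bounding-box area and keeps the top one. A small TypeScript sketch of that selection heuristic, with illustrative types standing in for VNRectangleObservation:

interface RectCandidate {
  confidence: number;                             // 0..1 from the detector
  boundingBox: { width: number; height: number }; // normalized size
}

// Pick the candidate with the highest confidence × area score,
// mirroring the sort used in the Swift completion handler.
function pickBestRectangle(candidates: RectCandidate[]): RectCandidate | null {
  if (candidates.length === 0) {
    return null;
  }
  return candidates.reduce((best, current) => {
    const bestScore = best.confidence * best.boundingBox.width * best.boundingBox.height;
    const currentScore = current.confidence * current.boundingBox.width * current.boundingBox.height;
    return currentScore > bestScore ? current : best;
  });
}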
@@ -231,6 +258,8 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             effectiveObservation = nil
         }

+        updateNativeOverlay(with: effectiveObservation)
+
         let payload: [String: Any?]
         if let observation = effectiveObservation {
             let points = [

@@ -271,6 +300,66 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             CGPoint(x: normalizedPoint.x * frameSize.width, y: (1 - normalizedPoint.y) * frameSize.height)
         }

+    private func updateNativeOverlay(with observation: VNRectangleObservation?) {
+        DispatchQueue.main.async {
+            guard let observation else {
+                self.outlineLayer.path = nil
+                self.gridLayer.path = nil
+                self.outlineLayer.isHidden = true
+                self.gridLayer.isHidden = true
+                return
+            }
+
+            guard let previewLayer = self.previewLayer else {
+                return
+            }
+
+            let points = [
+                self.convertToLayerPoint(observation.topLeft, previewLayer: previewLayer),
+                self.convertToLayerPoint(observation.topRight, previewLayer: previewLayer),
+                self.convertToLayerPoint(observation.bottomRight, previewLayer: previewLayer),
+                self.convertToLayerPoint(observation.bottomLeft, previewLayer: previewLayer),
+            ]
+
+            let outline = UIBezierPath()
+            outline.move(to: points[0])
+            outline.addLine(to: points[1])
+            outline.addLine(to: points[2])
+            outline.addLine(to: points[3])
+            outline.close()
+
+            self.outlineLayer.path = outline.cgPath
+            self.outlineLayer.isHidden = false
+
+            let gridPath = UIBezierPath()
+            let steps: [CGFloat] = [1.0 / 3.0, 2.0 / 3.0]
+
+            for step in steps {
+                let startVertical = self.interpolate(points[0], points[1], t: step)
+                let endVertical = self.interpolate(points[3], points[2], t: step)
+                gridPath.move(to: startVertical)
+                gridPath.addLine(to: endVertical)
+
+                let startHorizontal = self.interpolate(points[0], points[3], t: step)
+                let endHorizontal = self.interpolate(points[1], points[2], t: step)
+                gridPath.move(to: startHorizontal)
+                gridPath.addLine(to: endHorizontal)
+            }
+
+            self.gridLayer.path = gridPath.cgPath
+            self.gridLayer.isHidden = false
+        }
+    }
+
+    private func convertToLayerPoint(_ normalizedPoint: CGPoint, previewLayer: AVCaptureVideoPreviewLayer) -> CGPoint {
+        let devicePoint = CGPoint(x: normalizedPoint.x, y: 1 - normalizedPoint.y)
+        return previewLayer.layerPointConverted(fromCaptureDevicePoint: devicePoint)
+    }
+
+    private func interpolate(_ start: CGPoint, _ end: CGPoint, t: CGFloat) -> CGPoint {
+        CGPoint(x: start.x + (end.x - start.x) * t, y: start.y + (end.y - start.y) * t)
+    }
+
     // MARK: - Capture

     func capture(completion: @escaping (Result<RNRDocScannerCaptureResult, Error>) -> Void) {
package/package.json
CHANGED
package/src/DocScanner.tsx
CHANGED
@@ -8,6 +8,7 @@ import React, {
   useState,
 } from 'react';
 import {
+  Platform,
   findNodeHandle,
   NativeModules,
   requireNativeComponent,

@@ -93,6 +94,7 @@ interface Props {
   gridColor?: string;
   gridLineWidth?: number;
   detectionConfig?: DetectionConfig;
+  useNativeOverlay?: boolean;
 }

 const DEFAULT_OVERLAY_COLOR = '#e7a649';

@@ -110,6 +112,7 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(({
   showGrid = true,
   gridColor,
   gridLineWidth = 2,
+  useNativeOverlay,
 }, ref) => {
   const viewRef = useRef<NativeDocScannerInstance | null>(null);
   const capturingRef = useRef(false);

@@ -117,6 +120,13 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(({
   const [stable, setStable] = useState(0);
   const [frameSize, setFrameSize] = useState<{ width: number; height: number } | null>(null);

+  const shouldUseNativeOverlay = useMemo(() => {
+    if (typeof useNativeOverlay === 'boolean') {
+      return useNativeOverlay;
+    }
+    return Platform.OS === 'ios';
+  }, [useNativeOverlay]);
+
   const effectiveGridColor = useMemo(
     () => gridColor ?? GRID_COLOR_FALLBACK,
     [gridColor],

@@ -261,14 +271,16 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(({
         onRectangleDetect={handleRectangleDetect}
         onPictureTaken={handlePictureTaken}
       />
-      … (previous unconditional <Overlay /> rendering; removed lines are not shown in the source diff)
+      {!shouldUseNativeOverlay && (
+        <Overlay
+          quad={quad}
+          color={overlayColor}
+          frameSize={frameSize}
+          showGrid={showGrid}
+          gridColor={effectiveGridColor}
+          gridLineWidth={gridLineWidth}
+        />
+      )}
       {!autoCapture && (
         <TouchableOpacity style={styles.button} onPress={handleManualCapture} />
       )}