react-native-rectangle-doc-scanner 1.7.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ios/RNRDocScannerView.swift +25 -9
- package/package.json +1 -1
@@ -28,6 +28,7 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     private var previewLayer: AVCaptureVideoPreviewLayer?
     private let videoOutput = AVCaptureVideoDataOutput()
     private let photoOutput = AVCapturePhotoOutput()
+    private var smoothedOverlayPoints: [CGPoint]?
     private let outlineLayer = CAShapeLayer()
     private let gridLayer = CAShapeLayer()
 
@@ -36,7 +37,7 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
     private var isCaptureInFlight = false
     private var lastObservation: VNRectangleObservation?
     private var missedDetectionFrames: Int = 0
-    private let maxMissedDetections =
+    private let maxMissedDetections = 1
     private var lastFrameSize: CGSize = .zero
     private var photoCaptureCompletion: ((Result<RNRDocScannerCaptureResult, Error>) -> Void)?
 
@@ -203,7 +204,9 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             return
         }
 
-        let
+        let filtered = observations.filter { $0.confidence >= 0.55 }
+        let candidates = filtered.isEmpty ? observations : filtered
+        let weighted: [VNRectangleObservation] = candidates.sorted { (lhs: VNRectangleObservation, rhs: VNRectangleObservation) -> Bool in
             let lhsScore: CGFloat = CGFloat(lhs.confidence) * lhs.boundingBox.area
             let rhsScore: CGFloat = CGFloat(rhs.confidence) * rhs.boundingBox.area
             return lhsScore > rhsScore
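The new candidate-selection code drops rectangles below 0.55 confidence (falling back to the unfiltered list if nothing passes) and then ranks the remainder by confidence weighted by bounding-box area, so a large, confident quad beats a small, noisy one. A minimal standalone sketch of the same selection idea; the `bestRectangle` name is illustrative, and the `CGRect.area` convenience the diff relies on is assumed and defined here so the sketch is self-contained:

```swift
import Vision
import CoreGraphics

// The diff reads `boundingBox.area`; Vision/CoreGraphics don't provide this,
// so a small convenience is assumed and defined here for self-containment.
private extension CGRect {
    var area: CGFloat { width * height }
}

/// Picks the most plausible document rectangle: drop weak detections,
/// fall back to the full list if the filter empties it, then take the
/// candidate with the highest confidence-times-area score.
func bestRectangle(from observations: [VNRectangleObservation],
                   minimumConfidence: VNConfidence = 0.55) -> VNRectangleObservation? {
    let filtered = observations.filter { $0.confidence >= minimumConfidence }
    let candidates = filtered.isEmpty ? observations : filtered
    return candidates.max { lhs, rhs in
        CGFloat(lhs.confidence) * lhs.boundingBox.area <
            CGFloat(rhs.confidence) * rhs.boundingBox.area
    }
}
```

Only the single best candidate is needed downstream, so `max(by:)` expresses the same ranking as the diff's sort without ordering the whole array.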
@@ -222,13 +225,13 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
 
         let request = VNDetectRectanglesRequest(completionHandler: requestHandler)
 
-        request.maximumObservations =
-        request.minimumConfidence = 0.
-        request.minimumAspectRatio = 0.
-        request.maximumAspectRatio = 2.
+        request.maximumObservations = 3
+        request.minimumConfidence = 0.55
+        request.minimumAspectRatio = 0.1
+        request.maximumAspectRatio = 2.0
         request.minimumSize = 0.05
         if #available(iOS 13.0, *) {
-            request.quadratureTolerance =
+            request.quadratureTolerance = 20
         }
 
         let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: orientation, options: [:])
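The detector itself is tuned to the same thresholds as the post-filter. A minimal sketch of an equivalently configured `VNDetectRectanglesRequest`, wrapped in a hypothetical `detectDocumentRectangles` helper that is not part of the package; the values mirror the hunk above:

```swift
import Vision
import CoreVideo
import ImageIO

/// Runs a one-off rectangle detection configured like the scanner's request.
func detectDocumentRectangles(in pixelBuffer: CVPixelBuffer,
                              orientation: CGImagePropertyOrientation,
                              completion: @escaping ([VNRectangleObservation]) -> Void) {
    let request = VNDetectRectanglesRequest { request, _ in
        completion(request.results as? [VNRectangleObservation] ?? [])
    }
    request.maximumObservations = 3      // keep only the few strongest candidates
    request.minimumConfidence = 0.55     // matches the 0.55 post-filter threshold
    request.minimumAspectRatio = 0.1     // allow very elongated documents
    request.maximumAspectRatio = 2.0
    request.minimumSize = 0.05           // ignore quads smaller than 5% of the frame
    if #available(iOS 13.0, *) {
        request.quadratureTolerance = 20 // degrees of deviation from right angles
    }

    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer,
                                        orientation: orientation,
                                        options: [:])
    try? handler.perform([request])
}
```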
@@ -255,10 +258,12 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
         } else {
             lastObservation = nil
             missedDetectionFrames = 0
+            smoothedOverlayPoints = nil
             effectiveObservation = nil
         }
 
-
+        let shouldDisplayOverlay = currentStableCounter > 0 && effectiveObservation != nil
+        updateNativeOverlay(with: shouldDisplayOverlay ? effectiveObservation : nil)
 
         let payload: [String: Any?]
         if let observation = effectiveObservation {
@@ -314,13 +319,24 @@ class RNRDocScannerView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate, A
             return
         }
 
-        let
+        let rawPoints = [
             self.convertToLayerPoint(observation.topLeft, previewLayer: previewLayer),
             self.convertToLayerPoint(observation.topRight, previewLayer: previewLayer),
             self.convertToLayerPoint(observation.bottomRight, previewLayer: previewLayer),
             self.convertToLayerPoint(observation.bottomLeft, previewLayer: previewLayer),
         ]
 
+        let points: [CGPoint]
+        if let previous = self.smoothedOverlayPoints, previous.count == 4 {
+            points = zip(previous, rawPoints).map { prev, next in
+                CGPoint(x: prev.x * 0.7 + next.x * 0.3, y: prev.y * 0.7 + next.y * 0.3)
+            }
+        } else {
+            points = rawPoints
+        }
+
+        self.smoothedOverlayPoints = points
+
         let outline = UIBezierPath()
         outline.move(to: points[0])
         outline.addLine(to: points[1])
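The overlay jitter fix is an exponential moving average over the four projected corners: each frame keeps 70% of the previous smoothed position and blends in 30% of the fresh detection, and the state is cleared when detection is lost (the `smoothedOverlayPoints = nil` reset in the earlier hunk). A minimal sketch of that low-pass filter as a reusable helper; the `CornerSmoother` type is hypothetical, not part of the package:

```swift
import CoreGraphics

/// Exponential moving average over a quad's corners, mirroring the
/// 0.7 / 0.3 blend in the hunk above: weighting history heavily damps
/// frame-to-frame jitter at the cost of a slight lag.
struct CornerSmoother {
    var previousPoints: [CGPoint]?
    var historyWeight: CGFloat = 0.7

    mutating func smooth(_ rawPoints: [CGPoint]) -> [CGPoint] {
        guard let previous = previousPoints, previous.count == rawPoints.count else {
            // First frame (or detection was lost): start from the raw corners.
            previousPoints = rawPoints
            return rawPoints
        }
        let blended = zip(previous, rawPoints).map { old, new in
            CGPoint(x: old.x * historyWeight + new.x * (1 - historyWeight),
                    y: old.y * historyWeight + new.y * (1 - historyWeight))
        }
        previousPoints = blended
        return blended
    }

    /// Call when the rectangle disappears, analogous to the
    /// `smoothedOverlayPoints = nil` reset above.
    mutating func reset() {
        previousPoints = nil
    }
}
```

A `var smoother = CornerSmoother()` held alongside the preview layer and fed the four converted corners each frame reproduces the damping; raising `historyWeight` smooths more but makes the overlay lag behind fast camera motion.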