react-native-rectangle-doc-scanner 12.0.0 → 13.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/build.gradle +0 -1
- package/android/src/camera2/kotlin/com/reactnativerectangledocscanner/CameraController.kt +21 -40
- package/android/src/camera2/kotlin/com/reactnativerectangledocscanner/DocumentScannerView.kt +102 -27
- package/android/src/camera2/kotlin/com/reactnativerectangledocscanner/DocumentScannerViewManager.kt +0 -5
- package/android/src/main/kotlin/com/reactnativerectangledocscanner/DocumentScannerModule.kt +4 -187
- package/dist/DocScanner.js +4 -13
- package/dist/FullDocScanner.js +2 -11
- package/package.json +1 -1
- package/src/DocScanner.tsx +3 -13
- package/src/FullDocScanner.tsx +2 -12
- package/src/external.d.ts +0 -1
- package/vendor/react-native-document-scanner/index.d.ts +0 -1
package/android/build.gradle
CHANGED
@@ -86,7 +86,6 @@ dependencies {
 
     // ML Kit object detection for live rectangle hints (Camera2 mode)
     implementation 'com.google.mlkit:object-detection:17.0.1'
-    implementation 'com.google.android.gms:play-services-mlkit-document-scanner:16.0.0-beta1'
 
     if (hasVisionCamera) {
         // VisionCamera mode - include VisionCamera dependency
package/android/src/camera2/kotlin/com/reactnativerectangledocscanner/CameraController.kt
CHANGED
@@ -123,6 +123,7 @@ class CameraController(
         Log.d(TAG, "[CAMERAX] Setting target rotation to ROTATION_0 (portrait-only app)")
 
         preview = Preview.Builder()
+            .setTargetAspectRatio(AspectRatio.RATIO_4_3)
             .setTargetRotation(targetRotation) // Force portrait
             .build()
             .also { previewUseCase ->
@@ -185,7 +186,8 @@ class CameraController(
         // ImageAnalysis UseCase for document detection
         imageAnalyzer = ImageAnalysis.Builder()
             .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
-
+            // Match preview aspect ratio to avoid square analysis frames on some devices.
+            .setTargetAspectRatio(AspectRatio.RATIO_4_3)
             .setTargetRotation(targetRotation) // Match preview rotation
             .build()
             .also {
@@ -201,6 +203,7 @@ class CameraController(
         // ImageCapture UseCase
         imageCapture = ImageCapture.Builder()
             .setCaptureMode(ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY)
+            .setTargetAspectRatio(AspectRatio.RATIO_4_3)
             .setTargetRotation(targetRotation) // Match preview rotation
             .build()
 
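The three hunks above pin Preview, ImageAnalysis and ImageCapture to the same 4:3 target aspect ratio, so analysis frames, captured photos and the on-screen preview describe the same field of view. A minimal standalone sketch of that idea using only standard CameraX builder calls (the function name and rotation default are illustrative, not part of this package):

import android.view.Surface
import androidx.camera.core.AspectRatio
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageCapture
import androidx.camera.core.Preview

// All three use cases share RATIO_4_3 and the same rotation, so rectangle
// coordinates computed on analysis frames line up with the preview and capture.
fun buildUseCases(targetRotation: Int = Surface.ROTATION_0): Triple<Preview, ImageAnalysis, ImageCapture> {
    val preview = Preview.Builder()
        .setTargetAspectRatio(AspectRatio.RATIO_4_3)
        .setTargetRotation(targetRotation)
        .build()
    val analysis = ImageAnalysis.Builder()
        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
        .setTargetAspectRatio(AspectRatio.RATIO_4_3)
        .setTargetRotation(targetRotation)
        .build()
    val capture = ImageCapture.Builder()
        .setCaptureMode(ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY)
        .setTargetAspectRatio(AspectRatio.RATIO_4_3)
        .setTargetRotation(targetRotation)
        .build()
    return Triple(preview, analysis, capture)
}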
@@ -265,10 +268,11 @@ class CameraController(
             return
         }
 
-        val rotationDegrees = imageProxy.imageInfo.rotationDegrees
         val imageWidth = imageProxy.width
         val imageHeight = imageProxy.height
 
+        val rotationDegrees = imageProxy.imageInfo.rotationDegrees
+
         // Calculate rotation using the same logic as TextureView transform
         val sensorOrientation = getCameraSensorOrientation()
         val displayRotationDegrees = when (textureView.display?.rotation ?: Surface.ROTATION_0) {
@@ -281,16 +285,16 @@ class CameraController(
 
         // Use the same rotation logic as updateTextureViewTransform
         val tabletUpsideDownFix = if (sensorOrientation == 0 && displayRotationDegrees == 90) 180 else 0
-        val effectiveRotation =
-            (displayRotationDegrees + tabletUpsideDownFix) % 360
-        } else {
-            sensorOrientation
-        }
+        val effectiveRotation = (displayRotationDegrees + tabletUpsideDownFix) % 360
 
-        Log.d(
+        Log.d(
+            TAG,
+            "[ANALYZE] Sensor: $sensorOrientation°, Display: $displayRotationDegrees°, " +
+                "ImageProxy: $rotationDegrees°, Effective: $effectiveRotation°"
+        )
 
-        // Try ML Kit first
-        val inputImage = InputImage.fromMediaImage(mediaImage,
+        // Try ML Kit first (use the same rotation as preview/OpenCV)
+        val inputImage = InputImage.fromMediaImage(mediaImage, effectiveRotation)
 
         objectDetector.process(inputImage)
             .addOnSuccessListener { objects ->
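The hunk above replaces the old sensor-orientation branch with a single effective rotation derived from the display rotation plus a tablet fix, and feeds that same value to InputImage.fromMediaImage so ML Kit sees frames in the orientation the preview uses. A small sketch of just that arithmetic (the function name and sample values are illustrative):

// Rotation handed to the detector: display rotation plus a 180° fix for devices
// whose sensor reports 0° while the display is rotated 90°.
fun effectiveRotationDegrees(sensorOrientation: Int, displayRotationDegrees: Int): Int {
    val tabletUpsideDownFix = if (sensorOrientation == 0 && displayRotationDegrees == 90) 180 else 0
    return (displayRotationDegrees + tabletUpsideDownFix) % 360
}

fun main() {
    println(effectiveRotationDegrees(sensorOrientation = 90, displayRotationDegrees = 0)) // (0 + 0) % 360 = 0
    println(effectiveRotationDegrees(sensorOrientation = 0, displayRotationDegrees = 90)) // (90 + 180) % 360 = 270
}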
@@ -486,32 +490,11 @@ class CameraController(
 
         if (viewWidth <= 0 || viewHeight <= 0) return null
 
-        //
-
-        val
-        val displayRotationDegrees = when (textureView.display?.rotation ?: Surface.ROTATION_0) {
-            Surface.ROTATION_0 -> 0
-            Surface.ROTATION_90 -> 90
-            Surface.ROTATION_180 -> 180
-            Surface.ROTATION_270 -> 270
-            else -> 0
-        }
-
-        fun rotatePoint(point: org.opencv.core.Point): org.opencv.core.Point {
-            return if (sensorOrientation == 90) {
-                org.opencv.core.Point(
-                    point.y,
-                    imageWidth - point.x
-                )
-            } else {
-                point
-            }
-        }
-
-        val finalWidth = if (sensorOrientation == 90) imageHeight else imageWidth
-        val finalHeight = if (sensorOrientation == 90) imageWidth else imageHeight
+        // Rectangle coordinates are already in the rotated image space (effective rotation applied).
+        val finalWidth = imageWidth
+        val finalHeight = imageHeight
 
-        //
+        // Apply fit-center scaling to match TextureView display.
         val scaleX = viewWidth / finalWidth.toFloat()
         val scaleY = viewHeight / finalHeight.toFloat()
         val scale = scaleX.coerceAtMost(scaleY)
@@ -522,10 +505,9 @@ class CameraController(
         val offsetY = (viewHeight - scaledHeight) / 2f
 
         fun transformPoint(point: org.opencv.core.Point): org.opencv.core.Point {
-            val rotated = rotatePoint(point)
             return org.opencv.core.Point(
-
-
+                point.x * scale + offsetX,
+                point.y * scale + offsetY
             )
         }
 
@@ -536,10 +518,9 @@ class CameraController(
             transformPoint(rectangle.bottomRight)
         )
 
-        Log.d(TAG, "[MAPPING]
+        Log.d(TAG, "[MAPPING] Image: ${imageWidth}x${imageHeight} → Final: ${finalWidth}x${finalHeight}")
         Log.d(TAG, "[MAPPING] View: ${viewWidth.toInt()}x${viewHeight.toInt()}, Scale: $scale, Offset: ($offsetX, $offsetY)")
         Log.d(TAG, "[MAPPING] TL: (${rectangle.topLeft.x}, ${rectangle.topLeft.y}) → " +
-            "Rotated: (${rotatePoint(rectangle.topLeft).x}, ${rotatePoint(rectangle.topLeft).y}) → " +
             "Final: (${result.topLeft.x}, ${result.topLeft.y})")
 
         return result
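Net effect of the mapRectangleToView hunks: the per-point rotation step is gone and the mapping reduces to a fit-center transform of points that are already in rotated image space. A minimal sketch of that transform with a worked example (the type and function names are illustrative):

data class Pt(val x: Double, val y: Double)

// Fit-center mapping: scale the image to fit inside the view, then shift by the
// letterbox offsets so a detected corner lands on the on-screen preview.
fun mapImagePointToView(p: Pt, imageWidth: Int, imageHeight: Int, viewWidth: Float, viewHeight: Float): Pt {
    val scale = minOf(viewWidth / imageWidth, viewHeight / imageHeight)
    val offsetX = (viewWidth - imageWidth * scale) / 2.0
    val offsetY = (viewHeight - imageHeight * scale) / 2.0
    return Pt(p.x * scale + offsetX, p.y * scale + offsetY)
}

fun main() {
    // 1200x1600 analysis frame shown in a 1080x1920 view: scale = min(0.9, 1.2) = 0.9,
    // offsetX = 0, offsetY = (1920 - 1440) / 2 = 240, so (600, 800) maps to roughly (540, 960).
    println(mapImagePointToView(Pt(600.0, 800.0), 1200, 1600, 1080f, 1920f))
}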
package/android/src/camera2/kotlin/com/reactnativerectangledocscanner/DocumentScannerView.kt
CHANGED
@@ -49,7 +49,6 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
     var brightness: Float = 0f
     var contrast: Float = 1f
     var saturation: Float = 1f
-    var useExternalScanner: Boolean = false
 
     // State
     private var stableCounter = 0
@@ -61,12 +60,14 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
     private var lastDetectedImageHeight = 0
     private var lastRectangleOnScreen: Rectangle? = null
     private var lastSmoothedRectangleOnScreen: Rectangle? = null
+    private val iouHistory = ArrayDeque<Rectangle>()
 
     // Coroutine scope for async operations
     private val scope = CoroutineScope(Dispatchers.Main + SupervisorJob())
 
     companion object {
         private const val TAG = "DocumentScannerView"
+        private const val PREVIEW_ASPECT_RATIO = 3f / 4f // width:height (matches 3:4)
     }
 
     override val lifecycle: Lifecycle
@@ -111,6 +112,9 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
         addView(overlayView, 1) // Add at index 1 (front)
         Log.d(TAG, "[INIT] OverlayView added, childCount: $childCount")
 
+        // Match camera UI look with letterboxing when preview doesn't fill the view.
+        setBackgroundColor(android.graphics.Color.BLACK)
+
         Log.d(TAG, "╔════════════════════════════════════════╗")
         Log.d(TAG, "║ DocumentScannerView INIT COMPLETE ║")
         Log.d(TAG, "╚════════════════════════════════════════╝")
@@ -137,16 +141,36 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
     override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) {
         super.onLayout(changed, left, top, right, bottom)
         if (changed) {
+            layoutPreviewAndOverlay(right - left, bottom - top)
             Log.d(TAG, "[LAYOUT] View size: ${right - left}x${bottom - top}, PreviewView: ${previewView.width}x${previewView.height}")
             cameraController?.refreshTransform()
         }
     }
 
-    private fun
-        if (
-
-
+    private fun layoutPreviewAndOverlay(viewWidth: Int, viewHeight: Int) {
+        if (viewWidth <= 0 || viewHeight <= 0) return
+
+        val targetWidth: Int
+        val targetHeight: Int
+        val aspectHeight = (viewWidth / PREVIEW_ASPECT_RATIO).toInt()
+        if (aspectHeight <= viewHeight) {
+            targetWidth = viewWidth
+            targetHeight = aspectHeight
+        } else {
+            targetWidth = (viewHeight * PREVIEW_ASPECT_RATIO).toInt()
+            targetHeight = viewHeight
         }
+
+        val left = (viewWidth - targetWidth) / 2
+        val top = if (targetHeight < viewHeight) 0 else (viewHeight - targetHeight) / 2
+        val right = left + targetWidth
+        val bottom = top + targetHeight
+
+        previewView.layout(left, top, right, bottom)
+        overlayView.layout(left, top, right, bottom)
+    }
+
+    private fun initializeCameraWhenReady() {
         // If view is already laid out, start camera immediately
         if (width > 0 && height > 0) {
             Log.d(TAG, "[INIT] View already laid out, starting camera immediately")
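The layoutPreviewAndOverlay function added above letterboxes the preview and overlay to a 3:4 box, centred horizontally and pinned to the top when the view is taller than 3:4. A worked example of that sizing rule (the values and function name are illustrative):

// 3:4 letterbox: fit the width first; fall back to fitting the height.
fun letterboxSize(viewWidth: Int, viewHeight: Int, aspect: Float = 3f / 4f): Pair<Int, Int> {
    val aspectHeight = (viewWidth / aspect).toInt()
    return if (aspectHeight <= viewHeight) viewWidth to aspectHeight
    else (viewHeight * aspect).toInt() to viewHeight
}

fun main() {
    // A 1080x2340 view gets a 1080x1440 preview (1080 / 0.75 = 1440), leaving a
    // 900px black band below it, since the added layout code pins the box to top = 0.
    println(letterboxSize(1080, 2340)) // (1080, 1440)
}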
@@ -180,20 +204,6 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
         }
     }
 
-    fun setUseExternalScanner(enabled: Boolean) {
-        if (useExternalScanner == enabled) {
-            return
-        }
-        useExternalScanner = enabled
-        Log.d(TAG, "[SET] useExternalScanner: $enabled")
-        if (enabled) {
-            stopCamera()
-            overlayView.setRectangle(null, overlayColor)
-        } else if (width > 0 && height > 0 && cameraController == null) {
-            setupCamera()
-            startCamera()
-        }
-    }
 
     private fun setupCamera() {
         try {
@@ -231,17 +241,26 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
             lastDetectedImageHeight = imageHeight
         }
 
-        val
+        val previewWidth = previewView.width
+        val previewHeight = previewView.height
+
+        val rectangleOnScreen = if (rectangle != null && previewWidth > 0 && previewHeight > 0) {
             cameraController?.mapRectangleToView(rectangle, imageWidth, imageHeight)
-            ?: DocumentDetector.transformRectangleToViewCoordinates(
+                ?: DocumentDetector.transformRectangleToViewCoordinates(
+                    rectangle,
+                    imageWidth,
+                    imageHeight,
+                    previewWidth,
+                    previewHeight
+                )
         } else {
             null
         }
-        val smoothedRectangleOnScreen = smoothRectangle(rectangleOnScreen,
+        val smoothedRectangleOnScreen = smoothRectangle(rectangleOnScreen, previewWidth, previewHeight)
         lastRectangleOnScreen = smoothedRectangleOnScreen
         val quality = when {
-            smoothedRectangleOnScreen != null &&
-                DocumentDetector.evaluateRectangleQualityInView(smoothedRectangleOnScreen,
+            smoothedRectangleOnScreen != null && previewWidth > 0 && previewHeight > 0 ->
+                DocumentDetector.evaluateRectangleQualityInView(smoothedRectangleOnScreen, previewWidth, previewHeight)
             rectangle != null -> DocumentDetector.evaluateRectangleQuality(rectangle, imageWidth, imageHeight)
             else -> RectangleQuality.TOO_FAR
         }
@@ -325,23 +344,33 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
             overlayView.setRectangle(rectangleOnScreen, overlayColor)
         }
 
-        // Update stable counter based on quality
+        // Update stable counter based on quality + IOU stability
         if (rectangleCoordinates == null) {
             if (stableCounter != 0) {
                 Log.d(TAG, "Rectangle lost, resetting stableCounter")
             }
             stableCounter = 0
+            clearIouHistory()
         } else {
             when (quality) {
                 RectangleQuality.GOOD -> {
-
-
+                    val isStable = rectangleOnScreen?.let { updateIouHistory(it) } ?: false
+                    if (isStable) {
+                        stableCounter = min(stableCounter + 1, detectionCountBeforeCapture)
+                        Log.d(TAG, "Good rectangle detected, stableCounter: $stableCounter/$detectionCountBeforeCapture")
+                    } else {
+                        if (stableCounter > 0) {
+                            stableCounter--
+                        }
+                        Log.d(TAG, "Rectangle unstable (IOU), stableCounter: $stableCounter")
+                    }
                 }
                 RectangleQuality.BAD_ANGLE, RectangleQuality.TOO_FAR -> {
                     if (stableCounter > 0) {
                         stableCounter--
                     }
                     Log.d(TAG, "Bad rectangle detected (type: $quality), stableCounter: $stableCounter")
+                    clearIouHistory()
                 }
             }
         }
@@ -357,6 +386,52 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
         }
     }
 
+    private fun updateIouHistory(rectangle: Rectangle): Boolean {
+        if (iouHistory.size >= 3) {
+            iouHistory.removeFirst()
+        }
+        iouHistory.addLast(rectangle)
+        if (iouHistory.size < 3) {
+            return false
+        }
+        val r0 = iouHistory.elementAt(0)
+        val r1 = iouHistory.elementAt(1)
+        val r2 = iouHistory.elementAt(2)
+        val iou01 = rectangleIou(r0, r1)
+        val iou12 = rectangleIou(r1, r2)
+        val iou02 = rectangleIou(r0, r2)
+        return iou01 >= 0.85 && iou12 >= 0.85 && iou02 >= 0.85
+    }
+
+    private fun clearIouHistory() {
+        iouHistory.clear()
+    }
+
+    private fun rectangleIou(a: Rectangle, b: Rectangle): Double {
+        fun bounds(r: Rectangle): DoubleArray {
+            val minX = min(min(r.topLeft.x, r.topRight.x), min(r.bottomLeft.x, r.bottomRight.x))
+            val maxX = max(max(r.topLeft.x, r.topRight.x), max(r.bottomLeft.x, r.bottomRight.x))
+            val minY = min(min(r.topLeft.y, r.topRight.y), min(r.bottomLeft.y, r.bottomRight.y))
+            val maxY = max(max(r.topLeft.y, r.topRight.y), max(r.bottomLeft.y, r.bottomRight.y))
+            return doubleArrayOf(minX, minY, maxX, maxY)
+        }
+
+        val ab = bounds(a)
+        val bb = bounds(b)
+        val interLeft = max(ab[0], bb[0])
+        val interTop = max(ab[1], bb[1])
+        val interRight = min(ab[2], bb[2])
+        val interBottom = min(ab[3], bb[3])
+        val interW = max(0.0, interRight - interLeft)
+        val interH = max(0.0, interBottom - interTop)
+        val interArea = interW * interH
+        val areaA = max(0.0, (ab[2] - ab[0])) * max(0.0, (ab[3] - ab[1]))
+        val areaB = max(0.0, (bb[2] - bb[0])) * max(0.0, (bb[3] - bb[1]))
+        val union = areaA + areaB - interArea
+        if (union <= 0.0) return 0.0
+        return interArea / union
+    }
+
     fun capture() {
         captureWithPromise(null)
     }
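Auto-capture now requires both a GOOD quality rating and geometric stability: the last three on-screen rectangles must pairwise overlap with an IOU of at least 0.85 before the stable counter advances. A self-contained illustration of that gate on axis-aligned bounds, with hypothetical corner values:

// Same axis-aligned IOU idea as rectangleIou above, on [left, top, right, bottom] arrays.
fun boxIou(a: DoubleArray, b: DoubleArray): Double {
    val iw = maxOf(0.0, minOf(a[2], b[2]) - maxOf(a[0], b[0]))
    val ih = maxOf(0.0, minOf(a[3], b[3]) - maxOf(a[1], b[1]))
    val inter = iw * ih
    val union = (a[2] - a[0]) * (a[3] - a[1]) + (b[2] - b[0]) * (b[3] - b[1]) - inter
    return if (union <= 0.0) 0.0 else inter / union
}

fun main() {
    val steady = doubleArrayOf(100.0, 100.0, 500.0, 700.0)
    val jitter = doubleArrayOf(105.0, 102.0, 505.0, 702.0) // small hand shake
    val jumped = doubleArrayOf(300.0, 100.0, 700.0, 700.0) // detection moved
    println(boxIou(steady, jitter)) // ~0.97 -> counts toward the 3-frame streak
    println(boxIou(steady, jumped)) // ~0.33 -> streak broken, counter decays
}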
package/android/src/camera2/kotlin/com/reactnativerectangledocscanner/DocumentScannerViewManager.kt
CHANGED
@@ -94,11 +94,6 @@ class DocumentScannerViewManager : SimpleViewManager<DocumentScannerView>() {
         view.saturation = saturation
     }
 
-    @ReactProp(name = "useExternalScanner")
-    fun setUseExternalScanner(view: DocumentScannerView, enabled: Boolean) {
-        view.setUseExternalScanner(enabled)
-    }
-
     override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any> {
         return MapBuilder.of(
             "onPictureTaken",
package/android/src/main/kotlin/com/reactnativerectangledocscanner/DocumentScannerModule.kt
CHANGED
@@ -1,49 +1,24 @@
 package com.reactnativerectangledocscanner
 
-import android.app.Activity
-import android.content.Intent
 import android.graphics.BitmapFactory
-import android.net.Uri
 import android.util.Log
 import com.facebook.react.bridge.*
 import com.facebook.react.uimanager.UIManagerModule
-import com.google.mlkit.vision.documentscanner.GmsDocumentScannerOptions
-import com.google.mlkit.vision.documentscanner.GmsDocumentScanning
-import com.google.mlkit.vision.documentscanner.GmsDocumentScanningResult
 import kotlinx.coroutines.*
 import org.opencv.core.Point
-import java.io.File
-import java.io.FileOutputStream
 
 class DocumentScannerModule(reactContext: ReactApplicationContext) :
-    ReactContextBaseJavaModule(reactContext)
+    ReactContextBaseJavaModule(reactContext) {
 
     private val scope = CoroutineScope(Dispatchers.Main + SupervisorJob())
 
     companion object {
         const val NAME = "RNPdfScannerManager"
         private const val TAG = "DocumentScannerModule"
-        private const val EXTERNAL_SCAN_REQUEST = 9401
     }
 
     override fun getName() = NAME
 
-    private data class PendingScanConfig(
-        val useBase64: Boolean,
-        val saveInAppDocument: Boolean,
-        val quality: Float,
-        val brightness: Float,
-        val contrast: Float,
-        val saturation: Float
-    )
-
-    private var pendingScanPromise: Promise? = null
-    private var pendingScanConfig: PendingScanConfig? = null
-
-    init {
-        reactContext.addActivityEventListener(this)
-    }
-
     /**
      * Capture image from the document scanner view
      * Matches iOS signature: capture(reactTag, resolver, rejecter)
@@ -71,13 +46,9 @@ class DocumentScannerModule(reactContext: ReactApplicationContext) :
         if (view is DocumentScannerView) {
             Log.d(TAG, "Found DocumentScannerView, triggering capture with promise")
 
-
-
-
-            // Pass promise to view so it can be resolved when capture completes
-            // This matches iOS behavior where promise is resolved with actual image data
-            view.captureWithPromise(promise)
-        }
+            // Pass promise to view so it can be resolved when capture completes
+            // This matches iOS behavior where promise is resolved with actual image data
+            view.captureWithPromise(promise)
         } else {
             Log.e(TAG, "View with tag $tag is not DocumentScannerView: ${view?.javaClass?.simpleName}")
             promise.reject("INVALID_VIEW", "View is not a DocumentScannerView")
@@ -93,160 +64,6 @@ class DocumentScannerModule(reactContext: ReactApplicationContext) :
         }
     }
 
-    private fun startExternalScan(view: DocumentScannerView, promise: Promise) {
-        if (pendingScanPromise != null) {
-            promise.reject("SCAN_IN_PROGRESS", "Another scan is already in progress")
-            return
-        }
-
-        val activity = currentActivity ?: run {
-            promise.reject("NO_ACTIVITY", "Activity not available")
-            return
-        }
-
-        val options = GmsDocumentScannerOptions.Builder()
-            .setScannerMode(GmsDocumentScannerOptions.SCANNER_MODE_FULL)
-            .setGalleryImportAllowed(true)
-            .setPageLimit(1)
-            .setResultFormats(GmsDocumentScannerOptions.RESULT_FORMAT_JPEG)
-            .build()
-
-        val scanner = GmsDocumentScanning.getClient(options)
-
-        pendingScanPromise = promise
-        pendingScanConfig = PendingScanConfig(
-            useBase64 = view.useBase64,
-            saveInAppDocument = view.saveInAppDocument,
-            quality = view.quality,
-            brightness = view.brightness,
-            contrast = view.contrast,
-            saturation = view.saturation
-        )
-
-        scanner.getStartScanIntent(activity)
-            .addOnSuccessListener { intentSender ->
-                try {
-                    activity.startIntentSenderForResult(
-                        intentSender,
-                        EXTERNAL_SCAN_REQUEST,
-                        null,
-                        0,
-                        0,
-                        0
-                    )
-                } catch (e: Exception) {
-                    Log.e(TAG, "Failed to launch ML Kit scanner", e)
-                    cleanupPendingScan()
-                    promise.reject("SCAN_LAUNCH_FAILED", "Failed to launch scanner: ${e.message}", e)
-                }
-            }
-            .addOnFailureListener { e ->
-                Log.e(TAG, "Failed to get ML Kit scan intent", e)
-                cleanupPendingScan()
-                promise.reject("SCAN_INTENT_FAILED", "Failed to start scanner: ${e.message}", e)
-            }
-    }
-
-    override fun onActivityResult(activity: Activity?, requestCode: Int, resultCode: Int, data: Intent?) {
-        if (requestCode != EXTERNAL_SCAN_REQUEST) {
-            return
-        }
-
-        val promise = pendingScanPromise ?: return
-        val config = pendingScanConfig
-        cleanupPendingScan()
-
-        if (resultCode != Activity.RESULT_OK || data == null) {
-            promise.reject("SCAN_CANCELLED", "Scan cancelled or failed")
-            return
-        }
-
-        val result = GmsDocumentScanningResult.fromActivityResultIntent(data)
-        val page = result?.pages?.firstOrNull()
-        val imageUri = page?.imageUri
-
-        if (imageUri == null || config == null) {
-            promise.reject("SCAN_NO_RESULT", "No scanned image returned")
-            return
-        }
-
-        scope.launch {
-            try {
-                val outputDir = if (config.saveInAppDocument) {
-                    reactApplicationContext.filesDir
-                } else {
-                    reactApplicationContext.cacheDir
-                }
-                val timestamp = System.currentTimeMillis()
-                val initialPath = copyUriToFile(imageUri, outputDir, "doc_scan_initial_$timestamp.jpg")
-
-                val processed = withContext(Dispatchers.IO) {
-                    ImageProcessor.processImage(
-                        imagePath = initialPath,
-                        rectangle = null,
-                        brightness = config.brightness,
-                        contrast = config.contrast,
-                        saturation = config.saturation,
-                        shouldCrop = false
-                    )
-                }
-
-                val resultMap = Arguments.createMap()
-                if (config.useBase64) {
-                    val croppedBase64 = ImageProcessor.bitmapToBase64(processed.croppedImage, config.quality)
-                    val initialBase64 = ImageProcessor.bitmapToBase64(processed.initialImage, config.quality)
-                    resultMap.putString("croppedImage", croppedBase64)
-                    resultMap.putString("initialImage", initialBase64)
-                } else {
-                    val croppedPath = ImageProcessor.saveBitmapToFile(
-                        processed.croppedImage,
-                        outputDir,
-                        "doc_scan_cropped_$timestamp.jpg",
-                        config.quality
-                    )
-                    resultMap.putString("croppedImage", croppedPath)
-                    resultMap.putString("initialImage", initialPath)
-                }
-
-                resultMap.putMap("rectangleCoordinates", null)
-                resultMap.putInt("width", processed.croppedImage.width)
-                resultMap.putInt("height", processed.croppedImage.height)
-
-                // Cleanup bitmaps to avoid leaks.
-                if (processed.croppedImage !== processed.initialImage) {
-                    processed.croppedImage.recycle()
-                    processed.initialImage.recycle()
-                } else {
-                    processed.croppedImage.recycle()
-                }
-
-                promise.resolve(resultMap)
-            } catch (e: Exception) {
-                Log.e(TAG, "Failed to process scan result", e)
-                promise.reject("SCAN_PROCESS_FAILED", "Failed to process scan result: ${e.message}", e)
-            }
-        }
-    }
-
-    override fun onNewIntent(intent: Intent?) {
-        // No-op
-    }
-
-    private fun copyUriToFile(uri: Uri, outputDir: File, fileName: String): String {
-        val outputFile = File(outputDir, fileName)
-        reactApplicationContext.contentResolver.openInputStream(uri)?.use { input ->
-            FileOutputStream(outputFile).use { output ->
-                input.copyTo(output)
-            }
-        } ?: throw IllegalStateException("Failed to open input stream for URI: $uri")
-        return outputFile.absolutePath
-    }
-
-    private fun cleanupPendingScan() {
-        pendingScanPromise = null
-        pendingScanConfig = null
-    }
-
     /**
      * Apply color controls to an image
      * Matches iOS: applyColorControls(imagePath, brightness, contrast, saturation, resolver, rejecter)
package/dist/DocScanner.js
CHANGED
@@ -715,28 +715,19 @@ const NativeScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAU
     const overlayIsActive = autoCapture ? isAutoCapturing : (detectedRectangle?.stableCounter ?? 0) > 0;
     const detectionThreshold = autoCapture ? minStableFrames : 99999;
     return (react_1.default.createElement(react_native_1.View, { style: styles.container },
-        react_1.default.createElement(react_native_document_scanner_1.default, { ref: scannerRef, style: styles.scanner, detectionCountBeforeCapture: detectionThreshold, overlayColor: overlayColor, enableTorch: enableTorch, quality: normalizedQuality, useBase64: useBase64, manualOnly: react_native_1.Platform.OS === 'android', detectionConfig: detectionConfig,
+        react_1.default.createElement(react_native_document_scanner_1.default, { ref: scannerRef, style: styles.scanner, detectionCountBeforeCapture: detectionThreshold, overlayColor: overlayColor, enableTorch: enableTorch, quality: normalizedQuality, useBase64: useBase64, manualOnly: react_native_1.Platform.OS === 'android', detectionConfig: detectionConfig, onPictureTaken: handlePictureTaken, onError: handleError, onRectangleDetect: handleRectangleDetect }),
         showGrid && overlayPolygon && (react_1.default.createElement(overlay_1.ScannerOverlay, { active: overlayIsActive, color: gridColor ?? overlayColor, lineWidth: gridLineWidth, polygon: overlayPolygon, clipRect: react_native_1.Platform.OS === 'android' ? null : (detectedRectangle?.previewViewport ?? null) })),
         showManualCaptureButton && (react_1.default.createElement(react_native_1.TouchableOpacity, { style: styles.button, onPress: handleManualCapture })),
         children));
 });
 exports.DocScanner = (0, react_1.forwardRef)((props, ref) => {
-    const useExternalScanner = react_native_1.Platform.OS === 'android';
     (0, react_1.useEffect)(() => {
-        if (react_native_1.Platform.OS !== 'android'
+        if (react_native_1.Platform.OS !== 'android') {
             return;
         }
-
-        console.log('[DocScanner] Using VisionCamera pipeline');
-        }
-        else {
-            console.warn('[DocScanner] VisionCamera pipeline unavailable, falling back to native view.', {
-                hasVisionCameraModule: Boolean(visionCameraModule),
-                hasReanimated: Boolean(reanimatedModule),
-            });
-        }
+        console.log('[DocScanner] Using native CameraX pipeline on Android');
     }, []);
-    if (
+    if (react_native_1.Platform.OS === 'android') {
         return react_1.default.createElement(NativeScanner, { ref: ref, ...props });
     }
     if (hasVisionCamera) {
package/dist/FullDocScanner.js
CHANGED
@@ -124,7 +124,6 @@ const normalizeCapturedDocument = (document) => {
     };
 };
 const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3170f3', gridColor, gridLineWidth, showGrid, strings, minStableFrames, onError, enableGallery = true, cropWidth = 1200, cropHeight = 1600, type, }) => {
-    const useExternalScanner = react_native_1.Platform.OS === 'android';
     const [processing, setProcessing] = (0, react_1.useState)(false);
     const [croppedImageData, setCroppedImageData] = (0, react_1.useState)(null);
     const [isGalleryOpen, setIsGalleryOpen] = (0, react_1.useState)(false);
@@ -144,11 +143,6 @@ const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3
     const rectangleHintTimeoutRef = (0, react_1.useRef)(null);
     const captureReadyTimeoutRef = (0, react_1.useRef)(null);
     const isBusinessMode = type === 'business';
-    (0, react_1.useEffect)(() => {
-        if (useExternalScanner) {
-            setCaptureReady(true);
-        }
-    }, [useExternalScanner]);
     const resetScannerView = (0, react_1.useCallback)((options) => {
         setProcessing(false);
         setCroppedImageData(null);
@@ -394,7 +388,7 @@ const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3
            return;
        }
        console.log('[FullDocScanner] Starting manual capture, grid detected:', rectangleDetected);
-        const captureMode =
+        const captureMode = rectangleDetected ? 'grid' : 'no-grid';
        captureModeRef.current = captureMode;
        captureInProgressRef.current = true;
        // Add timeout to reset state if capture hangs
@@ -542,9 +536,6 @@ const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3
        resetScannerView({ remount: true });
    }, [capturedPhotos.length, isBusinessMode, resetScannerView]);
    const handleRectangleDetect = (0, react_1.useCallback)((event) => {
-        if (useExternalScanner) {
-            return;
-        }
        const stableCounter = event.stableCounter ?? 0;
        const rectangleCoordinates = event.rectangleOnScreen ?? event.rectangleCoordinates;
        const hasRectangle = Boolean(rectangleCoordinates);
@@ -598,7 +589,7 @@ const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3
            }
            setRectangleDetected(false);
        }
-    }, [rectangleDetected
+    }, [rectangleDetected]);
    (0, react_1.useEffect)(() => () => {
        if (rectangleCaptureTimeoutRef.current) {
            clearTimeout(rectangleCaptureTimeoutRef.current);
package/package.json
CHANGED
package/src/DocScanner.tsx
CHANGED
@@ -1008,7 +1008,6 @@ const NativeScanner = forwardRef<DocScannerHandle, Props>(
          useBase64={useBase64}
          manualOnly={Platform.OS === 'android'}
          detectionConfig={detectionConfig}
-          useExternalScanner={Platform.OS === 'android'}
          onPictureTaken={handlePictureTaken}
          onError={handleError}
          onRectangleDetect={handleRectangleDetect}
@@ -1032,23 +1031,14 @@ const NativeScanner = forwardRef<DocScannerHandle, Props>(
 );
 
 export const DocScanner = forwardRef<DocScannerHandle, Props>((props, ref) => {
-  const useExternalScanner = Platform.OS === 'android';
-
   useEffect(() => {
-    if (Platform.OS !== 'android'
+    if (Platform.OS !== 'android') {
      return;
    }
-
-    console.log('[DocScanner] Using VisionCamera pipeline');
-    } else {
-      console.warn('[DocScanner] VisionCamera pipeline unavailable, falling back to native view.', {
-        hasVisionCameraModule: Boolean(visionCameraModule),
-        hasReanimated: Boolean(reanimatedModule),
-      });
-    }
+    console.log('[DocScanner] Using native CameraX pipeline on Android');
  }, []);
 
-  if (
+  if (Platform.OS === 'android') {
    return <NativeScanner ref={ref} {...props} />;
  }
 
package/src/FullDocScanner.tsx
CHANGED
@@ -187,7 +187,6 @@ export const FullDocScanner: React.FC<FullDocScannerProps> = ({
  cropHeight = 1600,
  type,
 }) => {
-  const useExternalScanner = Platform.OS === 'android';
  const [processing, setProcessing] = useState(false);
  const [croppedImageData, setCroppedImageData] = useState<PreviewImageData | null>(null);
  const [isGalleryOpen, setIsGalleryOpen] = useState(false);
@@ -209,12 +208,6 @@ export const FullDocScanner: React.FC<FullDocScannerProps> = ({
 
  const isBusinessMode = type === 'business';
 
-  useEffect(() => {
-    if (useExternalScanner) {
-      setCaptureReady(true);
-    }
-  }, [useExternalScanner]);
-
  const resetScannerView = useCallback(
    (options?: { remount?: boolean }) => {
      setProcessing(false);
@@ -546,7 +539,7 @@ export const FullDocScanner: React.FC<FullDocScannerProps> = ({
 
    console.log('[FullDocScanner] Starting manual capture, grid detected:', rectangleDetected);
 
-    const captureMode =
+    const captureMode = rectangleDetected ? 'grid' : 'no-grid';
    captureModeRef.current = captureMode;
    captureInProgressRef.current = true;
 
@@ -736,9 +729,6 @@ export const FullDocScanner: React.FC<FullDocScannerProps> = ({
  }, [capturedPhotos.length, isBusinessMode, resetScannerView]);
 
  const handleRectangleDetect = useCallback((event: RectangleDetectEvent) => {
-    if (useExternalScanner) {
-      return;
-    }
    const stableCounter = event.stableCounter ?? 0;
    const rectangleCoordinates = event.rectangleOnScreen ?? event.rectangleCoordinates;
    const hasRectangle = Boolean(rectangleCoordinates);
@@ -797,7 +787,7 @@ export const FullDocScanner: React.FC<FullDocScannerProps> = ({
      }
      setRectangleDetected(false);
    }
-  }, [rectangleDetected
+  }, [rectangleDetected]);
 
  useEffect(
    () => () => {
package/src/external.d.ts
CHANGED
@@ -75,7 +75,6 @@ declare module 'react-native-document-scanner' {
    maxAnchorMisses?: number;
    maxCenterDelta?: number;
  };
-  useExternalScanner?: boolean;
  onPictureTaken?: (event: DocumentScannerResult) => void;
  onError?: (error: Error) => void;
  onRectangleDetect?: (event: RectangleEventPayload) => void;
package/vendor/react-native-document-scanner/index.d.ts
CHANGED
@@ -46,7 +46,6 @@ export interface DocumentScannerProps {
    maxAnchorMisses?: number;
    maxCenterDelta?: number;
  };
-  useExternalScanner?: boolean;
  onPictureTaken?: (event: DocumentScannerResult) => void;
  onError?: (error: Error) => void;
  onRectangleDetect?: (event: RectangleEventPayload) => void;