react-native-rectangle-doc-scanner 0.66.0 → 0.70.0

@@ -0,0 +1,536 @@
+ package com.reactnativerectangledocscanner
+
+ import android.Manifest
+ import android.content.Context
+ import android.content.pm.PackageManager
+ import android.graphics.Bitmap
+ import android.graphics.BitmapFactory
+ import android.graphics.Color
+ import android.media.Image
+ import android.util.AttributeSet
+ import android.util.Log
+ import android.util.Size as AndroidSize
+ import android.view.Surface
+ import android.widget.FrameLayout
+ import androidx.camera.core.*
+ import androidx.camera.lifecycle.ProcessCameraProvider
+ import androidx.camera.view.PreviewView
+ import androidx.concurrent.futures.await
+ import androidx.core.content.ContextCompat
+ import androidx.lifecycle.LifecycleOwner
+ import com.facebook.react.bridge.Arguments
+ import com.facebook.react.bridge.Promise
+ import com.facebook.react.bridge.ReactContext
+ import com.facebook.react.bridge.WritableMap
+ import com.facebook.react.uimanager.events.RCTEventEmitter
+ import kotlinx.coroutines.CoroutineScope
+ import kotlinx.coroutines.Dispatchers
+ import kotlinx.coroutines.Job
+ import kotlinx.coroutines.launch
+ import kotlinx.coroutines.withContext
+ import org.opencv.android.OpenCVLoader
+ import org.opencv.core.Core
+ import org.opencv.core.CvType
+ import org.opencv.core.Mat
+ import org.opencv.core.MatOfPoint
+ import org.opencv.core.MatOfPoint2f
+ import org.opencv.core.Point
+ import org.opencv.core.Size as MatSize
+ import org.opencv.imgproc.Imgproc
+ import java.io.File
+ import java.nio.ByteBuffer
+ import java.text.SimpleDateFormat
+ import java.util.Date
+ import java.util.Locale
+ import java.util.concurrent.ExecutorService
+ import java.util.concurrent.Executors
+ import kotlin.math.abs
+ import kotlin.math.hypot
+ import kotlin.math.max
+
+ @androidx.camera.core.ExperimentalGetImage
+ class RNRDocScannerView @JvmOverloads constructor(
+     context: Context,
+     attrs: AttributeSet? = null,
+ ) : FrameLayout(context, attrs) {
+
+     var detectionCountBeforeCapture: Int = 8
+     var autoCapture: Boolean = true
+     var enableTorch: Boolean = false
+         set(value) {
+             field = value
+             updateTorchMode(value)
+         }
+     var quality: Int = 90
+     var useBase64: Boolean = false
+
+     private val previewView: PreviewView = PreviewView(context)
+     private var cameraProvider: ProcessCameraProvider? = null
+     private var camera: Camera? = null
+     private var imageCapture: ImageCapture? = null
+     private var imageAnalysis: ImageAnalysis? = null
+     private var cameraExecutor: ExecutorService? = null
+     private val scope = CoroutineScope(Dispatchers.Main + Job())
+
+     private var currentStableCounter: Int = 0
+     private var lastQuad: QuadPoints? = null
+     private var lastFrameSize: AndroidSize? = null
+     private var pendingPromise: Promise? = null
+     private var captureInFlight: Boolean = false
+
+     init {
+         setBackgroundColor(Color.BLACK)
+         addView(
+             previewView,
+             LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT),
+         )
+
+         if (!OpenCVLoader.initDebug()) {
+             Log.w(TAG, "Failed to initialise OpenCV - detection will not run.")
+         }
+
+         initializeCamera()
+     }
+
+     private fun initializeCamera() {
+         if (!hasCameraPermission()) {
+             Log.w(TAG, "Camera permission missing. Detection will not start.")
+             return
+         }
+
+         cameraExecutor = Executors.newSingleThreadExecutor()
+         val providerFuture = ProcessCameraProvider.getInstance(context)
+         providerFuture.addListener(
+             {
+                 scope.launch {
+                     try {
+                         cameraProvider = providerFuture.await()
+                         bindCameraUseCases()
+                     } catch (error: Exception) {
+                         Log.e(TAG, "Failed to initialise camera", error)
+                     }
+                 }
+             },
+             ContextCompat.getMainExecutor(context),
+         )
+     }
+
+     private fun hasCameraPermission(): Boolean {
+         return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED
+     }
+
+     private fun bindCameraUseCases() {
+         val provider = cameraProvider ?: return
+         val lifecycleOwner = context as? LifecycleOwner
+         if (lifecycleOwner == null) {
+             Log.w(TAG, "Context is not a LifecycleOwner; cannot bind camera use cases.")
+             return
+         }
+         provider.unbindAll()
+
+         // The view may not be attached to a display yet, so fall back to ROTATION_0.
+         val rotation = previewView.display?.rotation ?: Surface.ROTATION_0
+
+         val preview = Preview.Builder()
+             .setTargetAspectRatio(AspectRatio.RATIO_4_3)
+             .setTargetRotation(rotation)
+             .build()
+             .also { it.setSurfaceProvider(previewView.surfaceProvider) }
+
+         imageCapture = ImageCapture.Builder()
+             .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
+             .setTargetAspectRatio(AspectRatio.RATIO_4_3)
+             .setTargetRotation(rotation)
+             .build()
+
+         imageAnalysis = ImageAnalysis.Builder()
+             .setTargetAspectRatio(AspectRatio.RATIO_4_3)
+             .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+             .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888)
+             .build()
+             .also { analysis ->
+                 analysis.setAnalyzer(cameraExecutor!!) { imageProxy ->
+                     try {
+                         processFrame(imageProxy)
+                     } catch (error: Exception) {
+                         Log.e(TAG, "Frame processing error", error)
+                         imageProxy.close()
+                     }
+                 }
+             }
+
+         val selector = CameraSelector.Builder()
+             .requireLensFacing(CameraSelector.LENS_FACING_BACK)
+             .build()
+
+         camera = provider.bindToLifecycle(
+             lifecycleOwner,
+             selector,
+             preview,
+             imageCapture,
+             imageAnalysis,
+         )
+
+         updateTorchMode(enableTorch)
+     }
+
+     private fun updateTorchMode(enabled: Boolean) {
+         camera?.cameraControl?.enableTorch(enabled)
+     }
+
+     private fun processFrame(imageProxy: ImageProxy) {
+         val mediaImage = imageProxy.image
+         if (mediaImage == null) {
+             imageProxy.close()
+             return
+         }
+
+         val frameSize = AndroidSize(imageProxy.width, imageProxy.height)
+         lastFrameSize = frameSize
+
+         val mat = yuvToMat(mediaImage, imageProxy.imageInfo.rotationDegrees)
+         val detectedQuad = detectDocument(mat, frameSize)
+
+         imageProxy.close()
+
+         scope.launch {
+             emitDetectionResult(detectedQuad, frameSize)
+             if (autoCapture && detectedQuad != null && currentStableCounter >= detectionCountBeforeCapture && !captureInFlight) {
+                 triggerAutoCapture()
+             }
+         }
+     }
+
+     private fun emitDetectionResult(quad: QuadPoints?, frameSize: AndroidSize) {
+         val reactContext = context as? ReactContext ?: return
+         val event: WritableMap = Arguments.createMap().apply {
+             if (quad != null) {
+                 val quadMap = Arguments.createMap().apply {
+                     putMap("topLeft", quad.topLeft.toWritable())
+                     putMap("topRight", quad.topRight.toWritable())
+                     putMap("bottomRight", quad.bottomRight.toWritable())
+                     putMap("bottomLeft", quad.bottomLeft.toWritable())
+                 }
+                 putMap("rectangleCoordinates", quadMap)
+                 currentStableCounter = (currentStableCounter + 1).coerceAtMost(detectionCountBeforeCapture)
+                 lastQuad = quad
+             } else {
+                 putNull("rectangleCoordinates")
+                 currentStableCounter = 0
+                 lastQuad = null
+             }
+             putInt("stableCounter", currentStableCounter)
+             putDouble("frameWidth", frameSize.width.toDouble())
+             putDouble("frameHeight", frameSize.height.toDouble())
+         }
+
+         reactContext
+             .getJSModule(RCTEventEmitter::class.java)
+             ?.receiveEvent(id, "onRectangleDetect", event)
+     }
+
+     private fun triggerAutoCapture() {
+         startCapture(null)
+     }
+
+     fun capture(promise: Promise) {
+         startCapture(promise)
+     }
+
+     private fun startCapture(promise: Promise?) {
+         if (captureInFlight) {
+             promise?.reject("capture_in_progress", "A capture request is already running.")
+             return
+         }
+
+         val imageCapture = this.imageCapture
+         if (imageCapture == null) {
+             promise?.reject("capture_unavailable", "Image capture is not initialised yet.")
+             return
+         }
+
+         val outputDir = context.cacheDir
+         val photoFile = File(
+             outputDir,
+             "docscan-${SimpleDateFormat("yyyyMMdd-HHmmss-SSS", Locale.US).format(Date())}.jpg",
+         )
+
+         val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile).build()
+
+         captureInFlight = true
+         pendingPromise = promise
+
+         imageCapture.takePicture(
+             outputOptions,
+             cameraExecutor ?: Executors.newSingleThreadExecutor(),
+             object : ImageCapture.OnImageSavedCallback {
+                 override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
+                     scope.launch {
+                         handleCaptureSuccess(photoFile)
+                     }
+                 }
+
+                 override fun onError(exception: ImageCaptureException) {
+                     scope.launch {
+                         handleCaptureFailure(exception)
+                     }
+                 }
+             },
+         )
+     }
+
+     private suspend fun handleCaptureSuccess(file: File) {
+         withContext(Dispatchers.IO) {
+             // Decode outside the try block so the failure path can still recycle the bitmap.
+             val bitmap: Bitmap? = BitmapFactory.decodeFile(file.absolutePath)
+             try {
+                 requireNotNull(bitmap) { "Unable to decode captured image at ${file.absolutePath}" }
+                 val width = bitmap.width
+                 val height = bitmap.height
+
+                 val frameSize = lastFrameSize
+                 val quadForCapture = if (lastQuad != null && frameSize != null) {
+                     val scaleX = width.toDouble() / frameSize.width.toDouble()
+                     val scaleY = height.toDouble() / frameSize.height.toDouble()
+                     lastQuad!!.scaled(scaleX, scaleY)
+                 } else {
+                     null
+                 }
+
+                 val croppedPath = if (quadForCapture != null) {
+                     cropAndSave(bitmap, quadForCapture, file.parentFile ?: context.cacheDir)
+                 } else {
+                     file.absolutePath
+                 }
+
+                 val event = Arguments.createMap().apply {
+                     putString("initialImage", "file://${file.absolutePath}")
+                     putString("croppedImage", "file://$croppedPath")
+                     putDouble("width", width.toDouble())
+                     putDouble("height", height.toDouble())
+                 }
+
+                 withContext(Dispatchers.Main) {
+                     emitPictureTaken(event)
+                     pendingPromise?.resolve(event)
+                     resetAfterCapture()
+                 }
+             } catch (error: Exception) {
+                 withContext(Dispatchers.Main) {
+                     handleCaptureFailure(error)
+                 }
+             } finally {
+                 bitmap?.recycle()
+             }
+         }
+     }
+
+     private fun handleCaptureFailure(error: Exception) {
+         pendingPromise?.reject(error)
+         resetAfterCapture()
+     }
+
+     private fun resetAfterCapture() {
+         captureInFlight = false
+         pendingPromise = null
+         currentStableCounter = 0
+     }
+
+     private fun emitPictureTaken(payload: WritableMap) {
+         val reactContext = context as? ReactContext ?: return
+         reactContext
+             .getJSModule(RCTEventEmitter::class.java)
+             ?.receiveEvent(id, "onPictureTaken", payload)
+     }
+
+     fun reset() {
+         currentStableCounter = 0
+         lastQuad = null
+     }
+
+     override fun onDetachedFromWindow() {
+         super.onDetachedFromWindow()
+         cameraExecutor?.shutdown()
+         cameraExecutor = null
+         cameraProvider?.unbindAll()
+     }
+
+     // region Detection helpers
+
+     private fun yuvToMat(image: Image, rotationDegrees: Int): Mat {
+         // Note: assumes tightly packed YUV planes; devices that pad rows with a
+         // stride would need a stride-aware copy here.
+         val bufferY = image.planes[0].buffer.toByteArray()
+         val bufferU = image.planes[1].buffer.toByteArray()
+         val bufferV = image.planes[2].buffer.toByteArray()
+
+         val yuvBytes = ByteArray(bufferY.size + bufferU.size + bufferV.size)
+         bufferY.copyInto(yuvBytes, 0)
+         bufferV.copyInto(yuvBytes, bufferY.size)
+         bufferU.copyInto(yuvBytes, bufferY.size + bufferV.size)
+
+         val yuvMat = Mat(image.height + image.height / 2, image.width, CvType.CV_8UC1)
+         yuvMat.put(0, 0, yuvBytes)
+
+         val bgrMat = Mat()
+         Imgproc.cvtColor(yuvMat, bgrMat, Imgproc.COLOR_YUV2BGR_NV21, 3)
+         yuvMat.release()
+
+         val rotatedMat = Mat()
+         when (rotationDegrees) {
+             90 -> Core.rotate(bgrMat, rotatedMat, Core.ROTATE_90_CLOCKWISE)
+             180 -> Core.rotate(bgrMat, rotatedMat, Core.ROTATE_180)
+             270 -> Core.rotate(bgrMat, rotatedMat, Core.ROTATE_90_COUNTERCLOCKWISE)
+             else -> bgrMat.copyTo(rotatedMat)
+         }
+         bgrMat.release()
+         return rotatedMat
+     }
+
+     private fun detectDocument(mat: Mat, frameSize: AndroidSize): QuadPoints? {
+         if (mat.empty()) {
+             mat.release()
+             return null
+         }
+
+         val gray = Mat()
+         Imgproc.cvtColor(mat, gray, Imgproc.COLOR_BGR2GRAY)
+
+         val blurred = Mat()
+         Imgproc.GaussianBlur(gray, blurred, MatSize(5.0, 5.0), 0.0)
+
+         val edges = Mat()
+         Imgproc.Canny(blurred, edges, 50.0, 150.0)
+
+         val contours = ArrayList<MatOfPoint>()
+         val hierarchy = Mat()
+         Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE)
+
+         var bestQuad: QuadPoints? = null
+         var maxArea = 0.0
+         val frameArea = frameSize.width * frameSize.height.toDouble()
+
+         val approxCurve = MatOfPoint2f()
+         for (contour in contours) {
+             val contour2f = MatOfPoint2f(*contour.toArray())
+             val perimeter = Imgproc.arcLength(contour2f, true)
+             Imgproc.approxPolyDP(contour2f, approxCurve, 0.02 * perimeter, true)
+
+             val points = approxCurve.toArray()
+             if (points.size != 4) {
+                 contour.release()
+                 contour2f.release()
+                 continue
+             }
+
+             val area = abs(Imgproc.contourArea(approxCurve))
+             if (area < frameArea * 0.10 || area > frameArea * 0.95) {
+                 contour.release()
+                 contour2f.release()
+                 continue
+             }
+
+             // isContourConvex expects integer points, so convert the approximation first.
+             val approxPoints = MatOfPoint(*points)
+             val isConvex = Imgproc.isContourConvex(approxPoints)
+             approxPoints.release()
+
+             if (area > maxArea && isConvex) {
+                 val ordered = orderPoints(points)
+                 bestQuad = QuadPoints(
+                     topLeft = ordered[0],
+                     topRight = ordered[1],
+                     bottomRight = ordered[2],
+                     bottomLeft = ordered[3],
+                 )
+                 maxArea = area
+             }
+
+             contour.release()
+             contour2f.release()
+         }
+
+         gray.release()
+         blurred.release()
+         edges.release()
+         hierarchy.release()
+         approxCurve.release()
+         mat.release()
+
+         return bestQuad
+     }
+
+     private fun orderPoints(points: Array<Point>): Array<Point> {
+         val sorted = points.sortedBy { it.x + it.y }
+         val tl = sorted.first()
+         val br = sorted.last()
+         val remaining = points.filter { it != tl && it != br }
+         val (tr, bl) =
+             if (remaining[0].x > remaining[1].x) remaining[0] to remaining[1] else remaining[1] to remaining[0]
+         return arrayOf(tl, tr, br, bl)
+     }
+
+     // endregion
+
+     private fun cropAndSave(bitmap: Bitmap, quad: QuadPoints, outputDir: File): String {
+         val srcMat = Mat()
+         org.opencv.android.Utils.bitmapToMat(bitmap, srcMat)
+
+         val ordered = quad.toArray()
+         val widthA = hypot(ordered[2].x - ordered[3].x, ordered[2].y - ordered[3].y)
+         val widthB = hypot(ordered[1].x - ordered[0].x, ordered[1].y - ordered[0].y)
+         val heightA = hypot(ordered[1].x - ordered[2].x, ordered[1].y - ordered[2].y)
+         val heightB = hypot(ordered[0].x - ordered[3].x, ordered[0].y - ordered[3].y)
+
+         val maxWidth = max(widthA, widthB).toInt().coerceAtLeast(1)
+         val maxHeight = max(heightA, heightB).toInt().coerceAtLeast(1)
+
+         val srcPoints = MatOfPoint2f(*ordered)
+         val dstPoints = MatOfPoint2f(
+             Point(0.0, 0.0),
+             Point(maxWidth - 1.0, 0.0),
+             Point(maxWidth - 1.0, maxHeight - 1.0),
+             Point(0.0, maxHeight - 1.0),
+         )
+
+         val transform = Imgproc.getPerspectiveTransform(srcPoints, dstPoints)
+         val warped = Mat(MatSize(maxWidth.toDouble(), maxHeight.toDouble()), srcMat.type())
+         Imgproc.warpPerspective(srcMat, warped, transform, warped.size())
+
+         val croppedBitmap = Bitmap.createBitmap(maxWidth, maxHeight, Bitmap.Config.ARGB_8888)
+         org.opencv.android.Utils.matToBitmap(warped, croppedBitmap)
+
+         val outputFile = File(
+             outputDir,
+             "docscan-cropped-${SimpleDateFormat("yyyyMMdd-HHmmss-SSS", Locale.US).format(Date())}.jpg",
+         )
+         outputFile.outputStream().use { stream ->
+             croppedBitmap.compress(Bitmap.CompressFormat.JPEG, quality.coerceIn(10, 100), stream)
+         }
+
+         srcMat.release()
+         warped.release()
+         transform.release()
+         srcPoints.release()
+         dstPoints.release()
+
+         return outputFile.absolutePath
+     }
+
+     private fun Point.toWritable(): WritableMap = Arguments.createMap().apply {
+         putDouble("x", x)
+         putDouble("y", y)
+     }
+
+     private fun ByteBuffer.toByteArray(): ByteArray {
+         val bytes = ByteArray(remaining())
+         get(bytes)
+         rewind()
+         return bytes
+     }
+
+     companion object {
+         private const val TAG = "RNRDocScanner"
+     }
+ }
+
+ data class QuadPoints(
+     val topLeft: Point,
+     val topRight: Point,
+     val bottomRight: Point,
+     val bottomLeft: Point,
+ ) {
+     fun toArray(): Array<Point> = arrayOf(topLeft, topRight, bottomRight, bottomLeft)
+     fun scaled(scaleX: Double, scaleY: Double): QuadPoints = QuadPoints(
+         topLeft = Point(topLeft.x * scaleX, topLeft.y * scaleY),
+         topRight = Point(topRight.x * scaleX, topRight.y * scaleY),
+         bottomRight = Point(bottomRight.x * scaleX, bottomRight.y * scaleY),
+         bottomLeft = Point(bottomLeft.x * scaleX, bottomLeft.y * scaleY),
+     )
+ }
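
For orientation, the WritableMap payloads built in emitDetectionResult and handleCaptureSuccess above correspond roughly to the following JavaScript event shapes. This is an illustrative TypeScript sketch only; the type names are not part of the package's published typings.

type ScannerPoint = { x: number; y: number };

type RectangleDetectPayload = {
  // null whenever no quadrilateral was detected in the current frame
  rectangleCoordinates: {
    topLeft: ScannerPoint;
    topRight: ScannerPoint;
    bottomRight: ScannerPoint;
    bottomLeft: ScannerPoint;
  } | null;
  // consecutive detections, capped at detectionCountBeforeCapture
  stableCounter: number;
  frameWidth: number;
  frameHeight: number;
};

type PictureTakenPayload = {
  initialImage: string; // file:// URI of the raw capture
  croppedImage: string; // file:// URI of the perspective-corrected crop, or the raw file if no quad was known
  width: number;
  height: number;
};
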
@@ -0,0 +1,50 @@
+ package com.reactnativerectangledocscanner
+
+ import com.facebook.react.bridge.ReactApplicationContext
+ import com.facebook.react.bridge.ReactContext
+ import com.facebook.react.uimanager.SimpleViewManager
+ import com.facebook.react.uimanager.ThemedReactContext
+ import com.facebook.react.uimanager.annotations.ReactProp
+
+ @androidx.camera.core.ExperimentalGetImage
+ class RNRDocScannerViewManager(
+     private val reactContext: ReactApplicationContext,
+ ) : SimpleViewManager<RNRDocScannerView>() {
+
+     override fun getName() = "RNRDocScannerView"
+
+     override fun createViewInstance(reactContext: ThemedReactContext): RNRDocScannerView {
+         return RNRDocScannerView(reactContext)
+     }
+
+     override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any> {
+         return mutableMapOf(
+             "onRectangleDetect" to mapOf("registrationName" to "onRectangleDetect"),
+             "onPictureTaken" to mapOf("registrationName" to "onPictureTaken"),
+         )
+     }
+
+     @ReactProp(name = "detectionCountBeforeCapture", defaultInt = 8)
+     fun setDetectionCountBeforeCapture(view: RNRDocScannerView, value: Int) {
+         view.detectionCountBeforeCapture = value
+     }
+
+     @ReactProp(name = "autoCapture", defaultBoolean = true)
+     fun setAutoCapture(view: RNRDocScannerView, value: Boolean) {
+         view.autoCapture = value
+     }
+
+     @ReactProp(name = "enableTorch", defaultBoolean = false)
+     fun setEnableTorch(view: RNRDocScannerView, value: Boolean) {
+         view.enableTorch = value
+     }
+
+     @ReactProp(name = "quality", defaultInt = 90)
+     fun setQuality(view: RNRDocScannerView, value: Int) {
+         view.quality = value
+     }
+
+     @ReactProp(name = "useBase64", defaultBoolean = false)
+     fun setUseBase64(view: RNRDocScannerView, value: Boolean) {
+         view.useBase64 = value
+     }
+ }
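
The diff does not include the JavaScript wrapper, but a component bound to this manager would typically be declared along these lines. The component and prop names come from RNRDocScannerViewManager above; everything else (export name, payload typing) is an assumption for illustration.

import { requireNativeComponent, type ViewProps } from 'react-native';

// Prop names mirror the @ReactProp setters above; the two direct events
// ("onRectangleDetect", "onPictureTaken") carry the payload shapes sketched
// after the view implementation.
type RNRDocScannerNativeProps = ViewProps & {
  detectionCountBeforeCapture?: number;
  autoCapture?: boolean;
  enableTorch?: boolean;
  quality?: number;
  useBase64?: boolean;
  onRectangleDetect?: (event: { nativeEvent: unknown }) => void;
  onPictureTaken?: (event: { nativeEvent: unknown }) => void;
};

export const RNRDocScannerNative =
  requireNativeComponent<RNRDocScannerNativeProps>('RNRDocScannerView');
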
@@ -1,10 +1,15 @@
  import React, { ReactNode } from 'react';
  import type { Point } from './types';
- /**
-  * Detection configuration is no longer used now that the native
-  * implementation handles edge detection. Keeping it for backwards
-  * compatibility with existing consumer code.
-  */
+ type PictureEvent = {
+   croppedImage?: string | null;
+   initialImage?: string;
+   width?: number;
+   height?: number;
+ };
+ type DocScannerHandle = {
+   capture: () => Promise<PictureEvent>;
+   reset: () => void;
+ };
  export interface DetectionConfig {
    processingWidth?: number;
    cannyLowThreshold?: number;
@@ -32,5 +37,5 @@ interface Props {
    gridLineWidth?: number;
    detectionConfig?: DetectionConfig;
  }
- export declare const DocScanner: React.FC<Props>;
- export {};
+ export declare const DocScanner: React.ForwardRefExoticComponent<Props & React.RefAttributes<DocScannerHandle>>;
+ export type { DocScannerHandle };
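
Taken together, the new declarations give consumers an imperative handle in addition to auto-capture. A minimal usage sketch, assuming the package entry point re-exports DocScanner and DocScannerHandle as declared above and that all Props members are optional:

import React, { useRef } from 'react';
import { Button, View } from 'react-native';
import { DocScanner, type DocScannerHandle } from 'react-native-rectangle-doc-scanner';

export function ScanScreen() {
  const scannerRef = useRef<DocScannerHandle>(null);

  const onCapturePress = async () => {
    // capture() resolves with the PictureEvent declared above.
    const picture = await scannerRef.current?.capture();
    if (picture?.croppedImage) {
      console.log('Cropped scan saved at', picture.croppedImage);
    }
  };

  return (
    <View style={{ flex: 1 }}>
      <DocScanner ref={scannerRef} />
      <Button title="Capture" onPress={onCapturePress} />
    </View>
  );
}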