react-native-rectangle-doc-scanner 3.210.0 → 3.212.0

This diff shows the published contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -5,69 +5,51 @@ import android.content.Context
  import android.content.pm.PackageManager
  import android.graphics.Bitmap
  import android.graphics.BitmapFactory
- import android.graphics.ImageFormat
  import android.graphics.Matrix
  import android.graphics.Rect
- import android.graphics.RectF
- import android.graphics.SurfaceTexture
  import android.graphics.YuvImage
- import android.hardware.camera2.CameraCaptureSession
- import android.hardware.camera2.CameraCharacteristics
- import android.hardware.camera2.CameraDevice
- import android.hardware.camera2.CameraManager
- import android.hardware.camera2.CaptureRequest
- import android.media.Image
- import android.media.ImageReader
- import android.os.Handler
- import android.os.HandlerThread
  import android.util.Log
  import android.util.Size
- import android.view.Gravity
  import android.view.Surface
- import android.view.TextureView
+ import androidx.camera.core.AspectRatio
+ import androidx.camera.core.Camera
+ import androidx.camera.core.CameraSelector
+ import androidx.camera.core.ImageAnalysis
+ import androidx.camera.core.Preview
+ import androidx.camera.lifecycle.ProcessCameraProvider
+ import androidx.camera.view.PreviewView
  import androidx.core.content.ContextCompat
  import androidx.lifecycle.LifecycleOwner
+ import com.google.common.util.concurrent.ListenableFuture
  import java.io.ByteArrayOutputStream
  import java.io.File
  import java.io.FileOutputStream
- import java.util.concurrent.atomic.AtomicBoolean
+ import java.util.concurrent.ExecutorService
+ import java.util.concurrent.Executors
+ import java.util.concurrent.atomic.AtomicReference

  class CameraController(
      private val context: Context,
      private val lifecycleOwner: LifecycleOwner,
-     private val previewView: TextureView
+     private val previewView: PreviewView
  ) {
-     private val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
-     private var cameraDevice: CameraDevice? = null
-     private var captureSession: CameraCaptureSession? = null
-     private var previewRequestBuilder: CaptureRequest.Builder? = null
-     private var imageReader: ImageReader? = null
-     private var backgroundThread: HandlerThread? = null
-     private var backgroundHandler: Handler? = null
-     private var previewLayoutListener: android.view.View.OnLayoutChangeListener? = null
-
-     private var cameraId: String? = null
-     private var sensorOrientation: Int = 0
-     private var sensorAspectRatio: Float? = null
-     private var previewSize: Size? = null
-     private var analysisSize: Size? = null
-     private var previewChoices: Array<Size> = emptyArray()
-     private var analysisChoices: Array<Size> = emptyArray()
+     private var cameraProviderFuture: ListenableFuture<ProcessCameraProvider>? = null
+     private var cameraProvider: ProcessCameraProvider? = null
+     private var preview: Preview? = null
+     private var imageAnalysis: ImageAnalysis? = null
+     private var camera: Camera? = null
+     private val cameraExecutor: ExecutorService = Executors.newSingleThreadExecutor()
+     private val lastFrame = AtomicReference<LastFrame?>()
+
      private var useFrontCamera = false
-     private var torchEnabled = false
      private var detectionEnabled = true
-     private var hasStarted = false
-
-     private val isOpening = AtomicBoolean(false)
-     private val lastFrameLock = Any()
-     private var lastFrame: LastFrame? = null

      var onFrameAnalyzed: ((Rectangle?, Int, Int) -> Unit)? = null

      companion object {
          private const val TAG = "CameraController"
-         private const val MAX_ANALYSIS_WIDTH = 1280
-         private const val MAX_ANALYSIS_HEIGHT = 720
+         private const val ANALYSIS_WIDTH = 1280
+         private const val ANALYSIS_HEIGHT = 720
      }

      private data class LastFrame(
@@ -78,89 +60,36 @@ class CameraController(
          val isFront: Boolean
      )

-     private val textureListener = object : TextureView.SurfaceTextureListener {
-         override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
-             Log.d(TAG, "[CAMERA2] Texture available: ${width}x${height}")
-             createPreviewSession()
-         }
-
-         override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
-             Log.d(TAG, "[CAMERA2] Texture size changed: ${width}x${height}")
-             updatePreviewTransform()
-         }
-
-         override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
-             Log.d(TAG, "[CAMERA2] Texture destroyed")
-             return true
-         }
-
-         override fun onSurfaceTextureUpdated(surface: SurfaceTexture) = Unit
-     }
-
      fun startCamera(
          useFrontCam: Boolean = false,
          enableDetection: Boolean = true
      ) {
-         Log.d(TAG, "========================================")
-         Log.d(TAG, "[CAMERA2] startCamera called")
-         Log.d(TAG, "[CAMERA2] useFrontCam: $useFrontCam")
-         Log.d(TAG, "[CAMERA2] enableDetection: $enableDetection")
-         Log.d(TAG, "[CAMERA2] lifecycleOwner: $lifecycleOwner")
-         Log.d(TAG, "========================================")
-
+         Log.d(TAG, "[CAMERAX] startCamera called")
          this.useFrontCamera = useFrontCam
          this.detectionEnabled = enableDetection

-         if (hasStarted) {
-             Log.d(TAG, "[CAMERA2] Already started, skipping")
-             return
-         }
-         hasStarted = true
-
          if (!hasCameraPermission()) {
-             Log.e(TAG, "[CAMERA2] Camera permission not granted")
+             Log.e(TAG, "[CAMERAX] Camera permission not granted")
              return
          }

-         startBackgroundThread()
-         chooseCamera()
-
-         if (previewLayoutListener == null) {
-             previewLayoutListener = android.view.View.OnLayoutChangeListener { _, _, _, _, _, _, _, _, _ ->
-                 updatePreviewTransform()
-             }
-             previewView.addOnLayoutChangeListener(previewLayoutListener)
+         if (cameraProviderFuture == null) {
+             cameraProviderFuture = ProcessCameraProvider.getInstance(context)
          }

-         if (previewView.isAvailable) {
-             openCamera()
-         } else {
-             previewView.surfaceTextureListener = textureListener
-         }
+         cameraProviderFuture?.addListener({
+             try {
+                 cameraProvider = cameraProviderFuture?.get()
+                 bindCameraUseCases()
+             } catch (e: Exception) {
+                 Log.e(TAG, "[CAMERAX] Failed to get camera provider", e)
+             }
+         }, ContextCompat.getMainExecutor(context))
      }

      fun stopCamera() {
-         Log.d(TAG, "[CAMERA2] stopCamera called")
-         previewLayoutListener?.let { listener ->
-             previewView.removeOnLayoutChangeListener(listener)
-         }
-         previewLayoutListener = null
-         try {
-             captureSession?.close()
-             captureSession = null
-         } catch (e: Exception) {
-             Log.w(TAG, "[CAMERA2] Failed to close session", e)
-         }
-         try {
-             cameraDevice?.close()
-             cameraDevice = null
-         } catch (e: Exception) {
-             Log.w(TAG, "[CAMERA2] Failed to close camera device", e)
-         }
-         imageReader?.close()
-         imageReader = null
-         stopBackgroundThread()
-         hasStarted = false
+         Log.d(TAG, "[CAMERAX] stopCamera called")
+         cameraProvider?.unbindAll()
      }

      fun capturePhoto(
@@ -168,19 +97,15 @@ class CameraController(
          onImageCaptured: (File) -> Unit,
          onError: (Exception) -> Unit
      ) {
-         val frame = synchronized(lastFrameLock) { lastFrame }
+         val frame = lastFrame.get()
          if (frame == null) {
-             onError(Exception("No frame available for capture"))
+             onError(IllegalStateException("No frame available for capture"))
              return
          }

-         backgroundHandler?.post {
+         cameraExecutor.execute {
              try {
-                 val photoFile = File(
-                     outputDirectory,
-                     "doc_scan_${System.currentTimeMillis()}.jpg"
-                 )
-
+                 val photoFile = File(outputDirectory, "doc_scan_${System.currentTimeMillis()}.jpg")
                  val jpegBytes = nv21ToJpeg(frame.nv21, frame.width, frame.height, 95)
                  val bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)
                      ?: throw IllegalStateException("Failed to decode JPEG")
@@ -194,379 +119,126 @@ class CameraController(
                  }
                  bitmap.recycle()

-                 Log.d(TAG, "[CAMERA2] Photo capture succeeded: ${photoFile.absolutePath}")
+                 Log.d(TAG, "[CAMERAX] Photo capture succeeded: ${photoFile.absolutePath}")
                  onImageCaptured(photoFile)
              } catch (e: Exception) {
-                 Log.e(TAG, "[CAMERA2] Photo capture failed", e)
+                 Log.e(TAG, "[CAMERAX] Photo capture failed", e)
                  onError(e)
              }
          }
      }

      fun setTorchEnabled(enabled: Boolean) {
-         torchEnabled = enabled
-         val builder = previewRequestBuilder ?: return
-         builder.set(CaptureRequest.FLASH_MODE, if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
-         try {
-             captureSession?.setRepeatingRequest(builder.build(), null, backgroundHandler)
-         } catch (e: Exception) {
-             Log.w(TAG, "[CAMERA2] Failed to update torch", e)
-         }
+         camera?.cameraControl?.enableTorch(enabled)
      }

      fun switchCamera() {
          useFrontCamera = !useFrontCamera
-         stopCamera()
-         startCamera(useFrontCamera, detectionEnabled)
+         bindCameraUseCases()
      }

      fun isTorchAvailable(): Boolean {
-         val id = cameraId ?: return false
-         val characteristics = cameraManager.getCameraCharacteristics(id)
-         return characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) == true
+         return camera?.cameraInfo?.hasFlashUnit() == true
      }

      fun focusAt(x: Float, y: Float) {
-         // No-op for now. Camera2 focus metering can be added if needed.
+         // No-op for now.
      }

      fun shutdown() {
          stopCamera()
+         cameraExecutor.shutdown()
      }

-     private fun chooseCamera() {
-         val lensFacing = if (useFrontCamera) {
-             CameraCharacteristics.LENS_FACING_FRONT
-         } else {
-             CameraCharacteristics.LENS_FACING_BACK
-         }
-
-         val ids = cameraManager.cameraIdList
-         val selected = ids.firstOrNull { id ->
-             val characteristics = cameraManager.getCameraCharacteristics(id)
-             characteristics.get(CameraCharacteristics.LENS_FACING) == lensFacing
-         } ?: ids.firstOrNull()
-
-         if (selected == null) {
-             Log.e(TAG, "[CAMERA2] No camera available")
-             return
-         }
+     private fun bindCameraUseCases() {
+         val provider = cameraProvider ?: return
+         provider.unbindAll()

-         cameraId = selected
-         val characteristics = cameraManager.getCameraCharacteristics(selected)
-         sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0
-         val activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)
-         sensorAspectRatio = activeArray?.let { rect ->
-             if (rect.height() != 0) rect.width().toFloat() / rect.height().toFloat() else null
-         }
+         val rotation = previewView.display?.rotation ?: Surface.ROTATION_0
+         preview = Preview.Builder()
+             .setTargetRotation(rotation)
+             .build()
+             .also {
+                 it.setSurfaceProvider(previewView.surfaceProvider)
+             }

-         val streamConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
-         previewChoices = streamConfig?.getOutputSizes(SurfaceTexture::class.java) ?: emptyArray()
-         analysisChoices = streamConfig?.getOutputSizes(ImageFormat.YUV_420_888) ?: emptyArray()
+         imageAnalysis = ImageAnalysis.Builder()
+             .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+             .setTargetResolution(Size(ANALYSIS_WIDTH, ANALYSIS_HEIGHT))
+             .setTargetRotation(rotation)
+             .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888)
+             .build()
+             .also {
+                 it.setAnalyzer(cameraExecutor, DocumentAnalyzer())
+             }

-         val viewWidth = if (previewView.width > 0) previewView.width else context.resources.displayMetrics.widthPixels
-         val viewHeight = if (previewView.height > 0) previewView.height else context.resources.displayMetrics.heightPixels
-         val targetRatio = if (viewWidth > 0 && viewHeight > 0) {
-             viewWidth.toFloat() / viewHeight.toFloat()
+         val cameraSelector = if (useFrontCamera) {
+             CameraSelector.DEFAULT_FRONT_CAMERA
          } else {
-             null
-         }
-
-         logSizeCandidates("preview", previewChoices, targetRatio, sensorAspectRatio)
-         logSizeCandidates("analysis", analysisChoices, targetRatio, sensorAspectRatio)
-
-         previewSize = choosePreviewSize(previewChoices, targetRatio, sensorAspectRatio)
-         analysisSize = chooseAnalysisSize(analysisChoices, targetRatio, sensorAspectRatio)
-         Log.d(
-             TAG,
-             "[CAMERA2] chooseCamera view=${viewWidth}x${viewHeight} ratio=$targetRatio " +
-                 "sensorOrientation=$sensorOrientation sensorRatio=$sensorAspectRatio " +
-                 "preview=$previewSize analysis=$analysisSize"
-         )
-     }
-
-     private fun openCamera() {
-         val id = cameraId ?: run {
-             Log.e(TAG, "[CAMERA2] Camera id not set")
-             return
-         }
-         if (isOpening.getAndSet(true)) {
-             return
+             CameraSelector.DEFAULT_BACK_CAMERA
          }

          try {
-             cameraManager.openCamera(id, object : CameraDevice.StateCallback() {
-                 override fun onOpened(device: CameraDevice) {
-                     Log.d(TAG, "[CAMERA2] Camera opened")
-                     isOpening.set(false)
-                     cameraDevice = device
-                     createPreviewSession()
-                 }
-
-                 override fun onDisconnected(device: CameraDevice) {
-                     Log.w(TAG, "[CAMERA2] Camera disconnected")
-                     isOpening.set(false)
-                     device.close()
-                     cameraDevice = null
-                 }
-
-                 override fun onError(device: CameraDevice, error: Int) {
-                     Log.e(TAG, "[CAMERA2] Camera error: $error")
-                     isOpening.set(false)
-                     device.close()
-                     cameraDevice = null
-                 }
-             }, backgroundHandler)
-         } catch (e: SecurityException) {
-             isOpening.set(false)
-             Log.e(TAG, "[CAMERA2] Camera permission missing", e)
-         } catch (e: Exception) {
-             isOpening.set(false)
-             Log.e(TAG, "[CAMERA2] Failed to open camera", e)
-         }
-     }
-
-     private fun createPreviewSession() {
-         val device = cameraDevice ?: return
-         val texture = previewView.surfaceTexture ?: return
-         val sizes = ensurePreviewSizes()
-         val previewSize = sizes.first ?: return
-         val analysisSize = sizes.second ?: previewSize
-
-         Log.d(
-             TAG,
-             "[CAMERA2] createPreviewSession view=${previewView.width}x${previewView.height} " +
-                 "preview=${previewSize.width}x${previewSize.height} analysis=${analysisSize.width}x${analysisSize.height}"
-         )
-
-         texture.setDefaultBufferSize(previewSize.width, previewSize.height)
-         val previewSurface = Surface(texture)
-
-         imageReader?.close()
-         imageReader = ImageReader.newInstance(
-             analysisSize.width,
-             analysisSize.height,
-             ImageFormat.YUV_420_888,
-             2
-         ).apply {
-             setOnImageAvailableListener({ reader ->
-                 val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
-                 handleImage(image)
-             }, backgroundHandler)
-         }
-
-         val surfaces = listOf(previewSurface, imageReader!!.surface)
-         try {
-             device.createCaptureSession(
-                 surfaces,
-                 object : CameraCaptureSession.StateCallback() {
-                     override fun onConfigured(session: CameraCaptureSession) {
-                         if (cameraDevice == null) {
-                             return
-                         }
-                         captureSession = session
-                         previewRequestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
-                             addTarget(previewSurface)
-                             addTarget(imageReader!!.surface)
-                             set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
-                             set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
-                             set(CaptureRequest.FLASH_MODE, if (torchEnabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
-                         }
-                         try {
-                             session.setRepeatingRequest(previewRequestBuilder!!.build(), null, backgroundHandler)
-                             Log.d(TAG, "[CAMERA2] Preview session started")
-                             updatePreviewTransform()
-                         } catch (e: Exception) {
-                             Log.e(TAG, "[CAMERA2] Failed to start preview", e)
-                         }
-                     }
-
-                     override fun onConfigureFailed(session: CameraCaptureSession) {
-                         Log.e(TAG, "[CAMERA2] Preview session configure failed")
-                     }
-                 },
-                 backgroundHandler
+             camera = provider.bindToLifecycle(
+                 lifecycleOwner,
+                 cameraSelector,
+                 preview,
+                 imageAnalysis
              )
+             Log.d(TAG, "[CAMERAX] Camera bound successfully")
          } catch (e: Exception) {
-             Log.e(TAG, "[CAMERA2] Failed to create preview session", e)
+             Log.e(TAG, "[CAMERAX] Failed to bind camera", e)
          }
      }

-     private fun ensurePreviewSizes(): Pair<Size?, Size?> {
-         if (previewChoices.isEmpty()) {
-             return Pair(previewSize, analysisSize)
-         }
-
-         val viewWidth = if (previewView.width > 0) previewView.width else context.resources.displayMetrics.widthPixels
-         val viewHeight = if (previewView.height > 0) previewView.height else context.resources.displayMetrics.heightPixels
-         val targetRatio = if (viewWidth > 0 && viewHeight > 0) {
-             viewWidth.toFloat() / viewHeight.toFloat()
-         } else {
-             null
-         }
-
-         val newPreview = choosePreviewSize(previewChoices, targetRatio, sensorAspectRatio)
-         val newAnalysis = chooseAnalysisSize(analysisChoices, targetRatio, sensorAspectRatio)
-
-         if (newPreview != null && newPreview != previewSize) {
-             previewSize = newPreview
-         }
-         if (newAnalysis != null && newAnalysis != analysisSize) {
-             analysisSize = newAnalysis
-         }
-
-         Log.d(
-             TAG,
-             "[CAMERA2] ensurePreviewSizes view=${viewWidth}x${viewHeight} ratio=$targetRatio " +
-                 "preview=${previewSize?.width}x${previewSize?.height} analysis=${analysisSize?.width}x${analysisSize?.height}"
-         )
-         return Pair(previewSize, analysisSize)
-     }
-
-     private fun updatePreviewTransform() {
-         val previewSize = previewSize ?: return
-         ensureMatchParent()
-
-         val viewWidth = previewView.width
-         val viewHeight = previewView.height
-         if (viewWidth == 0 || viewHeight == 0) {
-             return
-         }
-
-         val rotationDegrees = getRotationDegrees()
-         val viewRect = RectF(0f, 0f, viewWidth.toFloat(), viewHeight.toFloat())
-         val bufferRect = if (rotationDegrees == 90 || rotationDegrees == 270) {
-             RectF(0f, 0f, previewSize.height.toFloat(), previewSize.width.toFloat())
-         } else {
-             RectF(0f, 0f, previewSize.width.toFloat(), previewSize.height.toFloat())
-         }
-         val centerX = viewRect.centerX()
-         val centerY = viewRect.centerY()
-
-         bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY())
-
-         val matrix = Matrix()
-         // Map buffer to view, then scale to fill and rotate around the center.
-         matrix.setRectToRect(bufferRect, viewRect, Matrix.ScaleToFit.FILL)
-         val scale = kotlin.math.max(
-             viewWidth.toFloat() / bufferRect.width(),
-             viewHeight.toFloat() / bufferRect.height()
-         )
-         matrix.postScale(scale, scale, centerX, centerY)
-         if (rotationDegrees != 0) {
-             matrix.postRotate(rotationDegrees.toFloat(), centerX, centerY)
-         }
-
-         previewView.setTransform(matrix)
-         Log.d(
-             TAG,
-             "[CAMERA2] transform view=${viewWidth}x${viewHeight} buffer=${previewSize.width}x${previewSize.height} " +
-                 "rotation=$rotationDegrees scale=$scale"
-         )
-     }
-
-     private fun ensureMatchParent() {
-         val parentView = previewView.parent as? android.view.View ?: return
-         val parentWidth = parentView.width
-         val parentHeight = parentView.height
-         if (parentWidth == 0 || parentHeight == 0) {
-             return
-         }
-
-         val layoutParams = (previewView.layoutParams as? android.widget.FrameLayout.LayoutParams)
-             ?: android.widget.FrameLayout.LayoutParams(
-                 android.widget.FrameLayout.LayoutParams.MATCH_PARENT,
-                 android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-             )
-         if (layoutParams.width != android.widget.FrameLayout.LayoutParams.MATCH_PARENT ||
-             layoutParams.height != android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-         ) {
-             layoutParams.width = android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-             layoutParams.height = android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-             layoutParams.gravity = Gravity.CENTER
-             previewView.layoutParams = layoutParams
-         }
-         Log.d(TAG, "[CAMERA2] parent=${parentWidth}x${parentHeight} previewView=${previewView.width}x${previewView.height}")
-     }
-
-     private fun handleImage(image: Image) {
-         try {
-             val rotationDegrees = getRotationDegrees()
-             val width = image.width
-             val height = image.height
-             val nv21 = imageToNV21(image)
-
-             val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) height else width
-             val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) width else height
-
-             synchronized(lastFrameLock) {
-                 lastFrame = LastFrame(nv21, width, height, rotationDegrees, useFrontCamera)
-             }
-
-             if (detectionEnabled) {
-                 val rectangle = DocumentDetector.detectRectangleInYUV(
-                     nv21,
-                     width,
-                     height,
-                     rotationDegrees
+     private inner class DocumentAnalyzer : ImageAnalysis.Analyzer {
+         override fun analyze(imageProxy: androidx.camera.core.ImageProxy) {
+             try {
+                 val rotationDegrees = imageProxy.imageInfo.rotationDegrees
+                 val nv21 = imageProxy.toNv21()
+                 lastFrame.set(
+                     LastFrame(
+                         nv21,
+                         imageProxy.width,
+                         imageProxy.height,
+                         rotationDegrees,
+                         useFrontCamera
+                     )
                  )
-                 onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
-             } else {
-                 onFrameAnalyzed?.invoke(null, frameWidth, frameHeight)
-             }
-         } catch (e: Exception) {
-             Log.e(TAG, "[CAMERA2] Error analyzing frame", e)
-         } finally {
-             image.close()
-         }
-     }

-     private fun imageToNV21(image: Image): ByteArray {
-         val width = image.width
-         val height = image.height
-
-         val ySize = width * height
-         val uvSize = width * height / 2
-         val nv21 = ByteArray(ySize + uvSize)
-
-         val yBuffer = image.planes[0].buffer
-         val uBuffer = image.planes[1].buffer
-         val vBuffer = image.planes[2].buffer
-
-         val yRowStride = image.planes[0].rowStride
-         val yPixelStride = image.planes[0].pixelStride
-         var outputOffset = 0
-         for (row in 0 until height) {
-             var inputOffset = row * yRowStride
-             for (col in 0 until width) {
-                 nv21[outputOffset++] = yBuffer.get(inputOffset)
-                 inputOffset += yPixelStride
-             }
-         }
+                 val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) {
+                     imageProxy.height
+                 } else {
+                     imageProxy.width
+                 }

-         val uvRowStride = image.planes[1].rowStride
-         val uvPixelStride = image.planes[1].pixelStride
-         val vRowStride = image.planes[2].rowStride
-         val vPixelStride = image.planes[2].pixelStride
-
-         val uvHeight = height / 2
-         val uvWidth = width / 2
-         for (row in 0 until uvHeight) {
-             var uInputOffset = row * uvRowStride
-             var vInputOffset = row * vRowStride
-             for (col in 0 until uvWidth) {
-                 nv21[outputOffset++] = vBuffer.get(vInputOffset)
-                 nv21[outputOffset++] = uBuffer.get(uInputOffset)
-                 uInputOffset += uvPixelStride
-                 vInputOffset += vPixelStride
+                 val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) {
+                     imageProxy.width
+                 } else {
+                     imageProxy.height
+                 }
+
+                 if (detectionEnabled) {
+                     val rectangle = DocumentDetector.detectRectangleInYUV(
+                         nv21,
+                         imageProxy.width,
+                         imageProxy.height,
+                         rotationDegrees
+                     )
+                     onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
+                 } else {
+                     onFrameAnalyzed?.invoke(null, frameWidth, frameHeight)
+                 }
+             } catch (e: Exception) {
+                 Log.e(TAG, "[CAMERAX] Error analyzing frame", e)
+             } finally {
+                 imageProxy.close()
              }
          }
-
-         return nv21
      }

      private fun nv21ToJpeg(nv21: ByteArray, width: Int, height: Int, quality: Int): ByteArray {
-         val yuv = YuvImage(nv21, ImageFormat.NV21, width, height, null)
+         val yuv = YuvImage(nv21, android.graphics.ImageFormat.NV21, width, height, null)
          val out = ByteArrayOutputStream()
          yuv.compressToJpeg(Rect(0, 0, width, height), quality, out)
          return out.toByteArray()
@@ -586,141 +258,6 @@ class CameraController(
          return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
      }

-     private fun getRotationDegrees(): Int {
-         val displayRotation = previewView.display?.rotation ?: Surface.ROTATION_0
-         val displayDegrees = when (displayRotation) {
-             Surface.ROTATION_0 -> 0
-             Surface.ROTATION_90 -> 90
-             Surface.ROTATION_180 -> 180
-             Surface.ROTATION_270 -> 270
-             else -> 0
-         }
-
-         return if (useFrontCamera) {
-             (sensorOrientation + displayDegrees) % 360
-         } else {
-             (sensorOrientation - displayDegrees + 360) % 360
-         }
-     }
-
-     private fun choosePreviewSize(
-         choices: Array<Size>,
-         targetRatio: Float?,
-         sensorRatio: Float?
-     ): Size? {
-         if (choices.isEmpty()) {
-             return null
-         }
-         val candidates = choices.toList()
-
-         val ratioBase = sensorRatio ?: targetRatio
-         if (ratioBase == null) {
-             return candidates.maxByOrNull { it.width * it.height }
-         }
-
-         val normalizedTarget = ratioBase
-         val sorted = candidates.sortedWith(
-             compareBy<Size> { size ->
-                 val ratio = size.width.toFloat() / size.height.toFloat()
-                 kotlin.math.abs(ratio - normalizedTarget)
-             }.thenByDescending { size ->
-                 size.width * size.height
-             }
-         )
-         return sorted.first()
-     }
-
-     private fun chooseAnalysisSize(
-         choices: Array<Size>,
-         targetRatio: Float?,
-         sensorRatio: Float?
-     ): Size? {
-         if (choices.isEmpty()) {
-             return null
-         }
-
-         val capped = choices.filter { it.width <= MAX_ANALYSIS_WIDTH && it.height <= MAX_ANALYSIS_HEIGHT }
-         val candidates = if (capped.isNotEmpty()) capped else choices.toList()
-
-         val ratioBase = sensorRatio ?: targetRatio
-         if (ratioBase == null) {
-             return candidates.maxByOrNull { it.width * it.height }
-         }
-
-         val normalizedTarget = ratioBase
-         val sorted = candidates.sortedWith(
-             compareBy<Size> { size ->
-                 val ratio = size.width.toFloat() / size.height.toFloat()
-                 kotlin.math.abs(ratio - normalizedTarget)
-             }.thenByDescending { size ->
-                 size.width * size.height
-             }
-         )
-         return sorted.first()
-     }
-
-     private fun logSizeCandidates(
-         label: String,
-         choices: Array<Size>,
-         targetRatio: Float?,
-         sensorRatio: Float?
-     ) {
-         if (choices.isEmpty()) {
-             Log.d(TAG, "[CAMERA2] $label sizes: none")
-             return
-         }
-
-         val ratioBase = sensorRatio ?: targetRatio
-         if (ratioBase == null) {
-             Log.d(TAG, "[CAMERA2] $label sizes: ${choices.size}, ratioBase=null")
-             return
-         }
-
-         val normalizedTarget = ratioBase
-         val sorted = choices.sortedWith(
-             compareBy<Size> { size ->
-                 val ratio = size.width.toFloat() / size.height.toFloat()
-                 kotlin.math.abs(ratio - normalizedTarget)
-             }.thenByDescending { size ->
-                 size.width * size.height
-             }
-         )
-
-         val top = sorted.take(5).joinToString { size ->
-             val ratio = size.width.toFloat() / size.height.toFloat()
-             val diff = kotlin.math.abs(ratio - normalizedTarget)
-             "${size.width}x${size.height}(r=${"%.3f".format(ratio)},d=${"%.3f".format(diff)})"
-         }
-
-         Log.d(
-             TAG,
-             "[CAMERA2] $label sizes: ${choices.size}, ratioBase=${"%.3f".format(normalizedTarget)} " +
-                 "sensor=${sensorRatio?.let { "%.3f".format(it) }} target=${targetRatio?.let { "%.3f".format(it) }} top=$top"
-         )
-     }
-
-     private fun startBackgroundThread() {
-         if (backgroundThread != null) {
-             return
-         }
-         backgroundThread = HandlerThread("Camera2Background").also {
-             it.start()
-             backgroundHandler = Handler(it.looper)
-         }
-     }
-
-     private fun stopBackgroundThread() {
-         try {
-             backgroundThread?.quitSafely()
-             backgroundThread?.join()
-         } catch (e: InterruptedException) {
-             Log.w(TAG, "[CAMERA2] Background thread shutdown interrupted", e)
-         } finally {
-             backgroundThread = null
-             backgroundHandler = null
-         }
-     }
-
      private fun hasCameraPermission(): Boolean {
          return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED
      }
@@ -8,9 +8,9 @@ import android.graphics.Paint
  import android.graphics.PorterDuff
  import android.graphics.PorterDuffXfermode
  import android.util.Log
- import android.view.TextureView
  import android.view.View
  import android.widget.FrameLayout
+ import androidx.camera.view.PreviewView
  import androidx.lifecycle.Lifecycle
  import androidx.lifecycle.LifecycleOwner
  import androidx.lifecycle.LifecycleRegistry
@@ -25,7 +25,7 @@ import kotlin.math.min

  class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), LifecycleOwner {
      private val themedContext = context
-     private val previewView: TextureView
+     private val previewView: PreviewView
      private val overlayView: OverlayView
      private var cameraController: CameraController? = null
      private val lifecycleRegistry = LifecycleRegistry(this)
@@ -73,23 +73,20 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
          Log.d(TAG, "[INIT] Lifecycle state: ${lifecycleRegistry.currentState}")

          // Create preview view
-         Log.d(TAG, "[INIT] Creating TextureView...")
-         previewView = TextureView(context).apply {
+         Log.d(TAG, "[INIT] Creating PreviewView...")
+         previewView = PreviewView(context).apply {
              layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
              visibility = View.VISIBLE
              keepScreenOn = true
-             // Force view to be drawn
-             setWillNotDraw(false)
-             // Ensure the view is on top
-             bringToFront()
-             requestLayout()
+             implementationMode = PreviewView.ImplementationMode.COMPATIBLE
+             scaleType = PreviewView.ScaleType.FILL_CENTER
          }
-         Log.d(TAG, "[INIT] TextureView created: $previewView")
-         Log.d(TAG, "[INIT] TextureView visibility: ${previewView.visibility}")
+         Log.d(TAG, "[INIT] PreviewView created: $previewView")
+         Log.d(TAG, "[INIT] PreviewView visibility: ${previewView.visibility}")

-         Log.d(TAG, "[INIT] Adding TextureView to parent...")
+         Log.d(TAG, "[INIT] Adding PreviewView to parent...")
          addView(previewView)
-         Log.d(TAG, "[INIT] TextureView added, childCount: $childCount")
+         Log.d(TAG, "[INIT] PreviewView added, childCount: $childCount")

          // Create overlay view for drawing rectangle
          Log.d(TAG, "[INIT] Creating OverlayView...")
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "react-native-rectangle-doc-scanner",
-   "version": "3.210.0",
+   "version": "3.212.0",
    "description": "Native-backed document scanner for React Native with customizable overlays.",
    "license": "MIT",
    "main": "dist/index.js",