react-native-rectangle-doc-scanner 3.229.0 → 3.230.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,112 +6,112 @@ import android.content.pm.PackageManager
  import android.graphics.Bitmap
  import android.graphics.BitmapFactory
  import android.graphics.Matrix
- import android.graphics.Rect
- import android.graphics.YuvImage
+ import android.graphics.SurfaceTexture
+ import android.graphics.ImageFormat
+ import android.hardware.camera2.CameraCaptureSession
+ import android.hardware.camera2.CameraCharacteristics
+ import android.hardware.camera2.CameraDevice
+ import android.hardware.camera2.CameraManager
+ import android.hardware.camera2.CaptureRequest
+ import android.media.Image
+ import android.media.ImageReader
+ import android.os.Handler
+ import android.os.HandlerThread
  import android.util.Log
  import android.util.Size
  import android.view.Surface
- import androidx.camera.core.Camera
- import androidx.camera.core.CameraSelector
- import androidx.camera.core.ImageAnalysis
- import androidx.camera.core.ImageCapture
- import androidx.camera.core.ImageCaptureException
- import androidx.camera.core.Preview
- import androidx.camera.lifecycle.ProcessCameraProvider
- import androidx.camera.view.PreviewView
+ import android.view.TextureView
  import androidx.core.content.ContextCompat
- import androidx.lifecycle.LifecycleOwner
- import com.google.common.util.concurrent.ListenableFuture
- import java.io.ByteArrayOutputStream
  import java.io.File
  import java.io.FileOutputStream
- import java.util.concurrent.ExecutorService
- import java.util.concurrent.Executors
  import java.util.concurrent.atomic.AtomicReference
+ import kotlin.math.abs
+ import kotlin.math.max

  class CameraController(
  private val context: Context,
- private val lifecycleOwner: LifecycleOwner,
- private val previewView: PreviewView
+ private val lifecycleOwner: androidx.lifecycle.LifecycleOwner,
+ private val previewView: TextureView
  ) {
- private var cameraProviderFuture: ListenableFuture<ProcessCameraProvider>? = null
- private var cameraProvider: ProcessCameraProvider? = null
- private var preview: Preview? = null
- private var imageAnalysis: ImageAnalysis? = null
- private var imageCapture: ImageCapture? = null
- private var camera: Camera? = null
- private val cameraExecutor: ExecutorService = Executors.newSingleThreadExecutor()
- private val lastFrame = AtomicReference<LastFrame?>()
- private var analysisBound = false
- private var pendingBindAttempts = 0
- private var triedLowResFallback = false
- private val streamCheckHandler = android.os.Handler(android.os.Looper.getMainLooper())
- private var streamCheckRunnable: Runnable? = null
+ private val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
+ private var cameraDevice: CameraDevice? = null
+ private var captureSession: CameraCaptureSession? = null
+ private var previewRequestBuilder: CaptureRequest.Builder? = null
+
+ private var previewSize: Size? = null
+ private var analysisSize: Size? = null
+ private var captureSize: Size? = null
+
+ private var yuvReader: ImageReader? = null
+ private var jpegReader: ImageReader? = null
+
+ private val cameraThread = HandlerThread("Camera2Thread").apply { start() }
+ private val cameraHandler = Handler(cameraThread.looper)
+ private val analysisThread = HandlerThread("Camera2Analysis").apply { start() }
+ private val analysisHandler = Handler(analysisThread.looper)

  private var useFrontCamera = false
  private var detectionEnabled = true
+ private var torchEnabled = false

- // For periodic frame capture
- private var isAnalysisActive = false
- private val analysisHandler = android.os.Handler(android.os.Looper.getMainLooper())
- private val analysisRunnable = object : Runnable {
- override fun run() {
- if (isAnalysisActive && onFrameAnalyzed != null) {
- captureFrameForAnalysis()
- analysisHandler.postDelayed(this, 200) // Capture every 200ms
- }
- }
- }
+ private val pendingCapture = AtomicReference<PendingCapture?>()

  var onFrameAnalyzed: ((Rectangle?, Int, Int) -> Unit)? = null

  companion object {
  private const val TAG = "CameraController"
+ private const val ANALYSIS_MAX_AREA = 1920 * 1080
+ private const val ANALYSIS_ASPECT_TOLERANCE = 0.15
  }

- private data class LastFrame(
- val nv21: ByteArray,
- val width: Int,
- val height: Int,
- val rotationDegrees: Int,
- val isFront: Boolean
+ private data class PendingCapture(
+ val outputDirectory: File,
+ val onImageCaptured: (File) -> Unit,
+ val onError: (Exception) -> Unit
  )

+ private val textureListener = object : TextureView.SurfaceTextureListener {
+ override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
+ openCamera()
+ }
+
+ override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
+ configureTransform()
+ }
+
+ override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
+ return true
+ }
+
+ override fun onSurfaceTextureUpdated(surface: SurfaceTexture) {
+ // no-op
+ }
+ }
+
  fun startCamera(
  useFrontCam: Boolean = false,
  enableDetection: Boolean = true
  ) {
- Log.d(TAG, "[CAMERAX-V6] startCamera called")
+ Log.d(TAG, "[CAMERA2] startCamera called")
  this.useFrontCamera = useFrontCam
  this.detectionEnabled = enableDetection
- triedLowResFallback = false

  if (!hasCameraPermission()) {
- Log.e(TAG, "[CAMERAX-V6] Camera permission not granted")
+ Log.e(TAG, "[CAMERA2] Camera permission not granted")
  return
  }

- if (cameraProviderFuture == null) {
- cameraProviderFuture = ProcessCameraProvider.getInstance(context)
+ if (previewView.isAvailable) {
+ openCamera()
+ } else {
+ previewView.surfaceTextureListener = textureListener
  }
-
- cameraProviderFuture?.addListener({
- try {
- cameraProvider = cameraProviderFuture?.get()
- bindCameraUseCases()
- } catch (e: Exception) {
- Log.e(TAG, "[CAMERAX-V6] Failed to get camera provider", e)
- }
- }, ContextCompat.getMainExecutor(context))
  }

  fun stopCamera() {
- Log.d(TAG, "[CAMERAX-V6] stopCamera called")
- isAnalysisActive = false
- analysisHandler.removeCallbacks(analysisRunnable)
- streamCheckRunnable?.let { streamCheckHandler.removeCallbacks(it) }
- cameraProvider?.unbindAll()
- analysisBound = false
+ Log.d(TAG, "[CAMERA2] stopCamera called")
+ previewView.surfaceTextureListener = null
+ closeSession()
  }

  fun capturePhoto(
@@ -119,195 +119,337 @@ class CameraController(
  onImageCaptured: (File) -> Unit,
  onError: (Exception) -> Unit
  ) {
- val frame = lastFrame.get()
- if (frame == null) {
- onError(IllegalStateException("No frame available for capture"))
+ val device = cameraDevice
+ val session = captureSession
+ val reader = jpegReader
+ if (device == null || session == null || reader == null) {
+ onError(IllegalStateException("Camera not ready for capture"))
  return
  }

- cameraExecutor.execute {
- try {
- val photoFile = File(outputDirectory, "doc_scan_${System.currentTimeMillis()}.jpg")
- val jpegBytes = nv21ToJpeg(frame.nv21, frame.width, frame.height, 95)
- val bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)
- ?: throw IllegalStateException("Failed to decode JPEG")
+ if (!pendingCapture.compareAndSet(null, PendingCapture(outputDirectory, onImageCaptured, onError))) {
+ onError(IllegalStateException("Capture already in progress"))
+ return
+ }

- val rotated = rotateAndMirror(bitmap, frame.rotationDegrees, frame.isFront)
- FileOutputStream(photoFile).use { out ->
- rotated.compress(Bitmap.CompressFormat.JPEG, 95, out)
- }
- if (rotated != bitmap) {
- rotated.recycle()
+ try {
+ val requestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE).apply {
+ addTarget(reader.surface)
+ set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
+ set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
+ if (torchEnabled) {
+ set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH)
  }
- bitmap.recycle()
-
- Log.d(TAG, "[CAMERAX-V6] Photo capture succeeded: ${photoFile.absolutePath}")
- onImageCaptured(photoFile)
- } catch (e: Exception) {
- Log.e(TAG, "[CAMERAX-V6] Photo capture failed", e)
- onError(e)
+ set(CaptureRequest.JPEG_ORIENTATION, 0)
  }
+
+ session.capture(requestBuilder.build(), object : CameraCaptureSession.CaptureCallback() {}, cameraHandler)
+ } catch (e: Exception) {
+ pendingCapture.getAndSet(null)?.onError?.invoke(e)
  }
  }

  fun setTorchEnabled(enabled: Boolean) {
- camera?.cameraControl?.enableTorch(enabled)
+ torchEnabled = enabled
+ updateRepeatingRequest()
  }

  fun switchCamera() {
  useFrontCamera = !useFrontCamera
- bindCameraUseCases()
+ closeSession()
+ openCamera()
  }

  fun isTorchAvailable(): Boolean {
- return camera?.cameraInfo?.hasFlashUnit() == true
+ return try {
+ val cameraId = selectCameraId() ?: return false
+ val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+ characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) == true
+ } catch (e: Exception) {
+ false
+ }
  }

  fun focusAt(x: Float, y: Float) {
- // No-op for now.
+ // Optional: implement touch-to-focus if needed.
  }

  fun shutdown() {
  stopCamera()
- cameraExecutor.shutdown()
+ cameraThread.quitSafely()
+ analysisThread.quitSafely()
  }

- private fun bindCameraUseCases(useLowRes: Boolean = false) {
- if (!previewView.isAttachedToWindow || previewView.width == 0 || previewView.height == 0) {
- if (pendingBindAttempts < 5) {
- pendingBindAttempts++
- Log.d(TAG, "[CAMERAX-V9] PreviewView not ready (attached=${previewView.isAttachedToWindow}, w=${previewView.width}, h=${previewView.height}), retrying...")
- previewView.post { bindCameraUseCases() }
+ private fun openCamera() {
+ if (cameraDevice != null) {
+ return
+ }
+ val cameraId = selectCameraId() ?: return
+ try {
+ val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+ val streamConfigMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
+ ?: return
+
+ val viewAspect = if (previewView.height == 0) {
+ 1.0
  } else {
- Log.w(TAG, "[CAMERAX-V9] PreviewView still not ready after retries, aborting bind")
+ previewView.width.toDouble() / previewView.height.toDouble()
  }
- return
+
+ val previewSizes = streamConfigMap.getOutputSizes(SurfaceTexture::class.java)
+ previewSize = chooseBestSize(previewSizes, viewAspect, null)
+
+ val analysisSizes = streamConfigMap.getOutputSizes(ImageFormat.YUV_420_888)
+ analysisSize = chooseBestSize(analysisSizes, viewAspect, ANALYSIS_MAX_AREA)
+
+ val captureSizes = streamConfigMap.getOutputSizes(ImageFormat.JPEG)
+ captureSize = captureSizes?.maxByOrNull { it.width * it.height }
+
+ setupImageReaders()
+
+ if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
+ Log.e(TAG, "[CAMERA2] Camera permission not granted")
+ return
+ }
+
+ cameraManager.openCamera(cameraId, object : CameraDevice.StateCallback() {
+ override fun onOpened(camera: CameraDevice) {
+ cameraDevice = camera
+ createCaptureSession()
+ }
+
+ override fun onDisconnected(camera: CameraDevice) {
+ camera.close()
+ cameraDevice = null
+ }
+
+ override fun onError(camera: CameraDevice, error: Int) {
+ Log.e(TAG, "[CAMERA2] CameraDevice error: $error")
+ camera.close()
+ cameraDevice = null
+ }
+ }, cameraHandler)
+ } catch (e: Exception) {
+ Log.e(TAG, "[CAMERA2] Failed to open camera", e)
  }
- pendingBindAttempts = 0
+ }

- val provider = cameraProvider ?: return
- provider.unbindAll()
- analysisBound = false
- isAnalysisActive = false
+ private fun setupImageReaders() {
+ val analysis = analysisSize
+ val capture = captureSize

- val rotation = previewView.display?.rotation ?: Surface.ROTATION_0
+ yuvReader?.close()
+ jpegReader?.close()

- // Build Preview; fall back to a low-res stream if the default config stalls.
- val previewBuilder = Preview.Builder()
- .setTargetRotation(rotation)
- if (useLowRes) {
- previewBuilder.setTargetResolution(Size(640, 480))
+ if (analysis != null) {
+ yuvReader = ImageReader.newInstance(analysis.width, analysis.height, ImageFormat.YUV_420_888, 2).apply {
+ setOnImageAvailableListener({ reader ->
+ if (!detectionEnabled || onFrameAnalyzed == null) {
+ reader.acquireLatestImage()?.close()
+ return@setOnImageAvailableListener
+ }
+ val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
+ analysisHandler.post { analyzeImage(image) }
+ }, cameraHandler)
+ }
  }
- preview = previewBuilder.build().also {
- // IMPORTANT: Set surface provider BEFORE binding
- it.setSurfaceProvider(previewView.surfaceProvider)
+
+ if (capture != null) {
+ jpegReader = ImageReader.newInstance(capture.width, capture.height, ImageFormat.JPEG, 2).apply {
+ setOnImageAvailableListener({ reader ->
+ val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
+ val pending = pendingCapture.getAndSet(null)
+ if (pending == null) {
+ image.close()
+ return@setOnImageAvailableListener
+ }
+ analysisHandler.post { processCapture(image, pending) }
+ }, cameraHandler)
+ }
  }
+ }

- val cameraSelector = if (useFrontCamera) {
- CameraSelector.DEFAULT_FRONT_CAMERA
- } else {
- CameraSelector.DEFAULT_BACK_CAMERA
+ private fun createCaptureSession() {
+ val device = cameraDevice ?: return
+ val surfaceTexture = previewView.surfaceTexture ?: return
+ val preview = previewSize ?: return
+
+ surfaceTexture.setDefaultBufferSize(preview.width, preview.height)
+ val previewSurface = Surface(surfaceTexture)
+
+ val targets = mutableListOf(previewSurface)
+ yuvReader?.surface?.let { targets.add(it) }
+ jpegReader?.surface?.let { targets.add(it) }
+
+ try {
+ device.createCaptureSession(targets, object : CameraCaptureSession.StateCallback() {
+ override fun onConfigured(session: CameraCaptureSession) {
+ captureSession = session
+ configureTransform()
+ startRepeating(previewSurface)
+ }
+
+ override fun onConfigureFailed(session: CameraCaptureSession) {
+ Log.e(TAG, "[CAMERA2] Failed to configure capture session")
+ }
+ }, cameraHandler)
+ } catch (e: Exception) {
+ Log.e(TAG, "[CAMERA2] Failed to create capture session", e)
  }
+ }

- // Bind Preview ONLY first
+ private fun startRepeating(previewSurface: Surface) {
+ val device = cameraDevice ?: return
  try {
- camera = provider.bindToLifecycle(
- lifecycleOwner,
- cameraSelector,
- preview
- )
-
- if (useLowRes) {
- Log.d(TAG, "[CAMERAX-V9] Preview bound with low-res fallback (640x480)")
- } else {
- Log.d(TAG, "[CAMERAX-V9] Preview bound, waiting for capture session to configure...")
+ previewRequestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
+ addTarget(previewSurface)
+ yuvReader?.surface?.let { addTarget(it) }
+ set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
+ set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
+ if (torchEnabled) {
+ set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH)
+ }
  }
+ captureSession?.setRepeatingRequest(previewRequestBuilder?.build() ?: return, null, cameraHandler)
+ } catch (e: Exception) {
+ Log.e(TAG, "[CAMERA2] Failed to start repeating request", e)
+ }
+ }

- // Log session state after some time
- android.os.Handler(android.os.Looper.getMainLooper()).postDelayed({
- Log.d(TAG, "[CAMERAX-V9] Camera state check - preview should be working now")
- }, 6000)
+ private fun updateRepeatingRequest() {
+ val builder = previewRequestBuilder ?: return
+ builder.set(CaptureRequest.FLASH_MODE, if (torchEnabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
+ try {
+ captureSession?.setRepeatingRequest(builder.build(), null, cameraHandler)
+ } catch (e: Exception) {
+ Log.e(TAG, "[CAMERA2] Failed to update torch state", e)
+ }
+ }

- scheduleStreamCheck(useLowRes)
+ private fun analyzeImage(image: Image) {
+ try {
+ val nv21 = image.toNv21()
+ val rotationDegrees = computeRotationDegrees()
+ val rectangle = DocumentDetector.detectRectangleInYUV(nv21, image.width, image.height, rotationDegrees)
+
+ val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) image.height else image.width
+ val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) image.width else image.height
+
+ onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
  } catch (e: Exception) {
- Log.e(TAG, "[CAMERAX-V8] Failed to bind preview", e)
+ Log.e(TAG, "[CAMERA2] Error analyzing frame", e)
+ } finally {
+ image.close()
  }
  }

- private fun scheduleStreamCheck(usingLowRes: Boolean) {
- streamCheckRunnable?.let { streamCheckHandler.removeCallbacks(it) }
- streamCheckRunnable = Runnable {
- val state = previewView.previewStreamState.value
- val streaming = state == PreviewView.StreamState.STREAMING
- if (!streaming && !usingLowRes && !triedLowResFallback) {
- triedLowResFallback = true
- Log.w(TAG, "[CAMERAX-V9] Preview not streaming; retrying with low-res fallback")
- bindCameraUseCases(useLowRes = true)
- } else if (!streaming) {
- Log.w(TAG, "[CAMERAX-V9] Preview still not streaming after fallback")
+ private fun processCapture(image: Image, pending: PendingCapture) {
+ try {
+ val buffer = image.planes[0].buffer
+ val bytes = ByteArray(buffer.remaining())
+ buffer.get(bytes)
+ val bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.size)
+ ?: throw IllegalStateException("Failed to decode JPEG")
+
+ val rotated = rotateAndMirror(bitmap, computeRotationDegrees(), useFrontCamera)
+ val photoFile = File(pending.outputDirectory, "doc_scan_${System.currentTimeMillis()}.jpg")
+ FileOutputStream(photoFile).use { out ->
+ rotated.compress(Bitmap.CompressFormat.JPEG, 95, out)
+ }
+
+ if (rotated != bitmap) {
+ rotated.recycle()
  }
+ bitmap.recycle()
+
+ pending.onImageCaptured(photoFile)
+ } catch (e: Exception) {
+ pending.onError(e)
+ } finally {
+ image.close()
  }
- streamCheckHandler.postDelayed(streamCheckRunnable!!, 6500)
  }

- // Function removed - this device cannot handle ImageCapture + Preview simultaneously
-
- private fun captureFrameForAnalysis() {
- val capture = imageCapture ?: return
-
- capture.takePicture(cameraExecutor, object : ImageCapture.OnImageCapturedCallback() {
- override fun onCaptureSuccess(image: androidx.camera.core.ImageProxy) {
- try {
- val rotationDegrees = image.imageInfo.rotationDegrees
- val nv21 = image.toNv21()
-
- lastFrame.set(
- LastFrame(
- nv21,
- image.width,
- image.height,
- rotationDegrees,
- useFrontCamera
- )
- )
-
- val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) {
- image.height
- } else {
- image.width
- }
+ private fun closeSession() {
+ try {
+ captureSession?.close()
+ captureSession = null
+ cameraDevice?.close()
+ cameraDevice = null
+ } catch (e: Exception) {
+ Log.e(TAG, "[CAMERA2] Error closing camera", e)
+ } finally {
+ yuvReader?.close()
+ jpegReader?.close()
+ yuvReader = null
+ jpegReader = null
+ previewRequestBuilder = null
+ }
+ }

- val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) {
- image.width
- } else {
- image.height
- }
+ private fun computeRotationDegrees(): Int {
+ val cameraId = selectCameraId() ?: return 0
+ val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+ val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0
+ val displayRotation = displayRotationDegrees()
+ return if (useFrontCamera) {
+ (sensorOrientation + displayRotation) % 360
+ } else {
+ (sensorOrientation - displayRotation + 360) % 360
+ }
+ }

- val rectangle = DocumentDetector.detectRectangleInYUV(
- nv21,
- image.width,
- image.height,
- rotationDegrees
- )
- onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
- } catch (e: Exception) {
- Log.e(TAG, "[CAMERAX-V6] Error analyzing frame", e)
- } finally {
- image.close()
- }
- }
+ private fun displayRotationDegrees(): Int {
+ val rotation = previewView.display?.rotation ?: Surface.ROTATION_0
+ return when (rotation) {
+ Surface.ROTATION_0 -> 0
+ Surface.ROTATION_90 -> 90
+ Surface.ROTATION_180 -> 180
+ Surface.ROTATION_270 -> 270
+ else -> 0
+ }
+ }

- override fun onError(exception: ImageCaptureException) {
- Log.e(TAG, "[CAMERAX-V6] Frame capture for analysis failed", exception)
- }
- })
+ private fun configureTransform() {
+ val viewWidth = previewView.width.toFloat()
+ val viewHeight = previewView.height.toFloat()
+ val preview = previewSize ?: return
+ if (viewWidth == 0f || viewHeight == 0f) return
+
+ val rotation = displayRotationDegrees()
+ val bufferWidth = if (rotation == 90 || rotation == 270) preview.height.toFloat() else preview.width.toFloat()
+ val bufferHeight = if (rotation == 90 || rotation == 270) preview.width.toFloat() else preview.height.toFloat()
+
+ val scale = max(viewWidth / bufferWidth, viewHeight / bufferHeight)
+ val matrix = Matrix()
+ val centerX = viewWidth / 2f
+ val centerY = viewHeight / 2f
+
+ matrix.setScale(scale, scale, centerX, centerY)
+ matrix.postRotate(rotation.toFloat(), centerX, centerY)
+ previewView.setTransform(matrix)
  }

- private fun nv21ToJpeg(nv21: ByteArray, width: Int, height: Int, quality: Int): ByteArray {
- val yuv = YuvImage(nv21, android.graphics.ImageFormat.NV21, width, height, null)
- val out = ByteArrayOutputStream()
- yuv.compressToJpeg(Rect(0, 0, width, height), quality, out)
- return out.toByteArray()
+ private fun chooseBestSize(sizes: Array<Size>?, targetAspect: Double, maxArea: Int?): Size? {
+ if (sizes == null || sizes.isEmpty()) return null
+ val sorted = sizes.sortedByDescending { it.width * it.height }
+
+ val matching = sorted.filter {
+ val aspect = it.width.toDouble() / it.height.toDouble()
+ abs(aspect - targetAspect) <= ANALYSIS_ASPECT_TOLERANCE && (maxArea == null || it.width * it.height <= maxArea)
+ }
+
+ if (matching.isNotEmpty()) {
+ return matching.first()
+ }
+
+ val capped = if (maxArea != null) {
+ sorted.filter { it.width * it.height <= maxArea }
+ } else {
+ sorted
+ }
+
+ return capped.firstOrNull() ?: sorted.first()
  }

  private fun rotateAndMirror(bitmap: Bitmap, rotationDegrees: Int, mirror: Boolean): Bitmap {
@@ -324,7 +466,66 @@ class CameraController(
  return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
  }

+ private fun Image.toNv21(): ByteArray {
+ val width = width
+ val height = height
+ val ySize = width * height
+ val uvSize = width * height / 2
+ val nv21 = ByteArray(ySize + uvSize)
+
+ val yBuffer = planes[0].buffer
+ val uBuffer = planes[1].buffer
+ val vBuffer = planes[2].buffer
+
+ val yRowStride = planes[0].rowStride
+ val yPixelStride = planes[0].pixelStride
+ var outputOffset = 0
+ for (row in 0 until height) {
+ var inputOffset = row * yRowStride
+ for (col in 0 until width) {
+ nv21[outputOffset++] = yBuffer.get(inputOffset)
+ inputOffset += yPixelStride
+ }
+ }
+
+ val uvRowStride = planes[1].rowStride
+ val uvPixelStride = planes[1].pixelStride
+ val vRowStride = planes[2].rowStride
+ val vPixelStride = planes[2].pixelStride
+ val uvHeight = height / 2
+ val uvWidth = width / 2
+ for (row in 0 until uvHeight) {
+ var uInputOffset = row * uvRowStride
+ var vInputOffset = row * vRowStride
+ for (col in 0 until uvWidth) {
+ nv21[outputOffset++] = vBuffer.get(vInputOffset)
+ nv21[outputOffset++] = uBuffer.get(uInputOffset)
+ uInputOffset += uvPixelStride
+ vInputOffset += vPixelStride
+ }
+ }
+
+ return nv21
+ }
+
  private fun hasCameraPermission(): Boolean {
  return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED
  }
+
+ private fun selectCameraId(): String? {
+ return try {
+ val desiredFacing = if (useFrontCamera) {
+ CameraCharacteristics.LENS_FACING_FRONT
+ } else {
+ CameraCharacteristics.LENS_FACING_BACK
+ }
+ cameraManager.cameraIdList.firstOrNull { id ->
+ val characteristics = cameraManager.getCameraCharacteristics(id)
+ characteristics.get(CameraCharacteristics.LENS_FACING) == desiredFacing
+ } ?: cameraManager.cameraIdList.firstOrNull()
+ } catch (e: Exception) {
+ Log.e(TAG, "[CAMERA2] Failed to select camera", e)
+ null
+ }
+ }
  }
@@ -8,9 +8,9 @@ import android.graphics.Paint
  import android.graphics.PorterDuff
  import android.graphics.PorterDuffXfermode
  import android.util.Log
+ import android.view.TextureView
  import android.view.View
  import android.widget.FrameLayout
- import androidx.camera.view.PreviewView
  import androidx.lifecycle.Lifecycle
  import androidx.lifecycle.LifecycleOwner
  import androidx.lifecycle.LifecycleRegistry
@@ -25,7 +25,7 @@ import kotlin.math.min

  class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), LifecycleOwner {
  private val themedContext = context
- private val previewView: PreviewView
+ private val previewView: TextureView
  private val overlayView: OverlayView
  private var cameraController: CameraController? = null
  private val lifecycleRegistry = LifecycleRegistry(this)
@@ -74,13 +74,10 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L

  // Create preview view
  Log.d(TAG, "[INIT] Creating PreviewView...")
- previewView = PreviewView(context).apply {
+ previewView = TextureView(context).apply {
  layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
  visibility = View.VISIBLE
  keepScreenOn = true
- // TextureView mode avoids some device-specific Camera2 session timeouts.
- implementationMode = PreviewView.ImplementationMode.COMPATIBLE
- scaleType = PreviewView.ScaleType.FILL_CENTER
  }
  Log.d(TAG, "[INIT] PreviewView created: $previewView")
  Log.d(TAG, "[INIT] PreviewView visibility: ${previewView.visibility}")
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "react-native-rectangle-doc-scanner",
- "version": "3.229.0",
+ "version": "3.230.0",
  "description": "Native-backed document scanner for React Native with customizable overlays.",
  "license": "MIT",
  "main": "dist/index.js",