react-native-rectangle-doc-scanner 3.210.0 → 3.211.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,164 +3,79 @@ package com.reactnativerectangledocscanner
 import android.Manifest
 import android.content.Context
 import android.content.pm.PackageManager
-import android.graphics.Bitmap
-import android.graphics.BitmapFactory
-import android.graphics.ImageFormat
-import android.graphics.Matrix
-import android.graphics.Rect
-import android.graphics.RectF
-import android.graphics.SurfaceTexture
-import android.graphics.YuvImage
-import android.hardware.camera2.CameraCaptureSession
-import android.hardware.camera2.CameraCharacteristics
-import android.hardware.camera2.CameraDevice
-import android.hardware.camera2.CameraManager
-import android.hardware.camera2.CaptureRequest
-import android.media.Image
-import android.media.ImageReader
-import android.os.Handler
-import android.os.HandlerThread
 import android.util.Log
 import android.util.Size
-import android.view.Gravity
 import android.view.Surface
-import
+import androidx.camera.core.AspectRatio
+import androidx.camera.core.Camera
+import androidx.camera.core.CameraSelector
+import androidx.camera.core.ImageAnalysis
+import androidx.camera.core.ImageCapture
+import androidx.camera.core.ImageCaptureException
+import androidx.camera.core.Preview
+import androidx.camera.lifecycle.ProcessCameraProvider
+import androidx.camera.view.PreviewView
 import androidx.core.content.ContextCompat
 import androidx.lifecycle.LifecycleOwner
-import
+import com.google.common.util.concurrent.ListenableFuture
 import java.io.File
-import java.
-import java.util.concurrent.
+import java.util.concurrent.ExecutorService
+import java.util.concurrent.Executors

 class CameraController(
     private val context: Context,
     private val lifecycleOwner: LifecycleOwner,
-    private val previewView:
+    private val previewView: PreviewView
 ) {
-    private
-    private var
-    private var
-    private var
-    private var
-    private var
-    private
-    private var previewLayoutListener: android.view.View.OnLayoutChangeListener? = null
+    private var cameraProviderFuture: ListenableFuture<ProcessCameraProvider>? = null
+    private var cameraProvider: ProcessCameraProvider? = null
+    private var preview: Preview? = null
+    private var imageAnalysis: ImageAnalysis? = null
+    private var imageCapture: ImageCapture? = null
+    private var camera: Camera? = null
+    private val cameraExecutor: ExecutorService = Executors.newSingleThreadExecutor()

-    private var cameraId: String? = null
-    private var sensorOrientation: Int = 0
-    private var sensorAspectRatio: Float? = null
-    private var previewSize: Size? = null
-    private var analysisSize: Size? = null
-    private var previewChoices: Array<Size> = emptyArray()
-    private var analysisChoices: Array<Size> = emptyArray()
     private var useFrontCamera = false
-    private var torchEnabled = false
     private var detectionEnabled = true
-    private var hasStarted = false
-
-    private val isOpening = AtomicBoolean(false)
-    private val lastFrameLock = Any()
-    private var lastFrame: LastFrame? = null

     var onFrameAnalyzed: ((Rectangle?, Int, Int) -> Unit)? = null

     companion object {
         private const val TAG = "CameraController"
-        private const val
-        private const val
-    }
-
-    private data class LastFrame(
-        val nv21: ByteArray,
-        val width: Int,
-        val height: Int,
-        val rotationDegrees: Int,
-        val isFront: Boolean
-    )
-
-    private val textureListener = object : TextureView.SurfaceTextureListener {
-        override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
-            Log.d(TAG, "[CAMERA2] Texture available: ${width}x${height}")
-            createPreviewSession()
-        }
-
-        override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
-            Log.d(TAG, "[CAMERA2] Texture size changed: ${width}x${height}")
-            updatePreviewTransform()
-        }
-
-        override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
-            Log.d(TAG, "[CAMERA2] Texture destroyed")
-            return true
-        }
-
-        override fun onSurfaceTextureUpdated(surface: SurfaceTexture) = Unit
+        private const val ANALYSIS_WIDTH = 1280
+        private const val ANALYSIS_HEIGHT = 720
     }

     fun startCamera(
         useFrontCam: Boolean = false,
         enableDetection: Boolean = true
     ) {
-        Log.d(TAG, "
-        Log.d(TAG, "[CAMERA2] startCamera called")
-        Log.d(TAG, "[CAMERA2] useFrontCam: $useFrontCam")
-        Log.d(TAG, "[CAMERA2] enableDetection: $enableDetection")
-        Log.d(TAG, "[CAMERA2] lifecycleOwner: $lifecycleOwner")
-        Log.d(TAG, "========================================")
-
+        Log.d(TAG, "[CAMERAX] startCamera called")
         this.useFrontCamera = useFrontCam
         this.detectionEnabled = enableDetection

-        if (hasStarted) {
-            Log.d(TAG, "[CAMERA2] Already started, skipping")
-            return
-        }
-        hasStarted = true
-
         if (!hasCameraPermission()) {
-            Log.e(TAG, "[
+            Log.e(TAG, "[CAMERAX] Camera permission not granted")
             return
         }

-
-
-
-        if (previewLayoutListener == null) {
-            previewLayoutListener = android.view.View.OnLayoutChangeListener { _, _, _, _, _, _, _, _, _ ->
-                updatePreviewTransform()
-            }
-            previewView.addOnLayoutChangeListener(previewLayoutListener)
+        if (cameraProviderFuture == null) {
+            cameraProviderFuture = ProcessCameraProvider.getInstance(context)
         }

-
-
-
-
-
+        cameraProviderFuture?.addListener({
+            try {
+                cameraProvider = cameraProviderFuture?.get()
+                bindCameraUseCases()
+            } catch (e: Exception) {
+                Log.e(TAG, "[CAMERAX] Failed to get camera provider", e)
+            }
+        }, ContextCompat.getMainExecutor(context))
     }

     fun stopCamera() {
-        Log.d(TAG, "[
-
-            previewView.removeOnLayoutChangeListener(listener)
-        }
-        previewLayoutListener = null
-        try {
-            captureSession?.close()
-            captureSession = null
-        } catch (e: Exception) {
-            Log.w(TAG, "[CAMERA2] Failed to close session", e)
-        }
-        try {
-            cameraDevice?.close()
-            cameraDevice = null
-        } catch (e: Exception) {
-            Log.w(TAG, "[CAMERA2] Failed to close camera device", e)
-        }
-        imageReader?.close()
-        imageReader = null
-        stopBackgroundThread()
-        hasStarted = false
+        Log.d(TAG, "[CAMERAX] stopCamera called")
+        cameraProvider?.unbindAll()
     }

     fun capturePhoto(
@@ -168,556 +83,136 @@ class CameraController(
         onImageCaptured: (File) -> Unit,
         onError: (Exception) -> Unit
     ) {
-        val
-        if (
-            onError(
+        val capture = imageCapture
+        if (capture == null) {
+            onError(IllegalStateException("ImageCapture not initialized"))
             return
         }

-
-
-        val photoFile = File(
-            outputDirectory,
-            "doc_scan_${System.currentTimeMillis()}.jpg"
-        )
+        val photoFile = File(outputDirectory, "doc_scan_${System.currentTimeMillis()}.jpg")
+        val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile).build()

-
-
-
-
-
-
-
-        }
-        if (rotated != bitmap) {
-            rotated.recycle()
+        capture.takePicture(
+            outputOptions,
+            cameraExecutor,
+            object : ImageCapture.OnImageSavedCallback {
+                override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
+                    Log.d(TAG, "[CAMERAX] Photo capture succeeded: ${photoFile.absolutePath}")
+                    onImageCaptured(photoFile)
                 }
-        bitmap.recycle()

-
-
-
-
-            onError(e)
+                override fun onError(exception: ImageCaptureException) {
+                    Log.e(TAG, "[CAMERAX] Photo capture failed", exception)
+                    onError(exception)
+                }
             }
-
+        )
     }

     fun setTorchEnabled(enabled: Boolean) {
-
-        val builder = previewRequestBuilder ?: return
-        builder.set(CaptureRequest.FLASH_MODE, if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
-        try {
-            captureSession?.setRepeatingRequest(builder.build(), null, backgroundHandler)
-        } catch (e: Exception) {
-            Log.w(TAG, "[CAMERA2] Failed to update torch", e)
-        }
+        camera?.cameraControl?.enableTorch(enabled)
     }

     fun switchCamera() {
         useFrontCamera = !useFrontCamera
-
-        startCamera(useFrontCamera, detectionEnabled)
+        bindCameraUseCases()
     }

     fun isTorchAvailable(): Boolean {
-
-        val characteristics = cameraManager.getCameraCharacteristics(id)
-        return characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) == true
+        return camera?.cameraInfo?.hasFlashUnit() == true
     }

     fun focusAt(x: Float, y: Float) {
-        // No-op for now.
+        // No-op for now.
     }

     fun shutdown() {
         stopCamera()
+        cameraExecutor.shutdown()
     }

-    private fun
-        val
-
-        } else {
-            CameraCharacteristics.LENS_FACING_BACK
-        }
-
-        val ids = cameraManager.cameraIdList
-        val selected = ids.firstOrNull { id ->
-            val characteristics = cameraManager.getCameraCharacteristics(id)
-            characteristics.get(CameraCharacteristics.LENS_FACING) == lensFacing
-        } ?: ids.firstOrNull()
+    private fun bindCameraUseCases() {
+        val provider = cameraProvider ?: return
+        provider.unbindAll()

-
-
-
-
+        val rotation = previewView.display?.rotation ?: Surface.ROTATION_0
+        preview = Preview.Builder()
+            .setTargetRotation(rotation)
+            .build()
+            .also {
+                it.setSurfaceProvider(previewView.surfaceProvider)
+            }

-
-
-
-
-
-
-
+        imageAnalysis = ImageAnalysis.Builder()
+            .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+            .setTargetResolution(Size(ANALYSIS_WIDTH, ANALYSIS_HEIGHT))
+            .setTargetRotation(rotation)
+            .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888)
+            .build()
+            .also {
+                it.setAnalyzer(cameraExecutor, DocumentAnalyzer())
+            }

-
-
-
+        imageCapture = ImageCapture.Builder()
+            .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
+            .setTargetRotation(rotation)
+            .setTargetAspectRatio(AspectRatio.RATIO_4_3)
+            .build()

-        val
-
-        val targetRatio = if (viewWidth > 0 && viewHeight > 0) {
-            viewWidth.toFloat() / viewHeight.toFloat()
+        val cameraSelector = if (useFrontCamera) {
+            CameraSelector.DEFAULT_FRONT_CAMERA
         } else {
-
-        }
-
-        logSizeCandidates("preview", previewChoices, targetRatio, sensorAspectRatio)
-        logSizeCandidates("analysis", analysisChoices, targetRatio, sensorAspectRatio)
-
-        previewSize = choosePreviewSize(previewChoices, targetRatio, sensorAspectRatio)
-        analysisSize = chooseAnalysisSize(analysisChoices, targetRatio, sensorAspectRatio)
-        Log.d(
-            TAG,
-            "[CAMERA2] chooseCamera view=${viewWidth}x${viewHeight} ratio=$targetRatio " +
-                "sensorOrientation=$sensorOrientation sensorRatio=$sensorAspectRatio " +
-                "preview=$previewSize analysis=$analysisSize"
-        )
-    }
-
-    private fun openCamera() {
-        val id = cameraId ?: run {
-            Log.e(TAG, "[CAMERA2] Camera id not set")
-            return
-        }
-        if (isOpening.getAndSet(true)) {
-            return
+            CameraSelector.DEFAULT_BACK_CAMERA
         }

         try {
-
-
-
-
-
-
-            }
-
-            override fun onDisconnected(device: CameraDevice) {
-                Log.w(TAG, "[CAMERA2] Camera disconnected")
-                isOpening.set(false)
-                device.close()
-                cameraDevice = null
-            }
-
-            override fun onError(device: CameraDevice, error: Int) {
-                Log.e(TAG, "[CAMERA2] Camera error: $error")
-                isOpening.set(false)
-                device.close()
-                cameraDevice = null
-            }
-        }, backgroundHandler)
-        } catch (e: SecurityException) {
-            isOpening.set(false)
-            Log.e(TAG, "[CAMERA2] Camera permission missing", e)
-        } catch (e: Exception) {
-            isOpening.set(false)
-            Log.e(TAG, "[CAMERA2] Failed to open camera", e)
-        }
-    }
-
-    private fun createPreviewSession() {
-        val device = cameraDevice ?: return
-        val texture = previewView.surfaceTexture ?: return
-        val sizes = ensurePreviewSizes()
-        val previewSize = sizes.first ?: return
-        val analysisSize = sizes.second ?: previewSize
-
-        Log.d(
-            TAG,
-            "[CAMERA2] createPreviewSession view=${previewView.width}x${previewView.height} " +
-                "preview=${previewSize.width}x${previewSize.height} analysis=${analysisSize.width}x${analysisSize.height}"
-        )
-
-        texture.setDefaultBufferSize(previewSize.width, previewSize.height)
-        val previewSurface = Surface(texture)
-
-        imageReader?.close()
-        imageReader = ImageReader.newInstance(
-            analysisSize.width,
-            analysisSize.height,
-            ImageFormat.YUV_420_888,
-            2
-        ).apply {
-            setOnImageAvailableListener({ reader ->
-                val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
-                handleImage(image)
-            }, backgroundHandler)
-        }
-
-        val surfaces = listOf(previewSurface, imageReader!!.surface)
-        try {
-            device.createCaptureSession(
-                surfaces,
-                object : CameraCaptureSession.StateCallback() {
-                    override fun onConfigured(session: CameraCaptureSession) {
-                        if (cameraDevice == null) {
-                            return
-                        }
-                        captureSession = session
-                        previewRequestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
-                            addTarget(previewSurface)
-                            addTarget(imageReader!!.surface)
-                            set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
-                            set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
-                            set(CaptureRequest.FLASH_MODE, if (torchEnabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
-                        }
-                        try {
-                            session.setRepeatingRequest(previewRequestBuilder!!.build(), null, backgroundHandler)
-                            Log.d(TAG, "[CAMERA2] Preview session started")
-                            updatePreviewTransform()
-                        } catch (e: Exception) {
-                            Log.e(TAG, "[CAMERA2] Failed to start preview", e)
-                        }
-                    }
-
-                    override fun onConfigureFailed(session: CameraCaptureSession) {
-                        Log.e(TAG, "[CAMERA2] Preview session configure failed")
-                    }
-                },
-                backgroundHandler
-            )
-        } catch (e: Exception) {
-            Log.e(TAG, "[CAMERA2] Failed to create preview session", e)
-        }
-    }
-
-    private fun ensurePreviewSizes(): Pair<Size?, Size?> {
-        if (previewChoices.isEmpty()) {
-            return Pair(previewSize, analysisSize)
-        }
-
-        val viewWidth = if (previewView.width > 0) previewView.width else context.resources.displayMetrics.widthPixels
-        val viewHeight = if (previewView.height > 0) previewView.height else context.resources.displayMetrics.heightPixels
-        val targetRatio = if (viewWidth > 0 && viewHeight > 0) {
-            viewWidth.toFloat() / viewHeight.toFloat()
-        } else {
-            null
-        }
-
-        val newPreview = choosePreviewSize(previewChoices, targetRatio, sensorAspectRatio)
-        val newAnalysis = chooseAnalysisSize(analysisChoices, targetRatio, sensorAspectRatio)
-
-        if (newPreview != null && newPreview != previewSize) {
-            previewSize = newPreview
-        }
-        if (newAnalysis != null && newAnalysis != analysisSize) {
-            analysisSize = newAnalysis
-        }
-
-        Log.d(
-            TAG,
-            "[CAMERA2] ensurePreviewSizes view=${viewWidth}x${viewHeight} ratio=$targetRatio " +
-                "preview=${previewSize?.width}x${previewSize?.height} analysis=${analysisSize?.width}x${analysisSize?.height}"
-        )
-        return Pair(previewSize, analysisSize)
-    }
-
-    private fun updatePreviewTransform() {
-        val previewSize = previewSize ?: return
-        ensureMatchParent()
-
-        val viewWidth = previewView.width
-        val viewHeight = previewView.height
-        if (viewWidth == 0 || viewHeight == 0) {
-            return
-        }
-
-        val rotationDegrees = getRotationDegrees()
-        val viewRect = RectF(0f, 0f, viewWidth.toFloat(), viewHeight.toFloat())
-        val bufferRect = if (rotationDegrees == 90 || rotationDegrees == 270) {
-            RectF(0f, 0f, previewSize.height.toFloat(), previewSize.width.toFloat())
-        } else {
-            RectF(0f, 0f, previewSize.width.toFloat(), previewSize.height.toFloat())
-        }
-        val centerX = viewRect.centerX()
-        val centerY = viewRect.centerY()
-
-        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY())
-
-        val matrix = Matrix()
-        // Map buffer to view, then scale to fill and rotate around the center.
-        matrix.setRectToRect(bufferRect, viewRect, Matrix.ScaleToFit.FILL)
-        val scale = kotlin.math.max(
-            viewWidth.toFloat() / bufferRect.width(),
-            viewHeight.toFloat() / bufferRect.height()
-        )
-        matrix.postScale(scale, scale, centerX, centerY)
-        if (rotationDegrees != 0) {
-            matrix.postRotate(rotationDegrees.toFloat(), centerX, centerY)
-        }
-
-        previewView.setTransform(matrix)
-        Log.d(
-            TAG,
-            "[CAMERA2] transform view=${viewWidth}x${viewHeight} buffer=${previewSize.width}x${previewSize.height} " +
-                "rotation=$rotationDegrees scale=$scale"
-        )
-    }
-
-    private fun ensureMatchParent() {
-        val parentView = previewView.parent as? android.view.View ?: return
-        val parentWidth = parentView.width
-        val parentHeight = parentView.height
-        if (parentWidth == 0 || parentHeight == 0) {
-            return
-        }
-
-        val layoutParams = (previewView.layoutParams as? android.widget.FrameLayout.LayoutParams)
-            ?: android.widget.FrameLayout.LayoutParams(
-                android.widget.FrameLayout.LayoutParams.MATCH_PARENT,
-                android.widget.FrameLayout.LayoutParams.MATCH_PARENT
+            camera = provider.bindToLifecycle(
+                lifecycleOwner,
+                cameraSelector,
+                preview,
+                imageAnalysis,
+                imageCapture
             )
-
-            layoutParams.height != android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-        ) {
-            layoutParams.width = android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-            layoutParams.height = android.widget.FrameLayout.LayoutParams.MATCH_PARENT
-            layoutParams.gravity = Gravity.CENTER
-            previewView.layoutParams = layoutParams
-        }
-        Log.d(TAG, "[CAMERA2] parent=${parentWidth}x${parentHeight} previewView=${previewView.width}x${previewView.height}")
-    }
-
-    private fun handleImage(image: Image) {
-        try {
-            val rotationDegrees = getRotationDegrees()
-            val width = image.width
-            val height = image.height
-            val nv21 = imageToNV21(image)
-
-            val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) height else width
-            val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) width else height
-
-            synchronized(lastFrameLock) {
-                lastFrame = LastFrame(nv21, width, height, rotationDegrees, useFrontCamera)
-            }
-
-            if (detectionEnabled) {
-                val rectangle = DocumentDetector.detectRectangleInYUV(
-                    nv21,
-                    width,
-                    height,
-                    rotationDegrees
-                )
-                onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
-            } else {
-                onFrameAnalyzed?.invoke(null, frameWidth, frameHeight)
-            }
+            Log.d(TAG, "[CAMERAX] Camera bound successfully")
         } catch (e: Exception) {
-            Log.e(TAG, "[
-        } finally {
-            image.close()
-        }
-    }
-
-    private fun imageToNV21(image: Image): ByteArray {
-        val width = image.width
-        val height = image.height
-
-        val ySize = width * height
-        val uvSize = width * height / 2
-        val nv21 = ByteArray(ySize + uvSize)
-
-        val yBuffer = image.planes[0].buffer
-        val uBuffer = image.planes[1].buffer
-        val vBuffer = image.planes[2].buffer
-
-        val yRowStride = image.planes[0].rowStride
-        val yPixelStride = image.planes[0].pixelStride
-        var outputOffset = 0
-        for (row in 0 until height) {
-            var inputOffset = row * yRowStride
-            for (col in 0 until width) {
-                nv21[outputOffset++] = yBuffer.get(inputOffset)
-                inputOffset += yPixelStride
-            }
-        }
-
-        val uvRowStride = image.planes[1].rowStride
-        val uvPixelStride = image.planes[1].pixelStride
-        val vRowStride = image.planes[2].rowStride
-        val vPixelStride = image.planes[2].pixelStride
-
-        val uvHeight = height / 2
-        val uvWidth = width / 2
-        for (row in 0 until uvHeight) {
-            var uInputOffset = row * uvRowStride
-            var vInputOffset = row * vRowStride
-            for (col in 0 until uvWidth) {
-                nv21[outputOffset++] = vBuffer.get(vInputOffset)
-                nv21[outputOffset++] = uBuffer.get(uInputOffset)
-                uInputOffset += uvPixelStride
-                vInputOffset += vPixelStride
-            }
-        }
-
-        return nv21
-    }
-
-    private fun nv21ToJpeg(nv21: ByteArray, width: Int, height: Int, quality: Int): ByteArray {
-        val yuv = YuvImage(nv21, ImageFormat.NV21, width, height, null)
-        val out = ByteArrayOutputStream()
-        yuv.compressToJpeg(Rect(0, 0, width, height), quality, out)
-        return out.toByteArray()
-    }
-
-    private fun rotateAndMirror(bitmap: Bitmap, rotationDegrees: Int, mirror: Boolean): Bitmap {
-        if (rotationDegrees == 0 && !mirror) {
-            return bitmap
-        }
-        val matrix = Matrix()
-        if (mirror) {
-            matrix.postScale(-1f, 1f, bitmap.width / 2f, bitmap.height / 2f)
-        }
-        if (rotationDegrees != 0) {
-            matrix.postRotate(rotationDegrees.toFloat(), bitmap.width / 2f, bitmap.height / 2f)
+            Log.e(TAG, "[CAMERAX] Failed to bind camera", e)
         }
-        return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
     }

-    private
-
-
-
-
-            Surface.ROTATION_180 -> 180
-            Surface.ROTATION_270 -> 270
-            else -> 0
-        }
-
-        return if (useFrontCamera) {
-            (sensorOrientation + displayDegrees) % 360
-        } else {
-            (sensorOrientation - displayDegrees + 360) % 360
-        }
-    }
-
-    private fun choosePreviewSize(
-        choices: Array<Size>,
-        targetRatio: Float?,
-        sensorRatio: Float?
-    ): Size? {
-        if (choices.isEmpty()) {
-            return null
-        }
-        val candidates = choices.toList()
-
-        val ratioBase = sensorRatio ?: targetRatio
-        if (ratioBase == null) {
-            return candidates.maxByOrNull { it.width * it.height }
-        }
-
-        val normalizedTarget = ratioBase
-        val sorted = candidates.sortedWith(
-            compareBy<Size> { size ->
-                val ratio = size.width.toFloat() / size.height.toFloat()
-                kotlin.math.abs(ratio - normalizedTarget)
-            }.thenByDescending { size ->
-                size.width * size.height
-            }
-        )
-        return sorted.first()
-    }
-
-    private fun chooseAnalysisSize(
-        choices: Array<Size>,
-        targetRatio: Float?,
-        sensorRatio: Float?
-    ): Size? {
-        if (choices.isEmpty()) {
-            return null
-        }
-
-        val capped = choices.filter { it.width <= MAX_ANALYSIS_WIDTH && it.height <= MAX_ANALYSIS_HEIGHT }
-        val candidates = if (capped.isNotEmpty()) capped else choices.toList()
-
-        val ratioBase = sensorRatio ?: targetRatio
-        if (ratioBase == null) {
-            return candidates.maxByOrNull { it.width * it.height }
-        }
-
-        val normalizedTarget = ratioBase
-        val sorted = candidates.sortedWith(
-            compareBy<Size> { size ->
-                val ratio = size.width.toFloat() / size.height.toFloat()
-                kotlin.math.abs(ratio - normalizedTarget)
-            }.thenByDescending { size ->
-                size.width * size.height
-            }
-        )
-        return sorted.first()
-    }
+    private inner class DocumentAnalyzer : ImageAnalysis.Analyzer {
+        override fun analyze(imageProxy: androidx.camera.core.ImageProxy) {
+            try {
+                val rotationDegrees = imageProxy.imageInfo.rotationDegrees
+                val nv21 = imageProxy.toNv21()

-
-
-
-
-
-    ) {
-        if (choices.isEmpty()) {
-            Log.d(TAG, "[CAMERA2] $label sizes: none")
-            return
-        }
+                val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) {
+                    imageProxy.height
+                } else {
+                    imageProxy.width
+                }

-
-
-
-
-
+                val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) {
+                    imageProxy.width
+                } else {
+                    imageProxy.height
+                }

-
-
-
-
-
-
-
+                if (detectionEnabled) {
+                    val rectangle = DocumentDetector.detectRectangleInYUV(
+                        nv21,
+                        imageProxy.width,
+                        imageProxy.height,
+                        rotationDegrees
+                    )
+                    onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
+                } else {
+                    onFrameAnalyzed?.invoke(null, frameWidth, frameHeight)
+                }
+            } catch (e: Exception) {
+                Log.e(TAG, "[CAMERAX] Error analyzing frame", e)
+            } finally {
+                imageProxy.close()
             }
-        )
-
-        val top = sorted.take(5).joinToString { size ->
-            val ratio = size.width.toFloat() / size.height.toFloat()
-            val diff = kotlin.math.abs(ratio - normalizedTarget)
-            "${size.width}x${size.height}(r=${"%.3f".format(ratio)},d=${"%.3f".format(diff)})"
-        }
-
-        Log.d(
-            TAG,
-            "[CAMERA2] $label sizes: ${choices.size}, ratioBase=${"%.3f".format(normalizedTarget)} " +
-                "sensor=${sensorRatio?.let { "%.3f".format(it) }} target=${targetRatio?.let { "%.3f".format(it) }} top=$top"
-        )
-    }
-
-    private fun startBackgroundThread() {
-        if (backgroundThread != null) {
-            return
-        }
-        backgroundThread = HandlerThread("Camera2Background").also {
-            it.start()
-            backgroundHandler = Handler(it.looper)
-        }
-    }
-
-    private fun stopBackgroundThread() {
-        try {
-            backgroundThread?.quitSafely()
-            backgroundThread?.join()
-        } catch (e: InterruptedException) {
-            Log.w(TAG, "[CAMERA2] Background thread shutdown interrupted", e)
-        } finally {
-            backgroundThread = null
-            backgroundHandler = null
         }
     }

@@ -8,9 +8,9 @@ import android.graphics.Paint
 import android.graphics.PorterDuff
 import android.graphics.PorterDuffXfermode
 import android.util.Log
-import android.view.TextureView
 import android.view.View
 import android.widget.FrameLayout
+import androidx.camera.view.PreviewView
 import androidx.lifecycle.Lifecycle
 import androidx.lifecycle.LifecycleOwner
 import androidx.lifecycle.LifecycleRegistry
@@ -25,7 +25,7 @@ import kotlin.math.min

 class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), LifecycleOwner {
     private val themedContext = context
-    private val previewView:
+    private val previewView: PreviewView
     private val overlayView: OverlayView
     private var cameraController: CameraController? = null
     private val lifecycleRegistry = LifecycleRegistry(this)
@@ -73,23 +73,20 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
         Log.d(TAG, "[INIT] Lifecycle state: ${lifecycleRegistry.currentState}")

         // Create preview view
-        Log.d(TAG, "[INIT] Creating
-        previewView =
+        Log.d(TAG, "[INIT] Creating PreviewView...")
+        previewView = PreviewView(context).apply {
             layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
             visibility = View.VISIBLE
             keepScreenOn = true
-
-
-            // Ensure the view is on top
-            bringToFront()
-            requestLayout()
+            implementationMode = PreviewView.ImplementationMode.COMPATIBLE
+            scaleType = PreviewView.ScaleType.FILL_CENTER
         }
-        Log.d(TAG, "[INIT]
-        Log.d(TAG, "[INIT]
+        Log.d(TAG, "[INIT] PreviewView created: $previewView")
+        Log.d(TAG, "[INIT] PreviewView visibility: ${previewView.visibility}")

-        Log.d(TAG, "[INIT] Adding
+        Log.d(TAG, "[INIT] Adding PreviewView to parent...")
         addView(previewView)
-        Log.d(TAG, "[INIT]
+        Log.d(TAG, "[INIT] PreviewView added, childCount: $childCount")

         // Create overlay view for drawing rectangle
         Log.d(TAG, "[INIT] Creating OverlayView...")

package/package.json CHANGED
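
The diff above replaces the hand-rolled Camera2 pipeline with CameraX, but it keeps the CameraController entry points (startCamera, capturePhoto, setTorchEnabled, shutdown, onFrameAnalyzed). The following is a minimal usage sketch of that surface, for orientation only: the ScannerHostView class, the overlay drawing, and the outputDirectory parameter of capturePhoto (its declaration falls between the two hunks and is not shown) are assumptions for illustration, not code from the package.

// Usage sketch only. CameraController, its constructor order, and the methods
// called here come from the diff above; ScannerHostView and the assumed
// outputDirectory parameter of capturePhoto are illustrative.
import android.content.Context
import android.util.Log
import android.widget.FrameLayout
import androidx.camera.view.PreviewView
import androidx.lifecycle.LifecycleOwner
import com.reactnativerectangledocscanner.CameraController
import java.io.File

class ScannerHostView(
    context: Context,
    lifecycleOwner: LifecycleOwner
) : FrameLayout(context) {

    // CameraController now takes a CameraX PreviewView instead of a TextureView.
    private val previewView = PreviewView(context).apply {
        layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
    }
    private val controller = CameraController(context, lifecycleOwner, previewView)

    init {
        addView(previewView)

        // Rectangle candidates arrive on the controller's analysis executor;
        // hop back to the main thread before touching any views.
        controller.onFrameAnalyzed = { rectangle, frameWidth, frameHeight ->
            post { /* draw rectangle, scaling from frameWidth x frameHeight to view size */ }
        }

        controller.startCamera(useFrontCam = false, enableDetection = true)
    }

    fun capture(outputDirectory: File) {
        controller.capturePhoto(
            outputDirectory,
            onImageCaptured = { file -> Log.d("ScannerHost", "Saved ${file.absolutePath}") },
            onError = { e -> Log.e("ScannerHost", "Capture failed", e) }
        )
    }

    fun release() {
        // Unbinds the CameraX use cases and shuts down the analysis executor.
        controller.shutdown()
    }
}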