react-native-rectangle-doc-scanner 3.228.0 → 3.230.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,106 +6,112 @@ import android.content.pm.PackageManager
 import android.graphics.Bitmap
 import android.graphics.BitmapFactory
 import android.graphics.Matrix
-import android.graphics.
-import android.graphics.
+import android.graphics.SurfaceTexture
+import android.graphics.ImageFormat
+import android.hardware.camera2.CameraCaptureSession
+import android.hardware.camera2.CameraCharacteristics
+import android.hardware.camera2.CameraDevice
+import android.hardware.camera2.CameraManager
+import android.hardware.camera2.CaptureRequest
+import android.media.Image
+import android.media.ImageReader
+import android.os.Handler
+import android.os.HandlerThread
 import android.util.Log
+import android.util.Size
 import android.view.Surface
-import
-import androidx.camera.core.CameraSelector
-import androidx.camera.core.ImageAnalysis
-import androidx.camera.core.ImageCapture
-import androidx.camera.core.ImageCaptureException
-import androidx.camera.core.Preview
-import androidx.camera.lifecycle.ProcessCameraProvider
-import androidx.camera.view.PreviewView
+import android.view.TextureView
 import androidx.core.content.ContextCompat
-import androidx.lifecycle.LifecycleOwner
-import com.google.common.util.concurrent.ListenableFuture
-import java.io.ByteArrayOutputStream
 import java.io.File
 import java.io.FileOutputStream
-import java.util.concurrent.ExecutorService
-import java.util.concurrent.Executors
 import java.util.concurrent.atomic.AtomicReference
+import kotlin.math.abs
+import kotlin.math.max

 class CameraController(
     private val context: Context,
-    private val lifecycleOwner: LifecycleOwner,
-    private val previewView:
+    private val lifecycleOwner: androidx.lifecycle.LifecycleOwner,
+    private val previewView: TextureView
 ) {
-    private
-    private var
-    private var
-    private var
-
-    private var
-    private
-    private
-
-    private var
+    private val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
+    private var cameraDevice: CameraDevice? = null
+    private var captureSession: CameraCaptureSession? = null
+    private var previewRequestBuilder: CaptureRequest.Builder? = null
+
+    private var previewSize: Size? = null
+    private var analysisSize: Size? = null
+    private var captureSize: Size? = null
+
+    private var yuvReader: ImageReader? = null
+    private var jpegReader: ImageReader? = null
+
+    private val cameraThread = HandlerThread("Camera2Thread").apply { start() }
+    private val cameraHandler = Handler(cameraThread.looper)
+    private val analysisThread = HandlerThread("Camera2Analysis").apply { start() }
+    private val analysisHandler = Handler(analysisThread.looper)

     private var useFrontCamera = false
     private var detectionEnabled = true
+    private var torchEnabled = false

-
-    private var isAnalysisActive = false
-    private val analysisHandler = android.os.Handler(android.os.Looper.getMainLooper())
-    private val analysisRunnable = object : Runnable {
-        override fun run() {
-            if (isAnalysisActive && onFrameAnalyzed != null) {
-                captureFrameForAnalysis()
-                analysisHandler.postDelayed(this, 200) // Capture every 200ms
-            }
-        }
-    }
+    private val pendingCapture = AtomicReference<PendingCapture?>()

     var onFrameAnalyzed: ((Rectangle?, Int, Int) -> Unit)? = null

     companion object {
         private const val TAG = "CameraController"
+        private const val ANALYSIS_MAX_AREA = 1920 * 1080
+        private const val ANALYSIS_ASPECT_TOLERANCE = 0.15
     }

-    private data class
-        val
-        val
-        val
-        val rotationDegrees: Int,
-        val isFront: Boolean
+    private data class PendingCapture(
+        val outputDirectory: File,
+        val onImageCaptured: (File) -> Unit,
+        val onError: (Exception) -> Unit
     )

+    private val textureListener = object : TextureView.SurfaceTextureListener {
+        override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
+            openCamera()
+        }
+
+        override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
+            configureTransform()
+        }
+
+        override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
+            return true
+        }
+
+        override fun onSurfaceTextureUpdated(surface: SurfaceTexture) {
+            // no-op
+        }
+    }
+
     fun startCamera(
         useFrontCam: Boolean = false,
         enableDetection: Boolean = true
     ) {
-        Log.d(TAG, "[
+        Log.d(TAG, "[CAMERA2] startCamera called")
         this.useFrontCamera = useFrontCam
         this.detectionEnabled = enableDetection

         if (!hasCameraPermission()) {
-            Log.e(TAG, "[
+            Log.e(TAG, "[CAMERA2] Camera permission not granted")
             return
         }

-        if (
-
+        if (previewView.isAvailable) {
+            openCamera()
+        } else {
+            previewView.surfaceTextureListener = textureListener
         }
-
-        cameraProviderFuture?.addListener({
-            try {
-                cameraProvider = cameraProviderFuture?.get()
-                bindCameraUseCases()
-            } catch (e: Exception) {
-                Log.e(TAG, "[CAMERAX-V6] Failed to get camera provider", e)
-            }
-        }, ContextCompat.getMainExecutor(context))
     }

     fun stopCamera() {
-        Log.d(TAG, "[
-
-
-        cameraProvider?.unbindAll()
-        analysisBound = false
+        Log.d(TAG, "[CAMERA2] stopCamera called")
+        previewView.surfaceTextureListener = null
+        closeSession()
     }

     fun capturePhoto(
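Note on the hunk above: the CameraX provider and executor state are replaced by raw Camera2 handles plus two dedicated HandlerThreads, one for camera callbacks and one for frame analysis, and startup is gated on the TextureView's SurfaceTexture. A minimal sketch of that threading pattern in isolation (the wrapper class is illustrative, not package code); the quitSafely() calls pair with the threads exactly as CameraController.shutdown() does later in this diff:

import android.os.Handler
import android.os.HandlerThread

// Owns the looper threads; quitSafely() lets already-queued callbacks drain first.
class CameraThreads {
    private val cameraThread = HandlerThread("Camera2Thread").apply { start() }
    private val analysisThread = HandlerThread("Camera2Analysis").apply { start() }

    val cameraHandler = Handler(cameraThread.looper)     // Camera2 device/session callbacks land here
    val analysisHandler = Handler(analysisThread.looper) // frame analysis work is posted here

    fun shutdown() {
        cameraThread.quitSafely()
        analysisThread.quitSafely()
    }
}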
@@ -113,172 +119,337 @@ class CameraController(
         onImageCaptured: (File) -> Unit,
         onError: (Exception) -> Unit
     ) {
-        val
-
-
+        val device = cameraDevice
+        val session = captureSession
+        val reader = jpegReader
+        if (device == null || session == null || reader == null) {
+            onError(IllegalStateException("Camera not ready for capture"))
             return
         }

-
-
-
-
-            val bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)
-                ?: throw IllegalStateException("Failed to decode JPEG")
+        if (!pendingCapture.compareAndSet(null, PendingCapture(outputDirectory, onImageCaptured, onError))) {
+            onError(IllegalStateException("Capture already in progress"))
+            return
+        }

-
-
-
-
-
-
+        try {
+            val requestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE).apply {
+                addTarget(reader.surface)
+                set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
+                set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
+                if (torchEnabled) {
+                    set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH)
                 }
-
-
-                Log.d(TAG, "[CAMERAX-V6] Photo capture succeeded: ${photoFile.absolutePath}")
-                onImageCaptured(photoFile)
-            } catch (e: Exception) {
-                Log.e(TAG, "[CAMERAX-V6] Photo capture failed", e)
-                onError(e)
+                set(CaptureRequest.JPEG_ORIENTATION, 0)
             }
+
+            session.capture(requestBuilder.build(), object : CameraCaptureSession.CaptureCallback() {}, cameraHandler)
+        } catch (e: Exception) {
+            pendingCapture.getAndSet(null)?.onError?.invoke(e)
         }
     }

     fun setTorchEnabled(enabled: Boolean) {
-
+        torchEnabled = enabled
+        updateRepeatingRequest()
     }

     fun switchCamera() {
         useFrontCamera = !useFrontCamera
-
+        closeSession()
+        openCamera()
     }

     fun isTorchAvailable(): Boolean {
-        return
+        return try {
+            val cameraId = selectCameraId() ?: return false
+            val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+            characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) == true
+        } catch (e: Exception) {
+            false
+        }
     }

     fun focusAt(x: Float, y: Float) {
-        //
+        // Optional: implement touch-to-focus if needed.
     }

     fun shutdown() {
         stopCamera()
-
+        cameraThread.quitSafely()
+        analysisThread.quitSafely()
     }

-    private fun
-        if (
-
-
-
-
+    private fun openCamera() {
+        if (cameraDevice != null) {
+            return
+        }
+        val cameraId = selectCameraId() ?: return
+        try {
+            val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+            val streamConfigMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
+                ?: return
+
+            val viewAspect = if (previewView.height == 0) {
+                1.0
             } else {
-
+                previewView.width.toDouble() / previewView.height.toDouble()
             }
-
+
+            val previewSizes = streamConfigMap.getOutputSizes(SurfaceTexture::class.java)
+            previewSize = chooseBestSize(previewSizes, viewAspect, null)
+
+            val analysisSizes = streamConfigMap.getOutputSizes(ImageFormat.YUV_420_888)
+            analysisSize = chooseBestSize(analysisSizes, viewAspect, ANALYSIS_MAX_AREA)
+
+            val captureSizes = streamConfigMap.getOutputSizes(ImageFormat.JPEG)
+            captureSize = captureSizes?.maxByOrNull { it.width * it.height }
+
+            setupImageReaders()
+
+            if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
+                Log.e(TAG, "[CAMERA2] Camera permission not granted")
+                return
+            }
+
+            cameraManager.openCamera(cameraId, object : CameraDevice.StateCallback() {
+                override fun onOpened(camera: CameraDevice) {
+                    cameraDevice = camera
+                    createCaptureSession()
+                }
+
+                override fun onDisconnected(camera: CameraDevice) {
+                    camera.close()
+                    cameraDevice = null
+                }
+
+                override fun onError(camera: CameraDevice, error: Int) {
+                    Log.e(TAG, "[CAMERA2] CameraDevice error: $error")
+                    camera.close()
+                    cameraDevice = null
+                }
+            }, cameraHandler)
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Failed to open camera", e)
         }
-
+    }

-
-
-
-        isAnalysisActive = false
+    private fun setupImageReaders() {
+        val analysis = analysisSize
+        val capture = captureSize

-
+        yuvReader?.close()
+        jpegReader?.close()

-
-
-
-
-
-
-
+        if (analysis != null) {
+            yuvReader = ImageReader.newInstance(analysis.width, analysis.height, ImageFormat.YUV_420_888, 2).apply {
+                setOnImageAvailableListener({ reader ->
+                    if (!detectionEnabled || onFrameAnalyzed == null) {
+                        reader.acquireLatestImage()?.close()
+                        return@setOnImageAvailableListener
+                    }
+                    val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
+                    analysisHandler.post { analyzeImage(image) }
+                }, cameraHandler)
             }
+        }

-
-
-
-
+        if (capture != null) {
+            jpegReader = ImageReader.newInstance(capture.width, capture.height, ImageFormat.JPEG, 2).apply {
+                setOnImageAvailableListener({ reader ->
+                    val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
+                    val pending = pendingCapture.getAndSet(null)
+                    if (pending == null) {
+                        image.close()
+                        return@setOnImageAvailableListener
+                    }
+                    analysisHandler.post { processCapture(image, pending) }
+                }, cameraHandler)
+            }
         }
+    }
+
+    private fun createCaptureSession() {
+        val device = cameraDevice ?: return
+        val surfaceTexture = previewView.surfaceTexture ?: return
+        val preview = previewSize ?: return
+
+        surfaceTexture.setDefaultBufferSize(preview.width, preview.height)
+        val previewSurface = Surface(surfaceTexture)
+
+        val targets = mutableListOf(previewSurface)
+        yuvReader?.surface?.let { targets.add(it) }
+        jpegReader?.surface?.let { targets.add(it) }

-        // Bind Preview ONLY first
         try {
-
-
-
-
-
+            device.createCaptureSession(targets, object : CameraCaptureSession.StateCallback() {
+                override fun onConfigured(session: CameraCaptureSession) {
+                    captureSession = session
+                    configureTransform()
+                    startRepeating(previewSurface)
+                }

-
+                override fun onConfigureFailed(session: CameraCaptureSession) {
+                    Log.e(TAG, "[CAMERA2] Failed to configure capture session")
+                }
+            }, cameraHandler)
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Failed to create capture session", e)
+        }
+    }

-
-
-
-
+    private fun startRepeating(previewSurface: Surface) {
+        val device = cameraDevice ?: return
+        try {
+            previewRequestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
+                addTarget(previewSurface)
+                yuvReader?.surface?.let { addTarget(it) }
+                set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
+                set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
+                if (torchEnabled) {
+                    set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH)
+                }
+            }
+            captureSession?.setRepeatingRequest(previewRequestBuilder?.build() ?: return, null, cameraHandler)
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Failed to start repeating request", e)
+        }
+    }

+    private fun updateRepeatingRequest() {
+        val builder = previewRequestBuilder ?: return
+        builder.set(CaptureRequest.FLASH_MODE, if (torchEnabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
+        try {
+            captureSession?.setRepeatingRequest(builder.build(), null, cameraHandler)
         } catch (e: Exception) {
-            Log.e(TAG, "[
+            Log.e(TAG, "[CAMERA2] Failed to update torch state", e)
         }
     }

-
-
-
-
-
-        capture.takePicture(cameraExecutor, object : ImageCapture.OnImageCapturedCallback() {
-            override fun onCaptureSuccess(image: androidx.camera.core.ImageProxy) {
-                try {
-                    val rotationDegrees = image.imageInfo.rotationDegrees
-                    val nv21 = image.toNv21()
-
-                    lastFrame.set(
-                        LastFrame(
-                            nv21,
-                            image.width,
-                            image.height,
-                            rotationDegrees,
-                            useFrontCamera
-                        )
-                    )
-
-                    val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) {
-                        image.height
-                    } else {
-                        image.width
-                    }
+    private fun analyzeImage(image: Image) {
+        try {
+            val nv21 = image.toNv21()
+            val rotationDegrees = computeRotationDegrees()
+            val rectangle = DocumentDetector.detectRectangleInYUV(nv21, image.width, image.height, rotationDegrees)

-
-
-                    } else {
-                        image.height
-                    }
+            val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) image.height else image.width
+            val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) image.width else image.height

-
-
-
-
-
-
-
-
-
-
-
-
+            onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Error analyzing frame", e)
+        } finally {
+            image.close()
+        }
+    }
+
+    private fun processCapture(image: Image, pending: PendingCapture) {
+        try {
+            val buffer = image.planes[0].buffer
+            val bytes = ByteArray(buffer.remaining())
+            buffer.get(bytes)
+            val bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.size)
+                ?: throw IllegalStateException("Failed to decode JPEG")
+
+            val rotated = rotateAndMirror(bitmap, computeRotationDegrees(), useFrontCamera)
+            val photoFile = File(pending.outputDirectory, "doc_scan_${System.currentTimeMillis()}.jpg")
+            FileOutputStream(photoFile).use { out ->
+                rotated.compress(Bitmap.CompressFormat.JPEG, 95, out)
             }

-
-
+            if (rotated != bitmap) {
+                rotated.recycle()
             }
-
+            bitmap.recycle()
+
+            pending.onImageCaptured(photoFile)
+        } catch (e: Exception) {
+            pending.onError(e)
+        } finally {
+            image.close()
+        }
+    }
+
+    private fun closeSession() {
+        try {
+            captureSession?.close()
+            captureSession = null
+            cameraDevice?.close()
+            cameraDevice = null
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Error closing camera", e)
+        } finally {
+            yuvReader?.close()
+            jpegReader?.close()
+            yuvReader = null
+            jpegReader = null
+            previewRequestBuilder = null
+        }
+    }
+
+    private fun computeRotationDegrees(): Int {
+        val cameraId = selectCameraId() ?: return 0
+        val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+        val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0
+        val displayRotation = displayRotationDegrees()
+        return if (useFrontCamera) {
+            (sensorOrientation + displayRotation) % 360
+        } else {
+            (sensorOrientation - displayRotation + 360) % 360
+        }
+    }
+
+    private fun displayRotationDegrees(): Int {
+        val rotation = previewView.display?.rotation ?: Surface.ROTATION_0
+        return when (rotation) {
+            Surface.ROTATION_0 -> 0
+            Surface.ROTATION_90 -> 90
+            Surface.ROTATION_180 -> 180
+            Surface.ROTATION_270 -> 270
+            else -> 0
+        }
+    }
+
+    private fun configureTransform() {
+        val viewWidth = previewView.width.toFloat()
+        val viewHeight = previewView.height.toFloat()
+        val preview = previewSize ?: return
+        if (viewWidth == 0f || viewHeight == 0f) return
+
+        val rotation = displayRotationDegrees()
+        val bufferWidth = if (rotation == 90 || rotation == 270) preview.height.toFloat() else preview.width.toFloat()
+        val bufferHeight = if (rotation == 90 || rotation == 270) preview.width.toFloat() else preview.height.toFloat()
+
+        val scale = max(viewWidth / bufferWidth, viewHeight / bufferHeight)
+        val matrix = Matrix()
+        val centerX = viewWidth / 2f
+        val centerY = viewHeight / 2f
+
+        matrix.setScale(scale, scale, centerX, centerY)
+        matrix.postRotate(rotation.toFloat(), centerX, centerY)
+        previewView.setTransform(matrix)
     }

-    private fun
-
-        val
-
-
+    private fun chooseBestSize(sizes: Array<Size>?, targetAspect: Double, maxArea: Int?): Size? {
+        if (sizes == null || sizes.isEmpty()) return null
+        val sorted = sizes.sortedByDescending { it.width * it.height }
+
+        val matching = sorted.filter {
+            val aspect = it.width.toDouble() / it.height.toDouble()
+            abs(aspect - targetAspect) <= ANALYSIS_ASPECT_TOLERANCE && (maxArea == null || it.width * it.height <= maxArea)
+        }
+
+        if (matching.isNotEmpty()) {
+            return matching.first()
+        }
+
+        val capped = if (maxArea != null) {
+            sorted.filter { it.width * it.height <= maxArea }
+        } else {
+            sorted
+        }
+
+        return capped.firstOrNull() ?: sorted.first()
     }

     private fun rotateAndMirror(bitmap: Bitmap, rotationDegrees: Int, mirror: Boolean): Bitmap {
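A detail worth calling out from this hunk: analyzeImage() reports frame dimensions only after applying computeRotationDegrees(), so at 90 or 270 degrees the width and height are swapped. A small worked example of the back-camera branch of that formula (the sensor orientation of 90 is a typical value, not one read from this package):

// Illustrative worked example of the rotation math introduced above.
fun backCameraRotation(sensorOrientation: Int, displayRotationDegrees: Int): Int =
    (sensorOrientation - displayRotationDegrees + 360) % 360

fun main() {
    // Phone held upright (display ROTATION_0): frames arrive rotated 90 degrees,
    // which is why analyzeImage() swaps width and height before reporting them.
    println(backCameraRotation(90, 0))   // 90
    println(backCameraRotation(90, 270)) // 180
}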
@@ -295,7 +466,66 @@ class CameraController(
         return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
     }

+    private fun Image.toNv21(): ByteArray {
+        val width = width
+        val height = height
+        val ySize = width * height
+        val uvSize = width * height / 2
+        val nv21 = ByteArray(ySize + uvSize)
+
+        val yBuffer = planes[0].buffer
+        val uBuffer = planes[1].buffer
+        val vBuffer = planes[2].buffer
+
+        val yRowStride = planes[0].rowStride
+        val yPixelStride = planes[0].pixelStride
+        var outputOffset = 0
+        for (row in 0 until height) {
+            var inputOffset = row * yRowStride
+            for (col in 0 until width) {
+                nv21[outputOffset++] = yBuffer.get(inputOffset)
+                inputOffset += yPixelStride
+            }
+        }
+
+        val uvRowStride = planes[1].rowStride
+        val uvPixelStride = planes[1].pixelStride
+        val vRowStride = planes[2].rowStride
+        val vPixelStride = planes[2].pixelStride
+        val uvHeight = height / 2
+        val uvWidth = width / 2
+        for (row in 0 until uvHeight) {
+            var uInputOffset = row * uvRowStride
+            var vInputOffset = row * vRowStride
+            for (col in 0 until uvWidth) {
+                nv21[outputOffset++] = vBuffer.get(vInputOffset)
+                nv21[outputOffset++] = uBuffer.get(uInputOffset)
+                uInputOffset += uvPixelStride
+                vInputOffset += vPixelStride
+            }
+        }
+
+        return nv21
+    }
+
     private fun hasCameraPermission(): Boolean {
         return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED
     }
+
+    private fun selectCameraId(): String? {
+        return try {
+            val desiredFacing = if (useFrontCamera) {
+                CameraCharacteristics.LENS_FACING_FRONT
+            } else {
+                CameraCharacteristics.LENS_FACING_BACK
+            }
+            cameraManager.cameraIdList.firstOrNull { id ->
+                val characteristics = cameraManager.getCameraCharacteristics(id)
+                characteristics.get(CameraCharacteristics.LENS_FACING) == desiredFacing
+            } ?: cameraManager.cameraIdList.firstOrNull()
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Failed to select camera", e)
+            null
+        }
+    }
 }
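Taken together, the CameraController hunks keep the same public surface (startCamera, capturePhoto, setTorchEnabled, switchCamera, isTorchAvailable, focusAt, shutdown, and the onFrameAnalyzed callback) while reimplementing it directly on Camera2. A hedged sketch of how a host might drive it; the first capturePhoto argument is assumed to be the output directory, inferred from PendingCapture(outputDirectory, ...), and the host function itself is illustrative, not package code:

import android.util.Log
import java.io.File

// Illustrative host-side usage of the reworked controller.
fun driveController(controller: CameraController, cacheDir: File) {
    controller.onFrameAnalyzed = { rectangle, frameWidth, frameHeight ->
        // rectangle is null when no document-like quadrilateral was found in the frame
    }

    controller.startCamera(useFrontCam = false, enableDetection = true)

    controller.capturePhoto(
        cacheDir,  // assumed to be the outputDirectory parameter
        onImageCaptured = { file -> Log.d("Scanner", "saved ${file.absolutePath}") },
        onError = { e -> Log.e("Scanner", "capture failed", e) }
    )

    // When the hosting view is torn down:
    controller.shutdown()
}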
@@ -8,9 +8,9 @@ import android.graphics.Paint
 import android.graphics.PorterDuff
 import android.graphics.PorterDuffXfermode
 import android.util.Log
+import android.view.TextureView
 import android.view.View
 import android.widget.FrameLayout
-import androidx.camera.view.PreviewView
 import androidx.lifecycle.Lifecycle
 import androidx.lifecycle.LifecycleOwner
 import androidx.lifecycle.LifecycleRegistry
@@ -25,7 +25,7 @@ import kotlin.math.min

 class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), LifecycleOwner {
     private val themedContext = context
-    private val previewView:
+    private val previewView: TextureView
     private val overlayView: OverlayView
     private var cameraController: CameraController? = null
     private val lifecycleRegistry = LifecycleRegistry(this)
@@ -74,13 +74,10 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L

         // Create preview view
         Log.d(TAG, "[INIT] Creating PreviewView...")
-        previewView =
+        previewView = TextureView(context).apply {
             layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
             visibility = View.VISIBLE
             keepScreenOn = true
-            // TextureView mode avoids some device-specific Camera2 session timeouts.
-            implementationMode = PreviewView.ImplementationMode.COMPATIBLE
-            scaleType = PreviewView.ScaleType.FILL_CENTER
         }
         Log.d(TAG, "[INIT] PreviewView created: $previewView")
         Log.d(TAG, "[INIT] PreviewView visibility: ${previewView.visibility}")
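The DocumentScannerView changes line up with the new CameraController constructor: the view now owns a plain TextureView and passes it, along with itself as LifecycleOwner, into the controller. A minimal illustrative sketch of that wiring (the helper function and its arguments are assumptions, not package code):

import android.view.TextureView

// Illustrative wiring: DocumentScannerView implements LifecycleOwner, so it can hand
// itself and its TextureView straight to the reworked controller.
fun DocumentScannerView.createControllerSketch(preview: TextureView): CameraController {
    val controller = CameraController(
        context,   // the view's Context (a ThemedReactContext at runtime)
        this,      // LifecycleOwner, backed by the view's LifecycleRegistry
        preview    // TextureView that replaces the former CameraX PreviewView
    )
    controller.startCamera(useFrontCam = false, enableDetection = true)
    return controller
}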
package/package.json: CHANGED