react-native-rectangle-doc-scanner 3.194.0 → 3.195.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,298 +1,384 @@
 package com.reactnativerectangledocscanner
 
+import android.Manifest
 import android.content.Context
+import android.content.pm.PackageManager
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
 import android.graphics.ImageFormat
+import android.graphics.Matrix
+import android.graphics.Rect
+import android.graphics.SurfaceTexture
+import android.graphics.YuvImage
+import android.hardware.camera2.CameraCaptureSession
+import android.hardware.camera2.CameraCharacteristics
+import android.hardware.camera2.CameraDevice
+import android.hardware.camera2.CameraManager
+import android.hardware.camera2.CaptureRequest
+import android.media.Image
+import android.media.ImageReader
+import android.os.Handler
+import android.os.HandlerThread
 import android.util.Log
 import android.util.Size
 import android.view.Surface
-import
-import androidx.camera.lifecycle.ProcessCameraProvider
-import androidx.camera.view.PreviewView
+import android.view.TextureView
 import androidx.core.content.ContextCompat
-import androidx.lifecycle.Lifecycle
 import androidx.lifecycle.LifecycleOwner
-import
-import androidx.lifecycle.Observer
+import java.io.ByteArrayOutputStream
 import java.io.File
-import java.
-import java.util.concurrent.
+import java.io.FileOutputStream
+import java.util.concurrent.atomic.AtomicBoolean
 
 class CameraController(
     private val context: Context,
     private val lifecycleOwner: LifecycleOwner,
-    private val previewView:
+    private val previewView: TextureView
 ) {
-    private
-    private var
-    private var
-    private var
-    private
-
+    private val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
+    private var cameraDevice: CameraDevice? = null
+    private var captureSession: CameraCaptureSession? = null
+    private var previewRequestBuilder: CaptureRequest.Builder? = null
+    private var imageReader: ImageReader? = null
+    private var backgroundThread: HandlerThread? = null
+    private var backgroundHandler: Handler? = null
+
+    private var cameraId: String? = null
+    private var sensorOrientation: Int = 0
+    private var previewSize: Size? = null
+    private var analysisSize: Size? = null
     private var useFrontCamera = false
     private var torchEnabled = false
    private var detectionEnabled = true
-    private var
-
-    private
-    private
+    private var hasStarted = false
+
+    private val isOpening = AtomicBoolean(false)
+    private val lastFrameLock = Any()
+    private var lastFrame: LastFrame? = null
 
     var onFrameAnalyzed: ((Rectangle?, Int, Int) -> Unit)? = null
 
     companion object {
         private const val TAG = "CameraController"
+        private const val MAX_PREVIEW_WIDTH = 1280
+        private const val MAX_PREVIEW_HEIGHT = 720
+    }
+
+    private data class LastFrame(
+        val nv21: ByteArray,
+        val width: Int,
+        val height: Int,
+        val rotationDegrees: Int,
+        val isFront: Boolean
+    )
+
+    private val textureListener = object : TextureView.SurfaceTextureListener {
+        override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
+            Log.d(TAG, "[CAMERA2] Texture available: ${width}x${height}")
+            createPreviewSession()
+        }
+
+        override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
+            Log.d(TAG, "[CAMERA2] Texture size changed: ${width}x${height}")
+        }
+
+        override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
+            Log.d(TAG, "[CAMERA2] Texture destroyed")
+            return true
+        }
+
+        override fun onSurfaceTextureUpdated(surface: SurfaceTexture) = Unit
     }
 
-    /**
-     * Start camera with preview and analysis
-     */
     fun startCamera(
         useFrontCam: Boolean = false,
         enableDetection: Boolean = true
     ) {
         Log.d(TAG, "========================================")
-        Log.d(TAG, "[
-        Log.d(TAG, "[
-        Log.d(TAG, "[
-        Log.d(TAG, "[
-        Log.d(TAG, "[CAMERA_CONTROLLER] lifecycleOwner.lifecycle.currentState: ${lifecycleOwner.lifecycle.currentState}")
+        Log.d(TAG, "[CAMERA2] startCamera called")
+        Log.d(TAG, "[CAMERA2] useFrontCam: $useFrontCam")
+        Log.d(TAG, "[CAMERA2] enableDetection: $enableDetection")
+        Log.d(TAG, "[CAMERA2] lifecycleOwner: $lifecycleOwner")
         Log.d(TAG, "========================================")
 
         this.useFrontCamera = useFrontCam
         this.detectionEnabled = enableDetection
 
-
-
+        if (hasStarted) {
+            Log.d(TAG, "[CAMERA2] Already started, skipping")
+            return
+        }
+        hasStarted = true
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-        }, ContextCompat.getMainExecutor(context))
+        if (!hasCameraPermission()) {
+            Log.e(TAG, "[CAMERA2] Camera permission not granted")
+            return
+        }
+
+        startBackgroundThread()
+        chooseCamera()
+
+        if (previewView.isAvailable) {
+            openCamera()
+        } else {
+            previewView.surfaceTextureListener = textureListener
+        }
     }
 
-    /**
-     * Stop camera and release resources
-     */
     fun stopCamera() {
-
-
+        Log.d(TAG, "[CAMERA2] stopCamera called")
+        try {
+            captureSession?.close()
+            captureSession = null
+        } catch (e: Exception) {
+            Log.w(TAG, "[CAMERA2] Failed to close session", e)
+        }
+        try {
+            cameraDevice?.close()
+            cameraDevice = null
+        } catch (e: Exception) {
+            Log.w(TAG, "[CAMERA2] Failed to close camera device", e)
+        }
+        imageReader?.close()
+        imageReader = null
+        stopBackgroundThread()
+        hasStarted = false
     }
 
-
-
-
-
-
-
-
-
-        val cameraProvider = cameraProvider
-        if (cameraProvider == null) {
-            Log.e(TAG, "[BIND] cameraProvider is null, returning")
+    fun capturePhoto(
+        outputDirectory: File,
+        onImageCaptured: (File) -> Unit,
+        onError: (Exception) -> Unit
+    ) {
+        val frame = synchronized(lastFrameLock) { lastFrame }
+        if (frame == null) {
+            onError(Exception("No frame available for capture"))
             return
         }
 
-
-
-
-
-
-
+        backgroundHandler?.post {
+            try {
+                val photoFile = File(
+                    outputDirectory,
+                    "doc_scan_${System.currentTimeMillis()}.jpg"
+                )
+
+                val jpegBytes = nv21ToJpeg(frame.nv21, frame.width, frame.height, 95)
+                val bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)
+                    ?: throw IllegalStateException("Failed to decode JPEG")
+
+                val rotated = rotateAndMirror(bitmap, frame.rotationDegrees, frame.isFront)
+                FileOutputStream(photoFile).use { out ->
+                    rotated.compress(Bitmap.CompressFormat.JPEG, 95, out)
+                }
+                if (rotated != bitmap) {
+                    rotated.recycle()
+                }
+                bitmap.recycle()
+
+                Log.d(TAG, "[CAMERA2] Photo capture succeeded: ${photoFile.absolutePath}")
+                onImageCaptured(photoFile)
+            } catch (e: Exception) {
+                Log.e(TAG, "[CAMERA2] Photo capture failed", e)
+                onError(e)
+            }
         }
+    }
 
-
-
-
-
-
+    fun setTorchEnabled(enabled: Boolean) {
+        torchEnabled = enabled
+        val builder = previewRequestBuilder ?: return
+        builder.set(CaptureRequest.FLASH_MODE, if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
+        try {
+            captureSession?.setRepeatingRequest(builder.build(), null, backgroundHandler)
+        } catch (e: Exception) {
+            Log.w(TAG, "[CAMERA2] Failed to update torch", e)
         }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    }
+
+    fun switchCamera() {
+        useFrontCamera = !useFrontCamera
+        stopCamera()
+        startCamera(useFrontCamera, detectionEnabled)
+    }
+
+    fun isTorchAvailable(): Boolean {
+        val id = cameraId ?: return false
+        val characteristics = cameraManager.getCameraCharacteristics(id)
+        return characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) == true
+    }
+
+    fun focusAt(x: Float, y: Float) {
+        // No-op for now. Camera2 focus metering can be added if needed.
+    }
+
+    fun shutdown() {
+        stopCamera()
+    }
+
+    private fun chooseCamera() {
+        val lensFacing = if (useFrontCamera) {
+            CameraCharacteristics.LENS_FACING_FRONT
         } else {
-
+            CameraCharacteristics.LENS_FACING_BACK
         }
 
-
-
-
-
-
-
-
-
-
-                .build()
-                .also { analysis ->
-                    analysis.setAnalyzer(cameraExecutor) { imageProxy ->
-                        analyzeFrame(imageProxy)
-                    }
-                    Log.d(TAG, "[BIND] ImageAnalysis created and analyzer set: $analysis")
-                }
-        } else {
-            Log.d(TAG, "[BIND] ImageAnalysis disabled")
-            null
+        val ids = cameraManager.cameraIdList
+        val selected = ids.firstOrNull { id ->
+            val characteristics = cameraManager.getCameraCharacteristics(id)
+            characteristics.get(CameraCharacteristics.LENS_FACING) == lensFacing
+        } ?: ids.firstOrNull()
+
+        if (selected == null) {
+            Log.e(TAG, "[CAMERA2] No camera available")
+            return
         }
 
-
-
-
-        Log.d(TAG, "[BIND] PreviewView attached to window: ${previewView.isAttachedToWindow}")
-        Log.d(TAG, "[BIND] PreviewView size: ${previewView.width}x${previewView.height}")
-        Log.d(TAG, "[BIND] PreviewView implementationMode: ${previewView.implementationMode}")
-
-        // Set surface provider FIRST, before binding - this is critical
-        Log.d(TAG, "[BIND] Setting surface provider BEFORE binding...")
-        preview.setSurfaceProvider(previewView.surfaceProvider)
-        Log.d(TAG, "[BIND] Surface provider set successfully")
-
-        // Unbind all use cases before rebinding
-        Log.d(TAG, "[BIND] Unbinding all existing use cases...")
-        cameraProvider.unbindAll()
-
-        // Bind use cases to camera
-        val useCases = mutableListOf<UseCase>(preview)
-        if (imageCapture != null) {
-            useCases.add(imageCapture!!)
-        }
-        if (imageAnalysis != null) {
-            useCases.add(imageAnalysis!!)
-        }
-        Log.d(TAG, "[BIND] Total use cases to bind: ${useCases.size}")
+        cameraId = selected
+        val characteristics = cameraManager.getCameraCharacteristics(selected)
+        sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0
 
-
-
-
-            cameraSelector,
-            *useCases.toTypedArray()
-        )
-        Log.d(TAG, "[BIND] Bound to lifecycle successfully, camera: $camera")
-        registerCameraStateObserver(camera)
+        val streamConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
+        val previewChoices = streamConfig?.getOutputSizes(SurfaceTexture::class.java) ?: emptyArray()
+        val analysisChoices = streamConfig?.getOutputSizes(ImageFormat.YUV_420_888) ?: emptyArray()
 
-
-
-
-
-
+        previewSize = chooseSize(previewChoices, MAX_PREVIEW_WIDTH, MAX_PREVIEW_HEIGHT)
+        analysisSize = chooseSize(analysisChoices, MAX_PREVIEW_WIDTH, MAX_PREVIEW_HEIGHT)
+        Log.d(TAG, "[CAMERA2] Selected sizes - preview: $previewSize, analysis: $analysisSize")
+    }
+
+    private fun openCamera() {
+        val id = cameraId ?: run {
+            Log.e(TAG, "[CAMERA2] Camera id not set")
+            return
+        }
+        if (isOpening.getAndSet(true)) {
+            return
+        }
+
+        try {
+            cameraManager.openCamera(id, object : CameraDevice.StateCallback() {
+                override fun onOpened(device: CameraDevice) {
+                    Log.d(TAG, "[CAMERA2] Camera opened")
+                    isOpening.set(false)
+                    cameraDevice = device
+                    createPreviewSession()
+                }
 
-
-
-
-
-
+                override fun onDisconnected(device: CameraDevice) {
+                    Log.w(TAG, "[CAMERA2] Camera disconnected")
+                    isOpening.set(false)
+                    device.close()
+                    cameraDevice = null
+                }
+
+                override fun onError(device: CameraDevice, error: Int) {
+                    Log.e(TAG, "[CAMERA2] Camera error: $error")
+                    isOpening.set(false)
+                    device.close()
+                    cameraDevice = null
+                }
+            }, backgroundHandler)
+        } catch (e: SecurityException) {
+            isOpening.set(false)
+            Log.e(TAG, "[CAMERA2] Camera permission missing", e)
         } catch (e: Exception) {
-
-            e
+            isOpening.set(false)
+            Log.e(TAG, "[CAMERA2] Failed to open camera", e)
         }
     }
 
-    private fun
-        val
-
-
+    private fun createPreviewSession() {
+        val device = cameraDevice ?: return
+        val texture = previewView.surfaceTexture ?: return
+        val previewSize = previewSize ?: return
+        val analysisSize = analysisSize ?: previewSize
+
+        texture.setDefaultBufferSize(previewSize.width, previewSize.height)
+        val previewSurface = Surface(texture)
+
+        imageReader?.close()
+        imageReader = ImageReader.newInstance(
+            analysisSize.width,
+            analysisSize.height,
+            ImageFormat.YUV_420_888,
+            2
+        ).apply {
+            setOnImageAvailableListener({ reader ->
+                val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
+                handleImage(image)
+            }, backgroundHandler)
         }
 
-        val
-
-
-
-
-
-
-
-
-
-
-
-
+        val surfaces = listOf(previewSurface, imageReader!!.surface)
+        try {
+            device.createCaptureSession(
+                surfaces,
+                object : CameraCaptureSession.StateCallback() {
+                    override fun onConfigured(session: CameraCaptureSession) {
+                        if (cameraDevice == null) {
+                            return
+                        }
+                        captureSession = session
+                        previewRequestBuilder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
+                            addTarget(previewSurface)
+                            addTarget(imageReader!!.surface)
+                            set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
+                            set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
+                            set(CaptureRequest.FLASH_MODE, if (torchEnabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF)
+                        }
+                        try {
+                            session.setRepeatingRequest(previewRequestBuilder!!.build(), null, backgroundHandler)
+                            Log.d(TAG, "[CAMERA2] Preview session started")
+                        } catch (e: Exception) {
+                            Log.e(TAG, "[CAMERA2] Failed to start preview", e)
+                        }
+                    }
 
-
-
-
+                    override fun onConfigureFailed(session: CameraCaptureSession) {
+                        Log.e(TAG, "[CAMERA2] Preview session configure failed")
+                    }
+                },
+                backgroundHandler
+            )
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Failed to create preview session", e)
+        }
     }
 
-
-     * Analyze frame for rectangle detection
-     */
-    private fun analyzeFrame(imageProxy: ImageProxy) {
+    private fun handleImage(image: Image) {
         try {
-            val rotationDegrees =
-            val
-
-
-                imageProxy.width
-            }
-            val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) {
-                imageProxy.width
-            } else {
-                imageProxy.height
-            }
+            val rotationDegrees = getRotationDegrees()
+            val width = image.width
+            val height = image.height
+            val nv21 = imageToNV21(image)
 
-            if (
-
-                return
-            }
+            val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) height else width
+            val frameHeight = if (rotationDegrees == 90 || rotationDegrees == 270) width else height
 
-
-
-                nv21,
-                imageProxy.width,
-                imageProxy.height,
-                rotationDegrees
-            )
-
-            onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
-        } catch (e: Exception) {
-            Log.e(TAG, "Error analyzing frame", e)
-            val rotationDegrees = imageProxy.imageInfo.rotationDegrees
-            val frameWidth = if (rotationDegrees == 90 || rotationDegrees == 270) {
-                imageProxy.height
-            } else {
-                imageProxy.width
+            synchronized(lastFrameLock) {
+                lastFrame = LastFrame(nv21, width, height, rotationDegrees, useFrontCamera)
             }
-
-
+
+            if (detectionEnabled) {
+                val rectangle = DocumentDetector.detectRectangleInYUV(
+                    nv21,
+                    width,
+                    height,
+                    rotationDegrees
+                )
+                onFrameAnalyzed?.invoke(rectangle, frameWidth, frameHeight)
             } else {
-
+                onFrameAnalyzed?.invoke(null, frameWidth, frameHeight)
             }
-
+        } catch (e: Exception) {
+            Log.e(TAG, "[CAMERA2] Error analyzing frame", e)
         } finally {
-
+            image.close()
         }
     }
 
-
-     * Convert ImageProxy (YUV_420_888) to NV21 byte array
-     */
-    private fun imageProxyToNV21(image: ImageProxy): ByteArray {
+    private fun imageToNV21(image: Image): ByteArray {
         val width = image.width
         val height = image.height
 
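The hunk above introduces handleImage and the new imageToNV21 helper, but the conversion body itself lies outside the changed region and is not shown in this diff. For orientation only, a generic Camera2 YUV_420_888-to-NV21 conversion commonly looks like the sketch below; it is not this package's implementation, and the helper name yuv420ToNv21 is illustrative.

import android.media.Image

// Generic YUV_420_888 -> NV21 sketch (illustrative; not the package's imageToNV21 body).
// Assumes the Y plane has pixelStride == 1, which YUV_420_888 guarantees.
fun yuv420ToNv21(image: Image): ByteArray {
    val width = image.width
    val height = image.height
    val nv21 = ByteArray(width * height * 3 / 2)

    // Copy the luma plane row by row, honoring its row stride.
    val yPlane = image.planes[0]
    val yBuffer = yPlane.buffer
    var outPos = 0
    for (row in 0 until height) {
        yBuffer.position(row * yPlane.rowStride)
        yBuffer.get(nv21, outPos, width)
        outPos += width
    }

    // NV21 stores chroma as interleaved VU pairs after the luma plane.
    val uPlane = image.planes[1]
    val vPlane = image.planes[2]
    for (row in 0 until height / 2) {
        for (col in 0 until width / 2) {
            nv21[outPos++] = vPlane.buffer.get(row * vPlane.rowStride + col * vPlane.pixelStride)
            nv21[outPos++] = uPlane.buffer.get(row * uPlane.rowStride + col * uPlane.pixelStride)
        }
    }
    return nv21
}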
@@ -336,114 +422,76 @@ class CameraController(
         return nv21
     }
 
-
-
-
-
-
-
-        onError: (Exception) -> Unit
-    ) {
-        if (!isCaptureSession) {
-            val provider = cameraProvider ?: run {
-                onError(Exception("Camera provider not initialized"))
-                return
-            }
-            ContextCompat.getMainExecutor(context).execute {
-                try {
-                    // Rebind with ImageCapture only for the capture to avoid stream timeouts.
-                    provider.unbindAll()
-                    bindCameraUseCases(enableDetection = false, useImageCapture = true)
-                    capturePhoto(outputDirectory, onImageCaptured, onError)
-                } catch (e: Exception) {
-                    onError(e)
-                }
-            }
-            return
-        }
+    private fun nv21ToJpeg(nv21: ByteArray, width: Int, height: Int, quality: Int): ByteArray {
+        val yuv = YuvImage(nv21, ImageFormat.NV21, width, height, null)
+        val out = ByteArrayOutputStream()
+        yuv.compressToJpeg(Rect(0, 0, width, height), quality, out)
+        return out.toByteArray()
+    }
 
-
-
-            return
+    private fun rotateAndMirror(bitmap: Bitmap, rotationDegrees: Int, mirror: Boolean): Bitmap {
+        if (rotationDegrees == 0 && !mirror) {
+            return bitmap
         }
-
-
-
-
-        )
-
-
-
-        imageCapture.takePicture(
-            outputOptions,
-            ContextCompat.getMainExecutor(context),
-            object : ImageCapture.OnImageSavedCallback {
-                override fun onImageSaved(output: ImageCapture.OutputFileResults) {
-                    Log.d(TAG, "Photo capture succeeded: ${photoFile.absolutePath}")
-                    onImageCaptured(photoFile)
-                    if (detectionEnabled) {
-                        ContextCompat.getMainExecutor(context).execute {
-                            bindCameraUseCases(enableDetection = true, useImageCapture = false)
-                        }
-                    }
-                }
-
-                override fun onError(exception: ImageCaptureException) {
-                    Log.e(TAG, "Photo capture failed", exception)
-                    if (exception.imageCaptureError == ImageCapture.ERROR_CAMERA_CLOSED) {
-                        Log.w(TAG, "Camera was closed during capture, attempting restart")
-                        stopCamera()
-                        startCamera(useFrontCamera, detectionEnabled)
-                    }
-                    if (detectionEnabled) {
-                        ContextCompat.getMainExecutor(context).execute {
-                            bindCameraUseCases(enableDetection = true, useImageCapture = false)
-                        }
-                    }
-                    onError(exception)
-                }
-            }
-        )
+        val matrix = Matrix()
+        if (mirror) {
+            matrix.postScale(-1f, 1f, bitmap.width / 2f, bitmap.height / 2f)
+        }
+        if (rotationDegrees != 0) {
+            matrix.postRotate(rotationDegrees.toFloat(), bitmap.width / 2f, bitmap.height / 2f)
+        }
+        return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
     }
 
-
-
-
-
-
-
+    private fun getRotationDegrees(): Int {
+        val displayRotation = previewView.display?.rotation ?: Surface.ROTATION_0
+        val displayDegrees = when (displayRotation) {
+            Surface.ROTATION_0 -> 0
+            Surface.ROTATION_90 -> 90
+            Surface.ROTATION_180 -> 180
+            Surface.ROTATION_270 -> 270
+            else -> 0
+        }
+
+        return if (useFrontCamera) {
+            (sensorOrientation + displayDegrees) % 360
+        } else {
+            (sensorOrientation - displayDegrees + 360) % 360
+        }
     }
 
-
-
-
-
-
-
+    private fun chooseSize(choices: Array<Size>, maxWidth: Int, maxHeight: Int): Size? {
+        if (choices.isEmpty()) {
+            return null
+        }
+        val filtered = choices.filter { it.width <= maxWidth && it.height <= maxHeight }
+        val candidates = if (filtered.isNotEmpty()) filtered else choices.toList()
+        return candidates.sortedBy { it.width * it.height }.last()
     }
 
-
-
-
-
-
+    private fun startBackgroundThread() {
+        if (backgroundThread != null) {
+            return
+        }
+        backgroundThread = HandlerThread("Camera2Background").also {
+            it.start()
+            backgroundHandler = Handler(it.looper)
+        }
     }
 
-
-
-
-
-
-
-
-
+    private fun stopBackgroundThread() {
+        try {
+            backgroundThread?.quitSafely()
+            backgroundThread?.join()
+        } catch (e: InterruptedException) {
+            Log.w(TAG, "[CAMERA2] Background thread shutdown interrupted", e)
+        } finally {
+            backgroundThread = null
+            backgroundHandler = null
+        }
     }
 
-
-
-     */
-    fun shutdown() {
-        cameraExecutor.shutdown()
-        stopCamera()
+    private fun hasCameraPermission(): Boolean {
+        return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED
     }
 }
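getRotationDegrees above combines the sensor orientation with the display rotation in the usual Camera2 convention, with the front camera mirrored. As a quick sanity check, here is a standalone restatement of that arithmetic; the sample sensor orientations (90 for back cameras, 270 for front cameras) are typical values assumed for illustration, not something this package guarantees.

// Standalone restatement of the getRotationDegrees() arithmetic shown in the diff above.
fun rotationDegrees(sensorOrientation: Int, displayDegrees: Int, isFront: Boolean): Int =
    if (isFront) (sensorOrientation + displayDegrees) % 360
    else (sensorOrientation - displayDegrees + 360) % 360

fun main() {
    println(rotationDegrees(90, 0, isFront = false))   // 90: back camera, portrait
    println(rotationDegrees(90, 90, isFront = false))  // 0: back camera, landscape
    println(rotationDegrees(270, 0, isFront = true))   // 270: front camera, portrait
    println(rotationDegrees(270, 90, isFront = true))  // 0: front camera, landscape
}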
@@ -8,9 +8,9 @@ import android.graphics.Paint
 import android.graphics.PorterDuff
 import android.graphics.PorterDuffXfermode
 import android.util.Log
+import android.view.TextureView
 import android.view.View
 import android.widget.FrameLayout
-import androidx.camera.view.PreviewView
 import androidx.lifecycle.Lifecycle
 import androidx.lifecycle.LifecycleOwner
 import androidx.lifecycle.LifecycleRegistry
@@ -25,7 +25,7 @@ import kotlin.math.min
 
 class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), LifecycleOwner {
     private val themedContext = context
-    private val previewView:
+    private val previewView: TextureView
     private val overlayView: OverlayView
     private var cameraController: CameraController? = null
     private val lifecycleRegistry = LifecycleRegistry(this)
@@ -73,12 +73,9 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
         Log.d(TAG, "[INIT] Lifecycle state: ${lifecycleRegistry.currentState}")
 
         // Create preview view
-        Log.d(TAG, "[INIT] Creating
-        previewView =
+        Log.d(TAG, "[INIT] Creating TextureView...")
+        previewView = TextureView(context).apply {
             layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
-            scaleType = PreviewView.ScaleType.FILL_CENTER
-            // Use COMPATIBLE (TextureView) to avoid SurfaceView black frames on some devices.
-            implementationMode = PreviewView.ImplementationMode.COMPATIBLE
             visibility = View.VISIBLE
             keepScreenOn = true
             // Force view to be drawn
@@ -87,13 +84,12 @@ class DocumentScannerView(context: ThemedReactContext) : FrameLayout(context), L
             bringToFront()
             requestLayout()
         }
-        Log.d(TAG, "[INIT]
-        Log.d(TAG, "[INIT]
-        Log.d(TAG, "[INIT] PreviewView visibility: ${previewView.visibility}")
+        Log.d(TAG, "[INIT] TextureView created: $previewView")
+        Log.d(TAG, "[INIT] TextureView visibility: ${previewView.visibility}")
 
-        Log.d(TAG, "[INIT] Adding
+        Log.d(TAG, "[INIT] Adding TextureView to parent...")
         addView(previewView)
-        Log.d(TAG, "[INIT]
+        Log.d(TAG, "[INIT] TextureView added, childCount: $childCount")
 
         // Create overlay view for drawing rectangle
         Log.d(TAG, "[INIT] Creating OverlayView...")
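Taken together, the two files now wire a plain TextureView into the Camera2-based CameraController. Below is a minimal usage sketch against the signatures visible in this diff; the host functions, the output directory, and the callback bodies are illustrative assumptions, and CameraController and Rectangle come from the package itself, so the sketch only compiles inside its Android module.

import android.view.TextureView
import androidx.lifecycle.LifecycleOwner
import java.io.File

// Minimal wiring sketch based on the API shown in the diff above (not shipped by the package).
fun startScanner(owner: LifecycleOwner, textureView: TextureView): CameraController {
    val controller = CameraController(textureView.context, owner, textureView)

    // Receives the detected Rectangle (or null) plus the analyzed frame size from handleImage().
    controller.onFrameAnalyzed = { rectangle, frameWidth, frameHeight ->
        // e.g. forward to an overlay view for drawing
    }

    controller.startCamera(useFrontCam = false, enableDetection = true)
    return controller
}

// capturePhoto needs at least one analyzed frame, so it is typically triggered later,
// e.g. from a button press rather than immediately after startCamera().
fun capture(controller: CameraController, outputDir: File) {
    controller.capturePhoto(
        outputDirectory = outputDir,
        onImageCaptured = { file -> /* hand the JPEG path to the JS side */ },
        onError = { error -> /* report the failure */ }
    )
}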
package/package.json CHANGED