capacitor-plugin-camera-forked 3.0.120 → 3.1.120

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -290,7 +290,7 @@ stopCamera() => Promise<void>
 ### takeSnapshot(...)
 
 ```typescript
-takeSnapshot(options: { quality?: number; checkBlur?: boolean; }) => Promise<{ base64: string; confidence?: number; }>
+takeSnapshot(options: { quality?: number; checkBlur?: boolean; }) => Promise<{ base64: string; confidence?: number; boundingBoxes?: number[][]; }>
 ```
 
 take a snapshot as base64.
@@ -299,7 +299,7 @@ take a snapshot as base64.
 | ------------- | ------------------------------------------------------- |
 | **`options`** | <code>{ quality?: number; checkBlur?: boolean; }</code> |
 
-**Returns:** <code>Promise&lt;{ base64: string; confidence?: number; }&gt;</code>
+**Returns:** <code>Promise&lt;{ base64: string; confidence?: number; boundingBoxes?: number[][]; }&gt;</code>
 
 --------------------
 
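The README hunk above only documents the widened `takeSnapshot` return type. A minimal consumer-side sketch of the new fields follows; the `CameraPreview` export name and the 0.5 threshold are illustrative assumptions, not taken from this diff:

```typescript
import { CameraPreview } from 'capacitor-plugin-camera-forked';

// Hypothetical usage sketch: request blur checking and read the new fields.
async function captureWithBlurCheck(): Promise<string> {
  const { base64, confidence, boundingBoxes } = await CameraPreview.takeSnapshot({
    quality: 85,
    checkBlur: true,
  });

  // Per the helper code in this release, `confidence` is a blur confidence
  // (0.0 = sharp, 1.0 = very blurry); the 0.5 cut-off here is arbitrary.
  if (confidence !== undefined && confidence >= 0.5) {
    console.warn('Snapshot looks blurry; consider retaking it');
  }

  // `boundingBoxes` is new in 3.1.120 and is only set when checkBlur is true.
  console.log('Text region boxes:', boundingBoxes ?? []);
  return base64;
}
```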
@@ -47,6 +47,13 @@ repositories {
     mavenCentral()
 }
 
+allprojects {
+    repositories {
+        google()
+        mavenCentral()
+    }
+}
+
 
 dependencies {
     implementation fileTree(dir: 'libs', include: ['*.jar'])
@@ -68,4 +75,7 @@ dependencies {
     implementation 'org.tensorflow:tensorflow-lite:2.14.0'
     implementation 'org.tensorflow:tensorflow-lite-support:0.4.4'
     implementation 'org.tensorflow:tensorflow-lite-gpu:2.14.0'
+
+    // Google ML Kit dependencies for text recognition
+    implementation 'com.google.mlkit:text-recognition:16.0.1'
 }
@@ -1,4 +1,11 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android">
 
     <uses-permission android:name="android.permission.RECORD_AUDIO" />
+
+    <application>
+        <!-- ML Kit Text Recognition model download -->
+        <meta-data
+            android:name="com.google.mlkit.vision.DEPENDENCIES"
+            android:value="ocr" />
+    </application>
 </manifest>
@@ -39,6 +39,10 @@ public class BlurDetectionHelper {
     private TensorImage inputImageBuffer;
     private TensorBuffer outputProbabilityBuffer;
     private boolean isInitialized = false;
+
+    // Text recognition blur detection helper
+    private TextRecognitionBlurHelper textBlurHelper;
+    private boolean useTextRecognition = true;
 
 
     public BlurDetectionHelper() {
@@ -48,6 +52,9 @@ public class BlurDetectionHelper {
             .add(new ResizeWithCropOrPadOp(INPUT_SIZE, INPUT_SIZE))
             .add(new ResizeOp(INPUT_SIZE, INPUT_SIZE, ResizeOp.ResizeMethod.BILINEAR))
             .build();
+
+        // Initialize text recognition blur helper
+        textBlurHelper = new TextRecognitionBlurHelper();
     }
 
     /**
@@ -81,6 +88,9 @@ public class BlurDetectionHelper {
             tflite.getOutputTensor(0).dataType()
         );
 
+        // Initialize text recognition helper
+        textBlurHelper.initialize(context);
+
         // Update INPUT_SIZE based on actual model input shape
         int[] inputShape = tflite.getInputTensor(0).shape();
 
@@ -113,15 +123,51 @@ public class BlurDetectionHelper {
     }
 
     /**
-     * Detect blur in image using TFLite model
+     * Detect blur in image using hybrid approach (Text Recognition + TFLite)
      * @param bitmap Input image bitmap
      * @return Blur confidence score (0.0 = sharp, 1.0 = very blurry)
      */
     public double detectBlur(Bitmap bitmap) {
+        // First try text recognition if enabled
+        if (useTextRecognition && textBlurHelper != null && textBlurHelper.isInitialized()) {
+            try {
+                java.util.Map<String, Object> textResult = textBlurHelper.detectBlurWithConfidence(bitmap);
+                Boolean hasText = (Boolean) textResult.get("hasText");
+
+                if (hasText != null && hasText) {
+                    // Image contains text, use text recognition result
+                    Boolean isBlur = (Boolean) textResult.get("isBlur");
+                    Double textConfidence = (Double) textResult.get("textConfidence");
+                    Integer wordCount = (Integer) textResult.get("wordCount");
+                    Integer readableWords = (Integer) textResult.get("readableWords");
+
+
+                    if (isBlur != null) {
+                        double blurConfidence = isBlur ? 1.0 : 0.0;
+                        return blurConfidence;
+                    } else {
+                    }
+                } else {
+                }
+            } catch (Exception e) {
+            }
+        }
+
+        // Fallback to TFLite model
+        return detectBlurWithTFLite(bitmap);
+    }
+
+    /**
+     * Detect blur in image using TFLite model only
+     * @param bitmap Input image bitmap
+     * @return Blur confidence score (0.0 = sharp, 1.0 = very blurry)
+     */
+    public double detectBlurWithTFLite(Bitmap bitmap) {
         if (!isInitialized || tflite == null) {
-            Log.w(TAG, "TFLite model not initialized, falling back to Laplacian");
-            return calculateLaplacianBlurScore(bitmap);
+            double laplacianScore = calculateLaplacianBlurScore(bitmap);
+            return laplacianScore;
         }
+
 
         try {
             // Use the original bitmap directly (no image enhancement)
@@ -158,11 +204,11 @@ public class BlurDetectionHelper {
             // Determine if image is blurry using TFLite confidence
             boolean isBlur = (blurConfidence >= 0.99 || sharpConfidence < 0.1);
 
+
             // Return 1.0 for blur, 0.0 for sharp (to maintain double return type)
             return isBlur ? 1.0 : 0.0;
 
         } catch (Exception e) {
-            Log.e(TAG, "Error during TFLite inference: " + e.getMessage(), e);
             // Fallback to Laplacian algorithm
             double laplacianScore = calculateLaplacianBlurScore(bitmap);
             boolean isBlur = laplacianScore < 150;
@@ -294,24 +340,82 @@ public class BlurDetectionHelper {
     }
 
     /**
-     * Detect blur with detailed confidence scores
+     * Detect blur with detailed confidence scores using hybrid approach
      * @param bitmap Input image bitmap
-     * @return Map with isBlur, blurConfidence, and sharpConfidence
+     * @return Map with comprehensive blur detection results
      */
     public java.util.Map<String, Object> detectBlurWithConfidence(Bitmap bitmap) {
         java.util.Map<String, Object> result = new java.util.HashMap<>();
 
+        // Try text recognition first if enabled
+        if (useTextRecognition && textBlurHelper != null && textBlurHelper.isInitialized()) {
+            try {
+                java.util.Map<String, Object> textResult = textBlurHelper.detectBlurWithConfidence(bitmap);
+                Boolean hasText = (Boolean) textResult.get("hasText");
+
+                if (hasText != null && hasText) {
+                    // Image contains text, use text recognition result
+                    Boolean isBlur = (Boolean) textResult.get("isBlur");
+                    Double textConfidence = (Double) textResult.get("textConfidence");
+
+                    result.put("method", "text_recognition");
+                    result.put("isBlur", isBlur);
+                    result.put("textConfidence", textConfidence);
+                    result.put("wordCount", textResult.get("wordCount"));
+                    result.put("readableWords", textResult.get("readableWords"));
+                    result.put("hasText", true);
+                    result.put("boundingBoxes", textResult.get("boundingBoxes"));
+
+                    // Set blur/sharp confidence based on text recognition result
+                    if (isBlur != null && textConfidence != null) {
+                        if (isBlur) {
+                            // Image is blurry - high blur confidence, low sharp confidence
+                            result.put("blurConfidence", 1.0);
+                            result.put("sharpConfidence", 0.0);
+                        } else {
+                            // Image is sharp - low blur confidence, high sharp confidence
+                            result.put("blurConfidence", 1.0 - textConfidence);
+                            result.put("sharpConfidence", textConfidence);
+                        }
+                    } else {
+                        // Default values if confidence not available
+                        result.put("blurConfidence", isBlur != null && isBlur ? 1.0 : 0.0);
+                        result.put("sharpConfidence", isBlur != null && !isBlur ? 1.0 : 0.0);
+                    }
+
+
+                    return result;
+                }
+            } catch (Exception e) {
+            }
+        }
+
+        // Fallback to TFLite model
+        return detectBlurWithTFLiteConfidence(bitmap);
+    }
+
+    /**
+     * Detect blur with detailed confidence scores using TFLite only
+     * @param bitmap Input image bitmap
+     * @return Map with isBlur, blurConfidence, and sharpConfidence
+     */
+    public java.util.Map<String, Object> detectBlurWithTFLiteConfidence(Bitmap bitmap) {
+        java.util.Map<String, Object> result = new java.util.HashMap<>();
+
         if (!isInitialized || tflite == null) {
-            Log.w(TAG, "TFLite model not initialized, falling back to Laplacian");
             double laplacianScore = calculateLaplacianBlurScore(bitmap);
             boolean isBlur = laplacianScore < 150;
             double normalizedScore = Math.max(0.0, Math.min(1.0, laplacianScore / 300.0));
             double sharpConfidence = normalizedScore;
             double blurConfidence = 1.0 - normalizedScore;
 
+            result.put("method", "laplacian");
             result.put("isBlur", isBlur);
             result.put("blurConfidence", blurConfidence);
             result.put("sharpConfidence", sharpConfidence);
+            result.put("laplacianScore", laplacianScore);
+            result.put("hasText", false);
+            result.put("boundingBoxes", new java.util.ArrayList<>());
             return result;
         }
 
@@ -350,16 +454,14 @@ public class BlurDetectionHelper {
             // Determine if image is blurry using TFLite confidence
             boolean isBlur = (blurConfidence >= 0.99 || sharpConfidence < 0.1);
 
-            Log.d(TAG, String.format("TFLite Blur Detection with Confidence - Blur: %.6f, Sharp: %.6f, Label: %s",
-                blurConfidence, sharpConfidence, isBlur ? "blur" : "sharp"));
 
             result.put("isBlur", isBlur);
             result.put("blurConfidence", blurConfidence);
             result.put("sharpConfidence", sharpConfidence);
+            result.put("boundingBoxes", new java.util.ArrayList<>());
             return result;
 
         } catch (Exception e) {
-            Log.e(TAG, "Error during TFLite inference: " + e.getMessage(), e);
             // Fallback to Laplacian algorithm
             double laplacianScore = calculateLaplacianBlurScore(bitmap);
             boolean isBlur = laplacianScore < 150;
@@ -370,6 +472,7 @@ public class BlurDetectionHelper {
             result.put("isBlur", isBlur);
             result.put("blurConfidence", blurConfidence);
             result.put("sharpConfidence", sharpConfidence);
+            result.put("boundingBoxes", new java.util.ArrayList<>());
             return result;
         }
     }
@@ -395,6 +498,40 @@ public class BlurDetectionHelper {
         return isBlurry(bitmap) ? 100.0 : 0.0;
     }
 
+    /**
+     * Enable or disable text recognition for blur detection
+     * @param enable true to enable text recognition, false to use only TFLite
+     */
+    public void setTextRecognitionEnabled(boolean enable) {
+        this.useTextRecognition = enable;
+    }
+
+    /**
+     * Enable or disable dictionary check in text recognition
+     * @param enable true to enable dictionary check
+     */
+    public void setDictionaryCheckEnabled(boolean enable) {
+        if (textBlurHelper != null) {
+            textBlurHelper.setDictionaryCheckEnabled(enable);
+        }
+    }
+
+    /**
+     * Check if text recognition is enabled
+     * @return true if text recognition is enabled
+     */
+    public boolean isTextRecognitionEnabled() {
+        return useTextRecognition;
+    }
+
+    /**
+     * Get text recognition helper instance
+     * @return TextRecognitionBlurHelper instance
+     */
+    public TextRecognitionBlurHelper getTextBlurHelper() {
+        return textBlurHelper;
+    }
+
     /**
      * Clean up resources
      */
@@ -403,6 +540,10 @@ public class BlurDetectionHelper {
             tflite.close();
             tflite = null;
         }
+        if (textBlurHelper != null) {
+            textBlurHelper.close();
+            textBlurHelper = null;
+        }
         isInitialized = false;
     }
 
@@ -125,7 +125,6 @@ public class CameraPreviewPlugin extends Plugin {
             desiredJpegQuality = call.getInt("quality");
             // Ensure quality is within valid range
             desiredJpegQuality = Math.max(1, Math.min(100, desiredJpegQuality));
-            Log.d("Camera", "Initialized with JPEG quality: " + desiredJpegQuality);
         }
 
         previewView = new PreviewView(getContext());
@@ -144,7 +143,6 @@ public class CameraPreviewPlugin extends Plugin {
         // Initialize TFLite blur detection helper
         blurDetectionHelper = new BlurDetectionHelper();
         boolean tfliteInitialized = blurDetectionHelper.initialize(getContext());
-        Log.d("Camera", "TFLite blur detection initialized: " + tfliteInitialized);
 
         cameraProviderFuture.addListener(() -> {
             try {
@@ -153,7 +151,6 @@ public class CameraPreviewPlugin extends Plugin {
                     .requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
                 // Auto-optimize for photo capture on initialization with specified quality
                 setupUseCases(false); // Always use photo-optimized mode
-                Log.d("Camera", "Initialized with photo capture optimization and quality: " + desiredJpegQuality);
                 call.resolve();
             } catch (ExecutionException | InterruptedException e) {
                 e.printStackTrace();
@@ -172,10 +169,9 @@ public class CameraPreviewPlugin extends Plugin {
         Preview.Builder previewBuilder = new Preview.Builder();
         if (resolution != null) {
             previewBuilder.setTargetResolution(resolution);
-            Log.d("Camera", "Using optimal resolution: " + resolution.getWidth() + "x" + resolution.getHeight());
         } else {
-            // Fallback: let CameraX choose the best resolution automatically
-            Log.d("Camera", "Using CameraX auto-resolution selection");
+            // Fallback: let CameraX choose the best resolution automatically
+            Log.d("Camera", "Using CameraX auto-resolution selection");
         }
         preview = previewBuilder.build();
         preview.setSurfaceProvider(previewView.getSurfaceProvider());
@@ -183,7 +179,7 @@ public class CameraPreviewPlugin extends Plugin {
         // Enhanced ImageAnalysis setup
         ImageAnalysis.Builder imageAnalysisBuilder = new ImageAnalysis.Builder();
         if (resolution != null) {
-            imageAnalysisBuilder.setTargetResolution(resolution);
+            imageAnalysisBuilder.setTargetResolution(resolution);
         }
         imageAnalysisBuilder.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
             .setImageQueueDepth(1); // Optimize for latest frame
@@ -224,9 +220,26 @@ public class CameraPreviewPlugin extends Plugin {
                 // Only detect blur if checkBlur option is true
                 boolean shouldCheckBlur = takeSnapshotCall.getBoolean("checkBlur", false);
                 if (shouldCheckBlur) {
-                    double confidence = calculateBlurConfidence(bitmap);
-                    result.put("confidence", confidence);
-                    Log.d("Camera", "Blur detection - Confidence: " + confidence);
+                    // Get blur detection result with bounding boxes in one call
+                    if (blurDetectionHelper != null && blurDetectionHelper.isInitialized()) {
+                        java.util.Map<String, Object> blurResult = blurDetectionHelper.detectBlurWithConfidence(bitmap);
+
+                        Double blurConfidence = (Double) blurResult.get("blurConfidence");
+                        if (blurConfidence != null) {
+                            result.put("confidence", blurConfidence);
+                        }
+                        if (blurResult.containsKey("boundingBoxes")) {
+                            Object boundingBoxesObj = blurResult.get("boundingBoxes");
+                            result.put("boundingBoxes", boundingBoxesObj);
+                        } else {
+                            result.put("boundingBoxes", new java.util.ArrayList<>());
+                        }
+                    } else {
+                        // Fallback to Laplacian algorithm
+                        double confidence = calculateBlurConfidence(bitmap);
+                        result.put("confidence", confidence);
+                        result.put("boundingBoxes", new java.util.ArrayList<>());
+                    }
                 } else {
                     Log.d("Camera", "Blur detection disabled for performance");
                 }
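As the hunk above shows, `boundingBoxes` is filled from the text-recognition result, while the TFLite and Laplacian fallbacks return an empty list. A small web-side sketch follows (again assuming a `CameraPreview` export; the coordinate layout of each inner array is not specified by this diff, so the boxes are only counted):

```typescript
import { CameraPreview } from 'capacitor-plugin-camera-forked';

// Hypothetical sketch: distinguish the text-recognition path from the fallbacks.
async function describeSnapshotBlur(): Promise<void> {
  const snap = await CameraPreview.takeSnapshot({ quality: 90, checkBlur: true });
  const boxes: number[][] = snap.boundingBoxes ?? [];

  if (boxes.length > 0) {
    // Non-empty boxes: the native side used the ML Kit text-recognition path.
    console.log(`Found ${boxes.length} text region(s); blur confidence: ${snap.confidence}`);
  } else {
    // Empty boxes: no text regions were reported (TFLite or Laplacian score).
    console.log(`No text regions; blur confidence: ${snap.confidence}`);
  }
}
```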
@@ -317,15 +330,13 @@ public class CameraPreviewPlugin extends Plugin {
 
             // Return the first (highest quality) option - CameraX will adapt if not supported
             Size optimalResolution = preferredResolutions[0];
-            Log.d("Camera", "Selected optimal resolution: " + optimalResolution.getWidth() + "x" + optimalResolution.getHeight());
             return optimalResolution;
 
         } catch (Exception e) {
-            Log.e("Camera", "Error selecting optimal resolution: " + e.getMessage());
+            Log.e("Camera", "Error selecting optimal resolution: " + e.getMessage());
         }
 
         // Fallback: return null to let CameraX auto-select
-        Log.d("Camera", "Using CameraX auto-resolution selection as fallback");
         return null;
     }
 
@@ -350,8 +361,6 @@ public class CameraPreviewPlugin extends Plugin {
 
             // Enable continuous auto-focus by starting a background focus monitoring
             startContinuousAutoFocus();
-
-            Log.d("Camera", "Initialized responsive auto-focus with continuous monitoring");
         } catch (Exception e) {
             Log.e("Camera", "Failed to initialize responsive auto-focus: " + e.getMessage());
         }
@@ -454,7 +463,6 @@ public class CameraPreviewPlugin extends Plugin {
             useCaseGroup = null;
             recorder = null;
             currentRecording = null;
-            Log.d("Camera", "Camera stopped and all references cleared.");
             call.resolve();
         } catch (Exception e) {
             call.reject(e.getMessage());
@@ -660,7 +668,6 @@ public class CameraPreviewPlugin extends Plugin {
 
                 // If focus failed, try a backup focus attempt to reduce need for multiple taps
                 if (!result.isFocusSuccessful()) {
-                    Log.d("Camera", "Initial focus failed, attempting backup focus");
                     performBackupFocus(previewX, previewY);
                 } else {
                     // If manual focus was successful, maintain it with a follow-up action
@@ -809,7 +816,6 @@ public class CameraPreviewPlugin extends Plugin {
                     .build();
 
                 camera.getCameraControl().startFocusAndMetering(adaptiveAction);
-                Log.d("Camera", "Adaptive focus at point: " + (currentPointIndex + 1));
             }
         } catch (Exception e) {
             Log.d("Camera", "Adaptive focus failed: " + e.getMessage());
@@ -1134,7 +1140,6 @@ public class CameraPreviewPlugin extends Plugin {
                 if (bitmap != null) {
                     double confidence = calculateBlurConfidence(bitmap);
                     result.put("confidence", confidence);
-                    Log.d("Camera", "Blur detection - Confidence: " + confidence);
                 }
             }
             result.put("path", file.getAbsolutePath());
@@ -1193,9 +1198,7 @@ public class CameraPreviewPlugin extends Plugin {
         Consumer<VideoRecordEvent> captureListener = new Consumer<VideoRecordEvent>() {
             @Override
             public void accept(VideoRecordEvent videoRecordEvent) {
-                Log.d("Camera",videoRecordEvent.toString());
                 if (videoRecordEvent instanceof VideoRecordEvent.Finalize) {
-                    Log.d("Camera","finalize");
                     Uri uri = ((VideoRecordEvent.Finalize) videoRecordEvent).getOutputResults().getOutputUri();
                     String path = uri.getPath();
 
@@ -1308,7 +1311,6 @@ public class CameraPreviewPlugin extends Plugin {
             useCaseGroup = null;
             recorder = null;
             currentRecording = null;
-            Log.d("Camera", "handleOnPause: Camera stopped and references cleared.");
         }
 
         // Clean up TFLite resources
@@ -1341,7 +1343,6 @@ public class CameraPreviewPlugin extends Plugin {
     public void requestCameraPermission(PluginCall call) {
         boolean hasCameraPerms = getPermissionState(CAMERA) == PermissionState.GRANTED;
         if (hasCameraPerms == false) {
-            Log.d("Camera","no camera permission. request permission.");
             String[] aliases = new String[] { CAMERA };
             requestPermissionForAliases(aliases, call, "cameraPermissionsCallback");
         }else{
@@ -1363,7 +1364,6 @@ public class CameraPreviewPlugin extends Plugin {
     public void requestMicroPhonePermission(PluginCall call) {
         boolean hasCameraPerms = getPermissionState(MICROPHONE) == PermissionState.GRANTED;
         if (hasCameraPerms == false) {
-            Log.d("Camera","no microphone permission. request permission.");
             String[] aliases = new String[] { MICROPHONE };
             requestPermissionForAliases(aliases, call, "microphonePermissionsCallback");
         }else{
@@ -1473,8 +1473,19 @@ public class CameraPreviewPlugin extends Plugin {
         // Use TFLite model if available for detailed confidence
         if (blurDetectionHelper != null && blurDetectionHelper.isInitialized()) {
             java.util.Map<String, Object> result = blurDetectionHelper.detectBlurWithConfidence(bitmap);
+            Boolean isBlur = (Boolean) result.get("isBlur");
             Double blurConfidence = (Double) result.get("blurConfidence");
-            return blurConfidence != null ? blurConfidence : 0.0;
+            Double sharpConfidence = (Double) result.get("sharpConfidence");
+
+            // Return the actual blurConfidence value for more nuanced results
+            if (blurConfidence != null) {
+                return blurConfidence;
+            } else if (isBlur != null) {
+                // Fallback to boolean if confidence not available
+                return isBlur ? 1.0 : 0.0;
+            } else {
+                return 0.0;
+            }
         } else {
             // Fallback to Laplacian algorithm with confidence calculation
             double laplacianScore = calculateLaplacianBlurScore(bitmap);
@@ -1612,7 +1623,6 @@ public class CameraPreviewPlugin extends Plugin {
                 try {
                     FocusMeteringResult backupResult = backupFuture.get();
                     if (backupResult.isFocusSuccessful()) {
-                        Log.d("Camera", "Backup focus successful");
                         maintainFocusAtPoint(previewX, previewY);
                     } else {
                         Log.d("Camera", "Backup focus also failed");
@@ -1665,7 +1675,6 @@ public class CameraPreviewPlugin extends Plugin {
                     .build();
 
                 camera.getCameraControl().startFocusAndMetering(maintainAction);
-                Log.d("Camera", "Maintaining focus at tapped point");
             }
         } catch (Exception e) {
             Log.d("Camera", "Focus maintenance failed: " + e.getMessage());