react-native-rectangle-doc-scanner 0.63.0 → 0.65.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocScanner.d.ts +9 -11
- package/dist/DocScanner.js +61 -462
- package/dist/index.d.ts +0 -1
- package/dist/index.js +1 -3
- package/dist/utils/overlay.d.ts +3 -0
- package/dist/utils/overlay.js +38 -8
- package/package.json +3 -10
- package/src/DocScanner.tsx +139 -566
- package/src/external.d.ts +29 -61
- package/src/index.ts +0 -1
- package/src/utils/overlay.tsx +63 -8
- package/src/utils/documentDetection.ts +0 -278
- package/src/worklets-core.d.ts +0 -8
package/src/DocScanner.tsx  CHANGED
@@ -1,96 +1,71 @@
-import React, {
-
-
-
-
+import React, {
+  ComponentType,
+  ReactNode,
+  useCallback,
+  useMemo,
+  useRef,
+  useState,
+} from 'react';
 import {
-
-
-
-
-
-
-  ObjectType,
-} from 'react-native-fast-opencv';
+  LayoutChangeEvent,
+  StyleSheet,
+  TouchableOpacity,
+  View,
+} from 'react-native';
+import DocumentScanner from 'react-native-document-scanner-plugin';
 import { Overlay } from './utils/overlay';
-import { checkStability } from './utils/stability';
-import {
-  blendQuads,
-  isValidQuad,
-  orderQuadPoints,
-  quadArea,
-  quadCenter,
-  quadDistance,
-  quadEdgeLengths,
-  sanitizeQuad,
-  weightedAverageQuad,
-} from './utils/quad';
 import type { Point } from './types';

-
-
-
-
-
-
-
-  // Validate all points have valid x and y
-  for (const p of points) {
-    if (typeof p.x !== 'number' || typeof p.y !== 'number' ||
-        !isFinite(p.x) || !isFinite(p.y)) {
-      return false;
-    }
-  }
-
-  let previous = 0;
-
-  for (let i = 0; i < 4; i++) {
-    const p0 = points[i];
-    const p1 = points[(i + 1) % 4];
-    const p2 = points[(i + 2) % 4];
-    const cross = (p1.x - p0.x) * (p2.y - p1.y) - (p1.y - p0.y) * (p2.x - p1.x);
+type NativeRectangle = {
+  topLeft: Point;
+  topRight: Point;
+  bottomRight: Point;
+  bottomLeft: Point;
+};

-
-
-
-
+type NativeRectangleEvent = {
+  rectangleCoordinates?: NativeRectangle | null;
+  stableCounter?: number;
+};

-
-
-
-
-
-
+type NativeCaptureResult = {
+  croppedImage?: string;
+  initialImage?: string;
+  width?: number;
+  height?: number;
+};

-
-
-    return false;
-  }
+type DocumentScannerHandle = {
+  capture: () => Promise<NativeCaptureResult>;
 };

-type
-
-
-
+type NativeDocumentScannerProps = {
+  style?: object;
+  overlayColor?: string;
+  detectionCountBeforeCapture?: number;
+  enableTorch?: boolean;
+  hideControls?: boolean;
+  useBase64?: boolean;
+  quality?: number;
+  onRectangleDetect?: (event: NativeRectangleEvent) => void;
+  onPictureTaken?: (event: NativeCaptureResult) => void;
 };

-
+const NativeDocumentScanner = DocumentScanner as unknown as ComponentType<
+  NativeDocumentScannerProps & { ref?: React.Ref<DocumentScannerHandle> }
+>;

 /**
- *
+ * Detection configuration is no longer used now that the native
+ * implementation handles edge detection. Keeping it for backwards
+ * compatibility with existing consumer code.
  */
 export interface DetectionConfig {
-  /** Processing resolution width (default: 1280) - higher = more accurate but slower */
   processingWidth?: number;
-  /** Canny edge detection lower threshold (default: 40) */
   cannyLowThreshold?: number;
-  /** Canny edge detection upper threshold (default: 120) */
   cannyHighThreshold?: number;
-  /** Snap distance in pixels for corner locking (default: 8) */
   snapDistance?: number;
-  /** Max frames to hold anchor when detection fails (default: 20) */
   maxAnchorMisses?: number;
-  /** Maximum center movement allowed while maintaining lock (default: 200px) */
   maxCenterDelta?: number;
 }

@@ -99,534 +74,129 @@ interface Props {
   overlayColor?: string;
   autoCapture?: boolean;
   minStableFrames?: number;
-
+  enableTorch?: boolean;
+  quality?: number;
+  useBase64?: boolean;
   children?: ReactNode;
-
+  showGrid?: boolean;
+  gridColor?: string;
+  gridLineWidth?: number;
   detectionConfig?: DetectionConfig;
 }

+const DEFAULT_OVERLAY_COLOR = '#e7a649';
+const GRID_COLOR_FALLBACK = 'rgba(231, 166, 73, 0.35)';
+
 export const DocScanner: React.FC<Props> = ({
   onCapture,
-  overlayColor =
+  overlayColor = DEFAULT_OVERLAY_COLOR,
   autoCapture = true,
   minStableFrames = 8,
-
+  enableTorch = false,
+  quality,
+  useBase64 = false,
   children,
-
+  showGrid = true,
+  gridColor,
+  gridLineWidth = 2,
 }) => {
-  const
-  const
-  const { resize } = useResizePlugin();
-  const camera = useRef<CameraRef | null>(null);
-  const handleCameraRef = useCallback((ref: CameraRef | null) => {
-    camera.current = ref;
-  }, []);
+  const scannerRef = useRef<DocumentScannerHandle | null>(null);
+  const capturingRef = useRef(false);
   const [quad, setQuad] = useState<Point[] | null>(null);
-  const [
-
-  useEffect(() => {
-    requestPermission();
-  }, [requestPermission]);
-
-  const lastQuadRef = useRef<Point[] | null>(null);
-  const smoothingBufferRef = useRef<Point[][]>([]);
-  const anchorQuadRef = useRef<Point[] | null>(null);
-  const anchorMissesRef = useRef(0);
-  const anchorConfidenceRef = useRef(0);
-  const lastMeasurementRef = useRef<Point[] | null>(null);
-  const frameSizeRef = useRef<{ width: number; height: number } | null>(null);
-
-  // Detection parameters - configurable via props with sensible defaults
-  const PROCESSING_WIDTH = detectionConfig.processingWidth ?? 1280;
-  const CANNY_LOW = detectionConfig.cannyLowThreshold ?? 40;
-  const CANNY_HIGH = detectionConfig.cannyHighThreshold ?? 120;
-  const SNAP_DISTANCE = detectionConfig.snapDistance ?? 8;
-  const MAX_ANCHOR_MISSES = detectionConfig.maxAnchorMisses ?? 20;
-  const REJECT_CENTER_DELTA = detectionConfig.maxCenterDelta ?? 200;
+  const [frameSize, setFrameSize] = useState<{ width: number; height: number } | null>(null);

-
-
-
-
-  const MAX_CENTER_DELTA = 120;
-  const MAX_AREA_SHIFT = 0.55;
-  const HISTORY_RESET_DISTANCE = 90;
-  const MIN_AREA_RATIO = 0.0002;
-  const MAX_AREA_RATIO = 0.9;
-  const MIN_EDGE_RATIO = 0.015;
-  const MIN_CONFIDENCE_TO_HOLD = 2;
-  const MAX_ANCHOR_CONFIDENCE = 30;
+  const effectiveGridColor = useMemo(
+    () => gridColor ?? GRID_COLOR_FALLBACK,
+    [gridColor],
+  );

-  const
-
-
+  const handleLayout = useCallback((event: LayoutChangeEvent) => {
+    const { width, height } = event.nativeEvent.layout;
+    if (width > 0 && height > 0) {
+      setFrameSize({ width, height });
     }
+  }, []);

-
-
-    smoothingBufferRef.current = [];
-    lastMeasurementRef.current = null;
-  }
-
-  const anchor = anchorQuadRef.current;
-  const anchorConfidence = anchorConfidenceRef.current;
-
-  if (anchor && anchorConfidence >= MIN_CONFIDENCE_TO_HOLD) {
-    anchorMissesRef.current += 1;
-
-    if (anchorMissesRef.current <= MAX_ANCHOR_MISSES) {
-      anchorConfidenceRef.current = Math.max(1, anchorConfidence - 1);
-      lastQuadRef.current = anchor;
-      setQuad(anchor);
-      return true;
-    }
-  }
+  const handleRectangleDetect = useCallback((event: NativeRectangleEvent) => {
+    const coordinates = event?.rectangleCoordinates;

-
-    anchorConfidenceRef.current = 0;
-    anchorQuadRef.current = null;
-    lastQuadRef.current = null;
+    if (!coordinates) {
       setQuad(null);
-    return false;
-  };
-
-  if (!isValidQuad(value)) {
-    const handled = fallbackToAnchor(false);
-    if (handled) {
-      return;
-    }
-    return;
-  }
-
-  anchorMissesRef.current = 0;
-
-  const ordered = orderQuadPoints(value);
-  const sanitized = sanitizeQuad(ordered);
-
-  const frameSize = frameSizeRef.current;
-  const frameArea = frameSize ? frameSize.width * frameSize.height : null;
-  const area = quadArea(sanitized);
-  const edges = quadEdgeLengths(sanitized);
-  const minEdge = Math.min(...edges);
-  const maxEdge = Math.max(...edges);
-  const aspectRatio = maxEdge > 0 ? maxEdge / Math.max(minEdge, 1) : 0;
-
-  const minEdgeThreshold = frameSize
-    ? Math.max(14, Math.min(frameSize.width, frameSize.height) * MIN_EDGE_RATIO)
-    : 14;
-
-  const areaTooSmall = frameArea ? area < frameArea * MIN_AREA_RATIO : area === 0;
-  const areaTooLarge = frameArea ? area > frameArea * MAX_AREA_RATIO : false;
-  const edgesTooShort = minEdge < minEdgeThreshold;
-  const aspectTooExtreme = aspectRatio > 7;
-
-  if (areaTooSmall || areaTooLarge || edgesTooShort || aspectTooExtreme) {
-    const handled = fallbackToAnchor(true);
-    if (handled) {
-      return;
-    }
       return;
     }

-  const
-
-
+    const nextQuad: Point[] = [
+      coordinates.topLeft,
+      coordinates.topRight,
+      coordinates.bottomRight,
+      coordinates.bottomLeft,
+    ];

-
-
-    ? [...existingHistory.slice(existingHistory.length - (MAX_HISTORY - 1)), sanitized]
-    : [...existingHistory, sanitized];
-
-  const hasHistory = nextHistory.length >= 2;
-  let candidate = hasHistory ? weightedAverageQuad(nextHistory) : sanitized;
+    setQuad(nextQuad);
+  }, []);

-
-
-
-    const anchorCenter = quadCenter(anchor);
-    const candidateCenter = quadCenter(candidate);
-    const anchorArea = quadArea(anchor);
-    const candidateArea = quadArea(candidate);
-    const centerDelta = Math.hypot(candidateCenter.x - anchorCenter.x, candidateCenter.y - anchorCenter.y);
-    const areaShift = anchorArea > 0 ? Math.abs(anchorArea - candidateArea) / anchorArea : 0;
+  const handlePictureTaken = useCallback(
+    (event: NativeCaptureResult) => {
+      capturingRef.current = false;

-
-
-      lastMeasurementRef.current = sanitized;
-      anchorQuadRef.current = candidate;
-      anchorConfidenceRef.current = 1;
-      anchorMissesRef.current = 0;
-      lastQuadRef.current = candidate;
-      setQuad(candidate);
+      const path = event?.croppedImage ?? event?.initialImage;
+      if (!path) {
         return;
       }

-
-
-      smoothingBufferRef.current = nextHistory;
-      lastMeasurementRef.current = sanitized;
-      anchorConfidenceRef.current = Math.min(anchorConfidenceRef.current + 1, MAX_ANCHOR_CONFIDENCE);
-    } else if (delta <= BLEND_DISTANCE && centerDelta <= MAX_CENTER_DELTA && areaShift <= MAX_AREA_SHIFT) {
-      const normalizedDelta = Math.min(1, delta / BLEND_DISTANCE);
-      const adaptiveAlpha = 0.25 + normalizedDelta * 0.45; // 0.25..0.7 range
-      candidate = blendQuads(anchor, candidate, adaptiveAlpha);
-      smoothingBufferRef.current = nextHistory;
-      lastMeasurementRef.current = sanitized;
-      anchorConfidenceRef.current = Math.min(anchorConfidenceRef.current + 1, MAX_ANCHOR_CONFIDENCE);
-    } else {
-      const handled = fallbackToAnchor(true);
-      if (handled) {
-        return;
-      }
-      return;
-    }
-  } else {
-    smoothingBufferRef.current = nextHistory;
-    lastMeasurementRef.current = sanitized;
-    anchorConfidenceRef.current = Math.min(anchorConfidenceRef.current + 1, MAX_ANCHOR_CONFIDENCE);
-  }
-
-    candidate = orderQuadPoints(candidate);
-    anchorQuadRef.current = candidate;
-    lastQuadRef.current = candidate;
-    setQuad(candidate);
-    anchorMissesRef.current = 0;
-  }, []);
-
-  const reportError = useRunOnJS((step: string, error: unknown) => {
-    const message = error instanceof Error ? error.message : `${error}`;
-    console.warn(`[DocScanner] frame error at ${step}: ${message}`);
-  }, []);
-
-  const reportStage = useRunOnJS((_stage: string) => {
-    // Disabled for performance
-  }, []);
-
-  const [frameSize, setFrameSize] = useState<{ width: number; height: number } | null>(null);
-  const updateFrameSize = useRunOnJS((width: number, height: number) => {
-    frameSizeRef.current = { width, height };
-    setFrameSize({ width, height });
-  }, []);
-
-  const frameProcessor = useFrameProcessor((frame) => {
-    'worklet';
-
-    let step = 'start';
-
-    try {
-      // Report frame size for coordinate transformation
-      updateFrameSize(frame.width, frame.height);
+      const width = event?.width ?? frameSize?.width ?? 0;
+      const height = event?.height ?? frameSize?.height ?? 0;

-
-
-
-
-
-      reportStage(step);
-      const resized = resize(frame, {
-        dataType: 'uint8',
-        pixelFormat: 'bgr',
-        scale: { width: width, height: height },
+      onCapture?.({
+        path,
+        quad,
+        width,
+        height,
       });
+    },
+    [frameSize, onCapture, quad],
+  );

-
-
-
-
-      step = 'cvtColor';
-      reportStage(step);
-      OpenCV.invoke('cvtColor', mat, mat, ColorConversionCodes.COLOR_BGR2GRAY);
-
-      // Enhanced morphological operations for noise reduction
-      const morphologyKernel = OpenCV.createObject(ObjectType.Size, 7, 7);
-      step = 'getStructuringElement';
-      reportStage(step);
-      const element = OpenCV.invoke('getStructuringElement', MorphShapes.MORPH_RECT, morphologyKernel);
-      step = 'morphologyEx';
-      reportStage(step);
-      OpenCV.invoke('morphologyEx', mat, mat, MorphTypes.MORPH_CLOSE, element);
-      OpenCV.invoke('morphologyEx', mat, mat, MorphTypes.MORPH_OPEN, element);
-
-      const ADAPTIVE_THRESH_GAUSSIAN_C = 1;
-      const THRESH_BINARY = 0;
-      const THRESH_OTSU = 8;
-
-      // Bilateral filter for edge-preserving smoothing (better quality than Gaussian)
-      step = 'bilateralFilter';
-      reportStage(step);
-      let processed = mat;
-      try {
-        const tempMat = OpenCV.createObject(ObjectType.Mat);
-        OpenCV.invoke('bilateralFilter', mat, tempMat, 9, 75, 75);
-        processed = tempMat;
-      } catch (error) {
-        if (__DEV__) {
-          console.warn('[DocScanner] bilateralFilter unavailable, falling back to GaussianBlur', error);
-        }
-        const blurKernel = OpenCV.createObject(ObjectType.Size, 5, 5);
-        OpenCV.invoke('GaussianBlur', mat, mat, blurKernel, 0);
-        processed = mat;
-      }
-
-      // Additional blur and close pass to smooth jagged edges
-      step = 'gaussianBlur';
-      reportStage(step);
-      const gaussianKernel = OpenCV.createObject(ObjectType.Size, 5, 5);
-      OpenCV.invoke('GaussianBlur', processed, processed, gaussianKernel, 0);
-      OpenCV.invoke('morphologyEx', processed, processed, MorphTypes.MORPH_CLOSE, element);
-
-      const baseMat = OpenCV.invoke('clone', processed);
-      const frameArea = width * height;
-      const originalArea = frame.width * frame.height;
-      const minEdgeThreshold = Math.max(14, Math.min(frame.width, frame.height) * MIN_EDGE_RATIO);
-      const epsilonValues = [
-        0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009,
-        0.01, 0.012, 0.015, 0.018, 0.02, 0.025, 0.03, 0.035, 0.04, 0.05,
-        0.06, 0.07, 0.08, 0.09, 0.1, 0.12,
-      ];
-
-      let bestQuad: Point[] | null = null;
-      let bestArea = 0;
-      let convexHullWarned = false;
-
-      const considerCandidate = (candidate: { quad: Point[]; area: number } | null) => {
-        'worklet';
-        if (!candidate) {
-          return;
-        }
-        if (!bestQuad || candidate.area > bestArea) {
-          bestQuad = candidate.quad;
-          bestArea = candidate.area;
-        }
-      };
-
-      const evaluateContours = (inputMat: unknown, attemptLabel: string): { quad: Point[]; area: number } | null => {
-        'worklet';
-
-        step = `findContours_${attemptLabel}`;
-        reportStage(step);
-        const contours = OpenCV.createObject(ObjectType.PointVectorOfVectors);
-        OpenCV.invoke('findContours', inputMat, contours, RetrievalModes.RETR_EXTERNAL, ContourApproximationModes.CHAIN_APPROX_SIMPLE);
-
-        const contourVector = OpenCV.toJSValue(contours);
-        const contourArray = Array.isArray(contourVector?.array) ? contourVector.array : [];
-
-        let bestLocal: { quad: Point[]; area: number } | null = null;
-
-        for (let i = 0; i < contourArray.length; i += 1) {
-          step = `${attemptLabel}_contour_${i}_copy`;
-          reportStage(step);
-          const contour = OpenCV.copyObjectFromVector(contours, i);
-
-          step = `${attemptLabel}_contour_${i}_area`;
-          reportStage(step);
-          const { value: area } = OpenCV.invoke('contourArea', contour, false);
-          if (typeof area !== 'number' || !isFinite(area) || area < 60) {
-            continue;
-          }
-
-          const resizedRatio = area / frameArea;
-          if (resizedRatio < 0.00012 || resizedRatio > 0.98) {
-            continue;
-          }
-
-          let contourToUse = contour;
-          try {
-            const hull = OpenCV.createObject(ObjectType.PointVector);
-            OpenCV.invoke('convexHull', contour, hull, false, true);
-            contourToUse = hull;
-          } catch (err) {
-            if (__DEV__ && !convexHullWarned) {
-              console.warn('[DocScanner] convexHull failed, using original contour');
-              convexHullWarned = true;
-            }
-          }
-
-          const { value: perimeter } = OpenCV.invoke('arcLength', contourToUse, true);
-          if (typeof perimeter !== 'number' || !isFinite(perimeter) || perimeter < 80) {
-            continue;
-          }
-
-          const approx = OpenCV.createObject(ObjectType.PointVector);
-          let approxArray: Array<{ x: number; y: number }> = [];
-
-          for (let attempt = 0; attempt < epsilonValues.length; attempt += 1) {
-            const epsilon = epsilonValues[attempt] * perimeter;
-            step = `${attemptLabel}_contour_${i}_approx_${attempt}`;
-            reportStage(step);
-            OpenCV.invoke('approxPolyDP', contourToUse, approx, epsilon, true);
-
-            const approxValue = OpenCV.toJSValue(approx);
-            const candidate = Array.isArray(approxValue?.array) ? approxValue.array : [];
-            if (candidate.length === 4) {
-              approxArray = candidate as Array<{ x: number; y: number }>;
-              break;
-            }
-          }
-
-          if (approxArray.length !== 4) {
-            continue;
-          }
-
-          const isValidPoint = (pt: { x: number; y: number }) =>
-            typeof pt.x === 'number' && typeof pt.y === 'number' && isFinite(pt.x) && isFinite(pt.y);
-
-          if (!approxArray.every(isValidPoint)) {
-            continue;
-          }
-
-          const normalizedPoints: Point[] = approxArray.map((pt) => ({
-            x: pt.x / ratio,
-            y: pt.y / ratio,
-          }));
-
-          if (!isConvexQuadrilateral(normalizedPoints)) {
-            continue;
-          }
-
-          const sanitized = sanitizeQuad(orderQuadPoints(normalizedPoints));
-          if (!isValidQuad(sanitized)) {
-            continue;
-          }
-
-          const edges = quadEdgeLengths(sanitized);
-          const minEdge = Math.min(...edges);
-          const maxEdge = Math.max(...edges);
-          if (!Number.isFinite(minEdge) || minEdge < minEdgeThreshold) {
-            continue;
-          }
-          const aspectRatio = maxEdge / Math.max(minEdge, 1);
-          if (!Number.isFinite(aspectRatio) || aspectRatio > 8.5) {
-            continue;
-          }
-
-          const quadAreaValue = quadArea(sanitized);
-          const originalRatio = originalArea > 0 ? quadAreaValue / originalArea : 0;
-          if (originalRatio < 0.00012 || originalRatio > 0.92) {
-            continue;
-          }
-
-          const candidate = {
-            quad: sanitized,
-            area: quadAreaValue,
-          };
-
-          if (!bestLocal || candidate.area > bestLocal.area) {
-            bestLocal = candidate;
-          }
-        }
-
-        return bestLocal;
-      };
-
-      const runCanny = (label: string, low: number, high: number) => {
-        'worklet';
-        const working = OpenCV.invoke('clone', baseMat);
-        step = `${label}_canny`;
-        reportStage(step);
-        OpenCV.invoke('Canny', working, working, low, high);
-        OpenCV.invoke('morphologyEx', working, working, MorphTypes.MORPH_CLOSE, element);
-        considerCandidate(evaluateContours(working, label));
-      };
-
-      const runAdaptive = (label: string, blockSize: number, c: number) => {
-        'worklet';
-        const working = OpenCV.invoke('clone', baseMat);
-        step = `${label}_adaptive`;
-        reportStage(step);
-        OpenCV.invoke('adaptiveThreshold', working, working, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, blockSize, c);
-        OpenCV.invoke('morphologyEx', working, working, MorphTypes.MORPH_CLOSE, element);
-        considerCandidate(evaluateContours(working, label));
-      };
-
-      const runOtsu = () => {
-        'worklet';
-        const working = OpenCV.invoke('clone', baseMat);
-        step = 'otsu_threshold';
-        reportStage(step);
-        OpenCV.invoke('threshold', working, working, 0, 255, THRESH_BINARY | THRESH_OTSU);
-        OpenCV.invoke('morphologyEx', working, working, MorphTypes.MORPH_CLOSE, element);
-        considerCandidate(evaluateContours(working, 'otsu'));
-      };
-
-      runCanny('canny_primary', CANNY_LOW, CANNY_HIGH);
-      runCanny('canny_soft', Math.max(6, CANNY_LOW * 0.6), Math.max(CANNY_LOW * 1.2, CANNY_HIGH * 0.75));
-      runCanny('canny_hard', Math.max(12, CANNY_LOW * 1.1), CANNY_HIGH * 1.25);
-
-      runAdaptive('adaptive_19', 19, 7);
-      runAdaptive('adaptive_23', 23, 5);
-      runOtsu();
-
-      step = 'clearBuffers';
-      reportStage(step);
-      OpenCV.clearBuffers();
-      step = 'updateQuad';
-      reportStage(step);
-      updateQuad(bestQuad);
-    } catch (error) {
-      reportError(step, error);
+  const handleManualCapture = useCallback(() => {
+    if (autoCapture || capturingRef.current || !scannerRef.current) {
+      return;
     }
-  }, [resize, reportError, updateQuad]);
-
-  useEffect(() => {
-    const s = checkStability(quad);
-    setStable(s);
-  }, [quad]);
-
-  useEffect(() => {
-    const capture = async () => {
-      if (autoCapture && quad && stable >= minStableFrames && camera.current && frameSize) {
-        const photo = await camera.current.takePhoto({ qualityPrioritization: 'quality' });
-        onCapture?.({
-          path: photo.path,
-          quad,
-          width: frameSize.width,
-          height: frameSize.height,
-        });
-        setStable(0);
-      }
-    };

-
-
-
-
-
-
-
-
-  }
+    capturingRef.current = true;
+    scannerRef.current
+      .capture()
+      .catch((error) => {
+        console.warn('[DocScanner] manual capture failed', error);
+        capturingRef.current = false;
+      });
+  }, [autoCapture]);

   return (
-    <View style={{
-      <
-        ref={
-
-
-
-
-
-
-
+    <View style={styles.container} onLayout={handleLayout}>
+      <NativeDocumentScanner
+        ref={(instance) => {
+          scannerRef.current = instance as DocumentScannerHandle | null;
+        }}
+        style={StyleSheet.absoluteFill}
+        overlayColor="transparent"
+        detectionCountBeforeCapture={autoCapture ? minStableFrames : 10000}
+        enableTorch={enableTorch}
+        hideControls
+        useBase64={useBase64}
+        quality={quality}
+        onRectangleDetect={handleRectangleDetect}
+        onPictureTaken={handlePictureTaken}
+      />
+      <Overlay
+        quad={quad}
+        color={overlayColor}
+        frameSize={frameSize}
+        showGrid={showGrid}
+        gridColor={effectiveGridColor}
+        gridLineWidth={gridLineWidth}
       />
-      <Overlay quad={quad} color={overlayColor} frameSize={frameSize} />
       {!autoCapture && (
-        <TouchableOpacity
-          style={styles.button}
-          onPress={async () => {
-            if (!camera.current || !frameSize) {
-              return;
-            }
-
-            const photo = await camera.current.takePhoto({ qualityPrioritization: 'quality' });
-            onCapture?.({
-              path: photo.path,
-              quad,
-              width: frameSize.width,
-              height: frameSize.height,
-            });
-          }}
-        />
+        <TouchableOpacity style={styles.button} onPress={handleManualCapture} />
       )}
       {children}
     </View>
@@ -634,6 +204,9 @@ export const DocScanner: React.FC<Props> = ({
 };

 const styles = StyleSheet.create({
+  container: {
+    flex: 1,
+  },
   button: {
     position: 'absolute',
     bottom: 40,