react-native-rectangle-doc-scanner 0.44.0 → 0.46.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +160 -36
- package/dist/CropEditor.d.ts +25 -0
- package/dist/CropEditor.js +113 -0
- package/dist/DocScanner.d.ts +21 -0
- package/dist/DocScanner.js +45 -23
- package/dist/index.d.ts +5 -1
- package/dist/index.js +12 -15
- package/dist/types.d.ts +13 -0
- package/dist/utils/coordinate.d.ts +19 -0
- package/dist/utils/coordinate.js +58 -0
- package/package.json +2 -1
- package/src/CropEditor.tsx +134 -0
- package/src/DocScanner.tsx +68 -23
- package/src/external.d.ts +25 -0
- package/src/index.ts +15 -1
- package/src/types.ts +16 -0
- package/src/utils/coordinate.ts +71 -0
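At a glance, the new files add a post-capture crop-editing flow, coordinate-conversion utilities, and a configurable detection pipeline. A minimal sketch of the public surface after this release, reconstructed from the diffs below (everything shown is exported from the package's `src/index.ts`):

```ts
import {
  DocScanner,              // scanner component; onCapture now also reports frame width/height
  CropEditor,              // new post-capture corner-adjustment UI
  quadToRectangle,         // new coordinate helpers
  rectangleToQuad,
  scaleCoordinates,
  scaleRectangle,
  type CapturedDocument,   // { path, quad, width, height }
  type DetectionConfig,    // tuning knobs for the detection pipeline
  type Point,
  type Quad,
  type Rectangle,
} from 'react-native-rectangle-doc-scanner';
```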
package/README.md
CHANGED

@@ -5,6 +5,10 @@ VisionCamera + Fast-OpenCV powered document scanner template built for React Nat
 ## Features
 - Real-time quad detection using `react-native-fast-opencv`
 - Frame processor worklet executed on the UI thread via `react-native-vision-camera`
+- High-resolution processing (1280p) for accurate corner detection
+- Advanced anchor locking system maintains corner positions during camera movement
+- Intelligent edge detection with optimized Canny parameters (50/150 thresholds)
+- Adaptive smoothing with weighted averaging across multiple frames
 - Resize plugin keeps frame processing fast on lower-end devices
 - Skia overlay visualises detected document contours
 - Stability tracker enables auto-capture once the document is steady
@@ -15,6 +19,7 @@ Install the module alongside these peer dependencies (your host app should alrea
 - `react-native-vision-camera` (v3+) with frame processors enabled
 - `vision-camera-resize-plugin`
 - `react-native-fast-opencv`
+- `react-native-perspective-image-cropper`
 - `react-native-reanimated` + `react-native-worklets-core`
 - `@shopify/react-native-skia`
 - `react`, `react-native`
@@ -26,6 +31,7 @@ yarn add react-native-rectangle-doc-scanner \
   react-native-vision-camera \
   vision-camera-resize-plugin \
   react-native-fast-opencv \
+  react-native-perspective-image-cropper \
   react-native-reanimated \
   react-native-worklets-core \
   @shopify/react-native-skia
@@ -48,58 +54,133 @@ Follow each dependency’s native installation guide:
 
 ## Usage
 
+### Basic Document Scanning
+
 ```tsx
-import React from 'react';
+import React, { useState } from 'react';
 import { StyleSheet, Text, TouchableOpacity, View } from 'react-native';
-import { DocScanner } from 'react-native-rectangle-doc-scanner';
-
-export const ScanScreen = () =>
-  <…
-…
+import { DocScanner, CropEditor, type CapturedDocument } from 'react-native-rectangle-doc-scanner';
+
+export const ScanScreen = () => {
+  const [capturedDoc, setCapturedDoc] = useState<CapturedDocument | null>(null);
+
+  if (capturedDoc) {
+    // Show crop editor after capture
+    return (
+      <CropEditor
+        document={capturedDoc}
+        overlayColor="rgba(0,0,0,0.5)"
+        overlayStrokeColor="#e7a649"
+        handlerColor="#e7a649"
+        onCropChange={(rectangle) => {
+          console.log('User adjusted corners:', rectangle);
+          // Process the adjusted corners
+        }}
+      />
+    );
+  }
+
+  return (
+    <View style={styles.container}>
+      <DocScanner
+        onCapture={(doc) => {
+          console.log('Document captured:', doc);
+          setCapturedDoc(doc);
+        }}
+        overlayColor="#e7a649"
+        autoCapture
+        minStableFrames={8}
+        cameraProps={{ enableZoomGesture: true }}
+      >
+        <View style={styles.overlayControls}>
+          <Text style={styles.hint}>Position document in frame</Text>
+        </View>
+      </DocScanner>
+    </View>
+  );
+};
 
 const styles = StyleSheet.create({
   container: { flex: 1 },
   overlayControls: {
     position: 'absolute',
-…
+    top: 60,
     alignSelf: 'center',
   },
-…
+  hint: {
+    color: '#fff',
+    fontSize: 16,
+    fontWeight: '600',
+    textShadowColor: 'rgba(0,0,0,0.75)',
+    textShadowOffset: { width: 0, height: 1 },
+    textShadowRadius: 3,
   },
-  label: { color: '#fff', fontWeight: '600' },
 });
 ```
 
+### Advanced Configuration
+
+```tsx
+import { DocScanner, type DetectionConfig } from 'react-native-rectangle-doc-scanner';
+
+const detectionConfig: DetectionConfig = {
+  processingWidth: 1280,   // Higher = more accurate but slower
+  cannyLowThreshold: 40,   // Lower = detect more edges
+  cannyHighThreshold: 120, // Edge strength threshold
+  snapDistance: 8,         // Corner lock sensitivity
+  maxAnchorMisses: 20,     // Frames to hold anchor when detection fails
+  maxCenterDelta: 200,     // Max camera movement while maintaining lock
+};
+
+<DocScanner
+  detectionConfig={detectionConfig}
+  onCapture={(doc) => {
+    // doc includes: path, quad, width, height
+    console.log('Captured with size:', doc.width, 'x', doc.height);
+  }}
+/>
+```
+
 Passing `children` lets you render any UI on top of the camera preview, so you can freely add buttons, tutorials, or progress indicators without modifying the package.
 
-…
+## API Reference
+
+### DocScanner Props
+
+- `onCapture({ path, quad, width, height })` — called when a photo is taken
+  - `path`: file path to the captured image
+  - `quad`: detected corner coordinates (or `null` if none found)
+  - `width`, `height`: original frame dimensions for coordinate scaling
+- `overlayColor` (default `#e7a649`) — stroke color for the contour overlay
+- `autoCapture` (default `true`) — auto-captures after stability is reached
+- `minStableFrames` (default `8`) — consecutive stable frames required before auto capture
+- `cameraProps` — forwarded to underlying `Camera` (zoom, HDR, torch, etc.)
+- `children` — custom UI rendered over the camera preview
+- `detectionConfig` — advanced detection configuration (see below)
+
+### DetectionConfig
+
+Fine-tune the detection algorithm for your specific use case:
+
+```typescript
+interface DetectionConfig {
+  processingWidth?: number;    // Default: 1280 (higher = more accurate but slower)
+  cannyLowThreshold?: number;  // Default: 40 (lower = detect more edges)
+  cannyHighThreshold?: number; // Default: 120 (edge strength threshold)
+  snapDistance?: number;       // Default: 8 (corner lock sensitivity in pixels)
+  maxAnchorMisses?: number;    // Default: 20 (frames to hold anchor when detection fails)
+  maxCenterDelta?: number;     // Default: 200 (max camera movement while maintaining lock)
+}
+```
+
+### CropEditor Props
 
-- `…
-- `overlayColor` (default…
-- `…
-- `…
-- `…
-- `…
+- `document` — `CapturedDocument` object from `onCapture` callback
+- `overlayColor` (default `rgba(0,0,0,0.5)`) — color of overlay outside crop area
+- `overlayStrokeColor` (default `#e7a649`) — color of crop boundary lines
+- `handlerColor` (default `#e7a649`) — color of corner drag handles
+- `enablePanStrict` (default `false`) — enable strict panning behavior
+- `onCropChange(rectangle)` — callback when user adjusts corners
 
 ### Notes on camera behaviour
 
@@ -107,6 +188,49 @@ Passing `children` lets you render any UI on top of the camera preview, so you c
 - The internal frame processor handles document detection; do not override `frameProcessor` in `cameraProps`.
 - Adjust `minStableFrames` or tweak lighting conditions if auto capture is too sensitive or too slow.
 
+## Detection Algorithm
+
+The scanner uses a sophisticated multi-stage pipeline optimized for quality and stability:
+
+### 1. Pre-processing (Configurable Resolution)
+- Resizes frame to `processingWidth` (default 1280p) for optimal accuracy
+- Converts to grayscale
+- **Enhanced morphological operations**:
+  - MORPH_CLOSE to fill small holes in edges (7x7 kernel)
+  - MORPH_OPEN to remove small noise
+- **Bilateral filter** for edge-preserving smoothing (better than Gaussian)
+- **Adaptive Canny edge detection** with configurable thresholds (default 40/120)
+
+### 2. Contour Detection
+- Finds external contours using CHAIN_APPROX_SIMPLE
+- Applies convex hull for improved corner accuracy
+- Tests **23 epsilon values** (0.1%-10%) for approxPolyDP to find exact 4 corners
+- Validates quadrilaterals for convexity and valid coordinates
+
+### 3. Advanced Anchor Locking System
+Once corners are detected, the system maintains stability through:
+- **Snap locking**: Corners lock to positions when movement is minimal
+- **Camera movement tolerance**: Maintains lock during movement (up to 200px center delta)
+- **Persistence**: Holds anchor for up to 20 consecutive failed detections
+- **Adaptive blending**: Smoothly transitions between old and new positions
+- **Confidence building**: Increases lock strength over time (max 30 frames)
+- **Intelligent reset**: Only resets when document clearly changes
+
+### 4. Quad Validation
+- Area ratio filtering (0.02%-90% of frame)
+- Minimum edge length validation
+- Aspect ratio constraints (max 7:1)
+- Convexity checks to filter invalid shapes
+
+### 5. Post-Capture Editing
+After capture, users can manually adjust corners using the `CropEditor` component:
+- Grid-based interface with perspective view
+- Draggable corner handles
+- Real-time preview of adjusted crop area
+- Exports adjusted coordinates for final processing
+
+This multi-layered approach ensures high-quality detection with maximum flexibility for various document types and lighting conditions.
+
 ## Build
 ```sh
 yarn build

package/dist/CropEditor.d.ts
ADDED

@@ -0,0 +1,25 @@
+import React from 'react';
+import type { Rectangle, CapturedDocument } from './types';
+interface CropEditorProps {
+    document: CapturedDocument;
+    overlayColor?: string;
+    overlayStrokeColor?: string;
+    handlerColor?: string;
+    enablePanStrict?: boolean;
+    onCropChange?: (rectangle: Rectangle) => void;
+}
+/**
+ * CropEditor Component
+ *
+ * Displays a captured document image with adjustable corner handles.
+ * Uses react-native-perspective-image-cropper for the cropping UI.
+ *
+ * @param document - The captured document with path and detected quad
+ * @param overlayColor - Color of the overlay outside the crop area (default: 'rgba(0,0,0,0.5)')
+ * @param overlayStrokeColor - Color of the crop boundary lines (default: '#e7a649')
+ * @param handlerColor - Color of the corner handles (default: '#e7a649')
+ * @param enablePanStrict - Enable strict panning behavior
+ * @param onCropChange - Callback when user adjusts crop corners
+ */
+export declare const CropEditor: React.FC<CropEditorProps>;
+export {};

package/dist/CropEditor.js
ADDED

@@ -0,0 +1,113 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CropEditor = void 0;
+const react_1 = __importStar(require("react"));
+const react_native_1 = require("react-native");
+const react_native_perspective_image_cropper_1 = require("react-native-perspective-image-cropper");
+const coordinate_1 = require("./utils/coordinate");
+/**
+ * CropEditor Component
+ *
+ * Displays a captured document image with adjustable corner handles.
+ * Uses react-native-perspective-image-cropper for the cropping UI.
+ *
+ * @param document - The captured document with path and detected quad
+ * @param overlayColor - Color of the overlay outside the crop area (default: 'rgba(0,0,0,0.5)')
+ * @param overlayStrokeColor - Color of the crop boundary lines (default: '#e7a649')
+ * @param handlerColor - Color of the corner handles (default: '#e7a649')
+ * @param enablePanStrict - Enable strict panning behavior
+ * @param onCropChange - Callback when user adjusts crop corners
+ */
+const CropEditor = ({ document, overlayColor = 'rgba(0,0,0,0.5)', overlayStrokeColor = '#e7a649', handlerColor = '#e7a649', enablePanStrict = false, onCropChange, }) => {
+    const [imageSize, setImageSize] = (0, react_1.useState)(null);
+    const [displaySize, setDisplaySize] = (0, react_1.useState)({
+        width: react_native_1.Dimensions.get('window').width,
+        height: react_native_1.Dimensions.get('window').height,
+    });
+    // Get initial rectangle from detected quad or use default
+    const getInitialRectangle = (0, react_1.useCallback)(() => {
+        if (!document.quad || !imageSize) {
+            return undefined;
+        }
+        const rect = (0, coordinate_1.quadToRectangle)(document.quad);
+        if (!rect) {
+            return undefined;
+        }
+        // Scale from original detection coordinates to image coordinates
+        const scaled = (0, coordinate_1.scaleRectangle)(rect, document.width, document.height, imageSize.width, imageSize.height);
+        return scaled;
+    }, [document.quad, document.width, document.height, imageSize]);
+    const handleImageLoad = (0, react_1.useCallback)((event) => {
+        const { width, height } = event.nativeEvent.source;
+        setImageSize({ width, height });
+    }, []);
+    const handleLayout = (0, react_1.useCallback)((event) => {
+        const { width, height } = event.nativeEvent.layout;
+        setDisplaySize({ width, height });
+    }, []);
+    const handleDragEnd = (0, react_1.useCallback)((coordinates) => {
+        if (!imageSize) {
+            return;
+        }
+        // Convert back to Rectangle type
+        const rect = {
+            topLeft: coordinates.topLeft,
+            topRight: coordinates.topRight,
+            bottomRight: coordinates.bottomRight,
+            bottomLeft: coordinates.bottomLeft,
+        };
+        onCropChange?.(rect);
+    }, [imageSize, onCropChange]);
+    // Wait for image to load to get dimensions
+    if (!imageSize) {
+        return (react_1.default.createElement(react_native_1.View, { style: styles.container, onLayout: handleLayout },
+            react_1.default.createElement(react_native_1.Image, { source: { uri: `file://${document.path}` }, style: styles.hiddenImage, onLoad: handleImageLoad, resizeMode: "contain" })));
+    }
+    return (react_1.default.createElement(react_native_1.View, { style: styles.container, onLayout: handleLayout },
+        react_1.default.createElement(react_native_perspective_image_cropper_1.CustomImageCropper, { height: displaySize.height, width: displaySize.width, image: `file://${document.path}`, rectangleCoordinates: getInitialRectangle(), overlayColor: overlayColor, overlayStrokeColor: overlayStrokeColor, handlerColor: handlerColor, enablePanStrict: enablePanStrict, onDragEnd: handleDragEnd })));
+};
+exports.CropEditor = CropEditor;
+const styles = react_native_1.StyleSheet.create({
+    container: {
+        flex: 1,
+        backgroundColor: '#000',
+    },
+    hiddenImage: {
+        width: 1,
+        height: 1,
+        opacity: 0,
+    },
+});

package/dist/DocScanner.d.ts
CHANGED

@@ -2,16 +2,37 @@ import React, { ReactNode } from 'react';
 import { Camera } from 'react-native-vision-camera';
 import type { Point } from './types';
 type CameraOverrides = Omit<React.ComponentProps<typeof Camera>, 'style' | 'ref' | 'frameProcessor'>;
+/**
+ * Configuration for detection quality and behavior
+ */
+export interface DetectionConfig {
+    /** Processing resolution width (default: 1280) - higher = more accurate but slower */
+    processingWidth?: number;
+    /** Canny edge detection lower threshold (default: 40) */
+    cannyLowThreshold?: number;
+    /** Canny edge detection upper threshold (default: 120) */
+    cannyHighThreshold?: number;
+    /** Snap distance in pixels for corner locking (default: 8) */
+    snapDistance?: number;
+    /** Max frames to hold anchor when detection fails (default: 20) */
+    maxAnchorMisses?: number;
+    /** Maximum center movement allowed while maintaining lock (default: 200px) */
+    maxCenterDelta?: number;
+}
 interface Props {
     onCapture?: (photo: {
         path: string;
         quad: Point[] | null;
+        width: number;
+        height: number;
     }) => void;
     overlayColor?: string;
     autoCapture?: boolean;
     minStableFrames?: number;
     cameraProps?: CameraOverrides;
     children?: ReactNode;
+    /** Advanced detection configuration */
+    detectionConfig?: DetectionConfig;
 }
 export declare const DocScanner: React.FC<Props>;
 export {};

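The widened `onCapture` payload above carries the frame dimensions precisely so the detected quad can be rescaled out of frame space. A hedged sketch of one way a host app might use it together with the new `scaleCoordinates` helper (`Image.getSize` is standard React Native; `quadInPhotoSpace` is an illustrative name, not part of the package):

```ts
import { Image } from 'react-native';
import { scaleCoordinates, type CapturedDocument, type Point } from 'react-native-rectangle-doc-scanner';

// Map the detected quad from frame coordinates into the captured photo's pixel coordinates.
const quadInPhotoSpace = (doc: CapturedDocument): Promise<Point[] | null> =>
  new Promise((resolve, reject) => {
    const quad = doc.quad;
    if (!quad) {
      resolve(null);
      return;
    }
    Image.getSize(
      `file://${doc.path}`,
      (photoWidth, photoHeight) =>
        resolve(scaleCoordinates(quad, doc.width, doc.height, photoWidth, photoHeight)),
      reject
    );
  });
```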
package/dist/DocScanner.js
CHANGED

@@ -79,7 +79,7 @@ const isConvexQuadrilateral = (points) => {
         return false;
     }
 };
-const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, minStableFrames = 8, cameraProps, children, }) => {
+const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, minStableFrames = 8, cameraProps, children, detectionConfig = {}, }) => {
     const device = (0, react_native_vision_camera_1.useCameraDevice)('back');
     const { hasPermission, requestPermission } = (0, react_native_vision_camera_1.useCameraPermission)();
     const { resize } = (0, vision_camera_resize_plugin_1.useResizePlugin)();
@@ -99,20 +99,25 @@ const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, m
     const anchorConfidenceRef = (0, react_1.useRef)(0);
     const lastMeasurementRef = (0, react_1.useRef)(null);
     const frameSizeRef = (0, react_1.useRef)(null);
+    // Detection parameters - configurable via props with sensible defaults
+    const PROCESSING_WIDTH = detectionConfig.processingWidth ?? 1280;
+    const CANNY_LOW = detectionConfig.cannyLowThreshold ?? 40;
+    const CANNY_HIGH = detectionConfig.cannyHighThreshold ?? 120;
+    const SNAP_DISTANCE = detectionConfig.snapDistance ?? 8;
+    const MAX_ANCHOR_MISSES = detectionConfig.maxAnchorMisses ?? 20;
+    const REJECT_CENTER_DELTA = detectionConfig.maxCenterDelta ?? 200;
+    // Fixed parameters for algorithm stability
     const MAX_HISTORY = 5;
-    const…
-    const…
-    const…
-    const…
-    const…
-    const MAX_AREA_SHIFT = 0.45;
-    const HISTORY_RESET_DISTANCE = 70;
+    const SNAP_CENTER_DISTANCE = 18;
+    const BLEND_DISTANCE = 80;
+    const MAX_CENTER_DELTA = 120;
+    const MAX_AREA_SHIFT = 0.55;
+    const HISTORY_RESET_DISTANCE = 90;
     const MIN_AREA_RATIO = 0.0002;
     const MAX_AREA_RATIO = 0.9;
     const MIN_EDGE_RATIO = 0.015;
-    const MAX_ANCHOR_MISSES = 12;
     const MIN_CONFIDENCE_TO_HOLD = 2;
-    const MAX_ANCHOR_CONFIDENCE =…
+    const MAX_ANCHOR_CONFIDENCE = 30;
     const updateQuad = (0, react_native_worklets_core_1.useRunOnJS)((value) => {
         if (__DEV__) {
             console.log('[DocScanner] quad', value);
@@ -249,8 +254,8 @@ const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, m
         try {
             // Report frame size for coordinate transformation
             updateFrameSize(frame.width, frame.height);
-            // Use…
-            const ratio =…
+            // Use configurable resolution for accuracy vs performance balance
+            const ratio = PROCESSING_WIDTH / frame.width;
             const width = Math.floor(frame.width * ratio);
             const height = Math.floor(frame.height * ratio);
             step = 'resize';
@@ -262,24 +267,31 @@ const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, m
             });
             step = 'frameBufferToMat';
             reportStage(step);
-…
+            let mat = react_native_fast_opencv_1.OpenCV.frameBufferToMat(height, width, 3, resized);
             step = 'cvtColor';
             reportStage(step);
             react_native_fast_opencv_1.OpenCV.invoke('cvtColor', mat, mat, react_native_fast_opencv_1.ColorConversionCodes.COLOR_BGR2GRAY);
-…
+            // Enhanced morphological operations for noise reduction
+            const morphologyKernel = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.Size, 7, 7);
             step = 'getStructuringElement';
             reportStage(step);
             const element = react_native_fast_opencv_1.OpenCV.invoke('getStructuringElement', react_native_fast_opencv_1.MorphShapes.MORPH_RECT, morphologyKernel);
             step = 'morphologyEx';
             reportStage(step);
+            // MORPH_CLOSE to fill small holes in edges
+            react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', mat, mat, react_native_fast_opencv_1.MorphTypes.MORPH_CLOSE, element);
+            // MORPH_OPEN to remove small noise
             react_native_fast_opencv_1.OpenCV.invoke('morphologyEx', mat, mat, react_native_fast_opencv_1.MorphTypes.MORPH_OPEN, element);
-…
-            step = '…
+            // Bilateral filter for edge-preserving smoothing (better quality than Gaussian)
+            step = 'bilateralFilter';
             reportStage(step);
-            react_native_fast_opencv_1.OpenCV.…
+            const tempMat = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.Mat);
+            react_native_fast_opencv_1.OpenCV.invoke('bilateralFilter', mat, tempMat, 9, 75, 75);
+            mat = tempMat;
             step = 'Canny';
             reportStage(step);
-…
+            // Configurable Canny parameters for adaptive edge detection
+            react_native_fast_opencv_1.OpenCV.invoke('Canny', mat, mat, CANNY_LOW, CANNY_HIGH);
             step = 'createContours';
             reportStage(step);
             const contours = react_native_fast_opencv_1.OpenCV.createObject(react_native_fast_opencv_1.ObjectType.PointVectorOfVectors);
@@ -417,14 +429,19 @@ const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, m
     }, [quad]);
     (0, react_1.useEffect)(() => {
         const capture = async () => {
-            if (autoCapture && quad && stable >= minStableFrames && camera.current) {
+            if (autoCapture && quad && stable >= minStableFrames && camera.current && frameSize) {
                 const photo = await camera.current.takePhoto({ qualityPrioritization: 'quality' });
-                onCapture?.({…
+                onCapture?.({
+                    path: photo.path,
+                    quad,
+                    width: frameSize.width,
+                    height: frameSize.height,
+                });
                 setStable(0);
             }
         };
         capture();
-    }, [autoCapture, minStableFrames, onCapture, quad, stable]);
+    }, [autoCapture, minStableFrames, onCapture, quad, stable, frameSize]);
     const { device: overrideDevice, ...cameraRestProps } = cameraProps ?? {};
     const resolvedDevice = overrideDevice ?? device;
     if (!resolvedDevice || !hasPermission) {
@@ -434,11 +451,16 @@ const DocScanner = ({ onCapture, overlayColor = '#e7a649', autoCapture = true, m
         react_1.default.createElement(react_native_vision_camera_1.Camera, { ref: handleCameraRef, style: react_native_1.StyleSheet.absoluteFillObject, device: resolvedDevice, isActive: true, photo: true, frameProcessor: frameProcessor, frameProcessorFps: 15, ...cameraRestProps }),
         react_1.default.createElement(overlay_1.Overlay, { quad: quad, color: overlayColor, frameSize: frameSize }),
         !autoCapture && (react_1.default.createElement(react_native_1.TouchableOpacity, { style: styles.button, onPress: async () => {
-                if (!camera.current) {
+                if (!camera.current || !frameSize) {
                     return;
                 }
                 const photo = await camera.current.takePhoto({ qualityPrioritization: 'quality' });
-                onCapture?.({…
+                onCapture?.({
+                    path: photo.path,
+                    quad,
+                    width: frameSize.width,
+                    height: frameSize.height,
+                });
             } })),
         children));
 };

package/dist/index.d.ts
CHANGED

@@ -1 +1,5 @@
-export…
+export { DocScanner } from './DocScanner';
+export { CropEditor } from './CropEditor';
+export type { Point, Quad, Rectangle, CapturedDocument } from './types';
+export type { DetectionConfig } from './DocScanner';
+export { quadToRectangle, rectangleToQuad, scaleCoordinates, scaleRectangle, } from './utils/coordinate';

package/dist/index.js
CHANGED

@@ -1,17 +1,14 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-…
+exports.scaleRectangle = exports.scaleCoordinates = exports.rectangleToQuad = exports.quadToRectangle = exports.CropEditor = exports.DocScanner = void 0;
+// Main components
+var DocScanner_1 = require("./DocScanner");
+Object.defineProperty(exports, "DocScanner", { enumerable: true, get: function () { return DocScanner_1.DocScanner; } });
+var CropEditor_1 = require("./CropEditor");
+Object.defineProperty(exports, "CropEditor", { enumerable: true, get: function () { return CropEditor_1.CropEditor; } });
+// Utilities
+var coordinate_1 = require("./utils/coordinate");
+Object.defineProperty(exports, "quadToRectangle", { enumerable: true, get: function () { return coordinate_1.quadToRectangle; } });
+Object.defineProperty(exports, "rectangleToQuad", { enumerable: true, get: function () { return coordinate_1.rectangleToQuad; } });
+Object.defineProperty(exports, "scaleCoordinates", { enumerable: true, get: function () { return coordinate_1.scaleCoordinates; } });
+Object.defineProperty(exports, "scaleRectangle", { enumerable: true, get: function () { return coordinate_1.scaleRectangle; } });

package/dist/types.d.ts
CHANGED

@@ -2,3 +2,16 @@ export type Point = {
     x: number;
     y: number;
 };
+export type Quad = [Point, Point, Point, Point];
+export type Rectangle = {
+    topLeft: Point;
+    topRight: Point;
+    bottomRight: Point;
+    bottomLeft: Point;
+};
+export type CapturedDocument = {
+    path: string;
+    quad: Point[] | null;
+    width: number;
+    height: number;
+};

package/dist/utils/coordinate.d.ts
ADDED

@@ -0,0 +1,19 @@
+import type { Point, Rectangle } from '../types';
+/**
+ * Convert quad points array to Rectangle format for perspective cropper
+ * Assumes quad points are ordered: [topLeft, topRight, bottomRight, bottomLeft]
+ */
+export declare const quadToRectangle: (quad: Point[]) => Rectangle | null;
+/**
+ * Convert Rectangle format back to quad points array
+ */
+export declare const rectangleToQuad: (rect: Rectangle) => Point[];
+/**
+ * Scale coordinates from one dimension to another
+ * Useful when image dimensions differ from display dimensions
+ */
+export declare const scaleCoordinates: (points: Point[], fromWidth: number, fromHeight: number, toWidth: number, toHeight: number) => Point[];
+/**
+ * Scale a rectangle
+ */
+export declare const scaleRectangle: (rect: Rectangle, fromWidth: number, fromHeight: number, toWidth: number, toHeight: number) => Rectangle;

package/dist/utils/coordinate.js
ADDED

@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.scaleRectangle = exports.scaleCoordinates = exports.rectangleToQuad = exports.quadToRectangle = void 0;
+/**
+ * Convert quad points array to Rectangle format for perspective cropper
+ * Assumes quad points are ordered: [topLeft, topRight, bottomRight, bottomLeft]
+ */
+const quadToRectangle = (quad) => {
+    if (!quad || quad.length !== 4) {
+        return null;
+    }
+    return {
+        topLeft: quad[0],
+        topRight: quad[1],
+        bottomRight: quad[2],
+        bottomLeft: quad[3],
+    };
+};
+exports.quadToRectangle = quadToRectangle;
+/**
+ * Convert Rectangle format back to quad points array
+ */
+const rectangleToQuad = (rect) => {
+    return [
+        rect.topLeft,
+        rect.topRight,
+        rect.bottomRight,
+        rect.bottomLeft,
+    ];
+};
+exports.rectangleToQuad = rectangleToQuad;
+/**
+ * Scale coordinates from one dimension to another
+ * Useful when image dimensions differ from display dimensions
+ */
+const scaleCoordinates = (points, fromWidth, fromHeight, toWidth, toHeight) => {
+    const scaleX = toWidth / fromWidth;
+    const scaleY = toHeight / fromHeight;
+    return points.map(p => ({
+        x: p.x * scaleX,
+        y: p.y * scaleY,
+    }));
+};
+exports.scaleCoordinates = scaleCoordinates;
+/**
+ * Scale a rectangle
+ */
+const scaleRectangle = (rect, fromWidth, fromHeight, toWidth, toHeight) => {
+    const scaleX = toWidth / fromWidth;
+    const scaleY = toHeight / fromHeight;
+    return {
+        topLeft: { x: rect.topLeft.x * scaleX, y: rect.topLeft.y * scaleY },
+        topRight: { x: rect.topRight.x * scaleX, y: rect.topRight.y * scaleY },
+        bottomRight: { x: rect.bottomRight.x * scaleX, y: rect.bottomRight.y * scaleY },
+        bottomLeft: { x: rect.bottomLeft.x * scaleX, y: rect.bottomLeft.y * scaleY },
+    };
+};
+exports.scaleRectangle = scaleRectangle;

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "react-native-rectangle-doc-scanner",
-  "version": "0.44.0",
+  "version": "0.46.0",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "repository": {
@@ -20,6 +20,7 @@
     "react": "*",
     "react-native": "*",
     "react-native-fast-opencv": "*",
+    "react-native-perspective-image-cropper": "*",
     "react-native-reanimated": "*",
     "react-native-vision-camera": "*",
     "react-native-worklets-core": "*",

package/src/CropEditor.tsx
ADDED

@@ -0,0 +1,134 @@
+import React, { useState, useCallback } from 'react';
+import { View, StyleSheet, Image, Dimensions } from 'react-native';
+import { CustomImageCropper } from 'react-native-perspective-image-cropper';
+import type { Rectangle as CropperRectangle } from 'react-native-perspective-image-cropper';
+import type { Point, Rectangle, CapturedDocument } from './types';
+import { quadToRectangle, scaleRectangle } from './utils/coordinate';
+
+interface CropEditorProps {
+  document: CapturedDocument;
+  overlayColor?: string;
+  overlayStrokeColor?: string;
+  handlerColor?: string;
+  enablePanStrict?: boolean;
+  onCropChange?: (rectangle: Rectangle) => void;
+}
+
+/**
+ * CropEditor Component
+ *
+ * Displays a captured document image with adjustable corner handles.
+ * Uses react-native-perspective-image-cropper for the cropping UI.
+ *
+ * @param document - The captured document with path and detected quad
+ * @param overlayColor - Color of the overlay outside the crop area (default: 'rgba(0,0,0,0.5)')
+ * @param overlayStrokeColor - Color of the crop boundary lines (default: '#e7a649')
+ * @param handlerColor - Color of the corner handles (default: '#e7a649')
+ * @param enablePanStrict - Enable strict panning behavior
+ * @param onCropChange - Callback when user adjusts crop corners
+ */
+export const CropEditor: React.FC<CropEditorProps> = ({
+  document,
+  overlayColor = 'rgba(0,0,0,0.5)',
+  overlayStrokeColor = '#e7a649',
+  handlerColor = '#e7a649',
+  enablePanStrict = false,
+  onCropChange,
+}) => {
+  const [imageSize, setImageSize] = useState<{ width: number; height: number } | null>(null);
+  const [displaySize, setDisplaySize] = useState<{ width: number; height: number }>({
+    width: Dimensions.get('window').width,
+    height: Dimensions.get('window').height,
+  });
+
+  // Get initial rectangle from detected quad or use default
+  const getInitialRectangle = useCallback((): CropperRectangle | undefined => {
+    if (!document.quad || !imageSize) {
+      return undefined;
+    }
+
+    const rect = quadToRectangle(document.quad);
+    if (!rect) {
+      return undefined;
+    }
+
+    // Scale from original detection coordinates to image coordinates
+    const scaled = scaleRectangle(
+      rect,
+      document.width,
+      document.height,
+      imageSize.width,
+      imageSize.height
+    );
+
+    return scaled as CropperRectangle;
+  }, [document.quad, document.width, document.height, imageSize]);
+
+  const handleImageLoad = useCallback((event: any) => {
+    const { width, height } = event.nativeEvent.source;
+    setImageSize({ width, height });
+  }, []);
+
+  const handleLayout = useCallback((event: any) => {
+    const { width, height } = event.nativeEvent.layout;
+    setDisplaySize({ width, height });
+  }, []);
+
+  const handleDragEnd = useCallback((coordinates: CropperRectangle) => {
+    if (!imageSize) {
+      return;
+    }
+
+    // Convert back to Rectangle type
+    const rect: Rectangle = {
+      topLeft: coordinates.topLeft,
+      topRight: coordinates.topRight,
+      bottomRight: coordinates.bottomRight,
+      bottomLeft: coordinates.bottomLeft,
+    };
+
+    onCropChange?.(rect);
+  }, [imageSize, onCropChange]);
+
+  // Wait for image to load to get dimensions
+  if (!imageSize) {
+    return (
+      <View style={styles.container} onLayout={handleLayout}>
+        <Image
+          source={{ uri: `file://${document.path}` }}
+          style={styles.hiddenImage}
+          onLoad={handleImageLoad}
+          resizeMode="contain"
+        />
+      </View>
+    );
+  }
+
+  return (
+    <View style={styles.container} onLayout={handleLayout}>
+      <CustomImageCropper
+        height={displaySize.height}
+        width={displaySize.width}
+        image={`file://${document.path}`}
+        rectangleCoordinates={getInitialRectangle()}
+        overlayColor={overlayColor}
+        overlayStrokeColor={overlayStrokeColor}
+        handlerColor={handlerColor}
+        enablePanStrict={enablePanStrict}
+        onDragEnd={handleDragEnd}
+      />
+    </View>
+  );
+};
+
+const styles = StyleSheet.create({
+  container: {
+    flex: 1,
+    backgroundColor: '#000',
+  },
+  hiddenImage: {
+    width: 1,
+    height: 1,
+    opacity: 0,
+  },
+});

package/src/DocScanner.tsx
CHANGED

@@ -76,13 +76,33 @@ type CameraRef = {
 
 type CameraOverrides = Omit<React.ComponentProps<typeof Camera>, 'style' | 'ref' | 'frameProcessor'>;
 
+/**
+ * Configuration for detection quality and behavior
+ */
+export interface DetectionConfig {
+  /** Processing resolution width (default: 1280) - higher = more accurate but slower */
+  processingWidth?: number;
+  /** Canny edge detection lower threshold (default: 40) */
+  cannyLowThreshold?: number;
+  /** Canny edge detection upper threshold (default: 120) */
+  cannyHighThreshold?: number;
+  /** Snap distance in pixels for corner locking (default: 8) */
+  snapDistance?: number;
+  /** Max frames to hold anchor when detection fails (default: 20) */
+  maxAnchorMisses?: number;
+  /** Maximum center movement allowed while maintaining lock (default: 200px) */
+  maxCenterDelta?: number;
+}
+
 interface Props {
-  onCapture?: (photo: { path: string; quad: Point[] | null }) => void;
+  onCapture?: (photo: { path: string; quad: Point[] | null; width: number; height: number }) => void;
   overlayColor?: string;
   autoCapture?: boolean;
   minStableFrames?: number;
   cameraProps?: CameraOverrides;
   children?: ReactNode;
+  /** Advanced detection configuration */
+  detectionConfig?: DetectionConfig;
 }
 
 export const DocScanner: React.FC<Props> = ({
@@ -92,6 +112,7 @@ export const DocScanner: React.FC<Props> = ({
   minStableFrames = 8,
   cameraProps,
   children,
+  detectionConfig = {},
 }) => {
   const device = useCameraDevice('back');
   const { hasPermission, requestPermission } = useCameraPermission();
@@ -115,20 +136,26 @@ export const DocScanner: React.FC<Props> = ({
   const lastMeasurementRef = useRef<Point[] | null>(null);
   const frameSizeRef = useRef<{ width: number; height: number } | null>(null);
 
+  // Detection parameters - configurable via props with sensible defaults
+  const PROCESSING_WIDTH = detectionConfig.processingWidth ?? 1280;
+  const CANNY_LOW = detectionConfig.cannyLowThreshold ?? 40;
+  const CANNY_HIGH = detectionConfig.cannyHighThreshold ?? 120;
+  const SNAP_DISTANCE = detectionConfig.snapDistance ?? 8;
+  const MAX_ANCHOR_MISSES = detectionConfig.maxAnchorMisses ?? 20;
+  const REJECT_CENTER_DELTA = detectionConfig.maxCenterDelta ?? 200;
+
+  // Fixed parameters for algorithm stability
   const MAX_HISTORY = 5;
-  const…
-  const…
-  const…
-  const…
-  const…
-  const MAX_AREA_SHIFT = 0.45;
-  const HISTORY_RESET_DISTANCE = 70;
+  const SNAP_CENTER_DISTANCE = 18;
+  const BLEND_DISTANCE = 80;
+  const MAX_CENTER_DELTA = 120;
+  const MAX_AREA_SHIFT = 0.55;
+  const HISTORY_RESET_DISTANCE = 90;
   const MIN_AREA_RATIO = 0.0002;
   const MAX_AREA_RATIO = 0.9;
   const MIN_EDGE_RATIO = 0.015;
-  const MAX_ANCHOR_MISSES = 12;
   const MIN_CONFIDENCE_TO_HOLD = 2;
-  const MAX_ANCHOR_CONFIDENCE =…
+  const MAX_ANCHOR_CONFIDENCE = 30;
 
   const updateQuad = useRunOnJS((value: Point[] | null) => {
     if (__DEV__) {
@@ -290,8 +317,8 @@ export const DocScanner: React.FC<Props> = ({
       // Report frame size for coordinate transformation
       updateFrameSize(frame.width, frame.height);
 
-      // Use…
-      const ratio =…
+      // Use configurable resolution for accuracy vs performance balance
+      const ratio = PROCESSING_WIDTH / frame.width;
       const width = Math.floor(frame.width * ratio);
       const height = Math.floor(frame.height * ratio);
       step = 'resize';
@@ -304,27 +331,35 @@ export const DocScanner: React.FC<Props> = ({
 
       step = 'frameBufferToMat';
       reportStage(step);
-…
+      let mat = OpenCV.frameBufferToMat(height, width, 3, resized);
 
      step = 'cvtColor';
      reportStage(step);
      OpenCV.invoke('cvtColor', mat, mat, ColorConversionCodes.COLOR_BGR2GRAY);
 
-…
+      // Enhanced morphological operations for noise reduction
+      const morphologyKernel = OpenCV.createObject(ObjectType.Size, 7, 7);
      step = 'getStructuringElement';
      reportStage(step);
      const element = OpenCV.invoke('getStructuringElement', MorphShapes.MORPH_RECT, morphologyKernel);
      step = 'morphologyEx';
      reportStage(step);
+      // MORPH_CLOSE to fill small holes in edges
+      OpenCV.invoke('morphologyEx', mat, mat, MorphTypes.MORPH_CLOSE, element);
+      // MORPH_OPEN to remove small noise
      OpenCV.invoke('morphologyEx', mat, mat, MorphTypes.MORPH_OPEN, element);
 
-…
-      step = '…
+      // Bilateral filter for edge-preserving smoothing (better quality than Gaussian)
+      step = 'bilateralFilter';
      reportStage(step);
-      OpenCV.…
+      const tempMat = OpenCV.createObject(ObjectType.Mat);
+      OpenCV.invoke('bilateralFilter', mat, tempMat, 9, 75, 75);
+      mat = tempMat;
+
      step = 'Canny';
      reportStage(step);
-…
+      // Configurable Canny parameters for adaptive edge detection
+      OpenCV.invoke('Canny', mat, mat, CANNY_LOW, CANNY_HIGH);
 
      step = 'createContours';
      reportStage(step);
@@ -487,15 +522,20 @@ export const DocScanner: React.FC<Props> = ({
 
   useEffect(() => {
     const capture = async () => {
-      if (autoCapture && quad && stable >= minStableFrames && camera.current) {
+      if (autoCapture && quad && stable >= minStableFrames && camera.current && frameSize) {
         const photo = await camera.current.takePhoto({ qualityPrioritization: 'quality' });
-        onCapture?.({…
+        onCapture?.({
+          path: photo.path,
+          quad,
+          width: frameSize.width,
+          height: frameSize.height,
+        });
         setStable(0);
       }
     };
 
     capture();
-  }, [autoCapture, minStableFrames, onCapture, quad, stable]);
+  }, [autoCapture, minStableFrames, onCapture, quad, stable, frameSize]);
 
   const { device: overrideDevice, ...cameraRestProps } = cameraProps ?? {};
   const resolvedDevice = overrideDevice ?? device;
@@ -521,12 +561,17 @@ export const DocScanner: React.FC<Props> = ({
         <TouchableOpacity
           style={styles.button}
           onPress={async () => {
-            if (!camera.current) {
+            if (!camera.current || !frameSize) {
              return;
            }
 
            const photo = await camera.current.takePhoto({ qualityPrioritization: 'quality' });
-            onCapture?.({…
+            onCapture?.({
+              path: photo.path,
+              quad,
+              width: frameSize.width,
+              height: frameSize.height,
+            });
          }}
        />
      )}

package/src/external.d.ts
CHANGED

@@ -107,3 +107,28 @@ declare module '@shopify/react-native-skia' {
 
   export const Path: ComponentType<PathProps>;
 }
+
+declare module 'react-native-perspective-image-cropper' {
+  import type { ComponentType } from 'react';
+
+  export type Rectangle = {
+    topLeft: { x: number; y: number };
+    topRight: { x: number; y: number };
+    bottomLeft: { x: number; y: number };
+    bottomRight: { x: number; y: number };
+  };
+
+  export type CustomImageCropperProps = {
+    height: number;
+    width: number;
+    image: string;
+    rectangleCoordinates?: Rectangle;
+    overlayColor?: string;
+    overlayStrokeColor?: string;
+    handlerColor?: string;
+    enablePanStrict?: boolean;
+    onDragEnd?: (coordinates: Rectangle) => void;
+  };
+
+  export const CustomImageCropper: ComponentType<CustomImageCropperProps>;
+}

package/src/index.ts
CHANGED

@@ -1 +1,15 @@
-…
+// Main components
+export { DocScanner } from './DocScanner';
+export { CropEditor } from './CropEditor';
+
+// Types
+export type { Point, Quad, Rectangle, CapturedDocument } from './types';
+export type { DetectionConfig } from './DocScanner';
+
+// Utilities
+export {
+  quadToRectangle,
+  rectangleToQuad,
+  scaleCoordinates,
+  scaleRectangle,
+} from './utils/coordinate';

package/src/types.ts
CHANGED

@@ -1 +1,17 @@
 export type Point = { x: number; y: number };
+
+export type Quad = [Point, Point, Point, Point];
+
+export type Rectangle = {
+  topLeft: Point;
+  topRight: Point;
+  bottomRight: Point;
+  bottomLeft: Point;
+};
+
+export type CapturedDocument = {
+  path: string;
+  quad: Point[] | null;
+  width: number;
+  height: number;
+};

package/src/utils/coordinate.ts
ADDED

@@ -0,0 +1,71 @@
+import type { Point, Rectangle } from '../types';
+
+/**
+ * Convert quad points array to Rectangle format for perspective cropper
+ * Assumes quad points are ordered: [topLeft, topRight, bottomRight, bottomLeft]
+ */
+export const quadToRectangle = (quad: Point[]): Rectangle | null => {
+  if (!quad || quad.length !== 4) {
+    return null;
+  }
+
+  return {
+    topLeft: quad[0],
+    topRight: quad[1],
+    bottomRight: quad[2],
+    bottomLeft: quad[3],
+  };
+};
+
+/**
+ * Convert Rectangle format back to quad points array
+ */
+export const rectangleToQuad = (rect: Rectangle): Point[] => {
+  return [
+    rect.topLeft,
+    rect.topRight,
+    rect.bottomRight,
+    rect.bottomLeft,
+  ];
+};
+
+/**
+ * Scale coordinates from one dimension to another
+ * Useful when image dimensions differ from display dimensions
+ */
+export const scaleCoordinates = (
+  points: Point[],
+  fromWidth: number,
+  fromHeight: number,
+  toWidth: number,
+  toHeight: number
+): Point[] => {
+  const scaleX = toWidth / fromWidth;
+  const scaleY = toHeight / fromHeight;
+
+  return points.map(p => ({
+    x: p.x * scaleX,
+    y: p.y * scaleY,
+  }));
+};
+
+/**
+ * Scale a rectangle
+ */
+export const scaleRectangle = (
+  rect: Rectangle,
+  fromWidth: number,
+  fromHeight: number,
+  toWidth: number,
+  toHeight: number
+): Rectangle => {
+  const scaleX = toWidth / fromWidth;
+  const scaleY = toHeight / fromHeight;
+
+  return {
+    topLeft: { x: rect.topLeft.x * scaleX, y: rect.topLeft.y * scaleY },
+    topRight: { x: rect.topRight.x * scaleX, y: rect.topRight.y * scaleY },
+    bottomRight: { x: rect.bottomRight.x * scaleX, y: rect.bottomRight.y * scaleY },
+    bottomLeft: { x: rect.bottomLeft.x * scaleX, y: rect.bottomLeft.y * scaleY },
+  };
+};
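These helpers also close the loop after manual editing: the `Rectangle` delivered by `CropEditor`'s `onCropChange` can be rescaled and converted back into the `[topLeft, topRight, bottomRight, bottomLeft]` point array used elsewhere in the package. A small sketch, under the assumption that the adjusted corners are reported in the displayed image's coordinate space (the actual space depends on the underlying cropper):

```ts
import { rectangleToQuad, scaleRectangle, type Rectangle } from 'react-native-rectangle-doc-scanner';

// Rescale user-adjusted corners from display space to image pixel space,
// then convert back to the quad format for downstream cropping/warping.
const toImageSpaceQuad = (
  adjusted: Rectangle,
  display: { width: number; height: number },
  image: { width: number; height: number }
) => {
  const inImageSpace = scaleRectangle(adjusted, display.width, display.height, image.width, image.height);
  return rectangleToQuad(inImageSpace);
};
```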