react-native-rectangle-doc-scanner 0.66.0 → 0.69.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,10 +1,15 @@
  import React, { ReactNode } from 'react';
  import type { Point } from './types';
- /**
- * Detection configuration is no longer used now that the native
- * implementation handles edge detection. Keeping it for backwards
- * compatibility with existing consumer code.
- */
+ type PictureEvent = {
+ croppedImage?: string | null;
+ initialImage?: string;
+ width?: number;
+ height?: number;
+ };
+ type DocScannerHandle = {
+ capture: () => Promise<PictureEvent>;
+ reset: () => void;
+ };
  export interface DetectionConfig {
  processingWidth?: number;
  cannyLowThreshold?: number;
@@ -32,5 +37,5 @@ interface Props {
  gridLineWidth?: number;
  detectionConfig?: DetectionConfig;
  }
- export declare const DocScanner: React.FC<Props>;
- export {};
+ export declare const DocScanner: React.ForwardRefExoticComponent<Props & React.RefAttributes<DocScannerHandle>>;
+ export type { DocScannerHandle };
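
The `DocScanner` export is now a `forwardRef` component with an imperative `DocScannerHandle`. A minimal consumer-side sketch of driving it manually, assuming only the exports shown above; the screen component, button wiring, and prop values are illustrative:

```tsx
import React, { useRef } from 'react';
import { Button, View } from 'react-native';
import { DocScanner, DocScannerHandle } from 'react-native-rectangle-doc-scanner';

// Illustrative screen driving the scanner through the new imperative handle.
export const ManualScanScreen: React.FC = () => {
  const scannerRef = useRef<DocScannerHandle>(null);

  const capture = async () => {
    try {
      // capture() resolves with the native picture payload (cropped/initial image, size).
      const picture = await scannerRef.current?.capture();
      console.log('captured', picture?.croppedImage ?? picture?.initialImage);
    } catch (error) {
      // Rejects with 'capture_in_progress' if a capture is already running.
      console.warn('capture failed', error);
    }
  };

  return (
    <View style={{ flex: 1 }}>
      <DocScanner ref={scannerRef} autoCapture={false} onCapture={(doc) => console.log(doc.path)}>
        <Button title="Capture" onPress={capture} />
        <Button title="Reset" onPress={() => scannerRef.current?.reset()} />
      </DocScanner>
    </View>
  );
};
```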
@@ -32,83 +32,138 @@ var __importStar = (this && this.__importStar) || (function () {
  return result;
  };
  })();
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.DocScanner = void 0;
  const react_1 = __importStar(require("react"));
  const react_native_1 = require("react-native");
- const react_native_document_scanner_plugin_1 = __importDefault(require("react-native-document-scanner-plugin"));
  const overlay_1 = require("./utils/overlay");
- const NativeDocumentScanner = react_native_document_scanner_plugin_1.default;
+ const MODULE_NAME = 'RNRDocScannerModule';
+ const VIEW_NAME = 'RNRDocScannerView';
+ const NativeDocScannerModule = react_native_1.NativeModules[MODULE_NAME];
+ if (!NativeDocScannerModule) {
+ const fallbackMessage = `The native module '${MODULE_NAME}' is not linked. Make sure you have run pod install, ` +
+ `synced Gradle, and rebuilt the app after installing 'react-native-rectangle-doc-scanner'.`;
+ throw new Error(fallbackMessage);
+ }
+ const NativeDocScanner = (0, react_native_1.requireNativeComponent)(VIEW_NAME);
  const DEFAULT_OVERLAY_COLOR = '#e7a649';
  const GRID_COLOR_FALLBACK = 'rgba(231, 166, 73, 0.35)';
- const DocScanner = ({ onCapture, overlayColor = DEFAULT_OVERLAY_COLOR, autoCapture = true, minStableFrames = 8, enableTorch = false, quality, useBase64 = false, children, showGrid = true, gridColor, gridLineWidth = 2, }) => {
- const scannerRef = (0, react_1.useRef)(null);
+ exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAULT_OVERLAY_COLOR, autoCapture = true, minStableFrames = 8, enableTorch = false, quality = 90, useBase64 = false, children, showGrid = true, gridColor, gridLineWidth = 2, }, ref) => {
+ const viewRef = (0, react_1.useRef)(null);
  const capturingRef = (0, react_1.useRef)(false);
  const [quad, setQuad] = (0, react_1.useState)(null);
+ const [stable, setStable] = (0, react_1.useState)(0);
  const [frameSize, setFrameSize] = (0, react_1.useState)(null);
  const effectiveGridColor = (0, react_1.useMemo)(() => gridColor ?? GRID_COLOR_FALLBACK, [gridColor]);
- const handleLayout = (0, react_1.useCallback)((event) => {
- const { width, height } = event.nativeEvent.layout;
- if (width > 0 && height > 0) {
- setFrameSize({ width, height });
+ const ensureViewHandle = (0, react_1.useCallback)(() => {
+ const nodeHandle = (0, react_native_1.findNodeHandle)(viewRef.current);
+ if (!nodeHandle) {
+ throw new Error('Unable to obtain native view handle for DocScanner.');
  }
+ return nodeHandle;
  }, []);
- const handleRectangleDetect = (0, react_1.useCallback)((event) => {
- const coordinates = event?.rectangleCoordinates;
- if (!coordinates) {
- setQuad(null);
- return;
+ const resetNativeStability = (0, react_1.useCallback)(() => {
+ try {
+ const handle = ensureViewHandle();
+ NativeDocScannerModule.reset(handle);
  }
- const nextQuad = [
- coordinates.topLeft,
- coordinates.topRight,
- coordinates.bottomRight,
- coordinates.bottomLeft,
- ];
- setQuad(nextQuad);
- }, []);
- const handlePictureTaken = (0, react_1.useCallback)((event) => {
+ catch (error) {
+ console.warn('[DocScanner] unable to reset native stability', error);
+ }
+ }, [ensureViewHandle]);
+ const emitCaptureResult = (0, react_1.useCallback)((payload) => {
  capturingRef.current = false;
- const path = event?.croppedImage ?? event?.initialImage;
+ const path = payload.croppedImage ?? payload.initialImage;
  if (!path) {
  return;
  }
- const width = event?.width ?? frameSize?.width ?? 0;
- const height = event?.height ?? frameSize?.height ?? 0;
+ const width = payload.width ?? frameSize?.width ?? 0;
+ const height = payload.height ?? frameSize?.height ?? 0;
  onCapture?.({
  path,
  quad,
  width,
  height,
  });
- }, [frameSize, onCapture, quad]);
+ setStable(0);
+ resetNativeStability();
+ }, [frameSize, onCapture, quad, resetNativeStability]);
+ const handleRectangleDetect = (0, react_1.useCallback)((event) => {
+ const { rectangleCoordinates, stableCounter, frameWidth, frameHeight } = event.nativeEvent;
+ setStable(stableCounter);
+ setFrameSize({ width: frameWidth, height: frameHeight });
+ if (!rectangleCoordinates) {
+ setQuad(null);
+ return;
+ }
+ setQuad([
+ rectangleCoordinates.topLeft,
+ rectangleCoordinates.topRight,
+ rectangleCoordinates.bottomRight,
+ rectangleCoordinates.bottomLeft,
+ ]);
+ if (autoCapture && stableCounter >= minStableFrames) {
+ triggerCapture();
+ }
+ }, [autoCapture, minStableFrames]);
+ const handlePictureTaken = (0, react_1.useCallback)((event) => {
+ emitCaptureResult(event.nativeEvent);
+ }, [emitCaptureResult]);
+ const captureNative = (0, react_1.useCallback)(() => {
+ if (capturingRef.current) {
+ return Promise.reject(new Error('capture_in_progress'));
+ }
+ try {
+ const handle = ensureViewHandle();
+ capturingRef.current = true;
+ return NativeDocScannerModule.capture(handle)
+ .then((result) => {
+ emitCaptureResult(result);
+ return result;
+ })
+ .catch((error) => {
+ capturingRef.current = false;
+ throw error;
+ });
+ }
+ catch (error) {
+ capturingRef.current = false;
+ return Promise.reject(error);
+ }
+ }, [emitCaptureResult, ensureViewHandle]);
+ const triggerCapture = (0, react_1.useCallback)(() => {
+ if (capturingRef.current) {
+ return;
+ }
+ captureNative().catch((error) => {
+ console.warn('[DocScanner] capture failed', error);
+ });
+ }, [captureNative]);
  const handleManualCapture = (0, react_1.useCallback)(() => {
- if (autoCapture || capturingRef.current || !scannerRef.current) {
+ if (autoCapture) {
  return;
  }
- capturingRef.current = true;
- scannerRef.current
- .capture()
- .catch((error) => {
+ captureNative().catch((error) => {
  console.warn('[DocScanner] manual capture failed', error);
- capturingRef.current = false;
  });
- }, [autoCapture]);
- return (react_1.default.createElement(react_native_1.View, { style: styles.container, onLayout: handleLayout },
- react_1.default.createElement(NativeDocumentScanner, { ref: (instance) => {
- scannerRef.current = instance;
- }, style: react_native_1.StyleSheet.absoluteFill, overlayColor: "transparent", detectionCountBeforeCapture: autoCapture ? minStableFrames : 10000, enableTorch: enableTorch, hideControls: true, useBase64: useBase64, quality: quality, onRectangleDetect: handleRectangleDetect, onPictureTaken: handlePictureTaken }),
+ }, [autoCapture, captureNative]);
+ (0, react_1.useImperativeHandle)(ref, () => ({
+ capture: captureNative,
+ reset: () => {
+ setStable(0);
+ resetNativeStability();
+ },
+ }), [captureNative, resetNativeStability]);
+ return (react_1.default.createElement(react_native_1.View, { style: styles.container },
+ react_1.default.createElement(NativeDocScanner, { ref: viewRef, style: react_native_1.StyleSheet.absoluteFill, detectionCountBeforeCapture: minStableFrames, autoCapture: autoCapture, enableTorch: enableTorch, quality: quality, useBase64: useBase64, onRectangleDetect: handleRectangleDetect, onPictureTaken: handlePictureTaken }),
  react_1.default.createElement(overlay_1.Overlay, { quad: quad, color: overlayColor, frameSize: frameSize, showGrid: showGrid, gridColor: effectiveGridColor, gridLineWidth: gridLineWidth }),
  !autoCapture && (react_1.default.createElement(react_native_1.TouchableOpacity, { style: styles.button, onPress: handleManualCapture })),
  children));
- };
- exports.DocScanner = DocScanner;
+ });
  const styles = react_native_1.StyleSheet.create({
  container: {
  flex: 1,
+ backgroundColor: '#000',
  },
  button: {
  position: 'absolute',
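
The compiled wrapper above resolves the native view's tag with `findNodeHandle` and hands it to the promise-based `RNRDocScannerModule`. A simplified TypeScript sketch of that plumbing as it might look in source form; only the module and view names come from the code above, the typings and helper name are illustrative:

```ts
import type * as React from 'react';
import { findNodeHandle, NativeModules, requireNativeComponent } from 'react-native';

// Promise-based native module registered by the library (name from the compiled code above).
const RNRDocScannerModule = NativeModules.RNRDocScannerModule as {
  capture(viewTag: number): Promise<{
    croppedImage?: string | null;
    initialImage?: string;
    width?: number;
    height?: number;
  }>;
  reset(viewTag: number): void;
};

// Host view backing the camera preview.
export const RNRDocScannerView = requireNativeComponent<object>('RNRDocScannerView');

// Mirrors ensureViewHandle()/captureNative() above: resolve the view's tag,
// then pass it to the module so the native side knows which view to capture.
export function captureFrom(viewRef: React.Component | null) {
  const tag = findNodeHandle(viewRef);
  if (tag == null) {
    throw new Error('Unable to obtain native view handle for DocScanner.');
  }
  return RNRDocScannerModule.capture(tag);
}
```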
@@ -21,6 +21,9 @@ export interface FullDocScannerProps {
  onClose?: () => void;
  detectionConfig?: DetectionConfig;
  overlayColor?: string;
+ gridColor?: string;
+ gridLineWidth?: number;
+ showGrid?: boolean;
  overlayStrokeColor?: string;
  handlerColor?: string;
  strings?: FullDocScannerStrings;
@@ -41,12 +41,13 @@ const CropEditor_1 = require("./CropEditor");
  const coordinate_1 = require("./utils/coordinate");
  const stripFileUri = (value) => value.replace(/^file:\/\//, '');
  const ensureFileUri = (value) => (value.startsWith('file://') ? value : `file://${value}`);
- const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3170f3', overlayStrokeColor = '#3170f3', handlerColor = '#3170f3', strings, manualCapture = false, minStableFrames, onError, }) => {
+ const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3170f3', gridColor, gridLineWidth, showGrid, overlayStrokeColor = '#3170f3', handlerColor = '#3170f3', strings, manualCapture = false, minStableFrames, onError, }) => {
  const [screen, setScreen] = (0, react_1.useState)('scanner');
  const [capturedDoc, setCapturedDoc] = (0, react_1.useState)(null);
  const [cropRectangle, setCropRectangle] = (0, react_1.useState)(null);
  const [imageSize, setImageSize] = (0, react_1.useState)(null);
  const [processing, setProcessing] = (0, react_1.useState)(false);
+ const resolvedGridColor = gridColor ?? overlayColor;
  const mergedStrings = (0, react_1.useMemo)(() => ({
  captureHint: strings?.captureHint ?? 'Align the document within the frame.',
  manualHint: strings?.manualHint ?? 'Tap the button below to capture.',
@@ -153,7 +154,7 @@ const FullDocScanner = ({ onResult, onClose, detectionConfig, overlayColor = '#3
  }, [onClose, resetState]);
  return (react_1.default.createElement(react_native_1.View, { style: styles.container },
  screen === 'scanner' && (react_1.default.createElement(react_native_1.View, { style: styles.flex },
- react_1.default.createElement(DocScanner_1.DocScanner, { autoCapture: !manualCapture, overlayColor: overlayColor, minStableFrames: minStableFrames ?? 6, detectionConfig: detectionConfig, onCapture: handleCapture },
+ react_1.default.createElement(DocScanner_1.DocScanner, { autoCapture: !manualCapture, overlayColor: overlayColor, showGrid: showGrid, gridColor: resolvedGridColor, gridLineWidth: gridLineWidth, minStableFrames: minStableFrames ?? 6, detectionConfig: detectionConfig, onCapture: handleCapture },
  react_1.default.createElement(react_native_1.View, { style: styles.overlay, pointerEvents: "box-none" },
  react_1.default.createElement(react_native_1.TouchableOpacity, { style: styles.closeButton, onPress: handleClose, accessibilityLabel: mergedStrings.cancel, accessibilityRole: "button" },
  react_1.default.createElement(react_native_1.Text, { style: styles.closeButtonLabel }, "\u00D7")),
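
`FullDocScanner` now forwards grid styling to the inner `DocScanner`, with `gridColor` falling back to `overlayColor` when omitted. An illustrative usage, assuming the `FullDocScanner` export; the colour values and callbacks are placeholders:

```tsx
import React from 'react';
import { FullDocScanner } from 'react-native-rectangle-doc-scanner';

// Grid styling props are new in 0.69.0; gridColor defaults to overlayColor.
export const ScanFlow: React.FC = () => (
  <FullDocScanner
    overlayColor="#3170f3"
    showGrid
    gridColor="rgba(49, 112, 243, 0.35)"
    gridLineWidth={1.5}
    manualCapture={false}
    onResult={(result) => console.log('scanned document', result)}
    onClose={() => console.log('scan cancelled')}
  />
);
```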
package/dist/index.d.ts CHANGED
@@ -3,5 +3,5 @@ export { CropEditor } from './CropEditor';
  export { FullDocScanner } from './FullDocScanner';
  export type { FullDocScannerResult, FullDocScannerProps, FullDocScannerStrings, } from './FullDocScanner';
  export type { Point, Quad, Rectangle, CapturedDocument } from './types';
- export type { DetectionConfig } from './DocScanner';
+ export type { DetectionConfig, DocScannerHandle } from './DocScanner';
  export { quadToRectangle, rectangleToQuad, scaleCoordinates, scaleRectangle, } from './utils/coordinate';
@@ -41,62 +41,91 @@ const lerp = (start, end, t) => ({
  x: start.x + (end.x - start.x) * t,
  y: start.y + (end.y - start.y) * t,
  });
+ const withAlpha = (value, alpha) => {
+ const hexMatch = /^#([0-9a-f]{3}|[0-9a-f]{6})$/i.exec(value.trim());
+ if (!hexMatch) {
+ return `rgba(231, 166, 73, ${alpha})`;
+ }
+ const hex = hexMatch[1];
+ const normalize = hex.length === 3
+ ? hex.split('').map((ch) => ch + ch).join('')
+ : hex;
+ const r = parseInt(normalize.slice(0, 2), 16);
+ const g = parseInt(normalize.slice(2, 4), 16);
+ const b = parseInt(normalize.slice(4, 6), 16);
+ return `rgba(${r}, ${g}, ${b}, ${alpha})`;
+ };
+ const buildPath = (points) => {
+ const path = react_native_skia_1.Skia.Path.Make();
+ path.moveTo(points[0].x, points[0].y);
+ points.slice(1).forEach((p) => path.lineTo(p.x, p.y));
+ path.close();
+ return path;
+ };
  const Overlay = ({ quad, color = '#e7a649', frameSize, showGrid = true, gridColor = 'rgba(231, 166, 73, 0.35)', gridLineWidth = 2, }) => {
  const { width: screenWidth, height: screenHeight } = (0, react_native_1.useWindowDimensions)();
+ const fillColor = (0, react_1.useMemo)(() => withAlpha(color, 0.2), [color]);
  const { outlinePath, gridPaths } = (0, react_1.useMemo)(() => {
- if (!quad || !frameSize) {
- if (__DEV__) {
- console.log('[Overlay] no quad or frameSize', { quad, frameSize });
- }
- return { outlinePath: null, gridPaths: [] };
- }
- if (__DEV__) {
- console.log('[Overlay] drawing quad:', quad);
- console.log('[Overlay] color:', color);
- console.log('[Overlay] screen dimensions:', screenWidth, 'x', screenHeight);
- console.log('[Overlay] frame dimensions:', frameSize.width, 'x', frameSize.height);
+ let transformedQuad = null;
+ let sourceQuad = null;
+ let sourceFrameSize = frameSize;
+ if (quad && frameSize) {
+ sourceQuad = quad;
  }
- // Check if camera is in landscape mode (width > height) but screen is portrait (height > width)
- const isFrameLandscape = frameSize.width > frameSize.height;
- const isScreenPortrait = screenHeight > screenWidth;
- const needsRotation = isFrameLandscape && isScreenPortrait;
- if (__DEV__) {
- console.log('[Overlay] needs rotation:', needsRotation);
- }
- let transformedQuad;
- if (needsRotation) {
- // Camera is landscape, screen is portrait - need to rotate 90 degrees
- // Transform: rotate 90° clockwise and scale
- // New coordinates: x' = y * (screenWidth / frameHeight), y' = (frameWidth - x) * (screenHeight / frameWidth)
- const scaleX = screenWidth / frameSize.height;
- const scaleY = screenHeight / frameSize.width;
- if (__DEV__) {
- console.log('[Overlay] rotation scale factors:', scaleX, 'x', scaleY);
+ else {
+ const marginRatio = 0.12;
+ const marginX = screenWidth * marginRatio;
+ const marginY = screenHeight * marginRatio;
+ const maxWidth = screenWidth - marginX * 2;
+ const maxHeight = screenHeight - marginY * 2;
+ const a4Ratio = Math.SQRT2; // ~1.414 height / width
+ let width = maxWidth;
+ let height = width * a4Ratio;
+ if (height > maxHeight) {
+ height = maxHeight;
+ width = height / a4Ratio;
  }
- transformedQuad = quad.map((p) => ({
- x: p.y * scaleX,
- y: (frameSize.width - p.x) * scaleY,
- }));
+ const left = (screenWidth - width) / 2;
+ const top = (screenHeight - height) / 2;
+ transformedQuad = [
+ { x: left, y: top },
+ { x: left + width, y: top },
+ { x: left + width, y: top + height },
+ { x: left, y: top + height },
+ ];
+ sourceFrameSize = null;
  }
- else {
- // Same orientation - just scale
- const scaleX = screenWidth / frameSize.width;
- const scaleY = screenHeight / frameSize.height;
+ if (sourceQuad && sourceFrameSize) {
  if (__DEV__) {
- console.log('[Overlay] scale factors:', scaleX, 'x', scaleY);
+ console.log('[Overlay] drawing quad:', sourceQuad);
+ console.log('[Overlay] color:', color);
+ console.log('[Overlay] screen dimensions:', screenWidth, 'x', screenHeight);
+ console.log('[Overlay] frame dimensions:', sourceFrameSize.width, 'x', sourceFrameSize.height);
+ }
+ const isFrameLandscape = sourceFrameSize.width > sourceFrameSize.height;
+ const isScreenPortrait = screenHeight > screenWidth;
+ const needsRotation = isFrameLandscape && isScreenPortrait;
+ if (needsRotation) {
+ const scaleX = screenWidth / sourceFrameSize.height;
+ const scaleY = screenHeight / sourceFrameSize.width;
+ transformedQuad = sourceQuad.map((p) => ({
+ x: p.y * scaleX,
+ y: (sourceFrameSize.width - p.x) * scaleY,
+ }));
+ }
+ else {
+ const scaleX = screenWidth / sourceFrameSize.width;
+ const scaleY = screenHeight / sourceFrameSize.height;
+ transformedQuad = sourceQuad.map((p) => ({
+ x: p.x * scaleX,
+ y: p.y * scaleY,
+ }));
  }
- transformedQuad = quad.map((p) => ({
- x: p.x * scaleX,
- y: p.y * scaleY,
- }));
  }
- if (__DEV__) {
- console.log('[Overlay] transformed quad:', transformedQuad);
+ if (!transformedQuad) {
+ return { outlinePath: null, gridPaths: [] };
  }
- const skPath = react_native_skia_1.Skia.Path.Make();
- skPath.moveTo(transformedQuad[0].x, transformedQuad[0].y);
- transformedQuad.slice(1).forEach((p) => skPath.lineTo(p.x, p.y));
- skPath.close();
+ const skPath = buildPath(transformedQuad);
  const grid = [];
  if (showGrid) {
  const [topLeft, topRight, bottomRight, bottomLeft] = transformedQuad;
@@ -119,14 +148,14 @@ const Overlay = ({ quad, color = '#e7a649', frameSize, showGrid = true, gridColo
  });
  }
  return { outlinePath: skPath, gridPaths: grid };
- }, [quad, screenWidth, screenHeight, frameSize, showGrid]);
+ }, [quad, screenWidth, screenHeight, frameSize, showGrid, color]);
  if (__DEV__) {
  console.log('[Overlay] rendering Canvas with dimensions:', screenWidth, 'x', screenHeight);
  }
  return (react_1.default.createElement(react_native_1.View, { style: styles.container, pointerEvents: "none" },
  react_1.default.createElement(react_native_skia_1.Canvas, { style: { width: screenWidth, height: screenHeight } }, outlinePath && (react_1.default.createElement(react_1.default.Fragment, null,
  react_1.default.createElement(react_native_skia_1.Path, { path: outlinePath, color: color, style: "stroke", strokeWidth: 8 }),
- react_1.default.createElement(react_native_skia_1.Path, { path: outlinePath, color: "rgba(231, 166, 73, 0.2)", style: "fill" }),
+ react_1.default.createElement(react_native_skia_1.Path, { path: outlinePath, color: fillColor, style: "fill" }),
  gridPaths.map((gridPath, index) => (react_1.default.createElement(react_native_skia_1.Path
  // eslint-disable-next-line react/no-array-index-key
  , {
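
The hard-coded orange fill is replaced by `withAlpha`, so the fill now tracks a custom `overlayColor`, and the overlay also draws a centred, roughly A4-proportioned placeholder frame while no quad has been detected. A standalone TypeScript restatement of the helper with worked outputs, for illustration only (it is not an additional export):

```ts
// Standalone restatement of the compiled withAlpha helper above.
const withAlpha = (value: string, alpha: number): string => {
  const hexMatch = /^#([0-9a-f]{3}|[0-9a-f]{6})$/i.exec(value.trim());
  if (!hexMatch) {
    return `rgba(231, 166, 73, ${alpha})`; // non-hex inputs fall back to the default orange
  }
  const hex = hexMatch[1];
  const normalized = hex.length === 3 ? hex.split('').map((ch) => ch + ch).join('') : hex;
  const r = parseInt(normalized.slice(0, 2), 16);
  const g = parseInt(normalized.slice(2, 4), 16);
  const b = parseInt(normalized.slice(4, 6), 16);
  return `rgba(${r}, ${g}, ${b}, ${alpha})`;
};

withAlpha('#e7a649', 0.2); // 'rgba(231, 166, 73, 0.2)'  (default overlay colour)
withAlpha('#37f', 0.2);    // 'rgba(51, 119, 255, 0.2)'  (3-digit hex is expanded)
withAlpha('tomato', 0.2);  // 'rgba(231, 166, 73, 0.2)'  (named colours fall back)
```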
@@ -0,0 +1,178 @@
+ # Native Module Architecture Plan
+
+ This document lays out the roadmap for migrating the library from its current
+ VisionCamera/OpenCV implementation to a fully native document scanner module
+ that mirrors the behaviour of `react-native-document-scanner-plugin` while
+ keeping support for custom React components and overlays.
+
+ ## Goals
+
+ - Expose a `<DocScanner />` React view component that renders a native camera
+   preview.
+ - Maintain the ability to layer custom React children (buttons, hints, headers)
+   on top of the preview.
+ - Provide the detected polygon and stability information every frame, so the
+   existing Skia overlay and auto-capture logic continue working.
+ - Support both automatic and manual capture flows; captures should resolve with
+   both cropped and original image metadata.
+ - Keep the public TypeScript API as close as possible to the current version to
+   minimise breaking changes for consuming apps.
+
+ ## High-Level Architecture
+
+ ```
+ ┌──────────────────────────────────────────────────────────────────────┐
+ │ React Native (TypeScript) │
+ │ ┌────────────────────────────┐ events / commands ┌─────────────┐ │
+ │ │ DocScanner.tsx (wrapper) ├───────────────────────▶│ Native View │ │
+ │ │ - Manages refs │◀───────────────────────┤ Module │ │
+ │ │ - Handles props │ layout callbacks └─────────────┘ │
+ │ │ - Renders Overlay + UI │ │
+ │ └────────────────────────────┘ │
+ │ ▲ ▲ │
+ │ │onRectangleDetect │onPictureTaken │
+ └────────────────┴─────────────────────────────┴───────────────────────┘
+ │ │
+ ┌────────┴────────┐ ┌──────┴──────┐
+ │ iOS (Swift) │ │ Android │
+ │ DocScannerView │ │ DocScannerView │
+ │ - AVCapture │ │ - CameraX │
+ │ - Vision / VNDetectRectangles │ │ - ML Kit / OpenCV│
+ │ - VNDocumentCamera for capture ││ - On-device cropping │
+ └──────────────────┘ └───────────────────────┘
+ ```
+
+ ### React Native Layer
+
+ - `DocScanner.tsx`
+   - Forwards props to the native view via `requireNativeComponent`.
+   - Holds a ref to call `capture()`, `setTorchEnabled()`, etc.
+   - Converts native events into the existing `Point[]` / stability format.
+   - Continues rendering the Skia-based `Overlay` and any child components.
+ - `index.ts`
+   - Exports the wrapper and new types.
+   - TypeScript definitions for native events, capture result, and imperative
+     methods (`DocScannerHandle`).
+
+ ### iOS Implementation
+
+ - Create `ios/DocScannerView.swift`
+   - Subclass of `UIView` hosting an `AVCaptureVideoPreviewLayer`.
+   - Uses `AVCaptureSession` with `AVCaptureVideoDataOutput` to stream frames.
+   - Runs `VNDetectRectanglesRequest` on a background queue for polygon
+     detection.
+   - Emits `onRectangleDetect` events (containing points, stability counter, and
+     frame size) via `RCTDirectEventBlock`.
+   - Implements `capture()` by calling `AVCapturePhotoOutput` and optionally
+     running `CIFilter` warps for perspective correction.
+   - Saves both original and cropped images, returning their URIs plus width /
+     height.
+
+ - Create `ios/DocScannerViewManager.m`
+   - Registers the component as `RNRDocScannerView` (name to be decided).
+   - Exposes props: `overlayColor`, `detectionCountBeforeCapture`, `torchEnabled`,
+     `useBase64`, `quality`, etc.
+   - Exposes commands / methods: `capture`, `setTorchEnabled`, `reset`.
+
+ - Create backing Swift files:
+   - `DocScannerFrameProcessor.swift` – handles rectangle detection and stability
+     scoring.
+   - `DocScannerCaptureCoordinator.swift` – manages photo output, perspective
+     correction, optional VisionKit integration.
+   - `DocScannerEventPayload.swift` – strongly typed event payloads.
+
+ ### Android Implementation
+
+ - Create `android/src/main/java/.../DocScannerView.kt`
+   - Extends `FrameLayout`, hosts a `PreviewView` from CameraX.
+   - Sets up `ProcessCameraProvider` with `ImageAnalysis` for frame processing.
+   - Runs ML Kit Document Scanner API (or OpenCV fallback) to detect quads.
+   - Emits events through `@ReactProp` + `UIManagerModule` event dispatch.
+   - Maintains stability counter similar to current JS implementation.
+
+ - Create `DocScannerViewManager.kt`
+   - Registers the view name (e.g. `RNRDocScannerView`).
+   - Maps props (`overlayColor`, `detectionCountBeforeCapture`, etc.).
+   - Implements `receiveCommand` to handle `capture`, `torch`, `reset`.
+
+ - Create `DocScannerModule.kt`
+   - Exposes imperative methods if needed for non-view functionality (e.g. file
+     cleanup).
+
+ - Provide utility classes:
+   - `ImageCropper.kt` – applies perspective transformations using OpenCV or
+     RenderScript.
+   - `PathUtils.kt` – file management for temporary images.
+   - `RectangleEvent.kt` – serialisable event payload.
+
+ ### Event Contract
+
+ `onRectangleDetect` event shape (identical for iOS & Android):
+
+ ```ts
+ type RectangleEventPayload = {
+   rectangleCoordinates: {
+     topLeft: Point;
+     topRight: Point;
+     bottomRight: Point;
+     bottomLeft: Point;
+   } | null;
+   stableCounter: number;
+   frameWidth: number;
+   frameHeight: number;
+ };
+ ```
+
+ `onPictureTaken` event shape:
+
+ ```ts
+ type PictureEvent = {
+   croppedImage: string | null; // file:// or base64 depending on props
+   initialImage: string;
+   width: number;
+   height: number;
+ };
+ ```
+
+ ### Props Mapping
+
+ | Prop | Description | Native handling |
+ |-------------------------------|-----------------------------------------------------|---------------------------------------|
+ | `overlayColor` | Stroke colour for overlay (forwarded to events) | Only metadata – actual overlay is JS |
+ | `detectionCountBeforeCapture` | Minimum stable frames before auto capture | Native stability counter |
+ | `autoCapture` | If `true`, native triggers capture automatically | Native triggers `capture()` internally |
+ | `enableTorch` | Controls device torch | Maps to `AVCaptureDevice` / CameraX |
+ | `quality` | JPEG quality (0–100) | Controls compression in native capture |
+ | `useBase64` | If `true`, return base64 strings instead of files | Native encode |
+ | `showGrid`, `gridColor`... | Handled in JS overlay; no native changes required | N/A |
+
+ ### Migration Steps
+
+ 1. **Scaffold native module directories**
+    - Add `ios/` and `android/` folders with minimal React Native module wiring.
+
+ 2. **Implement iOS detection & capture**
+    - Set up camera session, rectangle detection, event dispatch.
+    - Support auto capture and manual capture commands.
+
+ 3. **Implement Android detection & capture**
+    - Mirror iOS logic with CameraX + ML Kit/OpenCV.
+
+ 4. **Update TypeScript wrapper**
+    - Replace VisionCamera/OpenCV logic with `requireNativeComponent`.
+    - Keep existing overlay and UI contract intact.
+
+ 5. **Testing & validation**
+    - Write example app update (outside this package) to verify events, overlay,
+      auto-capture.
+    - Ensure TypeScript typings align with native behaviour.
+
+ 6. **Documentation**
+    - Update README with new installation instructions (pods/gradle changes,
+      permissions, peer deps).
+    - Document new props & events, auto/manual capture behaviour.
+
+ This plan focuses on creating a native module while preserving the custom
+ overlay UX. Each platform will require significant native code, but the above
+ structure provides the blueprint for implementation.
+
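
The event contract in the plan above translates directly into typed handlers on the JS side. A rough sketch of how the wrapper could type the native view's events; the payload shapes come from the contract above, while the prop type name and grouping are illustrative:

```ts
import type { NativeSyntheticEvent } from 'react-native';
import type { Point } from './types';

// Shapes taken from the event contract in the plan above.
type RectangleEventPayload = {
  rectangleCoordinates: { topLeft: Point; topRight: Point; bottomRight: Point; bottomLeft: Point } | null;
  stableCounter: number;
  frameWidth: number;
  frameHeight: number;
};

type PictureEvent = {
  croppedImage: string | null;
  initialImage: string;
  width: number;
  height: number;
};

// Illustrative prop typing for the native view as the wrapper would consume it.
type NativeDocScannerProps = {
  detectionCountBeforeCapture?: number;
  autoCapture?: boolean;
  enableTorch?: boolean;
  quality?: number;
  useBase64?: boolean;
  onRectangleDetect?: (event: NativeSyntheticEvent<RectangleEventPayload>) => void;
  onPictureTaken?: (event: NativeSyntheticEvent<PictureEvent>) => void;
};
```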
@@ -0,0 +1,49 @@
+ import Foundation
+ import React
+
+ @objc(RNRDocScannerModule)
+ class RNRDocScannerModule: NSObject, RCTBridgeModule {
+   static func moduleName() -> String! {
+     "RNRDocScannerModule"
+   }
+
+   static func requiresMainQueueSetup() -> Bool {
+     true
+   }
+
+   @objc var bridge: RCTBridge?
+
+   @objc func capture(_ reactTag: NSNumber, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) {
+     bridge?.uiManager?.addUIBlock { _, viewRegistry in
+       guard let view = viewRegistry?[reactTag] as? RNRDocScannerView else {
+         reject(RNRDocScannerError.viewNotFound.code, RNRDocScannerError.viewNotFound.message, nil)
+         return
+       }
+
+       view.capture { result in
+         switch result {
+         case let .success(payload):
+           resolve([
+             "croppedImage": payload.croppedImage as Any,
+             "initialImage": payload.originalImage,
+             "width": payload.width,
+             "height": payload.height,
+           ])
+         case let .failure(error as RNRDocScannerError):
+           reject(error.code, error.message, error)
+         case let .failure(error):
+           reject("capture_failed", error.localizedDescription, error)
+         }
+       }
+     }
+   }
+
+   @objc func reset(_ reactTag: NSNumber) {
+     bridge?.uiManager?.addUIBlock { _, viewRegistry in
+       guard let view = viewRegistry?[reactTag] as? RNRDocScannerView else {
+         return
+       }
+       view.resetStability()
+     }
+   }
+ }