react-native-rectangle-doc-scanner 2.1.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "react-native-rectangle-doc-scanner",
-  "version": "2.1.0",
+  "version": "2.3.0",
   "description": "Native-backed document scanner for React Native with customizable overlays.",
   "license": "MIT",
   "main": "dist/index.js",
@@ -16,8 +16,19 @@
   "scripts": {
     "build": "tsc",
     "prepare": "yarn build",
-    "postinstall": "
+    "postinstall": "node scripts/postinstall.js"
   },
+  "files": [
+    "dist",
+    "src",
+    "android",
+    "ios",
+    "scripts",
+    "vendor",
+    "*.md",
+    "*.js",
+    "*.json"
+  ],
   "peerDependencies": {
     "@shopify/react-native-skia": "*",
     "react": "*",
@@ -27,7 +38,6 @@
   "devDependencies": {
     "@types/react": "^18.2.41",
     "@types/react-native": "0.73.0",
-    "patch-package": "^8.0.0",
     "typescript": "^5.3.3"
   },
   "dependencies": {
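In short: 2.3.0 drops the patch-package workflow (the devDependency is removed above and its patch file is deleted at the end of this diff). In its place, the postinstall hook runs a bundled script that copies a vendored, quality-tuned IPDFCameraViewController.m over the one installed by react-native-document-scanner, and the new "files" whitelist is what ships the scripts/ and vendor/ folders in the published tarball.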
package/scripts/postinstall.js
ADDED
@@ -0,0 +1,56 @@
+#!/usr/bin/env node
+
+const fs = require('fs');
+const path = require('path');
+
+// Find react-native-document-scanner (could be in parent due to hoisting)
+function findPackage(packageName) {
+  const locations = [
+    path.join(__dirname, '..', 'node_modules', packageName), // Same level
+    path.join(__dirname, '..', '..', packageName), // Hoisted to parent
+    path.join(__dirname, '..', '..', '..', packageName), // Hoisted to root
+  ];
+
+  for (const location of locations) {
+    if (fs.existsSync(location)) {
+      return location;
+    }
+  }
+  return null;
+}
+
+const SCANNER_PATH = findPackage('react-native-document-scanner');
+const VENDOR_PATH = path.join(__dirname, '..', 'vendor', 'react-native-document-scanner');
+
+// Check if react-native-document-scanner is installed
+if (!SCANNER_PATH) {
+  console.log('⚠️ react-native-document-scanner not found, skipping quality patches');
+  process.exit(0);
+}
+
+console.log('📸 Applying camera quality optimizations...');
+
+try {
+  // Copy optimized iOS file
+  const iosFile = 'ios/IPDFCameraViewController.m';
+  const sourcePath = path.join(VENDOR_PATH, iosFile);
+  const targetPath = path.join(SCANNER_PATH, iosFile);
+
+  if (fs.existsSync(sourcePath)) {
+    // Backup original if not already backed up
+    if (!fs.existsSync(targetPath + '.original')) {
+      fs.copyFileSync(targetPath, targetPath + '.original');
+    }
+
+    // Copy optimized version
+    fs.copyFileSync(sourcePath, targetPath);
+    console.log('✅ iOS camera quality optimizations applied!');
+  } else {
+    console.log('⚠️ Optimized iOS file not found in vendor folder');
+  }
+
+  console.log('✨ Setup complete!');
+} catch (error) {
+  console.error('❌ Error applying optimizations:', error.message);
+  process.exit(1);
+}
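Note that the script above saves the stock iOS file as IPDFCameraViewController.m.original before overwriting it. A minimal sketch of a companion script that undoes the patch by restoring that backup (hypothetical: the package does not ship this helper, and the path below assumes the file lives in a scripts/ folder one level below your app root):

  #!/usr/bin/env node
  // restore-original.js (hypothetical helper, not part of this package)
  const fs = require('fs');
  const path = require('path');

  // Assumes this script sits one directory below the app root.
  const scannerDir = path.join(__dirname, '..', 'node_modules', 'react-native-document-scanner');
  const target = path.join(scannerDir, 'ios', 'IPDFCameraViewController.m');
  const backup = target + '.original';

  if (fs.existsSync(backup)) {
    fs.copyFileSync(backup, target);
    console.log('Restored the unpatched IPDFCameraViewController.m');
  } else {
    console.log('No .original backup found; nothing to restore');
  }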
package/vendor/react-native-document-scanner/ios/IPDFCameraViewController.m
ADDED
@@ -0,0 +1,588 @@
+//
+//  IPDFCameraViewController.m
+//  InstaPDF
+//
+//  Created by Maximilian Mackh on 06/01/15.
+//  Copyright (c) 2015 mackh ag. All rights reserved.
+//
+
+#import "IPDFCameraViewController.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <CoreMedia/CoreMedia.h>
+#import <CoreVideo/CoreVideo.h>
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+#import <GLKit/GLKit.h>
+
+@interface IPDFCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property (nonatomic,strong) AVCaptureSession *captureSession;
+@property (nonatomic,strong) AVCaptureDevice *captureDevice;
+@property (nonatomic,strong) EAGLContext *context;
+
+@property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput;
+
+@property (nonatomic, assign) BOOL forceStop;
+@property (nonatomic, assign) float lastDetectionRate;
+
+@end
+
+@implementation IPDFCameraViewController
+{
+    CIContext *_coreImageContext;
+    GLuint _renderBuffer;
+    GLKView *_glkView;
+
+    BOOL _isStopped;
+
+    CGFloat _imageDedectionConfidence;
+    NSTimer *_borderDetectTimeKeeper;
+    BOOL _borderDetectFrame;
+    CIRectangleFeature *_borderDetectLastRectangleFeature;
+
+    BOOL _isCapturing;
+}
+
+- (void)awakeFromNib
+{
+    [super awakeFromNib];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_backgroundMode) name:UIApplicationWillResignActiveNotification object:nil];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_foregroundMode) name:UIApplicationDidBecomeActiveNotification object:nil];
+}
+
+- (void)_backgroundMode
+{
+    self.forceStop = YES;
+}
+
+- (void)_foregroundMode
+{
+    self.forceStop = NO;
+}
+
+- (void)dealloc
+{
+    [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
+- (void)createGLKView
+{
+    if (self.context) return;
+
+    self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+    GLKView *view = [[GLKView alloc] initWithFrame:self.bounds];
+    view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
+    view.translatesAutoresizingMaskIntoConstraints = YES;
+    view.context = self.context;
+    view.contentScaleFactor = 1.0f;
+    view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
+    [self insertSubview:view atIndex:0];
+    _glkView = view;
+    glGenRenderbuffers(1, &_renderBuffer);
+    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
+    _coreImageContext = [CIContext contextWithEAGLContext:self.context];
+    [EAGLContext setCurrentContext:self.context];
+}
+
+- (void)setupCameraView
+{
+    [self createGLKView];
+
+    AVCaptureDevice *device = nil;
+    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+    for (AVCaptureDevice *possibleDevice in devices) {
+        if (self.useFrontCam) {
+            if ([possibleDevice position] == AVCaptureDevicePositionFront) {
+                device = possibleDevice;
+            }
+        } else {
+            if ([possibleDevice position] != AVCaptureDevicePositionFront) {
+                device = possibleDevice;
+            }
+        }
+    }
+    if (!device) return;
+
+    _imageDedectionConfidence = 0.0;
+
+    AVCaptureSession *session = [[AVCaptureSession alloc] init];
+    self.captureSession = session;
+    [session beginConfiguration];
+    self.captureDevice = device;
+
+    NSError *error = nil;
+    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
+    session.sessionPreset = AVCaptureSessionPresetPhoto;
+    [session addInput:input];
+
+    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
+    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
+    [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
+    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
+    [session addOutput:dataOutput];
+
+    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+    // Configure for maximum quality still image capture
+    self.stillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
+    self.stillImageOutput.highResolutionStillImageOutputEnabled = YES;
+    [session addOutput:self.stillImageOutput];
+
+    AVCaptureConnection *connection = [dataOutput.connections firstObject];
+    [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
+
+    // Enable video stabilization for better quality
+    if ([connection isVideoStabilizationSupported]) {
+        [connection setPreferredVideoStabilizationMode:AVCaptureVideoStabilizationModeAuto];
+    }
+
+    // Configure device for best quality
+    if ([device lockForConfiguration:nil])
+    {
+        // Disable flash for better natural lighting
+        if (device.isFlashAvailable) {
+            [device setFlashMode:AVCaptureFlashModeOff];
+        }
+
+        // Enable continuous autofocus for sharp images
+        if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
+            [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+        }
+
+        // Enable continuous auto exposure
+        if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
+            [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+        }
+
+        // Enable auto white balance
+        if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
+            [device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
+        }
+
+        // Enable low light boost if available
+        if (device.isLowLightBoostSupported) {
+            [device setAutomaticallyEnablesLowLightBoostWhenAvailable:YES];
+        }
+
+        // Set active video format to highest resolution
+        if (@available(iOS 13.0, *)) {
+            AVCaptureDeviceFormat *bestFormat = nil;
+            AVFrameRateRange *bestFrameRateRange = nil;
+            for (AVCaptureDeviceFormat *format in [device formats]) {
+                CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+                // Prefer 4K resolution (3840x2160)
+                if (dimensions.width == 3840 && dimensions.height == 2160) {
+                    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+                        if (bestFormat == nil || range.maxFrameRate > bestFrameRateRange.maxFrameRate) {
+                            bestFormat = format;
+                            bestFrameRateRange = range;
+                        }
+                    }
+                }
+            }
+            if (bestFormat) {
+                [device setActiveFormat:bestFormat];
+            }
+        }
+
+        [device unlockForConfiguration];
+    }
+
+    [session commitConfiguration];
+}
+
+- (void)setCameraViewType:(IPDFCameraViewType)cameraViewType
+{
+    UIBlurEffect * effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleDark];
+    UIVisualEffectView *viewWithBlurredBackground =[[UIVisualEffectView alloc] initWithEffect:effect];
+    viewWithBlurredBackground.frame = self.bounds;
+    [self insertSubview:viewWithBlurredBackground aboveSubview:_glkView];
+
+    _cameraViewType = cameraViewType;
+
+
+    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^
+    {
+        [viewWithBlurredBackground removeFromSuperview];
+    });
+}
+
+-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
+{
+    if (self.forceStop) return;
+    if (_isStopped || _isCapturing || !CMSampleBufferIsValid(sampleBuffer)) return;
+
+    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
+
+    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
+
+    if (self.cameraViewType != IPDFCameraViewTypeNormal)
+    {
+        image = [self filteredImageUsingEnhanceFilterOnImage:image];
+    }
+    else
+    {
+        image = [self filteredImageUsingContrastFilterOnImage:image];
+    }
+
+    if (self.isBorderDetectionEnabled)
+    {
+        if (_borderDetectFrame)
+        {
+            _borderDetectLastRectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:image]];
+            _borderDetectFrame = NO;
+        }
+
+        if (_borderDetectLastRectangleFeature)
+        {
+            _imageDedectionConfidence += .5;
+
+            image = [self drawHighlightOverlayForPoints:image topLeft:_borderDetectLastRectangleFeature.topLeft topRight:_borderDetectLastRectangleFeature.topRight bottomLeft:_borderDetectLastRectangleFeature.bottomLeft bottomRight:_borderDetectLastRectangleFeature.bottomRight];
+        }
+        else
+        {
+            _imageDedectionConfidence = 0.0f;
+        }
+    }
+
+    if (self.context && _coreImageContext)
+    {
+        [_coreImageContext drawImage:image inRect:self.bounds fromRect:image.extent];
+        [self.context presentRenderbuffer:GL_RENDERBUFFER];
+
+        [_glkView setNeedsDisplay];
+    }
+}
+
+- (void)enableBorderDetectFrame
+{
+    _borderDetectFrame = YES;
+}
+
+- (CIImage *)drawHighlightOverlayForPoints:(CIImage *)image topLeft:(CGPoint)topLeft topRight:(CGPoint)topRight bottomLeft:(CGPoint)bottomLeft bottomRight:(CGPoint)bottomRight
+{
+    CIImage *overlay = [CIImage imageWithColor:[[CIColor alloc] initWithColor:self.overlayColor]];
+    overlay = [overlay imageByCroppingToRect:image.extent];
+    overlay = [overlay imageByApplyingFilter:@"CIPerspectiveTransformWithExtent" withInputParameters:@{@"inputExtent":[CIVector vectorWithCGRect:image.extent],@"inputTopLeft":[CIVector vectorWithCGPoint:topLeft],@"inputTopRight":[CIVector vectorWithCGPoint:topRight],@"inputBottomLeft":[CIVector vectorWithCGPoint:bottomLeft],@"inputBottomRight":[CIVector vectorWithCGPoint:bottomRight]}];
+
+    return [overlay imageByCompositingOverImage:image];
+}
+
+- (void)start
+{
+    _isStopped = NO;
+
+    [self.captureSession startRunning];
+
+    float detectionRefreshRate = _detectionRefreshRateInMS;
+    CGFloat detectionRefreshRateInSec = detectionRefreshRate/100;
+
+    if (_lastDetectionRate != _detectionRefreshRateInMS) {
+        if (_borderDetectTimeKeeper) {
+            [_borderDetectTimeKeeper invalidate];
+        }
+        _borderDetectTimeKeeper = [NSTimer scheduledTimerWithTimeInterval:detectionRefreshRateInSec target:self selector:@selector(enableBorderDetectFrame) userInfo:nil repeats:YES];
+    }
+
+    [self hideGLKView:NO completion:nil];
+
+    _lastDetectionRate = _detectionRefreshRateInMS;
+}
+
+- (void)stop
+{
+    _isStopped = YES;
+
+    [self.captureSession stopRunning];
+
+    [_borderDetectTimeKeeper invalidate];
+
+    [self hideGLKView:YES completion:nil];
+}
+
+- (void)setEnableTorch:(BOOL)enableTorch
+{
+    _enableTorch = enableTorch;
+
+    AVCaptureDevice *device = self.captureDevice;
+    if ([device hasTorch] && [device hasFlash])
+    {
+        [device lockForConfiguration:nil];
+        if (enableTorch)
+        {
+            [device setTorchMode:AVCaptureTorchModeOn];
+        }
+        else
+        {
+            [device setTorchMode:AVCaptureTorchModeOff];
+        }
+        [device unlockForConfiguration];
+    }
+}
+
+- (void)setUseFrontCam:(BOOL)useFrontCam
+{
+    _useFrontCam = useFrontCam;
+    [self stop];
+    [self setupCameraView];
+    [self start];
+}
+
+
+- (void)setContrast:(float)contrast
+{
+
+    _contrast = contrast;
+}
+
+- (void)setSaturation:(float)saturation
+{
+    _saturation = saturation;
+}
+
+- (void)setBrightness:(float)brightness
+{
+    _brightness = brightness;
+}
+
+- (void)setDetectionRefreshRateInMS:(NSInteger)detectionRefreshRateInMS
+{
+    _detectionRefreshRateInMS = detectionRefreshRateInMS;
+}
+
+
+- (void)focusAtPoint:(CGPoint)point completionHandler:(void(^)())completionHandler
+{
+    AVCaptureDevice *device = self.captureDevice;
+    CGPoint pointOfInterest = CGPointZero;
+    CGSize frameSize = self.bounds.size;
+    pointOfInterest = CGPointMake(point.y / frameSize.height, 1.f - (point.x / frameSize.width));
+
+    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
+    {
+        NSError *error;
+        if ([device lockForConfiguration:&error])
+        {
+            if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
+            {
+                [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+                [device setFocusPointOfInterest:pointOfInterest];
+            }
+
+            if([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure])
+            {
+                [device setExposurePointOfInterest:pointOfInterest];
+                [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+                completionHandler();
+            }
+
+            [device unlockForConfiguration];
+        }
+    }
+    else
+    {
+        completionHandler();
+    }
+}
+
+- (void)captureImageWithCompletionHander:(void(^)(id data, id initialData, CIRectangleFeature *rectangleFeature))completionHandler
+{
+    if (_isCapturing) return;
+
+    __weak typeof(self) weakSelf = self;
+
+    [weakSelf hideGLKView:YES completion:^
+    {
+        [weakSelf hideGLKView:NO completion:^
+        {
+            [weakSelf hideGLKView:YES completion:nil];
+        }];
+    }];
+
+    _isCapturing = YES;
+
+    AVCaptureConnection *videoConnection = nil;
+    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
+    {
+        for (AVCaptureInputPort *port in [connection inputPorts])
+        {
+            if ([[port mediaType] isEqual:AVMediaTypeVideo] )
+            {
+                videoConnection = connection;
+                break;
+            }
+        }
+        if (videoConnection) break;
+    }
+
+    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
+    {
+        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
+
+        if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite || weakSelf.isBorderDetectionEnabled)
+        {
+            CIImage *enhancedImage = [CIImage imageWithData:imageData];
+
+            if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite)
+            {
+                enhancedImage = [self filteredImageUsingEnhanceFilterOnImage:enhancedImage];
+            }
+            else
+            {
+                enhancedImage = [self filteredImageUsingContrastFilterOnImage:enhancedImage];
+            }
+
+            if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
+            {
+                CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
+
+                if (rectangleFeature)
+                {
+                    enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
+
+                    UIGraphicsBeginImageContext(CGSizeMake(enhancedImage.extent.size.height, enhancedImage.extent.size.width));
+                    [[UIImage imageWithCIImage:enhancedImage scale:1.0 orientation:UIImageOrientationRight] drawInRect:CGRectMake(0,0, enhancedImage.extent.size.height, enhancedImage.extent.size.width)];
+                    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
+                    UIImage *initialImage = [UIImage imageWithData:imageData];
+                    UIGraphicsEndImageContext();
+
+                    [weakSelf hideGLKView:NO completion:nil];
+                    completionHandler(image, initialImage, rectangleFeature);
+                }
+            } else {
+                [weakSelf hideGLKView:NO completion:nil];
+                UIImage *initialImage = [UIImage imageWithData:imageData];
+                completionHandler(initialImage, initialImage, nil);
+            }
+
+        }
+        else
+        {
+            [weakSelf hideGLKView:NO completion:nil];
+            UIImage *initialImage = [UIImage imageWithData:imageData];
+            completionHandler(initialImage, initialImage, nil);
+        }
+
+        _isCapturing = NO;
+    }];
+}
+
+- (void)hideGLKView:(BOOL)hidden completion:(void(^)())completion
+{
+    [UIView animateWithDuration:0.1 animations:^
+    {
+        _glkView.alpha = (hidden) ? 0.0 : 1.0;
+    }
+    completion:^(BOOL finished)
+    {
+        if (!completion) return;
+        completion();
+    }];
+}
+
+- (CIImage *)filteredImageUsingEnhanceFilterOnImage:(CIImage *)image
+{
+    [self start];
+    return [CIFilter filterWithName:@"CIColorControls" keysAndValues:kCIInputImageKey, image, @"inputBrightness", @(self.brightness), @"inputContrast", @(self.contrast), @"inputSaturation", @(self.saturation), nil].outputImage;
+}
+
+- (CIImage *)filteredImageUsingContrastFilterOnImage:(CIImage *)image
+{
+    return [CIFilter filterWithName:@"CIColorControls" withInputParameters:@{@"inputContrast":@(1.0),kCIInputImageKey:image}].outputImage;
+}
+
+- (CIImage *)correctPerspectiveForImage:(CIImage *)image withFeatures:(CIRectangleFeature *)rectangleFeature
+{
+    NSMutableDictionary *rectangleCoordinates = [NSMutableDictionary new];
+    CGPoint newLeft = CGPointMake(rectangleFeature.topLeft.x + 30, rectangleFeature.topLeft.y);
+    CGPoint newRight = CGPointMake(rectangleFeature.topRight.x, rectangleFeature.topRight.y);
+    CGPoint newBottomLeft = CGPointMake(rectangleFeature.bottomLeft.x + 30, rectangleFeature.bottomLeft.y);
+    CGPoint newBottomRight = CGPointMake(rectangleFeature.bottomRight.x, rectangleFeature.bottomRight.y);
+
+
+    rectangleCoordinates[@"inputTopLeft"] = [CIVector vectorWithCGPoint:newLeft];
+    rectangleCoordinates[@"inputTopRight"] = [CIVector vectorWithCGPoint:newRight];
+    rectangleCoordinates[@"inputBottomLeft"] = [CIVector vectorWithCGPoint:newBottomLeft];
+    rectangleCoordinates[@"inputBottomRight"] = [CIVector vectorWithCGPoint:newBottomRight];
+    return [image imageByApplyingFilter:@"CIPerspectiveCorrection" withInputParameters:rectangleCoordinates];
+}
+
+- (CIDetector *)rectangleDetetor
+{
+    static CIDetector *detector = nil;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^
+    {
+        detector = [CIDetector detectorOfType:CIDetectorTypeRectangle context:nil options:@{CIDetectorAccuracy : CIDetectorAccuracyLow,CIDetectorTracking : @(YES)}];
+    });
+    return detector;
+}
+
+- (CIDetector *)highAccuracyRectangleDetector
+{
+    static CIDetector *detector = nil;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^
+    {
+        detector = [CIDetector detectorOfType:CIDetectorTypeRectangle context:nil options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh, CIDetectorReturnSubFeatures: @(YES) }];
+    });
+    return detector;
+}
+
+- (CIRectangleFeature *)biggestRectangleInRectangles:(NSArray *)rectangles
+{
+    if (![rectangles count]) return nil;
+
+    float halfPerimiterValue = 0;
+
+    CIRectangleFeature *biggestRectangle = [rectangles firstObject];
+
+    for (CIRectangleFeature *rect in rectangles)
+    {
+        CGPoint p1 = rect.topLeft;
+        CGPoint p2 = rect.topRight;
+        CGFloat width = hypotf(p1.x - p2.x, p1.y - p2.y);
+
+        CGPoint p3 = rect.topLeft;
+        CGPoint p4 = rect.bottomLeft;
+        CGFloat height = hypotf(p3.x - p4.x, p3.y - p4.y);
+
+        CGFloat currentHalfPerimiterValue = height + width;
+
+        if (halfPerimiterValue < currentHalfPerimiterValue)
+        {
+            halfPerimiterValue = currentHalfPerimiterValue;
+            biggestRectangle = rect;
+        }
+    }
+
+    if (self.delegate) {
+        [self.delegate didDetectRectangle:biggestRectangle withType:[self typeForRectangle:biggestRectangle]];
+    }
+
+    return biggestRectangle;
+}
+
+- (IPDFRectangeType) typeForRectangle: (CIRectangleFeature*) rectangle {
+    if (fabs(rectangle.topRight.y - rectangle.topLeft.y) > 100 ||
+        fabs(rectangle.topRight.x - rectangle.bottomRight.x) > 100 ||
+        fabs(rectangle.topLeft.x - rectangle.bottomLeft.x) > 100 ||
+        fabs(rectangle.bottomLeft.y - rectangle.bottomRight.y) > 100) {
+        return IPDFRectangeTypeBadAngle;
+    } else if ((_glkView.frame.origin.y + _glkView.frame.size.height) - rectangle.topLeft.y > 150 ||
+               (_glkView.frame.origin.y + _glkView.frame.size.height) - rectangle.topRight.y > 150 ||
+               _glkView.frame.origin.y - rectangle.bottomLeft.y > 150 ||
+               _glkView.frame.origin.y - rectangle.bottomRight.y > 150) {
+        return IPDFRectangeTypeTooFar;
+    }
+    return IPDFRectangeTypeGood;
+}
+
+BOOL rectangleDetectionConfidenceHighEnough(float confidence)
+{
+    return (confidence > 1.0);
+}
+
+@end
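Because the postinstall step exits silently when react-native-document-scanner is not found, it can be useful to confirm that this vendored file actually reached the installed package. A hedged sketch (the marker string and path are assumptions drawn from the file above and the deleted patch below; the package does not ship such a check):

  // check-patch.js (hypothetical): verify the vendored iOS file was copied in.
  const fs = require('fs');
  const path = require('path');

  // Assumes the usual node_modules layout one directory below this script.
  const installed = path.join(
    __dirname, '..', 'node_modules',
    'react-native-document-scanner', 'ios', 'IPDFCameraViewController.m'
  );

  // highResolutionStillImageOutputEnabled appears in the vendored copy but not
  // in the stock upstream file (the deleted patch below had to add it).
  const contents = fs.readFileSync(installed, 'utf8');
  console.log(contents.includes('highResolutionStillImageOutputEnabled')
    ? 'Camera quality patch is applied'
    : 'Stock scanner file detected; the postinstall step may not have run');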
@@ -1,155 +0,0 @@
-diff --git a/node_modules/react-native-document-scanner/ios/IPDFCameraViewController.m b/node_modules/react-native-document-scanner/ios/IPDFCameraViewController.m
-index 1234567..abcdefg 100644
---- a/node_modules/react-native-document-scanner/ios/IPDFCameraViewController.m
-+++ b/node_modules/react-native-document-scanner/ios/IPDFCameraViewController.m
-@@ -76,7 +76,7 @@
-     GLKView *view = [[GLKView alloc] initWithFrame:self.bounds];
-     view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
-     view.translatesAutoresizingMaskIntoConstraints = YES;
-     view.context = self.context;
--    view.contentScaleFactor = 1.0f;
-+    view.contentScaleFactor = [UIScreen mainScreen].scale;
-     view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
-     [self insertSubview:view atIndex:0];
-     _glkView = view;
-@@ -115,7 +115,16 @@
- 
-     NSError *error = nil;
-     AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
--    session.sessionPreset = AVCaptureSessionPresetPhoto;
-+
-+    // Try to use the highest quality preset available
-+    if ([session canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
-+        session.sessionPreset = AVCaptureSessionPreset3840x2160; // 4K
-+    } else if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
-+        session.sessionPreset = AVCaptureSessionPreset1920x1080; // Full HD
-+    } else {
-+        session.sessionPreset = AVCaptureSessionPresetPhoto; // Fallback
-+    }
-+
-     [session addInput:input];
- 
-     AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
-@@ -134,23 +143,70 @@
-     [session addOutput:dataOutput];
- 
-     self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
-+    // Configure for maximum quality still image capture
-+    self.stillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
-+    self.stillImageOutput.highResolutionStillImageOutputEnabled = YES;
-     [session addOutput:self.stillImageOutput];
- 
-     AVCaptureConnection *connection = [dataOutput.connections firstObject];
-     [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
- 
--    if (device.isFlashAvailable)
-+    // Enable video stabilization for better quality
-+    if ([connection isVideoStabilizationSupported]) {
-+        [connection setPreferredVideoStabilizationMode:AVCaptureVideoStabilizationModeAuto];
-+    }
-+
-+    // Configure device for best quality
-+    if ([device lockForConfiguration:nil])
-     {
--        [device lockForConfiguration:nil];
--        [device setFlashMode:AVCaptureFlashModeOff];
--        [device unlockForConfiguration];
-+        // Disable flash for better natural lighting
-+        if (device.isFlashAvailable) {
-+            [device setFlashMode:AVCaptureFlashModeOff];
-+        }
- 
--        if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
--        {
--            [device lockForConfiguration:nil];
-+        // Enable continuous autofocus for sharp images
-+        if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
-             [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
--            [device unlockForConfiguration];
-         }
-+
-+        // Enable continuous auto exposure
-+        if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
-+            [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
-+        }
-+
-+        // Enable auto white balance
-+        if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
-+            [device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
-+        }
-+
-+        // Enable low light boost if available
-+        if (device.isLowLightBoostSupported) {
-+            [device setAutomaticallyEnablesLowLightBoostWhenAvailable:YES];
-+        }
-+
-+        // Set active video format to highest resolution
-+        if (@available(iOS 13.0, *)) {
-+            AVCaptureDeviceFormat *bestFormat = nil;
-+            AVFrameRateRange *bestFrameRateRange = nil;
-+            for (AVCaptureDeviceFormat *format in [device formats]) {
-+                CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
-+                // Prefer 4K resolution (3840x2160)
-+                if (dimensions.width == 3840 && dimensions.height == 2160) {
-+                    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
-+                        if (bestFormat == nil || range.maxFrameRate > bestFrameRateRange.maxFrameRate) {
-+                            bestFormat = format;
-+                            bestFrameRateRange = range;
-+                        }
-+                    }
-+                }
-+            }
-+            if (bestFormat) {
-+                [device setActiveFormat:bestFormat];
-+            }
-+        }
-+
-+        [device unlockForConfiguration];
-     }
- 
-     [session commitConfiguration];
-@@ -381,10 +437,18 @@
- 
-     [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
-     {
--        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
-+        // Get the highest quality image data from sample buffer
-+        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
-+        CIImage *sourceImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
-+
-+        // Create high quality JPEG data
-+        CIContext *context = [CIContext context];
-+        NSData *imageData = [context JPEGRepresentationOfImage:sourceImage colorSpace:sourceImage.colorSpace options:@{(id)kCGImageDestinationLossyCompressionQuality: @(0.95)}];
- 
-         if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite || weakSelf.isBorderDetectionEnabled)
-         {
--            CIImage *enhancedImage = [CIImage imageWithData:imageData];
-+            // Use source image directly for better quality
-+            CIImage *enhancedImage = sourceImage;
- 
-             if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite)
-             {
-@@ -405,10 +469,17 @@
-                 {
-                     enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
- 
--                    UIGraphicsBeginImageContext(CGSizeMake(enhancedImage.extent.size.height, enhancedImage.extent.size.width));
--                    [[UIImage imageWithCIImage:enhancedImage scale:1.0 orientation:UIImageOrientationRight] drawInRect:CGRectMake(0,0, enhancedImage.extent.size.height, enhancedImage.extent.size.width)];
--                    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
--                    UIImage *initialImage = [UIImage imageWithData:imageData];
--                    UIGraphicsEndImageContext();
-+                    // Convert CIImage to UIImage with high quality using CIContext
-+                    CIContext *ciContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer: @(NO)}];
-+
-+                    // Apply rotation to match device orientation
-+                    CGAffineTransform transform = CGAffineTransformMakeRotation(-M_PI_2);
-+                    enhancedImage = [enhancedImage imageByApplyingTransform:transform];
-+
-+                    // Convert to CGImage first for better quality
-+                    CGImageRef cgImage = [ciContext createCGImage:enhancedImage fromRect:enhancedImage.extent];
-+                    UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
-+                    CGImageRelease(cgImage);
-+
-+                    UIImage *initialImage = [UIImage imageWithData:imageData];
- 
-                     [weakSelf hideGLKView:NO completion:nil];