react-native-rectangle-doc-scanner 3.9.0 → 3.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "react-native-rectangle-doc-scanner",
- "version": "3.9.0",
+ "version": "3.11.0",
  "description": "Native-backed document scanner for React Native with customizable overlays.",
  "license": "MIT",
  "main": "dist/index.js",
@@ -15,19 +15,17 @@
  #import <ImageIO/ImageIO.h>
  #import <GLKit/GLKit.h>

- @interface IPDFCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate>
+ @interface IPDFCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

  @property (nonatomic,strong) AVCaptureSession *captureSession;
  @property (nonatomic,strong) AVCaptureDevice *captureDevice;
  @property (nonatomic,strong) EAGLContext *context;

- @property (nonatomic, strong) AVCapturePhotoOutput* photoOutput;
+ @property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput;

  @property (nonatomic, assign) BOOL forceStop;
  @property (nonatomic, assign) float lastDetectionRate;

- @property (nonatomic, copy) void (^photoCaptureCompletionHandler)(UIImage *croppedImage, UIImage *initialImage, CIRectangleFeature *rectangleFeature);
-
  @end

  @implementation IPDFCameraViewController
@@ -127,14 +125,7 @@

  NSError *error = nil;
  AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
-
- // Set session preset to highest quality
- if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
- session.sessionPreset = AVCaptureSessionPresetHigh;
- } else {
- session.sessionPreset = AVCaptureSessionPresetPhoto;
- }
-
+ session.sessionPreset = AVCaptureSessionPresetPhoto;
  [session addInput:input];

  AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
@@ -143,77 +134,24 @@
  [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
  [session addOutput:dataOutput];

- // Use modern AVCapturePhotoOutput for best quality
- self.photoOutput = [[AVCapturePhotoOutput alloc] init];
-
- if ([session canAddOutput:self.photoOutput]) {
- [session addOutput:self.photoOutput];
-
- // Enable high quality photo capture
- if (@available(iOS 13.0, *)) {
- self.photoOutput.maxPhotoQualityPrioritization = AVCapturePhotoQualityPrioritizationQuality;
- // maxPhotoDimensions defaults to the highest supported resolution automatically
- }
- }
+ self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+ [session addOutput:self.stillImageOutput];

  AVCaptureConnection *connection = [dataOutput.connections firstObject];
  [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];

- // Enable video stabilization for better quality
- if ([connection isVideoStabilizationSupported]) {
- [connection setPreferredVideoStabilizationMode:AVCaptureVideoStabilizationModeAuto];
- }
-
- // Configure device for best quality
- if ([device lockForConfiguration:nil])
+ if (device.isFlashAvailable)
  {
- // Disable flash for better natural lighting
- if (device.isFlashAvailable) {
- [device setFlashMode:AVCaptureFlashModeOff];
- }
+ [device lockForConfiguration:nil];
+ [device setFlashMode:AVCaptureFlashModeOff];
+ [device unlockForConfiguration];

- // Enable continuous autofocus for sharp images
- if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
+ if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
+ {
+ [device lockForConfiguration:nil];
  [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+ [device unlockForConfiguration];
  }
-
- // Enable continuous auto exposure
- if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
- [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
- }
-
- // Enable auto white balance
- if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
- [device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
- }
-
- // Enable low light boost if available
- if (device.isLowLightBoostSupported) {
- [device setAutomaticallyEnablesLowLightBoostWhenAvailable:YES];
- }
-
- // Set active video format to highest resolution
- if (@available(iOS 13.0, *)) {
- AVCaptureDeviceFormat *bestFormat = nil;
- AVFrameRateRange *bestFrameRateRange = nil;
- for (AVCaptureDeviceFormat *format in [device formats]) {
- CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
- // Prefer 4K resolution (3840x2160)
- if (dimensions.width == 3840 && dimensions.height == 2160) {
- for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
- if (bestFormat == nil || range.maxFrameRate > bestFrameRateRange.maxFrameRate) {
- bestFormat = format;
- bestFrameRateRange = range;
- }
- }
- }
- }
- if (bestFormat) {
- [device setActiveFormat:bestFormat];
- }
- }
-
- [device unlockForConfiguration];
  }

  [session commitConfiguration];
@@ -443,15 +381,6 @@
  {
  if (_isCapturing) return;

- // Check if photoOutput is available
- if (!self.photoOutput) {
- NSLog(@"Error: photoOutput is nil");
- if (completionHandler) {
- completionHandler(nil, nil, nil);
- }
- return;
- }
-
  __weak typeof(self) weakSelf = self;

  [weakSelf hideGLKView:YES completion:^
@@ -464,27 +393,70 @@

  _isCapturing = YES;

- // Store completion handler for delegate callback
- self.photoCaptureCompletionHandler = completionHandler;
-
- // Create photo settings with maximum quality - use JPEG for compatibility
- AVCapturePhotoSettings *photoSettings = [AVCapturePhotoSettings photoSettings];
-
- // Enable high resolution photo capture
- photoSettings.highResolutionPhotoEnabled = YES;
-
- // Set maximum quality prioritization (iOS 13+)
- if (@available(iOS 13.0, *)) {
- photoSettings.photoQualityPrioritization = AVCapturePhotoQualityPrioritizationQuality;
- }
-
- // Enable auto flash
- if (self.photoOutput.supportedFlashModes && [self.photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
- photoSettings.flashMode = AVCaptureFlashModeAuto;
+ AVCaptureConnection *videoConnection = nil;
+ for (AVCaptureConnection *connection in self.stillImageOutput.connections)
+ {
+ for (AVCaptureInputPort *port in [connection inputPorts])
+ {
+ if ([[port mediaType] isEqual:AVMediaTypeVideo] )
+ {
+ videoConnection = connection;
+ break;
+ }
+ }
+ if (videoConnection) break;
  }

- // Capture photo - delegate will be called
- [self.photoOutput capturePhotoWithSettings:photoSettings delegate:self];
+ [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
+ {
+ NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
+
+ if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite || weakSelf.isBorderDetectionEnabled)
+ {
+ CIImage *enhancedImage = [CIImage imageWithData:imageData];
+
+ if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite)
+ {
+ enhancedImage = [self filteredImageUsingEnhanceFilterOnImage:enhancedImage];
+ }
+ else
+ {
+ enhancedImage = [self filteredImageUsingContrastFilterOnImage:enhancedImage];
+ }
+
+ if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
+ {
+ CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
+
+ if (rectangleFeature)
+ {
+ enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
+
+ UIGraphicsBeginImageContext(CGSizeMake(enhancedImage.extent.size.height, enhancedImage.extent.size.width));
+ [[UIImage imageWithCIImage:enhancedImage scale:1.0 orientation:UIImageOrientationRight] drawInRect:CGRectMake(0,0, enhancedImage.extent.size.height, enhancedImage.extent.size.width)];
+ UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
+ UIImage *initialImage = [UIImage imageWithData:imageData];
+ UIGraphicsEndImageContext();
+
+ [weakSelf hideGLKView:NO completion:nil];
+ completionHandler(image, initialImage, rectangleFeature);
+ }
+ } else {
+ [weakSelf hideGLKView:NO completion:nil];
+ UIImage *initialImage = [UIImage imageWithData:imageData];
+ completionHandler(initialImage, initialImage, nil);
+ }
+
+ }
+ else
+ {
+ [weakSelf hideGLKView:NO completion:nil];
+ UIImage *initialImage = [UIImage imageWithData:imageData];
+ completionHandler(initialImage, initialImage, nil);
+ }
+
+ _isCapturing = NO;
+ }];
  }

  - (void)hideGLKView:(BOOL)hidden completion:(void(^)())completion
@@ -602,103 +574,4 @@ BOOL rectangleDetectionConfidenceHighEnough(float confidence)
  return (confidence > 1.0);
  }

- #pragma mark - AVCapturePhotoCaptureDelegate
-
- - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error {
- __weak typeof(self) weakSelf = self;
-
- if (error) {
- NSLog(@"Error capturing photo: %@", error);
- _isCapturing = NO;
- [weakSelf hideGLKView:NO completion:nil];
- if (self.photoCaptureCompletionHandler) {
- self.photoCaptureCompletionHandler(nil, nil, nil);
- self.photoCaptureCompletionHandler = nil;
- }
- return;
- }
-
- // Get high quality image data
- NSData *imageData = [photo fileDataRepresentation];
-
- if (!imageData) {
- NSLog(@"Failed to get image data from photo");
- _isCapturing = NO;
- [weakSelf hideGLKView:NO completion:nil];
- if (self.photoCaptureCompletionHandler) {
- self.photoCaptureCompletionHandler(nil, nil, nil);
- self.photoCaptureCompletionHandler = nil;
- }
- return;
- }
-
- // Process image
- if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite || weakSelf.isBorderDetectionEnabled)
- {
- CIImage *enhancedImage = [CIImage imageWithData:imageData];
-
- if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite)
- {
- enhancedImage = [self filteredImageUsingEnhanceFilterOnImage:enhancedImage];
- }
- else
- {
- enhancedImage = [self filteredImageUsingContrastFilterOnImage:enhancedImage];
- }
-
- if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
- {
- CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
-
- if (rectangleFeature)
- {
- enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
-
- // Convert CIImage to UIImage with high quality using CIContext
- CIContext *ciContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer: @(NO)}];
-
- // Apply rotation to match device orientation
- CGAffineTransform transform = CGAffineTransformMakeRotation(-M_PI_2);
- enhancedImage = [enhancedImage imageByApplyingTransform:transform];
-
- // Convert to CGImage first for better quality
- CGImageRef cgImage = [ciContext createCGImage:enhancedImage fromRect:enhancedImage.extent];
- UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
- CGImageRelease(cgImage);
-
- UIImage *initialImage = [UIImage imageWithData:imageData];
-
- [weakSelf hideGLKView:NO completion:nil];
- _isCapturing = NO;
-
- if (self.photoCaptureCompletionHandler) {
- self.photoCaptureCompletionHandler(image, initialImage, rectangleFeature);
- self.photoCaptureCompletionHandler = nil;
- }
- }
- } else {
- [weakSelf hideGLKView:NO completion:nil];
- _isCapturing = NO;
- UIImage *initialImage = [UIImage imageWithData:imageData];
-
- if (self.photoCaptureCompletionHandler) {
- self.photoCaptureCompletionHandler(initialImage, initialImage, nil);
- self.photoCaptureCompletionHandler = nil;
- }
- }
-
- }
- else
- {
- [weakSelf hideGLKView:NO completion:nil];
- _isCapturing = NO;
- UIImage *initialImage = [UIImage imageWithData:imageData];
-
- if (self.photoCaptureCompletionHandler) {
- self.photoCaptureCompletionHandler(initialImage, initialImage, nil);
- self.photoCaptureCompletionHandler = nil;
- }
- }
- }
-
  @end
@@ -0,0 +1,163 @@
+ // This file contains only the key methods we need to modify
+ // setupCameraView - use original simple version
+ // layoutSubviews - add for frame updates
+ // captureOutput - add aspect fill logic
+
+ - (void)layoutSubviews
+ {
+ [super layoutSubviews];
+
+ // Update GLKView frame to match parent bounds
+ if (_glkView) {
+ _glkView.frame = self.bounds;
+ }
+ }
+
+ - (void)createGLKView
+ {
+ if (self.context) return;
+
+ self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ GLKView *view = [[GLKView alloc] initWithFrame:self.bounds];
+ view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
+ view.translatesAutoresizingMaskIntoConstraints = YES;
+ view.context = self.context;
+ view.contentScaleFactor = [UIScreen mainScreen].scale; // improve image quality
+ view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
+ [self insertSubview:view atIndex:0];
+ _glkView = view;
+ glGenRenderbuffers(1, &_renderBuffer);
+ glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
+ _coreImageContext = [CIContext contextWithEAGLContext:self.context];
+ [EAGLContext setCurrentContext:self.context];
+ }
+
+ - (void)setupCameraView
+ {
+ [self createGLKView];
+
+ AVCaptureDevice *device = nil;
+ NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+ for (AVCaptureDevice *possibleDevice in devices) {
+ if (self.useFrontCam) {
+ if ([possibleDevice position] == AVCaptureDevicePositionFront) {
+ device = possibleDevice;
+ }
+ } else {
+ if ([possibleDevice position] != AVCaptureDevicePositionFront) {
+ device = possibleDevice;
+ }
+ }
+ }
+ if (!device) return;
+
+ _imageDedectionConfidence = 0.0;
+
+ AVCaptureSession *session = [[AVCaptureSession alloc] init];
+ self.captureSession = session;
+ [session beginConfiguration];
+ self.captureDevice = device;
+
+ NSError *error = nil;
+ AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
+ session.sessionPreset = AVCaptureSessionPresetPhoto;
+ [session addInput:input];
+
+ AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
+ [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
+ [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
+ [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
+ [session addOutput:dataOutput];
+
+ self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+ [session addOutput:self.stillImageOutput];
+
+ AVCaptureConnection *connection = [dataOutput.connections firstObject];
+ [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
+
+ if (device.isFlashAvailable)
+ {
+ [device lockForConfiguration:nil];
+ [device setFlashMode:AVCaptureFlashModeOff];
+ [device unlockForConfiguration];
+
+ if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
+ {
+ [device lockForConfiguration:nil];
+ [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+ [device unlockForConfiguration];
+ }
+ }
+
+ [session commitConfiguration];
+ }
+
+ -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
+ {
+ if (self.forceStop) return;
+ if (_isStopped || _isCapturing || !CMSampleBufferIsValid(sampleBuffer)) return;
+
+ CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
+
+ CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
+
+ if (self.cameraViewType != IPDFCameraViewTypeNormal)
+ {
+ image = [self filteredImageUsingEnhanceFilterOnImage:image];
+ }
+ else
+ {
+ image = [self filteredImageUsingContrastFilterOnImage:image];
+ }
+
+ if (self.isBorderDetectionEnabled)
+ {
+ if (_borderDetectFrame)
+ {
+ _borderDetectLastRectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:image]];
+ _borderDetectFrame = NO;
+ }
+
+ if (_borderDetectLastRectangleFeature)
+ {
+ _imageDedectionConfidence += .5;
+
+ image = [self drawHighlightOverlayForPoints:image topLeft:_borderDetectLastRectangleFeature.topLeft topRight:_borderDetectLastRectangleFeature.topRight bottomLeft:_borderDetectLastRectangleFeature.bottomLeft bottomRight:_borderDetectLastRectangleFeature.bottomRight];
+ }
+ else
+ {
+ _imageDedectionConfidence = 0.0f;
+ }
+ }
+
+ if (self.context && _coreImageContext)
+ {
+ // Calculate the rect to draw the image with aspect fill
+ CGRect drawRect = self.bounds;
+ CGRect imageExtent = image.extent;
+
+ // Calculate aspect ratios
+ CGFloat imageAspect = imageExtent.size.width / imageExtent.size.height;
+ CGFloat viewAspect = drawRect.size.width / drawRect.size.height;
+
+ CGRect fromRect = imageExtent;
+
+ // Aspect fill: crop the image to fill the view
+ if (imageAspect > viewAspect) {
+ // Image is wider, crop width
+ CGFloat newWidth = imageExtent.size.height * viewAspect;
+ CGFloat xOffset = (imageExtent.size.width - newWidth) / 2.0;
+ fromRect = CGRectMake(xOffset, 0, newWidth, imageExtent.size.height);
+ } else {
+ // Image is taller, crop height
+ CGFloat newHeight = imageExtent.size.width / viewAspect;
+ CGFloat yOffset = (imageExtent.size.height - newHeight) / 2.0;
+ fromRect = CGRectMake(0, yOffset, imageExtent.size.width, newHeight);
+ }
+
+ [_coreImageContext drawImage:image inRect:drawRect fromRect:fromRect];
+ [self.context presentRenderbuffer:GL_RENDERBUFFER];
+
+ [_glkView setNeedsDisplay];
+ }
+ }
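
The aspect-fill branch added to captureOutput:didOutputSampleBuffer:fromConnection: above computes the source rect that drawImage:inRect:fromRect: samples, so the preview fills the GLKView without stretching. Below is a minimal standalone sketch of that crop-rect math; aspectFillCropRect and the sample dimensions are illustrative only (the package inlines this computation rather than exposing such a helper).

// Build: clang -framework Foundation -framework CoreGraphics aspect_fill_demo.m -o aspect_fill_demo
#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>

// Hypothetical helper mirroring the inlined crop logic from captureOutput: above.
static CGRect aspectFillCropRect(CGRect imageExtent, CGSize viewSize)
{
    CGFloat imageAspect = imageExtent.size.width / imageExtent.size.height;
    CGFloat viewAspect = viewSize.width / viewSize.height;

    if (imageAspect > viewAspect) {
        // Image is wider than the view: trim equal strips from the left and right.
        CGFloat newWidth = imageExtent.size.height * viewAspect;
        CGFloat xOffset = (imageExtent.size.width - newWidth) / 2.0;
        return CGRectMake(xOffset, 0, newWidth, imageExtent.size.height);
    }

    // Image is taller than the view: trim equal strips from the top and bottom.
    CGFloat newHeight = imageExtent.size.width / viewAspect;
    CGFloat yOffset = (imageExtent.size.height - newHeight) / 2.0;
    return CGRectMake(0, yOffset, imageExtent.size.width, newHeight);
}

int main(void)
{
    @autoreleasepool {
        // Example: a 1080x1920 camera frame rendered into a 390x600 view.
        CGRect frame = CGRectMake(0, 0, 1080, 1920);
        CGRect crop = aspectFillCropRect(frame, CGSizeMake(390, 600));

        // The frame (aspect 0.5625) is taller than the view (aspect 0.65), so the
        // crop keeps the full width and removes about 129 px from top and bottom.
        NSLog(@"fromRect = {%.1f, %.1f, %.1f, %.1f}",
              crop.origin.x, crop.origin.y, crop.size.width, crop.size.height);
    }
    return 0;
}

On a device the same rect is derived from image.extent and self.bounds, so the crop always tracks whatever size layoutSubviews gives the GLKView.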