react-native-rectangle-doc-scanner 3.10.0 → 3.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "react-native-rectangle-doc-scanner",
3
- "version": "3.10.0",
3
+ "version": "3.12.0",
4
4
  "description": "Native-backed document scanner for React Native with customizable overlays.",
5
5
  "license": "MIT",
6
6
  "main": "dist/index.js",
@@ -16,14 +16,21 @@
16
16
  return self;
17
17
  }
18
18
 
19
- - (void)didMoveToWindow {
20
- [super didMoveToWindow];
21
- if (self.window && !_hasSetupCamera) {
22
- // Only setup camera once when view is added to window
19
+ - (void)layoutSubviews {
20
+ [super layoutSubviews];
21
+
22
+ // Setup camera after layout is complete and bounds are valid
23
+ if (!_hasSetupCamera && self.window && !CGRectIsEmpty(self.bounds)) {
24
+ NSLog(@"[DocumentScanner] Setting up camera with bounds: %@", NSStringFromCGRect(self.bounds));
23
25
  [self setupCameraView];
24
26
  [self start];
25
27
  _hasSetupCamera = YES;
26
- } else if (!self.window && _hasSetupCamera) {
28
+ }
29
+ }
30
+
31
+ - (void)didMoveToWindow {
32
+ [super didMoveToWindow];
33
+ if (!self.window && _hasSetupCamera) {
27
34
  // Stop camera when view is removed from window
28
35
  [self stop];
29
36
  }
@@ -15,19 +15,17 @@
15
15
  #import <ImageIO/ImageIO.h>
16
16
  #import <GLKit/GLKit.h>
17
17
 
18
- @interface IPDFCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate>
18
+ @interface IPDFCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
19
19
 
20
20
  @property (nonatomic,strong) AVCaptureSession *captureSession;
21
21
  @property (nonatomic,strong) AVCaptureDevice *captureDevice;
22
22
  @property (nonatomic,strong) EAGLContext *context;
23
23
 
24
- @property (nonatomic, strong) AVCapturePhotoOutput* photoOutput;
24
+ @property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput;
25
25
 
26
26
  @property (nonatomic, assign) BOOL forceStop;
27
27
  @property (nonatomic, assign) float lastDetectionRate;
28
28
 
29
- @property (nonatomic, copy) void (^photoCaptureCompletionHandler)(UIImage *croppedImage, UIImage *initialImage, CIRectangleFeature *rectangleFeature);
30
-
31
29
  @end
32
30
 
33
31
  @implementation IPDFCameraViewController
@@ -127,95 +125,33 @@
127
125
 
128
126
  NSError *error = nil;
129
127
  AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
130
-
131
- // Set session preset to highest quality
132
- if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
133
- session.sessionPreset = AVCaptureSessionPresetHigh;
134
- } else {
135
- session.sessionPreset = AVCaptureSessionPresetPhoto;
136
- }
137
-
128
+ session.sessionPreset = AVCaptureSessionPresetPhoto;
138
129
  [session addInput:input];
139
130
 
140
131
  AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
141
132
  [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
142
133
  [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
143
- // Use background queue for video processing to avoid blocking UI
144
- dispatch_queue_t videoQueue = dispatch_queue_create("com.scanner.videoQueue", DISPATCH_QUEUE_SERIAL);
145
- [dataOutput setSampleBufferDelegate:self queue:videoQueue];
134
+ [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
146
135
  [session addOutput:dataOutput];
147
136
 
148
- // Use modern AVCapturePhotoOutput for best quality
149
- self.photoOutput = [[AVCapturePhotoOutput alloc] init];
150
-
151
- if ([session canAddOutput:self.photoOutput]) {
152
- [session addOutput:self.photoOutput];
153
-
154
- // Enable high quality photo capture
155
- if (@available(iOS 13.0, *)) {
156
- self.photoOutput.maxPhotoQualityPrioritization = AVCapturePhotoQualityPrioritizationQuality;
157
- // maxPhotoDimensions defaults to the highest supported resolution automatically
158
- }
159
- }
137
+ self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
138
+ [session addOutput:self.stillImageOutput];
160
139
 
161
140
  AVCaptureConnection *connection = [dataOutput.connections firstObject];
162
141
  [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
163
142
 
164
- // Enable video stabilization for better quality
165
- if ([connection isVideoStabilizationSupported]) {
166
- [connection setPreferredVideoStabilizationMode:AVCaptureVideoStabilizationModeAuto];
167
- }
168
-
169
- // Configure device for best quality
170
- if ([device lockForConfiguration:nil])
143
+ if (device.isFlashAvailable)
171
144
  {
172
- // Disable flash for better natural lighting
173
- if (device.isFlashAvailable) {
174
- [device setFlashMode:AVCaptureFlashModeOff];
175
- }
145
+ [device lockForConfiguration:nil];
146
+ [device setFlashMode:AVCaptureFlashModeOff];
147
+ [device unlockForConfiguration];
176
148
 
177
- // Enable continuous autofocus for sharp images
178
- if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
149
+ if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
150
+ {
151
+ [device lockForConfiguration:nil];
179
152
  [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
153
+ [device unlockForConfiguration];
180
154
  }
181
-
182
- // Enable continuous auto exposure
183
- if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
184
- [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
185
- }
186
-
187
- // Enable auto white balance
188
- if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
189
- [device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
190
- }
191
-
192
- // Enable low light boost if available
193
- if (device.isLowLightBoostSupported) {
194
- [device setAutomaticallyEnablesLowLightBoostWhenAvailable:YES];
195
- }
196
-
197
- // Set active video format to highest resolution
198
- if (@available(iOS 13.0, *)) {
199
- AVCaptureDeviceFormat *bestFormat = nil;
200
- AVFrameRateRange *bestFrameRateRange = nil;
201
- for (AVCaptureDeviceFormat *format in [device formats]) {
202
- CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
203
- // Prefer 4K resolution (3840x2160)
204
- if (dimensions.width == 3840 && dimensions.height == 2160) {
205
- for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
206
- if (bestFormat == nil || range.maxFrameRate > bestFrameRateRange.maxFrameRate) {
207
- bestFormat = format;
208
- bestFrameRateRange = range;
209
- }
210
- }
211
- }
212
- }
213
- if (bestFormat) {
214
- [device setActiveFormat:bestFormat];
215
- }
216
- }
217
-
218
- [device unlockForConfiguration];
219
155
  }
220
156
 
221
157
  [session commitConfiguration];
@@ -300,15 +236,10 @@
300
236
  fromRect = CGRectMake(0, yOffset, imageExtent.size.width, newHeight);
301
237
  }
302
238
 
303
- // Render on main thread for OpenGL operations
304
- dispatch_async(dispatch_get_main_queue(), ^{
305
- if (self.context && _coreImageContext && _glkView) {
306
- [EAGLContext setCurrentContext:self.context];
307
- [_coreImageContext drawImage:image inRect:drawRect fromRect:fromRect];
308
- [self.context presentRenderbuffer:GL_RENDERBUFFER];
309
- [_glkView setNeedsDisplay];
310
- }
311
- });
239
+ [_coreImageContext drawImage:image inRect:drawRect fromRect:fromRect];
240
+ [self.context presentRenderbuffer:GL_RENDERBUFFER];
241
+
242
+ [_glkView setNeedsDisplay];
312
243
  }
313
244
  }
314
245
 
@@ -450,15 +381,6 @@
450
381
  {
451
382
  if (_isCapturing) return;
452
383
 
453
- // Check if photoOutput is available
454
- if (!self.photoOutput) {
455
- NSLog(@"Error: photoOutput is nil");
456
- if (completionHandler) {
457
- completionHandler(nil, nil, nil);
458
- }
459
- return;
460
- }
461
-
462
384
  __weak typeof(self) weakSelf = self;
463
385
 
464
386
  [weakSelf hideGLKView:YES completion:^
@@ -471,27 +393,70 @@
471
393
 
472
394
  _isCapturing = YES;
473
395
 
474
- // Store completion handler for delegate callback
475
- self.photoCaptureCompletionHandler = completionHandler;
476
-
477
- // Create photo settings with maximum quality - use JPEG for compatibility
478
- AVCapturePhotoSettings *photoSettings = [AVCapturePhotoSettings photoSettings];
479
-
480
- // Enable high resolution photo capture
481
- photoSettings.highResolutionPhotoEnabled = YES;
482
-
483
- // Set maximum quality prioritization (iOS 13+)
484
- if (@available(iOS 13.0, *)) {
485
- photoSettings.photoQualityPrioritization = AVCapturePhotoQualityPrioritizationQuality;
486
- }
487
-
488
- // Enable auto flash
489
- if (self.photoOutput.supportedFlashModes && [self.photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
490
- photoSettings.flashMode = AVCaptureFlashModeAuto;
396
+ AVCaptureConnection *videoConnection = nil;
397
+ for (AVCaptureConnection *connection in self.stillImageOutput.connections)
398
+ {
399
+ for (AVCaptureInputPort *port in [connection inputPorts])
400
+ {
401
+ if ([[port mediaType] isEqual:AVMediaTypeVideo] )
402
+ {
403
+ videoConnection = connection;
404
+ break;
405
+ }
406
+ }
407
+ if (videoConnection) break;
491
408
  }
492
409
 
493
- // Capture photo - delegate will be called
494
- [self.photoOutput capturePhotoWithSettings:photoSettings delegate:self];
410
+ [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
411
+ {
412
+ NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
413
+
414
+ if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite || weakSelf.isBorderDetectionEnabled)
415
+ {
416
+ CIImage *enhancedImage = [CIImage imageWithData:imageData];
417
+
418
+ if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite)
419
+ {
420
+ enhancedImage = [self filteredImageUsingEnhanceFilterOnImage:enhancedImage];
421
+ }
422
+ else
423
+ {
424
+ enhancedImage = [self filteredImageUsingContrastFilterOnImage:enhancedImage];
425
+ }
426
+
427
+ if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
428
+ {
429
+ CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
430
+
431
+ if (rectangleFeature)
432
+ {
433
+ enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
434
+
435
+ UIGraphicsBeginImageContext(CGSizeMake(enhancedImage.extent.size.height, enhancedImage.extent.size.width));
436
+ [[UIImage imageWithCIImage:enhancedImage scale:1.0 orientation:UIImageOrientationRight] drawInRect:CGRectMake(0,0, enhancedImage.extent.size.height, enhancedImage.extent.size.width)];
437
+ UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
438
+ UIImage *initialImage = [UIImage imageWithData:imageData];
439
+ UIGraphicsEndImageContext();
440
+
441
+ [weakSelf hideGLKView:NO completion:nil];
442
+ completionHandler(image, initialImage, rectangleFeature);
443
+ }
444
+ } else {
445
+ [weakSelf hideGLKView:NO completion:nil];
446
+ UIImage *initialImage = [UIImage imageWithData:imageData];
447
+ completionHandler(initialImage, initialImage, nil);
448
+ }
449
+
450
+ }
451
+ else
452
+ {
453
+ [weakSelf hideGLKView:NO completion:nil];
454
+ UIImage *initialImage = [UIImage imageWithData:imageData];
455
+ completionHandler(initialImage, initialImage, nil);
456
+ }
457
+
458
+ _isCapturing = NO;
459
+ }];
495
460
  }
496
461
 
497
462
  - (void)hideGLKView:(BOOL)hidden completion:(void(^)())completion
@@ -609,103 +574,4 @@ BOOL rectangleDetectionConfidenceHighEnough(float confidence)
609
574
  return (confidence > 1.0);
610
575
  }
611
576
 
612
- #pragma mark - AVCapturePhotoCaptureDelegate
613
-
614
- - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error {
615
- __weak typeof(self) weakSelf = self;
616
-
617
- if (error) {
618
- NSLog(@"Error capturing photo: %@", error);
619
- _isCapturing = NO;
620
- [weakSelf hideGLKView:NO completion:nil];
621
- if (self.photoCaptureCompletionHandler) {
622
- self.photoCaptureCompletionHandler(nil, nil, nil);
623
- self.photoCaptureCompletionHandler = nil;
624
- }
625
- return;
626
- }
627
-
628
- // Get high quality image data
629
- NSData *imageData = [photo fileDataRepresentation];
630
-
631
- if (!imageData) {
632
- NSLog(@"Failed to get image data from photo");
633
- _isCapturing = NO;
634
- [weakSelf hideGLKView:NO completion:nil];
635
- if (self.photoCaptureCompletionHandler) {
636
- self.photoCaptureCompletionHandler(nil, nil, nil);
637
- self.photoCaptureCompletionHandler = nil;
638
- }
639
- return;
640
- }
641
-
642
- // Process image
643
- if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite || weakSelf.isBorderDetectionEnabled)
644
- {
645
- CIImage *enhancedImage = [CIImage imageWithData:imageData];
646
-
647
- if (weakSelf.cameraViewType == IPDFCameraViewTypeBlackAndWhite)
648
- {
649
- enhancedImage = [self filteredImageUsingEnhanceFilterOnImage:enhancedImage];
650
- }
651
- else
652
- {
653
- enhancedImage = [self filteredImageUsingContrastFilterOnImage:enhancedImage];
654
- }
655
-
656
- if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
657
- {
658
- CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
659
-
660
- if (rectangleFeature)
661
- {
662
- enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
663
-
664
- // Convert CIImage to UIImage with high quality using CIContext
665
- CIContext *ciContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer: @(NO)}];
666
-
667
- // Apply rotation to match device orientation
668
- CGAffineTransform transform = CGAffineTransformMakeRotation(-M_PI_2);
669
- enhancedImage = [enhancedImage imageByApplyingTransform:transform];
670
-
671
- // Convert to CGImage first for better quality
672
- CGImageRef cgImage = [ciContext createCGImage:enhancedImage fromRect:enhancedImage.extent];
673
- UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
674
- CGImageRelease(cgImage);
675
-
676
- UIImage *initialImage = [UIImage imageWithData:imageData];
677
-
678
- [weakSelf hideGLKView:NO completion:nil];
679
- _isCapturing = NO;
680
-
681
- if (self.photoCaptureCompletionHandler) {
682
- self.photoCaptureCompletionHandler(image, initialImage, rectangleFeature);
683
- self.photoCaptureCompletionHandler = nil;
684
- }
685
- }
686
- } else {
687
- [weakSelf hideGLKView:NO completion:nil];
688
- _isCapturing = NO;
689
- UIImage *initialImage = [UIImage imageWithData:imageData];
690
-
691
- if (self.photoCaptureCompletionHandler) {
692
- self.photoCaptureCompletionHandler(initialImage, initialImage, nil);
693
- self.photoCaptureCompletionHandler = nil;
694
- }
695
- }
696
-
697
- }
698
- else
699
- {
700
- [weakSelf hideGLKView:NO completion:nil];
701
- _isCapturing = NO;
702
- UIImage *initialImage = [UIImage imageWithData:imageData];
703
-
704
- if (self.photoCaptureCompletionHandler) {
705
- self.photoCaptureCompletionHandler(initialImage, initialImage, nil);
706
- self.photoCaptureCompletionHandler = nil;
707
- }
708
- }
709
- }
710
-
711
577
  @end
@@ -0,0 +1,163 @@
1
+ // This file contains only the key methods we need to modify
2
+ // setupCameraView - use original simple version
3
+ // layoutSubviews - add for frame updates
4
+ // captureOutput - add aspect fill logic
5
+
6
+ - (void)layoutSubviews
7
+ {
8
+ [super layoutSubviews];
9
+
10
+ // Update GLKView frame to match parent bounds
11
+ if (_glkView) {
12
+ _glkView.frame = self.bounds;
13
+ }
14
+ }
15
+
16
+ - (void)createGLKView
17
+ {
18
+ if (self.context) return;
19
+
20
+ self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
21
+ GLKView *view = [[GLKView alloc] initWithFrame:self.bounds];
22
+ view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
23
+ view.translatesAutoresizingMaskIntoConstraints = YES;
24
+ view.context = self.context;
25
+ view.contentScaleFactor = [UIScreen mainScreen].scale; // improve rendering quality
26
+ view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
27
+ [self insertSubview:view atIndex:0];
28
+ _glkView = view;
29
+ glGenRenderbuffers(1, &_renderBuffer);
30
+ glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
31
+ _coreImageContext = [CIContext contextWithEAGLContext:self.context];
32
+ [EAGLContext setCurrentContext:self.context];
33
+ }
34
+
35
+ - (void)setupCameraView
36
+ {
37
+ [self createGLKView];
38
+
39
+ AVCaptureDevice *device = nil;
40
+ NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
41
+ for (AVCaptureDevice *possibleDevice in devices) {
42
+ if (self.useFrontCam) {
43
+ if ([possibleDevice position] == AVCaptureDevicePositionFront) {
44
+ device = possibleDevice;
45
+ }
46
+ } else {
47
+ if ([possibleDevice position] != AVCaptureDevicePositionFront) {
48
+ device = possibleDevice;
49
+ }
50
+ }
51
+ }
52
+ if (!device) return;
53
+
54
+ _imageDedectionConfidence = 0.0;
55
+
56
+ AVCaptureSession *session = [[AVCaptureSession alloc] init];
57
+ self.captureSession = session;
58
+ [session beginConfiguration];
59
+ self.captureDevice = device;
60
+
61
+ NSError *error = nil;
62
+ AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
63
+ session.sessionPreset = AVCaptureSessionPresetPhoto;
64
+ [session addInput:input];
65
+
66
+ AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
67
+ [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
68
+ [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
69
+ [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
70
+ [session addOutput:dataOutput];
71
+
72
+ self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
73
+ [session addOutput:self.stillImageOutput];
74
+
75
+ AVCaptureConnection *connection = [dataOutput.connections firstObject];
76
+ [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
77
+
78
+ if (device.isFlashAvailable)
79
+ {
80
+ [device lockForConfiguration:nil];
81
+ [device setFlashMode:AVCaptureFlashModeOff];
82
+ [device unlockForConfiguration];
83
+
84
+ if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
85
+ {
86
+ [device lockForConfiguration:nil];
87
+ [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
88
+ [device unlockForConfiguration];
89
+ }
90
+ }
91
+
92
+ [session commitConfiguration];
93
+ }
94
+
95
+ -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
96
+ {
97
+ if (self.forceStop) return;
98
+ if (_isStopped || _isCapturing || !CMSampleBufferIsValid(sampleBuffer)) return;
99
+
100
+ CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
101
+
102
+ CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
103
+
104
+ if (self.cameraViewType != IPDFCameraViewTypeNormal)
105
+ {
106
+ image = [self filteredImageUsingEnhanceFilterOnImage:image];
107
+ }
108
+ else
109
+ {
110
+ image = [self filteredImageUsingContrastFilterOnImage:image];
111
+ }
112
+
113
+ if (self.isBorderDetectionEnabled)
114
+ {
115
+ if (_borderDetectFrame)
116
+ {
117
+ _borderDetectLastRectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:image]];
118
+ _borderDetectFrame = NO;
119
+ }
120
+
121
+ if (_borderDetectLastRectangleFeature)
122
+ {
123
+ _imageDedectionConfidence += .5;
124
+
125
+ image = [self drawHighlightOverlayForPoints:image topLeft:_borderDetectLastRectangleFeature.topLeft topRight:_borderDetectLastRectangleFeature.topRight bottomLeft:_borderDetectLastRectangleFeature.bottomLeft bottomRight:_borderDetectLastRectangleFeature.bottomRight];
126
+ }
127
+ else
128
+ {
129
+ _imageDedectionConfidence = 0.0f;
130
+ }
131
+ }
132
+
133
+ if (self.context && _coreImageContext)
134
+ {
135
+ // Calculate the rect to draw the image with aspect fill
136
+ CGRect drawRect = self.bounds;
137
+ CGRect imageExtent = image.extent;
138
+
139
+ // Calculate aspect ratios
140
+ CGFloat imageAspect = imageExtent.size.width / imageExtent.size.height;
141
+ CGFloat viewAspect = drawRect.size.width / drawRect.size.height;
142
+
143
+ CGRect fromRect = imageExtent;
144
+
145
+ // Aspect fill: crop the image to fill the view
146
+ if (imageAspect > viewAspect) {
147
+ // Image is wider, crop width
148
+ CGFloat newWidth = imageExtent.size.height * viewAspect;
149
+ CGFloat xOffset = (imageExtent.size.width - newWidth) / 2.0;
150
+ fromRect = CGRectMake(xOffset, 0, newWidth, imageExtent.size.height);
151
+ } else {
152
+ // Image is taller, crop height
153
+ CGFloat newHeight = imageExtent.size.width / viewAspect;
154
+ CGFloat yOffset = (imageExtent.size.height - newHeight) / 2.0;
155
+ fromRect = CGRectMake(0, yOffset, imageExtent.size.width, newHeight);
156
+ }
157
+
158
+ [_coreImageContext drawImage:image inRect:drawRect fromRect:fromRect];
159
+ [self.context presentRenderbuffer:GL_RENDERBUFFER];
160
+
161
+ [_glkView setNeedsDisplay];
162
+ }
163
+ }