react-native-rectangle-doc-scanner 3.66.0 → 3.68.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocScanner.js +47 -32
- package/package.json +1 -1
- package/src/DocScanner.tsx +52 -34
- package/vendor/react-native-document-scanner/ios/DocumentScannerView.h +2 -0
- package/vendor/react-native-document-scanner/ios/DocumentScannerView.m +107 -44
- package/vendor/react-native-document-scanner/ios/IPDFCameraViewController.m +140 -183
- package/vendor/react-native-document-scanner/ios/RNPdfScannerManager.m +28 -20
- package/vendor/react-native-document-scanner/ios.js +8 -6
package/dist/DocScanner.js
CHANGED
@@ -88,6 +88,18 @@ exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAUL
         return Math.min(100, Math.max(0, quality));
     }, [quality]);
     const handlePictureTaken = (0, react_1.useCallback)((event) => {
+        const captureError = event?.error;
+        if (captureError) {
+            console.error('[DocScanner] Native capture error received:', captureError);
+            captureOriginRef.current = 'auto';
+            setIsAutoCapturing(false);
+            setDetectedRectangle(null);
+            if (captureResolvers.current) {
+                captureResolvers.current.reject(new Error(String(captureError)));
+                captureResolvers.current = null;
+            }
+            return;
+        }
         console.log('[DocScanner] handlePictureTaken called with event:', {
             hasInitialImage: !!event.initialImage,
             hasCroppedImage: !!event.croppedImage,
@@ -156,46 +168,49 @@ exports.DocScanner = (0, react_1.forwardRef)(({ onCapture, overlayColor = DEFAUL
             captureOriginRef.current = 'auto';
             return Promise.reject(new Error('capture_in_progress'));
         }
-        console.log('[DocScanner] Calling native capture method...');
-        let result;
+        console.log('[DocScanner] Calling native capture method (now returns Promise)...');
         try {
-            result = instance.capture();
+            const result = instance.capture();
             console.log('[DocScanner] Native capture method called, result type:', typeof result, 'isPromise:', !!(result && typeof result.then === 'function'));
+            if (result && typeof result.then === 'function') {
+                console.log('[DocScanner] Native returned a promise, waiting for resolution...');
+                return result
+                    .then((payload) => {
+                        console.log('[DocScanner] Native promise resolved with payload:', {
+                            hasCroppedImage: !!payload.croppedImage,
+                            hasInitialImage: !!payload.initialImage,
+                        });
+                        handlePictureTaken(payload);
+                        return payload;
+                    })
+                    .catch((error) => {
+                        console.error('[DocScanner] Native promise rejected:', error);
+                        captureOriginRef.current = 'auto';
+                        throw error;
+                    });
+            }
+            // Fallback for legacy event-based approach
+            console.warn('[DocScanner] Native did not return a promise, using callback-based approach (legacy)');
+            return new Promise((resolve, reject) => {
+                captureResolvers.current = {
+                    resolve: (value) => {
+                        console.log('[DocScanner] Callback resolver called with:', value);
+                        captureOriginRef.current = 'auto';
+                        resolve(value);
+                    },
+                    reject: (reason) => {
+                        console.error('[DocScanner] Callback rejector called with:', reason);
+                        captureOriginRef.current = 'auto';
+                        reject(reason);
+                    },
+                };
+            });
         }
         catch (error) {
             console.error('[DocScanner] Native capture threw error:', error);
             captureOriginRef.current = 'auto';
             return Promise.reject(error);
         }
-        if (result && typeof result.then === 'function') {
-            console.log('[DocScanner] Native returned a promise, waiting for resolution...');
-            return result
-                .then((payload) => {
-                    console.log('[DocScanner] Native promise resolved with payload:', payload);
-                    handlePictureTaken(payload);
-                    return payload;
-                })
-                .catch((error) => {
-                    console.error('[DocScanner] Native promise rejected:', error);
-                    captureOriginRef.current = 'auto';
-                    throw error;
-                });
-        }
-        console.log('[DocScanner] Native did not return a promise, using callback-based approach');
-        return new Promise((resolve, reject) => {
-            captureResolvers.current = {
-                resolve: (value) => {
-                    console.log('[DocScanner] Callback resolver called with:', value);
-                    captureOriginRef.current = 'auto';
-                    resolve(value);
-                },
-                reject: (reason) => {
-                    console.error('[DocScanner] Callback rejector called with:', reason);
-                    captureOriginRef.current = 'auto';
-                    reject(reason);
-                },
-            };
-        });
     }, [handlePictureTaken]);
     const handleManualCapture = (0, react_1.useCallback)(() => {
         captureOriginRef.current = 'manual';
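With the compiled change above, a capture driven through the component now settles as a single Promise: it resolves with the native payload, or rejects with 'capture_in_progress' (while a capture is already pending) or with the error forwarded from the native side. The sketch below is a hedged consumer example, not part of this diff; it assumes the package root re-exports DocScanner and DocScannerHandle and that the forwarded handle exposes a capture() method returning that Promise.

```tsx
// Hypothetical consumer usage; the DocScannerHandle surface is an assumption.
import React, { useCallback, useRef } from 'react';
import { Button, View } from 'react-native';
import { DocScanner, DocScannerHandle } from 'react-native-rectangle-doc-scanner';

export function ScanScreen() {
  const scannerRef = useRef<DocScannerHandle>(null);

  const scan = useCallback(async () => {
    try {
      // Expected to resolve with { croppedImage, initialImage, rectangleCoordinates }.
      const payload = await scannerRef.current?.capture();
      console.log('croppedImage:', payload?.croppedImage);
    } catch (error) {
      // 'capture_in_progress' or the native capture error string.
      console.warn('[ScanScreen] capture failed:', error);
    }
  }, []);

  return (
    <View style={{ flex: 1 }}>
      <DocScanner ref={scannerRef} onCapture={(result) => console.log('auto capture', result)} />
      <Button title="Scan" onPress={scan} />
    </View>
  );
}
```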
package/package.json
CHANGED
package/src/DocScanner.tsx
CHANGED
@@ -150,6 +150,21 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(
 
     const handlePictureTaken = useCallback(
       (event: PictureEvent) => {
+        const captureError = (event as any)?.error;
+        if (captureError) {
+          console.error('[DocScanner] Native capture error received:', captureError);
+          captureOriginRef.current = 'auto';
+          setIsAutoCapturing(false);
+          setDetectedRectangle(null);
+
+          if (captureResolvers.current) {
+            captureResolvers.current.reject(new Error(String(captureError)));
+            captureResolvers.current = null;
+          }
+
+          return;
+        }
+
         console.log('[DocScanner] handlePictureTaken called with event:', {
           hasInitialImage: !!event.initialImage,
           hasCroppedImage: !!event.croppedImage,
@@ -232,47 +247,50 @@ export const DocScanner = forwardRef<DocScannerHandle, Props>(
         return Promise.reject(new Error('capture_in_progress'));
       }
 
-      console.log('[DocScanner] Calling native capture method...');
-      let result: any;
+      console.log('[DocScanner] Calling native capture method (now returns Promise)...');
       try {
-        result = instance.capture();
+        const result = instance.capture();
         console.log('[DocScanner] Native capture method called, result type:', typeof result, 'isPromise:', !!(result && typeof result.then === 'function'));
+
+        if (result && typeof result.then === 'function') {
+          console.log('[DocScanner] Native returned a promise, waiting for resolution...');
+          return result
+            .then((payload: PictureEvent) => {
+              console.log('[DocScanner] Native promise resolved with payload:', {
+                hasCroppedImage: !!payload.croppedImage,
+                hasInitialImage: !!payload.initialImage,
+              });
+              handlePictureTaken(payload);
+              return payload;
+            })
+            .catch((error: unknown) => {
+              console.error('[DocScanner] Native promise rejected:', error);
+              captureOriginRef.current = 'auto';
+              throw error;
+            });
+        }
+
+        // Fallback for legacy event-based approach
+        console.warn('[DocScanner] Native did not return a promise, using callback-based approach (legacy)');
+        return new Promise<PictureEvent>((resolve, reject) => {
+          captureResolvers.current = {
+            resolve: (value) => {
+              console.log('[DocScanner] Callback resolver called with:', value);
+              captureOriginRef.current = 'auto';
+              resolve(value);
+            },
+            reject: (reason) => {
+              console.error('[DocScanner] Callback rejector called with:', reason);
+              captureOriginRef.current = 'auto';
+              reject(reason);
+            },
+          };
+        });
       } catch (error) {
         console.error('[DocScanner] Native capture threw error:', error);
         captureOriginRef.current = 'auto';
         return Promise.reject(error);
       }
-
-      if (result && typeof result.then === 'function') {
-        console.log('[DocScanner] Native returned a promise, waiting for resolution...');
-        return result
-          .then((payload: PictureEvent) => {
-            console.log('[DocScanner] Native promise resolved with payload:', payload);
-            handlePictureTaken(payload);
-            return payload;
-          })
-          .catch((error: unknown) => {
-            console.error('[DocScanner] Native promise rejected:', error);
-            captureOriginRef.current = 'auto';
-            throw error;
-          });
-      }
-
-      console.log('[DocScanner] Native did not return a promise, using callback-based approach');
-      return new Promise<PictureEvent>((resolve, reject) => {
-        captureResolvers.current = {
-          resolve: (value) => {
-            console.log('[DocScanner] Callback resolver called with:', value);
-            captureOriginRef.current = 'auto';
-            resolve(value);
-          },
-          reject: (reason) => {
-            console.error('[DocScanner] Callback rejector called with:', reason);
-            captureOriginRef.current = 'auto';
-            reject(reason);
-          },
-        };
-      });
     }, [handlePictureTaken]);
 
     const handleManualCapture = useCallback(() => {
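The source keeps both transports: when the native capture() returns a thenable it is awaited directly, otherwise a resolve/reject pair is parked in captureResolvers until handlePictureTaken settles it from the legacy onPictureTaken event. A hedged, self-contained sketch of that feature-detection pattern with simplified stand-in types (the real component uses PictureEvent and React refs):

```ts
// Simplified stand-ins for the component's internals.
type Payload = { croppedImage?: string; initialImage?: string };
type Resolvers = { resolve: (p: Payload) => void; reject: (r: unknown) => void };

// Pending resolver pair, used only on the legacy, event-based path.
const pending: { current: Resolvers | null } = { current: null };

function captureCompat(nativeCapture: () => unknown): Promise<Payload> {
  const result = nativeCapture();

  // Modern path: the native method already returned a Promise.
  if (result && typeof (result as Promise<Payload>).then === 'function') {
    return result as Promise<Payload>;
  }

  // Legacy path: settle later, when the picture-taken event arrives.
  return new Promise<Payload>((resolve, reject) => {
    pending.current = { resolve, reject };
  });
}

// Event handler mirroring the error short-circuit added to handlePictureTaken.
function onPictureTaken(event: Payload & { error?: string }) {
  if (!pending.current) return;
  if (event.error) {
    pending.current.reject(new Error(event.error));
  } else {
    pending.current.resolve(event);
  }
  pending.current = null;
}
```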
package/vendor/react-native-document-scanner/ios/DocumentScannerView.h
CHANGED
@@ -1,5 +1,6 @@
 #import "IPDFCameraViewController.h"
 #import <React/RCTViewManager.h>
+#import <React/RCTBridgeModule.h>
 
 @interface DocumentScannerView : IPDFCameraViewController <IPDFCameraViewControllerDelegate>
 
@@ -14,5 +15,6 @@
 @property (nonatomic, assign) BOOL manualOnly;
 
 - (void) capture;
+- (void) captureWithResolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject;
 
 @end
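The new captureWithResolver:rejecter: declaration is the native half of the Promise the JS layer awaits. Purely as an illustration (the module typing below is an assumption, with field names taken from the payload built in DocumentScannerView.m), the bridge surface could be described in TypeScript like this:

```ts
import { NativeModules } from 'react-native';

// Payload fields as assembled on the native side; croppedImage/initialImage are
// file paths by default, or base64 strings when the useBase64 prop is set.
export interface PictureEvent {
  croppedImage: string;
  initialImage: string;
  rectangleCoordinates: Record<string, unknown> | null;
}

// Assumed typing of the exported manager method: capture(reactTag) -> Promise.
interface RNPdfScannerManagerType {
  capture(reactTag: number | null): Promise<PictureEvent>;
}

export const RNPdfScannerManager =
  NativeModules.RNPdfScannerManager as RNPdfScannerManagerType;
```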
package/vendor/react-native-document-scanner/ios/DocumentScannerView.m
CHANGED
@@ -124,55 +124,118 @@
         self.onRectangleDetect(payload);
     }
 
+// Helper method to process captured images and prepare response data
+- (NSDictionary *)processAndPrepareImageData:(UIImage *)croppedImage
+                                initialImage:(UIImage *)initialImage
+                            rectangleFeature:(CIRectangleFeature *)rectangleFeature {
+    CGFloat imageQuality = MAX(self.quality, 0.95);
+    NSData *croppedImageData = UIImageJPEGRepresentation(croppedImage, imageQuality);
+
+    if (initialImage.imageOrientation != UIImageOrientationUp) {
+        UIGraphicsBeginImageContextWithOptions(initialImage.size, false, initialImage.scale);
+        [initialImage drawInRect:CGRectMake(0, 0, initialImage.size.width, initialImage.size.height)];
+        initialImage = UIGraphicsGetImageFromCurrentImageContext();
+        UIGraphicsEndImageContext();
+    }
+    NSData *initialImageData = UIImageJPEGRepresentation(initialImage, imageQuality);
+
+    NSDictionary *rectangleCoordinatesDict = [self dictionaryForRectangleFeature:rectangleFeature];
+    id rectangleCoordinates = rectangleCoordinatesDict ? rectangleCoordinatesDict : [NSNull null];
+
+    if (self.useBase64) {
+        return @{
+            @"croppedImage": [croppedImageData base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength],
+            @"initialImage": [initialImageData base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength],
+            @"rectangleCoordinates": rectangleCoordinates
+        };
+    } else {
+        NSString *dir = NSTemporaryDirectory();
+        if (self.saveInAppDocument) {
+            dir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
+        }
+        NSString *croppedFilePath = [dir stringByAppendingPathComponent:[NSString stringWithFormat:@"cropped_img_%i.jpeg",(int)[NSDate date].timeIntervalSince1970]];
+        NSString *initialFilePath = [dir stringByAppendingPathComponent:[NSString stringWithFormat:@"initial_img_%i.jpeg",(int)[NSDate date].timeIntervalSince1970]];
+
+        [croppedImageData writeToFile:croppedFilePath atomically:YES];
+        [initialImageData writeToFile:initialFilePath atomically:YES];
+
+        return @{
+            @"croppedImage": croppedFilePath,
+            @"initialImage": initialFilePath,
+            @"rectangleCoordinates": rectangleCoordinates
+        };
+    }
+}
+
+// Promise-based capture method - NEW
+- (void)captureWithResolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject {
+    NSLog(@"[DocumentScanner] captureWithResolver called");
+
+    [self captureImageWithCompletionHander:^(UIImage *croppedImage, UIImage *initialImage, CIRectangleFeature *rectangleFeature) {
+        NSLog(@"[DocumentScanner] captureImageWithCompletionHander callback - croppedImage: %@, initialImage: %@", croppedImage ? @"YES" : @"NO", initialImage ? @"YES" : @"NO");
+
+        if (!croppedImage && initialImage) {
+            croppedImage = initialImage;
+        } else if (!initialImage && croppedImage) {
+            initialImage = croppedImage;
+        }
+
+        if (!croppedImage || !initialImage) {
+            NSLog(@"[DocumentScanner] capture failed - missing image data");
+            reject(@"CAPTURE_FAILED", @"Failed to capture image", nil);
+
+            if (!self.captureMultiple) {
+                [self stop];
+            }
+            return;
+        }
+
+        NSLog(@"[DocumentScanner] Processing captured images");
+        NSDictionary *result = [self processAndPrepareImageData:croppedImage
+                                                    initialImage:initialImage
+                                                rectangleFeature:rectangleFeature];
+
+        NSLog(@"[DocumentScanner] Resolving promise with result");
+        resolve(result);
+
+        if (!self.captureMultiple) {
+            [self stop];
+        }
+    }];
+}
+
+// Event-based capture method - LEGACY (for backwards compatibility)
 - (void) capture {
     NSLog(@"[DocumentScanner] capture called");
     [self captureImageWithCompletionHander:^(UIImage *croppedImage, UIImage *initialImage, CIRectangleFeature *rectangleFeature) {
-
+        NSLog(@"[DocumentScanner] captureImageWithCompletionHander callback - croppedImage: %@, initialImage: %@", croppedImage ? @"YES" : @"NO", initialImage ? @"YES" : @"NO");
+
+        if (!croppedImage && initialImage) {
+            // Use initial image when cropping is not available
+            croppedImage = initialImage;
+        } else if (!initialImage && croppedImage) {
+            // Mirror cropped image so downstream logic continues to work
+            initialImage = croppedImage;
+        }
+
+        if (!croppedImage || !initialImage) {
+            NSLog(@"[DocumentScanner] capture failed - missing image data");
+            if (self.onPictureTaken) {
+                self.onPictureTaken(@{ @"error": @"capture_failed" });
+            }
+
+            if (!self.captureMultiple) {
+                [self stop];
+            }
+            return;
+        }
+
         if (self.onPictureTaken) {
             NSLog(@"[DocumentScanner] Calling onPictureTaken");
-
-
-
-
-
-            if (initialImage.imageOrientation != UIImageOrientationUp) {
-                UIGraphicsBeginImageContextWithOptions(initialImage.size, false, initialImage.scale);
-                [initialImage drawInRect:CGRectMake(0, 0, initialImage.size.width
-                                                    , initialImage.size.height)];
-                initialImage = UIGraphicsGetImageFromCurrentImageContext();
-                UIGraphicsEndImageContext();
-            }
-            NSData *initialImageData = UIImageJPEGRepresentation(initialImage, imageQuality);
-
-            /*
-             RectangleCoordinates expects a rectanle viewed from portrait,
-             while rectangleFeature returns a rectangle viewed from landscape, which explains the nonsense of the mapping below.
-             Sorry about that.
-             */
-            NSDictionary *rectangleCoordinatesDict = [self dictionaryForRectangleFeature:rectangleFeature];
-            id rectangleCoordinates = rectangleCoordinatesDict ? rectangleCoordinatesDict : [NSNull null];
-            if (self.useBase64) {
-                self.onPictureTaken(@{
-                    @"croppedImage": [croppedImageData base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength],
-                    @"initialImage": [initialImageData base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength],
-                    @"rectangleCoordinates": rectangleCoordinates });
-            }
-            else {
-                NSString *dir = NSTemporaryDirectory();
-                if (self.saveInAppDocument) {
-                    dir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
-                }
-                NSString *croppedFilePath = [dir stringByAppendingPathComponent:[NSString stringWithFormat:@"cropped_img_%i.jpeg",(int)[NSDate date].timeIntervalSince1970]];
-                NSString *initialFilePath = [dir stringByAppendingPathComponent:[NSString stringWithFormat:@"initial_img_%i.jpeg",(int)[NSDate date].timeIntervalSince1970]];
-
-                [croppedImageData writeToFile:croppedFilePath atomically:YES];
-                [initialImageData writeToFile:initialFilePath atomically:YES];
-
-                self.onPictureTaken(@{
-                    @"croppedImage": croppedFilePath,
-                    @"initialImage": initialFilePath,
-                    @"rectangleCoordinates": rectangleCoordinates });
-            }
+            NSDictionary *result = [self processAndPrepareImageData:croppedImage
+                                                        initialImage:initialImage
+                                                    rectangleFeature:rectangleFeature];
+            self.onPictureTaken(result);
         }
 
         if (!self.captureMultiple) {
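processAndPrepareImageData now builds the payload for both the Promise path and the legacy event path: base64 strings when useBase64 is set, otherwise JPEG file paths (written to the Documents directory when saveInAppDocument is set), plus rectangleCoordinates or null. A hedged sketch of consuming either flavour on the JS side; the useBase64 argument is assumed to mirror whatever prop the consumer passed to the scanner:

```ts
type CapturePayload = {
  croppedImage: string;        // base64 string or absolute file path
  initialImage: string;
  rectangleCoordinates: Record<string, unknown> | null;
};

// Builds a value usable as an <Image source={{ uri }}> from either payload flavour.
function toImageUri(payload: CapturePayload, useBase64: boolean): string {
  if (useBase64) {
    return `data:image/jpeg;base64,${payload.croppedImage}`;
  }
  // The native side writes plain paths; prefix the file:// scheme if it is missing.
  return payload.croppedImage.startsWith('file://')
    ? payload.croppedImage
    : `file://${payload.croppedImage}`;
}
```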
package/vendor/react-native-document-scanner/ios/IPDFCameraViewController.m
CHANGED
@@ -15,13 +15,25 @@
 #import <ImageIO/ImageIO.h>
 #import <GLKit/GLKit.h>
 
+static inline void dispatch_async_main_queue(dispatch_block_t block)
+{
+    if (!block) {
+        return;
+    }
+
+    if ([NSThread isMainThread]) {
+        block();
+    } else {
+        dispatch_async(dispatch_get_main_queue(), block);
+    }
+}
+
 @interface IPDFCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate>
 
 @property (nonatomic,strong) AVCaptureSession *captureSession;
 @property (nonatomic,strong) AVCaptureDevice *captureDevice;
 @property (nonatomic,strong) EAGLContext *context;
 
-@property (nonatomic, strong) AVCapturePhotoOutput* photoOutput;
 @property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput; // Kept for backward compatibility
 
 @property (nonatomic, assign) BOOL forceStop;
@@ -47,6 +59,39 @@
     BOOL _isCapturing;
 }
 
+- (void)completeCaptureWithCroppedImage:(UIImage *)croppedImage
+                           initialImage:(UIImage *)initialImage
+                              rectangle:(CIRectangleFeature *)rectangleFeature
+                                  error:(NSError *)error
+{
+    void (^completionHandler)(UIImage *, UIImage *, CIRectangleFeature *) = self.captureCompletionHandler;
+
+    dispatch_async_main_queue(^{
+        if (error) {
+            NSLog(@"[IPDFCameraViewController] Completing capture with error: %@", error.localizedDescription);
+            if (completionHandler) {
+                completionHandler(nil, nil, nil);
+            }
+        } else {
+            UIImage *resolvedInitial = initialImage ?: croppedImage;
+            UIImage *resolvedCropped = croppedImage ?: resolvedInitial;
+
+            if (!resolvedInitial || !resolvedCropped) {
+                NSLog(@"[IPDFCameraViewController] Missing images during completion, sending failure to JS");
+                if (completionHandler) {
+                    completionHandler(nil, nil, nil);
+                }
+            } else if (completionHandler) {
+                completionHandler(resolvedCropped, resolvedInitial, rectangleFeature);
+            }
+        }
+
+        self.captureCompletionHandler = nil;
+        self->_isCapturing = NO;
+        [self hideGLKView:NO completion:nil];
+    });
+}
+
 - (void)awakeFromNib
 {
     [super awakeFromNib];
@@ -146,29 +191,14 @@
     [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
     [session addOutput:dataOutput];
 
-    // Use
-
-
-        if ([session canAddOutput:self.photoOutput]) {
-            [session addOutput:self.photoOutput];
-            NSLog(@"[IPDFCamera] Using AVCapturePhotoOutput (modern API)");
-        } else {
-            NSLog(@"[IPDFCamera] WARNING: Cannot add AVCapturePhotoOutput, falling back to AVCaptureStillImageOutput");
-            self.photoOutput = nil;
-            // Fallback to legacy API
-            self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
-            if ([session canAddOutput:self.stillImageOutput]) {
-                [session addOutput:self.stillImageOutput];
-                NSLog(@"[IPDFCamera] Fallback successful: Using AVCaptureStillImageOutput");
-            } else {
-                NSLog(@"[IPDFCamera] CRITICAL ERROR: Cannot add any capture output!");
-            }
-        }
-    } else {
-        // Fallback for older iOS versions (< iOS 10)
-        self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+    // Use legacy AVCaptureStillImageOutput for reliable manual captures on all supported iOS versions
+    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+    if ([session canAddOutput:self.stillImageOutput]) {
         [session addOutput:self.stillImageOutput];
-        NSLog(@"[IPDFCamera] Using AVCaptureStillImageOutput (
+        NSLog(@"[IPDFCamera] Using AVCaptureStillImageOutput (manual capture)");
+    } else {
+        NSLog(@"[IPDFCamera] CRITICAL ERROR: Cannot add AVCaptureStillImageOutput to session");
+        self.stillImageOutput = nil;
     }
 
     AVCaptureConnection *connection = [dataOutput.connections firstObject];
@@ -452,7 +482,10 @@
 
     if (!self.captureSession || !self.captureSession.isRunning) {
         NSLog(@"[IPDFCameraViewController] ERROR: captureSession is not running");
-
+        NSError *error = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                             code:-200
+                                         userInfo:@{ NSLocalizedDescriptionKey: @"capture_session_not_running" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:error];
         return;
     }
 
@@ -471,137 +504,64 @@
     // Store completion handler for delegate callback
     self.captureCompletionHandler = completionHandler;
 
-
-
-
-
-
-
-
-    }
-
-    NSLog(@"[IPDFCameraViewController] photoOutput is nil, trying fallback to stillImageOutput");
-    // Fallback to legacy API if photoOutput is not available
+    if (!self.stillImageOutput) {
+        NSLog(@"[IPDFCameraViewController] ERROR: stillImageOutput is nil");
+        NSError *error = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                             code:-201
+                                         userInfo:@{ NSLocalizedDescriptionKey: @"missing_still_image_output" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:error];
+        return;
     }
 
-
+    AVCaptureConnection *videoConnection = nil;
+    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
     {
-
-        NSLog(@"[IPDFCameraViewController] ERROR: stillImageOutput is nil");
-        _isCapturing = NO;
-        self.captureCompletionHandler = nil;
-        [weakSelf hideGLKView:NO completion:nil];
-        return;
-    }
-
-    AVCaptureConnection *videoConnection = nil;
-    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
+        for (AVCaptureInputPort *port in [connection inputPorts])
         {
-
+            if ([[port mediaType] isEqual:AVMediaTypeVideo] )
             {
-
-
-                videoConnection = connection;
-                break;
-            }
+                videoConnection = connection;
+                break;
             }
-        if (videoConnection) break;
-    }
-
-    if (!videoConnection) {
-        NSLog(@"[IPDFCameraViewController] ERROR: No video connection found");
-        _isCapturing = NO;
-        self.captureCompletionHandler = nil;
-        [weakSelf hideGLKView:NO completion:nil];
-        return;
         }
-
-    NSLog(@"[IPDFCameraViewController] Using AVCaptureStillImageOutput (legacy)");
-    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
-    {
-        [weakSelf handleCapturedImageData:imageSampleBuffer error:error];
-    }];
+        if (videoConnection) break;
     }
-}
-
-// AVCapturePhotoCaptureDelegate method for iOS 11+
-- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error API_AVAILABLE(ios(11.0)) {
-    NSLog(@"[IPDFCameraViewController] didFinishProcessingPhoto called, error=%@", error);
 
-    if (
-        NSLog(@"[IPDFCameraViewController] ERROR
-
-
-
+    if (!videoConnection) {
+        NSLog(@"[IPDFCameraViewController] ERROR: No video connection found");
+        NSError *error = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                             code:-202
+                                         userInfo:@{ NSLocalizedDescriptionKey: @"no_video_connection" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:error];
         return;
     }
 
-
-
-    if (!imageData) {
-        NSLog(@"[IPDFCameraViewController] ERROR: Failed to get image data from photo");
-        _isCapturing = NO;
-        self.captureCompletionHandler = nil;
-        [self hideGLKView:NO completion:nil];
-        return;
+    if (videoConnection.isVideoOrientationSupported) {
+        videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
     }
 
-    NSLog(@"[IPDFCameraViewController]
-    [self
-
-
-// AVCapturePhotoCaptureDelegate method for iOS 10
-- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings error:(NSError *)error API_DEPRECATED("Use -captureOutput:didFinishProcessingPhoto:error: instead.", ios(10.0, 11.0)) {
-    NSLog(@"[IPDFCameraViewController] didFinishProcessingPhotoSampleBuffer called (iOS 10)");
-
-    if (error) {
-        NSLog(@"[IPDFCameraViewController] ERROR in didFinishProcessingPhotoSampleBuffer: %@", error);
-        _isCapturing = NO;
-        self.captureCompletionHandler = nil;
-        [self hideGLKView:NO completion:nil];
-        return;
-    }
-
-    if (!photoSampleBuffer) {
-        NSLog(@"[IPDFCameraViewController] ERROR: photoSampleBuffer is nil");
-        _isCapturing = NO;
-        self.captureCompletionHandler = nil;
-        [self hideGLKView:NO completion:nil];
-        return;
-    }
-
-    // iOS 10: Use AVCapturePhotoOutput's method for converting sample buffer
-    NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
-
-    if (!imageData) {
-        NSLog(@"[IPDFCameraViewController] ERROR: Failed to create JPEG data from photo sample buffer");
-        _isCapturing = NO;
-        self.captureCompletionHandler = nil;
-        [self hideGLKView:NO completion:nil];
-        return;
-    }
-
-    NSLog(@"[IPDFCameraViewController] Got image data from photo sample buffer (iOS 10), size: %lu bytes", (unsigned long)imageData.length);
-    [self processImageData:imageData];
+    NSLog(@"[IPDFCameraViewController] Capturing image via AVCaptureStillImageOutput");
+    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
+        [weakSelf handleCapturedImageData:imageSampleBuffer error:error];
+    }];
 }
 
-// Helper method for legacy AVCaptureStillImageOutput
+// Helper method for legacy AVCaptureStillImageOutput
 - (void)handleCapturedImageData:(CMSampleBufferRef)sampleBuffer error:(NSError *)error {
     NSLog(@"[IPDFCameraViewController] handleCapturedImageData called (legacy), error=%@, buffer=%@", error, sampleBuffer ? @"YES" : @"NO");
 
     if (error) {
         NSLog(@"[IPDFCameraViewController] ERROR capturing image: %@", error);
-
-        self.captureCompletionHandler = nil;
-        [self hideGLKView:NO completion:nil];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:error];
        return;
    }
 
     if (!sampleBuffer) {
         NSLog(@"[IPDFCameraViewController] ERROR: sampleBuffer is nil");
-
-
-
+        NSError *bufferError = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                                   code:-206
+                                               userInfo:@{ NSLocalizedDescriptionKey: @"sample_buffer_nil" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:bufferError];
         return;
     }
 
@@ -610,9 +570,10 @@
 
     if (!imageData) {
         NSLog(@"[IPDFCameraViewController] ERROR: Failed to create image data from sample buffer (legacy)");
-
-
-
+        NSError *dataError = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                                  code:-207
+                                              userInfo:@{ NSLocalizedDescriptionKey: @"legacy_sample_conversion_failed" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:dataError];
         return;
     }
 
@@ -623,67 +584,63 @@
 - (void)processImageData:(NSData *)imageData {
     NSLog(@"[IPDFCameraViewController] processImageData called, imageData size: %lu bytes", (unsigned long)imageData.length);
 
-
-
-
-
-
-        _isCapturing = NO;
-        [self hideGLKView:NO completion:nil];
+    if (!imageData || imageData.length == 0) {
+        NSError *dataError = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                                  code:-208
+                                              userInfo:@{ NSLocalizedDescriptionKey: @"empty_image_data" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:dataError];
         return;
     }
 
-
-    {
-
-
-
-
-
-
-        else
-        {
-            enhancedImage = [self filteredImageUsingContrastFilterOnImage:enhancedImage];
-        }
-
-        if (self.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
-        {
-            CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
-
-            if (rectangleFeature)
-            {
-                enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
+    UIImage *initialImage = [UIImage imageWithData:imageData];
+    if (!initialImage) {
+        NSError *conversionError = [NSError errorWithDomain:@"IPDFCameraViewController"
+                                                       code:-209
+                                                   userInfo:@{ NSLocalizedDescriptionKey: @"initial_image_conversion_failed" }];
+        [self completeCaptureWithCroppedImage:nil initialImage:nil rectangle:nil error:conversionError];
+        return;
+    }
 
-
-
-                UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
-                UIImage *initialImage = [UIImage imageWithData:imageData];
-                UIGraphicsEndImageContext();
+    UIImage *croppedImage = initialImage;
+    CIRectangleFeature *rectangleFeature = nil;
 
-
-
+    BOOL shouldEnhance = (self.cameraViewType == IPDFCameraViewTypeBlackAndWhite) || self.isBorderDetectionEnabled;
+    if (shouldEnhance) {
+        CIImage *processedImage = [CIImage imageWithData:imageData];
+        if (!processedImage) {
+            NSLog(@"[IPDFCameraViewController] Unable to create CIImage from data, returning original image");
+        } else {
+            if (self.cameraViewType == IPDFCameraViewTypeBlackAndWhite) {
+                processedImage = [self filteredImageUsingEnhanceFilterOnImage:processedImage];
             } else {
-
-
-
-
-
+                processedImage = [self filteredImageUsingContrastFilterOnImage:processedImage];
+            }
+
+            if (self.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence)) {
+                CIRectangleFeature *detectedRectangle = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:processedImage]];
+
+                if (detectedRectangle) {
+                    rectangleFeature = detectedRectangle;
+                    CIImage *correctedImage = [self correctPerspectiveForImage:processedImage withFeatures:detectedRectangle];
+
+                    UIGraphicsBeginImageContext(CGSizeMake(correctedImage.extent.size.height, correctedImage.extent.size.width));
+                    [[UIImage imageWithCIImage:correctedImage scale:1.0 orientation:UIImageOrientationRight] drawInRect:CGRectMake(0, 0, correctedImage.extent.size.height, correctedImage.extent.size.width)];
+                    UIImage *perspectiveCorrectedImage = UIGraphicsGetImageFromCurrentImageContext();
+                    UIGraphicsEndImageContext();
+
+                    if (perspectiveCorrectedImage) {
+                        croppedImage = perspectiveCorrectedImage;
+                    } else {
+                        NSLog(@"[IPDFCameraViewController] Failed to create perspective corrected image, using original");
+                    }
+                } else {
+                    NSLog(@"[IPDFCameraViewController] No rectangle detected during manual capture, returning original image");
+                }
             }
-        } else {
-            [weakSelf hideGLKView:NO completion:nil];
-            UIImage *initialImage = [UIImage imageWithData:imageData];
-            completionHandler(initialImage, initialImage, nil);
         }
     }
-    else
-    {
-        [weakSelf hideGLKView:NO completion:nil];
-        UIImage *initialImage = [UIImage imageWithData:imageData];
-        completionHandler(initialImage, initialImage, nil);
-    }
 
-
-    self.captureCompletionHandler = nil;
+    [self completeCaptureWithCroppedImage:croppedImage initialImage:initialImage rectangle:rectangleFeature error:nil];
 }
 
 - (void)hideGLKView:(BOOL)hidden completion:(void(^)())completion
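Every failure exit above now funnels through completeCaptureWithCroppedImage:initialImage:rectangle:error:, which hands nil images back to the stored completion handler on the main queue; the view layer then either rejects the capture Promise (CAPTURE_FAILED) or emits the legacy { error: 'capture_failed' } event. The NSError identifiers (capture_session_not_running, missing_still_image_output, no_video_connection, sample_buffer_nil, legacy_sample_conversion_failed, empty_image_data, initial_image_conversion_failed) are logged natively; how much of that detail reaches JS is not shown in this diff, so the hedged sketch below only branches on what the JS layer demonstrably receives:

```ts
// React Native conventionally exposes the first reject() argument as error.code.
type NativeRejection = Error & { code?: string };

function describeCaptureFailure(error: NativeRejection): string {
  switch (error.code) {
    case 'NO_VIEW':
      return 'The scanner view is not mounted yet; try again once the camera is visible.';
    case 'CAPTURE_FAILED':
      return 'The camera did not return an image; check permissions and retry.';
    default:
      return `Capture failed: ${error.message}`;
  }
}
```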
package/vendor/react-native-document-scanner/ios/RNPdfScannerManager.m
CHANGED
@@ -34,31 +34,39 @@ RCT_EXPORT_VIEW_PROPERTY(quality, float)
 RCT_EXPORT_VIEW_PROPERTY(brightness, float)
 RCT_EXPORT_VIEW_PROPERTY(contrast, float)
 
-// Main capture method -
-RCT_EXPORT_METHOD(capture)
-
+// Main capture method - returns a Promise
+RCT_EXPORT_METHOD(capture:(nullable NSNumber *)reactTag
+                  resolver:(RCTPromiseResolveBlock)resolve
+                  rejecter:(RCTPromiseRejectBlock)reject) {
+    NSLog(@"[RNPdfScannerManager] capture called with reactTag: %@", reactTag);
     dispatch_async(dispatch_get_main_queue(), ^{
-
-
-
+        DocumentScannerView *targetView = nil;
+
+        if (reactTag) {
+            UIView *view = [self.bridge.uiManager viewForReactTag:reactTag];
+            if ([view isKindOfClass:[DocumentScannerView class]]) {
+                targetView = (DocumentScannerView *)view;
+                self->_scannerView = targetView;
+            } else if (view) {
+                NSLog(@"[RNPdfScannerManager] View for tag %@ is not DocumentScannerView: %@", reactTag, NSStringFromClass(view.class));
+            } else {
+                NSLog(@"[RNPdfScannerManager] No view found for tag %@", reactTag);
+            }
         }
-        NSLog(@"[RNPdfScannerManager] Calling capture on view: %@", self->_scannerView);
-        [self->_scannerView capture];
-    });
-}
 
-
-
-
-
-
-        if (!
-            NSLog(@"[RNPdfScannerManager]
+        if (!targetView && self->_scannerView) {
+            NSLog(@"[RNPdfScannerManager] Falling back to last known scanner view");
+            targetView = self->_scannerView;
+        }
+
+        if (!targetView) {
+            NSLog(@"[RNPdfScannerManager] ERROR: No scanner view available for capture");
+            reject(@"NO_VIEW", @"No scanner view available for capture", nil);
             return;
         }
-
-        NSLog(@"[RNPdfScannerManager] Calling capture on view: %@",
-        [
+
+        NSLog(@"[RNPdfScannerManager] Calling capture on view: %@", targetView);
+        [targetView captureWithResolver:resolve rejecter:reject];
     });
 }
 
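The manager method is now tag-aware: JS passes the scanner view's react tag, the manager looks the view up with viewForReactTag:, falls back to the last known scanner view, and rejects with NO_VIEW when nothing is available. A hedged sketch of calling it directly (it assumes React Native's usual mapping of the first reject argument onto error.code):

```ts
import { NativeModules, findNodeHandle } from 'react-native';
import type { Component } from 'react';

async function captureFromView(viewRef: Component | null) {
  const reactTag = findNodeHandle(viewRef);
  try {
    // Resolves with the payload assembled in DocumentScannerView.m.
    return await NativeModules.RNPdfScannerManager.capture(reactTag);
  } catch (error: any) {
    if (error?.code === 'NO_VIEW') {
      console.warn('No DocumentScannerView available for tag:', reactTag);
    }
    throw error;
  }
}
```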
package/vendor/react-native-document-scanner/ios.js
CHANGED
@@ -30,13 +30,15 @@ class PdfScanner extends React.Component {
     console.log('[PdfScanner/ios.js] capture called, ref:', this.scannerRef.current);
     const handle = findNodeHandle(this.scannerRef.current);
     console.log('[PdfScanner/ios.js] node handle (reactTag):', handle);
-
-
-
+
+    if (!handle) {
+      console.error('[PdfScanner/ios.js] ERROR: No handle found for scanner ref');
+      return Promise.reject(new Error('No handle found for scanner view'));
     }
-
-
-
+
+    // Call native method with reactTag - now returns a Promise
+    console.log('[PdfScanner/ios.js] Calling native capture with handle:', handle);
+    return NativeModules.RNPdfScannerManager.capture(handle);
   }
 
   render() {
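With this change the vendored wrapper's capture() always returns a Promise: either the one produced by the native module or an immediate local rejection when the ref has no node handle yet, so callers no longer need to wait for onPictureTaken to learn the outcome of a manual capture. A minimal hedged sketch of that contract, with the scanner instance declared rather than rendered:

```ts
// Hypothetical stand-in for a mounted PdfScanner instance (ios.js wrapper).
declare const scanner: {
  capture(): Promise<{ croppedImage: string; initialImage: string }>;
};

async function takePicture(): Promise<string | null> {
  try {
    const { croppedImage } = await scanner.capture();
    return croppedImage;
  } catch (error) {
    // Covers the local 'No handle found for scanner view' rejection as well as
    // rejections forwarded from RNPdfScannerManager (e.g. NO_VIEW, CAPTURE_FAILED).
    console.warn('manual capture failed', error);
    return null;
  }
}
```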