@momo-kits/camerakit 0.152.3 → 0.152.4-sp.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,9 @@ import UIKit
 import CoreMotion
 import Vision
 
+// Global OCR queue (so OCR never blocks sessionQueue)
+let globalOCRQueue = DispatchQueue(label: "com.tesla.react-native-camera-kit.ocr", qos: .userInitiated)
+
 /*
  * Real camera implementation that uses AVFoundation
  */
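The new `globalOCRQueue` is a module-level serial queue dedicated to Vision work, so text recognition never contends with `sessionQueue`, the queue that owns all `AVCaptureSession` mutations. A minimal standalone sketch of the same pattern (the queue label, function name, and callback shape are illustrative, not taken from the package):

import CoreVideo
import Vision

let ocrQueue = DispatchQueue(label: "example.ocr", qos: .userInitiated)

func recognizeText(in pixelBuffer: CVPixelBuffer, completion: @escaping ([String]) -> Void) {
    // Hop off the caller's queue immediately; Vision can take tens of milliseconds per frame.
    ocrQueue.async {
        let request = VNRecognizeTextRequest { request, _ in
            let observations = request.results as? [VNRecognizedTextObservation] ?? []
            completion(observations.compactMap { $0.topCandidates(1).first?.string })
        }
        let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
        try? handler.perform([request]) // errors swallowed, matching the diff's behavior
    }
}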
@@ -21,16 +24,16 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
     private let session = AVCaptureSession()
     // Communicate with the session and other session objects on this queue.
     private let sessionQueue = DispatchQueue(label: "com.tesla.react-native-camera-kit")
-
+
     // utilities
     private var setupResult: SetupResult = .notStarted
     private var isSessionRunning: Bool = false
     private var backgroundRecordingId: UIBackgroundTaskIdentifier = .invalid
-
+
     private var videoDeviceInput: AVCaptureDeviceInput?
     private let photoOutput = AVCapturePhotoOutput()
     private let metadataOutput = AVCaptureMetadataOutput()
-
+
     private var resizeMode: ResizeMode = .cover
     private var flashMode: FlashMode = .auto
     private var torchMode: TorchMode = .off
@@ -38,6 +41,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
     private var resetFocus: (() -> Void)?
     private var focusFinished: (() -> Void)?
     private var onBarcodeRead: ((_ barcode: String,_ codeFormat : CodeFormat) -> Void)?
+    private var supportedBarcodeTypes: [CodeFormat] = []
     private var scannerFrameSize: CGRect? = nil
     private var barcodeFrameSize: CGSize? = nil
     private var onOrientationChange: RCTDirectEventBlock?
@@ -45,23 +49,25 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
     private var lastOnZoom: Double?
     private var zoom: Double?
     private var maxZoom: Double?
-
+
     private var deviceOrientation = UIDeviceOrientation.unknown
     private var motionManager: CMMotionManager?
-
+
     // KVO observation
     private var adjustingFocusObservation: NSKeyValueObservation?
 
     // Keep delegate objects in memory to avoid collecting them before photo capturing finishes
     private var inProgressPhotoCaptureDelegates = [Int64: PhotoCaptureDelegate]()
-
+
     private var onTextRead: ((_ text: String) -> Void)?
     private let videoDataOutput = AVCaptureVideoDataOutput()
     private var textRequest: VNRecognizeTextRequest?
     private var textDetectionEnabled = false
     private var lastTextProcess = Date.distantPast
     private let textThrottle: TimeInterval = 0.35 // seconds
-
+
+    private var zoomStartedAt: Double = 1.0
+
     // MARK: - Lifecycle
 
     func cameraRemovedFromSuperview() {
@@ -87,15 +93,16 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
         UIDevice.current.endGeneratingDeviceOrientationNotifications()
         #endif
     }
-
+
     deinit {
         removeObservers()
     }
-
+
     // MARK: - Public
 
     func setup(cameraType: CameraType, supportedBarcodeType: [CodeFormat]) {
-
+        self.supportedBarcodeTypes = supportedBarcodeType
+
         // Setup the capture session with priority on basic video preview first
         sessionQueue.async {
             self.setupResult = self.setupBasicVideoInput(cameraType: cameraType)
@@ -117,7 +124,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 self.setVideoOrientationToInterfaceOrientation()
             }
         }
-
+
         DispatchQueue.global(qos: .utility).async {
             self.initializeMotionManager()
         }
@@ -125,37 +132,36 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
     }
 
     // MARK: - Private optimization methods
-
+
     private func setupBasicVideoInput(cameraType: CameraType) -> SetupResult {
         guard let videoDevice = self.getBestDevice(for: cameraType),
               let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
             return .sessionConfigurationFailed
         }
-
+
         session.beginConfiguration()
         defer { session.commitConfiguration() }
-
+
         if session.canAddInput(videoDeviceInput) {
             session.addInput(videoDeviceInput)
             self.videoDeviceInput = videoDeviceInput
            self.resetZoom(forDevice: videoDevice)
             return .success
-        } else {
-            return .sessionConfigurationFailed
         }
+        return .sessionConfigurationFailed
     }
-
+
     private func setupAdditionalOutputs(supportedBarcodeType: [CodeFormat]) {
         session.beginConfiguration()
         defer { session.commitConfiguration() }
-
+
         // Add photo output
         if #available(iOS 13.0, *) {
-            if let maxPhotoQualityPrioritization {
+            if let maxPhotoQualityPrioritization = maxPhotoQualityPrioritization {
                 photoOutput.maxPhotoQualityPrioritization = maxPhotoQualityPrioritization.avQualityPrioritization
             }
         }
-
+
         if session.canAddOutput(photoOutput) {
             session.addOutput(photoOutput)
 
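One change in this hunk is source compatibility rather than behavior: the Swift 5.7 `if let` shorthand is rewritten back to the explicit `if let x = x` binding, presumably so the file still compiles on pre-Xcode 14 toolchains. The two spellings are equivalent:

var prioritization: String? = "quality"
if let prioritization = prioritization { print(prioritization) } // explicit form, any Swift 5 compiler
// if let prioritization { print(prioritization) }               // shorthand, requires Swift 5.7+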
@@ -165,12 +171,12 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 }
             }
         }
-
+
         // Add metadata output for barcode scanning
         if self.session.canAddOutput(metadataOutput) {
             self.session.addOutput(metadataOutput)
             metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
-
+
             let availableTypes = self.metadataOutput.availableMetadataObjectTypes
             let filteredTypes = supportedBarcodeType
                 .map { $0.toAVMetadataObjectType() }
@@ -178,21 +184,37 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
 
             metadataOutput.metadataObjectTypes = filteredTypes
         }
-
+
         // add for text detections
         if (textRequest != nil && self.session.canAddOutput(self.videoDataOutput)) {
             self.session.addOutput(self.videoDataOutput)
         }
     }
-
-
+
+    // MARK: - Pause / Resume non-essential outputs
+    private func pauseNonEssentialOutputs() {
+        videoDataOutput.setSampleBufferDelegate(nil, queue: nil)
+        metadataOutput.metadataObjectTypes = []
+    }
+
+    private func resumeNonEssentialOutputs() {
+        if textDetectionEnabled {
+            videoDataOutput.setSampleBufferDelegate(self, queue: globalOCRQueue)
+        }
+
+        let available = metadataOutput.availableMetadataObjectTypes
+        let filtered = supportedBarcodeTypes.map { $0.toAVMetadataObjectType() }.filter { available.contains($0) }
+        metadataOutput.metadataObjectTypes = filtered
+    }
+
+    // MARK: - Zoom
     func zoomPinchStart() {
         sessionQueue.async {
             guard let videoDevice = self.videoDeviceInput?.device else { return }
             self.zoomStartedAt = videoDevice.videoZoomFactor
         }
     }
-
+
     func zoomPinchChange(pinchScale: CGFloat) {
         guard !pinchScale.isNaN else { return }
 
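The new pause/resume helpers gate the two "non-essential" outputs without removing them from the session: detaching the sample-buffer delegate stops OCR frame delivery, and clearing `metadataObjectTypes` stops barcode callbacks, all without a `beginConfiguration()`/`commitConfiguration()` cycle that could glitch the preview. A self-contained sketch of the same gating idea (class, queue, and property names are ours, not the package's):

import AVFoundation

final class OutputGate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    let videoOutput = AVCaptureVideoDataOutput()
    let metadataOutput = AVCaptureMetadataOutput()
    private let frameQueue = DispatchQueue(label: "example.frames")
    private var savedTypes: [AVMetadataObject.ObjectType] = []

    func pause() {
        savedTypes = metadataOutput.metadataObjectTypes ?? [] // remember what to restore
        videoOutput.setSampleBufferDelegate(nil, queue: nil)  // stop frame delivery
        metadataOutput.metadataObjectTypes = []               // stop metadata callbacks
    }

    func resume() {
        videoOutput.setSampleBufferDelegate(self, queue: frameQueue)
        metadataOutput.metadataObjectTypes = savedTypes
    }
}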
@@ -201,7 +223,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
 
             let desiredZoomFactor = (self.zoomStartedAt / self.defaultZoomFactor(for: videoDevice)) * pinchScale
             let zoomForDevice = self.getValidZoom(forDevice: videoDevice, zoom: desiredZoomFactor)
-
+
             if zoomForDevice != self.normalizedZoom(for: videoDevice) {
                 // Only trigger zoom changes if it's an uncontrolled component (zoom isn't manually set)
                 // otherwise it's likely to cause issues inf. loops
@@ -212,14 +234,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             }
         }
     }
-
+
     func update(maxZoom: Double?) {
         self.maxZoom = maxZoom
 
         // Re-update zoom value in case the max was increased
         self.update(zoom: self.zoom)
     }
-
+
     func update(zoom: Double?) {
         sessionQueue.async {
             self.zoom = zoom
@@ -230,7 +252,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             self.setZoomFor(videoDevice, to: zoomForDevice)
         }
     }
-
+
     /**
      `desiredZoom` can be nil when we want to notify what the zoom factor really is
      */
@@ -251,11 +273,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
         lastOnZoom = desiredOrCameraZoom
         self.onZoomCallback?(["zoom": desiredOrCameraZoom])
     }
-
+
     func update(onZoom: RCTDirectEventBlock?) {
         self.onZoomCallback = onZoom
     }
-
+
     func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior) {
         DispatchQueue.main.async {
             let devicePoint = self.cameraPreview.previewLayer.captureDevicePointConverted(fromLayerPoint: touchPoint)
@@ -270,7 +292,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 self.resetFocus = nil
                 self.focusFinished = nil
             }
-
+
             do {
                 try videoDevice.lockForConfiguration()
 
@@ -293,11 +315,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             }
         }
     }
-
+
     func update(onOrientationChange: RCTDirectEventBlock?) {
         self.onOrientationChange = onOrientationChange
     }
-
+
     func update(torchMode: TorchMode) {
         sessionQueue.async {
             self.torchMode = torchMode
@@ -314,11 +336,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             }
         }
     }
-
+
     func update(flashMode: FlashMode) {
         self.flashMode = flashMode
     }
-
+
     func update(maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization?) {
         guard #available(iOS 13.0, *) else { return }
         guard maxPhotoQualityPrioritization != self.maxPhotoQualityPrioritization else { return }
@@ -329,7 +351,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             self.photoOutput.maxPhotoQualityPrioritization = maxPhotoQualityPrioritization?.avQualityPrioritization ?? .balanced
         }
     }
-
+
     func update(cameraType: CameraType) {
         sessionQueue.async {
             if self.videoDeviceInput?.device.position == cameraType.avPosition {
@@ -343,14 +365,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                   let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
                 return
             }
-
+
             self.removeObservers()
             self.session.beginConfiguration()
-            defer { self.session.commitConfiguration() }
-
-            // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
             self.session.removeInput(currentViewDeviceInput)
-
+
             if self.session.canAddInput(videoDeviceInput) {
                 self.session.addInput(videoDeviceInput)
                 self.resetZoom(forDevice: videoDevice)
@@ -359,14 +378,15 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 // If it fails, put back current camera
                 self.session.addInput(currentViewDeviceInput)
             }
-
+
+            self.session.commitConfiguration()
             self.addObservers()
 
             // We need to reapply the configuration after reloading the camera
             self.update(torchMode: self.torchMode)
         }
     }
-
+
     func update(resizeMode: ResizeMode) {
         DispatchQueue.main.async {
             switch resizeMode {
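The hunk above also swaps `defer { self.session.commitConfiguration() }` for an explicit `commitConfiguration()` placed before `addObservers()`. With `defer`, the commit only ran at scope exit, i.e. after observers were re-attached and the torch mode reapplied to a session still mid-configuration; committing explicitly restores the intended ordering. Illustrative sketch (function and parameter names are ours):

import AVFoundation

func reconfigure(session: AVCaptureSession, swapInputs: () -> Void, addObservers: () -> Void) {
    session.beginConfiguration()
    swapInputs()
    session.commitConfiguration() // commit first...
    addObservers()                // ...then observe the fully configured session
}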
@@ -377,24 +397,28 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             }
         }
     }
-
+
     func capturePicture(onWillCapture: @escaping () -> Void,
-
-
+                        onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void,
+                        onError: @escaping (_ message: String) -> Void) {
         /*
          Retrieve the video preview layer's video orientation on the main queue before
          entering the session queue. Do this to ensure that UI elements are accessed on
          the main thread and session configuration is done on the session queue.
         */
+
+        // 🚀 NEW: Pause OCR + barcode before capturing
+        self.pauseNonEssentialOutputs()
+
         DispatchQueue.main.async { [weak self] in
             guard let self = self else {
                 onError("Camera was deallocated")
                 return
             }
-
+
             let videoPreviewLayerOrientation =
                 self.videoOrientation(from: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation
-
+
             self.sessionQueue.async { [weak self] in
                 guard let self = self else {
                     onError("Camera was deallocated")
@@ -407,14 +431,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                     onError("Camera session is not running")
                     return
                 }
-
+
                 // Ensure photo output has an active video connection
                 guard let photoOutputConnection = self.photoOutput.connection(with: .video) else {
                     print("Cannot capture photo: no video connection available")
                     onError("Camera connection is not available")
                     return
                 }
-
+
                 // Verify the connection is active and enabled
                 guard photoOutputConnection.isActive && photoOutputConnection.isEnabled else {
                     print("Cannot capture photo: video connection is not active or enabled")
@@ -426,7 +450,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 if let videoPreviewLayerOrientation {
                     photoOutputConnection.videoOrientation = videoPreviewLayerOrientation
                 }
-
+
                 let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
                 if #available(iOS 13.0, *) {
                     settings.photoQualityPrioritization = self.photoOutput.maxPhotoQualityPrioritization
@@ -435,7 +459,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 if self.videoDeviceInput?.device.isFlashAvailable == true {
                     settings.flashMode = self.flashMode.avFlashMode
                 }
-
+
                 let photoCaptureDelegate = PhotoCaptureDelegate(
                     with: settings,
                     onWillCapture: onWillCapture,
@@ -443,30 +467,34 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                         // Use weak self to prevent crash if camera is deallocated during capture
                         self?.inProgressPhotoCaptureDelegates[uniqueID] = nil
                        onSuccess(imageData, thumbnailData, dimensions)
+                        self?.resumeNonEssentialOutputs()
                     },
                     onCaptureError: { [weak self] uniqueID, errorMessage in
                         // Use weak self to prevent crash if camera is deallocated during capture
                         self?.inProgressPhotoCaptureDelegates[uniqueID] = nil
                         onError(errorMessage)
+                        self?.resumeNonEssentialOutputs()
                     }
                 )
-
+
                 self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate
                 self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate)
             }
         }
     }
-
+
+    // MARK: - Barcode scanning
     func isBarcodeScannerEnabled(_ isEnabled: Bool,
                                  supportedBarcodeTypes supportedBarcodeType: [CodeFormat],
                                  onBarcodeRead: ((_ barcode: String,_ codeFormat:CodeFormat) -> Void)?) {
         sessionQueue.async {
             self.onBarcodeRead = onBarcodeRead
+            self.supportedBarcodeTypes = supportedBarcodeType
+
+            let availableTypes = self.metadataOutput.availableMetadataObjectTypes
             let newTypes: [AVMetadataObject.ObjectType]
             if isEnabled && onBarcodeRead != nil {
-                let availableTypes = self.metadataOutput.availableMetadataObjectTypes
-                newTypes = supportedBarcodeType.map { $0.toAVMetadataObjectType() }
-                    .filter { availableTypes.contains($0) }
+                newTypes = supportedBarcodeType.map { $0.toAVMetadataObjectType() }.filter { availableTypes.contains($0) }
             } else {
                 newTypes = []
             }
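`resumeNonEssentialOutputs()` is called from both `PhotoCaptureDelegate` callbacks, so neither a successful nor a failed capture leaves OCR and barcode delivery switched off. The early-return guards inside `capturePicture` (deallocated camera, session not running, missing or inactive connection) still invoke `onError` without resuming, however; wrapping the error callback would cover those paths as well. A hedged sketch (the helper is ours, not part of the package):

// Wrap onError so every failure path also restores the paused outputs.
func balancedOnError(_ onError: @escaping (String) -> Void,
                     resume: @escaping () -> Void) -> (String) -> Void {
    return { message in
        resume()
        onError(message)
    }
}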
@@ -479,11 +507,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             }
         }
     }
-
+
     func update(barcodeFrameSize: CGSize?) {
         self.barcodeFrameSize = barcodeFrameSize
     }
-
+
     func update(scannerFrameSize: CGRect?) {
         guard self.scannerFrameSize != scannerFrameSize else { return }
         self.sessionQueue.async {
@@ -491,7 +519,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             if !self.session.isRunning {
                 return
             }
-
+
             DispatchQueue.main.async {
                 var visibleRect: CGRect?
                 if scannerFrameSize != nil && scannerFrameSize != .zero {
@@ -510,7 +538,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
             }
         }
     }
-
+
 
     func isTextDetectionEnabled(_ isEnabled: Bool, onTextRead: ((String) -> Void)?) {
         sessionQueue.async {
@@ -525,37 +553,39 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
                 self.textRequest?.recognitionLanguages = ["en", "fr", "de", "es", "vi"]
                 self.textRequest?.recognitionLevel = .accurate
                 self.textRequest?.usesLanguageCorrection = false
-
+
                 self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
-                self.videoDataOutput.setSampleBufferDelegate(self, queue:
+                self.videoDataOutput.setSampleBufferDelegate(self, queue: globalOCRQueue)
                 self.videoDataOutput.videoSettings = [
                     kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
                 ]
-
+
             } else {
                 self.textRequest = nil
             }
         }
     }
-
+
     // AVCaptureVideoDataOutputSampleBufferDelegate
     func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
         guard textDetectionEnabled, let request = textRequest else { return }
         let now = Date()
         if now.timeIntervalSince(lastTextProcess) < textThrottle { return }
         lastTextProcess = now
-
-
-
-
-
-
-
-
-
+
+        globalOCRQueue.async {
+            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+            var requestOptions: [VNImageOption: Any] = [:]
+
+            let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .up, options: requestOptions)
+            do {
+                try handler.perform([request])
+            } catch {
+                // ignore OCR errors; don't crash the pipeline
+            }
         }
     }
-
+
     // MARK: - AVCaptureMetadataOutputObjectsDelegate
 
     func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
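`captureOutput` drops any frame arriving within `textThrottle` (0.35 s) of the previous OCR pass before dispatching to `globalOCRQueue`, and `alwaysDiscardsLateVideoFrames = true` keeps stale buffers from queueing behind slow recognitions. The throttle reduces to this small pattern (type and member names are illustrative):

import Foundation

struct Throttle {
    let interval: TimeInterval
    private var last = Date.distantPast

    init(interval: TimeInterval) { self.interval = interval }

    // Returns true at most once per `interval`.
    mutating func shouldFire(now: Date = Date()) -> Bool {
        guard now.timeIntervalSince(last) >= interval else { return false }
        last = now
        return true
    }
}

var ocrThrottle = Throttle(interval: 0.35)
// In the frame callback: if ocrThrottle.shouldFire() { /* hand the buffer to OCR */ }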
@@ -569,7 +599,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
 
         onBarcodeRead?(codeStringValue,barcodeType)
     }
-
+
     // MARK: - Private
 
     private func videoOrientation(from deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? {
@@ -588,7 +618,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
         @unknown default: return nil
         }
     }
-
+
     private func videoOrientation(from interfaceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation {
         switch interfaceOrientation {
         case .portrait:
@@ -603,14 +633,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate
         @unknown default: return .portrait
         }
     }
-
+
     private func getBestDevice(for cameraType: CameraType) -> AVCaptureDevice? {
         if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition) {
             return device // single-lens/physical device
         }
         return nil
     }
-
+
     private func defaultZoomFactor(for videoDevice: AVCaptureDevice) -> CGFloat {
         let fallback = 1.0
         guard #available(iOS 13.0, *) else { return fallback }
|