capacitor-plugin-camera-forked 2.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,670 @@
1
+ import Foundation
2
+ import UIKit
3
+ import Capacitor
4
+ import AVFoundation
5
+
6
+ /**
7
+ * Please read the Capacitor iOS Plugin Development Guide
8
+ * here: https://capacitorjs.com/docs/plugins/ios
9
+ */
10
+ @objc(CameraPreviewPlugin)
11
+ public class CameraPreviewPlugin: CAPPlugin, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate, AVCaptureFileOutputRecordingDelegate {
12
+
13
+ var previewView: PreviewView!
14
+ var captureSession: AVCaptureSession!
15
+ var photoOutput: AVCapturePhotoOutput!
16
+ var videoOutput: AVCaptureVideoDataOutput!
17
+ var movieFileOutput: AVCaptureMovieFileOutput!
18
+ var takeSnapshotCall: CAPPluginCall! = nil
19
+ var takePhotoCall: CAPPluginCall! = nil
20
+ var stopRecordingCall: CAPPluginCall! = nil
21
+ var getResolutionCall: CAPPluginCall! = nil
22
+ var saveFrameCall: CAPPluginCall! = nil
23
+ static public var frameTaken:UIImage!
24
+ var triggerPlayRequired = false
25
+ var facingBack = true
26
+ var videoInput:AVCaptureDeviceInput!
27
+ var scanRegion:ScanRegion! = nil
28
/// Creates the native preview view behind the web view and builds the initial
/// capture session (without video recording).
/// Rejects instead of crashing when the bridge's view hierarchy is not ready
/// (the original force-unwrapped the view bounds and the web view's superview),
/// and avoids deadlocking when invoked on the main thread.
@objc func initialize(_ call: CAPPluginCall) {
    let work = {
        guard let bounds = self.bridge?.viewController?.view.bounds,
              let webView = self.webView,
              let container = webView.superview else {
            call.reject("Web view not ready")
            return
        }
        self.previewView = PreviewView.init(frame: bounds)
        // Insert below the web view so HTML content renders on top of the preview.
        container.insertSubview(self.previewView, belowSubview: webView)
        self.initializeCaptureSession(enableVideoRecording: false)
        call.resolve()
    }
    if Thread.isMainThread {
        work()
    } else {
        DispatchQueue.main.sync(execute: work)
    }
}
37
+
38
/// Keeps the preview sized and oriented to match the web view after a device
/// rotation, then notifies JS listeners via "onOrientationChanged".
@objc func rotated() {
    if let newFrame = self.webView?.bounds {
        self.previewView.frame = newFrame
        let connection = self.previewView.videoPreviewLayer.connection
        switch UIDevice.current.orientation {
        case .portrait:
            connection?.videoOrientation = .portrait
        case .landscapeLeft:
            // Device landscapeLeft maps to video landscapeRight, and vice versa.
            connection?.videoOrientation = .landscapeRight
        case .landscapeRight:
            connection?.videoOrientation = .landscapeLeft
        default:
            break
        }
    }
    notifyListeners("onOrientationChanged", data: nil)
}
52
+
53
/// Makes the web view transparent and starts the capture session on a
/// background queue (startRunning blocks), arming a one-shot "onPlayed"
/// notification once frames begin flowing.
@objc func startCamera(_ call: CAPPluginCall) {
    makeWebViewTransparent()

    guard let session = self.captureSession else {
        call.reject("Camera not initialized")
        return
    }

    DispatchQueue.global(qos: .userInitiated).async {
        session.startRunning()
        DispatchQueue.main.async {
            self.triggerOnPlayed()
        }
    }

    call.resolve()
}
70
+
71
/// Tears down the current capture session — stop it, strip every input and
/// output, release it — then immediately rebuilds a fresh session without
/// video recording enabled.
func destroyCaptureSession() {
    guard let session = self.captureSession else { return }

    if session.isRunning {
        session.stopRunning()
    }

    // Detach everything so the devices are released cleanly.
    session.inputs.forEach { session.removeInput($0) }
    session.outputs.forEach { session.removeOutput($0) }

    self.captureSession = nil
    initializeCaptureSession(enableVideoRecording: false)
}
93
+
94
/// Returns the active camera format's width/height ratio.
/// Because the preview connection is forced to portrait, the sensor's
/// landscape width maps to on-screen height, so callers multiply the screen
/// width by this ratio to get the preview height.
/// Falls back to 1 (square) when no video input is attached yet — despite the
/// optional return type, this never actually returns nil.
func getCameraAspectRatio() -> CGFloat? {
    guard let videoDevice = self.videoInput?.device else {
        // No input attached yet: use a 1:1 fallback rather than failing.
        return 1;
    }

    // Retrieve the format description from the camera's active format.
    let formatDesc = videoDevice.activeFormat.formatDescription
    let dimensions = CMVideoFormatDescriptionGetDimensions(formatDesc)
    let camWidth = CGFloat(dimensions.width)
    let camHeight = CGFloat(dimensions.height)

    return camWidth / camHeight // aspect ratio (width / height)
}
108
+
109
/// Resizes the preview view to span the full screen width, with the height
/// derived from the camera aspect ratio, positioned at 40% of the leftover
/// vertical space (slightly above exact center).
func updatePreviewLayerFrame() {
    // Bail out if the host view controller's view is unavailable.
    guard let previewView = self.bridge?.viewController?.view,
          let aspectRatio = getCameraAspectRatio() else { return }

    let screenWidth = previewView.bounds.width
    let previewHeight = screenWidth * aspectRatio // Calculate height

    let screenHeight = previewView.bounds.height
    let previewY = (screenHeight - previewHeight) * 0.4 // 40% offset, not exact vertical centering

    self.previewView.frame = CGRect(x: 0, y: previewY, width: screenWidth, height: previewHeight)
}
122
+
123
/// Builds a brand-new AVCaptureSession around the default (back) camera:
/// a BGRA video data output (snapshots/resolution queries), a high-resolution
/// photo output, and — when `enableVideoRecording` is true — a microphone
/// input plus a movie file output. The preview layer is attached and both it
/// and the data output are forced to portrait.
/// Fixes a crash: the microphone input was force-unwrapped even when
/// AVCaptureDeviceInput creation failed; it is now skipped gracefully.
func initializeCaptureSession(enableVideoRecording:Bool){
    // Create the capture session.
    self.captureSession = AVCaptureSession()

    // Find the default video device; without a camera there is nothing to set up.
    guard let videoDevice = AVCaptureDevice.default(for: .video) else { return }
    if enableVideoRecording {
        // Attach the microphone for recording; skip silently if it is
        // unavailable or its input cannot be created.
        if let microphone = AVCaptureDevice.default(for: AVMediaType.audio),
           let micInput = try? AVCaptureDeviceInput(device: microphone),
           captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }
    }

    do {
        // Wrap the video device in a capture device input.
        self.videoInput = try AVCaptureDeviceInput(device: videoDevice)
        // If the input can be added, add it to the session.
        if self.captureSession.canAddInput(videoInput) {
            self.captureSession.addInput(videoInput)
            self.previewView.videoPreviewLayer.session = self.captureSession
            self.previewView.videoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill

            // Frame-by-frame output used by takeSnapshot/saveFrame/getResolution.
            self.videoOutput = AVCaptureVideoDataOutput.init()
            if self.captureSession.canAddOutput(self.videoOutput) {
                self.captureSession.addOutput(videoOutput)
            }

            // Lock both the preview layer and the data output to portrait.
            if let connection = self.previewView.videoPreviewLayer.connection {
                connection.videoOrientation = .portrait
            }
            if let videoConnection = self.videoOutput.connection(with: .video) {
                videoConnection.videoOrientation = .portrait
            }

            // Still-photo output with high-resolution capture enabled.
            self.photoOutput = AVCapturePhotoOutput()
            self.photoOutput.isHighResolutionCaptureEnabled = true
            if self.captureSession.canAddOutput(self.photoOutput) {
                self.captureSession.addOutput(photoOutput)
            }
            if enableVideoRecording {
                self.movieFileOutput = AVCaptureMovieFileOutput()
                if self.captureSession.canAddOutput(self.movieFileOutput) {
                    self.captureSession.addOutput(movieFileOutput)
                }
            }

            self.captureSession.sessionPreset = AVCaptureSession.Preset.photo

            updatePreviewLayerFrame()

            // Deliver BGRA sample buffers on a dedicated serial queue.
            let queue = DispatchQueue(label: "queue")
            self.videoOutput.setSampleBufferDelegate(self, queue: queue)
            self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey : kCVPixelFormatType_32BGRA] as [String : Any]
        }

    } catch {
        // Configuration failed (e.g. camera in use elsewhere); log and bail.
        print(error)
    }
}
188
/// Kicks off a high-resolution still capture; the result is delivered to
/// photoOutput(_:didFinishProcessingPhoto:error:).
func takePhotoWithAVFoundation(){
    let settings = AVCapturePhotoSettings()
    settings.isHighResolutionPhotoEnabled = true
    self.photoOutput.capturePhoto(with: settings, delegate: self)
}
196
+
197
/// AVCapturePhotoCaptureDelegate: writes the captured JPEG to disk and
/// settles the pending takePhoto call.
/// Fixes two defects in the original: on a capture error the kept-alive call
/// was never rejected (leaking it forever), and `takePhotoCall` was accessed
/// without a nil check. The destination URL is also built with
/// URL(fileURLWithPath:) instead of a force-unwrapped URL(string:).
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard let call = takePhotoCall else { return }
    if let error = error {
        print("Error:", error)
        takePhotoCall = nil
        call.reject("Photo capture failed: \(error.localizedDescription)")
        return
    }
    guard let imageData = photo.fileDataRepresentation() else {
        takePhotoCall = nil
        call.reject("Failed to get photo data")
        return
    }
    var ret = PluginCallResultData()
    // Write either to the caller-supplied path or to a fresh temp file.
    let url: URL
    let pathToSave = call.getString("pathToSave", "")
    if pathToSave == "" {
        url = FileManager.default.temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("jpeg")
    } else {
        url = URL(fileURLWithPath: pathToSave)
    }
    if call.getBool("includeBase64", false) {
        if let image = UIImage(data: imageData) {
            ret["base64"] = getBase64FromImage(image: image, quality: 100.0)
        }
    }
    do {
        try imageData.write(to: url)
        ret["path"] = url.path
    } catch {
        // File write failed; still resolve with whatever we have (base64).
        print(error)
    }
    takePhotoCall = nil
    call.resolve(ret)
}
229
+
230
/// AVCaptureVideoDataOutputSampleBufferDelegate: runs on the dedicated sample
/// queue for every frame. Services a pending resolution query / one-shot
/// "onPlayed" event, and any pending takeSnapshot/saveFrame request.
/// Fixes a defect: CVPixelBufferLockBaseAddress was taken but never balanced
/// with an unlock, leaking the read lock on every serviced frame.
public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
{
    if triggerPlayRequired || getResolutionCall != nil {
        var ret = PluginCallResultData()
        let imageBuffer:CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        // Balance the read lock taken above (the original never unlocked).
        CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly)
        let res = String(width)+"x"+String(height)
        ret["resolution"] = res
        if triggerPlayRequired {
            notifyListeners("onPlayed", data: ret)
            triggerPlayRequired = false
        }
        if getResolutionCall != nil {
            getResolutionCall.resolve(ret)
            getResolutionCall = nil
        }

    }
    if takeSnapshotCall != nil || saveFrameCall != nil {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            print("Failed to get image buffer from sample buffer.")
            return
        }
        // Render the pixel buffer into a CGImage-backed UIImage.
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else {
            print("Failed to create bitmap from image.")
            return
        }
        let image = UIImage(cgImage: cgImage)
        var normalized = normalizedImage(image)
        if self.scanRegion != nil {
            // Apply the configured crop region before handing the frame back.
            normalized = croppedUIImage(image: normalized, scanRegion: self.scanRegion)
        }
        if takeSnapshotCall != nil {
            let base64 = getBase64FromImage(image: normalized, quality: 100.0);
            var ret = PluginCallResultData()
            ret["base64"] = base64
            takeSnapshotCall.resolve(ret)
            takeSnapshotCall = nil
        }
        if saveFrameCall != nil {
            CameraPreviewPlugin.frameTaken = normalized
            var ret = PluginCallResultData()
            ret["success"] = true
            saveFrameCall.resolve(ret)
            saveFrameCall = nil
        }
    }
}
281
+
282
/// Clears the web view's background on the main thread so the camera preview
/// inserted beneath it shows through.
/// Fixes the `bridge?.webView!` force-unwrap, which crashed when the web view
/// was not yet (or no longer) available.
func makeWebViewTransparent(){
    DispatchQueue.main.async {
        guard let webView = self.bridge?.webView else { return }
        webView.isOpaque = false
        webView.backgroundColor = UIColor.clear
        webView.scrollView.backgroundColor = UIColor.clear
    }
}
289
/// Restores an opaque white background on the web view after the camera stops.
/// Fixes the `bridge?.webView!` force-unwrap, which crashed when the web view
/// was not available.
func restoreWebViewBackground(){
    DispatchQueue.main.async {
        guard let webView = self.bridge?.webView else { return }
        webView.isOpaque = true
        webView.backgroundColor = UIColor.white
        webView.scrollView.backgroundColor = UIColor.white
    }
}
296
+
297
/// Turns the torch on or off according to the boolean "on" option (default
/// true). Rejects when the camera has not been initialized (the original
/// crashed on the implicitly-unwrapped `videoInput`); devices without a torch
/// (front camera, simulator) resolve without doing anything.
@objc func toggleTorch(_ call: CAPPluginCall) {
    guard let device = videoInput?.device else {
        call.reject("Camera not initialized")
        return
    }
    if device.hasTorch {
        do {
            try device.lockForConfiguration()
            device.torchMode = call.getBool("on", true) ? .on : .off
            device.unlockForConfiguration()
        } catch {
            // Configuration lock failed; leave torch state unchanged.
            print("Torch could not be used")
        }
    }
    call.resolve()
}
314
+
315
/// Restores the web view background and tears down the capture session.
/// Fixes a deadlock: the original called DispatchQueue.main.sync
/// unconditionally, which hangs forever when invoked from the main thread.
@objc func stopCamera(_ call: CAPPluginCall) {
    restoreWebViewBackground()
    if Thread.isMainThread {
        destroyCaptureSession()
    } else {
        DispatchQueue.main.sync {
            destroyCaptureSession()
        }
    }
    call.resolve()
}
322
+
323
+
324
/// Applies one of the preset resolutions (1 = 480p, 2 = 720p, 3 = 1080p,
/// 5 = 4K; 1080p and 4K only when the back camera is active). The session is
/// stopped around the change and restarted if it was running.
@objc func setResolution(_ call: CAPPluginCall) {
    let res = call.getInt("resolution", 5)
    let wasRunning = self.captureSession.isRunning
    if wasRunning {
        self.captureSession.stopRunning()
    }
    switch res {
    case 1:
        self.captureSession.sessionPreset = AVCaptureSession.Preset.vga640x480
    case 2:
        self.captureSession.sessionPreset = AVCaptureSession.Preset.hd1280x720
    case 3 where facingBack:
        self.captureSession.sessionPreset = AVCaptureSession.Preset.hd1920x1080
    case 5 where facingBack:
        self.captureSession.sessionPreset = AVCaptureSession.Preset.hd4K3840x2160
    default:
        // Unknown value, or an HD preset requested on the front camera: keep current preset.
        break
    }
    if wasRunning {
        self.captureSession.startRunning()
        triggerOnPlayed()
    }
    call.resolve()
}
345
+
346
/// Stores the call (kept alive) so the next video frame in captureOutput can
/// report the current stream resolution as "WxH".
@objc func getResolution(_ call: CAPPluginCall) {
    call.keepAlive = true
    getResolutionCall = call
}
350
+
351
/// Arms the one-shot flag so captureOutput emits an "onPlayed" event with the
/// current frame resolution on the next frame.
@objc func triggerOnPlayed() {
    triggerPlayRequired = true
}
354
+
355
/// Reports the two logical cameras this plugin exposes on iOS.
@objc func getAllCameras(_ call: CAPPluginCall) {
    let cameras = NSMutableArray()
    cameras.add("Front-Facing Camera")
    cameras.add("Back-Facing Camera")
    var ret = PluginCallResultData()
    ret["cameras"] = cameras
    call.resolve(ret)
}
363
+
364
/// Reports which logical camera currently feeds the session.
@objc func getSelectedCamera(_ call: CAPPluginCall) {
    var ret = PluginCallResultData()
    ret["selectedCamera"] = facingBack ? "Back-Facing Camera" : "Front-Facing Camera"
    call.resolve(ret)
}
373
+
374
/// Reports the device orientation as "LANDSCAPE" or "PORTRAIT"
/// (portrait is the fallback for face-up/face-down/unknown orientations).
@objc func getOrientation(_ call: CAPPluginCall) {
    var ret = PluginCallResultData()
    ret["orientation"] = UIDevice.current.orientation.isLandscape ? "LANDSCAPE" : "PORTRAIT"
    call.resolve(ret)
}
383
+
384
/// Switches between the front and back camera ("cameraID" option), preserving
/// the session's running state. Fixes crashes from force-unwrapping the
/// discovered device (`videoDevice!`) and from adding a nil input when
/// `try? AVCaptureDeviceInput` failed — both now reject the call instead.
@objc func selectCamera(_ call: CAPPluginCall) {
    let isRunning = self.captureSession.isRunning
    if isRunning {
        self.captureSession.stopRunning()
    }
    let cameraID = call.getString("cameraID", "Back-Facing Camera")
    if cameraID == "Back-Facing Camera" && facingBack == false {
        guard let videoDevice = captureDevice(with: AVCaptureDevice.Position.back),
              let newInput = try? AVCaptureDeviceInput(device: videoDevice) else {
            call.reject("Failed to open back-facing camera")
            return
        }
        self.captureSession.removeInput(self.videoInput)
        self.videoInput = newInput
        self.captureSession.addInput(self.videoInput)
        facingBack = true
    }
    if cameraID == "Front-Facing Camera" && facingBack == true {
        guard let videoDevice = captureDevice(with: AVCaptureDevice.Position.front),
              let newInput = try? AVCaptureDeviceInput(device: videoDevice) else {
            call.reject("Failed to open front-facing camera")
            return
        }
        self.captureSession.removeInput(self.videoInput)
        // The front camera may not support the current preset; fall back to photo.
        self.captureSession.sessionPreset = AVCaptureSession.Preset.photo
        self.videoInput = newInput
        self.captureSession.addInput(self.videoInput)
        facingBack = false
    }
    if isRunning {
        self.captureSession.startRunning()
    }
    triggerOnPlayed()
    call.resolve()
}
411
+
412
/// Positions the preview view from "left"/"top"/"width"/"height" options
/// expressed in px, %, or plain numbers. Rejects when the plugin is not
/// initialized or when any of the four values is missing (the original
/// force-unwrapped `call.getString(...)!` and crashed).
@objc func setLayout(_ call: CAPPluginCall) {
    if (self.previewView == nil){
        call.reject("not initialized")
        return
    }
    guard let leftStr = call.getString("left"),
          let topStr = call.getString("top"),
          let widthStr = call.getString("width"),
          let heightStr = call.getString("height") else {
        call.reject("left, top, width and height are required")
        return
    }
    DispatchQueue.main.async {
        let left = self.getLayoutValue(leftStr, true)
        let top = self.getLayoutValue(topStr, false)
        let width = self.getLayoutValue(widthStr, true)
        let height = self.getLayoutValue(heightStr, false)
        self.previewView.frame = CGRect.init(x: left, y: top, width: width, height: height)
    }
    call.resolve()
}
426
+
427
/// Converts a CSS-like layout value ("50%", "100px", or "100") into points.
/// Percentages are relative to the web view's width (isWidth == true) or
/// height. Malformed numbers now yield 0 instead of crashing on the
/// force-unwrapped Float conversion.
func getLayoutValue(_ value: String,_ isWidth: Bool) -> CGFloat {
    if value.contains("%") {
        // lastIndex(of:) is safe to unwrap here: contains("%") guarantees a match.
        let numeric = String(value[..<value.lastIndex(of: "%")!])
        guard let parsed = Float(numeric), let webView = self.bridge?.webView else {
            return 0
        }
        let percent = CGFloat(parsed / 100)
        return percent * (isWidth ? webView.frame.width : webView.frame.height)
    }
    if value.contains("px") {
        let numeric = String(value[..<value.lastIndex(of: "p")!])
        return CGFloat(Float(numeric) ?? 0)
    }
    return CGFloat(Float(value) ?? 0)
}
442
+
443
/// Rebuilds the session with the microphone and movie output attached, then
/// begins recording to a temp file. Fixes a deadlock: the original called
/// DispatchQueue.main.sync unconditionally, which hangs when invoked from the
/// main thread.
@objc func startRecording(_ call: CAPPluginCall) {
    let work = {
        if self.captureSession != nil {
            if self.captureSession.isRunning {
                self.captureSession.stopRunning()
                // Re-create the session with audio + movie output attached.
                self.initializeCaptureSession(enableVideoRecording: true)
                self.captureSession.startRunning()
                // movieFileOutput is assigned by the re-initialization above.
                self.movieFileOutput?.startRecording(to: self.getTemp(), recordingDelegate: self)
            }
        }
    }
    if Thread.isMainThread {
        work()
    } else {
        DispatchQueue.main.sync(execute: work)
    }
    call.resolve()
}
456
+
457
/// AVCaptureFileOutputRecordingDelegate: settles the pending stopRecording
/// call with the movie file path (and optionally its base64 contents), then
/// rebuilds the session without the audio/movie outputs.
public func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    guard let call = self.stopRecordingCall else { return }
    var ret = PluginCallResultData()
    ret["path"] = outputFileURL.path
    if call.getBool("includeBase64", false) {
        let data = try? Data(contentsOf: outputFileURL)
        ret["base64"] = data?.base64EncodedString()
    }
    // Drop back to a plain (non-recording) session.
    if let session = self.captureSession, session.isRunning {
        session.stopRunning()
        self.initializeCaptureSession(enableVideoRecording: false)
    }
    call.resolve(ret)
    self.stopRecordingCall = nil
}
475
+
476
/// Builds a unique ".mov" URL inside the temporary directory for a new recording.
private func getTemp() -> URL
{
    let url = FileManager.default.temporaryDirectory
        .appendingPathComponent(NSUUID().uuidString)
        .appendingPathExtension("mov")

    print("Temp path: \(url.path)")

    return url
}
485
+
486
/// Stops an in-progress recording; the result is delivered asynchronously in
/// fileOutput(_:didFinishRecordingTo:...). Rejects when no recording session
/// exists instead of crashing on the implicitly-unwrapped movieFileOutput.
@objc func stopRecording(_ call: CAPPluginCall) {
    guard let output = self.movieFileOutput else {
        call.reject("Not recording")
        return
    }
    call.keepAlive = true
    self.stopRecordingCall = call
    output.stopRecording()
}
491
+
492
/// Finds the first capture device matching the requested position among the
/// built-in device types this plugin supports.
func captureDevice(with position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    let discovered = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInWideAngleCamera, .builtInMicrophone, .builtInDualCamera, .builtInTelephotoCamera],
        mediaType: AVMediaType.video,
        position: .unspecified
    ).devices
    return discovered.first { $0.position == position }
}
504
+
505
/// Stores the crop region applied to snapshots and saved frames.
/// Expects integer top/right/left/bottom plus measuredByPercentage (1 means
/// the values are percentages of the image size). Rejects on missing or
/// non-integer fields instead of crashing on `as!` force-casts.
@objc func setScanRegion(_ call: CAPPluginCall) {
    guard let region = call.getObject("region"),
          let top = region["top"] as? Int,
          let right = region["right"] as? Int,
          let left = region["left"] as? Int,
          let bottom = region["bottom"] as? Int,
          let measuredByPercentage = region["measuredByPercentage"] as? Int else {
        call.reject("Invalid region: top, right, left, bottom and measuredByPercentage must be integers")
        return
    }
    self.scanRegion = ScanRegion()
    self.scanRegion.top = top
    self.scanRegion.right = right
    self.scanRegion.left = left
    self.scanRegion.bottom = bottom
    self.scanRegion.measuredByPercentage = measuredByPercentage
    call.resolve()
}
515
+
516
/// Sets the zoom factor ("factor" option, default 1.0), clamped into the
/// device's supported range. Rejects when the camera is not initialized
/// (the original crashed on the implicitly-unwrapped videoInput).
@objc func setZoom(_ call: CAPPluginCall) {
    guard let device = videoInput?.device else {
        call.reject("Camera not initialized")
        return
    }
    do {
        try device.lockForConfiguration()
        var factor: CGFloat = CGFloat(call.getFloat("factor") ?? 1.0)
        // Clamp into [minAvailableVideoZoomFactor, maxAvailableVideoZoomFactor].
        factor = max(factor, device.minAvailableVideoZoomFactor)
        factor = min(factor, device.maxAvailableVideoZoomFactor)
        device.videoZoomFactor = factor
        device.unlockForConfiguration()
    } catch {
        print("Zoom could not be used")
    }
    call.resolve()
}
530
+
531
/// Focuses the camera at a normalized (0.0–1.0) point of interest.
/// Fixes a leaked configuration lock: the original called
/// lockForConfiguration() but never unlockForConfiguration() on ANY path
/// (including success); every exit now balances the lock via defer.
@objc func setFocus(_ call: CAPPluginCall) {
    // Validate coordinates (must be normalized 0.0-1.0).
    guard let x = call.getFloat("x"),
          let y = call.getFloat("y"),
          x >= 0.0 && x <= 1.0,
          y >= 0.0 && y <= 1.0 else {
        call.reject("Invalid coordinates. Provide normalized x,y values (0.0-1.0)")
        return
    }

    guard let device = videoInput?.device else {
        call.reject("Camera not initialized")
        return
    }

    do {
        try device.lockForConfiguration()
        // Release the lock on every exit path below.
        defer { device.unlockForConfiguration() }

        // 1. Check if focus point is supported.
        guard device.isFocusPointOfInterestSupported else {
            call.reject("Focus point of interest not supported on this device")
            return
        }

        // 2. Set focus point.
        device.focusPointOfInterest = CGPoint(x: CGFloat(x), y: CGFloat(y))

        // 3. Prefer a one-shot autofocus; fall back to continuous.
        if device.isFocusModeSupported(.autoFocus) {
            device.focusMode = .autoFocus
        } else if device.isFocusModeSupported(.continuousAutoFocus) {
            device.focusMode = .continuousAutoFocus
        } else {
            call.reject("No supported focus mode available")
            return
        }
        call.resolve()
    } catch {
        call.reject("Failed to lock device for configuration: \(error.localizedDescription)")
    }
}
569
+
570
/// Requests camera access from the user and resolves once the system dialog
/// is dismissed. The original resolved immediately without ever prompting,
/// so callers could proceed without permission being granted.
@objc func requestCameraPermission(_ call: CAPPluginCall) {
    AVCaptureDevice.requestAccess(for: .video) { _ in
        call.resolve()
    }
}
573
+
574
/// Requests microphone access from the user and resolves once the system
/// dialog is dismissed. The original resolved immediately without prompting.
@objc func requestMicroPhonePermission(_ call: CAPPluginCall) {
    AVCaptureDevice.requestAccess(for: .audio) { _ in
        call.resolve()
    }
}
577
+
578
/// Reports whether the capture session exists and is currently running.
/// Returns false instead of crashing when called before initialize().
@objc func isOpen(_ call: CAPPluginCall) {
    var ret = PluginCallResultData()
    ret["isOpen"] = self.captureSession?.isRunning ?? false
    call.resolve(ret)
}
583
+
584
/// Exposes the frame most recently stored by saveFrame() to other native
/// code; nil when no frame has been saved yet.
@objc static func getBitmap() -> UIImage? {
    return frameTaken
}
587
+
588
/// Keeps the call alive; captureOutput stores the next video frame into
/// `frameTaken` and then resolves it with { success: true }.
@objc func saveFrame(_ call: CAPPluginCall) {
    call.keepAlive = true
    saveFrameCall = call
}
592
+
593
/// Keeps the call alive; captureOutput resolves it with the next video frame
/// encoded as base64 JPEG (cropped to scanRegion when one is set).
@objc func takeSnapshot(_ call: CAPPluginCall) {
    call.keepAlive = true
    takeSnapshotCall = call
}
597
+
598
/// Crops `image` to `scanRegion`. Region values are absolute pixels, or
/// percentages of the image size when measuredByPercentage == 1.
/// Fixes a crash: the original force-unwrapped `cropping(to:)`, which returns
/// nil when the rect falls outside the image; the rect is now clamped to the
/// image bounds and the original image is returned when cropping still fails.
func croppedUIImage(image:UIImage,scanRegion:ScanRegion) -> UIImage {
    guard let cgImage = image.cgImage else { return image }
    let imgWidth = Double(cgImage.width)
    let imgHeight = Double(cgImage.height)
    var regionLeft = Double(scanRegion.left)
    var regionTop = Double(scanRegion.top)
    var regionWidth = Double(scanRegion.right - scanRegion.left)
    var regionHeight = Double(scanRegion.bottom - scanRegion.top)
    if scanRegion.measuredByPercentage == 1 {
        // Convert percentage coordinates to pixels.
        regionLeft = regionLeft / 100 * imgWidth
        regionTop = regionTop / 100 * imgHeight
        regionWidth = regionWidth / 100 * imgWidth
        regionHeight = regionHeight / 100 * imgHeight
    }

    // Clamp the crop rect to the image bounds so cropping cannot fail.
    let cropRect = CGRect(
        x: regionLeft,
        y: regionTop,
        width: regionWidth,
        height: regionHeight
    ).integral.intersection(CGRect(x: 0, y: 0, width: imgWidth, height: imgHeight))

    guard !cropRect.isNull, let cropped = cgImage.cropping(to: cropRect) else {
        return image
    }
    return UIImage(cgImage: cropped)
}
628
+
629
/// Returns `image` re-tagged with an orientation that rotates it by the given
/// degrees (90 or 180); any other value returns the image unchanged.
func rotatedUIImage(image:UIImage, degree: Int) -> UIImage {
    switch degree {
    case 90:
        return UIImage(cgImage: image.cgImage!, scale: 1.0, orientation: .right)
    case 180:
        return UIImage(cgImage: image.cgImage!, scale: 1.0, orientation: .down)
    default:
        return image
    }
}
642
+
643
+
644
+
645
/// Redraws the image so its orientation metadata becomes .up, which the
/// downstream cropping code relies on. Images already .up are returned as-is.
func normalizedImage(_ image:UIImage) -> UIImage {
    guard image.imageOrientation != UIImage.Orientation.up else {
        return image
    }
    UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale)
    defer { UIGraphicsEndImageContext() }
    image.draw(in: CGRect(origin: .zero, size: image.size))
    return UIGraphicsGetImageFromCurrentImageContext()!
}
655
+
656
/// JPEG-encodes `image` and returns the data base64 encoded ("" on failure).
/// `quality` is clamped into jpegData's documented 0.0–1.0 range — existing
/// callers pass 100.0 and previously relied on UIKit clamping implicitly.
func getBase64FromImage(image:UIImage, quality: CGFloat) -> String{
    let clamped = min(max(quality, 0.0), 1.0)
    if let data = image.jpegData(compressionQuality: clamped) {
        return data.base64EncodedString()
    }
    return ""
}
663
+
664
/// Captures a still photo; the call is kept alive until the photo delegate
/// callback settles it. Options: "pathToSave" (destination file path) and
/// "includeBase64".
@objc func takePhoto(_ call: CAPPluginCall) {
    call.keepAlive = true
    takePhotoCall = call
    takePhotoWithAVFoundation()
}
669
+
670
+ }
@@ -0,0 +1,24 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3
+ <plist version="1.0">
4
+ <dict>
5
+ <key>CFBundleDevelopmentRegion</key>
6
+ <string>$(DEVELOPMENT_LANGUAGE)</string>
7
+ <key>CFBundleExecutable</key>
8
+ <string>$(EXECUTABLE_NAME)</string>
9
+ <key>CFBundleIdentifier</key>
10
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11
+ <key>CFBundleInfoDictionaryVersion</key>
12
+ <string>6.0</string>
13
+ <key>CFBundleName</key>
14
+ <string>$(PRODUCT_NAME)</string>
15
+ <key>CFBundlePackageType</key>
16
+ <string>FMWK</string>
17
+ <key>CFBundleShortVersionString</key>
18
+ <string>1.0</string>
19
+ <key>CFBundleVersion</key>
20
+ <string>$(CURRENT_PROJECT_VERSION)</string>
21
+ <key>NSPrincipalClass</key>
22
+ <string></string>
23
+ </dict>
24
+ </plist>