@aigens/aigens-sdk-core 0.0.1 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,18 +1,732 @@
  import Foundation
  import Capacitor
+ import AVFoundation
+ import UIKit

  /**
   * Please read the Capacitor iOS Plugin Development Guide
   * here: https://capacitorjs.com/docs/plugins/ios
   */
  @objc(CorePlugin)
- public class CorePlugin: CAPPlugin {
+ public class CorePlugin: CAPPlugin, AVCaptureMetadataOutputObjectsDelegate {
+
      private let implementation = Core()
-
+     public static var member: Dictionary<String, Any>?
+
      @objc func echo(_ call: CAPPluginCall) {
+
          let value = call.getString("value") ?? ""
          call.resolve([
              "value": implementation.echo(value)
          ])
+
+
+     }
+
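+     // Resolves with the statically stored CorePlugin.member dictionary; the value is force-unwrapped, so it must be set before this is called.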
+     @objc func getMember(_ call: CAPPluginCall) {
+
+         call.resolve([
+             "member": CorePlugin.member!
+         ])
+     }
+
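+     // Dismisses the presented view controller on the main queue and resolves with success.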
+     @objc func finish(_ call: CAPPluginCall) {
+
+         DispatchQueue.main.async {
+             self.bridge?.viewController?.dismiss(animated: true);
+         }
+
+
+         //let value = call.getString("value") ?? ""
+         call.resolve([
+             "success": true
+             //"value": implementation.echo(value)
+         ])
      }
+
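+     // Stores the optional cancel button label, then builds the camera UI and starts scanning on the main queue.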
+     @objc func scan(_ call: CAPPluginCall) {
+
+         let cancelButton = call.getString("cancelButton")
+         if(cancelButton != nil){
+             self.cancelText = cancelButton!
+         }else{
+             self.cancelText = "Cancel"
+         }
+
+         DispatchQueue.main.async {
+             self.loadScan()
+             self.startScan(call)
+         }
+     }
+
+
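+     // Presents a full-screen WebContainerViewController configured with the given url and optional member object, then resolves with success.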
+     @objc func openBrowser(_ call: CAPPluginCall) {
+
+         let url = call.getString("url")
+
+
+         if(url == nil){
+             return
+         }
+
+         let member = call.getObject("member")
+
+         DispatchQueue.main.async {
+
+             let bridgeVC = WebContainerViewController()
+
+             var options = [String: AnyObject]()
+             options["url"] = url as AnyObject;
+
+             if(member != nil){
+                 options["member"] = member as AnyObject;
+             }
+
+             bridgeVC.options = options;
+
+             bridgeVC.modalPresentationStyle = .fullScreen
+             let currentVC = self.bridge?.viewController;
+             currentVC?.present(bridgeVC, animated: true);
+         }
+
+
+         call.resolve([
+             "success": true
+             //"value": implementation.echo(value)
+         ])
+     }
+
+     // SCAN PLUGIN HERE
+
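+     // Container view that hosts the capture preview layer and keeps it sized and oriented with the device.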
+     class CameraView: UIView {
+         var videoPreviewLayer:AVCaptureVideoPreviewLayer?
+
+         func interfaceOrientationToVideoOrientation(_ orientation : UIInterfaceOrientation) -> AVCaptureVideoOrientation {
+             switch (orientation) {
+             case UIInterfaceOrientation.portrait:
+                 return AVCaptureVideoOrientation.portrait
+             case UIInterfaceOrientation.portraitUpsideDown:
+                 return AVCaptureVideoOrientation.portraitUpsideDown
+             case UIInterfaceOrientation.landscapeLeft:
+                 return AVCaptureVideoOrientation.landscapeLeft
+             case UIInterfaceOrientation.landscapeRight:
+                 return AVCaptureVideoOrientation.landscapeRight
+             default:
+                 return AVCaptureVideoOrientation.portraitUpsideDown
+             }
+         }
+
+         override func layoutSubviews() {
+             super.layoutSubviews()
+             if let sublayers = self.layer.sublayers {
+                 for layer in sublayers {
+                     layer.frame = self.bounds
+                 }
+             }
+
+             self.videoPreviewLayer?.connection?.videoOrientation = interfaceOrientationToVideoOrientation(UIApplication.shared.statusBarOrientation)
+         }
+
+
+         func addPreviewLayer(_ previewLayer:AVCaptureVideoPreviewLayer?) {
+             previewLayer!.videoGravity = AVLayerVideoGravity.resizeAspectFill
+             previewLayer!.frame = self.bounds
+             self.layer.addSublayer(previewLayer!)
+             self.videoPreviewLayer = previewLayer
+         }
+
+         func removePreviewLayer() {
+             if self.videoPreviewLayer != nil {
+                 self.videoPreviewLayer!.removeFromSuperlayer()
+                 self.videoPreviewLayer = nil
+             }
+         }
+     }
+
+     var cameraView: CameraView!
+     var cancelButton: UIButton!
+     var captureSession:AVCaptureSession?
+     var captureVideoPreviewLayer:AVCaptureVideoPreviewLayer?
+     var metaOutput: AVCaptureMetadataOutput?
+
+     var currentCamera: Int = 0
+     var frontCamera: AVCaptureDevice?
+     var backCamera: AVCaptureDevice?
+
+     var cancelText: String = "Cancel"
+     var isScanning: Bool = false
+     var shouldRunScan: Bool = false
+     var didRunCameraSetup: Bool = false
+     var didRunCameraPrepare: Bool = false
+     var isBackgroundHidden: Bool = false
+
+     var savedCall: CAPPluginCall? = nil
+     var scanningPaused: Bool = false
+     var lastScanResult: String? = nil
+
+     enum SupportedFormat: String, CaseIterable {
+         // 1D Product
+         //!\ UPC_A is part of EAN_13 according to Apple docs
+         case UPC_E
+         //!\ UPC_EAN_EXTENSION is not supported by AVFoundation
+         case EAN_8
+         case EAN_13
+         // 1D Industrial
+         case CODE_39
+         case CODE_39_MOD_43
+         case CODE_93
+         case CODE_128
+         //!\ CODABAR is not supported by AVFoundation
+         case ITF
+         case ITF_14
+         // 2D
+         case AZTEC
+         case DATA_MATRIX
+         //!\ MAXICODE is not supported by AVFoundation
+         case PDF_417
+         case QR_CODE
+         //!\ RSS_14 is not supported by AVFoundation
+         //!\ RSS_EXPANDED is not supported by AVFoundation
+
+         var value: AVMetadataObject.ObjectType {
+             switch self {
+             // 1D Product
+             case .UPC_E: return AVMetadataObject.ObjectType.upce
+             case .EAN_8: return AVMetadataObject.ObjectType.ean8
+             case .EAN_13: return AVMetadataObject.ObjectType.ean13
+             // 1D Industrial
+             case .CODE_39: return AVMetadataObject.ObjectType.code39
+             case .CODE_39_MOD_43: return AVMetadataObject.ObjectType.code39Mod43
+             case .CODE_93: return AVMetadataObject.ObjectType.code93
+             case .CODE_128: return AVMetadataObject.ObjectType.code128
+             case .ITF: return AVMetadataObject.ObjectType.interleaved2of5
+             case .ITF_14: return AVMetadataObject.ObjectType.itf14
+             // 2D
+             case .AZTEC: return AVMetadataObject.ObjectType.aztec
+             case .DATA_MATRIX: return AVMetadataObject.ObjectType.dataMatrix
+             case .PDF_417: return AVMetadataObject.ObjectType.pdf417
+             case .QR_CODE: return AVMetadataObject.ObjectType.qr
+             }
+         }
+     }
+
+     var targetedFormats = [AVMetadataObject.ObjectType]()
+
+     enum CaptureError: Error {
+         case backCameraUnavailable
+         case frontCameraUnavailable
+         case couldNotCaptureInput(error: NSError)
+     }
+
+     @objc func buttonTapped( _ button : UIButton)
+     {
+
+         self.destroy()
+     }
+
+
+
+     private func loadScan() {
+
+         if(self.cameraView != nil){
+             return
+         }
+
+
+         self.cameraView = CameraView(frame: CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height))
+         self.cameraView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
+
+         let button = UIButton()
+         //button.backgroundColor = UIColor.orange
+
+         //let iconImage = UIImage(named: "search")
+         //button.setImage(iconImage, for: .normal)
+
+         button.setTitle("< " + self.cancelText, for: .normal)
+         button.contentMode = UIView.ContentMode.scaleToFill
+         button.frame = CGRect(x: 20, y: 10, width:200 , height:100)
+         button.titleLabel?.textColor = UIColor.white
+         button.contentHorizontalAlignment = .left
+
+         button.addTarget(self, action: #selector(self.buttonTapped(_:)), for: UIControl.Event.touchUpInside)
+
+         self.cancelButton = button
+
+     }
+
+     private func hasCameraPermission() -> Bool {
+         let status = AVCaptureDevice.authorizationStatus(for: AVMediaType.video)
+         if (status == AVAuthorizationStatus.authorized) {
+             return true
+         }
+         return false
+     }
+
+     private func addViews(){
+         self.webView!.superview!.insertSubview(cameraView, aboveSubview: self.webView!)
+         self.webView!.superview!.insertSubview(cancelButton, aboveSubview: cameraView)
+     }
+
+     private func removeViews(){
+
+         DispatchQueue.main.async {
+             self.cancelButton.removeFromSuperview()
+             self.cameraView.removeFromSuperview()
+         }
+
+     }
+
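+     // Builds the capture session: inserts the camera and cancel views above the web view, selects a back or front camera, and wires up the metadata output and preview layer.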
+     private func setupCamera(cameraDirection: String? = "back") -> Bool {
+         do {
+             var cameraDir = cameraDirection
+             cameraView.backgroundColor = UIColor.clear
+             //self.webView!.superview!.insertSubview(cameraView, belowSubview: self.webView!)
+
+             //self.webView!.superview!.insertSubview(cameraView, aboveSubview: self.webView!)
+             //self.webView!.superview!.insertSubview(cancelButton, aboveSubview: cameraView)
+             addViews()
+
+             let availableVideoDevices = discoverCaptureDevices()
+             for device in availableVideoDevices {
+                 if device.position == AVCaptureDevice.Position.back {
+                     backCamera = device
+                 }
+                 else if device.position == AVCaptureDevice.Position.front {
+                     frontCamera = device
+                 }
+             }
+             // older iPods have no back camera
+             if (cameraDir == "back") {
+                 if (backCamera == nil) {
+                     cameraDir = "front"
+                 }
+             } else {
+                 if (frontCamera == nil) {
+                     cameraDir = "back"
+                 }
+             }
+             let input: AVCaptureDeviceInput
+             input = try self.createCaptureDeviceInput(cameraDirection: cameraDir)
+             captureSession = AVCaptureSession()
+             captureSession!.addInput(input)
+             metaOutput = AVCaptureMetadataOutput()
+             captureSession!.addOutput(metaOutput!)
+             metaOutput!.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
+             captureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
+             cameraView.addPreviewLayer(captureVideoPreviewLayer)
+             self.didRunCameraSetup = true
+             return true
+         } catch CaptureError.backCameraUnavailable {
+             //
+         } catch CaptureError.frontCameraUnavailable {
+             //
+         } catch CaptureError.couldNotCaptureInput {
+             //
+         } catch {
+             //
+         }
+         return false
+     }
+
+     @available(swift, deprecated: 5.6, message: "New Xcode? Check if `AVCaptureDevice.DeviceType` has new types and add them accordingly.")
+     private func discoverCaptureDevices() -> [AVCaptureDevice] {
+         if #available(iOS 13.0, *) {
+             return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInTripleCamera, .builtInDualCamera, .builtInTelephotoCamera, .builtInTrueDepthCamera, .builtInUltraWideCamera, .builtInDualWideCamera, .builtInWideAngleCamera], mediaType: .video, position: .unspecified).devices
+         } else {
+             return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera, .builtInTelephotoCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified).devices
+         }
+     }
+
+     private func createCaptureDeviceInput(cameraDirection: String? = "back") throws -> AVCaptureDeviceInput {
+         var captureDevice: AVCaptureDevice
+         if(cameraDirection == "back"){
+             if(backCamera != nil){
+                 captureDevice = backCamera!
+             } else {
+                 throw CaptureError.backCameraUnavailable
+             }
+         } else {
+             if(frontCamera != nil){
+                 captureDevice = frontCamera!
+             } else {
+                 throw CaptureError.frontCameraUnavailable
+             }
+         }
+         let captureDeviceInput: AVCaptureDeviceInput
+         do {
+             captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
+         } catch let error as NSError {
+             throw CaptureError.couldNotCaptureInput(error: error)
+         }
+         return captureDeviceInput
+     }
+
+     private func dismantleCamera() {
+         // opposite of setupCamera
+
+         if (self.captureSession != nil) {
+             DispatchQueue.main.async {
+                 self.captureSession!.stopRunning()
+                 self.cameraView.removePreviewLayer()
+                 self.captureVideoPreviewLayer = nil
+                 self.metaOutput = nil
+                 self.captureSession = nil
+                 self.frontCamera = nil
+                 self.backCamera = nil
+             }
+         }
+
+         self.isScanning = false
+         self.didRunCameraSetup = false
+         self.didRunCameraPrepare = false
+
+         // If a call is saved and a scan will not run, free the saved call
+         if (self.savedCall != nil && !self.shouldRunScan) {
+             self.savedCall = nil
+         }
+     }
+
+     private func prepare(_ call: CAPPluginCall? = nil) {
+         // undo previous setup
+         // because it may be prepared with a different config
+         self.dismantleCamera()
+
+         DispatchQueue.main.async {
+             // setup camera with new config
+             if (self.setupCamera(cameraDirection: call?.getString("cameraDirection") ?? "back")) {
+                 // indicate this method was run
+                 self.didRunCameraPrepare = true
+
+                 if (self.shouldRunScan) {
+                     self.scan()
+                 }
+             } else {
+                 self.shouldRunScan = false
+             }
+         }
+     }
+
+     private func destroy() {
+
+         self.removeViews()
+
+         self.showBackground()
+
+         self.dismantleCamera()
+     }
+
+     private func scan() {
+         if (!self.didRunCameraPrepare) {
+             // On iOS 14 the needed permissions are not identified correctly, so forcing the prompt is better than nothing. Provisional.
+             var iOS14min: Bool = false
+             if #available(iOS 14.0, *) { iOS14min = true; }
+             if (!self.hasCameraPermission() && !iOS14min) {
+                 // @TODO()
+                 // requestPermission()
+             } else {
+                 self.shouldRunScan = true
+                 self.prepare(savedCall)
+             }
+         } else {
+             self.didRunCameraPrepare = false
+
+             self.shouldRunScan = false
+
+             targetedFormats = [AVMetadataObject.ObjectType]();
+
+             if ((savedCall?.options["targetedFormats"]) != nil) {
+                 let _targetedFormats = savedCall?.getArray("targetedFormats", String.self)
+
+                 if (_targetedFormats != nil && _targetedFormats?.count ?? 0 > 0) {
+                     _targetedFormats?.forEach { targetedFormat in
+                         if let value = SupportedFormat(rawValue: targetedFormat)?.value {
+                             print(value)
+                             targetedFormats.append(value)
+                         }
+                     }
+                 }
+
+                 if (targetedFormats.count == 0) {
+                     print("The property targetedFormats was not set correctly.")
+                 }
+             }
+
+             if (targetedFormats.count == 0) {
+                 for supportedFormat in SupportedFormat.allCases {
+                     targetedFormats.append(supportedFormat.value)
+                 }
+             }
+
+             DispatchQueue.main.async {
+                 self.metaOutput!.metadataObjectTypes = self.targetedFormats
+                 self.captureSession!.startRunning()
+             }
+
+             self.hideBackground()
+
+             self.isScanning = true
+         }
+     }
+
+     private func hideBackground() {
+         /*
+         DispatchQueue.main.async {
+             self.bridge?.webView!.isOpaque = false
+             self.bridge?.webView!.backgroundColor = UIColor.clear
+             self.bridge?.webView!.scrollView.backgroundColor = UIColor.clear
+
+             let javascript = "document.documentElement.style.backgroundColor = 'transparent'"
+
+             self.bridge?.webView!.evaluateJavaScript(javascript)
+         }*/
+     }
+
+     private func showBackground() {
+
+         /*
+         DispatchQueue.main.async {
+             let javascript = "document.documentElement.style.backgroundColor = ''"
+
+             self.bridge?.webView!.evaluateJavaScript(javascript) { (result, error) in
+                 self.bridge?.webView!.isOpaque = true
+                 self.bridge?.webView!.backgroundColor = UIColor.white
+                 self.bridge?.webView!.scrollView.backgroundColor = UIColor.white
+             }
+         }*/
+     }
+
+     // This method processes metadataObjects captured by iOS.
+     public func metadataOutput(_ captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
+
+         if (metadataObjects.count == 0 || !self.isScanning) {
+             // while nothing is detected, or if scanning is false, do nothing.
+             return
+         }
+
+         let found = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
+         if (targetedFormats.contains(found.type)) {
+             var jsObject = PluginCallResultData()
+
+             if (found.stringValue != nil) {
+                 jsObject["hasContent"] = true
+                 jsObject["content"] = found.stringValue
+                 jsObject["format"] = formatStringFromMetadata(found.type)
+             } else {
+                 jsObject["hasContent"] = false
+             }
+
+             if (savedCall != nil) {
+                 if (savedCall!.keepAlive) {
+                     if (!scanningPaused && found.stringValue != lastScanResult ) {
+                         lastScanResult = found.stringValue
+                         savedCall!.resolve(jsObject)
+                     }
+                 } else {
+                     savedCall!.resolve(jsObject)
+                     savedCall = nil
+                     destroy()
+                 }
+             } else {
+                 self.destroy()
+             }
+         }
+     }
+
+     private func formatStringFromMetadata(_ type: AVMetadataObject.ObjectType) -> String {
+         switch type {
+         case AVMetadataObject.ObjectType.upce:
+             return "UPC_E"
+         case AVMetadataObject.ObjectType.ean8:
+             return "EAN_8"
+         case AVMetadataObject.ObjectType.ean13:
+             return "EAN_13"
+         case AVMetadataObject.ObjectType.code39:
+             return "CODE_39"
+         case AVMetadataObject.ObjectType.code39Mod43:
+             return "CODE_39_MOD_43"
+         case AVMetadataObject.ObjectType.code93:
+             return "CODE_93"
+         case AVMetadataObject.ObjectType.code128:
+             return "CODE_128"
+         case AVMetadataObject.ObjectType.interleaved2of5:
+             return "ITF"
+         case AVMetadataObject.ObjectType.itf14:
+             return "ITF_14"
+         case AVMetadataObject.ObjectType.aztec:
+             return "AZTEC"
+         case AVMetadataObject.ObjectType.dataMatrix:
+             return "DATA_MATRIX"
+         case AVMetadataObject.ObjectType.pdf417:
+             return "PDF_417"
+         case AVMetadataObject.ObjectType.qr:
+             return "QR_CODE"
+         default:
+             return type.rawValue
+         }
+     }
+
+     @objc func prepare(_ call: CAPPluginCall) {
+         self.prepare()
+         call.resolve()
+     }
+
+     @objc func hideBackground(_ call: CAPPluginCall) {
+         self.hideBackground()
+         call.resolve()
+     }
+
+     @objc func showBackground(_ call: CAPPluginCall) {
+         self.showBackground()
+         call.resolve()
+     }
+
+     @objc func startScan(_ call: CAPPluginCall) {
+         self.savedCall = call
+         self.scan()
+     }
+
+     @objc func startScanning(_ call: CAPPluginCall) {
+         self.savedCall = call
+         self.savedCall?.keepAlive = true
+         scanningPaused = false
+         lastScanResult = nil
+         self.scan()
+     }
+
+     @objc func pauseScanning(_ call: CAPPluginCall) {
+         scanningPaused = true
+         call.resolve()
+     }
+
+     @objc func resumeScanning(_ call: CAPPluginCall) {
+         scanningPaused = false
+         call.resolve()
+     }
+
+     @objc func stopScan(_ call: CAPPluginCall) {
+         if ((call.getBool("resolveScan") ?? false) && self.savedCall != nil) {
+             var jsObject = PluginCallResultData()
+             jsObject["hasContent"] = false
+
+             savedCall?.resolve(jsObject)
+             savedCall = nil
+         }
+
+         self.destroy()
+         call.resolve()
+     }
+
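+     // Reports the camera authorization status; when force is true and permission has not been determined yet, it prompts the user before resolving.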
+     @objc func checkPermission(_ call: CAPPluginCall) {
+         let force = call.getBool("force") ?? false
+
+         var savedReturnObject = PluginCallResultData()
+
+         DispatchQueue.main.async {
+             switch AVCaptureDevice.authorizationStatus(for: .video) {
+             case .authorized:
+                 savedReturnObject["granted"] = true
+             case .denied:
+                 savedReturnObject["denied"] = true
+             case .notDetermined:
+                 savedReturnObject["neverAsked"] = true
+             case .restricted:
+                 savedReturnObject["restricted"] = true
+             @unknown default:
+                 savedReturnObject["unknown"] = true
+             }
+
+             if (force && savedReturnObject["neverAsked"] != nil) {
+                 savedReturnObject["asked"] = true
+
+                 AVCaptureDevice.requestAccess(for: .video) { (authorized) in
+                     if (authorized) {
+                         savedReturnObject["granted"] = true
+                     } else {
+                         savedReturnObject["denied"] = true
+                     }
+                     call.resolve(savedReturnObject)
+                 }
+             } else {
+                 call.resolve(savedReturnObject)
+             }
+         }
+     }
+
+     /*
+     @objc func openAppSettings(_ call: CAPPluginCall) {
+         guard let settingsUrl = URL(string: UIApplication.openSettingsURLString) else {
+             return
+         }
+
+         DispatchQueue.main.async {
+             if UIApplication.shared.canOpenURL(settingsUrl) {
+                 UIApplication.shared.open(settingsUrl, completionHandler: { (success) in
+                     call.resolve()
+                 })
+             }
+         }
+     }
+     */
+
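+     // Torch controls: enableTorch/disableTorch lock the default video device for configuration, toggleTorch delegates to them, and getTorchState reports whether the torch is on.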
+     @objc func enableTorch(_ call: CAPPluginCall) {
+         guard let device = AVCaptureDevice.default(for: AVMediaType.video) else { return }
+         guard device.hasTorch else { return }
+         guard device.isTorchAvailable else { return }
+
+         do {
+             try device.lockForConfiguration()
+
+             do {
+                 try device.setTorchModeOn(level: 1.0)
+             } catch {
+                 print(error)
+             }
+
+             device.unlockForConfiguration()
+         } catch {
+             print(error)
+         }
+
+         call.resolve()
+     }
+
+     @objc func disableTorch(_ call: CAPPluginCall) {
+         guard let device = AVCaptureDevice.default(for: AVMediaType.video) else { return }
+         guard device.hasTorch else { return }
+         guard device.isTorchAvailable else { return }
+
+         do {
+             try device.lockForConfiguration()
+             device.torchMode = .off
+
+             device.unlockForConfiguration()
+         } catch {
+             print(error)
+         }
+
+         call.resolve()
+     }
+
+     @objc func toggleTorch(_ call: CAPPluginCall) {
+         guard let device = AVCaptureDevice.default(for: AVMediaType.video) else { return }
+         guard device.hasTorch else { return }
+         guard device.isTorchAvailable else { return }
+
+         if (device.torchMode == .on) {
+             self.disableTorch(call)
+         } else {
+             self.enableTorch(call)
+         }
+     }
+
+     @objc func getTorchState(_ call: CAPPluginCall) {
+         guard let device = AVCaptureDevice.default(for: AVMediaType.video) else { return }
+
+         var result = PluginCallResultData()
+
+         result["isEnabled"] = device.torchMode == .on
+
+         call.resolve(result)
+     }
+
  }