@capgo/camera-preview 7.4.0-beta.2 → 7.4.0-beta.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/README.md +218 -35
  2. package/android/.gradle/8.14.2/checksums/checksums.lock +0 -0
  3. package/android/.gradle/8.14.2/checksums/md5-checksums.bin +0 -0
  4. package/android/.gradle/8.14.2/checksums/sha1-checksums.bin +0 -0
  5. package/android/.gradle/8.14.2/executionHistory/executionHistory.bin +0 -0
  6. package/android/.gradle/8.14.2/executionHistory/executionHistory.lock +0 -0
  7. package/android/.gradle/8.14.2/fileHashes/fileHashes.bin +0 -0
  8. package/android/.gradle/8.14.2/fileHashes/fileHashes.lock +0 -0
  9. package/android/.gradle/8.14.2/fileHashes/resourceHashesCache.bin +0 -0
  10. package/android/.gradle/buildOutputCleanup/buildOutputCleanup.lock +0 -0
  11. package/android/.gradle/file-system.probe +0 -0
  12. package/android/build.gradle +3 -1
  13. package/android/src/main/AndroidManifest.xml +1 -4
  14. package/android/src/main/java/com/ahm/capacitor/camera/preview/CameraPreview.java +759 -83
  15. package/android/src/main/java/com/ahm/capacitor/camera/preview/CameraXView.java +2813 -805
  16. package/android/src/main/java/com/ahm/capacitor/camera/preview/GridOverlayView.java +112 -0
  17. package/android/src/main/java/com/ahm/capacitor/camera/preview/model/CameraDevice.java +55 -46
  18. package/android/src/main/java/com/ahm/capacitor/camera/preview/model/CameraLens.java +61 -52
  19. package/android/src/main/java/com/ahm/capacitor/camera/preview/model/CameraSessionConfiguration.java +161 -59
  20. package/android/src/main/java/com/ahm/capacitor/camera/preview/model/LensInfo.java +29 -23
  21. package/android/src/main/java/com/ahm/capacitor/camera/preview/model/ZoomFactors.java +24 -23
  22. package/dist/docs.json +333 -29
  23. package/dist/esm/definitions.d.ts +156 -13
  24. package/dist/esm/definitions.js.map +1 -1
  25. package/dist/esm/web.d.ts +52 -3
  26. package/dist/esm/web.js +592 -95
  27. package/dist/esm/web.js.map +1 -1
  28. package/dist/plugin.cjs.js +590 -95
  29. package/dist/plugin.cjs.js.map +1 -1
  30. package/dist/plugin.js +590 -95
  31. package/dist/plugin.js.map +1 -1
  32. package/ios/Sources/CapgoCameraPreview/CameraController.swift +907 -222
  33. package/ios/Sources/CapgoCameraPreview/GridOverlayView.swift +65 -0
  34. package/ios/Sources/CapgoCameraPreview/Plugin.swift +986 -250
  35. package/package.json +2 -2
@@ -1,13 +1,6 @@
1
- //
2
- // CameraController.swift
3
- // Plugin
4
- //
5
- // Created by Ariel Hernandez Musa on 7/14/19.
6
- // Copyright © 2019 Max Lynch. All rights reserved.
7
- //
8
-
9
1
  import AVFoundation
10
2
  import UIKit
3
+ import CoreLocation
11
4
 
12
5
  class CameraController: NSObject {
13
6
  var captureSession: AVCaptureSession?
@@ -23,25 +16,37 @@ class CameraController: NSObject {
23
16
  var rearCamera: AVCaptureDevice?
24
17
  var rearCameraInput: AVCaptureDeviceInput?
25
18
 
19
+ var allDiscoveredDevices: [AVCaptureDevice] = []
20
+
26
21
  var fileVideoOutput: AVCaptureMovieFileOutput?
27
22
 
28
23
  var previewLayer: AVCaptureVideoPreviewLayer?
24
+ var gridOverlayView: GridOverlayView?
25
+ var focusIndicatorView: UIView?
29
26
 
30
27
  var flashMode = AVCaptureDevice.FlashMode.off
31
28
  var photoCaptureCompletionBlock: ((UIImage?, Error?) -> Void)?
32
29
 
33
30
  var sampleBufferCaptureCompletionBlock: ((UIImage?, Error?) -> Void)?
34
-
35
- var highResolutionOutput: Bool = false
31
+
32
+ // Add callback for detecting when first frame is ready
33
+ var firstFrameReadyCallback: (() -> Void)?
34
+ var hasReceivedFirstFrame = false
36
35
 
37
36
  var audioDevice: AVCaptureDevice?
38
37
  var audioInput: AVCaptureDeviceInput?
39
38
 
40
39
  var zoomFactor: CGFloat = 1.0
40
+ private var lastZoomUpdateTime: TimeInterval = 0
41
+ private let zoomUpdateThrottle: TimeInterval = 1.0 / 60.0 // 60 FPS max
41
42
 
42
43
  var videoFileURL: URL?
43
44
  private let saneMaxZoomFactor: CGFloat = 25.5
44
45
 
46
+ // Track output preparation status
47
+ private var outputsPrepared: Bool = false
48
+ private let outputPreparationQueue = DispatchQueue(label: "camera.output.preparation", qos: .utility)
49
+
45
50
  var isUsingMultiLensVirtualCamera: Bool {
46
51
  guard let device = (currentCameraPosition == .rear) ? rearCamera : frontCamera else { return false }
47
52
  // A rear multi-lens virtual camera will have a min zoom of 1.0 but support wider angles
@@ -50,215 +55,458 @@ class CameraController: NSObject {
50
55
  }
51
56
 
52
57
  extension CameraController {
53
- func prepare(cameraPosition: String, deviceId: String? = nil, disableAudio: Bool, cameraMode: Bool, completionHandler: @escaping (Error?) -> Void) {
54
- func createCaptureSession() {
55
- self.captureSession = AVCaptureSession()
56
- }
57
-
58
- func configureCaptureDevices() throws {
59
- // Expanded device types to support more camera configurations
60
- let deviceTypes: [AVCaptureDevice.DeviceType] = [
61
- .builtInWideAngleCamera,
62
- .builtInUltraWideCamera,
63
- .builtInTelephotoCamera,
64
- .builtInDualCamera,
65
- .builtInDualWideCamera,
66
- .builtInTripleCamera,
67
- .builtInTrueDepthCamera
68
- ]
69
-
70
- let session = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: .unspecified)
71
-
72
- let cameras = session.devices.compactMap { $0 }
73
-
74
- // Log all found devices for debugging
75
- print("[CameraPreview] Found \(cameras.count) devices:")
76
- for camera in cameras {
77
- let constituentCount = camera.isVirtualDevice ? camera.constituentDevices.count : 1
78
- print("[CameraPreview] - \(camera.localizedName) (Position: \(camera.position.rawValue), Virtual: \(camera.isVirtualDevice), Lenses: \(constituentCount), Zoom: \(camera.minAvailableVideoZoomFactor)-\(camera.maxAvailableVideoZoomFactor))")
58
+ func prepareFullSession() {
59
+ // Only prepare if we don't already have a session
60
+ guard self.captureSession == nil else { return }
61
+
62
+ print("[CameraPreview] Preparing full camera session in background")
63
+
64
+ // 1. Create and configure session
65
+ self.captureSession = AVCaptureSession()
66
+
67
+ // 2. Pre-configure session preset (can be changed later) - use medium for faster startup
68
+ if captureSession!.canSetSessionPreset(.medium) {
69
+ captureSession!.sessionPreset = .medium // Start with medium, upgrade later if needed
70
+ } else if captureSession!.canSetSessionPreset(.high) {
71
+ captureSession!.sessionPreset = .high
72
+ }
73
+
74
+ // 3. Discover cameras on-demand (only when needed for better startup performance)
75
+ // discoverAndConfigureCameras() - moved to lazy loading
76
+
77
+ // // 4. Pre-create outputs asynchronously to avoid blocking camera opening
78
+ // outputPreparationQueue.async { [weak self] in
79
+ // self?.prepareOutputs()
80
+ // }
81
+
82
+ print("[CameraPreview] Full session preparation complete - cameras will be discovered on-demand, outputs being prepared asynchronously")
83
+ }
84
+
85
+ private func ensureCamerasDiscovered() {
86
+ // Rediscover cameras if the array is empty OR if the camera pointers are nil
87
+ guard allDiscoveredDevices.isEmpty || (rearCamera == nil && frontCamera == nil) else { return }
88
+ discoverAndConfigureCameras()
89
+ }
90
+
91
+ private func discoverAndConfigureCameras() {
92
+ let deviceTypes: [AVCaptureDevice.DeviceType] = [
93
+ .builtInWideAngleCamera,
94
+ .builtInUltraWideCamera,
95
+ .builtInTelephotoCamera,
96
+ .builtInDualCamera,
97
+ .builtInDualWideCamera,
98
+ .builtInTripleCamera,
99
+ .builtInTrueDepthCamera
100
+ ]
101
+
102
+ let session = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: .unspecified)
103
+ let cameras = session.devices.compactMap { $0 }
104
+
105
+ // Store all discovered devices for fast lookup later
106
+ self.allDiscoveredDevices = cameras
107
+
108
+ // Log all found devices for debugging
109
+ print("[CameraPreview] Found \(cameras.count) devices:")
110
+ for camera in cameras {
111
+ let constituentCount = camera.isVirtualDevice ? camera.constituentDevices.count : 1
112
+ print("[CameraPreview] - \(camera.localizedName) (Position: \(camera.position.rawValue), Virtual: \(camera.isVirtualDevice), Lenses: \(constituentCount))")
113
+ }
114
+
115
+ // Find best cameras
116
+ let rearVirtualDevices = cameras.filter { $0.position == .back && $0.isVirtualDevice }
117
+ let bestRearVirtualDevice = rearVirtualDevices.max { $0.constituentDevices.count < $1.constituentDevices.count }
118
+
119
+ self.frontCamera = cameras.first(where: { $0.position == .front })
120
+
121
+ if let bestCamera = bestRearVirtualDevice {
122
+ self.rearCamera = bestCamera
123
+ print("[CameraPreview] Selected best virtual rear camera: \(bestCamera.localizedName) with \(bestCamera.constituentDevices.count) physical cameras.")
124
+ } else if let firstRearCamera = cameras.first(where: { $0.position == .back }) {
125
+ self.rearCamera = firstRearCamera
126
+ print("[CameraPreview] WARN: No virtual rear camera found. Selected first available: \(firstRearCamera.localizedName)")
127
+ }
128
+
129
+ // Pre-configure focus modes
130
+ configureCameraFocus(camera: self.rearCamera)
131
+ configureCameraFocus(camera: self.frontCamera)
132
+ }
133
+
134
+ private func configureCameraFocus(camera: AVCaptureDevice?) {
135
+ guard let camera = camera else { return }
136
+
137
+ do {
138
+ try camera.lockForConfiguration()
139
+ if camera.isFocusModeSupported(.continuousAutoFocus) {
140
+ camera.focusMode = .continuousAutoFocus
79
141
  }
142
+ camera.unlockForConfiguration()
143
+ } catch {
144
+ print("[CameraPreview] Could not configure focus for \(camera.localizedName): \(error)")
145
+ }
146
+ }
80
147
 
81
- guard !cameras.isEmpty else {
82
- print("[CameraPreview] ERROR: No cameras found.")
83
- throw CameraControllerError.noCamerasAvailable
148
+ private func prepareOutputs() {
149
+ // Pre-create photo output with optimized settings
150
+ self.photoOutput = AVCapturePhotoOutput()
151
+ self.photoOutput?.isHighResolutionCaptureEnabled = false // Start with lower resolution for speed
152
+
153
+ // Configure photo output for better performance
154
+ if #available(iOS 13.0, *) {
155
+ self.photoOutput?.maxPhotoQualityPrioritization = .speed // Prioritize speed over quality initially
156
+ }
157
+
158
+ // Pre-create video output
159
+ self.fileVideoOutput = AVCaptureMovieFileOutput()
160
+
161
+ // Pre-create data output with optimized settings
162
+ self.dataOutput = AVCaptureVideoDataOutput()
163
+ self.dataOutput?.videoSettings = [
164
+ (kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
165
+ ]
166
+ self.dataOutput?.alwaysDiscardsLateVideoFrames = true
167
+
168
+ // Use a background queue for sample buffer processing to avoid blocking main thread
169
+ let dataOutputQueue = DispatchQueue(label: "camera.data.output", qos: .userInitiated)
170
+ self.dataOutput?.setSampleBufferDelegate(nil, queue: dataOutputQueue) // Will be set later
171
+
172
+ // Mark outputs as prepared
173
+ self.outputsPrepared = true
174
+
175
+ print("[CameraPreview] Outputs pre-created with performance optimizations")
176
+ }
177
+
178
+ private func waitForOutputsToBeReady() {
179
+ // If outputs are already prepared, return immediately
180
+ if outputsPrepared {
181
+ return
182
+ }
183
+
184
+ // Wait for outputs to be prepared with a timeout
185
+ let semaphore = DispatchSemaphore(value: 0)
186
+ var outputsReady = false
187
+
188
+ // Check for outputs readiness periodically
189
+ let timer = Timer.scheduledTimer(withTimeInterval: 0.01, repeats: true) { timer in
190
+ if self.outputsPrepared {
191
+ outputsReady = true
192
+ timer.invalidate()
193
+ semaphore.signal()
84
194
  }
195
+ }
85
196
 
86
- // --- Corrected Device Selection Logic ---
87
- // Find the virtual device with the most constituent cameras (this is the most capable one)
88
- let rearVirtualDevices = cameras.filter { $0.position == .back && $0.isVirtualDevice }
89
- let bestRearVirtualDevice = rearVirtualDevices.max { $0.constituentDevices.count < $1.constituentDevices.count }
197
+ // Wait for outputs to be ready or timeout after 2 seconds
198
+ let timeout = DispatchTime.now() + .seconds(2)
199
+ let result = semaphore.wait(timeout: timeout)
90
200
 
91
- self.frontCamera = cameras.first(where: { $0.position == .front })
201
+ timer.invalidate()
92
202
 
93
- if let bestCamera = bestRearVirtualDevice {
94
- self.rearCamera = bestCamera
95
- print("[CameraPreview] Selected best virtual rear camera: \(bestCamera.localizedName) with \(bestCamera.constituentDevices.count) physical cameras.")
96
- } else if let firstRearCamera = cameras.first(where: { $0.position == .back }) {
97
- // Fallback for devices without a virtual camera system
98
- self.rearCamera = firstRearCamera
99
- print("[CameraPreview] WARN: No virtual rear camera found. Selected first available: \(firstRearCamera.localizedName)")
203
+ if result == .timedOut && !outputsReady {
204
+ print("[CameraPreview] Warning: Timed out waiting for outputs to be prepared, proceeding anyway")
205
+ // Fallback: prepare outputs synchronously if async preparation failed
206
+ if !outputsPrepared {
207
+ prepareOutputs()
100
208
  }
101
- // --- End of Correction ---
209
+ } else {
210
+ print("[CameraPreview] Outputs ready, proceeding with camera preparation")
211
+ }
212
+ }
102
213
 
103
- if let rearCamera = self.rearCamera {
104
- do {
105
- try rearCamera.lockForConfiguration()
106
- if rearCamera.isFocusModeSupported(.continuousAutoFocus) {
107
- rearCamera.focusMode = .continuousAutoFocus
108
- }
109
- rearCamera.unlockForConfiguration()
110
- } catch {
111
- print("[CameraPreview] WARN: Could not set focus mode on rear camera. \(error)")
112
- }
214
+ func upgradeQualitySettings() {
215
+ guard let captureSession = self.captureSession else { return }
216
+
217
+ // Upgrade session preset to high quality after initial startup
218
+ DispatchQueue.global(qos: .utility).async { [weak self] in
219
+ guard let self = self else { return }
220
+
221
+ captureSession.beginConfiguration()
222
+
223
+ // Upgrade to high quality preset
224
+ if captureSession.canSetSessionPreset(.high) && captureSession.sessionPreset != .high {
225
+ captureSession.sessionPreset = .high
226
+ print("[CameraPreview] Upgraded session preset to high quality")
113
227
  }
114
228
 
115
- if disableAudio == false {
116
- self.audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
229
+ // Upgrade photo output quality
230
+ if let photoOutput = self.photoOutput {
231
+ photoOutput.isHighResolutionCaptureEnabled = true
232
+ if #available(iOS 13.0, *) {
233
+ photoOutput.maxPhotoQualityPrioritization = .quality
234
+ }
235
+ print("[CameraPreview] Upgraded photo output to high resolution")
117
236
  }
237
+
238
+ captureSession.commitConfiguration()
118
239
  }
240
+ }
119
241
 
120
- func configureDeviceInputs() throws {
121
- guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }
242
+ func prepare(cameraPosition: String, deviceId: String? = nil, disableAudio: Bool, cameraMode: Bool, aspectRatio: String? = nil, initialZoomLevel: Float = 1.0, completionHandler: @escaping (Error?) -> Void) {
243
+ // Use background queue for preparation to avoid blocking main thread
244
+ DispatchQueue.global(qos: .userInitiated).async { [weak self] in
245
+ guard let self = self else {
246
+ DispatchQueue.main.async {
247
+ completionHandler(CameraControllerError.unknown)
248
+ }
249
+ return
250
+ }
122
251
 
123
- var selectedDevice: AVCaptureDevice?
252
+ do {
253
+ // Session and outputs already created in load(), just configure user-specific settings
254
+ if self.captureSession == nil {
255
+ // Fallback if prepareFullSession() wasn't called
256
+ self.prepareFullSession()
257
+ }
124
258
 
125
- // If deviceId is specified, use that specific device
126
- if let deviceId = deviceId {
127
- let allDevices = AVCaptureDevice.DiscoverySession(
128
- deviceTypes: [.builtInWideAngleCamera, .builtInUltraWideCamera, .builtInTelephotoCamera, .builtInDualCamera, .builtInDualWideCamera, .builtInTripleCamera, .builtInTrueDepthCamera],
129
- mediaType: .video,
130
- position: .unspecified
131
- ).devices
259
+ guard let captureSession = self.captureSession else {
260
+ throw CameraControllerError.captureSessionIsMissing
261
+ }
132
262
 
133
- selectedDevice = allDevices.first(where: { $0.uniqueID == deviceId })
134
- guard selectedDevice != nil else {
135
- throw CameraControllerError.noCamerasAvailable
263
+ print("[CameraPreview] Fast prepare - using pre-initialized session")
264
+
265
+ // Ensure outputs are prepared synchronously before starting session
266
+ self.prepareOutputs()
267
+ self.waitForOutputsToBeReady()
268
+
269
+ // Configure device inputs for the requested camera
270
+ try self.configureDeviceInputs(cameraPosition: cameraPosition, deviceId: deviceId, disableAudio: disableAudio)
271
+
272
+ // Add data output early to detect first frame
273
+ captureSession.beginConfiguration()
274
+ if let dataOutput = self.dataOutput, captureSession.canAddOutput(dataOutput) {
275
+ captureSession.addOutput(dataOutput)
276
+ // Set delegate to detect first frame
277
+ dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
136
278
  }
137
- } else {
138
- // Use position-based selection
139
- if cameraPosition == "rear" {
140
- selectedDevice = self.rearCamera
141
- } else if cameraPosition == "front" {
142
- selectedDevice = self.frontCamera
279
+ captureSession.commitConfiguration()
280
+
281
+ // Reset first frame detection
282
+ self.hasReceivedFirstFrame = false
283
+
284
+ // Start the session on background thread (AVCaptureSession.startRunning() is thread-safe)
285
+ captureSession.startRunning()
286
+ print("[CameraPreview] Session started")
287
+
288
+ // Validate and set initial zoom level asynchronously
289
+ if initialZoomLevel != 1.0 {
290
+ DispatchQueue.main.async { [weak self] in
291
+ self?.setInitialZoom(level: initialZoomLevel)
292
+ }
293
+ }
294
+
295
+ // Call completion on main thread
296
+ DispatchQueue.main.async {
297
+ completionHandler(nil)
298
+
299
+ // Upgrade quality settings after a short delay for better user experience
300
+ DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
301
+ guard let self = self else { return }
302
+
303
+ // Add remaining outputs to session and apply user settings
304
+ do {
305
+ try self.addRemainingOutputsToSession(cameraMode: cameraMode, aspectRatio: aspectRatio)
306
+ print("[CameraPreview] Remaining outputs successfully added to session")
307
+ } catch {
308
+ print("[CameraPreview] Error adding outputs to session: \(error)")
309
+ }
310
+
311
+ self.upgradeQualitySettings()
312
+ }
313
+ }
314
+ } catch {
315
+ DispatchQueue.main.async {
316
+ completionHandler(error)
143
317
  }
144
318
  }
319
+ }
320
+ }
145
321
 
146
- guard let finalDevice = selectedDevice else {
147
- print("[CameraPreview] ERROR: No camera device selected for position: \(cameraPosition)")
322
+ private func setInitialZoom(level: Float) {
323
+ let device = (currentCameraPosition == .rear) ? rearCamera : frontCamera
324
+ guard let device = device else { return }
325
+
326
+ let minZoom = device.minAvailableVideoZoomFactor
327
+ let maxZoom = min(device.maxAvailableVideoZoomFactor, saneMaxZoomFactor)
328
+
329
+ guard CGFloat(level) >= minZoom && CGFloat(level) <= maxZoom else {
330
+ print("[CameraPreview] Initial zoom level \(level) out of range (\(minZoom)-\(maxZoom))")
331
+ return
332
+ }
333
+
334
+ do {
335
+ try device.lockForConfiguration()
336
+ device.videoZoomFactor = CGFloat(level)
337
+ device.unlockForConfiguration()
338
+ self.zoomFactor = CGFloat(level)
339
+ print("[CameraPreview] Set initial zoom to \(level)")
340
+ } catch {
341
+ print("[CameraPreview] Failed to set initial zoom: \(error)")
342
+ }
343
+ }
344
+
345
+ private func configureDeviceInputs(cameraPosition: String, deviceId: String?, disableAudio: Bool) throws {
346
+ guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }
347
+
348
+ // Ensure cameras are discovered before configuring inputs
349
+ ensureCamerasDiscovered()
350
+
351
+ var selectedDevice: AVCaptureDevice?
352
+
353
+ // If deviceId is specified, find that specific device from discovered devices
354
+ if let deviceId = deviceId {
355
+ selectedDevice = self.allDiscoveredDevices.first(where: { $0.uniqueID == deviceId })
356
+ guard selectedDevice != nil else {
357
+ print("[CameraPreview] ERROR: Device with ID \(deviceId) not found in discovered devices")
148
358
  throw CameraControllerError.noCamerasAvailable
149
359
  }
360
+ } else {
361
+ // Use position-based selection from discovered cameras
362
+ if cameraPosition == "rear" {
363
+ selectedDevice = self.rearCamera
364
+ } else if cameraPosition == "front" {
365
+ selectedDevice = self.frontCamera
366
+ }
367
+ }
150
368
 
151
- print("[CameraPreview] Configuring device: \(finalDevice.localizedName)")
152
- let deviceInput = try AVCaptureDeviceInput(device: finalDevice)
369
+ guard let finalDevice = selectedDevice else {
370
+ print("[CameraPreview] ERROR: No camera device selected for position: \(cameraPosition)")
371
+ throw CameraControllerError.noCamerasAvailable
372
+ }
153
373
 
154
- if captureSession.canAddInput(deviceInput) {
155
- captureSession.addInput(deviceInput)
374
+ print("[CameraPreview] Configuring device: \(finalDevice.localizedName)")
375
+ let deviceInput = try AVCaptureDeviceInput(device: finalDevice)
156
376
 
157
- if finalDevice.position == .front {
158
- self.frontCameraInput = deviceInput
159
- self.frontCamera = finalDevice
160
- self.currentCameraPosition = .front
161
- } else {
162
- self.rearCameraInput = deviceInput
163
- self.rearCamera = finalDevice
164
- self.currentCameraPosition = .rear
377
+ if captureSession.canAddInput(deviceInput) {
378
+ captureSession.addInput(deviceInput)
165
379
 
166
- // --- Corrected Initial Zoom Logic ---
167
- try finalDevice.lockForConfiguration()
168
- if finalDevice.isFocusModeSupported(.continuousAutoFocus) {
169
- finalDevice.focusMode = .continuousAutoFocus
170
- }
380
+ if finalDevice.position == .front {
381
+ self.frontCameraInput = deviceInput
382
+ self.currentCameraPosition = .front
383
+ } else {
384
+ self.rearCameraInput = deviceInput
385
+ self.currentCameraPosition = .rear
171
386
 
172
- // On a multi-camera system, a zoom factor of 2.0 often corresponds to the standard "1x" wide-angle lens.
173
- // We set this as the default to provide a familiar starting point for users.
174
- let defaultWideAngleZoom: CGFloat = 2.0
175
- if finalDevice.isVirtualDevice && finalDevice.constituentDevices.count > 1 && finalDevice.videoZoomFactor != defaultWideAngleZoom {
176
- // Check if 2.0 is a valid zoom factor before setting it.
177
- if defaultWideAngleZoom >= finalDevice.minAvailableVideoZoomFactor && defaultWideAngleZoom <= finalDevice.maxAvailableVideoZoomFactor {
178
- print("[CameraPreview] Multi-camera system detected. Setting initial zoom to \(defaultWideAngleZoom) (standard wide-angle).")
179
- finalDevice.videoZoomFactor = defaultWideAngleZoom
180
- }
387
+ // Configure zoom for multi-camera systems - simplified and faster
388
+ if finalDevice.isVirtualDevice && finalDevice.constituentDevices.count > 1 {
389
+ try finalDevice.lockForConfiguration()
390
+ let defaultWideAngleZoom: CGFloat = 1.0 // Changed from 2.0 to 1.0 for faster startup
391
+ if defaultWideAngleZoom >= finalDevice.minAvailableVideoZoomFactor && defaultWideAngleZoom <= finalDevice.maxAvailableVideoZoomFactor {
392
+ print("[CameraPreview] Setting initial zoom to \(defaultWideAngleZoom)")
393
+ finalDevice.videoZoomFactor = defaultWideAngleZoom
181
394
  }
182
395
  finalDevice.unlockForConfiguration()
183
- // --- End of Correction ---
184
396
  }
185
- } else {
186
- print("[CameraPreview] ERROR: Cannot add device input to session.")
187
- throw CameraControllerError.inputsAreInvalid
188
397
  }
398
+ } else {
399
+ throw CameraControllerError.inputsAreInvalid
400
+ }
189
401
 
190
- // Add audio input
191
- if disableAudio == false {
192
- if let audioDevice = self.audioDevice {
193
- self.audioInput = try AVCaptureDeviceInput(device: audioDevice)
194
- if captureSession.canAddInput(self.audioInput!) {
195
- captureSession.addInput(self.audioInput!)
196
- } else {
197
- throw CameraControllerError.inputsAreInvalid
198
- }
402
+ // Add audio input if needed
403
+ if !disableAudio {
404
+ if self.audioDevice == nil {
405
+ self.audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
406
+ }
407
+ if let audioDevice = self.audioDevice {
408
+ self.audioInput = try AVCaptureDeviceInput(device: audioDevice)
409
+ if captureSession.canAddInput(self.audioInput!) {
410
+ captureSession.addInput(self.audioInput!)
411
+ } else {
412
+ throw CameraControllerError.inputsAreInvalid
199
413
  }
200
414
  }
201
415
  }
416
+ }
202
417
 
203
- func configurePhotoOutput(cameraMode: Bool) throws {
204
- guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }
418
+ private func addRemainingOutputsToSession(cameraMode: Bool, aspectRatio: String?) throws {
419
+ guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }
205
420
 
206
- // TODO: check if that really useful
207
- if !cameraMode && self.highResolutionOutput && captureSession.canSetSessionPreset(.photo) {
208
- captureSession.sessionPreset = .photo
209
- } else if cameraMode && self.highResolutionOutput && captureSession.canSetSessionPreset(.high) {
210
- captureSession.sessionPreset = .high
211
- }
421
+ // Begin configuration to batch all changes
422
+ captureSession.beginConfiguration()
423
+ defer { captureSession.commitConfiguration() }
212
424
 
213
- self.photoOutput = AVCapturePhotoOutput()
214
- self.photoOutput!.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
215
- self.photoOutput?.isHighResolutionCaptureEnabled = self.highResolutionOutput
216
- if captureSession.canAddOutput(self.photoOutput!) { captureSession.addOutput(self.photoOutput!) }
425
+ // Update session preset based on aspect ratio if needed
426
+ var targetPreset: AVCaptureSession.Preset = .high // Default to high quality
217
427
 
218
- let fileVideoOutput = AVCaptureMovieFileOutput()
219
- if captureSession.canAddOutput(fileVideoOutput) {
220
- captureSession.addOutput(fileVideoOutput)
221
- self.fileVideoOutput = fileVideoOutput
428
+ if let aspectRatio = aspectRatio {
429
+ switch aspectRatio {
430
+ case "16:9":
431
+ targetPreset = captureSession.canSetSessionPreset(.hd1920x1080) ? .hd1920x1080 : .high
432
+ case "4:3":
433
+ targetPreset = captureSession.canSetSessionPreset(.photo) ? .photo : .high
434
+ default:
435
+ targetPreset = .high
222
436
  }
223
- captureSession.startRunning()
224
437
  }
225
438
 
226
- func configureDataOutput() throws {
227
- guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }
228
-
229
- self.dataOutput = AVCaptureVideoDataOutput()
230
- self.dataOutput?.videoSettings = [
231
- (kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
232
- ]
233
- self.dataOutput?.alwaysDiscardsLateVideoFrames = true
234
- if captureSession.canAddOutput(self.dataOutput!) {
235
- captureSession.addOutput(self.dataOutput!)
236
- }
439
+ // Always try to set the best preset available
440
+ if captureSession.canSetSessionPreset(targetPreset) {
441
+ captureSession.sessionPreset = targetPreset
442
+ print("[CameraPreview] Updated preset to \(targetPreset) for aspect ratio: \(aspectRatio ?? "default")")
443
+ } else if captureSession.canSetSessionPreset(.high) {
444
+ // Fallback to high if target preset not available
445
+ captureSession.sessionPreset = .high
446
+ print("[CameraPreview] Fallback to high preset")
447
+ }
237
448
 
238
- captureSession.commitConfiguration()
449
+ // Add photo output (already created in prepareOutputs)
450
+ if let photoOutput = self.photoOutput, captureSession.canAddOutput(photoOutput) {
451
+ photoOutput.isHighResolutionCaptureEnabled = true
452
+ captureSession.addOutput(photoOutput)
453
+ }
239
454
 
240
- let queue = DispatchQueue(label: "DataOutput", attributes: [])
241
- self.dataOutput?.setSampleBufferDelegate(self, queue: queue)
455
+ // Add video output only if camera mode is enabled
456
+ if cameraMode, let videoOutput = self.fileVideoOutput, captureSession.canAddOutput(videoOutput) {
457
+ captureSession.addOutput(videoOutput)
242
458
  }
459
+ // Data output was already added in prepare() to detect first frame
460
+ }
461
+
462
+ private func addOutputsToSession(cameraMode: Bool, aspectRatio: String?) throws {
463
+ guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }
243
464
 
244
- DispatchQueue(label: "prepare").async {
245
- do {
246
- createCaptureSession()
247
- try configureCaptureDevices()
248
- try configureDeviceInputs()
249
- try configurePhotoOutput(cameraMode: cameraMode)
250
- try configureDataOutput()
251
- // try configureVideoOutput()
252
- } catch {
253
- DispatchQueue.main.async {
254
- completionHandler(error)
255
- }
465
+ // Begin configuration to batch all changes
466
+ captureSession.beginConfiguration()
467
+ defer { captureSession.commitConfiguration() }
256
468
 
257
- return
469
+ // Update session preset based on aspect ratio if needed
470
+ var targetPreset: AVCaptureSession.Preset = .high // Default to high quality
471
+
472
+ if let aspectRatio = aspectRatio {
473
+ switch aspectRatio {
474
+ case "16:9":
475
+ targetPreset = captureSession.canSetSessionPreset(.hd1920x1080) ? .hd1920x1080 : .high
476
+ case "4:3":
477
+ targetPreset = captureSession.canSetSessionPreset(.photo) ? .photo : .high
478
+ default:
479
+ targetPreset = .high
258
480
  }
481
+ }
482
+
483
+ // Always try to set the best preset available
484
+ if captureSession.canSetSessionPreset(targetPreset) {
485
+ captureSession.sessionPreset = targetPreset
486
+ print("[CameraPreview] Updated preset to \(targetPreset) for aspect ratio: \(aspectRatio ?? "default")")
487
+ } else if captureSession.canSetSessionPreset(.high) {
488
+ // Fallback to high if target preset not available
489
+ captureSession.sessionPreset = .high
490
+ print("[CameraPreview] Fallback to high preset")
491
+ }
492
+
493
+ // Add photo output (already created in prepareOutputs)
494
+ if let photoOutput = self.photoOutput, captureSession.canAddOutput(photoOutput) {
495
+ photoOutput.isHighResolutionCaptureEnabled = true
496
+ captureSession.addOutput(photoOutput)
497
+ }
259
498
 
499
+ // Add video output only if camera mode is enabled
500
+ if cameraMode, let videoOutput = self.fileVideoOutput, captureSession.canAddOutput(videoOutput) {
501
+ captureSession.addOutput(videoOutput)
502
+ }
503
+
504
+ // Add data output
505
+ if let dataOutput = self.dataOutput, captureSession.canAddOutput(dataOutput) {
506
+ captureSession.addOutput(dataOutput)
507
+ // Set delegate after outputs are added for better performance
260
508
  DispatchQueue.main.async {
261
- completionHandler(nil)
509
+ dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
262
510
  }
263
511
  }
264
512
  }
@@ -266,13 +514,51 @@ extension CameraController {
266
514
  func displayPreview(on view: UIView) throws {
267
515
  guard let captureSession = self.captureSession, captureSession.isRunning else { throw CameraControllerError.captureSessionIsMissing }
268
516
 
269
- self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
270
- self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
517
+ print("[CameraPreview] displayPreview called with view frame: \(view.frame)")
518
+
519
+ // Create and configure preview layer in one go
520
+ let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
521
+
522
+ // Batch all layer configuration to avoid multiple redraws
523
+ CATransaction.begin()
524
+ CATransaction.setDisableActions(true)
525
+
526
+ previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
527
+ previewLayer.connection?.videoOrientation = .portrait
528
+ previewLayer.isOpaque = true
529
+ previewLayer.contentsScale = UIScreen.main.scale
530
+ previewLayer.frame = view.bounds
531
+
532
+ // Insert layer and store reference
533
+ view.layer.insertSublayer(previewLayer, at: 0)
534
+ self.previewLayer = previewLayer
535
+
536
+ CATransaction.commit()
537
+
538
+ print("[CameraPreview] Set preview layer frame to view bounds: \(view.bounds)")
539
+ print("[CameraPreview] Session preset: \(captureSession.sessionPreset.rawValue)")
540
+
541
+ // Update video orientation asynchronously to avoid blocking
542
+ DispatchQueue.main.async { [weak self] in
543
+ self?.updateVideoOrientation()
544
+ }
545
+ }
546
+
547
+ func addGridOverlay(to view: UIView, gridMode: String) {
548
+ removeGridOverlay()
271
549
 
272
- view.layer.insertSublayer(self.previewLayer!, at: 0)
273
- self.previewLayer?.frame = view.frame
550
+ // Disable animation for grid overlay creation and positioning
551
+ CATransaction.begin()
552
+ CATransaction.setDisableActions(true)
553
+ gridOverlayView = GridOverlayView(frame: view.bounds)
554
+ gridOverlayView?.gridMode = gridMode
555
+ view.addSubview(gridOverlayView!)
556
+ CATransaction.commit()
557
+ }
274
558
 
275
- updateVideoOrientation()
559
+ func removeGridOverlay() {
560
+ gridOverlayView?.removeFromSuperview()
561
+ gridOverlayView = nil
276
562
  }
277
563
 
278
564
  func setupGestures(target: UIView, enableZoom: Bool) {
@@ -291,6 +577,10 @@ extension CameraController {
291
577
  func setupPinchGesture(target: UIView, selector: Selector, delegate: UIGestureRecognizerDelegate?) {
292
578
  let pinchGesture = UIPinchGestureRecognizer(target: self, action: selector)
293
579
  pinchGesture.delegate = delegate
580
+ // Optimize gesture recognition for better performance
581
+ pinchGesture.delaysTouchesBegan = false
582
+ pinchGesture.delaysTouchesEnded = false
583
+ pinchGesture.cancelsTouchesInView = false
294
584
  target.addGestureRecognizer(pinchGesture)
295
585
  }
296
586
 
@@ -298,8 +588,8 @@ extension CameraController {
298
588
  if Thread.isMainThread {
299
589
  updateVideoOrientationOnMainThread()
300
590
  } else {
301
- DispatchQueue.main.async { [weak self] in
302
- self?.updateVideoOrientationOnMainThread()
591
+ DispatchQueue.main.sync {
592
+ self.updateVideoOrientationOnMainThread()
303
593
  }
304
594
  }
305
595
  }
@@ -340,7 +630,7 @@ extension CameraController {
340
630
 
341
631
  // Ensure we have the necessary cameras
342
632
  guard (currentCameraPosition == .front && rearCamera != nil) ||
343
- (currentCameraPosition == .rear && frontCamera != nil) else {
633
+ (currentCameraPosition == .rear && frontCamera != nil) else {
344
634
  throw CameraControllerError.noCamerasAvailable
345
635
  }
346
636
 
@@ -356,9 +646,7 @@ extension CameraController {
356
646
  captureSession.commitConfiguration()
357
647
  // Restart the session if it was running before
358
648
  if wasRunning {
359
- DispatchQueue.global(qos: .userInitiated).async { [weak self] in
360
- self?.captureSession?.startRunning()
361
- }
649
+ captureSession.startRunning()
362
650
  }
363
651
  }
364
652
 
@@ -368,7 +656,7 @@ extension CameraController {
368
656
  // Remove only video inputs
369
657
  captureSession.inputs.forEach { input in
370
658
  if (input as? AVCaptureDeviceInput)?.device.hasMediaType(.video) ?? false {
371
- captureSession.removeInput(input)
659
+ captureSession.removeInput(input)
372
660
  }
373
661
  }
374
662
 
@@ -387,7 +675,7 @@ extension CameraController {
387
675
  rearCamera.unlockForConfiguration()
388
676
 
389
677
  if let newInput = try? AVCaptureDeviceInput(device: rearCamera),
390
- captureSession.canAddInput(newInput) {
678
+ captureSession.canAddInput(newInput) {
391
679
  captureSession.addInput(newInput)
392
680
  rearCameraInput = newInput
393
681
  self.currentCameraPosition = .rear
@@ -407,7 +695,7 @@ extension CameraController {
407
695
  frontCamera.unlockForConfiguration()
408
696
 
409
697
  if let newInput = try? AVCaptureDeviceInput(device: frontCamera),
410
- captureSession.canAddInput(newInput) {
698
+ captureSession.canAddInput(newInput) {
411
699
  captureSession.addInput(newInput)
412
700
  frontCameraInput = newInput
413
701
  self.currentCameraPosition = .front
@@ -422,20 +710,205 @@ extension CameraController {
422
710
  }
423
711
 
424
712
  // Update video orientation
425
- DispatchQueue.main.async { [weak self] in
426
- self?.updateVideoOrientation()
427
- }
713
+ self.updateVideoOrientation()
428
714
  }
429
715
 
430
- func captureImage(completion: @escaping (UIImage?, Error?) -> Void) {
431
- guard let captureSession = captureSession, captureSession.isRunning else { completion(nil, CameraControllerError.captureSessionIsMissing); return }
716
+ func captureImage(width: Int?, height: Int?, aspectRatio: String?, quality: Float, gpsLocation: CLLocation?, completion: @escaping (UIImage?, Error?) -> Void) {
717
+ print("[CameraPreview] captureImage called - width: \(width ?? -1), height: \(height ?? -1), aspectRatio: \(aspectRatio ?? "nil")")
718
+
719
+ guard let photoOutput = self.photoOutput else {
720
+ completion(nil, NSError(domain: "Camera", code: 0, userInfo: [NSLocalizedDescriptionKey: "Photo output is not available"]))
721
+ return
722
+ }
723
+
432
724
  let settings = AVCapturePhotoSettings()
433
725
 
434
- settings.flashMode = self.flashMode
435
- settings.isHighResolutionPhotoEnabled = self.highResolutionOutput
726
+ // Apply the current flash mode to the photo settings
727
+ // Check if the current device supports flash
728
+ var currentCamera: AVCaptureDevice?
729
+ switch currentCameraPosition {
730
+ case .front:
731
+ currentCamera = self.frontCamera
732
+ case .rear:
733
+ currentCamera = self.rearCamera
734
+ default:
735
+ break
736
+ }
737
+
738
+ // Only apply flash if the device has flash and the flash mode is supported
739
+ if let device = currentCamera, device.hasFlash {
740
+ let supportedFlashModes = photoOutput.supportedFlashModes
741
+ if supportedFlashModes.contains(self.flashMode) {
742
+ settings.flashMode = self.flashMode
743
+ }
744
+ }
745
+
746
+ self.photoCaptureCompletionBlock = { (image, error) in
747
+ if let error = error {
748
+ completion(nil, error)
749
+ return
750
+ }
751
+
752
+ guard let image = image else {
753
+ completion(nil, NSError(domain: "Camera", code: 0, userInfo: [NSLocalizedDescriptionKey: "Failed to capture image"]))
754
+ return
755
+ }
436
756
 
437
- self.photoOutput?.capturePhoto(with: settings, delegate: self)
438
- self.photoCaptureCompletionBlock = completion
757
+ if let location = gpsLocation {
758
+ self.addGPSMetadata(to: image, location: location)
759
+ }
760
+
761
+ var finalImage = image
762
+
763
+ // Determine what to do based on parameters
764
+ if let width = width, let height = height {
765
+ // Specific dimensions requested - resize to exact size
766
+ finalImage = self.resizeImage(image: image, to: CGSize(width: width, height: height))!
767
+ print("[CameraPreview] Resized to exact dimensions: \(finalImage.size.width)x\(finalImage.size.height)")
768
+ } else if let aspectRatio = aspectRatio {
769
+ // Aspect ratio specified - crop to that ratio
770
+ let components = aspectRatio.split(separator: ":").compactMap { Double($0) }
771
+ if components.count == 2 {
772
+ // For capture in portrait orientation, swap the aspect ratio (16:9 becomes 9:16)
773
+ let isPortrait = image.size.height > image.size.width
774
+ let targetAspectRatio = isPortrait ? components[1] / components[0] : components[0] / components[1]
775
+ let imageSize = image.size
776
+ let originalAspectRatio = imageSize.width / imageSize.height
777
+
778
+ // Only crop if the aspect ratios don't match
779
+ if abs(originalAspectRatio - targetAspectRatio) > 0.01 {
780
+ var targetSize = imageSize
781
+
782
+ if originalAspectRatio > targetAspectRatio {
783
+ // Original is wider than target - fit by height
784
+ targetSize.width = imageSize.height * CGFloat(targetAspectRatio)
785
+ } else {
786
+ // Original is taller than target - fit by width
787
+ targetSize.height = imageSize.width / CGFloat(targetAspectRatio)
788
+ }
789
+
790
+ // Center crop the image
791
+ if let croppedImage = self.cropImageToAspectRatio(image: image, targetSize: targetSize) {
792
+ finalImage = croppedImage
793
+ print("[CameraPreview] Applied aspect ratio crop: \(finalImage.size.width)x\(finalImage.size.height)")
794
+ }
795
+ }
796
+ }
797
+ } else {
798
+ // No parameters specified - crop to match what's visible in the preview
799
+ // This ensures we capture exactly what the user sees
800
+ if let previewLayer = self.previewLayer,
801
+ let previewCroppedImage = self.cropImageToMatchPreview(image: image, previewLayer: previewLayer) {
802
+ finalImage = previewCroppedImage
803
+ print("[CameraPreview] Cropped to match preview: \(finalImage.size.width)x\(finalImage.size.height)")
804
+ }
805
+ }
806
+
807
+ completion(finalImage, nil)
808
+ }
809
+
810
+ photoOutput.capturePhoto(with: settings, delegate: self)
811
+ }
812
+
813
+ func addGPSMetadata(to image: UIImage, location: CLLocation) {
814
+ guard let jpegData = image.jpegData(compressionQuality: 1.0),
815
+ let source = CGImageSourceCreateWithData(jpegData as CFData, nil),
816
+ let uti = CGImageSourceGetType(source) else { return }
817
+
818
+ var metadata = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [String: Any] ?? [:]
819
+
820
+ let formatter = DateFormatter()
821
+ formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ssZ"
822
+ formatter.timeZone = TimeZone(abbreviation: "UTC")
823
+
824
+ let gpsDict: [String: Any] = [
825
+ kCGImagePropertyGPSLatitude as String: abs(location.coordinate.latitude),
826
+ kCGImagePropertyGPSLatitudeRef as String: location.coordinate.latitude >= 0 ? "N" : "S",
827
+ kCGImagePropertyGPSLongitude as String: abs(location.coordinate.longitude),
828
+ kCGImagePropertyGPSLongitudeRef as String: location.coordinate.longitude >= 0 ? "E" : "W",
829
+ kCGImagePropertyGPSTimeStamp as String: formatter.string(from: location.timestamp),
830
+ kCGImagePropertyGPSAltitude as String: location.altitude,
831
+ kCGImagePropertyGPSAltitudeRef as String: location.altitude >= 0 ? 0 : 1
832
+ ]
833
+
834
+ metadata[kCGImagePropertyGPSDictionary as String] = gpsDict
835
+
836
+ let destData = NSMutableData()
837
+ guard let destination = CGImageDestinationCreateWithData(destData, uti, 1, nil) else { return }
838
+ CGImageDestinationAddImageFromSource(destination, source, 0, metadata as CFDictionary)
839
+ CGImageDestinationFinalize(destination)
840
+ }
841
+
842
+ func resizeImage(image: UIImage, to size: CGSize) -> UIImage? {
843
+ let renderer = UIGraphicsImageRenderer(size: size)
844
+ let resizedImage = renderer.image { (_) in
845
+ image.draw(in: CGRect(origin: .zero, size: size))
846
+ }
847
+ return resizedImage
848
+ }
849
+
850
+ func cropImageToAspectRatio(image: UIImage, targetSize: CGSize) -> UIImage? {
851
+ let imageSize = image.size
852
+
853
+ // Calculate the crop rect - center crop
854
+ let xOffset = (imageSize.width - targetSize.width) / 2
855
+ let yOffset = (imageSize.height - targetSize.height) / 2
856
+ let cropRect = CGRect(x: xOffset, y: yOffset, width: targetSize.width, height: targetSize.height)
857
+
858
+ // Create the cropped image
859
+ guard let cgImage = image.cgImage,
860
+ let croppedCGImage = cgImage.cropping(to: cropRect) else {
861
+ return nil
862
+ }
863
+
864
+ return UIImage(cgImage: croppedCGImage, scale: image.scale, orientation: image.imageOrientation)
865
+ }
866
+
867
+ func cropImageToMatchPreview(image: UIImage, previewLayer: AVCaptureVideoPreviewLayer) -> UIImage? {
868
+ // When using resizeAspectFill, the preview layer shows a cropped portion of the video
869
+ // We need to calculate what portion of the captured image corresponds to what's visible
870
+
871
+ let previewBounds = previewLayer.bounds
872
+ let previewAspectRatio = previewBounds.width / previewBounds.height
873
+
874
+ // Get the dimensions of the captured image
875
+ let imageSize = image.size
876
+ let imageAspectRatio = imageSize.width / imageSize.height
877
+
878
+ print("[CameraPreview] cropImageToMatchPreview - Preview bounds: \(previewBounds.width)x\(previewBounds.height) (ratio: \(previewAspectRatio))")
879
+ print("[CameraPreview] cropImageToMatchPreview - Image size: \(imageSize.width)x\(imageSize.height) (ratio: \(imageAspectRatio))")
880
+
881
+ // Since we're using resizeAspectFill, we need to calculate what portion of the image
882
+ // is visible in the preview
883
+ var cropRect: CGRect
884
+
885
+ if imageAspectRatio > previewAspectRatio {
886
+ // Image is wider than preview - crop horizontally
887
+ let visibleWidth = imageSize.height * previewAspectRatio
888
+ let xOffset = (imageSize.width - visibleWidth) / 2
889
+ cropRect = CGRect(x: xOffset, y: 0, width: visibleWidth, height: imageSize.height)
890
+ print("[CameraPreview] cropImageToMatchPreview - Cropping horizontally: visible width = \(visibleWidth), offset = \(xOffset)")
891
+ } else {
892
+ // Image is taller than preview - crop vertically
893
+ let visibleHeight = imageSize.width / previewAspectRatio
894
+ let yOffset = (imageSize.height - visibleHeight) / 2
895
+ cropRect = CGRect(x: 0, y: yOffset, width: imageSize.width, height: visibleHeight)
896
+ print("[CameraPreview] cropImageToMatchPreview - Cropping vertically: visible height = \(visibleHeight), offset = \(yOffset)")
897
+ }
898
+
899
+ print("[CameraPreview] cropImageToMatchPreview - Crop rect: \(cropRect)")
900
+
901
+ // Create the cropped image
902
+ guard let cgImage = image.cgImage,
903
+ let croppedCGImage = cgImage.cropping(to: cropRect) else {
904
+ print("[CameraPreview] cropImageToMatchPreview - Failed to crop image")
905
+ return nil
906
+ }
907
+
908
+ let result = UIImage(cgImage: croppedCGImage, scale: image.scale, orientation: image.imageOrientation)
909
+ print("[CameraPreview] cropImageToMatchPreview - Result size: \(result.size.width)x\(result.size.height)")
910
+
911
+ return result
439
912
  }
440
913
 
441
914
  func captureSample(completion: @escaping (UIImage?, Error?) -> Void) {
@@ -612,7 +1085,7 @@ extension CameraController {
612
1085
  )
613
1086
  }
614
1087
 
615
- func setZoom(level: CGFloat, ramp: Bool) throws {
1088
+ func setZoom(level: CGFloat, ramp: Bool, autoFocus: Bool = true) throws {
616
1089
  var currentCamera: AVCaptureDevice?
617
1090
  switch currentCameraPosition {
618
1091
  case .front:
@@ -633,7 +1106,8 @@ extension CameraController {
633
1106
  try device.lockForConfiguration()
634
1107
 
635
1108
  if ramp {
636
- device.ramp(toVideoZoomFactor: zoomLevel, withRate: 1.0)
1109
+ // Use a very fast ramp rate for immediate response
1110
+ device.ramp(toVideoZoomFactor: zoomLevel, withRate: 8.0)
637
1111
  } else {
638
1112
  device.videoZoomFactor = zoomLevel
639
1113
  }
@@ -642,11 +1116,123 @@ extension CameraController {
642
1116
 
643
1117
  // Update our internal zoom factor tracking
644
1118
  self.zoomFactor = zoomLevel
1119
+
1120
+ // Trigger autofocus after zoom if requested
1121
+ if autoFocus {
1122
+ self.triggerAutoFocus()
1123
+ }
645
1124
  } catch {
646
1125
  throw CameraControllerError.invalidOperation
647
1126
  }
648
1127
  }
649
1128
 
1129
+ private func triggerAutoFocus() {
1130
+ var currentCamera: AVCaptureDevice?
1131
+ switch currentCameraPosition {
1132
+ case .front:
1133
+ currentCamera = self.frontCamera
1134
+ case .rear:
1135
+ currentCamera = self.rearCamera
1136
+ default: break
1137
+ }
1138
+
1139
+ guard let device = currentCamera else {
1140
+ return
1141
+ }
1142
+
1143
+ // Focus on the center of the preview (0.5, 0.5)
1144
+ let centerPoint = CGPoint(x: 0.5, y: 0.5)
1145
+
1146
+ do {
1147
+ try device.lockForConfiguration()
1148
+
1149
+ // Set focus mode to auto if supported
1150
+ if device.isFocusModeSupported(.autoFocus) {
1151
+ device.focusMode = .autoFocus
1152
+ if device.isFocusPointOfInterestSupported {
1153
+ device.focusPointOfInterest = centerPoint
1154
+ }
1155
+ } else if device.isFocusModeSupported(.continuousAutoFocus) {
1156
+ device.focusMode = .continuousAutoFocus
1157
+ if device.isFocusPointOfInterestSupported {
1158
+ device.focusPointOfInterest = centerPoint
1159
+ }
1160
+ }
1161
+
1162
+ // Also set exposure point if supported
1163
+ if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) {
1164
+ device.exposureMode = .autoExpose
1165
+ device.exposurePointOfInterest = centerPoint
1166
+ } else if device.isExposureModeSupported(.continuousAutoExposure) {
1167
+ device.exposureMode = .continuousAutoExposure
1168
+ if device.isExposurePointOfInterestSupported {
1169
+ device.exposurePointOfInterest = centerPoint
1170
+ }
1171
+ }
1172
+
1173
+ device.unlockForConfiguration()
1174
+ } catch {
1175
+ // Silently ignore errors during autofocus
1176
+ }
1177
+ }
1178
+
1179
+ func setFocus(at point: CGPoint, showIndicator: Bool = false, in view: UIView? = nil) throws {
1180
+ // Validate that coordinates are within bounds (0-1 range for device coordinates)
1181
+ if point.x < 0 || point.x > 1 || point.y < 0 || point.y > 1 {
1182
+ print("setFocus: Coordinates out of bounds - x: \(point.x), y: \(point.y)")
1183
+ throw CameraControllerError.invalidOperation
1184
+ }
1185
+
1186
+ var currentCamera: AVCaptureDevice?
1187
+ switch currentCameraPosition {
1188
+ case .front:
1189
+ currentCamera = self.frontCamera
1190
+ case .rear:
1191
+ currentCamera = self.rearCamera
1192
+ default: break
1193
+ }
1194
+
1195
+ guard let device = currentCamera else {
1196
+ throw CameraControllerError.noCamerasAvailable
1197
+ }
1198
+
1199
+ guard device.isFocusPointOfInterestSupported else {
1200
+ // Device doesn't support focus point of interest
1201
+ return
1202
+ }
1203
+
1204
+ // Show focus indicator if requested and view is provided - only after validation
1205
+ if showIndicator, let view = view, let previewLayer = self.previewLayer {
1206
+ // Convert the device point to layer point for indicator display
1207
+ let layerPoint = previewLayer.layerPointConverted(fromCaptureDevicePoint: point)
1208
+ showFocusIndicator(at: layerPoint, in: view)
1209
+ }
1210
+
1211
+ do {
1212
+ try device.lockForConfiguration()
1213
+
1214
+ // Set focus mode to auto if supported
1215
+ if device.isFocusModeSupported(.autoFocus) {
1216
+ device.focusMode = .autoFocus
1217
+ } else if device.isFocusModeSupported(.continuousAutoFocus) {
1218
+ device.focusMode = .continuousAutoFocus
1219
+ }
1220
+
1221
+ // Set the focus point
1222
+ device.focusPointOfInterest = point
1223
+
1224
+ // Also set exposure point if supported
1225
+ if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) {
1226
+ device.exposureMode = .autoExpose
1227
+ device.exposurePointOfInterest = point
1228
+ }
1229
+
1230
+ device.unlockForConfiguration()
1231
+ } catch {
1232
+ throw CameraControllerError.unknown
1233
+ }
1234
+ }
1235
+
650
1236
  func getFlashMode() throws -> String {
651
1237
  switch self.flashMode {
652
1238
  case .off:
@@ -677,7 +1263,7 @@ extension CameraController {
677
1263
 
678
1264
  return device.uniqueID
679
1265
  }
680
-
1266
+
681
1267
  func getCurrentLensInfo() throws -> (focalLength: Float, deviceType: String, baseZoomRatio: Float) {
682
1268
  var currentCamera: AVCaptureDevice?
683
1269
  switch currentCameraPosition {
@@ -757,9 +1343,7 @@ extension CameraController {
757
1343
  captureSession.commitConfiguration()
758
1344
  // Restart the session if it was running before
759
1345
  if wasRunning {
760
- DispatchQueue.global(qos: .userInitiated).async { [weak self] in
761
- self?.captureSession?.startRunning()
762
- }
1346
+ captureSession.startRunning()
763
1347
  }
764
1348
  }
765
1349
 
@@ -806,13 +1390,9 @@ extension CameraController {
806
1390
  }
807
1391
 
808
1392
  // Update video orientation
809
- DispatchQueue.main.async { [weak self] in
810
- self?.updateVideoOrientation()
811
- }
1393
+ self.updateVideoOrientation()
812
1394
  }
813
1395
 
814
-
815
-
816
1396
  func cleanup() {
817
1397
  if let captureSession = self.captureSession {
818
1398
  captureSession.stopRunning()
@@ -823,6 +1403,9 @@ extension CameraController {
823
1403
  self.previewLayer?.removeFromSuperlayer()
824
1404
  self.previewLayer = nil
825
1405
 
1406
+ self.focusIndicatorView?.removeFromSuperview()
1407
+ self.focusIndicatorView = nil
1408
+
826
1409
  self.frontCameraInput = nil
827
1410
  self.rearCameraInput = nil
828
1411
  self.audioInput = nil
@@ -830,6 +1413,7 @@ extension CameraController {
830
1413
  self.frontCamera = nil
831
1414
  self.rearCamera = nil
832
1415
  self.audioDevice = nil
1416
+ self.allDiscoveredDevices = []
833
1417
 
834
1418
  self.dataOutput = nil
835
1419
  self.photoOutput = nil
@@ -837,6 +1421,13 @@ extension CameraController {
837
1421
 
838
1422
  self.captureSession = nil
839
1423
  self.currentCameraPosition = nil
1424
+
1425
+ // Reset output preparation status
1426
+ self.outputsPrepared = false
1427
+
1428
+ // Reset first frame detection
1429
+ self.hasReceivedFirstFrame = false
1430
+ self.firstFrameReadyCallback = nil
840
1431
  }
841
1432
 
842
1433
  func captureVideo() throws {
@@ -913,6 +1504,11 @@ extension CameraController: UIGestureRecognizerDelegate {
913
1504
  let point = tap.location(in: tap.view)
914
1505
  let devicePoint = self.previewLayer?.captureDevicePointConverted(fromLayerPoint: point)
915
1506
 
1507
+ // Show focus indicator at the tap point
1508
+ if let view = tap.view {
1509
+ showFocusIndicator(at: point, in: view)
1510
+ }
1511
+
916
1512
  do {
917
1513
  try device.lockForConfiguration()
918
1514
  defer { device.unlockForConfiguration() }
@@ -933,6 +1529,54 @@ extension CameraController: UIGestureRecognizerDelegate {
933
1529
  }
934
1530
  }
935
1531
 
1532
+ private func showFocusIndicator(at point: CGPoint, in view: UIView) {
1533
+ // Remove any existing focus indicator
1534
+ focusIndicatorView?.removeFromSuperview()
1535
+
1536
+ // Create a new focus indicator
1537
+ let indicator = UIView(frame: CGRect(x: 0, y: 0, width: 80, height: 80))
1538
+ indicator.center = point
1539
+ indicator.layer.borderColor = UIColor.yellow.cgColor
1540
+ indicator.layer.borderWidth = 2.0
1541
+ indicator.layer.cornerRadius = 40
1542
+ indicator.backgroundColor = UIColor.clear
1543
+ indicator.alpha = 0
1544
+ indicator.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)
1545
+
1546
+ // Add inner circle for better visibility
1547
+ let innerCircle = UIView(frame: CGRect(x: 20, y: 20, width: 40, height: 40))
1548
+ innerCircle.layer.borderColor = UIColor.yellow.cgColor
1549
+ innerCircle.layer.borderWidth = 1.0
1550
+ innerCircle.layer.cornerRadius = 20
1551
+ innerCircle.backgroundColor = UIColor.clear
1552
+ indicator.addSubview(innerCircle)
1553
+
1554
+ view.addSubview(indicator)
1555
+ focusIndicatorView = indicator
1556
+
1557
+ // Animate the focus indicator
1558
+ UIView.animate(withDuration: 0.15, animations: {
1559
+ indicator.alpha = 1.0
1560
+ indicator.transform = CGAffineTransform.identity
1561
+ }) { _ in
1562
+ // Keep the indicator visible for a moment
1563
+ UIView.animate(withDuration: 0.2, delay: 0.5, options: [], animations: {
1564
+ indicator.alpha = 0.3
1565
+ }) { _ in
1566
+ // Fade out and remove
1567
+ UIView.animate(withDuration: 0.3, delay: 0.2, options: [], animations: {
1568
+ indicator.alpha = 0
1569
+ indicator.transform = CGAffineTransform(scaleX: 0.8, y: 0.8)
1570
+ }) { _ in
1571
+ indicator.removeFromSuperview()
1572
+ if self.focusIndicatorView == indicator {
1573
+ self.focusIndicatorView = nil
1574
+ }
1575
+ }
1576
+ }
1577
+ }
1578
+ }
1579
+
936
1580
  @objc
937
1581
  private func handlePinch(_ pinch: UIPinchGestureRecognizer) {
938
1582
  guard let device = self.currentCameraPosition == .rear ? rearCamera : frontCamera else { return }
@@ -940,45 +1584,75 @@ extension CameraController: UIGestureRecognizerDelegate {
940
1584
  let effectiveMaxZoom = min(device.maxAvailableVideoZoomFactor, self.saneMaxZoomFactor)
941
1585
  func minMaxZoom(_ factor: CGFloat) -> CGFloat { return max(device.minAvailableVideoZoomFactor, min(factor, effectiveMaxZoom)) }
942
1586
 
943
- func update(scale factor: CGFloat) {
1587
+ switch pinch.state {
1588
+ case .began:
1589
+ // Store the initial zoom factor when pinch begins
1590
+ zoomFactor = device.videoZoomFactor
1591
+
1592
+ case .changed:
1593
+ // Throttle zoom updates to prevent excessive CPU usage
1594
+ let currentTime = CACurrentMediaTime()
1595
+ guard currentTime - lastZoomUpdateTime >= zoomUpdateThrottle else { return }
1596
+ lastZoomUpdateTime = currentTime
1597
+
1598
+ // Calculate new zoom factor based on pinch scale
1599
+ let newScaleFactor = minMaxZoom(pinch.scale * zoomFactor)
1600
+
1601
+ // Use ramping for smooth zoom transitions during pinch
1602
+ // This provides much smoother performance than direct setting
944
1603
  do {
945
1604
  try device.lockForConfiguration()
946
- defer { device.unlockForConfiguration() }
947
-
948
- device.videoZoomFactor = factor
1605
+ // Use a very fast ramp rate for immediate response
1606
+ device.ramp(toVideoZoomFactor: newScaleFactor, withRate: 5.0)
1607
+ device.unlockForConfiguration()
949
1608
  } catch {
950
- debugPrint(error)
1609
+ debugPrint("Failed to set zoom: \(error)")
951
1610
  }
952
- }
953
1611
 
954
- switch pinch.state {
955
- case .began: fallthrough
956
- case .changed:
957
- let newScaleFactor = minMaxZoom(pinch.scale * zoomFactor)
958
- update(scale: newScaleFactor)
959
1612
  case .ended:
1613
+ // Update our internal zoom factor tracking
960
1614
  zoomFactor = device.videoZoomFactor
1615
+
961
1616
  default: break
962
1617
  }
963
1618
  }
964
1619
  }
965
1620
 
966
1621
  extension CameraController: AVCapturePhotoCaptureDelegate {
967
- public func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
968
- resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Swift.Error?) {
1622
+ public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
969
1623
  if let error = error {
970
1624
  self.photoCaptureCompletionBlock?(nil, error)
971
- } else if let buffer = photoSampleBuffer, let data = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buffer, previewPhotoSampleBuffer: nil),
972
- let image = UIImage(data: data) {
973
- self.photoCaptureCompletionBlock?(image.fixedOrientation(), nil)
974
- } else {
1625
+ return
1626
+ }
1627
+
1628
+ // Get the photo data using the modern API
1629
+ guard let imageData = photo.fileDataRepresentation() else {
975
1630
  self.photoCaptureCompletionBlock?(nil, CameraControllerError.unknown)
1631
+ return
1632
+ }
1633
+
1634
+ guard let image = UIImage(data: imageData) else {
1635
+ self.photoCaptureCompletionBlock?(nil, CameraControllerError.unknown)
1636
+ return
976
1637
  }
1638
+
1639
+ self.photoCaptureCompletionBlock?(image.fixedOrientation(), nil)
977
1640
  }
978
1641
  }
979
1642
 
980
1643
  extension CameraController: AVCaptureVideoDataOutputSampleBufferDelegate {
981
1644
  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
1645
+ // Check if we're waiting for the first frame
1646
+ if !hasReceivedFirstFrame, let firstFrameCallback = firstFrameReadyCallback {
1647
+ hasReceivedFirstFrame = true
1648
+ firstFrameCallback()
1649
+ firstFrameReadyCallback = nil
1650
+ // If no capture is in progress, we can return early
1651
+ if sampleBufferCaptureCompletionBlock == nil {
1652
+ return
1653
+ }
1654
+ }
1655
+
982
1656
  guard let completion = sampleBufferCaptureCompletionBlock else { return }
983
1657
 
984
1658
  guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
@@ -1028,6 +1702,7 @@ enum CameraControllerError: Swift.Error {
1028
1702
  case cannotFindDocumentsDirectory
1029
1703
  case fileVideoOutputNotFound
1030
1704
  case unknown
1705
+ case invalidZoomLevel(min: CGFloat, max: CGFloat, requested: CGFloat)
1031
1706
  }
1032
1707
 
1033
1708
  public enum CameraPosition {
@@ -1054,6 +1729,8 @@ extension CameraControllerError: LocalizedError {
1054
1729
  return NSLocalizedString("Cannot find documents directory", comment: "This should never happen")
1055
1730
  case .fileVideoOutputNotFound:
1056
1731
  return NSLocalizedString("Video recording is not available. Make sure the camera is properly initialized.", comment: "Video recording not available")
1732
+ case .invalidZoomLevel(let min, let max, let requested):
1733
+ return NSLocalizedString("Invalid zoom level. Must be between \(min) and \(max). Requested: \(requested)", comment: "Invalid Zoom Level")
1057
1734
  }
1058
1735
  }
1059
1736
  }
@@ -1095,6 +1772,8 @@ extension UIImage {
1095
1772
  print("right")
1096
1773
  case .up, .upMirrored:
1097
1774
  break
1775
+ @unknown default:
1776
+ break
1098
1777
  }
1099
1778
 
1100
1779
  // Flip image one more time if needed to, this is to prevent flipped image
@@ -1107,15 +1786,21 @@ extension UIImage {
1107
1786
  transform.scaledBy(x: -1, y: 1)
1108
1787
  case .up, .down, .left, .right:
1109
1788
  break
1789
+ @unknown default:
1790
+ break
1110
1791
  }
1111
1792
 
1112
1793
  ctx.concatenate(transform)
1113
1794
 
1114
1795
  switch imageOrientation {
1115
1796
  case .left, .leftMirrored, .right, .rightMirrored:
1116
- ctx.draw(self.cgImage!, in: CGRect(x: 0, y: 0, width: size.height, height: size.width))
1797
+ if let cgImage = self.cgImage {
1798
+ ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width))
1799
+ }
1117
1800
  default:
1118
- ctx.draw(self.cgImage!, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
1801
+ if let cgImage = self.cgImage {
1802
+ ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
1803
+ }
1119
1804
  }
1120
1805
  guard let newCGImage = ctx.makeImage() else { return nil }
1121
1806
  return UIImage.init(cgImage: newCGImage, scale: 1, orientation: .up)