@capgo/camera-preview 7.3.11 → 7.4.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CapgoCameraPreview.podspec +16 -13
- package/README.md +492 -73
- package/android/build.gradle +11 -0
- package/android/gradle/wrapper/gradle-wrapper.properties +1 -1
- package/android/src/main/AndroidManifest.xml +5 -3
- package/android/src/main/java/com/ahm/capacitor/camera/preview/CameraPreview.java +968 -505
- package/android/src/main/java/com/ahm/capacitor/camera/preview/CameraXView.java +3017 -0
- package/android/src/main/java/com/ahm/capacitor/camera/preview/GridOverlayView.java +119 -0
- package/android/src/main/java/com/ahm/capacitor/camera/preview/model/CameraDevice.java +63 -0
- package/android/src/main/java/com/ahm/capacitor/camera/preview/model/CameraLens.java +79 -0
- package/android/src/main/java/com/ahm/capacitor/camera/preview/model/CameraSessionConfiguration.java +167 -0
- package/android/src/main/java/com/ahm/capacitor/camera/preview/model/LensInfo.java +40 -0
- package/android/src/main/java/com/ahm/capacitor/camera/preview/model/ZoomFactors.java +35 -0
- package/dist/docs.json +1041 -161
- package/dist/esm/definitions.d.ts +484 -84
- package/dist/esm/definitions.js +10 -1
- package/dist/esm/definitions.js.map +1 -1
- package/dist/esm/web.d.ts +78 -3
- package/dist/esm/web.js +813 -68
- package/dist/esm/web.js.map +1 -1
- package/dist/plugin.cjs.js +819 -68
- package/dist/plugin.cjs.js.map +1 -1
- package/dist/plugin.js +819 -68
- package/dist/plugin.js.map +1 -1
- package/ios/Sources/CapgoCameraPreviewPlugin/CameraController.swift +1663 -0
- package/ios/Sources/CapgoCameraPreviewPlugin/GridOverlayView.swift +65 -0
- package/ios/Sources/CapgoCameraPreviewPlugin/Plugin.swift +1550 -0
- package/ios/Tests/CameraPreviewPluginTests/CameraPreviewPluginTests.swift +15 -0
- package/package.json +2 -2
- package/android/src/main/java/com/ahm/capacitor/camera/preview/CameraActivity.java +0 -1279
- package/android/src/main/java/com/ahm/capacitor/camera/preview/CustomSurfaceView.java +0 -29
- package/android/src/main/java/com/ahm/capacitor/camera/preview/CustomTextureView.java +0 -39
- package/android/src/main/java/com/ahm/capacitor/camera/preview/Preview.java +0 -461
- package/android/src/main/java/com/ahm/capacitor/camera/preview/TapGestureDetector.java +0 -24
- package/ios/Plugin/CameraController.swift +0 -809
- package/ios/Plugin/Info.plist +0 -24
- package/ios/Plugin/Plugin.h +0 -10
- package/ios/Plugin/Plugin.m +0 -18
- package/ios/Plugin/Plugin.swift +0 -511
- package/ios/Plugin.xcodeproj/project.pbxproj +0 -593
- package/ios/Plugin.xcodeproj/project.xcworkspace/contents.xcworkspacedata +0 -7
- package/ios/Plugin.xcworkspace/contents.xcworkspacedata +0 -10
- package/ios/Plugin.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist +0 -8
- package/ios/PluginTests/Info.plist +0 -22
- package/ios/PluginTests/PluginTests.swift +0 -83
- package/ios/Podfile +0 -13
- package/ios/Podfile.lock +0 -23
|
@@ -0,0 +1,1663 @@
|
|
|
1
|
+
import AVFoundation
|
|
2
|
+
import UIKit
|
|
3
|
+
import CoreLocation
|
|
4
|
+
|
|
5
|
+
class CameraController: NSObject {
    // Active capture session; created lazily in prepare() / prepareFullSession().
    var captureSession: AVCaptureSession?

    // Which camera (front/rear) currently feeds the session, nil before prepare().
    var currentCameraPosition: CameraPosition?

    // Cached default devices chosen by the last discovery pass.
    var frontCamera: AVCaptureDevice?
    var frontCameraInput: AVCaptureDeviceInput?

    // BGRA frame output driving the preview / first-frame detection.
    var dataOutput: AVCaptureVideoDataOutput?
    // Still-photo output used by captureImage(...).
    var photoOutput: AVCapturePhotoOutput?

    var rearCamera: AVCaptureDevice?
    var rearCameraInput: AVCaptureDeviceInput?

    // Every camera found by the last discovery pass; used for deviceId lookups.
    var allDiscoveredDevices: [AVCaptureDevice] = []

    // Movie-file output for video recording (attached only in camera mode).
    var fileVideoOutput: AVCaptureMovieFileOutput?

    var previewLayer: AVCaptureVideoPreviewLayer?
    var gridOverlayView: GridOverlayView?
    var focusIndicatorView: UIView?

    // Flash mode applied to photo capture settings when the device has flash.
    var flashMode = AVCaptureDevice.FlashMode.off
    // One-shot completion stored until the photo delegate fires.
    var photoCaptureCompletionBlock: ((UIImage?, Error?) -> Void)?

    var sampleBufferCaptureCompletionBlock: ((UIImage?, Error?) -> Void)?

    // Add callback for detecting when first frame is ready
    var firstFrameReadyCallback: (() -> Void)?
    var hasReceivedFirstFrame = false

    var audioDevice: AVCaptureDevice?
    var audioInput: AVCaptureDeviceInput?

    // Last zoom factor applied to the active device.
    var zoomFactor: CGFloat = 2.0
    private var lastZoomUpdateTime: TimeInterval = 0
    private let zoomUpdateThrottle: TimeInterval = 1.0 / 60.0 // 60 FPS max

    var videoFileURL: URL?
    // Cap applied to the device-reported max zoom so absurd digital-zoom
    // maxima (100x+) are never exposed to callers.
    private let saneMaxZoomFactor: CGFloat = 25.5

    // Track output preparation status
    private var outputsPrepared: Bool = false

    // True when the active device is a rear virtual camera composed of more
    // than one physical lens (e.g. dual/triple camera).
    var isUsingMultiLensVirtualCamera: Bool {
        guard let device = (currentCameraPosition == .rear) ? rearCamera : frontCamera else { return false }
        // A rear multi-lens virtual camera will have a min zoom of 1.0 but support wider angles
        return device.position == .back && device.isVirtualDevice && device.constituentDevices.count > 1
    }
}
|
|
55
|
+
|
|
56
|
+
extension CameraController {
|
|
57
|
+
func prepareFullSession() {
    /// Deprecated: session creation now happens inline in prepare().
    /// Retained so existing callers keep working.
    if self.captureSession != nil {
        return
    }
    self.captureSession = AVCaptureSession()
}
|
|
64
|
+
|
|
65
|
+
private func ensureCamerasDiscovered() {
    /// Runs camera discovery when nothing has been discovered yet, or when
    /// both cached camera pointers have been cleared.
    let needsDiscovery = allDiscoveredDevices.isEmpty || (rearCamera == nil && frontCamera == nil)
    if needsDiscovery {
        discoverAndConfigureCameras()
    }
}
|
|
70
|
+
|
|
71
|
+
private func discoverAndConfigureCameras() {
    // Discovers every built-in camera, caches the list for deviceId lookups,
    // picks the best default front/rear devices, and pre-configures focus.
    let deviceTypes: [AVCaptureDevice.DeviceType] = [
        .builtInWideAngleCamera,
        .builtInUltraWideCamera,
        .builtInTelephotoCamera,
        .builtInDualCamera,
        .builtInDualWideCamera,
        .builtInTripleCamera,
        .builtInTrueDepthCamera
    ]

    let session = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: .unspecified)
    // `devices` is already a non-optional array; the previous
    // `compactMap { $0 }` was a no-op. A dead debug loop that computed an
    // unused constituent-device count has also been removed.
    let cameras = session.devices

    // Store all discovered devices for fast lookup later
    self.allDiscoveredDevices = cameras

    // Set front camera (usually just one option)
    self.frontCamera = cameras.first(where: { $0.position == .front })

    // Find rear camera - prefer virtual multi-lens devices (most lenses
    // reachable) and fall back through progressively simpler device types.
    let rearCameras = cameras.filter { $0.position == .back }
    let rearPreference: [AVCaptureDevice.DeviceType] = [
        .builtInTripleCamera,    // all lenses
        .builtInDualWideCamera,  // wide + ultra-wide
        .builtInDualCamera,      // wide + telephoto
        .builtInWideAngleCamera  // single lens
    ]
    let preferredRear = rearPreference
        .lazy
        .compactMap { type in rearCameras.first(where: { $0.deviceType == type }) }
        .first
    // Only overwrite the cached rear camera when something was found, matching
    // the original behavior of leaving it untouched when no rear device exists.
    if let chosen = preferredRear ?? rearCameras.first {
        self.rearCamera = chosen
    }

    // Pre-configure focus modes
    configureCameraFocus(camera: self.rearCamera)
    configureCameraFocus(camera: self.frontCamera)
}
|
|
132
|
+
|
|
133
|
+
private func configureCameraFocus(camera: AVCaptureDevice?) {
    /// Best-effort: switches the device to continuous autofocus when supported.
    /// A nil device or a configuration-lock failure is silently tolerated
    /// (only logged), matching the original behavior.
    guard let device = camera else { return }

    do {
        try device.lockForConfiguration()
        defer { device.unlockForConfiguration() }
        if device.isFocusModeSupported(.continuousAutoFocus) {
            device.focusMode = .continuousAutoFocus
        }
    } catch {
        print("[CameraPreview] Could not configure focus for \(device.localizedName): \(error)")
    }
}
|
|
146
|
+
|
|
147
|
+
private func prepareOutputs() {
    /// Idempotent: builds the photo, movie-file, and video-data outputs once,
    /// and pre-creates the preview layer so later display is instant.
    if self.outputsPrepared { return }

    // Still-photo output
    let photo = AVCapturePhotoOutput()
    photo.isHighResolutionCaptureEnabled = true
    self.photoOutput = photo

    // Movie-file output for video recording
    self.fileVideoOutput = AVCaptureMovieFileOutput()

    // BGRA frame output used for preview / first-frame detection
    let frames = AVCaptureVideoDataOutput()
    frames.videoSettings = [
        (kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
    ]
    frames.alwaysDiscardsLateVideoFrames = true
    self.dataOutput = frames

    // Pre-create preview layer to avoid delay later
    if self.previewLayer == nil {
        self.previewLayer = AVCaptureVideoPreviewLayer()
    }

    self.outputsPrepared = true
}
|
|
173
|
+
|
|
174
|
+
/// Builds and starts the capture session off the main thread.
/// Statement order here is deliberate and performance-sensitive: the video
/// data output is attached BEFORE startRunning() so the first preview frame
/// arrives as early as possible, while the photo/movie outputs are attached
/// afterwards on a utility queue because they are not needed for preview.
/// - Parameters:
///   - cameraPosition: "rear" or "front"; ignored when `deviceId` is given.
///   - deviceId: optional uniqueID of a specific discovered device.
///   - disableAudio: when false, a microphone input is added to the session.
///   - cameraMode: when true, the movie-file output is attached for recording.
///   - aspectRatio: "16:9" / "4:3" preset hint; nil keeps `.high`.
///   - initialZoomLevel: zoom applied before the session starts.
///   - completionHandler: always invoked on the main queue with nil on success
///     or the thrown error on failure.
func prepare(cameraPosition: String, deviceId: String? = nil, disableAudio: Bool, cameraMode: Bool, aspectRatio: String? = nil, initialZoomLevel: Float = 1.0, completionHandler: @escaping (Error?) -> Void) {
    print("[CameraPreview] 🎬 Starting prepare - position: \(cameraPosition), deviceId: \(deviceId ?? "nil"), disableAudio: \(disableAudio), cameraMode: \(cameraMode), aspectRatio: \(aspectRatio ?? "nil"), zoom: \(initialZoomLevel)")

    DispatchQueue.global(qos: .userInitiated).async { [weak self] in
        guard let self = self else {
            // Controller deallocated before setup ran; report a generic failure.
            DispatchQueue.main.async {
                completionHandler(CameraControllerError.unknown)
            }
            return
        }

        do {
            // Create session if needed
            if self.captureSession == nil {
                self.captureSession = AVCaptureSession()
            }

            guard let captureSession = self.captureSession else {
                throw CameraControllerError.captureSessionIsMissing
            }

            // Prepare outputs (idempotent; also pre-creates the preview layer)
            self.prepareOutputs()

            // Configure the session
            captureSession.beginConfiguration()

            // Set aspect ratio preset
            self.configureSessionPreset(for: aspectRatio)

            // Configure device inputs
            try self.configureDeviceInputs(cameraPosition: cameraPosition, deviceId: deviceId, disableAudio: disableAudio)

            // Add data output BEFORE starting session for faster first frame
            if let dataOutput = self.dataOutput, captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
                dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            }

            captureSession.commitConfiguration()

            // Set initial zoom
            self.setInitialZoom(level: initialZoomLevel)

            // Start the session
            captureSession.startRunning()

            // Defer adding photo/video outputs to avoid blocking
            // These aren't needed immediately for preview
            DispatchQueue.global(qos: .utility).async { [weak self] in
                guard let self = self else { return }

                captureSession.beginConfiguration()

                // Add photo output
                if let photoOutput = self.photoOutput, captureSession.canAddOutput(photoOutput) {
                    photoOutput.isHighResolutionCaptureEnabled = true
                    captureSession.addOutput(photoOutput)
                }

                // Add video output if needed
                if cameraMode, let fileVideoOutput = self.fileVideoOutput, captureSession.canAddOutput(fileVideoOutput) {
                    captureSession.addOutput(fileVideoOutput)
                }

                captureSession.commitConfiguration()
            }

            // Success callback
            DispatchQueue.main.async {
                completionHandler(nil)
            }
        } catch {
            DispatchQueue.main.async {
                completionHandler(error)
            }
        }
    }
}
|
|
253
|
+
|
|
254
|
+
private func configureSessionPreset(for aspectRatio: String?) {
    /// Chooses the session preset matching the requested aspect ratio,
    /// falling back to `.high` whenever the preferred preset is unsupported
    /// or no ratio was requested.
    guard let captureSession = self.captureSession else { return }

    let targetPreset: AVCaptureSession.Preset
    switch aspectRatio {
    case "16:9"?:
        targetPreset = captureSession.canSetSessionPreset(.hd1920x1080) ? .hd1920x1080 : .high
    case "4:3"?:
        targetPreset = captureSession.canSetSessionPreset(.photo) ? .photo : .high
    default:
        targetPreset = .high
    }

    if captureSession.canSetSessionPreset(targetPreset) {
        captureSession.sessionPreset = targetPreset
    }
}
|
|
274
|
+
|
|
275
|
+
private func setInitialZoom(level: Float) {
    // Applies the requested startup zoom to the active device; values outside
    // the device's supported (and sanity-capped) range are logged and ignored.
    // The pointless `adjustedLevel` alias of `level` and the repeated
    // CGFloat(...) conversions from the original have been removed.
    let device = (currentCameraPosition == .rear) ? rearCamera : frontCamera
    guard let device = device else {
        print("[CameraPreview] No device available for initial zoom")
        return
    }

    let minZoom = device.minAvailableVideoZoomFactor
    // Cap by saneMaxZoomFactor so absurd digital-zoom maxima are never used.
    let maxZoom = min(device.maxAvailableVideoZoomFactor, saneMaxZoomFactor)

    let target = CGFloat(level)
    guard target >= minZoom && target <= maxZoom else {
        print("[CameraPreview] Initial zoom level \(level) out of range (\(minZoom)-\(maxZoom))")
        return
    }

    do {
        try device.lockForConfiguration()
        device.videoZoomFactor = target
        device.unlockForConfiguration()
        // Cache only after the device accepted the value.
        self.zoomFactor = target
    } catch {
        print("[CameraPreview] Failed to set initial zoom: \(error)")
    }
}
|
|
301
|
+
|
|
302
|
+
/// Selects a camera (by explicit deviceId or by position string), wraps it in
/// an input, attaches it to the session, and optionally adds a microphone.
/// - Throws: `captureSessionIsMissing` when no session exists,
///   `noCamerasAvailable` when no matching device is found,
///   `inputsAreInvalid` when the session rejects an input.
private func configureDeviceInputs(cameraPosition: String, deviceId: String?, disableAudio: Bool) throws {
    guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing }

    // Ensure cameras are discovered before configuring inputs
    ensureCamerasDiscovered()

    var selectedDevice: AVCaptureDevice?

    // If deviceId is specified, find that specific device from discovered devices
    if let deviceId = deviceId {
        selectedDevice = self.allDiscoveredDevices.first(where: { $0.uniqueID == deviceId })
        guard selectedDevice != nil else {
            throw CameraControllerError.noCamerasAvailable
        }
    } else {
        // Use position-based selection from discovered cameras
        if cameraPosition == "rear" {
            selectedDevice = self.rearCamera
        } else if cameraPosition == "front" {
            selectedDevice = self.frontCamera
        }
    }

    guard let finalDevice = selectedDevice else {
        throw CameraControllerError.noCamerasAvailable
    }

    let deviceInput = try AVCaptureDeviceInput(device: finalDevice)

    if captureSession.canAddInput(deviceInput) {
        captureSession.addInput(deviceInput)

        // Record which side is live based on the actual device position
        // (an explicit deviceId may override the `cameraPosition` string).
        if finalDevice.position == .front {
            self.frontCameraInput = deviceInput
            self.currentCameraPosition = .front
        } else {
            self.rearCameraInput = deviceInput
            self.currentCameraPosition = .rear
        }
    } else {
        throw CameraControllerError.inputsAreInvalid
    }

    // Add audio input if needed
    if !disableAudio {
        if self.audioDevice == nil {
            self.audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
        }
        if let audioDevice = self.audioDevice {
            // Bind locally so the stored property never needs force-unwrapping
            // (the original used `self.audioInput!` twice).
            let newAudioInput = try AVCaptureDeviceInput(device: audioDevice)
            self.audioInput = newAudioInput
            if captureSession.canAddInput(newAudioInput) {
                captureSession.addInput(newAudioInput)
            } else {
                throw CameraControllerError.inputsAreInvalid
            }
        }
    }
}
|
|
360
|
+
|
|
361
|
+
func displayPreview(on view: UIView) throws {
    /// Attaches (or re-attaches) the preview layer to `view`.
    /// - Throws: `captureSessionIsMissing` unless the session exists and is running.
    guard let captureSession = self.captureSession, captureSession.isRunning else {
        throw CameraControllerError.captureSessionIsMissing
    }

    // Reuse the pre-created layer when available; otherwise build a bare one.
    // Either way the session is (re)bound only when it actually differs.
    let layer = self.previewLayer ?? AVCaptureVideoPreviewLayer()
    if layer.session != captureSession {
        layer.session = captureSession
    }

    // Minimal configuration, no CATransaction overhead.
    layer.videoGravity = .resizeAspectFill
    layer.frame = view.bounds

    // Insert below all other sublayers, but only when not already attached.
    if layer.superlayer != view.layer {
        view.layer.insertSublayer(layer, at: 0)
    }
    self.previewLayer = layer
}
|
|
390
|
+
|
|
391
|
+
func addGridOverlay(to view: UIView, gridMode: String) {
    // Replaces any existing grid with a fresh overlay using `gridMode`.
    removeGridOverlay()

    // Disable animation for grid overlay creation and positioning
    CATransaction.begin()
    CATransaction.setDisableActions(true)
    // Build via a local constant instead of force-unwrapping the stored
    // property (`gridOverlayView!` in the original).
    let overlay = GridOverlayView(frame: view.bounds)
    overlay.gridMode = gridMode
    view.addSubview(overlay)
    gridOverlayView = overlay
    CATransaction.commit()
}
|
|
402
|
+
|
|
403
|
+
func removeGridOverlay() {
    /// Detaches the grid overlay from its superview (if attached) and releases it.
    if let overlay = gridOverlayView {
        overlay.removeFromSuperview()
    }
    gridOverlayView = nil
}
|
|
407
|
+
|
|
408
|
+
func setupGestures(target: UIView, enableZoom: Bool) {
    /// Installs tap-to-focus always, and pinch-to-zoom only when enabled.
    setupTapGesture(target: target, selector: #selector(handleTap(_:)), delegate: self)
    guard enableZoom else { return }
    setupPinchGesture(target: target, selector: #selector(handlePinch(_:)), delegate: self)
}
|
|
414
|
+
|
|
415
|
+
func setupTapGesture(target: UIView, selector: Selector, delegate: UIGestureRecognizerDelegate?) {
    /// Adds a tap recognizer to `target` that invokes `selector` on this controller.
    let recognizer = UITapGestureRecognizer(target: self, action: selector)
    recognizer.delegate = delegate
    target.addGestureRecognizer(recognizer)
}
|
|
420
|
+
|
|
421
|
+
func setupPinchGesture(target: UIView, selector: Selector, delegate: UIGestureRecognizerDelegate?) {
    /// Adds a pinch recognizer to `target` that invokes `selector` on this
    /// controller, tuned so touches are delivered without delay.
    let recognizer = UIPinchGestureRecognizer(target: self, action: selector)
    recognizer.delegate = delegate
    // Deliver touches immediately for responsive zooming.
    recognizer.delaysTouchesBegan = false
    recognizer.delaysTouchesEnded = false
    recognizer.cancelsTouchesInView = false
    target.addGestureRecognizer(recognizer)
}
|
|
430
|
+
|
|
431
|
+
func updateVideoOrientation() {
    /// Orientation changes must be applied on the main thread; hop there
    /// synchronously when called from a background queue.
    guard Thread.isMainThread else {
        DispatchQueue.main.sync {
            self.updateVideoOrientationOnMainThread()
        }
        return
    }
    updateVideoOrientationOnMainThread()
}
|
|
440
|
+
|
|
441
|
+
private func updateVideoOrientationOnMainThread() {
    // Mirrors the current interface orientation onto the preview layer and
    // every data/photo output connection. Must run on the main thread.
    //
    // The original cast `connectedScenes.first as? UIWindowScene`, which
    // silently falls back to portrait if the FIRST connected scene happens not
    // to be a window scene; search all scenes for a window scene instead.
    let interfaceOrientation = UIApplication.shared.connectedScenes
        .compactMap { $0 as? UIWindowScene }
        .first?
        .interfaceOrientation

    let videoOrientation: AVCaptureVideoOrientation
    switch interfaceOrientation {
    case .portrait?:
        videoOrientation = .portrait
    case .landscapeLeft?:
        videoOrientation = .landscapeLeft
    case .landscapeRight?:
        videoOrientation = .landscapeRight
    case .portraitUpsideDown?:
        videoOrientation = .portraitUpsideDown
    default:
        // .unknown, no window scene, or future orientation values.
        videoOrientation = .portrait
    }

    previewLayer?.connection?.videoOrientation = videoOrientation
    dataOutput?.connections.forEach { $0.videoOrientation = videoOrientation }
    photoOutput?.connections.forEach { $0.videoOrientation = videoOrientation }
}
|
|
468
|
+
|
|
469
|
+
/// Swaps the live video input between front and rear cameras, preserving the
/// audio input and the session's previous running state.
/// - Throws: `captureSessionIsMissing` when no session/position is set,
///   `noCamerasAvailable` when the opposite camera is absent,
///   `invalidOperation` when the new input cannot be created or added.
func switchCameras() throws {
    guard let currentCameraPosition = currentCameraPosition,
          let captureSession = self.captureSession else {
        throw CameraControllerError.captureSessionIsMissing
    }

    // Ensure we have the necessary cameras
    guard (currentCameraPosition == .front && rearCamera != nil) ||
            (currentCameraPosition == .rear && frontCamera != nil) else {
        throw CameraControllerError.noCamerasAvailable
    }

    // Store the current running state
    let wasRunning = captureSession.isRunning
    if wasRunning {
        captureSession.stopRunning()
    }

    // Begin configuration; the commit and conditional restart run on every
    // exit path, including throws.
    captureSession.beginConfiguration()
    defer {
        captureSession.commitConfiguration()
        if wasRunning {
            captureSession.startRunning()
        }
    }

    // Store audio input if it exists
    let audioInput = captureSession.inputs.first { ($0 as? AVCaptureDeviceInput)?.device.hasMediaType(.audio) ?? false }

    // Remove only video inputs
    captureSession.inputs.forEach { input in
        if (input as? AVCaptureDeviceInput)?.device.hasMediaType(.video) ?? false {
            captureSession.removeInput(input)
        }
    }

    // Activate the opposite camera. The original duplicated the focus/input
    // logic for each direction; it is factored into activateSwitched(camera:in:).
    switch currentCameraPosition {
    case .front:
        guard let rearCamera = rearCamera else {
            throw CameraControllerError.invalidOperation
        }
        rearCameraInput = try activateSwitched(camera: rearCamera, in: captureSession)
        self.currentCameraPosition = .rear
    case .rear:
        guard let frontCamera = frontCamera else {
            throw CameraControllerError.invalidOperation
        }
        frontCameraInput = try activateSwitched(camera: frontCamera, in: captureSession)
        self.currentCameraPosition = .front
    }

    // Re-add audio input if it existed
    if let audioInput = audioInput, captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
    }

    // Update video orientation
    self.updateVideoOrientation()
}

/// Enables continuous autofocus on `camera` (when supported), wraps it in an
/// input, and attaches it to `session`.
/// - Returns: the newly attached input.
/// - Throws: `invalidOperation` when the input cannot be created or added;
///   rethrows any `lockForConfiguration()` error, matching the original code.
private func activateSwitched(camera: AVCaptureDevice, in session: AVCaptureSession) throws -> AVCaptureDeviceInput {
    try camera.lockForConfiguration()
    if camera.isFocusModeSupported(.continuousAutoFocus) {
        camera.focusMode = .continuousAutoFocus
    }
    camera.unlockForConfiguration()

    guard let newInput = try? AVCaptureDeviceInput(device: camera),
          session.canAddInput(newInput) else {
        throw CameraControllerError.invalidOperation
    }
    session.addInput(newInput)
    return newInput
}
|
|
559
|
+
|
|
560
|
+
/// Captures a still photo and post-processes it:
/// exact resize when width+height are given; center crop when only an aspect
/// ratio is given; otherwise crop to match the visible preview.
/// - Note: `quality` is currently unused by this method — TODO confirm whether
///   callers expect it to drive JPEG compression here or downstream.
/// - Parameter gpsLocation: when present, GPS EXIF metadata is generated for
///   the capture (see addGPSMetadata).
func captureImage(width: Int?, height: Int?, aspectRatio: String?, quality: Float, gpsLocation: CLLocation?, completion: @escaping (UIImage?, Error?) -> Void) {
    print("[CameraPreview] captureImage called - width: \(width ?? -1), height: \(height ?? -1), aspectRatio: \(aspectRatio ?? "nil")")

    guard let photoOutput = self.photoOutput else {
        completion(nil, NSError(domain: "Camera", code: 0, userInfo: [NSLocalizedDescriptionKey: "Photo output is not available"]))
        return
    }

    let settings = AVCapturePhotoSettings()

    // Apply the current flash mode to the photo settings
    // Check if the current device supports flash
    var currentCamera: AVCaptureDevice?
    switch currentCameraPosition {
    case .front:
        currentCamera = self.frontCamera
    case .rear:
        currentCamera = self.rearCamera
    default:
        break
    }

    // Only apply flash if the device has flash and the flash mode is supported
    if let device = currentCamera, device.hasFlash {
        let supportedFlashModes = photoOutput.supportedFlashModes
        if supportedFlashModes.contains(self.flashMode) {
            settings.flashMode = self.flashMode
        }
    }

    self.photoCaptureCompletionBlock = { (image, error) in
        if let error = error {
            completion(nil, error)
            return
        }

        guard let image = image else {
            completion(nil, NSError(domain: "Camera", code: 0, userInfo: [NSLocalizedDescriptionKey: "Failed to capture image"]))
            return
        }

        if let location = gpsLocation {
            self.addGPSMetadata(to: image, location: location)
        }

        var finalImage = image

        // Determine what to do based on parameters
        if let width = width, let height = height {
            // Specific dimensions requested - resize to exact size.
            // The original force-unwrapped resizeImage(...)!, crashing if the
            // resize failed; fall back to the uncropped image instead.
            if let resized = self.resizeImage(image: image, to: CGSize(width: width, height: height)) {
                finalImage = resized
            }
            print("[CameraPreview] Resized to exact dimensions: \(finalImage.size.width)x\(finalImage.size.height)")
        } else if let aspectRatio = aspectRatio {
            // Aspect ratio specified - crop to that ratio
            let components = aspectRatio.split(separator: ":").compactMap { Double($0) }
            if components.count == 2 {
                // For capture in portrait orientation, swap the aspect ratio (16:9 becomes 9:16)
                let isPortrait = image.size.height > image.size.width
                let targetAspectRatio = isPortrait ? components[1] / components[0] : components[0] / components[1]
                let imageSize = image.size
                let originalAspectRatio = imageSize.width / imageSize.height

                // Only crop if the aspect ratios don't match
                if abs(originalAspectRatio - targetAspectRatio) > 0.01 {
                    var targetSize = imageSize

                    if originalAspectRatio > targetAspectRatio {
                        // Original is wider than target - fit by height
                        targetSize.width = imageSize.height * CGFloat(targetAspectRatio)
                    } else {
                        // Original is taller than target - fit by width
                        targetSize.height = imageSize.width / CGFloat(targetAspectRatio)
                    }

                    // Center crop the image
                    if let croppedImage = self.cropImageToAspectRatio(image: image, targetSize: targetSize) {
                        finalImage = croppedImage
                        print("[CameraPreview] Applied aspect ratio crop: \(finalImage.size.width)x\(finalImage.size.height)")
                    }
                }
            }
        } else {
            // No parameters specified - crop to match what's visible in the preview
            // This ensures we capture exactly what the user sees
            if let previewLayer = self.previewLayer,
               let previewCroppedImage = self.cropImageToMatchPreview(image: image, previewLayer: previewLayer) {
                finalImage = previewCroppedImage
                print("[CameraPreview] Cropped to match preview: \(finalImage.size.width)x\(finalImage.size.height)")
            }
        }

        completion(finalImage, nil)
    }

    photoOutput.capturePhoto(with: settings, delegate: self)
}
|
|
656
|
+
|
|
657
|
+
/// Embeds GPS EXIF metadata from `location` into a JPEG encoding of `image`.
/// - Returns: the annotated JPEG data, or nil when encoding/annotation fails.
///   The original version built this data and then discarded it, making the
///   whole function a no-op; returning it (with `@discardableResult` so
///   existing call sites keep compiling) lets callers actually persist the
///   geotagged bytes.
@discardableResult
func addGPSMetadata(to image: UIImage, location: CLLocation) -> Data? {
    guard let jpegData = image.jpegData(compressionQuality: 1.0),
          let source = CGImageSourceCreateWithData(jpegData as CFData, nil),
          let uti = CGImageSourceGetType(source) else { return nil }

    var metadata = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [String: Any] ?? [:]

    // GPS EXIF timestamps are conventionally expressed in UTC.
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ssZ"
    formatter.timeZone = TimeZone(abbreviation: "UTC")

    let gpsDict: [String: Any] = [
        kCGImagePropertyGPSLatitude as String: abs(location.coordinate.latitude),
        kCGImagePropertyGPSLatitudeRef as String: location.coordinate.latitude >= 0 ? "N" : "S",
        kCGImagePropertyGPSLongitude as String: abs(location.coordinate.longitude),
        kCGImagePropertyGPSLongitudeRef as String: location.coordinate.longitude >= 0 ? "E" : "W",
        kCGImagePropertyGPSTimeStamp as String: formatter.string(from: location.timestamp),
        kCGImagePropertyGPSAltitude as String: location.altitude,
        // 0 = above sea level, 1 = below, per the GPS EXIF spec.
        kCGImagePropertyGPSAltitudeRef as String: location.altitude >= 0 ? 0 : 1
    ]

    metadata[kCGImagePropertyGPSDictionary as String] = gpsDict

    let destData = NSMutableData()
    guard let destination = CGImageDestinationCreateWithData(destData, uti, 1, nil) else { return nil }
    CGImageDestinationAddImageFromSource(destination, source, 0, metadata as CFDictionary)
    // Finalize can fail; the original ignored its Bool result.
    guard CGImageDestinationFinalize(destination) else { return nil }
    return destData as Data
}
|
|
685
|
+
|
|
686
|
+
func resizeImage(image: UIImage, to size: CGSize) -> UIImage? {
|
|
687
|
+
let renderer = UIGraphicsImageRenderer(size: size)
|
|
688
|
+
let resizedImage = renderer.image { (_) in
|
|
689
|
+
image.draw(in: CGRect(origin: .zero, size: size))
|
|
690
|
+
}
|
|
691
|
+
return resizedImage
|
|
692
|
+
}
|
|
693
|
+
|
|
694
|
+
func cropImageToAspectRatio(image: UIImage, targetSize: CGSize) -> UIImage? {
|
|
695
|
+
let imageSize = image.size
|
|
696
|
+
|
|
697
|
+
// Calculate the crop rect - center crop
|
|
698
|
+
let xOffset = (imageSize.width - targetSize.width) / 2
|
|
699
|
+
let yOffset = (imageSize.height - targetSize.height) / 2
|
|
700
|
+
let cropRect = CGRect(x: xOffset, y: yOffset, width: targetSize.width, height: targetSize.height)
|
|
701
|
+
|
|
702
|
+
// Create the cropped image
|
|
703
|
+
guard let cgImage = image.cgImage,
|
|
704
|
+
let croppedCGImage = cgImage.cropping(to: cropRect) else {
|
|
705
|
+
return nil
|
|
706
|
+
}
|
|
707
|
+
|
|
708
|
+
return UIImage(cgImage: croppedCGImage, scale: image.scale, orientation: image.imageOrientation)
|
|
709
|
+
}
|
|
710
|
+
|
|
711
|
+
func cropImageToMatchPreview(image: UIImage, previewLayer: AVCaptureVideoPreviewLayer) -> UIImage? {
|
|
712
|
+
// When using resizeAspectFill, the preview layer shows a cropped portion of the video
|
|
713
|
+
// We need to calculate what portion of the captured image corresponds to what's visible
|
|
714
|
+
|
|
715
|
+
let previewBounds = previewLayer.bounds
|
|
716
|
+
let previewAspectRatio = previewBounds.width / previewBounds.height
|
|
717
|
+
|
|
718
|
+
// Get the dimensions of the captured image
|
|
719
|
+
let imageSize = image.size
|
|
720
|
+
let imageAspectRatio = imageSize.width / imageSize.height
|
|
721
|
+
|
|
722
|
+
print("[CameraPreview] cropImageToMatchPreview - Preview bounds: \(previewBounds.width)x\(previewBounds.height) (ratio: \(previewAspectRatio))")
|
|
723
|
+
print("[CameraPreview] cropImageToMatchPreview - Image size: \(imageSize.width)x\(imageSize.height) (ratio: \(imageAspectRatio))")
|
|
724
|
+
|
|
725
|
+
// Since we're using resizeAspectFill, we need to calculate what portion of the image
|
|
726
|
+
// is visible in the preview
|
|
727
|
+
var cropRect: CGRect
|
|
728
|
+
|
|
729
|
+
if imageAspectRatio > previewAspectRatio {
|
|
730
|
+
// Image is wider than preview - crop horizontally
|
|
731
|
+
let visibleWidth = imageSize.height * previewAspectRatio
|
|
732
|
+
let xOffset = (imageSize.width - visibleWidth) / 2
|
|
733
|
+
cropRect = CGRect(x: xOffset, y: 0, width: visibleWidth, height: imageSize.height)
|
|
734
|
+
|
|
735
|
+
} else {
|
|
736
|
+
// Image is taller than preview - crop vertically
|
|
737
|
+
let visibleHeight = imageSize.width / previewAspectRatio
|
|
738
|
+
let yOffset = (imageSize.height - visibleHeight) / 2
|
|
739
|
+
cropRect = CGRect(x: 0, y: yOffset, width: imageSize.width, height: visibleHeight)
|
|
740
|
+
|
|
741
|
+
}
|
|
742
|
+
|
|
743
|
+
|
|
744
|
+
|
|
745
|
+
// Create the cropped image
|
|
746
|
+
guard let cgImage = image.cgImage,
|
|
747
|
+
let croppedCGImage = cgImage.cropping(to: cropRect) else {
|
|
748
|
+
|
|
749
|
+
return nil
|
|
750
|
+
}
|
|
751
|
+
|
|
752
|
+
let result = UIImage(cgImage: croppedCGImage, scale: image.scale, orientation: image.imageOrientation)
|
|
753
|
+
|
|
754
|
+
|
|
755
|
+
return result
|
|
756
|
+
}
|
|
757
|
+
|
|
758
|
+
func captureSample(completion: @escaping (UIImage?, Error?) -> Void) {
|
|
759
|
+
guard let captureSession = captureSession,
|
|
760
|
+
captureSession.isRunning else {
|
|
761
|
+
completion(nil, CameraControllerError.captureSessionIsMissing)
|
|
762
|
+
return
|
|
763
|
+
}
|
|
764
|
+
|
|
765
|
+
self.sampleBufferCaptureCompletionBlock = completion
|
|
766
|
+
}
|
|
767
|
+
|
|
768
|
+
func getSupportedFlashModes() throws -> [String] {
|
|
769
|
+
var currentCamera: AVCaptureDevice?
|
|
770
|
+
switch currentCameraPosition {
|
|
771
|
+
case .front:
|
|
772
|
+
currentCamera = self.frontCamera!
|
|
773
|
+
case .rear:
|
|
774
|
+
currentCamera = self.rearCamera!
|
|
775
|
+
default: break
|
|
776
|
+
}
|
|
777
|
+
|
|
778
|
+
guard
|
|
779
|
+
let device = currentCamera
|
|
780
|
+
else {
|
|
781
|
+
throw CameraControllerError.noCamerasAvailable
|
|
782
|
+
}
|
|
783
|
+
|
|
784
|
+
var supportedFlashModesAsStrings: [String] = []
|
|
785
|
+
if device.hasFlash {
|
|
786
|
+
guard let supportedFlashModes: [AVCaptureDevice.FlashMode] = self.photoOutput?.supportedFlashModes else {
|
|
787
|
+
throw CameraControllerError.noCamerasAvailable
|
|
788
|
+
}
|
|
789
|
+
|
|
790
|
+
for flashMode in supportedFlashModes {
|
|
791
|
+
var flashModeValue: String?
|
|
792
|
+
switch flashMode {
|
|
793
|
+
case AVCaptureDevice.FlashMode.off:
|
|
794
|
+
flashModeValue = "off"
|
|
795
|
+
case AVCaptureDevice.FlashMode.on:
|
|
796
|
+
flashModeValue = "on"
|
|
797
|
+
case AVCaptureDevice.FlashMode.auto:
|
|
798
|
+
flashModeValue = "auto"
|
|
799
|
+
default: break
|
|
800
|
+
}
|
|
801
|
+
if flashModeValue != nil {
|
|
802
|
+
supportedFlashModesAsStrings.append(flashModeValue!)
|
|
803
|
+
}
|
|
804
|
+
}
|
|
805
|
+
}
|
|
806
|
+
if device.hasTorch {
|
|
807
|
+
supportedFlashModesAsStrings.append("torch")
|
|
808
|
+
}
|
|
809
|
+
return supportedFlashModesAsStrings
|
|
810
|
+
|
|
811
|
+
}
|
|
812
|
+
func getHorizontalFov() throws -> Float {
|
|
813
|
+
var currentCamera: AVCaptureDevice?
|
|
814
|
+
switch currentCameraPosition {
|
|
815
|
+
case .front:
|
|
816
|
+
currentCamera = self.frontCamera!
|
|
817
|
+
case .rear:
|
|
818
|
+
currentCamera = self.rearCamera!
|
|
819
|
+
default: break
|
|
820
|
+
}
|
|
821
|
+
|
|
822
|
+
guard
|
|
823
|
+
let device = currentCamera
|
|
824
|
+
else {
|
|
825
|
+
throw CameraControllerError.noCamerasAvailable
|
|
826
|
+
}
|
|
827
|
+
|
|
828
|
+
// Get the active format and field of view
|
|
829
|
+
let activeFormat = device.activeFormat
|
|
830
|
+
let fov = activeFormat.videoFieldOfView
|
|
831
|
+
|
|
832
|
+
// Adjust for current zoom level
|
|
833
|
+
let zoomFactor = device.videoZoomFactor
|
|
834
|
+
let adjustedFov = fov / Float(zoomFactor)
|
|
835
|
+
|
|
836
|
+
return adjustedFov
|
|
837
|
+
}
|
|
838
|
+
func setFlashMode(flashMode: AVCaptureDevice.FlashMode) throws {
|
|
839
|
+
var currentCamera: AVCaptureDevice?
|
|
840
|
+
switch currentCameraPosition {
|
|
841
|
+
case .front:
|
|
842
|
+
currentCamera = self.frontCamera!
|
|
843
|
+
case .rear:
|
|
844
|
+
currentCamera = self.rearCamera!
|
|
845
|
+
default: break
|
|
846
|
+
}
|
|
847
|
+
|
|
848
|
+
guard let device = currentCamera else {
|
|
849
|
+
throw CameraControllerError.noCamerasAvailable
|
|
850
|
+
}
|
|
851
|
+
|
|
852
|
+
guard let supportedFlashModes: [AVCaptureDevice.FlashMode] = self.photoOutput?.supportedFlashModes else {
|
|
853
|
+
throw CameraControllerError.invalidOperation
|
|
854
|
+
}
|
|
855
|
+
if supportedFlashModes.contains(flashMode) {
|
|
856
|
+
do {
|
|
857
|
+
try device.lockForConfiguration()
|
|
858
|
+
|
|
859
|
+
if device.hasTorch && device.isTorchAvailable && device.torchMode == AVCaptureDevice.TorchMode.on {
|
|
860
|
+
device.torchMode = AVCaptureDevice.TorchMode.off
|
|
861
|
+
}
|
|
862
|
+
self.flashMode = flashMode
|
|
863
|
+
let photoSettings = AVCapturePhotoSettings()
|
|
864
|
+
photoSettings.flashMode = flashMode
|
|
865
|
+
self.photoOutput?.photoSettingsForSceneMonitoring = photoSettings
|
|
866
|
+
|
|
867
|
+
device.unlockForConfiguration()
|
|
868
|
+
} catch {
|
|
869
|
+
throw CameraControllerError.invalidOperation
|
|
870
|
+
}
|
|
871
|
+
} else {
|
|
872
|
+
throw CameraControllerError.invalidOperation
|
|
873
|
+
}
|
|
874
|
+
}
|
|
875
|
+
|
|
876
|
+
func setTorchMode() throws {
|
|
877
|
+
var currentCamera: AVCaptureDevice?
|
|
878
|
+
switch currentCameraPosition {
|
|
879
|
+
case .front:
|
|
880
|
+
currentCamera = self.frontCamera!
|
|
881
|
+
case .rear:
|
|
882
|
+
currentCamera = self.rearCamera!
|
|
883
|
+
default: break
|
|
884
|
+
}
|
|
885
|
+
|
|
886
|
+
guard
|
|
887
|
+
let device = currentCamera,
|
|
888
|
+
device.hasTorch,
|
|
889
|
+
device.isTorchAvailable
|
|
890
|
+
else {
|
|
891
|
+
throw CameraControllerError.invalidOperation
|
|
892
|
+
}
|
|
893
|
+
|
|
894
|
+
do {
|
|
895
|
+
try device.lockForConfiguration()
|
|
896
|
+
if device.isTorchModeSupported(AVCaptureDevice.TorchMode.on) {
|
|
897
|
+
device.torchMode = AVCaptureDevice.TorchMode.on
|
|
898
|
+
} else if device.isTorchModeSupported(AVCaptureDevice.TorchMode.auto) {
|
|
899
|
+
device.torchMode = AVCaptureDevice.TorchMode.auto
|
|
900
|
+
} else {
|
|
901
|
+
device.torchMode = AVCaptureDevice.TorchMode.off
|
|
902
|
+
}
|
|
903
|
+
device.unlockForConfiguration()
|
|
904
|
+
} catch {
|
|
905
|
+
throw CameraControllerError.invalidOperation
|
|
906
|
+
}
|
|
907
|
+
}
|
|
908
|
+
|
|
909
|
+
func getZoom() throws -> (min: Float, max: Float, current: Float) {
|
|
910
|
+
var currentCamera: AVCaptureDevice?
|
|
911
|
+
switch currentCameraPosition {
|
|
912
|
+
case .front:
|
|
913
|
+
currentCamera = self.frontCamera
|
|
914
|
+
case .rear:
|
|
915
|
+
currentCamera = self.rearCamera
|
|
916
|
+
default: break
|
|
917
|
+
}
|
|
918
|
+
|
|
919
|
+
guard let device = currentCamera else {
|
|
920
|
+
throw CameraControllerError.noCamerasAvailable
|
|
921
|
+
}
|
|
922
|
+
|
|
923
|
+
let effectiveMaxZoom = min(device.maxAvailableVideoZoomFactor, self.saneMaxZoomFactor)
|
|
924
|
+
|
|
925
|
+
return (
|
|
926
|
+
min: Float(device.minAvailableVideoZoomFactor),
|
|
927
|
+
max: Float(effectiveMaxZoom),
|
|
928
|
+
current: Float(device.videoZoomFactor)
|
|
929
|
+
)
|
|
930
|
+
}
|
|
931
|
+
|
|
932
|
+
func setZoom(level: CGFloat, ramp: Bool, autoFocus: Bool = true) throws {
|
|
933
|
+
var currentCamera: AVCaptureDevice?
|
|
934
|
+
switch currentCameraPosition {
|
|
935
|
+
case .front:
|
|
936
|
+
currentCamera = self.frontCamera
|
|
937
|
+
case .rear:
|
|
938
|
+
currentCamera = self.rearCamera
|
|
939
|
+
default: break
|
|
940
|
+
}
|
|
941
|
+
|
|
942
|
+
guard let device = currentCamera else {
|
|
943
|
+
throw CameraControllerError.noCamerasAvailable
|
|
944
|
+
}
|
|
945
|
+
|
|
946
|
+
let effectiveMaxZoom = min(device.maxAvailableVideoZoomFactor, self.saneMaxZoomFactor)
|
|
947
|
+
let zoomLevel = max(device.minAvailableVideoZoomFactor, min(level, effectiveMaxZoom))
|
|
948
|
+
|
|
949
|
+
do {
|
|
950
|
+
try device.lockForConfiguration()
|
|
951
|
+
|
|
952
|
+
if ramp {
|
|
953
|
+
// Use a very fast ramp rate for immediate response
|
|
954
|
+
device.ramp(toVideoZoomFactor: zoomLevel, withRate: 8.0)
|
|
955
|
+
} else {
|
|
956
|
+
device.videoZoomFactor = zoomLevel
|
|
957
|
+
}
|
|
958
|
+
|
|
959
|
+
device.unlockForConfiguration()
|
|
960
|
+
|
|
961
|
+
// Update our internal zoom factor tracking
|
|
962
|
+
self.zoomFactor = zoomLevel
|
|
963
|
+
|
|
964
|
+
// Trigger autofocus after zoom if requested
|
|
965
|
+
// if autoFocus {
|
|
966
|
+
// self.triggerAutoFocus()
|
|
967
|
+
// }
|
|
968
|
+
} catch {
|
|
969
|
+
throw CameraControllerError.invalidOperation
|
|
970
|
+
}
|
|
971
|
+
}
|
|
972
|
+
|
|
973
|
+
private func triggerAutoFocus() {
|
|
974
|
+
var currentCamera: AVCaptureDevice?
|
|
975
|
+
switch currentCameraPosition {
|
|
976
|
+
case .front:
|
|
977
|
+
currentCamera = self.frontCamera
|
|
978
|
+
case .rear:
|
|
979
|
+
currentCamera = self.rearCamera
|
|
980
|
+
default: break
|
|
981
|
+
}
|
|
982
|
+
|
|
983
|
+
guard let device = currentCamera else {
|
|
984
|
+
return
|
|
985
|
+
}
|
|
986
|
+
|
|
987
|
+
// Focus on the center of the preview (0.5, 0.5)
|
|
988
|
+
let centerPoint = CGPoint(x: 0.5, y: 0.5)
|
|
989
|
+
|
|
990
|
+
do {
|
|
991
|
+
try device.lockForConfiguration()
|
|
992
|
+
|
|
993
|
+
// Set focus mode to auto if supported
|
|
994
|
+
if device.isFocusModeSupported(.autoFocus) {
|
|
995
|
+
device.focusMode = .autoFocus
|
|
996
|
+
if device.isFocusPointOfInterestSupported {
|
|
997
|
+
device.focusPointOfInterest = centerPoint
|
|
998
|
+
}
|
|
999
|
+
} else if device.isFocusModeSupported(.continuousAutoFocus) {
|
|
1000
|
+
device.focusMode = .continuousAutoFocus
|
|
1001
|
+
if device.isFocusPointOfInterestSupported {
|
|
1002
|
+
device.focusPointOfInterest = centerPoint
|
|
1003
|
+
}
|
|
1004
|
+
}
|
|
1005
|
+
|
|
1006
|
+
// Also set exposure point if supported
|
|
1007
|
+
if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) {
|
|
1008
|
+
device.exposureMode = .autoExpose
|
|
1009
|
+
device.exposurePointOfInterest = centerPoint
|
|
1010
|
+
} else if device.isExposureModeSupported(.continuousAutoExposure) {
|
|
1011
|
+
device.exposureMode = .continuousAutoExposure
|
|
1012
|
+
if device.isExposurePointOfInterestSupported {
|
|
1013
|
+
device.exposurePointOfInterest = centerPoint
|
|
1014
|
+
}
|
|
1015
|
+
}
|
|
1016
|
+
|
|
1017
|
+
device.unlockForConfiguration()
|
|
1018
|
+
} catch {
|
|
1019
|
+
// Silently ignore errors during autofocus
|
|
1020
|
+
}
|
|
1021
|
+
}
|
|
1022
|
+
|
|
1023
|
+
func setFocus(at point: CGPoint, showIndicator: Bool = false, in view: UIView? = nil) throws {
|
|
1024
|
+
// Validate that coordinates are within bounds (0-1 range for device coordinates)
|
|
1025
|
+
if point.x < 0 || point.x > 1 || point.y < 0 || point.y > 1 {
|
|
1026
|
+
print("setFocus: Coordinates out of bounds - x: \(point.x), y: \(point.y)")
|
|
1027
|
+
throw CameraControllerError.invalidOperation
|
|
1028
|
+
}
|
|
1029
|
+
|
|
1030
|
+
var currentCamera: AVCaptureDevice?
|
|
1031
|
+
switch currentCameraPosition {
|
|
1032
|
+
case .front:
|
|
1033
|
+
currentCamera = self.frontCamera
|
|
1034
|
+
case .rear:
|
|
1035
|
+
currentCamera = self.rearCamera
|
|
1036
|
+
default: break
|
|
1037
|
+
}
|
|
1038
|
+
|
|
1039
|
+
guard let device = currentCamera else {
|
|
1040
|
+
throw CameraControllerError.noCamerasAvailable
|
|
1041
|
+
}
|
|
1042
|
+
|
|
1043
|
+
guard device.isFocusPointOfInterestSupported else {
|
|
1044
|
+
// Device doesn't support focus point of interest
|
|
1045
|
+
return
|
|
1046
|
+
}
|
|
1047
|
+
|
|
1048
|
+
// Show focus indicator if requested and view is provided - only after validation
|
|
1049
|
+
if showIndicator, let view = view, let previewLayer = self.previewLayer {
|
|
1050
|
+
// Convert the device point to layer point for indicator display
|
|
1051
|
+
let layerPoint = previewLayer.layerPointConverted(fromCaptureDevicePoint: point)
|
|
1052
|
+
showFocusIndicator(at: layerPoint, in: view)
|
|
1053
|
+
}
|
|
1054
|
+
|
|
1055
|
+
do {
|
|
1056
|
+
try device.lockForConfiguration()
|
|
1057
|
+
|
|
1058
|
+
// Set focus mode to auto if supported
|
|
1059
|
+
if device.isFocusModeSupported(.autoFocus) {
|
|
1060
|
+
device.focusMode = .autoFocus
|
|
1061
|
+
} else if device.isFocusModeSupported(.continuousAutoFocus) {
|
|
1062
|
+
device.focusMode = .continuousAutoFocus
|
|
1063
|
+
}
|
|
1064
|
+
|
|
1065
|
+
// Set the focus point
|
|
1066
|
+
device.focusPointOfInterest = point
|
|
1067
|
+
|
|
1068
|
+
// Also set exposure point if supported
|
|
1069
|
+
if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) {
|
|
1070
|
+
device.exposureMode = .autoExpose
|
|
1071
|
+
device.exposurePointOfInterest = point
|
|
1072
|
+
}
|
|
1073
|
+
|
|
1074
|
+
device.unlockForConfiguration()
|
|
1075
|
+
} catch {
|
|
1076
|
+
throw CameraControllerError.unknown
|
|
1077
|
+
}
|
|
1078
|
+
}
|
|
1079
|
+
|
|
1080
|
+
func getFlashMode() throws -> String {
|
|
1081
|
+
switch self.flashMode {
|
|
1082
|
+
case .off:
|
|
1083
|
+
return "off"
|
|
1084
|
+
case .on:
|
|
1085
|
+
return "on"
|
|
1086
|
+
case .auto:
|
|
1087
|
+
return "auto"
|
|
1088
|
+
@unknown default:
|
|
1089
|
+
return "off"
|
|
1090
|
+
}
|
|
1091
|
+
}
|
|
1092
|
+
|
|
1093
|
+
func getCurrentDeviceId() throws -> String {
|
|
1094
|
+
var currentCamera: AVCaptureDevice?
|
|
1095
|
+
switch currentCameraPosition {
|
|
1096
|
+
case .front:
|
|
1097
|
+
currentCamera = self.frontCamera
|
|
1098
|
+
case .rear:
|
|
1099
|
+
currentCamera = self.rearCamera
|
|
1100
|
+
default:
|
|
1101
|
+
break
|
|
1102
|
+
}
|
|
1103
|
+
|
|
1104
|
+
guard let device = currentCamera else {
|
|
1105
|
+
throw CameraControllerError.noCamerasAvailable
|
|
1106
|
+
}
|
|
1107
|
+
|
|
1108
|
+
return device.uniqueID
|
|
1109
|
+
}
|
|
1110
|
+
|
|
1111
|
+
func getCurrentLensInfo() throws -> (focalLength: Float, deviceType: String, baseZoomRatio: Float) {
|
|
1112
|
+
var currentCamera: AVCaptureDevice?
|
|
1113
|
+
switch currentCameraPosition {
|
|
1114
|
+
case .front:
|
|
1115
|
+
currentCamera = self.frontCamera
|
|
1116
|
+
case .rear:
|
|
1117
|
+
currentCamera = self.rearCamera
|
|
1118
|
+
default:
|
|
1119
|
+
break
|
|
1120
|
+
}
|
|
1121
|
+
|
|
1122
|
+
guard let device = currentCamera else {
|
|
1123
|
+
throw CameraControllerError.noCamerasAvailable
|
|
1124
|
+
}
|
|
1125
|
+
|
|
1126
|
+
var deviceType = "wideAngle"
|
|
1127
|
+
var baseZoomRatio: Float = 1.0
|
|
1128
|
+
|
|
1129
|
+
switch device.deviceType {
|
|
1130
|
+
case .builtInWideAngleCamera:
|
|
1131
|
+
deviceType = "wideAngle"
|
|
1132
|
+
baseZoomRatio = 1.0
|
|
1133
|
+
case .builtInUltraWideCamera:
|
|
1134
|
+
deviceType = "ultraWide"
|
|
1135
|
+
baseZoomRatio = 0.5
|
|
1136
|
+
case .builtInTelephotoCamera:
|
|
1137
|
+
deviceType = "telephoto"
|
|
1138
|
+
baseZoomRatio = 2.0
|
|
1139
|
+
case .builtInDualCamera:
|
|
1140
|
+
deviceType = "dual"
|
|
1141
|
+
baseZoomRatio = 1.0
|
|
1142
|
+
case .builtInDualWideCamera:
|
|
1143
|
+
deviceType = "dualWide"
|
|
1144
|
+
baseZoomRatio = 1.0
|
|
1145
|
+
case .builtInTripleCamera:
|
|
1146
|
+
deviceType = "triple"
|
|
1147
|
+
baseZoomRatio = 1.0
|
|
1148
|
+
case .builtInTrueDepthCamera:
|
|
1149
|
+
deviceType = "trueDepth"
|
|
1150
|
+
baseZoomRatio = 1.0
|
|
1151
|
+
default:
|
|
1152
|
+
deviceType = "wideAngle"
|
|
1153
|
+
baseZoomRatio = 1.0
|
|
1154
|
+
}
|
|
1155
|
+
|
|
1156
|
+
// Approximate focal length for mobile devices
|
|
1157
|
+
let focalLength: Float = 4.25
|
|
1158
|
+
|
|
1159
|
+
return (focalLength: focalLength, deviceType: deviceType, baseZoomRatio: baseZoomRatio)
|
|
1160
|
+
}
|
|
1161
|
+
|
|
1162
|
+
    /// Switches the live session to the camera with the given `uniqueID`,
    /// preserving any audio input and restoring the session's running state
    /// afterwards.
    ///
    /// - Parameter deviceId: `AVCaptureDevice.uniqueID` of the target camera.
    /// - Throws: `captureSessionIsMissing` when no session exists;
    ///   `noCamerasAvailable` when no discovered device matches `deviceId`;
    ///   `invalidOperation` when the session rejects the new input; plus any
    ///   error from `AVCaptureDeviceInput(device:)` or
    ///   `lockForConfiguration()`.
    func swapToDevice(deviceId: String) throws {
        guard let captureSession = self.captureSession else {
            throw CameraControllerError.captureSessionIsMissing
        }

        // Find the device with the specified deviceId
        let allDevices = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera, .builtInUltraWideCamera, .builtInTelephotoCamera, .builtInDualCamera, .builtInDualWideCamera, .builtInTripleCamera, .builtInTrueDepthCamera],
            mediaType: .video,
            position: .unspecified
        ).devices

        guard let targetDevice = allDevices.first(where: { $0.uniqueID == deviceId }) else {
            throw CameraControllerError.noCamerasAvailable
        }

        // Store the current running state
        let wasRunning = captureSession.isRunning
        if wasRunning {
            captureSession.stopRunning()
        }

        // Begin configuration
        captureSession.beginConfiguration()
        // The defer guarantees commit + restart even when a throw exits early
        // (e.g. input creation fails after video inputs were removed).
        defer {
            captureSession.commitConfiguration()
            // Restart the session if it was running before
            if wasRunning {
                captureSession.startRunning()
            }
        }

        // Store audio input if it exists
        let audioInput = captureSession.inputs.first { ($0 as? AVCaptureDeviceInput)?.device.hasMediaType(.audio) ?? false }

        // Remove only video inputs
        captureSession.inputs.forEach { input in
            if (input as? AVCaptureDeviceInput)?.device.hasMediaType(.video) ?? false {
                captureSession.removeInput(input)
            }
        }

        // Configure the new device
        let newInput = try AVCaptureDeviceInput(device: targetDevice)

        if captureSession.canAddInput(newInput) {
            captureSession.addInput(newInput)

            // Update camera references based on device position
            if targetDevice.position == .front {
                self.frontCameraInput = newInput
                self.frontCamera = targetDevice
                self.currentCameraPosition = .front
            } else {
                self.rearCameraInput = newInput
                self.rearCamera = targetDevice
                self.currentCameraPosition = .rear

                // Configure rear camera
                // NOTE(review): only the rear branch enables continuous
                // autofocus; the front branch leaves focus mode untouched —
                // confirm this asymmetry is intentional.
                try targetDevice.lockForConfiguration()
                if targetDevice.isFocusModeSupported(.continuousAutoFocus) {
                    targetDevice.focusMode = .continuousAutoFocus
                }
                targetDevice.unlockForConfiguration()
            }
        } else {
            throw CameraControllerError.invalidOperation
        }

        // Re-add audio input if it existed
        if let audioInput = audioInput, captureSession.canAddInput(audioInput) {
            captureSession.addInput(audioInput)
        }

        // Update video orientation
        self.updateVideoOrientation()
    }
|
|
1239
|
+
|
|
1240
|
+
func cleanup() {
|
|
1241
|
+
if let captureSession = self.captureSession {
|
|
1242
|
+
captureSession.stopRunning()
|
|
1243
|
+
captureSession.inputs.forEach { captureSession.removeInput($0) }
|
|
1244
|
+
captureSession.outputs.forEach { captureSession.removeOutput($0) }
|
|
1245
|
+
}
|
|
1246
|
+
|
|
1247
|
+
self.previewLayer?.removeFromSuperlayer()
|
|
1248
|
+
self.previewLayer = nil
|
|
1249
|
+
|
|
1250
|
+
self.focusIndicatorView?.removeFromSuperview()
|
|
1251
|
+
self.focusIndicatorView = nil
|
|
1252
|
+
|
|
1253
|
+
self.frontCameraInput = nil
|
|
1254
|
+
self.rearCameraInput = nil
|
|
1255
|
+
self.audioInput = nil
|
|
1256
|
+
|
|
1257
|
+
self.frontCamera = nil
|
|
1258
|
+
self.rearCamera = nil
|
|
1259
|
+
self.audioDevice = nil
|
|
1260
|
+
self.allDiscoveredDevices = []
|
|
1261
|
+
|
|
1262
|
+
self.dataOutput = nil
|
|
1263
|
+
self.photoOutput = nil
|
|
1264
|
+
self.fileVideoOutput = nil
|
|
1265
|
+
|
|
1266
|
+
self.captureSession = nil
|
|
1267
|
+
self.currentCameraPosition = nil
|
|
1268
|
+
|
|
1269
|
+
// Reset output preparation status
|
|
1270
|
+
self.outputsPrepared = false
|
|
1271
|
+
|
|
1272
|
+
// Reset first frame detection
|
|
1273
|
+
self.hasReceivedFirstFrame = false
|
|
1274
|
+
self.firstFrameReadyCallback = nil
|
|
1275
|
+
}
|
|
1276
|
+
|
|
1277
|
+
func captureVideo() throws {
|
|
1278
|
+
guard let captureSession = self.captureSession, captureSession.isRunning else {
|
|
1279
|
+
throw CameraControllerError.captureSessionIsMissing
|
|
1280
|
+
}
|
|
1281
|
+
guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
|
|
1282
|
+
throw CameraControllerError.cannotFindDocumentsDirectory
|
|
1283
|
+
}
|
|
1284
|
+
|
|
1285
|
+
guard let fileVideoOutput = self.fileVideoOutput else {
|
|
1286
|
+
throw CameraControllerError.fileVideoOutputNotFound
|
|
1287
|
+
}
|
|
1288
|
+
|
|
1289
|
+
// cpcp_video_A6C01203 - portrait
|
|
1290
|
+
//
|
|
1291
|
+
if let connection = fileVideoOutput.connection(with: .video) {
|
|
1292
|
+
switch UIDevice.current.orientation {
|
|
1293
|
+
case .landscapeRight:
|
|
1294
|
+
connection.videoOrientation = .landscapeLeft
|
|
1295
|
+
case .landscapeLeft:
|
|
1296
|
+
connection.videoOrientation = .landscapeRight
|
|
1297
|
+
case .portrait:
|
|
1298
|
+
connection.videoOrientation = .portrait
|
|
1299
|
+
case .portraitUpsideDown:
|
|
1300
|
+
connection.videoOrientation = .portraitUpsideDown
|
|
1301
|
+
default:
|
|
1302
|
+
connection.videoOrientation = .portrait
|
|
1303
|
+
}
|
|
1304
|
+
}
|
|
1305
|
+
|
|
1306
|
+
let identifier = UUID()
|
|
1307
|
+
let randomIdentifier = identifier.uuidString.replacingOccurrences(of: "-", with: "")
|
|
1308
|
+
let finalIdentifier = String(randomIdentifier.prefix(8))
|
|
1309
|
+
let fileName="cpcp_video_"+finalIdentifier+".mp4"
|
|
1310
|
+
|
|
1311
|
+
let fileUrl = documentsDirectory.appendingPathComponent(fileName)
|
|
1312
|
+
try? FileManager.default.removeItem(at: fileUrl)
|
|
1313
|
+
|
|
1314
|
+
// Start recording video
|
|
1315
|
+
fileVideoOutput.startRecording(to: fileUrl, recordingDelegate: self)
|
|
1316
|
+
|
|
1317
|
+
// Save the file URL for later use
|
|
1318
|
+
self.videoFileURL = fileUrl
|
|
1319
|
+
}
|
|
1320
|
+
|
|
1321
|
+
    /// Stops the active video recording and reports the output file URL.
    ///
    /// - Parameter completion: Called with the URL saved by `captureVideo()`,
    ///   or an error when the session or movie output is unavailable.
    ///
    /// NOTE(review): `completion` fires immediately after `stopRecording()`
    /// is requested, not from the movie output's delegate callback, so the
    /// file may not be fully written when the URL is delivered — confirm
    /// callers tolerate this (or route the completion through
    /// `fileOutput(_:didFinishRecordingTo:from:error:)`).
    func stopRecording(completion: @escaping (URL?, Error?) -> Void) {
        guard let captureSession = self.captureSession, captureSession.isRunning else {
            completion(nil, CameraControllerError.captureSessionIsMissing)
            return
        }
        guard let fileVideoOutput = self.fileVideoOutput else {
            completion(nil, CameraControllerError.fileVideoOutputNotFound)
            return
        }

        // Stop recording video
        fileVideoOutput.stopRecording()

        // Return the video file URL in the completion handler
        completion(self.videoFileURL, nil)
    }
|
|
1337
|
+
}
|
|
1338
|
+
|
|
1339
|
+
// MARK: - Gesture handling (tap-to-focus, pinch-to-zoom)
extension CameraController: UIGestureRecognizerDelegate {
    /// Allow our gestures to run alongside any others attached to the view.
    func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
        return true
    }

    /// Tap-to-focus: converts the tap into normalized capture-device
    /// coordinates, shows the focus indicator at the tap point, and points
    /// one-shot focus/exposure at that spot. Configuration errors are logged
    /// and otherwise ignored.
    @objc
    func handleTap(_ tap: UITapGestureRecognizer) {
        guard let device = self.currentCameraPosition == .rear ? rearCamera : frontCamera else { return }

        let point = tap.location(in: tap.view)
        // Normalized (0-1) device-space point; nil if the preview layer is gone.
        let devicePoint = self.previewLayer?.captureDevicePointConverted(fromLayerPoint: point)

        // Show focus indicator at the tap point
        if let view = tap.view {
            showFocusIndicator(at: point, in: view)
        }

        do {
            try device.lockForConfiguration()
            defer { device.unlockForConfiguration() }

            // One-shot focus at the tapped point when supported.
            let focusMode = AVCaptureDevice.FocusMode.autoFocus
            if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) {
                // Falls back to (0, 0) if the layer-to-device conversion failed.
                device.focusPointOfInterest = CGPoint(x: CGFloat(devicePoint?.x ?? 0), y: CGFloat(devicePoint?.y ?? 0))
                device.focusMode = focusMode
            }

            // One-shot exposure at the same point.
            let exposureMode = AVCaptureDevice.ExposureMode.autoExpose
            if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
                device.exposurePointOfInterest = CGPoint(x: CGFloat(devicePoint?.x ?? 0), y: CGFloat(devicePoint?.y ?? 0))
                device.exposureMode = exposureMode
            }
        } catch {
            debugPrint(error)
        }
    }

    /// Draws an animated two-ring yellow indicator at `point`, replacing any
    /// indicator still on screen, then fades it out and removes it.
    private func showFocusIndicator(at point: CGPoint, in view: UIView) {
        // Remove any existing focus indicator
        focusIndicatorView?.removeFromSuperview()

        // Create a new focus indicator
        let indicator = UIView(frame: CGRect(x: 0, y: 0, width: 80, height: 80))
        indicator.center = point
        indicator.layer.borderColor = UIColor.yellow.cgColor
        indicator.layer.borderWidth = 2.0
        indicator.layer.cornerRadius = 40
        indicator.backgroundColor = UIColor.clear
        indicator.alpha = 0
        // Starts slightly enlarged; the show animation shrinks it to identity.
        indicator.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)

        // Add inner circle for better visibility
        let innerCircle = UIView(frame: CGRect(x: 20, y: 20, width: 40, height: 40))
        innerCircle.layer.borderColor = UIColor.yellow.cgColor
        innerCircle.layer.borderWidth = 1.0
        innerCircle.layer.cornerRadius = 20
        innerCircle.backgroundColor = UIColor.clear
        indicator.addSubview(innerCircle)

        view.addSubview(indicator)
        focusIndicatorView = indicator

        // Animate the focus indicator: pop in, linger, dim, then fade out.
        UIView.animate(withDuration: 0.15, animations: {
            indicator.alpha = 1.0
            indicator.transform = CGAffineTransform.identity
        }) { _ in
            // Keep the indicator visible for a moment
            UIView.animate(withDuration: 0.2, delay: 0.5, options: [], animations: {
                indicator.alpha = 0.3
            }) { _ in
                // Fade out and remove
                UIView.animate(withDuration: 0.3, delay: 0.2, options: [], animations: {
                    indicator.alpha = 0
                    indicator.transform = CGAffineTransform(scaleX: 0.8, y: 0.8)
                }) { _ in
                    indicator.removeFromSuperview()
                    // Only clear the stored reference if it still points at
                    // this indicator (a newer tap may have replaced it).
                    if self.focusIndicatorView == indicator {
                        self.focusIndicatorView = nil
                    }
                }
            }
        }
    }

    /// Pinch-to-zoom: scales the zoom factor captured at gesture start by the
    /// pinch scale, clamped to the device range (capped by
    /// `saneMaxZoomFactor`), with throttled, ramped updates for smoothness.
    @objc
    private func handlePinch(_ pinch: UIPinchGestureRecognizer) {
        guard let device = self.currentCameraPosition == .rear ? rearCamera : frontCamera else { return }

        let effectiveMaxZoom = min(device.maxAvailableVideoZoomFactor, self.saneMaxZoomFactor)
        // Clamp a requested factor into the device's allowed zoom range.
        func minMaxZoom(_ factor: CGFloat) -> CGFloat { return max(device.minAvailableVideoZoomFactor, min(factor, effectiveMaxZoom)) }

        switch pinch.state {
        case .began:
            // Store the initial zoom factor when pinch begins
            zoomFactor = device.videoZoomFactor

        case .changed:
            // Throttle zoom updates to prevent excessive CPU usage
            let currentTime = CACurrentMediaTime()
            guard currentTime - lastZoomUpdateTime >= zoomUpdateThrottle else { return }
            lastZoomUpdateTime = currentTime

            // Calculate new zoom factor based on pinch scale
            let newScaleFactor = minMaxZoom(pinch.scale * zoomFactor)

            // Use ramping for smooth zoom transitions during pinch
            // This provides much smoother performance than direct setting
            do {
                try device.lockForConfiguration()
                // Use a very fast ramp rate for immediate response
                device.ramp(toVideoZoomFactor: newScaleFactor, withRate: 5.0)
                device.unlockForConfiguration()
            } catch {
                debugPrint("Failed to set zoom: \(error)")
            }

        case .ended:
            // Update our internal zoom factor tracking
            zoomFactor = device.videoZoomFactor

        default: break
        }
    }
}
|
|
1464
|
+
|
|
1465
|
+
// MARK: - AVCapturePhotoCaptureDelegate
extension CameraController: AVCapturePhotoCaptureDelegate {
    /// Delivers the processed still photo to `photoCaptureCompletionBlock`,
    /// normalizing its orientation first. Decode failures are reported as
    /// `CameraControllerError.unknown`.
    public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            self.photoCaptureCompletionBlock?(nil, error)
            return
        }

        // Decode the capture into a UIImage; either step can fail.
        guard let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else {
            self.photoCaptureCompletionBlock?(nil, CameraControllerError.unknown)
            return
        }

        // Bake the EXIF orientation into the pixels before handing it back.
        self.photoCaptureCompletionBlock?(image.fixedOrientation(), nil)
    }
}
|
|
1486
|
+
|
|
1487
|
+
extension CameraController: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Receives live preview frames from the video data output. Serves two purposes:
    /// 1. Fires the one-time "first frame ready" callback so callers know the preview
    ///    is actually rendering.
    /// 2. Services a one-shot "capture frame from the sample buffer" request by
    ///    converting the pixel buffer to a `UIImage`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Check if we're waiting for the first frame.
        if !hasReceivedFirstFrame, let firstFrameCallback = firstFrameReadyCallback {
            hasReceivedFirstFrame = true
            firstFrameCallback()
            firstFrameReadyCallback = nil
            // If no capture is in progress, we can return early.
            if sampleBufferCaptureCompletionBlock == nil {
                return
            }
        }

        guard let completion = sampleBufferCaptureCompletionBlock else { return }
        // BUG FIX: clear the stored block up front so the completion fires at most
        // once. Previously it was cleared only on the success path, so a conversion
        // failure below left it set and the completion was invoked again with an
        // error on every subsequent frame.
        sampleBufferCaptureCompletionBlock = nil

        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            completion(nil, CameraControllerError.unknown)
            return
        }

        // Lock the pixel buffer while we read its base address; always unlock on exit.
        CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }

        // Wrap the pixel data in a CGContext so we can snapshot it as a CGImage.
        // NOTE(review): byteOrder32Little + premultipliedFirst assumes the output is
        // configured for 32BGRA — confirm against the session setup in this file.
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue |
            CGImageAlphaInfo.premultipliedFirst.rawValue

        let context = CGContext(
            data: baseAddress,
            width: width,
            height: height,
            bitsPerComponent: 8,
            bytesPerRow: bytesPerRow,
            space: colorSpace,
            bitmapInfo: bitmapInfo
        )

        guard let cgImage = context?.makeImage() else {
            completion(nil, CameraControllerError.unknown)
            return
        }

        // Normalize orientation so callers always receive an .up-oriented image.
        let image = UIImage(cgImage: cgImage)
        completion(image.fixedOrientation(), nil)
    }
}
|
|
1539
|
+
|
|
1540
|
+
/// Errors thrown by `CameraController` operations (session setup, capture,
/// zoom, and video recording). Human-readable messages are provided by the
/// `LocalizedError` conformance elsewhere in this file.
enum CameraControllerError: Swift.Error {
    // Attempted to start a capture session that is already running.
    case captureSessionAlreadyRunning
    // An operation required a capture session, but none is configured.
    case captureSessionIsMissing
    // The camera inputs could not be added to the session.
    case inputsAreInvalid
    // The requested operation is not valid in the current state.
    case invalidOperation
    // No usable camera device was found on this hardware.
    case noCamerasAvailable
    // The app's documents directory could not be located (should never happen).
    case cannotFindDocumentsDirectory
    // The movie file output is missing; video recording is unavailable.
    case fileVideoOutputNotFound
    // Catch-all for failures with no more specific cause.
    case unknown
    // A zoom factor outside the device's supported [min, max] range was requested.
    case invalidZoomLevel(min: CGFloat, max: CGFloat, requested: CGFloat)
}
|
|
1551
|
+
|
|
1552
|
+
/// Which physical camera the controller should use.
public enum CameraPosition {
    // The front-facing (selfie) camera.
    case front
    // The rear-facing camera.
    case rear
}
|
|
1556
|
+
|
|
1557
|
+
// MARK: - Human-readable descriptions for CameraControllerError
extension CameraControllerError: LocalizedError {
    /// Localized message surfaced to plugin callers when an operation fails.
    public var errorDescription: String? {
        // Cases listed in the enum's declaration order for easy cross-checking.
        switch self {
        case .captureSessionAlreadyRunning:
            return NSLocalizedString("Capture Session is Already Running", comment: "Capture Session Already Running")
        case .captureSessionIsMissing:
            return NSLocalizedString("Capture Session is Missing", comment: "Capture Session Missing")
        case .inputsAreInvalid:
            return NSLocalizedString("Inputs Are Invalid", comment: "Inputs Are Invalid")
        case .invalidOperation:
            return NSLocalizedString("Invalid Operation", comment: "invalid Operation")
        case .noCamerasAvailable:
            return NSLocalizedString("Failed to access device camera(s)", comment: "No Cameras Available")
        case .cannotFindDocumentsDirectory:
            return NSLocalizedString("Cannot find documents directory", comment: "This should never happen")
        case .fileVideoOutputNotFound:
            return NSLocalizedString("Video recording is not available. Make sure the camera is properly initialized.", comment: "Video recording not available")
        case .unknown:
            return NSLocalizedString("Unknown", comment: "Unknown")
        case .invalidZoomLevel(let min, let max, let requested):
            return NSLocalizedString("Invalid zoom level. Must be between \(min) and \(max). Requested: \(requested)", comment: "Invalid Zoom Level")
        }
    }
}
|
|
1581
|
+
|
|
1582
|
+
extension UIImage {

    /// Returns a copy of the image redrawn so its orientation metadata is `.up`.
    ///
    /// Camera photos carry an EXIF-style orientation flag instead of rotated
    /// pixels; this bakes the rotation/mirroring into the pixel data so consumers
    /// (encoding, display elsewhere) always see an upright image.
    /// - Returns: A new `.up`-oriented image, or `nil` when the backing `CGImage`
    ///   is unavailable or a drawing context could not be created.
    func fixedOrientation() -> UIImage? {

        guard imageOrientation != UIImage.Orientation.up else {
            // This is default orientation, don't need to do anything
            return self.copy() as? UIImage
        }

        guard let cgImage = self.cgImage else {
            // CGImage is not available (e.g. a CIImage-backed UIImage)
            return nil
        }

        guard let colorSpace = cgImage.colorSpace, let ctx = CGContext(data: nil,
                                                                       width: Int(size.width), height: Int(size.height),
                                                                       bitsPerComponent: cgImage.bitsPerComponent, bytesPerRow: 0,
                                                                       space: colorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) else {
            return nil // Not able to create CGContext
        }

        // Build the rotation that maps the stored pixels into .up orientation.
        var transform: CGAffineTransform = CGAffineTransform.identity
        switch imageOrientation {
        case .down, .downMirrored:
            transform = transform.translatedBy(x: size.width, y: size.height)
            transform = transform.rotated(by: CGFloat.pi)
        case .left, .leftMirrored:
            transform = transform.translatedBy(x: size.width, y: 0)
            transform = transform.rotated(by: CGFloat.pi / 2.0)
        case .right, .rightMirrored:
            transform = transform.translatedBy(x: 0, y: size.height)
            transform = transform.rotated(by: CGFloat.pi / -2.0)
        case .up, .upMirrored:
            break
        @unknown default:
            break
        }

        // Flip image one more time if needed to, this is to prevent flipped image.
        // BUG FIX: translatedBy/scaledBy return NEW transforms and do not mutate;
        // the results were previously discarded, so mirrored orientations were
        // never actually flipped. Assign the results back to `transform`.
        switch imageOrientation {
        case .upMirrored, .downMirrored:
            transform = transform.translatedBy(x: size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        case .leftMirrored, .rightMirrored:
            transform = transform.translatedBy(x: size.height, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        case .up, .down, .left, .right:
            break
        @unknown default:
            break
        }

        ctx.concatenate(transform)

        // 90°-rotated orientations swap width/height when drawing.
        // (cgImage is already unwrapped above; the previous redundant re-binding
        // and leftover debug prints were removed.)
        switch imageOrientation {
        case .left, .leftMirrored, .right, .rightMirrored:
            ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width))
        default:
            ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
        }

        guard let newCGImage = ctx.makeImage() else { return nil }
        return UIImage.init(cgImage: newCGImage, scale: 1, orientation: .up)
    }
}
|
|
1653
|
+
|
|
1654
|
+
extension CameraController: AVCaptureFileOutputRecordingDelegate {
    /// Called by AVFoundation when a movie file finishes writing, with or without error.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        guard let recordingError = error else {
            print("Movie recorded successfully: \(outputFileURL)")
            // You can save the file to the library, upload it, etc.
            return
        }
        print("Error recording movie: \(recordingError.localizedDescription)")
    }
}
|