@succinctlabs/react-native-zcam1 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. package/README.md +61 -0
  2. package/Zcam1Sdk.podspec +157 -0
  3. package/app.plugin.js +11 -0
  4. package/cpp/generated/zcam1_c2pa_utils.cpp +4091 -0
  5. package/cpp/generated/zcam1_c2pa_utils.hpp +367 -0
  6. package/cpp/generated/zcam1_certs_utils.cpp +1799 -0
  7. package/cpp/generated/zcam1_certs_utils.hpp +72 -0
  8. package/cpp/generated/zcam1_verify_utils.cpp +1857 -0
  9. package/cpp/generated/zcam1_verify_utils.hpp +79 -0
  10. package/cpp/proving/generated/zcam1_proving_utils.cpp +3661 -0
  11. package/cpp/proving/generated/zcam1_proving_utils.hpp +275 -0
  12. package/cpp/proving/zcam1-proving.cpp +16 -0
  13. package/cpp/proving/zcam1-proving.h +15 -0
  14. package/cpp/zcam1-sdk.cpp +20 -0
  15. package/cpp/zcam1-sdk.h +15 -0
  16. package/ios/Zcam1Camera.swift +2945 -0
  17. package/ios/Zcam1CameraFilmStyle.swift +191 -0
  18. package/ios/Zcam1CameraViewManager.m +86 -0
  19. package/ios/Zcam1Capture.h +13 -0
  20. package/ios/Zcam1Capture.mm +500 -0
  21. package/ios/Zcam1DepthData.swift +417 -0
  22. package/ios/Zcam1Sdk.h +16 -0
  23. package/ios/Zcam1Sdk.mm +66 -0
  24. package/ios/proving/Zcam1Proving.h +16 -0
  25. package/ios/proving/Zcam1Proving.mm +66 -0
  26. package/lib/module/NativeZcam1Capture.js +12 -0
  27. package/lib/module/NativeZcam1Capture.js.map +1 -0
  28. package/lib/module/NativeZcam1Sdk.js +7 -0
  29. package/lib/module/NativeZcam1Sdk.js.map +1 -0
  30. package/lib/module/bindings.js +51 -0
  31. package/lib/module/bindings.js.map +1 -0
  32. package/lib/module/camera.js +522 -0
  33. package/lib/module/camera.js.map +1 -0
  34. package/lib/module/capture.js +120 -0
  35. package/lib/module/capture.js.map +1 -0
  36. package/lib/module/common.js +35 -0
  37. package/lib/module/common.js.map +1 -0
  38. package/lib/module/generated/zcam1_c2pa_utils-ffi.js +43 -0
  39. package/lib/module/generated/zcam1_c2pa_utils-ffi.js.map +1 -0
  40. package/lib/module/generated/zcam1_c2pa_utils.js +1202 -0
  41. package/lib/module/generated/zcam1_c2pa_utils.js.map +1 -0
  42. package/lib/module/generated/zcam1_certs_utils-ffi.js +43 -0
  43. package/lib/module/generated/zcam1_certs_utils-ffi.js.map +1 -0
  44. package/lib/module/generated/zcam1_certs_utils.js +399 -0
  45. package/lib/module/generated/zcam1_certs_utils.js.map +1 -0
  46. package/lib/module/generated/zcam1_proving_utils-ffi.js +43 -0
  47. package/lib/module/generated/zcam1_proving_utils-ffi.js.map +1 -0
  48. package/lib/module/generated/zcam1_proving_utils.js +515 -0
  49. package/lib/module/generated/zcam1_proving_utils.js.map +1 -0
  50. package/lib/module/generated/zcam1_verify_utils-ffi.js +43 -0
  51. package/lib/module/generated/zcam1_verify_utils-ffi.js.map +1 -0
  52. package/lib/module/generated/zcam1_verify_utils.js +252 -0
  53. package/lib/module/generated/zcam1_verify_utils.js.map +1 -0
  54. package/lib/module/index.js +31 -0
  55. package/lib/module/index.js.map +1 -0
  56. package/lib/module/package.json +1 -0
  57. package/lib/module/picker.js +222 -0
  58. package/lib/module/picker.js.map +1 -0
  59. package/lib/module/proving/NativeZcam1Proving.js +7 -0
  60. package/lib/module/proving/NativeZcam1Proving.js.map +1 -0
  61. package/lib/module/proving/bindings.js +46 -0
  62. package/lib/module/proving/bindings.js.map +1 -0
  63. package/lib/module/proving/index.js +5 -0
  64. package/lib/module/proving/index.js.map +1 -0
  65. package/lib/module/proving/prove.js +346 -0
  66. package/lib/module/proving/prove.js.map +1 -0
  67. package/lib/module/utils.js +27 -0
  68. package/lib/module/utils.js.map +1 -0
  69. package/lib/module/verify.js +82 -0
  70. package/lib/module/verify.js.map +1 -0
  71. package/lib/typescript/package.json +1 -0
  72. package/lib/typescript/src/NativeZcam1Capture.d.ts +280 -0
  73. package/lib/typescript/src/NativeZcam1Capture.d.ts.map +1 -0
  74. package/lib/typescript/src/NativeZcam1Sdk.d.ts +8 -0
  75. package/lib/typescript/src/NativeZcam1Sdk.d.ts.map +1 -0
  76. package/lib/typescript/src/bindings.d.ts +14 -0
  77. package/lib/typescript/src/bindings.d.ts.map +1 -0
  78. package/lib/typescript/src/camera.d.ts +300 -0
  79. package/lib/typescript/src/camera.d.ts.map +1 -0
  80. package/lib/typescript/src/capture.d.ts +59 -0
  81. package/lib/typescript/src/capture.d.ts.map +1 -0
  82. package/lib/typescript/src/common.d.ts +10 -0
  83. package/lib/typescript/src/common.d.ts.map +1 -0
  84. package/lib/typescript/src/generated/zcam1_c2pa_utils-ffi.d.ts +175 -0
  85. package/lib/typescript/src/generated/zcam1_c2pa_utils-ffi.d.ts.map +1 -0
  86. package/lib/typescript/src/generated/zcam1_c2pa_utils.d.ts +811 -0
  87. package/lib/typescript/src/generated/zcam1_c2pa_utils.d.ts.map +1 -0
  88. package/lib/typescript/src/generated/zcam1_certs_utils-ffi.d.ts +82 -0
  89. package/lib/typescript/src/generated/zcam1_certs_utils-ffi.d.ts.map +1 -0
  90. package/lib/typescript/src/generated/zcam1_certs_utils.d.ts +413 -0
  91. package/lib/typescript/src/generated/zcam1_certs_utils.d.ts.map +1 -0
  92. package/lib/typescript/src/generated/zcam1_proving_utils-ffi.d.ts +153 -0
  93. package/lib/typescript/src/generated/zcam1_proving_utils-ffi.d.ts.map +1 -0
  94. package/lib/typescript/src/generated/zcam1_proving_utils.d.ts +321 -0
  95. package/lib/typescript/src/generated/zcam1_proving_utils.d.ts.map +1 -0
  96. package/lib/typescript/src/generated/zcam1_verify_utils-ffi.d.ts +84 -0
  97. package/lib/typescript/src/generated/zcam1_verify_utils-ffi.d.ts.map +1 -0
  98. package/lib/typescript/src/generated/zcam1_verify_utils.d.ts +286 -0
  99. package/lib/typescript/src/generated/zcam1_verify_utils.d.ts.map +1 -0
  100. package/lib/typescript/src/index.d.ts +29 -0
  101. package/lib/typescript/src/index.d.ts.map +1 -0
  102. package/lib/typescript/src/picker.d.ts +103 -0
  103. package/lib/typescript/src/picker.d.ts.map +1 -0
  104. package/lib/typescript/src/proving/NativeZcam1Proving.d.ts +8 -0
  105. package/lib/typescript/src/proving/NativeZcam1Proving.d.ts.map +1 -0
  106. package/lib/typescript/src/proving/bindings.d.ts +8 -0
  107. package/lib/typescript/src/proving/bindings.d.ts.map +1 -0
  108. package/lib/typescript/src/proving/index.d.ts +3 -0
  109. package/lib/typescript/src/proving/index.d.ts.map +1 -0
  110. package/lib/typescript/src/proving/prove.d.ts +74 -0
  111. package/lib/typescript/src/proving/prove.d.ts.map +1 -0
  112. package/lib/typescript/src/utils.d.ts +2 -0
  113. package/lib/typescript/src/utils.d.ts.map +1 -0
  114. package/lib/typescript/src/verify.d.ts +45 -0
  115. package/lib/typescript/src/verify.d.ts.map +1 -0
  116. package/package.json +118 -0
  117. package/src/NativeZcam1Capture.ts +335 -0
  118. package/src/NativeZcam1Sdk.ts +10 -0
  119. package/src/bindings.tsx +49 -0
  120. package/src/camera.tsx +705 -0
  121. package/src/capture.tsx +165 -0
  122. package/src/common.tsx +46 -0
  123. package/src/generated/zcam1_c2pa_utils-ffi.ts +456 -0
  124. package/src/generated/zcam1_c2pa_utils.ts +1866 -0
  125. package/src/generated/zcam1_certs_utils-ffi.ts +187 -0
  126. package/src/generated/zcam1_certs_utils.ts +549 -0
  127. package/src/generated/zcam1_proving_utils-ffi.ts +374 -0
  128. package/src/generated/zcam1_proving_utils.ts +804 -0
  129. package/src/generated/zcam1_verify_utils-ffi.ts +196 -0
  130. package/src/generated/zcam1_verify_utils.ts +372 -0
  131. package/src/index.ts +73 -0
  132. package/src/picker.tsx +342 -0
  133. package/src/proving/NativeZcam1Proving.ts +10 -0
  134. package/src/proving/bindings.tsx +50 -0
  135. package/src/proving/index.ts +8 -0
  136. package/src/proving/prove.tsx +492 -0
  137. package/src/utils.ts +38 -0
  138. package/src/verify.tsx +119 -0
  139. package/turbo.json +27 -0
@@ -0,0 +1,2945 @@
1
+ //
2
+ // Zcam1Camera.swift
3
+ // react-native-zcam1-sdk
4
+ //
5
+ // Native camera view + service using AVFoundation for preview and capture.
6
+ //
7
+
8
+ import AVFoundation
9
+ import CoreMotion
10
+ import Foundation
11
+ import Harbeth
12
+ import ImageIO
13
+ import MobileCoreServices
14
+ import UIKit
15
+
16
+ // MARK: - Motion Manager (Singleton for orientation detection)
17
+
18
/// Singleton motion manager that provides non-blocking 4-way orientation detection.
/// Uses raw accelerometer data (same approach as Signal-iOS) with a 0.87g axis threshold
/// to determine portrait, portraitUpsideDown, landscapeLeft, and landscapeRight.
/// Works even when the user has iOS orientation lock enabled.
///
/// Thread safety: `cachedOrientation`, `listeners`, and `nextToken` are only touched
/// while holding `lock`; listener callbacks are dispatched to the main thread.
@available(iOS 16.0, *)
final class Zcam1MotionManager {
    static let shared = Zcam1MotionManager()

    private let motionManager = CMMotionManager()
    // Accelerometer callbacks run on this queue; maxConcurrentOperationCount = 1 makes it serial.
    private let queue = OperationQueue()
    // Last decisively detected orientation; guarded by `lock`. Starts as portrait.
    private var cachedOrientation: AVCaptureVideoOrientation = .portrait
    private let lock = NSLock()

    /// Listeners notified on orientation change (called on main thread), keyed by token.
    private var listeners: [Int: (AVCaptureVideoOrientation) -> Void] = [:]
    // Monotonically increasing key for `listeners`; guarded by `lock`.
    private var nextToken: Int = 0

    // Private: access goes through `shared`.
    private init() {
        queue.name = "com.zcam1.motion"
        queue.maxConcurrentOperationCount = 1
    }

    /// Start accelerometer updates. Call when camera becomes active.
    /// No-op when the accelerometer is unavailable or already running.
    func startUpdates() {
        guard motionManager.isAccelerometerAvailable else { return }
        guard !motionManager.isAccelerometerActive else { return }

        // 0.2 s interval = 5 Hz, sufficient for orientation detection (same as Signal).
        motionManager.accelerometerUpdateInterval = 0.2
        motionManager.startAccelerometerUpdates(to: queue) { [weak self] data, _ in
            guard let self = self, let data = data else { return }

            // Use 0.87g threshold (~60° tilt) to match native iOS camera sensitivity.
            // When neither axis exceeds the threshold, keep the current orientation.
            let x = data.acceleration.x
            let y = data.acceleration.y

            let threshold = 0.87
            let newOrientation: AVCaptureVideoOrientation?
            if x >= threshold {
                newOrientation = .landscapeLeft
            } else if x <= -threshold {
                newOrientation = .landscapeRight
            } else if y <= -threshold {
                newOrientation = .portrait
            } else if y >= threshold {
                newOrientation = .portraitUpsideDown
            } else {
                // Tilt not decisive enough — keep current orientation.
                newOrientation = nil
            }

            guard let newOrientation = newOrientation else { return }

            // Update cache and snapshot listeners under the lock; invoke outside it.
            self.lock.lock()
            let changed = newOrientation != self.cachedOrientation
            if changed {
                self.cachedOrientation = newOrientation
            }
            let currentListeners = Array(self.listeners.values)
            self.lock.unlock()

            // Notify listeners on main thread when orientation changes.
            if changed {
                DispatchQueue.main.async {
                    for listener in currentListeners {
                        listener(newOrientation)
                    }
                }
            }
        }
    }

    /// Stop accelerometer updates. Call when camera becomes inactive.
    /// Note: the cached orientation is intentionally kept, so reads after stopping
    /// return the last detected value.
    func stopUpdates() {
        motionManager.stopAccelerometerUpdates()
    }

    /// Get the current orientation as AVCaptureVideoOrientation (non-blocking, cached).
    func currentOrientation() -> AVCaptureVideoOrientation {
        lock.lock()
        let orientation = cachedOrientation
        lock.unlock()
        return orientation
    }

    /// Add a listener for orientation changes. Returns a token to remove it later.
    /// The listener is invoked on the main thread only when the orientation changes.
    func addListener(_ listener: @escaping (AVCaptureVideoOrientation) -> Void) -> Int {
        lock.lock()
        let token = nextToken
        nextToken += 1
        listeners[token] = listener
        lock.unlock()
        return token
    }

    /// Remove a specific listener by its token. Unknown tokens are a no-op.
    func removeListener(_ token: Int) {
        lock.lock()
        listeners.removeValue(forKey: token)
        lock.unlock()
    }
}
121
+
122
+ // MARK: - Capture Format
123
+
124
/// Output format for still captures, bridged to JS as a string.
@objc public enum Zcam1CaptureFormat: Int {
    case jpeg = 0
    case dng = 1

    /// Map a JS-provided format string to a capture format (case-insensitive).
    /// "dng" and "raw" both select DNG; anything else — including nil — is JPEG.
    init(from string: String?) {
        let normalized = string?.lowercased()
        if normalized == "dng" || normalized == "raw" {
            self = .dng
        } else {
            self = .jpeg
        }
    }

    /// File extension used when writing the capture to disk.
    var fileExtension: String {
        self == .dng ? "dng" : "jpg"
    }

    /// Canonical format name reported back to JS.
    var formatString: String {
        self == .dng ? "dng" : "jpeg"
    }
}
155
+
156
+ // MARK: - Aspect Ratio
157
+
158
/// Target aspect ratio for preview/capture, bridged to JS as a string.
@objc public enum Zcam1AspectRatio: Int {
    case ratio4_3 = 0
    case ratio16_9 = 1
    case ratio1_1 = 2

    /// Parse a JS-provided ratio string (exact match, e.g. "16:9").
    /// Unrecognized values — including nil — default to 4:3.
    init(from string: String?) {
        if string == "16:9" {
            self = .ratio16_9
        } else if string == "1:1" {
            self = .ratio1_1
        } else {
            self = .ratio4_3
        }
    }

    /// Returns the aspect ratio as width/height (portrait orientation,
    /// i.e. taller than wide for 4:3 and 16:9).
    var value: CGFloat {
        switch self {
        case .ratio4_3:
            return 3.0 / 4.0
        case .ratio16_9:
            return 9.0 / 16.0
        case .ratio1_1:
            return 1.0
        }
    }

    /// Canonical ratio string reported back to JS.
    var formatString: String {
        switch self {
        case .ratio4_3:
            return "4:3"
        case .ratio16_9:
            return "16:9"
        case .ratio1_1:
            return "1:1"
        }
    }
}
188
+
189
+ // MARK: - Orientation
190
+
191
/// Requested capture orientation, bridged to JS as a string.
@objc public enum Zcam1Orientation: Int {
    case auto = 0
    case portrait = 1
    case landscape = 2

    /// Parse a JS-provided orientation string (case-insensitive).
    /// Unrecognized values — including nil — default to .auto.
    init(from string: String?) {
        switch string?.lowercased() {
        case "portrait":
            self = .portrait
        case "landscape":
            self = .landscape
        default:
            self = .auto
        }
    }

    /// Resolve to a concrete AVCaptureVideoOrientation using accelerometer data.
    /// Works even when the user has iOS orientation lock enabled.
    @available(iOS 16.0, *)
    func resolveToVideoOrientation() -> AVCaptureVideoOrientation {
        switch self {
        case .portrait:
            return .portrait
        case .landscape:
            // Auto-detect left/right from the accelerometer; if the device is not
            // currently tilted into landscape, fall back to landscapeRight.
            let detected = Zcam1MotionManager.shared.currentOrientation()
            switch detected {
            case .landscapeLeft, .landscapeRight:
                return detected
            default:
                return .landscapeRight
            }
        case .auto:
            return Zcam1MotionManager.shared.currentOrientation()
        }
    }
}
224
+
225
+ // MARK: - Orientation Helpers
226
+
227
/// Convert AVCaptureVideoOrientation to a JS-friendly string.
@available(iOS 16.0, *)
func orientationToString(_ orientation: AVCaptureVideoOrientation) -> String {
    switch orientation {
    case .portrait:
        return "portrait"
    case .portraitUpsideDown:
        return "portraitUpsideDown"
    case .landscapeLeft:
        return "landscapeLeft"
    case .landscapeRight:
        return "landscapeRight"
    @unknown default:
        // Any future orientation value maps to the safest existing name.
        return "portrait"
    }
}
238
+
239
/// Convert AVCaptureVideoOrientation to a rotation angle for the video writer transform.
/// Front camera requires different rotations due to mirroring and opposite sensor orientation.
@available(iOS 16.0, *)
func videoWriterRotationAngle(
    for orientation: AVCaptureVideoOrientation,
    position: AVCaptureDevice.Position = .back
) -> CGFloat {
    // Back sensor is landscape-right; front sensor is landscape-left and mirrored,
    // so every orientation takes the opposite rotation on the front camera.
    let isFront = position == .front
    switch orientation {
    case .portrait:
        return isFront ? -.pi / 2 : .pi / 2      // front: 90° CCW, back: 90° CW
    case .portraitUpsideDown:
        return isFront ? .pi / 2 : -.pi / 2      // front: 90° CW, back: 90° CCW
    case .landscapeRight:
        return isFront ? .pi : 0                 // front: 180°, back: sensor native
    case .landscapeLeft:
        return isFront ? 0 : .pi                 // front: sensor native, back: 180°
    @unknown default:
        return isFront ? -.pi / 2 : .pi / 2      // treat unknown as portrait
    }
}
266
+
267
+ // MARK: - Camera Delegate
268
+
269
/// Internal helper that acts as the AVCapturePhotoCaptureDelegate.
/// This keeps AVFoundation protocol types out of the @objc-visible service API.
///
/// Lifecycle: the delegate retains itself (`retainedSelf`) until `callCompletion`
/// runs exactly once, at which point it releases itself and notifies the owner.
@available(iOS 16.0, *)
private final class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
    private let format: Zcam1CaptureFormat
    private let aspectRatio: Zcam1AspectRatio
    // NOTE(review): `orientation` is stored but never read in this delegate's visible
    // code — confirm whether it is still needed.
    private let orientation: Zcam1Orientation
    private let includeDepthData: Bool
    private let skipPostProcessing: Bool
    // Store completion as a mutable optional so we can nil it after calling (prevents double-call).
    private var completion: ((NSDictionary?, NSError?) -> Void)?
    private weak var owner: Zcam1CameraService?
    // Keep a strong self-reference until completion is called to prevent premature deallocation.
    private var retainedSelf: PhotoCaptureDelegate?

    init(
        format: Zcam1CaptureFormat,
        aspectRatio: Zcam1AspectRatio,
        orientation: Zcam1Orientation,
        includeDepthData: Bool,
        skipPostProcessing: Bool = false,
        owner: Zcam1CameraService,
        completion: @escaping (NSDictionary?, NSError?) -> Void
    ) {
        self.format = format
        self.aspectRatio = aspectRatio
        self.orientation = orientation
        self.includeDepthData = includeDepthData
        self.skipPostProcessing = skipPostProcessing
        self.owner = owner
        self.completion = completion
        super.init()
        // Retain self until completion is called.
        self.retainedSelf = self
    }

    /// Safely call completion exactly once, then release all references.
    /// Always invoked on the main thread by the callers below.
    private func callCompletion(result: NSDictionary?, error: NSError?) {
        // Ensure we only call completion once.
        guard let completion = self.completion else {
            print("[PhotoCaptureDelegate] WARNING: completion already called, skipping")
            return
        }
        // Nil out completion before calling to prevent re-entry.
        self.completion = nil

        print(
            "[PhotoCaptureDelegate] calling completion, result=\(result != nil), error=\(error != nil)"
        )
        completion(result, error)

        // Clean up owner reference.
        self.owner?.didFinishCapture(delegate: self)
        // Release self-reference.
        self.retainedSelf = nil
    }

    /// AVCapturePhotoCaptureDelegate callback: validates the photo data, optionally
    /// crops/applies film style via the owner, writes the result to a temp file, and
    /// reports `{filePath, format, metadata[, depthData]}` through the completion.
    func photoOutput(
        _ output: AVCapturePhotoOutput,
        didFinishProcessingPhoto photo: AVCapturePhoto,
        error: Error?
    ) {
        print("[PhotoCaptureDelegate] didFinishProcessingPhoto called")
        if let error = error as NSError? {
            print("[PhotoCaptureDelegate] ERROR: \(error)")
            DispatchQueue.main.async { [self] in
                self.callCompletion(result: nil, error: error)
            }
            return
        }

        print("[PhotoCaptureDelegate] getting fileDataRepresentation...")
        guard let photoData = photo.fileDataRepresentation(), !photoData.isEmpty else {
            print("[PhotoCaptureDelegate] ERROR: Empty photo data")
            let err = NSError(
                domain: "Zcam1CameraService",
                code: -20,
                userInfo: [NSLocalizedDescriptionKey: "Empty photo data"]
            )
            DispatchQueue.main.async { [self] in
                self.callCompletion(result: nil, error: err)
            }
            return
        }
        print("[PhotoCaptureDelegate] photo data size: \(photoData.count) bytes")

        // Log actual captured dimensions for debugging resolution issues.
        if let cgImageSource = CGImageSourceCreateWithData(photoData as CFData, nil),
            let properties = CGImageSourceCopyPropertiesAtIndex(cgImageSource, 0, nil) as? [String: Any] {
            let width = properties[kCGImagePropertyPixelWidth as String] ?? "?"
            let height = properties[kCGImagePropertyPixelHeight as String] ?? "?"
            print("[PhotoCaptureDelegate] captured photo dimensions: \(width)x\(height)")
        }

        // Copy values we need immediately (before any reprocessing of `photo`).
        print("[PhotoCaptureDelegate] extracting metadata...")
        let metadataSnapshot: [String: Any] = photo.metadata
        print(
            "[PhotoCaptureDelegate] extracting depthData (includeDepthData=\(includeDepthData))...")
        let depthDataSnapshot: AVDepthData? = includeDepthData ? photo.depthData : nil
        print("[PhotoCaptureDelegate] depthData present: \(depthDataSnapshot != nil)")

        // Process synchronously on the current queue to avoid closure capture issues.
        // The AVCapturePhotoOutput callback queue can handle this work.
        print("[PhotoCaptureDelegate] processing photo...")
        var data = photoData

        // Apply crop + film style in a single pass (avoids double JPEG compression, preserves EXIF).
        // Skip post-processing if requested (returns raw sensor output).
        if skipPostProcessing {
            print("[PhotoCaptureDelegate] skipPostProcessing=true, returning raw JPEG data")
        } else {
            // Falls back to the unprocessed data if the owner is gone or processing fails.
            print("[PhotoCaptureDelegate] applying crop and film style...")
            if let owner = self.owner,
                let processedData = owner.processImage(
                    data,
                    metadata: metadataSnapshot,
                    aspectRatio: self.aspectRatio
                ) {
                data = processedData
            }
        }
        print("[PhotoCaptureDelegate] processing complete, data size: \(data.count)")

        // Re-extract metadata from processed data to get accurate dimensions.
        var finalMetadata: [String: Any] = metadataSnapshot
        if let cgImageSource = CGImageSourceCreateWithData(data as CFData, nil),
            let properties = CGImageSourceCopyPropertiesAtIndex(cgImageSource, 0, nil) as? [String: Any] {
            finalMetadata = properties
            print("[PhotoCaptureDelegate] re-extracted metadata, dimensions: \(properties[kCGImagePropertyPixelWidth as String] ?? "?")x\(properties[kCGImagePropertyPixelHeight as String] ?? "?")")
        }

        let filename = "zcam1-\(UUID().uuidString).\(self.format.fileExtension)"
        let tmpURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
        print("[PhotoCaptureDelegate] writing to: \(tmpURL.path)")

        do {
            try data.write(to: tmpURL, options: [.atomic])
            print("[PhotoCaptureDelegate] file written successfully")

            var metadata: [String: Any] = finalMetadata
            print("[PhotoCaptureDelegate] processing metadata...")

            // Extract TIFF dictionary (device info, resolution) under the key JS expects.
            if let tiffDict = metadata[kCGImagePropertyTIFFDictionary as String]
                as? [String: Any]
            {
                metadata["{TIFF}"] = tiffDict
            }

            // Extract EXIF dictionary (ISO, exposure, focal length, aperture).
            if let exifDict = metadata[kCGImagePropertyExifDictionary as String]
                as? [String: Any]
            {
                metadata["{Exif}"] = exifDict
            }
            print("[PhotoCaptureDelegate] metadata processed")

            // Extract depth data only when requested (and when available).
            var depthData: [String: Any]? = nil
            if self.includeDepthData, let depthDataSnapshot = depthDataSnapshot {
                print("[PhotoCaptureDelegate] processing depth data...")
                depthData = Zcam1DepthDataProcessor.processDepthData(depthDataSnapshot)
                print("[PhotoCaptureDelegate] depth data processed")
            }

            var result: [String: Any] = [
                "filePath": tmpURL.path,
                "format": self.format.formatString,
                "metadata": metadata,
            ]

            // Include depth data in result if requested and available.
            if self.includeDepthData, let depthData = depthData {
                result["depthData"] = depthData
            }

            print("[PhotoCaptureDelegate] calling completion on main thread...")
            DispatchQueue.main.async { [self] in
                self.callCompletion(result: result as NSDictionary, error: nil)
            }
        } catch {
            print("[PhotoCaptureDelegate] ERROR writing file: \(error)")
            DispatchQueue.main.async { [self] in
                self.callCompletion(result: nil, error: error as NSError)
            }
        }
    }
}
458
+
459
+ // MARK: - Asset Writer Recording State
460
+
461
/// Holds state for AVAssetWriter-based video recording.
/// Using AVAssetWriter instead of AVCaptureMovieFileOutput eliminates preview flash
/// when starting/stopping recording, since we manually write frames without session reconfiguration.
///
/// Thread safety: All writer operations (startSession, append, markAsFinished, finishWriting)
/// are serialized through the writerQueue to prevent race conditions.
@available(iOS 16.0, *)
private final class AssetWriterRecordingState {
    // Writer objects and destination; fixed for the lifetime of one recording.
    let assetWriter: AVAssetWriter
    let videoInput: AVAssetWriterInput
    // Audio input is optional — nil when the mic is unavailable/unauthorized upstream.
    let audioInput: AVAssetWriterInput?
    let pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
    let outputURL: URL

    /// Serial queue for all writer operations to prevent race conditions.
    let writerQueue = DispatchQueue(label: "com.zcam1.assetwriter", qos: .userInitiated)

    /// Whether recording is active. Only modified on writerQueue.
    var isRecording: Bool = false

    /// Whether startSession has been called. Only modified on writerQueue.
    var hasStartedSession: Bool = false

    /// Timestamp of first frame. Only modified on writerQueue.
    var startTime: CMTime = .invalid

    /// Frame/sample counters for debugging. Only modified on writerQueue.
    var videoFrameCount: Int = 0
    var audioSampleCount: Int = 0

    /// Stores the writer objects and output URL; performs no setup of its own.
    init(
        assetWriter: AVAssetWriter,
        videoInput: AVAssetWriterInput,
        audioInput: AVAssetWriterInput?,
        pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor,
        outputURL: URL
    ) {
        self.assetWriter = assetWriter
        self.videoInput = videoInput
        self.audioInput = audioInput
        self.pixelBufferAdaptor = pixelBufferAdaptor
        self.outputURL = outputURL
    }
}
505
+
506
+ // MARK: - Camera Service
507
+
508
+ /// Shared service that owns the AVCaptureSession and performs still captures.
509
+ ///
510
+ /// It is designed to be driven from the JS side via the TurboModule method
511
+ /// `takeNativePhoto`, and from a native preview view (see `Zcam1CameraView`).
512
+ @available(iOS 16.0, *)
513
+ @objcMembers
514
+ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
515
+
516
+ // Singleton instance (easy to access from ObjC / Swift bridge)
517
+ public static let shared = Zcam1CameraService()
518
+
519
+ // Underlying capture session and IO
520
+ public private(set) var captureSession: AVCaptureSession?
521
+ private var videoInput: AVCaptureDeviceInput?
522
+ private var audioInput: AVCaptureDeviceInput?
523
+
524
+ private let photoOutput = AVCapturePhotoOutput()
525
+
526
+ // Audio data output for recording. Only attached when mic is authorized.
527
+ private let audioDataOutput = AVCaptureAudioDataOutput()
528
+ private let audioDataQueue = DispatchQueue(label: "com.zcam1.audiodataoutput", qos: .userInteractive)
529
+
530
+ // Depth delivery can incur a noticeable one-time setup cost (first capture).
531
+ // We prewarm it once per session configuration to avoid first-shot lag.
532
+ private var didPrewarmDepth: Bool = false
533
+
534
+ // Whether depth data delivery is enabled at the session/output level.
535
+ // When true, zoom may be restricted on dual-camera devices.
536
+ private var depthEnabledAtSessionLevel: Bool = false
537
+
538
+ // Serial queue for all session operations
539
+ private let sessionQueue = DispatchQueue(label: "com.anonymous.zcam1poc.camera.session")
540
+
541
+ /// KVO observation for focus completion detection.
542
+ private var focusObservation: NSKeyValueObservation?
543
+
544
+ /// Fallback timer to revert to continuous auto-focus after tap-to-focus.
545
+ private var focusRevertTimer: DispatchWorkItem?
546
+
547
+ // Keep strong references to in-flight delegates so they live until completion
548
+ private var inFlightDelegates: [PhotoCaptureDelegate] = []
549
+
550
+ // AVAssetWriter-based video recording state.
551
+ // Using AVAssetWriter eliminates preview flash since we manually write frames
552
+ // without any AVCaptureSession reconfiguration.
553
+ // Thread safety: recordingStateLock guards the reference; writerQueue guards internal mutations.
554
+ private var recordingState: AssetWriterRecordingState?
555
+ private let recordingStateLock = NSLock()
556
+
557
/// Returns true if video recording is currently active.
/// Thread-safe: reads `recordingState` under `recordingStateLock`.
public var isVideoRecording: Bool {
    recordingStateLock.lock()
    defer { recordingStateLock.unlock() }
    return recordingState?.isRecording ?? false
}
565
+
566
+ // Auto-stop support: a cancellable work item that fires when maxDurationSeconds is reached.
567
+ // When it fires, it calls stopVideoRecording and caches the result so that a subsequent
568
+ // JS-side stopVideoRecording call can retrieve it without an error.
569
+ private var autoStopWorkItem: DispatchWorkItem?
570
+ private var autoStopResult: NSDictionary?
571
+
572
+ // Camera control state
573
+ private var currentZoom: CGFloat = 1.0
574
+ private var flashMode: AVCaptureDevice.FlashMode = .off
575
+ private var currentExposureBias: Float = 0.0
576
+ private var currentPosition: AVCaptureDevice.Position = .back
577
+
578
+ // Film style state
579
+ private var currentFilmStyle: Zcam1CameraFilmStyle = .normal
580
+ private var customFilmStyleChain: [C7FilterProtocol]?
581
+
582
// Private initializer: the service is a singleton, accessed via `shared`.
private override init() {
    super.init()
}
585
+
586
/// Best-effort prewarm for depth capture to avoid first-shot lag.
/// This primes the system's depth pipeline (and related ISP work) ahead of the first user capture.
/// Only runs if depth is enabled at the session level, the session is running, and it has
/// not already been attempted for this session configuration.
private func prewarmDepthPipelineIfNeeded() {
    // Skip prewarm if depth is not enabled at the session level.
    guard depthEnabledAtSessionLevel else { return }

    // Only prewarm once the session is running; otherwise the work may still be deferred
    // and the first real capture can pay the cost.
    guard let session = captureSession, session.isRunning else { return }

    // One-shot guard. NOTE(review): the flag is set before the depth-support check below,
    // so an unsupported output never retries — presumably intentional; confirm.
    guard !didPrewarmDepth else { return }
    didPrewarmDepth = true

    guard photoOutput.isDepthDataDeliverySupported else { return }

    // Depth is already enabled at the output level in configureSessionIfNeeded().
    // Just prepare the settings to prime the ISP.

    // Prefer an explicit JPEG codec when available; otherwise use default settings.
    let settings: AVCapturePhotoSettings
    if photoOutput.availablePhotoCodecTypes.contains(.jpeg) {
        settings = AVCapturePhotoSettings(format: [
            AVVideoCodecKey: AVVideoCodecType.jpeg
        ])
    } else {
        settings = AVCapturePhotoSettings()
    }

    settings.isDepthDataDeliveryEnabled = true
    if photoOutput.isCameraCalibrationDataDeliverySupported {
        settings.isCameraCalibrationDataDeliveryEnabled = true
    }

    // Speed prioritization keeps the prewarm itself cheap.
    if #available(iOS 13.0, *) {
        settings.photoQualityPrioritization = .speed
    }

    // Asynchronously prepare resources for these settings; failures are logged only,
    // since prewarm is best-effort and must not break normal capture.
    photoOutput.setPreparedPhotoSettingsArray([settings]) { prepared, error in
        if let error = error {
            print("[Zcam1CameraService] Depth prewarm failed: \(error)")
            return
        }
        print("[Zcam1CameraService] Depth prewarm prepared=\(prepared)")
    }
}
631
+
632
+ // MARK: - Film Style
633
+
634
/// Set the active camera film style for preview and capture.
/// Clears any custom film style chain.
public func setFilmStyle(_ filmStyle: Zcam1CameraFilmStyle) {
    currentFilmStyle = filmStyle
    customFilmStyleChain = nil
}
640
+
641
+ /// Set a custom film style chain for preview and capture.
642
+ /// Overrides the built-in preset film style.
643
+ public func setCustomFilmStyles(_ filmStyles: [C7FilterProtocol]) {
644
+ self.customFilmStyleChain = filmStyles.isEmpty ? nil : filmStyles
645
+ }
646
+
647
+ /// Get the current film style.
648
+ public func getFilmStyle() -> Zcam1CameraFilmStyle {
649
+ return currentFilmStyle
650
+ }
651
+
652
+ /// Check if custom film styles are active.
653
+ public func hasCustomFilmStyles() -> Bool {
654
+ return customFilmStyleChain != nil
655
+ }
656
+
657
+ /// Get the current camera position.
658
+ public func getCurrentPosition() -> AVCaptureDevice.Position {
659
+ return currentPosition
660
+ }
661
+
662
  /// Process image data with crop and film style. iOS handles orientation via EXIF metadata.
  /// - Parameters:
  ///   - data: The original JPEG image data
  ///   - metadata: The original photo metadata (EXIF, TIFF, GPS, etc.)
  ///   - aspectRatio: The target aspect ratio
  ///   - compressionQuality: JPEG compression quality (0.0-1.0, default 0.95)
  /// - Returns: Processed JPEG data with metadata, or the original data if no processing needed;
  ///   nil only when re-encoding fails in encodeJPEGWithMetadata(_:metadata:compressionQuality:).
  func processImage(
    _ data: Data,
    metadata: [String: Any],
    aspectRatio: Zcam1AspectRatio,
    compressionQuality: CGFloat = 0.95
  ) -> Data? {
    // Only the custom chain triggers re-rendering here; the preset currentFilmStyle
    // is not applied in this method — presumably it is applied elsewhere in the
    // preview/capture path; confirm.
    let needsFilmStyle = customFilmStyleChain != nil

    // If the data cannot be decoded, pass it through untouched (best effort).
    guard let image = UIImage(data: data),
      let cgImage = image.cgImage else { return data }

    // Calculate if cropping is needed.
    // The sensor buffer is always landscape (width > height) regardless of capture orientation.
    // EXIF orientation handles the final display rotation (portrait vs landscape).
    // So we always crop in landscape pixel space using the inverted portrait ratio.
    let pixelWidth = CGFloat(cgImage.width)
    let pixelHeight = CGFloat(cgImage.height)
    let sourceRatio = pixelWidth / pixelHeight
    let targetRatio = 1.0 / aspectRatio.value
    // 1% tolerance avoids a lossy re-encode for negligible ratio differences.
    let needsCrop = abs(sourceRatio - targetRatio) > 0.01

    // If no processing needed, return original
    guard needsCrop || needsFilmStyle else { return data }

    var processedCGImage: CGImage = cgImage

    // Apply crop if needed (simple center crop)
    if needsCrop {
      if let croppedImage = cropCGImage(cgImage, targetRatio: targetRatio) {
        processedCGImage = croppedImage
      }
    }

    // Apply film style if needed, preserving original EXIF orientation.
    // Harbeth's make(filter:) calls flattened() which bakes UIImage orientation into the
    // pixel data and returns .up. This corrupts the EXIF orientation metadata (e.g., a
    // portrait photo would get EXIF 1 "Up" instead of EXIF 6 "Right"). To prevent this,
    // we pass the CGImage with .up orientation so flattened() is a no-op, then restore
    // the original orientation afterward. Film style effects are purely color-space
    // operations and do not depend on pixel orientation.
    let originalOrientation = image.imageOrientation
    var finalImage = UIImage(cgImage: processedCGImage, scale: image.scale, orientation: originalOrientation)
    if let customFilmStyles = customFilmStyleChain {
      let upImage = UIImage(cgImage: processedCGImage, scale: image.scale, orientation: .up)
      let filtered = Zcam1CameraFilmStyle.apply(filmStyles: customFilmStyles, to: upImage)
      // If filtering yields no CGImage, fall back to the unfiltered (cropped) image.
      if let filteredCG = filtered.cgImage {
        finalImage = UIImage(cgImage: filteredCG, scale: image.scale, orientation: originalOrientation)
      }
    }

    return encodeJPEGWithMetadata(finalImage, metadata: metadata, compressionQuality: compressionQuality)
  }
721
+
722
+ /// Crop a CGImage to the specified aspect ratio using center crop.
723
+ /// - Parameters:
724
+ /// - cgImage: The source image to crop
725
+ /// - targetRatio: The target width/height ratio (already adjusted for EXIF orientation)
726
+ private func cropCGImage(_ cgImage: CGImage, targetRatio: CGFloat) -> CGImage? {
727
+ let pixelW = CGFloat(cgImage.width)
728
+ let pixelH = CGFloat(cgImage.height)
729
+ let currentRatio = pixelW / pixelH
730
+
731
+ var cropRect: CGRect
732
+ if currentRatio > targetRatio {
733
+ let newWidth = pixelH * targetRatio
734
+ let xOffset = (pixelW - newWidth) / 2
735
+ cropRect = CGRect(x: xOffset, y: 0, width: newWidth, height: pixelH)
736
+ } else {
737
+ let newHeight = pixelW / targetRatio
738
+ let yOffset = (pixelH - newHeight) / 2
739
+ cropRect = CGRect(x: 0, y: yOffset, width: pixelW, height: newHeight)
740
+ }
741
+
742
+ return cgImage.cropping(to: cropRect)
743
+ }
744
+
745
  /// Encode a UIImage to JPEG data with metadata preservation using ImageIO.
  /// EXIF pixel dimensions and the orientation tag are rewritten to match the
  /// (possibly cropped/filtered) image; all other metadata passes through.
  /// - Returns: JPEG data, or nil if the image has no CGImage or encoding fails.
  private func encodeJPEGWithMetadata(
    _ image: UIImage,
    metadata: [String: Any],
    compressionQuality: CGFloat
  ) -> Data? {
    guard let cgImage = image.cgImage else { return nil }

    let data = NSMutableData()
    // NOTE(review): kUTTypeJPEG is deprecated (iOS 15+) in favor of
    // UTType.jpeg.identifier — consider migrating when the deployment target allows.
    guard let destination = CGImageDestinationCreateWithData(
      data as CFMutableData,
      kUTTypeJPEG,
      1,
      nil
    ) else { return nil }

    // Prepare metadata with updated dimensions and orientation.
    var updatedMetadata = metadata

    // Update EXIF dimensions to match the processed image.
    if var exifDict = updatedMetadata[kCGImagePropertyExifDictionary as String] as? [String: Any] {
      exifDict[kCGImagePropertyExifPixelXDimension as String] = cgImage.width
      exifDict[kCGImagePropertyExifPixelYDimension as String] = cgImage.height
      updatedMetadata[kCGImagePropertyExifDictionary as String] = exifDict
    }

    // Set the image orientation in metadata (EXIF value 1-8).
    updatedMetadata[kCGImagePropertyOrientation as String] = cgImageOrientationFromUIImageOrientation(image.imageOrientation)

    // Set compression quality.
    updatedMetadata[kCGImageDestinationLossyCompressionQuality as String] = compressionQuality

    CGImageDestinationAddImage(destination, cgImage, updatedMetadata as CFDictionary)

    guard CGImageDestinationFinalize(destination) else { return nil }

    return data as Data
  }
783
+
784
+ /// Convert UIImage.Orientation to CGImagePropertyOrientation value.
785
+ private func cgImageOrientationFromUIImageOrientation(_ orientation: UIImage.Orientation) -> Int {
786
+ switch orientation {
787
+ case .up: return 1
788
+ case .upMirrored: return 2
789
+ case .down: return 3
790
+ case .downMirrored: return 4
791
+ case .leftMirrored: return 5
792
+ case .right: return 6
793
+ case .rightMirrored: return 7
794
+ case .left: return 8
795
+ @unknown default: return 1
796
+ }
797
+ }
798
+
799
+ // MARK: - Video Data Output for Film Style Preview
800
+
801
  /// Configure and add a video data output to the session for film style preview.
  /// Must be called to get video frames for applying film styles in real-time.
  /// Reuses an existing AVCaptureVideoDataOutput on the session when present
  /// (only updating its delegate and mirroring), otherwise creates and adds one.
  /// - Parameters:
  ///   - delegate: The sample buffer delegate to receive video frames.
  ///   - callbackQueue: The queue for delegate callbacks.
  ///   - completion: Called on main thread with the output if successful, nil if failed.
  public func configureVideoDataOutput(
    delegate: AVCaptureVideoDataOutputSampleBufferDelegate,
    callbackQueue: DispatchQueue,
    completion: @escaping (AVCaptureVideoDataOutput?) -> Void
  ) {
    sessionQueue.async { [weak self] in
      guard let self = self, let session = self.captureSession else {
        print("[Zcam1CameraService] configureVideoDataOutput: no session")
        DispatchQueue.main.async { completion(nil) }
        return
      }

      print(
        "[Zcam1CameraService] configureVideoDataOutput: session.isRunning=\(session.isRunning), preset=\(session.sessionPreset.rawValue)"
      )

      // Check if we already have a video data output.
      // Reuse it rather than adding a second one the session may reject.
      for output in session.outputs {
        if let existingOutput = output as? AVCaptureVideoDataOutput {
          print(
            "[Zcam1CameraService] configureVideoDataOutput: already have video data output, updating delegate and connection"
          )
          existingOutput.setSampleBufferDelegate(delegate, queue: callbackQueue)

          // Reconfigure the connection for the current camera position.
          if let connection = existingOutput.connection(with: .video) {
            session.beginConfiguration()
            // Mirror front camera for natural selfie view.
            if connection.isVideoMirroringSupported {
              connection.isVideoMirrored = (self.currentPosition == .front)
            }
            session.commitConfiguration()
            print(
              "[Zcam1CameraService] configureVideoDataOutput: reconfigured connection for position=\(self.currentPosition == .front ? "front" : "back"), mirrored=\(connection.isVideoMirrored)"
            )
          }

          DispatchQueue.main.async { completion(existingOutput) }
          return
        }
      }

      let output = AVCaptureVideoDataOutput()
      // Dropping late frames keeps preview latency low at the cost of skipped frames.
      output.alwaysDiscardsLateVideoFrames = true
      // 32BGRA — presumably the pixel format the film-style renderer expects; confirm.
      output.videoSettings = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
      ]

      // Set delegate BEFORE adding to session to not miss any frames.
      output.setSampleBufferDelegate(delegate, queue: callbackQueue)
      print(
        "[Zcam1CameraService] configureVideoDataOutput: delegate set on queue \(callbackQueue.label)"
      )

      session.beginConfiguration()

      // Try to add the output.
      if session.canAddOutput(output) {
        session.addOutput(output)
        print(
          "[Zcam1CameraService] configureVideoDataOutput: added output successfully, total outputs=\(session.outputs.count)"
        )

        // Configure the connection.
        if let connection = output.connection(with: .video) {
          connection.isEnabled = true
          // Mirror front camera for natural selfie view.
          if connection.isVideoMirroringSupported {
            connection.isVideoMirrored = (self.currentPosition == .front)
          }
          print(
            "[Zcam1CameraService] configureVideoDataOutput: connection configured, isActive=\(connection.isActive), isEnabled=\(connection.isEnabled), mirrored=\(connection.isVideoMirrored)"
          )
        } else {
          print(
            "[Zcam1CameraService] configureVideoDataOutput: WARNING - no video connection found!"
          )
        }

        session.commitConfiguration()
        print(
          "[Zcam1CameraService] configureVideoDataOutput: committed configuration, session.isRunning=\(session.isRunning)"
        )
        DispatchQueue.main.async { completion(output) }
      } else {
        print(
          "[Zcam1CameraService] configureVideoDataOutput: canAddOutput returned false, preset=\(session.sessionPreset.rawValue)"
        )
        session.commitConfiguration()
        DispatchQueue.main.async { completion(nil) }
      }
    }
  }
900
+
901
+ /// Remove a video data output from the session.
902
+ public func removeVideoDataOutput(_ output: AVCaptureVideoDataOutput) {
903
+ sessionQueue.async { [weak self] in
904
+ guard let self = self, let session = self.captureSession else { return }
905
+ session.beginConfiguration()
906
+ session.removeOutput(output)
907
+ session.commitConfiguration()
908
+ print(
909
+ "[Zcam1CameraService] removeVideoDataOutput: removed output, total outputs=\(session.outputs.count)"
910
+ )
911
+ }
912
+ }
913
+
914
+ // MARK: - Permissions
915
+
916
+ public func ensureCameraAuthorization(completion: @escaping (Bool) -> Void) {
917
+ switch AVCaptureDevice.authorizationStatus(for: .video) {
918
+ case .authorized:
919
+ completion(true)
920
+ case .notDetermined:
921
+ AVCaptureDevice.requestAccess(for: .video) { granted in
922
+ DispatchQueue.main.async {
923
+ completion(granted)
924
+ }
925
+ }
926
+ default:
927
+ completion(false)
928
+ }
929
+ }
930
+
931
+ public func ensureMicrophoneAuthorization(completion: @escaping (Bool) -> Void) {
932
+ switch AVCaptureDevice.authorizationStatus(for: .audio) {
933
+ case .authorized:
934
+ completion(true)
935
+ case .notDetermined:
936
+ AVCaptureDevice.requestAccess(for: .audio) { granted in
937
+ DispatchQueue.main.async {
938
+ completion(granted)
939
+ }
940
+ }
941
+ default:
942
+ completion(false)
943
+ }
944
+ }
945
+
946
+ // MARK: - Session Setup
947
+
948
+ private func device(for position: AVCaptureDevice.Position) -> AVCaptureDevice? {
949
+ #if targetEnvironment(simulator)
950
+ // Simulator doesn't have camera hardware.
951
+ // We'll handle this case specially in takePhoto() to return a test image.
952
+ return nil
953
+ #else
954
+ // Prefer virtual devices that combine multiple cameras for seamless zoom.
955
+ // Order matters - first available is used.
956
+ let deviceTypes: [AVCaptureDevice.DeviceType] = [
957
+ .builtInTripleCamera, // Ultra-wide + Wide + Telephoto (back)
958
+ .builtInDualWideCamera, // Ultra-wide + Wide (back)
959
+ .builtInDualCamera, // Wide + Telephoto (back)
960
+ .builtInTrueDepthCamera, // TrueDepth with depth support (front)
961
+ .builtInWideAngleCamera, // Wide only (fallback)
962
+ ]
963
+
964
+ let discoverySession = AVCaptureDevice.DiscoverySession(
965
+ deviceTypes: deviceTypes,
966
+ mediaType: .video,
967
+ position: position
968
+ )
969
+
970
+ // Return the first available device (most capable virtual device).
971
+ if let device = discoverySession.devices.first {
972
+ return device
973
+ }
974
+
975
+ // Fallback to any video device.
976
+ return AVCaptureDevice.default(for: .video)
977
+ #endif
978
+ }
979
+
980
  /// Configure virtual device lens switching for smooth zoom transitions.
  /// This enables seamless switching between ultra-wide, wide, and telephoto lenses.
  /// No-op for single-lens (physical) devices.
  /// NOTE(review): setPrimaryConstituentDeviceSwitchingBehavior(_:restrictedSwitchingBehaviorConditions:)
  /// is an iOS 15+ API and there is no #available guard here — confirm the deployment target.
  private func configureVirtualDeviceSwitching(_ device: AVCaptureDevice) {
    // Only configure for virtual devices that support lens switching.
    let deviceType = device.deviceType
    let isVirtualDevice =
      deviceType == .builtInTripleCamera || deviceType == .builtInDualWideCamera
      || deviceType == .builtInDualCamera

    guard isVirtualDevice else {
      print(
        "[Zcam1CameraService] Device is not a virtual device, skipping switching configuration"
      )
      return
    }

    do {
      try device.lockForConfiguration()

      // Enable automatic lens switching for smooth zoom transitions.
      // .auto allows the device to automatically switch between constituent cameras
      // based on zoom factor, providing seamless transitions.
      // Empty restriction conditions = no situations where switching is held back.
      device.setPrimaryConstituentDeviceSwitchingBehavior(
        .auto, restrictedSwitchingBehaviorConditions: [])

      print(
        "[Zcam1CameraService] Configured virtual device switching: \(deviceType.rawValue), behavior: auto"
      )

      device.unlockForConfiguration()
    } catch {
      print("[Zcam1CameraService] Failed to configure virtual device switching: \(error)")
    }
  }
1014
+
1015
  /// Configure the capture session if needed (or reconfigure if the position or depth setting changed).
  /// - Parameters:
  ///   - position: The camera position (front or back).
  ///   - depthEnabled: Whether to enable depth data delivery at the session level.
  ///       When true, depth data can be captured but zoom may be restricted on dual-camera devices.
  ///       When false (default), full zoom range is available.
  ///   - completion: Always invoked on the main queue, with nil on success or the configuration error.
  @nonobjc public func configureSessionIfNeeded(
    position: AVCaptureDevice.Position,
    depthEnabled: Bool = false,
    completion: @escaping (Error?) -> Void
  ) {
    sessionQueue.async {
      // Early return if session is already configured correctly for the requested position and depth setting.
      if let session = self.captureSession,
        let currentInput = self.videoInput,
        currentInput.device.position == position,
        session.outputs.contains(self.photoOutput),
        session.sessionPreset == .high,
        self.depthEnabledAtSessionLevel == depthEnabled
      {
        DispatchQueue.main.async {
          completion(nil)
        }
        return
      }

      do {
        let session = self.captureSession ?? AVCaptureSession()
        session.beginConfiguration()
        // Any reconfiguration invalidates the previous depth prewarm.
        self.didPrewarmDepth = false
        // Use .high preset to support both photo and video capture.
        // This avoids session reconfiguration when starting video recording,
        // which would cause dark initial frames while ISP adjusts.
        session.sessionPreset = .high

        // Remove existing input if position changed
        if let currentInput = self.videoInput,
          currentInput.device.position != position
        {
          // Remove subject area change observer for old device.
          NotificationCenter.default.removeObserver(
            self,
            name: .AVCaptureDeviceSubjectAreaDidChange,
            object: currentInput.device
          )
          // Invalidate old focus KVO observation.
          self.focusObservation?.invalidate()

          session.removeInput(currentInput)
          self.videoInput = nil
        }

        // Add input if missing
        if self.videoInput == nil {
          guard let device = self.device(for: position) else {
            throw NSError(
              domain: "Zcam1CameraService",
              code: -1,
              userInfo: [NSLocalizedDescriptionKey: "No suitable camera device found"]
            )
          }

          // Configure smooth lens switching for virtual devices (dual/triple camera).
          // This enables seamless zoom transitions between ultra-wide, wide, and telephoto lenses.
          self.configureVirtualDeviceSwitching(device)

          let input = try AVCaptureDeviceInput(device: device)
          if session.canAddInput(input) {
            session.addInput(input)
            self.videoInput = input
            self.currentPosition = position

            // Register subject area change observer for this device.
            NotificationCenter.default.addObserver(
              self,
              selector: #selector(self.subjectAreaDidChange),
              name: .AVCaptureDeviceSubjectAreaDidChange,
              object: device
            )

            // Observe focus completion via KVO.
            self.focusObservation?.invalidate()
            self.focusObservation = input.observe(
              \.device.isAdjustingFocus,
              options: [.old, .new]
            ) { [weak self] _, change in
              guard let self else { return }
              guard let oldValue = change.oldValue,
                let newValue = change.newValue else { return }
              // A true -> false transition means a focus sweep just finished.
              if oldValue == true, newValue == false {
                self.didCompleteFocusing()
              }
            }

            // Set initial continuous auto-focus and auto-exposure.
            self.resetFocusAndExposure(device: device)
          } else {
            throw NSError(
              domain: "Zcam1CameraService",
              code: -2,
              userInfo: [
                NSLocalizedDescriptionKey: "Cannot add camera input to session"
              ]
            )
          }
        }

        // Add photo output if needed.
        if !session.outputs.contains(self.photoOutput) {
          if session.canAddOutput(self.photoOutput) {
            session.addOutput(self.photoOutput)
          } else {
            throw NSError(
              domain: "Zcam1CameraService",
              code: -3,
              userInfo: [
                NSLocalizedDescriptionKey: "Cannot add photo output to session"
              ]
            )
          }
        }

        // Mirror front camera photos to match the preview (native iOS selfie behavior).
        // The photo output has a separate AVCaptureConnection from the video data output,
        // so mirroring must be configured independently on each.
        if let photoConnection = self.photoOutput.connection(with: .video),
          photoConnection.isVideoMirroringSupported {
          photoConnection.automaticallyAdjustsVideoMirroring = false
          photoConnection.isVideoMirrored = (position == .front)
          print("[Zcam1CameraService] photo output mirrored=\(photoConnection.isVideoMirrored) for position=\(position == .front ? "front" : "back")")
        }

        // Audio input/output setup is deferred until recording starts.
        // This avoids triggering microphone permission prompts during camera preview.
        // See setupAudioForRecording() which is called when recording begins.

        // Configure photo output for maximum resolution.
        // This is critical because we use .high session preset for video preview,
        // but still want full-resolution photos (12MP instead of 2MP).
        // NOTE(review): supportedMaxPhotoDimensions / maxPhotoDimensions are iOS 16+
        // APIs and are used without #available — confirm the deployment target.
        if let device = self.videoInput?.device {
          // Find the highest resolution format available for photos.
          let maxDimensions = device.activeFormat.supportedMaxPhotoDimensions
            .max { ($0.width * $0.height) < ($1.width * $1.height) }

          if let maxDim = maxDimensions {
            self.photoOutput.maxPhotoDimensions = maxDim
            print("[Zcam1CameraService] Photo output configured for max dimensions: \(maxDim.width)x\(maxDim.height)")
          }
        }

        // Depth delivery setup:
        // - Enable at the output level based on the depthEnabled parameter.
        // - When enabled, prewarm the pipeline via setPreparedPhotoSettingsArray.
        // - Note: Enabling depth restricts zoom on dual-camera devices.
        if self.photoOutput.isDepthDataDeliverySupported {
          self.photoOutput.isDepthDataDeliveryEnabled = depthEnabled
          self.depthEnabledAtSessionLevel = depthEnabled
        } else {
          self.depthEnabledAtSessionLevel = false
        }

        // Camera calibration data delivery is configured per-capture on AVCapturePhotoSettings.
        // AVCapturePhotoOutput does not expose a calibration delivery toggle on all iOS versions.

        session.commitConfiguration()
        self.captureSession = session

        // Prewarm depth pipeline (best-effort).
        self.prewarmDepthPipelineIfNeeded()

        DispatchQueue.main.async {
          completion(nil)
        }
      } catch {
        // NOTE(review): a throw after beginConfiguration() skips the matching
        // commitConfiguration(), leaving the session mid-configuration —
        // confirm whether a commit/rollback is needed on this path.
        DispatchQueue.main.async {
          completion(error)
        }
      }
    }
  }
1195
+
1196
+ // MARK: - Session Control
1197
+
1198
+ public func startRunning() {
1199
+ // Start motion manager for orientation detection (non-blocking).
1200
+ Zcam1MotionManager.shared.startUpdates()
1201
+
1202
+ sessionQueue.async {
1203
+ guard let session = self.captureSession else { return }
1204
+ if !session.isRunning {
1205
+ session.startRunning()
1206
+ }
1207
+
1208
+ // Trigger depth prewarm right after the session is (or becomes) running
1209
+ // to avoid first-shot lag on depth-enabled captures.
1210
+ self.prewarmDepthPipelineIfNeeded()
1211
+
1212
+ // Prewarm audio if mic permission is already granted to avoid shutter on first recording.
1213
+ self.prewarmAudioIfAuthorized()
1214
+ }
1215
+ }
1216
+
1217
  /// Stop the capture session and tear down focus-related observers.
  /// Observer cleanup happens even if the session was never started/running.
  public func stopRunning() {
    // Stop motion manager when camera is inactive.
    Zcam1MotionManager.shared.stopUpdates()

    sessionQueue.async {
      // Clean up focus observers.
      // Invalidate KVO and cancel any pending tap-to-focus revert BEFORE
      // stopping, so neither fires against a stopped session.
      self.focusObservation?.invalidate()
      self.focusObservation = nil
      self.focusRevertTimer?.cancel()
      self.focusRevertTimer = nil
      // Remove the subject-area-change observer registered during session configuration.
      if let device = self.videoInput?.device {
        NotificationCenter.default.removeObserver(
          self,
          name: .AVCaptureDeviceSubjectAreaDidChange,
          object: device
        )
      }

      guard let session = self.captureSession, session.isRunning else { return }
      session.stopRunning()
    }
  }
1239
+
1240
+ // MARK: - Camera Controls
1241
+
1242
+ /// Set zoom factor using the device's actual range.
1243
+ /// For virtual devices with ultra-wide, 1.0 is ultra-wide (0.5x user-facing),
1244
+ /// 2.0 is wide-angle (1x user-facing), etc.
1245
+ /// - Parameter factor: Device zoom factor (use getMinZoom/getMaxZoom for valid range)
1246
+ public func setZoom(_ factor: CGFloat) {
1247
+ sessionQueue.async {
1248
+ guard let device = self.videoInput?.device else { return }
1249
+ do {
1250
+ try device.lockForConfiguration()
1251
+ let minZoom = device.minAvailableVideoZoomFactor
1252
+ let maxZoom = min(device.maxAvailableVideoZoomFactor, 20.0)
1253
+ let clampedZoom = min(max(factor, minZoom), maxZoom)
1254
+ device.videoZoomFactor = clampedZoom
1255
+ self.currentZoom = clampedZoom
1256
+
1257
+ // Log active physical camera for debugging lens switching.
1258
+ if let activeCamera = device.activePrimaryConstituent {
1259
+ print(
1260
+ "[Zcam1] Zoom: \(clampedZoom), active lens: \(activeCamera.deviceType.rawValue)"
1261
+ )
1262
+ }
1263
+
1264
+ device.unlockForConfiguration()
1265
+ } catch {
1266
+ print("[Zcam1] Failed to set zoom: \(error)")
1267
+ }
1268
+ }
1269
+ }
1270
+
1271
+ /// Get the minimum supported zoom factor.
1272
+ /// For virtual devices with ultra-wide, this is 1.0 (corresponds to 0.5x user-facing).
1273
+ public func getMinZoom() -> CGFloat {
1274
+ guard let device = videoInput?.device else { return 1.0 }
1275
+ return device.minAvailableVideoZoomFactor
1276
+ }
1277
+
1278
  /// Get the maximum supported zoom factor, capped at 20x for UX.
  /// (Previous comment said 15x; the code caps at 20.0, matching the clamp in setZoom(_:).)
  /// Returns 1.0 when no device is configured.
  public func getMaxZoom() -> CGFloat {
    guard let device = videoInput?.device else { return 1.0 }
    return min(device.maxAvailableVideoZoomFactor, 20.0)
  }
1283
+
1284
+ /// Get the zoom factors where the device switches between physical lenses.
1285
+ /// Returns empty array for single-camera devices.
1286
+ /// For triple camera: typically [2.0, 6.0] meaning:
1287
+ /// - Below 2.0: ultra-wide lens (0.5x-1x user-facing)
1288
+ /// - At 2.0: switches FROM ultra-wide TO wide lens (1x user-facing)
1289
+ /// - At 6.0: switches FROM wide TO telephoto lens (3x user-facing)
1290
+ public func getSwitchOverZoomFactors() -> [NSNumber] {
1291
+ guard let device = videoInput?.device else { return [] }
1292
+ return device.virtualDeviceSwitchOverVideoZoomFactors.map { $0 }
1293
+ }
1294
+
1295
+ /// Check if the current device has an ultra-wide camera.
1296
+ /// This is true for builtInTripleCamera and builtInDualWideCamera.
1297
+ /// This is false for builtInDualCamera (Wide + Telephoto) and builtInWideAngleCamera.
1298
+ public func hasUltraWideCamera() -> Bool {
1299
+ guard let device = videoInput?.device else { return false }
1300
+ let deviceType = device.deviceType
1301
+ // builtInTripleCamera = Ultra-wide + Wide + Telephoto
1302
+ // builtInDualWideCamera = Ultra-wide + Wide
1303
+ // builtInDualCamera = Wide + Telephoto (NO ultra-wide)
1304
+ // builtInWideAngleCamera = Wide only (NO ultra-wide)
1305
+ return deviceType == .builtInTripleCamera || deviceType == .builtInDualWideCamera
1306
+ }
1307
+
1308
+ /// Get diagnostic info about the current camera device for debugging.
1309
+ /// Returns a dictionary with device type, supported zoom range, and switching behavior.
1310
+ public func getDeviceDiagnostics() -> [String: Any] {
1311
+ guard let device = videoInput?.device else {
1312
+ return ["error": "No device configured"]
1313
+ }
1314
+
1315
+ let deviceType = device.deviceType.rawValue
1316
+ let minZoom = device.minAvailableVideoZoomFactor
1317
+ let maxZoom = device.maxAvailableVideoZoomFactor
1318
+ let switchOverFactors = device.virtualDeviceSwitchOverVideoZoomFactors.map {
1319
+ $0.doubleValue
1320
+ }
1321
+ let currentZoom = device.videoZoomFactor
1322
+ let switchingBehavior = device.activePrimaryConstituentDeviceSwitchingBehavior.rawValue
1323
+
1324
+ return [
1325
+ "deviceType": deviceType,
1326
+ "minZoom": minZoom,
1327
+ "maxZoom": maxZoom,
1328
+ "currentZoom": currentZoom,
1329
+ "switchOverFactors": switchOverFactors,
1330
+ "switchingBehavior": switchingBehavior,
1331
+ "isVirtualDevice": !switchOverFactors.isEmpty,
1332
+ "currentExposureBias": currentExposureBias,
1333
+ "minExposureBias": device.minExposureTargetBias,
1334
+ "maxExposureBias": device.maxExposureTargetBias,
1335
+ "currentISO": device.iso,
1336
+ "exposureDuration": CMTimeGetSeconds(device.exposureDuration),
1337
+ ]
1338
+ }
1339
+
1340
+ // MARK: - Depth Detection Methods
1341
+
1342
+ /// Check if the current camera device supports depth data capture.
1343
+ /// Returns true for dual/triple rear cameras and TrueDepth front camera.
1344
+ /// Returns false for single rear cameras (iPhone SE, 16e, Air).
1345
+ public func isDepthSupported() -> Bool {
1346
+ return photoOutput.isDepthDataDeliverySupported
1347
+ }
1348
+
1349
+ /// Check if enabling depth would restrict zoom on this device.
1350
+ /// Returns true if zoom is limited to discrete levels (min == max in all ranges).
1351
+ /// This typically happens on dual-camera devices (iPhone 12-16 base).
1352
+ /// Returns false for triple-camera devices (Pro) and TrueDepth front cameras.
1353
+ public func hasDepthZoomLimitations() -> Bool {
1354
+ guard let device = videoInput?.device,
1355
+ photoOutput.isDepthDataDeliverySupported else { return false }
1356
+
1357
+ let format = device.activeFormat // activeFormat is NOT optional.
1358
+
1359
+ // Use supportedVideoZoomRangesForDepthDataDelivery (iOS 17.2+).
1360
+ // Returns [ClosedRange<CGFloat>] - use .lowerBound/.upperBound.
1361
+ // If all ranges have min == max, zoom is restricted to discrete levels.
1362
+ if #available(iOS 17.2, *) {
1363
+ let ranges = format.supportedVideoZoomRangesForDepthDataDelivery
1364
+ if ranges.isEmpty { return false }
1365
+
1366
+ // Check if ALL ranges are discrete (min == max).
1367
+ let allDiscrete = ranges.allSatisfy { $0.lowerBound == $0.upperBound }
1368
+ return allDiscrete
1369
+ }
1370
+
1371
+ // Pre-iOS 17.2: assume limitations for dual camera devices.
1372
+ return device.deviceType == .builtInDualCamera ||
1373
+ device.deviceType == .builtInDualWideCamera
1374
+ }
1375
+
1376
+ /// Get zoom ranges supported when depth data delivery is enabled.
1377
+ /// Returns array of [min, max] pairs. If min == max, it's a discrete level.
1378
+ /// Empty array means no depth support or no zoom restrictions.
1379
+ public func getDepthSupportedZoomRanges() -> [[Double]] {
1380
+ guard let device = videoInput?.device,
1381
+ photoOutput.isDepthDataDeliverySupported else { return [] }
1382
+
1383
+ let format = device.activeFormat // activeFormat is NOT optional.
1384
+
1385
+ if #available(iOS 17.2, *) {
1386
+ // supportedVideoZoomRangesForDepthDataDelivery returns [ClosedRange<CGFloat>].
1387
+ return format.supportedVideoZoomRangesForDepthDataDelivery.map {
1388
+ [Double($0.lowerBound), Double($0.upperBound)]
1389
+ }
1390
+ }
1391
+ return []
1392
+ }
1393
+
1394
+ /// Set torch mode (continuous flashlight during preview).
1395
+ /// - Parameter enabled: Whether torch should be on.
1396
+ public func setTorch(_ enabled: Bool) {
1397
+ sessionQueue.async {
1398
+ guard let device = self.videoInput?.device else { return }
1399
+ guard device.hasTorch && device.isTorchAvailable else { return }
1400
+ do {
1401
+ try device.lockForConfiguration()
1402
+ device.torchMode = enabled ? .on : .off
1403
+ device.unlockForConfiguration()
1404
+ } catch {
1405
+ print("[Zcam1] Failed to set torch: \(error)")
1406
+ }
1407
+ }
1408
+ }
1409
+
1410
+ /// Set flash mode for the next capture.
1411
+ /// - Parameter mode: "off", "on", or "auto".
1412
+ public func setFlashMode(_ mode: String) {
1413
+ switch mode.lowercased() {
1414
+ case "on":
1415
+ flashMode = .on
1416
+ case "auto":
1417
+ flashMode = .auto
1418
+ default:
1419
+ flashMode = .off
1420
+ }
1421
+ }
1422
+
1423
  /// Focus at a specific point in the preview.
  /// Performs a one-shot auto-focus/auto-exposure at the point; continuous AF is
  /// restored either when the subject area changes or by a 5-second fallback timer.
  /// - Parameter point: Normalized coordinates (0-1, 0-1) where (0,0) is top-left.
  public func focusAtPoint(_ point: CGPoint) {
    sessionQueue.async {
      guard let device = self.videoInput?.device else { return }
      guard device.isFocusPointOfInterestSupported else { return }

      // Cancel any pending revert timer.
      self.focusRevertTimer?.cancel()

      do {
        try device.lockForConfiguration()
        device.focusPointOfInterest = point
        // One-shot focus; continuous AF is restored by the revert paths below.
        device.focusMode = .autoFocus

        // Also set exposure point if supported.
        if device.isExposurePointOfInterestSupported {
          device.exposurePointOfInterest = point
          device.exposureMode = .autoExpose
        }

        // Monitor for scene changes so we can revert to continuous AF.
        device.isSubjectAreaChangeMonitoringEnabled = true

        device.unlockForConfiguration()
      } catch {
        print("[Zcam1] Failed to focus: \(error)")
      }

      // Schedule fallback revert to continuous AF after 5 seconds.
      // The work item is retained so a subsequent tap can cancel it (see above).
      let revertWork = DispatchWorkItem { [weak self] in
        self?.revertToContinuousAutoFocus()
      }
      self.focusRevertTimer = revertWork
      self.sessionQueue.asyncAfter(deadline: .now() + 5.0, execute: revertWork)
    }
  }
1460
+
1461
/// Reset focus and exposure to continuous auto-focus at center.
/// Called on session startup, camera switch, and subject area change.
private func resetFocusAndExposure(device: AVCaptureDevice) {
    let center = CGPoint(x: 0.5, y: 0.5)
    do {
        try device.lockForConfiguration()

        // Focus: back to the center point with continuous auto-focus.
        if device.isFocusPointOfInterestSupported {
            device.focusPointOfInterest = center
        }
        if device.isFocusModeSupported(.continuousAutoFocus) {
            device.focusMode = .continuousAutoFocus
        }

        // Exposure: back to the center point with continuous auto-exposure.
        if device.isExposurePointOfInterestSupported {
            device.exposurePointOfInterest = center
        }
        if device.isExposureModeSupported(.continuousAutoExposure) {
            device.exposureMode = .continuousAutoExposure
        }

        // Stop subject-area monitoring until the next tap-to-focus re-enables it.
        device.isSubjectAreaChangeMonitoringEnabled = false

        device.unlockForConfiguration()
    } catch {
        print("[Zcam1] Failed to reset focus: \(error)")
    }
}
1491
+
1492
/// Revert to continuous auto-focus (called by fallback timer or subject area change).
private func revertToContinuousAutoFocus() {
    // We are reverting now, so any pending fallback timer is obsolete.
    focusRevertTimer?.cancel()
    focusRevertTimer = nil

    if let device = videoInput?.device {
        resetFocusAndExposure(device: device)
    }
}
1501
+
1502
/// Called via KVO when focus adjustment completes.
private func didCompleteFocusing() {
    guard let device = videoInput?.device else { return }
    let poi = device.focusPointOfInterest
    print("[Zcam1] Focus completed at point: (\(poi.x), \(poi.y))")
}
1508
+
1509
/// Called when the device detects a significant scene change after tap-to-focus.
@objc private func subjectAreaDidChange(notification: Notification) {
    // Hop onto the session queue before touching capture-device state.
    sessionQueue.async { self.revertToContinuousAutoFocus() }
}
1515
+
1516
/// Set exposure compensation.
/// - Parameter bias: Exposure bias in EV units (typically -2.0 to +2.0).
public func setExposureCompensation(_ bias: Float) {
    sessionQueue.async {
        guard let device = self.videoInput?.device else { return }
        do {
            try device.lockForConfiguration()
            // Clamp the request to the device's supported bias range.
            let clamped = min(max(bias, device.minExposureTargetBias),
                              device.maxExposureTargetBias)
            device.setExposureTargetBias(clamped, completionHandler: nil)
            self.currentExposureBias = clamped
            device.unlockForConfiguration()
        } catch {
            print("[Zcam1] Failed to set exposure: \(error)")
        }
    }
}
1534
+
1535
/// Get the supported exposure compensation range in EV units.
/// Returns a dictionary with "min" and "max" keys.
public func getExposureRange() -> [String: Float] {
    if let device = videoInput?.device {
        return [
            "min": device.minExposureTargetBias,
            "max": device.maxExposureTargetBias,
        ]
    }
    // No active camera input: report a degenerate zero range.
    return ["min": 0.0, "max": 0.0]
}
1546
+
1547
/// Reset exposure compensation to neutral (0 EV).
public func resetExposure() {
    // Delegates clamping and queue dispatch to setExposureCompensation.
    setExposureCompensation(0.0)
}
1551
+
1552
/// Called by PhotoCaptureDelegate when a capture has fully completed so we can
/// release the strong reference and avoid memory leaks.
fileprivate func didFinishCapture(delegate: PhotoCaptureDelegate) {
    // Identity comparison: drop the matching delegate, if it is still tracked.
    guard let idx = inFlightDelegates.firstIndex(where: { $0 === delegate }) else { return }
    inFlightDelegates.remove(at: idx)
}
1559
+
1560
// MARK: - Simulator Test Image

/// Creates a simple test image for simulator testing.
/// Returns a UIImage with a colored background and test text.
/// The image is 1920x1080 with a random-hue diagonal gradient, the current
/// date/time, and a "SIMULATOR TEST IMAGE" label, so successive captures are
/// visually distinguishable.
private func createTestImage() -> UIImage {
    let size = CGSize(width: 1920, height: 1080)
    let renderer = UIGraphicsImageRenderer(size: size)

    return renderer.image { context in
        // Random gradient background with consistent tint (same hue).
        // brightness2 is always >= brightness1 (capped at 1.0) so the gradient
        // runs dark-to-light.
        let hue = CGFloat.random(in: 0.0...1.0)
        let saturation = CGFloat.random(in: 0.55...0.9)
        let brightness1 = CGFloat.random(in: 0.5...0.75)
        let brightness2 = min(brightness1 + CGFloat.random(in: 0.15...0.35), 1.0)

        let color1 = UIColor(
            hue: hue, saturation: saturation, brightness: brightness1, alpha: 1.0)
        let color2 = UIColor(
            hue: hue, saturation: saturation, brightness: brightness2, alpha: 1.0)
        let colors = [color1.cgColor, color2.cgColor]
        let gradient = CGGradient(
            colorsSpace: CGColorSpaceCreateDeviceRGB(),
            colors: colors as CFArray,
            locations: [0.0, 1.0])!
        // Diagonal gradient from top-left to bottom-right.
        context.cgContext.drawLinearGradient(
            gradient,
            start: .zero,
            end: CGPoint(x: size.width, y: size.height),
            options: [])

        // Add test text: current date/time, centered 90pt below the middle.
        let now = Date()
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = "yyyy-MM-dd\nHH:mm:ss"
        let dateTimeString = dateFormatter.string(from: now)

        let dateAttributes: [NSAttributedString.Key: Any] = [
            .font: UIFont.systemFont(ofSize: 88, weight: .semibold),
            .foregroundColor: UIColor.white.withAlphaComponent(0.9),
        ]
        let dateTextSize = dateTimeString.size(withAttributes: dateAttributes)
        let dateTextRect = CGRect(
            x: (size.width - dateTextSize.width) / 2,
            y: (size.height - dateTextSize.height) / 2 + 90,
            width: dateTextSize.width,
            height: dateTextSize.height
        )
        dateTimeString.draw(in: dateTextRect, withAttributes: dateAttributes)
        // Title label, centered 90pt above the middle.
        let text = "SIMULATOR TEST IMAGE"
        let attributes: [NSAttributedString.Key: Any] = [
            .font: UIFont.boldSystemFont(ofSize: 72),
            .foregroundColor: UIColor.white,
        ]
        let textSize = text.size(withAttributes: attributes)
        let textRect = CGRect(
            x: (size.width - textSize.width) / 2,
            y: (size.height - textSize.height) / 2 - 90,
            width: textSize.width,
            height: textSize.height)
        text.draw(in: textRect, withAttributes: attributes)
    }
}
1622
+
1623
// MARK: - Public Capture API (Objective-C-friendly)

/// High-level capture API used from ObjC / React Native bridge.
///
/// Convenience overload: forwards to the full `takePhoto` with depth data
/// enabled and no aspect-ratio or orientation override.
///
/// - Parameters:
///   - positionString: "front" or "back" (defaults to back).
///   - formatString: "jpeg" or "dng" (defaults to jpeg).
///   - completion: Called with a dictionary `{ filePath, format, metadata, depthData? }` or an error.
public func takePhoto(
    positionString: String?,
    formatString: String?,
    completion: @escaping (NSDictionary?, NSError?) -> Void
) {
    self.takePhoto(
        positionString: positionString,
        formatString: formatString,
        includeDepthData: true,
        aspectRatio: nil,
        orientation: nil,
        completion: completion
    )
}
1645
+
1646
/// Full capture entry point: configures the session for the requested camera
/// position, builds `AVCapturePhotoSettings` for the requested format, and
/// triggers a capture handled by a `PhotoCaptureDelegate`.
///
/// - Parameters:
///   - positionString: "front" selects the front camera; anything else selects back.
///   - formatString: Parsed by `Zcam1CaptureFormat`; .jpeg and .dng are handled below.
///   - includeDepthData: Requests depth/calibration delivery when the output supports it.
///   - aspectRatio: Parsed by `Zcam1AspectRatio`; forwarded to the capture delegate.
///   - orientation: Parsed by `Zcam1Orientation`; used for the connection's video orientation.
///   - skipPostProcessing: Forwarded to the capture delegate.
///   - completion: Called with a result dictionary or an NSError.
public func takePhoto(
    positionString: String?,
    formatString: String?,
    includeDepthData: Bool,
    aspectRatio: String?,
    orientation: String?,
    skipPostProcessing: Bool = false,
    completion: @escaping (NSDictionary?, NSError?) -> Void
) {
    let aspectRatioEnum = Zcam1AspectRatio(from: aspectRatio)
    let orientationEnum = Zcam1Orientation(from: orientation)
    print("[Zcam1CameraService] takePhoto START - position=\(positionString ?? "nil"), format=\(formatString ?? "nil"), includeDepthData=\(includeDepthData), skipPostProcessing=\(skipPostProcessing)")
    #if targetEnvironment(simulator)
    // Simulator mode: create and return a test image.
    // NOTE(review): for .dng the written data is still JPEG, only the file
    // extension differs (format.fileExtension) — presumably acceptable for
    // simulator-only testing; confirm downstream consumers tolerate this.
    let format = Zcam1CaptureFormat(from: formatString)
    let testImage = createTestImage()

    guard let jpegData = testImage.jpegData(compressionQuality: 0.9) else {
        let err = NSError(
            domain: "Zcam1CameraService",
            code: -30,
            userInfo: [NSLocalizedDescriptionKey: "Failed to create test image data"]
        )
        completion(nil, err)
        return
    }

    let filename = "zcam1-simulator-\(UUID().uuidString).\(format.fileExtension)"
    let tmpURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)

    do {
        try jpegData.write(to: tmpURL, options: [.atomic])

        // Create mock metadata similar to what a real camera would provide.
        let now = Date()
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = "yyyy:MM:dd HH:mm:ss"
        let dateString = dateFormatter.string(from: now)

        let metadata: [String: Any] = [
            "{Exif}": [
                "ISOSpeedRatings": [],
                "PixelXDimension": 1920,
                "PixelYDimension": 1080,
                "ExposureTime": 0,
                "FNumber": 1,
                "FocalLength": 5,
            ],
            "{TIFF}": [
                "DateTime": dateString,
                "Model": "iPhone Simulator",
                "Software": "iOS Simulator",
            ],
            "Orientation": 6,
        ]

        let result: [String: Any] = [
            "filePath": tmpURL.path,
            "format": format.formatString,
            "metadata": metadata,
        ]

        DispatchQueue.main.async {
            completion(result as NSDictionary, nil)
        }
    } catch {
        DispatchQueue.main.async {
            completion(nil, error as NSError)
        }
    }
    return
    #endif
    print("[Zcam1CameraService] takePhoto: checking camera authorization...")
    ensureCameraAuthorization { authorized in
        print("[Zcam1CameraService] takePhoto: authorized=\(authorized)")
        guard authorized else {
            let err = NSError(
                domain: "Zcam1CameraService",
                code: -10,
                userInfo: [NSLocalizedDescriptionKey: "Camera access not authorized"]
            )
            completion(nil, err)
            return
        }

        // Resolve the requested camera position; anything but "front" → back.
        let position: AVCaptureDevice.Position
        switch positionString?.lowercased() {
        case "front":
            position = .front
        default:
            position = .back
        }

        let format = Zcam1CaptureFormat(from: formatString)
        print(
            "[Zcam1CameraService] takePhoto: configuring session for position=\(position.rawValue), format=\(format), depthEnabled=\(self.depthEnabledAtSessionLevel)"
        )

        self.configureSessionIfNeeded(position: position, depthEnabled: self.depthEnabledAtSessionLevel) { error in
            print(
                "[Zcam1CameraService] takePhoto: configureSessionIfNeeded completed, error=\(String(describing: error))"
            )
            if let error = error {
                completion(nil, error as NSError)
                return
            }

            self.sessionQueue.async {
                print(
                    "[Zcam1CameraService] takePhoto: on sessionQueue, checking captureSession..."
                )
                guard let session = self.captureSession else {
                    let err = NSError(
                        domain: "Zcam1CameraService",
                        code: -11,
                        userInfo: [NSLocalizedDescriptionKey: "Capture session not configured"]
                    )
                    DispatchQueue.main.async {
                        completion(nil, err)
                    }
                    return
                }

                print("[Zcam1CameraService] takePhoto: session.isRunning=\(session.isRunning)")
                if !session.isRunning {
                    print("[Zcam1CameraService] takePhoto: starting session...")
                    session.startRunning()
                    print(
                        "[Zcam1CameraService] takePhoto: calling prewarmDepthPipelineIfNeeded..."
                    )
                    self.prewarmDepthPipelineIfNeeded()
                    print("[Zcam1CameraService] takePhoto: prewarm completed")
                }

                // Prepare photo settings
                print("[Zcam1CameraService] takePhoto: preparing photo settings...")
                let settings: AVCapturePhotoSettings

                switch format {
                case .jpeg:
                    print("[Zcam1CameraService] takePhoto: format is JPEG")
                    if self.photoOutput.availablePhotoCodecTypes.contains(.jpeg) {
                        settings = AVCapturePhotoSettings(format: [
                            AVVideoCodecKey: AVVideoCodecType.jpeg
                        ])
                    } else {
                        // Codec not reported: fall back to the output's default format.
                        settings = AVCapturePhotoSettings()
                    }

                case .dng:
                    print("[Zcam1CameraService] takePhoto: format is DNG")
                    if let rawType = self.photoOutput.availableRawPhotoPixelFormatTypes.first {
                        settings = AVCapturePhotoSettings(rawPixelFormatType: rawType)
                        // RAW capture requested; DNG file type selection is handled by the system if supported.
                    } else {
                        // Fallback to JPEG if RAW not available
                        if self.photoOutput.availablePhotoCodecTypes.contains(.jpeg) {
                            settings = AVCapturePhotoSettings(format: [
                                AVVideoCodecKey: AVVideoCodecType.jpeg
                            ])
                        } else {
                            settings = AVCapturePhotoSettings()
                        }
                    }

                }
                print("[Zcam1CameraService] takePhoto: settings created")

                // Request maximum resolution photo capture.
                // This is critical because we use .high session preset for video preview,
                // but still want full-resolution photos (12MP instead of 2MP).
                let maxDimensions = self.photoOutput.maxPhotoDimensions
                if maxDimensions.width > 0 && maxDimensions.height > 0 {
                    settings.maxPhotoDimensions = maxDimensions
                    print("[Zcam1CameraService] takePhoto: requesting max dimensions \(maxDimensions.width)x\(maxDimensions.height)")
                }

                // Configure flash if available.
                if let device = self.videoInput?.device, device.hasFlash {
                    if self.photoOutput.supportedFlashModes.contains(self.flashMode) {
                        settings.flashMode = self.flashMode
                    }
                }
                print("[Zcam1CameraService] takePhoto: flash configured")

                // Favor responsiveness when depth delivery is enabled (reduces perceived capture lag),
                // but clamp to what the current device/output supports.
                if #available(iOS 13.0, *) {
                    let desired: AVCapturePhotoOutput.QualityPrioritization =
                        includeDepthData ? .speed : .quality
                    let maxSupported = self.photoOutput.maxPhotoQualityPrioritization

                    if desired == .quality && maxSupported != .quality {
                        settings.photoQualityPrioritization = maxSupported
                    } else {
                        settings.photoQualityPrioritization = desired
                    }
                }
                print("[Zcam1CameraService] takePhoto: quality prioritization configured")

                // Depth: only set on photo settings if already enabled at output level.
                print(
                    "[Zcam1CameraService] takePhoto: isDepthDataDeliveryEnabled=\(self.photoOutput.isDepthDataDeliveryEnabled)"
                )
                if self.photoOutput.isDepthDataDeliveryEnabled {
                    print(
                        "[Zcam1CameraService] takePhoto: setting settings.isDepthDataDeliveryEnabled=\(includeDepthData)"
                    )
                    settings.isDepthDataDeliveryEnabled = includeDepthData
                } else {
                    settings.isDepthDataDeliveryEnabled = false
                }

                // Calibration: can be set directly if device supports it.
                print(
                    "[Zcam1CameraService] takePhoto: isCameraCalibrationDataDeliverySupported=\(self.photoOutput.isCameraCalibrationDataDeliverySupported)"
                )
                if self.photoOutput.isCameraCalibrationDataDeliverySupported {
                    print(
                        "[Zcam1CameraService] takePhoto: setting settings.isCameraCalibrationDataDeliveryEnabled=\(includeDepthData)"
                    )
                    settings.isCameraCalibrationDataDeliveryEnabled = includeDepthData
                } else {
                    settings.isCameraCalibrationDataDeliveryEnabled = false
                }

                // Set the capture connection orientation so EXIF metadata is correct.
                // This is the key step that makes landscape photos display correctly.
                let resolvedOrientation = orientationEnum.resolveToVideoOrientation()
                if let connection = self.photoOutput.connection(with: .video) {
                    if connection.isVideoOrientationSupported {
                        connection.videoOrientation = resolvedOrientation
                        print("[Zcam1CameraService] takePhoto: set videoOrientation=\(orientationToString(resolvedOrientation))")
                    }
                    // Mirror front camera photos to match the preview (native iOS selfie behavior).
                    // Without this, the saved photo is a "true" capture (not mirrored), which
                    // doesn't match what the user saw in the mirrored preview.
                    if connection.isVideoMirroringSupported {
                        connection.automaticallyAdjustsVideoMirroring = false
                        connection.isVideoMirrored = (self.currentPosition == .front)
                        print("[Zcam1CameraService] takePhoto: set isVideoMirrored=\(connection.isVideoMirrored) for position=\(self.currentPosition == .front ? "front" : "back")")
                    }
                }

                // Create delegate to handle capture and keep it alive until completion.
                // The delegate is retained in inFlightDelegates and removed again in
                // didFinishCapture(delegate:).
                print("[Zcam1CameraService] takePhoto: creating PhotoCaptureDelegate...")
                let delegate = PhotoCaptureDelegate(
                    format: format,
                    aspectRatio: aspectRatioEnum,
                    orientation: orientationEnum,
                    includeDepthData: includeDepthData,
                    skipPostProcessing: skipPostProcessing,
                    owner: self,
                    completion: completion
                )
                self.inFlightDelegates.append(delegate)
                print("[Zcam1CameraService] takePhoto: calling capturePhoto...")
                self.photoOutput.capturePhoto(with: settings, delegate: delegate)
                print("[Zcam1CameraService] takePhoto: capturePhoto called successfully")
            }
        }
    }
}
1909
+
1910
// MARK: - Video Capture API (Objective-C-friendly)

/// Starts video recording to a temporary `.mov` file using AVAssetWriter.
/// This approach eliminates preview flash since no session reconfiguration is needed.
/// Call `stopVideoRecording` to finish and receive the final file path.
///
/// - Parameters:
///   - positionString: Camera position ("front" or "back").
///   - maxDurationSeconds: Maximum recording duration in seconds. When greater
///     than zero the native layer will automatically stop recording after this
///     many seconds and cache the result for the next `stopVideoRecording` call.
///   - completion: Called once the recorder has started (or on error).
public func startVideoRecording(
    positionString: String?,
    maxDurationSeconds: Double = 0,
    completion: @escaping (NSDictionary?, NSError?) -> Void
) {
    #if targetEnvironment(simulator)
    let err = NSError(
        domain: "Zcam1CameraService",
        code: -40,
        userInfo: [
            NSLocalizedDescriptionKey:
                "Video recording is not supported on the iOS simulator"
        ]
    )
    completion(nil, err)
    return
    #endif

    ensureCameraAuthorization { authorized in
        guard authorized else {
            let err = NSError(
                domain: "Zcam1CameraService",
                code: -10,
                userInfo: [NSLocalizedDescriptionKey: "Camera access not authorized"]
            )
            completion(nil, err)
            return
        }

        // Deferred body: runs once microphone authorization is resolved (below).
        let startWithMicAuthorized: (Bool) -> Void = { micAuthorized in
            let position: AVCaptureDevice.Position
            switch positionString?.lowercased() {
            case "front":
                position = .front
            default:
                position = .back
            }

            self.configureSessionIfNeeded(position: position, depthEnabled: self.depthEnabledAtSessionLevel) { error in
                if let error = error {
                    completion(nil, error as NSError)
                    return
                }

                self.sessionQueue.async {
                    guard let session = self.captureSession else {
                        let err = NSError(
                            domain: "Zcam1CameraService",
                            code: -11,
                            userInfo: [
                                NSLocalizedDescriptionKey: "Capture session not configured"
                            ]
                        )
                        DispatchQueue.main.async {
                            completion(nil, err)
                        }
                        return
                    }

                    // Check if recording is already in progress (with lock protection)
                    self.recordingStateLock.lock()
                    let isRecordingActive = self.recordingState != nil
                    self.recordingStateLock.unlock()

                    if isRecordingActive {
                        let err = NSError(
                            domain: "Zcam1CameraService",
                            code: -42,
                            userInfo: [
                                NSLocalizedDescriptionKey:
                                    "A video recording is already in progress"
                            ]
                        )
                        DispatchQueue.main.async {
                            completion(nil, err)
                        }
                        return
                    }

                    // Setup audio input/output if mic is authorized (deferred from session init)
                    var hasAudio = false
                    if micAuthorized {
                        hasAudio = self.setupAudioForRecording(session: session)
                    }

                    // Prepare output URL
                    let filename = "zcam1-\(UUID().uuidString).mov"
                    let tmpURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)

                    // Remove any existing file at the path (defensive)
                    if FileManager.default.fileExists(atPath: tmpURL.path) {
                        try? FileManager.default.removeItem(at: tmpURL)
                    }

                    do {
                        // Create AVAssetWriter
                        let assetWriter = try AVAssetWriter(outputURL: tmpURL, fileType: .mov)

                        // Get video dimensions from the active video device format.
                        // Camera delivers uncompressed BGRA pixel buffers from AVCaptureVideoDataOutput,
                        // so we must encode them to H.264/HEVC (passthrough nil settings only works with
                        // already-compressed samples).
                        var videoWidth: Int = 1920
                        var videoHeight: Int = 1080
                        if let videoDevice = self.videoInput?.device {
                            let dimensions = CMVideoFormatDescriptionGetDimensions(videoDevice.activeFormat.formatDescription)
                            videoWidth = Int(dimensions.width)
                            videoHeight = Int(dimensions.height)
                        }

                        // Use HEVC for modern devices (iOS 11+)
                        let videoSettings: [String: Any] = [
                            AVVideoCodecKey: AVVideoCodecType.hevc,
                            AVVideoWidthKey: videoWidth,
                            AVVideoHeightKey: videoHeight,
                        ]

                        let videoInput = AVAssetWriterInput(
                            mediaType: .video,
                            outputSettings: videoSettings
                        )
                        videoInput.expectsMediaDataInRealTime = true

                        // Set video orientation based on current physical device orientation.
                        // The transform is locked at recording start (same as Apple Camera and Signal).
                        let recordingOrientation = Zcam1MotionManager.shared.currentOrientation()
                        let rotationAngle = videoWriterRotationAngle(
                            for: recordingOrientation,
                            position: position
                        )
                        videoInput.transform = CGAffineTransform(rotationAngle: rotationAngle)
                        print("[Zcam1CameraService] video recording orientation: \(orientationToString(recordingOrientation)), position: \(position == .front ? "front" : "back"), angle: \(rotationAngle)")

                        guard assetWriter.canAdd(videoInput) else {
                            throw NSError(
                                domain: "Zcam1CameraService",
                                code: -45,
                                userInfo: [NSLocalizedDescriptionKey: "Cannot add video input to asset writer"]
                            )
                        }
                        assetWriter.add(videoInput)

                        // Create pixel buffer adaptor for writing filtered frames.
                        // This allows us to write CVPixelBuffer directly instead of CMSampleBuffer,
                        // which is necessary when applying film style filters to video frames.
                        let pixelBufferAttributes: [String: Any] = [
                            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
                            kCVPixelBufferWidthKey as String: videoWidth,
                            kCVPixelBufferHeightKey as String: videoHeight,
                        ]
                        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
                            assetWriterInput: videoInput,
                            sourcePixelBufferAttributes: pixelBufferAttributes
                        )

                        // Configure audio input with AAC encoding.
                        // AVCaptureAudioDataOutput provides uncompressed LPCM samples, so we must
                        // encode them (passthrough nil settings only works with already-compressed audio).
                        var audioInput: AVAssetWriterInput?
                        if hasAudio {
                            let audioSettings: [String: Any] = [
                                AVFormatIDKey: kAudioFormatMPEG4AAC,
                                AVNumberOfChannelsKey: 2,
                                AVSampleRateKey: 44100.0,
                                AVEncoderBitRateKey: 128000,
                            ]
                            let input = AVAssetWriterInput(
                                mediaType: .audio,
                                outputSettings: audioSettings
                            )
                            input.expectsMediaDataInRealTime = true

                            if assetWriter.canAdd(input) {
                                assetWriter.add(input)
                                audioInput = input
                            }
                        }

                        // Start writing
                        guard assetWriter.startWriting() else {
                            throw assetWriter.error ?? NSError(
                                domain: "Zcam1CameraService",
                                code: -46,
                                userInfo: [NSLocalizedDescriptionKey: "Failed to start asset writer"]
                            )
                        }

                        // Create recording state
                        let state = AssetWriterRecordingState(
                            assetWriter: assetWriter,
                            videoInput: videoInput,
                            audioInput: audioInput,
                            pixelBufferAdaptor: pixelBufferAdaptor,
                            outputURL: tmpURL
                        )

                        // Mark recording as active on the writer queue
                        state.writerQueue.sync {
                            state.isRecording = true
                        }

                        // Set recordingState with lock protection
                        self.recordingStateLock.lock()
                        self.recordingState = state
                        self.recordingStateLock.unlock()

                        print("[Zcam1CameraService] AVAssetWriter recording started (no preview flash)")

                        // Schedule auto-stop if a duration cap was requested.
                        // NOTE(review): autoStopWorkItem/autoStopResult are read and
                        // written from sessionQueue, main queue, and the writer queue
                        // without recordingStateLock — looks racy; verify.
                        self.autoStopWorkItem?.cancel()
                        self.autoStopWorkItem = nil
                        self.autoStopResult = nil

                        if maxDurationSeconds > 0 {
                            let workItem = DispatchWorkItem { [weak self] in
                                guard let self = self else { return }

                                // Verify recording is still active before auto-stopping.
                                self.recordingStateLock.lock()
                                let isActive = self.recordingState != nil
                                self.recordingStateLock.unlock()

                                guard isActive else { return }

                                print("[Zcam1CameraService] Auto-stopping recording (maxDurationSeconds=\(maxDurationSeconds))")
                                self.stopVideoRecording { result, error in
                                    if let result = result {
                                        // Cache the result so the next JS stopVideoRecording call can retrieve it.
                                        self.autoStopResult = result
                                    }
                                }
                            }
                            self.autoStopWorkItem = workItem
                            DispatchQueue.main.asyncAfter(
                                deadline: .now() + maxDurationSeconds,
                                execute: workItem
                            )
                        }

                        // Immediately return success - no waiting for iOS callbacks
                        let result: [String: Any] = [
                            "status": "recording",
                            "filePath": tmpURL.path,
                            "format": "mov",
                            "hasAudio": hasAudio,
                        ]
                        DispatchQueue.main.async {
                            completion(result as NSDictionary, nil)
                        }

                    } catch {
                        DispatchQueue.main.async {
                            completion(nil, error as NSError)
                        }
                    }
                }
            }
        }

        let hasMicUsageDescription =
            Bundle.main.object(forInfoDictionaryKey: "NSMicrophoneUsageDescription") != nil

        // If the app didn't declare NSMicrophoneUsageDescription, do not request mic permission
        // (requesting would crash). Proceed with video-only recording.
        if !hasMicUsageDescription {
            startWithMicAuthorized(false)
            return
        }

        self.ensureMicrophoneAuthorization { micAuthorized in
            startWithMicAuthorized(micAuthorized)
        }
    }
}
2196
+
2197
/// Sets up audio input and output for recording. Called only when mic is authorized.
/// Returns true if audio was successfully configured.
/// Uses a single beginConfiguration/commitConfiguration block to minimize preview interruption.
private func setupAudioForRecording(session: AVCaptureSession) -> Bool {
    let inputMissing = self.audioInput == nil
    let outputMissing = !session.outputs.contains(self.audioDataOutput)

    // Fast path: everything is already wired up.
    guard inputMissing || outputMissing else { return true }

    // Build the device input before locking the session configuration,
    // keeping the configuration window as short as possible.
    var pendingInput: AVCaptureDeviceInput?
    if inputMissing {
        guard let micDevice = AVCaptureDevice.default(for: .audio) else {
            return false
        }
        do {
            pendingInput = try AVCaptureDeviceInput(device: micDevice)
        } catch {
            print("[Zcam1CameraService] Failed to create audio input: \(error)")
            return false
        }
    }

    // Apply all changes in one configuration transaction.
    session.beginConfiguration()

    if let input = pendingInput, session.canAddInput(input) {
        session.addInput(input)
        self.audioInput = input
    }

    if outputMissing, session.canAddOutput(self.audioDataOutput) {
        session.addOutput(self.audioDataOutput)
        self.audioDataOutput.setSampleBufferDelegate(self, queue: self.audioDataQueue)
    }

    session.commitConfiguration()

    // Report success only when both pieces ended up in place.
    return self.audioInput != nil && session.outputs.contains(self.audioDataOutput)
}
2240
+
2241
/// Prewarms audio configuration if mic permission is already granted.
/// Call this after camera session is configured to avoid shutter on first recording.
/// Must be called from sessionQueue.
private func prewarmAudioIfAuthorized() {
    // Never trigger a permission prompt from here: only proceed when the user
    // has already granted microphone access and a session exists.
    guard AVCaptureDevice.authorizationStatus(for: .audio) == .authorized,
          let session = self.captureSession else {
        return
    }

    let fullyConfigured =
        self.audioInput != nil && session.outputs.contains(self.audioDataOutput)
    if !fullyConfigured {
        print("[Zcam1CameraService] Prewarming audio configuration...")
        _ = self.setupAudioForRecording(session: session)
    }
}
2258
+
2259
/// Stops an in-progress video recording and returns `{ filePath, format, durationSeconds? }`.
///
/// If the recording was already auto-stopped (via `maxDurationSeconds`), the cached result
/// is returned immediately instead of an error.
///
/// Threading: the auto-stop timer is cancelled on the caller's thread, the recording
/// state is read on `sessionQueue`, and all writer finalization happens on the
/// recording's `writerQueue` to avoid racing concurrent sample appends.
///
/// - Parameter completion: Always invoked on the main queue, with either a metadata
///   dictionary (built by `buildVideoMetadata`) or an `NSError` (code -44 when there
///   is no active recording and no cached auto-stop result).
public func stopVideoRecording(completion: @escaping (NSDictionary?, NSError?) -> Void) {
    // Cancel any pending auto-stop timer since we are stopping explicitly.
    self.autoStopWorkItem?.cancel()
    self.autoStopWorkItem = nil

    sessionQueue.async {
        // Lock to safely read the recordingState reference.
        self.recordingStateLock.lock()
        let state = self.recordingState
        self.recordingStateLock.unlock()

        guard let state = state else {
            // Recording state is nil. Check if the native auto-stop already ran and
            // cached the result — if so, return it instead of an error.
            if let cachedResult = self.autoStopResult {
                self.autoStopResult = nil
                DispatchQueue.main.async {
                    completion(cachedResult, nil)
                }
                return
            }

            let err = NSError(
                domain: "Zcam1CameraService",
                code: -44,
                userInfo: [NSLocalizedDescriptionKey: "No active video recording to stop"]
            )
            DispatchQueue.main.async {
                completion(nil, err)
            }
            return
        }

        // All finalization must happen on the writer queue to prevent races with append
        state.writerQueue.async {
            // Check if still recording (might have been stopped already by auto-stop).
            guard state.isRecording else {
                // The auto-stop may have already finished and cached the result.
                if let cachedResult = self.autoStopResult {
                    self.autoStopResult = nil
                    DispatchQueue.main.async {
                        completion(cachedResult, nil)
                    }
                } else {
                    DispatchQueue.main.async {
                        completion(nil, NSError(
                            domain: "Zcam1CameraService",
                            code: -44,
                            userInfo: [NSLocalizedDescriptionKey: "Recording already stopped"]
                        ))
                    }
                }
                return
            }

            // Mark as not recording to stop accepting new samples.
            // (Safe here: isRecording is only mutated on writerQueue.)
            state.isRecording = false

            print("[Zcam1CameraService] Stopping AVAssetWriter recording...")

            // Handle edge case: no frames were captured.
            if !state.hasStartedSession {
                // Start a session at zero so we can finalize properly —
                // finishWriting on a writer with no session would fail.
                state.assetWriter.startSession(atSourceTime: .zero)
                state.hasStartedSession = true
            }

            // Mark inputs as finished so the writer can drain and close them.
            state.videoInput.markAsFinished()
            state.audioInput?.markAsFinished()

            // Finalize the asset writer; the completion block runs once the
            // file on disk is fully written.
            state.assetWriter.finishWriting {
                // Clear recording state with lock protection
                self.recordingStateLock.lock()
                self.recordingState = nil
                self.recordingStateLock.unlock()

                if let error = state.assetWriter.error {
                    print("[Zcam1CameraService] AVAssetWriter error: \(error)")
                    DispatchQueue.main.async {
                        completion(nil, error as NSError)
                    }
                    return
                }

                print("[Zcam1CameraService] AVAssetWriter recording finished, frames: \(state.videoFrameCount), audio: \(state.audioSampleCount)")

                // Build result with metadata derived from the actual recorded file
                self.buildVideoMetadata(from: state.outputURL, hasAudio: state.audioInput != nil) { result in
                    DispatchQueue.main.async {
                        completion(result as NSDictionary, nil)
                    }
                }
            }
        }
    }
}
2361
+
2362
/// Extracts metadata from a recorded video file.
///
/// Builds a JS-friendly dictionary that always contains `filePath`, `format`,
/// `hasAudio`, `deviceMake`, `deviceModel` and `softwareVersion`, and
/// best-effort adds `fileSizeBytes`, `durationSeconds`, `width`/`height`,
/// `frameRate`, `rotationDegrees`, `videoCodec`, `audioCodec`,
/// `audioSampleRate` and `audioChannels` read from the file on disk.
///
/// - Parameters:
///   - url: Location of the finished recording on disk.
///   - hasAudio: Whether the recording was written with an audio track.
///   - completion: Invoked synchronously on the caller's queue with the
///     assembled metadata dictionary.
private func buildVideoMetadata(from url: URL, hasAudio: Bool, completion: @escaping ([String: Any]) -> Void) {
    var result: [String: Any] = [
        "filePath": url.path,
        "format": "mov",
        "hasAudio": hasAudio,
        "deviceMake": "Apple",
        "deviceModel": UIDevice.current.model,
        "softwareVersion": "\(UIDevice.current.systemName) \(UIDevice.current.systemVersion)",
    ]

    // File size (best-effort: a failed read simply omits the key).
    if let attrs = try? FileManager.default.attributesOfItem(atPath: url.path),
       let size = attrs[.size] as? Int {
        result["fileSizeBytes"] = NSNumber(value: size)
    }

    let asset = AVURLAsset(url: url)

    // Duration. `isFinite` is false for NaN, so a separate NaN check is redundant.
    let seconds = CMTimeGetSeconds(asset.duration)
    if seconds.isFinite && seconds >= 0 {
        result["durationSeconds"] = seconds
    }

    // Converts a FourCC media subtype (e.g. 'avc1', 'aac ') to a printable string.
    // Trims both control characters and padding whitespace so video and audio
    // codec identifiers are reported consistently (previously only the audio
    // path stripped whitespace padding).
    func fourCCString(_ code: FourCharCode) -> String {
        let be = code.bigEndian
        let bytes: [UInt8] = [
            UInt8((be >> 24) & 0xff),
            UInt8((be >> 16) & 0xff),
            UInt8((be >> 8) & 0xff),
            UInt8(be & 0xff),
        ]
        if let s = String(bytes: bytes, encoding: .macOSRoman) {
            return s.trimmingCharacters(
                in: CharacterSet.controlCharacters.union(.whitespacesAndNewlines)
            )
        }
        return "\(code)"
    }

    // Video track metadata
    if let videoTrack = asset.tracks(withMediaType: .video).first {
        // Dimensions corrected for the preferred transform (a 90°/270°
        // rotation swaps width and height, and may produce negative values).
        let transformed = videoTrack.naturalSize.applying(videoTrack.preferredTransform)
        let width = abs(transformed.width)
        let height = abs(transformed.height)
        if width.isFinite && height.isFinite {
            result["width"] = Int(width.rounded())
            result["height"] = Int(height.rounded())
        }

        // Frame rate
        let frameRate = videoTrack.nominalFrameRate
        if frameRate.isFinite && frameRate > 0 {
            result["frameRate"] = Int(frameRate.rounded())
        }

        // Rotation derived from the transform matrix. Compared with a small
        // epsilon because the entries come from floating-point math.
        let t = videoTrack.preferredTransform
        let epsilon: CGFloat = 0.001
        func approx(_ x: CGFloat, _ y: CGFloat) -> Bool { abs(x - y) < epsilon }
        if approx(t.a, 0), approx(t.b, 1), approx(t.c, -1), approx(t.d, 0) {
            result["rotationDegrees"] = 90
        } else if approx(t.a, 0), approx(t.b, -1), approx(t.c, 1), approx(t.d, 0) {
            result["rotationDegrees"] = 270
        } else if approx(t.a, -1), approx(t.b, 0), approx(t.c, 0), approx(t.d, -1) {
            result["rotationDegrees"] = 180
        } else {
            result["rotationDegrees"] = 0
        }

        // Video codec. The force cast is safe in practice: a video track's
        // formatDescriptions only ever contain CMFormatDescription CF objects.
        if let formatDescAny = videoTrack.formatDescriptions.first {
            let formatDesc = formatDescAny as! CMFormatDescription
            result["videoCodec"] = fourCCString(CMFormatDescriptionGetMediaSubType(formatDesc))
        }
    }

    // Audio track metadata
    if let audioTrack = asset.tracks(withMediaType: .audio).first {
        if let formatDescAny = audioTrack.formatDescriptions.first {
            let formatDesc = formatDescAny as! CMAudioFormatDescription
            // fourCCString already strips FourCC padding, so no extra trim here.
            result["audioCodec"] = fourCCString(CMFormatDescriptionGetMediaSubType(formatDesc))

            if let asbdPtr = CMAudioFormatDescriptionGetStreamBasicDescription(formatDesc) {
                let asbd = asbdPtr.pointee
                if asbd.mSampleRate > 0 {
                    result["audioSampleRate"] = asbd.mSampleRate
                }
                if asbd.mChannelsPerFrame > 0 {
                    result["audioChannels"] = Int(asbd.mChannelsPerFrame)
                }
            }
        }
    }

    completion(result)
}
2465
+
2466
// MARK: - Sample Buffer Writing for Recording

/// Called by the view when it receives a video sample buffer.
/// If recording is active, writes the sample to the asset writer.
/// Applies film style filters if a custom film style chain is active.
///
/// Thread-safe: recordingStateLock guards reference access; writerQueue
/// serializes all writer operations. The writer session is started lazily on
/// the first video frame's presentation timestamp.
///
/// - Parameter sampleBuffer: A video sample buffer from the capture pipeline.
public func writeVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
    // Lock to safely read the recordingState reference.
    recordingStateLock.lock()
    let state = recordingState
    recordingStateLock.unlock()
    guard let state = state else { return }

    // Snapshot the film style chain outside the async block so a concurrent
    // style change cannot swap it mid-frame on the writer queue.
    let filmStyles = customFilmStyleChain

    // All writer operations must be serialized on the writer queue.
    state.writerQueue.async {
        guard state.isRecording else { return }

        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        // Start the session on first video frame.
        if !state.hasStartedSession {
            state.assetWriter.startSession(atSourceTime: timestamp)
            state.hasStartedSession = true
            state.startTime = timestamp
            print("[Zcam1CameraService] Asset writer session started at \(timestamp.seconds)")
        }

        // Drop the frame if the input cannot accept more data right now.
        guard state.videoInput.isReadyForMoreMediaData else { return }

        // Appends one pixel buffer through the adaptor, bumping the frame
        // counter on success and logging the writer's error on failure.
        // Shared by the filtered path and its unfiltered fallback.
        func appendViaAdaptor(_ buffer: CVPixelBuffer, label: String) {
            if state.pixelBufferAdaptor.append(buffer, withPresentationTime: timestamp) {
                state.videoFrameCount += 1
            } else if let error = state.assetWriter.error {
                print("[Zcam1CameraService] \(label) append failed: \(error)")
            }
        }

        if let filmStyles = filmStyles, !filmStyles.isEmpty {
            // Film style path: filter the raw pixel buffer before writing.
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                print("[Zcam1CameraService] Failed to get pixel buffer from sample buffer")
                return
            }

            // Use .up orientation for video recording - the asset writer's
            // transform handles rotation. Film style filters only modify
            // colors, not geometry.
            let orientation: UIImage.Orientation = .up

            if let filteredBuffer = Zcam1CameraFilmStyle.apply(
                filmStyles: filmStyles,
                to: pixelBuffer,
                orientation: orientation
            ) {
                appendViaAdaptor(filteredBuffer, label: "Filtered video")
            } else {
                // Fallback: write the original buffer if filtering fails, so a
                // filter error never drops frames from the recording.
                appendViaAdaptor(pixelBuffer, label: "Video")
            }
        } else {
            // No film style - write the original sample buffer directly.
            if state.videoInput.append(sampleBuffer) {
                state.videoFrameCount += 1
            } else if let error = state.assetWriter.error {
                print("[Zcam1CameraService] Video append failed: \(error)")
            }
        }
    }
}
2547
+
2548
// MARK: - AVCaptureAudioDataOutputSampleBufferDelegate

/// Receives audio sample buffers from the audio data output and, while a
/// recording is active, appends them to the asset writer's audio input on
/// the recording's writer queue.
public func captureOutput(
    _ output: AVCaptureOutput,
    didOutput sampleBuffer: CMSampleBuffer,
    from connection: AVCaptureConnection
) {
    // Ignore anything that is not our audio tap.
    guard output === audioDataOutput else { return }

    // Lock to safely read the recordingState reference.
    recordingStateLock.lock()
    let activeState = recordingState
    recordingStateLock.unlock()
    guard let activeState = activeState else { return }

    // All writer operations must be serialized on the writer queue.
    activeState.writerQueue.async {
        // Audio arriving before the first video frame is dropped: the writer
        // session is started on the first video timestamp.
        guard activeState.isRecording, activeState.hasStartedSession else { return }

        // Skip the sample if there is no audio input or it is not ready.
        guard let writerAudioInput = activeState.audioInput,
              writerAudioInput.isReadyForMoreMediaData else {
            return
        }

        if writerAudioInput.append(sampleBuffer) {
            activeState.audioSampleCount += 1
        } else if let error = activeState.assetWriter.error {
            print("[Zcam1CameraService] Audio append failed: \(error)")
        }
    }
}
2580
+
2581
/// Check if the device supports depth data capture and return available formats.
///
/// Depth support is inferred from the presence of a dual/triple rear camera
/// system. The completion receives `{ available: Bool, formats: [String] }`
/// on the main queue; the error argument is always nil.
public func getDepthSensorInfo(completion: @escaping (NSDictionary?, NSError?) -> Void) {
    // Devices with dual/triple cameras typically support depth capture.
    let depthCapableTypes: [AVCaptureDevice.DeviceType] = [
        .builtInTripleCamera,
        .builtInDualWideCamera,
        .builtInDualCamera,
    ]

    let discovery = AVCaptureDevice.DiscoverySession(
        deviceTypes: depthCapableTypes,
        mediaType: .video,
        position: .back
    )
    let supportsDepth = !discovery.devices.isEmpty

    // When depth hardware exists, advertise every format we can potentially
    // deliver; otherwise report an empty list.
    let formats: [String] = supportsDepth
        ? [
            "depthFloat32",
            "depthFloat16",
            "disparityFloat32",
            "disparityFloat16",
        ]
        : []

    let payload: [String: Any] = [
        "available": supportsDepth,
        "formats": formats,
    ]

    DispatchQueue.main.async {
        completion(payload as NSDictionary, nil)
    }
}
2619
+
2620
+ }
2621
+ // Capture delegate implementation moved into the internal PhotoCaptureDelegate helper.
2622
+
2623
// MARK: - Camera Preview View

/// UIView subclass that displays the live camera preview.
///
/// NEW ARCHITECTURE: Always uses AVCaptureVideoDataOutput for preview rendering.
/// This eliminates the complexity of toggling between preview layer and film style image view.
/// All frames go through the same pipeline - film style is applied when needed.
///
/// This view is intended to be wrapped by a React Native view manager
/// and controlled via props such as `isActive` and `position`.
///
/// Threading: sample buffers arrive on `videoDataQueue`; only the final
/// `previewImageView.image` assignment hops to the main queue.
@available(iOS 16.0, *)
@objc(Zcam1CameraView)
@objcMembers
public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {

    // Exposed properties (KVC/KVO friendly for RN)

    /// Whether the camera session should be running. Toggling starts/stops
    /// the shared service session.
    public var isActive: Bool = true {
        didSet {
            updateRunningState()
        }
    }

    /// "front" or "back"
    public var position: String = "back" {
        didSet {
            guard oldValue != position else { return }
            // Set flag first to stop accepting frames, then clear the preview.
            // Otherwise a stale (incorrectly mirrored) frame could flash.
            isReconfiguring = true
            previewImageView.image = nil
            reconfigureSession()
        }
    }

    /// "jpeg" or "dng" (controls what JS will request on capture)
    /// NOTE(review): stored only — no native code in this view reads it; presumably
    /// consumed by the JS side or the view manager. Verify against callers.
    public var captureFormat: String = "jpeg"

    /// Zoom factor (1.0 = no zoom, 2.0 = 2x, etc.)
    /// Prop-driven zoom changes are instant (used by slider and button taps).
    /// For smooth pinch-to-zoom, use setZoomAnimated via the TurboModule instead.
    public var zoom: CGFloat = 1.0 {
        didSet {
            Zcam1CameraService.shared.setZoom(zoom)
        }
    }

    /// Whether torch (flashlight) is enabled during preview.
    public var torch: Bool = false {
        didSet {
            Zcam1CameraService.shared.setTorch(torch)
        }
    }

    /// Exposure compensation in EV units.
    public var exposure: Float = 0.0 {
        didSet {
            Zcam1CameraService.shared.setExposureCompensation(exposure)
        }
    }

    /// Film style preset name ("normal", "mellow", "bw", "nostalgic") or custom film style name.
    public var filmStyle: String = "normal" {
        didSet {
            guard oldValue != filmStyle else { return }
            print("[Zcam1CameraView] Film style changed: \(oldValue) -> \(filmStyle)")
            applyCurrentFilmStyle()
        }
    }

    /// Custom film style recipe overrides for built-in presets.
    /// Keys are preset names, values are arrays of film style effect dictionaries.
    @objc public var filmStyleOverrides: NSDictionary? {
        didSet {
            print("[Zcam1CameraView] filmStyleOverrides updated")
            applyCurrentFilmStyle()
        }
    }

    /// Additional custom film styles defined by name.
    /// Keys are custom film style names, values are arrays of film style effect dictionaries.
    @objc public var customFilmStyles: NSDictionary? {
        didSet {
            print("[Zcam1CameraView] customFilmStyles updated")
            applyCurrentFilmStyle()
        }
    }

    /// Enable depth data capture at session level.
    /// When true, depth data can be captured but zoom may be restricted on dual-camera devices.
    /// When false (default), full zoom range is available.
    public var depthEnabled: Bool = false {
        didSet {
            guard oldValue != depthEnabled else { return }
            print("[Zcam1CameraView] depthEnabled changed: \(oldValue) -> \(depthEnabled)")
            // Reconfigure session to apply the new depth setting.
            isReconfiguring = true
            reconfigureSession()
        }
    }

    /// Callback fired when device physical orientation changes.
    /// Sends a dictionary with "orientation" key ("portrait", "landscapeLeft", "landscapeRight", "portraitUpsideDown").
    public var onOrientationChange: (([String: Any]) -> Void)?

    /// Token for this view's motion manager listener, used for cleanup in deinit.
    private var orientationListenerToken: Int?

    // Preview rendering - single UIImageView for all frames (filtered or not)
    private let previewImageView: UIImageView = {
        let iv = UIImageView()
        iv.contentMode = .scaleAspectFill
        iv.clipsToBounds = true
        return iv
    }()

    // Video processing

    /// The video data output attached by `setupVideoDataOutput`; nil until
    /// configuration completes.
    private var videoDataOutput: AVCaptureVideoDataOutput?
    /// Serial queue on which sample-buffer delegate callbacks are delivered.
    private let videoDataQueue = DispatchQueue(label: "com.zcam1.videodata", qos: .userInteractive)
    private var currentFilmStyleEnum: Zcam1CameraFilmStyle = .normal
    /// Resolved filter chain for the active film style; nil means no filtering.
    private var currentCustomFilmStyles: [C7FilterProtocol]?
    /// GPU-backed context reused across frames for pixel buffer -> CGImage conversion.
    private let ciContext = CIContext(options: [.useSoftwareRenderer: false])
    /// Number of frames received; used only for throttled debug logging.
    private var frameCount: Int = 0

    // Flag to skip frames during camera reconfiguration to avoid showing incorrectly mirrored frames.
    private var isReconfiguring: Bool = false

    public override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    public required init?(coder: NSCoder) {
        super.init(coder: coder)
        commonInit()
    }

    /// Shared setup for both initializers: installs the preview image view,
    /// registers for orientation updates, and kicks off session configuration.
    private func commonInit() {
        backgroundColor = .black

        // Add preview image view as the only preview mechanism.
        previewImageView.frame = bounds
        previewImageView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        addSubview(previewImageView)

        // Register for orientation change events from the motion manager.
        orientationListenerToken = Zcam1MotionManager.shared.addListener { [weak self] orientation in
            guard let self = self, let callback = self.onOrientationChange else { return }
            callback(["orientation": orientationToString(orientation)])
        }

        // Configure session and start receiving frames.
        reconfigureSession()
    }

    deinit {
        // Remove only this view's listener from the motion manager.
        if let token = orientationListenerToken {
            Zcam1MotionManager.shared.removeListener(token)
        }
    }

    public override func layoutSubviews() {
        super.layoutSubviews()
        // Keep the preview filling the view (autoresizing covers most cases;
        // this guarantees it after explicit frame changes).
        previewImageView.frame = bounds
    }

    // MARK: - Film Style Resolution

    /// Resolves and applies the current film style, checking overrides and custom film styles first.
    /// Resolution order: filmStyleOverrides -> customFilmStyles -> none.
    /// The resolved chain is both cached locally (for preview filtering) and
    /// pushed to the shared service (for recording).
    private func applyCurrentFilmStyle() {
        // Check filmStyleOverrides first.
        if let overrides = filmStyleOverrides as? [String: [[String: Any]]],
           let recipe = overrides[filmStyle] {
            print("[Zcam1CameraView] Using film style override for '\(filmStyle)'")
            let filmStyles = Zcam1CameraFilmStyle.createFilmStyles(from: recipe)
            currentCustomFilmStyles = filmStyles
            currentFilmStyleEnum = .normal
            Zcam1CameraService.shared.setCustomFilmStyles(filmStyles)
            return
        }

        // Check customFilmStyles next.
        if let custom = customFilmStyles as? [String: [[String: Any]]],
           let recipe = custom[filmStyle] {
            print("[Zcam1CameraView] Using custom film style '\(filmStyle)'")
            let filmStyles = Zcam1CameraFilmStyle.createFilmStyles(from: recipe)
            currentCustomFilmStyles = filmStyles
            currentFilmStyleEnum = .normal
            Zcam1CameraService.shared.setCustomFilmStyles(filmStyles)
            return
        }

        // Fall back to no film style (JS SDK provides all built-in recipes via filmStyleOverrides).
        currentCustomFilmStyles = nil
        currentFilmStyleEnum = .normal
        Zcam1CameraService.shared.setFilmStyle(.normal)
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    /// Per-frame pipeline: forward to the service for recording, convert to a
    /// UIImage with the correct orientation, apply the preview film style, and
    /// display on the main queue.
    public func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        // Skip frames during reconfiguration to avoid showing incorrectly mirrored frames.
        if isReconfiguring {
            return
        }

        // Forward sample buffer to service for recording (if active).
        // This enables AVAssetWriter-based recording without any preview flash.
        Zcam1CameraService.shared.writeVideoSampleBuffer(sampleBuffer)

        // Throttled debug logging: first frame, then every 60th.
        frameCount += 1
        if frameCount == 1 {
            print("[Zcam1CameraView] FIRST FRAME! filmStyle=\(currentFilmStyleEnum)")
        } else if frameCount % 60 == 0 {
            print("[Zcam1CameraView] frame \(frameCount), filmStyle=\(currentFilmStyleEnum)")
        }

        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        // Convert pixel buffer to UIImage.
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else {
            return
        }

        // Create UIImage with fixed portrait orientation for display.
        // The app UI is portrait-locked, so the preview frame is always taller than wide.
        // Camera sensor buffers always arrive in landscape (native sensor orientation),
        // so we rotate to fit the portrait frame.
        //
        // Back camera: .right (90° CW) maps the landscape-right sensor to portrait.
        // Front camera: .left (90° CCW) because the mirrored pixel buffer from
        // isVideoMirrored on the AVCaptureConnection changes the effective sensor
        // orientation. Using .right would display upside-down; .rightMirrored would
        // fix orientation but cancel out the mirror. .left gives correct orientation
        // while preserving the connection-level mirror for a natural selfie view.
        let isFront = position.lowercased() == "front"
        let imageOrientation: UIImage.Orientation = isFront ? .left : .right

        var displayImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: imageOrientation)

        // Apply film style if needed.
        if let customFilmStyles = currentCustomFilmStyles {
            displayImage = Zcam1CameraFilmStyle.apply(filmStyles: customFilmStyles, to: displayImage)
        }

        // Update UI on main thread, but double-check we're not reconfiguring.
        DispatchQueue.main.async { [weak self] in
            guard let self = self, !self.isReconfiguring else { return }
            self.previewImageView.image = displayImage
        }
    }

    public func captureOutput(
        _ output: AVCaptureOutput,
        didDrop sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        // Log dropped frames for debugging.
        print("[Zcam1CameraView] DROPPED frame")
    }

    // MARK: - Session Configuration

    /// Asks the shared service to (re)configure the capture session for the
    /// current position/depth settings, then reapplies props, re-attaches the
    /// video data output, and restores the running state.
    private func reconfigureSession() {
        let svc = Zcam1CameraService.shared
        let positionEnum: AVCaptureDevice.Position =
            position.lowercased() == "front" ? .front : .back

        svc.configureSessionIfNeeded(position: positionEnum, depthEnabled: depthEnabled) { [weak self] error in
            guard let self = self, error == nil else { return }

            // Apply camera settings.
            self.applyCurrentSettings()

            // Setup video data output for frame capture.
            self.setupVideoDataOutput()

            // Start the session.
            self.updateRunningState()
        }
    }

    /// Attaches this view as the video-data delegate on `videoDataQueue` and
    /// clears `isReconfiguring` once the connection is (re)established.
    private func setupVideoDataOutput() {
        Zcam1CameraService.shared.configureVideoDataOutput(
            delegate: self,
            callbackQueue: videoDataQueue
        ) { [weak self] output in
            guard let self = self else { return }
            if let output = output {
                self.videoDataOutput = output
                // Clear the reconfiguring flag now that the connection is properly configured.
                self.isReconfiguring = false
                print("[Zcam1CameraView] Video data output ready, reconfiguring=false")
            } else {
                // Clear flag even on error to avoid permanently blocking frames.
                self.isReconfiguring = false
                print("[Zcam1CameraView] ERROR: Failed to setup video data output")
            }
        }
    }

    /// Pushes the current zoom/torch/exposure props into the shared service.
    private func applyCurrentSettings() {
        let svc = Zcam1CameraService.shared
        svc.setZoom(zoom)
        svc.setTorch(torch)
        svc.setExposureCompensation(exposure)
    }

    /// Starts or stops the shared capture session according to `isActive`.
    private func updateRunningState() {
        let svc = Zcam1CameraService.shared
        if isActive {
            svc.startRunning()
        } else {
            svc.stopRunning()
        }
    }
}