@succinctlabs/react-native-zcam1 0.4.0-alpha.5 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/Zcam1Sdk.podspec +3 -2
  2. package/android/src/main/java/com/succinctlabs/zcam1sdk/camera/Zcam1CameraService.kt +3 -3
  3. package/ios/Zcam1Camera.swift +177 -9
  4. package/ios/Zcam1CameraFilmStyle.swift +18 -2
  5. package/ios/Zcam1CameraViewManager.m +4 -0
  6. package/ios/Zcam1DepthData.swift +219 -286
  7. package/lib/module/NativeZcam1Capture.js.map +1 -1
  8. package/lib/module/camera.js +49 -4
  9. package/lib/module/camera.js.map +1 -1
  10. package/lib/module/capture.js +57 -54
  11. package/lib/module/capture.js.map +1 -1
  12. package/lib/module/generated/zcam1_verify_utils.js +74 -5
  13. package/lib/module/generated/zcam1_verify_utils.js.map +1 -1
  14. package/lib/module/index.js.map +1 -1
  15. package/lib/module/picker.js +3 -2
  16. package/lib/module/picker.js.map +1 -1
  17. package/lib/module/verify.js +1 -0
  18. package/lib/module/verify.js.map +1 -1
  19. package/lib/typescript/src/NativeZcam1Capture.d.ts +10 -0
  20. package/lib/typescript/src/NativeZcam1Capture.d.ts.map +1 -1
  21. package/lib/typescript/src/camera.d.ts +21 -0
  22. package/lib/typescript/src/camera.d.ts.map +1 -1
  23. package/lib/typescript/src/capture.d.ts +9 -3
  24. package/lib/typescript/src/capture.d.ts.map +1 -1
  25. package/lib/typescript/src/generated/zcam1_verify_utils.d.ts +127 -4
  26. package/lib/typescript/src/generated/zcam1_verify_utils.d.ts.map +1 -1
  27. package/lib/typescript/src/index.d.ts +2 -2
  28. package/lib/typescript/src/index.d.ts.map +1 -1
  29. package/lib/typescript/src/picker.d.ts.map +1 -1
  30. package/lib/typescript/src/verify.d.ts +1 -0
  31. package/lib/typescript/src/verify.d.ts.map +1 -1
  32. package/package.json +1 -1
  33. package/src/NativeZcam1Capture.ts +12 -0
  34. package/src/camera.tsx +75 -1
  35. package/src/capture.tsx +81 -67
  36. package/src/generated/zcam1_verify_utils.ts +86 -5
  37. package/src/index.ts +8 -1
  38. package/src/picker.tsx +3 -2
  39. package/src/verify.tsx +1 -0
package/Zcam1Sdk.podspec CHANGED
@@ -61,7 +61,8 @@ Pod::Spec.new do |s|
61
61
  source_files += [
62
62
  "ios/proving/*.{h,m,mm,swift}",
63
63
  "cpp/proving/*.{hpp,cpp,c,h}",
64
- "cpp/proving/generated/*.{hpp,cpp,c,h}",
64
+ "cpp/proving/generated/zcam1_proving_utils.cpp",
65
+ "cpp/proving/generated/zcam1_proving_utils.hpp",
65
66
  ]
66
67
  public_header_files += ["ios/proving/*.h"]
67
68
 
@@ -124,7 +125,7 @@ Pod::Spec.new do |s|
124
125
 
125
126
  s.frameworks = ["QuickLook"]
126
127
  s.vendored_frameworks = vendored_frameworks
127
- s.dependency "uniffi-bindgen-react-native", "0.29.3-1"
128
+ s.dependency "uniffi-bindgen-react-native", "0.30.0-1"
128
129
 
129
130
  # Harbeth: GPU-accelerated image/video/camera filter library.
130
131
  s.dependency "Harbeth", "~> 1.1"
@@ -486,7 +486,7 @@ class Zcam1CameraService {
486
486
  val startResult = WritableNativeMap().apply {
487
487
  putString("status", "recording")
488
488
  putString("filePath", outputFile.absolutePath)
489
- putString("format", "mov")
489
+ putString("format", "mp4")
490
490
  putBoolean("hasAudio", hasAudio)
491
491
  }
492
492
  promise.resolve(startResult)
@@ -518,7 +518,7 @@ class Zcam1CameraService {
518
518
 
519
519
  return WritableNativeMap().apply {
520
520
  putString("filePath", outputFile.absolutePath)
521
- putString("format", "mov")
521
+ putString("format", "mp4")
522
522
  putBoolean("hasAudio", hasAudio)
523
523
  putString("deviceMake", Build.MANUFACTURER)
524
524
  putString("deviceModel", Build.MODEL)
@@ -534,7 +534,7 @@ class Zcam1CameraService {
534
534
  Log.e(TAG, "Failed to extract video metadata", e)
535
535
  return WritableNativeMap().apply {
536
536
  putString("filePath", outputFile.absolutePath)
537
- putString("format", "mov")
537
+ putString("format", "mp4")
538
538
  putBoolean("hasAudio", hasAudio)
539
539
  putString("deviceMake", Build.MANUFACTURER)
540
540
  putString("deviceModel", Build.MODEL)
@@ -6,14 +6,18 @@
6
6
  //
7
7
 
8
8
  import AVFoundation
9
+ import AVKit
9
10
  import CoreMotion
10
11
  import Foundation
11
12
  import Harbeth
12
13
  import ImageIO
13
14
  import MetalKit
14
15
  import MobileCoreServices
16
+ import os.log
15
17
  import UIKit
16
18
 
19
+ private let zcam1Log = OSLog(subsystem: "com.succinct.zcam1", category: "Camera")
20
+
17
21
  // MARK: - Motion Manager (Singleton for orientation detection)
18
22
 
19
23
  /// Singleton motion manager that provides non-blocking 4-way orientation detection.
@@ -223,6 +227,25 @@ final class Zcam1MotionManager {
223
227
  }
224
228
  }
225
229
 
230
+ // MARK: - UIImage.Orientation from EXIF
231
+
232
+ extension UIImage.Orientation {
233
+ /// Convert a CGImagePropertyOrientation integer (EXIF 1-8) to UIImage.Orientation.
234
+ static func from(cgImageOrientation value: Int) -> UIImage.Orientation {
235
+ switch value {
236
+ case 1: return .up
237
+ case 2: return .upMirrored
238
+ case 3: return .down
239
+ case 4: return .downMirrored
240
+ case 5: return .leftMirrored
241
+ case 6: return .right
242
+ case 7: return .rightMirrored
243
+ case 8: return .left
244
+ default: return .up
245
+ }
246
+ }
247
+ }
248
+
226
249
  // MARK: - Orientation Helpers
227
250
 
228
251
  /// Convert AVCaptureVideoOrientation to a JS-friendly string.
@@ -368,6 +391,9 @@ private final class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegat
368
391
  "[PhotoCaptureDelegate] extracting depthData (includeDepthData=\(includeDepthData))...")
369
392
  let depthDataSnapshot: AVDepthData? = includeDepthData ? photo.depthData : nil
370
393
  print("[PhotoCaptureDelegate] depthData present: \(depthDataSnapshot != nil)")
394
+ os_log(.info, log: zcam1Log, "DEPTH_DIAG capture result: includeDepthData=%{public}@, photo.depthData=%{public}@",
395
+ String(describing: includeDepthData),
396
+ depthDataSnapshot != nil ? "PRESENT" : "NIL")
371
397
 
372
398
  // Process synchronously on the current queue to avoid closure capture issues.
373
399
  // The AVCapturePhotoOutput callback queue can handle this work.
@@ -427,10 +453,32 @@ private final class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegat
427
453
 
428
454
  // Extract depth data only when requested (and when available).
429
455
  var depthData: [String: Any]? = nil
456
+ var depthHeatMapPath: String? = nil
457
+ var depthRawHash: String? = nil
430
458
  if self.includeDepthData, let depthDataSnapshot = depthDataSnapshot {
431
459
  print("[PhotoCaptureDelegate] processing depth data...")
432
460
  depthData = Zcam1DepthDataProcessor.processDepthData(depthDataSnapshot)
433
461
  print("[PhotoCaptureDelegate] depth data processed")
462
+
463
+ // Generate depth heat map (Turbo-colorized JPEG at native depth resolution).
464
+ // Pass photo orientation so heatmap matches the photo's display rotation.
465
+ let exifOrientation = metadata[kCGImagePropertyOrientation as String] as? Int ?? 1
466
+ let photoOrientation = UIImage.Orientation.from(cgImageOrientation: exifOrientation)
467
+ print("[PhotoCaptureDelegate] generating depth heat map (exifOrientation=\(exifOrientation))...")
468
+ if let heatMapResult = Zcam1DepthDataProcessor.generateHeatMap(from: depthDataSnapshot, photoOrientation: photoOrientation) {
469
+ let heatMapFilename = "zcam1-depth-\(UUID().uuidString).jpg"
470
+ let heatMapURL = FileManager.default.temporaryDirectory.appendingPathComponent(heatMapFilename)
471
+ do {
472
+ try heatMapResult.jpegData.write(to: heatMapURL, options: [.atomic])
473
+ depthHeatMapPath = heatMapURL.path
474
+ depthRawHash = heatMapResult.rawHash
475
+ print("[PhotoCaptureDelegate] depth heat map written: \(heatMapResult.jpegData.count) bytes")
476
+ } catch {
477
+ print("[PhotoCaptureDelegate] WARNING: failed to write depth heat map: \(error)")
478
+ }
479
+ } else {
480
+ print("[PhotoCaptureDelegate] depth heat map generation returned nil (no valid depth pixels)")
481
+ }
434
482
  }
435
483
 
436
484
  var result: [String: Any] = [
@@ -444,6 +492,23 @@ private final class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegat
444
492
  result["depthData"] = depthData
445
493
  }
446
494
 
495
+ // Include depth heat map path and raw hash if generated.
496
+ if let depthHeatMapPath = depthHeatMapPath {
497
+ result["depthHeatMapPath"] = depthHeatMapPath
498
+ }
499
+ if let depthRawHash = depthRawHash {
500
+ result["depthRawHash"] = depthRawHash
501
+ }
502
+
503
+ // Depth diagnostics for troubleshooting (visible in JS console).
504
+ if let owner = self.owner {
505
+ var diag = owner.depthDiagnostics()
506
+ diag["includeDepthData"] = self.includeDepthData
507
+ diag["photoDepthDataPresent"] = depthDataSnapshot != nil
508
+ diag["depthHeatMapGenerated"] = depthHeatMapPath != nil
509
+ result["_depthDiag"] = diag
510
+ }
511
+
447
512
  print("[PhotoCaptureDelegate] calling completion on main thread...")
448
513
  DispatchQueue.main.async { [self] in
449
514
  self.callCompletion(result: result as NSDictionary, error: nil)
@@ -836,9 +901,13 @@ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleB
836
901
  if connection.isVideoMirroringSupported {
837
902
  connection.isVideoMirrored = (self.currentPosition == .front)
838
903
  }
904
+ // Enable video stabilization to reduce hand-shake in both preview and recording.
905
+ if connection.isVideoStabilizationSupported {
906
+ connection.preferredVideoStabilizationMode = .standard
907
+ }
839
908
  session.commitConfiguration()
840
909
  print(
841
- "[Zcam1CameraService] configureVideoDataOutput: reconfigured connection for position=\(self.currentPosition == .front ? "front" : "back"), mirrored=\(connection.isVideoMirrored)"
910
+ "[Zcam1CameraService] configureVideoDataOutput: reconfigured connection for position=\(self.currentPosition == .front ? "front" : "back"), mirrored=\(connection.isVideoMirrored), stabilization=\(connection.activeVideoStabilizationMode.rawValue)"
842
911
  )
843
912
  }
844
913
 
@@ -875,8 +944,12 @@ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleB
875
944
  if connection.isVideoMirroringSupported {
876
945
  connection.isVideoMirrored = (self.currentPosition == .front)
877
946
  }
947
+ // Enable video stabilization to reduce hand-shake in both preview and recording.
948
+ if connection.isVideoStabilizationSupported {
949
+ connection.preferredVideoStabilizationMode = .standard
950
+ }
878
951
  print(
879
- "[Zcam1CameraService] configureVideoDataOutput: connection configured, isActive=\(connection.isActive), isEnabled=\(connection.isEnabled), mirrored=\(connection.isVideoMirrored)"
952
+ "[Zcam1CameraService] configureVideoDataOutput: connection configured, isActive=\(connection.isActive), isEnabled=\(connection.isEnabled), mirrored=\(connection.isVideoMirrored), stabilization=\(connection.activeVideoStabilizationMode.rawValue)"
880
953
  )
881
954
  } else {
882
955
  print(
@@ -1138,11 +1211,16 @@ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleB
1138
1211
  // Mirror front camera photos to match the preview (native iOS selfie behavior).
1139
1212
  // The photo output has a separate AVCaptureConnection from the video data output,
1140
1213
  // so mirroring must be configured independently on each.
1141
- if let photoConnection = self.photoOutput.connection(with: .video),
1142
- photoConnection.isVideoMirroringSupported {
1143
- photoConnection.automaticallyAdjustsVideoMirroring = false
1144
- photoConnection.isVideoMirrored = (position == .front)
1145
- print("[Zcam1CameraService] photo output mirrored=\(photoConnection.isVideoMirrored) for position=\(position == .front ? "front" : "back")")
1214
+ if let photoConnection = self.photoOutput.connection(with: .video) {
1215
+ if photoConnection.isVideoMirroringSupported {
1216
+ photoConnection.automaticallyAdjustsVideoMirroring = false
1217
+ photoConnection.isVideoMirrored = (position == .front)
1218
+ }
1219
+ // Enable video stabilization on the photo output connection.
1220
+ if photoConnection.isVideoStabilizationSupported {
1221
+ photoConnection.preferredVideoStabilizationMode = .standard
1222
+ }
1223
+ print("[Zcam1CameraService] photo output mirrored=\(photoConnection.isVideoMirrored) for position=\(position == .front ? "front" : "back"), stabilization=\(photoConnection.activeVideoStabilizationMode.rawValue)")
1146
1224
  }
1147
1225
 
1148
1226
  // Audio input/output setup is deferred until recording starts.
@@ -1167,7 +1245,12 @@ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleB
1167
1245
  // - Enable at the output level based on the depthEnabled parameter.
1168
1246
  // - When enabled, prewarm the pipeline via setPreparedPhotoSettingsArray.
1169
1247
  // - Note: Enabling depth restricts zoom on dual-camera devices.
1170
- if self.photoOutput.isDepthDataDeliverySupported {
1248
+ let depthSupported = self.photoOutput.isDepthDataDeliverySupported
1249
+ os_log(.info, log: zcam1Log, "DEPTH_DIAG session config: depthEnabled=%{public}@, isDepthDataDeliverySupported=%{public}@, preset=%{public}@, device=%{public}@",
1250
+ String(describing: depthEnabled), String(describing: depthSupported),
1251
+ session.sessionPreset.rawValue,
1252
+ self.videoInput?.device.localizedName ?? "nil")
1253
+ if depthSupported {
1171
1254
  self.photoOutput.isDepthDataDeliveryEnabled = depthEnabled
1172
1255
  self.depthEnabledAtSessionLevel = depthEnabled
1173
1256
  } else {
@@ -1347,6 +1430,18 @@ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleB
1347
1430
  return photoOutput.isDepthDataDeliverySupported
1348
1431
  }
1349
1432
 
1433
+ /// Returns depth diagnostic info for troubleshooting.
1434
+ public func depthDiagnostics() -> [String: Any] {
1435
+ return [
1436
+ "depthEnabledAtSessionLevel": depthEnabledAtSessionLevel,
1437
+ "isDepthDataDeliverySupported": photoOutput.isDepthDataDeliverySupported,
1438
+ "isDepthDataDeliveryEnabled": photoOutput.isDepthDataDeliveryEnabled,
1439
+ "deviceType": videoInput?.device.deviceType.rawValue ?? "unknown",
1440
+ "deviceName": videoInput?.device.localizedName ?? "unknown",
1441
+ "sessionPreset": captureSession?.sessionPreset.rawValue ?? "none",
1442
+ ]
1443
+ }
1444
+
1350
1445
  /// Check if enabling depth would restrict zoom on this device.
1351
1446
  /// Returns true if zoom is limited to discrete levels (min == max in all ranges).
1352
1447
  /// This typically happens on dual-camera devices (iPhone 12-16 base).
@@ -1848,6 +1943,10 @@ public final class Zcam1CameraService: NSObject, AVCaptureAudioDataOutputSampleB
1848
1943
  print(
1849
1944
  "[Zcam1CameraService] takePhoto: isDepthDataDeliveryEnabled=\(self.photoOutput.isDepthDataDeliveryEnabled)"
1850
1945
  )
1946
+ os_log(.info, log: zcam1Log, "DEPTH_DIAG takePhoto: output.isDepthDataDeliveryEnabled=%{public}@, depthEnabledAtSessionLevel=%{public}@, includeDepthData=%{public}@",
1947
+ String(describing: self.photoOutput.isDepthDataDeliveryEnabled),
1948
+ String(describing: self.depthEnabledAtSessionLevel),
1949
+ String(describing: includeDepthData))
1851
1950
  if self.photoOutput.isDepthDataDeliveryEnabled {
1852
1951
  print(
1853
1952
  "[Zcam1CameraService] takePhoto: setting settings.isDepthDataDeliveryEnabled=\(includeDepthData)"
@@ -2740,6 +2839,7 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2740
2839
  public var isActive: Bool = true {
2741
2840
  didSet {
2742
2841
  updateRunningState()
2842
+ updateCaptureEventInteraction()
2743
2843
  }
2744
2844
  }
2745
2845
 
@@ -2824,9 +2924,26 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2824
2924
  /// Sends a dictionary with "orientation" key ("portrait", "landscapeLeft", "landscapeRight", "portraitUpsideDown").
2825
2925
  public var onOrientationChange: (([String: Any]) -> Void)?
2826
2926
 
2927
+ /// Callback fired when a hardware capture button is pressed (volume buttons or Camera Control).
2928
+ /// Sends a dictionary with "action" key ("photo" for shutter press, "focus" for light press on Camera Control).
2929
+ public var onHardwareShutter: (([String: Any]) -> Void)?
2930
+
2931
+ /// Whether hardware buttons (volume buttons, Camera Control) trigger capture events.
2932
+ /// When true, volume buttons fire onHardwareShutter and the system volume HUD is suppressed.
2933
+ /// Requires iOS 17.2+. Defaults to true.
2934
+ public var hardwareShutterEnabled: Bool = true {
2935
+ didSet {
2936
+ updateCaptureEventInteraction()
2937
+ }
2938
+ }
2939
+
2827
2940
  /// Token for this view's motion manager listener, used for cleanup in deinit.
2828
2941
  private var orientationListenerToken: Int?
2829
2942
 
2943
+ /// Stored reference for the capture event interaction (volume + Camera Control buttons).
2944
+ /// Typed as Any to keep the stored property available on iOS 16.0; only instantiated on iOS 17.2+.
2945
+ private var captureEventInteractionStorage: Any?
2946
+
2830
2947
  // Preview rendering — Metal-backed view for GPU-only frame display.
2831
2948
  private let metalPreviewView = MetalPreviewView()
2832
2949
 
@@ -2835,6 +2952,8 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2835
2952
  private let videoDataQueue = DispatchQueue(label: "com.zcam1.videodata", qos: .userInteractive)
2836
2953
  private var currentFilmStyleEnum: Zcam1CameraFilmStyle = .normal
2837
2954
  private var currentFilmStyleRecipe: [[String: Any]]?
2955
+ /// Lock for thread-safe access to currentFilmStyleRecipe (written on main, read on capture queue).
2956
+ private let filmStyleLock = NSLock()
2838
2957
  private var frameCount: Int = 0
2839
2958
 
2840
2959
  // Flag to skip frames during camera reconfiguration to avoid showing incorrectly mirrored frames.
@@ -2864,6 +2983,9 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2864
2983
  callback(["orientation": orientationToString(orientation)])
2865
2984
  }
2866
2985
 
2986
+ // Setup hardware button capture event interaction (iOS 17.2+).
2987
+ setupCaptureEventInteraction()
2988
+
2867
2989
  // Configure session and start receiving frames.
2868
2990
  reconfigureSession()
2869
2991
  }
@@ -2873,6 +2995,13 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2873
2995
  if let token = orientationListenerToken {
2874
2996
  Zcam1MotionManager.shared.removeListener(token)
2875
2997
  }
2998
+
2999
+ // Remove the capture event interaction to stop intercepting hardware buttons.
3000
+ if #available(iOS 17.2, *) {
3001
+ if let interaction = captureEventInteractionStorage as? AVCaptureEventInteraction {
3002
+ self.removeInteraction(interaction)
3003
+ }
3004
+ }
2876
3005
  }
2877
3006
 
2878
3007
  public override func layoutSubviews() {
@@ -2880,6 +3009,36 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2880
3009
  metalPreviewView.frame = bounds
2881
3010
  }
2882
3011
 
3012
+ // MARK: - Hardware Shutter (Volume Buttons + Camera Control)
3013
+
3014
+ /// Configures AVCaptureEventInteraction for hardware shutter support.
3015
+ /// Volume buttons and Camera Control (iPhone 16) will fire onHardwareShutter events.
3016
+ /// The system volume HUD is automatically suppressed while the interaction is active.
3017
+ private func setupCaptureEventInteraction() {
3018
+ guard #available(iOS 17.2, *) else { return }
3019
+
3020
+ let interaction = AVCaptureEventInteraction { [weak self] event in
3021
+ // All hardware capture events (volume up, volume down, Camera Control) fire here.
3022
+ guard event.phase == .ended else { return }
3023
+ guard let callback = self?.onHardwareShutter else { return }
3024
+ DispatchQueue.main.async {
3025
+ callback(["action": "photo"])
3026
+ }
3027
+ }
3028
+
3029
+ interaction.isEnabled = hardwareShutterEnabled && isActive
3030
+ self.addInteraction(interaction)
3031
+ captureEventInteractionStorage = interaction
3032
+ }
3033
+
3034
+ /// Updates the capture event interaction's enabled state based on hardwareShutterEnabled and isActive.
3035
+ private func updateCaptureEventInteraction() {
3036
+ guard #available(iOS 17.2, *),
3037
+ let interaction = captureEventInteractionStorage as? AVCaptureEventInteraction
3038
+ else { return }
3039
+ interaction.isEnabled = hardwareShutterEnabled && isActive
3040
+ }
3041
+
2883
3042
  // MARK: - Film Style Resolution
2884
3043
 
2885
3044
  /// Resolves and applies the current film style, checking overrides and custom film styles first.
@@ -2890,7 +3049,9 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2890
3049
  let recipe = overrides[filmStyle] {
2891
3050
  print("[Zcam1CameraView] Using film style override for '\(filmStyle)'")
2892
3051
  // Store recipe for per-frame CIFilter creation on the capture queue (thread-safe).
3052
+ filmStyleLock.lock()
2893
3053
  currentFilmStyleRecipe = recipe
3054
+ filmStyleLock.unlock()
2894
3055
  // Harbeth chain for capture/recording via the service.
2895
3056
  let harbethFilters = Zcam1CameraFilmStyle.createFilmStyles(from: recipe)
2896
3057
  currentFilmStyleEnum = .normal
@@ -2902,7 +3063,9 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2902
3063
  if let custom = customFilmStyles as? [String: [[String: Any]]],
2903
3064
  let recipe = custom[filmStyle] {
2904
3065
  print("[Zcam1CameraView] Using custom film style '\(filmStyle)'")
3066
+ filmStyleLock.lock()
2905
3067
  currentFilmStyleRecipe = recipe
3068
+ filmStyleLock.unlock()
2906
3069
  let harbethFilters = Zcam1CameraFilmStyle.createFilmStyles(from: recipe)
2907
3070
  currentFilmStyleEnum = .normal
2908
3071
  Zcam1CameraService.shared.setCustomFilmStyles(harbethFilters)
@@ -2910,7 +3073,9 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2910
3073
  }
2911
3074
 
2912
3075
  // Fall back to no film style (JS SDK provides all built-in recipes via filmStyleOverrides).
3076
+ filmStyleLock.lock()
2913
3077
  currentFilmStyleRecipe = nil
3078
+ filmStyleLock.unlock()
2914
3079
  currentFilmStyleEnum = .normal
2915
3080
  Zcam1CameraService.shared.setFilmStyle(.normal)
2916
3081
  }
@@ -2956,7 +3121,10 @@ public final class Zcam1CameraView: UIView, AVCaptureVideoDataOutputSampleBuffer
2956
3121
  // Apply film style CIFilters if configured (GPU pipeline, lazy evaluation).
2957
3122
  // Filters are created fresh per-frame from the stored recipe to avoid cross-thread
2958
3123
  // mutation of CIFilter instances (recipe is set on main thread, read here on capture queue).
2959
- if let recipe = currentFilmStyleRecipe {
3124
+ filmStyleLock.lock()
3125
+ let recipe = currentFilmStyleRecipe
3126
+ filmStyleLock.unlock()
3127
+ if let recipe = recipe {
2960
3128
  let filters = Zcam1CameraFilmStyle.createCIFilters(from: recipe)
2961
3129
  ciImage = Zcam1CameraFilmStyle.applyCIFilters(filters, to: ciImage)
2962
3130
  }
@@ -211,9 +211,21 @@ public enum Zcam1CameraFilmStyle: String, CaseIterable {
211
211
  let temp = config["temperature"] as? Float {
212
212
  let tint = config["tint"] as? Float ?? 0
213
213
  if let filter = CIFilter(name: "CITemperatureAndTint") {
214
+ // Convert Harbeth C7WhiteBalance temperature (4000-7000K, neutral 5000K)
215
+ // to a CIFilter Kelvin offset from D65 (6500K).
216
+ // Harbeth mix factor: temp < 5000 ? 0.0004*(temp-5000) : 0.00006*(temp-5000).
217
+ // Scale this factor to a perceptually similar CIFilter Kelvin offset.
218
+ let harbethFactor: Float = temp < 5000
219
+ ? 0.0004 * (temp - 5000)
220
+ : 0.00006 * (temp - 5000)
221
+ let targetTemp = CGFloat(6500.0 + harbethFactor * 5000.0)
222
+ // Harbeth tint (-200 to 200) applies a subtle YIQ shift.
223
+ // CITemperatureAndTint tint operates in a different perceptual space.
224
+ let targetTint = CGFloat(tint * 0.5)
225
+
214
226
  filter.setValue(CIVector(x: 6500, y: 0), forKey: "inputNeutral")
215
227
  filter.setValue(
216
- CIVector(x: CGFloat(6500 + temp * 100), y: CGFloat(tint * 100)),
228
+ CIVector(x: targetTemp, y: targetTint),
217
229
  forKey: "inputTargetNeutral"
218
230
  )
219
231
  filters.append(filter)
@@ -245,7 +257,11 @@ public enum Zcam1CameraFilmStyle: String, CaseIterable {
245
257
  let highlights = config["highlights"] as? Float,
246
258
  let shadows = config["shadows"] as? Float {
247
259
  if let filter = CIFilter(name: "CIHighlightShadowAdjust") {
248
- filter.setValue(NSNumber(value: highlights), forKey: "inputHighlightAmount")
260
+ // Harbeth C7HighlightShadow: highlights 0=no change, 1=darken highlights.
261
+ // CIHighlightShadowAdjust: inputHighlightAmount 1=no change, 0=darken.
262
+ // Clamp to [0,1] to match Harbeth's @ZeroOneRange, then invert.
263
+ let clampedHighlights = min(max(highlights, 0), 1)
264
+ filter.setValue(NSNumber(value: 1.0 - clampedHighlights), forKey: "inputHighlightAmount")
249
265
  filter.setValue(NSNumber(value: shadows), forKey: "inputShadowAmount")
250
266
  filters.append(filter)
251
267
  }
@@ -50,6 +50,8 @@ RCT_EXPORT_MODULE(Zcam1CameraView);
50
50
  // @property (nonatomic, copy) NSDictionary *customFilmStyles; // additional custom film styles by name
51
51
  // @property (nonatomic) BOOL depthEnabled; // enable depth data at session level (default: NO)
52
52
  // @property (nonatomic, copy) RCTDirectEventBlock onOrientationChange; // orientation change callback
53
+ // @property (nonatomic) BOOL hardwareShutterEnabled; // enable hardware button capture (default: YES)
54
+ // @property (nonatomic, copy) RCTDirectEventBlock onHardwareShutter; // hardware shutter callback
53
55
  RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL);
54
56
  RCT_EXPORT_VIEW_PROPERTY(position, NSString);
55
57
  RCT_EXPORT_VIEW_PROPERTY(captureFormat, NSString);
@@ -58,6 +60,8 @@ RCT_EXPORT_VIEW_PROPERTY(torch, BOOL);
58
60
  RCT_EXPORT_VIEW_PROPERTY(exposure, float);
59
61
  RCT_EXPORT_VIEW_PROPERTY(depthEnabled, BOOL);
60
62
  RCT_EXPORT_VIEW_PROPERTY(onOrientationChange, RCTDirectEventBlock);
63
+ RCT_EXPORT_VIEW_PROPERTY(hardwareShutterEnabled, BOOL);
64
+ RCT_EXPORT_VIEW_PROPERTY(onHardwareShutter, RCTDirectEventBlock);
61
65
 
62
66
  // Use custom property setter to ensure the Swift setter is called properly.
63
67
  RCT_CUSTOM_VIEW_PROPERTY(filmStyle, NSString, Zcam1CameraView)