@succinctlabs/react-native-zcam1 0.2.7 → 0.3.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/ios/Zcam1Camera.swift +177 -9
  2. package/ios/Zcam1CameraFilmStyle.swift +18 -2
  3. package/ios/Zcam1CameraViewManager.m +4 -0
  4. package/ios/Zcam1DepthData.swift +219 -286
  5. package/lib/module/NativeZcam1Capture.js.map +1 -1
  6. package/lib/module/camera.js +113 -15
  7. package/lib/module/camera.js.map +1 -1
  8. package/lib/module/capture.js +21 -3
  9. package/lib/module/capture.js.map +1 -1
  10. package/lib/module/common.js +3 -2
  11. package/lib/module/common.js.map +1 -1
  12. package/lib/module/generated/zcam1_c2pa_utils.js +85 -6
  13. package/lib/module/generated/zcam1_c2pa_utils.js.map +1 -1
  14. package/lib/module/generated/zcam1_verify_utils.js +80 -3
  15. package/lib/module/generated/zcam1_verify_utils.js.map +1 -1
  16. package/lib/module/index.js +1 -1
  17. package/lib/module/index.js.map +1 -1
  18. package/lib/module/utils.js +5 -4
  19. package/lib/module/utils.js.map +1 -1
  20. package/lib/typescript/src/NativeZcam1Capture.d.ts +10 -0
  21. package/lib/typescript/src/NativeZcam1Capture.d.ts.map +1 -1
  22. package/lib/typescript/src/camera.d.ts +36 -0
  23. package/lib/typescript/src/camera.d.ts.map +1 -1
  24. package/lib/typescript/src/capture.d.ts +8 -1
  25. package/lib/typescript/src/capture.d.ts.map +1 -1
  26. package/lib/typescript/src/common.d.ts.map +1 -1
  27. package/lib/typescript/src/generated/zcam1_c2pa_utils.d.ts +60 -0
  28. package/lib/typescript/src/generated/zcam1_c2pa_utils.d.ts.map +1 -1
  29. package/lib/typescript/src/generated/zcam1_verify_utils.d.ts +134 -3
  30. package/lib/typescript/src/generated/zcam1_verify_utils.d.ts.map +1 -1
  31. package/lib/typescript/src/index.d.ts +2 -2
  32. package/lib/typescript/src/index.d.ts.map +1 -1
  33. package/lib/typescript/src/utils.d.ts +1 -1
  34. package/lib/typescript/src/utils.d.ts.map +1 -1
  35. package/package.json +2 -1
  36. package/src/NativeZcam1Capture.ts +12 -0
  37. package/src/camera.tsx +179 -9
  38. package/src/capture.tsx +30 -3
  39. package/src/common.tsx +3 -2
  40. package/src/generated/zcam1_c2pa_utils.ts +126 -3
  41. package/src/generated/zcam1_verify_utils.ts +92 -3
  42. package/src/index.ts +2 -1
  43. package/src/utils.ts +7 -3
@@ -2,12 +2,16 @@
2
2
  // Zcam1DepthData.swift
3
3
  // react-native-zcam1-sdk
4
4
  //
5
- // Depth data extraction and processing using AVDepthData.
5
+ // Depth data extraction, processing, and heat map generation using AVDepthData.
6
+ //
7
+ // All sensor types (LiDAR, TrueDepth, dual camera) are converted to Float32 depth
8
+ // at a single boundary via AVDepthData.converting(toDepthDataType:). Downstream
9
+ // code never branches on pixel format.
6
10
  //
7
11
 
8
12
  import AVFoundation
13
+ import CommonCrypto
9
14
  import Foundation
10
- import ImageIO
11
15
  import UIKit
12
16
 
13
17
  // MARK: - Depth Data Processor
@@ -15,6 +19,8 @@ import UIKit
15
19
  /// Handles extraction, conversion, and serialization of depth data from AVDepthData.
16
20
  public class Zcam1DepthDataProcessor {
17
21
 
22
+ // MARK: - Public API
23
+
18
24
  /// Extract depth data from an AVCapturePhoto if available.
19
25
  /// Returns a dictionary containing depth information or nil if not available.
20
26
  public static func extractDepthData(from photo: AVCapturePhoto) -> [String: Any]? {
@@ -26,24 +32,28 @@ public class Zcam1DepthDataProcessor {
26
32
  }
27
33
 
28
34
  /// Process AVDepthData and return a dictionary with depth information.
35
+ ///
36
+ /// Converts any sensor format to Float32 depth at the boundary, then computes
37
+ /// statistics using a single code path.
29
38
  public static func processDepthData(_ depthData: AVDepthData) -> [String: Any] {
30
- let depthDataMap = depthData.depthDataMap
39
+ // Record the original pixel format for metadata before conversion.
40
+ let originalPixelFormat = CVPixelBufferGetPixelFormatType(depthData.depthDataMap)
41
+ let pixelFormatString = pixelFormatTypeToString(originalPixelFormat)
42
+
43
+ // Unify all sensor formats to Float32 depth.
44
+ let unified = convertToFloat32Depth(depthData)
45
+ let depthDataMap = unified.depthDataMap
31
46
 
32
- // Get depth data dimensions
33
47
  let width = CVPixelBufferGetWidth(depthDataMap)
34
48
  let height = CVPixelBufferGetHeight(depthDataMap)
35
49
 
36
- // Get pixel format type
37
- let pixelFormatType = CVPixelBufferGetPixelFormatType(depthDataMap)
38
- let pixelFormatString = pixelFormatTypeToString(pixelFormatType)
39
-
40
- // Extract depth statistics
50
+ // Extract depth statistics from the unified Float32 buffer.
41
51
  let statistics = extractDepthStatistics(from: depthDataMap)
42
52
 
43
- // Get accuracy if available (iOS 14.1+)
53
+ // Get accuracy if available (iOS 14.1+).
44
54
  var accuracyString = "relative"
45
55
  if #available(iOS 14.1, *) {
46
- switch depthData.depthDataAccuracy {
56
+ switch unified.depthDataAccuracy {
47
57
  case .relative:
48
58
  accuracyString = "relative"
49
59
  case .absolute:
@@ -53,34 +63,158 @@ public class Zcam1DepthDataProcessor {
53
63
  }
54
64
  }
55
65
 
56
- var result: [String: Any] = [
66
+ return [
57
67
  "width": width,
58
68
  "height": height,
59
69
  "pixelFormat": pixelFormatString,
60
70
  "statistics": statistics,
61
71
  "accuracy": accuracyString,
62
72
  ]
73
+ }
74
+
75
+ /// Generate a Turbo-colorized depth heat map JPEG and a SHA-256 hash of the raw depth buffer.
76
+ ///
77
+ /// Returns `(jpegData, rawHash)` where:
78
+ /// - `jpegData`: JPEG-encoded heat map at the depth sensor's native resolution
79
+ /// - `rawHash`: hex-encoded SHA-256 of the raw Float32 depth buffer
80
+ ///
81
+ /// The `photoOrientation` parameter should be the UIImage.Orientation of the captured photo
82
+ /// so the heatmap matches the photo's display orientation (depth sensor data is always in
83
+ /// native landscape orientation).
84
+ ///
85
+ /// Returns nil if the depth data contains no valid pixels.
86
+ public static func generateHeatMap(
87
+ from depthData: AVDepthData,
88
+ photoOrientation: UIImage.Orientation = .up
89
+ ) -> (jpegData: Data, rawHash: String)? {
90
+ // Unify to Float32 depth.
91
+ let unified = convertToFloat32Depth(depthData)
92
+ let depthDataMap = unified.depthDataMap
93
+
94
+ let width = CVPixelBufferGetWidth(depthDataMap)
95
+ let height = CVPixelBufferGetHeight(depthDataMap)
96
+
97
+ CVPixelBufferLockBaseAddress(depthDataMap, .readOnly)
98
+ defer { CVPixelBufferUnlockBaseAddress(depthDataMap, .readOnly) }
99
+
100
+ guard let baseAddress = CVPixelBufferGetBaseAddress(depthDataMap) else {
101
+ return nil
102
+ }
103
+
104
+ let bytesPerRow = CVPixelBufferGetBytesPerRow(depthDataMap)
105
+ let floatBuffer = baseAddress.assumingMemoryBound(to: Float32.self)
106
+ let rowStride = bytesPerRow / MemoryLayout<Float32>.stride
107
+
108
+ // Compute SHA-256 of the raw Float32 pixel data (excluding row padding).
109
+ let pixelBytesPerRow = width * MemoryLayout<Float32>.stride
110
+ let rawHash: String
111
+ if pixelBytesPerRow == bytesPerRow {
112
+ // No padding — hash the entire buffer directly.
113
+ rawHash = sha256Hex(data: baseAddress, count: height * bytesPerRow)
114
+ } else {
115
+ // Strip per-row padding by copying only pixel data into a contiguous buffer.
116
+ var contiguous = Data(capacity: height * pixelBytesPerRow)
117
+ for y in 0..<height {
118
+ let rowStart = baseAddress.advanced(by: y * bytesPerRow)
119
+ contiguous.append(Data(bytes: rowStart, count: pixelBytesPerRow))
120
+ }
121
+ rawHash = contiguous.withUnsafeBytes { sha256Hex(data: $0.baseAddress!, count: contiguous.count) }
122
+ }
123
+
124
+ // Extract all values and find min/max.
125
+ var minVal: Float = .infinity
126
+ var maxVal: Float = -.infinity
127
+ var hasValidPixel = false
128
+
129
+ for y in 0..<height {
130
+ let rowBase = y * rowStride
131
+ for x in 0..<width {
132
+ let v = floatBuffer[rowBase + x]
133
+ if v.isFinite {
134
+ hasValidPixel = true
135
+ minVal = min(minVal, v)
136
+ maxVal = max(maxVal, v)
137
+ }
138
+ }
139
+ }
140
+
141
+ guard hasValidPixel else { return nil }
142
+
143
+ let range = maxVal - minVal
144
+
145
+ // Build RGBA pixel data using the Turbo colormap.
146
+ var rgbaData = [UInt8](repeating: 0, count: width * height * 4)
147
+ for y in 0..<height {
148
+ let rowBase = y * rowStride
149
+ for x in 0..<width {
150
+ let v = floatBuffer[rowBase + x]
151
+ let pixelIndex = (y * width + x) * 4
152
+
153
+ let normalized: Float
154
+ if v.isFinite, range > 0 {
155
+ normalized = max(0, min(1, (v - minVal) / range))
156
+ } else if v.isFinite {
157
+ normalized = 0.5 // All pixels same depth
158
+ } else {
159
+ normalized = 0 // NaN/Inf → black (closest color)
160
+ }
161
+
162
+ let lutIndex = min(255, Int(normalized * 255))
163
+ let (r, g, b) = Self.turboLUT[lutIndex]
164
+ rgbaData[pixelIndex] = r
165
+ rgbaData[pixelIndex + 1] = g
166
+ rgbaData[pixelIndex + 2] = b
167
+ rgbaData[pixelIndex + 3] = 255
168
+ }
169
+ }
170
+
171
+ // Create CGImage from RGBA data.
172
+ let colorSpace = CGColorSpaceCreateDeviceRGB()
173
+ guard let context = CGContext(
174
+ data: &rgbaData,
175
+ width: width,
176
+ height: height,
177
+ bitsPerComponent: 8,
178
+ bytesPerRow: width * 4,
179
+ space: colorSpace,
180
+ bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
181
+ ), let cgImage = context.makeImage() else {
182
+ return nil
183
+ }
184
+
185
+ // Encode as JPEG, applying the photo's orientation so the heatmap matches.
186
+ let orientedImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: photoOrientation)
187
+ guard let jpegData = orientedImage.jpegData(compressionQuality: 0.85) else {
188
+ return nil
189
+ }
190
+
191
+ return (jpegData, rawHash)
192
+ }
193
+
194
+ // MARK: - Private Helpers
63
195
 
64
- return result
196
+ /// Convert any depth data format to Float32 depth at the boundary.
197
+ /// After this call, the depthDataMap is guaranteed to be Float32.
198
+ private static func convertToFloat32Depth(_ depthData: AVDepthData) -> AVDepthData {
199
+ let currentType = CVPixelBufferGetPixelFormatType(depthData.depthDataMap)
200
+ if currentType == kCVPixelFormatType_DepthFloat32 {
201
+ return depthData
202
+ }
203
+ return depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
65
204
  }
66
205
 
67
- /// Extract statistics (min, max, mean, stddev) from depth data.
206
+ /// Extract statistics (min, max, mean, stddev) from a Float32 depth buffer.
68
207
  ///
69
- /// Computing stats over every pixel can be expensive and can delay photo capture completion,
70
- /// especially when depth delivery is enabled. To keep capture responsive, this uses:
71
- /// - adaptive sampling (caps work to ~65k pixels)
72
- /// - a single-pass (Welford) accumulator (avoids large allocations)
208
+ /// Uses adaptive sampling (caps at ~65k pixels) and a single-pass Welford accumulator.
73
209
  private static func extractDepthStatistics(from depthDataMap: CVPixelBuffer) -> [String: Any] {
74
210
  let width = CVPixelBufferGetWidth(depthDataMap)
75
211
  let height = CVPixelBufferGetHeight(depthDataMap)
76
- let pixelFormatType = CVPixelBufferGetPixelFormatType(depthDataMap)
77
212
 
78
213
  // Cap work by sampling at most ~65k pixels.
79
214
  let totalPixels = max(1, width * height)
80
215
  let maxSamples = 65_536
81
216
  let stride: Int = {
82
217
  if totalPixels <= maxSamples { return 1 }
83
- // Choose a stride so that (w/stride)*(h/stride) ~= maxSamples.
84
218
  let scale = sqrt(Double(totalPixels) / Double(maxSamples))
85
219
  return max(1, Int(scale.rounded(.up)))
86
220
  }()
@@ -93,61 +227,38 @@ public class Zcam1DepthDataProcessor {
93
227
  }
94
228
 
95
229
  let bytesPerRow = CVPixelBufferGetBytesPerRow(depthDataMap)
230
+ let floatBuffer = baseAddress.assumingMemoryBound(to: Float32.self)
231
+ let rowStride = bytesPerRow / MemoryLayout<Float32>.stride
96
232
 
233
+ // Single-pass Welford accumulator.
97
234
  var count: Int = 0
98
235
  var mean: Double = 0
99
236
  var m2: Double = 0
100
237
  var minValue: Float = .infinity
101
238
  var maxValue: Float = -.infinity
102
239
 
103
- func accumulate(_ v: Float) {
104
- guard v.isFinite else { return }
105
- count += 1
106
- minValue = min(minValue, v)
107
- maxValue = max(maxValue, v)
108
-
109
- let x = Double(v)
110
- let delta = x - mean
111
- mean += delta / Double(count)
112
- let delta2 = x - mean
113
- m2 += delta * delta2
114
- }
115
-
116
- // Extract depth values based on pixel format (sampled).
117
- switch pixelFormatType {
118
- case kCVPixelFormatType_DepthFloat32, kCVPixelFormatType_DisparityFloat32:
119
- let floatBuffer = baseAddress.assumingMemoryBound(to: Float32.self)
120
- let rowStride = bytesPerRow / MemoryLayout<Float32>.stride
121
- var y = 0
122
- while y < height {
123
- let rowBase = y * rowStride
124
- var x = 0
125
- while x < width {
126
- accumulate(floatBuffer[rowBase + x])
127
- x += stride
128
- }
129
- y += stride
130
- }
131
-
132
- case kCVPixelFormatType_DepthFloat16, kCVPixelFormatType_DisparityFloat16:
133
- let float16Buffer = baseAddress.assumingMemoryBound(to: Float16.self)
134
- let rowStride = bytesPerRow / MemoryLayout<Float16>.stride
135
- var y = 0
136
- while y < height {
137
- let rowBase = y * rowStride
138
- var x = 0
139
- while x < width {
140
- accumulate(Float(float16Buffer[rowBase + x]))
141
- x += stride
240
+ var y = 0
241
+ while y < height {
242
+ let rowBase = y * rowStride
243
+ var x = 0
244
+ while x < width {
245
+ let v = floatBuffer[rowBase + x]
246
+ if v.isFinite {
247
+ count += 1
248
+ minValue = min(minValue, v)
249
+ maxValue = max(maxValue, v)
250
+
251
+ let d = Double(v)
252
+ let delta = d - mean
253
+ mean += delta / Double(count)
254
+ let delta2 = d - mean
255
+ m2 += delta * delta2
142
256
  }
143
- y += stride
257
+ x += stride
144
258
  }
145
-
146
- default:
147
- break
259
+ y += stride
148
260
  }
149
261
 
150
- // Calculate statistics
151
262
  guard count > 0 else {
152
263
  return [
153
264
  "min": "",
@@ -174,40 +285,6 @@ public class Zcam1DepthDataProcessor {
174
285
  ]
175
286
  }
176
287
 
177
- /// Extract camera calibration data.
178
- private static func extractCalibrationData(_ calibration: AVCameraCalibrationData) -> [String:
179
- Any]
180
- {
181
- var result: [String: Any] = [:]
182
-
183
- // Intrinsic matrix (3x3) - camera intrinsic parameters
184
- // Intrinsic matrix (3x3)
185
- let intrinsicMatrix = calibration.intrinsicMatrix
186
- result["intrinsicMatrix"] = [
187
- [intrinsicMatrix[0, 0], intrinsicMatrix[0, 1], intrinsicMatrix[0, 2]],
188
- [intrinsicMatrix[1, 0], intrinsicMatrix[1, 1], intrinsicMatrix[1, 2]],
189
- [intrinsicMatrix[2, 0], intrinsicMatrix[2, 1], intrinsicMatrix[2, 2]],
190
- ]
191
-
192
- // Extrinsic matrix (4x3)
193
- let extrinsicMatrix = calibration.extrinsicMatrix
194
- result["extrinsicMatrix"] = [
195
- [extrinsicMatrix[0, 0], extrinsicMatrix[0, 1], extrinsicMatrix[0, 2]],
196
- [extrinsicMatrix[1, 0], extrinsicMatrix[1, 1], extrinsicMatrix[1, 2]],
197
- [extrinsicMatrix[2, 0], extrinsicMatrix[2, 1], extrinsicMatrix[2, 2]],
198
- [extrinsicMatrix[3, 0], extrinsicMatrix[3, 1], extrinsicMatrix[3, 2]],
199
- ]
200
-
201
- // Lens distortion center
202
- let lensDistortionCenter = calibration.lensDistortionCenter
203
- result["lensDistortionCenter"] = [
204
- "x": lensDistortionCenter.x,
205
- "y": lensDistortionCenter.y,
206
- ]
207
-
208
- return result
209
- }
210
-
211
288
  /// Convert pixel format type to a human-readable string.
212
289
  private static func pixelFormatTypeToString(_ pixelFormatType: OSType) -> String {
213
290
  switch pixelFormatType {
@@ -224,194 +301,50 @@ public class Zcam1DepthDataProcessor {
224
301
  }
225
302
  }
226
303
 
227
- /// Encode depth data as a grayscale image for visualization.
228
- ///
229
- /// The output is encoded to match Google's GDepth `RangeInverse` convention:
230
- /// values are higher (brighter) for nearer pixels and lower (darker) for farther pixels.
231
- public static func encodeDepthDataAsImage(
232
- depthData: AVDepthData
233
- ) -> UIImage? {
234
- let depthDataMap = depthData.depthDataMap
235
- let width = CVPixelBufferGetWidth(depthDataMap)
236
- let height = CVPixelBufferGetHeight(depthDataMap)
237
-
238
- CVPixelBufferLockBaseAddress(depthDataMap, .readOnly)
239
- defer { CVPixelBufferUnlockBaseAddress(depthDataMap, .readOnly) }
240
-
241
- guard let baseAddress = CVPixelBufferGetBaseAddress(depthDataMap) else {
242
- return nil
243
- }
244
-
245
- let pixelFormatType = CVPixelBufferGetPixelFormatType(depthDataMap)
246
- let bytesPerRow = CVPixelBufferGetBytesPerRow(depthDataMap)
247
-
248
- // Extract depth values and find their range
249
- var depthValues: [Float] = []
250
- depthValues.reserveCapacity(width * height)
251
- var minDepth: Float = .infinity
252
- var maxDepth: Float = -.infinity
253
-
254
- switch pixelFormatType {
255
- case kCVPixelFormatType_DepthFloat32, kCVPixelFormatType_DisparityFloat32:
256
- let floatBuffer = baseAddress.assumingMemoryBound(to: Float32.self)
257
- for y in 0..<height {
258
- for x in 0..<width {
259
- let offset = y * (bytesPerRow / MemoryLayout<Float32>.stride) + x
260
- let value = floatBuffer[offset]
261
- if !value.isNaN && !value.isInfinite {
262
- depthValues.append(value)
263
- // Ignore non-positive values when establishing near/far planes,
264
- // since RangeInverse uses 1/depth.
265
- if value > 0 {
266
- minDepth = min(minDepth, value)
267
- maxDepth = max(maxDepth, value)
268
- }
269
- }
270
- }
271
- }
272
-
273
- case kCVPixelFormatType_DepthFloat16, kCVPixelFormatType_DisparityFloat16:
274
- let float16Buffer = baseAddress.assumingMemoryBound(to: Float16.self)
275
- for y in 0..<height {
276
- for x in 0..<width {
277
- let offset = y * (bytesPerRow / MemoryLayout<Float16>.stride) + x
278
- let value = Float(float16Buffer[offset])
279
- if !value.isNaN && !value.isInfinite {
280
- depthValues.append(value)
281
- // Ignore non-positive values when establishing near/far planes,
282
- // since RangeInverse uses 1/depth.
283
- if value > 0 {
284
- minDepth = min(minDepth, value)
285
- maxDepth = max(maxDepth, value)
286
- }
287
- }
288
- }
289
- }
290
-
291
- default:
292
- return nil
293
- }
294
-
295
- // Create grayscale image from normalized depth data
296
- guard !depthValues.isEmpty else { return nil }
297
-
298
- // If we never saw any positive finite values, near/far planes are not usable.
299
- if !minDepth.isFinite || !maxDepth.isFinite || minDepth <= 0 || maxDepth <= 0 {
300
- minDepth = 0
301
- maxDepth = 0
302
- }
303
-
304
- let depthRange = maxDepth - minDepth
305
- var pixelData = [UInt8]()
306
- pixelData.reserveCapacity(width * height)
307
-
308
- switch pixelFormatType {
309
- case kCVPixelFormatType_DepthFloat32, kCVPixelFormatType_DisparityFloat32:
310
- let floatBuffer = baseAddress.assumingMemoryBound(to: Float32.self)
311
- for y in 0..<height {
312
- for x in 0..<width {
313
- let offset = y * (bytesPerRow / MemoryLayout<Float32>.stride) + x
314
- let value = floatBuffer[offset]
315
- if value.isNaN || value.isInfinite || value <= 0 {
316
- pixelData.append(0)
317
- continue
318
- }
319
-
320
- // RangeInverse: normalize in inverse-depth space (1/z), so nearer pixels are brighter.
321
- let normalized: Float
322
- if depthRange > 0, minDepth > 0, maxDepth > 0 {
323
- let inv = 1.0 / value
324
- let invNear = 1.0 / minDepth
325
- let invFar = 1.0 / maxDepth
326
- let invRange = invNear - invFar
327
-
328
- if inv.isFinite, invNear.isFinite, invFar.isFinite, invRange != 0 {
329
- normalized = (inv - invFar) / invRange
330
- } else {
331
- normalized = 0
332
- }
333
- } else {
334
- normalized = 0
335
- }
336
-
337
- if !normalized.isFinite {
338
- pixelData.append(0)
339
- } else {
340
- let clamped = max(0, min(1, normalized))
341
- pixelData.append(UInt8(max(0, min(255, clamped * 255))))
342
- }
343
- }
344
- }
345
-
346
- case kCVPixelFormatType_DepthFloat16, kCVPixelFormatType_DisparityFloat16:
347
- let float16Buffer = baseAddress.assumingMemoryBound(to: Float16.self)
348
- for y in 0..<height {
349
- for x in 0..<width {
350
- let offset = y * (bytesPerRow / MemoryLayout<Float16>.stride) + x
351
- let value = Float(float16Buffer[offset])
352
- if value.isNaN || value.isInfinite || value <= 0 {
353
- pixelData.append(0)
354
- continue
355
- }
356
-
357
- // RangeInverse: normalize in inverse-depth space (1/z), so nearer pixels are brighter.
358
- let normalized: Float
359
- if depthRange > 0, minDepth > 0, maxDepth > 0 {
360
- let inv = 1.0 / value
361
- let invNear = 1.0 / minDepth
362
- let invFar = 1.0 / maxDepth
363
- let invRange = invNear - invFar
364
-
365
- if inv.isFinite, invNear.isFinite, invFar.isFinite, invRange != 0 {
366
- normalized = (inv - invFar) / invRange
367
- } else {
368
- normalized = 0
369
- }
370
- } else {
371
- normalized = 0
372
- }
373
-
374
- if !normalized.isFinite {
375
- pixelData.append(0)
376
- } else {
377
- let clamped = max(0, min(1, normalized))
378
- pixelData.append(UInt8(max(0, min(255, clamped * 255))))
379
- }
380
- }
381
- }
382
-
383
- default:
384
- return nil
385
- }
386
-
387
- // Create CGImage from pixel data
388
- guard
389
- let provider = CGDataProvider(
390
- data: NSData(bytes: pixelData, length: pixelData.count))
391
- else {
392
- return nil
393
- }
394
-
395
- let colorSpace = CGColorSpaceCreateDeviceGray()
396
- guard
397
- let cgImage = CGImage(
398
- width: width,
399
- height: height,
400
- bitsPerComponent: 8,
401
- bitsPerPixel: 8,
402
- bytesPerRow: width,
403
- space: colorSpace,
404
- bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
405
- provider: provider,
406
- decode: nil,
407
- shouldInterpolate: false,
408
- intent: .defaultIntent
409
- )
410
- else {
411
- return nil
412
- }
413
-
414
- return UIImage(cgImage: cgImage)
304
+ /// Compute SHA-256 hex string from a raw memory buffer.
305
+ private static func sha256Hex(data: UnsafeRawPointer, count: Int) -> String {
306
+ var hash = [UInt8](repeating: 0, count: Int(CC_SHA256_DIGEST_LENGTH))
307
+ CC_SHA256(data, CC_LONG(count), &hash)
308
+ return hash.map { String(format: "%02x", $0) }.joined()
415
309
  }
416
310
 
311
+ // MARK: - Turbo Colormap LUT
312
+
313
+ /// Google Turbo colormap — 256 RGB entries optimized for depth visualization.
314
+ /// Source: https://gist.github.com/mikhailov-work/ee72ba4191942acecc03fe6da94fc73f (public domain)
315
+ // swiftlint:disable:next large_tuple
316
+ private static let turboLUT: [(UInt8, UInt8, UInt8)] = [
317
+ (48,18,59),(50,21,67),(51,24,74),(52,27,81),(53,30,88),(54,33,95),(55,36,102),(56,39,109),
318
+ (57,42,115),(58,45,121),(59,47,128),(60,50,134),(61,53,139),(62,56,145),(63,59,151),(63,62,156),
319
+ (64,64,162),(65,67,167),(65,70,172),(66,73,177),(66,75,181),(67,78,186),(68,81,191),(68,84,195),
320
+ (68,86,199),(69,89,203),(69,92,207),(69,94,211),(70,97,214),(70,100,218),(70,102,221),(70,105,224),
321
+ (70,107,227),(71,110,230),(71,113,233),(71,115,235),(71,118,238),(71,120,240),(71,123,242),(70,125,244),
322
+ (70,128,246),(70,130,248),(70,133,250),(70,135,251),(69,138,252),(69,140,253),(68,143,254),(67,145,254),
323
+ (66,148,255),(65,150,255),(64,153,255),(62,155,254),(61,158,254),(59,160,253),(58,163,252),(56,165,251),
324
+ (55,168,250),(53,171,248),(51,173,247),(49,175,245),(47,178,244),(46,180,242),(44,183,240),(42,185,238),
325
+ (40,188,235),(39,190,233),(37,192,231),(35,195,228),(34,197,226),(32,199,223),(31,201,221),(30,203,218),
326
+ (28,205,216),(27,208,213),(26,210,210),(26,212,208),(25,213,205),(24,215,202),(24,217,200),(24,219,197),
327
+ (24,221,194),(24,222,192),(24,224,189),(25,226,187),(25,227,185),(26,228,182),(28,230,180),(29,231,178),
328
+ (31,233,175),(32,234,172),(34,235,170),(37,236,167),(39,238,164),(42,239,161),(44,240,158),(47,241,155),
329
+ (50,242,152),(53,243,148),(56,244,145),(60,245,142),(63,246,138),(67,247,135),(70,248,132),(74,248,128),
330
+ (78,249,125),(82,250,122),(85,250,118),(89,251,115),(93,252,111),(97,252,108),(101,253,105),(105,253,102),
331
+ (109,254,98),(113,254,95),(117,254,92),(121,254,89),(125,255,86),(128,255,83),(132,255,81),(136,255,78),
332
+ (139,255,75),(143,255,73),(146,255,71),(150,254,68),(153,254,66),(156,254,64),(159,253,63),(161,253,61),
333
+ (164,252,60),(167,252,58),(169,251,57),(172,251,56),(175,250,55),(177,249,54),(180,248,54),(183,247,53),
334
+ (185,246,53),(188,245,52),(190,244,52),(193,243,52),(195,241,52),(198,240,52),(200,239,52),(203,237,52),
335
+ (205,236,52),(208,234,52),(210,233,53),(212,231,53),(215,229,53),(217,228,54),(219,226,54),(221,224,55),
336
+ (223,223,55),(225,221,55),(227,219,56),(229,217,56),(231,215,57),(233,213,57),(235,211,57),(236,209,58),
337
+ (238,207,58),(239,205,58),(241,203,58),(242,201,58),(244,199,58),(245,197,58),(246,195,58),(247,193,58),
338
+ (248,190,57),(249,188,57),(250,186,57),(251,184,56),(251,182,55),(252,179,54),(252,177,54),(253,174,53),
339
+ (253,172,52),(254,169,51),(254,167,50),(254,164,49),(254,161,48),(254,158,47),(254,155,45),(254,153,44),
340
+ (254,150,43),(254,147,42),(254,144,41),(253,141,39),(253,138,38),(252,135,37),(252,132,35),(251,129,34),
341
+ (251,126,33),(250,123,31),(249,120,30),(249,117,29),(248,114,28),(247,111,26),(246,108,25),(245,105,24),
342
+ (244,102,23),(243,99,21),(242,96,20),(241,93,19),(240,91,18),(239,88,17),(237,85,16),(236,83,15),
343
+ (235,80,14),(234,78,13),(232,75,12),(231,73,12),(229,71,11),(228,69,10),(226,67,10),(225,65,9),
344
+ (223,63,8),(221,61,8),(220,59,7),(218,57,7),(216,55,6),(214,53,6),(212,51,5),(210,49,5),
345
+ (208,47,5),(206,45,4),(204,43,4),(202,42,4),(200,40,3),(197,38,3),(195,37,3),(193,35,2),
346
+ (190,33,2),(188,32,2),(185,30,2),(183,29,2),(180,27,1),(178,26,1),(175,24,1),(172,23,1),
347
+ (169,22,1),(167,20,1),(164,19,1),(161,18,1),(158,16,1),(155,15,1),(152,14,1),(149,13,1),
348
+ (146,11,1),(142,10,1),(139,9,2),(136,8,2),(133,7,2),(129,6,2),(126,5,2),(122,4,3),
349
+ ]
417
350
  }
@@ -1 +1 @@
1
- {"version":3,"names":["TurboModuleRegistry","getEnforcing"],"sourceRoot":"../../src","sources":["NativeZcam1Capture.ts"],"mappings":";;AAAA,SAA2BA,mBAAmB,QAAQ,cAAc;;AA8JpE;AACA;AACA;AACA;AACA;;AA4KA,eAAeA,mBAAmB,CAACC,YAAY,CAAO,cAAc,CAAC","ignoreList":[]}
1
+ {"version":3,"names":["TurboModuleRegistry","getEnforcing"],"sourceRoot":"../../src","sources":["NativeZcam1Capture.ts"],"mappings":";;AAAA,SAA2BA,mBAAmB,QAAQ,cAAc;;AA0KpE;AACA;AACA;AACA;AACA;;AA4KA,eAAeA,mBAAmB,CAACC,YAAY,CAAO,cAAc,CAAC","ignoreList":[]}