react-native-nitro-ar 2026.2.1 → 2026.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/ios/HybridARMeshAnchor.swift +48 -20
  2. package/ios/HybridARSegmentationResult.swift +344 -0
  3. package/ios/HybridARView.swift +113 -7
  4. package/lib/commonjs/index.js.map +1 -1
  5. package/lib/commonjs/specs/ARObjectMeasurement.nitro.js +6 -0
  6. package/lib/commonjs/specs/ARObjectMeasurement.nitro.js.map +1 -0
  7. package/lib/module/index.js.map +1 -1
  8. package/lib/module/specs/ARObjectMeasurement.nitro.js +4 -0
  9. package/lib/module/specs/ARObjectMeasurement.nitro.js.map +1 -0
  10. package/lib/typescript/src/index.d.ts +1 -0
  11. package/lib/typescript/src/index.d.ts.map +1 -1
  12. package/lib/typescript/src/specs/ARObjectMeasurement.nitro.d.ts +34 -0
  13. package/lib/typescript/src/specs/ARObjectMeasurement.nitro.d.ts.map +1 -0
  14. package/lib/typescript/src/specs/ARView.nitro.d.ts +5 -0
  15. package/lib/typescript/src/specs/ARView.nitro.d.ts.map +1 -1
  16. package/nitrogen/generated/ios/NitroAR-Swift-Cxx-Bridge.cpp +33 -0
  17. package/nitrogen/generated/ios/NitroAR-Swift-Cxx-Bridge.hpp +154 -0
  18. package/nitrogen/generated/ios/NitroAR-Swift-Cxx-Umbrella.hpp +8 -0
  19. package/nitrogen/generated/ios/c++/HybridARSegmentationResultSpecSwift.cpp +11 -0
  20. package/nitrogen/generated/ios/c++/HybridARSegmentationResultSpecSwift.hpp +102 -0
  21. package/nitrogen/generated/ios/c++/HybridARViewSpecSwift.hpp +25 -0
  22. package/nitrogen/generated/ios/swift/ARObjectMeasurement.swift +71 -0
  23. package/nitrogen/generated/ios/swift/Func_void_std__optional_ARObjectMeasurement_.swift +46 -0
  24. package/nitrogen/generated/ios/swift/Func_void_std__optional_std__shared_ptr_HybridARSegmentationResultSpec__.swift +57 -0
  25. package/nitrogen/generated/ios/swift/HybridARSegmentationResultSpec.swift +58 -0
  26. package/nitrogen/generated/ios/swift/HybridARSegmentationResultSpec_cxx.swift +187 -0
  27. package/nitrogen/generated/ios/swift/HybridARViewSpec.swift +2 -0
  28. package/nitrogen/generated/ios/swift/HybridARViewSpec_cxx.swift +53 -0
  29. package/nitrogen/generated/shared/c++/ARObjectMeasurement.hpp +107 -0
  30. package/nitrogen/generated/shared/c++/HybridARSegmentationResultSpec.cpp +25 -0
  31. package/nitrogen/generated/shared/c++/HybridARSegmentationResultSpec.hpp +68 -0
  32. package/nitrogen/generated/shared/c++/HybridARViewSpec.cpp +2 -0
  33. package/nitrogen/generated/shared/c++/HybridARViewSpec.hpp +10 -0
  34. package/package.json +1 -1
  35. package/src/index.ts +4 -0
  36. package/src/specs/ARObjectMeasurement.nitro.ts +33 -0
  37. package/src/specs/ARView.nitro.ts +7 -0
@@ -34,12 +34,17 @@ final class HybridARMeshAnchor: HybridARMeshAnchorSpec {
34
34
 
35
35
  var vertices: [Double] {
36
36
  let geo = anchor.geometry
37
- let vertexBuffer = geo.vertices
37
+ let vertexSource = geo.vertices
38
38
  var result: [Double] = []
39
- result.reserveCapacity(vertexBuffer.count * 3)
39
+ result.reserveCapacity(vertexSource.count * 3)
40
40
 
41
- for i in 0..<vertexBuffer.count {
42
- let vertex = vertexBuffer[i]
41
+ let buffer = vertexSource.buffer
42
+ let stride = vertexSource.stride
43
+ let offset = vertexSource.offset
44
+
45
+ for i in 0..<vertexSource.count {
46
+ let vertexPointer = buffer.contents().advanced(by: offset + stride * i)
47
+ let vertex = vertexPointer.assumingMemoryBound(to: SIMD3<Float>.self).pointee
43
48
  result.append(Double(vertex.x))
44
49
  result.append(Double(vertex.y))
45
50
  result.append(Double(vertex.z))
    /// Flattened triangle indices of the mesh as [i0, i1, i2, i0, i1, i2, …].
    ///
    /// ARKit mesh faces are triangles; each face contributes three vertex
    /// indices read straight from the ARGeometryElement's Metal buffer.
    var faces: [Double] {
        let geo = anchor.geometry
        let faceElement = geo.faces
        var result: [Double] = []
        result.reserveCapacity(faceElement.count * 3)

        let buffer = faceElement.buffer
        let bytesPerIndex = faceElement.bytesPerIndex

        for i in 0..<faceElement.count {
            // 3 indices per triangle; offsets stay aligned for both index widths
            // (multiples of 12 bytes for UInt32, 6 bytes for UInt16).
            let facePointer = buffer.contents().advanced(by: bytesPerIndex * 3 * i)

            if bytesPerIndex == 4 {
                let indices = facePointer.assumingMemoryBound(to: UInt32.self)
                result.append(Double(indices[0]))
                result.append(Double(indices[1]))
                result.append(Double(indices[2]))
            } else if bytesPerIndex == 2 {
                let indices = facePointer.assumingMemoryBound(to: UInt16.self)
                result.append(Double(indices[0]))
                result.append(Double(indices[1]))
                result.append(Double(indices[2]))
            }
            // NOTE(review): any other bytesPerIndex silently appends nothing for
            // this face, leaving the result shorter than count * 3 — confirm
            // callers tolerate that, or consider failing loudly.
        }
        return result
    }
64
81
 
65
82
  var normals: [Double] {
66
83
  let geo = anchor.geometry
67
- let normalBuffer = geo.normals
84
+ let normalSource = geo.normals
68
85
  var result: [Double] = []
69
- result.reserveCapacity(normalBuffer.count * 3)
86
+ result.reserveCapacity(normalSource.count * 3)
70
87
 
71
- for i in 0..<normalBuffer.count {
72
- let normal = normalBuffer[i]
88
+ let buffer = normalSource.buffer
89
+ let stride = normalSource.stride
90
+ let offset = normalSource.offset
91
+
92
+ for i in 0..<normalSource.count {
93
+ let normalPointer = buffer.contents().advanced(by: offset + stride * i)
94
+ let normal = normalPointer.assumingMemoryBound(to: SIMD3<Float>.self).pointee
73
95
  result.append(Double(normal.x))
74
96
  result.append(Double(normal.y))
75
97
  result.append(Double(normal.z))
    /// Per-face mesh classifications (wall, floor, ceiling, …) mapped to the
    /// module's MeshClassification enum. Empty when the device/session did not
    /// produce classification data.
    var classifications: [MeshClassification] {
        let geo = anchor.geometry
        guard let classSource = geo.classification else {
            return []
        }

        var result: [MeshClassification] = []
        result.reserveCapacity(classSource.count)

        let buffer = classSource.buffer
        let stride = classSource.stride
        let offset = classSource.offset

        for i in 0..<classSource.count {
            // Classification values are stored one byte per face.
            let classPointer = buffer.contents().advanced(by: offset + stride * i)
            let classValue = classPointer.assumingMemoryBound(to: UInt8.self).pointee
            // Unknown raw values fall back to .none rather than crashing.
            let arClass = ARMeshClassification(rawValue: Int(classValue)) ?? .none
            result.append(mapClassification(arClass))
        }
        return result
    }
import ARKit
import CoreImage
import NitroModules
import Vision

/// Wraps a Vision foreground-instance segmentation result for one AR frame and
/// exposes mask metrics, a LiDAR-backed world-space point cloud, and an
/// oriented-bounding-box measurement of the segmented object.
@available(iOS 17.0, *)
final class HybridARSegmentationResult: HybridARSegmentationResultSpec {
    /// The Vision observation holding all detected foreground instances.
    let mask: VNInstanceMaskObservation
    /// Which instance within `mask` this result describes.
    let selectedIndex: Int
    /// The AR frame the mask was computed from; supplies the camera image,
    /// scene depth, intrinsics, and camera transform.
    let frame: ARFrame
    weak var sceneView: ARSCNView?

    // Computed once in init by calculateMaskMetrics().
    private var _boundingBox: [Double] = []
    private var _maskPixelCount: Double = 0
    // Lazily-built world-space point cloud, reused by measure().
    private var cachedDepthPoints: [Double]?

    init(mask: VNInstanceMaskObservation, selectedIndex: Int, frame: ARFrame, sceneView: ARSCNView?) {
        self.mask = mask
        self.selectedIndex = selectedIndex
        self.frame = frame
        self.sceneView = sceneView
        super.init()

        // Calculate bounding box and pixel count
        calculateMaskMetrics()
    }

    var success: Bool {
        true
    }

    /// Normalized [x, y, width, height]; falls back to [0, 0, 1, 1] when the
    /// mask could not be generated or contains no pixels.
    var boundingBox: [Double] {
        _boundingBox
    }

    var maskPixelCount: Double {
        _maskPixelCount
    }

    /// Scans the scaled instance mask once to derive the normalized bounding
    /// box and the mask pixel count.
    private func calculateMaskMetrics() {
        guard let maskBuffer = try? mask.generateScaledMaskForImage(
            forInstances: IndexSet(integer: selectedIndex),
            from: VNImageRequestHandler(cvPixelBuffer: frame.capturedImage)
        ) else {
            _boundingBox = [0, 0, 1, 1]
            _maskPixelCount = 0
            return
        }

        CVPixelBufferLockBaseAddress(maskBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(maskBuffer, .readOnly) }

        let width = CVPixelBufferGetWidth(maskBuffer)
        let height = CVPixelBufferGetHeight(maskBuffer)
        guard let baseAddress = CVPixelBufferGetBaseAddress(maskBuffer) else {
            _boundingBox = [0, 0, 1, 1]
            _maskPixelCount = 0
            return
        }

        let bytesPerRow = CVPixelBufferGetBytesPerRow(maskBuffer)
        // NOTE(review): the mask is read as one byte per pixel with a >127
        // threshold, but Vision documents the scaled mask pixel buffer as
        // kCVPixelFormatType_OneComponent32Float — confirm the actual format;
        // if it is Float32 this thresholding inspects raw float bytes.
        let buffer = baseAddress.assumingMemoryBound(to: UInt8.self)

        var minX = width
        var maxX = 0
        var minY = height
        var maxY = 0
        var pixelCount = 0

        for y in 0..<height {
            for x in 0..<width {
                let pixelValue = buffer[y * bytesPerRow + x]
                if pixelValue > 127 {
                    pixelCount += 1
                    minX = min(minX, x)
                    maxX = max(maxX, x)
                    minY = min(minY, y)
                    maxY = max(maxY, y)
                }
            }
        }

        if pixelCount > 0 {
            _boundingBox = [
                Double(minX) / Double(width),
                Double(minY) / Double(height),
                Double(maxX - minX) / Double(width),
                Double(maxY - minY) / Double(height)
            ]
        } else {
            _boundingBox = [0, 0, 1, 1]
        }
        _maskPixelCount = Double(pixelCount)
    }

    /// Samples LiDAR depth under the instance mask and unprojects the samples
    /// to world space using the camera intrinsics and transform.
    ///
    /// - Returns: A flat [x, y, z, …] world-space point array (roughly capped
    ///   at ~1000 points by the sampling step), or [] when depth or the mask is
    ///   unavailable. The result is cached for subsequent calls.
    func getDepthPoints() throws -> [Double] {
        if let cached = cachedDepthPoints {
            print("[GetDepthPoints] Returning cached \(cached.count / 3) points")
            return cached
        }

        guard let sceneDepth = frame.sceneDepth else {
            print("[GetDepthPoints] No sceneDepth available (LiDAR required)")
            return []
        }

        print("[GetDepthPoints] Scene depth available, extracting points...")
        print("[GetDepthPoints] Camera image: \(CVPixelBufferGetWidth(frame.capturedImage))x\(CVPixelBufferGetHeight(frame.capturedImage))")

        // NOTE(review): no orientation is passed here, while segmentObject
        // performs its Vision request with orientation .right — confirm the
        // two mask coordinate spaces agree.
        guard let maskBuffer = try? mask.generateScaledMaskForImage(
            forInstances: IndexSet(integer: selectedIndex),
            from: VNImageRequestHandler(cvPixelBuffer: frame.capturedImage)
        ) else {
            return []
        }

        let depthMap = sceneDepth.depthMap
        CVPixelBufferLockBaseAddress(depthMap, .readOnly)
        CVPixelBufferLockBaseAddress(maskBuffer, .readOnly)
        defer {
            CVPixelBufferUnlockBaseAddress(depthMap, .readOnly)
            CVPixelBufferUnlockBaseAddress(maskBuffer, .readOnly)
        }

        let depthWidth = CVPixelBufferGetWidth(depthMap)
        let depthHeight = CVPixelBufferGetHeight(depthMap)
        let maskWidth = CVPixelBufferGetWidth(maskBuffer)
        let maskHeight = CVPixelBufferGetHeight(maskBuffer)

        print("[GetDepthPoints] Depth buffer: \(depthWidth)x\(depthHeight), Mask: \(maskWidth)x\(maskHeight)")

        guard let depthBase = CVPixelBufferGetBaseAddress(depthMap),
              let maskBase = CVPixelBufferGetBaseAddress(maskBuffer) else {
            return []
        }

        let depthBytesPerRow = CVPixelBufferGetBytesPerRow(depthMap)
        let maskBytesPerRow = CVPixelBufferGetBytesPerRow(maskBuffer)
        let depthBuffer = depthBase.assumingMemoryBound(to: Float32.self)
        let maskBufferPtr = maskBase.assumingMemoryBound(to: UInt8.self)

        // Get camera image dimensions for coordinate scaling
        let imageWidth = CVPixelBufferGetWidth(frame.capturedImage)
        let imageHeight = CVPixelBufferGetHeight(frame.capturedImage)

        // Intrinsics are in camera image coordinate space
        let intrinsics = frame.camera.intrinsics
        let fx = intrinsics[0][0]
        let fy = intrinsics[1][1]
        let cx = intrinsics[2][0]
        let cy = intrinsics[2][1]

        // Fix: the camera transform is loop-invariant — fetch it once instead
        // of re-reading frame.camera.transform for every sampled point.
        let cameraTransform = frame.camera.transform

        var points: [Double] = []
        points.reserveCapacity(Int(_maskPixelCount) * 3)

        // Sample points from the mask
        let stepSize = max(1, Int(sqrt(_maskPixelCount / 1000))) // Limit to ~1000 points

        for maskY in stride(from: 0, to: maskHeight, by: stepSize) {
            for maskX in stride(from: 0, to: maskWidth, by: stepSize) {
                let maskValue = maskBufferPtr[maskY * maskBytesPerRow + maskX]
                guard maskValue > 127 else { continue }

                // Map mask coordinates to depth coordinates
                let depthX = maskX * depthWidth / maskWidth
                let depthY = maskY * depthHeight / maskHeight

                guard depthX < depthWidth, depthY < depthHeight else { continue }

                let depthIndex = depthY * depthBytesPerRow / MemoryLayout<Float32>.stride + depthX
                let depth = depthBuffer[depthIndex]

                guard depth > 0, depth < 10 else { continue } // Valid depth range

                // Scale depth coordinates to camera image space for use with intrinsics
                let imageX = Float(depthX) * Float(imageWidth) / Float(depthWidth)
                let imageY = Float(depthY) * Float(imageHeight) / Float(depthHeight)

                // Convert to 3D point in camera space using properly scaled coordinates
                let x = Double((imageX - cx) * depth / fx)
                let y = Double((imageY - cy) * depth / fy)
                let z = Double(depth)

                // Transform to world space (pinhole y-down/z-forward → ARKit
                // camera-space y-up/z-backward, hence the negations).
                let worldPoint = simd_mul(cameraTransform, simd_float4(Float(x), Float(-y), Float(-z), 1))

                points.append(Double(worldPoint.x))
                points.append(Double(worldPoint.y))
                points.append(Double(worldPoint.z))
            }
        }

        print("[GetDepthPoints] Extracted \(points.count / 3) 3D points from depth data")

        // Log point cloud bounds for debugging
        if points.count >= 3 {
            var minX = Double.greatestFiniteMagnitude, maxX = -Double.greatestFiniteMagnitude
            var minY = Double.greatestFiniteMagnitude, maxY = -Double.greatestFiniteMagnitude
            var minZ = Double.greatestFiniteMagnitude, maxZ = -Double.greatestFiniteMagnitude
            for i in stride(from: 0, to: points.count, by: 3) {
                minX = min(minX, points[i]); maxX = max(maxX, points[i])
                minY = min(minY, points[i+1]); maxY = max(maxY, points[i+1])
                minZ = min(minZ, points[i+2]); maxZ = max(maxZ, points[i+2])
            }
            print("[GetDepthPoints] Point cloud bounds: X[\(minX)...\(maxX)], Y[\(minY)...\(maxY)], Z[\(minZ)...\(maxZ)]")
            print("[GetDepthPoints] Extents: X=\(maxX-minX)m, Y=\(maxY-minY)m, Z=\(maxZ-minZ)m")
        }

        cachedDepthPoints = points
        return points
    }

    /// Fits an oriented bounding box to the depth point cloud via PCA.
    ///
    /// - Returns: The measurement, or nil when fewer than 4 points (12 values)
    ///   are available.
    func measure() throws -> ARObjectMeasurement? {
        let points = try getDepthPoints()
        guard points.count >= 12 else { return nil } // Need at least 4 points

        // Use PCA to compute oriented bounding box
        let pointCount = points.count / 3
        var sumX = 0.0, sumY = 0.0, sumZ = 0.0

        // Calculate centroid
        for i in 0..<pointCount {
            sumX += points[i * 3]
            sumY += points[i * 3 + 1]
            sumZ += points[i * 3 + 2]
        }
        let centerX = sumX / Double(pointCount)
        let centerY = sumY / Double(pointCount)
        let centerZ = sumZ / Double(pointCount)

        // Build covariance matrix (symmetric; lower triangle mirrored below)
        var cov = [[Double]](repeating: [Double](repeating: 0, count: 3), count: 3)
        for i in 0..<pointCount {
            let dx = points[i * 3] - centerX
            let dy = points[i * 3 + 1] - centerY
            let dz = points[i * 3 + 2] - centerZ

            cov[0][0] += dx * dx
            cov[0][1] += dx * dy
            cov[0][2] += dx * dz
            cov[1][1] += dy * dy
            cov[1][2] += dy * dz
            cov[2][2] += dz * dz
        }
        cov[1][0] = cov[0][1]
        cov[2][0] = cov[0][2]
        cov[2][1] = cov[1][2]

        // Simple eigenvalue estimation using power iteration
        let (axes, eigenvalues) = computeEigenvectors(cov)

        // Project points onto principal axes to find extents
        var minProj = [Double.greatestFiniteMagnitude, Double.greatestFiniteMagnitude, Double.greatestFiniteMagnitude]
        var maxProj = [-Double.greatestFiniteMagnitude, -Double.greatestFiniteMagnitude, -Double.greatestFiniteMagnitude]

        for i in 0..<pointCount {
            let dx = points[i * 3] - centerX
            let dy = points[i * 3 + 1] - centerY
            let dz = points[i * 3 + 2] - centerZ

            for a in 0..<3 {
                let proj = dx * axes[a][0] + dy * axes[a][1] + dz * axes[a][2]
                minProj[a] = min(minProj[a], proj)
                maxProj[a] = max(maxProj[a], proj)
            }
        }

        // Calculate dimensions (sorted: width >= height >= depth)
        // Fix: `let` — this array is never mutated after construction.
        // NOTE(review): dims are sorted but `axes` keeps its original order, so
        // axes[i] no longer corresponds to dims[i] after sorting — confirm
        // consumers do not pair them index-wise.
        let dims = [
            maxProj[0] - minProj[0],
            maxProj[1] - minProj[1],
            maxProj[2] - minProj[2]
        ].sorted(by: >)

        // Flatten axes for output
        let flatAxes = axes.flatMap { $0 }

        // Confidence based on point density and spread
        let totalVariance = eigenvalues.reduce(0, +)
        let confidence = min(1.0, Double(pointCount) / 500.0) * min(1.0, totalVariance / 0.1)

        return ARObjectMeasurement(
            width: dims[0],
            height: dims[1],
            depth: dims[2],
            center: [centerX, centerY, centerZ],
            axes: flatAxes,
            confidence: confidence,
            pointCount: Double(pointCount)
        )
    }

    /// Orthogonal/power iteration for a 3x3 symmetric matrix.
    ///
    /// - Parameter matrix: Symmetric 3x3 covariance matrix.
    /// - Returns: (eigenvectors as rows, eigenvalues), ordered by extraction:
    ///   each subsequent vector is re-orthogonalized against the earlier ones.
    private func computeEigenvectors(_ matrix: [[Double]]) -> ([[Double]], [Double]) {
        // Simplified power iteration for 3x3 symmetric matrix
        var vectors: [[Double]] = [
            [1, 0, 0],
            [0, 1, 0],
            [0, 0, 1]
        ]
        var eigenvalues = [0.0, 0.0, 0.0]

        for i in 0..<3 {
            var v = vectors[i]

            // Power iteration
            for _ in 0..<20 {
                var newV = [0.0, 0.0, 0.0]
                for row in 0..<3 {
                    for col in 0..<3 {
                        newV[row] += matrix[row][col] * v[col]
                    }
                }

                // Orthogonalize against previous vectors (Gram-Schmidt)
                for j in 0..<i {
                    let dot = newV[0] * vectors[j][0] + newV[1] * vectors[j][1] + newV[2] * vectors[j][2]
                    newV[0] -= dot * vectors[j][0]
                    newV[1] -= dot * vectors[j][1]
                    newV[2] -= dot * vectors[j][2]
                }

                // Normalize; skip degenerate (near-zero) iterates
                let len = sqrt(newV[0] * newV[0] + newV[1] * newV[1] + newV[2] * newV[2])
                if len > 1e-10 {
                    v = [newV[0] / len, newV[1] / len, newV[2] / len]
                }
            }

            vectors[i] = v

            // Calculate eigenvalue via the Rayleigh quotient v·(A·v)
            var av = [0.0, 0.0, 0.0]
            for row in 0..<3 {
                for col in 0..<3 {
                    av[row] += matrix[row][col] * v[col]
                }
            }
            eigenvalues[i] = av[0] * v[0] + av[1] * v[1] + av[2] * v[2]
        }

        return (vectors, eigenvalues)
    }
}
@@ -2,6 +2,7 @@ import ARKit
2
2
  import SceneKit
3
3
  import UIKit
4
4
  import NitroModules
5
+ import Vision
5
6
 
6
7
  class HybridARView: HybridARViewSpec {
7
8
  // The underlying AR view
@@ -90,13 +91,8 @@ class HybridARView: HybridARViewSpec {
90
91
  }
91
92
 
92
93
    /// Intentionally a no-op in this revision: occlusion is driven by the
    /// configuration's frameSemantics set in startSession/resetSession, and
    /// ARSCNView has no scene-understanding environment like RealityKit's ARView.
    private func updateOcclusionSettings() {
        // Occlusion is configured via frameSemantics in startSession/resetSession
        // ARSCNView doesn't have an environment property like RealityKit's ARView
    }
101
97
 
102
98
  private func updateDebugOptions() {
@@ -202,6 +198,116 @@ class HybridARView: HybridARViewSpec {
202
198
  return false
203
199
  }
204
200
 
201
+ // MARK: - Object Segmentation & Measurement
202
+
203
+ func segmentObject(x: Double, y: Double) throws -> Promise<(any HybridARSegmentationResultSpec)?> {
204
+ return Promise.async { [weak self] in
205
+ guard let self = self else {
206
+ print("[SegmentObject] self is nil")
207
+ return nil
208
+ }
209
+
210
+ guard #available(iOS 17.0, *) else {
211
+ print("[SegmentObject] iOS 17+ required")
212
+ return nil
213
+ }
214
+
215
+ guard let frame = self.arView.session.currentFrame else {
216
+ print("[SegmentObject] No current frame")
217
+ return nil
218
+ }
219
+
220
+ let imageWidth = CGFloat(CVPixelBufferGetWidth(frame.capturedImage))
221
+ let imageHeight = CGFloat(CVPixelBufferGetHeight(frame.capturedImage))
222
+
223
+ print("[SegmentObject] Screen normalized: (\(x), \(y))")
224
+ print("[SegmentObject] Image size: \(imageWidth) x \(imageHeight)")
225
+
226
+ // Perform instance segmentation
227
+ let request = VNGenerateForegroundInstanceMaskRequest()
228
+ let handler = VNImageRequestHandler(cvPixelBuffer: frame.capturedImage, orientation: .right)
229
+
230
+ do {
231
+ try handler.perform([request])
232
+ } catch {
233
+ print("[SegmentObject] Vision request failed: \(error)")
234
+ return nil
235
+ }
236
+
237
+ guard let observation = request.results?.first else {
238
+ print("[SegmentObject] No segmentation results")
239
+ return nil
240
+ }
241
+
242
+ // Find which instance contains the tapped point
243
+ let instances = observation.allInstances
244
+ print("[SegmentObject] Found \(instances.count) instances")
245
+
246
+ if instances.isEmpty {
247
+ print("[SegmentObject] No foreground instances detected in scene")
248
+ return nil
249
+ }
250
+
251
+ // For now, just use the first (usually largest/most prominent) instance
252
+ // TODO: Improve tap-to-instance mapping
253
+ let selectedIndex = instances.first!
254
+
255
+ print("[SegmentObject] Selected instance \(selectedIndex)")
256
+ return HybridARSegmentationResult(
257
+ mask: observation,
258
+ selectedIndex: selectedIndex,
259
+ frame: frame,
260
+ sceneView: self.arView
261
+ )
262
+ }
263
+ }
264
+
265
+ func measureObject(x: Double, y: Double) throws -> Promise<ARObjectMeasurement?> {
266
+ return Promise.async { [weak self] in
267
+ guard let self = self else {
268
+ print("[MeasureObject] self is nil")
269
+ throw NSError(domain: "ARView", code: 1, userInfo: [NSLocalizedDescriptionKey: "View not available"])
270
+ }
271
+
272
+ guard #available(iOS 17.0, *) else {
273
+ print("[MeasureObject] iOS 17+ required")
274
+ throw NSError(domain: "ARView", code: 2, userInfo: [NSLocalizedDescriptionKey: "iOS 17+ required for object measurement"])
275
+ }
276
+
277
+ guard self.arView.session.currentFrame != nil else {
278
+ print("[MeasureObject] No current frame")
279
+ throw NSError(domain: "ARView", code: 3, userInfo: [NSLocalizedDescriptionKey: "No AR frame available"])
280
+ }
281
+
282
+ guard self.arView.session.currentFrame?.sceneDepth != nil else {
283
+ print("[MeasureObject] No scene depth - LiDAR required")
284
+ throw NSError(domain: "ARView", code: 4, userInfo: [NSLocalizedDescriptionKey: "LiDAR depth data not available. Is sceneDepth enabled?"])
285
+ }
286
+
287
+ print("[MeasureObject] Starting measurement at (\(x), \(y))")
288
+
289
+ // First segment the object
290
+ let segmentationResult = try await self.segmentObject(x: x, y: y).await()
291
+
292
+ guard let segmentation = segmentationResult as? HybridARSegmentationResult else {
293
+ print("[MeasureObject] Segmentation failed or no object found")
294
+ throw NSError(domain: "ARView", code: 5, userInfo: [NSLocalizedDescriptionKey: "No foreground object detected. Try pointing at a distinct object."])
295
+ }
296
+
297
+ print("[MeasureObject] Segmentation successful, measuring...")
298
+
299
+ // Then measure it
300
+ let measurement = try segmentation.measure()
301
+ if let m = measurement {
302
+ print("[MeasureObject] Measurement: W=\(m.width), H=\(m.height), D=\(m.depth), confidence=\(m.confidence)")
303
+ } else {
304
+ print("[MeasureObject] Measurement returned nil (not enough depth points?)")
305
+ throw NSError(domain: "ARView", code: 6, userInfo: [NSLocalizedDescriptionKey: "Not enough depth points for measurement. Move closer to the object."])
306
+ }
307
+ return measurement
308
+ }
309
+ }
310
+
205
311
  // MARK: - Session Control
206
312
 
207
313
  func startSession() throws {
@@ -1 +1 @@
1
- {"version":3,"names":["_reactNativeNitroModules","require","_ARViewConfig","_interopRequireDefault","e","__esModule","default","ARView","exports","getHostComponent","ARViewConfig","createARSession","NitroModules","createHybridObject","createARBoundingBoxBuilder"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;;;;;;;AAAA,IAAAA,wBAAA,GAAAC,OAAA;AACA,IAAAC,aAAA,GAAAC,sBAAA,CAAAF,OAAA;AAA+E,SAAAE,uBAAAC,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAiC/E;;AAeA;AACO,MAAMG,MAAM,GAAAC,OAAA,CAAAD,MAAA,GAAG,IAAAE,yCAAgB,EAA6B,QAAQ,EAAE,MAAMC,qBAAY,CAAC;;AAEhG;AACO,SAASC,eAAeA,CAAA,EAAc;EAC3C,OAAOC,qCAAY,CAACC,kBAAkB,CAAY,WAAW,CAAC;AAChE;AAEO,SAASC,0BAA0BA,CAAA,EAAyB;EACjE,OAAOF,qCAAY,CAACC,kBAAkB,CAAuB,sBAAsB,CAAC;AACtF","ignoreList":[]}
1
+ {"version":3,"names":["_reactNativeNitroModules","require","_ARViewConfig","_interopRequireDefault","e","__esModule","default","ARView","exports","getHostComponent","ARViewConfig","createARSession","NitroModules","createHybridObject","createARBoundingBoxBuilder"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;;;;;;;AAAA,IAAAA,wBAAA,GAAAC,OAAA;AACA,IAAAC,aAAA,GAAAC,sBAAA,CAAAF,OAAA;AAA+E,SAAAE,uBAAAC,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAqC/E;;AAeA;AACO,MAAMG,MAAM,GAAAC,OAAA,CAAAD,MAAA,GAAG,IAAAE,yCAAgB,EAA6B,QAAQ,EAAE,MAAMC,qBAAY,CAAC;;AAEhG;AACO,SAASC,eAAeA,CAAA,EAAc;EAC3C,OAAOC,qCAAY,CAACC,kBAAkB,CAAY,WAAW,CAAC;AAChE;AAEO,SAASC,0BAA0BA,CAAA,EAAyB;EACjE,OAAOF,qCAAY,CAACC,kBAAkB,CAAuB,sBAAsB,CAAC;AACtF","ignoreList":[]}
@@ -0,0 +1,6 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ //# sourceMappingURL=ARObjectMeasurement.nitro.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"names":[],"sourceRoot":"../../../src","sources":["specs/ARObjectMeasurement.nitro.ts"],"mappings":"","ignoreList":[]}
@@ -1 +1 @@
1
- {"version":3,"names":["getHostComponent","NitroModules","ARViewConfig","ARView","createARSession","createHybridObject","createARBoundingBoxBuilder"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;AAAA,SAASA,gBAAgB,EAAEC,YAAY,QAAQ,4BAA4B;AAC3E,OAAOC,YAAY,MAAM,qDAAqD;;AAiC9E;;AAeA;AACA,OAAO,MAAMC,MAAM,GAAGH,gBAAgB,CAA6B,QAAQ,EAAE,MAAME,YAAY,CAAC;;AAEhG;AACA,OAAO,SAASE,eAAeA,CAAA,EAAc;EAC3C,OAAOH,YAAY,CAACI,kBAAkB,CAAY,WAAW,CAAC;AAChE;AAEA,OAAO,SAASC,0BAA0BA,CAAA,EAAyB;EACjE,OAAOL,YAAY,CAACI,kBAAkB,CAAuB,sBAAsB,CAAC;AACtF","ignoreList":[]}
1
+ {"version":3,"names":["getHostComponent","NitroModules","ARViewConfig","ARView","createARSession","createHybridObject","createARBoundingBoxBuilder"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;AAAA,SAASA,gBAAgB,EAAEC,YAAY,QAAQ,4BAA4B;AAC3E,OAAOC,YAAY,MAAM,qDAAqD;;AAqC9E;;AAeA;AACA,OAAO,MAAMC,MAAM,GAAGH,gBAAgB,CAA6B,QAAQ,EAAE,MAAME,YAAY,CAAC;;AAEhG;AACA,OAAO,SAASE,eAAeA,CAAA,EAAc;EAC3C,OAAOH,YAAY,CAACI,kBAAkB,CAAY,WAAW,CAAC;AAChE;AAEA,OAAO,SAASC,0BAA0BA,CAAA,EAAyB;EACjE,OAAOL,YAAY,CAACI,kBAAkB,CAAuB,sBAAsB,CAAC;AACtF","ignoreList":[]}
@@ -0,0 +1,4 @@
1
+ "use strict";
2
+
3
+ export {};
4
+ //# sourceMappingURL=ARObjectMeasurement.nitro.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"names":[],"sourceRoot":"../../../src","sources":["specs/ARObjectMeasurement.nitro.ts"],"mappings":"","ignoreList":[]}
@@ -7,6 +7,7 @@ export type { ARDepthData } from "./specs/ARDepthData.nitro";
7
7
  export type { ARFrame } from "./specs/ARFrame.nitro";
8
8
  export type { ARDirectionalLightEstimate, ARLightEstimate, } from "./specs/ARLightEstimate.nitro";
9
9
  export type { ARMeasurement } from "./specs/ARMeasurement.nitro";
10
+ export type { ARObjectMeasurement, ARSegmentationResult, } from "./specs/ARObjectMeasurement.nitro";
10
11
  export type { ARPlaneAnchor, ARPlaneGeometry, PlaneAlignment, PlaneClassification, } from "./specs/ARPlaneAnchor.nitro";
11
12
  export type { ARRaycastResult, RaycastAlignment, RaycastQuery, RaycastTarget, } from "./specs/ARRaycastResult.nitro";
12
13
  export type { ARMeshAnchor, LiDARCapabilities, MeshClassification, SceneReconstructionMode, } from "./specs/ARSceneMesh.nitro";
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC/E,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AACzD,OAAO,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAEvE,YAAY,EAAE,QAAQ,EAAE,MAAM,wBAAwB,CAAC;AACvD,YAAY,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC/E,YAAY,EAAE,WAAW,EAAE,MAAM,2BAA2B,CAAC;AAC7D,YAAY,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AACrD,YAAY,EACV,0BAA0B,EAC1B,eAAe,GAChB,MAAM,+BAA+B,CAAC;AACvC,YAAY,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAEjE,YAAY,EACV,aAAa,EACb,eAAe,EACf,cAAc,EACd,mBAAmB,GACpB,MAAM,6BAA6B,CAAC;AACrC,YAAY,EACV,eAAe,EACf,gBAAgB,EAChB,YAAY,EACZ,aAAa,GACd,MAAM,+BAA+B,CAAC;AACvC,YAAY,EACV,YAAY,EACZ,iBAAiB,EACjB,kBAAkB,EAClB,uBAAuB,GACxB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EACV,SAAS,EACT,sBAAsB,EACtB,UAAU,EACV,oBAAoB,EACpB,kBAAkB,EAClB,aAAa,EACb,mBAAmB,EACnB,cAAc,GACf,MAAM,yBAAyB,CAAC;AACjC,YAAY,EAAE,eAAe,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACxF,YAAY,EAAE,QAAQ,EAAE,MAAM,wBAAwB,CAAC;AACvD,YAAY,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AAG/E,eAAO,MAAM,MAAM,kFAA6E,CAAC;AAGjG,wBAAgB,eAAe,IAAI,SAAS,CAE3C;AAED,wBAAgB,0BAA0B,IAAI,oBAAoB,CAEjE"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC/E,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AACzD,OAAO,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAEvE,YAAY,EAAE,QAAQ,EAAE,MAAM,wBAAwB,CAAC;AACvD,YAAY,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC/E,YAAY,EAAE,WAAW,EAAE,MAAM,2BAA2B,CAAC;AAC7D,YAAY,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AACrD,YAAY,EACV,0BAA0B,EAC1B,eAAe,GAChB,MAAM,+BAA+B,CAAC;AACvC,YAAY,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AACjE,YAAY,EACV,mBAAmB,EACnB,oBAAoB,GACrB,MAAM,mCAAmC,CAAC;AAE3C,YAAY,EACV,aAAa,EACb,eAAe,EACf,cAAc,EACd,mBAAmB,GACpB,MAAM,6BAA6B,CAAC;AACrC,YAAY,EACV,eAAe,EACf,gBAAgB,EAChB,YAAY,EACZ,aAAa,GACd,MAAM,+BAA+B,CAAC;AACvC,YAAY,EACV,YAAY,EACZ,iBAAiB,EACjB,kBAAkB,EAClB,uBAAuB,GACxB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EACV,SAAS,EACT,sBAAsB,EACtB,UAAU,EACV,oBAAoB,EACpB,kBAAkB,EAClB,aAAa,EACb,mBAAmB,EACnB,cAAc,GACf,MAAM,yBAAyB,CAAC;AACjC,YAAY,EAAE,eAAe,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACxF,YAAY,EAAE,QAAQ,EAAE,MAAM,wBAAwB,CAAC;AACvD,YAAY,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AAG/E,eAAO,MAAM,MAAM,kFAA6E,CAAC;AAGjG,wBAAgB,eAAe,IAAI,SAAS,CAE3C;AAED,wBAAgB,0BAA0B,IAAI,oBAAoB,CAEjE"}
import type { HybridObject } from "react-native-nitro-modules";
/** Result of measuring an object in 3D space */
export interface ARObjectMeasurement {
    /** Largest extent in meters. NOTE(review): the native implementation sorts the three principal-axis extents descending, so width/height/depth are sorted sizes, not world X/Y/Z axes — confirm before relying on axis alignment */
    width: number;
    /** Middle extent in meters (see note on `width`) */
    height: number;
    /** Smallest extent in meters (see note on `width`) */
    depth: number;
    /** Center position in world space [x, y, z] */
    center: number[];
    /** Orientation axes (3x3 matrix as 9 values, row-major principal axes) */
    axes: number[];
    /** Confidence score 0-1 (derived from point count and spread) */
    confidence: number;
    /** Number of 3D points used for measurement */
    pointCount: number;
}
/** Result of object segmentation */
export interface ARSegmentationResult extends HybridObject<{
    ios: "swift";
}> {
    /** Whether segmentation was successful */
    readonly success: boolean;
    /** Bounding box in normalized coordinates [x, y, width, height] */
    readonly boundingBox: number[];
    /** Number of pixels in the mask */
    readonly maskPixelCount: number;
    /** Get 3D points within the segmented region (requires LiDAR) — flat [x, y, z, …] world-space array */
    getDepthPoints(): number[];
    /** Measure the segmented object; undefined when too few depth points are available */
    measure(): ARObjectMeasurement | undefined;
}
//# sourceMappingURL=ARObjectMeasurement.nitro.d.ts.map