react-native-camera-vision-pixel-colors 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CameraVisionPixelColors.podspec +32 -0
- package/LICENSE +21 -0
- package/README.md +190 -0
- package/android/CMakeLists.txt +32 -0
- package/android/build.gradle +151 -0
- package/android/fix-prefab.gradle +51 -0
- package/android/gradle.properties +5 -0
- package/android/src/main/AndroidManifest.xml +2 -0
- package/android/src/main/cpp/cpp-adapter.cpp +6 -0
- package/android/src/main/java/com/cameravisionpixelcolors/CameraVisionPixelColorsPackage.kt +24 -0
- package/android/src/main/java/com/cameravisionpixelcolors/PixelAnalyzerEngine.kt +256 -0
- package/android/src/main/java/com/cameravisionpixelcolors/PixelColorsFrameProcessor.kt +40 -0
- package/android/src/main/java/com/cameravisionpixelcolors/YuvToBitmapConverter.kt +33 -0
- package/android/src/main/java/com/margelo/nitro/cameravisionpixelcolors/HybridCameraVisionPixelColors.kt +50 -0
- package/app.plugin.js +1 -0
- package/ios/Bridge.h +8 -0
- package/ios/HybridCameraVisionPixelColors.swift +53 -0
- package/ios/PixelAnalyzerEngine.swift +346 -0
- package/ios/PixelColorsFrameProcessor.m +5 -0
- package/ios/PixelColorsFrameProcessor.swift +50 -0
- package/lib/commonjs/index.js +28 -0
- package/lib/commonjs/index.js.map +1 -0
- package/lib/commonjs/package.json +1 -0
- package/lib/commonjs/specs/camera-vision-pixel-colors.nitro.js +6 -0
- package/lib/commonjs/specs/camera-vision-pixel-colors.nitro.js.map +1 -0
- package/lib/module/index.js +23 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/specs/camera-vision-pixel-colors.nitro.js +4 -0
- package/lib/module/specs/camera-vision-pixel-colors.nitro.js.map +1 -0
- package/lib/typescript/src/index.d.ts +6 -0
- package/lib/typescript/src/index.d.ts.map +1 -0
- package/lib/typescript/src/specs/camera-vision-pixel-colors.nitro.d.ts +40 -0
- package/lib/typescript/src/specs/camera-vision-pixel-colors.nitro.d.ts.map +1 -0
- package/nitro.json +25 -0
- package/nitrogen/generated/.gitattributes +1 -0
- package/nitrogen/generated/android/CameraVisionPixelColors+autolinking.cmake +81 -0
- package/nitrogen/generated/android/CameraVisionPixelColors+autolinking.gradle +27 -0
- package/nitrogen/generated/android/CameraVisionPixelColorsOnLoad.cpp +44 -0
- package/nitrogen/generated/android/CameraVisionPixelColorsOnLoad.hpp +25 -0
- package/nitrogen/generated/android/c++/JHybridCameraVisionPixelColorsSpec.cpp +90 -0
- package/nitrogen/generated/android/c++/JHybridCameraVisionPixelColorsSpec.hpp +66 -0
- package/nitrogen/generated/android/c++/JImageData.hpp +66 -0
- package/nitrogen/generated/android/c++/JMotionResult.hpp +61 -0
- package/nitrogen/generated/android/c++/JPixelColorsResult.hpp +114 -0
- package/nitrogen/generated/android/c++/JRGBColor.hpp +65 -0
- package/nitrogen/generated/android/kotlin/com/margelo/nitro/cameravisionpixelcolors/CameraVisionPixelColorsOnLoad.kt +35 -0
- package/nitrogen/generated/android/kotlin/com/margelo/nitro/cameravisionpixelcolors/HybridCameraVisionPixelColorsSpec.kt +58 -0
- package/nitrogen/generated/android/kotlin/com/margelo/nitro/cameravisionpixelcolors/ImageData.kt +44 -0
- package/nitrogen/generated/android/kotlin/com/margelo/nitro/cameravisionpixelcolors/MotionResult.kt +41 -0
- package/nitrogen/generated/android/kotlin/com/margelo/nitro/cameravisionpixelcolors/PixelColorsResult.kt +50 -0
- package/nitrogen/generated/android/kotlin/com/margelo/nitro/cameravisionpixelcolors/RGBColor.kt +44 -0
- package/nitrogen/generated/ios/CameraVisionPixelColors+autolinking.rb +60 -0
- package/nitrogen/generated/ios/CameraVisionPixelColors-Swift-Cxx-Bridge.cpp +49 -0
- package/nitrogen/generated/ios/CameraVisionPixelColors-Swift-Cxx-Bridge.hpp +162 -0
- package/nitrogen/generated/ios/CameraVisionPixelColors-Swift-Cxx-Umbrella.hpp +59 -0
- package/nitrogen/generated/ios/CameraVisionPixelColorsAutolinking.mm +33 -0
- package/nitrogen/generated/ios/CameraVisionPixelColorsAutolinking.swift +26 -0
- package/nitrogen/generated/ios/c++/HybridCameraVisionPixelColorsSpecSwift.cpp +11 -0
- package/nitrogen/generated/ios/c++/HybridCameraVisionPixelColorsSpecSwift.hpp +99 -0
- package/nitrogen/generated/ios/swift/Func_void_PixelColorsResult.swift +47 -0
- package/nitrogen/generated/ios/swift/Func_void_std__exception_ptr.swift +47 -0
- package/nitrogen/generated/ios/swift/HybridCameraVisionPixelColorsSpec.swift +56 -0
- package/nitrogen/generated/ios/swift/HybridCameraVisionPixelColorsSpec_cxx.swift +146 -0
- package/nitrogen/generated/ios/swift/ImageData.swift +40 -0
- package/nitrogen/generated/ios/swift/MotionResult.swift +35 -0
- package/nitrogen/generated/ios/swift/PixelColorsResult.swift +81 -0
- package/nitrogen/generated/ios/swift/RGBColor.swift +40 -0
- package/nitrogen/generated/shared/c++/HybridCameraVisionPixelColorsSpec.cpp +21 -0
- package/nitrogen/generated/shared/c++/HybridCameraVisionPixelColorsSpec.hpp +67 -0
- package/nitrogen/generated/shared/c++/ImageData.hpp +91 -0
- package/nitrogen/generated/shared/c++/MotionResult.hpp +87 -0
- package/nitrogen/generated/shared/c++/PixelColorsResult.hpp +105 -0
- package/nitrogen/generated/shared/c++/RGBColor.hpp +91 -0
- package/package.json +143 -0
- package/plugin/withPixelColors.js +12 -0
- package/plugin/withPixelColorsAndroid.js +11 -0
- package/plugin/withPixelColorsIOS.js +11 -0
- package/src/index.ts +42 -0
- package/src/specs/camera-vision-pixel-colors.nitro.ts +40 -0
|
@@ -0,0 +1,346 @@
|
|
|
1
|
+
import Foundation
|
|
2
|
+
import CoreImage
|
|
3
|
+
import CoreVideo
|
|
4
|
+
import UIKit
|
|
5
|
+
import Accelerate
|
|
6
|
+
|
|
7
|
+
/// Native mirror of the JS `AnalysisOptions` object passed to the frame processor.
/// Parsed from the worklet's `options` dictionary by `PixelColorsFrameProcessor.parseOptions`.
struct AnalysisOptionsNative {
    /// When true, `fullAnalysis` also runs frame-to-frame motion detection.
    var enableMotionDetection: Bool = false
    /// Motion sensitivity in [0, 1]; scaled by 255 to compare per-pixel grayscale deltas.
    var motionThreshold: Float = 0.1
    /// Optional region of interest, expressed as fractions (0–1) of the frame
    /// width/height — converted to pixels by `calculateROI`.
    var roi: (x: Float, y: Float, width: Float, height: Float)?
}
|
|
12
|
+
|
|
13
|
+
/// Singleton engine that computes dominant/brightest colors (via a CoreImage
/// area histogram) and optional motion scores from camera pixel buffers.
///
/// Threading model: heavy analysis runs on the serial `gpuQueue`; the latest
/// finished result is published through `cacheQueue` so the synchronous
/// frame-processor path (`analyzeSync`) never blocks on the GPU work.
final class PixelAnalyzerEngine {
    /// Process-wide shared instance; the initializer is private.
    static let shared = PixelAnalyzerEngine()
    /// Serial queue running the full (histogram + motion) analysis.
    private let gpuQueue = DispatchQueue(label: "pixel.colors.gpu", qos: .userInitiated)
    /// Serial queue guarding all reads/writes of `cachedResult`.
    private let cacheQueue = DispatchQueue(label: "pixel.colors.cache")
    private let ciContext: CIContext
    /// Most recently completed analysis; returned as-is by `analyzeSync`.
    private var cachedResult: [String: Any] = [
        "uniqueColorCount": 0,
        "topColors": [[String: Int]](),
        "brightestColors": [[String: Int]]()
    ]
    /// Number of buckets requested from CIAreaHistogram.
    private let histogramBins: Int = 64
    /// Raw pixel extraction downscales so neither edge exceeds this (≈1080p).
    private let maxRawPixelDimension: Int = 1920

    // Motion detection state. Only mutated from `calculateMotion`, which is
    // invoked via `fullAnalysis` on the serial `gpuQueue`, so no extra locking
    // is required around these.
    private var previousGrayscale: [UInt8]?
    private var previousWidth: Int = 0
    private var previousHeight: Int = 0

    private init() {
        // Prefer the GPU-backed CoreImage renderer for histogram work.
        self.ciContext = CIContext(options: [.useSoftwareRenderer: false])
    }

    // Frame processor fast read (sync).
    /// Returns the last cached analysis result without doing any new work.
    /// Safe to call from the camera/worklet thread every frame.
    func analyzeSync() -> [String: Any] {
        var result: [String: Any] = [:]
        cacheQueue.sync {
            result = self.cachedResult
        }
        return result
    }

    // Async full analysis (GPU).
    /// Fire-and-forget: schedules a full analysis of `pixelBuffer` and updates
    /// the cache when done.
    /// NOTE(review): the CVPixelBuffer is captured and used asynchronously;
    /// camera pipelines typically recycle buffers, so confirm the caller's
    /// buffer lifetime (e.g. VisionCamera frame retention) is sufficient.
    func analyzeAsync(pixelBuffer: CVPixelBuffer, options: AnalysisOptionsNative = AnalysisOptionsNative()) {
        gpuQueue.async { [weak self] in
            guard let self = self else { return }
            let result = self.fullAnalysis(pixelBuffer: pixelBuffer, options: options)
            self.cacheQueue.async {
                self.cachedResult = result
            }
        }
    }

    // MARK: - ROI Calculation

    /// Converts a fractional ROI (0–1 coordinates) into a pixel-space CGRect.
    /// Width/height are clamped to at least 1 pixel; x/y are not clamped here
    /// (clamping to the buffer happens in `extractRawPixels`).
    private func calculateROI(config: (x: Float, y: Float, width: Float, height: Float), width: Int, height: Int) -> CGRect {
        let x = Int(config.x * Float(width))
        let y = Int(config.y * Float(height))
        let w = Int(config.width * Float(width))
        let h = Int(config.height * Float(height))
        return CGRect(x: x, y: y, width: max(1, w), height: max(1, h))
    }

    // MARK: - Motion Detection

    /// Computes a motion score as the fraction of pixels whose grayscale value
    /// changed by more than `threshold * 255` since the previous frame.
    /// Returns ["score": Double, "hasMotion": Bool]; the first frame (or any
    /// resolution change) resets state and reports zero motion.
    private func calculateMotion(pixelBuffer: CVPixelBuffer, threshold: Float) -> [String: Any] {
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        let totalPixels = width * height

        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
            return ["score": 0.0, "hasMotion": false]
        }

        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer)

        // Convert to grayscale
        var currentGrayscale = [UInt8](repeating: 0, count: totalPixels)

        if pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB {
            // BGRA/ARGB format: standard Rec.601 luma weights.
            // NOTE(review): the same B/G/R byte order is assumed for ARGB as
            // for BGRA here — for true 32ARGB the channel offsets differ;
            // confirm which formats the camera actually delivers.
            for y in 0..<height {
                let rowPtr = baseAddress.advanced(by: y * bytesPerRow).assumingMemoryBound(to: UInt8.self)
                for x in 0..<width {
                    let offset = x * 4
                    let b = Float(rowPtr[offset])
                    let g = Float(rowPtr[offset + 1])
                    let r = Float(rowPtr[offset + 2])
                    currentGrayscale[y * width + x] = UInt8(0.299 * r + 0.587 * g + 0.114 * b)
                }
            }
        } else {
            // Fallback for other formats - use first channel
            // NOTE(review): this still strides 4 bytes per pixel; for planar
            // YUV formats (1 byte/pixel luma) it samples every 4th pixel and
            // may read past the row — verify against the delivered format.
            for y in 0..<height {
                let rowPtr = baseAddress.advanced(by: y * bytesPerRow).assumingMemoryBound(to: UInt8.self)
                for x in 0..<width {
                    currentGrayscale[y * width + x] = rowPtr[x * 4]
                }
            }
        }

        // Compare with previous frame
        guard let previous = previousGrayscale,
              previousWidth == width,
              previousHeight == height else {
            // First frame - save and return zero motion
            previousGrayscale = currentGrayscale
            previousWidth = width
            previousHeight = height
            return ["score": 0.0, "hasMotion": false]
        }

        // Calculate motion using vDSP for performance
        var current = currentGrayscale.map { Float($0) }
        var prev = previous.map { Float($0) }
        var diff = [Float](repeating: 0, count: totalPixels)

        // Calculate absolute difference
        vDSP_vsub(prev, 1, current, 1, &diff, 1, vDSP_Length(totalPixels))
        vDSP_vabs(diff, 1, &diff, 1, vDSP_Length(totalPixels))

        // Count pixels exceeding threshold
        let thresholdValue = threshold * 255
        var changedCount: Float = 0
        for value in diff {
            if value > thresholdValue {
                changedCount += 1
            }
        }

        let score = Double(changedCount) / Double(totalPixels)

        // Swap buffers
        previousGrayscale = currentGrayscale
        previousWidth = width
        previousHeight = height

        // NOTE(review): `threshold` does double duty here — first as a
        // per-pixel intensity delta, then as the changed-area fraction that
        // decides `hasMotion`. Confirm this matches the Android engine.
        return ["score": score, "hasMotion": score > Double(threshold)]
    }

    // MARK: - Raw Pixel Extraction

    /// Copies (and BGRA→RGBA converts) pixels out of the buffer, optionally
    /// cropped to `roi` (pixel-space rect) and nearest-neighbor downscaled so
    /// neither edge exceeds `maxRawPixelDimension`. Returns nil if the buffer
    /// has no readable base address.
    /// NOTE(review): assumes a 4-bytes-per-pixel interleaved source format,
    /// and is not referenced by any visible code path in this file — possibly
    /// dead code or reserved for a future API; confirm before removing.
    private func extractRawPixels(pixelBuffer: CVPixelBuffer, roi: CGRect?) -> Data? {
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

        var width = CVPixelBufferGetWidth(pixelBuffer)
        var height = CVPixelBufferGetHeight(pixelBuffer)

        // Apply ROI if provided
        let extractRect: CGRect
        if let roi = roi {
            // Clamp the ROI to the buffer bounds.
            extractRect = CGRect(
                x: max(0, min(Int(roi.origin.x), width - 1)),
                y: max(0, min(Int(roi.origin.y), height - 1)),
                width: max(1, min(Int(roi.width), width - Int(roi.origin.x))),
                height: max(1, min(Int(roi.height), height - Int(roi.origin.y)))
            )
            width = Int(extractRect.width)
            height = Int(extractRect.height)
        } else {
            extractRect = CGRect(x: 0, y: 0, width: width, height: height)
        }

        // Scale down if exceeds 1080p
        var scale: Float = 1.0
        if width > maxRawPixelDimension || height > maxRawPixelDimension {
            let widthScale = Float(maxRawPixelDimension) / Float(width)
            let heightScale = Float(maxRawPixelDimension) / Float(height)
            scale = min(widthScale, heightScale)
        }

        let outputWidth = Int(Float(width) * scale)
        let outputHeight = Int(Float(height) * scale)

        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
            return nil
        }

        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let srcWidth = CVPixelBufferGetWidth(pixelBuffer)

        // Extract RGBA data
        var rgbaData = Data(capacity: outputWidth * outputHeight * 4)

        for outY in 0..<outputHeight {
            // Nearest-neighbor source row for this output row.
            let srcY = Int(extractRect.origin.y) + Int(Float(outY) / scale)
            let rowPtr = baseAddress.advanced(by: srcY * bytesPerRow).assumingMemoryBound(to: UInt8.self)

            for outX in 0..<outputWidth {
                let srcX = Int(extractRect.origin.x) + Int(Float(outX) / scale)
                let offset = srcX * 4

                // BGRA -> RGBA conversion
                let b = rowPtr[offset]
                let g = rowPtr[offset + 1]
                let r = rowPtr[offset + 2]
                let a = rowPtr[offset + 3]

                rgbaData.append(r)
                rgbaData.append(g)
                rgbaData.append(b)
                rgbaData.append(a)
            }
        }

        return rgbaData
    }

    // Analyze raw image data
    /// Histogram analysis for a caller-supplied RGBA byte buffer (used by the
    /// Nitro `analyzeImageAsync` path). Returns an empty result if the bytes
    /// cannot be wrapped in a CGImage.
    func analyzeImageData(width: Int, height: Int, data: Data) -> [String: Any] {
        guard let cgImage = createCGImage(width: width, height: height, data: data) else {
            return [
                "uniqueColorCount": 0,
                "topColors": [[String: Int]](),
                "brightestColors": [[String: Int]]()
            ]
        }
        return analyzeFromCGImage(cgImage)
    }

    /// Wraps tightly-packed 8-bit RGBA bytes in a CGImage without copying
    /// (backed by a CGDataProvider over `data`).
    private func createCGImage(width: Int, height: Int, data: Data) -> CGImage? {
        let bytesPerPixel = 4
        let bytesPerRow = width * bytesPerPixel
        let colorSpace = CGColorSpaceCreateDeviceRGB()

        guard let provider = CGDataProvider(data: data as CFData) else { return nil }

        return CGImage(
            width: width,
            height: height,
            bitsPerComponent: 8,
            bitsPerPixel: 32,
            bytesPerRow: bytesPerRow,
            space: colorSpace,
            bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue),
            provider: provider,
            decode: nil,
            shouldInterpolate: false,
            intent: .defaultIntent
        )
    }

    /// Runs CIAreaHistogram over the whole image and reduces the rendered
    /// bins into the result dictionary. Returns an empty result if the
    /// filter cannot be constructed.
    private func analyzeFromCGImage(_ cgImage: CGImage) -> [String: Any] {
        let ciImage = CIImage(cgImage: cgImage)
        guard let histogram = CIFilter(name: "CIAreaHistogram",
                                       parameters: ["inputImage": ciImage,
                                                    "inputCount": histogramBins,
                                                    "inputExtent": CIVector(cgRect: ciImage.extent),
                                                    "inputScale": 1.0])?.outputImage else {
            return [
                "uniqueColorCount": 0,
                "topColors": [[String: Int]](),
                "brightestColors": [[String: Int]]()
            ]
        }

        // Render the 1-row histogram image into an integer bitmap.
        // NOTE(review): `bitmap` holds histogramBins * 4 UInt32s but the
        // rendered row is histogramBins RGBA pixels — the buffer is larger
        // than strictly required and `rowBytes` is 4x the row size; confirm
        // the intended pixel format (.RGBA32) and sizing.
        let bitmapSize = histogramBins * 4
        var bitmap = [UInt32](repeating: 0, count: bitmapSize)
        ciContext.render(histogram,
                         toBitmap: &bitmap,
                         rowBytes: bitmapSize * MemoryLayout<UInt32>.size,
                         bounds: CGRect(x: 0, y: 0, width: histogramBins, height: 1),
                         format: .RGBA32,
                         colorSpace: CGColorSpaceCreateDeviceRGB())

        return reduceHistogram(bitmap)
    }

    /// Full per-frame analysis: optional ROI crop, color histogram, optional
    /// motion detection. Runs on `gpuQueue` (see `analyzeAsync`).
    private func fullAnalysis(pixelBuffer: CVPixelBuffer, options: AnalysisOptionsNative = AnalysisOptionsNative()) -> [String: Any] {
        var ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)

        // Apply ROI if configured
        var roiRect: CGRect?
        if let roi = options.roi {
            roiRect = calculateROI(config: roi, width: width, height: height)
            ciImage = ciImage.cropped(to: roiRect!)
        }

        guard let histogram = CIFilter(name: "CIAreaHistogram",
                                       parameters: ["inputImage": ciImage,
                                                    "inputCount": histogramBins,
                                                    "inputExtent": CIVector(cgRect: ciImage.extent),
                                                    "inputScale": 1.0])?.outputImage else {
            return [
                "uniqueColorCount": 0,
                "topColors": [[String: Int]](),
                "brightestColors": [[String: Int]]()
            ]
        }

        // Same rendering scheme as analyzeFromCGImage — see the sizing note there.
        let bitmapSize = histogramBins * 4
        var bitmap = [UInt32](repeating: 0, count: bitmapSize)
        ciContext.render(histogram,
                         toBitmap: &bitmap,
                         rowBytes: bitmapSize * MemoryLayout<UInt32>.size,
                         bounds: CGRect(x: 0, y: 0, width: histogramBins, height: 1),
                         format: .RGBA32,
                         colorSpace: CGColorSpaceCreateDeviceRGB())

        var result = reduceHistogram(bitmap)

        // Add ROI applied flag
        if options.roi != nil {
            result["roiApplied"] = true
        }

        // Add motion detection if enabled. Note: motion is computed on the
        // full buffer, not the ROI crop.
        if options.enableMotionDetection {
            let motionResult = calculateMotion(pixelBuffer: pixelBuffer, threshold: options.motionThreshold)
            result["motion"] = motionResult
        }

        return result
    }

    /// Reduces rendered histogram bins (flat RGBA quadruplets, alpha used as
    /// the bin count) into: the count of non-empty bins, the 3 most frequent
    /// colors, and the 3 brightest colors (Rec.601 luma).
    private func reduceHistogram(_ data: [UInt32]) -> [String: Any] {
        struct Stat { let r: Int; let g: Int; let b: Int; let count: Int; let brightness: Float }
        var stats: [Stat] = []
        for i in stride(from: 0, to: data.count, by: 4) {
            let r = Int(data[i])
            let g = Int(data[i + 1])
            let b = Int(data[i + 2])
            let count = Int(data[i + 3])
            // Skip empty bins.
            if count == 0 { continue }
            let brightness = 0.299 * Float(r) + 0.587 * Float(g) + 0.114 * Float(b)
            stats.append(Stat(r: r, g: g, b: b, count: count, brightness: brightness))
        }

        let top = stats.sorted { $0.count > $1.count }.prefix(3).map { ["r": $0.r, "g": $0.g, "b": $0.b] }
        let bright = stats.sorted { $0.brightness > $1.brightness }.prefix(3).map { ["r": $0.r, "g": $0.g, "b": $0.b] }

        return [
            "uniqueColorCount": stats.count,
            "topColors": Array(top),
            "brightestColors": Array(bright)
        ]
    }
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import VisionCamera
|
|
2
|
+
import CoreMedia
|
|
3
|
+
|
|
4
|
+
@objc(PixelColorsFrameProcessor)
public final class PixelColorsFrameProcessor: FrameProcessorPlugin {
    public override init(proxy: VisionCameraProxyHolder, options: [AnyHashable : Any]! = [:]) {
        super.init(proxy: proxy, options: options)
    }

    /// Per-frame entry point. Schedules the asynchronous GPU analysis for the
    /// current frame (when a pixel buffer is available) and always answers
    /// synchronously with the most recent cached result.
    public override func callback(_ frame: Frame, withArguments arguments: [AnyHashable : Any]?) -> Any {
        let engine = PixelAnalyzerEngine.shared
        if let pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer) {
            // Fire & forget — the result lands in the engine's cache later.
            engine.analyzeAsync(pixelBuffer: pixelBuffer, options: parseOptions(from: arguments))
        }
        return engine.analyzeSync()
    }

    /// Translates the worklet's `options` dictionary into the native options
    /// struct. Unknown or missing keys fall back to the struct's defaults.
    private func parseOptions(from arguments: [AnyHashable: Any]?) -> AnalysisOptionsNative {
        var parsed = AnalysisOptionsNative()

        guard let dict = arguments?["options"] as? [String: Any] else {
            return parsed
        }

        if let motionFlag = dict["enableMotionDetection"] as? Bool {
            parsed.enableMotionDetection = motionFlag
        }
        if let thresholdValue = dict["motionThreshold"] as? Double {
            parsed.motionThreshold = Float(thresholdValue)
        }
        if let region = dict["roi"] as? [String: Any],
           let originX = region["x"] as? Double,
           let originY = region["y"] as? Double,
           let regionWidth = region["width"] as? Double,
           let regionHeight = region["height"] as? Double {
            parsed.roi = (x: Float(originX),
                          y: Float(originY),
                          width: Float(regionWidth),
                          height: Float(regionHeight))
        }

        return parsed
    }
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.CameraVisionPixelColors = void 0;
|
|
7
|
+
exports.analyzePixelColors = analyzePixelColors;
|
|
8
|
+
var _reactNativeNitroModules = require("react-native-nitro-modules");
|
|
9
|
+
var _reactNativeVisionCamera = require("react-native-vision-camera");
|
|
10
|
+
// Nitro HybridObject for async image analysis
|
|
11
|
+
const CameraVisionPixelColors = exports.CameraVisionPixelColors = _reactNativeNitroModules.NitroModules.createHybridObject('CameraVisionPixelColors');
|
|
12
|
+
|
|
13
|
+
// Frame Processor plugin for real-time analysis
|
|
14
|
+
const plugin = _reactNativeVisionCamera.VisionCameraProxy.initFrameProcessorPlugin('pixelColors', {});
|
|
15
|
+
function analyzePixelColors(frame, options) {
|
|
16
|
+
'worklet';
|
|
17
|
+
|
|
18
|
+
if (!plugin) {
|
|
19
|
+
throw new Error('pixelColors frame processor plugin is not available');
|
|
20
|
+
}
|
|
21
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
22
|
+
return plugin.call(frame, {
|
|
23
|
+
options
|
|
24
|
+
});
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
// Re-export types
|
|
28
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["_reactNativeNitroModules","require","_reactNativeVisionCamera","CameraVisionPixelColors","exports","NitroModules","createHybridObject","plugin","VisionCameraProxy","initFrameProcessorPlugin","analyzePixelColors","frame","options","Error","call"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;;;;;;AAAA,IAAAA,wBAAA,GAAAC,OAAA;AACA,IAAAC,wBAAA,GAAAD,OAAA;AAWA;AACO,MAAME,uBAAuB,GAAAC,OAAA,CAAAD,uBAAA,GAClCE,qCAAY,CAACC,kBAAkB,CAC7B,yBACF,CAAC;;AAEH;AACA,MAAMC,MAAM,GAAGC,0CAAiB,CAACC,wBAAwB,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;AAErE,SAASC,kBAAkBA,CAChCC,KAAY,EACZC,OAAyB,EACN;EACnB,SAAS;;EACT,IAAI,CAACL,MAAM,EAAE;IACX,MAAM,IAAIM,KAAK,CAAC,qDAAqD,CAAC;EACxE;EACA;EACA,OAAON,MAAM,CAACO,IAAI,CAACH,KAAK,EAAE;IAAEC;EAAQ,CAAQ,CAAC;AAC/C;;AAEA","ignoreList":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"commonjs"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":[],"sourceRoot":"../../../src","sources":["specs/camera-vision-pixel-colors.nitro.ts"],"mappings":"","ignoreList":[]}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
import { NitroModules } from 'react-native-nitro-modules';
|
|
4
|
+
import { VisionCameraProxy } from 'react-native-vision-camera';
|
|
5
|
+
// Nitro HybridObject for async image analysis
|
|
6
|
+
export const CameraVisionPixelColors = NitroModules.createHybridObject('CameraVisionPixelColors');
|
|
7
|
+
|
|
8
|
+
// Frame Processor plugin for real-time analysis
|
|
9
|
+
const plugin = VisionCameraProxy.initFrameProcessorPlugin('pixelColors', {});
|
|
10
|
+
export function analyzePixelColors(frame, options) {
|
|
11
|
+
'worklet';
|
|
12
|
+
|
|
13
|
+
if (!plugin) {
|
|
14
|
+
throw new Error('pixelColors frame processor plugin is not available');
|
|
15
|
+
}
|
|
16
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
17
|
+
return plugin.call(frame, {
|
|
18
|
+
options
|
|
19
|
+
});
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
// Re-export types
|
|
23
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["NitroModules","VisionCameraProxy","CameraVisionPixelColors","createHybridObject","plugin","initFrameProcessorPlugin","analyzePixelColors","frame","options","Error","call"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;AAAA,SAASA,YAAY,QAAQ,4BAA4B;AACzD,SAASC,iBAAiB,QAAoB,4BAA4B;AAW1E;AACA,OAAO,MAAMC,uBAAuB,GAClCF,YAAY,CAACG,kBAAkB,CAC7B,yBACF,CAAC;;AAEH;AACA,MAAMC,MAAM,GAAGH,iBAAiB,CAACI,wBAAwB,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;AAE5E,OAAO,SAASC,kBAAkBA,CAChCC,KAAY,EACZC,OAAyB,EACN;EACnB,SAAS;;EACT,IAAI,CAACJ,MAAM,EAAE;IACX,MAAM,IAAIK,KAAK,CAAC,qDAAqD,CAAC;EACxE;EACA;EACA,OAAOL,MAAM,CAACM,IAAI,CAACH,KAAK,EAAE;IAAEC;EAAQ,CAAQ,CAAC;AAC/C;;AAEA","ignoreList":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":[],"sourceRoot":"../../../src","sources":["specs/camera-vision-pixel-colors.nitro.ts"],"mappings":"","ignoreList":[]}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { type Frame } from 'react-native-vision-camera';
import type { CameraVisionPixelColors as CameraVisionPixelColorsSpec, PixelColorsResult, RGBColor, ImageData, ROIConfig, AnalysisOptions, MotionResult } from './specs/camera-vision-pixel-colors.nitro';
/** Nitro HybridObject exposing asynchronous still-image analysis (`analyzeImageAsync`). */
export declare const CameraVisionPixelColors: CameraVisionPixelColorsSpec;
/**
 * Worklet: analyze the colors of a VisionCamera frame in real time via the
 * `pixelColors` frame processor plugin.
 * Throws an Error if the native plugin is not available.
 */
export declare function analyzePixelColors(frame: Frame, options?: AnalysisOptions): PixelColorsResult;
export type { PixelColorsResult, RGBColor, ImageData, ROIConfig, AnalysisOptions, MotionResult, };
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAqB,KAAK,KAAK,EAAE,MAAM,4BAA4B,CAAA;AAC1E,OAAO,KAAK,EACV,uBAAuB,IAAI,2BAA2B,EACtD,iBAAiB,EACjB,QAAQ,EACR,SAAS,EACT,SAAS,EACT,eAAe,EACf,YAAY,EACb,MAAM,0CAA0C,CAAA;AAGjD,eAAO,MAAM,uBAAuB,6BAGjC,CAAA;AAKH,wBAAgB,kBAAkB,CAChC,KAAK,EAAE,KAAK,EACZ,OAAO,CAAC,EAAE,eAAe,GACxB,iBAAiB,CAOnB;AAGD,YAAY,EACV,iBAAiB,EACjB,QAAQ,EACR,SAAS,EACT,SAAS,EACT,eAAe,EACf,YAAY,GACb,CAAA"}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { type HybridObject } from 'react-native-nitro-modules';
/** A single color sample (presumably 0–255 per channel — matches the native engine's byte values). */
export type RGBColor = {
    r: number;
    g: number;
    b: number;
};
/** Region of interest, expressed as fractions (0–1) of the frame width/height. */
export type ROIConfig = {
    x: number;
    y: number;
    width: number;
    height: number;
};
/** Options accepted by `analyzePixelColors`; all fields optional. */
export type AnalysisOptions = {
    enableMotionDetection?: boolean;
    motionThreshold?: number;
    roi?: ROIConfig;
};
/** Frame-to-frame motion: `score` is the fraction of changed pixels. */
export type MotionResult = {
    score: number;
    hasMotion: boolean;
};
/** Result of a color analysis pass; `motion`/`roiApplied` appear only when requested. */
export type PixelColorsResult = {
    uniqueColorCount: number;
    topColors: RGBColor[];
    brightestColors: RGBColor[];
    motion?: MotionResult;
    roiApplied?: boolean;
};
/** Raw RGBA image bytes plus dimensions, for still-image analysis. */
export type ImageData = {
    width: number;
    height: number;
    data: ArrayBuffer;
};
/** Nitro HybridObject spec — implemented in Swift on iOS and Kotlin on Android. */
export interface CameraVisionPixelColors extends HybridObject<{
    ios: 'swift';
    android: 'kotlin';
}> {
    analyzeImageAsync(image: ImageData): Promise<PixelColorsResult>;
}
//# sourceMappingURL=camera-vision-pixel-colors.nitro.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"camera-vision-pixel-colors.nitro.d.ts","sourceRoot":"","sources":["../../../../src/specs/camera-vision-pixel-colors.nitro.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,YAAY,EAAE,MAAM,4BAA4B,CAAA;AAE9D,MAAM,MAAM,QAAQ,GAAG;IAAE,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAA;CAAE,CAAA;AAE1D,MAAM,MAAM,SAAS,GAAG;IACtB,CAAC,EAAE,MAAM,CAAA;IACT,CAAC,EAAE,MAAM,CAAA;IACT,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf,CAAA;AAED,MAAM,MAAM,eAAe,GAAG;IAC5B,qBAAqB,CAAC,EAAE,OAAO,CAAA;IAC/B,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,GAAG,CAAC,EAAE,SAAS,CAAA;CAChB,CAAA;AAED,MAAM,MAAM,YAAY,GAAG;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,SAAS,EAAE,OAAO,CAAA;CACnB,CAAA;AAED,MAAM,MAAM,iBAAiB,GAAG;IAC9B,gBAAgB,EAAE,MAAM,CAAA;IACxB,SAAS,EAAE,QAAQ,EAAE,CAAA;IACrB,eAAe,EAAE,QAAQ,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,YAAY,CAAA;IACrB,UAAU,CAAC,EAAE,OAAO,CAAA;CACrB,CAAA;AAED,MAAM,MAAM,SAAS,GAAG;IACtB,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,WAAW,CAAA;CAClB,CAAA;AAED,MAAM,WAAW,uBACf,SAAQ,YAAY,CAAC;IAAE,GAAG,EAAE,OAAO,CAAC;IAAC,OAAO,EAAE,QAAQ,CAAA;CAAE,CAAC;IACzD,iBAAiB,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAA;CAChE"}
|
package/nitro.json
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "https://nitro.margelo.com/nitro.schema.json",
|
|
3
|
+
"cxxNamespace": [
|
|
4
|
+
"cameravisionpixelcolors"
|
|
5
|
+
],
|
|
6
|
+
"ios": {
|
|
7
|
+
"iosModuleName": "CameraVisionPixelColors"
|
|
8
|
+
},
|
|
9
|
+
"android": {
|
|
10
|
+
"androidNamespace": [
|
|
11
|
+
"cameravisionpixelcolors"
|
|
12
|
+
],
|
|
13
|
+
"androidCxxLibName": "CameraVisionPixelColors"
|
|
14
|
+
},
|
|
15
|
+
"autolinking": {
|
|
16
|
+
"CameraVisionPixelColors": {
|
|
17
|
+
"swift": "HybridCameraVisionPixelColors",
|
|
18
|
+
"kotlin": "HybridCameraVisionPixelColors"
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"ignorePaths": [
|
|
22
|
+
"**/node_modules",
|
|
23
|
+
"**/docs"
|
|
24
|
+
]
|
|
25
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
** linguist-generated=true
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
#
# CameraVisionPixelColors+autolinking.cmake
# This file was generated by nitrogen. DO NOT MODIFY THIS FILE.
# https://github.com/mrousavy/nitro
# Copyright © Marc Rousavy @ Margelo
#

# This is a CMake file that adds all files generated by Nitrogen
# to the current CMake project.
#
# To use it, add this to your CMakeLists.txt:
# ```cmake
# include(${CMAKE_SOURCE_DIR}/../nitrogen/generated/android/CameraVisionPixelColors+autolinking.cmake)
# ```

# Define a flag to check if we are building properly
add_definitions(-DBUILDING_CAMERAVISIONPIXELCOLORS_WITH_GENERATED_CMAKE_PROJECT)

# Enable Raw Props parsing in react-native (for Nitro Views)
add_definitions(-DRN_SERIALIZABLE_STATE)

# Add all headers that were generated by Nitrogen
# (paths are relative to the including android/CMakeLists.txt)
include_directories(
  "../nitrogen/generated/shared/c++"
  "../nitrogen/generated/android/c++"
  "../nitrogen/generated/android/"
)

# Add all .cpp sources that were generated by Nitrogen
target_sources(
  # CMake project name (Android C++ library name)
  CameraVisionPixelColors PRIVATE
  # Autolinking Setup
  ../nitrogen/generated/android/CameraVisionPixelColorsOnLoad.cpp
  # Shared Nitrogen C++ sources
  ../nitrogen/generated/shared/c++/HybridCameraVisionPixelColorsSpec.cpp
  # Android-specific Nitrogen C++ sources
  ../nitrogen/generated/android/c++/JHybridCameraVisionPixelColorsSpec.cpp
)

# From node_modules/react-native/ReactAndroid/cmake-utils/folly-flags.cmake
# Used in node_modules/react-native/ReactAndroid/cmake-utils/ReactNative-application.cmake
target_compile_definitions(
  CameraVisionPixelColors PRIVATE
  -DFOLLY_NO_CONFIG=1
  -DFOLLY_HAVE_CLOCK_GETTIME=1
  -DFOLLY_USE_LIBCPP=1
  -DFOLLY_CFG_NO_COROUTINES=1
  -DFOLLY_MOBILE=1
  -DFOLLY_HAVE_RECVMMSG=1
  -DFOLLY_HAVE_PTHREAD=1
  # Once we target android-23 above, we can comment
  # the following line. NDK uses GNU style stderror_r() after API 23.
  -DFOLLY_HAVE_XSI_STRERROR_R=1
)

# Add all libraries required by the generated specs
find_package(fbjni REQUIRED) # <-- Used for communication between Java <-> C++
find_package(ReactAndroid REQUIRED) # <-- Used to set up React Native bindings (e.g. CallInvoker/TurboModule)
find_package(react-native-nitro-modules REQUIRED) # <-- Used to create all HybridObjects and use the Nitro core library

# Link all libraries together
target_link_libraries(
  CameraVisionPixelColors
  fbjni::fbjni # <-- Facebook C++ JNI helpers
  ReactAndroid::jsi # <-- RN: JSI
  react-native-nitro-modules::NitroModules # <-- NitroModules Core :)
)

# Link react-native (different prefab between RN 0.75 and RN 0.76)
if(ReactAndroid_VERSION_MINOR GREATER_EQUAL 76)
  target_link_libraries(
    CameraVisionPixelColors
    ReactAndroid::reactnative # <-- RN: Native Modules umbrella prefab
  )
else()
  target_link_libraries(
    CameraVisionPixelColors
    ReactAndroid::react_nativemodule_core # <-- RN: TurboModules Core
  )
endif()
|