react-native-neuroscan 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +20 -0
- package/Neuroscan.podspec +29 -0
- package/README.md +233 -0
- package/android/app/build/generated/source/codegen/RCTAppDependencyProvider.h +25 -0
- package/android/app/build/generated/source/codegen/RCTAppDependencyProvider.mm +35 -0
- package/android/app/build/generated/source/codegen/RCTModuleProviders.h +16 -0
- package/android/app/build/generated/source/codegen/RCTModuleProviders.mm +51 -0
- package/android/app/build/generated/source/codegen/RCTModulesConformingToProtocolsProvider.h +18 -0
- package/android/app/build/generated/source/codegen/RCTModulesConformingToProtocolsProvider.mm +54 -0
- package/android/app/build/generated/source/codegen/RCTThirdPartyComponentsProvider.h +16 -0
- package/android/app/build/generated/source/codegen/RCTThirdPartyComponentsProvider.mm +30 -0
- package/android/app/build/generated/source/codegen/ReactAppDependencyProvider.podspec +34 -0
- package/android/app/build/generated/source/codegen/java/com/facebook/fbreact/specs/NativeNeuroscanSpec.java +67 -0
- package/android/app/build/generated/source/codegen/java/com/facebook/react/viewmanagers/NeuroScanCameraViewManagerDelegate.java +72 -0
- package/android/app/build/generated/source/codegen/java/com/facebook/react/viewmanagers/NeuroScanCameraViewManagerInterface.java +29 -0
- package/android/app/build/generated/source/codegen/jni/CMakeLists.txt +36 -0
- package/android/app/build/generated/source/codegen/jni/RNNeuroScanSpec-generated.cpp +74 -0
- package/android/app/build/generated/source/codegen/jni/RNNeuroScanSpec.h +31 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/ComponentDescriptors.cpp +22 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/ComponentDescriptors.h +24 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/EventEmitters.cpp +62 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/EventEmitters.h +54 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/Props.cpp +32 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/Props.h +34 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/RNNeuroScanSpecJSI-generated.cpp +80 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/RNNeuroScanSpecJSI.h +134 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/ShadowNodes.cpp +17 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/ShadowNodes.h +32 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/States.cpp +16 -0
- package/android/app/build/generated/source/codegen/jni/react/renderer/components/RNNeuroScanSpec/States.h +29 -0
- package/android/build.gradle +73 -0
- package/android/src/main/AndroidManifest.xml +4 -0
- package/android/src/main/java/com/neuroscan/NeuroscanModule.kt +342 -0
- package/android/src/main/java/com/neuroscan/NeuroscanPackage.kt +36 -0
- package/ios/DocumentScannerController.swift +115 -0
- package/ios/NeuroScanImpl.swift +226 -0
- package/ios/Neuroscan.h +5 -0
- package/ios/Neuroscan.mm +118 -0
- package/lib/module/NativeNeuroscan.js +5 -0
- package/lib/module/NativeNeuroscan.js.map +1 -0
- package/lib/module/index.js +4 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/package.json +1 -0
- package/lib/typescript/package.json +1 -0
- package/lib/typescript/src/NativeNeuroscan.d.ts +47 -0
- package/lib/typescript/src/NativeNeuroscan.d.ts.map +1 -0
- package/lib/typescript/src/index.d.ts +3 -0
- package/lib/typescript/src/index.d.ts.map +1 -0
- package/package.json +126 -0
- package/src/NativeNeuroscan.ts +52 -0
- package/src/index.tsx +2 -0
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
import Foundation
|
|
2
|
+
import UIKit
|
|
3
|
+
import VisionKit
|
|
4
|
+
|
|
5
|
+
@objcMembers
public class DocumentScannerController: NSObject, VNDocumentCameraViewControllerDelegate {

    /// Maximum number of scanned pages to persist (`Int.max` = unlimited).
    private let maxPages: Int
    /// Directory that receives one JPEG file per scanned page.
    private let tempDirectory: URL
    /// Invoked once per scan session with the saved page URLs, or with an
    /// error. Cancellation is signalled as domain "NeuroScan", code -1.
    private let completion: (Result<[URL], Error>) -> Void
    /// Strong reference keeps the presented camera controller (and therefore
    /// this delegate) alive for the duration of the scan session.
    private var viewController: VNDocumentCameraViewController?

    init(maxPages: Int, tempDirectory: URL, completion: @escaping (Result<[URL], Error>) -> Void) {
        self.maxPages = maxPages
        self.tempDirectory = tempDirectory
        self.completion = completion
        super.init()
    }

    /// Presents the VisionKit document camera on top of the current view
    /// controller hierarchy; fails the completion if no presenter is found.
    func present() {
        let vc = VNDocumentCameraViewController()
        vc.delegate = self
        viewController = vc

        guard let rootVC = Self.topViewController() else {
            completion(.failure(NSError(
                domain: "NeuroScan",
                code: -2,
                userInfo: [NSLocalizedDescriptionKey: "No root view controller found"]
            )))
            return
        }

        rootVC.present(vc, animated: true)
    }

    // MARK: - VNDocumentCameraViewControllerDelegate

    public func documentCameraViewController(
        _ controller: VNDocumentCameraViewController,
        didFinishWith scan: VNDocumentCameraScan
    ) {
        controller.dismiss(animated: true)

        // JPEG encoding + disk writes can be slow; keep them off the main thread.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }

            // Ensure the output directory exists once, up front, instead of
            // re-checking the filesystem on every page iteration.
            if !FileManager.default.fileExists(atPath: self.tempDirectory.path) {
                try? FileManager.default.createDirectory(
                    at: self.tempDirectory,
                    withIntermediateDirectories: true
                )
            }

            var urls: [URL] = []
            let pageCount = min(scan.pageCount, self.maxPages)

            for pageIndex in 0..<pageCount {
                let image = scan.imageOfPage(at: pageIndex)
                let url = self.tempDirectory.appendingPathComponent("\(UUID().uuidString).jpg")

                guard let data = image.jpegData(compressionQuality: 0.9) else { continue }
                do {
                    try data.write(to: url)
                    urls.append(url)
                } catch {
                    // Skip this page but keep processing the remaining ones;
                    // a partially successful scan still resolves with the
                    // pages that did save.
                }
            }

            self.completion(.success(urls))
        }
    }

    public func documentCameraViewControllerDidCancel(_ controller: VNDocumentCameraViewController) {
        controller.dismiss(animated: true)
        // Sentinel error consumed by NeuroScanImpl to reject with SCANNER_CANCELLED.
        completion(.failure(NSError(
            domain: "NeuroScan",
            code: -1,
            userInfo: [NSLocalizedDescriptionKey: "Scanner cancelled"]
        )))
    }

    public func documentCameraViewController(
        _ controller: VNDocumentCameraViewController,
        didFailWithError error: Error
    ) {
        controller.dismiss(animated: true)
        completion(.failure(error))
    }

    // MARK: - Helpers

    /// Walks the key window's controller hierarchy (navigation / tab /
    /// presented controllers) to find the topmost one to present from.
    private static func topViewController(
        base: UIViewController? = nil
    ) -> UIViewController? {
        let base = base ?? UIApplication.shared.connectedScenes
            .compactMap { $0 as? UIWindowScene }
            .flatMap { $0.windows }
            .first { $0.isKeyWindow }?
            .rootViewController

        if let nav = base as? UINavigationController {
            return topViewController(base: nav.visibleViewController)
        }
        if let tab = base as? UITabBarController, let selected = tab.selectedViewController {
            return topViewController(base: selected)
        }
        if let presented = base?.presentedViewController {
            return topViewController(base: presented)
        }
        return base
    }
}
|
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import Foundation
|
|
2
|
+
import UIKit
|
|
3
|
+
import VisionKit
|
|
4
|
+
|
|
5
|
+
/// Block types matching React Native's promise resolve/reject blocks so the
/// Objective-C++ TurboModule layer can pass them straight through.
public typealias RNResolver = @convention(block) (Any?) -> Void
public typealias RNRejecter = @convention(block) (String?, String?, Error?) -> Void

@objcMembers
public class NeuroScanImpl: NSObject {

    private let fileManager = FileManager.default
    // Strong reference keeps the scanner (and its delegate callbacks) alive
    // while the document camera is presented; cleared after completion.
    private var scannerController: DocumentScannerController?

    // MARK: - Temp Directory

    /// Lazily created scratch directory (<tmp>/neuroscan) used for all
    /// scanner and processing output files.
    private var tempDirectory: URL {
        let dir = fileManager.temporaryDirectory.appendingPathComponent("neuroscan", isDirectory: true)
        if !fileManager.fileExists(atPath: dir.path) {
            try? fileManager.createDirectory(at: dir, withIntermediateDirectories: true)
        }
        return dir
    }

    // MARK: - scanDocument

    /// Presents the VisionKit document camera and resolves with
    /// `["imageUrls": [String], "pageCount": Int]`.
    /// - Parameters:
    ///   - maxPages: Page cap; values <= 0 mean unlimited.
    ///   - enableAutoCapture: Accepted for cross-platform API parity but not
    ///     used here — VNDocumentCameraViewController exposes no such toggle.
    public func scanDocument(
        maxPages: Int,
        enableAutoCapture: Bool,
        resolver: @escaping RNResolver,
        rejecter: @escaping RNRejecter
    ) {
        // UIKit presentation must happen on the main thread.
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }

            guard VNDocumentCameraViewController.isSupported else {
                rejecter("CAMERA_UNAVAILABLE", "Document camera is not available on this device", nil)
                return
            }

            self.scannerController = DocumentScannerController(
                maxPages: maxPages > 0 ? maxPages : Int.max,
                tempDirectory: self.tempDirectory
            ) { result in
                switch result {
                case .success(let urls):
                    let urlStrings = urls.map { $0.absoluteString }
                    resolver([
                        "imageUrls": urlStrings,
                        "pageCount": urlStrings.count,
                    ] as [String: Any])
                case .failure(let error):
                    // Only our own sentinel (domain "NeuroScan", code -1, emitted by
                    // DocumentScannerController on user cancel) means cancellation.
                    // A foreign error can legitimately carry code -1 too and must be
                    // reported as a scanner failure, not a cancellation.
                    let nsError = error as NSError
                    if nsError.domain == "NeuroScan" && nsError.code == -1 {
                        rejecter("SCANNER_CANCELLED", "Document scanner was cancelled", error)
                    } else {
                        rejecter("SCANNER_FAILED", error.localizedDescription, error)
                    }
                }
                self.scannerController = nil
            }

            self.scannerController?.present()
        }
    }

    // MARK: - processImage

    /// Applies the filter pipeline to the image at `imageUrl` and resolves
    /// with `["imageUrl": String]` pointing at a new file in the temp dir.
    /// Filter order: crop -> rotate -> brightness/contrast -> sharpen -> grayscale/threshold.
    /// - Parameters:
    ///   - cropX/cropY/cropWidth/cropHeight: Fractions of the image size
    ///     (0.0–1.0). Crop is applied only when x/y >= 0 AND width/height > 0;
    ///     pass negative values to skip cropping.
    ///   - threshold: 0 disables; otherwise 0–255, converts to B&W document mode
    ///     (takes precedence over `grayscale`).
    ///   - quality: 0–100, used for JPEG encoding only.
    public func processImage(
        imageUrl: String,
        grayscale: Bool,
        contrast: Double,
        brightness: Double,
        sharpness: Double,
        rotation: Double,
        cropX: Double,
        cropY: Double,
        cropWidth: Double,
        cropHeight: Double,
        threshold: Double,
        outputFormat: String,
        quality: Double,
        resolver: @escaping RNResolver,
        rejecter: @escaping RNRejecter
    ) {
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }

            // 1. Load the image.
            // NOTE(review): CIImage(image:) does not bake in UIImage.imageOrientation,
            // so EXIF-rotated inputs may come out unrotated — confirm against callers.
            guard let url = URL(string: imageUrl),
                  let imageData = try? Data(contentsOf: url),
                  let uiImage = UIImage(data: imageData),
                  var ciImage = CIImage(image: uiImage) else {
                rejecter("PROCESS_FAILED", "Failed to load image from \(imageUrl)", nil)
                return
            }

            let context = CIContext(options: [.useSoftwareRenderer: false])

            // --- Filter order: crop -> rotate -> brightness/contrast -> sharpen -> grayscale/threshold ---

            // 2. CROP — x/y must be >= 0 and width/height > 0; values are
            //    normalized fractions of the image dimensions.
            if cropX >= 0, cropY >= 0, cropWidth > 0, cropHeight > 0 {
                let extent = ciImage.extent
                let x = cropX * Double(extent.width)
                // CIImage origin is bottom-left, so invert Y.
                let y = (1.0 - cropY - cropHeight) * Double(extent.height)
                let w = cropWidth * Double(extent.width)
                let h = cropHeight * Double(extent.height)
                ciImage = ciImage.cropped(to: CGRect(x: x, y: y, width: w, height: h))
            }

            // 3. ROTATION (degrees; expected 0, 90, 180, 270).
            let rotationInt = Int(rotation) % 360
            if rotationInt != 0 {
                let radians = Double(rotationInt) * .pi / 180.0
                ciImage = ciImage.transformed(by: CGAffineTransform(rotationAngle: CGFloat(radians)))
                // Translate so the rotated extent's origin returns to (0,0).
                let ext = ciImage.extent
                ciImage = ciImage.transformed(by: CGAffineTransform(
                    translationX: -ext.origin.x,
                    y: -ext.origin.y
                ))
            }

            // 4. BRIGHTNESS & CONTRAST (CIColorControls).
            if brightness != 0 || contrast != 0 {
                if let filter = CIFilter(name: "CIColorControls") {
                    filter.setValue(ciImage, forKey: kCIInputImageKey)
                    // inputBrightness expects -1...1; callers pass -100...100.
                    filter.setValue(brightness / 100.0, forKey: kCIInputBrightnessKey)
                    // inputContrast: 1.0 = no change. Map -100...100 to 0...2.
                    filter.setValue(1.0 + (contrast / 100.0), forKey: kCIInputContrastKey)
                    if let output = filter.outputImage {
                        ciImage = output
                    }
                }
            }

            // 5. SHARPNESS (CISharpenLuminance).
            if sharpness > 0 {
                if let filter = CIFilter(name: "CISharpenLuminance") {
                    filter.setValue(ciImage, forKey: kCIInputImageKey)
                    // inputSharpness 0...2; map 0-100 to 0-2.
                    filter.setValue(sharpness / 50.0, forKey: kCIInputSharpnessKey)
                    if let output = filter.outputImage {
                        ciImage = output
                    }
                }
            }

            // 6. GRAYSCALE or THRESHOLD (threshold wins when both are set).
            if threshold > 0 {
                // Grayscale first, then threshold for B&W document mode.
                if let monoFilter = CIFilter(name: "CIPhotoEffectMono") {
                    monoFilter.setValue(ciImage, forKey: kCIInputImageKey)
                    if let monoOutput = monoFilter.outputImage {
                        ciImage = monoOutput
                    }
                }
                // CIColorThreshold may be unavailable on older systems; the
                // optional CIFilter(name:) lookup degrades gracefully to no-op.
                if let thresholdFilter = CIFilter(name: "CIColorThreshold") {
                    thresholdFilter.setValue(ciImage, forKey: kCIInputImageKey)
                    // Map 0-255 to 0.0-1.0.
                    thresholdFilter.setValue(threshold / 255.0, forKey: "inputThreshold")
                    if let output = thresholdFilter.outputImage {
                        ciImage = output
                    }
                }
            } else if grayscale {
                if let filter = CIFilter(name: "CIPhotoEffectMono") {
                    filter.setValue(ciImage, forKey: kCIInputImageKey)
                    if let output = filter.outputImage {
                        ciImage = output
                    }
                }
            }

            // 7. Render and save.
            guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
                rejecter("PROCESS_FAILED", "Failed to render processed image", nil)
                return
            }

            let resultImage = UIImage(cgImage: cgImage)
            let ext = outputFormat == "png" ? "png" : "jpg"
            let outputUrl = self.tempDirectory.appendingPathComponent("\(UUID().uuidString).\(ext)")

            let data: Data?
            if outputFormat == "png" {
                data = resultImage.pngData()
            } else {
                // quality is 0-100; jpegData expects 0.0-1.0.
                data = resultImage.jpegData(compressionQuality: CGFloat(quality / 100.0))
            }

            guard let fileData = data else {
                rejecter("PROCESS_FAILED", "Failed to encode processed image", nil)
                return
            }

            do {
                try fileData.write(to: outputUrl)
                resolver(["imageUrl": outputUrl.absoluteString] as [String: Any])
            } catch {
                rejecter("PROCESS_FAILED", "Failed to save processed image: \(error.localizedDescription)", error)
            }
        }
    }

    // MARK: - cleanupTempFiles

    /// Deletes the entire scratch directory and resolves `true` on success.
    /// The directory is lazily recreated by `tempDirectory` on next use.
    public func cleanupTempFiles(
        resolver: @escaping RNResolver,
        rejecter: @escaping RNRejecter
    ) {
        DispatchQueue.global(qos: .utility).async { [weak self] in
            guard let self = self else { return }

            do {
                if self.fileManager.fileExists(atPath: self.tempDirectory.path) {
                    try self.fileManager.removeItem(at: self.tempDirectory)
                }
                resolver(NSNumber(value: true))
            } catch {
                rejecter("CLEANUP_FAILED", "Failed to cleanup temp files: \(error.localizedDescription)", error)
            }
        }
    }
}
|
package/ios/Neuroscan.h
ADDED
package/ios/Neuroscan.mm
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
#import "Neuroscan.h"
|
|
2
|
+
|
|
3
|
+
#ifdef __cplusplus
|
|
4
|
+
#import <react/renderer/core/EventEmitter.h>
|
|
5
|
+
#endif
|
|
6
|
+
|
|
7
|
+
#import <VisionKit/VisionKit.h>
|
|
8
|
+
|
|
9
|
+
// Import Swift-generated header
|
|
10
|
+
#if __has_include("react_native_neuroscan/react_native_neuroscan-Swift.h")
|
|
11
|
+
#import "react_native_neuroscan/react_native_neuroscan-Swift.h"
|
|
12
|
+
#elif __has_include("react-native-neuroscan/react-native-neuroscan-Swift.h")
|
|
13
|
+
#import "react-native-neuroscan/react-native-neuroscan-Swift.h"
|
|
14
|
+
#else
|
|
15
|
+
#import "Neuroscan-Swift.h"
|
|
16
|
+
#endif
|
|
17
|
+
|
|
18
|
+
// TurboModule bridge: unwraps the codegen option structs coming from JS and
// forwards every call to the Swift implementation (NeuroScanImpl), adapting
// React Native promise blocks to the Swift resolver/rejecter block types.
@implementation Neuroscan {
    NeuroScanImpl *_impl; // Swift-side implementation; owns scanner + image pipeline
}

- (instancetype)init {
    self = [super init];
    if (self) {
        _impl = [[NeuroScanImpl alloc] init];
    }
    return self;
}

// Name under which React Native registers this native module.
+ (NSString *)moduleName {
    return @"Neuroscan";
}

// MARK: - scanDocument

- (void)scanDocument:(JS::NativeNeuroscan::SpecScanDocumentOptions &)options
             resolve:(RCTPromiseResolveBlock)resolve
              reject:(RCTPromiseRejectBlock)reject {
    // maxPages <= 0 is interpreted by the Swift side as "unlimited".
    NSInteger maxPages = options.maxPages().has_value() ? options.maxPages().value() : 0;
    BOOL enableAutoCapture = options.enableAutoCapture().has_value() ? options.enableAutoCapture().value() : YES;

    [_impl scanDocumentWithMaxPages:maxPages
                  enableAutoCapture:enableAutoCapture
                           resolver:^(NSDictionary *result) {
        resolve(result);
    } rejecter:^(NSString *code, NSString *message, NSError *error) {
        reject(code, message, error);
    }];
}

// MARK: - processImage

- (void)processImage:(JS::NativeNeuroscan::SpecProcessImageOptions &)options
             resolve:(RCTPromiseResolveBlock)resolve
              reject:(RCTPromiseRejectBlock)reject {
    NSString *imageUrl = options.imageUrl();
    BOOL grayscale = options.grayscale().has_value() ? options.grayscale().value() : NO;
    double contrast = options.contrast().has_value() ? options.contrast().value() : 0;
    double brightness = options.brightness().has_value() ? options.brightness().value() : 0;
    double sharpness = options.sharpness().has_value() ? options.sharpness().value() : 0;
    double rotation = options.rotation().has_value() ? options.rotation().value() : 0;
    // -1 marks "no crop requested": the Swift side only crops when
    // cropX/cropY >= 0 and cropWidth/cropHeight > 0.
    double cropX = options.cropX().has_value() ? options.cropX().value() : -1;
    double cropY = options.cropY().has_value() ? options.cropY().value() : -1;
    double cropWidth = options.cropWidth().has_value() ? options.cropWidth().value() : -1;
    double cropHeight = options.cropHeight().has_value() ? options.cropHeight().value() : -1;
    // threshold 0 disables B&W thresholding on the Swift side.
    double threshold = options.threshold().has_value() ? options.threshold().value() : 0;
    NSString *outputFormat = options.outputFormat() ?: @"jpeg";
    double quality = options.quality().has_value() ? options.quality().value() : 90;

    [_impl processImageWithImageUrl:imageUrl
                          grayscale:grayscale
                           contrast:contrast
                         brightness:brightness
                          sharpness:sharpness
                           rotation:rotation
                              cropX:cropX
                              cropY:cropY
                          cropWidth:cropWidth
                         cropHeight:cropHeight
                          threshold:threshold
                       outputFormat:outputFormat
                            quality:quality
                           resolver:^(NSDictionary *result) {
        resolve(result);
    } rejecter:^(NSString *code, NSString *message, NSError *error) {
        reject(code, message, error);
    }];
}

// MARK: - cleanupTempFiles

- (void)cleanupTempFiles:(RCTPromiseResolveBlock)resolve
                  reject:(RCTPromiseRejectBlock)reject {
    [_impl cleanupTempFilesWithResolver:^(NSNumber *result) {
        resolve(result);
    } rejecter:^(NSString *code, NSString *message, NSError *error) {
        reject(code, message, error);
    }];
}

// MARK: - Event Emitter

// No-op implementations satisfying the spec; no events are emitted natively.
- (void)addListener:(NSString *)eventType {
    // Required for RN event emitter
}

- (void)removeListeners:(double)count {
    // Required for RN event emitter
}

// MARK: - TurboModule

// Hooks this module into the New Architecture's codegen'd JSI spec.
- (std::shared_ptr<facebook::react::TurboModule>)getTurboModule:
    (const facebook::react::ObjCTurboModule::InitParams &)params {
    return std::make_shared<facebook::react::NativeNeuroscanSpecJSI>(params);
}

@end
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["TurboModuleRegistry","getEnforcing"],"sourceRoot":"../../src","sources":["NativeNeuroscan.ts"],"mappings":";;AACA,SAASA,mBAAmB,QAAQ,cAAc;AAkDlD,eAAeA,mBAAmB,CAACC,YAAY,CAAO,WAAW,CAAC","ignoreList":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["default","NeuroScan"],"sourceRoot":"../../src","sources":["index.tsx"],"mappings":";;AAAA,SAASA,OAAO,IAAIC,SAAS,QAAQ,sBAAmB","ignoreList":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"module"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"module"}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { TurboModule } from 'react-native';
|
|
2
|
+
// NOTE(review): this lives under lib/typescript and appears to be a build
// artifact generated from src/NativeNeuroscan.ts — prefer editing the source.
export interface Spec extends TurboModule {
  /**
   * Launch native document scanner UI
   * iOS: VNDocumentCameraViewController
   * Android: ML Kit Document Scanner
   */
  scanDocument(options: {
    maxPages?: number;
    enableAutoCapture?: boolean;
  }): Promise<{
    imageUrls: string[];
    pageCount: number;
  }>;
  /**
   * Apply post-processing filters to a scanned image.
   * Returns a new file:// URI for the processed image.
   *
   * Filter application order: crop -> rotate -> brightness/contrast -> sharpen -> grayscale/threshold
   */
  processImage(options: {
    imageUrl: string;
    grayscale?: boolean;
    contrast?: number;
    brightness?: number;
    sharpness?: number;
    rotation?: number;
    cropX?: number;
    cropY?: number;
    cropWidth?: number;
    cropHeight?: number;
    threshold?: number;
    outputFormat?: string;
    quality?: number;
  }): Promise<{
    imageUrl: string;
  }>;
  /**
   * Cleanup temporary files created by the scanner
   */
  cleanupTempFiles(): Promise<boolean>;
  // Event-emitter plumbing required by the TurboModule spec; the native
  // implementations are no-ops.
  addListener(eventType: string): void;
  removeListeners(count: number): void;
}
declare const _default: Spec;
export default _default;
|
|
47
|
+
//# sourceMappingURL=NativeNeuroscan.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"NativeNeuroscan.d.ts","sourceRoot":"","sources":["../../../src/NativeNeuroscan.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAGhD,MAAM,WAAW,IAAK,SAAQ,WAAW;IACvC;;;;OAIG;IACH,YAAY,CAAC,OAAO,EAAE;QACpB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,iBAAiB,CAAC,EAAE,OAAO,CAAC;KAC7B,GAAG,OAAO,CAAC;QACV,SAAS,EAAE,MAAM,EAAE,CAAC;QACpB,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC,CAAC;IAEH;;;;;OAKG;IACH,YAAY,CAAC,OAAO,EAAE;QACpB,QAAQ,EAAE,MAAM,CAAC;QACjB,SAAS,CAAC,EAAE,OAAO,CAAC;QACpB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,GAAG,OAAO,CAAC;QACV,QAAQ,EAAE,MAAM,CAAC;KAClB,CAAC,CAAC;IAEH;;OAEG;IACH,gBAAgB,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC;IAGrC,WAAW,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACrC,eAAe,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;CACtC;;AAED,wBAAmE"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,mBAAmB,CAAC;AACzD,YAAY,EAAE,IAAI,IAAI,aAAa,EAAE,MAAM,mBAAmB,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "react-native-neuroscan",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Document scanner with edge detection, perspective correction, and advanced color correction via OpenCV",
|
|
5
|
+
"main": "./lib/module/index.js",
|
|
6
|
+
"types": "./lib/typescript/src/index.d.ts",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": {
|
|
9
|
+
"source": "./src/index.tsx",
|
|
10
|
+
"types": "./lib/typescript/src/index.d.ts",
|
|
11
|
+
"default": "./lib/module/index.js"
|
|
12
|
+
},
|
|
13
|
+
"./package.json": "./package.json"
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"src",
|
|
17
|
+
"lib",
|
|
18
|
+
"android",
|
|
19
|
+
"ios",
|
|
20
|
+
"cpp",
|
|
21
|
+
"*.podspec",
|
|
22
|
+
"react-native.config.js",
|
|
23
|
+
"!ios/build",
|
|
24
|
+
"!android/build",
|
|
25
|
+
"!android/gradle",
|
|
26
|
+
"!android/gradlew",
|
|
27
|
+
"!android/gradlew.bat",
|
|
28
|
+
"!android/local.properties",
|
|
29
|
+
"!**/__tests__",
|
|
30
|
+
"!**/__fixtures__",
|
|
31
|
+
"!**/__mocks__",
|
|
32
|
+
"!**/.*"
|
|
33
|
+
],
|
|
34
|
+
"scripts": {
|
|
35
|
+
"example": "yarn workspace react-native-neuroscan-example",
|
|
36
|
+
"clean": "del-cli android/build example/android/build example/android/app/build example/ios/build lib",
|
|
37
|
+
"prepare": "bob build",
|
|
38
|
+
"typecheck": "tsc",
|
|
39
|
+
"lint": "eslint \"**/*.{js,ts,tsx}\""
|
|
40
|
+
},
|
|
41
|
+
"keywords": [
|
|
42
|
+
"react-native",
|
|
43
|
+
"ios",
|
|
44
|
+
"android"
|
|
45
|
+
],
|
|
46
|
+
"repository": {
|
|
47
|
+
"type": "git",
|
|
48
|
+
"url": "git+https://github.com/dmytro/react-native-neuroscan.git"
|
|
49
|
+
},
|
|
50
|
+
"author": "Dmytro <dmytro@neuroscan.dev> ()",
|
|
51
|
+
"license": "MIT",
|
|
52
|
+
"bugs": {
|
|
53
|
+
"url": "https://github.com/dmytro/react-native-neuroscan/issues"
|
|
54
|
+
},
|
|
55
|
+
"homepage": "https://github.com/dmytro/react-native-neuroscan#readme",
|
|
56
|
+
"publishConfig": {
|
|
57
|
+
"registry": "https://registry.npmjs.org/"
|
|
58
|
+
},
|
|
59
|
+
"devDependencies": {
|
|
60
|
+
"@eslint/compat": "^1.3.2",
|
|
61
|
+
"@eslint/eslintrc": "^3.3.1",
|
|
62
|
+
"@eslint/js": "^9.35.0",
|
|
63
|
+
"@react-native/babel-preset": "0.79.0",
|
|
64
|
+
"@react-native/eslint-config": "0.83.0",
|
|
65
|
+
"@types/react": "^19.0.0",
|
|
66
|
+
"del-cli": "^6.0.0",
|
|
67
|
+
"eslint": "^9.35.0",
|
|
68
|
+
"eslint-config-prettier": "^10.1.8",
|
|
69
|
+
"eslint-plugin-prettier": "^5.5.4",
|
|
70
|
+
"prettier": "^2.8.8",
|
|
71
|
+
"react": "19.0.0",
|
|
72
|
+
"react-native": "0.79.0",
|
|
73
|
+
"react-native-builder-bob": "^0.40.18",
|
|
74
|
+
"turbo": "^2.5.6",
|
|
75
|
+
"typescript": "^5.9.2"
|
|
76
|
+
},
|
|
77
|
+
"peerDependencies": {
|
|
78
|
+
"react": "*",
|
|
79
|
+
"react-native": "*"
|
|
80
|
+
},
|
|
81
|
+
"workspaces": [
|
|
82
|
+
"example"
|
|
83
|
+
],
|
|
84
|
+
"packageManager": "yarn@4.11.0",
|
|
85
|
+
"react-native-builder-bob": {
|
|
86
|
+
"source": "src",
|
|
87
|
+
"output": "lib",
|
|
88
|
+
"targets": [
|
|
89
|
+
[
|
|
90
|
+
"module",
|
|
91
|
+
{
|
|
92
|
+
"esm": true
|
|
93
|
+
}
|
|
94
|
+
],
|
|
95
|
+
[
|
|
96
|
+
"typescript",
|
|
97
|
+
{
|
|
98
|
+
"project": "tsconfig.build.json"
|
|
99
|
+
}
|
|
100
|
+
]
|
|
101
|
+
]
|
|
102
|
+
},
|
|
103
|
+
"codegenConfig": {
|
|
104
|
+
"name": "RNNeuroScanSpec",
|
|
105
|
+
"type": "all",
|
|
106
|
+
"jsSrcsDir": "src",
|
|
107
|
+
"android": {
|
|
108
|
+
"javaPackageName": "com.neuroscan"
|
|
109
|
+
}
|
|
110
|
+
},
|
|
111
|
+
"prettier": {
|
|
112
|
+
"quoteProps": "consistent",
|
|
113
|
+
"singleQuote": true,
|
|
114
|
+
"tabWidth": 2,
|
|
115
|
+
"trailingComma": "es5",
|
|
116
|
+
"useTabs": false
|
|
117
|
+
},
|
|
118
|
+
"create-react-native-library": {
|
|
119
|
+
"type": "turbo-module",
|
|
120
|
+
"languages": "kotlin-objc",
|
|
121
|
+
"tools": [
|
|
122
|
+
"eslint"
|
|
123
|
+
],
|
|
124
|
+
"version": "0.57.2"
|
|
125
|
+
}
|
|
126
|
+
}
|