@micrantha/react-native-amaryllis 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/Amaryllis.podspec +22 -0
  2. package/LICENSE +21 -0
  3. package/README.md +213 -0
  4. package/android/build.gradle +81 -0
  5. package/android/gradle.properties +5 -0
  6. package/android/src/main/AndroidManifest.xml +2 -0
  7. package/android/src/main/java/com/micrantha/amaryllis/Amaryllis.kt +198 -0
  8. package/android/src/main/java/com/micrantha/amaryllis/AmaryllisModule.kt +165 -0
  9. package/android/src/main/java/com/micrantha/amaryllis/AmaryllisPackage.kt +32 -0
  10. package/ios/Amaryllis.h +49 -0
  11. package/ios/Amaryllis.m +201 -0
  12. package/ios/AmaryllisModule.h +6 -0
  13. package/ios/AmaryllisModule.mm +166 -0
  14. package/lib/module/Amaryllis.js +56 -0
  15. package/lib/module/Amaryllis.js.map +1 -0
  16. package/lib/module/AmaryllisContext.js +56 -0
  17. package/lib/module/AmaryllisContext.js.map +1 -0
  18. package/lib/module/AmaryllisHooks.js +78 -0
  19. package/lib/module/AmaryllisHooks.js.map +1 -0
  20. package/lib/module/AmaryllisRx.js +31 -0
  21. package/lib/module/AmaryllisRx.js.map +1 -0
  22. package/lib/module/NativeAmaryllis.js +5 -0
  23. package/lib/module/NativeAmaryllis.js.map +1 -0
  24. package/lib/module/NativePipe.js +9 -0
  25. package/lib/module/NativePipe.js.map +1 -0
  26. package/lib/module/Types.js +4 -0
  27. package/lib/module/Types.js.map +1 -0
  28. package/lib/module/index.js +7 -0
  29. package/lib/module/index.js.map +1 -0
  30. package/lib/module/package.json +1 -0
  31. package/lib/typescript/package.json +1 -0
  32. package/lib/typescript/src/Amaryllis.d.ts +16 -0
  33. package/lib/typescript/src/Amaryllis.d.ts.map +1 -0
  34. package/lib/typescript/src/AmaryllisContext.d.ts +8 -0
  35. package/lib/typescript/src/AmaryllisContext.d.ts.map +1 -0
  36. package/lib/typescript/src/AmaryllisHooks.d.ts +4 -0
  37. package/lib/typescript/src/AmaryllisHooks.d.ts.map +1 -0
  38. package/lib/typescript/src/AmaryllisRx.d.ts +3 -0
  39. package/lib/typescript/src/AmaryllisRx.d.ts.map +1 -0
  40. package/lib/typescript/src/NativeAmaryllis.d.ts +12 -0
  41. package/lib/typescript/src/NativeAmaryllis.d.ts.map +1 -0
  42. package/lib/typescript/src/NativePipe.d.ts +3 -0
  43. package/lib/typescript/src/NativePipe.d.ts.map +1 -0
  44. package/lib/typescript/src/Types.d.ts +87 -0
  45. package/lib/typescript/src/Types.d.ts.map +1 -0
  46. package/lib/typescript/src/index.d.ts +6 -0
  47. package/lib/typescript/src/index.d.ts.map +1 -0
  48. package/package.json +185 -0
  49. package/src/Amaryllis.ts +91 -0
  50. package/src/AmaryllisContext.tsx +53 -0
  51. package/src/AmaryllisHooks.tsx +78 -0
  52. package/src/AmaryllisRx.ts +24 -0
  53. package/src/NativeAmaryllis.ts +18 -0
  54. package/src/NativePipe.ts +8 -0
  55. package/src/Types.ts +115 -0
  56. package/src/index.tsx +5 -0
package/Amaryllis.podspec ADDED
@@ -0,0 +1,22 @@
require "json"

package = JSON.parse(File.read(File.join(__dir__, "package.json")))

Pod::Spec.new do |s|
  s.name = "Amaryllis"
  s.version = package["version"]
  s.summary = package["description"]
  s.homepage = package["homepage"]
  s.license = package["license"]
  s.authors = package["author"]

  s.platforms = { :ios => min_ios_version_supported }
  s.source = { :git => "https://github.com/hackelia-micrantha/react-native-amaryllis.git", :tag => "#{s.version}" }

  s.source_files = "ios/**/*.{h,m,mm,cpp}"
  s.private_header_files = "ios/**/*.h"

  s.dependency "MediaPipeTasksGenAI"

  install_modules_dependencies(s)
end
package/LICENSE ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Micrantha

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,213 @@
# react-native-amaryllis

![amaryllis](docs/amaryllis-128.png)

[![npm version](https://img.shields.io/npm/v/react-native-amaryllis.svg)](https://www.npmjs.com/package/react-native-amaryllis) [![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)

> **Amaryllis Hippeastrum**: Symbolizes hope and emergence, blooming even in tough conditions.

A modern AI module for native mobile apps in React Native, supporting multimodal inference and streaming results.

---

## 🚀 Installation

```sh
npm install react-native-amaryllis
# or
yarn add react-native-amaryllis
# or
pnpm add react-native-amaryllis
```

---

## 📦 Features

- Native LLM engine for Android & iOS
- Multimodal support (text + images)
- Streaming inference with hooks & observables
- Easy integration with React Native context/provider
- LoRA customization (GPU only)

---

## 🛠️ Usage

### Provider Setup

Wrap your application with `LLMProvider` and provide the necessary model paths. The models must already be downloaded to the device.

```tsx
import { LLMProvider } from 'react-native-amaryllis';

<LLMProvider
  config={{
    modelPath: 'gemma3-1b-it-int4.task',
    visionEncoderPath: 'mobilenet_v3_small.tflite',
    visionAdapterPath: 'mobilenet_v3_small.tflite',
    maxTopK: 32,
    maxNumImages: 2,
    maxTokens: 512,
  }}
>
  {/* Your app components */}
</LLMProvider>
```

You can access the LLM controller with the `useLLMContext` hook. See **Core API** for details on the controller API.

```tsx
const {
  config, // original config param
  controller, // native controller
  error, // any error
  isReady, // is the controller initialized
} = useLLMContext();
```
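
For example, you can gate part of the UI on the provider state before issuing any prompts. The sketch below uses only the fields shown above and assumes `useLLMContext` is exported from the package entry point; the loading and error UI are placeholders:

```tsx
import React from 'react';
import { ActivityIndicator, Text } from 'react-native';
import { useLLMContext } from 'react-native-amaryllis';

const LLMGate = ({ children }: { children: React.ReactNode }) => {
  // Read the provider state exposed by useLLMContext.
  const { isReady, error } = useLLMContext();

  // Surface initialization failures instead of rendering children.
  if (error) return <Text>LLM failed to initialize: {String(error)}</Text>;

  // Wait until the native controller has finished initializing.
  if (!isReady) return <ActivityIndicator />;

  return <>{children}</>;
};
```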

### Inference Hook

Use the `useInferenceAsync` hook to access the LLM's capabilities.

```tsx
import { useInferenceAsync } from 'react-native-amaryllis';
import { useCallback, useMemo, useState } from 'react';
import { View, TextInput, Button, Text } from 'react-native';

const LLMPrompt = () => {
  const [prompt, setPrompt] = useState('');
  const [results, setResults] = useState([]);
  const [images, setImages] = useState([]);
  const [error, setError] = useState(undefined);
  const [isBusy, setIsBusy] = useState(false);

  const props = useMemo(() => ({
    onGenerate: () => {
      setError(undefined);
      setIsBusy(true);
    },
    onResult: (result, isFinal) => {
      setResults((prev) => [...prev, result]);
      if (isFinal) {
        setIsBusy(false);
      }
    },
    onError: (err) => setError(err),
  }), [setError, setIsBusy, setResults]);

  const generate = useInferenceAsync(props);

  const infer = useCallback(async () => {
    await generate({ prompt, images });
  }, [prompt, generate, images]);

  return (
    <View>
      <TextInput
        value={prompt}
        onChangeText={setPrompt}
        placeholder="Enter prompt..."
      />
      <Button title="Generate" onPress={infer} />
      <Text>
        {error ? error.message : results.join('\n')}
      </Text>
      {/* image controls */}
    </View>
  );
};
```

The example above uses `useInferenceAsync`, which streams results as they are generated; substitute the `useInference` hook for a single, non-streaming result.

### Core API

For more advanced use cases, you can use the core `Amaryllis` API directly.
This is the same controller returned by `useLLMContext`.

#### Initialization

```javascript
import { Amaryllis } from 'react-native-amaryllis';

const amaryllis = new Amaryllis();

await amaryllis.init({
  modelPath: '/path/to/your/model.task',
  visionEncoderPath: '/path/to/vision/encoder.tflite',
  visionAdapterPath: '/path/to/vision/adapter.tflite',
});
```

A session is required for working with images.

```javascript
await amaryllis.newSession({
  topK: 40, // sample only from the top K tokens
  topP: 0.95, // nucleus sampling threshold
  temperature: 0.8,
  randomSeed: 0, // for reproducible output
  loraPath: "", // LoRA customization (GPU only)
  enableVisionModality: true, // required for image input
});
```

#### Generate Response

```javascript
const result = await amaryllis.generate({
  prompt: 'Your prompt here',
  images: ['file:///path/to/image.png'],
});
```
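
Putting the pieces together, a small helper might initialize the controller, open a vision-enabled session, and run a single generation. This is only a sketch of the calls shown above; the paths, the `describeImage` name, and the assumption that `generate` resolves with the generated text are illustrative:

```ts
import { Amaryllis } from 'react-native-amaryllis';

// Hypothetical helper: initialize, open a vision-enabled session, and run one prompt.
async function describeImage(imageUri: string): Promise<string> {
  const amaryllis = new Amaryllis();

  // Paths are placeholders; the models must already exist on the device.
  await amaryllis.init({
    modelPath: '/path/to/your/model.task',
    visionEncoderPath: '/path/to/vision/encoder.tflite',
    visionAdapterPath: '/path/to/vision/adapter.tflite',
  });

  // Images require a session with vision modality enabled.
  await amaryllis.newSession({ enableVisionModality: true });

  // Assumes generate resolves with the generated text.
  const result = await amaryllis.generate({
    prompt: 'Describe this image.',
    images: [imageUri],
  });

  return result;
}
```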

#### Streaming Response

```javascript
amaryllis.generateAsync(
  {
    prompt: 'Your prompt here',
    images: ['file:///path/to/image.png'],
  },
  {
    onPartialResult: (partial) => {
      console.log('Partial result:', partial);
    },
    onFinalResult: (final) => {
      console.log('Final result:', final);
    },
    onError: (err) => {
      console.error('Error:', err);
    },
  }
);
```

You can cancel an in-progress async generation if needed.

```javascript
amaryllis.cancelAsync();
```
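
For example, a caller can pair `generateAsync` with `cancelAsync` to abandon a generation that runs too long. The wrapper below is a sketch built only from the calls shown above; the 10-second deadline is arbitrary:

```ts
import { Amaryllis } from 'react-native-amaryllis';

// Hypothetical wrapper: stream a response but give up after a deadline.
function generateWithTimeout(amaryllis: Amaryllis, prompt: string, timeoutMs = 10_000) {
  // Cancel the generation if it has not finished by the deadline.
  const timer = setTimeout(() => amaryllis.cancelAsync(), timeoutMs);

  amaryllis.generateAsync(
    { prompt },
    {
      onPartialResult: (partial) => console.log('Partial:', partial),
      onFinalResult: (final) => {
        clearTimeout(timer); // finished in time; no cancel needed
        console.log('Final:', final);
      },
      onError: (err) => {
        clearTimeout(timer);
        console.error('Error:', err);
      },
    }
  );
}
```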

---

## 📚 Documentation

- [API Reference](src/Types.ts)
- [Example App](example/)
- [Demo Video](docs/demo.mp4)
- [Development workflow](CONTRIBUTING.md)
- [Code of Conduct](CODE_OF_CONDUCT.md)

---

## 🤝 Contributing

We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.

---

## 📄 License

This project is [MIT licensed](LICENSE).
package/android/build.gradle ADDED
@@ -0,0 +1,81 @@
buildscript {
  ext {
    getExtOrDefault = { name ->
      return rootProject.ext.has(name) ? rootProject.ext.get(name) : project.properties['Amaryllis_' + name]
    }
  }

  repositories {
    google()
    mavenCentral()
  }

  dependencies {
    classpath "com.android.tools.build:gradle:8.12.1"
    // noinspection DifferentKotlinGradleVersion
    classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:${getExtOrDefault('kotlinVersion')}"
  }
}

apply plugin: "com.android.library"
apply plugin: "kotlin-android"

apply plugin: "com.facebook.react"

def getExtOrIntegerDefault(name) {
  return rootProject.ext.has(name) ? rootProject.ext.get(name) : (project.properties["Amaryllis_" + name]).toInteger()
}

android {
  namespace "com.micrantha.amaryllis"

  compileSdkVersion getExtOrIntegerDefault("compileSdkVersion")

  defaultConfig {
    minSdkVersion getExtOrIntegerDefault("minSdkVersion")
    targetSdkVersion getExtOrIntegerDefault("targetSdkVersion")
  }

  buildFeatures {
    buildConfig true
  }

  buildTypes {
    release {
      minifyEnabled false
    }
  }

  lintOptions {
    disable "GradleCompatible"
  }

  compileOptions {
    sourceCompatibility JavaVersion.VERSION_1_8
    targetCompatibility JavaVersion.VERSION_1_8
  }

  sourceSets {
    main {
      java.srcDirs += [
        "generated/java",
        "generated/jni"
      ]
    }
  }
}

repositories {
  mavenCentral()
  google()
}

def kotlin_version = getExtOrDefault("kotlinVersion")

dependencies {
  implementation "com.facebook.react:react-android"
  implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
  implementation 'com.google.mediapipe:tasks-core:latest.release'
  implementation 'com.google.mediapipe:tasks-genai:latest.release'
}
package/android/gradle.properties ADDED
@@ -0,0 +1,5 @@
Amaryllis_kotlinVersion=2.2.10
Amaryllis_minSdkVersion=29
Amaryllis_targetSdkVersion=34
Amaryllis_compileSdkVersion=35
Amaryllis_ndkVersion=27.1.12297006
package/android/src/main/AndroidManifest.xml ADDED
@@ -0,0 +1,2 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
</manifest>
package/android/src/main/java/com/micrantha/amaryllis/Amaryllis.kt ADDED
@@ -0,0 +1,198 @@
package com.micrantha.amaryllis

import android.content.Context
import android.graphics.BitmapFactory
import android.util.Log
import androidx.core.graphics.scale
import androidx.core.net.toFile
import androidx.core.net.toUri
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.bridge.ReadableMap
import com.google.mediapipe.framework.image.BitmapImageBuilder
import com.google.mediapipe.framework.image.MPImage
import com.google.mediapipe.tasks.genai.llminference.GraphOptions
import com.google.mediapipe.tasks.genai.llminference.LlmInference
import com.google.mediapipe.tasks.genai.llminference.LlmInferenceSession
import com.google.mediapipe.tasks.genai.llminference.ProgressListener
import com.google.mediapipe.tasks.genai.llminference.VisionModelOptions
import com.micrantha.amaryllis.AmaryllisModule.Companion.NAME
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_ENABLE_VISION
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_IMAGES
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_LORA_PATH
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_MAX_NUM_IMAGES
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_MAX_TOKENS
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_MAX_TOP_K
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_MODEL_PATH
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_PROMPT
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_RANDOM_SEED
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_TEMPERATURE
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_TOP_K
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_TOP_P
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_VISION_ADAPTER
import com.micrantha.amaryllis.AmaryllisModule.Companion.PARAM_VISION_ENCODER
import java.io.File
import java.io.FileOutputStream
import java.io.IOException

class Amaryllis {

    private var llmInference: LlmInference? = null
    private var session: LlmInferenceSession? = null

    fun init(context: Context, config: ReadableMap) {
        val modelPath = config.getString(PARAM_MODEL_PATH) ?: throw InvalidModelPathException()

        val taskOptions = LlmInference.LlmInferenceOptions.builder()
            .setModelPath(modelPath).apply {
                if (config.hasKey(PARAM_MAX_TOP_K))
                    setMaxTopK(config.getInt(PARAM_MAX_TOP_K))
                if (config.hasKey(PARAM_MAX_TOKENS))
                    setMaxTokens(config.getInt(PARAM_MAX_TOKENS))
                if (config.hasKey(PARAM_MAX_NUM_IMAGES))
                    setMaxNumImages(config.getInt(PARAM_MAX_NUM_IMAGES))
            }
            .setVisionModelOptions(
                VisionModelOptions.builder().apply {
                    config.getString(PARAM_VISION_ADAPTER)?.let {
                        setAdapterPath(it)
                    }
                    config.getString(PARAM_VISION_ENCODER)?.let {
                        setEncoderPath(it)
                    }
                }.build()
            )
            .build()

        Log.d(NAME, "initializing llm inference")

        this.llmInference = LlmInference.createFromOptions(context, taskOptions)
    }

    fun newSession(params: ReadableMap?) {
        val inference = this.llmInference ?: throw NotInitializedException()

        val sessionOptions = LlmInferenceSession.LlmInferenceSessionOptions.builder()

        if (params?.hasKey(PARAM_TOP_K) == true)
            sessionOptions.setTopK(params.getInt(PARAM_TOP_K))
        if (params?.hasKey(PARAM_TOP_P) == true)
            sessionOptions.setTopP(params.getDouble(PARAM_TOP_P).toFloat())
        if (params?.hasKey(PARAM_TEMPERATURE) == true)
            sessionOptions.setTemperature(params.getDouble(PARAM_TEMPERATURE).toFloat())
        if (params?.hasKey(PARAM_RANDOM_SEED) == true)
            sessionOptions.setRandomSeed(params.getInt(PARAM_RANDOM_SEED))
        params?.getString(PARAM_LORA_PATH)?.let { sessionOptions.setLoraPath(it) }
        params?.getBoolean(PARAM_ENABLE_VISION)?.let {
            sessionOptions.setGraphOptions(
                GraphOptions.builder()
                    .setEnableVisionModality(it)
                    .build()
            )
        }

        Log.d(NAME, "starting new session")

        this.session = LlmInferenceSession.createFromOptions(
            inference,
            sessionOptions.build()
        )
    }

    fun generate(params: ReadableMap) {
        val llm = llmInference ?: throw NotInitializedException()

        val prompt = params.validateAndGetPrompt()

        if (session == null) {
            params.validateNoSession()
            llm.generateResponse(prompt)
        } else {
            this.session?.updateQueryFromParams(params)
            this.session?.generateResponse()
        }
    }

    fun generateAsync(params: ReadableMap, listener: ProgressListener<String>) {
        val llm = llmInference ?: throw NotInitializedException()

        val prompt = params.validateAndGetPrompt()

        if (session == null) {
            params.validateNoSession()
            llm.generateResponseAsync(prompt, listener)
        } else {
            this.session?.updateQueryFromParams(params)
            this.session?.generateResponseAsync(listener)
        }
    }

    fun close() {
        session?.close()
        llmInference?.close()
        session = null
        llmInference = null
    }

    fun cancelAsync() {
        session?.cancelGenerateResponseAsync()
    }

    private fun LlmInferenceSession.updateQueryFromParams(params: ReadableMap): LlmInferenceSession {
        addQueryChunk(params.getString(PARAM_PROMPT) ?: "")
        params.getArray(PARAM_IMAGES)?.run {
            preprocessImages(this).forEach {
                addImage(it)
            }
        }
        return this
    }

    private fun ReadableMap.validateNoSession() {
        if (getArray(PARAM_IMAGES) != null) {
            throw SessionRequiredException()
        }
    }

    private fun ReadableMap.validateAndGetPrompt(): String {
        return getString(PARAM_PROMPT) ?: throw IllegalArgumentException("prompt is required")
    }

    /**
     * Loads and preprocesses an image for the LLM session.
     * - Resizes to targetWidth x targetHeight (default 512x512)
     * - Converts to MPImage for inference
     */
    internal fun preprocessImage(
        uri: String,
        targetWidth: Int = 512,
        targetHeight: Int = 512
    ): MPImage? {
        val file = uri.toUri().toFile()

        if (!file.exists()) return null

        // Decode bitmap
        val bitmap = BitmapFactory.decodeFile(file.absolutePath)
            ?: return null

        // Resize bitmap
        val resized = bitmap.scale(targetWidth, targetHeight)

        // Convert to MPImage
        return BitmapImageBuilder(resized).build()
    }

    internal fun preprocessImages(
        uris: ReadableArray,
        targetWidth: Int = 512,
        targetHeight: Int = 512
    ) = uris.toArrayList().mapNotNull {
        val uri = it as? String ?: return@mapNotNull null
        preprocessImage(uri, targetWidth, targetHeight)
    }

    inner class NotInitializedException : Exception()
    inner class SessionRequiredException : Exception()
    inner class InvalidModelPathException : Exception()
}