expo-ai-kit 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -0
- package/android/build.gradle +2 -1
- package/android/src/main/java/expo/modules/aikit/ExpoAiKitModule.kt +183 -4
- package/android/src/main/java/expo/modules/aikit/GemmaInferenceClient.kt +326 -0
- package/build/ExpoAiKitModule.d.ts +1 -0
- package/build/ExpoAiKitModule.d.ts.map +1 -1
- package/build/ExpoAiKitModule.js.map +1 -1
- package/build/index.d.ts.map +1 -1
- package/build/index.js +19 -0
- package/build/index.js.map +1 -1
- package/build/models.d.ts +4 -5
- package/build/models.d.ts.map +1 -1
- package/build/models.js +12 -12
- package/build/models.js.map +1 -1
- package/build/types.d.ts +2 -0
- package/build/types.d.ts.map +1 -1
- package/build/types.js.map +1 -1
- package/ios/ExpoAiKitModule.swift +84 -1
- package/package.json +1 -1
- package/src/ExpoAiKitModule.ts +1 -0
- package/src/index.ts +22 -0
- package/src/models.ts +16 -17
- package/src/types.ts +2 -0
package/README.md
CHANGED
|
@@ -14,6 +14,15 @@ On-device AI for Expo apps. Run language models locally—no API keys, no cloud,
|
|
|
14
14
|
| iOS 26+ | [Apple Foundation Models](https://developer.apple.com/documentation/FoundationModels) |
|
|
15
15
|
| Android (supported devices) | [ML Kit Prompt API](https://developers.google.com/ml-kit/genai#prompt-device) |
|
|
16
16
|
|
|
17
|
+
### Downloadable Models (Gemma 4)
|
|
18
|
+
|
|
19
|
+
| Platform | Status |
|
|
20
|
+
|----------|--------|
|
|
21
|
+
| Android | Gemma 4 E2B (2.3B) and E4B (4.5B) via [LiteRT-LM](https://ai.google.dev/edge/litert-lm) |
|
|
22
|
+
| iOS | Coming soon — waiting for LiteRT-LM Swift APIs from Google |
|
|
23
|
+
|
|
24
|
+
> **Note:** iOS downloadable model support (Gemma 4 E2B/E4B) is planned for a future release. We are waiting for Google to ship native Swift APIs for LiteRT-LM. Built-in Apple Foundation Models work on iOS 26+ today.
|
|
25
|
+
|
|
17
26
|
### Unsupported
|
|
18
27
|
|
|
19
28
|
| Platform | Fallback Behavior |
|
package/android/build.gradle
CHANGED
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
package expo.modules.aikit
|
|
2
2
|
|
|
3
|
+
import android.app.ActivityManager
|
|
4
|
+
import android.content.Context
|
|
3
5
|
import expo.modules.kotlin.modules.Module
|
|
4
6
|
import expo.modules.kotlin.modules.ModuleDefinition
|
|
5
7
|
import expo.modules.kotlin.functions.Coroutine
|
|
@@ -11,15 +13,28 @@ import kotlinx.coroutines.cancel
|
|
|
11
13
|
|
|
12
14
|
class ExpoAiKitModule : Module() {
|
|
13
15
|
|
|
16
|
+
// Existing ML Kit client -- unchanged
|
|
14
17
|
private val promptClient by lazy { PromptApiClient() }
|
|
18
|
+
|
|
19
|
+
// Gemma client -- lazy-initialized with app context
|
|
20
|
+
private val gemmaClient by lazy {
|
|
21
|
+
GemmaInferenceClient(appContext.reactContext ?: throw RuntimeException("React context not available"))
|
|
22
|
+
}
|
|
23
|
+
|
|
15
24
|
private val activeStreamJobs = mutableMapOf<String, Job>()
|
|
16
25
|
private val streamScope = CoroutineScope(Dispatchers.IO)
|
|
17
26
|
|
|
27
|
+
// Active model routing: "mlkit" (default) or a downloadable model ID
|
|
28
|
+
private var activeModelId: String = "mlkit"
|
|
29
|
+
|
|
18
30
|
override fun definition() = ModuleDefinition {
|
|
19
31
|
Name("ExpoAiKit")
|
|
20
32
|
|
|
21
|
-
|
|
22
|
-
|
|
33
|
+
Events("onStreamToken", "onDownloadProgress", "onModelStateChange")
|
|
34
|
+
|
|
35
|
+
// ==================================================================
|
|
36
|
+
// Existing inference API -- ML Kit path completely untouched
|
|
37
|
+
// ==================================================================
|
|
23
38
|
|
|
24
39
|
Function("isAvailable") {
|
|
25
40
|
promptClient.isAvailableBlocking()
|
|
@@ -42,7 +57,12 @@ class ExpoAiKitModule : Module() {
|
|
|
42
57
|
"$role: $content"
|
|
43
58
|
} + "\nASSISTANT:"
|
|
44
59
|
|
|
45
|
-
|
|
60
|
+
// Route to active model
|
|
61
|
+
val text = if (activeModelId == "mlkit") {
|
|
62
|
+
promptClient.generateText(conversationPrompt, systemPrompt)
|
|
63
|
+
} else {
|
|
64
|
+
gemmaClient.generateText(conversationPrompt, systemPrompt)
|
|
65
|
+
}
|
|
46
66
|
mapOf("text" to text)
|
|
47
67
|
}
|
|
48
68
|
|
|
@@ -65,7 +85,7 @@ class ExpoAiKitModule : Module() {
|
|
|
65
85
|
|
|
66
86
|
// Launch streaming in a coroutine that can be cancelled
|
|
67
87
|
val job = streamScope.launch {
|
|
68
|
-
|
|
88
|
+
val streamCallback = { token: String, accumulatedText: String, isDone: Boolean ->
|
|
69
89
|
sendEvent("onStreamToken", mapOf(
|
|
70
90
|
"sessionId" to sessionId,
|
|
71
91
|
"token" to token,
|
|
@@ -77,6 +97,13 @@ class ExpoAiKitModule : Module() {
|
|
|
77
97
|
activeStreamJobs.remove(sessionId)
|
|
78
98
|
}
|
|
79
99
|
}
|
|
100
|
+
|
|
101
|
+
// Route to active model
|
|
102
|
+
if (activeModelId == "mlkit") {
|
|
103
|
+
promptClient.generateTextStream(conversationPrompt, systemPrompt, streamCallback)
|
|
104
|
+
} else {
|
|
105
|
+
gemmaClient.generateTextStream(conversationPrompt, systemPrompt, streamCallback)
|
|
106
|
+
}
|
|
80
107
|
}
|
|
81
108
|
|
|
82
109
|
activeStreamJobs[sessionId] = job
|
|
@@ -86,5 +113,157 @@ class ExpoAiKitModule : Module() {
|
|
|
86
113
|
activeStreamJobs[sessionId]?.cancel()
|
|
87
114
|
activeStreamJobs.remove(sessionId)
|
|
88
115
|
}
|
|
116
|
+
|
|
117
|
+
// ==================================================================
|
|
118
|
+
// Model discovery
|
|
119
|
+
// ==================================================================
|
|
120
|
+
|
|
121
|
+
Function("getBuiltInModels") {
|
|
122
|
+
listOf(
|
|
123
|
+
mapOf(
|
|
124
|
+
"id" to "mlkit",
|
|
125
|
+
"name" to "ML Kit Prompt API",
|
|
126
|
+
"available" to promptClient.isAvailableBlocking(),
|
|
127
|
+
"platform" to "android",
|
|
128
|
+
// ML Kit doesn't expose a context window; use a reasonable default
|
|
129
|
+
"contextWindow" to 4096
|
|
130
|
+
)
|
|
131
|
+
)
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
Function("getDownloadableModelStatus") { modelId: String ->
|
|
135
|
+
// Status reflects runtime state: "ready" if loaded in memory,
|
|
136
|
+
// "not-downloaded" otherwise (even if file is on disk -- setModel
|
|
137
|
+
// is the gatekeeper that transitions through loading -> ready).
|
|
138
|
+
when {
|
|
139
|
+
gemmaClient.getLoadedModelId() == modelId && gemmaClient.isModelLoaded() -> "ready"
|
|
140
|
+
else -> "not-downloaded"
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
Function("getDeviceRamBytes") {
|
|
145
|
+
val activityManager = appContext.reactContext?.getSystemService(Context.ACTIVITY_SERVICE) as? ActivityManager
|
|
146
|
+
if (activityManager != null) {
|
|
147
|
+
val memInfo = ActivityManager.MemoryInfo()
|
|
148
|
+
activityManager.getMemoryInfo(memInfo)
|
|
149
|
+
memInfo.totalMem
|
|
150
|
+
} else {
|
|
151
|
+
0L
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
// ==================================================================
|
|
156
|
+
// Model selection & memory management
|
|
157
|
+
// ==================================================================
|
|
158
|
+
|
|
159
|
+
AsyncFunction("setModel") Coroutine { modelId: String ->
|
|
160
|
+
if (modelId == "mlkit") {
|
|
161
|
+
// Switch to built-in: unload any Gemma model
|
|
162
|
+
if (gemmaClient.isModelLoaded()) {
|
|
163
|
+
gemmaClient.unloadModel()
|
|
164
|
+
val previousId = activeModelId
|
|
165
|
+
if (previousId != "mlkit") {
|
|
166
|
+
sendEvent("onModelStateChange", mapOf(
|
|
167
|
+
"modelId" to previousId,
|
|
168
|
+
"status" to "not-downloaded"
|
|
169
|
+
))
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
activeModelId = "mlkit"
|
|
173
|
+
return@Coroutine
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
// Downloadable model: verify file exists
|
|
177
|
+
if (!gemmaClient.isModelFileDownloaded(modelId)) {
|
|
178
|
+
throw RuntimeException("MODEL_NOT_DOWNLOADED:$modelId:Model file not found on disk")
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
// Emit loading state
|
|
182
|
+
sendEvent("onModelStateChange", mapOf(
|
|
183
|
+
"modelId" to modelId,
|
|
184
|
+
"status" to "loading"
|
|
185
|
+
))
|
|
186
|
+
|
|
187
|
+
try {
|
|
188
|
+
val modelPath = gemmaClient.getModelFilePath(modelId)
|
|
189
|
+
gemmaClient.loadModel(modelId, modelPath)
|
|
190
|
+
activeModelId = modelId
|
|
191
|
+
|
|
192
|
+
// Emit ready state
|
|
193
|
+
sendEvent("onModelStateChange", mapOf(
|
|
194
|
+
"modelId" to modelId,
|
|
195
|
+
"status" to "ready"
|
|
196
|
+
))
|
|
197
|
+
} catch (e: Exception) {
|
|
198
|
+
// Emit failure -- revert status
|
|
199
|
+
sendEvent("onModelStateChange", mapOf(
|
|
200
|
+
"modelId" to modelId,
|
|
201
|
+
"status" to "not-downloaded"
|
|
202
|
+
))
|
|
203
|
+
throw e
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
Function("getActiveModel") {
|
|
208
|
+
activeModelId
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
AsyncFunction("unloadModel") Coroutine {
|
|
212
|
+
if (activeModelId != "mlkit" && gemmaClient.isModelLoaded()) {
|
|
213
|
+
val previousId = activeModelId
|
|
214
|
+
gemmaClient.unloadModel()
|
|
215
|
+
activeModelId = "mlkit"
|
|
216
|
+
sendEvent("onModelStateChange", mapOf(
|
|
217
|
+
"modelId" to previousId,
|
|
218
|
+
"status" to "not-downloaded"
|
|
219
|
+
))
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
// ==================================================================
|
|
224
|
+
// Model lifecycle (downloadable models only)
|
|
225
|
+
// ==================================================================
|
|
226
|
+
|
|
227
|
+
AsyncFunction("downloadModel") Coroutine { modelId: String, url: String, sha256: String ->
|
|
228
|
+
sendEvent("onModelStateChange", mapOf(
|
|
229
|
+
"modelId" to modelId,
|
|
230
|
+
"status" to "downloading"
|
|
231
|
+
))
|
|
232
|
+
|
|
233
|
+
try {
|
|
234
|
+
gemmaClient.downloadModelFile(modelId, url, sha256) { bytesRead, totalBytes ->
|
|
235
|
+
sendEvent("onDownloadProgress", mapOf(
|
|
236
|
+
"modelId" to modelId,
|
|
237
|
+
"progress" to if (totalBytes > 0) bytesRead.toDouble() / totalBytes else 0.0
|
|
238
|
+
))
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
// Download complete -- file is on disk but not loaded
|
|
242
|
+
sendEvent("onModelStateChange", mapOf(
|
|
243
|
+
"modelId" to modelId,
|
|
244
|
+
"status" to "not-downloaded"
|
|
245
|
+
))
|
|
246
|
+
} catch (e: Exception) {
|
|
247
|
+
sendEvent("onModelStateChange", mapOf(
|
|
248
|
+
"modelId" to modelId,
|
|
249
|
+
"status" to "not-downloaded"
|
|
250
|
+
))
|
|
251
|
+
throw e
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
AsyncFunction("deleteModel") Coroutine { modelId: String ->
|
|
256
|
+
// If this model is active, switch back to mlkit first
|
|
257
|
+
if (activeModelId == modelId) {
|
|
258
|
+
activeModelId = "mlkit"
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
gemmaClient.deleteModelFile(modelId)
|
|
262
|
+
|
|
263
|
+
sendEvent("onModelStateChange", mapOf(
|
|
264
|
+
"modelId" to modelId,
|
|
265
|
+
"status" to "not-downloaded"
|
|
266
|
+
))
|
|
267
|
+
}
|
|
89
268
|
}
|
|
90
269
|
}
|
|
@@ -0,0 +1,326 @@
|
|
|
1
|
+
package expo.modules.aikit

import android.content.Context
import com.google.ai.edge.litertlm.Engine
import com.google.ai.edge.litertlm.EngineConfig
import com.google.ai.edge.litertlm.Conversation
import com.google.ai.edge.litertlm.Backend
import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import kotlinx.coroutines.withContext
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.IOException
import java.net.HttpURLConnection
import java.net.URL
import java.security.MessageDigest
import java.util.concurrent.atomic.AtomicBoolean

/**
 * Wrapper around LiteRT-LM Engine for Gemma 4 models.
 *
 * Concurrency model:
 * - A [Mutex] guards all state transitions (load, unload, inference, delete).
 * - [generateText]/[generateTextStream] block on the mutex if a load is in progress.
 * - [deleteModelFile] waits for inference to finish, then unloads and deletes.
 * - An [AtomicBoolean] prevents concurrent downloads; the guard is taken with
 *   compareAndSet so two racing callers cannot both start a download.
 *
 * Error contract: failures are surfaced as RuntimeException whose message is
 * "CODE:modelId:detail" (e.g. MODEL_LOAD_FAILED, INFERENCE_OOM, DOWNLOAD_FAILED);
 * the JS layer parses this format. CancellationException is always rethrown
 * unwrapped so coroutine cancellation keeps working.
 */
class GemmaInferenceClient(private val context: Context) {

  private val mutex = Mutex()
  private var engine: Engine? = null
  private var conversation: Conversation? = null
  private var loadedModelId: String? = null

  // Atomic check-and-set guard: the original @Volatile flag allowed two
  // concurrent callers to both observe `false` and start duplicate downloads.
  private val isDownloading = AtomicBoolean(false)

  // -------------------------------------------------------------------------
  // Model lifecycle
  // -------------------------------------------------------------------------

  /**
   * Load a model into memory using the LiteRT-LM Engine.
   * Unloads any previously loaded (different) model first; a no-op if [modelId]
   * is already loaded. Caller is responsible for emitting onModelStateChange events.
   *
   * @param modelId stable identifier used for routing and error messages
   * @param modelPath absolute path to the .litertlm file on disk
   * @throws RuntimeException "INFERENCE_OOM:..." on OutOfMemoryError,
   *   "MODEL_LOAD_FAILED:..." on any other failure
   */
  suspend fun loadModel(modelId: String, modelPath: String) = mutex.withLock {
    // Unload previous model if a different one is resident.
    if (loadedModelId != null && loadedModelId != modelId) {
      closeEngineLocked()
    }

    if (loadedModelId == modelId && engine != null) {
      return@withLock // Already loaded
    }

    try {
      withContext(Dispatchers.IO) {
        val engineConfig = EngineConfig(
          modelPath = modelPath,
          backend = Backend.GPU()
        )
        val newEngine = Engine(engineConfig)
        val newConversation = try {
          newEngine.initialize()
          newEngine.createConversation()
        } catch (t: Throwable) {
          // FIX: the original leaked newEngine here — its catch blocks closed the
          // (still-null) engine/conversation fields, never the local instance.
          runCatching { newEngine.close() }
          throw t
        }

        engine = newEngine
        conversation = newConversation
        loadedModelId = modelId
      }
    } catch (e: CancellationException) {
      closeEngineLocked()
      throw e // never wrap cancellation — it must propagate for structured concurrency
    } catch (e: OutOfMemoryError) {
      closeEngineLocked()
      throw RuntimeException("INFERENCE_OOM:$modelId:Device does not have enough memory to load model")
    } catch (e: Exception) {
      closeEngineLocked()
      throw RuntimeException("MODEL_LOAD_FAILED:$modelId:${e.message}")
    }
  }

  /**
   * Unload the current model from memory. Safe to call when nothing is loaded.
   */
  suspend fun unloadModel() = mutex.withLock {
    closeEngineLocked()
  }

  /** ID of the model currently resident in memory, or null. */
  fun getLoadedModelId(): String? = loadedModelId

  /** True when an Engine instance is resident in memory. */
  fun isModelLoaded(): Boolean = engine != null

  // -------------------------------------------------------------------------
  // Inference
  // -------------------------------------------------------------------------

  /**
   * Generate a complete response. Suspends until done.
   * The mutex ensures this cannot run concurrently with load/unload/delete.
   *
   * @throws RuntimeException "MODEL_NOT_DOWNLOADED:..." when no model is loaded,
   *   "INFERENCE_OOM:..." / "INFERENCE_FAILED:..." on failure
   */
  suspend fun generateText(prompt: String, systemPrompt: String): String = mutex.withLock {
    val conv = conversation
      ?: throw RuntimeException("MODEL_NOT_DOWNLOADED:${loadedModelId ?: "unknown"}:No model loaded")

    val fullPrompt = buildFullPrompt(prompt, systemPrompt)

    try {
      withContext(Dispatchers.IO) {
        conv.sendMessage(contents = fullPrompt).toString()
      }
    } catch (e: CancellationException) {
      throw e // propagate cancellation unwrapped
    } catch (e: OutOfMemoryError) {
      throw RuntimeException("INFERENCE_OOM:${loadedModelId ?: "unknown"}:Out of memory during inference")
    } catch (e: Exception) {
      throw RuntimeException("INFERENCE_FAILED:${loadedModelId ?: "unknown"}:${e.message}")
    }
  }

  /**
   * Generate a streaming response. The [onChunk] callback receives
   * (token=delta, accumulatedText=full, isDone) matching the PromptApiClient contract.
   *
   * LiteRT-LM's sendMessageAsync() returns a Flow whose each emission carries the
   * accumulated text so far, so we diff against the previous emission to extract
   * the delta token. A final ("", fullText, true) event is always emitted on success.
   */
  suspend fun generateTextStream(
    prompt: String,
    systemPrompt: String,
    onChunk: (token: String, accumulatedText: String, isDone: Boolean) -> Unit
  ) = mutex.withLock {
    val conv = conversation
      ?: throw RuntimeException("MODEL_NOT_DOWNLOADED:${loadedModelId ?: "unknown"}:No model loaded")

    val fullPrompt = buildFullPrompt(prompt, systemPrompt)

    try {
      withContext(Dispatchers.IO) {
        var previousText = ""

        conv.sendMessageAsync(contents = fullPrompt).collect { message ->
          val accumulated = message.toString()
          // Delta = newly appended suffix; empty if the emission did not grow.
          val token = if (accumulated.length > previousText.length) {
            accumulated.substring(previousText.length)
          } else {
            ""
          }
          previousText = accumulated
          onChunk(token, accumulated, false)
        }

        // Final done event for consistency with PromptApiClient
        onChunk("", previousText, true)
      }
    } catch (e: CancellationException) {
      throw e // propagate cancellation unwrapped
    } catch (e: OutOfMemoryError) {
      throw RuntimeException("INFERENCE_OOM:${loadedModelId ?: "unknown"}:Out of memory during inference")
    } catch (e: Exception) {
      throw RuntimeException("INFERENCE_FAILED:${loadedModelId ?: "unknown"}:${e.message}")
    }
  }

  // -------------------------------------------------------------------------
  // Download
  // -------------------------------------------------------------------------

  /**
   * Download a model file with progress reporting.
   * Prevents concurrent downloads. On failure, deletes partial files.
   *
   * Strategy: restart from scratch on failure (no HTTP Range resumption).
   * Downloads to a .tmp file, atomically renames on success.
   *
   * @param sha256 expected hex digest; verification is skipped when empty
   * @param onProgress called with (bytesRead, totalBytes); only invoked when the
   *   server reports a Content-Length (totalBytes > 0)
   * @throws RuntimeException DOWNLOAD_FAILED / DOWNLOAD_CORRUPT / DOWNLOAD_STORAGE_FULL
   */
  suspend fun downloadModelFile(
    modelId: String,
    url: String,
    sha256: String,
    onProgress: (bytesRead: Long, totalBytes: Long) -> Unit
  ) {
    // compareAndSet makes the guard race-free: exactly one caller wins.
    if (!isDownloading.compareAndSet(false, true)) {
      throw RuntimeException("DOWNLOAD_FAILED:$modelId:Download already in progress")
    }

    try {
      withContext(Dispatchers.IO) {
        val modelsDir = File(context.filesDir, "models")
        modelsDir.mkdirs()

        val targetFile = File(modelsDir, "$modelId.litertlm")
        val tempFile = File(modelsDir, "$modelId.litertlm.tmp")

        try {
          val connection = URL(url).openConnection() as HttpURLConnection
          connection.connectTimeout = 30_000
          connection.readTimeout = 30_000
          connection.connect()

          if (connection.responseCode != HttpURLConnection.HTTP_OK) {
            throw IOException("HTTP ${connection.responseCode}: ${connection.responseMessage}")
          }

          val totalBytes = connection.contentLengthLong
          var bytesRead = 0L

          connection.inputStream.use { input ->
            FileOutputStream(tempFile).use { output ->
              val buffer = ByteArray(8192)
              var read: Int
              while (input.read(buffer).also { read = it } != -1) {
                output.write(buffer, 0, read)
                bytesRead += read
                if (totalBytes > 0) {
                  onProgress(bytesRead, totalBytes)
                }
              }
            }
          }

          // Verify SHA256 if provided
          if (sha256.isNotEmpty()) {
            val actualHash = computeSha256(tempFile)
            if (!actualHash.equals(sha256, ignoreCase = true)) {
              tempFile.delete()
              throw RuntimeException("DOWNLOAD_CORRUPT:$modelId:SHA256 mismatch: expected $sha256, got $actualHash")
            }
          }

          // Atomic rename
          if (!tempFile.renameTo(targetFile)) {
            tempFile.delete()
            throw IOException("Failed to rename temp file to target")
          }
        } catch (e: CancellationException) {
          tempFile.delete() // still clean up the partial file on cancellation
          throw e
        } catch (e: Exception) {
          // Always clean up partial file
          tempFile.delete()
          when {
            e is RuntimeException && e.message?.startsWith("DOWNLOAD_CORRUPT") == true -> throw e
            // Heuristic: attribute the failure to disk pressure when free space is
            // very low (< ~100 MB) — TODO(review): this can misclassify network errors.
            context.filesDir.freeSpace < 100_000_000 ->
              throw RuntimeException("DOWNLOAD_STORAGE_FULL:$modelId:Insufficient disk space")
            else ->
              throw RuntimeException("DOWNLOAD_FAILED:$modelId:${e.message}")
          }
        }
      }
    } finally {
      isDownloading.set(false)
    }
  }

  /**
   * Delete a model file from disk. If the model is loaded, unloads it first.
   * Also removes any partial .tmp download for the same model.
   */
  suspend fun deleteModelFile(modelId: String) = mutex.withLock {
    // Unload if this model is currently loaded
    if (loadedModelId == modelId) {
      closeEngineLocked()
    }

    val modelFile = File(context.filesDir, "models/$modelId.litertlm")
    if (modelFile.exists()) {
      modelFile.delete()
    }
    // Also clean up any partial downloads
    val tempFile = File(context.filesDir, "models/$modelId.litertlm.tmp")
    if (tempFile.exists()) {
      tempFile.delete()
    }
  }

  /**
   * Check if a model file exists on disk.
   */
  fun isModelFileDownloaded(modelId: String): Boolean {
    return File(context.filesDir, "models/$modelId.litertlm").exists()
  }

  /**
   * Get the file path for a downloaded model.
   */
  fun getModelFilePath(modelId: String): String {
    return File(context.filesDir, "models/$modelId.litertlm").absolutePath
  }

  // -------------------------------------------------------------------------
  // Private helpers
  // -------------------------------------------------------------------------

  /**
   * Tear down the resident engine/conversation and reset all state.
   * Must only be called while holding [mutex] (all callers do).
   * Extracted: the original duplicated this 5-line sequence four times.
   */
  private fun closeEngineLocked() {
    runCatching { conversation?.close() }
    runCatching { engine?.close() }
    conversation = null
    engine = null
    loadedModelId = null
  }

  /** Prepend the system prompt (blank-separated) when one is provided. */
  private fun buildFullPrompt(prompt: String, systemPrompt: String): String {
    return if (systemPrompt.isNotBlank()) {
      "$systemPrompt\n\n$prompt"
    } else {
      prompt
    }
  }

  /** Stream the file through SHA-256 and return the lowercase hex digest. */
  private fun computeSha256(file: File): String {
    val digest = MessageDigest.getInstance("SHA-256")
    FileInputStream(file).use { fis ->
      val buffer = ByteArray(8192)
      var read: Int
      while (fis.read(buffer).also { read = it } != -1) {
        digest.update(buffer, 0, read)
      }
    }
    return digest.digest().joinToString("") { "%02x".format(it) }
  }
}
|
|
@@ -12,6 +12,7 @@ export interface ExpoAiKitNativeModule {
|
|
|
12
12
|
stopStreaming(sessionId: string): Promise<void>;
|
|
13
13
|
getBuiltInModels(): BuiltInModel[];
|
|
14
14
|
getDownloadableModelStatus(modelId: string): DownloadableModelStatus;
|
|
15
|
+
getDeviceRamBytes(): number;
|
|
15
16
|
setModel(modelId: string): Promise<void>;
|
|
16
17
|
getActiveModel(): string;
|
|
17
18
|
unloadModel(): Promise<void>;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"ExpoAiKitModule.d.ts","sourceRoot":"","sources":["../src/ExpoAiKitModule.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AAC3D,OAAO,EACL,UAAU,EACV,WAAW,EACX,cAAc,EACd,YAAY,EACZ,uBAAuB,EACvB,0BAA0B,EAC1B,qBAAqB,EACtB,MAAM,SAAS,CAAC;AAEjB,MAAM,MAAM,qBAAqB,GAAG;IAClC,aAAa,EAAE,CAAC,KAAK,EAAE,cAAc,KAAK,IAAI,CAAC;IAC/C,kBAAkB,EAAE,CAAC,KAAK,EAAE,0BAA0B,KAAK,IAAI,CAAC;IAChE,kBAAkB,EAAE,CAAC,KAAK,EAAE,qBAAqB,KAAK,IAAI,CAAC;CAC5D,CAAC;AAEF,MAAM,WAAW,qBAAqB;IAEpC,WAAW,IAAI,OAAO,CAAC;IACvB,WAAW,CACT,QAAQ,EAAE,UAAU,EAAE,EACtB,YAAY,EAAE,MAAM,GACnB,OAAO,CAAC,WAAW,CAAC,CAAC;IACxB,cAAc,CACZ,QAAQ,EAAE,UAAU,EAAE,EACtB,YAAY,EAAE,MAAM,EACpB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CAAC;IACjB,aAAa,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAGhD,gBAAgB,IAAI,YAAY,EAAE,CAAC;IACnC,0BAA0B,CAAC,OAAO,EAAE,MAAM,GAAG,uBAAuB,CAAC;
|
|
1
|
+
{"version":3,"file":"ExpoAiKitModule.d.ts","sourceRoot":"","sources":["../src/ExpoAiKitModule.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AAC3D,OAAO,EACL,UAAU,EACV,WAAW,EACX,cAAc,EACd,YAAY,EACZ,uBAAuB,EACvB,0BAA0B,EAC1B,qBAAqB,EACtB,MAAM,SAAS,CAAC;AAEjB,MAAM,MAAM,qBAAqB,GAAG;IAClC,aAAa,EAAE,CAAC,KAAK,EAAE,cAAc,KAAK,IAAI,CAAC;IAC/C,kBAAkB,EAAE,CAAC,KAAK,EAAE,0BAA0B,KAAK,IAAI,CAAC;IAChE,kBAAkB,EAAE,CAAC,KAAK,EAAE,qBAAqB,KAAK,IAAI,CAAC;CAC5D,CAAC;AAEF,MAAM,WAAW,qBAAqB;IAEpC,WAAW,IAAI,OAAO,CAAC;IACvB,WAAW,CACT,QAAQ,EAAE,UAAU,EAAE,EACtB,YAAY,EAAE,MAAM,GACnB,OAAO,CAAC,WAAW,CAAC,CAAC;IACxB,cAAc,CACZ,QAAQ,EAAE,UAAU,EAAE,EACtB,YAAY,EAAE,MAAM,EACpB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CAAC;IACjB,aAAa,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAGhD,gBAAgB,IAAI,YAAY,EAAE,CAAC;IACnC,0BAA0B,CAAC,OAAO,EAAE,MAAM,GAAG,uBAAuB,CAAC;IACrE,iBAAiB,IAAI,MAAM,CAAC;IAK5B,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACzC,cAAc,IAAI,MAAM,CAAC;IAGzB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IAG7B,aAAa,CACX,OAAO,EAAE,MAAM,EACf,GAAG,EAAE,MAAM,EACX,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,IAAI,CAAC,CAAC;IACjB,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAG5C,WAAW,CAAC,CAAC,SAAS,MAAM,qBAAqB,EAC/C,SAAS,EAAE,CAAC,EACZ,QAAQ,EAAE,qBAAqB,CAAC,CAAC,CAAC,GACjC,iBAAiB,CAAC;CACtB;AAED,QAAA,MAAM,eAAe,uBACoC,CAAC;AAE1D,eAAe,eAAe,CAAC"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"ExpoAiKitModule.js","sourceRoot":"","sources":["../src/ExpoAiKitModule.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,mBAAmB,CAAC;
|
|
1
|
+
{"version":3,"file":"ExpoAiKitModule.js","sourceRoot":"","sources":["../src/ExpoAiKitModule.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,mBAAmB,CAAC;AA6DxD,MAAM,eAAe,GACnB,mBAAmB,CAAwB,WAAW,CAAC,CAAC;AAE1D,eAAe,eAAe,CAAC","sourcesContent":["import { requireNativeModule } from 'expo-modules-core';\nimport type { EventSubscription } from 'expo-modules-core';\nimport {\n LLMMessage,\n LLMResponse,\n LLMStreamEvent,\n BuiltInModel,\n DownloadableModelStatus,\n ModelDownloadProgressEvent,\n ModelStateChangeEvent,\n} from './types';\n\nexport type ExpoAiKitModuleEvents = {\n onStreamToken: (event: LLMStreamEvent) => void;\n onDownloadProgress: (event: ModelDownloadProgressEvent) => void;\n onModelStateChange: (event: ModelStateChangeEvent) => void;\n};\n\nexport interface ExpoAiKitNativeModule {\n // Existing inference API\n isAvailable(): boolean;\n sendMessage(\n messages: LLMMessage[],\n systemPrompt: string\n ): Promise<LLMResponse>;\n startStreaming(\n messages: LLMMessage[],\n systemPrompt: string,\n sessionId: string\n ): Promise<void>;\n stopStreaming(sessionId: string): Promise<void>;\n\n // Model discovery\n getBuiltInModels(): BuiltInModel[];\n getDownloadableModelStatus(modelId: string): DownloadableModelStatus;\n getDeviceRamBytes(): number;\n\n // Model selection & memory management\n // setModel is async: switching to a downloadable model loads it into memory.\n // Auto-unloads the previous downloadable model (only one loaded at a time).\n setModel(modelId: string): Promise<void>;\n getActiveModel(): string;\n // Explicitly free memory from the loaded downloadable model.\n // Reverts to the platform built-in model.\n unloadModel(): Promise<void>;\n\n // Model lifecycle (downloadable models only)\n downloadModel(\n modelId: string,\n url: string,\n sha256: string\n ): Promise<void>;\n deleteModel(modelId: string): Promise<void>;\n\n // Event subscription\n addListener<K extends keyof ExpoAiKitModuleEvents>(\n eventName: K,\n listener: 
ExpoAiKitModuleEvents[K]\n ): EventSubscription;\n}\n\nconst ExpoAiKitModule =\n requireNativeModule<ExpoAiKitNativeModule>('ExpoAiKit');\n\nexport default ExpoAiKitModule;\n"]}
|
package/build/index.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,UAAU,EACV,cAAc,EACd,WAAW,EACX,gBAAgB,EAEhB,iBAAiB,EACjB,mBAAmB,EACnB,mBAAmB,EACnB,iBAAiB,EACjB,0BAA0B,EAC1B,wBAAwB,EACxB,iBAAiB,EACjB,kBAAkB,EAClB,oBAAoB,EACpB,sBAAsB,EACtB,YAAY,EACZ,iBAAiB,EAElB,MAAM,SAAS,CAAC;AAGjB,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC;AACzB,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC;AAiJzB;;;GAGG;AACH,wBAAsB,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC,CAKpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,wBAAsB,WAAW,CAC/B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,WAAW,CAAC,CAgBtB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,gBAAgB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAiErD;AAMD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,mBAAmB,GAC5B,OAAO,CAAC,WAAW,CAAC,CActB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,mBAAmB,GAC5B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,mBAAmB,GAC3B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAsB,OAAO,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,aAAa,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,iBAAiB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAsB,gBAAgB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,0BAA0B,GACnC,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,
wBAAgB,sBAAsB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,0BAA0B,GACnC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,wBAAsB,cAAc,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,wBAAwB,GACjC,OAAO,CAAC,WAAW,CAAC,CAkBtB;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,wBAAwB,GACjC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CA0BrD;AAMD;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAsB,OAAO,CAC3B,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,iBAAiB,GAC1B,OAAO,CAAC,kBAAkB,CAAC,CAsB7B;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,aAAa,CAC3B,WAAW,EAAE,MAAM,EACnB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,iBAAiB,GAC1B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,wBAAsB,UAAU,CAC9B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,OAAO,CAAC,kBAAkB,CAAC,CA0B7B;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,gBAAgB,CAC9B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,oBAAoB,GAC7B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAyBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AACH,wBAAsB,YAAY,CAChC,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,sBAAsB,GAC/B,OAAO,CAAC,kBAAkB,CAAC,CAsB7B;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,kBAAkB,CAChC,WAAW,EAAE,MAAM,EACnB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,sBAAsB,GAC/B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,oBAAoB,CAAC,GAAG,EAAE,MAAM,GAAG,kBAAkB,CAKpE;AAMD;;;;;;;GAOG;AACH,wBAAsB,gBAAgB,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC,CAKhE;AAED;;;;;;;GAOG;AACH,wBAAsB,qBAAqB,IAAI,OAAO,CAAC,iBAAiB,EAAE,CAAC,
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,UAAU,EACV,cAAc,EACd,WAAW,EACX,gBAAgB,EAEhB,iBAAiB,EACjB,mBAAmB,EACnB,mBAAmB,EACnB,iBAAiB,EACjB,0BAA0B,EAC1B,wBAAwB,EACxB,iBAAiB,EACjB,kBAAkB,EAClB,oBAAoB,EACpB,sBAAsB,EACtB,YAAY,EACZ,iBAAiB,EAElB,MAAM,SAAS,CAAC;AAGjB,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC;AACzB,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC;AAiJzB;;;GAGG;AACH,wBAAsB,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC,CAKpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,wBAAsB,WAAW,CAC/B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,WAAW,CAAC,CAgBtB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,gBAAgB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAiErD;AAMD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,mBAAmB,GAC5B,OAAO,CAAC,WAAW,CAAC,CActB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,mBAAmB,GAC5B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,mBAAmB,GAC3B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAsB,OAAO,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,aAAa,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,iBAAiB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAsB,gBAAgB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,0BAA0B,GACnC,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,
wBAAgB,sBAAsB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,0BAA0B,GACnC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,wBAAsB,cAAc,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,wBAAwB,GACjC,OAAO,CAAC,WAAW,CAAC,CAkBtB;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,wBAAwB,GACjC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CA0BrD;AAMD;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAsB,OAAO,CAC3B,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,iBAAiB,GAC1B,OAAO,CAAC,kBAAkB,CAAC,CAsB7B;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,aAAa,CAC3B,WAAW,EAAE,MAAM,EACnB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,iBAAiB,GAC1B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,wBAAsB,UAAU,CAC9B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,OAAO,CAAC,kBAAkB,CAAC,CA0B7B;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,gBAAgB,CAC9B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,oBAAoB,GAC7B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAyBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AACH,wBAAsB,YAAY,CAChC,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,sBAAsB,GAC/B,OAAO,CAAC,kBAAkB,CAAC,CAsB7B;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,kBAAkB,CAChC,WAAW,EAAE,MAAM,EACnB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,sBAAsB,GAC/B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,oBAAoB,CAAC,GAAG,EAAE,MAAM,GAAG,kBAAkB,CAKpE;AAMD;;;;;;;GAOG;AACH,wBAAsB,gBAAgB,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC,CAKhE;AAED;;;;;;;GAOG;AACH,wBAAsB,qBAAqB,IAAI,OAAO,CAAC,iBAAiB,EAAE,CAAC,CA6B1E;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAsB,aAAa,CACjC,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE;IAAE,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAA;CAAE,GACpD,OAAO,CAAC,IAAI,CAAC,
CAiDf;AAED;;;;;;;GAOG;AACH,wBAAsB,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAOhE;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAsB,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAE7D;AAED;;;;GAIG;AACH,wBAAgB,cAAc,IAAI,MAAM,CAEvC;AAED;;;;;GAKG;AACH,wBAAsB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,CAEjD"}
|
package/build/index.js
CHANGED
|
@@ -923,6 +923,13 @@ export async function getDownloadableModels() {
|
|
|
923
923
|
return [];
|
|
924
924
|
}
|
|
925
925
|
const platformModels = MODEL_REGISTRY.filter((entry) => entry.supportedPlatforms.includes(Platform.OS));
|
|
926
|
+
let deviceRamBytes = 0;
|
|
927
|
+
try {
|
|
928
|
+
deviceRamBytes = ExpoAiKitModule.getDeviceRamBytes();
|
|
929
|
+
}
|
|
930
|
+
catch {
|
|
931
|
+
// Native call unavailable -- default to 0 (all models will show meetsRequirements: false)
|
|
932
|
+
}
|
|
926
933
|
return platformModels.map((entry) => {
|
|
927
934
|
const status = ExpoAiKitModule.getDownloadableModelStatus(entry.id);
|
|
928
935
|
return {
|
|
@@ -932,6 +939,7 @@ export async function getDownloadableModels() {
|
|
|
932
939
|
sizeBytes: entry.sizeBytes,
|
|
933
940
|
contextWindow: entry.contextWindow,
|
|
934
941
|
minRamBytes: entry.minRamBytes,
|
|
942
|
+
meetsRequirements: deviceRamBytes >= entry.minRamBytes,
|
|
935
943
|
status,
|
|
936
944
|
};
|
|
937
945
|
});
|
|
@@ -959,6 +967,17 @@ export async function downloadModel(modelId, options) {
|
|
|
959
967
|
if (!entry.supportedPlatforms.includes(Platform.OS)) {
|
|
960
968
|
throw new ModelError('DEVICE_NOT_SUPPORTED', modelId, `Model ${modelId} is not supported on ${Platform.OS}`);
|
|
961
969
|
}
|
|
970
|
+
try {
|
|
971
|
+
const deviceRamBytes = ExpoAiKitModule.getDeviceRamBytes();
|
|
972
|
+
if (deviceRamBytes < entry.minRamBytes) {
|
|
973
|
+
throw new ModelError('DEVICE_NOT_SUPPORTED', modelId, `Device has ${Math.round(deviceRamBytes / 1e9)}GB RAM, model requires ${Math.round(entry.minRamBytes / 1e9)}GB`);
|
|
974
|
+
}
|
|
975
|
+
}
|
|
976
|
+
catch (e) {
|
|
977
|
+
if (e instanceof ModelError)
|
|
978
|
+
throw e;
|
|
979
|
+
// If getDeviceRamBytes is unavailable, skip the check
|
|
980
|
+
}
|
|
962
981
|
let subscription;
|
|
963
982
|
if (options?.onProgress) {
|
|
964
983
|
subscription = ExpoAiKitModule.addListener('onDownloadProgress', (event) => {
|