llama-cpp-capacitor 0.0.8 → 0.0.9

This diff shows the content changes between publicly released versions of this package, as published to one of the supported registries. The information is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
@@ -134,7 +134,7 @@ static jlong next_context_id = 1;
134
134
  extern "C" {
135
135
 
136
136
  JNIEXPORT jlong JNICALL
137
- Java_ai_annadata_plugin_capacitor_LlamaCpp_initContext(
137
+ Java_ai_annadata_plugin_capacitor_LlamaCpp_initContextNative(
138
138
  JNIEnv* env, jobject thiz, jstring model_path, jobject params) {
139
139
 
140
140
  try {
@@ -176,7 +176,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_initContext(
176
176
  }
177
177
 
178
178
  JNIEXPORT void JNICALL
179
- Java_ai_annadata_plugin_capacitor_LlamaCpp_releaseContext(
179
+ Java_ai_annadata_plugin_capacitor_LlamaCpp_releaseContextNative(
180
180
  JNIEnv* env, jobject thiz, jlong context_id) {
181
181
 
182
182
  try {
@@ -192,8 +192,8 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_releaseContext(
192
192
  }
193
193
 
194
194
  JNIEXPORT jstring JNICALL
195
- Java_ai_annadata_plugin_capacitor_LlamaCpp_completion(
196
- JNIEnv* env, jobject thiz, jlong context_id, jstring prompt, jobject params) {
195
+ Java_ai_annadata_plugin_capacitor_LlamaCpp_completionNative(
196
+ JNIEnv* env, jobject thiz, jlong context_id, jstring prompt) {
197
197
 
198
198
  try {
199
199
  auto it = contexts.find(context_id);
@@ -222,7 +222,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_completion(
222
222
  }
223
223
 
224
224
  JNIEXPORT void JNICALL
225
- Java_ai_annadata_plugin_capacitor_LlamaCpp_stopCompletion(
225
+ Java_ai_annadata_plugin_capacitor_LlamaCpp_stopCompletionNative(
226
226
  JNIEnv* env, jobject thiz, jlong context_id) {
227
227
 
228
228
  try {
@@ -238,7 +238,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_stopCompletion(
238
238
  }
239
239
 
240
240
  JNIEXPORT jstring JNICALL
241
- Java_ai_annadata_plugin_capacitor_LlamaCpp_getFormattedChat(
241
+ Java_ai_annadata_plugin_capacitor_LlamaCpp_getFormattedChatNative(
242
242
  JNIEnv* env, jobject thiz, jlong context_id, jstring messages, jstring chat_template) {
243
243
 
244
244
  try {
@@ -267,7 +267,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_getFormattedChat(
267
267
  }
268
268
 
269
269
  JNIEXPORT jboolean JNICALL
270
- Java_ai_annadata_plugin_capacitor_LlamaCpp_toggleNativeLog(
270
+ Java_ai_annadata_plugin_capacitor_LlamaCpp_toggleNativeLogNative(
271
271
  JNIEnv* env, jobject thiz, jboolean enabled) {
272
272
 
273
273
  try {
@@ -281,6 +281,8 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_toggleNativeLog(
281
281
  }
282
282
  }
283
283
 
284
+
285
+
284
286
  } // extern "C"
285
287
 
286
288
  } // namespace jni_utils
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "llama-cpp-capacitor",
3
- "version": "0.0.8",
3
+ "version": "0.0.9",
4
4
  "description": "A native Capacitor plugin that embeds llama.cpp directly into mobile apps, enabling offline AI inference with comprehensive support for text generation, multimodal processing, TTS, LoRA adapters, and more.",
5
5
  "main": "dist/plugin.cjs.js",
6
6
  "module": "dist/esm/index.js",