llama-cpp-capacitor 0.0.7 → 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -105,41 +105,41 @@ function(build_library target_name arch cpu_flags)
   )
 endfunction()
 
-# Build for different architectures
+# Build for different architectures - use generic name for Java compatibility
 if (ANDROID_ABI STREQUAL "arm64-v8a")
-    build_library(llama-cpp-arm64-v8a "arm" "-march=armv8-a")
+    build_library(llama-cpp "arm" "-march=armv8-a")
 elseif (ANDROID_ABI STREQUAL "armeabi-v7a")
-    build_library(llama-cpp-armeabi-v7a "arm" "-march=armv7-a -mfpu=neon")
+    build_library(llama-cpp "arm" "-march=armv7-a -mfpu=neon")
 elseif (ANDROID_ABI STREQUAL "x86")
-    build_library(llama-cpp-x86 "x86" "-march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32")
+    build_library(llama-cpp "x86" "-march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32")
 elseif (ANDROID_ABI STREQUAL "x86_64")
-    build_library(llama-cpp-x86_64 "x86" "-march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel")
+    build_library(llama-cpp "x86" "-march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel")
 endif()
 
 # Set compile definitions for the target that was actually built
 if (ANDROID_ABI STREQUAL "arm64-v8a")
-    target_compile_definitions(llama-cpp-arm64-v8a PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
         -DLM_GGML_CPU_GENERIC
     )
 elseif (ANDROID_ABI STREQUAL "armeabi-v7a")
-    target_compile_definitions(llama-cpp-armeabi-v7a PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
         -DLM_GGML_CPU_GENERIC
     )
 elseif (ANDROID_ABI STREQUAL "x86")
-    target_compile_definitions(llama-cpp-x86 PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
         -DLM_GGML_CPU_GENERIC
     )
 elseif (ANDROID_ABI STREQUAL "x86_64")
-    target_compile_definitions(llama-cpp-x86_64 PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
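
The switch to a single llama-cpp target name matters on Android because System.loadLibrary takes one fixed library name, and the runtime picks the matching lib/<ABI>/libllama-cpp.so on its own. A minimal sketch of the Java loader this enables (assumed code, not part of the diff; only the package and class names come from the JNI symbols further down):

    package ai.annadata.plugin.capacitor;

    public class LlamaCpp {
        static {
            // Resolves to lib/<ABI>/libllama-cpp.so for the device's ABI.
            // With the old per-ABI target names (llama-cpp-arm64-v8a, ...),
            // the Java side would have to detect the ABI and choose a
            // library name itself.
            System.loadLibrary("llama-cpp");
        }
    }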
@@ -134,7 +134,7 @@ static jlong next_context_id = 1;
 extern "C" {
 
 JNIEXPORT jlong JNICALL
-Java_ai_annadata_plugin_capacitor_LlamaCpp_initContext(
+Java_ai_annadata_plugin_capacitor_LlamaCpp_initContextNative(
     JNIEnv* env, jobject thiz, jstring model_path, jobject params) {
 
     try {
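
The Native suffix on the export works because the JVM links a native method to the symbol Java_<package>_<class>_<method>, with dots mangled to underscores; Java_ai_annadata_plugin_capacitor_LlamaCpp_initContextNative therefore binds to a method named initContextNative on ai.annadata.plugin.capacitor.LlamaCpp. A sketch of the matching Java declaration, with types inferred from the C++ signature above (the Object type for params is an assumption; the real plugin may use a concrete params class):

    package ai.annadata.plugin.capacitor;

    public class LlamaCpp {
        // Mangles to Java_ai_annadata_plugin_capacitor_LlamaCpp_initContextNative.
        // jlong -> long, jstring -> String; jobject is shown here as Object.
        // The jobject thiz in the C++ signature means this is an instance
        // method (a static native would receive jclass instead).
        private native long initContextNative(String modelPath, Object params);
    }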
@@ -176,7 +176,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_initContext(
 }
 
 JNIEXPORT void JNICALL
-Java_ai_annadata_plugin_capacitor_LlamaCpp_releaseContext(
+Java_ai_annadata_plugin_capacitor_LlamaCpp_releaseContextNative(
     JNIEnv* env, jobject thiz, jlong context_id) {
 
     try {
@@ -192,8 +192,8 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_releaseContext(
 }
 
 JNIEXPORT jstring JNICALL
-Java_ai_annadata_plugin_capacitor_LlamaCpp_completion(
-    JNIEnv* env, jobject thiz, jlong context_id, jstring prompt, jobject params) {
+Java_ai_annadata_plugin_capacitor_LlamaCpp_completionNative(
+    JNIEnv* env, jobject thiz, jlong context_id, jstring prompt) {
 
     try {
         auto it = contexts.find(context_id);
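
Note that completionNative changes both name and arity: the jobject params argument is gone. The Java declaration has to change in lockstep, and the failure mode is subtle: JNI short-name lookup does not encode argument types, so a stale three-argument Java declaration would still link against this export and then invoke it with a mismatched frame. A hedged sketch of the expected counterpart, inferred from the new C++ signature (assumed, not shown in the diff):

    // Inside the LlamaCpp class sketched above. Prompt-only: sampling
    // parameters are presumably configured elsewhere (e.g. at context init).
    private native String completionNative(long contextId, String prompt);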
@@ -222,7 +222,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_completion(
 }
 
 JNIEXPORT void JNICALL
-Java_ai_annadata_plugin_capacitor_LlamaCpp_stopCompletion(
+Java_ai_annadata_plugin_capacitor_LlamaCpp_stopCompletionNative(
     JNIEnv* env, jobject thiz, jlong context_id) {
 
     try {
@@ -238,7 +238,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_stopCompletion(
 }
 
 JNIEXPORT jstring JNICALL
-Java_ai_annadata_plugin_capacitor_LlamaCpp_getFormattedChat(
+Java_ai_annadata_plugin_capacitor_LlamaCpp_getFormattedChatNative(
     JNIEnv* env, jobject thiz, jlong context_id, jstring messages, jstring chat_template) {
 
     try {
@@ -267,7 +267,7 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_getFormattedChat(
 }
 
 JNIEXPORT jboolean JNICALL
-Java_ai_annadata_plugin_capacitor_LlamaCpp_toggleNativeLog(
+Java_ai_annadata_plugin_capacitor_LlamaCpp_toggleNativeLogNative(
     JNIEnv* env, jobject thiz, jboolean enabled) {
 
     try {
@@ -281,6 +281,8 @@ Java_ai_annadata_plugin_capacitor_LlamaCpp_toggleNativeLog(
     }
 }
 
+
+
 } // extern "C"
 
 } // namespace jni_utils
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "llama-cpp-capacitor",
-  "version": "0.0.7",
+  "version": "0.0.9",
   "description": "A native Capacitor plugin that embeds llama.cpp directly into mobile apps, enabling offline AI inference with comprehensive support for text generation, multimodal processing, TTS, LoRA adapters, and more.",
   "main": "dist/plugin.cjs.js",
   "module": "dist/esm/index.js",