llama-cpp-capacitor 0.0.7 → 0.0.8

@@ -105,41 +105,41 @@ function(build_library target_name arch cpu_flags)
   )
 endfunction()
 
-# Build for different architectures
+# Build for different architectures - use generic name for Java compatibility
 if (ANDROID_ABI STREQUAL "arm64-v8a")
-    build_library(llama-cpp-arm64-v8a "arm" "-march=armv8-a")
+    build_library(llama-cpp "arm" "-march=armv8-a")
 elseif (ANDROID_ABI STREQUAL "armeabi-v7a")
-    build_library(llama-cpp-armeabi-v7a "arm" "-march=armv7-a -mfpu=neon")
+    build_library(llama-cpp "arm" "-march=armv7-a -mfpu=neon")
 elseif (ANDROID_ABI STREQUAL "x86")
-    build_library(llama-cpp-x86 "x86" "-march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32")
+    build_library(llama-cpp "x86" "-march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32")
 elseif (ANDROID_ABI STREQUAL "x86_64")
-    build_library(llama-cpp-x86_64 "x86" "-march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel")
+    build_library(llama-cpp "x86" "-march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel")
 endif()
 
 # Set compile definitions for the target that was actually built
 if (ANDROID_ABI STREQUAL "arm64-v8a")
-    target_compile_definitions(llama-cpp-arm64-v8a PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
         -DLM_GGML_CPU_GENERIC
     )
 elseif (ANDROID_ABI STREQUAL "armeabi-v7a")
-    target_compile_definitions(llama-cpp-armeabi-v7a PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
         -DLM_GGML_CPU_GENERIC
     )
 elseif (ANDROID_ABI STREQUAL "x86")
-    target_compile_definitions(llama-cpp-x86 PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
         -DLM_GGML_CPU_GENERIC
     )
 elseif (ANDROID_ABI STREQUAL "x86_64")
-    target_compile_definitions(llama-cpp-x86_64 PRIVATE
+    target_compile_definitions(llama-cpp PRIVATE
         -DNDEBUG
         -DO3
         -DLM_GGML_USE_CPU
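
The switch from per-ABI target names to a single llama-cpp target is what makes the Java side work without per-architecture branching: System.loadLibrary() takes one fixed library name, and the Android packaging layer already selects the correct ABI directory at install time. A minimal sketch of the loading side, assuming a plain JNI bridge class (the class name and native method below are illustrative, not taken from this package):

    // Illustrative sketch, not the plugin's actual bridge class.
    // With the generic target name, CMake emits libllama-cpp.so for every ABI,
    // so one hard-coded name covers arm64-v8a, armeabi-v7a, x86 and x86_64.
    public final class LlamaCppNative {
        static {
            System.loadLibrary("llama-cpp"); // resolves libllama-cpp.so from the APK's jniLibs
        }

        private LlamaCppNative() {}

        // Hypothetical native entry point; the real JNI surface is defined
        // elsewhere in the plugin.
        public static native long initContext(String modelPath);
    }

Under the old scheme each ABI produced a differently named .so (libllama-cpp-arm64-v8a.so and so on), so the Java loader would have had to inspect android.os.Build.SUPPORTED_ABIS to pick the matching name; the generic target removes that branch.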
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "llama-cpp-capacitor",
-  "version": "0.0.7",
+  "version": "0.0.8",
   "description": "A native Capacitor plugin that embeds llama.cpp directly into mobile apps, enabling offline AI inference with comprehensive support for text generation, multimodal processing, TTS, LoRA adapters, and more.",
   "main": "dist/plugin.cjs.js",
   "module": "dist/esm/index.js",