cui-llama.rn 1.4.2 → 1.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +93 -114
- package/android/src/main/CMakeLists.txt +5 -0
- package/android/src/main/build-arm64/CMakeCache.txt +429 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CMakeCCompiler.cmake +81 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CMakeCXXCompiler.cmake +101 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CMakeDetermineCompilerABI_C.bin +0 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CMakeDetermineCompilerABI_CXX.bin +0 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CMakeSystem.cmake +15 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CompilerIdC/CMakeCCompilerId.c +904 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CompilerIdC/CMakeCCompilerId.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CompilerIdCXX/CMakeCXXCompilerId.cpp +919 -0
- package/android/src/main/build-arm64/CMakeFiles/3.31.4/CompilerIdCXX/CMakeCXXCompilerId.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/CMakeConfigureLog.yaml +431 -0
- package/android/src/main/build-arm64/CMakeFiles/CMakeDirectoryInformation.cmake +16 -0
- package/android/src/main/build-arm64/CMakeFiles/Makefile.cmake +165 -0
- package/android/src/main/build-arm64/CMakeFiles/Makefile2 +297 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/1 +1 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/2 +1 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/3 +1 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/4 +1 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/5 +1 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/6 +1 -0
- package/android/src/main/build-arm64/CMakeFiles/Progress/count.txt +1 -0
- package/android/src/main/build-arm64/CMakeFiles/TargetDirectories.txt +8 -0
- package/android/src/main/build-arm64/CMakeFiles/cmake.check_cache +1 -0
- package/android/src/main/build-arm64/CMakeFiles/progress.marks +1 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-alloc.c.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-alloc.c.o.d +58 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-backend-reg.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-backend-reg.cpp.o.d +756 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-backend.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-backend.cpp.o.d +709 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu-aarch64.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu-aarch64.cpp.o.d +714 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu-quants.c.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu-quants.c.o.d +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu-traits.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu-traits.cpp.o.d +708 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu.c.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu.c.o.d +113 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-cpu.cpp.o.d +713 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-opt.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-opt.cpp.o.d +763 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-quants.c.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-quants.c.o.d +61 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-threading.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml-threading.cpp.o.d +707 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml.c.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/ggml.c.o.d +104 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/gguf.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/gguf.cpp.o.d +714 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/log.cpp.o +0 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/D_/dev/react-native/cui-llama.rn/cpp/log.cpp.o.d +723 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/DependInfo.cmake +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/build.make +722 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/cmake_clean.cmake +89 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/compiler_depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/compiler_depend.ts +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/flags.make +17 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama.dir/progress.make +41 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/DependInfo.cmake +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/build.make +722 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/cmake_clean.cmake +89 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/compiler_depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/compiler_depend.ts +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/flags.make +17 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8.dir/progress.make +41 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/DependInfo.cmake +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/build.make +722 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/cmake_clean.cmake +89 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/compiler_depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/compiler_depend.ts +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/flags.make +17 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2.dir/progress.make +41 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/DependInfo.cmake +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/build.make +722 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/cmake_clean.cmake +89 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/compiler_depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/compiler_depend.ts +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/flags.make +17 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod.dir/progress.make +41 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/DependInfo.cmake +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/build.make +722 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/cmake_clean.cmake +89 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/compiler_depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/compiler_depend.ts +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/flags.make +17 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_dotprod_i8mm.dir/progress.make +41 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/DependInfo.cmake +62 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/build.make +722 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/cmake_clean.cmake +89 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/compiler_depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/compiler_depend.ts +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/depend.make +2 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/flags.make +17 -0
- package/android/src/main/build-arm64/CMakeFiles/rnllama_v8_2_i8mm.dir/progress.make +41 -0
- package/android/src/main/build-arm64/Makefile +1862 -0
- package/android/src/main/build-arm64/cmake_install.cmake +66 -0
- package/android/src/main/java/com/rnllama/LlamaContext.java +92 -18
- package/android/src/main/java/com/rnllama/RNLlama.java +37 -4
- package/android/src/main/jni-utils.h +6 -0
- package/android/src/main/jni.cpp +287 -31
- package/android/src/main/jniLibs/arm64-v8a/librnllama.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/librnllama_v8.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/librnllama_v8_2.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/librnllama_v8_2_dotprod.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/librnllama_v8_2_dotprod_i8mm.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/librnllama_v8_2_i8mm.so +0 -0
- package/android/src/main/jniLibs/x86_64/librnllama.so +0 -0
- package/android/src/main/jniLibs/x86_64/librnllama_x86_64.so +0 -0
- package/android/src/newarch/java/com/rnllama/RNLlamaModule.java +7 -2
- package/android/src/oldarch/java/com/rnllama/RNLlamaModule.java +7 -2
- package/cpp/chat-template.hpp +529 -0
- package/cpp/chat.cpp +1085 -0
- package/cpp/chat.hpp +55 -0
- package/cpp/common.cpp +159 -36
- package/cpp/common.h +64 -19
- package/cpp/ggml-alloc.c +1 -13
- package/cpp/ggml-common.h +0 -2
- package/cpp/ggml-cpu-impl.h +6 -12
- package/cpp/ggml-cpu-quants.c +937 -340
- package/cpp/ggml-cpu.c +207 -113
- package/cpp/ggml-cpu.cpp +4 -6
- package/cpp/ggml-cpu.h +1 -1
- package/cpp/ggml-metal.h +66 -66
- package/cpp/ggml-metal.m +141 -23
- package/cpp/ggml.c +24 -14
- package/cpp/ggml.h +2 -2
- package/cpp/json-schema-to-grammar.cpp +46 -66
- package/cpp/json-schema-to-grammar.h +15 -1
- package/cpp/llama-arch.cpp +7 -2
- package/cpp/llama-arch.h +3 -1
- package/cpp/llama-chat.cpp +10 -1
- package/cpp/llama-chat.h +1 -0
- package/cpp/llama-grammar.cpp +86 -6
- package/cpp/llama-grammar.h +22 -1
- package/cpp/llama-impl.h +6 -6
- package/cpp/llama-kv-cache.h +1 -1
- package/cpp/llama-mmap.h +1 -0
- package/cpp/llama-model-loader.cpp +1 -1
- package/cpp/llama-model.cpp +32 -6
- package/cpp/llama-sampling.cpp +178 -61
- package/cpp/llama-vocab.cpp +8 -3
- package/cpp/llama.cpp +188 -128
- package/cpp/llama.h +27 -10
- package/cpp/log.cpp +32 -10
- package/cpp/log.h +12 -1
- package/cpp/minja.hpp +2883 -0
- package/cpp/rn-llama.cpp +82 -5
- package/cpp/rn-llama.h +16 -1
- package/cpp/sampling.cpp +68 -41
- package/cpp/sampling.h +3 -0
- package/cpp/sgemm.cpp +9 -8
- package/cpp/unicode.cpp +9 -2
- package/ios/CMakeLists.txt +6 -0
- package/ios/RNLlama.h +0 -8
- package/ios/RNLlama.mm +27 -3
- package/ios/RNLlamaContext.h +10 -1
- package/ios/RNLlamaContext.mm +269 -57
- package/jest/mock.js +21 -2
- package/lib/commonjs/NativeRNLlama.js.map +1 -1
- package/lib/commonjs/grammar.js +3 -0
- package/lib/commonjs/grammar.js.map +1 -1
- package/lib/commonjs/index.js +87 -13
- package/lib/commonjs/index.js.map +1 -1
- package/lib/module/NativeRNLlama.js.map +1 -1
- package/lib/module/grammar.js +3 -0
- package/lib/module/grammar.js.map +1 -1
- package/lib/module/index.js +86 -13
- package/lib/module/index.js.map +1 -1
- package/lib/typescript/NativeRNLlama.d.ts +107 -2
- package/lib/typescript/NativeRNLlama.d.ts.map +1 -1
- package/lib/typescript/grammar.d.ts.map +1 -1
- package/lib/typescript/index.d.ts +32 -7
- package/lib/typescript/index.d.ts.map +1 -1
- package/llama-rn.podspec +1 -1
- package/package.json +2 -2
- package/src/NativeRNLlama.ts +115 -3
- package/src/grammar.ts +3 -0
- package/src/index.ts +138 -21

package/android/src/main/build-arm64/cmake_install.cmake

@@ -0,0 +1,66 @@
+# Install script for directory: D:/dev/react-native/cui-llama.rn/android/src/main
+
+# Set the install prefix
+if(NOT DEFINED CMAKE_INSTALL_PREFIX)
+  set(CMAKE_INSTALL_PREFIX "C:/Program Files (x86)/llama.rn")
+endif()
+string(REGEX REPLACE "/$" "" CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
+
+# Set the install configuration name.
+if(NOT DEFINED CMAKE_INSTALL_CONFIG_NAME)
+  if(BUILD_TYPE)
+    string(REGEX REPLACE "^[^A-Za-z0-9_]+" ""
+           CMAKE_INSTALL_CONFIG_NAME "${BUILD_TYPE}")
+  else()
+    set(CMAKE_INSTALL_CONFIG_NAME "Release")
+  endif()
+  message(STATUS "Install configuration: \"${CMAKE_INSTALL_CONFIG_NAME}\"")
+endif()
+
+# Set the component getting installed.
+if(NOT CMAKE_INSTALL_COMPONENT)
+  if(COMPONENT)
+    message(STATUS "Install component: \"${COMPONENT}\"")
+    set(CMAKE_INSTALL_COMPONENT "${COMPONENT}")
+  else()
+    set(CMAKE_INSTALL_COMPONENT)
+  endif()
+endif()
+
+# Install shared libraries without execute permission?
+if(NOT DEFINED CMAKE_INSTALL_SO_NO_EXE)
+  set(CMAKE_INSTALL_SO_NO_EXE "0")
+endif()
+
+# Is this installation the result of a crosscompile?
+if(NOT DEFINED CMAKE_CROSSCOMPILING)
+  set(CMAKE_CROSSCOMPILING "TRUE")
+endif()
+
+# Set path to fallback-tool for dependency-resolution.
+if(NOT DEFINED CMAKE_OBJDUMP)
+  set(CMAKE_OBJDUMP "D:/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/windows-x86_64/bin/llvm-objdump.exe")
+endif()
+
+string(REPLACE ";" "\n" CMAKE_INSTALL_MANIFEST_CONTENT
+       "${CMAKE_INSTALL_MANIFEST_FILES}")
+if(CMAKE_INSTALL_LOCAL_ONLY)
+  file(WRITE "D:/dev/react-native/cui-llama.rn/android/src/main/build-arm64/install_local_manifest.txt"
+     "${CMAKE_INSTALL_MANIFEST_CONTENT}")
+endif()
+if(CMAKE_INSTALL_COMPONENT)
+  if(CMAKE_INSTALL_COMPONENT MATCHES "^[a-zA-Z0-9_.+-]+$")
+    set(CMAKE_INSTALL_MANIFEST "install_manifest_${CMAKE_INSTALL_COMPONENT}.txt")
+  else()
+    string(MD5 CMAKE_INST_COMP_HASH "${CMAKE_INSTALL_COMPONENT}")
+    set(CMAKE_INSTALL_MANIFEST "install_manifest_${CMAKE_INST_COMP_HASH}.txt")
+    unset(CMAKE_INST_COMP_HASH)
+  endif()
+else()
+  set(CMAKE_INSTALL_MANIFEST "install_manifest.txt")
+endif()
+
+if(NOT CMAKE_INSTALL_LOCAL_ONLY)
+  file(WRITE "D:/dev/react-native/cui-llama.rn/android/src/main/build-arm64/${CMAKE_INSTALL_MANIFEST}"
+     "${CMAKE_INSTALL_MANIFEST_CONTENT}")
+endif()

package/android/src/main/java/com/rnllama/LlamaContext.java

@@ -28,6 +28,32 @@ public class LlamaContext {

  private static String loadedLibrary = "";

+  private static class NativeLogCallback {
+    DeviceEventManagerModule.RCTDeviceEventEmitter eventEmitter;
+
+    public NativeLogCallback(ReactApplicationContext reactContext) {
+      this.eventEmitter = reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
+    }
+
+    void emitNativeLog(String level, String text) {
+      WritableMap event = Arguments.createMap();
+      event.putString("level", level);
+      event.putString("text", text);
+      eventEmitter.emit("@RNLlama_onNativeLog", event);
+    }
+  }
+
+  static void toggleNativeLog(ReactApplicationContext reactContext, boolean enabled) {
+    if (LlamaContext.isArchNotSupported()) {
+      throw new IllegalStateException("Only 64-bit architectures are supported");
+    }
+    if (enabled) {
+      setupLog(new NativeLogCallback(reactContext));
+    } else {
+      unsetLog();
+    }
+  }
+
  private int id;
  private ReactApplicationContext reactContext;
  private long context;

@@ -73,7 +99,7 @@ public class LlamaContext {
  }

  public LlamaContext(int id, ReactApplicationContext reactContext, ReadableMap params) {
-    if (LlamaContext.
+    if (LlamaContext.isArchNotSupported()) {
      throw new IllegalStateException("Only 64-bit architectures are supported");
    }
    if (!params.hasKey("model")) {

@@ -95,13 +121,17 @@ public class LlamaContext {
        Log.e(NAME, "Failed to convert to FD!");
      }
    }
-
+
    // Check if file has GGUF magic numbers
    this.id = id;
    eventEmitter = reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
    this.context = initContext(
      // String model,
      modelName,
+      // String chat_template,
+      params.hasKey("chat_template") ? params.getString("chat_template") : "",
+      // String reasoning_format,
+      params.hasKey("reasoning_format") ? params.getString("reasoning_format") : "none",
      // boolean embedding,
      params.hasKey("embedding") ? params.getBoolean("embedding") : false,
      // int embd_normalize,

@@ -166,19 +196,31 @@ public class LlamaContext {
    return loadedLibrary;
  }

-  public
-
-
-
-
-    return
+  public WritableMap getFormattedChatWithJinja(String messages, String chatTemplate, ReadableMap params) {
+    String jsonSchema = params.hasKey("json_schema") ? params.getString("json_schema") : "";
+    String tools = params.hasKey("tools") ? params.getString("tools") : "";
+    Boolean parallelToolCalls = params.hasKey("parallel_tool_calls") ? params.getBoolean("parallel_tool_calls") : false;
+    String toolChoice = params.hasKey("tool_choice") ? params.getString("tool_choice") : "";
+    return getFormattedChatWithJinja(
+      this.context,
+      messages,
+      chatTemplate == null ? "" : chatTemplate,
+      jsonSchema,
+      tools,
+      parallelToolCalls,
+      toolChoice
+    );
+  }
+
+  public String getFormattedChat(String messages, String chatTemplate) {
+    return getFormattedChat(this.context, messages, chatTemplate == null ? "" : chatTemplate);
  }

  private void emitLoadProgress(int progress) {
    WritableMap event = Arguments.createMap();
    event.putInt("contextId", LlamaContext.this.id);
    event.putInt("progress", progress);
-    eventEmitter.emit("@
+    eventEmitter.emit("@RNLlama_onInitContextProgress", event);
  }

  private static class LoadProgressCallback {

@@ -259,8 +301,18 @@ public class LlamaContext {
      this.context,
      // String prompt,
      params.getString("prompt"),
+      // int chat_format,
+      params.hasKey("chat_format") ? params.getInt("chat_format") : 0,
      // String grammar,
      params.hasKey("grammar") ? params.getString("grammar") : "",
+      // String json_schema,
+      params.hasKey("json_schema") ? params.getString("json_schema") : "",
+      // boolean grammar_lazy,
+      params.hasKey("grammar_lazy") ? params.getBoolean("grammar_lazy") : false,
+      // ReadableArray grammar_triggers,
+      params.hasKey("grammar_triggers") ? params.getArray("grammar_triggers") : null,
+      // ReadableArray preserved_tokens,
+      params.hasKey("preserved_tokens") ? params.getArray("preserved_tokens") : null,
      // float temperature,
      params.hasKey("temperature") ? (float) params.getDouble("temperature") : 0.7f,
      // int n_threads,

@@ -311,6 +363,8 @@ public class LlamaContext {
      params.hasKey("dry_allowed_length") ? params.getInt("dry_allowed_length") : 2,
      // int dry_penalty_last_n,
      params.hasKey("dry_penalty_last_n") ? params.getInt("dry_penalty_last_n") : -1,
+      // float top_n_sigma,
+      params.hasKey("top_n_sigma") ? (float) params.getDouble("top_n_sigma") : -1.0f,
      // String[] dry_sequence_breakers, when undef, we use the default definition from common.h
      params.hasKey("dry_sequence_breakers") ? params.getArray("dry_sequence_breakers").toArrayList().toArray(new String[0]) : new String[]{"\n", ":", "\"", "*"},
      // PartialCompletionCallback partial_completion_callback

@@ -431,15 +485,13 @@ public class LlamaContext {
      // Log.d(NAME, "Loading librnllama_v8_7.so with runtime feature detection");
      // System.loadLibrary("rnllama_v8_7");
    } else if (LlamaContext.isX86_64()) {
-
-
-
+      Log.d(NAME, "Loading librnllama_x86_64.so");
+      System.loadLibrary("rnllama_x86_64");
+      loadedLibrary = "rnllama_x86_64";
    } else {
-
-      System.loadLibrary("rnllama");
-      loadedLibrary = "rnllama";
+      Log.d(NAME, "ARM32 is not supported, skipping loading library");
    }
-    }
+  }

  public static boolean isArm64V8a() {
    return Build.SUPPORTED_ABIS[0].equals("arm64-v8a");

@@ -449,6 +501,10 @@ public class LlamaContext {
    return Build.SUPPORTED_ABIS[0].equals("x86_64");
  }

+  private static boolean isArchNotSupported() {
+    return isArm64V8a() == false && isX86_64() == false;
+  }
+
  public static String getCpuFeatures() {
    File file = new File("/proc/cpuinfo");
    StringBuilder stringBuilder = new StringBuilder();

@@ -481,6 +537,8 @@ public class LlamaContext {
  );
  protected static native long initContext(
    String model,
+    String chat_template,
+    String reasoning_format,
    boolean embedding,
    int embd_normalize,
    int n_ctx,

@@ -506,9 +564,18 @@ public class LlamaContext {
  protected static native WritableMap loadModelDetails(
    long contextPtr
  );
+  protected static native WritableMap getFormattedChatWithJinja(
+    long contextPtr,
+    String messages,
+    String chatTemplate,
+    String jsonSchema,
+    String tools,
+    boolean parallelToolCalls,
+    String toolChoice
+  );
  protected static native String getFormattedChat(
    long contextPtr,
-
+    String messages,
    String chatTemplate
  );
  protected static native WritableMap loadSession(

@@ -523,7 +590,12 @@ public class LlamaContext {
  protected static native WritableMap doCompletion(
    long context_ptr,
    String prompt,
+    int chat_format,
    String grammar,
+    String json_schema,
+    boolean grammar_lazy,
+    ReadableArray grammar_triggers,
+    ReadableArray preserved_tokens,
    float temperature,
    int n_threads,
    int n_predict,

@@ -549,6 +621,7 @@ public class LlamaContext {
    float dry_base,
    int dry_allowed_length,
    int dry_penalty_last_n,
+    float top_n_sigma,
    String[] dry_sequence_breakers,
    PartialCompletionCallback partial_completion_callback
  );

@@ -567,5 +640,6 @@ public class LlamaContext {
  protected static native void removeLoraAdapters(long contextPtr);
  protected static native WritableArray getLoadedLoraAdapters(long contextPtr);
  protected static native void freeContext(long contextPtr);
-  protected static native void
+  protected static native void setupLog(NativeLogCallback logCallback);
+  protected static native void unsetLog();
}
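
The LlamaContext.java hunks above add native log forwarding (NativeLogCallback, toggleNativeLog, and the setupLog/unsetLog natives) plus a Jinja-aware chat formatter, getFormattedChatWithJinja, which reads json_schema, tools, parallel_tool_calls, and tool_choice from its params map. A minimal caller sketch follows; it assumes an already-initialized LlamaContext and a live ReactApplicationContext, and the surrounding class name is hypothetical, not part of the diff.

```java
// Hypothetical sketch, not part of the package: exercises the new 1.4.4 APIs shown above.
// Placed in com.rnllama so it can reach the package-private toggleNativeLog helper.
package com.rnllama;

import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.WritableMap;

public class ChatFormattingSketch {
  public static WritableMap formatWithJinja(ReactApplicationContext reactContext, LlamaContext ctx) {
    // Forward native llama.cpp logs to JS as "@RNLlama_onNativeLog" events.
    LlamaContext.toggleNativeLog(reactContext, true);

    // Optional constraints travel in the params map; missing keys fall back to the
    // defaults visible in getFormattedChatWithJinja ("" / false).
    WritableMap params = Arguments.createMap();
    params.putString("json_schema", "{\"type\":\"object\"}");
    params.putBoolean("parallel_tool_calls", false);

    String messages = "[{\"role\":\"user\",\"content\":\"Hello\"}]";
    // A null chatTemplate is mapped to "" by the wrapper before reaching the native method.
    return ctx.getFormattedChatWithJinja(messages, null, params);
  }
}
```

Errors from the native formatter surface as an "_error" key on the returned map, which the RNLlama.java hunk below converts into a rejected promise.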

package/android/src/main/java/com/rnllama/RNLlama.java

@@ -35,6 +35,32 @@ public class RNLlama implements LifecycleEventListener {

  private HashMap<Integer, LlamaContext> contexts = new HashMap<>();

+  public void toggleNativeLog(boolean enabled, Promise promise) {
+    new AsyncTask<Void, Void, Boolean>() {
+      private Exception exception;
+
+      @Override
+      protected Boolean doInBackground(Void... voids) {
+        try {
+          LlamaContext.toggleNativeLog(reactContext, enabled);
+          return true;
+        } catch (Exception e) {
+          exception = e;
+        }
+        return null;
+      }
+
+      @Override
+      protected void onPostExecute(Boolean result) {
+        if (exception != null) {
+          promise.reject(exception);
+          return;
+        }
+        promise.resolve(result);
+      }
+    }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
+  }
+
  private int llamaContextLimit = -1;

  public void setContextLimit(double limit, Promise promise) {

@@ -116,18 +142,25 @@ public class RNLlama implements LifecycleEventListener {
    tasks.put(task, "initContext");
  }

-  public void getFormattedChat(double id, final
+  public void getFormattedChat(double id, final String messages, final String chatTemplate, final ReadableMap params, Promise promise) {
    final int contextId = (int) id;
-    AsyncTask task = new AsyncTask<Void, Void,
+    AsyncTask task = new AsyncTask<Void, Void, Object>() {
      private Exception exception;

      @Override
-      protected
+      protected Object doInBackground(Void... voids) {
        try {
          LlamaContext context = contexts.get(contextId);
          if (context == null) {
            throw new Exception("Context not found");
          }
+          if (params.hasKey("jinja") && params.getBoolean("jinja")) {
+            ReadableMap result = context.getFormattedChatWithJinja(messages, chatTemplate, params);
+            if (result.hasKey("_error")) {
+              throw new Exception(result.getString("_error"));
+            }
+            return result;
+          }
          return context.getFormattedChat(messages, chatTemplate);
        } catch (Exception e) {
          exception = e;

@@ -136,7 +169,7 @@ public class RNLlama implements LifecycleEventListener {
      }

      @Override
-      protected void onPostExecute(
+      protected void onPostExecute(Object result) {
        if (exception != null) {
          promise.reject(exception);
          return;
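
On the RNLlama side, getFormattedChat now runs on an AsyncTask<Void, Void, Object> and resolves its promise with either the WritableMap produced by getFormattedChatWithJinja (when params has "jinja": true) or the plain formatted String. Below is a hedged sketch of how a module layer might expose the new signatures; the wrapper class and its delegation are assumptions, since the RNLlamaModule.java changes themselves are not reproduced in this excerpt.

```java
// Hypothetical module-layer wrapper, not taken from the diff.
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.rnllama.RNLlama;

public class RNLlamaModuleSketch {
  private final RNLlama rnllama;

  public RNLlamaModuleSketch(RNLlama rnllama) {
    this.rnllama = rnllama;
  }

  @ReactMethod
  public void getFormattedChat(double id, String messages, String chatTemplate,
                               ReadableMap params, Promise promise) {
    // Resolves with a map (jinja path) or a string (legacy path); errors reject the promise.
    rnllama.getFormattedChat(id, messages, chatTemplate, params, promise);
  }

  @ReactMethod
  public void toggleNativeLog(boolean enabled, Promise promise) {
    // Enables or disables the "@RNLlama_onNativeLog" event stream added in LlamaContext.
    rnllama.toggleNativeLog(enabled, promise);
  }
}
```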

package/android/src/main/jni-utils.h

@@ -16,6 +16,12 @@ jobject getMap(JNIEnv *env, jobject readableArray, int index) {
  return env->CallObjectMethod(readableArray, getMapMethod, index);
}

+jstring getString(JNIEnv *env, jobject readableArray, int index) {
+  jclass arrayClass = env->GetObjectClass(readableArray);
+  jmethodID getStringMethod = env->GetMethodID(arrayClass, "getString", "(I)Ljava/lang/String;");
+  return (jstring) env->CallObjectMethod(readableArray, getStringMethod, index);
+}
+
// Other methods not used yet

}