@fugood/llama.node 1.4.1 → 1.4.2
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/package.json +16 -16
- package/src/LlamaContext.cpp +2 -2
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@fugood/llama.node",
   "access": "public",
-  "version": "1.4.1",
+  "version": "1.4.2",
   "description": "An another Node binding of llama.cpp",
   "main": "lib/index.js",
   "scripts": {
@@ -72,20 +72,20 @@
     "CMakeLists.txt"
   ],
   "optionalDependencies": {
-    "@fugood/node-llama-darwin-arm64": "1.4.1",
-    "@fugood/node-llama-darwin-x64": "1.4.1",
-    "@fugood/node-llama-linux-arm64": "1.4.1",
-    "@fugood/node-llama-linux-arm64-cuda": "1.4.1",
-    "@fugood/node-llama-linux-arm64-snapdragon": "1.4.1",
-    "@fugood/node-llama-linux-arm64-vulkan": "1.4.1",
-    "@fugood/node-llama-linux-x64": "1.4.1",
-    "@fugood/node-llama-linux-x64-cuda": "1.4.1",
-    "@fugood/node-llama-linux-x64-vulkan": "1.4.1",
-    "@fugood/node-llama-win32-arm64": "1.4.1",
-    "@fugood/node-llama-win32-arm64-vulkan": "1.4.1",
-    "@fugood/node-llama-win32-x64": "1.4.1",
-    "@fugood/node-llama-win32-x64-cuda": "1.4.1",
-    "@fugood/node-llama-win32-x64-vulkan": "1.4.1"
+    "@fugood/node-llama-darwin-arm64": "1.4.2",
+    "@fugood/node-llama-darwin-x64": "1.4.2",
+    "@fugood/node-llama-linux-arm64": "1.4.2",
+    "@fugood/node-llama-linux-arm64-cuda": "1.4.2",
+    "@fugood/node-llama-linux-arm64-snapdragon": "1.4.2",
+    "@fugood/node-llama-linux-arm64-vulkan": "1.4.2",
+    "@fugood/node-llama-linux-x64": "1.4.2",
+    "@fugood/node-llama-linux-x64-cuda": "1.4.2",
+    "@fugood/node-llama-linux-x64-vulkan": "1.4.2",
+    "@fugood/node-llama-win32-arm64": "1.4.2",
+    "@fugood/node-llama-win32-arm64-vulkan": "1.4.2",
+    "@fugood/node-llama-win32-x64": "1.4.2",
+    "@fugood/node-llama-win32-x64-cuda": "1.4.2",
+    "@fugood/node-llama-win32-x64-vulkan": "1.4.2"
   },
   "devDependencies": {
     "@babel/preset-env": "^7.24.4",
@@ -130,4 +130,4 @@
     "singleQuote": true,
     "printWidth": 80
   }
-}
+}
package/src/LlamaContext.cpp
CHANGED
@@ -657,7 +657,7 @@ Napi::Value LlamaContext::GetFormattedChat(const Napi::CallbackInfo &info) {
   auto params =
       has_params ? info[2].As<Napi::Object>() : Napi::Object::New(env);
 
-  if (get_option<bool>(params, "jinja", false)) {
+  if (get_option<bool>(params, "jinja", true)) {
     std::string json_schema_str = "";
     if (!is_nil(params.Get("response_format"))) {
       auto response_format = params.Get("response_format").As<Napi::Object>();
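Both C++ hunks flip the fallback value passed to get_option<bool> for the "jinja" option, so Jinja-based chat templating is now enabled by default when the caller omits the option. The helper's implementation is not part of this diff; the sketch below is a hypothetical reconstruction of the fallback pattern the change relies on, written against node-addon-api (the removed line is truncated in the diff viewer, so the old default of false is inferred):

#include <napi.h>

// Hypothetical sketch, not the package's actual helper: the change only has
// an effect if the third argument is returned when the key is missing or
// nil, so flipping that argument flips behavior for callers that omit it.
static bool get_bool_option(const Napi::Object &obj, const char *key,
                            bool default_value) {
  if (!obj.Has(key) || obj.Get(key).IsUndefined() || obj.Get(key).IsNull()) {
    // 1.4.1 apparently fell back to false here; 1.4.2 falls back to true.
    return default_value;
  }
  // Coerce an explicitly supplied value to bool, honoring the caller's choice.
  return obj.Get(key).ToBoolean().Value();
}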
@@ -912,7 +912,7 @@ Napi::Value LlamaContext::Completion(const Napi::CallbackInfo &info) {
   if (options.Has("messages") && options.Get("messages").IsArray()) {
     auto messages = options.Get("messages").As<Napi::Array>();
     auto chat_template = get_option<std::string>(options, "chat_template", "");
-    auto jinja = get_option<bool>(options, "jinja", false);
+    auto jinja = get_option<bool>(options, "jinja", true);
     if (jinja) {
       auto tools_str =
           !is_nil(options.Get("tools"))