@fugood/llama.node 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +14 -14
- package/src/LlamaContext.cpp +21 -8
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@fugood/llama.node",
   "access": "public",
-  "version": "1.0.0",
+  "version": "1.0.1",
   "description": "An another Node binding of llama.cpp",
   "main": "lib/index.js",
   "scripts": {
@@ -70,19 +70,19 @@
     "CMakeLists.txt"
   ],
   "optionalDependencies": {
-    "@fugood/node-llama-linux-x64": "1.0.0",
-    "@fugood/node-llama-linux-x64-vulkan": "1.0.0",
-    "@fugood/node-llama-linux-x64-cuda": "1.0.0",
-    "@fugood/node-llama-linux-arm64": "1.0.0",
-    "@fugood/node-llama-linux-arm64-vulkan": "1.0.0",
-    "@fugood/node-llama-linux-arm64-cuda": "1.0.0",
-    "@fugood/node-llama-win32-x64": "1.0.0",
-    "@fugood/node-llama-win32-x64-vulkan": "1.0.0",
-    "@fugood/node-llama-win32-x64-cuda": "1.0.0",
-    "@fugood/node-llama-win32-arm64": "1.0.0",
-    "@fugood/node-llama-win32-arm64-vulkan": "1.0.0",
-    "@fugood/node-llama-darwin-x64": "1.0.0",
-    "@fugood/node-llama-darwin-arm64": "1.0.0"
+    "@fugood/node-llama-linux-x64": "1.0.1",
+    "@fugood/node-llama-linux-x64-vulkan": "1.0.1",
+    "@fugood/node-llama-linux-x64-cuda": "1.0.1",
+    "@fugood/node-llama-linux-arm64": "1.0.1",
+    "@fugood/node-llama-linux-arm64-vulkan": "1.0.1",
+    "@fugood/node-llama-linux-arm64-cuda": "1.0.1",
+    "@fugood/node-llama-win32-x64": "1.0.1",
+    "@fugood/node-llama-win32-x64-vulkan": "1.0.1",
+    "@fugood/node-llama-win32-x64-cuda": "1.0.1",
+    "@fugood/node-llama-win32-arm64": "1.0.1",
+    "@fugood/node-llama-win32-arm64-vulkan": "1.0.1",
+    "@fugood/node-llama-darwin-x64": "1.0.1",
+    "@fugood/node-llama-darwin-arm64": "1.0.1"
   },
   "devDependencies": {
     "@babel/preset-env": "^7.24.4",
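All thirteen platform-specific prebuilt packages under optionalDependencies are bumped in lockstep with the main package version. As an illustration only (the real resolution logic lives in lib/index.js and may differ), per-platform optional dependencies like these are typically selected at runtime from process.platform and process.arch:

  // Hypothetical sketch of the per-platform optionalDependencies pattern;
  // not this package's actual loader code.
  const base = `@fugood/node-llama-${process.platform}-${process.arch}`;
  // e.g. "@fugood/node-llama-linux-x64"; GPU builds append "-cuda" or "-vulkan".
  const binding = require(base);
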
package/src/LlamaContext.cpp
CHANGED
@@ -586,7 +586,7 @@ Napi::Value LlamaContext::GetFormattedChat(const Napi::CallbackInfo &info) {
               : "{}";
     }
   }
-  auto tools_str = params.
+  auto tools_str = !is_nil(params.Get("tools"))
                        ? json_stringify(params.Get("tools").As<Napi::Array>())
                        : "";
   auto parallel_tool_calls =
@@ -594,9 +594,15 @@ Napi::Value LlamaContext::GetFormattedChat(const Napi::CallbackInfo &info) {
   auto tool_choice = get_option<std::string>(params, "tool_choice", "");
   auto enable_thinking = get_option<bool>(params, "enable_thinking", false);
 
-  auto chatParams = getFormattedChatWithJinja(
-      _sess, _templates, messages, chat_template, json_schema_str, tools_str,
-      parallel_tool_calls, tool_choice, enable_thinking);
+  common_chat_params chatParams;
+  try {
+    chatParams = getFormattedChatWithJinja(
+        _sess, _templates, messages, chat_template, json_schema_str, tools_str,
+        parallel_tool_calls, tool_choice, enable_thinking);
+  } catch (const std::exception &e) {
+    Napi::Error::New(env, e.what()).ThrowAsJavaScriptException();
+    return env.Undefined();
+  }
 
   Napi::Object result = Napi::Object::New(env);
   result.Set("prompt", chatParams.prompt);
@@ -793,7 +799,7 @@ Napi::Value LlamaContext::Completion(const Napi::CallbackInfo &info) {
   auto jinja = get_option<bool>(options, "jinja", false);
   if (jinja) {
     auto tools_str =
-        options.
+        !is_nil(options.Get("tools"))
             ? json_stringify(options.Get("tools").As<Napi::Array>())
             : "";
     auto parallel_tool_calls =
@@ -802,9 +808,16 @@ Napi::Value LlamaContext::Completion(const Napi::CallbackInfo &info) {
         get_option<std::string>(options, "tool_choice", "none");
     auto enable_thinking = get_option<bool>(options, "enable_thinking", true);
 
-    auto chatParams = getFormattedChatWithJinja(
-        _sess, _templates, json_stringify(messages), chat_template,
-        json_schema_str, tools_str, parallel_tool_calls, tool_choice, enable_thinking);
+    common_chat_params chatParams;
+
+    try {
+      chatParams = getFormattedChatWithJinja(
+          _sess, _templates, json_stringify(messages), chat_template,
+          json_schema_str, tools_str, parallel_tool_calls, tool_choice, enable_thinking);
+    } catch (const std::exception &e) {
+      Napi::Error::New(env, e.what()).ThrowAsJavaScriptException();
+      return env.Undefined();
+    }
 
     params.prompt = chatParams.prompt;
 
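Both hunks in LlamaContext.cpp make the same two changes: the tools option is only stringified when it is not nil, and the call into getFormattedChatWithJinja is wrapped in try/catch so that a failing Jinja chat template (or malformed tools input) is rethrown as a Napi::Error and reaches JavaScript as a regular exception instead of an unhandled C++ exception. A minimal consumer-side sketch of what that means, assuming the JS wrapper exposes loadModel() and a getFormattedChat() method mirroring the native names in this diff (the exported API and argument order live in lib/index.js and may differ):

  import { loadModel } from "@fugood/llama.node"; // entry point name assumed

  const messages = [{ role: "user", content: "Hello" }];
  const ctx = await loadModel({ model: "path/to/model.gguf" });

  try {
    // With 1.0.1, template/tool errors arrive here as a JS Error carrying the
    // C++ exception message; tools may simply be omitted, since it is only
    // stringified when present.
    const chat = await ctx.getFormattedChat(messages, undefined, { jinja: true }); // signature assumed
    console.log(chat.prompt); // "prompt" is set on the result object by the native code
  } catch (err) {
    console.error("chat formatting failed:", (err as Error).message);
  }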