node-llama-cpp 3.0.0-beta.11 → 3.0.0-beta.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -4
- package/dist/bindings/Llama.d.ts +1 -0
- package/dist/bindings/Llama.js +7 -1
- package/dist/bindings/Llama.js.map +1 -1
- package/dist/bindings/getLlama.d.ts +7 -1
- package/dist/bindings/getLlama.js +6 -3
- package/dist/bindings/getLlama.js.map +1 -1
- package/dist/bindings/types.d.ts +1 -0
- package/dist/bindings/types.js.map +1 -1
- package/dist/bindings/utils/compileLLamaCpp.js +2 -0
- package/dist/bindings/utils/compileLLamaCpp.js.map +1 -1
- package/dist/bindings/utils/getBuildFolderNameForBuildOptions.js +2 -0
- package/dist/bindings/utils/getBuildFolderNameForBuildOptions.js.map +1 -1
- package/dist/bindings/utils/resolveCustomCmakeOptions.js +2 -0
- package/dist/bindings/utils/resolveCustomCmakeOptions.js.map +1 -1
- package/dist/cli/commands/BuildCommand.d.ts +2 -1
- package/dist/cli/commands/BuildCommand.js +11 -9
- package/dist/cli/commands/BuildCommand.js.map +1 -1
- package/dist/cli/commands/DebugCommand.js +16 -13
- package/dist/cli/commands/DebugCommand.js.map +1 -1
- package/dist/cli/commands/DownloadCommand.d.ts +2 -1
- package/dist/cli/commands/DownloadCommand.js +11 -9
- package/dist/cli/commands/DownloadCommand.js.map +1 -1
- package/dist/cli/utils/logEnabledComputeLayers.d.ts +8 -0
- package/dist/cli/utils/logEnabledComputeLayers.js +11 -0
- package/dist/cli/utils/logEnabledComputeLayers.js.map +1 -0
- package/dist/config.d.ts +1 -0
- package/dist/config.js +5 -2
- package/dist/config.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfArray.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfBoolean.d.ts +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfBoolean.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfBooleanValue.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfGrammar.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfNull.d.ts +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfNull.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfNumber.d.ts +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfNumber.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfNumberValue.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfObjectMap.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfOr.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfString.d.ts +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfString.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfStringValue.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfVerbatimText.js.map +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfWhitespace.d.ts +1 -1
- package/dist/utils/gbnfJson/terminals/GbnfWhitespace.js.map +1 -1
- package/dist/utils/getBuildDefaults.d.ts +1 -0
- package/dist/utils/getBuildDefaults.js +3 -2
- package/dist/utils/getBuildDefaults.js.map +1 -1
- package/llama/CMakeLists.txt +20 -0
- package/llama/addon.cpp +34 -3
- package/llama/binariesGithubRelease.json +1 -1
- package/llama/gitRelease.bundle +0 -0
- package/llama/gpuInfo/cuda-gpu-info.cu +5 -5
- package/llama/gpuInfo/cuda-gpu-info.h +2 -2
- package/llama/gpuInfo/vulkan-gpu-info.cpp +65 -0
- package/llama/gpuInfo/vulkan-gpu-info.h +7 -0
- package/llama/llama.cpp.info.json +1 -1
- package/llamaBins/linux-arm64/.buildMetadata.json +1 -1
- package/llamaBins/linux-arm64/llama-addon.node +0 -0
- package/llamaBins/linux-armv7l/.buildMetadata.json +1 -1
- package/llamaBins/linux-armv7l/llama-addon.node +0 -0
- package/llamaBins/linux-x64/.buildMetadata.json +1 -1
- package/llamaBins/linux-x64/llama-addon.node +0 -0
- package/llamaBins/linux-x64-cuda/.buildMetadata.json +1 -1
- package/llamaBins/linux-x64-cuda/llama-addon.node +0 -0
- package/llamaBins/linux-x64-vulkan/.buildMetadata.json +1 -0
- package/llamaBins/linux-x64-vulkan/llama-addon.node +0 -0
- package/llamaBins/mac-arm64-metal/.buildMetadata.json +1 -1
- package/llamaBins/mac-arm64-metal/ggml-metal.metal +540 -9
- package/llamaBins/mac-arm64-metal/llama-addon.node +0 -0
- package/llamaBins/mac-x64/.buildMetadata.json +1 -1
- package/llamaBins/mac-x64/llama-addon.node +0 -0
- package/llamaBins/win-x64/.buildMetadata.json +1 -1
- package/llamaBins/win-x64/llama-addon.exp +0 -0
- package/llamaBins/win-x64/llama-addon.lib +0 -0
- package/llamaBins/win-x64/llama-addon.node +0 -0
- package/llamaBins/win-x64-cuda/.buildMetadata.json +1 -1
- package/llamaBins/win-x64-cuda/llama-addon.exp +0 -0
- package/llamaBins/win-x64-cuda/llama-addon.lib +0 -0
- package/llamaBins/win-x64-cuda/llama-addon.node +0 -0
- package/llamaBins/win-x64-vulkan/.buildMetadata.json +1 -0
- package/llamaBins/win-x64-vulkan/llama-addon.exp +0 -0
- package/llamaBins/win-x64-vulkan/llama-addon.lib +0 -0
- package/llamaBins/win-x64-vulkan/llama-addon.node +0 -0
- package/package.json +2 -1
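
Taken together, most of the changes listed above wire a new Vulkan compute layer through the bindings (Llama, getLlama, the bindings types, the new logEnabledComputeLayers CLI helper, and the gpuInfo/vulkan-gpu-info sources) and ship new linux-x64-vulkan and win-x64-vulkan prebuilt binaries alongside the existing Metal and CUDA ones. As a rough, unverified sketch of what that enables on the consumer side, the snippet below assumes the new getLlama option mirrors the existing cuda flag; the actual option name and shape should be checked against the updated getLlama.d.ts in this release.

```typescript
import {getLlama} from "node-llama-cpp";

// Assumption: the new Vulkan compute layer is toggled the same way as the
// existing `cuda` flag; verify the real option shape against the updated
// getLlama.d.ts in 3.0.0-beta.12 before relying on this.
const llama = await getLlama({
    vulkan: true
});

console.log("Bindings loaded:", llama); // inspect the returned Llama instance
```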
package/llamaBins/mac-arm64-metal/llama-addon.node
CHANGED
Binary file

package/llamaBins/mac-x64/.buildMetadata.json
CHANGED
@@ -1 +1 @@
-{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","arch":"x64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
+{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","arch":"x64","computeLayers":{"metal":false,"cuda":false,"vulkan":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}

package/llamaBins/mac-x64/llama-addon.node
CHANGED
Binary file
package/llamaBins/win-x64/.buildMetadata.json
CHANGED
@@ -1 +1 @@
-{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
+{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":false,"vulkan":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}

package/llamaBins/win-x64/llama-addon.exp
CHANGED
Binary file

package/llamaBins/win-x64/llama-addon.lib
CHANGED
Binary file

package/llamaBins/win-x64/llama-addon.node
CHANGED
Binary file
package/llamaBins/win-x64-cuda/.buildMetadata.json
CHANGED
@@ -1 +1 @@
-{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":true},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
+{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":true,"vulkan":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}

package/llamaBins/win-x64-cuda/llama-addon.exp
CHANGED
Binary file

package/llamaBins/win-x64-cuda/llama-addon.lib
CHANGED
Binary file

package/llamaBins/win-x64-cuda/llama-addon.node
CHANGED
Binary file
package/llamaBins/win-x64-vulkan/.buildMetadata.json
ADDED
@@ -0,0 +1 @@
+{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":false,"vulkan":true},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}

package/llamaBins/win-x64-vulkan/llama-addon.exp
ADDED
Binary file

package/llamaBins/win-x64-vulkan/llama-addon.lib
ADDED
Binary file

package/llamaBins/win-x64-vulkan/llama-addon.node
ADDED
Binary file
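
The .buildMetadata.json hunks above all carry a new `vulkan` flag under `computeLayers` and pin llama.cpp release b2254. For reference, this is roughly the shape of those metadata files as visible in the hunks; the interface name is made up for illustration and only the fields shown in this diff are included.

```typescript
// Illustrative only: the interface name is invented here, and field types are
// inferred from the JSON visible in the hunks above.
interface BuildMetadataFile {
    buildOptions: {
        customCmakeOptions: Record<string, string>; // "{}" in every hunk shown
        progressLogs: boolean;
        platform: string; // "mac" and "win" appear above; linux binaries exist too
        arch: string;     // "x64" in the hunks above
        computeLayers: {
            metal: boolean;
            cuda: boolean;
            vulkan: boolean; // new in 3.0.0-beta.12
        };
        llamaCpp: {
            repo: string;    // "ggerganov/llama.cpp"
            release: string; // "b2254" in this release
        };
    };
}
```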
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "node-llama-cpp",
-"version": "3.0.0-beta.11",
+"version": "3.0.0-beta.12",
 "description": "Run AI models locally on your machine with node.js bindings for llama.cpp. Force a JSON schema on the model output on the generation level",
 "main": "dist/index.js",
 "type": "module",
@@ -83,6 +83,7 @@
 "gguf",
 "metal",
 "cuda",
+"vulkan",
 "grammar",
 "json-grammar",
 "json-schema-grammar",