node-llama-cpp 3.0.0-beta.11 → 3.0.0-beta.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/README.md +4 -4
  2. package/dist/bindings/Llama.d.ts +1 -0
  3. package/dist/bindings/Llama.js +7 -1
  4. package/dist/bindings/Llama.js.map +1 -1
  5. package/dist/bindings/getLlama.d.ts +7 -1
  6. package/dist/bindings/getLlama.js +6 -3
  7. package/dist/bindings/getLlama.js.map +1 -1
  8. package/dist/bindings/types.d.ts +1 -0
  9. package/dist/bindings/types.js.map +1 -1
  10. package/dist/bindings/utils/compileLLamaCpp.js +2 -0
  11. package/dist/bindings/utils/compileLLamaCpp.js.map +1 -1
  12. package/dist/bindings/utils/getBuildFolderNameForBuildOptions.js +2 -0
  13. package/dist/bindings/utils/getBuildFolderNameForBuildOptions.js.map +1 -1
  14. package/dist/bindings/utils/resolveCustomCmakeOptions.js +2 -0
  15. package/dist/bindings/utils/resolveCustomCmakeOptions.js.map +1 -1
  16. package/dist/cli/commands/BuildCommand.d.ts +2 -1
  17. package/dist/cli/commands/BuildCommand.js +11 -9
  18. package/dist/cli/commands/BuildCommand.js.map +1 -1
  19. package/dist/cli/commands/DebugCommand.js +16 -13
  20. package/dist/cli/commands/DebugCommand.js.map +1 -1
  21. package/dist/cli/commands/DownloadCommand.d.ts +2 -1
  22. package/dist/cli/commands/DownloadCommand.js +11 -9
  23. package/dist/cli/commands/DownloadCommand.js.map +1 -1
  24. package/dist/cli/utils/logEnabledComputeLayers.d.ts +8 -0
  25. package/dist/cli/utils/logEnabledComputeLayers.js +11 -0
  26. package/dist/cli/utils/logEnabledComputeLayers.js.map +1 -0
  27. package/dist/config.d.ts +1 -0
  28. package/dist/config.js +5 -2
  29. package/dist/config.js.map +1 -1
  30. package/dist/utils/gbnfJson/terminals/GbnfArray.js.map +1 -1
  31. package/dist/utils/gbnfJson/terminals/GbnfBoolean.d.ts +1 -1
  32. package/dist/utils/gbnfJson/terminals/GbnfBoolean.js.map +1 -1
  33. package/dist/utils/gbnfJson/terminals/GbnfBooleanValue.js.map +1 -1
  34. package/dist/utils/gbnfJson/terminals/GbnfGrammar.js.map +1 -1
  35. package/dist/utils/gbnfJson/terminals/GbnfNull.d.ts +1 -1
  36. package/dist/utils/gbnfJson/terminals/GbnfNull.js.map +1 -1
  37. package/dist/utils/gbnfJson/terminals/GbnfNumber.d.ts +1 -1
  38. package/dist/utils/gbnfJson/terminals/GbnfNumber.js.map +1 -1
  39. package/dist/utils/gbnfJson/terminals/GbnfNumberValue.js.map +1 -1
  40. package/dist/utils/gbnfJson/terminals/GbnfObjectMap.js.map +1 -1
  41. package/dist/utils/gbnfJson/terminals/GbnfOr.js.map +1 -1
  42. package/dist/utils/gbnfJson/terminals/GbnfString.d.ts +1 -1
  43. package/dist/utils/gbnfJson/terminals/GbnfString.js.map +1 -1
  44. package/dist/utils/gbnfJson/terminals/GbnfStringValue.js.map +1 -1
  45. package/dist/utils/gbnfJson/terminals/GbnfVerbatimText.js.map +1 -1
  46. package/dist/utils/gbnfJson/terminals/GbnfWhitespace.d.ts +1 -1
  47. package/dist/utils/gbnfJson/terminals/GbnfWhitespace.js.map +1 -1
  48. package/dist/utils/getBuildDefaults.d.ts +1 -0
  49. package/dist/utils/getBuildDefaults.js +3 -2
  50. package/dist/utils/getBuildDefaults.js.map +1 -1
  51. package/llama/CMakeLists.txt +20 -0
  52. package/llama/addon.cpp +34 -3
  53. package/llama/binariesGithubRelease.json +1 -1
  54. package/llama/gitRelease.bundle +0 -0
  55. package/llama/gpuInfo/cuda-gpu-info.cu +5 -5
  56. package/llama/gpuInfo/cuda-gpu-info.h +2 -2
  57. package/llama/gpuInfo/vulkan-gpu-info.cpp +65 -0
  58. package/llama/gpuInfo/vulkan-gpu-info.h +7 -0
  59. package/llama/llama.cpp.info.json +1 -1
  60. package/llamaBins/linux-arm64/.buildMetadata.json +1 -1
  61. package/llamaBins/linux-arm64/llama-addon.node +0 -0
  62. package/llamaBins/linux-armv7l/.buildMetadata.json +1 -1
  63. package/llamaBins/linux-armv7l/llama-addon.node +0 -0
  64. package/llamaBins/linux-x64/.buildMetadata.json +1 -1
  65. package/llamaBins/linux-x64/llama-addon.node +0 -0
  66. package/llamaBins/linux-x64-cuda/.buildMetadata.json +1 -1
  67. package/llamaBins/linux-x64-cuda/llama-addon.node +0 -0
  68. package/llamaBins/linux-x64-vulkan/.buildMetadata.json +1 -0
  69. package/llamaBins/linux-x64-vulkan/llama-addon.node +0 -0
  70. package/llamaBins/mac-arm64-metal/.buildMetadata.json +1 -1
  71. package/llamaBins/mac-arm64-metal/ggml-metal.metal +540 -9
  72. package/llamaBins/mac-arm64-metal/llama-addon.node +0 -0
  73. package/llamaBins/mac-x64/.buildMetadata.json +1 -1
  74. package/llamaBins/mac-x64/llama-addon.node +0 -0
  75. package/llamaBins/win-x64/.buildMetadata.json +1 -1
  76. package/llamaBins/win-x64/llama-addon.exp +0 -0
  77. package/llamaBins/win-x64/llama-addon.lib +0 -0
  78. package/llamaBins/win-x64/llama-addon.node +0 -0
  79. package/llamaBins/win-x64-cuda/.buildMetadata.json +1 -1
  80. package/llamaBins/win-x64-cuda/llama-addon.exp +0 -0
  81. package/llamaBins/win-x64-cuda/llama-addon.lib +0 -0
  82. package/llamaBins/win-x64-cuda/llama-addon.node +0 -0
  83. package/llamaBins/win-x64-vulkan/.buildMetadata.json +1 -0
  84. package/llamaBins/win-x64-vulkan/llama-addon.exp +0 -0
  85. package/llamaBins/win-x64-vulkan/llama-addon.lib +0 -0
  86. package/llamaBins/win-x64-vulkan/llama-addon.node +0 -0
  87. package/package.json +2 -1
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","arch":"x64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","arch":"x64","computeLayers":{"metal":false,"cuda":false,"vulkan":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}
Binary file
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":false,"vulkan":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}
Binary file
Binary file
Binary file
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":true},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":true,"vulkan":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}
@@ -0,0 +1 @@
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","arch":"x64","computeLayers":{"metal":false,"cuda":false,"vulkan":true},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2254"}}}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "node-llama-cpp",
3
- "version": "3.0.0-beta.11",
3
+ "version": "3.0.0-beta.12",
4
4
  "description": "Run AI models locally on your machine with node.js bindings for llama.cpp. Force a JSON schema on the model output on the generation level",
5
5
  "main": "dist/index.js",
6
6
  "type": "module",
@@ -83,6 +83,7 @@
83
83
  "gguf",
84
84
  "metal",
85
85
  "cuda",
86
+ "vulkan",
86
87
  "grammar",
87
88
  "json-grammar",
88
89
  "json-schema-grammar",