node-llama-cpp 3.14.5 → 3.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/bindings/AddonTypes.d.ts +1 -0
  2. package/dist/cli/commands/ChatCommand.d.ts +1 -0
  3. package/dist/cli/commands/ChatCommand.js +12 -3
  4. package/dist/cli/commands/ChatCommand.js.map +1 -1
  5. package/dist/cli/commands/CompleteCommand.d.ts +1 -0
  6. package/dist/cli/commands/CompleteCommand.js +12 -3
  7. package/dist/cli/commands/CompleteCommand.js.map +1 -1
  8. package/dist/cli/commands/InfillCommand.d.ts +1 -0
  9. package/dist/cli/commands/InfillCommand.js +12 -3
  10. package/dist/cli/commands/InfillCommand.js.map +1 -1
  11. package/dist/cli/commands/inspect/commands/InspectMeasureCommand.d.ts +1 -0
  12. package/dist/cli/commands/inspect/commands/InspectMeasureCommand.js +19 -3
  13. package/dist/cli/commands/inspect/commands/InspectMeasureCommand.js.map +1 -1
  14. package/dist/cli/utils/printCommonInfoLines.d.ts +2 -1
  15. package/dist/cli/utils/printCommonInfoLines.js +11 -1
  16. package/dist/cli/utils/printCommonInfoLines.js.map +1 -1
  17. package/dist/evaluator/LlamaChat/LlamaChat.d.ts +8 -0
  18. package/dist/evaluator/LlamaChat/LlamaChat.js.map +1 -1
  19. package/dist/evaluator/LlamaChatSession/LlamaChatSession.d.ts +8 -0
  20. package/dist/evaluator/LlamaChatSession/LlamaChatSession.js.map +1 -1
  21. package/dist/evaluator/LlamaCompletion.d.ts +18 -3
  22. package/dist/evaluator/LlamaCompletion.js +14 -8
  23. package/dist/evaluator/LlamaCompletion.js.map +1 -1
  24. package/dist/evaluator/LlamaModel/LlamaModel.d.ts +15 -0
  25. package/dist/evaluator/LlamaModel/LlamaModel.js +5 -2
  26. package/dist/evaluator/LlamaModel/LlamaModel.js.map +1 -1
  27. package/dist/gguf/types/GgufMetadataTypes.d.ts +6 -0
  28. package/dist/gguf/types/GgufMetadataTypes.js +6 -0
  29. package/dist/gguf/types/GgufMetadataTypes.js.map +1 -1
  30. package/llama/CMakeLists.txt +0 -7
  31. package/llama/addon/AddonModel.cpp +4 -0
  32. package/llama/binariesGithubRelease.json +1 -1
  33. package/llama/gitRelease.bundle +0 -0
  34. package/llama/grammars/README.md +6 -3
  35. package/llama/llama.cpp.info.json +1 -1
  36. package/package.json +14 -14
  37. package/templates/packed/electron-typescript-react.json +1 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "node-llama-cpp",
-    "version": "3.14.5",
+    "version": "3.15.0",
     "description": "Run AI models locally on your machine with node.js bindings for llama.cpp. Enforce a JSON schema on the model output on the generation level",
     "main": "./dist/index.js",
     "type": "module",
@@ -224,18 +224,18 @@
         }
     },
     "optionalDependencies": {
-        "@node-llama-cpp/linux-arm64": "3.14.5",
-        "@node-llama-cpp/linux-armv7l": "3.14.5",
-        "@node-llama-cpp/linux-x64": "3.14.5",
-        "@node-llama-cpp/linux-x64-cuda": "3.14.5",
-        "@node-llama-cpp/linux-x64-cuda-ext": "3.14.5",
-        "@node-llama-cpp/linux-x64-vulkan": "3.14.5",
-        "@node-llama-cpp/mac-arm64-metal": "3.14.5",
-        "@node-llama-cpp/mac-x64": "3.14.5",
-        "@node-llama-cpp/win-arm64": "3.14.5",
-        "@node-llama-cpp/win-x64": "3.14.5",
-        "@node-llama-cpp/win-x64-cuda": "3.14.5",
-        "@node-llama-cpp/win-x64-cuda-ext": "3.14.5",
-        "@node-llama-cpp/win-x64-vulkan": "3.14.5"
+        "@node-llama-cpp/linux-arm64": "3.15.0",
+        "@node-llama-cpp/linux-armv7l": "3.15.0",
+        "@node-llama-cpp/linux-x64": "3.15.0",
+        "@node-llama-cpp/linux-x64-cuda": "3.15.0",
+        "@node-llama-cpp/linux-x64-cuda-ext": "3.15.0",
+        "@node-llama-cpp/linux-x64-vulkan": "3.15.0",
+        "@node-llama-cpp/mac-arm64-metal": "3.15.0",
+        "@node-llama-cpp/mac-x64": "3.15.0",
+        "@node-llama-cpp/win-arm64": "3.15.0",
+        "@node-llama-cpp/win-x64": "3.15.0",
+        "@node-llama-cpp/win-x64-cuda": "3.15.0",
+        "@node-llama-cpp/win-x64-cuda-ext": "3.15.0",
+        "@node-llama-cpp/win-x64-vulkan": "3.15.0"
     }
 }