@fugood/llama.node 0.6.3 → 1.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (377)
  1. package/CMakeLists.txt +40 -30
  2. package/README.md +4 -1
  3. package/lib/binding.js +41 -29
  4. package/lib/binding.ts +26 -25
  5. package/package.json +40 -7
  6. package/scripts/build.js +47 -0
  7. package/scripts/llama.cpp.patch +109 -0
  8. package/src/anyascii.c +22223 -0
  9. package/src/anyascii.h +42 -0
  10. package/src/tts_utils.cpp +20 -7
  11. package/src/tts_utils.h +2 -0
  12. package/bin/darwin/arm64/llama-node.node +0 -0
  13. package/bin/darwin/x64/llama-node.node +0 -0
  14. package/bin/linux/arm64/llama-node.node +0 -0
  15. package/bin/linux/x64/llama-node.node +0 -0
  16. package/bin/linux-cuda/arm64/llama-node.node +0 -0
  17. package/bin/linux-cuda/x64/llama-node.node +0 -0
  18. package/bin/linux-vulkan/arm64/llama-node.node +0 -0
  19. package/bin/linux-vulkan/x64/llama-node.node +0 -0
  20. package/bin/win32/x64/llama-node.node +0 -0
  21. package/bin/win32/x64/node.lib +0 -0
  22. package/bin/win32-vulkan/arm64/llama-node.node +0 -0
  23. package/bin/win32-vulkan/arm64/node.lib +0 -0
  24. package/bin/win32-vulkan/x64/llama-node.node +0 -0
  25. package/bin/win32-vulkan/x64/node.lib +0 -0
  26. package/src/llama.cpp/.github/workflows/build-linux-cross.yml +0 -233
  27. package/src/llama.cpp/.github/workflows/build.yml +0 -1078
  28. package/src/llama.cpp/.github/workflows/close-issue.yml +0 -28
  29. package/src/llama.cpp/.github/workflows/docker.yml +0 -178
  30. package/src/llama.cpp/.github/workflows/editorconfig.yml +0 -29
  31. package/src/llama.cpp/.github/workflows/gguf-publish.yml +0 -44
  32. package/src/llama.cpp/.github/workflows/labeler.yml +0 -17
  33. package/src/llama.cpp/.github/workflows/python-check-requirements.yml +0 -33
  34. package/src/llama.cpp/.github/workflows/python-lint.yml +0 -30
  35. package/src/llama.cpp/.github/workflows/python-type-check.yml +0 -40
  36. package/src/llama.cpp/.github/workflows/release.yml +0 -739
  37. package/src/llama.cpp/.github/workflows/server.yml +0 -237
  38. package/src/llama.cpp/.github/workflows/winget.yml +0 -42
  39. package/src/llama.cpp/cmake/arm64-apple-clang.cmake +0 -16
  40. package/src/llama.cpp/cmake/arm64-windows-llvm.cmake +0 -16
  41. package/src/llama.cpp/cmake/build-info.cmake +0 -64
  42. package/src/llama.cpp/cmake/common.cmake +0 -35
  43. package/src/llama.cpp/cmake/git-vars.cmake +0 -22
  44. package/src/llama.cpp/cmake/x64-windows-llvm.cmake +0 -5
  45. package/src/llama.cpp/common/build-info.cpp.in +0 -4
  46. package/src/llama.cpp/docs/build.md +0 -561
  47. package/src/llama.cpp/examples/CMakeLists.txt +0 -43
  48. package/src/llama.cpp/examples/batched/CMakeLists.txt +0 -5
  49. package/src/llama.cpp/examples/batched/batched.cpp +0 -246
  50. package/src/llama.cpp/examples/chat-13B.bat +0 -57
  51. package/src/llama.cpp/examples/convert-llama2c-to-ggml/CMakeLists.txt +0 -5
  52. package/src/llama.cpp/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +0 -941
  53. package/src/llama.cpp/examples/deprecation-warning/deprecation-warning.cpp +0 -35
  54. package/src/llama.cpp/examples/embedding/CMakeLists.txt +0 -5
  55. package/src/llama.cpp/examples/embedding/embedding.cpp +0 -323
  56. package/src/llama.cpp/examples/eval-callback/CMakeLists.txt +0 -10
  57. package/src/llama.cpp/examples/eval-callback/eval-callback.cpp +0 -194
  58. package/src/llama.cpp/examples/gen-docs/CMakeLists.txt +0 -5
  59. package/src/llama.cpp/examples/gen-docs/gen-docs.cpp +0 -83
  60. package/src/llama.cpp/examples/gguf/CMakeLists.txt +0 -5
  61. package/src/llama.cpp/examples/gguf/gguf.cpp +0 -265
  62. package/src/llama.cpp/examples/gguf-hash/CMakeLists.txt +0 -22
  63. package/src/llama.cpp/examples/gguf-hash/deps/rotate-bits/rotate-bits.h +0 -46
  64. package/src/llama.cpp/examples/gguf-hash/deps/sha1/sha1.c +0 -295
  65. package/src/llama.cpp/examples/gguf-hash/deps/sha1/sha1.h +0 -52
  66. package/src/llama.cpp/examples/gguf-hash/deps/sha256/sha256.c +0 -221
  67. package/src/llama.cpp/examples/gguf-hash/deps/sha256/sha256.h +0 -24
  68. package/src/llama.cpp/examples/gguf-hash/deps/xxhash/xxhash.c +0 -42
  69. package/src/llama.cpp/examples/gguf-hash/deps/xxhash/xxhash.h +0 -7093
  70. package/src/llama.cpp/examples/gguf-hash/gguf-hash.cpp +0 -694
  71. package/src/llama.cpp/examples/gritlm/CMakeLists.txt +0 -5
  72. package/src/llama.cpp/examples/gritlm/gritlm.cpp +0 -229
  73. package/src/llama.cpp/examples/jeopardy/questions.txt +0 -100
  74. package/src/llama.cpp/examples/llama.android/app/build.gradle.kts +0 -65
  75. package/src/llama.cpp/examples/llama.android/build.gradle.kts +0 -6
  76. package/src/llama.cpp/examples/llama.android/llama/build.gradle.kts +0 -71
  77. package/src/llama.cpp/examples/llama.android/llama/src/main/cpp/CMakeLists.txt +0 -53
  78. package/src/llama.cpp/examples/llama.android/llama/src/main/cpp/llama-android.cpp +0 -452
  79. package/src/llama.cpp/examples/llama.android/settings.gradle.kts +0 -18
  80. package/src/llama.cpp/examples/lookahead/CMakeLists.txt +0 -5
  81. package/src/llama.cpp/examples/lookahead/lookahead.cpp +0 -472
  82. package/src/llama.cpp/examples/lookup/CMakeLists.txt +0 -23
  83. package/src/llama.cpp/examples/lookup/lookup-create.cpp +0 -40
  84. package/src/llama.cpp/examples/lookup/lookup-merge.cpp +0 -47
  85. package/src/llama.cpp/examples/lookup/lookup-stats.cpp +0 -157
  86. package/src/llama.cpp/examples/lookup/lookup.cpp +0 -242
  87. package/src/llama.cpp/examples/parallel/CMakeLists.txt +0 -5
  88. package/src/llama.cpp/examples/parallel/parallel.cpp +0 -492
  89. package/src/llama.cpp/examples/passkey/CMakeLists.txt +0 -5
  90. package/src/llama.cpp/examples/passkey/passkey.cpp +0 -277
  91. package/src/llama.cpp/examples/retrieval/CMakeLists.txt +0 -5
  92. package/src/llama.cpp/examples/retrieval/retrieval.cpp +0 -304
  93. package/src/llama.cpp/examples/save-load-state/CMakeLists.txt +0 -5
  94. package/src/llama.cpp/examples/save-load-state/save-load-state.cpp +0 -246
  95. package/src/llama.cpp/examples/simple/CMakeLists.txt +0 -5
  96. package/src/llama.cpp/examples/simple/simple.cpp +0 -206
  97. package/src/llama.cpp/examples/simple-chat/CMakeLists.txt +0 -5
  98. package/src/llama.cpp/examples/simple-chat/simple-chat.cpp +0 -206
  99. package/src/llama.cpp/examples/simple-cmake-pkg/CMakeLists.txt +0 -11
  100. package/src/llama.cpp/examples/speculative/CMakeLists.txt +0 -5
  101. package/src/llama.cpp/examples/speculative/speculative.cpp +0 -644
  102. package/src/llama.cpp/examples/speculative-simple/CMakeLists.txt +0 -5
  103. package/src/llama.cpp/examples/speculative-simple/speculative-simple.cpp +0 -261
  104. package/src/llama.cpp/examples/sycl/CMakeLists.txt +0 -9
  105. package/src/llama.cpp/examples/sycl/build.sh +0 -23
  106. package/src/llama.cpp/examples/sycl/ls-sycl-device.cpp +0 -13
  107. package/src/llama.cpp/examples/sycl/run-llama2.sh +0 -27
  108. package/src/llama.cpp/examples/sycl/run-llama3.sh +0 -28
  109. package/src/llama.cpp/examples/sycl/win-build-sycl.bat +0 -33
  110. package/src/llama.cpp/examples/sycl/win-run-llama2.bat +0 -9
  111. package/src/llama.cpp/examples/sycl/win-run-llama3.bat +0 -9
  112. package/src/llama.cpp/examples/training/CMakeLists.txt +0 -5
  113. package/src/llama.cpp/examples/training/finetune.cpp +0 -96
  114. package/src/llama.cpp/ggml/cmake/GitVars.cmake +0 -22
  115. package/src/llama.cpp/ggml/cmake/common.cmake +0 -26
  116. package/src/llama.cpp/ggml/src/ggml-alloc.c +0 -1042
  117. package/src/llama.cpp/ggml/src/ggml-backend-impl.h +0 -255
  118. package/src/llama.cpp/ggml/src/ggml-backend-reg.cpp +0 -586
  119. package/src/llama.cpp/ggml/src/ggml-backend.cpp +0 -2008
  120. package/src/llama.cpp/ggml/src/ggml-blas/CMakeLists.txt +0 -87
  121. package/src/llama.cpp/ggml/src/ggml-blas/ggml-blas.cpp +0 -517
  122. package/src/llama.cpp/ggml/src/ggml-cann/CMakeLists.txt +0 -74
  123. package/src/llama.cpp/ggml/src/ggml-cann/acl_tensor.cpp +0 -179
  124. package/src/llama.cpp/ggml/src/ggml-cann/acl_tensor.h +0 -258
  125. package/src/llama.cpp/ggml/src/ggml-cann/aclnn_ops.cpp +0 -2863
  126. package/src/llama.cpp/ggml/src/ggml-cann/aclnn_ops.h +0 -1110
  127. package/src/llama.cpp/ggml/src/ggml-cann/common.h +0 -420
  128. package/src/llama.cpp/ggml/src/ggml-cann/ggml-cann.cpp +0 -2570
  129. package/src/llama.cpp/ggml/src/ggml-common.h +0 -1857
  130. package/src/llama.cpp/ggml/src/ggml-cpu/cmake/FindSIMD.cmake +0 -100
  131. package/src/llama.cpp/ggml/src/ggml-cuda/CMakeLists.txt +0 -184
  132. package/src/llama.cpp/ggml/src/ggml-cuda/vendors/cuda.h +0 -15
  133. package/src/llama.cpp/ggml/src/ggml-cuda/vendors/hip.h +0 -243
  134. package/src/llama.cpp/ggml/src/ggml-cuda/vendors/musa.h +0 -140
  135. package/src/llama.cpp/ggml/src/ggml-hip/CMakeLists.txt +0 -131
  136. package/src/llama.cpp/ggml/src/ggml-impl.h +0 -601
  137. package/src/llama.cpp/ggml/src/ggml-kompute/CMakeLists.txt +0 -166
  138. package/src/llama.cpp/ggml/src/ggml-kompute/ggml-kompute.cpp +0 -2251
  139. package/src/llama.cpp/ggml/src/ggml-metal/CMakeLists.txt +0 -120
  140. package/src/llama.cpp/ggml/src/ggml-metal/ggml-metal-impl.h +0 -622
  141. package/src/llama.cpp/ggml/src/ggml-musa/CMakeLists.txt +0 -113
  142. package/src/llama.cpp/ggml/src/ggml-opencl/CMakeLists.txt +0 -96
  143. package/src/llama.cpp/ggml/src/ggml-opencl/ggml-opencl.cpp +0 -5124
  144. package/src/llama.cpp/ggml/src/ggml-opt.cpp +0 -1037
  145. package/src/llama.cpp/ggml/src/ggml-quants.c +0 -5232
  146. package/src/llama.cpp/ggml/src/ggml-quants.h +0 -100
  147. package/src/llama.cpp/ggml/src/ggml-rpc/CMakeLists.txt +0 -9
  148. package/src/llama.cpp/ggml/src/ggml-rpc/ggml-rpc.cpp +0 -1813
  149. package/src/llama.cpp/ggml/src/ggml-sycl/CMakeLists.txt +0 -189
  150. package/src/llama.cpp/ggml/src/ggml-sycl/backend.hpp +0 -37
  151. package/src/llama.cpp/ggml/src/ggml-sycl/binbcast.cpp +0 -239
  152. package/src/llama.cpp/ggml/src/ggml-sycl/binbcast.hpp +0 -39
  153. package/src/llama.cpp/ggml/src/ggml-sycl/common.cpp +0 -83
  154. package/src/llama.cpp/ggml/src/ggml-sycl/common.hpp +0 -493
  155. package/src/llama.cpp/ggml/src/ggml-sycl/concat.cpp +0 -197
  156. package/src/llama.cpp/ggml/src/ggml-sycl/concat.hpp +0 -20
  157. package/src/llama.cpp/ggml/src/ggml-sycl/conv.cpp +0 -100
  158. package/src/llama.cpp/ggml/src/ggml-sycl/conv.hpp +0 -20
  159. package/src/llama.cpp/ggml/src/ggml-sycl/convert.cpp +0 -623
  160. package/src/llama.cpp/ggml/src/ggml-sycl/convert.hpp +0 -34
  161. package/src/llama.cpp/ggml/src/ggml-sycl/cpy.cpp +0 -701
  162. package/src/llama.cpp/ggml/src/ggml-sycl/cpy.hpp +0 -11
  163. package/src/llama.cpp/ggml/src/ggml-sycl/dequantize.hpp +0 -791
  164. package/src/llama.cpp/ggml/src/ggml-sycl/dmmv.cpp +0 -1160
  165. package/src/llama.cpp/ggml/src/ggml-sycl/dmmv.hpp +0 -27
  166. package/src/llama.cpp/ggml/src/ggml-sycl/dpct/helper.hpp +0 -2957
  167. package/src/llama.cpp/ggml/src/ggml-sycl/element_wise.cpp +0 -1536
  168. package/src/llama.cpp/ggml/src/ggml-sycl/element_wise.hpp +0 -75
  169. package/src/llama.cpp/ggml/src/ggml-sycl/gemm.hpp +0 -99
  170. package/src/llama.cpp/ggml/src/ggml-sycl/getrows.cpp +0 -311
  171. package/src/llama.cpp/ggml/src/ggml-sycl/getrows.hpp +0 -20
  172. package/src/llama.cpp/ggml/src/ggml-sycl/ggml-sycl.cpp +0 -4443
  173. package/src/llama.cpp/ggml/src/ggml-sycl/gla.cpp +0 -105
  174. package/src/llama.cpp/ggml/src/ggml-sycl/gla.hpp +0 -8
  175. package/src/llama.cpp/ggml/src/ggml-sycl/im2col.cpp +0 -136
  176. package/src/llama.cpp/ggml/src/ggml-sycl/im2col.hpp +0 -21
  177. package/src/llama.cpp/ggml/src/ggml-sycl/mmq.cpp +0 -3030
  178. package/src/llama.cpp/ggml/src/ggml-sycl/mmq.hpp +0 -33
  179. package/src/llama.cpp/ggml/src/ggml-sycl/mmvq.cpp +0 -1108
  180. package/src/llama.cpp/ggml/src/ggml-sycl/mmvq.hpp +0 -27
  181. package/src/llama.cpp/ggml/src/ggml-sycl/norm.cpp +0 -474
  182. package/src/llama.cpp/ggml/src/ggml-sycl/norm.hpp +0 -26
  183. package/src/llama.cpp/ggml/src/ggml-sycl/outprod.cpp +0 -46
  184. package/src/llama.cpp/ggml/src/ggml-sycl/outprod.hpp +0 -10
  185. package/src/llama.cpp/ggml/src/ggml-sycl/presets.hpp +0 -74
  186. package/src/llama.cpp/ggml/src/ggml-sycl/quants.hpp +0 -83
  187. package/src/llama.cpp/ggml/src/ggml-sycl/rope.cpp +0 -362
  188. package/src/llama.cpp/ggml/src/ggml-sycl/rope.hpp +0 -20
  189. package/src/llama.cpp/ggml/src/ggml-sycl/softmax.cpp +0 -264
  190. package/src/llama.cpp/ggml/src/ggml-sycl/softmax.hpp +0 -20
  191. package/src/llama.cpp/ggml/src/ggml-sycl/sycl_hw.cpp +0 -13
  192. package/src/llama.cpp/ggml/src/ggml-sycl/sycl_hw.hpp +0 -23
  193. package/src/llama.cpp/ggml/src/ggml-sycl/tsembd.cpp +0 -73
  194. package/src/llama.cpp/ggml/src/ggml-sycl/tsembd.hpp +0 -20
  195. package/src/llama.cpp/ggml/src/ggml-sycl/vecdotq.hpp +0 -1215
  196. package/src/llama.cpp/ggml/src/ggml-sycl/wkv.cpp +0 -305
  197. package/src/llama.cpp/ggml/src/ggml-sycl/wkv.hpp +0 -10
  198. package/src/llama.cpp/ggml/src/ggml-threading.cpp +0 -12
  199. package/src/llama.cpp/ggml/src/ggml-threading.h +0 -14
  200. package/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt +0 -196
  201. package/src/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp +0 -10699
  202. package/src/llama.cpp/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt +0 -39
  203. package/src/llama.cpp/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +0 -751
  204. package/src/llama.cpp/ggml/src/ggml.c +0 -6550
  205. package/src/llama.cpp/ggml/src/gguf.cpp +0 -1330
  206. package/src/llama.cpp/models/.editorconfig +0 -1
  207. package/src/llama.cpp/models/ggml-vocab-aquila.gguf +0 -0
  208. package/src/llama.cpp/models/ggml-vocab-baichuan.gguf +0 -0
  209. package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf +0 -0
  210. package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf.inp +0 -112
  211. package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf.out +0 -46
  212. package/src/llama.cpp/models/ggml-vocab-chameleon.gguf.inp +0 -112
  213. package/src/llama.cpp/models/ggml-vocab-chameleon.gguf.out +0 -46
  214. package/src/llama.cpp/models/ggml-vocab-command-r.gguf +0 -0
  215. package/src/llama.cpp/models/ggml-vocab-command-r.gguf.inp +0 -112
  216. package/src/llama.cpp/models/ggml-vocab-command-r.gguf.out +0 -46
  217. package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf +0 -0
  218. package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf.inp +0 -112
  219. package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf.out +0 -46
  220. package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf +0 -0
  221. package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf.inp +0 -112
  222. package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf.out +0 -46
  223. package/src/llama.cpp/models/ggml-vocab-deepseek-r1-qwen.gguf.inp +0 -112
  224. package/src/llama.cpp/models/ggml-vocab-deepseek-r1-qwen.gguf.out +0 -46
  225. package/src/llama.cpp/models/ggml-vocab-falcon.gguf +0 -0
  226. package/src/llama.cpp/models/ggml-vocab-falcon.gguf.inp +0 -112
  227. package/src/llama.cpp/models/ggml-vocab-falcon.gguf.out +0 -46
  228. package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf +0 -0
  229. package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf.inp +0 -112
  230. package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf.out +0 -46
  231. package/src/llama.cpp/models/ggml-vocab-gpt-4o.gguf.inp +0 -112
  232. package/src/llama.cpp/models/ggml-vocab-gpt-4o.gguf.out +0 -46
  233. package/src/llama.cpp/models/ggml-vocab-gpt-neox.gguf +0 -0
  234. package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf +0 -0
  235. package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf.inp +0 -112
  236. package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf.out +0 -46
  237. package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf +0 -0
  238. package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf.inp +0 -112
  239. package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf.out +0 -46
  240. package/src/llama.cpp/models/ggml-vocab-llama4.gguf.inp +0 -112
  241. package/src/llama.cpp/models/ggml-vocab-llama4.gguf.out +0 -46
  242. package/src/llama.cpp/models/ggml-vocab-mpt.gguf +0 -0
  243. package/src/llama.cpp/models/ggml-vocab-mpt.gguf.inp +0 -112
  244. package/src/llama.cpp/models/ggml-vocab-mpt.gguf.out +0 -46
  245. package/src/llama.cpp/models/ggml-vocab-phi-3.gguf +0 -0
  246. package/src/llama.cpp/models/ggml-vocab-phi-3.gguf.inp +0 -112
  247. package/src/llama.cpp/models/ggml-vocab-phi-3.gguf.out +0 -46
  248. package/src/llama.cpp/models/ggml-vocab-pixtral.gguf.inp +0 -112
  249. package/src/llama.cpp/models/ggml-vocab-pixtral.gguf.out +0 -46
  250. package/src/llama.cpp/models/ggml-vocab-qwen2.gguf +0 -0
  251. package/src/llama.cpp/models/ggml-vocab-qwen2.gguf.inp +0 -112
  252. package/src/llama.cpp/models/ggml-vocab-qwen2.gguf.out +0 -46
  253. package/src/llama.cpp/models/ggml-vocab-refact.gguf +0 -0
  254. package/src/llama.cpp/models/ggml-vocab-refact.gguf.inp +0 -112
  255. package/src/llama.cpp/models/ggml-vocab-refact.gguf.out +0 -46
  256. package/src/llama.cpp/models/ggml-vocab-roberta-bpe.gguf.inp +0 -112
  257. package/src/llama.cpp/models/ggml-vocab-roberta-bpe.gguf.out +0 -46
  258. package/src/llama.cpp/models/ggml-vocab-starcoder.gguf +0 -0
  259. package/src/llama.cpp/models/ggml-vocab-starcoder.gguf.inp +0 -112
  260. package/src/llama.cpp/models/ggml-vocab-starcoder.gguf.out +0 -46
  261. package/src/llama.cpp/pocs/CMakeLists.txt +0 -14
  262. package/src/llama.cpp/pocs/vdot/CMakeLists.txt +0 -9
  263. package/src/llama.cpp/pocs/vdot/q8dot.cpp +0 -173
  264. package/src/llama.cpp/pocs/vdot/vdot.cpp +0 -311
  265. package/src/llama.cpp/prompts/LLM-questions.txt +0 -49
  266. package/src/llama.cpp/prompts/alpaca.txt +0 -1
  267. package/src/llama.cpp/prompts/assistant.txt +0 -31
  268. package/src/llama.cpp/prompts/chat-with-baichuan.txt +0 -4
  269. package/src/llama.cpp/prompts/chat-with-bob.txt +0 -7
  270. package/src/llama.cpp/prompts/chat-with-qwen.txt +0 -1
  271. package/src/llama.cpp/prompts/chat-with-vicuna-v0.txt +0 -7
  272. package/src/llama.cpp/prompts/chat-with-vicuna-v1.txt +0 -7
  273. package/src/llama.cpp/prompts/chat.txt +0 -28
  274. package/src/llama.cpp/prompts/dan-modified.txt +0 -1
  275. package/src/llama.cpp/prompts/dan.txt +0 -1
  276. package/src/llama.cpp/prompts/mnemonics.txt +0 -93
  277. package/src/llama.cpp/prompts/parallel-questions.txt +0 -43
  278. package/src/llama.cpp/prompts/reason-act.txt +0 -18
  279. package/src/llama.cpp/requirements/requirements-all.txt +0 -15
  280. package/src/llama.cpp/requirements/requirements-compare-llama-bench.txt +0 -2
  281. package/src/llama.cpp/requirements/requirements-convert_hf_to_gguf.txt +0 -7
  282. package/src/llama.cpp/requirements/requirements-convert_hf_to_gguf_update.txt +0 -7
  283. package/src/llama.cpp/requirements/requirements-convert_legacy_llama.txt +0 -5
  284. package/src/llama.cpp/requirements/requirements-convert_llama_ggml_to_gguf.txt +0 -1
  285. package/src/llama.cpp/requirements/requirements-convert_lora_to_gguf.txt +0 -4
  286. package/src/llama.cpp/requirements/requirements-gguf_editor_gui.txt +0 -3
  287. package/src/llama.cpp/requirements/requirements-pydantic.txt +0 -3
  288. package/src/llama.cpp/requirements/requirements-test-tokenizer-random.txt +0 -1
  289. package/src/llama.cpp/requirements/requirements-tool_bench.txt +0 -12
  290. package/src/llama.cpp/requirements.txt +0 -13
  291. package/src/llama.cpp/scripts/build-info.sh +0 -30
  292. package/src/llama.cpp/scripts/install-oneapi.bat +0 -19
  293. package/src/llama.cpp/scripts/xxd.cmake +0 -16
  294. package/src/llama.cpp/tests/CMakeLists.txt +0 -177
  295. package/src/llama.cpp/tests/get-model.cpp +0 -21
  296. package/src/llama.cpp/tests/get-model.h +0 -2
  297. package/src/llama.cpp/tests/test-arg-parser.cpp +0 -178
  298. package/src/llama.cpp/tests/test-autorelease.cpp +0 -24
  299. package/src/llama.cpp/tests/test-backend-ops.cpp +0 -4793
  300. package/src/llama.cpp/tests/test-barrier.cpp +0 -94
  301. package/src/llama.cpp/tests/test-c.c +0 -7
  302. package/src/llama.cpp/tests/test-chat-template.cpp +0 -417
  303. package/src/llama.cpp/tests/test-chat.cpp +0 -985
  304. package/src/llama.cpp/tests/test-double-float.cpp +0 -57
  305. package/src/llama.cpp/tests/test-gbnf-validator.cpp +0 -109
  306. package/src/llama.cpp/tests/test-gguf.cpp +0 -1338
  307. package/src/llama.cpp/tests/test-grammar-integration.cpp +0 -1308
  308. package/src/llama.cpp/tests/test-grammar-llguidance.cpp +0 -1201
  309. package/src/llama.cpp/tests/test-grammar-parser.cpp +0 -519
  310. package/src/llama.cpp/tests/test-json-schema-to-grammar.cpp +0 -1304
  311. package/src/llama.cpp/tests/test-llama-grammar.cpp +0 -408
  312. package/src/llama.cpp/tests/test-log.cpp +0 -39
  313. package/src/llama.cpp/tests/test-model-load-cancel.cpp +0 -27
  314. package/src/llama.cpp/tests/test-mtmd-c-api.c +0 -63
  315. package/src/llama.cpp/tests/test-opt.cpp +0 -904
  316. package/src/llama.cpp/tests/test-quantize-fns.cpp +0 -186
  317. package/src/llama.cpp/tests/test-quantize-perf.cpp +0 -365
  318. package/src/llama.cpp/tests/test-quantize-stats.cpp +0 -424
  319. package/src/llama.cpp/tests/test-regex-partial.cpp +0 -288
  320. package/src/llama.cpp/tests/test-rope.cpp +0 -262
  321. package/src/llama.cpp/tests/test-sampling.cpp +0 -399
  322. package/src/llama.cpp/tests/test-tokenizer-0.cpp +0 -312
  323. package/src/llama.cpp/tests/test-tokenizer-1-bpe.cpp +0 -155
  324. package/src/llama.cpp/tests/test-tokenizer-1-spm.cpp +0 -125
  325. package/src/llama.cpp/tools/CMakeLists.txt +0 -39
  326. package/src/llama.cpp/tools/batched-bench/CMakeLists.txt +0 -5
  327. package/src/llama.cpp/tools/batched-bench/batched-bench.cpp +0 -204
  328. package/src/llama.cpp/tools/cvector-generator/CMakeLists.txt +0 -5
  329. package/src/llama.cpp/tools/cvector-generator/completions.txt +0 -582
  330. package/src/llama.cpp/tools/cvector-generator/cvector-generator.cpp +0 -508
  331. package/src/llama.cpp/tools/cvector-generator/mean.hpp +0 -48
  332. package/src/llama.cpp/tools/cvector-generator/negative.txt +0 -4
  333. package/src/llama.cpp/tools/cvector-generator/pca.hpp +0 -315
  334. package/src/llama.cpp/tools/cvector-generator/positive.txt +0 -4
  335. package/src/llama.cpp/tools/export-lora/CMakeLists.txt +0 -5
  336. package/src/llama.cpp/tools/export-lora/export-lora.cpp +0 -434
  337. package/src/llama.cpp/tools/gguf-split/CMakeLists.txt +0 -5
  338. package/src/llama.cpp/tools/gguf-split/gguf-split.cpp +0 -583
  339. package/src/llama.cpp/tools/imatrix/CMakeLists.txt +0 -5
  340. package/src/llama.cpp/tools/imatrix/imatrix.cpp +0 -667
  341. package/src/llama.cpp/tools/llama-bench/CMakeLists.txt +0 -5
  342. package/src/llama.cpp/tools/llama-bench/llama-bench.cpp +0 -2024
  343. package/src/llama.cpp/tools/main/CMakeLists.txt +0 -5
  344. package/src/llama.cpp/tools/main/main.cpp +0 -977
  345. package/src/llama.cpp/tools/mtmd/CMakeLists.txt +0 -58
  346. package/src/llama.cpp/tools/mtmd/clip-impl.h +0 -462
  347. package/src/llama.cpp/tools/mtmd/clip.cpp +0 -4024
  348. package/src/llama.cpp/tools/mtmd/clip.h +0 -101
  349. package/src/llama.cpp/tools/mtmd/deprecation-warning.cpp +0 -22
  350. package/src/llama.cpp/tools/mtmd/miniaudio.h +0 -93468
  351. package/src/llama.cpp/tools/mtmd/mtmd-audio.cpp +0 -855
  352. package/src/llama.cpp/tools/mtmd/mtmd-audio.h +0 -62
  353. package/src/llama.cpp/tools/mtmd/mtmd-cli.cpp +0 -377
  354. package/src/llama.cpp/tools/mtmd/mtmd-helper.cpp +0 -297
  355. package/src/llama.cpp/tools/mtmd/mtmd.cpp +0 -942
  356. package/src/llama.cpp/tools/mtmd/mtmd.h +0 -362
  357. package/src/llama.cpp/tools/mtmd/requirements.txt +0 -5
  358. package/src/llama.cpp/tools/perplexity/CMakeLists.txt +0 -5
  359. package/src/llama.cpp/tools/perplexity/perplexity.cpp +0 -2063
  360. package/src/llama.cpp/tools/quantize/CMakeLists.txt +0 -6
  361. package/src/llama.cpp/tools/quantize/quantize.cpp +0 -519
  362. package/src/llama.cpp/tools/rpc/CMakeLists.txt +0 -4
  363. package/src/llama.cpp/tools/rpc/rpc-server.cpp +0 -322
  364. package/src/llama.cpp/tools/run/CMakeLists.txt +0 -16
  365. package/src/llama.cpp/tools/run/linenoise.cpp/linenoise.cpp +0 -1995
  366. package/src/llama.cpp/tools/run/linenoise.cpp/linenoise.h +0 -137
  367. package/src/llama.cpp/tools/run/run.cpp +0 -1261
  368. package/src/llama.cpp/tools/server/CMakeLists.txt +0 -51
  369. package/src/llama.cpp/tools/server/bench/requirements.txt +0 -2
  370. package/src/llama.cpp/tools/server/httplib.h +0 -10506
  371. package/src/llama.cpp/tools/server/server.cpp +0 -4966
  372. package/src/llama.cpp/tools/server/tests/requirements.txt +0 -8
  373. package/src/llama.cpp/tools/server/utils.hpp +0 -1337
  374. package/src/llama.cpp/tools/tokenize/CMakeLists.txt +0 -5
  375. package/src/llama.cpp/tools/tokenize/tokenize.cpp +0 -416
  376. package/src/llama.cpp/tools/tts/CMakeLists.txt +0 -5
  377. package/src/llama.cpp/tools/tts/tts.cpp +0 -1092
package/CMakeLists.txt CHANGED
@@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.15)
  cmake_policy(SET CMP0091 NEW)
  cmake_policy(SET CMP0042 NEW)

- project (llama-node)
+ project (index)

  set(CMAKE_CXX_STANDARD 17)

@@ -34,13 +34,19 @@ string(REPLACE "arm" "arm" ARCH ${ARCH})
  string(REPLACE "arm64x" "arm64" ARCH ${ARCH})
  string(REPLACE "aarch64" "arm64" ARCH ${ARCH})

+ option(TO_PACKAGE "Build as package" OFF)
+
  if(DEFINED VARIANT)
  set(VARIANT -${VARIANT})
  else()
  set(VARIANT "")
  endif()

- set(PLATFORM_BINARY_DIR ${CMAKE_SOURCE_DIR}/bin/${PLATFORM}${VARIANT}/${ARCH})
+ if (TO_PACKAGE)
+ set(PLATFORM_BINARY_DIR ${CMAKE_SOURCE_DIR}/packages/node-llama-${PLATFORM}-${ARCH}${VARIANT})
+ else()
+ set(PLATFORM_BINARY_DIR ${CMAKE_SOURCE_DIR}/build/Release)
+ endif()

  message(STATUS "Build type: ${CMAKE_BUILD_TYPE}")
  message(STATUS "Platform: ${PLATFORM}")
@@ -57,8 +63,14 @@ endif()
  # Improve speed
  if(CMAKE_BUILD_TYPE STREQUAL "Release")
  if (MSVC)
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /O2 /Ob2 /Oi /Ot /Oy /GL")
- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /O2 /Ob2 /Oi /Ot /Oy /GL")
+ if (NOT GGML_VULKAN)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /O2 /Ob2 /Oi /Ot /Oy /GL")
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /O2 /Ob2 /Oi /Ot /Oy /GL")
+ set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} /LTCG")
+ else()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /O1 /Ob1 /bigobj")
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /O1 /Ob1 /bigobj")
+ endif()
  else()
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -funroll-loops -flto")
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O3 -funroll-loops -flto")
@@ -74,6 +86,10 @@ endif()
  # flags: -fPIC
  set(CMAKE_POSITION_INDEPENDENT_CODE ON)

+ if (MINGW)
+ add_definitions(-D_WIN32_WINNT=0x0601)
+ endif()
+
  # VULKAN_SDK
  if (VULKAN_SDK)
  set(ENV{VULKAN_SDK} ${VULKAN_SDK})
@@ -119,15 +135,15 @@ file(
  "src/DecodeAudioTokenWorker.h"
  "src/tts_utils.cpp"
  "src/tts_utils.h"
+ "src/anyascii.h"
+ "src/anyascii.c"
  )

- if (CLANG AND CMAKE_SYSTEM_NAME STREQUAL "Windows")
+ if (NOT MSVC AND CMAKE_SYSTEM_NAME STREQUAL "Windows")
  file(GLOB WIN_DYNAMIC_LOAD_SRC "src/win_dynamic_load.c")

  add_library(win_dynamic_load ${WIN_DYNAMIC_LOAD_SRC})
- if (NOT MSVC)
- set_target_properties(win_dynamic_load PROPERTIES COMPILE_FLAGS "-Wno-implicit-function-declaration")
- endif()
+ set_target_properties(win_dynamic_load PROPERTIES COMPILE_FLAGS "-Wno-implicit-function-declaration")

  unset(CMAKE_JS_SRC)
  unset(CMAKE_JS_LIB)
@@ -145,40 +161,34 @@ target_link_libraries(${PROJECT_NAME} ${CMAKE_JS_LIB} llama ggml common mtmd ${C

  add_custom_target(copy_assets ALL DEPENDS ${PROJECT_NAME})

- add_custom_command(
- TARGET copy_assets
- COMMAND ${CMAKE_COMMAND} -E remove_directory ${PLATFORM_BINARY_DIR}
- COMMENT "Cleaning bin folder"
- )
-
- if(MSVC AND CMAKE_JS_NODELIB_DEF AND CMAKE_JS_NODELIB_TARGET)
- # Generate node.lib
- execute_process(COMMAND ${CMAKE_AR} /def:${CMAKE_JS_NODELIB_DEF} /out:${CMAKE_JS_NODELIB_TARGET} ${CMAKE_STATIC_LINKER_FLAGS})
- # copy target to bin folder
- get_filename_component(CMAKE_JS_NODELIB_TARGET_NAME ${CMAKE_JS_NODELIB_TARGET} NAME)
+ if (TO_PACKAGE)
  add_custom_command(TARGET copy_assets
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_JS_NODELIB_TARGET} ${PLATFORM_BINARY_DIR}/${CMAKE_JS_NODELIB_TARGET_NAME}
- COMMENT "Copying to bin folder"
+ COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:${PROJECT_NAME}> ${PLATFORM_BINARY_DIR}/$<TARGET_FILE_NAME:${PROJECT_NAME}>
+ COMMENT "Deploy as package"
  )
  endif()

- # copy target to bin folder
- add_custom_command(TARGET copy_assets
- COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:${PROJECT_NAME}> ${PLATFORM_BINARY_DIR}/$<TARGET_FILE_NAME:${PROJECT_NAME}>
- COMMENT "Copying to bin folder"
- )
+ if (TO_PACKAGE)
+ set(METAL_LIB_TARGET_PATH ${PLATFORM_BINARY_DIR})
+ else()
+ set(METAL_LIB_TARGET_PATH ${CMAKE_BINARY_DIR}/bin/default.metallib)
+ endif()
+
+ if(CMAKE_JS_NODELIB_DEF AND CMAKE_JS_NODELIB_TARGET)
+ execute_process(COMMAND ${CMAKE_AR} /def:${CMAKE_JS_NODELIB_DEF} /out:${CMAKE_JS_NODELIB_TARGET} ${CMAKE_STATIC_LINKER_FLAGS})
+ endif()

- if (LLAMA_METAL)
- # copy ${CMAKE_BINARY_DIR}/bin/default.metallib
+ if (GGML_METAL AND NOT GGML_METAL_EMBED_LIBRARY)
+ # copy ${CMAKE_BINARY_DIR}/bin/default.metallib
  add_custom_command(
  TARGET copy_assets
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_BINARY_DIR}/bin/default.metallib ${PLATFORM_BINARY_DIR}/default.metallib
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_BINARY_DIR}/bin/default.metallib ${METAL_LIB_TARGET_PATH}
  COMMENT "Copying default.metallib to bin folder"
  )
  add_dependencies(copy_assets ggml-metal)
  endif()

- if (LLAMA_CLBLAST)
+ if (GGML_CLBLAST AND TO_PACKAGE)
  find_package(CLBlast)
  if (CLBlast_FOUND)
  message(STATUS "CLBlast found: ${CLBlast_DIR}")
package/README.md CHANGED
@@ -17,6 +17,7 @@ An another Node binding of [llama.cpp](https://github.com/ggerganov/llama.cpp) t
  - Windows (x86_64 and arm64)
  - CPU
  - GPU acceleration via Vulkan
+ - GPU acceleration via CUDA (x86_64)
  - Linux (x86_64 and arm64)
  - CPU
  - GPU acceleration via Vulkan
@@ -63,7 +64,9 @@ console.log('Result:', text)

  - [x] `default`: General usage, not support GPU except macOS (Metal)
  - [x] `vulkan`: Support GPU Vulkan (Windows/Linux), but some scenario might unstable
- - [x] `cuda`: Support GPU CUDA (Linux), but only for limited capability (x86_64: 8.9, arm64: 8.7)
+ - [x] `cuda`: Support GPU CUDA (Windows/Linux), but only for limited capability
+ > Linux: (x86_64: 8.9, arm64: 8.7)
+ > Windows: x86_64 - 12.0
 
  ## License

package/lib/binding.js CHANGED
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
  }) : function(o, v) {
  o["default"] = v;
  });
- var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
+ var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ })();
  var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
  function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
  return new (P || (P = Promise))(function (resolve, reject) {
@@ -33,30 +43,32 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.loadModule = void 0;
- const path = __importStar(require("path"));
- const setupEnv = (variant) => {
- var _a, _b;
- const postfix = variant ? `-${variant}` : '';
- const binPath = path.resolve(__dirname, `../bin/${process.platform}${postfix}/${process.arch}/`);
- const systemPathEnv = (_b = (_a = process.env.PATH) !== null && _a !== void 0 ? _a : process.env.Path) !== null && _b !== void 0 ? _b : '';
- if (!systemPathEnv.includes(binPath)) {
- if (process.platform === 'win32') {
- process.env.Path = `${binPath};${systemPathEnv}`;
- }
- else {
- process.env.PATH = `${binPath}:${systemPathEnv}`;
- }
- }
+ const getPlatformPackageName = (variant) => {
+ const platform = process.platform;
+ const arch = process.arch;
+ const variantSuffix = variant && variant !== 'default' ? `-${variant}` : '';
+ return `@fugood/node-llama-${platform}-${arch}${variantSuffix}`;
  };
- const loadModule = (variant) => __awaiter(void 0, void 0, void 0, function* () {
+ const loadPlatformPackage = (packageName) => __awaiter(void 0, void 0, void 0, function* () {
  try {
- if (variant && variant !== 'default') {
- setupEnv(variant);
- return (yield Promise.resolve(`${`../bin/${process.platform}-${variant}/${process.arch}/llama-node.node`}`).then(s => __importStar(require(s))));
- }
+ return yield Promise.resolve(`${packageName}`).then(s => __importStar(require(s)));
+ }
+ catch (error) {
+ return null;
+ }
+ });
+ const loadModule = (variant) => __awaiter(void 0, void 0, void 0, function* () {
+ let module = yield loadPlatformPackage(getPlatformPackageName(variant));
+ if (module) {
+ return module;
+ }
+ module = yield loadPlatformPackage(getPlatformPackageName());
+ if (module) {
+ console.warn(`Not found package for variant "${variant}", fallback to default`);
+ return module;
  }
- catch (_a) { } // ignore errors and try the common path
- setupEnv();
- return (yield Promise.resolve(`${`../bin/${process.platform}/${process.arch}/llama-node.node`}`).then(s => __importStar(require(s))));
+ console.warn(`Not found package for your platform, fallback to local build`);
+ // @ts-ignore
+ return (yield Promise.resolve().then(() => __importStar(require('../build/Release/index.node'))));
  });
  exports.loadModule = loadModule;
package/lib/binding.ts CHANGED
@@ -268,33 +268,34 @@ export interface Module {

  export type LibVariant = 'default' | 'vulkan' | 'cuda'

- const setupEnv = (variant?: string) => {
- const postfix = variant ? `-${variant}` : ''
- const binPath = path.resolve(
- __dirname,
- `../bin/${process.platform}${postfix}/${process.arch}/`,
- )
- const systemPathEnv = process.env.PATH ?? process.env.Path ?? ''
- if (!systemPathEnv.includes(binPath)) {
- if (process.platform === 'win32') {
- process.env.Path = `${binPath};${systemPathEnv}`
- } else {
- process.env.PATH = `${binPath}:${systemPathEnv}`
- }
+ const getPlatformPackageName = (variant?: LibVariant): string => {
+ const platform = process.platform
+ const arch = process.arch
+ const variantSuffix = variant && variant !== 'default' ? `-${variant}` : ''
+ return `@fugood/node-llama-${platform}-${arch}${variantSuffix}`
+ }
+
+ const loadPlatformPackage = async (packageName: string): Promise<Module | null> => {
+ try {
+ return await import(packageName) as Module
+ } catch (error) {
+ return null
  }
  }

  export const loadModule = async (variant?: LibVariant): Promise<Module> => {
- try {
- if (variant && variant !== 'default') {
- setupEnv(variant)
- return (await import(
- `../bin/${process.platform}-${variant}/${process.arch}/llama-node.node`
- )) as Module
- }
- } catch {} // ignore errors and try the common path
- setupEnv()
- return (await import(
- `../bin/${process.platform}/${process.arch}/llama-node.node`
- )) as Module
+ let module = await loadPlatformPackage(getPlatformPackageName(variant))
+ if (module) {
+ return module
+ }
+
+ module = await loadPlatformPackage(getPlatformPackageName())
+ if (module) {
+ console.warn(`Not found package for variant "${variant}", fallback to default`)
+ return module
+ }
+
+ console.warn(`Not found package for your platform, fallback to local build`)
+ // @ts-ignore
+ return (await import('../build/Release/index.node')) as Module
  }
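For reference, a minimal consumer-side sketch of the new loading flow. The deep-import path '@fugood/llama.node/lib/binding' is an assumption based on the "files" list in package.json below; the package's main entry (lib/index.js) is not part of this diff.

  // Sketch: resolve the native module through the new per-platform packages.
  const { loadModule } = require('@fugood/llama.node/lib/binding'); // path assumed, not shown in this diff

  loadModule('vulkan')
    .then((mod) => {
      // `mod` is the native Module interface declared in lib/binding.ts.
      // If no @fugood/node-llama-<platform>-<arch>-vulkan package is installed,
      // loadModule falls back to the default platform package, then to ../build/Release/index.node.
      console.log('llama.node native module loaded');
    })
    .catch((err) => {
      console.error('no prebuilt platform package and no local build found', err);
    });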
package/package.json CHANGED
@@ -1,20 +1,23 @@
  {
  "name": "@fugood/llama.node",
  "access": "public",
- "version": "0.6.3",
+ "version": "1.0.0-beta.2",
  "description": "An another Node binding of llama.cpp",
  "main": "lib/index.js",
  "scripts": {
+ "postinstall": "node scripts/build.js",
  "pretest": "node scripts/download-test-models.js",
  "test": "jest",
  "build": "tsc",
- "prepack": "yarn build",
+ "prepack": "npm run build",
  "prebuild-native": "node scripts/generate_win_dynamic_load.js 6",
  "build-native": "cmake-js compile",
  "clean": "rimraf build",
  "prepare": "husky",
  "commitlint": "commitlint --edit",
- "release": "release-it"
+ "release": "release-it",
+ "update-packages": "node scripts/update-packages.js",
+ "publish-if-need": "node scripts/publish-if-need.js"
  },
  "repository": {
  "type": "git",
@@ -35,7 +38,8 @@
  },
  "homepage": "https://github.com/mybigday/llama.node#readme",
  "publishConfig": {
- "registry": "https://registry.npmjs.org"
+ "registry": "https://registry.npmjs.org",
+ "access": "public"
  },
  "binary": {
  "napi_versions": [
@@ -43,12 +47,41 @@
  ]
  },
  "files": [
- "bin/**/*",
- "src/**/*.{c,cc,cpp,h,hh,hpp,txt,cmake}",
+ "scripts/build.js",
+ "scripts/llama.cpp.patch",
+ "src/*.{cc,c,h,hpp}",
+ "src/DecodeAudioTokenWorker.cpp",
+ "src/DetokenizeWorker.cpp",
+ "src/DisposeWorker.cpp",
+ "src/EmbeddingWorker.cpp",
+ "src/LlamaCompletionWorker.cpp",
+ "src/LlamaContext.cpp",
+ "src/LoadSessionWorker.cpp",
+ "src/SaveSessionWorker.cpp",
+ "src/TokenizeWorker.cpp",
+ "src/tts_utils.cpp",
+ "src/llama.cpp/{common,src,include}/**/*.{h,hpp,cpp,cc,c}",
+ "src/llama.cpp/ggml/include/*.h",
+ "src/llama.cpp/ggml/src/ggml-cpu/**/*.{h,hpp,cpp,cc,c}",
  "lib/*.js",
  "lib/*.ts",
  "CMakeLists.txt"
  ],
+ "optionalDependencies": {
+ "@fugood/node-llama-linux-x64": "1.0.0-beta.2",
+ "@fugood/node-llama-linux-x64-vulkan": "1.0.0-beta.2",
+ "@fugood/node-llama-linux-x64-cuda": "1.0.0-beta.2",
+ "@fugood/node-llama-linux-arm64": "1.0.0-beta.2",
+ "@fugood/node-llama-linux-arm64-vulkan": "1.0.0-beta.2",
+ "@fugood/node-llama-linux-arm64-cuda": "1.0.0-beta.2",
+ "@fugood/node-llama-win32-x64": "1.0.0-beta.2",
+ "@fugood/node-llama-win32-x64-vulkan": "1.0.0-beta.2",
+ "@fugood/node-llama-win32-x64-cuda": "1.0.0-beta.2",
+ "@fugood/node-llama-win32-arm64": "1.0.0-beta.2",
+ "@fugood/node-llama-win32-arm64-vulkan": "1.0.0-beta.2",
+ "@fugood/node-llama-darwin-x64": "1.0.0-beta.2",
+ "@fugood/node-llama-darwin-arm64": "1.0.0-beta.2"
+ },
  "devDependencies": {
  "@babel/preset-env": "^7.24.4",
  "@babel/preset-typescript": "^7.24.1",
@@ -88,4 +121,4 @@
  "singleQuote": true,
  "printWidth": 80
  }
- }
+ }
package/scripts/build.js ADDED
@@ -0,0 +1,47 @@
+ const fs = require('fs');
+ const path = require('path');
+
+ const validAccelerators = process.platform === 'darwin' ? [] : ['vulkan', 'cuda'];
+
+ let isBuildFromSource = process.env.npm_config_build_from_source === 'true';
+
+ let accelerator = process.env.npm_config_accelerator || '';
+
+ const checkPaths = [
+ path.resolve(
+ __dirname,
+ `../node-llama-${process.platform}-${process.arch}${accelerator ? `-${accelerator}` : ''}`
+ ),
+ path.resolve(__dirname, `../build/Release/index.node`),
+ ];
+
+ if (!isBuildFromSource && !checkPaths.some(path => fs.existsSync(path))) {
+ console.warn('Not found prebuild package, please build from source');
+ isBuildFromSource = true;
+ }
+
+ if (accelerator && !validAccelerators.includes(accelerator)) {
+ throw new Error(`Invalid accelerator: ${accelerator}`);
+ }
+
+ if (isBuildFromSource) {
+ console.log('Build from source is enabled');
+ }
+
+ let BuildSystem;
+ try {
+ ({ BuildSystem } = require('cmake-js'));
+ } catch (error) {
+ console.error('cmake-js is not installed, please install it');
+ process.exit(1);
+ }
+
+ const buildSystem = new BuildSystem({
+ directory: path.resolve(__dirname, '../'),
+ arch: process.arch,
+ preferClang: true,
+ out: path.resolve(__dirname, '../build'),
+ extraCMakeArgs: [accelerator && `--CDVARIANT=${accelerator}`].filter(Boolean),
+ });
+
+ buildSystem.build();
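The postinstall script only reads standard npm configuration environment variables (npm_config_build_from_source, npm_config_accelerator), so the same build can be triggered outside of npm install by setting them explicitly. A minimal sketch, assuming it is run from the package root; the buildVariant helper is illustrative, not part of the package.

  // Sketch: run scripts/build.js with an explicit accelerator, the same way
  // npm would when npm_config_accelerator is set during install.
  const { execFileSync } = require('child_process');
  const path = require('path');

  function buildVariant(accelerator) {
    execFileSync(process.execPath, [path.resolve(__dirname, 'scripts/build.js')], {
      stdio: 'inherit',
      env: {
        ...process.env,
        npm_config_build_from_source: 'true',  // force the cmake-js build path
        npm_config_accelerator: accelerator,   // '', 'vulkan' or 'cuda'; scripts/build.js rejects accelerators on darwin
      },
    });
  }

  buildVariant('vulkan');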
package/scripts/llama.cpp.patch ADDED
@@ -0,0 +1,109 @@
+ diff --git a/src/llama.cpp/common/chat.cpp b/src/llama.cpp/common/chat.cpp
+ index f138c7bc..e177fe92 100644
+ --- a/src/llama.cpp/common/chat.cpp
+ +++ b/src/llama.cpp/common/chat.cpp
+ @@ -1,8 +1,6 @@
+ #include "chat.h"
+ #include "json-schema-to-grammar.h"
+ #include "log.h"
+ -#include "minja/chat-template.hpp"
+ -#include "minja/minja.hpp"
+
+ #include <optional>
+
+ @@ -15,14 +13,6 @@ static std::string format_time(const std::chrono::system_clock::time_point & now
+ return res;
+ }
+
+ -typedef minja::chat_template common_chat_template;
+ -
+ -struct common_chat_templates {
+ - bool has_explicit_template; // Model had builtin template or template overridde was specified.
+ - std::unique_ptr<common_chat_template> template_default; // always set (defaults to chatml)
+ - std::unique_ptr<common_chat_template> template_tool_use;
+ -};
+ -
+ struct templates_params {
+ json messages;
+ json tools;
+ diff --git a/src/llama.cpp/common/chat.h b/src/llama.cpp/common/chat.h
+ index d26a09c2..cb92721a 100644
+ --- a/src/llama.cpp/common/chat.h
+ +++ b/src/llama.cpp/common/chat.h
+ @@ -6,8 +6,16 @@
+ #include <chrono>
+ #include <string>
+ #include <vector>
+ +#include "minja/chat-template.hpp"
+ +#include "minja/minja.hpp"
+
+ -struct common_chat_templates;
+ +typedef minja::chat_template common_chat_template;
+ +
+ +struct common_chat_templates {
+ + bool has_explicit_template; // Model had builtin template or template overridde was specified.
+ + std::unique_ptr<common_chat_template> template_default; // always set (defaults to chatml)
+ + std::unique_ptr<common_chat_template> template_tool_use;
+ +};
+
+ struct common_chat_tool_call {
+ std::string name;
+ diff --git a/src/llama.cpp/common/common.cpp b/src/llama.cpp/common/common.cpp
+ index 94f545f8..a55df8aa 100644
+ --- a/src/llama.cpp/common/common.cpp
+ +++ b/src/llama.cpp/common/common.cpp
+ @@ -1062,6 +1062,7 @@ struct llama_model_params common_model_params_to_llama(common_params & params) {
+ mparams.n_gpu_layers = params.n_gpu_layers;
+ }
+
+ + mparams.vocab_only = params.vocab_only;
+ mparams.main_gpu = params.main_gpu;
+ mparams.split_mode = params.split_mode;
+ mparams.tensor_split = params.tensor_split;
+ diff --git a/src/llama.cpp/common/common.h b/src/llama.cpp/common/common.h
+ index 0a9dc059..996afcd8 100644
+ --- a/src/llama.cpp/common/common.h
+ +++ b/src/llama.cpp/common/common.h
+ @@ -217,6 +217,7 @@ enum common_reasoning_format {
+ };
+
+ struct common_params {
+ + bool vocab_only = false;
+ int32_t n_predict = -1; // new tokens to predict
+ int32_t n_ctx = 4096; // context size
+ int32_t n_batch = 2048; // logical batch size for prompt processing (must be >=32 to use BLAS)
+ diff --git a/src/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt b/src/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt
+ index 9a3085be..8218cc16 100644
+ --- a/src/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt
+ +++ b/src/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt
+ @@ -90,7 +90,7 @@ function(ggml_add_cpu_backend_variant_impl tag_name)
+ message(STATUS "ARM detected")
+
+ if (MSVC AND NOT CMAKE_C_COMPILER_ID STREQUAL "Clang")
+ - message(FATAL_ERROR "MSVC is not supported for ARM, use clang")
+ + list(APPEND ARCH_FLAGS /arch:armv8.7)
+ else()
+ check_cxx_compiler_flag(-mfp16-format=ieee GGML_COMPILER_SUPPORTS_FP16_FORMAT_I3E)
+ if (NOT "${GGML_COMPILER_SUPPORTS_FP16_FORMAT_I3E}" STREQUAL "")
+ diff --git a/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt b/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt
+ index 662f1377..f9f99698 100644
+ --- a/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt
+ +++ b/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt
+ @@ -122,7 +122,7 @@ if (Vulkan_FOUND)
+ endif()
+
+ # Set up toolchain for host compilation whether cross-compiling or not
+ - if (CMAKE_CROSSCOMPILING)
+ + if (CMAKE_CROSSCOMPILING OR NOT CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)
+ if (GGML_VULKAN_SHADERS_GEN_TOOLCHAIN)
+ set(HOST_CMAKE_TOOLCHAIN_FILE ${GGML_VULKAN_SHADERS_GEN_TOOLCHAIN})
+ else()
+ @@ -144,7 +144,7 @@ if (Vulkan_FOUND)
+ include(ExternalProject)
+
+ # Add toolchain file if cross-compiling
+ - if (CMAKE_CROSSCOMPILING)
+ + if (CMAKE_CROSSCOMPILING OR NOT CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)
+ list(APPEND VULKAN_SHADER_GEN_CMAKE_ARGS -DCMAKE_TOOLCHAIN_FILE=${HOST_CMAKE_TOOLCHAIN_FILE})
+ message(STATUS "vulkan-shaders-gen toolchain file: ${HOST_CMAKE_TOOLCHAIN_FILE}")
+ endif()