@fugood/llama.node 0.6.3 → 1.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (377)
  1. package/CMakeLists.txt +40 -30
  2. package/README.md +4 -1
  3. package/lib/binding.js +41 -29
  4. package/lib/binding.ts +26 -25
  5. package/package.json +40 -7
  6. package/scripts/build.js +47 -0
  7. package/scripts/llama.cpp.patch +109 -0
  8. package/src/anyascii.c +22223 -0
  9. package/src/anyascii.h +42 -0
  10. package/src/tts_utils.cpp +20 -7
  11. package/src/tts_utils.h +2 -0
  12. package/bin/darwin/arm64/llama-node.node +0 -0
  13. package/bin/darwin/x64/llama-node.node +0 -0
  14. package/bin/linux/arm64/llama-node.node +0 -0
  15. package/bin/linux/x64/llama-node.node +0 -0
  16. package/bin/linux-cuda/arm64/llama-node.node +0 -0
  17. package/bin/linux-cuda/x64/llama-node.node +0 -0
  18. package/bin/linux-vulkan/arm64/llama-node.node +0 -0
  19. package/bin/linux-vulkan/x64/llama-node.node +0 -0
  20. package/bin/win32/x64/llama-node.node +0 -0
  21. package/bin/win32/x64/node.lib +0 -0
  22. package/bin/win32-vulkan/arm64/llama-node.node +0 -0
  23. package/bin/win32-vulkan/arm64/node.lib +0 -0
  24. package/bin/win32-vulkan/x64/llama-node.node +0 -0
  25. package/bin/win32-vulkan/x64/node.lib +0 -0
  26. package/src/llama.cpp/.github/workflows/build-linux-cross.yml +0 -233
  27. package/src/llama.cpp/.github/workflows/build.yml +0 -1078
  28. package/src/llama.cpp/.github/workflows/close-issue.yml +0 -28
  29. package/src/llama.cpp/.github/workflows/docker.yml +0 -178
  30. package/src/llama.cpp/.github/workflows/editorconfig.yml +0 -29
  31. package/src/llama.cpp/.github/workflows/gguf-publish.yml +0 -44
  32. package/src/llama.cpp/.github/workflows/labeler.yml +0 -17
  33. package/src/llama.cpp/.github/workflows/python-check-requirements.yml +0 -33
  34. package/src/llama.cpp/.github/workflows/python-lint.yml +0 -30
  35. package/src/llama.cpp/.github/workflows/python-type-check.yml +0 -40
  36. package/src/llama.cpp/.github/workflows/release.yml +0 -739
  37. package/src/llama.cpp/.github/workflows/server.yml +0 -237
  38. package/src/llama.cpp/.github/workflows/winget.yml +0 -42
  39. package/src/llama.cpp/cmake/arm64-apple-clang.cmake +0 -16
  40. package/src/llama.cpp/cmake/arm64-windows-llvm.cmake +0 -16
  41. package/src/llama.cpp/cmake/build-info.cmake +0 -64
  42. package/src/llama.cpp/cmake/common.cmake +0 -35
  43. package/src/llama.cpp/cmake/git-vars.cmake +0 -22
  44. package/src/llama.cpp/cmake/x64-windows-llvm.cmake +0 -5
  45. package/src/llama.cpp/common/build-info.cpp.in +0 -4
  46. package/src/llama.cpp/docs/build.md +0 -561
  47. package/src/llama.cpp/examples/CMakeLists.txt +0 -43
  48. package/src/llama.cpp/examples/batched/CMakeLists.txt +0 -5
  49. package/src/llama.cpp/examples/batched/batched.cpp +0 -246
  50. package/src/llama.cpp/examples/chat-13B.bat +0 -57
  51. package/src/llama.cpp/examples/convert-llama2c-to-ggml/CMakeLists.txt +0 -5
  52. package/src/llama.cpp/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +0 -941
  53. package/src/llama.cpp/examples/deprecation-warning/deprecation-warning.cpp +0 -35
  54. package/src/llama.cpp/examples/embedding/CMakeLists.txt +0 -5
  55. package/src/llama.cpp/examples/embedding/embedding.cpp +0 -323
  56. package/src/llama.cpp/examples/eval-callback/CMakeLists.txt +0 -10
  57. package/src/llama.cpp/examples/eval-callback/eval-callback.cpp +0 -194
  58. package/src/llama.cpp/examples/gen-docs/CMakeLists.txt +0 -5
  59. package/src/llama.cpp/examples/gen-docs/gen-docs.cpp +0 -83
  60. package/src/llama.cpp/examples/gguf/CMakeLists.txt +0 -5
  61. package/src/llama.cpp/examples/gguf/gguf.cpp +0 -265
  62. package/src/llama.cpp/examples/gguf-hash/CMakeLists.txt +0 -22
  63. package/src/llama.cpp/examples/gguf-hash/deps/rotate-bits/rotate-bits.h +0 -46
  64. package/src/llama.cpp/examples/gguf-hash/deps/sha1/sha1.c +0 -295
  65. package/src/llama.cpp/examples/gguf-hash/deps/sha1/sha1.h +0 -52
  66. package/src/llama.cpp/examples/gguf-hash/deps/sha256/sha256.c +0 -221
  67. package/src/llama.cpp/examples/gguf-hash/deps/sha256/sha256.h +0 -24
  68. package/src/llama.cpp/examples/gguf-hash/deps/xxhash/xxhash.c +0 -42
  69. package/src/llama.cpp/examples/gguf-hash/deps/xxhash/xxhash.h +0 -7093
  70. package/src/llama.cpp/examples/gguf-hash/gguf-hash.cpp +0 -694
  71. package/src/llama.cpp/examples/gritlm/CMakeLists.txt +0 -5
  72. package/src/llama.cpp/examples/gritlm/gritlm.cpp +0 -229
  73. package/src/llama.cpp/examples/jeopardy/questions.txt +0 -100
  74. package/src/llama.cpp/examples/llama.android/app/build.gradle.kts +0 -65
  75. package/src/llama.cpp/examples/llama.android/build.gradle.kts +0 -6
  76. package/src/llama.cpp/examples/llama.android/llama/build.gradle.kts +0 -71
  77. package/src/llama.cpp/examples/llama.android/llama/src/main/cpp/CMakeLists.txt +0 -53
  78. package/src/llama.cpp/examples/llama.android/llama/src/main/cpp/llama-android.cpp +0 -452
  79. package/src/llama.cpp/examples/llama.android/settings.gradle.kts +0 -18
  80. package/src/llama.cpp/examples/lookahead/CMakeLists.txt +0 -5
  81. package/src/llama.cpp/examples/lookahead/lookahead.cpp +0 -472
  82. package/src/llama.cpp/examples/lookup/CMakeLists.txt +0 -23
  83. package/src/llama.cpp/examples/lookup/lookup-create.cpp +0 -40
  84. package/src/llama.cpp/examples/lookup/lookup-merge.cpp +0 -47
  85. package/src/llama.cpp/examples/lookup/lookup-stats.cpp +0 -157
  86. package/src/llama.cpp/examples/lookup/lookup.cpp +0 -242
  87. package/src/llama.cpp/examples/parallel/CMakeLists.txt +0 -5
  88. package/src/llama.cpp/examples/parallel/parallel.cpp +0 -492
  89. package/src/llama.cpp/examples/passkey/CMakeLists.txt +0 -5
  90. package/src/llama.cpp/examples/passkey/passkey.cpp +0 -277
  91. package/src/llama.cpp/examples/retrieval/CMakeLists.txt +0 -5
  92. package/src/llama.cpp/examples/retrieval/retrieval.cpp +0 -304
  93. package/src/llama.cpp/examples/save-load-state/CMakeLists.txt +0 -5
  94. package/src/llama.cpp/examples/save-load-state/save-load-state.cpp +0 -246
  95. package/src/llama.cpp/examples/simple/CMakeLists.txt +0 -5
  96. package/src/llama.cpp/examples/simple/simple.cpp +0 -206
  97. package/src/llama.cpp/examples/simple-chat/CMakeLists.txt +0 -5
  98. package/src/llama.cpp/examples/simple-chat/simple-chat.cpp +0 -206
  99. package/src/llama.cpp/examples/simple-cmake-pkg/CMakeLists.txt +0 -11
  100. package/src/llama.cpp/examples/speculative/CMakeLists.txt +0 -5
  101. package/src/llama.cpp/examples/speculative/speculative.cpp +0 -644
  102. package/src/llama.cpp/examples/speculative-simple/CMakeLists.txt +0 -5
  103. package/src/llama.cpp/examples/speculative-simple/speculative-simple.cpp +0 -261
  104. package/src/llama.cpp/examples/sycl/CMakeLists.txt +0 -9
  105. package/src/llama.cpp/examples/sycl/build.sh +0 -23
  106. package/src/llama.cpp/examples/sycl/ls-sycl-device.cpp +0 -13
  107. package/src/llama.cpp/examples/sycl/run-llama2.sh +0 -27
  108. package/src/llama.cpp/examples/sycl/run-llama3.sh +0 -28
  109. package/src/llama.cpp/examples/sycl/win-build-sycl.bat +0 -33
  110. package/src/llama.cpp/examples/sycl/win-run-llama2.bat +0 -9
  111. package/src/llama.cpp/examples/sycl/win-run-llama3.bat +0 -9
  112. package/src/llama.cpp/examples/training/CMakeLists.txt +0 -5
  113. package/src/llama.cpp/examples/training/finetune.cpp +0 -96
  114. package/src/llama.cpp/ggml/cmake/GitVars.cmake +0 -22
  115. package/src/llama.cpp/ggml/cmake/common.cmake +0 -26
  116. package/src/llama.cpp/ggml/src/ggml-alloc.c +0 -1042
  117. package/src/llama.cpp/ggml/src/ggml-backend-impl.h +0 -255
  118. package/src/llama.cpp/ggml/src/ggml-backend-reg.cpp +0 -586
  119. package/src/llama.cpp/ggml/src/ggml-backend.cpp +0 -2008
  120. package/src/llama.cpp/ggml/src/ggml-blas/CMakeLists.txt +0 -87
  121. package/src/llama.cpp/ggml/src/ggml-blas/ggml-blas.cpp +0 -517
  122. package/src/llama.cpp/ggml/src/ggml-cann/CMakeLists.txt +0 -74
  123. package/src/llama.cpp/ggml/src/ggml-cann/acl_tensor.cpp +0 -179
  124. package/src/llama.cpp/ggml/src/ggml-cann/acl_tensor.h +0 -258
  125. package/src/llama.cpp/ggml/src/ggml-cann/aclnn_ops.cpp +0 -2863
  126. package/src/llama.cpp/ggml/src/ggml-cann/aclnn_ops.h +0 -1110
  127. package/src/llama.cpp/ggml/src/ggml-cann/common.h +0 -420
  128. package/src/llama.cpp/ggml/src/ggml-cann/ggml-cann.cpp +0 -2570
  129. package/src/llama.cpp/ggml/src/ggml-common.h +0 -1857
  130. package/src/llama.cpp/ggml/src/ggml-cpu/cmake/FindSIMD.cmake +0 -100
  131. package/src/llama.cpp/ggml/src/ggml-cuda/CMakeLists.txt +0 -184
  132. package/src/llama.cpp/ggml/src/ggml-cuda/vendors/cuda.h +0 -15
  133. package/src/llama.cpp/ggml/src/ggml-cuda/vendors/hip.h +0 -243
  134. package/src/llama.cpp/ggml/src/ggml-cuda/vendors/musa.h +0 -140
  135. package/src/llama.cpp/ggml/src/ggml-hip/CMakeLists.txt +0 -131
  136. package/src/llama.cpp/ggml/src/ggml-impl.h +0 -601
  137. package/src/llama.cpp/ggml/src/ggml-kompute/CMakeLists.txt +0 -166
  138. package/src/llama.cpp/ggml/src/ggml-kompute/ggml-kompute.cpp +0 -2251
  139. package/src/llama.cpp/ggml/src/ggml-metal/CMakeLists.txt +0 -120
  140. package/src/llama.cpp/ggml/src/ggml-metal/ggml-metal-impl.h +0 -622
  141. package/src/llama.cpp/ggml/src/ggml-musa/CMakeLists.txt +0 -113
  142. package/src/llama.cpp/ggml/src/ggml-opencl/CMakeLists.txt +0 -96
  143. package/src/llama.cpp/ggml/src/ggml-opencl/ggml-opencl.cpp +0 -5124
  144. package/src/llama.cpp/ggml/src/ggml-opt.cpp +0 -1037
  145. package/src/llama.cpp/ggml/src/ggml-quants.c +0 -5232
  146. package/src/llama.cpp/ggml/src/ggml-quants.h +0 -100
  147. package/src/llama.cpp/ggml/src/ggml-rpc/CMakeLists.txt +0 -9
  148. package/src/llama.cpp/ggml/src/ggml-rpc/ggml-rpc.cpp +0 -1813
  149. package/src/llama.cpp/ggml/src/ggml-sycl/CMakeLists.txt +0 -189
  150. package/src/llama.cpp/ggml/src/ggml-sycl/backend.hpp +0 -37
  151. package/src/llama.cpp/ggml/src/ggml-sycl/binbcast.cpp +0 -239
  152. package/src/llama.cpp/ggml/src/ggml-sycl/binbcast.hpp +0 -39
  153. package/src/llama.cpp/ggml/src/ggml-sycl/common.cpp +0 -83
  154. package/src/llama.cpp/ggml/src/ggml-sycl/common.hpp +0 -493
  155. package/src/llama.cpp/ggml/src/ggml-sycl/concat.cpp +0 -197
  156. package/src/llama.cpp/ggml/src/ggml-sycl/concat.hpp +0 -20
  157. package/src/llama.cpp/ggml/src/ggml-sycl/conv.cpp +0 -100
  158. package/src/llama.cpp/ggml/src/ggml-sycl/conv.hpp +0 -20
  159. package/src/llama.cpp/ggml/src/ggml-sycl/convert.cpp +0 -623
  160. package/src/llama.cpp/ggml/src/ggml-sycl/convert.hpp +0 -34
  161. package/src/llama.cpp/ggml/src/ggml-sycl/cpy.cpp +0 -701
  162. package/src/llama.cpp/ggml/src/ggml-sycl/cpy.hpp +0 -11
  163. package/src/llama.cpp/ggml/src/ggml-sycl/dequantize.hpp +0 -791
  164. package/src/llama.cpp/ggml/src/ggml-sycl/dmmv.cpp +0 -1160
  165. package/src/llama.cpp/ggml/src/ggml-sycl/dmmv.hpp +0 -27
  166. package/src/llama.cpp/ggml/src/ggml-sycl/dpct/helper.hpp +0 -2957
  167. package/src/llama.cpp/ggml/src/ggml-sycl/element_wise.cpp +0 -1536
  168. package/src/llama.cpp/ggml/src/ggml-sycl/element_wise.hpp +0 -75
  169. package/src/llama.cpp/ggml/src/ggml-sycl/gemm.hpp +0 -99
  170. package/src/llama.cpp/ggml/src/ggml-sycl/getrows.cpp +0 -311
  171. package/src/llama.cpp/ggml/src/ggml-sycl/getrows.hpp +0 -20
  172. package/src/llama.cpp/ggml/src/ggml-sycl/ggml-sycl.cpp +0 -4443
  173. package/src/llama.cpp/ggml/src/ggml-sycl/gla.cpp +0 -105
  174. package/src/llama.cpp/ggml/src/ggml-sycl/gla.hpp +0 -8
  175. package/src/llama.cpp/ggml/src/ggml-sycl/im2col.cpp +0 -136
  176. package/src/llama.cpp/ggml/src/ggml-sycl/im2col.hpp +0 -21
  177. package/src/llama.cpp/ggml/src/ggml-sycl/mmq.cpp +0 -3030
  178. package/src/llama.cpp/ggml/src/ggml-sycl/mmq.hpp +0 -33
  179. package/src/llama.cpp/ggml/src/ggml-sycl/mmvq.cpp +0 -1108
  180. package/src/llama.cpp/ggml/src/ggml-sycl/mmvq.hpp +0 -27
  181. package/src/llama.cpp/ggml/src/ggml-sycl/norm.cpp +0 -474
  182. package/src/llama.cpp/ggml/src/ggml-sycl/norm.hpp +0 -26
  183. package/src/llama.cpp/ggml/src/ggml-sycl/outprod.cpp +0 -46
  184. package/src/llama.cpp/ggml/src/ggml-sycl/outprod.hpp +0 -10
  185. package/src/llama.cpp/ggml/src/ggml-sycl/presets.hpp +0 -74
  186. package/src/llama.cpp/ggml/src/ggml-sycl/quants.hpp +0 -83
  187. package/src/llama.cpp/ggml/src/ggml-sycl/rope.cpp +0 -362
  188. package/src/llama.cpp/ggml/src/ggml-sycl/rope.hpp +0 -20
  189. package/src/llama.cpp/ggml/src/ggml-sycl/softmax.cpp +0 -264
  190. package/src/llama.cpp/ggml/src/ggml-sycl/softmax.hpp +0 -20
  191. package/src/llama.cpp/ggml/src/ggml-sycl/sycl_hw.cpp +0 -13
  192. package/src/llama.cpp/ggml/src/ggml-sycl/sycl_hw.hpp +0 -23
  193. package/src/llama.cpp/ggml/src/ggml-sycl/tsembd.cpp +0 -73
  194. package/src/llama.cpp/ggml/src/ggml-sycl/tsembd.hpp +0 -20
  195. package/src/llama.cpp/ggml/src/ggml-sycl/vecdotq.hpp +0 -1215
  196. package/src/llama.cpp/ggml/src/ggml-sycl/wkv.cpp +0 -305
  197. package/src/llama.cpp/ggml/src/ggml-sycl/wkv.hpp +0 -10
  198. package/src/llama.cpp/ggml/src/ggml-threading.cpp +0 -12
  199. package/src/llama.cpp/ggml/src/ggml-threading.h +0 -14
  200. package/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt +0 -196
  201. package/src/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp +0 -10699
  202. package/src/llama.cpp/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt +0 -39
  203. package/src/llama.cpp/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +0 -751
  204. package/src/llama.cpp/ggml/src/ggml.c +0 -6550
  205. package/src/llama.cpp/ggml/src/gguf.cpp +0 -1330
  206. package/src/llama.cpp/models/.editorconfig +0 -1
  207. package/src/llama.cpp/models/ggml-vocab-aquila.gguf +0 -0
  208. package/src/llama.cpp/models/ggml-vocab-baichuan.gguf +0 -0
  209. package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf +0 -0
  210. package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf.inp +0 -112
  211. package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf.out +0 -46
  212. package/src/llama.cpp/models/ggml-vocab-chameleon.gguf.inp +0 -112
  213. package/src/llama.cpp/models/ggml-vocab-chameleon.gguf.out +0 -46
  214. package/src/llama.cpp/models/ggml-vocab-command-r.gguf +0 -0
  215. package/src/llama.cpp/models/ggml-vocab-command-r.gguf.inp +0 -112
  216. package/src/llama.cpp/models/ggml-vocab-command-r.gguf.out +0 -46
  217. package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf +0 -0
  218. package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf.inp +0 -112
  219. package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf.out +0 -46
  220. package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf +0 -0
  221. package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf.inp +0 -112
  222. package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf.out +0 -46
  223. package/src/llama.cpp/models/ggml-vocab-deepseek-r1-qwen.gguf.inp +0 -112
  224. package/src/llama.cpp/models/ggml-vocab-deepseek-r1-qwen.gguf.out +0 -46
  225. package/src/llama.cpp/models/ggml-vocab-falcon.gguf +0 -0
  226. package/src/llama.cpp/models/ggml-vocab-falcon.gguf.inp +0 -112
  227. package/src/llama.cpp/models/ggml-vocab-falcon.gguf.out +0 -46
  228. package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf +0 -0
  229. package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf.inp +0 -112
  230. package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf.out +0 -46
  231. package/src/llama.cpp/models/ggml-vocab-gpt-4o.gguf.inp +0 -112
  232. package/src/llama.cpp/models/ggml-vocab-gpt-4o.gguf.out +0 -46
  233. package/src/llama.cpp/models/ggml-vocab-gpt-neox.gguf +0 -0
  234. package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf +0 -0
  235. package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf.inp +0 -112
  236. package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf.out +0 -46
  237. package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf +0 -0
  238. package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf.inp +0 -112
  239. package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf.out +0 -46
  240. package/src/llama.cpp/models/ggml-vocab-llama4.gguf.inp +0 -112
  241. package/src/llama.cpp/models/ggml-vocab-llama4.gguf.out +0 -46
  242. package/src/llama.cpp/models/ggml-vocab-mpt.gguf +0 -0
  243. package/src/llama.cpp/models/ggml-vocab-mpt.gguf.inp +0 -112
  244. package/src/llama.cpp/models/ggml-vocab-mpt.gguf.out +0 -46
  245. package/src/llama.cpp/models/ggml-vocab-phi-3.gguf +0 -0
  246. package/src/llama.cpp/models/ggml-vocab-phi-3.gguf.inp +0 -112
  247. package/src/llama.cpp/models/ggml-vocab-phi-3.gguf.out +0 -46
  248. package/src/llama.cpp/models/ggml-vocab-pixtral.gguf.inp +0 -112
  249. package/src/llama.cpp/models/ggml-vocab-pixtral.gguf.out +0 -46
  250. package/src/llama.cpp/models/ggml-vocab-qwen2.gguf +0 -0
  251. package/src/llama.cpp/models/ggml-vocab-qwen2.gguf.inp +0 -112
  252. package/src/llama.cpp/models/ggml-vocab-qwen2.gguf.out +0 -46
  253. package/src/llama.cpp/models/ggml-vocab-refact.gguf +0 -0
  254. package/src/llama.cpp/models/ggml-vocab-refact.gguf.inp +0 -112
  255. package/src/llama.cpp/models/ggml-vocab-refact.gguf.out +0 -46
  256. package/src/llama.cpp/models/ggml-vocab-roberta-bpe.gguf.inp +0 -112
  257. package/src/llama.cpp/models/ggml-vocab-roberta-bpe.gguf.out +0 -46
  258. package/src/llama.cpp/models/ggml-vocab-starcoder.gguf +0 -0
  259. package/src/llama.cpp/models/ggml-vocab-starcoder.gguf.inp +0 -112
  260. package/src/llama.cpp/models/ggml-vocab-starcoder.gguf.out +0 -46
  261. package/src/llama.cpp/pocs/CMakeLists.txt +0 -14
  262. package/src/llama.cpp/pocs/vdot/CMakeLists.txt +0 -9
  263. package/src/llama.cpp/pocs/vdot/q8dot.cpp +0 -173
  264. package/src/llama.cpp/pocs/vdot/vdot.cpp +0 -311
  265. package/src/llama.cpp/prompts/LLM-questions.txt +0 -49
  266. package/src/llama.cpp/prompts/alpaca.txt +0 -1
  267. package/src/llama.cpp/prompts/assistant.txt +0 -31
  268. package/src/llama.cpp/prompts/chat-with-baichuan.txt +0 -4
  269. package/src/llama.cpp/prompts/chat-with-bob.txt +0 -7
  270. package/src/llama.cpp/prompts/chat-with-qwen.txt +0 -1
  271. package/src/llama.cpp/prompts/chat-with-vicuna-v0.txt +0 -7
  272. package/src/llama.cpp/prompts/chat-with-vicuna-v1.txt +0 -7
  273. package/src/llama.cpp/prompts/chat.txt +0 -28
  274. package/src/llama.cpp/prompts/dan-modified.txt +0 -1
  275. package/src/llama.cpp/prompts/dan.txt +0 -1
  276. package/src/llama.cpp/prompts/mnemonics.txt +0 -93
  277. package/src/llama.cpp/prompts/parallel-questions.txt +0 -43
  278. package/src/llama.cpp/prompts/reason-act.txt +0 -18
  279. package/src/llama.cpp/requirements/requirements-all.txt +0 -15
  280. package/src/llama.cpp/requirements/requirements-compare-llama-bench.txt +0 -2
  281. package/src/llama.cpp/requirements/requirements-convert_hf_to_gguf.txt +0 -7
  282. package/src/llama.cpp/requirements/requirements-convert_hf_to_gguf_update.txt +0 -7
  283. package/src/llama.cpp/requirements/requirements-convert_legacy_llama.txt +0 -5
  284. package/src/llama.cpp/requirements/requirements-convert_llama_ggml_to_gguf.txt +0 -1
  285. package/src/llama.cpp/requirements/requirements-convert_lora_to_gguf.txt +0 -4
  286. package/src/llama.cpp/requirements/requirements-gguf_editor_gui.txt +0 -3
  287. package/src/llama.cpp/requirements/requirements-pydantic.txt +0 -3
  288. package/src/llama.cpp/requirements/requirements-test-tokenizer-random.txt +0 -1
  289. package/src/llama.cpp/requirements/requirements-tool_bench.txt +0 -12
  290. package/src/llama.cpp/requirements.txt +0 -13
  291. package/src/llama.cpp/scripts/build-info.sh +0 -30
  292. package/src/llama.cpp/scripts/install-oneapi.bat +0 -19
  293. package/src/llama.cpp/scripts/xxd.cmake +0 -16
  294. package/src/llama.cpp/tests/CMakeLists.txt +0 -177
  295. package/src/llama.cpp/tests/get-model.cpp +0 -21
  296. package/src/llama.cpp/tests/get-model.h +0 -2
  297. package/src/llama.cpp/tests/test-arg-parser.cpp +0 -178
  298. package/src/llama.cpp/tests/test-autorelease.cpp +0 -24
  299. package/src/llama.cpp/tests/test-backend-ops.cpp +0 -4793
  300. package/src/llama.cpp/tests/test-barrier.cpp +0 -94
  301. package/src/llama.cpp/tests/test-c.c +0 -7
  302. package/src/llama.cpp/tests/test-chat-template.cpp +0 -417
  303. package/src/llama.cpp/tests/test-chat.cpp +0 -985
  304. package/src/llama.cpp/tests/test-double-float.cpp +0 -57
  305. package/src/llama.cpp/tests/test-gbnf-validator.cpp +0 -109
  306. package/src/llama.cpp/tests/test-gguf.cpp +0 -1338
  307. package/src/llama.cpp/tests/test-grammar-integration.cpp +0 -1308
  308. package/src/llama.cpp/tests/test-grammar-llguidance.cpp +0 -1201
  309. package/src/llama.cpp/tests/test-grammar-parser.cpp +0 -519
  310. package/src/llama.cpp/tests/test-json-schema-to-grammar.cpp +0 -1304
  311. package/src/llama.cpp/tests/test-llama-grammar.cpp +0 -408
  312. package/src/llama.cpp/tests/test-log.cpp +0 -39
  313. package/src/llama.cpp/tests/test-model-load-cancel.cpp +0 -27
  314. package/src/llama.cpp/tests/test-mtmd-c-api.c +0 -63
  315. package/src/llama.cpp/tests/test-opt.cpp +0 -904
  316. package/src/llama.cpp/tests/test-quantize-fns.cpp +0 -186
  317. package/src/llama.cpp/tests/test-quantize-perf.cpp +0 -365
  318. package/src/llama.cpp/tests/test-quantize-stats.cpp +0 -424
  319. package/src/llama.cpp/tests/test-regex-partial.cpp +0 -288
  320. package/src/llama.cpp/tests/test-rope.cpp +0 -262
  321. package/src/llama.cpp/tests/test-sampling.cpp +0 -399
  322. package/src/llama.cpp/tests/test-tokenizer-0.cpp +0 -312
  323. package/src/llama.cpp/tests/test-tokenizer-1-bpe.cpp +0 -155
  324. package/src/llama.cpp/tests/test-tokenizer-1-spm.cpp +0 -125
  325. package/src/llama.cpp/tools/CMakeLists.txt +0 -39
  326. package/src/llama.cpp/tools/batched-bench/CMakeLists.txt +0 -5
  327. package/src/llama.cpp/tools/batched-bench/batched-bench.cpp +0 -204
  328. package/src/llama.cpp/tools/cvector-generator/CMakeLists.txt +0 -5
  329. package/src/llama.cpp/tools/cvector-generator/completions.txt +0 -582
  330. package/src/llama.cpp/tools/cvector-generator/cvector-generator.cpp +0 -508
  331. package/src/llama.cpp/tools/cvector-generator/mean.hpp +0 -48
  332. package/src/llama.cpp/tools/cvector-generator/negative.txt +0 -4
  333. package/src/llama.cpp/tools/cvector-generator/pca.hpp +0 -315
  334. package/src/llama.cpp/tools/cvector-generator/positive.txt +0 -4
  335. package/src/llama.cpp/tools/export-lora/CMakeLists.txt +0 -5
  336. package/src/llama.cpp/tools/export-lora/export-lora.cpp +0 -434
  337. package/src/llama.cpp/tools/gguf-split/CMakeLists.txt +0 -5
  338. package/src/llama.cpp/tools/gguf-split/gguf-split.cpp +0 -583
  339. package/src/llama.cpp/tools/imatrix/CMakeLists.txt +0 -5
  340. package/src/llama.cpp/tools/imatrix/imatrix.cpp +0 -667
  341. package/src/llama.cpp/tools/llama-bench/CMakeLists.txt +0 -5
  342. package/src/llama.cpp/tools/llama-bench/llama-bench.cpp +0 -2024
  343. package/src/llama.cpp/tools/main/CMakeLists.txt +0 -5
  344. package/src/llama.cpp/tools/main/main.cpp +0 -977
  345. package/src/llama.cpp/tools/mtmd/CMakeLists.txt +0 -58
  346. package/src/llama.cpp/tools/mtmd/clip-impl.h +0 -462
  347. package/src/llama.cpp/tools/mtmd/clip.cpp +0 -4024
  348. package/src/llama.cpp/tools/mtmd/clip.h +0 -101
  349. package/src/llama.cpp/tools/mtmd/deprecation-warning.cpp +0 -22
  350. package/src/llama.cpp/tools/mtmd/miniaudio.h +0 -93468
  351. package/src/llama.cpp/tools/mtmd/mtmd-audio.cpp +0 -855
  352. package/src/llama.cpp/tools/mtmd/mtmd-audio.h +0 -62
  353. package/src/llama.cpp/tools/mtmd/mtmd-cli.cpp +0 -377
  354. package/src/llama.cpp/tools/mtmd/mtmd-helper.cpp +0 -297
  355. package/src/llama.cpp/tools/mtmd/mtmd.cpp +0 -942
  356. package/src/llama.cpp/tools/mtmd/mtmd.h +0 -362
  357. package/src/llama.cpp/tools/mtmd/requirements.txt +0 -5
  358. package/src/llama.cpp/tools/perplexity/CMakeLists.txt +0 -5
  359. package/src/llama.cpp/tools/perplexity/perplexity.cpp +0 -2063
  360. package/src/llama.cpp/tools/quantize/CMakeLists.txt +0 -6
  361. package/src/llama.cpp/tools/quantize/quantize.cpp +0 -519
  362. package/src/llama.cpp/tools/rpc/CMakeLists.txt +0 -4
  363. package/src/llama.cpp/tools/rpc/rpc-server.cpp +0 -322
  364. package/src/llama.cpp/tools/run/CMakeLists.txt +0 -16
  365. package/src/llama.cpp/tools/run/linenoise.cpp/linenoise.cpp +0 -1995
  366. package/src/llama.cpp/tools/run/linenoise.cpp/linenoise.h +0 -137
  367. package/src/llama.cpp/tools/run/run.cpp +0 -1261
  368. package/src/llama.cpp/tools/server/CMakeLists.txt +0 -51
  369. package/src/llama.cpp/tools/server/bench/requirements.txt +0 -2
  370. package/src/llama.cpp/tools/server/httplib.h +0 -10506
  371. package/src/llama.cpp/tools/server/server.cpp +0 -4966
  372. package/src/llama.cpp/tools/server/tests/requirements.txt +0 -8
  373. package/src/llama.cpp/tools/server/utils.hpp +0 -1337
  374. package/src/llama.cpp/tools/tokenize/CMakeLists.txt +0 -5
  375. package/src/llama.cpp/tools/tokenize/tokenize.cpp +0 -416
  376. package/src/llama.cpp/tools/tts/CMakeLists.txt +0 -5
  377. package/src/llama.cpp/tools/tts/tts.cpp +0 -1092

package/src/llama.cpp/.github/workflows/server.yml
@@ -1,237 +0,0 @@
- # Server build and tests
- name: Server
-
- on:
-   workflow_dispatch: # allows manual triggering
-     inputs:
-       sha:
-         description: 'Commit SHA1 to build'
-         required: false
-         type: string
-       slow_tests:
-         description: 'Run slow tests'
-         required: true
-         type: boolean
-   push:
-     branches:
-       - master
-     paths: ['.github/workflows/server.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'tools/server/**.*']
-   pull_request:
-     types: [opened, synchronize, reopened]
-     paths: ['.github/workflows/server.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'tools/server/**.*']
-
- env:
-   LLAMA_LOG_COLORS: 1
-   LLAMA_LOG_PREFIX: 1
-   LLAMA_LOG_TIMESTAMPS: 1
-   LLAMA_LOG_VERBOSITY: 10
-
- concurrency:
-   group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || github.run_id }}
-   cancel-in-progress: true
-
- jobs:
-   server:
-     runs-on: ubuntu-latest
-
-     strategy:
-       matrix:
-         sanitizer: [ADDRESS, UNDEFINED] # THREAD is broken
-         build_type: [RelWithDebInfo]
-         include:
-           - build_type: Release
-             sanitizer: ""
-       fail-fast: false # While -DLLAMA_SANITIZE_THREAD=ON is broken
-
-     steps:
-       - name: Dependencies
-         id: depends
-         run: |
-           sudo apt-get update
-           sudo apt-get -y install \
-             build-essential \
-             xxd \
-             git \
-             cmake \
-             curl \
-             wget \
-             language-pack-en \
-             libcurl4-openssl-dev
-
-       - name: Clone
-         id: checkout
-         uses: actions/checkout@v4
-         with:
-           fetch-depth: 0
-           ref: ${{ github.event.inputs.sha || github.event.pull_request.head.sha || github.sha || github.head_ref || github.ref_name }}
-
-       - name: Python setup
-         id: setup_python
-         uses: actions/setup-python@v5
-         with:
-           python-version: '3.11'
-
-       - name: Tests dependencies
-         id: test_dependencies
-         run: |
-           pip install -r tools/server/tests/requirements.txt
-
-       # Setup nodejs (to be used for verifying bundled index.html)
-       - uses: actions/setup-node@v4
-         with:
-           node-version: '22.11.0'
-
-       - name: WebUI - Install dependencies
-         id: webui_lint
-         run: |
-           cd tools/server/webui
-           npm ci
-
-       - name: WebUI - Check code format
-         id: webui_format
-         run: |
-           git config --global --add safe.directory $(realpath .)
-           cd tools/server/webui
-           git status
-
-           npm run format
-           git status
-           modified_files="$(git status -s)"
-           echo "Modified files: ${modified_files}"
-           if [ -n "${modified_files}" ]; then
-             echo "Files do not follow coding style. To fix: npm run format"
-             echo "${modified_files}"
-             exit 1
-           fi
-
-       - name: Verify bundled index.html
-         id: verify_server_index_html
-         run: |
-           git config --global --add safe.directory $(realpath .)
-           cd tools/server/webui
-           git status
-
-           npm run build
-           git status
-           modified_files="$(git status -s)"
-           echo "Modified files: ${modified_files}"
-           if [ -n "${modified_files}" ]; then
-             echo "Repository is dirty or server/webui is not built as expected"
-             echo "Hint: You may need to follow Web UI build guide in server/README.md"
-             echo "${modified_files}"
-             exit 1
-           fi
-
-       - name: Build (no OpenMP)
-         id: cmake_build_no_openmp
-         if: ${{ matrix.sanitizer == 'THREAD' }}
-         run: |
-           cmake -B build \
-             -DGGML_NATIVE=OFF \
-             -DLLAMA_BUILD_SERVER=ON \
-             -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \
-             -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON \
-             -DGGML_OPENMP=OFF ;
-           cmake --build build --config ${{ matrix.build_type }} -j $(nproc) --target llama-server
-
-       - name: Build (sanitizers)
-         id: cmake_build_sanitizers
-         if: ${{ matrix.sanitizer != '' && matrix.sanitizer != 'THREAD' }}
-         run: |
-           cmake -B build \
-             -DGGML_NATIVE=OFF \
-             -DLLAMA_BUILD_SERVER=ON \
-             -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \
-             -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ;
-           cmake --build build --config ${{ matrix.build_type }} -j $(nproc) --target llama-server
-
-       - name: Build (sanitizers)
-         id: cmake_build
-         if: ${{ matrix.sanitizer == '' }}
-         run: |
-           cmake -B build \
-             -DGGML_NATIVE=OFF \
-             -DLLAMA_BUILD_SERVER=ON \
-             -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} ;
-           cmake --build build --config ${{ matrix.build_type }} -j $(nproc) --target llama-server
-
-       - name: Tests
-         id: server_integration_tests
-         if: ${{ matrix.sanitizer == '' }}
-         env:
-           GITHUB_ACTIONS: "true"
-         run: |
-           cd tools/server/tests
-           ./tests.sh
-
-       - name: Tests (sanitizers)
-         id: server_integration_tests_sanitizers
-         if: ${{ matrix.sanitizer != '' }}
-         run: |
-           cd tools/server/tests
-           LLAMA_SANITIZE=1 ./tests.sh
-
-       - name: Slow tests
-         id: server_integration_tests_slow
-         if: ${{ (github.event.schedule || github.event.inputs.slow_tests == 'true') && matrix.build_type == 'Release' }}
-         run: |
-           cd tools/server/tests
-           SLOW_TESTS=1 ./tests.sh
-
-
-   server-windows:
-     runs-on: windows-2019
-
-     steps:
-       - name: Clone
-         id: checkout
-         uses: actions/checkout@v4
-         with:
-           fetch-depth: 0
-           ref: ${{ github.event.inputs.sha || github.event.pull_request.head.sha || github.sha || github.head_ref || github.ref_name }}
-
-       - name: libCURL
-         id: get_libcurl
-         uses: ./.github/actions/windows-setup-curl
-
-       - name: Build
-         id: cmake_build
-         env:
-           CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
-         run: |
-           cmake -B build -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
-           cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} --target llama-server
-
-       - name: Python setup
-         id: setup_python
-         uses: actions/setup-python@v5
-         with:
-           python-version: '3.11'
-
-       - name: Tests dependencies
-         id: test_dependencies
-         run: |
-           pip install -r tools/server/tests/requirements.txt
-
-       - name: Copy Libcurl
-         id: prepare_libcurl
-         env:
-           CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
-         run: |
-           cp $env:CURL_PATH/bin/libcurl-x64.dll ./build/bin/Release/libcurl-x64.dll
-
-       - name: Tests
-         id: server_integration_tests
-         if: ${{ !matrix.disabled_on_pr || !github.event.pull_request }}
-         run: |
-           cd tools/server/tests
-           $env:PYTHONIOENCODING = ":replace"
-           pytest -v -x -m "not slow"
-
-       - name: Slow tests
-         id: server_integration_tests_slow
-         if: ${{ (github.event.schedule || github.event.inputs.slow_tests == 'true') && matrix.build_type == 'Release' }}
-         run: |
-           cd tools/server/tests
-           $env:SLOW_TESTS = "1"
-           pytest -v -x

package/src/llama.cpp/.github/workflows/winget.yml
@@ -1,42 +0,0 @@
- name: Update Winget Package
-
- on:
-   workflow_dispatch: # allows manual triggering
-   schedule:
-     - cron: '28 5 * * *' # Update every day at 5:28 UTC
-
- jobs:
-   update:
-     name: Update Winget Package
-     runs-on: ubuntu-latest
-
-     steps:
-       - name: Install cargo binstall
-         uses: cargo-bins/cargo-binstall@268643a6b5ea099f5718ee5cd3ff7dc89a5eb49b
-
-       - name: Install komac
-         run: |
-           cargo binstall komac@2.11.2 -y
-
-       - name: Find latest release
-         id: find_latest_release
-         uses: actions/github-script@v6
-         with:
-           script: |
-             const { data: releases } = await github.rest.repos.listReleases({
-               owner: context.repo.owner,
-               repo: context.repo.repo,
-             });
-             console.log("Latest release:", releases[0].tag_name);
-             return releases[0].tag_name;
-
-       - name: Update manifest
-         env:
-           VERSION: ${{ steps.find_latest_release.outputs.result }}
-         run: |
-           echo "Updating manifest..."
-           komac update --version ${{ env.VERSION }} \
-             --urls "https://github.com/ggml-org/llama.cpp/releases/download/${{ env.VERSION }}/llama-${{ env.VERSION }}-bin-win-vulkan-x64.zip" \
-             --token ${{ secrets.WINGET_GITHUB_TOKEN }} \
-             --submit \
-             ggml.llamacpp

package/src/llama.cpp/cmake/arm64-apple-clang.cmake
@@ -1,16 +0,0 @@
- set( CMAKE_SYSTEM_NAME Darwin )
- set( CMAKE_SYSTEM_PROCESSOR arm64 )
-
- set( target arm64-apple-darwin-macho )
-
- set( CMAKE_C_COMPILER clang )
- set( CMAKE_CXX_COMPILER clang++ )
-
- set( CMAKE_C_COMPILER_TARGET ${target} )
- set( CMAKE_CXX_COMPILER_TARGET ${target} )
-
- set( arch_c_flags "-march=armv8.4-a -fvectorize -ffp-model=fast -fno-finite-math-only" )
- set( warn_c_flags "-Wno-format -Wno-unused-variable -Wno-unused-function" )
-
- set( CMAKE_C_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}" )
- set( CMAKE_CXX_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}" )

package/src/llama.cpp/cmake/arm64-windows-llvm.cmake
@@ -1,16 +0,0 @@
- set( CMAKE_SYSTEM_NAME Windows )
- set( CMAKE_SYSTEM_PROCESSOR arm64 )
-
- set( target arm64-pc-windows-msvc )
-
- set( CMAKE_C_COMPILER clang )
- set( CMAKE_CXX_COMPILER clang++ )
-
- set( CMAKE_C_COMPILER_TARGET ${target} )
- set( CMAKE_CXX_COMPILER_TARGET ${target} )
-
- set( arch_c_flags "-march=armv8.7-a -fvectorize -ffp-model=fast -fno-finite-math-only" )
- set( warn_c_flags "-Wno-format -Wno-unused-variable -Wno-unused-function -Wno-gnu-zero-variadic-macro-arguments" )
-
- set( CMAKE_C_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}" )
- set( CMAKE_CXX_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}" )

package/src/llama.cpp/cmake/build-info.cmake
@@ -1,64 +0,0 @@
- set(BUILD_NUMBER 0)
- set(BUILD_COMMIT "unknown")
- set(BUILD_COMPILER "unknown")
- set(BUILD_TARGET "unknown")
-
- # Look for git
- find_package(Git)
- if(NOT Git_FOUND)
-     find_program(GIT_EXECUTABLE NAMES git git.exe)
-     if(GIT_EXECUTABLE)
-         set(Git_FOUND TRUE)
-         message(STATUS "Found Git: ${GIT_EXECUTABLE}")
-     else()
-         message(WARNING "Git not found. Build info will not be accurate.")
-     endif()
- endif()
-
- # Get the commit count and hash
- if(Git_FOUND)
-     execute_process(
-         COMMAND ${GIT_EXECUTABLE} rev-parse --short HEAD
-         WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
-         OUTPUT_VARIABLE HEAD
-         OUTPUT_STRIP_TRAILING_WHITESPACE
-         RESULT_VARIABLE RES
-     )
-     if (RES EQUAL 0)
-         set(BUILD_COMMIT ${HEAD})
-     endif()
-     execute_process(
-         COMMAND ${GIT_EXECUTABLE} rev-list --count HEAD
-         WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
-         OUTPUT_VARIABLE COUNT
-         OUTPUT_STRIP_TRAILING_WHITESPACE
-         RESULT_VARIABLE RES
-     )
-     if (RES EQUAL 0)
-         set(BUILD_NUMBER ${COUNT})
-     endif()
- endif()
-
- if(MSVC)
-     set(BUILD_COMPILER "${CMAKE_C_COMPILER_ID} ${CMAKE_C_COMPILER_VERSION}")
-     if (CMAKE_VS_PLATFORM_NAME)
-         set(BUILD_TARGET ${CMAKE_VS_PLATFORM_NAME})
-     else()
-         set(BUILD_TARGET "${CMAKE_SYSTEM_NAME} ${CMAKE_SYSTEM_PROCESSOR}")
-     endif()
- else()
-     execute_process(
-         COMMAND ${CMAKE_C_COMPILER} --version
-         OUTPUT_VARIABLE OUT
-         OUTPUT_STRIP_TRAILING_WHITESPACE
-     )
-     string(REGEX REPLACE " *\n.*" "" OUT "${OUT}")
-     set(BUILD_COMPILER ${OUT})
-
-     execute_process(
-         COMMAND ${CMAKE_C_COMPILER} -dumpmachine
-         OUTPUT_VARIABLE OUT
-         OUTPUT_STRIP_TRAILING_WHITESPACE
-     )
-     set(BUILD_TARGET ${OUT})
- endif()

package/src/llama.cpp/cmake/common.cmake
@@ -1,35 +0,0 @@
- include("ggml/cmake/common.cmake")
-
- function(llama_add_compile_flags)
-     if (LLAMA_FATAL_WARNINGS)
-         if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-             list(APPEND C_FLAGS -Werror)
-             list(APPEND CXX_FLAGS -Werror)
-         elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-             add_compile_options(/WX)
-         endif()
-     endif()
-
-     if (LLAMA_ALL_WARNINGS)
-         if (NOT MSVC)
-             list(APPEND C_FLAGS -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes
-                                 -Werror=implicit-int -Werror=implicit-function-declaration)
-
-             list(APPEND CXX_FLAGS -Wmissing-declarations -Wmissing-noreturn)
-
-             list(APPEND WARNING_FLAGS -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function)
-
-             list(APPEND C_FLAGS ${WARNING_FLAGS})
-             list(APPEND CXX_FLAGS ${WARNING_FLAGS})
-
-             ggml_get_flags(${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION})
-
-             add_compile_options("$<$<COMPILE_LANGUAGE:C>:${C_FLAGS};${GF_C_FLAGS}>"
-                                 "$<$<COMPILE_LANGUAGE:CXX>:${CXX_FLAGS};${GF_CXX_FLAGS}>")
-         else()
-             # todo : msvc
-             set(C_FLAGS "" PARENT_SCOPE)
-             set(CXX_FLAGS "" PARENT_SCOPE)
-         endif()
-     endif()
- endfunction()

package/src/llama.cpp/cmake/git-vars.cmake
@@ -1,22 +0,0 @@
- find_package(Git)
-
- # the commit's SHA1
- execute_process(COMMAND
-     "${GIT_EXECUTABLE}" describe --match=NeVeRmAtCh --always --abbrev=8
-     WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
-     OUTPUT_VARIABLE GIT_SHA1
-     ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE)
-
- # the date of the commit
- execute_process(COMMAND
-     "${GIT_EXECUTABLE}" log -1 --format=%ad --date=local
-     WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
-     OUTPUT_VARIABLE GIT_DATE
-     ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE)
-
- # the subject of the commit
- execute_process(COMMAND
-     "${GIT_EXECUTABLE}" log -1 --format=%s
-     WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
-     OUTPUT_VARIABLE GIT_COMMIT_SUBJECT
-     ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE)

package/src/llama.cpp/cmake/x64-windows-llvm.cmake
@@ -1,5 +0,0 @@
- set( CMAKE_SYSTEM_NAME Windows )
- set( CMAKE_SYSTEM_PROCESSOR x86_64 )
-
- set( CMAKE_C_COMPILER clang )
- set( CMAKE_CXX_COMPILER clang++ )

package/src/llama.cpp/common/build-info.cpp.in
@@ -1,4 +0,0 @@
- int LLAMA_BUILD_NUMBER = @BUILD_NUMBER@;
- char const *LLAMA_COMMIT = "@BUILD_COMMIT@";
- char const *LLAMA_COMPILER = "@BUILD_COMPILER@";
- char const *LLAMA_BUILD_TARGET = "@BUILD_TARGET@";