@fugood/llama.node 0.6.3 → 1.0.0-beta.1
This diff shows the changes between publicly available package versions as they were published to a supported public registry. It is provided for informational purposes only.
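For reference, a diff like this one can typically be reproduced locally with npm's built-in diff command (npm 7 or later): `npm diff --diff=@fugood/llama.node@0.6.3 --diff=@fugood/llama.node@1.0.0-beta.1`.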
- package/CMakeLists.txt +40 -30
- package/README.md +4 -1
- package/lib/binding.js +41 -29
- package/lib/binding.ts +26 -25
- package/package.json +45 -7
- package/scripts/build.js +47 -0
- package/scripts/llama.cpp.patch +109 -0
- package/src/anyascii.c +22223 -0
- package/src/anyascii.h +42 -0
- package/src/tts_utils.cpp +20 -7
- package/src/tts_utils.h +2 -0
- package/bin/darwin/arm64/llama-node.node +0 -0
- package/bin/darwin/x64/llama-node.node +0 -0
- package/bin/linux/arm64/llama-node.node +0 -0
- package/bin/linux/x64/llama-node.node +0 -0
- package/bin/linux-cuda/arm64/llama-node.node +0 -0
- package/bin/linux-cuda/x64/llama-node.node +0 -0
- package/bin/linux-vulkan/arm64/llama-node.node +0 -0
- package/bin/linux-vulkan/x64/llama-node.node +0 -0
- package/bin/win32/x64/llama-node.node +0 -0
- package/bin/win32/x64/node.lib +0 -0
- package/bin/win32-vulkan/arm64/llama-node.node +0 -0
- package/bin/win32-vulkan/arm64/node.lib +0 -0
- package/bin/win32-vulkan/x64/llama-node.node +0 -0
- package/bin/win32-vulkan/x64/node.lib +0 -0
- package/src/llama.cpp/.github/workflows/build-linux-cross.yml +0 -233
- package/src/llama.cpp/.github/workflows/build.yml +0 -1078
- package/src/llama.cpp/.github/workflows/close-issue.yml +0 -28
- package/src/llama.cpp/.github/workflows/docker.yml +0 -178
- package/src/llama.cpp/.github/workflows/editorconfig.yml +0 -29
- package/src/llama.cpp/.github/workflows/gguf-publish.yml +0 -44
- package/src/llama.cpp/.github/workflows/labeler.yml +0 -17
- package/src/llama.cpp/.github/workflows/python-check-requirements.yml +0 -33
- package/src/llama.cpp/.github/workflows/python-lint.yml +0 -30
- package/src/llama.cpp/.github/workflows/python-type-check.yml +0 -40
- package/src/llama.cpp/.github/workflows/release.yml +0 -739
- package/src/llama.cpp/.github/workflows/server.yml +0 -237
- package/src/llama.cpp/.github/workflows/winget.yml +0 -42
- package/src/llama.cpp/cmake/arm64-apple-clang.cmake +0 -16
- package/src/llama.cpp/cmake/arm64-windows-llvm.cmake +0 -16
- package/src/llama.cpp/cmake/build-info.cmake +0 -64
- package/src/llama.cpp/cmake/common.cmake +0 -35
- package/src/llama.cpp/cmake/git-vars.cmake +0 -22
- package/src/llama.cpp/cmake/x64-windows-llvm.cmake +0 -5
- package/src/llama.cpp/common/build-info.cpp.in +0 -4
- package/src/llama.cpp/docs/build.md +0 -561
- package/src/llama.cpp/examples/CMakeLists.txt +0 -43
- package/src/llama.cpp/examples/batched/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/batched/batched.cpp +0 -246
- package/src/llama.cpp/examples/chat-13B.bat +0 -57
- package/src/llama.cpp/examples/convert-llama2c-to-ggml/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +0 -941
- package/src/llama.cpp/examples/deprecation-warning/deprecation-warning.cpp +0 -35
- package/src/llama.cpp/examples/embedding/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/embedding/embedding.cpp +0 -323
- package/src/llama.cpp/examples/eval-callback/CMakeLists.txt +0 -10
- package/src/llama.cpp/examples/eval-callback/eval-callback.cpp +0 -194
- package/src/llama.cpp/examples/gen-docs/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/gen-docs/gen-docs.cpp +0 -83
- package/src/llama.cpp/examples/gguf/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/gguf/gguf.cpp +0 -265
- package/src/llama.cpp/examples/gguf-hash/CMakeLists.txt +0 -22
- package/src/llama.cpp/examples/gguf-hash/deps/rotate-bits/rotate-bits.h +0 -46
- package/src/llama.cpp/examples/gguf-hash/deps/sha1/sha1.c +0 -295
- package/src/llama.cpp/examples/gguf-hash/deps/sha1/sha1.h +0 -52
- package/src/llama.cpp/examples/gguf-hash/deps/sha256/sha256.c +0 -221
- package/src/llama.cpp/examples/gguf-hash/deps/sha256/sha256.h +0 -24
- package/src/llama.cpp/examples/gguf-hash/deps/xxhash/xxhash.c +0 -42
- package/src/llama.cpp/examples/gguf-hash/deps/xxhash/xxhash.h +0 -7093
- package/src/llama.cpp/examples/gguf-hash/gguf-hash.cpp +0 -694
- package/src/llama.cpp/examples/gritlm/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/gritlm/gritlm.cpp +0 -229
- package/src/llama.cpp/examples/jeopardy/questions.txt +0 -100
- package/src/llama.cpp/examples/llama.android/app/build.gradle.kts +0 -65
- package/src/llama.cpp/examples/llama.android/build.gradle.kts +0 -6
- package/src/llama.cpp/examples/llama.android/llama/build.gradle.kts +0 -71
- package/src/llama.cpp/examples/llama.android/llama/src/main/cpp/CMakeLists.txt +0 -53
- package/src/llama.cpp/examples/llama.android/llama/src/main/cpp/llama-android.cpp +0 -452
- package/src/llama.cpp/examples/llama.android/settings.gradle.kts +0 -18
- package/src/llama.cpp/examples/lookahead/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/lookahead/lookahead.cpp +0 -472
- package/src/llama.cpp/examples/lookup/CMakeLists.txt +0 -23
- package/src/llama.cpp/examples/lookup/lookup-create.cpp +0 -40
- package/src/llama.cpp/examples/lookup/lookup-merge.cpp +0 -47
- package/src/llama.cpp/examples/lookup/lookup-stats.cpp +0 -157
- package/src/llama.cpp/examples/lookup/lookup.cpp +0 -242
- package/src/llama.cpp/examples/parallel/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/parallel/parallel.cpp +0 -492
- package/src/llama.cpp/examples/passkey/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/passkey/passkey.cpp +0 -277
- package/src/llama.cpp/examples/retrieval/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/retrieval/retrieval.cpp +0 -304
- package/src/llama.cpp/examples/save-load-state/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/save-load-state/save-load-state.cpp +0 -246
- package/src/llama.cpp/examples/simple/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/simple/simple.cpp +0 -206
- package/src/llama.cpp/examples/simple-chat/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/simple-chat/simple-chat.cpp +0 -206
- package/src/llama.cpp/examples/simple-cmake-pkg/CMakeLists.txt +0 -11
- package/src/llama.cpp/examples/speculative/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/speculative/speculative.cpp +0 -644
- package/src/llama.cpp/examples/speculative-simple/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/speculative-simple/speculative-simple.cpp +0 -261
- package/src/llama.cpp/examples/sycl/CMakeLists.txt +0 -9
- package/src/llama.cpp/examples/sycl/build.sh +0 -23
- package/src/llama.cpp/examples/sycl/ls-sycl-device.cpp +0 -13
- package/src/llama.cpp/examples/sycl/run-llama2.sh +0 -27
- package/src/llama.cpp/examples/sycl/run-llama3.sh +0 -28
- package/src/llama.cpp/examples/sycl/win-build-sycl.bat +0 -33
- package/src/llama.cpp/examples/sycl/win-run-llama2.bat +0 -9
- package/src/llama.cpp/examples/sycl/win-run-llama3.bat +0 -9
- package/src/llama.cpp/examples/training/CMakeLists.txt +0 -5
- package/src/llama.cpp/examples/training/finetune.cpp +0 -96
- package/src/llama.cpp/ggml/cmake/GitVars.cmake +0 -22
- package/src/llama.cpp/ggml/cmake/common.cmake +0 -26
- package/src/llama.cpp/ggml/src/ggml-alloc.c +0 -1042
- package/src/llama.cpp/ggml/src/ggml-backend-impl.h +0 -255
- package/src/llama.cpp/ggml/src/ggml-backend-reg.cpp +0 -586
- package/src/llama.cpp/ggml/src/ggml-backend.cpp +0 -2008
- package/src/llama.cpp/ggml/src/ggml-blas/CMakeLists.txt +0 -87
- package/src/llama.cpp/ggml/src/ggml-blas/ggml-blas.cpp +0 -517
- package/src/llama.cpp/ggml/src/ggml-cann/CMakeLists.txt +0 -74
- package/src/llama.cpp/ggml/src/ggml-cann/acl_tensor.cpp +0 -179
- package/src/llama.cpp/ggml/src/ggml-cann/acl_tensor.h +0 -258
- package/src/llama.cpp/ggml/src/ggml-cann/aclnn_ops.cpp +0 -2863
- package/src/llama.cpp/ggml/src/ggml-cann/aclnn_ops.h +0 -1110
- package/src/llama.cpp/ggml/src/ggml-cann/common.h +0 -420
- package/src/llama.cpp/ggml/src/ggml-cann/ggml-cann.cpp +0 -2570
- package/src/llama.cpp/ggml/src/ggml-common.h +0 -1857
- package/src/llama.cpp/ggml/src/ggml-cpu/cmake/FindSIMD.cmake +0 -100
- package/src/llama.cpp/ggml/src/ggml-cuda/CMakeLists.txt +0 -184
- package/src/llama.cpp/ggml/src/ggml-cuda/vendors/cuda.h +0 -15
- package/src/llama.cpp/ggml/src/ggml-cuda/vendors/hip.h +0 -243
- package/src/llama.cpp/ggml/src/ggml-cuda/vendors/musa.h +0 -140
- package/src/llama.cpp/ggml/src/ggml-hip/CMakeLists.txt +0 -131
- package/src/llama.cpp/ggml/src/ggml-impl.h +0 -601
- package/src/llama.cpp/ggml/src/ggml-kompute/CMakeLists.txt +0 -166
- package/src/llama.cpp/ggml/src/ggml-kompute/ggml-kompute.cpp +0 -2251
- package/src/llama.cpp/ggml/src/ggml-metal/CMakeLists.txt +0 -120
- package/src/llama.cpp/ggml/src/ggml-metal/ggml-metal-impl.h +0 -622
- package/src/llama.cpp/ggml/src/ggml-musa/CMakeLists.txt +0 -113
- package/src/llama.cpp/ggml/src/ggml-opencl/CMakeLists.txt +0 -96
- package/src/llama.cpp/ggml/src/ggml-opencl/ggml-opencl.cpp +0 -5124
- package/src/llama.cpp/ggml/src/ggml-opt.cpp +0 -1037
- package/src/llama.cpp/ggml/src/ggml-quants.c +0 -5232
- package/src/llama.cpp/ggml/src/ggml-quants.h +0 -100
- package/src/llama.cpp/ggml/src/ggml-rpc/CMakeLists.txt +0 -9
- package/src/llama.cpp/ggml/src/ggml-rpc/ggml-rpc.cpp +0 -1813
- package/src/llama.cpp/ggml/src/ggml-sycl/CMakeLists.txt +0 -189
- package/src/llama.cpp/ggml/src/ggml-sycl/backend.hpp +0 -37
- package/src/llama.cpp/ggml/src/ggml-sycl/binbcast.cpp +0 -239
- package/src/llama.cpp/ggml/src/ggml-sycl/binbcast.hpp +0 -39
- package/src/llama.cpp/ggml/src/ggml-sycl/common.cpp +0 -83
- package/src/llama.cpp/ggml/src/ggml-sycl/common.hpp +0 -493
- package/src/llama.cpp/ggml/src/ggml-sycl/concat.cpp +0 -197
- package/src/llama.cpp/ggml/src/ggml-sycl/concat.hpp +0 -20
- package/src/llama.cpp/ggml/src/ggml-sycl/conv.cpp +0 -100
- package/src/llama.cpp/ggml/src/ggml-sycl/conv.hpp +0 -20
- package/src/llama.cpp/ggml/src/ggml-sycl/convert.cpp +0 -623
- package/src/llama.cpp/ggml/src/ggml-sycl/convert.hpp +0 -34
- package/src/llama.cpp/ggml/src/ggml-sycl/cpy.cpp +0 -701
- package/src/llama.cpp/ggml/src/ggml-sycl/cpy.hpp +0 -11
- package/src/llama.cpp/ggml/src/ggml-sycl/dequantize.hpp +0 -791
- package/src/llama.cpp/ggml/src/ggml-sycl/dmmv.cpp +0 -1160
- package/src/llama.cpp/ggml/src/ggml-sycl/dmmv.hpp +0 -27
- package/src/llama.cpp/ggml/src/ggml-sycl/dpct/helper.hpp +0 -2957
- package/src/llama.cpp/ggml/src/ggml-sycl/element_wise.cpp +0 -1536
- package/src/llama.cpp/ggml/src/ggml-sycl/element_wise.hpp +0 -75
- package/src/llama.cpp/ggml/src/ggml-sycl/gemm.hpp +0 -99
- package/src/llama.cpp/ggml/src/ggml-sycl/getrows.cpp +0 -311
- package/src/llama.cpp/ggml/src/ggml-sycl/getrows.hpp +0 -20
- package/src/llama.cpp/ggml/src/ggml-sycl/ggml-sycl.cpp +0 -4443
- package/src/llama.cpp/ggml/src/ggml-sycl/gla.cpp +0 -105
- package/src/llama.cpp/ggml/src/ggml-sycl/gla.hpp +0 -8
- package/src/llama.cpp/ggml/src/ggml-sycl/im2col.cpp +0 -136
- package/src/llama.cpp/ggml/src/ggml-sycl/im2col.hpp +0 -21
- package/src/llama.cpp/ggml/src/ggml-sycl/mmq.cpp +0 -3030
- package/src/llama.cpp/ggml/src/ggml-sycl/mmq.hpp +0 -33
- package/src/llama.cpp/ggml/src/ggml-sycl/mmvq.cpp +0 -1108
- package/src/llama.cpp/ggml/src/ggml-sycl/mmvq.hpp +0 -27
- package/src/llama.cpp/ggml/src/ggml-sycl/norm.cpp +0 -474
- package/src/llama.cpp/ggml/src/ggml-sycl/norm.hpp +0 -26
- package/src/llama.cpp/ggml/src/ggml-sycl/outprod.cpp +0 -46
- package/src/llama.cpp/ggml/src/ggml-sycl/outprod.hpp +0 -10
- package/src/llama.cpp/ggml/src/ggml-sycl/presets.hpp +0 -74
- package/src/llama.cpp/ggml/src/ggml-sycl/quants.hpp +0 -83
- package/src/llama.cpp/ggml/src/ggml-sycl/rope.cpp +0 -362
- package/src/llama.cpp/ggml/src/ggml-sycl/rope.hpp +0 -20
- package/src/llama.cpp/ggml/src/ggml-sycl/softmax.cpp +0 -264
- package/src/llama.cpp/ggml/src/ggml-sycl/softmax.hpp +0 -20
- package/src/llama.cpp/ggml/src/ggml-sycl/sycl_hw.cpp +0 -13
- package/src/llama.cpp/ggml/src/ggml-sycl/sycl_hw.hpp +0 -23
- package/src/llama.cpp/ggml/src/ggml-sycl/tsembd.cpp +0 -73
- package/src/llama.cpp/ggml/src/ggml-sycl/tsembd.hpp +0 -20
- package/src/llama.cpp/ggml/src/ggml-sycl/vecdotq.hpp +0 -1215
- package/src/llama.cpp/ggml/src/ggml-sycl/wkv.cpp +0 -305
- package/src/llama.cpp/ggml/src/ggml-sycl/wkv.hpp +0 -10
- package/src/llama.cpp/ggml/src/ggml-threading.cpp +0 -12
- package/src/llama.cpp/ggml/src/ggml-threading.h +0 -14
- package/src/llama.cpp/ggml/src/ggml-vulkan/CMakeLists.txt +0 -196
- package/src/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp +0 -10699
- package/src/llama.cpp/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt +0 -39
- package/src/llama.cpp/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +0 -751
- package/src/llama.cpp/ggml/src/ggml.c +0 -6550
- package/src/llama.cpp/ggml/src/gguf.cpp +0 -1330
- package/src/llama.cpp/models/.editorconfig +0 -1
- package/src/llama.cpp/models/ggml-vocab-aquila.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-baichuan.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-bert-bge.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-chameleon.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-chameleon.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-command-r.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-command-r.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-command-r.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-deepseek-coder.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-deepseek-llm.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-deepseek-r1-qwen.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-deepseek-r1-qwen.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-falcon.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-falcon.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-falcon.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-gpt-2.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-gpt-4o.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-gpt-4o.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-gpt-neox.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-llama-bpe.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-llama-spm.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-llama4.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-llama4.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-mpt.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-mpt.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-mpt.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-phi-3.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-phi-3.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-phi-3.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-pixtral.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-pixtral.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-qwen2.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-qwen2.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-qwen2.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-refact.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-refact.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-refact.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-roberta-bpe.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-roberta-bpe.gguf.out +0 -46
- package/src/llama.cpp/models/ggml-vocab-starcoder.gguf +0 -0
- package/src/llama.cpp/models/ggml-vocab-starcoder.gguf.inp +0 -112
- package/src/llama.cpp/models/ggml-vocab-starcoder.gguf.out +0 -46
- package/src/llama.cpp/pocs/CMakeLists.txt +0 -14
- package/src/llama.cpp/pocs/vdot/CMakeLists.txt +0 -9
- package/src/llama.cpp/pocs/vdot/q8dot.cpp +0 -173
- package/src/llama.cpp/pocs/vdot/vdot.cpp +0 -311
- package/src/llama.cpp/prompts/LLM-questions.txt +0 -49
- package/src/llama.cpp/prompts/alpaca.txt +0 -1
- package/src/llama.cpp/prompts/assistant.txt +0 -31
- package/src/llama.cpp/prompts/chat-with-baichuan.txt +0 -4
- package/src/llama.cpp/prompts/chat-with-bob.txt +0 -7
- package/src/llama.cpp/prompts/chat-with-qwen.txt +0 -1
- package/src/llama.cpp/prompts/chat-with-vicuna-v0.txt +0 -7
- package/src/llama.cpp/prompts/chat-with-vicuna-v1.txt +0 -7
- package/src/llama.cpp/prompts/chat.txt +0 -28
- package/src/llama.cpp/prompts/dan-modified.txt +0 -1
- package/src/llama.cpp/prompts/dan.txt +0 -1
- package/src/llama.cpp/prompts/mnemonics.txt +0 -93
- package/src/llama.cpp/prompts/parallel-questions.txt +0 -43
- package/src/llama.cpp/prompts/reason-act.txt +0 -18
- package/src/llama.cpp/requirements/requirements-all.txt +0 -15
- package/src/llama.cpp/requirements/requirements-compare-llama-bench.txt +0 -2
- package/src/llama.cpp/requirements/requirements-convert_hf_to_gguf.txt +0 -7
- package/src/llama.cpp/requirements/requirements-convert_hf_to_gguf_update.txt +0 -7
- package/src/llama.cpp/requirements/requirements-convert_legacy_llama.txt +0 -5
- package/src/llama.cpp/requirements/requirements-convert_llama_ggml_to_gguf.txt +0 -1
- package/src/llama.cpp/requirements/requirements-convert_lora_to_gguf.txt +0 -4
- package/src/llama.cpp/requirements/requirements-gguf_editor_gui.txt +0 -3
- package/src/llama.cpp/requirements/requirements-pydantic.txt +0 -3
- package/src/llama.cpp/requirements/requirements-test-tokenizer-random.txt +0 -1
- package/src/llama.cpp/requirements/requirements-tool_bench.txt +0 -12
- package/src/llama.cpp/requirements.txt +0 -13
- package/src/llama.cpp/scripts/build-info.sh +0 -30
- package/src/llama.cpp/scripts/install-oneapi.bat +0 -19
- package/src/llama.cpp/scripts/xxd.cmake +0 -16
- package/src/llama.cpp/tests/CMakeLists.txt +0 -177
- package/src/llama.cpp/tests/get-model.cpp +0 -21
- package/src/llama.cpp/tests/get-model.h +0 -2
- package/src/llama.cpp/tests/test-arg-parser.cpp +0 -178
- package/src/llama.cpp/tests/test-autorelease.cpp +0 -24
- package/src/llama.cpp/tests/test-backend-ops.cpp +0 -4793
- package/src/llama.cpp/tests/test-barrier.cpp +0 -94
- package/src/llama.cpp/tests/test-c.c +0 -7
- package/src/llama.cpp/tests/test-chat-template.cpp +0 -417
- package/src/llama.cpp/tests/test-chat.cpp +0 -985
- package/src/llama.cpp/tests/test-double-float.cpp +0 -57
- package/src/llama.cpp/tests/test-gbnf-validator.cpp +0 -109
- package/src/llama.cpp/tests/test-gguf.cpp +0 -1338
- package/src/llama.cpp/tests/test-grammar-integration.cpp +0 -1308
- package/src/llama.cpp/tests/test-grammar-llguidance.cpp +0 -1201
- package/src/llama.cpp/tests/test-grammar-parser.cpp +0 -519
- package/src/llama.cpp/tests/test-json-schema-to-grammar.cpp +0 -1304
- package/src/llama.cpp/tests/test-llama-grammar.cpp +0 -408
- package/src/llama.cpp/tests/test-log.cpp +0 -39
- package/src/llama.cpp/tests/test-model-load-cancel.cpp +0 -27
- package/src/llama.cpp/tests/test-mtmd-c-api.c +0 -63
- package/src/llama.cpp/tests/test-opt.cpp +0 -904
- package/src/llama.cpp/tests/test-quantize-fns.cpp +0 -186
- package/src/llama.cpp/tests/test-quantize-perf.cpp +0 -365
- package/src/llama.cpp/tests/test-quantize-stats.cpp +0 -424
- package/src/llama.cpp/tests/test-regex-partial.cpp +0 -288
- package/src/llama.cpp/tests/test-rope.cpp +0 -262
- package/src/llama.cpp/tests/test-sampling.cpp +0 -399
- package/src/llama.cpp/tests/test-tokenizer-0.cpp +0 -312
- package/src/llama.cpp/tests/test-tokenizer-1-bpe.cpp +0 -155
- package/src/llama.cpp/tests/test-tokenizer-1-spm.cpp +0 -125
- package/src/llama.cpp/tools/CMakeLists.txt +0 -39
- package/src/llama.cpp/tools/batched-bench/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/batched-bench/batched-bench.cpp +0 -204
- package/src/llama.cpp/tools/cvector-generator/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/cvector-generator/completions.txt +0 -582
- package/src/llama.cpp/tools/cvector-generator/cvector-generator.cpp +0 -508
- package/src/llama.cpp/tools/cvector-generator/mean.hpp +0 -48
- package/src/llama.cpp/tools/cvector-generator/negative.txt +0 -4
- package/src/llama.cpp/tools/cvector-generator/pca.hpp +0 -315
- package/src/llama.cpp/tools/cvector-generator/positive.txt +0 -4
- package/src/llama.cpp/tools/export-lora/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/export-lora/export-lora.cpp +0 -434
- package/src/llama.cpp/tools/gguf-split/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/gguf-split/gguf-split.cpp +0 -583
- package/src/llama.cpp/tools/imatrix/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/imatrix/imatrix.cpp +0 -667
- package/src/llama.cpp/tools/llama-bench/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/llama-bench/llama-bench.cpp +0 -2024
- package/src/llama.cpp/tools/main/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/main/main.cpp +0 -977
- package/src/llama.cpp/tools/mtmd/CMakeLists.txt +0 -58
- package/src/llama.cpp/tools/mtmd/clip-impl.h +0 -462
- package/src/llama.cpp/tools/mtmd/clip.cpp +0 -4024
- package/src/llama.cpp/tools/mtmd/clip.h +0 -101
- package/src/llama.cpp/tools/mtmd/deprecation-warning.cpp +0 -22
- package/src/llama.cpp/tools/mtmd/miniaudio.h +0 -93468
- package/src/llama.cpp/tools/mtmd/mtmd-audio.cpp +0 -855
- package/src/llama.cpp/tools/mtmd/mtmd-audio.h +0 -62
- package/src/llama.cpp/tools/mtmd/mtmd-cli.cpp +0 -377
- package/src/llama.cpp/tools/mtmd/mtmd-helper.cpp +0 -297
- package/src/llama.cpp/tools/mtmd/mtmd.cpp +0 -942
- package/src/llama.cpp/tools/mtmd/mtmd.h +0 -362
- package/src/llama.cpp/tools/mtmd/requirements.txt +0 -5
- package/src/llama.cpp/tools/perplexity/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/perplexity/perplexity.cpp +0 -2063
- package/src/llama.cpp/tools/quantize/CMakeLists.txt +0 -6
- package/src/llama.cpp/tools/quantize/quantize.cpp +0 -519
- package/src/llama.cpp/tools/rpc/CMakeLists.txt +0 -4
- package/src/llama.cpp/tools/rpc/rpc-server.cpp +0 -322
- package/src/llama.cpp/tools/run/CMakeLists.txt +0 -16
- package/src/llama.cpp/tools/run/linenoise.cpp/linenoise.cpp +0 -1995
- package/src/llama.cpp/tools/run/linenoise.cpp/linenoise.h +0 -137
- package/src/llama.cpp/tools/run/run.cpp +0 -1261
- package/src/llama.cpp/tools/server/CMakeLists.txt +0 -51
- package/src/llama.cpp/tools/server/bench/requirements.txt +0 -2
- package/src/llama.cpp/tools/server/httplib.h +0 -10506
- package/src/llama.cpp/tools/server/server.cpp +0 -4966
- package/src/llama.cpp/tools/server/tests/requirements.txt +0 -8
- package/src/llama.cpp/tools/server/utils.hpp +0 -1337
- package/src/llama.cpp/tools/tokenize/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/tokenize/tokenize.cpp +0 -416
- package/src/llama.cpp/tools/tts/CMakeLists.txt +0 -5
- package/src/llama.cpp/tools/tts/tts.cpp +0 -1092
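The `bin/` entries above ship prebuilt `llama-node.node` binaries keyed by platform, optional GPU backend, and CPU architecture. As a rough sketch only — this is not the package's actual `lib/binding.js`, and the `LLAMA_NODE_VARIANT` variable is hypothetical — a loader for that directory layout could look like:

```js
// Hypothetical loader sketch for the bin/<platform>[-<backend>]/<arch>/ layout
// shown in the file list above. Not the package's real code.
const path = require('path');

function bindingPath(backend) {
  // e.g. 'darwin/arm64', 'linux-cuda/x64', 'win32-vulkan/arm64'
  const dir = backend ? `${process.platform}-${backend}` : process.platform;
  return path.join(__dirname, '..', 'bin', dir, process.arch, 'llama-node.node');
}

function loadBinding(backend) {
  try {
    return require(bindingPath(backend));
  } catch (err) {
    // Fall back to the plain CPU build when the requested GPU variant
    // does not exist for this platform/arch combination.
    if (backend) return require(bindingPath(undefined));
    throw err;
  }
}

// LLAMA_NODE_VARIANT is illustrative only (e.g. 'cuda' or 'vulkan').
module.exports = loadBinding(process.env.LLAMA_NODE_VARIANT);
```

The largest single removal in this release is the vendored llama.cpp CI configuration; the deleted release workflow is shown in full below.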
package/src/llama.cpp/.github/workflows/release.yml

@@ -1,739 +0,0 @@
-name: Release
-
-on:
-  workflow_dispatch: # allows manual triggering
-    inputs:
-      create_release:
-        description: 'Create new release'
-        required: true
-        type: boolean
-  push:
-    branches:
-      - master
-    paths: ['.github/workflows/release.yml', '**/CMakeLists.txt', '**/.cmake', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.cuh', '**/*.swift', '**/*.m', '**/*.metal', '**/*.comp']
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
-  CMAKE_ARGS: "-DLLAMA_BUILD_EXAMPLES=OFF -DLLAMA_BUILD_TESTS=OFF -DLLAMA_BUILD_TOOLS=ON -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON"
-
-jobs:
-  macOS-arm64:
-    runs-on: macos-14
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: macOS-latest-cmake-arm64
-          evict-old-files: 1d
-
-      - name: Dependencies
-        id: depends
-        continue-on-error: true
-        run: |
-          brew update
-          brew install curl
-
-      - name: Build
-        id: cmake_build
-        run: |
-          sysctl -a
-          cmake -B build \
-            -DCMAKE_BUILD_RPATH="@loader_path" \
-            -DLLAMA_FATAL_WARNINGS=ON \
-            -DGGML_METAL_USE_BF16=ON \
-            -DGGML_METAL_EMBED_LIBRARY=ON \
-            -DGGML_RPC=ON \
-            ${{ env.CMAKE_ARGS }}
-          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)
-
-      - name: Determine tag name
-        id: tag
-        uses: ./.github/actions/get-tag-name
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          cp LICENSE ./build/bin/
-          zip -r llama-${{ steps.tag.outputs.name }}-bin-macos-arm64.zip ./build/bin/*
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-${{ steps.tag.outputs.name }}-bin-macos-arm64.zip
-          name: llama-bin-macos-arm64.zip
-
-  macOS-x64:
-    runs-on: macos-13
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: macOS-latest-cmake-x64
-          evict-old-files: 1d
-
-      - name: Dependencies
-        id: depends
-        continue-on-error: true
-        run: |
-          brew update
-          brew install curl
-
-      - name: Build
-        id: cmake_build
-        run: |
-          sysctl -a
-          # Metal is disabled due to intermittent failures with Github runners not having a GPU:
-          # https://github.com/ggml-org/llama.cpp/actions/runs/8635935781/job/23674807267#step:5:2313
-          cmake -B build \
-            -DCMAKE_BUILD_RPATH="@loader_path" \
-            -DLLAMA_FATAL_WARNINGS=ON \
-            -DGGML_METAL=OFF \
-            -DGGML_RPC=ON
-          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)
-
-      - name: Determine tag name
-        id: tag
-        uses: ./.github/actions/get-tag-name
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          cp LICENSE ./build/bin/
-          zip -r llama-${{ steps.tag.outputs.name }}-bin-macos-x64.zip ./build/bin/*
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-${{ steps.tag.outputs.name }}-bin-macos-x64.zip
-          name: llama-bin-macos-x64.zip
-
-  ubuntu-22-cpu:
-    strategy:
-      matrix:
-        include:
-          - build: 'x64'
-            os: ubuntu-22.04
-          - build: 'arm64'
-            os: ubuntu-22.04-arm
-
-    runs-on: ${{ matrix.os }}
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: ubuntu-cpu-cmake
-          evict-old-files: 1d
-
-      - name: Dependencies
-        id: depends
-        run: |
-          sudo apt-get update
-          sudo apt-get install build-essential libcurl4-openssl-dev
-
-      - name: Build
-        id: cmake_build
-        run: |
-          cmake -B build \
-            -DLLAMA_FATAL_WARNINGS=ON \
-            ${{ env.CMAKE_ARGS }}
-          cmake --build build --config Release -j $(nproc)
-
-      - name: Determine tag name
-        id: tag
-        uses: ./.github/actions/get-tag-name
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          cp LICENSE ./build/bin/
-          zip -r llama-${{ steps.tag.outputs.name }}-bin-ubuntu-${{ matrix.build }}.zip ./build/bin/*
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-${{ steps.tag.outputs.name }}-bin-ubuntu-${{ matrix.build }}.zip
-          name: llama-bin-ubuntu-${{ matrix.build }}.zip
-
-  ubuntu-22-vulkan:
-    runs-on: ubuntu-22.04
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: ubuntu-22-cmake-vulkan
-          evict-old-files: 1d
-
-      - name: Dependencies
-        id: depends
-        run: |
-          wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
-          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
-          sudo apt-get update -y
-          sudo apt-get install -y build-essential mesa-vulkan-drivers vulkan-sdk libcurl4-openssl-dev
-
-      - name: Build
-        id: cmake_build
-        run: |
-          cmake -B build \
-            -DGGML_VULKAN=ON \
-            ${{ env.CMAKE_ARGS }}
-          cmake --build build --config Release -j $(nproc)
-
-      - name: Determine tag name
-        id: tag
-        uses: ./.github/actions/get-tag-name
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          cp LICENSE ./build/bin/
-          zip -r llama-${{ steps.tag.outputs.name }}-bin-ubuntu-vulkan-x64.zip ./build/bin/*
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-${{ steps.tag.outputs.name }}-bin-ubuntu-vulkan-x64.zip
-          name: llama-bin-ubuntu-vulkan-x64.zip
-
-  windows-cpu:
-    runs-on: windows-latest
-
-    strategy:
-      matrix:
-        include:
-          - arch: 'x64'
-          - arch: 'arm64'
-
-    steps:
-      - name: Clone
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: windows-latest-cmake-cpu-${{ matrix.arch }}
-          variant: ccache
-          evict-old-files: 1d
-
-      - name: Install Ninja
-        run: |
-          choco install ninja
-
-      - name: libCURL
-        id: get_libcurl
-        uses: ./.github/actions/windows-setup-curl
-        with:
-          architecture: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}
-
-      - name: Build
-        env:
-          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
-        run: |
-          cmake -S . -B build -G "Ninja Multi-Config" `
-            -D CMAKE_TOOLCHAIN_FILE=cmake/${{ matrix.arch }}-windows-llvm.cmake `
-            -DGGML_NATIVE=OFF `
-            -DGGML_BACKEND_DL=ON `
-            -DGGML_CPU_ALL_VARIANTS=ON `
-            -DGGML_OPENMP=OFF `
-            -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include" `
-            ${{ env.CMAKE_ARGS }}
-          cmake --build build --config Release
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        env:
-          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
-        run: |
-          Copy-Item $env:CURL_PATH\bin\libcurl-${{ matrix.arch }}.dll .\build\bin\Release\
-          7z a llama-bin-win-cpu-${{ matrix.arch }}.zip .\build\bin\Release\*
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-bin-win-cpu-${{ matrix.arch }}.zip
-          name: llama-bin-win-cpu-${{ matrix.arch }}.zip
-
-  windows:
-    runs-on: windows-latest
-
-    env:
-      OPENBLAS_VERSION: 0.3.23
-      VULKAN_VERSION: 1.4.309.0
-
-    strategy:
-      matrix:
-        include:
-          - backend: 'vulkan'
-            arch: 'x64'
-            defines: '-DGGML_VULKAN=ON'
-            target: 'ggml-vulkan'
-          - backend: 'opencl-adreno'
-            arch: 'arm64'
-            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" -DGGML_OPENCL=ON -DGGML_OPENCL_USE_ADRENO_KERNELS=ON'
-            target: 'ggml-opencl'
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: windows-latest-cmake-${{ matrix.backend }}-${{ matrix.arch }}
-          variant: ccache
-          evict-old-files: 1d
-
-      - name: Install Vulkan SDK
-        id: get_vulkan
-        if: ${{ matrix.backend == 'vulkan' }}
-        run: |
-          curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/VulkanSDK-${env:VULKAN_VERSION}-Installer.exe"
-          & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install
-          Add-Content $env:GITHUB_ENV "VULKAN_SDK=C:\VulkanSDK\${env:VULKAN_VERSION}"
-          Add-Content $env:GITHUB_PATH "C:\VulkanSDK\${env:VULKAN_VERSION}\bin"
-
-      - name: Install Ninja
-        id: install_ninja
-        run: |
-          choco install ninja
-
-      - name: Install OpenCL Headers and Libs
-        id: install_opencl
-        if: ${{ matrix.backend == 'opencl-adreno' && matrix.arch == 'arm64' }}
-        run: |
-          git clone https://github.com/KhronosGroup/OpenCL-Headers
-          cd OpenCL-Headers
-          cmake -B build `
-            -DBUILD_TESTING=OFF `
-            -DOPENCL_HEADERS_BUILD_TESTING=OFF `
-            -DOPENCL_HEADERS_BUILD_CXX_TESTS=OFF `
-            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
-          cmake --build build --target install
-          git clone https://github.com/KhronosGroup/OpenCL-ICD-Loader
-          cd OpenCL-ICD-Loader
-          cmake -B build-arm64-release `
-            -A arm64 `
-            -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" `
-            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
-          cmake --build build-arm64-release --target install --config release
-
-      - name: Build
-        id: cmake_build
-        run: |
-          cmake -S . -B build ${{ matrix.defines }} -DGGML_NATIVE=OFF -DGGML_CPU=OFF -DGGML_BACKEND_DL=ON -DLLAMA_CURL=OFF
-          cmake --build build --config Release --target ${{ matrix.target }}
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          7z a llama-bin-win-${{ matrix.backend }}-${{ matrix.arch }}.zip .\build\bin\Release\${{ matrix.target }}.dll
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-bin-win-${{ matrix.backend }}-${{ matrix.arch }}.zip
-          name: llama-bin-win-${{ matrix.backend }}-${{ matrix.arch }}.zip
-
-  windows-cuda:
-    runs-on: windows-2019
-
-    strategy:
-      matrix:
-        cuda: ['12.4', '11.7']
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Install ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: windows-cuda-${{ matrix.cuda }}
-          variant: ccache
-          evict-old-files: 1d
-
-      - name: Install Cuda Toolkit
-        uses: ./.github/actions/windows-setup-cuda
-        with:
-          cuda_version: ${{ matrix.cuda }}
-
-      - name: Install Ninja
-        id: install_ninja
-        run: |
-          choco install ninja
-
-      - name: Build
-        id: cmake_build
-        shell: cmd
-        run: |
-          call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
-          cmake -S . -B build -G "Ninja Multi-Config" ^
-            -DGGML_BACKEND_DL=ON ^
-            -DGGML_NATIVE=OFF ^
-            -DGGML_CPU=OFF ^
-            -DGGML_CUDA=ON ^
-            -DLLAMA_CURL=OFF
-          set /A NINJA_JOBS=%NUMBER_OF_PROCESSORS%-1
-          cmake --build build --config Release -j %NINJA_JOBS% --target ggml-cuda
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          7z a llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip .\build\bin\Release\ggml-cuda.dll
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
-          name: llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
-
-      - name: Copy and pack Cuda runtime
-        run: |
-          echo "Cuda install location: ${{ env.CUDA_PATH }}"
-          $dst='.\build\bin\cudart\'
-          robocopy "${{env.CUDA_PATH}}\bin" $dst cudart64_*.dll cublas64_*.dll cublasLt64_*.dll
-          robocopy "${{env.CUDA_PATH}}\lib" $dst cudart64_*.dll cublas64_*.dll cublasLt64_*.dll
-          7z a cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip $dst\*
-
-      - name: Upload Cuda runtime
-        uses: actions/upload-artifact@v4
-        with:
-          path: cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
-          name: cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
-
-  windows-sycl:
-    runs-on: windows-latest
-
-    defaults:
-      run:
-        shell: bash
-
-    env:
-      WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/7cd9bba0-7aab-4e30-b3ae-2221006a4a05/intel-oneapi-base-toolkit-2025.1.1.34_offline.exe
-      WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel:intel.oneapi.win.dnnl:intel.oneapi.win.tbb.devel
-      ONEAPI_ROOT: "C:/Program Files (x86)/Intel/oneAPI"
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: windows-latest-cmake-sycl
-          variant: ccache
-          evict-old-files: 1d
-
-      - name: Install
-        run: |
-          scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL
-
-      - name: Build
-        id: cmake_build
-        shell: cmd
-        run: |
-          call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force
-          cmake -G "Ninja" -B build ^
-            -DCMAKE_C_COMPILER=cl -DCMAKE_CXX_COMPILER=icx ^
-            -DCMAKE_BUILD_TYPE=Release ^
-            -DGGML_BACKEND_DL=ON -DBUILD_SHARED_LIBS=ON ^
-            -DGGML_CPU=OFF -DGGML_SYCL=ON ^
-            -DLLAMA_CURL=OFF
-          cmake --build build --target ggml-sycl -j
-
-      - name: Build the release package
-        id: pack_artifacts
-        run: |
-          echo "cp oneAPI running time dll files in ${{ env.ONEAPI_ROOT }} to ./build/bin"
-
-          cp "${{ env.ONEAPI_ROOT }}/mkl/latest/bin/mkl_sycl_blas.5.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/mkl/latest/bin/mkl_core.2.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/mkl/latest/bin/mkl_tbb_thread.2.dll" ./build/bin
-
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_adapter_level_zero.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_adapter_opencl.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_loader.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_win_proxy_loader.dll" ./build/bin
-
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/sycl8.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/svml_dispmd.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/libmmd.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/libiomp5md.dll" ./build/bin
-
-          cp "${{ env.ONEAPI_ROOT }}/dnnl/latest/bin/dnnl.dll" ./build/bin
-          cp "${{ env.ONEAPI_ROOT }}/tbb/latest/bin/tbb12.dll" ./build/bin
-
-          echo "cp oneAPI running time dll files to ./build/bin done"
-          7z a llama-bin-win-sycl-x64.zip ./build/bin/*
-
-      - name: Upload the release package
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-bin-win-sycl-x64.zip
-          name: llama-bin-win-sycl-x64.zip
-
-  windows-hip:
-    runs-on: windows-latest
-
-    strategy:
-      matrix:
-        include:
-          - name: "radeon"
-            gpu_targets: "gfx1100;gfx1101;gfx1102;gfx1030;gfx1031;gfx1032"
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Clone rocWMMA repository
-        id: clone_rocwmma
-        run: |
-          git clone https://github.com/rocm/rocwmma --branch rocm-6.2.4 --depth 1
-
-      - name: ccache
-        uses: hendrikmuhs/ccache-action@v1.2.16
-        with:
-          key: windows-latest-cmake-hip-${{ matrix.name }}-x64
-          evict-old-files: 1d
-
-      - name: Install
-        id: depends
-        run: |
-          $ErrorActionPreference = "Stop"
-          write-host "Downloading AMD HIP SDK Installer"
-          Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q3-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
-          write-host "Installing AMD HIP SDK"
-          Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait
-          write-host "Completed AMD HIP SDK installation"
-
-      - name: Verify ROCm
-        id: verify
-        run: |
-          & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version
-
-      - name: Build
-        id: cmake_build
-        run: |
-          $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
-          $env:CMAKE_PREFIX_PATH="${env:HIP_PATH}"
-          cmake -G "Unix Makefiles" -B build -S . `
-            -DCMAKE_C_COMPILER="${env:HIP_PATH}\bin\clang.exe" `
-            -DCMAKE_CXX_COMPILER="${env:HIP_PATH}\bin\clang++.exe" `
-            -DCMAKE_CXX_FLAGS="-I$($PWD.Path.Replace('\', '/'))/rocwmma/library/include/ -Wno-ignored-attributes -Wno-nested-anon-types" `
-            -DCMAKE_BUILD_TYPE=Release `
-            -DGGML_BACKEND_DL=ON `
-            -DGGML_NATIVE=OFF `
-            -DGGML_CPU=OFF `
-            -DAMDGPU_TARGETS="${{ matrix.gpu_targets }}" `
-            -DGGML_HIP_ROCWMMA_FATTN=ON `
-            -DGGML_HIP=ON `
-            -DLLAMA_CURL=OFF
-          cmake --build build --target ggml-hip -j ${env:NUMBER_OF_PROCESSORS}
-          md "build\bin\rocblas\library\"
-          cp "${env:HIP_PATH}\bin\hipblas.dll" "build\bin\"
-          cp "${env:HIP_PATH}\bin\rocblas.dll" "build\bin\"
-          cp "${env:HIP_PATH}\bin\rocblas\library\*" "build\bin\rocblas\library\"
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          7z a llama-bin-win-hip-${{ matrix.name }}-x64.zip .\build\bin\*
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-bin-win-hip-${{ matrix.name }}-x64.zip
-          name: llama-bin-win-hip-${{ matrix.name }}-x64.zip
-
-  ios-xcode-build:
-    runs-on: macos-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Build
-        id: cmake_build
-        run: |
-          sysctl -a
-          cmake -B build -G Xcode \
-            -DGGML_METAL_USE_BF16=ON \
-            -DGGML_METAL_EMBED_LIBRARY=ON \
-            -DLLAMA_CURL=OFF \
-            -DLLAMA_BUILD_EXAMPLES=OFF \
-            -DLLAMA_BUILD_TOOLS=OFF \
-            -DLLAMA_BUILD_TESTS=OFF \
-            -DLLAMA_BUILD_SERVER=OFF \
-            -DCMAKE_SYSTEM_NAME=iOS \
-            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
-            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
-          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO
-
-      - name: xcodebuild for swift package
-        id: xcodebuild
-        run: |
-          ./build-xcframework.sh
-
-      - name: Build Xcode project
-        run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' FRAMEWORK_FOLDER_PATH=./build-ios build
-
-      - name: Determine tag name
-        id: tag
-        uses: ./.github/actions/get-tag-name
-
-      - name: Pack artifacts
-        id: pack_artifacts
-        run: |
-          zip --symlinks -r llama-${{ steps.tag.outputs.name }}-xcframework.zip build-apple/llama.xcframework
-
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          path: llama-${{ steps.tag.outputs.name }}-xcframework.zip
-          name: llama-${{ steps.tag.outputs.name }}-xcframework
-
-  release:
-    if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }}
-
-    # Fine-grant permission
-    # https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#modifying-the-permissions-for-the-github_token
-    permissions:
-      contents: write # for creating release
-
-    runs-on: ubuntu-latest
-
-    needs:
-      - windows
-      - windows-cpu
-      - windows-cuda
-      - windows-sycl
-      - windows-hip
-      - ubuntu-22-cpu
-      - ubuntu-22-vulkan
-      - macOS-arm64
-      - macOS-x64
-      - ios-xcode-build
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Determine tag name
-        id: tag
-        uses: ./.github/actions/get-tag-name
-
-      - name: Download artifacts
-        id: download-artifact
-        uses: actions/download-artifact@v4
-        with:
-          path: ./artifact
-          merge-multiple: true
-
-      - name: Move artifacts
-        id: move_artifacts
-        run: |
-          mkdir -p release
-
-          echo "Adding CPU backend files to existing zips..."
-          for arch in x64 arm64; do
-            cpu_zip="artifact/llama-bin-win-cpu-${arch}.zip"
-            temp_dir=$(mktemp -d)
-            echo "Extracting CPU backend for $arch..."
-            unzip "$cpu_zip" -d "$temp_dir"
-
-            echo "Adding CPU files to $arch zips..."
-            for target_zip in artifact/llama-bin-win-*-${arch}.zip; do
-              if [[ "$target_zip" == "$cpu_zip" ]]; then
-                continue
-              fi
-              echo "Adding CPU backend to $(basename "$target_zip")"
-              realpath_target_zip=$(realpath "$target_zip")
-              (cd "$temp_dir" && zip -r "$realpath_target_zip" .)
-            done
-
-            rm -rf "$temp_dir"
-          done
-
-          echo "Renaming and moving zips to release..."
-          for zip_file in artifact/llama-bin-win-*.zip; do
-            base_name=$(basename "$zip_file" .zip)
-            zip_name="llama-${{ steps.tag.outputs.name }}-${base_name#llama-}.zip"
-            echo "Moving $zip_file to release/$zip_name"
-            mv "$zip_file" "release/$zip_name"
-          done
-
-          echo "Moving other artifacts..."
-          mv -v artifact/*.zip release
-
-      - name: Create release
-        id: create_release
-        uses: ggml-org/action-create-release@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          tag_name: ${{ steps.tag.outputs.name }}
-
-      - name: Upload release
-        id: upload_release
-        uses: actions/github-script@v3
-        with:
-          github-token: ${{secrets.GITHUB_TOKEN}}
-          script: |
-            const path = require('path');
-            const fs = require('fs');
-            const release_id = '${{ steps.create_release.outputs.id }}';
-            for (let file of await fs.readdirSync('./release')) {
-              if (path.extname(file) === '.zip') {
-                console.log('uploadReleaseAsset', file);
-                await github.repos.uploadReleaseAsset({
-                  owner: context.repo.owner,
-                  repo: context.repo.repo,
-                  release_id: release_id,
-                  name: file,
-                  data: await fs.readFileSync(`./release/${file}`)
-                });
-              }
-            }