whispercpp 1.3.0 → 1.3.2
This diff covers the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/.gitignore +6 -0
- data/LICENSE +1 -1
- data/README.md +216 -424
- data/Rakefile +79 -11
- data/ext/.gitignore +11 -0
- data/ext/dependencies.rb +61 -0
- data/ext/extconf.rb +18 -26
- data/ext/options.rb +221 -0
- data/ext/ruby_whisper.c +159 -0
- data/ext/ruby_whisper.h +27 -2
- data/ext/ruby_whisper_context.c +641 -0
- data/ext/ruby_whisper_error.c +52 -0
- data/ext/ruby_whisper_model.c +232 -0
- data/ext/ruby_whisper_params.c +1301 -0
- data/ext/ruby_whisper_segment.c +143 -0
- data/ext/ruby_whisper_transcribe.cpp +87 -0
- data/ext/ruby_whisper_vad_params.c +288 -0
- data/ext/sources/.dockerignore +3 -0
- data/ext/sources/.github/workflows/bindings-ruby.yml +21 -0
- data/ext/sources/CMakeGraphVizOptions.cmake +8 -0
- data/ext/sources/CMakeLists.txt +251 -0
- data/ext/sources/bindings/javascript/CMakeLists.txt +41 -0
- data/ext/sources/bindings/javascript/emscripten.cpp +93 -0
- data/ext/sources/bindings/javascript/libwhisper.worker.js +1 -0
- data/ext/sources/bindings/javascript/package-tmpl.json +26 -0
- data/ext/sources/bindings/javascript/package.json +26 -0
- data/ext/sources/bindings/javascript/whisper.js +19 -0
- data/ext/sources/build-xcframework.sh +547 -0
- data/ext/sources/ci/run.sh +336 -0
- data/ext/sources/close-issue.yml +28 -0
- data/ext/sources/cmake/DefaultTargetOptions.cmake +16 -0
- data/ext/sources/cmake/FindFFmpeg.cmake +163 -0
- data/ext/sources/cmake/build-info.cmake +60 -0
- data/ext/sources/cmake/git-vars.cmake +22 -0
- data/ext/sources/cmake/whisper-config.cmake.in +65 -0
- data/ext/sources/cmake/whisper.pc.in +10 -0
- data/ext/sources/examples/CMakeLists.txt +124 -0
- data/ext/sources/examples/addon.node/CMakeLists.txt +31 -0
- data/ext/sources/examples/addon.node/__test__/whisper.spec.js +37 -0
- data/ext/sources/examples/addon.node/addon.cpp +438 -0
- data/ext/sources/examples/addon.node/index.js +54 -0
- data/ext/sources/examples/addon.node/package.json +16 -0
- data/ext/sources/examples/bench/CMakeLists.txt +8 -0
- data/ext/sources/examples/bench/bench.cpp +175 -0
- data/ext/sources/examples/bench.wasm/CMakeLists.txt +49 -0
- data/ext/sources/examples/bench.wasm/emscripten.cpp +87 -0
- data/ext/sources/examples/bench.wasm/index-tmpl.html +284 -0
- data/ext/sources/examples/cli/CMakeLists.txt +8 -0
- data/ext/sources/examples/cli/cli.cpp +1294 -0
- data/ext/sources/examples/coi-serviceworker.js +146 -0
- data/ext/sources/examples/command/CMakeLists.txt +10 -0
- data/ext/sources/examples/command/command.cpp +776 -0
- data/ext/sources/examples/command/commands.txt +9 -0
- data/ext/sources/examples/command.wasm/CMakeLists.txt +50 -0
- data/ext/sources/examples/command.wasm/emscripten.cpp +327 -0
- data/ext/sources/examples/command.wasm/index-tmpl.html +414 -0
- data/ext/sources/examples/common-ggml.cpp +238 -0
- data/ext/sources/examples/common-ggml.h +18 -0
- data/ext/sources/examples/common-sdl.cpp +227 -0
- data/ext/sources/examples/common-sdl.h +49 -0
- data/ext/sources/examples/common-whisper.cpp +168 -0
- data/ext/sources/examples/common-whisper.h +24 -0
- data/ext/sources/examples/common.cpp +675 -0
- data/ext/sources/examples/common.h +322 -0
- data/ext/sources/examples/deprecation-warning/CMakeLists.txt +6 -0
- data/ext/sources/examples/deprecation-warning/deprecation-warning.cpp +38 -0
- data/ext/sources/examples/ffmpeg-transcode.cpp +368 -0
- data/ext/sources/examples/generate-karaoke.sh +57 -0
- data/ext/sources/examples/grammar-parser.cpp +423 -0
- data/ext/sources/examples/grammar-parser.h +29 -0
- data/ext/sources/examples/helpers.js +191 -0
- data/ext/sources/examples/json.hpp +24596 -0
- data/ext/sources/examples/livestream.sh +112 -0
- data/ext/sources/examples/lsp/CMakeLists.txt +9 -0
- data/ext/sources/examples/lsp/lsp.cpp +467 -0
- data/ext/sources/examples/lsp/whisper.vim +362 -0
- data/ext/sources/examples/miniaudio.h +93468 -0
- data/ext/sources/examples/python/test_whisper_processor.py +7 -0
- data/ext/sources/examples/python/whisper_processor.py +54 -0
- data/ext/sources/examples/quantize/CMakeLists.txt +6 -0
- data/ext/sources/examples/quantize/quantize.cpp +223 -0
- data/ext/sources/examples/server/CMakeLists.txt +12 -0
- data/ext/sources/examples/server/bench.js +29 -0
- data/ext/sources/examples/server/httplib.h +10497 -0
- data/ext/sources/examples/server/server.cpp +1091 -0
- data/ext/sources/examples/server.py +115 -0
- data/ext/sources/examples/stb_vorbis.c +5584 -0
- data/ext/sources/examples/stream/CMakeLists.txt +10 -0
- data/ext/sources/examples/stream/stream.cpp +429 -0
- data/ext/sources/examples/stream.wasm/CMakeLists.txt +49 -0
- data/ext/sources/examples/stream.wasm/emscripten.cpp +216 -0
- data/ext/sources/examples/stream.wasm/index-tmpl.html +414 -0
- data/ext/sources/examples/sycl/CMakeLists.txt +9 -0
- data/ext/sources/examples/sycl/build.sh +22 -0
- data/ext/sources/examples/sycl/ls-sycl-device.cpp +11 -0
- data/ext/sources/examples/sycl/run-whisper.sh +17 -0
- data/ext/sources/examples/talk-llama/CMakeLists.txt +40 -0
- data/ext/sources/examples/talk-llama/eleven-labs.py +80 -0
- data/ext/sources/examples/talk-llama/llama-adapter.cpp +388 -0
- data/ext/sources/examples/talk-llama/llama-adapter.h +76 -0
- data/ext/sources/examples/talk-llama/llama-arch.cpp +1746 -0
- data/ext/sources/examples/talk-llama/llama-arch.h +437 -0
- data/ext/sources/examples/talk-llama/llama-batch.cpp +374 -0
- data/ext/sources/examples/talk-llama/llama-batch.h +89 -0
- data/ext/sources/examples/talk-llama/llama-chat.cpp +663 -0
- data/ext/sources/examples/talk-llama/llama-chat.h +58 -0
- data/ext/sources/examples/talk-llama/llama-context.cpp +2676 -0
- data/ext/sources/examples/talk-llama/llama-context.h +276 -0
- data/ext/sources/examples/talk-llama/llama-cparams.cpp +5 -0
- data/ext/sources/examples/talk-llama/llama-cparams.h +41 -0
- data/ext/sources/examples/talk-llama/llama-grammar.cpp +1229 -0
- data/ext/sources/examples/talk-llama/llama-grammar.h +173 -0
- data/ext/sources/examples/talk-llama/llama-graph.cpp +1618 -0
- data/ext/sources/examples/talk-llama/llama-graph.h +640 -0
- data/ext/sources/examples/talk-llama/llama-hparams.cpp +95 -0
- data/ext/sources/examples/talk-llama/llama-hparams.h +190 -0
- data/ext/sources/examples/talk-llama/llama-impl.cpp +167 -0
- data/ext/sources/examples/talk-llama/llama-impl.h +61 -0
- data/ext/sources/examples/talk-llama/llama-io.cpp +15 -0
- data/ext/sources/examples/talk-llama/llama-io.h +35 -0
- data/ext/sources/examples/talk-llama/llama-kv-cache.cpp +2739 -0
- data/ext/sources/examples/talk-llama/llama-kv-cache.h +502 -0
- data/ext/sources/examples/talk-llama/llama-kv-cells.h +379 -0
- data/ext/sources/examples/talk-llama/llama-memory.cpp +1 -0
- data/ext/sources/examples/talk-llama/llama-memory.h +32 -0
- data/ext/sources/examples/talk-llama/llama-mmap.cpp +600 -0
- data/ext/sources/examples/talk-llama/llama-mmap.h +68 -0
- data/ext/sources/examples/talk-llama/llama-model-loader.cpp +1138 -0
- data/ext/sources/examples/talk-llama/llama-model-loader.h +169 -0
- data/ext/sources/examples/talk-llama/llama-model-saver.cpp +281 -0
- data/ext/sources/examples/talk-llama/llama-model-saver.h +37 -0
- data/ext/sources/examples/talk-llama/llama-model.cpp +13814 -0
- data/ext/sources/examples/talk-llama/llama-model.h +425 -0
- data/ext/sources/examples/talk-llama/llama-quant.cpp +966 -0
- data/ext/sources/examples/talk-llama/llama-quant.h +1 -0
- data/ext/sources/examples/talk-llama/llama-sampling.cpp +2575 -0
- data/ext/sources/examples/talk-llama/llama-sampling.h +32 -0
- data/ext/sources/examples/talk-llama/llama-vocab.cpp +3340 -0
- data/ext/sources/examples/talk-llama/llama-vocab.h +131 -0
- data/ext/sources/examples/talk-llama/llama.cpp +354 -0
- data/ext/sources/examples/talk-llama/llama.h +1377 -0
- data/ext/sources/examples/talk-llama/prompts/talk-alpaca.txt +23 -0
- data/ext/sources/examples/talk-llama/speak +40 -0
- data/ext/sources/examples/talk-llama/speak.bat +1 -0
- data/ext/sources/examples/talk-llama/speak.ps1 +14 -0
- data/ext/sources/examples/talk-llama/talk-llama.cpp +808 -0
- data/ext/sources/examples/talk-llama/unicode-data.cpp +7034 -0
- data/ext/sources/examples/talk-llama/unicode-data.h +20 -0
- data/ext/sources/examples/talk-llama/unicode.cpp +849 -0
- data/ext/sources/examples/talk-llama/unicode.h +66 -0
- data/ext/sources/examples/vad-speech-segments/CMakeLists.txt +8 -0
- data/ext/sources/examples/vad-speech-segments/speech.cpp +143 -0
- data/ext/sources/examples/wchess/CMakeLists.txt +10 -0
- data/ext/sources/examples/wchess/libwchess/CMakeLists.txt +19 -0
- data/ext/sources/examples/wchess/libwchess/Chessboard.cpp +803 -0
- data/ext/sources/examples/wchess/libwchess/Chessboard.h +33 -0
- data/ext/sources/examples/wchess/libwchess/WChess.cpp +193 -0
- data/ext/sources/examples/wchess/libwchess/WChess.h +63 -0
- data/ext/sources/examples/wchess/libwchess/test-chessboard.cpp +117 -0
- data/ext/sources/examples/wchess/wchess.cmd/CMakeLists.txt +8 -0
- data/ext/sources/examples/wchess/wchess.cmd/wchess.cmd.cpp +249 -0
- data/ext/sources/examples/whisper.wasm/CMakeLists.txt +50 -0
- data/ext/sources/examples/whisper.wasm/emscripten.cpp +118 -0
- data/ext/sources/examples/whisper.wasm/index-tmpl.html +658 -0
- data/ext/sources/ggml/CMakeLists.txt +390 -0
- data/ext/sources/ggml/cmake/BuildTypes.cmake +54 -0
- data/ext/sources/ggml/cmake/GitVars.cmake +22 -0
- data/ext/sources/ggml/cmake/common.cmake +26 -0
- data/ext/sources/ggml/cmake/ggml-config.cmake.in +152 -0
- data/ext/sources/ggml/include/ggml-alloc.h +76 -0
- data/ext/sources/ggml/include/ggml-backend.h +354 -0
- data/ext/sources/ggml/include/ggml-blas.h +25 -0
- data/ext/sources/ggml/include/ggml-cann.h +123 -0
- data/ext/sources/ggml/include/ggml-cpp.h +39 -0
- data/ext/sources/ggml/include/ggml-cpu.h +143 -0
- data/ext/sources/ggml/include/ggml-cuda.h +47 -0
- data/ext/sources/ggml/include/ggml-kompute.h +50 -0
- data/ext/sources/ggml/include/ggml-metal.h +66 -0
- data/ext/sources/ggml/include/ggml-opencl.h +26 -0
- data/ext/sources/ggml/include/ggml-opt.h +237 -0
- data/ext/sources/ggml/include/ggml-rpc.h +33 -0
- data/ext/sources/ggml/include/ggml-sycl.h +49 -0
- data/ext/sources/ggml/include/ggml-vulkan.h +29 -0
- data/ext/{ggml.h → sources/ggml/include/ggml.h} +621 -821
- data/ext/sources/ggml/include/gguf.h +202 -0
- data/ext/sources/ggml/src/CMakeLists.txt +346 -0
- data/ext/sources/ggml/src/ggml-alloc.c +1042 -0
- data/ext/sources/ggml/src/ggml-amx/CMakeLists.txt +107 -0
- data/ext/sources/ggml/src/ggml-amx/common.h +94 -0
- data/ext/sources/ggml/src/ggml-amx/ggml-amx.cpp +446 -0
- data/ext/sources/ggml/src/ggml-amx/mmq.cpp +2510 -0
- data/ext/sources/ggml/src/ggml-amx/mmq.h +17 -0
- data/ext/sources/ggml/src/ggml-backend-impl.h +255 -0
- data/ext/sources/ggml/src/ggml-backend-reg.cpp +586 -0
- data/ext/sources/ggml/src/ggml-backend.cpp +2011 -0
- data/ext/sources/ggml/src/ggml-blas/CMakeLists.txt +87 -0
- data/ext/sources/ggml/src/ggml-blas/ggml-blas.cpp +517 -0
- data/ext/sources/ggml/src/ggml-cann/CMakeLists.txt +74 -0
- data/ext/sources/ggml/src/ggml-cann/Doxyfile +2579 -0
- data/ext/sources/ggml/src/ggml-cann/acl_tensor.cpp +181 -0
- data/ext/sources/ggml/src/ggml-cann/acl_tensor.h +258 -0
- data/ext/sources/ggml/src/ggml-cann/aclnn_ops.cpp +3193 -0
- data/ext/sources/ggml/src/ggml-cann/aclnn_ops.h +1125 -0
- data/ext/sources/ggml/src/ggml-cann/common.h +420 -0
- data/ext/sources/ggml/src/ggml-cann/ggml-cann.cpp +2606 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/CMakeLists.txt +30 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/ascendc_kernels.h +19 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/dup.cpp +234 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/get_row_f16.cpp +197 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/get_row_f32.cpp +190 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/get_row_q4_0.cpp +204 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/get_row_q8_0.cpp +191 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/quantize_f16_q8_0.cpp +218 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/quantize_f32_q8_0.cpp +216 -0
- data/ext/sources/ggml/src/ggml-cann/kernels/quantize_float_to_q4_0.cpp +295 -0
- data/ext/sources/ggml/src/ggml-common.h +1857 -0
- data/ext/sources/ggml/src/ggml-cpu/CMakeLists.txt +504 -0
- data/ext/sources/ggml/src/ggml-cpu/amx/amx.cpp +221 -0
- data/ext/sources/ggml/src/ggml-cpu/amx/amx.h +8 -0
- data/ext/sources/ggml/src/ggml-cpu/amx/common.h +91 -0
- data/ext/sources/ggml/src/ggml-cpu/amx/mmq.cpp +2511 -0
- data/ext/sources/ggml/src/ggml-cpu/amx/mmq.h +10 -0
- data/ext/sources/ggml/src/ggml-cpu/binary-ops.cpp +158 -0
- data/ext/sources/ggml/src/ggml-cpu/binary-ops.h +16 -0
- data/ext/sources/ggml/src/ggml-cpu/cmake/FindSIMD.cmake +100 -0
- data/ext/sources/ggml/src/ggml-cpu/common.h +72 -0
- data/ext/sources/ggml/src/ggml-cpu/cpu-feats-x86.cpp +327 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-aarch64.cpp +6431 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-aarch64.h +8 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-hbm.cpp +55 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-hbm.h +8 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-impl.h +508 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-quants.c +13747 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-quants.h +63 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-traits.cpp +36 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-traits.h +38 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.c +3510 -0
- data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.cpp +671 -0
- data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.cpp +337 -0
- data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.h +95 -0
- data/ext/sources/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp +482 -0
- data/ext/sources/ggml/src/ggml-cpu/kleidiai/kleidiai.h +17 -0
- data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.cpp +3544 -0
- data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.h +14 -0
- data/ext/sources/ggml/src/ggml-cpu/ops.cpp +8903 -0
- data/ext/sources/ggml/src/ggml-cpu/ops.h +110 -0
- data/ext/sources/ggml/src/ggml-cpu/simd-mappings.h +892 -0
- data/ext/sources/ggml/src/ggml-cpu/unary-ops.cpp +186 -0
- data/ext/sources/ggml/src/ggml-cpu/unary-ops.h +28 -0
- data/ext/sources/ggml/src/ggml-cpu/vec.cpp +252 -0
- data/ext/sources/ggml/src/ggml-cpu/vec.h +818 -0
- data/ext/sources/ggml/src/ggml-cuda/CMakeLists.txt +184 -0
- data/ext/sources/ggml/src/ggml-cuda/acc.cu +61 -0
- data/ext/sources/ggml/src/ggml-cuda/acc.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/arange.cu +34 -0
- data/ext/sources/ggml/src/ggml-cuda/arange.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/argmax.cu +91 -0
- data/ext/sources/ggml/src/ggml-cuda/argmax.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/argsort.cu +104 -0
- data/ext/sources/ggml/src/ggml-cuda/argsort.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/binbcast.cu +363 -0
- data/ext/sources/ggml/src/ggml-cuda/binbcast.cuh +9 -0
- data/ext/sources/ggml/src/ggml-cuda/clamp.cu +45 -0
- data/ext/sources/ggml/src/ggml-cuda/clamp.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/common.cuh +828 -0
- data/ext/sources/ggml/src/ggml-cuda/concat.cu +221 -0
- data/ext/sources/ggml/src/ggml-cuda/concat.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/conv-transpose-1d.cu +89 -0
- data/ext/sources/ggml/src/ggml-cuda/conv-transpose-1d.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/convert.cu +730 -0
- data/ext/sources/ggml/src/ggml-cuda/convert.cuh +26 -0
- data/ext/sources/ggml/src/ggml-cuda/count-equal.cu +64 -0
- data/ext/sources/ggml/src/ggml-cuda/count-equal.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/cp-async.cuh +57 -0
- data/ext/sources/ggml/src/ggml-cuda/cpy.cu +705 -0
- data/ext/sources/ggml/src/ggml-cuda/cpy.cuh +11 -0
- data/ext/sources/ggml/src/ggml-cuda/cross-entropy-loss.cu +189 -0
- data/ext/sources/ggml/src/ggml-cuda/cross-entropy-loss.cuh +7 -0
- data/ext/sources/ggml/src/ggml-cuda/dequantize.cuh +103 -0
- data/ext/sources/ggml/src/ggml-cuda/diagmask.cu +40 -0
- data/ext/sources/ggml/src/ggml-cuda/diagmask.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-common.cuh +881 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-mma-f16.cuh +1471 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f16.cu +357 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f16.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f32.cu +365 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f32.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-vec-f16.cuh +482 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-vec-f32.cuh +472 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cu +634 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn.cu +346 -0
- data/ext/sources/ggml/src/ggml-cuda/fattn.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/getrows.cu +275 -0
- data/ext/sources/ggml/src/ggml-cuda/getrows.cuh +15 -0
- data/ext/sources/ggml/src/ggml-cuda/ggml-cuda.cu +3505 -0
- data/ext/sources/ggml/src/ggml-cuda/gla.cu +93 -0
- data/ext/sources/ggml/src/ggml-cuda/gla.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/im2col.cu +103 -0
- data/ext/sources/ggml/src/ggml-cuda/im2col.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/mma.cuh +396 -0
- data/ext/sources/ggml/src/ggml-cuda/mmq.cu +324 -0
- data/ext/sources/ggml/src/ggml-cuda/mmq.cuh +3217 -0
- data/ext/sources/ggml/src/ggml-cuda/mmv.cu +336 -0
- data/ext/sources/ggml/src/ggml-cuda/mmv.cuh +12 -0
- data/ext/sources/ggml/src/ggml-cuda/mmvq.cu +595 -0
- data/ext/sources/ggml/src/ggml-cuda/mmvq.cuh +12 -0
- data/ext/sources/ggml/src/ggml-cuda/norm.cu +458 -0
- data/ext/sources/ggml/src/ggml-cuda/norm.cuh +11 -0
- data/ext/sources/ggml/src/ggml-cuda/opt-step-adamw.cu +78 -0
- data/ext/sources/ggml/src/ggml-cuda/opt-step-adamw.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/out-prod.cu +68 -0
- data/ext/sources/ggml/src/ggml-cuda/out-prod.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/pad.cu +49 -0
- data/ext/sources/ggml/src/ggml-cuda/pad.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/pool2d.cu +94 -0
- data/ext/sources/ggml/src/ggml-cuda/pool2d.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/quantize.cu +190 -0
- data/ext/sources/ggml/src/ggml-cuda/quantize.cuh +27 -0
- data/ext/sources/ggml/src/ggml-cuda/rope.cu +456 -0
- data/ext/sources/ggml/src/ggml-cuda/rope.cuh +7 -0
- data/ext/sources/ggml/src/ggml-cuda/scale.cu +31 -0
- data/ext/sources/ggml/src/ggml-cuda/scale.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/softmax.cu +283 -0
- data/ext/sources/ggml/src/ggml-cuda/softmax.cuh +7 -0
- data/ext/sources/ggml/src/ggml-cuda/ssm-conv.cu +148 -0
- data/ext/sources/ggml/src/ggml-cuda/ssm-conv.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/ssm-scan.cu +153 -0
- data/ext/sources/ggml/src/ggml-cuda/ssm-scan.cuh +3 -0
- data/ext/sources/ggml/src/ggml-cuda/sum.cu +45 -0
- data/ext/sources/ggml/src/ggml-cuda/sum.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/sumrows.cu +39 -0
- data/ext/sources/ggml/src/ggml-cuda/sumrows.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_1-ncols2_16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_1-ncols2_8.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_16-ncols2_1.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_16-ncols2_2.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_16-ncols2_4.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_2-ncols2_16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_2-ncols2_4.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_2-ncols2_8.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_32-ncols2_1.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_32-ncols2_2.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_4-ncols2_16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_4-ncols2_2.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_4-ncols2_4.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_4-ncols2_8.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_64-ncols2_1.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_8-ncols2_1.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_8-ncols2_2.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_8-ncols2_4.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_8-ncols2_8.cu +10 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs256-f16-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs256-f16-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-f16.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/generate_cu_files.py +78 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq1_s.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq2_s.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq2_xs.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq2_xxs.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq3_s.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq3_xxs.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq4_nl.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-iq4_xs.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q2_k.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q3_k.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q4_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q4_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q4_k.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q5_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q5_1.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q5_k.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q6_k.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-q8_0.cu +5 -0
- data/ext/sources/ggml/src/ggml-cuda/tsembd.cu +47 -0
- data/ext/sources/ggml/src/ggml-cuda/tsembd.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/unary.cu +289 -0
- data/ext/sources/ggml/src/ggml-cuda/unary.cuh +59 -0
- data/ext/sources/ggml/src/ggml-cuda/upscale.cu +51 -0
- data/ext/sources/ggml/src/ggml-cuda/upscale.cuh +5 -0
- data/ext/sources/ggml/src/ggml-cuda/vecdotq.cuh +1135 -0
- data/ext/sources/ggml/src/ggml-cuda/vendors/cuda.h +15 -0
- data/ext/sources/ggml/src/ggml-cuda/vendors/hip.h +243 -0
- data/ext/sources/ggml/src/ggml-cuda/vendors/musa.h +140 -0
- data/ext/sources/ggml/src/ggml-cuda/wkv.cu +199 -0
- data/ext/sources/ggml/src/ggml-cuda/wkv.cuh +7 -0
- data/ext/sources/ggml/src/ggml-hip/CMakeLists.txt +131 -0
- data/ext/sources/ggml/src/ggml-impl.h +601 -0
- data/ext/sources/ggml/src/ggml-kompute/CMakeLists.txt +166 -0
- data/ext/sources/ggml/src/ggml-kompute/ggml-kompute.cpp +2251 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/common.comp +112 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_add.comp +58 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_addrow.comp +25 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f16_f16.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f16_f32.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f32_f16.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f32_f32.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_diagmask.comp +30 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_gelu.comp +22 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows.comp +17 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_f16.comp +31 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_f32.comp +31 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_q4_0.comp +38 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_q4_1.comp +39 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_q6_k.comp +44 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_f16.comp +69 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_mat_f32.comp +51 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q4_0.comp +33 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q4_1.comp +35 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q4_k.comp +140 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q6_k.comp +106 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q8_0.comp +73 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mv_q_n.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mv_q_n_pre.comp +28 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_norm.comp +84 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_relu.comp +21 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rmsnorm.comp +53 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_neox_f16.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_neox_f32.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_norm_f16.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_norm_f32.comp +52 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_scale.comp +19 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_scale_8.comp +23 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_silu.comp +22 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_softmax.comp +72 -0
- data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/rope_common.comp +71 -0
- data/ext/sources/ggml/src/ggml-metal/CMakeLists.txt +120 -0
- data/ext/sources/ggml/src/ggml-metal/ggml-metal-impl.h +622 -0
- data/ext/sources/ggml/src/ggml-metal/ggml-metal.m +5998 -0
- data/ext/sources/ggml/src/ggml-metal/ggml-metal.metal +7089 -0
- data/ext/sources/ggml/src/ggml-musa/CMakeLists.txt +113 -0
- data/ext/sources/ggml/src/ggml-musa/mudnn.cu +112 -0
- data/ext/sources/ggml/src/ggml-musa/mudnn.cuh +12 -0
- data/ext/sources/ggml/src/ggml-opencl/CMakeLists.txt +96 -0
- data/ext/sources/ggml/src/ggml-opencl/ggml-opencl.cpp +5124 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/add.cl +83 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/clamp.cl +20 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/cpy.cl +184 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/cvt.cl +118 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/diag_mask_inf.cl +58 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/embed_kernel.py +26 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/gelu.cl +62 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_noshuffle.cl +268 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_noshuffle_general.cl +274 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/get_rows.cl +163 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/im2col_f16.cl +57 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/im2col_f32.cl +57 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul.cl +79 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mat_Ab_Bi_8x4.cl +139 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_f16_f16.cl +118 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_f16_f32.cl +118 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_f16_f32_1row.cl +94 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_f16_f32_l4.cl +84 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_f32_f32.cl +118 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_0_f32.cl +192 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_0_f32_1d_16x_flat.cl +307 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_0_f32_1d_8x_flat.cl +265 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_0_f32_8x_flat.cl +272 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_0_f32_v.cl +254 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q6_k.cl +190 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/norm.cl +81 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/relu.cl +16 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/rms_norm.cl +96 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/rope.cl +721 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/scale.cl +16 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/silu.cl +30 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_4_f16.cl +87 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_4_f32.cl +87 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_f16.cl +86 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_f32.cl +86 -0
- data/ext/sources/ggml/src/ggml-opencl/kernels/transpose.cl +84 -0
- data/ext/sources/ggml/src/ggml-opt.cpp +1037 -0
- data/ext/sources/ggml/src/ggml-quants.c +5232 -0
- data/ext/sources/ggml/src/ggml-quants.h +100 -0
- data/ext/sources/ggml/src/ggml-rpc/CMakeLists.txt +9 -0
- data/ext/sources/ggml/src/ggml-rpc/ggml-rpc.cpp +1813 -0
- data/ext/sources/ggml/src/ggml-sycl/CMakeLists.txt +189 -0
- data/ext/sources/ggml/src/ggml-sycl/backend.hpp +37 -0
- data/ext/sources/ggml/src/ggml-sycl/binbcast.cpp +345 -0
- data/ext/sources/ggml/src/ggml-sycl/binbcast.hpp +39 -0
- data/ext/sources/ggml/src/ggml-sycl/common.cpp +83 -0
- data/ext/sources/ggml/src/ggml-sycl/common.hpp +589 -0
- data/ext/sources/ggml/src/ggml-sycl/concat.cpp +195 -0
- data/ext/sources/ggml/src/ggml-sycl/concat.hpp +20 -0
- data/ext/sources/ggml/src/ggml-sycl/conv.cpp +101 -0
- data/ext/sources/ggml/src/ggml-sycl/conv.hpp +20 -0
- data/ext/sources/ggml/src/ggml-sycl/convert.cpp +623 -0
- data/ext/sources/ggml/src/ggml-sycl/convert.hpp +34 -0
- data/ext/sources/ggml/src/ggml-sycl/cpy.cpp +700 -0
- data/ext/sources/ggml/src/ggml-sycl/cpy.hpp +11 -0
- data/ext/sources/ggml/src/ggml-sycl/dequantize.hpp +791 -0
- data/ext/sources/ggml/src/ggml-sycl/dmmv.cpp +1162 -0
- data/ext/sources/ggml/src/ggml-sycl/dmmv.hpp +27 -0
- data/ext/sources/ggml/src/ggml-sycl/dpct/helper.hpp +2957 -0
- data/ext/sources/ggml/src/ggml-sycl/element_wise.cpp +1511 -0
- data/ext/sources/ggml/src/ggml-sycl/element_wise.hpp +75 -0
- data/ext/sources/ggml/src/ggml-sycl/gemm.hpp +99 -0
- data/ext/sources/ggml/src/ggml-sycl/getrows.cpp +309 -0
- data/ext/sources/ggml/src/ggml-sycl/getrows.hpp +20 -0
- data/ext/sources/ggml/src/ggml-sycl/ggml-sycl.cpp +4493 -0
- data/ext/sources/ggml/src/ggml-sycl/gla.cpp +106 -0
- data/ext/sources/ggml/src/ggml-sycl/gla.hpp +8 -0
- data/ext/sources/ggml/src/ggml-sycl/im2col.cpp +136 -0
- data/ext/sources/ggml/src/ggml-sycl/im2col.hpp +21 -0
- data/ext/sources/ggml/src/ggml-sycl/mmq.cpp +3030 -0
- data/ext/sources/ggml/src/ggml-sycl/mmq.hpp +33 -0
- data/ext/sources/ggml/src/ggml-sycl/mmvq.cpp +1110 -0
- data/ext/sources/ggml/src/ggml-sycl/mmvq.hpp +27 -0
- data/ext/sources/ggml/src/ggml-sycl/norm.cpp +501 -0
- data/ext/sources/ggml/src/ggml-sycl/norm.hpp +26 -0
- data/ext/sources/ggml/src/ggml-sycl/outprod.cpp +47 -0
- data/ext/sources/ggml/src/ggml-sycl/outprod.hpp +10 -0
- data/ext/sources/ggml/src/ggml-sycl/presets.hpp +74 -0
- data/ext/sources/ggml/src/ggml-sycl/quants.hpp +83 -0
- data/ext/sources/ggml/src/ggml-sycl/rope.cpp +361 -0
- data/ext/sources/ggml/src/ggml-sycl/rope.hpp +20 -0
- data/ext/sources/ggml/src/ggml-sycl/softmax.cpp +261 -0
- data/ext/sources/ggml/src/ggml-sycl/softmax.hpp +20 -0
- data/ext/sources/ggml/src/ggml-sycl/sycl_hw.cpp +13 -0
- data/ext/sources/ggml/src/ggml-sycl/sycl_hw.hpp +23 -0
- data/ext/sources/ggml/src/ggml-sycl/tsembd.cpp +72 -0
- data/ext/sources/ggml/src/ggml-sycl/tsembd.hpp +20 -0
- data/ext/sources/ggml/src/ggml-sycl/vecdotq.hpp +1215 -0
- data/ext/sources/ggml/src/ggml-sycl/wkv.cpp +293 -0
- data/ext/sources/ggml/src/ggml-sycl/wkv.hpp +10 -0
- data/ext/sources/ggml/src/ggml-threading.cpp +12 -0
- data/ext/sources/ggml/src/ggml-threading.h +14 -0
- data/ext/sources/ggml/src/ggml-vulkan/CMakeLists.txt +196 -0
- data/ext/sources/ggml/src/ggml-vulkan/cmake/host-toolchain.cmake.in +15 -0
- data/ext/sources/ggml/src/ggml-vulkan/ggml-vulkan.cpp +10700 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt +39 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/acc.comp +29 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add.comp +29 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp +51 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp +69 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/clamp.comp +17 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/concat.comp +41 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/contig_copy.comp +49 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_dw.comp +105 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy.comp +23 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_from_quant.comp +51 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_to_quant.comp +242 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cos.comp +17 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_equal.comp +31 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_f32.comp +20 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_funcs.comp +462 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_funcs_cm2.comp +699 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_head.comp +13 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_m.comp +42 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_s.comp +35 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_s.comp +44 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xs.comp +43 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xxs.comp +48 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_s.comp +39 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_xxs.comp +49 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_nl.comp +32 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_xs.comp +34 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q2_k.comp +34 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q3_k.comp +42 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_0.comp +30 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_1.comp +32 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_k.comp +68 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_0.comp +34 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_1.comp +35 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_k.comp +70 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q6_k.comp +33 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q8_0.comp +31 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag_mask_inf.comp +34 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/div.comp +27 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn.comp +337 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_base.comp +162 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp +360 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp +267 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_split_k_reduce.comp +59 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu.comp +25 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_quick.comp +23 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/generic_binary_head.comp +64 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/generic_head.comp +9 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/generic_unary_head.comp +76 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows.comp +33 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows_quant.comp +41 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/group_norm.comp +66 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp +100 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/l2_norm.comp +41 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/leaky_relu.comp +22 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul.comp +27 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_split_k_reduce.comp +48 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec.comp +169 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp +118 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_m.comp +82 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_s.comp +79 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_s.comp +90 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xs.comp +87 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xxs.comp +87 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_s.comp +90 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_xxs.comp +88 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_nc.comp +118 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_p021.comp +154 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp +130 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp +132 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp +136 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp +167 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp +130 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp +868 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp +441 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq.comp +442 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.comp +99 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/norm.comp +44 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_adamw.comp +42 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pad.comp +28 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pool2d.comp +74 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/quantize_q8_1.comp +77 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/relu.comp +21 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat.comp +26 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat_back.comp +37 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm.comp +52 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_back.comp +55 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp +58 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_multi.comp +60 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_neox.comp +43 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_norm.comp +43 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_vision.comp +47 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/scale.comp +24 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sigmoid.comp +20 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu.comp +22 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu_back.comp +26 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sin.comp +17 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max.comp +173 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_back.comp +50 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/square.comp +17 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sub.comp +29 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.comp +37 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp +20 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/test_bfloat16_support.comp +7 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/test_coopmat2_support.comp +7 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/test_coopmat_support.comp +7 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/test_integer_dot_support.comp +7 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/timestep_embedding.comp +41 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/types.comp +1373 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/upscale.comp +36 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +751 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/wkv6.comp +87 -0
- data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/wkv7.comp +91 -0
- data/ext/sources/ggml/src/ggml.c +6550 -0
- data/ext/sources/ggml/src/gguf.cpp +1330 -0
- data/ext/{whisper.h → sources/include/whisper.h} +91 -24
- data/ext/sources/src/CMakeLists.txt +143 -0
- data/ext/sources/src/coreml/whisper-decoder-impl.h +158 -0
- data/ext/sources/src/coreml/whisper-decoder-impl.m +226 -0
- data/ext/sources/src/coreml/whisper-encoder-impl.h +154 -0
- data/ext/sources/src/coreml/whisper-encoder-impl.m +222 -0
- data/ext/sources/src/coreml/whisper-encoder.h +26 -0
- data/ext/sources/src/coreml/whisper-encoder.mm +73 -0
- data/ext/sources/src/openvino/whisper-openvino-encoder.cpp +108 -0
- data/ext/sources/src/openvino/whisper-openvino-encoder.h +31 -0
- data/ext/sources/src/whisper-arch.h +197 -0
- data/ext/{whisper.cpp → sources/src/whisper.cpp} +2535 -835
- data/ext/sources/tests/CMakeLists.txt +105 -0
- data/ext/sources/tests/earnings21/eval.mk +58 -0
- data/ext/sources/tests/earnings21/eval.py +68 -0
- data/ext/sources/tests/earnings21/normalizers/__init__.py +2 -0
- data/ext/sources/tests/earnings21/normalizers/basic.py +80 -0
- data/ext/sources/tests/earnings21/normalizers/english.json +1741 -0
- data/ext/sources/tests/earnings21/normalizers/english.py +550 -0
- data/ext/sources/tests/earnings21/requirements.txt +6 -0
- data/ext/sources/tests/en-0-ref.txt +1 -0
- data/ext/sources/tests/en-1-ref.txt +1 -0
- data/ext/sources/tests/en-2-ref.txt +1 -0
- data/ext/sources/tests/es-0-ref.txt +1 -0
- data/ext/sources/tests/librispeech/eval.mk +39 -0
- data/ext/sources/tests/librispeech/eval.py +47 -0
- data/ext/sources/tests/librispeech/normalizers/__init__.py +2 -0
- data/ext/sources/tests/librispeech/normalizers/basic.py +80 -0
- data/ext/sources/tests/librispeech/normalizers/english.json +1741 -0
- data/ext/sources/tests/librispeech/normalizers/english.py +550 -0
- data/ext/sources/tests/librispeech/requirements.txt +6 -0
- data/ext/sources/tests/run-tests.sh +130 -0
- data/ext/sources/tests/test-c.c +3 -0
- data/ext/sources/tests/test-vad-full.cpp +54 -0
- data/ext/sources/tests/test-vad.cpp +83 -0
- data/ext/sources/tests/test-whisper.js +58 -0
- data/extsources.rb +34 -0
- data/lib/whisper/model/uri.rb +178 -0
- data/sig/whisper.rbs +480 -0
- data/tests/helper.rb +35 -0
- data/tests/jfk_reader/.gitignore +5 -0
- data/tests/jfk_reader/extconf.rb +3 -0
- data/tests/jfk_reader/jfk_reader.c +68 -0
- data/tests/test_callback.rb +202 -0
- data/tests/test_error.rb +20 -0
- data/tests/test_model.rb +109 -0
- data/tests/test_package.rb +46 -0
- data/tests/test_params.rb +297 -0
- data/tests/test_segment.rb +74 -0
- data/tests/test_vad.rb +19 -0
- data/tests/test_vad_params.rb +103 -0
- data/tests/test_whisper.rb +212 -124
- data/whispercpp.gemspec +37 -0
- metadata +794 -13
- data/ext/dr_wav.h +0 -6434
- data/ext/ggml.c +0 -21755
- data/ext/ruby_whisper.cpp +0 -426
data/ext/sources/ggml/src/ggml-opt.cpp
@@ -0,0 +1,1037 @@
+#include "ggml-opt.h"
+
+#include "ggml.h"
+#include "ggml-alloc.h"
+#include "ggml-backend.h"
+#include "ggml-impl.h"
+
+#include <algorithm>
+#include <cmath>
+#include <cstdint>
+#include <cinttypes>
+#include <map>
+#include <random>
+#include <vector>
+
+struct ggml_opt_dataset {
+    struct ggml_context * ctx = nullptr;
+    ggml_backend_buffer_t buf = nullptr;
+    struct ggml_tensor * data = nullptr;
+    struct ggml_tensor * labels = nullptr;
+
+    int64_t ndata = -1;
+    int64_t ndata_shard = -1;
+    size_t nbs_data = -1;
+    size_t nbs_labels = -1;
+
+    std::vector<int64_t> permutation;
+};
+
+struct ggml_opt_context {
+    ggml_backend_sched_t backend_sched = nullptr;
+    ggml_cgraph * allocated_graph = nullptr;
+    ggml_cgraph * allocated_graph_copy = nullptr;
+    struct ggml_context * ctx_static = nullptr;
+    struct ggml_context * ctx_cpu = nullptr;
+    struct ggml_context * ctx_compute = nullptr;
+    struct ggml_context * ctx_copy = nullptr;
+    ggml_backend_buffer_t buf_static = nullptr;
+    ggml_backend_buffer_t buf_cpu = nullptr;
+    std::mt19937 rng;
+    enum ggml_opt_loss_type loss_type;
+    enum ggml_opt_build_type build_type;
+    enum ggml_opt_build_type build_type_alloc;
+
+    struct ggml_tensor * inputs = nullptr;
+    struct ggml_tensor * outputs = nullptr;
+    struct ggml_tensor * labels = nullptr;
+
+    struct ggml_tensor * loss = nullptr;
+    struct ggml_tensor * pred = nullptr;
+    struct ggml_tensor * ncorrect = nullptr;
+
+    struct ggml_cgraph * gf = nullptr;
+    struct ggml_cgraph * gb_grad = nullptr;
+    struct ggml_cgraph * gb_opt = nullptr;
+    bool static_graphs = false;
+    bool eval_ready = false;
+    std::vector<struct ggml_tensor *> grad_accs;
+    std::vector<struct ggml_tensor *> grad_m;
+    std::vector<struct ggml_tensor *> grad_v;
+
+    int64_t iter = 1;
+    int32_t opt_period = 1;
+    int32_t opt_i = 0;
+    bool loss_per_datapoint = false;
+
+    ggml_opt_get_optimizer_params get_opt_pars = nullptr;
+    void * get_opt_pars_ud = nullptr;
+    struct ggml_tensor * adamw_params = nullptr;
+};
+
+struct ggml_opt_result {
+    int64_t ndata = 0;
+    std::vector<float> loss;
+    std::vector<int32_t> pred;
+    int64_t ncorrect = 0;
+
+    int64_t opt_period = -1;
+    bool loss_per_datapoint = false;
+};
+
+// ====== Dataset ======
+
+ggml_opt_dataset_t ggml_opt_dataset_init(
+        enum ggml_type type_data,
+        enum ggml_type type_label,
+        int64_t ne_datapoint,
+        int64_t ne_label,
+        int64_t ndata,
+        int64_t ndata_shard) {
+    GGML_ASSERT(ne_datapoint > 0);
+    GGML_ASSERT(ne_label >= 0);
+    GGML_ASSERT(ndata > 0);
+    GGML_ASSERT(ndata_shard > 0);
+
+    ggml_opt_dataset_t result = new ggml_opt_dataset;
+    result->ndata = ndata;
+    result->ndata_shard = ndata_shard;
+
+    {
+        struct ggml_init_params params = {
+            /*.mem_size   =*/ 2*ggml_tensor_overhead(),
+            /*.mem_buffer =*/ nullptr,
+            /*.no_alloc   =*/ true,
+        };
+        result->ctx = ggml_init(params);
+    }
+
+    result->data = ggml_new_tensor_2d(result->ctx, type_data, ne_datapoint, ndata);
+    result->nbs_data = ggml_nbytes(result->data) * ndata_shard/ndata;
+
+    if (ne_label > 0) {
+        result->labels = ggml_new_tensor_2d(result->ctx, type_label, ne_label, ndata);
+        result->nbs_labels = ggml_nbytes(result->labels) * ndata_shard/ndata;
+    } else {
+        result->labels = nullptr;
+        result->nbs_labels = 0;
+    }
+
+    result->buf = ggml_backend_alloc_ctx_tensors_from_buft(result->ctx, ggml_backend_cpu_buffer_type());
+
+    const int64_t nshards = ndata/ndata_shard;
+    result->permutation.resize(nshards);
+    for (int64_t i = 0; i < nshards; ++i) {
+        result->permutation[i] = i;
+    }
+    return result;
+}
+
+void ggml_opt_dataset_free(ggml_opt_dataset_t dataset) {
+    ggml_backend_buffer_free(dataset->buf);
+    ggml_free(dataset->ctx);
+    delete dataset;
+}
+
+int64_t ggml_opt_dataset_ndata(ggml_opt_dataset_t dataset) {
+    return dataset->ndata;
+}
+
+struct ggml_tensor * ggml_opt_dataset_data(ggml_opt_dataset_t dataset) {
+    return dataset->data;
+}
+
+struct ggml_tensor * ggml_opt_dataset_labels(ggml_opt_dataset_t dataset) {
+    return dataset->labels;
+}
+
+void ggml_opt_dataset_shuffle(ggml_opt_context_t opt_ctx, ggml_opt_dataset_t dataset, int64_t idata) {
+    GGML_ASSERT(idata <= dataset->ndata);
+
+    if (idata < 0) {
+        std::shuffle(dataset->permutation.begin(), dataset->permutation.end(), opt_ctx->rng);
+        return;
+    }
+
+    GGML_ASSERT(idata % dataset->ndata_shard == 0);
+    const int64_t ishard_max = idata / dataset->ndata_shard;
+    std::shuffle(dataset->permutation.begin(), dataset->permutation.begin() + ishard_max, opt_ctx->rng);
+}
+
+void ggml_opt_dataset_get_batch(ggml_opt_dataset_t dataset, struct ggml_tensor * data_batch, struct ggml_tensor * labels_batch, int64_t ibatch) {
+    GGML_ASSERT( data_batch && ggml_is_contiguous(data_batch));
+    GGML_ASSERT(!labels_batch || ggml_is_contiguous(labels_batch));
+    GGML_ASSERT((labels_batch == nullptr) == (dataset->labels == nullptr));
+    GGML_ASSERT( data_batch->type == dataset->data->type);
+    GGML_ASSERT(!labels_batch || labels_batch->type == dataset->labels->type);
+
+    const size_t nb_data_batch = ggml_nbytes(data_batch);
|
169
|
+
GGML_ASSERT(nb_data_batch % dataset->nbs_data == 0);
|
170
|
+
const int64_t shards_per_batch = nb_data_batch / dataset->nbs_data;
|
171
|
+
|
172
|
+
if (labels_batch) {
|
173
|
+
const size_t nb_labels_batch = ggml_nbytes(labels_batch);
|
174
|
+
GGML_ASSERT(nb_labels_batch == shards_per_batch*dataset->nbs_labels);
|
175
|
+
}
|
176
|
+
|
177
|
+
GGML_ASSERT((ibatch + 1)*shards_per_batch <= int64_t(dataset->permutation.size()));
|
178
|
+
|
179
|
+
for (int64_t ishard_batch = 0; ishard_batch < shards_per_batch; ++ishard_batch) {
|
180
|
+
const int64_t ishard = dataset->permutation[ibatch*shards_per_batch + ishard_batch];
|
181
|
+
|
182
|
+
const char * ptr_data = (const char *) dataset->data->data + ishard*dataset->nbs_data;
|
183
|
+
ggml_backend_tensor_set(data_batch, ptr_data, ishard_batch*dataset->nbs_data, dataset->nbs_data);
|
184
|
+
|
185
|
+
if (!labels_batch) {
|
186
|
+
continue;
|
187
|
+
}
|
188
|
+
|
189
|
+
const char * ptr_labels = (const char *) dataset->labels->data + ishard*dataset->nbs_labels;
|
190
|
+
ggml_backend_tensor_set(labels_batch, ptr_labels, ishard_batch*dataset->nbs_labels, dataset->nbs_labels);
|
191
|
+
}
|
192
|
+
}
|
193
|
+
|
194
|
+
void ggml_opt_dataset_get_batch_host(ggml_opt_dataset_t dataset, void * data_batch, size_t nb_data_batch, void * labels_batch, int64_t ibatch) {
|
195
|
+
GGML_ASSERT((labels_batch == nullptr) == (dataset->labels == nullptr));
|
196
|
+
GGML_ASSERT(nb_data_batch % dataset->nbs_data == 0);
|
197
|
+
|
198
|
+
const int64_t shards_per_batch = nb_data_batch / dataset->nbs_data;
|
199
|
+
|
200
|
+
GGML_ASSERT((ibatch + 1)*shards_per_batch <= int64_t(dataset->permutation.size()));
|
201
|
+
|
202
|
+
for (int64_t ishard_batch = 0; ishard_batch < shards_per_batch; ++ishard_batch) {
|
203
|
+
const int64_t ishard = dataset->permutation[ibatch*shards_per_batch + ishard_batch];
|
204
|
+
|
205
|
+
const char * ptr_data = (const char *) dataset->data->data + ishard *dataset->nbs_data;
|
206
|
+
char * ptr_data_batch = (char *) data_batch + ishard_batch*dataset->nbs_data;
|
207
|
+
memcpy(ptr_data_batch, ptr_data, dataset->nbs_data);
|
208
|
+
|
209
|
+
if (!labels_batch) {
|
210
|
+
continue;
|
211
|
+
}
|
212
|
+
|
213
|
+
const char * ptr_labels = (const char *) dataset->labels->data + ishard *dataset->nbs_labels;
|
214
|
+
char * ptr_labels_batch = (char *) labels_batch + ishard_batch*dataset->nbs_labels;
|
215
|
+
memcpy(ptr_labels_batch, ptr_labels, dataset->nbs_labels);
|
216
|
+
}
|
217
|
+
}
|
218
|
+
|
219
|
+
// ====== Model / Context ======
|
220
|
+
|
221
|
+
struct ggml_opt_optimizer_params ggml_opt_get_default_optimizer_params(void * userdata) {
|
222
|
+
GGML_UNUSED(userdata);
|
223
|
+
|
224
|
+
ggml_opt_optimizer_params result;
|
225
|
+
|
226
|
+
result.adamw.alpha = 0.001f;
|
227
|
+
result.adamw.beta1 = 0.9f;
|
228
|
+
result.adamw.beta2 = 0.999f;
|
229
|
+
result.adamw.eps = 1e-8f;
|
230
|
+
result.adamw.wd = 0.0f;
|
231
|
+
|
232
|
+
return result;
|
233
|
+
}
|
234
|
+
|
235
|
+
struct ggml_opt_optimizer_params ggml_opt_get_constant_optimizer_params(void * userdata) {
|
236
|
+
return *((struct ggml_opt_optimizer_params *) userdata);
|
237
|
+
}
|
238
|
+
|
239
|
+
struct ggml_opt_params ggml_opt_default_params(
|
240
|
+
ggml_backend_sched_t backend_sched,
|
241
|
+
enum ggml_opt_loss_type loss_type) {
|
242
|
+
return {
|
243
|
+
/*backend_sched =*/ backend_sched,
|
244
|
+
/*ctx_compute =*/ nullptr,
|
245
|
+
/*inputs =*/ nullptr,
|
246
|
+
/*logits =*/ nullptr,
|
247
|
+
/*loss_type =*/ loss_type,
|
248
|
+
/*build_type =*/ GGML_OPT_BUILD_TYPE_OPT,
|
249
|
+
/*opt_period =*/ 1,
|
250
|
+
/*get_opt_pars =*/ ggml_opt_get_default_optimizer_params,
|
251
|
+
/*get_opt_pars_ud =*/ nullptr,
|
252
|
+
};
|
253
|
+
}
|
254
|
+
|
255
|
+
static ggml_tensor * map_tensor(std::map<ggml_tensor *, ggml_tensor *> & tensor_map, ggml_context * ctx, ggml_tensor * tensor) {
|
256
|
+
if (!tensor) {
|
257
|
+
return nullptr;
|
258
|
+
}
|
259
|
+
|
260
|
+
if (tensor_map.find(tensor) != tensor_map.end()) {
|
261
|
+
return tensor_map[tensor];
|
262
|
+
}
|
263
|
+
|
264
|
+
ggml_tensor * new_tensor = ggml_dup_tensor(ctx, tensor);
|
265
|
+
tensor_map[tensor] = new_tensor;
|
266
|
+
|
267
|
+
new_tensor->op = tensor->op;
|
268
|
+
for (int i = 0; i < GGML_MAX_DIMS; i++) {
|
269
|
+
new_tensor->nb[i] = tensor->nb[i];
|
270
|
+
}
|
271
|
+
new_tensor->flags = tensor->flags;
|
272
|
+
memcpy(new_tensor->op_params, tensor->op_params, sizeof(tensor->op_params));
|
273
|
+
strcpy(new_tensor->name, tensor->name);
|
274
|
+
new_tensor->data = tensor->data;
|
275
|
+
new_tensor->buffer = tensor->buffer;
|
276
|
+
new_tensor->extra = tensor->extra;
|
277
|
+
new_tensor->view_offs = tensor->view_offs;
|
278
|
+
new_tensor->view_src = map_tensor(tensor_map, ctx, tensor->view_src);
|
279
|
+
for (int i = 0; i < GGML_MAX_SRC; i++) {
|
280
|
+
new_tensor->src[i] = map_tensor(tensor_map, ctx, tensor->src[i]);
|
281
|
+
}
|
282
|
+
|
283
|
+
return new_tensor;
|
284
|
+
}
|
285
|
+
|
286
|
+
static ggml_cgraph * dup_graph(ggml_context * ctx, ggml_cgraph * src) {
|
287
|
+
std::map<ggml_tensor *, ggml_tensor *> tensor_map;
|
288
|
+
|
289
|
+
ggml_cgraph * dst = ggml_new_graph_custom(ctx, src->size, /*grads =*/ true);
|
290
|
+
|
291
|
+
for (int i = 0; i < src->n_leafs; i++) {
|
292
|
+
ggml_build_forward_expand(dst, map_tensor(tensor_map, ctx, src->leafs[i]));
|
293
|
+
}
|
294
|
+
GGML_ASSERT(dst->n_leafs == src->n_leafs);
|
295
|
+
for (int i = 0; i < src->n_nodes; i++) {
|
296
|
+
ggml_build_forward_expand(dst, map_tensor(tensor_map, ctx, src->nodes[i]));
|
297
|
+
}
|
298
|
+
GGML_ASSERT(dst->n_nodes == src->n_nodes);
|
299
|
+
for (int i = 0; i < src->n_nodes; ++i) {
|
300
|
+
const size_t igrad_src = ggml_hash_find(&src->visited_hash_set, src->nodes[i]);
|
301
|
+
const size_t igrad_dst = ggml_hash_find(&dst->visited_hash_set, dst->nodes[i]);
|
302
|
+
|
303
|
+
GGML_ASSERT(igrad_src != GGML_HASHSET_FULL);
|
304
|
+
GGML_ASSERT(ggml_bitset_get(src->visited_hash_set.used, igrad_src));
|
305
|
+
GGML_ASSERT(igrad_dst != GGML_HASHSET_FULL);
|
306
|
+
GGML_ASSERT(ggml_bitset_get(dst->visited_hash_set.used, igrad_dst));
|
307
|
+
|
308
|
+
dst->grads[igrad_dst] = src->grads[igrad_src];
|
309
|
+
dst->grad_accs[igrad_dst] = src->grad_accs[igrad_src];
|
310
|
+
}
|
311
|
+
|
312
|
+
return dst;
|
313
|
+
}
|
314
|
+
|
315
|
+
static void ggml_opt_build(ggml_opt_context_t opt_ctx) {
|
316
|
+
GGML_ASSERT(opt_ctx->ctx_compute && "no compute context set, either use static graphs or set one with ggml_opt_prepare_alloc");
|
317
|
+
GGML_ASSERT((!opt_ctx->static_graphs || opt_ctx->inputs->data) && "when using static graphs the inputs must be allocated statically");
|
318
|
+
|
319
|
+
const bool accumulate = opt_ctx->build_type_alloc >= GGML_OPT_BUILD_TYPE_GRAD &&
|
320
|
+
!(opt_ctx->static_graphs && opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_OPT && opt_ctx->opt_period == 1);
|
321
|
+
|
322
|
+
ggml_set_input(opt_ctx->inputs);
|
323
|
+
ggml_set_output(opt_ctx->outputs);
|
324
|
+
|
325
|
+
int n_param = 0;
|
326
|
+
for (int i = 0; i < opt_ctx->gf->n_nodes; ++i) {
|
327
|
+
const struct ggml_tensor * node = opt_ctx->gf->nodes[i];
|
328
|
+
if (node->flags & GGML_TENSOR_FLAG_PARAM) {
|
329
|
+
n_param++;
|
330
|
+
}
|
331
|
+
GGML_ASSERT(!(node->flags & GGML_TENSOR_FLAG_LOSS) && "support for extra loss terms not implemented");
|
332
|
+
}
|
333
|
+
|
334
|
+
if (!opt_ctx->ctx_static) {
|
335
|
+
// The static context is used for:
|
336
|
+
// - gradients (1 per loss, 1 tensor per param if using gradient accumulation)
|
337
|
+
// - optimizer momenta (2 tensors per param)
|
338
|
+
// - labels (if using static graphs)
|
339
|
+
// - loss (if using static graphs, up to 5 tensors)
|
340
|
+
// - pred (if using static graphs)
|
341
|
+
// - ncorrect (if using static graphs, 2 tensors).
|
342
|
+
constexpr size_t n_loss = 1;
|
343
|
+
const size_t tensors_per_param = (accumulate ? 1 : 0) +
|
344
|
+
(opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_OPT ? 2 : 0);
|
345
|
+
const size_t tensors_const = opt_ctx->static_graphs ? 9 : 0;
|
346
|
+
const size_t size_meta = (n_loss + tensors_per_param*n_param + tensors_const) * ggml_tensor_overhead();
|
347
|
+
struct ggml_init_params params = {
|
348
|
+
/*.mem_size =*/ size_meta,
|
349
|
+
/*.mem_buffer =*/ nullptr,
|
350
|
+
/*.no_alloc =*/ true,
|
351
|
+
};
|
352
|
+
opt_ctx->ctx_static = ggml_init(params);
|
353
|
+
}
|
354
|
+
GGML_ASSERT(opt_ctx->build_type <= opt_ctx->build_type_alloc);
|
355
|
+
|
356
|
+
{
|
357
|
+
// The cpu context is allocated statically if using static graphs, dynamically otherwise.
|
358
|
+
// It is used for:
|
359
|
+
// - optimizer parameters (1 shared for all optimizer invocations)
|
360
|
+
const size_t size_meta = 1 * ggml_tensor_overhead();
|
361
|
+
struct ggml_init_params params = {
|
362
|
+
/*.mem_size =*/ size_meta,
|
363
|
+
/*.mem_buffer =*/ nullptr,
|
364
|
+
/*.no_alloc =*/ true,
|
365
|
+
};
|
366
|
+
ggml_free(opt_ctx->ctx_cpu);
|
367
|
+
opt_ctx->ctx_cpu = ggml_init(params);
|
368
|
+
|
369
|
+
ggml_backend_buffer_free(opt_ctx->buf_cpu);
|
370
|
+
opt_ctx->buf_cpu = nullptr;
|
371
|
+
}
|
372
|
+
|
373
|
+
struct ggml_context * ctx_results = opt_ctx->static_graphs ? opt_ctx->ctx_static : opt_ctx->ctx_compute;
|
374
|
+
|
375
|
+
switch (opt_ctx->loss_type) {
|
376
|
+
case GGML_OPT_LOSS_TYPE_MEAN: {
|
377
|
+
opt_ctx->loss = ggml_sum(ctx_results, opt_ctx->outputs);
|
378
|
+
ggml_set_name(opt_ctx->loss, "loss_sum");
|
379
|
+
const float scale = 1.0f / (opt_ctx->opt_period * ggml_nelements(opt_ctx->outputs));
|
380
|
+
opt_ctx->loss = ggml_scale(ctx_results, opt_ctx->loss, scale);
|
381
|
+
ggml_set_name(opt_ctx->loss, "loss_mean");
|
382
|
+
opt_ctx->loss_per_datapoint = true;
|
383
|
+
break;
|
384
|
+
}
|
385
|
+
case GGML_OPT_LOSS_TYPE_SUM: {
|
386
|
+
opt_ctx->loss = ggml_sum(ctx_results, opt_ctx->outputs);
|
387
|
+
ggml_set_name(opt_ctx->loss, "loss_sum");
|
388
|
+
opt_ctx->loss_per_datapoint = false;
|
389
|
+
break;
|
390
|
+
}
|
391
|
+
case GGML_OPT_LOSS_TYPE_CROSS_ENTROPY: {
|
392
|
+
opt_ctx->labels = ggml_dup_tensor(ctx_results, opt_ctx->outputs);
|
393
|
+
ggml_set_input(opt_ctx->labels);
|
394
|
+
ggml_set_name(opt_ctx->labels, "labels");
|
395
|
+
opt_ctx->loss = ggml_cross_entropy_loss(ctx_results, opt_ctx->outputs, opt_ctx->labels);
|
396
|
+
ggml_set_name(opt_ctx->loss, "loss_cross_entropy");
|
397
|
+
if (opt_ctx->opt_period > 1) {
|
398
|
+
opt_ctx->loss = ggml_scale(ctx_results, opt_ctx->loss, 1.0f / opt_ctx->opt_period);
|
399
|
+
ggml_set_name(opt_ctx->loss, "loss_cross_entropy_scaled");
|
400
|
+
}
|
401
|
+
opt_ctx->loss_per_datapoint = true;
|
402
|
+
break;
|
403
|
+
}
|
404
|
+
case GGML_OPT_LOSS_TYPE_MEAN_SQUARED_ERROR: {
|
405
|
+
opt_ctx->labels = ggml_dup_tensor(ctx_results, opt_ctx->outputs);
|
406
|
+
ggml_set_input(opt_ctx->labels);
|
407
|
+
ggml_set_name(opt_ctx->labels, "labels");
|
408
|
+
opt_ctx->loss = ggml_sub(ctx_results, opt_ctx->outputs, opt_ctx->labels);
|
409
|
+
ggml_set_name(opt_ctx->loss, "loss_error");
|
410
|
+
opt_ctx->loss = ggml_sqr(ctx_results, opt_ctx->loss);
|
411
|
+
ggml_set_name(opt_ctx->loss, "loss_squared_error");
|
412
|
+
opt_ctx->loss = ggml_sum(ctx_results, opt_ctx->loss);
|
413
|
+
ggml_set_name(opt_ctx->loss, "loss_sum_squared_error");
|
414
|
+
const float scale = 1.0f / (opt_ctx->opt_period * ggml_nelements(opt_ctx->outputs));
|
415
|
+
opt_ctx->loss = ggml_scale(ctx_results, opt_ctx->loss, scale);
|
416
|
+
ggml_set_name(opt_ctx->loss, "loss_mean_squared_error");
|
417
|
+
opt_ctx->loss_per_datapoint = true;
|
418
|
+
break;
|
419
|
+
}
|
420
|
+
}
|
421
|
+
ggml_set_output(opt_ctx->loss);
|
422
|
+
ggml_set_loss(opt_ctx->loss);
|
423
|
+
ggml_build_forward_expand(opt_ctx->gf, opt_ctx->loss);
|
424
|
+
|
425
|
+
if (opt_ctx->loss_type == GGML_OPT_LOSS_TYPE_CROSS_ENTROPY) {
|
426
|
+
opt_ctx->pred = ggml_argmax(ctx_results, opt_ctx->outputs);
|
427
|
+
ggml_set_name(opt_ctx->pred, "pred");
|
428
|
+
ggml_set_output(opt_ctx->pred);
|
429
|
+
ggml_build_forward_expand(opt_ctx->gf, opt_ctx->pred);
|
430
|
+
|
431
|
+
opt_ctx->ncorrect = ggml_count_equal(ctx_results, opt_ctx->pred, ggml_argmax(ctx_results, opt_ctx->labels));
|
432
|
+
ggml_set_name(opt_ctx->ncorrect, "ncorrect");
|
433
|
+
ggml_set_output(opt_ctx->ncorrect);
|
434
|
+
ggml_build_forward_expand(opt_ctx->gf, opt_ctx->ncorrect);
|
435
|
+
}
|
436
|
+
|
437
|
+
if (opt_ctx->buf_static) {
|
438
|
+
if (opt_ctx->build_type == GGML_OPT_BUILD_TYPE_FORWARD) {
|
439
|
+
return;
|
440
|
+
}
|
441
|
+
} else if (opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_FORWARD) {
|
442
|
+
opt_ctx->buf_static = ggml_backend_alloc_ctx_tensors(
|
443
|
+
opt_ctx->ctx_static, ggml_backend_sched_get_backend(opt_ctx->backend_sched, 0));
|
444
|
+
return;
|
445
|
+
}
|
446
|
+
|
447
|
+
if (opt_ctx->grad_accs.empty()) {
|
448
|
+
GGML_ASSERT(opt_ctx->build_type_alloc >= GGML_OPT_BUILD_TYPE_GRAD);
|
449
|
+
|
450
|
+
const int n_nodes = opt_ctx->gf->n_nodes;
|
451
|
+
opt_ctx->grad_accs.resize(n_nodes);
|
452
|
+
for (int i = 0; i < n_nodes; ++i) {
|
453
|
+
ggml_tensor * node = opt_ctx->gf->nodes[i];
|
454
|
+
if ((accumulate && (node->flags & GGML_TENSOR_FLAG_PARAM)) || (node->flags & GGML_TENSOR_FLAG_LOSS)) {
|
455
|
+
opt_ctx->grad_accs[i] = ggml_new_tensor(opt_ctx->ctx_static, GGML_TYPE_F32, GGML_MAX_DIMS, node->ne);
|
456
|
+
} else {
|
457
|
+
opt_ctx->grad_accs[i] = nullptr;
|
458
|
+
}
|
459
|
+
}
|
460
|
+
|
461
|
+
if (opt_ctx->build_type_alloc >= GGML_OPT_BUILD_TYPE_OPT) {
|
462
|
+
opt_ctx->grad_m.resize(n_nodes);
|
463
|
+
opt_ctx->grad_v.resize(n_nodes);
|
464
|
+
for (int i = 0; i < n_nodes; ++i) {
|
465
|
+
ggml_tensor * node = opt_ctx->gf->nodes[i];
|
466
|
+
if (node->flags & GGML_TENSOR_FLAG_PARAM) {
|
467
|
+
opt_ctx->grad_m[i] = ggml_new_tensor(opt_ctx->ctx_static, GGML_TYPE_F32, GGML_MAX_DIMS, node->ne);
|
468
|
+
opt_ctx->grad_v[i] = ggml_new_tensor(opt_ctx->ctx_static, GGML_TYPE_F32, GGML_MAX_DIMS, node->ne);
|
469
|
+
} else {
|
470
|
+
opt_ctx->grad_m[i] = nullptr;
|
471
|
+
opt_ctx->grad_v[i] = nullptr;
|
472
|
+
}
|
473
|
+
}
|
474
|
+
}
|
475
|
+
}
|
476
|
+
|
477
|
+
// gb_grad == graph backward gradients, forward pass, then backward pass to calculate gradients.
|
478
|
+
opt_ctx->gb_grad = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gf, /*force_grads =*/ true);
|
479
|
+
ggml_build_backward_expand(opt_ctx->ctx_compute, opt_ctx->gb_grad, opt_ctx->grad_accs.data());
|
480
|
+
|
481
|
+
if (opt_ctx->buf_static) {
|
482
|
+
if (opt_ctx->build_type == GGML_OPT_BUILD_TYPE_GRAD) {
|
483
|
+
return;
|
484
|
+
}
|
485
|
+
} else if (opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_GRAD) {
|
486
|
+
opt_ctx->buf_static = ggml_backend_alloc_ctx_tensors(opt_ctx->ctx_static, ggml_backend_sched_get_backend(opt_ctx->backend_sched, 0));
|
487
|
+
ggml_graph_reset(opt_ctx->gb_grad);
|
488
|
+
}
|
489
|
+
|
490
|
+
GGML_ASSERT(opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_OPT);
|
491
|
+
|
492
|
+
// gb_opt == graph backward optimize, forward pass, then backward pass to calculate gradients, then optimizer step.
|
493
|
+
opt_ctx->gb_opt = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gb_grad, /*force_grads =*/ true);
|
494
|
+
|
495
|
+
opt_ctx->adamw_params = ggml_new_tensor_1d(opt_ctx->ctx_cpu, GGML_TYPE_F32, 7);
|
496
|
+
ggml_set_input(opt_ctx->adamw_params);
|
497
|
+
ggml_set_name(opt_ctx->adamw_params, "adamw_params");
|
498
|
+
|
499
|
+
for (int i = opt_ctx->gf->n_nodes-1; i >= 0; --i) {
|
500
|
+
struct ggml_tensor * node = opt_ctx->gb_opt->nodes[i];
|
501
|
+
struct ggml_tensor * grad = ggml_graph_get_grad(opt_ctx->gb_opt, node);
|
502
|
+
|
503
|
+
if (grad && (node->flags & GGML_TENSOR_FLAG_PARAM)) {
|
504
|
+
struct ggml_tensor * m = opt_ctx->grad_m[i];
|
505
|
+
struct ggml_tensor * v = opt_ctx->grad_v[i];
|
506
|
+
struct ggml_tensor * opt_step = ggml_opt_step_adamw(opt_ctx->ctx_compute, node, grad, m, v, opt_ctx->adamw_params);
|
507
|
+
|
508
|
+
ggml_set_name(m, (std::string("AdamW m for ") + std::string(node->name)).c_str());
|
509
|
+
ggml_set_name(v, (std::string("AdamW v for ") + std::string(node->name)).c_str());
|
510
|
+
ggml_set_name(opt_step, (std::string("AdamW step for ") + std::string(node->name)).c_str());
|
511
|
+
|
512
|
+
ggml_build_forward_expand(opt_ctx->gb_opt, opt_step);
|
513
|
+
}
|
514
|
+
}
|
515
|
+
|
516
|
+
if (!opt_ctx->buf_static) {
|
517
|
+
opt_ctx->buf_static = ggml_backend_alloc_ctx_tensors(
|
518
|
+
opt_ctx->ctx_static, ggml_backend_sched_get_backend(opt_ctx->backend_sched, 0));
|
519
|
+
ggml_graph_reset(opt_ctx->gb_opt);
|
520
|
+
}
|
521
|
+
|
522
|
+
opt_ctx->buf_cpu = ggml_backend_alloc_ctx_tensors_from_buft(opt_ctx->ctx_cpu, ggml_backend_cpu_buffer_type());
|
523
|
+
}
|
524
|
+
|
525
|
+
ggml_opt_context_t ggml_opt_init(struct ggml_opt_params params) {
|
526
|
+
ggml_opt_context_t result = new struct ggml_opt_context;
|
527
|
+
result->backend_sched = params.backend_sched;
|
528
|
+
result->ctx_compute = params.ctx_compute;
|
529
|
+
result->loss_type = params.loss_type;
|
530
|
+
result->build_type = params.build_type;
|
531
|
+
result->build_type_alloc = params.build_type;
|
532
|
+
result->inputs = params.inputs;
|
533
|
+
result->outputs = params.outputs;
|
534
|
+
result->opt_period = params.opt_period;
|
535
|
+
result->get_opt_pars = params.get_opt_pars;
|
536
|
+
result->get_opt_pars_ud = params.get_opt_pars_ud;
|
537
|
+
|
538
|
+
GGML_ASSERT(result->opt_period >= 1);
|
539
|
+
|
540
|
+
result->static_graphs = result->ctx_compute;
|
541
|
+
|
542
|
+
if (!result->static_graphs) {
|
543
|
+
GGML_ASSERT(!result->inputs);
|
544
|
+
GGML_ASSERT(!result->outputs);
|
545
|
+
return result;
|
546
|
+
}
|
547
|
+
|
548
|
+
GGML_ASSERT(result->inputs);
|
549
|
+
GGML_ASSERT(result->outputs);
|
550
|
+
|
551
|
+
result->gf = ggml_new_graph_custom(result->ctx_compute, GGML_DEFAULT_GRAPH_SIZE, /*grads =*/ true); // Forward pass.
|
552
|
+
ggml_build_forward_expand(result->gf, result->outputs);
|
553
|
+
|
554
|
+
ggml_opt_build(result);
|
555
|
+
|
556
|
+
return result;
|
557
|
+
}
|
558
|
+
|
559
|
+
void ggml_opt_free(ggml_opt_context_t opt_ctx) {
|
560
|
+
if (opt_ctx == nullptr) {
|
561
|
+
return;
|
562
|
+
}
|
563
|
+
ggml_backend_buffer_free(opt_ctx->buf_static);
|
564
|
+
ggml_backend_buffer_free(opt_ctx->buf_cpu);
|
565
|
+
ggml_free(opt_ctx->ctx_static);
|
566
|
+
ggml_free(opt_ctx->ctx_cpu);
|
567
|
+
delete opt_ctx;
|
568
|
+
}
|
569
|
+
|
570
|
+
void ggml_opt_reset(ggml_opt_context_t opt_ctx, bool optimizer) {
|
571
|
+
if (optimizer) {
|
572
|
+
ggml_graph_reset(opt_ctx->gb_opt);
|
573
|
+
opt_ctx->iter = 1;
|
574
|
+
} else {
|
575
|
+
ggml_graph_reset(opt_ctx->gb_grad);
|
576
|
+
}
|
577
|
+
}
|
578
|
+
|
579
|
+
bool ggml_opt_static_graphs(ggml_opt_context_t opt_ctx) {
|
580
|
+
return opt_ctx->static_graphs;
|
581
|
+
}
|
582
|
+
|
583
|
+
struct ggml_tensor * ggml_opt_inputs(ggml_opt_context_t opt_ctx) {
|
584
|
+
return opt_ctx->inputs;
|
585
|
+
}
|
586
|
+
|
587
|
+
struct ggml_tensor * ggml_opt_outputs(ggml_opt_context_t opt_ctx) {
|
588
|
+
return opt_ctx->outputs;
|
589
|
+
}
|
590
|
+
|
591
|
+
struct ggml_tensor * ggml_opt_labels(ggml_opt_context_t opt_ctx) {
|
592
|
+
return opt_ctx->labels;
|
593
|
+
}
|
594
|
+
|
595
|
+
struct ggml_tensor * ggml_opt_loss(ggml_opt_context_t opt_ctx) {
|
596
|
+
return opt_ctx->loss;
|
597
|
+
}
|
598
|
+
|
599
|
+
struct ggml_tensor * ggml_opt_pred(ggml_opt_context_t opt_ctx) {
|
600
|
+
return opt_ctx->pred;
|
601
|
+
}
|
602
|
+
|
603
|
+
struct ggml_tensor * ggml_opt_ncorrect(ggml_opt_context_t opt_ctx) {
|
604
|
+
return opt_ctx->ncorrect;
|
605
|
+
}
|
606
|
+
|
607
|
+
struct ggml_tensor * ggml_opt_grad_acc(ggml_opt_context_t opt_ctx, struct ggml_tensor * node) {
|
608
|
+
return ggml_graph_get_grad_acc(opt_ctx->gb_opt, node);
|
609
|
+
}
|
610
|
+
|
611
|
+
// ====== Optimization Result ======
|
612
|
+
|
613
|
+
ggml_opt_result_t ggml_opt_result_init() {
|
614
|
+
return new ggml_opt_result;
|
615
|
+
}
|
616
|
+
|
617
|
+
void ggml_opt_result_free(ggml_opt_result_t result) {
|
618
|
+
delete result;
|
619
|
+
}
|
620
|
+
|
621
|
+
void ggml_opt_result_reset(ggml_opt_result_t result) {
|
622
|
+
result->ndata = 0;
|
623
|
+
result->loss.clear();
|
624
|
+
result->pred.clear();
|
625
|
+
result->ncorrect = 0;
|
626
|
+
}
|
627
|
+
|
628
|
+
void ggml_opt_result_ndata(ggml_opt_result_t result, int64_t * ndata) {
|
629
|
+
*ndata = result->ndata;
|
630
|
+
}
|
631
|
+
|
632
|
+
void ggml_opt_result_loss(ggml_opt_result_t result, double * loss, double * unc) {
|
633
|
+
const int64_t nbatches = result->loss.size(); // Number of physical batches.
|
634
|
+
|
635
|
+
if (nbatches == 0) {
|
636
|
+
*loss = 0.0;
|
637
|
+
*unc = NAN;
|
638
|
+
return;
|
639
|
+
}
|
640
|
+
|
641
|
+
double sum = 0.0;
|
642
|
+
double sum_squared = 0.0;
|
643
|
+
|
644
|
+
for (const float & loss : result->loss) {
|
645
|
+
// If the loss is per datapoint it was scaled by 1.0f/opt_period for each physical batch.
|
646
|
+
const float loss_scaled = result->loss_per_datapoint ? loss*result->opt_period : loss;
|
647
|
+
sum += loss_scaled;
|
648
|
+
sum_squared += loss_scaled*loss_scaled;
|
649
|
+
}
|
650
|
+
|
651
|
+
const double mean = sum/nbatches;
|
652
|
+
*loss = result->loss_per_datapoint ? mean : sum;
|
653
|
+
|
654
|
+
if (!unc) {
|
655
|
+
return;
|
656
|
+
}
|
657
|
+
|
658
|
+
if (nbatches < 2) {
|
659
|
+
*unc = NAN;
|
660
|
+
return;
|
661
|
+
}
|
662
|
+
|
663
|
+
const double var_sum = sum_squared/nbatches - mean*mean; // variance without Bessel's correction, i.e. nbatches/(nbatches-1)
|
664
|
+
*unc = result->loss_per_datapoint ? sqrt(var_sum / (nbatches - 1)) : sqrt(var_sum * nbatches/(nbatches - 1));
|
665
|
+
}
|
666
|
+
|
667
|
+
void ggml_opt_result_pred(ggml_opt_result_t result, int32_t * pred) {
|
668
|
+
for (size_t i = 0; i < result->pred.size(); ++i) {
|
669
|
+
pred[i] = result->pred[i];
|
670
|
+
}
|
671
|
+
}
|
672
|
+
|
673
|
+
void ggml_opt_result_accuracy(ggml_opt_result_t result, double * accuracy, double * unc) {
|
674
|
+
*accuracy = result->ncorrect >= 0 ? double(result->ncorrect) / double(result->ndata) : NAN;
|
675
|
+
|
676
|
+
if (!unc) {
|
677
|
+
return;
|
678
|
+
}
|
679
|
+
|
680
|
+
*unc = result->ncorrect >= 0 && result->ndata >= 2 ?
|
681
|
+
sqrt((*accuracy) * (1.0 - (*accuracy)) / double(result->ndata - 1)) : NAN;
|
682
|
+
}
|
683
|
+
|
684
|
+
// ====== Computation ======
|
685
|
+
|
686
|
+
void ggml_opt_prepare_alloc(
|
687
|
+
ggml_opt_context_t opt_ctx,
|
688
|
+
struct ggml_context * ctx_compute,
|
689
|
+
struct ggml_cgraph * gf,
|
690
|
+
struct ggml_tensor * inputs,
|
691
|
+
struct ggml_tensor * outputs) {
|
692
|
+
GGML_ASSERT(!opt_ctx->static_graphs);
|
693
|
+
opt_ctx->ctx_compute = ctx_compute;
|
694
|
+
opt_ctx->gf = gf;
|
695
|
+
opt_ctx->inputs = inputs;
|
696
|
+
opt_ctx->outputs = outputs;
|
697
|
+
}
|
698
|
+
|
699
|
+
void ggml_opt_alloc(ggml_opt_context_t opt_ctx, bool backward) {
|
700
|
+
GGML_ASSERT(!opt_ctx->eval_ready);
|
701
|
+
if (opt_ctx->build_type == GGML_OPT_BUILD_TYPE_OPT && opt_ctx->opt_period > 1 && opt_ctx->opt_i == 0) {
|
702
|
+
ggml_graph_reset(opt_ctx->gb_grad);
|
703
|
+
}
|
704
|
+
if (backward) {
|
705
|
+
const int32_t opt_i_next = (opt_ctx->opt_i + 1) % opt_ctx->opt_period;
|
706
|
+
opt_ctx->build_type = opt_i_next == 0 ? GGML_OPT_BUILD_TYPE_OPT : GGML_OPT_BUILD_TYPE_GRAD;
|
707
|
+
} else {
|
708
|
+
opt_ctx->build_type = GGML_OPT_BUILD_TYPE_FORWARD;
|
709
|
+
}
|
710
|
+
|
711
|
+
if (!opt_ctx->static_graphs) {
|
712
|
+
ggml_opt_build(opt_ctx);
|
713
|
+
}
|
714
|
+
|
715
|
+
struct ggml_cgraph * graph = nullptr;
|
716
|
+
switch (opt_ctx->build_type) {
|
717
|
+
case GGML_OPT_BUILD_TYPE_FORWARD: {
|
718
|
+
graph = opt_ctx->gf;
|
719
|
+
} break;
|
720
|
+
case GGML_OPT_BUILD_TYPE_GRAD: {
|
721
|
+
graph = opt_ctx->gb_grad;
|
722
|
+
} break;
|
723
|
+
case GGML_OPT_BUILD_TYPE_OPT: {
|
724
|
+
graph = opt_ctx->gb_opt;
|
725
|
+
} break;
|
726
|
+
}
|
727
|
+
GGML_ASSERT(graph);
|
728
|
+
|
729
|
+
if (opt_ctx->allocated_graph == graph) {
|
730
|
+
opt_ctx->eval_ready = true;
|
731
|
+
return;
|
732
|
+
}
|
733
|
+
|
734
|
+
ggml_backend_sched_reset(opt_ctx->backend_sched); // clear allocation of previous graph
|
735
|
+
|
736
|
+
if (opt_ctx->static_graphs) {
|
737
|
+
ggml_init_params params = {
|
738
|
+
/*.mem_size =*/ graph->size*ggml_tensor_overhead() + ggml_graph_overhead_custom(graph->size, graph->grads),
|
739
|
+
/*.mem_buffer =*/ nullptr,
|
740
|
+
/*.no_alloc =*/ true,
|
741
|
+
};
|
742
|
+
ggml_free(opt_ctx->ctx_copy);
|
743
|
+
opt_ctx->ctx_copy = ggml_init(params);
|
744
|
+
|
745
|
+
opt_ctx->allocated_graph_copy = dup_graph(opt_ctx->ctx_copy, graph);
|
746
|
+
} else {
|
747
|
+
opt_ctx->allocated_graph_copy = graph;
|
748
|
+
}
|
749
|
+
|
750
|
+
ggml_backend_sched_alloc_graph(opt_ctx->backend_sched, opt_ctx->allocated_graph_copy);
|
751
|
+
opt_ctx->allocated_graph = graph;
|
752
|
+
|
753
|
+
opt_ctx->eval_ready = true;
|
754
|
+
}
|
755
|
+
|
756
|
+
void ggml_opt_eval(ggml_opt_context_t opt_ctx, ggml_opt_result_t result) {
|
757
|
+
GGML_ASSERT(opt_ctx->eval_ready);
|
758
|
+
if (opt_ctx->allocated_graph == opt_ctx->gb_opt) {
|
759
|
+
struct ggml_opt_optimizer_params opt_pars = opt_ctx->get_opt_pars(opt_ctx->get_opt_pars_ud);
|
760
|
+
|
761
|
+
GGML_ASSERT(opt_pars.adamw.alpha > 0.0f);
|
762
|
+
GGML_ASSERT(opt_pars.adamw.beta1 >= 0.0f);
|
763
|
+
GGML_ASSERT(opt_pars.adamw.beta1 <= 1.0f);
|
764
|
+
GGML_ASSERT(opt_pars.adamw.beta2 >= 0.0f);
|
765
|
+
GGML_ASSERT(opt_pars.adamw.beta2 <= 1.0f);
|
766
|
+
GGML_ASSERT(opt_pars.adamw.eps >= 0.0f);
|
767
|
+
GGML_ASSERT(opt_pars.adamw.wd >= 0.0f);
|
768
|
+
GGML_ASSERT(opt_pars.adamw.wd <= 1.0f);
|
769
|
+
|
770
|
+
// beta1, beta2 after applying warmup
|
771
|
+
const float beta1h = 1.0f/(1.0f - powf(opt_pars.adamw.beta1, opt_ctx->iter));
|
772
|
+
const float beta2h = 1.0f/(1.0f - powf(opt_pars.adamw.beta2, opt_ctx->iter));
|
773
|
+
|
774
|
+
float * adamw_par_data = ggml_get_data_f32(opt_ctx->adamw_params);
|
775
|
+
adamw_par_data[0] = opt_pars.adamw.alpha;
|
776
|
+
adamw_par_data[1] = opt_pars.adamw.beta1;
|
777
|
+
adamw_par_data[2] = opt_pars.adamw.beta2;
|
778
|
+
adamw_par_data[3] = opt_pars.adamw.eps;
|
779
|
+
adamw_par_data[4] = opt_pars.adamw.wd;
|
780
|
+
adamw_par_data[5] = beta1h;
|
781
|
+
adamw_par_data[6] = beta2h;
|
782
|
+
}
|
783
|
+
|
784
|
+
ggml_backend_sched_graph_compute(opt_ctx->backend_sched, opt_ctx->allocated_graph_copy);
|
785
|
+
opt_ctx->iter += opt_ctx->allocated_graph == opt_ctx->gb_opt;
|
786
|
+
opt_ctx->opt_i = (opt_ctx->opt_i + 1) % opt_ctx->opt_period;
|
787
|
+
|
788
|
+
if (!opt_ctx->static_graphs) {
|
789
|
+
opt_ctx->gf = nullptr;
|
790
|
+
opt_ctx->gb_grad = nullptr;
|
791
|
+
opt_ctx->gb_opt = nullptr;
|
792
|
+
opt_ctx->allocated_graph = nullptr;
|
793
|
+
opt_ctx->allocated_graph_copy = nullptr;
|
794
|
+
}
|
795
|
+
|
796
|
+
opt_ctx->eval_ready = false;
|
797
|
+
|
798
|
+
if (!result) {
|
799
|
+
return;
|
800
|
+
}
|
801
|
+
|
802
|
+
if (result->ndata == 0) {
|
803
|
+
result->loss_per_datapoint = opt_ctx->loss_per_datapoint;
|
804
|
+
result->opt_period = opt_ctx->opt_period;
|
805
|
+
} else {
|
806
|
+
GGML_ASSERT(result->loss_per_datapoint == opt_ctx->loss_per_datapoint);
|
807
|
+
GGML_ASSERT(result->opt_period == opt_ctx->opt_period);
|
808
|
+
}
|
809
|
+
|
810
|
+
const int64_t ndata = opt_ctx->outputs->ne[1];
|
811
|
+
GGML_ASSERT(result->ndata == ndata*int64_t(result->loss.size()) && "varying batch size not supported");
|
812
|
+
result->ndata += ndata;
|
813
|
+
|
814
|
+
GGML_ASSERT(ggml_is_scalar(opt_ctx->loss));
|
815
|
+
GGML_ASSERT(opt_ctx->loss->type == GGML_TYPE_F32);
|
816
|
+
float loss;
|
817
|
+
ggml_backend_tensor_get(opt_ctx->loss, &loss, 0, ggml_nbytes(opt_ctx->loss));
|
818
|
+
result->loss.push_back(loss);
|
819
|
+
|
820
|
+
if (opt_ctx->pred) {
|
821
|
+
GGML_ASSERT(opt_ctx->pred->type == GGML_TYPE_I32);
|
822
|
+
std::vector<int32_t> pred(ndata);
|
823
|
+
ggml_backend_tensor_get(opt_ctx->pred, pred.data(), 0, ggml_nbytes(opt_ctx->pred));
|
824
|
+
result->pred.insert(result->pred.end(), pred.begin(), pred.end());
|
825
|
+
}
|
826
|
+
|
827
|
+
if (!opt_ctx->ncorrect || result->ncorrect < 0) {
|
828
|
+
result->ncorrect = -1;
|
829
|
+
return;
|
830
|
+
}
|
831
|
+
|
832
|
+
GGML_ASSERT(ggml_is_scalar(opt_ctx->ncorrect));
|
833
|
+
GGML_ASSERT(opt_ctx->ncorrect->type == GGML_TYPE_I64);
|
834
|
+
int64_t ncorrect;
|
835
|
+
ggml_backend_tensor_get(opt_ctx->ncorrect, &ncorrect, 0, ggml_nbytes(opt_ctx->ncorrect));
|
836
|
+
result->ncorrect += ncorrect;
|
837
|
+
}
|
838
|
+
|
839
|
+
// ====== High-Level Functions ======
|
840
|
+
|
841
|
+
void ggml_opt_epoch(
|
842
|
+
ggml_opt_context_t opt_ctx,
|
843
|
+
ggml_opt_dataset_t dataset,
|
844
|
+
ggml_opt_result_t result_train,
|
845
|
+
ggml_opt_result_t result_eval,
|
846
|
+
int64_t idata_split,
|
847
|
+
ggml_opt_epoch_callback callback_train,
|
848
|
+
ggml_opt_epoch_callback callback_eval) {
|
849
|
+
GGML_ASSERT(ggml_opt_static_graphs(opt_ctx) && "ggml_opt_epoch requires static graphs");
|
850
|
+
struct ggml_tensor * inputs = ggml_opt_inputs(opt_ctx);
|
851
|
+
struct ggml_tensor * labels = ggml_opt_labels(opt_ctx);
|
852
|
+
struct ggml_tensor * data = ggml_opt_dataset_data(dataset);
|
853
|
+
GGML_ASSERT(data->ne[0] == inputs->ne[0]);
|
854
|
+
|
855
|
+
const int64_t ndata = data->ne[1];
|
856
|
+
const int64_t ndata_batch = inputs->ne[1];
|
857
|
+
|
858
|
+
GGML_ASSERT(data->ne[1] % inputs->ne[1] == 0);
|
859
|
+
const int64_t nbatches = ndata/ndata_batch;
|
860
|
+
|
861
|
+
idata_split = idata_split < 0 ? ndata : idata_split;
|
862
|
+
GGML_ASSERT(idata_split % ndata_batch == 0);
|
863
|
+
const int64_t ibatch_split = idata_split / ndata_batch;
|
864
|
+
|
865
|
+
int64_t ibatch = 0;
|
866
|
+
int64_t t_loop_start = ggml_time_us();
|
867
|
+
for (; ibatch < ibatch_split; ++ibatch) {
|
868
|
+
ggml_opt_alloc(opt_ctx, /*backward =*/ true);
|
869
|
+
ggml_opt_dataset_get_batch(dataset, inputs, labels, ibatch);
|
870
|
+
ggml_opt_eval(opt_ctx, result_train);
|
871
|
+
if (callback_train) {
|
872
|
+
callback_train(true, opt_ctx, dataset, result_train, ibatch+1, ibatch_split, t_loop_start);
|
873
|
+
}
|
874
|
+
}
|
875
|
+
t_loop_start = ggml_time_us();
|
876
|
+
for (; ibatch < nbatches; ++ibatch) {
|
877
|
+
ggml_opt_alloc(opt_ctx, /*backward =*/ false);
|
878
|
+
ggml_opt_dataset_get_batch(dataset, inputs, labels, ibatch);
|
879
|
+
ggml_opt_eval(opt_ctx, result_eval);
|
880
|
+
if (callback_eval) {
|
881
|
+
callback_eval(false, opt_ctx, dataset, result_eval, ibatch+1-ibatch_split, nbatches-ibatch_split, t_loop_start);
|
882
|
+
}
|
883
|
+
}
|
884
|
+
}
|
885
|
+
|
886
|
+
void ggml_opt_epoch_callback_progress_bar(
|
887
|
+
bool train,
|
888
|
+
ggml_opt_context_t opt_ctx,
|
889
|
+
ggml_opt_dataset_t dataset,
|
890
|
+
ggml_opt_result_t result,
|
891
|
+
int64_t ibatch,
|
892
|
+
int64_t ibatch_max,
|
893
|
+
int64_t t_start_us) {
|
894
|
+
fprintf(stderr, "%s[", train ? "train: " : "val: ");
|
895
|
+
|
896
|
+
// The progress bar consists of partially filled blocks, unicode has 8 separate fill levels.
|
897
|
+
constexpr int64_t bar_length = 8;
|
898
|
+
const int64_t ibatch8 = 8 * ibatch;
|
899
|
+
for (int64_t j = 0; j < bar_length; ++j) {
|
900
|
+
if (ibatch_max * (8*j + 8) / bar_length < ibatch8) {
|
901
|
+
fprintf(stderr, "\u2588"); // full block
|
902
|
+
} else if (ibatch_max * (8*j + 7) / bar_length < ibatch8) {
|
903
|
+
fprintf(stderr, "\u2589"); // 7/8 filled
|
904
|
+
} else if (ibatch_max * (8*j + 6) / bar_length < ibatch8) {
|
905
|
+
fprintf(stderr, "\u258A"); // 6/8 filled
|
906
|
+
} else if (ibatch_max * (8*j + 5) / bar_length < ibatch8) {
|
907
|
+
fprintf(stderr, "\u258B"); // 5/8 filled
|
908
|
+
} else if (ibatch_max * (8*j + 4) / bar_length < ibatch8) {
|
909
|
+
fprintf(stderr, "\u258C"); // 4/8 filled
|
910
|
+
} else if (ibatch_max * (8*j + 3) / bar_length < ibatch8) {
|
911
|
+
fprintf(stderr, "\u258D"); // 3/8 filled
|
912
|
+
} else if (ibatch_max * (8*j + 2) / bar_length < ibatch8) {
|
913
|
+
fprintf(stderr, "\u258E"); // 2/8 filled
|
914
|
+
} else if (ibatch_max * (8*j + 1) / bar_length < ibatch8) {
|
915
|
+
fprintf(stderr, "\u258F"); // 1/8 filled
|
916
|
+
} else {
|
917
|
+
fprintf(stderr, " ");
|
918
|
+
}
|
919
|
+
}
|
920
|
+
|
921
|
+
const int64_t batch_size = ggml_opt_inputs(opt_ctx)->ne[1];
|
922
|
+
const int64_t idata = ibatch*batch_size;
|
923
|
+
const int64_t idata_max = ibatch_max*batch_size;
|
924
|
+
|
925
|
+
double loss;
|
926
|
+
double loss_unc;
|
927
|
+
ggml_opt_result_loss(result, &loss, &loss_unc);
|
928
|
+
|
929
|
+
double accuracy;
|
930
|
+
double accuracy_unc;
|
931
|
+
ggml_opt_result_accuracy(result, &accuracy, &accuracy_unc);
|
932
|
+
|
933
|
+
const int64_t t_ibatch_us = ggml_time_us() - t_start_us;
|
934
|
+
int64_t t_ibatch_s = t_ibatch_us / 1000000;
|
935
|
+
const int64_t t_ibatch_h = t_ibatch_s / 3600;
|
936
|
+
t_ibatch_s -= t_ibatch_h * 3600;
|
937
|
+
const int64_t t_ibatch_m = t_ibatch_s / 60;
|
938
|
+
t_ibatch_s -= t_ibatch_m * 60;
|
939
|
+
|
940
|
+
const int64_t t_eta_us = t_ibatch_us * (ibatch_max - ibatch)/ibatch;
|
941
|
+
int64_t t_eta_s = t_eta_us / 1000000;
|
942
|
+
const int64_t t_eta_h = t_eta_s / 3600;
|
943
|
+
t_eta_s -= t_eta_h * 3600;
|
944
|
+
const int64_t t_eta_m = t_eta_s / 60;
|
945
|
+
t_eta_s -= t_eta_m * 60;
|
946
|
+
|
947
|
+
fprintf(stderr, "] data=%07" PRId64 "/%07" PRId64 " loss=%.5lf±%.5lf acc=%.2lf±%.2lf%% "
|
948
|
+
"t=%02" PRId64 ":%02" PRId64 ":%02" PRId64 " ETA=%02" PRId64 ":%02" PRId64 ":%02" PRId64 " \r",
|
949
|
+
idata, idata_max, loss, loss_unc, 100.0*accuracy, 100.0*accuracy_unc,
|
950
|
+
t_ibatch_h, t_ibatch_m, t_ibatch_s, t_eta_h, t_eta_m, t_eta_s);
|
951
|
+
if (ibatch == ibatch_max) {
|
952
|
+
fprintf(stderr, "\n");
|
953
|
+
}
|
954
|
+
fflush(stderr);
|
955
|
+
|
956
|
+
GGML_UNUSED(dataset);
|
957
|
+
}
|
958
|
+
|
959
|
+
void ggml_opt_fit(
|
960
|
+
ggml_backend_sched_t backend_sched,
|
961
|
+
ggml_context * ctx_compute,
|
962
|
+
ggml_tensor * inputs,
|
963
|
+
ggml_tensor * outputs,
|
964
|
+
ggml_opt_dataset_t dataset,
|
965
|
+
enum ggml_opt_loss_type loss_type,
|
966
|
+
ggml_opt_get_optimizer_params get_opt_pars,
|
967
|
+
int64_t nepoch,
|
968
|
+
int64_t nbatch_logical,
|
969
|
+
float val_split,
|
970
|
+
bool silent) {
|
971
|
+
ggml_time_init();
|
972
|
+
const int64_t t_start_us = ggml_time_us();
|
973
|
+
|
974
|
+
const int64_t ndata = ggml_opt_dataset_data(dataset)->ne[1];
|
975
|
+
const int64_t nbatch_physical = inputs->ne[1];
|
976
|
+
GGML_ASSERT(ndata % nbatch_logical == 0);
|
977
|
+
GGML_ASSERT(nbatch_logical % nbatch_physical == 0);
|
978
|
+
|
979
|
+
const int64_t opt_period = nbatch_logical / nbatch_physical;
|
980
|
+
const int64_t nbatches_logical = ndata / nbatch_logical;
|
981
|
+
|
982
|
+
GGML_ASSERT(val_split >= 0.0f);
|
983
|
+
GGML_ASSERT(val_split < 1.0f);
|
984
|
+
const int64_t ibatch_split = int64_t(((1.0f - val_split) * nbatches_logical)) * opt_period; // train <-> val split index (physical)
|
985
|
+
const int64_t idata_split = ibatch_split * nbatch_physical;
|
986
|
+
|
987
|
+
int64_t epoch = 1;
|
988
|
+
|
989
|
+
ggml_opt_params params = ggml_opt_default_params(backend_sched, loss_type);
|
990
|
+
params.ctx_compute = ctx_compute;
|
991
|
+
params.inputs = inputs;
|
992
|
+
params.outputs = outputs;
|
993
|
+
params.opt_period = opt_period;
|
994
|
+
params.get_opt_pars = get_opt_pars;
|
995
|
+
params.get_opt_pars_ud = &epoch;
|
996
|
+
ggml_opt_context_t opt_ctx = ggml_opt_init(params);
|
997
|
+
|
998
|
+
// Shuffling the data is generally useful but there is only a point if not all data is used in a single batch.
|
999
|
+
if (nbatch_logical < ndata) {
|
1000
|
+
ggml_opt_dataset_shuffle(opt_ctx, dataset, -1); // Shuffle all data (train + validation).
|
1001
|
+
}
|
1002
|
+
|
1003
|
+
ggml_opt_result_t result_train = ggml_opt_result_init();
|
1004
|
+
ggml_opt_result_t result_val = ggml_opt_result_init();
|
1005
|
+
|
1006
|
+
ggml_opt_epoch_callback epoch_callback = silent ? nullptr : ggml_opt_epoch_callback_progress_bar;
|
1007
|
+
|
1008
|
+
for (; epoch <= nepoch; ++epoch) {
|
1009
|
+
if (nbatch_logical < idata_split) {
|
1010
|
+
ggml_opt_dataset_shuffle(opt_ctx, dataset, idata_split);
|
1011
|
+
}
|
1012
|
+
|
1013
|
+
ggml_opt_result_reset(result_train);
|
1014
|
+
ggml_opt_result_reset(result_val);
|
1015
|
+
|
1016
|
+
if (!silent) {
|
1017
|
+
fprintf(stderr, "%s: epoch %04" PRId64 "/%04" PRId64 ":\n", __func__, epoch, nepoch);
|
1018
|
+
}
|
1019
|
+
ggml_opt_epoch(opt_ctx, dataset, result_train, result_val, idata_split, epoch_callback, epoch_callback);
|
1020
|
+
if (!silent) {
|
1021
|
+
fprintf(stderr, "\n");
|
1022
|
+
}
|
1023
|
+
}
|
1024
|
+
|
1025
|
+
if (!silent) {
|
1026
|
+
int64_t t_total_s = (ggml_time_us() - t_start_us) / 1000000;
|
1027
|
+
const int64_t t_total_h = t_total_s / 3600;
|
1028
|
+
t_total_s -= t_total_h * 3600;
|
1029
|
+
const int64_t t_total_m = t_total_s / 60;
|
1030
|
+
t_total_s -= t_total_m * 60;
|
1031
|
+
fprintf(stderr, "%s: training took %02" PRId64 ":%02" PRId64 ":%02" PRId64 "\n", __func__, t_total_h, t_total_m, t_total_s);
|
1032
|
+
}
|
1033
|
+
|
1034
|
+
ggml_opt_free(opt_ctx);
|
1035
|
+
ggml_opt_result_free(result_train);
|
1036
|
+
ggml_opt_result_free(result_val);
|
1037
|
+
}
|