llama_cpp 0.20.4 → 0.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6acca29477a43c9703c7035e53acd69450de7103b2d7f242506c7e2016f1a261
4
- data.tar.gz: 77c108b2f1ea33588a9dbe6c0538e289f90ea5be2090adbf8f663fff8b8b0221
3
+ metadata.gz: 691ff52591a63387090485f1d322726b8dc5dc89630cdac4fe2bfcb3372da50d
4
+ data.tar.gz: c27f3a43878e787f32eaeaa2538d03dca4e2e3de0484ad2e920ec14826cf6ba5
5
5
  SHA512:
6
- metadata.gz: 69d453a3cf9c23ab3aaa60c6a20d80a7fc75424cb762c631a06712a2134fc7cf6830168241a3d689fd0b7b621804e27b6461415977fb51c096c10fbb2aa0e922
7
- data.tar.gz: bc8de61663616ffd40c90e34df71095a7c85e3aa3373fc0c395fe101ded4f38e1670af42aeb11c6380c6932d2939d137d299d377db3ce9dc0510fd56e9d8b7a5
6
+ metadata.gz: df049a84a78bb2d95cd4fe1f63f05c1bb9f965c0e5c3bfaca9668e98fca0db2eb8d80560ffa819540fa25b0a099a0b2a3feafc85a38d73632f3db79445d19a07
7
+ data.tar.gz: d9138045ba1d37dbaab919ea973ef81306dde30dd6d67048660108dd9f90fe1ffe671becd7f295afa2b01150d73269ac045474373c98ff5805ce35c582dd28d4
data/CHANGELOG.md CHANGED
@@ -1,3 +1,8 @@
1
+ ## [[0.21.0](https://github.com/yoshoku/llama_cpp.rb/compare/v0.20.4...v0.21.0)] - 2025-07-12
2
+
3
+ - Change supported llama.cpp version to b5870.
4
+ - Remove constants for `llama_vocab_pre_type` such as `LLAMA_VOCAB_PRE_TYPE_DEFAULT` and `LLAMA_VOCAB_PRE_TYPE_LLAMA3`.
5
+
1
6
  ## [[0.20.4](https://github.com/yoshoku/llama_cpp.rb/compare/v0.20.3...v0.20.4)] - 2025-06-21
2
7
 
3
8
  - Change supported llama.cpp version to b5720.
@@ -5,12 +10,10 @@
5
10
 
6
11
  ## [[0.20.3](https://github.com/yoshoku/llama_cpp.rb/compare/v0.20.2...v0.20.3)] - 2025-06-14
7
12
 
8
-
9
13
  - Change supported llama.cpp version to b5650
10
14
  - Add `data` argument to `llama_memory_clear` module function.
11
15
  - Fix llama_memory_t wrapper by removing unnecessary struct keyword and pointer symbol.
12
16
 
13
-
14
17
  ## [[0.20.2](https://github.com/yoshoku/llama_cpp.rb/compare/v0.20.1...v0.20.2)] - 2025-06-07
15
18
 
16
19
  - Change supported llama.cpp version to b5600
@@ -4154,44 +4154,6 @@ void Init_llama_cpp(void) {
4154
4154
  rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_TYPE_WPM", INT2NUM(LLAMA_VOCAB_TYPE_WPM));
4155
4155
  rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_TYPE_UGM", INT2NUM(LLAMA_VOCAB_TYPE_UGM));
4156
4156
  rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_TYPE_RWKV", INT2NUM(LLAMA_VOCAB_TYPE_RWKV));
4157
- /* llama_vocab_pre_type */
4158
- /* Document-const: LlamaCpp::LLAMA_VOCAB_PRE_TYPE_DEFAULT */
4159
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_DEFAULT", INT2NUM(LLAMA_VOCAB_PRE_TYPE_DEFAULT));
4160
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_LLAMA3", INT2NUM(LLAMA_VOCAB_PRE_TYPE_LLAMA3));
4161
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_DEEPSEEK_LLM", INT2NUM(LLAMA_VOCAB_PRE_TYPE_DEEPSEEK_LLM));
4162
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_DEEPSEEK_CODER", INT2NUM(LLAMA_VOCAB_PRE_TYPE_DEEPSEEK_CODER));
4163
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_FALCON", INT2NUM(LLAMA_VOCAB_PRE_TYPE_FALCON));
4164
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_MPT", INT2NUM(LLAMA_VOCAB_PRE_TYPE_MPT));
4165
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_STARCODER", INT2NUM(LLAMA_VOCAB_PRE_TYPE_STARCODER));
4166
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_GPT2", INT2NUM(LLAMA_VOCAB_PRE_TYPE_GPT2));
4167
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_REFACT", INT2NUM(LLAMA_VOCAB_PRE_TYPE_REFACT));
4168
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_COMMAND_R", INT2NUM(LLAMA_VOCAB_PRE_TYPE_COMMAND_R));
4169
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_STABLELM2", INT2NUM(LLAMA_VOCAB_PRE_TYPE_STABLELM2));
4170
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_QWEN2", INT2NUM(LLAMA_VOCAB_PRE_TYPE_QWEN2));
4171
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_OLMO", INT2NUM(LLAMA_VOCAB_PRE_TYPE_OLMO));
4172
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_DBRX", INT2NUM(LLAMA_VOCAB_PRE_TYPE_DBRX));
4173
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_SMAUG", INT2NUM(LLAMA_VOCAB_PRE_TYPE_SMAUG));
4174
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_PORO", INT2NUM(LLAMA_VOCAB_PRE_TYPE_PORO));
4175
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_CHATGLM3", INT2NUM(LLAMA_VOCAB_PRE_TYPE_CHATGLM3));
4176
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_CHATGLM4", INT2NUM(LLAMA_VOCAB_PRE_TYPE_CHATGLM4));
4177
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_VIKING", INT2NUM(LLAMA_VOCAB_PRE_TYPE_VIKING));
4178
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_JAIS", INT2NUM(LLAMA_VOCAB_PRE_TYPE_JAIS));
4179
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_TEKKEN", INT2NUM(LLAMA_VOCAB_PRE_TYPE_TEKKEN));
4180
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_SMOLLM", INT2NUM(LLAMA_VOCAB_PRE_TYPE_SMOLLM));
4181
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_CODESHELL", INT2NUM(LLAMA_VOCAB_PRE_TYPE_CODESHELL));
4182
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_BLOOM", INT2NUM(LLAMA_VOCAB_PRE_TYPE_BLOOM));
4183
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_GPT3_FINNISH", INT2NUM(LLAMA_VOCAB_PRE_TYPE_GPT3_FINNISH));
4184
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_EXAONE", INT2NUM(LLAMA_VOCAB_PRE_TYPE_EXAONE));
4185
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_CHAMELEON", INT2NUM(LLAMA_VOCAB_PRE_TYPE_CHAMELEON));
4186
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_MINERVA", INT2NUM(LLAMA_VOCAB_PRE_TYPE_MINERVA));
4187
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_DEEPSEEK3_LLM", INT2NUM(LLAMA_VOCAB_PRE_TYPE_DEEPSEEK3_LLM));
4188
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_GPT4O", INT2NUM(LLAMA_VOCAB_PRE_TYPE_GPT4O));
4189
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_SUPERBPE", INT2NUM(LLAMA_VOCAB_PRE_TYPE_SUPERBPE));
4190
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_TRILLION", INT2NUM(LLAMA_VOCAB_PRE_TYPE_TRILLION));
4191
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_BAILINGMOE", INT2NUM(LLAMA_VOCAB_PRE_TYPE_BAILINGMOE));
4192
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_LLAMA4", INT2NUM(LLAMA_VOCAB_PRE_TYPE_LLAMA4));
4193
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_PIXTRAL", INT2NUM(LLAMA_VOCAB_PRE_TYPE_PIXTRAL));
4194
- rb_define_const(rb_mLlamaCpp, "LLAMA_VOCAB_PRE_TYPE_SEED_CODER", INT2NUM(LLAMA_VOCAB_PRE_TYPE_SEED_CODER));
4195
4157
  /* llama_rope_type */
4196
4158
  /* Document-const: LlamaCpp::LLAMA_ROPE_TYPE_NONE */
4197
4159
  rb_define_const(rb_mLlamaCpp, "LLAMA_ROPE_TYPE_NONE", INT2NUM(LLAMA_ROPE_TYPE_NONE));
@@ -4895,6 +4857,7 @@ void Init_llama_cpp(void) {
4895
4857
  /* TODO: void* imatrix */
4896
4858
  /* TODO: void* kv_overrides */
4897
4859
  /* TODO: void* tensor_types */
4860
+ /* TODO: void* prune_layers */
4898
4861
 
4899
4862
  /**
4900
4863
  * Document-class: LlamaCpp::LlamaLogitBias
@@ -3,8 +3,8 @@
3
3
  # llama_cpp.rb provides Ruby bindings for the llama.cpp.
4
4
  module LlamaCpp
5
5
  # The version of llama_cpp.rb you install.
6
- VERSION = '0.20.4'
6
+ VERSION = '0.21.0'
7
7
 
8
8
  # The supported version of llama.cpp.
9
- LLAMA_CPP_VERSION = 'b5720'
9
+ LLAMA_CPP_VERSION = 'b5870'
10
10
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llama_cpp
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.20.4
4
+ version: 0.21.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - yoshoku