llama_cpp 0.17.4 → 0.17.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 9a6696cb7ae4dbc86334ee03a1b46040df4bc0753e5ead6e8bd90c43e0823f3a
- data.tar.gz: 7ebd54912d313684179f82cc45762b12986729d2bded48801349644e5b6b40fb
+ metadata.gz: 2fb08459b6c1a756620eb86acd35d299f132b65e60256306e24519e5d6b32bdf
+ data.tar.gz: e4b7b5fea109d7cae0bb5d7fc96d62644758a62870dd37e60b6815599f8b7d12
  SHA512:
- metadata.gz: 488f3d7646fa13ab0954ba61889848d6ae23a64856891061428a39d7c182024aad57bc9a9990d5fd5476f26a89569afde517997dac1fc76dda2ccc927141a067
- data.tar.gz: b0300223f2034e8460f86a3c8e8f6222d2df446ca925554bcf56d72400c3b2a38c9375b4c7a15f32f1288a0ad2bab7f24d2ec3176dd0bc65ad8cc7debf710ab6
+ metadata.gz: af042da6c45c285bb707506111b6f18b37a7b57aebcae18ba392d5d800a72a578dd6dde8e5aa0e572385fd70005ad966e2d6e5a18107e5a2cc277e94b0ad1fd3
+ data.tar.gz: 8e275fb664111d08f1228c2853146aed58a51f9263ccd497a0dfd67f199595aa793fb9531873e417101c321d30f1c772418e6671a4b8548e4e6069a878e56eaf
data/CHANGELOG.md CHANGED
@@ -1,3 +1,11 @@
+ ## [[0.17.5](https://github.com/yoshoku/llama_cpp.rb/compare/v0.17.4...v0.17.5)] - 2024-08-03
+
+ - Change supported llama.cpp version to b3482.
+ - Add `LLAMA_VOCAB_PRE_TYPE_SMOLLM` and `LLAMA_VOCAB_PRE_TYPE_CODESHELL` constants.
+ - Change to call llama_grammar_sample and llama_grammar_accept_token functions instead of deprecated functions.
+
+ Implementation binding for llama_lora_adapter_clear has been skipped.
+
  ## [[0.17.4](https://github.com/yoshoku/llama_cpp.rb/compare/v0.17.3...v0.17.4)] - 2024-07-27

  - Change supported llama.cpp version to b3436.
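
The grammar bullet in the 0.17.5 entry above corresponds to llama.cpp b3482 replacing the deprecated llama_sample_grammar call with grammar-first signatures. A minimal sketch of the new call order, assuming the b3482 llama.h declarations; the helper name sample_with_grammar and its parameters are illustrative only and are not part of the gem:

```cpp
#include "llama.h"

// Illustrative helper, not gem code: shows the b3482 argument order used in
// the extension hunks below (grammar first, then context).
static void sample_with_grammar(struct llama_context* ctx,
                                struct llama_grammar* grammar,
                                llama_token_data_array* candidates,
                                llama_token accepted) {
  // Deprecated pre-b3482 form: llama_sample_grammar(ctx, candidates, grammar);
  llama_grammar_sample(grammar, ctx, candidates);

  // Deprecated pre-b3482 form: llama_grammar_accept_token(ctx, grammar, accepted);
  llama_grammar_accept_token(grammar, ctx, accepted);
}
```
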
@@ -3378,7 +3378,7 @@ private:
  }
  LLaMAGrammarWrapper* grm_ptr = RbLLaMAGrammar::get_llama_grammar(kw_values[0]);

- llama_sample_grammar(ctx_ptr->ctx, &(cnd_ptr->array), grm_ptr->grammar);
+ llama_grammar_sample(grm_ptr->grammar, ctx_ptr->ctx, &(cnd_ptr->array));

  return Qnil;
  }
@@ -3407,7 +3407,7 @@ private:
  LLaMAGrammarWrapper* grm_ptr = RbLLaMAGrammar::get_llama_grammar(kw_values[0]);
  llama_token token = NUM2INT(kw_values[1]);

- llama_grammar_accept_token(ctx_ptr->ctx, grm_ptr->grammar, token);
+ llama_grammar_accept_token(grm_ptr->grammar, ctx_ptr->ctx, token);

  return Qnil;
  }
@@ -3621,6 +3621,8 @@ extern "C" void Init_llama_cpp(void) {
  rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_VIKING", INT2NUM(LLAMA_VOCAB_PRE_TYPE_VIKING));
  rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_JAIS", INT2NUM(LLAMA_VOCAB_PRE_TYPE_JAIS));
  rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_TEKKEN", INT2NUM(LLAMA_VOCAB_PRE_TYPE_TEKKEN));
+ rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_SMOLLM", INT2NUM(LLAMA_VOCAB_PRE_TYPE_SMOLLM));
+ rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_CODESHELL", INT2NUM(LLAMA_VOCAB_PRE_TYPE_CODESHELL));

  rb_define_const(rb_mLLaMACpp, "LLAMA_TOKEN_TYPE_UNDEFINED", INT2NUM(LLAMA_TOKEN_TYPE_UNDEFINED));
  rb_define_const(rb_mLLaMACpp, "LLAMA_TOKEN_TYPE_NORMAL", INT2NUM(LLAMA_TOKEN_TYPE_NORMAL));
@@ -3,8 +3,8 @@
  # llama_cpp.rb provides Ruby bindings for the llama.cpp.
  module LLaMACpp
  # The version of llama_cpp.rb you install.
- VERSION = '0.17.4'
+ VERSION = '0.17.5'

  # The supported version of llama.cpp.
- LLAMA_CPP_VERSION = 'b3436'
+ LLAMA_CPP_VERSION = 'b3482'
  end
data/sig/llama_cpp.rbs CHANGED
@@ -38,6 +38,8 @@ module LLaMACpp
  LLAMA_VOCAB_PRE_TYPE_VIKING: Integer
  LLAMA_VOCAB_PRE_TYPE_JAIS: Integer
  LLAMA_VOCAB_PRE_TYPE_TEKKEN: Integer
+ LLAMA_VOCAB_PRE_TYPE_SMOLLM: Integer
+ LLAMA_VOCAB_PRE_TYPE_CODESHELL: Integer

  LLAMA_TOKEN_ATTR_UNDEFINED: Integer
  LLAMA_TOKEN_ATTR_UNKNOWN: Integer
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: llama_cpp
  version: !ruby/object:Gem::Version
- version: 0.17.4
+ version: 0.17.5
  platform: ruby
  authors:
  - yoshoku
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-07-27 00:00:00.000000000 Z
+ date: 2024-08-03 00:00:00.000000000 Z
  dependencies: []
  description: llama_cpp.rb provides Ruby bindings for the llama.cpp.
  email: