llama_cpp 0.17.4 → 0.17.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/ext/llama_cpp/llama_cpp.cpp +4 -2
- data/lib/llama_cpp/version.rb +2 -2
- data/sig/llama_cpp.rbs +2 -0
- metadata +2 -2
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 2fb08459b6c1a756620eb86acd35d299f132b65e60256306e24519e5d6b32bdf
|
|
4
|
+
data.tar.gz: e4b7b5fea109d7cae0bb5d7fc96d62644758a62870dd37e60b6815599f8b7d12
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: af042da6c45c285bb707506111b6f18b37a7b57aebcae18ba392d5d800a72a578dd6dde8e5aa0e572385fd70005ad966e2d6e5a18107e5a2cc277e94b0ad1fd3
|
|
7
|
+
data.tar.gz: 8e275fb664111d08f1228c2853146aed58a51f9263ccd497a0dfd67f199595aa793fb9531873e417101c321d30f1c772418e6671a4b8548e4e6069a878e56eaf
|
data/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,11 @@
|
|
|
1
|
+
## [[0.17.5](https://github.com/yoshoku/llama_cpp.rb/compare/v0.17.4...v0.17.5)] - 2024-08-03
|
|
2
|
+
|
|
3
|
+
- Change supported llama.cpp version to b3482.
|
|
4
|
+
- Add `LLAMA_VOCAB_PRE_TYPE_SMOLLM` and `LLAMA_VOCAB_PRE_TYPE_CODESHELL` constants.
|
|
5
|
+
- Change to call llama_grammar_sample and llama_grammar_accept_token functions instead of deprecated functions.
|
|
6
|
+
|
|
7
|
+
Implementation binding for llama_lora_adapter_clear has been skipped.
|
|
8
|
+
|
|
1
9
|
## [[0.17.4](https://github.com/yoshoku/llama_cpp.rb/compare/v0.17.3...v0.17.4)] - 2024-07-27
|
|
2
10
|
|
|
3
11
|
- Change supported llama.cpp version to b3436.
|
data/ext/llama_cpp/llama_cpp.cpp
CHANGED
|
@@ -3378,7 +3378,7 @@ private:
|
|
|
3378
3378
|
}
|
|
3379
3379
|
LLaMAGrammarWrapper* grm_ptr = RbLLaMAGrammar::get_llama_grammar(kw_values[0]);
|
|
3380
3380
|
|
|
3381
|
-
|
|
3381
|
+
llama_grammar_sample(grm_ptr->grammar, ctx_ptr->ctx, &(cnd_ptr->array));
|
|
3382
3382
|
|
|
3383
3383
|
return Qnil;
|
|
3384
3384
|
}
|
|
@@ -3407,7 +3407,7 @@ private:
|
|
|
3407
3407
|
LLaMAGrammarWrapper* grm_ptr = RbLLaMAGrammar::get_llama_grammar(kw_values[0]);
|
|
3408
3408
|
llama_token token = NUM2INT(kw_values[1]);
|
|
3409
3409
|
|
|
3410
|
-
llama_grammar_accept_token(ctx_ptr->ctx, grm_ptr->grammar, token);
|
|
3410
|
+
llama_grammar_accept_token(grm_ptr->grammar, ctx_ptr->ctx, token);
|
|
3411
3411
|
|
|
3412
3412
|
return Qnil;
|
|
3413
3413
|
}
|
|
@@ -3621,6 +3621,8 @@ extern "C" void Init_llama_cpp(void) {
|
|
|
3621
3621
|
rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_VIKING", INT2NUM(LLAMA_VOCAB_PRE_TYPE_VIKING));
|
|
3622
3622
|
rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_JAIS", INT2NUM(LLAMA_VOCAB_PRE_TYPE_JAIS));
|
|
3623
3623
|
rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_TEKKEN", INT2NUM(LLAMA_VOCAB_PRE_TYPE_TEKKEN));
|
|
3624
|
+
rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_SMOLLM", INT2NUM(LLAMA_VOCAB_PRE_TYPE_SMOLLM));
|
|
3625
|
+
rb_define_const(rb_mLLaMACpp, "LLAMA_VOCAB_PRE_TYPE_CODESHELL", INT2NUM(LLAMA_VOCAB_PRE_TYPE_CODESHELL));
|
|
3624
3626
|
|
|
3625
3627
|
rb_define_const(rb_mLLaMACpp, "LLAMA_TOKEN_TYPE_UNDEFINED", INT2NUM(LLAMA_TOKEN_TYPE_UNDEFINED));
|
|
3626
3628
|
rb_define_const(rb_mLLaMACpp, "LLAMA_TOKEN_TYPE_NORMAL", INT2NUM(LLAMA_TOKEN_TYPE_NORMAL));
|
data/lib/llama_cpp/version.rb
CHANGED
|
@@ -3,8 +3,8 @@
|
|
|
3
3
|
# llama_cpp.rb provides Ruby bindings for the llama.cpp.
|
|
4
4
|
module LLaMACpp
|
|
5
5
|
# The version of llama_cpp.rb you install.
|
|
6
|
-
VERSION = '0.17.4'
|
|
6
|
+
VERSION = '0.17.5'
|
|
7
7
|
|
|
8
8
|
# The supported version of llama.cpp.
|
|
9
|
-
LLAMA_CPP_VERSION = 'b3436'
|
|
9
|
+
LLAMA_CPP_VERSION = 'b3482'
|
|
10
10
|
end
|
data/sig/llama_cpp.rbs
CHANGED
|
@@ -38,6 +38,8 @@ module LLaMACpp
|
|
|
38
38
|
LLAMA_VOCAB_PRE_TYPE_VIKING: Integer
|
|
39
39
|
LLAMA_VOCAB_PRE_TYPE_JAIS: Integer
|
|
40
40
|
LLAMA_VOCAB_PRE_TYPE_TEKKEN: Integer
|
|
41
|
+
LLAMA_VOCAB_PRE_TYPE_SMOLLM: Integer
|
|
42
|
+
LLAMA_VOCAB_PRE_TYPE_CODESHELL: Integer
|
|
41
43
|
|
|
42
44
|
LLAMA_TOKEN_ATTR_UNDEFINED: Integer
|
|
43
45
|
LLAMA_TOKEN_ATTR_UNKNOWN: Integer
|
metadata
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: llama_cpp
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.17.4
|
|
4
|
+
version: 0.17.5
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- yoshoku
|
|
8
8
|
autorequire:
|
|
9
9
|
bindir: exe
|
|
10
10
|
cert_chain: []
|
|
11
|
-
date: 2024-07-27 00:00:00.000000000 Z
|
|
11
|
+
date: 2024-08-03 00:00:00.000000000 Z
|
|
12
12
|
dependencies: []
|
|
13
13
|
description: llama_cpp.rb provides Ruby bindings for the llama.cpp.
|
|
14
14
|
email:
|