llama_cpp 0.1.2 → 0.1.4
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +13 -7
- data/ext/llama_cpp/extconf.rb +1 -2
- data/ext/llama_cpp/src/ggml-opencl.cpp +1028 -0
- data/ext/llama_cpp/src/ggml-opencl.h +8 -10
- data/ext/llama_cpp/src/ggml.c +568 -57
- data/ext/llama_cpp/src/ggml.h +21 -2
- data/ext/llama_cpp/src/llama.cpp +37 -2
- data/ext/llama_cpp/src/llama.h +5 -0
- data/lib/llama_cpp/version.rb +2 -2
- metadata +3 -3
- data/ext/llama_cpp/src/ggml-opencl.c +0 -474
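
The headline change in this manifest is the OpenCL backend of the bundled llama.cpp: ggml-opencl.c is removed and replaced by a C++ rewrite, ggml-opencl.cpp. The two-line change in data/lib/llama_cpp/version.rb is not included in this excerpt; the sketch below is a hypothetical reconstruction of those constants after the release — the module and constant names are assumptions about the gem's layout, and the values come from the diff title and the 0.1.4 CHANGELOG entry further down, not from the diff body itself.

```ruby
# Hypothetical reconstruction (not part of this diff excerpt) of
# data/lib/llama_cpp/version.rb after the release. Module and constant names are
# assumptions; the values are taken from the title and the 0.1.4 CHANGELOG entry.
module LLaMACpp
  # Gem version released by this diff.
  VERSION = '0.1.4'

  # Revision of the bundled llama.cpp sources.
  LLAMA_CPP_VERSION = 'master-ffb06a3'
end
```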
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f08992d10701b3ac0ab87c32d8a28d7f81101a4896e3300c461d7015f6486814
+  data.tar.gz: 38fd72fe9cdd596f7878ef902ddf8ec8e36954bbac50388fe8ef8437a93bfe29
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e3d024db508be6cbe7e644c4a9295da97742b45f921c9c5d64a7f4b4eb6be624e79f4c63d39c226566dbb9676215ae3b986828095a185cb2069547a12cf651a0
+  data.tar.gz: d884334f2d77a7204f0bc96c037fa86ee6fdf3f2879c5c5bd721be336dc743a0034733fc566c114bc6f22e620e5d79ccd4c67b6ded7d929d1949315b31445701
data/CHANGELOG.md
CHANGED
@@ -1,21 +1,27 @@
-## [
+## [[0.1.4](https://github.com/yoshoku/llama_cpp.rb/compare/v0.1.3...v0.1.4)] - 2023-06-03
+
+- Bump bundled llama.cpp from master-66874d4 to master-ffb06a3.
+
+## [[0.1.3](https://github.com/yoshoku/llama_cpp.rb/compare/v0.1.2...v0.1.3)] - 2023-05-27
+
+- Bump bundled llama.cpp from master-265db98 to master-66874d4.

 ## [[0.1.2](https://github.com/yoshoku/llama_cpp.rb/compare/v0.1.1...v0.1.2)] - 2023-05-22

 **Breaking Changes**

-- Bump bundled llama.cpp from master-6986c78 to master-265db98
-- bump LLAMA_FILE_VERSION to 3
+- Bump bundled llama.cpp from master-6986c78 to master-265db98.
+- bump LLAMA_FILE_VERSION to 3.

 ## [[0.1.1](https://github.com/yoshoku/llama_cpp.rb/compare/v0.1.0...v0.1.1)] - 2023-05-21

-- Add load_session_file method to Context
-- Add save_session_file method to Context
+- Add load_session_file method to Context.
+- Add save_session_file method to Context.

 **Breaking Changes**

-- Bump bundled llama.cpp from master-173d0e6 to master-6986c78
-- bump LLAMA_FILE_VERSION to 2
+- Bump bundled llama.cpp from master-173d0e6 to master-6986c78.
+- bump LLAMA_FILE_VERSION to 2.

 ## [[0.1.0](https://github.com/yoshoku/llama_cpp.rb/compare/v0.0.7...v0.1.0)] - 2023-05-20

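
The 0.1.1 entry above adds load_session_file and save_session_file to Context; these wrap llama.cpp's session-file functions, which cache an evaluated prompt state on disk so a later run can resume without re-evaluating the prompt. Below is a hedged usage sketch — the constructor shape and every keyword-argument name (model_path:, params:, session_path:, session_tokens:) are assumptions about the gem's API around this release, not something shown in this diff.

```ruby
# Hedged sketch of the Context session-file methods noted in the 0.1.1 entry.
# Constructor shape and keyword names are assumptions, not taken from the diff.
require 'llama_cpp'

params  = LLaMACpp::ContextParams.new
context = LLaMACpp::Context.new(model_path: 'models/7B/ggml-model-q4_0.bin', params: params)

# Token ids already evaluated for a prompt (placeholder values for illustration only).
prompt_tokens = [1, 15043, 3186]

# Persist the evaluated state so a later run can skip re-evaluating the prompt.
context.save_session_file(session_path: 'prompt.session', session_tokens: prompt_tokens)

# In a later run, restore the cached state before continuing generation.
context.load_session_file(session_path: 'prompt.session')
```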
data/ext/llama_cpp/extconf.rb
CHANGED
@@ -5,8 +5,7 @@ require 'mkmf'
 abort 'libstdc++ is not found.' unless have_library('stdc++')

 $srcs = %w[ggml.c llama.cpp llama_cpp.cpp]
-$srcs << 'ggml-opencl.
-
+$srcs << 'ggml-opencl.cpp' if with_config('clblast')
 $CFLAGS << ' -w'
 $CXXFLAGS << ' -std=c++11'
 $INCFLAGS << ' -I$(srcdir)/src'
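
The extconf.rb change swaps the optional OpenCL source from ggml-opencl.c to the new ggml-opencl.cpp, still gated on with_config('clblast'). In mkmf, that guard is driven by a build flag: running `ruby extconf.rb --with-clblast`, or `gem install llama_cpp -- --with-clblast`, makes with_config('clblast') truthy and pulls the rewritten backend into the build. The sketch below is a minimal illustration of that pattern rather than the gem's actual extconf.rb; the -DGGML_USE_CLBLAST define, the have_library('clblast') check, and the create_makefile target are assumptions about what a CLBlast-enabled build would also need.

```ruby
# Minimal mkmf sketch of the with_config('clblast') gate; everything beyond the
# guarded source line is an assumption, not taken from this diff.
require 'mkmf'

$srcs = %w[ggml.c llama.cpp llama_cpp.cpp]

if with_config('clblast')
  # Pull in the C++ OpenCL backend that replaces ggml-opencl.c in this release.
  $srcs << 'ggml-opencl.cpp'
  # Assumption: define the macro llama.cpp uses for CLBlast builds and link the library.
  $CFLAGS << ' -DGGML_USE_CLBLAST'
  abort 'CLBlast is not found.' unless have_library('clblast')
end

create_makefile('llama_cpp/llama_cpp')
```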